1 /* Tree inlining.
2    Copyright (C) 2001-2019 Free Software Foundation, Inc.
3    Contributed by Alexandre Oliva <aoliva@redhat.com>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "params.h"
57 #include "value-prof.h"
58 #include "cfgloop.h"
59 #include "builtins.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "sreal.h"
63 #include "tree-cfgcleanup.h"
64 
65 /* I'm not real happy about this, but we need to handle gimple and
66    non-gimple trees.  */
67 
68 /* Inlining, Cloning, Versioning, Parallelization
69 
70    Inlining: a function body is duplicated, but the PARM_DECLs are
71    remapped into VAR_DECLs, and non-void RETURN_EXPRs become
72    MODIFY_EXPRs that store to a dedicated returned-value variable.
73    The duplicated eh_region info of the copy will later be appended
74    to the info for the caller; the eh_region info in copied throwing
75    statements and RESX statements are adjusted accordingly.
76 
77    Cloning: (only in C++) We have one body for a con/de/structor, and
78    multiple function decls, each with a unique parameter list.
79    Duplicate the body, using the given decl map; some parameters
80    will become constants (like 0 or 1).
81 
82    Versioning: a function body is duplicated, and the result is a new
83    function rather than being inserted into blocks of an existing
84    function as with inlining.  Some parameters will become constants.
85 
86    Parallelization: a region of a function is duplicated resulting in
87    a new function.  Variables may be replaced with complex expressions
88    to enable shared variable semantics.
89 
90    All of these will simultaneously look up any callgraph edges.  If
91    we're going to inline the duplicated function body, and the given
92    function has some cloned callgraph nodes (one for each place this
93    function will be inlined) those callgraph edges will be duplicated.
94    If we're cloning the body, those callgraph edges will be
95    updated to point into the new body.  (Note that the original
96    callgraph node and edge list will not be altered.)
97 
98    See the CALL_EXPR handling case in copy_tree_body_r ().  */
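
/* As a rough, hypothetical sketch of the inlining transformation (the
   identifiers below are made up; the real work is done statement by
   statement on GIMPLE by the routines in this file): given

     int callee (int x) { return x + 1; }
     int caller (void)  { return callee (41); }

   duplicating CALLEE's body into CALLER remaps the PARM_DECL X to a
   local VAR_DECL initialized from the argument and turns the
   RETURN_EXPR into an assignment to a returned-value variable,
   conceptually:

     x.0 = 41;
     retval.1 = x.0 + 1;
     ... uses of the call's result now read retval.1 ...  */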
99 
100 /* To Do:
101 
102    o In order to make inlining-on-trees work, we pessimized
103      function-local static constants.  In particular, they are now
104      always output, even when not addressed.  Fix this by treating
105      function-local static constants just like global static
106      constants; the back-end already knows not to output them if they
107      are not needed.
108 
109    o Provide heuristics to clamp inlining of recursive template
110      calls?  */
111 
112 
113 /* Weights that estimate_num_insns uses to estimate the size of the
114    produced code.  */
115 
116 eni_weights eni_size_weights;
117 
118 /* Weights that estimate_num_insns uses to estimate the time necessary
119    to execute the produced code.  */
120 
121 eni_weights eni_time_weights;
122 
123 /* Prototypes.  */
124 
125 static tree declare_return_variable (copy_body_data *, tree, tree,
126 				     basic_block);
127 static void remap_block (tree *, copy_body_data *);
128 static void copy_bind_expr (tree *, int *, copy_body_data *);
129 static void declare_inline_vars (tree, tree);
130 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
131 static void prepend_lexical_block (tree current_block, tree new_block);
132 static tree copy_decl_to_var (tree, copy_body_data *);
133 static tree copy_result_decl_to_var (tree, copy_body_data *);
134 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
135 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
136 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
137 
138 /* Insert a tree->tree mapping for ID.  Although the name suggests
139    that the trees should be variables, it is used for more than that.  */
140 
141 void
142 insert_decl_map (copy_body_data *id, tree key, tree value)
143 {
144   id->decl_map->put (key, value);
145 
146   /* Always insert an identity map as well.  If we see this same new
147      node again, we won't want to duplicate it a second time.  */
148   if (key != value)
149     id->decl_map->put (value, value);
150 }
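
/* For example (with hypothetical decls), inserting a mapping from a source
   PARM_DECL P to its copy V records both P -> V and the identity V -> V,
   so that if V itself is walked again it simply maps to itself instead of
   being duplicated a second time.  */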
151 
152 /* Insert a tree->tree mapping for ID.  This is only used for
153    variables.  */
154 
155 static void
156 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
157 {
158   if (!gimple_in_ssa_p (id->src_cfun))
159     return;
160 
161   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
162     return;
163 
164   if (!target_for_debug_bind (key))
165     return;
166 
167   gcc_assert (TREE_CODE (key) == PARM_DECL);
168   gcc_assert (VAR_P (value));
169 
170   if (!id->debug_map)
171     id->debug_map = new hash_map<tree, tree>;
172 
173   id->debug_map->put (key, value);
174 }
175 
176 /* If nonzero, we're remapping the contents of inlined debug
177    statements.  If negative, an error has occurred, such as a
178    reference to a variable that isn't available in the inlined
179    context.  */
180 static int processing_debug_stmt = 0;
181 
182 /* Construct new SSA name for old NAME. ID is the inline context.  */
183 
184 static tree
185 remap_ssa_name (tree name, copy_body_data *id)
186 {
187   tree new_tree, var;
188   tree *n;
189 
190   gcc_assert (TREE_CODE (name) == SSA_NAME);
191 
192   n = id->decl_map->get (name);
193   if (n)
194     return unshare_expr (*n);
195 
196   if (processing_debug_stmt)
197     {
198       if (SSA_NAME_IS_DEFAULT_DEF (name)
199 	  && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
200 	  && id->entry_bb == NULL
201 	  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
202 	{
203 	  tree vexpr = make_node (DEBUG_EXPR_DECL);
204 	  gimple *def_temp;
205 	  gimple_stmt_iterator gsi;
206 	  tree val = SSA_NAME_VAR (name);
207 
208 	  n = id->decl_map->get (val);
209 	  if (n != NULL)
210 	    val = *n;
211 	  if (TREE_CODE (val) != PARM_DECL
212 	      && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
213 	    {
214 	      processing_debug_stmt = -1;
215 	      return name;
216 	    }
217 	  n = id->decl_map->get (val);
218 	  if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
219 	    return *n;
220 	  def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
221 	  DECL_ARTIFICIAL (vexpr) = 1;
222 	  TREE_TYPE (vexpr) = TREE_TYPE (name);
223 	  SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
224 	  gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
225 	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
226 	  insert_decl_map (id, val, vexpr);
227 	  return vexpr;
228 	}
229 
230       processing_debug_stmt = -1;
231       return name;
232     }
233 
234   /* Remap anonymous SSA names or SSA names of anonymous decls.  */
235   var = SSA_NAME_VAR (name);
236   if (!var
237       || (!SSA_NAME_IS_DEFAULT_DEF (name)
238 	  && VAR_P (var)
239 	  && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
240 	  && DECL_ARTIFICIAL (var)
241 	  && DECL_IGNORED_P (var)
242 	  && !DECL_NAME (var)))
243     {
244       struct ptr_info_def *pi;
245       new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
246       if (!var && SSA_NAME_IDENTIFIER (name))
247 	SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
248       insert_decl_map (id, name, new_tree);
249       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
250 	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
251       /* At least IPA points-to info can be directly transferred.  */
252       if (id->src_cfun->gimple_df
253 	  && id->src_cfun->gimple_df->ipa_pta
254 	  && POINTER_TYPE_P (TREE_TYPE (name))
255 	  && (pi = SSA_NAME_PTR_INFO (name))
256 	  && !pi->pt.anything)
257 	{
258 	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
259 	  new_pi->pt = pi->pt;
260 	}
261       /* So can range-info.  */
262       if (!POINTER_TYPE_P (TREE_TYPE (name))
263 	  && SSA_NAME_RANGE_INFO (name))
264 	duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
265 				       SSA_NAME_RANGE_INFO (name));
266       return new_tree;
267     }
268 
269   /* Do not set DEF_STMT yet, as the statement has not been copied yet.
270      We do that in copy_bb.  */
271   new_tree = remap_decl (var, id);
272 
273   /* We might've substituted a constant or another SSA_NAME for
274      the variable.
275 
276      Replace the SSA name representing RESULT_DECL by the variable during
277      inlining: this saves us from needing to introduce a PHI node in case
278      the return value is only partly initialized.  */
279   if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
280       && (!SSA_NAME_VAR (name)
281 	  || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
282 	  || !id->transform_return_to_modify))
283     {
284       struct ptr_info_def *pi;
285       new_tree = make_ssa_name (new_tree);
286       insert_decl_map (id, name, new_tree);
287       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
288 	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
289       /* At least IPA points-to info can be directly transferred.  */
290       if (id->src_cfun->gimple_df
291 	  && id->src_cfun->gimple_df->ipa_pta
292 	  && POINTER_TYPE_P (TREE_TYPE (name))
293 	  && (pi = SSA_NAME_PTR_INFO (name))
294 	  && !pi->pt.anything)
295 	{
296 	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
297 	  new_pi->pt = pi->pt;
298 	}
299       /* So can range-info.  */
300       if (!POINTER_TYPE_P (TREE_TYPE (name))
301 	  && SSA_NAME_RANGE_INFO (name))
302 	duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
303 				       SSA_NAME_RANGE_INFO (name));
304       if (SSA_NAME_IS_DEFAULT_DEF (name))
305 	{
306 	  /* By inlining a function that has an uninitialized variable, we might
307 	     extend its lifetime (the variable might get reused).  This causes
308 	     an ICE if we end up extending the lifetime of an SSA name across
309 	     an abnormal edge, and it also increases register pressure.
310 
311 	     We simply initialize all uninitialized vars to 0, except
312 	     when we are inlining into the very first BB.  We can avoid
313 	     this for all BBs that are not inside strongly connected
314 	     regions of the CFG, but this is expensive to test.  */
315 	  if (id->entry_bb
316 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
317 	      && (!SSA_NAME_VAR (name)
318 		  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
319 	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
320 					     0)->dest
321 		  || EDGE_COUNT (id->entry_bb->preds) != 1))
322 	    {
323 	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
324 	      gimple *init_stmt;
325 	      tree zero = build_zero_cst (TREE_TYPE (new_tree));
326 
327 	      init_stmt = gimple_build_assign (new_tree, zero);
328 	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
329 	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
330 	    }
331 	  else
332 	    {
333 	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
334 	      set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
335 	    }
336 	}
337     }
338   else
339     insert_decl_map (id, name, new_tree);
340   return new_tree;
341 }
342 
343 /* Remap DECL during the copying of the BLOCK tree for the function.  */
344 
345 tree
346 remap_decl (tree decl, copy_body_data *id)
347 {
348   tree *n;
349 
350   /* We only remap local variables in the current function.  */
351 
352   /* See if we have remapped this declaration.  */
353 
354   n = id->decl_map->get (decl);
355 
356   if (!n && processing_debug_stmt)
357     {
358       processing_debug_stmt = -1;
359       return decl;
360     }
361 
362   /* When remapping a type within copy_gimple_seq_and_replace_locals, all
363      necessary DECLs have already been remapped and we do not want to duplicate
364      a decl coming from outside of the sequence we are copying.  */
365   if (!n
366       && id->prevent_decl_creation_for_types
367       && id->remapping_type_depth > 0
368       && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
369     return decl;
370 
371   /* If we didn't already have an equivalent for this declaration, create one
372      now.  */
373   if (!n)
374     {
375       /* Make a copy of the variable or label.  */
376       tree t = id->copy_decl (decl, id);
377 
378       /* Remember it, so that if we encounter this local entity again
379 	 we can reuse this copy.  Do this early because remap_type may
380 	 need this decl for TYPE_STUB_DECL.  */
381       insert_decl_map (id, decl, t);
382 
383       if (!DECL_P (t))
384 	return t;
385 
386       /* Remap types, if necessary.  */
387       TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
388       if (TREE_CODE (t) == TYPE_DECL)
389 	{
390 	  DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
391 
392 	  /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
393 	     which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
394 	     is not set on the TYPE_DECL, for example in LTO mode.  */
395 	  if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
396 	    {
397 	      tree x = build_variant_type_copy (TREE_TYPE (t));
398 	      TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
399 	      TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
400 	      DECL_ORIGINAL_TYPE (t) = x;
401 	    }
402 	}
403 
404       /* Remap sizes as necessary.  */
405       walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
406       walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
407 
408       /* If fields, do likewise for offset and qualifier.  */
409       if (TREE_CODE (t) == FIELD_DECL)
410 	{
411 	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
412 	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
413 	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
414 	}
415 
416       return t;
417     }
418 
419   if (id->do_not_unshare)
420     return *n;
421   else
422     return unshare_expr (*n);
423 }
424 
425 static tree
426 remap_type_1 (tree type, copy_body_data *id)
427 {
428   tree new_tree, t;
429 
430   /* We do need a copy.  Build and register it now.  If this is a pointer or
431      reference type, remap the designated type and make a new pointer or
432      reference type.  */
433   if (TREE_CODE (type) == POINTER_TYPE)
434     {
435       new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
436 					 TYPE_MODE (type),
437 					 TYPE_REF_CAN_ALIAS_ALL (type));
438       if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
439 	new_tree = build_type_attribute_qual_variant (new_tree,
440 						      TYPE_ATTRIBUTES (type),
441 						      TYPE_QUALS (type));
442       insert_decl_map (id, type, new_tree);
443       return new_tree;
444     }
445   else if (TREE_CODE (type) == REFERENCE_TYPE)
446     {
447       new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
448 					    TYPE_MODE (type),
449 					    TYPE_REF_CAN_ALIAS_ALL (type));
450       if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
451 	new_tree = build_type_attribute_qual_variant (new_tree,
452 						      TYPE_ATTRIBUTES (type),
453 						      TYPE_QUALS (type));
454       insert_decl_map (id, type, new_tree);
455       return new_tree;
456     }
457   else
458     new_tree = copy_node (type);
459 
460   insert_decl_map (id, type, new_tree);
461 
462   /* This is a new type, not a copy of an old type.  Need to reassociate
463      variants.  We can handle everything except the main variant lazily.  */
464   t = TYPE_MAIN_VARIANT (type);
465   if (type != t)
466     {
467       t = remap_type (t, id);
468       TYPE_MAIN_VARIANT (new_tree) = t;
469       TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
470       TYPE_NEXT_VARIANT (t) = new_tree;
471     }
472   else
473     {
474       TYPE_MAIN_VARIANT (new_tree) = new_tree;
475       TYPE_NEXT_VARIANT (new_tree) = NULL;
476     }
477 
478   if (TYPE_STUB_DECL (type))
479     TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
480 
481   /* Lazily create pointer and reference types.  */
482   TYPE_POINTER_TO (new_tree) = NULL;
483   TYPE_REFERENCE_TO (new_tree) = NULL;
484 
485   /* Copy all types that may contain references to local variables; be sure to
486      preserve sharing between a type and its main variant when possible.  */
487   switch (TREE_CODE (new_tree))
488     {
489     case INTEGER_TYPE:
490     case REAL_TYPE:
491     case FIXED_POINT_TYPE:
492     case ENUMERAL_TYPE:
493     case BOOLEAN_TYPE:
494       if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
495 	{
496 	  gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
497 	  gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
498 
499 	  TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
500 	  TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
501 	}
502       else
503 	{
504 	  t = TYPE_MIN_VALUE (new_tree);
505 	  if (t && TREE_CODE (t) != INTEGER_CST)
506 	    walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
507 
508 	  t = TYPE_MAX_VALUE (new_tree);
509 	  if (t && TREE_CODE (t) != INTEGER_CST)
510 	    walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
511 	}
512       return new_tree;
513 
514     case FUNCTION_TYPE:
515       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
516 	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
517 	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
518       else
519         TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
520       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
521 	  && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
522 	TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
523       else
524         walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
525       return new_tree;
526 
527     case ARRAY_TYPE:
528       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
529 	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
530 	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
531       else
532 	TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
533 
534       if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
535 	{
536 	  gcc_checking_assert (TYPE_DOMAIN (type)
537 			       == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
538 	  TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
539 	}
540       else
541         {
542 	  TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
543 	  /* For array bounds where we have decided not to copy over the bounds
544 	     variable because it isn't used in the OpenMP/OpenACC region, change
545 	     them to an uninitialized VAR_DECL temporary.  */
546 	  if (id->adjust_array_error_bounds
547 	      && TYPE_DOMAIN (new_tree)
548 	      && TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
549 	      && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
550 	    {
551 	      tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
552 	      DECL_ATTRIBUTES (v)
553 		= tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
554 			     DECL_ATTRIBUTES (v));
555 	      TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
556 	    }
557         }
558       break;
559 
560     case RECORD_TYPE:
561     case UNION_TYPE:
562     case QUAL_UNION_TYPE:
563       if (TYPE_MAIN_VARIANT (type) != type
564 	  && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
565 	TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
566       else
567 	{
568 	  tree f, nf = NULL;
569 
570 	  for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
571 	    {
572 	      t = remap_decl (f, id);
573 	      DECL_CONTEXT (t) = new_tree;
574 	      DECL_CHAIN (t) = nf;
575 	      nf = t;
576 	    }
577 	  TYPE_FIELDS (new_tree) = nreverse (nf);
578 	}
579       break;
580 
581     case OFFSET_TYPE:
582     default:
583       /* Shouldn't have been thought variable sized.  */
584       gcc_unreachable ();
585     }
586 
587   /* All variants of the type share the same size, so use the already remapped data.  */
588   if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
589     {
590       tree s = TYPE_SIZE (type);
591       tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
592       tree su = TYPE_SIZE_UNIT (type);
593       tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
594       gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
595 			    && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
596 			   || s == mvs);
597       gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
598 			    && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
599 			   || su == mvsu);
600       TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
601       TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
602     }
603   else
604     {
605       walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
606       walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
607     }
608 
609   return new_tree;
610 }
611 
612 /* Helper function for remap_type_2, called through walk_tree.  */
613 
614 static tree
615 remap_type_3 (tree *tp, int *walk_subtrees, void *data)
616 {
617   copy_body_data *id = (copy_body_data *) data;
618 
619   if (TYPE_P (*tp))
620     *walk_subtrees = 0;
621 
622   else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
623     return *tp;
624 
625   return NULL_TREE;
626 }
627 
628 /* Return true if TYPE needs to be remapped because remap_decl on any
629    needed embedded decl returns something other than that decl.  */
630 
631 static bool
632 remap_type_2 (tree type, copy_body_data *id)
633 {
634   tree t;
635 
636 #define RETURN_TRUE_IF_VAR(T) \
637   do								\
638     {								\
639       tree _t = (T);						\
640       if (_t)							\
641 	{							\
642 	  if (DECL_P (_t) && remap_decl (_t, id) != _t)		\
643 	    return true;					\
644 	  if (!TYPE_SIZES_GIMPLIFIED (type)			\
645 	      && walk_tree (&_t, remap_type_3, id, NULL))	\
646 	    return true;					\
647 	}							\
648     }								\
649   while (0)
650 
651   switch (TREE_CODE (type))
652     {
653     case POINTER_TYPE:
654     case REFERENCE_TYPE:
655     case FUNCTION_TYPE:
656     case METHOD_TYPE:
657       return remap_type_2 (TREE_TYPE (type), id);
658 
659     case INTEGER_TYPE:
660     case REAL_TYPE:
661     case FIXED_POINT_TYPE:
662     case ENUMERAL_TYPE:
663     case BOOLEAN_TYPE:
664       RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
665       RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
666       return false;
667 
668     case ARRAY_TYPE:
669       if (remap_type_2 (TREE_TYPE (type), id)
670 	  || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
671 	return true;
672       break;
673 
674     case RECORD_TYPE:
675     case UNION_TYPE:
676     case QUAL_UNION_TYPE:
677       for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
678 	if (TREE_CODE (t) == FIELD_DECL)
679 	  {
680 	    RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
681 	    RETURN_TRUE_IF_VAR (DECL_SIZE (t));
682 	    RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
683 	    if (TREE_CODE (type) == QUAL_UNION_TYPE)
684 	      RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
685 	  }
686       break;
687 
688     default:
689       return false;
690     }
691 
692   RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
693   RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
694   return false;
695 #undef RETURN_TRUE_IF_VAR
696 }
697 
698 tree
699 remap_type (tree type, copy_body_data *id)
700 {
701   tree *node;
702   tree tmp;
703 
704   if (type == NULL)
705     return type;
706 
707   /* See if we have remapped this type.  */
708   node = id->decl_map->get (type);
709   if (node)
710     return *node;
711 
712   /* The type only needs remapping if it's variably modified.  */
713   if (! variably_modified_type_p (type, id->src_fn)
714       /* Don't remap if copy_decl method doesn't always return a new
715 	 decl and for all embedded decls returns the passed in decl.  */
716       || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
717     {
718       insert_decl_map (id, type, type);
719       return type;
720     }
721 
722   id->remapping_type_depth++;
723   tmp = remap_type_1 (type, id);
724   id->remapping_type_depth--;
725 
726   return tmp;
727 }
728 
729 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */
730 
731 static bool
732 can_be_nonlocal (tree decl, copy_body_data *id)
733 {
734   /* We cannot duplicate function decls.  */
735   if (TREE_CODE (decl) == FUNCTION_DECL)
736     return true;
737 
738   /* Local static vars must be non-local or we get multiple declaration
739      problems.  */
740   if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
741     return true;
742 
743   return false;
744 }
745 
746 static tree
747 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
748 	     copy_body_data *id)
749 {
750   tree old_var;
751   tree new_decls = NULL_TREE;
752 
753   /* Remap its variables.  */
754   for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
755     {
756       tree new_var;
757 
758       if (can_be_nonlocal (old_var, id))
759 	{
760 	  /* We need to add this variable to the local decls as otherwise
761 	     nothing else will do so.  */
762 	  if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
763 	    add_local_decl (cfun, old_var);
764 	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
765 	      && !DECL_IGNORED_P (old_var)
766 	      && nonlocalized_list)
767 	    vec_safe_push (*nonlocalized_list, old_var);
768 	  continue;
769 	}
770 
771       /* Remap the variable.  */
772       new_var = remap_decl (old_var, id);
773 
774       /* If we didn't remap this variable, we can't mess with its
775 	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
776 	 already declared somewhere else, so don't declare it here.  */
777 
778       if (new_var == id->retvar)
779 	;
780       else if (!new_var)
781         {
782 	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
783 	      && !DECL_IGNORED_P (old_var)
784 	      && nonlocalized_list)
785 	    vec_safe_push (*nonlocalized_list, old_var);
786 	}
787       else
788 	{
789 	  gcc_assert (DECL_P (new_var));
790 	  DECL_CHAIN (new_var) = new_decls;
791 	  new_decls = new_var;
792 
793 	  /* Also copy value-expressions.  */
794 	  if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
795 	    {
796 	      tree tem = DECL_VALUE_EXPR (new_var);
797 	      bool old_regimplify = id->regimplify;
798 	      id->remapping_type_depth++;
799 	      walk_tree (&tem, copy_tree_body_r, id, NULL);
800 	      id->remapping_type_depth--;
801 	      id->regimplify = old_regimplify;
802 	      SET_DECL_VALUE_EXPR (new_var, tem);
803 	    }
804 	}
805     }
806 
807   return nreverse (new_decls);
808 }
809 
810 /* Copy the BLOCK to contain remapped versions of the variables
811    therein.  And hook the new block into the block-tree.  */
812 
813 static void
814 remap_block (tree *block, copy_body_data *id)
815 {
816   tree old_block;
817   tree new_block;
818 
819   /* Make the new block.  */
820   old_block = *block;
821   new_block = make_node (BLOCK);
822   TREE_USED (new_block) = TREE_USED (old_block);
823   BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
824   BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
825   BLOCK_NONLOCALIZED_VARS (new_block)
826     = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
827   *block = new_block;
828 
829   /* Remap its variables.  */
830   BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
831   					&BLOCK_NONLOCALIZED_VARS (new_block),
832 					id);
833 
834   if (id->transform_lang_insert_block)
835     id->transform_lang_insert_block (new_block);
836 
837   /* Remember the remapped block.  */
838   insert_decl_map (id, old_block, new_block);
839 }
840 
841 /* Copy the whole block tree and root it in id->block.  */
842 
843 static tree
844 remap_blocks (tree block, copy_body_data *id)
845 {
846   tree t;
847   tree new_tree = block;
848 
849   if (!block)
850     return NULL;
851 
852   remap_block (&new_tree, id);
853   gcc_assert (new_tree != block);
854   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
855     prepend_lexical_block (new_tree, remap_blocks (t, id));
856   /* Blocks are in arbitrary order, but make things slightly prettier and do
857      not swap order when producing a copy.  */
858   BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
859   return new_tree;
860 }
861 
862 /* Remap the block tree rooted at BLOCK to nothing.  */
863 
864 static void
865 remap_blocks_to_null (tree block, copy_body_data *id)
866 {
867   tree t;
868   insert_decl_map (id, block, NULL_TREE);
869   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
870     remap_blocks_to_null (t, id);
871 }
872 
873 /* Remap the location info pointed to by LOCUS.  */
874 
875 static location_t
876 remap_location (location_t locus, copy_body_data *id)
877 {
878   if (LOCATION_BLOCK (locus))
879     {
880       tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
881       gcc_assert (n);
882       if (*n)
883 	return set_block (locus, *n);
884     }
885 
886   locus = LOCATION_LOCUS (locus);
887 
888   if (locus != UNKNOWN_LOCATION && id->block)
889     return set_block (locus, id->block);
890 
891   return locus;
892 }
893 
894 static void
895 copy_statement_list (tree *tp)
896 {
897   tree_stmt_iterator oi, ni;
898   tree new_tree;
899 
900   new_tree = alloc_stmt_list ();
901   ni = tsi_start (new_tree);
902   oi = tsi_start (*tp);
903   TREE_TYPE (new_tree) = TREE_TYPE (*tp);
904   *tp = new_tree;
905 
906   for (; !tsi_end_p (oi); tsi_next (&oi))
907     {
908       tree stmt = tsi_stmt (oi);
909       if (TREE_CODE (stmt) == STATEMENT_LIST)
910 	/* This copy is not redundant; tsi_link_after will smash this
911 	   STATEMENT_LIST into the end of the one we're building, and we
912 	   don't want to do that with the original.  */
913 	copy_statement_list (&stmt);
914       tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
915     }
916 }
917 
918 static void
919 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
920 {
921   tree block = BIND_EXPR_BLOCK (*tp);
922   /* Copy (and replace) the statement.  */
923   copy_tree_r (tp, walk_subtrees, NULL);
924   if (block)
925     {
926       remap_block (&block, id);
927       BIND_EXPR_BLOCK (*tp) = block;
928     }
929 
930   if (BIND_EXPR_VARS (*tp))
931     /* This will remap a lot of the same decls again, but this should be
932        harmless.  */
933     BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
934 }
935 
936 
937 /* Create a new gimple_seq by remapping all the statements in BODY
938    using the inlining information in ID.  */
939 
940 static gimple_seq
941 remap_gimple_seq (gimple_seq body, copy_body_data *id)
942 {
943   gimple_stmt_iterator si;
944   gimple_seq new_body = NULL;
945 
946   for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
947     {
948       gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
949       gimple_seq_add_seq (&new_body, new_stmts);
950     }
951 
952   return new_body;
953 }
954 
955 
956 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
957    block using the mapping information in ID.  */
958 
959 static gimple *
960 copy_gimple_bind (gbind *stmt, copy_body_data *id)
961 {
962   gimple *new_bind;
963   tree new_block, new_vars;
964   gimple_seq body, new_body;
965 
966   /* Copy the statement.  Note that we purposely don't use copy_stmt
967      here because we need to remap statements as we copy.  */
968   body = gimple_bind_body (stmt);
969   new_body = remap_gimple_seq (body, id);
970 
971   new_block = gimple_bind_block (stmt);
972   if (new_block)
973     remap_block (&new_block, id);
974 
975   /* This will remap a lot of the same decls again, but this should be
976      harmless.  */
977   new_vars = gimple_bind_vars (stmt);
978   if (new_vars)
979     new_vars = remap_decls (new_vars, NULL, id);
980 
981   new_bind = gimple_build_bind (new_vars, new_body, new_block);
982 
983   return new_bind;
984 }
985 
986 /* Return true if DECL is a parameter or a SSA_NAME for a parameter.  */
987 
988 static bool
989 is_parm (tree decl)
990 {
991   if (TREE_CODE (decl) == SSA_NAME)
992     {
993       decl = SSA_NAME_VAR (decl);
994       if (!decl)
995 	return false;
996     }
997 
998   return (TREE_CODE (decl) == PARM_DECL);
999 }
1000 
1001 /* Remap the dependence CLIQUE from the source to the destination function
1002    as specified in ID.  */
1003 
1004 static unsigned short
1005 remap_dependence_clique (copy_body_data *id, unsigned short clique)
1006 {
1007   if (clique == 0 || processing_debug_stmt)
1008     return 0;
1009   if (!id->dependence_map)
1010     id->dependence_map = new hash_map<dependence_hash, unsigned short>;
1011   bool existed;
1012   unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
1013   if (!existed)
1014     {
1015       /* Clique 1 is reserved for local ones set by PTA.  */
1016       if (cfun->last_clique == 0)
1017 	cfun->last_clique = 1;
1018       newc = ++cfun->last_clique;
1019     }
1020   return newc;
1021 }
1022 
1023 /* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
1024    'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
1025    WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
1026    recursing into the child nodes of *TP.  */
1027 
1028 static tree
1029 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
1030 {
1031   struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
1032   copy_body_data *id = (copy_body_data *) wi_p->info;
1033   tree fn = id->src_fn;
1034 
1035   /* For recursive invocations this is no longer the LHS itself.  */
1036   bool is_lhs = wi_p->is_lhs;
1037   wi_p->is_lhs = false;
1038 
1039   if (TREE_CODE (*tp) == SSA_NAME)
1040     {
1041       *tp = remap_ssa_name (*tp, id);
1042       *walk_subtrees = 0;
1043       if (is_lhs)
1044 	SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
1045       return NULL;
1046     }
1047   else if (auto_var_in_fn_p (*tp, fn))
1048     {
1049       /* Local variables and labels need to be replaced by equivalent
1050 	 variables.  We don't want to copy static variables; there's
1051 	 only one of those, no matter how many times we inline the
1052 	 containing function.  Similarly for globals from an outer
1053 	 function.  */
1054       tree new_decl;
1055 
1056       /* Remap the declaration.  */
1057       new_decl = remap_decl (*tp, id);
1058       gcc_assert (new_decl);
1059       /* Replace this variable with the copy.  */
1060       STRIP_TYPE_NOPS (new_decl);
1061       /* ???  The C++ frontend uses void * pointer zero to initialize
1062          any other type.  This confuses the middle-end type verification.
1063 	 As cloned bodies do not go through gimplification again the fixup
1064 	 there doesn't trigger.  */
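      /* As a hypothetical example: if a pointer variable of type
	 'struct S *' was mapped to the constant 0 of type 'void *',
	 the fold_convert below rewrites the use as (struct S *) 0 so
	 the types agree again.  */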
1065       if (TREE_CODE (new_decl) == INTEGER_CST
1066 	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
1067 	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
1068       *tp = new_decl;
1069       *walk_subtrees = 0;
1070     }
1071   else if (TREE_CODE (*tp) == STATEMENT_LIST)
1072     gcc_unreachable ();
1073   else if (TREE_CODE (*tp) == SAVE_EXPR)
1074     gcc_unreachable ();
1075   else if (TREE_CODE (*tp) == LABEL_DECL
1076 	   && (!DECL_CONTEXT (*tp)
1077 	       || decl_function_context (*tp) == id->src_fn))
1078     /* These may need to be remapped for EH handling.  */
1079     *tp = remap_decl (*tp, id);
1080   else if (TREE_CODE (*tp) == FIELD_DECL)
1081     {
1082       /* If the enclosing record type is variably_modified_type_p, the field
1083 	 has already been remapped.  Otherwise, it need not be.  */
1084       tree *n = id->decl_map->get (*tp);
1085       if (n)
1086 	*tp = *n;
1087       *walk_subtrees = 0;
1088     }
1089   else if (TYPE_P (*tp))
1090     /* Types may need remapping as well.  */
1091     *tp = remap_type (*tp, id);
1092   else if (CONSTANT_CLASS_P (*tp))
1093     {
1094       /* If this is a constant, we have to copy the node iff the type
1095 	 will be remapped.  copy_tree_r will not copy a constant.  */
1096       tree new_type = remap_type (TREE_TYPE (*tp), id);
1097 
1098       if (new_type == TREE_TYPE (*tp))
1099 	*walk_subtrees = 0;
1100 
1101       else if (TREE_CODE (*tp) == INTEGER_CST)
1102 	*tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1103       else
1104 	{
1105 	  *tp = copy_node (*tp);
1106 	  TREE_TYPE (*tp) = new_type;
1107 	}
1108     }
1109   else
1110     {
1111       /* Otherwise, just copy the node.  Note that copy_tree_r already
1112 	 knows not to copy VAR_DECLs, etc., so this is safe.  */
1113 
1114       if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1115 	{
1116 	  /* We need to re-canonicalize MEM_REFs from inline substitutions
1117 	     that can happen when a pointer argument is an ADDR_EXPR.
1118 	     Recurse here manually to allow that.  */
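	  /* Hypothetical example: if the pointer argument p_1 was substituted
	     by &a, a reference like MEM[(int *)p_1 + 4] must be rebuilt as a
	     decl-based MEM[(int *)&a + 4] with the offset re-folded, rather
	     than being left based on the substituted pointer.  */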
1119 	  tree ptr = TREE_OPERAND (*tp, 0);
1120 	  tree type = remap_type (TREE_TYPE (*tp), id);
1121 	  tree old = *tp;
1122 	  walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1123 	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1124 	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1125 	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1126 	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1127 	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
1128 	    {
1129 	      MR_DEPENDENCE_CLIQUE (*tp)
1130 	        = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1131 	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1132 	    }
1133 	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1134 	     remapped a parameter as the property might be valid only
1135 	     for the parameter itself.  */
1136 	  if (TREE_THIS_NOTRAP (old)
1137 	      && (!is_parm (TREE_OPERAND (old, 0))
1138 		  || (!id->transform_parameter && is_parm (ptr))))
1139 	    TREE_THIS_NOTRAP (*tp) = 1;
1140 	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1141 	  *walk_subtrees = 0;
1142 	  return NULL;
1143 	}
1144 
1145       /* Here is the "usual case".  Copy this tree node, and then
1146 	 tweak some special cases.  */
1147       copy_tree_r (tp, walk_subtrees, NULL);
1148 
1149       if (TREE_CODE (*tp) != OMP_CLAUSE)
1150 	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1151 
1152       if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1153 	{
1154 	  /* The copied TARGET_EXPR has never been expanded, even if the
1155 	     original node was expanded already.  */
1156 	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1157 	  TREE_OPERAND (*tp, 3) = NULL_TREE;
1158 	}
1159       else if (TREE_CODE (*tp) == ADDR_EXPR)
1160 	{
1161 	  /* Variable substitution need not be simple.  In particular,
1162 	     the MEM_REF substitution above.  Make sure that
1163 	     TREE_CONSTANT and friends are up-to-date.  */
1164 	  int invariant = is_gimple_min_invariant (*tp);
1165 	  walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1166 	  recompute_tree_invariant_for_addr_expr (*tp);
1167 
1168 	  /* If this used to be invariant, but is not any longer,
1169 	     then regimplification is probably needed.  */
1170 	  if (invariant && !is_gimple_min_invariant (*tp))
1171 	    id->regimplify = true;
1172 
1173 	  *walk_subtrees = 0;
1174 	}
1175     }
1176 
1177   /* Update the TREE_BLOCK for the cloned expr.  */
1178   if (EXPR_P (*tp))
1179     {
1180       tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1181       tree old_block = TREE_BLOCK (*tp);
1182       if (old_block)
1183 	{
1184 	  tree *n;
1185 	  n = id->decl_map->get (TREE_BLOCK (*tp));
1186 	  if (n)
1187 	    new_block = *n;
1188 	}
1189       TREE_SET_BLOCK (*tp, new_block);
1190     }
1191 
1192   /* Keep iterating.  */
1193   return NULL_TREE;
1194 }
1195 
1196 
1197 /* Called from copy_body_id via walk_tree.  DATA is really a
1198    `copy_body_data *'.  */
1199 
1200 tree
1201 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1202 {
1203   copy_body_data *id = (copy_body_data *) data;
1204   tree fn = id->src_fn;
1205   tree new_block;
1206 
1207   /* Begin by recognizing trees that we'll completely rewrite for the
1208      inlining context.  Our output for these trees is completely
1209      different from our input (e.g. RETURN_EXPR is deleted, and morphs
1210      into an edge).  Further down, we'll handle trees that get
1211      duplicated and/or tweaked.  */
1212 
1213   /* When requested, RETURN_EXPRs should be transformed to just the
1214      contained MODIFY_EXPR.  The branch semantics of the return will
1215      be handled elsewhere by manipulating the CFG rather than a statement.  */
1216   if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1217     {
1218       tree assignment = TREE_OPERAND (*tp, 0);
1219 
1220       /* If we're returning something, just turn that into an
1221 	 assignment into the equivalent of the original RESULT_DECL.
1222 	 If the "assignment" is just the result decl, the result
1223 	 decl has already been set (e.g. a recent "foo (&result_decl,
1224 	 ...)"); just toss the entire RETURN_EXPR.  */
1225       if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1226 	{
1227 	  /* Replace the RETURN_EXPR with (a copy of) the
1228 	     MODIFY_EXPR hanging underneath.  */
1229 	  *tp = copy_node (assignment);
1230 	}
1231       else /* Else the RETURN_EXPR returns no value.  */
1232 	{
1233 	  *tp = NULL;
1234 	  return (tree) (void *)1;
1235 	}
1236     }
1237   else if (TREE_CODE (*tp) == SSA_NAME)
1238     {
1239       *tp = remap_ssa_name (*tp, id);
1240       *walk_subtrees = 0;
1241       return NULL;
1242     }
1243 
1244   /* Local variables and labels need to be replaced by equivalent
1245      variables.  We don't want to copy static variables; there's only
1246      one of those, no matter how many times we inline the containing
1247      function.  Similarly for globals from an outer function.  */
1248   else if (auto_var_in_fn_p (*tp, fn))
1249     {
1250       tree new_decl;
1251 
1252       /* Remap the declaration.  */
1253       new_decl = remap_decl (*tp, id);
1254       gcc_assert (new_decl);
1255       /* Replace this variable with the copy.  */
1256       STRIP_TYPE_NOPS (new_decl);
1257       *tp = new_decl;
1258       *walk_subtrees = 0;
1259     }
1260   else if (TREE_CODE (*tp) == STATEMENT_LIST)
1261     copy_statement_list (tp);
1262   else if (TREE_CODE (*tp) == SAVE_EXPR
1263 	   || TREE_CODE (*tp) == TARGET_EXPR)
1264     remap_save_expr (tp, id->decl_map, walk_subtrees);
1265   else if (TREE_CODE (*tp) == LABEL_DECL
1266 	   && (! DECL_CONTEXT (*tp)
1267 	       || decl_function_context (*tp) == id->src_fn))
1268     /* These may need to be remapped for EH handling.  */
1269     *tp = remap_decl (*tp, id);
1270   else if (TREE_CODE (*tp) == BIND_EXPR)
1271     copy_bind_expr (tp, walk_subtrees, id);
1272   /* Types may need remapping as well.  */
1273   else if (TYPE_P (*tp))
1274     *tp = remap_type (*tp, id);
1275 
1276   /* If this is a constant, we have to copy the node iff the type will be
1277      remapped.  copy_tree_r will not copy a constant.  */
1278   else if (CONSTANT_CLASS_P (*tp))
1279     {
1280       tree new_type = remap_type (TREE_TYPE (*tp), id);
1281 
1282       if (new_type == TREE_TYPE (*tp))
1283 	*walk_subtrees = 0;
1284 
1285       else if (TREE_CODE (*tp) == INTEGER_CST)
1286 	*tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1287       else
1288 	{
1289 	  *tp = copy_node (*tp);
1290 	  TREE_TYPE (*tp) = new_type;
1291 	}
1292     }
1293 
1294   /* Otherwise, just copy the node.  Note that copy_tree_r already
1295      knows not to copy VAR_DECLs, etc., so this is safe.  */
1296   else
1297     {
1298       /* Here we handle trees that are not completely rewritten.
1299 	 First we detect some inlining-induced bogosities for
1300 	 discarding.  */
1301       if (TREE_CODE (*tp) == MODIFY_EXPR
1302 	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1303 	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1304 	{
1305 	  /* Some assignments VAR = VAR; don't generate any rtl code
1306 	     and thus don't count as variable modification.  Avoid
1307 	     keeping bogosities like 0 = 0.  */
1308 	  tree decl = TREE_OPERAND (*tp, 0), value;
1309 	  tree *n;
1310 
1311 	  n = id->decl_map->get (decl);
1312 	  if (n)
1313 	    {
1314 	      value = *n;
1315 	      STRIP_TYPE_NOPS (value);
1316 	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
1317 		{
1318 		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1319 		  return copy_tree_body_r (tp, walk_subtrees, data);
1320 		}
1321 	    }
1322 	}
1323       else if (TREE_CODE (*tp) == INDIRECT_REF)
1324 	{
1325 	  /* Get rid of *& from inline substitutions that can happen when a
1326 	     pointer argument is an ADDR_EXPR.  */
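	  /* Hypothetical example: inlining f (&x) maps the pointer parameter
	     p to &x, so a use of *p in the body would become *&x; the folding
	     below reduces that back to plain x.  */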
1327 	  tree decl = TREE_OPERAND (*tp, 0);
1328 	  tree *n = id->decl_map->get (decl);
1329 	  if (n)
1330 	    {
1331 	      /* If we happen to get an ADDR_EXPR in n->value, strip
1332 	         it manually here as we'll eventually get ADDR_EXPRs
1333 		 which lie about their types pointed to.  In this case
1334 		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1335 		 but we absolutely rely on that.  As fold_indirect_ref
1336 	         does other useful transformations, try that first, though.  */
1337 	      tree type = TREE_TYPE (*tp);
1338 	      tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1339 	      tree old = *tp;
1340 	      *tp = id->do_not_fold ? NULL : gimple_fold_indirect_ref (ptr);
1341 	      if (! *tp)
1342 	        {
1343 		  type = remap_type (type, id);
1344 		  if (TREE_CODE (ptr) == ADDR_EXPR && !id->do_not_fold)
1345 		    {
1346 		      *tp
1347 		        = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1348 		      /* ???  We should either assert here or build
1349 			 a VIEW_CONVERT_EXPR instead of blindly leaking
1350 			 incompatible types to our IL.  */
1351 		      if (! *tp)
1352 			*tp = TREE_OPERAND (ptr, 0);
1353 		    }
1354 	          else
1355 		    {
1356 	              *tp = build1 (INDIRECT_REF, type, ptr);
1357 		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1358 		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1359 		      TREE_READONLY (*tp) = TREE_READONLY (old);
1360 		      /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1361 			 have remapped a parameter as the property might be
1362 			 valid only for the parameter itself.  */
1363 		      if (TREE_THIS_NOTRAP (old)
1364 			  && (!is_parm (TREE_OPERAND (old, 0))
1365 			      || (!id->transform_parameter && is_parm (ptr))))
1366 		        TREE_THIS_NOTRAP (*tp) = 1;
1367 		    }
1368 		}
1369 	      *walk_subtrees = 0;
1370 	      return NULL;
1371 	    }
1372 	}
1373       else if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1374 	{
1375 	  /* We need to re-canonicalize MEM_REFs from inline substitutions
1376 	     that can happen when a pointer argument is an ADDR_EXPR.
1377 	     Recurse here manually to allow that.  */
1378 	  tree ptr = TREE_OPERAND (*tp, 0);
1379 	  tree type = remap_type (TREE_TYPE (*tp), id);
1380 	  tree old = *tp;
1381 	  walk_tree (&ptr, copy_tree_body_r, data, NULL);
1382 	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1383 	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1384 	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1385 	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1386 	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
1387 	    {
1388 	      MR_DEPENDENCE_CLIQUE (*tp)
1389 		= remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1390 	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1391 	    }
1392 	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1393 	     remapped a parameter as the property might be valid only
1394 	     for the parameter itself.  */
1395 	  if (TREE_THIS_NOTRAP (old)
1396 	      && (!is_parm (TREE_OPERAND (old, 0))
1397 		  || (!id->transform_parameter && is_parm (ptr))))
1398 	    TREE_THIS_NOTRAP (*tp) = 1;
1399 	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1400 	  *walk_subtrees = 0;
1401 	  return NULL;
1402 	}
1403 
1404       /* Here is the "usual case".  Copy this tree node, and then
1405 	 tweak some special cases.  */
1406       copy_tree_r (tp, walk_subtrees, NULL);
1407 
1408       /* If EXPR has a block defined, map it to the newly constructed block.
1409          When inlining we want EXPRs without a block to appear in the block
1410 	 of the function call if we are not remapping a type.  */
1411       if (EXPR_P (*tp))
1412 	{
1413 	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1414 	  if (TREE_BLOCK (*tp))
1415 	    {
1416 	      tree *n;
1417 	      n = id->decl_map->get (TREE_BLOCK (*tp));
1418 	      if (n)
1419 		new_block = *n;
1420 	    }
1421 	  TREE_SET_BLOCK (*tp, new_block);
1422 	}
1423 
1424       if (TREE_CODE (*tp) != OMP_CLAUSE)
1425 	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1426 
1427       /* The copied TARGET_EXPR has never been expanded, even if the
1428 	 original node was expanded already.  */
1429       if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1430 	{
1431 	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1432 	  TREE_OPERAND (*tp, 3) = NULL_TREE;
1433 	}
1434 
1435       /* Variable substitution need not be simple.  In particular, the
1436 	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
1437 	 and friends are up-to-date.  */
1438       else if (TREE_CODE (*tp) == ADDR_EXPR)
1439 	{
1440 	  int invariant = is_gimple_min_invariant (*tp);
1441 	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1442 
1443 	  /* Handle the case where we substituted an INDIRECT_REF
1444 	     into the operand of the ADDR_EXPR.  */
1445 	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF
1446 	      && !id->do_not_fold)
1447 	    {
1448 	      tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1449 	      if (TREE_TYPE (t) != TREE_TYPE (*tp))
1450 		t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1451 	      *tp = t;
1452 	    }
1453 	  else
1454 	    recompute_tree_invariant_for_addr_expr (*tp);
1455 
1456 	  /* If this used to be invariant, but is not any longer,
1457 	     then regimplification is probably needed.  */
1458 	  if (invariant && !is_gimple_min_invariant (*tp))
1459 	    id->regimplify = true;
1460 
1461 	  *walk_subtrees = 0;
1462 	}
1463     }
1464 
1465   /* Keep iterating.  */
1466   return NULL_TREE;
1467 }
1468 
1469 /* Helper for remap_gimple_stmt.  Given an EH region number for the
1470    source function, map that to the duplicate EH region number in
1471    the destination function.  */
1472 
1473 static int
1474 remap_eh_region_nr (int old_nr, copy_body_data *id)
1475 {
1476   eh_region old_r, new_r;
1477 
1478   old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1479   new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1480 
1481   return new_r->index;
1482 }
1483 
1484 /* Similar, but operate on INTEGER_CSTs.  */
1485 
1486 static tree
1487 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1488 {
1489   int old_nr, new_nr;
1490 
1491   old_nr = tree_to_shwi (old_t_nr);
1492   new_nr = remap_eh_region_nr (old_nr, id);
1493 
1494   return build_int_cst (integer_type_node, new_nr);
1495 }
1496 
1497 /* Helper for copy_bb.  Remap statement STMT using the inlining
1498    information in ID.  Return the new statement copy.  */
1499 
1500 static gimple_seq
1501 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1502 {
1503   gimple *copy = NULL;
1504   struct walk_stmt_info wi;
1505   bool skip_first = false;
1506   gimple_seq stmts = NULL;
1507 
1508   if (is_gimple_debug (stmt)
1509       && (gimple_debug_nonbind_marker_p (stmt)
1510 	  ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1511 	  : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1512     return NULL;
1513 
1514   /* Begin by recognizing trees that we'll completely rewrite for the
1515      inlining context.  Our output for these trees is completely
1516      different from our input (e.g. RETURN_EXPR is deleted and morphs
1517      into an edge).  Further down, we'll handle trees that get
1518      duplicated and/or tweaked.  */
1519 
1520   /* When requested, GIMPLE_RETURN should be transformed to just the
1521      contained GIMPLE_ASSIGN.  The branch semantics of the return will
1522      be handled elsewhere by manipulating the CFG rather than the
1523      statement.  */
1524   if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1525     {
1526       tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1527 
1528       /* If we're returning something, just turn that into an
1529 	 assignment to the equivalent of the original RESULT_DECL.
1530 	 If RETVAL is just the result decl, the result decl has
1531 	 already been set (e.g. a recent "foo (&result_decl, ...)");
1532 	 just toss the entire GIMPLE_RETURN.  */
1533       if (retval
1534 	  && (TREE_CODE (retval) != RESULT_DECL
1535 	      && (TREE_CODE (retval) != SSA_NAME
1536 		  || ! SSA_NAME_VAR (retval)
1537 		  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1538         {
1539 	  copy = gimple_build_assign (id->do_not_unshare
1540 				      ? id->retvar : unshare_expr (id->retvar),
1541 				      retval);
1542 	  /* id->retvar is already substituted.  Skip it on later remapping.  */
1543 	  skip_first = true;
1544 	}
1545       else
1546 	return NULL;
1547     }
1548   else if (gimple_has_substatements (stmt))
1549     {
1550       gimple_seq s1, s2;
1551 
1552       /* When cloning bodies from the C++ front end, we will be handed bodies
1553 	 in High GIMPLE form.  Handle here all the High GIMPLE statements that
1554 	 have embedded statements.  */
1555       switch (gimple_code (stmt))
1556 	{
1557 	case GIMPLE_BIND:
1558 	  copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1559 	  break;
1560 
1561 	case GIMPLE_CATCH:
1562 	  {
1563 	    gcatch *catch_stmt = as_a <gcatch *> (stmt);
1564 	    s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1565 	    copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1566 	  }
1567 	  break;
1568 
1569 	case GIMPLE_EH_FILTER:
1570 	  s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1571 	  copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1572 	  break;
1573 
1574 	case GIMPLE_TRY:
1575 	  s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1576 	  s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1577 	  copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1578 	  break;
1579 
1580 	case GIMPLE_WITH_CLEANUP_EXPR:
1581 	  s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1582 	  copy = gimple_build_wce (s1);
1583 	  break;
1584 
1585 	case GIMPLE_OMP_PARALLEL:
1586 	  {
1587 	    gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1588 	    s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1589 	    copy = gimple_build_omp_parallel
1590 	             (s1,
1591 		      gimple_omp_parallel_clauses (omp_par_stmt),
1592 		      gimple_omp_parallel_child_fn (omp_par_stmt),
1593 		      gimple_omp_parallel_data_arg (omp_par_stmt));
1594 	  }
1595 	  break;
1596 
1597 	case GIMPLE_OMP_TASK:
1598 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1599 	  copy = gimple_build_omp_task
1600 	           (s1,
1601 		    gimple_omp_task_clauses (stmt),
1602 		    gimple_omp_task_child_fn (stmt),
1603 		    gimple_omp_task_data_arg (stmt),
1604 		    gimple_omp_task_copy_fn (stmt),
1605 		    gimple_omp_task_arg_size (stmt),
1606 		    gimple_omp_task_arg_align (stmt));
1607 	  break;
1608 
1609 	case GIMPLE_OMP_FOR:
1610 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1611 	  s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1612 	  copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1613 				       gimple_omp_for_clauses (stmt),
1614 				       gimple_omp_for_collapse (stmt), s2);
1615 	  {
1616 	    size_t i;
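	    /* Copy the per-collapse-level iteration variable, bounds,
	       step and condition into the new statement.  */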
1617 	    for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1618 	      {
1619 		gimple_omp_for_set_index (copy, i,
1620 					  gimple_omp_for_index (stmt, i));
1621 		gimple_omp_for_set_initial (copy, i,
1622 					    gimple_omp_for_initial (stmt, i));
1623 		gimple_omp_for_set_final (copy, i,
1624 					  gimple_omp_for_final (stmt, i));
1625 		gimple_omp_for_set_incr (copy, i,
1626 					 gimple_omp_for_incr (stmt, i));
1627 		gimple_omp_for_set_cond (copy, i,
1628 					 gimple_omp_for_cond (stmt, i));
1629 	      }
1630 	  }
1631 	  break;
1632 
1633 	case GIMPLE_OMP_MASTER:
1634 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1635 	  copy = gimple_build_omp_master (s1);
1636 	  break;
1637 
1638 	case GIMPLE_OMP_TASKGROUP:
1639 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1640 	  copy = gimple_build_omp_taskgroup
1641 		   (s1, gimple_omp_taskgroup_clauses (stmt));
1642 	  break;
1643 
1644 	case GIMPLE_OMP_ORDERED:
1645 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1646 	  copy = gimple_build_omp_ordered
1647 		   (s1,
1648 		    gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1649 	  break;
1650 
1651 	case GIMPLE_OMP_SECTION:
1652 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1653 	  copy = gimple_build_omp_section (s1);
1654 	  break;
1655 
1656 	case GIMPLE_OMP_SECTIONS:
1657 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1658 	  copy = gimple_build_omp_sections
1659 	           (s1, gimple_omp_sections_clauses (stmt));
1660 	  break;
1661 
1662 	case GIMPLE_OMP_SINGLE:
1663 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1664 	  copy = gimple_build_omp_single
1665 	           (s1, gimple_omp_single_clauses (stmt));
1666 	  break;
1667 
1668 	case GIMPLE_OMP_TARGET:
1669 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1670 	  copy = gimple_build_omp_target
1671 		   (s1, gimple_omp_target_kind (stmt),
1672 		    gimple_omp_target_clauses (stmt));
1673 	  break;
1674 
1675 	case GIMPLE_OMP_TEAMS:
1676 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1677 	  copy = gimple_build_omp_teams
1678 		   (s1, gimple_omp_teams_clauses (stmt));
1679 	  break;
1680 
1681 	case GIMPLE_OMP_CRITICAL:
1682 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1683 	  copy = gimple_build_omp_critical (s1,
1684 					    gimple_omp_critical_name
1685 					      (as_a <gomp_critical *> (stmt)),
1686 					    gimple_omp_critical_clauses
1687 					      (as_a <gomp_critical *> (stmt)));
1688 	  break;
1689 
1690 	case GIMPLE_TRANSACTION:
1691 	  {
1692 	    gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1693 	    gtransaction *new_trans_stmt;
1694 	    s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1695 				   id);
1696 	    copy = new_trans_stmt = gimple_build_transaction (s1);
1697 	    gimple_transaction_set_subcode (new_trans_stmt,
1698 	      gimple_transaction_subcode (old_trans_stmt));
1699 	    gimple_transaction_set_label_norm (new_trans_stmt,
1700 	      gimple_transaction_label_norm (old_trans_stmt));
1701 	    gimple_transaction_set_label_uninst (new_trans_stmt,
1702 	      gimple_transaction_label_uninst (old_trans_stmt));
1703 	    gimple_transaction_set_label_over (new_trans_stmt,
1704 	      gimple_transaction_label_over (old_trans_stmt));
1705 	  }
1706 	  break;
1707 
1708 	default:
1709 	  gcc_unreachable ();
1710 	}
1711     }
1712   else
1713     {
1714       if (gimple_assign_copy_p (stmt)
1715 	  && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1716 	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1717 	{
1718 	  /* Here we handle statements that are not completely rewritten.
1719 	     First we detect some inlining-induced bogosities for
1720 	     discarding.  */
1721 
1722 	  /* Some assignments VAR = VAR; don't generate any rtl code
1723 	     and thus don't count as variable modification.  Avoid
1724 	     keeping bogosities like 0 = 0.  */
1725 	  tree decl = gimple_assign_lhs (stmt), value;
1726 	  tree *n;
1727 
1728 	  n = id->decl_map->get (decl);
1729 	  if (n)
1730 	    {
1731 	      value = *n;
1732 	      STRIP_TYPE_NOPS (value);
1733 	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
1734 		return NULL;
1735 	    }
1736 	}
1737 
1738       /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1739 	 in a block that we aren't copying during tree_function_versioning,
1740 	 just drop the clobber stmt.  */
1741       if (id->blocks_to_copy && gimple_clobber_p (stmt))
1742 	{
1743 	  tree lhs = gimple_assign_lhs (stmt);
1744 	  if (TREE_CODE (lhs) == MEM_REF
1745 	      && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1746 	    {
1747 	      gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1748 	      if (gimple_bb (def_stmt)
1749 		  && !bitmap_bit_p (id->blocks_to_copy,
1750 				    gimple_bb (def_stmt)->index))
1751 		return NULL;
1752 	    }
1753 	}
1754 
1755       if (gimple_debug_bind_p (stmt))
1756 	{
1757 	  gdebug *copy
1758 	    = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1759 				       gimple_debug_bind_get_value (stmt),
1760 				       stmt);
1761 	  if (id->reset_location)
1762 	    gimple_set_location (copy, input_location);
1763 	  id->debug_stmts.safe_push (copy);
1764 	  gimple_seq_add_stmt (&stmts, copy);
1765 	  return stmts;
1766 	}
1767       if (gimple_debug_source_bind_p (stmt))
1768 	{
1769 	  gdebug *copy = gimple_build_debug_source_bind
1770 	                   (gimple_debug_source_bind_get_var (stmt),
1771 			    gimple_debug_source_bind_get_value (stmt),
1772 			    stmt);
1773 	  if (id->reset_location)
1774 	    gimple_set_location (copy, input_location);
1775 	  id->debug_stmts.safe_push (copy);
1776 	  gimple_seq_add_stmt (&stmts, copy);
1777 	  return stmts;
1778 	}
1779       if (gimple_debug_nonbind_marker_p (stmt))
1780 	{
1781 	  /* If the inlined function has too many debug markers,
1782 	     don't copy them.  */
1783 	  if (id->src_cfun->debug_marker_count
1784 	      > PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
1785 	    return stmts;
1786 
1787 	  gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1788 	  if (id->reset_location)
1789 	    gimple_set_location (copy, input_location);
1790 	  id->debug_stmts.safe_push (copy);
1791 	  gimple_seq_add_stmt (&stmts, copy);
1792 	  return stmts;
1793 	}
1794 
1795       /* Create a new deep copy of the statement.  */
1796       copy = gimple_copy (stmt);
1797 
1798       /* Clear flags that need revisiting.  */
1799       if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1800         {
1801 	  if (gimple_call_tail_p (call_stmt))
1802 	    gimple_call_set_tail (call_stmt, false);
1803 	  if (gimple_call_from_thunk_p (call_stmt))
1804 	    gimple_call_set_from_thunk (call_stmt, false);
1805 	  if (gimple_call_internal_p (call_stmt))
1806 	    switch (gimple_call_internal_fn (call_stmt))
1807 	      {
1808 	      case IFN_GOMP_SIMD_LANE:
1809 	      case IFN_GOMP_SIMD_VF:
1810 	      case IFN_GOMP_SIMD_LAST_LANE:
1811 	      case IFN_GOMP_SIMD_ORDERED_START:
1812 	      case IFN_GOMP_SIMD_ORDERED_END:
1813 		DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1814 	        break;
1815 	      default:
1816 		break;
1817 	      }
1818 	}
1819 
1820       /* Remap the region numbers for __builtin_eh_{pointer,filter},
1821 	 RESX and EH_DISPATCH.  */
1822       if (id->eh_map)
1823 	switch (gimple_code (copy))
1824 	  {
1825 	  case GIMPLE_CALL:
1826 	    {
1827 	      tree r, fndecl = gimple_call_fndecl (copy);
1828 	      if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1829 		switch (DECL_FUNCTION_CODE (fndecl))
1830 		  {
1831 		  case BUILT_IN_EH_COPY_VALUES:
1832 		    r = gimple_call_arg (copy, 1);
1833 		    r = remap_eh_region_tree_nr (r, id);
1834 		    gimple_call_set_arg (copy, 1, r);
1835 		    /* FALLTHRU */
1836 
1837 		  case BUILT_IN_EH_POINTER:
1838 		  case BUILT_IN_EH_FILTER:
1839 		    r = gimple_call_arg (copy, 0);
1840 		    r = remap_eh_region_tree_nr (r, id);
1841 		    gimple_call_set_arg (copy, 0, r);
1842 		    break;
1843 
1844 		  default:
1845 		    break;
1846 		  }
1847 
1848 	      /* Reset alias info if we didn't apply measures to
1849 		 keep it valid over inlining by setting DECL_PT_UID.  */
1850 	      if (!id->src_cfun->gimple_df
1851 		  || !id->src_cfun->gimple_df->ipa_pta)
1852 		gimple_call_reset_alias_info (as_a <gcall *> (copy));
1853 	    }
1854 	    break;
1855 
1856 	  case GIMPLE_RESX:
1857 	    {
1858 	      gresx *resx_stmt = as_a <gresx *> (copy);
1859 	      int r = gimple_resx_region (resx_stmt);
1860 	      r = remap_eh_region_nr (r, id);
1861 	      gimple_resx_set_region (resx_stmt, r);
1862 	    }
1863 	    break;
1864 
1865 	  case GIMPLE_EH_DISPATCH:
1866 	    {
1867 	      geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1868 	      int r = gimple_eh_dispatch_region (eh_dispatch);
1869 	      r = remap_eh_region_nr (r, id);
1870 	      gimple_eh_dispatch_set_region (eh_dispatch, r);
1871 	    }
1872 	    break;
1873 
1874 	  default:
1875 	    break;
1876 	  }
1877     }
1878 
1879   /* If STMT has a block defined, map it to the newly constructed block.  */
1880   if (tree block = gimple_block (copy))
1881     {
1882       tree *n;
1883       n = id->decl_map->get (block);
1884       gcc_assert (n);
1885       gimple_set_block (copy, *n);
1886     }
1887 
1888   if (id->reset_location)
1889     gimple_set_location (copy, input_location);
1890 
1891   /* Debug statements ought to be rebuilt and not copied.  */
1892   gcc_checking_assert (!is_gimple_debug (copy));
1893 
1894   /* Remap all the operands in COPY.  */
1895   memset (&wi, 0, sizeof (wi));
1896   wi.info = id;
1897   if (skip_first)
1898     walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1899   else
1900     walk_gimple_op (copy, remap_gimple_op_r, &wi);
1901 
1902   /* Clear the copied virtual operands.  We are not remapping them here
1903      but are going to recreate them from scratch.  */
1904   if (gimple_has_mem_ops (copy))
1905     {
1906       gimple_set_vdef (copy, NULL_TREE);
1907       gimple_set_vuse (copy, NULL_TREE);
1908     }
1909 
1910   gimple_seq_add_stmt (&stmts, copy);
1911   return stmts;
1912 }
1913 
1914 
1915 /* Copy basic block, scale profile accordingly.  Edges will be taken care of
1916    later.  */
1917 
1918 static basic_block
1919 copy_bb (copy_body_data *id, basic_block bb,
1920          profile_count num, profile_count den)
1921 {
1922   gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1923   basic_block copy_basic_block;
1924   tree decl;
1925   basic_block prev;
1926 
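  /* NUM/DEN is the profile scale from the source function's entry count to
     the count at which the copied body will execute; normalize it before
     scaling each copied block's count below.  */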
1927   profile_count::adjust_for_ipa_scaling (&num, &den);
1928 
1929   /* Search for previous copied basic block.  */
1930   prev = bb->prev_bb;
1931   while (!prev->aux)
1932     prev = prev->prev_bb;
1933 
1934   /* create_basic_block() will append every new block to
1935      basic_block_info automatically.  */
1936   copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1937   copy_basic_block->count = bb->count.apply_scale (num, den);
1938 
1939   copy_gsi = gsi_start_bb (copy_basic_block);
1940 
1941   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1942     {
1943       gimple_seq stmts;
1944       gimple *stmt = gsi_stmt (gsi);
1945       gimple *orig_stmt = stmt;
1946       gimple_stmt_iterator stmts_gsi;
1947       bool stmt_added = false;
1948 
1949       id->regimplify = false;
1950       stmts = remap_gimple_stmt (stmt, id);
1951 
1952       if (gimple_seq_empty_p (stmts))
1953 	continue;
1954 
1955       seq_gsi = copy_gsi;
1956 
1957       for (stmts_gsi = gsi_start (stmts);
1958 	   !gsi_end_p (stmts_gsi); )
1959 	{
1960 	  stmt = gsi_stmt (stmts_gsi);
1961 
1962 	  /* Advance iterator now before stmt is moved to seq_gsi.  */
1963 	  gsi_next (&stmts_gsi);
1964 
1965 	  if (gimple_nop_p (stmt))
1966 	      continue;
1967 
1968 	  gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1969 					    orig_stmt);
1970 
1971 	  /* With return slot optimization we can end up with
1972 	     non-gimple (foo *)&this->m, fix that here.  */
1973 	  if (is_gimple_assign (stmt)
1974 	      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1975 	      && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1976 	    {
1977 	      tree new_rhs;
1978 	      new_rhs = force_gimple_operand_gsi (&seq_gsi,
1979 						  gimple_assign_rhs1 (stmt),
1980 						  true, NULL, false,
1981 						  GSI_CONTINUE_LINKING);
1982 	      gimple_assign_set_rhs1 (stmt, new_rhs);
1983 	      id->regimplify = false;
1984 	    }
1985 
1986 	  gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1987 
1988 	  if (id->regimplify)
1989 	    gimple_regimplify_operands (stmt, &seq_gsi);
1990 
1991 	  stmt_added = true;
1992 	}
1993 
1994       if (!stmt_added)
1995 	continue;
1996 
1997       /* If copy_basic_block has been empty at the start of this iteration,
1998 	 call gsi_start_bb again to get at the newly added statements.  */
1999       if (gsi_end_p (copy_gsi))
2000 	copy_gsi = gsi_start_bb (copy_basic_block);
2001       else
2002 	gsi_next (&copy_gsi);
2003 
2004       /* Process the new statement.  The call to gimple_regimplify_operands
2005 	 possibly turned the statement into multiple statements; we
2006 	 need to process all of them.  */
2007       do
2008 	{
2009 	  tree fn;
2010 	  gcall *call_stmt;
2011 
2012 	  stmt = gsi_stmt (copy_gsi);
2013 	  call_stmt = dyn_cast <gcall *> (stmt);
2014 	  if (call_stmt
2015 	      && gimple_call_va_arg_pack_p (call_stmt)
2016 	      && id->call_stmt
2017 	      && ! gimple_call_va_arg_pack_p (id->call_stmt))
2018 	    {
2019 	      /* __builtin_va_arg_pack () should be replaced by
2020 		 all arguments corresponding to ... in the caller.  */
2021 	      tree p;
2022 	      gcall *new_call;
2023 	      vec<tree> argarray;
2024 	      size_t nargs = gimple_call_num_args (id->call_stmt);
2025 	      size_t n;
2026 
2027 	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2028 		nargs--;
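	      /* NARGS is now the number of arguments passed via '...' in the
		 call being inlined, i.e. its total argument count minus the
		 callee's named parameters.  */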
2029 
2030 	      /* Create the new array of arguments.  */
2031 	      n = nargs + gimple_call_num_args (call_stmt);
2032 	      argarray.create (n);
2033 	      argarray.safe_grow_cleared (n);
2034 
2035 	      /* Copy all the arguments before '...'  */
2036 	      memcpy (argarray.address (),
2037 		      gimple_call_arg_ptr (call_stmt, 0),
2038 		      gimple_call_num_args (call_stmt) * sizeof (tree));
2039 
2040 	      /* Append the arguments passed in '...'  */
2041 	      memcpy (argarray.address () + gimple_call_num_args (call_stmt),
2042 		      gimple_call_arg_ptr (id->call_stmt, 0)
2043 		      + (gimple_call_num_args (id->call_stmt) - nargs),
2044 		      nargs * sizeof (tree));
2045 
2046 	      new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
2047 						argarray);
2048 
2049 	      argarray.release ();
2050 
2051 	      /* Copy all GIMPLE_CALL flags, location and block, except
2052 		 GF_CALL_VA_ARG_PACK.  */
2053 	      gimple_call_copy_flags (new_call, call_stmt);
2054 	      gimple_call_set_va_arg_pack (new_call, false);
2055 	      /* location includes block.  */
2056 	      gimple_set_location (new_call, gimple_location (stmt));
2057 	      gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
2058 
2059 	      gsi_replace (&copy_gsi, new_call, false);
2060 	      stmt = new_call;
2061 	    }
2062 	  else if (call_stmt
2063 		   && id->call_stmt
2064 		   && (decl = gimple_call_fndecl (stmt))
2065 		   && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
2066 	    {
2067 	      /* __builtin_va_arg_pack_len () should be replaced by
2068 		 the number of anonymous arguments.  */
2069 	      size_t nargs = gimple_call_num_args (id->call_stmt);
2070 	      tree count, p;
2071 	      gimple *new_stmt;
2072 
2073 	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2074 		nargs--;
2075 
2076 	      if (!gimple_call_lhs (stmt))
2077 		{
2078 		  /* Drop unused calls.  */
2079 		  gsi_remove (&copy_gsi, false);
2080 		  continue;
2081 		}
2082 	      else if (!gimple_call_va_arg_pack_p (id->call_stmt))
2083 		{
2084 		  count = build_int_cst (integer_type_node, nargs);
2085 		  new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
2086 		  gsi_replace (&copy_gsi, new_stmt, false);
2087 		  stmt = new_stmt;
2088 		}
2089 	      else if (nargs != 0)
2090 		{
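		  /* The call being inlined itself uses __builtin_va_arg_pack,
		     so the final length is not yet known.  Re-point the
		     remaining __builtin_va_arg_pack_len () call at a fresh
		     temporary and add NARGS to it for the original LHS.  */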
2091 		  tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
2092 		  count = build_int_cst (integer_type_node, nargs);
2093 		  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2094 						  PLUS_EXPR, newlhs, count);
2095 		  gimple_call_set_lhs (stmt, newlhs);
2096 		  gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
2097 		}
2098 	    }
2099 	  else if (call_stmt
2100 		   && id->call_stmt
2101 		   && gimple_call_internal_p (stmt)
2102 		   && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
2103 	    {
2104 	      /* Drop TSAN_FUNC_EXIT () internal calls during inlining.  */
2105 	      gsi_remove (&copy_gsi, false);
2106 	      continue;
2107 	    }
2108 
2109 	  /* Statements produced by inlining can be unfolded, especially
2110 	     when we constant propagated some operands.  We can't fold
2111 	     them right now for two reasons:
2112 	     1) folding requires SSA_NAME_DEF_STMTs to be correct
2113 	     2) we can't change function calls to builtins.
2114 	     So we just mark the statement for later folding.  We mark
2115 	     all new statements, instead of just statements that have
2116 	     changed by some nontrivial substitution, so even statements
2117 	     made foldable indirectly are updated.  If this turns out to be
2118 	     expensive, copy_body can be told to watch for nontrivial
2119 	     changes.  */
2120 	  if (id->statements_to_fold)
2121 	    id->statements_to_fold->add (stmt);
2122 
2123 	  /* We're duplicating a CALL_EXPR.  Find any corresponding
2124 	     callgraph edges and update or duplicate them.  */
2125 	  if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2126 	    {
2127 	      struct cgraph_edge *edge;
2128 
2129 	      switch (id->transform_call_graph_edges)
2130 		{
2131 		case CB_CGE_DUPLICATE:
2132 		  edge = id->src_node->get_edge (orig_stmt);
2133 		  if (edge)
2134 		    {
2135 		      struct cgraph_edge *old_edge = edge;
2136 		      profile_count old_cnt = edge->count;
2137 		      edge = edge->clone (id->dst_node, call_stmt,
2138 					  gimple_uid (stmt),
2139 					  num, den,
2140 					  true);
2141 
2142 		      /* Speculative calls consist of two edges - direct and
2143 			 indirect.  Duplicate the whole thing and distribute
2144 			 frequencies accordingly.  */
2145 		      if (edge->speculative)
2146 			{
2147 			  struct cgraph_edge *direct, *indirect;
2148 			  struct ipa_ref *ref;
2149 
2150 			  gcc_assert (!edge->indirect_unknown_callee);
2151 			  old_edge->speculative_call_info (direct, indirect, ref);
2152 
2153 			  profile_count indir_cnt = indirect->count;
2154 			  indirect = indirect->clone (id->dst_node, call_stmt,
2155 						      gimple_uid (stmt),
2156 						      num, den,
2157 						      true);
2158 
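			  /* Split the copied block's count between the direct
			     and indirect edges in the same proportion as
			     their original counts.  */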
2159 			  profile_probability prob
2160 			     = indir_cnt.probability_in (old_cnt + indir_cnt);
2161 			  indirect->count
2162 			     = copy_basic_block->count.apply_probability (prob);
2163 			  edge->count = copy_basic_block->count - indirect->count;
2164 			  id->dst_node->clone_reference (ref, stmt);
2165 			}
2166 		      else
2167 			edge->count = copy_basic_block->count;
2168 		    }
2169 		  break;
2170 
2171 		case CB_CGE_MOVE_CLONES:
2172 		  id->dst_node->set_call_stmt_including_clones (orig_stmt,
2173 								call_stmt);
2174 		  edge = id->dst_node->get_edge (stmt);
2175 		  break;
2176 
2177 		case CB_CGE_MOVE:
2178 		  edge = id->dst_node->get_edge (orig_stmt);
2179 		  if (edge)
2180 		    edge->set_call_stmt (call_stmt);
2181 		  break;
2182 
2183 		default:
2184 		  gcc_unreachable ();
2185 		}
2186 
2187 	      /* Constant propagation on argument done during inlining
2188 		 may create new direct call.  Produce an edge for it.  */
2189 	      if ((!edge
2190 		   || (edge->indirect_inlining_edge
2191 		       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2192 		  && id->dst_node->definition
2193 		  && (fn = gimple_call_fndecl (stmt)) != NULL)
2194 		{
2195 		  struct cgraph_node *dest = cgraph_node::get_create (fn);
2196 
2197 		  /* We have a missing edge in the callgraph.  This can happen
2198 		     when previous inlining turned an indirect call into a
2199 		     direct call by constant propagating arguments or we are
2200 		     producing a dead clone (for further cloning).  In all
2201 		     other cases we hit a bug (incorrect node sharing is the
2202 		     most common reason for missing edges).  */
2203 		  gcc_assert (!dest->definition
2204 			      || dest->address_taken
2205 		  	      || !id->src_node->definition
2206 			      || !id->dst_node->definition);
2207 		  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2208 		    id->dst_node->create_edge_including_clones
2209 		      (dest, orig_stmt, call_stmt, bb->count,
2210 		       CIF_ORIGINALLY_INDIRECT_CALL);
2211 		  else
2212 		    id->dst_node->create_edge (dest, call_stmt,
2213 					bb->count)->inline_failed
2214 		      = CIF_ORIGINALLY_INDIRECT_CALL;
2215 		  if (dump_file)
2216 		    {
2217 		      fprintf (dump_file, "Created new direct edge to %s\n",
2218 			       dest->name ());
2219 		    }
2220 		}
2221 
2222 	      notice_special_calls (as_a <gcall *> (stmt));
2223 	    }
2224 
2225 	  maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2226 				      id->eh_map, id->eh_lp_nr);
2227 
2228 	  gsi_next (&copy_gsi);
2229 	}
2230       while (!gsi_end_p (copy_gsi));
2231 
2232       copy_gsi = gsi_last_bb (copy_basic_block);
2233     }
2234 
2235   return copy_basic_block;
2236 }
2237 
2238 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
2239    form is quite easy, since the dominator relationship for the old basic
2240    blocks does not change.
2241 
2242    There is, however, an exception: inlining might change the dominator
2243    relation across EH edges from basic blocks within the inlined function
2244    to landing pads in the function we inline into.
2245 
2246    The function fills in PHI_RESULTs of such PHI nodes if they refer
2247    to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
2248    PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
2249    EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2250    set, which means there will be no overlapping live ranges
2251    for the underlying symbol.
2252 
2253    This might change in the future if we allow redirecting of EH edges and
2254    we might then want to change the way we build the CFG pre-inlining to
2255    include all the possible edges.  */
2256 static void
2257 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2258 				  bool can_throw, bool nonlocal_goto)
2259 {
2260   edge e;
2261   edge_iterator ei;
2262 
2263   FOR_EACH_EDGE (e, ei, bb->succs)
2264     if (!e->dest->aux
2265 	|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2266       {
2267 	gphi *phi;
2268 	gphi_iterator si;
2269 
2270 	if (!nonlocal_goto)
2271 	  gcc_assert (e->flags & EDGE_EH);
2272 
2273 	if (!can_throw)
2274 	  gcc_assert (!(e->flags & EDGE_EH));
2275 
2276 	for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2277 	  {
2278 	    edge re;
2279 
2280 	    phi = si.phi ();
2281 
2282 	    /* For abnormal goto/call edges the receiver can be the
2283 	       ENTRY_BLOCK.  Do not assert this cannot happen.  */
2284 
2285 	    gcc_assert ((e->flags & EDGE_EH)
2286 			|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2287 
2288 	    re = find_edge (ret_bb, e->dest);
2289 	    gcc_checking_assert (re);
2290 	    gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2291 			== (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2292 
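	    /* Reuse the PHI argument already present on the edge from RET_BB
	       to the same destination for the new abnormal/EH edge E.  */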
2293 	    SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2294 		     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2295 	  }
2296       }
2297 }
2298 
2299 /* Insert clobbers for automatic variables of inlined ID->src_fn
2300    function at the start of basic block BB.  */
2301 
2302 static void
2303 add_clobbers_to_eh_landing_pad (basic_block bb, copy_body_data *id)
2304 {
2305   tree var;
2306   unsigned int i;
2307   FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2308     if (VAR_P (var)
2309 	&& !DECL_HARD_REGISTER (var)
2310 	&& !TREE_THIS_VOLATILE (var)
2311 	&& !DECL_HAS_VALUE_EXPR_P (var)
2312 	&& !is_gimple_reg (var)
2313 	&& auto_var_in_fn_p (var, id->src_fn)
2314 	&& !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2315       {
2316 	tree *t = id->decl_map->get (var);
2317 	if (!t)
2318 	  continue;
2319 	tree new_var = *t;
2320 	if (VAR_P (new_var)
2321 	    && !DECL_HARD_REGISTER (new_var)
2322 	    && !TREE_THIS_VOLATILE (new_var)
2323 	    && !DECL_HAS_VALUE_EXPR_P (new_var)
2324 	    && !is_gimple_reg (new_var)
2325 	    && auto_var_in_fn_p (new_var, id->dst_fn))
2326 	  {
2327 	    gimple_stmt_iterator gsi = gsi_after_labels (bb);
2328 	    tree clobber = build_clobber (TREE_TYPE (new_var));
2329 	    gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2330 	    gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2331 	  }
2332       }
2333 }
2334 
2335 /* Copy edges from BB into its copy constructed earlier, scale profile
2336    accordingly.  Assume the aux pointers point to the copies of each BB.
2337    Return true if any debug stmts are left after a statement that must
2338    end the basic block.  */
2339 
2340 static bool
2341 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2342 		   basic_block ret_bb, basic_block abnormal_goto_dest,
2343 		   copy_body_data *id)
2344 {
2345   basic_block new_bb = (basic_block) bb->aux;
2346   edge_iterator ei;
2347   edge old_edge;
2348   gimple_stmt_iterator si;
2349   bool need_debug_cleanup = false;
2350 
2351   /* Use the indices from the original blocks to create edges for the
2352      new ones.  */
2353   FOR_EACH_EDGE (old_edge, ei, bb->succs)
2354     if (!(old_edge->flags & EDGE_EH))
2355       {
2356 	edge new_edge;
2357 	int flags = old_edge->flags;
2358 	location_t locus = old_edge->goto_locus;
2359 
2360 	/* Return edges do get a FALLTHRU flag when they get inlined.  */
2361 	if (old_edge->dest->index == EXIT_BLOCK
2362 	    && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2363 	    && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2364 	  flags |= EDGE_FALLTHRU;
2365 
2366 	new_edge
2367 	  = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2368 	new_edge->probability = old_edge->probability;
2369 	if (!id->reset_location)
2370 	  new_edge->goto_locus = remap_location (locus, id);
2371       }
2372 
2373   if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2374     return false;
2375 
2376   /* When doing function splitting, we must decrease the count of the return
2377      block, which was previously reachable from blocks we did not copy.  */
2378   if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2379     FOR_EACH_EDGE (old_edge, ei, bb->preds)
2380       if (old_edge->src->index != ENTRY_BLOCK
2381 	  && !old_edge->src->aux)
2382 	new_bb->count -= old_edge->count ().apply_scale (num, den);
2383 
2384   for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2385     {
2386       gimple *copy_stmt;
2387       bool can_throw, nonlocal_goto;
2388 
2389       copy_stmt = gsi_stmt (si);
2390       if (!is_gimple_debug (copy_stmt))
2391 	update_stmt (copy_stmt);
2392 
2393       /* Do this before the possible split_block.  */
2394       gsi_next (&si);
2395 
2396       /* If this tree could throw an exception, there are two
2397          cases where we need to add abnormal edge(s): the
2398          tree wasn't in a region and there is a "current
2399          region" in the caller; or the original tree had
2400          EH edges.  In both cases split the block after the tree,
2401          and add abnormal edge(s) as needed; we need both
2402          those from the callee and the caller.
2403          We check whether the copy can throw, because the const
2404          propagation can change an INDIRECT_REF which throws
2405          into a COMPONENT_REF which doesn't.  If the copy
2406          can throw, the original could also throw.  */
2407       can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2408       nonlocal_goto
2409 	= (stmt_can_make_abnormal_goto (copy_stmt)
2410 	   && !computed_goto_p (copy_stmt));
2411 
2412       if (can_throw || nonlocal_goto)
2413 	{
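	  /* If only debug stmts follow the throwing or abnormal stmt, the
	     block cannot be split after them; record that such trailing
	     debug stmts must be moved to the successors afterwards.  */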
2414 	  if (!gsi_end_p (si))
2415 	    {
2416 	      while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2417 		gsi_next (&si);
2418 	      if (gsi_end_p (si))
2419 		need_debug_cleanup = true;
2420 	    }
2421 	  if (!gsi_end_p (si))
2422 	    /* Note that bb's predecessor edges aren't necessarily
2423 	       right at this point; split_block doesn't care.  */
2424 	    {
2425 	      edge e = split_block (new_bb, copy_stmt);
2426 
2427 	      new_bb = e->dest;
2428 	      new_bb->aux = e->src->aux;
2429 	      si = gsi_start_bb (new_bb);
2430 	    }
2431 	}
2432 
2433       bool update_probs = false;
2434 
2435       if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2436 	{
2437 	  make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2438 	  update_probs = true;
2439 	}
2440       else if (can_throw)
2441 	{
2442 	  make_eh_edges (copy_stmt);
2443 	  update_probs = true;
2444 	}
2445 
2446       /* EH edges may not match old edges.  Copy as much as possible.  */
2447       if (update_probs)
2448 	{
2449           edge e;
2450           edge_iterator ei;
2451 	  basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2452 
2453           FOR_EACH_EDGE (old_edge, ei, bb->succs)
2454             if ((old_edge->flags & EDGE_EH)
2455 		&& (e = find_edge (copy_stmt_bb,
2456 				   (basic_block) old_edge->dest->aux))
2457 		&& (e->flags & EDGE_EH))
2458 	      e->probability = old_edge->probability;
2459 
2460           FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2461 	    if (e->flags & EDGE_EH)
2462 	      {
2463 		if (!e->probability.initialized_p ())
2464 		  e->probability = profile_probability::never ();
2465 		if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2466 		  {
2467 		    add_clobbers_to_eh_landing_pad (e->dest, id);
2468 		    id->add_clobbers_to_eh_landing_pads = 0;
2469 		  }
2470 	      }
2471         }
2472 
2473 
2474       /* If the call we inline cannot make abnormal goto do not add
2475          additional abnormal edges but only retain those already present
2476 	 in the original function body.  */
2477       if (abnormal_goto_dest == NULL)
2478 	nonlocal_goto = false;
2479       if (nonlocal_goto)
2480 	{
2481 	  basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2482 
2483 	  if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2484 	    nonlocal_goto = false;
2485 	  /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2486 	     in OpenMP regions which aren't allowed to be left abnormally.
2487 	     So, no need to add abnormal edge in that case.  */
2488 	  else if (is_gimple_call (copy_stmt)
2489 		   && gimple_call_internal_p (copy_stmt)
2490 		   && (gimple_call_internal_fn (copy_stmt)
2491 		       == IFN_ABNORMAL_DISPATCHER)
2492 		   && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2493 	    nonlocal_goto = false;
2494 	  else
2495 	    make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2496 				   EDGE_ABNORMAL);
2497 	}
2498 
2499       if ((can_throw || nonlocal_goto)
2500 	  && gimple_in_ssa_p (cfun))
2501 	update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2502 					  can_throw, nonlocal_goto);
2503     }
2504   return need_debug_cleanup;
2505 }
2506 
2507 /* Copy the PHIs.  All blocks and edges have been copied, some blocks
2508    were possibly split and new outgoing EH edges inserted.
2509    BB points to the block of the original function and AUX pointers link
2510    the original and newly copied blocks.  */
2511 
2512 static void
2513 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2514 {
2515   basic_block const new_bb = (basic_block) bb->aux;
2516   edge_iterator ei;
2517   gphi *phi;
2518   gphi_iterator si;
2519   edge new_edge;
2520   bool inserted = false;
2521 
2522   for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2523     {
2524       tree res, new_res;
2525       gphi *new_phi;
2526 
2527       phi = si.phi ();
2528       res = PHI_RESULT (phi);
2529       new_res = res;
2530       if (!virtual_operand_p (res))
2531 	{
2532 	  walk_tree (&new_res, copy_tree_body_r, id, NULL);
2533 	  if (EDGE_COUNT (new_bb->preds) == 0)
2534 	    {
2535 	      /* Technically we'd want a SSA_DEFAULT_DEF here... */
2536 	      SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2537 	    }
2538 	  else
2539 	    {
2540 	      new_phi = create_phi_node (new_res, new_bb);
2541 	      FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2542 		{
2543 		  edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2544 					     bb);
2545 		  tree arg;
2546 		  tree new_arg;
2547 		  edge_iterator ei2;
2548 		  location_t locus;
2549 
2550 		  /* When doing partial cloning, we allow PHIs on the entry
2551 		     block as long as all the arguments are the same.
2552 		     Find any input edge to pick the argument to copy.  */
2553 		  if (!old_edge)
2554 		    FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2555 		      if (!old_edge->src->aux)
2556 			break;
2557 
2558 		  arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2559 		  new_arg = arg;
2560 		  walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2561 		  gcc_assert (new_arg);
2562 		  /* With return slot optimization we can end up with
2563 		     non-gimple (foo *)&this->m, fix that here.  */
2564 		  if (TREE_CODE (new_arg) != SSA_NAME
2565 		      && TREE_CODE (new_arg) != FUNCTION_DECL
2566 		      && !is_gimple_val (new_arg))
2567 		    {
2568 		      gimple_seq stmts = NULL;
2569 		      new_arg = force_gimple_operand (new_arg, &stmts, true,
2570 						      NULL);
2571 		      gsi_insert_seq_on_edge (new_edge, stmts);
2572 		      inserted = true;
2573 		    }
2574 		  locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2575 		  if (id->reset_location)
2576 		    locus = input_location;
2577 		  else
2578 		    locus = remap_location (locus, id);
2579 		  add_phi_arg (new_phi, new_arg, new_edge, locus);
2580 		}
2581 	    }
2582 	}
2583     }
2584 
2585   /* Commit the delayed edge insertions.  */
2586   if (inserted)
2587     FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2588       gsi_commit_one_edge_insert (new_edge, NULL);
2589 }
2590 
2591 
2592 /* Wrapper for remap_decl so it can be used as a callback.  */
2593 
2594 static tree
2595 remap_decl_1 (tree decl, void *data)
2596 {
2597   return remap_decl (decl, (copy_body_data *) data);
2598 }
2599 
2600 /* Build struct function and associated data structures for the new clone
2601    NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  The function
2602    changes cfun to the function of new_fndecl (and current_function_decl too).  */
2603 
2604 static void
2605 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2606 {
2607   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2608 
2609   if (!DECL_ARGUMENTS (new_fndecl))
2610     DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2611   if (!DECL_RESULT (new_fndecl))
2612     DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2613 
2614   /* Register specific tree functions.  */
2615   gimple_register_cfg_hooks ();
2616 
2617   /* Get clean struct function.  */
2618   push_struct_function (new_fndecl);
2619 
2620   /* We will rebuild these, so just sanity check that they are empty.  */
2621   gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2622   gcc_assert (cfun->local_decls == NULL);
2623   gcc_assert (cfun->cfg == NULL);
2624   gcc_assert (cfun->decl == new_fndecl);
2625 
2626   /* Copy items we preserve during cloning.  */
2627   cfun->static_chain_decl = src_cfun->static_chain_decl;
2628   cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2629   cfun->function_end_locus = src_cfun->function_end_locus;
2630   cfun->curr_properties = src_cfun->curr_properties;
2631   cfun->last_verified = src_cfun->last_verified;
2632   cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2633   cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2634   cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2635   cfun->stdarg = src_cfun->stdarg;
2636   cfun->after_inlining = src_cfun->after_inlining;
2637   cfun->can_throw_non_call_exceptions
2638     = src_cfun->can_throw_non_call_exceptions;
2639   cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2640   cfun->returns_struct = src_cfun->returns_struct;
2641   cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2642 
2643   init_empty_tree_cfg ();
2644 
2645   profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2646 
2647   profile_count num = count;
2648   profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2649   profile_count::adjust_for_ipa_scaling (&num, &den);
2650 
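  /* Scale the clone's entry and exit block counts from the source
     function's entry count to COUNT.  */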
2651   ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2652     ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2653 				ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2654   EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2655     EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2656 				ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2657   if (src_cfun->eh)
2658     init_eh_for_function ();
2659 
2660   if (src_cfun->gimple_df)
2661     {
2662       init_tree_ssa (cfun);
2663       cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2664       if (cfun->gimple_df->in_ssa_p)
2665 	init_ssa_operands (cfun);
2666     }
2667 }
2668 
2669 /* Helper function for copy_cfg_body.  Move debug stmts from the end
2670    of NEW_BB to the beginning of successor basic blocks when needed.  If the
2671    successor has multiple predecessors, reset the values of the moved debug
2672    stmts; otherwise keep them.  */
2673 
2674 static void
2675 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2676 {
2677   edge e;
2678   edge_iterator ei;
2679   gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2680 
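  /* Only act if the last nondebug stmt must end the block (it can throw
     internally or make an abnormal goto) and debug stmts follow it.  */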
2681   if (gsi_end_p (si)
2682       || gsi_one_before_end_p (si)
2683       || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2684 	   || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2685     return;
2686 
2687   FOR_EACH_EDGE (e, ei, new_bb->succs)
2688     {
2689       gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2690       gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2691       while (is_gimple_debug (gsi_stmt (ssi)))
2692 	{
2693 	  gimple *stmt = gsi_stmt (ssi);
2694 	  gdebug *new_stmt;
2695 	  tree var;
2696 	  tree value;
2697 
2698 	  /* For the last edge move the debug stmts instead of copying
2699 	     them.  */
2700 	  if (ei_one_before_end_p (ei))
2701 	    {
2702 	      si = ssi;
2703 	      gsi_prev (&ssi);
2704 	      if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2705 		{
2706 		  gimple_debug_bind_reset_value (stmt);
2707 		  gimple_set_location (stmt, UNKNOWN_LOCATION);
2708 		}
2709 	      gsi_remove (&si, false);
2710 	      gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2711 	      continue;
2712 	    }
2713 
2714 	  if (gimple_debug_bind_p (stmt))
2715 	    {
2716 	      var = gimple_debug_bind_get_var (stmt);
2717 	      if (single_pred_p (e->dest))
2718 		{
2719 		  value = gimple_debug_bind_get_value (stmt);
2720 		  value = unshare_expr (value);
2721 		  new_stmt = gimple_build_debug_bind (var, value, stmt);
2722 		}
2723 	      else
2724 		new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2725 	    }
2726 	  else if (gimple_debug_source_bind_p (stmt))
2727 	    {
2728 	      var = gimple_debug_source_bind_get_var (stmt);
2729 	      value = gimple_debug_source_bind_get_value (stmt);
2730 	      new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2731 	    }
2732 	  else if (gimple_debug_nonbind_marker_p (stmt))
2733 	    new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2734 	  else
2735 	    gcc_unreachable ();
2736 	  gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2737 	  id->debug_stmts.safe_push (new_stmt);
2738 	  gsi_prev (&ssi);
2739 	}
2740     }
2741 }
2742 
2743 /* Make a copy of the sub-loops of SRC_PARENT and place them
2744    as children of DEST_PARENT.  */
2745 
2746 static void
2747 copy_loops (copy_body_data *id,
2748 	    struct loop *dest_parent, struct loop *src_parent)
2749 {
2750   struct loop *src_loop = src_parent->inner;
2751   while (src_loop)
2752     {
2753       if (!id->blocks_to_copy
2754 	  || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2755 	{
2756 	  struct loop *dest_loop = alloc_loop ();
2757 
2758 	  /* Assign the new loop its header and latch and associate
2759 	     those with the new loop.  */
2760 	  dest_loop->header = (basic_block)src_loop->header->aux;
2761 	  dest_loop->header->loop_father = dest_loop;
2762 	  if (src_loop->latch != NULL)
2763 	    {
2764 	      dest_loop->latch = (basic_block)src_loop->latch->aux;
2765 	      dest_loop->latch->loop_father = dest_loop;
2766 	    }
2767 
2768 	  /* Copy loop meta-data.  */
2769 	  copy_loop_info (src_loop, dest_loop);
2770 	  if (dest_loop->unroll)
2771 	    cfun->has_unroll = true;
2772 	  if (dest_loop->force_vectorize)
2773 	    cfun->has_force_vectorize_loops = true;
2774 	  if (id->src_cfun->last_clique != 0)
2775 	    dest_loop->owned_clique
2776 	      = remap_dependence_clique (id,
2777 					 src_loop->owned_clique
2778 					 ? src_loop->owned_clique : 1);
2779 
2780 	  /* Finally place it into the loop array and the loop tree.  */
2781 	  place_new_loop (cfun, dest_loop);
2782 	  flow_loop_tree_node_add (dest_parent, dest_loop);
2783 
2784 	  if (src_loop->simduid)
2785 	    {
2786 	      dest_loop->simduid = remap_decl (src_loop->simduid, id);
2787 	      cfun->has_simduid_loops = true;
2788 	    }
2789 
2790 	  /* Recurse.  */
2791 	  copy_loops (id, dest_loop, src_loop);
2792 	}
2793       src_loop = src_loop->next;
2794     }
2795 }
2796 
2797 /* Call redirect_call_stmt_to_callee on all calls in BB.  */
2798 
2799 void
2800 redirect_all_calls (copy_body_data *id, basic_block bb)
2801 {
2802   gimple_stmt_iterator si;
2803   gimple *last = last_stmt (bb);
2804   for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2805     {
2806       gimple *stmt = gsi_stmt (si);
2807       if (is_gimple_call (stmt))
2808 	{
2809 	  struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2810 	  if (edge)
2811 	    {
2812 	      edge->redirect_call_stmt_to_callee ();
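	      /* If redirecting the last stmt of BB made it no longer throw,
		 remove its now-dead EH edges.  */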
2813 	      if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2814 		gimple_purge_dead_eh_edges (bb);
2815 	    }
2816 	}
2817     }
2818 }
2819 
2820 /* Make a copy of the body of FN so that it can be inserted inline in
2821    another function.  Walks FN via CFG, returns new fndecl.  */
2822 
2823 static tree
2824 copy_cfg_body (copy_body_data *id,
2825 	       basic_block entry_block_map, basic_block exit_block_map,
2826 	       basic_block new_entry)
2827 {
2828   tree callee_fndecl = id->src_fn;
2829   /* Original cfun for the callee, doesn't change.  */
2830   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2831   struct function *cfun_to_copy;
2832   basic_block bb;
2833   tree new_fndecl = NULL;
2834   bool need_debug_cleanup = false;
2835   int last;
2836   profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2837   profile_count num = entry_block_map->count;
2838 
2839   cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2840 
2841   /* Register specific tree functions.  */
2842   gimple_register_cfg_hooks ();
2843 
2844   /* If we are inlining just a region of the function, make sure to connect
2845      the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun).  Since the new entry can
2846      be part of a loop, we must compute the frequency and probability of
2847      ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2848      probabilities of edges incoming from the nonduplicated region.  */
2849   if (new_entry)
2850     {
2851       edge e;
2852       edge_iterator ei;
2853       den = profile_count::zero ();
2854 
2855       FOR_EACH_EDGE (e, ei, new_entry->preds)
2856 	if (!e->src->aux)
2857 	  den += e->count ();
2858       ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
2859     }
2860 
2861   profile_count::adjust_for_ipa_scaling (&num, &den);
2862 
2863   /* Must have a CFG here at this point.  */
2864   gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2865 	      (DECL_STRUCT_FUNCTION (callee_fndecl)));
2866 
2867 
2868   ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2869   EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2870   entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2871   exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2872 
2873   /* Duplicate any exception-handling regions.  */
2874   if (cfun->eh)
2875     id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2876 				       remap_decl_1, id);
2877 
2878   /* Use aux pointers to map the original blocks to copy.  */
2879   FOR_EACH_BB_FN (bb, cfun_to_copy)
2880     if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2881       {
2882 	basic_block new_bb = copy_bb (id, bb, num, den);
2883 	bb->aux = new_bb;
2884 	new_bb->aux = bb;
2885 	new_bb->loop_father = entry_block_map->loop_father;
2886       }
2887 
2888   last = last_basic_block_for_fn (cfun);
2889 
2890   /* Now that we've duplicated the blocks, duplicate their edges.  */
2891   basic_block abnormal_goto_dest = NULL;
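  /* If the call being inlined is the last stmt of its block and can make an
     abnormal goto, abnormal edges from the copied body must be routed to the
     caller's abnormal-succ dispatcher block.  */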
2892   if (id->call_stmt
2893       && stmt_can_make_abnormal_goto (id->call_stmt))
2894     {
2895       gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2896 
2897       bb = gimple_bb (id->call_stmt);
2898       gsi_next (&gsi);
2899       if (gsi_end_p (gsi))
2900 	abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2901     }
2902   FOR_ALL_BB_FN (bb, cfun_to_copy)
2903     if (!id->blocks_to_copy
2904 	|| (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2905       need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
2906 					       abnormal_goto_dest, id);
2907 
2908   if (new_entry)
2909     {
2910       edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
2911 			  EDGE_FALLTHRU);
2912       e->probability = profile_probability::always ();
2913     }
2914 
2915   /* Duplicate the loop tree, if available and wanted.  */
2916   if (loops_for_fn (src_cfun) != NULL
2917       && current_loops != NULL)
2918     {
2919       copy_loops (id, entry_block_map->loop_father,
2920 		  get_loop (src_cfun, 0));
2921       /* Defer to cfgcleanup to update loop-father fields of basic-blocks.  */
2922       loops_state_set (LOOPS_NEED_FIXUP);
2923     }
2924 
2925   /* If the loop tree in the source function needed fixup, mark the
2926      destination loop tree for fixup, too.  */
2927   if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2928     loops_state_set (LOOPS_NEED_FIXUP);
2929 
2930   if (gimple_in_ssa_p (cfun))
2931     FOR_ALL_BB_FN (bb, cfun_to_copy)
2932       if (!id->blocks_to_copy
2933 	  || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2934 	copy_phis_for_bb (bb, id);
2935 
2936   FOR_ALL_BB_FN (bb, cfun_to_copy)
2937     if (bb->aux)
2938       {
2939 	if (need_debug_cleanup
2940 	    && bb->index != ENTRY_BLOCK
2941 	    && bb->index != EXIT_BLOCK)
2942 	  maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2943 	/* Update call edge destinations.  This cannot be done before loop
2944 	   info is updated, because we may split basic blocks.  */
2945 	if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2946 	    && bb->index != ENTRY_BLOCK
2947 	    && bb->index != EXIT_BLOCK)
2948 	  redirect_all_calls (id, (basic_block)bb->aux);
2949 	((basic_block)bb->aux)->aux = NULL;
2950 	bb->aux = NULL;
2951       }
2952 
2953   /* Zero out the AUX fields of blocks newly created during EH edge
2954      insertion.  */
2955   for (; last < last_basic_block_for_fn (cfun); last++)
2956     {
2957       if (need_debug_cleanup)
2958 	maybe_move_debug_stmts_to_successors (id,
2959 					      BASIC_BLOCK_FOR_FN (cfun, last));
2960       BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2961       /* Update call edge destinations.  This cannot be done before loop
2962 	 info is updated, because we may split basic blocks.  */
2963       if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2964 	redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2965     }
2966   entry_block_map->aux = NULL;
2967   exit_block_map->aux = NULL;
2968 
2969   if (id->eh_map)
2970     {
2971       delete id->eh_map;
2972       id->eh_map = NULL;
2973     }
2974   if (id->dependence_map)
2975     {
2976       delete id->dependence_map;
2977       id->dependence_map = NULL;
2978     }
2979 
2980   return new_fndecl;
2981 }
2982 
2983 /* Copy the debug STMT using ID.  We deal with these statements in a
2984    special way: if any variable in their VALUE expression wasn't
2985    remapped yet, we won't remap it, because that would get decl uids
2986    out of sync, causing codegen differences between -g and -g0.  If
2987    this arises, we drop the VALUE expression altogether.  */
2988 
2989 static void
2990 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
2991 {
2992   tree t, *n;
2993   struct walk_stmt_info wi;
2994 
2995   if (tree block = gimple_block (stmt))
2996     {
2997       n = id->decl_map->get (block);
2998       gimple_set_block (stmt, n ? *n : id->block);
2999     }
3000 
3001   if (gimple_debug_nonbind_marker_p (stmt))
3002     return;
3003 
3004   /* Remap all the operands in COPY.  */
3005   memset (&wi, 0, sizeof (wi));
3006   wi.info = id;
3007 
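  /* Tell the remapping helpers that we are processing a debug stmt; if an
     operand cannot be remapped they signal it by setting this negative
     instead of creating new decls, and the value is dropped below.  */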
3008   processing_debug_stmt = 1;
3009 
3010   if (gimple_debug_source_bind_p (stmt))
3011     t = gimple_debug_source_bind_get_var (stmt);
3012   else if (gimple_debug_bind_p (stmt))
3013     t = gimple_debug_bind_get_var (stmt);
3014   else
3015     gcc_unreachable ();
3016 
3017   if (TREE_CODE (t) == PARM_DECL && id->debug_map
3018       && (n = id->debug_map->get (t)))
3019     {
3020       gcc_assert (VAR_P (*n));
3021       t = *n;
3022     }
3023   else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
3024     /* T is a non-localized variable.  */;
3025   else
3026     walk_tree (&t, remap_gimple_op_r, &wi, NULL);
3027 
3028   if (gimple_debug_bind_p (stmt))
3029     {
3030       gimple_debug_bind_set_var (stmt, t);
3031 
3032       if (gimple_debug_bind_has_value_p (stmt))
3033 	walk_tree (gimple_debug_bind_get_value_ptr (stmt),
3034 		   remap_gimple_op_r, &wi, NULL);
3035 
3036       /* Punt if any decl couldn't be remapped.  */
3037       if (processing_debug_stmt < 0)
3038 	gimple_debug_bind_reset_value (stmt);
3039     }
3040   else if (gimple_debug_source_bind_p (stmt))
3041     {
3042       gimple_debug_source_bind_set_var (stmt, t);
3043       /* When inlining and the source bind refers to one of the optimized
3044 	 away parameters, change the source bind into a normal debug bind
3045 	 referring to the corresponding DEBUG_EXPR_DECL that should have
3046 	 been bound before the call stmt.  */
3047       t = gimple_debug_source_bind_get_value (stmt);
3048       if (t != NULL_TREE
3049 	  && TREE_CODE (t) == PARM_DECL
3050 	  && id->call_stmt)
3051 	{
3052 	  vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
3053 	  unsigned int i;
3054 	  if (debug_args != NULL)
3055 	    {
3056 	      for (i = 0; i < vec_safe_length (*debug_args); i += 2)
3057 		if ((**debug_args)[i] == DECL_ORIGIN (t)
3058 		    && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
3059 		  {
3060 		    t = (**debug_args)[i + 1];
3061 		    stmt->subcode = GIMPLE_DEBUG_BIND;
3062 		    gimple_debug_bind_set_value (stmt, t);
3063 		    break;
3064 		  }
3065 	    }
3066 	}
3067       if (gimple_debug_source_bind_p (stmt))
3068 	walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
3069 		   remap_gimple_op_r, &wi, NULL);
3070     }
3071 
3072   processing_debug_stmt = 0;
3073 
3074   update_stmt (stmt);
3075 }
3076 
3077 /* Process deferred debug stmts.  In order to give values better odds
3078    of being successfully remapped, we delay the processing of debug
3079    stmts until all other stmts that might require remapping are
3080    processed.  */
3081 
3082 static void
3083 copy_debug_stmts (copy_body_data *id)
3084 {
3085   size_t i;
3086   gdebug *stmt;
3087 
3088   if (!id->debug_stmts.exists ())
3089     return;
3090 
3091   FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
3092     copy_debug_stmt (stmt, id);
3093 
3094   id->debug_stmts.release ();
3095 }
3096 
3097 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
3098    another function.  */
3099 
3100 static tree
3101 copy_tree_body (copy_body_data *id)
3102 {
3103   tree fndecl = id->src_fn;
3104   tree body = DECL_SAVED_TREE (fndecl);
3105 
3106   walk_tree (&body, copy_tree_body_r, id, NULL);
3107 
3108   return body;
3109 }
3110 
3111 /* Make a copy of the body of FN so that it can be inserted inline in
3112    another function.  */
3113 
3114 static tree
3115 copy_body (copy_body_data *id,
3116 	   basic_block entry_block_map, basic_block exit_block_map,
3117 	   basic_block new_entry)
3118 {
3119   tree fndecl = id->src_fn;
3120   tree body;
3121 
3122   /* If this body has a CFG, walk CFG and copy.  */
3123   gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3124   body = copy_cfg_body (id, entry_block_map, exit_block_map,
3125 			new_entry);
3126   copy_debug_stmts (id);
3127 
3128   return body;
3129 }
3130 
3131 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3132    defined in function FN, or of a data member thereof.  */
3133 
3134 static bool
3135 self_inlining_addr_expr (tree value, tree fn)
3136 {
3137   tree var;
3138 
3139   if (TREE_CODE (value) != ADDR_EXPR)
3140     return false;
3141 
3142   var = get_base_address (TREE_OPERAND (value, 0));
3143 
3144   return var && auto_var_in_fn_p (var, fn);
3145 }
3146 
3147 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3148    lexical block and line number information from BASE_STMT, if given,
3149    or from the last stmt of the block otherwise.  */
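/* For example, when an inlined parameter P is simply replaced by the
   constant 42 and no real initialization is emitted, this appends,
   roughly,
     # DEBUG P => 42
   to BB (provided -fvar-tracking-assignments is in effect and P has a
   debug-bind target), so debuggers can still display P in the inlined
   frame.  */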
3150 
3151 static gimple *
3152 insert_init_debug_bind (copy_body_data *id,
3153 			basic_block bb, tree var, tree value,
3154 			gimple *base_stmt)
3155 {
3156   gimple *note;
3157   gimple_stmt_iterator gsi;
3158   tree tracked_var;
3159 
3160   if (!gimple_in_ssa_p (id->src_cfun))
3161     return NULL;
3162 
3163   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3164     return NULL;
3165 
3166   tracked_var = target_for_debug_bind (var);
3167   if (!tracked_var)
3168     return NULL;
3169 
3170   if (bb)
3171     {
3172       gsi = gsi_last_bb (bb);
3173       if (!base_stmt && !gsi_end_p (gsi))
3174 	base_stmt = gsi_stmt (gsi);
3175     }
3176 
3177   note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3178 
3179   if (bb)
3180     {
3181       if (!gsi_end_p (gsi))
3182 	gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3183       else
3184 	gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3185     }
3186 
3187   return note;
3188 }
3189 
3190 static void
3191 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3192 {
3193   /* If VAR represents a zero-sized variable, it's possible that the
3194      assignment statement may result in no gimple statements.  */
3195   if (init_stmt)
3196     {
3197       gimple_stmt_iterator si = gsi_last_bb (bb);
3198 
3199       /* We can end up with init statements that store to a non-register
3200          from a rhs with a conversion.  Handle that here by forcing the
3201 	 rhs into a temporary.  gimple_regimplify_operands is not
3202 	 prepared to do this for us.  */
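      /* A sketch of such a case: an init statement like
	   s = (short) x_1;
	 where S is a non-register of register type gets rewritten as
	   tmp_2 = (short) x_1;
	   s = tmp_2;
	 by forcing the converted RHS into a temporary first.  */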
3203       if (!is_gimple_debug (init_stmt)
3204 	  && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3205 	  && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3206 	  && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3207 	{
3208 	  tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3209 			     gimple_expr_type (init_stmt),
3210 			     gimple_assign_rhs1 (init_stmt));
3211 	  rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3212 					  GSI_NEW_STMT);
3213 	  gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3214 	  gimple_assign_set_rhs1 (init_stmt, rhs);
3215 	}
3216       gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3217       if (!is_gimple_debug (init_stmt))
3218 	{
3219 	  gimple_regimplify_operands (init_stmt, &si);
3220 
3221 	  tree def = gimple_assign_lhs (init_stmt);
3222 	  insert_init_debug_bind (id, bb, def, def, init_stmt);
3223 	}
3224     }
3225 }
3226 
3227 /* Initialize parameter P with VALUE.  If needed, produce an init statement
3228    at the end of BB.  When BB is NULL, we return the init statement to be
3229    output later.  */
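/* A sketch of the common case: inlining "int f (int x)" at a call
   "f (a_5)" creates a local VAR_DECL for X in the caller, and then either
   maps the callee's default definition x_1(D) directly to a_5 (the SSA
   shortcut below) or emits an explicit initialization "x_2 = a_5" at the
   end of BB.  */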
3230 static gimple *
3231 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3232 		     basic_block bb, tree *vars)
3233 {
3234   gimple *init_stmt = NULL;
3235   tree var;
3236   tree rhs = value;
3237   tree def = (gimple_in_ssa_p (cfun)
3238 	      ? ssa_default_def (id->src_cfun, p) : NULL);
3239 
3240   if (value
3241       && value != error_mark_node
3242       && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3243     {
3244       /* If we can match up types by promotion/demotion do so.  */
3245       if (fold_convertible_p (TREE_TYPE (p), value))
3246 	rhs = fold_convert (TREE_TYPE (p), value);
3247       else
3248 	{
3249 	  /* ???  For valid programs we should not end up here.
3250 	     Still if we end up with truly mismatched types here, fall back
3251 	     to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3252 	     GIMPLE to the following passes.  */
3253 	  if (!is_gimple_reg_type (TREE_TYPE (value))
3254 	      || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3255 	    rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3256 	  else
3257 	    rhs = build_zero_cst (TREE_TYPE (p));
3258 	}
3259     }
3260 
3261   /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
3262      here since the type of this decl must be visible to the calling
3263      function.  */
3264   var = copy_decl_to_var (p, id);
3265 
3266   /* Declare this new variable.  */
3267   DECL_CHAIN (var) = *vars;
3268   *vars = var;
3269 
3270   /* Make gimplifier happy about this variable.  */
3271   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3272 
3273   /* If the parameter is never assigned to and has no SSA_NAMEs created,
3274      we would not need to create a new variable here at all, if it
3275      weren't for debug info.  Still, we can just use the argument
3276      value.  */
3277   if (TREE_READONLY (p)
3278       && !TREE_ADDRESSABLE (p)
3279       && value && !TREE_SIDE_EFFECTS (value)
3280       && !def)
3281     {
3282       /* We may produce non-gimple trees by adding NOPs or introduce
3283 	 invalid sharing when the operand is not really constant.
3284 	 It is not a big deal to prohibit constant propagation here, as
3285 	 we will constant propagate in the DOM1 pass anyway.  */
3286       if (is_gimple_min_invariant (value)
3287 	  && useless_type_conversion_p (TREE_TYPE (p),
3288 						 TREE_TYPE (value))
3289 	  /* We have to be very careful about ADDR_EXPR.  Make sure
3290 	     the base variable isn't a local variable of the inlined
3291 	     function, e.g., when doing recursive inlining, direct or
3292 	     mutually-recursive or whatever, which is why we don't
3293 	     just test whether fn == current_function_decl.  */
3294 	  && ! self_inlining_addr_expr (value, fn))
3295 	{
3296 	  insert_decl_map (id, p, value);
3297 	  insert_debug_decl_map (id, p, var);
3298 	  return insert_init_debug_bind (id, bb, var, value, NULL);
3299 	}
3300     }
3301 
3302   /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3303      that way, when the PARM_DECL is encountered, it will be
3304      automatically replaced by the VAR_DECL.  */
3305   insert_decl_map (id, p, var);
3306 
3307   /* Even if P was TREE_READONLY, the new VAR should not be.
3308      In the original code, we would have constructed a
3309      temporary, and then the function body would have never
3310      changed the value of P.  However, now, we will be
3311      constructing VAR directly.  The constructor body may
3312      change its value multiple times as it is being
3313      constructed.  Therefore, it must not be TREE_READONLY;
3314      the back-end assumes that a TREE_READONLY variable is
3315      assigned to only once.  */
3316   if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3317     TREE_READONLY (var) = 0;
3318 
3319   /* If there is no setup required and we are in SSA, take the easy route
3320      replacing all SSA names representing the function parameter by the
3321      SSA name passed to the function.
3322 
3323      We need to construct a map for the variable anyway, as it might be used
3324      in different SSA names when the parameter is set in the function.
3325 
3326      Do the replacement at -O0 for const arguments replaced by a constant.
3327      This is important for builtin_constant_p and other constructs requiring
3328      a constant argument to be visible in the inlined function body.  */
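  /* E.g. an always_inline callee "f (const int n)" called as "f (5)":
     even at -O0 the parameter's default definition is mapped straight to
     the constant 5, so a __builtin_constant_p (n) in the inlined body can
     still fold to true.  */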
3329   if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3330       && (optimize
3331           || (TREE_READONLY (p)
3332 	      && is_gimple_min_invariant (rhs)))
3333       && (TREE_CODE (rhs) == SSA_NAME
3334 	  || is_gimple_min_invariant (rhs))
3335       && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3336     {
3337       insert_decl_map (id, def, rhs);
3338       return insert_init_debug_bind (id, bb, var, rhs, NULL);
3339     }
3340 
3341   /* If the value of the argument is never used, don't bother initializing
3342      it.  */
3343   if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3344     {
3345       gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3346       return insert_init_debug_bind (id, bb, var, rhs, NULL);
3347     }
3348 
3349   /* Initialize this VAR_DECL from the equivalent argument.  Convert
3350      the argument to the proper type in case it was promoted.  */
3351   if (value)
3352     {
3353       if (rhs == error_mark_node)
3354 	{
3355 	  insert_decl_map (id, p, var);
3356 	  return insert_init_debug_bind (id, bb, var, rhs, NULL);
3357 	}
3358 
3359       STRIP_USELESS_TYPE_CONVERSION (rhs);
3360 
3361       /* If we are in SSA form, properly remap the default definition
3362          or assign to a dummy SSA name if the parameter is unused and
3363 	 we are not optimizing.  */
3364       if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3365 	{
3366 	  if (def)
3367 	    {
3368 	      def = remap_ssa_name (def, id);
3369 	      init_stmt = gimple_build_assign (def, rhs);
3370 	      SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3371 	      set_ssa_default_def (cfun, var, NULL);
3372 	    }
3373 	  else if (!optimize)
3374 	    {
3375 	      def = make_ssa_name (var);
3376 	      init_stmt = gimple_build_assign (def, rhs);
3377 	    }
3378 	}
3379       else
3380         init_stmt = gimple_build_assign (var, rhs);
3381 
3382       if (bb && init_stmt)
3383         insert_init_stmt (id, bb, init_stmt);
3384     }
3385   return init_stmt;
3386 }
3387 
3388 /* Generate code to initialize the parameters of the function at the
3389    top of the stack in ID from the GIMPLE_CALL STMT.  */
3390 
3391 static void
3392 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3393 			       tree fn, basic_block bb)
3394 {
3395   tree parms;
3396   size_t i;
3397   tree p;
3398   tree vars = NULL_TREE;
3399   tree static_chain = gimple_call_chain (stmt);
3400 
3401   /* Figure out what the parameters are.  */
3402   parms = DECL_ARGUMENTS (fn);
3403 
3404   /* Loop through the parameter declarations, replacing each with an
3405      equivalent VAR_DECL, appropriately initialized.  */
3406   for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3407     {
3408       tree val;
3409       val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3410       setup_one_parameter (id, p, val, fn, bb, &vars);
3411     }
3412   /* After remapping the parameters, remap their types.  This has to be done
3413      in a second loop over all parameters to appropriately remap
3414      variable sized arrays when the size is specified in a
3415      parameter following the array.  */
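  /* Sketch: for a callee like "void f (int n, int a[n])" (or one using
     the GNU parameter forward-declaration extension so the size follows
     the array), the type of A refers to N, so the types can only be
     remapped once every parameter already has its replacement in the
     decl map -- hence this second loop.  */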
3416   for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3417     {
3418       tree *varp = id->decl_map->get (p);
3419       if (varp && VAR_P (*varp))
3420 	{
3421 	  tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3422 		      ? ssa_default_def (id->src_cfun, p) : NULL);
3423 	  tree var = *varp;
3424 	  TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3425 	  /* Also remap the type of the default definition if the parameter
3426 	     setup remapped it to the default definition of the parameter's
3427 	     replacement.  */
3428 	  if (def)
3429 	    {
3430 	      tree *defp = id->decl_map->get (def);
3431 	      if (defp
3432 		  && TREE_CODE (*defp) == SSA_NAME
3433 		  && SSA_NAME_VAR (*defp) == var)
3434 		TREE_TYPE (*defp) = TREE_TYPE (var);
3435 	    }
3436 	}
3437     }
3438 
3439   /* Initialize the static chain.  */
3440   p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3441   gcc_assert (fn != current_function_decl);
3442   if (p)
3443     {
3444       /* No static chain?  Seems like a bug in tree-nested.c.  */
3445       gcc_assert (static_chain);
3446 
3447       setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3448     }
3449 
3450   declare_inline_vars (id->block, vars);
3451 }
3452 
3453 
3454 /* Declare a return variable to replace the RESULT_DECL for the
3455    function we are calling.  An appropriate DECL_STMT is returned.
3456    The USE_STMT is filled to contain a use of the declaration to
3457    indicate the return value of the function.
3458 
3459    RETURN_SLOT, if non-null, is the place in which to store the result.  It
3460    is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
3461    was the LHS of the MODIFY_EXPR to which this call is the RHS.
3462 
3463    The return value is a (possibly null) value that holds the result
3464    as seen by the caller.  */
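/* A sketch of the usual flow: for "x = f ();" where f returns int, a
   VAR_DECL (say "retval") is created, f's RESULT_DECL is mapped to it,
   and "retval" is returned as the use so the call statement can later be
   turned into "x = retval;".  When MODIFY_DEST itself is usable (no type
   promotion, not addressable, not a global), it is reused directly and no
   temporary is created at all.  */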
3465 
3466 static tree
3467 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3468 			 basic_block entry_bb)
3469 {
3470   tree callee = id->src_fn;
3471   tree result = DECL_RESULT (callee);
3472   tree callee_type = TREE_TYPE (result);
3473   tree caller_type;
3474   tree var, use;
3475 
3476   /* Handle type-mismatches in the function declaration return type
3477      vs. the call expression.  */
3478   if (modify_dest)
3479     caller_type = TREE_TYPE (modify_dest);
3480   else
3481     caller_type = TREE_TYPE (TREE_TYPE (callee));
3482 
3483   /* We don't need to do anything for functions that don't return anything.  */
3484   if (VOID_TYPE_P (callee_type))
3485     return NULL_TREE;
3486 
3487   /* If there was a return slot, then the return value is the
3488      dereferenced address of that object.  */
3489   if (return_slot)
3490     {
3491       /* The front end shouldn't have used both return_slot and
3492 	 a modify expression.  */
3493       gcc_assert (!modify_dest);
3494       if (DECL_BY_REFERENCE (result))
3495 	{
3496 	  tree return_slot_addr = build_fold_addr_expr (return_slot);
3497 	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3498 
3499 	  /* We are going to construct *&return_slot and we can't do that
3500 	     for variables believed to be not addressable.
3501 
3502 	     FIXME: This check can possibly trigger, because values returned
3503 	     via the return slot optimization are not believed to have their
3504 	     address taken by alias analysis.  */
3505 	  gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3506 	  var = return_slot_addr;
3507 	}
3508       else
3509 	{
3510 	  var = return_slot;
3511 	  gcc_assert (TREE_CODE (var) != SSA_NAME);
3512 	  if (TREE_ADDRESSABLE (result))
3513 	    mark_addressable (var);
3514 	}
3515       if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3516            || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3517 	  && !DECL_GIMPLE_REG_P (result)
3518 	  && DECL_P (var))
3519 	DECL_GIMPLE_REG_P (var) = 0;
3520       use = NULL;
3521       goto done;
3522     }
3523 
3524   /* All types requiring non-trivial constructors should have been handled.  */
3525   gcc_assert (!TREE_ADDRESSABLE (callee_type));
3526 
3527   /* Attempt to avoid creating a new temporary variable.  */
3528   if (modify_dest
3529       && TREE_CODE (modify_dest) != SSA_NAME)
3530     {
3531       bool use_it = false;
3532 
3533       /* We can't use MODIFY_DEST if there's type promotion involved.  */
3534       if (!useless_type_conversion_p (callee_type, caller_type))
3535 	use_it = false;
3536 
3537       /* ??? If we're assigning to a variable sized type, then we must
3538 	 reuse the destination variable, because we've no good way to
3539 	 create variable sized temporaries at this point.  */
3540       else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3541 	use_it = true;
3542 
3543       /* If the callee cannot possibly modify MODIFY_DEST, then we can
3544 	 reuse it as the result of the call directly.  Don't do this if
3545 	 it would promote MODIFY_DEST to addressable.  */
3546       else if (TREE_ADDRESSABLE (result))
3547 	use_it = false;
3548       else
3549 	{
3550 	  tree base_m = get_base_address (modify_dest);
3551 
3552 	  /* If the base isn't a decl, then it's a pointer, and we don't
3553 	     know where that's going to go.  */
3554 	  if (!DECL_P (base_m))
3555 	    use_it = false;
3556 	  else if (is_global_var (base_m))
3557 	    use_it = false;
3558 	  else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3559 		    || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3560 		   && !DECL_GIMPLE_REG_P (result)
3561 		   && DECL_GIMPLE_REG_P (base_m))
3562 	    use_it = false;
3563 	  else if (!TREE_ADDRESSABLE (base_m))
3564 	    use_it = true;
3565 	}
3566 
3567       if (use_it)
3568 	{
3569 	  var = modify_dest;
3570 	  use = NULL;
3571 	  goto done;
3572 	}
3573     }
3574 
3575   gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3576 
3577   var = copy_result_decl_to_var (result, id);
3578   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3579 
3580   /* Do not have the rest of GCC warn about this variable as it should
3581      not be visible to the user.  */
3582   TREE_NO_WARNING (var) = 1;
3583 
3584   declare_inline_vars (id->block, var);
3585 
3586   /* Build the use expr.  If the return type of the function was
3587      promoted, convert it back to the expected type.  */
3588   use = var;
3589   if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3590     {
3591       /* If we can match up types by promotion/demotion do so.  */
3592       if (fold_convertible_p (caller_type, var))
3593 	use = fold_convert (caller_type, var);
3594       else
3595 	{
3596 	  /* ???  For valid programs we should not end up here.
3597 	     Still if we end up with truly mismatched types here, fall back
3598 	     to using a MEM_REF to not leak invalid GIMPLE to the following
3599 	     passes.  */
3600 	  /* Prevent var from being written into SSA form.  */
3601 	  if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3602 	      || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3603 	    DECL_GIMPLE_REG_P (var) = false;
3604 	  else if (is_gimple_reg_type (TREE_TYPE (var)))
3605 	    TREE_ADDRESSABLE (var) = true;
3606 	  use = fold_build2 (MEM_REF, caller_type,
3607 			     build_fold_addr_expr (var),
3608 			     build_int_cst (ptr_type_node, 0));
3609 	}
3610     }
3611 
3612   STRIP_USELESS_TYPE_CONVERSION (use);
3613 
3614   if (DECL_BY_REFERENCE (result))
3615     {
3616       TREE_ADDRESSABLE (var) = 1;
3617       var = build_fold_addr_expr (var);
3618     }
3619 
3620  done:
3621   /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3622      way, when the RESULT_DECL is encountered, it will be
3623      automatically replaced by the VAR_DECL.
3624 
3625      When returning by reference, ensure that RESULT_DECL remaps to
3626      gimple_val.  */
3627   if (DECL_BY_REFERENCE (result)
3628       && !is_gimple_val (var))
3629     {
3630       tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3631       insert_decl_map (id, result, temp);
3632       /* When RESULT_DECL is in SSA form, we need to remap and initialize
3633 	 its default_def SSA_NAME.  */
3634       if (gimple_in_ssa_p (id->src_cfun)
3635 	  && is_gimple_reg (result))
3636 	{
3637 	  temp = make_ssa_name (temp);
3638 	  insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3639 	}
3640       insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3641     }
3642   else
3643     insert_decl_map (id, result, var);
3644 
3645   /* Remember this so we can ignore it in remap_decls.  */
3646   id->retvar = var;
3647   return use;
3648 }
3649 
3650 /* Determine if the function can be copied.  If so, return NULL.  If
3651    not, return a string describing the reason for failure.  */
3652 
3653 const char *
3654 copy_forbidden (struct function *fun)
3655 {
3656   const char *reason = fun->cannot_be_copied_reason;
3657 
3658   /* Only examine the function once.  */
3659   if (fun->cannot_be_copied_set)
3660     return reason;
3661 
3662   /* We cannot copy a function that receives a non-local goto
3663      because we cannot remap the destination label used in the
3664      function that is performing the non-local goto.  */
3665   /* ??? Actually, this should be possible, if we work at it.
3666      No doubt there's just a handful of places that simply
3667      assume it doesn't happen and don't substitute properly.  */
3668   if (fun->has_nonlocal_label)
3669     {
3670       reason = G_("function %q+F can never be copied "
3671 		  "because it receives a non-local goto");
3672       goto fail;
3673     }
3674 
3675   if (fun->has_forced_label_in_static)
3676     {
3677       reason = G_("function %q+F can never be copied because it saves "
3678 		  "address of local label in a static variable");
3679       goto fail;
3680     }
3681 
3682  fail:
3683   fun->cannot_be_copied_reason = reason;
3684   fun->cannot_be_copied_set = true;
3685   return reason;
3686 }
3687 
3688 
3689 static const char *inline_forbidden_reason;
3690 
3691 /* A callback for walk_gimple_seq to handle statements.  Returns non-null
3692    iff a function cannot be inlined.  Also sets the reason why. */
3693 
3694 static tree
3695 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3696 			 struct walk_stmt_info *wip)
3697 {
3698   tree fn = (tree) wip->info;
3699   tree t;
3700   gimple *stmt = gsi_stmt (*gsi);
3701 
3702   switch (gimple_code (stmt))
3703     {
3704     case GIMPLE_CALL:
3705       /* Refuse to inline an alloca call unless the user has explicitly
3706 	 forced it, as this may change the program's memory overhead
3707 	 drastically when the function using alloca is called in a loop.
3708 	 In the GCC present in SPEC2000, inlining into schedule_block caused
3709 	 it to require 2GB of RAM instead of 256MB.  Don't do so for alloca
3710 	 calls emitted for VLA objects, as those can't cause unbounded growth
3711 	 (they're always wrapped inside stack_save/stack_restore regions).  */
3712       if (gimple_maybe_alloca_call_p (stmt)
3713 	  && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3714 	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3715 	{
3716 	  inline_forbidden_reason
3717 	    = G_("function %q+F can never be inlined because it uses "
3718 		 "alloca (override using the always_inline attribute)");
3719 	  *handled_ops_p = true;
3720 	  return fn;
3721 	}
3722 
3723       t = gimple_call_fndecl (stmt);
3724       if (t == NULL_TREE)
3725 	break;
3726 
3727       /* We cannot inline functions that call setjmp.  */
3728       if (setjmp_call_p (t))
3729 	{
3730 	  inline_forbidden_reason
3731 	    = G_("function %q+F can never be inlined because it uses setjmp");
3732 	  *handled_ops_p = true;
3733 	  return t;
3734 	}
3735 
3736       if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3737 	switch (DECL_FUNCTION_CODE (t))
3738 	  {
3739 	    /* We cannot inline functions that take a variable number of
3740 	       arguments.  */
3741 	  case BUILT_IN_VA_START:
3742 	  case BUILT_IN_NEXT_ARG:
3743 	  case BUILT_IN_VA_END:
3744 	    inline_forbidden_reason
3745 	      = G_("function %q+F can never be inlined because it "
3746 		   "uses variable argument lists");
3747 	    *handled_ops_p = true;
3748 	    return t;
3749 
3750 	  case BUILT_IN_LONGJMP:
3751 	    /* We can't inline functions that call __builtin_longjmp at
3752 	       all.  The non-local goto machinery really requires the
3753 	       destination be in a different function.  If we allow the
3754 	       function calling __builtin_longjmp to be inlined into the
3755 	       function calling __builtin_setjmp, Things will Go Awry.  */
3756 	    inline_forbidden_reason
3757 	      = G_("function %q+F can never be inlined because "
3758 		   "it uses setjmp-longjmp exception handling");
3759 	    *handled_ops_p = true;
3760 	    return t;
3761 
3762 	  case BUILT_IN_NONLOCAL_GOTO:
3763 	    /* Similarly.  */
3764 	    inline_forbidden_reason
3765 	      = G_("function %q+F can never be inlined because "
3766 		   "it uses non-local goto");
3767 	    *handled_ops_p = true;
3768 	    return t;
3769 
3770 	  case BUILT_IN_RETURN:
3771 	  case BUILT_IN_APPLY_ARGS:
3772 	    /* If a __builtin_apply_args caller would be inlined,
3773 	       it would be saving arguments of the function it has
3774 	       been inlined into.  Similarly, __builtin_return would
3775 	       return from the function it has been inlined into.  */
3776 	    inline_forbidden_reason
3777 	      = G_("function %q+F can never be inlined because "
3778 		   "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
3779 	    *handled_ops_p = true;
3780 	    return t;
3781 
3782 	  default:
3783 	    break;
3784 	  }
3785       break;
3786 
3787     case GIMPLE_GOTO:
3788       t = gimple_goto_dest (stmt);
3789 
3790       /* We will not inline a function which uses computed goto.  The
3791 	 addresses of its local labels, which may be tucked into
3792 	 global storage, are of course not constant across
3793 	 instantiations, which causes unexpected behavior.  */
3794       if (TREE_CODE (t) != LABEL_DECL)
3795 	{
3796 	  inline_forbidden_reason
3797 	    = G_("function %q+F can never be inlined "
3798 		 "because it contains a computed goto");
3799 	  *handled_ops_p = true;
3800 	  return t;
3801 	}
3802       break;
3803 
3804     default:
3805       break;
3806     }
3807 
3808   *handled_ops_p = false;
3809   return NULL_TREE;
3810 }
3811 
3812 /* Return true if FNDECL is a function that cannot be inlined into
3813    another one.  */
3814 
3815 static bool
3816 inline_forbidden_p (tree fndecl)
3817 {
3818   struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3819   struct walk_stmt_info wi;
3820   basic_block bb;
3821   bool forbidden_p = false;
3822 
3823   /* First check for shared reasons not to copy the code.  */
3824   inline_forbidden_reason = copy_forbidden (fun);
3825   if (inline_forbidden_reason != NULL)
3826     return true;
3827 
3828   /* Next, walk the statements of the function looking for
3829      constructs we can't handle, or that are non-optimal for inlining.  */
3830   hash_set<tree> visited_nodes;
3831   memset (&wi, 0, sizeof (wi));
3832   wi.info = (void *) fndecl;
3833   wi.pset = &visited_nodes;
3834 
3835   FOR_EACH_BB_FN (bb, fun)
3836     {
3837       gimple *ret;
3838       gimple_seq seq = bb_seq (bb);
3839       ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3840       forbidden_p = (ret != NULL);
3841       if (forbidden_p)
3842 	break;
3843     }
3844 
3845   return forbidden_p;
3846 }
3847 
3848 /* Return false if the function FNDECL cannot be inlined on account of its
3849    attributes, true otherwise.  */
3850 static bool
3851 function_attribute_inlinable_p (const_tree fndecl)
3852 {
3853   if (targetm.attribute_table)
3854     {
3855       const_tree a;
3856 
3857       for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3858 	{
3859 	  const_tree name = TREE_PURPOSE (a);
3860 	  int i;
3861 
3862 	  for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3863 	    if (is_attribute_p (targetm.attribute_table[i].name, name))
3864 	      return targetm.function_attribute_inlinable_p (fndecl);
3865 	}
3866     }
3867 
3868   return true;
3869 }
3870 
3871 /* Returns nonzero if FN is a function that does not have any
3872    fundamental inline blocking properties.  */
3873 
3874 bool
3875 tree_inlinable_function_p (tree fn)
3876 {
3877   bool inlinable = true;
3878   bool do_warning;
3879   tree always_inline;
3880 
3881   /* If we've already decided this function shouldn't be inlined,
3882      there's no need to check again.  */
3883   if (DECL_UNINLINABLE (fn))
3884     return false;
3885 
3886   /* We only warn for functions declared `inline' by the user.  */
3887   do_warning = (warn_inline
3888 		&& DECL_DECLARED_INLINE_P (fn)
3889 		&& !DECL_NO_INLINE_WARNING_P (fn)
3890 		&& !DECL_IN_SYSTEM_HEADER (fn));
3891 
3892   always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3893 
3894   if (flag_no_inline
3895       && always_inline == NULL)
3896     {
3897       if (do_warning)
3898 	warning (OPT_Winline, "function %q+F can never be inlined because it "
3899 		 "is suppressed using %<-fno-inline%>", fn);
3900       inlinable = false;
3901     }
3902 
3903   else if (!function_attribute_inlinable_p (fn))
3904     {
3905       if (do_warning)
3906         warning (OPT_Winline, "function %q+F can never be inlined because it "
3907                  "uses attributes conflicting with inlining", fn);
3908       inlinable = false;
3909     }
3910 
3911   else if (inline_forbidden_p (fn))
3912     {
3913       /* See if we should warn about uninlinable functions.  Previously,
3914 	 some of these warnings would be issued while trying to expand
3915 	 the function inline, but that would cause multiple warnings
3916 	 about functions that would for example call alloca.  But since
3917 	 this is a property of the function, just one warning is enough.
3918 	 As a bonus we can now give more details about the reason why a
3919 	 function is not inlinable.  */
3920       if (always_inline)
3921 	error (inline_forbidden_reason, fn);
3922       else if (do_warning)
3923 	warning (OPT_Winline, inline_forbidden_reason, fn);
3924 
3925       inlinable = false;
3926     }
3927 
3928   /* Squirrel away the result so that we don't have to check again.  */
3929   DECL_UNINLINABLE (fn) = !inlinable;
3930 
3931   return inlinable;
3932 }
3933 
3934 /* Estimate the cost of a memory move of type TYPE.  Use the machine-dependent
3935    word size, take a possible memcpy call into account, and return the
3936    cost based on whether we optimize for size or speed, according to SPEED_P.  */
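/* A worked example with illustrative numbers (the macros are per-target):
   with MOVE_MAX_PIECES == 8, moving a 24-byte aggregate costs
   (24 + 8 - 1) / 8 == 3, while anything larger than
   MOVE_MAX_PIECES * MOVE_RATIO (speed_p) is assumed to become a memcpy
   call and is charged a flat 4 (three argument setups plus the call).  */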
3937 
3938 int
3939 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3940 {
3941   HOST_WIDE_INT size;
3942 
3943   gcc_assert (!VOID_TYPE_P (type));
3944 
3945   if (TREE_CODE (type) == VECTOR_TYPE)
3946     {
3947       scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
3948       machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
3949       int orig_mode_size
3950 	= estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
3951       int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
3952       return ((orig_mode_size + simd_mode_size - 1)
3953 	      / simd_mode_size);
3954     }
3955 
3956   size = int_size_in_bytes (type);
3957 
3958   if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3959     /* Cost of a memcpy call, 3 arguments and the call.  */
3960     return 4;
3961   else
3962     return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3963 }
3964 
3965 /* Returns the cost of operation CODE, according to WEIGHTS.  */
3966 
3967 static int
3968 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3969 			tree op1 ATTRIBUTE_UNUSED, tree op2)
3970 {
3971   switch (code)
3972     {
3973     /* These are "free" conversions, or their presumed cost
3974        is folded into other operations.  */
3975     case RANGE_EXPR:
3976     CASE_CONVERT:
3977     case COMPLEX_EXPR:
3978     case PAREN_EXPR:
3979     case VIEW_CONVERT_EXPR:
3980       return 0;
3981 
3982     /* Assign cost of 1 to usual operations.
3983        ??? We may consider mapping RTL costs to this.  */
3984     case COND_EXPR:
3985     case VEC_COND_EXPR:
3986     case VEC_PERM_EXPR:
3987 
3988     case PLUS_EXPR:
3989     case POINTER_PLUS_EXPR:
3990     case POINTER_DIFF_EXPR:
3991     case MINUS_EXPR:
3992     case MULT_EXPR:
3993     case MULT_HIGHPART_EXPR:
3994 
3995     case ADDR_SPACE_CONVERT_EXPR:
3996     case FIXED_CONVERT_EXPR:
3997     case FIX_TRUNC_EXPR:
3998 
3999     case NEGATE_EXPR:
4000     case FLOAT_EXPR:
4001     case MIN_EXPR:
4002     case MAX_EXPR:
4003     case ABS_EXPR:
4004     case ABSU_EXPR:
4005 
4006     case LSHIFT_EXPR:
4007     case RSHIFT_EXPR:
4008     case LROTATE_EXPR:
4009     case RROTATE_EXPR:
4010 
4011     case BIT_IOR_EXPR:
4012     case BIT_XOR_EXPR:
4013     case BIT_AND_EXPR:
4014     case BIT_NOT_EXPR:
4015 
4016     case TRUTH_ANDIF_EXPR:
4017     case TRUTH_ORIF_EXPR:
4018     case TRUTH_AND_EXPR:
4019     case TRUTH_OR_EXPR:
4020     case TRUTH_XOR_EXPR:
4021     case TRUTH_NOT_EXPR:
4022 
4023     case LT_EXPR:
4024     case LE_EXPR:
4025     case GT_EXPR:
4026     case GE_EXPR:
4027     case EQ_EXPR:
4028     case NE_EXPR:
4029     case ORDERED_EXPR:
4030     case UNORDERED_EXPR:
4031 
4032     case UNLT_EXPR:
4033     case UNLE_EXPR:
4034     case UNGT_EXPR:
4035     case UNGE_EXPR:
4036     case UNEQ_EXPR:
4037     case LTGT_EXPR:
4038 
4039     case CONJ_EXPR:
4040 
4041     case PREDECREMENT_EXPR:
4042     case PREINCREMENT_EXPR:
4043     case POSTDECREMENT_EXPR:
4044     case POSTINCREMENT_EXPR:
4045 
4046     case REALIGN_LOAD_EXPR:
4047 
4048     case WIDEN_SUM_EXPR:
4049     case WIDEN_MULT_EXPR:
4050     case DOT_PROD_EXPR:
4051     case SAD_EXPR:
4052     case WIDEN_MULT_PLUS_EXPR:
4053     case WIDEN_MULT_MINUS_EXPR:
4054     case WIDEN_LSHIFT_EXPR:
4055 
4056     case VEC_WIDEN_MULT_HI_EXPR:
4057     case VEC_WIDEN_MULT_LO_EXPR:
4058     case VEC_WIDEN_MULT_EVEN_EXPR:
4059     case VEC_WIDEN_MULT_ODD_EXPR:
4060     case VEC_UNPACK_HI_EXPR:
4061     case VEC_UNPACK_LO_EXPR:
4062     case VEC_UNPACK_FLOAT_HI_EXPR:
4063     case VEC_UNPACK_FLOAT_LO_EXPR:
4064     case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
4065     case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
4066     case VEC_PACK_TRUNC_EXPR:
4067     case VEC_PACK_SAT_EXPR:
4068     case VEC_PACK_FIX_TRUNC_EXPR:
4069     case VEC_PACK_FLOAT_EXPR:
4070     case VEC_WIDEN_LSHIFT_HI_EXPR:
4071     case VEC_WIDEN_LSHIFT_LO_EXPR:
4072     case VEC_DUPLICATE_EXPR:
4073     case VEC_SERIES_EXPR:
4074 
4075       return 1;
4076 
4077     /* A few special cases of expensive operations.  This is useful
4078        to avoid inlining functions having too many of these.  */
4079     case TRUNC_DIV_EXPR:
4080     case CEIL_DIV_EXPR:
4081     case FLOOR_DIV_EXPR:
4082     case ROUND_DIV_EXPR:
4083     case EXACT_DIV_EXPR:
4084     case TRUNC_MOD_EXPR:
4085     case CEIL_MOD_EXPR:
4086     case FLOOR_MOD_EXPR:
4087     case ROUND_MOD_EXPR:
4088     case RDIV_EXPR:
4089       if (TREE_CODE (op2) != INTEGER_CST)
4090         return weights->div_mod_cost;
4091       return 1;
4092 
4093     /* Bit-field insertion needs several shift and mask operations.  */
4094     case BIT_INSERT_EXPR:
4095       return 3;
4096 
4097     default:
4098       /* We expect a copy assignment with no operator.  */
4099       gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4100       return 0;
4101     }
4102 }
4103 
4104 
4105 /* Estimate number of instructions that will be created by expanding
4106    the statements in the statement sequence STMTS.
4107    WEIGHTS contains weights attributed to various constructs.  */
4108 
4109 int
4110 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4111 {
4112   int cost;
4113   gimple_stmt_iterator gsi;
4114 
4115   cost = 0;
4116   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4117     cost += estimate_num_insns (gsi_stmt (gsi), weights);
4118 
4119   return cost;
4120 }
4121 
4122 
4123 /* Estimate number of instructions that will be created by expanding STMT.
4124    WEIGHTS contains weights attributed to various constructs.  */
4125 
4126 int
4127 estimate_num_insns (gimple *stmt, eni_weights *weights)
4128 {
4129   unsigned cost, i;
4130   enum gimple_code code = gimple_code (stmt);
4131   tree lhs;
4132   tree rhs;
4133 
4134   switch (code)
4135     {
4136     case GIMPLE_ASSIGN:
4137       /* Try to estimate the cost of assignments.  We have two cases to
4138 	 deal with:
4139 	 1) Simple assignments to registers;
4140 	 2) Stores to things that must live in memory.  This includes
4141 	    "normal" stores to scalars, but also assignments of large
4142 	    structures, or constructors of big arrays;
4143 
4144 	 Let us look at these two cases, assuming we have "a = b + C":
4145 	 <GIMPLE_ASSIGN <var_decl "a">
4146 	        <plus_expr <var_decl "b"> <constant C>>
4147 	 If "a" is a GIMPLE register, the assignment to it is free on almost
4148 	 any target, because "a" usually ends up in a real register.  Hence
4149 	 the only cost of this expression comes from the PLUS_EXPR, and we
4150 	 can ignore the GIMPLE_ASSIGN.
4151 	 If "a" is not a GIMPLE register, the assignment to "a" will most
4152 	 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4153 	 of moving something into "a", which we compute using the function
4154 	 estimate_move_cost.  */
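      /* A rough worked example: copying a 16-byte structure from memory
	 to memory costs the store move cost plus the load move cost
	 (2 + 2 with 8-byte pieces) plus 0 for the operator, i.e. 4;
	 "a_2 = b_1 + 4" with a_2 in a register is just the PLUS_EXPR
	 cost of 1.  */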
4155       if (gimple_clobber_p (stmt))
4156 	return 0;	/* ={v} {CLOBBER} stmt expands to nothing.  */
4157 
4158       lhs = gimple_assign_lhs (stmt);
4159       rhs = gimple_assign_rhs1 (stmt);
4160 
4161       cost = 0;
4162 
4163       /* Account for the cost of moving to / from memory.  */
4164       if (gimple_store_p (stmt))
4165 	cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4166       if (gimple_assign_load_p (stmt))
4167 	cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4168 
4169       cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4170       				      gimple_assign_rhs1 (stmt),
4171 				      get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4172 				      == GIMPLE_BINARY_RHS
4173 				      ? gimple_assign_rhs2 (stmt) : NULL);
4174       break;
4175 
4176     case GIMPLE_COND:
4177       cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4178       				         gimple_op (stmt, 0),
4179 				         gimple_op (stmt, 1));
4180       break;
4181 
4182     case GIMPLE_SWITCH:
4183       {
4184 	gswitch *switch_stmt = as_a <gswitch *> (stmt);
4185 	/* Take into account the cost of the switch + guess 2 conditional jumps for
4186 	   each case label.
4187 
4188 	   TODO: once the switch expansion logic is sufficiently separated, we can
4189 	   do a better job of estimating the cost of the switch.  */
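	/* For example, a switch with 16 case labels is charged
	   floor_log2 (16) * 2 == 8 under the time-based weights (roughly a
	   balanced decision tree) and 16 * 2 == 32 under the size-based
	   weights.  */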
4190 	if (weights->time_based)
4191 	  cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4192 	else
4193 	  cost = gimple_switch_num_labels (switch_stmt) * 2;
4194       }
4195       break;
4196 
4197     case GIMPLE_CALL:
4198       {
4199 	tree decl;
4200 
4201 	if (gimple_call_internal_p (stmt))
4202 	  return 0;
4203 	else if ((decl = gimple_call_fndecl (stmt))
4204 		 && fndecl_built_in_p (decl))
4205 	  {
4206 	    /* Do not special-case builtins where we see the body.
4207 	       This just confuses the inliner.  */
4208 	    struct cgraph_node *node;
4209 	    if (!(node = cgraph_node::get (decl))
4210 		|| node->definition)
4211 	      ;
4212 	    /* For builtins that are likely expanded to nothing or
4213 	       inlined, do not account operand costs.  */
4214 	    else if (is_simple_builtin (decl))
4215 	      return 0;
4216 	    else if (is_inexpensive_builtin (decl))
4217 	      return weights->target_builtin_call_cost;
4218 	    else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4219 	      {
4220 		/* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4221 		   specialize the cheap expansion we do here.
4222 		   ???  This asks for a more general solution.  */
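		/* E.g. "pow (y_1, 2.0)" is therefore charged like
		   "y_1 * y_1" -- the MULT_EXPR cost of 1 -- instead of a
		   full call.  */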
4223 		switch (DECL_FUNCTION_CODE (decl))
4224 		  {
4225 		    case BUILT_IN_POW:
4226 		    case BUILT_IN_POWF:
4227 		    case BUILT_IN_POWL:
4228 		      if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4229 			  && (real_equal
4230 			      (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4231 			       &dconst2)))
4232 			return estimate_operator_cost
4233 			    (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4234 			     gimple_call_arg (stmt, 0));
4235 		      break;
4236 
4237 		    default:
4238 		      break;
4239 		  }
4240 	      }
4241 	  }
4242 
4243 	cost = decl ? weights->call_cost : weights->indirect_call_cost;
4244 	if (gimple_call_lhs (stmt))
4245 	  cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4246 				      weights->time_based);
4247 	for (i = 0; i < gimple_call_num_args (stmt); i++)
4248 	  {
4249 	    tree arg = gimple_call_arg (stmt, i);
4250 	    cost += estimate_move_cost (TREE_TYPE (arg),
4251 					weights->time_based);
4252 	  }
4253 	break;
4254       }
4255 
4256     case GIMPLE_RETURN:
4257       return weights->return_cost;
4258 
4259     case GIMPLE_GOTO:
4260     case GIMPLE_LABEL:
4261     case GIMPLE_NOP:
4262     case GIMPLE_PHI:
4263     case GIMPLE_PREDICT:
4264     case GIMPLE_DEBUG:
4265       return 0;
4266 
4267     case GIMPLE_ASM:
4268       {
4269 	int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4270 	/* 1000 means infinity. This avoids overflows later
4271 	   with very long asm statements.  */
4272 	if (count > 1000)
4273 	  count = 1000;
4274 	/* If this asm is asm inline, count anything as minimum size.  */
4275 	if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4276 	  count = MIN (1, count);
4277 	return MAX (1, count);
4278       }
4279 
4280     case GIMPLE_RESX:
4281       /* This is either going to be an external function call with one
4282 	 argument, or two register copy statements plus a goto.  */
4283       return 2;
4284 
4285     case GIMPLE_EH_DISPATCH:
4286       /* ??? This is going to turn into a switch statement.  Ideally
4287 	 we'd have a look at the eh region and estimate the number of
4288 	 edges involved.  */
4289       return 10;
4290 
4291     case GIMPLE_BIND:
4292       return estimate_num_insns_seq (
4293 	       gimple_bind_body (as_a <gbind *> (stmt)),
4294 	       weights);
4295 
4296     case GIMPLE_EH_FILTER:
4297       return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4298 
4299     case GIMPLE_CATCH:
4300       return estimate_num_insns_seq (gimple_catch_handler (
4301 				       as_a <gcatch *> (stmt)),
4302 				     weights);
4303 
4304     case GIMPLE_TRY:
4305       return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4306               + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4307 
4308     /* OMP directives are generally very expensive.  */
4309 
4310     case GIMPLE_OMP_RETURN:
4311     case GIMPLE_OMP_SECTIONS_SWITCH:
4312     case GIMPLE_OMP_ATOMIC_STORE:
4313     case GIMPLE_OMP_CONTINUE:
4314       /* ...except these, which are cheap.  */
4315       return 0;
4316 
4317     case GIMPLE_OMP_ATOMIC_LOAD:
4318       return weights->omp_cost;
4319 
4320     case GIMPLE_OMP_FOR:
4321       return (weights->omp_cost
4322               + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4323               + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4324 
4325     case GIMPLE_OMP_PARALLEL:
4326     case GIMPLE_OMP_TASK:
4327     case GIMPLE_OMP_CRITICAL:
4328     case GIMPLE_OMP_MASTER:
4329     case GIMPLE_OMP_TASKGROUP:
4330     case GIMPLE_OMP_ORDERED:
4331     case GIMPLE_OMP_SECTION:
4332     case GIMPLE_OMP_SECTIONS:
4333     case GIMPLE_OMP_SINGLE:
4334     case GIMPLE_OMP_TARGET:
4335     case GIMPLE_OMP_TEAMS:
4336       return (weights->omp_cost
4337               + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4338 
4339     case GIMPLE_TRANSACTION:
4340       return (weights->tm_cost
4341 	      + estimate_num_insns_seq (gimple_transaction_body (
4342 					  as_a <gtransaction *> (stmt)),
4343 					weights));
4344 
4345     default:
4346       gcc_unreachable ();
4347     }
4348 
4349   return cost;
4350 }
4351 
4352 /* Estimate number of instructions that will be created by expanding
4353    function FNDECL.  WEIGHTS contains weights attributed to various
4354    constructs.  */
4355 
4356 int
4357 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4358 {
4359   struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4360   gimple_stmt_iterator bsi;
4361   basic_block bb;
4362   int n = 0;
4363 
4364   gcc_assert (my_function && my_function->cfg);
4365   FOR_EACH_BB_FN (bb, my_function)
4366     {
4367       for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4368 	n += estimate_num_insns (gsi_stmt (bsi), weights);
4369     }
4370 
4371   return n;
4372 }
4373 
4374 
4375 /* Initializes weights used by estimate_num_insns.  */
4376 
4377 void
4378 init_inline_once (void)
4379 {
4380   eni_size_weights.call_cost = 1;
4381   eni_size_weights.indirect_call_cost = 3;
4382   eni_size_weights.target_builtin_call_cost = 1;
4383   eni_size_weights.div_mod_cost = 1;
4384   eni_size_weights.omp_cost = 40;
4385   eni_size_weights.tm_cost = 10;
4386   eni_size_weights.time_based = false;
4387   eni_size_weights.return_cost = 1;
4388 
4389   /* Estimating time for call is difficult, since we have no idea what the
4390      called function does.  In the current uses of eni_time_weights,
4391      underestimating the cost does less harm than overestimating it, so
4392      we choose a rather small value here.  */
4393   eni_time_weights.call_cost = 10;
4394   eni_time_weights.indirect_call_cost = 15;
4395   eni_time_weights.target_builtin_call_cost = 1;
4396   eni_time_weights.div_mod_cost = 10;
4397   eni_time_weights.omp_cost = 40;
4398   eni_time_weights.tm_cost = 40;
4399   eni_time_weights.time_based = true;
4400   eni_time_weights.return_cost = 2;
4401 }
4402 
4403 
4404 /* Install new lexical TREE_BLOCK underneath 'current_block'.  */
4405 
4406 static void
4407 prepend_lexical_block (tree current_block, tree new_block)
4408 {
4409   BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4410   BLOCK_SUBBLOCKS (current_block) = new_block;
4411   BLOCK_SUPERCONTEXT (new_block) = current_block;
4412 }
4413 
4414 /* Add local variables from CALLEE to CALLER.  */
4415 
4416 static inline void
4417 add_local_variables (struct function *callee, struct function *caller,
4418 		     copy_body_data *id)
4419 {
4420   tree var;
4421   unsigned ix;
4422 
4423   FOR_EACH_LOCAL_DECL (callee, ix, var)
4424     if (!can_be_nonlocal (var, id))
4425       {
4426         tree new_var = remap_decl (var, id);
4427 
4428         /* Remap debug-expressions.  */
4429 	if (VAR_P (new_var)
4430 	    && DECL_HAS_DEBUG_EXPR_P (var)
4431 	    && new_var != var)
4432 	  {
4433 	    tree tem = DECL_DEBUG_EXPR (var);
4434 	    bool old_regimplify = id->regimplify;
4435 	    id->remapping_type_depth++;
4436 	    walk_tree (&tem, copy_tree_body_r, id, NULL);
4437 	    id->remapping_type_depth--;
4438 	    id->regimplify = old_regimplify;
4439 	    SET_DECL_DEBUG_EXPR (new_var, tem);
4440 	    DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4441 	  }
4442 	add_local_decl (caller, new_var);
4443       }
4444 }
4445 
4446 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4447    have brought in or introduced any debug stmts for SRCVAR.  */
4448 
4449 static inline void
4450 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4451 {
4452   tree *remappedvarp = id->decl_map->get (srcvar);
4453 
4454   if (!remappedvarp)
4455     return;
4456 
4457   if (!VAR_P (*remappedvarp))
4458     return;
4459 
4460   if (*remappedvarp == id->retvar)
4461     return;
4462 
4463   tree tvar = target_for_debug_bind (*remappedvarp);
4464   if (!tvar)
4465     return;
4466 
4467   gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4468 					  id->call_stmt);
4469   gimple_seq_add_stmt (bindings, stmt);
4470 }
4471 
4472 /* For each inlined variable for which we may have debug bind stmts,
4473    add before GSI a final debug stmt resetting it, marking the end of
4474    its life, so that var-tracking knows it doesn't have to compute
4475    further locations for it.  */
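/* For instance, after inlining a callee with a local X that had debug
   binds, a final bind of X with no value (roughly "# DEBUG X => NULL" in
   dumps) is emitted where the inlined body ends, so var-tracking does not
   keep looking for locations of X past that point.  */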
4476 
4477 static inline void
4478 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4479 {
4480   tree var;
4481   unsigned ix;
4482   gimple_seq bindings = NULL;
4483 
4484   if (!gimple_in_ssa_p (id->src_cfun))
4485     return;
4486 
4487   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4488     return;
4489 
4490   for (var = DECL_ARGUMENTS (id->src_fn);
4491        var; var = DECL_CHAIN (var))
4492     reset_debug_binding (id, var, &bindings);
4493 
4494   FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4495     reset_debug_binding (id, var, &bindings);
4496 
4497   gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4498 }
4499 
4500 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion.  */
4501 
4502 static bool
4503 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id,
4504 		    bitmap to_purge)
4505 {
4506   tree use_retvar;
4507   tree fn;
4508   hash_map<tree, tree> *dst;
4509   hash_map<tree, tree> *st = NULL;
4510   tree return_slot;
4511   tree modify_dest;
4512   struct cgraph_edge *cg_edge;
4513   cgraph_inline_failed_t reason;
4514   basic_block return_block;
4515   edge e;
4516   gimple_stmt_iterator gsi, stmt_gsi;
4517   bool successfully_inlined = false;
4518   bool purge_dead_abnormal_edges;
4519   gcall *call_stmt;
4520   unsigned int prop_mask, src_properties;
4521   struct function *dst_cfun;
4522   tree simduid;
4523   use_operand_p use;
4524   gimple *simtenter_stmt = NULL;
4525   vec<tree> *simtvars_save;
4526 
4527   /* The gimplifier uses input_location in too many places, such as
4528      internal_get_tmp_var ().  */
4529   location_t saved_location = input_location;
4530   input_location = gimple_location (stmt);
4531 
4532   /* From here on, we're only interested in CALL_EXPRs.  */
4533   call_stmt = dyn_cast <gcall *> (stmt);
4534   if (!call_stmt)
4535     goto egress;
4536 
4537   cg_edge = id->dst_node->get_edge (stmt);
4538   gcc_checking_assert (cg_edge);
4539   /* First, see if we can figure out what function is being called.
4540      If we cannot, then there is no hope of inlining the function.  */
4541   if (cg_edge->indirect_unknown_callee)
4542     goto egress;
4543   fn = cg_edge->callee->decl;
4544   gcc_checking_assert (fn);
4545 
4546   /* If FN is a declaration of a function in a nested scope that was
4547      globally declared inline, we don't set its DECL_INITIAL.
4548      However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4549      C++ front-end uses it for cdtors to refer to their internal
4550      declarations, that are not real functions.  Fortunately those
4551      don't have trees to be saved, so we can tell by checking their
4552      gimple_body.  */
4553   if (!DECL_INITIAL (fn)
4554       && DECL_ABSTRACT_ORIGIN (fn)
4555       && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4556     fn = DECL_ABSTRACT_ORIGIN (fn);
4557 
4558   /* Don't try to inline functions that are not well-suited to inlining.  */
4559   if (cg_edge->inline_failed)
4560     {
4561       reason = cg_edge->inline_failed;
4562       /* If this call was originally indirect, we do not want to emit any
4563 	 inlining related warnings or sorry messages because there are no
4564 	 guarantees regarding those.  */
4565       if (cg_edge->indirect_inlining_edge)
4566 	goto egress;
4567 
4568       if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4569           /* For extern inline functions that get redefined we always
4570 	     silently ignore the always_inline flag.  Better behavior would
4571 	     be to keep both bodies and use the extern inline body
4572 	     for inlining, but we can't do that because front ends overwrite
4573 	     the body.  */
4574 	  && !cg_edge->callee->local.redefined_extern_inline
4575 	  /* During early inline pass, report only when optimization is
4576 	     not turned on.  */
4577 	  && (symtab->global_info_ready
4578 	      || !optimize
4579 	      || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4580 	  /* PR 20090218-1_0.c. Body can be provided by another module. */
4581 	  && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4582 	{
4583 	  error ("inlining failed in call to always_inline %q+F: %s", fn,
4584 		 cgraph_inline_failed_string (reason));
4585 	  if (gimple_location (stmt) != UNKNOWN_LOCATION)
4586 	    inform (gimple_location (stmt), "called from here");
4587 	  else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4588 	    inform (DECL_SOURCE_LOCATION (cfun->decl),
4589                    "called from this function");
4590 	}
4591       else if (warn_inline
4592 	       && DECL_DECLARED_INLINE_P (fn)
4593 	       && !DECL_NO_INLINE_WARNING_P (fn)
4594 	       && !DECL_IN_SYSTEM_HEADER (fn)
4595 	       && reason != CIF_UNSPECIFIED
4596 	       && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4597 	       /* Do not warn about not inlined recursive calls.  */
4598 	       && !cg_edge->recursive_p ()
4599 	       /* Avoid warnings during early inline pass. */
4600 	       && symtab->global_info_ready)
4601 	{
4602 	  auto_diagnostic_group d;
4603 	  if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4604 		       fn, _(cgraph_inline_failed_string (reason))))
4605 	    {
4606 	      if (gimple_location (stmt) != UNKNOWN_LOCATION)
4607 		inform (gimple_location (stmt), "called from here");
4608 	      else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4609 		inform (DECL_SOURCE_LOCATION (cfun->decl),
4610                        "called from this function");
4611 	    }
4612 	}
4613       goto egress;
4614     }
4615   id->src_node = cg_edge->callee;
4616 
4617   /* If the callee is a thunk, all we need to do is adjust the THIS pointer
4618      and redirect to the function being thunked.  */
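  /* A sketch of what happens below: the first argument (THIS) is copied
     into a temporary, thunk_adjust applies the thunk's fixed (and possibly
     vtable-indirect) offset to it, the call is retargeted at the
     thunked-to function, and we then recurse to try inlining that
     function instead.  */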
4619   if (id->src_node->thunk.thunk_p)
4620     {
4621       cgraph_edge *edge;
4622       tree virtual_offset = NULL;
4623       profile_count count = cg_edge->count;
4624       tree op;
4625       gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4626 
4627       cg_edge->remove ();
4628       edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4629 		   		           gimple_uid (stmt),
4630 				   	   profile_count::one (),
4631 					   profile_count::one (),
4632 				           true);
4633       edge->count = count;
4634       if (id->src_node->thunk.virtual_offset_p)
4635         virtual_offset = size_int (id->src_node->thunk.virtual_value);
4636       op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4637 			      NULL);
4638       gsi_insert_before (&iter, gimple_build_assign (op,
4639 						    gimple_call_arg (stmt, 0)),
4640 			 GSI_NEW_STMT);
4641       gcc_assert (id->src_node->thunk.this_adjusting);
4642       op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
4643 			 virtual_offset, id->src_node->thunk.indirect_offset);
4644 
4645       gimple_call_set_arg (stmt, 0, op);
4646       gimple_call_set_fndecl (stmt, edge->callee->decl);
4647       update_stmt (stmt);
4648       id->src_node->remove ();
4649       expand_call_inline (bb, stmt, id, to_purge);
4650       maybe_remove_unused_call_args (cfun, stmt);
4651       return true;
4652     }
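  /* Purely illustrative sketch of the thunk redirection above (identifiers
     invented): for a this-adjusting thunk THUNK with fixed offset 8 that
     forwards to the real method METHOD, a call

       THUNK (obj, arg);

     is rewritten roughly as

       tmp = obj;
       tmp = tmp + 8;     <- thunk_adjust applies the fixed/virtual offset
       METHOD (tmp, arg);

     before expand_call_inline is re-invoked on the adjusted call, so that
     METHOD itself may still be inlined.  */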
4653   fn = cg_edge->callee->decl;
4654   cg_edge->callee->get_untransformed_body ();
4655 
4656   if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4657     cg_edge->callee->verify ();
4658 
4659   /* We will be inlining this callee.  */
4660   id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4661 
4662 	  /* Update the caller's EH personality.  */
4663   if (DECL_FUNCTION_PERSONALITY (fn))
4664     DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4665       = DECL_FUNCTION_PERSONALITY (fn);
4666 
4667   /* Split the block before the GIMPLE_CALL.  */
4668   stmt_gsi = gsi_for_stmt (stmt);
4669   gsi_prev (&stmt_gsi);
4670   e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4671   bb = e->src;
4672   return_block = e->dest;
4673   remove_edge (e);
4674 
4675   /* If the GIMPLE_CALL was in the last statement of BB, it may have
4676      been the source of abnormal edges.  In this case, schedule
4677      the removal of dead abnormal edges.  */
4678   gsi = gsi_start_bb (return_block);
4679   gsi_next (&gsi);
4680   purge_dead_abnormal_edges = gsi_end_p (gsi);
4681 
4682   stmt_gsi = gsi_start_bb (return_block);
4683 
4684   /* Build a block containing code to initialize the arguments, the
4685      actual inline expansion of the body, and a label for the return
4686      statements within the function to jump to.  The type of the
4687      statement expression is the return type of the function call.
4688      ???  If the call does not have an associated block then we will
4689      remap all callee blocks to NULL, effectively dropping most of
4690      its debug information.  This should only happen for calls to
4691      artificial decls inserted by the compiler itself.  We need to
4692      either link the inlined blocks into the caller block tree or
4693      not refer to them in any way to not break GC for locations.  */
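  /* A rough, purely illustrative picture of the expansion (identifiers
     invented): given

       int callee (int p) { return p + 1; }
       ...
       x = callee (a);

     the call is replaced in the caller by roughly

       p.1 = a;              <- initialize_inlined_parameters
       retval.2 = p.1 + 1;   <- copied body, return turned into an assignment
       x = retval.2;         <- the call becomes x = USE_RETVAR further below

     with the copied statements placed in a new BLOCK linked into the
     caller's block tree.  */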
4694   if (tree block = gimple_block (stmt))
4695     {
4696       /* We do want to assign a BLOCK_SOURCE_LOCATION that is not UNKNOWN_LOCATION
4697          so that inlined_function_outer_scope_p returns true on this BLOCK.  */
4698       location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4699       if (loc == UNKNOWN_LOCATION)
4700 	loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4701       if (loc == UNKNOWN_LOCATION)
4702 	loc = BUILTINS_LOCATION;
4703       id->block = make_node (BLOCK);
4704       BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4705       BLOCK_SOURCE_LOCATION (id->block) = loc;
4706       prepend_lexical_block (block, id->block);
4707     }
4708 
4709   /* Local declarations will be replaced by their equivalents in this map.  */
4710   st = id->decl_map;
4711   id->decl_map = new hash_map<tree, tree>;
4712   dst = id->debug_map;
4713   id->debug_map = NULL;
4714   if (flag_stack_reuse != SR_NONE)
4715     id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
4716 
4717   /* Record the function we are about to inline.  */
4718   id->src_fn = fn;
4719   id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4720   id->reset_location = DECL_IGNORED_P (fn);
4721   id->call_stmt = call_stmt;
4722 
4723   /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4724      variables to be added to IFN_GOMP_SIMT_ENTER argument list.  */
4725   dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4726   simtvars_save = id->dst_simt_vars;
4727   if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4728       && (simduid = bb->loop_father->simduid) != NULL_TREE
4729       && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4730       && single_imm_use (simduid, &use, &simtenter_stmt)
4731       && is_gimple_call (simtenter_stmt)
4732       && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4733     vec_alloc (id->dst_simt_vars, 0);
4734   else
4735     id->dst_simt_vars = NULL;
4736 
4737   if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4738     profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4739 
4740   /* If the src function contains an IFN_VA_ARG, then so will the dst
4741      function after inlining.  Likewise for IFN_GOMP_USE_SIMT.  */
4742   prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4743   src_properties = id->src_cfun->curr_properties & prop_mask;
4744   if (src_properties != prop_mask)
4745     dst_cfun->curr_properties &= src_properties | ~prop_mask;
4746 
4747   gcc_assert (!id->src_cfun->after_inlining);
4748 
4749   id->entry_bb = bb;
4750   if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4751     {
4752       gimple_stmt_iterator si = gsi_last_bb (bb);
4753       gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4754       						   NOT_TAKEN),
4755 			GSI_NEW_STMT);
4756     }
4757   initialize_inlined_parameters (id, stmt, fn, bb);
4758   if (debug_nonbind_markers_p && debug_inline_points && id->block
4759       && inlined_function_outer_scope_p (id->block))
4760     {
4761       gimple_stmt_iterator si = gsi_last_bb (bb);
4762       gsi_insert_after (&si, gimple_build_debug_inline_entry
4763 			(id->block, DECL_SOURCE_LOCATION (id->src_fn)),
4764 			GSI_NEW_STMT);
4765     }
4766 
4767   if (DECL_INITIAL (fn))
4768     {
4769       if (gimple_block (stmt))
4770 	{
4771 	  tree *var;
4772 
4773 	  prepend_lexical_block (id->block,
4774 				 remap_blocks (DECL_INITIAL (fn), id));
4775 	  gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4776 			       && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4777 				   == NULL_TREE));
4778 	  /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4779 	     otherwise for DWARF DW_TAG_formal_parameter will not be children of
4780 	     DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4781 	     under it.  The parameters can be then evaluated in the debugger,
4782 	     but don't show in backtraces.  */
4783 	  for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4784 	    if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4785 	      {
4786 		tree v = *var;
4787 		*var = TREE_CHAIN (v);
4788 		TREE_CHAIN (v) = BLOCK_VARS (id->block);
4789 		BLOCK_VARS (id->block) = v;
4790 	      }
4791 	    else
4792 	      var = &TREE_CHAIN (*var);
4793 	}
4794       else
4795 	remap_blocks_to_null (DECL_INITIAL (fn), id);
4796     }
4797 
4798   /* Return statements in the function body will be replaced by jumps
4799      to the RET_LABEL.  */
4800   gcc_assert (DECL_INITIAL (fn));
4801   gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4802 
4803   /* Find the LHS to which the result of this call is assigned.  */
4804   return_slot = NULL;
4805   if (gimple_call_lhs (stmt))
4806     {
4807       modify_dest = gimple_call_lhs (stmt);
4808 
4809       /* The function which we are inlining might not return a value,
4810 	 in which case we should issue a warning that the function
4811 	 does not return a value.  In that case the optimizers will
4812 	 see that the variable to which the value is assigned was not
4813 	 initialized.  We do not want to issue a warning about that
4814 	 uninitialized variable.  */
4815       if (DECL_P (modify_dest))
4816 	TREE_NO_WARNING (modify_dest) = 1;
4817 
4818       if (gimple_call_return_slot_opt_p (call_stmt))
4819 	{
4820 	  return_slot = modify_dest;
4821 	  modify_dest = NULL;
4822 	}
4823     }
4824   else
4825     modify_dest = NULL;
4826 
4827   /* If we are inlining a call to the C++ operator new, we don't want
4828      to use type based alias analysis on the return value.  Otherwise
4829      we may get confused if the compiler sees that the inlined new
4830      function returns a pointer which was just deleted.  See bug
4831      33407.  */
4832   if (DECL_IS_OPERATOR_NEW (fn))
4833     {
4834       return_slot = NULL;
4835       modify_dest = NULL;
4836     }
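  /* To illustrate the two cases above (identifiers invented): for

       struct S s = build_s ();

     with the return slot optimization, RETURN_SLOT is the caller's 's' and
     declare_return_variable maps the callee's RESULT_DECL straight onto it,
     avoiding a copy; otherwise MODIFY_DEST is 's' and a temporary return
     variable is created that is assigned to 's' when the call statement is
     replaced.  This is only a sketch of the common by-value case.  */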
4837 
4838   /* Declare the return variable for the function.  */
4839   use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4840 
4841   /* Add local vars in this inlined callee to caller.  */
4842   add_local_variables (id->src_cfun, cfun, id);
4843 
4844   if (dump_enabled_p ())
4845     {
4846       char buf[128];
4847       snprintf (buf, sizeof(buf), "%4.2f",
4848 		cg_edge->sreal_frequency ().to_double ());
4849       dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
4850 		       call_stmt,
4851 		       "Inlining %C to %C with frequency %s\n",
4852 		       id->src_node, id->dst_node, buf);
4853       if (dump_file && (dump_flags & TDF_DETAILS))
4854 	{
4855 	  id->src_node->dump (dump_file);
4856 	  id->dst_node->dump (dump_file);
4857 	}
4858     }
4859 
4860   /* This is it.  Duplicate the callee body.  Assume callee is
4861      pre-gimplified.  Note that we must not alter the caller
4862      function in any way before this point, as this CALL_EXPR may be
4863      a self-referential call; if we're calling ourselves, we need to
4864      duplicate our body before altering anything.  */
4865   copy_body (id, bb, return_block, NULL);
4866 
4867   reset_debug_bindings (id, stmt_gsi);
4868 
4869   if (flag_stack_reuse != SR_NONE)
4870     for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
4871       if (!TREE_THIS_VOLATILE (p))
4872 	{
4873 	  tree *varp = id->decl_map->get (p);
4874 	  if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
4875 	    {
4876 	      tree clobber = build_constructor (TREE_TYPE (*varp), NULL);
4877 	      gimple *clobber_stmt;
4878 	      TREE_THIS_VOLATILE (clobber) = 1;
4879 	      clobber_stmt = gimple_build_assign (*varp, clobber);
4880 	      gimple_set_location (clobber_stmt, gimple_location (stmt));
4881 	      gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4882 	    }
4883 	}
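  /* When stack reuse is enabled, the loop above appends after the inlined
     body a clobber such as

       parm.3 = {CLOBBER};

     for each non-volatile parameter that was materialized as a stack
     variable (name invented), telling later passes its storage is dead so
     the stack slot can be shared.  */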
4884 
4885   /* Reset the escaped solution.  */
4886   if (cfun->gimple_df)
4887     pt_solution_reset (&cfun->gimple_df->escaped);
4888 
4889   /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments.  */
4890   if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
4891     {
4892       size_t nargs = gimple_call_num_args (simtenter_stmt);
4893       vec<tree> *vars = id->dst_simt_vars;
4894       auto_vec<tree> newargs (nargs + vars->length ());
4895       for (size_t i = 0; i < nargs; i++)
4896 	newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
4897       for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
4898 	{
4899 	  tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
4900 	  newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
4901 	}
4902       gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
4903       gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
4904       gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
4905       gsi_replace (&gsi, g, false);
4906     }
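  /* Sketch of the rewrite above (SSA names invented): an existing call

       simtrec.5 = IFN_GOMP_SIMT_ENTER (D.1234);

     gains the addresses of the privatized locals collected in
     id->dst_simt_vars, becoming roughly

       simtrec.5 = IFN_GOMP_SIMT_ENTER (D.1234, &newvar.6, &newvar.7);

     so the SIMT privatization machinery also sees variables introduced by
     inlining.  */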
4907   vec_free (id->dst_simt_vars);
4908   id->dst_simt_vars = simtvars_save;
4909 
4910   /* Clean up.  */
4911   if (id->debug_map)
4912     {
4913       delete id->debug_map;
4914       id->debug_map = dst;
4915     }
4916   delete id->decl_map;
4917   id->decl_map = st;
4918 
4919   /* Unlink the call's virtual operands before replacing the statement.  */
4920   unlink_stmt_vdef (stmt);
4921   if (gimple_vdef (stmt)
4922       && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4923     release_ssa_name (gimple_vdef (stmt));
4924 
4925   /* If the inlined function returns a result that we care about,
4926      substitute the GIMPLE_CALL with an assignment of the return
4927      variable to the LHS of the call.  That is, if STMT was
4928      'a = foo (...)', substitute the call with 'a = USE_RETVAR'.  */
4929   if (use_retvar && gimple_call_lhs (stmt))
4930     {
4931       gimple *old_stmt = stmt;
4932       stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4933       gimple_set_location (stmt, gimple_location (old_stmt));
4934       gsi_replace (&stmt_gsi, stmt, false);
4935       maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4936       /* Append a clobber for id->retvar if easily possible.  */
4937       if (flag_stack_reuse != SR_NONE
4938 	  && id->retvar
4939 	  && VAR_P (id->retvar)
4940 	  && id->retvar != return_slot
4941 	  && id->retvar != modify_dest
4942 	  && !TREE_THIS_VOLATILE (id->retvar)
4943 	  && !is_gimple_reg (id->retvar)
4944 	  && !stmt_ends_bb_p (stmt))
4945 	{
4946 	  tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4947 	  gimple *clobber_stmt;
4948 	  TREE_THIS_VOLATILE (clobber) = 1;
4949 	  clobber_stmt = gimple_build_assign (id->retvar, clobber);
4950 	  gimple_set_location (clobber_stmt, gimple_location (old_stmt));
4951 	  gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4952 	}
4953     }
4954   else
4955     {
4956       /* Handle the case of inlining a function with no return
4957 	 statement, which causes the return value to become undefined.  */
4958       if (gimple_call_lhs (stmt)
4959 	  && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4960 	{
4961 	  tree name = gimple_call_lhs (stmt);
4962 	  tree var = SSA_NAME_VAR (name);
4963 	  tree def = var ? ssa_default_def (cfun, var) : NULL;
4964 
4965 	  if (def)
4966 	    {
4967 	      /* If the variable is used undefined, make this name
4968 		 undefined via a move.  */
4969 	      stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4970 	      gsi_replace (&stmt_gsi, stmt, true);
4971 	    }
4972 	  else
4973 	    {
4974 	      if (!var)
4975 		{
4976 		  var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
4977 		  SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
4978 		}
4979 	      /* Otherwise make this variable undefined.  */
4980 	      gsi_remove (&stmt_gsi, true);
4981 	      set_ssa_default_def (cfun, var, name);
4982 	      SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4983 	    }
4984 	}
4985       /* Replace with a clobber for id->retvar.  */
4986       else if (flag_stack_reuse != SR_NONE
4987 	       && id->retvar
4988 	       && VAR_P (id->retvar)
4989 	       && id->retvar != return_slot
4990 	       && id->retvar != modify_dest
4991 	       && !TREE_THIS_VOLATILE (id->retvar)
4992 	       && !is_gimple_reg (id->retvar))
4993 	{
4994 	  tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4995 	  gimple *clobber_stmt;
4996 	  TREE_THIS_VOLATILE (clobber) = 1;
4997 	  clobber_stmt = gimple_build_assign (id->retvar, clobber);
4998 	  gimple_set_location (clobber_stmt, gimple_location (stmt));
4999 	  gsi_replace (&stmt_gsi, clobber_stmt, false);
5000 	  maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
5001 	}
5002       else
5003 	gsi_remove (&stmt_gsi, true);
5004     }
5005 
5006   if (purge_dead_abnormal_edges)
5007     bitmap_set_bit (to_purge, return_block->index);
5008 
5009   /* If the value of the new expression is ignored, that's OK.  We
5010      don't warn about this for CALL_EXPRs, so we shouldn't warn about
5011      the equivalent inlined version either.  */
5012   if (is_gimple_assign (stmt))
5013     {
5014       gcc_assert (gimple_assign_single_p (stmt)
5015 		  || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
5016       TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
5017     }
5018 
5019   id->add_clobbers_to_eh_landing_pads = 0;
5020 
5021   /* Output the inlining info for this abstract function, since it has been
5022      inlined.  If we don't do this now, we can lose the information about the
5023      variables in the function when the blocks get blown away as soon as we
5024      remove the cgraph node.  */
5025   if (gimple_block (stmt))
5026     (*debug_hooks->outlining_inline_function) (fn);
5027 
5028   /* Update callgraph if needed.  */
5029   cg_edge->callee->remove ();
5030 
5031   id->block = NULL_TREE;
5032   id->retvar = NULL_TREE;
5033   successfully_inlined = true;
5034 
5035  egress:
5036   input_location = saved_location;
5037   return successfully_inlined;
5038 }
5039 
5040 /* Expand call statements reachable from STMT_P.
5041    We can only have CALL_EXPRs as the "toplevel" tree code or nested
5042    in a MODIFY_EXPR.  */
5043 
5044 static bool
5045 gimple_expand_calls_inline (basic_block bb, copy_body_data *id,
5046 			    bitmap to_purge)
5047 {
5048   gimple_stmt_iterator gsi;
5049   bool inlined = false;
5050 
5051   for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
5052     {
5053       gimple *stmt = gsi_stmt (gsi);
5054       gsi_prev (&gsi);
5055 
5056       if (is_gimple_call (stmt)
5057 	  && !gimple_call_internal_p (stmt))
5058 	inlined |= expand_call_inline (bb, stmt, id, to_purge);
5059     }
5060 
5061   return inlined;
5062 }
5063 
5064 
5065 /* Walk all basic blocks created after FIRST and try to fold every statement
5066    in the STATEMENTS pointer set.  */
5067 
5068 static void
5069 fold_marked_statements (int first, hash_set<gimple *> *statements)
5070 {
5071   auto_bitmap to_purge;
5072   for (; first < last_basic_block_for_fn (cfun); first++)
5073     if (BASIC_BLOCK_FOR_FN (cfun, first))
5074       {
5075         gimple_stmt_iterator gsi;
5076 
5077 	for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5078 	     !gsi_end_p (gsi);
5079 	     gsi_next (&gsi))
5080 	  if (statements->contains (gsi_stmt (gsi)))
5081 	    {
5082 	      gimple *old_stmt = gsi_stmt (gsi);
5083 	      tree old_decl
5084 		= is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
5085 
5086 	      if (old_decl && fndecl_built_in_p (old_decl))
5087 		{
5088 		  /* Folding builtins can create multiple instructions,
5089 		     we need to look at all of them.  */
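		  /* Purely illustrative (identifiers invented): folding

		       tem_1 = __builtin_strlen ("abc");

		     collapses to the single statement 'tem_1 = 3;', but
		     folding a builtin with an lhs can also expand into
		     several statements, e.g. a store plus a separate
		     assignment to the lhs, so we walk every statement from
		     I2 up to the one GSI now points at.  */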
5090 		  gimple_stmt_iterator i2 = gsi;
5091 		  gsi_prev (&i2);
5092 		  if (fold_stmt (&gsi))
5093 		    {
5094 		      gimple *new_stmt;
5095 		      /* If a builtin at the end of a bb folded into nothing,
5096 			 the following loop won't work.  */
5097 		      if (gsi_end_p (gsi))
5098 			{
5099 			  cgraph_update_edges_for_call_stmt (old_stmt,
5100 							     old_decl, NULL);
5101 			  break;
5102 			}
5103 		      if (gsi_end_p (i2))
5104 			i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5105 		      else
5106 			gsi_next (&i2);
5107 		      while (1)
5108 			{
5109 			  new_stmt = gsi_stmt (i2);
5110 			  update_stmt (new_stmt);
5111 			  cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5112 							     new_stmt);
5113 
5114 			  if (new_stmt == gsi_stmt (gsi))
5115 			    {
5116 			      /* It is okay to check only the very last of
5117 				 these statements.  If it is a throwing
5118 				 statement nothing will change.  If it isn't,
5119 				 this can remove EH edges.  The only way that
5120 				 could be wrong is if some intermediate
5121 				 statement throws but the last one doesn't;
5122 				 that would mean we'd have to split the block,
5123 				 which we can't do here, and we'd lose anyway.
5124 				 And as builtins probably never throw, this
5125 				 all is moot anyway.  */
5126 			      if (maybe_clean_or_replace_eh_stmt (old_stmt,
5127 								  new_stmt))
5128 				bitmap_set_bit (to_purge, first);
5129 			      break;
5130 			    }
5131 			  gsi_next (&i2);
5132 			}
5133 		    }
5134 		}
5135 	      else if (fold_stmt (&gsi))
5136 		{
5137 		  /* Re-read the statement from GSI as fold_stmt() may
5138 		     have changed it.  */
5139 		  gimple *new_stmt = gsi_stmt (gsi);
5140 		  update_stmt (new_stmt);
5141 
5142 		  if (is_gimple_call (old_stmt)
5143 		      || is_gimple_call (new_stmt))
5144 		    cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5145 						       new_stmt);
5146 
5147 		  if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5148 		    bitmap_set_bit (to_purge, first);
5149 		}
5150 	    }
5151       }
5152   gimple_purge_all_dead_eh_edges (to_purge);
5153 }
5154 
5155 /* Expand calls to inline functions in the body of FN.  */
5156 
5157 unsigned int
5158 optimize_inline_calls (tree fn)
5159 {
5160   copy_body_data id;
5161   basic_block bb;
5162   int last = n_basic_blocks_for_fn (cfun);
5163   bool inlined_p = false;
5164 
5165   /* Clear out ID.  */
5166   memset (&id, 0, sizeof (id));
5167 
5168   id.src_node = id.dst_node = cgraph_node::get (fn);
5169   gcc_assert (id.dst_node->definition);
5170   id.dst_fn = fn;
5171   /* Or any functions that aren't finished yet.  */
5172   if (current_function_decl)
5173     id.dst_fn = current_function_decl;
5174 
5175   id.copy_decl = copy_decl_maybe_to_var;
5176   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5177   id.transform_new_cfg = false;
5178   id.transform_return_to_modify = true;
5179   id.transform_parameter = true;
5180   id.transform_lang_insert_block = NULL;
5181   id.statements_to_fold = new hash_set<gimple *>;
5182 
5183   push_gimplify_context ();
5184 
5185   /* We make no attempts to keep dominance info up-to-date.  */
5186   free_dominance_info (CDI_DOMINATORS);
5187   free_dominance_info (CDI_POST_DOMINATORS);
5188 
5189   /* Register specific gimple functions.  */
5190   gimple_register_cfg_hooks ();
5191 
5192   /* Reach the trees by walking over the CFG, and note the
5193      enclosing basic-blocks in the call edges.  */
5194   /* We walk the blocks going forward, because inlined function bodies
5195      will split id->current_basic_block, and the new blocks will
5196      follow it; we'll trudge through them, processing their CALL_EXPRs
5197      along the way.  */
5198   auto_bitmap to_purge;
5199   FOR_EACH_BB_FN (bb, cfun)
5200     inlined_p |= gimple_expand_calls_inline (bb, &id, to_purge);
5201 
5202   pop_gimplify_context (NULL);
5203 
5204   if (flag_checking)
5205     {
5206       struct cgraph_edge *e;
5207 
5208       id.dst_node->verify ();
5209 
5210       /* Double check that we inlined everything we are supposed to inline.  */
5211       for (e = id.dst_node->callees; e; e = e->next_callee)
5212 	gcc_assert (e->inline_failed);
5213     }
5214 
5215   /* Fold queued statements.  */
5216   update_max_bb_count ();
5217   fold_marked_statements (last, id.statements_to_fold);
5218   delete id.statements_to_fold;
5219 
5220   /* Finally purge EH and abnormal edges from the call stmts we inlined.
5221      We need to do this after fold_marked_statements since that may walk
5222      the SSA use-def chain.  */
5223   unsigned i;
5224   bitmap_iterator bi;
5225   EXECUTE_IF_SET_IN_BITMAP (to_purge, 0, i, bi)
5226     {
5227       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
5228       if (bb)
5229 	{
5230 	  gimple_purge_dead_eh_edges (bb);
5231 	  gimple_purge_dead_abnormal_call_edges (bb);
5232 	}
5233     }
5234 
5235   gcc_assert (!id.debug_stmts.exists ());
5236 
5237   /* If we didn't inline into the function there is nothing to do.  */
5238   if (!inlined_p)
5239     return 0;
5240 
5241   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
5242   number_blocks (fn);
5243 
5244   delete_unreachable_blocks_update_callgraph (id.dst_node, false);
5245 
5246   if (flag_checking)
5247     id.dst_node->verify ();
5248 
5249   /* It would be nice to check SSA/CFG/statement consistency here, but it is
5250      not possible yet - the IPA passes might make various functions not
5251      throw and they don't care to proactively update local EH info.  This is
5252      done later in the fixup_cfg pass, which also executes the verification.  */
5253   return (TODO_update_ssa
5254 	  | TODO_cleanup_cfg
5255 	  | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5256 	  | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5257 	  | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5258 	     ? TODO_rebuild_frequencies : 0));
5259 }
5260 
5261 /* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */
5262 
5263 tree
5264 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5265 {
5266   enum tree_code code = TREE_CODE (*tp);
5267   enum tree_code_class cl = TREE_CODE_CLASS (code);
5268 
5269   /* We make copies of most nodes.  */
5270   if (IS_EXPR_CODE_CLASS (cl)
5271       || code == TREE_LIST
5272       || code == TREE_VEC
5273       || code == TYPE_DECL
5274       || code == OMP_CLAUSE)
5275     {
5276       /* Because the chain gets clobbered when we make a copy, we save it
5277 	 here.  */
5278       tree chain = NULL_TREE, new_tree;
5279 
5280       if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5281 	chain = TREE_CHAIN (*tp);
5282 
5283       /* Copy the node.  */
5284       new_tree = copy_node (*tp);
5285 
5286       *tp = new_tree;
5287 
5288       /* Now, restore the chain, if appropriate.  That will cause
5289 	 walk_tree to walk into the chain as well.  */
5290       if (code == PARM_DECL
5291 	  || code == TREE_LIST
5292 	  || code == OMP_CLAUSE)
5293 	TREE_CHAIN (*tp) = chain;
5294 
5295       /* For now, we don't update BLOCKs when we make copies.  So, we
5296 	 have to nullify all BIND_EXPRs.  */
5297       if (TREE_CODE (*tp) == BIND_EXPR)
5298 	BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5299     }
5300   else if (code == CONSTRUCTOR)
5301     {
5302       /* CONSTRUCTOR nodes need special handling because
5303          we need to duplicate the vector of elements.  */
5304       tree new_tree;
5305 
5306       new_tree = copy_node (*tp);
5307       CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5308       *tp = new_tree;
5309     }
5310   else if (code == STATEMENT_LIST)
5311     /* We used to just abort on STATEMENT_LIST, but we can run into them
5312        with statement-expressions (c++/40975).  */
5313     copy_statement_list (tp);
5314   else if (TREE_CODE_CLASS (code) == tcc_type)
5315     *walk_subtrees = 0;
5316   else if (TREE_CODE_CLASS (code) == tcc_declaration)
5317     *walk_subtrees = 0;
5318   else if (TREE_CODE_CLASS (code) == tcc_constant)
5319     *walk_subtrees = 0;
5320   return NULL_TREE;
5321 }
5322 
5323 /* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
5324    information indicating to what new SAVE_EXPR this one should be mapped,
5325    use that one.  Otherwise, create a new node and enter it in ST.  FN is
5326    the function into which the copy will be placed.  */
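/* For example (expression invented), if a type size contains
   SAVE_EXPR <n * 4>, the first visit copies the node and records both
   old->copy and copy->copy in ST, so every later reference in the
   duplicated tree shares the single copy instead of being copied again.  */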
5327 
5328 static void
5329 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5330 {
5331   tree *n;
5332   tree t;
5333 
5334   /* See if we already encountered this SAVE_EXPR.  */
5335   n = st->get (*tp);
5336 
5337   /* If we didn't already remap this SAVE_EXPR, do so now.  */
5338   if (!n)
5339     {
5340       t = copy_node (*tp);
5341 
5342       /* Remember this SAVE_EXPR.  */
5343       st->put (*tp, t);
5344       /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
5345       st->put (t, t);
5346     }
5347   else
5348     {
5349       /* We've already walked into this SAVE_EXPR; don't do it again.  */
5350       *walk_subtrees = 0;
5351       t = *n;
5352     }
5353 
5354   /* Replace this SAVE_EXPR with the copy.  */
5355   *tp = t;
5356 }
5357 
5358 /* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
5359    label, copies the declaration and enters it in the decl map in DATA (which
5360    is really a 'copy_body_data *').  */
5361 
5362 static tree
5363 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5364 		        bool *handled_ops_p ATTRIBUTE_UNUSED,
5365 		        struct walk_stmt_info *wi)
5366 {
5367   copy_body_data *id = (copy_body_data *) wi->info;
5368   glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5369 
5370   if (stmt)
5371     {
5372       tree decl = gimple_label_label (stmt);
5373 
5374       /* Copy the decl and remember the copy.  */
5375       insert_decl_map (id, decl, id->copy_decl (decl, id));
5376     }
5377 
5378   return NULL_TREE;
5379 }
5380 
5381 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5382 						  struct walk_stmt_info *wi);
5383 
5384 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5385    Using the decl map pointed to by ST (which is really a
5386    'hash_map<tree, tree>'), remaps all local declarations to appropriate
5387    replacements in gimple operands.  */
5388 
5389 static tree
5390 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5391 {
5392   struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5393   copy_body_data *id = (copy_body_data *) wi->info;
5394   hash_map<tree, tree> *st = id->decl_map;
5395   tree *n;
5396   tree expr = *tp;
5397 
5398   /* For recursive invocations this is no longer the LHS itself.  */
5399   bool is_lhs = wi->is_lhs;
5400   wi->is_lhs = false;
5401 
5402   if (TREE_CODE (expr) == SSA_NAME)
5403     {
5404       *tp = remap_ssa_name (*tp, id);
5405       *walk_subtrees = 0;
5406       if (is_lhs)
5407 	SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5408     }
5409   /* Only a local declaration (variable or label).  */
5410   else if ((VAR_P (expr) && !TREE_STATIC (expr))
5411 	   || TREE_CODE (expr) == LABEL_DECL)
5412     {
5413       /* Lookup the declaration.  */
5414       n = st->get (expr);
5415 
5416       /* If it's there, remap it.  */
5417       if (n)
5418 	*tp = *n;
5419       *walk_subtrees = 0;
5420     }
5421   else if (TREE_CODE (expr) == STATEMENT_LIST
5422 	   || TREE_CODE (expr) == BIND_EXPR
5423 	   || TREE_CODE (expr) == SAVE_EXPR)
5424     gcc_unreachable ();
5425   else if (TREE_CODE (expr) == TARGET_EXPR)
5426     {
5427       /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5428          It's OK for this to happen if it was part of a subtree that
5429          isn't immediately expanded, such as operand 2 of another
5430          TARGET_EXPR.  */
5431       if (!TREE_OPERAND (expr, 1))
5432 	{
5433 	  TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5434 	  TREE_OPERAND (expr, 3) = NULL_TREE;
5435 	}
5436     }
5437   else if (TREE_CODE (expr) == OMP_CLAUSE)
5438     {
5439       /* Before the omplower pass completes, some OMP clauses can contain
5440 	 sequences that are neither copied by gimple_seq_copy nor walked by
5441 	 walk_gimple_seq.  To make copy_gimple_seq_and_replace_locals work even
5442 	 in those situations, we have to copy and process them explicitly.  */
5443 
5444       if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5445 	{
5446 	  gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5447 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5448 	  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5449 	}
5450       else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5451 	{
5452 	  gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5453 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5454 	  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5455 	}
5456       else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5457 	{
5458 	  gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5459 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5460 	  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5461 	  seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5462 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5463 	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5464 	}
5465     }
5466 
5467   /* Keep iterating.  */
5468   return NULL_TREE;
5469 }
5470 
5471 
5472 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5473    Using the decl map pointed to by ST (which is really a
5474    'hash_map<tree, tree>'), remaps all local declarations to appropriate
5475    replacements in gimple statements.  */
5476 
5477 static tree
5478 replace_locals_stmt (gimple_stmt_iterator *gsip,
5479 		     bool *handled_ops_p ATTRIBUTE_UNUSED,
5480 		     struct walk_stmt_info *wi)
5481 {
5482   copy_body_data *id = (copy_body_data *) wi->info;
5483   gimple *gs = gsi_stmt (*gsip);
5484 
5485   if (gbind *stmt = dyn_cast <gbind *> (gs))
5486     {
5487       tree block = gimple_bind_block (stmt);
5488 
5489       if (block)
5490 	{
5491 	  remap_block (&block, id);
5492 	  gimple_bind_set_block (stmt, block);
5493 	}
5494 
5495       /* This will remap a lot of the same decls again, but this should be
5496 	 harmless.  */
5497       if (gimple_bind_vars (stmt))
5498 	{
5499 	  tree old_var, decls = gimple_bind_vars (stmt);
5500 
5501 	  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5502 	    if (!can_be_nonlocal (old_var, id)
5503 		&& ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5504 	      remap_decl (old_var, id);
5505 
5506 	  gcc_checking_assert (!id->prevent_decl_creation_for_types);
5507 	  id->prevent_decl_creation_for_types = true;
5508 	  gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5509 	  id->prevent_decl_creation_for_types = false;
5510 	}
5511     }
5512 
5513   /* Keep iterating.  */
5514   return NULL_TREE;
5515 }
5516 
5517 /* Create a copy of SEQ and remap all decls in it.  */
5518 
5519 static gimple_seq
5520 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5521 {
5522   if (!seq)
5523     return NULL;
5524 
5525   /* If there are any labels in OMP sequences, they can only be referred to
5526      from within the sequence itself, and therefore we can do both here.  */
5527   walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5528   gimple_seq copy = gimple_seq_copy (seq);
5529   walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5530   return copy;
5531 }
5532 
5533 /* Copies everything in SEQ and replaces variables and labels local to
5534    current_function_decl.  */
5535 
5536 gimple_seq
5537 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5538 {
5539   copy_body_data id;
5540   struct walk_stmt_info wi;
5541   gimple_seq copy;
5542 
5543   /* There's nothing to do for NULL_TREE.  */
5544   if (seq == NULL)
5545     return seq;
5546 
5547   /* Set up ID.  */
5548   memset (&id, 0, sizeof (id));
5549   id.src_fn = current_function_decl;
5550   id.dst_fn = current_function_decl;
5551   id.src_cfun = cfun;
5552   id.decl_map = new hash_map<tree, tree>;
5553   id.debug_map = NULL;
5554 
5555   id.copy_decl = copy_decl_no_change;
5556   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5557   id.transform_new_cfg = false;
5558   id.transform_return_to_modify = false;
5559   id.transform_parameter = false;
5560   id.transform_lang_insert_block = NULL;
5561 
5562   /* Walk the tree once to find local labels.  */
5563   memset (&wi, 0, sizeof (wi));
5564   hash_set<tree> visited;
5565   wi.info = &id;
5566   wi.pset = &visited;
5567   walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5568 
5569   copy = gimple_seq_copy (seq);
5570 
5571   /* Walk the copy, remapping decls.  */
5572   memset (&wi, 0, sizeof (wi));
5573   wi.info = &id;
5574   walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5575 
5576   /* Clean up.  */
5577   delete id.decl_map;
5578   if (id.debug_map)
5579     delete id.debug_map;
5580   if (id.dependence_map)
5581     {
5582       delete id.dependence_map;
5583       id.dependence_map = NULL;
5584     }
5585 
5586   return copy;
5587 }
5588 
5589 
5590 /* Allow someone to determine if SEARCH is a child of TOP from gdb.  */
5591 
5592 static tree
5593 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5594 {
5595   if (*tp == data)
5596     return (tree) data;
5597   else
5598     return NULL;
5599 }
5600 
5601 DEBUG_FUNCTION bool
5602 debug_find_tree (tree top, tree search)
5603 {
5604   return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5605 }
5606 
5607 
5608 /* Declare the variables created by the inliner.  Add all the variables in
5609    VARS to BLOCK.  */
5610 
5611 static void
5612 declare_inline_vars (tree block, tree vars)
5613 {
5614   tree t;
5615   for (t = vars; t; t = DECL_CHAIN (t))
5616     {
5617       DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5618       gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5619       add_local_decl (cfun, t);
5620     }
5621 
5622   if (block)
5623     BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5624 }
5625 
5626 /* Finish setting up COPY, a copy of DECL.  DECL originally was in
5627    ID->src_fn, but COPY will live in ID->dst_fn; fix up its debug info,
5628    RTL, mode and DECL_CONTEXT accordingly.  */
5629 
5630 tree
5631 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5632 {
5633   /* Don't generate debug information for the copy if we wouldn't have
5634      generated it for the original either.  */
5635   DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5636   DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5637 
5638   /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5639      declaration inspired this copy.  */
5640   DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5641 
5642   /* The new variable/label has no RTL, yet.  */
5643   if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5644       && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5645     SET_DECL_RTL (copy, 0);
5646   /* For vector typed decls make sure to update DECL_MODE according
5647      to the new function context.  */
5648   if (VECTOR_TYPE_P (TREE_TYPE (copy)))
5649     SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
5650 
5651   /* These args would always appear unused, if not for this.  */
5652   TREE_USED (copy) = 1;
5653 
5654   /* Set the context for the new declaration.  */
5655   if (!DECL_CONTEXT (decl))
5656     /* Globals stay global.  */
5657     ;
5658   else if (DECL_CONTEXT (decl) != id->src_fn)
5659     /* Things that weren't in the scope of the function we're inlining
5660        from aren't in the scope we're inlining to, either.  */
5661     ;
5662   else if (TREE_STATIC (decl))
5663     /* Function-scoped static variables should stay in the original
5664        function.  */
5665     ;
5666   else
5667     {
5668       /* Ordinary automatic local variables are now in the scope of the
5669 	 new function.  */
5670       DECL_CONTEXT (copy) = id->dst_fn;
5671       if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5672 	{
5673 	  if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5674 	    DECL_ATTRIBUTES (copy)
5675 	      = tree_cons (get_identifier ("omp simt private"), NULL,
5676 			   DECL_ATTRIBUTES (copy));
5677 	  id->dst_simt_vars->safe_push (copy);
5678 	}
5679     }
5680 
5681   return copy;
5682 }
5683 
5684 static tree
5685 copy_decl_to_var (tree decl, copy_body_data *id)
5686 {
5687   tree copy, type;
5688 
5689   gcc_assert (TREE_CODE (decl) == PARM_DECL
5690 	      || TREE_CODE (decl) == RESULT_DECL);
5691 
5692   type = TREE_TYPE (decl);
5693 
5694   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5695 		     VAR_DECL, DECL_NAME (decl), type);
5696   if (DECL_PT_UID_SET_P (decl))
5697     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5698   TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5699   TREE_READONLY (copy) = TREE_READONLY (decl);
5700   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5701   DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5702   DECL_BY_REFERENCE (copy) = DECL_BY_REFERENCE (decl);
5703 
5704   return copy_decl_for_dup_finish (id, decl, copy);
5705 }
5706 
5707 /* Like copy_decl_to_var, but create a return slot object instead of a
5708    pointer variable for return by invisible reference.  */
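/* Illustration (type invented): for a callee declared

     struct big f (void);

   that returns by invisible reference, the RESULT_DECL has type
   'struct big *'; this routine strips the reference and builds a VAR_DECL
   of type 'struct big', so the caller-side slot is an actual object rather
   than a pointer temporary.  */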
5709 
5710 static tree
5711 copy_result_decl_to_var (tree decl, copy_body_data *id)
5712 {
5713   tree copy, type;
5714 
5715   gcc_assert (TREE_CODE (decl) == PARM_DECL
5716 	      || TREE_CODE (decl) == RESULT_DECL);
5717 
5718   type = TREE_TYPE (decl);
5719   if (DECL_BY_REFERENCE (decl))
5720     type = TREE_TYPE (type);
5721 
5722   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5723 		     VAR_DECL, DECL_NAME (decl), type);
5724   if (DECL_PT_UID_SET_P (decl))
5725     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5726   TREE_READONLY (copy) = TREE_READONLY (decl);
5727   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5728   if (!DECL_BY_REFERENCE (decl))
5729     {
5730       TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5731       DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5732     }
5733 
5734   return copy_decl_for_dup_finish (id, decl, copy);
5735 }
5736 
5737 tree
5738 copy_decl_no_change (tree decl, copy_body_data *id)
5739 {
5740   tree copy;
5741 
5742   copy = copy_node (decl);
5743 
5744   /* The COPY is not abstract; it will be generated in DST_FN.  */
5745   DECL_ABSTRACT_P (copy) = false;
5746   lang_hooks.dup_lang_specific_decl (copy);
5747 
5748   /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5749      been taken; it's for internal bookkeeping in expand_goto_internal.  */
5750   if (TREE_CODE (copy) == LABEL_DECL)
5751     {
5752       TREE_ADDRESSABLE (copy) = 0;
5753       LABEL_DECL_UID (copy) = -1;
5754     }
5755 
5756   return copy_decl_for_dup_finish (id, decl, copy);
5757 }
5758 
5759 static tree
5760 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5761 {
5762   if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5763     return copy_decl_to_var (decl, id);
5764   else
5765     return copy_decl_no_change (decl, id);
5766 }
5767 
5768 /* Return a copy of the function's argument tree.  */
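/* For example (signature invented), versioning 'int f (int a, int b, int c)'
   with bit 1 set in ARGS_TO_SKIP yields the parameter list '(int a, int c)';
   the dropped 'b' gets an equivalent VAR_DECL via copy_decl_to_var chained
   onto *VARS, so remaining uses in the body still have a declaration.  */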
5769 static tree
5770 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5771 			       bitmap args_to_skip, tree *vars)
5772 {
5773   tree arg, *parg;
5774   tree new_parm = NULL;
5775   int i = 0;
5776 
5777   parg = &new_parm;
5778 
5779   for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5780     if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5781       {
5782         tree new_tree = remap_decl (arg, id);
5783 	if (TREE_CODE (new_tree) != PARM_DECL)
5784 	  new_tree = id->copy_decl (arg, id);
5785         lang_hooks.dup_lang_specific_decl (new_tree);
5786         *parg = new_tree;
5787 	parg = &DECL_CHAIN (new_tree);
5788       }
5789     else if (!id->decl_map->get (arg))
5790       {
5791 	/* Make an equivalent VAR_DECL.  If the argument was used
5792 	   as a temporary variable later in the function, the uses will be
5793 	   replaced by the local variable.  */
5794 	tree var = copy_decl_to_var (arg, id);
5795 	insert_decl_map (id, arg, var);
5796         /* Declare this new variable.  */
5797         DECL_CHAIN (var) = *vars;
5798         *vars = var;
5799       }
5800   return new_parm;
5801 }
5802 
5803 /* Return a copy of the function's static chain.  */
5804 static tree
5805 copy_static_chain (tree static_chain, copy_body_data * id)
5806 {
5807   tree *chain_copy, *pvar;
5808 
5809   chain_copy = &static_chain;
5810   for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5811     {
5812       tree new_tree = remap_decl (*pvar, id);
5813       lang_hooks.dup_lang_specific_decl (new_tree);
5814       DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5815       *pvar = new_tree;
5816     }
5817   return static_chain;
5818 }
5819 
5820 /* Return true if the function is allowed to be versioned.
5821    This is a guard for the versioning functionality.  */
5822 
5823 bool
5824 tree_versionable_function_p (tree fndecl)
5825 {
5826   return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5827 	  && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
5828 }
5829 
5830 /* Update clone info after duplication.  */
5831 
5832 static void
5833 update_clone_info (copy_body_data * id)
5834 {
5835   struct cgraph_node *node;
5836   if (!id->dst_node->clones)
5837     return;
5838   for (node = id->dst_node->clones; node != id->dst_node;)
5839     {
5840       /* First update replace maps to match the new body.  */
5841       if (node->clone.tree_map)
5842         {
5843 	  unsigned int i;
5844           for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5845 	    {
5846 	      struct ipa_replace_map *replace_info;
5847 	      replace_info = (*node->clone.tree_map)[i];
5848 	      walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5849 	      walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5850 	    }
5851 	}
5852       if (node->clones)
5853 	node = node->clones;
5854       else if (node->next_sibling_clone)
5855 	node = node->next_sibling_clone;
5856       else
5857 	{
5858 	  while (node != id->dst_node && !node->next_sibling_clone)
5859 	    node = node->clone_of;
5860 	  if (node != id->dst_node)
5861 	    node = node->next_sibling_clone;
5862 	}
5863     }
5864 }
5865 
5866 /* Create a copy of a function's tree.
5867    OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5868    of the original function and the new copied function
5869    respectively.  In case we want to replace a DECL
5870    tree with another tree while duplicating the function's
5871    body, TREE_MAP represents the mapping between these
5872    trees. If UPDATE_CLONES is set, the call_stmt fields
5873    of edges of clones of the function will be updated.
5874 
5875    If non-NULL, ARGS_TO_SKIP determines the function parameters to remove
5876    from the new version.
5877    If SKIP_RETURN is true, the new version will return void.
5878    If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5879    If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5880 */
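/* For instance (values invented), an IPA-CP clone that substitutes the
   constant 5 for the second parameter N passes a TREE_MAP entry with
   parm_num == 1 (or old_tree == the PARM_DECL for N) and new_tree == 5;
   setup_one_parameter then emits an initialization like 'n.4 = 5;' into the
   block split off the clone's entry edge below.  */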
5881 void
5882 tree_function_versioning (tree old_decl, tree new_decl,
5883 			  vec<ipa_replace_map *, va_gc> *tree_map,
5884 			  bool update_clones, bitmap args_to_skip,
5885 			  bool skip_return, bitmap blocks_to_copy,
5886 			  basic_block new_entry)
5887 {
5888   struct cgraph_node *old_version_node;
5889   struct cgraph_node *new_version_node;
5890   copy_body_data id;
5891   tree p;
5892   unsigned i;
5893   struct ipa_replace_map *replace_info;
5894   basic_block old_entry_block, bb;
5895   auto_vec<gimple *, 10> init_stmts;
5896   tree vars = NULL_TREE;
5897   bitmap debug_args_to_skip = args_to_skip;
5898 
5899   gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5900 	      && TREE_CODE (new_decl) == FUNCTION_DECL);
5901   DECL_POSSIBLY_INLINED (old_decl) = 1;
5902 
5903   old_version_node = cgraph_node::get (old_decl);
5904   gcc_checking_assert (old_version_node);
5905   new_version_node = cgraph_node::get (new_decl);
5906   gcc_checking_assert (new_version_node);
5907 
5908   /* Copy over debug args.  */
5909   if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5910     {
5911       vec<tree, va_gc> **new_debug_args, **old_debug_args;
5912       gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5913       DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5914       old_debug_args = decl_debug_args_lookup (old_decl);
5915       if (old_debug_args)
5916 	{
5917 	  new_debug_args = decl_debug_args_insert (new_decl);
5918 	  *new_debug_args = vec_safe_copy (*old_debug_args);
5919 	}
5920     }
5921 
5922   /* Output the inlining info for this abstract function, since it has been
5923      inlined.  If we don't do this now, we can lose the information about the
5924      variables in the function when the blocks get blown away as soon as we
5925      remove the cgraph node.  */
5926   (*debug_hooks->outlining_inline_function) (old_decl);
5927 
5928   DECL_ARTIFICIAL (new_decl) = 1;
5929   DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5930   if (DECL_ORIGIN (old_decl) == old_decl)
5931     old_version_node->used_as_abstract_origin = true;
5932   DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5933 
5934   /* Prepare the data structures for the tree copy.  */
5935   memset (&id, 0, sizeof (id));
5936 
5937   /* Generate a new name for the new version. */
5938   id.statements_to_fold = new hash_set<gimple *>;
5939 
5940   id.decl_map = new hash_map<tree, tree>;
5941   id.debug_map = NULL;
5942   id.src_fn = old_decl;
5943   id.dst_fn = new_decl;
5944   id.src_node = old_version_node;
5945   id.dst_node = new_version_node;
5946   id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5947   id.blocks_to_copy = blocks_to_copy;
5948 
5949   id.copy_decl = copy_decl_no_change;
5950   id.transform_call_graph_edges
5951     = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5952   id.transform_new_cfg = true;
5953   id.transform_return_to_modify = false;
5954   id.transform_parameter = false;
5955   id.transform_lang_insert_block = NULL;
5956 
5957   old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5958     (DECL_STRUCT_FUNCTION (old_decl));
5959   DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5960   DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5961   initialize_cfun (new_decl, old_decl,
5962 		   new_entry ? new_entry->count : old_entry_block->count);
5963   if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5964     DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5965       = id.src_cfun->gimple_df->ipa_pta;
5966 
5967   /* Copy the function's static chain.  */
5968   p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5969   if (p)
5970     DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
5971       = copy_static_chain (p, &id);
5972 
5973   /* If there's a tree_map, prepare for substitution.  */
5974   if (tree_map)
5975     for (i = 0; i < tree_map->length (); i++)
5976       {
5977 	gimple *init;
5978 	replace_info = (*tree_map)[i];
5979 	if (replace_info->replace_p)
5980 	  {
5981 	    int parm_num = -1;
5982 	    if (!replace_info->old_tree)
5983 	      {
5984 		int p = replace_info->parm_num;
5985 		tree parm;
5986 		tree req_type, new_type;
5987 
5988 		for (parm = DECL_ARGUMENTS (old_decl); p;
5989 		     parm = DECL_CHAIN (parm))
5990 		  p--;
5991 		replace_info->old_tree = parm;
5992 		parm_num = replace_info->parm_num;
5993 		req_type = TREE_TYPE (parm);
5994 		new_type = TREE_TYPE (replace_info->new_tree);
5995 		if (!useless_type_conversion_p (req_type, new_type))
5996 		  {
5997 		    if (fold_convertible_p (req_type, replace_info->new_tree))
5998 		      replace_info->new_tree
5999 			= fold_build1 (NOP_EXPR, req_type,
6000 				       replace_info->new_tree);
6001 		    else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
6002 		      replace_info->new_tree
6003 			= fold_build1 (VIEW_CONVERT_EXPR, req_type,
6004 				       replace_info->new_tree);
6005 		    else
6006 		      {
6007 			if (dump_file)
6008 			  {
6009 			    fprintf (dump_file, "    const ");
6010 			    print_generic_expr (dump_file,
6011 						replace_info->new_tree);
6012 			    fprintf (dump_file,
6013 				     "  can't be converted to param ");
6014 			    print_generic_expr (dump_file, parm);
6015 			    fprintf (dump_file, "\n");
6016 			  }
6017 			replace_info->old_tree = NULL;
6018 		      }
6019 		  }
6020 	      }
6021 	    else
6022 	      gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
6023 	    if (replace_info->old_tree)
6024 	      {
6025 		init = setup_one_parameter (&id, replace_info->old_tree,
6026 					    replace_info->new_tree, id.src_fn,
6027 					    NULL,
6028 					    &vars);
6029 		if (init)
6030 		  init_stmts.safe_push (init);
6031 		if (MAY_HAVE_DEBUG_BIND_STMTS && args_to_skip)
6032 		  {
6033 		    if (parm_num == -1)
6034 		      {
6035 			tree parm;
6036 			int p;
6037 			for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
6038 			     parm = DECL_CHAIN (parm), p++)
6039 			  if (parm == replace_info->old_tree)
6040 			    {
6041 			      parm_num = p;
6042 			      break;
6043 			    }
6044 		      }
6045 		    if (parm_num != -1)
6046 		      {
6047 			if (debug_args_to_skip == args_to_skip)
6048 			  {
6049 			    debug_args_to_skip = BITMAP_ALLOC (NULL);
6050 			    bitmap_copy (debug_args_to_skip, args_to_skip);
6051 			  }
6052 			bitmap_clear_bit (debug_args_to_skip, parm_num);
6053 		      }
6054 		  }
6055 	      }
6056 	  }
6057       }
6058   /* Copy the function's arguments.  */
6059   if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6060     DECL_ARGUMENTS (new_decl)
6061       = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
6062 				       args_to_skip, &vars);
6063 
6064   DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6065   BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6066 
6067   declare_inline_vars (DECL_INITIAL (new_decl), vars);
6068 
6069   if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6070     /* Add local vars.  */
6071     add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6072 
6073   if (DECL_RESULT (old_decl) == NULL_TREE)
6074     ;
6075   else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6076     {
6077       DECL_RESULT (new_decl)
6078 	= build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6079 		      RESULT_DECL, NULL_TREE, void_type_node);
6080       DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6081       cfun->returns_struct = 0;
6082       cfun->returns_pcc_struct = 0;
6083     }
6084   else
6085     {
6086       tree old_name;
6087       DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6088       lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6089       if (gimple_in_ssa_p (id.src_cfun)
6090 	  && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6091 	  && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6092 	{
6093 	  tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6094 	  insert_decl_map (&id, old_name, new_name);
6095 	  SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6096 	  set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6097 	}
6098     }
6099 
6100   /* Set up the destination function's loop tree.  */
6101   if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6102     {
6103       cfun->curr_properties &= ~PROP_loops;
6104       loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6105       cfun->curr_properties |= PROP_loops;
6106     }
6107 
6108   /* Copy the Function's body.  */
6109   copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6110 	     new_entry);
6111 
6112   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
6113   number_blocks (new_decl);
6114 
6115   /* We want to create the BB unconditionally, so that the addition of
6116      debug stmts doesn't affect BB count, which may in the end cause
6117      codegen differences.  */
6118   bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6119   while (init_stmts.length ())
6120     insert_init_stmt (&id, bb, init_stmts.pop ());
6121   update_clone_info (&id);
6122 
6123   /* Remap the nonlocal_goto_save_area, if any.  */
6124   if (cfun->nonlocal_goto_save_area)
6125     {
6126       struct walk_stmt_info wi;
6127 
6128       memset (&wi, 0, sizeof (wi));
6129       wi.info = &id;
6130       walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6131     }
6132 
6133   /* Clean up.  */
6134   delete id.decl_map;
6135   if (id.debug_map)
6136     delete id.debug_map;
6137   free_dominance_info (CDI_DOMINATORS);
6138   free_dominance_info (CDI_POST_DOMINATORS);
6139 
6140   update_max_bb_count ();
6141   fold_marked_statements (0, id.statements_to_fold);
6142   delete id.statements_to_fold;
6143   delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6144   if (id.dst_node->definition)
6145     cgraph_edge::rebuild_references ();
6146   if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6147     {
6148       calculate_dominance_info (CDI_DOMINATORS);
6149       fix_loop_structure (NULL);
6150     }
6151   update_ssa (TODO_update_ssa);
6152 
6153   /* After partial cloning we need to rescale frequencies, so they are
6154      within proper range in the cloned function.  */
6155   if (new_entry)
6156     {
6157       struct cgraph_edge *e;
6158       rebuild_frequencies ();
6159 
6160       new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6161       for (e = new_version_node->callees; e; e = e->next_callee)
6162 	{
6163 	  basic_block bb = gimple_bb (e->call_stmt);
6164 	  e->count = bb->count;
6165 	}
6166       for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6167 	{
6168 	  basic_block bb = gimple_bb (e->call_stmt);
6169 	  e->count = bb->count;
6170 	}
6171     }

  if (debug_args_to_skip && MAY_HAVE_DEBUG_BIND_STMTS)
    {
      tree parm;
      vec<tree, va_gc> **debug_args = NULL;
      unsigned int len = 0;
      for (parm = DECL_ARGUMENTS (old_decl), i = 0;
	   parm; parm = DECL_CHAIN (parm), i++)
	if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
	  {
	    tree ddecl;

	    if (debug_args == NULL)
	      {
		debug_args = decl_debug_args_insert (new_decl);
		len = vec_safe_length (*debug_args);
	      }
	    ddecl = make_node (DEBUG_EXPR_DECL);
	    DECL_ARTIFICIAL (ddecl) = 1;
	    TREE_TYPE (ddecl) = TREE_TYPE (parm);
	    SET_DECL_MODE (ddecl, DECL_MODE (parm));
	    vec_safe_push (*debug_args, DECL_ORIGIN (parm));
	    vec_safe_push (*debug_args, ddecl);
	  }
      if (debug_args != NULL)
	{
	  /* On the callee side, add
	     DEBUG D#Y s=> parm
	     DEBUG var => D#Y
	     stmts to the first bb where var is a VAR_DECL created for the
	     optimized away parameter in DECL_INITIAL block.  This hints
	     in the debug info that var (whose DECL_ORIGIN is the parm
	     PARM_DECL) is optimized away, but could be looked up at the
	     call site as the value of the corresponding D#X there.  See
	     the illustrative example after this block.  */
	  tree var = vars, vexpr;
	  gimple_stmt_iterator cgsi
	    = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	  gimple *def_temp;
	  var = vars;
	  i = vec_safe_length (*debug_args);
	  do
	    {
	      i -= 2;
	      while (var != NULL_TREE
		     && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
		var = TREE_CHAIN (var);
	      if (var == NULL_TREE)
		break;
	      vexpr = make_node (DEBUG_EXPR_DECL);
	      parm = (**debug_args)[i];
	      DECL_ARTIFICIAL (vexpr) = 1;
	      TREE_TYPE (vexpr) = TREE_TYPE (parm);
	      SET_DECL_MODE (vexpr, DECL_MODE (parm));
	      def_temp = gimple_build_debug_bind (var, vexpr, NULL);
	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
	      def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
	    }
	  while (i > len);
	}
    }
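
  /* Illustrative example only (a hedged sketch, not code emitted verbatim
     by this file): if a parameter such as `int n' is dropped from the
     clone, the loop above inserts into the clone's first basic block a
     pair of debug statements that dump roughly as

	 # DEBUG D#1 s=> n
	 # DEBUG n => D#1

     i.e. a source bind saying the debug expression D#1 stands for the
     original parameter, plus a bind making the local copy of `n'
     available through D#1, so a debugger can still display `n' whenever
     the call site knows the value bound to the matching debug
     expression.  */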

  if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
    BITMAP_FREE (debug_args_to_skip);
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  gcc_assert (!id.debug_stmts.exists ());
  pop_cfun ();
  return;
}

/* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
   the callee and return the inlined body on success.  */

tree
maybe_inline_call_in_expr (tree exp)
{
  tree fn = get_callee_fndecl (exp);

  /* We can only try to inline "const" functions.  */
  if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
    {
      call_expr_arg_iterator iter;
      copy_body_data id;
      tree param, arg, t;
      hash_map<tree, tree> decl_map;

      /* Remap the parameters.  */
      for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
	   param;
	   param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
	decl_map.put (param, arg);

      memset (&id, 0, sizeof (id));
      id.src_fn = fn;
      id.dst_fn = current_function_decl;
      id.src_cfun = DECL_STRUCT_FUNCTION (fn);
      id.decl_map = &decl_map;

      id.copy_decl = copy_decl_no_change;
      id.transform_call_graph_edges = CB_CGE_DUPLICATE;
      id.transform_new_cfg = false;
      id.transform_return_to_modify = true;
      id.transform_parameter = true;
      id.transform_lang_insert_block = NULL;

      /* Make sure not to unshare trees behind the front-end's back
	 since front-end specific mechanisms may rely on sharing.  */
      id.regimplify = false;
      id.do_not_unshare = true;

      /* We're not inside any EH region.  */
      id.eh_lp_nr = 0;

      t = copy_tree_body (&id);

      /* We can only return something suitable for use in a GENERIC
	 expression tree.  */
      if (TREE_CODE (t) == MODIFY_EXPR)
	return TREE_OPERAND (t, 1);
    }

  return NULL_TREE;
}
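
/* Illustrative sketch only (not part of this file's interface): a caller
   holding a GENERIC CALL_EXPR to a "const" function with a saved body
   might try to substitute the inlined body roughly like

     tree body = maybe_inline_call_in_expr (call);
     if (body != NULL_TREE)
       ... use BODY, the value computed by the callee, instead of CALL ...

   where the hypothetical CALL names some pre-existing CALL_EXPR;
   NULL_TREE is returned whenever the callee cannot be integrated.  */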

/* Duplicate a type, fields and all.  */

tree
build_duplicate_type (tree type)
{
  struct copy_body_data id;

  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.src_cfun = cfun;
  id.decl_map = new hash_map<tree, tree>;
  id.debug_map = NULL;
  id.copy_decl = copy_decl_no_change;

  type = remap_type_1 (type, &id);

  delete id.decl_map;
  if (id.debug_map)
    delete id.debug_map;

  TYPE_CANONICAL (type) = type;

  return type;
}
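
/* A hedged usage sketch for build_duplicate_type above (the caller and
   the ORIG variable are hypothetical, not taken from this file):

     tree orig = ...;   some RECORD_TYPE or similar aggregate type
     tree copy = build_duplicate_type (orig);
     gcc_assert (TYPE_CANONICAL (copy) == copy);

   The copy is remapped field by field and made its own canonical type,
   so it is treated as distinct from ORIG.  */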

/* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
   parameters and RESULT_DECL in PARMS and RESULT.  Used by C++ constexpr
   evaluation.  */

tree
copy_fn (tree fn, tree& parms, tree& result)
{
  copy_body_data id;
  tree param;
  hash_map<tree, tree> decl_map;

  tree *p = &parms;
  *p = NULL_TREE;

  memset (&id, 0, sizeof (id));
  id.src_fn = fn;
  id.dst_fn = current_function_decl;
  id.src_cfun = DECL_STRUCT_FUNCTION (fn);
  id.decl_map = &decl_map;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = false;
  id.transform_parameter = true;
  id.transform_lang_insert_block = NULL;

  /* Make sure not to unshare trees behind the front-end's back
     since front-end specific mechanisms may rely on sharing.  */
  id.regimplify = false;
  id.do_not_unshare = true;
  id.do_not_fold = true;

  /* We're not inside any EH region.  */
  id.eh_lp_nr = 0;

  /* Remap the parameters and result and return them to the caller.  */
  for (param = DECL_ARGUMENTS (fn);
       param;
       param = DECL_CHAIN (param))
    {
      *p = remap_decl (param, &id);
      p = &DECL_CHAIN (*p);
    }

  if (DECL_RESULT (fn))
    result = remap_decl (DECL_RESULT (fn), &id);
  else
    result = NULL_TREE;

  return copy_tree_body (&id);
}
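
/* A hedged sketch of how a constexpr-style evaluator might use copy_fn
   above (FNDECL and the evaluation step are hypothetical):

     tree parms, result;
     tree body = copy_fn (fndecl, parms, result);
     ... walk PARMS via DECL_CHAIN and bind each copy to its argument
	 value, then evaluate BODY; stores land in the copies, leaving
	 FNDECL's own DECL_ARGUMENTS and DECL_SAVED_TREE untouched ...

   PARMS receives a freshly remapped chain of parameter decls and RESULT
   the remapped RESULT_DECL (or NULL_TREE if the function has none).  */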