1 /* Tree inlining.
2    Copyright (C) 2001-2016 Free Software Foundation, Inc.
3    Contributed by Alexandre Oliva <aoliva@redhat.com>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "value-prof.h"
57 #include "cfgloop.h"
58 #include "builtins.h"
59 #include "tree-chkp.h"
60 
61 
62 /* I'm not real happy about this, but we need to handle gimple and
63    non-gimple trees.  */
64 
65 /* Inlining, Cloning, Versioning, Parallelization
66 
67    Inlining: a function body is duplicated, but the PARM_DECLs are
68    remapped into VAR_DECLs, and non-void RETURN_EXPRs become
69    MODIFY_EXPRs that store to a dedicated returned-value variable.
70    The duplicated eh_region info of the copy will later be appended
71    to the info for the caller; the eh_region info in copied throwing
72    statements and RESX statements is adjusted accordingly.
73 
74    Cloning: (only in C++) We have one body for a con/de/structor, and
75    multiple function decls, each with a unique parameter list.
76    Duplicate the body, using the given decl map; some parameters
77    will become constants (like 0 or 1).
78 
79    Versioning: a function body is duplicated, but the result becomes a
80    new function rather than being copied into blocks of an existing
81    function as with inlining.  Some parameters will become constants.
82 
83    Parallelization: a region of a function is duplicated resulting in
84    a new function.  Variables may be replaced with complex expressions
85    to enable shared variable semantics.
86 
87    All of these will simultaneously look up any callgraph edges.  If
88    we're going to inline the duplicated function body, and the given
89    function has some cloned callgraph nodes (one for each place this
90    function will be inlined), those callgraph edges will be duplicated.
91    If we're cloning the body, those callgraph edges will be
92    updated to point into the new body.  (Note that the original
93    callgraph node and edge list will not be altered.)
94 
95    See the CALL_EXPR handling case in copy_tree_body_r ().  */
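
/* As a minimal sketch of the inlining case above (hypothetical C
   source; the IL actually being transformed is GIMPLE):

     static int inc (int x) { return x + 1; }
     int caller (int a)     { return inc (a); }

   inlining inc into caller duplicates its body, remaps the PARM_DECL
   x to a local VAR_DECL initialized from a, and turns the callee's
   RETURN_EXPR into an assignment to a returned-value variable that
   then feeds caller's own return value.  */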
96 
97 /* To Do:
98 
99    o In order to make inlining-on-trees work, we pessimized
100      function-local static constants.  In particular, they are now
101      always output, even when not addressed.  Fix this by treating
102      function-local static constants just like global static
103      constants; the back-end already knows not to output them if they
104      are not needed.
105 
106    o Provide heuristics to clamp inlining of recursive template
107      calls?  */
108 
109 
110 /* Weights that estimate_num_insns uses to estimate the size of the
111    produced code.  */
112 
113 eni_weights eni_size_weights;
114 
115 /* Weights that estimate_num_insns uses to estimate the time necessary
116    to execute the produced code.  */
117 
118 eni_weights eni_time_weights;
119 
120 /* Prototypes.  */
121 
122 static tree declare_return_variable (copy_body_data *, tree, tree, tree,
123 				     basic_block);
124 static void remap_block (tree *, copy_body_data *);
125 static void copy_bind_expr (tree *, int *, copy_body_data *);
126 static void declare_inline_vars (tree, tree);
127 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
128 static void prepend_lexical_block (tree current_block, tree new_block);
129 static tree copy_decl_to_var (tree, copy_body_data *);
130 static tree copy_result_decl_to_var (tree, copy_body_data *);
131 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
132 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
133 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
134 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
135 
136 /* Insert a tree->tree mapping for ID.  Although the name suggests
137    that the trees should be variables, this map is used for more than that.  */
138 
139 void
140 insert_decl_map (copy_body_data *id, tree key, tree value)
141 {
142   id->decl_map->put (key, value);
143 
144   /* Always insert an identity map as well.  If we see this same new
145      node again, we won't want to duplicate it a second time.  */
146   if (key != value)
147     id->decl_map->put (value, value);
148 }
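
/* A minimal usage sketch (OLD and NEW stand for hypothetical decls):
   after insert_decl_map (id, OLD, NEW), *id->decl_map->get (OLD) is NEW
   and *id->decl_map->get (NEW) is NEW itself, so a later walk that
   reaches the already-created copy does not duplicate it a second
   time.  */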
149 
150 /* Insert a tree->tree mapping for ID.  This is only used for
151    variables.  */
152 
153 static void
154 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
155 {
156   if (!gimple_in_ssa_p (id->src_cfun))
157     return;
158 
159   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
160     return;
161 
162   if (!target_for_debug_bind (key))
163     return;
164 
165   gcc_assert (TREE_CODE (key) == PARM_DECL);
166   gcc_assert (TREE_CODE (value) == VAR_DECL);
167 
168   if (!id->debug_map)
169     id->debug_map = new hash_map<tree, tree>;
170 
171   id->debug_map->put (key, value);
172 }
173 
174 /* If nonzero, we're remapping the contents of inlined debug
175    statements.  If negative, an error has occurred, such as a
176    reference to a variable that isn't available in the inlined
177    context.  */
178 static int processing_debug_stmt = 0;
179 
180 /* Construct new SSA name for old NAME. ID is the inline context.  */
181 
182 static tree
183 remap_ssa_name (tree name, copy_body_data *id)
184 {
185   tree new_tree, var;
186   tree *n;
187 
188   gcc_assert (TREE_CODE (name) == SSA_NAME);
189 
190   n = id->decl_map->get (name);
191   if (n)
192     return unshare_expr (*n);
193 
194   if (processing_debug_stmt)
195     {
196       if (SSA_NAME_IS_DEFAULT_DEF (name)
197 	  && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
198 	  && id->entry_bb == NULL
199 	  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
200 	{
201 	  tree vexpr = make_node (DEBUG_EXPR_DECL);
202 	  gimple *def_temp;
203 	  gimple_stmt_iterator gsi;
204 	  tree val = SSA_NAME_VAR (name);
205 
206 	  n = id->decl_map->get (val);
207 	  if (n != NULL)
208 	    val = *n;
209 	  if (TREE_CODE (val) != PARM_DECL)
210 	    {
211 	      processing_debug_stmt = -1;
212 	      return name;
213 	    }
214 	  def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
215 	  DECL_ARTIFICIAL (vexpr) = 1;
216 	  TREE_TYPE (vexpr) = TREE_TYPE (name);
217 	  DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
218 	  gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
219 	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
220 	  return vexpr;
221 	}
222 
223       processing_debug_stmt = -1;
224       return name;
225     }
226 
227   /* Remap anonymous SSA names or SSA names of anonymous decls.  */
228   var = SSA_NAME_VAR (name);
229   if (!var
230       || (!SSA_NAME_IS_DEFAULT_DEF (name)
231 	  && TREE_CODE (var) == VAR_DECL
232 	  && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
233 	  && DECL_ARTIFICIAL (var)
234 	  && DECL_IGNORED_P (var)
235 	  && !DECL_NAME (var)))
236     {
237       struct ptr_info_def *pi;
238       new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
239       if (!var && SSA_NAME_IDENTIFIER (name))
240 	SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
241       insert_decl_map (id, name, new_tree);
242       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
243 	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
244       /* At least IPA points-to info can be directly transferred.  */
245       if (id->src_cfun->gimple_df
246 	  && id->src_cfun->gimple_df->ipa_pta
247 	  && (pi = SSA_NAME_PTR_INFO (name))
248 	  && !pi->pt.anything)
249 	{
250 	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
251 	  new_pi->pt = pi->pt;
252 	}
253       return new_tree;
254     }
255 
256   /* Do not set DEF_STMT yet, as the statement has not been copied yet;
257      we do that in copy_bb.  */
258   new_tree = remap_decl (var, id);
259 
260   /* We might have substituted a constant or another SSA_NAME for
261      the variable.
262 
263      Replace the SSA name representing the RESULT_DECL with the variable
264      during inlining:  this saves us from having to introduce a PHI node
265      in the case where the return value is only partly initialized.  */
266   if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
267       && (!SSA_NAME_VAR (name)
268 	  || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
269 	  || !id->transform_return_to_modify))
270     {
271       struct ptr_info_def *pi;
272       new_tree = make_ssa_name (new_tree);
273       insert_decl_map (id, name, new_tree);
274       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
275 	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
276       /* At least IPA points-to info can be directly transferred.  */
277       if (id->src_cfun->gimple_df
278 	  && id->src_cfun->gimple_df->ipa_pta
279 	  && (pi = SSA_NAME_PTR_INFO (name))
280 	  && !pi->pt.anything)
281 	{
282 	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
283 	  new_pi->pt = pi->pt;
284 	}
285       if (SSA_NAME_IS_DEFAULT_DEF (name))
286 	{
287 	  /* By inlining a function with an uninitialized variable, we might
288 	     extend its lifetime (the variable might get reused).  This causes
289 	     an ICE if we end up extending the lifetime of an SSA name across
290 	     an abnormal edge, but it also increases register pressure.
291 
292 	     We simply initialize all uninitialized vars to 0, except when
293 	     we are inlining into the very first BB.  We could avoid this
294 	     for all BBs that are not inside strongly connected regions
295 	     of the CFG, but that is expensive to test.  */
296 	  if (id->entry_bb
297 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
298 	      && (!SSA_NAME_VAR (name)
299 		  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
300 	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
301 					     0)->dest
302 		  || EDGE_COUNT (id->entry_bb->preds) != 1))
303 	    {
304 	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
305 	      gimple *init_stmt;
306 	      tree zero = build_zero_cst (TREE_TYPE (new_tree));
307 
308 	      init_stmt = gimple_build_assign (new_tree, zero);
309 	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
310 	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
311 	    }
312 	  else
313 	    {
314 	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
315 	      set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
316 	    }
317 	}
318     }
319   else
320     insert_decl_map (id, name, new_tree);
321   return new_tree;
322 }
323 
324 /* Remap DECL during the copying of the BLOCK tree for the function.  */
325 
326 tree
327 remap_decl (tree decl, copy_body_data *id)
328 {
329   tree *n;
330 
331   /* We only remap local variables in the current function.  */
332 
333   /* See if we have remapped this declaration.  */
334 
335   n = id->decl_map->get (decl);
336 
337   if (!n && processing_debug_stmt)
338     {
339       processing_debug_stmt = -1;
340       return decl;
341     }
342 
343   /* When remapping a type within copy_gimple_seq_and_replace_locals, all
344      necessary DECLs have already been remapped and we do not want to duplicate
345      a decl coming from outside of the sequence we are copying.  */
346   if (!n
347       && id->prevent_decl_creation_for_types
348       && id->remapping_type_depth > 0
349       && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
350     return decl;
351 
352   /* If we didn't already have an equivalent for this declaration, create one
353      now.  */
354   if (!n)
355     {
356       /* Make a copy of the variable or label.  */
357       tree t = id->copy_decl (decl, id);
358 
359       /* Remember it, so that if we encounter this local entity again
360 	 we can reuse this copy.  Do this early because remap_type may
361 	 need this decl for TYPE_STUB_DECL.  */
362       insert_decl_map (id, decl, t);
363 
364       if (!DECL_P (t))
365 	return t;
366 
367       /* Remap types, if necessary.  */
368       TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
369       if (TREE_CODE (t) == TYPE_DECL)
370         DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
371 
372       /* Remap sizes as necessary.  */
373       walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
374       walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
375 
376       /* If fields, do likewise for offset and qualifier.  */
377       if (TREE_CODE (t) == FIELD_DECL)
378 	{
379 	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
380 	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
381 	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
382 	}
383 
384       return t;
385     }
386 
387   if (id->do_not_unshare)
388     return *n;
389   else
390     return unshare_expr (*n);
391 }
392 
393 static tree
394 remap_type_1 (tree type, copy_body_data *id)
395 {
396   tree new_tree, t;
397 
398   /* We do need a copy.  Build and register it now.  If this is a pointer or
399      reference type, remap the designated type and make a new pointer or
400      reference type.  */
401   if (TREE_CODE (type) == POINTER_TYPE)
402     {
403       new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
404 					 TYPE_MODE (type),
405 					 TYPE_REF_CAN_ALIAS_ALL (type));
406       if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
407 	new_tree = build_type_attribute_qual_variant (new_tree,
408 						      TYPE_ATTRIBUTES (type),
409 						      TYPE_QUALS (type));
410       insert_decl_map (id, type, new_tree);
411       return new_tree;
412     }
413   else if (TREE_CODE (type) == REFERENCE_TYPE)
414     {
415       new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
416 					    TYPE_MODE (type),
417 					    TYPE_REF_CAN_ALIAS_ALL (type));
418       if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
419 	new_tree = build_type_attribute_qual_variant (new_tree,
420 						      TYPE_ATTRIBUTES (type),
421 						      TYPE_QUALS (type));
422       insert_decl_map (id, type, new_tree);
423       return new_tree;
424     }
425   else
426     new_tree = copy_node (type);
427 
428   insert_decl_map (id, type, new_tree);
429 
430   /* This is a new type, not a copy of an old type.  Need to reassociate
431      variants.  We can handle everything except the main variant lazily.  */
432   t = TYPE_MAIN_VARIANT (type);
433   if (type != t)
434     {
435       t = remap_type (t, id);
436       TYPE_MAIN_VARIANT (new_tree) = t;
437       TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
438       TYPE_NEXT_VARIANT (t) = new_tree;
439     }
440   else
441     {
442       TYPE_MAIN_VARIANT (new_tree) = new_tree;
443       TYPE_NEXT_VARIANT (new_tree) = NULL;
444     }
445 
446   if (TYPE_STUB_DECL (type))
447     TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
448 
449   /* Lazily create pointer and reference types.  */
450   TYPE_POINTER_TO (new_tree) = NULL;
451   TYPE_REFERENCE_TO (new_tree) = NULL;
452 
453   /* Copy all types that may contain references to local variables; be sure to
454      preserve sharing between the type and its main variant when possible.  */
455   switch (TREE_CODE (new_tree))
456     {
457     case INTEGER_TYPE:
458     case REAL_TYPE:
459     case FIXED_POINT_TYPE:
460     case ENUMERAL_TYPE:
461     case BOOLEAN_TYPE:
462       if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
463 	{
464 	  gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
465 	  gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
466 
467 	  TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
468 	  TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
469 	}
470       else
471 	{
472 	  t = TYPE_MIN_VALUE (new_tree);
473 	  if (t && TREE_CODE (t) != INTEGER_CST)
474 	    walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
475 
476 	  t = TYPE_MAX_VALUE (new_tree);
477 	  if (t && TREE_CODE (t) != INTEGER_CST)
478 	    walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
479 	}
480       return new_tree;
481 
482     case FUNCTION_TYPE:
483       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
484 	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
485 	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
486       else
487         TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
488       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
489 	  && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
490 	TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
491       else
492         walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
493       return new_tree;
494 
495     case ARRAY_TYPE:
496       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
497 	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
498 	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
499       else
500 	TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
501 
502       if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
503 	{
504 	  gcc_checking_assert (TYPE_DOMAIN (type) == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
505 	  TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
506 	}
507       else
508 	TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
509       break;
510 
511     case RECORD_TYPE:
512     case UNION_TYPE:
513     case QUAL_UNION_TYPE:
514       if (TYPE_MAIN_VARIANT (type) != type
515 	  && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
516 	TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
517       else
518 	{
519 	  tree f, nf = NULL;
520 
521 	  for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
522 	    {
523 	      t = remap_decl (f, id);
524 	      DECL_CONTEXT (t) = new_tree;
525 	      DECL_CHAIN (t) = nf;
526 	      nf = t;
527 	    }
528 	  TYPE_FIELDS (new_tree) = nreverse (nf);
529 	}
530       break;
531 
532     case OFFSET_TYPE:
533     default:
534       /* Shouldn't have been thought variable sized.  */
535       gcc_unreachable ();
536     }
537 
538   /* All variants of a type share the same size, so use the already remapped data.  */
539   if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
540     {
541       gcc_checking_assert (TYPE_SIZE (type) == TYPE_SIZE (TYPE_MAIN_VARIANT (type)));
542       gcc_checking_assert (TYPE_SIZE_UNIT (type) == TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type)));
543 
544       TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
545       TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
546     }
547   else
548     {
549       walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
550       walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
551     }
552 
553   return new_tree;
554 }
555 
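/* Remap TYPE in the context of ID.  Only variably modified types need a
   real copy; for example (hypothetical source)

     void f (int n) { int a[n]; ... }

   the type of the array A has a size that refers to N, so duplicating
   F's body requires remapping that type along with N.  All other types
   are simply entered into the map as an identity so later lookups are
   cheap.  */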
556 tree
557 remap_type (tree type, copy_body_data *id)
558 {
559   tree *node;
560   tree tmp;
561 
562   if (type == NULL)
563     return type;
564 
565   /* See if we have remapped this type.  */
566   node = id->decl_map->get (type);
567   if (node)
568     return *node;
569 
570   /* The type only needs remapping if it's variably modified.  */
571   if (! variably_modified_type_p (type, id->src_fn))
572     {
573       insert_decl_map (id, type, type);
574       return type;
575     }
576 
577   id->remapping_type_depth++;
578   tmp = remap_type_1 (type, id);
579   id->remapping_type_depth--;
580 
581   return tmp;
582 }
583 
584 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */
585 
586 static bool
587 can_be_nonlocal (tree decl, copy_body_data *id)
588 {
589   /* We cannot duplicate function decls.  */
590   if (TREE_CODE (decl) == FUNCTION_DECL)
591     return true;
592 
593   /* Local static vars must be non-local or we get multiple declaration
594      problems.  */
595   if (TREE_CODE (decl) == VAR_DECL
596       && !auto_var_in_fn_p (decl, id->src_fn))
597     return true;
598 
599   return false;
600 }
601 
602 static tree
603 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
604 	     copy_body_data *id)
605 {
606   tree old_var;
607   tree new_decls = NULL_TREE;
608 
609   /* Remap its variables.  */
610   for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
611     {
612       tree new_var;
613 
614       if (can_be_nonlocal (old_var, id))
615 	{
616 	  /* We need to add this variable to the local decls as otherwise
617 	     nothing else will do so.  */
618 	  if (TREE_CODE (old_var) == VAR_DECL
619 	      && ! DECL_EXTERNAL (old_var)
620 	      && cfun)
621 	    add_local_decl (cfun, old_var);
622 	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
623 	      && !DECL_IGNORED_P (old_var)
624 	      && nonlocalized_list)
625 	    vec_safe_push (*nonlocalized_list, old_var);
626 	  continue;
627 	}
628 
629       /* Remap the variable.  */
630       new_var = remap_decl (old_var, id);
631 
632       /* If we didn't remap this variable, we can't mess with its
633 	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
634 	 already declared somewhere else, so don't declare it here.  */
635 
636       if (new_var == id->retvar)
637 	;
638       else if (!new_var)
639         {
640 	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
641 	      && !DECL_IGNORED_P (old_var)
642 	      && nonlocalized_list)
643 	    vec_safe_push (*nonlocalized_list, old_var);
644 	}
645       else
646 	{
647 	  gcc_assert (DECL_P (new_var));
648 	  DECL_CHAIN (new_var) = new_decls;
649 	  new_decls = new_var;
650 
651 	  /* Also copy value-expressions.  */
652 	  if (TREE_CODE (new_var) == VAR_DECL
653 	      && DECL_HAS_VALUE_EXPR_P (new_var))
654 	    {
655 	      tree tem = DECL_VALUE_EXPR (new_var);
656 	      bool old_regimplify = id->regimplify;
657 	      id->remapping_type_depth++;
658 	      walk_tree (&tem, copy_tree_body_r, id, NULL);
659 	      id->remapping_type_depth--;
660 	      id->regimplify = old_regimplify;
661 	      SET_DECL_VALUE_EXPR (new_var, tem);
662 	    }
663 	}
664     }
665 
666   return nreverse (new_decls);
667 }
668 
669 /* Copy the BLOCK to contain remapped versions of the variables
670    therein.  And hook the new block into the block-tree.  */
671 
672 static void
673 remap_block (tree *block, copy_body_data *id)
674 {
675   tree old_block;
676   tree new_block;
677 
678   /* Make the new block.  */
679   old_block = *block;
680   new_block = make_node (BLOCK);
681   TREE_USED (new_block) = TREE_USED (old_block);
682   BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
683   BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
684   BLOCK_NONLOCALIZED_VARS (new_block)
685     = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
686   *block = new_block;
687 
688   /* Remap its variables.  */
689   BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
690   					&BLOCK_NONLOCALIZED_VARS (new_block),
691 					id);
692 
693   if (id->transform_lang_insert_block)
694     id->transform_lang_insert_block (new_block);
695 
696   /* Remember the remapped block.  */
697   insert_decl_map (id, old_block, new_block);
698 }
699 
700 /* Copy the whole block tree and root it in id->block.  */
701 static tree
702 remap_blocks (tree block, copy_body_data *id)
703 {
704   tree t;
705   tree new_tree = block;
706 
707   if (!block)
708     return NULL;
709 
710   remap_block (&new_tree, id);
711   gcc_assert (new_tree != block);
712   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
713     prepend_lexical_block (new_tree, remap_blocks (t, id));
714   /* Blocks are in arbitrary order, but make things slightly prettier and do
715      not swap order when producing a copy.  */
716   BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
717   return new_tree;
718 }
719 
720 /* Remap the block tree rooted at BLOCK to nothing.  */
721 static void
722 remap_blocks_to_null (tree block, copy_body_data *id)
723 {
724   tree t;
725   insert_decl_map (id, block, NULL_TREE);
726   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
727     remap_blocks_to_null (t, id);
728 }
729 
730 static void
731 copy_statement_list (tree *tp)
732 {
733   tree_stmt_iterator oi, ni;
734   tree new_tree;
735 
736   new_tree = alloc_stmt_list ();
737   ni = tsi_start (new_tree);
738   oi = tsi_start (*tp);
739   TREE_TYPE (new_tree) = TREE_TYPE (*tp);
740   *tp = new_tree;
741 
742   for (; !tsi_end_p (oi); tsi_next (&oi))
743     {
744       tree stmt = tsi_stmt (oi);
745       if (TREE_CODE (stmt) == STATEMENT_LIST)
746 	/* This copy is not redundant; tsi_link_after will smash this
747 	   STATEMENT_LIST into the end of the one we're building, and we
748 	   don't want to do that with the original.  */
749 	copy_statement_list (&stmt);
750       tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
751     }
752 }
753 
754 static void
755 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
756 {
757   tree block = BIND_EXPR_BLOCK (*tp);
758   /* Copy (and replace) the statement.  */
759   copy_tree_r (tp, walk_subtrees, NULL);
760   if (block)
761     {
762       remap_block (&block, id);
763       BIND_EXPR_BLOCK (*tp) = block;
764     }
765 
766   if (BIND_EXPR_VARS (*tp))
767     /* This will remap a lot of the same decls again, but this should be
768        harmless.  */
769     BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
770 }
771 
772 
773 /* Create a new gimple_seq by remapping all the statements in BODY
774    using the inlining information in ID.  */
775 
776 static gimple_seq
777 remap_gimple_seq (gimple_seq body, copy_body_data *id)
778 {
779   gimple_stmt_iterator si;
780   gimple_seq new_body = NULL;
781 
782   for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
783     {
784       gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
785       gimple_seq_add_seq (&new_body, new_stmts);
786     }
787 
788   return new_body;
789 }
790 
791 
792 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
793    block using the mapping information in ID.  */
794 
795 static gimple *
796 copy_gimple_bind (gbind *stmt, copy_body_data *id)
797 {
798   gimple *new_bind;
799   tree new_block, new_vars;
800   gimple_seq body, new_body;
801 
802   /* Copy the statement.  Note that we purposely don't use copy_stmt
803      here because we need to remap statements as we copy.  */
804   body = gimple_bind_body (stmt);
805   new_body = remap_gimple_seq (body, id);
806 
807   new_block = gimple_bind_block (stmt);
808   if (new_block)
809     remap_block (&new_block, id);
810 
811   /* This will remap a lot of the same decls again, but this should be
812      harmless.  */
813   new_vars = gimple_bind_vars (stmt);
814   if (new_vars)
815     new_vars = remap_decls (new_vars, NULL, id);
816 
817   new_bind = gimple_build_bind (new_vars, new_body, new_block);
818 
819   return new_bind;
820 }
821 
822 /* Return true if DECL is a parameter or an SSA_NAME for a parameter.  */
823 
824 static bool
825 is_parm (tree decl)
826 {
827   if (TREE_CODE (decl) == SSA_NAME)
828     {
829       decl = SSA_NAME_VAR (decl);
830       if (!decl)
831 	return false;
832     }
833 
834   return (TREE_CODE (decl) == PARM_DECL);
835 }
836 
837 /* Remap the dependence CLIQUE from the source to the destination function
838    as specified in ID.  */
839 
840 static unsigned short
841 remap_dependence_clique (copy_body_data *id, unsigned short clique)
842 {
843   if (clique == 0 || processing_debug_stmt)
844     return 0;
845   if (!id->dependence_map)
846     id->dependence_map = new hash_map<dependence_hash, unsigned short>;
847   bool existed;
848   unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
849   if (!existed)
850     newc = ++cfun->last_clique;
851   return newc;
852 }
853 
854 /* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
855    'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
856    WALK_SUBTREES is used to tell walk_gimple_op whether to keep
857    recursing into the child nodes of *TP.  */
858 
859 static tree
860 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
861 {
862   struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
863   copy_body_data *id = (copy_body_data *) wi_p->info;
864   tree fn = id->src_fn;
865 
866   if (TREE_CODE (*tp) == SSA_NAME)
867     {
868       *tp = remap_ssa_name (*tp, id);
869       *walk_subtrees = 0;
870       return NULL;
871     }
872   else if (auto_var_in_fn_p (*tp, fn))
873     {
874       /* Local variables and labels need to be replaced by equivalent
875 	 variables.  We don't want to copy static variables; there's
876 	 only one of those, no matter how many times we inline the
877 	 containing function.  Similarly for globals from an outer
878 	 function.  */
879       tree new_decl;
880 
881       /* Remap the declaration.  */
882       new_decl = remap_decl (*tp, id);
883       gcc_assert (new_decl);
884       /* Replace this variable with the copy.  */
885       STRIP_TYPE_NOPS (new_decl);
886       /* ???  The C++ frontend uses void * pointer zero to initialize
887          any other type.  This confuses the middle-end type verification.
888 	 As cloned bodies do not go through gimplification again, the fixup
889 	 there doesn't trigger.  */
890       if (TREE_CODE (new_decl) == INTEGER_CST
891 	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
892 	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
893       *tp = new_decl;
894       *walk_subtrees = 0;
895     }
896   else if (TREE_CODE (*tp) == STATEMENT_LIST)
897     gcc_unreachable ();
898   else if (TREE_CODE (*tp) == SAVE_EXPR)
899     gcc_unreachable ();
900   else if (TREE_CODE (*tp) == LABEL_DECL
901 	   && (!DECL_CONTEXT (*tp)
902 	       || decl_function_context (*tp) == id->src_fn))
903     /* These may need to be remapped for EH handling.  */
904     *tp = remap_decl (*tp, id);
905   else if (TREE_CODE (*tp) == FIELD_DECL)
906     {
907       /* If the enclosing record type is variably_modified_type_p, the field
908 	 has already been remapped.  Otherwise, it need not be.  */
909       tree *n = id->decl_map->get (*tp);
910       if (n)
911 	*tp = *n;
912       *walk_subtrees = 0;
913     }
914   else if (TYPE_P (*tp))
915     /* Types may need remapping as well.  */
916     *tp = remap_type (*tp, id);
917   else if (CONSTANT_CLASS_P (*tp))
918     {
919       /* If this is a constant, we have to copy the node iff the type
920 	 will be remapped.  copy_tree_r will not copy a constant.  */
921       tree new_type = remap_type (TREE_TYPE (*tp), id);
922 
923       if (new_type == TREE_TYPE (*tp))
924 	*walk_subtrees = 0;
925 
926       else if (TREE_CODE (*tp) == INTEGER_CST)
927 	*tp = wide_int_to_tree (new_type, *tp);
928       else
929 	{
930 	  *tp = copy_node (*tp);
931 	  TREE_TYPE (*tp) = new_type;
932 	}
933     }
934   else
935     {
936       /* Otherwise, just copy the node.  Note that copy_tree_r already
937 	 knows not to copy VAR_DECLs, etc., so this is safe.  */
938 
939       if (TREE_CODE (*tp) == MEM_REF)
940 	{
941 	  /* We need to re-canonicalize MEM_REFs from inline substitutions
942 	     that can happen when a pointer argument is an ADDR_EXPR.
943 	     Recurse here manually to allow that.  */
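	  /* For illustration (hypothetical caller and callee): if the
	     callee accesses *p and the caller passes &x for p, a plain
	     copy would leave a MEM_REF whose base is the ADDR_EXPR &x.
	     Rebuilding the reference with fold_build2 below lets the
	     folder canonicalize such *& combinations instead of copying
	     them verbatim.  */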
944 	  tree ptr = TREE_OPERAND (*tp, 0);
945 	  tree type = remap_type (TREE_TYPE (*tp), id);
946 	  tree old = *tp;
947 	  walk_tree (&ptr, remap_gimple_op_r, data, NULL);
948 	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
949 	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
950 	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
951 	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
952 	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
953 	    {
954 	      MR_DEPENDENCE_CLIQUE (*tp)
955 	        = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
956 	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
957 	    }
958 	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
959 	     remapped a parameter as the property might be valid only
960 	     for the parameter itself.  */
961 	  if (TREE_THIS_NOTRAP (old)
962 	      && (!is_parm (TREE_OPERAND (old, 0))
963 		  || (!id->transform_parameter && is_parm (ptr))))
964 	    TREE_THIS_NOTRAP (*tp) = 1;
965 	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
966 	  *walk_subtrees = 0;
967 	  return NULL;
968 	}
969 
970       /* Here is the "usual case".  Copy this tree node, and then
971 	 tweak some special cases.  */
972       copy_tree_r (tp, walk_subtrees, NULL);
973 
974       if (TREE_CODE (*tp) != OMP_CLAUSE)
975 	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
976 
977       if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
978 	{
979 	  /* The copied TARGET_EXPR has never been expanded, even if the
980 	     original node was expanded already.  */
981 	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
982 	  TREE_OPERAND (*tp, 3) = NULL_TREE;
983 	}
984       else if (TREE_CODE (*tp) == ADDR_EXPR)
985 	{
986 	  /* Variable substitution need not be simple.  In particular,
987 	     the MEM_REF substitution above.  Make sure that
988 	     TREE_CONSTANT and friends are up-to-date.  */
989 	  int invariant = is_gimple_min_invariant (*tp);
990 	  walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
991 	  recompute_tree_invariant_for_addr_expr (*tp);
992 
993 	  /* If this used to be invariant, but is not any longer,
994 	     then regimplification is probably needed.  */
995 	  if (invariant && !is_gimple_min_invariant (*tp))
996 	    id->regimplify = true;
997 
998 	  *walk_subtrees = 0;
999 	}
1000     }
1001 
1002   /* Update the TREE_BLOCK for the cloned expr.  */
1003   if (EXPR_P (*tp))
1004     {
1005       tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1006       tree old_block = TREE_BLOCK (*tp);
1007       if (old_block)
1008 	{
1009 	  tree *n;
1010 	  n = id->decl_map->get (TREE_BLOCK (*tp));
1011 	  if (n)
1012 	    new_block = *n;
1013 	}
1014       TREE_SET_BLOCK (*tp, new_block);
1015     }
1016 
1017   /* Keep iterating.  */
1018   return NULL_TREE;
1019 }
1020 
1021 
1022 /* Called from copy_body via walk_tree.  DATA is really a
1023    `copy_body_data *'.  */
1024 
1025 tree
1026 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1027 {
1028   copy_body_data *id = (copy_body_data *) data;
1029   tree fn = id->src_fn;
1030   tree new_block;
1031 
1032   /* Begin by recognizing trees that we'll completely rewrite for the
1033      inlining context.  Our output for these trees is completely
1034      different from our input (e.g. RETURN_EXPR is deleted, and morphs
1035      into an edge).  Further down, we'll handle trees that get
1036      duplicated and/or tweaked.  */
1037 
1038   /* When requested, RETURN_EXPRs should be transformed to just the
1039      contained MODIFY_EXPR.  The branch semantics of the return will
1040      be handled elsewhere by manipulating the CFG rather than a statement.  */
1041   if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1042     {
1043       tree assignment = TREE_OPERAND (*tp, 0);
1044 
1045       /* If we're returning something, just turn that into an
1046 	 assignment into the equivalent of the original RESULT_DECL.
1047 	 If the "assignment" is just the result decl, the result
1048 	 decl has already been set (e.g. a recent "foo (&result_decl,
1049 	 ...)"); just toss the entire RETURN_EXPR.  */
1050       if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1051 	{
1052 	  /* Replace the RETURN_EXPR with (a copy of) the
1053 	     MODIFY_EXPR hanging underneath.  */
1054 	  *tp = copy_node (assignment);
1055 	}
1056       else /* Else the RETURN_EXPR returns no value.  */
1057 	{
1058 	  *tp = NULL;
1059 	  return (tree) (void *)1;
1060 	}
1061     }
1062   else if (TREE_CODE (*tp) == SSA_NAME)
1063     {
1064       *tp = remap_ssa_name (*tp, id);
1065       *walk_subtrees = 0;
1066       return NULL;
1067     }
1068 
1069   /* Local variables and labels need to be replaced by equivalent
1070      variables.  We don't want to copy static variables; there's only
1071      one of those, no matter how many times we inline the containing
1072      function.  Similarly for globals from an outer function.  */
1073   else if (auto_var_in_fn_p (*tp, fn))
1074     {
1075       tree new_decl;
1076 
1077       /* Remap the declaration.  */
1078       new_decl = remap_decl (*tp, id);
1079       gcc_assert (new_decl);
1080       /* Replace this variable with the copy.  */
1081       STRIP_TYPE_NOPS (new_decl);
1082       *tp = new_decl;
1083       *walk_subtrees = 0;
1084     }
1085   else if (TREE_CODE (*tp) == STATEMENT_LIST)
1086     copy_statement_list (tp);
1087   else if (TREE_CODE (*tp) == SAVE_EXPR
1088 	   || TREE_CODE (*tp) == TARGET_EXPR)
1089     remap_save_expr (tp, id->decl_map, walk_subtrees);
1090   else if (TREE_CODE (*tp) == LABEL_DECL
1091 	   && (! DECL_CONTEXT (*tp)
1092 	       || decl_function_context (*tp) == id->src_fn))
1093     /* These may need to be remapped for EH handling.  */
1094     *tp = remap_decl (*tp, id);
1095   else if (TREE_CODE (*tp) == BIND_EXPR)
1096     copy_bind_expr (tp, walk_subtrees, id);
1097   /* Types may need remapping as well.  */
1098   else if (TYPE_P (*tp))
1099     *tp = remap_type (*tp, id);
1100 
1101   /* If this is a constant, we have to copy the node iff the type will be
1102      remapped.  copy_tree_r will not copy a constant.  */
1103   else if (CONSTANT_CLASS_P (*tp))
1104     {
1105       tree new_type = remap_type (TREE_TYPE (*tp), id);
1106 
1107       if (new_type == TREE_TYPE (*tp))
1108 	*walk_subtrees = 0;
1109 
1110       else if (TREE_CODE (*tp) == INTEGER_CST)
1111 	*tp = wide_int_to_tree (new_type, *tp);
1112       else
1113 	{
1114 	  *tp = copy_node (*tp);
1115 	  TREE_TYPE (*tp) = new_type;
1116 	}
1117     }
1118 
1119   /* Otherwise, just copy the node.  Note that copy_tree_r already
1120      knows not to copy VAR_DECLs, etc., so this is safe.  */
1121   else
1122     {
1123       /* Here we handle trees that are not completely rewritten.
1124 	 First we detect some inlining-induced bogosities for
1125 	 discarding.  */
1126       if (TREE_CODE (*tp) == MODIFY_EXPR
1127 	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1128 	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1129 	{
1130 	  /* Some assignments VAR = VAR; don't generate any rtl code
1131 	     and thus don't count as variable modification.  Avoid
1132 	     keeping bogosities like 0 = 0.  */
1133 	  tree decl = TREE_OPERAND (*tp, 0), value;
1134 	  tree *n;
1135 
1136 	  n = id->decl_map->get (decl);
1137 	  if (n)
1138 	    {
1139 	      value = *n;
1140 	      STRIP_TYPE_NOPS (value);
1141 	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
1142 		{
1143 		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1144 		  return copy_tree_body_r (tp, walk_subtrees, data);
1145 		}
1146 	    }
1147 	}
1148       else if (TREE_CODE (*tp) == INDIRECT_REF)
1149 	{
1150 	  /* Get rid of *& from inline substitutions that can happen when a
1151 	     pointer argument is an ADDR_EXPR.  */
1152 	  tree decl = TREE_OPERAND (*tp, 0);
1153 	  tree *n = id->decl_map->get (decl);
1154 	  if (n)
1155 	    {
1156 	      /* If we happen to get an ADDR_EXPR in n->value, strip
1157 	         it manually here as we'll eventually get ADDR_EXPRs
1158 		 which lie about their types pointed to.  In this case
1159 		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1160 		 but we absolutely rely on that.  As fold_indirect_ref
1161 	         does other useful transformations, try that first, though.  */
1162 	      tree type = TREE_TYPE (*tp);
1163 	      tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1164 	      tree old = *tp;
1165 	      *tp = gimple_fold_indirect_ref (ptr);
1166 	      if (! *tp)
1167 	        {
1168 		  if (TREE_CODE (ptr) == ADDR_EXPR)
1169 		    {
1170 		      *tp
1171 		        = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1172 		      /* ???  We should either assert here or build
1173 			 a VIEW_CONVERT_EXPR instead of blindly leaking
1174 			 incompatible types to our IL.  */
1175 		      if (! *tp)
1176 			*tp = TREE_OPERAND (ptr, 0);
1177 		    }
1178 	          else
1179 		    {
1180 	              *tp = build1 (INDIRECT_REF, type, ptr);
1181 		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1182 		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1183 		      TREE_READONLY (*tp) = TREE_READONLY (old);
1184 		      /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1185 			 have remapped a parameter as the property might be
1186 			 valid only for the parameter itself.  */
1187 		      if (TREE_THIS_NOTRAP (old)
1188 			  && (!is_parm (TREE_OPERAND (old, 0))
1189 			      || (!id->transform_parameter && is_parm (ptr))))
1190 		        TREE_THIS_NOTRAP (*tp) = 1;
1191 		    }
1192 		}
1193 	      *walk_subtrees = 0;
1194 	      return NULL;
1195 	    }
1196 	}
1197       else if (TREE_CODE (*tp) == MEM_REF)
1198 	{
1199 	  /* We need to re-canonicalize MEM_REFs from inline substitutions
1200 	     that can happen when a pointer argument is an ADDR_EXPR.
1201 	     Recurse here manually to allow that.  */
1202 	  tree ptr = TREE_OPERAND (*tp, 0);
1203 	  tree type = remap_type (TREE_TYPE (*tp), id);
1204 	  tree old = *tp;
1205 	  walk_tree (&ptr, copy_tree_body_r, data, NULL);
1206 	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1207 	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1208 	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1209 	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1210 	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
1211 	    {
1212 	      MR_DEPENDENCE_CLIQUE (*tp)
1213 		= remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1214 	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1215 	    }
1216 	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1217 	     remapped a parameter as the property might be valid only
1218 	     for the parameter itself.  */
1219 	  if (TREE_THIS_NOTRAP (old)
1220 	      && (!is_parm (TREE_OPERAND (old, 0))
1221 		  || (!id->transform_parameter && is_parm (ptr))))
1222 	    TREE_THIS_NOTRAP (*tp) = 1;
1223 	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1224 	  *walk_subtrees = 0;
1225 	  return NULL;
1226 	}
1227 
1228       /* Here is the "usual case".  Copy this tree node, and then
1229 	 tweak some special cases.  */
1230       copy_tree_r (tp, walk_subtrees, NULL);
1231 
1232       /* If EXPR has a block defined, map it to the newly constructed block.
1233          When inlining we want EXPRs without a block to appear in the block
1234 	 of the function call if we are not remapping a type.  */
1235       if (EXPR_P (*tp))
1236 	{
1237 	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1238 	  if (TREE_BLOCK (*tp))
1239 	    {
1240 	      tree *n;
1241 	      n = id->decl_map->get (TREE_BLOCK (*tp));
1242 	      if (n)
1243 		new_block = *n;
1244 	    }
1245 	  TREE_SET_BLOCK (*tp, new_block);
1246 	}
1247 
1248       if (TREE_CODE (*tp) != OMP_CLAUSE)
1249 	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1250 
1251       /* The copied TARGET_EXPR has never been expanded, even if the
1252 	 original node was expanded already.  */
1253       if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1254 	{
1255 	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1256 	  TREE_OPERAND (*tp, 3) = NULL_TREE;
1257 	}
1258 
1259       /* Variable substitution need not be simple.  In particular, the
1260 	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
1261 	 and friends are up-to-date.  */
1262       else if (TREE_CODE (*tp) == ADDR_EXPR)
1263 	{
1264 	  int invariant = is_gimple_min_invariant (*tp);
1265 	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1266 
1267 	  /* Handle the case where we substituted an INDIRECT_REF
1268 	     into the operand of the ADDR_EXPR.  */
1269 	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1270 	    {
1271 	      tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1272 	      if (TREE_TYPE (t) != TREE_TYPE (*tp))
1273 		t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1274 	      *tp = t;
1275 	    }
1276 	  else
1277 	    recompute_tree_invariant_for_addr_expr (*tp);
1278 
1279 	  /* If this used to be invariant, but is not any longer,
1280 	     then regimplification is probably needed.  */
1281 	  if (invariant && !is_gimple_min_invariant (*tp))
1282 	    id->regimplify = true;
1283 
1284 	  *walk_subtrees = 0;
1285 	}
1286     }
1287 
1288   /* Keep iterating.  */
1289   return NULL_TREE;
1290 }
1291 
1292 /* Helper for remap_gimple_stmt.  Given an EH region number for the
1293    source function, map that to the duplicate EH region number in
1294    the destination function.  */
1295 
1296 static int
1297 remap_eh_region_nr (int old_nr, copy_body_data *id)
1298 {
1299   eh_region old_r, new_r;
1300 
1301   old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1302   new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1303 
1304   return new_r->index;
1305 }
1306 
1307 /* Similar, but operate on INTEGER_CSTs.  */
1308 
1309 static tree
1310 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1311 {
1312   int old_nr, new_nr;
1313 
1314   old_nr = tree_to_shwi (old_t_nr);
1315   new_nr = remap_eh_region_nr (old_nr, id);
1316 
1317   return build_int_cst (integer_type_node, new_nr);
1318 }
1319 
1320 /* Helper for copy_bb.  Remap statement STMT using the inlining
1321    information in ID.  Return the sequence of copied statements.  */
1322 
1323 static gimple_seq
1324 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1325 {
1326   gimple *copy = NULL;
1327   struct walk_stmt_info wi;
1328   bool skip_first = false;
1329   gimple_seq stmts = NULL;
1330 
1331   if (is_gimple_debug (stmt)
1332       && !opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
1333     return stmts;
1334 
1335   /* Begin by recognizing trees that we'll completely rewrite for the
1336      inlining context.  Our output for these trees is completely
1337      different from our input (e.g. RETURN_EXPR is deleted, and morphs
1338      into an edge).  Further down, we'll handle trees that get
1339      duplicated and/or tweaked.  */
1340 
1341   /* When requested, GIMPLE_RETURNs should be transformed to just the
1342      contained GIMPLE_ASSIGN.  The branch semantics of the return will
1343      be handled elsewhere by manipulating the CFG rather than the
1344      statement.  */
1345   if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1346     {
1347       tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1348       tree retbnd = gimple_return_retbnd (stmt);
1349       tree bndslot = id->retbnd;
1350 
1351       if (retbnd && bndslot)
1352 	{
1353 	  gimple *bndcopy = gimple_build_assign (bndslot, retbnd);
1354 	  memset (&wi, 0, sizeof (wi));
1355 	  wi.info = id;
1356 	  walk_gimple_op (bndcopy, remap_gimple_op_r, &wi);
1357 	  gimple_seq_add_stmt (&stmts, bndcopy);
1358 	}
1359 
1360       /* If we're returning something, just turn that into an
1361 	 assignment into the equivalent of the original RESULT_DECL.
1362 	 If RETVAL is just the result decl, the result decl has
1363 	 already been set (e.g. a recent "foo (&result_decl, ...)");
1364 	 just toss the entire GIMPLE_RETURN.  */
1365       if (retval
1366 	  && (TREE_CODE (retval) != RESULT_DECL
1367 	      && (TREE_CODE (retval) != SSA_NAME
1368 		  || ! SSA_NAME_VAR (retval)
1369 		  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1370         {
1371 	  copy = gimple_build_assign (id->do_not_unshare
1372 				      ? id->retvar : unshare_expr (id->retvar),
1373 				      retval);
1374 	  /* id->retvar is already substituted.  Skip it on later remapping.  */
1375 	  skip_first = true;
1376 
1377 	  /* We need to copy bounds if we return a structure with pointers
1378 	     into an instrumented function.  */
1379 	  if (chkp_function_instrumented_p (id->dst_fn)
1380 	      && !bndslot
1381 	      && !BOUNDED_P (id->retvar)
1382 	      && chkp_type_has_pointer (TREE_TYPE (id->retvar)))
1383 	    id->assign_stmts.safe_push (copy);
1384 
1385 	}
1386       else
1387 	return stmts;
1388     }
1389   else if (gimple_has_substatements (stmt))
1390     {
1391       gimple_seq s1, s2;
1392 
1393       /* When cloning bodies from the C++ front end, we will be handed bodies
1394 	 in High GIMPLE form.  Handle here all the High GIMPLE statements that
1395 	 have embedded statements.  */
1396       switch (gimple_code (stmt))
1397 	{
1398 	case GIMPLE_BIND:
1399 	  copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1400 	  break;
1401 
1402 	case GIMPLE_CATCH:
1403 	  {
1404 	    gcatch *catch_stmt = as_a <gcatch *> (stmt);
1405 	    s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1406 	    copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1407 	  }
1408 	  break;
1409 
1410 	case GIMPLE_EH_FILTER:
1411 	  s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1412 	  copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1413 	  break;
1414 
1415 	case GIMPLE_TRY:
1416 	  s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1417 	  s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1418 	  copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1419 	  break;
1420 
1421 	case GIMPLE_WITH_CLEANUP_EXPR:
1422 	  s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1423 	  copy = gimple_build_wce (s1);
1424 	  break;
1425 
1426 	case GIMPLE_OMP_PARALLEL:
1427 	  {
1428 	    gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1429 	    s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1430 	    copy = gimple_build_omp_parallel
1431 	             (s1,
1432 		      gimple_omp_parallel_clauses (omp_par_stmt),
1433 		      gimple_omp_parallel_child_fn (omp_par_stmt),
1434 		      gimple_omp_parallel_data_arg (omp_par_stmt));
1435 	  }
1436 	  break;
1437 
1438 	case GIMPLE_OMP_TASK:
1439 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1440 	  copy = gimple_build_omp_task
1441 	           (s1,
1442 		    gimple_omp_task_clauses (stmt),
1443 		    gimple_omp_task_child_fn (stmt),
1444 		    gimple_omp_task_data_arg (stmt),
1445 		    gimple_omp_task_copy_fn (stmt),
1446 		    gimple_omp_task_arg_size (stmt),
1447 		    gimple_omp_task_arg_align (stmt));
1448 	  break;
1449 
1450 	case GIMPLE_OMP_FOR:
1451 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1452 	  s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1453 	  copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1454 				       gimple_omp_for_clauses (stmt),
1455 				       gimple_omp_for_collapse (stmt), s2);
1456 	  {
1457 	    size_t i;
1458 	    for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1459 	      {
1460 		gimple_omp_for_set_index (copy, i,
1461 					  gimple_omp_for_index (stmt, i));
1462 		gimple_omp_for_set_initial (copy, i,
1463 					    gimple_omp_for_initial (stmt, i));
1464 		gimple_omp_for_set_final (copy, i,
1465 					  gimple_omp_for_final (stmt, i));
1466 		gimple_omp_for_set_incr (copy, i,
1467 					 gimple_omp_for_incr (stmt, i));
1468 		gimple_omp_for_set_cond (copy, i,
1469 					 gimple_omp_for_cond (stmt, i));
1470 	      }
1471 	  }
1472 	  break;
1473 
1474 	case GIMPLE_OMP_MASTER:
1475 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1476 	  copy = gimple_build_omp_master (s1);
1477 	  break;
1478 
1479 	case GIMPLE_OMP_TASKGROUP:
1480 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1481 	  copy = gimple_build_omp_taskgroup (s1);
1482 	  break;
1483 
1484 	case GIMPLE_OMP_ORDERED:
1485 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1486 	  copy = gimple_build_omp_ordered
1487 		   (s1,
1488 		    gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1489 	  break;
1490 
1491 	case GIMPLE_OMP_SECTION:
1492 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1493 	  copy = gimple_build_omp_section (s1);
1494 	  break;
1495 
1496 	case GIMPLE_OMP_SECTIONS:
1497 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1498 	  copy = gimple_build_omp_sections
1499 	           (s1, gimple_omp_sections_clauses (stmt));
1500 	  break;
1501 
1502 	case GIMPLE_OMP_SINGLE:
1503 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1504 	  copy = gimple_build_omp_single
1505 	           (s1, gimple_omp_single_clauses (stmt));
1506 	  break;
1507 
1508 	case GIMPLE_OMP_TARGET:
1509 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1510 	  copy = gimple_build_omp_target
1511 		   (s1, gimple_omp_target_kind (stmt),
1512 		    gimple_omp_target_clauses (stmt));
1513 	  break;
1514 
1515 	case GIMPLE_OMP_TEAMS:
1516 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1517 	  copy = gimple_build_omp_teams
1518 		   (s1, gimple_omp_teams_clauses (stmt));
1519 	  break;
1520 
1521 	case GIMPLE_OMP_CRITICAL:
1522 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1523 	  copy = gimple_build_omp_critical (s1,
1524 					    gimple_omp_critical_name
1525 					      (as_a <gomp_critical *> (stmt)),
1526 					    gimple_omp_critical_clauses
1527 					      (as_a <gomp_critical *> (stmt)));
1528 	  break;
1529 
1530 	case GIMPLE_TRANSACTION:
1531 	  {
1532 	    gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1533 	    gtransaction *new_trans_stmt;
1534 	    s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1535 				   id);
1536 	    copy = new_trans_stmt = gimple_build_transaction (s1);
1537 	    gimple_transaction_set_subcode (new_trans_stmt,
1538 	      gimple_transaction_subcode (old_trans_stmt));
1539 	    gimple_transaction_set_label_norm (new_trans_stmt,
1540 	      gimple_transaction_label_norm (old_trans_stmt));
1541 	    gimple_transaction_set_label_uninst (new_trans_stmt,
1542 	      gimple_transaction_label_uninst (old_trans_stmt));
1543 	    gimple_transaction_set_label_over (new_trans_stmt,
1544 	      gimple_transaction_label_over (old_trans_stmt));
1545 	  }
1546 	  break;
1547 
1548 	default:
1549 	  gcc_unreachable ();
1550 	}
1551     }
1552   else
1553     {
1554       if (gimple_assign_copy_p (stmt)
1555 	  && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1556 	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1557 	{
1558 	  /* Here we handle statements that are not completely rewritten.
1559 	     First we detect some inlining-induced bogosities for
1560 	     discarding.  */
1561 
1562 	  /* Some assignments VAR = VAR; don't generate any rtl code
1563 	     and thus don't count as variable modification.  Avoid
1564 	     keeping bogosities like 0 = 0.  */
1565 	  tree decl = gimple_assign_lhs (stmt), value;
1566 	  tree *n;
1567 
1568 	  n = id->decl_map->get (decl);
1569 	  if (n)
1570 	    {
1571 	      value = *n;
1572 	      STRIP_TYPE_NOPS (value);
1573 	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
1574 		return NULL;
1575 	    }
1576 	}
1577 
1578       /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1579 	 in a block that we aren't copying during tree_function_versioning,
1580 	 just drop the clobber stmt.  */
1581       if (id->blocks_to_copy && gimple_clobber_p (stmt))
1582 	{
1583 	  tree lhs = gimple_assign_lhs (stmt);
1584 	  if (TREE_CODE (lhs) == MEM_REF
1585 	      && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1586 	    {
1587 	      gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1588 	      if (gimple_bb (def_stmt)
1589 		  && !bitmap_bit_p (id->blocks_to_copy,
1590 				    gimple_bb (def_stmt)->index))
1591 		return NULL;
1592 	    }
1593 	}
1594 
1595       if (gimple_debug_bind_p (stmt))
1596 	{
1597 	  gdebug *copy
1598 	    = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1599 				       gimple_debug_bind_get_value (stmt),
1600 				       stmt);
1601 	  id->debug_stmts.safe_push (copy);
1602 	  gimple_seq_add_stmt (&stmts, copy);
1603 	  return stmts;
1604 	}
1605       if (gimple_debug_source_bind_p (stmt))
1606 	{
1607 	  gdebug *copy = gimple_build_debug_source_bind
1608 	                   (gimple_debug_source_bind_get_var (stmt),
1609 			    gimple_debug_source_bind_get_value (stmt),
1610 			    stmt);
1611 	  id->debug_stmts.safe_push (copy);
1612 	  gimple_seq_add_stmt (&stmts, copy);
1613 	  return stmts;
1614 	}
1615 
1616       /* Create a new deep copy of the statement.  */
1617       copy = gimple_copy (stmt);
1618 
1619       /* Clear flags that need revisiting.  */
1620       if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1621         {
1622 	  if (gimple_call_tail_p (call_stmt))
1623 	    gimple_call_set_tail (call_stmt, false);
1624 	  if (gimple_call_from_thunk_p (call_stmt))
1625 	    gimple_call_set_from_thunk (call_stmt, false);
1626 	  if (gimple_call_internal_p (call_stmt))
1627 	    switch (gimple_call_internal_fn (call_stmt))
1628 	      {
1629 	      case IFN_GOMP_SIMD_LANE:
1630 	      case IFN_GOMP_SIMD_VF:
1631 	      case IFN_GOMP_SIMD_LAST_LANE:
1632 	      case IFN_GOMP_SIMD_ORDERED_START:
1633 	      case IFN_GOMP_SIMD_ORDERED_END:
1634 		DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1635 	        break;
1636 	      default:
1637 		break;
1638 	      }
1639 	}
1640 
1641       /* Remap the region numbers for __builtin_eh_{pointer,filter},
1642 	 RESX and EH_DISPATCH.  */
1643       if (id->eh_map)
1644 	switch (gimple_code (copy))
1645 	  {
1646 	  case GIMPLE_CALL:
1647 	    {
1648 	      tree r, fndecl = gimple_call_fndecl (copy);
1649 	      if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1650 		switch (DECL_FUNCTION_CODE (fndecl))
1651 		  {
1652 		  case BUILT_IN_EH_COPY_VALUES:
1653 		    r = gimple_call_arg (copy, 1);
1654 		    r = remap_eh_region_tree_nr (r, id);
1655 		    gimple_call_set_arg (copy, 1, r);
1656 		    /* FALLTHRU */
1657 
1658 		  case BUILT_IN_EH_POINTER:
1659 		  case BUILT_IN_EH_FILTER:
1660 		    r = gimple_call_arg (copy, 0);
1661 		    r = remap_eh_region_tree_nr (r, id);
1662 		    gimple_call_set_arg (copy, 0, r);
1663 		    break;
1664 
1665 		  default:
1666 		    break;
1667 		  }
1668 
1669 	      /* Reset alias info if we did not take measures to keep it
1670 		 valid across inlining by setting DECL_PT_UID.  */
1671 	      if (!id->src_cfun->gimple_df
1672 		  || !id->src_cfun->gimple_df->ipa_pta)
1673 		gimple_call_reset_alias_info (as_a <gcall *> (copy));
1674 	    }
1675 	    break;
1676 
1677 	  case GIMPLE_RESX:
1678 	    {
1679 	      gresx *resx_stmt = as_a <gresx *> (copy);
1680 	      int r = gimple_resx_region (resx_stmt);
1681 	      r = remap_eh_region_nr (r, id);
1682 	      gimple_resx_set_region (resx_stmt, r);
1683 	    }
1684 	    break;
1685 
1686 	  case GIMPLE_EH_DISPATCH:
1687 	    {
1688 	      geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1689 	      int r = gimple_eh_dispatch_region (eh_dispatch);
1690 	      r = remap_eh_region_nr (r, id);
1691 	      gimple_eh_dispatch_set_region (eh_dispatch, r);
1692 	    }
1693 	    break;
1694 
1695 	  default:
1696 	    break;
1697 	  }
1698     }
1699 
1700   /* If STMT has a block defined, map it to the newly constructed
1701      block.  */
1702   if (gimple_block (copy))
1703     {
1704       tree *n;
1705       n = id->decl_map->get (gimple_block (copy));
1706       gcc_assert (n);
1707       gimple_set_block (copy, *n);
1708     }
1709 
1710   if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
1711     {
1712       gimple_seq_add_stmt (&stmts, copy);
1713       return stmts;
1714     }
1715 
1716   /* Remap all the operands in COPY.  */
1717   memset (&wi, 0, sizeof (wi));
1718   wi.info = id;
1719   if (skip_first)
1720     walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1721   else
1722     walk_gimple_op (copy, remap_gimple_op_r, &wi);
1723 
1724   /* Clear the copied virtual operands.  We are not remapping them here
1725      but are going to recreate them from scratch.  */
1726   if (gimple_has_mem_ops (copy))
1727     {
1728       gimple_set_vdef (copy, NULL_TREE);
1729       gimple_set_vuse (copy, NULL_TREE);
1730     }
1731 
1732   gimple_seq_add_stmt (&stmts, copy);
1733   return stmts;
1734 }
1735 
1736 
1737 /* Copy basic block and scale its profile accordingly.  Edges will be taken
1738    care of later.  */
1739 
1740 static basic_block
1741 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1742          gcov_type count_scale)
1743 {
1744   gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1745   basic_block copy_basic_block;
1746   tree decl;
1747   gcov_type freq;
1748   basic_block prev;
1749 
1750   /* Search for previous copied basic block.  */
1751   prev = bb->prev_bb;
1752   while (!prev->aux)
1753     prev = prev->prev_bb;
1754 
1755   /* create_basic_block() will append every new block to
1756      basic_block_info automatically.  */
1757   copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1758   copy_basic_block->count = apply_scale (bb->count, count_scale);
1759 
1760   /* We are going to rebuild frequencies from scratch.  These values
1761      are of only minor importance; they merely drive canonicalize_loop_headers.  */
1762   freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);
1763 
1764   /* We recompute frequencies after inlining, so this is quite safe.  */
1765   if (freq > BB_FREQ_MAX)
1766     freq = BB_FREQ_MAX;
1767   copy_basic_block->frequency = freq;
1768 
1769   copy_gsi = gsi_start_bb (copy_basic_block);
1770 
1771   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1772     {
1773       gimple_seq stmts;
1774       gimple *stmt = gsi_stmt (gsi);
1775       gimple *orig_stmt = stmt;
1776       gimple_stmt_iterator stmts_gsi;
1777       bool stmt_added = false;
1778 
1779       id->regimplify = false;
1780       stmts = remap_gimple_stmt (stmt, id);
1781 
1782       if (gimple_seq_empty_p (stmts))
1783 	continue;
1784 
1785       seq_gsi = copy_gsi;
1786 
1787       for (stmts_gsi = gsi_start (stmts);
1788 	   !gsi_end_p (stmts_gsi); )
1789 	{
1790 	  stmt = gsi_stmt (stmts_gsi);
1791 
1792 	  /* Advance iterator now before stmt is moved to seq_gsi.  */
1793 	  gsi_next (&stmts_gsi);
1794 
1795 	  if (gimple_nop_p (stmt))
1796 	      continue;
1797 
1798 	  gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1799 					    orig_stmt);
1800 
1801 	  /* With return slot optimization we can end up with
1802 	     non-gimple (foo *)&this->m, fix that here.  */
1803 	  if (is_gimple_assign (stmt)
1804 	      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1805 	      && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1806 	    {
1807 	      tree new_rhs;
1808 	      new_rhs = force_gimple_operand_gsi (&seq_gsi,
1809 						  gimple_assign_rhs1 (stmt),
1810 						  true, NULL, false,
1811 						  GSI_CONTINUE_LINKING);
1812 	      gimple_assign_set_rhs1 (stmt, new_rhs);
1813 	      id->regimplify = false;
1814 	    }
1815 
1816 	  gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1817 
1818 	  if (id->regimplify)
1819 	    gimple_regimplify_operands (stmt, &seq_gsi);
1820 
1821 	  stmt_added = true;
1822 	}
1823 
1824       if (!stmt_added)
1825 	continue;
1826 
1827       /* If copy_basic_block has been empty at the start of this iteration,
1828 	 call gsi_start_bb again to get at the newly added statements.  */
1829       if (gsi_end_p (copy_gsi))
1830 	copy_gsi = gsi_start_bb (copy_basic_block);
1831       else
1832 	gsi_next (&copy_gsi);
1833 
1834       /* Process the new statement.  The call to gimple_regimplify_operands
1835 	 possibly turned the statement into multiple statements; we
1836 	 need to process all of them.  */
1837       do
1838 	{
1839 	  tree fn;
1840 	  gcall *call_stmt;
1841 
1842 	  stmt = gsi_stmt (copy_gsi);
1843 	  call_stmt = dyn_cast <gcall *> (stmt);
1844 	  if (call_stmt
1845 	      && gimple_call_va_arg_pack_p (call_stmt)
1846 	      && id->call_stmt
1847 	      && ! gimple_call_va_arg_pack_p (id->call_stmt))
1848 	    {
1849 	      /* __builtin_va_arg_pack () should be replaced by
1850 		 all arguments corresponding to ... in the caller.  */
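	      /* An illustrative sketch (names hypothetical): if the caller
		 contains foo (x, 1, 2) and the inlined foo (int x, ...) in
		 turn calls bar (x, __builtin_va_arg_pack ()), the copied
		 call is rebuilt below as bar (x, 1, 2); the pack expands to
		 the caller's anonymous arguments.  */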
1851 	      tree p;
1852 	      gcall *new_call;
1853 	      vec<tree> argarray;
1854 	      size_t nargs = gimple_call_num_args (id->call_stmt);
1855 	      size_t n, i, nargs_to_copy;
1856 	      bool remove_bounds = false;
1857 
1858 	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1859 		nargs--;
1860 
1861 	      /* Bounds should be removed from the arg pack in case
1862 		 we handle a non-instrumented call in an instrumented
1863 		 function.  */
1864 	      nargs_to_copy = nargs;
1865 	      if (gimple_call_with_bounds_p (id->call_stmt)
1866 		  && !gimple_call_with_bounds_p (stmt))
1867 		{
1868 		  for (i = gimple_call_num_args (id->call_stmt) - nargs;
1869 		       i < gimple_call_num_args (id->call_stmt);
1870 		       i++)
1871 		    if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1872 		      nargs_to_copy--;
1873 		  remove_bounds = true;
1874 		}
1875 
1876 	      /* Create the new array of arguments.  */
1877 	      n = nargs_to_copy + gimple_call_num_args (call_stmt);
1878 	      argarray.create (n);
1879 	      argarray.safe_grow_cleared (n);
1880 
1881 	      /* Copy all the arguments before '...'  */
1882 	      memcpy (argarray.address (),
1883 		      gimple_call_arg_ptr (call_stmt, 0),
1884 		      gimple_call_num_args (call_stmt) * sizeof (tree));
1885 
1886 	      if (remove_bounds)
1887 		{
1888 		  /* Append the rest of the arguments, removing bounds.  */
1889 		  unsigned cur = gimple_call_num_args (call_stmt);
1891 		  for (i = gimple_call_num_args (id->call_stmt) - nargs;
1892 		       i < gimple_call_num_args (id->call_stmt);
1893 		       i++)
1894 		    if (!POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1895 		      argarray[cur++] = gimple_call_arg (id->call_stmt, i);
1896 		  gcc_assert (cur == n);
1897 		}
1898 	      else
1899 		{
1900 		  /* Append the arguments passed in '...'  */
1901 		  memcpy (argarray.address () + gimple_call_num_args (call_stmt),
1902 			  gimple_call_arg_ptr (id->call_stmt, 0)
1903 			  + (gimple_call_num_args (id->call_stmt) - nargs),
1904 			  nargs * sizeof (tree));
1905 		}
1906 
1907 	      new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
1908 						argarray);
1909 
1910 	      argarray.release ();
1911 
1912 	      /* Copy all GIMPLE_CALL flags, location and block, except
1913 		 GF_CALL_VA_ARG_PACK.  */
1914 	      gimple_call_copy_flags (new_call, call_stmt);
1915 	      gimple_call_set_va_arg_pack (new_call, false);
1916 	      gimple_set_location (new_call, gimple_location (stmt));
1917 	      gimple_set_block (new_call, gimple_block (stmt));
1918 	      gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
1919 
1920 	      gsi_replace (&copy_gsi, new_call, false);
1921 	      stmt = new_call;
1922 	    }
1923 	  else if (call_stmt
1924 		   && id->call_stmt
1925 		   && (decl = gimple_call_fndecl (stmt))
1926 		   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1927 		   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN
1928 		   && ! gimple_call_va_arg_pack_p (id->call_stmt))
1929 	    {
1930 	      /* __builtin_va_arg_pack_len () should be replaced by
1931 		 the number of anonymous arguments.  */
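	      /* Continuing the illustrative foo/bar sketch above: for a
		 caller foo (x, 1, 2) of foo (int x, ...), the call to
		 __builtin_va_arg_pack_len () folds to the constant 2,
		 with bounds of instrumented calls excluded below.  */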
1932 	      size_t nargs = gimple_call_num_args (id->call_stmt), i;
1933 	      tree count, p;
1934 	      gimple *new_stmt;
1935 
1936 	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1937 		nargs--;
1938 
1939 	      /* For instrumented calls we should ignore bounds.  */
1940 	      for (i = gimple_call_num_args (id->call_stmt) - nargs;
1941 		   i < gimple_call_num_args (id->call_stmt);
1942 		   i++)
1943 		if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1944 		  nargs--;
1945 
1946 	      count = build_int_cst (integer_type_node, nargs);
1947 	      new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1948 	      gsi_replace (&copy_gsi, new_stmt, false);
1949 	      stmt = new_stmt;
1950 	    }
1951 	  else if (call_stmt
1952 		   && id->call_stmt
1953 		   && gimple_call_internal_p (stmt)
1954 		   && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
1955 	    {
1956 	      /* Drop TSAN_FUNC_EXIT () internal calls during inlining.  */
1957 	      gsi_remove (&copy_gsi, false);
1958 	      continue;
1959 	    }
1960 
1961 	  /* Statements produced by inlining can be unfolded, especially
1962 	     when we constant propagated some operands.  We can't fold
1963 	     them right now for two reasons:
1964 	     1) folding requires SSA_NAME_DEF_STMTs to be correct
1965 	     2) we can't change function calls to builtins.
1966 	     So we just mark the statement for later folding.  We mark
1967 	     all new statements, instead of just statements that have changed
1968 	     by some nontrivial substitution, so even statements made
1969 	     foldable indirectly are updated.  If this turns out to be
1970 	     expensive, copy_body can be told to watch for nontrivial
1971 	     changes.  */
1972 	  if (id->statements_to_fold)
1973 	    id->statements_to_fold->add (stmt);
1974 
1975 	  /* We're duplicating a CALL_EXPR.  Find any corresponding
1976 	     callgraph edges and update or duplicate them.  */
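	  /* The switch below handles three modes: CB_CGE_DUPLICATE clones the
	     edge found on the original statement in the source node (the
	     inlining case), CB_CGE_MOVE_CLONES redirects the statement in the
	     destination node and in all of its clones, and CB_CGE_MOVE
	     redirects only the destination node's own edge.  */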
1977 	  if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
1978 	    {
1979 	      struct cgraph_edge *edge;
1980 
1981 	      switch (id->transform_call_graph_edges)
1982 		{
1983 		case CB_CGE_DUPLICATE:
1984 		  edge = id->src_node->get_edge (orig_stmt);
1985 		  if (edge)
1986 		    {
1987 		      int edge_freq = edge->frequency;
1988 		      int new_freq;
1989 		      struct cgraph_edge *old_edge = edge;
1990 		      edge = edge->clone (id->dst_node, call_stmt,
1991 					  gimple_uid (stmt),
1992 					  REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1993 					  true);
1994 		      /* We could also just rescale the frequency, but
1995 		         doing so would introduce roundoff errors and make
1996 			 the verifier unhappy.  */
1997 		      new_freq  = compute_call_stmt_bb_frequency (id->dst_node->decl,
1998 								  copy_basic_block);
1999 
2000 		      /* Speculative calls consist of two edges - direct and indirect.
2001 			 Duplicate the whole thing and distribute frequencies accordingly.  */
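		      /* A sketch of the split below: if the direct and
			 indirect parts had frequencies fd and fi, the clones
			 get new_freq * fd / (fd + fi) and
			 new_freq * fi / (fd + fi), capped at
			 CGRAPH_FREQ_MAX.  */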
2002 		      if (edge->speculative)
2003 			{
2004 			  struct cgraph_edge *direct, *indirect;
2005 			  struct ipa_ref *ref;
2006 
2007 			  gcc_assert (!edge->indirect_unknown_callee);
2008 			  old_edge->speculative_call_info (direct, indirect, ref);
2009 			  indirect = indirect->clone (id->dst_node, call_stmt,
2010 						      gimple_uid (stmt),
2011 						      REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
2012 						      true);
2013 			  if (old_edge->frequency + indirect->frequency)
2014 			    {
2015 			      edge->frequency = MIN (RDIV ((gcov_type)new_freq * old_edge->frequency,
2016 						           (old_edge->frequency + indirect->frequency)),
2017 						     CGRAPH_FREQ_MAX);
2018 			      indirect->frequency = MIN (RDIV ((gcov_type)new_freq * indirect->frequency,
2019 							       (old_edge->frequency + indirect->frequency)),
2020 							 CGRAPH_FREQ_MAX);
2021 			    }
2022 			  id->dst_node->clone_reference (ref, stmt);
2023 			}
2024 		      else
2025 			{
2026 			  edge->frequency = new_freq;
2027 			  if (dump_file
2028 			      && profile_status_for_fn (cfun) != PROFILE_ABSENT
2029 			      && (edge_freq > edge->frequency + 10
2030 				  || edge_freq < edge->frequency - 10))
2031 			    {
2032 			      fprintf (dump_file, "Edge frequency estimated by "
2033 				       "cgraph %i diverge from inliner's estimate %i\n",
2034 				       edge_freq,
2035 				       edge->frequency);
2036 			      fprintf (dump_file,
2037 				       "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
2038 				       bb->index,
2039 				       bb->frequency,
2040 				       copy_basic_block->frequency);
2041 			    }
2042 			}
2043 		    }
2044 		  break;
2045 
2046 		case CB_CGE_MOVE_CLONES:
2047 		  id->dst_node->set_call_stmt_including_clones (orig_stmt,
2048 								call_stmt);
2049 		  edge = id->dst_node->get_edge (stmt);
2050 		  break;
2051 
2052 		case CB_CGE_MOVE:
2053 		  edge = id->dst_node->get_edge (orig_stmt);
2054 		  if (edge)
2055 		    edge->set_call_stmt (call_stmt);
2056 		  break;
2057 
2058 		default:
2059 		  gcc_unreachable ();
2060 		}
2061 
2062 	      /* Constant propagation on argument done during inlining
2063 		 may create new direct call.  Produce an edge for it.  */
2064 	      if ((!edge
2065 		   || (edge->indirect_inlining_edge
2066 		       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2067 		  && id->dst_node->definition
2068 		  && (fn = gimple_call_fndecl (stmt)) != NULL)
2069 		{
2070 		  struct cgraph_node *dest = cgraph_node::get (fn);
2071 
2072 		  /* We have a missing edge in the callgraph.  This can happen
2073 		     when previous inlining turned an indirect call into a
2074 		     direct call by constant propagating arguments, or when we
2075 		     are producing a dead clone (for further cloning).  In all
2076 		     other cases we hit a bug (incorrect node sharing is the
2077 		     most common reason for missing edges).  */
2078 		  gcc_assert (!dest->definition
2079 			      || dest->address_taken
2080 		  	      || !id->src_node->definition
2081 			      || !id->dst_node->definition);
2082 		  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2083 		    id->dst_node->create_edge_including_clones
2084 		      (dest, orig_stmt, call_stmt, bb->count,
2085 		       compute_call_stmt_bb_frequency (id->dst_node->decl,
2086 		       				       copy_basic_block),
2087 		       CIF_ORIGINALLY_INDIRECT_CALL);
2088 		  else
2089 		    id->dst_node->create_edge (dest, call_stmt,
2090 					bb->count,
2091 					compute_call_stmt_bb_frequency
2092 					  (id->dst_node->decl,
2093 					   copy_basic_block))->inline_failed
2094 		      = CIF_ORIGINALLY_INDIRECT_CALL;
2095 		  if (dump_file)
2096 		    {
2097 		      fprintf (dump_file, "Created new direct edge to %s\n",
2098 			       dest->name ());
2099 		    }
2100 		}
2101 
2102 	      notice_special_calls (as_a <gcall *> (stmt));
2103 	    }
2104 
2105 	  maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2106 				      id->eh_map, id->eh_lp_nr);
2107 
2108 	  if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
2109 	    {
2110 	      ssa_op_iter i;
2111 	      tree def;
2112 
2113 	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
2114 		if (TREE_CODE (def) == SSA_NAME)
2115 		  SSA_NAME_DEF_STMT (def) = stmt;
2116 	    }
2117 
2118 	  gsi_next (&copy_gsi);
2119 	}
2120       while (!gsi_end_p (copy_gsi));
2121 
2122       copy_gsi = gsi_last_bb (copy_basic_block);
2123     }
2124 
2125   return copy_basic_block;
2126 }
2127 
2128 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
2129    form is quite easy, since the dominator relationship for old basic blocks does
2130    not change.
2131 
2132    There is, however, an exception: inlining might change the dominator
2133    relation across EH edges from basic blocks within the inlined function
2134    to landing pads in the function we inline into.
2135 
2136    The function fills in the PHI_RESULTs of such PHI nodes if they refer
2137    to gimple regs.  Otherwise, it marks the PHI_RESULT of such
2138    PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
2139    EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2140    set, and this means that there will be no overlapping live ranges
2141    for the underlying symbol.
2142 
2143    This might change in the future if we allow redirecting of EH edges;
2144    we might then want to change the way the CFG is built pre-inlining to
2145    include all the possible edges.  */
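/* Concretely: for each abnormal or EH edge from BB to a block outside the
   copied region (typically a landing pad in the caller), the PHI argument on
   that new edge is simply copied from the argument the destination block
   already has on its edge from RET_BB.  */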
2146 static void
2147 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2148 				  bool can_throw, bool nonlocal_goto)
2149 {
2150   edge e;
2151   edge_iterator ei;
2152 
2153   FOR_EACH_EDGE (e, ei, bb->succs)
2154     if (!e->dest->aux
2155 	|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2156       {
2157 	gphi *phi;
2158 	gphi_iterator si;
2159 
2160 	if (!nonlocal_goto)
2161 	  gcc_assert (e->flags & EDGE_EH);
2162 
2163 	if (!can_throw)
2164 	  gcc_assert (!(e->flags & EDGE_EH));
2165 
2166 	for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2167 	  {
2168 	    edge re;
2169 
2170 	    phi = si.phi ();
2171 
2172 	    /* For abnormal goto/call edges the receiver can be the
2173 	       ENTRY_BLOCK.  Do not assert this cannot happen.  */
2174 
2175 	    gcc_assert ((e->flags & EDGE_EH)
2176 			|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2177 
2178 	    re = find_edge (ret_bb, e->dest);
2179 	    gcc_checking_assert (re);
2180 	    gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2181 			== (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2182 
2183 	    SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2184 		     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2185 	  }
2186       }
2187 }
2188 
2189 
2190 /* Copy edges from BB into its copy constructed earlier, scaling the profile
2191    accordingly.  Edges will be taken care of later.  Assume the aux
2192    pointers point to the copies of each BB.  Return true if any
2193    debug stmts are left after a statement that must end the basic block.  */
2194 
2195 static bool
2196 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
2197 		   basic_block abnormal_goto_dest)
2198 {
2199   basic_block new_bb = (basic_block) bb->aux;
2200   edge_iterator ei;
2201   edge old_edge;
2202   gimple_stmt_iterator si;
2203   int flags;
2204   bool need_debug_cleanup = false;
2205 
2206   /* Use the indices from the original blocks to create edges for the
2207      new ones.  */
2208   FOR_EACH_EDGE (old_edge, ei, bb->succs)
2209     if (!(old_edge->flags & EDGE_EH))
2210       {
2211 	edge new_edge;
2212 
2213 	flags = old_edge->flags;
2214 
2215 	/* Return edges do get a FALLTHRU flag when they get inlined.  */
2216 	if (old_edge->dest->index == EXIT_BLOCK
2217 	    && !(old_edge->flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2218 	    && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2219 	  flags |= EDGE_FALLTHRU;
2220 	new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2221 	new_edge->count = apply_scale (old_edge->count, count_scale);
2222 	new_edge->probability = old_edge->probability;
2223       }
2224 
2225   if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2226     return false;
2227 
2228   for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2229     {
2230       gimple *copy_stmt;
2231       bool can_throw, nonlocal_goto;
2232 
2233       copy_stmt = gsi_stmt (si);
2234       if (!is_gimple_debug (copy_stmt))
2235 	update_stmt (copy_stmt);
2236 
2237       /* Do this before the possible split_block.  */
2238       gsi_next (&si);
2239 
2240       /* If this tree could throw an exception, there are two
2241          cases where we need to add abnormal edge(s): the
2242          tree wasn't in a region and there is a "current
2243          region" in the caller; or the original tree had
2244          EH edges.  In both cases split the block after the tree,
2245          and add abnormal edge(s) as needed; we need both
2246          those from the callee and the caller.
2247          We check whether the copy can throw, because the const
2248          propagation can change an INDIRECT_REF which throws
2249          into a COMPONENT_REF which doesn't.  If the copy
2250          can throw, the original could also throw.  */
2251       can_throw = stmt_can_throw_internal (copy_stmt);
2252       nonlocal_goto
2253 	= (stmt_can_make_abnormal_goto (copy_stmt)
2254 	   && !computed_goto_p (copy_stmt));
2255 
2256       if (can_throw || nonlocal_goto)
2257 	{
2258 	  if (!gsi_end_p (si))
2259 	    {
2260 	      while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2261 		gsi_next (&si);
2262 	      if (gsi_end_p (si))
2263 		need_debug_cleanup = true;
2264 	    }
2265 	  if (!gsi_end_p (si))
2266 	    /* Note that bb's predecessor edges aren't necessarily
2267 	       right at this point; split_block doesn't care.  */
2268 	    {
2269 	      edge e = split_block (new_bb, copy_stmt);
2270 
2271 	      new_bb = e->dest;
2272 	      new_bb->aux = e->src->aux;
2273 	      si = gsi_start_bb (new_bb);
2274 	    }
2275 	}
2276 
2277       if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2278 	make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2279       else if (can_throw)
2280 	make_eh_edges (copy_stmt);
2281 
2282       /* If the call we inline cannot make abnormal goto do not add
2283          additional abnormal edges but only retain those already present
2284 	 in the original function body.  */
2285       if (abnormal_goto_dest == NULL)
2286 	nonlocal_goto = false;
2287       if (nonlocal_goto)
2288 	{
2289 	  basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2290 
2291 	  if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2292 	    nonlocal_goto = false;
2293 	  /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2294 	     in OpenMP regions which aren't allowed to be left abnormally.
2295 	     So, no need to add abnormal edge in that case.  */
2296 	  else if (is_gimple_call (copy_stmt)
2297 		   && gimple_call_internal_p (copy_stmt)
2298 		   && (gimple_call_internal_fn (copy_stmt)
2299 		       == IFN_ABNORMAL_DISPATCHER)
2300 		   && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2301 	    nonlocal_goto = false;
2302 	  else
2303 	    make_edge (copy_stmt_bb, abnormal_goto_dest, EDGE_ABNORMAL);
2304 	}
2305 
2306       if ((can_throw || nonlocal_goto)
2307 	  && gimple_in_ssa_p (cfun))
2308 	update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2309 					  can_throw, nonlocal_goto);
2310     }
2311   return need_debug_cleanup;
2312 }
2313 
2314 /* Copy the PHIs.  All blocks and edges have been copied, some blocks
2315    were possibly split and new outgoing EH edges inserted.
2316    BB points to the block of the original function and AUX pointers link
2317    the original and newly copied blocks.  */
2318 
2319 static void
2320 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2321 {
2322   basic_block const new_bb = (basic_block) bb->aux;
2323   edge_iterator ei;
2324   gphi *phi;
2325   gphi_iterator si;
2326   edge new_edge;
2327   bool inserted = false;
2328 
2329   for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2330     {
2331       tree res, new_res;
2332       gphi *new_phi;
2333 
2334       phi = si.phi ();
2335       res = PHI_RESULT (phi);
2336       new_res = res;
2337       if (!virtual_operand_p (res))
2338 	{
2339 	  walk_tree (&new_res, copy_tree_body_r, id, NULL);
2340 	  new_phi = create_phi_node (new_res, new_bb);
2341 	  if (EDGE_COUNT (new_bb->preds) == 0)
2342 	    {
2343 	      /* Technically we'd want a SSA_DEFAULT_DEF here... */
2344 	      SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2345 	    }
2346 	  else
2347 	    FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2348 	      {
2349 		edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2350 		tree arg;
2351 		tree new_arg;
2352 		edge_iterator ei2;
2353 		location_t locus;
2354 
2355 		/* When doing partial cloning, we allow PHIs on the entry block
2356 		   as long as all the arguments are the same.  Find any input
2357 		   edge to get the argument to copy.  */
2358 		if (!old_edge)
2359 		  FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2360 		    if (!old_edge->src->aux)
2361 		      break;
2362 
2363 		arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2364 		new_arg = arg;
2365 		walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2366 		gcc_assert (new_arg);
2367 		/* With return slot optimization we can end up with
2368 		   non-gimple (foo *)&this->m, fix that here.  */
2369 		if (TREE_CODE (new_arg) != SSA_NAME
2370 		    && TREE_CODE (new_arg) != FUNCTION_DECL
2371 		    && !is_gimple_val (new_arg))
2372 		  {
2373 		    gimple_seq stmts = NULL;
2374 		    new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2375 		    gsi_insert_seq_on_edge (new_edge, stmts);
2376 		    inserted = true;
2377 		  }
2378 		locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2379 		if (LOCATION_BLOCK (locus))
2380 		  {
2381 		    tree *n;
2382 		    n = id->decl_map->get (LOCATION_BLOCK (locus));
2383 		    gcc_assert (n);
2384 		    locus = set_block (locus, *n);
2385 		  }
2386 		else
2387 		  locus = LOCATION_LOCUS (locus);
2388 
2389 		add_phi_arg (new_phi, new_arg, new_edge, locus);
2390 	      }
2391 	}
2392     }
2393 
2394   /* Commit the delayed edge insertions.  */
2395   if (inserted)
2396     FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2397       gsi_commit_one_edge_insert (new_edge, NULL);
2398 }
2399 
2400 
2401 /* Wrapper for remap_decl so it can be used as a callback.  */
2402 
2403 static tree
2404 remap_decl_1 (tree decl, void *data)
2405 {
2406   return remap_decl (decl, (copy_body_data *) data);
2407 }
2408 
2409 /* Build the struct function and associated datastructures for the new clone
2410    NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  This function
2411    changes cfun to the function of new_fndecl (and current_function_decl too).  */
2412 
2413 static void
2414 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2415 {
2416   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2417   gcov_type count_scale;
2418 
2419   if (!DECL_ARGUMENTS (new_fndecl))
2420     DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2421   if (!DECL_RESULT (new_fndecl))
2422     DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2423 
2424   if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2425     count_scale
2426         = GCOV_COMPUTE_SCALE (count,
2427                               ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2428   else
2429     count_scale = REG_BR_PROB_BASE;
2430 
2431   /* Register specific tree functions.  */
2432   gimple_register_cfg_hooks ();
2433 
2434   /* Get clean struct function.  */
2435   push_struct_function (new_fndecl);
2436 
2437   /* We will rebuild these, so just sanity check that they are empty.  */
2438   gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2439   gcc_assert (cfun->local_decls == NULL);
2440   gcc_assert (cfun->cfg == NULL);
2441   gcc_assert (cfun->decl == new_fndecl);
2442 
2443   /* Copy items we preserve during cloning.  */
2444   cfun->static_chain_decl = src_cfun->static_chain_decl;
2445   cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2446   cfun->function_end_locus = src_cfun->function_end_locus;
2447   cfun->curr_properties = src_cfun->curr_properties;
2448   cfun->last_verified = src_cfun->last_verified;
2449   cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2450   cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2451   cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2452   cfun->stdarg = src_cfun->stdarg;
2453   cfun->after_inlining = src_cfun->after_inlining;
2454   cfun->can_throw_non_call_exceptions
2455     = src_cfun->can_throw_non_call_exceptions;
2456   cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2457   cfun->returns_struct = src_cfun->returns_struct;
2458   cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2459 
2460   init_empty_tree_cfg ();
2461 
2462   profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2463   ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2464     (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2465      REG_BR_PROB_BASE);
2466   ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
2467     = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2468   EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2469     (EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2470      REG_BR_PROB_BASE);
2471   EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency =
2472     EXIT_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2473   if (src_cfun->eh)
2474     init_eh_for_function ();
2475 
2476   if (src_cfun->gimple_df)
2477     {
2478       init_tree_ssa (cfun);
2479       cfun->gimple_df->in_ssa_p = true;
2480       init_ssa_operands (cfun);
2481     }
2482 }
2483 
2484 /* Helper function for copy_cfg_body.  Move debug stmts from the end
2485    of NEW_BB to the beginning of successor basic blocks when needed.  If the
2486    successor has multiple predecessors, reset the debug stmts' values,
2487    otherwise keep them.  */
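/* A typical case (sketch): the copied block may end with a stmt that can
   throw internally or make an abnormal goto, followed only by debug binds;
   those trailing debug stmts cannot remain after the block-ending stmt, so
   they are moved or copied to the successors here.  */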
2488 
2489 static void
2490 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2491 {
2492   edge e;
2493   edge_iterator ei;
2494   gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2495 
2496   if (gsi_end_p (si)
2497       || gsi_one_before_end_p (si)
2498       || !(stmt_can_throw_internal (gsi_stmt (si))
2499 	   || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2500     return;
2501 
2502   FOR_EACH_EDGE (e, ei, new_bb->succs)
2503     {
2504       gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2505       gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2506       while (is_gimple_debug (gsi_stmt (ssi)))
2507 	{
2508 	  gimple *stmt = gsi_stmt (ssi);
2509 	  gdebug *new_stmt;
2510 	  tree var;
2511 	  tree value;
2512 
2513 	  /* For the last edge move the debug stmts instead of copying
2514 	     them.  */
2515 	  if (ei_one_before_end_p (ei))
2516 	    {
2517 	      si = ssi;
2518 	      gsi_prev (&ssi);
2519 	      if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2520 		gimple_debug_bind_reset_value (stmt);
2521 	      gsi_remove (&si, false);
2522 	      gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2523 	      continue;
2524 	    }
2525 
2526 	  if (gimple_debug_bind_p (stmt))
2527 	    {
2528 	      var = gimple_debug_bind_get_var (stmt);
2529 	      if (single_pred_p (e->dest))
2530 		{
2531 		  value = gimple_debug_bind_get_value (stmt);
2532 		  value = unshare_expr (value);
2533 		}
2534 	      else
2535 		value = NULL_TREE;
2536 	      new_stmt = gimple_build_debug_bind (var, value, stmt);
2537 	    }
2538 	  else if (gimple_debug_source_bind_p (stmt))
2539 	    {
2540 	      var = gimple_debug_source_bind_get_var (stmt);
2541 	      value = gimple_debug_source_bind_get_value (stmt);
2542 	      new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2543 	    }
2544 	  else
2545 	    gcc_unreachable ();
2546 	  gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2547 	  id->debug_stmts.safe_push (new_stmt);
2548 	  gsi_prev (&ssi);
2549 	}
2550     }
2551 }
2552 
2553 /* Make a copy of the sub-loops of SRC_PARENT and place them
2554    as sub-loops of DEST_PARENT.  */
2555 
2556 static void
2557 copy_loops (copy_body_data *id,
2558 	    struct loop *dest_parent, struct loop *src_parent)
2559 {
2560   struct loop *src_loop = src_parent->inner;
2561   while (src_loop)
2562     {
2563       if (!id->blocks_to_copy
2564 	  || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2565 	{
2566 	  struct loop *dest_loop = alloc_loop ();
2567 
2568 	  /* Assign the new loop its header and latch and associate
2569 	     those with the new loop.  */
2570 	  dest_loop->header = (basic_block)src_loop->header->aux;
2571 	  dest_loop->header->loop_father = dest_loop;
2572 	  if (src_loop->latch != NULL)
2573 	    {
2574 	      dest_loop->latch = (basic_block)src_loop->latch->aux;
2575 	      dest_loop->latch->loop_father = dest_loop;
2576 	    }
2577 
2578 	  /* Copy loop meta-data.  */
2579 	  copy_loop_info (src_loop, dest_loop);
2580 
2581 	  /* Finally place it into the loop array and the loop tree.  */
2582 	  place_new_loop (cfun, dest_loop);
2583 	  flow_loop_tree_node_add (dest_parent, dest_loop);
2584 
2585 	  dest_loop->safelen = src_loop->safelen;
2586 	  dest_loop->dont_vectorize = src_loop->dont_vectorize;
2587 	  if (src_loop->force_vectorize)
2588 	    {
2589 	      dest_loop->force_vectorize = true;
2590 	      cfun->has_force_vectorize_loops = true;
2591 	    }
2592 	  if (src_loop->simduid)
2593 	    {
2594 	      dest_loop->simduid = remap_decl (src_loop->simduid, id);
2595 	      cfun->has_simduid_loops = true;
2596 	    }
2597 
2598 	  /* Recurse.  */
2599 	  copy_loops (id, dest_loop, src_loop);
2600 	}
2601       src_loop = src_loop->next;
2602     }
2603 }
2604 
2605 /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB.  */
2606 
2607 void
2608 redirect_all_calls (copy_body_data * id, basic_block bb)
2609 {
2610   gimple_stmt_iterator si;
2611   gimple *last = last_stmt (bb);
2612   for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2613     {
2614       gimple *stmt = gsi_stmt (si);
2615       if (is_gimple_call (stmt))
2616 	{
2617 	  struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2618 	  if (edge)
2619 	    {
2620 	      edge->redirect_call_stmt_to_callee ();
2621 	      if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2622 		gimple_purge_dead_eh_edges (bb);
2623 	    }
2624 	}
2625     }
2626 }
2627 
2628 /* Convert estimated frequencies into counts for NODE, scaling COUNT
2629    with each bb's frequency. Used when NODE has a 0-weight entry
2630    but we are about to inline it into a non-zero count call bb.
2631    See the comments for handle_missing_profiles() in predict.c for
2632    when this can happen for COMDATs.  */
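/* Roughly, the scaling below amounts to
     bb->count = COUNT * bb->frequency / BB_FREQ_MAX
   for every block and
     e->count = e->src->count * e->probability / REG_BR_PROB_BASE
   for every outgoing edge, modulo the rounding done by the helpers.  */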
2633 
2634 void
2635 freqs_to_counts (struct cgraph_node *node, gcov_type count)
2636 {
2637   basic_block bb;
2638   edge_iterator ei;
2639   edge e;
2640   struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2641 
2642   FOR_ALL_BB_FN(bb, fn)
2643     {
2644       bb->count = apply_scale (count,
2645                                GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
2646       FOR_EACH_EDGE (e, ei, bb->succs)
2647         e->count = apply_probability (e->src->count, e->probability);
2648     }
2649 }
2650 
2651 /* Make a copy of the body of FN so that it can be inserted inline in
2652    another function.  Walks FN via CFG, returns new fndecl.  */
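/* The overall flow below: compute the count scale, copy each requested block
   with copy_bb, then copy the edges (copy_edges_for_bb), the loop tree
   (copy_loops) and the PHIs (copy_phis_for_bb), and finally clear the aux
   pointers, move trailing debug stmts and redirect call-graph edges.  */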
2653 
2654 static tree
2655 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2656 	       basic_block entry_block_map, basic_block exit_block_map,
2657 	       basic_block new_entry)
2658 {
2659   tree callee_fndecl = id->src_fn;
2660   /* Original cfun for the callee, doesn't change.  */
2661   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2662   struct function *cfun_to_copy;
2663   basic_block bb;
2664   tree new_fndecl = NULL;
2665   bool need_debug_cleanup = false;
2666   gcov_type count_scale;
2667   int last;
2668   int incoming_frequency = 0;
2669   gcov_type incoming_count = 0;
2670 
2671   /* This can happen for COMDAT routines that end up with 0 counts
2672      despite being called (see the comments for handle_missing_profiles()
2673      in predict.c as to why). Apply counts to the blocks in the callee
2674      before inlining, using the guessed edge frequencies, so that we don't
2675      end up with a 0-count inline body which can confuse downstream
2676      optimizations such as function splitting.  */
2677   if (!ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count && count)
2678     {
2679       /* Apply the larger of the call bb count and the total incoming
2680          call edge count to the callee.  */
2681       gcov_type in_count = 0;
2682       struct cgraph_edge *in_edge;
2683       for (in_edge = id->src_node->callers; in_edge;
2684            in_edge = in_edge->next_caller)
2685         in_count += in_edge->count;
2686       freqs_to_counts (id->src_node, count > in_count ? count : in_count);
2687     }
2688 
2689   if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2690     count_scale
2691         = GCOV_COMPUTE_SCALE (count,
2692                               ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2693   else
2694     count_scale = REG_BR_PROB_BASE;
2695 
2696   /* Register specific tree functions.  */
2697   gimple_register_cfg_hooks ();
2698 
2699   /* If we are inlining just a region of the function, make sure to connect
2700      the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun).  Since the new entry can
2701      be part of a loop, we must compute the frequency and probability of
2702      ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2703      probabilities of edges incoming from the nonduplicated region.  */
2704   if (new_entry)
2705     {
2706       edge e;
2707       edge_iterator ei;
2708 
2709       FOR_EACH_EDGE (e, ei, new_entry->preds)
2710 	if (!e->src->aux)
2711 	  {
2712 	    incoming_frequency += EDGE_FREQUENCY (e);
2713 	    incoming_count += e->count;
2714 	  }
2715       incoming_count = apply_scale (incoming_count, count_scale);
2716       incoming_frequency
2717 	= apply_scale ((gcov_type)incoming_frequency, frequency_scale);
2718       ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = incoming_count;
2719       ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = incoming_frequency;
2720     }
2721 
2722   /* Must have a CFG here at this point.  */
2723   gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2724 	      (DECL_STRUCT_FUNCTION (callee_fndecl)));
2725 
2726   cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2727 
2728   ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2729   EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2730   entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2731   exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2732 
2733   /* Duplicate any exception-handling regions.  */
2734   if (cfun->eh)
2735     id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2736 				       remap_decl_1, id);
2737 
2738   /* Use aux pointers to map the original blocks to their copies.  */
2739   FOR_EACH_BB_FN (bb, cfun_to_copy)
2740     if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2741       {
2742 	basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2743 	bb->aux = new_bb;
2744 	new_bb->aux = bb;
2745 	new_bb->loop_father = entry_block_map->loop_father;
2746       }
2747 
2748   last = last_basic_block_for_fn (cfun);
2749 
2750   /* Now that we've duplicated the blocks, duplicate their edges.  */
2751   basic_block abnormal_goto_dest = NULL;
2752   if (id->call_stmt
2753       && stmt_can_make_abnormal_goto (id->call_stmt))
2754     {
2755       gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2756 
2757       bb = gimple_bb (id->call_stmt);
2758       gsi_next (&gsi);
2759       if (gsi_end_p (gsi))
2760 	abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2761     }
2762   FOR_ALL_BB_FN (bb, cfun_to_copy)
2763     if (!id->blocks_to_copy
2764 	|| (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2765       need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
2766 					       abnormal_goto_dest);
2767 
2768   if (new_entry)
2769     {
2770       edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2771       e->probability = REG_BR_PROB_BASE;
2772       e->count = incoming_count;
2773     }
2774 
2775   /* Duplicate the loop tree, if available and wanted.  */
2776   if (loops_for_fn (src_cfun) != NULL
2777       && current_loops != NULL)
2778     {
2779       copy_loops (id, entry_block_map->loop_father,
2780 		  get_loop (src_cfun, 0));
2781       /* Defer to cfgcleanup to update loop-father fields of basic-blocks.  */
2782       loops_state_set (LOOPS_NEED_FIXUP);
2783     }
2784 
2785   /* If the loop tree in the source function needed fixup, mark the
2786      destination loop tree for fixup, too.  */
2787   if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2788     loops_state_set (LOOPS_NEED_FIXUP);
2789 
2790   if (gimple_in_ssa_p (cfun))
2791     FOR_ALL_BB_FN (bb, cfun_to_copy)
2792       if (!id->blocks_to_copy
2793 	  || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2794 	copy_phis_for_bb (bb, id);
2795 
2796   FOR_ALL_BB_FN (bb, cfun_to_copy)
2797     if (bb->aux)
2798       {
2799 	if (need_debug_cleanup
2800 	    && bb->index != ENTRY_BLOCK
2801 	    && bb->index != EXIT_BLOCK)
2802 	  maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2803 	/* Update call edge destinations.  This can not be done before loop
2804 	   info is updated, because we may split basic blocks.  */
2805 	if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2806 	    && bb->index != ENTRY_BLOCK
2807 	    && bb->index != EXIT_BLOCK)
2808 	  redirect_all_calls (id, (basic_block)bb->aux);
2809 	((basic_block)bb->aux)->aux = NULL;
2810 	bb->aux = NULL;
2811       }
2812 
2813   /* Zero out AUX fields of the blocks newly created during EH edge
2814      insertion.  */
2815   for (; last < last_basic_block_for_fn (cfun); last++)
2816     {
2817       if (need_debug_cleanup)
2818 	maybe_move_debug_stmts_to_successors (id,
2819 					      BASIC_BLOCK_FOR_FN (cfun, last));
2820       BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2821       /* Update call edge destinations.  This can not be done before loop
2822 	 info is updated, because we may split basic blocks.  */
2823       if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2824 	redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2825     }
2826   entry_block_map->aux = NULL;
2827   exit_block_map->aux = NULL;
2828 
2829   if (id->eh_map)
2830     {
2831       delete id->eh_map;
2832       id->eh_map = NULL;
2833     }
2834   if (id->dependence_map)
2835     {
2836       delete id->dependence_map;
2837       id->dependence_map = NULL;
2838     }
2839 
2840   return new_fndecl;
2841 }
2842 
2843 /* Copy the debug STMT using ID.  We deal with these statements in a
2844    special way: if any variable in their VALUE expression wasn't
2845    remapped yet, we won't remap it, because that would get decl uids
2846    out of sync, causing codegen differences between -g and -g0.  If
2847    this arises, we drop the VALUE expression altogether.  */
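/* In short: the variable and value of the bind are remapped like any other
   operands, but if some decl in the value could not be remapped (signalled
   below by processing_debug_stmt going negative), the value is dropped.  */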
2848 
2849 static void
2850 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
2851 {
2852   tree t, *n;
2853   struct walk_stmt_info wi;
2854 
2855   if (gimple_block (stmt))
2856     {
2857       n = id->decl_map->get (gimple_block (stmt));
2858       gimple_set_block (stmt, n ? *n : id->block);
2859     }
2860 
2861   /* Remap all the operands in COPY.  */
2862   memset (&wi, 0, sizeof (wi));
2863   wi.info = id;
2864 
2865   processing_debug_stmt = 1;
2866 
2867   if (gimple_debug_source_bind_p (stmt))
2868     t = gimple_debug_source_bind_get_var (stmt);
2869   else
2870     t = gimple_debug_bind_get_var (stmt);
2871 
2872   if (TREE_CODE (t) == PARM_DECL && id->debug_map
2873       && (n = id->debug_map->get (t)))
2874     {
2875       gcc_assert (TREE_CODE (*n) == VAR_DECL);
2876       t = *n;
2877     }
2878   else if (TREE_CODE (t) == VAR_DECL
2879 	   && !is_global_var (t)
2880 	   && !id->decl_map->get (t))
2881     /* T is a non-localized variable.  */;
2882   else
2883     walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2884 
2885   if (gimple_debug_bind_p (stmt))
2886     {
2887       gimple_debug_bind_set_var (stmt, t);
2888 
2889       if (gimple_debug_bind_has_value_p (stmt))
2890 	walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2891 		   remap_gimple_op_r, &wi, NULL);
2892 
2893       /* Punt if any decl couldn't be remapped.  */
2894       if (processing_debug_stmt < 0)
2895 	gimple_debug_bind_reset_value (stmt);
2896     }
2897   else if (gimple_debug_source_bind_p (stmt))
2898     {
2899       gimple_debug_source_bind_set_var (stmt, t);
2900       /* When inlining and source bind refers to one of the optimized
2901 	 away parameters, change the source bind into normal debug bind
2902 	 referring to the corresponding DEBUG_EXPR_DECL that should have
2903 	 been bound before the call stmt.  */
2904       t = gimple_debug_source_bind_get_value (stmt);
2905       if (t != NULL_TREE
2906 	  && TREE_CODE (t) == PARM_DECL
2907 	  && id->call_stmt)
2908 	{
2909 	  vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2910 	  unsigned int i;
2911 	  if (debug_args != NULL)
2912 	    {
2913 	      for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2914 		if ((**debug_args)[i] == DECL_ORIGIN (t)
2915 		    && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2916 		  {
2917 		    t = (**debug_args)[i + 1];
2918 		    stmt->subcode = GIMPLE_DEBUG_BIND;
2919 		    gimple_debug_bind_set_value (stmt, t);
2920 		    break;
2921 		  }
2922 	    }
2923 	}
2924       if (gimple_debug_source_bind_p (stmt))
2925 	walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2926 		   remap_gimple_op_r, &wi, NULL);
2927     }
2928 
2929   processing_debug_stmt = 0;
2930 
2931   update_stmt (stmt);
2932 }
2933 
2934 /* Process deferred debug stmts.  In order to give values better odds
2935    of being successfully remapped, we delay the processing of debug
2936    stmts until all other stmts that might require remapping are
2937    processed.  */
2938 
2939 static void
2940 copy_debug_stmts (copy_body_data *id)
2941 {
2942   size_t i;
2943   gdebug *stmt;
2944 
2945   if (!id->debug_stmts.exists ())
2946     return;
2947 
2948   FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2949     copy_debug_stmt (stmt, id);
2950 
2951   id->debug_stmts.release ();
2952 }
2953 
2954 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2955    another function.  */
2956 
2957 static tree
2958 copy_tree_body (copy_body_data *id)
2959 {
2960   tree fndecl = id->src_fn;
2961   tree body = DECL_SAVED_TREE (fndecl);
2962 
2963   walk_tree (&body, copy_tree_body_r, id, NULL);
2964 
2965   return body;
2966 }
2967 
2968 /* Make a copy of the body of FN so that it can be inserted inline in
2969    another function.  */
2970 
2971 static tree
2972 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2973 	   basic_block entry_block_map, basic_block exit_block_map,
2974 	   basic_block new_entry)
2975 {
2976   tree fndecl = id->src_fn;
2977   tree body;
2978 
2979   /* If this body has a CFG, walk CFG and copy.  */
2980   gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
2981   body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2982 			new_entry);
2983   copy_debug_stmts (id);
2984 
2985   return body;
2986 }
2987 
2988 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2989    defined in function FN, or of a data member thereof.  */
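/* E.g. an argument like &local, where "local" is an automatic variable of FN
   itself, must not be propagated into a recursive inline copy of FN;
   setup_one_parameter uses this predicate to guard against that.  */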
2990 
2991 static bool
2992 self_inlining_addr_expr (tree value, tree fn)
2993 {
2994   tree var;
2995 
2996   if (TREE_CODE (value) != ADDR_EXPR)
2997     return false;
2998 
2999   var = get_base_address (TREE_OPERAND (value, 0));
3000 
3001   return var && auto_var_in_fn_p (var, fn);
3002 }
3003 
3004 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3005    lexical block and line number information from base_stmt, if given,
3006    or from the last stmt of the block otherwise.  */
3007 
3008 static gimple *
3009 insert_init_debug_bind (copy_body_data *id,
3010 			basic_block bb, tree var, tree value,
3011 			gimple *base_stmt)
3012 {
3013   gimple *note;
3014   gimple_stmt_iterator gsi;
3015   tree tracked_var;
3016 
3017   if (!gimple_in_ssa_p (id->src_cfun))
3018     return NULL;
3019 
3020   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3021     return NULL;
3022 
3023   tracked_var = target_for_debug_bind (var);
3024   if (!tracked_var)
3025     return NULL;
3026 
3027   if (bb)
3028     {
3029       gsi = gsi_last_bb (bb);
3030       if (!base_stmt && !gsi_end_p (gsi))
3031 	base_stmt = gsi_stmt (gsi);
3032     }
3033 
3034   note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3035 
3036   if (bb)
3037     {
3038       if (!gsi_end_p (gsi))
3039 	gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3040       else
3041 	gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3042     }
3043 
3044   return note;
3045 }
3046 
3047 static void
3048 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3049 {
3050   /* If VAR represents a zero-sized variable, it's possible that the
3051      assignment statement may result in no gimple statements.  */
3052   if (init_stmt)
3053     {
3054       gimple_stmt_iterator si = gsi_last_bb (bb);
3055 
3056       /* We can end up with init statements that store to a non-register
3057          from a rhs with a conversion.  Handle that here by forcing the
3058 	 rhs into a temporary.  gimple_regimplify_operands is not
3059 	 prepared to do this for us.  */
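      /* A hypothetical example: an init such as addressable_int = (int) f_tmp
	 would be rewritten below as tmp = (int) f_tmp; followed by the store
	 addressable_int = tmp.  */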
3060       if (!is_gimple_debug (init_stmt)
3061 	  && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3062 	  && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3063 	  && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3064 	{
3065 	  tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3066 			     gimple_expr_type (init_stmt),
3067 			     gimple_assign_rhs1 (init_stmt));
3068 	  rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3069 					  GSI_NEW_STMT);
3070 	  gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3071 	  gimple_assign_set_rhs1 (init_stmt, rhs);
3072 	}
3073       gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3074       gimple_regimplify_operands (init_stmt, &si);
3075 
3076       if (!is_gimple_debug (init_stmt))
3077 	{
3078 	  tree def = gimple_assign_lhs (init_stmt);
3079 	  insert_init_debug_bind (id, bb, def, def, init_stmt);
3080 	}
3081     }
3082 }
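/* A small sketch of the case handled above (hypothetical GIMPLE, for
   illustration only).  When the parameter copy lives in memory but the
   argument needs a conversion, an init statement such as

     mem_parm = (float) arg_2;

   is not valid GIMPLE, since a conversion may not feed a store directly.
   Forcing the rhs into a temporary yields

     _tmp = (float) arg_2;
     mem_parm = _tmp;

   which gimple_regimplify_operands can then finish up.  */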
3083 
3084 /* Initialize parameter P with VALUE.  If needed, produce init statement
3085    at the end of BB.  When BB is NULL, we return the init statement to be
3086    output later.  */
3087 static gimple *
3088 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3089 		     basic_block bb, tree *vars)
3090 {
3091   gimple *init_stmt = NULL;
3092   tree var;
3093   tree rhs = value;
3094   tree def = (gimple_in_ssa_p (cfun)
3095 	      ? ssa_default_def (id->src_cfun, p) : NULL);
3096 
3097   if (value
3098       && value != error_mark_node
3099       && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3100     {
3101       /* If we can match up types by promotion/demotion do so.  */
3102       if (fold_convertible_p (TREE_TYPE (p), value))
3103 	rhs = fold_convert (TREE_TYPE (p), value);
3104       else
3105 	{
3106 	  /* ???  For valid programs we should not end up here.
3107 	     Still if we end up with truly mismatched types here, fall back
3108 	     to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3109 	     GIMPLE to the following passes.  */
3110 	  if (!is_gimple_reg_type (TREE_TYPE (value))
3111 	      || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3112 	    rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3113 	  else
3114 	    rhs = build_zero_cst (TREE_TYPE (p));
3115 	}
3116     }
3117 
3118   /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
3119      here since the type of this decl must be visible to the calling
3120      function.  */
3121   var = copy_decl_to_var (p, id);
3122 
3123   /* Declare this new variable.  */
3124   DECL_CHAIN (var) = *vars;
3125   *vars = var;
3126 
3127   /* Make gimplifier happy about this variable.  */
3128   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3129 
3130   /* If the parameter is never assigned to and has no SSA_NAMEs created,
3131      we would not need to create a new variable here at all, if it
3132      weren't for debug info.  Still, we can just use the argument
3133      value.  */
3134   if (TREE_READONLY (p)
3135       && !TREE_ADDRESSABLE (p)
3136       && value && !TREE_SIDE_EFFECTS (value)
3137       && !def)
3138     {
3139       /* We may produce non-gimple trees by adding NOPs or introduce
3140 	 invalid sharing when the operand is not really constant.
3141 	 It is not a big deal to prohibit constant propagation here, as
3142 	 we will constant propagate in the DOM1 pass anyway.  */
3143       if (is_gimple_min_invariant (value)
3144 	  && useless_type_conversion_p (TREE_TYPE (p),
3145 						 TREE_TYPE (value))
3146 	  /* We have to be very careful about ADDR_EXPR.  Make sure
3147 	     the base variable isn't a local variable of the inlined
3148 	     function, e.g., when doing recursive inlining, direct or
3149 	     mutually-recursive or whatever, which is why we don't
3150 	     just test whether fn == current_function_decl.  */
3151 	  && ! self_inlining_addr_expr (value, fn))
3152 	{
3153 	  insert_decl_map (id, p, value);
3154 	  insert_debug_decl_map (id, p, var);
3155 	  return insert_init_debug_bind (id, bb, var, value, NULL);
3156 	}
3157     }
3158 
3159   /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3160      that way, when the PARM_DECL is encountered, it will be
3161      automatically replaced by the VAR_DECL.  */
3162   insert_decl_map (id, p, var);
3163 
3164   /* Even if P was TREE_READONLY, the new VAR should not be.
3165      In the original code, we would have constructed a
3166      temporary, and then the function body would have never
3167      changed the value of P.  However, now, we will be
3168      constructing VAR directly.  The constructor body may
3169      change its value multiple times as it is being
3170      constructed.  Therefore, it must not be TREE_READONLY;
3171      the back-end assumes that TREE_READONLY variable is
3172      assigned to only once.  */
3173   if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3174     TREE_READONLY (var) = 0;
3175 
3176   /* If there is no setup required and we are in SSA, take the easy route
3177      replacing all SSA names representing the function parameter by the
3178      SSA name passed to the function.
3179 
3180      We need to construct a map for the variable anyway, as it might be
3181      used in different SSA names when the parameter is set in the function.
3182 
3183      Do replacement at -O0 for const arguments replaced by constant.
3184      This is important for builtin_constant_p and other constructs requiring
3185      a constant argument to be visible in the inlined function body.  */
3186   if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3187       && (optimize
3188           || (TREE_READONLY (p)
3189 	      && is_gimple_min_invariant (rhs)))
3190       && (TREE_CODE (rhs) == SSA_NAME
3191 	  || is_gimple_min_invariant (rhs))
3192       && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3193     {
3194       insert_decl_map (id, def, rhs);
3195       return insert_init_debug_bind (id, bb, var, rhs, NULL);
3196     }
3197 
3198   /* If the value of the argument is never used, don't bother initializing
3199      it.  */
3200   if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3201     {
3202       gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3203       return insert_init_debug_bind (id, bb, var, rhs, NULL);
3204     }
3205 
3206   /* Initialize this VAR_DECL from the equivalent argument.  Convert
3207      the argument to the proper type in case it was promoted.  */
3208   if (value)
3209     {
3210       if (rhs == error_mark_node)
3211 	{
3212 	  insert_decl_map (id, p, var);
3213 	  return insert_init_debug_bind (id, bb, var, rhs, NULL);
3214 	}
3215 
3216       STRIP_USELESS_TYPE_CONVERSION (rhs);
3217 
3218       /* If we are in SSA form properly remap the default definition
3219          or assign to a dummy SSA name if the parameter is unused and
3220 	 we are not optimizing.  */
3221       if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3222 	{
3223 	  if (def)
3224 	    {
3225 	      def = remap_ssa_name (def, id);
3226 	      init_stmt = gimple_build_assign (def, rhs);
3227 	      SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3228 	      set_ssa_default_def (cfun, var, NULL);
3229 	    }
3230 	  else if (!optimize)
3231 	    {
3232 	      def = make_ssa_name (var);
3233 	      init_stmt = gimple_build_assign (def, rhs);
3234 	    }
3235 	}
3236       else
3237         init_stmt = gimple_build_assign (var, rhs);
3238 
3239       if (bb && init_stmt)
3240         insert_init_stmt (id, bb, init_stmt);
3241     }
3242   return init_stmt;
3243 }
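/* For orientation, a source-level sketch (hypothetical example) of what the
   parameter setup above amounts to.  Given

     static int add1 (int p) { return p + 1; }
     ...
     y = add1 (x);

   inlining conceptually rewrites the call as

     int p_copy;		   VAR_DECL made by copy_decl_to_var
     p_copy = x;		   init statement built above (or, in SSA,
     y = p_copy + 1;		   the default def of P is mapped to x)

   When the argument is a constant and P is read-only and unaddressed, the
   init statement is skipped entirely and P maps to the constant itself.  */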
3244 
3245 /* Generate code to initialize the parameters of the function at the
3246    top of the stack in ID from the GIMPLE_CALL STMT.  */
3247 
3248 static void
3249 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3250 			       tree fn, basic_block bb)
3251 {
3252   tree parms;
3253   size_t i;
3254   tree p;
3255   tree vars = NULL_TREE;
3256   tree static_chain = gimple_call_chain (stmt);
3257 
3258   /* Figure out what the parameters are.  */
3259   parms = DECL_ARGUMENTS (fn);
3260 
3261   /* Loop through the parameter declarations, replacing each with an
3262      equivalent VAR_DECL, appropriately initialized.  */
3263   for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3264     {
3265       tree val;
3266       val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3267       setup_one_parameter (id, p, val, fn, bb, &vars);
3268     }
3269   /* After remapping parameters remap their types.  This has to be done
3270      in a second loop over all parameters to appropriately remap
3271      variable sized arrays when the size is specified in a
3272      parameter following the array.  */
3273   for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3274     {
3275       tree *varp = id->decl_map->get (p);
3276       if (varp
3277 	  && TREE_CODE (*varp) == VAR_DECL)
3278 	{
3279 	  tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3280 		      ? ssa_default_def (id->src_cfun, p) : NULL);
3281 	  tree var = *varp;
3282 	  TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3283 	  /* Also remap the default definition if it was remapped
3284 	     to the default definition of the parameter replacement
3285 	     by the parameter setup.  */
3286 	  if (def)
3287 	    {
3288 	      tree *defp = id->decl_map->get (def);
3289 	      if (defp
3290 		  && TREE_CODE (*defp) == SSA_NAME
3291 		  && SSA_NAME_VAR (*defp) == var)
3292 		TREE_TYPE (*defp) = TREE_TYPE (var);
3293 	    }
3294 	}
3295     }
3296 
3297   /* Initialize the static chain.  */
3298   p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3299   gcc_assert (fn != current_function_decl);
3300   if (p)
3301     {
3302       /* No static chain?  Seems like a bug in tree-nested.c.  */
3303       gcc_assert (static_chain);
3304 
3305       setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3306     }
3307 
3308   declare_inline_vars (id->block, vars);
3309 }
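/* The second loop above matters for variably modified parameter types whose
   size is given by a later parameter.  A sketch using the GNU forward
   parameter declaration extension (hypothetical user code):

     void
     fill (int n; char data[n][n], int n)
     {
       __builtin_memset (data, 0, sizeof (data[0]));
     }

   Here the type of DATA refers to N; remapping DATA's type only after all
   parameters (and hence N's replacement) have been mapped keeps the copied
   variable-sized type referring to the caller-side copy of N.  */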
3310 
3311 
3312 /* Declare a return variable to replace the RESULT_DECL for the
3313    function we are calling.  An appropriate DECL_STMT is returned.
3314    The USE_STMT is filled to contain a use of the declaration to
3315    indicate the return value of the function.
3316 
3317    RETURN_SLOT, if non-null, is the place in which to store the result.  It
3318    is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
3319    was the LHS of the MODIFY_EXPR to which this call is the RHS.
3320 
3321    RETURN_BOUNDS holds a destination for returned bounds.
3322 
3323    The return value is a (possibly null) value that holds the result
3324    as seen by the caller.  */
3325 
3326 static tree
3327 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3328 			 tree return_bounds, basic_block entry_bb)
3329 {
3330   tree callee = id->src_fn;
3331   tree result = DECL_RESULT (callee);
3332   tree callee_type = TREE_TYPE (result);
3333   tree caller_type;
3334   tree var, use;
3335 
3336   /* Handle type-mismatches in the function declaration return type
3337      vs. the call expression.  */
3338   if (modify_dest)
3339     caller_type = TREE_TYPE (modify_dest);
3340   else
3341     caller_type = TREE_TYPE (TREE_TYPE (callee));
3342 
3343   /* We don't need to do anything for functions that don't return anything.  */
3344   if (VOID_TYPE_P (callee_type))
3345     return NULL_TREE;
3346 
3347   /* If there was a return slot, then the return value is the
3348      dereferenced address of that object.  */
3349   if (return_slot)
3350     {
3351       /* The front end shouldn't have used both return_slot and
3352 	 a modify expression.  */
3353       gcc_assert (!modify_dest);
3354       if (DECL_BY_REFERENCE (result))
3355 	{
3356 	  tree return_slot_addr = build_fold_addr_expr (return_slot);
3357 	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3358 
3359 	  /* We are going to construct *&return_slot and we can't do that
3360 	     for variables believed to be not addressable.
3361 
3362 	     FIXME: This check possibly can match, because values returned
3363 	     via return slot optimization are not believed to have address
3364 	     taken by alias analysis.  */
3365 	  gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3366 	  var = return_slot_addr;
3367 	}
3368       else
3369 	{
3370 	  var = return_slot;
3371 	  gcc_assert (TREE_CODE (var) != SSA_NAME);
3372 	  if (TREE_ADDRESSABLE (result))
3373 	    mark_addressable (var);
3374 	}
3375       if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3376            || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3377 	  && !DECL_GIMPLE_REG_P (result)
3378 	  && DECL_P (var))
3379 	DECL_GIMPLE_REG_P (var) = 0;
3380       use = NULL;
3381       goto done;
3382     }
3383 
3384   /* All types requiring non-trivial constructors should have been handled.  */
3385   gcc_assert (!TREE_ADDRESSABLE (callee_type));
3386 
3387   /* Attempt to avoid creating a new temporary variable.  */
3388   if (modify_dest
3389       && TREE_CODE (modify_dest) != SSA_NAME)
3390     {
3391       bool use_it = false;
3392 
3393       /* We can't use MODIFY_DEST if there's type promotion involved.  */
3394       if (!useless_type_conversion_p (callee_type, caller_type))
3395 	use_it = false;
3396 
3397       /* ??? If we're assigning to a variable sized type, then we must
3398 	 reuse the destination variable, because we've no good way to
3399 	 create variable sized temporaries at this point.  */
3400       else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3401 	use_it = true;
3402 
3403       /* If the callee cannot possibly modify MODIFY_DEST, then we can
3404 	 reuse it as the result of the call directly.  Don't do this if
3405 	 it would promote MODIFY_DEST to addressable.  */
3406       else if (TREE_ADDRESSABLE (result))
3407 	use_it = false;
3408       else
3409 	{
3410 	  tree base_m = get_base_address (modify_dest);
3411 
3412 	  /* If the base isn't a decl, then it's a pointer, and we don't
3413 	     know where that's going to go.  */
3414 	  if (!DECL_P (base_m))
3415 	    use_it = false;
3416 	  else if (is_global_var (base_m))
3417 	    use_it = false;
3418 	  else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3419 		    || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3420 		   && !DECL_GIMPLE_REG_P (result)
3421 		   && DECL_GIMPLE_REG_P (base_m))
3422 	    use_it = false;
3423 	  else if (!TREE_ADDRESSABLE (base_m))
3424 	    use_it = true;
3425 	}
3426 
3427       if (use_it)
3428 	{
3429 	  var = modify_dest;
3430 	  use = NULL;
3431 	  goto done;
3432 	}
3433     }
3434 
3435   gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3436 
3437   var = copy_result_decl_to_var (result, id);
3438   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3439 
3440   /* Do not have the rest of GCC warn about this variable as it should
3441      not be visible to the user.  */
3442   TREE_NO_WARNING (var) = 1;
3443 
3444   declare_inline_vars (id->block, var);
3445 
3446   /* Build the use expr.  If the return type of the function was
3447      promoted, convert it back to the expected type.  */
3448   use = var;
3449   if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3450     {
3451       /* If we can match up types by promotion/demotion do so.  */
3452       if (fold_convertible_p (caller_type, var))
3453 	use = fold_convert (caller_type, var);
3454       else
3455 	{
3456 	  /* ???  For valid programs we should not end up here.
3457 	     Still if we end up with truly mismatched types here, fall back
3458 	     to using a MEM_REF to not leak invalid GIMPLE to the following
3459 	     passes.  */
3460 	  /* Prevent var from being written into SSA form.  */
3461 	  if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3462 	      || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3463 	    DECL_GIMPLE_REG_P (var) = false;
3464 	  else if (is_gimple_reg_type (TREE_TYPE (var)))
3465 	    TREE_ADDRESSABLE (var) = true;
3466 	  use = fold_build2 (MEM_REF, caller_type,
3467 			     build_fold_addr_expr (var),
3468 			     build_int_cst (ptr_type_node, 0));
3469 	}
3470     }
3471 
3472   STRIP_USELESS_TYPE_CONVERSION (use);
3473 
3474   if (DECL_BY_REFERENCE (result))
3475     {
3476       TREE_ADDRESSABLE (var) = 1;
3477       var = build_fold_addr_expr (var);
3478     }
3479 
3480  done:
3481   /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3482      way, when the RESULT_DECL is encountered, it will be
3483      automatically replaced by the VAR_DECL.
3484 
3485      When returning by reference, ensure that RESULT_DECL remaps to
3486      gimple_val.  */
3487   if (DECL_BY_REFERENCE (result)
3488       && !is_gimple_val (var))
3489     {
3490       tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3491       insert_decl_map (id, result, temp);
3492       /* When RESULT_DECL is in SSA form, we need to remap and initialize
3493 	 its default_def SSA_NAME.  */
3494       if (gimple_in_ssa_p (id->src_cfun)
3495 	  && is_gimple_reg (result))
3496 	{
3497 	  temp = make_ssa_name (temp);
3498 	  insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3499 	}
3500       insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3501     }
3502   else
3503     insert_decl_map (id, result, var);
3504 
3505   /* Remember this so we can ignore it in remap_decls.  */
3506   id->retvar = var;
3507 
3508   /* If returned bounds are used, then make var for them.  */
3509   if (return_bounds)
3510   {
3511     tree bndtemp = create_tmp_var (pointer_bounds_type_node, "retbnd");
3512     DECL_SEEN_IN_BIND_EXPR_P (bndtemp) = 1;
3513     TREE_NO_WARNING (bndtemp) = 1;
3514     declare_inline_vars (id->block, bndtemp);
3515 
3516     id->retbnd = bndtemp;
3517     insert_init_stmt (id, entry_bb,
3518 		      gimple_build_assign (bndtemp, chkp_get_zero_bounds_var ()));
3519   }
3520 
3521   return use;
3522 }
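/* A source-level sketch (hypothetical example) of the two interesting cases
   above:

     struct big { char buf[256]; };
     struct big make (void);

     struct big b = make ();	   with the return-slot optimization the
				   callee's RESULT_DECL maps to "b" itself
     b = make ();		   otherwise, if the callee cannot possibly
				   modify "b" and "b" need not become
				   addressable, it is reused as MODIFY_DEST;
				   failing that, a fresh temporary made by
				   copy_result_decl_to_var is declared

   Either way, the value seen by the caller is the USE expression this
   function returns.  */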
3523 
3524 /* Determine if the function can be copied.  If so return NULL.  If
3525    not return a string describing the reason for failure.  */
3526 
3527 const char *
3528 copy_forbidden (struct function *fun)
3529 {
3530   const char *reason = fun->cannot_be_copied_reason;
3531 
3532   /* Only examine the function once.  */
3533   if (fun->cannot_be_copied_set)
3534     return reason;
3535 
3536   /* We cannot copy a function that receives a non-local goto
3537      because we cannot remap the destination label used in the
3538      function that is performing the non-local goto.  */
3539   /* ??? Actually, this should be possible, if we work at it.
3540      No doubt there's just a handful of places that simply
3541      assume it doesn't happen and don't substitute properly.  */
3542   if (fun->has_nonlocal_label)
3543     {
3544       reason = G_("function %q+F can never be copied "
3545 		  "because it receives a non-local goto");
3546       goto fail;
3547     }
3548 
3549   if (fun->has_forced_label_in_static)
3550     {
3551       reason = G_("function %q+F can never be copied because it saves "
3552 		  "address of local label in a static variable");
3553       goto fail;
3554     }
3555 
3556  fail:
3557   fun->cannot_be_copied_reason = reason;
3558   fun->cannot_be_copied_set = true;
3559   return reason;
3560 }
3561 
3562 
3563 static const char *inline_forbidden_reason;
3564 
3565 /* A callback for walk_gimple_seq to handle statements.  Returns non-null
3566    iff a function cannot be inlined.  Also sets the reason why.  */
3567 
3568 static tree
3569 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3570 			 struct walk_stmt_info *wip)
3571 {
3572   tree fn = (tree) wip->info;
3573   tree t;
3574   gimple *stmt = gsi_stmt (*gsi);
3575 
3576   switch (gimple_code (stmt))
3577     {
3578     case GIMPLE_CALL:
3579       /* Refuse to inline an alloca call unless the user explicitly forced
3580 	 it, as this may change the program's memory overhead drastically
3581 	 when the function using alloca is called in a loop.  In the GCC
3582 	 present in SPEC2000, inlining into schedule_block caused it to
3583 	 require 2GB of RAM instead of 256MB.  Don't do so for alloca calls
3584 	 emitted for VLA objects, as those can't cause unbounded growth
3585 	 (they're always wrapped inside stack_save/stack_restore regions).  */
3586       if (gimple_alloca_call_p (stmt)
3587 	  && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3588 	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3589 	{
3590 	  inline_forbidden_reason
3591 	    = G_("function %q+F can never be inlined because it uses "
3592 		 "alloca (override using the always_inline attribute)");
3593 	  *handled_ops_p = true;
3594 	  return fn;
3595 	}
3596 
3597       t = gimple_call_fndecl (stmt);
3598       if (t == NULL_TREE)
3599 	break;
3600 
3601       /* We cannot inline functions that call setjmp.  */
3602       if (setjmp_call_p (t))
3603 	{
3604 	  inline_forbidden_reason
3605 	    = G_("function %q+F can never be inlined because it uses setjmp");
3606 	  *handled_ops_p = true;
3607 	  return t;
3608 	}
3609 
3610       if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3611 	switch (DECL_FUNCTION_CODE (t))
3612 	  {
3613 	    /* We cannot inline functions that take a variable number of
3614 	       arguments.  */
3615 	  case BUILT_IN_VA_START:
3616 	  case BUILT_IN_NEXT_ARG:
3617 	  case BUILT_IN_VA_END:
3618 	    inline_forbidden_reason
3619 	      = G_("function %q+F can never be inlined because it "
3620 		   "uses variable argument lists");
3621 	    *handled_ops_p = true;
3622 	    return t;
3623 
3624 	  case BUILT_IN_LONGJMP:
3625 	    /* We can't inline functions that call __builtin_longjmp at
3626 	       all.  The non-local goto machinery really requires the
3627 	       destination be in a different function.  If we allow the
3628 	       function calling __builtin_longjmp to be inlined into the
3629 	       function calling __builtin_setjmp, Things will Go Awry.  */
3630 	    inline_forbidden_reason
3631 	      = G_("function %q+F can never be inlined because "
3632 		   "it uses setjmp-longjmp exception handling");
3633 	    *handled_ops_p = true;
3634 	    return t;
3635 
3636 	  case BUILT_IN_NONLOCAL_GOTO:
3637 	    /* Similarly.  */
3638 	    inline_forbidden_reason
3639 	      = G_("function %q+F can never be inlined because "
3640 		   "it uses non-local goto");
3641 	    *handled_ops_p = true;
3642 	    return t;
3643 
3644 	  case BUILT_IN_RETURN:
3645 	  case BUILT_IN_APPLY_ARGS:
3646 	    /* If a __builtin_apply_args caller would be inlined,
3647 	       it would be saving arguments of the function it has
3648 	       been inlined into.  Similarly __builtin_return would
3649 	       return from the function the callee has been inlined into.  */
3650 	    inline_forbidden_reason
3651 	      = G_("function %q+F can never be inlined because "
3652 		   "it uses __builtin_return or __builtin_apply_args");
3653 	    *handled_ops_p = true;
3654 	    return t;
3655 
3656 	  default:
3657 	    break;
3658 	  }
3659       break;
3660 
3661     case GIMPLE_GOTO:
3662       t = gimple_goto_dest (stmt);
3663 
3664       /* We will not inline a function which uses computed goto.  The
3665 	 addresses of its local labels, which may be tucked into
3666 	 global storage, are of course not constant across
3667 	 instantiations, which causes unexpected behavior.  */
3668       if (TREE_CODE (t) != LABEL_DECL)
3669 	{
3670 	  inline_forbidden_reason
3671 	    = G_("function %q+F can never be inlined "
3672 		 "because it contains a computed goto");
3673 	  *handled_ops_p = true;
3674 	  return t;
3675 	}
3676       break;
3677 
3678     default:
3679       break;
3680     }
3681 
3682   *handled_ops_p = false;
3683   return NULL_TREE;
3684 }
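/* A sketch of user code (hypothetical) tripping the checks above:

     #include <setjmp.h>

     jmp_buf env;

     int
     risky (int n, void *label)
     {
       char *p = __builtin_alloca (n);	   refused unless always_inline
       if (setjmp (env))		   setjmp callers are never inlined
	 return 0;
       goto *label;			   computed goto: also never inlined
       return p[0];
     }

   Each of these sets inline_forbidden_reason with a matching message.  */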
3685 
3686 /* Return true if FNDECL is a function that cannot be inlined into
3687    another one.  */
3688 
3689 static bool
3690 inline_forbidden_p (tree fndecl)
3691 {
3692   struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3693   struct walk_stmt_info wi;
3694   basic_block bb;
3695   bool forbidden_p = false;
3696 
3697   /* First check for shared reasons not to copy the code.  */
3698   inline_forbidden_reason = copy_forbidden (fun);
3699   if (inline_forbidden_reason != NULL)
3700     return true;
3701 
3702   /* Next, walk the statements of the function looking for
3703      constructs we can't handle, or that are non-optimal for inlining.  */
3704   hash_set<tree> visited_nodes;
3705   memset (&wi, 0, sizeof (wi));
3706   wi.info = (void *) fndecl;
3707   wi.pset = &visited_nodes;
3708 
3709   FOR_EACH_BB_FN (bb, fun)
3710     {
3711       gimple *ret;
3712       gimple_seq seq = bb_seq (bb);
3713       ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3714       forbidden_p = (ret != NULL);
3715       if (forbidden_p)
3716 	break;
3717     }
3718 
3719   return forbidden_p;
3720 }
3721 
3722 /* Return false if the function FNDECL cannot be inlined on account of its
3723    attributes, true otherwise.  */
3724 static bool
3725 function_attribute_inlinable_p (const_tree fndecl)
3726 {
3727   if (targetm.attribute_table)
3728     {
3729       const_tree a;
3730 
3731       for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3732 	{
3733 	  const_tree name = TREE_PURPOSE (a);
3734 	  int i;
3735 
3736 	  for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3737 	    if (is_attribute_p (targetm.attribute_table[i].name, name))
3738 	      return targetm.function_attribute_inlinable_p (fndecl);
3739 	}
3740     }
3741 
3742   return true;
3743 }
3744 
3745 /* Returns nonzero if FN is a function that does not have any
3746    fundamental inline blocking properties.  */
3747 
3748 bool
3749 tree_inlinable_function_p (tree fn)
3750 {
3751   bool inlinable = true;
3752   bool do_warning;
3753   tree always_inline;
3754 
3755   /* If we've already decided this function shouldn't be inlined,
3756      there's no need to check again.  */
3757   if (DECL_UNINLINABLE (fn))
3758     return false;
3759 
3760   /* We only warn for functions declared `inline' by the user.  */
3761   do_warning = (warn_inline
3762 		&& DECL_DECLARED_INLINE_P (fn)
3763 		&& !DECL_NO_INLINE_WARNING_P (fn)
3764 		&& !DECL_IN_SYSTEM_HEADER (fn));
3765 
3766   always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3767 
3768   if (flag_no_inline
3769       && always_inline == NULL)
3770     {
3771       if (do_warning)
3772         warning (OPT_Winline, "function %q+F can never be inlined because it "
3773                  "is suppressed using -fno-inline", fn);
3774       inlinable = false;
3775     }
3776 
3777   else if (!function_attribute_inlinable_p (fn))
3778     {
3779       if (do_warning)
3780         warning (OPT_Winline, "function %q+F can never be inlined because it "
3781                  "uses attributes conflicting with inlining", fn);
3782       inlinable = false;
3783     }
3784 
3785   else if (inline_forbidden_p (fn))
3786     {
3787       /* See if we should warn about uninlinable functions.  Previously,
3788 	 some of these warnings would be issued while trying to expand
3789 	 the function inline, but that would cause multiple warnings
3790 	 about functions that would for example call alloca.  But since
3791 	 this is a property of the function, just one warning is enough.
3792 	 As a bonus we can now give more details about the reason why a
3793 	 function is not inlinable.  */
3794       if (always_inline)
3795 	error (inline_forbidden_reason, fn);
3796       else if (do_warning)
3797 	warning (OPT_Winline, inline_forbidden_reason, fn);
3798 
3799       inlinable = false;
3800     }
3801 
3802   /* Squirrel away the result so that we don't have to check again.  */
3803   DECL_UNINLINABLE (fn) = !inlinable;
3804 
3805   return inlinable;
3806 }
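/* A sketch (hypothetical user code) of how the checks above interact with
   -fno-inline and the always_inline attribute:

     static inline int __attribute__ ((always_inline))
     fast_path (int x) { return x + 1; }

     static inline int
     slow_path (int x) { return x + 2; }

   Under -fno-inline, slow_path is marked DECL_UNINLINABLE (and -Winline
   would warn, since it was declared inline), while fast_path is still
   considered because its always_inline attribute bypasses that check.  */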
3807 
3808 /* Estimate the cost of a memory move of type TYPE.  Use machine dependent
3809    word size and take possible memcpy call into account and return
3810    cost based on whether optimizing for size or speed according to SPEED_P.  */
3811 
3812 int
3813 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3814 {
3815   HOST_WIDE_INT size;
3816 
3817   gcc_assert (!VOID_TYPE_P (type));
3818 
3819   if (TREE_CODE (type) == VECTOR_TYPE)
3820     {
3821       machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3822       machine_mode simd
3823 	= targetm.vectorize.preferred_simd_mode (inner);
3824       int simd_mode_size = GET_MODE_SIZE (simd);
3825       return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3826 	      / simd_mode_size);
3827     }
3828 
3829   size = int_size_in_bytes (type);
3830 
3831   if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3832     /* Cost of a memcpy call, 3 arguments and the call.  */
3833     return 4;
3834   else
3835     return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3836 }
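/* A worked example of the size-based arithmetic above, assuming a target
   with MOVE_MAX_PIECES == 8 and a sufficiently large MOVE_RATIO (both are
   target-dependent, so the numbers are illustrative only):

     struct { int a, b, c; } s;	   12 bytes  -> (12 + 8 - 1) / 8 = 2
     char big[4096];		   4096 bytes -> treated as a memcpy call,
				   so the flat cost of 4

   Vector types are instead measured in units of the preferred SIMD mode,
   e.g. a 32-byte vector on a target with 16-byte SIMD counts as 2.  */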
3837 
3838 /* Returns cost of operation CODE, according to WEIGHTS  */
3839 
3840 static int
3841 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3842 			tree op1 ATTRIBUTE_UNUSED, tree op2)
3843 {
3844   switch (code)
3845     {
3846     /* These are "free" conversions, or their presumed cost
3847        is folded into other operations.  */
3848     case RANGE_EXPR:
3849     CASE_CONVERT:
3850     case COMPLEX_EXPR:
3851     case PAREN_EXPR:
3852     case VIEW_CONVERT_EXPR:
3853       return 0;
3854 
3855     /* Assign cost of 1 to usual operations.
3856        ??? We may consider mapping RTL costs to this.  */
3857     case COND_EXPR:
3858     case VEC_COND_EXPR:
3859     case VEC_PERM_EXPR:
3860 
3861     case PLUS_EXPR:
3862     case POINTER_PLUS_EXPR:
3863     case MINUS_EXPR:
3864     case MULT_EXPR:
3865     case MULT_HIGHPART_EXPR:
3866     case FMA_EXPR:
3867 
3868     case ADDR_SPACE_CONVERT_EXPR:
3869     case FIXED_CONVERT_EXPR:
3870     case FIX_TRUNC_EXPR:
3871 
3872     case NEGATE_EXPR:
3873     case FLOAT_EXPR:
3874     case MIN_EXPR:
3875     case MAX_EXPR:
3876     case ABS_EXPR:
3877 
3878     case LSHIFT_EXPR:
3879     case RSHIFT_EXPR:
3880     case LROTATE_EXPR:
3881     case RROTATE_EXPR:
3882 
3883     case BIT_IOR_EXPR:
3884     case BIT_XOR_EXPR:
3885     case BIT_AND_EXPR:
3886     case BIT_NOT_EXPR:
3887 
3888     case TRUTH_ANDIF_EXPR:
3889     case TRUTH_ORIF_EXPR:
3890     case TRUTH_AND_EXPR:
3891     case TRUTH_OR_EXPR:
3892     case TRUTH_XOR_EXPR:
3893     case TRUTH_NOT_EXPR:
3894 
3895     case LT_EXPR:
3896     case LE_EXPR:
3897     case GT_EXPR:
3898     case GE_EXPR:
3899     case EQ_EXPR:
3900     case NE_EXPR:
3901     case ORDERED_EXPR:
3902     case UNORDERED_EXPR:
3903 
3904     case UNLT_EXPR:
3905     case UNLE_EXPR:
3906     case UNGT_EXPR:
3907     case UNGE_EXPR:
3908     case UNEQ_EXPR:
3909     case LTGT_EXPR:
3910 
3911     case CONJ_EXPR:
3912 
3913     case PREDECREMENT_EXPR:
3914     case PREINCREMENT_EXPR:
3915     case POSTDECREMENT_EXPR:
3916     case POSTINCREMENT_EXPR:
3917 
3918     case REALIGN_LOAD_EXPR:
3919 
3920     case REDUC_MAX_EXPR:
3921     case REDUC_MIN_EXPR:
3922     case REDUC_PLUS_EXPR:
3923     case WIDEN_SUM_EXPR:
3924     case WIDEN_MULT_EXPR:
3925     case DOT_PROD_EXPR:
3926     case SAD_EXPR:
3927     case WIDEN_MULT_PLUS_EXPR:
3928     case WIDEN_MULT_MINUS_EXPR:
3929     case WIDEN_LSHIFT_EXPR:
3930 
3931     case VEC_WIDEN_MULT_HI_EXPR:
3932     case VEC_WIDEN_MULT_LO_EXPR:
3933     case VEC_WIDEN_MULT_EVEN_EXPR:
3934     case VEC_WIDEN_MULT_ODD_EXPR:
3935     case VEC_UNPACK_HI_EXPR:
3936     case VEC_UNPACK_LO_EXPR:
3937     case VEC_UNPACK_FLOAT_HI_EXPR:
3938     case VEC_UNPACK_FLOAT_LO_EXPR:
3939     case VEC_PACK_TRUNC_EXPR:
3940     case VEC_PACK_SAT_EXPR:
3941     case VEC_PACK_FIX_TRUNC_EXPR:
3942     case VEC_WIDEN_LSHIFT_HI_EXPR:
3943     case VEC_WIDEN_LSHIFT_LO_EXPR:
3944 
3945       return 1;
3946 
3947     /* Few special cases of expensive operations.  This is useful
3948        to avoid inlining on functions having too many of these.  */
3949     case TRUNC_DIV_EXPR:
3950     case CEIL_DIV_EXPR:
3951     case FLOOR_DIV_EXPR:
3952     case ROUND_DIV_EXPR:
3953     case EXACT_DIV_EXPR:
3954     case TRUNC_MOD_EXPR:
3955     case CEIL_MOD_EXPR:
3956     case FLOOR_MOD_EXPR:
3957     case ROUND_MOD_EXPR:
3958     case RDIV_EXPR:
3959       if (TREE_CODE (op2) != INTEGER_CST)
3960         return weights->div_mod_cost;
3961       return 1;
3962 
3963     default:
3964       /* We expect a copy assignment with no operator.  */
3965       gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3966       return 0;
3967     }
3968 }
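/* Two quick examples of the weighting above (using the default weights set
   up in init_inline_once below):

     a = b + c;		PLUS_EXPR			-> 1
     a = b / 16;	TRUNC_DIV_EXPR by INTEGER_CST	-> 1
     a = b / c;		TRUNC_DIV_EXPR by a register	-> div_mod_cost
			(1 for size estimates, 10 for time estimates)

   Conversions, PAREN_EXPRs and the like are counted as free since they
   usually disappear into the surrounding operations.  */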
3969 
3970 
3971 /* Estimate number of instructions that will be created by expanding
3972    the statements in the statement sequence STMTS.
3973    WEIGHTS contains weights attributed to various constructs.  */
3974 
3975 int
3976 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3977 {
3978   int cost;
3979   gimple_stmt_iterator gsi;
3980 
3981   cost = 0;
3982   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3983     cost += estimate_num_insns (gsi_stmt (gsi), weights);
3984 
3985   return cost;
3986 }
3987 
3988 
3989 /* Estimate number of instructions that will be created by expanding STMT.
3990    WEIGHTS contains weights attributed to various constructs.  */
3991 
3992 int
3993 estimate_num_insns (gimple *stmt, eni_weights *weights)
3994 {
3995   unsigned cost, i;
3996   enum gimple_code code = gimple_code (stmt);
3997   tree lhs;
3998   tree rhs;
3999 
4000   switch (code)
4001     {
4002     case GIMPLE_ASSIGN:
4003       /* Try to estimate the cost of assignments.  We have two cases to
4004 	 deal with:
4005 	 1) Simple assignments to registers;
4006 	 2) Stores to things that must live in memory.  This includes
4007 	    "normal" stores to scalars, but also assignments of large
4008 	    structures, or constructors of big arrays;
4009 
4010 	 Let us look at the first two cases, assuming we have "a = b + C":
4011 	 <GIMPLE_ASSIGN <var_decl "a">
4012 	        <plus_expr <var_decl "b"> <constant C>>
4013 	 If "a" is a GIMPLE register, the assignment to it is free on almost
4014 	 any target, because "a" usually ends up in a real register.  Hence
4015 	 the only cost of this expression comes from the PLUS_EXPR, and we
4016 	 can ignore the GIMPLE_ASSIGN.
4017 	 If "a" is not a GIMPLE register, the assignment to "a" will most
4018 	 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4019 	 of moving something into "a", which we compute using the function
4020 	 estimate_move_cost.  */
4021       if (gimple_clobber_p (stmt))
4022 	return 0;	/* ={v} {CLOBBER} stmt expands to nothing.  */
4023 
4024       lhs = gimple_assign_lhs (stmt);
4025       rhs = gimple_assign_rhs1 (stmt);
4026 
4027       cost = 0;
4028 
4029       /* Account for the cost of moving to / from memory.  */
4030       if (gimple_store_p (stmt))
4031 	cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4032       if (gimple_assign_load_p (stmt))
4033 	cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4034 
4035       cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4036       				      gimple_assign_rhs1 (stmt),
4037 				      get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4038 				      == GIMPLE_BINARY_RHS
4039 				      ? gimple_assign_rhs2 (stmt) : NULL);
4040       break;
4041 
4042     case GIMPLE_COND:
4043       cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4044       				         gimple_op (stmt, 0),
4045 				         gimple_op (stmt, 1));
4046       break;
4047 
4048     case GIMPLE_SWITCH:
4049       {
4050 	gswitch *switch_stmt = as_a <gswitch *> (stmt);
4051 	/* Take into account cost of the switch + guess 2 conditional jumps for
4052 	   each case label.
4053 
4054 	   TODO: once the switch expansion logic is sufficiently separated, we can
4055 	   do a better job of estimating the cost of the switch.  */
4056 	if (weights->time_based)
4057 	  cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4058 	else
4059 	  cost = gimple_switch_num_labels (switch_stmt) * 2;
4060       }
4061       break;
4062 
4063     case GIMPLE_CALL:
4064       {
4065 	tree decl;
4066 
4067 	if (gimple_call_internal_p (stmt))
4068 	  return 0;
4069 	else if ((decl = gimple_call_fndecl (stmt))
4070 		 && DECL_BUILT_IN (decl))
4071 	  {
4072 	    /* Do not special case builtins where we see the body.
4073 	       This just confuses the inliner.  */
4074 	    struct cgraph_node *node;
4075 	    if (!(node = cgraph_node::get (decl))
4076 		|| node->definition)
4077 	      ;
4078 	    /* For builtins that are likely expanded to nothing or
4079 	       inlined, do not account operand costs.  */
4080 	    else if (is_simple_builtin (decl))
4081 	      return 0;
4082 	    else if (is_inexpensive_builtin (decl))
4083 	      return weights->target_builtin_call_cost;
4084 	    else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4085 	      {
4086 		/* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4087 		   specialize the cheap expansion we do here.
4088 		   ???  This asks for a more general solution.  */
4089 		switch (DECL_FUNCTION_CODE (decl))
4090 		  {
4091 		    case BUILT_IN_POW:
4092 		    case BUILT_IN_POWF:
4093 		    case BUILT_IN_POWL:
4094 		      if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4095 			  && (real_equal
4096 			      (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4097 			       &dconst2)))
4098 			return estimate_operator_cost
4099 			    (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4100 			     gimple_call_arg (stmt, 0));
4101 		      break;
4102 
4103 		    default:
4104 		      break;
4105 		  }
4106 	      }
4107 	  }
4108 
4109 	cost = decl ? weights->call_cost : weights->indirect_call_cost;
4110 	if (gimple_call_lhs (stmt))
4111 	  cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4112 				      weights->time_based);
4113 	for (i = 0; i < gimple_call_num_args (stmt); i++)
4114 	  {
4115 	    tree arg = gimple_call_arg (stmt, i);
4116 	    cost += estimate_move_cost (TREE_TYPE (arg),
4117 					weights->time_based);
4118 	  }
4119 	break;
4120       }
4121 
4122     case GIMPLE_RETURN:
4123       return weights->return_cost;
4124 
4125     case GIMPLE_GOTO:
4126     case GIMPLE_LABEL:
4127     case GIMPLE_NOP:
4128     case GIMPLE_PHI:
4129     case GIMPLE_PREDICT:
4130     case GIMPLE_DEBUG:
4131       return 0;
4132 
4133     case GIMPLE_ASM:
4134       {
4135 	int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4136 	/* 1000 means infinity. This avoids overflows later
4137 	   with very long asm statements.  */
4138 	if (count > 1000)
4139 	  count = 1000;
4140 	return count;
4141       }
4142 
4143     case GIMPLE_RESX:
4144       /* This is either going to be an external function call with one
4145 	 argument, or two register copy statements plus a goto.  */
4146       return 2;
4147 
4148     case GIMPLE_EH_DISPATCH:
4149       /* ??? This is going to turn into a switch statement.  Ideally
4150 	 we'd have a look at the eh region and estimate the number of
4151 	 edges involved.  */
4152       return 10;
4153 
4154     case GIMPLE_BIND:
4155       return estimate_num_insns_seq (
4156 	       gimple_bind_body (as_a <gbind *> (stmt)),
4157 	       weights);
4158 
4159     case GIMPLE_EH_FILTER:
4160       return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4161 
4162     case GIMPLE_CATCH:
4163       return estimate_num_insns_seq (gimple_catch_handler (
4164 				       as_a <gcatch *> (stmt)),
4165 				     weights);
4166 
4167     case GIMPLE_TRY:
4168       return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4169               + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4170 
4171     /* OMP directives are generally very expensive.  */
4172 
4173     case GIMPLE_OMP_RETURN:
4174     case GIMPLE_OMP_SECTIONS_SWITCH:
4175     case GIMPLE_OMP_ATOMIC_STORE:
4176     case GIMPLE_OMP_CONTINUE:
4177       /* ...except these, which are cheap.  */
4178       return 0;
4179 
4180     case GIMPLE_OMP_ATOMIC_LOAD:
4181       return weights->omp_cost;
4182 
4183     case GIMPLE_OMP_FOR:
4184       return (weights->omp_cost
4185               + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4186               + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4187 
4188     case GIMPLE_OMP_PARALLEL:
4189     case GIMPLE_OMP_TASK:
4190     case GIMPLE_OMP_CRITICAL:
4191     case GIMPLE_OMP_MASTER:
4192     case GIMPLE_OMP_TASKGROUP:
4193     case GIMPLE_OMP_ORDERED:
4194     case GIMPLE_OMP_SECTION:
4195     case GIMPLE_OMP_SECTIONS:
4196     case GIMPLE_OMP_SINGLE:
4197     case GIMPLE_OMP_TARGET:
4198     case GIMPLE_OMP_TEAMS:
4199       return (weights->omp_cost
4200               + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4201 
4202     case GIMPLE_TRANSACTION:
4203       return (weights->tm_cost
4204 	      + estimate_num_insns_seq (gimple_transaction_body (
4205 					  as_a <gtransaction *> (stmt)),
4206 					weights));
4207 
4208     default:
4209       gcc_unreachable ();
4210     }
4211 
4212   return cost;
4213 }
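/* A worked example of the per-statement estimates above.  For a
   GIMPLE_SWITCH with 8 labels (including the default):

     size weights:  8 * 2 = 16 instructions
     time weights:  floor_log2 (8) * 2 = 6 instructions

   and for a direct call "x = foo (a, b)" to a non-builtin, using the default
   time weights set up below: call_cost (10) plus one estimate_move_cost for
   the returned value and one per argument.  */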
4214 
4215 /* Estimate number of instructions that will be created by expanding
4216    function FNDECL.  WEIGHTS contains weights attributed to various
4217    constructs.  */
4218 
4219 int
4220 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4221 {
4222   struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4223   gimple_stmt_iterator bsi;
4224   basic_block bb;
4225   int n = 0;
4226 
4227   gcc_assert (my_function && my_function->cfg);
4228   FOR_EACH_BB_FN (bb, my_function)
4229     {
4230       for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4231 	n += estimate_num_insns (gsi_stmt (bsi), weights);
4232     }
4233 
4234   return n;
4235 }
4236 
4237 
4238 /* Initializes weights used by estimate_num_insns.  */
4239 
4240 void
4241 init_inline_once (void)
4242 {
4243   eni_size_weights.call_cost = 1;
4244   eni_size_weights.indirect_call_cost = 3;
4245   eni_size_weights.target_builtin_call_cost = 1;
4246   eni_size_weights.div_mod_cost = 1;
4247   eni_size_weights.omp_cost = 40;
4248   eni_size_weights.tm_cost = 10;
4249   eni_size_weights.time_based = false;
4250   eni_size_weights.return_cost = 1;
4251 
4252   /* Estimating time for call is difficult, since we have no idea what the
4253      called function does.  In the current uses of eni_time_weights,
4254      underestimating the cost does less harm than overestimating it, so
4255      we choose a rather small value here.  */
4256   eni_time_weights.call_cost = 10;
4257   eni_time_weights.indirect_call_cost = 15;
4258   eni_time_weights.target_builtin_call_cost = 1;
4259   eni_time_weights.div_mod_cost = 10;
4260   eni_time_weights.omp_cost = 40;
4261   eni_time_weights.tm_cost = 40;
4262   eni_time_weights.time_based = true;
4263   eni_time_weights.return_cost = 2;
4264 }
4265 
4266 
4267 /* Install new lexical TREE_BLOCK underneath 'current_block'.  */
4268 
4269 static void
4270 prepend_lexical_block (tree current_block, tree new_block)
4271 {
4272   BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4273   BLOCK_SUBBLOCKS (current_block) = new_block;
4274   BLOCK_SUPERCONTEXT (new_block) = current_block;
4275 }
4276 
4277 /* Add local variables from CALLEE to CALLER.  */
4278 
4279 static inline void
4280 add_local_variables (struct function *callee, struct function *caller,
4281 		     copy_body_data *id)
4282 {
4283   tree var;
4284   unsigned ix;
4285 
4286   FOR_EACH_LOCAL_DECL (callee, ix, var)
4287     if (!can_be_nonlocal (var, id))
4288       {
4289         tree new_var = remap_decl (var, id);
4290 
4291         /* Remap debug-expressions.  */
4292 	if (TREE_CODE (new_var) == VAR_DECL
4293 	    && DECL_HAS_DEBUG_EXPR_P (var)
4294 	    && new_var != var)
4295 	  {
4296 	    tree tem = DECL_DEBUG_EXPR (var);
4297 	    bool old_regimplify = id->regimplify;
4298 	    id->remapping_type_depth++;
4299 	    walk_tree (&tem, copy_tree_body_r, id, NULL);
4300 	    id->remapping_type_depth--;
4301 	    id->regimplify = old_regimplify;
4302 	    SET_DECL_DEBUG_EXPR (new_var, tem);
4303 	    DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4304 	  }
4305 	add_local_decl (caller, new_var);
4306       }
4307 }
4308 
4309 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4310    have brought in or introduced any debug stmts for SRCVAR.  */
4311 
4312 static inline void
4313 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4314 {
4315   tree *remappedvarp = id->decl_map->get (srcvar);
4316 
4317   if (!remappedvarp)
4318     return;
4319 
4320   if (TREE_CODE (*remappedvarp) != VAR_DECL)
4321     return;
4322 
4323   if (*remappedvarp == id->retvar || *remappedvarp == id->retbnd)
4324     return;
4325 
4326   tree tvar = target_for_debug_bind (*remappedvarp);
4327   if (!tvar)
4328     return;
4329 
4330   gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4331 					  id->call_stmt);
4332   gimple_seq_add_stmt (bindings, stmt);
4333 }
4334 
4335 /* For each inlined variable for which we may have debug bind stmts,
4336    add before GSI a final debug stmt resetting it, marking the end of
4337    its life, so that var-tracking knows it doesn't have to compute
4338    further locations for it.  */
4339 
4340 static inline void
4341 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4342 {
4343   tree var;
4344   unsigned ix;
4345   gimple_seq bindings = NULL;
4346 
4347   if (!gimple_in_ssa_p (id->src_cfun))
4348     return;
4349 
4350   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4351     return;
4352 
4353   for (var = DECL_ARGUMENTS (id->src_fn);
4354        var; var = DECL_CHAIN (var))
4355     reset_debug_binding (id, var, &bindings);
4356 
4357   FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4358     reset_debug_binding (id, var, &bindings);
4359 
4360   gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4361 }
4362 
4363 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion.  */
4364 
4365 static bool
4366 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
4367 {
4368   tree use_retvar;
4369   tree fn;
4370   hash_map<tree, tree> *dst;
4371   hash_map<tree, tree> *st = NULL;
4372   tree return_slot;
4373   tree modify_dest;
4374   tree return_bounds = NULL;
4375   struct cgraph_edge *cg_edge;
4376   cgraph_inline_failed_t reason;
4377   basic_block return_block;
4378   edge e;
4379   gimple_stmt_iterator gsi, stmt_gsi;
4380   bool successfully_inlined = false;
4381   bool purge_dead_abnormal_edges;
4382   gcall *call_stmt;
4383   unsigned int i;
4384 
4385   /* The gimplifier uses input_location in too many places, such as
4386      internal_get_tmp_var ().  */
4387   location_t saved_location = input_location;
4388   input_location = gimple_location (stmt);
4389 
4390   /* From here on, we're only interested in CALL_EXPRs.  */
4391   call_stmt = dyn_cast <gcall *> (stmt);
4392   if (!call_stmt)
4393     goto egress;
4394 
4395   cg_edge = id->dst_node->get_edge (stmt);
4396   gcc_checking_assert (cg_edge);
4397   /* First, see if we can figure out what function is being called.
4398      If we cannot, then there is no hope of inlining the function.  */
4399   if (cg_edge->indirect_unknown_callee)
4400     goto egress;
4401   fn = cg_edge->callee->decl;
4402   gcc_checking_assert (fn);
4403 
4404   /* If FN is a declaration of a function in a nested scope that was
4405      globally declared inline, we don't set its DECL_INITIAL.
4406      However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4407      C++ front-end uses it for cdtors to refer to their internal
4408      declarations, that are not real functions.  Fortunately those
4409      don't have trees to be saved, so we can tell by checking their
4410      gimple_body.  */
4411   if (!DECL_INITIAL (fn)
4412       && DECL_ABSTRACT_ORIGIN (fn)
4413       && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4414     fn = DECL_ABSTRACT_ORIGIN (fn);
4415 
4416   /* Don't try to inline functions that are not well-suited to inlining.  */
4417   if (cg_edge->inline_failed)
4418     {
4419       reason = cg_edge->inline_failed;
4420       /* If this call was originally indirect, we do not want to emit any
4421 	 inlining related warnings or sorry messages because there are no
4422 	 guarantees regarding those.  */
4423       if (cg_edge->indirect_inlining_edge)
4424 	goto egress;
4425 
4426       if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4427           /* For extern inline functions that get redefined we have always
4428 	     silently ignored the always_inline flag.  Better behavior would
4429 	     be to be able to keep both bodies and use the extern inline body
4430 	     for inlining, but we can't do that because frontends overwrite
4431 	     the body.  */
4432 	  && !cg_edge->callee->local.redefined_extern_inline
4433 	  /* During early inline pass, report only when optimization is
4434 	     not turned on.  */
4435 	  && (symtab->global_info_ready
4436 	      || !optimize
4437 	      || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4438 	  /* PR 20090218-1_0.c. Body can be provided by another module. */
4439 	  && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4440 	{
4441 	  error ("inlining failed in call to always_inline %q+F: %s", fn,
4442 		 cgraph_inline_failed_string (reason));
4443 	  if (gimple_location (stmt) != UNKNOWN_LOCATION)
4444 	    inform (gimple_location (stmt), "called from here");
4445 	  else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4446 	    inform (DECL_SOURCE_LOCATION (cfun->decl),
4447                    "called from this function");
4448 	}
4449       else if (warn_inline
4450 	       && DECL_DECLARED_INLINE_P (fn)
4451 	       && !DECL_NO_INLINE_WARNING_P (fn)
4452 	       && !DECL_IN_SYSTEM_HEADER (fn)
4453 	       && reason != CIF_UNSPECIFIED
4454 	       && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4455 	       /* Do not warn about not inlined recursive calls.  */
4456 	       && !cg_edge->recursive_p ()
4457 	       /* Avoid warnings during early inline pass. */
4458 	       && symtab->global_info_ready)
4459 	{
4460 	  if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4461 		       fn, _(cgraph_inline_failed_string (reason))))
4462 	    {
4463 	      if (gimple_location (stmt) != UNKNOWN_LOCATION)
4464 		inform (gimple_location (stmt), "called from here");
4465 	      else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4466 		inform (DECL_SOURCE_LOCATION (cfun->decl),
4467                        "called from this function");
4468 	    }
4469 	}
4470       goto egress;
4471     }
4472   fn = cg_edge->callee->decl;
4473   cg_edge->callee->get_untransformed_body ();
4474 
4475   if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4476     cg_edge->callee->verify ();
4477 
4478   /* We will be inlining this callee.  */
4479   id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4480   id->assign_stmts.create (0);
4481 
4482   /* Update the callers EH personality.  */
4483   if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4484     DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4485       = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
4486 
4487   /* Split the block holding the GIMPLE_CALL.  */
4488   e = split_block (bb, stmt);
4489   bb = e->src;
4490   return_block = e->dest;
4491   remove_edge (e);
4492 
4493   /* split_block splits after the statement; work around this by
4494      moving the call into the second block manually.  Not pretty,
4495      but seems easier than doing the CFG manipulation by hand
4496      when the GIMPLE_CALL is in the last statement of BB.  */
4497   stmt_gsi = gsi_last_bb (bb);
4498   gsi_remove (&stmt_gsi, false);
4499 
4500   /* If the GIMPLE_CALL was in the last statement of BB, it may have
4501      been the source of abnormal edges.  In this case, schedule
4502      the removal of dead abnormal edges.  */
4503   gsi = gsi_start_bb (return_block);
4504   if (gsi_end_p (gsi))
4505     {
4506       gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4507       purge_dead_abnormal_edges = true;
4508     }
4509   else
4510     {
4511       gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4512       purge_dead_abnormal_edges = false;
4513     }
4514 
4515   stmt_gsi = gsi_start_bb (return_block);
4516 
4517   /* Build a block containing code to initialize the arguments, the
4518      actual inline expansion of the body, and a label for the return
4519      statements within the function to jump to.  The type of the
4520      statement expression is the return type of the function call.
4521      ???  If the call does not have an associated block then we will
4522      remap all callee blocks to NULL, effectively dropping most of
4523      its debug information.  This should only happen for calls to
4524      artificial decls inserted by the compiler itself.  We need to
4525      either link the inlined blocks into the caller block tree or
4526      not refer to them in any way to not break GC for locations.  */
4527   if (gimple_block (stmt))
4528     {
4529       id->block = make_node (BLOCK);
4530       BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4531       BLOCK_SOURCE_LOCATION (id->block)
4532 	= LOCATION_LOCUS (gimple_location (stmt));
4533       prepend_lexical_block (gimple_block (stmt), id->block);
4534     }
4535 
4536   /* Local declarations will be replaced by their equivalents in this
4537      map.  */
4538   st = id->decl_map;
4539   id->decl_map = new hash_map<tree, tree>;
4540   dst = id->debug_map;
4541   id->debug_map = NULL;
4542 
4543   /* Record the function we are about to inline.  */
4544   id->src_fn = fn;
4545   id->src_node = cg_edge->callee;
4546   id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4547   id->call_stmt = call_stmt;
4548 
4549   /* If the src function contains an IFN_VA_ARG, then so will the dst
4550      function after inlining.  */
4551   if ((id->src_cfun->curr_properties & PROP_gimple_lva) == 0)
4552     {
4553       struct function *dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4554       dst_cfun->curr_properties &= ~PROP_gimple_lva;
4555     }
4556 
4557   gcc_assert (!id->src_cfun->after_inlining);
4558 
4559   id->entry_bb = bb;
4560   if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4561     {
4562       gimple_stmt_iterator si = gsi_last_bb (bb);
4563       gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4564       						   NOT_TAKEN),
4565 			GSI_NEW_STMT);
4566     }
4567   initialize_inlined_parameters (id, stmt, fn, bb);
4568 
4569   if (DECL_INITIAL (fn))
4570     {
4571       if (gimple_block (stmt))
4572 	{
4573 	  tree *var;
4574 
4575 	  prepend_lexical_block (id->block,
4576 				 remap_blocks (DECL_INITIAL (fn), id));
4577 	  gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4578 			       && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4579 				   == NULL_TREE));
4580 	  /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4581 	     otherwise for DWARF DW_TAG_formal_parameter will not be children of
4582 	     DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4583 	     under it.  The parameters can be then evaluated in the debugger,
4584 	     but don't show in backtraces.  */
4585 	  for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4586 	    if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4587 	      {
4588 		tree v = *var;
4589 		*var = TREE_CHAIN (v);
4590 		TREE_CHAIN (v) = BLOCK_VARS (id->block);
4591 		BLOCK_VARS (id->block) = v;
4592 	      }
4593 	    else
4594 	      var = &TREE_CHAIN (*var);
4595 	}
4596       else
4597 	remap_blocks_to_null (DECL_INITIAL (fn), id);
4598     }
4599 
4600   /* Return statements in the function body will be replaced by jumps
4601      to the RET_LABEL.  */
4602   gcc_assert (DECL_INITIAL (fn));
4603   gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4604 
4605   /* Find the LHS to which the result of this call is assigned.  */
4606   return_slot = NULL;
4607   if (gimple_call_lhs (stmt))
4608     {
4609       modify_dest = gimple_call_lhs (stmt);
4610 
4611       /* Remember where to copy returned bounds.  */
4612       if (gimple_call_with_bounds_p (stmt)
4613 	  && TREE_CODE (modify_dest) == SSA_NAME)
4614 	{
4615 	  gcall *retbnd = chkp_retbnd_call_by_val (modify_dest);
4616 	  if (retbnd)
4617 	    {
4618 	      return_bounds = gimple_call_lhs (retbnd);
4619 	      /* If the returned bounds are not used then just
4620 		 remove the unused call.  */
4621 	      if (!return_bounds)
4622 		{
4623 		  gimple_stmt_iterator iter = gsi_for_stmt (retbnd);
4624 		  gsi_remove (&iter, true);
4625 		}
4626 	    }
4627 	}
4628 
4629       /* The function which we are inlining might not return a value,
4630 	 in which case we should issue a warning that the function
4631 	 does not return a value.  In that case the optimizers will
4632 	 see that the variable to which the value is assigned was not
4633 	 initialized.  We do not want to issue a warning about that
4634 	 uninitialized variable.  */
4635       if (DECL_P (modify_dest))
4636 	TREE_NO_WARNING (modify_dest) = 1;
4637 
4638       if (gimple_call_return_slot_opt_p (call_stmt))
4639 	{
4640 	  return_slot = modify_dest;
4641 	  modify_dest = NULL;
4642 	}
4643     }
4644   else
4645     modify_dest = NULL;
4646 
4647   /* If we are inlining a call to the C++ operator new, we don't want
4648      to use type based alias analysis on the return value.  Otherwise
4649      we may get confused if the compiler sees that the inlined new
4650      function returns a pointer which was just deleted.  See bug
4651      33407.  */
4652   if (DECL_IS_OPERATOR_NEW (fn))
4653     {
4654       return_slot = NULL;
4655       modify_dest = NULL;
4656     }
4657 
4658   /* Declare the return variable for the function.  */
4659   use_retvar = declare_return_variable (id, return_slot, modify_dest,
4660 					return_bounds, bb);
4661 
4662   /* Add local vars in this inlined callee to caller.  */
4663   add_local_variables (id->src_cfun, cfun, id);
4664 
4665   if (dump_file && (dump_flags & TDF_DETAILS))
4666     {
4667       fprintf (dump_file, "Inlining ");
4668       print_generic_expr (dump_file, id->src_fn, 0);
4669       fprintf (dump_file, " to ");
4670       print_generic_expr (dump_file, id->dst_fn, 0);
4671       fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4672     }
4673 
4674   /* This is it.  Duplicate the callee body.  Assume callee is
4675      pre-gimplified.  Note that we must not alter the caller
4676      function in any way before this point, as this CALL_EXPR may be
4677      a self-referential call; if we're calling ourselves, we need to
4678      duplicate our body before altering anything.  */
4679   copy_body (id, cg_edge->callee->count,
4680   	     GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
4681 	     bb, return_block, NULL);
4682 
4683   reset_debug_bindings (id, stmt_gsi);
4684 
4685   /* Reset the escaped solution.  */
4686   if (cfun->gimple_df)
4687     pt_solution_reset (&cfun->gimple_df->escaped);
4688 
4689   /* Clean up.  */
4690   if (id->debug_map)
4691     {
4692       delete id->debug_map;
4693       id->debug_map = dst;
4694     }
4695   delete id->decl_map;
4696   id->decl_map = st;
4697 
4698   /* Unlink the call's virtual operands before replacing it.  */
4699   unlink_stmt_vdef (stmt);
4700   if (gimple_vdef (stmt)
4701       && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4702     release_ssa_name (gimple_vdef (stmt));
4703 
4704   /* If the inlined function returns a result that we care about,
4705      substitute the GIMPLE_CALL with an assignment of the return
4706      variable to the LHS of the call.  That is, if STMT was
4707      'a = foo (...)', substitute the call with 'a = USE_RETVAR'.  */
4708   if (use_retvar && gimple_call_lhs (stmt))
4709     {
4710       gimple *old_stmt = stmt;
4711       stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4712       gsi_replace (&stmt_gsi, stmt, false);
4713       maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4714 
4715       /* Copy bounds if we copy a structure with bounds.  */
4716       if (chkp_function_instrumented_p (id->dst_fn)
4717 	  && !BOUNDED_P (use_retvar)
4718 	  && chkp_type_has_pointer (TREE_TYPE (use_retvar)))
4719 	id->assign_stmts.safe_push (stmt);
4720     }
4721   else
4722     {
4723       /* Handle the case of inlining a function with no return
4724 	 statement, which causes the return value to become undefined.  */
4725       if (gimple_call_lhs (stmt)
4726 	  && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4727 	{
4728 	  tree name = gimple_call_lhs (stmt);
4729 	  tree var = SSA_NAME_VAR (name);
4730 	  tree def = var ? ssa_default_def (cfun, var) : NULL;
4731 
4732 	  if (def)
4733 	    {
4734 	      /* If the variable is used undefined, make this name
4735 		 undefined via a move.  */
4736 	      stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4737 	      gsi_replace (&stmt_gsi, stmt, true);
4738 	    }
4739 	  else
4740 	    {
4741 	      if (!var)
4742 		{
4743 		  var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
4744 		  SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
4745 		}
4746 	      /* Otherwise make this variable undefined.  */
4747 	      gsi_remove (&stmt_gsi, true);
4748 	      set_ssa_default_def (cfun, var, name);
4749 	      SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4750 	    }
4751 	}
4752       else
4753         gsi_remove (&stmt_gsi, true);
4754     }
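
  /* Illustrative sketch of the two cases above: for

       x_1 = foo ();

     an inlined foo () that produces a value leaves 'x_1 = USE_RETVAR'
     in place of the call, while an inlined foo () that falls off its
     end leaves x_1 as a default definition of its underlying variable,
     i.e. effectively undefined.  */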
4755 
4756   /* Put returned bounds into the correct place if required.  */
4757   if (return_bounds)
4758     {
4759       gimple *old_stmt = SSA_NAME_DEF_STMT (return_bounds);
4760       gimple *new_stmt = gimple_build_assign (return_bounds, id->retbnd);
4761       gimple_stmt_iterator bnd_gsi = gsi_for_stmt (old_stmt);
4762       unlink_stmt_vdef (old_stmt);
4763       gsi_replace (&bnd_gsi, new_stmt, false);
4764       maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt);
4765       cgraph_update_edges_for_call_stmt (old_stmt,
4766 					 gimple_call_fndecl (old_stmt),
4767 					 new_stmt);
4768     }
4769 
4770   if (purge_dead_abnormal_edges)
4771     {
4772       gimple_purge_dead_eh_edges (return_block);
4773       gimple_purge_dead_abnormal_call_edges (return_block);
4774     }
4775 
4776   /* If the value of the new expression is ignored, that's OK.  We
4777      don't warn about this for CALL_EXPRs, so we shouldn't warn about
4778      the equivalent inlined version either.  */
4779   if (is_gimple_assign (stmt))
4780     {
4781       gcc_assert (gimple_assign_single_p (stmt)
4782 		  || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4783       TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4784     }
4785 
4786   /* Copy bounds for all generated assigns that need it.  */
4787   for (i = 0; i < id->assign_stmts.length (); i++)
4788     chkp_copy_bounds_for_assign (id->assign_stmts[i], cg_edge);
4789   id->assign_stmts.release ();
4790 
4791   /* Output the inlining info for this abstract function, since it has been
4792      inlined.  If we don't do this now, we can lose the information about the
4793      variables in the function when the blocks get blown away as soon as we
4794      remove the cgraph node.  */
4795   if (gimple_block (stmt))
4796     (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4797 
4798   /* Update callgraph if needed.  */
4799   cg_edge->callee->remove ();
4800 
4801   id->block = NULL_TREE;
4802   successfully_inlined = true;
4803 
4804  egress:
4805   input_location = saved_location;
4806   return successfully_inlined;
4807 }
4808 
4809 /* Expand call statements reachable from STMT_P.
4810    We can only have CALL_EXPRs as the "toplevel" tree code or nested
4811    in a MODIFY_EXPR.  */
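/* For example (illustrative), both of these GIMPLE calls are candidates:

     foo (x);		<- call statement with no LHS
     y = foo (x);	<- call whose result is assigned

   Internal calls (IFN_*) are skipped below.  */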
4812 
4813 static bool
4814 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4815 {
4816   gimple_stmt_iterator gsi;
4817   bool inlined = false;
4818 
4819   for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
4820     {
4821       gimple *stmt = gsi_stmt (gsi);
4822       gsi_prev (&gsi);
4823 
4824       if (is_gimple_call (stmt)
4825 	  && !gimple_call_internal_p (stmt))
4826 	inlined |= expand_call_inline (bb, stmt, id);
4827     }
4828 
4829   return inlined;
4830 }
4831 
4832 
4833 /* Walk all basic blocks created after FIRST and try to fold every statement
4834    in the STATEMENTS pointer set.  */
4835 
4836 static void
4837 fold_marked_statements (int first, hash_set<gimple *> *statements)
4838 {
4839   for (; first < n_basic_blocks_for_fn (cfun); first++)
4840     if (BASIC_BLOCK_FOR_FN (cfun, first))
4841       {
4842         gimple_stmt_iterator gsi;
4843 
4844 	for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4845 	     !gsi_end_p (gsi);
4846 	     gsi_next (&gsi))
4847 	  if (statements->contains (gsi_stmt (gsi)))
4848 	    {
4849 	      gimple *old_stmt = gsi_stmt (gsi);
4850 	      tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4851 
4852 	      if (old_decl && DECL_BUILT_IN (old_decl))
4853 		{
4854 		  /* Folding builtins can create multiple instructions,
4855 		     we need to look at all of them.  */
4856 		  gimple_stmt_iterator i2 = gsi;
4857 		  gsi_prev (&i2);
4858 		  if (fold_stmt (&gsi))
4859 		    {
4860 		      gimple *new_stmt;
4861 		      /* If a builtin at the end of a bb folded into nothing,
4862 			 the following loop won't work.  */
4863 		      if (gsi_end_p (gsi))
4864 			{
4865 			  cgraph_update_edges_for_call_stmt (old_stmt,
4866 							     old_decl, NULL);
4867 			  break;
4868 			}
4869 		      if (gsi_end_p (i2))
4870 			i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4871 		      else
4872 			gsi_next (&i2);
4873 		      while (1)
4874 			{
4875 			  new_stmt = gsi_stmt (i2);
4876 			  update_stmt (new_stmt);
4877 			  cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4878 							     new_stmt);
4879 
4880 			  if (new_stmt == gsi_stmt (gsi))
4881 			    {
4882 			      /* It is okay to check only the very last of
4883 				 these statements.  If it is a throwing
4884 				 statement nothing will change.  If it isn't,
4885 				 this can remove EH edges.  The only way this
4886 				 could be wrong is if some intermediate stmts
4887 				 throw but the last one doesn't; that would
4888 				 mean we'd have to split the block, which we
4889 				 can't do here and we'd lose anyway.  And as
4890 				 builtins probably never throw, this is all
4891 				 moot anyway.  */
4892 			      if (maybe_clean_or_replace_eh_stmt (old_stmt,
4893 								  new_stmt))
4894 				gimple_purge_dead_eh_edges (
4895 				  BASIC_BLOCK_FOR_FN (cfun, first));
4896 			      break;
4897 			    }
4898 			  gsi_next (&i2);
4899 			}
4900 		    }
4901 		}
4902 	      else if (fold_stmt (&gsi))
4903 		{
4904 		  /* Re-read the statement from GSI as fold_stmt() may
4905 		     have changed it.  */
4906 		  gimple *new_stmt = gsi_stmt (gsi);
4907 		  update_stmt (new_stmt);
4908 
4909 		  if (is_gimple_call (old_stmt)
4910 		      || is_gimple_call (new_stmt))
4911 		    cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4912 						       new_stmt);
4913 
4914 		  if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4915 		    gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
4916 								    first));
4917 		}
4918 	    }
4919       }
4920 }
4921 
4922 /* Expand calls to inline functions in the body of FN.  */
4923 
4924 unsigned int
4925 optimize_inline_calls (tree fn)
4926 {
4927   copy_body_data id;
4928   basic_block bb;
4929   int last = n_basic_blocks_for_fn (cfun);
4930   bool inlined_p = false;
4931 
4932   /* Clear out ID.  */
4933   memset (&id, 0, sizeof (id));
4934 
4935   id.src_node = id.dst_node = cgraph_node::get (fn);
4936   gcc_assert (id.dst_node->definition);
4937   id.dst_fn = fn;
4938   /* Or any functions that aren't finished yet.  */
4939   if (current_function_decl)
4940     id.dst_fn = current_function_decl;
4941 
4942   id.copy_decl = copy_decl_maybe_to_var;
4943   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4944   id.transform_new_cfg = false;
4945   id.transform_return_to_modify = true;
4946   id.transform_parameter = true;
4947   id.transform_lang_insert_block = NULL;
4948   id.statements_to_fold = new hash_set<gimple *>;
4949 
4950   push_gimplify_context ();
4951 
4952   /* We make no attempts to keep dominance info up-to-date.  */
4953   free_dominance_info (CDI_DOMINATORS);
4954   free_dominance_info (CDI_POST_DOMINATORS);
4955 
4956   /* Register specific gimple functions.  */
4957   gimple_register_cfg_hooks ();
4958 
4959   /* Reach the trees by walking over the CFG, and note the
4960      enclosing basic-blocks in the call edges.  */
4961   /* We walk the blocks going forward, because inlined function bodies
4962      will split the block containing the call, and the new blocks will
4963      follow it; we'll trudge through them, processing their CALL_EXPRs
4964      along the way.  */
4965   FOR_EACH_BB_FN (bb, cfun)
4966     inlined_p |= gimple_expand_calls_inline (bb, &id);
4967 
4968   pop_gimplify_context (NULL);
4969 
4970   if (flag_checking)
4971     {
4972       struct cgraph_edge *e;
4973 
4974       id.dst_node->verify ();
4975 
4976       /* Double check that we inlined everything we are supposed to inline.  */
4977       for (e = id.dst_node->callees; e; e = e->next_callee)
4978 	gcc_assert (e->inline_failed);
4979     }
4980 
4981   /* Fold queued statements.  */
4982   fold_marked_statements (last, id.statements_to_fold);
4983   delete id.statements_to_fold;
4984 
4985   gcc_assert (!id.debug_stmts.exists ());
4986 
4987   /* If we didn't inline into the function there is nothing to do.  */
4988   if (!inlined_p)
4989     return 0;
4990 
4991   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
4992   number_blocks (fn);
4993 
4994   delete_unreachable_blocks_update_callgraph (&id);
4995   if (flag_checking)
4996     id.dst_node->verify ();
4997 
4998   /* It would be nice to check SSA/CFG/statement consistency here, but it is
4999      not possible yet - the IPA passes might make various functions not
5000      throw, and they don't care to proactively update local EH info.  This is
5001      done later in the fixup_cfg pass, which also executes the verification.  */
5002   return (TODO_update_ssa
5003 	  | TODO_cleanup_cfg
5004 	  | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5005 	  | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5006 	  | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5007 	     ? TODO_rebuild_frequencies : 0));
5008 }
5009 
5010 /* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */
5011 
5012 tree
5013 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5014 {
5015   enum tree_code code = TREE_CODE (*tp);
5016   enum tree_code_class cl = TREE_CODE_CLASS (code);
5017 
5018   /* We make copies of most nodes.  */
5019   if (IS_EXPR_CODE_CLASS (cl)
5020       || code == TREE_LIST
5021       || code == TREE_VEC
5022       || code == TYPE_DECL
5023       || code == OMP_CLAUSE)
5024     {
5025       /* Because the chain gets clobbered when we make a copy, we save it
5026 	 here.  */
5027       tree chain = NULL_TREE, new_tree;
5028 
5029       if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5030 	chain = TREE_CHAIN (*tp);
5031 
5032       /* Copy the node.  */
5033       new_tree = copy_node (*tp);
5034 
5035       *tp = new_tree;
5036 
5037       /* Now, restore the chain, if appropriate.  That will cause
5038 	 walk_tree to walk into the chain as well.  */
5039       if (code == PARM_DECL
5040 	  || code == TREE_LIST
5041 	  || code == OMP_CLAUSE)
5042 	TREE_CHAIN (*tp) = chain;
5043 
5044       /* For now, we don't update BLOCKs when we make copies.  So, we
5045 	 have to nullify all BIND_EXPRs.  */
5046       if (TREE_CODE (*tp) == BIND_EXPR)
5047 	BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5048     }
5049   else if (code == CONSTRUCTOR)
5050     {
5051       /* CONSTRUCTOR nodes need special handling because
5052          we need to duplicate the vector of elements.  */
5053       tree new_tree;
5054 
5055       new_tree = copy_node (*tp);
5056       CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5057       *tp = new_tree;
5058     }
5059   else if (code == STATEMENT_LIST)
5060     /* We used to just abort on STATEMENT_LIST, but we can run into them
5061        with statement-expressions (c++/40975).  */
5062     copy_statement_list (tp);
5063   else if (TREE_CODE_CLASS (code) == tcc_type)
5064     *walk_subtrees = 0;
5065   else if (TREE_CODE_CLASS (code) == tcc_declaration)
5066     *walk_subtrees = 0;
5067   else if (TREE_CODE_CLASS (code) == tcc_constant)
5068     *walk_subtrees = 0;
5069   return NULL_TREE;
5070 }
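
/* Usage sketch (illustrative): copy_tree_r is meant to be passed to
   walk_tree to unshare an expression tree in place, e.g.

     tree t = expr;
     walk_tree (&t, copy_tree_r, NULL, NULL);

   Expression nodes get fresh copies while declarations, types and
   constants stay shared, since they cut the walk short above.  */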
5071 
5072 /* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
5073    information indicating to what new SAVE_EXPR this one should be mapped,
5074    use that one.  Otherwise, create a new node and enter it in ST.  FN is
5075    the function into which the copy will be placed.  */
5076 
5077 static void
5078 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5079 {
5080   tree *n;
5081   tree t;
5082 
5083   /* See if we already encountered this SAVE_EXPR.  */
5084   n = st->get (*tp);
5085 
5086   /* If we didn't already remap this SAVE_EXPR, do so now.  */
5087   if (!n)
5088     {
5089       t = copy_node (*tp);
5090 
5091       /* Remember this SAVE_EXPR.  */
5092       st->put (*tp, t);
5093       /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
5094       st->put (t, t);
5095     }
5096   else
5097     {
5098       /* We've already walked into this SAVE_EXPR; don't do it again.  */
5099       *walk_subtrees = 0;
5100       t = *n;
5101     }
5102 
5103   /* Replace this SAVE_EXPR with the copy.  */
5104   *tp = t;
5105 }
5106 
5107 /* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
5108    label, copies the declaration and enters it in the decl map of the
5109    copy_body_data passed in WI->info.  */
5110 
5111 static tree
5112 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5113 		        bool *handled_ops_p ATTRIBUTE_UNUSED,
5114 		        struct walk_stmt_info *wi)
5115 {
5116   copy_body_data *id = (copy_body_data *) wi->info;
5117   glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5118 
5119   if (stmt)
5120     {
5121       tree decl = gimple_label_label (stmt);
5122 
5123       /* Copy the decl and remember the copy.  */
5124       insert_decl_map (id, decl, id->copy_decl (decl, id));
5125     }
5126 
5127   return NULL_TREE;
5128 }
5129 
5130 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5131 						  struct walk_stmt_info *wi);
5132 
5133 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5134    Using the decl map in the copy_body_data pointed to by WI->info,
5135    remaps all local declarations to appropriate replacements in gimple
5136    operands. */
5137 
5138 static tree
5139 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5140 {
5141   struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5142   copy_body_data *id = (copy_body_data *) wi->info;
5143   hash_map<tree, tree> *st = id->decl_map;
5144   tree *n;
5145   tree expr = *tp;
5146 
5147   /* Only a local declaration (variable or label).  */
5148   if ((TREE_CODE (expr) == VAR_DECL
5149        && !TREE_STATIC (expr))
5150       || TREE_CODE (expr) == LABEL_DECL)
5151     {
5152       /* Lookup the declaration.  */
5153       n = st->get (expr);
5154 
5155       /* If it's there, remap it.  */
5156       if (n)
5157 	*tp = *n;
5158       *walk_subtrees = 0;
5159     }
5160   else if (TREE_CODE (expr) == STATEMENT_LIST
5161 	   || TREE_CODE (expr) == BIND_EXPR
5162 	   || TREE_CODE (expr) == SAVE_EXPR)
5163     gcc_unreachable ();
5164   else if (TREE_CODE (expr) == TARGET_EXPR)
5165     {
5166       /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5167          It's OK for this to happen if it was part of a subtree that
5168          isn't immediately expanded, such as operand 2 of another
5169          TARGET_EXPR.  */
5170       if (!TREE_OPERAND (expr, 1))
5171 	{
5172 	  TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5173 	  TREE_OPERAND (expr, 3) = NULL_TREE;
5174 	}
5175     }
5176   else if (TREE_CODE (expr) == OMP_CLAUSE)
5177     {
5178       /* Before the omplower pass completes, some OMP clauses can contain
5179 	 sequences that are neither copied by gimple_seq_copy nor walked by
5180 	 walk_gimple_seq.  To make copy_gimple_seq_and_replace_locals work even
5181 	 in those situations, we have to copy and process them explicitly.  */
5182 
5183       if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5184 	{
5185 	  gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5186 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5187 	  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5188 	}
5189       else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5190 	{
5191 	  gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5192 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5193 	  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5194 	}
5195       else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5196 	{
5197 	  gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5198 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5199 	  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5200 	  seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5201 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5202 	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5203 	}
5204     }
5205 
5206   /* Keep iterating.  */
5207   return NULL_TREE;
5208 }
5209 
5210 
5211 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5212    Using the decl map in the copy_body_data pointed to by WI->info,
5213    remaps all local declarations to appropriate replacements in gimple
5214    statements. */
5215 
5216 static tree
5217 replace_locals_stmt (gimple_stmt_iterator *gsip,
5218 		     bool *handled_ops_p ATTRIBUTE_UNUSED,
5219 		     struct walk_stmt_info *wi)
5220 {
5221   copy_body_data *id = (copy_body_data *) wi->info;
5222   gimple *gs = gsi_stmt (*gsip);
5223 
5224   if (gbind *stmt = dyn_cast <gbind *> (gs))
5225     {
5226       tree block = gimple_bind_block (stmt);
5227 
5228       if (block)
5229 	{
5230 	  remap_block (&block, id);
5231 	  gimple_bind_set_block (stmt, block);
5232 	}
5233 
5234       /* This will remap a lot of the same decls again, but this should be
5235 	 harmless.  */
5236       if (gimple_bind_vars (stmt))
5237 	{
5238 	  tree old_var, decls = gimple_bind_vars (stmt);
5239 
5240 	  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5241 	    if (!can_be_nonlocal (old_var, id)
5242 		&& ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5243 	      remap_decl (old_var, id);
5244 
5245 	  gcc_checking_assert (!id->prevent_decl_creation_for_types);
5246 	  id->prevent_decl_creation_for_types = true;
5247 	  gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5248 	  id->prevent_decl_creation_for_types = false;
5249 	}
5250     }
5251 
5252   /* Keep iterating.  */
5253   return NULL_TREE;
5254 }
5255 
5256 /* Create a copy of SEQ and remap all decls in it.  */
5257 
5258 static gimple_seq
5259 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5260 {
5261   if (!seq)
5262     return NULL;
5263 
5264   /* If there are any labels in OMP sequences, they can only be referred to
5265      within the sequence itself, so both label marking and remapping can be done here.  */
5266   walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5267   gimple_seq copy = gimple_seq_copy (seq);
5268   walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5269   return copy;
5270 }
5271 
5272 /* Copies everything in SEQ and replaces variables and labels local to
5273    current_function_decl.  */
5274 
5275 gimple_seq
5276 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5277 {
5278   copy_body_data id;
5279   struct walk_stmt_info wi;
5280   gimple_seq copy;
5281 
5282   /* There's nothing to do for NULL_TREE.  */
5283   if (seq == NULL)
5284     return seq;
5285 
5286   /* Set up ID.  */
5287   memset (&id, 0, sizeof (id));
5288   id.src_fn = current_function_decl;
5289   id.dst_fn = current_function_decl;
5290   id.decl_map = new hash_map<tree, tree>;
5291   id.debug_map = NULL;
5292 
5293   id.copy_decl = copy_decl_no_change;
5294   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5295   id.transform_new_cfg = false;
5296   id.transform_return_to_modify = false;
5297   id.transform_parameter = false;
5298   id.transform_lang_insert_block = NULL;
5299 
5300   /* Walk the tree once to find local labels.  */
5301   memset (&wi, 0, sizeof (wi));
5302   hash_set<tree> visited;
5303   wi.info = &id;
5304   wi.pset = &visited;
5305   walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5306 
5307   copy = gimple_seq_copy (seq);
5308 
5309   /* Walk the copy, remapping decls.  */
5310   memset (&wi, 0, sizeof (wi));
5311   wi.info = &id;
5312   walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5313 
5314   /* Clean up.  */
5315   delete id.decl_map;
5316   if (id.debug_map)
5317     delete id.debug_map;
5318   if (id.dependence_map)
5319     {
5320       delete id.dependence_map;
5321       id.dependence_map = NULL;
5322     }
5323 
5324   return copy;
5325 }
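
/* Usage sketch (illustrative): a pass that needs a private duplicate of a
   statement sequence within current_function_decl can do

     gimple_seq copy = copy_gimple_seq_and_replace_locals (seq);

   and insert COPY elsewhere; local variables and labels in COPY are
   fresh decls, so the two sequences do not share them.  */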
5326 
5327 
5328 /* Allow someone to determine if SEARCH is a child of TOP from gdb.  */
5329 
5330 static tree
5331 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5332 {
5333   if (*tp == data)
5334     return (tree) data;
5335   else
5336     return NULL;
5337 }
5338 
5339 DEBUG_FUNCTION bool
5340 debug_find_tree (tree top, tree search)
5341 {
5342   return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5343 }
5344 
5345 
5346 /* Declare the variables created by the inliner.  Add all the variables in
5347    VARS to BLOCK.  */
5348 
5349 static void
5350 declare_inline_vars (tree block, tree vars)
5351 {
5352   tree t;
5353   for (t = vars; t; t = DECL_CHAIN (t))
5354     {
5355       DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5356       gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5357       add_local_decl (cfun, t);
5358     }
5359 
5360   if (block)
5361     BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5362 }
5363 
5364 /* Finish a copy of a decl.  COPY is the new decl corresponding to DECL,
5365    which originally lived in ID->src_fn and will now live in ID->dst_fn.
5366    Set up its flags, abstract origin and DECL_CONTEXT accordingly.  */
5367 
5368 static tree
5369 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5370 {
5371   /* Don't generate debug information for the copy if we wouldn't have
5372      generated it for the original either.  */
5373   DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5374   DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5375 
5376   /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5377      declaration inspired this copy.  */
5378   DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5379 
5380   /* The new variable/label has no RTL, yet.  */
5381   if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5382       && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5383     SET_DECL_RTL (copy, 0);
5384 
5385   /* These args would always appear unused, if not for this.  */
5386   TREE_USED (copy) = 1;
5387 
5388   /* Set the context for the new declaration.  */
5389   if (!DECL_CONTEXT (decl))
5390     /* Globals stay global.  */
5391     ;
5392   else if (DECL_CONTEXT (decl) != id->src_fn)
5393     /* Things that weren't in the scope of the function we're inlining
5394        from aren't in the scope we're inlining to, either.  */
5395     ;
5396   else if (TREE_STATIC (decl))
5397     /* Function-scoped static variables should stay in the original
5398        function.  */
5399     ;
5400   else
5401     /* Ordinary automatic local variables are now in the scope of the
5402        new function.  */
5403     DECL_CONTEXT (copy) = id->dst_fn;
5404 
5405   return copy;
5406 }
5407 
5408 static tree
5409 copy_decl_to_var (tree decl, copy_body_data *id)
5410 {
5411   tree copy, type;
5412 
5413   gcc_assert (TREE_CODE (decl) == PARM_DECL
5414 	      || TREE_CODE (decl) == RESULT_DECL);
5415 
5416   type = TREE_TYPE (decl);
5417 
5418   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5419 		     VAR_DECL, DECL_NAME (decl), type);
5420   if (DECL_PT_UID_SET_P (decl))
5421     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5422   TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5423   TREE_READONLY (copy) = TREE_READONLY (decl);
5424   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5425   DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5426 
5427   return copy_decl_for_dup_finish (id, decl, copy);
5428 }
5429 
5430 /* Like copy_decl_to_var, but create a return slot object instead of a
5431    pointer variable for return by invisible reference.  */
5432 
5433 static tree
5434 copy_result_decl_to_var (tree decl, copy_body_data *id)
5435 {
5436   tree copy, type;
5437 
5438   gcc_assert (TREE_CODE (decl) == PARM_DECL
5439 	      || TREE_CODE (decl) == RESULT_DECL);
5440 
5441   type = TREE_TYPE (decl);
5442   if (DECL_BY_REFERENCE (decl))
5443     type = TREE_TYPE (type);
5444 
5445   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5446 		     VAR_DECL, DECL_NAME (decl), type);
5447   if (DECL_PT_UID_SET_P (decl))
5448     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5449   TREE_READONLY (copy) = TREE_READONLY (decl);
5450   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5451   if (!DECL_BY_REFERENCE (decl))
5452     {
5453       TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5454       DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5455     }
5456 
5457   return copy_decl_for_dup_finish (id, decl, copy);
5458 }
5459 
5460 tree
5461 copy_decl_no_change (tree decl, copy_body_data *id)
5462 {
5463   tree copy;
5464 
5465   copy = copy_node (decl);
5466 
5467   /* The COPY is not abstract; it will be generated in DST_FN.  */
5468   DECL_ABSTRACT_P (copy) = false;
5469   lang_hooks.dup_lang_specific_decl (copy);
5470 
5471   /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5472      been taken; it's for internal bookkeeping in expand_goto_internal.  */
5473   if (TREE_CODE (copy) == LABEL_DECL)
5474     {
5475       TREE_ADDRESSABLE (copy) = 0;
5476       LABEL_DECL_UID (copy) = -1;
5477     }
5478 
5479   return copy_decl_for_dup_finish (id, decl, copy);
5480 }
5481 
5482 static tree
5483 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5484 {
5485   if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5486     return copy_decl_to_var (decl, id);
5487   else
5488     return copy_decl_no_change (decl, id);
5489 }
5490 
5491 /* Return a copy of the function's argument tree.  */
5492 static tree
5493 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5494 			       bitmap args_to_skip, tree *vars)
5495 {
5496   tree arg, *parg;
5497   tree new_parm = NULL;
5498   int i = 0;
5499 
5500   parg = &new_parm;
5501 
5502   for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5503     if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5504       {
5505         tree new_tree = remap_decl (arg, id);
5506 	if (TREE_CODE (new_tree) != PARM_DECL)
5507 	  new_tree = id->copy_decl (arg, id);
5508         lang_hooks.dup_lang_specific_decl (new_tree);
5509         *parg = new_tree;
5510 	parg = &DECL_CHAIN (new_tree);
5511       }
5512     else if (!id->decl_map->get (arg))
5513       {
5514 	/* Make an equivalent VAR_DECL.  If the argument is used as a
5515 	   temporary variable later in the function, its uses will be
5516 	   replaced by this local variable.  */
5517 	tree var = copy_decl_to_var (arg, id);
5518 	insert_decl_map (id, arg, var);
5519         /* Declare this new variable.  */
5520         DECL_CHAIN (var) = *vars;
5521         *vars = var;
5522       }
5523   return new_parm;
5524 }
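
/* Illustrative example: versioning 'int f (int a, int b, int c)' with
   ARGS_TO_SKIP = {1} yields a clone whose DECL_ARGUMENTS are 'a, c';
   'b' is mapped to a fresh VAR_DECL chained onto *VARS so that any
   remaining uses of it in the copied body still resolve to a decl.  */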
5525 
5526 /* Return a copy of the function's static chain.  */
5527 static tree
5528 copy_static_chain (tree static_chain, copy_body_data * id)
5529 {
5530   tree *chain_copy, *pvar;
5531 
5532   chain_copy = &static_chain;
5533   for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5534     {
5535       tree new_tree = remap_decl (*pvar, id);
5536       lang_hooks.dup_lang_specific_decl (new_tree);
5537       DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5538       *pvar = new_tree;
5539     }
5540   return static_chain;
5541 }
5542 
5543 /* Return true if the function is allowed to be versioned.
5544    This is a guard for the versioning functionality.  */
5545 
5546 bool
5547 tree_versionable_function_p (tree fndecl)
5548 {
5549   return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5550 	  && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
5551 }
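
/* For example (illustrative), a function declared

     __attribute__ ((noclone)) int f (int);

   is never versioned, nor is one rejected by copy_forbidden, such as a
   function that receives a non-local goto.  */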
5552 
5553 /* Delete all unreachable basic blocks and update callgraph.
5554    Doing so is somewhat nontrivial because we need to update all clones and
5555    remove inlined functions that become unreachable.  */
5556 
5557 static bool
5558 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5559 {
5560   bool changed = false;
5561   basic_block b, next_bb;
5562 
5563   find_unreachable_blocks ();
5564 
5565   /* Delete all unreachable basic blocks.  */
5566 
5567   for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
5568        != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
5569     {
5570       next_bb = b->next_bb;
5571 
5572       if (!(b->flags & BB_REACHABLE))
5573 	{
5574           gimple_stmt_iterator bsi;
5575 
5576           for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5577 	    {
5578 	      struct cgraph_edge *e;
5579 	      struct cgraph_node *node;
5580 
5581 	      id->dst_node->remove_stmt_references (gsi_stmt (bsi));
5582 
5583 	      if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5584 		  &&(e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
5585 		{
5586 		  if (!e->inline_failed)
5587 		    e->callee->remove_symbol_and_inline_clones (id->dst_node);
5588 		  else
5589 		    e->remove ();
5590 		}
5591 	      if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5592 		  && id->dst_node->clones)
5593 		for (node = id->dst_node->clones; node != id->dst_node;)
5594 		  {
5595 		    node->remove_stmt_references (gsi_stmt (bsi));
5596 		    if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5597 			&& (e = node->get_edge (gsi_stmt (bsi))) != NULL)
5598 		      {
5599 			if (!e->inline_failed)
5600 			  e->callee->remove_symbol_and_inline_clones (id->dst_node);
5601 			else
5602 			  e->remove ();
5603 		      }
5604 
5605 		    if (node->clones)
5606 		      node = node->clones;
5607 		    else if (node->next_sibling_clone)
5608 		      node = node->next_sibling_clone;
5609 		    else
5610 		      {
5611 			while (node != id->dst_node && !node->next_sibling_clone)
5612 			  node = node->clone_of;
5613 			if (node != id->dst_node)
5614 			  node = node->next_sibling_clone;
5615 		      }
5616 		  }
5617 	    }
5618 	  delete_basic_block (b);
5619 	  changed = true;
5620 	}
5621     }
5622 
5623   return changed;
5624 }
5625 
5626 /* Update clone info after duplication.  */
5627 
5628 static void
5629 update_clone_info (copy_body_data * id)
5630 {
5631   struct cgraph_node *node;
5632   if (!id->dst_node->clones)
5633     return;
5634   for (node = id->dst_node->clones; node != id->dst_node;)
5635     {
5636       /* First update replace maps to match the new body.  */
5637       if (node->clone.tree_map)
5638         {
5639 	  unsigned int i;
5640           for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5641 	    {
5642 	      struct ipa_replace_map *replace_info;
5643 	      replace_info = (*node->clone.tree_map)[i];
5644 	      walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5645 	      walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5646 	    }
5647 	}
5648       if (node->clones)
5649 	node = node->clones;
5650       else if (node->next_sibling_clone)
5651 	node = node->next_sibling_clone;
5652       else
5653 	{
5654 	  while (node != id->dst_node && !node->next_sibling_clone)
5655 	    node = node->clone_of;
5656 	  if (node != id->dst_node)
5657 	    node = node->next_sibling_clone;
5658 	}
5659     }
5660 }
5661 
5662 /* Create a copy of a function's tree.
5663    OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5664    of the original function and the new copied function
5665    respectively.  In case we want to replace a DECL
5666    tree with another tree while duplicating the function's
5667    body, TREE_MAP represents the mapping between these
5668    trees. If UPDATE_CLONES is set, the call_stmt fields
5669    of edges of clones of the function will be updated.
5670 
5671    If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
5672    from the new version.
5673    If SKIP_RETURN is true, the new version will return void.
5674    If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5675    If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5676 */
5677 void
5678 tree_function_versioning (tree old_decl, tree new_decl,
5679 			  vec<ipa_replace_map *, va_gc> *tree_map,
5680 			  bool update_clones, bitmap args_to_skip,
5681 			  bool skip_return, bitmap blocks_to_copy,
5682 			  basic_block new_entry)
5683 {
5684   struct cgraph_node *old_version_node;
5685   struct cgraph_node *new_version_node;
5686   copy_body_data id;
5687   tree p;
5688   unsigned i;
5689   struct ipa_replace_map *replace_info;
5690   basic_block old_entry_block, bb;
5691   auto_vec<gimple *, 10> init_stmts;
5692   tree vars = NULL_TREE;
5693   bitmap debug_args_to_skip = args_to_skip;
5694 
5695   gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5696 	      && TREE_CODE (new_decl) == FUNCTION_DECL);
5697   DECL_POSSIBLY_INLINED (old_decl) = 1;
5698 
5699   old_version_node = cgraph_node::get (old_decl);
5700   gcc_checking_assert (old_version_node);
5701   new_version_node = cgraph_node::get (new_decl);
5702   gcc_checking_assert (new_version_node);
5703 
5704   /* Copy over debug args.  */
5705   if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5706     {
5707       vec<tree, va_gc> **new_debug_args, **old_debug_args;
5708       gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5709       DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5710       old_debug_args = decl_debug_args_lookup (old_decl);
5711       if (old_debug_args)
5712 	{
5713 	  new_debug_args = decl_debug_args_insert (new_decl);
5714 	  *new_debug_args = vec_safe_copy (*old_debug_args);
5715 	}
5716     }
5717 
5718   /* Output the inlining info for this abstract function, since it has been
5719      inlined.  If we don't do this now, we can lose the information about the
5720      variables in the function when the blocks get blown away as soon as we
5721      remove the cgraph node.  */
5722   (*debug_hooks->outlining_inline_function) (old_decl);
5723 
5724   DECL_ARTIFICIAL (new_decl) = 1;
5725   DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5726   if (DECL_ORIGIN (old_decl) == old_decl)
5727     old_version_node->used_as_abstract_origin = true;
5728   DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5729 
5730   /* Prepare the data structures for the tree copy.  */
5731   memset (&id, 0, sizeof (id));
5732 
5733   /* Generate a new name for the new version. */
5734   id.statements_to_fold = new hash_set<gimple *>;
5735 
5736   id.decl_map = new hash_map<tree, tree>;
5737   id.debug_map = NULL;
5738   id.src_fn = old_decl;
5739   id.dst_fn = new_decl;
5740   id.src_node = old_version_node;
5741   id.dst_node = new_version_node;
5742   id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5743   id.blocks_to_copy = blocks_to_copy;
5744 
5745   id.copy_decl = copy_decl_no_change;
5746   id.transform_call_graph_edges
5747     = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5748   id.transform_new_cfg = true;
5749   id.transform_return_to_modify = false;
5750   id.transform_parameter = false;
5751   id.transform_lang_insert_block = NULL;
5752 
5753   old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5754     (DECL_STRUCT_FUNCTION (old_decl));
5755   DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5756   DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5757   initialize_cfun (new_decl, old_decl,
5758 		   old_entry_block->count);
5759   if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5760     DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5761       = id.src_cfun->gimple_df->ipa_pta;
5762 
5763   /* Copy the function's static chain.  */
5764   p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5765   if (p)
5766     DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
5767       = copy_static_chain (p, &id);
5768 
5769   /* If there's a tree_map, prepare for substitution.  */
5770   if (tree_map)
5771     for (i = 0; i < tree_map->length (); i++)
5772       {
5773 	gimple *init;
5774 	replace_info = (*tree_map)[i];
5775 	if (replace_info->replace_p)
5776 	  {
5777 	    int parm_num = -1;
5778 	    if (!replace_info->old_tree)
5779 	      {
5780 		int p = replace_info->parm_num;
5781 		tree parm;
5782 		tree req_type, new_type;
5783 
5784 		for (parm = DECL_ARGUMENTS (old_decl); p;
5785 		     parm = DECL_CHAIN (parm))
5786 		  p--;
5787 		replace_info->old_tree = parm;
5788 		parm_num = replace_info->parm_num;
5789 		req_type = TREE_TYPE (parm);
5790 		new_type = TREE_TYPE (replace_info->new_tree);
5791 		if (!useless_type_conversion_p (req_type, new_type))
5792 		  {
5793 		    if (fold_convertible_p (req_type, replace_info->new_tree))
5794 		      replace_info->new_tree
5795 			= fold_build1 (NOP_EXPR, req_type,
5796 				       replace_info->new_tree);
5797 		    else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
5798 		      replace_info->new_tree
5799 			= fold_build1 (VIEW_CONVERT_EXPR, req_type,
5800 				       replace_info->new_tree);
5801 		    else
5802 		      {
5803 			if (dump_file)
5804 			  {
5805 			    fprintf (dump_file, "    const ");
5806 			    print_generic_expr (dump_file,
5807 						replace_info->new_tree, 0);
5808 			    fprintf (dump_file,
5809 				     "  can't be converted to param ");
5810 			    print_generic_expr (dump_file, parm, 0);
5811 			    fprintf (dump_file, "\n");
5812 			  }
5813 			replace_info->old_tree = NULL;
5814 		      }
5815 		  }
5816 	      }
5817 	    else
5818 	      gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5819 	    if (replace_info->old_tree)
5820 	      {
5821 		init = setup_one_parameter (&id, replace_info->old_tree,
5822 					    replace_info->new_tree, id.src_fn,
5823 					    NULL,
5824 					    &vars);
5825 		if (init)
5826 		  init_stmts.safe_push (init);
5827 		if (MAY_HAVE_DEBUG_STMTS && args_to_skip)
5828 		  {
5829 		    if (parm_num == -1)
5830 		      {
5831 			tree parm;
5832 			int p;
5833 			for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
5834 			     parm = DECL_CHAIN (parm), p++)
5835 			  if (parm == replace_info->old_tree)
5836 			    {
5837 			      parm_num = p;
5838 			      break;
5839 			    }
5840 		      }
5841 		    if (parm_num != -1)
5842 		      {
5843 			if (debug_args_to_skip == args_to_skip)
5844 			  {
5845 			    debug_args_to_skip = BITMAP_ALLOC (NULL);
5846 			    bitmap_copy (debug_args_to_skip, args_to_skip);
5847 			  }
5848 			bitmap_clear_bit (debug_args_to_skip, parm_num);
5849 		      }
5850 		  }
5851 	      }
5852 	  }
5853       }
5854   /* Copy the function's arguments.  */
5855   if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5856     DECL_ARGUMENTS (new_decl)
5857       = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5858 				       args_to_skip, &vars);
5859 
5860   DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5861   BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5862 
5863   declare_inline_vars (DECL_INITIAL (new_decl), vars);
5864 
5865   if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5866     /* Add local vars.  */
5867     add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5868 
5869   if (DECL_RESULT (old_decl) == NULL_TREE)
5870     ;
5871   else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5872     {
5873       DECL_RESULT (new_decl)
5874 	= build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5875 		      RESULT_DECL, NULL_TREE, void_type_node);
5876       DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5877       cfun->returns_struct = 0;
5878       cfun->returns_pcc_struct = 0;
5879     }
5880   else
5881     {
5882       tree old_name;
5883       DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5884       lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5885       if (gimple_in_ssa_p (id.src_cfun)
5886 	  && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5887 	  && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5888 	{
5889 	  tree new_name = make_ssa_name (DECL_RESULT (new_decl));
5890 	  insert_decl_map (&id, old_name, new_name);
5891 	  SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5892 	  set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5893 	}
5894     }
5895 
5896   /* Set up the destination function's loop tree.  */
5897   if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
5898     {
5899       cfun->curr_properties &= ~PROP_loops;
5900       loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
5901       cfun->curr_properties |= PROP_loops;
5902     }
5903 
5904   /* Copy the Function's body.  */
5905   copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5906 	     ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
5907 	     new_entry);
5908 
5909   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
5910   number_blocks (new_decl);
5911 
5912   /* We want to create the BB unconditionally, so that the addition of
5913      debug stmts doesn't affect BB count, which may in the end cause
5914      codegen differences.  */
5915   bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5916   while (init_stmts.length ())
5917     insert_init_stmt (&id, bb, init_stmts.pop ());
5918   update_clone_info (&id);
5919 
5920   /* Remap the nonlocal_goto_save_area, if any.  */
5921   if (cfun->nonlocal_goto_save_area)
5922     {
5923       struct walk_stmt_info wi;
5924 
5925       memset (&wi, 0, sizeof (wi));
5926       wi.info = &id;
5927       walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5928     }
5929 
5930   /* Clean up.  */
5931   delete id.decl_map;
5932   if (id.debug_map)
5933     delete id.debug_map;
5934   free_dominance_info (CDI_DOMINATORS);
5935   free_dominance_info (CDI_POST_DOMINATORS);
5936 
5937   fold_marked_statements (0, id.statements_to_fold);
5938   delete id.statements_to_fold;
5939   delete_unreachable_blocks_update_callgraph (&id);
5940   if (id.dst_node->definition)
5941     cgraph_edge::rebuild_references ();
5942   if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
5943     {
5944       calculate_dominance_info (CDI_DOMINATORS);
5945       fix_loop_structure (NULL);
5946     }
5947   update_ssa (TODO_update_ssa);
5948 
5949   /* After partial cloning we need to rescale frequencies, so they are
5950      within proper range in the cloned function.  */
5951   if (new_entry)
5952     {
5953       struct cgraph_edge *e;
5954       rebuild_frequencies ();
5955 
5956       new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5957       for (e = new_version_node->callees; e; e = e->next_callee)
5958 	{
5959 	  basic_block bb = gimple_bb (e->call_stmt);
5960 	  e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5961 							 bb);
5962 	  e->count = bb->count;
5963 	}
5964       for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5965 	{
5966 	  basic_block bb = gimple_bb (e->call_stmt);
5967 	  e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5968 							 bb);
5969 	  e->count = bb->count;
5970 	}
5971     }
5972 
5973   if (debug_args_to_skip && MAY_HAVE_DEBUG_STMTS)
5974     {
5975       tree parm;
5976       vec<tree, va_gc> **debug_args = NULL;
5977       unsigned int len = 0;
5978       for (parm = DECL_ARGUMENTS (old_decl), i = 0;
5979 	   parm; parm = DECL_CHAIN (parm), i++)
5980 	if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
5981 	  {
5982 	    tree ddecl;
5983 
5984 	    if (debug_args == NULL)
5985 	      {
5986 		debug_args = decl_debug_args_insert (new_decl);
5987 		len = vec_safe_length (*debug_args);
5988 	      }
5989 	    ddecl = make_node (DEBUG_EXPR_DECL);
5990 	    DECL_ARTIFICIAL (ddecl) = 1;
5991 	    TREE_TYPE (ddecl) = TREE_TYPE (parm);
5992 	    DECL_MODE (ddecl) = DECL_MODE (parm);
5993 	    vec_safe_push (*debug_args, DECL_ORIGIN (parm));
5994 	    vec_safe_push (*debug_args, ddecl);
5995 	  }
5996       if (debug_args != NULL)
5997 	{
5998 	  /* On the callee side, add
5999 	     DEBUG D#Y s=> parm
6000 	     DEBUG var => D#Y
6001 	     stmts to the first bb where var is a VAR_DECL created for the
6002 	     optimized-away parameter in the DECL_INITIAL block.  This hints
6003 	     in the debug info that var (whose DECL_ORIGIN is the parm
6004 	     PARM_DECL) is optimized away, but could be looked up at the
6005 	     call site as value of D#X there.  */
6006 	  tree var = vars, vexpr;
6007 	  gimple_stmt_iterator cgsi
6008 	    = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6009 	  gimple *def_temp;
6010 	  var = vars;
6011 	  i = vec_safe_length (*debug_args);
6012 	  do
6013 	    {
6014 	      i -= 2;
6015 	      while (var != NULL_TREE
6016 		     && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6017 		var = TREE_CHAIN (var);
6018 	      if (var == NULL_TREE)
6019 		break;
6020 	      vexpr = make_node (DEBUG_EXPR_DECL);
6021 	      parm = (**debug_args)[i];
6022 	      DECL_ARTIFICIAL (vexpr) = 1;
6023 	      TREE_TYPE (vexpr) = TREE_TYPE (parm);
6024 	      DECL_MODE (vexpr) = DECL_MODE (parm);
6025 	      def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6026 	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6027 	      def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6028 	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6029 	    }
6030 	  while (i > len);
6031 	}
6032     }
6033 
6034   if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
6035     BITMAP_FREE (debug_args_to_skip);
6036   free_dominance_info (CDI_DOMINATORS);
6037   free_dominance_info (CDI_POST_DOMINATORS);
6038 
6039   gcc_assert (!id.debug_stmts.exists ());
6040   pop_cfun ();
6041   return;
6042 }
6043 
6044 /* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
6045    the callee and return the inlined body on success.  */
6046 
6047 tree
6048 maybe_inline_call_in_expr (tree exp)
6049 {
6050   tree fn = get_callee_fndecl (exp);
6051 
6052   /* We can only try to inline "const" functions.  */
6053   if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6054     {
6055       call_expr_arg_iterator iter;
6056       copy_body_data id;
6057       tree param, arg, t;
6058       hash_map<tree, tree> decl_map;
6059 
6060       /* Remap the parameters.  */
6061       for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6062 	   param;
6063 	   param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6064 	decl_map.put (param, arg);
6065 
6066       memset (&id, 0, sizeof (id));
6067       id.src_fn = fn;
6068       id.dst_fn = current_function_decl;
6069       id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6070       id.decl_map = &decl_map;
6071 
6072       id.copy_decl = copy_decl_no_change;
6073       id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6074       id.transform_new_cfg = false;
6075       id.transform_return_to_modify = true;
6076       id.transform_parameter = true;
6077       id.transform_lang_insert_block = NULL;
6078 
6079       /* Make sure not to unshare trees behind the front-end's back
6080 	 since front-end specific mechanisms may rely on sharing.  */
6081       id.regimplify = false;
6082       id.do_not_unshare = true;
6083 
6084       /* We're not inside any EH region.  */
6085       id.eh_lp_nr = 0;
6086 
6087       t = copy_tree_body (&id);
6088 
6089       /* We can only return something suitable for use in a GENERIC
6090 	 expression tree.  */
6091       if (TREE_CODE (t) == MODIFY_EXPR)
6092 	return TREE_OPERAND (t, 1);
6093     }
6094 
6095    return NULL_TREE;
6096 }
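
/* Usage sketch (illustrative): for a GENERIC tree representing 'sq (3)',
   where 'sq' is a "const" function whose saved body is 'return x * x;',
   this yields the tree for '3 * 3' (the right-hand side of the remapped
   MODIFY_EXPR); anything it cannot handle yields NULL_TREE and the
   caller keeps the original CALL_EXPR.  */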
6097 
6098 /* Duplicate a type, fields and all.  */
6099 
6100 tree
6101 build_duplicate_type (tree type)
6102 {
6103   struct copy_body_data id;
6104 
6105   memset (&id, 0, sizeof (id));
6106   id.src_fn = current_function_decl;
6107   id.dst_fn = current_function_decl;
6108   id.src_cfun = cfun;
6109   id.decl_map = new hash_map<tree, tree>;
6110   id.debug_map = NULL;
6111   id.copy_decl = copy_decl_no_change;
6112 
6113   type = remap_type_1 (type, &id);
6114 
6115   delete id.decl_map;
6116   if (id.debug_map)
6117     delete id.debug_map;
6118 
6119   TYPE_CANONICAL (type) = type;
6120 
6121   return type;
6122 }
6123 
6124 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6125    parameters and RESULT_DECL in PARMS and RESULT.  Used by C++ constexpr
6126    evaluation.  */
6127 
6128 tree
6129 copy_fn (tree fn, tree& parms, tree& result)
6130 {
6131   copy_body_data id;
6132   tree param;
6133   hash_map<tree, tree> decl_map;
6134 
6135   tree *p = &parms;
6136   *p = NULL_TREE;
6137 
6138   memset (&id, 0, sizeof (id));
6139   id.src_fn = fn;
6140   id.dst_fn = current_function_decl;
6141   id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6142   id.decl_map = &decl_map;
6143 
6144   id.copy_decl = copy_decl_no_change;
6145   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6146   id.transform_new_cfg = false;
6147   id.transform_return_to_modify = false;
6148   id.transform_parameter = true;
6149   id.transform_lang_insert_block = NULL;
6150 
6151   /* Make sure not to unshare trees behind the front-end's back
6152      since front-end specific mechanisms may rely on sharing.  */
6153   id.regimplify = false;
6154   id.do_not_unshare = true;
6155 
6156   /* We're not inside any EH region.  */
6157   id.eh_lp_nr = 0;
6158 
6159   /* Remap the parameters and result and return them to the caller.  */
6160   for (param = DECL_ARGUMENTS (fn);
6161        param;
6162        param = DECL_CHAIN (param))
6163     {
6164       *p = remap_decl (param, &id);
6165       p = &DECL_CHAIN (*p);
6166     }
6167 
6168   if (DECL_RESULT (fn))
6169     result = remap_decl (DECL_RESULT (fn), &id);
6170   else
6171     result = NULL_TREE;
6172 
6173   return copy_tree_body (&id);
6174 }
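
/* Usage sketch (illustrative): the C++ constexpr evaluator can call

     tree parms, result;
     tree body = copy_fn (fndecl, parms, result);

   then bind the actual arguments to the remapped PARM_DECLs in PARMS
   while evaluating BODY, reading the final value back through RESULT.  */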
6175