/* Tree inlining.
   Copyright (C) 2001-2018 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "gimple-predict.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "cfganal.h"
#include "tree-iterator.h"
#include "intl.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "debug.h"
#include "params.h"
#include "value-prof.h"
#include "cfgloop.h"
#include "builtins.h"
#include "tree-chkp.h"
#include "stringpool.h"
#include "attribs.h"
#include "sreal.h"

/* I'm not really happy about this, but we need to handle gimple and
   non-gimple trees.  */

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function rather than being inserted into the blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */
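
/* As an illustrative sketch of the inlining transformation (the names
   f, x.1 and retval.2 below are hypothetical):

     int f (int x) { return x + 1; }
     ... y = f (3); ...

   conceptually becomes, at the call site,

     { int x.1 = 3;  int retval.2;  retval.2 = x.1 + 1;  y = retval.2; }

   i.e. the PARM_DECL is remapped to a local VAR_DECL initialized from
   the argument, and the RETURN_EXPR becomes an assignment to a
   returned-value variable that the caller then reads.  */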

/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */


/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, tree,
                                     basic_block);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
static void insert_init_stmt (copy_body_data *, basic_block, gimple *);

/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, it is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  id->decl_map->put (key, value);

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    id->decl_map->put (value, value);
}

/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (VAR_P (value));

  if (!id->debug_map)
    id->debug_map = new hash_map<tree, tree>;

  id->debug_map->put (key, value);
}

/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;

/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree, var;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = id->decl_map->get (name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (name)
          && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
          && id->entry_bb == NULL
          && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
        {
          tree vexpr = make_node (DEBUG_EXPR_DECL);
          gimple *def_temp;
          gimple_stmt_iterator gsi;
          tree val = SSA_NAME_VAR (name);

          n = id->decl_map->get (val);
          if (n != NULL)
            val = *n;
          if (TREE_CODE (val) != PARM_DECL)
            {
              processing_debug_stmt = -1;
              return name;
            }
          def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
          DECL_ARTIFICIAL (vexpr) = 1;
          TREE_TYPE (vexpr) = TREE_TYPE (name);
          SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
          gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
          gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
          return vexpr;
        }

      processing_debug_stmt = -1;
      return name;
    }

  /* Remap anonymous SSA names or SSA names of anonymous decls.  */
  var = SSA_NAME_VAR (name);
  if (!var
      || (!SSA_NAME_IS_DEFAULT_DEF (name)
          && VAR_P (var)
          && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
          && DECL_ARTIFICIAL (var)
          && DECL_IGNORED_P (var)
          && !DECL_NAME (var)))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
      if (!var && SSA_NAME_IDENTIFIER (name))
        SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
          && id->src_cfun->gimple_df->ipa_pta
          && POINTER_TYPE_P (TREE_TYPE (name))
          && (pi = SSA_NAME_PTR_INFO (name))
          && !pi->pt.anything)
        {
          struct ptr_info_def *new_pi = get_ptr_info (new_tree);
          new_pi->pt = pi->pt;
        }
      return new_tree;
    }

  /* Do not set DEF_STMT yet as the statement is not copied yet.  We do that
     in copy_bb.  */
  new_tree = remap_decl (var, id);

  /* We might have substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing RESULT_DECL with the variable
     during inlining: this saves us from needing to introduce a PHI node
     when the return value is only partly initialized.  */
  if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
      && (!SSA_NAME_VAR (name)
          || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
          || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
          && id->src_cfun->gimple_df->ipa_pta
          && POINTER_TYPE_P (TREE_TYPE (name))
          && (pi = SSA_NAME_PTR_INFO (name))
          && !pi->pt.anything)
        {
          struct ptr_info_def *new_pi = get_ptr_info (new_tree);
          new_pi->pt = pi->pt;
        }
      if (SSA_NAME_IS_DEFAULT_DEF (name))
        {
          /* By inlining a function that has an uninitialized variable,
             we might extend its lifetime (the variable might get reused).
             This causes an ICE if we end up extending the lifetime of an
             SSA name across an abnormal edge, and it also increases
             register pressure.

             We simply initialize all uninitialized vars to 0, except
             when we are inlining into the very first BB.  We could avoid
             this for all BBs that are not inside strongly connected
             regions of the CFG, but this is expensive to test.  */
          if (id->entry_bb
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
              && (!SSA_NAME_VAR (name)
                  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
              && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
                                             0)->dest
                  || EDGE_COUNT (id->entry_bb->preds) != 1))
            {
              gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
              gimple *init_stmt;
              tree zero = build_zero_cst (TREE_TYPE (new_tree));

              init_stmt = gimple_build_assign (new_tree, zero);
              gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
              SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
            }
          else
            {
              SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
              set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
            }
        }
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}

/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = id->decl_map->get (decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* When remapping a type within copy_gimple_seq_and_replace_locals, all
     necessary DECLs have already been remapped and we do not want to duplicate
     a decl coming from outside of the sequence we are copying.  */
  if (!n
      && id->prevent_decl_creation_for_types
      && id->remapping_type_depth > 0
      && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
    return decl;

  /* If we didn't already have an equivalent for this declaration, create one
     now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
        return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        {
          DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

          /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
             which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
             is not set on the TYPE_DECL, for example in LTO mode.  */
          if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
            {
              tree x = build_variant_type_copy (TREE_TYPE (t));
              TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
              TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
              DECL_ORIGINAL_TYPE (t) = x;
            }
        }

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
        }

      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}

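/* Helper for remap_type.  TYPE is known to need remapping (the caller
   has already checked the decl map and variably_modified_type_p);
   build a new type, register the mapping, and remap the type's
   components.  */
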
static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                              TYPE_MODE (type),
                                              TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                                TYPE_MODE (type),
                                                TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  /* Copy all types that may contain references to local variables; be sure to
     preserve sharing in between type and its main variant when possible.  */
  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
        {
          gcc_checking_assert (TYPE_MIN_VALUE (type)
                               == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
          gcc_checking_assert (TYPE_MAX_VALUE (type)
                               == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));

          TYPE_MIN_VALUE (new_tree)
            = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
          TYPE_MAX_VALUE (new_tree)
            = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
        }
      else
        {
          t = TYPE_MIN_VALUE (new_tree);
          if (t && TREE_CODE (t) != INTEGER_CST)
            walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

          t = TYPE_MAX_VALUE (new_tree);
          if (t && TREE_CODE (t) != INTEGER_CST)
            walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
        }
      return new_tree;

    case FUNCTION_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
          && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
        TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
      else
        TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
          && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
        TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
      else
        walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
          && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
        TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
      else
        TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);

      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
        {
          gcc_checking_assert (TYPE_DOMAIN (type)
                               == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
          TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
        }
      else
        TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      if (TYPE_MAIN_VARIANT (type) != type
          && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
        TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
      else
        {
          tree f, nf = NULL;

          for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
            {
              t = remap_decl (f, id);
              DECL_CONTEXT (t) = new_tree;
              DECL_CHAIN (t) = nf;
              nf = t;
            }
          TYPE_FIELDS (new_tree) = nreverse (nf);
        }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  /* All variants of the type share the same size, so use the already
     remapped data.  */
  if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
    {
      tree s = TYPE_SIZE (type);
      tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
      tree su = TYPE_SIZE_UNIT (type);
      tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
      gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
                            && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
                           || s == mvs);
      gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
                            && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
                           || su == mvsu);
      TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
      TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
    }
  else
    {
      walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
      walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
    }

  return new_tree;
}

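/* Remap TYPE for use in the destination function as specified in ID.
   Reuse an existing mapping when there is one; types that are not
   variably modified map to themselves.  */
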
tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = id->decl_map->get (type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}

/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  return false;
}

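/* Remap the chain of declarations DECLS using ID, returning the new
   declaration chain.  Declarations that stay non-local are pushed onto
   *NONLOCALIZED_LIST (when non-null and debug info wants them) instead
   of being remapped.  */
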
static tree
remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
             copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
        {
          /* We need to add this variable to the local decls as otherwise
             nothing else will do so.  */
          if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
            add_local_decl (cfun, old_var);
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            vec_safe_push (*nonlocalized_list, old_var);
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
        ;
      else if (!new_var)
        {
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            vec_safe_push (*nonlocalized_list, old_var);
        }
      else
        {
          gcc_assert (DECL_P (new_var));
          DECL_CHAIN (new_var) = new_decls;
          new_decls = new_var;

          /* Also copy value-expressions.  */
          if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
            {
              tree tem = DECL_VALUE_EXPR (new_var);
              bool old_regimplify = id->regimplify;
              id->remapping_type_depth++;
              walk_tree (&tem, copy_tree_body_r, id, NULL);
              id->remapping_type_depth--;
              id->regimplify = old_regimplify;
              SET_DECL_VALUE_EXPR (new_var, tem);
            }
        }
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
                                        &BLOCK_NONLOCALIZED_VARS (new_block),
                                        id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */

static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}

/* Remap the block tree rooted at BLOCK to nothing.  */

static void
remap_blocks_to_null (tree block, copy_body_data *id)
{
  tree t;
  insert_decl_map (id, block, NULL_TREE);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    remap_blocks_to_null (t, id);
}

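/* Copy the STATEMENT_LIST pointed to by *TP in place, recursing into
   nested statement lists so that the copy shares no list nodes with
   the original.  */
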
static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
        /* This copy is not redundant; tsi_link_after will smash this
           STATEMENT_LIST into the end of the one we're building, and we
           don't want to do that with the original.  */
        copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}

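/* Copy the BIND_EXPR pointed to by *TP, remapping its block and the
   variables it binds using the mapping information in ID.  */
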
static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}


/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_seq (&new_body, new_stmts);
    }

  return new_body;
}


/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple *
copy_gimple_bind (gbind *stmt, copy_body_data *id)
{
  gimple *new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}

/* Return true if DECL is a parameter or a SSA_NAME for a parameter.  */

static bool
is_parm (tree decl)
{
  if (TREE_CODE (decl) == SSA_NAME)
    {
      decl = SSA_NAME_VAR (decl);
      if (!decl)
        return false;
    }

  return (TREE_CODE (decl) == PARM_DECL);
}

/* Remap the dependence CLIQUE from the source to the destination function
   as specified in ID.  */

static unsigned short
remap_dependence_clique (copy_body_data *id, unsigned short clique)
{
  if (clique == 0 || processing_debug_stmt)
    return 0;
  if (!id->dependence_map)
    id->dependence_map = new hash_map<dependence_hash, unsigned short>;
  bool existed;
  unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
  if (!existed)
    newc = ++cfun->last_clique;
  return newc;
}

/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  /* For recursive invocations this is no longer the LHS itself.  */
  bool is_lhs = wi_p->is_lhs;
  wi_p->is_lhs = false;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      if (is_lhs)
        SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
         variables.  We don't want to copy static variables; there's
         only one of those, no matter how many times we inline the
         containing function.  Similarly for globals from an outer
         function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ??? The C++ frontend uses void * pointer zero to initialize
         any other type.  This confuses the middle-end type verification.
         As cloned bodies do not go through gimplification again the fixup
         there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
          && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
        new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (!DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == FIELD_DECL)
    {
      /* If the enclosing record type is variably_modified_type_p, the field
         has already been remapped.  Otherwise, it need not be.  */
      tree *n = id->decl_map->get (*tp);
      if (n)
        *tp = *n;
      *walk_subtrees = 0;
    }
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
         will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
         knows not to copy VAR_DECLs, etc., so this is safe.  */

      if (TREE_CODE (*tp) == MEM_REF)
        {
          /* We need to re-canonicalize MEM_REFs from inline substitutions
             that can happen when a pointer argument is an ADDR_EXPR.
             Recurse here manually to allow that.  */
          tree ptr = TREE_OPERAND (*tp, 0);
          tree type = remap_type (TREE_TYPE (*tp), id);
          tree old = *tp;
          walk_tree (&ptr, remap_gimple_op_r, data, NULL);
          *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
          TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
          TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
          TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
          if (MR_DEPENDENCE_CLIQUE (old) != 0)
            {
              MR_DEPENDENCE_CLIQUE (*tp)
                = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
              MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
            }
          /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
             remapped a parameter as the property might be valid only
             for the parameter itself.  */
          if (TREE_THIS_NOTRAP (old)
              && (!is_parm (TREE_OPERAND (old, 0))
                  || (!id->transform_parameter && is_parm (ptr))))
            TREE_THIS_NOTRAP (*tp) = 1;
          REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
          *walk_subtrees = 0;
          return NULL;
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          /* The copied TARGET_EXPR has never been expanded, even if the
             original node was expanded already.  */
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          /* Variable substitution need not be simple.  In particular,
             the MEM_REF substitution above.  Make sure that
             TREE_CONSTANT and friends are up-to-date.  */
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
          recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Update the TREE_BLOCK for the cloned expr.  */
  if (EXPR_P (*tp))
    {
      tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
      tree old_block = TREE_BLOCK (*tp);
      if (old_block)
        {
          tree *n;
          n = id->decl_map->get (TREE_BLOCK (*tp));
          if (n)
            new_block = *n;
        }
      TREE_SET_BLOCK (*tp, new_block);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}


/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
        {
          /* Replace the RETURN_EXPR with (a copy of) the
             MODIFY_EXPR hanging underneath.  */
          *tp = copy_node (assignment);
        }
      else /* Else the RETURN_EXPR returns no value.  */
        {
          *tp = NULL;
          return (tree) (void *)1;
        }
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
           || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (! DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
          && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
          && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          tree *n;

          n = id->decl_map->get (decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                {
                  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
                  return copy_tree_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
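          /* For instance (with hypothetical names), inlining a call
             f (&x) where the parameter is "int *p" turns uses of *p
             into *&x, which we want to fold back to plain x.  */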
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n = id->decl_map->get (decl);
          if (n)
            {
              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                 but we absolutely rely on that.  As fold_indirect_ref
                 does other useful transformations, try that first, though.  */
              tree type = TREE_TYPE (*tp);
              tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
              tree old = *tp;
              *tp = gimple_fold_indirect_ref (ptr);
              if (! *tp)
                {
                  type = remap_type (type, id);
                  if (TREE_CODE (ptr) == ADDR_EXPR)
                    {
                      *tp
                        = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
                      /* ??? We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (ptr, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, ptr);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
                      TREE_READONLY (*tp) = TREE_READONLY (old);
                      /* We cannot propagate the TREE_THIS_NOTRAP flag if we
                         have remapped a parameter as the property might be
                         valid only for the parameter itself.  */
                      if (TREE_THIS_NOTRAP (old)
                          && (!is_parm (TREE_OPERAND (old, 0))
                              || (!id->transform_parameter && is_parm (ptr))))
                        TREE_THIS_NOTRAP (*tp) = 1;
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }
      else if (TREE_CODE (*tp) == MEM_REF)
        {
          /* We need to re-canonicalize MEM_REFs from inline substitutions
             that can happen when a pointer argument is an ADDR_EXPR.
             Recurse here manually to allow that.  */
          tree ptr = TREE_OPERAND (*tp, 0);
          tree type = remap_type (TREE_TYPE (*tp), id);
          tree old = *tp;
          walk_tree (&ptr, copy_tree_body_r, data, NULL);
          *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
          TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
          TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
          TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
          if (MR_DEPENDENCE_CLIQUE (old) != 0)
            {
              MR_DEPENDENCE_CLIQUE (*tp)
                = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
              MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
            }
          /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
             remapped a parameter as the property might be valid only
             for the parameter itself.  */
          if (TREE_THIS_NOTRAP (old)
              && (!is_parm (TREE_OPERAND (old, 0))
                  || (!id->transform_parameter && is_parm (ptr))))
            TREE_THIS_NOTRAP (*tp) = 1;
          REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
          *walk_subtrees = 0;
          return NULL;
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* If EXPR has a block defined, map it to the newly constructed block.
         When inlining we want EXPRs without a block to appear in the block
         of the function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
        {
          new_block = id->remapping_type_depth == 0 ? id->block : NULL;
          if (TREE_BLOCK (*tp))
            {
              tree *n;
              n = id->decl_map->get (TREE_BLOCK (*tp));
              if (n)
                new_block = *n;
            }
          TREE_SET_BLOCK (*tp, new_block);
        }

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }

      /* Variable substitution need not be simple.  In particular, the
         INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
         and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            {
              tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
              if (TREE_TYPE (t) != TREE_TYPE (*tp))
                t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
              *tp = t;
            }
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  new_r = static_cast<eh_region> (*id->eh_map->get (old_r));

  return new_r->index;
}

/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_to_shwi (old_t_nr);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (integer_type_node, new_nr);
}

/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple_seq
remap_gimple_stmt (gimple *stmt, copy_body_data *id)
{
  gimple *copy = NULL;
  struct walk_stmt_info wi;
  bool skip_first = false;
  gimple_seq stmts = NULL;

  if (is_gimple_debug (stmt)
      && (gimple_debug_nonbind_marker_p (stmt)
          ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
          : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
    return stmts;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (as_a <greturn *> (stmt));
      tree retbnd = gimple_return_retbnd (stmt);
      tree bndslot = id->retbnd;

      if (retbnd && bndslot)
        {
          gimple *bndcopy = gimple_build_assign (bndslot, retbnd);
          memset (&wi, 0, sizeof (wi));
          wi.info = id;
          walk_gimple_op (bndcopy, remap_gimple_op_r, &wi);
          gimple_seq_add_stmt (&stmts, bndcopy);
        }

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If RETVAL is just the result decl, the result decl has
         already been set (e.g. a recent "foo (&result_decl, ...)");
         just toss the entire GIMPLE_RETURN.  */
      if (retval
          && (TREE_CODE (retval) != RESULT_DECL
              && (TREE_CODE (retval) != SSA_NAME
                  || ! SSA_NAME_VAR (retval)
                  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
        {
          copy = gimple_build_assign (id->do_not_unshare
                                      ? id->retvar : unshare_expr (id->retvar),
                                      retval);
          /* id->retvar is already substituted.  Skip it on later remapping.  */
          skip_first = true;

          /* We need to copy bounds if we are returning a structure with
             pointers from an instrumented function.  */
          if (chkp_function_instrumented_p (id->dst_fn)
              && !bndslot
              && !BOUNDED_P (id->retvar)
              && chkp_type_has_pointer (TREE_TYPE (id->retvar)))
            id->assign_stmts.safe_push (copy);
        }
      else
        return stmts;
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
         in High GIMPLE form.  Handle here all the High GIMPLE statements that
         have embedded statements.  */
      switch (gimple_code (stmt))
        {
        case GIMPLE_BIND:
          copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
          break;

        case GIMPLE_CATCH:
          {
            gcatch *catch_stmt = as_a <gcatch *> (stmt);
            s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
            copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
          }
          break;

        case GIMPLE_EH_FILTER:
          s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
          copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
          break;

        case GIMPLE_TRY:
          s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
          s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
          copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
          break;

        case GIMPLE_WITH_CLEANUP_EXPR:
          s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
          copy = gimple_build_wce (s1);
          break;

        case GIMPLE_OMP_PARALLEL:
          {
            gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
            s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
            copy = gimple_build_omp_parallel
                     (s1,
                      gimple_omp_parallel_clauses (omp_par_stmt),
                      gimple_omp_parallel_child_fn (omp_par_stmt),
                      gimple_omp_parallel_data_arg (omp_par_stmt));
          }
          break;

        case GIMPLE_OMP_TASK:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_task
                   (s1,
                    gimple_omp_task_clauses (stmt),
                    gimple_omp_task_child_fn (stmt),
                    gimple_omp_task_data_arg (stmt),
                    gimple_omp_task_copy_fn (stmt),
                    gimple_omp_task_arg_size (stmt),
                    gimple_omp_task_arg_align (stmt));
          break;

        case GIMPLE_OMP_FOR:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
          copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
                                       gimple_omp_for_clauses (stmt),
                                       gimple_omp_for_collapse (stmt), s2);
          {
            size_t i;
            for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
              {
                gimple_omp_for_set_index (copy, i,
                                          gimple_omp_for_index (stmt, i));
                gimple_omp_for_set_initial (copy, i,
                                            gimple_omp_for_initial (stmt, i));
                gimple_omp_for_set_final (copy, i,
                                          gimple_omp_for_final (stmt, i));
                gimple_omp_for_set_incr (copy, i,
                                         gimple_omp_for_incr (stmt, i));
                gimple_omp_for_set_cond (copy, i,
                                         gimple_omp_for_cond (stmt, i));
              }
          }
          break;

        case GIMPLE_OMP_MASTER:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_master (s1);
          break;

        case GIMPLE_OMP_TASKGROUP:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_taskgroup (s1);
          break;

        case GIMPLE_OMP_ORDERED:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_ordered
                   (s1,
                    gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
          break;

        case GIMPLE_OMP_SECTION:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_section (s1);
          break;

        case GIMPLE_OMP_SECTIONS:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_sections
                   (s1, gimple_omp_sections_clauses (stmt));
          break;

        case GIMPLE_OMP_SINGLE:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_single
                   (s1, gimple_omp_single_clauses (stmt));
          break;

        case GIMPLE_OMP_TARGET:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_target
                   (s1, gimple_omp_target_kind (stmt),
                    gimple_omp_target_clauses (stmt));
          break;

        case GIMPLE_OMP_TEAMS:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_teams
                   (s1, gimple_omp_teams_clauses (stmt));
          break;

        case GIMPLE_OMP_CRITICAL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_critical (s1,
                                            gimple_omp_critical_name
                                              (as_a <gomp_critical *> (stmt)),
                                            gimple_omp_critical_clauses
                                              (as_a <gomp_critical *> (stmt)));
          break;

        case GIMPLE_TRANSACTION:
          {
            gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
            gtransaction *new_trans_stmt;
            s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
                                   id);
            copy = new_trans_stmt = gimple_build_transaction (s1);
            gimple_transaction_set_subcode (new_trans_stmt,
              gimple_transaction_subcode (old_trans_stmt));
            gimple_transaction_set_label_norm (new_trans_stmt,
              gimple_transaction_label_norm (old_trans_stmt));
            gimple_transaction_set_label_uninst (new_trans_stmt,
              gimple_transaction_label_uninst (old_trans_stmt));
            gimple_transaction_set_label_over (new_trans_stmt,
              gimple_transaction_label_over (old_trans_stmt));
          }
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
          && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
          && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
        {
          /* Here we handle statements that are not completely rewritten.
             First we detect some inlining-induced bogosities for
             discarding.  */

          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = gimple_assign_lhs (stmt), value;
          tree *n;

          n = id->decl_map->get (decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                return NULL;
            }
        }

      /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
         in a block that we aren't copying during tree_function_versioning,
         just drop the clobber stmt.  */
      if (id->blocks_to_copy && gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          if (TREE_CODE (lhs) == MEM_REF
              && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
            {
              gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
              if (gimple_bb (def_stmt)
                  && !bitmap_bit_p (id->blocks_to_copy,
                                    gimple_bb (def_stmt)->index))
                return NULL;
            }
        }

      if (gimple_debug_bind_p (stmt))
        {
          gdebug *copy
            = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
                                       gimple_debug_bind_get_value (stmt),
                                       stmt);
          id->debug_stmts.safe_push (copy);
          gimple_seq_add_stmt (&stmts, copy);
          return stmts;
        }
      if (gimple_debug_source_bind_p (stmt))
        {
          gdebug *copy = gimple_build_debug_source_bind
                           (gimple_debug_source_bind_get_var (stmt),
                            gimple_debug_source_bind_get_value (stmt),
                            stmt);
          id->debug_stmts.safe_push (copy);
          gimple_seq_add_stmt (&stmts, copy);
          return stmts;
        }
      if (gimple_debug_nonbind_marker_p (stmt))
        {
          /* If the inlined function has too many debug markers,
             don't copy them.  */
          if (id->src_cfun->debug_marker_count
              > PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
            return stmts;

          gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
          id->debug_stmts.safe_push (copy);
          gimple_seq_add_stmt (&stmts, copy);
          return stmts;
        }
      gcc_checking_assert (!is_gimple_debug (stmt));

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);

      /* Clear flags that need revisiting.  */
      if (gcall *call_stmt = dyn_cast <gcall *> (copy))
        {
          if (gimple_call_tail_p (call_stmt))
            gimple_call_set_tail (call_stmt, false);
          if (gimple_call_from_thunk_p (call_stmt))
            gimple_call_set_from_thunk (call_stmt, false);
          if (gimple_call_internal_p (call_stmt))
            switch (gimple_call_internal_fn (call_stmt))
              {
              case IFN_GOMP_SIMD_LANE:
              case IFN_GOMP_SIMD_VF:
              case IFN_GOMP_SIMD_LAST_LANE:
              case IFN_GOMP_SIMD_ORDERED_START:
              case IFN_GOMP_SIMD_ORDERED_END:
                DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
                break;
              default:
                break;
              }
        }

      /* Remap the region numbers for __builtin_eh_{pointer,filter},
         RESX and EH_DISPATCH.  */
      if (id->eh_map)
        switch (gimple_code (copy))
          {
          case GIMPLE_CALL:
            {
              tree r, fndecl = gimple_call_fndecl (copy);
              if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
                switch (DECL_FUNCTION_CODE (fndecl))
                  {
                  case BUILT_IN_EH_COPY_VALUES:
                    r = gimple_call_arg (copy, 1);
                    r = remap_eh_region_tree_nr (r, id);
                    gimple_call_set_arg (copy, 1, r);
                    /* FALLTHRU */

                  case BUILT_IN_EH_POINTER:
                  case BUILT_IN_EH_FILTER:
                    r = gimple_call_arg (copy, 0);
                    r = remap_eh_region_tree_nr (r, id);
                    gimple_call_set_arg (copy, 0, r);
                    break;

                  default:
                    break;
                  }

              /* Reset alias info if we didn't apply measures to
                 keep it valid over inlining by setting DECL_PT_UID.  */
              if (!id->src_cfun->gimple_df
                  || !id->src_cfun->gimple_df->ipa_pta)
                gimple_call_reset_alias_info (as_a <gcall *> (copy));
            }
            break;

          case GIMPLE_RESX:
            {
              gresx *resx_stmt = as_a <gresx *> (copy);
              int r = gimple_resx_region (resx_stmt);
              r = remap_eh_region_nr (r, id);
              gimple_resx_set_region (resx_stmt, r);
            }
            break;

          case GIMPLE_EH_DISPATCH:
            {
              geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
              int r = gimple_eh_dispatch_region (eh_dispatch);
              r = remap_eh_region_nr (r, id);
              gimple_eh_dispatch_set_region (eh_dispatch, r);
            }
            break;

          default:
            break;
          }
    }
1743
1744 /* If STMT has a block defined, map it to the newly constructed
1745 block. */
1746 if (gimple_block (copy))
1747 {
1748 tree *n;
1749 n = id->decl_map->get (gimple_block (copy));
1750 gcc_assert (n);
1751 gimple_set_block (copy, *n);
1752 }
1753
1754 if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy)
1755 || gimple_debug_nonbind_marker_p (copy))
1756 {
1757 gimple_seq_add_stmt (&stmts, copy);
1758 return stmts;
1759 }
1760
1761 /* Remap all the operands in COPY. */
1762 memset (&wi, 0, sizeof (wi));
1763 wi.info = id;
1764 if (skip_first)
1765 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1766 else
1767 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1768
1769 /* Clear the copied virtual operands. We are not remapping them here
1770 but are going to recreate them from scratch. */
1771 if (gimple_has_mem_ops (copy))
1772 {
1773 gimple_set_vdef (copy, NULL_TREE);
1774 gimple_set_vuse (copy, NULL_TREE);
1775 }
1776
1777 gimple_seq_add_stmt (&stmts, copy);
1778 return stmts;
1779 }
1780
1781
1782 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1783 later. */
1784
1785 static basic_block
1786 copy_bb (copy_body_data *id, basic_block bb,
1787 profile_count num, profile_count den)
1788 {
1789 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1790 basic_block copy_basic_block;
1791 tree decl;
1792 basic_block prev;
1793
1794 profile_count::adjust_for_ipa_scaling (&num, &den);
1795
1796 /* Search for the previous copied basic block. */
1797 prev = bb->prev_bb;
1798 while (!prev->aux)
1799 prev = prev->prev_bb;
1800
1801 /* create_basic_block() will append every new block to
1802 basic_block_info automatically. */
1803 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1804 copy_basic_block->count = bb->count.apply_scale (num, den);
1805
1806 copy_gsi = gsi_start_bb (copy_basic_block);
1807
1808 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1809 {
1810 gimple_seq stmts;
1811 gimple *stmt = gsi_stmt (gsi);
1812 gimple *orig_stmt = stmt;
1813 gimple_stmt_iterator stmts_gsi;
1814 bool stmt_added = false;
1815
1816 id->regimplify = false;
1817 stmts = remap_gimple_stmt (stmt, id);
1818
1819 if (gimple_seq_empty_p (stmts))
1820 continue;
1821
1822 seq_gsi = copy_gsi;
1823
1824 for (stmts_gsi = gsi_start (stmts);
1825 !gsi_end_p (stmts_gsi); )
1826 {
1827 stmt = gsi_stmt (stmts_gsi);
1828
1829 /* Advance iterator now before stmt is moved to seq_gsi. */
1830 gsi_next (&stmts_gsi);
1831
1832 if (gimple_nop_p (stmt))
1833 continue;
1834
1835 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1836 orig_stmt);
1837
1838 /* With return slot optimization we can end up with
1839 non-gimple (foo *)&this->m; fix that here. */
1840 if (is_gimple_assign (stmt)
1841 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1842 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1843 {
1844 tree new_rhs;
1845 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1846 gimple_assign_rhs1 (stmt),
1847 true, NULL, false,
1848 GSI_CONTINUE_LINKING);
1849 gimple_assign_set_rhs1 (stmt, new_rhs);
1850 id->regimplify = false;
1851 }
1852
1853 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1854
1855 if (id->regimplify)
1856 gimple_regimplify_operands (stmt, &seq_gsi);
1857
1858 stmt_added = true;
1859 }
1860
1861 if (!stmt_added)
1862 continue;
1863
1864 /* If copy_basic_block has been empty at the start of this iteration,
1865 call gsi_start_bb again to get at the newly added statements. */
1866 if (gsi_end_p (copy_gsi))
1867 copy_gsi = gsi_start_bb (copy_basic_block);
1868 else
1869 gsi_next (&copy_gsi);
1870
1871 /* Process the new statement. The call to gimple_regimplify_operands
1872 possibly turned the statement into multiple statements; we
1873 need to process all of them. */
1874 do
1875 {
1876 tree fn;
1877 gcall *call_stmt;
1878
1879 stmt = gsi_stmt (copy_gsi);
1880 call_stmt = dyn_cast <gcall *> (stmt);
1881 if (call_stmt
1882 && gimple_call_va_arg_pack_p (call_stmt)
1883 && id->call_stmt
1884 && ! gimple_call_va_arg_pack_p (id->call_stmt))
1885 {
1886 /* __builtin_va_arg_pack () should be replaced by
1887 all arguments corresponding to ... in the caller. */
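/* A sketch of the rewrite on a hypothetical example: with
void callee (int x, ...) inlined at the call callee (7, 8, 9),
a statement in the callee body such as
foo (1, __builtin_va_arg_pack ());
is rebuilt as
foo (1, 8, 9);
i.e. the two anonymous arguments replace the pack. */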
1888 tree p;
1889 gcall *new_call;
1890 vec<tree> argarray;
1891 size_t nargs = gimple_call_num_args (id->call_stmt);
1892 size_t n, i, nargs_to_copy;
1893 bool remove_bounds = false;
1894
1895 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1896 nargs--;
1897
1898 /* Bounds should be removed from the arg pack in case
1899 we handle a non-instrumented call in an instrumented
1900 function. */
1901 nargs_to_copy = nargs;
1902 if (gimple_call_with_bounds_p (id->call_stmt)
1903 && !gimple_call_with_bounds_p (stmt))
1904 {
1905 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1906 i < gimple_call_num_args (id->call_stmt);
1907 i++)
1908 if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1909 nargs_to_copy--;
1910 remove_bounds = true;
1911 }
1912
1913 /* Create the new array of arguments. */
1914 n = nargs_to_copy + gimple_call_num_args (call_stmt);
1915 argarray.create (n);
1916 argarray.safe_grow_cleared (n);
1917
1918 /* Copy all the arguments before '...' */
1919 memcpy (argarray.address (),
1920 gimple_call_arg_ptr (call_stmt, 0),
1921 gimple_call_num_args (call_stmt) * sizeof (tree));
1922
1923 if (remove_bounds)
1924 {
1925 /* Append the rest of the arguments, removing bounds. */
1926 unsigned cur = gimple_call_num_args (call_stmt);
1928 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1929 i < gimple_call_num_args (id->call_stmt);
1930 i++)
1931 if (!POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1932 argarray[cur++] = gimple_call_arg (id->call_stmt, i);
1933 gcc_assert (cur == n);
1934 }
1935 else
1936 {
1937 /* Append the arguments passed in '...' */
1938 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
1939 gimple_call_arg_ptr (id->call_stmt, 0)
1940 + (gimple_call_num_args (id->call_stmt) - nargs),
1941 nargs * sizeof (tree));
1942 }
1943
1944 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
1945 argarray);
1946
1947 argarray.release ();
1948
1949 /* Copy all GIMPLE_CALL flags, location and block, except
1950 GF_CALL_VA_ARG_PACK. */
1951 gimple_call_copy_flags (new_call, call_stmt);
1952 gimple_call_set_va_arg_pack (new_call, false);
1953 gimple_set_location (new_call, gimple_location (stmt));
1954 gimple_set_block (new_call, gimple_block (stmt));
1955 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
1956
1957 gsi_replace (&copy_gsi, new_call, false);
1958 stmt = new_call;
1959 }
1960 else if (call_stmt
1961 && id->call_stmt
1962 && (decl = gimple_call_fndecl (stmt))
1963 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1964 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1965 {
1966 /* __builtin_va_arg_pack_len () should be replaced by
1967 the number of anonymous arguments. */
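/* For the same hypothetical callee void callee (int x, ...)
inlined at callee (7, 8, 9), the call
__builtin_va_arg_pack_len () folds to the constant 2. */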
1968 size_t nargs = gimple_call_num_args (id->call_stmt), i;
1969 tree count, p;
1970 gimple *new_stmt;
1971
1972 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1973 nargs--;
1974
1975 /* For instrumented calls we should ignore bounds. */
1976 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1977 i < gimple_call_num_args (id->call_stmt);
1978 i++)
1979 if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1980 nargs--;
1981
1982 if (!gimple_call_lhs (stmt))
1983 {
1984 /* Drop unused calls. */
1985 gsi_remove (&copy_gsi, false);
1986 continue;
1987 }
1988 else if (!gimple_call_va_arg_pack_p (id->call_stmt))
1989 {
1990 count = build_int_cst (integer_type_node, nargs);
1991 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1992 gsi_replace (&copy_gsi, new_stmt, false);
1993 stmt = new_stmt;
1994 }
1995 else if (nargs != 0)
1996 {
1997 tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
1998 count = build_int_cst (integer_type_node, nargs);
1999 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2000 PLUS_EXPR, newlhs, count);
2001 gimple_call_set_lhs (stmt, newlhs);
2002 gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
2003 }
2004 }
2005 else if (call_stmt
2006 && id->call_stmt
2007 && gimple_call_internal_p (stmt)
2008 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
2009 {
2010 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
2011 gsi_remove (&copy_gsi, false);
2012 continue;
2013 }
2014
2015 /* Statements produced by inlining can be unfolded, especially
2016 when we constant propagated some operands. We can't fold
2017 them right now for two reasons:
2018 1) folding requires SSA_NAME_DEF_STMTs to be correct
2019 2) we can't change function calls to builtins.
2020 So we just mark the statement for later folding. We mark
2021 all new statements, instead of just statements that have changed
2022 by some nontrivial substitution, so even statements made
2023 foldable indirectly are updated. If this turns out to be
2024 expensive, copy_body can be told to watch for nontrivial
2025 changes. */
2026 if (id->statements_to_fold)
2027 id->statements_to_fold->add (stmt);
2028
2029 /* We're duplicating a CALL_EXPR. Find any corresponding
2030 callgraph edges and update or duplicate them. */
2031 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2032 {
2033 struct cgraph_edge *edge;
2034
2035 switch (id->transform_call_graph_edges)
2036 {
2037 case CB_CGE_DUPLICATE:
2038 edge = id->src_node->get_edge (orig_stmt);
2039 if (edge)
2040 {
2041 struct cgraph_edge *old_edge = edge;
2042 profile_count old_cnt = edge->count;
2043 edge = edge->clone (id->dst_node, call_stmt,
2044 gimple_uid (stmt),
2045 num, den,
2046 true);
2047
2048 /* Speculative calls consist of two edges - direct and
2049 indirect. Duplicate the whole thing and distribute
2050 frequencies accordingly. */
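/* Illustrative numbers: with a direct count of 90 and an
indirect count of 10, prob is 10%, so the cloned indirect
edge receives 10% of the copied block's count and the
direct edge the remaining 90%. */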
2051 if (edge->speculative)
2052 {
2053 struct cgraph_edge *direct, *indirect;
2054 struct ipa_ref *ref;
2055
2056 gcc_assert (!edge->indirect_unknown_callee);
2057 old_edge->speculative_call_info (direct, indirect, ref);
2058
2059 profile_count indir_cnt = indirect->count;
2060 indirect = indirect->clone (id->dst_node, call_stmt,
2061 gimple_uid (stmt),
2062 num, den,
2063 true);
2064
2065 profile_probability prob
2066 = indir_cnt.probability_in (old_cnt + indir_cnt);
2067 indirect->count
2068 = copy_basic_block->count.apply_probability (prob);
2069 edge->count = copy_basic_block->count - indirect->count;
2070 id->dst_node->clone_reference (ref, stmt);
2071 }
2072 else
2073 edge->count = copy_basic_block->count;
2074 }
2075 break;
2076
2077 case CB_CGE_MOVE_CLONES:
2078 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2079 call_stmt);
2080 edge = id->dst_node->get_edge (stmt);
2081 break;
2082
2083 case CB_CGE_MOVE:
2084 edge = id->dst_node->get_edge (orig_stmt);
2085 if (edge)
2086 edge->set_call_stmt (call_stmt);
2087 break;
2088
2089 default:
2090 gcc_unreachable ();
2091 }
2092
2093 /* Constant propagation on arguments done during inlining
2094 may create new direct calls. Produce an edge for such a call. */
2095 if ((!edge
2096 || (edge->indirect_inlining_edge
2097 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2098 && id->dst_node->definition
2099 && (fn = gimple_call_fndecl (stmt)) != NULL)
2100 {
2101 struct cgraph_node *dest = cgraph_node::get_create (fn);
2102
2103 /* We have a missing edge in the callgraph. This can happen
2104 when previous inlining turned an indirect call into a
2105 direct call by constant propagating arguments or we are
2106 producing a dead clone (for further cloning). In all
2107 other cases we hit a bug (incorrect node sharing is the
2108 most common reason for missing edges). */
2109 gcc_assert (!dest->definition
2110 || dest->address_taken
2111 || !id->src_node->definition
2112 || !id->dst_node->definition);
2113 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2114 id->dst_node->create_edge_including_clones
2115 (dest, orig_stmt, call_stmt, bb->count,
2116 CIF_ORIGINALLY_INDIRECT_CALL);
2117 else
2118 id->dst_node->create_edge (dest, call_stmt,
2119 bb->count)->inline_failed
2120 = CIF_ORIGINALLY_INDIRECT_CALL;
2121 if (dump_file)
2122 {
2123 fprintf (dump_file, "Created new direct edge to %s\n",
2124 dest->name ());
2125 }
2126 }
2127
2128 notice_special_calls (as_a <gcall *> (stmt));
2129 }
2130
2131 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2132 id->eh_map, id->eh_lp_nr);
2133
2134 gsi_next (&copy_gsi);
2135 }
2136 while (!gsi_end_p (copy_gsi));
2137
2138 copy_gsi = gsi_last_bb (copy_basic_block);
2139 }
2140
2141 return copy_basic_block;
2142 }
2143
2144 /* Inserting a Single Entry Multiple Exit region in SSA form into code in
2145 SSA form is quite easy, since the dominator relationship for the old
2146 basic blocks does not change.
2147
2148 There is however an exception: inlining might change the dominator
2149 relation across EH edges from basic blocks within the inlined function
2150 leading to landing pads in the function we inline into.
2151
2152 The function fills in PHI_RESULTs of such PHI nodes if they refer
2153 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
2154 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2155 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2156 set, and this means that there will be no overlapping live ranges
2157 for the underlying symbol.
2158
2159 This might change in the future if we allow redirecting of EH edges,
2160 and we might then want to change the way the CFG is built pre-inlining
2161 to include all the possible edges. */
2162 static void
2163 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2164 bool can_throw, bool nonlocal_goto)
2165 {
2166 edge e;
2167 edge_iterator ei;
2168
2169 FOR_EACH_EDGE (e, ei, bb->succs)
2170 if (!e->dest->aux
2171 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2172 {
2173 gphi *phi;
2174 gphi_iterator si;
2175
2176 if (!nonlocal_goto)
2177 gcc_assert (e->flags & EDGE_EH);
2178
2179 if (!can_throw)
2180 gcc_assert (!(e->flags & EDGE_EH));
2181
2182 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2183 {
2184 edge re;
2185
2186 phi = si.phi ();
2187
2188 /* For abnormal goto/call edges the receiver can be the
2189 ENTRY_BLOCK. Do not assert this cannot happen. */
2190
2191 gcc_assert ((e->flags & EDGE_EH)
2192 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2193
2194 re = find_edge (ret_bb, e->dest);
2195 gcc_checking_assert (re);
2196 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2197 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2198
2199 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2200 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2201 }
2202 }
2203 }
2204
2205
2206 /* Copy edges from BB into its copy constructed earlier, scale profile
2207 accordingly. Edges will be taken care of later. Assume aux
2208 pointers point to the copies of each BB. Return true if any
2209 debug stmts are left after a statement that must end the basic block. */
2210
2211 static bool
2212 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2213 basic_block ret_bb, basic_block abnormal_goto_dest)
2214 {
2215 basic_block new_bb = (basic_block) bb->aux;
2216 edge_iterator ei;
2217 edge old_edge;
2218 gimple_stmt_iterator si;
2219 int flags;
2220 bool need_debug_cleanup = false;
2221
2222 /* Use the indices from the original blocks to create edges for the
2223 new ones. */
2224 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2225 if (!(old_edge->flags & EDGE_EH))
2226 {
2227 edge new_edge;
2228
2229 flags = old_edge->flags;
2230
2231 /* Return edges do get a FALLTHRU flag when they get inlined. */
2232 if (old_edge->dest->index == EXIT_BLOCK
2233 && !(old_edge->flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2234 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2235 flags |= EDGE_FALLTHRU;
2236 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2237 new_edge->probability = old_edge->probability;
2238 }
2239
2240 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2241 return false;
2242
2243 /* When doing function splitting, we must decrease the count of the return
2244 block, which was previously reachable from blocks we did not copy. */
2245 if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2246 FOR_EACH_EDGE (old_edge, ei, bb->preds)
2247 if (old_edge->src->index != ENTRY_BLOCK
2248 && !old_edge->src->aux)
2249 new_bb->count -= old_edge->count ().apply_scale (num, den);
2250
2251 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2252 {
2253 gimple *copy_stmt;
2254 bool can_throw, nonlocal_goto;
2255
2256 copy_stmt = gsi_stmt (si);
2257 if (!is_gimple_debug (copy_stmt))
2258 update_stmt (copy_stmt);
2259
2260 /* Do this before the possible split_block. */
2261 gsi_next (&si);
2262
2263 /* If this tree could throw an exception, there are two
2264 cases where we need to add abnormal edge(s): the
2265 tree wasn't in a region and there is a "current
2266 region" in the caller; or the original tree had
2267 EH edges. In both cases split the block after the tree,
2268 and add abnormal edge(s) as needed; we need both
2269 those from the callee and the caller.
2270 We check whether the copy can throw, because the const
2271 propagation can change an INDIRECT_REF which throws
2272 into a COMPONENT_REF which doesn't. If the copy
2273 can throw, the original could also throw. */
2274 can_throw = stmt_can_throw_internal (copy_stmt);
2275 nonlocal_goto
2276 = (stmt_can_make_abnormal_goto (copy_stmt)
2277 && !computed_goto_p (copy_stmt));
2278
2279 if (can_throw || nonlocal_goto)
2280 {
2281 if (!gsi_end_p (si))
2282 {
2283 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2284 gsi_next (&si);
2285 if (gsi_end_p (si))
2286 need_debug_cleanup = true;
2287 }
2288 if (!gsi_end_p (si))
2289 /* Note that bb's predecessor edges aren't necessarily
2290 right at this point; split_block doesn't care. */
2291 {
2292 edge e = split_block (new_bb, copy_stmt);
2293
2294 new_bb = e->dest;
2295 new_bb->aux = e->src->aux;
2296 si = gsi_start_bb (new_bb);
2297 }
2298 }
2299
2300 bool update_probs = false;
2301
2302 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2303 {
2304 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2305 update_probs = true;
2306 }
2307 else if (can_throw)
2308 {
2309 make_eh_edges (copy_stmt);
2310 update_probs = true;
2311 }
2312
2313 /* EH edges may not match old edges. Copy as much as possible. */
2314 if (update_probs)
2315 {
2316 edge e;
2317 edge_iterator ei;
2318 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2319
2320 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2321 if ((old_edge->flags & EDGE_EH)
2322 && (e = find_edge (copy_stmt_bb,
2323 (basic_block) old_edge->dest->aux))
2324 && (e->flags & EDGE_EH))
2325 e->probability = old_edge->probability;
2326
2327 FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2328 if ((e->flags & EDGE_EH) && !e->probability.initialized_p ())
2329 e->probability = profile_probability::never ();
2330 }
2331
2332
2333 /* If the call we inline cannot make an abnormal goto, do not add
2334 additional abnormal edges but only retain those already present
2335 in the original function body. */
2336 if (abnormal_goto_dest == NULL)
2337 nonlocal_goto = false;
2338 if (nonlocal_goto)
2339 {
2340 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2341
2342 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2343 nonlocal_goto = false;
2344 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2345 in OpenMP regions which aren't allowed to be left abnormally.
2346 So, no need to add abnormal edge in that case. */
2347 else if (is_gimple_call (copy_stmt)
2348 && gimple_call_internal_p (copy_stmt)
2349 && (gimple_call_internal_fn (copy_stmt)
2350 == IFN_ABNORMAL_DISPATCHER)
2351 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2352 nonlocal_goto = false;
2353 else
2354 make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2355 EDGE_ABNORMAL);
2356 }
2357
2358 if ((can_throw || nonlocal_goto)
2359 && gimple_in_ssa_p (cfun))
2360 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2361 can_throw, nonlocal_goto);
2362 }
2363 return need_debug_cleanup;
2364 }
2365
2366 /* Copy the PHIs. All blocks and edges are copied, some blocks
2367 were possibly split and new outgoing EH edges inserted.
2368 BB points to the block of the original function and AUX pointers link
2369 the original and newly copied blocks. */
2370
2371 static void
2372 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2373 {
2374 basic_block const new_bb = (basic_block) bb->aux;
2375 edge_iterator ei;
2376 gphi *phi;
2377 gphi_iterator si;
2378 edge new_edge;
2379 bool inserted = false;
2380
2381 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2382 {
2383 tree res, new_res;
2384 gphi *new_phi;
2385
2386 phi = si.phi ();
2387 res = PHI_RESULT (phi);
2388 new_res = res;
2389 if (!virtual_operand_p (res))
2390 {
2391 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2392 if (EDGE_COUNT (new_bb->preds) == 0)
2393 {
2394 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2395 SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2396 }
2397 else
2398 {
2399 new_phi = create_phi_node (new_res, new_bb);
2400 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2401 {
2402 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2403 bb);
2404 tree arg;
2405 tree new_arg;
2406 edge_iterator ei2;
2407 location_t locus;
2408
2409 /* When doing partial cloning, we allow PHIs on the entry
2410 block as long as all the arguments are the same.
2411 Find any input edge to see which argument to copy. */
2412 if (!old_edge)
2413 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2414 if (!old_edge->src->aux)
2415 break;
2416
2417 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2418 new_arg = arg;
2419 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2420 gcc_assert (new_arg);
2421 /* With return slot optimization we can end up with
2422 non-gimple (foo *)&this->m; fix that here. */
2423 if (TREE_CODE (new_arg) != SSA_NAME
2424 && TREE_CODE (new_arg) != FUNCTION_DECL
2425 && !is_gimple_val (new_arg))
2426 {
2427 gimple_seq stmts = NULL;
2428 new_arg = force_gimple_operand (new_arg, &stmts, true,
2429 NULL);
2430 gsi_insert_seq_on_edge (new_edge, stmts);
2431 inserted = true;
2432 }
2433 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2434 if (LOCATION_BLOCK (locus))
2435 {
2436 tree *n;
2437 n = id->decl_map->get (LOCATION_BLOCK (locus));
2438 gcc_assert (n);
2439 locus = set_block (locus, *n);
2440 }
2441 else
2442 locus = LOCATION_LOCUS (locus);
2443
2444 add_phi_arg (new_phi, new_arg, new_edge, locus);
2445 }
2446 }
2447 }
2448 }
2449
2450 /* Commit the delayed edge insertions. */
2451 if (inserted)
2452 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2453 gsi_commit_one_edge_insert (new_edge, NULL);
2454 }
2455
2456
2457 /* Wrapper for remap_decl so it can be used as a callback. */
2458
2459 static tree
2460 remap_decl_1 (tree decl, void *data)
2461 {
2462 return remap_decl (decl, (copy_body_data *) data);
2463 }
2464
2465 /* Build struct function and associated datastructures for the new clone
2466 NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function
2467 changes cfun to the function of new_fndecl (and current_function_decl too). */
2468
2469 static void
2470 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2471 {
2472 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2473
2474 if (!DECL_ARGUMENTS (new_fndecl))
2475 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2476 if (!DECL_RESULT (new_fndecl))
2477 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2478
2479 /* Register specific tree functions. */
2480 gimple_register_cfg_hooks ();
2481
2482 /* Get clean struct function. */
2483 push_struct_function (new_fndecl);
2484
2485 /* We will rebuild these, so just sanity check that they are empty. */
2486 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2487 gcc_assert (cfun->local_decls == NULL);
2488 gcc_assert (cfun->cfg == NULL);
2489 gcc_assert (cfun->decl == new_fndecl);
2490
2491 /* Copy items we preserve during cloning. */
2492 cfun->static_chain_decl = src_cfun->static_chain_decl;
2493 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2494 cfun->function_end_locus = src_cfun->function_end_locus;
2495 cfun->curr_properties = src_cfun->curr_properties;
2496 cfun->last_verified = src_cfun->last_verified;
2497 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2498 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2499 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2500 cfun->stdarg = src_cfun->stdarg;
2501 cfun->after_inlining = src_cfun->after_inlining;
2502 cfun->can_throw_non_call_exceptions
2503 = src_cfun->can_throw_non_call_exceptions;
2504 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2505 cfun->returns_struct = src_cfun->returns_struct;
2506 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2507
2508 init_empty_tree_cfg ();
2509
2510 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2511
2512 profile_count num = count;
2513 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2514 profile_count::adjust_for_ipa_scaling (&num, &den);
2515
2516 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2517 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2518 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2519 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2520 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2521 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2522 if (src_cfun->eh)
2523 init_eh_for_function ();
2524
2525 if (src_cfun->gimple_df)
2526 {
2527 init_tree_ssa (cfun);
2528 cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2529 if (cfun->gimple_df->in_ssa_p)
2530 init_ssa_operands (cfun);
2531 }
2532 }
2533
2534 /* Helper function for copy_cfg_body. Move debug stmts from the end
2535 of NEW_BB to the beginning of successor basic blocks when needed. If the
2536 successor has multiple predecessors, reset the debug stmt values,
2537 otherwise keep them. */
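/* E.g. (illustrative): if NEW_BB ends in a throwing call followed only by
# DEBUG x => x_1, the bind is copied to the start of each successor
(and moved, for the last edge); when a successor has several
predecessors the value is reset, since x_1 need not hold on the other
incoming paths. */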
2538
2539 static void
2540 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2541 {
2542 edge e;
2543 edge_iterator ei;
2544 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2545
2546 if (gsi_end_p (si)
2547 || gsi_one_before_end_p (si)
2548 || !(stmt_can_throw_internal (gsi_stmt (si))
2549 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2550 return;
2551
2552 FOR_EACH_EDGE (e, ei, new_bb->succs)
2553 {
2554 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2555 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2556 while (is_gimple_debug (gsi_stmt (ssi)))
2557 {
2558 gimple *stmt = gsi_stmt (ssi);
2559 gdebug *new_stmt;
2560 tree var;
2561 tree value;
2562
2563 /* For the last edge move the debug stmts instead of copying
2564 them. */
2565 if (ei_one_before_end_p (ei))
2566 {
2567 si = ssi;
2568 gsi_prev (&ssi);
2569 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2570 gimple_debug_bind_reset_value (stmt);
2571 gsi_remove (&si, false);
2572 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2573 continue;
2574 }
2575
2576 if (gimple_debug_bind_p (stmt))
2577 {
2578 var = gimple_debug_bind_get_var (stmt);
2579 if (single_pred_p (e->dest))
2580 {
2581 value = gimple_debug_bind_get_value (stmt);
2582 value = unshare_expr (value);
2583 }
2584 else
2585 value = NULL_TREE;
2586 new_stmt = gimple_build_debug_bind (var, value, stmt);
2587 }
2588 else if (gimple_debug_source_bind_p (stmt))
2589 {
2590 var = gimple_debug_source_bind_get_var (stmt);
2591 value = gimple_debug_source_bind_get_value (stmt);
2592 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2593 }
2594 else if (gimple_debug_nonbind_marker_p (stmt))
2595 new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2596 else
2597 gcc_unreachable ();
2598 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2599 id->debug_stmts.safe_push (new_stmt);
2600 gsi_prev (&ssi);
2601 }
2602 }
2603 }
2604
2605 /* Make a copy of the sub-loops of SRC_PARENT and place them
2606 as children of DEST_PARENT. */
2607
2608 static void
2609 copy_loops (copy_body_data *id,
2610 struct loop *dest_parent, struct loop *src_parent)
2611 {
2612 struct loop *src_loop = src_parent->inner;
2613 while (src_loop)
2614 {
2615 if (!id->blocks_to_copy
2616 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2617 {
2618 struct loop *dest_loop = alloc_loop ();
2619
2620 /* Assign the new loop its header and latch and associate
2621 those with the new loop. */
2622 dest_loop->header = (basic_block)src_loop->header->aux;
2623 dest_loop->header->loop_father = dest_loop;
2624 if (src_loop->latch != NULL)
2625 {
2626 dest_loop->latch = (basic_block)src_loop->latch->aux;
2627 dest_loop->latch->loop_father = dest_loop;
2628 }
2629
2630 /* Copy loop meta-data. */
2631 copy_loop_info (src_loop, dest_loop);
2632
2633 /* Finally place it into the loop array and the loop tree. */
2634 place_new_loop (cfun, dest_loop);
2635 flow_loop_tree_node_add (dest_parent, dest_loop);
2636
2637 dest_loop->safelen = src_loop->safelen;
2638 if (src_loop->unroll)
2639 {
2640 dest_loop->unroll = src_loop->unroll;
2641 cfun->has_unroll = true;
2642 }
2643 dest_loop->dont_vectorize = src_loop->dont_vectorize;
2644 if (src_loop->force_vectorize)
2645 {
2646 dest_loop->force_vectorize = true;
2647 cfun->has_force_vectorize_loops = true;
2648 }
2649 if (src_loop->simduid)
2650 {
2651 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2652 cfun->has_simduid_loops = true;
2653 }
2654
2655 /* Recurse. */
2656 copy_loops (id, dest_loop, src_loop);
2657 }
2658 src_loop = src_loop->next;
2659 }
2660 }
2661
2662 /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB. */
2663
2664 void
2665 redirect_all_calls (copy_body_data * id, basic_block bb)
2666 {
2667 gimple_stmt_iterator si;
2668 gimple *last = last_stmt (bb);
2669 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2670 {
2671 gimple *stmt = gsi_stmt (si);
2672 if (is_gimple_call (stmt))
2673 {
2674 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2675 if (edge)
2676 {
2677 edge->redirect_call_stmt_to_callee ();
2678 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2679 gimple_purge_dead_eh_edges (bb);
2680 }
2681 }
2682 }
2683 }
2684
2685 /* Make a copy of the body of FN so that it can be inserted inline in
2686 another function. Walks FN via CFG, returns new fndecl. */
2687
2688 static tree
2689 copy_cfg_body (copy_body_data * id,
2690 basic_block entry_block_map, basic_block exit_block_map,
2691 basic_block new_entry)
2692 {
2693 tree callee_fndecl = id->src_fn;
2694 /* Original cfun for the callee, doesn't change. */
2695 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2696 struct function *cfun_to_copy;
2697 basic_block bb;
2698 tree new_fndecl = NULL;
2699 bool need_debug_cleanup = false;
2700 int last;
2701 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2702 profile_count num = entry_block_map->count;
2703
2704 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2705
2706 /* Register specific tree functions. */
2707 gimple_register_cfg_hooks ();
2708
2709 /* If we are inlining just a region of the function, make sure to connect
2710 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can
2711 be part of a loop, we must compute the frequency and probability of
2712 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2713 probabilities of edges incoming from the nonduplicated region. */
2714 if (new_entry)
2715 {
2716 edge e;
2717 edge_iterator ei;
2718 den = profile_count::zero ();
2719
2720 FOR_EACH_EDGE (e, ei, new_entry->preds)
2721 if (!e->src->aux)
2722 den += e->count ();
2723 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
2724 }
2725
2726 profile_count::adjust_for_ipa_scaling (&num, &den);
2727
2728 /* Must have a CFG here at this point. */
2729 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2730 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2731
2732
2733 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2734 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2735 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2736 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2737
2738 /* Duplicate any exception-handling regions. */
2739 if (cfun->eh)
2740 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2741 remap_decl_1, id);
2742
2743 /* Use aux pointers to map the original blocks to their copies. */
2744 FOR_EACH_BB_FN (bb, cfun_to_copy)
2745 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2746 {
2747 basic_block new_bb = copy_bb (id, bb, num, den);
2748 bb->aux = new_bb;
2749 new_bb->aux = bb;
2750 new_bb->loop_father = entry_block_map->loop_father;
2751 }
2752
2753 last = last_basic_block_for_fn (cfun);
2754
2755 /* Now that we've duplicated the blocks, duplicate their edges. */
2756 basic_block abnormal_goto_dest = NULL;
2757 if (id->call_stmt
2758 && stmt_can_make_abnormal_goto (id->call_stmt))
2759 {
2760 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2761
2762 bb = gimple_bb (id->call_stmt);
2763 gsi_next (&gsi);
2764 if (gsi_end_p (gsi))
2765 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2766 }
2767 FOR_ALL_BB_FN (bb, cfun_to_copy)
2768 if (!id->blocks_to_copy
2769 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2770 need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
2771 abnormal_goto_dest);
2772
2773 if (new_entry)
2774 {
2775 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
2776 EDGE_FALLTHRU);
2777 e->probability = profile_probability::always ();
2778 }
2779
2780 /* Duplicate the loop tree, if available and wanted. */
2781 if (loops_for_fn (src_cfun) != NULL
2782 && current_loops != NULL)
2783 {
2784 copy_loops (id, entry_block_map->loop_father,
2785 get_loop (src_cfun, 0));
2786 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2787 loops_state_set (LOOPS_NEED_FIXUP);
2788 }
2789
2790 /* If the loop tree in the source function needed fixup, mark the
2791 destination loop tree for fixup, too. */
2792 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2793 loops_state_set (LOOPS_NEED_FIXUP);
2794
2795 if (gimple_in_ssa_p (cfun))
2796 FOR_ALL_BB_FN (bb, cfun_to_copy)
2797 if (!id->blocks_to_copy
2798 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2799 copy_phis_for_bb (bb, id);
2800
2801 FOR_ALL_BB_FN (bb, cfun_to_copy)
2802 if (bb->aux)
2803 {
2804 if (need_debug_cleanup
2805 && bb->index != ENTRY_BLOCK
2806 && bb->index != EXIT_BLOCK)
2807 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2808 /* Update call edge destinations. This cannot be done before loop
2809 info is updated, because we may split basic blocks. */
2810 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2811 && bb->index != ENTRY_BLOCK
2812 && bb->index != EXIT_BLOCK)
2813 redirect_all_calls (id, (basic_block)bb->aux);
2814 ((basic_block)bb->aux)->aux = NULL;
2815 bb->aux = NULL;
2816 }
2817
2818 /* Zero out AUX fields of blocks newly created during EH edge
2819 insertion. */
2820 for (; last < last_basic_block_for_fn (cfun); last++)
2821 {
2822 if (need_debug_cleanup)
2823 maybe_move_debug_stmts_to_successors (id,
2824 BASIC_BLOCK_FOR_FN (cfun, last));
2825 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2826 /* Update call edge destinations. This cannot be done before loop
2827 info is updated, because we may split basic blocks. */
2828 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2829 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2830 }
2831 entry_block_map->aux = NULL;
2832 exit_block_map->aux = NULL;
2833
2834 if (id->eh_map)
2835 {
2836 delete id->eh_map;
2837 id->eh_map = NULL;
2838 }
2839 if (id->dependence_map)
2840 {
2841 delete id->dependence_map;
2842 id->dependence_map = NULL;
2843 }
2844
2845 return new_fndecl;
2846 }
2847
2848 /* Copy the debug STMT using ID. We deal with these statements in a
2849 special way: if any variable in their VALUE expression wasn't
2850 remapped yet, we won't remap it, because that would get decl uids
2851 out of sync, causing codegen differences between -g and -g0. If
2852 this arises, we drop the VALUE expression altogether. */
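/* E.g. (hypothetical): given # DEBUG y => a_2 + b where decl b was never
remapped because it survives only in debug stmts, remapping b here would
create a decl uid present under -g only; the bind is therefore reset and
y is left without a value. */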
2853
2854 static void
2855 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
2856 {
2857 tree t, *n;
2858 struct walk_stmt_info wi;
2859
2860 if (gimple_block (stmt))
2861 {
2862 n = id->decl_map->get (gimple_block (stmt));
2863 gimple_set_block (stmt, n ? *n : id->block);
2864 }
2865
2866 if (gimple_debug_nonbind_marker_p (stmt))
2867 return;
2868
2869 /* Remap all the operands in COPY. */
2870 memset (&wi, 0, sizeof (wi));
2871 wi.info = id;
2872
2873 processing_debug_stmt = 1;
2874
2875 if (gimple_debug_source_bind_p (stmt))
2876 t = gimple_debug_source_bind_get_var (stmt);
2877 else if (gimple_debug_bind_p (stmt))
2878 t = gimple_debug_bind_get_var (stmt);
2879 else
2880 gcc_unreachable ();
2881
2882 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2883 && (n = id->debug_map->get (t)))
2884 {
2885 gcc_assert (VAR_P (*n));
2886 t = *n;
2887 }
2888 else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
2889 /* T is a non-localized variable. */;
2890 else
2891 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2892
2893 if (gimple_debug_bind_p (stmt))
2894 {
2895 gimple_debug_bind_set_var (stmt, t);
2896
2897 if (gimple_debug_bind_has_value_p (stmt))
2898 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2899 remap_gimple_op_r, &wi, NULL);
2900
2901 /* Punt if any decl couldn't be remapped. */
2902 if (processing_debug_stmt < 0)
2903 gimple_debug_bind_reset_value (stmt);
2904 }
2905 else if (gimple_debug_source_bind_p (stmt))
2906 {
2907 gimple_debug_source_bind_set_var (stmt, t);
2908 /* When inlining, if the source bind refers to one of the optimized
2909 away parameters, change the source bind into a normal debug bind
2910 referring to the corresponding DEBUG_EXPR_DECL that should have
2911 been bound before the call stmt. */
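/* E.g. (illustrative): a stmt # DEBUG y s=> parm_3 becomes
# DEBUG y => D#5 when the source function's debug args bind
DEBUG_EXPR_DECL D#5 to PARM_3 at the call site. */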
2912 t = gimple_debug_source_bind_get_value (stmt);
2913 if (t != NULL_TREE
2914 && TREE_CODE (t) == PARM_DECL
2915 && id->call_stmt)
2916 {
2917 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2918 unsigned int i;
2919 if (debug_args != NULL)
2920 {
2921 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2922 if ((**debug_args)[i] == DECL_ORIGIN (t)
2923 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2924 {
2925 t = (**debug_args)[i + 1];
2926 stmt->subcode = GIMPLE_DEBUG_BIND;
2927 gimple_debug_bind_set_value (stmt, t);
2928 break;
2929 }
2930 }
2931 }
2932 if (gimple_debug_source_bind_p (stmt))
2933 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2934 remap_gimple_op_r, &wi, NULL);
2935 }
2936
2937 processing_debug_stmt = 0;
2938
2939 update_stmt (stmt);
2940 }
2941
2942 /* Process deferred debug stmts. In order to give values better odds
2943 of being successfully remapped, we delay the processing of debug
2944 stmts until all other stmts that might require remapping are
2945 processed. */
2946
2947 static void
2948 copy_debug_stmts (copy_body_data *id)
2949 {
2950 size_t i;
2951 gdebug *stmt;
2952
2953 if (!id->debug_stmts.exists ())
2954 return;
2955
2956 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2957 copy_debug_stmt (stmt, id);
2958
2959 id->debug_stmts.release ();
2960 }
2961
2962 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2963 another function. */
2964
2965 static tree
2966 copy_tree_body (copy_body_data *id)
2967 {
2968 tree fndecl = id->src_fn;
2969 tree body = DECL_SAVED_TREE (fndecl);
2970
2971 walk_tree (&body, copy_tree_body_r, id, NULL);
2972
2973 return body;
2974 }
2975
2976 /* Make a copy of the body of FN so that it can be inserted inline in
2977 another function. */
2978
2979 static tree
2980 copy_body (copy_body_data *id,
2981 basic_block entry_block_map, basic_block exit_block_map,
2982 basic_block new_entry)
2983 {
2984 tree fndecl = id->src_fn;
2985 tree body;
2986
2987 /* If this body has a CFG, walk CFG and copy. */
2988 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
2989 body = copy_cfg_body (id, entry_block_map, exit_block_map,
2990 new_entry);
2991 copy_debug_stmts (id);
2992
2993 return body;
2994 }
2995
2996 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2997 defined in function FN, or of a data member thereof. */
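/* E.g. (illustrative): when inlining a recursive call to FN, an argument
&local whose base is FN's own auto variable must not be treated as an
invariant, since each frame of FN has a distinct address for it. */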
2998
2999 static bool
3000 self_inlining_addr_expr (tree value, tree fn)
3001 {
3002 tree var;
3003
3004 if (TREE_CODE (value) != ADDR_EXPR)
3005 return false;
3006
3007 var = get_base_address (TREE_OPERAND (value, 0));
3008
3009 return var && auto_var_in_fn_p (var, fn);
3010 }
3011
3012 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3013 lexical block and line number information from base_stmt, if given,
3014 or from the last stmt of the block otherwise. */
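/* E.g. (illustrative): binding an inlined parameter P to argument A_1
emits # DEBUG P => A_1, so var-tracking can still report P's value
even when the real initialization is later optimized away. */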
3015
3016 static gimple *
3017 insert_init_debug_bind (copy_body_data *id,
3018 basic_block bb, tree var, tree value,
3019 gimple *base_stmt)
3020 {
3021 gimple *note;
3022 gimple_stmt_iterator gsi;
3023 tree tracked_var;
3024
3025 if (!gimple_in_ssa_p (id->src_cfun))
3026 return NULL;
3027
3028 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3029 return NULL;
3030
3031 tracked_var = target_for_debug_bind (var);
3032 if (!tracked_var)
3033 return NULL;
3034
3035 if (bb)
3036 {
3037 gsi = gsi_last_bb (bb);
3038 if (!base_stmt && !gsi_end_p (gsi))
3039 base_stmt = gsi_stmt (gsi);
3040 }
3041
3042 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3043
3044 if (bb)
3045 {
3046 if (!gsi_end_p (gsi))
3047 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3048 else
3049 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3050 }
3051
3052 return note;
3053 }
3054
3055 static void
3056 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3057 {
3058 /* If VAR represents a zero-sized variable, it's possible that the
3059 assignment statement may result in no gimple statements. */
3060 if (init_stmt)
3061 {
3062 gimple_stmt_iterator si = gsi_last_bb (bb);
3063
3064 /* We can end up with init statements that store to a non-register
3065 from a rhs with a conversion. Handle that here by forcing the
3066 rhs into a temporary. gimple_regimplify_operands is not
3067 prepared to do this for us. */
3068 if (!is_gimple_debug (init_stmt)
3069 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3070 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3071 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3072 {
3073 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3074 gimple_expr_type (init_stmt),
3075 gimple_assign_rhs1 (init_stmt));
3076 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3077 GSI_NEW_STMT);
3078 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3079 gimple_assign_set_rhs1 (init_stmt, rhs);
3080 }
3081 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3082 gimple_regimplify_operands (init_stmt, &si);
3083
3084 if (!is_gimple_debug (init_stmt))
3085 {
3086 tree def = gimple_assign_lhs (init_stmt);
3087 insert_init_debug_bind (id, bb, def, def, init_stmt);
3088 }
3089 }
3090 }
3091
3092 /* Initialize parameter P with VALUE. If needed, produce init statement
3093 at the end of BB. When BB is NULL, we return init statement to be
3094 output later. */
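/* E.g. (illustrative): inlining the call foo (a_1 + 1) materializes the
parameter as p_var = a_1 + 1 at the end of BB; with an SSA name or
invariant argument under optimization, the parameter's default
definition is instead mapped directly to the argument and no init
statement is needed. */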
3095 static gimple *
3096 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3097 basic_block bb, tree *vars)
3098 {
3099 gimple *init_stmt = NULL;
3100 tree var;
3101 tree rhs = value;
3102 tree def = (gimple_in_ssa_p (cfun)
3103 ? ssa_default_def (id->src_cfun, p) : NULL);
3104
3105 if (value
3106 && value != error_mark_node
3107 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3108 {
3109 /* If we can match up types by promotion/demotion do so. */
3110 if (fold_convertible_p (TREE_TYPE (p), value))
3111 rhs = fold_convert (TREE_TYPE (p), value);
3112 else
3113 {
3114 /* ??? For valid programs we should not end up here.
3115 Still if we end up with truly mismatched types here, fall back
3116 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3117 GIMPLE to the following passes. */
3118 if (!is_gimple_reg_type (TREE_TYPE (value))
3119 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3120 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3121 else
3122 rhs = build_zero_cst (TREE_TYPE (p));
3123 }
3124 }
3125
3126 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3127 here since the type of this decl must be visible to the calling
3128 function. */
3129 var = copy_decl_to_var (p, id);
3130
3131 /* Declare this new variable. */
3132 DECL_CHAIN (var) = *vars;
3133 *vars = var;
3134
3135 /* Make gimplifier happy about this variable. */
3136 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3137
3138 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3139 we would not need to create a new variable here at all, if it
3140 weren't for debug info. Still, we can just use the argument
3141 value. */
3142 if (TREE_READONLY (p)
3143 && !TREE_ADDRESSABLE (p)
3144 && value && !TREE_SIDE_EFFECTS (value)
3145 && !def)
3146 {
3147 /* We may produce non-gimple trees by adding NOPs or introduce
3148 invalid sharing when the operand is not really constant.
3149 It is not a big deal to prohibit constant propagation here as
3150 we will constant propagate in the DOM1 pass anyway. */
3151 if (is_gimple_min_invariant (value)
3152 && useless_type_conversion_p (TREE_TYPE (p),
3153 TREE_TYPE (value))
3154 /* We have to be very careful about ADDR_EXPR. Make sure
3155 the base variable isn't a local variable of the inlined
3156 function, e.g., when doing recursive inlining, direct or
3157 mutually-recursive or whatever, which is why we don't
3158 just test whether fn == current_function_decl. */
3159 && ! self_inlining_addr_expr (value, fn))
3160 {
3161 insert_decl_map (id, p, value);
3162 insert_debug_decl_map (id, p, var);
3163 return insert_init_debug_bind (id, bb, var, value, NULL);
3164 }
3165 }
3166
3167 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3168 that way, when the PARM_DECL is encountered, it will be
3169 automatically replaced by the VAR_DECL. */
3170 insert_decl_map (id, p, var);
3171
3172 /* Even if P was TREE_READONLY, the new VAR should not be.
3173 In the original code, we would have constructed a
3174 temporary, and then the function body would have never
3175 changed the value of P. However, now, we will be
3176 constructing VAR directly. The constructor body may
3177 change its value multiple times as it is being
3178 constructed. Therefore, it must not be TREE_READONLY;
3179 the back-end assumes that a TREE_READONLY variable is
3180 assigned to only once. */
3181 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3182 TREE_READONLY (var) = 0;
3183
3184 /* If there is no setup required and we are in SSA, take the easy route
3185 replacing all SSA names representing the function parameter by the
3186 SSA name passed to the function.
3187
3188 We need to construct a map for the variable anyway, as it might be used
3189 in different SSA names when the parameter is set in the function.
3190
3191 Do the replacement at -O0 for const arguments replaced by a constant.
3192 This is important for builtin_constant_p and other constructs requiring
3193 a constant argument to be visible in the inlined function body. */
3194 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3195 && (optimize
3196 || (TREE_READONLY (p)
3197 && is_gimple_min_invariant (rhs)))
3198 && (TREE_CODE (rhs) == SSA_NAME
3199 || is_gimple_min_invariant (rhs))
3200 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3201 {
3202 insert_decl_map (id, def, rhs);
3203 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3204 }
3205
3206 /* If the value of the argument is never used, don't bother
3207 initializing it. */
3208 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3209 {
3210 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3211 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3212 }
3213
3214 /* Initialize this VAR_DECL from the equivalent argument. Convert
3215 the argument to the proper type in case it was promoted. */
3216 if (value)
3217 {
3218 if (rhs == error_mark_node)
3219 {
3220 insert_decl_map (id, p, var);
3221 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3222 }
3223
3224 STRIP_USELESS_TYPE_CONVERSION (rhs);
3225
3226 /* If we are in SSA form properly remap the default definition
3227 or assign to a dummy SSA name if the parameter is unused and
3228 we are not optimizing. */
3229 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3230 {
3231 if (def)
3232 {
3233 def = remap_ssa_name (def, id);
3234 init_stmt = gimple_build_assign (def, rhs);
3235 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3236 set_ssa_default_def (cfun, var, NULL);
3237 }
3238 else if (!optimize)
3239 {
3240 def = make_ssa_name (var);
3241 init_stmt = gimple_build_assign (def, rhs);
3242 }
3243 }
3244 else
3245 init_stmt = gimple_build_assign (var, rhs);
3246
3247 if (bb && init_stmt)
3248 insert_init_stmt (id, bb, init_stmt);
3249 }
3250 return init_stmt;
3251 }
3252
3253 /* Generate code to initialize the parameters of the function at the
3254 top of the stack in ID from the GIMPLE_CALL STMT. */
3255
3256 static void
3257 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3258 tree fn, basic_block bb)
3259 {
3260 tree parms;
3261 size_t i;
3262 tree p;
3263 tree vars = NULL_TREE;
3264 tree static_chain = gimple_call_chain (stmt);
3265
3266 /* Figure out what the parameters are. */
3267 parms = DECL_ARGUMENTS (fn);
3268
3269 /* Loop through the parameter declarations, replacing each with an
3270 equivalent VAR_DECL, appropriately initialized. */
3271 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3272 {
3273 tree val;
3274 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3275 setup_one_parameter (id, p, val, fn, bb, &vars);
3276 }
3277 /* After remapping parameters remap their types. This has to be done
3278 in a second loop over all parameters to appropriately remap
3279 variable sized arrays when the size is specified in a
3280 parameter following the array. */
3281 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3282 {
3283 tree *varp = id->decl_map->get (p);
3284 if (varp && VAR_P (*varp))
3285 {
3286 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3287 ? ssa_default_def (id->src_cfun, p) : NULL);
3288 tree var = *varp;
3289 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3290 /* Also remap the default definition if it was remapped
3291 to the default definition of the parameter replacement
3292 by the parameter setup. */
3293 if (def)
3294 {
3295 tree *defp = id->decl_map->get (def);
3296 if (defp
3297 && TREE_CODE (*defp) == SSA_NAME
3298 && SSA_NAME_VAR (*defp) == var)
3299 TREE_TYPE (*defp) = TREE_TYPE (var);
3300 }
3301 }
3302 }
3303
3304 /* Initialize the static chain. */
3305 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3306 gcc_assert (fn != current_function_decl);
3307 if (p)
3308 {
3309 /* No static chain? Seems like a bug in tree-nested.c. */
3310 gcc_assert (static_chain);
3311
3312 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3313 }
3314
3315 declare_inline_vars (id->block, vars);
3316 }
3317
3318
3319 /* Declare a return variable to replace the RESULT_DECL for the
3320 function we are calling. An appropriate DECL_STMT is returned.
3321 The USE_STMT is filled to contain a use of the declaration to
3322 indicate the return value of the function.
3323
3324 RETURN_SLOT, if non-null, is the place in which to store the result. It
3325 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3326 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3327
3328 RETURN_BOUNDS holds a destination for returned bounds.
3329
3330 The return value is a (possibly null) value that holds the result
3331 as seen by the caller. */
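/* E.g. (illustrative): for x = foo (), the callee's return expr;
becomes retval = expr; and the use returned here lets the caller
read x = retval. When the types match and x is otherwise suitable,
MODIFY_DEST itself is reused and the temporary is elided. */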
3332
3333 static tree
3334 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3335 tree return_bounds, basic_block entry_bb)
3336 {
3337 tree callee = id->src_fn;
3338 tree result = DECL_RESULT (callee);
3339 tree callee_type = TREE_TYPE (result);
3340 tree caller_type;
3341 tree var, use;
3342
3343 /* Handle type-mismatches in the function declaration return type
3344 vs. the call expression. */
3345 if (modify_dest)
3346 caller_type = TREE_TYPE (modify_dest);
3347 else
3348 caller_type = TREE_TYPE (TREE_TYPE (callee));
3349
3350 /* We don't need to do anything for functions that don't return anything. */
3351 if (VOID_TYPE_P (callee_type))
3352 return NULL_TREE;
3353
3354 /* If there was a return slot, then the return value is the
3355 dereferenced address of that object. */
3356 if (return_slot)
3357 {
3358 /* The front end shouldn't have used both return_slot and
3359 a modify expression. */
3360 gcc_assert (!modify_dest);
3361 if (DECL_BY_REFERENCE (result))
3362 {
3363 tree return_slot_addr = build_fold_addr_expr (return_slot);
3364 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3365
3366 /* We are going to construct *&return_slot and we can't do that
3367 for variables believed to be not addressable.
3368
3369 FIXME: This check can possibly trigger, because values returned
3370 via return slot optimization are not believed to have their
3371 address taken by alias analysis. */
3372 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3373 var = return_slot_addr;
3374 }
3375 else
3376 {
3377 var = return_slot;
3378 gcc_assert (TREE_CODE (var) != SSA_NAME);
3379 if (TREE_ADDRESSABLE (result))
3380 mark_addressable (var);
3381 }
3382 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3383 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3384 && !DECL_GIMPLE_REG_P (result)
3385 && DECL_P (var))
3386 DECL_GIMPLE_REG_P (var) = 0;
3387 use = NULL;
3388 goto done;
3389 }
3390
3391 /* All types requiring non-trivial constructors should have been handled. */
3392 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3393
3394 /* Attempt to avoid creating a new temporary variable. */
3395 if (modify_dest
3396 && TREE_CODE (modify_dest) != SSA_NAME)
3397 {
3398 bool use_it = false;
3399
3400 /* We can't use MODIFY_DEST if there's type promotion involved. */
3401 if (!useless_type_conversion_p (callee_type, caller_type))
3402 use_it = false;
3403
3404 /* ??? If we're assigning to a variable sized type, then we must
3405 reuse the destination variable, because we've no good way to
3406 create variable sized temporaries at this point. */
3407 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3408 use_it = true;
3409
3410 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3411 reuse it as the result of the call directly. Don't do this if
3412 it would promote MODIFY_DEST to addressable. */
3413 else if (TREE_ADDRESSABLE (result))
3414 use_it = false;
3415 else
3416 {
3417 tree base_m = get_base_address (modify_dest);
3418
3419 /* If the base isn't a decl, then it's a pointer, and we don't
3420 know where that's going to go. */
3421 if (!DECL_P (base_m))
3422 use_it = false;
3423 else if (is_global_var (base_m))
3424 use_it = false;
3425 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3426 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3427 && !DECL_GIMPLE_REG_P (result)
3428 && DECL_GIMPLE_REG_P (base_m))
3429 use_it = false;
3430 else if (!TREE_ADDRESSABLE (base_m))
3431 use_it = true;
3432 }
3433
3434 if (use_it)
3435 {
3436 var = modify_dest;
3437 use = NULL;
3438 goto done;
3439 }
3440 }
3441
3442 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3443
3444 var = copy_result_decl_to_var (result, id);
3445 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3446
3447 /* Do not have the rest of GCC warn about this variable as it should
3448 not be visible to the user. */
3449 TREE_NO_WARNING (var) = 1;
3450
3451 declare_inline_vars (id->block, var);
3452
3453 /* Build the use expr. If the return type of the function was
3454 promoted, convert it back to the expected type. */
3455 use = var;
3456 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3457 {
3458 /* If we can match up types by promotion/demotion do so. */
3459 if (fold_convertible_p (caller_type, var))
3460 use = fold_convert (caller_type, var);
3461 else
3462 {
3463 /* ??? For valid programs we should not end up here.
3464 Still if we end up with truly mismatched types here, fall back
3465 to using a MEM_REF to not leak invalid GIMPLE to the following
3466 passes. */
3467 /* Prevent var from being written into SSA form. */
3468 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3469 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3470 DECL_GIMPLE_REG_P (var) = false;
3471 else if (is_gimple_reg_type (TREE_TYPE (var)))
3472 TREE_ADDRESSABLE (var) = true;
3473 use = fold_build2 (MEM_REF, caller_type,
3474 build_fold_addr_expr (var),
3475 build_int_cst (ptr_type_node, 0));
3476 }
3477 }
3478
3479 STRIP_USELESS_TYPE_CONVERSION (use);
3480
3481 if (DECL_BY_REFERENCE (result))
3482 {
3483 TREE_ADDRESSABLE (var) = 1;
3484 var = build_fold_addr_expr (var);
3485 }
3486
3487 done:
3488 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3489 way, when the RESULT_DECL is encountered, it will be
3490 automatically replaced by the VAR_DECL.
3491
3492 When returning by reference, ensure that RESULT_DECL remaps to
3493 gimple_val. */
3494 if (DECL_BY_REFERENCE (result)
3495 && !is_gimple_val (var))
3496 {
3497 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3498 insert_decl_map (id, result, temp);
3499 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3500 	 its default_def SSA_NAME.  */
3501 if (gimple_in_ssa_p (id->src_cfun)
3502 && is_gimple_reg (result))
3503 {
3504 temp = make_ssa_name (temp);
3505 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3506 }
3507 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3508 }
3509 else
3510 insert_decl_map (id, result, var);
3511
3512 /* Remember this so we can ignore it in remap_decls. */
3513 id->retvar = var;
3514
3515   /* If returned bounds are used, then make a variable for them.  */
3516 if (return_bounds)
3517 {
3518 tree bndtemp = create_tmp_var (pointer_bounds_type_node, "retbnd");
3519 DECL_SEEN_IN_BIND_EXPR_P (bndtemp) = 1;
3520 TREE_NO_WARNING (bndtemp) = 1;
3521 declare_inline_vars (id->block, bndtemp);
3522
3523 id->retbnd = bndtemp;
3524 insert_init_stmt (id, entry_bb,
3525 gimple_build_assign (bndtemp, chkp_get_zero_bounds_var ()));
3526 }
3527
3528 return use;
3529 }
3530
3531 /* Determine if the function can be copied. If so return NULL. If
3532    not return a string describing the reason for failure.  */
3533
3534 const char *
3535 copy_forbidden (struct function *fun)
3536 {
3537 const char *reason = fun->cannot_be_copied_reason;
3538
3539 /* Only examine the function once. */
3540 if (fun->cannot_be_copied_set)
3541 return reason;
3542
3543 /* We cannot copy a function that receives a non-local goto
3544 because we cannot remap the destination label used in the
3545 function that is performing the non-local goto. */
3546 /* ??? Actually, this should be possible, if we work at it.
3547 No doubt there's just a handful of places that simply
3548 assume it doesn't happen and don't substitute properly. */
3549 if (fun->has_nonlocal_label)
3550 {
3551 reason = G_("function %q+F can never be copied "
3552 "because it receives a non-local goto");
3553 goto fail;
3554 }
3555
3556 if (fun->has_forced_label_in_static)
3557 {
3558 reason = G_("function %q+F can never be copied because it saves "
3559 "address of local label in a static variable");
3560 goto fail;
3561 }
3562
3563 fail:
3564 fun->cannot_be_copied_reason = reason;
3565 fun->cannot_be_copied_set = true;
3566 return reason;
3567 }
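/* For illustration (GNU C, not part of the compiler): a function that
   receives a non-local goto, and therefore can never be copied:

     void f (void)
     {
       __label__ out;
       void g (void) { goto out; }   // the nested fn jumps back into f
       g ();
       out:;
     }

   Here fun->has_nonlocal_label is set for f, so copy_forbidden reports
   the "receives a non-local goto" reason above.  */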
3568
3569
3570 static const char *inline_forbidden_reason;
3571
3572 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3573    iff a function cannot be inlined.  Also sets the reason why.  */
3574
3575 static tree
3576 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3577 struct walk_stmt_info *wip)
3578 {
3579 tree fn = (tree) wip->info;
3580 tree t;
3581 gimple *stmt = gsi_stmt (*gsi);
3582
3583 switch (gimple_code (stmt))
3584 {
3585 case GIMPLE_CALL:
3586       /* Refuse to inline an alloca call unless the user explicitly forced it,
3587 	 as this may change the program's memory overhead drastically when the
3588 	 function using alloca is called in a loop.  In the GCC present in
3589 	 SPEC2000, inlining into schedule_block caused it to require 2GB of
3590 	 RAM instead of 256MB.  Don't do so for alloca calls emitted for
3591 	 VLA objects, as those can't cause unbounded growth (they're always
3592 	 wrapped inside stack_save/stack_restore regions).  */
3593 if (gimple_maybe_alloca_call_p (stmt)
3594 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3595 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3596 {
3597 inline_forbidden_reason
3598 = G_("function %q+F can never be inlined because it uses "
3599 "alloca (override using the always_inline attribute)");
3600 *handled_ops_p = true;
3601 return fn;
3602 }
3603
3604 t = gimple_call_fndecl (stmt);
3605 if (t == NULL_TREE)
3606 break;
3607
3608 /* We cannot inline functions that call setjmp. */
3609 if (setjmp_call_p (t))
3610 {
3611 inline_forbidden_reason
3612 = G_("function %q+F can never be inlined because it uses setjmp");
3613 *handled_ops_p = true;
3614 return t;
3615 }
3616
3617 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3618 switch (DECL_FUNCTION_CODE (t))
3619 {
3620 /* We cannot inline functions that take a variable number of
3621 arguments. */
3622 case BUILT_IN_VA_START:
3623 case BUILT_IN_NEXT_ARG:
3624 case BUILT_IN_VA_END:
3625 inline_forbidden_reason
3626 = G_("function %q+F can never be inlined because it "
3627 "uses variable argument lists");
3628 *handled_ops_p = true;
3629 return t;
3630
3631 case BUILT_IN_LONGJMP:
3632 /* We can't inline functions that call __builtin_longjmp at
3633 all. The non-local goto machinery really requires the
3634 destination be in a different function. If we allow the
3635 function calling __builtin_longjmp to be inlined into the
3636 function calling __builtin_setjmp, Things will Go Awry. */
3637 inline_forbidden_reason
3638 = G_("function %q+F can never be inlined because "
3639 "it uses setjmp-longjmp exception handling");
3640 *handled_ops_p = true;
3641 return t;
3642
3643 case BUILT_IN_NONLOCAL_GOTO:
3644 /* Similarly. */
3645 inline_forbidden_reason
3646 = G_("function %q+F can never be inlined because "
3647 "it uses non-local goto");
3648 *handled_ops_p = true;
3649 return t;
3650
3651 case BUILT_IN_RETURN:
3652 case BUILT_IN_APPLY_ARGS:
3653 	    /* If a __builtin_apply_args caller would be inlined,
3654 	       it would be saving arguments of the function it has
3655 	       been inlined into.  Similarly __builtin_return would
3656 	       return from the function into which it has been inlined.  */
3657 inline_forbidden_reason
3658 = G_("function %q+F can never be inlined because "
3659 "it uses __builtin_return or __builtin_apply_args");
3660 *handled_ops_p = true;
3661 return t;
3662
3663 default:
3664 break;
3665 }
3666 break;
3667
3668 case GIMPLE_GOTO:
3669 t = gimple_goto_dest (stmt);
3670
3671 /* We will not inline a function which uses computed goto. The
3672 addresses of its local labels, which may be tucked into
3673 global storage, are of course not constant across
3674 instantiations, which causes unexpected behavior. */
3675 if (TREE_CODE (t) != LABEL_DECL)
3676 {
3677 inline_forbidden_reason
3678 = G_("function %q+F can never be inlined "
3679 "because it contains a computed goto");
3680 *handled_ops_p = true;
3681 return t;
3682 }
3683 break;
3684
3685 default:
3686 break;
3687 }
3688
3689 *handled_ops_p = false;
3690 return NULL_TREE;
3691 }
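/* A few illustrative statements this callback rejects (sketches only,
   assuming the usual declarations):

     p = __builtin_alloca (n);   // forbidden unless always_inline
     if (setjmp (env)) ...       // setjmp callers cannot be inlined
     goto *dispatch[i];          // computed goto: dest is not a LABEL_DECL
*/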
3692
3693 /* Return true if FNDECL is a function that cannot be inlined into
3694 another one. */
3695
3696 static bool
3697 inline_forbidden_p (tree fndecl)
3698 {
3699 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3700 struct walk_stmt_info wi;
3701 basic_block bb;
3702 bool forbidden_p = false;
3703
3704 /* First check for shared reasons not to copy the code. */
3705 inline_forbidden_reason = copy_forbidden (fun);
3706 if (inline_forbidden_reason != NULL)
3707 return true;
3708
3709 /* Next, walk the statements of the function looking for
3710      constructs we can't handle, or that are non-optimal for inlining.  */
3711 hash_set<tree> visited_nodes;
3712 memset (&wi, 0, sizeof (wi));
3713 wi.info = (void *) fndecl;
3714 wi.pset = &visited_nodes;
3715
3716 FOR_EACH_BB_FN (bb, fun)
3717 {
3718 gimple *ret;
3719 gimple_seq seq = bb_seq (bb);
3720 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3721 forbidden_p = (ret != NULL);
3722 if (forbidden_p)
3723 break;
3724 }
3725
3726 return forbidden_p;
3727 }
3728
3729 /* Return false if the function FNDECL cannot be inlined on account of its
3730 attributes, true otherwise. */
3731 static bool
3732 function_attribute_inlinable_p (const_tree fndecl)
3733 {
3734 if (targetm.attribute_table)
3735 {
3736 const_tree a;
3737
3738 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3739 {
3740 const_tree name = TREE_PURPOSE (a);
3741 int i;
3742
3743 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3744 if (is_attribute_p (targetm.attribute_table[i].name, name))
3745 return targetm.function_attribute_inlinable_p (fndecl);
3746 }
3747 }
3748
3749 return true;
3750 }
3751
3752 /* Returns nonzero if FN is a function that does not have any
3753 fundamental inline blocking properties. */
3754
3755 bool
3756 tree_inlinable_function_p (tree fn)
3757 {
3758 bool inlinable = true;
3759 bool do_warning;
3760 tree always_inline;
3761
3762 /* If we've already decided this function shouldn't be inlined,
3763 there's no need to check again. */
3764 if (DECL_UNINLINABLE (fn))
3765 return false;
3766
3767 /* We only warn for functions declared `inline' by the user. */
3768 do_warning = (warn_inline
3769 && DECL_DECLARED_INLINE_P (fn)
3770 && !DECL_NO_INLINE_WARNING_P (fn)
3771 && !DECL_IN_SYSTEM_HEADER (fn));
3772
3773 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3774
3775 if (flag_no_inline
3776 && always_inline == NULL)
3777 {
3778 if (do_warning)
3779 warning (OPT_Winline, "function %q+F can never be inlined because it "
3780 "is suppressed using -fno-inline", fn);
3781 inlinable = false;
3782 }
3783
3784 else if (!function_attribute_inlinable_p (fn))
3785 {
3786 if (do_warning)
3787 warning (OPT_Winline, "function %q+F can never be inlined because it "
3788 "uses attributes conflicting with inlining", fn);
3789 inlinable = false;
3790 }
3791
3792 else if (inline_forbidden_p (fn))
3793 {
3794 /* See if we should warn about uninlinable functions. Previously,
3795 some of these warnings would be issued while trying to expand
3796 the function inline, but that would cause multiple warnings
3797 about functions that would for example call alloca. But since
3798 	 this is a property of the function, just one warning is enough.
3799 As a bonus we can now give more details about the reason why a
3800 function is not inlinable. */
3801 if (always_inline)
3802 error (inline_forbidden_reason, fn);
3803 else if (do_warning)
3804 warning (OPT_Winline, inline_forbidden_reason, fn);
3805
3806 inlinable = false;
3807 }
3808
3809 /* Squirrel away the result so that we don't have to check again. */
3810 DECL_UNINLINABLE (fn) = !inlinable;
3811
3812 return inlinable;
3813 }
3814
3815 /* Estimate the cost of a memory move of type TYPE.  Use the machine
3816    dependent word size, take a possible memcpy call into account, and return
3817    the cost based on whether we optimize for size or speed, per SPEED_P.  */
3818
3819 int
3820 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3821 {
3822 HOST_WIDE_INT size;
3823
3824 gcc_assert (!VOID_TYPE_P (type));
3825
3826 if (TREE_CODE (type) == VECTOR_TYPE)
3827 {
3828 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
3829 machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
3830 int orig_mode_size
3831 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
3832 int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
3833 return ((orig_mode_size + simd_mode_size - 1)
3834 / simd_mode_size);
3835 }
3836
3837 size = int_size_in_bytes (type);
3838
3839 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3840 /* Cost of a memcpy call, 3 arguments and the call. */
3841 return 4;
3842 else
3843 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3844 }
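/* Worked example with hypothetical target values (illustration only):
   if MOVE_MAX_PIECES is 8 and MOVE_RATIO (speed_p) is 4, then a 24-byte
   struct costs (24 + 8 - 1) / 8 == 3, while a 40-byte struct exceeds
   the 8 * 4 == 32 byte limit and is costed as a memcpy call: 4.  */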
3845
3846 /* Returns the cost of operation CODE, according to WEIGHTS.  */
3847
3848 static int
3849 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3850 tree op1 ATTRIBUTE_UNUSED, tree op2)
3851 {
3852 switch (code)
3853 {
3854 /* These are "free" conversions, or their presumed cost
3855 is folded into other operations. */
3856 case RANGE_EXPR:
3857 CASE_CONVERT:
3858 case COMPLEX_EXPR:
3859 case PAREN_EXPR:
3860 case VIEW_CONVERT_EXPR:
3861 return 0;
3862
3863 /* Assign cost of 1 to usual operations.
3864 ??? We may consider mapping RTL costs to this. */
3865 case COND_EXPR:
3866 case VEC_COND_EXPR:
3867 case VEC_PERM_EXPR:
3868
3869 case PLUS_EXPR:
3870 case POINTER_PLUS_EXPR:
3871 case POINTER_DIFF_EXPR:
3872 case MINUS_EXPR:
3873 case MULT_EXPR:
3874 case MULT_HIGHPART_EXPR:
3875 case FMA_EXPR:
3876
3877 case ADDR_SPACE_CONVERT_EXPR:
3878 case FIXED_CONVERT_EXPR:
3879 case FIX_TRUNC_EXPR:
3880
3881 case NEGATE_EXPR:
3882 case FLOAT_EXPR:
3883 case MIN_EXPR:
3884 case MAX_EXPR:
3885 case ABS_EXPR:
3886
3887 case LSHIFT_EXPR:
3888 case RSHIFT_EXPR:
3889 case LROTATE_EXPR:
3890 case RROTATE_EXPR:
3891
3892 case BIT_IOR_EXPR:
3893 case BIT_XOR_EXPR:
3894 case BIT_AND_EXPR:
3895 case BIT_NOT_EXPR:
3896
3897 case TRUTH_ANDIF_EXPR:
3898 case TRUTH_ORIF_EXPR:
3899 case TRUTH_AND_EXPR:
3900 case TRUTH_OR_EXPR:
3901 case TRUTH_XOR_EXPR:
3902 case TRUTH_NOT_EXPR:
3903
3904 case LT_EXPR:
3905 case LE_EXPR:
3906 case GT_EXPR:
3907 case GE_EXPR:
3908 case EQ_EXPR:
3909 case NE_EXPR:
3910 case ORDERED_EXPR:
3911 case UNORDERED_EXPR:
3912
3913 case UNLT_EXPR:
3914 case UNLE_EXPR:
3915 case UNGT_EXPR:
3916 case UNGE_EXPR:
3917 case UNEQ_EXPR:
3918 case LTGT_EXPR:
3919
3920 case CONJ_EXPR:
3921
3922 case PREDECREMENT_EXPR:
3923 case PREINCREMENT_EXPR:
3924 case POSTDECREMENT_EXPR:
3925 case POSTINCREMENT_EXPR:
3926
3927 case REALIGN_LOAD_EXPR:
3928
3929 case WIDEN_SUM_EXPR:
3930 case WIDEN_MULT_EXPR:
3931 case DOT_PROD_EXPR:
3932 case SAD_EXPR:
3933 case WIDEN_MULT_PLUS_EXPR:
3934 case WIDEN_MULT_MINUS_EXPR:
3935 case WIDEN_LSHIFT_EXPR:
3936
3937 case VEC_WIDEN_MULT_HI_EXPR:
3938 case VEC_WIDEN_MULT_LO_EXPR:
3939 case VEC_WIDEN_MULT_EVEN_EXPR:
3940 case VEC_WIDEN_MULT_ODD_EXPR:
3941 case VEC_UNPACK_HI_EXPR:
3942 case VEC_UNPACK_LO_EXPR:
3943 case VEC_UNPACK_FLOAT_HI_EXPR:
3944 case VEC_UNPACK_FLOAT_LO_EXPR:
3945 case VEC_PACK_TRUNC_EXPR:
3946 case VEC_PACK_SAT_EXPR:
3947 case VEC_PACK_FIX_TRUNC_EXPR:
3948 case VEC_WIDEN_LSHIFT_HI_EXPR:
3949 case VEC_WIDEN_LSHIFT_LO_EXPR:
3950 case VEC_DUPLICATE_EXPR:
3951 case VEC_SERIES_EXPR:
3952
3953 return 1;
3954
3955     /* A few special cases of expensive operations.  This is useful
3956        to avoid inlining functions having too many of these.  */
3957 case TRUNC_DIV_EXPR:
3958 case CEIL_DIV_EXPR:
3959 case FLOOR_DIV_EXPR:
3960 case ROUND_DIV_EXPR:
3961 case EXACT_DIV_EXPR:
3962 case TRUNC_MOD_EXPR:
3963 case CEIL_MOD_EXPR:
3964 case FLOOR_MOD_EXPR:
3965 case ROUND_MOD_EXPR:
3966 case RDIV_EXPR:
3967 if (TREE_CODE (op2) != INTEGER_CST)
3968 return weights->div_mod_cost;
3969 return 1;
3970
3971 /* Bit-field insertion needs several shift and mask operations. */
3972 case BIT_INSERT_EXPR:
3973 return 3;
3974
3975 default:
3976 /* We expect a copy assignment with no operator. */
3977 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3978 return 0;
3979 }
3980 }
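/* For example: "t = a / b" with a non-constant divisor is charged
   weights->div_mod_cost, while "t = a / 4" is assumed to expand into
   cheap shifts and masks and is charged 1; a plain conversion or
   PAREN_EXPR is free.  */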
3981
3982
3983 /* Estimate number of instructions that will be created by expanding
3984 the statements in the statement sequence STMTS.
3985 WEIGHTS contains weights attributed to various constructs. */
3986
3987 int
3988 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3989 {
3990 int cost;
3991 gimple_stmt_iterator gsi;
3992
3993 cost = 0;
3994 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3995 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3996
3997 return cost;
3998 }
3999
4000
4001 /* Estimate number of instructions that will be created by expanding STMT.
4002 WEIGHTS contains weights attributed to various constructs. */
4003
4004 int
4005 estimate_num_insns (gimple *stmt, eni_weights *weights)
4006 {
4007 unsigned cost, i;
4008 enum gimple_code code = gimple_code (stmt);
4009 tree lhs;
4010 tree rhs;
4011
4012 switch (code)
4013 {
4014 case GIMPLE_ASSIGN:
4015       /* Try to estimate the cost of assignments.  We have two cases to
4016 	 deal with:
4017 	 1) Simple assignments to registers;
4018 	 2) Stores to things that must live in memory.  This includes
4019 	    "normal" stores to scalars, but also assignments of large
4020 	    structures, or constructors of big arrays;
4021 
4022 	 Let us look at these two cases, assuming we have "a = b + C":
4023 <GIMPLE_ASSIGN <var_decl "a">
4024 <plus_expr <var_decl "b"> <constant C>>
4025 If "a" is a GIMPLE register, the assignment to it is free on almost
4026 any target, because "a" usually ends up in a real register. Hence
4027 the only cost of this expression comes from the PLUS_EXPR, and we
4028 can ignore the GIMPLE_ASSIGN.
4029 If "a" is not a GIMPLE register, the assignment to "a" will most
4030 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4031 of moving something into "a", which we compute using the function
4032 estimate_move_cost. */
4033 if (gimple_clobber_p (stmt))
4034 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4035
4036 lhs = gimple_assign_lhs (stmt);
4037 rhs = gimple_assign_rhs1 (stmt);
4038
4039 cost = 0;
4040
4041 /* Account for the cost of moving to / from memory. */
4042 if (gimple_store_p (stmt))
4043 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4044 if (gimple_assign_load_p (stmt))
4045 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4046
4047 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4048 gimple_assign_rhs1 (stmt),
4049 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4050 == GIMPLE_BINARY_RHS
4051 ? gimple_assign_rhs2 (stmt) : NULL);
4052 break;
4053
4054 case GIMPLE_COND:
4055 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4056 gimple_op (stmt, 0),
4057 gimple_op (stmt, 1));
4058 break;
4059
4060 case GIMPLE_SWITCH:
4061 {
4062 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4063 /* Take into account cost of the switch + guess 2 conditional jumps for
4064 each case label.
4065
4066 TODO: once the switch expansion logic is sufficiently separated, we can
4067 	do a better job of estimating the cost of the switch.  */
4068 if (weights->time_based)
4069 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4070 else
4071 cost = gimple_switch_num_labels (switch_stmt) * 2;
4072 }
4073 break;
4074
4075 case GIMPLE_CALL:
4076 {
4077 tree decl;
4078
4079 if (gimple_call_internal_p (stmt))
4080 return 0;
4081 else if ((decl = gimple_call_fndecl (stmt))
4082 && DECL_BUILT_IN (decl))
4083 {
4084 	    /* Do not special-case builtins where we see the body.
4085 	       This just confuses the inliner.  */
4086 struct cgraph_node *node;
4087 if (!(node = cgraph_node::get (decl))
4088 || node->definition)
4089 ;
4090 	    /* For builtins that are likely expanded to nothing or
4091 	       inlined, do not account operand costs.  */
4092 else if (is_simple_builtin (decl))
4093 return 0;
4094 else if (is_inexpensive_builtin (decl))
4095 return weights->target_builtin_call_cost;
4096 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4097 {
4098 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4099 specialize the cheap expansion we do here.
4100 ??? This asks for a more general solution. */
4101 switch (DECL_FUNCTION_CODE (decl))
4102 {
4103 case BUILT_IN_POW:
4104 case BUILT_IN_POWF:
4105 case BUILT_IN_POWL:
4106 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4107 && (real_equal
4108 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4109 &dconst2)))
4110 return estimate_operator_cost
4111 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4112 gimple_call_arg (stmt, 0));
4113 break;
4114
4115 default:
4116 break;
4117 }
4118 }
4119 }
4120
4121 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4122 if (gimple_call_lhs (stmt))
4123 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4124 weights->time_based);
4125 for (i = 0; i < gimple_call_num_args (stmt); i++)
4126 {
4127 tree arg = gimple_call_arg (stmt, i);
4128 cost += estimate_move_cost (TREE_TYPE (arg),
4129 weights->time_based);
4130 }
4131 break;
4132 }
4133
4134 case GIMPLE_RETURN:
4135 return weights->return_cost;
4136
4137 case GIMPLE_GOTO:
4138 case GIMPLE_LABEL:
4139 case GIMPLE_NOP:
4140 case GIMPLE_PHI:
4141 case GIMPLE_PREDICT:
4142 case GIMPLE_DEBUG:
4143 return 0;
4144
4145 case GIMPLE_ASM:
4146 {
4147 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4148 /* 1000 means infinity. This avoids overflows later
4149 with very long asm statements. */
4150 if (count > 1000)
4151 count = 1000;
4152 /* If this asm is asm inline, count anything as minimum size. */
4153 if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4154 count = MIN (1, count);
4155 return MAX (1, count);
4156 }
4157
4158 case GIMPLE_RESX:
4159 /* This is either going to be an external function call with one
4160 argument, or two register copy statements plus a goto. */
4161 return 2;
4162
4163 case GIMPLE_EH_DISPATCH:
4164 /* ??? This is going to turn into a switch statement. Ideally
4165 we'd have a look at the eh region and estimate the number of
4166 edges involved. */
4167 return 10;
4168
4169 case GIMPLE_BIND:
4170 return estimate_num_insns_seq (
4171 gimple_bind_body (as_a <gbind *> (stmt)),
4172 weights);
4173
4174 case GIMPLE_EH_FILTER:
4175 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4176
4177 case GIMPLE_CATCH:
4178 return estimate_num_insns_seq (gimple_catch_handler (
4179 as_a <gcatch *> (stmt)),
4180 weights);
4181
4182 case GIMPLE_TRY:
4183 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4184 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4185
4186 /* OMP directives are generally very expensive. */
4187
4188 case GIMPLE_OMP_RETURN:
4189 case GIMPLE_OMP_SECTIONS_SWITCH:
4190 case GIMPLE_OMP_ATOMIC_STORE:
4191 case GIMPLE_OMP_CONTINUE:
4192 /* ...except these, which are cheap. */
4193 return 0;
4194
4195 case GIMPLE_OMP_ATOMIC_LOAD:
4196 return weights->omp_cost;
4197
4198 case GIMPLE_OMP_FOR:
4199 return (weights->omp_cost
4200 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4201 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4202
4203 case GIMPLE_OMP_PARALLEL:
4204 case GIMPLE_OMP_TASK:
4205 case GIMPLE_OMP_CRITICAL:
4206 case GIMPLE_OMP_MASTER:
4207 case GIMPLE_OMP_TASKGROUP:
4208 case GIMPLE_OMP_ORDERED:
4209 case GIMPLE_OMP_SECTION:
4210 case GIMPLE_OMP_SECTIONS:
4211 case GIMPLE_OMP_SINGLE:
4212 case GIMPLE_OMP_TARGET:
4213 case GIMPLE_OMP_TEAMS:
4214 return (weights->omp_cost
4215 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4216
4217 case GIMPLE_TRANSACTION:
4218 return (weights->tm_cost
4219 + estimate_num_insns_seq (gimple_transaction_body (
4220 as_a <gtransaction *> (stmt)),
4221 weights));
4222
4223 default:
4224 gcc_unreachable ();
4225 }
4226
4227 return cost;
4228 }
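/* Worked examples (illustrative): "t = a / b" with non-constant b is
   charged weights->div_mod_cost and no move cost, as both operands are
   registers; "*p = t" is a store, charged the move cost of its type;
   and a GIMPLE_COND such as "if (a < b)" is charged 1 + 1 == 2.  */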
4229
4230 /* Estimate number of instructions that will be created by expanding
4231 function FNDECL. WEIGHTS contains weights attributed to various
4232 constructs. */
4233
4234 int
4235 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4236 {
4237 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4238 gimple_stmt_iterator bsi;
4239 basic_block bb;
4240 int n = 0;
4241
4242 gcc_assert (my_function && my_function->cfg);
4243 FOR_EACH_BB_FN (bb, my_function)
4244 {
4245 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4246 n += estimate_num_insns (gsi_stmt (bsi), weights);
4247 }
4248
4249 return n;
4250 }
4251
4252
4253 /* Initializes weights used by estimate_num_insns. */
4254
4255 void
4256 init_inline_once (void)
4257 {
4258 eni_size_weights.call_cost = 1;
4259 eni_size_weights.indirect_call_cost = 3;
4260 eni_size_weights.target_builtin_call_cost = 1;
4261 eni_size_weights.div_mod_cost = 1;
4262 eni_size_weights.omp_cost = 40;
4263 eni_size_weights.tm_cost = 10;
4264 eni_size_weights.time_based = false;
4265 eni_size_weights.return_cost = 1;
4266
4267 /* Estimating time for call is difficult, since we have no idea what the
4268 called function does. In the current uses of eni_time_weights,
4269 underestimating the cost does less harm than overestimating it, so
4270 we choose a rather small value here. */
4271 eni_time_weights.call_cost = 10;
4272 eni_time_weights.indirect_call_cost = 15;
4273 eni_time_weights.target_builtin_call_cost = 1;
4274 eni_time_weights.div_mod_cost = 10;
4275 eni_time_weights.omp_cost = 40;
4276 eni_time_weights.tm_cost = 40;
4277 eni_time_weights.time_based = true;
4278 eni_time_weights.return_cost = 2;
4279 }
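/* Example of the difference (illustrative): a direct call "x = g (a)"
   with int argument and result starts from call_cost 1 under
   eni_size_weights but call_cost 10 under eni_time_weights, plus the
   move costs for the LHS and the argument in both cases.  */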
4280
4281
4282 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4283
4284 static void
4285 prepend_lexical_block (tree current_block, tree new_block)
4286 {
4287 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4288 BLOCK_SUBBLOCKS (current_block) = new_block;
4289 BLOCK_SUPERCONTEXT (new_block) = current_block;
4290 }
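/* Sketch of the effect: prepend_lexical_block (B, N) on a block B with
   subblocks [S1, S2] yields [N, S1, S2], with
   BLOCK_SUPERCONTEXT (N) == B.  */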
4291
4292 /* Add local variables from CALLEE to CALLER. */
4293
4294 static inline void
4295 add_local_variables (struct function *callee, struct function *caller,
4296 copy_body_data *id)
4297 {
4298 tree var;
4299 unsigned ix;
4300
4301 FOR_EACH_LOCAL_DECL (callee, ix, var)
4302 if (!can_be_nonlocal (var, id))
4303 {
4304 tree new_var = remap_decl (var, id);
4305
4306 /* Remap debug-expressions. */
4307 if (VAR_P (new_var)
4308 && DECL_HAS_DEBUG_EXPR_P (var)
4309 && new_var != var)
4310 {
4311 tree tem = DECL_DEBUG_EXPR (var);
4312 bool old_regimplify = id->regimplify;
4313 id->remapping_type_depth++;
4314 walk_tree (&tem, copy_tree_body_r, id, NULL);
4315 id->remapping_type_depth--;
4316 id->regimplify = old_regimplify;
4317 SET_DECL_DEBUG_EXPR (new_var, tem);
4318 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4319 }
4320 add_local_decl (caller, new_var);
4321 }
4322 }
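/* For example (illustrative): a callee-local "int tmp;" is remapped via
   remap_decl and the copy appended to the caller's local_decls; if tmp
   carried a DECL_DEBUG_EXPR, that expression is rewritten in terms of
   the caller's copies before being attached to the new variable.  */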
4323
4324 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4325 have brought in or introduced any debug stmts for SRCVAR. */
4326
4327 static inline void
4328 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4329 {
4330 tree *remappedvarp = id->decl_map->get (srcvar);
4331
4332 if (!remappedvarp)
4333 return;
4334
4335 if (!VAR_P (*remappedvarp))
4336 return;
4337
4338 if (*remappedvarp == id->retvar || *remappedvarp == id->retbnd)
4339 return;
4340
4341 tree tvar = target_for_debug_bind (*remappedvarp);
4342 if (!tvar)
4343 return;
4344
4345 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4346 id->call_stmt);
4347 gimple_seq_add_stmt (bindings, stmt);
4348 }
4349
4350 /* For each inlined variable for which we may have debug bind stmts,
4351 add before GSI a final debug stmt resetting it, marking the end of
4352 its life, so that var-tracking knows it doesn't have to compute
4353 further locations for it. */
4354
4355 static inline void
4356 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4357 {
4358 tree var;
4359 unsigned ix;
4360 gimple_seq bindings = NULL;
4361
4362 if (!gimple_in_ssa_p (id->src_cfun))
4363 return;
4364
4365 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4366 return;
4367
4368 for (var = DECL_ARGUMENTS (id->src_fn);
4369 var; var = DECL_CHAIN (var))
4370 reset_debug_binding (id, var, &bindings);
4371
4372 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4373 reset_debug_binding (id, var, &bindings);
4374
4375 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4376 }
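/* In GIMPLE dumps the statements emitted here appear as binds to
   nothing, e.g. (illustrative):

     # DEBUG x => NULL

   one per formal or local of the inlined callee, placed right before
   the point the inlined body returns to.  */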
4377
4378 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4379
4380 static bool
4381 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
4382 {
4383 tree use_retvar;
4384 tree fn;
4385 hash_map<tree, tree> *dst;
4386 hash_map<tree, tree> *st = NULL;
4387 tree return_slot;
4388 tree modify_dest;
4389 tree return_bounds = NULL;
4390 struct cgraph_edge *cg_edge;
4391 cgraph_inline_failed_t reason;
4392 basic_block return_block;
4393 edge e;
4394 gimple_stmt_iterator gsi, stmt_gsi;
4395 bool successfully_inlined = false;
4396 bool purge_dead_abnormal_edges;
4397 gcall *call_stmt;
4398 unsigned int i;
4399 unsigned int prop_mask, src_properties;
4400 struct function *dst_cfun;
4401 tree simduid;
4402 use_operand_p use;
4403 gimple *simtenter_stmt = NULL;
4404 vec<tree> *simtvars_save;
4405
4406 /* The gimplifier uses input_location in too many places, such as
4407 internal_get_tmp_var (). */
4408 location_t saved_location = input_location;
4409 input_location = gimple_location (stmt);
4410
4411 /* From here on, we're only interested in CALL_EXPRs. */
4412 call_stmt = dyn_cast <gcall *> (stmt);
4413 if (!call_stmt)
4414 goto egress;
4415
4416 cg_edge = id->dst_node->get_edge (stmt);
4417 gcc_checking_assert (cg_edge);
4418 /* First, see if we can figure out what function is being called.
4419 If we cannot, then there is no hope of inlining the function. */
4420 if (cg_edge->indirect_unknown_callee)
4421 goto egress;
4422 fn = cg_edge->callee->decl;
4423 gcc_checking_assert (fn);
4424
4425 /* If FN is a declaration of a function in a nested scope that was
4426 globally declared inline, we don't set its DECL_INITIAL.
4427 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4428 C++ front-end uses it for cdtors to refer to their internal
4429 declarations, that are not real functions. Fortunately those
4430 don't have trees to be saved, so we can tell by checking their
4431 gimple_body. */
4432 if (!DECL_INITIAL (fn)
4433 && DECL_ABSTRACT_ORIGIN (fn)
4434 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4435 fn = DECL_ABSTRACT_ORIGIN (fn);
4436
4437 /* Don't try to inline functions that are not well-suited to inlining. */
4438 if (cg_edge->inline_failed)
4439 {
4440 reason = cg_edge->inline_failed;
4441 /* If this call was originally indirect, we do not want to emit any
4442 inlining related warnings or sorry messages because there are no
4443 guarantees regarding those. */
4444 if (cg_edge->indirect_inlining_edge)
4445 goto egress;
4446
4447 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4448 	  /* For extern inline functions that get redefined we have always
4449 	     silently ignored the always_inline flag.  Better behavior would
4450 	     be to keep both bodies and use the extern inline body
4451 	     for inlining, but we can't do that because front ends overwrite
4452 	     the body.  */
4453 && !cg_edge->callee->local.redefined_extern_inline
4454 /* During early inline pass, report only when optimization is
4455 not turned on. */
4456 && (symtab->global_info_ready
4457 || !optimize
4458 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4459 /* PR 20090218-1_0.c. Body can be provided by another module. */
4460 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4461 {
4462 error ("inlining failed in call to always_inline %q+F: %s", fn,
4463 cgraph_inline_failed_string (reason));
4464 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4465 inform (gimple_location (stmt), "called from here");
4466 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4467 inform (DECL_SOURCE_LOCATION (cfun->decl),
4468 "called from this function");
4469 }
4470 else if (warn_inline
4471 && DECL_DECLARED_INLINE_P (fn)
4472 && !DECL_NO_INLINE_WARNING_P (fn)
4473 && !DECL_IN_SYSTEM_HEADER (fn)
4474 && reason != CIF_UNSPECIFIED
4475 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4476 /* Do not warn about not inlined recursive calls. */
4477 && !cg_edge->recursive_p ()
4478 /* Avoid warnings during early inline pass. */
4479 && symtab->global_info_ready)
4480 {
4481 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4482 fn, _(cgraph_inline_failed_string (reason))))
4483 {
4484 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4485 inform (gimple_location (stmt), "called from here");
4486 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4487 inform (DECL_SOURCE_LOCATION (cfun->decl),
4488 "called from this function");
4489 }
4490 }
4491 goto egress;
4492 }
4493 id->src_node = cg_edge->callee;
4494
4495   /* If the callee is a thunk, all we need to do is adjust the THIS pointer
4496      and redirect to the function being thunked.  */
4497 if (id->src_node->thunk.thunk_p)
4498 {
4499 cgraph_edge *edge;
4500 tree virtual_offset = NULL;
4501 profile_count count = cg_edge->count;
4502 tree op;
4503 gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4504
4505 cg_edge->remove ();
4506 edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4507 gimple_uid (stmt),
4508 profile_count::one (),
4509 profile_count::one (),
4510 true);
4511 edge->count = count;
4512 if (id->src_node->thunk.virtual_offset_p)
4513 virtual_offset = size_int (id->src_node->thunk.virtual_value);
4514 op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4515 NULL);
4516 gsi_insert_before (&iter, gimple_build_assign (op,
4517 gimple_call_arg (stmt, 0)),
4518 GSI_NEW_STMT);
4519 gcc_assert (id->src_node->thunk.this_adjusting);
4520 op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
4521 virtual_offset);
4522
4523 gimple_call_set_arg (stmt, 0, op);
4524 gimple_call_set_fndecl (stmt, edge->callee->decl);
4525 update_stmt (stmt);
4526 id->src_node->remove ();
4527 expand_call_inline (bb, stmt, id);
4528 maybe_remove_unused_call_args (cfun, stmt);
4529 return true;
4530 }
4531 fn = cg_edge->callee->decl;
4532 cg_edge->callee->get_untransformed_body ();
4533
4534 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4535 cg_edge->callee->verify ();
4536
4537 /* We will be inlining this callee. */
4538 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4539 id->assign_stmts.create (0);
4540
4541   /* Update the caller's EH personality.  */
4542 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4543 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4544 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
4545
4546 /* Split the block before the GIMPLE_CALL. */
4547 stmt_gsi = gsi_for_stmt (stmt);
4548 gsi_prev (&stmt_gsi);
4549 e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4550 bb = e->src;
4551 return_block = e->dest;
4552 remove_edge (e);
4553
4554 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4555 been the source of abnormal edges. In this case, schedule
4556 the removal of dead abnormal edges. */
4557 gsi = gsi_start_bb (return_block);
4558 gsi_next (&gsi);
4559 purge_dead_abnormal_edges = gsi_end_p (gsi);
4560
4561 stmt_gsi = gsi_start_bb (return_block);
4562
4563 /* Build a block containing code to initialize the arguments, the
4564 actual inline expansion of the body, and a label for the return
4565 statements within the function to jump to. The type of the
4566 statement expression is the return type of the function call.
4567 ??? If the call does not have an associated block then we will
4568 remap all callee blocks to NULL, effectively dropping most of
4569 its debug information. This should only happen for calls to
4570 artificial decls inserted by the compiler itself. We need to
4571 either link the inlined blocks into the caller block tree or
4572 not refer to them in any way to not break GC for locations. */
4573 if (gimple_block (stmt))
4574 {
4575       /* We do want to assign a non-UNKNOWN_LOCATION BLOCK_SOURCE_LOCATION
4576 to make inlined_function_outer_scope_p return true on this BLOCK. */
4577 location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4578 if (loc == UNKNOWN_LOCATION)
4579 loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4580 if (loc == UNKNOWN_LOCATION)
4581 loc = BUILTINS_LOCATION;
4582 id->block = make_node (BLOCK);
4583 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4584 BLOCK_SOURCE_LOCATION (id->block) = loc;
4585 prepend_lexical_block (gimple_block (stmt), id->block);
4586 }
4587
4588 /* Local declarations will be replaced by their equivalents in this
4589 map. */
4590 st = id->decl_map;
4591 id->decl_map = new hash_map<tree, tree>;
4592 dst = id->debug_map;
4593 id->debug_map = NULL;
4594
4595 /* Record the function we are about to inline. */
4596 id->src_fn = fn;
4597 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4598 id->call_stmt = call_stmt;
4599
4600 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4601 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4602 dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4603 simtvars_save = id->dst_simt_vars;
4604 if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4605 && (simduid = bb->loop_father->simduid) != NULL_TREE
4606 && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4607 && single_imm_use (simduid, &use, &simtenter_stmt)
4608 && is_gimple_call (simtenter_stmt)
4609 && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4610 vec_alloc (id->dst_simt_vars, 0);
4611 else
4612 id->dst_simt_vars = NULL;
4613
4614 if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4615 profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4616
4617 /* If the src function contains an IFN_VA_ARG, then so will the dst
4618 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4619 prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4620 src_properties = id->src_cfun->curr_properties & prop_mask;
4621 if (src_properties != prop_mask)
4622 dst_cfun->curr_properties &= src_properties | ~prop_mask;
4623
4624 gcc_assert (!id->src_cfun->after_inlining);
4625
4626 id->entry_bb = bb;
4627 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4628 {
4629 gimple_stmt_iterator si = gsi_last_bb (bb);
4630 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4631 NOT_TAKEN),
4632 GSI_NEW_STMT);
4633 }
4634 initialize_inlined_parameters (id, stmt, fn, bb);
4635 if (debug_nonbind_markers_p && debug_inline_points && id->block
4636 && inlined_function_outer_scope_p (id->block))
4637 {
4638 gimple_stmt_iterator si = gsi_last_bb (bb);
4639 gsi_insert_after (&si, gimple_build_debug_inline_entry
4640 (id->block, input_location), GSI_NEW_STMT);
4641 }
4642
4643 if (DECL_INITIAL (fn))
4644 {
4645 if (gimple_block (stmt))
4646 {
4647 tree *var;
4648
4649 prepend_lexical_block (id->block,
4650 remap_blocks (DECL_INITIAL (fn), id));
4651 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4652 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4653 == NULL_TREE));
4654 	  /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block;
4655 	     otherwise, for DWARF, the DW_TAG_formal_parameter nodes will not be
4656 	     children of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4657 	     under it.  The parameters can then be evaluated in the debugger,
4658 	     but don't show up in backtraces.  */
4659 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4660 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4661 {
4662 tree v = *var;
4663 *var = TREE_CHAIN (v);
4664 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4665 BLOCK_VARS (id->block) = v;
4666 }
4667 else
4668 var = &TREE_CHAIN (*var);
4669 }
4670 else
4671 remap_blocks_to_null (DECL_INITIAL (fn), id);
4672 }
4673
4674 /* Return statements in the function body will be replaced by jumps
4675 to the RET_LABEL. */
4676 gcc_assert (DECL_INITIAL (fn));
4677 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4678
4679 /* Find the LHS to which the result of this call is assigned. */
4680 return_slot = NULL;
4681 if (gimple_call_lhs (stmt))
4682 {
4683 modify_dest = gimple_call_lhs (stmt);
4684
4685 /* Remember where to copy returned bounds. */
4686 if (gimple_call_with_bounds_p (stmt)
4687 && TREE_CODE (modify_dest) == SSA_NAME)
4688 {
4689 gcall *retbnd = chkp_retbnd_call_by_val (modify_dest);
4690 if (retbnd)
4691 {
4692 return_bounds = gimple_call_lhs (retbnd);
4693 	      /* If the returned bounds are not used then just
4694 		 remove the unused call.  */
4695 if (!return_bounds)
4696 {
4697 gimple_stmt_iterator iter = gsi_for_stmt (retbnd);
4698 gsi_remove (&iter, true);
4699 }
4700 }
4701 }
4702
4703 /* The function which we are inlining might not return a value,
4704 in which case we should issue a warning that the function
4705 does not return a value. In that case the optimizers will
4706 see that the variable to which the value is assigned was not
4707 initialized. We do not want to issue a warning about that
4708 uninitialized variable. */
4709 if (DECL_P (modify_dest))
4710 TREE_NO_WARNING (modify_dest) = 1;
4711
4712 if (gimple_call_return_slot_opt_p (call_stmt))
4713 {
4714 return_slot = modify_dest;
4715 modify_dest = NULL;
4716 }
4717 }
4718 else
4719 modify_dest = NULL;
4720
4721 /* If we are inlining a call to the C++ operator new, we don't want
4722 to use type based alias analysis on the return value. Otherwise
4723 we may get confused if the compiler sees that the inlined new
4724 function returns a pointer which was just deleted. See bug
4725 33407. */
4726 if (DECL_IS_OPERATOR_NEW (fn))
4727 {
4728 return_slot = NULL;
4729 modify_dest = NULL;
4730 }
4731
4732 /* Declare the return variable for the function. */
4733 use_retvar = declare_return_variable (id, return_slot, modify_dest,
4734 return_bounds, bb);
4735
4736 /* Add local vars in this inlined callee to caller. */
4737 add_local_variables (id->src_cfun, cfun, id);
4738
4739 if (dump_file && (dump_flags & TDF_DETAILS))
4740 {
4741 fprintf (dump_file, "Inlining %s to %s with frequency %4.2f\n",
4742 id->src_node->dump_name (),
4743 id->dst_node->dump_name (),
4744 cg_edge->sreal_frequency ().to_double ());
4745 id->src_node->dump (dump_file);
4746 id->dst_node->dump (dump_file);
4747 }
4748
4749 /* This is it. Duplicate the callee body. Assume callee is
4750 pre-gimplified. Note that we must not alter the caller
4751 function in any way before this point, as this CALL_EXPR may be
4752 a self-referential call; if we're calling ourselves, we need to
4753 duplicate our body before altering anything. */
4754 copy_body (id, bb, return_block, NULL);
4755
4756 reset_debug_bindings (id, stmt_gsi);
4757
4758 if (flag_stack_reuse != SR_NONE)
4759 for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
4760 if (!TREE_THIS_VOLATILE (p))
4761 {
4762 tree *varp = id->decl_map->get (p);
4763 if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
4764 {
4765 tree clobber = build_constructor (TREE_TYPE (*varp), NULL);
4766 gimple *clobber_stmt;
4767 TREE_THIS_VOLATILE (clobber) = 1;
4768 clobber_stmt = gimple_build_assign (*varp, clobber);
4769 gimple_set_location (clobber_stmt, gimple_location (stmt));
4770 gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4771 }
4772 }
4773
4774 /* Reset the escaped solution. */
4775 if (cfun->gimple_df)
4776 pt_solution_reset (&cfun->gimple_df->escaped);
4777
4778 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
4779 if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
4780 {
4781 size_t nargs = gimple_call_num_args (simtenter_stmt);
4782 vec<tree> *vars = id->dst_simt_vars;
4783 auto_vec<tree> newargs (nargs + vars->length ());
4784 for (size_t i = 0; i < nargs; i++)
4785 newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
4786 for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
4787 {
4788 tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
4789 newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
4790 }
4791 gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
4792 gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
4793 gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
4794 gsi_replace (&gsi, g, false);
4795 }
4796 vec_free (id->dst_simt_vars);
4797 id->dst_simt_vars = simtvars_save;
4798
4799 /* Clean up. */
4800 if (id->debug_map)
4801 {
4802 delete id->debug_map;
4803 id->debug_map = dst;
4804 }
4805 delete id->decl_map;
4806 id->decl_map = st;
4807
4808   /* Unlink the call's virtual operands before replacing it.  */
4809 unlink_stmt_vdef (stmt);
4810 if (gimple_vdef (stmt)
4811 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4812 release_ssa_name (gimple_vdef (stmt));
4813
4814 /* If the inlined function returns a result that we care about,
4815 substitute the GIMPLE_CALL with an assignment of the return
4816 variable to the LHS of the call. That is, if STMT was
4817 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4818 if (use_retvar && gimple_call_lhs (stmt))
4819 {
4820 gimple *old_stmt = stmt;
4821 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4822 gsi_replace (&stmt_gsi, stmt, false);
4823 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4824 /* Append a clobber for id->retvar if easily possible. */
4825 if (flag_stack_reuse != SR_NONE
4826 && id->retvar
4827 && VAR_P (id->retvar)
4828 && id->retvar != return_slot
4829 && id->retvar != modify_dest
4830 && !TREE_THIS_VOLATILE (id->retvar)
4831 && !is_gimple_reg (id->retvar)
4832 && !stmt_ends_bb_p (stmt))
4833 {
4834 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4835 gimple *clobber_stmt;
4836 TREE_THIS_VOLATILE (clobber) = 1;
4837 clobber_stmt = gimple_build_assign (id->retvar, clobber);
4838 gimple_set_location (clobber_stmt, gimple_location (old_stmt));
4839 gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4840 }
4841
4842 /* Copy bounds if we copy structure with bounds. */
4843 if (chkp_function_instrumented_p (id->dst_fn)
4844 && !BOUNDED_P (use_retvar)
4845 && chkp_type_has_pointer (TREE_TYPE (use_retvar)))
4846 id->assign_stmts.safe_push (stmt);
4847 }
4848 else
4849 {
4850 /* Handle the case of inlining a function with no return
4851 statement, which causes the return value to become undefined. */
4852 if (gimple_call_lhs (stmt)
4853 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4854 {
4855 tree name = gimple_call_lhs (stmt);
4856 tree var = SSA_NAME_VAR (name);
4857 tree def = var ? ssa_default_def (cfun, var) : NULL;
4858
4859 if (def)
4860 {
4861 /* If the variable is used undefined, make this name
4862 undefined via a move. */
4863 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4864 gsi_replace (&stmt_gsi, stmt, true);
4865 }
4866 else
4867 {
4868 if (!var)
4869 {
4870 var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
4871 SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
4872 }
4873 /* Otherwise make this variable undefined. */
4874 gsi_remove (&stmt_gsi, true);
4875 set_ssa_default_def (cfun, var, name);
4876 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4877 }
4878 }
4879 /* Replace with a clobber for id->retvar. */
4880 else if (flag_stack_reuse != SR_NONE
4881 && id->retvar
4882 && VAR_P (id->retvar)
4883 && id->retvar != return_slot
4884 && id->retvar != modify_dest
4885 && !TREE_THIS_VOLATILE (id->retvar)
4886 && !is_gimple_reg (id->retvar))
4887 {
4888 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4889 gimple *clobber_stmt;
4890 TREE_THIS_VOLATILE (clobber) = 1;
4891 clobber_stmt = gimple_build_assign (id->retvar, clobber);
4892 gimple_set_location (clobber_stmt, gimple_location (stmt));
4893 gsi_replace (&stmt_gsi, clobber_stmt, false);
4894 maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
4895 }
4896 else
4897 gsi_remove (&stmt_gsi, true);
4898 }
4899
4900 /* Put returned bounds into the correct place if required. */
4901 if (return_bounds)
4902 {
4903 gimple *old_stmt = SSA_NAME_DEF_STMT (return_bounds);
4904 gimple *new_stmt = gimple_build_assign (return_bounds, id->retbnd);
4905 gimple_stmt_iterator bnd_gsi = gsi_for_stmt (old_stmt);
4906 unlink_stmt_vdef (old_stmt);
4907 gsi_replace (&bnd_gsi, new_stmt, false);
4908 maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt);
4909 cgraph_update_edges_for_call_stmt (old_stmt,
4910 gimple_call_fndecl (old_stmt),
4911 new_stmt);
4912 }
4913
4914 if (purge_dead_abnormal_edges)
4915 {
4916 gimple_purge_dead_eh_edges (return_block);
4917 gimple_purge_dead_abnormal_call_edges (return_block);
4918 }
4919
4920 /* If the value of the new expression is ignored, that's OK. We
4921 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4922 the equivalent inlined version either. */
4923 if (is_gimple_assign (stmt))
4924 {
4925 gcc_assert (gimple_assign_single_p (stmt)
4926 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4927 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4928 }
4929
4930 /* Copy bounds for all generated assigns that need it. */
4931 for (i = 0; i < id->assign_stmts.length (); i++)
4932 chkp_copy_bounds_for_assign (id->assign_stmts[i], cg_edge);
4933 id->assign_stmts.release ();
4934
4935 /* Output the inlining info for this abstract function, since it has been
4936 inlined. If we don't do this now, we can lose the information about the
4937 variables in the function when the blocks get blown away as soon as we
4938 remove the cgraph node. */
4939 if (gimple_block (stmt))
4940 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4941
4942 /* Update callgraph if needed. */
4943 cg_edge->callee->remove ();
4944
4945 id->block = NULL_TREE;
4946 id->retvar = NULL_TREE;
4947 id->retbnd = NULL_TREE;
4948 successfully_inlined = true;
4949
4950 egress:
4951 input_location = saved_location;
4952 return successfully_inlined;
4953 }
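/* End-to-end sketch of the transformation (names are made up for
   exposition).  Given

     int sq (int v) { return v * v; }
     ...
     y = sq (x);

   the block containing the call is split, the parameter setup
   "v.0 = x;" is emitted, the duplicated body computes
   "retval.1 = v.0 * v.0;" in new blocks between the split halves, and
   the GIMPLE_CALL itself is replaced by "y = retval.1;".  */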
4954
4955 /* Expand call statements reachable from STMT_P.
4956 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4957 in a MODIFY_EXPR. */
4958
4959 static bool
4960 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4961 {
4962 gimple_stmt_iterator gsi;
4963 bool inlined = false;
4964
4965 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
4966 {
4967 gimple *stmt = gsi_stmt (gsi);
4968 gsi_prev (&gsi);
4969
4970 if (is_gimple_call (stmt)
4971 && !gimple_call_internal_p (stmt))
4972 inlined |= expand_call_inline (bb, stmt, id);
4973 }
4974
4975 return inlined;
4976 }
4977
4978
4979 /* Walk all basic blocks created after FIRST and try to fold every statement
4980 in the STATEMENTS pointer set. */
4981
4982 static void
4983 fold_marked_statements (int first, hash_set<gimple *> *statements)
4984 {
4985 for (; first < n_basic_blocks_for_fn (cfun); first++)
4986 if (BASIC_BLOCK_FOR_FN (cfun, first))
4987 {
4988 gimple_stmt_iterator gsi;
4989
4990 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4991 !gsi_end_p (gsi);
4992 gsi_next (&gsi))
4993 if (statements->contains (gsi_stmt (gsi)))
4994 {
4995 gimple *old_stmt = gsi_stmt (gsi);
4996 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4997
4998 if (old_decl && DECL_BUILT_IN (old_decl))
4999 {
5000 		/* Folding builtins can create multiple instructions;
5001 		   we need to look at all of them.  */
5002 gimple_stmt_iterator i2 = gsi;
5003 gsi_prev (&i2);
5004 if (fold_stmt (&gsi))
5005 {
5006 gimple *new_stmt;
5007 /* If a builtin at the end of a bb folded into nothing,
5008 the following loop won't work. */
5009 if (gsi_end_p (gsi))
5010 {
5011 cgraph_update_edges_for_call_stmt (old_stmt,
5012 old_decl, NULL);
5013 break;
5014 }
5015 if (gsi_end_p (i2))
5016 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5017 else
5018 gsi_next (&i2);
5019 while (1)
5020 {
5021 new_stmt = gsi_stmt (i2);
5022 update_stmt (new_stmt);
5023 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5024 new_stmt);
5025
5026 if (new_stmt == gsi_stmt (gsi))
5027 {
5028 			  /* It is okay to check only for the very last
5029 			     of these statements.  If it is a throwing
5030 			     statement nothing will change.  If it isn't,
5031 			     this can remove EH edges.  That would only be
5032 			     incorrect if some intermediate stmts could
5033 			     throw while the last one doesn't; then we'd
5034 			     have to split the block, which we can't do
5035 			     here and we'd lose anyway.  And as builtins
5036 			     probably never throw, this all is moot
5037 			     anyway.  */
5038 if (maybe_clean_or_replace_eh_stmt (old_stmt,
5039 new_stmt))
5040 gimple_purge_dead_eh_edges (
5041 BASIC_BLOCK_FOR_FN (cfun, first));
5042 break;
5043 }
5044 gsi_next (&i2);
5045 }
5046 }
5047 }
5048 else if (fold_stmt (&gsi))
5049 {
5050 /* Re-read the statement from GSI as fold_stmt() may
5051 have changed it. */
5052 gimple *new_stmt = gsi_stmt (gsi);
5053 update_stmt (new_stmt);
5054
5055 if (is_gimple_call (old_stmt)
5056 || is_gimple_call (new_stmt))
5057 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5058 new_stmt);
5059
5060 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5061 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
5062 first));
5063 }
5064 }
5065 }
5066 }
5067
5068 /* Expand calls to inline functions in the body of FN. */
5069
5070 unsigned int
5071 optimize_inline_calls (tree fn)
5072 {
5073 copy_body_data id;
5074 basic_block bb;
5075 int last = n_basic_blocks_for_fn (cfun);
5076 bool inlined_p = false;
5077
5078 /* Clear out ID. */
5079 memset (&id, 0, sizeof (id));
5080
5081 id.src_node = id.dst_node = cgraph_node::get (fn);
5082 gcc_assert (id.dst_node->definition);
5083 id.dst_fn = fn;
5084 /* Or any functions that aren't finished yet. */
5085 if (current_function_decl)
5086 id.dst_fn = current_function_decl;
5087
5088 id.copy_decl = copy_decl_maybe_to_var;
5089 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5090 id.transform_new_cfg = false;
5091 id.transform_return_to_modify = true;
5092 id.transform_parameter = true;
5093 id.transform_lang_insert_block = NULL;
5094 id.statements_to_fold = new hash_set<gimple *>;
5095
5096 push_gimplify_context ();
5097
5098 /* We make no attempts to keep dominance info up-to-date. */
5099 free_dominance_info (CDI_DOMINATORS);
5100 free_dominance_info (CDI_POST_DOMINATORS);
5101
5102 /* Register specific gimple functions. */
5103 gimple_register_cfg_hooks ();
5104
5105 /* Reach the trees by walking over the CFG, and note the
5106 enclosing basic-blocks in the call edges. */
5107 /* We walk the blocks going forward, because inlined function bodies
5108 will split id->current_basic_block, and the new blocks will
5109 follow it; we'll trudge through them, processing their CALL_EXPRs
5110 along the way. */
5111 FOR_EACH_BB_FN (bb, cfun)
5112 inlined_p |= gimple_expand_calls_inline (bb, &id);
5113
5114 pop_gimplify_context (NULL);
5115
5116 if (flag_checking)
5117 {
5118 struct cgraph_edge *e;
5119
5120 id.dst_node->verify ();
5121
5122 /* Double check that we inlined everything we are supposed to inline. */
5123 for (e = id.dst_node->callees; e; e = e->next_callee)
5124 gcc_assert (e->inline_failed);
5125 }
5126
5127 /* Fold queued statements. */
5128 update_max_bb_count ();
5129 fold_marked_statements (last, id.statements_to_fold);
5130 delete id.statements_to_fold;
5131
5132 gcc_assert (!id.debug_stmts.exists ());
5133
5134 /* If we didn't inline into the function there is nothing to do. */
5135 if (!inlined_p)
5136 return 0;
5137
5138 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5139 number_blocks (fn);
5140
5141 delete_unreachable_blocks_update_callgraph (&id);
5142 if (flag_checking)
5143 id.dst_node->verify ();
5144
5145 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5146 not possible yet - the IPA passes might make various functions no longer
5147 throw, and they don't care to proactively update local EH info. This is
5148 done later in the fixup_cfg pass, which also executes the verification. */
5149 return (TODO_update_ssa
5150 | TODO_cleanup_cfg
5151 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5152 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5153 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5154 ? TODO_rebuild_frequencies : 0));
5155 }
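/* Sketch of a caller, assuming the usual pass-pipeline conventions (in
   GCC the actual call site is the inline transform stage):

     unsigned int todo = optimize_inline_calls (current_function_decl);

   The returned TODO flags are meant to be merged into the calling
   pass's own return value so that SSA form and the CFG get cleaned up
   afterwards.  */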
5156
5157 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5158
5159 tree
5160 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5161 {
5162 enum tree_code code = TREE_CODE (*tp);
5163 enum tree_code_class cl = TREE_CODE_CLASS (code);
5164
5165 /* We make copies of most nodes. */
5166 if (IS_EXPR_CODE_CLASS (cl)
5167 || code == TREE_LIST
5168 || code == TREE_VEC
5169 || code == TYPE_DECL
5170 || code == OMP_CLAUSE)
5171 {
5172 /* Because the chain gets clobbered when we make a copy, we save it
5173 here. */
5174 tree chain = NULL_TREE, new_tree;
5175
5176 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5177 chain = TREE_CHAIN (*tp);
5178
5179 /* Copy the node. */
5180 new_tree = copy_node (*tp);
5181
5182 *tp = new_tree;
5183
5184 /* Now, restore the chain, if appropriate. That will cause
5185 walk_tree to walk into the chain as well. */
5186 if (code == PARM_DECL
5187 || code == TREE_LIST
5188 || code == OMP_CLAUSE)
5189 TREE_CHAIN (*tp) = chain;
5190
5191 /* For now, we don't update BLOCKs when we make copies. So, we
5192 have to nullify all BIND_EXPRs. */
5193 if (TREE_CODE (*tp) == BIND_EXPR)
5194 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5195 }
5196 else if (code == CONSTRUCTOR)
5197 {
5198 /* CONSTRUCTOR nodes need special handling because
5199 we need to duplicate the vector of elements. */
5200 tree new_tree;
5201
5202 new_tree = copy_node (*tp);
5203 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5204 *tp = new_tree;
5205 }
5206 else if (code == STATEMENT_LIST)
5207 /* We used to just abort on STATEMENT_LIST, but we can run into them
5208 with statement-expressions (c++/40975). */
5209 copy_statement_list (tp);
5210 else if (TREE_CODE_CLASS (code) == tcc_type)
5211 *walk_subtrees = 0;
5212 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5213 *walk_subtrees = 0;
5214 else if (TREE_CODE_CLASS (code) == tcc_constant)
5215 *walk_subtrees = 0;
5216 return NULL_TREE;
5217 }
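/* A minimal usage sketch: copy_tree_r is meant to be used as a
   walk_tree callback, e.g.

     tree expr_copy = expr;
     walk_tree (&expr_copy, copy_tree_r, NULL, NULL);

   which deep-copies the expression nodes while leaving types,
   declarations and constants shared, exactly as the class checks
   above imply.  */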
5218
5219 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5220 information indicating to what new SAVE_EXPR this one should be mapped,
5221 use that one. Otherwise, create a new node and enter it in ST. FN is
5222 the function into which the copy will be placed. */
5223
5224 static void
5225 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5226 {
5227 tree *n;
5228 tree t;
5229
5230 /* See if we already encountered this SAVE_EXPR. */
5231 n = st->get (*tp);
5232
5233 /* If we didn't already remap this SAVE_EXPR, do so now. */
5234 if (!n)
5235 {
5236 t = copy_node (*tp);
5237
5238 /* Remember this SAVE_EXPR. */
5239 st->put (*tp, t);
5240 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5241 st->put (t, t);
5242 }
5243 else
5244 {
5245 /* We've already walked into this SAVE_EXPR; don't do it again. */
5246 *walk_subtrees = 0;
5247 t = *n;
5248 }
5249
5250 /* Replace this SAVE_EXPR with the copy. */
5251 *tp = t;
5252 }
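/* For example, in a tree like

     SAVE_EXPR <a + b> * SAVE_EXPR <a + b>

   where both operands point to the very same SAVE_EXPR node, the first
   visit copies the node and records old -> new in ST; the second visit
   finds the mapping and reuses the copy, so the sharing is preserved.  */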
5253
5254 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5255 label, copies the declaration and enters it in the decl map of WI->info
5256 (which is really a 'copy_body_data *'). */
5257
5258 static tree
5259 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5260 bool *handled_ops_p ATTRIBUTE_UNUSED,
5261 struct walk_stmt_info *wi)
5262 {
5263 copy_body_data *id = (copy_body_data *) wi->info;
5264 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5265
5266 if (stmt)
5267 {
5268 tree decl = gimple_label_label (stmt);
5269
5270 /* Copy the decl and remember the copy. */
5271 insert_decl_map (id, decl, id->copy_decl (decl, id));
5272 }
5273
5274 return NULL_TREE;
5275 }
5276
5277 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5278 struct walk_stmt_info *wi);
5279
5280 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5281 Using the decl map stored in the copy_body_data in WI->info (a
5282 'hash_map<tree, tree>'), remaps all local declarations to appropriate
5283 replacements in gimple operands. */
5284
5285 static tree
5286 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5287 {
5288 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5289 copy_body_data *id = (copy_body_data *) wi->info;
5290 hash_map<tree, tree> *st = id->decl_map;
5291 tree *n;
5292 tree expr = *tp;
5293
5294 /* For recursive invocations this is no longer the LHS itself. */
5295 bool is_lhs = wi->is_lhs;
5296 wi->is_lhs = false;
5297
5298 if (TREE_CODE (expr) == SSA_NAME)
5299 {
5300 *tp = remap_ssa_name (*tp, id);
5301 *walk_subtrees = 0;
5302 if (is_lhs)
5303 SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5304 }
5305 /* Only a local declaration (variable or label). */
5306 else if ((VAR_P (expr) && !TREE_STATIC (expr))
5307 || TREE_CODE (expr) == LABEL_DECL)
5308 {
5309 /* Lookup the declaration. */
5310 n = st->get (expr);
5311
5312 /* If it's there, remap it. */
5313 if (n)
5314 *tp = *n;
5315 *walk_subtrees = 0;
5316 }
5317 else if (TREE_CODE (expr) == STATEMENT_LIST
5318 || TREE_CODE (expr) == BIND_EXPR
5319 || TREE_CODE (expr) == SAVE_EXPR)
5320 gcc_unreachable ();
5321 else if (TREE_CODE (expr) == TARGET_EXPR)
5322 {
5323 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5324 It's OK for this to happen if it was part of a subtree that
5325 isn't immediately expanded, such as operand 2 of another
5326 TARGET_EXPR. */
5327 if (!TREE_OPERAND (expr, 1))
5328 {
5329 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5330 TREE_OPERAND (expr, 3) = NULL_TREE;
5331 }
5332 }
5333 else if (TREE_CODE (expr) == OMP_CLAUSE)
5334 {
5335 /* Before the omplower pass completes, some OMP clauses can contain
5336 sequences that are neither copied by gimple_seq_copy nor walked by
5337 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5338 in those situations, we have to copy and process them explicitly. */
5339
5340 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5341 {
5342 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5343 seq = duplicate_remap_omp_clause_seq (seq, wi);
5344 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5345 }
5346 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5347 {
5348 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5349 seq = duplicate_remap_omp_clause_seq (seq, wi);
5350 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5351 }
5352 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5353 {
5354 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5355 seq = duplicate_remap_omp_clause_seq (seq, wi);
5356 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5357 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5358 seq = duplicate_remap_omp_clause_seq (seq, wi);
5359 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5360 }
5361 }
5362
5363 /* Keep iterating. */
5364 return NULL_TREE;
5365 }
5366
5367
5368 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5369 Using the decl map stored in the copy_body_data in WI->info (a
5370 'hash_map<tree, tree>'), remaps all local declarations to appropriate
5371 replacements in gimple statements. */
5372
5373 static tree
5374 replace_locals_stmt (gimple_stmt_iterator *gsip,
5375 bool *handled_ops_p ATTRIBUTE_UNUSED,
5376 struct walk_stmt_info *wi)
5377 {
5378 copy_body_data *id = (copy_body_data *) wi->info;
5379 gimple *gs = gsi_stmt (*gsip);
5380
5381 if (gbind *stmt = dyn_cast <gbind *> (gs))
5382 {
5383 tree block = gimple_bind_block (stmt);
5384
5385 if (block)
5386 {
5387 remap_block (&block, id);
5388 gimple_bind_set_block (stmt, block);
5389 }
5390
5391 /* This will remap a lot of the same decls again, but this should be
5392 harmless. */
5393 if (gimple_bind_vars (stmt))
5394 {
5395 tree old_var, decls = gimple_bind_vars (stmt);
5396
5397 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5398 if (!can_be_nonlocal (old_var, id)
5399 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5400 remap_decl (old_var, id);
5401
5402 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5403 id->prevent_decl_creation_for_types = true;
5404 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5405 id->prevent_decl_creation_for_types = false;
5406 }
5407 }
5408
5409 /* Keep iterating. */
5410 return NULL_TREE;
5411 }
5412
5413 /* Create a copy of SEQ and remap all decls in it. */
5414
5415 static gimple_seq
5416 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5417 {
5418 if (!seq)
5419 return NULL;
5420
5421 /* Any labels in OMP sequences can only be referred to from within the
5422 sequence itself, so we can do both the marking and the remapping here. */
5423 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5424 gimple_seq copy = gimple_seq_copy (seq);
5425 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5426 return copy;
5427 }
5428
5429 /* Copies everything in SEQ and replaces variables and labels local to
5430 current_function_decl. */
5431
5432 gimple_seq
5433 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5434 {
5435 copy_body_data id;
5436 struct walk_stmt_info wi;
5437 gimple_seq copy;
5438
5439 /* There's nothing to do for an empty sequence. */
5440 if (seq == NULL)
5441 return seq;
5442
5443 /* Set up ID. */
5444 memset (&id, 0, sizeof (id));
5445 id.src_fn = current_function_decl;
5446 id.dst_fn = current_function_decl;
5447 id.src_cfun = cfun;
5448 id.decl_map = new hash_map<tree, tree>;
5449 id.debug_map = NULL;
5450
5451 id.copy_decl = copy_decl_no_change;
5452 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5453 id.transform_new_cfg = false;
5454 id.transform_return_to_modify = false;
5455 id.transform_parameter = false;
5456 id.transform_lang_insert_block = NULL;
5457
5458 /* Walk the tree once to find local labels. */
5459 memset (&wi, 0, sizeof (wi));
5460 hash_set<tree> visited;
5461 wi.info = &id;
5462 wi.pset = &visited;
5463 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5464
5465 copy = gimple_seq_copy (seq);
5466
5467 /* Walk the copy, remapping decls. */
5468 memset (&wi, 0, sizeof (wi));
5469 wi.info = &id;
5470 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5471
5472 /* Clean up. */
5473 delete id.decl_map;
5474 if (id.debug_map)
5475 delete id.debug_map;
5476 if (id.dependence_map)
5477 {
5478 delete id.dependence_map;
5479 id.dependence_map = NULL;
5480 }
5481
5482 return copy;
5483 }
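/* A hedged usage sketch: a pass holding a gimple_seq SEQ that it wants
   to instantiate a second time within the same function might do

     gimple_seq copy = copy_gimple_seq_and_replace_locals (seq);

   after which COPY carries fresh copies of all function-local variables
   and labels, while references to statics and to decls from other
   functions are left shared.  */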
5484
5485
5486 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5487
5488 static tree
5489 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5490 {
5491 if (*tp == data)
5492 return (tree) data;
5493 else
5494 return NULL;
5495 }
5496
5497 DEBUG_FUNCTION bool
5498 debug_find_tree (tree top, tree search)
5499 {
5500 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5501 }
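/* This is intended for interactive use from the debugger, e.g.

     (gdb) call debug_find_tree (top, search)

   returning true iff SEARCH occurs somewhere in the tree rooted at TOP.  */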
5502
5503
5504 /* Declare the variables created by the inliner. Add all the variables in
5505 VARS to BLOCK. */
5506
5507 static void
5508 declare_inline_vars (tree block, tree vars)
5509 {
5510 tree t;
5511 for (t = vars; t; t = DECL_CHAIN (t))
5512 {
5513 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5514 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5515 add_local_decl (cfun, t);
5516 }
5517
5518 if (block)
5519 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5520 }
5521
5522 /* Finish up the copy COPY of DECL. DECL originally was in ID->src_fn,
5523 but the copy will be placed in ID->dst_fn. Set up the debug info,
5524 abstract origin and context of the copy. */
5525
5526 tree
5527 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5528 {
5529 /* Don't generate debug information for the copy if we wouldn't have
5530 generated it for the original either. */
5531 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5532 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5533
5534 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5535 declaration inspired this copy. */
5536 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5537
5538 /* The new variable/label has no RTL, yet. */
5539 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5540 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5541 SET_DECL_RTL (copy, 0);
5542 /* For vector typed decls make sure to update DECL_MODE according
5543 to the new function context. */
5544 if (VECTOR_TYPE_P (TREE_TYPE (copy)))
5545 SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
5546
5547 /* These args would always appear unused, if not for this. */
5548 TREE_USED (copy) = 1;
5549
5550 /* Set the context for the new declaration. */
5551 if (!DECL_CONTEXT (decl))
5552 /* Globals stay global. */
5553 ;
5554 else if (DECL_CONTEXT (decl) != id->src_fn)
5555 /* Things that weren't in the scope of the function we're inlining
5556 from aren't in the scope we're inlining to, either. */
5557 ;
5558 else if (TREE_STATIC (decl))
5559 /* Function-scoped static variables should stay in the original
5560 function. */
5561 ;
5562 else
5563 {
5564 /* Ordinary automatic local variables are now in the scope of the
5565 new function. */
5566 DECL_CONTEXT (copy) = id->dst_fn;
5567 if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5568 {
5569 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5570 DECL_ATTRIBUTES (copy)
5571 = tree_cons (get_identifier ("omp simt private"), NULL,
5572 DECL_ATTRIBUTES (copy));
5573 id->dst_simt_vars->safe_push (copy);
5574 }
5575 }
5576
5577 return copy;
5578 }
5579
5580 static tree
5581 copy_decl_to_var (tree decl, copy_body_data *id)
5582 {
5583 tree copy, type;
5584
5585 gcc_assert (TREE_CODE (decl) == PARM_DECL
5586 || TREE_CODE (decl) == RESULT_DECL);
5587
5588 type = TREE_TYPE (decl);
5589
5590 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5591 VAR_DECL, DECL_NAME (decl), type);
5592 if (DECL_PT_UID_SET_P (decl))
5593 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5594 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5595 TREE_READONLY (copy) = TREE_READONLY (decl);
5596 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5597 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5598
5599 return copy_decl_for_dup_finish (id, decl, copy);
5600 }
5601
5602 /* Like copy_decl_to_var, but create a return slot object instead of a
5603 pointer variable for return by invisible reference. */
5604
5605 static tree
5606 copy_result_decl_to_var (tree decl, copy_body_data *id)
5607 {
5608 tree copy, type;
5609
5610 gcc_assert (TREE_CODE (decl) == PARM_DECL
5611 || TREE_CODE (decl) == RESULT_DECL);
5612
5613 type = TREE_TYPE (decl);
5614 if (DECL_BY_REFERENCE (decl))
5615 type = TREE_TYPE (type);
5616
5617 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5618 VAR_DECL, DECL_NAME (decl), type);
5619 if (DECL_PT_UID_SET_P (decl))
5620 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5621 TREE_READONLY (copy) = TREE_READONLY (decl);
5622 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5623 if (!DECL_BY_REFERENCE (decl))
5624 {
5625 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5626 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5627 }
5628
5629 return copy_decl_for_dup_finish (id, decl, copy);
5630 }
5631
5632 tree
5633 copy_decl_no_change (tree decl, copy_body_data *id)
5634 {
5635 tree copy;
5636
5637 copy = copy_node (decl);
5638
5639 /* The COPY is not abstract; it will be generated in DST_FN. */
5640 DECL_ABSTRACT_P (copy) = false;
5641 lang_hooks.dup_lang_specific_decl (copy);
5642
5643 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5644 been taken; it's for internal bookkeeping in expand_goto_internal. */
5645 if (TREE_CODE (copy) == LABEL_DECL)
5646 {
5647 TREE_ADDRESSABLE (copy) = 0;
5648 LABEL_DECL_UID (copy) = -1;
5649 }
5650
5651 return copy_decl_for_dup_finish (id, decl, copy);
5652 }
5653
5654 static tree
5655 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5656 {
5657 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5658 return copy_decl_to_var (decl, id);
5659 else
5660 return copy_decl_no_change (decl, id);
5661 }
5662
5663 /* Return a copy of the function's argument tree. */
5664 static tree
5665 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5666 bitmap args_to_skip, tree *vars)
5667 {
5668 tree arg, *parg;
5669 tree new_parm = NULL;
5670 int i = 0;
5671
5672 parg = &new_parm;
5673
5674 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5675 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5676 {
5677 tree new_tree = remap_decl (arg, id);
5678 if (TREE_CODE (new_tree) != PARM_DECL)
5679 new_tree = id->copy_decl (arg, id);
5680 lang_hooks.dup_lang_specific_decl (new_tree);
5681 *parg = new_tree;
5682 parg = &DECL_CHAIN (new_tree);
5683 }
5684 else if (!id->decl_map->get (arg))
5685 {
5686 /* Make an equivalent VAR_DECL. If the argument was used
5687 as a temporary variable later in the function, its uses will
5688 be replaced by the local variable. */
5689 tree var = copy_decl_to_var (arg, id);
5690 insert_decl_map (id, arg, var);
5691 /* Declare this new variable. */
5692 DECL_CHAIN (var) = *vars;
5693 *vars = var;
5694 }
5695 return new_parm;
5696 }
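/* As a concrete (hypothetical) example: versioning

     int f (int a, int b, int c)

   with ARGS_TO_SKIP = { 1 } yields a parameter list of just A and C;
   B is not copied as a PARM_DECL, but an equivalent local VAR_DECL is
   created for it, entered in the decl map, and chained onto *VARS.  */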
5697
5698 /* Return a copy of the function's static chain. */
5699 static tree
5700 copy_static_chain (tree static_chain, copy_body_data * id)
5701 {
5702 tree *chain_copy, *pvar;
5703
5704 chain_copy = &static_chain;
5705 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5706 {
5707 tree new_tree = remap_decl (*pvar, id);
5708 lang_hooks.dup_lang_specific_decl (new_tree);
5709 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5710 *pvar = new_tree;
5711 }
5712 return static_chain;
5713 }
5714
5715 /* Return true if the function is allowed to be versioned.
5716 This is a guard for the versioning functionality. */
5717
5718 bool
5719 tree_versionable_function_p (tree fndecl)
5720 {
5721 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5722 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
5723 }
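/* For instance, a function declared as

     __attribute__ ((noclone)) int f (int);

   is rejected here, as is anything copy_forbidden objects to (for
   example, functions containing nonlocal labels).  */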
5724
5725 /* Delete all unreachable basic blocks and update the callgraph.
5726 Doing so is somewhat nontrivial because we need to update all clones and
5727 remove inline functions that become unreachable. */
5728
5729 static bool
5730 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5731 {
5732 bool changed = false;
5733 basic_block b, next_bb;
5734
5735 find_unreachable_blocks ();
5736
5737 /* Delete all unreachable basic blocks. */
5738
5739 for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
5740 != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
5741 {
5742 next_bb = b->next_bb;
5743
5744 if (!(b->flags & BB_REACHABLE))
5745 {
5746 gimple_stmt_iterator bsi;
5747
5748 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5749 {
5750 struct cgraph_edge *e;
5751 struct cgraph_node *node;
5752
5753 id->dst_node->remove_stmt_references (gsi_stmt (bsi));
5754
5755 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5756 && (e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
5757 {
5758 if (!e->inline_failed)
5759 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5760 else
5761 e->remove ();
5762 }
5763 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5764 && id->dst_node->clones)
5765 for (node = id->dst_node->clones; node != id->dst_node;)
5766 {
5767 node->remove_stmt_references (gsi_stmt (bsi));
5768 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5769 && (e = node->get_edge (gsi_stmt (bsi))) != NULL)
5770 {
5771 if (!e->inline_failed)
5772 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5773 else
5774 e->remove ();
5775 }
5776
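/* Advance to the next clone in preorder: descend into clones
   first, then move to the next sibling, otherwise walk back up
   toward the root until an ancestor with a next sibling is found.  */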
5777 if (node->clones)
5778 node = node->clones;
5779 else if (node->next_sibling_clone)
5780 node = node->next_sibling_clone;
5781 else
5782 {
5783 while (node != id->dst_node && !node->next_sibling_clone)
5784 node = node->clone_of;
5785 if (node != id->dst_node)
5786 node = node->next_sibling_clone;
5787 }
5788 }
5789 }
5790 delete_basic_block (b);
5791 changed = true;
5792 }
5793 }
5794
5795 return changed;
5796 }
5797
5798 /* Update clone info after duplication. */
5799
5800 static void
5801 update_clone_info (copy_body_data * id)
5802 {
5803 struct cgraph_node *node;
5804 if (!id->dst_node->clones)
5805 return;
5806 for (node = id->dst_node->clones; node != id->dst_node;)
5807 {
5808 /* First update replace maps to match the new body. */
5809 if (node->clone.tree_map)
5810 {
5811 unsigned int i;
5812 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5813 {
5814 struct ipa_replace_map *replace_info;
5815 replace_info = (*node->clone.tree_map)[i];
5816 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5817 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5818 }
5819 }
5820 if (node->clones)
5821 node = node->clones;
5822 else if (node->next_sibling_clone)
5823 node = node->next_sibling_clone;
5824 else
5825 {
5826 while (node != id->dst_node && !node->next_sibling_clone)
5827 node = node->clone_of;
5828 if (node != id->dst_node)
5829 node = node->next_sibling_clone;
5830 }
5831 }
5832 }
5833
5834 /* Create a copy of a function's tree.
5835 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5836 of the original function and the new copied function
5837 respectively. In case we want to replace a DECL
5838 tree with another tree while duplicating the function's
5839 body, TREE_MAP represents the mapping between these
5840 trees. If UPDATE_CLONES is set, the call_stmt fields
5841 of edges of clones of the function will be updated.
5842
5843 If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
5844 from the new version.
5845 If SKIP_RETURN is true, the new version will return void.
5846 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5847 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5848 */
5849 void
5850 tree_function_versioning (tree old_decl, tree new_decl,
5851 vec<ipa_replace_map *, va_gc> *tree_map,
5852 bool update_clones, bitmap args_to_skip,
5853 bool skip_return, bitmap blocks_to_copy,
5854 basic_block new_entry)
5855 {
5856 struct cgraph_node *old_version_node;
5857 struct cgraph_node *new_version_node;
5858 copy_body_data id;
5859 tree p;
5860 unsigned i;
5861 struct ipa_replace_map *replace_info;
5862 basic_block old_entry_block, bb;
5863 auto_vec<gimple *, 10> init_stmts;
5864 tree vars = NULL_TREE;
5865 bitmap debug_args_to_skip = args_to_skip;
5866
5867 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5868 && TREE_CODE (new_decl) == FUNCTION_DECL);
5869 DECL_POSSIBLY_INLINED (old_decl) = 1;
5870
5871 old_version_node = cgraph_node::get (old_decl);
5872 gcc_checking_assert (old_version_node);
5873 new_version_node = cgraph_node::get (new_decl);
5874 gcc_checking_assert (new_version_node);
5875
5876 /* Copy over debug args. */
5877 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5878 {
5879 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5880 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5881 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5882 old_debug_args = decl_debug_args_lookup (old_decl);
5883 if (old_debug_args)
5884 {
5885 new_debug_args = decl_debug_args_insert (new_decl);
5886 *new_debug_args = vec_safe_copy (*old_debug_args);
5887 }
5888 }
5889
5890 /* Output the inlining info for this abstract function, since it has been
5891 inlined. If we don't do this now, we can lose the information about the
5892 variables in the function when the blocks get blown away as soon as we
5893 remove the cgraph node. */
5894 (*debug_hooks->outlining_inline_function) (old_decl);
5895
5896 DECL_ARTIFICIAL (new_decl) = 1;
5897 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5898 if (DECL_ORIGIN (old_decl) == old_decl)
5899 old_version_node->used_as_abstract_origin = true;
5900 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5901
5902 /* Prepare the data structures for the tree copy. */
5903 memset (&id, 0, sizeof (id));
5904
5905 /* Generate a new name for the new version. */
5906 id.statements_to_fold = new hash_set<gimple *>;
5907
5908 id.decl_map = new hash_map<tree, tree>;
5909 id.debug_map = NULL;
5910 id.src_fn = old_decl;
5911 id.dst_fn = new_decl;
5912 id.src_node = old_version_node;
5913 id.dst_node = new_version_node;
5914 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5915 id.blocks_to_copy = blocks_to_copy;
5916
5917 id.copy_decl = copy_decl_no_change;
5918 id.transform_call_graph_edges
5919 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5920 id.transform_new_cfg = true;
5921 id.transform_return_to_modify = false;
5922 id.transform_parameter = false;
5923 id.transform_lang_insert_block = NULL;
5924
5925 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5926 (DECL_STRUCT_FUNCTION (old_decl));
5927 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5928 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5929 initialize_cfun (new_decl, old_decl,
5930 new_entry ? new_entry->count : old_entry_block->count);
5931 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5932 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5933 = id.src_cfun->gimple_df->ipa_pta;
5934
5935 /* Copy the function's static chain. */
5936 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5937 if (p)
5938 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
5939 = copy_static_chain (p, &id);
5940
5941 /* If there's a tree_map, prepare for substitution. */
5942 if (tree_map)
5943 for (i = 0; i < tree_map->length (); i++)
5944 {
5945 gimple *init;
5946 replace_info = (*tree_map)[i];
5947 if (replace_info->replace_p)
5948 {
5949 int parm_num = -1;
5950 if (!replace_info->old_tree)
5951 {
5952 int p = replace_info->parm_num;
5953 tree parm;
5954 tree req_type, new_type;
5955
5956 for (parm = DECL_ARGUMENTS (old_decl); p;
5957 parm = DECL_CHAIN (parm))
5958 p--;
5959 replace_info->old_tree = parm;
5960 parm_num = replace_info->parm_num;
5961 req_type = TREE_TYPE (parm);
5962 new_type = TREE_TYPE (replace_info->new_tree);
5963 if (!useless_type_conversion_p (req_type, new_type))
5964 {
5965 if (fold_convertible_p (req_type, replace_info->new_tree))
5966 replace_info->new_tree
5967 = fold_build1 (NOP_EXPR, req_type,
5968 replace_info->new_tree);
5969 else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
5970 replace_info->new_tree
5971 = fold_build1 (VIEW_CONVERT_EXPR, req_type,
5972 replace_info->new_tree);
5973 else
5974 {
5975 if (dump_file)
5976 {
5977 fprintf (dump_file, " const ");
5978 print_generic_expr (dump_file,
5979 replace_info->new_tree);
5980 fprintf (dump_file,
5981 " can't be converted to param ");
5982 print_generic_expr (dump_file, parm);
5983 fprintf (dump_file, "\n");
5984 }
5985 replace_info->old_tree = NULL;
5986 }
5987 }
5988 }
5989 else
5990 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5991 if (replace_info->old_tree)
5992 {
5993 init = setup_one_parameter (&id, replace_info->old_tree,
5994 replace_info->new_tree, id.src_fn,
5995 NULL,
5996 &vars);
5997 if (init)
5998 init_stmts.safe_push (init);
5999 if (MAY_HAVE_DEBUG_BIND_STMTS && args_to_skip)
6000 {
6001 if (parm_num == -1)
6002 {
6003 tree parm;
6004 int p;
6005 for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
6006 parm = DECL_CHAIN (parm), p++)
6007 if (parm == replace_info->old_tree)
6008 {
6009 parm_num = p;
6010 break;
6011 }
6012 }
6013 if (parm_num != -1)
6014 {
6015 if (debug_args_to_skip == args_to_skip)
6016 {
6017 debug_args_to_skip = BITMAP_ALLOC (NULL);
6018 bitmap_copy (debug_args_to_skip, args_to_skip);
6019 }
6020 bitmap_clear_bit (debug_args_to_skip, parm_num);
6021 }
6022 }
6023 }
6024 }
6025 }
6026 /* Copy the function's arguments. */
6027 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6028 DECL_ARGUMENTS (new_decl)
6029 = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
6030 args_to_skip, &vars);
6031
6032 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6033 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6034
6035 declare_inline_vars (DECL_INITIAL (new_decl), vars);
6036
6037 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6038 /* Add local vars. */
6039 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6040
6041 if (DECL_RESULT (old_decl) == NULL_TREE)
6042 ;
6043 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6044 {
6045 DECL_RESULT (new_decl)
6046 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6047 RESULT_DECL, NULL_TREE, void_type_node);
6048 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6049 cfun->returns_struct = 0;
6050 cfun->returns_pcc_struct = 0;
6051 }
6052 else
6053 {
6054 tree old_name;
6055 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6056 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6057 if (gimple_in_ssa_p (id.src_cfun)
6058 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6059 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6060 {
6061 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6062 insert_decl_map (&id, old_name, new_name);
6063 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6064 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6065 }
6066 }
6067
6068 /* Set up the destination function's loop tree. */
6069 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6070 {
6071 cfun->curr_properties &= ~PROP_loops;
6072 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6073 cfun->curr_properties |= PROP_loops;
6074 }
6075
6076 /* Copy the function's body. */
6077 copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6078 new_entry);
6079
6080 /* Renumber the lexical scoping (non-code) blocks consecutively. */
6081 number_blocks (new_decl);
6082
6083 /* We want to create the BB unconditionally, so that the addition of
6084 debug stmts doesn't affect BB count, which may in the end cause
6085 codegen differences. */
6086 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6087 while (init_stmts.length ())
6088 insert_init_stmt (&id, bb, init_stmts.pop ());
6089 update_clone_info (&id);
6090
6091 /* Remap the nonlocal_goto_save_area, if any. */
6092 if (cfun->nonlocal_goto_save_area)
6093 {
6094 struct walk_stmt_info wi;
6095
6096 memset (&wi, 0, sizeof (wi));
6097 wi.info = &id;
6098 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6099 }
6100
6101 /* Clean up. */
6102 delete id.decl_map;
6103 if (id.debug_map)
6104 delete id.debug_map;
6105 free_dominance_info (CDI_DOMINATORS);
6106 free_dominance_info (CDI_POST_DOMINATORS);
6107
6108 update_max_bb_count ();
6109 fold_marked_statements (0, id.statements_to_fold);
6110 delete id.statements_to_fold;
6111 delete_unreachable_blocks_update_callgraph (&id);
6112 if (id.dst_node->definition)
6113 cgraph_edge::rebuild_references ();
6114 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6115 {
6116 calculate_dominance_info (CDI_DOMINATORS);
6117 fix_loop_structure (NULL);
6118 }
6119 update_ssa (TODO_update_ssa);
6120
6121 /* After partial cloning we need to rescale frequencies, so they are
6122 within proper range in the cloned function. */
6123 if (new_entry)
6124 {
6125 struct cgraph_edge *e;
6126 rebuild_frequencies ();
6127
6128 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6129 for (e = new_version_node->callees; e; e = e->next_callee)
6130 {
6131 basic_block bb = gimple_bb (e->call_stmt);
6132 e->count = bb->count;
6133 }
6134 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6135 {
6136 basic_block bb = gimple_bb (e->call_stmt);
6137 e->count = bb->count;
6138 }
6139 }
6140
6141 if (debug_args_to_skip && MAY_HAVE_DEBUG_BIND_STMTS)
6142 {
6143 tree parm;
6144 vec<tree, va_gc> **debug_args = NULL;
6145 unsigned int len = 0;
6146 for (parm = DECL_ARGUMENTS (old_decl), i = 0;
6147 parm; parm = DECL_CHAIN (parm), i++)
6148 if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
6149 {
6150 tree ddecl;
6151
6152 if (debug_args == NULL)
6153 {
6154 debug_args = decl_debug_args_insert (new_decl);
6155 len = vec_safe_length (*debug_args);
6156 }
6157 ddecl = make_node (DEBUG_EXPR_DECL);
6158 DECL_ARTIFICIAL (ddecl) = 1;
6159 TREE_TYPE (ddecl) = TREE_TYPE (parm);
6160 SET_DECL_MODE (ddecl, DECL_MODE (parm));
6161 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6162 vec_safe_push (*debug_args, ddecl);
6163 }
6164 if (debug_args != NULL)
6165 {
6166 /* On the callee side, add
6167 DEBUG D#Y s=> parm
6168 DEBUG var => D#Y
6169 stmts to the first bb where var is a VAR_DECL created for the
6170 optimized away parameter in DECL_INITIAL block. This hints
6171 in the debug info that var (whose DECL_ORIGIN is the parm
6172 PARM_DECL) is optimized away, but could be looked up at the
6173 call site as value of D#X there. */
6174 tree var = vars, vexpr;
6175 gimple_stmt_iterator cgsi
6176 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6177 gimple *def_temp;
6178 var = vars;
6179 i = vec_safe_length (*debug_args);
6180 do
6181 {
6182 i -= 2;
6183 while (var != NULL_TREE
6184 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6185 var = TREE_CHAIN (var);
6186 if (var == NULL_TREE)
6187 break;
6188 vexpr = make_node (DEBUG_EXPR_DECL);
6189 parm = (**debug_args)[i];
6190 DECL_ARTIFICIAL (vexpr) = 1;
6191 TREE_TYPE (vexpr) = TREE_TYPE (parm);
6192 SET_DECL_MODE (vexpr, DECL_MODE (parm));
6193 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6194 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6195 def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6196 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6197 }
6198 while (i > len);
6199 }
6200 }
6201
6202 if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
6203 BITMAP_FREE (debug_args_to_skip);
6204 free_dominance_info (CDI_DOMINATORS);
6205 free_dominance_info (CDI_POST_DOMINATORS);
6206
6207 gcc_assert (!id.debug_stmts.exists ());
6208 pop_cfun ();
6209 return;
6210 }
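/* A hedged sketch of how a caller typically drives this (IPA clone
   materialization; the names and steps here are illustrative, not a
   verbatim call site):

     tree new_decl = copy_node (old_decl);
     ... create and register a cgraph node for new_decl ...
     tree_function_versioning (old_decl, new_decl, tree_map,
			       false, args_to_skip,
			       false, NULL, NULL);

   i.e. the caller is responsible for creating NEW_DECL and its cgraph
   node beforehand; this function only produces the new body.  */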
6211
6212 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
6213 the callee and return the inlined body on success. */
6214
6215 tree
6216 maybe_inline_call_in_expr (tree exp)
6217 {
6218 tree fn = get_callee_fndecl (exp);
6219
6220 /* We can only try to inline "const" functions. */
6221 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6222 {
6223 call_expr_arg_iterator iter;
6224 copy_body_data id;
6225 tree param, arg, t;
6226 hash_map<tree, tree> decl_map;
6227
6228 /* Remap the parameters. */
6229 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6230 param;
6231 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6232 decl_map.put (param, arg);
6233
6234 memset (&id, 0, sizeof (id));
6235 id.src_fn = fn;
6236 id.dst_fn = current_function_decl;
6237 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6238 id.decl_map = &decl_map;
6239
6240 id.copy_decl = copy_decl_no_change;
6241 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6242 id.transform_new_cfg = false;
6243 id.transform_return_to_modify = true;
6244 id.transform_parameter = true;
6245 id.transform_lang_insert_block = NULL;
6246
6247 /* Make sure not to unshare trees behind the front-end's back
6248 since front-end specific mechanisms may rely on sharing. */
6249 id.regimplify = false;
6250 id.do_not_unshare = true;
6251
6252 /* We're not inside any EH region. */
6253 id.eh_lp_nr = 0;
6254
6255 t = copy_tree_body (&id);
6256
6257 /* We can only return something suitable for use in a GENERIC
6258 expression tree. */
6259 if (TREE_CODE (t) == MODIFY_EXPR)
6260 return TREE_OPERAND (t, 1);
6261 }
6262
6263 return NULL_TREE;
6264 }
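/* For example (hypothetical), given a "const" function

     __attribute__ ((const)) static int sq (int x) { return x * x; }

   a GENERIC call tree for sq (a) can be integrated here: the copied
   body's RETURN_EXPR becomes a MODIFY_EXPR, and the right-hand side
   returned above is an expression computing a * a, which then
   substitutes for the original CALL_EXPR.  */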
6265
6266 /* Duplicate a type, fields and all. */
6267
6268 tree
6269 build_duplicate_type (tree type)
6270 {
6271 struct copy_body_data id;
6272
6273 memset (&id, 0, sizeof (id));
6274 id.src_fn = current_function_decl;
6275 id.dst_fn = current_function_decl;
6276 id.src_cfun = cfun;
6277 id.decl_map = new hash_map<tree, tree>;
6278 id.debug_map = NULL;
6279 id.copy_decl = copy_decl_no_change;
6280
6281 type = remap_type_1 (type, &id);
6282
6283 delete id.decl_map;
6284 if (id.debug_map)
6285 delete id.debug_map;
6286
6287 TYPE_CANONICAL (type) = type;
6288
6289 return type;
6290 }
6291
6292 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6293 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6294 evaluation. */
6295
6296 tree
copy_fn(tree fn,tree & parms,tree & result)6297 copy_fn (tree fn, tree& parms, tree& result)
6298 {
6299 copy_body_data id;
6300 tree param;
6301 hash_map<tree, tree> decl_map;
6302
6303 tree *p = &parms;
6304 *p = NULL_TREE;
6305
6306 memset (&id, 0, sizeof (id));
6307 id.src_fn = fn;
6308 id.dst_fn = current_function_decl;
6309 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6310 id.decl_map = &decl_map;
6311
6312 id.copy_decl = copy_decl_no_change;
6313 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6314 id.transform_new_cfg = false;
6315 id.transform_return_to_modify = false;
6316 id.transform_parameter = true;
6317 id.transform_lang_insert_block = NULL;
6318
6319 /* Make sure not to unshare trees behind the front-end's back
6320 since front-end specific mechanisms may rely on sharing. */
6321 id.regimplify = false;
6322 id.do_not_unshare = true;
6323
6324 /* We're not inside any EH region. */
6325 id.eh_lp_nr = 0;
6326
6327 /* Remap the parameters and result and return them to the caller. */
6328 for (param = DECL_ARGUMENTS (fn);
6329 param;
6330 param = DECL_CHAIN (param))
6331 {
6332 *p = remap_decl (param, &id);
6333 p = &DECL_CHAIN (*p);
6334 }
6335
6336 if (DECL_RESULT (fn))
6337 result = remap_decl (DECL_RESULT (fn), &id);
6338 else
6339 result = NULL_TREE;
6340
6341 return copy_tree_body (&id);
6342 }
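/* A hedged usage sketch from the C++ front end's perspective
   (constexpr evaluation; the surrounding code is illustrative):

     tree parms, result;
     tree body = copy_fn (fndecl, parms, result);

   after which PARMS chains the remapped PARM_DECLs, RESULT is the
   remapped RESULT_DECL (or NULL_TREE), and BODY is an unshared copy
   of DECL_SAVED_TREE (fndecl) ready for substituting argument
   values.  */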
6343