/* Tree inlining.
   Copyright (C) 2001-2020 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "gimple-predict.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "cfganal.h"
#include "tree-iterator.h"
#include "intl.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "debug.h"
#include "value-prof.h"
#include "cfgloop.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "sreal.h"
#include "tree-cfgcleanup.h"
#include "tree-ssa-live.h"

/* I'm not really happy about this, but we need to handle GIMPLE and
   non-GIMPLE trees.  */

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated, and the result is a new
   function rather than being inlined into the blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated, resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined), those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */

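/* As an illustrative sketch only (not the exact GIMPLE the routines
   below emit), inlining transforms a call site roughly like this:

     int f (int x) { return x + 1; }
     ...
     y = f (3);

   becomes, conceptually,

     {
       int x_copy = 3;        <- the PARM_DECL remapped to a VAR_DECL
       int retval;
       retval = x_copy + 1;   <- the RETURN_EXPR became a MODIFY_EXPR
       y = retval;
     }

   with the duplicated EH and callgraph information adjusted as
   described above.  The names "x_copy" and "retval" are made up for
   the example.  */
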
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */


/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree,
                                     basic_block);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, it is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  id->decl_map->put (key, value);

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    id->decl_map->put (value, value);
}

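/* For instance, after insert_decl_map (id, old_parm, new_var), both
   old_parm and new_var map to new_var; if the copying walk later
   encounters new_var itself, the lookup yields it unchanged instead
   of duplicating it a second time.  */
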
/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (VAR_P (value));

  if (!id->debug_map)
    id->debug_map = new hash_map<tree, tree>;

  id->debug_map->put (key, value);
}

/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;

/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree, var;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = id->decl_map->get (name);
  if (n)
    {
      /* When we perform edge redirection as part of CFG copy, IPA-SRA can
         remove an unused LHS from a call statement.  Such an LHS can,
         however, still appear in debug statements, but its value is lost
         in this function and we do not want to map it.  */
      if (id->killed_new_ssa_names
          && id->killed_new_ssa_names->contains (*n))
        {
          gcc_assert (processing_debug_stmt);
          processing_debug_stmt = -1;
          return name;
        }

      return unshare_expr (*n);
    }

  if (processing_debug_stmt)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (name)
          && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
          && id->entry_bb == NULL
          && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
        {
          tree vexpr = make_node (DEBUG_EXPR_DECL);
          gimple *def_temp;
          gimple_stmt_iterator gsi;
          tree val = SSA_NAME_VAR (name);

          n = id->decl_map->get (val);
          if (n != NULL)
            val = *n;
          if (TREE_CODE (val) != PARM_DECL
              && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
            {
              processing_debug_stmt = -1;
              return name;
            }
          n = id->decl_map->get (val);
          if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
            return *n;
          def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
          DECL_ARTIFICIAL (vexpr) = 1;
          TREE_TYPE (vexpr) = TREE_TYPE (name);
          SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
          gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
          gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
          insert_decl_map (id, val, vexpr);
          return vexpr;
        }

      processing_debug_stmt = -1;
      return name;
    }

  /* Remap anonymous SSA names or SSA names of anonymous decls.  */
  var = SSA_NAME_VAR (name);
  if (!var
      || (!SSA_NAME_IS_DEFAULT_DEF (name)
          && VAR_P (var)
          && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
          && DECL_ARTIFICIAL (var)
          && DECL_IGNORED_P (var)
          && !DECL_NAME (var)))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
      if (!var && SSA_NAME_IDENTIFIER (name))
        SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
          && id->src_cfun->gimple_df->ipa_pta
          && POINTER_TYPE_P (TREE_TYPE (name))
          && (pi = SSA_NAME_PTR_INFO (name))
          && !pi->pt.anything)
        {
          struct ptr_info_def *new_pi = get_ptr_info (new_tree);
          new_pi->pt = pi->pt;
        }
      /* So can range-info.  */
      if (!POINTER_TYPE_P (TREE_TYPE (name))
          && SSA_NAME_RANGE_INFO (name))
        duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
                                       SSA_NAME_RANGE_INFO (name));
      return new_tree;
    }

  /* Do not set DEF_STMT yet, as the statement is not copied yet.  We do
     that in copy_bb.  */
  new_tree = remap_decl (var, id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing the RESULT_DECL by the variable
     during inlining: this saves us from the need to introduce a PHI node
     in the case the return value is only partly initialized.  */
  if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
      && (!SSA_NAME_VAR (name)
          || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
          || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
          && id->src_cfun->gimple_df->ipa_pta
          && POINTER_TYPE_P (TREE_TYPE (name))
          && (pi = SSA_NAME_PTR_INFO (name))
          && !pi->pt.anything)
        {
          struct ptr_info_def *new_pi = get_ptr_info (new_tree);
          new_pi->pt = pi->pt;
        }
      /* So can range-info.  */
      if (!POINTER_TYPE_P (TREE_TYPE (name))
          && SSA_NAME_RANGE_INFO (name))
        duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
                                       SSA_NAME_RANGE_INFO (name));
      if (SSA_NAME_IS_DEFAULT_DEF (name))
        {
          /* By inlining a function having an uninitialized variable, we
             might extend its lifetime (the variable might get reused).
             This causes an ICE in the case we end up extending the
             lifetime of an SSA name across an abnormal edge, but it also
             increases register pressure.

             We simply initialize all uninitialized vars to 0, except for
             the case we are inlining to the very first BB.  We can avoid
             this for all BBs that are not inside strongly connected
             regions of the CFG, but this is expensive to test.  */
          if (id->entry_bb
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
              && (!SSA_NAME_VAR (name)
                  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
              && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
                                             0)->dest
                  || EDGE_COUNT (id->entry_bb->preds) != 1))
            {
              gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
              gimple *init_stmt;
              tree zero = build_zero_cst (TREE_TYPE (new_tree));

              init_stmt = gimple_build_assign (new_tree, zero);
              gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
              SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
            }
          else
            {
              SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
              set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
            }
        }
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}

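/* Illustrative note: with transform_return_to_modify set, an SSA name
   whose underlying variable is the source function's RESULT_DECL is
   remapped above onto the caller-side return variable itself rather
   than onto a fresh SSA name, which avoids introducing a PHI node when
   the return value is only partly initialized.  */
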
/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = id->decl_map->get (decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* When remapping a type within copy_gimple_seq_and_replace_locals, all
     necessary DECLs have already been remapped and we do not want to duplicate
     a decl coming from outside of the sequence we are copying.  */
  if (!n
      && id->prevent_decl_creation_for_types
      && id->remapping_type_depth > 0
      && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
    return decl;

  /* If we didn't already have an equivalent for this declaration, create one
     now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
        return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        {
          DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

          /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
             which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
             is not set on the TYPE_DECL, for example in LTO mode.  */
          if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
            {
              tree x = build_variant_type_copy (TREE_TYPE (t));
              TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
              TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
              DECL_ORIGINAL_TYPE (t) = x;
            }
        }

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
        }

      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}

static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                              TYPE_MODE (type),
                                              TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                                TYPE_MODE (type),
                                                TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  /* Copy all types that may contain references to local variables; be sure to
     preserve sharing in between type and its main variant when possible.  */
  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
        {
          gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
          gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));

          TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
          TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
        }
      else
        {
          t = TYPE_MIN_VALUE (new_tree);
          if (t && TREE_CODE (t) != INTEGER_CST)
            walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

          t = TYPE_MAX_VALUE (new_tree);
          if (t && TREE_CODE (t) != INTEGER_CST)
            walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
        }
      return new_tree;

    case FUNCTION_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
          && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
        TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
      else
        TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
          && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
        TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
      else
        walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
          && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
        TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
      else
        TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);

      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
        {
          gcc_checking_assert (TYPE_DOMAIN (type)
                               == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
          TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
        }
      else
        {
          TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
          /* For array bounds where we have decided not to copy over the
             bounds variable that isn't used in the OpenMP/OpenACC region,
             change them to an uninitialized VAR_DECL temporary.  */
          if (id->adjust_array_error_bounds
              && TYPE_DOMAIN (new_tree)
              && TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
              && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
            {
              tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
              DECL_ATTRIBUTES (v)
                = tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
                             DECL_ATTRIBUTES (v));
              TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
            }
        }
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      if (TYPE_MAIN_VARIANT (type) != type
          && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
        TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
      else
        {
          tree f, nf = NULL;

          for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
            {
              t = remap_decl (f, id);
              DECL_CONTEXT (t) = new_tree;
              DECL_CHAIN (t) = nf;
              nf = t;
            }
          TYPE_FIELDS (new_tree) = nreverse (nf);
        }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  /* All variants of the type share the same size, so use the already
     remapped data.  */
  if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
    {
      tree s = TYPE_SIZE (type);
      tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
      tree su = TYPE_SIZE_UNIT (type);
      tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
      gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
                            && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
                           || s == mvs);
      gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
                            && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
                           || su == mvsu);
      TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
      TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
    }
  else
    {
      walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
      walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
    }

  return new_tree;
}

/* Helper function for remap_type_2, called through walk_tree.  */

static tree
remap_type_3 (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;

  if (TYPE_P (*tp))
    *walk_subtrees = 0;

  else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
    return *tp;

  return NULL_TREE;
}

/* Return true if TYPE needs to be remapped because remap_decl on any
   needed embedded decl returns something other than that decl.  */

static bool
remap_type_2 (tree type, copy_body_data *id)
{
  tree t;

#define RETURN_TRUE_IF_VAR(T) \
  do								\
    {								\
      tree _t = (T);						\
      if (_t)							\
	{							\
	  if (DECL_P (_t) && remap_decl (_t, id) != _t)		\
	    return true;					\
	  if (!TYPE_SIZES_GIMPLIFIED (type)			\
	      && walk_tree (&_t, remap_type_3, id, NULL))	\
	    return true;					\
	}							\
    }								\
  while (0)

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case FUNCTION_TYPE:
    case METHOD_TYPE:
      return remap_type_2 (TREE_TYPE (type), id);

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
      RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
      return false;

    case ARRAY_TYPE:
      if (remap_type_2 (TREE_TYPE (type), id)
          || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
        return true;
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          {
            RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
            RETURN_TRUE_IF_VAR (DECL_SIZE (t));
            RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
            if (TREE_CODE (type) == QUAL_UNION_TYPE)
              RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
          }
      break;

    default:
      return false;
    }

  RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
  RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
  return false;
#undef RETURN_TRUE_IF_VAR
}

tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = id->decl_map->get (type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn)
      /* Don't remap if copy_decl method doesn't always return a new
         decl and for all embedded decls returns the passed in decl.  */
      || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}

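/* An illustrative example: a variably modified type such as "int[n]",
   where "n" is a PARM_DECL of the source function, must be remapped so
   that its bound refers to the remapped copy of "n"; a fixed-size type
   like "int[10]" is simply entered into the map as itself and returned
   unchanged.  */
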
/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  return false;
}

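/* For example, given

     int f (void) { static int counter; ... }

   the static "counter" must remain a single object shared by every
   inlined or cloned copy of f, so it is recorded in
   BLOCK_NONLOCALIZED_VARS rather than duplicated ("f" and "counter"
   are made-up names for the illustration).  */
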
static tree
remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
             copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
        {
          /* We need to add this variable to the local decls as otherwise
             nothing else will do so.  */
          if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
            add_local_decl (cfun, old_var);
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            vec_safe_push (*nonlocalized_list, old_var);
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
        ;
      else if (!new_var)
        {
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            vec_safe_push (*nonlocalized_list, old_var);
        }
      else
        {
          gcc_assert (DECL_P (new_var));
          DECL_CHAIN (new_var) = new_decls;
          new_decls = new_var;

          /* Also copy value-expressions.  */
          if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
            {
              tree tem = DECL_VALUE_EXPR (new_var);
              bool old_regimplify = id->regimplify;
              id->remapping_type_depth++;
              walk_tree (&tem, copy_tree_body_r, id, NULL);
              id->remapping_type_depth--;
              id->regimplify = old_regimplify;
              SET_DECL_VALUE_EXPR (new_var, tem);
            }
        }
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein, and hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
                                        &BLOCK_NONLOCALIZED_VARS (new_block),
                                        id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */

static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}

/* Remap the block tree rooted at BLOCK to nothing.  */

static void
remap_blocks_to_null (tree block, copy_body_data *id)
{
  tree t;
  insert_decl_map (id, block, NULL_TREE);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    remap_blocks_to_null (t, id);
}

/* Remap the location info in LOCUS.  */

static location_t
remap_location (location_t locus, copy_body_data *id)
{
  if (LOCATION_BLOCK (locus))
    {
      tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
      gcc_assert (n);
      if (*n)
        return set_block (locus, *n);
    }

  locus = LOCATION_LOCUS (locus);

  if (locus != UNKNOWN_LOCATION && id->block)
    return set_block (locus, id->block);

  return locus;
}

static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
        /* This copy is not redundant; tsi_link_after will smash this
           STATEMENT_LIST into the end of the one we're building, and we
           don't want to do that with the original.  */
        copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}

static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}


/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_seq (&new_body, new_stmts);
    }

  return new_body;
}


/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple *
copy_gimple_bind (gbind *stmt, copy_body_data *id)
{
  gimple *new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}

/* Return true if DECL is a parameter or a SSA_NAME for a parameter.  */

static bool
is_parm (tree decl)
{
  if (TREE_CODE (decl) == SSA_NAME)
    {
      decl = SSA_NAME_VAR (decl);
      if (!decl)
        return false;
    }

  return (TREE_CODE (decl) == PARM_DECL);
}

/* Remap the dependence CLIQUE from the source to the destination function
   as specified in ID.  */

static unsigned short
remap_dependence_clique (copy_body_data *id, unsigned short clique)
{
  if (clique == 0 || processing_debug_stmt)
    return 0;
  if (!id->dependence_map)
    id->dependence_map = new hash_map<dependence_hash, unsigned short>;
  bool existed;
  unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
  if (!existed)
    {
      /* Clique 1 is reserved for local ones set by PTA.  */
      if (cfun->last_clique == 0)
        cfun->last_clique = 1;
      newc = ++cfun->last_clique;
    }
  return newc;
}

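/* An illustrative sketch with made-up numbers: if the source function
   used clique 2 for a group of restrict-based accesses, the first
   copied MEM_REF carrying clique 2 allocates a fresh clique number in
   the destination function, and every later reference with clique 2
   reuses that same number, so the disambiguation survives the copy
   without colliding with the destination's existing cliques.  */
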
/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  /* For recursive invocations this is no longer the LHS itself.  */
  bool is_lhs = wi_p->is_lhs;
  wi_p->is_lhs = false;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      if (is_lhs)
        SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
         variables.  We don't want to copy static variables; there's
         only one of those, no matter how many times we inline the
         containing function.  Similarly for globals from an outer
         function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ??? The C++ frontend uses void * pointer zero to initialize
         any other type.  This confuses the middle-end type verification.
         As cloned bodies do not go through gimplification again the fixup
         there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
          && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
        new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (!DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == FIELD_DECL)
    {
      /* If the enclosing record type is variably_modified_type_p, the field
         has already been remapped.  Otherwise, it need not be.  */
      tree *n = id->decl_map->get (*tp);
      if (n)
        *tp = *n;
      *walk_subtrees = 0;
    }
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
         will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
         knows not to copy VAR_DECLs, etc., so this is safe.  */

      if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
        {
          /* We need to re-canonicalize MEM_REFs from inline substitutions
             that can happen when a pointer argument is an ADDR_EXPR.
             Recurse here manually to allow that.  */
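          /* E.g. if the argument passed for pointer P was "&a", the
             copied access "*P" can show up here as a MEM_REF based on
             "&a", which the fold_build2 call below re-canonicalizes
             into a direct access to "a" ("P" and "a" are illustrative
             names).  */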
          tree ptr = TREE_OPERAND (*tp, 0);
          tree type = remap_type (TREE_TYPE (*tp), id);
          tree old = *tp;
          walk_tree (&ptr, remap_gimple_op_r, data, NULL);
          *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
          TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
          TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
          TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
          if (MR_DEPENDENCE_CLIQUE (old) != 0)
            {
              MR_DEPENDENCE_CLIQUE (*tp)
                = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
              MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
            }
          /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
             remapped a parameter as the property might be valid only
             for the parameter itself.  */
          if (TREE_THIS_NOTRAP (old)
              && (!is_parm (TREE_OPERAND (old, 0))
                  || (!id->transform_parameter && is_parm (ptr))))
            TREE_THIS_NOTRAP (*tp) = 1;
          REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
          *walk_subtrees = 0;
          return NULL;
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          /* The copied TARGET_EXPR has never been expanded, even if the
             original node was expanded already.  */
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          /* Variable substitution need not be simple.  In particular,
             the MEM_REF substitution above.  Make sure that
             TREE_CONSTANT and friends are up-to-date.  */
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
          recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Update the TREE_BLOCK for the cloned expr.  */
  if (EXPR_P (*tp))
    {
      tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
      tree old_block = TREE_BLOCK (*tp);
      if (old_block)
        {
          tree *n;
          n = id->decl_map->get (TREE_BLOCK (*tp));
          if (n)
            new_block = *n;
        }
      TREE_SET_BLOCK (*tp, new_block);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}


/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
        {
          /* Replace the RETURN_EXPR with (a copy of) the
             MODIFY_EXPR hanging underneath.  */
          *tp = copy_node (assignment);
        }
      else /* Else the RETURN_EXPR returns no value.  */
        {
          *tp = NULL;
          return (tree) (void *)1;
        }
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
           || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (! DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
          && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
          && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          tree *n;

          n = id->decl_map->get (decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                {
                  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
                  return copy_tree_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
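          /* E.g. inlining a call "f (&a)" into a body that dereferences
             the parameter as "*p" first yields "*&a", which should fold
             to plain "a" ("f", "p" and "a" are illustrative names).  */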
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n = id->decl_map->get (decl);
          if (n)
            {
              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                 but we absolutely rely on that.  As fold_indirect_ref
                 does other useful transformations, try that first, though.  */
              tree type = TREE_TYPE (*tp);
              tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
              tree old = *tp;
              *tp = id->do_not_fold ? NULL : gimple_fold_indirect_ref (ptr);
              if (! *tp)
                {
                  type = remap_type (type, id);
                  if (TREE_CODE (ptr) == ADDR_EXPR && !id->do_not_fold)
                    {
                      *tp
                        = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
                      /* ??? We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (ptr, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, ptr);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
                      TREE_READONLY (*tp) = TREE_READONLY (old);
                      /* We cannot propagate the TREE_THIS_NOTRAP flag if we
                         have remapped a parameter as the property might be
                         valid only for the parameter itself.  */
                      if (TREE_THIS_NOTRAP (old)
                          && (!is_parm (TREE_OPERAND (old, 0))
                              || (!id->transform_parameter && is_parm (ptr))))
                        TREE_THIS_NOTRAP (*tp) = 1;
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }
      else if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
        {
          /* We need to re-canonicalize MEM_REFs from inline substitutions
             that can happen when a pointer argument is an ADDR_EXPR.
             Recurse here manually to allow that.  */
          tree ptr = TREE_OPERAND (*tp, 0);
          tree type = remap_type (TREE_TYPE (*tp), id);
          tree old = *tp;
          walk_tree (&ptr, copy_tree_body_r, data, NULL);
          *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
          TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
          TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
          TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
          if (MR_DEPENDENCE_CLIQUE (old) != 0)
            {
              MR_DEPENDENCE_CLIQUE (*tp)
                = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
              MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
            }
          /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
             remapped a parameter as the property might be valid only
             for the parameter itself.  */
          if (TREE_THIS_NOTRAP (old)
              && (!is_parm (TREE_OPERAND (old, 0))
                  || (!id->transform_parameter && is_parm (ptr))))
            TREE_THIS_NOTRAP (*tp) = 1;
          REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
          *walk_subtrees = 0;
          return NULL;
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* If EXPR has a block defined, map it to the newly constructed
         block.  When inlining, we want EXPRs without a block to appear
         in the block of the function call if we are not remapping a
         type.  */
      if (EXPR_P (*tp))
        {
          new_block = id->remapping_type_depth == 0 ? id->block : NULL;
          if (TREE_BLOCK (*tp))
            {
              tree *n;
              n = id->decl_map->get (TREE_BLOCK (*tp));
              if (n)
                new_block = *n;
            }
          TREE_SET_BLOCK (*tp, new_block);
        }

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }

      /* Variable substitution need not be simple.  In particular, the
         INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
         and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF
              && !id->do_not_fold)
            {
              tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
              if (TREE_TYPE (t) != TREE_TYPE (*tp))
                t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
              *tp = t;
            }
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  new_r = static_cast<eh_region> (*id->eh_map->get (old_r));

  return new_r->index;
}

/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_to_shwi (old_t_nr);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (integer_type_node, new_nr);
}

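/* An illustrative sketch with made-up numbers: if a throwing statement
   in the source function was annotated with EH region 3, and copying
   the body duplicated that region into the destination function as
   region 7, then remap_eh_region_nr maps 3 to 7 so the copied
   statement's EH annotations refer to the duplicated region rather
   than to whatever region 3 means in the destination.  */
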
1510 /* Helper for copy_bb. Remap statement STMT using the inlining
1511 information in ID. Return the new statement copy. */
1512
1513 static gimple_seq
remap_gimple_stmt(gimple * stmt,copy_body_data * id)1514 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1515 {
1516 gimple *copy = NULL;
1517 struct walk_stmt_info wi;
1518 bool skip_first = false;
1519 gimple_seq stmts = NULL;
1520
1521 if (is_gimple_debug (stmt)
1522 && (gimple_debug_nonbind_marker_p (stmt)
1523 ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1524 : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1525 return NULL;
1526
1527 /* Begin by recognizing trees that we'll completely rewrite for the
1528 inlining context. Our output for these trees is completely
1529 different from our input (e.g. RETURN_EXPR is deleted and morphs
1530 into an edge). Further down, we'll handle trees that get
1531 duplicated and/or tweaked. */
1532
1533 /* When requested, GIMPLE_RETURN should be transformed to just the
1534 contained GIMPLE_ASSIGN. The branch semantics of the return will
1535 be handled elsewhere by manipulating the CFG rather than the
1536 statement. */
1537 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1538 {
1539 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1540
1541 /* If we're returning something, just turn that into an
1542 assignment to the equivalent of the original RESULT_DECL.
1543 If RETVAL is just the result decl, the result decl has
1544 already been set (e.g. a recent "foo (&result_decl, ...)");
1545 just toss the entire GIMPLE_RETURN. Likewise for when the
1546 call doesn't want the return value. */
1547 if (retval
1548 && (TREE_CODE (retval) != RESULT_DECL
1549 && (!id->call_stmt
1550 || gimple_call_lhs (id->call_stmt) != NULL_TREE)
1551 && (TREE_CODE (retval) != SSA_NAME
1552 || ! SSA_NAME_VAR (retval)
1553 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1554 {
1555 copy = gimple_build_assign (id->do_not_unshare
1556 ? id->retvar : unshare_expr (id->retvar),
1557 retval);
1558 /* id->retvar is already substituted. Skip it on later remapping. */
1559 skip_first = true;
1560 }
1561 else
1562 return NULL;
1563 }
1564 else if (gimple_has_substatements (stmt))
1565 {
1566 gimple_seq s1, s2;
1567
1568 /* When cloning bodies from the C++ front end, we will be handed bodies
1569 in High GIMPLE form. Handle here all the High GIMPLE statements that
1570 have embedded statements. */
1571 switch (gimple_code (stmt))
1572 {
1573 case GIMPLE_BIND:
1574 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1575 break;
1576
1577 case GIMPLE_CATCH:
1578 {
1579 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1580 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1581 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1582 }
1583 break;
1584
1585 case GIMPLE_EH_FILTER:
1586 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1587 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1588 break;
1589
1590 case GIMPLE_TRY:
1591 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1592 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1593 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1594 break;
1595
1596 case GIMPLE_WITH_CLEANUP_EXPR:
1597 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1598 copy = gimple_build_wce (s1);
1599 break;
1600
1601 case GIMPLE_OMP_PARALLEL:
1602 {
1603 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1604 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1605 copy = gimple_build_omp_parallel
1606 (s1,
1607 gimple_omp_parallel_clauses (omp_par_stmt),
1608 gimple_omp_parallel_child_fn (omp_par_stmt),
1609 gimple_omp_parallel_data_arg (omp_par_stmt));
1610 }
1611 break;
1612
1613 case GIMPLE_OMP_TASK:
1614 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1615 copy = gimple_build_omp_task
1616 (s1,
1617 gimple_omp_task_clauses (stmt),
1618 gimple_omp_task_child_fn (stmt),
1619 gimple_omp_task_data_arg (stmt),
1620 gimple_omp_task_copy_fn (stmt),
1621 gimple_omp_task_arg_size (stmt),
1622 gimple_omp_task_arg_align (stmt));
1623 break;
1624
1625 case GIMPLE_OMP_FOR:
1626 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1627 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1628 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1629 gimple_omp_for_clauses (stmt),
1630 gimple_omp_for_collapse (stmt), s2);
1631 {
1632 size_t i;
1633 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1634 {
1635 gimple_omp_for_set_index (copy, i,
1636 gimple_omp_for_index (stmt, i));
1637 gimple_omp_for_set_initial (copy, i,
1638 gimple_omp_for_initial (stmt, i));
1639 gimple_omp_for_set_final (copy, i,
1640 gimple_omp_for_final (stmt, i));
1641 gimple_omp_for_set_incr (copy, i,
1642 gimple_omp_for_incr (stmt, i));
1643 gimple_omp_for_set_cond (copy, i,
1644 gimple_omp_for_cond (stmt, i));
1645 }
1646 }
1647 break;
1648
1649 case GIMPLE_OMP_MASTER:
1650 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1651 copy = gimple_build_omp_master (s1);
1652 break;
1653
1654 case GIMPLE_OMP_TASKGROUP:
1655 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1656 copy = gimple_build_omp_taskgroup
1657 (s1, gimple_omp_taskgroup_clauses (stmt));
1658 break;
1659
1660 case GIMPLE_OMP_ORDERED:
1661 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1662 copy = gimple_build_omp_ordered
1663 (s1,
1664 gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1665 break;
1666
1667 case GIMPLE_OMP_SCAN:
1668 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1669 copy = gimple_build_omp_scan
1670 (s1, gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)));
1671 break;
1672
1673 case GIMPLE_OMP_SECTION:
1674 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1675 copy = gimple_build_omp_section (s1);
1676 break;
1677
1678 case GIMPLE_OMP_SECTIONS:
1679 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1680 copy = gimple_build_omp_sections
1681 (s1, gimple_omp_sections_clauses (stmt));
1682 break;
1683
1684 case GIMPLE_OMP_SINGLE:
1685 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1686 copy = gimple_build_omp_single
1687 (s1, gimple_omp_single_clauses (stmt));
1688 break;
1689
1690 case GIMPLE_OMP_TARGET:
1691 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1692 copy = gimple_build_omp_target
1693 (s1, gimple_omp_target_kind (stmt),
1694 gimple_omp_target_clauses (stmt));
1695 break;
1696
1697 case GIMPLE_OMP_TEAMS:
1698 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1699 copy = gimple_build_omp_teams
1700 (s1, gimple_omp_teams_clauses (stmt));
1701 break;
1702
1703 case GIMPLE_OMP_CRITICAL:
1704 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1705 copy = gimple_build_omp_critical (s1,
1706 gimple_omp_critical_name
1707 (as_a <gomp_critical *> (stmt)),
1708 gimple_omp_critical_clauses
1709 (as_a <gomp_critical *> (stmt)));
1710 break;
1711
1712 case GIMPLE_TRANSACTION:
1713 {
1714 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1715 gtransaction *new_trans_stmt;
1716 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1717 id);
1718 copy = new_trans_stmt = gimple_build_transaction (s1);
1719 gimple_transaction_set_subcode (new_trans_stmt,
1720 gimple_transaction_subcode (old_trans_stmt));
1721 gimple_transaction_set_label_norm (new_trans_stmt,
1722 gimple_transaction_label_norm (old_trans_stmt));
1723 gimple_transaction_set_label_uninst (new_trans_stmt,
1724 gimple_transaction_label_uninst (old_trans_stmt));
1725 gimple_transaction_set_label_over (new_trans_stmt,
1726 gimple_transaction_label_over (old_trans_stmt));
1727 }
1728 break;
1729
1730 default:
1731 gcc_unreachable ();
1732 }
1733 }
1734 else
1735 {
1736 if (gimple_assign_copy_p (stmt)
1737 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1738 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1739 {
1740 /* Here we handle statements that are not completely rewritten.
1741 First we detect some inlining-induced bogosities for
1742 discarding. */
1743
1744 /* Some assignments VAR = VAR; don't generate any rtl code
1745 and thus don't count as variable modification. Avoid
1746 keeping bogosities like 0 = 0. */
1747 tree decl = gimple_assign_lhs (stmt), value;
1748 tree *n;
1749
1750 n = id->decl_map->get (decl);
1751 if (n)
1752 {
1753 value = *n;
1754 STRIP_TYPE_NOPS (value);
1755 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1756 return NULL;
1757 }
1758 }
1759
1760 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is an SSA_NAME defined
1761 in a block that we aren't copying during tree_function_versioning,
1762 just drop the clobber stmt. */
1763 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1764 {
1765 tree lhs = gimple_assign_lhs (stmt);
1766 if (TREE_CODE (lhs) == MEM_REF
1767 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1768 {
1769 gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1770 if (gimple_bb (def_stmt)
1771 && !bitmap_bit_p (id->blocks_to_copy,
1772 gimple_bb (def_stmt)->index))
1773 return NULL;
1774 }
1775 }
1776
1777 /* We do not allow CLOBBERs of handled components. In case the
1778 returned value is stored via such a handled component, remove
1779 the clobber so the stmt verifier is happy. */
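/* For instance, with return slot optimization the RESULT_DECL may be
   remapped to a component reference such as DEST.field (a purely
   illustrative name); the copied "<retval> ={v} {CLOBBER}" would then
   clobber a handled component, which the verifier rejects. */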
1780 if (gimple_clobber_p (stmt)
1781 && TREE_CODE (gimple_assign_lhs (stmt)) == RESULT_DECL)
1782 {
1783 tree remapped = remap_decl (gimple_assign_lhs (stmt), id);
1784 if (!DECL_P (remapped)
1785 && TREE_CODE (remapped) != MEM_REF)
1786 return NULL;
1787 }
1788
1789 if (gimple_debug_bind_p (stmt))
1790 {
1791 gdebug *copy
1792 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1793 gimple_debug_bind_get_value (stmt),
1794 stmt);
1795 if (id->reset_location)
1796 gimple_set_location (copy, input_location);
1797 id->debug_stmts.safe_push (copy);
1798 gimple_seq_add_stmt (&stmts, copy);
1799 return stmts;
1800 }
1801 if (gimple_debug_source_bind_p (stmt))
1802 {
1803 gdebug *copy = gimple_build_debug_source_bind
1804 (gimple_debug_source_bind_get_var (stmt),
1805 gimple_debug_source_bind_get_value (stmt),
1806 stmt);
1807 if (id->reset_location)
1808 gimple_set_location (copy, input_location);
1809 id->debug_stmts.safe_push (copy);
1810 gimple_seq_add_stmt (&stmts, copy);
1811 return stmts;
1812 }
1813 if (gimple_debug_nonbind_marker_p (stmt))
1814 {
1815 /* If the inlined function has too many debug markers,
1816 don't copy them. */
1817 if (id->src_cfun->debug_marker_count
1818 > param_max_debug_marker_count)
1819 return stmts;
1820
1821 gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1822 if (id->reset_location)
1823 gimple_set_location (copy, input_location);
1824 id->debug_stmts.safe_push (copy);
1825 gimple_seq_add_stmt (&stmts, copy);
1826 return stmts;
1827 }
1828
1829 /* Create a new deep copy of the statement. */
1830 copy = gimple_copy (stmt);
1831
1832 /* Clear flags that need revisiting. */
1833 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1834 {
1835 if (gimple_call_tail_p (call_stmt))
1836 gimple_call_set_tail (call_stmt, false);
1837 if (gimple_call_from_thunk_p (call_stmt))
1838 gimple_call_set_from_thunk (call_stmt, false);
1839 if (gimple_call_internal_p (call_stmt))
1840 switch (gimple_call_internal_fn (call_stmt))
1841 {
1842 case IFN_GOMP_SIMD_LANE:
1843 case IFN_GOMP_SIMD_VF:
1844 case IFN_GOMP_SIMD_LAST_LANE:
1845 case IFN_GOMP_SIMD_ORDERED_START:
1846 case IFN_GOMP_SIMD_ORDERED_END:
1847 DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1848 break;
1849 default:
1850 break;
1851 }
1852 }
1853
1854 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1855 RESX and EH_DISPATCH. */
1856 if (id->eh_map)
1857 switch (gimple_code (copy))
1858 {
1859 case GIMPLE_CALL:
1860 {
1861 tree r, fndecl = gimple_call_fndecl (copy);
1862 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1863 switch (DECL_FUNCTION_CODE (fndecl))
1864 {
1865 case BUILT_IN_EH_COPY_VALUES:
1866 r = gimple_call_arg (copy, 1);
1867 r = remap_eh_region_tree_nr (r, id);
1868 gimple_call_set_arg (copy, 1, r);
1869 /* FALLTHRU */
1870
1871 case BUILT_IN_EH_POINTER:
1872 case BUILT_IN_EH_FILTER:
1873 r = gimple_call_arg (copy, 0);
1874 r = remap_eh_region_tree_nr (r, id);
1875 gimple_call_set_arg (copy, 0, r);
1876 break;
1877
1878 default:
1879 break;
1880 }
1881
1882 /* Reset alias info if we didn't apply measures to
1883 keep it valid over inlining by setting DECL_PT_UID. */
1884 if (!id->src_cfun->gimple_df
1885 || !id->src_cfun->gimple_df->ipa_pta)
1886 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1887 }
1888 break;
1889
1890 case GIMPLE_RESX:
1891 {
1892 gresx *resx_stmt = as_a <gresx *> (copy);
1893 int r = gimple_resx_region (resx_stmt);
1894 r = remap_eh_region_nr (r, id);
1895 gimple_resx_set_region (resx_stmt, r);
1896 }
1897 break;
1898
1899 case GIMPLE_EH_DISPATCH:
1900 {
1901 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1902 int r = gimple_eh_dispatch_region (eh_dispatch);
1903 r = remap_eh_region_nr (r, id);
1904 gimple_eh_dispatch_set_region (eh_dispatch, r);
1905 }
1906 break;
1907
1908 default:
1909 break;
1910 }
1911 }
1912
1913 /* If STMT has a block defined, map it to the newly constructed block. */
1914 if (tree block = gimple_block (copy))
1915 {
1916 tree *n;
1917 n = id->decl_map->get (block);
1918 gcc_assert (n);
1919 gimple_set_block (copy, *n);
1920 }
1921 if (id->param_body_adjs)
1922 {
1923 gimple_seq extra_stmts = NULL;
1924 id->param_body_adjs->modify_gimple_stmt (&copy, &extra_stmts);
1925 if (!gimple_seq_empty_p (extra_stmts))
1926 {
1927 memset (&wi, 0, sizeof (wi));
1928 wi.info = id;
1929 for (gimple_stmt_iterator egsi = gsi_start (extra_stmts);
1930 !gsi_end_p (egsi);
1931 gsi_next (&egsi))
1932 walk_gimple_op (gsi_stmt (egsi), remap_gimple_op_r, &wi);
1933 gimple_seq_add_seq (&stmts, extra_stmts);
1934 }
1935 }
1936
1937 if (id->reset_location)
1938 gimple_set_location (copy, input_location);
1939
1940 /* Debug statements ought to be rebuilt and not copied. */
1941 gcc_checking_assert (!is_gimple_debug (copy));
1942
1943 /* Remap all the operands in COPY. */
1944 memset (&wi, 0, sizeof (wi));
1945 wi.info = id;
1946 if (skip_first)
1947 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1948 else
1949 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1950
1951 /* Clear the copied virtual operands. We are not remapping them here
1952 but are going to recreate them from scratch. */
1953 if (gimple_has_mem_ops (copy))
1954 {
1955 gimple_set_vdef (copy, NULL_TREE);
1956 gimple_set_vuse (copy, NULL_TREE);
1957 }
1958
1959 if (cfun->can_throw_non_call_exceptions)
1960 {
1961 /* When inlining a function which does not have non-call exceptions
1962 enabled into a function that has them (which only happens with
1963 always-inline) we have to fix up stmts that cannot throw. */
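/* For example, a floating-point condition "if (x_1 > y_2)" that may
   trap on a NaN is rewritten (a sketch with hypothetical SSA names) as
     cmp_3 = x_1 > y_2;
     if (cmp_3 != 0)
   so the potentially trapping comparison ends its own block and can
   receive EH edges. */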
1964 if (gcond *cond = dyn_cast <gcond *> (copy))
1965 if (gimple_could_trap_p (cond))
1966 {
1967 gassign *cmp
1968 = gimple_build_assign (make_ssa_name (boolean_type_node),
1969 gimple_cond_code (cond),
1970 gimple_cond_lhs (cond),
1971 gimple_cond_rhs (cond));
1972 gimple_seq_add_stmt (&stmts, cmp);
1973 gimple_cond_set_code (cond, NE_EXPR);
1974 gimple_cond_set_lhs (cond, gimple_assign_lhs (cmp));
1975 gimple_cond_set_rhs (cond, boolean_false_node);
1976 }
1977 if (gassign *ass = dyn_cast <gassign *> (copy))
1978 if ((gimple_assign_rhs_code (ass) == COND_EXPR
1979 || gimple_assign_rhs_code (ass) == VEC_COND_EXPR)
1980 && gimple_could_trap_p (ass))
1981 {
1982 gassign *cmp
1983 = gimple_build_assign (make_ssa_name (boolean_type_node),
1984 gimple_assign_rhs1 (ass));
1985 gimple_seq_add_stmt (&stmts, cmp);
1986 gimple_assign_set_rhs1 (ass, gimple_assign_lhs (cmp));
1987 }
1988 }
1989
1990 gimple_seq_add_stmt (&stmts, copy);
1991 return stmts;
1992 }
1993
1994
1995 /* Copy basic block, scale profile accordingly. Edges will be taken
1996 care of later. */
1997
1998 static basic_block
1999 copy_bb (copy_body_data *id, basic_block bb,
2000 profile_count num, profile_count den)
2001 {
2002 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
2003 basic_block copy_basic_block;
2004 tree decl;
2005 basic_block prev;
2006
2007 profile_count::adjust_for_ipa_scaling (&num, &den);
2008
2009 /* Search for previous copied basic block. */
2010 prev = bb->prev_bb;
2011 while (!prev->aux)
2012 prev = prev->prev_bb;
2013
2014 /* create_basic_block() will append every new block to
2015 basic_block_info automatically. */
2016 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
2017 copy_basic_block->count = bb->count.apply_scale (num, den);
2018
2019 copy_gsi = gsi_start_bb (copy_basic_block);
2020
2021 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2022 {
2023 gimple_seq stmts;
2024 gimple *stmt = gsi_stmt (gsi);
2025 gimple *orig_stmt = stmt;
2026 gimple_stmt_iterator stmts_gsi;
2027 bool stmt_added = false;
2028
2029 id->regimplify = false;
2030 stmts = remap_gimple_stmt (stmt, id);
2031
2032 if (gimple_seq_empty_p (stmts))
2033 continue;
2034
2035 seq_gsi = copy_gsi;
2036
2037 for (stmts_gsi = gsi_start (stmts);
2038 !gsi_end_p (stmts_gsi); )
2039 {
2040 stmt = gsi_stmt (stmts_gsi);
2041
2042 /* Advance iterator now before stmt is moved to seq_gsi. */
2043 gsi_next (&stmts_gsi);
2044
2045 if (gimple_nop_p (stmt))
2046 continue;
2047
2048 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
2049 orig_stmt);
2050
2051 /* With return slot optimization we can end up with
2052 non-gimple (foo *)&this->m, fix that here. */
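/* I.e. a copied assignment like "lhs = (foo *) &this->m" (illustrative
   names) has a non-value conversion operand; force_gimple_operand_gsi
   below splits it into "tmp = &this->m; lhs = (foo *) tmp". */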
2053 if (is_gimple_assign (stmt)
2054 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
2055 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
2056 {
2057 tree new_rhs;
2058 new_rhs = force_gimple_operand_gsi (&seq_gsi,
2059 gimple_assign_rhs1 (stmt),
2060 true, NULL, false,
2061 GSI_CONTINUE_LINKING);
2062 gimple_assign_set_rhs1 (stmt, new_rhs);
2063 id->regimplify = false;
2064 }
2065
2066 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
2067
2068 if (id->regimplify)
2069 gimple_regimplify_operands (stmt, &seq_gsi);
2070
2071 stmt_added = true;
2072 }
2073
2074 if (!stmt_added)
2075 continue;
2076
2077 /* If copy_basic_block was empty at the start of this iteration,
2078 call gsi_start_bb again to get at the newly added statements. */
2079 if (gsi_end_p (copy_gsi))
2080 copy_gsi = gsi_start_bb (copy_basic_block);
2081 else
2082 gsi_next (&copy_gsi);
2083
2084 /* Process the new statement. The call to gimple_regimplify_operands
2085 possibly turned the statement into multiple statements; we
2086 need to process all of them. */
2087 do
2088 {
2089 tree fn;
2090 gcall *call_stmt;
2091
2092 stmt = gsi_stmt (copy_gsi);
2093 call_stmt = dyn_cast <gcall *> (stmt);
2094 if (call_stmt
2095 && gimple_call_va_arg_pack_p (call_stmt)
2096 && id->call_stmt
2097 && ! gimple_call_va_arg_pack_p (id->call_stmt))
2098 {
2099 /* __builtin_va_arg_pack () should be replaced by
2100 all arguments corresponding to ... in the caller. */
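/* E.g. inlining "int f (int x, ...)" whose body contains the call
   "bar (1, __builtin_va_arg_pack ())" into "f (5, 6, 7)" rewrites
   the inner call to "bar (1, 6, 7)" (names purely illustrative). */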
2101 tree p;
2102 gcall *new_call;
2103 vec<tree> argarray;
2104 size_t nargs = gimple_call_num_args (id->call_stmt);
2105 size_t n;
2106
2107 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2108 nargs--;
2109
2110 /* Create the new array of arguments. */
2111 n = nargs + gimple_call_num_args (call_stmt);
2112 argarray.create (n);
2113 argarray.safe_grow_cleared (n);
2114
2115 /* Copy all the arguments before '...'. */
2116 memcpy (argarray.address (),
2117 gimple_call_arg_ptr (call_stmt, 0),
2118 gimple_call_num_args (call_stmt) * sizeof (tree));
2119
2120 /* Append the arguments passed in '...'. */
2121 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
2122 gimple_call_arg_ptr (id->call_stmt, 0)
2123 + (gimple_call_num_args (id->call_stmt) - nargs),
2124 nargs * sizeof (tree));
2125
2126 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
2127 argarray);
2128
2129 argarray.release ();
2130
2131 /* Copy all GIMPLE_CALL flags, location and block, except
2132 GF_CALL_VA_ARG_PACK. */
2133 gimple_call_copy_flags (new_call, call_stmt);
2134 gimple_call_set_va_arg_pack (new_call, false);
2135 /* location includes block. */
2136 gimple_set_location (new_call, gimple_location (stmt));
2137 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
2138
2139 gsi_replace (&copy_gsi, new_call, false);
2140 stmt = new_call;
2141 }
2142 else if (call_stmt
2143 && id->call_stmt
2144 && (decl = gimple_call_fndecl (stmt))
2145 && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
2146 {
2147 /* __builtin_va_arg_pack_len () should be replaced by
2148 the number of anonymous arguments. */
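/* E.g. for "int f (int x, ...)" inlined at the call "f (5, 6, 7)",
   two anonymous arguments were passed, so a use
   "y = __builtin_va_arg_pack_len ()" becomes "y = 2" (illustrative
   names). */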
2149 size_t nargs = gimple_call_num_args (id->call_stmt);
2150 tree count, p;
2151 gimple *new_stmt;
2152
2153 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2154 nargs--;
2155
2156 if (!gimple_call_lhs (stmt))
2157 {
2158 /* Drop unused calls. */
2159 gsi_remove (&copy_gsi, false);
2160 continue;
2161 }
2162 else if (!gimple_call_va_arg_pack_p (id->call_stmt))
2163 {
2164 count = build_int_cst (integer_type_node, nargs);
2165 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
2166 gsi_replace (&copy_gsi, new_stmt, false);
2167 stmt = new_stmt;
2168 }
2169 else if (nargs != 0)
2170 {
2171 tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
2172 count = build_int_cst (integer_type_node, nargs);
2173 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2174 PLUS_EXPR, newlhs, count);
2175 gimple_call_set_lhs (stmt, newlhs);
2176 gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
2177 }
2178 }
2179 else if (call_stmt
2180 && id->call_stmt
2181 && gimple_call_internal_p (stmt)
2182 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
2183 {
2184 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
2185 gsi_remove (&copy_gsi, false);
2186 continue;
2187 }
2188
2189 /* Statements produced by inlining can be unfolded, especially
2190 when we constant propagated some operands. We can't fold
2191 them right now for two reasons:
2192 1) folding requires SSA_NAME_DEF_STMTs to be correct
2193 2) we can't change function calls to builtins.
2194 So we just mark the statement for later folding. We mark
2195 all new statements, instead of just the statements that changed
2196 by some nontrivial substitution, so even statements made
2197 foldable indirectly are updated. If this turns out to be
2198 expensive, copy_body can be told to watch for nontrivial
2199 changes. */
2200 if (id->statements_to_fold)
2201 id->statements_to_fold->add (stmt);
2202
2203 /* We're duplicating a CALL_EXPR. Find any corresponding
2204 callgraph edges and update or duplicate them. */
2205 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2206 {
2207 struct cgraph_edge *edge;
2208
2209 switch (id->transform_call_graph_edges)
2210 {
2211 case CB_CGE_DUPLICATE:
2212 edge = id->src_node->get_edge (orig_stmt);
2213 if (edge)
2214 {
2215 struct cgraph_edge *old_edge = edge;
2216
2217 /* A speculative call consists of multiple
2218 edges - an indirect edge and one or more direct edges.
2219 Duplicate the whole thing and distribute frequencies
2220 accordingly. */
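/* As a sketch: if the direct edge carries a count of 80 and the
   indirect edge 20, the cloned direct edge receives probability
   80 / (80 + 20) = 0.8 of the copied block's count. */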
2221 if (edge->speculative)
2222 {
2223 int n = 0;
2224 profile_count direct_cnt
2225 = profile_count::zero ();
2226
2227 /* First figure out the distribution of counts
2228 so we can re-scale BB profile accordingly. */
2229 for (cgraph_edge *e = old_edge; e;
2230 e = e->next_speculative_call_target ())
2231 direct_cnt = direct_cnt + e->count;
2232
2233 cgraph_edge *indirect
2234 = old_edge->speculative_call_indirect_edge ();
2235 profile_count indir_cnt = indirect->count;
2236
2237 /* Next iterate over all direct edges, clone each one and its
2238 corresponding reference, and update the profile. */
2239 for (cgraph_edge *e = old_edge;
2240 e;
2241 e = e->next_speculative_call_target ())
2242 {
2243 profile_count cnt = e->count;
2244
2245 id->dst_node->clone_reference
2246 (e->speculative_call_target_ref (), stmt);
2247 edge = e->clone (id->dst_node, call_stmt,
2248 gimple_uid (stmt), num, den,
2249 true);
2250 profile_probability prob
2251 = cnt.probability_in (direct_cnt
2252 + indir_cnt);
2253 edge->count
2254 = copy_basic_block->count.apply_probability
2255 (prob);
2256 n++;
2257 }
2258 gcc_checking_assert
2259 (indirect->num_speculative_call_targets_p ()
2260 == n);
2261
2262 /* Duplicate the indirect edge after all direct edges are
2263 cloned. */
2264 indirect = indirect->clone (id->dst_node, call_stmt,
2265 gimple_uid (stmt),
2266 num, den,
2267 true);
2268
2269 profile_probability prob
2270 = indir_cnt.probability_in (direct_cnt
2271 + indir_cnt);
2272 indirect->count
2273 = copy_basic_block->count.apply_probability (prob);
2274 }
2275 else
2276 {
2277 edge = edge->clone (id->dst_node, call_stmt,
2278 gimple_uid (stmt),
2279 num, den,
2280 true);
2281 edge->count = copy_basic_block->count;
2282 }
2283 }
2284 break;
2285
2286 case CB_CGE_MOVE_CLONES:
2287 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2288 call_stmt);
2289 edge = id->dst_node->get_edge (stmt);
2290 break;
2291
2292 case CB_CGE_MOVE:
2293 edge = id->dst_node->get_edge (orig_stmt);
2294 if (edge)
2295 edge = cgraph_edge::set_call_stmt (edge, call_stmt);
2296 break;
2297
2298 default:
2299 gcc_unreachable ();
2300 }
2301
2302 /* Constant propagation on arguments done during inlining
2303 may create a new direct call. Produce an edge for it. */
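/* E.g. if a function pointer parameter was constant propagated to
   "&f" during inlining, the copied indirect call through it becomes
   the direct call "f ()", for which no edge exists yet (f being a
   hypothetical callee). */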
2304 if ((!edge
2305 || (edge->indirect_inlining_edge
2306 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2307 && id->dst_node->definition
2308 && (fn = gimple_call_fndecl (stmt)) != NULL)
2309 {
2310 struct cgraph_node *dest = cgraph_node::get_create (fn);
2311
2312 /* We have a missing edge in the callgraph. This can happen
2313 when previous inlining turned an indirect call into a
2314 direct call by constant propagating arguments or when we are
2315 producing a dead clone (for further cloning). In all
2316 other cases we hit a bug (incorrect node sharing is the
2317 most common reason for missing edges). */
2318 gcc_assert (!dest->definition
2319 || dest->address_taken
2320 || !id->src_node->definition
2321 || !id->dst_node->definition);
2322 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2323 id->dst_node->create_edge_including_clones
2324 (dest, orig_stmt, call_stmt, bb->count,
2325 CIF_ORIGINALLY_INDIRECT_CALL);
2326 else
2327 id->dst_node->create_edge (dest, call_stmt,
2328 bb->count)->inline_failed
2329 = CIF_ORIGINALLY_INDIRECT_CALL;
2330 if (dump_file)
2331 {
2332 fprintf (dump_file, "Created new direct edge to %s\n",
2333 dest->dump_name ());
2334 }
2335 }
2336
2337 notice_special_calls (as_a <gcall *> (stmt));
2338 }
2339
2340 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2341 id->eh_map, id->eh_lp_nr);
2342
2343 gsi_next (&copy_gsi);
2344 }
2345 while (!gsi_end_p (copy_gsi));
2346
2347 copy_gsi = gsi_last_bb (copy_basic_block);
2348 }
2349
2350 return copy_basic_block;
2351 }
2352
2353 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
2354 form is quite easy, since the dominator relationship for the old basic blocks
2355 does not change.
2356
2357 There is however an exception where inlining might change the dominator
2358 relation across EH edges from basic blocks within the inlined function
2359 to landing pads in the function we inline into.
2360
2361 The function fills in PHI_RESULTs of such PHI nodes if they refer
2362 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
2363 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2364 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2365 set, and this means that there will be no overlapping live ranges
2366 for the underlying symbol.
2367
2368 This might change in the future if we allow redirecting of EH edges and
2369 we might then want to change the way we build the CFG pre-inlining to
2370 include all the possible edges. */
2371 static void
2372 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2373 bool can_throw, bool nonlocal_goto)
2374 {
2375 edge e;
2376 edge_iterator ei;
2377
2378 FOR_EACH_EDGE (e, ei, bb->succs)
2379 if (!e->dest->aux
2380 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2381 {
2382 gphi *phi;
2383 gphi_iterator si;
2384
2385 if (!nonlocal_goto)
2386 gcc_assert (e->flags & EDGE_EH);
2387
2388 if (!can_throw)
2389 gcc_assert (!(e->flags & EDGE_EH));
2390
2391 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2392 {
2393 edge re;
2394
2395 phi = si.phi ();
2396
2397 /* For abnormal goto/call edges the receiver can be the
2398 ENTRY_BLOCK. Do not assert this cannot happen. */
2399
2400 gcc_assert ((e->flags & EDGE_EH)
2401 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2402
2403 re = find_edge (ret_bb, e->dest);
2404 gcc_checking_assert (re);
2405 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2406 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2407
2408 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2409 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2410 }
2411 }
2412 }
2413
2414 /* Insert clobbers for automatic variables of inlined ID->src_fn
2415 function at the start of basic block ID->eh_landing_pad_dest. */
2416
2417 static void
2418 add_clobbers_to_eh_landing_pad (copy_body_data *id)
2419 {
2420 tree var;
2421 basic_block bb = id->eh_landing_pad_dest;
2422 live_vars_map *vars = NULL;
2423 unsigned int cnt = 0;
2424 unsigned int i;
2425 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2426 if (VAR_P (var)
2427 && !DECL_HARD_REGISTER (var)
2428 && !TREE_THIS_VOLATILE (var)
2429 && !DECL_HAS_VALUE_EXPR_P (var)
2430 && !is_gimple_reg (var)
2431 && auto_var_in_fn_p (var, id->src_fn)
2432 && !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2433 {
2434 tree *t = id->decl_map->get (var);
2435 if (!t)
2436 continue;
2437 tree new_var = *t;
2438 if (VAR_P (new_var)
2439 && !DECL_HARD_REGISTER (new_var)
2440 && !TREE_THIS_VOLATILE (new_var)
2441 && !DECL_HAS_VALUE_EXPR_P (new_var)
2442 && !is_gimple_reg (new_var)
2443 && auto_var_in_fn_p (new_var, id->dst_fn))
2444 {
2445 if (vars == NULL)
2446 vars = new live_vars_map;
2447 vars->put (DECL_UID (var), cnt++);
2448 }
2449 }
2450 if (vars == NULL)
2451 return;
2452
2453 vec<bitmap_head> live = compute_live_vars (id->src_cfun, vars);
2454 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2455 if (VAR_P (var))
2456 {
2457 edge e;
2458 edge_iterator ei;
2459 bool needed = false;
2460 unsigned int *v = vars->get (DECL_UID (var));
2461 if (v == NULL)
2462 continue;
2463 FOR_EACH_EDGE (e, ei, bb->preds)
2464 if ((e->flags & EDGE_EH) != 0
2465 && e->src->index >= id->add_clobbers_to_eh_landing_pads)
2466 {
2467 basic_block src_bb = (basic_block) e->src->aux;
2468
2469 if (bitmap_bit_p (&live[src_bb->index], *v))
2470 {
2471 needed = true;
2472 break;
2473 }
2474 }
2475 if (needed)
2476 {
2477 tree new_var = *id->decl_map->get (var);
2478 gimple_stmt_iterator gsi = gsi_after_labels (bb);
2479 tree clobber = build_clobber (TREE_TYPE (new_var));
2480 gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2481 gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2482 }
2483 }
2484 destroy_live_vars (live);
2485 delete vars;
2486 }
2487
2488 /* Copy edges from BB into its copy constructed earlier, scale profile
2489 accordingly. Edges will be taken care of later. Assume aux
2490 pointers point to the copies of each BB. Return true if any
2491 debug stmts are left after a statement that must end the basic block. */
2492
2493 static bool
2494 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2495 basic_block ret_bb, basic_block abnormal_goto_dest,
2496 copy_body_data *id)
2497 {
2498 basic_block new_bb = (basic_block) bb->aux;
2499 edge_iterator ei;
2500 edge old_edge;
2501 gimple_stmt_iterator si;
2502 bool need_debug_cleanup = false;
2503
2504 /* Use the indices from the original blocks to create edges for the
2505 new ones. */
2506 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2507 if (!(old_edge->flags & EDGE_EH))
2508 {
2509 edge new_edge;
2510 int flags = old_edge->flags;
2511 location_t locus = old_edge->goto_locus;
2512
2513 /* Return edges do get a FALLTHRU flag when they get inlined. */
2514 if (old_edge->dest->index == EXIT_BLOCK
2515 && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2516 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2517 flags |= EDGE_FALLTHRU;
2518
2519 new_edge
2520 = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2521 new_edge->probability = old_edge->probability;
2522 if (!id->reset_location)
2523 new_edge->goto_locus = remap_location (locus, id);
2524 }
2525
2526 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2527 return false;
2528
2529 /* When doing function splitting, we must decrease the count of the return
2530 block which was previously reachable by a block we did not copy. */
2531 if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2532 FOR_EACH_EDGE (old_edge, ei, bb->preds)
2533 if (old_edge->src->index != ENTRY_BLOCK
2534 && !old_edge->src->aux)
2535 new_bb->count -= old_edge->count ().apply_scale (num, den);
2536
2537 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2538 {
2539 gimple *copy_stmt;
2540 bool can_throw, nonlocal_goto;
2541
2542 copy_stmt = gsi_stmt (si);
2543 if (!is_gimple_debug (copy_stmt))
2544 update_stmt (copy_stmt);
2545
2546 /* Do this before the possible split_block. */
2547 gsi_next (&si);
2548
2549 /* If this tree could throw an exception, there are two
2550 cases where we need to add abnormal edge(s): the
2551 tree wasn't in a region and there is a "current
2552 region" in the caller; or the original tree had
2553 EH edges. In both cases split the block after the tree,
2554 and add abnormal edge(s) as needed; we need both
2555 those from the callee and the caller.
2556 We check whether the copy can throw, because the const
2557 propagation can change an INDIRECT_REF which throws
2558 into a COMPONENT_REF which doesn't. If the copy
2559 can throw, the original could also throw. */
2560 can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2561 nonlocal_goto
2562 = (stmt_can_make_abnormal_goto (copy_stmt)
2563 && !computed_goto_p (copy_stmt));
2564
2565 if (can_throw || nonlocal_goto)
2566 {
2567 if (!gsi_end_p (si))
2568 {
2569 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2570 gsi_next (&si);
2571 if (gsi_end_p (si))
2572 need_debug_cleanup = true;
2573 }
2574 if (!gsi_end_p (si))
2575 /* Note that bb's predecessor edges aren't necessarily
2576 right at this point; split_block doesn't care. */
2577 {
2578 edge e = split_block (new_bb, copy_stmt);
2579
2580 new_bb = e->dest;
2581 new_bb->aux = e->src->aux;
2582 si = gsi_start_bb (new_bb);
2583 }
2584 }
2585
2586 bool update_probs = false;
2587
2588 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2589 {
2590 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2591 update_probs = true;
2592 }
2593 else if (can_throw)
2594 {
2595 make_eh_edges (copy_stmt);
2596 update_probs = true;
2597 }
2598
2599 /* EH edges may not match old edges. Copy as much as possible. */
2600 if (update_probs)
2601 {
2602 edge e;
2603 edge_iterator ei;
2604 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2605
2606 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2607 if ((old_edge->flags & EDGE_EH)
2608 && (e = find_edge (copy_stmt_bb,
2609 (basic_block) old_edge->dest->aux))
2610 && (e->flags & EDGE_EH))
2611 e->probability = old_edge->probability;
2612
2613 FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2614 if (e->flags & EDGE_EH)
2615 {
2616 if (!e->probability.initialized_p ())
2617 e->probability = profile_probability::never ();
2618 if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2619 {
2620 if (id->eh_landing_pad_dest == NULL)
2621 id->eh_landing_pad_dest = e->dest;
2622 else
2623 gcc_assert (id->eh_landing_pad_dest == e->dest);
2624 }
2625 }
2626 }
2627
2628
2629 /* If the call we inline cannot make an abnormal goto, do not add
2630 additional abnormal edges but only retain those already present
2631 in the original function body. */
2632 if (abnormal_goto_dest == NULL)
2633 nonlocal_goto = false;
2634 if (nonlocal_goto)
2635 {
2636 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2637
2638 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2639 nonlocal_goto = false;
2640 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2641 in OpenMP regions which aren't allowed to be left abnormally.
2642 So, no need to add abnormal edge in that case. */
2643 else if (is_gimple_call (copy_stmt)
2644 && gimple_call_internal_p (copy_stmt)
2645 && (gimple_call_internal_fn (copy_stmt)
2646 == IFN_ABNORMAL_DISPATCHER)
2647 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2648 nonlocal_goto = false;
2649 else
2650 make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2651 EDGE_ABNORMAL);
2652 }
2653
2654 if ((can_throw || nonlocal_goto)
2655 && gimple_in_ssa_p (cfun))
2656 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2657 can_throw, nonlocal_goto);
2658 }
2659 return need_debug_cleanup;
2660 }
2661
2662 /* Copy the PHIs. All blocks and edges are copied, some blocks
2663 were possibly split and new outgoing EH edges inserted.
2664 BB points to the block of the original function and AUX pointers link
2665 the original and newly copied blocks. */
2666
2667 static void
2668 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2669 {
2670 basic_block const new_bb = (basic_block) bb->aux;
2671 edge_iterator ei;
2672 gphi *phi;
2673 gphi_iterator si;
2674 edge new_edge;
2675 bool inserted = false;
2676
2677 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2678 {
2679 tree res, new_res;
2680 gphi *new_phi;
2681
2682 phi = si.phi ();
2683 res = PHI_RESULT (phi);
2684 new_res = res;
2685 if (!virtual_operand_p (res))
2686 {
2687 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2688 if (EDGE_COUNT (new_bb->preds) == 0)
2689 {
2690 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2691 SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2692 }
2693 else
2694 {
2695 new_phi = create_phi_node (new_res, new_bb);
2696 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2697 {
2698 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2699 bb);
2700 tree arg;
2701 tree new_arg;
2702 edge_iterator ei2;
2703 location_t locus;
2704
2705 /* When doing partial cloning, we allow PHIs on the entry
2706 block as long as all the arguments are the same.
2707 Find any input edge to see which argument to copy. */
2708 if (!old_edge)
2709 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2710 if (!old_edge->src->aux)
2711 break;
2712
2713 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2714 new_arg = arg;
2715 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2716 gcc_assert (new_arg);
2717 /* With return slot optimization we can end up with
2718 non-gimple (foo *)&this->m, fix that here. */
2719 if (TREE_CODE (new_arg) != SSA_NAME
2720 && TREE_CODE (new_arg) != FUNCTION_DECL
2721 && !is_gimple_val (new_arg))
2722 {
2723 gimple_seq stmts = NULL;
2724 new_arg = force_gimple_operand (new_arg, &stmts, true,
2725 NULL);
2726 gsi_insert_seq_on_edge (new_edge, stmts);
2727 inserted = true;
2728 }
2729 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2730 if (id->reset_location)
2731 locus = input_location;
2732 else
2733 locus = remap_location (locus, id);
2734 add_phi_arg (new_phi, new_arg, new_edge, locus);
2735 }
2736 }
2737 }
2738 }
2739
2740 /* Commit the delayed edge insertions. */
2741 if (inserted)
2742 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2743 gsi_commit_one_edge_insert (new_edge, NULL);
2744 }
2745
2746
2747 /* Wrapper for remap_decl so it can be used as a callback. */
2748
2749 static tree
2750 remap_decl_1 (tree decl, void *data)
2751 {
2752 return remap_decl (decl, (copy_body_data *) data);
2753 }
2754
2755 /* Build the struct function and associated data structures for the new clone
2756 NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function changes
2757 cfun to the function of new_fndecl (and current_function_decl too). */
2758
2759 static void
2760 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2761 {
2762 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2763
2764 if (!DECL_ARGUMENTS (new_fndecl))
2765 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2766 if (!DECL_RESULT (new_fndecl))
2767 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2768
2769 /* Register specific tree functions. */
2770 gimple_register_cfg_hooks ();
2771
2772 /* Get clean struct function. */
2773 push_struct_function (new_fndecl);
2774
2775 /* We will rebuild these, so just sanity check that they are empty. */
2776 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2777 gcc_assert (cfun->local_decls == NULL);
2778 gcc_assert (cfun->cfg == NULL);
2779 gcc_assert (cfun->decl == new_fndecl);
2780
2781 /* Copy items we preserve during cloning. */
2782 cfun->static_chain_decl = src_cfun->static_chain_decl;
2783 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2784 cfun->function_end_locus = src_cfun->function_end_locus;
2785 cfun->curr_properties = src_cfun->curr_properties;
2786 cfun->last_verified = src_cfun->last_verified;
2787 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2788 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2789 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2790 cfun->calls_eh_return = src_cfun->calls_eh_return;
2791 cfun->stdarg = src_cfun->stdarg;
2792 cfun->after_inlining = src_cfun->after_inlining;
2793 cfun->can_throw_non_call_exceptions
2794 = src_cfun->can_throw_non_call_exceptions;
2795 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2796 cfun->returns_struct = src_cfun->returns_struct;
2797 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2798
2799 init_empty_tree_cfg ();
2800
2801 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2802
2803 profile_count num = count;
2804 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2805 profile_count::adjust_for_ipa_scaling (&num, &den);
2806
2807 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2808 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2809 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2810 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2811 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2812 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2813 if (src_cfun->eh)
2814 init_eh_for_function ();
2815
2816 if (src_cfun->gimple_df)
2817 {
2818 init_tree_ssa (cfun);
2819 cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2820 if (cfun->gimple_df->in_ssa_p)
2821 init_ssa_operands (cfun);
2822 }
2823 }
2824
2825 /* Helper function for copy_cfg_body. Move debug stmts from the end
2826 of NEW_BB to the beginning of successor basic blocks when needed. If the
2827 successor has multiple predecessors, reset the debug stmts' values,
2828 otherwise keep them. */
2829
2830 static void
2831 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2832 {
2833 edge e;
2834 edge_iterator ei;
2835 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2836
2837 if (gsi_end_p (si)
2838 || gsi_one_before_end_p (si)
2839 || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2840 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2841 return;
2842
2843 FOR_EACH_EDGE (e, ei, new_bb->succs)
2844 {
2845 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2846 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2847 while (is_gimple_debug (gsi_stmt (ssi)))
2848 {
2849 gimple *stmt = gsi_stmt (ssi);
2850 gdebug *new_stmt;
2851 tree var;
2852 tree value;
2853
2854 /* For the last edge move the debug stmts instead of copying
2855 them. */
2856 if (ei_one_before_end_p (ei))
2857 {
2858 si = ssi;
2859 gsi_prev (&ssi);
2860 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2861 {
2862 gimple_debug_bind_reset_value (stmt);
2863 gimple_set_location (stmt, UNKNOWN_LOCATION);
2864 }
2865 gsi_remove (&si, false);
2866 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2867 continue;
2868 }
2869
2870 if (gimple_debug_bind_p (stmt))
2871 {
2872 var = gimple_debug_bind_get_var (stmt);
2873 if (single_pred_p (e->dest))
2874 {
2875 value = gimple_debug_bind_get_value (stmt);
2876 value = unshare_expr (value);
2877 new_stmt = gimple_build_debug_bind (var, value, stmt);
2878 }
2879 else
2880 new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2881 }
2882 else if (gimple_debug_source_bind_p (stmt))
2883 {
2884 var = gimple_debug_source_bind_get_var (stmt);
2885 value = gimple_debug_source_bind_get_value (stmt);
2886 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2887 }
2888 else if (gimple_debug_nonbind_marker_p (stmt))
2889 new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2890 else
2891 gcc_unreachable ();
2892 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2893 id->debug_stmts.safe_push (new_stmt);
2894 gsi_prev (&ssi);
2895 }
2896 }
2897 }
2898
2899 /* Make a copy of the sub-loops of SRC_PARENT and place them
2900 as children of DEST_PARENT. */
2901
2902 static void
2903 copy_loops (copy_body_data *id,
2904 class loop *dest_parent, class loop *src_parent)
2905 {
2906 class loop *src_loop = src_parent->inner;
2907 while (src_loop)
2908 {
2909 if (!id->blocks_to_copy
2910 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2911 {
2912 class loop *dest_loop = alloc_loop ();
2913
2914 /* Assign the new loop its header and latch and associate
2915 those with the new loop. */
2916 dest_loop->header = (basic_block)src_loop->header->aux;
2917 dest_loop->header->loop_father = dest_loop;
2918 if (src_loop->latch != NULL)
2919 {
2920 dest_loop->latch = (basic_block)src_loop->latch->aux;
2921 dest_loop->latch->loop_father = dest_loop;
2922 }
2923
2924 /* Copy loop meta-data. */
2925 copy_loop_info (src_loop, dest_loop);
2926 if (dest_loop->unroll)
2927 cfun->has_unroll = true;
2928 if (dest_loop->force_vectorize)
2929 cfun->has_force_vectorize_loops = true;
2930 if (id->src_cfun->last_clique != 0)
2931 dest_loop->owned_clique
2932 = remap_dependence_clique (id,
2933 src_loop->owned_clique
2934 ? src_loop->owned_clique : 1);
2935
2936 /* Finally place it into the loop array and the loop tree. */
2937 place_new_loop (cfun, dest_loop);
2938 flow_loop_tree_node_add (dest_parent, dest_loop);
2939
2940 if (src_loop->simduid)
2941 {
2942 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2943 cfun->has_simduid_loops = true;
2944 }
2945
2946 /* Recurse. */
2947 copy_loops (id, dest_loop, src_loop);
2948 }
2949 src_loop = src_loop->next;
2950 }
2951 }
2952
2953 /* Call redirect_call_stmt_to_callee on all calls in BB. */
2954
2955 void
2956 redirect_all_calls (copy_body_data * id, basic_block bb)
2957 {
2958 gimple_stmt_iterator si;
2959 gimple *last = last_stmt (bb);
2960 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2961 {
2962 gimple *stmt = gsi_stmt (si);
2963 if (is_gimple_call (stmt))
2964 {
2965 tree old_lhs = gimple_call_lhs (stmt);
2966 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2967 if (edge)
2968 {
2969 gimple *new_stmt
2970 = cgraph_edge::redirect_call_stmt_to_callee (edge);
2971 /* If the IPA-SRA transformation, run as part of edge redirection,
2972 removed the LHS because it is unused, save it to
2973 killed_new_ssa_names so that we can prune it from debug
2974 statements. */
2975 if (old_lhs
2976 && TREE_CODE (old_lhs) == SSA_NAME
2977 && !gimple_call_lhs (new_stmt))
2978 {
2979 if (!id->killed_new_ssa_names)
2980 id->killed_new_ssa_names = new hash_set<tree> (16);
2981 id->killed_new_ssa_names->add (old_lhs);
2982 }
2983
2984 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2985 gimple_purge_dead_eh_edges (bb);
2986 }
2987 }
2988 }
2989 }
2990
2991 /* Make a copy of the body of FN so that it can be inserted inline in
2992 another function. Walks FN via its CFG and returns the new fndecl. */
2993
2994 static tree
2995 copy_cfg_body (copy_body_data * id,
2996 basic_block entry_block_map, basic_block exit_block_map,
2997 basic_block new_entry)
2998 {
2999 tree callee_fndecl = id->src_fn;
3000 /* Original cfun for the callee, doesn't change. */
3001 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
3002 struct function *cfun_to_copy;
3003 basic_block bb;
3004 tree new_fndecl = NULL;
3005 bool need_debug_cleanup = false;
3006 int last;
3007 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
3008 profile_count num = entry_block_map->count;
3009
3010 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
3011
3012 /* Register specific tree functions. */
3013 gimple_register_cfg_hooks ();
3014
3015 /* If we are inlining just a region of the function, make sure to connect
3016 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can be
3017 part of a loop, we must compute the frequency and probability of
3018 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
3019 probabilities of edges incoming from the nonduplicated region. */
3020 if (new_entry)
3021 {
3022 edge e;
3023 edge_iterator ei;
3024 den = profile_count::zero ();
3025
3026 FOR_EACH_EDGE (e, ei, new_entry->preds)
3027 if (!e->src->aux)
3028 den += e->count ();
3029 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
3030 }
3031
3032 profile_count::adjust_for_ipa_scaling (&num, &den);
3033
3034 /* Must have a CFG here at this point. */
3035 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
3036 (DECL_STRUCT_FUNCTION (callee_fndecl)));
3037
3038
3039 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
3040 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
3041 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
3042 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
3043
3044 /* Duplicate any exception-handling regions. */
3045 if (cfun->eh)
3046 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
3047 remap_decl_1, id);
3048
3049 /* Use aux pointers to map the original blocks to their copies. */
3050 FOR_EACH_BB_FN (bb, cfun_to_copy)
3051 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
3052 {
3053 basic_block new_bb = copy_bb (id, bb, num, den);
3054 bb->aux = new_bb;
3055 new_bb->aux = bb;
3056 new_bb->loop_father = entry_block_map->loop_father;
3057 }
3058
3059 last = last_basic_block_for_fn (cfun);
3060
3061 /* Now that we've duplicated the blocks, duplicate their edges. */
3062 basic_block abnormal_goto_dest = NULL;
3063 if (id->call_stmt
3064 && stmt_can_make_abnormal_goto (id->call_stmt))
3065 {
3066 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
3067
3068 bb = gimple_bb (id->call_stmt);
3069 gsi_next (&gsi);
3070 if (gsi_end_p (gsi))
3071 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
3072 }
3073 FOR_ALL_BB_FN (bb, cfun_to_copy)
3074 if (!id->blocks_to_copy
3075 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3076 need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
3077 abnormal_goto_dest, id);
3078
3079 if (id->eh_landing_pad_dest)
3080 {
3081 add_clobbers_to_eh_landing_pad (id);
3082 id->eh_landing_pad_dest = NULL;
3083 }
3084
3085 if (new_entry)
3086 {
3087 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
3088 EDGE_FALLTHRU);
3089 e->probability = profile_probability::always ();
3090 }
3091
3092 /* Duplicate the loop tree, if available and wanted. */
3093 if (loops_for_fn (src_cfun) != NULL
3094 && current_loops != NULL)
3095 {
3096 copy_loops (id, entry_block_map->loop_father,
3097 get_loop (src_cfun, 0));
3098 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
3099 loops_state_set (LOOPS_NEED_FIXUP);
3100 }
3101
3102 /* If the loop tree in the source function needed fixup, mark the
3103 destination loop tree for fixup, too. */
3104 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
3105 loops_state_set (LOOPS_NEED_FIXUP);
3106
3107 if (gimple_in_ssa_p (cfun))
3108 FOR_ALL_BB_FN (bb, cfun_to_copy)
3109 if (!id->blocks_to_copy
3110 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3111 copy_phis_for_bb (bb, id);
3112
3113 FOR_ALL_BB_FN (bb, cfun_to_copy)
3114 if (bb->aux)
3115 {
3116 if (need_debug_cleanup
3117 && bb->index != ENTRY_BLOCK
3118 && bb->index != EXIT_BLOCK)
3119 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
3120 /* Update call edge destinations. This cannot be done before loop
3121 info is updated, because we may split basic blocks. */
3122 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
3123 && bb->index != ENTRY_BLOCK
3124 && bb->index != EXIT_BLOCK)
3125 redirect_all_calls (id, (basic_block)bb->aux);
3126 ((basic_block)bb->aux)->aux = NULL;
3127 bb->aux = NULL;
3128 }
3129
3130 /* Zero out AUX fields of blocks newly created during EH edge
3131 insertion. */
3132 for (; last < last_basic_block_for_fn (cfun); last++)
3133 {
3134 if (need_debug_cleanup)
3135 maybe_move_debug_stmts_to_successors (id,
3136 BASIC_BLOCK_FOR_FN (cfun, last));
3137 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
3138 /* Update call edge destinations. This cannot be done before loop
3139 info is updated, because we may split basic blocks. */
3140 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
3141 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
3142 }
3143 entry_block_map->aux = NULL;
3144 exit_block_map->aux = NULL;
3145
3146 if (id->eh_map)
3147 {
3148 delete id->eh_map;
3149 id->eh_map = NULL;
3150 }
3151 if (id->dependence_map)
3152 {
3153 delete id->dependence_map;
3154 id->dependence_map = NULL;
3155 }
3156
3157 return new_fndecl;
3158 }
3159
3160 /* Copy the debug STMT using ID. We deal with these statements in a
3161 special way: if any variable in their VALUE expression wasn't
3162 remapped yet, we won't remap it, because that would get decl uids
3163 out of sync, causing codegen differences between -g and -g0. If
3164 this arises, we drop the VALUE expression altogether. */
3165
3166 static void
3167 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
3168 {
3169 tree t, *n;
3170 struct walk_stmt_info wi;
3171
3172 if (tree block = gimple_block (stmt))
3173 {
3174 n = id->decl_map->get (block);
3175 gimple_set_block (stmt, n ? *n : id->block);
3176 }
3177
3178 if (gimple_debug_nonbind_marker_p (stmt))
3179 return;
3180
3181 /* Remap all the operands in COPY. */
3182 memset (&wi, 0, sizeof (wi));
3183 wi.info = id;
3184
3185 processing_debug_stmt = 1;
3186
3187 if (gimple_debug_source_bind_p (stmt))
3188 t = gimple_debug_source_bind_get_var (stmt);
3189 else if (gimple_debug_bind_p (stmt))
3190 t = gimple_debug_bind_get_var (stmt);
3191 else
3192 gcc_unreachable ();
3193
3194 if (TREE_CODE (t) == PARM_DECL && id->debug_map
3195 && (n = id->debug_map->get (t)))
3196 {
3197 gcc_assert (VAR_P (*n));
3198 t = *n;
3199 }
3200 else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
3201 /* T is a non-localized variable. */;
3202 else
3203 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
3204
3205 if (gimple_debug_bind_p (stmt))
3206 {
3207 gimple_debug_bind_set_var (stmt, t);
3208
3209 if (gimple_debug_bind_has_value_p (stmt))
3210 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
3211 remap_gimple_op_r, &wi, NULL);
3212
3213 /* Punt if any decl couldn't be remapped. */
3214 if (processing_debug_stmt < 0)
3215 gimple_debug_bind_reset_value (stmt);
3216 }
3217 else if (gimple_debug_source_bind_p (stmt))
3218 {
3219 gimple_debug_source_bind_set_var (stmt, t);
3220 /* When inlining, if the source bind refers to one of the optimized
3221 away parameters, change the source bind into a normal debug bind
3222 referring to the corresponding DEBUG_EXPR_DECL that should have
3223 been bound before the call stmt. */
3224 t = gimple_debug_source_bind_get_value (stmt);
3225 if (t != NULL_TREE
3226 && TREE_CODE (t) == PARM_DECL
3227 && id->call_stmt)
3228 {
3229 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
3230 unsigned int i;
3231 if (debug_args != NULL)
3232 {
3233 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
3234 if ((**debug_args)[i] == DECL_ORIGIN (t)
3235 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
3236 {
3237 t = (**debug_args)[i + 1];
3238 stmt->subcode = GIMPLE_DEBUG_BIND;
3239 gimple_debug_bind_set_value (stmt, t);
3240 break;
3241 }
3242 }
3243 }
3244 if (gimple_debug_source_bind_p (stmt))
3245 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
3246 remap_gimple_op_r, &wi, NULL);
3247 }
3248
3249 processing_debug_stmt = 0;
3250
3251 update_stmt (stmt);
3252 }
3253
3254 /* Process deferred debug stmts. In order to give values better odds
3255 of being successfully remapped, we delay the processing of debug
3256 stmts until all other stmts that might require remapping are
3257 processed. */
3258
3259 static void
3260 copy_debug_stmts (copy_body_data *id)
3261 {
3262 size_t i;
3263 gdebug *stmt;
3264
3265 if (!id->debug_stmts.exists ())
3266 return;
3267
3268 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
3269 copy_debug_stmt (stmt, id);
3270
3271 id->debug_stmts.release ();
3272 }
3273
3274 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
3275 another function. */
3276
3277 static tree
3278 copy_tree_body (copy_body_data *id)
3279 {
3280 tree fndecl = id->src_fn;
3281 tree body = DECL_SAVED_TREE (fndecl);
3282
3283 walk_tree (&body, copy_tree_body_r, id, NULL);
3284
3285 return body;
3286 }
3287
3288 /* Make a copy of the body of FN so that it can be inserted inline in
3289 another function. */
3290
3291 static tree
3292 copy_body (copy_body_data *id,
3293 basic_block entry_block_map, basic_block exit_block_map,
3294 basic_block new_entry)
3295 {
3296 tree fndecl = id->src_fn;
3297 tree body;
3298
3299 /* If this body has a CFG, walk CFG and copy. */
3300 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3301 body = copy_cfg_body (id, entry_block_map, exit_block_map,
3302 new_entry);
3303 copy_debug_stmts (id);
3304 delete id->killed_new_ssa_names;
3305 id->killed_new_ssa_names = NULL;
3306
3307 return body;
3308 }
3309
3310 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3311 defined in function FN, or of a data member thereof. */
3312
3313 static bool
3314 self_inlining_addr_expr (tree value, tree fn)
3315 {
3316 tree var;
3317
3318 if (TREE_CODE (value) != ADDR_EXPR)
3319 return false;
3320
3321 var = get_base_address (TREE_OPERAND (value, 0));
3322
3323 return var && auto_var_in_fn_p (var, fn);
3324 }
3325
3326 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3327 lexical block and line number information from base_stmt, if given,
3328 or from the last stmt of the block otherwise. */
3329
3330 static gimple *
3331 insert_init_debug_bind (copy_body_data *id,
3332 basic_block bb, tree var, tree value,
3333 gimple *base_stmt)
3334 {
3335 gimple *note;
3336 gimple_stmt_iterator gsi;
3337 tree tracked_var;
3338
3339 if (!gimple_in_ssa_p (id->src_cfun))
3340 return NULL;
3341
3342 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3343 return NULL;
3344
3345 tracked_var = target_for_debug_bind (var);
3346 if (!tracked_var)
3347 return NULL;
3348
3349 if (bb)
3350 {
3351 gsi = gsi_last_bb (bb);
3352 if (!base_stmt && !gsi_end_p (gsi))
3353 base_stmt = gsi_stmt (gsi);
3354 }
3355
3356 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3357
3358 if (bb)
3359 {
3360 if (!gsi_end_p (gsi))
3361 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3362 else
3363 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3364 }
3365
3366 return note;
3367 }
3368
3369 static void
3370 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3371 {
3372 /* If VAR represents a zero-sized variable, it's possible that the
3373 assignment statement may result in no gimple statements. */
3374 if (init_stmt)
3375 {
3376 gimple_stmt_iterator si = gsi_last_bb (bb);
3377
3378 /* We can end up with init statements that store to a non-register
3379 from a rhs with a conversion. Handle that here by forcing the
3380 rhs into a temporary. gimple_regimplify_operands is not
3381 prepared to do this for us. */
3382 if (!is_gimple_debug (init_stmt)
3383 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3384 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3385 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3386 {
3387 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3388 gimple_expr_type (init_stmt),
3389 gimple_assign_rhs1 (init_stmt));
3390 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3391 GSI_NEW_STMT);
3392 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3393 gimple_assign_set_rhs1 (init_stmt, rhs);
3394 }
3395 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3396 if (!is_gimple_debug (init_stmt))
3397 {
3398 gimple_regimplify_operands (init_stmt, &si);
3399
3400 tree def = gimple_assign_lhs (init_stmt);
3401 insert_init_debug_bind (id, bb, def, def, init_stmt);
3402 }
3403 }
3404 }
3405
3406 /* Deal with mismatched formal/actual parameters, in a rather brute-force way
3407 if need be (which should only be necessary for invalid programs). Attempt
3408 to convert VAL to TYPE and return the result if it is possible, just return
3409 a zero constant of the given type if it fails. */
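/* A sketch of the behavior: an int constant 3 passed for a double
   parameter folds to 3.0; a float passed for a same-sized int
   parameter is wrapped in a VIEW_CONVERT_EXPR; a register-type value
   of a different size degrades to a zero constant of TYPE. */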
3410
3411 tree
3412 force_value_to_type (tree type, tree value)
3413 {
3414 /* If we can match up types by promotion/demotion do so. */
3415 if (fold_convertible_p (type, value))
3416 return fold_convert (type, value);
3417
3418 /* ??? For valid programs we should not end up here.
3419 Still if we end up with truly mismatched types here, fall back
3420 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3421 GIMPLE to the following passes. */
3422 if (!is_gimple_reg_type (TREE_TYPE (value))
3423 || TYPE_SIZE (type) == TYPE_SIZE (TREE_TYPE (value)))
3424 return fold_build1 (VIEW_CONVERT_EXPR, type, value);
3425 else
3426 return build_zero_cst (type);
3427 }
3428
3429 /* Initialize parameter P with VALUE. If needed, produce an init statement
3430 at the end of BB. When BB is NULL, we return the init statement to be
3431 output later. */
3432 static gimple *
3433 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3434 basic_block bb, tree *vars)
3435 {
3436 gimple *init_stmt = NULL;
3437 tree var;
3438 tree rhs = value;
3439 tree def = (gimple_in_ssa_p (cfun)
3440 ? ssa_default_def (id->src_cfun, p) : NULL);
3441
3442 if (value
3443 && value != error_mark_node
3444 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3445 rhs = force_value_to_type (TREE_TYPE (p), value);
3446
3447 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3448 here since the type of this decl must be visible to the calling
3449 function. */
3450 var = copy_decl_to_var (p, id);
3451
3452 /* Declare this new variable. */
3453 DECL_CHAIN (var) = *vars;
3454 *vars = var;
3455
3456 /* Make gimplifier happy about this variable. */
3457 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3458
3459 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3460 we would not need to create a new variable here at all, if it
3461 weren't for debug info. Still, we can just use the argument
3462 value. */
3463 if (TREE_READONLY (p)
3464 && !TREE_ADDRESSABLE (p)
3465 && value && !TREE_SIDE_EFFECTS (value)
3466 && !def)
3467 {
3468 /* We may produce non-gimple trees by adding NOPs or introduce
3469 invalid sharing when the operand is not really constant.
3470 It is no big deal to prohibit constant propagation here, as
3471 we will constant propagate in the DOM1 pass anyway. */
3472 if (is_gimple_min_invariant (value)
3473 && useless_type_conversion_p (TREE_TYPE (p),
3474 TREE_TYPE (value))
3475 /* We have to be very careful about ADDR_EXPR. Make sure
3476 the base variable isn't a local variable of the inlined
3477 function, e.g., when doing recursive inlining, direct or
3478 mutually-recursive or whatever, which is why we don't
3479 just test whether fn == current_function_decl. */
3480 && ! self_inlining_addr_expr (value, fn))
3481 {
3482 insert_decl_map (id, p, value);
3483 insert_debug_decl_map (id, p, var);
3484 return insert_init_debug_bind (id, bb, var, value, NULL);
3485 }
3486 }
3487
3488 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3489 that way, when the PARM_DECL is encountered, it will be
3490 automatically replaced by the VAR_DECL. */
3491 insert_decl_map (id, p, var);
3492
3493 /* Even if P was TREE_READONLY, the new VAR should not be.
3494 In the original code, we would have constructed a
3495 temporary, and then the function body would have never
3496 changed the value of P. However, now, we will be
3497 constructing VAR directly. The constructor body may
3498 change its value multiple times as it is being
3499 constructed. Therefore, it must not be TREE_READONLY;
3500 the back-end assumes that TREE_READONLY variable is
3501 assigned to only once. */
3502 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3503 TREE_READONLY (var) = 0;
3504
3505 /* If there is no setup required and we are in SSA, take the easy route
3506 replacing all SSA names representing the function parameter by the
3507 SSA name passed to the function.
3508
3509 We need to construct a map for the variable anyway, as it might be
3510 used in different SSA names when the parameter is set in the function.
3511
3512 Do the replacement at -O0 for const arguments replaced by a constant.
3513 This is important for builtin_constant_p and other constructs requiring
3514 the constant argument to be visible in the inlined function body. */
3515 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3516 && (optimize
3517 || (TREE_READONLY (p)
3518 && is_gimple_min_invariant (rhs)))
3519 && (TREE_CODE (rhs) == SSA_NAME
3520 || is_gimple_min_invariant (rhs))
3521 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3522 {
3523 insert_decl_map (id, def, rhs);
3524 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3525 }
3526
3527 /* If the value of the argument is never used, don't bother initializing
3528 it. */
3529 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3530 {
3531 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3532 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3533 }
3534
3535 /* Initialize this VAR_DECL from the equivalent argument. Convert
3536 the argument to the proper type in case it was promoted. */
3537 if (value)
3538 {
3539 if (rhs == error_mark_node)
3540 {
3541 insert_decl_map (id, p, var);
3542 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3543 }
3544
3545 STRIP_USELESS_TYPE_CONVERSION (rhs);
3546
3547 /* If we are in SSA form properly remap the default definition
3548 or assign to a dummy SSA name if the parameter is unused and
3549 we are not optimizing. */
3550 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3551 {
3552 if (def)
3553 {
3554 def = remap_ssa_name (def, id);
3555 init_stmt = gimple_build_assign (def, rhs);
3556 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3557 set_ssa_default_def (cfun, var, NULL);
3558 }
3559 else if (!optimize)
3560 {
3561 def = make_ssa_name (var);
3562 init_stmt = gimple_build_assign (def, rhs);
3563 }
3564 }
3565 else
3566 init_stmt = gimple_build_assign (var, rhs);
3567
3568 if (bb && init_stmt)
3569 insert_init_stmt (id, bb, init_stmt);
3570 }
3571 return init_stmt;
3572 }
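
/* A minimal sketch of two common outcomes above (hypothetical SSA names,
   not actual dump output).  When inlining

       static int callee (int p) { return p + 1; }
       ...
       x = callee (5);

   the parameter's default definition p_1(D) is simply mapped to the
   constant 5 and no initialization statement is emitted.  If instead the
   callee takes the address of P, P is not a GIMPLE register, so a real
   initialization of the VAR_DECL copy is emitted at the end of the entry
   block:

       p.3 = 5;

   where p.3 stands for the variable created by copy_decl_to_var.  */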
3573
3574 /* Generate code to initialize the parameters of the function at the
3575 top of the stack in ID from the GIMPLE_CALL STMT. */
3576
3577 static void
3578 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3579 tree fn, basic_block bb)
3580 {
3581 tree parms;
3582 size_t i;
3583 tree p;
3584 tree vars = NULL_TREE;
3585 tree static_chain = gimple_call_chain (stmt);
3586
3587 /* Figure out what the parameters are. */
3588 parms = DECL_ARGUMENTS (fn);
3589
3590 /* Loop through the parameter declarations, replacing each with an
3591 equivalent VAR_DECL, appropriately initialized. */
3592 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3593 {
3594 tree val;
3595 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3596 setup_one_parameter (id, p, val, fn, bb, &vars);
3597 }
3598 /* After remapping parameters remap their types. This has to be done
3599 in a second loop over all parameters to appropriately remap
3600 variable sized arrays when the size is specified in a
3601 parameter following the array. */
3602 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3603 {
3604 tree *varp = id->decl_map->get (p);
3605 if (varp && VAR_P (*varp))
3606 {
3607 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3608 ? ssa_default_def (id->src_cfun, p) : NULL);
3609 tree var = *varp;
3610 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3611 /* Also remap the default definition if it was remapped
3612 to the default definition of the parameter replacement
3613 by the parameter setup. */
3614 if (def)
3615 {
3616 tree *defp = id->decl_map->get (def);
3617 if (defp
3618 && TREE_CODE (*defp) == SSA_NAME
3619 && SSA_NAME_VAR (*defp) == var)
3620 TREE_TYPE (*defp) = TREE_TYPE (var);
3621 }
3622 }
3623 }
3624
3625 /* Initialize the static chain. */
3626 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3627 gcc_assert (fn != current_function_decl);
3628 if (p)
3629 {
3630 /* No static chain? Seems like a bug in tree-nested.c. */
3631 gcc_assert (static_chain);
3632
3633 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3634 }
3635
3636 declare_inline_vars (id->block, vars);
3637 }
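
/* The second loop above matters for variably modified parameter types.
   A sketch using the GNU parameter forward-declaration extension:

       void callee (int len; char buf[len], int len);

   Here the type of BUF refers to LEN, a parameter following it, so BUF's
   replacement variable can only have its type remapped once replacements
   for all parameters exist.  */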
3638
3639
3640 /* Declare a return variable to replace the RESULT_DECL for the
3641 function we are calling. An appropriate DECL_STMT is returned.
3642 The USE_STMT is filled to contain a use of the declaration to
3643 indicate the return value of the function.
3644
3645 RETURN_SLOT, if non-null, is the place where the result is to be
3646 stored. It is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST,
3647 if non-null, was the LHS of the MODIFY_EXPR to which this call is the RHS.
3648
3649 The return value is a (possibly null) value that holds the result
3650 as seen by the caller. */
3651
3652 static tree
3653 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3654 basic_block entry_bb)
3655 {
3656 tree callee = id->src_fn;
3657 tree result = DECL_RESULT (callee);
3658 tree callee_type = TREE_TYPE (result);
3659 tree caller_type;
3660 tree var, use;
3661
3662 /* Handle type-mismatches in the function declaration return type
3663 vs. the call expression. */
3664 if (modify_dest)
3665 caller_type = TREE_TYPE (modify_dest);
3666 else if (return_slot)
3667 caller_type = TREE_TYPE (return_slot);
3668 else /* No LHS on the call. */
3669 caller_type = TREE_TYPE (TREE_TYPE (callee));
3670
3671 /* We don't need to do anything for functions that don't return anything. */
3672 if (VOID_TYPE_P (callee_type))
3673 return NULL_TREE;
3674
3675 /* If there was a return slot, then the return value is the
3676 dereferenced address of that object. */
3677 if (return_slot)
3678 {
3679 /* The front end shouldn't have used both return_slot and
3680 a modify expression. */
3681 gcc_assert (!modify_dest);
3682 if (DECL_BY_REFERENCE (result))
3683 {
3684 tree return_slot_addr = build_fold_addr_expr (return_slot);
3685 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3686
3687 /* We are going to construct *&return_slot and we can't do that
3688 for variables believed to be not addressable.
3689
3690 FIXME: This check can possibly trigger, because values returned
3691 via the return slot optimization are not believed to have their
3692 address taken by alias analysis. */
3693 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3694 var = return_slot_addr;
3695 mark_addressable (return_slot);
3696 }
3697 else
3698 {
3699 var = return_slot;
3700 gcc_assert (TREE_CODE (var) != SSA_NAME);
3701 if (TREE_ADDRESSABLE (result))
3702 mark_addressable (var);
3703 }
3704 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3705 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3706 && !DECL_GIMPLE_REG_P (result)
3707 && DECL_P (var))
3708 DECL_GIMPLE_REG_P (var) = 0;
3709
3710 if (!useless_type_conversion_p (callee_type, caller_type))
3711 var = build1 (VIEW_CONVERT_EXPR, callee_type, var);
3712
3713 use = NULL;
3714 goto done;
3715 }
3716
3717 /* All types requiring non-trivial constructors should have been handled. */
3718 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3719
3720 /* Attempt to avoid creating a new temporary variable. */
3721 if (modify_dest
3722 && TREE_CODE (modify_dest) != SSA_NAME)
3723 {
3724 bool use_it = false;
3725
3726 /* We can't use MODIFY_DEST if there's type promotion involved. */
3727 if (!useless_type_conversion_p (callee_type, caller_type))
3728 use_it = false;
3729
3730 /* ??? If we're assigning to a variable sized type, then we must
3731 reuse the destination variable, because we've no good way to
3732 create variable sized temporaries at this point. */
3733 else if (!poly_int_tree_p (TYPE_SIZE_UNIT (caller_type)))
3734 use_it = true;
3735
3736 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3737 reuse it as the result of the call directly. Don't do this if
3738 it would promote MODIFY_DEST to addressable. */
3739 else if (TREE_ADDRESSABLE (result))
3740 use_it = false;
3741 else
3742 {
3743 tree base_m = get_base_address (modify_dest);
3744
3745 /* If the base isn't a decl, then it's a pointer, and we don't
3746 know where that's going to go. */
3747 if (!DECL_P (base_m))
3748 use_it = false;
3749 else if (is_global_var (base_m))
3750 use_it = false;
3751 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3752 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3753 && !DECL_GIMPLE_REG_P (result)
3754 && DECL_GIMPLE_REG_P (base_m))
3755 use_it = false;
3756 else if (!TREE_ADDRESSABLE (base_m))
3757 use_it = true;
3758 }
3759
3760 if (use_it)
3761 {
3762 var = modify_dest;
3763 use = NULL;
3764 goto done;
3765 }
3766 }
3767
3768 gcc_assert (poly_int_tree_p (TYPE_SIZE_UNIT (callee_type)));
3769
3770 var = copy_result_decl_to_var (result, id);
3771 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3772
3773 /* Do not have the rest of GCC warn about this variable as it should
3774 not be visible to the user. */
3775 TREE_NO_WARNING (var) = 1;
3776
3777 declare_inline_vars (id->block, var);
3778
3779 /* Build the use expr. If the return type of the function was
3780 promoted, convert it back to the expected type. */
3781 use = var;
3782 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3783 {
3784 /* If we can match up types by promotion/demotion do so. */
3785 if (fold_convertible_p (caller_type, var))
3786 use = fold_convert (caller_type, var);
3787 else
3788 {
3789 /* ??? For valid programs we should not end up here.
3790 Still if we end up with truly mismatched types here, fall back
3791 to using a MEM_REF to not leak invalid GIMPLE to the following
3792 passes. */
3793 /* Prevent var from being written into SSA form. */
3794 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3795 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3796 DECL_GIMPLE_REG_P (var) = false;
3797 else if (is_gimple_reg_type (TREE_TYPE (var)))
3798 TREE_ADDRESSABLE (var) = true;
3799 use = fold_build2 (MEM_REF, caller_type,
3800 build_fold_addr_expr (var),
3801 build_int_cst (ptr_type_node, 0));
3802 }
3803 }
3804
3805 STRIP_USELESS_TYPE_CONVERSION (use);
3806
3807 if (DECL_BY_REFERENCE (result))
3808 {
3809 TREE_ADDRESSABLE (var) = 1;
3810 var = build_fold_addr_expr (var);
3811 }
3812
3813 done:
3814 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3815 way, when the RESULT_DECL is encountered, it will be
3816 automatically replaced by the VAR_DECL.
3817
3818 When returning by reference, ensure that RESULT_DECL remaps to
3819 gimple_val. */
3820 if (DECL_BY_REFERENCE (result)
3821 && !is_gimple_val (var))
3822 {
3823 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3824 insert_decl_map (id, result, temp);
3825 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3826 its default_def SSA_NAME. */
3827 if (gimple_in_ssa_p (id->src_cfun)
3828 && is_gimple_reg (result))
3829 {
3830 temp = make_ssa_name (temp);
3831 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3832 }
3833 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3834 }
3835 else
3836 insert_decl_map (id, result, var);
3837
3838 /* Remember this so we can ignore it in remap_decls. */
3839 id->retvar = var;
3840 return use;
3841 }
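
/* A sketch of the common cases above (hypothetical names).  For

       x = callee ();

   where X is a local, non-addressable variable of matching type,
   MODIFY_DEST is reused directly, so stores to the RESULT_DECL in the
   copied body become stores to X itself.  When that reuse is unsafe, a
   fresh temporary such as retval.5 is declared instead, and the USE
   returned here is what later replaces the LHS of the original call.  */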
3842
3843 /* Determine if the function can be copied. If so return NULL. If
3844 not return a string describing the reason for failure. */
3845
3846 const char *
3847 copy_forbidden (struct function *fun)
3848 {
3849 const char *reason = fun->cannot_be_copied_reason;
3850
3851 /* Only examine the function once. */
3852 if (fun->cannot_be_copied_set)
3853 return reason;
3854
3855 /* We cannot copy a function that receives a non-local goto
3856 because we cannot remap the destination label used in the
3857 function that is performing the non-local goto. */
3858 /* ??? Actually, this should be possible, if we work at it.
3859 No doubt there's just a handful of places that simply
3860 assume it doesn't happen and don't substitute properly. */
3861 if (fun->has_nonlocal_label)
3862 {
3863 reason = G_("function %q+F can never be copied "
3864 "because it receives a non-local goto");
3865 goto fail;
3866 }
3867
3868 if (fun->has_forced_label_in_static)
3869 {
3870 reason = G_("function %q+F can never be copied because it saves "
3871 "address of local label in a static variable");
3872 goto fail;
3873 }
3874
3875 fail:
3876 fun->cannot_be_copied_reason = reason;
3877 fun->cannot_be_copied_set = true;
3878 return reason;
3879 }
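
/* An illustrative GNU C sketch of the non-local goto restriction:

       void outer (void)
       {
        label:;
         void inner (void) { goto label; }  // non-local goto into OUTER
         inner ();
       }

   OUTER receives a non-local goto from the nested function, so
   copy_forbidden refuses to duplicate its body.  */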
3880
3881
3882 static const char *inline_forbidden_reason;
3883
3884 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3885 iff a function cannot be inlined. Also sets the reason why. */
3886
3887 static tree
3888 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3889 struct walk_stmt_info *wip)
3890 {
3891 tree fn = (tree) wip->info;
3892 tree t;
3893 gimple *stmt = gsi_stmt (*gsi);
3894
3895 switch (gimple_code (stmt))
3896 {
3897 case GIMPLE_CALL:
3898 /* Refuse to inline an alloca call unless the user explicitly forced it,
3899 as this may change the program's memory overhead drastically when the
3900 function using alloca is called in a loop. In the GCC present in
3901 SPEC2000, inlining into schedule_block caused it to require 2GB of
3902 RAM instead of 256MB. Don't do so for alloca calls emitted for
3903 VLA objects, as those can't cause unbounded growth (they're always
3904 wrapped inside stack_save/stack_restore regions). */
3905 if (gimple_maybe_alloca_call_p (stmt)
3906 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3907 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3908 {
3909 inline_forbidden_reason
3910 = G_("function %q+F can never be inlined because it uses "
3911 "alloca (override using the always_inline attribute)");
3912 *handled_ops_p = true;
3913 return fn;
3914 }
3915
3916 t = gimple_call_fndecl (stmt);
3917 if (t == NULL_TREE)
3918 break;
3919
3920 /* We cannot inline functions that call setjmp. */
3921 if (setjmp_call_p (t))
3922 {
3923 inline_forbidden_reason
3924 = G_("function %q+F can never be inlined because it uses setjmp");
3925 *handled_ops_p = true;
3926 return t;
3927 }
3928
3929 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3930 switch (DECL_FUNCTION_CODE (t))
3931 {
3932 /* We cannot inline functions that take a variable number of
3933 arguments. */
3934 case BUILT_IN_VA_START:
3935 case BUILT_IN_NEXT_ARG:
3936 case BUILT_IN_VA_END:
3937 inline_forbidden_reason
3938 = G_("function %q+F can never be inlined because it "
3939 "uses variable argument lists");
3940 *handled_ops_p = true;
3941 return t;
3942
3943 case BUILT_IN_LONGJMP:
3944 /* We can't inline functions that call __builtin_longjmp at
3945 all. The non-local goto machinery really requires the
3946 destination be in a different function. If we allow the
3947 function calling __builtin_longjmp to be inlined into the
3948 function calling __builtin_setjmp, Things will Go Awry. */
3949 inline_forbidden_reason
3950 = G_("function %q+F can never be inlined because "
3951 "it uses setjmp-longjmp exception handling");
3952 *handled_ops_p = true;
3953 return t;
3954
3955 case BUILT_IN_NONLOCAL_GOTO:
3956 /* Similarly. */
3957 inline_forbidden_reason
3958 = G_("function %q+F can never be inlined because "
3959 "it uses non-local goto");
3960 *handled_ops_p = true;
3961 return t;
3962
3963 case BUILT_IN_RETURN:
3964 case BUILT_IN_APPLY_ARGS:
3965 /* If a __builtin_apply_args caller would be inlined,
3966 it would be saving arguments of the function it has
3967 been inlined into. Similarly __builtin_return would
3968 return from the function the caller has been inlined into. */
3969 inline_forbidden_reason
3970 = G_("function %q+F can never be inlined because "
3971 "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
3972 *handled_ops_p = true;
3973 return t;
3974
3975 default:
3976 break;
3977 }
3978 break;
3979
3980 case GIMPLE_GOTO:
3981 t = gimple_goto_dest (stmt);
3982
3983 /* We will not inline a function which uses computed goto. The
3984 addresses of its local labels, which may be tucked into
3985 global storage, are of course not constant across
3986 instantiations, which causes unexpected behavior. */
3987 if (TREE_CODE (t) != LABEL_DECL)
3988 {
3989 inline_forbidden_reason
3990 = G_("function %q+F can never be inlined "
3991 "because it contains a computed goto");
3992 *handled_ops_p = true;
3993 return t;
3994 }
3995 break;
3996
3997 default:
3998 break;
3999 }
4000
4001 *handled_ops_p = false;
4002 return NULL_TREE;
4003 }
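
/* For instance, a function using the GNU computed-goto extension is
   rejected by the GIMPLE_GOTO case above:

       void *p = &&lab;  // address of a local label
       goto *p;          // goto destination is not a LABEL_DECL
      lab:;

   If the body were duplicated, a stored &&lab could point into the wrong
   instantiation of the label, hence the refusal.  */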
4004
4005 /* Return true if FNDECL is a function that cannot be inlined into
4006 another one. */
4007
4008 static bool
4009 inline_forbidden_p (tree fndecl)
4010 {
4011 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
4012 struct walk_stmt_info wi;
4013 basic_block bb;
4014 bool forbidden_p = false;
4015
4016 /* First check for shared reasons not to copy the code. */
4017 inline_forbidden_reason = copy_forbidden (fun);
4018 if (inline_forbidden_reason != NULL)
4019 return true;
4020
4021 /* Next, walk the statements of the function looking for
4022 constructs we can't handle, or that are non-optimal for inlining. */
4023 hash_set<tree> visited_nodes;
4024 memset (&wi, 0, sizeof (wi));
4025 wi.info = (void *) fndecl;
4026 wi.pset = &visited_nodes;
4027
4028 FOR_EACH_BB_FN (bb, fun)
4029 {
4030 gimple *ret;
4031 gimple_seq seq = bb_seq (bb);
4032 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
4033 forbidden_p = (ret != NULL);
4034 if (forbidden_p)
4035 break;
4036 }
4037
4038 return forbidden_p;
4039 }
4040
4041 /* Return false if the function FNDECL cannot be inlined on account of its
4042 attributes, true otherwise. */
4043 static bool
4044 function_attribute_inlinable_p (const_tree fndecl)
4045 {
4046 if (targetm.attribute_table)
4047 {
4048 const_tree a;
4049
4050 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
4051 {
4052 const_tree name = get_attribute_name (a);
4053 int i;
4054
4055 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
4056 if (is_attribute_p (targetm.attribute_table[i].name, name))
4057 return targetm.function_attribute_inlinable_p (fndecl);
4058 }
4059 }
4060
4061 return true;
4062 }
4063
4064 /* Returns nonzero if FN is a function that does not have any
4065 fundamental inline blocking properties. */
4066
4067 bool
4068 tree_inlinable_function_p (tree fn)
4069 {
4070 bool inlinable = true;
4071 bool do_warning;
4072 tree always_inline;
4073
4074 /* If we've already decided this function shouldn't be inlined,
4075 there's no need to check again. */
4076 if (DECL_UNINLINABLE (fn))
4077 return false;
4078
4079 /* We only warn for functions declared `inline' by the user. */
4080 do_warning = (opt_for_fn (fn, warn_inline)
4081 && DECL_DECLARED_INLINE_P (fn)
4082 && !DECL_NO_INLINE_WARNING_P (fn)
4083 && !DECL_IN_SYSTEM_HEADER (fn));
4084
4085 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
4086
4087 if (flag_no_inline
4088 && always_inline == NULL)
4089 {
4090 if (do_warning)
4091 warning (OPT_Winline, "function %q+F can never be inlined because it "
4092 "is suppressed using %<-fno-inline%>", fn);
4093 inlinable = false;
4094 }
4095
4096 else if (!function_attribute_inlinable_p (fn))
4097 {
4098 if (do_warning)
4099 warning (OPT_Winline, "function %q+F can never be inlined because it "
4100 "uses attributes conflicting with inlining", fn);
4101 inlinable = false;
4102 }
4103
4104 else if (inline_forbidden_p (fn))
4105 {
4106 /* See if we should warn about uninlinable functions. Previously,
4107 some of these warnings would be issued while trying to expand
4108 the function inline, but that would cause multiple warnings
4109 about functions that would for example call alloca. But since
4110 this is a property of the function, just one warning is enough.
4111 As a bonus we can now give more details about the reason why a
4112 function is not inlinable. */
4113 if (always_inline)
4114 error (inline_forbidden_reason, fn);
4115 else if (do_warning)
4116 warning (OPT_Winline, inline_forbidden_reason, fn);
4117
4118 inlinable = false;
4119 }
4120
4121 /* Squirrel away the result so that we don't have to check again. */
4122 DECL_UNINLINABLE (fn) = !inlinable;
4123
4124 return inlinable;
4125 }
4126
4127 /* Estimate the cost of a memory move of type TYPE. Use the machine
4128 dependent word size, take a possible memcpy call into account, and
4129 return a cost based on whether optimizing for size or speed according to SPEED_P. */
4130
4131 int
4132 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
4133 {
4134 HOST_WIDE_INT size;
4135
4136 gcc_assert (!VOID_TYPE_P (type));
4137
4138 if (TREE_CODE (type) == VECTOR_TYPE)
4139 {
4140 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
4141 machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
4142 int orig_mode_size
4143 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
4144 int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
4145 return ((orig_mode_size + simd_mode_size - 1)
4146 / simd_mode_size);
4147 }
4148
4149 size = int_size_in_bytes (type);
4150
4151 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
4152 /* Cost of a memcpy call, 3 arguments and the call. */
4153 return 4;
4154 else
4155 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
4156 }
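
/* A worked example with made-up target numbers: suppose MOVE_MAX_PIECES
   is 16 and MOVE_RATIO (speed_p) is 4, giving a memcpy threshold of 64
   bytes.  Moving a 32-byte struct then costs (32 + 16 - 1) / 16 = 2,
   while a 128-byte struct exceeds the threshold and is charged the flat
   memcpy cost of 4.  */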
4157
4158 /* Returns the cost of operation CODE, according to WEIGHTS. */
4159
4160 static int
4161 estimate_operator_cost (enum tree_code code, eni_weights *weights,
4162 tree op1 ATTRIBUTE_UNUSED, tree op2)
4163 {
4164 switch (code)
4165 {
4166 /* These are "free" conversions, or their presumed cost
4167 is folded into other operations. */
4168 case RANGE_EXPR:
4169 CASE_CONVERT:
4170 case COMPLEX_EXPR:
4171 case PAREN_EXPR:
4172 case VIEW_CONVERT_EXPR:
4173 return 0;
4174
4175 /* Assign a cost of 1 to usual operations.
4176 ??? We may consider mapping RTL costs to this. */
4177 case COND_EXPR:
4178 case VEC_COND_EXPR:
4179 case VEC_PERM_EXPR:
4180
4181 case PLUS_EXPR:
4182 case POINTER_PLUS_EXPR:
4183 case POINTER_DIFF_EXPR:
4184 case MINUS_EXPR:
4185 case MULT_EXPR:
4186 case MULT_HIGHPART_EXPR:
4187
4188 case ADDR_SPACE_CONVERT_EXPR:
4189 case FIXED_CONVERT_EXPR:
4190 case FIX_TRUNC_EXPR:
4191
4192 case NEGATE_EXPR:
4193 case FLOAT_EXPR:
4194 case MIN_EXPR:
4195 case MAX_EXPR:
4196 case ABS_EXPR:
4197 case ABSU_EXPR:
4198
4199 case LSHIFT_EXPR:
4200 case RSHIFT_EXPR:
4201 case LROTATE_EXPR:
4202 case RROTATE_EXPR:
4203
4204 case BIT_IOR_EXPR:
4205 case BIT_XOR_EXPR:
4206 case BIT_AND_EXPR:
4207 case BIT_NOT_EXPR:
4208
4209 case TRUTH_ANDIF_EXPR:
4210 case TRUTH_ORIF_EXPR:
4211 case TRUTH_AND_EXPR:
4212 case TRUTH_OR_EXPR:
4213 case TRUTH_XOR_EXPR:
4214 case TRUTH_NOT_EXPR:
4215
4216 case LT_EXPR:
4217 case LE_EXPR:
4218 case GT_EXPR:
4219 case GE_EXPR:
4220 case EQ_EXPR:
4221 case NE_EXPR:
4222 case ORDERED_EXPR:
4223 case UNORDERED_EXPR:
4224
4225 case UNLT_EXPR:
4226 case UNLE_EXPR:
4227 case UNGT_EXPR:
4228 case UNGE_EXPR:
4229 case UNEQ_EXPR:
4230 case LTGT_EXPR:
4231
4232 case CONJ_EXPR:
4233
4234 case PREDECREMENT_EXPR:
4235 case PREINCREMENT_EXPR:
4236 case POSTDECREMENT_EXPR:
4237 case POSTINCREMENT_EXPR:
4238
4239 case REALIGN_LOAD_EXPR:
4240
4241 case WIDEN_SUM_EXPR:
4242 case WIDEN_MULT_EXPR:
4243 case DOT_PROD_EXPR:
4244 case SAD_EXPR:
4245 case WIDEN_MULT_PLUS_EXPR:
4246 case WIDEN_MULT_MINUS_EXPR:
4247 case WIDEN_LSHIFT_EXPR:
4248
4249 case VEC_WIDEN_MULT_HI_EXPR:
4250 case VEC_WIDEN_MULT_LO_EXPR:
4251 case VEC_WIDEN_MULT_EVEN_EXPR:
4252 case VEC_WIDEN_MULT_ODD_EXPR:
4253 case VEC_UNPACK_HI_EXPR:
4254 case VEC_UNPACK_LO_EXPR:
4255 case VEC_UNPACK_FLOAT_HI_EXPR:
4256 case VEC_UNPACK_FLOAT_LO_EXPR:
4257 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
4258 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
4259 case VEC_PACK_TRUNC_EXPR:
4260 case VEC_PACK_SAT_EXPR:
4261 case VEC_PACK_FIX_TRUNC_EXPR:
4262 case VEC_PACK_FLOAT_EXPR:
4263 case VEC_WIDEN_LSHIFT_HI_EXPR:
4264 case VEC_WIDEN_LSHIFT_LO_EXPR:
4265 case VEC_DUPLICATE_EXPR:
4266 case VEC_SERIES_EXPR:
4267
4268 return 1;
4269
4270 /* A few special cases of expensive operations. This is useful
4271 to avoid inlining functions having too many of these. */
4272 case TRUNC_DIV_EXPR:
4273 case CEIL_DIV_EXPR:
4274 case FLOOR_DIV_EXPR:
4275 case ROUND_DIV_EXPR:
4276 case EXACT_DIV_EXPR:
4277 case TRUNC_MOD_EXPR:
4278 case CEIL_MOD_EXPR:
4279 case FLOOR_MOD_EXPR:
4280 case ROUND_MOD_EXPR:
4281 case RDIV_EXPR:
4282 if (TREE_CODE (op2) != INTEGER_CST)
4283 return weights->div_mod_cost;
4284 return 1;
4285
4286 /* Bit-field insertion needs several shift and mask operations. */
4287 case BIT_INSERT_EXPR:
4288 return 3;
4289
4290 default:
4291 /* We expect a copy assignment with no operator. */
4292 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4293 return 0;
4294 }
4295 }
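
/* For example, with eni_time_weights.div_mod_cost of 10 (see
   init_inline_once below), "x / y" with non-constant Y is charged 10,
   whereas "x / 8" is charged only 1, on the assumption that division by
   an INTEGER_CST will be strength-reduced to cheaper operations.  */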
4296
4297
4298 /* Estimate number of instructions that will be created by expanding
4299 the statements in the statement sequence STMTS.
4300 WEIGHTS contains weights attributed to various constructs. */
4301
4302 int
4303 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4304 {
4305 int cost;
4306 gimple_stmt_iterator gsi;
4307
4308 cost = 0;
4309 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4310 cost += estimate_num_insns (gsi_stmt (gsi), weights);
4311
4312 return cost;
4313 }
4314
4315
4316 /* Estimate number of instructions that will be created by expanding STMT.
4317 WEIGHTS contains weights attributed to various constructs. */
4318
4319 int
4320 estimate_num_insns (gimple *stmt, eni_weights *weights)
4321 {
4322 unsigned cost, i;
4323 enum gimple_code code = gimple_code (stmt);
4324 tree lhs;
4325 tree rhs;
4326
4327 switch (code)
4328 {
4329 case GIMPLE_ASSIGN:
4330 /* Try to estimate the cost of assignments. We have two cases to
4331 deal with:
4332 1) Simple assignments to registers;
4333 2) Stores to things that must live in memory. This includes
4334 "normal" stores to scalars, but also assignments of large
4335 structures, or constructors of big arrays.
4336
4337 Let us look at these two cases, assuming we have "a = b + C":
4338 <GIMPLE_ASSIGN <var_decl "a">
4339 <plus_expr <var_decl "b"> <constant C>>
4340 If "a" is a GIMPLE register, the assignment to it is free on almost
4341 any target, because "a" usually ends up in a real register. Hence
4342 the only cost of this expression comes from the PLUS_EXPR, and we
4343 can ignore the GIMPLE_ASSIGN.
4344 If "a" is not a GIMPLE register, the assignment to "a" will most
4345 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4346 of moving something into "a", which we compute using the function
4347 estimate_move_cost. */
4348 if (gimple_clobber_p (stmt))
4349 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4350
4351 lhs = gimple_assign_lhs (stmt);
4352 rhs = gimple_assign_rhs1 (stmt);
4353
4354 cost = 0;
4355
4356 /* Account for the cost of moving to / from memory. */
4357 if (gimple_store_p (stmt))
4358 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4359 if (gimple_assign_load_p (stmt))
4360 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4361
4362 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4363 gimple_assign_rhs1 (stmt),
4364 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4365 == GIMPLE_BINARY_RHS
4366 ? gimple_assign_rhs2 (stmt) : NULL);
4367 break;
4368
4369 case GIMPLE_COND:
4370 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4371 gimple_op (stmt, 0),
4372 gimple_op (stmt, 1));
4373 break;
4374
4375 case GIMPLE_SWITCH:
4376 {
4377 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4378 /* Take into account the cost of the switch + guess 2 conditional jumps for
4379 each case label.
4380
4381 TODO: once the switch expansion logic is sufficiently separated, we can
4382 do a better job of estimating the cost of the switch. */
4383 if (weights->time_based)
4384 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4385 else
4386 cost = gimple_switch_num_labels (switch_stmt) * 2;
4387 }
4388 break;
4389
4390 case GIMPLE_CALL:
4391 {
4392 tree decl;
4393
4394 if (gimple_call_internal_p (stmt))
4395 return 0;
4396 else if ((decl = gimple_call_fndecl (stmt))
4397 && fndecl_built_in_p (decl))
4398 {
4399 /* Do not special-case builtins where we see the body.
4400 This just confuses the inliner. */
4401 struct cgraph_node *node;
4402 if (!(node = cgraph_node::get (decl))
4403 || node->definition)
4404 ;
4405 /* For builtins that are likely expanded to nothing or
4406 inlined do not account operand costs. */
4407 else if (is_simple_builtin (decl))
4408 return 0;
4409 else if (is_inexpensive_builtin (decl))
4410 return weights->target_builtin_call_cost;
4411 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4412 {
4413 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4414 specialize the cheap expansion we do here.
4415 ??? This asks for a more general solution. */
4416 switch (DECL_FUNCTION_CODE (decl))
4417 {
4418 case BUILT_IN_POW:
4419 case BUILT_IN_POWF:
4420 case BUILT_IN_POWL:
4421 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4422 && (real_equal
4423 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4424 &dconst2)))
4425 return estimate_operator_cost
4426 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4427 gimple_call_arg (stmt, 0));
4428 break;
4429
4430 default:
4431 break;
4432 }
4433 }
4434 }
4435
4436 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4437 if (gimple_call_lhs (stmt))
4438 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4439 weights->time_based);
4440 for (i = 0; i < gimple_call_num_args (stmt); i++)
4441 {
4442 tree arg = gimple_call_arg (stmt, i);
4443 cost += estimate_move_cost (TREE_TYPE (arg),
4444 weights->time_based);
4445 }
4446 break;
4447 }
4448
4449 case GIMPLE_RETURN:
4450 return weights->return_cost;
4451
4452 case GIMPLE_GOTO:
4453 case GIMPLE_LABEL:
4454 case GIMPLE_NOP:
4455 case GIMPLE_PHI:
4456 case GIMPLE_PREDICT:
4457 case GIMPLE_DEBUG:
4458 return 0;
4459
4460 case GIMPLE_ASM:
4461 {
4462 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4463 /* 1000 means infinity. This avoids overflows later
4464 with very long asm statements. */
4465 if (count > 1000)
4466 count = 1000;
4467 /* If this asm is asm inline, count anything as minimum size. */
4468 if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4469 count = MIN (1, count);
4470 return MAX (1, count);
4471 }
4472
4473 case GIMPLE_RESX:
4474 /* This is either going to be an external function call with one
4475 argument, or two register copy statements plus a goto. */
4476 return 2;
4477
4478 case GIMPLE_EH_DISPATCH:
4479 /* ??? This is going to turn into a switch statement. Ideally
4480 we'd have a look at the eh region and estimate the number of
4481 edges involved. */
4482 return 10;
4483
4484 case GIMPLE_BIND:
4485 return estimate_num_insns_seq (
4486 gimple_bind_body (as_a <gbind *> (stmt)),
4487 weights);
4488
4489 case GIMPLE_EH_FILTER:
4490 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4491
4492 case GIMPLE_CATCH:
4493 return estimate_num_insns_seq (gimple_catch_handler (
4494 as_a <gcatch *> (stmt)),
4495 weights);
4496
4497 case GIMPLE_TRY:
4498 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4499 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4500
4501 /* OMP directives are generally very expensive. */
4502
4503 case GIMPLE_OMP_RETURN:
4504 case GIMPLE_OMP_SECTIONS_SWITCH:
4505 case GIMPLE_OMP_ATOMIC_STORE:
4506 case GIMPLE_OMP_CONTINUE:
4507 /* ...except these, which are cheap. */
4508 return 0;
4509
4510 case GIMPLE_OMP_ATOMIC_LOAD:
4511 return weights->omp_cost;
4512
4513 case GIMPLE_OMP_FOR:
4514 return (weights->omp_cost
4515 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4516 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4517
4518 case GIMPLE_OMP_PARALLEL:
4519 case GIMPLE_OMP_TASK:
4520 case GIMPLE_OMP_CRITICAL:
4521 case GIMPLE_OMP_MASTER:
4522 case GIMPLE_OMP_TASKGROUP:
4523 case GIMPLE_OMP_ORDERED:
4524 case GIMPLE_OMP_SCAN:
4525 case GIMPLE_OMP_SECTION:
4526 case GIMPLE_OMP_SECTIONS:
4527 case GIMPLE_OMP_SINGLE:
4528 case GIMPLE_OMP_TARGET:
4529 case GIMPLE_OMP_TEAMS:
4530 return (weights->omp_cost
4531 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4532
4533 case GIMPLE_TRANSACTION:
4534 return (weights->tm_cost
4535 + estimate_num_insns_seq (gimple_transaction_body (
4536 as_a <gtransaction *> (stmt)),
4537 weights));
4538
4539 default:
4540 gcc_unreachable ();
4541 }
4542
4543 return cost;
4544 }
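
/* Putting the pieces together, a sketch of the accounting for

       _1 = b + c;
       s.a = _1;   // S.A lives in memory

   under the size weights: the PLUS_EXPR statement is charged 1 by
   estimate_operator_cost, and the store is charged estimate_move_cost
   for S.A's type (1 for a word-sized scalar), for an estimate of 2
   instructions in total.  */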
4545
4546 /* Estimate number of instructions that will be created by expanding
4547 function FNDECL. WEIGHTS contains weights attributed to various
4548 constructs. */
4549
4550 int
4551 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4552 {
4553 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4554 gimple_stmt_iterator bsi;
4555 basic_block bb;
4556 int n = 0;
4557
4558 gcc_assert (my_function && my_function->cfg);
4559 FOR_EACH_BB_FN (bb, my_function)
4560 {
4561 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4562 n += estimate_num_insns (gsi_stmt (bsi), weights);
4563 }
4564
4565 return n;
4566 }
4567
4568
4569 /* Initializes weights used by estimate_num_insns. */
4570
4571 void
4572 init_inline_once (void)
4573 {
4574 eni_size_weights.call_cost = 1;
4575 eni_size_weights.indirect_call_cost = 3;
4576 eni_size_weights.target_builtin_call_cost = 1;
4577 eni_size_weights.div_mod_cost = 1;
4578 eni_size_weights.omp_cost = 40;
4579 eni_size_weights.tm_cost = 10;
4580 eni_size_weights.time_based = false;
4581 eni_size_weights.return_cost = 1;
4582
4583 /* Estimating the time for a call is difficult, since we have no idea what the
4584 called function does. In the current uses of eni_time_weights,
4585 underestimating the cost does less harm than overestimating it, so
4586 we choose a rather small value here. */
4587 eni_time_weights.call_cost = 10;
4588 eni_time_weights.indirect_call_cost = 15;
4589 eni_time_weights.target_builtin_call_cost = 1;
4590 eni_time_weights.div_mod_cost = 10;
4591 eni_time_weights.omp_cost = 40;
4592 eni_time_weights.tm_cost = 40;
4593 eni_time_weights.time_based = true;
4594 eni_time_weights.return_cost = 2;
4595 }
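
/* As a concrete reading of these numbers: a body consisting of one
   indirect call and one non-constant division is estimated at 3 + 1 = 4
   units of size but 15 + 10 = 25 units of time, so the two weight sets
   can rank the same body quite differently.  */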
4596
4597
4598 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4599
4600 static void
4601 prepend_lexical_block (tree current_block, tree new_block)
4602 {
4603 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4604 BLOCK_SUBBLOCKS (current_block) = new_block;
4605 BLOCK_SUPERCONTEXT (new_block) = current_block;
4606 }
4607
4608 /* Add local variables from CALLEE to CALLER. */
4609
4610 static inline void
4611 add_local_variables (struct function *callee, struct function *caller,
4612 copy_body_data *id)
4613 {
4614 tree var;
4615 unsigned ix;
4616
4617 FOR_EACH_LOCAL_DECL (callee, ix, var)
4618 if (!can_be_nonlocal (var, id))
4619 {
4620 tree new_var = remap_decl (var, id);
4621
4622 /* Remap debug-expressions. */
4623 if (VAR_P (new_var)
4624 && DECL_HAS_DEBUG_EXPR_P (var)
4625 && new_var != var)
4626 {
4627 tree tem = DECL_DEBUG_EXPR (var);
4628 bool old_regimplify = id->regimplify;
4629 id->remapping_type_depth++;
4630 walk_tree (&tem, copy_tree_body_r, id, NULL);
4631 id->remapping_type_depth--;
4632 id->regimplify = old_regimplify;
4633 SET_DECL_DEBUG_EXPR (new_var, tem);
4634 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4635 }
4636 add_local_decl (caller, new_var);
4637 }
4638 }
4639
4640 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4641 have brought in or introduced any debug stmts for SRCVAR. */
4642
4643 static inline void
4644 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4645 {
4646 tree *remappedvarp = id->decl_map->get (srcvar);
4647
4648 if (!remappedvarp)
4649 return;
4650
4651 if (!VAR_P (*remappedvarp))
4652 return;
4653
4654 if (*remappedvarp == id->retvar)
4655 return;
4656
4657 tree tvar = target_for_debug_bind (*remappedvarp);
4658 if (!tvar)
4659 return;
4660
4661 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4662 id->call_stmt);
4663 gimple_seq_add_stmt (bindings, stmt);
4664 }
4665
4666 /* For each inlined variable for which we may have debug bind stmts,
4667 add before GSI a final debug stmt resetting it, marking the end of
4668 its life, so that var-tracking knows it doesn't have to compute
4669 further locations for it. */
4670
4671 static inline void
4672 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4673 {
4674 tree var;
4675 unsigned ix;
4676 gimple_seq bindings = NULL;
4677
4678 if (!gimple_in_ssa_p (id->src_cfun))
4679 return;
4680
4681 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4682 return;
4683
4684 for (var = DECL_ARGUMENTS (id->src_fn);
4685 var; var = DECL_CHAIN (var))
4686 reset_debug_binding (id, var, &bindings);
4687
4688 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4689 reset_debug_binding (id, var, &bindings);
4690
4691 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4692 }
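
/* In dump form the effect looks roughly like this sketch (names made
   up): right before the statements following the inlined call,

       # DEBUG p => NULL
       # DEBUG localvar => NULL

   binds are emitted, so var-tracking stops computing locations for the
   inlined variables past the inline point.  */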
4693
4694 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4695
4696 static bool
4697 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id,
4698 bitmap to_purge)
4699 {
4700 tree use_retvar;
4701 tree fn;
4702 hash_map<tree, tree> *dst;
4703 hash_map<tree, tree> *st = NULL;
4704 tree return_slot;
4705 tree modify_dest;
4706 struct cgraph_edge *cg_edge;
4707 cgraph_inline_failed_t reason;
4708 basic_block return_block;
4709 edge e;
4710 gimple_stmt_iterator gsi, stmt_gsi;
4711 bool successfully_inlined = false;
4712 bool purge_dead_abnormal_edges;
4713 gcall *call_stmt;
4714 unsigned int prop_mask, src_properties;
4715 struct function *dst_cfun;
4716 tree simduid;
4717 use_operand_p use;
4718 gimple *simtenter_stmt = NULL;
4719 vec<tree> *simtvars_save;
4720
4721 /* The gimplifier uses input_location in too many places, such as
4722 internal_get_tmp_var (). */
4723 location_t saved_location = input_location;
4724 input_location = gimple_location (stmt);
4725
4726 /* From here on, we're only interested in CALL_EXPRs. */
4727 call_stmt = dyn_cast <gcall *> (stmt);
4728 if (!call_stmt)
4729 goto egress;
4730
4731 cg_edge = id->dst_node->get_edge (stmt);
4732 gcc_checking_assert (cg_edge);
4733 /* First, see if we can figure out what function is being called.
4734 If we cannot, then there is no hope of inlining the function. */
4735 if (cg_edge->indirect_unknown_callee)
4736 goto egress;
4737 fn = cg_edge->callee->decl;
4738 gcc_checking_assert (fn);
4739
4740 /* If FN is a declaration of a function in a nested scope that was
4741 globally declared inline, we don't set its DECL_INITIAL.
4742 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4743 C++ front-end uses it for cdtors to refer to their internal
4744 declarations, that are not real functions. Fortunately those
4745 don't have trees to be saved, so we can tell by checking their
4746 gimple_body. */
4747 if (!DECL_INITIAL (fn)
4748 && DECL_ABSTRACT_ORIGIN (fn)
4749 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4750 fn = DECL_ABSTRACT_ORIGIN (fn);
4751
4752 /* Don't try to inline functions that are not well-suited to inlining. */
4753 if (cg_edge->inline_failed)
4754 {
4755 reason = cg_edge->inline_failed;
4756 /* If this call was originally indirect, we do not want to emit any
4757 inlining related warnings or sorry messages because there are no
4758 guarantees regarding those. */
4759 if (cg_edge->indirect_inlining_edge)
4760 goto egress;
4761
4762 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4763 /* For extern inline functions that get redefined we have always
4764 silently ignored the always_inline flag. Better behavior would
4765 be to be able to keep both bodies and use the extern inline body
4766 for inlining, but we can't do that because frontends overwrite
4767 the body. */
4768 && !cg_edge->callee->redefined_extern_inline
4769 /* During early inline pass, report only when optimization is
4770 not turned on. */
4771 && (symtab->global_info_ready
4772 || !optimize
4773 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4774 /* PR 20090218-1_0.c. Body can be provided by another module. */
4775 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4776 {
4777 error ("inlining failed in call to %<always_inline%> %q+F: %s", fn,
4778 cgraph_inline_failed_string (reason));
4779 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4780 inform (gimple_location (stmt), "called from here");
4781 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4782 inform (DECL_SOURCE_LOCATION (cfun->decl),
4783 "called from this function");
4784 }
4785 else if (opt_for_fn (fn, warn_inline)
4786 && DECL_DECLARED_INLINE_P (fn)
4787 && !DECL_NO_INLINE_WARNING_P (fn)
4788 && !DECL_IN_SYSTEM_HEADER (fn)
4789 && reason != CIF_UNSPECIFIED
4790 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4791 /* Do not warn about not inlined recursive calls. */
4792 && !cg_edge->recursive_p ()
4793 /* Avoid warnings during early inline pass. */
4794 && symtab->global_info_ready)
4795 {
4796 auto_diagnostic_group d;
4797 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4798 fn, _(cgraph_inline_failed_string (reason))))
4799 {
4800 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4801 inform (gimple_location (stmt), "called from here");
4802 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4803 inform (DECL_SOURCE_LOCATION (cfun->decl),
4804 "called from this function");
4805 }
4806 }
4807 goto egress;
4808 }
4809 id->src_node = cg_edge->callee;
4810
4811 /* If callee is thunk, all we need is to adjust the THIS pointer
4812 and redirect to function being thunked. */
4813 if (id->src_node->thunk.thunk_p)
4814 {
4815 cgraph_edge *edge;
4816 tree virtual_offset = NULL;
4817 profile_count count = cg_edge->count;
4818 tree op;
4819 gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4820
4821 cgraph_edge::remove (cg_edge);
4822 edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4823 gimple_uid (stmt),
4824 profile_count::one (),
4825 profile_count::one (),
4826 true);
4827 edge->count = count;
4828 if (id->src_node->thunk.virtual_offset_p)
4829 virtual_offset = size_int (id->src_node->thunk.virtual_value);
4830 op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4831 NULL);
4832 gsi_insert_before (&iter, gimple_build_assign (op,
4833 gimple_call_arg (stmt, 0)),
4834 GSI_NEW_STMT);
4835 gcc_assert (id->src_node->thunk.this_adjusting);
4836 op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
4837 virtual_offset, id->src_node->thunk.indirect_offset);
4838
4839 gimple_call_set_arg (stmt, 0, op);
4840 gimple_call_set_fndecl (stmt, edge->callee->decl);
4841 update_stmt (stmt);
4842 id->src_node->remove ();
4843 expand_call_inline (bb, stmt, id, to_purge);
4844 maybe_remove_unused_call_args (cfun, stmt);
4845 return true;
4846 }
4847 fn = cg_edge->callee->decl;
4848 cg_edge->callee->get_untransformed_body ();
4849
4850 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4851 cg_edge->callee->verify ();
4852
4853 /* We will be inlining this callee. */
4854 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4855
4856 /* Update the callers EH personality. */
4857 if (DECL_FUNCTION_PERSONALITY (fn))
4858 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4859 = DECL_FUNCTION_PERSONALITY (fn);
4860
4861 /* Split the block before the GIMPLE_CALL. */
4862 stmt_gsi = gsi_for_stmt (stmt);
4863 gsi_prev (&stmt_gsi);
4864 e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4865 bb = e->src;
4866 return_block = e->dest;
4867 remove_edge (e);
4868
4869 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4870 been the source of abnormal edges. In this case, schedule
4871 the removal of dead abnormal edges. */
4872 gsi = gsi_start_bb (return_block);
4873 gsi_next (&gsi);
4874 purge_dead_abnormal_edges = gsi_end_p (gsi);
4875
4876 stmt_gsi = gsi_start_bb (return_block);
4877
4878 /* Build a block containing code to initialize the arguments, the
4879 actual inline expansion of the body, and a label for the return
4880 statements within the function to jump to. The type of the
4881 statement expression is the return type of the function call.
4882 ??? If the call does not have an associated block then we will
4883 remap all callee blocks to NULL, effectively dropping most of
4884 its debug information. This should only happen for calls to
4885 artificial decls inserted by the compiler itself. We need to
4886 either link the inlined blocks into the caller block tree or
4887 not refer to them in any way to not break GC for locations. */
4888 if (tree block = gimple_block (stmt))
4889 {
4890 /* We do want to assign a non-UNKNOWN_LOCATION BLOCK_SOURCE_LOCATION
4891 to make inlined_function_outer_scope_p return true on this BLOCK. */
4892 location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4893 if (loc == UNKNOWN_LOCATION)
4894 loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4895 if (loc == UNKNOWN_LOCATION)
4896 loc = BUILTINS_LOCATION;
4897 id->block = make_node (BLOCK);
4898 BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4899 BLOCK_SOURCE_LOCATION (id->block) = loc;
4900 prepend_lexical_block (block, id->block);
4901 }
4902
4903 /* Local declarations will be replaced by their equivalents in this map. */
4904 st = id->decl_map;
4905 id->decl_map = new hash_map<tree, tree>;
4906 dst = id->debug_map;
4907 id->debug_map = NULL;
4908 if (flag_stack_reuse != SR_NONE)
4909 id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
4910
4911 /* Record the function we are about to inline. */
4912 id->src_fn = fn;
4913 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4914 id->reset_location = DECL_IGNORED_P (fn);
4915 id->call_stmt = call_stmt;
4916
4917 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4918 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4919 dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4920 simtvars_save = id->dst_simt_vars;
4921 if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4922 && (simduid = bb->loop_father->simduid) != NULL_TREE
4923 && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4924 && single_imm_use (simduid, &use, &simtenter_stmt)
4925 && is_gimple_call (simtenter_stmt)
4926 && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4927 vec_alloc (id->dst_simt_vars, 0);
4928 else
4929 id->dst_simt_vars = NULL;
4930
4931 if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4932 profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4933
4934 /* If the src function contains an IFN_VA_ARG, then so will the dst
4935 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4936 prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4937 src_properties = id->src_cfun->curr_properties & prop_mask;
4938 if (src_properties != prop_mask)
4939 dst_cfun->curr_properties &= src_properties | ~prop_mask;
4940 dst_cfun->calls_eh_return |= id->src_cfun->calls_eh_return;
4941
4942 gcc_assert (!id->src_cfun->after_inlining);
4943
4944 id->entry_bb = bb;
4945 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4946 {
4947 gimple_stmt_iterator si = gsi_last_bb (bb);
4948 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4949 NOT_TAKEN),
4950 GSI_NEW_STMT);
4951 }
4952 initialize_inlined_parameters (id, stmt, fn, bb);
4953 if (debug_nonbind_markers_p && debug_inline_points && id->block
4954 && inlined_function_outer_scope_p (id->block))
4955 {
4956 gimple_stmt_iterator si = gsi_last_bb (bb);
4957 gsi_insert_after (&si, gimple_build_debug_inline_entry
4958 (id->block, DECL_SOURCE_LOCATION (id->src_fn)),
4959 GSI_NEW_STMT);
4960 }
4961
4962 if (DECL_INITIAL (fn))
4963 {
4964 if (gimple_block (stmt))
4965 {
4966 tree *var;
4967
4968 prepend_lexical_block (id->block,
4969 remap_blocks (DECL_INITIAL (fn), id));
4970 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4971 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4972 == NULL_TREE));
4973 /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4974 otherwise for DWARF DW_TAG_formal_parameter will not be children of
4975 DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4976 under it. The parameters can be then evaluated in the debugger,
4977 but don't show in backtraces. */
4978 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4979 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4980 {
4981 tree v = *var;
4982 *var = TREE_CHAIN (v);
4983 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4984 BLOCK_VARS (id->block) = v;
4985 }
4986 else
4987 var = &TREE_CHAIN (*var);
4988 }
4989 else
4990 remap_blocks_to_null (DECL_INITIAL (fn), id);
4991 }
4992
4993 /* Return statements in the function body will be replaced by jumps
4994 to the RET_LABEL. */
4995 gcc_assert (DECL_INITIAL (fn));
4996 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4997
4998 /* Find the LHS to which the result of this call is assigned. */
4999 return_slot = NULL;
5000 if (gimple_call_lhs (stmt))
5001 {
5002 modify_dest = gimple_call_lhs (stmt);
5003
5004 /* The function which we are inlining might not return a value,
5005 in which case we should issue a warning that the function
5006 does not return a value. In that case the optimizers will
5007 see that the variable to which the value is assigned was not
5008 initialized. We do not want to issue a warning about that
5009 uninitialized variable. */
5010 if (DECL_P (modify_dest))
5011 TREE_NO_WARNING (modify_dest) = 1;
5012
5013 if (gimple_call_return_slot_opt_p (call_stmt))
5014 {
5015 return_slot = modify_dest;
5016 modify_dest = NULL;
5017 }
5018 }
5019 else
5020 modify_dest = NULL;
5021
5022 /* If we are inlining a call to the C++ operator new, we don't want
5023 to use type based alias analysis on the return value. Otherwise
5024 we may get confused if the compiler sees that the inlined new
5025 function returns a pointer which was just deleted. See bug
5026 33407. */
5027 if (DECL_IS_OPERATOR_NEW_P (fn))
5028 {
5029 return_slot = NULL;
5030 modify_dest = NULL;
5031 }
5032
5033 /* Declare the return variable for the function. */
5034 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
5035
5036 /* Add local vars in this inlined callee to caller. */
5037 add_local_variables (id->src_cfun, cfun, id);
5038
5039 if (id->src_node->clone.performed_splits)
5040 {
5041 /* Any calls from the inlined function will be turned into calls from the
5042 function we inline into. We must preserve the notes about how to split
5043 parameters so that such calls can be redirected/updated. */
5044 unsigned len = vec_safe_length (id->src_node->clone.performed_splits);
5045 for (unsigned i = 0; i < len; i++)
5046 {
5047 ipa_param_performed_split ps
5048 = (*id->src_node->clone.performed_splits)[i];
5049 ps.dummy_decl = remap_decl (ps.dummy_decl, id);
5050 vec_safe_push (id->dst_node->clone.performed_splits, ps);
5051 }
5052
5053 if (flag_checking)
5054 {
5055 len = vec_safe_length (id->dst_node->clone.performed_splits);
5056 for (unsigned i = 0; i < len; i++)
5057 {
5058 ipa_param_performed_split *ps1
5059 = &(*id->dst_node->clone.performed_splits)[i];
5060 for (unsigned j = i + 1; j < len; j++)
5061 {
5062 ipa_param_performed_split *ps2
5063 = &(*id->dst_node->clone.performed_splits)[j];
5064 gcc_assert (ps1->dummy_decl != ps2->dummy_decl
5065 || ps1->unit_offset != ps2->unit_offset);
5066 }
5067 }
5068 }
5069 }
5070
5071 if (dump_enabled_p ())
5072 {
5073 char buf[128];
5074 snprintf (buf, sizeof(buf), "%4.2f",
5075 cg_edge->sreal_frequency ().to_double ());
5076 dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
5077 call_stmt,
5078 "Inlining %C to %C with frequency %s\n",
5079 id->src_node, id->dst_node, buf);
5080 if (dump_file && (dump_flags & TDF_DETAILS))
5081 {
5082 id->src_node->dump (dump_file);
5083 id->dst_node->dump (dump_file);
5084 }
5085 }
5086
5087 /* This is it. Duplicate the callee body. Assume callee is
5088 pre-gimplified. Note that we must not alter the caller
5089 function in any way before this point, as this CALL_EXPR may be
5090 a self-referential call; if we're calling ourselves, we need to
5091 duplicate our body before altering anything. */
5092 copy_body (id, bb, return_block, NULL);
5093
5094 reset_debug_bindings (id, stmt_gsi);
5095
5096 if (flag_stack_reuse != SR_NONE)
5097 for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
5098 if (!TREE_THIS_VOLATILE (p))
5099 {
5100 tree *varp = id->decl_map->get (p);
5101 if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
5102 {
5103 tree clobber = build_clobber (TREE_TYPE (*varp));
5104 gimple *clobber_stmt;
5105 clobber_stmt = gimple_build_assign (*varp, clobber);
5106 gimple_set_location (clobber_stmt, gimple_location (stmt));
5107 gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5108 }
5109 }
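
/* Illustration (not actual compiler output; dump syntax varies): for a
   callee local 'tmp' that lives in memory, the loop above inserts, right
   before the call statement that is about to be replaced, something like

     tmp = {CLOBBER};

   which tells later passes that the stack slot is dead and may be reused.  */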
5110
5111 /* Reset the escaped solution. */
5112 if (cfun->gimple_df)
5113 pt_solution_reset (&cfun->gimple_df->escaped);
5114
5115 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
5116 if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
5117 {
5118 size_t nargs = gimple_call_num_args (simtenter_stmt);
5119 vec<tree> *vars = id->dst_simt_vars;
5120 auto_vec<tree> newargs (nargs + vars->length ());
5121 for (size_t i = 0; i < nargs; i++)
5122 newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
5123 for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
5124 {
5125 tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
5126 newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
5127 }
5128 gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
5129 gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
5130 gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
5131 gsi_replace (&gsi, g, false);
5132 }
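
/* For illustration (simplified dump, hypothetical names): if the callee
   contributed two privatized locals, an entry marker such as

     simtrec.0 = .GOMP_SIMT_ENTER (simtrec.0);

   is rebuilt above so that it also receives the addresses of the new
   variables:

     simtrec.0 = .GOMP_SIMT_ENTER (simtrec.0, &D.2345, &D.2346);  */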
5133 vec_free (id->dst_simt_vars);
5134 id->dst_simt_vars = simtvars_save;
5135
5136 /* Clean up. */
5137 if (id->debug_map)
5138 {
5139 delete id->debug_map;
5140 id->debug_map = dst;
5141 }
5142 delete id->decl_map;
5143 id->decl_map = st;
5144
5145 /* Unlink the call's virtual operands before replacing the statement. */
5146 unlink_stmt_vdef (stmt);
5147 if (gimple_vdef (stmt)
5148 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
5149 release_ssa_name (gimple_vdef (stmt));
5150
5151 /* If the inlined function returns a result that we care about,
5152 substitute the GIMPLE_CALL with an assignment of the return
5153 variable to the LHS of the call. That is, if STMT was
5154 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
5155 if (use_retvar && gimple_call_lhs (stmt))
5156 {
5157 gimple *old_stmt = stmt;
5158 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
5159 gimple_set_location (stmt, gimple_location (old_stmt));
5160 gsi_replace (&stmt_gsi, stmt, false);
5161 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
5162 /* Append a clobber for id->retvar if easily possible. */
5163 if (flag_stack_reuse != SR_NONE
5164 && id->retvar
5165 && VAR_P (id->retvar)
5166 && id->retvar != return_slot
5167 && id->retvar != modify_dest
5168 && !TREE_THIS_VOLATILE (id->retvar)
5169 && !is_gimple_reg (id->retvar)
5170 && !stmt_ends_bb_p (stmt))
5171 {
5172 tree clobber = build_clobber (TREE_TYPE (id->retvar));
5173 gimple *clobber_stmt;
5174 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5175 gimple_set_location (clobber_stmt, gimple_location (old_stmt));
5176 gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5177 }
5178 }
5179 else
5180 {
5181 /* Handle the case of inlining a function with no return
5182 statement, which causes the return value to become undefined. */
5183 if (gimple_call_lhs (stmt)
5184 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
5185 {
5186 tree name = gimple_call_lhs (stmt);
5187 tree var = SSA_NAME_VAR (name);
5188 tree def = var ? ssa_default_def (cfun, var) : NULL;
5189
5190 if (def)
5191 {
5192 /* If the variable is used undefined, make this name
5193 undefined via a move. */
5194 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
5195 gsi_replace (&stmt_gsi, stmt, true);
5196 }
5197 else
5198 {
5199 if (!var)
5200 {
5201 var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
5202 SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
5203 }
5204 /* Otherwise make this variable undefined. */
5205 gsi_remove (&stmt_gsi, true);
5206 set_ssa_default_def (cfun, var, name);
5207 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
5208 }
5209 }
5210 /* Replace with a clobber for id->retvar. */
5211 else if (flag_stack_reuse != SR_NONE
5212 && id->retvar
5213 && VAR_P (id->retvar)
5214 && id->retvar != return_slot
5215 && id->retvar != modify_dest
5216 && !TREE_THIS_VOLATILE (id->retvar)
5217 && !is_gimple_reg (id->retvar))
5218 {
5219 tree clobber = build_clobber (TREE_TYPE (id->retvar));
5220 gimple *clobber_stmt;
5221 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5222 gimple_set_location (clobber_stmt, gimple_location (stmt));
5223 gsi_replace (&stmt_gsi, clobber_stmt, false);
5224 maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
5225 }
5226 else
5227 gsi_remove (&stmt_gsi, true);
5228 }
5229
5230 if (purge_dead_abnormal_edges)
5231 bitmap_set_bit (to_purge, return_block->index);
5232
5233 /* If the value of the new expression is ignored, that's OK. We
5234 don't warn about this for CALL_EXPRs, so we shouldn't warn about
5235 the equivalent inlined version either. */
5236 if (is_gimple_assign (stmt))
5237 {
5238 gcc_assert (gimple_assign_single_p (stmt)
5239 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
5240 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
5241 }
5242
5243 id->add_clobbers_to_eh_landing_pads = 0;
5244
5245 /* Output the inlining info for this abstract function, since it has been
5246 inlined. If we don't do this now, we can lose the information about the
5247 variables in the function when the blocks get blown away as soon as we
5248 remove the cgraph node. */
5249 if (gimple_block (stmt))
5250 (*debug_hooks->outlining_inline_function) (fn);
5251
5252 /* Update callgraph if needed. */
5253 cg_edge->callee->remove ();
5254
5255 id->block = NULL_TREE;
5256 id->retvar = NULL_TREE;
5257 successfully_inlined = true;
5258
5259 egress:
5260 input_location = saved_location;
5261 return successfully_inlined;
5262 }
5263
5264 /* Expand call statements in basic block BB.
5265 We can only have CALL_EXPRs as the "toplevel" tree code or nested
5266 in a MODIFY_EXPR. */
5267
5268 static bool
5269 gimple_expand_calls_inline (basic_block bb, copy_body_data *id,
5270 bitmap to_purge)
5271 {
5272 gimple_stmt_iterator gsi;
5273 bool inlined = false;
5274
5275 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
5276 {
5277 gimple *stmt = gsi_stmt (gsi);
5278 gsi_prev (&gsi);
5279
5280 if (is_gimple_call (stmt)
5281 && !gimple_call_internal_p (stmt))
5282 inlined |= expand_call_inline (bb, stmt, id, to_purge);
5283 }
5284
5285 return inlined;
5286 }
5287
5288
5289 /* Walk all basic blocks created after FIRST and try to fold every statement
5290 in the STATEMENTS pointer set. */
5291
5292 static void
5293 fold_marked_statements (int first, hash_set<gimple *> *statements)
5294 {
5295 auto_bitmap to_purge;
5296
5297 auto_vec<edge, 20> stack (n_basic_blocks_for_fn (cfun) + 2);
5298 auto_sbitmap visited (last_basic_block_for_fn (cfun));
5299 bitmap_clear (visited);
5300
5301 stack.quick_push (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5302 while (!stack.is_empty ())
5303 {
5304 /* Look at the edge on the top of the stack. */
5305 edge e = stack.pop ();
5306 basic_block dest = e->dest;
5307
5308 if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
5309 || bitmap_bit_p (visited, dest->index))
5310 continue;
5311
5312 bitmap_set_bit (visited, dest->index);
5313
5314 if (dest->index >= first)
5315 for (gimple_stmt_iterator gsi = gsi_start_bb (dest);
5316 !gsi_end_p (gsi); gsi_next (&gsi))
5317 {
5318 if (!statements->contains (gsi_stmt (gsi)))
5319 continue;
5320
5321 gimple *old_stmt = gsi_stmt (gsi);
5322 tree old_decl = (is_gimple_call (old_stmt)
5323 ? gimple_call_fndecl (old_stmt) : 0);
5324 if (old_decl && fndecl_built_in_p (old_decl))
5325 {
5326 /* Folding builtins can create multiple instructions,
5327 we need to look at all of them. */
5328 gimple_stmt_iterator i2 = gsi;
5329 gsi_prev (&i2);
5330 if (fold_stmt (&gsi))
5331 {
5332 gimple *new_stmt;
5333 /* If a builtin at the end of a bb folded into nothing,
5334 the following loop won't work. */
5335 if (gsi_end_p (gsi))
5336 {
5337 cgraph_update_edges_for_call_stmt (old_stmt,
5338 old_decl, NULL);
5339 break;
5340 }
5341 if (gsi_end_p (i2))
5342 i2 = gsi_start_bb (dest);
5343 else
5344 gsi_next (&i2);
5345 while (1)
5346 {
5347 new_stmt = gsi_stmt (i2);
5348 update_stmt (new_stmt);
5349 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5350 new_stmt);
5351
5352 if (new_stmt == gsi_stmt (gsi))
5353 {
5354 /* It is okay to check only for the very last
5355 of these statements. If it is a throwing
5356 statement nothing will change. If it isn't
5357 this can remove EH edges. That would only be
5358 wrong if some intermediate statement could
5359 throw while the last one cannot; handling that
5360 would mean splitting the block, which we cannot
5361 do here, and we would lose anyway. And as
5362 builtins probably never throw, this all is
5363 moot anyway. */
5364 if (maybe_clean_or_replace_eh_stmt (old_stmt,
5365 new_stmt))
5366 bitmap_set_bit (to_purge, dest->index);
5367 break;
5368 }
5369 gsi_next (&i2);
5370 }
5371 }
5372 }
5373 else if (fold_stmt (&gsi))
5374 {
5375 /* Re-read the statement from GSI as fold_stmt() may
5376 have changed it. */
5377 gimple *new_stmt = gsi_stmt (gsi);
5378 update_stmt (new_stmt);
5379
5380 if (is_gimple_call (old_stmt)
5381 || is_gimple_call (new_stmt))
5382 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5383 new_stmt);
5384
5385 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5386 bitmap_set_bit (to_purge, dest->index);
5387 }
5388 }
5389
5390 if (EDGE_COUNT (dest->succs) > 0)
5391 {
5392 /* Avoid warnings emitted from folding statements that
5393 became unreachable because of inlined function parameter
5394 propagation. */
5395 e = find_taken_edge (dest, NULL_TREE);
5396 if (e)
5397 stack.quick_push (e);
5398 else
5399 {
5400 edge_iterator ei;
5401 FOR_EACH_EDGE (e, ei, dest->succs)
5402 stack.safe_push (e);
5403 }
5404 }
5405 }
5406
5407 gimple_purge_all_dead_eh_edges (to_purge);
5408 }
5409
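/* A condensed sketch (hypothetical helper, not part of GCC) of the
   worklist walk above, shown in isolation: only the statically taken
   edge of a folded condition is followed, so blocks made unreachable by
   inlined-parameter propagation are never visited (and never warned
   about).  */
#if 0
static void
walk_live_blocks (void (*visit) (basic_block))
{
  auto_vec<edge, 20> stack;
  auto_sbitmap visited (last_basic_block_for_fn (cfun));
  bitmap_clear (visited);

  stack.safe_push (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  while (!stack.is_empty ())
    {
      edge e = stack.pop ();
      basic_block dest = e->dest;
      if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  || bitmap_bit_p (visited, dest->index))
	continue;
      bitmap_set_bit (visited, dest->index);
      visit (dest);

      /* Follow only the known-taken edge when the controlling
	 condition has folded to a constant.  */
      if (edge taken = find_taken_edge (dest, NULL_TREE))
	stack.safe_push (taken);
      else
	{
	  edge_iterator ei;
	  edge succ;
	  FOR_EACH_EDGE (succ, ei, dest->succs)
	    stack.safe_push (succ);
	}
    }
}
#endif
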
5410 /* Expand calls to inline functions in the body of FN. */
5411
5412 unsigned int
5413 optimize_inline_calls (tree fn)
5414 {
5415 copy_body_data id;
5416 basic_block bb;
5417 int last = n_basic_blocks_for_fn (cfun);
5418 bool inlined_p = false;
5419
5420 /* Clear out ID. */
5421 memset (&id, 0, sizeof (id));
5422
5423 id.src_node = id.dst_node = cgraph_node::get (fn);
5424 gcc_assert (id.dst_node->definition);
5425 id.dst_fn = fn;
5426 /* Or any functions that aren't finished yet. */
5427 if (current_function_decl)
5428 id.dst_fn = current_function_decl;
5429
5430 id.copy_decl = copy_decl_maybe_to_var;
5431 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5432 id.transform_new_cfg = false;
5433 id.transform_return_to_modify = true;
5434 id.transform_parameter = true;
5435 id.transform_lang_insert_block = NULL;
5436 id.statements_to_fold = new hash_set<gimple *>;
5437
5438 push_gimplify_context ();
5439
5440 /* We make no attempts to keep dominance info up-to-date. */
5441 free_dominance_info (CDI_DOMINATORS);
5442 free_dominance_info (CDI_POST_DOMINATORS);
5443
5444 /* Register specific gimple functions. */
5445 gimple_register_cfg_hooks ();
5446
5447 /* Reach the trees by walking over the CFG, and note the
5448 enclosing basic-blocks in the call edges. */
5449 /* We walk the blocks going forward, because inlined function bodies
5450 will split id->current_basic_block, and the new blocks will
5451 follow it; we'll trudge through them, processing their CALL_EXPRs
5452 along the way. */
5453 auto_bitmap to_purge;
5454 FOR_EACH_BB_FN (bb, cfun)
5455 inlined_p |= gimple_expand_calls_inline (bb, &id, to_purge);
5456
5457 pop_gimplify_context (NULL);
5458
5459 if (flag_checking)
5460 {
5461 struct cgraph_edge *e;
5462
5463 id.dst_node->verify ();
5464
5465 /* Double check that we inlined everything we are supposed to inline. */
5466 for (e = id.dst_node->callees; e; e = e->next_callee)
5467 gcc_assert (e->inline_failed);
5468 }
5469
5470 /* If we didn't inline into the function there is nothing to do. */
5471 if (!inlined_p)
5472 {
5473 delete id.statements_to_fold;
5474 return 0;
5475 }
5476
5477 /* Fold queued statements. */
5478 update_max_bb_count ();
5479 fold_marked_statements (last, id.statements_to_fold);
5480 delete id.statements_to_fold;
5481
5482 /* Finally purge EH and abnormal edges from the call stmts we inlined.
5483 We need to do this after fold_marked_statements since that may walk
5484 the SSA use-def chain. */
5485 unsigned i;
5486 bitmap_iterator bi;
5487 EXECUTE_IF_SET_IN_BITMAP (to_purge, 0, i, bi)
5488 {
5489 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
5490 if (bb)
5491 {
5492 gimple_purge_dead_eh_edges (bb);
5493 gimple_purge_dead_abnormal_call_edges (bb);
5494 }
5495 }
5496
5497 gcc_assert (!id.debug_stmts.exists ());
5498
5499 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5500 number_blocks (fn);
5501
5502 delete_unreachable_blocks_update_callgraph (id.dst_node, false);
5503 id.dst_node->calls_comdat_local = id.dst_node->check_calls_comdat_local_p ();
5504
5505 if (flag_checking)
5506 id.dst_node->verify ();
5507
5508 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5509 not possible yet - the IPA passes might make various functions not
5510 throw and they don't care to proactively update local EH info. This is
5511 done later in the fixup_cfg pass, which also executes the verification. */
5512 return (TODO_update_ssa
5513 | TODO_cleanup_cfg
5514 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5515 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5516 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5517 ? TODO_rebuild_frequencies : 0));
5518 }
5519
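/* For illustration (hypothetical pass, not part of this file): a GIMPLE
   pass built around this function would simply forward the returned TODO
   flags from its execute hook, letting the pass manager perform the
   requested SSA update and CFG cleanup:

     unsigned int
     pass_example_inline::execute (function *)
     {
       return optimize_inline_calls (current_function_decl);
     }
*/
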
5520 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5521
5522 tree
5523 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5524 {
5525 enum tree_code code = TREE_CODE (*tp);
5526 enum tree_code_class cl = TREE_CODE_CLASS (code);
5527
5528 /* We make copies of most nodes. */
5529 if (IS_EXPR_CODE_CLASS (cl)
5530 || code == TREE_LIST
5531 || code == TREE_VEC
5532 || code == TYPE_DECL
5533 || code == OMP_CLAUSE)
5534 {
5535 /* Because the chain gets clobbered when we make a copy, we save it
5536 here. */
5537 tree chain = NULL_TREE, new_tree;
5538
5539 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5540 chain = TREE_CHAIN (*tp);
5541
5542 /* Copy the node. */
5543 new_tree = copy_node (*tp);
5544
5545 *tp = new_tree;
5546
5547 /* Now, restore the chain, if appropriate. That will cause
5548 walk_tree to walk into the chain as well. */
5549 if (code == PARM_DECL
5550 || code == TREE_LIST
5551 || code == OMP_CLAUSE)
5552 TREE_CHAIN (*tp) = chain;
5553
5554 /* For now, we don't update BLOCKs when we make copies. So, we
5555 have to nullify all BIND_EXPRs. */
5556 if (TREE_CODE (*tp) == BIND_EXPR)
5557 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5558 }
5559 else if (code == CONSTRUCTOR)
5560 {
5561 /* CONSTRUCTOR nodes need special handling because
5562 we need to duplicate the vector of elements. */
5563 tree new_tree;
5564
5565 new_tree = copy_node (*tp);
5566 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5567 *tp = new_tree;
5568 }
5569 else if (code == STATEMENT_LIST)
5570 /* We used to just abort on STATEMENT_LIST, but we can run into them
5571 with statement-expressions (c++/40975). */
5572 copy_statement_list (tp);
5573 else if (TREE_CODE_CLASS (code) == tcc_type)
5574 *walk_subtrees = 0;
5575 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5576 *walk_subtrees = 0;
5577 else if (TREE_CODE_CLASS (code) == tcc_constant)
5578 *walk_subtrees = 0;
5579 return NULL_TREE;
5580 }
5581
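/* A minimal usage sketch (hypothetical helper): deep-copying a GENERIC
   expression by walking it with copy_tree_r.  Expression nodes are
   replaced in place by fresh copies, while decls, types and constants
   stay shared, as the callback above arranges.  */
#if 0
static tree
deep_copy_generic_expr (tree expr)
{
  walk_tree (&expr, copy_tree_r, NULL, NULL);
  return expr;
}
#endif
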
5582 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5583 information indicating to what new SAVE_EXPR this one should be mapped,
5584 use that one. Otherwise, create a new node and enter it in ST. FN is
5585 the function into which the copy will be placed. */
5586
5587 static void
5588 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5589 {
5590 tree *n;
5591 tree t;
5592
5593 /* See if we already encountered this SAVE_EXPR. */
5594 n = st->get (*tp);
5595
5596 /* If we didn't already remap this SAVE_EXPR, do so now. */
5597 if (!n)
5598 {
5599 t = copy_node (*tp);
5600
5601 /* Remember this SAVE_EXPR. */
5602 st->put (*tp, t);
5603 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5604 st->put (t, t);
5605 }
5606 else
5607 {
5608 /* We've already walked into this SAVE_EXPR; don't do it again. */
5609 *walk_subtrees = 0;
5610 t = *n;
5611 }
5612
5613 /* Replace this SAVE_EXPR with the copy. */
5614 *tp = t;
5615 }
5616
5617 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5618 label, copies the declaration and enters it in the splay_tree in DATA (which
5619 is really a 'copy_body_data *'). */
5620
5621 static tree
5622 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5623 bool *handled_ops_p ATTRIBUTE_UNUSED,
5624 struct walk_stmt_info *wi)
5625 {
5626 copy_body_data *id = (copy_body_data *) wi->info;
5627 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5628
5629 if (stmt)
5630 {
5631 tree decl = gimple_label_label (stmt);
5632
5633 /* Copy the decl and remember the copy. */
5634 insert_decl_map (id, decl, id->copy_decl (decl, id));
5635 }
5636
5637 return NULL_TREE;
5638 }
5639
5640 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5641 struct walk_stmt_info *wi);
5642
5643 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5644 Using the decl map in the copy_body_data passed via WI->info,
5645 remaps all local declarations to appropriate replacements in gimple
5646 operands. */
5647
5648 static tree
5649 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5650 {
5651 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5652 copy_body_data *id = (copy_body_data *) wi->info;
5653 hash_map<tree, tree> *st = id->decl_map;
5654 tree *n;
5655 tree expr = *tp;
5656
5657 /* For recursive invocations this is no longer the LHS itself. */
5658 bool is_lhs = wi->is_lhs;
5659 wi->is_lhs = false;
5660
5661 if (TREE_CODE (expr) == SSA_NAME)
5662 {
5663 *tp = remap_ssa_name (*tp, id);
5664 *walk_subtrees = 0;
5665 if (is_lhs)
5666 SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5667 }
5668 /* Only a local declaration (variable or label). */
5669 else if ((VAR_P (expr) && !TREE_STATIC (expr))
5670 || TREE_CODE (expr) == LABEL_DECL)
5671 {
5672 /* Lookup the declaration. */
5673 n = st->get (expr);
5674
5675 /* If it's there, remap it. */
5676 if (n)
5677 *tp = *n;
5678 *walk_subtrees = 0;
5679 }
5680 else if (TREE_CODE (expr) == STATEMENT_LIST
5681 || TREE_CODE (expr) == BIND_EXPR
5682 || TREE_CODE (expr) == SAVE_EXPR)
5683 gcc_unreachable ();
5684 else if (TREE_CODE (expr) == TARGET_EXPR)
5685 {
5686 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5687 It's OK for this to happen if it was part of a subtree that
5688 isn't immediately expanded, such as operand 2 of another
5689 TARGET_EXPR. */
5690 if (!TREE_OPERAND (expr, 1))
5691 {
5692 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5693 TREE_OPERAND (expr, 3) = NULL_TREE;
5694 }
5695 }
5696 else if (TREE_CODE (expr) == OMP_CLAUSE)
5697 {
5698 /* Before the omplower pass completes, some OMP clauses can contain
5699 sequences that are neither copied by gimple_seq_copy nor walked by
5700 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5701 in those situations, we have to copy and process them explicitly. */
5702
5703 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5704 {
5705 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5706 seq = duplicate_remap_omp_clause_seq (seq, wi);
5707 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5708 }
5709 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5710 {
5711 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5712 seq = duplicate_remap_omp_clause_seq (seq, wi);
5713 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5714 }
5715 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5716 {
5717 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5718 seq = duplicate_remap_omp_clause_seq (seq, wi);
5719 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5720 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5721 seq = duplicate_remap_omp_clause_seq (seq, wi);
5722 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5723 }
5724 }
5725
5726 /* Keep iterating. */
5727 return NULL_TREE;
5728 }
5729
5730
5731 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5732 Using the decl map in the copy_body_data passed via WI->info,
5733 remaps all local declarations to appropriate replacements in gimple
5734 statements. */
5735
5736 static tree
5737 replace_locals_stmt (gimple_stmt_iterator *gsip,
5738 bool *handled_ops_p ATTRIBUTE_UNUSED,
5739 struct walk_stmt_info *wi)
5740 {
5741 copy_body_data *id = (copy_body_data *) wi->info;
5742 gimple *gs = gsi_stmt (*gsip);
5743
5744 if (gbind *stmt = dyn_cast <gbind *> (gs))
5745 {
5746 tree block = gimple_bind_block (stmt);
5747
5748 if (block)
5749 {
5750 remap_block (&block, id);
5751 gimple_bind_set_block (stmt, block);
5752 }
5753
5754 /* This will remap a lot of the same decls again, but this should be
5755 harmless. */
5756 if (gimple_bind_vars (stmt))
5757 {
5758 tree old_var, decls = gimple_bind_vars (stmt);
5759
5760 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5761 if (!can_be_nonlocal (old_var, id)
5762 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5763 remap_decl (old_var, id);
5764
5765 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5766 id->prevent_decl_creation_for_types = true;
5767 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5768 id->prevent_decl_creation_for_types = false;
5769 }
5770 }
5771
5772 /* Keep iterating. */
5773 return NULL_TREE;
5774 }
5775
5776 /* Create a copy of SEQ and remap all decls in it. */
5777
5778 static gimple_seq
5779 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5780 {
5781 if (!seq)
5782 return NULL;
5783
5784 /* If there are any labels in OMP sequences, they can only be referred to
5785 from within the sequence itself, and therefore we can do both steps here. */
5786 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5787 gimple_seq copy = gimple_seq_copy (seq);
5788 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5789 return copy;
5790 }
5791
5792 /* Copies everything in SEQ and replaces variables and labels local to
5793 current_function_decl. */
5794
5795 gimple_seq
5796 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5797 {
5798 copy_body_data id;
5799 struct walk_stmt_info wi;
5800 gimple_seq copy;
5801
5802 /* There's nothing to do for NULL_TREE. */
5803 if (seq == NULL)
5804 return seq;
5805
5806 /* Set up ID. */
5807 memset (&id, 0, sizeof (id));
5808 id.src_fn = current_function_decl;
5809 id.dst_fn = current_function_decl;
5810 id.src_cfun = cfun;
5811 id.decl_map = new hash_map<tree, tree>;
5812 id.debug_map = NULL;
5813
5814 id.copy_decl = copy_decl_no_change;
5815 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5816 id.transform_new_cfg = false;
5817 id.transform_return_to_modify = false;
5818 id.transform_parameter = false;
5819 id.transform_lang_insert_block = NULL;
5820
5821 /* Walk the tree once to find local labels. */
5822 memset (&wi, 0, sizeof (wi));
5823 hash_set<tree> visited;
5824 wi.info = &id;
5825 wi.pset = &visited;
5826 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5827
5828 copy = gimple_seq_copy (seq);
5829
5830 /* Walk the copy, remapping decls. */
5831 memset (&wi, 0, sizeof (wi));
5832 wi.info = &id;
5833 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5834
5835 /* Clean up. */
5836 delete id.decl_map;
5837 if (id.debug_map)
5838 delete id.debug_map;
5839 if (id.dependence_map)
5840 {
5841 delete id.dependence_map;
5842 id.dependence_map = NULL;
5843 }
5844
5845 return copy;
5846 }
5847
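/* A minimal usage sketch (hypothetical caller): duplicating the body of a
   GIMPLE bind so that the copy can be edited independently:

     gimple_seq body = gimple_bind_body (bind_stmt);
     gimple_seq dup = copy_gimple_seq_and_replace_locals (body);

   Locals and labels declared inside BODY get fresh decls in DUP, while
   references to variables of enclosing scopes are left untouched.  */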
5848
5849 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5850
5851 static tree
5852 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5853 {
5854 if (*tp == data)
5855 return (tree) data;
5856 else
5857 return NULL;
5858 }
5859
5860 DEBUG_FUNCTION bool
5861 debug_find_tree (tree top, tree search)
5862 {
5863 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5864 }
5865
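/* Typical use from a debugging session (illustrative; 'fndecl' and
   'suspect' stand for whatever trees are at hand):

     (gdb) call debug_find_tree (DECL_SAVED_TREE (fndecl), suspect)

   which returns true iff SUSPECT occurs somewhere beneath the root.  */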
5866
5867 /* Declare the variables created by the inliner. Add all the variables in
5868 VARS to BLOCK. */
5869
5870 static void
5871 declare_inline_vars (tree block, tree vars)
5872 {
5873 tree t;
5874 for (t = vars; t; t = DECL_CHAIN (t))
5875 {
5876 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5877 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5878 add_local_decl (cfun, t);
5879 }
5880
5881 if (block)
5882 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5883 }
5884
5885 /* Finish up COPY, a copy of DECL, after duplication. The DECL originally
5886 lived in ID->src_fn and the copy will live in ID->dst_fn; fix up flags,
5887 context and abstract origin accordingly. */
5888
5889 tree
5890 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5891 {
5892 /* Don't generate debug information for the copy if we wouldn't have
5893 generated it for the original. */
5894 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5895 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5896
5897 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5898 declaration inspired this copy. */
5899 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5900
5901 /* The new variable/label has no RTL, yet. */
5902 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5903 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5904 SET_DECL_RTL (copy, 0);
5905 /* For vector typed decls make sure to update DECL_MODE according
5906 to the new function context. */
5907 if (VECTOR_TYPE_P (TREE_TYPE (copy)))
5908 SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
5909
5910 /* These args would always appear unused, if not for this. */
5911 TREE_USED (copy) = 1;
5912
5913 /* Set the context for the new declaration. */
5914 if (!DECL_CONTEXT (decl))
5915 /* Globals stay global. */
5916 ;
5917 else if (DECL_CONTEXT (decl) != id->src_fn)
5918 /* Things that weren't in the scope of the function we're inlining
5919 from aren't in the scope we're inlining to, either. */
5920 ;
5921 else if (TREE_STATIC (decl))
5922 /* Function-scoped static variables should stay in the original
5923 function. */
5924 ;
5925 else
5926 {
5927 /* Ordinary automatic local variables are now in the scope of the
5928 new function. */
5929 DECL_CONTEXT (copy) = id->dst_fn;
5930 if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5931 {
5932 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5933 DECL_ATTRIBUTES (copy)
5934 = tree_cons (get_identifier ("omp simt private"), NULL,
5935 DECL_ATTRIBUTES (copy));
5936 id->dst_simt_vars->safe_push (copy);
5937 }
5938 }
5939
5940 return copy;
5941 }
5942
5943 /* Create a new VAR_DECL that is identical in all respects to DECL, except
5944 that DECL itself can be either a PARM_DECL or a RESULT_DECL. The original
5945 DECL must come from ID->src_fn and the copy will be part of ID->dst_fn. */
5946
5947 tree
5948 copy_decl_to_var (tree decl, copy_body_data *id)
5949 {
5950 tree copy, type;
5951
5952 gcc_assert (TREE_CODE (decl) == PARM_DECL
5953 || TREE_CODE (decl) == RESULT_DECL);
5954
5955 type = TREE_TYPE (decl);
5956
5957 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5958 VAR_DECL, DECL_NAME (decl), type);
5959 if (DECL_PT_UID_SET_P (decl))
5960 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5961 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5962 TREE_READONLY (copy) = TREE_READONLY (decl);
5963 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5964 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5965 DECL_BY_REFERENCE (copy) = DECL_BY_REFERENCE (decl);
5966
5967 return copy_decl_for_dup_finish (id, decl, copy);
5968 }
5969
5970 /* Like copy_decl_to_var, but create a return slot object instead of a
5971 pointer variable for return by invisible reference. */
5972
5973 static tree
5974 copy_result_decl_to_var (tree decl, copy_body_data *id)
5975 {
5976 tree copy, type;
5977
5978 gcc_assert (TREE_CODE (decl) == PARM_DECL
5979 || TREE_CODE (decl) == RESULT_DECL);
5980
5981 type = TREE_TYPE (decl);
5982 if (DECL_BY_REFERENCE (decl))
5983 type = TREE_TYPE (type);
5984
5985 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5986 VAR_DECL, DECL_NAME (decl), type);
5987 if (DECL_PT_UID_SET_P (decl))
5988 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5989 TREE_READONLY (copy) = TREE_READONLY (decl);
5990 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5991 if (!DECL_BY_REFERENCE (decl))
5992 {
5993 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5994 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5995 }
5996
5997 return copy_decl_for_dup_finish (id, decl, copy);
5998 }
5999
6000 tree
6001 copy_decl_no_change (tree decl, copy_body_data *id)
6002 {
6003 tree copy;
6004
6005 copy = copy_node (decl);
6006
6007 /* The COPY is not abstract; it will be generated in DST_FN. */
6008 DECL_ABSTRACT_P (copy) = false;
6009 lang_hooks.dup_lang_specific_decl (copy);
6010
6011 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
6012 been taken; it's for internal bookkeeping in expand_goto_internal. */
6013 if (TREE_CODE (copy) == LABEL_DECL)
6014 {
6015 TREE_ADDRESSABLE (copy) = 0;
6016 LABEL_DECL_UID (copy) = -1;
6017 }
6018
6019 return copy_decl_for_dup_finish (id, decl, copy);
6020 }
6021
6022 static tree
6023 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
6024 {
6025 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
6026 return copy_decl_to_var (decl, id);
6027 else
6028 return copy_decl_no_change (decl, id);
6029 }
6030
6031 /* Return a copy of the function's argument tree without any modifications. */
6032
6033 static tree
6034 copy_arguments_nochange (tree orig_parm, copy_body_data * id)
6035 {
6036 tree arg, *parg;
6037 tree new_parm = NULL;
6038
6039 parg = &new_parm;
6040 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg))
6041 {
6042 tree new_tree = remap_decl (arg, id);
6043 if (TREE_CODE (new_tree) != PARM_DECL)
6044 new_tree = id->copy_decl (arg, id);
6045 lang_hooks.dup_lang_specific_decl (new_tree);
6046 *parg = new_tree;
6047 parg = &DECL_CHAIN (new_tree);
6048 }
6049 return new_parm;
6050 }
6051
6052 /* Return a copy of the function's static chain. */
6053 static tree
6054 copy_static_chain (tree static_chain, copy_body_data * id)
6055 {
6056 tree *chain_copy, *pvar;
6057
6058 chain_copy = &static_chain;
6059 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
6060 {
6061 tree new_tree = remap_decl (*pvar, id);
6062 lang_hooks.dup_lang_specific_decl (new_tree);
6063 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
6064 *pvar = new_tree;
6065 }
6066 return static_chain;
6067 }
6068
6069 /* Return true if the function is allowed to be versioned.
6070 This is a guard for the versioning functionality. */
6071
6072 bool
6073 tree_versionable_function_p (tree fndecl)
6074 {
6075 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
6076 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
6077 }
6078
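/* For reference, user code can veto versioning of a particular function
   with the corresponding attribute, e.g.:

     __attribute__ ((noclone))
     int keep_single_body (int x)
     {
       return x + 1;
     }

   copy_forbidden additionally rejects bodies that can never be duplicated,
   for instance functions that receive a non-local goto.  */
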
6079 /* Update clone info after duplication. */
6080
6081 static void
6082 update_clone_info (copy_body_data * id)
6083 {
6084 vec<ipa_param_performed_split, va_gc> *cur_performed_splits
6085 = id->dst_node->clone.performed_splits;
6086 if (cur_performed_splits)
6087 {
6088 unsigned len = cur_performed_splits->length ();
6089 for (unsigned i = 0; i < len; i++)
6090 {
6091 ipa_param_performed_split *ps = &(*cur_performed_splits)[i];
6092 ps->dummy_decl = remap_decl (ps->dummy_decl, id);
6093 }
6094 }
6095
6096 struct cgraph_node *node;
6097 if (!id->dst_node->clones)
6098 return;
6099 for (node = id->dst_node->clones; node != id->dst_node;)
6100 {
6101 /* First update replace maps to match the new body. */
6102 if (node->clone.tree_map)
6103 {
6104 unsigned int i;
6105 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
6106 {
6107 struct ipa_replace_map *replace_info;
6108 replace_info = (*node->clone.tree_map)[i];
6109 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
6110 }
6111 }
6112 if (node->clone.performed_splits)
6113 {
6114 unsigned len = vec_safe_length (node->clone.performed_splits);
6115 for (unsigned i = 0; i < len; i++)
6116 {
6117 ipa_param_performed_split *ps
6118 = &(*node->clone.performed_splits)[i];
6119 ps->dummy_decl = remap_decl (ps->dummy_decl, id);
6120 }
6121 }
6122 if (unsigned len = vec_safe_length (cur_performed_splits))
6123 {
6124 /* We do not want to add current performed splits when we are saving
6125 a copy of function body for later during inlining, that would just
6126 duplicate all entries. So let's have a look whether anything
6127 referring to the first dummy_decl is present. */
6128 unsigned dst_len = vec_safe_length (node->clone.performed_splits);
6129 ipa_param_performed_split *first = &(*cur_performed_splits)[0];
6130 for (unsigned i = 0; i < dst_len; i++)
6131 if ((*node->clone.performed_splits)[i].dummy_decl
6132 == first->dummy_decl)
6133 {
6134 len = 0;
6135 break;
6136 }
6137
6138 for (unsigned i = 0; i < len; i++)
6139 vec_safe_push (node->clone.performed_splits,
6140 (*cur_performed_splits)[i]);
6141 if (flag_checking)
6142 {
6143 for (unsigned i = 0; i < dst_len; i++)
6144 {
6145 ipa_param_performed_split *ps1
6146 = &(*node->clone.performed_splits)[i];
6147 for (unsigned j = i + 1; j < dst_len; j++)
6148 {
6149 ipa_param_performed_split *ps2
6150 = &(*node->clone.performed_splits)[j];
6151 gcc_assert (ps1->dummy_decl != ps2->dummy_decl
6152 || ps1->unit_offset != ps2->unit_offset);
6153 }
6154 }
6155 }
6156 }
6157
6158 if (node->clones)
6159 node = node->clones;
6160 else if (node->next_sibling_clone)
6161 node = node->next_sibling_clone;
6162 else
6163 {
6164 while (node != id->dst_node && !node->next_sibling_clone)
6165 node = node->clone_of;
6166 if (node != id->dst_node)
6167 node = node->next_sibling_clone;
6168 }
6169 }
6170 }
6171
6172 /* Create a copy of a function's tree.
6173 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
6174 of the original function and the new copied function
6175 respectively. In case we want to replace a DECL
6176 tree with another tree while duplicating the function's
6177 body, TREE_MAP represents the mapping between these
6178 trees. If UPDATE_CLONES is set, the call_stmt fields
6179 of edges of clones of the function will be updated.
6180
6181 If non-NULL, PARAM_ADJUSTMENTS determines how the function prototype (i.e.
6182 the function parameters and return value) should be modified.
6183 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
6184 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
6185 */
6186 void
6187 tree_function_versioning (tree old_decl, tree new_decl,
6188 vec<ipa_replace_map *, va_gc> *tree_map,
6189 ipa_param_adjustments *param_adjustments,
6190 bool update_clones, bitmap blocks_to_copy,
6191 basic_block new_entry)
6192 {
6193 struct cgraph_node *old_version_node;
6194 struct cgraph_node *new_version_node;
6195 copy_body_data id;
6196 tree p;
6197 unsigned i;
6198 struct ipa_replace_map *replace_info;
6199 basic_block old_entry_block, bb;
6200 auto_vec<gimple *, 10> init_stmts;
6201 tree vars = NULL_TREE;
6202
6203 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
6204 && TREE_CODE (new_decl) == FUNCTION_DECL);
6205 DECL_POSSIBLY_INLINED (old_decl) = 1;
6206
6207 old_version_node = cgraph_node::get (old_decl);
6208 gcc_checking_assert (old_version_node);
6209 new_version_node = cgraph_node::get (new_decl);
6210 gcc_checking_assert (new_version_node);
6211
6212 /* Copy over debug args. */
6213 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
6214 {
6215 vec<tree, va_gc> **new_debug_args, **old_debug_args;
6216 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
6217 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
6218 old_debug_args = decl_debug_args_lookup (old_decl);
6219 if (old_debug_args)
6220 {
6221 new_debug_args = decl_debug_args_insert (new_decl);
6222 *new_debug_args = vec_safe_copy (*old_debug_args);
6223 }
6224 }
6225
6226 /* Output the inlining info for this abstract function, since it has been
6227 inlined. If we don't do this now, we can lose the information about the
6228 variables in the function when the blocks get blown away as soon as we
6229 remove the cgraph node. */
6230 (*debug_hooks->outlining_inline_function) (old_decl);
6231
6232 DECL_ARTIFICIAL (new_decl) = 1;
6233 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
6234 if (DECL_ORIGIN (old_decl) == old_decl)
6235 old_version_node->used_as_abstract_origin = true;
6236 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
6237
6238 /* Prepare the data structures for the tree copy. */
6239 memset (&id, 0, sizeof (id));
6240
6241 /* Generate a new name for the new version. */
6242 id.statements_to_fold = new hash_set<gimple *>;
6243
6244 id.decl_map = new hash_map<tree, tree>;
6245 id.debug_map = NULL;
6246 id.src_fn = old_decl;
6247 id.dst_fn = new_decl;
6248 id.src_node = old_version_node;
6249 id.dst_node = new_version_node;
6250 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
6251 id.blocks_to_copy = blocks_to_copy;
6252
6253 id.copy_decl = copy_decl_no_change;
6254 id.transform_call_graph_edges
6255 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
6256 id.transform_new_cfg = true;
6257 id.transform_return_to_modify = false;
6258 id.transform_parameter = false;
6259 id.transform_lang_insert_block = NULL;
6260
6261 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
6262 (DECL_STRUCT_FUNCTION (old_decl));
6263 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
6264 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
6265 initialize_cfun (new_decl, old_decl,
6266 new_entry ? new_entry->count : old_entry_block->count);
6267 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
6268 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
6269 = id.src_cfun->gimple_df->ipa_pta;
6270
6271 /* Copy the function's static chain. */
6272 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
6273 if (p)
6274 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
6275 = copy_static_chain (p, &id);
6276
6277 auto_vec<int, 16> new_param_indices;
6278 ipa_param_adjustments *old_param_adjustments
6279 = old_version_node->clone.param_adjustments;
6280 if (old_param_adjustments)
6281 old_param_adjustments->get_updated_indices (&new_param_indices);
6282
6283 /* If there's a tree_map, prepare for substitution. */
6284 if (tree_map)
6285 for (i = 0; i < tree_map->length (); i++)
6286 {
6287 gimple *init;
6288 replace_info = (*tree_map)[i];
6289
6290 int p = replace_info->parm_num;
6291 if (old_param_adjustments)
6292 p = new_param_indices[p];
6293
6294 tree parm;
6295 tree req_type, new_type;
6296
6297 for (parm = DECL_ARGUMENTS (old_decl); p;
6298 parm = DECL_CHAIN (parm))
6299 p--;
6300 tree old_tree = parm;
6301 req_type = TREE_TYPE (parm);
6302 new_type = TREE_TYPE (replace_info->new_tree);
6303 if (!useless_type_conversion_p (req_type, new_type))
6304 {
6305 if (fold_convertible_p (req_type, replace_info->new_tree))
6306 replace_info->new_tree
6307 = fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
6308 else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
6309 replace_info->new_tree
6310 = fold_build1 (VIEW_CONVERT_EXPR, req_type,
6311 replace_info->new_tree);
6312 else
6313 {
6314 if (dump_file)
6315 {
6316 fprintf (dump_file, " const ");
6317 print_generic_expr (dump_file,
6318 replace_info->new_tree);
6319 fprintf (dump_file,
6320 " can't be converted to param ");
6321 print_generic_expr (dump_file, parm);
6322 fprintf (dump_file, "\n");
6323 }
6324 old_tree = NULL;
6325 }
6326 }
6327
6328 if (old_tree)
6329 {
6330 init = setup_one_parameter (&id, old_tree, replace_info->new_tree,
6331 id.src_fn, NULL, &vars);
6332 if (init)
6333 init_stmts.safe_push (init);
6334 }
6335 }
6336
6337 ipa_param_body_adjustments *param_body_adjs = NULL;
6338 if (param_adjustments)
6339 {
6340 param_body_adjs = new ipa_param_body_adjustments (param_adjustments,
6341 new_decl, old_decl,
6342 &id, &vars, tree_map);
6343 id.param_body_adjs = param_body_adjs;
6344 DECL_ARGUMENTS (new_decl) = param_body_adjs->get_new_param_chain ();
6345 }
6346 else if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6347 DECL_ARGUMENTS (new_decl)
6348 = copy_arguments_nochange (DECL_ARGUMENTS (old_decl), &id);
6349
6350 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6351 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6352
6353 declare_inline_vars (DECL_INITIAL (new_decl), vars);
6354
6355 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6356 /* Add local vars. */
6357 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6358
6359 if (DECL_RESULT (old_decl) == NULL_TREE)
6360 ;
6361 else if (param_adjustments && param_adjustments->m_skip_return
6362 && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6363 {
6364 tree resdecl_repl = copy_result_decl_to_var (DECL_RESULT (old_decl),
6365 &id);
6366 declare_inline_vars (NULL, resdecl_repl);
6367 insert_decl_map (&id, DECL_RESULT (old_decl), resdecl_repl);
6368
6369 DECL_RESULT (new_decl)
6370 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6371 RESULT_DECL, NULL_TREE, void_type_node);
6372 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6373 DECL_IS_MALLOC (new_decl) = false;
6374 cfun->returns_struct = 0;
6375 cfun->returns_pcc_struct = 0;
6376 }
6377 else
6378 {
6379 tree old_name;
6380 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6381 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6382 if (gimple_in_ssa_p (id.src_cfun)
6383 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6384 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6385 {
6386 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6387 insert_decl_map (&id, old_name, new_name);
6388 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6389 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6390 }
6391 }
6392
6393 /* Set up the destination function's loop tree. */
6394 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6395 {
6396 cfun->curr_properties &= ~PROP_loops;
6397 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6398 cfun->curr_properties |= PROP_loops;
6399 }
6400
6401 /* Copy the Function's body. */
6402 copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6403 new_entry);
6404
6405 /* Renumber the lexical scoping (non-code) blocks consecutively. */
6406 number_blocks (new_decl);
6407
6408 /* We want to create the BB unconditionally, so that the addition of
6409 debug stmts doesn't affect BB count, which may in the end cause
6410 codegen differences. */
6411 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6412 while (init_stmts.length ())
6413 insert_init_stmt (&id, bb, init_stmts.pop ());
6414 update_clone_info (&id);
6415
6416 /* Remap the nonlocal_goto_save_area, if any. */
6417 if (cfun->nonlocal_goto_save_area)
6418 {
6419 struct walk_stmt_info wi;
6420
6421 memset (&wi, 0, sizeof (wi));
6422 wi.info = &id;
6423 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6424 }
6425
6426 /* Clean up. */
6427 delete id.decl_map;
6428 if (id.debug_map)
6429 delete id.debug_map;
6430 free_dominance_info (CDI_DOMINATORS);
6431 free_dominance_info (CDI_POST_DOMINATORS);
6432
6433 update_max_bb_count ();
6434 fold_marked_statements (0, id.statements_to_fold);
6435 delete id.statements_to_fold;
6436 delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6437 if (id.dst_node->definition)
6438 cgraph_edge::rebuild_references ();
6439 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6440 {
6441 calculate_dominance_info (CDI_DOMINATORS);
6442 fix_loop_structure (NULL);
6443 }
6444 update_ssa (TODO_update_ssa);
6445
6446 /* After partial cloning we need to rescale frequencies, so they are
6447 within proper range in the cloned function. */
6448 if (new_entry)
6449 {
6450 struct cgraph_edge *e;
6451 rebuild_frequencies ();
6452
6453 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6454 for (e = new_version_node->callees; e; e = e->next_callee)
6455 {
6456 basic_block bb = gimple_bb (e->call_stmt);
6457 e->count = bb->count;
6458 }
6459 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6460 {
6461 basic_block bb = gimple_bb (e->call_stmt);
6462 e->count = bb->count;
6463 }
6464 }
6465
6466 if (param_body_adjs && MAY_HAVE_DEBUG_BIND_STMTS)
6467 {
6468 vec<tree, va_gc> **debug_args = NULL;
6469 unsigned int len = 0;
6470 unsigned reset_len = param_body_adjs->m_reset_debug_decls.length ();
6471
6472 for (i = 0; i < reset_len; i++)
6473 {
6474 tree parm = param_body_adjs->m_reset_debug_decls[i];
6475 gcc_assert (is_gimple_reg (parm));
6476 tree ddecl;
6477
6478 if (debug_args == NULL)
6479 {
6480 debug_args = decl_debug_args_insert (new_decl);
6481 len = vec_safe_length (*debug_args);
6482 }
6483 ddecl = make_node (DEBUG_EXPR_DECL);
6484 DECL_ARTIFICIAL (ddecl) = 1;
6485 TREE_TYPE (ddecl) = TREE_TYPE (parm);
6486 SET_DECL_MODE (ddecl, DECL_MODE (parm));
6487 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6488 vec_safe_push (*debug_args, ddecl);
6489 }
6490 if (debug_args != NULL)
6491 {
6492 /* On the callee side, add
6493 DEBUG D#Y s=> parm
6494 DEBUG var => D#Y
6495 stmts to the first bb where var is a VAR_DECL created for the
6496 optimized away parameter in DECL_INITIAL block. This hints
6497 in the debug info that var (whose DECL_ORIGIN is the parm
6498 PARM_DECL) is optimized away, but could be looked up at the
6499 call site as value of D#X there. */
6500 tree vexpr;
6501 gimple_stmt_iterator cgsi
6502 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6503 gimple *def_temp;
6504 tree var = vars;
6505 i = vec_safe_length (*debug_args);
6506 do
6507 {
6508 i -= 2;
6509 while (var != NULL_TREE
6510 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6511 var = TREE_CHAIN (var);
6512 if (var == NULL_TREE)
6513 break;
6514 vexpr = make_node (DEBUG_EXPR_DECL);
6515 tree parm = (**debug_args)[i];
6516 DECL_ARTIFICIAL (vexpr) = 1;
6517 TREE_TYPE (vexpr) = TREE_TYPE (parm);
6518 SET_DECL_MODE (vexpr, DECL_MODE (parm));
6519 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6520 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6521 def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6522 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6523 }
6524 while (i > len);
6525 }
6526 }
6527 delete param_body_adjs;
6528 free_dominance_info (CDI_DOMINATORS);
6529 free_dominance_info (CDI_POST_DOMINATORS);
6530
6531 gcc_assert (!id.debug_stmts.exists ());
6532 pop_cfun ();
6533 return;
6534 }
6535
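/* To illustrate the net effect at the source level (an analogy, not actual
   compiler output): versioning

     static int mul (int a, int b) { return a * b; }

   with a TREE_MAP entry binding parameter 'b' to the constant 4, and
   PARAM_ADJUSTMENTS dropping 'b' from the prototype, yields a clone
   equivalent to

     static int mul_clone (int a) { return a * 4; }

   The init statement created by setup_one_parameter lands in the entry
   block split off above and is then folded by fold_marked_statements.  */
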
6536 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
6537 the callee and return the inlined body on success. */
6538
6539 tree
6540 maybe_inline_call_in_expr (tree exp)
6541 {
6542 tree fn = get_callee_fndecl (exp);
6543
6544 /* We can only try to inline "const" functions. */
6545 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6546 {
6547 call_expr_arg_iterator iter;
6548 copy_body_data id;
6549 tree param, arg, t;
6550 hash_map<tree, tree> decl_map;
6551
6552 /* Remap the parameters. */
6553 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6554 param;
6555 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6556 decl_map.put (param, arg);
6557
6558 memset (&id, 0, sizeof (id));
6559 id.src_fn = fn;
6560 id.dst_fn = current_function_decl;
6561 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6562 id.decl_map = &decl_map;
6563
6564 id.copy_decl = copy_decl_no_change;
6565 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6566 id.transform_new_cfg = false;
6567 id.transform_return_to_modify = true;
6568 id.transform_parameter = true;
6569 id.transform_lang_insert_block = NULL;
6570
6571 /* Make sure not to unshare trees behind the front-end's back
6572 since front-end specific mechanisms may rely on sharing. */
6573 id.regimplify = false;
6574 id.do_not_unshare = true;
6575
6576 /* We're not inside any EH region. */
6577 id.eh_lp_nr = 0;
6578
6579 t = copy_tree_body (&id);
6580
6581 /* We can only return something suitable for use in a GENERIC
6582 expression tree. */
6583 if (TREE_CODE (t) == MODIFY_EXPR)
6584 return TREE_OPERAND (t, 1);
6585 }
6586
6587 return NULL_TREE;
6588 }
6589
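/* Illustrative input where this can apply: a TREE_READONLY ("const")
   callee whose body is still available as a saved tree, e.g.
   (hypothetical)

     static int twice (int x) __attribute__ ((const));
     static int twice (int x) { return x + x; }

   A call such as twice (3) occurring in a GENERIC expression may then be
   replaced by a copy of the body with 'x' mapped to 3.  */
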
6590 /* Duplicate a type, fields and all. */
6591
6592 tree
6593 build_duplicate_type (tree type)
6594 {
6595 struct copy_body_data id;
6596
6597 memset (&id, 0, sizeof (id));
6598 id.src_fn = current_function_decl;
6599 id.dst_fn = current_function_decl;
6600 id.src_cfun = cfun;
6601 id.decl_map = new hash_map<tree, tree>;
6602 id.debug_map = NULL;
6603 id.copy_decl = copy_decl_no_change;
6604
6605 type = remap_type_1 (type, &id);
6606
6607 delete id.decl_map;
6608 if (id.debug_map)
6609 delete id.debug_map;
6610
6611 TYPE_CANONICAL (type) = type;
6612
6613 return type;
6614 }
6615
6616 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6617 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6618 evaluation. */
6619
6620 tree
6621 copy_fn (tree fn, tree& parms, tree& result)
6622 {
6623 copy_body_data id;
6624 tree param;
6625 hash_map<tree, tree> decl_map;
6626
6627 tree *p = &parms;
6628 *p = NULL_TREE;
6629
6630 memset (&id, 0, sizeof (id));
6631 id.src_fn = fn;
6632 id.dst_fn = current_function_decl;
6633 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6634 id.decl_map = &decl_map;
6635
6636 id.copy_decl = copy_decl_no_change;
6637 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6638 id.transform_new_cfg = false;
6639 id.transform_return_to_modify = false;
6640 id.transform_parameter = true;
6641 id.transform_lang_insert_block = NULL;
6642
6643 /* Make sure not to unshare trees behind the front-end's back
6644 since front-end specific mechanisms may rely on sharing. */
6645 id.regimplify = false;
6646 id.do_not_unshare = true;
6647 id.do_not_fold = true;
6648
6649 /* We're not inside any EH region. */
6650 id.eh_lp_nr = 0;
6651
6652 /* Remap the parameters and result and return them to the caller. */
6653 for (param = DECL_ARGUMENTS (fn);
6654 param;
6655 param = DECL_CHAIN (param))
6656 {
6657 *p = remap_decl (param, &id);
6658 p = &DECL_CHAIN (*p);
6659 }
6660
6661 if (DECL_RESULT (fn))
6662 result = remap_decl (DECL_RESULT (fn), &id);
6663 else
6664 result = NULL_TREE;
6665
6666 return copy_tree_body (&id);
6667 }
6668
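
/* For example, when the C++ front end evaluates

     constexpr int sq (int x) { return x * x; }
     constexpr int n = sq (3);

   it can use copy_fn to obtain a fresh, unshared copy of sq's body along
   with the remapped PARM_DECL for 'x', so the evaluation binds x to 3
   without mutating the saved original.  */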