1 /* Interprocedural analyses.
2 Copyright (C) 2005-2021 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "tree-streamer.h"
31 #include "cgraph.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "gimple-fold.h"
35 #include "tree-eh.h"
36 #include "calls.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "gimplify-me.h"
42 #include "gimple-walk.h"
43 #include "symbol-summary.h"
44 #include "ipa-prop.h"
45 #include "tree-cfg.h"
46 #include "tree-dfa.h"
47 #include "tree-inline.h"
48 #include "ipa-fnsummary.h"
49 #include "gimple-pretty-print.h"
50 #include "ipa-utils.h"
51 #include "dbgcnt.h"
52 #include "domwalk.h"
53 #include "builtins.h"
54 #include "tree-cfgcleanup.h"
55 #include "options.h"
56 #include "symtab-clones.h"
57 #include "attr-fnspec.h"
58
59 /* Function summary where the parameter infos are actually stored. */
60 ipa_node_params_t *ipa_node_params_sum = NULL;
61
62 function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;
63
64 /* Edge summary for IPA-CP edge information. */
65 ipa_edge_args_sum_t *ipa_edge_args_sum;
66
67 /* Traits for a hash table for reusing already existing ipa_bits. */
68
69 struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
70 {
71 typedef ipa_bits *value_type;
72 typedef ipa_bits *compare_type;
73 static hashval_t
74 hash (const ipa_bits *p)
75 {
76 hashval_t t = (hashval_t) p->value.to_shwi ();
77 return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
78 }
79 static bool
80 equal (const ipa_bits *a, const ipa_bits *b)
81 {
82 return a->value == b->value && a->mask == b->mask;
83 }
84 static const bool empty_zero_p = true;
85 static void
86 mark_empty (ipa_bits *&p)
87 {
88 p = NULL;
89 }
90 static bool
91 is_empty (const ipa_bits *p)
92 {
93 return p == NULL;
94 }
95 static bool
96 is_deleted (const ipa_bits *p)
97 {
98 return p == reinterpret_cast<const ipa_bits *> (1);
99 }
100 static void
101 mark_deleted (ipa_bits *&p)
102 {
103 p = reinterpret_cast<ipa_bits *> (1);
104 }
105 };
106
107 /* Hash table for avoiding repeated allocations of equal ipa_bits. */
108 static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
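/* A purely illustrative sketch of how the cache above is used (the concrete
   values are hypothetical): two requests for known bits with the same VALUE
   and MASK, e.g.

     { value = 0x4, mask = 0x3 }  and  { value = 0x4, mask = 0x3 },

   hash identically and compare equal under the traits above, so the second
   lookup in ipa_bits_hash_table reuses the ipa_bits instance allocated for
   the first one instead of creating a new GGC allocation.  */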
109
110 /* Traits for a hash table for reusing value_ranges used for IPA. Note that
111 the equiv bitmap is not hashed and is expected to be NULL. */
112
113 struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
114 {
115 typedef value_range *value_type;
116 typedef value_range *compare_type;
117 static hashval_t
118 hash (const value_range *p)
119 {
120 inchash::hash hstate (p->kind ());
121 inchash::add_expr (p->min (), hstate);
122 inchash::add_expr (p->max (), hstate);
123 return hstate.end ();
124 }
125 static bool
126 equal (const value_range *a, const value_range *b)
127 {
128 return (a->equal_p (*b)
129 && types_compatible_p (a->type (), b->type ()));
130 }
131 static const bool empty_zero_p = true;
132 static void
133 mark_empty (value_range *&p)
134 {
135 p = NULL;
136 }
137 static bool
138 is_empty (const value_range *p)
139 {
140 return p == NULL;
141 }
142 static bool
143 is_deleted (const value_range *p)
144 {
145 return p == reinterpret_cast<const value_range *> (1);
146 }
147 static void
148 mark_deleted (value_range *&p)
149 {
150 p = reinterpret_cast<value_range *> (1);
151 }
152 };
153
154 /* Hash table for avoiding repeated allocations of equal value_ranges. */
155 static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
156
157 /* Holders of ipa cgraph hooks: */
158 static struct cgraph_node_hook_list *function_insertion_hook_holder;
159
160 /* Description of a reference to an IPA constant. */
161 struct ipa_cst_ref_desc
162 {
163 /* Edge that corresponds to the statement which took the reference. */
164 struct cgraph_edge *cs;
165 /* Linked list of duplicates created when call graph edges are cloned. */
166 struct ipa_cst_ref_desc *next_duplicate;
167 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
168 is out of control. */
169 int refcount;
170 };
171
172 /* Allocation pool for reference descriptions. */
173
174 static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
175 ("IPA-PROP ref descriptions");
176
177 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
178 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
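/* For illustration (a hypothetical example, not from this file): a function
   defined as

     __attribute__ ((optimize ("O0"))) int foo (int i) { return i; }

   has DECL_FUNCTION_SPECIFIC_OPTIMIZATION set on its decl and
   opt_for_fn (decl, optimize) evaluates to 0 for it, so the predicate below
   returns true and the function is not analyzed for IPA-CP.  */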
179
180 static bool
181 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
182 {
183 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
184
185 if (!fs_opts)
186 return false;
187 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
188 }
189
190 /* Return index of the formal parameter whose tree is PTREE among the parameter
191 DESCRIPTORS, or -1 if it is not there. */
192
193 static int
194 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
195 tree ptree)
196 {
197 int i, count;
198
199 count = vec_safe_length (descriptors);
200 for (i = 0; i < count; i++)
201 if ((*descriptors)[i].decl_or_type == ptree)
202 return i;
203
204 return -1;
205 }
206
207 /* Return index of the formal whose tree is PTREE in function which corresponds
208 to INFO. */
209
210 int
211 ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
212 {
213 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
214 }
215
216 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
217 NODE. */
218
219 static void
220 ipa_populate_param_decls (struct cgraph_node *node,
221 vec<ipa_param_descriptor, va_gc> &descriptors)
222 {
223 tree fndecl;
224 tree fnargs;
225 tree parm;
226 int param_num;
227
228 fndecl = node->decl;
229 gcc_assert (gimple_has_body_p (fndecl));
230 fnargs = DECL_ARGUMENTS (fndecl);
231 param_num = 0;
232 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
233 {
234 descriptors[param_num].decl_or_type = parm;
235 unsigned int cost = estimate_move_cost (TREE_TYPE (parm), true);
236 descriptors[param_num].move_cost = cost;
237 /* Watch overflow, move_cost is a bitfield. */
238 gcc_checking_assert (cost == descriptors[param_num].move_cost);
239 param_num++;
240 }
241 }
242
243 /* Return how many formal parameters FNDECL has. */
244
245 int
246 count_formal_params (tree fndecl)
247 {
248 tree parm;
249 int count = 0;
250 gcc_assert (gimple_has_body_p (fndecl));
251
252 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
253 count++;
254
255 return count;
256 }
257
258 /* Dump a textual representation of the Ith formal parameter of the function
259 corresponding to INFO to FILE. */
261
262 void
263 ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
264 {
265 fprintf (file, "param #%i", i);
266 if ((*info->descriptors)[i].decl_or_type)
267 {
268 fprintf (file, " ");
269 print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
270 }
271 }
272
273 /* If necessary, allocate vector of parameter descriptors in info of NODE.
274 Return true if they were allocated, false if not. */
275
276 static bool
277 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
278 {
279 class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);
280
281 if (!info->descriptors && param_count)
282 {
283 vec_safe_grow_cleared (info->descriptors, param_count, true);
284 return true;
285 }
286 else
287 return false;
288 }
289
290 /* Initialize the ipa_node_params structure associated with NODE by counting
291 the function parameters, creating the descriptors and populating their
292 param_decls. */
293
294 void
295 ipa_initialize_node_params (struct cgraph_node *node)
296 {
297 class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);
298
299 if (!info->descriptors
300 && ipa_alloc_node_params (node, count_formal_params (node->decl)))
301 ipa_populate_param_decls (node, *info->descriptors);
302 }
303
304 /* Print the jump functions associated with call graph edge CS to file F. */
305
306 static void
307 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
308 {
309 int i, count;
310
311 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
312 for (i = 0; i < count; i++)
313 {
314 struct ipa_jump_func *jump_func;
315 enum jump_func_type type;
316
317 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
318 type = jump_func->type;
319
320 fprintf (f, " param %d: ", i);
321 if (type == IPA_JF_UNKNOWN)
322 fprintf (f, "UNKNOWN\n");
323 else if (type == IPA_JF_CONST)
324 {
325 tree val = jump_func->value.constant.value;
326 fprintf (f, "CONST: ");
327 print_generic_expr (f, val);
328 if (TREE_CODE (val) == ADDR_EXPR
329 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
330 {
331 fprintf (f, " -> ");
332 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
333 }
334 fprintf (f, "\n");
335 }
336 else if (type == IPA_JF_PASS_THROUGH)
337 {
338 fprintf (f, "PASS THROUGH: ");
339 fprintf (f, "%d, op %s",
340 jump_func->value.pass_through.formal_id,
341 get_tree_code_name(jump_func->value.pass_through.operation));
342 if (jump_func->value.pass_through.operation != NOP_EXPR)
343 {
344 fprintf (f, " ");
345 print_generic_expr (f, jump_func->value.pass_through.operand);
346 }
347 if (jump_func->value.pass_through.agg_preserved)
348 fprintf (f, ", agg_preserved");
349 fprintf (f, "\n");
350 }
351 else if (type == IPA_JF_ANCESTOR)
352 {
353 fprintf (f, "ANCESTOR: ");
354 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
355 jump_func->value.ancestor.formal_id,
356 jump_func->value.ancestor.offset);
357 if (jump_func->value.ancestor.agg_preserved)
358 fprintf (f, ", agg_preserved");
359 fprintf (f, "\n");
360 }
361
362 if (jump_func->agg.items)
363 {
364 struct ipa_agg_jf_item *item;
365 int j;
366
367 fprintf (f, " Aggregate passed by %s:\n",
368 jump_func->agg.by_ref ? "reference" : "value");
369 FOR_EACH_VEC_ELT (*jump_func->agg.items, j, item)
370 {
371 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
372 item->offset);
373 fprintf (f, "type: ");
374 print_generic_expr (f, item->type);
375 fprintf (f, ", ");
376 if (item->jftype == IPA_JF_PASS_THROUGH)
377 fprintf (f, "PASS THROUGH: %d,",
378 item->value.pass_through.formal_id);
379 else if (item->jftype == IPA_JF_LOAD_AGG)
380 {
381 fprintf (f, "LOAD AGG: %d",
382 item->value.pass_through.formal_id);
383 fprintf (f, " [offset: " HOST_WIDE_INT_PRINT_DEC ", by %s],",
384 item->value.load_agg.offset,
385 item->value.load_agg.by_ref ? "reference"
386 : "value");
387 }
388
389 if (item->jftype == IPA_JF_PASS_THROUGH
390 || item->jftype == IPA_JF_LOAD_AGG)
391 {
392 fprintf (f, " op %s",
393 get_tree_code_name (item->value.pass_through.operation));
394 if (item->value.pass_through.operation != NOP_EXPR)
395 {
396 fprintf (f, " ");
397 print_generic_expr (f, item->value.pass_through.operand);
398 }
399 }
400 else if (item->jftype == IPA_JF_CONST)
401 {
402 fprintf (f, "CONST: ");
403 print_generic_expr (f, item->value.constant);
404 }
405 else if (item->jftype == IPA_JF_UNKNOWN)
406 fprintf (f, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC " bits",
407 tree_to_uhwi (TYPE_SIZE (item->type)));
408 fprintf (f, "\n");
409 }
410 }
411
412 class ipa_polymorphic_call_context *ctx
413 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
414 if (ctx && !ctx->useless_p ())
415 {
416 fprintf (f, " Context: ");
417 ctx->dump (dump_file);
418 }
419
420 if (jump_func->bits)
421 {
422 fprintf (f, " value: ");
423 print_hex (jump_func->bits->value, f);
424 fprintf (f, ", mask: ");
425 print_hex (jump_func->bits->mask, f);
426 fprintf (f, "\n");
427 }
428 else
429 fprintf (f, " Unknown bits\n");
430
431 if (jump_func->m_vr)
432 {
433 fprintf (f, " VR ");
434 fprintf (f, "%s[",
435 (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
436 print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
437 fprintf (f, ", ");
438 print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
439 fprintf (f, "]\n");
440 }
441 else
442 fprintf (f, " Unknown VR\n");
443 }
444 }
445
446
447 /* Print the jump functions of all arguments on all call graph edges going from
448 NODE to file F. */
449
450 void
451 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
452 {
453 struct cgraph_edge *cs;
454
455 fprintf (f, " Jump functions of caller %s:\n", node->dump_name ());
456 for (cs = node->callees; cs; cs = cs->next_callee)
457 {
458
459 fprintf (f, " callsite %s -> %s : \n",
460 node->dump_name (),
461 cs->callee->dump_name ());
462 if (!ipa_edge_args_info_available_for_edge_p (cs))
463 fprintf (f, " no arg info\n");
464 else
465 ipa_print_node_jump_functions_for_edge (f, cs);
466 }
467
468 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
469 {
470 class cgraph_indirect_call_info *ii;
471
472 ii = cs->indirect_info;
473 if (ii->agg_contents)
474 fprintf (f, " indirect %s callsite, calling param %i, "
475 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
476 ii->member_ptr ? "member ptr" : "aggregate",
477 ii->param_index, ii->offset,
478 ii->by_ref ? "by reference" : "by_value");
479 else
480 fprintf (f, " indirect %s callsite, calling param %i, "
481 "offset " HOST_WIDE_INT_PRINT_DEC,
482 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
483 ii->offset);
484
485 if (cs->call_stmt)
486 {
487 fprintf (f, ", for stmt ");
488 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
489 }
490 else
491 fprintf (f, "\n");
492 if (ii->polymorphic)
493 ii->context.dump (f);
494 if (!ipa_edge_args_info_available_for_edge_p (cs))
495 fprintf (f, " no arg info\n");
496 else
497 ipa_print_node_jump_functions_for_edge (f, cs);
498 }
499 }
500
501 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
502
503 void
504 ipa_print_all_jump_functions (FILE *f)
505 {
506 struct cgraph_node *node;
507
508 fprintf (f, "\nJump functions:\n");
509 FOR_EACH_FUNCTION (node)
510 {
511 ipa_print_node_jump_functions (f, node);
512 }
513 }
514
515 /* Set JFUNC to be a jump function that carries no information (IPA_JF_UNKNOWN). */
516
517 static void
518 ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
519 {
520 jfunc->type = IPA_JF_UNKNOWN;
521 }
522
523 /* Set DST to be a copy of another constant jump function SRC (to be used by
524 jump function combination code). The two functions will share their rdesc. */
525
526 static void
527 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
528 struct ipa_jump_func *src)
529
530 {
531 gcc_checking_assert (src->type == IPA_JF_CONST);
532 dst->type = IPA_JF_CONST;
533 dst->value.constant = src->value.constant;
534 }
535
536 /* Set JFUNC to be a constant jmp function. */
537
538 static void
539 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
540 struct cgraph_edge *cs)
541 {
542 jfunc->type = IPA_JF_CONST;
543 jfunc->value.constant.value = unshare_expr_without_location (constant);
544
545 if (TREE_CODE (constant) == ADDR_EXPR
546 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
547 {
548 struct ipa_cst_ref_desc *rdesc;
549
550 rdesc = ipa_refdesc_pool.allocate ();
551 rdesc->cs = cs;
552 rdesc->next_duplicate = NULL;
553 rdesc->refcount = 1;
554 jfunc->value.constant.rdesc = rdesc;
555 }
556 else
557 jfunc->value.constant.rdesc = NULL;
558 }
559
560 /* Set JFUNC to be a simple pass-through jump function. */
561 static void
562 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
563 bool agg_preserved)
564 {
565 jfunc->type = IPA_JF_PASS_THROUGH;
566 jfunc->value.pass_through.operand = NULL_TREE;
567 jfunc->value.pass_through.formal_id = formal_id;
568 jfunc->value.pass_through.operation = NOP_EXPR;
569 jfunc->value.pass_through.agg_preserved = agg_preserved;
570 }
571
572 /* Set JFUNC to be a unary pass through jump function. */
573
574 static void
575 ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
576 enum tree_code operation)
577 {
578 jfunc->type = IPA_JF_PASS_THROUGH;
579 jfunc->value.pass_through.operand = NULL_TREE;
580 jfunc->value.pass_through.formal_id = formal_id;
581 jfunc->value.pass_through.operation = operation;
582 jfunc->value.pass_through.agg_preserved = false;
583 }
584 /* Set JFUNC to be an arithmetic pass through jump function. */
585
586 static void
587 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
588 tree operand, enum tree_code operation)
589 {
590 jfunc->type = IPA_JF_PASS_THROUGH;
591 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
592 jfunc->value.pass_through.formal_id = formal_id;
593 jfunc->value.pass_through.operation = operation;
594 jfunc->value.pass_through.agg_preserved = false;
595 }
596
597 /* Set JFUNC to be an ancestor jump function. */
598
599 static void
600 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
601 int formal_id, bool agg_preserved)
602 {
603 jfunc->type = IPA_JF_ANCESTOR;
604 jfunc->value.ancestor.formal_id = formal_id;
605 jfunc->value.ancestor.offset = offset;
606 jfunc->value.ancestor.agg_preserved = agg_preserved;
607 }
608
609 /* Get IPA BB information about the given BB. FBI is the context of analysis
610 of this function body. */
611
612 static struct ipa_bb_info *
613 ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
614 {
615 gcc_checking_assert (fbi);
616 return &fbi->bb_infos[bb->index];
617 }
618
619 /* Structure to be passed in between detect_type_change and
620 check_stmt_for_type_change. */
621
622 struct prop_type_change_info
623 {
624 /* Offset into the object where there is the virtual method pointer we are
625 looking for. */
626 HOST_WIDE_INT offset;
627 /* The declaration or SSA_NAME pointer of the base that we are checking for
628 type change. */
629 tree object;
630 /* Set to true if dynamic type change has been detected. */
631 bool type_maybe_changed;
632 };
633
634 /* Return true if STMT can modify a virtual method table pointer.
635
636 This function makes special assumptions about both constructors and
637 destructors which are all the functions that are allowed to alter the VMT
638 pointers. It assumes that destructors begin with assignment into all VMT
639 pointers and that constructors essentially look in the following way:
640
641 1) The very first thing they do is that they call constructors of ancestor
642 sub-objects that have them.
643
644 2) Then VMT pointers of this and all its ancestors are set to new values
645 corresponding to the type corresponding to the constructor.
646
647 3) Only afterwards, other stuff such as constructor of member sub-objects
648 and the code written by the user is run. Only this may include calling
649 virtual functions, directly or indirectly.
650
651 There is no way to call a constructor of an ancestor sub-object in any
652 other way.
653
654 This means that we do not have to care whether constructors get the correct
655 type information because they will always change it (in fact, if we define
656 the type to be given by the VMT pointer, it is undefined).
657
658 The most important fact to derive from the above is that if, for some
659 statement in the section 3, we try to detect whether the dynamic type has
660 changed, we can safely ignore all calls as we examine the function body
661 backwards until we reach statements in section 2 because these calls cannot
662 be ancestor constructors or destructors (if the input is not bogus) and so
663 do not change the dynamic type (this holds true only for automatically
664 allocated objects but at the moment we devirtualize only these). We then
665 must detect that statements in section 2 change the dynamic type and can try
666 to derive the new type. That is enough and we can stop, we will never see
667 the calls into constructors of sub-objects in this code. Therefore we can
668 safely ignore all call statements that we traverse.
669 */
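/* A purely illustrative C++ sketch of the constructor shape assumed above
   (hypothetical types and schematic statements, not from any testcase):

     struct A { virtual void f (); int i; };
     struct B : A { virtual void f (); int j; B (); };

     B::B ()
     {
       A::A (this);                    // section 1: construct ancestor sub-object
       this->_vptr = &vtable_for_B;    // section 2: store B's VMT pointer(s)
       this->j = 0;                    // section 3: user code; only from here on
                                       // may virtual calls happen
     }
*/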
670
671 static bool
672 stmt_may_be_vtbl_ptr_store (gimple *stmt)
673 {
674 if (is_gimple_call (stmt))
675 return false;
676 if (gimple_clobber_p (stmt))
677 return false;
678 else if (is_gimple_assign (stmt))
679 {
680 tree lhs = gimple_assign_lhs (stmt);
681
682 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
683 {
684 if (flag_strict_aliasing
685 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
686 return false;
687
688 if (TREE_CODE (lhs) == COMPONENT_REF
689 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
690 return false;
691 /* In the future we might want to use get_ref_base_and_extent to find
692 if there is a field corresponding to the offset and if so, proceed
693 almost like if it was a component ref. */
694 }
695 }
696 return true;
697 }
698
699 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
700 to check whether a particular statement may modify the virtual table
701 pointer. It stores its result into DATA, which points to a
702 prop_type_change_info structure. */
703
704 static bool
705 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
706 {
707 gimple *stmt = SSA_NAME_DEF_STMT (vdef);
708 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
709
710 if (stmt_may_be_vtbl_ptr_store (stmt))
711 {
712 tci->type_maybe_changed = true;
713 return true;
714 }
715 else
716 return false;
717 }
718
719 /* See if ARG is a PARAM_DECL describing an instance passed by pointer
720 or reference in FUNCTION. Return true if the dynamic type may change
721 between the beginning of the function and the point where CALL is invoked.
722
723 Generally functions are not allowed to change the type of such instances,
724 but they may call destructors. We assume that methods cannot destroy the THIS
725 pointer. Also, as a special case, constructors and destructors may change
726 the type of the THIS pointer. */
727
728 static bool
729 param_type_may_change_p (tree function, tree arg, gimple *call)
730 {
731 /* Pure functions cannot do any changes on the dynamic type;
732 such changes would require writing to memory. */
733 if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
734 return false;
735 /* We need to check if we are within an inlined constructor
736 or destructor (ideally we would have a way to check that the
737 inlined cdtor is actually working on ARG, but we do not have
738 an easy way to tell, so punt on all non-pure cdtors).
739 We may also record the types of cdtors and, once we know the type
740 of the instance, match them.
741
742 Also code unification optimizations may merge calls from
743 different blocks making return values unreliable. So
744 do nothing during late optimization. */
745 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
746 return true;
747 if (TREE_CODE (arg) == SSA_NAME
748 && SSA_NAME_IS_DEFAULT_DEF (arg)
749 && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
750 {
751 /* Normal (non-THIS) argument. */
752 if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
753 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
754 /* THIS pointer of a method - here we want to watch constructors
755 and destructors as those definitely may change the dynamic
756 type. */
757 || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
758 && !DECL_CXX_CONSTRUCTOR_P (function)
759 && !DECL_CXX_DESTRUCTOR_P (function)
760 && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
761 {
762 /* Walk the inline stack and watch out for ctors/dtors. */
763 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
764 block = BLOCK_SUPERCONTEXT (block))
765 if (inlined_polymorphic_ctor_dtor_block_p (block, false))
766 return true;
767 return false;
768 }
769 }
770 return true;
771 }
772
773 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
774 callsite CALL) by looking for assignments to its virtual table pointer. If
775 it has, return true. ARG is the object itself (not a pointer
776 to it, unless dereferenced). BASE is the base of the memory access as
777 returned by get_ref_base_and_extent, as is the offset.
778
779 This is helper function for detect_type_change and detect_type_change_ssa
780 that does the heavy work which is usually unnecessary. */
781
782 static bool
783 detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
784 tree base, tree comp_type, gcall *call,
785 HOST_WIDE_INT offset)
786 {
787 struct prop_type_change_info tci;
788 ao_ref ao;
789
790 gcc_checking_assert (DECL_P (arg)
791 || TREE_CODE (arg) == MEM_REF
792 || handled_component_p (arg));
793
794 comp_type = TYPE_MAIN_VARIANT (comp_type);
795
796 /* Const calls cannot call virtual methods through VMT and so type changes do
797 not matter. */
798 if (!flag_devirtualize || !gimple_vuse (call)
799 /* Be sure expected_type is polymorphic. */
800 || !comp_type
801 || TREE_CODE (comp_type) != RECORD_TYPE
802 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
803 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
804 return true;
805
806 if (fbi->aa_walk_budget == 0)
807 return false;
808
809 ao_ref_init (&ao, arg);
810 ao.base = base;
811 ao.offset = offset;
812 ao.size = POINTER_SIZE;
813 ao.max_size = ao.size;
814
815 tci.offset = offset;
816 tci.object = get_base_address (arg);
817 tci.type_maybe_changed = false;
818
819 int walked
820 = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
821 &tci, NULL, NULL, fbi->aa_walk_budget);
822 if (walked >= 0)
823 fbi->aa_walk_budget -= walked;
824 else
825 fbi->aa_walk_budget = 0;
826
827 if (walked >= 0 && !tci.type_maybe_changed)
828 return false;
829
830 return true;
831 }
832
833 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
834 If it may have, return true. ARG is the object itself (not a pointer
835 to it, unless dereferenced). BASE is the base of the memory access as
836 returned by get_ref_base_and_extent, as is the offset. */
837
838 static bool
839 detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
840 tree comp_type, gcall *call,
841 HOST_WIDE_INT offset)
842 {
843 if (!flag_devirtualize)
844 return false;
845
846 if (TREE_CODE (base) == MEM_REF
847 && !param_type_may_change_p (current_function_decl,
848 TREE_OPERAND (base, 0),
849 call))
850 return false;
851 return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
852 call, offset);
853 }
854
855 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
856 SSA name (its dereference will become the base and the offset is assumed to
857 be zero). */
858
859 static bool
860 detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
861 gcall *call)
862 {
863 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
864 if (!flag_devirtualize
865 || !POINTER_TYPE_P (TREE_TYPE (arg)))
866 return false;
867
868 if (!param_type_may_change_p (current_function_decl, arg, call))
869 return false;
870
871 arg = build2 (MEM_REF, ptr_type_node, arg,
872 build_int_cst (ptr_type_node, 0));
873
874 return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
875 call, 0);
876 }
877
878 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
879 boolean variable pointed to by DATA. */
880
881 static bool
882 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
883 void *data)
884 {
885 bool *b = (bool *) data;
886 *b = true;
887 return true;
888 }
889
890 /* Find the nearest valid aa status for parameter specified by INDEX that
891 dominates BB. */
892
893 static struct ipa_param_aa_status *
894 find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
895 int index)
896 {
897 while (true)
898 {
899 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
900 if (!bb)
901 return NULL;
902 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
903 if (!bi->param_aa_statuses.is_empty ()
904 && bi->param_aa_statuses[index].valid)
905 return &bi->param_aa_statuses[index];
906 }
907 }
908
909 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
910 structures and/or initialize the result with a dominating description as
911 necessary. */
912
913 static struct ipa_param_aa_status *
914 parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
915 int index)
916 {
917 gcc_checking_assert (fbi);
918 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
919 if (bi->param_aa_statuses.is_empty ())
920 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count, true);
921 struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
922 if (!paa->valid)
923 {
924 gcc_checking_assert (!paa->parm_modified
925 && !paa->ref_modified
926 && !paa->pt_modified);
927 struct ipa_param_aa_status *dom_paa;
928 dom_paa = find_dominating_aa_status (fbi, bb, index);
929 if (dom_paa)
930 *paa = *dom_paa;
931 else
932 paa->valid = true;
933 }
934
935 return paa;
936 }
937
938 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
939 a value known not to be modified in this function before reaching the
940 statement STMT. FBI holds information about the function that we have
941 gathered so far but that does not survive the summary building stage. */
942
943 static bool
944 parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
945 gimple *stmt, tree parm_load)
946 {
947 struct ipa_param_aa_status *paa;
948 bool modified = false;
949 ao_ref refd;
950
951 tree base = get_base_address (parm_load);
952 gcc_assert (TREE_CODE (base) == PARM_DECL);
953 if (TREE_READONLY (base))
954 return true;
955
956 gcc_checking_assert (fbi);
957 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
958 if (paa->parm_modified || fbi->aa_walk_budget == 0)
959 return false;
960
961 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
962 ao_ref_init (&refd, parm_load);
963 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
964 &modified, NULL, NULL,
965 fbi->aa_walk_budget);
966 if (walked < 0)
967 {
968 modified = true;
969 fbi->aa_walk_budget = 0;
970 }
971 else
972 fbi->aa_walk_budget -= walked;
973 if (paa && modified)
974 paa->parm_modified = true;
975 return !modified;
976 }
977
978 /* If STMT is an assignment that loads a value from a parameter declaration
979 which has not been modified, return the index of the parameter in
980 ipa_node_params. Otherwise return -1. */
981
982 static int
983 load_from_unmodified_param (struct ipa_func_body_info *fbi,
984 vec<ipa_param_descriptor, va_gc> *descriptors,
985 gimple *stmt)
986 {
987 int index;
988 tree op1;
989
990 if (!gimple_assign_single_p (stmt))
991 return -1;
992
993 op1 = gimple_assign_rhs1 (stmt);
994 if (TREE_CODE (op1) != PARM_DECL)
995 return -1;
996
997 index = ipa_get_param_decl_index_1 (descriptors, op1);
998 if (index < 0
999 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
1000 return -1;
1001
1002 return index;
1003 }
1004
1005 /* Return true if memory reference REF (which must be a load through parameter
1006 with INDEX) loads data that are known to be unmodified in this function
1007 before reaching statement STMT. */
1008
1009 static bool
1010 parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
1011 int index, gimple *stmt, tree ref)
1012 {
1013 struct ipa_param_aa_status *paa;
1014 bool modified = false;
1015 ao_ref refd;
1016
1017 gcc_checking_assert (fbi);
1018 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
1019 if (paa->ref_modified || fbi->aa_walk_budget == 0)
1020 return false;
1021
1022 gcc_checking_assert (gimple_vuse (stmt));
1023 ao_ref_init (&refd, ref);
1024 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
1025 &modified, NULL, NULL,
1026 fbi->aa_walk_budget);
1027 if (walked < 0)
1028 {
1029 modified = true;
1030 fbi->aa_walk_budget = 0;
1031 }
1032 else
1033 fbi->aa_walk_budget -= walked;
1034 if (modified)
1035 paa->ref_modified = true;
1036 return !modified;
1037 }
1038
1039 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1040 is known to be unmodified in this function before reaching call statement
1041 CALL into which it is passed. FBI describes the function body. */
1042
1043 static bool
1044 parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
1045 gimple *call, tree parm)
1046 {
1047 bool modified = false;
1048 ao_ref refd;
1049
1050 /* It's unnecessary to calculate anything about memory contents for a const
1051 function because it is not going to use it. But do not cache the result
1052 either. Also, no such calculations for non-pointers. */
1053 if (!gimple_vuse (call)
1054 || !POINTER_TYPE_P (TREE_TYPE (parm)))
1055 return false;
1056
1057 struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
1058 gimple_bb (call),
1059 index);
1060 if (paa->pt_modified || fbi->aa_walk_budget == 0)
1061 return false;
1062
1063 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
1064 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1065 &modified, NULL, NULL,
1066 fbi->aa_walk_budget);
1067 if (walked < 0)
1068 {
1069 fbi->aa_walk_budget = 0;
1070 modified = true;
1071 }
1072 else
1073 fbi->aa_walk_budget -= walked;
1074 if (modified)
1075 paa->pt_modified = true;
1076 return !modified;
1077 }
1078
1079 /* Return true if we can prove that OP is a memory reference loading
1080 data from an aggregate passed as a parameter.
1081
1082 The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it returns
1083 false if it cannot prove that the value has not been modified before the
1084 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1085 if it cannot prove the value has not been modified, in that case it will
1086 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1087
1088 INFO and PARMS_AINFO describe parameters of the current function (but the
1089 latter can be NULL), STMT is the load statement. If function returns true,
1090 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1091 within the aggregate and whether it is a load from a value passed by
1092 reference respectively. */
1093
1094 bool
1095 ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
1096 vec<ipa_param_descriptor, va_gc> *descriptors,
1097 gimple *stmt, tree op, int *index_p,
1098 HOST_WIDE_INT *offset_p, poly_int64 *size_p,
1099 bool *by_ref_p, bool *guaranteed_unmodified)
1100 {
1101 int index;
1102 HOST_WIDE_INT size;
1103 bool reverse;
1104 tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);
1105
1106 if (!base)
1107 return false;
1108
1109 if (DECL_P (base))
1110 {
1111 int index = ipa_get_param_decl_index_1 (descriptors, base);
1112 if (index >= 0
1113 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1114 {
1115 *index_p = index;
1116 *by_ref_p = false;
1117 if (size_p)
1118 *size_p = size;
1119 if (guaranteed_unmodified)
1120 *guaranteed_unmodified = true;
1121 return true;
1122 }
1123 return false;
1124 }
1125
1126 if (TREE_CODE (base) != MEM_REF
1127 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1128 || !integer_zerop (TREE_OPERAND (base, 1)))
1129 return false;
1130
1131 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1132 {
1133 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1134 index = ipa_get_param_decl_index_1 (descriptors, parm);
1135 }
1136 else
1137 {
1138 /* This branch catches situations where a pointer parameter is not a
1139 gimple register, for example:
1140
1141 void hip7(S*) (struct S * p)
1142 {
1143 void (*<T2e4>) (struct S *) D.1867;
1144 struct S * p.1;
1145
1146 <bb 2>:
1147 p.1_1 = p;
1148 D.1867_2 = p.1_1->f;
1149 D.1867_2 ();
1150 gdp = &p;
1151 */
1152
1153 gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1154 index = load_from_unmodified_param (fbi, descriptors, def);
1155 }
1156
1157 if (index >= 0)
1158 {
1159 bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
1160 if (!data_preserved && !guaranteed_unmodified)
1161 return false;
1162
1163 *index_p = index;
1164 *by_ref_p = true;
1165 if (size_p)
1166 *size_p = size;
1167 if (guaranteed_unmodified)
1168 *guaranteed_unmodified = data_preserved;
1169 return true;
1170 }
1171 return false;
1172 }
1173
1174 /* If STMT is an assignment that loads a value from a parameter declaration,
1175 or from an aggregate passed as the parameter either by value or reference,
1176 return the index of the parameter in ipa_node_params. Otherwise return -1.
1177
1178 FBI holds gathered information about the function. INFO describes
1179 parameters of the function, STMT is the assignment statement. If it is a
1180 memory load from an aggregate, *OFFSET_P is filled with offset within the
1181 aggregate, and *BY_REF_P specifies whether the aggregate is passed by
1182 reference. */
1183
1184 static int
1185 load_from_unmodified_param_or_agg (struct ipa_func_body_info *fbi,
1186 class ipa_node_params *info,
1187 gimple *stmt,
1188 HOST_WIDE_INT *offset_p,
1189 bool *by_ref_p)
1190 {
1191 int index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1192 poly_int64 size;
1193
1194 /* Load value from a parameter declaration. */
1195 if (index >= 0)
1196 {
1197 *offset_p = -1;
1198 return index;
1199 }
1200
1201 if (!gimple_assign_load_p (stmt))
1202 return -1;
1203
1204 tree rhs = gimple_assign_rhs1 (stmt);
1205
1206 /* Skip memory reference containing VIEW_CONVERT_EXPR. */
1207 for (tree t = rhs; handled_component_p (t); t = TREE_OPERAND (t, 0))
1208 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
1209 return -1;
1210
1211 /* Skip memory reference containing bit-field. */
1212 if (TREE_CODE (rhs) == BIT_FIELD_REF
1213 || contains_bitfld_component_ref_p (rhs))
1214 return -1;
1215
1216 if (!ipa_load_from_parm_agg (fbi, info->descriptors, stmt, rhs, &index,
1217 offset_p, &size, by_ref_p))
1218 return -1;
1219
1220 gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs))),
1221 size));
1222 if (!*by_ref_p)
1223 {
1224 tree param_type = ipa_get_type (info, index);
1225
1226 if (!param_type || !AGGREGATE_TYPE_P (param_type))
1227 return -1;
1228 }
1229 else if (TREE_THIS_VOLATILE (rhs))
1230 return -1;
1231
1232 return index;
1233 }
1234
1235 /* Walk pointer adjustments from OP (such as POINTER_PLUS and ADDR_EXPR)
1236 to find the original pointer. Initialize RET to the pointer which results from
1237 the walk.
1238 If the offset is known, return true and initialize OFFSET_RET. */
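/* A minimal illustrative example (hypothetical GIMPLE, with p_1(D) being the
   default definition of a parameter):

     q_2 = p_1(D) + 16;
     r_3 = q_2 + 4;

   Starting from r_3, the walk below strips both POINTER_PLUS_EXPR adjustments,
   stores p_1(D) into *RET and 20 (a unit, i.e. byte, offset) into *OFFSET_RET,
   and returns true because both adjustments are compile-time constants.  */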
1239
1240 bool
1241 unadjusted_ptr_and_unit_offset (tree op, tree *ret, poly_int64 *offset_ret)
1242 {
1243 poly_int64 offset = 0;
1244 bool offset_known = true;
1245 int i;
1246
1247 for (i = 0; i < param_ipa_jump_function_lookups; i++)
1248 {
1249 if (TREE_CODE (op) == ADDR_EXPR)
1250 {
1251 poly_int64 extra_offset = 0;
1252 tree base = get_addr_base_and_unit_offset (TREE_OPERAND (op, 0),
1253 &offset);
1254 if (!base)
1255 {
1256 base = get_base_address (TREE_OPERAND (op, 0));
1257 if (TREE_CODE (base) != MEM_REF)
1258 break;
1259 offset_known = false;
1260 }
1261 else
1262 {
1263 if (TREE_CODE (base) != MEM_REF)
1264 break;
1265 offset += extra_offset;
1266 }
1267 op = TREE_OPERAND (base, 0);
1268 if (mem_ref_offset (base).to_shwi (&extra_offset))
1269 offset += extra_offset;
1270 else
1271 offset_known = false;
1272 }
1273 else if (TREE_CODE (op) == SSA_NAME
1274 && !SSA_NAME_IS_DEFAULT_DEF (op))
1275 {
1276 gimple *pstmt = SSA_NAME_DEF_STMT (op);
1277
1278 if (gimple_assign_single_p (pstmt))
1279 op = gimple_assign_rhs1 (pstmt);
1280 else if (is_gimple_assign (pstmt)
1281 && gimple_assign_rhs_code (pstmt) == POINTER_PLUS_EXPR)
1282 {
1283 poly_int64 extra_offset = 0;
1284 if (ptrdiff_tree_p (gimple_assign_rhs2 (pstmt),
1285 &extra_offset))
1286 offset += extra_offset;
1287 else
1288 offset_known = false;
1289 op = gimple_assign_rhs1 (pstmt);
1290 }
1291 else
1292 break;
1293 }
1294 else
1295 break;
1296 }
1297 *ret = op;
1298 *offset_ret = offset;
1299 return offset_known;
1300 }
1301
1302 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1303 of an assignment statement STMT, try to determine whether we are actually
1304 handling any of the following cases and construct an appropriate jump
1305 function into JFUNC if so:
1306
1307 1) The passed value is loaded from a formal parameter which is not a gimple
1308 register (most probably because it is addressable, the value has to be
1309 scalar) and we can guarantee the value has not changed. This case can
1310 therefore be described by a simple pass-through jump function. For example:
1311
1312 foo (int a)
1313 {
1314 int a.0;
1315
1316 a.0_2 = a;
1317 bar (a.0_2);
1318
1319 2) The passed value can be described by a simple arithmetic pass-through
1320 jump function. E.g.
1321
1322 foo (int a)
1323 {
1324 int D.2064;
1325
1326 D.2064_4 = a.1(D) + 4;
1327 bar (D.2064_4);
1328
1329 This case can also occur in combination of the previous one, e.g.:
1330
1331 foo (int a, int z)
1332 {
1333 int a.0;
1334 int D.2064;
1335
1336 a.0_3 = a;
1337 D.2064_4 = a.0_3 + 4;
1338 foo (D.2064_4);
1339
1340 3) The passed value is an address of an object within another one (which
1341 also passed by reference). Such situations are described by an ancestor
1342 jump function and describe situations such as:
1343
1344 B::foo() (struct B * const this)
1345 {
1346 struct A * D.1845;
1347
1348 D.1845_2 = &this_1(D)->D.1748;
1349 A::bar (D.1845_2);
1350
1351 INFO is the structure describing individual parameters, which is accessed in
1352 different stages of IPA optimizations. PARMS_AINFO contains the information
1353 that is only needed for intraprocedural analysis. */
1354
1355 static void
1356 compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
1357 class ipa_node_params *info,
1358 struct ipa_jump_func *jfunc,
1359 gcall *call, gimple *stmt, tree name,
1360 tree param_type)
1361 {
1362 HOST_WIDE_INT offset, size;
1363 tree op1, tc_ssa, base, ssa;
1364 bool reverse;
1365 int index;
1366
1367 op1 = gimple_assign_rhs1 (stmt);
1368
1369 if (TREE_CODE (op1) == SSA_NAME)
1370 {
1371 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1372 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1373 else
1374 index = load_from_unmodified_param (fbi, info->descriptors,
1375 SSA_NAME_DEF_STMT (op1));
1376 tc_ssa = op1;
1377 }
1378 else
1379 {
1380 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1381 tc_ssa = gimple_assign_lhs (stmt);
1382 }
1383
1384 if (index >= 0)
1385 {
1386 switch (gimple_assign_rhs_class (stmt))
1387 {
1388 case GIMPLE_BINARY_RHS:
1389 {
1390 tree op2 = gimple_assign_rhs2 (stmt);
1391 if (!is_gimple_ip_invariant (op2)
1392 || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1393 != tcc_comparison)
1394 && !useless_type_conversion_p (TREE_TYPE (name),
1395 TREE_TYPE (op1))))
1396 return;
1397
1398 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1399 gimple_assign_rhs_code (stmt));
1400 break;
1401 }
1402 case GIMPLE_SINGLE_RHS:
1403 {
1404 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
1405 tc_ssa);
1406 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1407 break;
1408 }
1409 case GIMPLE_UNARY_RHS:
1410 if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
1411 ipa_set_jf_unary_pass_through (jfunc, index,
1412 gimple_assign_rhs_code (stmt));
1413 default:;
1414 }
1415 return;
1416 }
1417
1418 if (TREE_CODE (op1) != ADDR_EXPR)
1419 return;
1420 op1 = TREE_OPERAND (op1, 0);
1421 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1422 return;
1423 base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
1424 offset_int mem_offset;
1425 if (!base
1426 || TREE_CODE (base) != MEM_REF
1427 || !mem_ref_offset (base).is_constant (&mem_offset))
1428 return;
1429 offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
1430 ssa = TREE_OPERAND (base, 0);
1431 if (TREE_CODE (ssa) != SSA_NAME
1432 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1433 || offset < 0)
1434 return;
1435
1436 /* Dynamic types are changed in constructors and destructors. */
1437 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1438 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1439 ipa_set_ancestor_jf (jfunc, offset, index,
1440 parm_ref_data_pass_through_p (fbi, index, call, ssa));
1441 }
1442
1443 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1444 it looks like:
1445
1446 iftmp.1_3 = &obj_2(D)->D.1762;
1447
1448 The base of the MEM_REF must be a default definition SSA NAME of a
1449 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1450 whole MEM_REF expression is returned and the offset calculated from any
1451 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1452 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1453
1454 static tree
1455 get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
1456 {
1457 HOST_WIDE_INT size;
1458 tree expr, parm, obj;
1459 bool reverse;
1460
1461 if (!gimple_assign_single_p (assign))
1462 return NULL_TREE;
1463 expr = gimple_assign_rhs1 (assign);
1464
1465 if (TREE_CODE (expr) != ADDR_EXPR)
1466 return NULL_TREE;
1467 expr = TREE_OPERAND (expr, 0);
1468 obj = expr;
1469 expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);
1470
1471 offset_int mem_offset;
1472 if (!expr
1473 || TREE_CODE (expr) != MEM_REF
1474 || !mem_ref_offset (expr).is_constant (&mem_offset))
1475 return NULL_TREE;
1476 parm = TREE_OPERAND (expr, 0);
1477 if (TREE_CODE (parm) != SSA_NAME
1478 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1479 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1480 return NULL_TREE;
1481
1482 *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
1483 *obj_p = obj;
1484 return expr;
1485 }
1486
1487
1488 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1489 statement PHI, try to find out whether NAME is in fact a
1490 multiple-inheritance typecast from a descendant into an ancestor of a formal
1491 parameter and thus can be described by an ancestor jump function and if so,
1492 write the appropriate function into JFUNC.
1493
1494 Essentially we want to match the following pattern:
1495
1496 if (obj_2(D) != 0B)
1497 goto <bb 3>;
1498 else
1499 goto <bb 4>;
1500
1501 <bb 3>:
1502 iftmp.1_3 = &obj_2(D)->D.1762;
1503
1504 <bb 4>:
1505 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1506 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1507 return D.1879_6; */
1508
1509 static void
1510 compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
1511 class ipa_node_params *info,
1512 struct ipa_jump_func *jfunc,
1513 gcall *call, gphi *phi)
1514 {
1515 HOST_WIDE_INT offset;
1516 gimple *assign, *cond;
1517 basic_block phi_bb, assign_bb, cond_bb;
1518 tree tmp, parm, expr, obj;
1519 int index, i;
1520
1521 if (gimple_phi_num_args (phi) != 2)
1522 return;
1523
1524 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1525 tmp = PHI_ARG_DEF (phi, 0);
1526 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1527 tmp = PHI_ARG_DEF (phi, 1);
1528 else
1529 return;
1530 if (TREE_CODE (tmp) != SSA_NAME
1531 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1532 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1533 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1534 return;
1535
1536 assign = SSA_NAME_DEF_STMT (tmp);
1537 assign_bb = gimple_bb (assign);
1538 if (!single_pred_p (assign_bb))
1539 return;
1540 expr = get_ancestor_addr_info (assign, &obj, &offset);
1541 if (!expr)
1542 return;
1543 parm = TREE_OPERAND (expr, 0);
1544 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1545 if (index < 0)
1546 return;
1547
1548 cond_bb = single_pred (assign_bb);
1549 cond = last_stmt (cond_bb);
1550 if (!cond
1551 || gimple_code (cond) != GIMPLE_COND
1552 || gimple_cond_code (cond) != NE_EXPR
1553 || gimple_cond_lhs (cond) != parm
1554 || !integer_zerop (gimple_cond_rhs (cond)))
1555 return;
1556
1557 phi_bb = gimple_bb (phi);
1558 for (i = 0; i < 2; i++)
1559 {
1560 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1561 if (pred != assign_bb && pred != cond_bb)
1562 return;
1563 }
1564
1565 ipa_set_ancestor_jf (jfunc, offset, index,
1566 parm_ref_data_pass_through_p (fbi, index, call, parm));
1567 }
1568
1569 /* Inspect the given TYPE and return true iff it has the same structure (the
1570 same number of fields of the same types) as a C++ member pointer. If
1571 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1572 corresponding fields there. */
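/* For illustration (hypothetical C++ source): for

     struct S { void foo (); };
     typedef void (S::*memfn) ();

   the type behind memfn is represented as a RECORD_TYPE whose two fields look
   roughly like

     struct { void (*__pfn) (struct S *); long __delta; };

   i.e. a pointer to a METHOD_TYPE followed by an integer adjustment, which is
   the shape the predicate below checks for, storing the first field into
   *METHOD_PTR and the second into *DELTA.  */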
1573
1574 static bool
1575 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1576 {
1577 tree fld;
1578
1579 if (TREE_CODE (type) != RECORD_TYPE)
1580 return false;
1581
1582 fld = TYPE_FIELDS (type);
1583 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1584 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1585 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1586 return false;
1587
1588 if (method_ptr)
1589 *method_ptr = fld;
1590
1591 fld = DECL_CHAIN (fld);
1592 if (!fld || INTEGRAL_TYPE_P (fld)
1593 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1594 return false;
1595 if (delta)
1596 *delta = fld;
1597
1598 if (DECL_CHAIN (fld))
1599 return false;
1600
1601 return true;
1602 }
1603
1604 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1605 return the rhs of its defining statement, and this statement is stored in
1606 *RHS_STMT. Otherwise return RHS as it is. */
1607
1608 static inline tree
1609 get_ssa_def_if_simple_copy (tree rhs, gimple **rhs_stmt)
1610 {
1611 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1612 {
1613 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
1614
1615 if (gimple_assign_single_p (def_stmt))
1616 rhs = gimple_assign_rhs1 (def_stmt);
1617 else
1618 break;
1619 *rhs_stmt = def_stmt;
1620 }
1621 return rhs;
1622 }
1623
1624 /* Simple linked list, describing contents of an aggregate before call. */
1625
1626 struct ipa_known_agg_contents_list
1627 {
1628 /* Offset and size of the described part of the aggregate. */
1629 HOST_WIDE_INT offset, size;
1630
1631 /* Type of the described part of the aggregate. */
1632 tree type;
1633
1634 /* Known constant value or jump function data describing contents. */
1635 struct ipa_load_agg_data value;
1636
1637 /* Pointer to the next structure in the list. */
1638 struct ipa_known_agg_contents_list *next;
1639 };
1640
1641 /* Add an aggregate content item into a linked list of
1642 ipa_known_agg_contents_list structure, in which all elements
1643 are sorted ascendingly by offset. */
1644
1645 static inline void
1646 add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
1647 struct ipa_known_agg_contents_list *item)
1648 {
1649 struct ipa_known_agg_contents_list *list = *plist;
1650
1651 for (; list; list = list->next)
1652 {
1653 if (list->offset >= item->offset)
1654 break;
1655
1656 plist = &list->next;
1657 }
1658
1659 item->next = list;
1660 *plist = item;
1661 }
1662
1663 /* Check whether a given aggregate content is clobbered by some element in
1664 a linked list of ipa_known_agg_contents_list. */
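/* An illustrative example with hypothetical bit offsets and sizes: if the
   list already contains entries { offset 0, size 32 } and { offset 64,
   size 32 }, then an ITEM with offset 32 and size 64 is reported as
   clobbered, because the walk below reaches the entry at offset 64, which is
   smaller than item->offset + item->size == 96.  */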
1665
1666 static inline bool
1667 clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
1668 struct ipa_known_agg_contents_list *item)
1669 {
1670 for (; list; list = list->next)
1671 {
1672 if (list->offset >= item->offset)
1673 return list->offset < item->offset + item->size;
1674
1675 if (list->offset + list->size > item->offset)
1676 return true;
1677 }
1678
1679 return false;
1680 }
1681
1682 /* Build aggregate jump function from LIST, assuming there are exactly
1683 VALUE_COUNT entries there and that offset of the passed argument
1684 is ARG_OFFSET and store it into JFUNC. */
1685
1686 static void
1687 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1688 int value_count, HOST_WIDE_INT arg_offset,
1689 struct ipa_jump_func *jfunc)
1690 {
1691 vec_safe_reserve (jfunc->agg.items, value_count, true);
1692 for (; list; list = list->next)
1693 {
1694 struct ipa_agg_jf_item item;
1695 tree operand = list->value.pass_through.operand;
1696
1697 if (list->value.pass_through.formal_id >= 0)
1698 {
1699 /* Content value is derived from some formal parameter.  */
1700 if (list->value.offset >= 0)
1701 item.jftype = IPA_JF_LOAD_AGG;
1702 else
1703 item.jftype = IPA_JF_PASS_THROUGH;
1704
1705 item.value.load_agg = list->value;
1706 if (operand)
1707 item.value.pass_through.operand
1708 = unshare_expr_without_location (operand);
1709 }
1710 else if (operand)
1711 {
1712 /* Content value is known constant. */
1713 item.jftype = IPA_JF_CONST;
1714 item.value.constant = unshare_expr_without_location (operand);
1715 }
1716 else
1717 continue;
1718
1719 item.type = list->type;
1720 gcc_assert (tree_to_shwi (TYPE_SIZE (list->type)) == list->size);
1721
1722 item.offset = list->offset - arg_offset;
1723 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1724
1725 jfunc->agg.items->quick_push (item);
1726 }
1727 }
1728
1729 /* Given an assignment statement STMT, try to collect information into
1730 AGG_VALUE that will be used to construct a jump function for the RHS of the
1731 assignment, from which the content value of an aggregate part comes.
1732
1733 Besides constant and simple pass-through jump functions, also try to
1734 identify whether it matches the following pattern that can be described by
1735 a load-value-from-aggregate jump function, which is a derivative of simple
1736 pass-through jump function.
1737
1738 foo (int *p)
1739 {
1740 ...
1741
1742 *(q_5 + 4) = *(p_3(D) + 28) op 1;
1743 bar (q_5);
1744 }
1745
1746 Here the IPA_LOAD_AGG_DATA data structure is expressive enough to describe
1747 constant, simple pass-through and load-value-from-aggregate.  If the value
1748 is constant, it is kept in field OPERAND, and field FORMAL_ID is
1749 set to -1.  For simple pass-through and load-value-from-aggregate, field
1750 FORMAL_ID specifies the related formal parameter index, and field
1751 OFFSET distinguishes the two: -1 means simple pass-through, any other
1752 value means load-value-from-aggregate.  */
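
/* As a hypothetical illustration of this encoding, for the statement shown
   above and assuming p is the first formal parameter, the resulting
   ipa_load_agg_data would have FORMAL_ID 0, OFFSET 224 (28 bytes expressed in
   bits), BY_REF set, operation "op" and OPERAND 1, i.e. a
   load-value-from-aggregate jump function.  A plain copy of an SSA name of a
   parameter would instead keep OFFSET at -1 (simple pass-through), and a
   store of the constant 5 would have FORMAL_ID -1 and OPERAND 5.  */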
1753
1754 static void
1755 analyze_agg_content_value (struct ipa_func_body_info *fbi,
1756 struct ipa_load_agg_data *agg_value,
1757 gimple *stmt)
1758 {
1759 tree lhs = gimple_assign_lhs (stmt);
1760 tree rhs1 = gimple_assign_rhs1 (stmt);
1761 enum tree_code code;
1762 int index = -1;
1763
1764 /* Initialize jump function data for the aggregate part. */
1765 memset (agg_value, 0, sizeof (*agg_value));
1766 agg_value->pass_through.operation = NOP_EXPR;
1767 agg_value->pass_through.formal_id = -1;
1768 agg_value->offset = -1;
1769
1770 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs)) /* TODO: Support aggregate type. */
1771 || TREE_THIS_VOLATILE (lhs)
1772 || TREE_CODE (lhs) == BIT_FIELD_REF
1773 || contains_bitfld_component_ref_p (lhs))
1774 return;
1775
1776 /* Skip SSA copies. */
1777 while (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
1778 {
1779 if (TREE_CODE (rhs1) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (rhs1))
1780 break;
1781
1782 stmt = SSA_NAME_DEF_STMT (rhs1);
1783 if (!is_gimple_assign (stmt))
1784 break;
1785
1786 rhs1 = gimple_assign_rhs1 (stmt);
1787 }
1788
1789 if (gphi *phi = dyn_cast<gphi *> (stmt))
1790 {
1791 /* Also handle a special case like the following (a is a formal parameter):
1792
1793 _12 = *a_11(D).dim[0].stride;
1794 ...
1795 # iftmp.22_9 = PHI <_12(2), 1(3)>
1796 ...
1797 parm.6.dim[0].stride = iftmp.22_9;
1798 ...
1799 __x_MOD_foo (&parm.6, b_31(D));
1800
1801 The aggregate jump function describing parm.6.dim[0].stride is encoded as
1802 a PASS-THROUGH jump function with ASSERT_EXPR operation with operand 1
1803 (the constant from the PHI node).  */
1804
1805 if (gimple_phi_num_args (phi) != 2)
1806 return;
1807 tree arg0 = gimple_phi_arg_def (phi, 0);
1808 tree arg1 = gimple_phi_arg_def (phi, 1);
1809 tree operand;
1810
1811 if (is_gimple_ip_invariant (arg1))
1812 {
1813 operand = arg1;
1814 rhs1 = arg0;
1815 }
1816 else if (is_gimple_ip_invariant (arg0))
1817 {
1818 operand = arg0;
1819 rhs1 = arg1;
1820 }
1821 else
1822 return;
1823
1824 rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
1825 if (!is_gimple_assign (stmt))
1826 return;
1827
1828 code = ASSERT_EXPR;
1829 agg_value->pass_through.operand = operand;
1830 }
1831 else if (is_gimple_assign (stmt))
1832 {
1833 code = gimple_assign_rhs_code (stmt);
1834 switch (gimple_assign_rhs_class (stmt))
1835 {
1836 case GIMPLE_SINGLE_RHS:
1837 if (is_gimple_ip_invariant (rhs1))
1838 {
1839 agg_value->pass_through.operand = rhs1;
1840 return;
1841 }
1842 code = NOP_EXPR;
1843 break;
1844
1845 case GIMPLE_UNARY_RHS:
1846 /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
1847 (TRUTH_NOT_EXPR is an example), and likewise GIMPLE_BINARY_RHS does not
1848 imply tcc_binary; this subtlety is somewhat misleading.
1849 
1850 Since tcc_unary is widely used in IPA-CP code to check an operation
1851 with one operand, here we only allow tcc_unary operations to avoid
1852 possible problems.  Then we can use (opclass == tcc_unary) or not to
1853 distinguish unary and binary.  */
1854 if (TREE_CODE_CLASS (code) != tcc_unary || CONVERT_EXPR_CODE_P (code))
1855 return;
1856
1857 rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
1858 break;
1859
1860 case GIMPLE_BINARY_RHS:
1861 {
1862 gimple *rhs1_stmt = stmt;
1863 gimple *rhs2_stmt = stmt;
1864 tree rhs2 = gimple_assign_rhs2 (stmt);
1865
1866 rhs1 = get_ssa_def_if_simple_copy (rhs1, &rhs1_stmt);
1867 rhs2 = get_ssa_def_if_simple_copy (rhs2, &rhs2_stmt);
1868
1869 if (is_gimple_ip_invariant (rhs2))
1870 {
1871 agg_value->pass_through.operand = rhs2;
1872 stmt = rhs1_stmt;
1873 }
1874 else if (is_gimple_ip_invariant (rhs1))
1875 {
1876 if (TREE_CODE_CLASS (code) == tcc_comparison)
1877 code = swap_tree_comparison (code);
1878 else if (!commutative_tree_code (code))
1879 return;
1880
1881 agg_value->pass_through.operand = rhs1;
1882 stmt = rhs2_stmt;
1883 rhs1 = rhs2;
1884 }
1885 else
1886 return;
1887
1888 if (TREE_CODE_CLASS (code) != tcc_comparison
1889 && !useless_type_conversion_p (TREE_TYPE (lhs),
1890 TREE_TYPE (rhs1)))
1891 return;
1892 }
1893 break;
1894
1895 default:
1896 return;
1897 }
1898 }
1899 else
1900 return;
1901
1902 if (TREE_CODE (rhs1) != SSA_NAME)
1903 index = load_from_unmodified_param_or_agg (fbi, fbi->info, stmt,
1904 &agg_value->offset,
1905 &agg_value->by_ref);
1906 else if (SSA_NAME_IS_DEFAULT_DEF (rhs1))
1907 index = ipa_get_param_decl_index (fbi->info, SSA_NAME_VAR (rhs1));
1908
1909 if (index >= 0)
1910 {
1911 if (agg_value->offset >= 0)
1912 agg_value->type = TREE_TYPE (rhs1);
1913 agg_value->pass_through.formal_id = index;
1914 agg_value->pass_through.operation = code;
1915 }
1916 else
1917 agg_value->pass_through.operand = NULL_TREE;
1918 }
1919
1920 /* If STMT is a memory store to the object whose address is BASE, extract
1921 information (offset, size, and value) into CONTENT, and return true,
1922 otherwise conservatively assume the whole object is modified with
1923 unknown content, and return false.  CHECK_REF means that access to the
1924 object is expected to be in the form of a MEM_REF expression.  */
1925
1926 static bool
1927 extract_mem_content (struct ipa_func_body_info *fbi,
1928 gimple *stmt, tree base, bool check_ref,
1929 struct ipa_known_agg_contents_list *content)
1930 {
1931 HOST_WIDE_INT lhs_offset, lhs_size;
1932 bool reverse;
1933
1934 if (!is_gimple_assign (stmt))
1935 return false;
1936
1937 tree lhs = gimple_assign_lhs (stmt);
1938 tree lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset, &lhs_size,
1939 &reverse);
1940 if (!lhs_base)
1941 return false;
1942
1943 if (check_ref)
1944 {
1945 if (TREE_CODE (lhs_base) != MEM_REF
1946 || TREE_OPERAND (lhs_base, 0) != base
1947 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1948 return false;
1949 }
1950 else if (lhs_base != base)
1951 return false;
1952
1953 content->offset = lhs_offset;
1954 content->size = lhs_size;
1955 content->type = TREE_TYPE (lhs);
1956 content->next = NULL;
1957
1958 analyze_agg_content_value (fbi, &content->value, stmt);
1959 return true;
1960 }
1961
1962 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1963 in ARG is filled with constants or with values that are derived from the
1964 caller's formal parameters in ways described by some kinds of jump functions.  FBI
1965 is the context of the caller function for interprocedural analysis. ARG can
1966 either be an aggregate expression or a pointer to an aggregate. ARG_TYPE is
1967 the type of the aggregate, JFUNC is the jump function for the aggregate. */
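
/* For instance, in a (hypothetical) caller such as

     struct S s;
     s.a = 1;
     s.b = arg;
     foo (&s);

   where arg is a formal parameter of the caller, the stores to s.a and s.b
   are discovered by walking the virtual SSA web backwards from the call, and
   the resulting aggregate jump function records a constant for the part at
   the offset of field a and a pass-through of the caller's parameter for the
   part at the offset of field b.  */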
1968
1969 static void
1970 determine_known_aggregate_parts (struct ipa_func_body_info *fbi,
1971 gcall *call, tree arg,
1972 tree arg_type,
1973 struct ipa_jump_func *jfunc)
1974 {
1975 struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
1976 bitmap visited = NULL;
1977 int item_count = 0, value_count = 0;
1978 HOST_WIDE_INT arg_offset, arg_size;
1979 tree arg_base;
1980 bool check_ref, by_ref;
1981 ao_ref r;
1982 int max_agg_items = opt_for_fn (fbi->node->decl, param_ipa_max_agg_items);
1983
1984 if (max_agg_items == 0)
1985 return;
1986
1987 /* The function operates in three stages. First, we prepare check_ref, r,
1988 arg_base and arg_offset based on what is actually passed as an actual
1989 argument. */
1990
1991 if (POINTER_TYPE_P (arg_type))
1992 {
1993 by_ref = true;
1994 if (TREE_CODE (arg) == SSA_NAME)
1995 {
1996 tree type_size;
1997 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
1998 || !POINTER_TYPE_P (TREE_TYPE (arg)))
1999 return;
2000 check_ref = true;
2001 arg_base = arg;
2002 arg_offset = 0;
2003 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
2004 arg_size = tree_to_uhwi (type_size);
2005 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
2006 }
2007 else if (TREE_CODE (arg) == ADDR_EXPR)
2008 {
2009 bool reverse;
2010
2011 arg = TREE_OPERAND (arg, 0);
2012 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
2013 &arg_size, &reverse);
2014 if (!arg_base)
2015 return;
2016 if (DECL_P (arg_base))
2017 {
2018 check_ref = false;
2019 ao_ref_init (&r, arg_base);
2020 }
2021 else
2022 return;
2023 }
2024 else
2025 return;
2026 }
2027 else
2028 {
2029 bool reverse;
2030
2031 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
2032
2033 by_ref = false;
2034 check_ref = false;
2035 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
2036 &arg_size, &reverse);
2037 if (!arg_base)
2038 return;
2039
2040 ao_ref_init (&r, arg);
2041 }
2042
2043 /* The second stage traverses the virtual SSA web backwards starting from the
2044 call statement, looking only at individual dominating virtual operands (whose
2045 definitions dominate the call).  As long as it is confident that the content
2046 of the aggregate is affected by the definition of a virtual operand, it
2047 builds a sorted linked list of ipa_known_agg_contents_list structures describing that.  */
2048
2049 for (tree dom_vuse = gimple_vuse (call);
2050 dom_vuse && fbi->aa_walk_budget > 0;)
2051 {
2052 gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);
2053
2054 if (gimple_code (stmt) == GIMPLE_PHI)
2055 {
2056 dom_vuse = get_continuation_for_phi (stmt, &r, true,
2057 fbi->aa_walk_budget,
2058 &visited, false, NULL, NULL);
2059 continue;
2060 }
2061
2062 fbi->aa_walk_budget--;
2063 if (stmt_may_clobber_ref_p_1 (stmt, &r))
2064 {
2065 struct ipa_known_agg_contents_list *content
2066 = XALLOCA (struct ipa_known_agg_contents_list);
2067
2068 if (!extract_mem_content (fbi, stmt, arg_base, check_ref, content))
2069 break;
2070
2071 /* Now we get a dominating virtual operand, and need to check
2072 whether its value is clobbered by any other dominating one.  */
2073 if ((content->value.pass_through.formal_id >= 0
2074 || content->value.pass_through.operand)
2075 && !clobber_by_agg_contents_list_p (all_list, content))
2076 {
2077 struct ipa_known_agg_contents_list *copy
2078 = XALLOCA (struct ipa_known_agg_contents_list);
2079
2080 /* Add to the list consisting of only dominating virtual
2081 operands, whose definitions can finally reach the call. */
2082 add_to_agg_contents_list (&list, (*copy = *content, copy));
2083
2084 if (++value_count == max_agg_items)
2085 break;
2086 }
2087
2088 /* Add to the list consisting of all dominating virtual operands. */
2089 add_to_agg_contents_list (&all_list, content);
2090
2091 if (++item_count == 2 * max_agg_items)
2092 break;
2093 }
2094 dom_vuse = gimple_vuse (stmt);
2095 }
2096
2097 if (visited)
2098 BITMAP_FREE (visited);
2099
2100 /* Third stage just goes over the list and creates an appropriate vector of
2101 ipa_agg_jf_item structures out of it, of course only if there are
2102 any meaningful items to begin with. */
2103
2104 if (value_count)
2105 {
2106 jfunc->agg.by_ref = by_ref;
2107 build_agg_jump_func_from_list (list, value_count, arg_offset, jfunc);
2108 }
2109 }
2110
2111
2112 /* Return the Ith param type of callee associated with call graph
2113 edge E. */
2114
2115 tree
2116 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
2117 {
2118 int n;
2119 tree type = (e->callee
2120 ? TREE_TYPE (e->callee->decl)
2121 : gimple_call_fntype (e->call_stmt));
2122 tree t = TYPE_ARG_TYPES (type);
2123
2124 for (n = 0; n < i; n++)
2125 {
2126 if (!t)
2127 break;
2128 t = TREE_CHAIN (t);
2129 }
2130 if (t)
2131 return TREE_VALUE (t);
2132 if (!e->callee)
2133 return NULL;
2134 t = DECL_ARGUMENTS (e->callee->decl);
2135 for (n = 0; n < i; n++)
2136 {
2137 if (!t)
2138 return NULL;
2139 t = TREE_CHAIN (t);
2140 }
2141 if (t)
2142 return TREE_TYPE (t);
2143 return NULL;
2144 }
2145
2146 /* Return ipa_bits with VALUE and MASK values, which can be either a newly
2147 allocated structure or a previously existing one shared with other jump
2148 functions and/or transformation summaries. */
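
/* A sketch of the intended hash-consing behavior (illustrative, not from the
   sources):

     ipa_bits *a = ipa_get_ipa_bits_for_value (0, 7);
     ipa_bits *b = ipa_get_ipa_bits_for_value (0, 7);

   is expected to yield a == b, because the second call finds in
   ipa_bits_hash_table the entry inserted by the first one.  */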
2149
2150 ipa_bits *
2151 ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
2152 {
2153 ipa_bits tmp;
2154 tmp.value = value;
2155 tmp.mask = mask;
2156
2157 ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
2158 if (*slot)
2159 return *slot;
2160
2161 ipa_bits *res = ggc_alloc<ipa_bits> ();
2162 res->value = value;
2163 res->mask = mask;
2164 *slot = res;
2165
2166 return res;
2167 }
2168
2169 /* Assign to JF a pointer to ipa_bits structure with VALUE and MASK. Use hash
2170 table in order to avoid creating multiple identical ipa_bits structures.  */
2171
2172 static void
2173 ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
2174 const widest_int &mask)
2175 {
2176 jf->bits = ipa_get_ipa_bits_for_value (value, mask);
2177 }
2178
2179 /* Return a pointer to a value_range just like *TMP, but either find it in
2180 ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */
2181
2182 static value_range *
2183 ipa_get_value_range (value_range *tmp)
2184 {
2185 value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
2186 if (*slot)
2187 return *slot;
2188
2189 value_range *vr = new (ggc_alloc<value_range> ()) value_range;
2190 *vr = *tmp;
2191 *slot = vr;
2192
2193 return vr;
2194 }
2195
2196 /* Return a pointer to a value range consisting of KIND, MIN, MAX and an empty
2197 equiv set.  Use a hash table in order to avoid creating multiple identical
2198 copies of value_ranges.  */
2199
2200 static value_range *
2201 ipa_get_value_range (enum value_range_kind kind, tree min, tree max)
2202 {
2203 value_range tmp (min, max, kind);
2204 return ipa_get_value_range (&tmp);
2205 }
2206
2207 /* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
2208 a NULL equiv bitmap.  Use a hash table in order to avoid creating multiple
2209 identical value_range structures.  */
2210
2211 static void
2212 ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
2213 tree min, tree max)
2214 {
2215 jf->m_vr = ipa_get_value_range (type, min, max);
2216 }
2217
2218 /* Assign to JF a pointer to a value_range just like TMP but either fetch a
2219 copy from ipa_vr_hash_table or allocate a new one in GC memory.  */
2220
2221 static void
2222 ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
2223 {
2224 jf->m_vr = ipa_get_value_range (tmp);
2225 }
2226
2227 /* Compute jump function for all arguments of callsite CS and insert the
2228 information in the jump_functions array in the ipa_edge_args corresponding
2229 to this callsite. */
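
/* As a (hypothetical) example, for a call such as

     _2 = x_1(D) + 4;
     bar (x_1(D), _2, 7, &ro_global);

   where x_1(D) is the default definition of a formal parameter and ro_global
   is a read-only global variable, this would typically record a simple
   pass-through for the first argument, an arithmetic pass-through (PLUS_EXPR
   with operand 4) for the second, and constants for the third and fourth, in
   addition to any known-bits and value range information derived from the
   arguments.  */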
2230
2231 static void
2232 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
2233 struct cgraph_edge *cs)
2234 {
2235 class ipa_node_params *info = IPA_NODE_REF (cs->caller);
2236 class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (cs);
2237 gcall *call = cs->call_stmt;
2238 int n, arg_num = gimple_call_num_args (call);
2239 bool useful_context = false;
2240
2241 if (arg_num == 0 || args->jump_functions)
2242 return;
2243 vec_safe_grow_cleared (args->jump_functions, arg_num, true);
2244 if (flag_devirtualize)
2245 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num, true);
2246
2247 if (gimple_call_internal_p (call))
2248 return;
2249 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
2250 return;
2251
2252 for (n = 0; n < arg_num; n++)
2253 {
2254 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
2255 tree arg = gimple_call_arg (call, n);
2256 tree param_type = ipa_get_callee_param_type (cs, n);
2257 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
2258 {
2259 tree instance;
2260 class ipa_polymorphic_call_context context (cs->caller->decl,
2261 arg, cs->call_stmt,
2262 &instance);
2263 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
2264 &fbi->aa_walk_budget);
2265 *ipa_get_ith_polymorhic_call_context (args, n) = context;
2266 if (!context.useless_p ())
2267 useful_context = true;
2268 }
2269
2270 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2271 {
2272 bool addr_nonzero = false;
2273 bool strict_overflow = false;
2274
2275 if (TREE_CODE (arg) == SSA_NAME
2276 && param_type
2277 && get_ptr_nonnull (arg))
2278 addr_nonzero = true;
2279 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
2280 addr_nonzero = true;
2281
2282 if (addr_nonzero)
2283 {
2284 tree z = build_int_cst (TREE_TYPE (arg), 0);
2285 ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
2286 }
2287 else
2288 gcc_assert (!jfunc->m_vr);
2289 }
2290 else
2291 {
2292 wide_int min, max;
2293 value_range_kind kind;
2294 if (TREE_CODE (arg) == SSA_NAME
2295 && param_type
2296 && (kind = get_range_info (arg, &min, &max))
2297 && (kind == VR_RANGE || kind == VR_ANTI_RANGE))
2298 {
2299 value_range resvr;
2300 value_range tmpvr (wide_int_to_tree (TREE_TYPE (arg), min),
2301 wide_int_to_tree (TREE_TYPE (arg), max),
2302 kind);
2303 range_fold_unary_expr (&resvr, NOP_EXPR, param_type,
2304 &tmpvr, TREE_TYPE (arg));
2305 if (!resvr.undefined_p () && !resvr.varying_p ())
2306 ipa_set_jfunc_vr (jfunc, &resvr);
2307 else
2308 gcc_assert (!jfunc->m_vr);
2309 }
2310 else
2311 gcc_assert (!jfunc->m_vr);
2312 }
2313
2314 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
2315 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
2316 {
2317 if (TREE_CODE (arg) == SSA_NAME)
2318 ipa_set_jfunc_bits (jfunc, 0,
2319 widest_int::from (get_nonzero_bits (arg),
2320 TYPE_SIGN (TREE_TYPE (arg))));
2321 else
2322 ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
2323 }
2324 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
2325 {
2326 unsigned HOST_WIDE_INT bitpos;
2327 unsigned align;
2328
2329 get_pointer_alignment_1 (arg, &align, &bitpos);
2330 widest_int mask = wi::bit_and_not
2331 (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
2332 align / BITS_PER_UNIT - 1);
2333 widest_int value = bitpos / BITS_PER_UNIT;
2334 ipa_set_jfunc_bits (jfunc, value, mask);
2335 }
2336 else
2337 gcc_assert (!jfunc->bits);
2338
2339 if (is_gimple_ip_invariant (arg)
2340 || (VAR_P (arg)
2341 && is_global_var (arg)
2342 && TREE_READONLY (arg)))
2343 ipa_set_jf_constant (jfunc, arg, cs);
2344 else if (!is_gimple_reg_type (TREE_TYPE (arg))
2345 && TREE_CODE (arg) == PARM_DECL)
2346 {
2347 int index = ipa_get_param_decl_index (info, arg);
2348
2349 gcc_assert (index >=0);
2350 /* Aggregate passed by value, check for pass-through, otherwise we
2351 will attempt to fill in the aggregate contents later in this
2352 loop.  */
2353 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
2354 {
2355 ipa_set_jf_simple_pass_through (jfunc, index, false);
2356 continue;
2357 }
2358 }
2359 else if (TREE_CODE (arg) == SSA_NAME)
2360 {
2361 if (SSA_NAME_IS_DEFAULT_DEF (arg))
2362 {
2363 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
2364 if (index >= 0)
2365 {
2366 bool agg_p;
2367 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
2368 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
2369 }
2370 }
2371 else
2372 {
2373 gimple *stmt = SSA_NAME_DEF_STMT (arg);
2374 if (is_gimple_assign (stmt))
2375 compute_complex_assign_jump_func (fbi, info, jfunc,
2376 call, stmt, arg, param_type);
2377 else if (gimple_code (stmt) == GIMPLE_PHI)
2378 compute_complex_ancestor_jump_func (fbi, info, jfunc,
2379 call,
2380 as_a <gphi *> (stmt));
2381 }
2382 }
2383
2384 /* If ARG is a pointer, we cannot use its type to determine the type of the
2385 aggregate passed (because type conversions are ignored in gimple).  Usually we
2386 can safely get the type from the function declaration, but in case of K&R
2387 prototypes or variadic functions we can try our luck with the type of the
2388 pointer passed.  TODO: Since we look for actual initialization of the memory
2389 object, we might do better to work out the type based on the memory stores we find. */
2390 if (!param_type)
2391 param_type = TREE_TYPE (arg);
2392
2393 if ((jfunc->type != IPA_JF_PASS_THROUGH
2394 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
2395 && (jfunc->type != IPA_JF_ANCESTOR
2396 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
2397 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
2398 || POINTER_TYPE_P (param_type)))
2399 determine_known_aggregate_parts (fbi, call, arg, param_type, jfunc);
2400 }
2401 if (!useful_context)
2402 vec_free (args->polymorphic_call_contexts);
2403 }
2404
2405 /* Compute jump functions for all edges - both direct and indirect - outgoing
2406 from BB. */
2407
2408 static void
2409 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2410 {
2411 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2412 int i;
2413 struct cgraph_edge *cs;
2414
2415 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2416 {
2417 struct cgraph_node *callee = cs->callee;
2418
2419 if (callee)
2420 {
2421 callee = callee->ultimate_alias_target ();
2422 /* We do not need to bother analyzing calls to unknown functions
2423 unless they may become known during lto/whopr. */
2424 if (!callee->definition && !flag_lto
2425 && !gimple_call_fnspec (cs->call_stmt).known_p ())
2426 continue;
2427 }
2428 ipa_compute_jump_functions_for_edge (fbi, cs);
2429 }
2430 }
2431
2432 /* If STMT looks like a statement loading a value from a member pointer formal
2433 parameter, return that parameter and store the offset of the field to
2434 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2435 might be clobbered). If USE_DELTA, then we look for a use of the delta
2436 field rather than the pfn. */
2437
2438 static tree
2439 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2440 HOST_WIDE_INT *offset_p)
2441 {
2442 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2443
2444 if (!gimple_assign_single_p (stmt))
2445 return NULL_TREE;
2446
2447 rhs = gimple_assign_rhs1 (stmt);
2448 if (TREE_CODE (rhs) == COMPONENT_REF)
2449 {
2450 ref_field = TREE_OPERAND (rhs, 1);
2451 rhs = TREE_OPERAND (rhs, 0);
2452 }
2453 else
2454 ref_field = NULL_TREE;
2455 if (TREE_CODE (rhs) != MEM_REF)
2456 return NULL_TREE;
2457 rec = TREE_OPERAND (rhs, 0);
2458 if (TREE_CODE (rec) != ADDR_EXPR)
2459 return NULL_TREE;
2460 rec = TREE_OPERAND (rec, 0);
2461 if (TREE_CODE (rec) != PARM_DECL
2462 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2463 return NULL_TREE;
2464 ref_offset = TREE_OPERAND (rhs, 1);
2465
2466 if (use_delta)
2467 fld = delta_field;
2468 else
2469 fld = ptr_field;
2470 if (offset_p)
2471 *offset_p = int_bit_position (fld);
2472
2473 if (ref_field)
2474 {
2475 if (integer_nonzerop (ref_offset))
2476 return NULL_TREE;
2477 return ref_field == fld ? rec : NULL_TREE;
2478 }
2479 else
2480 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2481 : NULL_TREE;
2482 }
2483
2484 /* Returns true iff T is an SSA_NAME defined by a statement. */
2485
2486 static bool
2487 ipa_is_ssa_with_stmt_def (tree t)
2488 {
2489 if (TREE_CODE (t) == SSA_NAME
2490 && !SSA_NAME_IS_DEFAULT_DEF (t))
2491 return true;
2492 else
2493 return false;
2494 }
2495
2496 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2497 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2498 indirect call graph edge.
2499 If POLYMORPHIC is true, record it as a destination of a polymorphic call.  */
2500
2501 static struct cgraph_edge *
2502 ipa_note_param_call (struct cgraph_node *node, int param_index,
2503 gcall *stmt, bool polymorphic)
2504 {
2505 struct cgraph_edge *cs;
2506
2507 cs = node->get_edge (stmt);
2508 cs->indirect_info->param_index = param_index;
2509 cs->indirect_info->agg_contents = 0;
2510 cs->indirect_info->member_ptr = 0;
2511 cs->indirect_info->guaranteed_unmodified = 0;
2512 ipa_set_param_used_by_indirect_call (IPA_NODE_REF (node),
2513 param_index, true);
2514 if (cs->indirect_info->polymorphic || polymorphic)
2515 ipa_set_param_used_by_polymorphic_call
2516 (IPA_NODE_REF (node), param_index, true);
2517 return cs;
2518 }
2519
2520 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
2521 (described by INFO). PARMS_AINFO is a pointer to a vector containing
2522 intermediate information about each formal parameter. Currently it checks
2523 whether the call calls a pointer that is a formal parameter and if so, the
2524 parameter is marked with the called flag and an indirect call graph edge
2525 describing the call is created. This is very simple for ordinary pointers
2526 represented in SSA but not-so-nice when it comes to member pointers. The
2527 ugly part of this function does nothing more than trying to match the
2528 pattern of such a call. An example of such a pattern is the gimple dump
2529 below, the call is on the last line:
2530
2531 <bb 2>:
2532 f$__delta_5 = f.__delta;
2533 f$__pfn_24 = f.__pfn;
2534
2535 or
2536 <bb 2>:
2537 f$__delta_5 = MEM[(struct *)&f];
2538 f$__pfn_24 = MEM[(struct *)&f + 4B];
2539
2540 and a few lines below:
2541
2542 <bb 5>
2543 D.2496_3 = (int) f$__pfn_24;
2544 D.2497_4 = D.2496_3 & 1;
2545 if (D.2497_4 != 0)
2546 goto <bb 3>;
2547 else
2548 goto <bb 4>;
2549
2550 <bb 6>:
2551 D.2500_7 = (unsigned int) f$__delta_5;
2552 D.2501_8 = &S + D.2500_7;
2553 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2554 D.2503_10 = *D.2502_9;
2555 D.2504_12 = f$__pfn_24 + -1;
2556 D.2505_13 = (unsigned int) D.2504_12;
2557 D.2506_14 = D.2503_10 + D.2505_13;
2558 D.2507_15 = *D.2506_14;
2559 iftmp.11_16 = (String:: *) D.2507_15;
2560
2561 <bb 7>:
2562 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2563 D.2500_19 = (unsigned int) f$__delta_5;
2564 D.2508_20 = &S + D.2500_19;
2565 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2566
2567 Such patterns are results of simple calls to a member pointer:
2568
2569 int doprinting (int (MyString::* f)(int) const)
2570 {
2571 MyString S ("somestring");
2572
2573 return (S.*f)(4);
2574 }
2575
2576 Moreover, the function also looks for called pointers loaded from aggregates
2577 passed by value or reference. */
2578
2579 static void
2580 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2581 tree target)
2582 {
2583 class ipa_node_params *info = fbi->info;
2584 HOST_WIDE_INT offset;
2585 bool by_ref;
2586
2587 if (SSA_NAME_IS_DEFAULT_DEF (target))
2588 {
2589 tree var = SSA_NAME_VAR (target);
2590 int index = ipa_get_param_decl_index (info, var);
2591 if (index >= 0)
2592 ipa_note_param_call (fbi->node, index, call, false);
2593 return;
2594 }
2595
2596 int index;
2597 gimple *def = SSA_NAME_DEF_STMT (target);
2598 bool guaranteed_unmodified;
2599 if (gimple_assign_single_p (def)
2600 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2601 gimple_assign_rhs1 (def), &index, &offset,
2602 NULL, &by_ref, &guaranteed_unmodified))
2603 {
2604 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2605 call, false);
2606 cs->indirect_info->offset = offset;
2607 cs->indirect_info->agg_contents = 1;
2608 cs->indirect_info->by_ref = by_ref;
2609 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2610 return;
2611 }
2612
2613 /* Now we need to try to match the complex pattern of calling a member
2614 pointer. */
2615 if (gimple_code (def) != GIMPLE_PHI
2616 || gimple_phi_num_args (def) != 2
2617 || !POINTER_TYPE_P (TREE_TYPE (target))
2618 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2619 return;
2620
2621 /* First, we need to check whether one of these is a load from a member
2622 pointer that is a parameter to this function. */
2623 tree n1 = PHI_ARG_DEF (def, 0);
2624 tree n2 = PHI_ARG_DEF (def, 1);
2625 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2626 return;
2627 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2628 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2629
2630 tree rec;
2631 basic_block bb, virt_bb;
2632 basic_block join = gimple_bb (def);
2633 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2634 {
2635 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2636 return;
2637
2638 bb = EDGE_PRED (join, 0)->src;
2639 virt_bb = gimple_bb (d2);
2640 }
2641 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2642 {
2643 bb = EDGE_PRED (join, 1)->src;
2644 virt_bb = gimple_bb (d1);
2645 }
2646 else
2647 return;
2648
2649 /* Second, we need to check that the basic blocks are laid out in the way
2650 corresponding to the pattern. */
2651
2652 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2653 || single_pred (virt_bb) != bb
2654 || single_succ (virt_bb) != join)
2655 return;
2656
2657 /* Third, let's see that the branching is done depending on the least
2658 significant bit of the pfn. */
2659
2660 gimple *branch = last_stmt (bb);
2661 if (!branch || gimple_code (branch) != GIMPLE_COND)
2662 return;
2663
2664 if ((gimple_cond_code (branch) != NE_EXPR
2665 && gimple_cond_code (branch) != EQ_EXPR)
2666 || !integer_zerop (gimple_cond_rhs (branch)))
2667 return;
2668
2669 tree cond = gimple_cond_lhs (branch);
2670 if (!ipa_is_ssa_with_stmt_def (cond))
2671 return;
2672
2673 def = SSA_NAME_DEF_STMT (cond);
2674 if (!is_gimple_assign (def)
2675 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2676 || !integer_onep (gimple_assign_rhs2 (def)))
2677 return;
2678
2679 cond = gimple_assign_rhs1 (def);
2680 if (!ipa_is_ssa_with_stmt_def (cond))
2681 return;
2682
2683 def = SSA_NAME_DEF_STMT (cond);
2684
2685 if (is_gimple_assign (def)
2686 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2687 {
2688 cond = gimple_assign_rhs1 (def);
2689 if (!ipa_is_ssa_with_stmt_def (cond))
2690 return;
2691 def = SSA_NAME_DEF_STMT (cond);
2692 }
2693
2694 tree rec2;
2695 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2696 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2697 == ptrmemfunc_vbit_in_delta),
2698 NULL);
2699 if (rec != rec2)
2700 return;
2701
2702 index = ipa_get_param_decl_index (info, rec);
2703 if (index >= 0
2704 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2705 {
2706 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2707 call, false);
2708 cs->indirect_info->offset = offset;
2709 cs->indirect_info->agg_contents = 1;
2710 cs->indirect_info->member_ptr = 1;
2711 cs->indirect_info->guaranteed_unmodified = 1;
2712 }
2713
2714 return;
2715 }
2716
2717 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2718 object referenced in the expression is a formal parameter of the caller
2719 FBI->node (described by FBI->info), create a call note for the
2720 statement. */
2721
2722 static void
2723 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2724 gcall *call, tree target)
2725 {
2726 tree obj = OBJ_TYPE_REF_OBJECT (target);
2727 int index;
2728 HOST_WIDE_INT anc_offset;
2729
2730 if (!flag_devirtualize)
2731 return;
2732
2733 if (TREE_CODE (obj) != SSA_NAME)
2734 return;
2735
2736 class ipa_node_params *info = fbi->info;
2737 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2738 {
2739 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2740 return;
2741
2742 anc_offset = 0;
2743 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2744 gcc_assert (index >= 0);
2745 if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
2746 call))
2747 return;
2748 }
2749 else
2750 {
2751 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2752 tree expr;
2753
2754 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2755 if (!expr)
2756 return;
2757 index = ipa_get_param_decl_index (info,
2758 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2759 gcc_assert (index >= 0);
2760 if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
2761 call, anc_offset))
2762 return;
2763 }
2764
2765 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2766 call, true);
2767 class cgraph_indirect_call_info *ii = cs->indirect_info;
2768 ii->offset = anc_offset;
2769 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2770 ii->otr_type = obj_type_ref_class (target);
2771 ii->polymorphic = 1;
2772 }
2773
2774 /* Analyze a call statement CALL, determining whether and how it utilizes
2775 formal parameters of the caller (described by INFO).  PARMS_AINFO is a pointer
2776 to a vector containing intermediate information about each formal parameter. */
2777
2778 static void
2779 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2780 {
2781 tree target = gimple_call_fn (call);
2782
2783 if (!target
2784 || (TREE_CODE (target) != SSA_NAME
2785 && !virtual_method_call_p (target)))
2786 return;
2787
2788 struct cgraph_edge *cs = fbi->node->get_edge (call);
2789 /* If we previously turned the call into a direct call, there is
2790 no need to analyze. */
2791 if (cs && !cs->indirect_unknown_callee)
2792 return;
2793
2794 if (cs->indirect_info->polymorphic && flag_devirtualize)
2795 {
2796 tree instance;
2797 tree target = gimple_call_fn (call);
2798 ipa_polymorphic_call_context context (current_function_decl,
2799 target, call, &instance);
2800
2801 gcc_checking_assert (cs->indirect_info->otr_type
2802 == obj_type_ref_class (target));
2803 gcc_checking_assert (cs->indirect_info->otr_token
2804 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2805
2806 cs->indirect_info->vptr_changed
2807 = !context.get_dynamic_type (instance,
2808 OBJ_TYPE_REF_OBJECT (target),
2809 obj_type_ref_class (target), call,
2810 &fbi->aa_walk_budget);
2811 cs->indirect_info->context = context;
2812 }
2813
2814 if (TREE_CODE (target) == SSA_NAME)
2815 ipa_analyze_indirect_call_uses (fbi, call, target);
2816 else if (virtual_method_call_p (target))
2817 ipa_analyze_virtual_call_uses (fbi, call, target);
2818 }
2819
2820
2821 /* Analyze the call statement STMT with respect to formal parameters (described
2822 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2823 formal parameters are called. */
2824
2825 static void
2826 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2827 {
2828 if (is_gimple_call (stmt))
2829 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2830 }
2831
2832 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2833 If OP is a parameter declaration, mark it as used in the info structure
2834 passed in DATA. */
2835
2836 static bool
2837 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2838 {
2839 class ipa_node_params *info = (class ipa_node_params *) data;
2840
2841 op = get_base_address (op);
2842 if (op
2843 && TREE_CODE (op) == PARM_DECL)
2844 {
2845 int index = ipa_get_param_decl_index (info, op);
2846 gcc_assert (index >= 0);
2847 ipa_set_param_used (info, index, true);
2848 }
2849
2850 return false;
2851 }
2852
2853 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2854 the findings in various structures of the associated ipa_node_params
2855 structure, such as parameter flags, notes etc. FBI holds various data about
2856 the function being analyzed. */
2857
2858 static void
2859 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2860 {
2861 gimple_stmt_iterator gsi;
2862 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2863 {
2864 gimple *stmt = gsi_stmt (gsi);
2865
2866 if (is_gimple_debug (stmt))
2867 continue;
2868
2869 ipa_analyze_stmt_uses (fbi, stmt);
2870 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2871 visit_ref_for_mod_analysis,
2872 visit_ref_for_mod_analysis,
2873 visit_ref_for_mod_analysis);
2874 }
2875 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2876 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2877 visit_ref_for_mod_analysis,
2878 visit_ref_for_mod_analysis,
2879 visit_ref_for_mod_analysis);
2880 }
2881
2882 /* Calculate controlled uses of parameters of NODE. */
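
/* A use is "controlled" when the parameter's default-definition SSA name
   appears only in call statements; e.g. (a hypothetical function)

     foo (int *p)
     {
       bar (p_1(D));
       baz (p_1(D));
     }

   gives p two controlled uses, whereas storing p_1(D) into memory or using
   it in arithmetic would make the uses undescribed (IPA_UNDESCRIBED_USE).  */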
2883
2884 static void
2885 ipa_analyze_controlled_uses (struct cgraph_node *node)
2886 {
2887 class ipa_node_params *info = IPA_NODE_REF (node);
2888
2889 for (int i = 0; i < ipa_get_param_count (info); i++)
2890 {
2891 tree parm = ipa_get_param (info, i);
2892 int controlled_uses = 0;
2893
2894 /* For SSA regs see if parameter is used. For non-SSA we compute
2895 the flag during modification analysis. */
2896 if (is_gimple_reg (parm))
2897 {
2898 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2899 parm);
2900 if (ddef && !has_zero_uses (ddef))
2901 {
2902 imm_use_iterator imm_iter;
2903 use_operand_p use_p;
2904
2905 ipa_set_param_used (info, i, true);
2906 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2907 if (!is_gimple_call (USE_STMT (use_p)))
2908 {
2909 if (!is_gimple_debug (USE_STMT (use_p)))
2910 {
2911 controlled_uses = IPA_UNDESCRIBED_USE;
2912 break;
2913 }
2914 }
2915 else
2916 controlled_uses++;
2917 }
2918 else
2919 controlled_uses = 0;
2920 }
2921 else
2922 controlled_uses = IPA_UNDESCRIBED_USE;
2923 ipa_set_controlled_uses (info, i, controlled_uses);
2924 }
2925 }
2926
2927 /* Free stuff in BI. */
2928
2929 static void
2930 free_ipa_bb_info (struct ipa_bb_info *bi)
2931 {
2932 bi->cg_edges.release ();
2933 bi->param_aa_statuses.release ();
2934 }
2935
2936 /* Dominator walker driving the analysis. */
2937
2938 class analysis_dom_walker : public dom_walker
2939 {
2940 public:
2941 analysis_dom_walker (struct ipa_func_body_info *fbi)
2942 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2943
2944 virtual edge before_dom_children (basic_block);
2945
2946 private:
2947 struct ipa_func_body_info *m_fbi;
2948 };
2949
2950 edge
2951 analysis_dom_walker::before_dom_children (basic_block bb)
2952 {
2953 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2954 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2955 return NULL;
2956 }
2957
2958 /* Release body info FBI. */
2959
2960 void
2961 ipa_release_body_info (struct ipa_func_body_info *fbi)
2962 {
2963 int i;
2964 struct ipa_bb_info *bi;
2965
2966 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2967 free_ipa_bb_info (bi);
2968 fbi->bb_infos.release ();
2969 }
2970
2971 /* Initialize the array describing properties of formal parameters
2972 of NODE, analyze their uses and compute jump functions associated
2973 with actual arguments of calls from within NODE. */
2974
2975 void
2976 ipa_analyze_node (struct cgraph_node *node)
2977 {
2978 struct ipa_func_body_info fbi;
2979 class ipa_node_params *info;
2980
2981 ipa_check_create_node_params ();
2982 ipa_check_create_edge_args ();
2983 info = IPA_NODE_REF_GET_CREATE (node);
2984
2985 if (info->analysis_done)
2986 return;
2987 info->analysis_done = 1;
2988
2989 if (ipa_func_spec_opts_forbid_analysis_p (node))
2990 {
2991 for (int i = 0; i < ipa_get_param_count (info); i++)
2992 {
2993 ipa_set_param_used (info, i, true);
2994 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2995 }
2996 return;
2997 }
2998
2999 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
3000 push_cfun (func);
3001 calculate_dominance_info (CDI_DOMINATORS);
3002 ipa_initialize_node_params (node);
3003 ipa_analyze_controlled_uses (node);
3004
3005 fbi.node = node;
3006 fbi.info = IPA_NODE_REF (node);
3007 fbi.bb_infos = vNULL;
3008 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
3009 fbi.param_count = ipa_get_param_count (info);
3010 fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);
3011
3012 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
3013 {
3014 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
3015 bi->cg_edges.safe_push (cs);
3016 }
3017
3018 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
3019 {
3020 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
3021 bi->cg_edges.safe_push (cs);
3022 }
3023
3024 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
3025
3026 ipa_release_body_info (&fbi);
3027 free_dominance_info (CDI_DOMINATORS);
3028 pop_cfun ();
3029 }
3030
3031 /* Update the jump functions associated with call graph edge E when the call
3032 graph edge CS is being inlined, assuming that E->caller is already (possibly
3033 indirectly) inlined into CS->callee and that E has not been inlined. */
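
/* As a (hypothetical) illustration: if A calls B passing its own parameter
   a_1, and B calls C passing its parameter b_1, the jump function on the
   B->C edge is a pass-through of B's first parameter.  When the A->B edge is
   inlined, that pass-through is composed with the jump function on the A->B
   edge, so the call to C from the inlined body is then described as a
   pass-through of A's first parameter.  */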
3034
3035 static void
3036 update_jump_functions_after_inlining (struct cgraph_edge *cs,
3037 struct cgraph_edge *e)
3038 {
3039 class ipa_edge_args *top = IPA_EDGE_REF (cs);
3040 class ipa_edge_args *args = IPA_EDGE_REF (e);
3041 if (!args)
3042 return;
3043 int count = ipa_get_cs_argument_count (args);
3044 int i;
3045
3046 for (i = 0; i < count; i++)
3047 {
3048 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
3049 class ipa_polymorphic_call_context *dst_ctx
3050 = ipa_get_ith_polymorhic_call_context (args, i);
3051
3052 if (dst->agg.items)
3053 {
3054 struct ipa_agg_jf_item *item;
3055 int j;
3056
3057 FOR_EACH_VEC_ELT (*dst->agg.items, j, item)
3058 {
3059 int dst_fid;
3060 struct ipa_jump_func *src;
3061
3062 if (item->jftype != IPA_JF_PASS_THROUGH
3063 && item->jftype != IPA_JF_LOAD_AGG)
3064 continue;
3065
3066 dst_fid = item->value.pass_through.formal_id;
3067 if (!top || dst_fid >= ipa_get_cs_argument_count (top))
3068 {
3069 item->jftype = IPA_JF_UNKNOWN;
3070 continue;
3071 }
3072
3073 item->value.pass_through.formal_id = -1;
3074 src = ipa_get_ith_jump_func (top, dst_fid);
3075 if (src->type == IPA_JF_CONST)
3076 {
3077 if (item->jftype == IPA_JF_PASS_THROUGH
3078 && item->value.pass_through.operation == NOP_EXPR)
3079 {
3080 item->jftype = IPA_JF_CONST;
3081 item->value.constant = src->value.constant.value;
3082 continue;
3083 }
3084 }
3085 else if (src->type == IPA_JF_PASS_THROUGH
3086 && src->value.pass_through.operation == NOP_EXPR)
3087 {
3088 if (item->jftype == IPA_JF_PASS_THROUGH
3089 || !item->value.load_agg.by_ref
3090 || src->value.pass_through.agg_preserved)
3091 item->value.pass_through.formal_id
3092 = src->value.pass_through.formal_id;
3093 }
3094 else if (src->type == IPA_JF_ANCESTOR)
3095 {
3096 if (item->jftype == IPA_JF_PASS_THROUGH)
3097 {
3098 if (!src->value.ancestor.offset)
3099 item->value.pass_through.formal_id
3100 = src->value.ancestor.formal_id;
3101 }
3102 else if (src->value.ancestor.agg_preserved)
3103 {
3104 gcc_checking_assert (item->value.load_agg.by_ref);
3105
3106 item->value.pass_through.formal_id
3107 = src->value.ancestor.formal_id;
3108 item->value.load_agg.offset
3109 += src->value.ancestor.offset;
3110 }
3111 }
3112
3113 if (item->value.pass_through.formal_id < 0)
3114 item->jftype = IPA_JF_UNKNOWN;
3115 }
3116 }
3117
3118 if (!top)
3119 {
3120 ipa_set_jf_unknown (dst);
3121 continue;
3122 }
3123
3124 if (dst->type == IPA_JF_ANCESTOR)
3125 {
3126 struct ipa_jump_func *src;
3127 int dst_fid = dst->value.ancestor.formal_id;
3128 class ipa_polymorphic_call_context *src_ctx
3129 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3130
3131 /* Variable number of arguments can cause havoc if we try to access
3132 one that does not exist in the inlined edge. So make sure we
3133 don't. */
3134 if (dst_fid >= ipa_get_cs_argument_count (top))
3135 {
3136 ipa_set_jf_unknown (dst);
3137 continue;
3138 }
3139
3140 src = ipa_get_ith_jump_func (top, dst_fid);
3141
3142 if (src_ctx && !src_ctx->useless_p ())
3143 {
3144 class ipa_polymorphic_call_context ctx = *src_ctx;
3145
3146 /* TODO: Make type preserved safe WRT contexts. */
3147 if (!ipa_get_jf_ancestor_type_preserved (dst))
3148 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3149 ctx.offset_by (dst->value.ancestor.offset);
3150 if (!ctx.useless_p ())
3151 {
3152 if (!dst_ctx)
3153 {
3154 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3155 count, true);
3156 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3157 }
3158
3159 dst_ctx->combine_with (ctx);
3160 }
3161 }
3162
3163 /* The parameter and argument in an ancestor jump function must be of pointer
3164 type, which means access to the aggregate must be by reference.  */
3165 gcc_assert (!src->agg.items || src->agg.by_ref);
3166
3167 if (src->agg.items && dst->value.ancestor.agg_preserved)
3168 {
3169 struct ipa_agg_jf_item *item;
3170 int j;
3171
3172 /* Currently we do not produce clobber aggregate jump functions,
3173 replace with merging when we do. */
3174 gcc_assert (!dst->agg.items);
3175
3176 dst->agg.items = vec_safe_copy (src->agg.items);
3177 dst->agg.by_ref = src->agg.by_ref;
3178 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
3179 item->offset -= dst->value.ancestor.offset;
3180 }
3181
3182 if (src->type == IPA_JF_PASS_THROUGH
3183 && src->value.pass_through.operation == NOP_EXPR)
3184 {
3185 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
3186 dst->value.ancestor.agg_preserved &=
3187 src->value.pass_through.agg_preserved;
3188 }
3189 else if (src->type == IPA_JF_ANCESTOR)
3190 {
3191 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
3192 dst->value.ancestor.offset += src->value.ancestor.offset;
3193 dst->value.ancestor.agg_preserved &=
3194 src->value.ancestor.agg_preserved;
3195 }
3196 else
3197 ipa_set_jf_unknown (dst);
3198 }
3199 else if (dst->type == IPA_JF_PASS_THROUGH)
3200 {
3201 struct ipa_jump_func *src;
3202 /* We must check the range because of calls with a variable number of
3203 arguments, and we cannot combine jump functions with operations.  */
3204 if (dst->value.pass_through.operation == NOP_EXPR
3205 && (top && dst->value.pass_through.formal_id
3206 < ipa_get_cs_argument_count (top)))
3207 {
3208 int dst_fid = dst->value.pass_through.formal_id;
3209 src = ipa_get_ith_jump_func (top, dst_fid);
3210 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
3211 class ipa_polymorphic_call_context *src_ctx
3212 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3213
3214 if (src_ctx && !src_ctx->useless_p ())
3215 {
3216 class ipa_polymorphic_call_context ctx = *src_ctx;
3217
3218 /* TODO: Make type preserved safe WRT contexts. */
3219 if (!ipa_get_jf_pass_through_type_preserved (dst))
3220 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3221 if (!ctx.useless_p ())
3222 {
3223 if (!dst_ctx)
3224 {
3225 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3226 count, true);
3227 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3228 }
3229 dst_ctx->combine_with (ctx);
3230 }
3231 }
3232 switch (src->type)
3233 {
3234 case IPA_JF_UNKNOWN:
3235 ipa_set_jf_unknown (dst);
3236 break;
3237 case IPA_JF_CONST:
3238 ipa_set_jf_cst_copy (dst, src);
3239 break;
3240
3241 case IPA_JF_PASS_THROUGH:
3242 {
3243 int formal_id = ipa_get_jf_pass_through_formal_id (src);
3244 enum tree_code operation;
3245 operation = ipa_get_jf_pass_through_operation (src);
3246
3247 if (operation == NOP_EXPR)
3248 {
3249 bool agg_p;
3250 agg_p = dst_agg_p
3251 && ipa_get_jf_pass_through_agg_preserved (src);
3252 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
3253 }
3254 else if (TREE_CODE_CLASS (operation) == tcc_unary)
3255 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
3256 else
3257 {
3258 tree operand = ipa_get_jf_pass_through_operand (src);
3259 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
3260 operation);
3261 }
3262 break;
3263 }
3264 case IPA_JF_ANCESTOR:
3265 {
3266 bool agg_p;
3267 agg_p = dst_agg_p
3268 && ipa_get_jf_ancestor_agg_preserved (src);
3269 ipa_set_ancestor_jf (dst,
3270 ipa_get_jf_ancestor_offset (src),
3271 ipa_get_jf_ancestor_formal_id (src),
3272 agg_p);
3273 break;
3274 }
3275 default:
3276 gcc_unreachable ();
3277 }
3278
3279 if (src->agg.items
3280 && (dst_agg_p || !src->agg.by_ref))
3281 {
3282 /* Currently we do not produce clobber aggregate jump
3283 functions, replace with merging when we do. */
3284 gcc_assert (!dst->agg.items);
3285
3286 dst->agg.by_ref = src->agg.by_ref;
3287 dst->agg.items = vec_safe_copy (src->agg.items);
3288 }
3289 }
3290 else
3291 ipa_set_jf_unknown (dst);
3292 }
3293 }
3294 }
3295
3296 /* If TARGET is an addr_expr of a function declaration, make it the
3297 (SPECULATIVE) destination of an indirect edge IE and return the edge.
3298 Otherwise, return NULL. */
3299
3300 struct cgraph_edge *
3301 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
3302 bool speculative)
3303 {
3304 struct cgraph_node *callee;
3305 bool unreachable = false;
3306
3307 if (TREE_CODE (target) == ADDR_EXPR)
3308 target = TREE_OPERAND (target, 0);
3309 if (TREE_CODE (target) != FUNCTION_DECL)
3310 {
3311 target = canonicalize_constructor_val (target, NULL);
3312 if (!target || TREE_CODE (target) != FUNCTION_DECL)
3313 {
3314 /* Member pointer call that goes through a VMT lookup. */
3315 if (ie->indirect_info->member_ptr
3316 /* Or if target is not an invariant expression and we do not
3317 know if it will evaluate to a function at runtime.
3318 This can happen when folding through &VAR, where &VAR
3319 is IP invariant, but VAR itself is not.
3320
3321 TODO: Revisit this when GCC 5 is branched. It seems that
3322 member_ptr check is not needed and that we may try to fold
3323 the expression and see if VAR is readonly. */
3324 || !is_gimple_ip_invariant (target))
3325 {
3326 if (dump_enabled_p ())
3327 {
3328 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3329 "discovered direct call non-invariant %s\n",
3330 ie->caller->dump_name ());
3331 }
3332 return NULL;
3333 }
3334
3335
3336 if (dump_enabled_p ())
3337 {
3338 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3339 "discovered direct call to non-function in %s, "
3340 "making it __builtin_unreachable\n",
3341 ie->caller->dump_name ());
3342 }
3343
3344 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3345 callee = cgraph_node::get_create (target);
3346 unreachable = true;
3347 }
3348 else
3349 callee = cgraph_node::get (target);
3350 }
3351 else
3352 callee = cgraph_node::get (target);
3353
3354 /* Because may-edges are not explicitly represented and the vtable may be
3355 external, we may create the first reference to the object in the unit.  */
3356 if (!callee || callee->inlined_to)
3357 {
3358
3359 /* We had better ensure we can refer to it.
3360 In the case of static functions we are out of luck, since we already
3361 removed its body. In the case of public functions we may or may
3362 not introduce the reference. */
3363 if (!canonicalize_constructor_val (target, NULL)
3364 || !TREE_PUBLIC (target))
3365 {
3366 if (dump_file)
3367 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
3368 "(%s -> %s) but cannot refer to it. Giving up.\n",
3369 ie->caller->dump_name (),
3370 ie->callee->dump_name ());
3371 return NULL;
3372 }
3373 callee = cgraph_node::get_create (target);
3374 }
3375
3376 /* If the edge has already been made speculative.  */
3377 if (speculative && ie->speculative)
3378 {
3379 if (dump_file)
3380 {
3381 cgraph_edge *e2 = ie->speculative_call_for_target (callee);
3382 if (!e2)
3383 {
3384 if (dump_file)
3385 fprintf (dump_file, "ipa-prop: Discovered call to a "
3386 "speculative target (%s -> %s) but the call is "
3387 "already speculated to different target. "
3388 "Giving up.\n",
3389 ie->caller->dump_name (), callee->dump_name ());
3390 }
3391 else
3392 {
3393 if (dump_file)
3394 fprintf (dump_file,
3395 "ipa-prop: Discovered call to a speculative target "
3396 "(%s -> %s) this agree with previous speculation.\n",
3397 ie->caller->dump_name (), callee->dump_name ());
3398 }
3399 }
3400 return NULL;
3401 }
3402
3403 if (!dbg_cnt (devirt))
3404 return NULL;
3405
3406 ipa_check_create_node_params ();
3407
3408 /* We cannot make edges to inline clones.  It is a bug if someone removed
3409 the cgraph node too early. */
3410 gcc_assert (!callee->inlined_to);
3411
3412 if (dump_file && !unreachable)
3413 {
3414 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
3415 "(%s -> %s), for stmt ",
3416 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
3417 speculative ? "speculative" : "known",
3418 ie->caller->dump_name (),
3419 callee->dump_name ());
3420 if (ie->call_stmt)
3421 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
3422 else
3423 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
3424 }
3425 if (dump_enabled_p ())
3426 {
3427 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3428 "converting indirect call in %s to direct call to %s\n",
3429 ie->caller->dump_name (), callee->dump_name ());
3430 }
3431 if (!speculative)
3432 {
3433 struct cgraph_edge *orig = ie;
3434 ie = cgraph_edge::make_direct (ie, callee);
3435 /* If we resolved a speculative edge, the cost is already up to date
3436 for direct call (adjusted by inline_edge_duplication_hook). */
3437 if (ie == orig)
3438 {
3439 ipa_call_summary *es = ipa_call_summaries->get (ie);
3440 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
3441 - eni_size_weights.call_cost);
3442 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
3443 - eni_time_weights.call_cost);
3444 }
3445 }
3446 else
3447 {
3448 if (!callee->can_be_discarded_p ())
3449 {
3450 cgraph_node *alias;
3451 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
3452 if (alias)
3453 callee = alias;
3454 }
3455 /* make_speculative will update ie's cost to direct call cost. */
3456 ie = ie->make_speculative
3457 (callee, ie->count.apply_scale (8, 10));
3458 }
3459
3460 return ie;
3461 }
3462
3463 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3464 CONSTRUCTOR and return it. Return NULL if the search fails for some
3465 reason. */
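
/* For example (a hypothetical initializer), given

     static const int tbl[4] = { 10, 20, 30, 40 };

   a request for REQ_OFFSET 64 on the CONSTRUCTOR of tbl returns the
   constant 30, because the element at index 2 starts at bit offset
   2 * 32 == 64, assuming a 32-bit int.  */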
3466
3467 static tree
3468 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
3469 {
3470 tree type = TREE_TYPE (constructor);
3471 if (TREE_CODE (type) != ARRAY_TYPE
3472 && TREE_CODE (type) != RECORD_TYPE)
3473 return NULL;
3474
3475 unsigned ix;
3476 tree index, val;
3477 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
3478 {
3479 HOST_WIDE_INT elt_offset;
3480 if (TREE_CODE (type) == ARRAY_TYPE)
3481 {
3482 offset_int off;
3483 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3484 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3485
3486 if (index)
3487 {
3488 if (TREE_CODE (index) == RANGE_EXPR)
3489 off = wi::to_offset (TREE_OPERAND (index, 0));
3490 else
3491 off = wi::to_offset (index);
3492 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3493 {
3494 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3495 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3496 off = wi::sext (off - wi::to_offset (low_bound),
3497 TYPE_PRECISION (TREE_TYPE (index)));
3498 }
3499 off *= wi::to_offset (unit_size);
3500 /* ??? Handle more than just the first index of a
3501 RANGE_EXPR. */
3502 }
3503 else
3504 off = wi::to_offset (unit_size) * ix;
3505
3506 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3507 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3508 continue;
3509 elt_offset = off.to_shwi ();
3510 }
3511 else if (TREE_CODE (type) == RECORD_TYPE)
3512 {
3513 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3514 if (DECL_BIT_FIELD (index))
3515 continue;
3516 elt_offset = int_bit_position (index);
3517 }
3518 else
3519 gcc_unreachable ();
3520
3521 if (elt_offset > req_offset)
3522 return NULL;
3523
3524 if (TREE_CODE (val) == CONSTRUCTOR)
3525 return find_constructor_constant_at_offset (val,
3526 req_offset - elt_offset);
3527
3528 if (elt_offset == req_offset
3529 && is_gimple_reg_type (TREE_TYPE (val))
3530 && is_gimple_ip_invariant (val))
3531 return val;
3532 }
3533 return NULL;
3534 }
3535
3536 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3537 invariant from a static constructor and if so, return it. Otherwise return
3538 NULL. */
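/* A sketch of the intended situation, with hypothetical declarations:

     static const struct ops { int (*hook) (void); } table = { default_hook };

   A by-ref lookup where SCALAR is the ADDR_EXPR of `table' and OFFSET is 0 can
   return the ADDR_EXPR of `default_hook', because `table' is a read-only
   global with a CONSTRUCTOR initializer.  A writable or non-constant global
   makes this function return NULL.  */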
3539
3540 static tree
3541 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3542 {
3543 if (by_ref)
3544 {
3545 if (TREE_CODE (scalar) != ADDR_EXPR)
3546 return NULL;
3547 scalar = TREE_OPERAND (scalar, 0);
3548 }
3549
3550 if (!VAR_P (scalar)
3551 || !is_global_var (scalar)
3552 || !TREE_READONLY (scalar)
3553 || !DECL_INITIAL (scalar)
3554 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3555 return NULL;
3556
3557 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3558 }
3559
3560 /* Retrieve value from AGG, a set of known offset/value for an aggregate or
3561 static initializer of SCALAR (which can be NULL) for the given OFFSET or
3562 return NULL if there is none. BY_REF specifies whether the value has to be
3563 passed by reference or by value. If FROM_GLOBAL_CONSTANT is non-NULL, then
3564 the boolean it points to is set to true if the value comes from an
3565 initializer of a constant. */
3566
3567 tree
3568 ipa_find_agg_cst_for_param (struct ipa_agg_value_set *agg, tree scalar,
3569 HOST_WIDE_INT offset, bool by_ref,
3570 bool *from_global_constant)
3571 {
3572 struct ipa_agg_value *item;
3573 int i;
3574
3575 if (scalar)
3576 {
3577 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3578 if (res)
3579 {
3580 if (from_global_constant)
3581 *from_global_constant = true;
3582 return res;
3583 }
3584 }
3585
3586 if (!agg
3587 || by_ref != agg->by_ref)
3588 return NULL;
3589
3590 FOR_EACH_VEC_ELT (agg->items, i, item)
3591 if (item->offset == offset)
3592 {
3593 /* Currently we do not have clobber values, return NULL for them once
3594 we do. */
3595 gcc_checking_assert (is_gimple_ip_invariant (item->value));
3596 if (from_global_constant)
3597 *from_global_constant = false;
3598 return item->value;
3599 }
3600 return NULL;
3601 }
3602
3603 /* Remove a reference to SYMBOL from the list of references of a node given by
3604 reference description RDESC. Return true if the reference has been
3605 successfully found and removed. */
3606
3607 static bool
3608 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3609 {
3610 struct ipa_ref *to_del;
3611 struct cgraph_edge *origin;
3612
3613 origin = rdesc->cs;
3614 if (!origin)
3615 return false;
3616 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3617 origin->lto_stmt_uid);
3618 if (!to_del)
3619 return false;
3620
3621 to_del->remove_reference ();
3622 if (dump_file)
3623 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3624 origin->caller->dump_name (), symbol->dump_name ());
3625 return true;
3626 }
3627
3628 /* If JFUNC has a reference description with refcount different from
3629 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3630 NULL. JFUNC must be a constant jump function. */
3631
3632 static struct ipa_cst_ref_desc *
3633 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3634 {
3635 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3636 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3637 return rdesc;
3638 else
3639 return NULL;
3640 }
3641
3642 /* If the value of constant jump function JFUNC is an address of a function
3643 declaration, return the associated call graph node. Otherwise return
3644 NULL. */
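/* For example (hypothetical identifiers), a constant jump function whose value
   is the ADDR_EXPR `&foo' where `foo' is a function yields the cgraph node of
   `foo'; a constant like `42' or the address of a variable yields NULL.  */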
3645
3646 static cgraph_node *
3647 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3648 {
3649 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3650 tree cst = ipa_get_jf_constant (jfunc);
3651 if (TREE_CODE (cst) != ADDR_EXPR
3652 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3653 return NULL;
3654
3655 return cgraph_node::get (TREE_OPERAND (cst, 0));
3656 }
3657
3658
3659 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3660 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3661 the edge specified in the rdesc. Return false if either the symbol or the
3662 reference could not be found, otherwise return true. */
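/* For instance (hypothetical scenario), if a constant jump function passing
   `&foo' has an rdesc refcount of 2 because two described uses remain,
   dropping one of them merely decrements the count to 1; only when the last
   described use goes away is the IPA reference from the caller to `foo'
   actually removed.  */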
3663
3664 static bool
3665 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3666 {
3667 struct ipa_cst_ref_desc *rdesc;
3668 if (jfunc->type == IPA_JF_CONST
3669 && (rdesc = jfunc_rdesc_usable (jfunc))
3670 && --rdesc->refcount == 0)
3671 {
3672 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3673 if (!symbol)
3674 return false;
3675
3676 return remove_described_reference (symbol, rdesc);
3677 }
3678 return true;
3679 }
3680
3681 /* Try to find a destination for indirect edge IE that corresponds to a simple
3682 call or a call of a member function pointer and where the destination is a
3683 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3684 the type of the parameter to which the result of JFUNC is passed. If it can
3685 be determined, return the newly direct edge, otherwise return NULL.
3686 NEW_ROOT and NEW_ROOT_INFO is the node and its info that JFUNC lattices are
3687 relative to. */
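/* A sketch of the source-level situation handled here (identifiers are
   hypothetical):

     static void worker (void (*cb) (int)) { cb (1); }      <-- indirect edge
     void entry (void) { worker (do_work); }

   Once `worker' is inlined into `entry', the jump function describing CB
   evaluates to `&do_work', so the indirect edge for `cb (1)' can be turned
   into a direct call to `do_work'.  */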
3688
3689 static struct cgraph_edge *
3690 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3691 struct ipa_jump_func *jfunc, tree target_type,
3692 struct cgraph_node *new_root,
3693 class ipa_node_params *new_root_info)
3694 {
3695 struct cgraph_edge *cs;
3696 tree target;
3697 bool agg_contents = ie->indirect_info->agg_contents;
3698 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
3699 if (agg_contents)
3700 {
3701 bool from_global_constant;
3702 ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
3703 new_root,
3704 &jfunc->agg);
3705 target = ipa_find_agg_cst_for_param (&agg, scalar,
3706 ie->indirect_info->offset,
3707 ie->indirect_info->by_ref,
3708 &from_global_constant);
3709 agg.release ();
3710 if (target
3711 && !from_global_constant
3712 && !ie->indirect_info->guaranteed_unmodified)
3713 return NULL;
3714 }
3715 else
3716 target = scalar;
3717 if (!target)
3718 return NULL;
3719 cs = ipa_make_edge_direct_to_target (ie, target);
3720
3721 if (cs && !agg_contents)
3722 {
3723 bool ok;
3724 gcc_checking_assert (cs->callee
3725 && (cs != ie
3726 || jfunc->type != IPA_JF_CONST
3727 || !cgraph_node_for_jfunc (jfunc)
3728 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3729 ok = try_decrement_rdesc_refcount (jfunc);
3730 gcc_checking_assert (ok);
3731 }
3732
3733 return cs;
3734 }
3735
3736 /* Return the target to be used in cases of impossible devirtualization. IE
3737 and target (the latter can be NULL) are dumped when dumping is enabled. */
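/* For example, if analysis concludes that the only possible target of a
   virtual call is incompatible with the call (or that no target exists at
   all), the call is redirected to __builtin_unreachable, telling later passes
   that this path cannot be taken in a valid program.  */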
3738
3739 tree
3740 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3741 {
3742 if (dump_file)
3743 {
3744 if (target)
3745 fprintf (dump_file,
3746 "Type inconsistent devirtualization: %s->%s\n",
3747 ie->caller->dump_name (),
3748 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3749 else
3750 fprintf (dump_file,
3751 "No devirtualization target in %s\n",
3752 ie->caller->dump_name ());
3753 }
3754 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3755 cgraph_node::get_create (new_target);
3756 return new_target;
3757 }
3758
3759 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3760 call based on a formal parameter which is described by jump function JFUNC
3761 and if it can be determined, make it direct and return the direct edge.
3762 Otherwise, return NULL. CTX describes the polymorphic context that the
3763 parameter the call is based on brings along with it. NEW_ROOT and
3764 NEW_ROOT_INFO is the node and its info that JFUNC lattices are relative
3765 to. */
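/* A sketch of the C++ situation handled here (class and function names are
   hypothetical):

     struct B { virtual int f (); };
     struct D : B { int f () { return 1; } };
     static int call (B *b) { return b->f (); }             <-- polymorphic edge
     int entry () { D d; return call (&d); }

   After `call' is inlined into `entry', the polymorphic context of parameter b
   pins the dynamic type to D, so the virtual call can be made direct (or at
   least speculative) to D::f.  */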
3766
3767 static struct cgraph_edge *
3768 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3769 struct ipa_jump_func *jfunc,
3770 class ipa_polymorphic_call_context ctx,
3771 struct cgraph_node *new_root,
3772 class ipa_node_params *new_root_info)
3773 {
3774 tree target = NULL;
3775 bool speculative = false;
3776
3777 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3778 return NULL;
3779
3780 gcc_assert (!ie->indirect_info->by_ref);
3781
3782 /* Try to do lookup via known virtual table pointer value. */
3783 if (!ie->indirect_info->vptr_changed
3784 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3785 {
3786 tree vtable;
3787 unsigned HOST_WIDE_INT offset;
3788 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3789 : NULL;
3790 ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
3791 new_root,
3792 &jfunc->agg);
3793 tree t = ipa_find_agg_cst_for_param (&agg, scalar,
3794 ie->indirect_info->offset,
3795 true);
3796 agg.release ();
3797 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3798 {
3799 bool can_refer;
3800 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3801 vtable, offset, &can_refer);
3802 if (can_refer)
3803 {
3804 if (!t
3805 || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE)
3806 || !possible_polymorphic_call_target_p
3807 (ie, cgraph_node::get (t)))
3808 {
3809 /* Do not speculate builtin_unreachable, it is stupid! */
3810 if (!ie->indirect_info->vptr_changed)
3811 target = ipa_impossible_devirt_target (ie, target);
3812 else
3813 target = NULL;
3814 }
3815 else
3816 {
3817 target = t;
3818 speculative = ie->indirect_info->vptr_changed;
3819 }
3820 }
3821 }
3822 }
3823
3824 ipa_polymorphic_call_context ie_context (ie);
3825 vec <cgraph_node *>targets;
3826 bool final;
3827
3828 ctx.offset_by (ie->indirect_info->offset);
3829 if (ie->indirect_info->vptr_changed)
3830 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3831 ie->indirect_info->otr_type);
3832 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3833 targets = possible_polymorphic_call_targets
3834 (ie->indirect_info->otr_type,
3835 ie->indirect_info->otr_token,
3836 ctx, &final);
3837 if (final && targets.length () <= 1)
3838 {
3839 speculative = false;
3840 if (targets.length () == 1)
3841 target = targets[0]->decl;
3842 else
3843 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3844 }
3845 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3846 && !ie->speculative && ie->maybe_hot_p ())
3847 {
3848 cgraph_node *n;
3849 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3850 ie->indirect_info->otr_token,
3851 ie->indirect_info->context);
3852 if (n)
3853 {
3854 target = n->decl;
3855 speculative = true;
3856 }
3857 }
3858
3859 if (target)
3860 {
3861 if (!possible_polymorphic_call_target_p
3862 (ie, cgraph_node::get_create (target)))
3863 {
3864 if (speculative)
3865 return NULL;
3866 target = ipa_impossible_devirt_target (ie, target);
3867 }
3868 return ipa_make_edge_direct_to_target (ie, target, speculative);
3869 }
3870 else
3871 return NULL;
3872 }
3873
3874 /* Update the param called notes associated with NODE when CS is being inlined,
3875 assuming NODE is (potentially indirectly) inlined into CS->callee.
3876 Moreover, if the callee is discovered to be constant, create a new cgraph
3877 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3878 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
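/* For instance (hypothetical code), if NODE has an indirect call whose target
   comes from one of its parameters and the corresponding argument of CS is a
   pass-through of the caller's parameter number 2, the indirect call note is
   simply retargeted to parameter index 2 of the new root; if the argument is
   instead the constant `&foo', the call becomes a direct edge to `foo'.  */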
3879
3880 static bool
3881 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3882 struct cgraph_node *node,
3883 vec<cgraph_edge *> *new_edges)
3884 {
3885 class ipa_edge_args *top;
3886 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3887 struct cgraph_node *new_root;
3888 class ipa_node_params *new_root_info, *inlined_node_info;
3889 bool res = false;
3890
3891 ipa_check_create_edge_args ();
3892 top = IPA_EDGE_REF (cs);
3893 new_root = cs->caller->inlined_to
3894 ? cs->caller->inlined_to : cs->caller;
3895 new_root_info = IPA_NODE_REF (new_root);
3896 inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());
3897
3898 for (ie = node->indirect_calls; ie; ie = next_ie)
3899 {
3900 class cgraph_indirect_call_info *ici = ie->indirect_info;
3901 struct ipa_jump_func *jfunc;
3902 int param_index;
3903
3904 next_ie = ie->next_callee;
3905
3906 if (ici->param_index == -1)
3907 continue;
3908
3909 /* We must check the range due to calls with a variable number of arguments.  */
3910 if (!top || ici->param_index >= ipa_get_cs_argument_count (top))
3911 {
3912 ici->param_index = -1;
3913 continue;
3914 }
3915
3916 param_index = ici->param_index;
3917 jfunc = ipa_get_ith_jump_func (top, param_index);
3918
3919 auto_vec<cgraph_node *, 4> spec_targets;
3920 if (ie->speculative)
3921 for (cgraph_edge *direct = ie->first_speculative_call_target ();
3922 direct;
3923 direct = direct->next_speculative_call_target ())
3924 spec_targets.safe_push (direct->callee);
3925
3926 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3927 new_direct_edge = NULL;
3928 else if (ici->polymorphic)
3929 {
3930 ipa_polymorphic_call_context ctx;
3931 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3932 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx,
3933 new_root,
3934 new_root_info);
3935 }
3936 else
3937 {
3938 tree target_type = ipa_get_type (inlined_node_info, param_index);
3939 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3940 target_type,
3941 new_root,
3942 new_root_info);
3943 }
3944
3945 /* If speculation was removed, then we need to do nothing. */
3946 if (new_direct_edge && new_direct_edge != ie
3947 && spec_targets.contains (new_direct_edge->callee))
3948 {
3949 new_direct_edge->indirect_inlining_edge = 1;
3950 top = IPA_EDGE_REF (cs);
3951 res = true;
3952 if (!new_direct_edge->speculative)
3953 continue;
3954 }
3955 else if (new_direct_edge)
3956 {
3957 new_direct_edge->indirect_inlining_edge = 1;
3958 if (new_edges)
3959 {
3960 new_edges->safe_push (new_direct_edge);
3961 res = true;
3962 }
3963 top = IPA_EDGE_REF (cs);
3964 /* If speculative edge was introduced we still need to update
3965 call info of the indirect edge. */
3966 if (!new_direct_edge->speculative)
3967 continue;
3968 }
3969 if (jfunc->type == IPA_JF_PASS_THROUGH
3970 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3971 {
3972 if (ici->agg_contents
3973 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3974 && !ici->polymorphic)
3975 ici->param_index = -1;
3976 else
3977 {
3978 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3979 if (ici->polymorphic
3980 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3981 ici->vptr_changed = true;
3982 ipa_set_param_used_by_indirect_call (new_root_info,
3983 ici->param_index, true);
3984 if (ici->polymorphic)
3985 ipa_set_param_used_by_polymorphic_call (new_root_info,
3986 ici->param_index, true);
3987 }
3988 }
3989 else if (jfunc->type == IPA_JF_ANCESTOR)
3990 {
3991 if (ici->agg_contents
3992 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3993 && !ici->polymorphic)
3994 ici->param_index = -1;
3995 else
3996 {
3997 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3998 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3999 if (ici->polymorphic
4000 && !ipa_get_jf_ancestor_type_preserved (jfunc))
4001 ici->vptr_changed = true;
4002 ipa_set_param_used_by_indirect_call (new_root_info,
4003 ici->param_index, true);
4004 if (ici->polymorphic)
4005 ipa_set_param_used_by_polymorphic_call (new_root_info,
4006 ici->param_index, true);
4007 }
4008 }
4009 else
4010 /* Either we can find a destination for this edge now or never. */
4011 ici->param_index = -1;
4012 }
4013
4014 return res;
4015 }
4016
4017 /* Recursively traverse subtree of NODE (including node) made of inlined
4018 cgraph_edges when CS has been inlined and invoke
4019 update_indirect_edges_after_inlining on all nodes and
4020 update_jump_functions_after_inlining on all non-inlined edges that lead out
4021 of this subtree. Newly discovered indirect edges will be added to
4022 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
4023 created. */
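/* E.g. if CS inlines function `a' into `b' and `a' itself already had `c'
   inlined into it (hypothetical names), the traversal visits `a' and `c',
   updates their indirect edges against the jump functions of CS, and updates
   the jump functions of every non-inlined edge leaving this subtree.  */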
4024
4025 static bool
4026 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
4027 struct cgraph_node *node,
4028 vec<cgraph_edge *> *new_edges)
4029 {
4030 struct cgraph_edge *e;
4031 bool res;
4032
4033 res = update_indirect_edges_after_inlining (cs, node, new_edges);
4034
4035 for (e = node->callees; e; e = e->next_callee)
4036 if (!e->inline_failed)
4037 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
4038 else
4039 update_jump_functions_after_inlining (cs, e);
4040 for (e = node->indirect_calls; e; e = e->next_callee)
4041 update_jump_functions_after_inlining (cs, e);
4042
4043 return res;
4044 }
4045
4046 /* Combine two controlled uses counts as done during inlining. */
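/* For example, if the caller so far has 3 controlled uses of a value and the
   callee being inlined reports 2 uses of the corresponding parameter, the
   combined count is 3 + 2 - 1 = 4: the call argument itself stops being a
   separate use once the call site disappears.  If either count is
   IPA_UNDESCRIBED_USE, the result stays IPA_UNDESCRIBED_USE.  */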
4047
4048 static int
4049 combine_controlled_uses_counters (int c, int d)
4050 {
4051 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
4052 return IPA_UNDESCRIBED_USE;
4053 else
4054 return c + d - 1;
4055 }
4056
4057 /* Propagate the number of controlled uses from CS->callee to the new root of
4058 the tree of inlined nodes. */
4059
4060 static void
4061 propagate_controlled_uses (struct cgraph_edge *cs)
4062 {
4063 class ipa_edge_args *args = IPA_EDGE_REF (cs);
4064 if (!args)
4065 return;
4066 struct cgraph_node *new_root = cs->caller->inlined_to
4067 ? cs->caller->inlined_to : cs->caller;
4068 class ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
4069 class ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
4070 int count, i;
4071
4072 if (!old_root_info)
4073 return;
4074
4075 count = MIN (ipa_get_cs_argument_count (args),
4076 ipa_get_param_count (old_root_info));
4077 for (i = 0; i < count; i++)
4078 {
4079 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
4080 struct ipa_cst_ref_desc *rdesc;
4081
4082 if (jf->type == IPA_JF_PASS_THROUGH)
4083 {
4084 int src_idx, c, d;
4085 src_idx = ipa_get_jf_pass_through_formal_id (jf);
4086 c = ipa_get_controlled_uses (new_root_info, src_idx);
4087 d = ipa_get_controlled_uses (old_root_info, i);
4088
4089 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
4090 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
4091 c = combine_controlled_uses_counters (c, d);
4092 ipa_set_controlled_uses (new_root_info, src_idx, c);
4093 if (c == 0 && new_root_info->ipcp_orig_node)
4094 {
4095 struct cgraph_node *n;
4096 struct ipa_ref *ref;
4097 tree t = new_root_info->known_csts[src_idx];
4098
4099 if (t && TREE_CODE (t) == ADDR_EXPR
4100 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
4101 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
4102 && (ref = new_root->find_reference (n, NULL, 0)))
4103 {
4104 if (dump_file)
4105 fprintf (dump_file, "ipa-prop: Removing cloning-created "
4106 "reference from %s to %s.\n",
4107 new_root->dump_name (),
4108 n->dump_name ());
4109 ref->remove_reference ();
4110 }
4111 }
4112 }
4113 else if (jf->type == IPA_JF_CONST
4114 && (rdesc = jfunc_rdesc_usable (jf)))
4115 {
4116 int d = ipa_get_controlled_uses (old_root_info, i);
4117 int c = rdesc->refcount;
4118 rdesc->refcount = combine_controlled_uses_counters (c, d);
4119 if (rdesc->refcount == 0)
4120 {
4121 tree cst = ipa_get_jf_constant (jf);
4122 struct cgraph_node *n;
4123 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
4124 && TREE_CODE (TREE_OPERAND (cst, 0))
4125 == FUNCTION_DECL);
4126 n = cgraph_node::get (TREE_OPERAND (cst, 0));
4127 if (n)
4128 {
4129 struct cgraph_node *clone;
4130 bool ok;
4131 ok = remove_described_reference (n, rdesc);
4132 gcc_checking_assert (ok);
4133
4134 clone = cs->caller;
4135 while (clone->inlined_to
4136 && clone->ipcp_clone
4137 && clone != rdesc->cs->caller)
4138 {
4139 struct ipa_ref *ref;
4140 ref = clone->find_reference (n, NULL, 0);
4141 if (ref)
4142 {
4143 if (dump_file)
4144 fprintf (dump_file, "ipa-prop: Removing "
4145 "cloning-created reference "
4146 "from %s to %s.\n",
4147 clone->dump_name (),
4148 n->dump_name ());
4149 ref->remove_reference ();
4150 }
4151 clone = clone->callers->caller;
4152 }
4153 }
4154 }
4155 }
4156 }
4157
4158 for (i = ipa_get_param_count (old_root_info);
4159 i < ipa_get_cs_argument_count (args);
4160 i++)
4161 {
4162 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
4163
4164 if (jf->type == IPA_JF_CONST)
4165 {
4166 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
4167 if (rdesc)
4168 rdesc->refcount = IPA_UNDESCRIBED_USE;
4169 }
4170 else if (jf->type == IPA_JF_PASS_THROUGH)
4171 ipa_set_controlled_uses (new_root_info,
4172 jf->value.pass_through.formal_id,
4173 IPA_UNDESCRIBED_USE);
4174 }
4175 }
4176
4177 /* Update jump functions and call note functions on inlining the call site CS.
4178 CS is expected to lead to a node already cloned by
4179 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
4180 *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff new edge(s) were
4181 created. */
4182
4183 bool
4184 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
4185 vec<cgraph_edge *> *new_edges)
4186 {
4187 bool changed;
4188 /* Do nothing if the preparation phase has not been carried out yet
4189 (i.e. during early inlining). */
4190 if (!ipa_node_params_sum)
4191 return false;
4192 gcc_assert (ipa_edge_args_sum);
4193
4194 propagate_controlled_uses (cs);
4195 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
4196 ipa_node_params_sum->remove (cs->callee);
4197
4198 class ipa_edge_args *args = IPA_EDGE_REF (cs);
4199 if (args)
4200 {
4201 bool ok = true;
4202 if (args->jump_functions)
4203 {
4204 struct ipa_jump_func *jf;
4205 int i;
4206 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
4207 if (jf->type == IPA_JF_CONST
4208 && ipa_get_jf_constant_rdesc (jf))
4209 {
4210 ok = false;
4211 break;
4212 }
4213 }
4214 if (ok)
4215 ipa_edge_args_sum->remove (cs);
4216 }
4217 if (ipcp_transformation_sum)
4218 ipcp_transformation_sum->remove (cs->callee);
4219
4220 return changed;
4221 }
4222
4223 /* Ensure that the array of edge argument infos is big enough to accommodate
4224 a structure for all edges and reallocate it if not.  Also, allocate the
4225 associated hash tables if they do not already exist. */
4226
4227 void
4228 ipa_check_create_edge_args (void)
4229 {
4230 if (!ipa_edge_args_sum)
4231 ipa_edge_args_sum
4232 = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
4233 ipa_edge_args_sum_t (symtab, true));
4234 if (!ipa_bits_hash_table)
4235 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4236 if (!ipa_vr_hash_table)
4237 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4238 }
4239
4240 /* Free all ipa_edge structures. */
4241
4242 void
4243 ipa_free_all_edge_args (void)
4244 {
4245 if (!ipa_edge_args_sum)
4246 return;
4247
4248 ggc_delete (ipa_edge_args_sum);
4249 ipa_edge_args_sum = NULL;
4250 }
4251
4252 /* Free all ipa_node_params structures. */
4253
4254 void
4255 ipa_free_all_node_params (void)
4256 {
4257 if (ipa_node_params_sum)
4258 ggc_delete (ipa_node_params_sum);
4259 ipa_node_params_sum = NULL;
4260 }
4261
4262 /* Initialize IPA CP transformation summary and also allocate any necessary hash
4263 tables if they do not already exist. */
4264
4265 void
4266 ipcp_transformation_initialize (void)
4267 {
4268 if (!ipa_bits_hash_table)
4269 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4270 if (!ipa_vr_hash_table)
4271 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4272 if (ipcp_transformation_sum == NULL)
4273 {
4274 ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
4275 ipcp_transformation_sum->disable_insertion_hook ();
4276 }
4277 }
4278
4279 /* Release the IPA CP transformation summary. */
4280
4281 void
4282 ipcp_free_transformation_sum (void)
4283 {
4284 if (!ipcp_transformation_sum)
4285 return;
4286
4287 ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
4288 ggc_free (ipcp_transformation_sum);
4289 ipcp_transformation_sum = NULL;
4290 }
4291
4292 /* Set the aggregate replacements of NODE to be AGGVALS. */
4293
4294 void
4295 ipa_set_node_agg_value_chain (struct cgraph_node *node,
4296 struct ipa_agg_replacement_value *aggvals)
4297 {
4298 ipcp_transformation_initialize ();
4299 ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
4300 s->agg_values = aggvals;
4301 }
4302
4303 /* Hook that is called by cgraph.c when an edge is removed. Adjust reference
4304 count data structures accordingly. */
4305
4306 void
4307 ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
4308 {
4309 if (args->jump_functions)
4310 {
4311 struct ipa_jump_func *jf;
4312 int i;
4313 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
4314 {
4315 struct ipa_cst_ref_desc *rdesc;
4316 try_decrement_rdesc_refcount (jf);
4317 if (jf->type == IPA_JF_CONST
4318 && (rdesc = ipa_get_jf_constant_rdesc (jf))
4319 && rdesc->cs == cs)
4320 rdesc->cs = NULL;
4321 }
4322 }
4323 }
4324
4325 /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
4326 reference count data structures accordingly. */
4327
4328 void
4329 ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
4330 ipa_edge_args *old_args, ipa_edge_args *new_args)
4331 {
4332 unsigned int i;
4333
4334 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
4335 if (old_args->polymorphic_call_contexts)
4336 new_args->polymorphic_call_contexts
4337 = vec_safe_copy (old_args->polymorphic_call_contexts);
4338
4339 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
4340 {
4341 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
4342 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
4343
4344 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
4345
4346 if (src_jf->type == IPA_JF_CONST)
4347 {
4348 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
4349
4350 if (!src_rdesc)
4351 dst_jf->value.constant.rdesc = NULL;
4352 else if (src->caller == dst->caller)
4353 {
4354 struct ipa_ref *ref;
4355 symtab_node *n = cgraph_node_for_jfunc (src_jf);
4356 gcc_checking_assert (n);
4357 ref = src->caller->find_reference (n, src->call_stmt,
4358 src->lto_stmt_uid);
4359 gcc_checking_assert (ref);
4360 dst->caller->clone_reference (ref, ref->stmt);
4361
4362 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4363 dst_rdesc->cs = dst;
4364 dst_rdesc->refcount = src_rdesc->refcount;
4365 dst_rdesc->next_duplicate = NULL;
4366 dst_jf->value.constant.rdesc = dst_rdesc;
4367 }
4368 else if (src_rdesc->cs == src)
4369 {
4370 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4371 dst_rdesc->cs = dst;
4372 dst_rdesc->refcount = src_rdesc->refcount;
4373 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
4374 src_rdesc->next_duplicate = dst_rdesc;
4375 dst_jf->value.constant.rdesc = dst_rdesc;
4376 }
4377 else
4378 {
4379 struct ipa_cst_ref_desc *dst_rdesc;
4380 /* This can happen during inlining, when a JFUNC can refer to a
4381 reference taken in a function up in the tree of inline clones.
4382 We need to find the duplicate that refers to our tree of
4383 inline clones. */
4384
4385 gcc_assert (dst->caller->inlined_to);
4386 for (dst_rdesc = src_rdesc->next_duplicate;
4387 dst_rdesc;
4388 dst_rdesc = dst_rdesc->next_duplicate)
4389 {
4390 struct cgraph_node *top;
4391 top = dst_rdesc->cs->caller->inlined_to
4392 ? dst_rdesc->cs->caller->inlined_to
4393 : dst_rdesc->cs->caller;
4394 if (dst->caller->inlined_to == top)
4395 break;
4396 }
4397 gcc_assert (dst_rdesc);
4398 dst_jf->value.constant.rdesc = dst_rdesc;
4399 }
4400 }
4401 else if (dst_jf->type == IPA_JF_PASS_THROUGH
4402 && src->caller == dst->caller)
4403 {
4404 struct cgraph_node *inline_root = dst->caller->inlined_to
4405 ? dst->caller->inlined_to : dst->caller;
4406 class ipa_node_params *root_info = IPA_NODE_REF (inline_root);
4407 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
4408
4409 int c = ipa_get_controlled_uses (root_info, idx);
4410 if (c != IPA_UNDESCRIBED_USE)
4411 {
4412 c++;
4413 ipa_set_controlled_uses (root_info, idx, c);
4414 }
4415 }
4416 }
4417 }
4418
4419 /* Analyze a function newly added to the callgraph. */
4420
4421 static void
4422 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
4423 {
4424 if (node->has_gimple_body_p ())
4425 ipa_analyze_node (node);
4426 }
4427
4428 /* Hook that is called by summary when a node is duplicated. */
4429
4430 void
4431 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
4432 ipa_node_params *old_info,
4433 ipa_node_params *new_info)
4434 {
4435 ipa_agg_replacement_value *old_av, *new_av;
4436
4437 new_info->descriptors = vec_safe_copy (old_info->descriptors);
4438 new_info->lattices = NULL;
4439 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
4440 new_info->known_csts = old_info->known_csts.copy ();
4441 new_info->known_contexts = old_info->known_contexts.copy ();
4442
4443 new_info->analysis_done = old_info->analysis_done;
4444 new_info->node_enqueued = old_info->node_enqueued;
4445 new_info->versionable = old_info->versionable;
4446
4447 old_av = ipa_get_agg_replacements_for_node (src);
4448 if (old_av)
4449 {
4450 new_av = NULL;
4451 while (old_av)
4452 {
4453 struct ipa_agg_replacement_value *v;
4454
4455 v = ggc_alloc<ipa_agg_replacement_value> ();
4456 memcpy (v, old_av, sizeof (*v));
4457 v->next = new_av;
4458 new_av = v;
4459 old_av = old_av->next;
4460 }
4461 ipa_set_node_agg_value_chain (dst, new_av);
4462 }
4463 }
4464
4465 /* Duplication of ipcp transformation summaries. */
4466
4467 void
4468 ipcp_transformation_t::duplicate(cgraph_node *, cgraph_node *dst,
4469 ipcp_transformation *src_trans,
4470 ipcp_transformation *dst_trans)
4471 {
4472 /* Avoid redundant work of duplicating vectors we will never use. */
4473 if (dst->inlined_to)
4474 return;
4475 dst_trans->bits = vec_safe_copy (src_trans->bits);
4476 dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
4477 ipa_agg_replacement_value *agg = src_trans->agg_values,
4478 **aggptr = &dst_trans->agg_values;
4479 while (agg)
4480 {
4481 *aggptr = ggc_alloc<ipa_agg_replacement_value> ();
4482 **aggptr = *agg;
4483 agg = agg->next;
4484 aggptr = &(*aggptr)->next;
4485 }
4486 }
4487
4488 /* Register our cgraph hooks if they are not already there. */
4489
4490 void
4491 ipa_register_cgraph_hooks (void)
4492 {
4493 ipa_check_create_node_params ();
4494 ipa_check_create_edge_args ();
4495
4496 function_insertion_hook_holder =
4497 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
4498 }
4499
4500 /* Unregister our cgraph hooks if they have been registered. */
4501
4502 static void
4503 ipa_unregister_cgraph_hooks (void)
4504 {
4505 if (function_insertion_hook_holder)
4506 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
4507 function_insertion_hook_holder = NULL;
4508 }
4509
4510 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4511 longer needed after ipa-cp. */
4512
4513 void
4514 ipa_free_all_structures_after_ipa_cp (void)
4515 {
4516 if (!optimize && !in_lto_p)
4517 {
4518 ipa_free_all_edge_args ();
4519 ipa_free_all_node_params ();
4520 ipcp_sources_pool.release ();
4521 ipcp_cst_values_pool.release ();
4522 ipcp_poly_ctx_values_pool.release ();
4523 ipcp_agg_lattice_pool.release ();
4524 ipa_unregister_cgraph_hooks ();
4525 ipa_refdesc_pool.release ();
4526 }
4527 }
4528
4529 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4530 longer needed after indirect inlining. */
4531
4532 void
4533 ipa_free_all_structures_after_iinln (void)
4534 {
4535 ipa_free_all_edge_args ();
4536 ipa_free_all_node_params ();
4537 ipa_unregister_cgraph_hooks ();
4538 ipcp_sources_pool.release ();
4539 ipcp_cst_values_pool.release ();
4540 ipcp_poly_ctx_values_pool.release ();
4541 ipcp_agg_lattice_pool.release ();
4542 ipa_refdesc_pool.release ();
4543 }
4544
4545 /* Print ipa_tree_map data structures of function NODE to F. */
4547
4548 void
4549 ipa_print_node_params (FILE *f, struct cgraph_node *node)
4550 {
4551 int i, count;
4552 class ipa_node_params *info;
4553
4554 if (!node->definition)
4555 return;
4556 info = IPA_NODE_REF (node);
4557 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
4558 if (!info)
4559 {
4560 fprintf (f, " no params return\n");
4561 return;
4562 }
4563 count = ipa_get_param_count (info);
4564 for (i = 0; i < count; i++)
4565 {
4566 int c;
4567
4568 fprintf (f, " ");
4569 ipa_dump_param (f, info, i);
4570 if (ipa_is_param_used (info, i))
4571 fprintf (f, " used");
4572 if (ipa_is_param_used_by_ipa_predicates (info, i))
4573 fprintf (f, " used_by_ipa_predicates");
4574 if (ipa_is_param_used_by_indirect_call (info, i))
4575 fprintf (f, " used_by_indirect_call");
4576 if (ipa_is_param_used_by_polymorphic_call (info, i))
4577 fprintf (f, " used_by_polymorphic_call");
4578 c = ipa_get_controlled_uses (info, i);
4579 if (c == IPA_UNDESCRIBED_USE)
4580 fprintf (f, " undescribed_use");
4581 else
4582 fprintf (f, " controlled_uses=%i", c);
4583 fprintf (f, "\n");
4584 }
4585 }
4586
4587 /* Print ipa_tree_map data structures of all functions in the
4588 callgraph to F. */
4589
4590 void
4591 ipa_print_all_params (FILE * f)
4592 {
4593 struct cgraph_node *node;
4594
4595 fprintf (f, "\nFunction parameters:\n");
4596 FOR_EACH_FUNCTION (node)
4597 ipa_print_node_params (f, node);
4598 }
4599
4600 /* Dump the AV linked list. */
4601
4602 void
4603 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4604 {
4605 bool comma = false;
4606 fprintf (f, " Aggregate replacements:");
4607 for (; av; av = av->next)
4608 {
4609 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4610 av->index, av->offset);
4611 print_generic_expr (f, av->value);
4612 comma = true;
4613 }
4614 fprintf (f, "\n");
4615 }
4616
4617 /* Stream out jump function JUMP_FUNC to OB. */
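/* A short worked example of the encoding used below (values illustrative
   only): an IPA_JF_CONST jump function whose constant is the ADDR_EXPR `&foo'
   is streamed as the uhwi IPA_JF_CONST * 2 + 1 followed by the FUNCTION_DECL
   `foo' itself, and the reader rebuilds the ADDR_EXPR from the low bit.  A
   plain integer constant is streamed as IPA_JF_CONST * 2 + 0 followed by the
   whole tree.  */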
4618
4619 static void
4620 ipa_write_jump_function (struct output_block *ob,
4621 struct ipa_jump_func *jump_func)
4622 {
4623 struct ipa_agg_jf_item *item;
4624 struct bitpack_d bp;
4625 int i, count;
4626 int flag = 0;
4627
4628 /* ADDR_EXPRs are very common IP invariants; save some streamer data
4629 as well as WPA memory by handling them specially. */
4630 if (jump_func->type == IPA_JF_CONST
4631 && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
4632 flag = 1;
4633
4634 streamer_write_uhwi (ob, jump_func->type * 2 + flag);
4635 switch (jump_func->type)
4636 {
4637 case IPA_JF_UNKNOWN:
4638 break;
4639 case IPA_JF_CONST:
4640 gcc_assert (
4641 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4642 stream_write_tree (ob,
4643 flag
4644 ? TREE_OPERAND (jump_func->value.constant.value, 0)
4645 : jump_func->value.constant.value, true);
4646 break;
4647 case IPA_JF_PASS_THROUGH:
4648 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4649 if (jump_func->value.pass_through.operation == NOP_EXPR)
4650 {
4651 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4652 bp = bitpack_create (ob->main_stream);
4653 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4654 streamer_write_bitpack (&bp);
4655 }
4656 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4657 == tcc_unary)
4658 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4659 else
4660 {
4661 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4662 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4663 }
4664 break;
4665 case IPA_JF_ANCESTOR:
4666 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4667 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4668 bp = bitpack_create (ob->main_stream);
4669 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4670 streamer_write_bitpack (&bp);
4671 break;
4672 default:
4673 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
4674 }
4675
4676 count = vec_safe_length (jump_func->agg.items);
4677 streamer_write_uhwi (ob, count);
4678 if (count)
4679 {
4680 bp = bitpack_create (ob->main_stream);
4681 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4682 streamer_write_bitpack (&bp);
4683 }
4684
4685 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4686 {
4687 stream_write_tree (ob, item->type, true);
4688 streamer_write_uhwi (ob, item->offset);
4689 streamer_write_uhwi (ob, item->jftype);
4690 switch (item->jftype)
4691 {
4692 case IPA_JF_UNKNOWN:
4693 break;
4694 case IPA_JF_CONST:
4695 stream_write_tree (ob, item->value.constant, true);
4696 break;
4697 case IPA_JF_PASS_THROUGH:
4698 case IPA_JF_LOAD_AGG:
4699 streamer_write_uhwi (ob, item->value.pass_through.operation);
4700 streamer_write_uhwi (ob, item->value.pass_through.formal_id);
4701 if (TREE_CODE_CLASS (item->value.pass_through.operation)
4702 != tcc_unary)
4703 stream_write_tree (ob, item->value.pass_through.operand, true);
4704 if (item->jftype == IPA_JF_LOAD_AGG)
4705 {
4706 stream_write_tree (ob, item->value.load_agg.type, true);
4707 streamer_write_uhwi (ob, item->value.load_agg.offset);
4708 bp = bitpack_create (ob->main_stream);
4709 bp_pack_value (&bp, item->value.load_agg.by_ref, 1);
4710 streamer_write_bitpack (&bp);
4711 }
4712 break;
4713 default:
4714 fatal_error (UNKNOWN_LOCATION,
4715 "invalid jump function in LTO stream");
4716 }
4717 }
4718
4719 bp = bitpack_create (ob->main_stream);
4720 bp_pack_value (&bp, !!jump_func->bits, 1);
4721 streamer_write_bitpack (&bp);
4722 if (jump_func->bits)
4723 {
4724 streamer_write_widest_int (ob, jump_func->bits->value);
4725 streamer_write_widest_int (ob, jump_func->bits->mask);
4726 }
4727 bp_pack_value (&bp, !!jump_func->m_vr, 1);
4728 streamer_write_bitpack (&bp);
4729 if (jump_func->m_vr)
4730 {
4731 streamer_write_enum (ob->main_stream, value_rang_type,
4732 VR_LAST, jump_func->m_vr->kind ());
4733 stream_write_tree (ob, jump_func->m_vr->min (), true);
4734 stream_write_tree (ob, jump_func->m_vr->max (), true);
4735 }
4736 }
4737
4738 /* Read in jump function JUMP_FUNC from IB. */
4739
4740 static void
4741 ipa_read_jump_function (class lto_input_block *ib,
4742 struct ipa_jump_func *jump_func,
4743 struct cgraph_edge *cs,
4744 class data_in *data_in,
4745 bool prevails)
4746 {
4747 enum jump_func_type jftype;
4748 enum tree_code operation;
4749 int i, count;
4750 int val = streamer_read_uhwi (ib);
4751 bool flag = val & 1;
4752
4753 jftype = (enum jump_func_type) (val / 2);
4754 switch (jftype)
4755 {
4756 case IPA_JF_UNKNOWN:
4757 ipa_set_jf_unknown (jump_func);
4758 break;
4759 case IPA_JF_CONST:
4760 {
4761 tree t = stream_read_tree (ib, data_in);
4762 if (flag && prevails)
4763 t = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)), t);
4764 ipa_set_jf_constant (jump_func, t, cs);
4765 }
4766 break;
4767 case IPA_JF_PASS_THROUGH:
4768 operation = (enum tree_code) streamer_read_uhwi (ib);
4769 if (operation == NOP_EXPR)
4770 {
4771 int formal_id = streamer_read_uhwi (ib);
4772 struct bitpack_d bp = streamer_read_bitpack (ib);
4773 bool agg_preserved = bp_unpack_value (&bp, 1);
4774 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4775 }
4776 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4777 {
4778 int formal_id = streamer_read_uhwi (ib);
4779 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4780 }
4781 else
4782 {
4783 tree operand = stream_read_tree (ib, data_in);
4784 int formal_id = streamer_read_uhwi (ib);
4785 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4786 operation);
4787 }
4788 break;
4789 case IPA_JF_ANCESTOR:
4790 {
4791 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4792 int formal_id = streamer_read_uhwi (ib);
4793 struct bitpack_d bp = streamer_read_bitpack (ib);
4794 bool agg_preserved = bp_unpack_value (&bp, 1);
4795 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4796 break;
4797 }
4798 default:
4799 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
4800 }
4801
4802 count = streamer_read_uhwi (ib);
4803 if (prevails)
4804 {
4805 jump_func->agg.items = NULL;
4806 vec_safe_reserve (jump_func->agg.items, count, true);
4807 }
4808 if (count)
4809 {
4810 struct bitpack_d bp = streamer_read_bitpack (ib);
4811 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4812 }
4813 for (i = 0; i < count; i++)
4814 {
4815 struct ipa_agg_jf_item item;
4816 item.type = stream_read_tree (ib, data_in);
4817 item.offset = streamer_read_uhwi (ib);
4818 item.jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4819
4820 switch (item.jftype)
4821 {
4822 case IPA_JF_UNKNOWN:
4823 break;
4824 case IPA_JF_CONST:
4825 item.value.constant = stream_read_tree (ib, data_in);
4826 break;
4827 case IPA_JF_PASS_THROUGH:
4828 case IPA_JF_LOAD_AGG:
4829 operation = (enum tree_code) streamer_read_uhwi (ib);
4830 item.value.pass_through.operation = operation;
4831 item.value.pass_through.formal_id = streamer_read_uhwi (ib);
4832 if (TREE_CODE_CLASS (operation) == tcc_unary)
4833 item.value.pass_through.operand = NULL_TREE;
4834 else
4835 item.value.pass_through.operand = stream_read_tree (ib, data_in);
4836 if (item.jftype == IPA_JF_LOAD_AGG)
4837 {
4838 struct bitpack_d bp;
4839 item.value.load_agg.type = stream_read_tree (ib, data_in);
4840 item.value.load_agg.offset = streamer_read_uhwi (ib);
4841 bp = streamer_read_bitpack (ib);
4842 item.value.load_agg.by_ref = bp_unpack_value (&bp, 1);
4843 }
4844 break;
4845 default:
4846 fatal_error (UNKNOWN_LOCATION,
4847 "invalid jump function in LTO stream");
4848 }
4849 if (prevails)
4850 jump_func->agg.items->quick_push (item);
4851 }
4852
4853 struct bitpack_d bp = streamer_read_bitpack (ib);
4854 bool bits_known = bp_unpack_value (&bp, 1);
4855 if (bits_known)
4856 {
4857 widest_int value = streamer_read_widest_int (ib);
4858 widest_int mask = streamer_read_widest_int (ib);
4859 if (prevails)
4860 ipa_set_jfunc_bits (jump_func, value, mask);
4861 }
4862 else
4863 jump_func->bits = NULL;
4864
4865 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4866 bool vr_known = bp_unpack_value (&vr_bp, 1);
4867 if (vr_known)
4868 {
4869 enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
4870 VR_LAST);
4871 tree min = stream_read_tree (ib, data_in);
4872 tree max = stream_read_tree (ib, data_in);
4873 if (prevails)
4874 ipa_set_jfunc_vr (jump_func, type, min, max);
4875 }
4876 else
4877 jump_func->m_vr = NULL;
4878 }
4879
4880 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4881 relevant to indirect inlining to OB. */
4882
4883 static void
4884 ipa_write_indirect_edge_info (struct output_block *ob,
4885 struct cgraph_edge *cs)
4886 {
4887 class cgraph_indirect_call_info *ii = cs->indirect_info;
4888 struct bitpack_d bp;
4889
4890 streamer_write_hwi (ob, ii->param_index);
4891 bp = bitpack_create (ob->main_stream);
4892 bp_pack_value (&bp, ii->polymorphic, 1);
4893 bp_pack_value (&bp, ii->agg_contents, 1);
4894 bp_pack_value (&bp, ii->member_ptr, 1);
4895 bp_pack_value (&bp, ii->by_ref, 1);
4896 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4897 bp_pack_value (&bp, ii->vptr_changed, 1);
4898 streamer_write_bitpack (&bp);
4899 if (ii->agg_contents || ii->polymorphic)
4900 streamer_write_hwi (ob, ii->offset);
4901 else
4902 gcc_assert (ii->offset == 0);
4903
4904 if (ii->polymorphic)
4905 {
4906 streamer_write_hwi (ob, ii->otr_token);
4907 stream_write_tree (ob, ii->otr_type, true);
4908 ii->context.stream_out (ob);
4909 }
4910 }
4911
4912 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4913 relevant to indirect inlining from IB. */
4914
4915 static void
4916 ipa_read_indirect_edge_info (class lto_input_block *ib,
4917 class data_in *data_in,
4918 struct cgraph_edge *cs,
4919 class ipa_node_params *info)
4920 {
4921 class cgraph_indirect_call_info *ii = cs->indirect_info;
4922 struct bitpack_d bp;
4923
4924 ii->param_index = (int) streamer_read_hwi (ib);
4925 bp = streamer_read_bitpack (ib);
4926 ii->polymorphic = bp_unpack_value (&bp, 1);
4927 ii->agg_contents = bp_unpack_value (&bp, 1);
4928 ii->member_ptr = bp_unpack_value (&bp, 1);
4929 ii->by_ref = bp_unpack_value (&bp, 1);
4930 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4931 ii->vptr_changed = bp_unpack_value (&bp, 1);
4932 if (ii->agg_contents || ii->polymorphic)
4933 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4934 else
4935 ii->offset = 0;
4936 if (ii->polymorphic)
4937 {
4938 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4939 ii->otr_type = stream_read_tree (ib, data_in);
4940 ii->context.stream_in (ib, data_in);
4941 }
4942 if (info && ii->param_index >= 0)
4943 {
4944 if (ii->polymorphic)
4945 ipa_set_param_used_by_polymorphic_call (info,
4946 ii->param_index , true);
4947 ipa_set_param_used_by_indirect_call (info,
4948 ii->param_index, true);
4949 }
4950 }
4951
4952 /* Stream out NODE info to OB. */
4953
4954 static void
4955 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4956 {
4957 int node_ref;
4958 lto_symtab_encoder_t encoder;
4959 class ipa_node_params *info = IPA_NODE_REF (node);
4960 int j;
4961 struct cgraph_edge *e;
4962 struct bitpack_d bp;
4963
4964 encoder = ob->decl_state->symtab_node_encoder;
4965 node_ref = lto_symtab_encoder_encode (encoder, node);
4966 streamer_write_uhwi (ob, node_ref);
4967
4968 streamer_write_uhwi (ob, ipa_get_param_count (info));
4969 for (j = 0; j < ipa_get_param_count (info); j++)
4970 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4971 bp = bitpack_create (ob->main_stream);
4972 gcc_assert (info->analysis_done
4973 || ipa_get_param_count (info) == 0);
4974 gcc_assert (!info->node_enqueued);
4975 gcc_assert (!info->ipcp_orig_node);
4976 for (j = 0; j < ipa_get_param_count (info); j++)
4977 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4978 streamer_write_bitpack (&bp);
4979 for (j = 0; j < ipa_get_param_count (info); j++)
4980 {
4981 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4982 stream_write_tree (ob, ipa_get_type (info, j), true);
4983 }
4984 for (e = node->callees; e; e = e->next_callee)
4985 {
4986 class ipa_edge_args *args = IPA_EDGE_REF (e);
4987
4988 if (!args)
4989 {
4990 streamer_write_uhwi (ob, 0);
4991 continue;
4992 }
4993
4994 streamer_write_uhwi (ob,
4995 ipa_get_cs_argument_count (args) * 2
4996 + (args->polymorphic_call_contexts != NULL));
4997 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4998 {
4999 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5000 if (args->polymorphic_call_contexts != NULL)
5001 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5002 }
5003 }
5004 for (e = node->indirect_calls; e; e = e->next_callee)
5005 {
5006 class ipa_edge_args *args = IPA_EDGE_REF (e);
5007 if (!args)
5008 streamer_write_uhwi (ob, 0);
5009 else
5010 {
5011 streamer_write_uhwi (ob,
5012 ipa_get_cs_argument_count (args) * 2
5013 + (args->polymorphic_call_contexts != NULL));
5014 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5015 {
5016 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5017 if (args->polymorphic_call_contexts != NULL)
5018 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5019 }
5020 }
5021 ipa_write_indirect_edge_info (ob, e);
5022 }
5023 }
5024
5025 /* Stream in edge E from IB. */
5026
5027 static void
5028 ipa_read_edge_info (class lto_input_block *ib,
5029 class data_in *data_in,
5030 struct cgraph_edge *e, bool prevails)
5031 {
5032 int count = streamer_read_uhwi (ib);
5033 bool contexts_computed = count & 1;
5034
5035 count /= 2;
5036 if (!count)
5037 return;
5038 if (prevails
5039 && (e->possibly_call_in_translation_unit_p ()
5040 /* Also stream in jump functions to builtins in the hope that
5041 they will get fnspecs. */
5042 || fndecl_built_in_p (e->callee->decl, BUILT_IN_NORMAL)))
5043 {
5044 class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (e);
5045 vec_safe_grow_cleared (args->jump_functions, count, true);
5046 if (contexts_computed)
5047 vec_safe_grow_cleared (args->polymorphic_call_contexts, count, true);
5048 for (int k = 0; k < count; k++)
5049 {
5050 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5051 data_in, prevails);
5052 if (contexts_computed)
5053 ipa_get_ith_polymorhic_call_context (args, k)->stream_in
5054 (ib, data_in);
5055 }
5056 }
5057 else
5058 {
5059 for (int k = 0; k < count; k++)
5060 {
5061 struct ipa_jump_func dummy;
5062 ipa_read_jump_function (ib, &dummy, e,
5063 data_in, prevails);
5064 if (contexts_computed)
5065 {
5066 class ipa_polymorphic_call_context ctx;
5067 ctx.stream_in (ib, data_in);
5068 }
5069 }
5070 }
5071 }
5072
5073 /* Stream in NODE info from IB. */
5074
5075 static void
5076 ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
5077 class data_in *data_in)
5078 {
5079 int k;
5080 struct cgraph_edge *e;
5081 struct bitpack_d bp;
5082 bool prevails = node->prevailing_p ();
5083 class ipa_node_params *info = prevails
5084 ? IPA_NODE_REF_GET_CREATE (node) : NULL;
5085
5086 int param_count = streamer_read_uhwi (ib);
5087 if (prevails)
5088 {
5089 ipa_alloc_node_params (node, param_count);
5090 for (k = 0; k < param_count; k++)
5091 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
5092 if (ipa_get_param_count (info) != 0)
5093 info->analysis_done = true;
5094 info->node_enqueued = false;
5095 }
5096 else
5097 for (k = 0; k < param_count; k++)
5098 streamer_read_uhwi (ib);
5099
5100 bp = streamer_read_bitpack (ib);
5101 for (k = 0; k < param_count; k++)
5102 {
5103 bool used = bp_unpack_value (&bp, 1);
5104
5105 if (prevails)
5106 ipa_set_param_used (info, k, used);
5107 }
5108 for (k = 0; k < param_count; k++)
5109 {
5110 int nuses = streamer_read_hwi (ib);
5111 tree type = stream_read_tree (ib, data_in);
5112
5113 if (prevails)
5114 {
5115 ipa_set_controlled_uses (info, k, nuses);
5116 (*info->descriptors)[k].decl_or_type = type;
5117 }
5118 }
5119 for (e = node->callees; e; e = e->next_callee)
5120 ipa_read_edge_info (ib, data_in, e, prevails);
5121 for (e = node->indirect_calls; e; e = e->next_callee)
5122 {
5123 ipa_read_edge_info (ib, data_in, e, prevails);
5124 ipa_read_indirect_edge_info (ib, data_in, e, info);
5125 }
5126 }
5127
5128 /* Write jump functions for nodes in SET. */
5129
5130 void
5131 ipa_prop_write_jump_functions (void)
5132 {
5133 struct cgraph_node *node;
5134 struct output_block *ob;
5135 unsigned int count = 0;
5136 lto_symtab_encoder_iterator lsei;
5137 lto_symtab_encoder_t encoder;
5138
5139 if (!ipa_node_params_sum || !ipa_edge_args_sum)
5140 return;
5141
5142 ob = create_output_block (LTO_section_jump_functions);
5143 encoder = ob->decl_state->symtab_node_encoder;
5144 ob->symbol = NULL;
5145 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5146 lsei_next_function_in_partition (&lsei))
5147 {
5148 node = lsei_cgraph_node (lsei);
5149 if (node->has_gimple_body_p ()
5150 && IPA_NODE_REF (node) != NULL)
5151 count++;
5152 }
5153
5154 streamer_write_uhwi (ob, count);
5155
5156 /* Process all of the functions. */
5157 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5158 lsei_next_function_in_partition (&lsei))
5159 {
5160 node = lsei_cgraph_node (lsei);
5161 if (node->has_gimple_body_p ()
5162 && IPA_NODE_REF (node) != NULL)
5163 ipa_write_node_info (ob, node);
5164 }
5165 streamer_write_char_stream (ob->main_stream, 0);
5166 produce_asm (ob, NULL);
5167 destroy_output_block (ob);
5168 }
5169
5170 /* Read section in file FILE_DATA of length LEN with data DATA. */
5171
5172 static void
5173 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5174 size_t len)
5175 {
5176 const struct lto_function_header *header =
5177 (const struct lto_function_header *) data;
5178 const int cfg_offset = sizeof (struct lto_function_header);
5179 const int main_offset = cfg_offset + header->cfg_size;
5180 const int string_offset = main_offset + header->main_size;
5181 class data_in *data_in;
5182 unsigned int i;
5183 unsigned int count;
5184
5185 lto_input_block ib_main ((const char *) data + main_offset,
5186 header->main_size, file_data->mode_table);
5187
5188 data_in =
5189 lto_data_in_create (file_data, (const char *) data + string_offset,
5190 header->string_size, vNULL);
5191 count = streamer_read_uhwi (&ib_main);
5192
5193 for (i = 0; i < count; i++)
5194 {
5195 unsigned int index;
5196 struct cgraph_node *node;
5197 lto_symtab_encoder_t encoder;
5198
5199 index = streamer_read_uhwi (&ib_main);
5200 encoder = file_data->symtab_node_encoder;
5201 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5202 index));
5203 gcc_assert (node->definition);
5204 ipa_read_node_info (&ib_main, node, data_in);
5205 }
5206 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5207 len);
5208 lto_data_in_delete (data_in);
5209 }
5210
5211 /* Read IPA-CP jump functions from all LTO input files. */
5212
5213 void
5214 ipa_prop_read_jump_functions (void)
5215 {
5216 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5217 struct lto_file_decl_data *file_data;
5218 unsigned int j = 0;
5219
5220 ipa_check_create_node_params ();
5221 ipa_check_create_edge_args ();
5222 ipa_register_cgraph_hooks ();
5223
5224 while ((file_data = file_data_vec[j++]))
5225 {
5226 size_t len;
5227 const char *data
5228 = lto_get_summary_section_data (file_data, LTO_section_jump_functions,
5229 &len);
5230 if (data)
5231 ipa_prop_read_section (file_data, data, len);
5232 }
5233 }
5234
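/* Stream out the aggregate value replacement chain, known value ranges and
   known bits information of NODE to OB.  */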
5235 void
5236 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
5237 {
5238 int node_ref;
5239 unsigned int count = 0;
5240 lto_symtab_encoder_t encoder;
5241 struct ipa_agg_replacement_value *aggvals, *av;
5242
5243 aggvals = ipa_get_agg_replacements_for_node (node);
5244 encoder = ob->decl_state->symtab_node_encoder;
5245 node_ref = lto_symtab_encoder_encode (encoder, node);
5246 streamer_write_uhwi (ob, node_ref);
5247
5248 for (av = aggvals; av; av = av->next)
5249 count++;
5250 streamer_write_uhwi (ob, count);
5251
5252 for (av = aggvals; av; av = av->next)
5253 {
5254 struct bitpack_d bp;
5255
5256 streamer_write_uhwi (ob, av->offset);
5257 streamer_write_uhwi (ob, av->index);
5258 stream_write_tree (ob, av->value, true);
5259
5260 bp = bitpack_create (ob->main_stream);
5261 bp_pack_value (&bp, av->by_ref, 1);
5262 streamer_write_bitpack (&bp);
5263 }
5264
5265 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5266 if (ts && vec_safe_length (ts->m_vr) > 0)
5267 {
5268 count = ts->m_vr->length ();
5269 streamer_write_uhwi (ob, count);
5270 for (unsigned i = 0; i < count; ++i)
5271 {
5272 struct bitpack_d bp;
5273 ipa_vr *parm_vr = &(*ts->m_vr)[i];
5274 bp = bitpack_create (ob->main_stream);
5275 bp_pack_value (&bp, parm_vr->known, 1);
5276 streamer_write_bitpack (&bp);
5277 if (parm_vr->known)
5278 {
5279 streamer_write_enum (ob->main_stream, value_range_kind,
5280 VR_LAST, parm_vr->type);
5281 streamer_write_wide_int (ob, parm_vr->min);
5282 streamer_write_wide_int (ob, parm_vr->max);
5283 }
5284 }
5285 }
5286 else
5287 streamer_write_uhwi (ob, 0);
5288
5289 if (ts && vec_safe_length (ts->bits) > 0)
5290 {
5291 count = ts->bits->length ();
5292 streamer_write_uhwi (ob, count);
5293
5294 for (unsigned i = 0; i < count; ++i)
5295 {
5296 const ipa_bits *bits_jfunc = (*ts->bits)[i];
5297 struct bitpack_d bp = bitpack_create (ob->main_stream);
5298 bp_pack_value (&bp, !!bits_jfunc, 1);
5299 streamer_write_bitpack (&bp);
5300 if (bits_jfunc)
5301 {
5302 streamer_write_widest_int (ob, bits_jfunc->value);
5303 streamer_write_widest_int (ob, bits_jfunc->mask);
5304 }
5305 }
5306 }
5307 else
5308 streamer_write_uhwi (ob, 0);
5309 }
5310
5311 /* Stream in the aggregate value replacement chain, known value ranges and known bits information for NODE from IB. */
5312
5313 static void
5314 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5315 data_in *data_in)
5316 {
5317 struct ipa_agg_replacement_value *aggvals = NULL;
5318 unsigned int count, i;
5319
5320 count = streamer_read_uhwi (ib);
5321 for (i = 0; i < count; i++)
5322 {
5323 struct ipa_agg_replacement_value *av;
5324 struct bitpack_d bp;
5325
5326 av = ggc_alloc<ipa_agg_replacement_value> ();
5327 av->offset = streamer_read_uhwi (ib);
5328 av->index = streamer_read_uhwi (ib);
5329 av->value = stream_read_tree (ib, data_in);
5330 bp = streamer_read_bitpack (ib);
5331 av->by_ref = bp_unpack_value (&bp, 1);
5332 av->next = aggvals;
5333 aggvals = av;
5334 }
5335 ipa_set_node_agg_value_chain (node, aggvals);
5336
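/* The aggregate replacements are followed by the known value ranges and then
   the known bits of the parameters, each preceded by its count (zero when
   nothing was streamed for the node).  */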
5337 count = streamer_read_uhwi (ib);
5338 if (count > 0)
5339 {
5340 ipcp_transformation_initialize ();
5341 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
5342 vec_safe_grow_cleared (ts->m_vr, count, true);
5343 for (i = 0; i < count; i++)
5344 {
5345 ipa_vr *parm_vr;
5346 parm_vr = &(*ts->m_vr)[i];
5347 struct bitpack_d bp;
5348 bp = streamer_read_bitpack (ib);
5349 parm_vr->known = bp_unpack_value (&bp, 1);
5350 if (parm_vr->known)
5351 {
5352 parm_vr->type = streamer_read_enum (ib, value_range_kind,
5353 VR_LAST);
5354 parm_vr->min = streamer_read_wide_int (ib);
5355 parm_vr->max = streamer_read_wide_int (ib);
5356 }
5357 }
5358 }
5359 count = streamer_read_uhwi (ib);
5360 if (count > 0)
5361 {
5362 ipcp_transformation_initialize ();
5363 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
5364 vec_safe_grow_cleared (ts->bits, count, true);
5365
5366 for (i = 0; i < count; i++)
5367 {
5368 struct bitpack_d bp = streamer_read_bitpack (ib);
5369 bool known = bp_unpack_value (&bp, 1);
5370 if (known)
5371 {
5372 const widest_int value = streamer_read_widest_int (ib);
5373 const widest_int mask = streamer_read_widest_int (ib);
5374 ipa_bits *bits
5375 = ipa_get_ipa_bits_for_value (value, mask);
5376 (*ts->bits)[i] = bits;
5377 }
5378 }
5379 }
5380 }
5381
5382 /* Write aggregate replacements and other transformation summaries of all functions in the current LTO partition. */
5383
5384 void
5385 ipcp_write_transformation_summaries (void)
5386 {
5387 struct cgraph_node *node;
5388 struct output_block *ob;
5389 unsigned int count = 0;
5390 lto_symtab_encoder_iterator lsei;
5391 lto_symtab_encoder_t encoder;
5392
5393 ob = create_output_block (LTO_section_ipcp_transform);
5394 encoder = ob->decl_state->symtab_node_encoder;
5395 ob->symbol = NULL;
5396 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5397 lsei_next_function_in_partition (&lsei))
5398 {
5399 node = lsei_cgraph_node (lsei);
5400 if (node->has_gimple_body_p ())
5401 count++;
5402 }
5403
5404 streamer_write_uhwi (ob, count);
5405
5406 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5407 lsei_next_function_in_partition (&lsei))
5408 {
5409 node = lsei_cgraph_node (lsei);
5410 if (node->has_gimple_body_p ())
5411 write_ipcp_transformation_info (ob, node);
5412 }
5413 streamer_write_char_stream (ob->main_stream, 0);
5414 produce_asm (ob, NULL);
5415 destroy_output_block (ob);
5416 }
5417
5418 /* Read replacements section in file FILE_DATA of length LEN with data
5419 DATA. */
5420
5421 static void
5422 read_replacements_section (struct lto_file_decl_data *file_data,
5423 const char *data,
5424 size_t len)
5425 {
5426 const struct lto_function_header *header =
5427 (const struct lto_function_header *) data;
5428 const int cfg_offset = sizeof (struct lto_function_header);
5429 const int main_offset = cfg_offset + header->cfg_size;
5430 const int string_offset = main_offset + header->main_size;
5431 class data_in *data_in;
5432 unsigned int i;
5433 unsigned int count;
5434
5435 lto_input_block ib_main ((const char *) data + main_offset,
5436 header->main_size, file_data->mode_table);
5437
5438 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5439 header->string_size, vNULL);
5440 count = streamer_read_uhwi (&ib_main);
5441
5442 for (i = 0; i < count; i++)
5443 {
5444 unsigned int index;
5445 struct cgraph_node *node;
5446 lto_symtab_encoder_t encoder;
5447
5448 index = streamer_read_uhwi (&ib_main);
5449 encoder = file_data->symtab_node_encoder;
5450 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5451 index));
5452 gcc_assert (node->definition);
5453 read_ipcp_transformation_info (&ib_main, node, data_in);
5454 }
5455 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5456 len);
5457 lto_data_in_delete (data_in);
5458 }
5459
5460 /* Read IPA-CP aggregate replacements and other transformation summaries. */
5461
5462 void
5463 ipcp_read_transformation_summaries (void)
5464 {
5465 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5466 struct lto_file_decl_data *file_data;
5467 unsigned int j = 0;
5468
5469 while ((file_data = file_data_vec[j++]))
5470 {
5471 size_t len;
5472 const char *data
5473 = lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
5474 &len);
5475 if (data)
5476 read_replacements_section (file_data, data, len);
5477 }
5478 }
5479
5480 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5481 NODE. */
5482
5483 static void
5484 adjust_agg_replacement_values (struct cgraph_node *node,
5485 struct ipa_agg_replacement_value *aggval)
5486 {
5487 struct ipa_agg_replacement_value *v;
5488 clone_info *cinfo = clone_info::get (node);
5489
5490 if (!cinfo || !cinfo->param_adjustments)
5491 return;
5492
5493 auto_vec<int, 16> new_indices;
5494 cinfo->param_adjustments->get_updated_indices (&new_indices);
5495 for (v = aggval; v; v = v->next)
5496 {
5497 gcc_checking_assert (v->index >= 0);
5498
5499 if ((unsigned) v->index < new_indices.length ())
5500 v->index = new_indices[v->index];
5501 else
5502 /* This can happen if we know about a constant passed by reference by
5503 an argument which is never actually used for anything, let alone
5504 loading that constant. */
5505 v->index = -1;
5506 }
5507 }
5508
5509 /* Dominator walker driving the ipcp modification phase. */
5510
5511 class ipcp_modif_dom_walker : public dom_walker
5512 {
5513 public:
5514 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5515 vec<ipa_param_descriptor, va_gc> *descs,
5516 struct ipa_agg_replacement_value *av,
5517 bool *sc)
5518 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5519 m_aggval (av), m_something_changed (sc) {}
5520
5521 virtual edge before_dom_children (basic_block);
5522 bool cleanup_eh ()
5523 { return gimple_purge_all_dead_eh_edges (m_need_eh_cleanup); }
5524
5525 private:
5526 struct ipa_func_body_info *m_fbi;
5527 vec<ipa_param_descriptor, va_gc> *m_descriptors;
5528 struct ipa_agg_replacement_value *m_aggval;
5529 bool *m_something_changed;
5530 auto_bitmap m_need_eh_cleanup;
5531 };
5532
5533 edge
5534 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5535 {
5536 gimple_stmt_iterator gsi;
5537 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5538 {
5539 struct ipa_agg_replacement_value *v;
5540 gimple *stmt = gsi_stmt (gsi);
5541 tree rhs, val, t;
5542 HOST_WIDE_INT offset;
5543 poly_int64 size;
5544 int index;
5545 bool by_ref, vce;
5546
5547 if (!gimple_assign_load_p (stmt))
5548 continue;
5549 rhs = gimple_assign_rhs1 (stmt);
5550 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5551 continue;
5552
5553 vce = false;
5554 t = rhs;
5555 while (handled_component_p (t))
5556 {
5557 /* V_C_E can do things like convert an array of integers into one
5558 bigger integer and similar things that we do not handle below. */
5559 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5560 {
5561 vce = true;
5562 break;
5563 }
5564 t = TREE_OPERAND (t, 0);
5565 }
5566 if (vce)
5567 continue;
5568
5569 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5570 &offset, &size, &by_ref))
5571 continue;
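/* See whether a constant has been recorded for exactly this offset within
   this parameter's aggregate.  */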
5572 for (v = m_aggval; v; v = v->next)
5573 if (v->index == index
5574 && v->offset == offset)
5575 break;
5576 if (!v
5577 || v->by_ref != by_ref
5578 || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v->value))),
5579 size))
5580 continue;
5581
5582 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5583 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5584 {
5585 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5586 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5587 else if (TYPE_SIZE (TREE_TYPE (rhs))
5588 == TYPE_SIZE (TREE_TYPE (v->value)))
5589 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5590 else
5591 {
5592 if (dump_file)
5593 {
5594 fprintf (dump_file, " const ");
5595 print_generic_expr (dump_file, v->value);
5596 fprintf (dump_file, " can't be converted to type of ");
5597 print_generic_expr (dump_file, rhs);
5598 fprintf (dump_file, "\n");
5599 }
5600 continue;
5601 }
5602 }
5603 else
5604 val = v->value;
5605
5606 if (dump_file && (dump_flags & TDF_DETAILS))
5607 {
5608 fprintf (dump_file, "Modifying stmt:\n ");
5609 print_gimple_stmt (dump_file, stmt, 0);
5610 }
5611 gimple_assign_set_rhs_from_tree (&gsi, val);
5612 update_stmt (stmt);
5613
5614 if (dump_file && (dump_flags & TDF_DETAILS))
5615 {
5616 fprintf (dump_file, "into:\n ");
5617 print_gimple_stmt (dump_file, stmt, 0);
5618 fprintf (dump_file, "\n");
5619 }
5620
5621 *m_something_changed = true;
5622 if (maybe_clean_eh_stmt (stmt))
5623 bitmap_set_bit (m_need_eh_cleanup, bb->index);
5624 }
5625 return NULL;
5626 }
5627
5628 /* Return true if known bits information has been recorded for PARM.
5629 If so, set VALUE and MASK accordingly. */
5630
5631 bool
5632 ipcp_get_parm_bits (tree parm, tree *value, widest_int *mask)
5633 {
5634 cgraph_node *cnode = cgraph_node::get (current_function_decl);
5635 ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
5636 if (!ts || vec_safe_length (ts->bits) == 0)
5637 return false;
5638
5639 int i = 0;
5640 for (tree p = DECL_ARGUMENTS (current_function_decl);
5641 p != parm; p = DECL_CHAIN (p))
5642 {
5643 i++;
5644 /* Ignore static chain. */
5645 if (!p)
5646 return false;
5647 }
5648
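/* The bits are stored under the parameter indices of the original function;
   for clones with adjusted parameters translate the index back.  */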
5649 clone_info *cinfo = clone_info::get (cnode);
5650 if (cinfo && cinfo->param_adjustments)
5651 {
5652 i = cinfo->param_adjustments->get_original_index (i);
5653 if (i < 0)
5654 return false;
5655 }
5656
5657 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5658 if (!bits[i])
5659 return false;
5660 *mask = bits[i]->mask;
5661 *value = wide_int_to_tree (TREE_TYPE (parm), bits[i]->value);
5662 return true;
5663 }
5664
5665
5666 /* Update bits info of formal parameters as described in
5667 ipcp_transformation. */
5668
5669 static void
5670 ipcp_update_bits (struct cgraph_node *node)
5671 {
5672 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5673
5674 if (!ts || vec_safe_length (ts->bits) == 0)
5675 return;
5676 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5677 unsigned count = bits.length ();
5678 if (!count)
5679 return;
5680
5681 auto_vec<int, 16> new_indices;
5682 bool need_remapping = false;
5683 clone_info *cinfo = clone_info::get (node);
5684 if (cinfo && cinfo->param_adjustments)
5685 {
5686 cinfo->param_adjustments->get_updated_indices (&new_indices);
5687 need_remapping = true;
5688 }
5689 auto_vec <tree, 16> parm_decls;
5690 push_function_arg_decls (&parm_decls, node->decl);
5691
5692 for (unsigned i = 0; i < count; ++i)
5693 {
5694 tree parm;
5695 if (need_remapping)
5696 {
5697 if (i >= new_indices.length ())
5698 continue;
5699 int idx = new_indices[i];
5700 if (idx < 0)
5701 continue;
5702 parm = parm_decls[idx];
5703 }
5704 else
5705 parm = parm_decls[i];
5706 gcc_checking_assert (parm);
5707
5708
5709 if (!bits[i]
5710 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
5711 || POINTER_TYPE_P (TREE_TYPE (parm)))
5712 || !is_gimple_reg (parm))
5713 continue;
5714
5715 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5716 if (!ddef)
5717 continue;
5718
5719 if (dump_file)
5720 {
5721 fprintf (dump_file, "Adjusting mask for param %u to ", i);
5722 print_hex (bits[i]->mask, dump_file);
5723 fprintf (dump_file, "\n");
5724 }
5725
5726 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5727 {
5728 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
5729 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
5730
5731 wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
5732 | wide_int::from (bits[i]->value, prec, sgn);
5733 set_nonzero_bits (ddef, nonzero_bits);
5734 }
5735 else
5736 {
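/* For pointers, the mask and value encode alignment information: the lowest
   unknown bit of the mask determines the alignment and the known low bits of
   the value the misalignment within it.  */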
5737 unsigned tem = bits[i]->mask.to_uhwi ();
5738 unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
5739 unsigned align = tem & -tem;
5740 unsigned misalign = bitpos & (align - 1);
5741
5742 if (align > 1)
5743 {
5744 if (dump_file)
5745 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
5746
5747 unsigned old_align, old_misalign;
5748 struct ptr_info_def *pi = get_ptr_info (ddef);
5749 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5750
5751 if (old_known
5752 && old_align > align)
5753 {
5754 if (dump_file)
5755 {
5756 fprintf (dump_file, "But alignment was already %u.\n", old_align);
5757 if ((old_misalign & (align - 1)) != misalign)
5758 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5759 old_misalign, misalign);
5760 }
5761 continue;
5762 }
5763
5764 if (old_known
5765 && ((misalign & (old_align - 1)) != old_misalign)
5766 && dump_file)
5767 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5768 old_misalign, misalign);
5769
5770 set_ptr_info_alignment (pi, align, misalign);
5771 }
5772 }
5773 }
5774 }
5775
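/* Return true if this value range is known to describe a non-zero value of a
   parameter of type EXPR_TYPE, i.e. it is the anti-range ~[0, 0] or, for an
   unsigned EXPR_TYPE, the range [1, MAX].  */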
5776 bool
5777 ipa_vr::nonzero_p (tree expr_type) const
5778 {
5779 if (type == VR_ANTI_RANGE && wi::eq_p (min, 0) && wi::eq_p (max, 0))
5780 return true;
5781
5782 unsigned prec = TYPE_PRECISION (expr_type);
5783 return (type == VR_RANGE
5784 && TYPE_UNSIGNED (expr_type)
5785 && wi::eq_p (min, wi::one (prec))
5786 && wi::eq_p (max, wi::max_value (prec, TYPE_SIGN (expr_type))));
5787 }
5788
5789 /* Update value ranges of formal parameters as described in
5790 ipcp_transformation. */
5791
5792 static void
5793 ipcp_update_vr (struct cgraph_node *node)
5794 {
5795 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5796 if (!ts || vec_safe_length (ts->m_vr) == 0)
5797 return;
5798 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5799 unsigned count = vr.length ();
5800 if (!count)
5801 return;
5802
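/* The value ranges are stored under the parameter indices of the original
   function; for clones with adjusted parameters they have to be mapped to the
   surviving parameter declarations.  */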
5803 auto_vec<int, 16> new_indices;
5804 bool need_remapping = false;
5805 clone_info *cinfo = clone_info::get (node);
5806 if (cinfo && cinfo->param_adjustments)
5807 {
5808 cinfo->param_adjustments->get_updated_indices (&new_indices);
5809 need_remapping = true;
5810 }
5811 auto_vec <tree, 16> parm_decls;
5812 push_function_arg_decls (&parm_decls, node->decl);
5813
5814 for (unsigned i = 0; i < count; ++i)
5815 {
5816 tree parm;
5817 int remapped_idx;
5818 if (need_remapping)
5819 {
5820 if (i >= new_indices.length ())
5821 continue;
5822 remapped_idx = new_indices[i];
5823 if (remapped_idx < 0)
5824 continue;
5825 }
5826 else
5827 remapped_idx = i;
5828
5829 parm = parm_decls[remapped_idx];
5830
5831 gcc_checking_assert (parm);
5832 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5833
5834 if (!ddef || !is_gimple_reg (parm))
5835 continue;
5836
5837 if (vr[i].known
5838 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5839 {
5840 tree type = TREE_TYPE (ddef);
5841 unsigned prec = TYPE_PRECISION (type);
5842 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5843 {
5844 if (dump_file)
5845 {
5846 fprintf (dump_file, "Setting value range of param %u "
5847 "(now %i) ", i, remapped_idx);
5848 fprintf (dump_file, "%s[",
5849 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5850 print_decs (vr[i].min, dump_file);
5851 fprintf (dump_file, ", ");
5852 print_decs (vr[i].max, dump_file);
5853 fprintf (dump_file, "]\n");
5854 }
5855 set_range_info (ddef, vr[i].type,
5856 wide_int_storage::from (vr[i].min, prec,
5857 TYPE_SIGN (type)),
5858 wide_int_storage::from (vr[i].max, prec,
5859 TYPE_SIGN (type)));
5860 }
5861 else if (POINTER_TYPE_P (TREE_TYPE (ddef))
5862 && vr[i].nonzero_p (TREE_TYPE (ddef)))
5863 {
5864 if (dump_file)
5865 fprintf (dump_file, "Setting nonnull for %u\n", i);
5866 set_ptr_nonnull (ddef);
5867 }
5868 }
5869 }
5870 }
5871
5872 /* IPCP transformation phase applying the propagated known bits, value ranges and aggregate values to the body of NODE. */
5873
5874 unsigned int
5875 ipcp_transform_function (struct cgraph_node *node)
5876 {
5877 vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
5878 struct ipa_func_body_info fbi;
5879 struct ipa_agg_replacement_value *aggval;
5880 int param_count;
5881 bool something_changed = false;
5882
5883 gcc_checking_assert (cfun);
5884 gcc_checking_assert (current_function_decl);
5885
5886 if (dump_file)
5887 fprintf (dump_file, "Modification phase of node %s\n",
5888 node->dump_name ());
5889
5890 ipcp_update_bits (node);
5891 ipcp_update_vr (node);
5892 aggval = ipa_get_agg_replacements_for_node (node);
5893 if (!aggval)
5894 return 0;
5895 param_count = count_formal_params (node->decl);
5896 if (param_count == 0)
5897 return 0;
5898 adjust_agg_replacement_values (node, aggval);
5899 if (dump_file)
5900 ipa_dump_agg_replacement_values (dump_file, aggval);
5901
5902 fbi.node = node;
5903 fbi.info = NULL;
5904 fbi.bb_infos = vNULL;
5905 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
5906 fbi.param_count = param_count;
5907 fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);
5908
5909 vec_safe_grow_cleared (descriptors, param_count, true);
5910 ipa_populate_param_decls (node, *descriptors);
5911 calculate_dominance_info (CDI_DOMINATORS);
5912 ipcp_modif_dom_walker walker (&fbi, descriptors, aggval, &something_changed);
5913 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5914 free_dominance_info (CDI_DOMINATORS);
5915 bool cfg_changed = walker.cleanup_eh ();
5916
5917 int i;
5918 struct ipa_bb_info *bi;
5919 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5920 free_ipa_bb_info (bi);
5921 fbi.bb_infos.release ();
5922
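/* Clear the summaries that have just been applied so that they are not used
   again.  */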
5923 ipcp_transformation *s = ipcp_transformation_sum->get (node);
5924 s->agg_values = NULL;
5925 s->bits = NULL;
5926 s->m_vr = NULL;
5927
5928 vec_free (descriptors);
5929
5930 if (!something_changed)
5931 return 0;
5932
5933 if (cfg_changed)
5934 delete_unreachable_blocks_update_callgraph (node, false);
5935
5936 return TODO_update_ssa_only_virtuals;
5937 }
5938
5939
5940 /* Return true if OTHER describes the same aggregate value. */
5941 bool
5942 ipa_agg_value::equal_to (const ipa_agg_value &other)
5943 {
5944 return offset == other.offset
5945 && operand_equal_p (value, other.value, 0);
5946 }
5947
5948 /* Destructor that also releases the individual aggregate values. */
5949
5950 ipa_auto_call_arg_values::~ipa_auto_call_arg_values ()
5951 {
5952 ipa_release_agg_values (m_known_aggs, false);
5953 }
5954
5955
5956
5957 #include "gt-ipa-prop.h"
5958