1 /* Tree based points-to analysis
2 Copyright (C) 2005-2021 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
4
5 This file is part of GCC.
6
GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "tree-pass.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "tree-pretty-print.h"
33 #include "diagnostic-core.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
36 #include "stmt.h"
37 #include "gimple-iterator.h"
38 #include "tree-into-ssa.h"
39 #include "tree-dfa.h"
40 #include "gimple-walk.h"
41 #include "varasm.h"
42 #include "stringpool.h"
43 #include "attribs.h"
44 #include "tree-ssa.h"
45 #include "tree-cfg.h"
46
47 /* The idea behind this analyzer is to generate set constraints from the
48 program, then solve the resulting constraints in order to generate the
49 points-to sets.
50
51 Set constraints are a way of modeling program analysis problems that
52 involve sets. They consist of an inclusion constraint language,
53 describing the variables (each variable is a set) and operations that
54 are involved on the variables, and a set of rules that derive facts
55 from these operations. To solve a system of set constraints, you derive
56 all possible facts under the rules, which gives you the correct sets
57 as a consequence.
58
59 See "Efficient Field-sensitive pointer analysis for C" by "David
60 J. Pearce and Paul H. J. Kelly and Chris Hankin", at
61 http://citeseer.ist.psu.edu/pearce04efficient.html
62
63 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
64 of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
65 http://citeseer.ist.psu.edu/heintze01ultrafast.html
66
67 There are three types of real constraint expressions, DEREF,
68 ADDRESSOF, and SCALAR. Each constraint expression consists
69 of a constraint type, a variable, and an offset.
70
71 SCALAR is a constraint expression type used to represent x, whether
72 it appears on the LHS or the RHS of a statement.
73 DEREF is a constraint expression type used to represent *x, whether
74 it appears on the LHS or the RHS of a statement.
75 ADDRESSOF is a constraint expression used to represent &x, whether
76 it appears on the LHS or the RHS of a statement.
77
78 Each pointer variable in the program is assigned an integer id, and
79 each field of a structure variable is assigned an integer id as well.
80
81 Structure variables are linked to their list of fields through a "next
82 field" in each variable that points to the next field in offset
83 order.
84 Each variable for a structure field has
85
86 1. "size", that tells the size in bits of that field.
87 2. "fullsize", that tells the size in bits of the entire structure.
88 3. "offset", that tells the offset in bits from the beginning of the
89 structure to this field.
90
91 Thus,
92 struct f
93 {
94 int a;
95 int b;
96 } foo;
97 int *bar;
98
99 looks like
100
101 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
102 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
103 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
104
105
106 In order to solve the system of set constraints, the following is
107 done:
108
109 1. Each constraint variable x has a solution set associated with it,
110 Sol(x).
111
112 2. Constraints are separated into direct, copy, and complex.
113 Direct constraints are ADDRESSOF constraints that require no extra
114 processing, such as P = &Q
115 Copy constraints are those of the form P = Q.
116 Complex constraints are all the constraints involving dereferences
117 and offsets (including offsetted copies).
118
119 3. All direct constraints of the form P = &Q are processed, such
120 that Q is added to Sol(P)
121
122 4. All complex constraints for a given constraint variable are stored in a
123 linked list attached to that variable's node.
124
125 5. A directed graph is built out of the copy constraints. Each
126 constraint variable is a node in the graph, and an edge from
127 Q to P is added for each copy constraint of the form P = Q
128
129 6. The graph is then walked, and solution sets are
130 propagated along the copy edges, such that an edge from Q to P
131 causes Sol(P) <- Sol(P) union Sol(Q).
132
133 7. As we visit each node, all complex constraints associated with
134 that node are processed by adding appropriate copy edges to the graph, or the
135 appropriate variables to the solution set.
136
137 8. The process of walking the graph is iterated until no solution
138 sets change.
139
140 Prior to walking the graph in steps 6 and 7, We perform static
141 cycle elimination on the constraint graph, as well
142 as off-line variable substitution.
143
144 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
145 on and turned into anything), but isn't. You can just see what offset
146 inside the pointed-to struct it's going to access.
147
148 TODO: Constant bounded arrays can be handled as if they were structs of the
149 same number of elements.
150
151 TODO: Modeling heap and incoming pointers becomes much better if we
152 add fields to them as we discover them, which we could do.
153
154 TODO: We could handle unions, but to be honest, it's probably not
155 worth the pain or slowdown. */
156
157 /* IPA-PTA optimizations possible.
158
159 When the indirect function called is ANYTHING we can add disambiguation
160 based on the function signatures (or simply the parameter count which
161 is the varinfo size). We also do not need to consider functions that
162 do not have their address taken.
163
164 The is_global_var bit which marks escape points is overly conservative
165 in IPA mode. Split it to is_escape_point and is_global_var - only
166 externally visible globals are escape points in IPA mode.
167 There is now is_ipa_escape_point but this is only used in a few
168 selected places.
169
170 The way we introduce DECL_PT_UID to avoid fixing up all points-to
171 sets in the translation unit when we copy a DECL during inlining
172 pessimizes precision. The advantage is that the DECL_PT_UID keeps
173 compile-time and memory usage overhead low - the points-to sets
174 do not grow or get unshared as they would during a fixup phase.
175 An alternative solution is to delay IPA PTA until after all
176 inlining transformations have been applied.
177
178 The way we propagate clobber/use information isn't optimized.
179 It should use a new complex constraint that properly filters
180 out local variables of the callee (though that would make
181 the sets invalid after inlining). OTOH we might as well
182 admit defeat to WHOPR and simply do all the clobber/use analysis
183 and propagation after PTA finished but before we threw away
184 points-to information for memory variables. WHOPR and PTA
185 do not play along well anyway - the whole constraint solving
186 would need to be done in WPA phase and it will be very interesting
187 to apply the results to local SSA names during LTRANS phase.
188
189 We probably should compute a per-function unit-ESCAPE solution
190 propagating it simply like the clobber / uses solutions. The
191 solution can go alongside the non-IPA escaped solution and be
192 used to query which vars escape the unit through a function.
193 This is also required to make the escaped-HEAP trick work in IPA mode.
194
195 We never put function decls in points-to sets so we do not
196 keep the set of called functions for indirect calls.
197
198 And probably more. */
199
/* True if sub-fields of structures are tracked as separate constraint
   variables (field-sensitive analysis).  */
static bool use_field_sensitive = true;

/* Nonzero while running the interprocedural (IPA) variant of the
   analysis over the whole translation unit.  */
static int in_ipa_mode = 0;

/* Used for predecessor bitmaps. */
static bitmap_obstack predbitmap_obstack;

/* Used for points-to sets. */
static bitmap_obstack pta_obstack;

/* Used for oldsolution members of variables. */
static bitmap_obstack oldpta_obstack;

/* Used for per-solver-iteration bitmaps. */
static bitmap_obstack iteration_obstack;

/* Forward declarations of functions defined later in this file.  */
static unsigned int create_variable_info_for (tree, const char *, bool);
typedef struct constraint_graph *constraint_graph_t;
static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);

struct constraint;
typedef struct constraint *constraint_t;


/* Iterate over the set bits of bitmap A, but only if A is non-NULL.
   B, C and D are as for EXECUTE_IF_SET_IN_BITMAP.  */
#define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
  if (a)						\
    EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
226
/* Statistics about constraint creation and solving, for dumps.  */
static struct constraint_stats
{
  /* Total number of constraint variables created.  */
  unsigned int total_vars;
  /* Variables found to never contain pointers.  */
  unsigned int nonpointer_vars;
  /* Variables unified before solving (offline) and during solving
     (online), respectively.  */
  unsigned int unified_vars_static;
  unsigned int unified_vars_dynamic;
  /* Number of solver iterations performed.  */
  unsigned int iterations;
  /* Number of explicit and implicit constraint graph edges.  */
  unsigned int num_edges;
  unsigned int num_implicit_edges;
  /* Number of final points-to sets built.  */
  unsigned int points_to_sets_created;
} stats;
238
/* A constraint variable.  Each user variable (or field thereof) as well
   as each artificial variable introduced by the analysis gets one.  */
struct variable_info
{
  /* ID of this variable  */
  unsigned int id;

  /* True if this is a variable created by the constraint analysis, such as
     heap variables and constraints we had to break up.  */
  unsigned int is_artificial_var : 1;

  /* True if this is a special variable whose solution set should not be
     changed.  */
  unsigned int is_special_var : 1;

  /* True for variables whose size is not known or variable.  */
  unsigned int is_unknown_size_var : 1;

  /* True for (sub-)fields that represent a whole variable.  */
  unsigned int is_full_var : 1;

  /* True if this is a heap variable.  */
  unsigned int is_heap_var : 1;

  /* True if this is a register variable.  */
  unsigned int is_reg_var : 1;

  /* True if this field may contain pointers.  */
  unsigned int may_have_pointers : 1;

  /* True if this field has only restrict qualified pointers.  */
  unsigned int only_restrict_pointers : 1;

  /* True if this represents a heap var created for a restrict qualified
     pointer.  */
  unsigned int is_restrict_var : 1;

  /* True if this represents a global variable.  */
  unsigned int is_global_var : 1;

  /* True if this represents a module escape point for IPA analysis.  */
  unsigned int is_ipa_escape_point : 1;

  /* True if this represents an IPA function info.  */
  unsigned int is_fn_info : 1;

  /* True if this appears as RHS in an ADDRESSOF constraint.  */
  unsigned int address_taken : 1;

  /* ???  Store somewhere better.  */
  unsigned short ruid;

  /* The ID of the variable for the next field in this structure
     or zero for the last field in this structure.  */
  unsigned next;

  /* The ID of the variable for the first field in this structure.  */
  unsigned head;

  /* Offset of this variable, in bits, from the base variable  */
  unsigned HOST_WIDE_INT offset;

  /* Size of the variable, in bits.  */
  unsigned HOST_WIDE_INT size;

  /* Full size of the base variable, in bits.  */
  unsigned HOST_WIDE_INT fullsize;

  /* In IPA mode the shadow UID in case the variable needs to be duplicated in
     the final points-to solution because it reaches its containing
     function recursively.  Zero if none is needed.  */
  unsigned int shadow_var_uid;

  /* Name of this variable  */
  const char *name;

  /* Tree that this variable is associated with.  */
  tree decl;

  /* Points-to set for this variable.  */
  bitmap solution;

  /* Old points-to set for this variable.  */
  bitmap oldsolution;
};
typedef struct variable_info *varinfo_t;
323
/* Forward declarations of field-lookup and constraint helpers defined
   later in this file.  */
static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
						   unsigned HOST_WIDE_INT);
static varinfo_t lookup_vi_for_tree (tree);
static inline bool type_can_have_subvars (const_tree);
static void make_param_constraints (varinfo_t);

/* Pool of variable info structures.  */
static object_allocator<variable_info> variable_info_pool
  ("Variable info pool");

/* Map varinfo to final pt_solution.  */
static hash_map<varinfo_t, pt_solution *> *final_solutions;
/* NOTE(review): presumably backs the allocations referenced from
   FINAL_SOLUTIONS — the uses are not visible in this chunk.  */
struct obstack final_solutions_obstack;

/* Table of variable info structures for constraint variables.
   Indexed directly by variable info id.  */
static vec<varinfo_t> varmap;
342
343 /* Return the varmap element N */
344
345 static inline varinfo_t
get_varinfo(unsigned int n)346 get_varinfo (unsigned int n)
347 {
348 return varmap[n];
349 }
350
351 /* Return the next variable in the list of sub-variables of VI
352 or NULL if VI is the last sub-variable. */
353
354 static inline varinfo_t
vi_next(varinfo_t vi)355 vi_next (varinfo_t vi)
356 {
357 return get_varinfo (vi->next);
358 }
359
/* Static IDs for the special variables.  Variable ID zero is unused
   and used as terminator for the sub-variable chain.  */
enum { nothing_id = 1, anything_id = 2, string_id = 3,
       escaped_id = 4, nonlocal_id = 5,
       storedanything_id = 6, integer_id = 7 };
365
366 /* Return a new variable info structure consisting for a variable
367 named NAME, and using constraint graph node NODE. Append it
368 to the vector of variable info structures. */
369
370 static varinfo_t
new_var_info(tree t,const char * name,bool add_id)371 new_var_info (tree t, const char *name, bool add_id)
372 {
373 unsigned index = varmap.length ();
374 varinfo_t ret = variable_info_pool.allocate ();
375
376 if (dump_file && add_id)
377 {
378 char *tempname = xasprintf ("%s(%d)", name, index);
379 name = ggc_strdup (tempname);
380 free (tempname);
381 }
382
383 ret->id = index;
384 ret->name = name;
385 ret->decl = t;
386 /* Vars without decl are artificial and do not have sub-variables. */
387 ret->is_artificial_var = (t == NULL_TREE);
388 ret->is_special_var = false;
389 ret->is_unknown_size_var = false;
390 ret->is_full_var = (t == NULL_TREE);
391 ret->is_heap_var = false;
392 ret->may_have_pointers = true;
393 ret->only_restrict_pointers = false;
394 ret->is_restrict_var = false;
395 ret->ruid = 0;
396 ret->is_global_var = (t == NULL_TREE);
397 ret->is_ipa_escape_point = false;
398 ret->is_fn_info = false;
399 ret->address_taken = false;
400 if (t && DECL_P (t))
401 ret->is_global_var = (is_global_var (t)
402 /* We have to treat even local register variables
403 as escape points. */
404 || (VAR_P (t) && DECL_HARD_REGISTER (t)));
405 ret->is_reg_var = (t && TREE_CODE (t) == SSA_NAME);
406 ret->solution = BITMAP_ALLOC (&pta_obstack);
407 ret->oldsolution = NULL;
408 ret->next = 0;
409 ret->shadow_var_uid = 0;
410 ret->head = ret->id;
411
412 stats.total_vars++;
413
414 varmap.safe_push (ret);
415
416 return ret;
417 }
418
/* A map mapping call statements to per-stmt variables for uses
   and clobbers specific to the call.  Only the uses variable is stored;
   the clobbers variable is chained after it via its next field.  */
static hash_map<gimple *, varinfo_t> *call_stmt_vars;
422
423 /* Lookup or create the variable for the call statement CALL. */
424
425 static varinfo_t
get_call_vi(gcall * call)426 get_call_vi (gcall *call)
427 {
428 varinfo_t vi, vi2;
429
430 bool existed;
431 varinfo_t *slot_p = &call_stmt_vars->get_or_insert (call, &existed);
432 if (existed)
433 return *slot_p;
434
435 vi = new_var_info (NULL_TREE, "CALLUSED", true);
436 vi->offset = 0;
437 vi->size = 1;
438 vi->fullsize = 2;
439 vi->is_full_var = true;
440 vi->is_reg_var = true;
441
442 vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED", true);
443 vi2->offset = 1;
444 vi2->size = 1;
445 vi2->fullsize = 2;
446 vi2->is_full_var = true;
447 vi2->is_reg_var = true;
448
449 vi->next = vi2->id;
450
451 *slot_p = vi;
452 return vi;
453 }
454
455 /* Lookup the variable for the call statement CALL representing
456 the uses. Returns NULL if there is nothing special about this call. */
457
458 static varinfo_t
lookup_call_use_vi(gcall * call)459 lookup_call_use_vi (gcall *call)
460 {
461 varinfo_t *slot_p = call_stmt_vars->get (call);
462 if (slot_p)
463 return *slot_p;
464
465 return NULL;
466 }
467
468 /* Lookup the variable for the call statement CALL representing
469 the clobbers. Returns NULL if there is nothing special about this call. */
470
471 static varinfo_t
lookup_call_clobber_vi(gcall * call)472 lookup_call_clobber_vi (gcall *call)
473 {
474 varinfo_t uses = lookup_call_use_vi (call);
475 if (!uses)
476 return NULL;
477
478 return vi_next (uses);
479 }
480
/* Lookup or create the variable for the call statement CALL representing
   the uses.  This is the first variable of the pair created by
   get_call_vi.  */

static varinfo_t
get_call_use_vi (gcall *call)
{
  return get_call_vi (call);
}
489
/* Lookup or create the variable for the call statement CALL representing
   the clobbers.  The clobbers variable is chained directly after the
   uses variable, hence the vi_next.  */

static varinfo_t ATTRIBUTE_UNUSED
get_call_clobber_vi (gcall *call)
{
  return vi_next (get_call_vi (call));
}
498
499
/* The three kinds of constraint expressions; see the overview comment
   at the top of the file for what each represents.  */
enum constraint_expr_type {SCALAR, DEREF, ADDRESSOF};

/* An expression that appears in a constraint.  */

struct constraint_expr
{
  /* Constraint type.  */
  constraint_expr_type type;

  /* Variable we are referring to in the constraint.  */
  unsigned int var;

  /* Offset, in bits, of this constraint from the beginning of
     variables it ends up referring to.

     IOW, in a deref constraint, we would deref, get the result set,
     then add OFFSET to each member.  */
  HOST_WIDE_INT offset;
};

/* Use 0x8000... as special unknown offset.  */
#define UNKNOWN_OFFSET HOST_WIDE_INT_MIN

typedef struct constraint_expr ce_s;

/* Forward declarations of the constraint-building helpers.  */
static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
static void get_constraint_for (tree, vec<ce_s> *);
static void get_constraint_for_rhs (tree, vec<ce_s> *);
static void do_deref (vec<ce_s> *);
528
/* Our set constraints are made up of two constraint expressions, one
   LHS, and one RHS.

   As described in the introduction, our set constraints each represent an
   operation between set valued variables.
*/
struct constraint
{
  struct constraint_expr lhs;
  struct constraint_expr rhs;
};

/* List of constraints that we use to build the constraint graph from.  */

static vec<constraint_t> constraints;
/* Allocation pool that constraints are allocated from.  */
static object_allocator<constraint> constraint_pool ("Constraint pool");
545
/* The constraint graph is represented as an array of bitmaps
   containing successor nodes.  */

struct constraint_graph
{
  /* Size of this graph, which may be different than the number of
     nodes in the variable map.  */
  unsigned int size;

  /* Explicit successors of each node. */
  bitmap *succs;

  /* Implicit predecessors of each node (Used for variable
     substitution). */
  bitmap *implicit_preds;

  /* Explicit predecessors of each node (Used for variable substitution).  */
  bitmap *preds;

  /* Indirect cycle representatives, or -1 if the node has no indirect
     cycles.  */
  int *indirect_cycles;

  /* Representative node for a node.  rep[a] == a unless the node has
     been unified.  */
  unsigned int *rep;

  /* Equivalence class representative for a label.  This is used for
     variable substitution.  */
  int *eq_rep;

  /* Pointer equivalence label for a node.  All nodes with the same
     pointer equivalence label can be unified together at some point
     (either during constraint optimization or after the constraint
     graph is built).  */
  unsigned int *pe;

  /* Pointer equivalence representative for a label.  This is used to
     handle nodes that are pointer equivalent but not location
     equivalent.  We can unite these once the addressof constraints
     are transformed into initial points-to sets.  */
  int *pe_rep;

  /* Pointer equivalence label for each node, used during variable
     substitution.  */
  unsigned int *pointer_label;

  /* Location equivalence label for each node, used during location
     equivalence finding.  */
  unsigned int *loc_label;

  /* Pointed-by set for each node, used during location equivalence
     finding.  This is pointed-by rather than pointed-to, because it
     is constructed using the predecessor graph.  */
  bitmap *pointed_by;

  /* Points to sets for pointer equivalence.  This is *not* the actual
     points-to sets for nodes.  */
  bitmap *points_to;

  /* Bitmap of nodes where the bit is set if the node is a direct
     node.  Used for variable substitution.  */
  sbitmap direct_nodes;

  /* Bitmap of nodes where the bit is set if the node is address
     taken.  Used for variable substitution.  */
  bitmap address_taken;

  /* Vector of complex constraints for each graph node.  Complex
     constraints are those involving dereferences or offsets that are
     not 0.  */
  vec<constraint_t> *complex;
};

/* The single constraint graph instance operated on by the solver.  */
static constraint_graph_t graph;

/* During variable substitution and the offline version of indirect
   cycle finding, we create nodes to represent dereferences and
   address taken constraints.  These represent where these start and
   end.  Node i + FIRST_REF_NODE is the "reference" node for node i.  */
#define FIRST_REF_NODE (varmap).length ()
#define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
628
629 /* Return the representative node for NODE, if NODE has been unioned
630 with another NODE.
631 This function performs path compression along the way to finding
632 the representative. */
633
634 static unsigned int
find(unsigned int node)635 find (unsigned int node)
636 {
637 gcc_checking_assert (node < graph->size);
638 if (graph->rep[node] != node)
639 return graph->rep[node] = find (graph->rep[node]);
640 return node;
641 }
642
643 /* Union the TO and FROM nodes to the TO nodes.
644 Note that at some point in the future, we may want to do
645 union-by-rank, in which case we are going to have to return the
646 node we unified to. */
647
648 static bool
unite(unsigned int to,unsigned int from)649 unite (unsigned int to, unsigned int from)
650 {
651 gcc_checking_assert (to < graph->size && from < graph->size);
652 if (to != from && graph->rep[from] != to)
653 {
654 graph->rep[from] = to;
655 return true;
656 }
657 return false;
658 }
659
660 /* Create a new constraint consisting of LHS and RHS expressions. */
661
662 static constraint_t
new_constraint(const struct constraint_expr lhs,const struct constraint_expr rhs)663 new_constraint (const struct constraint_expr lhs,
664 const struct constraint_expr rhs)
665 {
666 constraint_t ret = constraint_pool.allocate ();
667 ret->lhs = lhs;
668 ret->rhs = rhs;
669 return ret;
670 }
671
672 /* Print out constraint C to FILE. */
673
674 static void
dump_constraint(FILE * file,constraint_t c)675 dump_constraint (FILE *file, constraint_t c)
676 {
677 if (c->lhs.type == ADDRESSOF)
678 fprintf (file, "&");
679 else if (c->lhs.type == DEREF)
680 fprintf (file, "*");
681 if (dump_file)
682 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
683 else
684 fprintf (file, "V%d", c->lhs.var);
685 if (c->lhs.offset == UNKNOWN_OFFSET)
686 fprintf (file, " + UNKNOWN");
687 else if (c->lhs.offset != 0)
688 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
689 fprintf (file, " = ");
690 if (c->rhs.type == ADDRESSOF)
691 fprintf (file, "&");
692 else if (c->rhs.type == DEREF)
693 fprintf (file, "*");
694 if (dump_file)
695 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
696 else
697 fprintf (file, "V%d", c->rhs.var);
698 if (c->rhs.offset == UNKNOWN_OFFSET)
699 fprintf (file, " + UNKNOWN");
700 else if (c->rhs.offset != 0)
701 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
702 }
703
704
/* Prototypes for the DEBUG_FUNCTION helpers, callable from a debugger;
   definitions follow below and later in the file.  */
void debug_constraint (constraint_t);
void debug_constraints (void);
void debug_constraint_graph (void);
void debug_solution_for_var (unsigned int);
void debug_sa_points_to_info (void);
void debug_varinfo (varinfo_t);
void debug_varmap (void);
712
713 /* Print out constraint C to stderr. */
714
715 DEBUG_FUNCTION void
debug_constraint(constraint_t c)716 debug_constraint (constraint_t c)
717 {
718 dump_constraint (stderr, c);
719 fprintf (stderr, "\n");
720 }
721
722 /* Print out all constraints to FILE */
723
724 static void
dump_constraints(FILE * file,int from)725 dump_constraints (FILE *file, int from)
726 {
727 int i;
728 constraint_t c;
729 for (i = from; constraints.iterate (i, &c); i++)
730 if (c)
731 {
732 dump_constraint (file, c);
733 fprintf (file, "\n");
734 }
735 }
736
/* Print out all constraints to stderr, starting from the first one.  */

DEBUG_FUNCTION void
debug_constraints (void)
{
  dump_constraints (stderr, 0);
}
744
/* Print the constraint graph in dot format.  */

static void
dump_constraint_graph (FILE *file)
{
  unsigned int i;

  /* Only print the graph if it has already been initialized:  */
  if (!graph)
    return;

  /* Prints the header of the dot file:  */
  fprintf (file, "strict digraph {\n");
  fprintf (file, " node [\n shape = box\n ]\n");
  fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
  fprintf (file, "\n // List of nodes and complex constraints in "
	   "the constraint graph:\n");

  /* The next lines print the nodes in the graph together with the
     complex constraints attached to them.  Node 0 is skipped (unused);
     nodes merged into a representative are skipped too.  */
  for (i = 1; i < graph->size; i++)
    {
      if (i == FIRST_REF_NODE)
	continue;
      if (find (i) != i)
	continue;
      /* Reference nodes (>= FIRST_REF_NODE) stand for *VAR and are
	 printed with a leading '*'.  */
      if (i < FIRST_REF_NODE)
	fprintf (file, "\"%s\"", get_varinfo (i)->name);
      else
	fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
      if (graph->complex[i].exists ())
	{
	  unsigned j;
	  constraint_t c;
	  /* Attach the complex constraints as a multi-line label.  */
	  fprintf (file, " [label=\"\\N\\n");
	  for (j = 0; graph->complex[i].iterate (j, &c); ++j)
	    {
	      dump_constraint (file, c);
	      fprintf (file, "\\l");
	    }
	  fprintf (file, "\"]");
	}
      fprintf (file, ";\n");
    }

  /* Go over the edges.  */
  fprintf (file, "\n // Edges in the constraint graph:\n");
  for (i = 1; i < graph->size; i++)
    {
      unsigned j;
      bitmap_iterator bi;
      if (find (i) != i)
	continue;
      EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
	{
	  unsigned to = find (j);
	  /* Self edges (after representative lookup) are not dumped.  */
	  if (i == to)
	    continue;
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (i)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
	  fprintf (file, " -> ");
	  if (to < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (to)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
	  fprintf (file, ";\n");
	}
    }

  /* Prints the tail of the dot file.  */
  fprintf (file, "}\n");
}
819
/* Print out the constraint graph to stderr in dot format.  */

DEBUG_FUNCTION void
debug_constraint_graph (void)
{
  dump_constraint_graph (stderr);
}
827
828 /* SOLVER FUNCTIONS
829
830 The solver is a simple worklist solver, that works on the following
831 algorithm:
832
833 sbitmap changed_nodes = all zeroes;
834 changed_count = 0;
835 For each node that is not already collapsed:
836 changed_count++;
837 set bit in changed nodes
838
839 while (changed_count > 0)
840 {
841 compute topological ordering for constraint graph
842
843 find and collapse cycles in the constraint graph (updating
844 changed if necessary)
845
846 for each node (n) in the graph in topological order:
847 changed_count--;
848
849 Process each complex constraint associated with the node,
850 updating changed if necessary.
851
852 For each outgoing edge from n, propagate the solution from n to
853 the destination of the edge, updating changed as necessary.
854
855 } */
856
857 /* Return true if two constraint expressions A and B are equal. */
858
859 static bool
constraint_expr_equal(struct constraint_expr a,struct constraint_expr b)860 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
861 {
862 return a.type == b.type && a.var == b.var && a.offset == b.offset;
863 }
864
865 /* Return true if constraint expression A is less than constraint expression
866 B. This is just arbitrary, but consistent, in order to give them an
867 ordering. */
868
869 static bool
constraint_expr_less(struct constraint_expr a,struct constraint_expr b)870 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
871 {
872 if (a.type == b.type)
873 {
874 if (a.var == b.var)
875 return a.offset < b.offset;
876 else
877 return a.var < b.var;
878 }
879 else
880 return a.type < b.type;
881 }
882
883 /* Return true if constraint A is less than constraint B. This is just
884 arbitrary, but consistent, in order to give them an ordering. */
885
886 static bool
constraint_less(const constraint_t & a,const constraint_t & b)887 constraint_less (const constraint_t &a, const constraint_t &b)
888 {
889 if (constraint_expr_less (a->lhs, b->lhs))
890 return true;
891 else if (constraint_expr_less (b->lhs, a->lhs))
892 return false;
893 else
894 return constraint_expr_less (a->rhs, b->rhs);
895 }
896
897 /* Return true if two constraints A and B are equal. */
898
899 static bool
constraint_equal(struct constraint a,struct constraint b)900 constraint_equal (struct constraint a, struct constraint b)
901 {
902 return constraint_expr_equal (a.lhs, b.lhs)
903 && constraint_expr_equal (a.rhs, b.rhs);
904 }
905
906
907 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
908
909 static constraint_t
constraint_vec_find(vec<constraint_t> vec,struct constraint lookfor)910 constraint_vec_find (vec<constraint_t> vec,
911 struct constraint lookfor)
912 {
913 unsigned int place;
914 constraint_t found;
915
916 if (!vec.exists ())
917 return NULL;
918
919 place = vec.lower_bound (&lookfor, constraint_less);
920 if (place >= vec.length ())
921 return NULL;
922 found = vec[place];
923 if (!constraint_equal (*found, lookfor))
924 return NULL;
925 return found;
926 }
927
928 /* Union two constraint vectors, TO and FROM. Put the result in TO.
929 Returns true of TO set is changed. */
930
931 static bool
constraint_set_union(vec<constraint_t> * to,vec<constraint_t> * from)932 constraint_set_union (vec<constraint_t> *to,
933 vec<constraint_t> *from)
934 {
935 int i;
936 constraint_t c;
937 bool any_change = false;
938
939 FOR_EACH_VEC_ELT (*from, i, c)
940 {
941 if (constraint_vec_find (*to, *c) == NULL)
942 {
943 unsigned int place = to->lower_bound (c, constraint_less);
944 to->safe_insert (place, c);
945 any_change = true;
946 }
947 }
948 return any_change;
949 }
950
/* Expands the solution in SET to all sub-fields of variables included.
   The result is cached in *EXPANDED (allocated on iteration_obstack),
   so repeated calls with the same cache slot return the cached bitmap.
   Returns the expanded bitmap.  */

static bitmap
solution_set_expand (bitmap set, bitmap *expanded)
{
  bitmap_iterator bi;
  unsigned j;

  /* Return the cached expansion if we computed it already.  */
  if (*expanded)
    return *expanded;

  *expanded = BITMAP_ALLOC (&iteration_obstack);

  /* In a first pass expand to the head of the variables we need to
     add all sub-fields off.  This avoids quadratic behavior.  */
  EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      /* Artificial and single-field variables have no sub-fields.  */
      if (v->is_artificial_var
	  || v->is_full_var)
	continue;
      bitmap_set_bit (*expanded, v->head);
    }

  /* In the second pass now expand all head variables with subfields.  */
  EXECUTE_IF_SET_IN_BITMAP (*expanded, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      if (v->head != j)
	continue;
      /* Walk the sub-variable chain and add every field.  */
      for (v = vi_next (v); v != NULL; v = vi_next (v))
	bitmap_set_bit (*expanded, v->id);
    }

  /* And finally set the rest of the bits from SET.  */
  bitmap_ior_into (*expanded, set);

  return *expanded;
}
990
991 /* Union solution sets TO and DELTA, and add INC to each member of DELTA in the
992 process. */
993
static bool
set_union_with_increment (bitmap to, bitmap delta, HOST_WIDE_INT inc,
			  bitmap *expanded_delta)
{
  bool changed = false;
  bitmap_iterator bi;
  unsigned int i;

  /* If the solution of DELTA contains anything it is good enough to transfer
     this to TO.  */
  if (bitmap_bit_p (delta, anything_id))
    return bitmap_set_bit (to, anything_id);

  /* If the offset is unknown we have to expand the solution to
     all subfields.  */
  if (inc == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (delta, expanded_delta);
      changed |= bitmap_ior_into (to, delta);
      return changed;
    }

  /* For non-zero offset union the offsetted solution into the destination.  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* If this is a variable with just one field just set its bit
	 in the result.  */
      if (vi->is_artificial_var
	  || vi->is_unknown_size_var
	  || vi->is_full_var)
	changed |= bitmap_set_bit (to, i);
      else
	{
	  HOST_WIDE_INT fieldoffset = vi->offset + inc;
	  unsigned HOST_WIDE_INT size = vi->size;

	  /* If the offset makes the pointer point to before the
	     variable use offset zero for the field lookup.  */
	  if (fieldoffset < 0)
	    vi = get_varinfo (vi->head);
	  else
	    vi = first_or_preceding_vi_for_offset (vi, fieldoffset);

	  do
	    {
	      changed |= bitmap_set_bit (to, vi->id);
	      /* Stop at the last field of the variable.  */
	      if (vi->is_full_var
		  || vi->next == 0)
		break;

	      /* We have to include all fields that overlap the current field
		 shifted by inc.  */
	      vi = vi_next (vi);
	    }
	  while (vi->offset < fieldoffset + size);
	}
    }

  return changed;
}
1056
1057 /* Insert constraint C into the list of complex constraints for graph
1058 node VAR. */
1059
1060 static void
insert_into_complex(constraint_graph_t graph,unsigned int var,constraint_t c)1061 insert_into_complex (constraint_graph_t graph,
1062 unsigned int var, constraint_t c)
1063 {
1064 vec<constraint_t> complex = graph->complex[var];
1065 unsigned int place = complex.lower_bound (c, constraint_less);
1066
1067 /* Only insert constraints that do not already exist. */
1068 if (place >= complex.length ()
1069 || !constraint_equal (*c, *complex[place]))
1070 graph->complex[var].safe_insert (place, c);
1071 }
1072
1073
1074 /* Condense two variable nodes into a single variable node, by moving
1075 all associated info from FROM to TO. Returns true if TO node's
1076 constraint set changes after the merge. */
1077
static bool
merge_node_constraints (constraint_graph_t graph, unsigned int to,
			unsigned int from)
{
  unsigned int i;
  constraint_t c;
  bool any_change = false;

  /* FROM must already be represented by TO in the union-find.  */
  gcc_checking_assert (find (from) == to);

  /* Move all complex constraints from src node into to node.  */
  FOR_EACH_VEC_ELT (graph->complex[from], i, c)
    {
      /* In complex constraints for node FROM, we may have either
	 a = *FROM, and *FROM = a, or an offseted constraint which are
	 always added to the rhs node's constraints.  */

      if (c->rhs.type == DEREF)
	c->rhs.var = to;
      else if (c->lhs.type == DEREF)
	c->lhs.var = to;
      else
	c->rhs.var = to;

    }
  /* Union the rewritten constraints into TO's list; duplicates are
     detected there and the return value tells whether TO changed.  */
  any_change = constraint_set_union (&graph->complex[to],
				     &graph->complex[from]);
  /* FROM's list is dead after the merge.  */
  graph->complex[from].release ();
  return any_change;
}
1108
1109
1110 /* Remove edges involving NODE from GRAPH. */
1111
1112 static void
clear_edges_for_node(constraint_graph_t graph,unsigned int node)1113 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1114 {
1115 if (graph->succs[node])
1116 BITMAP_FREE (graph->succs[node]);
1117 }
1118
1119 /* Merge GRAPH nodes FROM and TO into node TO. */
1120
1121 static void
merge_graph_nodes(constraint_graph_t graph,unsigned int to,unsigned int from)1122 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
1123 unsigned int from)
1124 {
1125 if (graph->indirect_cycles[from] != -1)
1126 {
1127 /* If we have indirect cycles with the from node, and we have
1128 none on the to node, the to node has indirect cycles from the
1129 from node now that they are unified.
1130 If indirect cycles exist on both, unify the nodes that they
1131 are in a cycle with, since we know they are in a cycle with
1132 each other. */
1133 if (graph->indirect_cycles[to] == -1)
1134 graph->indirect_cycles[to] = graph->indirect_cycles[from];
1135 }
1136
1137 /* Merge all the successor edges. */
1138 if (graph->succs[from])
1139 {
1140 if (!graph->succs[to])
1141 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
1142 bitmap_ior_into (graph->succs[to],
1143 graph->succs[from]);
1144 }
1145
1146 clear_edges_for_node (graph, from);
1147 }
1148
1149
1150 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
1151 it doesn't exist in the graph already. */
1152
1153 static void
add_implicit_graph_edge(constraint_graph_t graph,unsigned int to,unsigned int from)1154 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1155 unsigned int from)
1156 {
1157 if (to == from)
1158 return;
1159
1160 if (!graph->implicit_preds[to])
1161 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1162
1163 if (bitmap_set_bit (graph->implicit_preds[to], from))
1164 stats.num_implicit_edges++;
1165 }
1166
/* Add a predecessor graph edge to GRAPH, going from TO to FROM if
   it doesn't exist in the graph already.  */
1170
1171 static void
add_pred_graph_edge(constraint_graph_t graph,unsigned int to,unsigned int from)1172 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
1173 unsigned int from)
1174 {
1175 if (!graph->preds[to])
1176 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1177 bitmap_set_bit (graph->preds[to], from);
1178 }
1179
1180 /* Add a graph edge to GRAPH, going from FROM to TO if
1181 it doesn't exist in the graph already.
1182 Return false if the edge already existed, true otherwise. */
1183
1184 static bool
add_graph_edge(constraint_graph_t graph,unsigned int to,unsigned int from)1185 add_graph_edge (constraint_graph_t graph, unsigned int to,
1186 unsigned int from)
1187 {
1188 if (to == from)
1189 {
1190 return false;
1191 }
1192 else
1193 {
1194 bool r = false;
1195
1196 if (!graph->succs[from])
1197 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1198 if (bitmap_set_bit (graph->succs[from], to))
1199 {
1200 r = true;
1201 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1202 stats.num_edges++;
1203 }
1204 return r;
1205 }
1206 }
1207
1208
1209 /* Initialize the constraint graph structure to contain SIZE nodes. */
1210
1211 static void
init_graph(unsigned int size)1212 init_graph (unsigned int size)
1213 {
1214 unsigned int j;
1215
1216 graph = XCNEW (struct constraint_graph);
1217 graph->size = size;
1218 graph->succs = XCNEWVEC (bitmap, graph->size);
1219 graph->indirect_cycles = XNEWVEC (int, graph->size);
1220 graph->rep = XNEWVEC (unsigned int, graph->size);
1221 /* ??? Macros do not support template types with multiple arguments,
1222 so we use a typedef to work around it. */
1223 typedef vec<constraint_t> vec_constraint_t_heap;
1224 graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
1225 graph->pe = XCNEWVEC (unsigned int, graph->size);
1226 graph->pe_rep = XNEWVEC (int, graph->size);
1227
1228 for (j = 0; j < graph->size; j++)
1229 {
1230 graph->rep[j] = j;
1231 graph->pe_rep[j] = -1;
1232 graph->indirect_cycles[j] = -1;
1233 }
1234 }
1235
1236 /* Build the constraint graph, adding only predecessor edges right now. */
1237
static void
build_pred_graph (void)
{
  int i;
  constraint_t c;
  unsigned int j;

  /* Allocate the predecessor-graph side arrays; the successor arrays
     were allocated by init_graph.  */
  graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
  graph->preds = XCNEWVEC (bitmap, graph->size);
  graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
  graph->loc_label = XCNEWVEC (unsigned int, graph->size);
  graph->pointed_by = XCNEWVEC (bitmap, graph->size);
  graph->points_to = XCNEWVEC (bitmap, graph->size);
  graph->eq_rep = XNEWVEC (int, graph->size);
  graph->direct_nodes = sbitmap_alloc (graph->size);
  graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_clear (graph->direct_nodes);

  /* All non-special nodes start out direct; constraints below may
     clear that property again.  */
  for (j = 1; j < FIRST_REF_NODE; j++)
    {
      if (!get_varinfo (j)->is_special_var)
	bitmap_set_bit (graph->direct_nodes, j);
    }

  for (j = 0; j < graph->size; j++)
    graph->eq_rep[j] = -1;

  for (j = 0; j < varmap.length (); j++)
    graph->indirect_cycles[j] = -1;

  /* Classify every constraint and record the corresponding
     predecessor / implicit / points-to information.  */
  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = lhs.var;
      unsigned int rhsvar = rhs.var;

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	  else
	    /* Offsetted or non-scalar loads make the destination
	       non-direct.  */
	    bitmap_clear_bit (graph->direct_nodes, lhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  varinfo_t v;

	  /* x = &y.  */
	  if (graph->points_to[lhsvar] == NULL)
	    graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->points_to[lhsvar], rhsvar);

	  if (graph->pointed_by[rhsvar] == NULL)
	    graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);

	  /* Implicitly, *x = y.  */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);

	  /* All related variables are no longer direct nodes.  */
	  bitmap_clear_bit (graph->direct_nodes, rhsvar);
	  v = get_varinfo (rhsvar);
	  if (!v->is_full_var)
	    {
	      /* Taking the address of a field: every field of the
		 variable loses directness.  */
	      v = get_varinfo (v->head);
	      do
		{
		  bitmap_clear_bit (graph->direct_nodes, v->id);
		  v = vi_next (v);
		}
	      while (v != NULL);
	    }
	  bitmap_set_bit (graph->address_taken, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y.  */
	  add_pred_graph_edge (graph, lhsvar, rhsvar);
	  /* Implicitly, *x = *y.  */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
				   FIRST_REF_NODE + rhsvar);
	}
      else if (lhs.offset != 0 || rhs.offset != 0)
	{
	  /* Offsetted copies make the affected node non-direct.  */
	  if (rhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, lhs.var);
	  else if (lhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, rhs.var);
	}
    }
}
1338
1339 /* Build the constraint graph, adding successor edges. */
1340
static void
build_succ_graph (void)
{
  unsigned i, t;
  constraint_t c;

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs;
      struct constraint_expr rhs;
      unsigned int lhsvar;
      unsigned int rhsvar;

      /* Constraints may have been nulled out by earlier rewriting.  */
      if (!c)
	continue;

      lhs = c->lhs;
      rhs = c->rhs;
      /* Work on representatives; nodes may have been unified by the
	 offline phases.  */
      lhsvar = find (lhs.var);
      rhsvar = find (rhs.var);

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y: seed the solution directly rather than adding an
	     edge.  */
	  gcc_checking_assert (find (rhs.var) == rhs.var);
	  bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y.  */
	  add_graph_edge (graph, lhsvar, rhsvar);
	}
    }

  /* Add edges from STOREDANYTHING to all non-direct nodes that can
     receive pointers.  */
  t = find (storedanything_id);
  for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
    {
      if (!bitmap_bit_p (graph->direct_nodes, i)
	  && get_varinfo (i)->may_have_pointers)
	add_graph_edge (graph, find (i), t);
    }

  /* Everything stored to ANYTHING also potentially escapes.  */
  add_graph_edge (graph, find (escaped_id), t);
}
1398
1399
1400 /* Changed variables on the last iteration. */
1401 static bitmap changed;
1402
1403 /* Strongly Connected Component visitation info. */
1404
class scc_info
{
public:
  scc_info (size_t size);
  ~scc_info ();

  /* Nodes already visited by the DFS.  */
  auto_sbitmap visited;
  /* Nodes whose SCC has been fully processed.  */
  auto_sbitmap deleted;
  /* DFS discovery number per node.  */
  unsigned int *dfs;
  /* Per-node representative mapping (identity initially).  */
  unsigned int *node_mapping;
  /* Next DFS number to hand out.  */
  int current_index;
  /* Stack of candidate SCC members not yet assigned to a component.  */
  auto_vec<unsigned> scc_stack;
};
1418
1419
1420 /* Recursive routine to find strongly connected components in GRAPH.
1421 SI is the SCC info to store the information in, and N is the id of current
1422 graph node we are processing.
1423
1424 This is Tarjan's strongly connected component finding algorithm, as
1425 modified by Nuutila to keep only non-root nodes on the stack.
   The algorithm can be found in "On finding the strongly connected
   components in a directed graph" by Esko Nuutila and Eljas
1428 Soisalon-Soininen, in Information Processing Letters volume 49,
1429 number 1, pages 9-14. */
1430
static void
scc_visit (constraint_graph_t graph, class scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  bitmap_set_bit (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
    {
      unsigned int w;

      /* Bits above LAST_REF_NODE are not graph nodes; the bitmap is
	 sorted so we can stop at the first one.  */
      if (i > LAST_REF_NODE)
	break;

      w = find (i);
      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	scc_visit (graph, si, w);

      /* Propagate the lowest DFS number reachable from W back to N
	 (the lowlink update of Tarjan's algorithm).  */
      unsigned int t = find (w);
      gcc_checking_assert (find (n) == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      /* N is the root of an SCC; stacked nodes with DFS number >=
	 MY_DFS belong to the same component.  */
      if (si->scc_stack.length () > 0
	  && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  bitmap scc = BITMAP_ALLOC (NULL);
	  unsigned int lowest_node;
	  bitmap_iterator bi;

	  bitmap_set_bit (scc, n);

	  while (si->scc_stack.length () != 0
		 && si->dfs[si->scc_stack.last ()] >= my_dfs)
	    {
	      unsigned int w = si->scc_stack.pop ();

	      bitmap_set_bit (scc, w);
	    }

	  /* Unify into the member with the smallest id, which must be
	     a non-REF node.  */
	  lowest_node = bitmap_first_set_bit (scc);
	  gcc_assert (lowest_node < FIRST_REF_NODE);

	  /* Collapse the SCC nodes into a single node, and mark the
	     indirect cycles.  */
	  EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
	    {
	      if (i < FIRST_REF_NODE)
		{
		  if (unite (lowest_node, i))
		    unify_nodes (graph, lowest_node, i, false);
		}
	      else
		{
		  /* REF nodes are recorded as indirect cycles and
		     dealt with during solving.  */
		  unite (lowest_node, i);
		  graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
		}
	    }
	}
      bitmap_set_bit (si->deleted, n);
    }
  else
    /* N is not an SCC root; keep it on the stack for its root.  */
    si->scc_stack.safe_push (n);
}
1507
1508 /* Unify node FROM into node TO, updating the changed count if
1509 necessary when UPDATE_CHANGED is true. */
1510
static void
unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
	     bool update_changed)
{
  gcc_checking_assert (to != from && find (to) == to);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Unifying %s to %s\n",
	     get_varinfo (from)->name,
	     get_varinfo (to)->name);

  if (update_changed)
    stats.unified_vars_dynamic++;
  else
    stats.unified_vars_static++;

  /* Move FROM's graph edges and complex constraints over to TO.  */
  merge_graph_nodes (graph, to, from);
  if (merge_node_constraints (graph, to, from))
    {
      if (update_changed)
	bitmap_set_bit (changed, to);
    }

  /* Mark TO as changed if FROM was changed. If TO was already marked
     as changed, decrease the changed count.  */

  if (update_changed
      && bitmap_clear_bit (changed, from))
    bitmap_set_bit (changed, to);
  varinfo_t fromvi = get_varinfo (from);
  if (fromvi->solution)
    {
      /* If the solution changes because of the merging, we need to mark
	 the variable as changed.  */
      varinfo_t tovi = get_varinfo (to);
      if (bitmap_ior_into (tovi->solution, fromvi->solution))
	{
	  if (update_changed)
	    bitmap_set_bit (changed, to);
	}

      /* FROM's solution bitmaps are dead after the merge.  */
      BITMAP_FREE (fromvi->solution);
      if (fromvi->oldsolution)
	BITMAP_FREE (fromvi->oldsolution);

      if (stats.iterations > 0
	  && tovi->oldsolution)
	BITMAP_FREE (tovi->oldsolution);
    }
  /* Drop any self-successor edge the merge may have created.  */
  if (graph->succs[to])
    bitmap_clear_bit (graph->succs[to], to);
}
1563
1564 /* Information needed to compute the topological ordering of a graph. */
1565
struct topo_info
{
  /* sbitmap of visited nodes.  */
  sbitmap visited;
  /* Array that stores the topological order of the graph, *in
     reverse* (the last element comes first topologically).  */
  vec<unsigned> topo_order;
};
1574
1575
1576 /* Initialize and return a topological info structure. */
1577
1578 static struct topo_info *
init_topo_info(void)1579 init_topo_info (void)
1580 {
1581 size_t size = graph->size;
1582 struct topo_info *ti = XNEW (struct topo_info);
1583 ti->visited = sbitmap_alloc (size);
1584 bitmap_clear (ti->visited);
1585 ti->topo_order.create (1);
1586 return ti;
1587 }
1588
1589
1590 /* Free the topological sort info pointed to by TI. */
1591
1592 static void
free_topo_info(struct topo_info * ti)1593 free_topo_info (struct topo_info *ti)
1594 {
1595 sbitmap_free (ti->visited);
1596 ti->topo_order.release ();
1597 free (ti);
1598 }
1599
1600 /* Visit the graph in topological order, and store the order in the
1601 topo_info structure. */
1602
1603 static void
topo_visit(constraint_graph_t graph,struct topo_info * ti,unsigned int n)1604 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1605 unsigned int n)
1606 {
1607 bitmap_iterator bi;
1608 unsigned int j;
1609
1610 bitmap_set_bit (ti->visited, n);
1611
1612 if (graph->succs[n])
1613 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1614 {
1615 if (!bitmap_bit_p (ti->visited, j))
1616 topo_visit (graph, ti, j);
1617 }
1618
1619 ti->topo_order.safe_push (n);
1620 }
1621
1622 /* Process a constraint C that represents x = *(y + off), using DELTA as the
1623 starting solution for y. */
1624
static void
do_sd_constraint (constraint_graph_t graph, constraint_t c,
		  bitmap delta, bitmap *expanded_delta)
{
  unsigned int lhs = c->lhs.var;
  bool flag = false;
  bitmap sol = get_varinfo (lhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT roffset = c->rhs.offset;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->lhs.offset == 0);

  /* If the solution of Y contains anything it is good enough to transfer
     this to the LHS.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      flag |= bitmap_set_bit (sol, anything_id);
      goto done;
    }

  /* If we do not know at which offset the rhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (roffset == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (delta, expanded_delta);
      /* No further offset processing is necessary.  */
      roffset = 0;
    }

  /* For each variable j in delta (Sol(y)), add
     an edge in the graph from j to x, and union Sol(j) into Sol(x).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      HOST_WIDE_INT fieldoffset = v->offset + roffset;
      unsigned HOST_WIDE_INT size = v->size;
      unsigned int t;

      if (v->is_full_var)
	;
      else if (roffset != 0)
	{
	  /* If the offset makes the pointer point before the variable,
	     fall back to its first field.  */
	  if (fieldoffset < 0)
	    v = get_varinfo (v->head);
	  else
	    v = first_or_preceding_vi_for_offset (v, fieldoffset);
	}

      /* We have to include all fields that overlap the current field
	 shifted by roffset.  */
      do
	{
	  t = find (v->id);

	  /* Adding edges from the special vars is pointless.
	     They don't have sets that can change.  */
	  if (get_varinfo (t)->is_special_var)
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
	  /* Merging the solution from ESCAPED needlessly increases
	     the set.  Use ESCAPED as representative instead.  */
	  else if (v->id == escaped_id)
	    flag |= bitmap_set_bit (sol, escaped_id);
	  else if (v->may_have_pointers
		   && add_graph_edge (graph, lhs, t))
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);

	  if (v->is_full_var
	      || v->next == 0)
	    break;

	  v = vi_next (v);
	}
      while (v->offset < fieldoffset + size);
    }

done:
  /* If the LHS solution changed, mark the var as changed.  */
  if (flag)
    {
      get_varinfo (lhs)->solution = sol;
      bitmap_set_bit (changed, lhs);
    }
}
1711
1712 /* Process a constraint C that represents *(x + off) = y using DELTA
1713 as the starting solution for x. */
1714
static void
do_ds_constraint (constraint_t c, bitmap delta, bitmap *expanded_delta)
{
  unsigned int rhs = c->rhs.var;
  bitmap sol = get_varinfo (rhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT loff = c->lhs.offset;
  bool escaped_p = false;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->rhs.offset == 0);

  /* If the solution of y contains ANYTHING simply use the ANYTHING
     solution.  This avoids needlessly increasing the points-to sets.  */
  if (bitmap_bit_p (sol, anything_id))
    sol = get_varinfo (find (anything_id))->solution;

  /* If the solution for x contains ANYTHING we have to merge the
     solution of y into all pointer variables which we do via
     STOREDANYTHING.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      unsigned t = find (storedanything_id);
      if (add_graph_edge (graph, t, rhs))
	{
	  if (bitmap_ior_into (get_varinfo (t)->solution, sol))
	    bitmap_set_bit (changed, t);
	}
      return;
    }

  /* If we do not know at which offset the lhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (loff == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (delta, expanded_delta);
      loff = 0;
    }

  /* For each member j of delta (Sol(x)), add an edge from y to j and
     union Sol(y) into Sol(j).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      unsigned int t;
      HOST_WIDE_INT fieldoffset = v->offset + loff;
      unsigned HOST_WIDE_INT size = v->size;

      if (v->is_full_var)
	;
      else if (loff != 0)
	{
	  /* If the offset makes the pointer point before the variable,
	     fall back to its first field.  */
	  if (fieldoffset < 0)
	    v = get_varinfo (v->head);
	  else
	    v = first_or_preceding_vi_for_offset (v, fieldoffset);
	}

      /* We have to include all fields that overlap the current field
	 shifted by loff.  */
      do
	{
	  if (v->may_have_pointers)
	    {
	      /* If v is a global variable then this is an escape point.  */
	      if (v->is_global_var
		  && !escaped_p)
		{
		  t = find (escaped_id);
		  if (add_graph_edge (graph, t, rhs)
		      && bitmap_ior_into (get_varinfo (t)->solution, sol))
		    bitmap_set_bit (changed, t);
		  /* Enough to let rhs escape once.  */
		  escaped_p = true;
		}

	      if (v->is_special_var)
		break;

	      t = find (v->id);
	      if (add_graph_edge (graph, t, rhs)
		  && bitmap_ior_into (get_varinfo (t)->solution, sol))
		bitmap_set_bit (changed, t);
	    }

	  if (v->is_full_var
	      || v->next == 0)
	    break;

	  v = vi_next (v);
	}
      while (v->offset < fieldoffset + size);
    }
}
1811
1812 /* Handle a non-simple (simple meaning requires no iteration),
1813 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1814
1815 static void
do_complex_constraint(constraint_graph_t graph,constraint_t c,bitmap delta,bitmap * expanded_delta)1816 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta,
1817 bitmap *expanded_delta)
1818 {
1819 if (c->lhs.type == DEREF)
1820 {
1821 if (c->rhs.type == ADDRESSOF)
1822 {
1823 gcc_unreachable ();
1824 }
1825 else
1826 {
1827 /* *x = y */
1828 do_ds_constraint (c, delta, expanded_delta);
1829 }
1830 }
1831 else if (c->rhs.type == DEREF)
1832 {
1833 /* x = *y */
1834 if (!(get_varinfo (c->lhs.var)->is_special_var))
1835 do_sd_constraint (graph, c, delta, expanded_delta);
1836 }
1837 else
1838 {
1839 bitmap tmp;
1840 bool flag = false;
1841
1842 gcc_checking_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR
1843 && c->rhs.offset != 0 && c->lhs.offset == 0);
1844 tmp = get_varinfo (c->lhs.var)->solution;
1845
1846 flag = set_union_with_increment (tmp, delta, c->rhs.offset,
1847 expanded_delta);
1848
1849 if (flag)
1850 bitmap_set_bit (changed, c->lhs.var);
1851 }
1852 }
1853
1854 /* Initialize and return a new SCC info structure. */
1855
scc_info(size_t size)1856 scc_info::scc_info (size_t size) :
1857 visited (size), deleted (size), current_index (0), scc_stack (1)
1858 {
1859 bitmap_clear (visited);
1860 bitmap_clear (deleted);
1861 node_mapping = XNEWVEC (unsigned int, size);
1862 dfs = XCNEWVEC (unsigned int, size);
1863
1864 for (size_t i = 0; i < size; i++)
1865 node_mapping[i] = i;
1866 }
1867
1868 /* Free an SCC info structure pointed to by SI */
1869
~scc_info()1870 scc_info::~scc_info ()
1871 {
1872 free (node_mapping);
1873 free (dfs);
1874 }
1875
1876
1877 /* Find indirect cycles in GRAPH that occur, using strongly connected
1878 components, and note them in the indirect cycles map.
1879
1880 This technique comes from Ben Hardekopf and Calvin Lin,
1881 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1882 Lines of Code", submitted to PLDI 2007. */
1883
1884 static void
find_indirect_cycles(constraint_graph_t graph)1885 find_indirect_cycles (constraint_graph_t graph)
1886 {
1887 unsigned int i;
1888 unsigned int size = graph->size;
1889 scc_info si (size);
1890
1891 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1892 if (!bitmap_bit_p (si.visited, i) && find (i) == i)
1893 scc_visit (graph, &si, i);
1894 }
1895
1896 /* Compute a topological ordering for GRAPH, and store the result in the
1897 topo_info structure TI. */
1898
1899 static void
compute_topo_order(constraint_graph_t graph,struct topo_info * ti)1900 compute_topo_order (constraint_graph_t graph,
1901 struct topo_info *ti)
1902 {
1903 unsigned int i;
1904 unsigned int size = graph->size;
1905
1906 for (i = 0; i != size; ++i)
1907 if (!bitmap_bit_p (ti->visited, i) && find (i) == i)
1908 topo_visit (graph, ti, i);
1909 }
1910
1911 /* Structure used to for hash value numbering of pointer equivalence
1912 classes. */
1913
typedef struct equiv_class_label
{
  /* Cached hash of LABELS, used for hash-table lookup.  */
  hashval_t hashcode;
  /* Equivalence class number assigned to this label set
     (0 until one is assigned).  */
  unsigned int equivalence_class;
  /* The set of labels this entry stands for.  */
  bitmap labels;
} *equiv_class_label_t;
typedef const struct equiv_class_label *const_equiv_class_label_t;
1921
1922 /* Equiv_class_label hashtable helpers. */
1923
struct equiv_class_hasher : nofree_ptr_hash <equiv_class_label>
{
  /* Return the entry's cached hash value.  */
  static inline hashval_t hash (const equiv_class_label *);
  /* Compare two entries by hash and label bitmap.  */
  static inline bool equal (const equiv_class_label *,
			    const equiv_class_label *);
};
1930
1931 /* Hash function for a equiv_class_label_t */
1932
inline hashval_t
equiv_class_hasher::hash (const equiv_class_label *ecl)
{
  /* The hash was computed once when the entry was created.  */
  return ecl->hashcode;
}
1938
1939 /* Equality function for two equiv_class_label_t's. */
1940
1941 inline bool
equal(const equiv_class_label * eql1,const equiv_class_label * eql2)1942 equiv_class_hasher::equal (const equiv_class_label *eql1,
1943 const equiv_class_label *eql2)
1944 {
1945 return (eql1->hashcode == eql2->hashcode
1946 && bitmap_equal_p (eql1->labels, eql2->labels));
1947 }
1948
1949 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1950 classes. */
1951 static hash_table<equiv_class_hasher> *pointer_equiv_class_table;
1952
1953 /* A hashtable for mapping a bitmap of labels->location equivalence
1954 classes. */
1955 static hash_table<equiv_class_hasher> *location_equiv_class_table;
1956
1957 struct obstack equiv_class_obstack;
1958
/* Lookup an equivalence class in TABLE by the bitmap of LABELS it
   contains, inserting a new entry with equivalence class zero if
   none exists yet.  Returns the table entry for LABELS.  */
1962
1963 static equiv_class_label *
equiv_class_lookup_or_add(hash_table<equiv_class_hasher> * table,bitmap labels)1964 equiv_class_lookup_or_add (hash_table<equiv_class_hasher> *table,
1965 bitmap labels)
1966 {
1967 equiv_class_label **slot;
1968 equiv_class_label ecl;
1969
1970 ecl.labels = labels;
1971 ecl.hashcode = bitmap_hash (labels);
1972 slot = table->find_slot (&ecl, INSERT);
1973 if (!*slot)
1974 {
1975 *slot = XOBNEW (&equiv_class_obstack, struct equiv_class_label);
1976 (*slot)->labels = labels;
1977 (*slot)->hashcode = ecl.hashcode;
1978 (*slot)->equivalence_class = 0;
1979 }
1980
1981 return *slot;
1982 }
1983
1984 /* Perform offline variable substitution.
1985
1986 This is a worst case quadratic time way of identifying variables
1987 that must have equivalent points-to sets, including those caused by
1988 static cycles, and single entry subgraphs, in the constraint graph.
1989
1990 The technique is described in "Exploiting Pointer and Location
1991 Equivalence to Optimize Pointer Analysis. In the 14th International
1992 Static Analysis Symposium (SAS), August 2007." It is known as the
1993 "HU" algorithm, and is equivalent to value numbering the collapsed
1994 constraint graph including evaluating unions.
1995
1996 The general method of finding equivalence classes is as follows:
1997 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1998 Initialize all non-REF nodes to be direct nodes.
1999 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
2000 variable}
2001 For each constraint containing the dereference, we also do the same
2002 thing.
2003
2004 We then compute SCC's in the graph and unify nodes in the same SCC,
2005 including pts sets.
2006
2007 For each non-collapsed node x:
2008 Visit all unvisited explicit incoming edges.
   Ignoring all non-pointers, set pts(x) = Union of pts(y) for y
   where y->x.
2011 Lookup the equivalence class for pts(x).
2012 If we found one, equivalence_class(x) = found class.
2013 Otherwise, equivalence_class(x) = new class, and new_class is
2014 added to the lookup table.
2015
2016 All direct nodes with the same equivalence class can be replaced
2017 with a single representative node.
2018 All unlabeled nodes (label == 0) are not pointers and all edges
2019 involving them can be eliminated.
2020 We perform these optimizations during rewrite_constraints
2021
2022 In addition to pointer equivalence class finding, we also perform
2023 location equivalence class finding. This is the set of variables
2024 that always appear together in points-to sets. We use this to
2025 compress the size of the points-to sets. */
2026
2027 /* Current maximum pointer equivalence class id. */
2028 static int pointer_equiv_class;
2029
2030 /* Current maximum location equivalence class id. */
2031 static int location_equiv_class;
2032
/* Recursive routine to find strongly connected components in GRAPH,
   and label its nodes with DFS numbers.  */

2036 static void
condense_visit(constraint_graph_t graph,class scc_info * si,unsigned int n)2037 condense_visit (constraint_graph_t graph, class scc_info *si, unsigned int n)
2038 {
2039 unsigned int i;
2040 bitmap_iterator bi;
2041 unsigned int my_dfs;
2042
2043 gcc_checking_assert (si->node_mapping[n] == n);
2044 bitmap_set_bit (si->visited, n);
2045 si->dfs[n] = si->current_index ++;
2046 my_dfs = si->dfs[n];
2047
2048 /* Visit all the successors. */
2049 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2050 {
2051 unsigned int w = si->node_mapping[i];
2052
2053 if (bitmap_bit_p (si->deleted, w))
2054 continue;
2055
2056 if (!bitmap_bit_p (si->visited, w))
2057 condense_visit (graph, si, w);
2058
2059 unsigned int t = si->node_mapping[w];
2060 gcc_checking_assert (si->node_mapping[n] == n);
2061 if (si->dfs[t] < si->dfs[n])
2062 si->dfs[n] = si->dfs[t];
2063 }
2064
2065 /* Visit all the implicit predecessors. */
2066 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
2067 {
2068 unsigned int w = si->node_mapping[i];
2069
2070 if (bitmap_bit_p (si->deleted, w))
2071 continue;
2072
2073 if (!bitmap_bit_p (si->visited, w))
2074 condense_visit (graph, si, w);
2075
2076 unsigned int t = si->node_mapping[w];
2077 gcc_assert (si->node_mapping[n] == n);
2078 if (si->dfs[t] < si->dfs[n])
2079 si->dfs[n] = si->dfs[t];
2080 }
2081
2082 /* See if any components have been identified. */
2083 if (si->dfs[n] == my_dfs)
2084 {
2085 if (si->scc_stack.length () != 0
2086 && si->dfs[si->scc_stack.last ()] >= my_dfs)
2087 {
2088 /* Find the first node of the SCC and do non-bitmap work. */
2089 bool direct_p = true;
2090 unsigned first = si->scc_stack.length ();
2091 do
2092 {
2093 --first;
2094 unsigned int w = si->scc_stack[first];
2095 si->node_mapping[w] = n;
2096 if (!bitmap_bit_p (graph->direct_nodes, w))
2097 direct_p = false;
2098 }
2099 while (first > 0
2100 && si->dfs[si->scc_stack[first - 1]] >= my_dfs);
2101 if (!direct_p)
2102 bitmap_clear_bit (graph->direct_nodes, n);
2103
2104 /* Want to reduce to node n, push that first. */
2105 si->scc_stack.reserve (1);
2106 si->scc_stack.quick_push (si->scc_stack[first]);
2107 si->scc_stack[first] = n;
2108
2109 unsigned scc_size = si->scc_stack.length () - first;
2110 unsigned split = scc_size / 2;
2111 unsigned carry = scc_size - split * 2;
2112 while (split > 0)
2113 {
2114 for (unsigned i = 0; i < split; ++i)
2115 {
2116 unsigned a = si->scc_stack[first + i];
2117 unsigned b = si->scc_stack[first + split + carry + i];
2118
2119 /* Unify our nodes. */
2120 if (graph->preds[b])
2121 {
2122 if (!graph->preds[a])
2123 std::swap (graph->preds[a], graph->preds[b]);
2124 else
2125 bitmap_ior_into_and_free (graph->preds[a],
2126 &graph->preds[b]);
2127 }
2128 if (graph->implicit_preds[b])
2129 {
2130 if (!graph->implicit_preds[a])
2131 std::swap (graph->implicit_preds[a],
2132 graph->implicit_preds[b]);
2133 else
2134 bitmap_ior_into_and_free (graph->implicit_preds[a],
2135 &graph->implicit_preds[b]);
2136 }
2137 if (graph->points_to[b])
2138 {
2139 if (!graph->points_to[a])
2140 std::swap (graph->points_to[a], graph->points_to[b]);
2141 else
2142 bitmap_ior_into_and_free (graph->points_to[a],
2143 &graph->points_to[b]);
2144 }
2145 }
2146 unsigned remain = split + carry;
2147 split = remain / 2;
2148 carry = remain - split * 2;
2149 }
2150 /* Actually pop the SCC. */
2151 si->scc_stack.truncate (first);
2152 }
2153 bitmap_set_bit (si->deleted, n);
2154 }
2155 else
2156 si->scc_stack.safe_push (n);
2157 }
2158
2159 /* Label pointer equivalences.
2160
2161 This performs a value numbering of the constraint graph to
2162 discover which variables will always have the same points-to sets
2163 under the current set of constraints.
2164
2165 The way it value numbers is to store the set of points-to bits
2166 generated by the constraints and graph edges. This is just used as a
2167 hash and equality comparison. The *actual set of points-to bits* is
2168 completely irrelevant, in that we don't care about being able to
2169 extract them later.
2170
2171 The equality values (currently bitmaps) just have to satisfy a few
2172 constraints, the main ones being:
2173 1. The combining operation must be order independent.
2174 2. The end result of a given set of operations must be unique iff the
2175 combination of input values is unique
2176 3. Hashable. */
2177
static void
label_visit (constraint_graph_t graph, class scc_info *si, unsigned int n)
{
  unsigned int i, first_pred;
  bitmap_iterator bi;

  bitmap_set_bit (si->visited, n);

  /* Label and union our incoming edges's points to sets.  */
  first_pred = -1U;
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];
      if (!bitmap_bit_p (si->visited, w))
	label_visit (graph, si, w);

      /* Skip unused edges: self edges and edges from non-pointers
	 (pointer label zero).  */
      if (w == n || graph->pointer_label[w] == 0)
	continue;

      if (graph->points_to[w])
	{
	  if (!graph->points_to[n])
	    {
	      /* Delay allocating a set for N until we see a second
		 non-empty predecessor; with exactly one we can share
		 the predecessor's set (see below) instead of copying.  */
	      if (first_pred == -1U)
		first_pred = w;
	      else
		{
		  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
		  bitmap_ior (graph->points_to[n],
			      graph->points_to[first_pred],
			      graph->points_to[w]);
		}
	    }
	  else
	    bitmap_ior_into (graph->points_to[n], graph->points_to[w]);
	}
    }

  /* Indirect nodes get fresh variables (the FIRST_REF_NODE + n bit acts
     as the fresh variable) and a new pointer equiv class.  */
  if (!bitmap_bit_p (graph->direct_nodes, n))
    {
      if (!graph->points_to[n])
	{
	  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
	  if (first_pred != -1U)
	    bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
	}
      bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
      graph->pointer_label[n] = pointer_equiv_class++;
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
				       graph->points_to[n]);
      ecl->equivalence_class = graph->pointer_label[n];
      return;
    }

  /* If there was only a single non-empty predecessor the pointer equiv
     class is the same; share the predecessor's points-to set.  */
  if (!graph->points_to[n])
    {
      if (first_pred != -1U)
	{
	  graph->pointer_label[n] = graph->pointer_label[first_pred];
	  graph->points_to[n] = graph->points_to[first_pred];
	}
      return;
    }

  if (!bitmap_empty_p (graph->points_to[n]))
    {
      /* Look up (or create) the equivalence class for N's set.  On a
	 hit, free our set and share the table's copy to save memory.  */
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
				       graph->points_to[n]);
      if (ecl->equivalence_class == 0)
	ecl->equivalence_class = pointer_equiv_class++;
      else
	{
	  BITMAP_FREE (graph->points_to[n]);
	  graph->points_to[n] = ecl->labels;
	}
      graph->pointer_label[n] = ecl->equivalence_class;
    }
}
2262
2263 /* Print the pred graph in dot format. */
2264
2265 static void
dump_pred_graph(class scc_info * si,FILE * file)2266 dump_pred_graph (class scc_info *si, FILE *file)
2267 {
2268 unsigned int i;
2269
2270 /* Only print the graph if it has already been initialized: */
2271 if (!graph)
2272 return;
2273
2274 /* Prints the header of the dot file: */
2275 fprintf (file, "strict digraph {\n");
2276 fprintf (file, " node [\n shape = box\n ]\n");
2277 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
2278 fprintf (file, "\n // List of nodes and complex constraints in "
2279 "the constraint graph:\n");
2280
2281 /* The next lines print the nodes in the graph together with the
2282 complex constraints attached to them. */
2283 for (i = 1; i < graph->size; i++)
2284 {
2285 if (i == FIRST_REF_NODE)
2286 continue;
2287 if (si->node_mapping[i] != i)
2288 continue;
2289 if (i < FIRST_REF_NODE)
2290 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2291 else
2292 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2293 if (graph->points_to[i]
2294 && !bitmap_empty_p (graph->points_to[i]))
2295 {
2296 if (i < FIRST_REF_NODE)
2297 fprintf (file, "[label=\"%s = {", get_varinfo (i)->name);
2298 else
2299 fprintf (file, "[label=\"*%s = {",
2300 get_varinfo (i - FIRST_REF_NODE)->name);
2301 unsigned j;
2302 bitmap_iterator bi;
2303 EXECUTE_IF_SET_IN_BITMAP (graph->points_to[i], 0, j, bi)
2304 fprintf (file, " %d", j);
2305 fprintf (file, " }\"]");
2306 }
2307 fprintf (file, ";\n");
2308 }
2309
2310 /* Go over the edges. */
2311 fprintf (file, "\n // Edges in the constraint graph:\n");
2312 for (i = 1; i < graph->size; i++)
2313 {
2314 unsigned j;
2315 bitmap_iterator bi;
2316 if (si->node_mapping[i] != i)
2317 continue;
2318 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[i], 0, j, bi)
2319 {
2320 unsigned from = si->node_mapping[j];
2321 if (from < FIRST_REF_NODE)
2322 fprintf (file, "\"%s\"", get_varinfo (from)->name);
2323 else
2324 fprintf (file, "\"*%s\"", get_varinfo (from - FIRST_REF_NODE)->name);
2325 fprintf (file, " -> ");
2326 if (i < FIRST_REF_NODE)
2327 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2328 else
2329 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2330 fprintf (file, ";\n");
2331 }
2332 }
2333
2334 /* Prints the tail of the dot file. */
2335 fprintf (file, "}\n");
2336 }
2337
2338 /* Perform offline variable substitution, discovering equivalence
2339 classes, and eliminating non-pointer variables. */
2340
static class scc_info *
perform_var_substitution (constraint_graph_t graph)
{
  unsigned int i;
  unsigned int size = graph->size;
  scc_info *si = new scc_info (size);

  bitmap_obstack_initialize (&iteration_obstack);
  gcc_obstack_init (&equiv_class_obstack);
  pointer_equiv_class_table = new hash_table<equiv_class_hasher> (511);
  location_equiv_class_table
    = new hash_table<equiv_class_hasher> (511);
  /* Class 0 is reserved to mean "non-pointer", so start counting at 1.  */
  pointer_equiv_class = 1;
  location_equiv_class = 1;

  /* Condense the nodes, which means to find SCC's, count incoming
     predecessors, and unite nodes in SCC's.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
      condense_visit (graph, si, si->node_mapping[i]);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph before var-substitution "
	       "in dot format:\n");
      dump_pred_graph (si, dump_file);
      fprintf (dump_file, "\n\n");
    }

  /* The visited bitmap is reused for the labeling walk below.  */
  bitmap_clear (si->visited);
  /* Actually label the nodes for pointer equivalences.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
      label_visit (graph, si, si->node_mapping[i]);

  /* Calculate location equivalence labels.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      bitmap pointed_by;
      bitmap_iterator bi;
      unsigned int j;

      if (!graph->pointed_by[i])
	continue;
      pointed_by = BITMAP_ALLOC (&iteration_obstack);

      /* Translate the pointed-by mapping for pointer equivalence
	 labels.  */
      EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
	{
	  bitmap_set_bit (pointed_by,
			  graph->pointer_label[si->node_mapping[j]]);
	}
      /* The original pointed_by is now dead.  */
      BITMAP_FREE (graph->pointed_by[i]);

      /* Look up the location equivalence label if one exists, or make
	 one otherwise.  On a hit the table already owns an equal bitmap,
	 so ours can be freed.  */
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (location_equiv_class_table, pointed_by);
      if (ecl->equivalence_class == 0)
	ecl->equivalence_class = location_equiv_class++;
      else
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Found location equivalence for node %s\n",
		     get_varinfo (i)->name);
	  BITMAP_FREE (pointed_by);
	}
      graph->loc_label[i] = ecl->equivalence_class;

    }

  /* Dump each node's SCC representative and equivalence classes.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    for (i = 1; i < FIRST_REF_NODE; i++)
      {
	unsigned j = si->node_mapping[i];
	if (j != i)
	  {
	    fprintf (dump_file, "%s node id %d ",
		     bitmap_bit_p (graph->direct_nodes, i)
		     ? "Direct" : "Indirect", i);
	    if (i < FIRST_REF_NODE)
	      fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
	    else
	      fprintf (dump_file, "\"*%s\"",
		       get_varinfo (i - FIRST_REF_NODE)->name);
	    fprintf (dump_file, " mapped to SCC leader node id %d ", j);
	    if (j < FIRST_REF_NODE)
	      fprintf (dump_file, "\"%s\"\n", get_varinfo (j)->name);
	    else
	      fprintf (dump_file, "\"*%s\"\n",
		       get_varinfo (j - FIRST_REF_NODE)->name);
	  }
	else
	  {
	    fprintf (dump_file,
		     "Equivalence classes for %s node id %d ",
		     bitmap_bit_p (graph->direct_nodes, i)
		     ? "direct" : "indirect", i);
	    if (i < FIRST_REF_NODE)
	      fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
	    else
	      fprintf (dump_file, "\"*%s\"",
		       get_varinfo (i - FIRST_REF_NODE)->name);
	    fprintf (dump_file,
		     ": pointer %d, location %d\n",
		     graph->pointer_label[i], graph->loc_label[i]);
	  }
      }

  /* Quickly eliminate our non-pointer variables.  A pointer label of
     zero means the node's points-to set stayed empty, i.e. it can never
     point to anything.  */

  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      unsigned int node = si->node_mapping[i];

      if (graph->pointer_label[node] == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "%s is a non-pointer variable, eliminating edges.\n",
		     get_varinfo (node)->name);
	  stats.nonpointer_vars++;
	  clear_edges_for_node (graph, node);
	}
    }

  return si;
}
2471
2472 /* Free information that was only necessary for variable
2473 substitution. */
2474
static void
free_var_substitution_info (class scc_info *si)
{
  delete si;
  /* Release the per-node auxiliary arrays used only during
     substitution.  */
  free (graph->pointer_label);
  free (graph->loc_label);
  free (graph->pointed_by);
  free (graph->points_to);
  free (graph->eq_rep);
  sbitmap_free (graph->direct_nodes);
  /* Drop the equivalence-class hash tables; the bitmaps they reference
     live on equiv_class_obstack, freed below.  */
  delete pointer_equiv_class_table;
  pointer_equiv_class_table = NULL;
  delete location_equiv_class_table;
  location_equiv_class_table = NULL;
  obstack_free (&equiv_class_obstack, NULL);
  bitmap_obstack_release (&iteration_obstack);
}
2492
2493 /* Return an existing node that is equivalent to NODE, which has
2494 equivalence class LABEL, if one exists. Return NODE otherwise. */
2495
2496 static unsigned int
find_equivalent_node(constraint_graph_t graph,unsigned int node,unsigned int label)2497 find_equivalent_node (constraint_graph_t graph,
2498 unsigned int node, unsigned int label)
2499 {
2500 /* If the address version of this variable is unused, we can
2501 substitute it for anything else with the same label.
2502 Otherwise, we know the pointers are equivalent, but not the
2503 locations, and we can unite them later. */
2504
2505 if (!bitmap_bit_p (graph->address_taken, node))
2506 {
2507 gcc_checking_assert (label < graph->size);
2508
2509 if (graph->eq_rep[label] != -1)
2510 {
2511 /* Unify the two variables since we know they are equivalent. */
2512 if (unite (graph->eq_rep[label], node))
2513 unify_nodes (graph, graph->eq_rep[label], node, false);
2514 return graph->eq_rep[label];
2515 }
2516 else
2517 {
2518 graph->eq_rep[label] = node;
2519 graph->pe_rep[label] = node;
2520 }
2521 }
2522 else
2523 {
2524 gcc_checking_assert (label < graph->size);
2525 graph->pe[node] = label;
2526 if (graph->pe_rep[label] == -1)
2527 graph->pe_rep[label] = node;
2528 }
2529
2530 return node;
2531 }
2532
2533 /* Unite pointer equivalent but not location equivalent nodes in
2534 GRAPH. This may only be performed once variable substitution is
2535 finished. */
2536
2537 static void
unite_pointer_equivalences(constraint_graph_t graph)2538 unite_pointer_equivalences (constraint_graph_t graph)
2539 {
2540 unsigned int i;
2541
2542 /* Go through the pointer equivalences and unite them to their
2543 representative, if they aren't already. */
2544 for (i = 1; i < FIRST_REF_NODE; i++)
2545 {
2546 unsigned int label = graph->pe[i];
2547 if (label)
2548 {
2549 int label_rep = graph->pe_rep[label];
2550
2551 if (label_rep == -1)
2552 continue;
2553
2554 label_rep = find (label_rep);
2555 if (label_rep >= 0 && unite (label_rep, find (i)))
2556 unify_nodes (graph, label_rep, i, false);
2557 }
2558 }
2559 }
2560
2561 /* Move complex constraints to the GRAPH nodes they belong to. */
2562
2563 static void
move_complex_constraints(constraint_graph_t graph)2564 move_complex_constraints (constraint_graph_t graph)
2565 {
2566 int i;
2567 constraint_t c;
2568
2569 FOR_EACH_VEC_ELT (constraints, i, c)
2570 {
2571 if (c)
2572 {
2573 struct constraint_expr lhs = c->lhs;
2574 struct constraint_expr rhs = c->rhs;
2575
2576 if (lhs.type == DEREF)
2577 {
2578 insert_into_complex (graph, lhs.var, c);
2579 }
2580 else if (rhs.type == DEREF)
2581 {
2582 if (!(get_varinfo (lhs.var)->is_special_var))
2583 insert_into_complex (graph, rhs.var, c);
2584 }
2585 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2586 && (lhs.offset != 0 || rhs.offset != 0))
2587 {
2588 insert_into_complex (graph, rhs.var, c);
2589 }
2590 }
2591 }
2592 }
2593
2594
/* Optimize and rewrite complex constraints while performing
   collapsing of equivalent nodes.  SI is the SCC_INFO that is the
   result of perform_var_substitution.  */
2598
2599 static void
rewrite_constraints(constraint_graph_t graph,class scc_info * si)2600 rewrite_constraints (constraint_graph_t graph,
2601 class scc_info *si)
2602 {
2603 int i;
2604 constraint_t c;
2605
2606 if (flag_checking)
2607 {
2608 for (unsigned int j = 0; j < graph->size; j++)
2609 gcc_assert (find (j) == j);
2610 }
2611
2612 FOR_EACH_VEC_ELT (constraints, i, c)
2613 {
2614 struct constraint_expr lhs = c->lhs;
2615 struct constraint_expr rhs = c->rhs;
2616 unsigned int lhsvar = find (lhs.var);
2617 unsigned int rhsvar = find (rhs.var);
2618 unsigned int lhsnode, rhsnode;
2619 unsigned int lhslabel, rhslabel;
2620
2621 lhsnode = si->node_mapping[lhsvar];
2622 rhsnode = si->node_mapping[rhsvar];
2623 lhslabel = graph->pointer_label[lhsnode];
2624 rhslabel = graph->pointer_label[rhsnode];
2625
2626 /* See if it is really a non-pointer variable, and if so, ignore
2627 the constraint. */
2628 if (lhslabel == 0)
2629 {
2630 if (dump_file && (dump_flags & TDF_DETAILS))
2631 {
2632
2633 fprintf (dump_file, "%s is a non-pointer variable, "
2634 "ignoring constraint:",
2635 get_varinfo (lhs.var)->name);
2636 dump_constraint (dump_file, c);
2637 fprintf (dump_file, "\n");
2638 }
2639 constraints[i] = NULL;
2640 continue;
2641 }
2642
2643 if (rhslabel == 0)
2644 {
2645 if (dump_file && (dump_flags & TDF_DETAILS))
2646 {
2647
2648 fprintf (dump_file, "%s is a non-pointer variable, "
2649 "ignoring constraint:",
2650 get_varinfo (rhs.var)->name);
2651 dump_constraint (dump_file, c);
2652 fprintf (dump_file, "\n");
2653 }
2654 constraints[i] = NULL;
2655 continue;
2656 }
2657
2658 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2659 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2660 c->lhs.var = lhsvar;
2661 c->rhs.var = rhsvar;
2662 }
2663 }
2664
2665 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2666 part of an SCC, false otherwise. */
2667
2668 static bool
eliminate_indirect_cycles(unsigned int node)2669 eliminate_indirect_cycles (unsigned int node)
2670 {
2671 if (graph->indirect_cycles[node] != -1
2672 && !bitmap_empty_p (get_varinfo (node)->solution))
2673 {
2674 unsigned int i;
2675 auto_vec<unsigned> queue;
2676 int queuepos;
2677 unsigned int to = find (graph->indirect_cycles[node]);
2678 bitmap_iterator bi;
2679
2680 /* We can't touch the solution set and call unify_nodes
2681 at the same time, because unify_nodes is going to do
2682 bitmap unions into it. */
2683
2684 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2685 {
2686 if (find (i) == i && i != to)
2687 {
2688 if (unite (to, i))
2689 queue.safe_push (i);
2690 }
2691 }
2692
2693 for (queuepos = 0;
2694 queue.iterate (queuepos, &i);
2695 queuepos++)
2696 {
2697 unify_nodes (graph, to, i, true);
2698 }
2699 return true;
2700 }
2701 return false;
2702 }
2703
/* Solve the constraint graph GRAPH using our worklist solver.
   This is based on the PW* family of solvers from the "Efficient Field
   Sensitive Pointer Analysis for C" paper.
   It works by iterating over all the graph nodes, processing the complex
   constraints and propagating the copy constraints, until everything stops
   changing.  This corresponds to steps 6-8 in the solving list given above.  */
2710
static void
solve_graph (constraint_graph_t graph)
{
  unsigned int size = graph->size;
  unsigned int i;
  bitmap pts;

  /* Worklist of nodes whose solution changed since last processed.  */
  changed = BITMAP_ALLOC (NULL);

  /* Mark all initial non-collapsed nodes as changed.  */
  for (i = 1; i < size; i++)
    {
      varinfo_t ivi = get_varinfo (i);
      if (find (i) == i && !bitmap_empty_p (ivi->solution)
	  && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
	      || graph->complex[i].length () > 0))
	bitmap_set_bit (changed, i);
    }

  /* Allocate a bitmap to be used to store the changed bits.  */
  pts = BITMAP_ALLOC (&pta_obstack);

  /* Iterate to a fixpoint: each pass walks the nodes in topological
     order and propagates only the delta since the last visit.  */
  while (!bitmap_empty_p (changed))
    {
      unsigned int i;
      struct topo_info *ti = init_topo_info ();
      stats.iterations++;

      bitmap_obstack_initialize (&iteration_obstack);

      compute_topo_order (graph, ti);

      while (ti->topo_order.length () != 0)
	{

	  i = ti->topo_order.pop ();

	  /* If this variable is not a representative, skip it.  */
	  if (find (i) != i)
	    continue;

	  /* In certain indirect cycle cases, we may merge this
	     variable to another.  */
	  if (eliminate_indirect_cycles (i) && find (i) != i)
	    continue;

	  /* If the node has changed, we need to process the
	     complex constraints and outgoing edges again.  */
	  if (bitmap_clear_bit (changed, i))
	    {
	      unsigned int j;
	      constraint_t c;
	      bitmap solution;
	      vec<constraint_t> complex = graph->complex[i];
	      varinfo_t vi = get_varinfo (i);
	      bool solution_empty;

	      /* Compute the changed set of solution bits.  If anything
		 is in the solution just propagate that.  */
	      if (bitmap_bit_p (vi->solution, anything_id))
		{
		  /* If anything is also in the old solution there is
		     nothing to do.
		     ??? But we shouldn't ended up with "changed" set ...  */
		  if (vi->oldsolution
		      && bitmap_bit_p (vi->oldsolution, anything_id))
		    continue;
		  bitmap_copy (pts, get_varinfo (find (anything_id))->solution);
		}
	      else if (vi->oldsolution)
		/* Only the bits new since the previous visit.  */
		bitmap_and_compl (pts, vi->solution, vi->oldsolution);
	      else
		bitmap_copy (pts, vi->solution);

	      if (bitmap_empty_p (pts))
		continue;

	      /* Record what we've now propagated, allocating the old
		 solution lazily.  */
	      if (vi->oldsolution)
		bitmap_ior_into (vi->oldsolution, pts);
	      else
		{
		  vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
		  bitmap_copy (vi->oldsolution, pts);
		}

	      solution = vi->solution;
	      solution_empty = bitmap_empty_p (solution);

	      /* Process the complex constraints.  */
	      bitmap expanded_pts = NULL;
	      FOR_EACH_VEC_ELT (complex, j, c)
		{
		  /* XXX: This is going to unsort the constraints in
		     some cases, which will occasionally add duplicate
		     constraints during unification.  This does not
		     affect correctness.  */
		  c->lhs.var = find (c->lhs.var);
		  c->rhs.var = find (c->rhs.var);

		  /* The only complex constraint that can change our
		     solution to non-empty, given an empty solution,
		     is a constraint where the lhs side is receiving
		     some set from elsewhere.  */
		  if (!solution_empty || c->lhs.type != DEREF)
		    do_complex_constraint (graph, c, pts, &expanded_pts);
		}
	      BITMAP_FREE (expanded_pts);

	      solution_empty = bitmap_empty_p (solution);

	      if (!solution_empty)
		{
		  bitmap_iterator bi;
		  unsigned eff_escaped_id = find (escaped_id);

		  /* Propagate solution to all successors.  Stale
		     successor entries (merged nodes) are removed lazily
		     via TO_REMOVE, since we cannot modify the bitmap
		     for the bit we are currently iterating on.  */
		  unsigned to_remove = ~0U;
		  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
						0, j, bi)
		    {
		      if (to_remove != ~0U)
			{
			  bitmap_clear_bit (graph->succs[i], to_remove);
			  to_remove = ~0U;
			}
		      unsigned int to = find (j);
		      if (to != j)
			{
			  /* Update the succ graph, avoiding duplicate
			     work.  */
			  to_remove = j;
			  if (! bitmap_set_bit (graph->succs[i], to))
			    continue;
			  /* We eventually end up processing 'to' twice
			     as it is undefined whether bitmap iteration
			     iterates over bits set during iteration.
			     Play safe instead of doing tricks.  */
			}
		      /* Don't try to propagate to ourselves.  */
		      if (to == i)
			continue;

		      bitmap tmp = get_varinfo (to)->solution;
		      bool flag = false;

		      /* If we propagate from ESCAPED use ESCAPED as
			 placeholder.  */
		      if (i == eff_escaped_id)
			flag = bitmap_set_bit (tmp, escaped_id);
		      else
			flag = bitmap_ior_into (tmp, pts);

		      if (flag)
			bitmap_set_bit (changed, to);
		    }
		  if (to_remove != ~0U)
		    bitmap_clear_bit (graph->succs[i], to_remove);
		}
	    }
	}
      free_topo_info (ti);
      bitmap_obstack_release (&iteration_obstack);
    }

  BITMAP_FREE (pts);
  BITMAP_FREE (changed);
  bitmap_obstack_release (&oldpta_obstack);
}
2879
2880 /* Map from trees to variable infos. */
2881 static hash_map<tree, varinfo_t> *vi_for_tree;
2882
2883
2884 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2885
static void
insert_vi_for_tree (tree t, varinfo_t vi)
{
  gcc_assert (vi);
  /* hash_map::put returns true when T was already present, so asserting
     the negation both performs the insertion and checks T is new.  Note
     the deliberate side effect inside gcc_assert; this relies on
     gcc_assert evaluating its argument (the default configuration).  */
  gcc_assert (!vi_for_tree->put (t, vi));
}
2892
2893 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2894 exist in the map, return NULL, otherwise, return the varinfo we found. */
2895
2896 static varinfo_t
lookup_vi_for_tree(tree t)2897 lookup_vi_for_tree (tree t)
2898 {
2899 varinfo_t *slot = vi_for_tree->get (t);
2900 if (slot == NULL)
2901 return NULL;
2902
2903 return *slot;
2904 }
2905
2906 /* Return a printable name for DECL */
2907
2908 static const char *
alias_get_name(tree decl)2909 alias_get_name (tree decl)
2910 {
2911 const char *res = "NULL";
2912 if (dump_file)
2913 {
2914 char *temp = NULL;
2915 if (TREE_CODE (decl) == SSA_NAME)
2916 {
2917 res = get_name (decl);
2918 temp = xasprintf ("%s_%u", res ? res : "", SSA_NAME_VERSION (decl));
2919 }
2920 else if (HAS_DECL_ASSEMBLER_NAME_P (decl)
2921 && DECL_ASSEMBLER_NAME_SET_P (decl))
2922 res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME_RAW (decl));
2923 else if (DECL_P (decl))
2924 {
2925 res = get_name (decl);
2926 if (!res)
2927 temp = xasprintf ("D.%u", DECL_UID (decl));
2928 }
2929
2930 if (temp)
2931 {
2932 res = ggc_strdup (temp);
2933 free (temp);
2934 }
2935 }
2936
2937 return res;
2938 }
2939
2940 /* Find the variable id for tree T in the map.
2941 If T doesn't exist in the map, create an entry for it and return it. */
2942
2943 static varinfo_t
get_vi_for_tree(tree t)2944 get_vi_for_tree (tree t)
2945 {
2946 varinfo_t *slot = vi_for_tree->get (t);
2947 if (slot == NULL)
2948 {
2949 unsigned int id = create_variable_info_for (t, alias_get_name (t), false);
2950 return get_varinfo (id);
2951 }
2952
2953 return *slot;
2954 }
2955
2956 /* Get a scalar constraint expression for a new temporary variable. */
2957
2958 static struct constraint_expr
new_scalar_tmp_constraint_exp(const char * name,bool add_id)2959 new_scalar_tmp_constraint_exp (const char *name, bool add_id)
2960 {
2961 struct constraint_expr tmp;
2962 varinfo_t vi;
2963
2964 vi = new_var_info (NULL_TREE, name, add_id);
2965 vi->offset = 0;
2966 vi->size = -1;
2967 vi->fullsize = -1;
2968 vi->is_full_var = 1;
2969 vi->is_reg_var = 1;
2970
2971 tmp.var = vi->id;
2972 tmp.type = SCALAR;
2973 tmp.offset = 0;
2974
2975 return tmp;
2976 }
2977
/* Get a constraint expression vector from an SSA_VAR_P node.
   If address_p is true, the result will have its address taken.  */
2980
static void
get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
{
  struct constraint_expr cexpr;
  varinfo_t vi;

  /* We allow FUNCTION_DECLs here even though it doesn't make much sense.  */
  gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));

  if (TREE_CODE (t) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (t))
    {
      /* For parameters, get at the points-to set for the actual parm
	 decl.  */
      if (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL)
	{
	  get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
	  return;
	}
      /* For undefined SSA names return nothing (points to NOTHING).  */
      else if (!ssa_defined_default_def_p (t))
	{
	  cexpr.var = nothing_id;
	  cexpr.type = SCALAR;
	  cexpr.offset = 0;
	  results->safe_push (cexpr);
	  return;
	}
    }

  /* For global variables resort to the alias target.  */
  if (VAR_P (t) && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
    {
      varpool_node *node = varpool_node::get (t);
      if (node && node->alias && node->analyzed)
	{
	  node = node->ultimate_alias_target ();
	  /* Canonicalize the PT uid of all aliases to the ultimate target.
	     ??? Hopefully the set of aliases can't change in a way that
	     changes the ultimate alias target.  */
	  gcc_assert ((! DECL_PT_UID_SET_P (node->decl)
		       || DECL_PT_UID (node->decl) == DECL_UID (node->decl))
		      && (! DECL_PT_UID_SET_P (t)
			  || DECL_PT_UID (t) == DECL_UID (node->decl)));
	  DECL_PT_UID (t) = DECL_UID (node->decl);
	  t = node->decl;
	}

      /* If this is decl may bind to NULL note that.  The NOTHING entry
	 is pushed in addition to the variable itself below.  */
      if (address_p
	  && (! node || ! node->nonzero_address ()))
	{
	  cexpr.var = nothing_id;
	  cexpr.type = SCALAR;
	  cexpr.offset = 0;
	  results->safe_push (cexpr);
	}
    }

  vi = get_vi_for_tree (t);
  cexpr.var = vi->id;
  cexpr.type = SCALAR;
  cexpr.offset = 0;

  /* If we are not taking the address of the constraint expr, add all
     sub-fields of the variable as well.  */
  if (!address_p
      && !vi->is_full_var)
    {
      /* vi_next walks the field chain of the variable.  */
      for (; vi; vi = vi_next (vi))
	{
	  cexpr.var = vi->id;
	  results->safe_push (cexpr);
	}
      return;
    }

  results->safe_push (cexpr);
}
3061
3062 /* Process constraint T, performing various simplifications and then
3063 adding it to our list of overall constraints. */
3064
static void
process_constraint (constraint_t t)
{
  struct constraint_expr rhs = t->rhs;
  struct constraint_expr lhs = t->lhs;

  gcc_assert (rhs.var < varmap.length ());
  gcc_assert (lhs.var < varmap.length ());

  /* If we didn't get any useful constraint from the lhs we get
     &ANYTHING as fallback from get_constraint_for.  Deal with
     it here by turning it into *ANYTHING.  */
  if (lhs.type == ADDRESSOF
      && lhs.var == anything_id)
    lhs.type = DEREF;

  /* ADDRESSOF on the lhs is invalid.  */
  gcc_assert (lhs.type != ADDRESSOF);

  /* We shouldn't add constraints from things that cannot have pointers.
     It's not completely trivial to avoid in the callers, so do it here.  */
  if (rhs.type != ADDRESSOF
      && !get_varinfo (rhs.var)->may_have_pointers)
    return;

  /* Likewise adding to the solution of a non-pointer var isn't useful.  */
  if (!get_varinfo (lhs.var)->may_have_pointers)
    return;

  /* This can happen in our IR with things like n->a = *p.  Constraints
     may have at most one dereference, so split recursively through a
     fresh temporary.  */
  if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
    {
      /* Split into tmp = *rhs, *lhs = tmp.  */
      struct constraint_expr tmplhs;
      tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp", true);
      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));
    }
  else if ((rhs.type != SCALAR || rhs.offset != 0) && lhs.type == DEREF)
    {
      /* Split into tmp = &rhs, *lhs = tmp.  */
      struct constraint_expr tmplhs;
      tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp", true);
      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));
    }
  else
    {
      /* The constraint is now in canonical form; record address-taken
	 status on the whole variable (its head field).  */
      gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
      if (rhs.type == ADDRESSOF)
	get_varinfo (get_varinfo (rhs.var)->head)->address_taken = true;
      constraints.safe_push (t);
    }
}
3119
3120
3121 /* Return the position, in bits, of FIELD_DECL from the beginning of its
3122 structure. */
3123
3124 static HOST_WIDE_INT
bitpos_of_field(const tree fdecl)3125 bitpos_of_field (const tree fdecl)
3126 {
3127 if (!tree_fits_shwi_p (DECL_FIELD_OFFSET (fdecl))
3128 || !tree_fits_shwi_p (DECL_FIELD_BIT_OFFSET (fdecl)))
3129 return -1;
3130
3131 return (tree_to_shwi (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
3132 + tree_to_shwi (DECL_FIELD_BIT_OFFSET (fdecl)));
3133 }
3134
3135
/* Get constraint expressions for offsetting PTR by OFFSET.  Stores the
   resulting constraint expressions in *RESULTS.  OFFSET may be NULL_TREE
   or non-constant, in which case the offset is treated as unknown.  */

static void
get_constraint_for_ptr_offset (tree ptr, tree offset,
			       vec<ce_s> *results)
{
  struct constraint_expr c;
  unsigned int j, n;
  HOST_WIDE_INT rhsoffset;

  /* If we do not do field-sensitive PTA adding offsets to pointers
     does not change the points-to solution.  */
  if (!use_field_sensitive)
    {
      get_constraint_for_rhs (ptr, results);
      return;
    }

  /* If the offset is not a non-negative integer constant that fits
     in a HOST_WIDE_INT, we have to fall back to a conservative
     solution which includes all sub-fields of all pointed-to
     variables of ptr.  */
  if (offset == NULL_TREE
      || TREE_CODE (offset) != INTEGER_CST)
    rhsoffset = UNKNOWN_OFFSET;
  else
    {
      /* Sign-extend the offset.  */
      offset_int soffset = offset_int::from (wi::to_wide (offset), SIGNED);
      if (!wi::fits_shwi_p (soffset))
	rhsoffset = UNKNOWN_OFFSET;
      else
	{
	  /* Make sure the bit-offset also fits; the multiply is done in
	     unsigned arithmetic and checked by dividing back, so an
	     overflowing conversion to bits degrades to UNKNOWN_OFFSET.  */
	  HOST_WIDE_INT rhsunitoffset = soffset.to_shwi ();
	  rhsoffset = rhsunitoffset * (unsigned HOST_WIDE_INT) BITS_PER_UNIT;
	  if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
	    rhsoffset = UNKNOWN_OFFSET;
	}
    }

  get_constraint_for_rhs (ptr, results);
  if (rhsoffset == 0)
    return;

  /* As we are eventually appending to the solution do not use
     vec::iterate here.  */
  n = results->length ();
  for (j = 0; j < n; j++)
    {
      varinfo_t curr;
      c = (*results)[j];
      curr = get_varinfo (c.var);

      if (c.type == ADDRESSOF
	  /* If this varinfo represents a full variable just use it.  */
	  && curr->is_full_var)
	;
      else if (c.type == ADDRESSOF
	       /* If we do not know the offset add all subfields.  */
	       && rhsoffset == UNKNOWN_OFFSET)
	{
	  /* Walk the whole field chain starting at the variable head and
	     add every subfield (the original entry is kept as-is).  */
	  varinfo_t temp = get_varinfo (curr->head);
	  do
	    {
	      struct constraint_expr c2;
	      c2.var = temp->id;
	      c2.type = ADDRESSOF;
	      c2.offset = 0;
	      if (c2.var != c.var)
		results->safe_push (c2);
	      temp = vi_next (temp);
	    }
	  while (temp);
	}
      else if (c.type == ADDRESSOF)
	{
	  varinfo_t temp;
	  unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;

	  /* If curr->offset + rhsoffset is less than zero adjust it.
	     The addition above wraps in unsigned arithmetic; a wrapped
	     result is detected by comparing against curr->offset.  */
	  if (rhsoffset < 0
	      && curr->offset < offset)
	    offset = 0;

	  /* We have to include all fields that overlap the current
	     field shifted by rhsoffset.  And we include at least
	     the last or the first field of the variable to represent
	     reachability of off-bound addresses, in particular &object + 1,
	     conservatively correct.  */
	  temp = first_or_preceding_vi_for_offset (curr, offset);
	  c.var = temp->id;
	  c.offset = 0;
	  temp = vi_next (temp);
	  while (temp
		 && temp->offset < offset + curr->size)
	    {
	      struct constraint_expr c2;
	      c2.var = temp->id;
	      c2.type = ADDRESSOF;
	      c2.offset = 0;
	      results->safe_push (c2);
	      temp = vi_next (temp);
	    }
	}
      else if (c.type == SCALAR)
	{
	  /* For scalars simply record the (known or unknown) offset; the
	     solver applies it when resolving the constraint.  */
	  gcc_assert (c.offset == 0);
	  c.offset = rhsoffset;
	}
      else
	/* We shouldn't get any DEREFs here.  */
	gcc_unreachable ();

      (*results)[j] = c;
    }
}
3254
3255
/* Given a COMPONENT_REF T, return the constraint_expr vector for it.
   If ADDRESS_P is true the result will be taken its address of.
   If LHS_P is true then the constraint expression is assumed to be used
   as the lhs.  On success exactly one constraint remains in *RESULTS,
   adjusted for the access position within the base variable.  */

static void
get_constraint_for_component_ref (tree t, vec<ce_s> *results,
				  bool address_p, bool lhs_p)
{
  tree orig_t = t;
  poly_int64 bitsize = -1;
  poly_int64 bitmaxsize = -1;
  poly_int64 bitpos;
  bool reverse;
  tree forzero;

  /* Some people like to do cute things like take the address of
     &0->a.b.  Strip down to the innermost base to detect a literal
     zero there.  */
  forzero = t;
  while (handled_component_p (forzero)
	 || INDIRECT_REF_P (forzero)
	 || TREE_CODE (forzero) == MEM_REF)
    forzero = TREE_OPERAND (forzero, 0);

  if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
    {
      struct constraint_expr temp;

      temp.offset = 0;
      temp.var = integer_id;
      temp.type = SCALAR;
      results->safe_push (temp);
      return;
    }

  t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize, &reverse);

  /* We can end up here for component references on a
     VIEW_CONVERT_EXPR <>(&foobar) or things like a
     BIT_FIELD_REF <&MEM[(void *)&b + 4B], ...>.  So for
     symbolic constants simply give up.  */
  if (TREE_CODE (t) == ADDR_EXPR)
    {
      constraint_expr result;
      result.type = SCALAR;
      result.var = anything_id;
      result.offset = 0;
      results->safe_push (result);
      return;
    }

  /* Avoid creating pointer-offset constraints, so handle MEM_REF
     offsets directly.  Pretend to take the address of the base,
     we'll take care of adding the required subset of sub-fields below.  */
  if (TREE_CODE (t) == MEM_REF
      && !integer_zerop (TREE_OPERAND (t, 0)))
    {
      /* Fold the MEM_REF offset into bitpos; if the combined offset does
	 not fit a poly_int64 fall back to position zero with unknown
	 extent.  */
      poly_offset_int off = mem_ref_offset (t);
      off <<= LOG2_BITS_PER_UNIT;
      off += bitpos;
      poly_int64 off_hwi;
      if (off.to_shwi (&off_hwi))
	bitpos = off_hwi;
      else
	{
	  bitpos = 0;
	  bitmaxsize = -1;
	}
      get_constraint_for_1 (TREE_OPERAND (t, 0), results, false, lhs_p);
      do_deref (results);
    }
  else
    get_constraint_for_1 (t, results, true, lhs_p);

  /* Strip off nothing_id.  */
  if (results->length () == 2)
    {
      gcc_assert ((*results)[0].var == nothing_id);
      results->unordered_remove (0);
    }
  gcc_assert (results->length () == 1);
  struct constraint_expr &result = results->last ();

  if (result.type == SCALAR
      && get_varinfo (result.var)->is_full_var)
    /* For single-field vars do not bother about the offset.  */
    result.offset = 0;
  else if (result.type == SCALAR)
    {
      /* In languages like C, you can access one past the end of an
	 array.  You aren't allowed to dereference it, so we can
	 ignore this constraint.  When we handle pointer subtraction,
	 we may have to do something cute here.  */

      if (maybe_lt (poly_uint64 (bitpos), get_varinfo (result.var)->fullsize)
	  && maybe_ne (bitmaxsize, 0))
	{
	  /* It's also not true that the constraint will actually start at the
	     right offset, it may start in some padding.  We only care about
	     setting the constraint to the first actual field it touches, so
	     walk to find it.  */
	  struct constraint_expr cexpr = result;
	  varinfo_t curr;
	  results->pop ();
	  cexpr.offset = 0;
	  for (curr = get_varinfo (cexpr.var); curr; curr = vi_next (curr))
	    {
	      if (ranges_maybe_overlap_p (poly_int64 (curr->offset),
					  curr->size, bitpos, bitmaxsize))
		{
		  cexpr.var = curr->id;
		  results->safe_push (cexpr);
		  /* When taking the address a single overlapping field
		     is enough to represent reachability.  */
		  if (address_p)
		    break;
		}
	    }
	  /* If we are going to take the address of this field then
	     to be able to compute reachability correctly add at least
	     the last field of the variable.  */
	  if (address_p && results->length () == 0)
	    {
	      curr = get_varinfo (cexpr.var);
	      while (curr->next != 0)
		curr = vi_next (curr);
	      cexpr.var = curr->id;
	      results->safe_push (cexpr);
	    }
	  else if (results->length () == 0)
	    /* Assert that we found *some* field there.  The user couldn't be
	       accessing *only* padding.  */
	    /* Still the user could access one past the end of an array
	       embedded in a struct resulting in accessing *only* padding.  */
	    /* Or accessing only padding via type-punning to a type
	       that has a field just in padding space.  */
	    {
	      cexpr.type = SCALAR;
	      cexpr.var = anything_id;
	      cexpr.offset = 0;
	      results->safe_push (cexpr);
	    }
	}
      else if (known_eq (bitmaxsize, 0))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Access to zero-sized part of variable, "
		     "ignoring\n");
	}
      else
	if (dump_file && (dump_flags & TDF_DETAILS))
	  fprintf (dump_file, "Access to past the end of variable, ignoring\n");
    }
  else if (result.type == DEREF)
    {
      /* If we do not know exactly where the access goes say so.  Note
	 that only for non-structure accesses we know that we access
	 at most one subfield of any variable.  */
      HOST_WIDE_INT const_bitpos;
      if (!bitpos.is_constant (&const_bitpos)
	  || const_bitpos == -1
	  || maybe_ne (bitsize, bitmaxsize)
	  || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
	  || result.offset == UNKNOWN_OFFSET)
	result.offset = UNKNOWN_OFFSET;
      else
	result.offset += const_bitpos;
    }
  else if (result.type == ADDRESSOF)
    {
      /* We can end up here for component references on constants like
	 VIEW_CONVERT_EXPR <>({ 0, 1, 2, 3 })[i].  */
      result.type = SCALAR;
      result.var = anything_id;
      result.offset = 0;
    }
  else
    gcc_unreachable ();
}
3433
3434
3435 /* Dereference the constraint expression CONS, and return the result.
3436 DEREF (ADDRESSOF) = SCALAR
3437 DEREF (SCALAR) = DEREF
3438 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3439 This is needed so that we can handle dereferencing DEREF constraints. */
3440
3441 static void
do_deref(vec<ce_s> * constraints)3442 do_deref (vec<ce_s> *constraints)
3443 {
3444 struct constraint_expr *c;
3445 unsigned int i = 0;
3446
3447 FOR_EACH_VEC_ELT (*constraints, i, c)
3448 {
3449 if (c->type == SCALAR)
3450 c->type = DEREF;
3451 else if (c->type == ADDRESSOF)
3452 c->type = SCALAR;
3453 else if (c->type == DEREF)
3454 {
3455 struct constraint_expr tmplhs;
3456 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp", true);
3457 process_constraint (new_constraint (tmplhs, *c));
3458 c->var = tmplhs.var;
3459 }
3460 else
3461 gcc_unreachable ();
3462 }
3463 }
3464
3465 /* Given a tree T, return the constraint expression for taking the
3466 address of it. */
3467
3468 static void
get_constraint_for_address_of(tree t,vec<ce_s> * results)3469 get_constraint_for_address_of (tree t, vec<ce_s> *results)
3470 {
3471 struct constraint_expr *c;
3472 unsigned int i;
3473
3474 get_constraint_for_1 (t, results, true, true);
3475
3476 FOR_EACH_VEC_ELT (*results, i, c)
3477 {
3478 if (c->type == DEREF)
3479 c->type = SCALAR;
3480 else
3481 c->type = ADDRESSOF;
3482 }
3483 }
3484
/* Given a tree T, return the constraint expression for it.  ADDRESS_P
   is true when the caller is taking the address of T; LHS_P is true
   when T is used as the lhs of an assignment.  Results are appended
   to *RESULTS.  */

static void
get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
		      bool lhs_p)
{
  struct constraint_expr temp;

  /* x = integer is all glommed to a single variable, which doesn't
     point to anything by itself.  That is, of course, unless it is an
     integer constant being treated as a pointer, in which case, we
     will return that this is really the addressof anything.  This
     happens below, since it will fall into the default case.  The only
     case we know something about an integer treated like a pointer is
     when it is the NULL pointer, and then we just say it points to
     NULL.

     Do not do that if -fno-delete-null-pointer-checks though, because
     in that case *NULL does not fail, so it _should_ alias *anything.
     It is not worth adding a new option or renaming the existing one,
     since this case is relatively obscure.  */
  if ((TREE_CODE (t) == INTEGER_CST
       && integer_zerop (t))
      /* The only valid CONSTRUCTORs in gimple with pointer typed
	 elements are zero-initializer.  But in IPA mode we also
	 process global initializers, so verify at least.  */
      || (TREE_CODE (t) == CONSTRUCTOR
	  && CONSTRUCTOR_NELTS (t) == 0))
    {
      if (flag_delete_null_pointer_checks)
	temp.var = nothing_id;
      else
	temp.var = nonlocal_id;
      temp.type = ADDRESSOF;
      temp.offset = 0;
      results->safe_push (temp);
      return;
    }

  /* String constants are read-only, ideally we'd have a CONST_DECL
     for those.  */
  if (TREE_CODE (t) == STRING_CST)
    {
      temp.var = string_id;
      temp.type = SCALAR;
      temp.offset = 0;
      results->safe_push (temp);
      return;
    }

  /* Dispatch on the tree code class; unhandled codes fall through to
     the conservative &ANYTHING default at the end.  */
  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case tcc_expression:
      {
	switch (TREE_CODE (t))
	  {
	  case ADDR_EXPR:
	    get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
	    return;
	  default:;
	  }
	break;
      }
    case tcc_reference:
      {
	switch (TREE_CODE (t))
	  {
	  case MEM_REF:
	    {
	      struct constraint_expr cs;
	      varinfo_t vi, curr;
	      get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
					     TREE_OPERAND (t, 1), results);
	      do_deref (results);

	      /* If we are not taking the address then make sure to process
		 all subvariables we might access.  */
	      if (address_p)
		return;

	      cs = results->last ();
	      if (cs.type == DEREF
		  && type_can_have_subvars (TREE_TYPE (t)))
		{
		  /* For dereferences this means we have to defer it
		     to solving time.  */
		  results->last ().offset = UNKNOWN_OFFSET;
		  return;
		}
	      if (cs.type != SCALAR)
		return;

	      /* Add all subfields of the variable that the access may
		 touch, bounded by the access size (or all of them if
		 the size is not a constant).  */
	      vi = get_varinfo (cs.var);
	      curr = vi_next (vi);
	      if (!vi->is_full_var
		  && curr)
		{
		  unsigned HOST_WIDE_INT size;
		  if (tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (t))))
		    size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t)));
		  else
		    size = -1;
		  for (; curr; curr = vi_next (curr))
		    {
		      if (curr->offset - vi->offset < size)
			{
			  cs.var = curr->id;
			  results->safe_push (cs);
			}
		      else
			break;
		    }
		}
	      return;
	    }
	  case ARRAY_REF:
	  case ARRAY_RANGE_REF:
	  case COMPONENT_REF:
	  case IMAGPART_EXPR:
	  case REALPART_EXPR:
	  case BIT_FIELD_REF:
	    get_constraint_for_component_ref (t, results, address_p, lhs_p);
	    return;
	  case VIEW_CONVERT_EXPR:
	    /* A view-conversion does not change what is pointed to.  */
	    get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
				  lhs_p);
	    return;
	  /* We are missing handling for TARGET_MEM_REF here.  */
	  default:;
	  }
	break;
      }
    case tcc_exceptional:
      {
	switch (TREE_CODE (t))
	  {
	  case SSA_NAME:
	    {
	      get_constraint_for_ssa_var (t, results, address_p);
	      return;
	    }
	  case CONSTRUCTOR:
	    {
	      /* Collect constraints for each initializer element and
		 merge them into *RESULTS.  */
	      unsigned int i;
	      tree val;
	      auto_vec<ce_s> tmp;
	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
		{
		  struct constraint_expr *rhsp;
		  unsigned j;
		  get_constraint_for_1 (val, &tmp, address_p, lhs_p);
		  FOR_EACH_VEC_ELT (tmp, j, rhsp)
		    results->safe_push (*rhsp);
		  tmp.truncate (0);
		}
	      /* We do not know whether the constructor was complete,
		 so technically we have to add &NOTHING or &ANYTHING
		 like we do for an empty constructor as well.  */
	      return;
	    }
	  default:;
	  }
	break;
      }
    case tcc_declaration:
      {
	get_constraint_for_ssa_var (t, results, address_p);
	return;
      }
    case tcc_constant:
      {
	/* We cannot refer to automatic variables through constants.  */
	temp.type = ADDRESSOF;
	temp.var = nonlocal_id;
	temp.offset = 0;
	results->safe_push (temp);
	return;
      }
    default:;
    }

  /* The default fallback is a constraint from anything.  */
  temp.type = ADDRESSOF;
  temp.var = anything_id;
  temp.offset = 0;
  results->safe_push (temp);
}
3672
3673 /* Given a gimple tree T, return the constraint expression vector for it. */
3674
3675 static void
get_constraint_for(tree t,vec<ce_s> * results)3676 get_constraint_for (tree t, vec<ce_s> *results)
3677 {
3678 gcc_assert (results->length () == 0);
3679
3680 get_constraint_for_1 (t, results, false, true);
3681 }
3682
3683 /* Given a gimple tree T, return the constraint expression vector for it
3684 to be used as the rhs of a constraint. */
3685
3686 static void
get_constraint_for_rhs(tree t,vec<ce_s> * results)3687 get_constraint_for_rhs (tree t, vec<ce_s> *results)
3688 {
3689 gcc_assert (results->length () == 0);
3690
3691 get_constraint_for_1 (t, results, false, false);
3692 }
3693
3694
3695 /* Efficiently generates constraints from all entries in *RHSC to all
3696 entries in *LHSC. */
3697
3698 static void
process_all_all_constraints(vec<ce_s> lhsc,vec<ce_s> rhsc)3699 process_all_all_constraints (vec<ce_s> lhsc,
3700 vec<ce_s> rhsc)
3701 {
3702 struct constraint_expr *lhsp, *rhsp;
3703 unsigned i, j;
3704
3705 if (lhsc.length () <= 1 || rhsc.length () <= 1)
3706 {
3707 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3708 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
3709 process_constraint (new_constraint (*lhsp, *rhsp));
3710 }
3711 else
3712 {
3713 struct constraint_expr tmp;
3714 tmp = new_scalar_tmp_constraint_exp ("allalltmp", true);
3715 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
3716 process_constraint (new_constraint (tmp, *rhsp));
3717 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3718 process_constraint (new_constraint (*lhsp, tmp));
3719 }
3720 }
3721
/* Handle aggregate copies by expanding into copies of the respective
   fields of the structures.  LHSOP and RHSOP are the gimple operands
   of the aggregate assignment.  */

static void
do_structure_copy (tree lhsop, tree rhsop)
{
  struct constraint_expr *lhsp, *rhsp;
  auto_vec<ce_s> lhsc;
  auto_vec<ce_s> rhsc;
  unsigned j;

  get_constraint_for (lhsop, &lhsc);
  get_constraint_for_rhs (rhsop, &rhsc);
  lhsp = &lhsc[0];
  rhsp = &rhsc[0];
  if (lhsp->type == DEREF
      || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
      || rhsp->type == DEREF)
    {
      /* With a dereference on either side we cannot match up fields;
	 widen the offsets to unknown and copy everything to
	 everything.  */
      if (lhsp->type == DEREF)
	{
	  gcc_assert (lhsc.length () == 1);
	  lhsp->offset = UNKNOWN_OFFSET;
	}
      if (rhsp->type == DEREF)
	{
	  gcc_assert (rhsc.length () == 1);
	  rhsp->offset = UNKNOWN_OFFSET;
	}
      process_all_all_constraints (lhsc, rhsc);
    }
  else if (lhsp->type == SCALAR
	   && (rhsp->type == SCALAR
	       || rhsp->type == ADDRESSOF))
    {
      HOST_WIDE_INT lhssize, lhsoffset;
      HOST_WIDE_INT rhssize, rhsoffset;
      bool reverse;
      unsigned k = 0;
      /* If either access position is unknown fall back to the
	 conservative all-to-all expansion.  */
      if (!get_ref_base_and_extent_hwi (lhsop, &lhsoffset, &lhssize, &reverse)
	  || !get_ref_base_and_extent_hwi (rhsop, &rhsoffset, &rhssize,
					   &reverse))
	{
	  process_all_all_constraints (lhsc, rhsc);
	  return;
	}
      /* Walk both field lists in lockstep (j over lhs fields, k over
	 rhs fields), emitting a copy for each pair of fields whose
	 extents — biased into a common coordinate system by the
	 opposite side's access offset — overlap.  */
      for (j = 0; lhsc.iterate (j, &lhsp);)
	{
	  varinfo_t lhsv, rhsv;
	  rhsp = &rhsc[k];
	  lhsv = get_varinfo (lhsp->var);
	  rhsv = get_varinfo (rhsp->var);
	  if (lhsv->may_have_pointers
	      && (lhsv->is_full_var
		  || rhsv->is_full_var
		  || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
				       rhsv->offset + lhsoffset, rhsv->size)))
	    process_constraint (new_constraint (*lhsp, *rhsp));
	  /* Advance whichever cursor ends first; the rhs cursor stops
	     at its last field.  */
	  if (!rhsv->is_full_var
	      && (lhsv->is_full_var
		  || (lhsv->offset + rhsoffset + lhsv->size
		      > rhsv->offset + lhsoffset + rhsv->size)))
	    {
	      ++k;
	      if (k >= rhsc.length ())
		break;
	    }
	  else
	    ++j;
	}
    }
  else
    gcc_unreachable ();
}
3796
3797 /* Create constraints ID = { rhsc }. */
3798
3799 static void
make_constraints_to(unsigned id,vec<ce_s> rhsc)3800 make_constraints_to (unsigned id, vec<ce_s> rhsc)
3801 {
3802 struct constraint_expr *c;
3803 struct constraint_expr includes;
3804 unsigned int j;
3805
3806 includes.var = id;
3807 includes.offset = 0;
3808 includes.type = SCALAR;
3809
3810 FOR_EACH_VEC_ELT (rhsc, j, c)
3811 process_constraint (new_constraint (includes, *c));
3812 }
3813
3814 /* Create a constraint ID = OP. */
3815
3816 static void
make_constraint_to(unsigned id,tree op)3817 make_constraint_to (unsigned id, tree op)
3818 {
3819 auto_vec<ce_s> rhsc;
3820 get_constraint_for_rhs (op, &rhsc);
3821 make_constraints_to (id, rhsc);
3822 }
3823
3824 /* Create a constraint ID = &FROM. */
3825
3826 static void
make_constraint_from(varinfo_t vi,int from)3827 make_constraint_from (varinfo_t vi, int from)
3828 {
3829 struct constraint_expr lhs, rhs;
3830
3831 lhs.var = vi->id;
3832 lhs.offset = 0;
3833 lhs.type = SCALAR;
3834
3835 rhs.var = from;
3836 rhs.offset = 0;
3837 rhs.type = ADDRESSOF;
3838 process_constraint (new_constraint (lhs, rhs));
3839 }
3840
3841 /* Create a constraint ID = FROM. */
3842
3843 static void
make_copy_constraint(varinfo_t vi,int from)3844 make_copy_constraint (varinfo_t vi, int from)
3845 {
3846 struct constraint_expr lhs, rhs;
3847
3848 lhs.var = vi->id;
3849 lhs.offset = 0;
3850 lhs.type = SCALAR;
3851
3852 rhs.var = from;
3853 rhs.offset = 0;
3854 rhs.type = SCALAR;
3855 process_constraint (new_constraint (lhs, rhs));
3856 }
3857
/* Make constraints necessary to make OP escape, by adding all of OP's
   rhs constraint expressions to the ESCAPED solution.  */

static void
make_escape_constraint (tree op)
{
  make_constraint_to (escaped_id, op);
}
3865
3866 /* Make constraint necessary to make all indirect references
3867 from VI escape. */
3868
3869 static void
make_indirect_escape_constraint(varinfo_t vi)3870 make_indirect_escape_constraint (varinfo_t vi)
3871 {
3872 struct constraint_expr lhs, rhs;
3873 /* escaped = *(VAR + UNKNOWN); */
3874 lhs.type = SCALAR;
3875 lhs.var = escaped_id;
3876 lhs.offset = 0;
3877 rhs.type = DEREF;
3878 rhs.var = vi->id;
3879 rhs.offset = UNKNOWN_OFFSET;
3880 process_constraint (new_constraint (lhs, rhs));
3881 }
3882
3883 /* Add constraints to that the solution of VI is transitively closed. */
3884
3885 static void
make_transitive_closure_constraints(varinfo_t vi)3886 make_transitive_closure_constraints (varinfo_t vi)
3887 {
3888 struct constraint_expr lhs, rhs;
3889
3890 /* VAR = *(VAR + UNKNOWN); */
3891 lhs.type = SCALAR;
3892 lhs.var = vi->id;
3893 lhs.offset = 0;
3894 rhs.type = DEREF;
3895 rhs.var = vi->id;
3896 rhs.offset = UNKNOWN_OFFSET;
3897 process_constraint (new_constraint (lhs, rhs));
3898 }
3899
3900 /* Add constraints to that the solution of VI has all subvariables added. */
3901
3902 static void
make_any_offset_constraints(varinfo_t vi)3903 make_any_offset_constraints (varinfo_t vi)
3904 {
3905 struct constraint_expr lhs, rhs;
3906
3907 /* VAR = VAR + UNKNOWN; */
3908 lhs.type = SCALAR;
3909 lhs.var = vi->id;
3910 lhs.offset = 0;
3911 rhs.type = SCALAR;
3912 rhs.var = vi->id;
3913 rhs.offset = UNKNOWN_OFFSET;
3914 process_constraint (new_constraint (lhs, rhs));
3915 }
3916
/* Temporary storage for the fake VAR_DECLs created by
   build_fake_var_decl below.  */
struct obstack fake_var_decl_obstack;
3919
/* Build a fake VAR_DECL of TYPE acting as referrer to a DECL_UID.
   The decl lives on fake_var_decl_obstack rather than in GC memory,
   gets a fresh DECL_UID and a PT uid of -1 (all-ones, since the
   setter takes an unsigned value).  */

static tree
build_fake_var_decl (tree type)
{
  tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
  memset (decl, 0, sizeof (struct tree_var_decl));
  TREE_SET_CODE (decl, VAR_DECL);
  TREE_TYPE (decl) = type;
  DECL_UID (decl) = allocate_decl_uid ();
  SET_DECL_PT_UID (decl, -1);
  layout_decl (decl, 0);
  return decl;
}
3934
3935 /* Create a new artificial heap variable with NAME.
3936 Return the created variable. */
3937
3938 static varinfo_t
make_heapvar(const char * name,bool add_id)3939 make_heapvar (const char *name, bool add_id)
3940 {
3941 varinfo_t vi;
3942 tree heapvar;
3943
3944 heapvar = build_fake_var_decl (ptr_type_node);
3945 DECL_EXTERNAL (heapvar) = 1;
3946
3947 vi = new_var_info (heapvar, name, add_id);
3948 vi->is_heap_var = true;
3949 vi->is_unknown_size_var = true;
3950 vi->offset = 0;
3951 vi->fullsize = ~0;
3952 vi->size = ~0;
3953 vi->is_full_var = true;
3954 insert_vi_for_tree (heapvar, vi);
3955
3956 return vi;
3957 }
3958
3959 /* Create a new artificial heap variable with NAME and make a
3960 constraint from it to LHS. Set flags according to a tag used
3961 for tracking restrict pointers. */
3962
3963 static varinfo_t
make_constraint_from_restrict(varinfo_t lhs,const char * name,bool add_id)3964 make_constraint_from_restrict (varinfo_t lhs, const char *name, bool add_id)
3965 {
3966 varinfo_t vi = make_heapvar (name, add_id);
3967 vi->is_restrict_var = 1;
3968 vi->is_global_var = 1;
3969 vi->may_have_pointers = 1;
3970 make_constraint_from (lhs, vi->id);
3971 return vi;
3972 }
3973
3974 /* Create a new artificial heap variable with NAME and make a
3975 constraint from it to LHS. Set flags according to a tag used
3976 for tracking restrict pointers and make the artificial heap
3977 point to global memory. */
3978
3979 static varinfo_t
make_constraint_from_global_restrict(varinfo_t lhs,const char * name,bool add_id)3980 make_constraint_from_global_restrict (varinfo_t lhs, const char *name,
3981 bool add_id)
3982 {
3983 varinfo_t vi = make_constraint_from_restrict (lhs, name, add_id);
3984 make_copy_constraint (vi, nonlocal_id);
3985 return vi;
3986 }
3987
/* In IPA mode there are varinfos for different aspects of each
   function designator.  One for the points-to set of the return
   value, one for the variables that are clobbered by the function,
   one for its uses and one for each parameter (including a single
   glob for remaining variadic arguments).  These values are the
   offsets of the parts within the function varinfo.  */

enum { fi_clobbers = 1, fi_uses = 2,
       fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
3996
3997 /* Get a constraint for the requested part of a function designator FI
3998 when operating in IPA mode. */
3999
4000 static struct constraint_expr
get_function_part_constraint(varinfo_t fi,unsigned part)4001 get_function_part_constraint (varinfo_t fi, unsigned part)
4002 {
4003 struct constraint_expr c;
4004
4005 gcc_assert (in_ipa_mode);
4006
4007 if (fi->id == anything_id)
4008 {
4009 /* ??? We probably should have a ANYFN special variable. */
4010 c.var = anything_id;
4011 c.offset = 0;
4012 c.type = SCALAR;
4013 }
4014 else if (fi->decl && TREE_CODE (fi->decl) == FUNCTION_DECL)
4015 {
4016 varinfo_t ai = first_vi_for_offset (fi, part);
4017 if (ai)
4018 c.var = ai->id;
4019 else
4020 c.var = anything_id;
4021 c.offset = 0;
4022 c.type = SCALAR;
4023 }
4024 else
4025 {
4026 c.var = fi->id;
4027 c.offset = part;
4028 c.type = DEREF;
4029 }
4030
4031 return c;
4032 }
4033
/* For non-IPA mode, generate constraints necessary for a call on the
   RHS.  STMT is the call; constraint expressions for what the call
   may return are pushed onto *RESULTS.  */

static void
handle_rhs_call (gcall *stmt, vec<ce_s> *results)
{
  struct constraint_expr rhsc;
  unsigned i;
  bool returns_uses = false;

  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      int flags = gimple_call_arg_flags (stmt, i);

      /* If the argument is not used we can ignore it.  */
      if (flags & EAF_UNUSED)
	continue;

      /* As we compute ESCAPED context-insensitive we do not gain
	 any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
	 set.  The argument would still get clobbered through the
	 escape solution.  */
      if ((flags & EAF_NOCLOBBER)
	  && (flags & (EAF_NOESCAPE | EAF_NODIRECTESCAPE)))
	{
	  /* Not clobbered and (at least directly) not escaping:
	     record the argument in the call's uses solution only.  */
	  varinfo_t uses = get_call_use_vi (stmt);
	  varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
	  tem->is_reg_var = true;
	  make_constraint_to (tem->id, arg);
	  make_any_offset_constraints (tem);
	  if (!(flags & EAF_DIRECT))
	    make_transitive_closure_constraints (tem);
	  make_copy_constraint (uses, tem->id);
	  if (!(flags & (EAF_NOESCAPE | EAF_DIRECT)))
	    make_indirect_escape_constraint (tem);
	  returns_uses = true;
	}
      else if (flags & (EAF_NOESCAPE | EAF_NODIRECTESCAPE))
	{
	  /* Possibly clobbered but not escaping: record the argument
	     in both the uses and clobbers solutions of the call.  */
	  struct constraint_expr lhs, rhs;
	  varinfo_t uses = get_call_use_vi (stmt);
	  varinfo_t clobbers = get_call_clobber_vi (stmt);
	  varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
	  tem->is_reg_var = true;
	  make_constraint_to (tem->id, arg);
	  make_any_offset_constraints (tem);
	  if (!(flags & EAF_DIRECT))
	    make_transitive_closure_constraints (tem);
	  make_copy_constraint (uses, tem->id);
	  make_copy_constraint (clobbers, tem->id);
	  /* Add *tem = nonlocal, do not add *tem = callused as
	     EAF_NOESCAPE parameters do not escape to other parameters
	     and all other uses appear in NONLOCAL as well.  */
	  lhs.type = DEREF;
	  lhs.var = tem->id;
	  lhs.offset = 0;
	  rhs.type = SCALAR;
	  rhs.var = nonlocal_id;
	  rhs.offset = 0;
	  process_constraint (new_constraint (lhs, rhs));
	  if (!(flags & (EAF_NOESCAPE | EAF_DIRECT)))
	    make_indirect_escape_constraint (tem);
	  returns_uses = true;
	}
      else
	/* Without any escape information the argument fully escapes.  */
	make_escape_constraint (arg);
    }

  /* If we added to the calls uses solution make sure we account for
     pointers to it to be returned.  */
  if (returns_uses)
    {
      rhsc.var = get_call_use_vi (stmt)->id;
      rhsc.offset = UNKNOWN_OFFSET;
      rhsc.type = SCALAR;
      results->safe_push (rhsc);
    }

  /* The static chain escapes as well.  */
  if (gimple_call_chain (stmt))
    make_escape_constraint (gimple_call_chain (stmt));

  /* And if we applied NRV the address of the return slot escapes as well.  */
  if (gimple_call_return_slot_opt_p (stmt)
      && gimple_call_lhs (stmt) != NULL_TREE
      && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
    {
      auto_vec<ce_s> tmpc;
      struct constraint_expr lhsc, *c;
      get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
      lhsc.var = escaped_id;
      lhsc.offset = 0;
      lhsc.type = SCALAR;
      FOR_EACH_VEC_ELT (tmpc, i, c)
	process_constraint (new_constraint (lhsc, *c));
    }

  /* Regular functions return nonlocal memory.  */
  rhsc.var = nonlocal_id;
  rhsc.offset = 0;
  rhsc.type = SCALAR;
  results->safe_push (rhsc);
}
4138
4139 /* For non-IPA mode, generate constraints necessary for a call
4140 that returns a pointer and assigns it to LHS. This simply makes
4141 the LHS point to global and escaped variables. */
4142
static void
handle_lhs_call (gcall *stmt, tree lhs, int flags, vec<ce_s> rhsc,
		 tree fndecl)
{
  auto_vec<ce_s> lhsc;

  get_constraint_for (lhs, &lhsc);
  /* If the store is to a global decl make sure to
     add proper escape constraints.  */
  lhs = get_base_address (lhs);
  if (lhs
      && DECL_P (lhs)
      && is_global_var (lhs))
    {
      /* Storing into a global makes whatever we store escape, so add
	 ESCAPED as an additional LHS of the all-to-all constraints
	 generated below.  */
      struct constraint_expr tmpc;
      tmpc.var = escaped_id;
      tmpc.offset = 0;
      tmpc.type = SCALAR;
      lhsc.safe_push (tmpc);
    }

  /* If the call returns an argument unmodified override the rhs
     constraints.  */
  if (flags & ERF_RETURNS_ARG
      && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
    {
      /* The returned value is exactly the argument at the encoded
	 position, so use that argument's constraints instead of the
	 RHSC the caller computed.  */
      tree arg;
      rhsc.create (0);
      arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
      get_constraint_for (arg, &rhsc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.release ();
    }
  else if (flags & ERF_NOALIAS)
    {
      /* A noalias return (malloc-like) points to fresh heap storage.  */
      varinfo_t vi;
      struct constraint_expr tmpc;
      rhsc.create (0);
      vi = make_heapvar ("HEAP", true);
      /* We are marking allocated storage local, we deal with it becoming
	 global by escaping and setting of vars_contains_escaped_heap.  */
      DECL_EXTERNAL (vi->decl) = 0;
      vi->is_global_var = 0;
      /* If this is not a real malloc call assume the memory was
	 initialized and thus may point to global memory.  All
	 builtin functions with the malloc attribute behave in a sane way.  */
      if (!fndecl
	  || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
	make_constraint_from (vi, nonlocal_id);
      tmpc.var = vi->id;
      tmpc.offset = 0;
      tmpc.type = ADDRESSOF;
      rhsc.safe_push (tmpc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.release ();
    }
  else
    /* Otherwise the LHS may point to anything the callee could have
       returned, as described by the caller-provided RHSC.  */
    process_all_all_constraints (lhsc, rhsc);
}
4202
4203 /* For non-IPA mode, generate constraints necessary for a call of a
4204 const function that returns a pointer in the statement STMT. */
4205
static void
handle_const_call (gcall *stmt, vec<ce_s> *results)
{
  struct constraint_expr rhsc;
  unsigned int k;
  /* Whether the call-use varinfo received any constraints and thus
     needs to be pushed as a possible source of the return value.  */
  bool need_uses = false;

  /* Treat nested const functions the same as pure functions as far
     as the static chain is concerned.  */
  if (gimple_call_chain (stmt))
    {
      varinfo_t uses = get_call_use_vi (stmt);
      make_constraint_to (uses->id, gimple_call_chain (stmt));
      need_uses = true;
    }

  /* And if we applied NRV the address of the return slot escapes as well.  */
  if (gimple_call_return_slot_opt_p (stmt)
      && gimple_call_lhs (stmt) != NULL_TREE
      && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
    {
      varinfo_t uses = get_call_use_vi (stmt);
      auto_vec<ce_s> tmpc;
      get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
      make_constraints_to (uses->id, tmpc);
      need_uses = true;
    }

  if (need_uses)
    {
      varinfo_t uses = get_call_use_vi (stmt);
      make_any_offset_constraints (uses);
      make_transitive_closure_constraints (uses);
      /* The function may return anything reachable from the call uses.  */
      rhsc.var = uses->id;
      rhsc.offset = 0;
      rhsc.type = SCALAR;
      results->safe_push (rhsc);
    }

  /* May return offsetted arguments.  */
  varinfo_t tem = NULL;
  if (gimple_call_num_args (stmt) != 0)
    {
      /* Collect all argument constraints in a single temporary
	 register variable.  */
      tem = new_var_info (NULL_TREE, "callarg", true);
      tem->is_reg_var = true;
    }
  for (k = 0; k < gimple_call_num_args (stmt); ++k)
    {
      tree arg = gimple_call_arg (stmt, k);
      auto_vec<ce_s> argc;
      get_constraint_for_rhs (arg, &argc);
      make_constraints_to (tem->id, argc);
    }
  if (tem)
    {
      /* Use UNKNOWN_OFFSET so arbitrary offsets of the arguments may
	 be returned as well.  */
      ce_s ce;
      ce.type = SCALAR;
      ce.var = tem->id;
      ce.offset = UNKNOWN_OFFSET;
      results->safe_push (ce);
    }

  /* May return addresses of globals.  */
  rhsc.var = nonlocal_id;
  rhsc.offset = 0;
  rhsc.type = ADDRESSOF;
  results->safe_push (rhsc);
}
4274
4275 /* For non-IPA mode, generate constraints necessary for a call to a
4276 pure function in statement STMT. */
4277
static void
handle_pure_call (gcall *stmt, vec<ce_s> *results)
{
  struct constraint_expr rhsc;
  unsigned i;
  /* Lazily created call-use varinfo; stays NULL if nothing is used.  */
  varinfo_t uses = NULL;

  /* Memory reached from pointer arguments is call-used.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      int flags = gimple_call_arg_flags (stmt, i);

      /* If the argument is not used we can ignore it.  */
      if (flags & EAF_UNUSED)
	continue;
      if (!uses)
	{
	  uses = get_call_use_vi (stmt);
	  make_any_offset_constraints (uses);
	  make_transitive_closure_constraints (uses);
	}
      make_constraint_to (uses->id, arg);
    }

  /* The static chain is used as well.  */
  if (gimple_call_chain (stmt))
    {
      if (!uses)
	{
	  uses = get_call_use_vi (stmt);
	  make_any_offset_constraints (uses);
	  make_transitive_closure_constraints (uses);
	}
      make_constraint_to (uses->id, gimple_call_chain (stmt));
    }

  /* And if we applied NRV the address of the return slot.  */
  if (gimple_call_return_slot_opt_p (stmt)
      && gimple_call_lhs (stmt) != NULL_TREE
      && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
    {
      if (!uses)
	{
	  uses = get_call_use_vi (stmt);
	  make_any_offset_constraints (uses);
	  make_transitive_closure_constraints (uses);
	}
      auto_vec<ce_s> tmpc;
      get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
      make_constraints_to (uses->id, tmpc);
    }

  /* Pure functions may return call-used and nonlocal memory.  */
  if (uses)
    {
      rhsc.var = uses->id;
      rhsc.offset = 0;
      rhsc.type = SCALAR;
      results->safe_push (rhsc);
    }
  rhsc.var = nonlocal_id;
  rhsc.offset = 0;
  rhsc.type = SCALAR;
  results->safe_push (rhsc);
}
4344
4345
4346 /* Return the varinfo for the callee of CALL. */
4347
4348 static varinfo_t
get_fi_for_callee(gcall * call)4349 get_fi_for_callee (gcall *call)
4350 {
4351 tree decl, fn = gimple_call_fn (call);
4352
4353 if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
4354 fn = OBJ_TYPE_REF_EXPR (fn);
4355
4356 /* If we can directly resolve the function being called, do so.
4357 Otherwise, it must be some sort of indirect expression that
4358 we should still be able to handle. */
4359 decl = gimple_call_addr_fndecl (fn);
4360 if (decl)
4361 return get_vi_for_tree (decl);
4362
4363 /* If the function is anything other than a SSA name pointer we have no
4364 clue and should be getting ANYFN (well, ANYTHING for now). */
4365 if (!fn || TREE_CODE (fn) != SSA_NAME)
4366 return get_varinfo (anything_id);
4367
4368 if (SSA_NAME_IS_DEFAULT_DEF (fn)
4369 && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
4370 || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
4371 fn = SSA_NAME_VAR (fn);
4372
4373 return get_vi_for_tree (fn);
4374 }
4375
4376 /* Create constraints for assigning call argument ARG to the incoming parameter
4377 INDEX of function FI. */
4378
4379 static void
find_func_aliases_for_call_arg(varinfo_t fi,unsigned index,tree arg)4380 find_func_aliases_for_call_arg (varinfo_t fi, unsigned index, tree arg)
4381 {
4382 struct constraint_expr lhs;
4383 lhs = get_function_part_constraint (fi, fi_parm_base + index);
4384
4385 auto_vec<ce_s, 2> rhsc;
4386 get_constraint_for_rhs (arg, &rhsc);
4387
4388 unsigned j;
4389 struct constraint_expr *rhsp;
4390 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4391 process_constraint (new_constraint (lhs, *rhsp));
4392 }
4393
4394 /* Return true if FNDECL may be part of another lto partition. */
4395
4396 static bool
fndecl_maybe_in_other_partition(tree fndecl)4397 fndecl_maybe_in_other_partition (tree fndecl)
4398 {
4399 cgraph_node *fn_node = cgraph_node::get (fndecl);
4400 if (fn_node == NULL)
4401 return true;
4402
4403 return fn_node->in_other_partition;
4404 }
4405
4406 /* Create constraints for the builtin call T. Return true if the call
4407 was handled, otherwise false. */
4408
static bool
find_func_aliases_for_builtin_call (struct function *fn, gcall *t)
{
  tree fndecl = gimple_call_fndecl (t);
  auto_vec<ce_s, 2> lhsc;
  auto_vec<ce_s, 4> rhsc;
  varinfo_t fi;

  if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
    /* ??? All builtins that are handled here need to be handled
       in the alias-oracle query functions explicitly!  */
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      /* All the following functions return a pointer to the same object
	 as their first argument points to.  The functions do not add
	 to the ESCAPED solution.  The functions make the first argument
	 pointed to memory point to what the second argument pointed to
	 memory points to.  */
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_BCOPY:
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMPCPY:
      case BUILT_IN_STPCPY:
      case BUILT_IN_STPNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMPCPY_CHK:
      case BUILT_IN_STPCPY_CHK:
      case BUILT_IN_STPNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
      case BUILT_IN_TM_MEMCPY:
      case BUILT_IN_TM_MEMMOVE:
	{
	  tree res = gimple_call_lhs (t);
	  /* bcopy takes (src, dest, ...) while the rest take
	     (dest, src, ...), so swap the operand positions for it.  */
	  tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
					   == BUILT_IN_BCOPY ? 1 : 0));
	  tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
					  == BUILT_IN_BCOPY ? 0 : 1));
	  if (res != NULL_TREE)
	    {
	      get_constraint_for (res, &lhsc);
	      /* The stp*/mempcpy variants return dest plus an offset,
		 so model the result with an unknown offset from dest.  */
	      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
		get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
	      else
		get_constraint_for (dest, &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	      lhsc.truncate (0);
	      rhsc.truncate (0);
	    }
	  /* *dest = *src for the copied memory itself.  */
	  get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	  get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
	  do_deref (&lhsc);
	  do_deref (&rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      case BUILT_IN_MEMSET:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_TM_MEMSET:
	{
	  tree res = gimple_call_lhs (t);
	  tree dest = gimple_call_arg (t, 0);
	  unsigned i;
	  ce_s *lhsp;
	  struct constraint_expr ac;
	  if (res != NULL_TREE)
	    {
	      /* The result points to the same object as dest.  */
	      get_constraint_for (res, &lhsc);
	      get_constraint_for (dest, &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	      lhsc.truncate (0);
	    }
	  get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	  do_deref (&lhsc);
	  /* memset with zero stores NULL (NOTHING) into the destination
	     when NULL-pointer checks allow that; otherwise it stores an
	     arbitrary integer pattern.  */
	  if (flag_delete_null_pointer_checks
	      && integer_zerop (gimple_call_arg (t, 1)))
	    {
	      ac.type = ADDRESSOF;
	      ac.var = nothing_id;
	    }
	  else
	    {
	      ac.type = SCALAR;
	      ac.var = integer_id;
	    }
	  ac.offset = 0;
	  FOR_EACH_VEC_ELT (lhsc, i, lhsp)
	    process_constraint (new_constraint (*lhsp, ac));
	  return true;
	}
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Nothing interesting happens.  */
	return true;
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX:
	{
	  tree ptr = gimple_call_lhs (t);
	  if (ptr == NULL_TREE)
	    return true;
	  get_constraint_for (ptr, &lhsc);
	  varinfo_t vi = make_heapvar ("HEAP", true);
	  /* Alloca storage is never global.  To exempt it from escaped
	     handling make it a non-heap var.  */
	  DECL_EXTERNAL (vi->decl) = 0;
	  vi->is_global_var = 0;
	  vi->is_heap_var = 0;
	  struct constraint_expr tmpc;
	  tmpc.var = vi->id;
	  tmpc.offset = 0;
	  tmpc.type = ADDRESSOF;
	  rhsc.safe_push (tmpc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      case BUILT_IN_POSIX_MEMALIGN:
	{
	  /* posix_memalign stores the allocated pointer through its
	     first argument, hence the extra dereference on the LHS.  */
	  tree ptrptr = gimple_call_arg (t, 0);
	  get_constraint_for (ptrptr, &lhsc);
	  do_deref (&lhsc);
	  varinfo_t vi = make_heapvar ("HEAP", true);
	  /* We are marking allocated storage local, we deal with it becoming
	     global by escaping and setting of vars_contains_escaped_heap.  */
	  DECL_EXTERNAL (vi->decl) = 0;
	  vi->is_global_var = 0;
	  struct constraint_expr tmpc;
	  tmpc.var = vi->id;
	  tmpc.offset = 0;
	  tmpc.type = ADDRESSOF;
	  rhsc.safe_push (tmpc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      case BUILT_IN_ASSUME_ALIGNED:
	{
	  /* The result is the first argument, unmodified.  */
	  tree res = gimple_call_lhs (t);
	  tree dest = gimple_call_arg (t, 0);
	  if (res != NULL_TREE)
	    {
	      get_constraint_for (res, &lhsc);
	      get_constraint_for (dest, &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	    }
	  return true;
	}
      /* All the following functions do not return pointers, do not
	 modify the points-to sets of memory reachable from their
	 arguments and do not add to the ESCAPED solution.  */
      case BUILT_IN_SINCOS:
      case BUILT_IN_SINCOSF:
      case BUILT_IN_SINCOSL:
      case BUILT_IN_FREXP:
      case BUILT_IN_FREXPF:
      case BUILT_IN_FREXPL:
      case BUILT_IN_GAMMA_R:
      case BUILT_IN_GAMMAF_R:
      case BUILT_IN_GAMMAL_R:
      case BUILT_IN_LGAMMA_R:
      case BUILT_IN_LGAMMAF_R:
      case BUILT_IN_LGAMMAL_R:
      case BUILT_IN_MODF:
      case BUILT_IN_MODFF:
      case BUILT_IN_MODFL:
      case BUILT_IN_REMQUO:
      case BUILT_IN_REMQUOF:
      case BUILT_IN_REMQUOL:
      case BUILT_IN_FREE:
	return true;
      case BUILT_IN_STRDUP:
      case BUILT_IN_STRNDUP:
      case BUILT_IN_REALLOC:
	if (gimple_call_lhs (t))
	  {
	    /* The result is a fresh allocation (NOALIAS) whose pointed-to
	       memory is a copy of what the source argument points to.  */
	    handle_lhs_call (t, gimple_call_lhs (t),
			     gimple_call_return_flags (t) | ERF_NOALIAS,
			     vNULL, fndecl);
	    get_constraint_for_ptr_offset (gimple_call_lhs (t),
					   NULL_TREE, &lhsc);
	    get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
					   NULL_TREE, &rhsc);
	    do_deref (&lhsc);
	    do_deref (&rhsc);
	    process_all_all_constraints (lhsc, rhsc);
	    lhsc.truncate (0);
	    rhsc.truncate (0);
	    /* For realloc the resulting pointer can be equal to the
	       argument as well.  But only doing this wouldn't be
	       correct because with ptr == 0 realloc behaves like malloc.  */
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_REALLOC)
	      {
		get_constraint_for (gimple_call_lhs (t), &lhsc);
		get_constraint_for (gimple_call_arg (t, 0), &rhsc);
		process_all_all_constraints (lhsc, rhsc);
	      }
	    return true;
	  }
	break;
      /* String / character search functions return a pointer into the
	 source string or NULL.  */
      case BUILT_IN_INDEX:
      case BUILT_IN_STRCHR:
      case BUILT_IN_STRRCHR:
      case BUILT_IN_MEMCHR:
      case BUILT_IN_STRSTR:
      case BUILT_IN_STRPBRK:
	if (gimple_call_lhs (t))
	  {
	    tree src = gimple_call_arg (t, 0);
	    get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
	    /* Also allow the NULL result.  */
	    constraint_expr nul;
	    nul.var = nothing_id;
	    nul.offset = 0;
	    nul.type = ADDRESSOF;
	    rhsc.safe_push (nul);
	    get_constraint_for (gimple_call_lhs (t), &lhsc);
	    process_all_all_constraints (lhsc, rhsc);
	  }
	return true;
      /* Pure functions that return something not based on any object and
	 that use the memory pointed to by their arguments (but not
	 transitively).  */
      case BUILT_IN_STRCMP:
      case BUILT_IN_STRCMP_EQ:
      case BUILT_IN_STRNCMP:
      case BUILT_IN_STRNCMP_EQ:
      case BUILT_IN_STRCASECMP:
      case BUILT_IN_STRNCASECMP:
      case BUILT_IN_MEMCMP:
      case BUILT_IN_BCMP:
      case BUILT_IN_STRSPN:
      case BUILT_IN_STRCSPN:
	{
	  varinfo_t uses = get_call_use_vi (t);
	  make_any_offset_constraints (uses);
	  make_constraint_to (uses->id, gimple_call_arg (t, 0));
	  make_constraint_to (uses->id, gimple_call_arg (t, 1));
	  /* No constraints are necessary for the return value.  */
	  return true;
	}
      case BUILT_IN_STRLEN:
	{
	  varinfo_t uses = get_call_use_vi (t);
	  make_any_offset_constraints (uses);
	  make_constraint_to (uses->id, gimple_call_arg (t, 0));
	  /* No constraints are necessary for the return value.  */
	  return true;
	}
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_CONSTANT_P:
	{
	  /* No constraints are necessary for the return value or the
	     arguments.  */
	  return true;
	}
      /* Trampolines are special - they set up passing the static
	 frame.  */
      case BUILT_IN_INIT_TRAMPOLINE:
	{
	  tree tramp = gimple_call_arg (t, 0);
	  tree nfunc = gimple_call_arg (t, 1);
	  tree frame = gimple_call_arg (t, 2);
	  unsigned i;
	  struct constraint_expr lhs, *rhsp;
	  if (in_ipa_mode)
	    {
	      varinfo_t nfi = NULL;
	      gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
	      nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
	      if (nfi)
		{
		  /* The nested function receives FRAME as its static
		     chain.  */
		  lhs = get_function_part_constraint (nfi, fi_static_chain);
		  get_constraint_for (frame, &rhsc);
		  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		    process_constraint (new_constraint (lhs, *rhsp));
		  rhsc.truncate (0);

		  /* Make the frame point to the function for
		     the trampoline adjustment call.  */
		  get_constraint_for (tramp, &lhsc);
		  do_deref (&lhsc);
		  get_constraint_for (nfunc, &rhsc);
		  process_all_all_constraints (lhsc, rhsc);

		  return true;
		}
	    }
	  /* Else fallthru to generic handling which will let
	     the frame escape.  */
	  break;
	}
      case BUILT_IN_ADJUST_TRAMPOLINE:
	{
	  /* The result points to the function stored into the frame
	     by INIT_TRAMPOLINE above.  */
	  tree tramp = gimple_call_arg (t, 0);
	  tree res = gimple_call_lhs (t);
	  if (in_ipa_mode && res)
	    {
	      get_constraint_for (res, &lhsc);
	      get_constraint_for (tramp, &rhsc);
	      do_deref (&rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	    }
	  return true;
	}
      CASE_BUILT_IN_TM_STORE (1):
      CASE_BUILT_IN_TM_STORE (2):
      CASE_BUILT_IN_TM_STORE (4):
      CASE_BUILT_IN_TM_STORE (8):
      CASE_BUILT_IN_TM_STORE (FLOAT):
      CASE_BUILT_IN_TM_STORE (DOUBLE):
      CASE_BUILT_IN_TM_STORE (LDOUBLE):
      CASE_BUILT_IN_TM_STORE (M64):
      CASE_BUILT_IN_TM_STORE (M128):
      CASE_BUILT_IN_TM_STORE (M256):
	{
	  /* Transactional store: *addr = src.  */
	  tree addr = gimple_call_arg (t, 0);
	  tree src = gimple_call_arg (t, 1);

	  get_constraint_for (addr, &lhsc);
	  do_deref (&lhsc);
	  get_constraint_for (src, &rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      CASE_BUILT_IN_TM_LOAD (1):
      CASE_BUILT_IN_TM_LOAD (2):
      CASE_BUILT_IN_TM_LOAD (4):
      CASE_BUILT_IN_TM_LOAD (8):
      CASE_BUILT_IN_TM_LOAD (FLOAT):
      CASE_BUILT_IN_TM_LOAD (DOUBLE):
      CASE_BUILT_IN_TM_LOAD (LDOUBLE):
      CASE_BUILT_IN_TM_LOAD (M64):
      CASE_BUILT_IN_TM_LOAD (M128):
      CASE_BUILT_IN_TM_LOAD (M256):
	{
	  /* Transactional load: dest = *addr.  */
	  tree dest = gimple_call_lhs (t);
	  tree addr = gimple_call_arg (t, 0);

	  get_constraint_for (dest, &lhsc);
	  get_constraint_for (addr, &rhsc);
	  do_deref (&rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      /* Variadic argument handling needs to be handled in IPA
	 mode as well.  */
      case BUILT_IN_VA_START:
	{
	  tree valist = gimple_call_arg (t, 0);
	  struct constraint_expr rhs, *lhsp;
	  unsigned i;
	  get_constraint_for_ptr_offset (valist, NULL_TREE, &lhsc);
	  do_deref (&lhsc);
	  /* The va_list gets access to pointers in variadic
	     arguments.  Which we know in the case of IPA analysis
	     and otherwise are just all nonlocal variables.  */
	  if (in_ipa_mode)
	    {
	      fi = lookup_vi_for_tree (fn->decl);
	      rhs = get_function_part_constraint (fi, ~0);
	      rhs.type = ADDRESSOF;
	    }
	  else
	    {
	      rhs.var = nonlocal_id;
	      rhs.type = ADDRESSOF;
	      rhs.offset = 0;
	    }
	  FOR_EACH_VEC_ELT (lhsc, i, lhsp)
	    process_constraint (new_constraint (*lhsp, rhs));
	  /* va_list is clobbered.  */
	  make_constraint_to (get_call_clobber_vi (t)->id, valist);
	  return true;
	}
      /* va_end doesn't have any effect that matters.  */
      case BUILT_IN_VA_END:
	return true;
      /* Alternate return.  Simply give up for now.  */
      case BUILT_IN_RETURN:
	{
	  fi = NULL;
	  if (!in_ipa_mode
	      || !(fi = get_vi_for_tree (fn->decl)))
	    make_constraint_from (get_varinfo (escaped_id), anything_id);
	  else if (in_ipa_mode
		   && fi != NULL)
	    {
	      struct constraint_expr lhs, rhs;
	      lhs = get_function_part_constraint (fi, fi_result);
	      rhs.var = anything_id;
	      rhs.offset = 0;
	      rhs.type = SCALAR;
	      process_constraint (new_constraint (lhs, rhs));
	    }
	  return true;
	}
      case BUILT_IN_GOMP_PARALLEL:
      case BUILT_IN_GOACC_PARALLEL:
	{
	  if (in_ipa_mode)
	    {
	      unsigned int fnpos, argpos;
	      switch (DECL_FUNCTION_CODE (fndecl))
		{
		case BUILT_IN_GOMP_PARALLEL:
		  /* __builtin_GOMP_parallel (fn, data, num_threads, flags).  */
		  fnpos = 0;
		  argpos = 1;
		  break;
		case BUILT_IN_GOACC_PARALLEL:
		  /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs,
					       sizes, kinds, ...).  */
		  fnpos = 1;
		  argpos = 3;
		  break;
		default:
		  gcc_unreachable ();
		}

	      tree fnarg = gimple_call_arg (t, fnpos);
	      gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
	      tree fndecl = TREE_OPERAND (fnarg, 0);
	      if (fndecl_maybe_in_other_partition (fndecl))
		/* Fallthru to general call handling.  */
		break;

	      tree arg = gimple_call_arg (t, argpos);

	      /* Model the outlined-region call as passing ARG to the
		 first parameter of FNDECL.  */
	      varinfo_t fi = get_vi_for_tree (fndecl);
	      find_func_aliases_for_call_arg (fi, 0, arg);
	      return true;
	    }
	  /* Else fallthru to generic call handling.  */
	  break;
	}
      /* printf-style functions may have hooks to set pointers to
	 point to somewhere into the generated string.  Leave them
	 for a later exercise...  */
      default:
	/* Fallthru to general call handling.  */;
      }

  return false;
}
4865
4866 /* Create constraints for the call T. */
4867
static void
find_func_aliases_for_call (struct function *fn, gcall *t)
{
  tree fndecl = gimple_call_fndecl (t);
  varinfo_t fi;

  /* Builtins with special handling are done; the rest fall through
     to the generic code below.  */
  if (fndecl != NULL_TREE
      && fndecl_built_in_p (fndecl)
      && find_func_aliases_for_builtin_call (fn, t))
    return;

  fi = get_fi_for_callee (t);
  if (!in_ipa_mode
      || (fi->decl && fndecl && !fi->is_fn_info))
    {
      /* Non-IPA handling (also used in IPA mode when the callee has no
	 function info): summarize effects via the per-call use/clobber
	 variables and ESCAPED.  */
      auto_vec<ce_s, 16> rhsc;
      int flags = gimple_call_flags (t);

      /* Const functions can return their arguments and addresses
	 of global memory but not of escaped memory.  */
      if (flags & (ECF_CONST|ECF_NOVOPS))
	{
	  if (gimple_call_lhs (t))
	    handle_const_call (t, &rhsc);
	}
      /* Pure functions can return addresses in and of memory
	 reachable from their arguments, but they are not an escape
	 point for reachable memory of their arguments.  */
      else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
	handle_pure_call (t, &rhsc);
      /* If the call is to a replaceable operator delete and results
	 from a delete expression as opposed to a direct call to
	 such operator, then the effects for PTA (in particular
	 the escaping of the pointer) can be ignored.  */
      else if (fndecl
	       && DECL_IS_OPERATOR_DELETE_P (fndecl)
	       && gimple_call_from_new_or_delete (t))
	;
      else
	handle_rhs_call (t, &rhsc);
      if (gimple_call_lhs (t))
	handle_lhs_call (t, gimple_call_lhs (t),
			 gimple_call_return_flags (t), rhsc, fndecl);
    }
  else
    {
      /* IPA mode with known callee: wire arguments, return value and
	 static chain directly to the callee's function-info parts.  */
      auto_vec<ce_s, 2> rhsc;
      tree lhsop;
      unsigned j;

      /* Assign all the passed arguments to the appropriate incoming
	 parameters of the function.  */
      for (j = 0; j < gimple_call_num_args (t); j++)
	{
	  tree arg = gimple_call_arg (t, j);
	  find_func_aliases_for_call_arg (fi, j, arg);
	}

      /* If we are returning a value, assign it to the result.  */
      lhsop = gimple_call_lhs (t);
      if (lhsop)
	{
	  auto_vec<ce_s, 2> lhsc;
	  struct constraint_expr rhs;
	  struct constraint_expr *lhsp;
	  bool aggr_p = aggregate_value_p (lhsop, gimple_call_fntype (t));

	  get_constraint_for (lhsop, &lhsc);
	  rhs = get_function_part_constraint (fi, fi_result);
	  if (aggr_p)
	    {
	      /* An aggregate return is passed by reference, so the
		 value comes from dereferencing the result part.  */
	      auto_vec<ce_s, 2> tem;
	      tem.quick_push (rhs);
	      do_deref (&tem);
	      gcc_checking_assert (tem.length () == 1);
	      rhs = tem[0];
	    }
	  FOR_EACH_VEC_ELT (lhsc, j, lhsp)
	    process_constraint (new_constraint (*lhsp, rhs));

	  /* If we pass the result decl by reference, honor that.  */
	  if (aggr_p)
	    {
	      struct constraint_expr lhs;
	      struct constraint_expr *rhsp;

	      get_constraint_for_address_of (lhsop, &rhsc);
	      lhs = get_function_part_constraint (fi, fi_result);
	      FOR_EACH_VEC_ELT (rhsc, j, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      rhsc.truncate (0);
	    }
	}

      /* If we use a static chain, pass it along.  */
      if (gimple_call_chain (t))
	{
	  struct constraint_expr lhs;
	  struct constraint_expr *rhsp;

	  get_constraint_for (gimple_call_chain (t), &rhsc);
	  lhs = get_function_part_constraint (fi, fi_static_chain);
	  FOR_EACH_VEC_ELT (rhsc, j, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	}
    }
}
4975
4976 /* Walk statement T setting up aliasing constraints according to the
4977 references found in T. This function is the main part of the
4978 constraint builder. AI points to auxiliary alias information used
4979 when building alias sets and computing alias grouping heuristics. */
4980
static void
find_func_aliases (struct function *fn, gimple *origt)
{
  gimple *t = origt;
  auto_vec<ce_s, 16> lhsc;
  auto_vec<ce_s, 16> rhsc;
  varinfo_t fi;

  /* Now build constraints expressions.  */
  if (gimple_code (t) == GIMPLE_PHI)
    {
      /* For a phi node, assign all the arguments to
	 the result.  */
      get_constraint_for (gimple_phi_result (t), &lhsc);
      for (unsigned i = 0; i < gimple_phi_num_args (t); i++)
	{
	  get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  rhsc.truncate (0);
	}
    }
  /* In IPA mode, we need to generate constraints to pass call
     arguments through their calls.   There are two cases,
     either a GIMPLE_CALL returning a value, or just a plain
     GIMPLE_CALL when we are not.

     In non-ipa mode, we need to generate constraints for each
     pointer passed by address.  */
  else if (is_gimple_call (t))
    find_func_aliases_for_call (fn, as_a <gcall *> (t));

  /* Otherwise, just a regular assignment statement.  Only care about
     operations with pointer result, others are dealt with as escape
     points if they have pointer operands.  */
  else if (is_gimple_assign (t))
    {
      /* Otherwise, just a regular assignment statement.  */
      tree lhsop = gimple_assign_lhs (t);
      /* RHSOP is only set for single-operand RHSs; for binary and
	 ternary codes the operands are accessed individually below.  */
      tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;

      if (rhsop && TREE_CLOBBER_P (rhsop))
	/* Ignore clobbers, they don't actually store anything into
	   the LHS.  */
	;
      else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
	do_structure_copy (lhsop, rhsop);
      else
	{
	  enum tree_code code = gimple_assign_rhs_code (t);

	  get_constraint_for (lhsop, &lhsc);

	  if (code == POINTER_PLUS_EXPR)
	    get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
					   gimple_assign_rhs2 (t), &rhsc);
	  else if (code == POINTER_DIFF_EXPR)
	    /* The result is not a pointer (part).  */
	    ;
	  else if (code == BIT_AND_EXPR
		   && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
	    {
	      /* Aligning a pointer via a BIT_AND_EXPR is offsetting
		 the pointer.  Handle it by offsetting it by UNKNOWN.  */
	      get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
					     NULL_TREE, &rhsc);
	    }
	  else if (code == TRUNC_DIV_EXPR
		   || code == CEIL_DIV_EXPR
		   || code == FLOOR_DIV_EXPR
		   || code == ROUND_DIV_EXPR
		   || code == EXACT_DIV_EXPR
		   || code == TRUNC_MOD_EXPR
		   || code == CEIL_MOD_EXPR
		   || code == FLOOR_MOD_EXPR
		   || code == ROUND_MOD_EXPR)
	    /* Division and modulo transfer the pointer from the LHS.  */
	    get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
					   NULL_TREE, &rhsc);
	  else if (CONVERT_EXPR_CODE_P (code)
		   || gimple_assign_single_p (t))
	    /* See through conversions, single RHS are handled by
	       get_constraint_for_rhs.  */
	    get_constraint_for_rhs (rhsop, &rhsc);
	  else if (code == COND_EXPR)
	    {
	      /* The result is a merge of both COND_EXPR arms.  */
	      auto_vec<ce_s, 2> tmp;
	      struct constraint_expr *rhsp;
	      unsigned i;
	      get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
	      get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
	      FOR_EACH_VEC_ELT (tmp, i, rhsp)
		rhsc.safe_push (*rhsp);
	    }
	  else if (truth_value_p (code))
	    /* Truth value results are not pointer (parts).  Or at least
	       very unreasonable obfuscation of a part.  */
	    ;
	  else
	    {
	      /* All other operations are possibly offsetting merges.  */
	      auto_vec<ce_s, 4> tmp;
	      struct constraint_expr *rhsp;
	      unsigned i, j;
	      get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
					     NULL_TREE, &rhsc);
	      for (i = 2; i < gimple_num_ops (t); ++i)
		{
		  get_constraint_for_ptr_offset (gimple_op (t, i),
						 NULL_TREE, &tmp);
		  FOR_EACH_VEC_ELT (tmp, j, rhsp)
		    rhsc.safe_push (*rhsp);
		  tmp.truncate (0);
		}
	    }
	  process_all_all_constraints (lhsc, rhsc);
	}
      /* If there is a store to a global variable the rhs escapes.  */
      if ((lhsop = get_base_address (lhsop)) != NULL_TREE
	  && DECL_P (lhsop))
	{
	  varinfo_t vi = get_vi_for_tree (lhsop);
	  if ((! in_ipa_mode && vi->is_global_var)
	      || vi->is_ipa_escape_point)
	    make_escape_constraint (rhsop);
	}
    }
  /* Handle escapes through return.  */
  else if (gimple_code (t) == GIMPLE_RETURN
	   && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE)
    {
      greturn *return_stmt = as_a <greturn *> (t);
      fi = NULL;
      if (!in_ipa_mode
	  && SSA_VAR_P (gimple_return_retval (return_stmt)))
	{
	  /* We handle simple returns by post-processing the solutions.  */
	  ;
	}
      if (!(fi = get_vi_for_tree (fn->decl)))
	make_escape_constraint (gimple_return_retval (return_stmt));
      else if (in_ipa_mode)
	{
	  /* In IPA mode feed the return value into the function's
	     result part instead of ESCAPED.  */
	  struct constraint_expr lhs ;
	  struct constraint_expr *rhsp;
	  unsigned i;

	  lhs = get_function_part_constraint (fi, fi_result);
	  get_constraint_for_rhs (gimple_return_retval (return_stmt), &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	}
    }
  /* Handle asms conservatively by adding escape constraints to everything.  */
  else if (gasm *asm_stmt = dyn_cast <gasm *> (t))
    {
      unsigned i, noutputs;
      const char **oconstraints;
      const char *constraint;
      bool allows_mem, allows_reg, is_inout;

      noutputs = gimple_asm_noutputs (asm_stmt);
      oconstraints = XALLOCAVEC (const char *, noutputs);

      for (i = 0; i < noutputs; ++i)
	{
	  tree link = gimple_asm_output_op (asm_stmt, i);
	  tree op = TREE_VALUE (link);

	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	  oconstraints[i] = constraint;
	  parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
				   &allows_reg, &is_inout);

	  /* A memory constraint makes the address of the operand escape.  */
	  if (!allows_reg && allows_mem)
	    make_escape_constraint (build_fold_addr_expr (op));

	  /* The asm may read global memory, so outputs may point to
	     any global memory.  */
	  if (op)
	    {
	      auto_vec<ce_s, 2> lhsc;
	      struct constraint_expr rhsc, *lhsp;
	      unsigned j;
	      get_constraint_for (op, &lhsc);
	      rhsc.var = nonlocal_id;
	      rhsc.offset = 0;
	      rhsc.type = SCALAR;
	      FOR_EACH_VEC_ELT (lhsc, j, lhsp)
		process_constraint (new_constraint (*lhsp, rhsc));
	    }
	}
      for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	{
	  tree link = gimple_asm_input_op (asm_stmt, i);
	  tree op = TREE_VALUE (link);

	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));

	  parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
				  &allows_mem, &allows_reg);

	  /* A memory constraint makes the address of the operand escape.  */
	  if (!allows_reg && allows_mem)
	    make_escape_constraint (build_fold_addr_expr (op));
	  /* Strictly we'd only need the constraint to ESCAPED if
	     the asm clobbers memory, otherwise using something
	     along the lines of per-call clobbers/uses would be enough.  */
	  else if (op)
	    make_escape_constraint (op);
	}
    }
}
5195
5196
5197 /* Create a constraint adding to the clobber set of FI the memory
5198 pointed to by PTR. */
5199
5200 static void
process_ipa_clobber(varinfo_t fi,tree ptr)5201 process_ipa_clobber (varinfo_t fi, tree ptr)
5202 {
5203 vec<ce_s> ptrc = vNULL;
5204 struct constraint_expr *c, lhs;
5205 unsigned i;
5206 get_constraint_for_rhs (ptr, &ptrc);
5207 lhs = get_function_part_constraint (fi, fi_clobbers);
5208 FOR_EACH_VEC_ELT (ptrc, i, c)
5209 process_constraint (new_constraint (lhs, *c));
5210 ptrc.release ();
5211 }
5212
/* Walk statement T setting up clobber and use constraints according to the
   references found in T.  This function is a main part of the
   IPA constraint builder.  It records which non-local memory the
   statement may write (fi_clobbers) or read (fi_uses) in the function
   info of FN.  */

static void
find_func_clobbers (struct function *fn, gimple *origt)
{
  gimple *t = origt;
  auto_vec<ce_s, 16> lhsc;
  auto_vec<ce_s, 16> rhsc;
  varinfo_t fi;

  /* Add constraints for clobbered/used in IPA mode.
     We are not interested in what automatic variables are clobbered
     or used as we only use the information in the caller to which
     they do not escape.  */
  gcc_assert (in_ipa_mode);

  /* If the stmt refers to memory in any way it better had a VUSE.  */
  if (gimple_vuse (t) == NULL_TREE)
    return;

  /* We'd better have function information for the current function.  */
  fi = lookup_vi_for_tree (fn->decl);
  gcc_assert (fi != NULL);

  /* Account for stores in assignments and calls.  */
  if (gimple_vdef (t) != NULL_TREE
      && gimple_has_lhs (t))
    {
      tree lhs = gimple_get_lhs (t);
      tree tem = lhs;
      /* Strip handled components to get at the base of the store.  */
      while (handled_component_p (tem))
	tem = TREE_OPERAND (tem, 0);
      /* Only stores to memory not automatic to FN are interesting;
	 automatic variables do not escape to the callers.  */
      if ((DECL_P (tem)
	   && !auto_var_in_fn_p (tem, fn->decl))
	  || INDIRECT_REF_P (tem)
	  || (TREE_CODE (tem) == MEM_REF
	      && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
		   && auto_var_in_fn_p
		        (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
	{
	  struct constraint_expr lhsc, *rhsp;
	  unsigned i;
	  lhsc = get_function_part_constraint (fi, fi_clobbers);
	  get_constraint_for_address_of (lhs, &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (new_constraint (lhsc, *rhsp));
	  rhsc.truncate (0);
	}
    }

  /* Account for uses in assigments and returns.  */
  if (gimple_assign_single_p (t)
      || (gimple_code (t) == GIMPLE_RETURN
	  && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE))
    {
      tree rhs = (gimple_assign_single_p (t)
		  ? gimple_assign_rhs1 (t)
		  : gimple_return_retval (as_a <greturn *> (t)));
      tree tem = rhs;
      while (handled_component_p (tem))
	tem = TREE_OPERAND (tem, 0);
      /* As for stores above, only uses of non-automatic memory are
	 relevant to callers.  */
      if ((DECL_P (tem)
	   && !auto_var_in_fn_p (tem, fn->decl))
	  || INDIRECT_REF_P (tem)
	  || (TREE_CODE (tem) == MEM_REF
	      && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
		   && auto_var_in_fn_p
		        (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
	{
	  struct constraint_expr lhs, *rhsp;
	  unsigned i;
	  lhs = get_function_part_constraint (fi, fi_uses);
	  get_constraint_for_address_of (rhs, &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	  rhsc.truncate (0);
	}
    }

  if (gcall *call_stmt = dyn_cast <gcall *> (t))
    {
      varinfo_t cfi = NULL;
      tree decl = gimple_call_fndecl (t);
      struct constraint_expr lhs, rhs;
      unsigned i, j;

      /* For builtins we do not have separate function info.  For those
	 we do not generate escapes for we have to generate clobbers/uses.  */
      if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (decl))
	  {
	  /* The following functions use and clobber memory pointed to
	     by their arguments.  */
	  case BUILT_IN_STRCPY:
	  case BUILT_IN_STRNCPY:
	  case BUILT_IN_BCOPY:
	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMMOVE:
	  case BUILT_IN_MEMPCPY:
	  case BUILT_IN_STPCPY:
	  case BUILT_IN_STPNCPY:
	  case BUILT_IN_STRCAT:
	  case BUILT_IN_STRNCAT:
	  case BUILT_IN_STRCPY_CHK:
	  case BUILT_IN_STRNCPY_CHK:
	  case BUILT_IN_MEMCPY_CHK:
	  case BUILT_IN_MEMMOVE_CHK:
	  case BUILT_IN_MEMPCPY_CHK:
	  case BUILT_IN_STPCPY_CHK:
	  case BUILT_IN_STPNCPY_CHK:
	  case BUILT_IN_STRCAT_CHK:
	  case BUILT_IN_STRNCAT_CHK:
	    {
	      /* bcopy has its source and destination arguments swapped
		 relative to the mem*/str* family.  */
	      tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
					       == BUILT_IN_BCOPY ? 1 : 0));
	      tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
					      == BUILT_IN_BCOPY ? 0 : 1));
	      unsigned i;
	      struct constraint_expr *rhsp, *lhsp;
	      get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	      lhs = get_function_part_constraint (fi, fi_clobbers);
	      FOR_EACH_VEC_ELT (lhsc, i, lhsp)
		process_constraint (new_constraint (lhs, *lhsp));
	      get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
	      lhs = get_function_part_constraint (fi, fi_uses);
	      FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      return;
	    }
	  /* The following function clobbers memory pointed to by
	     its argument.  */
	  case BUILT_IN_MEMSET:
	  case BUILT_IN_MEMSET_CHK:
	  case BUILT_IN_POSIX_MEMALIGN:
	    {
	      tree dest = gimple_call_arg (t, 0);
	      unsigned i;
	      ce_s *lhsp;
	      get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	      lhs = get_function_part_constraint (fi, fi_clobbers);
	      FOR_EACH_VEC_ELT (lhsc, i, lhsp)
		process_constraint (new_constraint (lhs, *lhsp));
	      return;
	    }
	  /* The following functions clobber their second and third
	     arguments.  */
	  case BUILT_IN_SINCOS:
	  case BUILT_IN_SINCOSF:
	  case BUILT_IN_SINCOSL:
	    {
	      process_ipa_clobber (fi, gimple_call_arg (t, 1));
	      process_ipa_clobber (fi, gimple_call_arg (t, 2));
	      return;
	    }
	  /* The following functions clobber their second argument.  */
	  case BUILT_IN_FREXP:
	  case BUILT_IN_FREXPF:
	  case BUILT_IN_FREXPL:
	  case BUILT_IN_LGAMMA_R:
	  case BUILT_IN_LGAMMAF_R:
	  case BUILT_IN_LGAMMAL_R:
	  case BUILT_IN_GAMMA_R:
	  case BUILT_IN_GAMMAF_R:
	  case BUILT_IN_GAMMAL_R:
	  case BUILT_IN_MODF:
	  case BUILT_IN_MODFF:
	  case BUILT_IN_MODFL:
	    {
	      process_ipa_clobber (fi, gimple_call_arg (t, 1));
	      return;
	    }
	  /* The following functions clobber their third argument.  */
	  case BUILT_IN_REMQUO:
	  case BUILT_IN_REMQUOF:
	  case BUILT_IN_REMQUOL:
	    {
	      process_ipa_clobber (fi, gimple_call_arg (t, 2));
	      return;
	    }
	  /* The following functions neither read nor clobber memory.  */
	  case BUILT_IN_ASSUME_ALIGNED:
	  case BUILT_IN_FREE:
	    return;
	  /* Trampolines are of no interest to us.  */
	  case BUILT_IN_INIT_TRAMPOLINE:
	  case BUILT_IN_ADJUST_TRAMPOLINE:
	    return;
	  case BUILT_IN_VA_START:
	  case BUILT_IN_VA_END:
	    return;
	  case BUILT_IN_GOMP_PARALLEL:
	  case BUILT_IN_GOACC_PARALLEL:
	    {
	      unsigned int fnpos, argpos;
	      unsigned int implicit_use_args[2];
	      unsigned int num_implicit_use_args = 0;
	      switch (DECL_FUNCTION_CODE (decl))
		{
		case BUILT_IN_GOMP_PARALLEL:
		  /* __builtin_GOMP_parallel (fn, data, num_threads, flags).  */
		  fnpos = 0;
		  argpos = 1;
		  break;
		case BUILT_IN_GOACC_PARALLEL:
		  /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs,
					       sizes, kinds, ...).  */
		  fnpos = 1;
		  argpos = 3;
		  implicit_use_args[num_implicit_use_args++] = 4;
		  implicit_use_args[num_implicit_use_args++] = 5;
		  break;
		default:
		  gcc_unreachable ();
		}

	      tree fnarg = gimple_call_arg (t, fnpos);
	      gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
	      tree fndecl = TREE_OPERAND (fnarg, 0);
	      if (fndecl_maybe_in_other_partition (fndecl))
		/* Fallthru to general call handling.  */
		break;

	      varinfo_t cfi = get_vi_for_tree (fndecl);

	      tree arg = gimple_call_arg (t, argpos);

	      /* Parameter passed by value is used.  */
	      lhs = get_function_part_constraint (fi, fi_uses);
	      struct constraint_expr *rhsp;
	      get_constraint_for (arg, &rhsc);
	      FOR_EACH_VEC_ELT (rhsc, j, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      rhsc.truncate (0);

	      /* Handle parameters used by the call, but not used in cfi, as
		 implicitly used by cfi.  */
	      lhs = get_function_part_constraint (cfi, fi_uses);
	      for (unsigned i = 0; i < num_implicit_use_args; ++i)
		{
		  tree arg = gimple_call_arg (t, implicit_use_args[i]);
		  get_constraint_for (arg, &rhsc);
		  FOR_EACH_VEC_ELT (rhsc, j, rhsp)
		    process_constraint (new_constraint (lhs, *rhsp));
		  rhsc.truncate (0);
		}

	      /* The caller clobbers what the callee does.  */
	      lhs = get_function_part_constraint (fi, fi_clobbers);
	      rhs = get_function_part_constraint (cfi, fi_clobbers);
	      process_constraint (new_constraint (lhs, rhs));

	      /* The caller uses what the callee does.  */
	      lhs = get_function_part_constraint (fi, fi_uses);
	      rhs = get_function_part_constraint (cfi, fi_uses);
	      process_constraint (new_constraint (lhs, rhs));

	      return;
	    }
	  /* printf-style functions may have hooks to set pointers to
	     point to somewhere into the generated string.  Leave them
	     for a later exercise...  */
	  default:
	    /* Fallthru to general call handling.  */;
	  }

      /* Parameters passed by value are used.  */
      lhs = get_function_part_constraint (fi, fi_uses);
      for (i = 0; i < gimple_call_num_args (t); i++)
	{
	  struct constraint_expr *rhsp;
	  tree arg = gimple_call_arg (t, i);

	  /* SSA names and invariants are not memory, nothing to use.  */
	  if (TREE_CODE (arg) == SSA_NAME
	      || is_gimple_min_invariant (arg))
	    continue;

	  get_constraint_for_address_of (arg, &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, j, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	  rhsc.truncate (0);
	}

      /* Build constraints for propagating clobbers/uses along the
	 callgraph edges.  */
      cfi = get_fi_for_callee (call_stmt);
      if (cfi->id == anything_id)
	{
	  /* Unknown callee: conservatively clobber and use anything.  */
	  if (gimple_vdef (t))
	    make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
				  anything_id);
	  make_constraint_from (first_vi_for_offset (fi, fi_uses),
				anything_id);
	  return;
	}

      /* For callees without function info (that's external functions),
	 ESCAPED is clobbered and used.  */
      if (cfi->decl
	  && TREE_CODE (cfi->decl) == FUNCTION_DECL
	  && !cfi->is_fn_info)
	{
	  varinfo_t vi;

	  if (gimple_vdef (t))
	    make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
				  escaped_id);
	  make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);

	  /* Also honor the call statement use/clobber info.  */
	  if ((vi = lookup_call_clobber_vi (call_stmt)) != NULL)
	    make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
				  vi->id);
	  if ((vi = lookup_call_use_vi (call_stmt)) != NULL)
	    make_copy_constraint (first_vi_for_offset (fi, fi_uses),
				  vi->id);
	  return;
	}

      /* Otherwise the caller clobbers and uses what the callee does.
	 ??? This should use a new complex constraint that filters
	 local variables of the callee.  */
      if (gimple_vdef (t))
	{
	  lhs = get_function_part_constraint (fi, fi_clobbers);
	  rhs = get_function_part_constraint (cfi, fi_clobbers);
	  process_constraint (new_constraint (lhs, rhs));
	}
      lhs = get_function_part_constraint (fi, fi_uses);
      rhs = get_function_part_constraint (cfi, fi_uses);
      process_constraint (new_constraint (lhs, rhs));
    }
  else if (gimple_code (t) == GIMPLE_ASM)
    {
      /* ??? Ick.  We can do better.  */
      if (gimple_vdef (t))
	make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
			      anything_id);
      make_constraint_from (first_vi_for_offset (fi, fi_uses),
			    anything_id);
    }
}
5556
5557
5558 /* Find the first varinfo in the same variable as START that overlaps with
5559 OFFSET. Return NULL if we can't find one. */
5560
5561 static varinfo_t
first_vi_for_offset(varinfo_t start,unsigned HOST_WIDE_INT offset)5562 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
5563 {
5564 /* If the offset is outside of the variable, bail out. */
5565 if (offset >= start->fullsize)
5566 return NULL;
5567
5568 /* If we cannot reach offset from start, lookup the first field
5569 and start from there. */
5570 if (start->offset > offset)
5571 start = get_varinfo (start->head);
5572
5573 while (start)
5574 {
5575 /* We may not find a variable in the field list with the actual
5576 offset when we have glommed a structure to a variable.
5577 In that case, however, offset should still be within the size
5578 of the variable. */
5579 if (offset >= start->offset
5580 && (offset - start->offset) < start->size)
5581 return start;
5582
5583 start = vi_next (start);
5584 }
5585
5586 return NULL;
5587 }
5588
5589 /* Find the first varinfo in the same variable as START that overlaps with
5590 OFFSET. If there is no such varinfo the varinfo directly preceding
5591 OFFSET is returned. */
5592
5593 static varinfo_t
first_or_preceding_vi_for_offset(varinfo_t start,unsigned HOST_WIDE_INT offset)5594 first_or_preceding_vi_for_offset (varinfo_t start,
5595 unsigned HOST_WIDE_INT offset)
5596 {
5597 /* If we cannot reach offset from start, lookup the first field
5598 and start from there. */
5599 if (start->offset > offset)
5600 start = get_varinfo (start->head);
5601
5602 /* We may not find a variable in the field list with the actual
5603 offset when we have glommed a structure to a variable.
5604 In that case, however, offset should still be within the size
5605 of the variable.
5606 If we got beyond the offset we look for return the field
5607 directly preceding offset which may be the last field. */
5608 while (start->next
5609 && offset >= start->offset
5610 && !((offset - start->offset) < start->size))
5611 start = vi_next (start);
5612
5613 return start;
5614 }
5615
5616
/* This structure is used during pushing fields onto the fieldstack
   to track the offset of the field, since bitpos_of_field gives it
   relative to its immediate containing type, and we want it relative
   to the ultimate containing object.  */

struct fieldoff
{
  /* Offset from the base of the base containing object to this field.  */
  HOST_WIDE_INT offset;

  /* Size, in bits, of the field.  */
  unsigned HOST_WIDE_INT size;

  /* True if SIZE is not meaningful because DECL_SIZE of the field was
     absent or not a constant that fits a uhwi.  */
  unsigned has_unknown_size : 1;

  /* True if the field's type is known to contain pointers.  */
  unsigned must_have_pointers : 1;

  /* True if the field may contain pointers.  */
  unsigned may_have_pointers : 1;

  /* True if the field is a restrict-qualified pointer.  */
  unsigned only_restrict_pointers : 1;

  /* For a restrict-qualified pointer field, the pointed-to type;
     otherwise unused.  */
  tree restrict_pointed_type;
};
typedef struct fieldoff fieldoff_s;
5641
5642
5643 /* qsort comparison function for two fieldoff's PA and PB */
5644
5645 static int
fieldoff_compare(const void * pa,const void * pb)5646 fieldoff_compare (const void *pa, const void *pb)
5647 {
5648 const fieldoff_s *foa = (const fieldoff_s *)pa;
5649 const fieldoff_s *fob = (const fieldoff_s *)pb;
5650 unsigned HOST_WIDE_INT foasize, fobsize;
5651
5652 if (foa->offset < fob->offset)
5653 return -1;
5654 else if (foa->offset > fob->offset)
5655 return 1;
5656
5657 foasize = foa->size;
5658 fobsize = fob->size;
5659 if (foasize < fobsize)
5660 return -1;
5661 else if (foasize > fobsize)
5662 return 1;
5663 return 0;
5664 }
5665
/* Sort a fieldstack according to the field offset and sizes, using
   fieldoff_compare as the ordering.  */
static void
sort_fieldstack (vec<fieldoff_s> fieldstack)
{
  fieldstack.qsort (fieldoff_compare);
}
5672
/* Return true if T is a type that can have subvars.  */

static inline bool
type_can_have_subvars (const_tree t)
{
  /* Aggregates without overlapping fields can have subvars.  Unions
     and QUAL_UNIONs overlap by construction and so are excluded.  */
  return TREE_CODE (t) == RECORD_TYPE;
}
5681
5682 /* Return true if V is a tree that we can have subvars for.
5683 Normally, this is any aggregate type. Also complex
5684 types which are not gimple registers can have subvars. */
5685
5686 static inline bool
var_can_have_subvars(const_tree v)5687 var_can_have_subvars (const_tree v)
5688 {
5689 /* Volatile variables should never have subvars. */
5690 if (TREE_THIS_VOLATILE (v))
5691 return false;
5692
5693 /* Non decls or memory tags can never have subvars. */
5694 if (!DECL_P (v))
5695 return false;
5696
5697 return type_can_have_subvars (TREE_TYPE (v));
5698 }
5699
5700 /* Return true if T is a type that does contain pointers. */
5701
5702 static bool
type_must_have_pointers(tree type)5703 type_must_have_pointers (tree type)
5704 {
5705 if (POINTER_TYPE_P (type))
5706 return true;
5707
5708 if (TREE_CODE (type) == ARRAY_TYPE)
5709 return type_must_have_pointers (TREE_TYPE (type));
5710
5711 /* A function or method can have pointers as arguments, so track
5712 those separately. */
5713 if (TREE_CODE (type) == FUNCTION_TYPE
5714 || TREE_CODE (type) == METHOD_TYPE)
5715 return true;
5716
5717 return false;
5718 }
5719
/* Return true if the type of field declaration T must contain
   pointers; see type_must_have_pointers.  */

static bool
field_must_have_pointers (tree t)
{
  return type_must_have_pointers (TREE_TYPE (t));
}
5725
/* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
   the fields of TYPE onto fieldstack, recording their offsets along
   the way.

   OFFSET is used to keep track of the offset in this entire
   structure, rather than just the immediately containing structure.
   Returns false if the caller is supposed to handle the field we
   recursed for.  */

static bool
push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
			     HOST_WIDE_INT offset)
{
  tree field;
  bool empty_p = true;

  /* Only RECORD_TYPEs are decomposed into fields; see
     type_can_have_subvars.  */
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  /* If the vector of fields is growing too big, bail out early.
     Callers check for vec::length <= param_max_fields_for_field_sensitive, make
     sure this fails.  */
  if (fieldstack->length () > (unsigned)param_max_fields_for_field_sensitive)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      {
	bool push = false;
	HOST_WIDE_INT foff = bitpos_of_field (field);
	tree field_type = TREE_TYPE (field);

	/* Fields we cannot (unions) or should not decompose are
	   pushed as a single entry; otherwise recurse into them.  */
	if (!var_can_have_subvars (field)
	    || TREE_CODE (field_type) == QUAL_UNION_TYPE
	    || TREE_CODE (field_type) == UNION_TYPE)
	  push = true;
	else if (!push_fields_onto_fieldstack
		    (field_type, fieldstack, offset + foff)
		 && (DECL_SIZE (field)
		     && !integer_zerop (DECL_SIZE (field))))
	  /* Empty structures may have actual size, like in C++.  So
	     see if we didn't push any subfields and the size is
	     nonzero, push the field onto the stack.  */
	  push = true;

	if (push)
	  {
	    fieldoff_s *pair = NULL;
	    bool has_unknown_size = false;
	    bool must_have_pointers_p;

	    if (!fieldstack->is_empty ())
	      pair = &fieldstack->last ();

	    /* If there isn't anything at offset zero, create sth.
	       The entry covers the bits [0, offset + foff) and is
	       conservatively marked as possibly having pointers.  */
	    if (!pair
		&& offset + foff != 0)
	      {
		fieldoff_s e
		  = {0, offset + foff, false, false, true, false, NULL_TREE};
		pair = fieldstack->safe_push (e);
	      }

	    if (!DECL_SIZE (field)
		|| !tree_fits_uhwi_p (DECL_SIZE (field)))
	      has_unknown_size = true;

	    /* If adjacent fields do not contain pointers merge them.  */
	    must_have_pointers_p = field_must_have_pointers (field);
	    if (pair
		&& !has_unknown_size
		&& !must_have_pointers_p
		&& !pair->must_have_pointers
		&& !pair->has_unknown_size
		&& pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
	      {
		/* This field abuts the previous entry; grow it.  */
		pair->size += tree_to_uhwi (DECL_SIZE (field));
	      }
	    else
	      {
		/* Otherwise record the field as a separate entry.  */
		fieldoff_s e;
		e.offset = offset + foff;
		e.has_unknown_size = has_unknown_size;
		if (!has_unknown_size)
		  e.size = tree_to_uhwi (DECL_SIZE (field));
		else
		  e.size = -1;
		e.must_have_pointers = must_have_pointers_p;
		e.may_have_pointers = true;
		e.only_restrict_pointers
		  = (!has_unknown_size
		     && POINTER_TYPE_P (field_type)
		     && TYPE_RESTRICT (field_type));
		if (e.only_restrict_pointers)
		  e.restrict_pointed_type = TREE_TYPE (field_type);
		fieldstack->safe_push (e);
	      }
	  }

	empty_p = false;
      }

  return !empty_p;
}
5830
5831 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5832 if it is a varargs function. */
5833
5834 static unsigned int
count_num_arguments(tree decl,bool * is_varargs)5835 count_num_arguments (tree decl, bool *is_varargs)
5836 {
5837 unsigned int num = 0;
5838 tree t;
5839
5840 /* Capture named arguments for K&R functions. They do not
5841 have a prototype and thus no TYPE_ARG_TYPES. */
5842 for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5843 ++num;
5844
5845 /* Check if the function has variadic arguments. */
5846 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5847 if (TREE_VALUE (t) == void_type_node)
5848 break;
5849 if (!t)
5850 *is_varargs = true;
5851
5852 return num;
5853 }
5854
/* Create function info for DECL, using NAME, and return the varinfo
   we've created for the function.  If NONLOCAL_P, create initial
   constraints (making chain, result and arguments point to NONLOCAL).
   The function is represented as a varinfo whose sub-fields, chained
   via ->next in increasing offset order, stand for the clobber set
   (fi_clobbers), use set (fi_uses), static chain (fi_static_chain),
   return value (fi_result) and the parameters (fi_parm_base + n).  */

static varinfo_t
create_function_info_for (tree decl, const char *name, bool add_id,
			  bool nonlocal_p)
{
  struct function *fn = DECL_STRUCT_FUNCTION (decl);
  varinfo_t vi, prev_vi;
  tree arg;
  unsigned int i;
  bool is_varargs = false;
  unsigned int num_args = count_num_arguments (decl, &is_varargs);

  /* Create the variable info.  */

  vi = new_var_info (decl, name, add_id);
  vi->offset = 0;
  vi->size = 1;
  vi->fullsize = fi_parm_base + num_args;
  vi->is_fn_info = 1;
  vi->may_have_pointers = false;
  if (is_varargs)
    /* With varargs the number of parameter fields is unbounded.  */
    vi->fullsize = ~0;
  insert_vi_for_tree (vi->decl, vi);

  prev_vi = vi;

  /* Create a variable for things the function clobbers and one for
     things the function uses.  */
    {
      varinfo_t clobbervi, usevi;
      const char *newname;
      char *tempname;

      tempname = xasprintf ("%s.clobber", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      clobbervi = new_var_info (NULL, newname, false);
      clobbervi->offset = fi_clobbers;
      clobbervi->size = 1;
      clobbervi->fullsize = vi->fullsize;
      clobbervi->is_full_var = true;
      clobbervi->is_global_var = false;
      clobbervi->is_reg_var = true;

      /* Fields are chained in increasing offset order.  */
      gcc_assert (prev_vi->offset < clobbervi->offset);
      prev_vi->next = clobbervi->id;
      prev_vi = clobbervi;

      tempname = xasprintf ("%s.use", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      usevi = new_var_info (NULL, newname, false);
      usevi->offset = fi_uses;
      usevi->size = 1;
      usevi->fullsize = vi->fullsize;
      usevi->is_full_var = true;
      usevi->is_global_var = false;
      usevi->is_reg_var = true;

      gcc_assert (prev_vi->offset < usevi->offset);
      prev_vi->next = usevi->id;
      prev_vi = usevi;
    }

  /* And one for the static chain.  */
  if (fn->static_chain_decl != NULL_TREE)
    {
      varinfo_t chainvi;
      const char *newname;
      char *tempname;

      tempname = xasprintf ("%s.chain", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      chainvi = new_var_info (fn->static_chain_decl, newname, false);
      chainvi->offset = fi_static_chain;
      chainvi->size = 1;
      chainvi->fullsize = vi->fullsize;
      chainvi->is_full_var = true;
      chainvi->is_global_var = false;

      insert_vi_for_tree (fn->static_chain_decl, chainvi);

      if (nonlocal_p
	  && chainvi->may_have_pointers)
	make_constraint_from (chainvi, nonlocal_id);

      gcc_assert (prev_vi->offset < chainvi->offset);
      prev_vi->next = chainvi->id;
      prev_vi = chainvi;
    }

  /* Create a variable for the return var.  */
  if (DECL_RESULT (decl) != NULL
      || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    {
      varinfo_t resultvi;
      const char *newname;
      char *tempname;
      tree resultdecl = decl;

      if (DECL_RESULT (decl))
	resultdecl = DECL_RESULT (decl);

      tempname = xasprintf ("%s.result", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      resultvi = new_var_info (resultdecl, newname, false);
      resultvi->offset = fi_result;
      resultvi->size = 1;
      resultvi->fullsize = vi->fullsize;
      resultvi->is_full_var = true;
      if (DECL_RESULT (decl))
	resultvi->may_have_pointers = true;

      if (DECL_RESULT (decl))
	insert_vi_for_tree (DECL_RESULT (decl), resultvi);

      /* A DECL_BY_REFERENCE result is a pointer the caller provides;
	 initialize it from NONLOCAL.  */
      if (nonlocal_p
	  && DECL_RESULT (decl)
	  && DECL_BY_REFERENCE (DECL_RESULT (decl)))
	make_constraint_from (resultvi, nonlocal_id);

      gcc_assert (prev_vi->offset < resultvi->offset);
      prev_vi->next = resultvi->id;
      prev_vi = resultvi;
    }

  /* We also need to make function return values escape.  Nothing
     escapes by returning from main though.  */
  if (nonlocal_p
      && !MAIN_NAME_P (DECL_NAME (decl)))
    {
      varinfo_t fi, rvi;
      fi = lookup_vi_for_tree (decl);
      rvi = first_vi_for_offset (fi, fi_result);
      if (rvi && rvi->offset == fi_result)
	make_copy_constraint (get_varinfo (escaped_id), rvi->id);
    }

  /* Set up variables for each argument.  */
  arg = DECL_ARGUMENTS (decl);
  for (i = 0; i < num_args; i++)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree argdecl = decl;

      if (arg)
	argdecl = arg;

      tempname = xasprintf ("%s.arg%d", name, i);
      newname = ggc_strdup (tempname);
      free (tempname);

      argvi = new_var_info (argdecl, newname, false);
      argvi->offset = fi_parm_base + i;
      argvi->size = 1;
      argvi->is_full_var = true;
      argvi->fullsize = vi->fullsize;
      if (arg)
	argvi->may_have_pointers = true;

      if (arg)
	insert_vi_for_tree (arg, argvi);

      if (nonlocal_p
	  && argvi->may_have_pointers)
	make_constraint_from (argvi, nonlocal_id);

      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi->id;
      prev_vi = argvi;
      if (arg)
	arg = DECL_CHAIN (arg);
    }

  /* Add one representative for all further args.  */
  if (is_varargs)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree decl;

      tempname = xasprintf ("%s.varargs", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      /* We need sth that can be pointed to for va_start.  */
      decl = build_fake_var_decl (ptr_type_node);

      argvi = new_var_info (decl, newname, false);
      argvi->offset = fi_parm_base + num_args;
      argvi->size = ~0;
      argvi->is_full_var = true;
      argvi->is_heap_var = true;
      argvi->fullsize = vi->fullsize;

      if (nonlocal_p
	  && argvi->may_have_pointers)
	make_constraint_from (argvi, nonlocal_id);

      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi->id;
    }

  return vi;
}
6072
6073
6074 /* Return true if FIELDSTACK contains fields that overlap.
6075 FIELDSTACK is assumed to be sorted by offset. */
6076
6077 static bool
check_for_overlaps(vec<fieldoff_s> fieldstack)6078 check_for_overlaps (vec<fieldoff_s> fieldstack)
6079 {
6080 fieldoff_s *fo = NULL;
6081 unsigned int i;
6082 HOST_WIDE_INT lastoffset = -1;
6083
6084 FOR_EACH_VEC_ELT (fieldstack, i, fo)
6085 {
6086 if (fo->offset == lastoffset)
6087 return true;
6088 lastoffset = fo->offset;
6089 }
6090 return false;
6091 }
6092
6093 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
6094 This will also create any varinfo structures necessary for fields
6095 of DECL. DECL is a function parameter if HANDLE_PARAM is set.
6096 HANDLED_STRUCT_TYPE is used to register struct types reached by following
6097 restrict pointers. This is needed to prevent infinite recursion.
6098 If ADD_RESTRICT, pretend that the pointer NAME is restrict even if DECL
6099 does not advertise it. */
6100
static varinfo_t
create_variable_info_for_1 (tree decl, const char *name, bool add_id,
			    bool handle_param, bitmap handled_struct_type,
			    bool add_restrict = false)
{
  varinfo_t vi, newvi;
  tree decl_type = TREE_TYPE (decl);
  tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
  auto_vec<fieldoff_s> fieldstack;
  fieldoff_s *fo;
  unsigned int i;

  /* A decl without a known constant size gets a single conservative
     varinfo covering everything.  */
  if (!declsize
      || !tree_fits_uhwi_p (declsize))
    {
      vi = new_var_info (decl, name, add_id);
      vi->offset = 0;
      vi->size = ~0;
      vi->fullsize = ~0;
      vi->is_unknown_size_var = true;
      vi->is_full_var = true;
      vi->may_have_pointers = true;
      return vi;
    }

  /* Collect field information.  */
  if (use_field_sensitive
      && var_can_have_subvars (decl)
      /* ??? Force us to not use subfields for globals in IPA mode.
	 Else we'd have to parse arbitrary initializers.  */
      && !(in_ipa_mode
	   && is_global_var (decl)))
    {
      fieldoff_s *fo = NULL;
      bool notokay = false;
      unsigned int i;

      push_fields_onto_fieldstack (decl_type, &fieldstack, 0);

      /* A field with unknown size or negative offset defeats
	 field-sensitivity for the whole decl.  */
      for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
	if (fo->has_unknown_size
	    || fo->offset < 0)
	  {
	    notokay = true;
	    break;
	  }

      /* We can't sort them if we have a field with a variable sized type,
	 which will make notokay = true.  In that case, we are going to return
	 without creating varinfos for the fields anyway, so sorting them is a
	 waste to boot.  */
      if (!notokay)
	{
	  sort_fieldstack (fieldstack);
	  /* Due to some C++ FE issues, like PR 22488, we might end up
	     what appear to be overlapping fields even though they,
	     in reality, do not overlap.  Until the C++ FE is fixed,
	     we will simply disable field-sensitivity for these cases.  */
	  notokay = check_for_overlaps (fieldstack);
	}

      /* Dropping the fieldstack makes us fall back to a single
	 full variable below.  */
      if (notokay)
	fieldstack.release ();
    }

  /* If we didn't end up collecting sub-variables create a full
     variable for the decl.  */
  if (fieldstack.length () == 0
      || fieldstack.length () > (unsigned)param_max_fields_for_field_sensitive)
    {
      vi = new_var_info (decl, name, add_id);
      vi->offset = 0;
      vi->may_have_pointers = true;
      vi->fullsize = tree_to_uhwi (declsize);
      vi->size = vi->fullsize;
      vi->is_full_var = true;
      if (POINTER_TYPE_P (decl_type)
	  && (TYPE_RESTRICT (decl_type) || add_restrict))
	vi->only_restrict_pointers = 1;
      /* For a restrict qualified pointer parameter build a
	 PARM_NOALIAS representative for the pointed-to object.
	 HANDLED_STRUCT_TYPE guards against infinite recursion
	 through restrict pointer members of the pointed-to type.  */
      if (vi->only_restrict_pointers
	  && !type_contains_placeholder_p (TREE_TYPE (decl_type))
	  && handle_param
	  && !bitmap_bit_p (handled_struct_type,
			    TYPE_UID (TREE_TYPE (decl_type))))
	{
	  varinfo_t rvi;
	  tree heapvar = build_fake_var_decl (TREE_TYPE (decl_type));
	  DECL_EXTERNAL (heapvar) = 1;
	  if (var_can_have_subvars (heapvar))
	    bitmap_set_bit (handled_struct_type,
			    TYPE_UID (TREE_TYPE (decl_type)));
	  rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
					    true, handled_struct_type);
	  if (var_can_have_subvars (heapvar))
	    bitmap_clear_bit (handled_struct_type,
			      TYPE_UID (TREE_TYPE (decl_type)));
	  rvi->is_restrict_var = 1;
	  insert_vi_for_tree (heapvar, rvi);
	  make_constraint_from (vi, rvi->id);
	  make_param_constraints (rvi);
	}
      fieldstack.release ();
      return vi;
    }

  /* Otherwise create one varinfo per field, chained via ->next.  */
  vi = new_var_info (decl, name, add_id);
  vi->fullsize = tree_to_uhwi (declsize);
  if (fieldstack.length () == 1)
    vi->is_full_var = true;
  for (i = 0, newvi = vi;
       fieldstack.iterate (i, &fo);
       ++i, newvi = vi_next (newvi))
    {
      const char *newname = NULL;
      char *tempname;

      if (dump_file)
	{
	  /* Only build a "name.offset+size" dump name when there
	     really are multiple fields.  */
	  if (fieldstack.length () != 1)
	    {
	      tempname
		= xasprintf ("%s." HOST_WIDE_INT_PRINT_DEC
			     "+" HOST_WIDE_INT_PRINT_DEC, name,
			     fo->offset, fo->size);
	      newname = ggc_strdup (tempname);
	      free (tempname);
	    }
	}
      else
	newname = "NULL";

      if (newname)
	newvi->name = newname;
      newvi->offset = fo->offset;
      newvi->size = fo->size;
      newvi->fullsize = vi->fullsize;
      newvi->may_have_pointers = fo->may_have_pointers;
      newvi->only_restrict_pointers = fo->only_restrict_pointers;
      /* As for the full-variable case above, build a PARM_NOALIAS
	 representative for restrict qualified pointer fields of
	 parameters.  */
      if (handle_param
	  && newvi->only_restrict_pointers
	  && !type_contains_placeholder_p (fo->restrict_pointed_type)
	  && !bitmap_bit_p (handled_struct_type,
			    TYPE_UID (fo->restrict_pointed_type)))
	{
	  varinfo_t rvi;
	  tree heapvar = build_fake_var_decl (fo->restrict_pointed_type);
	  DECL_EXTERNAL (heapvar) = 1;
	  if (var_can_have_subvars (heapvar))
	    bitmap_set_bit (handled_struct_type,
			    TYPE_UID (fo->restrict_pointed_type));
	  rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
					    true, handled_struct_type);
	  if (var_can_have_subvars (heapvar))
	    bitmap_clear_bit (handled_struct_type,
			      TYPE_UID (fo->restrict_pointed_type));
	  rvi->is_restrict_var = 1;
	  insert_vi_for_tree (heapvar, rvi);
	  make_constraint_from (newvi, rvi->id);
	  make_param_constraints (rvi);
	}
      /* Chain up the next field varinfo; all fields share head VI.  */
      if (i + 1 < fieldstack.length ())
	{
	  varinfo_t tem = new_var_info (decl, name, false);
	  newvi->next = tem->id;
	  tem->head = vi->id;
	}
    }

  return vi;
}
6271
static unsigned int
create_variable_info_for (tree decl, const char *name, bool add_id)
{
  /* First see if we are dealing with an ifunc resolver call and
     associate that with a call to the resolver function result.  */
  cgraph_node *node;
  if (in_ipa_mode
      && TREE_CODE (decl) == FUNCTION_DECL
      && (node = cgraph_node::get (decl))
      && node->ifunc_resolver)
    {
      varinfo_t fi = get_vi_for_tree (node->get_alias_target ()->decl);
      constraint_expr rhs
	= get_function_part_constraint (fi, fi_result);
      /* Represent the ifunc itself by a fresh register variable that
	 receives the resolver's result.  */
      fi = new_var_info (NULL_TREE, "ifuncres", true);
      fi->is_reg_var = true;
      constraint_expr lhs;
      lhs.type = SCALAR;
      lhs.var = fi->id;
      lhs.offset = 0;
      process_constraint (new_constraint (lhs, rhs));
      insert_vi_for_tree (decl, fi);
      return fi->id;
    }

  varinfo_t vi = create_variable_info_for_1 (decl, name, add_id, false, NULL);
  unsigned int id = vi->id;

  insert_vi_for_tree (decl, vi);

  if (!VAR_P (decl))
    return id;

  /* Create initial constraints for globals.  Iterate over all
     sub-variables (fields) of the decl.  */
  for (; vi; vi = vi_next (vi))
    {
      if (!vi->may_have_pointers
	  || !vi->is_global_var)
	continue;

      /* Mark global restrict qualified pointers.  */
      if ((POINTER_TYPE_P (TREE_TYPE (decl))
	   && TYPE_RESTRICT (TREE_TYPE (decl)))
	  || vi->only_restrict_pointers)
	{
	  varinfo_t rvi
	    = make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT",
						    true);
	  /* ??? For now exclude reads from globals as restrict sources
	     if those are not (indirectly) from incoming parameters.  */
	  rvi->is_restrict_var = false;
	  continue;
	}

      /* In non-IPA mode the initializer from nonlocal is all we need.  */
      if (!in_ipa_mode
	  || DECL_HARD_REGISTER (decl))
	make_copy_constraint (vi, nonlocal_id);

      /* In IPA mode parse the initializer and generate proper constraints
	 for it.  */
      else
	{
	  varpool_node *vnode = varpool_node::get (decl);

	  /* For escaped variables initialize them from nonlocal.  */
	  if (!vnode->all_refs_explicit_p ())
	    make_copy_constraint (vi, nonlocal_id);

	  /* If this is a global variable with an initializer and we are in
	     IPA mode generate constraints for it.  */
	  ipa_ref *ref;
	  for (unsigned idx = 0; vnode->iterate_reference (idx, ref); ++idx)
	    {
	      auto_vec<ce_s> rhsc;
	      struct constraint_expr lhs, *rhsp;
	      unsigned i;
	      /* vi = &referred-decl for each decl referenced by the
		 initializer.  */
	      get_constraint_for_address_of (ref->referred->decl, &rhsc);
	      lhs.var = vi->id;
	      lhs.offset = 0;
	      lhs.type = SCALAR;
	      FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      /* If this is a variable that escapes from the unit
		 the initializer escapes as well.  */
	      if (!vnode->all_refs_explicit_p ())
		{
		  lhs.var = escaped_id;
		  lhs.offset = 0;
		  lhs.type = SCALAR;
		  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		    process_constraint (new_constraint (lhs, *rhsp));
		}
	    }
	}
    }

  return id;
}
6371
6372 /* Print out the points-to solution for VAR to FILE. */
6373
6374 static void
dump_solution_for_var(FILE * file,unsigned int var)6375 dump_solution_for_var (FILE *file, unsigned int var)
6376 {
6377 varinfo_t vi = get_varinfo (var);
6378 unsigned int i;
6379 bitmap_iterator bi;
6380
6381 /* Dump the solution for unified vars anyway, this avoids difficulties
6382 in scanning dumps in the testsuite. */
6383 fprintf (file, "%s = { ", vi->name);
6384 vi = get_varinfo (find (var));
6385 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
6386 fprintf (file, "%s ", get_varinfo (i)->name);
6387 fprintf (file, "}");
6388
6389 /* But note when the variable was unified. */
6390 if (vi->id != var)
6391 fprintf (file, " same as %s", vi->name);
6392
6393 fprintf (file, "\n");
6394 }
6395
6396 /* Print the points-to solution for VAR to stderr. */
6397
DEBUG_FUNCTION void
debug_solution_for_var (unsigned int var)
{
  /* Debug helper: same as dump_solution_for_var but always to stderr.  */
  dump_solution_for_var (stderr, var);
}
6403
6404 /* Register the constraints for function parameter related VI. */
6405
6406 static void
make_param_constraints(varinfo_t vi)6407 make_param_constraints (varinfo_t vi)
6408 {
6409 for (; vi; vi = vi_next (vi))
6410 {
6411 if (vi->only_restrict_pointers)
6412 ;
6413 else if (vi->may_have_pointers)
6414 make_constraint_from (vi, nonlocal_id);
6415
6416 if (vi->is_full_var)
6417 break;
6418 }
6419 }
6420
6421 /* Create varinfo structures for all of the variables in the
6422 function for intraprocedural mode. */
6423
static void
intra_create_variable_infos (struct function *fn)
{
  tree t;
  bitmap handled_struct_type = NULL;
  /* The first parameter of a C++ constructor is treated as restrict
     (passed as ADD_RESTRICT to create_variable_info_for_1).  */
  bool this_parm_in_ctor = DECL_CXX_CONSTRUCTOR_P (fn->decl);

  /* For each incoming pointer argument arg, create the constraint ARG
     = NONLOCAL or a dummy variable if it is a restrict qualified
     passed-by-reference argument.  */
  for (t = DECL_ARGUMENTS (fn->decl); t; t = DECL_CHAIN (t))
    {
      /* Lazily allocate the recursion-guard bitmap used when following
	 restrict pointers into struct types.  */
      if (handled_struct_type == NULL)
	handled_struct_type = BITMAP_ALLOC (NULL);

      varinfo_t p
	= create_variable_info_for_1 (t, alias_get_name (t), false, true,
				      handled_struct_type, this_parm_in_ctor);
      insert_vi_for_tree (t, p);

      make_param_constraints (p);

      /* Only the first parameter gets the constructor treatment.  */
      this_parm_in_ctor = false;
    }

  if (handled_struct_type != NULL)
    BITMAP_FREE (handled_struct_type);

  /* Add a constraint for a result decl that is passed by reference.  */
  if (DECL_RESULT (fn->decl)
      && DECL_BY_REFERENCE (DECL_RESULT (fn->decl)))
    {
      varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (fn->decl));

      for (p = result_vi; p; p = vi_next (p))
	make_constraint_from (p, nonlocal_id);
    }

  /* Add a constraint for the incoming static chain parameter.  */
  if (fn->static_chain_decl != NULL_TREE)
    {
      varinfo_t p, chain_vi = get_vi_for_tree (fn->static_chain_decl);

      for (p = chain_vi; p; p = vi_next (p))
	make_constraint_from (p, nonlocal_id);
    }
}
6471
6472 /* Structure used to put solution bitmaps in a hashtable so they can
6473 be shared among variables with the same points-to set. */
6474
typedef struct shared_bitmap_info
{
  /* The shared points-to variable bitmap.  */
  bitmap pt_vars;
  /* Cached bitmap_hash of PT_VARS, computed once at insertion.  */
  hashval_t hashcode;
} *shared_bitmap_info_t;
typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
6481
6482 /* Shared_bitmap hashtable helpers. */
6483
/* Hash/equality callbacks for the shared_bitmap hashtable; entries are
   freed with free () when the table is destroyed (free_ptr_hash).  */
struct shared_bitmap_hasher : free_ptr_hash <shared_bitmap_info>
{
  static inline hashval_t hash (const shared_bitmap_info *);
  static inline bool equal (const shared_bitmap_info *,
			    const shared_bitmap_info *);
};
6490
6491 /* Hash function for a shared_bitmap_info_t */
6492
inline hashval_t
shared_bitmap_hasher::hash (const shared_bitmap_info *bi)
{
  /* Return the hash cached at insertion time; never rehash the bitmap.  */
  return bi->hashcode;
}
6498
6499 /* Equality function for two shared_bitmap_info_t's. */
6500
inline bool
shared_bitmap_hasher::equal (const shared_bitmap_info *sbi1,
			     const shared_bitmap_info *sbi2)
{
  /* Entries are equal iff their bitmaps have identical contents.  */
  return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
}
6507
/* Shared_bitmap hashtable, used by shared_bitmap_lookup/shared_bitmap_add
   to share identical points-to bitmaps among variables.  */

static hash_table<shared_bitmap_hasher> *shared_bitmap_table;
6511
6512 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
6513 existing instance if there is one, NULL otherwise. */
6514
6515 static bitmap
shared_bitmap_lookup(bitmap pt_vars)6516 shared_bitmap_lookup (bitmap pt_vars)
6517 {
6518 shared_bitmap_info **slot;
6519 struct shared_bitmap_info sbi;
6520
6521 sbi.pt_vars = pt_vars;
6522 sbi.hashcode = bitmap_hash (pt_vars);
6523
6524 slot = shared_bitmap_table->find_slot (&sbi, NO_INSERT);
6525 if (!slot)
6526 return NULL;
6527 else
6528 return (*slot)->pt_vars;
6529 }
6530
6531
6532 /* Add a bitmap to the shared bitmap hashtable. */
6533
6534 static void
shared_bitmap_add(bitmap pt_vars)6535 shared_bitmap_add (bitmap pt_vars)
6536 {
6537 shared_bitmap_info **slot;
6538 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
6539
6540 sbi->pt_vars = pt_vars;
6541 sbi->hashcode = bitmap_hash (pt_vars);
6542
6543 slot = shared_bitmap_table->find_slot (sbi, INSERT);
6544 gcc_assert (!*slot);
6545 *slot = sbi;
6546 }
6547
6548
6549 /* Set bits in INTO corresponding to the variable uids in solution set FROM. */
6550
static void
set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt,
		   tree fndecl)
{
  unsigned int i;
  bitmap_iterator bi;
  varinfo_t escaped_vi = get_varinfo (find (escaped_id));
  /* If ESCAPED contains ANYTHING then every variable in FROM is
     considered escaped.  */
  bool everything_escaped
    = escaped_vi->solution && bitmap_bit_p (escaped_vi->solution, anything_id);

  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* Artificial variables are represented by flags on PT (set by
	 find_what_var_points_to), not by uid bits in INTO.  */
      if (vi->is_artificial_var)
	continue;

      if (everything_escaped
	  || (escaped_vi->solution
	      && bitmap_bit_p (escaped_vi->solution, i)))
	{
	  pt->vars_contains_escaped = true;
	  pt->vars_contains_escaped_heap |= vi->is_heap_var;
	}

      if (vi->is_restrict_var)
	pt->vars_contains_restrict = true;

      if (VAR_P (vi->decl)
	  || TREE_CODE (vi->decl) == PARM_DECL
	  || TREE_CODE (vi->decl) == RESULT_DECL)
	{
	  /* If we are in IPA mode we will not recompute points-to
	     sets after inlining so make sure they stay valid.  */
	  if (in_ipa_mode
	      && !DECL_PT_UID_SET_P (vi->decl))
	    SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));

	  /* Add the decl to the points-to set.  Note that the points-to
	     set contains global variables.  */
	  bitmap_set_bit (into, DECL_PT_UID (vi->decl));
	  if (vi->is_global_var
	      /* In IPA mode the escaped_heap trick doesn't work as
		 ESCAPED is escaped from the unit but
		 pt_solution_includes_global needs to answer true for
		 all variables not automatic within a function.
		 For the same reason is_global_var is not the
		 correct flag to track - local variables from other
		 functions also need to be considered global.
		 Conveniently all HEAP vars are not put in function
		 scope.  */
	      || (in_ipa_mode
		  && fndecl
		  && ! auto_var_in_fn_p (vi->decl, fndecl)))
	    pt->vars_contains_nonlocal = true;

	  /* If we have a variable that is interposable record that fact
	     for pointer comparison simplification.  */
	  if (VAR_P (vi->decl)
	      && (TREE_STATIC (vi->decl) || DECL_EXTERNAL (vi->decl))
	      && ! decl_binds_to_current_def_p (vi->decl))
	    pt->vars_contains_interposable = true;

	  /* If this is a local variable we can have overlapping lifetime
	     of different function invocations through recursion duplicate
	     it with its shadow variable.  */
	  if (in_ipa_mode
	      && vi->shadow_var_uid != 0)
	    {
	      bitmap_set_bit (into, vi->shadow_var_uid);
	      pt->vars_contains_nonlocal = true;
	    }
	}

      else if (TREE_CODE (vi->decl) == FUNCTION_DECL
	       || TREE_CODE (vi->decl) == LABEL_DECL)
	{
	  /* Nothing should read/write from/to code so we can
	     save bits by not including them in the points-to bitmaps.
	     Still mark the points-to set as containing global memory
	     to make code-patching possible - see PR70128.  */
	  pt->vars_contains_nonlocal = true;
	}
    }
}
6636
6637
6638 /* Compute the points-to solution *PT for the variable VI. */
6639
static struct pt_solution
find_what_var_points_to (tree fndecl, varinfo_t orig_vi)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap finished_solution;
  bitmap result;
  varinfo_t vi;
  struct pt_solution *pt;

  /* This variable may have been collapsed, let's get the real
     variable.  */
  vi = get_varinfo (find (orig_vi->id));

  /* See if we have already computed the solution and return it.  */
  pt_solution **slot = &final_solutions->get_or_insert (vi);
  if (*slot != NULL)
    return **slot;

  /* Cache the solution on the representative varinfo.  */
  *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
  memset (pt, 0, sizeof (struct pt_solution));

  /* Translate artificial variables into SSA_NAME_PTR_INFO
     attributes.  */
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      if (vi->is_artificial_var)
	{
	  if (vi->id == nothing_id)
	    pt->null = 1;
	  else if (vi->id == escaped_id)
	    {
	      if (in_ipa_mode)
		pt->ipa_escaped = 1;
	      else
		pt->escaped = 1;
	      /* Expand some special vars of ESCAPED in-place here.  */
	      varinfo_t evi = get_varinfo (find (escaped_id));
	      if (bitmap_bit_p (evi->solution, nonlocal_id))
		pt->nonlocal = 1;
	    }
	  else if (vi->id == nonlocal_id)
	    pt->nonlocal = 1;
	  else if (vi->id == string_id)
	    /* Nobody cares - STRING_CSTs are read-only entities.  */
	    ;
	  else if (vi->id == anything_id
		   || vi->id == integer_id)
	    pt->anything = 1;
	}
    }

  /* Instead of doing extra work, simply do not create
     elaborate points-to information for pt_anything pointers.  */
  if (pt->anything)
    return *pt;

  /* Share the final set of variables when possible.  */
  finished_solution = BITMAP_GGC_ALLOC ();
  stats.points_to_sets_created++;

  set_uids_in_ptset (finished_solution, vi->solution, pt, fndecl);
  result = shared_bitmap_lookup (finished_solution);
  if (!result)
    {
      /* First time we see this set - register it for sharing.  */
      shared_bitmap_add (finished_solution);
      pt->vars = finished_solution;
    }
  else
    {
      /* Reuse the previously registered identical set and recycle
	 the freshly built one.  */
      pt->vars = result;
      bitmap_clear (finished_solution);
    }

  return *pt;
}
6718
6719 /* Given a pointer variable P, fill in its points-to set. */
6720
static void
find_what_p_points_to (tree fndecl, tree p)
{
  struct ptr_info_def *pi;
  tree lookup_p = p;
  varinfo_t vi;
  /* Query the nonnull property before we overwrite pi->pt below.  */
  bool nonnull = get_ptr_nonnull (p);

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (p) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (p)
      && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
    lookup_p = SSA_NAME_VAR (p);

  /* No varinfo means the pointer was not part of the constraint
     system; leave its points-to info alone.  */
  vi = lookup_vi_for_tree (lookup_p);
  if (!vi)
    return;

  pi = get_ptr_info (p);
  pi->pt = find_what_var_points_to (fndecl, vi);
  /* Conservatively set to NULL from PTA (to true).  */
  pi->pt.null = 1;
  /* Preserve pointer nonnull computed by VRP.  See get_ptr_nonnull
     in gcc/tree-ssaname.c for more information.  */
  if (nonnull)
    set_ptr_nonnull (p);
}
6750
6751
6752 /* Query statistics for points-to solutions. */
6753
static struct {
  /* pt_solution_includes queries that answered "may alias".  */
  unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
  /* pt_solution_includes queries that disambiguated ("no alias").  */
  unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
  /* pt_solutions_intersect queries that answered "may alias".  */
  unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
  /* pt_solutions_intersect queries that disambiguated ("no alias").  */
  unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
} pta_stats;
6760
6761 void
dump_pta_stats(FILE * s)6762 dump_pta_stats (FILE *s)
6763 {
6764 fprintf (s, "\nPTA query stats:\n");
6765 fprintf (s, " pt_solution_includes: "
6766 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6767 HOST_WIDE_INT_PRINT_DEC" queries\n",
6768 pta_stats.pt_solution_includes_no_alias,
6769 pta_stats.pt_solution_includes_no_alias
6770 + pta_stats.pt_solution_includes_may_alias);
6771 fprintf (s, " pt_solutions_intersect: "
6772 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6773 HOST_WIDE_INT_PRINT_DEC" queries\n",
6774 pta_stats.pt_solutions_intersect_no_alias,
6775 pta_stats.pt_solutions_intersect_no_alias
6776 + pta_stats.pt_solutions_intersect_may_alias);
6777 }
6778
6779
6780 /* Reset the points-to solution *PT to a conservative default
6781 (point to anything). */
6782
void
pt_solution_reset (struct pt_solution *pt)
{
  memset (pt, 0, sizeof (struct pt_solution));
  /* ANYTHING subsumes all other solution bits; NULL is included
     conservatively.  */
  pt->anything = true;
  pt->null = true;
}
6790
/* Set the points-to solution *PT to point only to the variables
   in VARS.  VARS_CONTAINS_NONLOCAL specifies whether that contains
   global variables.  */

void
pt_solution_set (struct pt_solution *pt, bitmap vars,
		 bool vars_contains_nonlocal)
{
  memset (pt, 0, sizeof (struct pt_solution));
  pt->vars = vars;
  pt->vars_contains_nonlocal = vars_contains_nonlocal;
  /* The set contains escaped variables if it intersects the
     function's escaped solution.  */
  pt->vars_contains_escaped
    = (cfun->gimple_df->escaped.anything
       || bitmap_intersect_p (cfun->gimple_df->escaped.vars, vars));
}
6807
6808 /* Set the points-to solution *PT to point only to the variable VAR. */
6809
void
pt_solution_set_var (struct pt_solution *pt, tree var)
{
  memset (pt, 0, sizeof (struct pt_solution));
  pt->vars = BITMAP_GGC_ALLOC ();
  /* A singleton set containing just VAR's points-to uid.  */
  bitmap_set_bit (pt->vars, DECL_PT_UID (var));
  pt->vars_contains_nonlocal = is_global_var (var);
  /* VAR is escaped if it is a member of the function's escaped
     solution.  */
  pt->vars_contains_escaped
    = (cfun->gimple_df->escaped.anything
       || bitmap_bit_p (cfun->gimple_df->escaped.vars, DECL_PT_UID (var)));
}
6821
6822 /* Computes the union of the points-to solutions *DEST and *SRC and
6823 stores the result in *DEST. This changes the points-to bitmap
6824 of *DEST and thus may not be used if that might be shared.
6825 The points-to bitmap of *SRC and *DEST will not be shared after
6826 this function if they were not before. */
6827
6828 static void
pt_solution_ior_into(struct pt_solution * dest,struct pt_solution * src)6829 pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
6830 {
6831 dest->anything |= src->anything;
6832 if (dest->anything)
6833 {
6834 pt_solution_reset (dest);
6835 return;
6836 }
6837
6838 dest->nonlocal |= src->nonlocal;
6839 dest->escaped |= src->escaped;
6840 dest->ipa_escaped |= src->ipa_escaped;
6841 dest->null |= src->null;
6842 dest->vars_contains_nonlocal |= src->vars_contains_nonlocal;
6843 dest->vars_contains_escaped |= src->vars_contains_escaped;
6844 dest->vars_contains_escaped_heap |= src->vars_contains_escaped_heap;
6845 if (!src->vars)
6846 return;
6847
6848 if (!dest->vars)
6849 dest->vars = BITMAP_GGC_ALLOC ();
6850 bitmap_ior_into (dest->vars, src->vars);
6851 }
6852
6853 /* Return true if the points-to solution *PT is empty. */
6854
6855 bool
pt_solution_empty_p(const pt_solution * pt)6856 pt_solution_empty_p (const pt_solution *pt)
6857 {
6858 if (pt->anything
6859 || pt->nonlocal)
6860 return false;
6861
6862 if (pt->vars
6863 && !bitmap_empty_p (pt->vars))
6864 return false;
6865
6866 /* If the solution includes ESCAPED, check if that is empty. */
6867 if (pt->escaped
6868 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
6869 return false;
6870
6871 /* If the solution includes ESCAPED, check if that is empty. */
6872 if (pt->ipa_escaped
6873 && !pt_solution_empty_p (&ipa_escaped_pt))
6874 return false;
6875
6876 return true;
6877 }
6878
6879 /* Return true if the points-to solution *PT only point to a single var, and
6880 return the var uid in *UID. */
6881
6882 bool
pt_solution_singleton_or_null_p(struct pt_solution * pt,unsigned * uid)6883 pt_solution_singleton_or_null_p (struct pt_solution *pt, unsigned *uid)
6884 {
6885 if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
6886 || pt->vars == NULL
6887 || !bitmap_single_bit_set_p (pt->vars))
6888 return false;
6889
6890 *uid = bitmap_first_set_bit (pt->vars);
6891 return true;
6892 }
6893
6894 /* Return true if the points-to solution *PT includes global memory. */
6895
6896 bool
pt_solution_includes_global(struct pt_solution * pt)6897 pt_solution_includes_global (struct pt_solution *pt)
6898 {
6899 if (pt->anything
6900 || pt->nonlocal
6901 || pt->vars_contains_nonlocal
6902 /* The following is a hack to make the malloc escape hack work.
6903 In reality we'd need different sets for escaped-through-return
6904 and escaped-to-callees and passes would need to be updated. */
6905 || pt->vars_contains_escaped_heap)
6906 return true;
6907
6908 /* 'escaped' is also a placeholder so we have to look into it. */
6909 if (pt->escaped)
6910 return pt_solution_includes_global (&cfun->gimple_df->escaped);
6911
6912 if (pt->ipa_escaped)
6913 return pt_solution_includes_global (&ipa_escaped_pt);
6914
6915 return false;
6916 }
6917
6918 /* Return true if the points-to solution *PT includes the variable
6919 declaration DECL. */
6920
6921 static bool
pt_solution_includes_1(struct pt_solution * pt,const_tree decl)6922 pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
6923 {
6924 if (pt->anything)
6925 return true;
6926
6927 if (pt->nonlocal
6928 && is_global_var (decl))
6929 return true;
6930
6931 if (pt->vars
6932 && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
6933 return true;
6934
6935 /* If the solution includes ESCAPED, check it. */
6936 if (pt->escaped
6937 && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
6938 return true;
6939
6940 /* If the solution includes ESCAPED, check it. */
6941 if (pt->ipa_escaped
6942 && pt_solution_includes_1 (&ipa_escaped_pt, decl))
6943 return true;
6944
6945 return false;
6946 }
6947
6948 bool
pt_solution_includes(struct pt_solution * pt,const_tree decl)6949 pt_solution_includes (struct pt_solution *pt, const_tree decl)
6950 {
6951 bool res = pt_solution_includes_1 (pt, decl);
6952 if (res)
6953 ++pta_stats.pt_solution_includes_may_alias;
6954 else
6955 ++pta_stats.pt_solution_includes_no_alias;
6956 return res;
6957 }
6958
6959 /* Return true if both points-to solutions PT1 and PT2 have a non-empty
6960 intersection. */
6961
static bool
pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
{
  if (pt1->anything || pt2->anything)
    return true;

  /* If either points to unknown global memory and the other points to
     any global memory they alias.  */
  if ((pt1->nonlocal
       && (pt2->nonlocal
	   || pt2->vars_contains_nonlocal))
      || (pt2->nonlocal
	  && pt1->vars_contains_nonlocal))
    return true;

  /* If either points to all escaped memory and the other points to
     any escaped memory they alias.  */
  if ((pt1->escaped
       && (pt2->escaped
	   || pt2->vars_contains_escaped))
      || (pt2->escaped
	  && pt1->vars_contains_escaped))
    return true;

  /* Check the escaped solution if required.
     ??? Do we need to check the local against the IPA escaped sets?  */
  if ((pt1->ipa_escaped || pt2->ipa_escaped)
      && !pt_solution_empty_p (&ipa_escaped_pt))
    {
      /* If both point to escaped memory and that solution
	 is not empty they alias.  */
      if (pt1->ipa_escaped && pt2->ipa_escaped)
	return true;

      /* If either points to escaped memory see if the escaped solution
	 intersects with the other.  */
      if ((pt1->ipa_escaped
	   && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
	  || (pt2->ipa_escaped
	      && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
	return true;
    }

  /* Now both pointers alias if their points-to solution intersects.  */
  return (pt1->vars
	  && pt2->vars
	  && bitmap_intersect_p (pt1->vars, pt2->vars));
}
7010
7011 bool
pt_solutions_intersect(struct pt_solution * pt1,struct pt_solution * pt2)7012 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
7013 {
7014 bool res = pt_solutions_intersect_1 (pt1, pt2);
7015 if (res)
7016 ++pta_stats.pt_solutions_intersect_may_alias;
7017 else
7018 ++pta_stats.pt_solutions_intersect_no_alias;
7019 return res;
7020 }
7021
7022
7023 /* Dump points-to information to OUTFILE. */
7024
7025 static void
dump_sa_points_to_info(FILE * outfile)7026 dump_sa_points_to_info (FILE *outfile)
7027 {
7028 unsigned int i;
7029
7030 fprintf (outfile, "\nPoints-to sets\n\n");
7031
7032 if (dump_flags & TDF_STATS)
7033 {
7034 fprintf (outfile, "Stats:\n");
7035 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
7036 fprintf (outfile, "Non-pointer vars: %d\n",
7037 stats.nonpointer_vars);
7038 fprintf (outfile, "Statically unified vars: %d\n",
7039 stats.unified_vars_static);
7040 fprintf (outfile, "Dynamically unified vars: %d\n",
7041 stats.unified_vars_dynamic);
7042 fprintf (outfile, "Iterations: %d\n", stats.iterations);
7043 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
7044 fprintf (outfile, "Number of implicit edges: %d\n",
7045 stats.num_implicit_edges);
7046 }
7047
7048 for (i = 1; i < varmap.length (); i++)
7049 {
7050 varinfo_t vi = get_varinfo (i);
7051 if (!vi->may_have_pointers)
7052 continue;
7053 dump_solution_for_var (outfile, i);
7054 }
7055 }
7056
7057
7058 /* Debug points-to information to stderr. */
7059
DEBUG_FUNCTION void
debug_sa_points_to_info (void)
{
  /* Debug helper: same as dump_sa_points_to_info but always to stderr.  */
  dump_sa_points_to_info (stderr);
}
7065
7066
7067 /* Initialize the always-existing constraint variables for NULL
7068 ANYTHING, READONLY, and INTEGER */
7069
7070 static void
init_base_vars(void)7071 init_base_vars (void)
7072 {
7073 struct constraint_expr lhs, rhs;
7074 varinfo_t var_anything;
7075 varinfo_t var_nothing;
7076 varinfo_t var_string;
7077 varinfo_t var_escaped;
7078 varinfo_t var_nonlocal;
7079 varinfo_t var_storedanything;
7080 varinfo_t var_integer;
7081
7082 /* Variable ID zero is reserved and should be NULL. */
7083 varmap.safe_push (NULL);
7084
7085 /* Create the NULL variable, used to represent that a variable points
7086 to NULL. */
7087 var_nothing = new_var_info (NULL_TREE, "NULL", false);
7088 gcc_assert (var_nothing->id == nothing_id);
7089 var_nothing->is_artificial_var = 1;
7090 var_nothing->offset = 0;
7091 var_nothing->size = ~0;
7092 var_nothing->fullsize = ~0;
7093 var_nothing->is_special_var = 1;
7094 var_nothing->may_have_pointers = 0;
7095 var_nothing->is_global_var = 0;
7096
7097 /* Create the ANYTHING variable, used to represent that a variable
7098 points to some unknown piece of memory. */
7099 var_anything = new_var_info (NULL_TREE, "ANYTHING", false);
7100 gcc_assert (var_anything->id == anything_id);
7101 var_anything->is_artificial_var = 1;
7102 var_anything->size = ~0;
7103 var_anything->offset = 0;
7104 var_anything->fullsize = ~0;
7105 var_anything->is_special_var = 1;
7106
7107 /* Anything points to anything. This makes deref constraints just
7108 work in the presence of linked list and other p = *p type loops,
7109 by saying that *ANYTHING = ANYTHING. */
7110 lhs.type = SCALAR;
7111 lhs.var = anything_id;
7112 lhs.offset = 0;
7113 rhs.type = ADDRESSOF;
7114 rhs.var = anything_id;
7115 rhs.offset = 0;
7116
7117 /* This specifically does not use process_constraint because
7118 process_constraint ignores all anything = anything constraints, since all
7119 but this one are redundant. */
7120 constraints.safe_push (new_constraint (lhs, rhs));
7121
7122 /* Create the STRING variable, used to represent that a variable
7123 points to a string literal. String literals don't contain
7124 pointers so STRING doesn't point to anything. */
7125 var_string = new_var_info (NULL_TREE, "STRING", false);
7126 gcc_assert (var_string->id == string_id);
7127 var_string->is_artificial_var = 1;
7128 var_string->offset = 0;
7129 var_string->size = ~0;
7130 var_string->fullsize = ~0;
7131 var_string->is_special_var = 1;
7132 var_string->may_have_pointers = 0;
7133
7134 /* Create the ESCAPED variable, used to represent the set of escaped
7135 memory. */
7136 var_escaped = new_var_info (NULL_TREE, "ESCAPED", false);
7137 gcc_assert (var_escaped->id == escaped_id);
7138 var_escaped->is_artificial_var = 1;
7139 var_escaped->offset = 0;
7140 var_escaped->size = ~0;
7141 var_escaped->fullsize = ~0;
7142 var_escaped->is_special_var = 0;
7143
7144 /* Create the NONLOCAL variable, used to represent the set of nonlocal
7145 memory. */
7146 var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL", false);
7147 gcc_assert (var_nonlocal->id == nonlocal_id);
7148 var_nonlocal->is_artificial_var = 1;
7149 var_nonlocal->offset = 0;
7150 var_nonlocal->size = ~0;
7151 var_nonlocal->fullsize = ~0;
7152 var_nonlocal->is_special_var = 1;
7153
7154 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
7155 lhs.type = SCALAR;
7156 lhs.var = escaped_id;
7157 lhs.offset = 0;
7158 rhs.type = DEREF;
7159 rhs.var = escaped_id;
7160 rhs.offset = 0;
7161 process_constraint (new_constraint (lhs, rhs));
7162
7163 /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
7164 whole variable escapes. */
7165 lhs.type = SCALAR;
7166 lhs.var = escaped_id;
7167 lhs.offset = 0;
7168 rhs.type = SCALAR;
7169 rhs.var = escaped_id;
7170 rhs.offset = UNKNOWN_OFFSET;
7171 process_constraint (new_constraint (lhs, rhs));
7172
7173 /* *ESCAPED = NONLOCAL. This is true because we have to assume
7174 everything pointed to by escaped points to what global memory can
7175 point to. */
7176 lhs.type = DEREF;
7177 lhs.var = escaped_id;
7178 lhs.offset = 0;
7179 rhs.type = SCALAR;
7180 rhs.var = nonlocal_id;
7181 rhs.offset = 0;
7182 process_constraint (new_constraint (lhs, rhs));
7183
7184 /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because
7185 global memory may point to global memory and escaped memory. */
7186 lhs.type = SCALAR;
7187 lhs.var = nonlocal_id;
7188 lhs.offset = 0;
7189 rhs.type = ADDRESSOF;
7190 rhs.var = nonlocal_id;
7191 rhs.offset = 0;
7192 process_constraint (new_constraint (lhs, rhs));
7193 rhs.type = ADDRESSOF;
7194 rhs.var = escaped_id;
7195 rhs.offset = 0;
7196 process_constraint (new_constraint (lhs, rhs));
7197
7198 /* Create the STOREDANYTHING variable, used to represent the set of
7199 variables stored to *ANYTHING. */
7200 var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING", false);
7201 gcc_assert (var_storedanything->id == storedanything_id);
7202 var_storedanything->is_artificial_var = 1;
7203 var_storedanything->offset = 0;
7204 var_storedanything->size = ~0;
7205 var_storedanything->fullsize = ~0;
7206 var_storedanything->is_special_var = 0;
7207
7208 /* Create the INTEGER variable, used to represent that a variable points
7209 to what an INTEGER "points to". */
7210 var_integer = new_var_info (NULL_TREE, "INTEGER", false);
7211 gcc_assert (var_integer->id == integer_id);
7212 var_integer->is_artificial_var = 1;
7213 var_integer->size = ~0;
7214 var_integer->fullsize = ~0;
7215 var_integer->offset = 0;
7216 var_integer->is_special_var = 1;
7217
7218 /* INTEGER = ANYTHING, because we don't know where a dereference of
7219 a random integer will point to. */
7220 lhs.type = SCALAR;
7221 lhs.var = integer_id;
7222 lhs.offset = 0;
7223 rhs.type = ADDRESSOF;
7224 rhs.var = anything_id;
7225 rhs.offset = 0;
7226 process_constraint (new_constraint (lhs, rhs));
7227 }
7228
7229 /* Initialize things necessary to perform PTA */
7230
static void
init_alias_vars (void)
{
  /* Field-sensitive analysis is only worthwhile when a variable may be
     split into more than one field.  */
  use_field_sensitive = (param_max_fields_for_field_sensitive > 1);

  /* Obstacks backing the points-to, old-points-to and predecessor-graph
     bitmaps; released separately at the end of the respective phases.  */
  bitmap_obstack_initialize (&pta_obstack);
  bitmap_obstack_initialize (&oldpta_obstack);
  bitmap_obstack_initialize (&predbitmap_obstack);

  constraints.create (8);
  varmap.create (8);
  vi_for_tree = new hash_map<tree, varinfo_t>;
  call_stmt_vars = new hash_map<gimple *, varinfo_t>;

  memset (&stats, 0, sizeof (stats));
  shared_bitmap_table = new hash_table<shared_bitmap_hasher> (511);
  /* Create the special variables (NULL, ANYTHING, ESCAPED, ...) and
     their generic constraints.  */
  init_base_vars ();

  gcc_obstack_init (&fake_var_decl_obstack);

  final_solutions = new hash_map<varinfo_t, pt_solution *>;
  gcc_obstack_init (&final_solutions_obstack);
}
7254
7255 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
7256 predecessor edges. */
7257
7258 static void
remove_preds_and_fake_succs(constraint_graph_t graph)7259 remove_preds_and_fake_succs (constraint_graph_t graph)
7260 {
7261 unsigned int i;
7262
7263 /* Clear the implicit ref and address nodes from the successor
7264 lists. */
7265 for (i = 1; i < FIRST_REF_NODE; i++)
7266 {
7267 if (graph->succs[i])
7268 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
7269 FIRST_REF_NODE * 2);
7270 }
7271
7272 /* Free the successor list for the non-ref nodes. */
7273 for (i = FIRST_REF_NODE + 1; i < graph->size; i++)
7274 {
7275 if (graph->succs[i])
7276 BITMAP_FREE (graph->succs[i]);
7277 }
7278
7279 /* Now reallocate the size of the successor list as, and blow away
7280 the predecessor bitmaps. */
7281 graph->size = varmap.length ();
7282 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
7283
7284 free (graph->implicit_preds);
7285 graph->implicit_preds = NULL;
7286 free (graph->preds);
7287 graph->preds = NULL;
7288 bitmap_obstack_release (&predbitmap_obstack);
7289 }
7290
7291 /* Solve the constraint set. */
7292
static void
solve_constraints (void)
{
  class scc_info *si;

  /* Sort varinfos so that ones that cannot be pointed to are last.
     This makes bitmaps more efficient.  MAP is the permutation:
     MAP[old-index] == new-index.  */
  unsigned int *map = XNEWVEC (unsigned int, varmap.length ());
  /* The special vars (NULL .. INTEGER) keep their positions.  */
  for (unsigned i = 0; i < integer_id + 1; ++i)
    map[i] = i;
  /* Start with address-taken vars, followed by not address-taken vars
     to move vars never appearing in the points-to solution bitmaps last.  */
  unsigned j = integer_id + 1;
  for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
    if (varmap[varmap[i]->head]->address_taken)
      map[i] = j++;
  for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
    if (! varmap[varmap[i]->head]->address_taken)
      map[i] = j++;
  /* Shuffle varmap according to map.  */
  for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
    {
      /* Follow the permutation cycle through position I, swapping
	 entries into their final slots.  */
      while (map[varmap[i]->id] != i)
	std::swap (varmap[i], varmap[map[varmap[i]->id]]);
      gcc_assert (bitmap_empty_p (varmap[i]->solution));
      varmap[i]->id = i;
      /* Field lists link variables by index, so the next/head links
	 have to be remapped as well.  */
      varmap[i]->next = map[varmap[i]->next];
      varmap[i]->head = map[varmap[i]->head];
    }
  /* Finally rewrite constraints.  */
  for (unsigned i = 0; i < constraints.length (); ++i)
    {
      constraints[i]->lhs.var = map[constraints[i]->lhs.var];
      constraints[i]->rhs.var = map[constraints[i]->rhs.var];
    }
  free (map);

  if (dump_file)
    fprintf (dump_file,
	     "\nCollapsing static cycles and doing variable "
	     "substitution\n");

  /* Twice the variable count: one node per variable plus one fake
     REF node per variable.  */
  init_graph (varmap.length () * 2);

  if (dump_file)
    fprintf (dump_file, "Building predecessor graph\n");
  build_pred_graph ();

  if (dump_file)
    fprintf (dump_file, "Detecting pointer and location "
	     "equivalences\n");
  si = perform_var_substitution (graph);

  if (dump_file)
    fprintf (dump_file, "Rewriting constraints and unifying "
	     "variables\n");
  rewrite_constraints (graph, si);

  build_succ_graph ();

  free_var_substitution_info (si);

  /* Attach complex constraints to graph nodes.  */
  move_complex_constraints (graph);

  if (dump_file)
    fprintf (dump_file, "Uniting pointer but not location equivalent "
	     "variables\n");
  unite_pointer_equivalences (graph);

  if (dump_file)
    fprintf (dump_file, "Finding indirect cycles\n");
  find_indirect_cycles (graph);

  /* Implicit nodes and predecessors are no longer necessary at this
     point.  */
  remove_preds_and_fake_succs (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }

  if (dump_file)
    fprintf (dump_file, "Solving graph\n");

  /* Propagate solutions to a fixed point.  */
  solve_graph (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }
}
7392
7393 /* Create points-to sets for the current function. See the comments
7394 at the start of the file for an algorithmic overview. */
7395
static void
compute_points_to_sets (void)
{
  basic_block bb;
  varinfo_t vi;

  timevar_push (TV_TREE_PTA);

  init_alias_vars ();

  intra_create_variable_infos (cfun);

  /* Now walk all statements and build the constraint set.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();

	  /* Virtual PHIs carry no pointer values.  */
	  if (! virtual_operand_p (gimple_phi_result (phi)))
	    find_func_aliases (cfun, phi);
	}

      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);

	  find_func_aliases (cfun, stmt);
	}
    }

  if (dump_file)
    {
      fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
      dump_constraints (dump_file, 0);
    }

  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  /* Post-process solutions for escapes through returns.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (greturn *ret = safe_dyn_cast <greturn *> (last_stmt (e->src)))
      {
	tree val = gimple_return_retval (ret);
	/* ??? Easy to handle simple indirections with some work.
	   Arbitrary references like foo.bar.baz are more difficult
	   (but conservatively easy enough with just looking at the base).
	   Mind to fixup find_func_aliases as well.  */
	if (!val || !SSA_VAR_P (val))
	  continue;
	/* returns happen last in non-IPA so they only influence
	   the ESCAPED solution and we can filter local variables.  */
	varinfo_t escaped_vi = get_varinfo (find (escaped_id));
	varinfo_t vi = lookup_vi_for_tree (val);
	bitmap delta = BITMAP_ALLOC (&pta_obstack);
	bitmap_iterator bi;
	unsigned i;
	/* Seed DELTA with everything the returned value (all its
	   subfields) points to that is not already in ESCAPED.  */
	for (; vi; vi = vi_next (vi))
	  {
	    varinfo_t part_vi = get_varinfo (find (vi->id));
	    EXECUTE_IF_AND_COMPL_IN_BITMAP (part_vi->solution,
					    escaped_vi->solution, 0, i, bi)
	      {
		varinfo_t pointed_to_vi = get_varinfo (i);
		if (pointed_to_vi->is_global_var
		    /* We delay marking of heap memory as global.  */
		    || pointed_to_vi->is_heap_var)
		  bitmap_set_bit (delta, i);
	      }
	  }

	/* Now compute the transitive closure.  Iterate until DELTA
	   adds nothing new to the ESCAPED solution.  */
	bitmap_ior_into (escaped_vi->solution, delta);
	bitmap new_delta = BITMAP_ALLOC (&pta_obstack);
	while (!bitmap_empty_p (delta))
	  {
	    EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
	      {
		varinfo_t pointed_to_vi = get_varinfo (i);
		pointed_to_vi = get_varinfo (find (pointed_to_vi->id));
		unsigned j;
		bitmap_iterator bi2;
		EXECUTE_IF_AND_COMPL_IN_BITMAP (pointed_to_vi->solution,
						escaped_vi->solution,
						0, j, bi2)
		  {
		    varinfo_t pointed_to_vi2 = get_varinfo (j);
		    if (pointed_to_vi2->is_global_var
			/* We delay marking of heap memory as global.  */
			|| pointed_to_vi2->is_heap_var)
		      bitmap_set_bit (new_delta, j);
		  }
	      }
	    bitmap_ior_into (escaped_vi->solution, new_delta);
	    bitmap_clear (delta);
	    std::swap (delta, new_delta);
	  }
	BITMAP_FREE (delta);
	BITMAP_FREE (new_delta);
      }

  if (dump_file)
    dump_sa_points_to_info (dump_file);

  /* Compute the points-to set for ESCAPED used for call-clobber analysis.  */
  cfun->gimple_df->escaped = find_what_var_points_to (cfun->decl,
						      get_varinfo (escaped_id));

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  cfun->gimple_df->escaped.escaped = 0;

  /* Compute the points-to sets for pointer SSA_NAMEs.  */
  unsigned i;
  tree ptr;

  FOR_EACH_SSA_NAME (i, ptr, cfun)
    {
      if (POINTER_TYPE_P (TREE_TYPE (ptr)))
	find_what_p_points_to (cfun->decl, ptr);
    }

  /* Compute the call-used/clobbered sets.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gcall *stmt;
	  struct pt_solution *pt;

	  stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
	  if (!stmt)
	    continue;

	  /* First the set of memory the call may use.  */
	  pt = gimple_call_use_set (stmt);
	  if (gimple_call_flags (stmt) & ECF_CONST)
	    memset (pt, 0, sizeof (struct pt_solution));
	  else if ((vi = lookup_call_use_vi (stmt)) != NULL)
	    {
	      *pt = find_what_var_points_to (cfun->decl, vi);
	      /* Escaped (and thus nonlocal) variables are always
		 implicitly used by calls.  */
	      /* ??? ESCAPED can be empty even though NONLOCAL
		 always escaped.  */
	      pt->nonlocal = 1;
	      pt->escaped = 1;
	    }
	  else
	    {
	      /* If there is nothing special about this call then
		 we have made everything that is used also escape.  */
	      *pt = cfun->gimple_df->escaped;
	      pt->nonlocal = 1;
	    }

	  /* Then the set of memory the call may clobber.  */
	  pt = gimple_call_clobber_set (stmt);
	  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
	    memset (pt, 0, sizeof (struct pt_solution));
	  else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
	    {
	      *pt = find_what_var_points_to (cfun->decl, vi);
	      /* Escaped (and thus nonlocal) variables are always
		 implicitly clobbered by calls.  */
	      /* ??? ESCAPED can be empty even though NONLOCAL
		 always escaped.  */
	      pt->nonlocal = 1;
	      pt->escaped = 1;
	    }
	  else
	    {
	      /* If there is nothing special about this call then
		 we have made everything that is used also escape.  */
	      *pt = cfun->gimple_df->escaped;
	      pt->nonlocal = 1;
	    }
	}
    }

  timevar_pop (TV_TREE_PTA);
}
7584
7585
7586 /* Delete created points-to sets. */
7587
7588 static void
delete_points_to_sets(void)7589 delete_points_to_sets (void)
7590 {
7591 unsigned int i;
7592
7593 delete shared_bitmap_table;
7594 shared_bitmap_table = NULL;
7595 if (dump_file && (dump_flags & TDF_STATS))
7596 fprintf (dump_file, "Points to sets created:%d\n",
7597 stats.points_to_sets_created);
7598
7599 delete vi_for_tree;
7600 delete call_stmt_vars;
7601 bitmap_obstack_release (&pta_obstack);
7602 constraints.release ();
7603
7604 for (i = 0; i < graph->size; i++)
7605 graph->complex[i].release ();
7606 free (graph->complex);
7607
7608 free (graph->rep);
7609 free (graph->succs);
7610 free (graph->pe);
7611 free (graph->pe_rep);
7612 free (graph->indirect_cycles);
7613 free (graph);
7614
7615 varmap.release ();
7616 variable_info_pool.release ();
7617 constraint_pool.release ();
7618
7619 obstack_free (&fake_var_decl_obstack, NULL);
7620
7621 delete final_solutions;
7622 obstack_free (&final_solutions_obstack, NULL);
7623 }
7624
/* Bundle of state passed by compute_dependence_clique to the
   visit_loadstore walker callback.  */

struct vls_data
{
  /* The dependence clique to assign to accesses not based on a
     restrict pointer.  */
  unsigned short clique;
  /* True if some restrict tag is in the ESCAPED solution, in which
     case accesses through pointers that may point to ESCAPED must
     be left alone as well.  */
  bool escaped_p;
  /* The set of variables (including subfields) belonging to the
     restrict tags we assigned cliques for.  */
  bitmap rvars;
};
7631
7632 /* Mark "other" loads and stores as belonging to CLIQUE and with
7633 base zero. */
7634
7635 static bool
visit_loadstore(gimple *,tree base,tree ref,void * data)7636 visit_loadstore (gimple *, tree base, tree ref, void *data)
7637 {
7638 unsigned short clique = ((vls_data *) data)->clique;
7639 bitmap rvars = ((vls_data *) data)->rvars;
7640 bool escaped_p = ((vls_data *) data)->escaped_p;
7641 if (TREE_CODE (base) == MEM_REF
7642 || TREE_CODE (base) == TARGET_MEM_REF)
7643 {
7644 tree ptr = TREE_OPERAND (base, 0);
7645 if (TREE_CODE (ptr) == SSA_NAME)
7646 {
7647 /* For parameters, get at the points-to set for the actual parm
7648 decl. */
7649 if (SSA_NAME_IS_DEFAULT_DEF (ptr)
7650 && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
7651 || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
7652 ptr = SSA_NAME_VAR (ptr);
7653
7654 /* We need to make sure 'ptr' doesn't include any of
7655 the restrict tags we added bases for in its points-to set. */
7656 varinfo_t vi = lookup_vi_for_tree (ptr);
7657 if (! vi)
7658 return false;
7659
7660 vi = get_varinfo (find (vi->id));
7661 if (bitmap_intersect_p (rvars, vi->solution)
7662 || (escaped_p && bitmap_bit_p (vi->solution, escaped_id)))
7663 return false;
7664 }
7665
7666 /* Do not overwrite existing cliques (that includes clique, base
7667 pairs we just set). */
7668 if (MR_DEPENDENCE_CLIQUE (base) == 0)
7669 {
7670 MR_DEPENDENCE_CLIQUE (base) = clique;
7671 MR_DEPENDENCE_BASE (base) = 0;
7672 }
7673 }
7674
7675 /* For plain decl accesses see whether they are accesses to globals
7676 and rewrite them to MEM_REFs with { clique, 0 }. */
7677 if (VAR_P (base)
7678 && is_global_var (base)
7679 /* ??? We can't rewrite a plain decl with the walk_stmt_load_store
7680 ops callback. */
7681 && base != ref)
7682 {
7683 tree *basep = &ref;
7684 while (handled_component_p (*basep))
7685 basep = &TREE_OPERAND (*basep, 0);
7686 gcc_assert (VAR_P (*basep));
7687 tree ptr = build_fold_addr_expr (*basep);
7688 tree zero = build_int_cst (TREE_TYPE (ptr), 0);
7689 *basep = build2 (MEM_REF, TREE_TYPE (*basep), ptr, zero);
7690 MR_DEPENDENCE_CLIQUE (*basep) = clique;
7691 MR_DEPENDENCE_BASE (*basep) = 0;
7692 }
7693
7694 return false;
7695 }
7696
/* State shared across maybe_set_dependence_info invocations while
   walking the uses of one restrict pointer; see
   compute_dependence_clique.  */

struct msdi_data {
  /* The restrict pointer whose dereferences we annotate.  */
  tree ptr;
  /* In/out: the clique to use; allocated lazily on first use.  */
  unsigned short *clique;
  /* In/out: the last restrict-uid handed out in this function.  */
  unsigned short *last_ruid;
  /* The single restrict variable PTR is known to point to.  */
  varinfo_t restrict_var;
};
7703
7704 /* If BASE is a MEM_REF then assign a clique, base pair to it, updating
7705 CLIQUE, *RESTRICT_VAR and LAST_RUID as passed via DATA.
7706 Return whether dependence info was assigned to BASE. */
7707
7708 static bool
maybe_set_dependence_info(gimple *,tree base,tree,void * data)7709 maybe_set_dependence_info (gimple *, tree base, tree, void *data)
7710 {
7711 tree ptr = ((msdi_data *)data)->ptr;
7712 unsigned short &clique = *((msdi_data *)data)->clique;
7713 unsigned short &last_ruid = *((msdi_data *)data)->last_ruid;
7714 varinfo_t restrict_var = ((msdi_data *)data)->restrict_var;
7715 if ((TREE_CODE (base) == MEM_REF
7716 || TREE_CODE (base) == TARGET_MEM_REF)
7717 && TREE_OPERAND (base, 0) == ptr)
7718 {
7719 /* Do not overwrite existing cliques. This avoids overwriting dependence
7720 info inlined from a function with restrict parameters inlined
7721 into a function with restrict parameters. This usually means we
7722 prefer to be precise in innermost loops. */
7723 if (MR_DEPENDENCE_CLIQUE (base) == 0)
7724 {
7725 if (clique == 0)
7726 {
7727 if (cfun->last_clique == 0)
7728 cfun->last_clique = 1;
7729 clique = 1;
7730 }
7731 if (restrict_var->ruid == 0)
7732 restrict_var->ruid = ++last_ruid;
7733 MR_DEPENDENCE_CLIQUE (base) = clique;
7734 MR_DEPENDENCE_BASE (base) = restrict_var->ruid;
7735 return true;
7736 }
7737 }
7738 return false;
7739 }
7740
7741 /* Clear dependence info for the clique DATA. */
7742
7743 static bool
clear_dependence_clique(gimple *,tree base,tree,void * data)7744 clear_dependence_clique (gimple *, tree base, tree, void *data)
7745 {
7746 unsigned short clique = (uintptr_t)data;
7747 if ((TREE_CODE (base) == MEM_REF
7748 || TREE_CODE (base) == TARGET_MEM_REF)
7749 && MR_DEPENDENCE_CLIQUE (base) == clique)
7750 {
7751 MR_DEPENDENCE_CLIQUE (base) = 0;
7752 MR_DEPENDENCE_BASE (base) = 0;
7753 }
7754
7755 return false;
7756 }
7757
7758 /* Compute the set of independend memory references based on restrict
7759 tags and their conservative propagation to the points-to sets. */
7760
7761 static void
compute_dependence_clique(void)7762 compute_dependence_clique (void)
7763 {
7764 /* First clear the special "local" clique. */
7765 basic_block bb;
7766 if (cfun->last_clique != 0)
7767 FOR_EACH_BB_FN (bb, cfun)
7768 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
7769 !gsi_end_p (gsi); gsi_next (&gsi))
7770 {
7771 gimple *stmt = gsi_stmt (gsi);
7772 walk_stmt_load_store_ops (stmt, (void *)(uintptr_t) 1,
7773 clear_dependence_clique,
7774 clear_dependence_clique);
7775 }
7776
7777 unsigned short clique = 0;
7778 unsigned short last_ruid = 0;
7779 bitmap rvars = BITMAP_ALLOC (NULL);
7780 bool escaped_p = false;
7781 for (unsigned i = 0; i < num_ssa_names; ++i)
7782 {
7783 tree ptr = ssa_name (i);
7784 if (!ptr || !POINTER_TYPE_P (TREE_TYPE (ptr)))
7785 continue;
7786
7787 /* Avoid all this when ptr is not dereferenced? */
7788 tree p = ptr;
7789 if (SSA_NAME_IS_DEFAULT_DEF (ptr)
7790 && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
7791 || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
7792 p = SSA_NAME_VAR (ptr);
7793 varinfo_t vi = lookup_vi_for_tree (p);
7794 if (!vi)
7795 continue;
7796 vi = get_varinfo (find (vi->id));
7797 bitmap_iterator bi;
7798 unsigned j;
7799 varinfo_t restrict_var = NULL;
7800 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
7801 {
7802 varinfo_t oi = get_varinfo (j);
7803 if (oi->head != j)
7804 oi = get_varinfo (oi->head);
7805 if (oi->is_restrict_var)
7806 {
7807 if (restrict_var
7808 && restrict_var != oi)
7809 {
7810 if (dump_file && (dump_flags & TDF_DETAILS))
7811 {
7812 fprintf (dump_file, "found restrict pointed-to "
7813 "for ");
7814 print_generic_expr (dump_file, ptr);
7815 fprintf (dump_file, " but not exclusively\n");
7816 }
7817 restrict_var = NULL;
7818 break;
7819 }
7820 restrict_var = oi;
7821 }
7822 /* NULL is the only other valid points-to entry. */
7823 else if (oi->id != nothing_id)
7824 {
7825 restrict_var = NULL;
7826 break;
7827 }
7828 }
7829 /* Ok, found that ptr must(!) point to a single(!) restrict
7830 variable. */
7831 /* ??? PTA isn't really a proper propagation engine to compute
7832 this property.
7833 ??? We could handle merging of two restricts by unifying them. */
7834 if (restrict_var)
7835 {
7836 /* Now look at possible dereferences of ptr. */
7837 imm_use_iterator ui;
7838 gimple *use_stmt;
7839 bool used = false;
7840 msdi_data data = { ptr, &clique, &last_ruid, restrict_var };
7841 FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
7842 used |= walk_stmt_load_store_ops (use_stmt, &data,
7843 maybe_set_dependence_info,
7844 maybe_set_dependence_info);
7845 if (used)
7846 {
7847 /* Add all subvars to the set of restrict pointed-to set. */
7848 for (unsigned sv = restrict_var->head; sv != 0;
7849 sv = get_varinfo (sv)->next)
7850 bitmap_set_bit (rvars, sv);
7851 varinfo_t escaped = get_varinfo (find (escaped_id));
7852 if (bitmap_bit_p (escaped->solution, restrict_var->id))
7853 escaped_p = true;
7854 }
7855 }
7856 }
7857
7858 if (clique != 0)
7859 {
7860 /* Assign the BASE id zero to all accesses not based on a restrict
7861 pointer. That way they get disambiguated against restrict
7862 accesses but not against each other. */
7863 /* ??? For restricts derived from globals (thus not incoming
7864 parameters) we can't restrict scoping properly thus the following
7865 is too aggressive there. For now we have excluded those globals from
7866 getting into the MR_DEPENDENCE machinery. */
7867 vls_data data = { clique, escaped_p, rvars };
7868 basic_block bb;
7869 FOR_EACH_BB_FN (bb, cfun)
7870 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
7871 !gsi_end_p (gsi); gsi_next (&gsi))
7872 {
7873 gimple *stmt = gsi_stmt (gsi);
7874 walk_stmt_load_store_ops (stmt, &data,
7875 visit_loadstore, visit_loadstore);
7876 }
7877 }
7878
7879 BITMAP_FREE (rvars);
7880 }
7881
7882 /* Compute points-to information for every SSA_NAME pointer in the
7883 current function and compute the transitive closure of escaped
7884 variables to re-initialize the call-clobber states of local variables. */
7885
unsigned int
compute_may_aliases (void)
{
  /* If IPA PTA already computed (more precise) points-to info do not
     overwrite it with a function-local recomputation.  */
  if (cfun->gimple_df->ipa_pta)
    {
      if (dump_file)
	{
	  fprintf (dump_file, "\nNot re-computing points-to information "
		   "because IPA points-to information is available.\n\n");

	  /* But still dump what we have.  */
	  dump_alias_info (dump_file);
	}

      return 0;
    }

  /* For each pointer P_i, determine the sets of variables that P_i may
     point-to.  Compute the reachability set of escaped and call-used
     variables.  */
  compute_points_to_sets ();

  /* Debugging dumps.  */
  if (dump_file)
    dump_alias_info (dump_file);

  /* Compute restrict-based memory disambiguations.  */
  compute_dependence_clique ();

  /* Deallocate memory used by aliasing data structures and the internal
     points-to solution.  */
  delete_points_to_sets ();

  gcc_assert (!need_ssa_update_p (cfun));

  return 0;
}
7923
7924 /* A dummy pass to cause points-to information to be computed via
7925 TODO_rebuild_alias. */
7926
namespace {

const pass_data pass_data_build_alias =
{
  GIMPLE_PASS, /* type */
  "alias", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_rebuild_alias, /* todo_flags_finish */
};

class pass_build_alias : public gimple_opt_pass
{
public:
  pass_build_alias (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_alias, ctxt)
  {}

  /* opt_pass methods: */
  /* The pass has no execute method; the work is triggered by the
     TODO_rebuild_alias finish flag.  Only run when PTA is enabled.  */
  virtual bool gate (function *) { return flag_tree_pta; }

}; // class pass_build_alias

} // anon namespace
7955
/* Factory for the "alias" dummy pass; referenced from the pass
   manager.  */

gimple_opt_pass *
make_pass_build_alias (gcc::context *ctxt)
{
  return new pass_build_alias (ctxt);
}
7961
7962 /* A dummy pass to cause points-to information to be computed via
7963 TODO_rebuild_alias. */
7964
namespace {

const pass_data pass_data_build_ealias =
{
  GIMPLE_PASS, /* type */
  "ealias", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_rebuild_alias, /* todo_flags_finish */
};

class pass_build_ealias : public gimple_opt_pass
{
public:
  pass_build_ealias (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_ealias, ctxt)
  {}

  /* opt_pass methods: */
  /* Like pass_build_alias: no execute method, alias info is rebuilt
     via TODO_rebuild_alias.  Only run when PTA is enabled.  */
  virtual bool gate (function *) { return flag_tree_pta; }

}; // class pass_build_ealias

} // anon namespace
7993
/* Factory for the "ealias" dummy pass; referenced from the pass
   manager.  */

gimple_opt_pass *
make_pass_build_ealias (gcc::context *ctxt)
{
  return new pass_build_ealias (ctxt);
}
7999
8000
/* IPA PTA solutions for ESCAPED.  Only the first flag is set
   (presumably pt_solution::anything -- TODO confirm against the
   struct pt_solution declaration); all other flags and the vars
   bitmap are clear.  */
struct pt_solution ipa_escaped_pt
  = { true, false, false, false, false,
      false, false, false, false, false, NULL };
8005
8006 /* Associate node with varinfo DATA. Worker for
8007 cgraph_for_symbol_thunks_and_aliases. */
8008 static bool
associate_varinfo_to_alias(struct cgraph_node * node,void * data)8009 associate_varinfo_to_alias (struct cgraph_node *node, void *data)
8010 {
8011 if ((node->alias
8012 || (node->thunk
8013 && ! node->inlined_to))
8014 && node->analyzed
8015 && !node->ifunc_resolver)
8016 insert_vi_for_tree (node->decl, (varinfo_t)data);
8017 return false;
8018 }
8019
8020 /* Dump varinfo VI to FILE. */
8021
8022 static void
dump_varinfo(FILE * file,varinfo_t vi)8023 dump_varinfo (FILE *file, varinfo_t vi)
8024 {
8025 if (vi == NULL)
8026 return;
8027
8028 fprintf (file, "%u: %s\n", vi->id, vi->name);
8029
8030 const char *sep = " ";
8031 if (vi->is_artificial_var)
8032 fprintf (file, "%sartificial", sep);
8033 if (vi->is_special_var)
8034 fprintf (file, "%sspecial", sep);
8035 if (vi->is_unknown_size_var)
8036 fprintf (file, "%sunknown-size", sep);
8037 if (vi->is_full_var)
8038 fprintf (file, "%sfull", sep);
8039 if (vi->is_heap_var)
8040 fprintf (file, "%sheap", sep);
8041 if (vi->may_have_pointers)
8042 fprintf (file, "%smay-have-pointers", sep);
8043 if (vi->only_restrict_pointers)
8044 fprintf (file, "%sonly-restrict-pointers", sep);
8045 if (vi->is_restrict_var)
8046 fprintf (file, "%sis-restrict-var", sep);
8047 if (vi->is_global_var)
8048 fprintf (file, "%sglobal", sep);
8049 if (vi->is_ipa_escape_point)
8050 fprintf (file, "%sipa-escape-point", sep);
8051 if (vi->is_fn_info)
8052 fprintf (file, "%sfn-info", sep);
8053 if (vi->ruid)
8054 fprintf (file, "%srestrict-uid:%u", sep, vi->ruid);
8055 if (vi->next)
8056 fprintf (file, "%snext:%u", sep, vi->next);
8057 if (vi->head != vi->id)
8058 fprintf (file, "%shead:%u", sep, vi->head);
8059 if (vi->offset)
8060 fprintf (file, "%soffset:" HOST_WIDE_INT_PRINT_DEC, sep, vi->offset);
8061 if (vi->size != ~(unsigned HOST_WIDE_INT)0)
8062 fprintf (file, "%ssize:" HOST_WIDE_INT_PRINT_DEC, sep, vi->size);
8063 if (vi->fullsize != ~(unsigned HOST_WIDE_INT)0
8064 && vi->fullsize != vi->size)
8065 fprintf (file, "%sfullsize:" HOST_WIDE_INT_PRINT_DEC, sep,
8066 vi->fullsize);
8067 fprintf (file, "\n");
8068
8069 if (vi->solution && !bitmap_empty_p (vi->solution))
8070 {
8071 bitmap_iterator bi;
8072 unsigned i;
8073 fprintf (file, " solution: {");
8074 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
8075 fprintf (file, " %u", i);
8076 fprintf (file, " }\n");
8077 }
8078
8079 if (vi->oldsolution && !bitmap_empty_p (vi->oldsolution)
8080 && !bitmap_equal_p (vi->solution, vi->oldsolution))
8081 {
8082 bitmap_iterator bi;
8083 unsigned i;
8084 fprintf (file, " oldsolution: {");
8085 EXECUTE_IF_SET_IN_BITMAP (vi->oldsolution, 0, i, bi)
8086 fprintf (file, " %u", i);
8087 fprintf (file, " }\n");
8088 }
8089 }
8090
/* Dump varinfo VI to stderr.  Convenience wrapper for use from the
   debugger.  */

DEBUG_FUNCTION void
debug_varinfo (varinfo_t vi)
{
  dump_varinfo (stderr, vi);
}
8098
8099 /* Dump varmap to FILE. */
8100
8101 static void
dump_varmap(FILE * file)8102 dump_varmap (FILE *file)
8103 {
8104 if (varmap.length () == 0)
8105 return;
8106
8107 fprintf (file, "variables:\n");
8108
8109 for (unsigned int i = 0; i < varmap.length (); ++i)
8110 {
8111 varinfo_t vi = get_varinfo (i);
8112 dump_varinfo (file, vi);
8113 }
8114
8115 fprintf (file, "\n");
8116 }
8117
/* Dump varmap to stderr.  Convenience wrapper for use from the
   debugger.  */

DEBUG_FUNCTION void
debug_varmap (void)
{
  dump_varmap (stderr);
}
8125
8126 /* Compute whether node is refered to non-locally. Worker for
8127 cgraph_for_symbol_thunks_and_aliases. */
8128 static bool
refered_from_nonlocal_fn(struct cgraph_node * node,void * data)8129 refered_from_nonlocal_fn (struct cgraph_node *node, void *data)
8130 {
8131 bool *nonlocal_p = (bool *)data;
8132 *nonlocal_p |= (node->used_from_other_partition
8133 || DECL_EXTERNAL (node->decl)
8134 || TREE_PUBLIC (node->decl)
8135 || node->force_output
8136 || lookup_attribute ("noipa", DECL_ATTRIBUTES (node->decl)));
8137 return false;
8138 }
8139
8140 /* Same for varpool nodes. */
8141 static bool
refered_from_nonlocal_var(struct varpool_node * node,void * data)8142 refered_from_nonlocal_var (struct varpool_node *node, void *data)
8143 {
8144 bool *nonlocal_p = (bool *)data;
8145 *nonlocal_p |= (node->used_from_other_partition
8146 || DECL_EXTERNAL (node->decl)
8147 || TREE_PUBLIC (node->decl)
8148 || node->force_output);
8149 return false;
8150 }
8151
8152 /* Execute the driver for IPA PTA. */
static unsigned int
ipa_pta_execute (void)
{
  struct cgraph_node *node;
  varpool_node *var;
  /* Index of the first constraint not yet dumped; lets each dump below
     print only the constraints generated since the previous dump.  */
  unsigned int from = 0;

  in_ipa_mode = 1;

  init_alias_vars ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      symtab->dump (dump_file);
      fprintf (dump_file, "\n");
    }

  if (dump_file)
    {
      fprintf (dump_file, "Generating generic constraints\n\n");
      dump_constraints (dump_file, from);
      fprintf (dump_file, "\n");
      from = constraints.length ();
    }

  /* Build the constraints.  First pass: create a function info (and the
     initial parameter constraints) for every function with a body.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      varinfo_t vi;
      /* Nodes without a body are not interesting.  Especially do not
	 visit clones at this point for now - we get duplicate decls
	 there for inline clones at least.  */
      if (!node->has_gimple_body_p () || node->inlined_to)
	continue;
      node->get_body ();

      gcc_assert (!node->clone_of);

      /* For externally visible or attribute used annotated functions use
	 local constraints for their arguments.
	 For local functions we see all callers and thus do not need initial
	 constraints for parameters.  */
      bool nonlocal_p = (node->used_from_other_partition
			 || DECL_EXTERNAL (node->decl)
			 || TREE_PUBLIC (node->decl)
			 || node->force_output
			 || lookup_attribute ("noipa",
					      DECL_ATTRIBUTES (node->decl)));
      /* Also treat the function as non-local if any alias or thunk of it
	 is; the walk ORs into nonlocal_p via refered_from_nonlocal_fn.  */
      node->call_for_symbol_thunks_and_aliases (refered_from_nonlocal_fn,
						&nonlocal_p, true);

      vi = create_function_info_for (node->decl,
				     alias_get_name (node->decl), false,
				     nonlocal_p);
      if (dump_file
	  && from != constraints.length ())
	{
	  fprintf (dump_file,
		   "Generating initial constraints for %s",
		   node->dump_name ());
	  if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
	    fprintf (dump_file, " (%s)",
		     IDENTIFIER_POINTER
		       (DECL_ASSEMBLER_NAME (node->decl)));
	  fprintf (dump_file, "\n\n");
	  dump_constraints (dump_file, from);
	  fprintf (dump_file, "\n");

	  from = constraints.length ();
	}

      /* Let all aliases and thunks of the function share its varinfo.  */
      node->call_for_symbol_thunks_and_aliases
	(associate_varinfo_to_alias, vi, true);
    }

  /* Create constraints for global variables and their initializers.  */
  FOR_EACH_VARIABLE (var)
    {
      if (var->alias && var->analyzed)
	continue;

      varinfo_t vi = get_vi_for_tree (var->decl);

      /* For the purpose of IPA PTA unit-local globals are not
	 escape points.  */
      bool nonlocal_p = (DECL_EXTERNAL (var->decl)
			 || TREE_PUBLIC (var->decl)
			 || var->used_from_other_partition
			 || var->force_output);
      var->call_for_symbol_and_aliases (refered_from_nonlocal_var,
					&nonlocal_p, true);
      if (nonlocal_p)
	vi->is_ipa_escape_point = true;
    }

  if (dump_file
      && from != constraints.length ())
    {
      fprintf (dump_file,
	       "Generating constraints for global initializers\n\n");
      dump_constraints (dump_file, from);
      fprintf (dump_file, "\n");
      from = constraints.length ();
    }

  /* Second pass: walk every statement of every function body and
     generate the intraprocedural constraints plus call clobber/use
     constraints.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      struct function *func;
      basic_block bb;

      /* Nodes without a body are not interesting.  */
      if (!node->has_gimple_body_p () || node->clone_of)
	continue;

      if (dump_file)
	{
	  fprintf (dump_file,
		   "Generating constraints for %s", node->dump_name ());
	  if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
	    fprintf (dump_file, " (%s)",
		     IDENTIFIER_POINTER
		       (DECL_ASSEMBLER_NAME (node->decl)));
	  fprintf (dump_file, "\n");
	}

      func = DECL_STRUCT_FUNCTION (node->decl);
      /* The body walk works on FUNC directly; we must not be inside any
	 function context here.  */
      gcc_assert (cfun == NULL);

      /* Build constraints for the function body.  */
      FOR_EACH_BB_FN (bb, func)
	{
	  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gphi *phi = gsi.phi ();

	      /* Virtual PHIs carry no points-to information.  */
	      if (! virtual_operand_p (gimple_phi_result (phi)))
		find_func_aliases (func, phi);
	    }

	  for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gimple *stmt = gsi_stmt (gsi);

	      find_func_aliases (func, stmt);
	      find_func_clobbers (func, stmt);
	    }
	}

      if (dump_file)
	{
	  fprintf (dump_file, "\n");
	  dump_constraints (dump_file, from);
	  fprintf (dump_file, "\n");
	  from = constraints.length ();
	}
    }

  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  if (dump_file)
    dump_sa_points_to_info (dump_file);

  /* Now post-process solutions to handle locals from different
     runtime instantiations coming in through recursive invocations.  */
  unsigned shadow_var_cnt = 0;
  for (unsigned i = 1; i < varmap.length (); ++i)
    {
      varinfo_t fi = get_varinfo (i);
      if (fi->is_fn_info
	  && fi->decl)
	/* Automatic variables pointed to by their containing functions
	   parameters need this treatment.  */
	for (varinfo_t ai = first_vi_for_offset (fi, fi_parm_base);
	     ai; ai = vi_next (ai))
	  {
	    varinfo_t vi = get_varinfo (find (ai->id));
	    bitmap_iterator bi;
	    unsigned j;
	    EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
	      {
		varinfo_t pt = get_varinfo (j);
		/* Allocate each affected local a shadow UID exactly once.  */
		if (pt->shadow_var_uid == 0
		    && pt->decl
		    && auto_var_in_fn_p (pt->decl, fi->decl))
		  {
		    pt->shadow_var_uid = allocate_decl_uid ();
		    shadow_var_cnt++;
		  }
	      }
	  }
      /* As well as global variables which are another way of passing
	 arguments to recursive invocations.  */
      else if (fi->is_global_var)
	{
	  for (varinfo_t ai = fi; ai; ai = vi_next (ai))
	    {
	      varinfo_t vi = get_varinfo (find (ai->id));
	      bitmap_iterator bi;
	      unsigned j;
	      EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
		{
		  varinfo_t pt = get_varinfo (j);
		  if (pt->shadow_var_uid == 0
		      && pt->decl
		      && auto_var_p (pt->decl))
		    {
		      pt->shadow_var_uid = allocate_decl_uid ();
		      shadow_var_cnt++;
		    }
		}
	    }
	}
    }
  if (shadow_var_cnt && dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Allocated %u shadow variables for locals "
	     "maybe leaking into recursive invocations of their containing "
	     "functions\n", shadow_var_cnt);

  /* Compute the global points-to sets for ESCAPED.
     ??? Note that the computed escape set is not correct
     for the whole unit as we fail to consider graph edges to
     externally visible functions.  */
  ipa_escaped_pt = find_what_var_points_to (NULL, get_varinfo (escaped_id));

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  ipa_escaped_pt.ipa_escaped = 0;

  /* Assign the points-to sets to the SSA names in the unit.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      tree ptr;
      struct function *fn;
      unsigned i;
      basic_block bb;

      /* Nodes without a body are not interesting.  */
      if (!node->has_gimple_body_p () || node->clone_of)
	continue;

      fn = DECL_STRUCT_FUNCTION (node->decl);

      /* Compute the points-to sets for pointer SSA_NAMEs.  */
      FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
	{
	  if (ptr
	      && POINTER_TYPE_P (TREE_TYPE (ptr)))
	    find_what_p_points_to (node->decl, ptr);
	}

      /* Compute the call-use and call-clobber sets for indirect calls
	 and calls to external functions.  */
      FOR_EACH_BB_FN (bb, fn)
	{
	  gimple_stmt_iterator gsi;

	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gcall *stmt;
	      struct pt_solution *pt;
	      varinfo_t vi, fi;
	      tree decl;

	      stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
	      if (!stmt)
		continue;

	      /* Handle direct calls to functions with body.  */
	      decl = gimple_call_fndecl (stmt);

	      {
		/* For OpenMP/OpenACC outlined-region launches the real
		   callee is the outlined function passed as argument.  */
		tree called_decl = NULL_TREE;
		if (gimple_call_builtin_p (stmt, BUILT_IN_GOMP_PARALLEL))
		  called_decl = TREE_OPERAND (gimple_call_arg (stmt, 0), 0);
		else if (gimple_call_builtin_p (stmt, BUILT_IN_GOACC_PARALLEL))
		  called_decl = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);

		if (called_decl != NULL_TREE
		    && !fndecl_maybe_in_other_partition (called_decl))
		  decl = called_decl;
	      }

	      if (decl
		  && (fi = lookup_vi_for_tree (decl))
		  && fi->is_fn_info)
		{
		  /* Direct call to an analyzed function: take the computed
		     clobber/use solutions from its function info.  */
		  *gimple_call_clobber_set (stmt)
		     = find_what_var_points_to
			 (node->decl, first_vi_for_offset (fi, fi_clobbers));
		  *gimple_call_use_set (stmt)
		     = find_what_var_points_to
			 (node->decl, first_vi_for_offset (fi, fi_uses));
		}
	      /* Handle direct calls to external functions.  */
	      else if (decl && (!fi || fi->decl))
		{
		  pt = gimple_call_use_set (stmt);
		  if (gimple_call_flags (stmt) & ECF_CONST)
		    memset (pt, 0, sizeof (struct pt_solution));
		  else if ((vi = lookup_call_use_vi (stmt)) != NULL)
		    {
		      *pt = find_what_var_points_to (node->decl, vi);
		      /* Escaped (and thus nonlocal) variables are always
			 implicitly used by calls.  */
		      /* ??? ESCAPED can be empty even though NONLOCAL
			 always escaped.  */
		      pt->nonlocal = 1;
		      pt->ipa_escaped = 1;
		    }
		  else
		    {
		      /* If there is nothing special about this call then
			 we have made everything that is used also escape.  */
		      *pt = ipa_escaped_pt;
		      pt->nonlocal = 1;
		    }

		  pt = gimple_call_clobber_set (stmt);
		  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
		    memset (pt, 0, sizeof (struct pt_solution));
		  else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
		    {
		      *pt = find_what_var_points_to (node->decl, vi);
		      /* Escaped (and thus nonlocal) variables are always
			 implicitly clobbered by calls.  */
		      /* ??? ESCAPED can be empty even though NONLOCAL
			 always escaped.  */
		      pt->nonlocal = 1;
		      pt->ipa_escaped = 1;
		    }
		  else
		    {
		      /* If there is nothing special about this call then
			 we have made everything that is used also escape.  */
		      *pt = ipa_escaped_pt;
		      pt->nonlocal = 1;
		    }
		}
	      /* Handle indirect calls.  */
	      else if ((fi = get_fi_for_callee (stmt)))
		{
		  /* We need to accumulate all clobbers/uses of all possible
		     callees.  */
		  fi = get_varinfo (find (fi->id));
		  /* If we cannot constrain the set of functions we'll end up
		     calling we end up using/clobbering everything.  */
		  if (bitmap_bit_p (fi->solution, anything_id)
		      || bitmap_bit_p (fi->solution, nonlocal_id)
		      || bitmap_bit_p (fi->solution, escaped_id))
		    {
		      pt_solution_reset (gimple_call_clobber_set (stmt));
		      pt_solution_reset (gimple_call_use_set (stmt));
		    }
		  else
		    {
		      bitmap_iterator bi;
		      unsigned i;
		      struct pt_solution *uses, *clobbers;

		      uses = gimple_call_use_set (stmt);
		      clobbers = gimple_call_clobber_set (stmt);
		      memset (uses, 0, sizeof (struct pt_solution));
		      memset (clobbers, 0, sizeof (struct pt_solution));
		      /* Union the use/clobber solutions over every varinfo
			 the callee expression may point to.  */
		      EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
			{
			  struct pt_solution sol;

			  vi = get_varinfo (i);
			  if (!vi->is_fn_info)
			    {
			      /* ??? We could be more precise here?  */
			      uses->nonlocal = 1;
			      uses->ipa_escaped = 1;
			      clobbers->nonlocal = 1;
			      clobbers->ipa_escaped = 1;
			      continue;
			    }

			  if (!uses->anything)
			    {
			      sol = find_what_var_points_to
				      (node->decl,
				       first_vi_for_offset (vi, fi_uses));
			      pt_solution_ior_into (uses, &sol);
			    }
			  if (!clobbers->anything)
			    {
			      sol = find_what_var_points_to
				      (node->decl,
				       first_vi_for_offset (vi, fi_clobbers));
			      pt_solution_ior_into (clobbers, &sol);
			    }
			}
		    }
		}
	      else
		gcc_unreachable ();
	    }
	}

      fn->gimple_df->ipa_pta = true;

      /* We have to re-set the final-solution cache after each function
	 because what is a "global" is dependent on function context.  */
      final_solutions->empty ();
      obstack_free (&final_solutions_obstack, NULL);
      gcc_obstack_init (&final_solutions_obstack);
    }

  delete_points_to_sets ();

  in_ipa_mode = 0;

  return 0;
}
8572
8573 namespace {
8574
/* Pass metadata for the simple IPA points-to-analysis pass.  */
const pass_data pass_data_ipa_pta =
{
  SIMPLE_IPA_PASS, /* type */
  "pta", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PTA, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
8587
/* Pass wrapper driving ipa_pta_execute.  */
class pass_ipa_pta : public simple_ipa_opt_pass
{
public:
  pass_ipa_pta (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_pta, ctxt)
  {}

  /* opt_pass methods: */
  /* Run only when optimizing and -fipa-pta was given.  */
  virtual bool gate (function *)
    {
      return (optimize
	      && flag_ipa_pta
	      /* Don't bother doing anything if the program has errors.  */
	      && !seen_error ());
    }

  /* Allow the pass to be instantiated more than once in the pipeline.  */
  opt_pass * clone () { return new pass_ipa_pta (m_ctxt); }

  virtual unsigned int execute (function *) { return ipa_pta_execute (); }

}; // class pass_ipa_pta
8609
8610 } // anon namespace
8611
8612 simple_ipa_opt_pass *
make_pass_ipa_pta(gcc::context * ctxt)8613 make_pass_ipa_pta (gcc::context *ctxt)
8614 {
8615 return new pass_ipa_pta (ctxt);
8616 }
8617