1 /* Tree based points-to analysis
2 Copyright (C) 2005, 2006 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2 of the License, or
10 (at your option) any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; if not, write to the Free Software
19 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20 */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "ggc.h"
27 #include "obstack.h"
28 #include "bitmap.h"
29 #include "flags.h"
30 #include "rtl.h"
31 #include "tm_p.h"
32 #include "hard-reg-set.h"
33 #include "basic-block.h"
34 #include "output.h"
35 #include "errors.h"
36 #include "diagnostic.h"
37 #include "tree.h"
38 #include "c-common.h"
39 #include "tree-flow.h"
40 #include "tree-inline.h"
41 #include "varray.h"
42 #include "c-tree.h"
43 #include "tree-gimple.h"
44 #include "hashtab.h"
45 #include "function.h"
46 #include "cgraph.h"
47 #include "tree-pass.h"
48 #include "timevar.h"
49 #include "alloc-pool.h"
50 #include "splay-tree.h"
51 #include "params.h"
52 #include "tree-ssa-structalias.h"
53 #include "cgraph.h"
54 #include "pointer-set.h"
55
56 /* The idea behind this analyzer is to generate set constraints from the
57 program, then solve the resulting constraints in order to generate the
58 points-to sets.
59
60 Set constraints are a way of modeling program analysis problems that
61 involve sets. They consist of an inclusion constraint language,
62 describing the variables (each variable is a set) and the operations
63 that are performed on the variables, and a set of rules that derive facts
64 from these operations. To solve a system of set constraints, you derive
65 all possible facts under the rules, which gives you the correct sets
66 as a consequence.
67
68 See "Efficient Field-sensitive pointer analysis for C" by "David
69 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
70 http://citeseer.ist.psu.edu/pearce04efficient.html
71
72 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
73 of C Code in a Second" by Nevin Heintze and Olivier Tardieu, at
74 http://citeseer.ist.psu.edu/heintze01ultrafast.html
75
76 There are three types of real constraint expressions, DEREF,
77 ADDRESSOF, and SCALAR. Each constraint expression consists
78 of a constraint type, a variable, and an offset.
79
80 SCALAR is a constraint expression type used to represent x, whether
81 it appears on the LHS or the RHS of a statement.
82 DEREF is a constraint expression type used to represent *x, whether
83 it appears on the LHS or the RHS of a statement.
84 ADDRESSOF is a constraint expression used to represent &x, whether
85 it appears on the LHS or the RHS of a statement.
86
87 Each pointer variable in the program is assigned an integer id, and
88 each field of a structure variable is assigned an integer id as well.
89
90 Structure variables are linked to their list of fields through a "next
91 field" in each variable that points to the next field in offset
92 order.
93 Each variable for a structure field has
94
95 1. "size", that tells the size in bits of that field.
96 2. "fullsize, that tells the size in bits of the entire structure.
97 3. "offset", that tells the offset in bits from the beginning of the
98 structure to this field.
99
100 Thus,
101 struct f
102 {
103 int a;
104 int b;
105 } foo;
106 int *bar;
107
108 looks like
109
110 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
111 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
112 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
113
114
115 In order to solve the system of set constraints, the following is
116 done:
117
118 1. Each constraint variable x has a solution set associated with it,
119 Sol(x).
120
121 2. Constraints are separated into direct, copy, and complex.
122 Direct constraints are ADDRESSOF constraints that require no extra
123 processing, such as P = &Q
124 Copy constraints are those of the form P = Q.
125 Complex constraints are all the constraints involving dereferences
126 and offsets (including offsetted copies).
127
128 3. All direct constraints of the form P = &Q are processed, such
129 that Q is added to Sol(P)
130
131 4. All complex constraints for a given constraint variable are stored in a
132 linked list attached to that variable's node.
133
134 5. A directed graph is built out of the copy constraints. Each
135 constraint variable is a node in the graph, and an edge from
136 Q to P is added for each copy constraint of the form P = Q
137
138 6. The graph is then walked, and solution sets are
139 propagated along the copy edges, such that an edge from Q to P
140 causes Sol(P) <- Sol(P) union Sol(Q).
141
142 7. As we visit each node, all complex constraints associated with
143 that node are processed by adding appropriate copy edges to the graph, or the
144 appropriate variables to the solution set.
145
146 8. The process of walking the graph is iterated until no solution
147 sets change.
148
149 Prior to walking the graph in steps 6 and 7, we perform static
150 cycle elimination on the constraint graph, as well
151 as off-line variable substitution.
152
153 TODO: Adding offsets to pointer-to-structures can be handled (i.e. not punted
154 on and turned into anything), but isn't. You can just see what offset
155 inside the pointed-to struct it's going to access.
156
157 TODO: Constant bounded arrays can be handled as if they were structs of the
158 same number of elements.
159
160 TODO: Modeling heap and incoming pointers becomes much better if we
161 add fields to them as we discover them, which we could do.
162
163 TODO: We could handle unions, but to be honest, it's probably not
164 worth the pain or slowdown. */
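/* As a hand-written illustration of the constraint forms described
   above (not generated output), the statements

     p = &a;      produce the direct constraint    p = &a
     q = p;       produce the copy constraint      q = p
     r = *q;      produce the complex constraint   r = *q
     *q = p;      produce the complex constraint   *q = p

   Solving seeds Sol(p) = {a}; the copy edge from p to q then gives
   Sol(q) = {a}, and the complex constraints are processed against
   those solutions as described in steps 6 and 7 above.  */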
165
166 static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map))) htab_t heapvar_for_stmt;
167
168 /* One variable to represent all non-local accesses. */
169 tree nonlocal_all;
170
171 static bool use_field_sensitive = true;
172 static int in_ipa_mode = 0;
173
174 /* Used for predecessor bitmaps. */
175 static bitmap_obstack predbitmap_obstack;
176
177 /* Used for points-to sets. */
178 static bitmap_obstack pta_obstack;
179
180 /* Used for oldsolution members of variables. */
181 static bitmap_obstack oldpta_obstack;
182
183 /* Used for per-solver-iteration bitmaps. */
184 static bitmap_obstack iteration_obstack;
185
186 static unsigned int create_variable_info_for (tree, const char *);
187 typedef struct constraint_graph *constraint_graph_t;
188 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
189
190 DEF_VEC_P(constraint_t);
191 DEF_VEC_ALLOC_P(constraint_t,heap);
192
193 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
194 if (a) \
195 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
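/* A typical use of the macro above, mirroring the solver loops later
   in this file (sketch only):

     unsigned int i;
     bitmap_iterator bi;

     EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
       {
         ... visit successor i ...
       }

   The wrapper simply skips the iteration entirely when the bitmap
   pointer is NULL.  */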
196
197 static struct constraint_stats
198 {
199 unsigned int total_vars;
200 unsigned int nonpointer_vars;
201 unsigned int unified_vars_static;
202 unsigned int unified_vars_dynamic;
203 unsigned int iterations;
204 unsigned int num_edges;
205 unsigned int num_implicit_edges;
206 unsigned int points_to_sets_created;
207 } stats;
208
209 struct variable_info
210 {
211 /* ID of this variable */
212 unsigned int id;
213
214 /* Name of this variable */
215 const char *name;
216
217 /* Tree that this variable is associated with. */
218 tree decl;
219
220 /* Offset of this variable, in bits, from the base variable */
221 unsigned HOST_WIDE_INT offset;
222
223 /* Size of the variable, in bits. */
224 unsigned HOST_WIDE_INT size;
225
226 /* Full size of the base variable, in bits. */
227 unsigned HOST_WIDE_INT fullsize;
228
229 /* A link to the variable for the next field in this structure. */
230 struct variable_info *next;
231
232 /* True if the variable is directly the target of a dereference.
233 This is used to track which variables are *actually* dereferenced
234 so we can prune their points-to lists. */
235 unsigned int directly_dereferenced:1;
236
237 /* True if this is a variable created by the constraint analysis, such as
238 heap variables and constraints we had to break up. */
239 unsigned int is_artificial_var:1;
240
241 /* True if this is a special variable whose solution set should not be
242 changed. */
243 unsigned int is_special_var:1;
244
245 /* True for variables whose size is not known or variable. */
246 unsigned int is_unknown_size_var:1;
247
248 /* True for variables that have unions somewhere in them. */
249 unsigned int has_union:1;
250
251 /* True if this is a heap variable. */
252 unsigned int is_heap_var:1;
253
254 /* Points-to set for this variable. */
255 bitmap solution;
256
257 /* Old points-to set for this variable. */
258 bitmap oldsolution;
259
260 /* Variable ids represented by this node. */
261 bitmap variables;
262
263 /* Variable id this was collapsed to due to type unsafety. This
264 should be unused completely after build_succ_graph, or something
265 is broken. */
266 struct variable_info *collapsed_to;
267 };
268 typedef struct variable_info *varinfo_t;
269
270 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
271
272 /* Pool of variable info structures. */
273 static alloc_pool variable_info_pool;
274
275 DEF_VEC_P(varinfo_t);
276
277 DEF_VEC_ALLOC_P(varinfo_t, heap);
278
279 /* Table of variable info structures for constraint variables.
280 Indexed directly by variable info id. */
281 static VEC(varinfo_t,heap) *varmap;
282
283 /* Return the varmap element N */
284
285 static inline varinfo_t
286 get_varinfo (unsigned int n)
287 {
288 return VEC_index (varinfo_t, varmap, n);
289 }
290
291 /* Return the varmap element N, following the collapsed_to link. */
292
293 static inline varinfo_t
294 get_varinfo_fc (unsigned int n)
295 {
296 varinfo_t v = VEC_index (varinfo_t, varmap, n);
297
298 if (v->collapsed_to)
299 return v->collapsed_to;
300 return v;
301 }
302
303 /* Variable that represents the unknown pointer. */
304 static varinfo_t var_anything;
305 static tree anything_tree;
306 static unsigned int anything_id;
307
308 /* Variable that represents the NULL pointer. */
309 static varinfo_t var_nothing;
310 static tree nothing_tree;
311 static unsigned int nothing_id;
312
313 /* Variable that represents read only memory. */
314 static varinfo_t var_readonly;
315 static tree readonly_tree;
316 static unsigned int readonly_id;
317
318 /* Variable that represents integers. This is used for when people do things
319 like &0->a.b. */
320 static varinfo_t var_integer;
321 static tree integer_tree;
322 static unsigned int integer_id;
323
324 /* Variable that represents escaped variables. This is used to give
325 incoming pointer variables a better set than ANYTHING. */
326 static varinfo_t var_escaped_vars;
327 static tree escaped_vars_tree;
328 static unsigned int escaped_vars_id;
329
330 /* Variable that represents non-local variables before we expand it to
331 one for each type. */
332 static unsigned int nonlocal_vars_id;
333 /* Lookup a heap var for FROM, and return it if we find one. */
334
335 static tree
336 heapvar_lookup (tree from)
337 {
338 struct tree_map *h, in;
339 in.from = from;
340
341 h = htab_find_with_hash (heapvar_for_stmt, &in, htab_hash_pointer (from));
342 if (h)
343 return h->to;
344 return NULL_TREE;
345 }
346
347 /* Insert a mapping FROM->TO in the heap var for statement
348 hashtable. */
349
350 static void
351 heapvar_insert (tree from, tree to)
352 {
353 struct tree_map *h;
354 void **loc;
355
356 h = ggc_alloc (sizeof (struct tree_map));
357 h->hash = htab_hash_pointer (from);
358 h->from = from;
359 h->to = to;
360 loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->hash, INSERT);
361 *(struct tree_map **) loc = h;
362 }
363
364 /* Return a new variable info structure for the variable named NAME,
365 associated with tree T and given id ID. */
366
367 static varinfo_t
368 new_var_info (tree t, unsigned int id, const char *name)
369 {
370 varinfo_t ret = pool_alloc (variable_info_pool);
371
372 ret->id = id;
373 ret->name = name;
374 ret->decl = t;
375 ret->directly_dereferenced = false;
376 ret->is_artificial_var = false;
377 ret->is_heap_var = false;
378 ret->is_special_var = false;
379 ret->is_unknown_size_var = false;
380 ret->has_union = false;
381 ret->solution = BITMAP_ALLOC (&pta_obstack);
382 ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
383 ret->next = NULL;
384 ret->collapsed_to = NULL;
385 return ret;
386 }
387
388 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
389
390 /* An expression that appears in a constraint. */
391
392 struct constraint_expr
393 {
394 /* Constraint type. */
395 constraint_expr_type type;
396
397 /* Variable we are referring to in the constraint. */
398 unsigned int var;
399
400 /* Offset, in bits, of this constraint from the beginning of
401 the variables it ends up referring to.
402
403 IOW, in a deref constraint, we would deref, get the result set,
404 then add OFFSET to each member. */
405 unsigned HOST_WIDE_INT offset;
406 };
407
408 typedef struct constraint_expr ce_s;
409 DEF_VEC_O(ce_s);
410 DEF_VEC_ALLOC_O(ce_s, heap);
411 static void get_constraint_for (tree, VEC(ce_s, heap) **);
412 static void do_deref (VEC (ce_s, heap) **);
413
414 /* Our set constraints are made up of two constraint expressions, one
415 LHS, and one RHS.
416
417 As described in the introduction, our set constraints each represent an
418 operation between set valued variables.
419 */
420 struct constraint
421 {
422 struct constraint_expr lhs;
423 struct constraint_expr rhs;
424 };
425
426 /* The list of constraints we build the constraint graph from. */
427
428 static VEC(constraint_t,heap) *constraints;
429 static alloc_pool constraint_pool;
430
431
432 DEF_VEC_I(int);
433 DEF_VEC_ALLOC_I(int, heap);
434
435 /* The constraint graph is represented as an array of bitmaps
436 containing successor nodes. */
437
438 struct constraint_graph
439 {
440 /* Size of this graph, which may be different than the number of
441 nodes in the variable map. */
442 unsigned int size;
443
444 /* Explicit successors of each node. */
445 bitmap *succs;
446
447 /* Implicit predecessors of each node (Used for variable
448 substitution). */
449 bitmap *implicit_preds;
450
451 /* Explicit predecessors of each node (Used for variable substitution). */
452 bitmap *preds;
453
454 /* Indirect cycle representatives, or -1 if the node has no indirect
455 cycles. */
456 int *indirect_cycles;
457
458 /* Representative node for a node. rep[a] == a unless the node has
459 been unified. */
460 unsigned int *rep;
461
462 /* Equivalence class representative for a node. This is used for
463 variable substitution. */
464 int *eq_rep;
465
466 /* Label for each node, used during variable substitution. */
467 unsigned int *label;
468
469 /* Bitmap of nodes where the bit is set if the node is a direct
470 node. Used for variable substitution. */
471 sbitmap direct_nodes;
472
473 /* Vector of complex constraints for each graph node. Complex
474 constraints are those involving dereferences or offsets that are
475 not 0. */
476 VEC(constraint_t,heap) **complex;
477 };
478
479 static constraint_graph_t graph;
480
481 /* During variable substitution and the offline version of indirect
482 cycle finding, we create nodes to represent dereferences and
483 address taken constraints. These represent where these start and
484 end. */
485 #define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
486 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
487 #define FIRST_ADDR_NODE (LAST_REF_NODE + 1)
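/* For example (illustrative numbers only), with three variables in
   varmap, graph->size is 3 * 3 = 9 and the offline node space is

     0 .. 2   the constraint variables themselves
     3 .. 5   FIRST_REF_NODE .. LAST_REF_NODE, the "*v" nodes
     6 .. 8   FIRST_ADDR_NODE onwards, the "&v" nodes

   which matches the sizing done in build_pred_graph.  */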
488
489 /* Return the representative node for NODE, if NODE has been unioned
490 with another NODE.
491 This function performs path compression along the way to finding
492 the representative. */
493
494 static unsigned int
495 find (unsigned int node)
496 {
497 gcc_assert (node < graph->size);
498 if (graph->rep[node] != node)
499 return graph->rep[node] = find (graph->rep[node]);
500 return node;
501 }
502
503 /* Union the FROM node into the TO node.
504 Note that at some point in the future, we may want to do
505 union-by-rank, in which case we are going to have to return the
506 node we unified to. */
507
508 static bool
509 unite (unsigned int to, unsigned int from)
510 {
511 gcc_assert (to < graph->size && from < graph->size);
512 if (to != from && graph->rep[from] != to)
513 {
514 graph->rep[from] = to;
515 return true;
516 }
517 return false;
518 }
519
520 /* Create a new constraint consisting of LHS and RHS expressions. */
521
522 static constraint_t
523 new_constraint (const struct constraint_expr lhs,
524 const struct constraint_expr rhs)
525 {
526 constraint_t ret = pool_alloc (constraint_pool);
527 ret->lhs = lhs;
528 ret->rhs = rhs;
529 return ret;
530 }
531
532 /* Print out constraint C to FILE. */
533
534 void
535 dump_constraint (FILE *file, constraint_t c)
536 {
537 if (c->lhs.type == ADDRESSOF)
538 fprintf (file, "&");
539 else if (c->lhs.type == DEREF)
540 fprintf (file, "*");
541 fprintf (file, "%s", get_varinfo_fc (c->lhs.var)->name);
542 if (c->lhs.offset != 0)
543 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
544 fprintf (file, " = ");
545 if (c->rhs.type == ADDRESSOF)
546 fprintf (file, "&");
547 else if (c->rhs.type == DEREF)
548 fprintf (file, "*");
549 fprintf (file, "%s", get_varinfo_fc (c->rhs.var)->name);
550 if (c->rhs.offset != 0)
551 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
552 fprintf (file, "\n");
553 }
554
555 /* Print out constraint C to stderr. */
556
557 void
558 debug_constraint (constraint_t c)
559 {
560 dump_constraint (stderr, c);
561 }
562
563 /* Print out all constraints to FILE */
564
565 void
566 dump_constraints (FILE *file)
567 {
568 int i;
569 constraint_t c;
570 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
571 dump_constraint (file, c);
572 }
573
574 /* Print out all constraints to stderr. */
575
576 void
577 debug_constraints (void)
578 {
579 dump_constraints (stderr);
580 }
581
582 /* SOLVER FUNCTIONS
583
584 The solver is a simple worklist solver, that works on the following
585 algorithm:
586
587 sbitmap changed_nodes = all zeroes;
588 changed_count = 0;
589 For each node that is not already collapsed:
590 changed_count++;
591 set bit in changed nodes
592
593 while (changed_count > 0)
594 {
595 compute topological ordering for constraint graph
596
597 find and collapse cycles in the constraint graph (updating
598 changed if necessary)
599
600 for each node (n) in the graph in topological order:
601 changed_count--;
602
603 Process each complex constraint associated with the node,
604 updating changed if necessary.
605
606 For each outgoing edge from n, propagate the solution from n to
607 the destination of the edge, updating changed as necessary.
608
609 } */
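/* A small hand-traced example of the propagation above (informal):
   given

     a = &x;   b = a;   c = b;

   the direct constraint seeds Sol(a) = {x}, and the copy constraints
   add the edges a -> b and b -> c.  Visiting the nodes in topological
   order unions Sol(a) into Sol(b) and then Sol(b) into Sol(c), so the
   whole chain settles in a single pass and no node remains in the
   changed set for the next iteration.  */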
610
611 /* Return true if two constraint expressions A and B are equal. */
612
613 static bool
614 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
615 {
616 return a.type == b.type && a.var == b.var && a.offset == b.offset;
617 }
618
619 /* Return true if constraint expression A is less than constraint expression
620 B. This is just arbitrary, but consistent, in order to give them an
621 ordering. */
622
623 static bool
624 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
625 {
626 if (a.type == b.type)
627 {
628 if (a.var == b.var)
629 return a.offset < b.offset;
630 else
631 return a.var < b.var;
632 }
633 else
634 return a.type < b.type;
635 }
636
637 /* Return true if constraint A is less than constraint B. This is just
638 arbitrary, but consistent, in order to give them an ordering. */
639
640 static bool
641 constraint_less (const constraint_t a, const constraint_t b)
642 {
643 if (constraint_expr_less (a->lhs, b->lhs))
644 return true;
645 else if (constraint_expr_less (b->lhs, a->lhs))
646 return false;
647 else
648 return constraint_expr_less (a->rhs, b->rhs);
649 }
650
651 /* Return true if two constraints A and B are equal. */
652
653 static bool
654 constraint_equal (struct constraint a, struct constraint b)
655 {
656 return constraint_expr_equal (a.lhs, b.lhs)
657 && constraint_expr_equal (a.rhs, b.rhs);
658 }
659
660
661 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
662
663 static constraint_t
664 constraint_vec_find (VEC(constraint_t,heap) *vec,
665 struct constraint lookfor)
666 {
667 unsigned int place;
668 constraint_t found;
669
670 if (vec == NULL)
671 return NULL;
672
673 place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
674 if (place >= VEC_length (constraint_t, vec))
675 return NULL;
676 found = VEC_index (constraint_t, vec, place);
677 if (!constraint_equal (*found, lookfor))
678 return NULL;
679 return found;
680 }
681
682 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
683
684 static void
685 constraint_set_union (VEC(constraint_t,heap) **to,
686 VEC(constraint_t,heap) **from)
687 {
688 int i;
689 constraint_t c;
690
691 for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
692 {
693 if (constraint_vec_find (*to, *c) == NULL)
694 {
695 unsigned int place = VEC_lower_bound (constraint_t, *to, c,
696 constraint_less);
697 VEC_safe_insert (constraint_t, heap, *to, place, c);
698 }
699 }
700 }
701
702 /* Take a solution set SET, add OFFSET to each member of the set, and
703 overwrite SET with the result when done. */
704
705 static void
706 solution_set_add (bitmap set, unsigned HOST_WIDE_INT offset)
707 {
708 bitmap result = BITMAP_ALLOC (&iteration_obstack);
709 unsigned int i;
710 bitmap_iterator bi;
711 unsigned HOST_WIDE_INT min = -1, max = 0;
712
713 /* Compute set of vars we can reach from set + offset. */
714
715 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
716 {
717 if (get_varinfo (i)->is_artificial_var
718 || get_varinfo (i)->has_union
719 || get_varinfo (i)->is_unknown_size_var)
720 continue;
721
722 if (get_varinfo (i)->offset + offset < min)
723 min = get_varinfo (i)->offset + offset;
724 if (get_varinfo (i)->offset + get_varinfo (i)->size + offset > max)
725 {
726 max = get_varinfo (i)->offset + get_varinfo (i)->size + offset;
727 if (max > get_varinfo (i)->fullsize)
728 max = get_varinfo (i)->fullsize;
729 }
730 }
731
732 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
733 {
734 /* If this is a properly sized variable, only add offset if it's
735 less than end. Otherwise, it is globbed to a single
736 variable. */
737
738 if (get_varinfo (i)->offset + get_varinfo (i)->size - 1 >= min
739 && get_varinfo (i)->offset < max)
740 {
741 bitmap_set_bit (result, i);
742 }
743 else if (get_varinfo (i)->is_artificial_var
744 || get_varinfo (i)->has_union
745 || get_varinfo (i)->is_unknown_size_var)
746 {
747 bitmap_set_bit (result, i);
748 }
749 }
750
751 bitmap_copy (set, result);
752 BITMAP_FREE (result);
753 }
754
755 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
756 process. */
757
758 static bool
759 set_union_with_increment (bitmap to, bitmap from, unsigned HOST_WIDE_INT inc)
760 {
761 if (inc == 0)
762 return bitmap_ior_into (to, from);
763 else
764 {
765 bitmap tmp;
766 bool res;
767
768 tmp = BITMAP_ALLOC (&iteration_obstack);
769 bitmap_copy (tmp, from);
770 solution_set_add (tmp, inc);
771 res = bitmap_ior_into (to, tmp);
772 BITMAP_FREE (tmp);
773 return res;
774 }
775 }
776
777 /* Insert constraint C into the list of complex constraints for graph
778 node VAR. */
779
780 static void
781 insert_into_complex (constraint_graph_t graph,
782 unsigned int var, constraint_t c)
783 {
784 VEC (constraint_t, heap) *complex = graph->complex[var];
785 unsigned int place = VEC_lower_bound (constraint_t, complex, c,
786 constraint_less);
787
788 /* Only insert constraints that do not already exist. */
789 if (place >= VEC_length (constraint_t, complex)
790 || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
791 VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
792 }
793
794
795 /* Condense two variable nodes into a single variable node, by moving
796 all associated info from FROM to TO. */
797
798 static void
799 merge_node_constraints (constraint_graph_t graph, unsigned int to,
800 unsigned int from)
801 {
802 unsigned int i;
803 constraint_t c;
804
805 gcc_assert (find (from) == to);
806
807 /* Move all complex constraints from src node into to node */
808 for (i = 0; VEC_iterate (constraint_t, graph->complex[from], i, c); i++)
809 {
810 /* In complex constraints for node FROM, we may have either
811 a = *from, *from = a, or an offsetted constraint, which are
812 always added to the rhs node's constraints. */
813
814 if (c->rhs.type == DEREF)
815 c->rhs.var = to;
816 else if (c->lhs.type == DEREF)
817 c->lhs.var = to;
818 else
819 c->rhs.var = to;
820 }
821 constraint_set_union (&graph->complex[to], &graph->complex[from]);
822 VEC_free (constraint_t, heap, graph->complex[from]);
823 graph->complex[from] = NULL;
824 }
825
826
827 /* Remove edges involving NODE from GRAPH. */
828
829 static void
830 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
831 {
832 if (graph->succs[node])
833 BITMAP_FREE (graph->succs[node]);
834 }
835
836 /* Merge GRAPH nodes FROM and TO into node TO. */
837
838 static void
839 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
840 unsigned int from)
841 {
842 if (graph->indirect_cycles[from] != -1)
843 {
844 /* If we have indirect cycles with the from node, and we have
845 none on the to node, the to node has indirect cycles from the
846 from node now that they are unified.
847 If indirect cycles exist on both, unify the nodes that they
848 are in a cycle with, since we know they are in a cycle with
849 each other. */
850 if (graph->indirect_cycles[to] == -1)
851 {
852 graph->indirect_cycles[to] = graph->indirect_cycles[from];
853 }
854 else
855 {
856 unsigned int tonode = find (graph->indirect_cycles[to]);
857 unsigned int fromnode = find (graph->indirect_cycles[from]);
858
859 if (unite (tonode, fromnode))
860 unify_nodes (graph, tonode, fromnode, true);
861 }
862 }
863
864 /* Merge all the successor edges. */
865 if (graph->succs[from])
866 {
867 if (!graph->succs[to])
868 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
869 bitmap_ior_into (graph->succs[to],
870 graph->succs[from]);
871 }
872
873 clear_edges_for_node (graph, from);
874 }
875
876
877 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
878 it doesn't exist in the graph already. */
879
880 static void
881 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
882 unsigned int from)
883 {
884 if (to == from)
885 return;
886
887 if (!graph->implicit_preds[to])
888 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
889
890 if (!bitmap_bit_p (graph->implicit_preds[to], from))
891 {
892 stats.num_implicit_edges++;
893 bitmap_set_bit (graph->implicit_preds[to], from);
894 }
895 }
896
897 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
898 it doesn't exist in the graph already. */
900
901 static void
902 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
903 unsigned int from)
904 {
905 if (!graph->preds[to])
906 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
907 if (!bitmap_bit_p (graph->preds[to], from))
908 bitmap_set_bit (graph->preds[to], from);
909 }
910
911 /* Add a graph edge to GRAPH, going from FROM to TO if
912 it doesn't exist in the graph already.
913 Return false if the edge already existed, true otherwise. */
914
915 static bool
916 add_graph_edge (constraint_graph_t graph, unsigned int to,
917 unsigned int from)
918 {
919 if (to == from)
920 {
921 return false;
922 }
923 else
924 {
925 bool r = false;
926
927 if (!graph->succs[from])
928 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
929 if (!bitmap_bit_p (graph->succs[from], to))
930 {
931 r = true;
932 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
933 stats.num_edges++;
934 bitmap_set_bit (graph->succs[from], to);
935 }
936 return r;
937 }
938 }
939
940
941 /* Return true if {DEST, SRC} is an existing graph edge in GRAPH. */
942
943 static bool
944 valid_graph_edge (constraint_graph_t graph, unsigned int src,
945 unsigned int dest)
946 {
947 return (graph->succs[dest]
948 && bitmap_bit_p (graph->succs[dest], src));
949 }
950
951 /* Build the constraint graph, adding only predecessor edges right now. */
952
953 static void
954 build_pred_graph (void)
955 {
956 int i;
957 constraint_t c;
958 unsigned int j;
959
960 graph = XNEW (struct constraint_graph);
961 graph->size = (VEC_length (varinfo_t, varmap)) * 3;
962 graph->succs = XCNEWVEC (bitmap, graph->size);
963 graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
964 graph->preds = XCNEWVEC (bitmap, graph->size);
965 graph->indirect_cycles = XNEWVEC (int, VEC_length (varinfo_t, varmap));
966 graph->label = XCNEWVEC (unsigned int, graph->size);
967 graph->rep = XNEWVEC (unsigned int, graph->size);
968 graph->eq_rep = XNEWVEC (int, graph->size);
969 graph->complex = XCNEWVEC (VEC(constraint_t, heap) *,
970 VEC_length (varinfo_t, varmap));
971 graph->direct_nodes = sbitmap_alloc (graph->size);
972 sbitmap_zero (graph->direct_nodes);
973
974 for (j = 0; j < FIRST_REF_NODE; j++)
975 {
976 if (!get_varinfo (j)->is_special_var)
977 SET_BIT (graph->direct_nodes, j);
978 }
979
980 for (j = 0; j < graph->size; j++)
981 {
982 graph->rep[j] = j;
983 graph->eq_rep[j] = -1;
984 }
985
986 for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
987 graph->indirect_cycles[j] = -1;
988
989 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
990 {
991 struct constraint_expr lhs = c->lhs;
992 struct constraint_expr rhs = c->rhs;
993 unsigned int lhsvar = get_varinfo_fc (lhs.var)->id;
994 unsigned int rhsvar = get_varinfo_fc (rhs.var)->id;
995
996 if (lhs.type == DEREF)
997 {
998 /* *x = y. */
999 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1000 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1001 if (rhs.type == ADDRESSOF)
1002 RESET_BIT (graph->direct_nodes, rhsvar);
1003 }
1004 else if (rhs.type == DEREF)
1005 {
1006 /* x = *y */
1007 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1008 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1009 else
1010 RESET_BIT (graph->direct_nodes, lhsvar);
1011 }
1012 else if (rhs.type == ADDRESSOF)
1013 {
1014 /* x = &y */
1015 add_pred_graph_edge (graph, lhsvar, FIRST_ADDR_NODE + rhsvar);
1016 /* Implicitly, *x = y */
1017 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1018
1019 RESET_BIT (graph->direct_nodes, rhsvar);
1020 }
1021 else if (lhsvar > anything_id
1022 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1023 {
1024 /* x = y */
1025 add_pred_graph_edge (graph, lhsvar, rhsvar);
1026 /* Implicitly, *x = *y */
1027 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1028 FIRST_REF_NODE + rhsvar);
1029 }
1030 else if (lhs.offset != 0 || rhs.offset != 0)
1031 {
1032 if (rhs.offset != 0)
1033 RESET_BIT (graph->direct_nodes, lhs.var);
1034 if (lhs.offset != 0)
1035 RESET_BIT (graph->direct_nodes, rhs.var);
1036 }
1037 }
1038 }
1039
1040 /* Build the constraint graph, adding successor edges. */
1041
1042 static void
1043 build_succ_graph (void)
1044 {
1045 int i;
1046 constraint_t c;
1047
1048 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1049 {
1050 struct constraint_expr lhs;
1051 struct constraint_expr rhs;
1052 unsigned int lhsvar;
1053 unsigned int rhsvar;
1054
1055 if (!c)
1056 continue;
1057
1058 lhs = c->lhs;
1059 rhs = c->rhs;
1060 lhsvar = find (get_varinfo_fc (lhs.var)->id);
1061 rhsvar = find (get_varinfo_fc (rhs.var)->id);
1062
1063 if (lhs.type == DEREF)
1064 {
1065 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1066 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1067 }
1068 else if (rhs.type == DEREF)
1069 {
1070 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1071 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1072 }
1073 else if (rhs.type == ADDRESSOF)
1074 {
1075 /* x = &y */
1076 gcc_assert (find (get_varinfo_fc (rhs.var)->id)
1077 == get_varinfo_fc (rhs.var)->id);
1078 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1079 }
1080 else if (lhsvar > anything_id
1081 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1082 {
1083 add_graph_edge (graph, lhsvar, rhsvar);
1084 }
1085 }
1086 }
1087
1088
1089 /* Changed variables on the last iteration. */
1090 static unsigned int changed_count;
1091 static sbitmap changed;
1092
1093 DEF_VEC_I(unsigned);
1094 DEF_VEC_ALLOC_I(unsigned,heap);
1095
1096
1097 /* Strongly Connected Component visitation info. */
1098
1099 struct scc_info
1100 {
1101 sbitmap visited;
1102 sbitmap roots;
1103 unsigned int *dfs;
1104 unsigned int *node_mapping;
1105 int current_index;
1106 VEC(unsigned,heap) *scc_stack;
1107 };
1108
1109
1110 /* Recursive routine to find strongly connected components in GRAPH.
1111 SI is the SCC info to store the information in, and N is the id of current
1112 graph node we are processing.
1113
1114 This is Tarjan's strongly connected component finding algorithm, as
1115 modified by Nuutila to keep only non-root nodes on the stack.
1116 The algorithm can be found in "On finding the strongly connected
1117 components in a directed graph" by Esko Nuutila and Eljas
1118 Soisalon-Soininen, in Information Processing Letters volume 49,
1119 number 1, pages 9-14. */
1120
1121 static void
1122 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1123 {
1124 unsigned int i;
1125 bitmap_iterator bi;
1126 unsigned int my_dfs;
1127
1128 SET_BIT (si->visited, n);
1129 si->dfs[n] = si->current_index ++;
1130 my_dfs = si->dfs[n];
1131
1132 /* Visit all the successors. */
1133 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1134 {
1135 unsigned int w;
1136
1137 if (i > LAST_REF_NODE)
1138 break;
1139
1140 w = find (i);
1141 if (TEST_BIT (si->roots, w))
1142 continue;
1143
1144 if (!TEST_BIT (si->visited, w))
1145 scc_visit (graph, si, w);
1146 {
1147 unsigned int t = find (w);
1148 unsigned int nnode = find (n);
1149 gcc_assert (nnode == n);
1150
1151 if (si->dfs[t] < si->dfs[nnode])
1152 si->dfs[n] = si->dfs[t];
1153 }
1154 }
1155
1156 /* See if any components have been identified. */
1157 if (si->dfs[n] == my_dfs)
1158 {
1159 if (VEC_length (unsigned, si->scc_stack) > 0
1160 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1161 {
1162 bitmap scc = BITMAP_ALLOC (NULL);
1163 bool have_ref_node = n >= FIRST_REF_NODE;
1164 unsigned int lowest_node;
1165 bitmap_iterator bi;
1166
1167 bitmap_set_bit (scc, n);
1168
1169 while (VEC_length (unsigned, si->scc_stack) != 0
1170 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1171 {
1172 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1173
1174 bitmap_set_bit (scc, w);
1175 if (w >= FIRST_REF_NODE)
1176 have_ref_node = true;
1177 }
1178
1179 lowest_node = bitmap_first_set_bit (scc);
1180 gcc_assert (lowest_node < FIRST_REF_NODE);
1181 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1182 {
1183 if (i < FIRST_REF_NODE)
1184 {
1185 /* Mark this node for collapsing. */
1186 if (unite (lowest_node, i))
1187 unify_nodes (graph, lowest_node, i, false);
1188 }
1189 else
1190 {
1191 unite (lowest_node, i);
1192 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1193 }
1194 }
1195 }
1196 SET_BIT (si->roots, n);
1197 }
1198 else
1199 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1200 }
1201
1202 /* Unify node FROM into node TO, updating the changed count if
1203 necessary when UPDATE_CHANGED is true. */
1204
1205 static void
1206 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1207 bool update_changed)
1208 {
1209
1210 gcc_assert (to != from && find (to) == to);
1211 if (dump_file && (dump_flags & TDF_DETAILS))
1212 fprintf (dump_file, "Unifying %s to %s\n",
1213 get_varinfo (from)->name,
1214 get_varinfo (to)->name);
1215
1216 if (update_changed)
1217 stats.unified_vars_dynamic++;
1218 else
1219 stats.unified_vars_static++;
1220
1221 merge_graph_nodes (graph, to, from);
1222 merge_node_constraints (graph, to, from);
1223
1224 if (update_changed && TEST_BIT (changed, from))
1225 {
1226 RESET_BIT (changed, from);
1227 if (!TEST_BIT (changed, to))
1228 SET_BIT (changed, to);
1229 else
1230 {
1231 gcc_assert (changed_count > 0);
1232 changed_count--;
1233 }
1234 }
1235
1236 /* If the solution changes because of the merging, we need to mark
1237 the variable as changed. */
1238 if (bitmap_ior_into (get_varinfo (to)->solution,
1239 get_varinfo (from)->solution))
1240 {
1241 if (update_changed && !TEST_BIT (changed, to))
1242 {
1243 SET_BIT (changed, to);
1244 changed_count++;
1245 }
1246 }
1247
1248 BITMAP_FREE (get_varinfo (from)->solution);
1249 BITMAP_FREE (get_varinfo (from)->oldsolution);
1250
1251 if (stats.iterations > 0)
1252 {
1253 BITMAP_FREE (get_varinfo (to)->oldsolution);
1254 get_varinfo (to)->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
1255 }
1256
1257 if (valid_graph_edge (graph, to, to))
1258 {
1259 if (graph->succs[to])
1260 bitmap_clear_bit (graph->succs[to], to);
1261 }
1262 }
1263
1264 /* Information needed to compute the topological ordering of a graph. */
1265
1266 struct topo_info
1267 {
1268 /* sbitmap of visited nodes. */
1269 sbitmap visited;
1270 /* Array that stores the topological order of the graph, *in
1271 reverse*. */
1272 VEC(unsigned,heap) *topo_order;
1273 };
1274
1275
1276 /* Initialize and return a topological info structure. */
1277
1278 static struct topo_info *
1279 init_topo_info (void)
1280 {
1281 size_t size = VEC_length (varinfo_t, varmap);
1282 struct topo_info *ti = XNEW (struct topo_info);
1283 ti->visited = sbitmap_alloc (size);
1284 sbitmap_zero (ti->visited);
1285 ti->topo_order = VEC_alloc (unsigned, heap, 1);
1286 return ti;
1287 }
1288
1289
1290 /* Free the topological sort info pointed to by TI. */
1291
1292 static void
1293 free_topo_info (struct topo_info *ti)
1294 {
1295 sbitmap_free (ti->visited);
1296 VEC_free (unsigned, heap, ti->topo_order);
1297 free (ti);
1298 }
1299
1300 /* Visit the graph in topological order, and store the order in the
1301 topo_info structure. */
1302
1303 static void
1304 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1305 unsigned int n)
1306 {
1307 bitmap_iterator bi;
1308 unsigned int j;
1309
1310 SET_BIT (ti->visited, n);
1311
1312 if (graph->succs[n])
1313 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1314 {
1315 if (!TEST_BIT (ti->visited, j))
1316 topo_visit (graph, ti, j);
1317 }
1318
1319 VEC_safe_push (unsigned, heap, ti->topo_order, n);
1320 }
1321
1322 /* Return true if variable N + OFFSET is a legal field of N. */
1323
1324 static bool
1325 type_safe (unsigned int n, unsigned HOST_WIDE_INT *offset)
1326 {
1327 varinfo_t ninfo = get_varinfo (n);
1328
1329 /* For things we've globbed to single variables, any offset into the
1330 variable acts like the entire variable, so that it becomes offset
1331 0. */
1332 if (ninfo->is_special_var
1333 || ninfo->is_artificial_var
1334 || ninfo->is_unknown_size_var)
1335 {
1336 *offset = 0;
1337 return true;
1338 }
1339 return (get_varinfo (n)->offset + *offset) < get_varinfo (n)->fullsize;
1340 }
1341
1342 /* Process a constraint C that represents *x = &y. */
1343
1344 static void
1345 do_da_constraint (constraint_graph_t graph ATTRIBUTE_UNUSED,
1346 constraint_t c, bitmap delta)
1347 {
1348 unsigned int rhs = c->rhs.var;
1349 unsigned int j;
1350 bitmap_iterator bi;
1351
1352 /* For each member j of Delta (Sol(x)), add y to Sol(j) */
1353 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1354 {
1355 unsigned HOST_WIDE_INT offset = c->lhs.offset;
1356 if (type_safe (j, &offset) && !(get_varinfo (j)->is_special_var))
1357 {
1358 /* *x != NULL && *x != ANYTHING*/
1359 varinfo_t v;
1360 unsigned int t;
1361 bitmap sol;
1362 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + offset;
1363
1364 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1365 if (!v)
1366 continue;
1367 t = find (v->id);
1368 sol = get_varinfo (t)->solution;
1369 if (!bitmap_bit_p (sol, rhs))
1370 {
1371 bitmap_set_bit (sol, rhs);
1372 if (!TEST_BIT (changed, t))
1373 {
1374 SET_BIT (changed, t);
1375 changed_count++;
1376 }
1377 }
1378 }
1379 else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
1380 fprintf (dump_file, "Untypesafe usage in do_da_constraint.\n");
1381
1382 }
1383 }
1384
1385 /* Process a constraint C that represents x = *y, using DELTA as the
1386 starting solution. */
1387
1388 static void
1389 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1390 bitmap delta)
1391 {
1392 unsigned int lhs = find (c->lhs.var);
1393 bool flag = false;
1394 bitmap sol = get_varinfo (lhs)->solution;
1395 unsigned int j;
1396 bitmap_iterator bi;
1397
1398 if (bitmap_bit_p (delta, anything_id))
1399 {
1400 flag = !bitmap_bit_p (sol, anything_id);
1401 if (flag)
1402 bitmap_set_bit (sol, anything_id);
1403 goto done;
1404 }
1405 /* For each variable j in delta (Sol(y)), add
1406 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1407 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1408 {
1409 unsigned HOST_WIDE_INT roffset = c->rhs.offset;
1410 if (type_safe (j, &roffset))
1411 {
1412 varinfo_t v;
1413 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + roffset;
1414 unsigned int t;
1415
1416 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1417 if (!v)
1418 continue;
1419 t = find (v->id);
1420
1421 /* Adding edges from the special vars is pointless.
1422 They don't have sets that can change. */
1423 if (get_varinfo (t) ->is_special_var)
1424 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1425 else if (add_graph_edge (graph, lhs, t))
1426 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1427 }
1428 else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
1429 fprintf (dump_file, "Untypesafe usage in do_sd_constraint\n");
1430
1431 }
1432
1433 done:
1434 /* If the LHS solution changed, mark the var as changed. */
1435 if (flag)
1436 {
1437 get_varinfo (lhs)->solution = sol;
1438 if (!TEST_BIT (changed, lhs))
1439 {
1440 SET_BIT (changed, lhs);
1441 changed_count++;
1442 }
1443 }
1444 }
1445
1446 /* Process a constraint C that represents *x = y. */
1447
1448 static void
1449 do_ds_constraint (constraint_t c, bitmap delta)
1450 {
1451 unsigned int rhs = find (c->rhs.var);
1452 unsigned HOST_WIDE_INT roff = c->rhs.offset;
1453 bitmap sol = get_varinfo (rhs)->solution;
1454 unsigned int j;
1455 bitmap_iterator bi;
1456
1457 if (bitmap_bit_p (sol, anything_id))
1458 {
1459 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1460 {
1461 varinfo_t jvi = get_varinfo (j);
1462 unsigned int t;
1463 unsigned int loff = c->lhs.offset;
1464 unsigned HOST_WIDE_INT fieldoffset = jvi->offset + loff;
1465 varinfo_t v;
1466
1467 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1468 if (!v)
1469 continue;
1470 t = find (v->id);
1471
1472 if (!bitmap_bit_p (get_varinfo (t)->solution, anything_id))
1473 {
1474 bitmap_set_bit (get_varinfo (t)->solution, anything_id);
1475 if (!TEST_BIT (changed, t))
1476 {
1477 SET_BIT (changed, t);
1478 changed_count++;
1479 }
1480 }
1481 }
1482 return;
1483 }
1484
1485 /* For each member j of delta (Sol(x)), add an edge from y to j and
1486 union Sol(y) into Sol(j) */
1487 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1488 {
1489 unsigned HOST_WIDE_INT loff = c->lhs.offset;
1490 if (type_safe (j, &loff) && !(get_varinfo (j)->is_special_var))
1491 {
1492 varinfo_t v;
1493 unsigned int t;
1494 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + loff;
1495 bitmap tmp;
1496
1497 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1498 if (!v)
1499 continue;
1500 t = find (v->id);
1501 tmp = get_varinfo (t)->solution;
1502
1503 if (set_union_with_increment (tmp, sol, roff))
1504 {
1505 get_varinfo (t)->solution = tmp;
1506 if (t == rhs)
1507 sol = get_varinfo (rhs)->solution;
1508 if (!TEST_BIT (changed, t))
1509 {
1510 SET_BIT (changed, t);
1511 changed_count++;
1512 }
1513 }
1514 }
1515 else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
1516 fprintf (dump_file, "Untypesafe usage in do_ds_constraint\n");
1517 }
1518 }
1519
1520 /* Handle a non-simple constraint (simple meaning it requires no iteration),
1521 i.e. *x = &y, x = *y, *x = y, and x = y with offsets involved. */
1522
1523 static void
1524 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1525 {
1526 if (c->lhs.type == DEREF)
1527 {
1528 if (c->rhs.type == ADDRESSOF)
1529 {
1530 /* *x = &y */
1531 do_da_constraint (graph, c, delta);
1532 }
1533 else
1534 {
1535 /* *x = y */
1536 do_ds_constraint (c, delta);
1537 }
1538 }
1539 else if (c->rhs.type == DEREF)
1540 {
1541 /* x = *y */
1542 if (!(get_varinfo (c->lhs.var)->is_special_var))
1543 do_sd_constraint (graph, c, delta);
1544 }
1545 else
1546 {
1547 bitmap tmp;
1548 bitmap solution;
1549 bool flag = false;
1550 unsigned int t;
1551
1552 gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1553 t = find (c->rhs.var);
1554 solution = get_varinfo (t)->solution;
1555 t = find (c->lhs.var);
1556 tmp = get_varinfo (t)->solution;
1557
1558 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1559
1560 if (flag)
1561 {
1562 get_varinfo (t)->solution = tmp;
1563 if (!TEST_BIT (changed, t))
1564 {
1565 SET_BIT (changed, t);
1566 changed_count++;
1567 }
1568 }
1569 }
1570 }
1571
1572 /* Initialize and return a new SCC info structure. */
1573
1574 static struct scc_info *
1575 init_scc_info (size_t size)
1576 {
1577 struct scc_info *si = XNEW (struct scc_info);
1578 size_t i;
1579
1580 si->current_index = 0;
1581 si->visited = sbitmap_alloc (size);
1582 sbitmap_zero (si->visited);
1583 si->roots = sbitmap_alloc (size);
1584 sbitmap_zero (si->roots);
1585 si->node_mapping = XNEWVEC (unsigned int, size);
1586 si->dfs = XCNEWVEC (unsigned int, size);
1587
1588 for (i = 0; i < size; i++)
1589 si->node_mapping[i] = i;
1590
1591 si->scc_stack = VEC_alloc (unsigned, heap, 1);
1592 return si;
1593 }
1594
1595 /* Free an SCC info structure pointed to by SI */
1596
1597 static void
1598 free_scc_info (struct scc_info *si)
1599 {
1600 sbitmap_free (si->visited);
1601 sbitmap_free (si->roots);
1602 free (si->node_mapping);
1603 free (si->dfs);
1604 VEC_free (unsigned, heap, si->scc_stack);
1605 free (si);
1606 }
1607
1608
1609 /* Find the indirect cycles that occur in GRAPH, using strongly connected
1610 components, and note them in the indirect cycles map.
1611
1612 This technique comes from Ben Hardekopf and Calvin Lin,
1613 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1614 Lines of Code", submitted to PLDI 2007. */
1615
1616 static void
1617 find_indirect_cycles (constraint_graph_t graph)
1618 {
1619 unsigned int i;
1620 unsigned int size = graph->size;
1621 struct scc_info *si = init_scc_info (size);
1622
1623 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1624 if (!TEST_BIT (si->visited, i) && find (i) == i)
1625 scc_visit (graph, si, i);
1626
1627 free_scc_info (si);
1628 }
1629
1630 /* Compute a topological ordering for GRAPH, and store the result in the
1631 topo_info structure TI. */
1632
1633 static void
1634 compute_topo_order (constraint_graph_t graph,
1635 struct topo_info *ti)
1636 {
1637 unsigned int i;
1638 unsigned int size = VEC_length (varinfo_t, varmap);
1639
1640 for (i = 0; i != size; ++i)
1641 if (!TEST_BIT (ti->visited, i) && find (i) == i)
1642 topo_visit (graph, ti, i);
1643 }
1644
1645 /* Perform offline variable substitution.
1646
1647 This is a linear-time way of identifying variables that must have
1648 equivalent points-to sets, including those caused by static cycles
1649 and single-entry subgraphs in the constraint graph.
1650
1651 The technique is described in "Off-line variable substitution for
1652 scaling points-to analysis" by Atanas Rountev and Satish Chandra,
1653 in "ACM SIGPLAN Notices" volume 35, number 5, pages 47-56.
1654
1655 There is an optimal way to do this involving hash-based value
1656 numbering; once the technique is published, I will implement it
1657 here.
1658
1659 The general method of finding equivalence classes is as follows:
1660 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1661 Add fake nodes (ADDRESS nodes) and edges for a = &b constraints.
1662 Initialize all non-REF/ADDRESS nodes to be direct nodes
1663 For each SCC in the predecessor graph:
1664 for each member (x) of the SCC
1665 if x is not a direct node:
1666 set rootnode(SCC) to be not a direct node
1667 collapse node x into rootnode(SCC).
1668 if rootnode(SCC) is not a direct node:
1669 label rootnode(SCC) with a new equivalence class
1670 else:
1671 if all labeled predecessors of rootnode(SCC) have the same
1672 label:
1673 label rootnode(SCC) with this label
1674 else:
1675 label rootnode(SCC) with a new equivalence class
1676
1677 All direct nodes with the same equivalence class can be replaced
1678 with a single representative node.
1679 All unlabeled nodes (label == 0) are not pointers and all edges
1680 involving them can be eliminated.
1681 We perform these optimizations during move_complex_constraints.
1682 */
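/* As an informal example of the labeling rules above: if node a has
   already received label L, then the copy constraints b = a and c = a
   make b and c direct nodes whose only labeled predecessor is a, so
   both receive label L and a single representative node can stand in
   for them.  A node that ends up unlabeled (label == 0) is treated as
   a non-pointer and its edges are removed in perform_var_substitution.  */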
1683
1684 static int equivalence_class;
1685
1686 /* Recursive routine to find strongly connected components in GRAPH,
1687 and label its nodes with equivalence classes.
1688 This is used during variable substitution to find cycles involving
1689 the regular or implicit predecessors, and label them as equivalent.
1690 The SCC finding algorithm used is the same as that for scc_visit. */
1691
1692 static void
1693 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1694 {
1695 unsigned int i;
1696 bitmap_iterator bi;
1697 unsigned int my_dfs;
1698
1699 gcc_assert (si->node_mapping[n] == n);
1700 SET_BIT (si->visited, n);
1701 si->dfs[n] = si->current_index ++;
1702 my_dfs = si->dfs[n];
1703
1704 /* Visit all the explicit predecessors. */
1705 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1706 {
1707 unsigned int w = si->node_mapping[i];
1708
1709 if (TEST_BIT (si->roots, w))
1710 continue;
1711
1712 if (!TEST_BIT (si->visited, w))
1713 label_visit (graph, si, w);
1714 {
1715 unsigned int t = si->node_mapping[w];
1716 unsigned int nnode = si->node_mapping[n];
1717 gcc_assert (nnode == n);
1718
1719 if (si->dfs[t] < si->dfs[nnode])
1720 si->dfs[n] = si->dfs[t];
1721 }
1722 }
1723
1724 /* Visit all the implicit predecessors. */
1725 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
1726 {
1727 unsigned int w = si->node_mapping[i];
1728
1729 if (TEST_BIT (si->roots, w))
1730 continue;
1731
1732 if (!TEST_BIT (si->visited, w))
1733 label_visit (graph, si, w);
1734 {
1735 unsigned int t = si->node_mapping[w];
1736 unsigned int nnode = si->node_mapping[n];
1737 gcc_assert (nnode == n);
1738
1739 if (si->dfs[t] < si->dfs[nnode])
1740 si->dfs[n] = si->dfs[t];
1741 }
1742 }
1743
1744 /* See if any components have been identified. */
1745 if (si->dfs[n] == my_dfs)
1746 {
1747 while (VEC_length (unsigned, si->scc_stack) != 0
1748 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1749 {
1750 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1751 si->node_mapping[w] = n;
1752
1753 if (!TEST_BIT (graph->direct_nodes, w))
1754 RESET_BIT (graph->direct_nodes, n);
1755 }
1756 SET_BIT (si->roots, n);
1757
1758 if (!TEST_BIT (graph->direct_nodes, n))
1759 {
1760 graph->label[n] = equivalence_class++;
1761 }
1762 else
1763 {
1764 unsigned int size = 0;
1765 unsigned int firstlabel = ~0;
1766
1767 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1768 {
1769 unsigned int j = si->node_mapping[i];
1770
1771 if (j == n || graph->label[j] == 0)
1772 continue;
1773
1774 if (firstlabel == (unsigned int)~0)
1775 {
1776 firstlabel = graph->label[j];
1777 size++;
1778 }
1779 else if (graph->label[j] != firstlabel)
1780 size++;
1781 }
1782
1783 if (size == 0)
1784 graph->label[n] = 0;
1785 else if (size == 1)
1786 graph->label[n] = firstlabel;
1787 else
1788 graph->label[n] = equivalence_class++;
1789 }
1790 }
1791 else
1792 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1793 }
1794
1795 /* Perform offline variable substitution, discovering equivalence
1796 classes, and eliminating non-pointer variables. */
1797
1798 static struct scc_info *
1799 perform_var_substitution (constraint_graph_t graph)
1800 {
1801 unsigned int i;
1802 unsigned int size = graph->size;
1803 struct scc_info *si = init_scc_info (size);
1804
1805 bitmap_obstack_initialize (&iteration_obstack);
1806 equivalence_class = 0;
1807
1808 /* We only need to visit the non-address nodes for labeling
1809 purposes, as the address nodes will never have any predecessors,
1810 because &x never appears on the LHS of a constraint. */
1811 for (i = 0; i < LAST_REF_NODE; i++)
1812 if (!TEST_BIT (si->visited, si->node_mapping[i]))
1813 label_visit (graph, si, si->node_mapping[i]);
1814
1815 if (dump_file && (dump_flags & TDF_DETAILS))
1816 for (i = 0; i < FIRST_REF_NODE; i++)
1817 {
1818 bool direct_node = TEST_BIT (graph->direct_nodes, i);
1819 fprintf (dump_file,
1820 "Equivalence class for %s node id %d:%s is %d\n",
1821 direct_node ? "Direct node" : "Indirect node", i,
1822 get_varinfo (i)->name,
1823 graph->label[si->node_mapping[i]]);
1824 }
1825
1826 /* Quickly eliminate our non-pointer variables. */
1827
1828 for (i = 0; i < FIRST_REF_NODE; i++)
1829 {
1830 unsigned int node = si->node_mapping[i];
1831
1832 if (graph->label[node] == 0 && TEST_BIT (graph->direct_nodes, node))
1833 {
1834 if (dump_file && (dump_flags & TDF_DETAILS))
1835 fprintf (dump_file,
1836 "%s is a non-pointer variable, eliminating edges.\n",
1837 get_varinfo (node)->name);
1838 stats.nonpointer_vars++;
1839 clear_edges_for_node (graph, node);
1840 }
1841 }
1842 return si;
1843 }
1844
1845 /* Free information that was only necessary for variable
1846 substitution. */
1847
1848 static void
1849 free_var_substitution_info (struct scc_info *si)
1850 {
1851 free_scc_info (si);
1852 free (graph->label);
1853 free (graph->eq_rep);
1854 sbitmap_free (graph->direct_nodes);
1855 bitmap_obstack_release (&iteration_obstack);
1856 }
1857
1858 /* Return an existing node that is equivalent to NODE, which has
1859 equivalence class LABEL, if one exists. Return NODE otherwise. */
1860
1861 static unsigned int
1862 find_equivalent_node (constraint_graph_t graph,
1863 unsigned int node, unsigned int label)
1864 {
1865 /* If the address version of this variable is unused, we can
1866 substitute it for anything else with the same label.
1867 Otherwise, we know the pointers are equivalent, but not the
1868 locations. */
1869
1870 if (graph->label[FIRST_ADDR_NODE + node] == 0)
1871 {
1872 gcc_assert (label < graph->size);
1873
1874 if (graph->eq_rep[label] != -1)
1875 {
1876 /* Unify the two variables since we know they are equivalent. */
1877 if (unite (graph->eq_rep[label], node))
1878 unify_nodes (graph, graph->eq_rep[label], node, false);
1879 return graph->eq_rep[label];
1880 }
1881 else
1882 {
1883 graph->eq_rep[label] = node;
1884 }
1885 }
1886 return node;
1887 }
1888
1889 /* Move complex constraints to the appropriate nodes, and collapse
1890 variables we've discovered are equivalent during variable
1891 substitution. SI is the SCC_INFO that is the result of
1892 perform_variable_substitution. */
1893
1894 static void
1895 move_complex_constraints (constraint_graph_t graph,
1896 struct scc_info *si)
1897 {
1898 int i;
1899 unsigned int j;
1900 constraint_t c;
1901
1902 for (j = 0; j < graph->size; j++)
1903 gcc_assert (find (j) == j);
1904
1905 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1906 {
1907 struct constraint_expr lhs = c->lhs;
1908 struct constraint_expr rhs = c->rhs;
1909 unsigned int lhsvar = find (get_varinfo_fc (lhs.var)->id);
1910 unsigned int rhsvar = find (get_varinfo_fc (rhs.var)->id);
1911 unsigned int lhsnode, rhsnode;
1912 unsigned int lhslabel, rhslabel;
1913
1914 lhsnode = si->node_mapping[lhsvar];
1915 rhsnode = si->node_mapping[rhsvar];
1916 lhslabel = graph->label[lhsnode];
1917 rhslabel = graph->label[rhsnode];
1918
1919 /* See if it is really a non-pointer variable, and if so, ignore
1920 the constraint. */
1921 if (lhslabel == 0)
1922 {
1923 if (!TEST_BIT (graph->direct_nodes, lhsnode))
1924 lhslabel = graph->label[lhsnode] = equivalence_class++;
1925 else
1926 {
1927 if (dump_file && (dump_flags & TDF_DETAILS))
1928 {
1929
1930 fprintf (dump_file, "%s is a non-pointer variable, "
1931 "ignoring constraint:",
1932 get_varinfo (lhs.var)->name);
1933 dump_constraint (dump_file, c);
1934 }
1935 VEC_replace (constraint_t, constraints, i, NULL);
1936 continue;
1937 }
1938 }
1939
1940 if (rhslabel == 0)
1941 {
1942 if (!TEST_BIT (graph->direct_nodes, rhsnode))
1943 rhslabel = graph->label[rhsnode] = equivalence_class++;
1944 else
1945 {
1946 if (dump_file && (dump_flags & TDF_DETAILS))
1947 {
1948
1949 fprintf (dump_file, "%s is a non-pointer variable, "
1950 "ignoring constraint:",
1951 get_varinfo (rhs.var)->name);
1952 dump_constraint (dump_file, c);
1953 }
1954 VEC_replace (constraint_t, constraints, i, NULL);
1955 continue;
1956 }
1957 }
1958
1959 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
1960 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
1961 c->lhs.var = lhsvar;
1962 c->rhs.var = rhsvar;
1963
1964 if (lhs.type == DEREF)
1965 {
1966 if (rhs.type == ADDRESSOF || rhsvar > anything_id)
1967 insert_into_complex (graph, lhsvar, c);
1968 }
1969 else if (rhs.type == DEREF)
1970 {
1971 if (!(get_varinfo (lhsvar)->is_special_var))
1972 insert_into_complex (graph, rhsvar, c);
1973 }
1974 else if (rhs.type != ADDRESSOF && lhsvar > anything_id
1975 && (lhs.offset != 0 || rhs.offset != 0))
1976 {
1977 insert_into_complex (graph, rhsvar, c);
1978 }
1979
1980 }
1981 }
1982
1983 /* Eliminate indirect cycles involving NODE. Return true if NODE was
1984 part of an SCC, false otherwise. */
1985
1986 static bool
1987 eliminate_indirect_cycles (unsigned int node)
1988 {
1989 if (graph->indirect_cycles[node] != -1
1990 && !bitmap_empty_p (get_varinfo (node)->solution))
1991 {
1992 unsigned int i;
1993 VEC(unsigned,heap) *queue = NULL;
1994 int queuepos;
1995 unsigned int to = find (graph->indirect_cycles[node]);
1996 bitmap_iterator bi;
1997
1998 /* We can't touch the solution set and call unify_nodes
1999 at the same time, because unify_nodes is going to do
2000 bitmap unions into it. */
2001
2002 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2003 {
2004 if (find (i) == i && i != to)
2005 {
2006 if (unite (to, i))
2007 VEC_safe_push (unsigned, heap, queue, i);
2008 }
2009 }
2010
2011 for (queuepos = 0;
2012 VEC_iterate (unsigned, queue, queuepos, i);
2013 queuepos++)
2014 {
2015 unify_nodes (graph, to, i, true);
2016 }
2017 VEC_free (unsigned, heap, queue);
2018 return true;
2019 }
2020 return false;
2021 }
2022
2023 /* Solve the constraint graph GRAPH using our worklist solver.
2024 This is based on the PW* family of solvers from the "Efficient Field
2025 Sensitive Pointer Analysis for C" paper.
2026 It works by iterating over all the graph nodes, processing the complex
2027 constraints and propagating the copy constraints, until everything stops
2028 changing. This corresponds to steps 6-8 in the solving list given above. */
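
/* Worked example (illustrative only, in the constraint notation used in
   the overview at the top of the file):

     p = &x;      seeds Sol(p) = { x }
     q = p;       copy edge p -> q
     *q = &y;     complex constraint stored on q

   Processing p propagates { x } along the copy edge, so Sol(q) = { x }
   and q is marked changed.  Processing q then evaluates the complex
   constraint *q = &y with Sol(q) = { x }, which has the effect of
   x = &y, adding y to Sol(x).  Once a whole pass leaves every solution
   unchanged, the worklist is empty and solving stops.  */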
2029
2030 static void
2031 solve_graph (constraint_graph_t graph)
2032 {
2033 unsigned int size = VEC_length (varinfo_t, varmap);
2034 unsigned int i;
2035 bitmap pts;
2036
2037 changed_count = 0;
2038 changed = sbitmap_alloc (size);
2039 sbitmap_zero (changed);
2040
2041 /* Mark all initial non-collapsed nodes as changed. */
2042 for (i = 0; i < size; i++)
2043 {
2044 varinfo_t ivi = get_varinfo (i);
2045 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2046 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2047 || VEC_length (constraint_t, graph->complex[i]) > 0))
2048 {
2049 SET_BIT (changed, i);
2050 changed_count++;
2051 }
2052 }
2053
2054 /* Allocate a bitmap to be used to store the changed bits. */
2055 pts = BITMAP_ALLOC (&pta_obstack);
2056
2057 while (changed_count > 0)
2058 {
2059 unsigned int i;
2060 struct topo_info *ti = init_topo_info ();
2061 stats.iterations++;
2062
2063 bitmap_obstack_initialize (&iteration_obstack);
2064
2065 compute_topo_order (graph, ti);
2066
2067 while (VEC_length (unsigned, ti->topo_order) != 0)
2068 {
2069
2070 i = VEC_pop (unsigned, ti->topo_order);
2071
2072 /* If this variable is not a representative, skip it. */
2073 if (find (i) != i)
2074 continue;
2075
2076 /* In certain indirect cycle cases, we may merge this
2077 variable to another. */
2078 if (eliminate_indirect_cycles (i) && find (i) != i)
2079 continue;
2080
2081 /* If the node has changed, we need to process the
2082 complex constraints and outgoing edges again. */
2083 if (TEST_BIT (changed, i))
2084 {
2085 unsigned int j;
2086 constraint_t c;
2087 bitmap solution;
2088 VEC(constraint_t,heap) *complex = graph->complex[i];
2089 bool solution_empty;
2090
2091 RESET_BIT (changed, i);
2092 changed_count--;
2093
2094 /* Compute the changed set of solution bits. */
2095 bitmap_and_compl (pts, get_varinfo (i)->solution,
2096 get_varinfo (i)->oldsolution);
2097
2098 if (bitmap_empty_p (pts))
2099 continue;
2100
2101 bitmap_ior_into (get_varinfo (i)->oldsolution, pts);
2102
2103 solution = get_varinfo (i)->solution;
2104 solution_empty = bitmap_empty_p (solution);
2105
2106 /* Process the complex constraints */
2107 for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
2108 {
2109 /* The only complex constraint that can change our
2110 solution to non-empty, given an empty solution,
2111 is a constraint where the lhs side is receiving
2112 some set from elsewhere. */
2113 if (!solution_empty || c->lhs.type != DEREF)
2114 do_complex_constraint (graph, c, pts);
2115 }
2116
2117 solution_empty = bitmap_empty_p (solution);
2118
2119 if (!solution_empty)
2120 {
2121 bitmap_iterator bi;
2122
2123 /* Propagate solution to all successors. */
2124 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2125 0, j, bi)
2126 {
2127 bitmap tmp;
2128 bool flag;
2129
2130 unsigned int to = find (j);
2131 tmp = get_varinfo (to)->solution;
2132 flag = false;
2133
2134 /* Don't try to propagate to ourselves. */
2135 if (to == i)
2136 continue;
2137
2138 flag = set_union_with_increment (tmp, pts, 0);
2139
2140 if (flag)
2141 {
2142 get_varinfo (to)->solution = tmp;
2143 if (!TEST_BIT (changed, to))
2144 {
2145 SET_BIT (changed, to);
2146 changed_count++;
2147 }
2148 }
2149 }
2150 }
2151 }
2152 }
2153 free_topo_info (ti);
2154 bitmap_obstack_release (&iteration_obstack);
2155 }
2156
2157 BITMAP_FREE (pts);
2158 sbitmap_free (changed);
2159 bitmap_obstack_release (&oldpta_obstack);
2160 }
2161
2162 /* Map from trees to variable infos. */
2163 static struct pointer_map_t *vi_for_tree;
2164
2165
2166 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2167
2168 static void
2169 insert_vi_for_tree (tree t, varinfo_t vi)
2170 {
2171 void **slot = pointer_map_insert (vi_for_tree, t);
2172 gcc_assert (vi);
2173 gcc_assert (*slot == NULL);
2174 *slot = vi;
2175 }
2176
2177 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2178 exist in the map, return NULL; otherwise, return the varinfo we found. */
2179
2180 static varinfo_t
2181 lookup_vi_for_tree (tree t)
2182 {
2183 void **slot = pointer_map_contains (vi_for_tree, t);
2184 if (slot == NULL)
2185 return NULL;
2186
2187 return (varinfo_t) *slot;
2188 }
2189
2190 /* Return a printable name for DECL */
2191
2192 static const char *
2193 alias_get_name (tree decl)
2194 {
2195 const char *res = get_name (decl);
2196 char *temp;
2197 int num_printed = 0;
2198
2199 if (res != NULL)
2200 return res;
2201
2202 res = "NULL";
2203 if (!dump_file)
2204 return res;
2205
2206 if (TREE_CODE (decl) == SSA_NAME)
2207 {
2208 num_printed = asprintf (&temp, "%s_%u",
2209 alias_get_name (SSA_NAME_VAR (decl)),
2210 SSA_NAME_VERSION (decl));
2211 }
2212 else if (DECL_P (decl))
2213 {
2214 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2215 }
2216 if (num_printed > 0)
2217 {
2218 res = ggc_strdup (temp);
2219 free (temp);
2220 }
2221 return res;
2222 }
2223
2224 /* Find the variable id for tree T in the map.
2225 If T doesn't exist in the map, create an entry for it and return it. */
2226
2227 static varinfo_t
2228 get_vi_for_tree (tree t)
2229 {
2230 void **slot = pointer_map_contains (vi_for_tree, t);
2231 if (slot == NULL)
2232 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2233
2234 return (varinfo_t) *slot;
2235 }
2236
2237 /* Get a constraint expression from an SSA_VAR_P node. */
2238
2239 static struct constraint_expr
2240 get_constraint_exp_from_ssa_var (tree t)
2241 {
2242 struct constraint_expr cexpr;
2243
2244 gcc_assert (SSA_VAR_P (t) || DECL_P (t));
2245
2246 /* For parameters, get at the points-to set for the actual parm
2247 decl. */
2248 if (TREE_CODE (t) == SSA_NAME
2249 && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2250 && default_def (SSA_NAME_VAR (t)) == t)
2251 return get_constraint_exp_from_ssa_var (SSA_NAME_VAR (t));
2252
2253 cexpr.type = SCALAR;
2254
2255 cexpr.var = get_vi_for_tree (t)->id;
2256 /* If we determine the result is "anything", and we know this is readonly,
2257 say it points to readonly memory instead. */
2258 if (cexpr.var == anything_id && TREE_READONLY (t))
2259 {
2260 cexpr.type = ADDRESSOF;
2261 cexpr.var = readonly_id;
2262 }
2263
2264 cexpr.offset = 0;
2265 return cexpr;
2266 }
2267
2268 /* Process a completed constraint T, and add it to the constraint
2269 list. */
2270
2271 static void
2272 process_constraint (constraint_t t)
2273 {
2274 struct constraint_expr rhs = t->rhs;
2275 struct constraint_expr lhs = t->lhs;
2276
2277 gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
2278 gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
2279
2280 if (lhs.type == DEREF)
2281 get_varinfo (lhs.var)->directly_dereferenced = true;
2282 if (rhs.type == DEREF)
2283 get_varinfo (rhs.var)->directly_dereferenced = true;
2284
2285 if (!use_field_sensitive)
2286 {
2287 t->rhs.offset = 0;
2288 t->lhs.offset = 0;
2289 }
2290
2291 /* ANYTHING == ANYTHING is pointless. */
2292 if (lhs.var == anything_id && rhs.var == anything_id)
2293 return;
2294
2295 /* If we have &ANYTHING = something, convert to SOMETHING = &ANYTHING. */
2296 else if (lhs.var == anything_id && lhs.type == ADDRESSOF)
2297 {
2298 rhs = t->lhs;
2299 t->lhs = t->rhs;
2300 t->rhs = rhs;
2301 process_constraint (t);
2302 }
2303 /* This can happen in our IR with things like n->a = *p */
2304 else if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2305 {
2306 /* Split into tmp = *rhs, *lhs = tmp */
2307 tree rhsdecl = get_varinfo (rhs.var)->decl;
2308 tree pointertype = TREE_TYPE (rhsdecl);
2309 tree pointedtotype = TREE_TYPE (pointertype);
2310 tree tmpvar = create_tmp_var_raw (pointedtotype, "doubledereftmp");
2311 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2312
2313 /* If this is an aggregate of known size, we should have passed
2314 this off to do_structure_copy, and it should have broken it
2315 up. */
2316 gcc_assert (!AGGREGATE_TYPE_P (pointedtotype)
2317 || get_varinfo (rhs.var)->is_unknown_size_var);
2318
2319 process_constraint (new_constraint (tmplhs, rhs));
2320 process_constraint (new_constraint (lhs, tmplhs));
2321 }
2322 else
2323 {
2324 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
2325 VEC_safe_push (constraint_t, heap, constraints, t);
2326 }
2327 }
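
/* Example of the double-dereference split above (illustrative only):

     *p = *q;

   has a DEREF on both sides, so it is rewritten as

     doubledereftmp = *q
     *p = doubledereftmp

   leaving each resulting constraint with at most one dereferenced side.  */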
2328
2329 /* Return true if T is a variable of a type that could contain
2330 pointers. */
2331
2332 static bool
2333 could_have_pointers (tree t)
2334 {
2335 tree type = TREE_TYPE (t);
2336
2337 if (POINTER_TYPE_P (type)
2338 || AGGREGATE_TYPE_P (type)
2339 || TREE_CODE (type) == COMPLEX_TYPE)
2340 return true;
2341
2342 return false;
2343 }
2344
2345 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2346 structure. */
2347
2348 static unsigned HOST_WIDE_INT
2349 bitpos_of_field (const tree fdecl)
2350 {
2351
2352 if (TREE_CODE (DECL_FIELD_OFFSET (fdecl)) != INTEGER_CST
2353 || TREE_CODE (DECL_FIELD_BIT_OFFSET (fdecl)) != INTEGER_CST)
2354 return -1;
2355
2356 return (tree_low_cst (DECL_FIELD_OFFSET (fdecl), 1) * 8)
2357 + tree_low_cst (DECL_FIELD_BIT_OFFSET (fdecl), 1);
2358 }
2359
2360
2361 /* Return true if an access to [ACCESSPOS, ACCESSSIZE]
2362 overlaps with a field at [FIELDPOS, FIELDSIZE] */
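
/* For example, a 32-bit access starting at bit 32 overlaps fields
   occupying bits [32, 64) or [48, 80), but not a field occupying
   bits [64, 96).  */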
2363
2364 static bool
2365 offset_overlaps_with_access (const unsigned HOST_WIDE_INT fieldpos,
2366 const unsigned HOST_WIDE_INT fieldsize,
2367 const unsigned HOST_WIDE_INT accesspos,
2368 const unsigned HOST_WIDE_INT accesssize)
2369 {
2370 if (fieldpos == accesspos && fieldsize == accesssize)
2371 return true;
2372 if (accesspos >= fieldpos && accesspos < (fieldpos + fieldsize))
2373 return true;
2374 if (accesspos < fieldpos && (accesspos + accesssize > fieldpos))
2375 return true;
2376
2377 return false;
2378 }
2379
2380 /* Given a COMPONENT_REF T, return the constraint_expr for it. */
2381
2382 static void
2383 get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results)
2384 {
2385 tree orig_t = t;
2386 HOST_WIDE_INT bitsize = -1;
2387 HOST_WIDE_INT bitmaxsize = -1;
2388 HOST_WIDE_INT bitpos;
2389 tree forzero;
2390 struct constraint_expr *result;
2391 unsigned int beforelength = VEC_length (ce_s, *results);
2392
2393 /* Some people like to do cute things like take the address of
2394 &0->a.b */
2395 forzero = t;
2396 while (!SSA_VAR_P (forzero) && !CONSTANT_CLASS_P (forzero))
2397 forzero = TREE_OPERAND (forzero, 0);
2398
2399 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
2400 {
2401 struct constraint_expr temp;
2402
2403 temp.offset = 0;
2404 temp.var = integer_id;
2405 temp.type = SCALAR;
2406 VEC_safe_push (ce_s, heap, *results, &temp);
2407 return;
2408 }
2409
2410 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
2411
2412 /* String constants are readonly, so there is nothing to really do
2413 here. */
2414 if (TREE_CODE (t) == STRING_CST)
2415 return;
2416
2417 get_constraint_for (t, results);
2418 result = VEC_last (ce_s, *results);
2419 result->offset = bitpos;
2420
2421 gcc_assert (beforelength + 1 == VEC_length (ce_s, *results));
2422
2423 /* This can also happen due to weird offsetof type macros. */
2424 if (TREE_CODE (t) != ADDR_EXPR && result->type == ADDRESSOF)
2425 result->type = SCALAR;
2426
2427 if (result->type == SCALAR)
2428 {
2429 /* In languages like C, you can access one past the end of an
2430 array. You aren't allowed to dereference it, so we can
2431 ignore this constraint. When we handle pointer subtraction,
2432 we may have to do something cute here. */
2433
2434 if (result->offset < get_varinfo (result->var)->fullsize
2435 && bitmaxsize != 0)
2436 {
2437 /* The constraint will not necessarily start at the right offset
2438 either; it may start in some padding. We only care about
2439 setting the constraint to the first actual field it touches, so
2440 walk to find it. */
2441 varinfo_t curr;
2442 for (curr = get_varinfo (result->var); curr; curr = curr->next)
2443 {
2444 if (offset_overlaps_with_access (curr->offset, curr->size,
2445 result->offset, bitmaxsize))
2446 {
2447 result->var = curr->id;
2448 break;
2449 }
2450 }
2451 /* We normally expect to have found *some* field; the only exception
2452 is an access one past the end of an array embedded in a struct,
2453 which lands entirely in padding and so finds no field at all. */
2455 gcc_assert (curr || ref_contains_array_ref (orig_t));
2456 }
2457 else if (bitmaxsize == 0)
2458 {
2459 if (dump_file && (dump_flags & TDF_DETAILS))
2460 fprintf (dump_file, "Access to zero-sized part of variable, "
2461 "ignoring\n");
2462 }
2463 else
2464 if (dump_file && (dump_flags & TDF_DETAILS))
2465 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
2466
2467 result->offset = 0;
2468 }
2469 }
2470
2471
2472 /* Dereference the constraint expression CONS, and return the result.
2473 DEREF (ADDRESSOF) = SCALAR
2474 DEREF (SCALAR) = DEREF
2475 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
2476 This is needed so that we can handle dereferencing DEREF constraints. */
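
/* Example (illustrative only): for

     x = **pp;

   the inner dereference yields DEREF (pp); dereferencing that again
   introduces a temporary, so we end up with

     dereftmp = *pp
     x = *dereftmp

   and no single constraint ever contains a double dereference.  */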
2477
2478 static void
2479 do_deref (VEC (ce_s, heap) **constraints)
2480 {
2481 struct constraint_expr *c;
2482 unsigned int i = 0;
2483
2484 for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
2485 {
2486 if (c->type == SCALAR)
2487 c->type = DEREF;
2488 else if (c->type == ADDRESSOF)
2489 c->type = SCALAR;
2490 else if (c->type == DEREF)
2491 {
2492 tree tmpvar = create_tmp_var_raw (ptr_type_node, "dereftmp");
2493 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2494 process_constraint (new_constraint (tmplhs, *c));
2495 c->var = tmplhs.var;
2496 }
2497 else
2498 gcc_unreachable ();
2499 }
2500 }
2501
2502 /* Create a nonlocal variable of TYPE to represent nonlocals we can
2503 alias. */
2504
2505 static tree
2506 create_nonlocal_var (tree type)
2507 {
2508 tree nonlocal = create_tmp_var_raw (type, "NONLOCAL");
2509
2510 if (referenced_vars)
2511 add_referenced_var (nonlocal);
2512
2513 DECL_EXTERNAL (nonlocal) = 1;
2514 return nonlocal;
2515 }
2516
2517 /* Given a tree T, return the constraint expression for it. */
2518
2519 static void
2520 get_constraint_for (tree t, VEC (ce_s, heap) **results)
2521 {
2522 struct constraint_expr temp;
2523
2524 /* x = integer is all glommed to a single variable, which doesn't
2525 point to anything by itself. That is, of course, unless it is an
2526 integer constant being treated as a pointer, in which case, we
2527 will return that this is really the addressof anything. This
2528 happens below, since it will fall into the default case. The only
2529 case where we know something about an integer treated like a pointer
2530 is when it is the NULL pointer; then we just say it points to
2531 NULL. */
2532 if (TREE_CODE (t) == INTEGER_CST
2533 && !POINTER_TYPE_P (TREE_TYPE (t)))
2534 {
2535 temp.var = integer_id;
2536 temp.type = SCALAR;
2537 temp.offset = 0;
2538 VEC_safe_push (ce_s, heap, *results, &temp);
2539 return;
2540 }
2541 else if (TREE_CODE (t) == INTEGER_CST
2542 && integer_zerop (t))
2543 {
2544 temp.var = nothing_id;
2545 temp.type = ADDRESSOF;
2546 temp.offset = 0;
2547 VEC_safe_push (ce_s, heap, *results, &temp);
2548 return;
2549 }
2550
2551 switch (TREE_CODE_CLASS (TREE_CODE (t)))
2552 {
2553 case tcc_expression:
2554 {
2555 switch (TREE_CODE (t))
2556 {
2557 case ADDR_EXPR:
2558 {
2559 struct constraint_expr *c;
2560 unsigned int i;
2561 tree exp = TREE_OPERAND (t, 0);
2562 tree pttype = TREE_TYPE (TREE_TYPE (t));
2563
2564 get_constraint_for (exp, results);
2565
2566 /* Make sure we capture constraints to all elements
2567 of an array. */
2568 if ((handled_component_p (exp)
2569 && ref_contains_array_ref (exp))
2570 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE)
2571 {
2572 struct constraint_expr *origrhs;
2573 varinfo_t origvar;
2574 struct constraint_expr tmp;
2575
2576 if (VEC_length (ce_s, *results) == 0)
2577 return;
2578
2579 gcc_assert (VEC_length (ce_s, *results) == 1);
2580 origrhs = VEC_last (ce_s, *results);
2581 tmp = *origrhs;
2582 VEC_pop (ce_s, *results);
2583 origvar = get_varinfo (origrhs->var);
2584 for (; origvar; origvar = origvar->next)
2585 {
2586 tmp.var = origvar->id;
2587 VEC_safe_push (ce_s, heap, *results, &tmp);
2588 }
2589 }
2590 else if (VEC_length (ce_s, *results) == 1
2591 && (AGGREGATE_TYPE_P (pttype)
2592 || TREE_CODE (pttype) == COMPLEX_TYPE))
2593 {
2594 struct constraint_expr *origrhs;
2595 varinfo_t origvar;
2596 struct constraint_expr tmp;
2597
2598 gcc_assert (VEC_length (ce_s, *results) == 1);
2599 origrhs = VEC_last (ce_s, *results);
2600 tmp = *origrhs;
2601 VEC_pop (ce_s, *results);
2602 origvar = get_varinfo (origrhs->var);
2603 for (; origvar; origvar = origvar->next)
2604 {
2605 tmp.var = origvar->id;
2606 VEC_safe_push (ce_s, heap, *results, &tmp);
2607 }
2608 }
2609
2610 for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
2611 {
2612 if (c->type == DEREF)
2613 c->type = SCALAR;
2614 else
2615 c->type = ADDRESSOF;
2616 }
2617 return;
2618 }
2619 break;
2620 case CALL_EXPR:
2621 /* XXX: In interprocedural mode, if we didn't have the
2622 body, we would need to add, for each pointer argument,
2623 the constraint *argument = &ANYTHING. */
2624 if (call_expr_flags (t) & (ECF_MALLOC | ECF_MAY_BE_ALLOCA))
2625 {
2626 varinfo_t vi;
2627 tree heapvar = heapvar_lookup (t);
2628
2629 if (heapvar == NULL)
2630 {
2631 heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
2632 DECL_EXTERNAL (heapvar) = 1;
2633 if (referenced_vars)
2634 add_referenced_var (heapvar);
2635 heapvar_insert (t, heapvar);
2636 }
2637
2638 temp.var = create_variable_info_for (heapvar,
2639 alias_get_name (heapvar));
2640
2641 vi = get_varinfo (temp.var);
2642 vi->is_artificial_var = 1;
2643 vi->is_heap_var = 1;
2644 temp.type = ADDRESSOF;
2645 temp.offset = 0;
2646 VEC_safe_push (ce_s, heap, *results, &temp);
2647 return;
2648 }
2649 else
2650 {
2651 temp.var = escaped_vars_id;
2652 temp.type = SCALAR;
2653 temp.offset = 0;
2654 VEC_safe_push (ce_s, heap, *results, &temp);
2655 return;
2656 }
2657 break;
2658 default:
2659 {
2660 temp.type = ADDRESSOF;
2661 temp.var = anything_id;
2662 temp.offset = 0;
2663 VEC_safe_push (ce_s, heap, *results, &temp);
2664 return;
2665 }
2666 }
2667 }
2668 case tcc_reference:
2669 {
2670 switch (TREE_CODE (t))
2671 {
2672 case INDIRECT_REF:
2673 {
2674 get_constraint_for (TREE_OPERAND (t, 0), results);
2675 do_deref (results);
2676 return;
2677 }
2678 case ARRAY_REF:
2679 case ARRAY_RANGE_REF:
2680 case COMPONENT_REF:
2681 get_constraint_for_component_ref (t, results);
2682 return;
2683 default:
2684 {
2685 temp.type = ADDRESSOF;
2686 temp.var = anything_id;
2687 temp.offset = 0;
2688 VEC_safe_push (ce_s, heap, *results, &temp);
2689 return;
2690 }
2691 }
2692 }
2693 case tcc_unary:
2694 {
2695 switch (TREE_CODE (t))
2696 {
2697 case NOP_EXPR:
2698 case CONVERT_EXPR:
2699 case NON_LVALUE_EXPR:
2700 {
2701 tree op = TREE_OPERAND (t, 0);
2702
2703 /* Casts from non-pointers to pointers are bad news for us.
2704 Anything else, we can see through. */
2705 if (!(POINTER_TYPE_P (TREE_TYPE (t))
2706 && ! POINTER_TYPE_P (TREE_TYPE (op))))
2707 {
2708 get_constraint_for (op, results);
2709 return;
2710 }
2711
2712 /* FALLTHRU */
2713 }
2714 default:
2715 {
2716 temp.type = ADDRESSOF;
2717 temp.var = anything_id;
2718 temp.offset = 0;
2719 VEC_safe_push (ce_s, heap, *results, &temp);
2720 return;
2721 }
2722 }
2723 }
2724 case tcc_exceptional:
2725 {
2726 switch (TREE_CODE (t))
2727 {
2728 case PHI_NODE:
2729 {
2730 get_constraint_for (PHI_RESULT (t), results);
2731 return;
2732 }
2733 break;
2734 case SSA_NAME:
2735 {
2736 struct constraint_expr temp;
2737 temp = get_constraint_exp_from_ssa_var (t);
2738 VEC_safe_push (ce_s, heap, *results, &temp);
2739 return;
2740 }
2741 break;
2742 default:
2743 {
2744 temp.type = ADDRESSOF;
2745 temp.var = anything_id;
2746 temp.offset = 0;
2747 VEC_safe_push (ce_s, heap, *results, &temp);
2748 return;
2749 }
2750 }
2751 }
2752 case tcc_declaration:
2753 {
2754 struct constraint_expr temp;
2755 temp = get_constraint_exp_from_ssa_var (t);
2756 VEC_safe_push (ce_s, heap, *results, &temp);
2757 return;
2758 }
2759 default:
2760 {
2761 temp.type = ADDRESSOF;
2762 temp.var = anything_id;
2763 temp.offset = 0;
2764 VEC_safe_push (ce_s, heap, *results, &temp);
2765 return;
2766 }
2767 }
2768 }
2769
2770
2771 /* Handle the structure copy case where we have a simple structure copy
2772 between LHS and RHS that is of SIZE (in bits)
2773
2774 For each field of the lhs variable (lhsfield)
2775 For each field of the rhs variable at lhsfield.offset (rhsfield)
2776 add the constraint lhsfield = rhsfield
2777
2778 If we fail due to some kind of type unsafety or other thing we
2779 can't handle, return false. We expect the caller to collapse the
2780 variable in that case. */
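
/* Illustrative example: for

     struct S { int *a; int *b; } x, y;
     x = y;

   the copy is expanded field-wise into

     x.a = y.a
     x.b = y.b

   matching each LHS field with the RHS field at the same offset from
   the start of the copy.  */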
2781
2782 static bool
2783 do_simple_structure_copy (const struct constraint_expr lhs,
2784 const struct constraint_expr rhs,
2785 const unsigned HOST_WIDE_INT size)
2786 {
2787 varinfo_t p = get_varinfo (lhs.var);
2788 unsigned HOST_WIDE_INT pstart, last;
2789 pstart = p->offset;
2790 last = p->offset + size;
2791 for (; p && p->offset < last; p = p->next)
2792 {
2793 varinfo_t q;
2794 struct constraint_expr templhs = lhs;
2795 struct constraint_expr temprhs = rhs;
2796 unsigned HOST_WIDE_INT fieldoffset;
2797
2798 templhs.var = p->id;
2799 q = get_varinfo (temprhs.var);
2800 fieldoffset = p->offset - pstart;
2801 q = first_vi_for_offset (q, q->offset + fieldoffset);
2802 if (!q)
2803 return false;
2804 temprhs.var = q->id;
2805 process_constraint (new_constraint (templhs, temprhs));
2806 }
2807 return true;
2808 }
2809
2810
2811 /* Handle the structure copy case where we have a structure copy between an
2812 aggregate on the LHS and a dereference of a pointer on the RHS
2813 that is of SIZE (in bits)
2814
2815 For each field of the lhs variable (lhsfield)
2816 rhs.offset = lhsfield->offset
2817 add the constraint lhsfield = rhs
2818 */
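
/* Illustrative example (assuming 32-bit pointers, so field b sits at
   bit offset 32): for

     struct S { int *a; int *b; } x, *p;
     x = *p;

   we generate one constraint per LHS field, advancing the RHS offset
   to the matching position:

     x.a = *p
     x.b = *p + 32

   so each field of whatever p points to flows into the corresponding
   field of x.  */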
2819
2820 static void
2821 do_rhs_deref_structure_copy (const struct constraint_expr lhs,
2822 const struct constraint_expr rhs,
2823 const unsigned HOST_WIDE_INT size)
2824 {
2825 varinfo_t p = get_varinfo (lhs.var);
2826 unsigned HOST_WIDE_INT pstart,last;
2827 pstart = p->offset;
2828 last = p->offset + size;
2829
2830 for (; p && p->offset < last; p = p->next)
2831 {
2832 varinfo_t q;
2833 struct constraint_expr templhs = lhs;
2834 struct constraint_expr temprhs = rhs;
2835 unsigned HOST_WIDE_INT fieldoffset;
2836
2837
2838 if (templhs.type == SCALAR)
2839 templhs.var = p->id;
2840 else
2841 templhs.offset = p->offset;
2842
2843 q = get_varinfo (temprhs.var);
2844 fieldoffset = p->offset - pstart;
2845 temprhs.offset += fieldoffset;
2846 process_constraint (new_constraint (templhs, temprhs));
2847 }
2848 }
2849
2850 /* Handle the structure copy case where we have a structure copy
2851 between an aggregate on the RHS and a dereference of a pointer on
2852 the LHS that is of SIZE (in bits)
2853
2854 For each field of the rhs variable (rhsfield)
2855 lhs.offset = rhsfield->offset
2856 add the constraint lhs = rhsfield
2857 */
2858
2859 static void
2860 do_lhs_deref_structure_copy (const struct constraint_expr lhs,
2861 const struct constraint_expr rhs,
2862 const unsigned HOST_WIDE_INT size)
2863 {
2864 varinfo_t p = get_varinfo (rhs.var);
2865 unsigned HOST_WIDE_INT pstart,last;
2866 pstart = p->offset;
2867 last = p->offset + size;
2868
2869 for (; p && p->offset < last; p = p->next)
2870 {
2871 varinfo_t q;
2872 struct constraint_expr templhs = lhs;
2873 struct constraint_expr temprhs = rhs;
2874 unsigned HOST_WIDE_INT fieldoffset;
2875
2876
2877 if (temprhs.type == SCALAR)
2878 temprhs.var = p->id;
2879 else
2880 temprhs.offset = p->offset;
2881
2882 q = get_varinfo (templhs.var);
2883 fieldoffset = p->offset - pstart;
2884 templhs.offset += fieldoffset;
2885 process_constraint (new_constraint (templhs, temprhs));
2886 }
2887 }
2888
2889 /* Sometimes, frontends like to give us bad type information. This
2890 function will collapse all the fields from VAR to the end of VAR,
2891 into VAR, so that we treat those fields as a single variable.
2892 We return the variable they were collapsed into. */
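
/* For example, if VAR is the field at offset 32 of a variable that
   also has fields at offsets 64 and 96, those later fields are marked
   as collapsed into VAR and VAR's size is extended to cover everything
   from offset 32 to the end of the variable.  */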
2893
2894 static unsigned int
2895 collapse_rest_of_var (unsigned int var)
2896 {
2897 varinfo_t currvar = get_varinfo (var);
2898 varinfo_t field;
2899
2900 for (field = currvar->next; field; field = field->next)
2901 {
2902 if (dump_file)
2903 fprintf (dump_file, "Type safety: Collapsing var %s into %s\n",
2904 field->name, currvar->name);
2905
2906 gcc_assert (!field->collapsed_to);
2907 field->collapsed_to = currvar;
2908 }
2909
2910 currvar->next = NULL;
2911 currvar->size = currvar->fullsize - currvar->offset;
2912
2913 return currvar->id;
2914 }
2915
2916 /* Handle aggregate copies by expanding into copies of the respective
2917 fields of the structures. */
2918
2919 static void
2920 do_structure_copy (tree lhsop, tree rhsop)
2921 {
2922 struct constraint_expr lhs, rhs, tmp;
2923 VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
2924 varinfo_t p;
2925 unsigned HOST_WIDE_INT lhssize;
2926 unsigned HOST_WIDE_INT rhssize;
2927
2928 get_constraint_for (lhsop, &lhsc);
2929 get_constraint_for (rhsop, &rhsc);
2930 gcc_assert (VEC_length (ce_s, lhsc) == 1);
2931 gcc_assert (VEC_length (ce_s, rhsc) == 1);
2932 lhs = *(VEC_last (ce_s, lhsc));
2933 rhs = *(VEC_last (ce_s, rhsc));
2934
2935 VEC_free (ce_s, heap, lhsc);
2936 VEC_free (ce_s, heap, rhsc);
2937
2938 /* If we have special var = x, swap it around. */
2939 if (lhs.var <= integer_id && !(get_varinfo (rhs.var)->is_special_var))
2940 {
2941 tmp = lhs;
2942 lhs = rhs;
2943 rhs = tmp;
2944 }
2945
2946 /* This is fairly conservative for the RHS == ADDRESSOF case, in that it's
2947 possible it's something we could handle. However, most cases falling
2948 into this are dealing with transparent unions, which are slightly
2949 weird. */
2950 if (rhs.type == ADDRESSOF && !(get_varinfo (rhs.var)->is_special_var))
2951 {
2952 rhs.type = ADDRESSOF;
2953 rhs.var = anything_id;
2954 }
2955
2956 /* If the RHS is a special var, or an addressof, set all the LHS fields to
2957 that special var. */
2958 if (rhs.var <= integer_id)
2959 {
2960 for (p = get_varinfo (lhs.var); p; p = p->next)
2961 {
2962 struct constraint_expr templhs = lhs;
2963 struct constraint_expr temprhs = rhs;
2964
2965 if (templhs.type == SCALAR )
2966 templhs.var = p->id;
2967 else
2968 templhs.offset += p->offset;
2969 process_constraint (new_constraint (templhs, temprhs));
2970 }
2971 }
2972 else
2973 {
2974 tree rhstype = TREE_TYPE (rhsop);
2975 tree lhstype = TREE_TYPE (lhsop);
2976 tree rhstypesize;
2977 tree lhstypesize;
2978
2979 lhstypesize = DECL_P (lhsop) ? DECL_SIZE (lhsop) : TYPE_SIZE (lhstype);
2980 rhstypesize = DECL_P (rhsop) ? DECL_SIZE (rhsop) : TYPE_SIZE (rhstype);
2981
2982 /* If we have variably sized types on the rhs or lhs, and a deref
2983 constraint, add the constraint, lhsconstraint = &ANYTHING.
2984 This is conservatively correct because either the lhs is an unknown
2985 sized var (if the constraint is SCALAR), or the lhs is a DEREF
2986 constraint, and every variable it can point to must be unknown sized
2987 anyway, so we don't need to worry about fields at all. */
2988 if ((rhs.type == DEREF && TREE_CODE (rhstypesize) != INTEGER_CST)
2989 || (lhs.type == DEREF && TREE_CODE (lhstypesize) != INTEGER_CST))
2990 {
2991 rhs.var = anything_id;
2992 rhs.type = ADDRESSOF;
2993 rhs.offset = 0;
2994 process_constraint (new_constraint (lhs, rhs));
2995 return;
2996 }
2997
2998 /* The size only really matters insofar as we don't set more or less of
2999 the variable. If we hit an unknown size var, the size should be the
3000 whole darn thing. */
3001 if (get_varinfo (rhs.var)->is_unknown_size_var)
3002 rhssize = ~0;
3003 else
3004 rhssize = TREE_INT_CST_LOW (rhstypesize);
3005
3006 if (get_varinfo (lhs.var)->is_unknown_size_var)
3007 lhssize = ~0;
3008 else
3009 lhssize = TREE_INT_CST_LOW (lhstypesize);
3010
3011
3012 if (rhs.type == SCALAR && lhs.type == SCALAR)
3013 {
3014 if (!do_simple_structure_copy (lhs, rhs, MIN (lhssize, rhssize)))
3015 {
3016 lhs.var = collapse_rest_of_var (lhs.var);
3017 rhs.var = collapse_rest_of_var (rhs.var);
3018 lhs.offset = 0;
3019 rhs.offset = 0;
3020 lhs.type = SCALAR;
3021 rhs.type = SCALAR;
3022 process_constraint (new_constraint (lhs, rhs));
3023 }
3024 }
3025 else if (lhs.type != DEREF && rhs.type == DEREF)
3026 do_rhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
3027 else if (lhs.type == DEREF && rhs.type != DEREF)
3028 do_lhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
3029 else
3030 {
3031 tree pointedtotype = lhstype;
3032 tree tmpvar;
3033
3034 gcc_assert (rhs.type == DEREF && lhs.type == DEREF);
3035 tmpvar = create_tmp_var_raw (pointedtotype, "structcopydereftmp");
3036 do_structure_copy (tmpvar, rhsop);
3037 do_structure_copy (lhsop, tmpvar);
3038 }
3039 }
3040 }
3041
3042
3043 /* Update related alias information kept in AI. This is used when
3044 building name tags, alias sets and deciding grouping heuristics.
3045 STMT is the statement to process. This function also updates
3046 ADDRESSABLE_VARS. */
3047
3048 static void
3049 update_alias_info (tree stmt, struct alias_info *ai)
3050 {
3051 bitmap addr_taken;
3052 use_operand_p use_p;
3053 ssa_op_iter iter;
3054 enum escape_type stmt_escape_type = is_escape_site (stmt);
3055 tree op;
3056
3057 if (stmt_escape_type == ESCAPE_TO_CALL
3058 || stmt_escape_type == ESCAPE_TO_PURE_CONST)
3059 {
3060 ai->num_calls_found++;
3061 if (stmt_escape_type == ESCAPE_TO_PURE_CONST)
3062 ai->num_pure_const_calls_found++;
3063 }
3064
3065 /* Mark all the variables whose address are taken by the statement. */
3066 addr_taken = addresses_taken (stmt);
3067 if (addr_taken)
3068 {
3069 bitmap_ior_into (addressable_vars, addr_taken);
3070
3071 /* If STMT is an escape point, all the addresses taken by it are
3072 call-clobbered. */
3073 if (stmt_escape_type != NO_ESCAPE)
3074 {
3075 bitmap_iterator bi;
3076 unsigned i;
3077
3078 EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i, bi)
3079 {
3080 tree rvar = referenced_var (i);
3081 if (!unmodifiable_var_p (rvar))
3082 mark_call_clobbered (rvar, stmt_escape_type);
3083 }
3084 }
3085 }
3086
3087 /* Process each operand use. If an operand may be aliased, keep
3088 track of how many times it's being used. For pointers, determine
3089 whether they are dereferenced by the statement, or whether their
3090 value escapes, etc. */
3091 FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_USE)
3092 {
3093 tree op, var;
3094 var_ann_t v_ann;
3095 struct ptr_info_def *pi;
3096 bool is_store, is_potential_deref;
3097 unsigned num_uses, num_derefs;
3098
3099 op = USE_FROM_PTR (use_p);
3100
3101 /* If STMT is a PHI node, OP may be an ADDR_EXPR. If so, add it
3102 to the set of addressable variables. */
3103 if (TREE_CODE (op) == ADDR_EXPR)
3104 {
3105 gcc_assert (TREE_CODE (stmt) == PHI_NODE);
3106
3107 /* PHI nodes don't have annotations for pinning the set
3108 of addresses taken, so we collect them here.
3109
3110 FIXME, should we allow PHI nodes to have annotations
3111 so that they can be treated like regular statements?
3112 Currently, they are treated as second-class
3113 statements. */
3114 add_to_addressable_set (TREE_OPERAND (op, 0), &addressable_vars);
3115 continue;
3116 }
3117
3118 /* Ignore constants. */
3119 if (TREE_CODE (op) != SSA_NAME)
3120 continue;
3121
3122 var = SSA_NAME_VAR (op);
3123 v_ann = var_ann (var);
3124
3125 /* The base variable of an ssa name must be a GIMPLE register, and thus
3126 it cannot be aliased. */
3127 gcc_assert (!may_be_aliased (var));
3128
3129 /* We are only interested in pointers. */
3130 if (!POINTER_TYPE_P (TREE_TYPE (op)))
3131 continue;
3132
3133 pi = get_ptr_info (op);
3134
3135 /* Add OP to AI->PROCESSED_PTRS, if it's not there already. */
3136 if (!TEST_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op)))
3137 {
3138 SET_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op));
3139 VEC_safe_push (tree, heap, ai->processed_ptrs, op);
3140 }
3141
3142 /* If STMT is a PHI node, then it will not have pointer
3143 dereferences and it will not be an escape point. */
3144 if (TREE_CODE (stmt) == PHI_NODE)
3145 continue;
3146
3147 /* Determine whether OP is a dereferenced pointer, and if STMT
3148 is an escape point, whether OP escapes. */
3149 count_uses_and_derefs (op, stmt, &num_uses, &num_derefs, &is_store);
3150
3151 /* Handle a corner case involving address expressions of the
3152 form '&PTR->FLD'. The problem with these expressions is that
3153 they do not represent a dereference of PTR. However, if some
3154 other transformation propagates them into an INDIRECT_REF
3155 expression, we end up with '*(&PTR->FLD)' which is folded
3156 into 'PTR->FLD'.
3157
3158 So, if the original code had no other dereferences of PTR,
3159 the aliaser will not create memory tags for it, and when
3160 &PTR->FLD gets propagated to INDIRECT_REF expressions, the
3161 memory operations will receive no V_MAY_DEF/VUSE operands.
3162
3163 One solution would be to have count_uses_and_derefs consider
3164 &PTR->FLD a dereference of PTR. But that is wrong, since it
3165 is not really a dereference but an offset calculation.
3166
3167 What we do here is to recognize these special ADDR_EXPR
3168 nodes. Since these expressions are never GIMPLE values (they
3169 are not GIMPLE invariants), they can only appear on the RHS
3170 of an assignment and their base address is always an
3171 INDIRECT_REF expression. */
3172 is_potential_deref = false;
3173 if (TREE_CODE (stmt) == MODIFY_EXPR
3174 && TREE_CODE (TREE_OPERAND (stmt, 1)) == ADDR_EXPR
3175 && !is_gimple_val (TREE_OPERAND (stmt, 1)))
3176 {
3177 /* If the RHS if of the form &PTR->FLD and PTR == OP, then
3178 this represents a potential dereference of PTR. */
3179 tree rhs = TREE_OPERAND (stmt, 1);
3180 tree base = get_base_address (TREE_OPERAND (rhs, 0));
3181 if (TREE_CODE (base) == INDIRECT_REF
3182 && TREE_OPERAND (base, 0) == op)
3183 is_potential_deref = true;
3184 }
3185
3186 if (num_derefs > 0 || is_potential_deref)
3187 {
3188 /* Mark OP as dereferenced. In a subsequent pass,
3189 dereferenced pointers that point to a set of
3190 variables will be assigned a name tag to alias
3191 all the variables OP points to. */
3192 pi->is_dereferenced = 1;
3193
3194 /* Keep track of how many time we've dereferenced each
3195 pointer. */
3196 NUM_REFERENCES_INC (v_ann);
3197
3198 /* If this is a store operation, mark OP as being
3199 dereferenced to store, otherwise mark it as being
3200 dereferenced to load. */
3201 if (is_store)
3202 bitmap_set_bit (ai->dereferenced_ptrs_store, DECL_UID (var));
3203 else
3204 bitmap_set_bit (ai->dereferenced_ptrs_load, DECL_UID (var));
3205 }
3206
3207 if (stmt_escape_type != NO_ESCAPE && num_derefs < num_uses)
3208 {
3209 /* If STMT is an escape point and STMT contains at
3210 least one direct use of OP, then the value of OP
3211 escapes and so the pointed-to variables need to
3212 be marked call-clobbered. */
3213 pi->value_escapes_p = 1;
3214 pi->escape_mask |= stmt_escape_type;
3215
3216 /* If the statement makes a function call, assume
3217 that pointer OP will be dereferenced in a store
3218 operation inside the called function. */
3219 if (get_call_expr_in (stmt)
3220 || stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
3221 {
3222 bitmap_set_bit (ai->dereferenced_ptrs_store, DECL_UID (var));
3223 pi->is_dereferenced = 1;
3224 }
3225 }
3226 }
3227
3228 if (TREE_CODE (stmt) == PHI_NODE)
3229 return;
3230
3231 /* Update reference counter for definitions to any
3232 potentially aliased variable. This is used in the alias
3233 grouping heuristics. */
3234 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3235 {
3236 tree var = SSA_NAME_VAR (op);
3237 var_ann_t ann = var_ann (var);
3238 bitmap_set_bit (ai->written_vars, DECL_UID (var));
3239 if (may_be_aliased (var))
3240 NUM_REFERENCES_INC (ann);
3241
3242 }
3243
3244 /* Mark variables in V_MAY_DEF operands as being written to. */
3245 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_VIRTUAL_DEFS)
3246 {
3247 tree var = DECL_P (op) ? op : SSA_NAME_VAR (op);
3248 bitmap_set_bit (ai->written_vars, DECL_UID (var));
3249 }
3250 }
3251
3252 /* Handle pointer arithmetic EXPR when creating aliasing constraints.
3253 Expressions of the type PTR + CST can be handled in two ways:
3254
3255 1- If the constraint for PTR is ADDRESSOF for a non-structure
3256 variable, then we can use it directly because adding or
3257 subtracting a constant may not alter the original ADDRESSOF
3258 constraint (i.e., pointer arithmetic may not legally go outside
3259 an object's boundaries).
3260
3261 2- If the constraint for PTR is ADDRESSOF for a structure variable,
3262 then if CST is a compile-time constant that can be used as an
3263 offset, we can determine which sub-variable will be pointed-to
3264 by the expression.
3265
3266 Return true if the expression is handled. For any other kind of
3267 expression, return false so that each operand can be added as a
3268 separate constraint by the caller. */
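
/* Example for case 2 (illustrative only; by this point the constant in
   a pointer PLUS_EXPR is a byte offset, which is scaled to bits below):

     struct S { int a; int b; } s;
     int *p;
     p = &s.a + 1;       (seen here as the base &s.a plus 4 bytes)

   The base constraint is ADDRESSOF of s.a, and the scaled offset of 32
   bits overlaps the sub-variable s.b, so the constraint generated is
   p = &s.b rather than one that loses the field information.  */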
3269
3270 static bool
3271 handle_ptr_arith (VEC (ce_s, heap) *lhsc, tree expr)
3272 {
3273 tree op0, op1;
3274 struct constraint_expr *c, *c2;
3275 unsigned int i = 0;
3276 unsigned int j = 0;
3277 VEC (ce_s, heap) *temp = NULL;
3278 unsigned HOST_WIDE_INT rhsoffset = 0;
3279
3280 if (TREE_CODE (expr) != PLUS_EXPR
3281 && TREE_CODE (expr) != MINUS_EXPR)
3282 return false;
3283
3284 op0 = TREE_OPERAND (expr, 0);
3285 op1 = TREE_OPERAND (expr, 1);
3286
3287 get_constraint_for (op0, &temp);
3288 if (POINTER_TYPE_P (TREE_TYPE (op0))
3289 && host_integerp (op1, 1)
3290 && TREE_CODE (expr) == PLUS_EXPR)
3291 {
3292 if ((TREE_INT_CST_LOW (op1) * BITS_PER_UNIT) / BITS_PER_UNIT
3293 != TREE_INT_CST_LOW (op1))
3294 return false;
3295 rhsoffset = TREE_INT_CST_LOW (op1) * BITS_PER_UNIT;
3296 }
3297 else
3298 return false;
3299
3300
3301 for (i = 0; VEC_iterate (ce_s, lhsc, i, c); i++)
3302 for (j = 0; VEC_iterate (ce_s, temp, j, c2); j++)
3303 {
3304 if (c2->type == ADDRESSOF && rhsoffset != 0)
3305 {
3306 varinfo_t temp = get_varinfo (c2->var);
3307
3308 /* An access one after the end of an array is valid,
3309 so simply punt on accesses we cannot resolve. */
3310 temp = first_vi_for_offset (temp, rhsoffset);
3311 if (temp == NULL)
3312 continue;
3313 c2->var = temp->id;
3314 c2->offset = 0;
3315 }
3316 else
3317 c2->offset = rhsoffset;
3318 process_constraint (new_constraint (*c, *c2));
3319 }
3320
3321 VEC_free (ce_s, heap, temp);
3322
3323 return true;
3324 }
3325
3326
3327 /* Walk statement T setting up aliasing constraints according to the
3328 references found in T. This function is the main part of the
3329 constraint builder. AI points to auxiliary alias information used
3330 when building alias sets and computing alias grouping heuristics. */
3331
3332 static void
3333 find_func_aliases (tree origt)
3334 {
3335 tree t = origt;
3336 VEC(ce_s, heap) *lhsc = NULL;
3337 VEC(ce_s, heap) *rhsc = NULL;
3338 struct constraint_expr *c;
3339
3340 if (TREE_CODE (t) == RETURN_EXPR && TREE_OPERAND (t, 0))
3341 t = TREE_OPERAND (t, 0);
3342
3343 /* Now build constraints expressions. */
3344 if (TREE_CODE (t) == PHI_NODE)
3345 {
3346 gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (PHI_RESULT (t))));
3347
3348 /* Only care about pointers and structures containing
3349 pointers. */
3350 if (could_have_pointers (PHI_RESULT (t)))
3351 {
3352 int i;
3353 unsigned int j;
3354
3355 /* For a phi node, assign all the arguments to
3356 the result. */
3357 get_constraint_for (PHI_RESULT (t), &lhsc);
3358 for (i = 0; i < PHI_NUM_ARGS (t); i++)
3359 {
3360 tree rhstype;
3361 tree strippedrhs = PHI_ARG_DEF (t, i);
3362
3363 STRIP_NOPS (strippedrhs);
3364 rhstype = TREE_TYPE (strippedrhs);
3365 get_constraint_for (PHI_ARG_DEF (t, i), &rhsc);
3366
3367 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3368 {
3369 struct constraint_expr *c2;
3370 while (VEC_length (ce_s, rhsc) > 0)
3371 {
3372 c2 = VEC_last (ce_s, rhsc);
3373 process_constraint (new_constraint (*c, *c2));
3374 VEC_pop (ce_s, rhsc);
3375 }
3376 }
3377 }
3378 }
3379 }
3380 /* In IPA mode, we need to generate constraints to pass call
3381 arguments through their calls. There are two cases: either a
3382 modify_expr when we are returning a value, or just a plain
3383 call_expr when we are not. */
3384 else if (in_ipa_mode
3385 && ((TREE_CODE (t) == MODIFY_EXPR
3386 && TREE_CODE (TREE_OPERAND (t, 1)) == CALL_EXPR
3387 && !(call_expr_flags (TREE_OPERAND (t, 1))
3388 & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))
3389 || (TREE_CODE (t) == CALL_EXPR
3390 && !(call_expr_flags (t)
3391 & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))))
3392 {
3393 tree lhsop;
3394 tree rhsop;
3395 tree arglist;
3396 varinfo_t fi;
3397 int i = 1;
3398 tree decl;
3399 if (TREE_CODE (t) == MODIFY_EXPR)
3400 {
3401 lhsop = TREE_OPERAND (t, 0);
3402 rhsop = TREE_OPERAND (t, 1);
3403 }
3404 else
3405 {
3406 lhsop = NULL;
3407 rhsop = t;
3408 }
3409 decl = get_callee_fndecl (rhsop);
3410
3411 /* If we can directly resolve the function being called, do so.
3412 Otherwise, it must be some sort of indirect expression that
3413 we should still be able to handle. */
3414 if (decl)
3415 {
3416 fi = get_vi_for_tree (decl);
3417 }
3418 else
3419 {
3420 decl = TREE_OPERAND (rhsop, 0);
3421 fi = get_vi_for_tree (decl);
3422 }
3423
3424 /* Assign all the passed arguments to the appropriate incoming
3425 parameters of the function. */
3426 arglist = TREE_OPERAND (rhsop, 1);
3427
3428 for (;arglist; arglist = TREE_CHAIN (arglist))
3429 {
3430 tree arg = TREE_VALUE (arglist);
3431 struct constraint_expr lhs ;
3432 struct constraint_expr *rhsp;
3433
3434 get_constraint_for (arg, &rhsc);
3435 if (TREE_CODE (decl) != FUNCTION_DECL)
3436 {
3437 lhs.type = DEREF;
3438 lhs.var = fi->id;
3439 lhs.offset = i;
3440 }
3441 else
3442 {
3443 lhs.type = SCALAR;
3444 lhs.var = first_vi_for_offset (fi, i)->id;
3445 lhs.offset = 0;
3446 }
3447 while (VEC_length (ce_s, rhsc) != 0)
3448 {
3449 rhsp = VEC_last (ce_s, rhsc);
3450 process_constraint (new_constraint (lhs, *rhsp));
3451 VEC_pop (ce_s, rhsc);
3452 }
3453 i++;
3454 }
3455
3456 /* If we are returning a value, assign it to the result. */
3457 if (lhsop)
3458 {
3459 struct constraint_expr rhs;
3460 struct constraint_expr *lhsp;
3461 unsigned int j = 0;
3462
3463 get_constraint_for (lhsop, &lhsc);
3464 if (TREE_CODE (decl) != FUNCTION_DECL)
3465 {
3466 rhs.type = DEREF;
3467 rhs.var = fi->id;
3468 rhs.offset = i;
3469 }
3470 else
3471 {
3472 rhs.type = SCALAR;
3473 rhs.var = first_vi_for_offset (fi, i)->id;
3474 rhs.offset = 0;
3475 }
3476 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3477 process_constraint (new_constraint (*lhsp, rhs));
3478 }
3479 }
3480 /* Otherwise, just a regular assignment statement. */
3481 else if (TREE_CODE (t) == MODIFY_EXPR)
3482 {
3483 tree lhsop = TREE_OPERAND (t, 0);
3484 tree rhsop = TREE_OPERAND (t, 1);
3485 int i;
3486
3487 if ((AGGREGATE_TYPE_P (TREE_TYPE (lhsop))
3488 || TREE_CODE (TREE_TYPE (lhsop)) == COMPLEX_TYPE)
3489 && (AGGREGATE_TYPE_P (TREE_TYPE (rhsop))
3490 || TREE_CODE (TREE_TYPE (lhsop)) == COMPLEX_TYPE))
3491 {
3492 do_structure_copy (lhsop, rhsop);
3493 }
3494 else
3495 {
3496 /* Only care about operations with pointers, structures
3497 containing pointers, dereferences, and call expressions. */
3498 if (could_have_pointers (lhsop)
3499 || TREE_CODE (rhsop) == CALL_EXPR)
3500 {
3501 get_constraint_for (lhsop, &lhsc);
3502 switch (TREE_CODE_CLASS (TREE_CODE (rhsop)))
3503 {
3504 /* RHS that consist of unary operations,
3505 exceptional types, or bare decls/constants, get
3506 handled directly by get_constraint_for. */
3507 case tcc_reference:
3508 case tcc_declaration:
3509 case tcc_constant:
3510 case tcc_exceptional:
3511 case tcc_expression:
3512 case tcc_unary:
3513 {
3514 unsigned int j;
3515
3516 get_constraint_for (rhsop, &rhsc);
3517 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3518 {
3519 struct constraint_expr *c2;
3520 unsigned int k;
3521
3522 for (k = 0; VEC_iterate (ce_s, rhsc, k, c2); k++)
3523 process_constraint (new_constraint (*c, *c2));
3524 }
3525
3526 }
3527 break;
3528
3529 case tcc_binary:
3530 {
3531 /* For pointer arithmetic of the form
3532 PTR + CST, we can simply use PTR's
3533 constraint because pointer arithmetic is
3534 not allowed to go out of bounds. */
3535 if (handle_ptr_arith (lhsc, rhsop))
3536 break;
3537 }
3538 /* FALLTHRU */
3539
3540 /* Otherwise, walk each operand. Notice that we
3541 can't use the operand interface because we need
3542 to process expressions other than simple operands
3543 (e.g. INDIRECT_REF, ADDR_EXPR, CALL_EXPR). */
3544 default:
3545 for (i = 0; i < TREE_CODE_LENGTH (TREE_CODE (rhsop)); i++)
3546 {
3547 tree op = TREE_OPERAND (rhsop, i);
3548 unsigned int j;
3549
3550 gcc_assert (VEC_length (ce_s, rhsc) == 0);
3551 get_constraint_for (op, &rhsc);
3552 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3553 {
3554 struct constraint_expr *c2;
3555 while (VEC_length (ce_s, rhsc) > 0)
3556 {
3557 c2 = VEC_last (ce_s, rhsc);
3558 process_constraint (new_constraint (*c, *c2));
3559 VEC_pop (ce_s, rhsc);
3560 }
3561 }
3562 }
3563 }
3564 }
3565 }
3566 }
3567
3568 /* After promoting variables and computing aliasing we will
3569 need to re-scan most statements. FIXME: Try to minimize the
3570 number of statements re-scanned. It's not really necessary to
3571 re-scan *all* statements. */
3572 mark_stmt_modified (origt);
3573 VEC_free (ce_s, heap, rhsc);
3574 VEC_free (ce_s, heap, lhsc);
3575 }
3576
3577
3578 /* Find the first varinfo in the same variable as START that overlaps with
3579 OFFSET.
3580 Effectively, walk the chain of fields for the variable START to find the
3581 first field that overlaps with OFFSET.
3582 Return NULL if we can't find one. */
3583
3584 static varinfo_t
3585 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
3586 {
3587 varinfo_t curr = start;
3588 while (curr)
3589 {
3590 /* We may not find a variable in the field list with the actual
3591 offset when we have glommed a structure to a variable.
3592 In that case, however, offset should still be within the size
3593 of the variable. */
3594 if (offset >= curr->offset && offset < (curr->offset + curr->size))
3595 return curr;
3596 curr = curr->next;
3597 }
3598 return NULL;
3599 }
3600
3601
3602 /* Insert the varinfo FIELD into the field list for BASE, at the front
3603 of the list. */
3604
3605 static void
3606 insert_into_field_list (varinfo_t base, varinfo_t field)
3607 {
3608 varinfo_t prev = base;
3609 varinfo_t curr = base->next;
3610
3611 field->next = curr;
3612 prev->next = field;
3613 }
3614
3615 /* Insert the varinfo FIELD into the field list for BASE, ordered by
3616 offset. */
3617
3618 static void
3619 insert_into_field_list_sorted (varinfo_t base, varinfo_t field)
3620 {
3621 varinfo_t prev = base;
3622 varinfo_t curr = base->next;
3623
3624 if (curr == NULL)
3625 {
3626 prev->next = field;
3627 field->next = NULL;
3628 }
3629 else
3630 {
3631 while (curr)
3632 {
3633 if (field->offset <= curr->offset)
3634 break;
3635 prev = curr;
3636 curr = curr->next;
3637 }
3638 field->next = prev->next;
3639 prev->next = field;
3640 }
3641 }
3642
3643 /* qsort comparison function for two fieldoff's PA and PB */
3644
3645 static int
3646 fieldoff_compare (const void *pa, const void *pb)
3647 {
3648 const fieldoff_s *foa = (const fieldoff_s *)pa;
3649 const fieldoff_s *fob = (const fieldoff_s *)pb;
3650 HOST_WIDE_INT foasize, fobsize;
3651
3652 if (foa->offset != fob->offset)
3653 return foa->offset - fob->offset;
3654
3655 foasize = TREE_INT_CST_LOW (foa->size);
3656 fobsize = TREE_INT_CST_LOW (fob->size);
3657 return foasize - fobsize;
3658 }
3659
3660 /* Sort a fieldstack according to the field offset and sizes. */
3661 void
3662 sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
3663 {
3664 qsort (VEC_address (fieldoff_s, fieldstack),
3665 VEC_length (fieldoff_s, fieldstack),
3666 sizeof (fieldoff_s),
3667 fieldoff_compare);
3668 }
3669
3670 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all the fields
3671 of TYPE onto fieldstack, recording their offsets along the way.
3672 OFFSET is used to keep track of the offset in this entire structure, rather
3673 than just the immediately containing structure. Returns the number
3674 of fields pushed.
3675 HAS_UNION is set to true if we find a union type as a field of
3676 TYPE. */
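
/* Illustrative example (offsets and sizes in bits, assuming 32-bit int
   and 64-bit double):

     struct inner { int i; int j; };
     struct outer { struct inner in; double d; };

   pushes three entries for struct outer:

     offset  0,  size 32   (in.i)
     offset 32,  size 32   (in.j)
     offset 64,  size 64   (d)

   i.e. nested aggregates are flattened recursively, with OFFSET
   accumulating the position within the outermost structure.  */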
3677
3678 int
3679 push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
3680 HOST_WIDE_INT offset, bool *has_union)
3681 {
3682 tree field;
3683 int count = 0;
3684 unsigned HOST_WIDE_INT minoffset = -1;
3685
3686 if (TREE_CODE (type) == COMPLEX_TYPE)
3687 {
3688 fieldoff_s *real_part, *img_part;
3689 real_part = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
3690 real_part->type = TREE_TYPE (type);
3691 real_part->size = TYPE_SIZE (TREE_TYPE (type));
3692 real_part->offset = offset;
3693 real_part->decl = NULL_TREE;
3694
3695 img_part = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
3696 img_part->type = TREE_TYPE (type);
3697 img_part->size = TYPE_SIZE (TREE_TYPE (type));
3698 img_part->offset = offset + TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (type)));
3699 img_part->decl = NULL_TREE;
3700
3701 return 2;
3702 }
3703
3704 if (TREE_CODE (type) == ARRAY_TYPE)
3705 {
3706 tree sz = TYPE_SIZE (type);
3707 tree elsz = TYPE_SIZE (TREE_TYPE (type));
3708 HOST_WIDE_INT nr;
3709 int i;
3710
3711 if (! sz
3712 || ! host_integerp (sz, 1)
3713 || TREE_INT_CST_LOW (sz) == 0
3714 || ! elsz
3715 || ! host_integerp (elsz, 1)
3716 || TREE_INT_CST_LOW (elsz) == 0)
3717 return 0;
3718
3719 nr = TREE_INT_CST_LOW (sz) / TREE_INT_CST_LOW (elsz);
3720 if (nr > SALIAS_MAX_ARRAY_ELEMENTS)
3721 return 0;
3722
3723 for (i = 0; i < nr; ++i)
3724 {
3725 bool push = false;
3726 int pushed = 0;
3727
3728 if (has_union
3729 && (TREE_CODE (TREE_TYPE (type)) == QUAL_UNION_TYPE
3730 || TREE_CODE (TREE_TYPE (type)) == UNION_TYPE))
3731 *has_union = true;
3732
3733 if (!AGGREGATE_TYPE_P (TREE_TYPE (type))) /* var_can_have_subvars */
3734 push = true;
3735 else if (!(pushed = push_fields_onto_fieldstack
3736 (TREE_TYPE (type), fieldstack,
3737 offset + i * TREE_INT_CST_LOW (elsz), has_union)))
3738 /* Empty structures may have actual size, like in C++.  So, if
3739 we didn't push any subfields and the size is nonzero, push
3740 the field onto the stack.  */
3741 push = true;
3742
3743 if (push)
3744 {
3745 fieldoff_s *pair;
3746
3747 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
3748 pair->type = TREE_TYPE (type);
3749 pair->size = elsz;
3750 pair->decl = NULL_TREE;
3751 pair->offset = offset + i * TREE_INT_CST_LOW (elsz);
3752 count++;
3753 }
3754 else
3755 count += pushed;
3756 }
3757
3758 return count;
3759 }
3760
3761 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
3762 if (TREE_CODE (field) == FIELD_DECL)
3763 {
3764 bool push = false;
3765 int pushed = 0;
3766
3767 if (has_union
3768 && (TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
3769 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE))
3770 *has_union = true;
3771
3772 if (!var_can_have_subvars (field))
3773 push = true;
3774 else if (!(pushed = push_fields_onto_fieldstack
3775 (TREE_TYPE (field), fieldstack,
3776 offset + bitpos_of_field (field), has_union))
3777 && DECL_SIZE (field)
3778 && !integer_zerop (DECL_SIZE (field)))
3779 /* Empty structures may have actual size, like in C++.  So, if
3780 we didn't push any subfields and the size is nonzero, push
3781 the field onto the stack.  */
3782 push = true;
3783
3784 if (push)
3785 {
3786 fieldoff_s *pair;
3787
3788 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
3789 pair->type = TREE_TYPE (field);
3790 pair->size = DECL_SIZE (field);
3791 pair->decl = field;
3792 pair->offset = offset + bitpos_of_field (field);
3793 count++;
3794 }
3795 else
3796 count += pushed;
3797
3798 if (bitpos_of_field (field) < minoffset)
3799 minoffset = bitpos_of_field (field);
3800 }
3801
3802 /* If the first field does not start at offset zero (e.g. because of an
3803 empty base class), create a fake subvar to cover the gap.  But _only_ for non-empty classes.  */
3804 if (minoffset != 0 && count != 0)
3805 {
3806 fieldoff_s *pair;
3807
3808 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
3809 pair->type = void_type_node;
3810 pair->size = build_int_cst (size_type_node, minoffset);
3811 pair->decl = NULL;
3812 pair->offset = offset;
3813 count++;
3814 }
3815
3816 return count;
3817 }
3818
3819 /* Create a constraint from the ESCAPED_VARS variable to VI. */
3820 static void
3821 make_constraint_from_escaped (varinfo_t vi)
3822 {
3823 struct constraint_expr lhs, rhs;
3824
3825 lhs.var = vi->id;
3826 lhs.offset = 0;
3827 lhs.type = SCALAR;
3828
3829 rhs.var = escaped_vars_id;
3830 rhs.offset = 0;
3831 rhs.type = SCALAR;
3832 process_constraint (new_constraint (lhs, rhs));
3833 }
3834
3835 /* Create a constraint to the ESCAPED_VARS variable from constraint
3836 expression RHS. */
3837
3838 static void
3839 make_constraint_to_escaped (struct constraint_expr rhs)
3840 {
3841 struct constraint_expr lhs;
3842
3843 lhs.var = escaped_vars_id;
3844 lhs.offset = 0;
3845 lhs.type = SCALAR;
3846
3847 process_constraint (new_constraint (lhs, rhs));
3848 }
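
/* For example, for a global pointer variable G that may be aliased,
   create_variable_info_for below generates both
   G = ESCAPED_VARS (via make_constraint_from_escaped) and
   ESCAPED_VARS = &G (via make_constraint_to_escaped).  */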
3849
3850 /* Count the number of arguments DECL has, and set IS_VARARGS to true
3851 if it is a varargs function. */
3852
3853 static unsigned int
3854 count_num_arguments (tree decl, bool *is_varargs)
3855 {
3856 unsigned int i = 0;
3857 tree t;
3858
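  /* Walk the prototype's TYPE_ARG_TYPES list.  A terminating void_type_node
     marks a fixed argument list; running off the end of the list instead
     means the function is varargs (or unprototyped).  */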
3859 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl));
3860 t;
3861 t = TREE_CHAIN (t))
3862 {
3863 if (TREE_VALUE (t) == void_type_node)
3864 break;
3865 i++;
3866 }
3867
3868 if (!t)
3869 *is_varargs = true;
3870 return i;
3871 }
3872
3873 /* Create the variable info structures for the function DECL, using NAME,
3874 and return the index of the variable we've created for the function. */
3875
3876 static unsigned int
3877 create_function_info_for (tree decl, const char *name)
3878 {
3879 unsigned int index = VEC_length (varinfo_t, varmap);
3880 varinfo_t vi;
3881 tree arg;
3882 unsigned int i;
3883 bool is_varargs = false;
3884
3885 /* Create the variable info. */
3886
3887 vi = new_var_info (decl, index, name);
3888 vi->decl = decl;
3889 vi->offset = 0;
3890 vi->has_union = 0;
3891 vi->size = 1;
3892 vi->fullsize = count_num_arguments (decl, &is_varargs) + 1;
3893 insert_vi_for_tree (vi->decl, vi);
3894 VEC_safe_push (varinfo_t, heap, varmap, vi);
3895
3896 stats.total_vars++;
3897
3898 /* If it's varargs, we don't know how many arguments it has, so we
3899 can't do much.
3900 */
3901 if (is_varargs)
3902 {
3903 vi->fullsize = ~0;
3904 vi->size = ~0;
3905 vi->is_unknown_size_var = true;
3906 return index;
3907 }
3908
3909
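  /* The function is modeled like a structure: the variable created above is
     the whole function at offset 0, the arguments become fields at offsets
     1..N, and a field for the return value is appended last.  Each field
     has size 1.  */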
3910 arg = DECL_ARGUMENTS (decl);
3911
3912 /* Set up variables for each argument. */
3913 for (i = 1; i < vi->fullsize; i++)
3914 {
3915 varinfo_t argvi;
3916 const char *newname;
3917 char *tempname;
3918 unsigned int newindex;
3919 tree argdecl = decl;
3920
3921 if (arg)
3922 argdecl = arg;
3923
3924 newindex = VEC_length (varinfo_t, varmap);
3925 asprintf (&tempname, "%s.arg%d", name, i-1);
3926 newname = ggc_strdup (tempname);
3927 free (tempname);
3928
3929 argvi = new_var_info (argdecl, newindex, newname);
3930 argvi->decl = argdecl;
3931 VEC_safe_push (varinfo_t, heap, varmap, argvi);
3932 argvi->offset = i;
3933 argvi->size = 1;
3934 argvi->fullsize = vi->fullsize;
3935 argvi->has_union = false;
3936 insert_into_field_list_sorted (vi, argvi);
3937 stats.total_vars ++;
3938 if (arg)
3939 {
3940 insert_vi_for_tree (arg, argvi);
3941 arg = TREE_CHAIN (arg);
3942 }
3943 }
3944
3945 /* Create a variable for the return var. */
3946 if (DECL_RESULT (decl) != NULL
3947 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
3948 {
3949 varinfo_t resultvi;
3950 const char *newname;
3951 char *tempname;
3952 unsigned int newindex;
3953 tree resultdecl = decl;
3954
3955 vi->fullsize ++;
3956
3957 if (DECL_RESULT (decl))
3958 resultdecl = DECL_RESULT (decl);
3959
3960 newindex = VEC_length (varinfo_t, varmap);
3961 asprintf (&tempname, "%s.result", name);
3962 newname = ggc_strdup (tempname);
3963 free (tempname);
3964
3965 resultvi = new_var_info (resultdecl, newindex, newname);
3966 resultvi->decl = resultdecl;
3967 VEC_safe_push (varinfo_t, heap, varmap, resultvi);
3968 resultvi->offset = i;
3969 resultvi->size = 1;
3970 resultvi->fullsize = vi->fullsize;
3971 resultvi->has_union = false;
3972 insert_into_field_list_sorted (vi, resultvi);
3973 stats.total_vars ++;
3974 if (DECL_RESULT (decl))
3975 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
3976 }
3977 return index;
3978 }
3979
3980
3981 /* Return true if FIELDSTACK contains fields that overlap.
3982 FIELDSTACK is assumed to be sorted by offset. */
3983
3984 static bool
3985 check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
3986 {
3987 fieldoff_s *fo = NULL;
3988 unsigned int i;
3989 HOST_WIDE_INT lastoffset = -1;
3990
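  /* Because FIELDSTACK is sorted by offset, any overlap we care about shows
     up as two consecutive entries with the same offset.  */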
3991 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
3992 {
3993 if (fo->offset == lastoffset)
3994 return true;
3995 lastoffset = fo->offset;
3996 }
3997 return false;
3998 }
3999
4000 /* This function is called through walk_tree to walk global
4001 initializers looking for constraints we need to add to the
4002 constraint list. */
4003
4004 static tree
4005 find_global_initializers (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4006 void *viv)
4007 {
4008 varinfo_t vi = (varinfo_t)viv;
4009 tree t = *tp;
4010
4011 switch (TREE_CODE (t))
4012 {
4013 /* Dereferences and address-ofs are the only important things
4014 here, and I don't even remember whether dereferences are legal
4015 in initializers. */
4016 case INDIRECT_REF:
4017 case ADDR_EXPR:
4018 {
4019 struct constraint_expr *c;
4020 size_t i;
4021
4022 VEC(ce_s, heap) *rhsc = NULL;
4023 get_constraint_for (t, &rhsc);
4024 for (i = 0; VEC_iterate (ce_s, rhsc, i, c); i++)
4025 {
4026 struct constraint_expr lhs;
4027
4028 lhs.var = vi->id;
4029 lhs.type = SCALAR;
4030 lhs.offset = 0;
4031 process_constraint (new_constraint (lhs, *c));
4032 }
4033
4034 VEC_free (ce_s, heap, rhsc);
4035 }
4036 break;
4037 case VAR_DECL:
4038 /* We might not have walked this because we skip
4039 DECL_EXTERNALs during the initial scan. */
4040 if (referenced_vars)
4041 {
4042 get_var_ann (t);
4043 if (referenced_var_check_and_insert (t))
4044 mark_sym_for_renaming (t);
4045 }
4046 break;
4047 default:
4048 break;
4049 }
4050 return NULL_TREE;
4051 }
4052
4053 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
4054 This will also create any varinfo structures necessary for fields
4055 of DECL. */
4056
4057 static unsigned int
4058 create_variable_info_for (tree decl, const char *name)
4059 {
4060 unsigned int index = VEC_length (varinfo_t, varmap);
4061 varinfo_t vi;
4062 tree decltype = TREE_TYPE (decl);
4063 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decltype);
4064 bool notokay = false;
4065 bool hasunion;
4066 bool is_global = DECL_P (decl) ? is_global_var (decl) : false;
4067 VEC (fieldoff_s,heap) *fieldstack = NULL;
4068
4069 if (TREE_CODE (decl) == FUNCTION_DECL && in_ipa_mode)
4070 return create_function_info_for (decl, name);
4071
4072 hasunion = TREE_CODE (decltype) == UNION_TYPE
4073 || TREE_CODE (decltype) == QUAL_UNION_TYPE;
4074 if (var_can_have_subvars (decl) && use_field_sensitive && !hasunion)
4075 {
4076 push_fields_onto_fieldstack (decltype, &fieldstack, 0, &hasunion);
4077 if (hasunion)
4078 {
4079 VEC_free (fieldoff_s, heap, fieldstack);
4080 notokay = true;
4081 }
4082 }
4083
4084
4085 /* If the variable doesn't have subvars, we may end up needing to
4086 sort the field list and create fake variables for all the
4087 fields. */
4088 vi = new_var_info (decl, index, name);
4089 vi->decl = decl;
4090 vi->offset = 0;
4091 vi->has_union = hasunion;
4092 if (!declsize
4093 || TREE_CODE (declsize) != INTEGER_CST
4094 || TREE_CODE (decltype) == UNION_TYPE
4095 || TREE_CODE (decltype) == QUAL_UNION_TYPE)
4096 {
4097 vi->is_unknown_size_var = true;
4098 vi->fullsize = ~0;
4099 vi->size = ~0;
4100 }
4101 else
4102 {
4103 vi->fullsize = TREE_INT_CST_LOW (declsize);
4104 vi->size = vi->fullsize;
4105 }
4106
4107 insert_vi_for_tree (vi->decl, vi);
4108 VEC_safe_push (varinfo_t, heap, varmap, vi);
4109 if (is_global && (!flag_whole_program || !in_ipa_mode))
4110 {
4111 make_constraint_from_escaped (vi);
4112
4113 /* If the variable can't be aliased, there is no point in
4114 putting it in the set of nonlocal vars. */
4115 if (may_be_aliased (vi->decl))
4116 {
4117 struct constraint_expr rhs;
4118 rhs.var = index;
4119 rhs.type = ADDRESSOF;
4120 rhs.offset = 0;
4121 make_constraint_to_escaped (rhs);
4122 }
4123
4124 if (TREE_CODE (decl) != FUNCTION_DECL && DECL_INITIAL (decl))
4125 {
4126 walk_tree_without_duplicates (&DECL_INITIAL (decl),
4127 find_global_initializers,
4128 (void *)vi);
4129 }
4130 }
4131
4132 stats.total_vars++;
4133 if (use_field_sensitive
4134 && !notokay
4135 && !vi->is_unknown_size_var
4136 && var_can_have_subvars (decl)
4137 && VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
4138 {
4139 unsigned int newindex = VEC_length (varinfo_t, varmap);
4140 fieldoff_s *fo = NULL;
4141 unsigned int i;
4142
4143 for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4144 {
4145 if (! fo->size
4146 || TREE_CODE (fo->size) != INTEGER_CST
4147 || fo->offset < 0)
4148 {
4149 notokay = true;
4150 break;
4151 }
4152 }
4153
4154 /* We can't sort them if we have a field with a variable sized type,
4155 which will make notokay = true. In that case, we are going to return
4156 without creating varinfos for the fields anyway, so sorting them is a
4157 waste to boot. */
4158 if (!notokay)
4159 {
4160 sort_fieldstack (fieldstack);
4161 /* Due to some C++ FE issues, like PR 22488, we might end up
4162 with what appear to be overlapping fields even though they,
4163 in reality, do not overlap. Until the C++ FE is fixed,
4164 we will simply disable field-sensitivity for these cases. */
4165 notokay = check_for_overlaps (fieldstack);
4166 }
4167
4168
4169 if (VEC_length (fieldoff_s, fieldstack) != 0)
4170 fo = VEC_index (fieldoff_s, fieldstack, 0);
4171
4172 if (fo == NULL || notokay)
4173 {
4174 vi->is_unknown_size_var = 1;
4175 vi->fullsize = ~0;
4176 vi->size = ~0;
4177 VEC_free (fieldoff_s, heap, fieldstack);
4178 return index;
4179 }
4180
4181 vi->size = TREE_INT_CST_LOW (fo->size);
4182 vi->offset = fo->offset;
4183 for (i = VEC_length (fieldoff_s, fieldstack) - 1;
4184 i >= 1 && VEC_iterate (fieldoff_s, fieldstack, i, fo);
4185 i--)
4186 {
4187 varinfo_t newvi;
4188 const char *newname = "NULL";
4189 char *tempname;
4190
4191 newindex = VEC_length (varinfo_t, varmap);
4192 if (dump_file)
4193 {
4194 if (fo->decl)
4195 asprintf (&tempname, "%s.%s",
4196 vi->name, alias_get_name (fo->decl));
4197 else
4198 asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC,
4199 vi->name, fo->offset);
4200 newname = ggc_strdup (tempname);
4201 free (tempname);
4202 }
4203 newvi = new_var_info (decl, newindex, newname);
4204 newvi->offset = fo->offset;
4205 newvi->size = TREE_INT_CST_LOW (fo->size);
4206 newvi->fullsize = vi->fullsize;
4207 insert_into_field_list (vi, newvi);
4208 VEC_safe_push (varinfo_t, heap, varmap, newvi);
4209 if (is_global && (!flag_whole_program || !in_ipa_mode))
4210 {
4211 /* If the variable can't be aliased, there is no point in
4212 putting it in the set of nonlocal vars. */
4213 if (may_be_aliased (vi->decl))
4214 {
4215 struct constraint_expr rhs;
4216
4217 rhs.var = newindex;
4218 rhs.type = ADDRESSOF;
4219 rhs.offset = 0;
4220 make_constraint_to_escaped (rhs);
4221 }
4222 make_constraint_from_escaped (newvi);
4223 }
4224
4225 stats.total_vars++;
4226 }
4227 VEC_free (fieldoff_s, heap, fieldstack);
4228 }
4229 return index;
4230 }
4231
4232 /* Print out the points-to solution for VAR to FILE. */
4233
4234 void
4235 dump_solution_for_var (FILE *file, unsigned int var)
4236 {
4237 varinfo_t vi = get_varinfo (var);
4238 unsigned int i;
4239 bitmap_iterator bi;
4240
4241 if (find (var) != var)
4242 {
4243 varinfo_t vipt = get_varinfo (find (var));
4244 fprintf (file, "%s = same as %s\n", vi->name, vipt->name);
4245 }
4246 else
4247 {
4248 fprintf (file, "%s = { ", vi->name);
4249 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4250 {
4251 fprintf (file, "%s ", get_varinfo (i)->name);
4252 }
4253 fprintf (file, "}\n");
4254 }
4255 }
4256
4257 /* Print the points-to solution for VAR to stdout. */
4258
4259 void
4260 debug_solution_for_var (unsigned int var)
4261 {
4262 dump_solution_for_var (stdout, var);
4263 }
4264
4265 /* Create varinfo structures for all of the variables in the
4266 function for intraprocedural mode. */
4267
4268 static void
4269 intra_create_variable_infos (void)
4270 {
4271 tree t;
4272 struct constraint_expr lhs, rhs;
4273 varinfo_t nonlocal_vi;
4274
4275 /* For each incoming pointer argument ARG, set up ARG = ESCAPED_VARS, or
4276 make ARG point to a dummy HEAP variable if flag_argument_noalias > 2. */
4277 for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
4278 {
4279 varinfo_t p;
4280 unsigned int arg_id;
4281
4282 if (!could_have_pointers (t))
4283 continue;
4284
4285 arg_id = get_vi_for_tree (t)->id;
4286
4287 /* A flag_argument_noalias value greater than two means that the
4288 incoming argument cannot alias anything except itself, so create
4289 a HEAP variable for it. */
4290 if (POINTER_TYPE_P (TREE_TYPE (t))
4291 && flag_argument_noalias > 2)
4292 {
4293 varinfo_t vi;
4294 tree heapvar = heapvar_lookup (t);
4295
4296 lhs.offset = 0;
4297 lhs.type = SCALAR;
4298 lhs.var = get_vi_for_tree (t)->id;
4299
4300 if (heapvar == NULL_TREE)
4301 {
4302 heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
4303 "PARM_NOALIAS");
4304 DECL_EXTERNAL (heapvar) = 1;
4305 if (referenced_vars)
4306 add_referenced_var (heapvar);
4307 heapvar_insert (t, heapvar);
4308 }
4309
4310 vi = get_vi_for_tree (heapvar);
4311 vi->is_artificial_var = 1;
4312 vi->is_heap_var = 1;
4313 rhs.var = vi->id;
4314 rhs.type = ADDRESSOF;
4315 rhs.offset = 0;
4316 for (p = get_varinfo (lhs.var); p; p = p->next)
4317 {
4318 struct constraint_expr temp = lhs;
4319 temp.var = p->id;
4320 process_constraint (new_constraint (temp, rhs));
4321 }
4322 }
4323 else
4324 {
4325 for (p = get_varinfo (arg_id); p; p = p->next)
4326 make_constraint_from_escaped (p);
4327 }
4328 }
4329 if (!nonlocal_all)
4330 nonlocal_all = create_nonlocal_var (void_type_node);
4331
4332 /* Create variable info for the nonlocal var if it does not
4333 exist. */
4334 nonlocal_vars_id = create_variable_info_for (nonlocal_all,
4335 get_name (nonlocal_all));
4336 nonlocal_vi = get_varinfo (nonlocal_vars_id);
4337 nonlocal_vi->is_artificial_var = 1;
4338 nonlocal_vi->is_heap_var = 1;
4339 nonlocal_vi->is_unknown_size_var = 1;
4340 nonlocal_vi->directly_dereferenced = true;
4341
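  /* Anything that has escaped may point at the nonlocal blob:
     ESCAPED_VARS = &NONLOCAL.  */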
4342 rhs.var = nonlocal_vars_id;
4343 rhs.type = ADDRESSOF;
4344 rhs.offset = 0;
4345
4346 lhs.var = escaped_vars_id;
4347 lhs.type = SCALAR;
4348 lhs.offset = 0;
4349
4350 process_constraint (new_constraint (lhs, rhs));
4351 }
4352
4353 /* Set bits in INTO corresponding to the variable uids in solution set
4354 FROM, which came from variable PTR.
4355 For variables that are actually dereferenced, we also use type
4356 based alias analysis to prune the points-to sets. */
4357
4358 static void
4359 set_uids_in_ptset (tree ptr, bitmap into, bitmap from)
4360 {
4361 unsigned int i;
4362 bitmap_iterator bi;
4363 subvar_t sv;
4364 unsigned HOST_WIDE_INT ptr_alias_set = get_alias_set (TREE_TYPE (ptr));
4365
4366 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
4367 {
4368 varinfo_t vi = get_varinfo (i);
4369 unsigned HOST_WIDE_INT var_alias_set;
4370
4371 /* The only artificial variables that are allowed in a may-alias
4372 set are heap variables. */
4373 if (vi->is_artificial_var && !vi->is_heap_var)
4374 continue;
4375
4376 if (vi->has_union && get_subvars_for_var (vi->decl) != NULL)
4377 {
4378 /* Variables containing unions may need to be converted to
4379 their SFT's, because SFT's can have unions and we cannot. */
4380 for (sv = get_subvars_for_var (vi->decl); sv; sv = sv->next)
4381 bitmap_set_bit (into, DECL_UID (sv->var));
4382 }
4383 else if (TREE_CODE (vi->decl) == VAR_DECL
4384 || TREE_CODE (vi->decl) == PARM_DECL
4385 || TREE_CODE (vi->decl) == RESULT_DECL)
4386 {
4387 if (var_can_have_subvars (vi->decl)
4388 && get_subvars_for_var (vi->decl))
4389 {
4390 /* If VI->DECL is an aggregate for which we created
4391 SFTs, add the SFT corresponding to VI->OFFSET. */
4392 tree sft = get_subvar_at (vi->decl, vi->offset);
4393 if (sft)
4394 {
4395 var_alias_set = get_alias_set (sft);
4396 if (!vi->directly_dereferenced
4397 || alias_sets_conflict_p (ptr_alias_set, var_alias_set))
4398 bitmap_set_bit (into, DECL_UID (sft));
4399 }
4400 }
4401 else
4402 {
4403 /* Otherwise, just add VI->DECL to the alias set.
4404 Don't type prune artificial vars. */
4405 if (vi->is_artificial_var)
4406 bitmap_set_bit (into, DECL_UID (vi->decl));
4407 else
4408 {
4409 var_alias_set = get_alias_set (vi->decl);
4410 if (!vi->directly_dereferenced
4411 || alias_sets_conflict_p (ptr_alias_set, var_alias_set))
4412 bitmap_set_bit (into, DECL_UID (vi->decl));
4413 }
4414 }
4415 }
4416 }
4417 }
4418
4419
4420 static bool have_alias_info = false;
4421
4422 /* Given a pointer variable P, fill in its points-to set, or return
4423 false if we can't. */
4424
4425 bool
4426 find_what_p_points_to (tree p)
4427 {
4428 tree lookup_p = p;
4429 varinfo_t vi;
4430
4431 if (!have_alias_info)
4432 return false;
4433
4434 /* For parameters, get at the points-to set for the actual parm
4435 decl. */
4436 if (TREE_CODE (p) == SSA_NAME
4437 && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
4438 && default_def (SSA_NAME_VAR (p)) == p)
4439 lookup_p = SSA_NAME_VAR (p);
4440
4441 vi = lookup_vi_for_tree (lookup_p);
4442 if (vi)
4443 {
4444
4445 if (vi->is_artificial_var)
4446 return false;
4447
4448 /* See if this is a field or a structure. */
4449 if (vi->size != vi->fullsize)
4450 {
4451 /* Nothing currently asks about structure fields directly,
4452 but when they do, we need code here to hand back the
4453 points-to set. */
4454 if (!var_can_have_subvars (vi->decl)
4455 || get_subvars_for_var (vi->decl) == NULL)
4456 return false;
4457 }
4458 else
4459 {
4460 struct ptr_info_def *pi = get_ptr_info (p);
4461 unsigned int i;
4462 bitmap_iterator bi;
4463
4464 /* This variable may have been collapsed, let's get the real
4465 variable. */
4466 vi = get_varinfo (find (vi->id));
4467
4468 /* Translate artificial variables into SSA_NAME_PTR_INFO
4469 attributes. */
4470 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4471 {
4472 varinfo_t vi = get_varinfo (i);
4473
4474 if (vi->is_artificial_var)
4475 {
4476 /* FIXME. READONLY should be handled better so that
4477 flow insensitive aliasing can disregard writable
4478 aliases. */
4479 if (vi->id == nothing_id)
4480 pi->pt_null = 1;
4481 else if (vi->id == anything_id)
4482 pi->pt_anything = 1;
4483 else if (vi->id == readonly_id)
4484 pi->pt_anything = 1;
4485 else if (vi->id == integer_id)
4486 pi->pt_anything = 1;
4487 else if (vi->is_heap_var)
4488 pi->pt_global_mem = 1;
4489 }
4490 }
4491
4492 if (pi->pt_anything)
4493 return false;
4494
4495 if (!pi->pt_vars)
4496 pi->pt_vars = BITMAP_GGC_ALLOC ();
4497
4498 set_uids_in_ptset (vi->decl, pi->pt_vars, vi->solution);
4499
4500 if (bitmap_empty_p (pi->pt_vars))
4501 pi->pt_vars = NULL;
4502
4503 return true;
4504 }
4505 }
4506
4507 return false;
4508 }
4509
4510
4511
4512 /* Dump points-to information to OUTFILE. */
4513
4514 void
4515 dump_sa_points_to_info (FILE *outfile)
4516 {
4517 unsigned int i;
4518
4519 fprintf (outfile, "\nPoints-to sets\n\n");
4520
4521 if (dump_flags & TDF_STATS)
4522 {
4523 fprintf (outfile, "Stats:\n");
4524 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
4525 fprintf (outfile, "Non-pointer vars: %d\n",
4526 stats.nonpointer_vars);
4527 fprintf (outfile, "Statically unified vars: %d\n",
4528 stats.unified_vars_static);
4529 fprintf (outfile, "Dynamically unified vars: %d\n",
4530 stats.unified_vars_dynamic);
4531 fprintf (outfile, "Iterations: %d\n", stats.iterations);
4532 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
4533 fprintf (outfile, "Number of implicit edges: %d\n",
4534 stats.num_implicit_edges);
4535 }
4536
4537 for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
4538 dump_solution_for_var (outfile, i);
4539 }
4540
4541
4542 /* Debug points-to information to stderr. */
4543
4544 void
4545 debug_sa_points_to_info (void)
4546 {
4547 dump_sa_points_to_info (stderr);
4548 }
4549
4550
4551 /* Initialize the always-existing constraint variables for NULL,
4552 ANYTHING, READONLY, INTEGER, and ESCAPED_VARS. */
4553
4554 static void
4555 init_base_vars (void)
4556 {
4557 struct constraint_expr lhs, rhs;
4558
4559 /* Create the NULL variable, used to represent that a variable points
4560 to NULL. */
4561 nothing_tree = create_tmp_var_raw (void_type_node, "NULL");
4562 var_nothing = new_var_info (nothing_tree, 0, "NULL");
4563 insert_vi_for_tree (nothing_tree, var_nothing);
4564 var_nothing->is_artificial_var = 1;
4565 var_nothing->offset = 0;
4566 var_nothing->size = ~0;
4567 var_nothing->fullsize = ~0;
4568 var_nothing->is_special_var = 1;
4569 nothing_id = 0;
4570 VEC_safe_push (varinfo_t, heap, varmap, var_nothing);
4571
4572 /* Create the ANYTHING variable, used to represent that a variable
4573 points to some unknown piece of memory. */
4574 anything_tree = create_tmp_var_raw (void_type_node, "ANYTHING");
4575 var_anything = new_var_info (anything_tree, 1, "ANYTHING");
4576 insert_vi_for_tree (anything_tree, var_anything);
4577 var_anything->is_artificial_var = 1;
4578 var_anything->size = ~0;
4579 var_anything->offset = 0;
4580 var_anything->next = NULL;
4581 var_anything->fullsize = ~0;
4582 var_anything->is_special_var = 1;
4583 anything_id = 1;
4584
4585 /* Anything points to anything. This makes deref constraints just
4586 work in the presence of linked list and other p = *p type loops,
4587 by saying that *ANYTHING = ANYTHING. */
4588 VEC_safe_push (varinfo_t, heap, varmap, var_anything);
4589 lhs.type = SCALAR;
4590 lhs.var = anything_id;
4591 lhs.offset = 0;
4592 rhs.type = ADDRESSOF;
4593 rhs.var = anything_id;
4594 rhs.offset = 0;
4595
4596 /* This specifically does not use process_constraint because
4597 process_constraint ignores all anything = anything constraints, since all
4598 but this one are redundant. */
4599 VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));
4600
4601 /* Create the READONLY variable, used to represent that a variable
4602 points to readonly memory. */
4603 readonly_tree = create_tmp_var_raw (void_type_node, "READONLY");
4604 var_readonly = new_var_info (readonly_tree, 2, "READONLY");
4605 var_readonly->is_artificial_var = 1;
4606 var_readonly->offset = 0;
4607 var_readonly->size = ~0;
4608 var_readonly->fullsize = ~0;
4609 var_readonly->next = NULL;
4610 var_readonly->is_special_var = 1;
4611 insert_vi_for_tree (readonly_tree, var_readonly);
4612 readonly_id = 2;
4613 VEC_safe_push (varinfo_t, heap, varmap, var_readonly);
4614
4615 /* readonly memory points to anything, in order to make deref
4616 easier. In reality, it points to anything the particular
4617 readonly variable can point to, but we don't track this
4618 separately. */
4619 lhs.type = SCALAR;
4620 lhs.var = readonly_id;
4621 lhs.offset = 0;
4622 rhs.type = ADDRESSOF;
4623 rhs.var = anything_id;
4624 rhs.offset = 0;
4625
4626 process_constraint (new_constraint (lhs, rhs));
4627
4628 /* Create the INTEGER variable, used to represent that a variable points
4629 to an INTEGER. */
4630 integer_tree = create_tmp_var_raw (void_type_node, "INTEGER");
4631 var_integer = new_var_info (integer_tree, 3, "INTEGER");
4632 insert_vi_for_tree (integer_tree, var_integer);
4633 var_integer->is_artificial_var = 1;
4634 var_integer->size = ~0;
4635 var_integer->fullsize = ~0;
4636 var_integer->offset = 0;
4637 var_integer->next = NULL;
4638 var_integer->is_special_var = 1;
4639 integer_id = 3;
4640 VEC_safe_push (varinfo_t, heap, varmap, var_integer);
4641
4642 /* INTEGER = ANYTHING, because we don't know where a dereference of
4643 a random integer will point to. */
4644 lhs.type = SCALAR;
4645 lhs.var = integer_id;
4646 lhs.offset = 0;
4647 rhs.type = ADDRESSOF;
4648 rhs.var = anything_id;
4649 rhs.offset = 0;
4650 process_constraint (new_constraint (lhs, rhs));
4651
4652 /* Create the ESCAPED_VARS variable used to represent variables that
4653 escape this function. */
4654 escaped_vars_tree = create_tmp_var_raw (void_type_node, "ESCAPED_VARS");
4655 var_escaped_vars = new_var_info (escaped_vars_tree, 4, "ESCAPED_VARS");
4656 insert_vi_for_tree (escaped_vars_tree, var_escaped_vars);
4657 var_escaped_vars->is_artificial_var = 1;
4658 var_escaped_vars->size = ~0;
4659 var_escaped_vars->fullsize = ~0;
4660 var_escaped_vars->offset = 0;
4661 var_escaped_vars->next = NULL;
4662 escaped_vars_id = 4;
4663 VEC_safe_push (varinfo_t, heap, varmap, var_escaped_vars);
4664
4665 /* ESCAPED_VARS = *ESCAPED_VARS */
4666 lhs.type = SCALAR;
4667 lhs.var = escaped_vars_id;
4668 lhs.offset = 0;
4669 rhs.type = DEREF;
4670 rhs.var = escaped_vars_id;
4671 rhs.offset = 0;
4672 process_constraint (new_constraint (lhs, rhs));
4673
4674 }
4675
4676 /* Initialize things necessary to perform PTA */
4677
4678 static void
4679 init_alias_vars (void)
4680 {
4681 bitmap_obstack_initialize (&pta_obstack);
4682 bitmap_obstack_initialize (&oldpta_obstack);
4683 bitmap_obstack_initialize (&predbitmap_obstack);
4684
4685 constraint_pool = create_alloc_pool ("Constraint pool",
4686 sizeof (struct constraint), 30);
4687 variable_info_pool = create_alloc_pool ("Variable info pool",
4688 sizeof (struct variable_info), 30);
4689 constraints = VEC_alloc (constraint_t, heap, 8);
4690 varmap = VEC_alloc (varinfo_t, heap, 8);
4691 vi_for_tree = pointer_map_create ();
4692
4693 memset (&stats, 0, sizeof (stats));
4694 init_base_vars ();
4695 }
4696
4697 /* Given a statement STMT, generate necessary constraints to
4698 escaped_vars for the escaping variables. */
4699
4700 static void
4701 find_escape_constraints (tree stmt)
4702 {
4703 enum escape_type stmt_escape_type = is_escape_site (stmt);
4704 tree rhs;
4705 VEC(ce_s, heap) *rhsc = NULL;
4706 struct constraint_expr *c;
4707 size_t i;
4708
4709 if (stmt_escape_type == NO_ESCAPE)
4710 return;
4711
4712 if (TREE_CODE (stmt) == RETURN_EXPR)
4713 {
4714 /* Returns are either bare, with an embedded MODIFY_EXPR, or
4715 just a plain old expression. */
4716 if (!TREE_OPERAND (stmt, 0))
4717 return;
4718 if (TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
4719 rhs = TREE_OPERAND (TREE_OPERAND (stmt, 0), 1);
4720 else
4721 rhs = TREE_OPERAND (stmt, 0);
4722
4723 get_constraint_for (rhs, &rhsc);
4724 for (i = 0; VEC_iterate (ce_s, rhsc, i, c); i++)
4725 make_constraint_to_escaped (*c);
4726 VEC_free (ce_s, heap, rhsc);
4727 return;
4728 }
4729 else if (TREE_CODE (stmt) == ASM_EXPR)
4730 {
4731 /* Whatever the inputs of the ASM are, escape. */
4732 tree arg;
4733
4734 for (arg = ASM_INPUTS (stmt); arg; arg = TREE_CHAIN (arg))
4735 {
4736 rhsc = NULL;
4737 get_constraint_for (TREE_VALUE (arg), &rhsc);
4738 for (i = 0; VEC_iterate (ce_s, rhsc, i, c); i++)
4739 make_constraint_to_escaped (*c);
4740 VEC_free (ce_s, heap, rhsc);
4741 }
4742 return;
4743 }
4744 else if (TREE_CODE (stmt) == CALL_EXPR
4745 || (TREE_CODE (stmt) == MODIFY_EXPR
4746 && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR))
4747 {
4748 /* Calls cause all of the arguments passed in to escape. */
4749 tree arg;
4750
4751 if (TREE_CODE (stmt) == MODIFY_EXPR)
4752 stmt = TREE_OPERAND (stmt, 1);
4753 for (arg = TREE_OPERAND (stmt, 1); arg; arg = TREE_CHAIN (arg))
4754 {
4755 if (POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arg))))
4756 {
4757 rhsc = NULL;
4758 get_constraint_for (TREE_VALUE (arg), &rhsc);
4759 for (i = 0; VEC_iterate (ce_s, rhsc, i, c); i++)
4760 make_constraint_to_escaped (*c);
4761 VEC_free (ce_s, heap, rhsc);
4762 }
4763 }
4764 return;
4765 }
4766 else
4767 {
4768 gcc_assert (TREE_CODE (stmt) == MODIFY_EXPR);
4769 }
4770
4771 gcc_assert (stmt_escape_type == ESCAPE_BAD_CAST
4772 || stmt_escape_type == ESCAPE_STORED_IN_GLOBAL
4773 || stmt_escape_type == ESCAPE_UNKNOWN);
4774 rhs = TREE_OPERAND (stmt, 1);
4775
4776 /* Look through casts for the real escaping variable.
4777 Constants don't really escape, so ignore them.
4778 Otherwise, whatever escapes must be on our RHS. */
4779 if (TREE_CODE (rhs) == NOP_EXPR
4780 || TREE_CODE (rhs) == CONVERT_EXPR
4781 || TREE_CODE (rhs) == NON_LVALUE_EXPR)
4782 {
4783 get_constraint_for (TREE_OPERAND (rhs, 0), &rhsc);
4784 }
4785 else if (CONSTANT_CLASS_P (rhs))
4786 return;
4787 else
4788 {
4789 get_constraint_for (rhs, &rhsc);
4790 }
4791 for (i = 0; VEC_iterate (ce_s, rhsc, i, c); i++)
4792 make_constraint_to_escaped (*c);
4793 VEC_free (ce_s, heap, rhsc);
4794 }
4795
4796
4797 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
4798 predecessor edges. */
4799
4800 static void
4801 remove_preds_and_fake_succs (constraint_graph_t graph)
4802 {
4803 unsigned int i;
4804
4805 /* Clear the implicit ref and address nodes from the successor
4806 lists. */
4807 for (i = 0; i < FIRST_REF_NODE; i++)
4808 {
4809 if (graph->succs[i])
4810 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
4811 FIRST_REF_NODE * 2);
4812 }
4813
4814 /* Free the successor list for the non-ref nodes. */
4815 for (i = FIRST_REF_NODE; i < graph->size; i++)
4816 {
4817 if (graph->succs[i])
4818 BITMAP_FREE (graph->succs[i]);
4819 }
4820
4821 /* Now shrink the successor list to the number of real variables,
4822 and blow away the predecessor bitmaps. */
4823 graph->size = VEC_length (varinfo_t, varmap);
4824 graph->succs = xrealloc (graph->succs, graph->size * sizeof (bitmap));
4825
4826 free (graph->implicit_preds);
4827 graph->implicit_preds = NULL;
4828 free (graph->preds);
4829 graph->preds = NULL;
4830 bitmap_obstack_release (&predbitmap_obstack);
4831 }
4832
4833 /* Create points-to sets for the current function. See the comments
4834 at the start of the file for an algorithmic overview. */
4835
4836 void
4837 compute_points_to_sets (struct alias_info *ai)
4838 {
4839 basic_block bb;
4840 struct scc_info *si;
4841
4842 timevar_push (TV_TREE_PTA);
4843
4844 init_alias_vars ();
4845 init_alias_heapvars ();
4846
4847 intra_create_variable_infos ();
4848
4849 /* Now walk all statements and derive aliases. */
4850 FOR_EACH_BB (bb)
4851 {
4852 block_stmt_iterator bsi;
4853 tree phi;
4854
4855 for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
4856 {
4857 if (is_gimple_reg (PHI_RESULT (phi)))
4858 {
4859 find_func_aliases (phi);
4860 /* Update various related attributes like escaped
4861 addresses, pointer dereferences for loads and stores.
4862 This is used when creating name tags and alias
4863 sets. */
4864 update_alias_info (phi, ai);
4865 }
4866 }
4867
4868 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4869 {
4870 tree stmt = bsi_stmt (bsi);
4871
4872 find_func_aliases (stmt);
4873 find_escape_constraints (stmt);
4874 /* Update various related attributes like escaped
4875 addresses, pointer dereferences for loads and stores.
4876 This is used when creating name tags and alias
4877 sets. */
4878 update_alias_info (stmt, ai);
4879 }
4880 }
4881
4882
4883 if (dump_file)
4884 {
4885 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
4886 dump_constraints (dump_file);
4887 }
4888
4889 if (dump_file)
4890 fprintf (dump_file,
4891 "\nCollapsing static cycles and doing variable "
4892 "substitution:\n");
4893
4894 build_pred_graph ();
4895 si = perform_var_substitution (graph);
4896 move_complex_constraints (graph, si);
4897 free_var_substitution_info (si);
4898
4899 build_succ_graph ();
4900 find_indirect_cycles (graph);
4901
4902 /* Implicit nodes and predecessors are no longer necessary at this
4903 point. */
4904 remove_preds_and_fake_succs (graph);
4905
4906 if (dump_file)
4907 fprintf (dump_file, "\nSolving graph:\n");
4908
4909 solve_graph (graph);
4910
4911 if (dump_file)
4912 dump_sa_points_to_info (dump_file);
4913 have_alias_info = true;
4914
4915 timevar_pop (TV_TREE_PTA);
4916 }
4917
4918 /* Delete created points-to sets. */
4919
4920 void
4921 delete_points_to_sets (void)
4922 {
4923 varinfo_t v;
4924 int i;
4925
4926 if (dump_file && (dump_flags & TDF_STATS))
4927 fprintf (dump_file, "Points to sets created:%d\n",
4928 stats.points_to_sets_created);
4929
4930 pointer_map_destroy (vi_for_tree);
4931 bitmap_obstack_release (&pta_obstack);
4932 VEC_free (constraint_t, heap, constraints);
4933
4934 for (i = 0; VEC_iterate (varinfo_t, varmap, i, v); i++)
4935 VEC_free (constraint_t, heap, graph->complex[i]);
4936 free (graph->complex);
4937
4938 free (graph->rep);
4939 free (graph->succs);
4940 free (graph->indirect_cycles);
4941 free (graph);
4942
4943 VEC_free (varinfo_t, heap, varmap);
4944 free_alloc_pool (variable_info_pool);
4945 free_alloc_pool (constraint_pool);
4946 have_alias_info = false;
4947 }
4948
4949 /* Return true if we should execute IPA PTA. */
4950 static bool
4951 gate_ipa_pta (void)
4952 {
4953 return (flag_unit_at_a_time != 0
4954 && flag_ipa_pta
4955 /* Don't bother doing anything if the program has errors. */
4956 && !(errorcount || sorrycount));
4957 }
4958
4959 /* Execute the driver for IPA PTA. */
4960 static unsigned int
4961 ipa_pta_execute (void)
4962 {
4963 #if 0
4964 struct cgraph_node *node;
4965 in_ipa_mode = 1;
4966 init_alias_heapvars ();
4967 init_alias_vars ();
4968
4969 for (node = cgraph_nodes; node; node = node->next)
4970 {
4971 if (!node->analyzed || cgraph_is_master_clone (node))
4972 {
4973 unsigned int varid;
4974
4975 varid = create_function_info_for (node->decl,
4976 cgraph_node_name (node));
4977 if (node->local.externally_visible)
4978 {
4979 varinfo_t fi = get_varinfo (varid);
4980 for (; fi; fi = fi->next)
4981 make_constraint_from_escaped (fi);
4982 }
4983 }
4984 }
4985 for (node = cgraph_nodes; node; node = node->next)
4986 {
4987 if (node->analyzed && cgraph_is_master_clone (node))
4988 {
4989 struct function *cfun = DECL_STRUCT_FUNCTION (node->decl);
4990 basic_block bb;
4991 tree old_func_decl = current_function_decl;
4992 if (dump_file)
4993 fprintf (dump_file,
4994 "Generating constraints for %s\n",
4995 cgraph_node_name (node));
4996 push_cfun (cfun);
4997 current_function_decl = node->decl;
4998
4999 FOR_EACH_BB_FN (bb, cfun)
5000 {
5001 block_stmt_iterator bsi;
5002 tree phi;
5003
5004 for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
5005 {
5006 if (is_gimple_reg (PHI_RESULT (phi)))
5007 {
5008 find_func_aliases (phi);
5009 }
5010 }
5011
5012 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
5013 {
5014 tree stmt = bsi_stmt (bsi);
5015 find_func_aliases (stmt);
5016 }
5017 }
5018 current_function_decl = old_func_decl;
5019 pop_cfun ();
5020 }
5021 else
5022 {
5023 /* Make point to anything. */
5024 }
5025 }
5026
5027 build_constraint_graph ();
5028
5029 if (dump_file)
5030 {
5031 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
5032 dump_constraints (dump_file);
5033 }
5034
5035 if (dump_file)
5036 fprintf (dump_file,
5037 "\nCollapsing static cycles and doing variable "
5038 "substitution:\n");
5039
5040 find_and_collapse_graph_cycles (graph, false);
5041 perform_var_substitution (graph);
5042
5043 if (dump_file)
5044 fprintf (dump_file, "\nSolving graph:\n");
5045
5046 solve_graph (graph);
5047
5048 if (dump_file)
5049 dump_sa_points_to_info (dump_file);
5050 in_ipa_mode = 0;
5051 delete_alias_heapvars ();
5052 delete_points_to_sets ();
5053 #endif
5054 return 0;
5055 }
5056
5057 struct tree_opt_pass pass_ipa_pta =
5058 {
5059 "pta", /* name */
5060 gate_ipa_pta, /* gate */
5061 ipa_pta_execute, /* execute */
5062 NULL, /* sub */
5063 NULL, /* next */
5064 0, /* static_pass_number */
5065 TV_IPA_PTA, /* tv_id */
5066 0, /* properties_required */
5067 0, /* properties_provided */
5068 0, /* properties_destroyed */
5069 0, /* todo_flags_start */
5070 0, /* todo_flags_finish */
5071 0 /* letter */
5072 };
5073
5074 /* Initialize the heapvar-for-statement mapping. */
5075 void
5076 init_alias_heapvars (void)
5077 {
5078 if (!heapvar_for_stmt)
5079 heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, tree_map_eq,
5080 NULL);
5081 nonlocal_all = NULL_TREE;
5082 }
5083
5084 void
5085 delete_alias_heapvars (void)
5086 {
5087 nonlocal_all = NULL_TREE;
5088 htab_delete (heapvar_for_stmt);
5089 heapvar_for_stmt = NULL;
5090 }
5091
5092 #include "gt-tree-ssa-structalias.h"
5093