1 /* Tree based points-to analysis
2    Copyright (C) 2005-2013 Free Software Foundation, Inc.
3    Contributed by Daniel Berlin <dberlin@dberlin.org>
4 
5    This file is part of GCC.
6 
7    GCC is free software; you can redistribute it and/or modify
8    under the terms of the GNU General Public License as published by
9    the Free Software Foundation; either version 3 of the License, or
10    (at your option) any later version.
11 
12    GCC is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15    GNU General Public License for more details.
16 
17    You should have received a copy of the GNU General Public License
18    along with GCC; see the file COPYING3.  If not see
19    <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "ggc.h"
26 #include "obstack.h"
27 #include "bitmap.h"
28 #include "flags.h"
29 #include "basic-block.h"
30 #include "tree.h"
31 #include "tree-flow.h"
32 #include "tree-inline.h"
33 #include "diagnostic-core.h"
34 #include "gimple.h"
35 #include "hashtab.h"
36 #include "function.h"
37 #include "cgraph.h"
38 #include "tree-pass.h"
39 #include "alloc-pool.h"
40 #include "splay-tree.h"
41 #include "params.h"
42 #include "cgraph.h"
43 #include "alias.h"
44 #include "pointer-set.h"
45 
46 /* The idea behind this analyzer is to generate set constraints from the
47    program, then solve the resulting constraints in order to generate the
48    points-to sets.
49 
50    Set constraints are a way of modeling program analysis problems that
51    involve sets.  They consist of an inclusion constraint language,
52    describing the variables (each variable is a set) and operations that
53    are involved on the variables, and a set of rules that derive facts
54    from these operations.  To solve a system of set constraints, you derive
55    all possible facts under the rules, which gives you the correct sets
56    as a consequence.
57 
58    See  "Efficient Field-sensitive pointer analysis for C" by "David
59    J. Pearce and Paul H. J. Kelly and Chris Hankin, at
60    http://citeseer.ist.psu.edu/pearce04efficient.html
61 
62    Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
   of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
64    http://citeseer.ist.psu.edu/heintze01ultrafast.html
65 
66    There are three types of real constraint expressions, DEREF,
67    ADDRESSOF, and SCALAR.  Each constraint expression consists
68    of a constraint type, a variable, and an offset.
69 
70    SCALAR is a constraint expression type used to represent x, whether
71    it appears on the LHS or the RHS of a statement.
72    DEREF is a constraint expression type used to represent *x, whether
73    it appears on the LHS or the RHS of a statement.
74    ADDRESSOF is a constraint expression used to represent &x, whether
75    it appears on the LHS or the RHS of a statement.
76 
77    Each pointer variable in the program is assigned an integer id, and
78    each field of a structure variable is assigned an integer id as well.
79 
80    Structure variables are linked to their list of fields through a "next
81    field" in each variable that points to the next field in offset
82    order.
83    Each variable for a structure field has
84 
85    1. "size", that tells the size in bits of that field.
   2. "fullsize", that tells the size in bits of the entire structure.
87    3. "offset", that tells the offset in bits from the beginning of the
88    structure to this field.
89 
90    Thus,
91    struct f
92    {
93      int a;
94      int b;
95    } foo;
96    int *bar;
97 
98    looks like
99 
100    foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
101    foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
102    bar -> id 3, size 32, offset 0, fullsize 32, next NULL
103 
104 
105   In order to solve the system of set constraints, the following is
106   done:
107 
108   1. Each constraint variable x has a solution set associated with it,
109   Sol(x).
110 
111   2. Constraints are separated into direct, copy, and complex.
112   Direct constraints are ADDRESSOF constraints that require no extra
113   processing, such as P = &Q
114   Copy constraints are those of the form P = Q.
115   Complex constraints are all the constraints involving dereferences
116   and offsets (including offsetted copies).
117 
118   3. All direct constraints of the form P = &Q are processed, such
119   that Q is added to Sol(P)
120 
121   4. All complex constraints for a given constraint variable are stored in a
122   linked list attached to that variable's node.
123 
124   5. A directed graph is built out of the copy constraints. Each
125   constraint variable is a node in the graph, and an edge from
126   Q to P is added for each copy constraint of the form P = Q
127 
128   6. The graph is then walked, and solution sets are
129   propagated along the copy edges, such that an edge from Q to P
130   causes Sol(P) <- Sol(P) union Sol(Q).
131 
132   7.  As we visit each node, all complex constraints associated with
133   that node are processed by adding appropriate copy edges to the graph, or the
134   appropriate variables to the solution set.
135 
136   8. The process of walking the graph is iterated until no solution
137   sets change.
138 
139   Prior to walking the graph in steps 6 and 7, We perform static
140   cycle elimination on the constraint graph, as well
141   as off-line variable substitution.
142 
143   TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
144   on and turned into anything), but isn't.  You can just see what offset
145   inside the pointed-to struct it's going to access.
146 
147   TODO: Constant bounded arrays can be handled as if they were structs of the
148   same number of elements.
149 
150   TODO: Modeling heap and incoming pointers becomes much better if we
151   add fields to them as we discover them, which we could do.
152 
153   TODO: We could handle unions, but to be honest, it's probably not
154   worth the pain or slowdown.  */
155 
156 /* IPA-PTA optimizations possible.
157 
158    When the indirect function called is ANYTHING we can add disambiguation
159    based on the function signatures (or simply the parameter count which
160    is the varinfo size).  We also do not need to consider functions that
161    do not have their address taken.
162 
163    The is_global_var bit which marks escape points is overly conservative
164    in IPA mode.  Split it to is_escape_point and is_global_var - only
165    externally visible globals are escape points in IPA mode.  This is
166    also needed to fix the pt_solution_includes_global predicate
167    (and thus ptr_deref_may_alias_global_p).
168 
169    The way we introduce DECL_PT_UID to avoid fixing up all points-to
170    sets in the translation unit when we copy a DECL during inlining
171    pessimizes precision.  The advantage is that the DECL_PT_UID keeps
172    compile-time and memory usage overhead low - the points-to sets
173    do not grow or get unshared as they would during a fixup phase.
174    An alternative solution is to delay IPA PTA until after all
175    inlining transformations have been applied.
176 
177    The way we propagate clobber/use information isn't optimized.
178    It should use a new complex constraint that properly filters
179    out local variables of the callee (though that would make
180    the sets invalid after inlining).  OTOH we might as well
181    admit defeat to WHOPR and simply do all the clobber/use analysis
182    and propagation after PTA finished but before we threw away
183    points-to information for memory variables.  WHOPR and PTA
184    do not play along well anyway - the whole constraint solving
185    would need to be done in WPA phase and it will be very interesting
186    to apply the results to local SSA names during LTRANS phase.
187 
188    We probably should compute a per-function unit-ESCAPE solution
189    propagating it simply like the clobber / uses solutions.  The
   solution can go alongside the non-IPA escaped solution and be
191    used to query which vars escape the unit through a function.
192 
193    We never put function decls in points-to sets so we do not
194    keep the set of called functions for indirect calls.
195 
196    And probably more.  */
197 
198 static bool use_field_sensitive = true;
199 static int in_ipa_mode = 0;
200 
201 /* Used for predecessor bitmaps. */
202 static bitmap_obstack predbitmap_obstack;
203 
204 /* Used for points-to sets.  */
205 static bitmap_obstack pta_obstack;
206 
207 /* Used for oldsolution members of variables. */
208 static bitmap_obstack oldpta_obstack;
209 
210 /* Used for per-solver-iteration bitmaps.  */
211 static bitmap_obstack iteration_obstack;
212 
213 static unsigned int create_variable_info_for (tree, const char *);
214 typedef struct constraint_graph *constraint_graph_t;
215 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
216 
217 struct constraint;
218 typedef struct constraint *constraint_t;
219 
220 
/* Like EXECUTE_IF_SET_IN_BITMAP, but tolerate a NULL bitmap A by
   iterating zero times.  B is the starting bit, C receives each set
   bit and D is the bitmap_iterator to use.  */
#define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d)	\
  if (a)						\
    EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
224 
/* Counters collected while building and solving the constraint system,
   for statistics dumping.  */
static struct constraint_stats
{
  unsigned int total_vars;
  unsigned int nonpointer_vars;
  /* Variables unified before solving vs. during solving.  */
  unsigned int unified_vars_static;
  unsigned int unified_vars_dynamic;
  unsigned int iterations;
  unsigned int num_edges;
  unsigned int num_implicit_edges;
  unsigned int points_to_sets_created;
} stats;
236 
/* A constraint variable.  One of these exists per constraint node:
   decls, SSA pointers, and artificial variables created by the
   analysis.  Each field of a structure gets its own variable_info,
   chained in offset order through NEXT.  */
struct variable_info
{
  /* ID of this variable; index into the varmap vector.  */
  unsigned int id;

  /* True if this is a variable created by the constraint analysis, such as
     heap variables and constraints we had to break up.  */
  unsigned int is_artificial_var : 1;

  /* True if this is a special variable whose solution set should not be
     changed.  */
  unsigned int is_special_var : 1;

  /* True for variables whose size is not known or variable.  */
  unsigned int is_unknown_size_var : 1;

  /* True for (sub-)fields that represent a whole variable.  */
  unsigned int is_full_var : 1;

  /* True if this is a heap variable.  */
  unsigned int is_heap_var : 1;

  /* True if this field may contain pointers.  */
  unsigned int may_have_pointers : 1;

  /* True if this field has only restrict qualified pointers.  */
  unsigned int only_restrict_pointers : 1;

  /* True if this represents a global variable.  */
  unsigned int is_global_var : 1;

  /* True if this represents a IPA function info.  */
  unsigned int is_fn_info : 1;

  /* A link to the variable for the next field in this structure, in
     offset order; NULL for the last (or only) field.  */
  struct variable_info *next;

  /* Offset of this variable, in bits, from the base variable  */
  unsigned HOST_WIDE_INT offset;

  /* Size of the variable, in bits.  */
  unsigned HOST_WIDE_INT size;

  /* Full size of the base variable, in bits.  */
  unsigned HOST_WIDE_INT fullsize;

  /* Name of this variable, used for dumping.  */
  const char *name;

  /* Tree that this variable is associated with; NULL_TREE for
     artificial variables (see new_var_info).  */
  tree decl;

  /* Points-to set for this variable.  */
  bitmap solution;

  /* Old points-to set for this variable; starts out NULL and is
     allocated lazily (new_var_info initializes it to NULL).  */
  bitmap oldsolution;
};
typedef struct variable_info *varinfo_t;
296 
297 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
298 static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
299 						   unsigned HOST_WIDE_INT);
300 static varinfo_t lookup_vi_for_tree (tree);
301 static inline bool type_can_have_subvars (const_tree);
302 
303 /* Pool of variable info structures.  */
304 static alloc_pool variable_info_pool;
305 
306 /* Map varinfo to final pt_solution.  */
307 static pointer_map_t *final_solutions;
308 struct obstack final_solutions_obstack;
309 
310 /* Table of variable info structures for constraint variables.
311    Indexed directly by variable info id.  */
312 static vec<varinfo_t> varmap;
313 
/* Return the variable info structure with id N, indexed directly
   into the variable map.  */

static inline varinfo_t
get_varinfo (unsigned int n)
{
  return varmap[n];
}
321 
/* Static IDs for the special variables.  These assume the special
   variables are created first, in exactly this order, before any
   other variable is assigned an id.  */
enum { nothing_id = 0, anything_id = 1, readonly_id = 2,
       escaped_id = 3, nonlocal_id = 4,
       storedanything_id = 5, integer_id = 6 };
326 
327 /* Return a new variable info structure consisting for a variable
328    named NAME, and using constraint graph node NODE.  Append it
329    to the vector of variable info structures.  */
330 
331 static varinfo_t
new_var_info(tree t,const char * name)332 new_var_info (tree t, const char *name)
333 {
334   unsigned index = varmap.length ();
335   varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
336 
337   ret->id = index;
338   ret->name = name;
339   ret->decl = t;
340   /* Vars without decl are artificial and do not have sub-variables.  */
341   ret->is_artificial_var = (t == NULL_TREE);
342   ret->is_special_var = false;
343   ret->is_unknown_size_var = false;
344   ret->is_full_var = (t == NULL_TREE);
345   ret->is_heap_var = false;
346   ret->may_have_pointers = true;
347   ret->only_restrict_pointers = false;
348   ret->is_global_var = (t == NULL_TREE);
349   ret->is_fn_info = false;
350   if (t && DECL_P (t))
351     ret->is_global_var = (is_global_var (t)
352 			  /* We have to treat even local register variables
353 			     as escape points.  */
354 			  || (TREE_CODE (t) == VAR_DECL
355 			      && DECL_HARD_REGISTER (t)));
356   ret->solution = BITMAP_ALLOC (&pta_obstack);
357   ret->oldsolution = NULL;
358   ret->next = NULL;
359 
360   stats.total_vars++;
361 
362   varmap.safe_push (ret);
363 
364   return ret;
365 }
366 
367 
368 /* A map mapping call statements to per-stmt variables for uses
369    and clobbers specific to the call.  */
370 struct pointer_map_t *call_stmt_vars;
371 
372 /* Lookup or create the variable for the call statement CALL.  */
373 
374 static varinfo_t
get_call_vi(gimple call)375 get_call_vi (gimple call)
376 {
377   void **slot_p;
378   varinfo_t vi, vi2;
379 
380   slot_p = pointer_map_insert (call_stmt_vars, call);
381   if (*slot_p)
382     return (varinfo_t) *slot_p;
383 
384   vi = new_var_info (NULL_TREE, "CALLUSED");
385   vi->offset = 0;
386   vi->size = 1;
387   vi->fullsize = 2;
388   vi->is_full_var = true;
389 
390   vi->next = vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED");
391   vi2->offset = 1;
392   vi2->size = 1;
393   vi2->fullsize = 2;
394   vi2->is_full_var = true;
395 
396   *slot_p = (void *) vi;
397   return vi;
398 }
399 
400 /* Lookup the variable for the call statement CALL representing
401    the uses.  Returns NULL if there is nothing special about this call.  */
402 
403 static varinfo_t
lookup_call_use_vi(gimple call)404 lookup_call_use_vi (gimple call)
405 {
406   void **slot_p;
407 
408   slot_p = pointer_map_contains (call_stmt_vars, call);
409   if (slot_p)
410     return (varinfo_t) *slot_p;
411 
412   return NULL;
413 }
414 
415 /* Lookup the variable for the call statement CALL representing
416    the clobbers.  Returns NULL if there is nothing special about this call.  */
417 
418 static varinfo_t
lookup_call_clobber_vi(gimple call)419 lookup_call_clobber_vi (gimple call)
420 {
421   varinfo_t uses = lookup_call_use_vi (call);
422   if (!uses)
423     return NULL;
424 
425   return uses->next;
426 }
427 
/* Lookup or create the variable for the call statement CALL representing
   the uses.  */

static varinfo_t
get_call_use_vi (gimple call)
{
  /* The "uses" variable is the head of the pair built by get_call_vi.  */
  return get_call_vi (call);
}
436 
/* Lookup or create the variable for the call statement CALL representing
   the clobbers.  */

static varinfo_t ATTRIBUTE_UNUSED
get_call_clobber_vi (gimple call)
{
  /* The "clobbers" variable is chained behind the "uses" variable.  */
  return get_call_vi (call)->next;
}
445 
446 
/* The three operand kinds of a constraint expression: a plain variable
   (x), a dereference (*x) and an address-of (&x).  */
typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
448 
449 /* An expression that appears in a constraint.  */
450 
struct constraint_expr
{
  /* Constraint type: SCALAR (x), DEREF (*x) or ADDRESSOF (&x).  */
  constraint_expr_type type;

  /* Variable we are referring to in the constraint; an index into
     the variable map.  */
  unsigned int var;

  /* Offset, in bits, of this constraint from the beginning of
     variables it ends up referring to.

     IOW, in a deref constraint, we would deref, get the result set,
     then add OFFSET to each member.   */
  HOST_WIDE_INT offset;
};
466 
/* Use 0x8000... as special unknown offset.  Build the value by shifting
   in the unsigned domain and converting back: left-shifting a negative
   value, as the old `(HOST_WIDE_INT)-1 << (HOST_BITS_PER_WIDE_INT-1)'
   did, is undefined behavior in C/C++.  The conversion back to the
   signed type is well-defined (modulo wraparound) with GCC.  */
#define UNKNOWN_OFFSET \
  ((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
469 
470 typedef struct constraint_expr ce_s;
471 static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
472 static void get_constraint_for (tree, vec<ce_s> *);
473 static void get_constraint_for_rhs (tree, vec<ce_s> *);
474 static void do_deref (vec<ce_s> *);
475 
476 /* Our set constraints are made up of two constraint expressions, one
477    LHS, and one RHS.
478 
479    As described in the introduction, our set constraints each represent an
480    operation between set valued variables.
481 */
struct constraint
{
  /* Left-hand and right-hand side of the inclusion constraint
     lhs = rhs.  */
  struct constraint_expr lhs;
  struct constraint_expr rhs;
};
487 
488 /* List of constraints that we use to build the constraint graph from.  */
489 
490 static vec<constraint_t> constraints;
491 static alloc_pool constraint_pool;
492 
/* The constraint graph is represented as an array of bitmaps
   containing successor nodes, indexed by node id.  */

struct constraint_graph
{
  /* Size of this graph, which may be different than the number of
     nodes in the variable map (e.g. when dereference nodes at
     FIRST_REF_NODE and above are present).  */
  unsigned int size;

  /* Explicit successors of each node. */
  bitmap *succs;

  /* Implicit predecessors of each node (Used for variable
     substitution). */
  bitmap *implicit_preds;

  /* Explicit predecessors of each node (Used for variable substitution).  */
  bitmap *preds;

  /* Indirect cycle representatives, or -1 if the node has no indirect
     cycles.  */
  int *indirect_cycles;

  /* Representative node for a node.  rep[a] == a unless the node has
     been unified (see find and unite, which maintain this array). */
  unsigned int *rep;

  /* Equivalence class representative for a label.  This is used for
     variable substitution.  */
  int *eq_rep;

  /* Pointer equivalence label for a node.  All nodes with the same
     pointer equivalence label can be unified together at some point
     (either during constraint optimization or after the constraint
     graph is built).  */
  unsigned int *pe;

  /* Pointer equivalence representative for a label.  This is used to
     handle nodes that are pointer equivalent but not location
     equivalent.  We can unite these once the addressof constraints
     are transformed into initial points-to sets.  */
  int *pe_rep;

  /* Pointer equivalence label for each node, used during variable
     substitution.  */
  unsigned int *pointer_label;

  /* Location equivalence label for each node, used during location
     equivalence finding.  */
  unsigned int *loc_label;

  /* Pointed-by set for each node, used during location equivalence
     finding.  This is pointed-by rather than pointed-to, because it
     is constructed using the predecessor graph.  */
  bitmap *pointed_by;

  /* Points to sets for pointer equivalence.  This is *not* the actual
     points-to sets for nodes.  */
  bitmap *points_to;

  /* Bitmap of nodes where the bit is set if the node is a direct
     node.  Used for variable substitution.  */
  sbitmap direct_nodes;

  /* Bitmap of nodes where the bit is set if the node is address
     taken.  Used for variable substitution.  */
  bitmap address_taken;

  /* Vector of complex constraints for each graph node.  Complex
     constraints are those involving dereferences or offsets that are
     not 0.  */
  vec<constraint_t> *complex;
};
566 
567 static constraint_graph_t graph;
568 
/* During variable substitution and the offline version of indirect
   cycle finding, we create nodes to represent dereferences and
   address taken constraints.  These represent where these start and
   end.  */
/* One reference ("*x") node exists per ordinary variable, laid out
   directly after them, so they occupy [FIRST_REF_NODE, LAST_REF_NODE].  */
#define FIRST_REF_NODE (varmap).length ()
#define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
575 
576 /* Return the representative node for NODE, if NODE has been unioned
577    with another NODE.
578    This function performs path compression along the way to finding
579    the representative.  */
580 
581 static unsigned int
find(unsigned int node)582 find (unsigned int node)
583 {
584   gcc_assert (node < graph->size);
585   if (graph->rep[node] != node)
586     return graph->rep[node] = find (graph->rep[node]);
587   return node;
588 }
589 
590 /* Union the TO and FROM nodes to the TO nodes.
591    Note that at some point in the future, we may want to do
592    union-by-rank, in which case we are going to have to return the
593    node we unified to.  */
594 
595 static bool
unite(unsigned int to,unsigned int from)596 unite (unsigned int to, unsigned int from)
597 {
598   gcc_assert (to < graph->size && from < graph->size);
599   if (to != from && graph->rep[from] != to)
600     {
601       graph->rep[from] = to;
602       return true;
603     }
604   return false;
605 }
606 
607 /* Create a new constraint consisting of LHS and RHS expressions.  */
608 
609 static constraint_t
new_constraint(const struct constraint_expr lhs,const struct constraint_expr rhs)610 new_constraint (const struct constraint_expr lhs,
611 		const struct constraint_expr rhs)
612 {
613   constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
614   ret->lhs = lhs;
615   ret->rhs = rhs;
616   return ret;
617 }
618 
619 /* Print out constraint C to FILE.  */
620 
621 static void
dump_constraint(FILE * file,constraint_t c)622 dump_constraint (FILE *file, constraint_t c)
623 {
624   if (c->lhs.type == ADDRESSOF)
625     fprintf (file, "&");
626   else if (c->lhs.type == DEREF)
627     fprintf (file, "*");
628   fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
629   if (c->lhs.offset == UNKNOWN_OFFSET)
630     fprintf (file, " + UNKNOWN");
631   else if (c->lhs.offset != 0)
632     fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
633   fprintf (file, " = ");
634   if (c->rhs.type == ADDRESSOF)
635     fprintf (file, "&");
636   else if (c->rhs.type == DEREF)
637     fprintf (file, "*");
638   fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
639   if (c->rhs.offset == UNKNOWN_OFFSET)
640     fprintf (file, " + UNKNOWN");
641   else if (c->rhs.offset != 0)
642     fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
643 }
644 
645 
646 void debug_constraint (constraint_t);
647 void debug_constraints (void);
648 void debug_constraint_graph (void);
649 void debug_solution_for_var (unsigned int);
650 void debug_sa_points_to_info (void);
651 
652 /* Print out constraint C to stderr.  */
653 
654 DEBUG_FUNCTION void
debug_constraint(constraint_t c)655 debug_constraint (constraint_t c)
656 {
657   dump_constraint (stderr, c);
658   fprintf (stderr, "\n");
659 }
660 
661 /* Print out all constraints to FILE */
662 
663 static void
dump_constraints(FILE * file,int from)664 dump_constraints (FILE *file, int from)
665 {
666   int i;
667   constraint_t c;
668   for (i = from; constraints.iterate (i, &c); i++)
669     if (c)
670       {
671 	dump_constraint (file, c);
672 	fprintf (file, "\n");
673       }
674 }
675 
/* Print out all constraints to stderr.  Callable from the debugger.  */

DEBUG_FUNCTION void
debug_constraints (void)
{
  dump_constraints (stderr, 0);
}
683 
684 /* Print the constraint graph in dot format.  */
685 
static void
dump_constraint_graph (FILE *file)
{
  unsigned int i;

  /* Only print the graph if it has already been initialized:  */
  if (!graph)
    return;

  /* Prints the header of the dot file:  */
  fprintf (file, "strict digraph {\n");
  fprintf (file, "  node [\n    shape = box\n  ]\n");
  fprintf (file, "  edge [\n    fontsize = \"12\"\n  ]\n");
  fprintf (file, "\n  // List of nodes and complex constraints in "
	   "the constraint graph:\n");

  /* The next lines print the nodes in the graph together with the
     complex constraints attached to them.  Only representative nodes
     are printed; nodes at FIRST_REF_NODE and above are dereference
     nodes and get a '*' prefix.  */
  for (i = 0; i < graph->size; i++)
    {
      if (find (i) != i)
	continue;
      if (i < FIRST_REF_NODE)
	fprintf (file, "\"%s\"", get_varinfo (i)->name);
      else
	fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
      if (graph->complex[i].exists ())
	{
	  unsigned j;
	  constraint_t c;
	  /* Emit the complex constraints as the node's label.  */
	  fprintf (file, " [label=\"\\N\\n");
	  for (j = 0; graph->complex[i].iterate (j, &c); ++j)
	    {
	      dump_constraint (file, c);
	      fprintf (file, "\\l");
	    }
	  fprintf (file, "\"]");
	}
      fprintf (file, ";\n");
    }

  /* Go over the edges.  Self-edges (after mapping endpoints through
     find) are skipped.  */
  fprintf (file, "\n  // Edges in the constraint graph:\n");
  for (i = 0; i < graph->size; i++)
    {
      unsigned j;
      bitmap_iterator bi;
      if (find (i) != i)
	continue;
      EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
	{
	  unsigned to = find (j);
	  if (i == to)
	    continue;
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (i)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
	  fprintf (file, " -> ");
	  if (to < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (to)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
	  fprintf (file, ";\n");
	}
    }

  /* Prints the tail of the dot file.  */
  fprintf (file, "}\n");
}
756 
/* Print out the constraint graph in dot format to stderr.  Callable
   from the debugger.  */

DEBUG_FUNCTION void
debug_constraint_graph (void)
{
  dump_constraint_graph (stderr);
}
764 
765 /* SOLVER FUNCTIONS
766 
767    The solver is a simple worklist solver, that works on the following
768    algorithm:
769 
770    sbitmap changed_nodes = all zeroes;
771    changed_count = 0;
772    For each node that is not already collapsed:
773        changed_count++;
774        set bit in changed nodes
775 
776    while (changed_count > 0)
777    {
778      compute topological ordering for constraint graph
779 
780      find and collapse cycles in the constraint graph (updating
781      changed if necessary)
782 
783      for each node (n) in the graph in topological order:
784        changed_count--;
785 
786        Process each complex constraint associated with the node,
787        updating changed if necessary.
788 
789        For each outgoing edge from n, propagate the solution from n to
790        the destination of the edge, updating changed as necessary.
791 
792    }  */
793 
794 /* Return true if two constraint expressions A and B are equal.  */
795 
796 static bool
constraint_expr_equal(struct constraint_expr a,struct constraint_expr b)797 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
798 {
799   return a.type == b.type && a.var == b.var && a.offset == b.offset;
800 }
801 
802 /* Return true if constraint expression A is less than constraint expression
803    B.  This is just arbitrary, but consistent, in order to give them an
804    ordering.  */
805 
806 static bool
constraint_expr_less(struct constraint_expr a,struct constraint_expr b)807 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
808 {
809   if (a.type == b.type)
810     {
811       if (a.var == b.var)
812 	return a.offset < b.offset;
813       else
814 	return a.var < b.var;
815     }
816   else
817     return a.type < b.type;
818 }
819 
820 /* Return true if constraint A is less than constraint B.  This is just
821    arbitrary, but consistent, in order to give them an ordering.  */
822 
823 static bool
constraint_less(const constraint_t & a,const constraint_t & b)824 constraint_less (const constraint_t &a, const constraint_t &b)
825 {
826   if (constraint_expr_less (a->lhs, b->lhs))
827     return true;
828   else if (constraint_expr_less (b->lhs, a->lhs))
829     return false;
830   else
831     return constraint_expr_less (a->rhs, b->rhs);
832 }
833 
834 /* Return true if two constraints A and B are equal.  */
835 
836 static bool
constraint_equal(struct constraint a,struct constraint b)837 constraint_equal (struct constraint a, struct constraint b)
838 {
839   return constraint_expr_equal (a.lhs, b.lhs)
840     && constraint_expr_equal (a.rhs, b.rhs);
841 }
842 
843 
844 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
845 
846 static constraint_t
constraint_vec_find(vec<constraint_t> vec,struct constraint lookfor)847 constraint_vec_find (vec<constraint_t> vec,
848 		     struct constraint lookfor)
849 {
850   unsigned int place;
851   constraint_t found;
852 
853   if (!vec.exists ())
854     return NULL;
855 
856   place = vec.lower_bound (&lookfor, constraint_less);
857   if (place >= vec.length ())
858     return NULL;
859   found = vec[place];
860   if (!constraint_equal (*found, lookfor))
861     return NULL;
862   return found;
863 }
864 
865 /* Union two constraint vectors, TO and FROM.  Put the result in TO.  */
866 
867 static void
constraint_set_union(vec<constraint_t> * to,vec<constraint_t> * from)868 constraint_set_union (vec<constraint_t> *to,
869 		      vec<constraint_t> *from)
870 {
871   int i;
872   constraint_t c;
873 
874   FOR_EACH_VEC_ELT (*from, i, c)
875     {
876       if (constraint_vec_find (*to, *c) == NULL)
877 	{
878 	  unsigned int place = to->lower_bound (c, constraint_less);
879 	  to->safe_insert (place, c);
880 	}
881     }
882 }
883 
884 /* Expands the solution in SET to all sub-fields of variables included.
885    Union the expanded result into RESULT.  */
886 
887 static void
solution_set_expand(bitmap result,bitmap set)888 solution_set_expand (bitmap result, bitmap set)
889 {
890   bitmap_iterator bi;
891   bitmap vars = NULL;
892   unsigned j;
893 
894   /* In a first pass record all variables we need to add all
895      sub-fields off.  This avoids quadratic behavior.  */
896   EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
897     {
898       varinfo_t v = get_varinfo (j);
899       if (v->is_artificial_var
900 	  || v->is_full_var)
901 	continue;
902       v = lookup_vi_for_tree (v->decl);
903       if (vars == NULL)
904 	vars = BITMAP_ALLOC (NULL);
905       bitmap_set_bit (vars, v->id);
906     }
907 
908   /* In the second pass now do the addition to the solution and
909      to speed up solving add it to the delta as well.  */
910   if (vars != NULL)
911     {
912       EXECUTE_IF_SET_IN_BITMAP (vars, 0, j, bi)
913 	{
914 	  varinfo_t v = get_varinfo (j);
915 	  for (; v != NULL; v = v->next)
916 	    bitmap_set_bit (result, v->id);
917 	}
918       BITMAP_FREE (vars);
919     }
920 }
921 
922 /* Take a solution set SET, add OFFSET to each member of the set, and
923    overwrite SET with the result when done.  */
924 
925 static void
solution_set_add(bitmap set,HOST_WIDE_INT offset)926 solution_set_add (bitmap set, HOST_WIDE_INT offset)
927 {
928   bitmap result = BITMAP_ALLOC (&iteration_obstack);
929   unsigned int i;
930   bitmap_iterator bi;
931 
932   /* If the offset is unknown we have to expand the solution to
933      all subfields.  */
934   if (offset == UNKNOWN_OFFSET)
935     {
936       solution_set_expand (set, set);
937       return;
938     }
939 
940   EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
941     {
942       varinfo_t vi = get_varinfo (i);
943 
944       /* If this is a variable with just one field just set its bit
945          in the result.  */
946       if (vi->is_artificial_var
947 	  || vi->is_unknown_size_var
948 	  || vi->is_full_var)
949 	bitmap_set_bit (result, i);
950       else
951 	{
952 	  unsigned HOST_WIDE_INT fieldoffset = vi->offset + offset;
953 
954 	  /* If the offset makes the pointer point to before the
955 	     variable use offset zero for the field lookup.  */
956 	  if (offset < 0
957 	      && fieldoffset > vi->offset)
958 	    fieldoffset = 0;
959 
960 	  if (offset != 0)
961 	    vi = first_or_preceding_vi_for_offset (vi, fieldoffset);
962 
963 	  bitmap_set_bit (result, vi->id);
964 	  /* If the result is not exactly at fieldoffset include the next
965 	     field as well.  See get_constraint_for_ptr_offset for more
966 	     rationale.  */
967 	  if (vi->offset != fieldoffset
968 	      && vi->next != NULL)
969 	    bitmap_set_bit (result, vi->next->id);
970 	}
971     }
972 
973   bitmap_copy (set, result);
974   BITMAP_FREE (result);
975 }
976 
977 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
978    process.  */
979 
980 static bool
set_union_with_increment(bitmap to,bitmap from,HOST_WIDE_INT inc)981 set_union_with_increment  (bitmap to, bitmap from, HOST_WIDE_INT inc)
982 {
983   if (inc == 0)
984     return bitmap_ior_into (to, from);
985   else
986     {
987       bitmap tmp;
988       bool res;
989 
990       tmp = BITMAP_ALLOC (&iteration_obstack);
991       bitmap_copy (tmp, from);
992       solution_set_add (tmp, inc);
993       res = bitmap_ior_into (to, tmp);
994       BITMAP_FREE (tmp);
995       return res;
996     }
997 }
998 
999 /* Insert constraint C into the list of complex constraints for graph
1000    node VAR.  */
1001 
1002 static void
insert_into_complex(constraint_graph_t graph,unsigned int var,constraint_t c)1003 insert_into_complex (constraint_graph_t graph,
1004 		     unsigned int var, constraint_t c)
1005 {
1006   vec<constraint_t> complex = graph->complex[var];
1007   unsigned int place = complex.lower_bound (c, constraint_less);
1008 
1009   /* Only insert constraints that do not already exist.  */
1010   if (place >= complex.length ()
1011       || !constraint_equal (*c, *complex[place]))
1012     graph->complex[var].safe_insert (place, c);
1013 }
1014 
1015 
/* Condense two variable nodes into a single variable node, by moving
   all associated info from FROM to TO.  TO must already be the
   union-find representative of FROM.  */

static void
merge_node_constraints (constraint_graph_t graph, unsigned int to,
			unsigned int from)
{
  unsigned int i;
  constraint_t c;

  gcc_assert (find (from) == to);

  /* Move all complex constraints from src node into to node  */
  FOR_EACH_VEC_ELT (graph->complex[from], i, c)
    {
      /* In complex constraints for node src, we may have either
	 a = *src, and *src = a, or an offseted constraint which are
	 always added to the rhs node's constraints.  */

      if (c->rhs.type == DEREF)
	c->rhs.var = to;
      else if (c->lhs.type == DEREF)
	c->lhs.var = to;
      else
	c->rhs.var = to;
    }
  /* Merge the rewritten constraints into TO's sorted list and release
     FROM's storage.  */
  constraint_set_union (&graph->complex[to], &graph->complex[from]);
  graph->complex[from].release ();
}
1045 
1046 
1047 /* Remove edges involving NODE from GRAPH.  */
1048 
1049 static void
clear_edges_for_node(constraint_graph_t graph,unsigned int node)1050 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1051 {
1052   if (graph->succs[node])
1053     BITMAP_FREE (graph->succs[node]);
1054 }
1055 
/* Merge GRAPH nodes FROM and TO into node TO.  */

static void
merge_graph_nodes (constraint_graph_t graph, unsigned int to,
		   unsigned int from)
{
  if (graph->indirect_cycles[from] != -1)
    {
      /* If we have indirect cycles with the from node, and we have
	 none on the to node, the to node has indirect cycles from the
	 from node now that they are unified.
	 If indirect cycles exist on both, unify the nodes that they
	 are in a cycle with, since we know they are in a cycle with
	 each other.  */
      if (graph->indirect_cycles[to] == -1)
	graph->indirect_cycles[to] = graph->indirect_cycles[from];
    }

  /* Merge all the successor edges.  TO's successor bitmap is allocated
     lazily, so it may not exist yet.  */
  if (graph->succs[from])
    {
      if (!graph->succs[to])
	graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
      bitmap_ior_into (graph->succs[to],
		       graph->succs[from]);
    }

  /* Drop FROM's now-redundant successor edges.  */
  clear_edges_for_node (graph, from);
}
1085 
1086 
1087 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
1088    it doesn't exist in the graph already.  */
1089 
1090 static void
add_implicit_graph_edge(constraint_graph_t graph,unsigned int to,unsigned int from)1091 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1092 			 unsigned int from)
1093 {
1094   if (to == from)
1095     return;
1096 
1097   if (!graph->implicit_preds[to])
1098     graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1099 
1100   if (bitmap_set_bit (graph->implicit_preds[to], from))
1101     stats.num_implicit_edges++;
1102 }
1103 
/* Add a predecessor graph edge to GRAPH, going from TO to FROM if
   it doesn't exist in the graph already.  Adding an already-present
   edge is a harmless no-op (the edge set is a bitmap).  */

static void
add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
		     unsigned int from)
{
  if (!graph->preds[to])
    graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_set_bit (graph->preds[to], from);
}
1116 
1117 /* Add a graph edge to GRAPH, going from FROM to TO if
1118    it doesn't exist in the graph already.
1119    Return false if the edge already existed, true otherwise.  */
1120 
1121 static bool
add_graph_edge(constraint_graph_t graph,unsigned int to,unsigned int from)1122 add_graph_edge (constraint_graph_t graph, unsigned int to,
1123 		unsigned int from)
1124 {
1125   if (to == from)
1126     {
1127       return false;
1128     }
1129   else
1130     {
1131       bool r = false;
1132 
1133       if (!graph->succs[from])
1134 	graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1135       if (bitmap_set_bit (graph->succs[from], to))
1136 	{
1137 	  r = true;
1138 	  if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1139 	    stats.num_edges++;
1140 	}
1141       return r;
1142     }
1143 }
1144 
1145 
1146 /* Return true if {DEST.SRC} is an existing graph edge in GRAPH.  */
1147 
1148 static bool
valid_graph_edge(constraint_graph_t graph,unsigned int src,unsigned int dest)1149 valid_graph_edge (constraint_graph_t graph, unsigned int src,
1150 		  unsigned int dest)
1151 {
1152   return (graph->succs[dest]
1153 	  && bitmap_bit_p (graph->succs[dest], src));
1154 }
1155 
1156 /* Initialize the constraint graph structure to contain SIZE nodes.  */
1157 
1158 static void
init_graph(unsigned int size)1159 init_graph (unsigned int size)
1160 {
1161   unsigned int j;
1162 
1163   graph = XCNEW (struct constraint_graph);
1164   graph->size = size;
1165   graph->succs = XCNEWVEC (bitmap, graph->size);
1166   graph->indirect_cycles = XNEWVEC (int, graph->size);
1167   graph->rep = XNEWVEC (unsigned int, graph->size);
1168   /* ??? Macros do not support template types with multiple arguments,
1169      so we use a typedef to work around it.  */
1170   typedef vec<constraint_t> vec_constraint_t_heap;
1171   graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
1172   graph->pe = XCNEWVEC (unsigned int, graph->size);
1173   graph->pe_rep = XNEWVEC (int, graph->size);
1174 
1175   for (j = 0; j < graph->size; j++)
1176     {
1177       graph->rep[j] = j;
1178       graph->pe_rep[j] = -1;
1179       graph->indirect_cycles[j] = -1;
1180     }
1181 }
1182 
/* Build the constraint graph, adding only predecessor edges right now.  */

static void
build_pred_graph (void)
{
  int i;
  constraint_t c;
  unsigned int j;

  /* Allocate the predecessor-graph side arrays; the successor-graph
     arrays were already set up by init_graph.  */
  graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
  graph->preds = XCNEWVEC (bitmap, graph->size);
  graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
  graph->loc_label = XCNEWVEC (unsigned int, graph->size);
  graph->pointed_by = XCNEWVEC (bitmap, graph->size);
  graph->points_to = XCNEWVEC (bitmap, graph->size);
  graph->eq_rep = XNEWVEC (int, graph->size);
  graph->direct_nodes = sbitmap_alloc (graph->size);
  graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_clear (graph->direct_nodes);

  /* All non-special variables start out direct; constraints below may
     demote them.  */
  for (j = 0; j < FIRST_REF_NODE; j++)
    {
      if (!get_varinfo (j)->is_special_var)
	bitmap_set_bit (graph->direct_nodes, j);
    }

  for (j = 0; j < graph->size; j++)
    graph->eq_rep[j] = -1;

  for (j = 0; j < varmap.length (); j++)
    graph->indirect_cycles[j] = -1;

  /* Translate each constraint into predecessor and implicit edges.  */
  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = lhs.var;
      unsigned int rhsvar = rhs.var;

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	  else
	    bitmap_clear_bit (graph->direct_nodes, lhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  varinfo_t v;

	  /* x = &y */
	  if (graph->points_to[lhsvar] == NULL)
	    graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->points_to[lhsvar], rhsvar);

	  if (graph->pointed_by[rhsvar] == NULL)
	    graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);

	  /* Implicitly, *x = y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);

	  /* All related variables are no longer direct nodes.  */
	  bitmap_clear_bit (graph->direct_nodes, rhsvar);
          v = get_varinfo (rhsvar);
          if (!v->is_full_var)
            {
	      /* Demote every field of the underlying decl, walking the
		 chain from its head varinfo.  */
              v = lookup_vi_for_tree (v->decl);
              do
                {
                  bitmap_clear_bit (graph->direct_nodes, v->id);
                  v = v->next;
                }
              while (v != NULL);
            }
	  bitmap_set_bit (graph->address_taken, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y */
	  add_pred_graph_edge (graph, lhsvar, rhsvar);
	  /* Implicitly, *x = *y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
				   FIRST_REF_NODE + rhsvar);
	}
      else if (lhs.offset != 0 || rhs.offset != 0)
	{
	  /* Offsetted copies make the offsetted side non-direct.  */
	  if (rhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, lhs.var);
	  else if (lhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, rhs.var);
	}
    }
}
1285 
/* Build the constraint graph, adding successor edges.  */

static void
build_succ_graph (void)
{
  unsigned i, t;
  constraint_t c;

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs;
      struct constraint_expr rhs;
      unsigned int lhsvar;
      unsigned int rhsvar;

      if (!c)
	continue;

      /* Work on the current union-find representatives.  */
      lhs = c->lhs;
      rhs = c->rhs;
      lhsvar = find (lhs.var);
      rhsvar = find (rhs.var);

      if (lhs.type == DEREF)
	{
	  /* *x = y becomes an edge into the REF node of x.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y becomes an edge from the REF node of y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y seeds x's solution directly.  */
	  gcc_assert (find (rhs.var) == rhs.var);
	  bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* Plain copy x = y.  */
	  add_graph_edge (graph, lhsvar, rhsvar);
	}
    }

  /* Add edges from STOREDANYTHING to all non-direct nodes that can
     receive pointers.  */
  t = find (storedanything_id);
  for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
    {
      if (!bitmap_bit_p (graph->direct_nodes, i)
	  && get_varinfo (i)->may_have_pointers)
	add_graph_edge (graph, find (i), t);
    }

  /* Everything stored to ANYTHING also potentially escapes.  */
  add_graph_edge (graph, find (escaped_id), t);
}
1345 
1346 
/* Set of variables whose points-to solution changed on the last
   solver iteration.  */
static bitmap changed;
1349 
/* Strongly Connected Component visitation info.  */

struct scc_info
{
  /* Nodes already entered by scc_visit.  */
  sbitmap visited;
  /* Nodes whose SCC processing has finished.  */
  sbitmap deleted;
  /* Per-node DFS discovery number.  */
  unsigned int *dfs;
  /* Per-node mapping, initialized to the identity
     (see init_scc_info).  */
  unsigned int *node_mapping;
  /* Next DFS number to hand out.  */
  int current_index;
  /* Stack of nodes not yet assigned to a finished SCC.  */
  vec<unsigned> scc_stack;
};
1361 
1362 
/* Recursive routine to find strongly connected components in GRAPH.
   SI is the SCC info to store the information in, and N is the id of current
   graph node we are processing.

   This is Tarjan's strongly connected component finding algorithm, as
   modified by Nuutila to keep only non-root nodes on the stack.
   The algorithm can be found in "On finding the strongly connected
   components in a directed graph" by Esko Nuutila and Eljas
   Soisalon-Soininen, in Information Processing Letters volume 49,
   number 1, pages 9-14.  */

static void
scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  bitmap_set_bit (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
    {
      unsigned int w;

      if (i > LAST_REF_NODE)
	break;

      w = find (i);
      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	scc_visit (graph, si, w);
      {
	unsigned int t = find (w);
	unsigned int nnode = find (n);
	gcc_assert (nnode == n);

	/* Propagate the lowest reachable DFS number to N.  */
	if (si->dfs[t] < si->dfs[nnode])
	  si->dfs[n] = si->dfs[t];
      }
    }

  /* See if any components have been identified.  N is an SCC root
     iff its DFS number was not lowered by any successor.  */
  if (si->dfs[n] == my_dfs)
    {
      if (si->scc_stack.length () > 0
	  && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  bitmap scc = BITMAP_ALLOC (NULL);
	  unsigned int lowest_node;
	  bitmap_iterator bi;

	  bitmap_set_bit (scc, n);

	  /* Pop all stacked nodes that belong to this SCC.  */
	  while (si->scc_stack.length () != 0
		 && si->dfs[si->scc_stack.last ()] >= my_dfs)
	    {
	      unsigned int w = si->scc_stack.pop ();

	      bitmap_set_bit (scc, w);
	    }

	  lowest_node = bitmap_first_set_bit (scc);
	  gcc_assert (lowest_node < FIRST_REF_NODE);

	  /* Collapse the SCC nodes into a single node, and mark the
	     indirect cycles.  */
	  EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
	    {
	      if (i < FIRST_REF_NODE)
		{
		  if (unite (lowest_node, i))
		    unify_nodes (graph, lowest_node, i, false);
		}
	      else
		{
		  /* REF nodes are recorded as indirect cycles instead
		     of being unified directly.  */
		  unite (lowest_node, i);
		  graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
		}
	    }
	}
      bitmap_set_bit (si->deleted, n);
    }
  else
    /* N belongs to a not-yet-finished SCC rooted elsewhere.  */
    si->scc_stack.safe_push (n);
}
1453 
/* Unify node FROM into node TO, updating the changed count if
   necessary when UPDATE_CHANGED is true.  TO must already be the
   union-find representative, distinct from FROM.  */

static void
unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
	     bool update_changed)
{

  gcc_assert (to != from && find (to) == to);
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Unifying %s to %s\n",
	     get_varinfo (from)->name,
	     get_varinfo (to)->name);

  if (update_changed)
    stats.unified_vars_dynamic++;
  else
    stats.unified_vars_static++;

  /* Move FROM's edges and complex constraints over to TO.  */
  merge_graph_nodes (graph, to, from);
  merge_node_constraints (graph, to, from);

  /* Mark TO as changed if FROM was changed. If TO was already marked
     as changed, decrease the changed count.  */

  if (update_changed
      && bitmap_bit_p (changed, from))
    {
      bitmap_clear_bit (changed, from);
      bitmap_set_bit (changed, to);
    }
  if (get_varinfo (from)->solution)
    {
      /* If the solution changes because of the merging, we need to mark
	 the variable as changed.  */
      if (bitmap_ior_into (get_varinfo (to)->solution,
			   get_varinfo (from)->solution))
	{
	  if (update_changed)
	    bitmap_set_bit (changed, to);
	}

      /* FROM's solution storage is no longer needed.  */
      BITMAP_FREE (get_varinfo (from)->solution);
      if (get_varinfo (from)->oldsolution)
	BITMAP_FREE (get_varinfo (from)->oldsolution);

      /* Once solving has started, drop TO's cached old solution as it
	 may no longer reflect the merged state.  */
      if (stats.iterations > 0
	  && get_varinfo (to)->oldsolution)
	BITMAP_FREE (get_varinfo (to)->oldsolution);
    }
  /* Remove a possible self edge produced by the merge.  */
  if (valid_graph_edge (graph, to, to))
    {
      if (graph->succs[to])
	bitmap_clear_bit (graph->succs[to], to);
    }
}
1510 
/* Information needed to compute the topological ordering of a graph.  */

struct topo_info
{
  /* sbitmap of nodes visited by topo_visit.  */
  sbitmap visited;
  /* Array that stores the topological order of the graph, *in
     reverse* (nodes are pushed after their successors).  */
  vec<unsigned> topo_order;
};
1521 
1522 
1523 /* Initialize and return a topological info structure.  */
1524 
1525 static struct topo_info *
init_topo_info(void)1526 init_topo_info (void)
1527 {
1528   size_t size = graph->size;
1529   struct topo_info *ti = XNEW (struct topo_info);
1530   ti->visited = sbitmap_alloc (size);
1531   bitmap_clear (ti->visited);
1532   ti->topo_order.create (1);
1533   return ti;
1534 }
1535 
1536 
1537 /* Free the topological sort info pointed to by TI.  */
1538 
1539 static void
free_topo_info(struct topo_info * ti)1540 free_topo_info (struct topo_info *ti)
1541 {
1542   sbitmap_free (ti->visited);
1543   ti->topo_order.release ();
1544   free (ti);
1545 }
1546 
/* Visit the graph in topological order, and store the order in the
   topo_info structure.  */

static void
topo_visit (constraint_graph_t graph, struct topo_info *ti,
	    unsigned int n)
{
  bitmap_iterator bi;
  unsigned int j;

  bitmap_set_bit (ti->visited, n);

  /* Post-order DFS: recurse into all unvisited successors first.  */
  if (graph->succs[n])
    EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
      {
	if (!bitmap_bit_p (ti->visited, j))
	  topo_visit (graph, ti, j);
      }

  /* Nodes are pushed after their successors, so topo_order ends up
     holding the topological order in reverse.  */
  ti->topo_order.safe_push (n);
}
1568 
/* Process a constraint C that represents x = *(y + off), using DELTA as the
   starting solution for y.  */

static void
do_sd_constraint (constraint_graph_t graph, constraint_t c,
		  bitmap delta)
{
  unsigned int lhs = c->lhs.var;
  bool flag = false;
  bitmap sol = get_varinfo (lhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT roffset = c->rhs.offset;

  /* Our IL does not allow this.  */
  gcc_assert (c->lhs.offset == 0);

  /* If the solution of Y contains anything it is good enough to transfer
     this to the LHS.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      flag |= bitmap_set_bit (sol, anything_id);
      goto done;
    }

  /* If we do not know at which offset the rhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (roffset == UNKNOWN_OFFSET)
    {
      solution_set_expand (delta, delta);
      /* No further offset processing is necessary.  */
      roffset = 0;
    }

  /* For each variable j in delta (Sol(y)), add
     an edge in the graph from j to x, and union Sol(j) into Sol(x).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      HOST_WIDE_INT fieldoffset = v->offset + roffset;
      unsigned int t;

      if (v->is_full_var)
	fieldoffset = v->offset;
      else if (roffset != 0)
	v = first_vi_for_offset (v, fieldoffset);
      /* If the access is outside of the variable we can ignore it.  */
      if (!v)
	continue;

      do
	{
	  t = find (v->id);

	  /* Adding edges from the special vars is pointless.
	     They don't have sets that can change.  */
	  if (get_varinfo (t)->is_special_var)
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
	  /* Merging the solution from ESCAPED needlessly increases
	     the set.  Use ESCAPED as representative instead.  */
	  else if (v->id == escaped_id)
	    flag |= bitmap_set_bit (sol, escaped_id);
	  else if (v->may_have_pointers
		   && add_graph_edge (graph, lhs, t))
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);

	  /* If the variable is not exactly at the requested offset
	     we have to include the next one.  */
	  if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
	      || v->next == NULL)
	    break;

	  v = v->next;
	  fieldoffset = v->offset;
	}
      while (1);
    }

done:
  /* If the LHS solution changed, mark the var as changed.  */
  if (flag)
    {
      get_varinfo (lhs)->solution = sol;
      bitmap_set_bit (changed, lhs);
    }
}
1656 
/* Process a constraint C that represents *(x + off) = y using DELTA
   as the starting solution for x.  */

static void
do_ds_constraint (constraint_t c, bitmap delta)
{
  unsigned int rhs = c->rhs.var;
  bitmap sol = get_varinfo (rhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT loff = c->lhs.offset;
  bool escaped_p = false;

  /* Our IL does not allow this.  */
  gcc_assert (c->rhs.offset == 0);

  /* If the solution of y contains ANYTHING simply use the ANYTHING
     solution.  This avoids needlessly increasing the points-to sets.  */
  if (bitmap_bit_p (sol, anything_id))
    sol = get_varinfo (find (anything_id))->solution;

  /* If the solution for x contains ANYTHING we have to merge the
     solution of y into all pointer variables which we do via
     STOREDANYTHING.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      unsigned t = find (storedanything_id);
      if (add_graph_edge (graph, t, rhs))
	{
	  if (bitmap_ior_into (get_varinfo (t)->solution, sol))
	    bitmap_set_bit (changed, t);
	}
      return;
    }

  /* If we do not know at which offset the lhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (loff == UNKNOWN_OFFSET)
    {
      solution_set_expand (delta, delta);
      loff = 0;
    }

  /* For each member j of delta (Sol(x)), add an edge from y to j and
     union Sol(y) into Sol(j) */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      unsigned int t;
      HOST_WIDE_INT fieldoffset = v->offset + loff;

      if (v->is_full_var)
	fieldoffset = v->offset;
      else if (loff != 0)
	v = first_vi_for_offset (v, fieldoffset);
      /* If the access is outside of the variable we can ignore it.  */
      if (!v)
	continue;

      do
	{
	  if (v->may_have_pointers)
	    {
	      /* If v is a global variable then this is an escape point.  */
	      if (v->is_global_var
		  && !escaped_p)
		{
		  t = find (escaped_id);
		  if (add_graph_edge (graph, t, rhs)
		      && bitmap_ior_into (get_varinfo (t)->solution, sol))
		    bitmap_set_bit (changed, t);
		  /* Enough to let rhs escape once.  */
		  escaped_p = true;
		}

	      if (v->is_special_var)
		break;

	      t = find (v->id);
	      if (add_graph_edge (graph, t, rhs)
		  && bitmap_ior_into (get_varinfo (t)->solution, sol))
		bitmap_set_bit (changed, t);
	    }

	  /* If the variable is not exactly at the requested offset
	     we have to include the next one.  */
	  if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
	      || v->next == NULL)
	    break;

	  v = v->next;
	  fieldoffset = v->offset;
	}
      while (1);
    }
}
1754 
1755 /* Handle a non-simple (simple meaning requires no iteration),
1756    constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved).  */
1757 
1758 static void
do_complex_constraint(constraint_graph_t graph,constraint_t c,bitmap delta)1759 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1760 {
1761   if (c->lhs.type == DEREF)
1762     {
1763       if (c->rhs.type == ADDRESSOF)
1764 	{
1765 	  gcc_unreachable();
1766 	}
1767       else
1768 	{
1769 	  /* *x = y */
1770 	  do_ds_constraint (c, delta);
1771 	}
1772     }
1773   else if (c->rhs.type == DEREF)
1774     {
1775       /* x = *y */
1776       if (!(get_varinfo (c->lhs.var)->is_special_var))
1777 	do_sd_constraint (graph, c, delta);
1778     }
1779   else
1780     {
1781       bitmap tmp;
1782       bitmap solution;
1783       bool flag = false;
1784 
1785       gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1786       solution = get_varinfo (c->rhs.var)->solution;
1787       tmp = get_varinfo (c->lhs.var)->solution;
1788 
1789       flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1790 
1791       if (flag)
1792 	{
1793 	  get_varinfo (c->lhs.var)->solution = tmp;
1794 	  bitmap_set_bit (changed, c->lhs.var);
1795 	}
1796     }
1797 }
1798 
1799 /* Initialize and return a new SCC info structure.  */
1800 
1801 static struct scc_info *
init_scc_info(size_t size)1802 init_scc_info (size_t size)
1803 {
1804   struct scc_info *si = XNEW (struct scc_info);
1805   size_t i;
1806 
1807   si->current_index = 0;
1808   si->visited = sbitmap_alloc (size);
1809   bitmap_clear (si->visited);
1810   si->deleted = sbitmap_alloc (size);
1811   bitmap_clear (si->deleted);
1812   si->node_mapping = XNEWVEC (unsigned int, size);
1813   si->dfs = XCNEWVEC (unsigned int, size);
1814 
1815   for (i = 0; i < size; i++)
1816     si->node_mapping[i] = i;
1817 
1818   si->scc_stack.create (1);
1819   return si;
1820 }
1821 
1822 /* Free an SCC info structure pointed to by SI */
1823 
1824 static void
free_scc_info(struct scc_info * si)1825 free_scc_info (struct scc_info *si)
1826 {
1827   sbitmap_free (si->visited);
1828   sbitmap_free (si->deleted);
1829   free (si->node_mapping);
1830   free (si->dfs);
1831   si->scc_stack.release ();
1832   free (si);
1833 }
1834 
1835 
1836 /* Find indirect cycles in GRAPH that occur, using strongly connected
1837    components, and note them in the indirect cycles map.
1838 
1839    This technique comes from Ben Hardekopf and Calvin Lin,
1840    "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1841    Lines of Code", submitted to PLDI 2007.  */
1842 
1843 static void
find_indirect_cycles(constraint_graph_t graph)1844 find_indirect_cycles (constraint_graph_t graph)
1845 {
1846   unsigned int i;
1847   unsigned int size = graph->size;
1848   struct scc_info *si = init_scc_info (size);
1849 
1850   for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1851     if (!bitmap_bit_p (si->visited, i) && find (i) == i)
1852       scc_visit (graph, si, i);
1853 
1854   free_scc_info (si);
1855 }
1856 
1857 /* Compute a topological ordering for GRAPH, and store the result in the
1858    topo_info structure TI.  */
1859 
1860 static void
compute_topo_order(constraint_graph_t graph,struct topo_info * ti)1861 compute_topo_order (constraint_graph_t graph,
1862 		    struct topo_info *ti)
1863 {
1864   unsigned int i;
1865   unsigned int size = graph->size;
1866 
1867   for (i = 0; i != size; ++i)
1868     if (!bitmap_bit_p (ti->visited, i) && find (i) == i)
1869       topo_visit (graph, ti, i);
1870 }
1871 
/* Structure used to for hash value numbering of pointer equivalence
   classes.  */

typedef struct equiv_class_label
{
  /* Hash of LABELS, cached when the entry is created.  */
  hashval_t hashcode;
  /* Equivalence class number; zero when freshly inserted
     (see equiv_class_lookup_or_add).  */
  unsigned int equivalence_class;
  /* The set of labels this entry stands for.  */
  bitmap labels;
} *equiv_class_label_t;
typedef const struct equiv_class_label *const_equiv_class_label_t;

/* A hashtable for mapping a bitmap of labels->pointer equivalence
   classes.  */
static htab_t pointer_equiv_class_table;

/* A hashtable for mapping a bitmap of labels->location equivalence
   classes.  */
static htab_t location_equiv_class_table;
1890 
1891 /* Hash function for a equiv_class_label_t */
1892 
1893 static hashval_t
equiv_class_label_hash(const void * p)1894 equiv_class_label_hash (const void *p)
1895 {
1896   const_equiv_class_label_t const ecl = (const_equiv_class_label_t) p;
1897   return ecl->hashcode;
1898 }
1899 
1900 /* Equality function for two equiv_class_label_t's.  */
1901 
1902 static int
equiv_class_label_eq(const void * p1,const void * p2)1903 equiv_class_label_eq (const void *p1, const void *p2)
1904 {
1905   const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
1906   const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
1907   return (eql1->hashcode == eql2->hashcode
1908 	  && bitmap_equal_p (eql1->labels, eql2->labels));
1909 }
1910 
/* Look up the equivalence class entry for the bitmap of LABELS in
   TABLE, inserting a fresh entry with equivalence class zero if none
   exists yet.  Returns the (possibly new) table entry.  */
1914 
1915 static equiv_class_label *
equiv_class_lookup_or_add(htab_t table,bitmap labels)1916 equiv_class_lookup_or_add (htab_t table, bitmap labels)
1917 {
1918   equiv_class_label **slot;
1919   equiv_class_label ecl;
1920 
1921   ecl.labels = labels;
1922   ecl.hashcode = bitmap_hash (labels);
1923   slot = (equiv_class_label **) htab_find_slot_with_hash (table, &ecl,
1924 							  ecl.hashcode, INSERT);
1925   if (!*slot)
1926     {
1927       *slot = XNEW (struct equiv_class_label);
1928       (*slot)->labels = labels;
1929       (*slot)->hashcode = ecl.hashcode;
1930       (*slot)->equivalence_class = 0;
1931     }
1932 
1933   return *slot;
1934 }
1935 
1936 /* Perform offline variable substitution.
1937 
1938    This is a worst case quadratic time way of identifying variables
1939    that must have equivalent points-to sets, including those caused by
1940    static cycles, and single entry subgraphs, in the constraint graph.
1941 
1942    The technique is described in "Exploiting Pointer and Location
1943    Equivalence to Optimize Pointer Analysis. In the 14th International
1944    Static Analysis Symposium (SAS), August 2007."  It is known as the
1945    "HU" algorithm, and is equivalent to value numbering the collapsed
1946    constraint graph including evaluating unions.
1947 
1948    The general method of finding equivalence classes is as follows:
1949    Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1950    Initialize all non-REF nodes to be direct nodes.
1951    For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1952    variable}
1953    For each constraint containing the dereference, we also do the same
1954    thing.
1955 
1956    We then compute SCC's in the graph and unify nodes in the same SCC,
1957    including pts sets.
1958 
1959    For each non-collapsed node x:
1960     Visit all unvisited explicit incoming edges.
    Ignoring all non-pointers, set pts(x) = Union of pts(y) for all y
    such that y->x.
1963     Lookup the equivalence class for pts(x).
1964      If we found one, equivalence_class(x) = found class.
1965      Otherwise, equivalence_class(x) = new class, and new_class is
1966     added to the lookup table.
1967 
1968    All direct nodes with the same equivalence class can be replaced
1969    with a single representative node.
1970    All unlabeled nodes (label == 0) are not pointers and all edges
1971    involving them can be eliminated.
1972    We perform these optimizations during rewrite_constraints
1973 
1974    In addition to pointer equivalence class finding, we also perform
1975    location equivalence class finding.  This is the set of variables
1976    that always appear together in points-to sets.  We use this to
1977    compress the size of the points-to sets.  */
1978 
/* Current maximum pointer equivalence class id.  Class id 0 is
   reserved to mean "not a pointer"; see rewrite_constraints.  */
static int pointer_equiv_class;

/* Current maximum location equivalence class id.  */
static int location_equiv_class;
1984 
/* Recursive routine to find strongly connected components in GRAPH,
   and label its nodes with DFS numbers.  */
1987 
static void
condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  /* N must be its own representative in the substitution mapping when
     we start visiting it.  */
  gcc_assert (si->node_mapping[n] == n);
  bitmap_set_bit (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit everything reachable through explicit predecessor edges.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      /* Nodes whose SCC is already finished are skipped.  */
      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	condense_visit (graph, si, w);
      {
	unsigned int t = si->node_mapping[w];
	unsigned int nnode = si->node_mapping[n];
	gcc_assert (nnode == n);

	/* Tarjan-style lowlink update: inherit the smaller DFS number
	   seen through W.  */
	if (si->dfs[t] < si->dfs[nnode])
	  si->dfs[n] = si->dfs[t];
      }
    }

  /* Visit everything reachable through implicit predecessor edges,
     exactly as above.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	condense_visit (graph, si, w);
      {
	unsigned int t = si->node_mapping[w];
	unsigned int nnode = si->node_mapping[n];
	gcc_assert (nnode == n);

	if (si->dfs[t] < si->dfs[nnode])
	  si->dfs[n] = si->dfs[t];
      }
    }

  /* See if any components have been identified.  If N's DFS number is
     unchanged, N is the root of an SCC: pop the members off the stack
     and collapse them into N.  */
  if (si->dfs[n] == my_dfs)
    {
      while (si->scc_stack.length () != 0
	     && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  unsigned int w = si->scc_stack.pop ();
	  si->node_mapping[w] = n;

	  /* The representative stays direct only if every member of
	     the SCC was direct.  */
	  if (!bitmap_bit_p (graph->direct_nodes, w))
	    bitmap_clear_bit (graph->direct_nodes, n);

	  /* Unify our nodes: merge W's edge sets and points-to
	     information into the representative N, allocating N's
	     bitmaps lazily.  */
	  if (graph->preds[w])
	    {
	      if (!graph->preds[n])
		graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->preds[n], graph->preds[w]);
	    }
	  if (graph->implicit_preds[w])
	    {
	      if (!graph->implicit_preds[n])
		graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->implicit_preds[n],
			       graph->implicit_preds[w]);
	    }
	  if (graph->points_to[w])
	    {
	      if (!graph->points_to[n])
		graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->points_to[n],
			       graph->points_to[w]);
	    }
	}
      /* The whole SCC rooted at N is finished.  */
      bitmap_set_bit (si->deleted, n);
    }
  else
    si->scc_stack.safe_push (n);
}
2079 
2080 /* Label pointer equivalences.  */
2081 
static void
label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i, first_pred;
  bitmap_iterator bi;

  bitmap_set_bit (si->visited, n);

  /* Label and union our incoming edges' points-to sets.  Remember the
     first predecessor with a non-NULL points-to set so that we can
     avoid allocating a bitmap for N when a single predecessor
     suffices.  */
  first_pred = -1U;
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];
      if (!bitmap_bit_p (si->visited, w))
	label_visit (graph, si, w);

      /* Skip unused edges: self edges and edges from non-pointers
	 (pointer label 0).  */
      if (w == n || graph->pointer_label[w] == 0)
	continue;

      if (graph->points_to[w])
	{
	  if (!graph->points_to[n])
	    {
	      if (first_pred == -1U)
		first_pred = w;
	      else
		{
		  /* Second non-empty predecessor: now N really needs
		     its own bitmap.  */
		  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
		  bitmap_ior (graph->points_to[n],
			      graph->points_to[first_pred],
			      graph->points_to[w]);
		}
	    }
	  else
	    bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
	}
    }

  /* Indirect nodes get fresh variables (their own REF node) and a new
     pointer equiv class.  */
  if (!bitmap_bit_p (graph->direct_nodes, n))
    {
      if (!graph->points_to[n])
	{
	  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
	  if (first_pred != -1U)
	    bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
	}
      bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
      graph->pointer_label[n] = pointer_equiv_class++;
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
				       graph->points_to[n]);
      ecl->equivalence_class = graph->pointer_label[n];
      return;
    }

  /* If there was only a single non-empty predecessor the pointer equiv
     class is the same; share the predecessor's label and bitmap.  */
  if (!graph->points_to[n])
    {
      if (first_pred != -1U)
	{
	  graph->pointer_label[n] = graph->pointer_label[first_pred];
	  graph->points_to[n] = graph->points_to[first_pred];
	}
      return;
    }

  if (!bitmap_empty_p (graph->points_to[n]))
    {
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
				       graph->points_to[n]);
      /* Reuse an existing class (and share its label bitmap) if this
	 exact points-to set has been seen before; otherwise assign a
	 fresh class id.  */
      if (ecl->equivalence_class == 0)
	ecl->equivalence_class = pointer_equiv_class++;
      else
	{
	  BITMAP_FREE (graph->points_to[n]);
	  graph->points_to[n] = ecl->labels;
	}
      graph->pointer_label[n] = ecl->equivalence_class;
    }
}
2166 
2167 /* Perform offline variable substitution, discovering equivalence
2168    classes, and eliminating non-pointer variables.  */
2169 
static struct scc_info *
perform_var_substitution (constraint_graph_t graph)
{
  unsigned int i;
  unsigned int size = graph->size;
  struct scc_info *si = init_scc_info (size);

  bitmap_obstack_initialize (&iteration_obstack);
  /* Both tables map a bitmap of labels to an equivalence class id.  */
  pointer_equiv_class_table = htab_create (511, equiv_class_label_hash,
					   equiv_class_label_eq, free);
  location_equiv_class_table = htab_create (511, equiv_class_label_hash,
					    equiv_class_label_eq, free);
  /* Class id 0 means "non-pointer", so real ids start at 1.  */
  pointer_equiv_class = 1;
  location_equiv_class = 1;

  /* Condense the nodes, which means to find SCC's, count incoming
     predecessors, and unite nodes in SCC's.  */
  for (i = 0; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
      condense_visit (graph, si, si->node_mapping[i]);

  bitmap_clear (si->visited);
  /* Now label the nodes with pointer equivalence classes.  */
  for (i = 0; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
      label_visit (graph, si, si->node_mapping[i]);

  /* Calculate location equivalence labels.  */
  for (i = 0; i < FIRST_REF_NODE; i++)
    {
      bitmap pointed_by;
      bitmap_iterator bi;
      unsigned int j;

      if (!graph->pointed_by[i])
	continue;
      pointed_by = BITMAP_ALLOC (&iteration_obstack);

      /* Translate the pointed-by mapping for pointer equivalence
	 labels.  */
      EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
	{
	  bitmap_set_bit (pointed_by,
			  graph->pointer_label[si->node_mapping[j]]);
	}
      /* The original pointed_by is now dead.  */
      BITMAP_FREE (graph->pointed_by[i]);

      /* Look up the location equivalence label if one exists, or make
	 one otherwise.  */
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (location_equiv_class_table, pointed_by);
      if (ecl->equivalence_class == 0)
	ecl->equivalence_class = location_equiv_class++;
      else
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Found location equivalence for node %s\n",
		     get_varinfo (i)->name);
	  /* The class entry already holds an equal bitmap; ours is
	     redundant.  */
	  BITMAP_FREE (pointed_by);
	}
      graph->loc_label[i] = ecl->equivalence_class;

    }

  /* Dump the substitution mapping and equivalence classes.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    for (i = 0; i < FIRST_REF_NODE; i++)
      {
	unsigned j = si->node_mapping[i];
	if (j != i)
	  fprintf (dump_file, "%s node id %d (%s) mapped to SCC leader "
		   "node id %d (%s)\n",
		    bitmap_bit_p (graph->direct_nodes, i)
		    ? "Direct" : "Indirect", i, get_varinfo (i)->name,
		    j, get_varinfo (j)->name);
	else
	  fprintf (dump_file,
		   "Equivalence classes for %s node id %d (%s): pointer %d"
		   ", location %d\n",
		   bitmap_bit_p (graph->direct_nodes, i)
		   ? "direct" : "indirect", i, get_varinfo (i)->name,
		   graph->pointer_label[i], graph->loc_label[i]);
      }

  /* Quickly eliminate our non-pointer variables.  */

  for (i = 0; i < FIRST_REF_NODE; i++)
    {
      unsigned int node = si->node_mapping[i];

      if (graph->pointer_label[node] == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "%s is a non-pointer variable, eliminating edges.\n",
		     get_varinfo (node)->name);
	  stats.nonpointer_vars++;
	  clear_edges_for_node (graph, node);
	}
    }

  return si;
}
2273 
2274 /* Free information that was only necessary for variable
2275    substitution.  */
2276 
2277 static void
free_var_substitution_info(struct scc_info * si)2278 free_var_substitution_info (struct scc_info *si)
2279 {
2280   free_scc_info (si);
2281   free (graph->pointer_label);
2282   free (graph->loc_label);
2283   free (graph->pointed_by);
2284   free (graph->points_to);
2285   free (graph->eq_rep);
2286   sbitmap_free (graph->direct_nodes);
2287   htab_delete (pointer_equiv_class_table);
2288   htab_delete (location_equiv_class_table);
2289   bitmap_obstack_release (&iteration_obstack);
2290 }
2291 
2292 /* Return an existing node that is equivalent to NODE, which has
2293    equivalence class LABEL, if one exists.  Return NODE otherwise.  */
2294 
2295 static unsigned int
find_equivalent_node(constraint_graph_t graph,unsigned int node,unsigned int label)2296 find_equivalent_node (constraint_graph_t graph,
2297 		      unsigned int node, unsigned int label)
2298 {
2299   /* If the address version of this variable is unused, we can
2300      substitute it for anything else with the same label.
2301      Otherwise, we know the pointers are equivalent, but not the
2302      locations, and we can unite them later.  */
2303 
2304   if (!bitmap_bit_p (graph->address_taken, node))
2305     {
2306       gcc_assert (label < graph->size);
2307 
2308       if (graph->eq_rep[label] != -1)
2309 	{
2310 	  /* Unify the two variables since we know they are equivalent.  */
2311 	  if (unite (graph->eq_rep[label], node))
2312 	    unify_nodes (graph, graph->eq_rep[label], node, false);
2313 	  return graph->eq_rep[label];
2314 	}
2315       else
2316 	{
2317 	  graph->eq_rep[label] = node;
2318 	  graph->pe_rep[label] = node;
2319 	}
2320     }
2321   else
2322     {
2323       gcc_assert (label < graph->size);
2324       graph->pe[node] = label;
2325       if (graph->pe_rep[label] == -1)
2326 	graph->pe_rep[label] = node;
2327     }
2328 
2329   return node;
2330 }
2331 
2332 /* Unite pointer equivalent but not location equivalent nodes in
2333    GRAPH.  This may only be performed once variable substitution is
2334    finished.  */
2335 
2336 static void
unite_pointer_equivalences(constraint_graph_t graph)2337 unite_pointer_equivalences (constraint_graph_t graph)
2338 {
2339   unsigned int i;
2340 
2341   /* Go through the pointer equivalences and unite them to their
2342      representative, if they aren't already.  */
2343   for (i = 0; i < FIRST_REF_NODE; i++)
2344     {
2345       unsigned int label = graph->pe[i];
2346       if (label)
2347 	{
2348 	  int label_rep = graph->pe_rep[label];
2349 
2350 	  if (label_rep == -1)
2351 	    continue;
2352 
2353 	  label_rep = find (label_rep);
2354 	  if (label_rep >= 0 && unite (label_rep, find (i)))
2355 	    unify_nodes (graph, label_rep, i, false);
2356 	}
2357     }
2358 }
2359 
2360 /* Move complex constraints to the GRAPH nodes they belong to.  */
2361 
2362 static void
move_complex_constraints(constraint_graph_t graph)2363 move_complex_constraints (constraint_graph_t graph)
2364 {
2365   int i;
2366   constraint_t c;
2367 
2368   FOR_EACH_VEC_ELT (constraints, i, c)
2369     {
2370       if (c)
2371 	{
2372 	  struct constraint_expr lhs = c->lhs;
2373 	  struct constraint_expr rhs = c->rhs;
2374 
2375 	  if (lhs.type == DEREF)
2376 	    {
2377 	      insert_into_complex (graph, lhs.var, c);
2378 	    }
2379 	  else if (rhs.type == DEREF)
2380 	    {
2381 	      if (!(get_varinfo (lhs.var)->is_special_var))
2382 		insert_into_complex (graph, rhs.var, c);
2383 	    }
2384 	  else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2385 		   && (lhs.offset != 0 || rhs.offset != 0))
2386 	    {
2387 	      insert_into_complex (graph, rhs.var, c);
2388 	    }
2389 	}
2390     }
2391 }
2392 
2393 
2394 /* Optimize and rewrite complex constraints while performing
2395    collapsing of equivalent nodes.  SI is the SCC_INFO that is the
2396    result of perform_variable_substitution.  */
2397 
static void
rewrite_constraints (constraint_graph_t graph,
		     struct scc_info *si)
{
  int i;
  unsigned int j;
  constraint_t c;

  /* Variable substitution must run before any unification, so every
     node is still its own find() representative here.  */
  for (j = 0; j < graph->size; j++)
    gcc_assert (find (j) == j);

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = find (lhs.var);
      unsigned int rhsvar = find (rhs.var);
      unsigned int lhsnode, rhsnode;
      unsigned int lhslabel, rhslabel;

      lhsnode = si->node_mapping[lhsvar];
      rhsnode = si->node_mapping[rhsvar];
      lhslabel = graph->pointer_label[lhsnode];
      rhslabel = graph->pointer_label[rhsnode];

      /* See if it is really a non-pointer variable (pointer label 0),
	 and if so, ignore the constraint by NULLing it out in place.  */
      if (lhslabel == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {

	      fprintf (dump_file, "%s is a non-pointer variable,"
		       "ignoring constraint:",
		       get_varinfo (lhs.var)->name);
	      dump_constraint (dump_file, c);
	      fprintf (dump_file, "\n");
	    }
	  constraints[i] = NULL;
	  continue;
	}

      if (rhslabel == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {

	      fprintf (dump_file, "%s is a non-pointer variable,"
		       "ignoring constraint:",
		       get_varinfo (rhs.var)->name);
	      dump_constraint (dump_file, c);
	      fprintf (dump_file, "\n");
	    }
	  constraints[i] = NULL;
	  continue;
	}

      /* Rewrite each side to the representative of its pointer
	 equivalence class (this may unify nodes as a side effect).  */
      lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
      rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
      c->lhs.var = lhsvar;
      c->rhs.var = rhsvar;

    }
}
2462 
2463 /* Eliminate indirect cycles involving NODE.  Return true if NODE was
2464    part of an SCC, false otherwise.  */
2465 
static bool
eliminate_indirect_cycles (unsigned int node)
{
  if (graph->indirect_cycles[node] != -1
      && !bitmap_empty_p (get_varinfo (node)->solution))
    {
      unsigned int i;
      vec<unsigned> queue = vNULL;
      int queuepos;
      unsigned int to = find (graph->indirect_cycles[node]);
      bitmap_iterator bi;

      /* We can't touch the solution set and call unify_nodes
	 at the same time, because unify_nodes is going to do
	 bitmap unions into it. */

      /* First pass: record which representatives get united into TO
	 while only reading the solution bitmap.  */
      EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
	{
	  if (find (i) == i && i != to)
	    {
	      if (unite (to, i))
		queue.safe_push (i);
	    }
	}

      /* Second pass: merge their solutions, now that iteration over
	 the bitmap is finished.  */
      for (queuepos = 0;
	   queue.iterate (queuepos, &i);
	   queuepos++)
	{
	  unify_nodes (graph, to, i, true);
	}
      queue.release ();
      return true;
    }
  return false;
}
2502 
2503 /* Solve the constraint graph GRAPH using our worklist solver.
2504    This is based on the PW* family of solvers from the "Efficient Field
2505    Sensitive Pointer Analysis for C" paper.
2506    It works by iterating over all the graph nodes, processing the complex
2507    constraints and propagating the copy constraints, until everything stops
2508    changed.  This corresponds to steps 6-8 in the solving list given above.  */
2509 
static void
solve_graph (constraint_graph_t graph)
{
  unsigned int size = graph->size;
  unsigned int i;
  bitmap pts;

  changed = BITMAP_ALLOC (NULL);

  /* Mark all initial non-collapsed nodes as changed.  Only nodes with
     a non-empty solution and either successors or complex constraints
     can contribute anything.  */
  for (i = 0; i < size; i++)
    {
      varinfo_t ivi = get_varinfo (i);
      if (find (i) == i && !bitmap_empty_p (ivi->solution)
	  && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
	      || graph->complex[i].length () > 0))
	bitmap_set_bit (changed, i);
    }

  /* Allocate a bitmap to be used to store the changed bits.  */
  pts = BITMAP_ALLOC (&pta_obstack);

  /* Iterate until no node's solution changes any more.  */
  while (!bitmap_empty_p (changed))
    {
      unsigned int i;
      struct topo_info *ti = init_topo_info ();
      stats.iterations++;

      bitmap_obstack_initialize (&iteration_obstack);

      /* Processing nodes in topological order propagates solutions
	 with as few revisits as possible.  */
      compute_topo_order (graph, ti);

      while (ti->topo_order.length () != 0)
	{

	  i = ti->topo_order.pop ();

	  /* If this variable is not a representative, skip it.  */
	  if (find (i) != i)
	    continue;

	  /* In certain indirect cycle cases, we may merge this
	     variable to another.  */
	  if (eliminate_indirect_cycles (i) && find (i) != i)
	    continue;

	  /* If the node has changed, we need to process the
	     complex constraints and outgoing edges again.  */
	  if (bitmap_clear_bit (changed, i))
	    {
	      unsigned int j;
	      constraint_t c;
	      bitmap solution;
	      vec<constraint_t> complex = graph->complex[i];
	      varinfo_t vi = get_varinfo (i);
	      bool solution_empty;

	      /* Compute the changed set of solution bits.  Only the
		 delta since the last visit needs to be propagated.  */
	      if (vi->oldsolution)
		bitmap_and_compl (pts, vi->solution, vi->oldsolution);
	      else
		bitmap_copy (pts, vi->solution);

	      if (bitmap_empty_p (pts))
		continue;

	      /* Record what has now been propagated.  */
	      if (vi->oldsolution)
		bitmap_ior_into (vi->oldsolution, pts);
	      else
		{
		  vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
		  bitmap_copy (vi->oldsolution, pts);
		}

	      solution = vi->solution;
	      solution_empty = bitmap_empty_p (solution);

	      /* Process the complex constraints */
	      FOR_EACH_VEC_ELT (complex, j, c)
		{
		  /* XXX: This is going to unsort the constraints in
		     some cases, which will occasionally add duplicate
		     constraints during unification.  This does not
		     affect correctness.  */
		  c->lhs.var = find (c->lhs.var);
		  c->rhs.var = find (c->rhs.var);

		  /* The only complex constraint that can change our
		     solution to non-empty, given an empty solution,
		     is a constraint where the lhs side is receiving
		     some set from elsewhere.  */
		  if (!solution_empty || c->lhs.type != DEREF)
		    do_complex_constraint (graph, c, pts);
		}

	      solution_empty = bitmap_empty_p (solution);

	      if (!solution_empty)
		{
		  bitmap_iterator bi;
		  unsigned eff_escaped_id = find (escaped_id);

		  /* Propagate solution to all successors.  */
		  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
						0, j, bi)
		    {
		      bitmap tmp;
		      bool flag;

		      unsigned int to = find (j);
		      tmp = get_varinfo (to)->solution;
		      flag = false;

		      /* Don't try to propagate to ourselves.  */
		      if (to == i)
			continue;

		      /* If we propagate from ESCAPED use ESCAPED as
		         placeholder.  */
		      if (i == eff_escaped_id)
			flag = bitmap_set_bit (tmp, escaped_id);
		      else
			flag = set_union_with_increment (tmp, pts, 0);

		      /* If the successor's solution grew, revisit it.  */
		      if (flag)
			{
			  get_varinfo (to)->solution = tmp;
			  bitmap_set_bit (changed, to);
			}
		    }
		}
	    }
	}
      free_topo_info (ti);
      bitmap_obstack_release (&iteration_obstack);
    }

  BITMAP_FREE (pts);
  BITMAP_FREE (changed);
  bitmap_obstack_release (&oldpta_obstack);
}
2651 
/* Map from trees to variable infos.  Filled by insert_vi_for_tree,
   queried through lookup_vi_for_tree and get_vi_for_tree.  */
static struct pointer_map_t *vi_for_tree;
2654 
2655 
2656 /* Insert ID as the variable id for tree T in the vi_for_tree map.  */
2657 
2658 static void
insert_vi_for_tree(tree t,varinfo_t vi)2659 insert_vi_for_tree (tree t, varinfo_t vi)
2660 {
2661   void **slot = pointer_map_insert (vi_for_tree, t);
2662   gcc_assert (vi);
2663   gcc_assert (*slot == NULL);
2664   *slot = vi;
2665 }
2666 
2667 /* Find the variable info for tree T in VI_FOR_TREE.  If T does not
2668    exist in the map, return NULL, otherwise, return the varinfo we found.  */
2669 
2670 static varinfo_t
lookup_vi_for_tree(tree t)2671 lookup_vi_for_tree (tree t)
2672 {
2673   void **slot = pointer_map_contains (vi_for_tree, t);
2674   if (slot == NULL)
2675     return NULL;
2676 
2677   return (varinfo_t) *slot;
2678 }
2679 
2680 /* Return a printable name for DECL  */
2681 
2682 static const char *
alias_get_name(tree decl)2683 alias_get_name (tree decl)
2684 {
2685   const char *res = NULL;
2686   char *temp;
2687   int num_printed = 0;
2688 
2689   if (!dump_file)
2690     return "NULL";
2691 
2692   if (TREE_CODE (decl) == SSA_NAME)
2693     {
2694       res = get_name (decl);
2695       if (res)
2696 	num_printed = asprintf (&temp, "%s_%u", res, SSA_NAME_VERSION (decl));
2697       else
2698 	num_printed = asprintf (&temp, "_%u", SSA_NAME_VERSION (decl));
2699       if (num_printed > 0)
2700 	{
2701 	  res = ggc_strdup (temp);
2702 	  free (temp);
2703 	}
2704     }
2705   else if (DECL_P (decl))
2706     {
2707       if (DECL_ASSEMBLER_NAME_SET_P (decl))
2708 	res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2709       else
2710 	{
2711 	  res = get_name (decl);
2712 	  if (!res)
2713 	    {
2714 	      num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2715 	      if (num_printed > 0)
2716 		{
2717 		  res = ggc_strdup (temp);
2718 		  free (temp);
2719 		}
2720 	    }
2721 	}
2722     }
2723   if (res != NULL)
2724     return res;
2725 
2726   return "NULL";
2727 }
2728 
2729 /* Find the variable id for tree T in the map.
2730    If T doesn't exist in the map, create an entry for it and return it.  */
2731 
2732 static varinfo_t
get_vi_for_tree(tree t)2733 get_vi_for_tree (tree t)
2734 {
2735   void **slot = pointer_map_contains (vi_for_tree, t);
2736   if (slot == NULL)
2737     return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2738 
2739   return (varinfo_t) *slot;
2740 }
2741 
2742 /* Get a scalar constraint expression for a new temporary variable.  */
2743 
2744 static struct constraint_expr
new_scalar_tmp_constraint_exp(const char * name)2745 new_scalar_tmp_constraint_exp (const char *name)
2746 {
2747   struct constraint_expr tmp;
2748   varinfo_t vi;
2749 
2750   vi = new_var_info (NULL_TREE, name);
2751   vi->offset = 0;
2752   vi->size = -1;
2753   vi->fullsize = -1;
2754   vi->is_full_var = 1;
2755 
2756   tmp.var = vi->id;
2757   tmp.type = SCALAR;
2758   tmp.offset = 0;
2759 
2760   return tmp;
2761 }
2762 
/* Get a constraint expression vector from an SSA_VAR_P node.
   If ADDRESS_P is true, the result will have its address taken.  */
2765 
static void
get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
{
  struct constraint_expr cexpr;
  varinfo_t vi;

  /* We allow FUNCTION_DECLs here even though it doesn't make much sense.  */
  gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (t) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (t)
      && (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL))
    {
      get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
      return;
    }

  /* For global variables resort to the alias target.  */
  if (TREE_CODE (t) == VAR_DECL
      && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
    {
      struct varpool_node *node = varpool_get_node (t);
      if (node && node->alias)
	{
	  node = varpool_variable_node (node, NULL);
	  t = node->symbol.decl;
	}
    }

  vi = get_vi_for_tree (t);
  cexpr.var = vi->id;
  cexpr.type = SCALAR;
  cexpr.offset = 0;
  /* If we determine the result is "anything", and we know this is readonly,
     say it points to readonly memory instead.
     NOTE: this case is asserted never to occur; the assignments after
     gcc_unreachable are dead code kept to document the intent.  */
  if (cexpr.var == anything_id && TREE_READONLY (t))
    {
      gcc_unreachable ();
      cexpr.type = ADDRESSOF;
      cexpr.var = readonly_id;
    }

  /* If we are not taking the address of the constraint expr, add all
     sub-fields of the variable as well.  */
  if (!address_p
      && !vi->is_full_var)
    {
      /* Walk the field chain; each field contributes a SCALAR expr.  */
      for (; vi; vi = vi->next)
	{
	  cexpr.var = vi->id;
	  results->safe_push (cexpr);
	}
      return;
    }

  results->safe_push (cexpr);
}
2826 
2827 /* Process constraint T, performing various simplifications and then
2828    adding it to our list of overall constraints.  */
2829 
static void
process_constraint (constraint_t t)
{
  struct constraint_expr rhs = t->rhs;
  struct constraint_expr lhs = t->lhs;

  /* Both sides must reference an existing variable.  */
  gcc_assert (rhs.var < varmap.length ());
  gcc_assert (lhs.var < varmap.length ());

  /* If we didn't get any useful constraint from the lhs we get
     &ANYTHING as fallback from get_constraint_for.  Deal with
     it here by turning it into *ANYTHING.  */
  if (lhs.type == ADDRESSOF
      && lhs.var == anything_id)
    lhs.type = DEREF;

  /* ADDRESSOF on the lhs is invalid.  */
  gcc_assert (lhs.type != ADDRESSOF);

  /* We shouldn't add constraints from things that cannot have pointers.
     It's not completely trivial to avoid in the callers, so do it here.  */
  if (rhs.type != ADDRESSOF
      && !get_varinfo (rhs.var)->may_have_pointers)
    return;

  /* Likewise adding to the solution of a non-pointer var isn't useful.  */
  if (!get_varinfo (lhs.var)->may_have_pointers)
    return;

  /* This can happen in our IR with things like n->a = *p */
  if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
    {
      /* Split into tmp = *rhs, *lhs = tmp */
      struct constraint_expr tmplhs;
      tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp");
      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));
    }
  else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
    {
      /* Split into tmp = &rhs, *lhs = tmp */
      struct constraint_expr tmplhs;
      tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp");
      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));
    }
  else
    {
      /* After the simplifications above an ADDRESSOF rhs always
	 refers to the whole variable, i.e. has a zero offset.  */
      gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
      constraints.safe_push (t);
    }
}
2882 
2883 
2884 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2885    structure.  */
2886 
2887 static HOST_WIDE_INT
bitpos_of_field(const tree fdecl)2888 bitpos_of_field (const tree fdecl)
2889 {
2890   if (!host_integerp (DECL_FIELD_OFFSET (fdecl), 0)
2891       || !host_integerp (DECL_FIELD_BIT_OFFSET (fdecl), 0))
2892     return -1;
2893 
2894   return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
2895 	  + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl)));
2896 }
2897 
2898 
2899 /* Get constraint expressions for offsetting PTR by OFFSET.  Stores the
2900    resulting constraint expressions in *RESULTS.  */
2901 
static void
get_constraint_for_ptr_offset (tree ptr, tree offset,
			       vec<ce_s> *results)
{
  struct constraint_expr c;
  unsigned int j, n;
  HOST_WIDE_INT rhsoffset;

  /* If we do not do field-sensitive PTA adding offsets to pointers
     does not change the points-to solution.  */
  if (!use_field_sensitive)
    {
      get_constraint_for_rhs (ptr, results);
      return;
    }

  /* If the offset is not a non-negative integer constant that fits
     in a HOST_WIDE_INT, we have to fall back to a conservative
     solution which includes all sub-fields of all pointed-to
     variables of ptr.  */
  if (offset == NULL_TREE
      || TREE_CODE (offset) != INTEGER_CST)
    rhsoffset = UNKNOWN_OFFSET;
  else
    {
      /* Sign-extend the offset.  */
      double_int soffset = tree_to_double_int (offset)
			   .sext (TYPE_PRECISION (TREE_TYPE (offset)));
      if (!soffset.fits_shwi ())
	rhsoffset = UNKNOWN_OFFSET;
      else
	{
	  /* Make sure the bit-offset also fits.  */
	  HOST_WIDE_INT rhsunitoffset = soffset.low;
	  rhsoffset = rhsunitoffset * BITS_PER_UNIT;
	  if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
	    rhsoffset = UNKNOWN_OFFSET;
	}
    }

  get_constraint_for_rhs (ptr, results);
  /* A zero offset leaves the constraints for PTR unchanged.  */
  if (rhsoffset == 0)
    return;

  /* As we are eventually appending to the solution do not use
     vec::iterate here.  */
  n = results->length ();
  for (j = 0; j < n; j++)
    {
      varinfo_t curr;
      c = (*results)[j];
      curr = get_varinfo (c.var);

      if (c.type == ADDRESSOF
	  /* If this varinfo represents a full variable just use it.  */
	  && curr->is_full_var)
	c.offset = 0;
      else if (c.type == ADDRESSOF
	       /* If we do not know the offset add all subfields.  */
	       && rhsoffset == UNKNOWN_OFFSET)
	{
	  /* Walk the whole sub-field chain of the variable, pushing
	     every sub-field besides the one C already refers to.  */
	  varinfo_t temp = lookup_vi_for_tree (curr->decl);
	  do
	    {
	      struct constraint_expr c2;
	      c2.var = temp->id;
	      c2.type = ADDRESSOF;
	      c2.offset = 0;
	      if (c2.var != c.var)
		results->safe_push (c2);
	      temp = temp->next;
	    }
	  while (temp);
	}
      else if (c.type == ADDRESSOF)
	{
	  varinfo_t temp;
	  unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;

	  /* Search the sub-field which overlaps with the
	     pointed-to offset.  If the result is outside of the variable
	     we have to provide a conservative result, as the variable is
	     still reachable from the resulting pointer (even though it
	     technically cannot point to anything).  The last and first
	     sub-fields are such conservative results.
	     ???  If we always had a sub-field for &object + 1 then
	     we could represent this in a more precise way.  */
	  if (rhsoffset < 0
	      && curr->offset < offset)
	    offset = 0;
	  temp = first_or_preceding_vi_for_offset (curr, offset);

	  /* If the found variable is not exactly at the pointed to
	     result, we have to include the next variable in the
	     solution as well.  Otherwise two increments by offset / 2
	     do not result in the same or a conservative superset
	     solution.  */
	  if (temp->offset != offset
	      && temp->next != NULL)
	    {
	      struct constraint_expr c2;
	      c2.var = temp->next->id;
	      c2.type = ADDRESSOF;
	      c2.offset = 0;
	      results->safe_push (c2);
	    }
	  c.var = temp->id;
	  c.offset = 0;
	}
      else
	/* For non-ADDRESSOF constraints record the offset; it is
	   applied when the constraints are solved.  */
	c.offset = rhsoffset;

      (*results)[j] = c;
    }
}
3017 
3018 
/* Given a COMPONENT_REF T, return the constraint_expr vector for it.
   If ADDRESS_P is true the result will have its address taken.
   If LHS_P is true then the constraint expression is assumed to be used
   as the lhs.  */
3023 
static void
get_constraint_for_component_ref (tree t, vec<ce_s> *results,
				  bool address_p, bool lhs_p)
{
  tree orig_t = t;
  HOST_WIDE_INT bitsize = -1;
  HOST_WIDE_INT bitmaxsize = -1;
  HOST_WIDE_INT bitpos;
  tree forzero;

  /* Some people like to do cute things like take the address of
     &0->a.b */
  forzero = t;
  while (handled_component_p (forzero)
	 || INDIRECT_REF_P (forzero)
	 || TREE_CODE (forzero) == MEM_REF)
    forzero = TREE_OPERAND (forzero, 0);

  /* A reference ultimately based on a constant zero resolves to the
     special INTEGER variable.  */
  if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
    {
      struct constraint_expr temp;

      temp.offset = 0;
      temp.var = integer_id;
      temp.type = SCALAR;
      results->safe_push (temp);
      return;
    }

  /* Handle type-punning through unions.  If we are extracting a pointer
     from a union via a possibly type-punning access that pointer
     points to anything, similar to a conversion of an integer to
     a pointer.  */
  if (!lhs_p)
    {
      tree u;
      for (u = t;
	   TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
	   u = TREE_OPERAND (u, 0))
	if (TREE_CODE (u) == COMPONENT_REF
	    && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
	  {
	    struct constraint_expr temp;

	    temp.offset = 0;
	    temp.var = anything_id;
	    temp.type = ADDRESSOF;
	    results->safe_push (temp);
	    return;
	  }
    }

  t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);

  /* Pretend to take the address of the base, we'll take care of
     adding the required subset of sub-fields below.  */
  get_constraint_for_1 (t, results, true, lhs_p);
  gcc_assert (results->length () == 1);
  struct constraint_expr &result = results->last ();

  if (result.type == SCALAR
      && get_varinfo (result.var)->is_full_var)
    /* For single-field vars do not bother about the offset.  */
    result.offset = 0;
  else if (result.type == SCALAR)
    {
      /* In languages like C, you can access one past the end of an
	 array.  You aren't allowed to dereference it, so we can
	 ignore this constraint. When we handle pointer subtraction,
	 we may have to do something cute here.  */

      if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result.var)->fullsize
	  && bitmaxsize != 0)
	{
	  /* It's also not true that the constraint will actually start at the
	     right offset, it may start in some padding.  We only care about
	     setting the constraint to the first actual field it touches, so
	     walk to find it.  */
	  struct constraint_expr cexpr = result;
	  varinfo_t curr;
	  results->pop ();
	  cexpr.offset = 0;
	  /* Push one constraint per sub-field overlapping the accessed
	     bit-range; when taking the address the first overlapping
	     field is sufficient.  */
	  for (curr = get_varinfo (cexpr.var); curr; curr = curr->next)
	    {
	      if (ranges_overlap_p (curr->offset, curr->size,
				    bitpos, bitmaxsize))
		{
		  cexpr.var = curr->id;
		  results->safe_push (cexpr);
		  if (address_p)
		    break;
		}
	    }
	  /* If we are going to take the address of this field then
	     to be able to compute reachability correctly add at least
	     the last field of the variable.  */
	  if (address_p && results->length () == 0)
	    {
	      curr = get_varinfo (cexpr.var);
	      while (curr->next != NULL)
		curr = curr->next;
	      cexpr.var = curr->id;
	      results->safe_push (cexpr);
	    }
	  else if (results->length () == 0)
	    /* Assert that we found *some* field there. The user couldn't be
	       accessing *only* padding.  */
	    /* Still the user could access one past the end of an array
	       embedded in a struct resulting in accessing *only* padding.  */
	    /* Or accessing only padding via type-punning to a type
	       that has a field just in padding space.  */
	    {
	      cexpr.type = SCALAR;
	      cexpr.var = anything_id;
	      cexpr.offset = 0;
	      results->safe_push (cexpr);
	    }
	}
      else if (bitmaxsize == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Access to zero-sized part of variable,"
		     "ignoring\n");
	}
      else
	if (dump_file && (dump_flags & TDF_DETAILS))
	  fprintf (dump_file, "Access to past the end of variable, ignoring\n");
    }
  else if (result.type == DEREF)
    {
      /* If we do not know exactly where the access goes say so.  Note
	 that only for non-structure accesses we know that we access
	 at most one subfield of any variable.  */
      if (bitpos == -1
	  || bitsize != bitmaxsize
	  || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
	  || result.offset == UNKNOWN_OFFSET)
	result.offset = UNKNOWN_OFFSET;
      else
	result.offset += bitpos;
    }
  else if (result.type == ADDRESSOF)
    {
      /* We can end up here for component references on a
         VIEW_CONVERT_EXPR <>(&foobar).  */
      result.type = SCALAR;
      result.var = anything_id;
      result.offset = 0;
    }
  else
    gcc_unreachable ();
}
3176 
3177 
3178 /* Dereference the constraint expression CONS, and return the result.
3179    DEREF (ADDRESSOF) = SCALAR
3180    DEREF (SCALAR) = DEREF
3181    DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3182    This is needed so that we can handle dereferencing DEREF constraints.  */
3183 
3184 static void
do_deref(vec<ce_s> * constraints)3185 do_deref (vec<ce_s> *constraints)
3186 {
3187   struct constraint_expr *c;
3188   unsigned int i = 0;
3189 
3190   FOR_EACH_VEC_ELT (*constraints, i, c)
3191     {
3192       if (c->type == SCALAR)
3193 	c->type = DEREF;
3194       else if (c->type == ADDRESSOF)
3195 	c->type = SCALAR;
3196       else if (c->type == DEREF)
3197 	{
3198 	  struct constraint_expr tmplhs;
3199 	  tmplhs = new_scalar_tmp_constraint_exp ("dereftmp");
3200 	  process_constraint (new_constraint (tmplhs, *c));
3201 	  c->var = tmplhs.var;
3202 	}
3203       else
3204 	gcc_unreachable ();
3205     }
3206 }
3207 
3208 /* Given a tree T, return the constraint expression for taking the
3209    address of it.  */
3210 
3211 static void
get_constraint_for_address_of(tree t,vec<ce_s> * results)3212 get_constraint_for_address_of (tree t, vec<ce_s> *results)
3213 {
3214   struct constraint_expr *c;
3215   unsigned int i;
3216 
3217   get_constraint_for_1 (t, results, true, true);
3218 
3219   FOR_EACH_VEC_ELT (*results, i, c)
3220     {
3221       if (c->type == DEREF)
3222 	c->type = SCALAR;
3223       else
3224 	c->type = ADDRESSOF;
3225     }
3226 }
3227 
3228 /* Given a tree T, return the constraint expression for it.  */
3229 
static void
get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
		      bool lhs_p)
{
  struct constraint_expr temp;

  /* x = integer is all glommed to a single variable, which doesn't
     point to anything by itself.  That is, of course, unless it is an
     integer constant being treated as a pointer, in which case, we
     will return that this is really the addressof anything.  This
     happens below, since it will fall into the default case. The only
     case we know something about an integer treated like a pointer is
     when it is the NULL pointer, and then we just say it points to
     NULL.

     Do not do that if -fno-delete-null-pointer-checks though, because
     in that case *NULL does not fail, so it _should_ alias *anything.
     It is not worth adding a new option or renaming the existing one,
     since this case is relatively obscure.  */
  if ((TREE_CODE (t) == INTEGER_CST
       && integer_zerop (t))
      /* The only valid CONSTRUCTORs in gimple with pointer typed
	 elements are zero-initializer.  But in IPA mode we also
	 process global initializers, so verify at least.  */
      || (TREE_CODE (t) == CONSTRUCTOR
	  && CONSTRUCTOR_NELTS (t) == 0))
    {
      if (flag_delete_null_pointer_checks)
	temp.var = nothing_id;
      else
	temp.var = nonlocal_id;
      temp.type = ADDRESSOF;
      temp.offset = 0;
      results->safe_push (temp);
      return;
    }

  /* String constants are read-only.  */
  if (TREE_CODE (t) == STRING_CST)
    {
      temp.var = readonly_id;
      temp.type = SCALAR;
      temp.offset = 0;
      results->safe_push (temp);
      return;
    }

  /* Dispatch on the tree code class of T.  */
  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case tcc_expression:
      {
	switch (TREE_CODE (t))
	  {
	  case ADDR_EXPR:
	    get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
	    return;
	  default:;
	  }
	break;
      }
    case tcc_reference:
      {
	switch (TREE_CODE (t))
	  {
	  case MEM_REF:
	    {
	      struct constraint_expr cs;
	      varinfo_t vi, curr;
	      get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
					     TREE_OPERAND (t, 1), results);
	      do_deref (results);

	      /* If we are not taking the address then make sure to process
		 all subvariables we might access.  */
	      if (address_p)
		return;

	      cs = results->last ();
	      if (cs.type == DEREF
		  && type_can_have_subvars (TREE_TYPE (t)))
		{
		  /* For dereferences this means we have to defer it
		     to solving time.  */
		  results->last ().offset = UNKNOWN_OFFSET;
		  return;
		}
	      if (cs.type != SCALAR)
		return;

	      /* For a scalar result on a field-decomposed variable add
		 all sub-fields covered by the access size.  */
	      vi = get_varinfo (cs.var);
	      curr = vi->next;
	      if (!vi->is_full_var
		  && curr)
		{
		  unsigned HOST_WIDE_INT size;
		  if (host_integerp (TYPE_SIZE (TREE_TYPE (t)), 1))
		    size = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (t)));
		  else
		    size = -1;
		  for (; curr; curr = curr->next)
		    {
		      if (curr->offset - vi->offset < size)
			{
			  cs.var = curr->id;
			  results->safe_push (cs);
			}
		      else
			break;
		    }
		}
	      return;
	    }
	  case ARRAY_REF:
	  case ARRAY_RANGE_REF:
	  case COMPONENT_REF:
	    get_constraint_for_component_ref (t, results, address_p, lhs_p);
	    return;
	  case VIEW_CONVERT_EXPR:
	    /* A view-convert does not change what is pointed to.  */
	    get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
				  lhs_p);
	    return;
	  /* We are missing handling for TARGET_MEM_REF here.  */
	  default:;
	  }
	break;
      }
    case tcc_exceptional:
      {
	switch (TREE_CODE (t))
	  {
	  case SSA_NAME:
	    {
	      get_constraint_for_ssa_var (t, results, address_p);
	      return;
	    }
	  case CONSTRUCTOR:
	    {
	      /* Collect the union of the constraints of all elements.  */
	      unsigned int i;
	      tree val;
	      vec<ce_s> tmp = vNULL;
	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
		{
		  struct constraint_expr *rhsp;
		  unsigned j;
		  get_constraint_for_1 (val, &tmp, address_p, lhs_p);
		  FOR_EACH_VEC_ELT (tmp, j, rhsp)
		    results->safe_push (*rhsp);
		  tmp.truncate (0);
		}
	      tmp.release ();
	      /* We do not know whether the constructor was complete,
	         so technically we have to add &NOTHING or &ANYTHING
		 like we do for an empty constructor as well.  */
	      return;
	    }
	  default:;
	  }
	break;
      }
    case tcc_declaration:
      {
	get_constraint_for_ssa_var (t, results, address_p);
	return;
      }
    case tcc_constant:
      {
	/* We cannot refer to automatic variables through constants.  */
	temp.type = ADDRESSOF;
	temp.var = nonlocal_id;
	temp.offset = 0;
	results->safe_push (temp);
	return;
      }
    default:;
    }

  /* The default fallback is a constraint from anything.  */
  temp.type = ADDRESSOF;
  temp.var = anything_id;
  temp.offset = 0;
  results->safe_push (temp);
}
3412 
3413 /* Given a gimple tree T, return the constraint expression vector for it.  */
3414 
3415 static void
get_constraint_for(tree t,vec<ce_s> * results)3416 get_constraint_for (tree t, vec<ce_s> *results)
3417 {
3418   gcc_assert (results->length () == 0);
3419 
3420   get_constraint_for_1 (t, results, false, true);
3421 }
3422 
3423 /* Given a gimple tree T, return the constraint expression vector for it
3424    to be used as the rhs of a constraint.  */
3425 
3426 static void
get_constraint_for_rhs(tree t,vec<ce_s> * results)3427 get_constraint_for_rhs (tree t, vec<ce_s> *results)
3428 {
3429   gcc_assert (results->length () == 0);
3430 
3431   get_constraint_for_1 (t, results, false, false);
3432 }
3433 
3434 
3435 /* Efficiently generates constraints from all entries in *RHSC to all
3436    entries in *LHSC.  */
3437 
3438 static void
process_all_all_constraints(vec<ce_s> lhsc,vec<ce_s> rhsc)3439 process_all_all_constraints (vec<ce_s> lhsc,
3440 			     vec<ce_s> rhsc)
3441 {
3442   struct constraint_expr *lhsp, *rhsp;
3443   unsigned i, j;
3444 
3445   if (lhsc.length () <= 1 || rhsc.length () <= 1)
3446     {
3447       FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3448 	FOR_EACH_VEC_ELT (rhsc, j, rhsp)
3449 	  process_constraint (new_constraint (*lhsp, *rhsp));
3450     }
3451   else
3452     {
3453       struct constraint_expr tmp;
3454       tmp = new_scalar_tmp_constraint_exp ("allalltmp");
3455       FOR_EACH_VEC_ELT (rhsc, i, rhsp)
3456 	process_constraint (new_constraint (tmp, *rhsp));
3457       FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3458 	process_constraint (new_constraint (*lhsp, tmp));
3459     }
3460 }
3461 
3462 /* Handle aggregate copies by expanding into copies of the respective
3463    fields of the structures.  */
3464 
static void
do_structure_copy (tree lhsop, tree rhsop)
{
  struct constraint_expr *lhsp, *rhsp;
  vec<ce_s> lhsc = vNULL;
  vec<ce_s> rhsc = vNULL;
  unsigned j;

  get_constraint_for (lhsop, &lhsc);
  get_constraint_for_rhs (rhsop, &rhsc);
  lhsp = &lhsc[0];
  rhsp = &rhsc[0];
  /* If either side is accessed indirectly (DEREF) or the destination
     is completely unknown we cannot match fields pairwise; fall back
     to the full cross product with unknown offsets.  */
  if (lhsp->type == DEREF
      || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
      || rhsp->type == DEREF)
    {
      if (lhsp->type == DEREF)
	{
	  gcc_assert (lhsc.length () == 1);
	  lhsp->offset = UNKNOWN_OFFSET;
	}
      if (rhsp->type == DEREF)
	{
	  gcc_assert (rhsc.length () == 1);
	  rhsp->offset = UNKNOWN_OFFSET;
	}
      process_all_all_constraints (lhsc, rhsc);
    }
  else if (lhsp->type == SCALAR
	   && (rhsp->type == SCALAR
	       || rhsp->type == ADDRESSOF))
    {
      HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
      HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
      unsigned k = 0;
      get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize);
      get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize);
      /* Walk the lhs and rhs sub-field constraint lists in parallel,
	 emitting a copy constraint for each pair of fields whose
	 ranges (adjusted by the access offsets) overlap.  */
      for (j = 0; lhsc.iterate (j, &lhsp);)
	{
	  varinfo_t lhsv, rhsv;
	  rhsp = &rhsc[k];
	  lhsv = get_varinfo (lhsp->var);
	  rhsv = get_varinfo (rhsp->var);
	  if (lhsv->may_have_pointers
	      && (lhsv->is_full_var
		  || rhsv->is_full_var
		  || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
				       rhsv->offset + lhsoffset, rhsv->size)))
	    process_constraint (new_constraint (*lhsp, *rhsp));
	  /* Advance the rhs cursor when the current lhs field extends
	     beyond the end of the current rhs field, otherwise move
	     on to the next lhs field.  */
	  if (!rhsv->is_full_var
	      && (lhsv->is_full_var
		  || (lhsv->offset + rhsoffset + lhsv->size
		      > rhsv->offset + lhsoffset + rhsv->size)))
	    {
	      ++k;
	      if (k >= rhsc.length ())
		break;
	    }
	  else
	    ++j;
	}
    }
  else
    gcc_unreachable ();

  lhsc.release ();
  rhsc.release ();
}
3533 
3534 /* Create constraints ID = { rhsc }.  */
3535 
3536 static void
make_constraints_to(unsigned id,vec<ce_s> rhsc)3537 make_constraints_to (unsigned id, vec<ce_s> rhsc)
3538 {
3539   struct constraint_expr *c;
3540   struct constraint_expr includes;
3541   unsigned int j;
3542 
3543   includes.var = id;
3544   includes.offset = 0;
3545   includes.type = SCALAR;
3546 
3547   FOR_EACH_VEC_ELT (rhsc, j, c)
3548     process_constraint (new_constraint (includes, *c));
3549 }
3550 
3551 /* Create a constraint ID = OP.  */
3552 
3553 static void
make_constraint_to(unsigned id,tree op)3554 make_constraint_to (unsigned id, tree op)
3555 {
3556   vec<ce_s> rhsc = vNULL;
3557   get_constraint_for_rhs (op, &rhsc);
3558   make_constraints_to (id, rhsc);
3559   rhsc.release ();
3560 }
3561 
3562 /* Create a constraint ID = &FROM.  */
3563 
3564 static void
make_constraint_from(varinfo_t vi,int from)3565 make_constraint_from (varinfo_t vi, int from)
3566 {
3567   struct constraint_expr lhs, rhs;
3568 
3569   lhs.var = vi->id;
3570   lhs.offset = 0;
3571   lhs.type = SCALAR;
3572 
3573   rhs.var = from;
3574   rhs.offset = 0;
3575   rhs.type = ADDRESSOF;
3576   process_constraint (new_constraint (lhs, rhs));
3577 }
3578 
3579 /* Create a constraint ID = FROM.  */
3580 
3581 static void
make_copy_constraint(varinfo_t vi,int from)3582 make_copy_constraint (varinfo_t vi, int from)
3583 {
3584   struct constraint_expr lhs, rhs;
3585 
3586   lhs.var = vi->id;
3587   lhs.offset = 0;
3588   lhs.type = SCALAR;
3589 
3590   rhs.var = from;
3591   rhs.offset = 0;
3592   rhs.type = SCALAR;
3593   process_constraint (new_constraint (lhs, rhs));
3594 }
3595 
3596 /* Make constraints necessary to make OP escape.  */
3597 
3598 static void
make_escape_constraint(tree op)3599 make_escape_constraint (tree op)
3600 {
3601   make_constraint_to (escaped_id, op);
3602 }
3603 
3604 /* Add constraints to that the solution of VI is transitively closed.  */
3605 
3606 static void
make_transitive_closure_constraints(varinfo_t vi)3607 make_transitive_closure_constraints (varinfo_t vi)
3608 {
3609   struct constraint_expr lhs, rhs;
3610 
3611   /* VAR = *VAR;  */
3612   lhs.type = SCALAR;
3613   lhs.var = vi->id;
3614   lhs.offset = 0;
3615   rhs.type = DEREF;
3616   rhs.var = vi->id;
3617   rhs.offset = 0;
3618   process_constraint (new_constraint (lhs, rhs));
3619 
3620   /* VAR = VAR + UNKNOWN;  */
3621   lhs.type = SCALAR;
3622   lhs.var = vi->id;
3623   lhs.offset = 0;
3624   rhs.type = SCALAR;
3625   rhs.var = vi->id;
3626   rhs.offset = UNKNOWN_OFFSET;
3627   process_constraint (new_constraint (lhs, rhs));
3628 }
3629 
3630 /* Temporary storage for fake var decls.  */
3631 struct obstack fake_var_decl_obstack;
3632 
3633 /* Build a fake VAR_DECL acting as referrer to a DECL_UID.  */
3634 
static tree
build_fake_var_decl (tree type)
{
  /* Allocate the node on the private obstack rather than GC memory;
     per the obstack's comment above these decls are temporary
     storage for the analysis.  */
  tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
  memset (decl, 0, sizeof (struct tree_var_decl));
  TREE_SET_CODE (decl, VAR_DECL);
  TREE_TYPE (decl) = type;
  DECL_UID (decl) = allocate_decl_uid ();
  /* Mark the PT_UID as unset (-1) so it acts as referrer to a real
     DECL_UID rather than having one of its own.  */
  SET_DECL_PT_UID (decl, -1);
  layout_decl (decl, 0);
  return decl;
}
3647 
3648 /* Create a new artificial heap variable with NAME.
3649    Return the created variable.  */
3650 
static varinfo_t
make_heapvar (const char *name)
{
  varinfo_t vi;
  tree heapvar;

  /* The heap variable is represented by a fake pointer-typed decl
     marked external.  */
  heapvar = build_fake_var_decl (ptr_type_node);
  DECL_EXTERNAL (heapvar) = 1;

  vi = new_var_info (heapvar, name);
  vi->is_artificial_var = true;
  vi->is_heap_var = true;
  vi->is_unknown_size_var = true;
  vi->offset = 0;
  /* The size is unknown; use all-ones and treat the variable as a
     single field.  */
  vi->fullsize = ~0;
  vi->size = ~0;
  vi->is_full_var = true;
  insert_vi_for_tree (heapvar, vi);

  return vi;
}
3672 
3673 /* Create a new artificial heap variable with NAME and make a
3674    constraint from it to LHS.  Set flags according to a tag used
3675    for tracking restrict pointers.  */
3676 
3677 static varinfo_t
make_constraint_from_restrict(varinfo_t lhs,const char * name)3678 make_constraint_from_restrict (varinfo_t lhs, const char *name)
3679 {
3680   varinfo_t vi = make_heapvar (name);
3681   vi->is_global_var = 1;
3682   vi->may_have_pointers = 1;
3683   make_constraint_from (lhs, vi->id);
3684   return vi;
3685 }
3686 
3687 /* Create a new artificial heap variable with NAME and make a
3688    constraint from it to LHS.  Set flags according to a tag used
3689    for tracking restrict pointers and make the artificial heap
3690    point to global memory.  */
3691 
3692 static varinfo_t
make_constraint_from_global_restrict(varinfo_t lhs,const char * name)3693 make_constraint_from_global_restrict (varinfo_t lhs, const char *name)
3694 {
3695   varinfo_t vi = make_constraint_from_restrict (lhs, name);
3696   make_copy_constraint (vi, nonlocal_id);
3697   return vi;
3698 }
3699 
/* In IPA mode there are varinfos for different aspects of each
   function designator.  One for the points-to set of the return
   value, one for the variables that are clobbered by the function,
   one for its uses and one for each parameter (including a single
   glob for remaining variadic arguments).  */
3705 
3706 enum { fi_clobbers = 1, fi_uses = 2,
3707        fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
3708 
3709 /* Get a constraint for the requested part of a function designator FI
3710    when operating in IPA mode.  */
3711 
3712 static struct constraint_expr
get_function_part_constraint(varinfo_t fi,unsigned part)3713 get_function_part_constraint (varinfo_t fi, unsigned part)
3714 {
3715   struct constraint_expr c;
3716 
3717   gcc_assert (in_ipa_mode);
3718 
3719   if (fi->id == anything_id)
3720     {
3721       /* ???  We probably should have a ANYFN special variable.  */
3722       c.var = anything_id;
3723       c.offset = 0;
3724       c.type = SCALAR;
3725     }
3726   else if (TREE_CODE (fi->decl) == FUNCTION_DECL)
3727     {
3728       varinfo_t ai = first_vi_for_offset (fi, part);
3729       if (ai)
3730 	c.var = ai->id;
3731       else
3732 	c.var = anything_id;
3733       c.offset = 0;
3734       c.type = SCALAR;
3735     }
3736   else
3737     {
3738       c.var = fi->id;
3739       c.offset = part;
3740       c.type = DEREF;
3741     }
3742 
3743   return c;
3744 }
3745 
3746 /* For non-IPA mode, generate constraints necessary for a call on the
3747    RHS.  */
3748 
static void
handle_rhs_call (gimple stmt, vec<ce_s> *results)
{
  struct constraint_expr rhsc;
  unsigned i;
  bool returns_uses = false;

  /* Process each argument according to its escape/clobber flags.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      int flags = gimple_call_arg_flags (stmt, i);

      /* If the argument is not used we can ignore it.  */
      if (flags & EAF_UNUSED)
	continue;

      /* As we compute ESCAPED context-insensitive we do not gain
         any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
	 set.  The argument would still get clobbered through the
	 escape solution.  */
      if ((flags & EAF_NOCLOBBER)
	   && (flags & EAF_NOESCAPE))
	{
	  /* Read-only, non-escaping argument: only the call-use
	     solution is affected.  */
	  varinfo_t uses = get_call_use_vi (stmt);
	  if (!(flags & EAF_DIRECT))
	    {
	      /* For indirect uses close the temporary transitively so
		 everything reachable from ARG is covered.  */
	      varinfo_t tem = new_var_info (NULL_TREE, "callarg");
	      make_constraint_to (tem->id, arg);
	      make_transitive_closure_constraints (tem);
	      make_copy_constraint (uses, tem->id);
	    }
	  else
	    make_constraint_to (uses->id, arg);
	  returns_uses = true;
	}
      else if (flags & EAF_NOESCAPE)
	{
	  /* Non-escaping but possibly clobbered argument: affects
	     both the call-use and call-clobber solutions.  */
	  struct constraint_expr lhs, rhs;
	  varinfo_t uses = get_call_use_vi (stmt);
	  varinfo_t clobbers = get_call_clobber_vi (stmt);
	  varinfo_t tem = new_var_info (NULL_TREE, "callarg");
	  make_constraint_to (tem->id, arg);
	  if (!(flags & EAF_DIRECT))
	    make_transitive_closure_constraints (tem);
	  make_copy_constraint (uses, tem->id);
	  make_copy_constraint (clobbers, tem->id);
	  /* Add *tem = nonlocal, do not add *tem = callused as
	     EAF_NOESCAPE parameters do not escape to other parameters
	     and all other uses appear in NONLOCAL as well.  */
	  lhs.type = DEREF;
	  lhs.var = tem->id;
	  lhs.offset = 0;
	  rhs.type = SCALAR;
	  rhs.var = nonlocal_id;
	  rhs.offset = 0;
	  process_constraint (new_constraint (lhs, rhs));
	  returns_uses = true;
	}
      else
	/* No useful flags: the argument escapes fully.  */
	make_escape_constraint (arg);
    }

  /* If we added to the calls uses solution make sure we account for
     pointers to it to be returned.  */
  if (returns_uses)
    {
      rhsc.var = get_call_use_vi (stmt)->id;
      rhsc.offset = 0;
      rhsc.type = SCALAR;
      results->safe_push (rhsc);
    }

  /* The static chain escapes as well.  */
  if (gimple_call_chain (stmt))
    make_escape_constraint (gimple_call_chain (stmt));

  /* And if we applied NRV the address of the return slot escapes as well.  */
  if (gimple_call_return_slot_opt_p (stmt)
      && gimple_call_lhs (stmt) != NULL_TREE
      && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
    {
      vec<ce_s> tmpc = vNULL;
      struct constraint_expr lhsc, *c;
      get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
      lhsc.var = escaped_id;
      lhsc.offset = 0;
      lhsc.type = SCALAR;
      FOR_EACH_VEC_ELT (tmpc, i, c)
	process_constraint (new_constraint (lhsc, *c));
      tmpc.release ();
    }

  /* Regular functions return nonlocal memory.  */
  rhsc.var = nonlocal_id;
  rhsc.offset = 0;
  rhsc.type = SCALAR;
  results->safe_push (rhsc);
}
3847 
3848 /* For non-IPA mode, generate constraints necessary for a call
3849    that returns a pointer and assigns it to LHS.  This simply makes
3850    the LHS point to global and escaped variables.  */
3851 
static void
handle_lhs_call (gimple stmt, tree lhs, int flags, vec<ce_s> rhsc,
		 tree fndecl)
{
  vec<ce_s> lhsc = vNULL;

  get_constraint_for (lhs, &lhsc);
  /* If the store is to a global decl make sure to
     add proper escape constraints.  */
  lhs = get_base_address (lhs);
  if (lhs
      && DECL_P (lhs)
      && is_global_var (lhs))
    {
      struct constraint_expr tmpc;
      tmpc.var = escaped_id;
      tmpc.offset = 0;
      tmpc.type = SCALAR;
      lhsc.safe_push (tmpc);
    }

  /* If the call returns an argument unmodified override the rhs
     constraints.  */
  /* NOTE(review): the incoming FLAGS value is never read -- it is
     unconditionally replaced by the call's return flags here.  */
  flags = gimple_call_return_flags (stmt);
  if (flags & ERF_RETURNS_ARG
      && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
    {
      tree arg;
      /* Replace the caller-provided RHSC (local by-value copy) with
	 the constraints of the returned argument.  */
      rhsc.create (0);
      arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
      get_constraint_for (arg, &rhsc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.release ();
    }
  else if (flags & ERF_NOALIAS)
    {
      varinfo_t vi;
      struct constraint_expr tmpc;
      rhsc.create (0);
      /* A noalias return is modeled as a fresh heap variable the
	 LHS points to.  */
      vi = make_heapvar ("HEAP");
      /* We delay marking allocated storage global until we know if
         it escapes.  */
      DECL_EXTERNAL (vi->decl) = 0;
      vi->is_global_var = 0;
      /* If this is not a real malloc call assume the memory was
	 initialized and thus may point to global memory.  All
	 builtin functions with the malloc attribute behave in a sane way.  */
      if (!fndecl
	  || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
	make_constraint_from (vi, nonlocal_id);
      tmpc.var = vi->id;
      tmpc.offset = 0;
      tmpc.type = ADDRESSOF;
      rhsc.safe_push (tmpc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.release ();
    }
  else
    /* Otherwise the LHS receives whatever the caller computed in RHSC.  */
    process_all_all_constraints (lhsc, rhsc);

  lhsc.release ();
}
3914 
3915 /* For non-IPA mode, generate constraints necessary for a call of a
3916    const function that returns a pointer in the statement STMT.  */
3917 
3918 static void
handle_const_call(gimple stmt,vec<ce_s> * results)3919 handle_const_call (gimple stmt, vec<ce_s> *results)
3920 {
3921   struct constraint_expr rhsc;
3922   unsigned int k;
3923 
3924   /* Treat nested const functions the same as pure functions as far
3925      as the static chain is concerned.  */
3926   if (gimple_call_chain (stmt))
3927     {
3928       varinfo_t uses = get_call_use_vi (stmt);
3929       make_transitive_closure_constraints (uses);
3930       make_constraint_to (uses->id, gimple_call_chain (stmt));
3931       rhsc.var = uses->id;
3932       rhsc.offset = 0;
3933       rhsc.type = SCALAR;
3934       results->safe_push (rhsc);
3935     }
3936 
3937   /* May return arguments.  */
3938   for (k = 0; k < gimple_call_num_args (stmt); ++k)
3939     {
3940       tree arg = gimple_call_arg (stmt, k);
3941       vec<ce_s> argc = vNULL;
3942       unsigned i;
3943       struct constraint_expr *argp;
3944       get_constraint_for_rhs (arg, &argc);
3945       FOR_EACH_VEC_ELT (argc, i, argp)
3946 	results->safe_push (*argp);
3947       argc.release ();
3948     }
3949 
3950   /* May return addresses of globals.  */
3951   rhsc.var = nonlocal_id;
3952   rhsc.offset = 0;
3953   rhsc.type = ADDRESSOF;
3954   results->safe_push (rhsc);
3955 }
3956 
3957 /* For non-IPA mode, generate constraints necessary for a call to a
3958    pure function in statement STMT.  */
3959 
3960 static void
handle_pure_call(gimple stmt,vec<ce_s> * results)3961 handle_pure_call (gimple stmt, vec<ce_s> *results)
3962 {
3963   struct constraint_expr rhsc;
3964   unsigned i;
3965   varinfo_t uses = NULL;
3966 
3967   /* Memory reached from pointer arguments is call-used.  */
3968   for (i = 0; i < gimple_call_num_args (stmt); ++i)
3969     {
3970       tree arg = gimple_call_arg (stmt, i);
3971       if (!uses)
3972 	{
3973 	  uses = get_call_use_vi (stmt);
3974 	  make_transitive_closure_constraints (uses);
3975 	}
3976       make_constraint_to (uses->id, arg);
3977     }
3978 
3979   /* The static chain is used as well.  */
3980   if (gimple_call_chain (stmt))
3981     {
3982       if (!uses)
3983 	{
3984 	  uses = get_call_use_vi (stmt);
3985 	  make_transitive_closure_constraints (uses);
3986 	}
3987       make_constraint_to (uses->id, gimple_call_chain (stmt));
3988     }
3989 
3990   /* Pure functions may return call-used and nonlocal memory.  */
3991   if (uses)
3992     {
3993       rhsc.var = uses->id;
3994       rhsc.offset = 0;
3995       rhsc.type = SCALAR;
3996       results->safe_push (rhsc);
3997     }
3998   rhsc.var = nonlocal_id;
3999   rhsc.offset = 0;
4000   rhsc.type = SCALAR;
4001   results->safe_push (rhsc);
4002 }
4003 
4004 
4005 /* Return the varinfo for the callee of CALL.  */
4006 
4007 static varinfo_t
get_fi_for_callee(gimple call)4008 get_fi_for_callee (gimple call)
4009 {
4010   tree decl, fn = gimple_call_fn (call);
4011 
4012   if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
4013     fn = OBJ_TYPE_REF_EXPR (fn);
4014 
4015   /* If we can directly resolve the function being called, do so.
4016      Otherwise, it must be some sort of indirect expression that
4017      we should still be able to handle.  */
4018   decl = gimple_call_addr_fndecl (fn);
4019   if (decl)
4020     return get_vi_for_tree (decl);
4021 
4022   /* If the function is anything other than a SSA name pointer we have no
4023      clue and should be getting ANYFN (well, ANYTHING for now).  */
4024   if (!fn || TREE_CODE (fn) != SSA_NAME)
4025     return get_varinfo (anything_id);
4026 
4027   if (SSA_NAME_IS_DEFAULT_DEF (fn)
4028       && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
4029 	  || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
4030     fn = SSA_NAME_VAR (fn);
4031 
4032   return get_vi_for_tree (fn);
4033 }
4034 
4035 /* Create constraints for the builtin call T.  Return true if the call
4036    was handled, otherwise false.  */
4037 
static bool
find_func_aliases_for_builtin_call (gimple t)
{
  tree fndecl = gimple_call_fndecl (t);
  vec<ce_s> lhsc = vNULL;
  vec<ce_s> rhsc = vNULL;
  varinfo_t fi;

  if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
    /* ???  All builtins that are handled here need to be handled
       in the alias-oracle query functions explicitly!  */
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      /* All the following functions return a pointer to the same object
	 as their first argument points to.  The functions do not add
	 to the ESCAPED solution.  The functions make the first argument
	 pointed to memory point to what the second argument pointed to
	 memory points to.  */
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_BCOPY:
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMPCPY:
      case BUILT_IN_STPCPY:
      case BUILT_IN_STPNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMPCPY_CHK:
      case BUILT_IN_STPCPY_CHK:
      case BUILT_IN_STPNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
      case BUILT_IN_TM_MEMCPY:
      case BUILT_IN_TM_MEMMOVE:
	{
	  tree res = gimple_call_lhs (t);
	  /* bcopy takes (src, dest); all others take (dest, src).  */
	  tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
					   == BUILT_IN_BCOPY ? 1 : 0));
	  tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
					  == BUILT_IN_BCOPY ? 0 : 1));
	  if (res != NULL_TREE)
	    {
	      get_constraint_for (res, &lhsc);
	      /* The *pcpy/stp* variants return DEST plus an offset,
		 so use the offsetted constraint for those.  */
	      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
		get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
	      else
		get_constraint_for (dest, &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	      lhsc.release ();
	      rhsc.release ();
	    }
	  /* Model the copy itself: *dest = *src.  */
	  get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	  get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
	  do_deref (&lhsc);
	  do_deref (&rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  lhsc.release ();
	  rhsc.release ();
	  return true;
	}
      case BUILT_IN_MEMSET:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_TM_MEMSET:
	{
	  tree res = gimple_call_lhs (t);
	  tree dest = gimple_call_arg (t, 0);
	  unsigned i;
	  ce_s *lhsp;
	  struct constraint_expr ac;
	  /* memset returns its first argument.  */
	  if (res != NULL_TREE)
	    {
	      get_constraint_for (res, &lhsc);
	      get_constraint_for (dest, &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	      lhsc.release ();
	      rhsc.release ();
	    }
	  get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	  do_deref (&lhsc);
	  /* When zero-filling and null is not a valid pointer target,
	     *dest points to NOTHING; otherwise treat the stored bytes
	     as arbitrary integer data.  */
	  if (flag_delete_null_pointer_checks
	      && integer_zerop (gimple_call_arg (t, 1)))
	    {
	      ac.type = ADDRESSOF;
	      ac.var = nothing_id;
	    }
	  else
	    {
	      ac.type = SCALAR;
	      ac.var = integer_id;
	    }
	  ac.offset = 0;
	  FOR_EACH_VEC_ELT (lhsc, i, lhsp)
	      process_constraint (new_constraint (*lhsp, ac));
	  lhsc.release ();
	  return true;
	}
      case BUILT_IN_ASSUME_ALIGNED:
	{
	  /* assume_aligned passes its first argument through; the
	     result points to whatever the argument points to.  */
	  tree res = gimple_call_lhs (t);
	  tree dest = gimple_call_arg (t, 0);
	  if (res != NULL_TREE)
	    {
	      get_constraint_for (res, &lhsc);
	      get_constraint_for (dest, &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	      lhsc.release ();
	      rhsc.release ();
	    }
	  return true;
	}
      /* All the following functions do not return pointers, do not
	 modify the points-to sets of memory reachable from their
	 arguments and do not add to the ESCAPED solution.  */
      case BUILT_IN_SINCOS:
      case BUILT_IN_SINCOSF:
      case BUILT_IN_SINCOSL:
      case BUILT_IN_FREXP:
      case BUILT_IN_FREXPF:
      case BUILT_IN_FREXPL:
      case BUILT_IN_GAMMA_R:
      case BUILT_IN_GAMMAF_R:
      case BUILT_IN_GAMMAL_R:
      case BUILT_IN_LGAMMA_R:
      case BUILT_IN_LGAMMAF_R:
      case BUILT_IN_LGAMMAL_R:
      case BUILT_IN_MODF:
      case BUILT_IN_MODFF:
      case BUILT_IN_MODFL:
      case BUILT_IN_REMQUO:
      case BUILT_IN_REMQUOF:
      case BUILT_IN_REMQUOL:
      case BUILT_IN_FREE:
	return true;
      case BUILT_IN_STRDUP:
      case BUILT_IN_STRNDUP:
	if (gimple_call_lhs (t))
	  {
	    /* The result is a fresh allocation (modeled via
	       handle_lhs_call) whose contents receive the source
	       string's contents: *lhs = *arg0.  */
	    handle_lhs_call (t, gimple_call_lhs (t), gimple_call_flags (t),
			     vNULL, fndecl);
	    get_constraint_for_ptr_offset (gimple_call_lhs (t),
					   NULL_TREE, &lhsc);
	    get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
					   NULL_TREE, &rhsc);
	    do_deref (&lhsc);
	    do_deref (&rhsc);
	    process_all_all_constraints (lhsc, rhsc);
	    lhsc.release ();
	    rhsc.release ();
	    return true;
	  }
	break;
      /* Trampolines are special - they set up passing the static
	 frame.  */
      case BUILT_IN_INIT_TRAMPOLINE:
	{
	  tree tramp = gimple_call_arg (t, 0);
	  tree nfunc = gimple_call_arg (t, 1);
	  tree frame = gimple_call_arg (t, 2);
	  unsigned i;
	  struct constraint_expr lhs, *rhsp;
	  if (in_ipa_mode)
	    {
	      varinfo_t nfi = NULL;
	      gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
	      nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
	      if (nfi)
		{
		  /* The nested function's static chain receives the
		     frame argument.  */
		  lhs = get_function_part_constraint (nfi, fi_static_chain);
		  get_constraint_for (frame, &rhsc);
		  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		      process_constraint (new_constraint (lhs, *rhsp));
		  rhsc.release ();

		  /* Make the frame point to the function for
		     the trampoline adjustment call.  */
		  get_constraint_for (tramp, &lhsc);
		  do_deref (&lhsc);
		  get_constraint_for (nfunc, &rhsc);
		  process_all_all_constraints (lhsc, rhsc);
		  rhsc.release ();
		  lhsc.release ();

		  return true;
		}
	    }
	  /* Else fallthru to generic handling which will let
	     the frame escape.  */
	  break;
	}
      case BUILT_IN_ADJUST_TRAMPOLINE:
	{
	  tree tramp = gimple_call_arg (t, 0);
	  tree res = gimple_call_lhs (t);
	  if (in_ipa_mode && res)
	    {
	      /* The result is the function the trampoline was
		 initialized with: res = *tramp.  */
	      get_constraint_for (res, &lhsc);
	      get_constraint_for (tramp, &rhsc);
	      do_deref (&rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	      rhsc.release ();
	      lhsc.release ();
	    }
	  return true;
	}
      CASE_BUILT_IN_TM_STORE (1):
      CASE_BUILT_IN_TM_STORE (2):
      CASE_BUILT_IN_TM_STORE (4):
      CASE_BUILT_IN_TM_STORE (8):
      CASE_BUILT_IN_TM_STORE (FLOAT):
      CASE_BUILT_IN_TM_STORE (DOUBLE):
      CASE_BUILT_IN_TM_STORE (LDOUBLE):
      CASE_BUILT_IN_TM_STORE (M64):
      CASE_BUILT_IN_TM_STORE (M128):
      CASE_BUILT_IN_TM_STORE (M256):
	{
	  /* A transactional store is modeled as *addr = src.  */
	  tree addr = gimple_call_arg (t, 0);
	  tree src = gimple_call_arg (t, 1);

	  get_constraint_for (addr, &lhsc);
	  do_deref (&lhsc);
	  get_constraint_for (src, &rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  lhsc.release ();
	  rhsc.release ();
	  return true;
	}
      CASE_BUILT_IN_TM_LOAD (1):
      CASE_BUILT_IN_TM_LOAD (2):
      CASE_BUILT_IN_TM_LOAD (4):
      CASE_BUILT_IN_TM_LOAD (8):
      CASE_BUILT_IN_TM_LOAD (FLOAT):
      CASE_BUILT_IN_TM_LOAD (DOUBLE):
      CASE_BUILT_IN_TM_LOAD (LDOUBLE):
      CASE_BUILT_IN_TM_LOAD (M64):
      CASE_BUILT_IN_TM_LOAD (M128):
      CASE_BUILT_IN_TM_LOAD (M256):
	{
	  /* A transactional load is modeled as dest = *addr.  */
	  tree dest = gimple_call_lhs (t);
	  tree addr = gimple_call_arg (t, 0);

	  get_constraint_for (dest, &lhsc);
	  get_constraint_for (addr, &rhsc);
	  do_deref (&rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  lhsc.release ();
	  rhsc.release ();
	  return true;
	}
      /* Variadic argument handling needs to be handled in IPA
	 mode as well.  */
      case BUILT_IN_VA_START:
	{
	  tree valist = gimple_call_arg (t, 0);
	  struct constraint_expr rhs, *lhsp;
	  unsigned i;
	  get_constraint_for (valist, &lhsc);
	  do_deref (&lhsc);
	  /* The va_list gets access to pointers in variadic
	     arguments.  Which we know in the case of IPA analysis
	     and otherwise are just all nonlocal variables.  */
	  if (in_ipa_mode)
	    {
	      fi = lookup_vi_for_tree (cfun->decl);
	      rhs = get_function_part_constraint (fi, ~0);
	      rhs.type = ADDRESSOF;
	    }
	  else
	    {
	      rhs.var = nonlocal_id;
	      rhs.type = ADDRESSOF;
	      rhs.offset = 0;
	    }
	  FOR_EACH_VEC_ELT (lhsc, i, lhsp)
	    process_constraint (new_constraint (*lhsp, rhs));
	  lhsc.release ();
	  /* va_list is clobbered.  */
	  make_constraint_to (get_call_clobber_vi (t)->id, valist);
	  return true;
	}
      /* va_end doesn't have any effect that matters.  */
      case BUILT_IN_VA_END:
	return true;
      /* Alternate return.  Simply give up for now.  */
      case BUILT_IN_RETURN:
	{
	  fi = NULL;
	  if (!in_ipa_mode
	      || !(fi = get_vi_for_tree (cfun->decl)))
	    make_constraint_from (get_varinfo (escaped_id), anything_id);
	  else if (in_ipa_mode
		   && fi != NULL)
	    {
	      /* In IPA mode record that the function may return
		 anything.  */
	      struct constraint_expr lhs, rhs;
	      lhs = get_function_part_constraint (fi, fi_result);
	      rhs.var = anything_id;
	      rhs.offset = 0;
	      rhs.type = SCALAR;
	      process_constraint (new_constraint (lhs, rhs));
	    }
	  return true;
	}
      /* printf-style functions may have hooks to set pointers to
	 point to somewhere into the generated string.  Leave them
	 for a later exercise...  */
      default:
	/* Fallthru to general call handling.  */;
      }

  return false;
}
4358 
4359 /* Create constraints for the call T.  */
4360 
static void
find_func_aliases_for_call (gimple t)
{
  tree fndecl = gimple_call_fndecl (t);
  vec<ce_s> lhsc = vNULL;
  vec<ce_s> rhsc = vNULL;
  varinfo_t fi;

  /* Known builtins get precise special-case handling first.  */
  if (fndecl != NULL_TREE
      && DECL_BUILT_IN (fndecl)
      && find_func_aliases_for_builtin_call (t))
    return;

  fi = get_fi_for_callee (t);
  if (!in_ipa_mode
      || (fndecl && !fi->is_fn_info))
    {
      /* Non-IPA handling: classify the call by its ECF flags.
	 NOTE(review): this declaration shadows the function-scope
	 RHSC above; the outer vector stays unused on this path.  */
      vec<ce_s> rhsc = vNULL;
      int flags = gimple_call_flags (t);

      /* Const functions can return their arguments and addresses
	 of global memory but not of escaped memory.  */
      if (flags & (ECF_CONST|ECF_NOVOPS))
	{
	  if (gimple_call_lhs (t))
	    handle_const_call (t, &rhsc);
	}
      /* Pure functions can return addresses in and of memory
	 reachable from their arguments, but they are not an escape
	 point for reachable memory of their arguments.  */
      else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
	handle_pure_call (t, &rhsc);
      else
	handle_rhs_call (t, &rhsc);
      if (gimple_call_lhs (t))
	handle_lhs_call (t, gimple_call_lhs (t), flags, rhsc, fndecl);
      rhsc.release ();
    }
  else
    {
      /* IPA mode with function info for the callee: wire arguments,
	 return value and static chain to the callee's parts.  */
      tree lhsop;
      unsigned j;

      /* Assign all the passed arguments to the appropriate incoming
	 parameters of the function.  */
      for (j = 0; j < gimple_call_num_args (t); j++)
	{
	  struct constraint_expr lhs ;
	  struct constraint_expr *rhsp;
	  tree arg = gimple_call_arg (t, j);

	  get_constraint_for_rhs (arg, &rhsc);
	  lhs = get_function_part_constraint (fi, fi_parm_base + j);
	  /* Drain RHSC while processing so it is empty for the next
	     argument.  */
	  while (rhsc.length () != 0)
	    {
	      rhsp = &rhsc.last ();
	      process_constraint (new_constraint (lhs, *rhsp));
	      rhsc.pop ();
	    }
	}

      /* If we are returning a value, assign it to the result.  */
      lhsop = gimple_call_lhs (t);
      if (lhsop)
	{
	  struct constraint_expr rhs;
	  struct constraint_expr *lhsp;

	  get_constraint_for (lhsop, &lhsc);
	  rhs = get_function_part_constraint (fi, fi_result);
	  /* A DECL_BY_REFERENCE result is returned through a pointer;
	     dereference the result part in that case.  */
	  if (fndecl
	      && DECL_RESULT (fndecl)
	      && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
	    {
	      vec<ce_s> tem = vNULL;
	      tem.safe_push (rhs);
	      do_deref (&tem);
	      rhs = tem[0];
	      tem.release ();
	    }
	  FOR_EACH_VEC_ELT (lhsc, j, lhsp)
	    process_constraint (new_constraint (*lhsp, rhs));
	}

      /* If we pass the result decl by reference, honor that.  */
      if (lhsop
	  && fndecl
	  && DECL_RESULT (fndecl)
	  && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
	{
	  struct constraint_expr lhs;
	  struct constraint_expr *rhsp;

	  get_constraint_for_address_of (lhsop, &rhsc);
	  lhs = get_function_part_constraint (fi, fi_result);
	  FOR_EACH_VEC_ELT (rhsc, j, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	  rhsc.release ();
	}

      /* If we use a static chain, pass it along.  */
      if (gimple_call_chain (t))
	{
	  struct constraint_expr lhs;
	  struct constraint_expr *rhsp;

	  get_constraint_for (gimple_call_chain (t), &rhsc);
	  lhs = get_function_part_constraint (fi, fi_static_chain);
	  FOR_EACH_VEC_ELT (rhsc, j, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	}
    }
}
4474 
4475 /* Walk statement T setting up aliasing constraints according to the
4476    references found in T.  This function is the main part of the
4477    constraint builder.  AI points to auxiliary alias information used
4478    when building alias sets and computing alias grouping heuristics.  */
4479 
static void
find_func_aliases (gimple origt)
{
  gimple t = origt;
  vec<ce_s> lhsc = vNULL;
  vec<ce_s> rhsc = vNULL;
  struct constraint_expr *c;
  varinfo_t fi;

  /* Now build constraints expressions.  */
  if (gimple_code (t) == GIMPLE_PHI)
    {
      size_t i;
      unsigned int j;

      /* For a phi node, assign all the arguments to
	 the result.  */
      get_constraint_for (gimple_phi_result (t), &lhsc);
      for (i = 0; i < gimple_phi_num_args (t); i++)
	{
	  tree strippedrhs = PHI_ARG_DEF (t, i);

	  STRIP_NOPS (strippedrhs);
	  get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);

	  FOR_EACH_VEC_ELT (lhsc, j, c)
	    {
	      struct constraint_expr *c2;
	      /* Drain RHSC while processing so it is empty for the
		 next phi argument.  */
	      while (rhsc.length () > 0)
		{
		  c2 = &rhsc.last ();
		  process_constraint (new_constraint (*c, *c2));
		  rhsc.pop ();
		}
	    }
	}
    }
  /* In IPA mode, we need to generate constraints to pass call
     arguments through their calls.   There are two cases,
     either a GIMPLE_CALL returning a value, or just a plain
     GIMPLE_CALL when we are not.

     In non-ipa mode, we need to generate constraints for each
     pointer passed by address.  */
  else if (is_gimple_call (t))
    find_func_aliases_for_call (t);

  /* Otherwise, just a regular assignment statement.  Only care about
     operations with pointer result, others are dealt with as escape
     points if they have pointer operands.  */
  else if (is_gimple_assign (t))
    {
      /* Otherwise, just a regular assignment statement.  */
      tree lhsop = gimple_assign_lhs (t);
      tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;

      if (rhsop && TREE_CLOBBER_P (rhsop))
	/* Ignore clobbers, they don't actually store anything into
	   the LHS.  */
	;
      else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
	do_structure_copy (lhsop, rhsop);
      else
	{
	  enum tree_code code = gimple_assign_rhs_code (t);

	  get_constraint_for (lhsop, &lhsc);

	  if (code == POINTER_PLUS_EXPR)
	    get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
					   gimple_assign_rhs2 (t), &rhsc);
	  else if (code == BIT_AND_EXPR
		   && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
	    {
	      /* Aligning a pointer via a BIT_AND_EXPR is offsetting
		 the pointer.  Handle it by offsetting it by UNKNOWN.  */
	      get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
					     NULL_TREE, &rhsc);
	    }
	  else if ((CONVERT_EXPR_CODE_P (code)
		    && !(POINTER_TYPE_P (gimple_expr_type (t))
			 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
		   || gimple_assign_single_p (t))
	    /* Pointer-preserving conversions and plain copies pass
	       the rhs constraints through unchanged.  */
	    get_constraint_for_rhs (rhsop, &rhsc);
	  else if (code == COND_EXPR)
	    {
	      /* The result is a merge of both COND_EXPR arms.  */
	      vec<ce_s> tmp = vNULL;
	      struct constraint_expr *rhsp;
	      unsigned i;
	      get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
	      get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
	      FOR_EACH_VEC_ELT (tmp, i, rhsp)
		rhsc.safe_push (*rhsp);
	      tmp.release ();
	    }
	  else if (truth_value_p (code))
	    /* Truth value results are not pointer (parts).  Or at least
	       very very unreasonable obfuscation of a part.  */
	    ;
	  else
	    {
	      /* All other operations are merges.  */
	      vec<ce_s> tmp = vNULL;
	      struct constraint_expr *rhsp;
	      unsigned i, j;
	      get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
	      for (i = 2; i < gimple_num_ops (t); ++i)
		{
		  get_constraint_for_rhs (gimple_op (t, i), &tmp);
		  FOR_EACH_VEC_ELT (tmp, j, rhsp)
		    rhsc.safe_push (*rhsp);
		  tmp.truncate (0);
		}
	      tmp.release ();
	    }
	  process_all_all_constraints (lhsc, rhsc);
	}
      /* If there is a store to a global variable the rhs escapes.  */
      if ((lhsop = get_base_address (lhsop)) != NULL_TREE
	  && DECL_P (lhsop)
	  && is_global_var (lhsop)
	  && (!in_ipa_mode
	      || DECL_EXTERNAL (lhsop) || TREE_PUBLIC (lhsop)))
	make_escape_constraint (rhsop);
    }
  /* Handle escapes through return.  */
  else if (gimple_code (t) == GIMPLE_RETURN
	   && gimple_return_retval (t) != NULL_TREE)
    {
      fi = NULL;
      if (!in_ipa_mode
	  || !(fi = get_vi_for_tree (cfun->decl)))
	make_escape_constraint (gimple_return_retval (t));
      else if (in_ipa_mode
	       && fi != NULL)
	{
	  /* In IPA mode assign the return value to the current
	     function's result part instead of letting it escape.  */
	  struct constraint_expr lhs ;
	  struct constraint_expr *rhsp;
	  unsigned i;

	  lhs = get_function_part_constraint (fi, fi_result);
	  get_constraint_for_rhs (gimple_return_retval (t), &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	}
    }
  /* Handle asms conservatively by adding escape constraints to everything.  */
  else if (gimple_code (t) == GIMPLE_ASM)
    {
      unsigned i, noutputs;
      const char **oconstraints;
      const char *constraint;
      bool allows_mem, allows_reg, is_inout;

      noutputs = gimple_asm_noutputs (t);
      oconstraints = XALLOCAVEC (const char *, noutputs);

      for (i = 0; i < noutputs; ++i)
	{
	  tree link = gimple_asm_output_op (t, i);
	  tree op = TREE_VALUE (link);

	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	  oconstraints[i] = constraint;
	  parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
				   &allows_reg, &is_inout);

	  /* A memory constraint makes the address of the operand escape.  */
	  if (!allows_reg && allows_mem)
	    make_escape_constraint (build_fold_addr_expr (op));

	  /* The asm may read global memory, so outputs may point to
	     any global memory.  */
	  if (op)
	    {
	      /* NOTE(review): this LHSC shadows the function-scope one
		 declared above; it is released per iteration.  */
	      vec<ce_s> lhsc = vNULL;
	      struct constraint_expr rhsc, *lhsp;
	      unsigned j;
	      get_constraint_for (op, &lhsc);
	      rhsc.var = nonlocal_id;
	      rhsc.offset = 0;
	      rhsc.type = SCALAR;
	      FOR_EACH_VEC_ELT (lhsc, j, lhsp)
		process_constraint (new_constraint (*lhsp, rhsc));
	      lhsc.release ();
	    }
	}
      for (i = 0; i < gimple_asm_ninputs (t); ++i)
	{
	  tree link = gimple_asm_input_op (t, i);
	  tree op = TREE_VALUE (link);

	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));

	  parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
				  &allows_mem, &allows_reg);

	  /* A memory constraint makes the address of the operand escape.  */
	  if (!allows_reg && allows_mem)
	    make_escape_constraint (build_fold_addr_expr (op));
	  /* Strictly we'd only need the constraint to ESCAPED if
	     the asm clobbers memory, otherwise using something
	     along the lines of per-call clobbers/uses would be enough.  */
	  else if (op)
	    make_escape_constraint (op);
	}
    }

  rhsc.release ();
  lhsc.release ();
}
4692 
4693 
4694 /* Create a constraint adding to the clobber set of FI the memory
4695    pointed to by PTR.  */
4696 
4697 static void
process_ipa_clobber(varinfo_t fi,tree ptr)4698 process_ipa_clobber (varinfo_t fi, tree ptr)
4699 {
4700   vec<ce_s> ptrc = vNULL;
4701   struct constraint_expr *c, lhs;
4702   unsigned i;
4703   get_constraint_for_rhs (ptr, &ptrc);
4704   lhs = get_function_part_constraint (fi, fi_clobbers);
4705   FOR_EACH_VEC_ELT (ptrc, i, c)
4706     process_constraint (new_constraint (lhs, *c));
4707   ptrc.release ();
4708 }
4709 
/* Walk statement T setting up clobber and use constraints according to the
   references found in T.  This function is a main part of the
   IPA constraint builder.  */

static void
find_func_clobbers (gimple origt)
{
  gimple t = origt;
  vec<ce_s> lhsc = vNULL;
  vec<ce_s> rhsc = vNULL;
  varinfo_t fi;

  /* Add constraints for clobbered/used in IPA mode.
     We are not interested in what automatic variables are clobbered
     or used as we only use the information in the caller to which
     they do not escape.  */
  gcc_assert (in_ipa_mode);

  /* If the stmt refers to memory in any way it better had a VUSE.  */
  if (gimple_vuse (t) == NULL_TREE)
    return;

  /* We'd better have function information for the current function.  */
  fi = lookup_vi_for_tree (cfun->decl);
  gcc_assert (fi != NULL);

  /* Account for stores in assignments and calls.  */
  if (gimple_vdef (t) != NULL_TREE
      && gimple_has_lhs (t))
    {
      tree lhs = gimple_get_lhs (t);
      tree tem = lhs;
      /* Strip handled components to get at the base of the reference.  */
      while (handled_component_p (tem))
	tem = TREE_OPERAND (tem, 0);
      /* Stores to function-local automatic variables are of no interest
	 to the caller; only non-automatic decls and indirect stores
	 (that do not go through the address of an automatic) matter.  */
      if ((DECL_P (tem)
	   && !auto_var_in_fn_p (tem, cfun->decl))
	  || INDIRECT_REF_P (tem)
	  || (TREE_CODE (tem) == MEM_REF
	      && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
		   && auto_var_in_fn_p
		        (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
	{
	  struct constraint_expr lhsc, *rhsp;
	  unsigned i;
	  /* Everything the address of LHS may point to is clobbered
	     by this function.  */
	  lhsc = get_function_part_constraint (fi, fi_clobbers);
	  get_constraint_for_address_of (lhs, &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (new_constraint (lhsc, *rhsp));
	  rhsc.release ();
	}
    }

  /* Account for uses in assignments and returns.  */
  if (gimple_assign_single_p (t)
      || (gimple_code (t) == GIMPLE_RETURN
	  && gimple_return_retval (t) != NULL_TREE))
    {
      tree rhs = (gimple_assign_single_p (t)
		  ? gimple_assign_rhs1 (t) : gimple_return_retval (t));
      tree tem = rhs;
      /* Strip handled components to get at the base of the reference.  */
      while (handled_component_p (tem))
	tem = TREE_OPERAND (tem, 0);
      /* As for stores above, uses of function-local automatics are
	 not interesting to the caller.  */
      if ((DECL_P (tem)
	   && !auto_var_in_fn_p (tem, cfun->decl))
	  || INDIRECT_REF_P (tem)
	  || (TREE_CODE (tem) == MEM_REF
	      && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
		   && auto_var_in_fn_p
		        (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
	{
	  struct constraint_expr lhs, *rhsp;
	  unsigned i;
	  /* Everything the address of RHS may point to is used by
	     this function.  */
	  lhs = get_function_part_constraint (fi, fi_uses);
	  get_constraint_for_address_of (rhs, &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	  rhsc.release ();
	}
    }

  if (is_gimple_call (t))
    {
      varinfo_t cfi = NULL;
      tree decl = gimple_call_fndecl (t);
      struct constraint_expr lhs, rhs;
      unsigned i, j;

      /* For builtins we do not have separate function info.  For those
	 we do not generate escapes for we have to generate clobbers/uses.  */
      if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (decl))
	  {
	  /* The following functions use and clobber memory pointed to
	     by their arguments.  */
	  case BUILT_IN_STRCPY:
	  case BUILT_IN_STRNCPY:
	  case BUILT_IN_BCOPY:
	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMMOVE:
	  case BUILT_IN_MEMPCPY:
	  case BUILT_IN_STPCPY:
	  case BUILT_IN_STPNCPY:
	  case BUILT_IN_STRCAT:
	  case BUILT_IN_STRNCAT:
	  case BUILT_IN_STRCPY_CHK:
	  case BUILT_IN_STRNCPY_CHK:
	  case BUILT_IN_MEMCPY_CHK:
	  case BUILT_IN_MEMMOVE_CHK:
	  case BUILT_IN_MEMPCPY_CHK:
	  case BUILT_IN_STPCPY_CHK:
	  case BUILT_IN_STPNCPY_CHK:
	  case BUILT_IN_STRCAT_CHK:
	  case BUILT_IN_STRNCAT_CHK:
	    {
	      /* bcopy has its source and destination arguments swapped
		 relative to the other copy builtins.  */
	      tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
					       == BUILT_IN_BCOPY ? 1 : 0));
	      tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
					      == BUILT_IN_BCOPY ? 0 : 1));
	      unsigned i;
	      struct constraint_expr *rhsp, *lhsp;
	      /* The destination memory is clobbered ...  */
	      get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	      lhs = get_function_part_constraint (fi, fi_clobbers);
	      FOR_EACH_VEC_ELT (lhsc, i, lhsp)
		process_constraint (new_constraint (lhs, *lhsp));
	      lhsc.release ();
	      /* ... and the source memory is used.  */
	      get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
	      lhs = get_function_part_constraint (fi, fi_uses);
	      FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      rhsc.release ();
	      return;
	    }
	  /* The following function clobbers memory pointed to by
	     its argument.  */
	  case BUILT_IN_MEMSET:
	  case BUILT_IN_MEMSET_CHK:
	    {
	      tree dest = gimple_call_arg (t, 0);
	      unsigned i;
	      ce_s *lhsp;
	      get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	      lhs = get_function_part_constraint (fi, fi_clobbers);
	      FOR_EACH_VEC_ELT (lhsc, i, lhsp)
		process_constraint (new_constraint (lhs, *lhsp));
	      lhsc.release ();
	      return;
	    }
	  /* The following functions clobber their second and third
	     arguments.  */
	  case BUILT_IN_SINCOS:
	  case BUILT_IN_SINCOSF:
	  case BUILT_IN_SINCOSL:
	    {
	      process_ipa_clobber (fi, gimple_call_arg (t, 1));
	      process_ipa_clobber (fi, gimple_call_arg (t, 2));
	      return;
	    }
	  /* The following functions clobber their second argument.  */
	  case BUILT_IN_FREXP:
	  case BUILT_IN_FREXPF:
	  case BUILT_IN_FREXPL:
	  case BUILT_IN_LGAMMA_R:
	  case BUILT_IN_LGAMMAF_R:
	  case BUILT_IN_LGAMMAL_R:
	  case BUILT_IN_GAMMA_R:
	  case BUILT_IN_GAMMAF_R:
	  case BUILT_IN_GAMMAL_R:
	  case BUILT_IN_MODF:
	  case BUILT_IN_MODFF:
	  case BUILT_IN_MODFL:
	    {
	      process_ipa_clobber (fi, gimple_call_arg (t, 1));
	      return;
	    }
	  /* The following functions clobber their third argument.  */
	  case BUILT_IN_REMQUO:
	  case BUILT_IN_REMQUOF:
	  case BUILT_IN_REMQUOL:
	    {
	      process_ipa_clobber (fi, gimple_call_arg (t, 2));
	      return;
	    }
	  /* The following functions neither read nor clobber memory.  */
	  case BUILT_IN_ASSUME_ALIGNED:
	  case BUILT_IN_FREE:
	    return;
	  /* Trampolines are of no interest to us.  */
	  case BUILT_IN_INIT_TRAMPOLINE:
	  case BUILT_IN_ADJUST_TRAMPOLINE:
	    return;
	  case BUILT_IN_VA_START:
	  case BUILT_IN_VA_END:
	    return;
	  /* printf-style functions may have hooks to set pointers to
	     point to somewhere into the generated string.  Leave them
	     for a later exercise...  */
	  default:
	    /* Fallthru to general call handling.  */;
	  }

      /* Parameters passed by value are used.  */
      lhs = get_function_part_constraint (fi, fi_uses);
      for (i = 0; i < gimple_call_num_args (t); i++)
	{
	  struct constraint_expr *rhsp;
	  tree arg = gimple_call_arg (t, i);

	  /* SSA names and invariants carry no memory of their own.  */
	  if (TREE_CODE (arg) == SSA_NAME
	      || is_gimple_min_invariant (arg))
	    continue;

	  get_constraint_for_address_of (arg, &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, j, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	  rhsc.release ();
	}

      /* Build constraints for propagating clobbers/uses along the
	 callgraph edges.  */
      cfi = get_fi_for_callee (t);
      if (cfi->id == anything_id)
	{
	  /* Unknown callee - conservatively clobber and use anything.  */
	  if (gimple_vdef (t))
	    make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
				  anything_id);
	  make_constraint_from (first_vi_for_offset (fi, fi_uses),
				anything_id);
	  return;
	}

      /* For callees without function info (that's external functions),
	 ESCAPED is clobbered and used.  */
      if (gimple_call_fndecl (t)
	  && !cfi->is_fn_info)
	{
	  varinfo_t vi;

	  if (gimple_vdef (t))
	    make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
				  escaped_id);
	  make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);

	  /* Also honor the call statement use/clobber info.  */
	  if ((vi = lookup_call_clobber_vi (t)) != NULL)
	    make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
				  vi->id);
	  if ((vi = lookup_call_use_vi (t)) != NULL)
	    make_copy_constraint (first_vi_for_offset (fi, fi_uses),
				  vi->id);
	  return;
	}

      /* Otherwise the caller clobbers and uses what the callee does.
	 ???  This should use a new complex constraint that filters
	 local variables of the callee.  */
      if (gimple_vdef (t))
	{
	  lhs = get_function_part_constraint (fi, fi_clobbers);
	  rhs = get_function_part_constraint (cfi, fi_clobbers);
	  process_constraint (new_constraint (lhs, rhs));
	}
      lhs = get_function_part_constraint (fi, fi_uses);
      rhs = get_function_part_constraint (cfi, fi_uses);
      process_constraint (new_constraint (lhs, rhs));
    }
  else if (gimple_code (t) == GIMPLE_ASM)
    {
      /* ???  Ick.  We can do better.  */
      if (gimple_vdef (t))
	make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
			      anything_id);
      make_constraint_from (first_vi_for_offset (fi, fi_uses),
			    anything_id);
    }

  rhsc.release ();
}
4987 
4988 
4989 /* Find the first varinfo in the same variable as START that overlaps with
4990    OFFSET.  Return NULL if we can't find one.  */
4991 
4992 static varinfo_t
first_vi_for_offset(varinfo_t start,unsigned HOST_WIDE_INT offset)4993 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
4994 {
4995   /* If the offset is outside of the variable, bail out.  */
4996   if (offset >= start->fullsize)
4997     return NULL;
4998 
4999   /* If we cannot reach offset from start, lookup the first field
5000      and start from there.  */
5001   if (start->offset > offset)
5002     start = lookup_vi_for_tree (start->decl);
5003 
5004   while (start)
5005     {
5006       /* We may not find a variable in the field list with the actual
5007 	 offset when when we have glommed a structure to a variable.
5008 	 In that case, however, offset should still be within the size
5009 	 of the variable. */
5010       if (offset >= start->offset
5011 	  && (offset - start->offset) < start->size)
5012 	return start;
5013 
5014       start= start->next;
5015     }
5016 
5017   return NULL;
5018 }
5019 
5020 /* Find the first varinfo in the same variable as START that overlaps with
5021    OFFSET.  If there is no such varinfo the varinfo directly preceding
5022    OFFSET is returned.  */
5023 
5024 static varinfo_t
first_or_preceding_vi_for_offset(varinfo_t start,unsigned HOST_WIDE_INT offset)5025 first_or_preceding_vi_for_offset (varinfo_t start,
5026 				  unsigned HOST_WIDE_INT offset)
5027 {
5028   /* If we cannot reach offset from start, lookup the first field
5029      and start from there.  */
5030   if (start->offset > offset)
5031     start = lookup_vi_for_tree (start->decl);
5032 
5033   /* We may not find a variable in the field list with the actual
5034      offset when when we have glommed a structure to a variable.
5035      In that case, however, offset should still be within the size
5036      of the variable.
5037      If we got beyond the offset we look for return the field
5038      directly preceding offset which may be the last field.  */
5039   while (start->next
5040 	 && offset >= start->offset
5041 	 && !((offset - start->offset) < start->size))
5042     start = start->next;
5043 
5044   return start;
5045 }
5046 
5047 
/* This structure is used during pushing fields onto the fieldstack
   to track the offset of the field, since bitpos_of_field gives it
   relative to its immediate containing type, and we want it relative
   to the ultimate containing object.  */

struct fieldoff
{
  /* Offset from the base of the base containing object to this field.  */
  HOST_WIDE_INT offset;

  /* Size, in bits, of the field.  */
  unsigned HOST_WIDE_INT size;

  /* True if the size of this field is not a compile-time constant.  */
  unsigned has_unknown_size : 1;

  /* True if the field's type is guaranteed to contain pointers.  */
  unsigned must_have_pointers : 1;

  /* True if the field may contain pointers.  */
  unsigned may_have_pointers : 1;

  /* True if the field is a restrict-qualified pointer.  */
  unsigned only_restrict_pointers : 1;
};
typedef struct fieldoff fieldoff_s;
5070 
5071 
5072 /* qsort comparison function for two fieldoff's PA and PB */
5073 
5074 static int
fieldoff_compare(const void * pa,const void * pb)5075 fieldoff_compare (const void *pa, const void *pb)
5076 {
5077   const fieldoff_s *foa = (const fieldoff_s *)pa;
5078   const fieldoff_s *fob = (const fieldoff_s *)pb;
5079   unsigned HOST_WIDE_INT foasize, fobsize;
5080 
5081   if (foa->offset < fob->offset)
5082     return -1;
5083   else if (foa->offset > fob->offset)
5084     return 1;
5085 
5086   foasize = foa->size;
5087   fobsize = fob->size;
5088   if (foasize < fobsize)
5089     return -1;
5090   else if (foasize > fobsize)
5091     return 1;
5092   return 0;
5093 }
5094 
/* Sort a fieldstack according to the field offset and sizes, using
   fieldoff_compare as the ordering predicate.  */

static void
sort_fieldstack (vec<fieldoff_s> fieldstack)
{
  fieldstack.qsort (fieldoff_compare);
}
5101 
5102 /* Return true if T is a type that can have subvars.  */
5103 
5104 static inline bool
type_can_have_subvars(const_tree t)5105 type_can_have_subvars (const_tree t)
5106 {
5107   /* Aggregates without overlapping fields can have subvars.  */
5108   return TREE_CODE (t) == RECORD_TYPE;
5109 }
5110 
5111 /* Return true if V is a tree that we can have subvars for.
5112    Normally, this is any aggregate type.  Also complex
5113    types which are not gimple registers can have subvars.  */
5114 
5115 static inline bool
var_can_have_subvars(const_tree v)5116 var_can_have_subvars (const_tree v)
5117 {
5118   /* Volatile variables should never have subvars.  */
5119   if (TREE_THIS_VOLATILE (v))
5120     return false;
5121 
5122   /* Non decls or memory tags can never have subvars.  */
5123   if (!DECL_P (v))
5124     return false;
5125 
5126   return type_can_have_subvars (TREE_TYPE (v));
5127 }
5128 
5129 /* Return true if T is a type that does contain pointers.  */
5130 
5131 static bool
type_must_have_pointers(tree type)5132 type_must_have_pointers (tree type)
5133 {
5134   if (POINTER_TYPE_P (type))
5135     return true;
5136 
5137   if (TREE_CODE (type) == ARRAY_TYPE)
5138     return type_must_have_pointers (TREE_TYPE (type));
5139 
5140   /* A function or method can have pointers as arguments, so track
5141      those separately.  */
5142   if (TREE_CODE (type) == FUNCTION_TYPE
5143       || TREE_CODE (type) == METHOD_TYPE)
5144     return true;
5145 
5146   return false;
5147 }
5148 
/* Return true if the type of field T is guaranteed to contain
   pointers.  Convenience wrapper around type_must_have_pointers.  */

static bool
field_must_have_pointers (tree t)
{
  return type_must_have_pointers (TREE_TYPE (t));
}
5154 
/* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
   the fields of TYPE onto fieldstack, recording their offsets along
   the way.

   OFFSET is used to keep track of the offset in this entire
   structure, rather than just the immediately containing structure.
   Returns false if the caller is supposed to handle the field we
   recursed for.  */

static bool
push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
			     HOST_WIDE_INT offset)
{
  tree field;
  bool empty_p = true;

  /* Only RECORD_TYPEs can have subvariables at all.  */
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  /* If the vector of fields is growing too big, bail out early.
     Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
     sure this fails.  */
  if (fieldstack->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      {
	bool push = false;
	/* Bit offset of FIELD relative to the start of the whole object.  */
	HOST_WIDE_INT foff = bitpos_of_field (field);

	/* Fields that cannot have subvars themselves, and unions, are
	   pushed as a single entry; otherwise recurse and only push
	   the field itself if the recursion pushed nothing and the
	   field has nonzero size.  */
	if (!var_can_have_subvars (field)
	    || TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
	    || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
	  push = true;
	else if (!push_fields_onto_fieldstack
		    (TREE_TYPE (field), fieldstack, offset + foff)
		 && (DECL_SIZE (field)
		     && !integer_zerop (DECL_SIZE (field))))
	  /* Empty structures may have actual size, like in C++.  So
	     see if we didn't push any subfields and the size is
	     nonzero, push the field onto the stack.  */
	  push = true;

	if (push)
	  {
	    fieldoff_s *pair = NULL;
	    bool has_unknown_size = false;
	    bool must_have_pointers_p;

	    if (!fieldstack->is_empty ())
	      pair = &fieldstack->last ();

	    /* If there isn't anything at offset zero, create sth.
	       The entry covers the bits [0, offset + foff) and is
	       known to contain no pointers (all flags false).  */
	    if (!pair
		&& offset + foff != 0)
	      {
		fieldoff_s e = {0, offset + foff, false, false, false, false};
		pair = fieldstack->safe_push (e);
	      }

	    if (!DECL_SIZE (field)
		|| !host_integerp (DECL_SIZE (field), 1))
	      has_unknown_size = true;

	    /* If adjacent fields do not contain pointers merge them.
	       Merging is only safe when both sizes are known, neither
	       side must have pointers, and the fields abut exactly.  */
	    must_have_pointers_p = field_must_have_pointers (field);
	    if (pair
		&& !has_unknown_size
		&& !must_have_pointers_p
		&& !pair->must_have_pointers
		&& !pair->has_unknown_size
		&& pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
	      {
		/* Grow the preceding entry to cover this field too.  */
		pair->size += TREE_INT_CST_LOW (DECL_SIZE (field));
	      }
	    else
	      {
		/* Push a fresh entry for this field.  */
		fieldoff_s e;
		e.offset = offset + foff;
		e.has_unknown_size = has_unknown_size;
		if (!has_unknown_size)
		  e.size = TREE_INT_CST_LOW (DECL_SIZE (field));
		else
		  e.size = -1;
		e.must_have_pointers = must_have_pointers_p;
		e.may_have_pointers = true;
		e.only_restrict_pointers
		  = (!has_unknown_size
		     && POINTER_TYPE_P (TREE_TYPE (field))
		     && TYPE_RESTRICT (TREE_TYPE (field)));
		fieldstack->safe_push (e);
	      }
	  }

	empty_p = false;
      }

  return !empty_p;
}
5255 
5256 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5257    if it is a varargs function.  */
5258 
5259 static unsigned int
count_num_arguments(tree decl,bool * is_varargs)5260 count_num_arguments (tree decl, bool *is_varargs)
5261 {
5262   unsigned int num = 0;
5263   tree t;
5264 
5265   /* Capture named arguments for K&R functions.  They do not
5266      have a prototype and thus no TYPE_ARG_TYPES.  */
5267   for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5268     ++num;
5269 
5270   /* Check if the function has variadic arguments.  */
5271   for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5272     if (TREE_VALUE (t) == void_type_node)
5273       break;
5274   if (!t)
5275     *is_varargs = true;
5276 
5277   return num;
5278 }
5279 
/* Creation function node for DECL, using NAME, and return the index
   of the variable we've created for the function.  */

static varinfo_t
create_function_info_for (tree decl, const char *name)
{
  struct function *fn = DECL_STRUCT_FUNCTION (decl);
  varinfo_t vi, prev_vi;
  tree arg;
  unsigned int i;
  bool is_varargs = false;
  unsigned int num_args = count_num_arguments (decl, &is_varargs);

  /* Create the variable info.  The function is modeled as a fake
     variable whose sub-fields represent the function parts: clobbers,
     uses, static chain, result and each parameter, at the fi_* offsets.  */

  vi = new_var_info (decl, name);
  vi->offset = 0;
  vi->size = 1;
  vi->fullsize = fi_parm_base + num_args;
  vi->is_fn_info = 1;
  vi->may_have_pointers = false;
  /* For varargs functions the number of parameters is unbounded.  */
  if (is_varargs)
    vi->fullsize = ~0;
  insert_vi_for_tree (vi->decl, vi);

  prev_vi = vi;

  /* Create a variable for things the function clobbers and one for
     things the function uses.  */
    {
      varinfo_t clobbervi, usevi;
      const char *newname;
      char *tempname;

      asprintf (&tempname, "%s.clobber", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      clobbervi = new_var_info (NULL, newname);
      clobbervi->offset = fi_clobbers;
      clobbervi->size = 1;
      clobbervi->fullsize = vi->fullsize;
      clobbervi->is_full_var = true;
      clobbervi->is_global_var = false;
      /* The field chain must remain sorted by offset.  */
      gcc_assert (prev_vi->offset < clobbervi->offset);
      prev_vi->next = clobbervi;
      prev_vi = clobbervi;

      asprintf (&tempname, "%s.use", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      usevi = new_var_info (NULL, newname);
      usevi->offset = fi_uses;
      usevi->size = 1;
      usevi->fullsize = vi->fullsize;
      usevi->is_full_var = true;
      usevi->is_global_var = false;
      gcc_assert (prev_vi->offset < usevi->offset);
      prev_vi->next = usevi;
      prev_vi = usevi;
    }

  /* And one for the static chain.  */
  if (fn->static_chain_decl != NULL_TREE)
    {
      varinfo_t chainvi;
      const char *newname;
      char *tempname;

      asprintf (&tempname, "%s.chain", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      chainvi = new_var_info (fn->static_chain_decl, newname);
      chainvi->offset = fi_static_chain;
      chainvi->size = 1;
      chainvi->fullsize = vi->fullsize;
      chainvi->is_full_var = true;
      chainvi->is_global_var = false;
      gcc_assert (prev_vi->offset < chainvi->offset);
      prev_vi->next = chainvi;
      prev_vi = chainvi;
      insert_vi_for_tree (fn->static_chain_decl, chainvi);
    }

  /* Create a variable for the return var.  */
  if (DECL_RESULT (decl) != NULL
      || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    {
      varinfo_t resultvi;
      const char *newname;
      char *tempname;
      tree resultdecl = decl;

      if (DECL_RESULT (decl))
	resultdecl = DECL_RESULT (decl);

      asprintf (&tempname, "%s.result", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      resultvi = new_var_info (resultdecl, newname);
      resultvi->offset = fi_result;
      resultvi->size = 1;
      resultvi->fullsize = vi->fullsize;
      resultvi->is_full_var = true;
      if (DECL_RESULT (decl))
	resultvi->may_have_pointers = true;
      gcc_assert (prev_vi->offset < resultvi->offset);
      prev_vi->next = resultvi;
      prev_vi = resultvi;
      if (DECL_RESULT (decl))
	insert_vi_for_tree (DECL_RESULT (decl), resultvi);
    }

  /* Set up variables for each argument.  */
  arg = DECL_ARGUMENTS (decl);
  for (i = 0; i < num_args; i++)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree argdecl = decl;

      /* ARG may run out before NUM_ARGS for K&R functions; fall back
	 to DECL itself as the associated decl in that case.  */
      if (arg)
	argdecl = arg;

      asprintf (&tempname, "%s.arg%d", name, i);
      newname = ggc_strdup (tempname);
      free (tempname);

      argvi = new_var_info (argdecl, newname);
      argvi->offset = fi_parm_base + i;
      argvi->size = 1;
      argvi->is_full_var = true;
      argvi->fullsize = vi->fullsize;
      if (arg)
	argvi->may_have_pointers = true;
      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi;
      prev_vi = argvi;
      if (arg)
	{
	  insert_vi_for_tree (arg, argvi);
	  arg = DECL_CHAIN (arg);
	}
    }

  /* Add one representative for all further args.  */
  if (is_varargs)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree decl;

      asprintf (&tempname, "%s.varargs", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      /* We need sth that can be pointed to for va_start.  */
      decl = build_fake_var_decl (ptr_type_node);

      argvi = new_var_info (decl, newname);
      argvi->offset = fi_parm_base + num_args;
      argvi->size = ~0;
      argvi->is_full_var = true;
      argvi->is_heap_var = true;
      argvi->fullsize = vi->fullsize;
      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi;
      prev_vi = argvi;
    }

  return vi;
}
5457 
5458 
5459 /* Return true if FIELDSTACK contains fields that overlap.
5460    FIELDSTACK is assumed to be sorted by offset.  */
5461 
5462 static bool
check_for_overlaps(vec<fieldoff_s> fieldstack)5463 check_for_overlaps (vec<fieldoff_s> fieldstack)
5464 {
5465   fieldoff_s *fo = NULL;
5466   unsigned int i;
5467   HOST_WIDE_INT lastoffset = -1;
5468 
5469   FOR_EACH_VEC_ELT (fieldstack, i, fo)
5470     {
5471       if (fo->offset == lastoffset)
5472 	return true;
5473       lastoffset = fo->offset;
5474     }
5475   return false;
5476 }
5477 
/* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
   This will also create any varinfo structures necessary for fields
   of DECL.  */

static varinfo_t
create_variable_info_for_1 (tree decl, const char *name)
{
  varinfo_t vi, newvi;
  tree decl_type = TREE_TYPE (decl);
  tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
  vec<fieldoff_s> fieldstack = vNULL;
  fieldoff_s *fo;
  unsigned int i;

  /* Variables of unknown or non-constant size get a single varinfo
     of unknown extent.  */
  if (!declsize
      || !host_integerp (declsize, 1))
    {
      vi = new_var_info (decl, name);
      vi->offset = 0;
      vi->size = ~0;
      vi->fullsize = ~0;
      vi->is_unknown_size_var = true;
      vi->is_full_var = true;
      vi->may_have_pointers = true;
      return vi;
    }

  /* Collect field information.  */
  if (use_field_sensitive
      && var_can_have_subvars (decl)
      /* ???  Force us to not use subfields for global initializers
	 in IPA mode.  Else we'd have to parse arbitrary initializers.  */
      && !(in_ipa_mode
	   && is_global_var (decl)
	   && DECL_INITIAL (decl)))
    {
      fieldoff_s *fo = NULL;
      bool notokay = false;
      unsigned int i;

      push_fields_onto_fieldstack (decl_type, &fieldstack, 0);

      /* Any field of unknown size or negative offset disables
	 field-sensitivity for this variable.  */
      for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
	if (fo->has_unknown_size
	    || fo->offset < 0)
	  {
	    notokay = true;
	    break;
	  }

      /* We can't sort them if we have a field with a variable sized type,
	 which will make notokay = true.  In that case, we are going to return
	 without creating varinfos for the fields anyway, so sorting them is a
	 waste to boot.  */
      if (!notokay)
	{
	  sort_fieldstack (fieldstack);
	  /* Due to some C++ FE issues, like PR 22488, we might end up
	     what appear to be overlapping fields even though they,
	     in reality, do not overlap.  Until the C++ FE is fixed,
	     we will simply disable field-sensitivity for these cases.  */
	  notokay = check_for_overlaps (fieldstack);
	}

      if (notokay)
	fieldstack.release ();
    }

  /* If we didn't end up collecting sub-variables create a full
     variable for the decl.  */
  if (fieldstack.length () <= 1
      || fieldstack.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
    {
      vi = new_var_info (decl, name);
      vi->offset = 0;
      vi->may_have_pointers = true;
      vi->fullsize = TREE_INT_CST_LOW (declsize);
      vi->size = vi->fullsize;
      vi->is_full_var = true;
      fieldstack.release ();
      return vi;
    }

  /* Otherwise create one varinfo per field, chained via ->next and
     sharing the fullsize of the whole decl.  */
  vi = new_var_info (decl, name);
  vi->fullsize = TREE_INT_CST_LOW (declsize);
  for (i = 0, newvi = vi;
       fieldstack.iterate (i, &fo);
       ++i, newvi = newvi->next)
    {
      const char *newname = "NULL";
      char *tempname;

      /* Only build pretty per-field names when dumping.  */
      if (dump_file)
	{
	  asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC
		    "+" HOST_WIDE_INT_PRINT_DEC, name, fo->offset, fo->size);
	  newname = ggc_strdup (tempname);
	  free (tempname);
	}
      newvi->name = newname;
      newvi->offset = fo->offset;
      newvi->size = fo->size;
      newvi->fullsize = vi->fullsize;
      newvi->may_have_pointers = fo->may_have_pointers;
      newvi->only_restrict_pointers = fo->only_restrict_pointers;
      if (i + 1 < fieldstack.length ())
	newvi->next = new_var_info (decl, name);
    }

  fieldstack.release ();

  return vi;
}
5591 
/* Create varinfo(s) for DECL named NAME, insert the tree -> varinfo
   mapping, add initial constraints for globals and return the id of
   the created variable.  */

static unsigned int
create_variable_info_for (tree decl, const char *name)
{
  varinfo_t vi = create_variable_info_for_1 (decl, name);
  unsigned int id = vi->id;

  insert_vi_for_tree (decl, vi);

  /* Only VAR_DECLs get the global-initializer constraints below.  */
  if (TREE_CODE (decl) != VAR_DECL)
    return id;

  /* Create initial constraints for globals.  */
  for (; vi; vi = vi->next)
    {
      if (!vi->may_have_pointers
	  || !vi->is_global_var)
	continue;

      /* Mark global restrict qualified pointers.  */
      if ((POINTER_TYPE_P (TREE_TYPE (decl))
	   && TYPE_RESTRICT (TREE_TYPE (decl)))
	  || vi->only_restrict_pointers)
	{
	  make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
	  continue;
	}

      /* In non-IPA mode the initializer from nonlocal is all we need.  */
      if (!in_ipa_mode
	  || DECL_HARD_REGISTER (decl))
	make_copy_constraint (vi, nonlocal_id);

      /* In IPA mode parse the initializer and generate proper constraints
	 for it.  */
      else
	{
	  struct varpool_node *vnode = varpool_get_node (decl);

	  /* For escaped variables initialize them from nonlocal.  */
	  if (!varpool_all_refs_explicit_p (vnode))
	    make_copy_constraint (vi, nonlocal_id);

	  /* If this is a global variable with an initializer and we are in
	     IPA mode generate constraints for it.  */
	  if (DECL_INITIAL (decl)
	      && vnode->analyzed)
	    {
	      vec<ce_s> rhsc = vNULL;
	      struct constraint_expr lhs, *rhsp;
	      unsigned i;
	      get_constraint_for_rhs (DECL_INITIAL (decl), &rhsc);
	      lhs.var = vi->id;
	      lhs.offset = 0;
	      lhs.type = SCALAR;
	      FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      /* If this is a variable that escapes from the unit
		 the initializer escapes as well.  */
	      if (!varpool_all_refs_explicit_p (vnode))
		{
		  lhs.var = escaped_id;
		  lhs.offset = 0;
		  lhs.type = SCALAR;
		  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		    process_constraint (new_constraint (lhs, *rhsp));
		}
	      rhsc.release ();
	    }
	}
    }

  return id;
}
5665 
/* Print out the points-to solution for VAR to FILE.  */

static void
dump_solution_for_var (FILE *file, unsigned int var)
{
  varinfo_t vi = get_varinfo (var);
  unsigned int i;
  bitmap_iterator bi;

  /* Dump the solution for unified vars anyway, this avoids difficulties
     in scanning dumps in the testsuite.  */
  fprintf (file, "%s = { ", vi->name);
  /* The actual solution lives at the representative of VAR's
     equivalence class.  */
  vi = get_varinfo (find (var));
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
    fprintf (file, "%s ", get_varinfo (i)->name);
  fprintf (file, "}");

  /* But note when the variable was unified.  */
  if (vi->id != var)
    fprintf (file, " same as %s", vi->name);

  fprintf (file, "\n");
}
5689 
/* Print the points-to solution for VAR (an index into the varinfo map)
   to stdout.  Convenience wrapper around dump_solution_for_var, intended
   to be called from the debugger.  */

DEBUG_FUNCTION void
debug_solution_for_var (unsigned int var)
{
  dump_solution_for_var (stdout, var);
}
5697 
5698 /* Create varinfo structures for all of the variables in the
5699    function for intraprocedural mode.  */
5700 
static void
intra_create_variable_infos (void)
{
  tree t;

  /* For each incoming pointer argument arg, create the constraint ARG
     = NONLOCAL or a dummy variable if it is a restrict qualified
     passed-by-reference argument.  */
  for (t = DECL_ARGUMENTS (current_function_decl); t; t = DECL_CHAIN (t))
    {
      varinfo_t p = get_vi_for_tree (t);

      /* For restrict qualified pointers to objects passed by
         reference build a real representative for the pointed-to object.
	 Treat restrict qualified references the same.  */
      if (TYPE_RESTRICT (TREE_TYPE (t))
	  && ((DECL_BY_REFERENCE (t) && POINTER_TYPE_P (TREE_TYPE (t)))
	      || TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE)
	  && !type_contains_placeholder_p (TREE_TYPE (TREE_TYPE (t))))
	{
	  struct constraint_expr lhsc, rhsc;
	  varinfo_t vi;
	  /* Fake decl standing in for the pointed-to object; marking it
	     DECL_EXTERNAL makes it be treated conservatively.  */
	  tree heapvar = build_fake_var_decl (TREE_TYPE (TREE_TYPE (t)));
	  DECL_EXTERNAL (heapvar) = 1;
	  vi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS");
	  insert_vi_for_tree (heapvar, vi);
	  /* Add ARG = &PARM_NOALIAS.  */
	  lhsc.var = p->id;
	  lhsc.type = SCALAR;
	  lhsc.offset = 0;
	  rhsc.var = vi->id;
	  rhsc.type = ADDRESSOF;
	  rhsc.offset = 0;
	  process_constraint (new_constraint (lhsc, rhsc));
	  /* Initialize each pointer-containing subfield of the fake
	     object from NONLOCAL (or a global restrict tag).  */
	  for (; vi; vi = vi->next)
	    if (vi->may_have_pointers)
	      {
		if (vi->only_restrict_pointers)
		  make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
		else
		  make_copy_constraint (vi, nonlocal_id);
	      }
	  continue;
	}

      /* A toplevel restrict qualified pointer argument gets a restrict
	 tag instead of the generic NONLOCAL initializer.  */
      if (POINTER_TYPE_P (TREE_TYPE (t))
	  && TYPE_RESTRICT (TREE_TYPE (t)))
	make_constraint_from_global_restrict (p, "PARM_RESTRICT");
      else
	{
	  /* Otherwise initialize every pointer-containing subfield of
	     the argument from NONLOCAL.  */
	  for (; p; p = p->next)
	    {
	      if (p->only_restrict_pointers)
		make_constraint_from_global_restrict (p, "PARM_RESTRICT");
	      else if (p->may_have_pointers)
		make_constraint_from (p, nonlocal_id);
	    }
	}
    }

  /* Add a constraint for a result decl that is passed by reference.  */
  if (DECL_RESULT (cfun->decl)
      && DECL_BY_REFERENCE (DECL_RESULT (cfun->decl)))
    {
      varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (cfun->decl));

      for (p = result_vi; p; p = p->next)
	make_constraint_from (p, nonlocal_id);
    }

  /* Add a constraint for the incoming static chain parameter.  */
  if (cfun->static_chain_decl != NULL_TREE)
    {
      varinfo_t p, chain_vi = get_vi_for_tree (cfun->static_chain_decl);

      for (p = chain_vi; p; p = p->next)
	make_constraint_from (p, nonlocal_id);
    }
}
5779 
5780 /* Structure used to put solution bitmaps in a hashtable so they can
5781    be shared among variables with the same points-to set.  */
5782 
typedef struct shared_bitmap_info
{
  bitmap pt_vars;	/* The shared points-to variable bitmap.  */
  hashval_t hashcode;	/* Cached hash of PT_VARS for table lookups.  */
} *shared_bitmap_info_t;
typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;

/* Hashtable of shared_bitmap_info entries, keyed by bitmap contents.  */
static htab_t shared_bitmap_table;
5791 
5792 /* Hash function for a shared_bitmap_info_t */
5793 
5794 static hashval_t
shared_bitmap_hash(const void * p)5795 shared_bitmap_hash (const void *p)
5796 {
5797   const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
5798   return bi->hashcode;
5799 }
5800 
5801 /* Equality function for two shared_bitmap_info_t's. */
5802 
5803 static int
shared_bitmap_eq(const void * p1,const void * p2)5804 shared_bitmap_eq (const void *p1, const void *p2)
5805 {
5806   const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
5807   const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
5808   return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
5809 }
5810 
5811 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
5812    existing instance if there is one, NULL otherwise.  */
5813 
5814 static bitmap
shared_bitmap_lookup(bitmap pt_vars)5815 shared_bitmap_lookup (bitmap pt_vars)
5816 {
5817   void **slot;
5818   struct shared_bitmap_info sbi;
5819 
5820   sbi.pt_vars = pt_vars;
5821   sbi.hashcode = bitmap_hash (pt_vars);
5822 
5823   slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
5824 				   sbi.hashcode, NO_INSERT);
5825   if (!slot)
5826     return NULL;
5827   else
5828     return ((shared_bitmap_info_t) *slot)->pt_vars;
5829 }
5830 
5831 
5832 /* Add a bitmap to the shared bitmap hashtable.  */
5833 
5834 static void
shared_bitmap_add(bitmap pt_vars)5835 shared_bitmap_add (bitmap pt_vars)
5836 {
5837   void **slot;
5838   shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
5839 
5840   sbi->pt_vars = pt_vars;
5841   sbi->hashcode = bitmap_hash (pt_vars);
5842 
5843   slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
5844 				   sbi->hashcode, INSERT);
5845   gcc_assert (!*slot);
5846   *slot = (void *) sbi;
5847 }
5848 
5849 
5850 /* Set bits in INTO corresponding to the variable uids in solution set FROM.  */
5851 
/* Set bits in INTO corresponding to the variable uids in solution set
   FROM, and record in *PT whether the set contains global variables.  */

static void
set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt)
{
  unsigned int i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* The only artificial variables that are allowed in a may-alias
	 set are heap variables.  */
      if (vi->is_artificial_var && !vi->is_heap_var)
	continue;

      if (TREE_CODE (vi->decl) == VAR_DECL
	  || TREE_CODE (vi->decl) == PARM_DECL
	  || TREE_CODE (vi->decl) == RESULT_DECL)
	{
	  /* If we are in IPA mode we will not recompute points-to
	     sets after inlining so make sure they stay valid.  */
	  if (in_ipa_mode
	      && !DECL_PT_UID_SET_P (vi->decl))
	    SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));

	  /* Add the decl to the points-to set.  Note that the points-to
	     set contains global variables.  */
	  bitmap_set_bit (into, DECL_PT_UID (vi->decl));
	  if (vi->is_global_var)
	    pt->vars_contains_global = true;
	}
    }
}
5885 
5886 
/* Compute and return the points-to solution for the variable ORIG_VI,
   memoizing the result in the FINAL_SOLUTIONS map.  */

static struct pt_solution
find_what_var_points_to (varinfo_t orig_vi)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap finished_solution;
  bitmap result;
  varinfo_t vi;
  void **slot;
  struct pt_solution *pt;

  /* This variable may have been collapsed, let's get the real
     variable.  */
  vi = get_varinfo (find (orig_vi->id));

  /* See if we have already computed the solution and return it.  */
  slot = pointer_map_insert (final_solutions, vi);
  if (*slot != NULL)
    return *(struct pt_solution *)*slot;

  *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
  memset (pt, 0, sizeof (struct pt_solution));

  /* Translate artificial variables into SSA_NAME_PTR_INFO
     attributes.  */
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
    {
      /* Deliberately shadows the outer VI; this is the solution
	 member, not the queried variable.  */
      varinfo_t vi = get_varinfo (i);

      if (vi->is_artificial_var)
	{
	  if (vi->id == nothing_id)
	    pt->null = 1;
	  else if (vi->id == escaped_id)
	    {
	      if (in_ipa_mode)
		pt->ipa_escaped = 1;
	      else
		pt->escaped = 1;
	    }
	  else if (vi->id == nonlocal_id)
	    pt->nonlocal = 1;
	  else if (vi->is_heap_var)
	    /* We represent heapvars in the points-to set properly.  */
	    ;
	  else if (vi->id == readonly_id)
	    /* Nobody cares.  */
	    ;
	  else if (vi->id == anything_id
		   || vi->id == integer_id)
	    pt->anything = 1;
	}
    }

  /* Instead of doing extra work, simply do not create
     elaborate points-to information for pt_anything pointers.  */
  if (pt->anything)
    return *pt;

  /* Share the final set of variables when possible.  */
  finished_solution = BITMAP_GGC_ALLOC ();
  stats.points_to_sets_created++;

  set_uids_in_ptset (finished_solution, vi->solution, pt);
  result = shared_bitmap_lookup (finished_solution);
  if (!result)
    {
      /* First time we see this set; register it for future sharing.  */
      shared_bitmap_add (finished_solution);
      pt->vars = finished_solution;
    }
  else
    {
      /* Reuse the already-registered identical bitmap.  */
      pt->vars = result;
      bitmap_clear (finished_solution);
    }

  return *pt;
}
5967 
5968 /* Given a pointer variable P, fill in its points-to set.  */
5969 
5970 static void
find_what_p_points_to(tree p)5971 find_what_p_points_to (tree p)
5972 {
5973   struct ptr_info_def *pi;
5974   tree lookup_p = p;
5975   varinfo_t vi;
5976 
5977   /* For parameters, get at the points-to set for the actual parm
5978      decl.  */
5979   if (TREE_CODE (p) == SSA_NAME
5980       && SSA_NAME_IS_DEFAULT_DEF (p)
5981       && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
5982 	  || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
5983     lookup_p = SSA_NAME_VAR (p);
5984 
5985   vi = lookup_vi_for_tree (lookup_p);
5986   if (!vi)
5987     return;
5988 
5989   pi = get_ptr_info (p);
5990   pi->pt = find_what_var_points_to (vi);
5991 }
5992 
5993 
5994 /* Query statistics for points-to solutions.  */
5995 
static struct {
  /* Queries answered "may alias" / "no alias" by pt_solution_includes.  */
  unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
  unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
  /* Queries answered "may alias" / "no alias" by pt_solutions_intersect.  */
  unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
  unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
} pta_stats;
6002 
6003 void
dump_pta_stats(FILE * s)6004 dump_pta_stats (FILE *s)
6005 {
6006   fprintf (s, "\nPTA query stats:\n");
6007   fprintf (s, "  pt_solution_includes: "
6008 	   HOST_WIDE_INT_PRINT_DEC " disambiguations, "
6009 	   HOST_WIDE_INT_PRINT_DEC " queries\n",
6010 	   pta_stats.pt_solution_includes_no_alias,
6011 	   pta_stats.pt_solution_includes_no_alias
6012 	   + pta_stats.pt_solution_includes_may_alias);
6013   fprintf (s, "  pt_solutions_intersect: "
6014 	   HOST_WIDE_INT_PRINT_DEC " disambiguations, "
6015 	   HOST_WIDE_INT_PRINT_DEC " queries\n",
6016 	   pta_stats.pt_solutions_intersect_no_alias,
6017 	   pta_stats.pt_solutions_intersect_no_alias
6018 	   + pta_stats.pt_solutions_intersect_may_alias);
6019 }
6020 
6021 
6022 /* Reset the points-to solution *PT to a conservative default
6023    (point to anything).  */
6024 
void
pt_solution_reset (struct pt_solution *pt)
{
  /* Clear all members, then flag the most conservative answer.  */
  memset (pt, 0, sizeof (struct pt_solution));
  pt->anything = true;
}
6031 
/* Set the points-to solution *PT to point only to the variables
   in VARS.  VARS_CONTAINS_GLOBAL specifies whether that contains
   global variables.  */

void
pt_solution_set (struct pt_solution *pt, bitmap vars, bool vars_contains_global)
{
  memset (pt, 0, sizeof (struct pt_solution));
  pt->vars = vars;
  pt->vars_contains_global = vars_contains_global;
}
6044 
6045 /* Set the points-to solution *PT to point only to the variable VAR.  */
6046 
void
pt_solution_set_var (struct pt_solution *pt, tree var)
{
  memset (pt, 0, sizeof (struct pt_solution));
  pt->vars = BITMAP_GGC_ALLOC ();
  /* Points-to sets are keyed by DECL_PT_UID, not DECL_UID.  */
  bitmap_set_bit (pt->vars, DECL_PT_UID (var));
  pt->vars_contains_global = is_global_var (var);
}
6055 
6056 /* Computes the union of the points-to solutions *DEST and *SRC and
6057    stores the result in *DEST.  This changes the points-to bitmap
6058    of *DEST and thus may not be used if that might be shared.
6059    The points-to bitmap of *SRC and *DEST will not be shared after
6060    this function if they were not before.  */
6061 
static void
pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
{
  dest->anything |= src->anything;
  /* ANYTHING subsumes everything else; collapse to the canonical
     conservative solution.  */
  if (dest->anything)
    {
      pt_solution_reset (dest);
      return;
    }

  dest->nonlocal |= src->nonlocal;
  dest->escaped |= src->escaped;
  dest->ipa_escaped |= src->ipa_escaped;
  dest->null |= src->null;
  dest->vars_contains_global |= src->vars_contains_global;
  if (!src->vars)
    return;

  /* Allocate lazily; many solutions have no explicit vars bitmap.  */
  if (!dest->vars)
    dest->vars = BITMAP_GGC_ALLOC ();
  bitmap_ior_into (dest->vars, src->vars);
}
6084 
6085 /* Return true if the points-to solution *PT is empty.  */
6086 
bool
pt_solution_empty_p (struct pt_solution *pt)
{
  if (pt->anything
      || pt->nonlocal)
    return false;

  if (pt->vars
      && !bitmap_empty_p (pt->vars))
    return false;

  /* If the solution includes ESCAPED, check if that is empty.  */
  if (pt->escaped
      && !pt_solution_empty_p (&cfun->gimple_df->escaped))
    return false;

  /* If the solution includes the IPA ESCAPED solution, check if that
     is empty.  */
  if (pt->ipa_escaped
      && !pt_solution_empty_p (&ipa_escaped_pt))
    return false;

  return true;
}
6110 
6111 /* Return true if the points-to solution *PT only point to a single var, and
6112    return the var uid in *UID.  */
6113 
6114 bool
pt_solution_singleton_p(struct pt_solution * pt,unsigned * uid)6115 pt_solution_singleton_p (struct pt_solution *pt, unsigned *uid)
6116 {
6117   if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
6118       || pt->null || pt->vars == NULL
6119       || !bitmap_single_bit_set_p (pt->vars))
6120     return false;
6121 
6122   *uid = bitmap_first_set_bit (pt->vars);
6123   return true;
6124 }
6125 
6126 /* Return true if the points-to solution *PT includes global memory.  */
6127 
bool
pt_solution_includes_global (struct pt_solution *pt)
{
  if (pt->anything
      || pt->nonlocal
      || pt->vars_contains_global)
    return true;

  /* Delegate to the (possibly global-containing) escape solutions.  */
  if (pt->escaped)
    return pt_solution_includes_global (&cfun->gimple_df->escaped);

  if (pt->ipa_escaped)
    return pt_solution_includes_global (&ipa_escaped_pt);

  /* ???  This predicate is not correct for the IPA-PTA solution
     as we do not properly distinguish between unit escape points
     and global variables.  */
  if (cfun->gimple_df->ipa_pta)
    return true;

  return false;
}
6150 
6151 /* Return true if the points-to solution *PT includes the variable
6152    declaration DECL.  */
6153 
static bool
pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
{
  if (pt->anything)
    return true;

  if (pt->nonlocal
      && is_global_var (decl))
    return true;

  if (pt->vars
      && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
    return true;

  /* If the solution includes ESCAPED, check it.  */
  if (pt->escaped
      && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
    return true;

  /* If the solution includes the IPA ESCAPED solution, check it.  */
  if (pt->ipa_escaped
      && pt_solution_includes_1 (&ipa_escaped_pt, decl))
    return true;

  return false;
}
6180 
6181 bool
pt_solution_includes(struct pt_solution * pt,const_tree decl)6182 pt_solution_includes (struct pt_solution *pt, const_tree decl)
6183 {
6184   bool res = pt_solution_includes_1 (pt, decl);
6185   if (res)
6186     ++pta_stats.pt_solution_includes_may_alias;
6187   else
6188     ++pta_stats.pt_solution_includes_no_alias;
6189   return res;
6190 }
6191 
6192 /* Return true if both points-to solutions PT1 and PT2 have a non-empty
6193    intersection.  */
6194 
static bool
pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
{
  if (pt1->anything || pt2->anything)
    return true;

  /* If either points to unknown global memory and the other points to
     any global memory they alias.  */
  if ((pt1->nonlocal
       && (pt2->nonlocal
	   || pt2->vars_contains_global))
      || (pt2->nonlocal
	  && pt1->vars_contains_global))
    return true;

  /* Check the escaped solution if required.  */
  if ((pt1->escaped || pt2->escaped)
      && !pt_solution_empty_p (&cfun->gimple_df->escaped))
    {
      /* If both point to escaped memory and that solution
	 is not empty they alias.  */
      if (pt1->escaped && pt2->escaped)
	return true;

      /* If either points to escaped memory see if the escaped solution
	 intersects with the other.  */
      if ((pt1->escaped
	   && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt2))
	  || (pt2->escaped
	      && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt1)))
	return true;
    }

  /* Check the escaped solution if required.
     ???  Do we need to check the local against the IPA escaped sets?  */
  if ((pt1->ipa_escaped || pt2->ipa_escaped)
      && !pt_solution_empty_p (&ipa_escaped_pt))
    {
      /* If both point to escaped memory and that solution
	 is not empty they alias.  */
      if (pt1->ipa_escaped && pt2->ipa_escaped)
	return true;

      /* If either points to escaped memory see if the escaped solution
	 intersects with the other.  */
      if ((pt1->ipa_escaped
	   && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
	  || (pt2->ipa_escaped
	      && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
	return true;
    }

  /* Now both pointers alias if their points-to solution intersects.  */
  return (pt1->vars
	  && pt2->vars
	  && bitmap_intersect_p (pt1->vars, pt2->vars));
}
6252 
6253 bool
pt_solutions_intersect(struct pt_solution * pt1,struct pt_solution * pt2)6254 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
6255 {
6256   bool res = pt_solutions_intersect_1 (pt1, pt2);
6257   if (res)
6258     ++pta_stats.pt_solutions_intersect_may_alias;
6259   else
6260     ++pta_stats.pt_solutions_intersect_no_alias;
6261   return res;
6262 }
6263 
6264 
6265 /* Dump points-to information to OUTFILE.  */
6266 
static void
dump_sa_points_to_info (FILE *outfile)
{
  unsigned int i;

  fprintf (outfile, "\nPoints-to sets\n\n");

  /* Solver statistics are only emitted when -fdump-*-stats is given.  */
  if (dump_flags & TDF_STATS)
    {
      fprintf (outfile, "Stats:\n");
      fprintf (outfile, "Total vars:               %d\n", stats.total_vars);
      fprintf (outfile, "Non-pointer vars:          %d\n",
	       stats.nonpointer_vars);
      fprintf (outfile, "Statically unified vars:  %d\n",
	       stats.unified_vars_static);
      fprintf (outfile, "Dynamically unified vars: %d\n",
	       stats.unified_vars_dynamic);
      fprintf (outfile, "Iterations:               %d\n", stats.iterations);
      fprintf (outfile, "Number of edges:          %d\n", stats.num_edges);
      fprintf (outfile, "Number of implicit edges: %d\n",
	       stats.num_implicit_edges);
    }

  /* Dump the solution of every variable that can contain pointers.  */
  for (i = 0; i < varmap.length (); i++)
    {
      varinfo_t vi = get_varinfo (i);
      if (!vi->may_have_pointers)
	continue;
      dump_solution_for_var (outfile, i);
    }
}
6298 
6299 
/* Dump all points-to information to stderr.  Convenience wrapper for
   use from the debugger.  */

DEBUG_FUNCTION void
debug_sa_points_to_info (void)
{
  dump_sa_points_to_info (stderr);
}
6307 
6308 
6309 /* Initialize the always-existing constraint variables for NULL
6310    ANYTHING, READONLY, and INTEGER */
6311 
/* Initialize the always-existing constraint variables NULL, ANYTHING,
   READONLY, ESCAPED, NONLOCAL, STOREDANYTHING and INTEGER, together
   with the base constraints relating them.  The creation order is
   significant: each variable's id is asserted against its well-known
   *_id constant.  */

static void
init_base_vars (void)
{
  struct constraint_expr lhs, rhs;
  varinfo_t var_anything;
  varinfo_t var_nothing;
  varinfo_t var_readonly;
  varinfo_t var_escaped;
  varinfo_t var_nonlocal;
  varinfo_t var_storedanything;
  varinfo_t var_integer;

  /* Create the NULL variable, used to represent that a variable points
     to NULL.  */
  var_nothing = new_var_info (NULL_TREE, "NULL");
  gcc_assert (var_nothing->id == nothing_id);
  var_nothing->is_artificial_var = 1;
  var_nothing->offset = 0;
  var_nothing->size = ~0;
  var_nothing->fullsize = ~0;
  var_nothing->is_special_var = 1;
  var_nothing->may_have_pointers = 0;
  var_nothing->is_global_var = 0;

  /* Create the ANYTHING variable, used to represent that a variable
     points to some unknown piece of memory.  */
  var_anything = new_var_info (NULL_TREE, "ANYTHING");
  gcc_assert (var_anything->id == anything_id);
  var_anything->is_artificial_var = 1;
  var_anything->size = ~0;
  var_anything->offset = 0;
  var_anything->next = NULL;
  var_anything->fullsize = ~0;
  var_anything->is_special_var = 1;

  /* Anything points to anything.  This makes deref constraints just
     work in the presence of linked list and other p = *p type loops,
     by saying that *ANYTHING = ANYTHING. */
  lhs.type = SCALAR;
  lhs.var = anything_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;

  /* This specifically does not use process_constraint because
     process_constraint ignores all anything = anything constraints, since all
     but this one are redundant.  */
  constraints.safe_push (new_constraint (lhs, rhs));

  /* Create the READONLY variable, used to represent that a variable
     points to readonly memory.  */
  var_readonly = new_var_info (NULL_TREE, "READONLY");
  gcc_assert (var_readonly->id == readonly_id);
  var_readonly->is_artificial_var = 1;
  var_readonly->offset = 0;
  var_readonly->size = ~0;
  var_readonly->fullsize = ~0;
  var_readonly->next = NULL;
  var_readonly->is_special_var = 1;

  /* readonly memory points to anything, in order to make deref
     easier.  In reality, it points to anything the particular
     readonly variable can point to, but we don't track this
     separately. */
  lhs.type = SCALAR;
  lhs.var = readonly_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = readonly_id;  /* FIXME */
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the ESCAPED variable, used to represent the set of escaped
     memory.  */
  var_escaped = new_var_info (NULL_TREE, "ESCAPED");
  gcc_assert (var_escaped->id == escaped_id);
  var_escaped->is_artificial_var = 1;
  var_escaped->offset = 0;
  var_escaped->size = ~0;
  var_escaped->fullsize = ~0;
  var_escaped->is_special_var = 0;

  /* Create the NONLOCAL variable, used to represent the set of nonlocal
     memory.  */
  var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL");
  gcc_assert (var_nonlocal->id == nonlocal_id);
  var_nonlocal->is_artificial_var = 1;
  var_nonlocal->offset = 0;
  var_nonlocal->size = ~0;
  var_nonlocal->fullsize = ~0;
  var_nonlocal->is_special_var = 1;

  /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = DEREF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
     whole variable escapes.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = escaped_id;
  rhs.offset = UNKNOWN_OFFSET;
  process_constraint (new_constraint (lhs, rhs));

  /* *ESCAPED = NONLOCAL.  This is true because we have to assume
     everything pointed to by escaped points to what global memory can
     point to.  */
  lhs.type = DEREF;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED.  This is true because
     global memory may point to global memory and escaped memory.  */
  lhs.type = SCALAR;
  lhs.var = nonlocal_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
  rhs.type = ADDRESSOF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the STOREDANYTHING variable, used to represent the set of
     variables stored to *ANYTHING.  */
  var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING");
  gcc_assert (var_storedanything->id == storedanything_id);
  var_storedanything->is_artificial_var = 1;
  var_storedanything->offset = 0;
  var_storedanything->size = ~0;
  var_storedanything->fullsize = ~0;
  var_storedanything->is_special_var = 0;

  /* Create the INTEGER variable, used to represent that a variable points
     to what an INTEGER "points to".  */
  var_integer = new_var_info (NULL_TREE, "INTEGER");
  gcc_assert (var_integer->id == integer_id);
  var_integer->is_artificial_var = 1;
  var_integer->size = ~0;
  var_integer->fullsize = ~0;
  var_integer->offset = 0;
  var_integer->next = NULL;
  var_integer->is_special_var = 1;

  /* INTEGER = ANYTHING, because we don't know where a dereference of
     a random integer will point to.  */
  lhs.type = SCALAR;
  lhs.var = integer_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
}
6480 
6481 /* Initialize things necessary to perform PTA */
6482 
static void
init_alias_vars (void)
{
  /* Field-sensitivity is pointless when at most one field is tracked.  */
  use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);

  bitmap_obstack_initialize (&pta_obstack);
  bitmap_obstack_initialize (&oldpta_obstack);
  bitmap_obstack_initialize (&predbitmap_obstack);

  constraint_pool = create_alloc_pool ("Constraint pool",
				       sizeof (struct constraint), 30);
  variable_info_pool = create_alloc_pool ("Variable info pool",
					  sizeof (struct variable_info), 30);
  constraints.create (8);
  varmap.create (8);
  vi_for_tree = pointer_map_create ();
  call_stmt_vars = pointer_map_create ();

  memset (&stats, 0, sizeof (stats));
  shared_bitmap_table = htab_create (511, shared_bitmap_hash,
				     shared_bitmap_eq, free);
  /* The special variables must exist before any constraints are
     generated.  */
  init_base_vars ();

  gcc_obstack_init (&fake_var_decl_obstack);

  /* Memoization table and storage for find_what_var_points_to.  */
  final_solutions = pointer_map_create ();
  gcc_obstack_init (&final_solutions_obstack);
}
6511 
6512 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
6513    predecessor edges.  */
6514 
static void
remove_preds_and_fake_succs (constraint_graph_t graph)
{
  unsigned int i;

  /* Clear the implicit ref and address nodes from the successor
     lists.  */
  for (i = 0; i < FIRST_REF_NODE; i++)
    {
      if (graph->succs[i])
	bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
			    FIRST_REF_NODE * 2);
    }

  /* Free the successor list for the non-ref nodes.  */
  for (i = FIRST_REF_NODE; i < graph->size; i++)
    {
      if (graph->succs[i])
	BITMAP_FREE (graph->succs[i]);
    }

  /* Now reallocate the size of the successor list as, and blow away
     the predecessor bitmaps.  */
  graph->size = varmap.length ();
  graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);

  free (graph->implicit_preds);
  graph->implicit_preds = NULL;
  free (graph->preds);
  graph->preds = NULL;
  /* All predecessor bitmaps live on this obstack; release them in one
     go rather than individually.  */
  bitmap_obstack_release (&predbitmap_obstack);
}
6547 
6548 /* Solve the constraint set.  */
6549 
/* Solve the constraint set: build the predecessor graph, perform
   offline variable substitution, build the successor graph and run the
   iterative solver on it.  The phase order is mandatory.  */

static void
solve_constraints (void)
{
  struct scc_info *si;

  if (dump_file)
    fprintf (dump_file,
	     "\nCollapsing static cycles and doing variable "
	     "substitution\n");

  /* Twice the variable count: node i's dereference *i is node
     FIRST_REF_NODE + i.  */
  init_graph (varmap.length () * 2);

  if (dump_file)
    fprintf (dump_file, "Building predecessor graph\n");
  build_pred_graph ();

  if (dump_file)
    fprintf (dump_file, "Detecting pointer and location "
	     "equivalences\n");
  si = perform_var_substitution (graph);

  if (dump_file)
    fprintf (dump_file, "Rewriting constraints and unifying "
	     "variables\n");
  rewrite_constraints (graph, si);

  build_succ_graph ();

  free_var_substitution_info (si);

  /* Attach complex constraints to graph nodes.  */
  move_complex_constraints (graph);

  if (dump_file)
    fprintf (dump_file, "Uniting pointer but not location equivalent "
	     "variables\n");
  unite_pointer_equivalences (graph);

  if (dump_file)
    fprintf (dump_file, "Finding indirect cycles\n");
  find_indirect_cycles (graph);

  /* Implicit nodes and predecessors are no longer necessary at this
     point. */
  remove_preds_and_fake_succs (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }

  if (dump_file)
    fprintf (dump_file, "Solving graph\n");

  solve_graph (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }

  if (dump_file)
    dump_sa_points_to_info (dump_file);
}
6620 
/* Create points-to sets for the current function.  See the comments
   at the start of the file for an algorithmic overview.
   Walks all statements to build constraints, solves them, records the
   ESCAPED solution and per-SSA-name points-to sets, and fills in the
   call-use/call-clobber sets of every call statement.  */

static void
compute_points_to_sets (void)
{
  basic_block bb;
  unsigned i;
  varinfo_t vi;

  timevar_push (TV_TREE_PTA);

  init_alias_vars ();

  /* Create initial constraint variables for this function's
     incoming parameters and similar local entities.  */
  intra_create_variable_infos ();

  /* Now walk all statements and build the constraint set.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple phi = gsi_stmt (gsi);

	  /* Virtual PHIs carry memory state, not pointer values,
	     so they generate no constraints.  */
	  if (! virtual_operand_p (gimple_phi_result (phi)))
	    find_func_aliases (phi);
	}

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  find_func_aliases (stmt);
	}
    }

  if (dump_file)
    {
      fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
      dump_constraints (dump_file, 0);
    }

  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  /* Compute the points-to set for ESCAPED used for call-clobber analysis.  */
  cfun->gimple_df->escaped = find_what_var_points_to (get_varinfo (escaped_id));

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  cfun->gimple_df->escaped.escaped = 0;

  /* Mark escaped HEAP variables as global.  A heap variable reachable
     from ESCAPED behaves like a global; record that on both the
     varinfo and its decl.  */
  FOR_EACH_VEC_ELT (varmap, i, vi)
    if (vi->is_heap_var
	&& !vi->is_global_var)
      DECL_EXTERNAL (vi->decl) = vi->is_global_var
	= pt_solution_includes (&cfun->gimple_df->escaped, vi->decl);

  /* Compute the points-to sets for pointer SSA_NAMEs.  */
  for (i = 0; i < num_ssa_names; ++i)
    {
      tree ptr = ssa_name (i);
      /* ssa_name may return NULL for released names.  */
      if (ptr
	  && POINTER_TYPE_P (TREE_TYPE (ptr)))
	find_what_p_points_to (ptr);
    }

  /* Compute the call-used/clobbered sets.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  struct pt_solution *pt;
	  if (!is_gimple_call (stmt))
	    continue;

	  pt = gimple_call_use_set (stmt);
	  /* Const functions get an empty use set.  */
	  if (gimple_call_flags (stmt) & ECF_CONST)
	    memset (pt, 0, sizeof (struct pt_solution));
	  else if ((vi = lookup_call_use_vi (stmt)) != NULL)
	    {
	      *pt = find_what_var_points_to (vi);
	      /* Escaped (and thus nonlocal) variables are always
	         implicitly used by calls.  */
	      /* ???  ESCAPED can be empty even though NONLOCAL
		 always escaped.  */
	      pt->nonlocal = 1;
	      pt->escaped = 1;
	    }
	  else
	    {
	      /* If there is nothing special about this call then
		 we have made everything that is used also escape.  */
	      *pt = cfun->gimple_df->escaped;
	      pt->nonlocal = 1;
	    }

	  pt = gimple_call_clobber_set (stmt);
	  /* Const, pure and no-VOPS functions clobber nothing.  */
	  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
	    memset (pt, 0, sizeof (struct pt_solution));
	  else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
	    {
	      *pt = find_what_var_points_to (vi);
	      /* Escaped (and thus nonlocal) variables are always
	         implicitly clobbered by calls.  */
	      /* ???  ESCAPED can be empty even though NONLOCAL
		 always escaped.  */
	      pt->nonlocal = 1;
	      pt->escaped = 1;
	    }
	  else
	    {
	      /* If there is nothing special about this call then
		 we have made everything that is used also escape.  */
	      *pt = cfun->gimple_df->escaped;
	      pt->nonlocal = 1;
	    }
	}
    }

  timevar_pop (TV_TREE_PTA);
}
6749 
6750 
/* Delete created points-to sets.  Releases every data structure built
   during constraint generation and solving: hash tables, pointer maps,
   bitmap obstacks, the constraint vector, the constraint graph and its
   per-node vectors, the variable map, and the allocation pools.  */

static void
delete_points_to_sets (void)
{
  unsigned int i;

  htab_delete (shared_bitmap_table);
  if (dump_file && (dump_flags & TDF_STATS))
    fprintf (dump_file, "Points to sets created:%d\n",
	     stats.points_to_sets_created);

  pointer_map_destroy (vi_for_tree);
  pointer_map_destroy (call_stmt_vars);
  bitmap_obstack_release (&pta_obstack);
  constraints.release ();

  /* Release each node's complex-constraint vector before freeing the
     array that holds them.  */
  for (i = 0; i < graph->size; i++)
    graph->complex[i].release ();
  free (graph->complex);

  free (graph->rep);
  free (graph->succs);
  free (graph->pe);
  free (graph->pe_rep);
  free (graph->indirect_cycles);
  free (graph);

  varmap.release ();
  free_alloc_pool (variable_info_pool);
  free_alloc_pool (constraint_pool);

  obstack_free (&fake_var_decl_obstack, NULL);

  pointer_map_destroy (final_solutions);
  obstack_free (&final_solutions_obstack, NULL);
}
6788 
6789 
6790 /* Compute points-to information for every SSA_NAME pointer in the
6791    current function and compute the transitive closure of escaped
6792    variables to re-initialize the call-clobber states of local variables.  */
6793 
6794 unsigned int
compute_may_aliases(void)6795 compute_may_aliases (void)
6796 {
6797   if (cfun->gimple_df->ipa_pta)
6798     {
6799       if (dump_file)
6800 	{
6801 	  fprintf (dump_file, "\nNot re-computing points-to information "
6802 		   "because IPA points-to information is available.\n\n");
6803 
6804 	  /* But still dump what we have remaining it.  */
6805 	  dump_alias_info (dump_file);
6806 	}
6807 
6808       return 0;
6809     }
6810 
6811   /* For each pointer P_i, determine the sets of variables that P_i may
6812      point-to.  Compute the reachability set of escaped and call-used
6813      variables.  */
6814   compute_points_to_sets ();
6815 
6816   /* Debugging dumps.  */
6817   if (dump_file)
6818     dump_alias_info (dump_file);
6819 
6820   /* Deallocate memory used by aliasing data structures and the internal
6821      points-to solution.  */
6822   delete_points_to_sets ();
6823 
6824   gcc_assert (!need_ssa_update_p (cfun));
6825 
6826   return 0;
6827 }
6828 
6829 static bool
gate_tree_pta(void)6830 gate_tree_pta (void)
6831 {
6832   return flag_tree_pta;
6833 }
6834 
/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  The execute callback is NULL; the actual work
   is triggered entirely by the todo_flags_finish flag.  */

struct gimple_opt_pass pass_build_alias =
{
 {
  GIMPLE_PASS,
  "alias",		    /* name */
  OPTGROUP_NONE,            /* optinfo_flags */
  gate_tree_pta,	    /* gate */
  NULL,                     /* execute */
  NULL,                     /* sub */
  NULL,                     /* next */
  0,                        /* static_pass_number */
  TV_NONE,                  /* tv_id */
  PROP_cfg | PROP_ssa,      /* properties_required */
  0,			    /* properties_provided */
  0,                        /* properties_destroyed */
  0,                        /* todo_flags_start */
  TODO_rebuild_alias        /* todo_flags_finish */
 }
};
6857 
/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  Like pass_build_alias, the execute callback is
   NULL and all work happens through the todo_flags_finish flag.  */

struct gimple_opt_pass pass_build_ealias =
{
 {
  GIMPLE_PASS,
  "ealias",		    /* name */
  OPTGROUP_NONE,            /* optinfo_flags */
  gate_tree_pta,	    /* gate */
  NULL,                     /* execute */
  NULL,                     /* sub */
  NULL,                     /* next */
  0,                        /* static_pass_number */
  TV_NONE,                  /* tv_id */
  PROP_cfg | PROP_ssa,      /* properties_required */
  0,			    /* properties_provided */
  0,                        /* properties_destroyed */
  0,                        /* todo_flags_start */
  TODO_rebuild_alias        /* todo_flags_finish */
 }
};
6880 
6881 
6882 /* Return true if we should execute IPA PTA.  */
6883 static bool
gate_ipa_pta(void)6884 gate_ipa_pta (void)
6885 {
6886   return (optimize
6887 	  && flag_ipa_pta
6888 	  /* Don't bother doing anything if the program has errors.  */
6889 	  && !seen_error ());
6890 }
6891 
/* IPA PTA solutions for ESCAPED.  */
/* NOTE(review): positional initialization of pt_solution's flag
   fields -- presumably { anything=true, then nonlocal, escaped,
   ipa_escaped, null, vars_contains_global all false } followed by the
   NULL vars bitmap; verify against the struct declaration.  */
struct pt_solution ipa_escaped_pt
  = { true, false, false, false, false, false, NULL };
6895 
6896 /* Associate node with varinfo DATA. Worker for
6897    cgraph_for_node_and_aliases.  */
6898 static bool
associate_varinfo_to_alias(struct cgraph_node * node,void * data)6899 associate_varinfo_to_alias (struct cgraph_node *node, void *data)
6900 {
6901   if (node->alias || node->thunk.thunk_p)
6902     insert_vi_for_tree (node->symbol.decl, (varinfo_t)data);
6903   return false;
6904 }
6905 
/* Execute the driver for IPA PTA.  Builds unit-wide constraints for
   all defined functions and global variables, solves them, and then
   distributes the solution onto the SSA names and the call-use /
   call-clobber sets of each function, finally marking each function's
   gimple_df->ipa_pta so intra-procedural PTA is not redone.  */
static unsigned int
ipa_pta_execute (void)
{
  struct cgraph_node *node;
  struct varpool_node *var;
  /* Index of the first constraint not yet dumped, so each per-function
     dump below shows only the constraints that function added.  */
  int from;

  in_ipa_mode = 1;

  init_alias_vars ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_symtab (dump_file);
      fprintf (dump_file, "\n");
    }

  /* Build the constraints.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      varinfo_t vi;
      /* Nodes without a body are not interesting.  Especially do not
         visit clones at this point for now - we get duplicate decls
	 there for inline clones at least.  */
      if (!cgraph_function_with_gimple_body_p (node))
	continue;

      gcc_assert (!node->clone_of);

      vi = create_function_info_for (node->symbol.decl,
			             alias_get_name (node->symbol.decl));
      /* Make aliases and thunks of NODE share its varinfo.  */
      cgraph_for_node_and_aliases (node, associate_varinfo_to_alias, vi, true);
    }

  /* Create constraints for global variables and their initializers.  */
  FOR_EACH_VARIABLE (var)
    {
      if (var->alias)
	continue;

      get_vi_for_tree (var->symbol.decl);
    }

  if (dump_file)
    {
      fprintf (dump_file,
	       "Generating constraints for global initializers\n\n");
      dump_constraints (dump_file, 0);
      fprintf (dump_file, "\n");
    }
  from = constraints.length ();

  FOR_EACH_DEFINED_FUNCTION (node)
    {
      struct function *func;
      basic_block bb;

      /* Nodes without a body are not interesting.  */
      if (!cgraph_function_with_gimple_body_p (node))
	continue;

      if (dump_file)
	{
	  fprintf (dump_file,
		   "Generating constraints for %s", cgraph_node_name (node));
	  if (DECL_ASSEMBLER_NAME_SET_P (node->symbol.decl))
	    fprintf (dump_file, " (%s)",
		     IDENTIFIER_POINTER
		       (DECL_ASSEMBLER_NAME (node->symbol.decl)));
	  fprintf (dump_file, "\n");
	}

      func = DECL_STRUCT_FUNCTION (node->symbol.decl);
      push_cfun (func);

      /* For externally visible or attribute used annotated functions use
	 local constraints for their arguments.
	 For local functions we see all callers and thus do not need initial
	 constraints for parameters.  */
      if (node->symbol.used_from_other_partition
	  || node->symbol.externally_visible
	  || node->symbol.force_output)
	{
	  intra_create_variable_infos ();

	  /* We also need to make function return values escape.  Nothing
	     escapes by returning from main though.  */
	  if (!MAIN_NAME_P (DECL_NAME (node->symbol.decl)))
	    {
	      varinfo_t fi, rvi;
	      fi = lookup_vi_for_tree (node->symbol.decl);
	      rvi = first_vi_for_offset (fi, fi_result);
	      if (rvi && rvi->offset == fi_result)
		{
		  /* Add the constraint ESCAPED = result-of-FI.  */
		  struct constraint_expr includes;
		  struct constraint_expr var;
		  includes.var = escaped_id;
		  includes.offset = 0;
		  includes.type = SCALAR;
		  var.var = rvi->id;
		  var.offset = 0;
		  var.type = SCALAR;
		  process_constraint (new_constraint (includes, var));
		}
	    }
	}

      /* Build constraints for the function body.  */
      FOR_EACH_BB_FN (bb, func)
	{
	  gimple_stmt_iterator gsi;

	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gimple phi = gsi_stmt (gsi);

	      /* Virtual PHIs carry memory state, not pointer values.  */
	      if (! virtual_operand_p (gimple_phi_result (phi)))
		find_func_aliases (phi);
	    }

	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);

	      find_func_aliases (stmt);
	      /* In IPA mode also record per-function use/clobber
		 information.  */
	      find_func_clobbers (stmt);
	    }
	}

      pop_cfun ();

      if (dump_file)
	{
	  fprintf (dump_file, "\n");
	  dump_constraints (dump_file, from);
	  fprintf (dump_file, "\n");
	}
      from = constraints.length ();
    }

  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  /* Compute the global points-to sets for ESCAPED.
     ???  Note that the computed escape set is not correct
     for the whole unit as we fail to consider graph edges to
     externally visible functions.  */
  ipa_escaped_pt = find_what_var_points_to (get_varinfo (escaped_id));

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  ipa_escaped_pt.ipa_escaped = 0;

  /* Assign the points-to sets to the SSA names in the unit.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      tree ptr;
      struct function *fn;
      unsigned i;
      varinfo_t fi;
      basic_block bb;
      struct pt_solution uses, clobbers;
      struct cgraph_edge *e;

      /* Nodes without a body are not interesting.  */
      if (!cgraph_function_with_gimple_body_p (node))
	continue;

      fn = DECL_STRUCT_FUNCTION (node->symbol.decl);

      /* Compute the points-to sets for pointer SSA_NAMEs.  */
      FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
	{
	  if (ptr
	      && POINTER_TYPE_P (TREE_TYPE (ptr)))
	    find_what_p_points_to (ptr);
	}

      /* Compute the call-use and call-clobber sets for all direct calls.  */
      fi = lookup_vi_for_tree (node->symbol.decl);
      gcc_assert (fi->is_fn_info);
      clobbers
	= find_what_var_points_to (first_vi_for_offset (fi, fi_clobbers));
      uses = find_what_var_points_to (first_vi_for_offset (fi, fi_uses));
      for (e = node->callers; e; e = e->next_caller)
	{
	  if (!e->call_stmt)
	    continue;

	  *gimple_call_clobber_set (e->call_stmt) = clobbers;
	  *gimple_call_use_set (e->call_stmt) = uses;
	}

      /* Compute the call-use and call-clobber sets for indirect calls
	 and calls to external functions.  */
      FOR_EACH_BB_FN (bb, fn)
	{
	  gimple_stmt_iterator gsi;

	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      struct pt_solution *pt;
	      varinfo_t vi;
	      tree decl;

	      if (!is_gimple_call (stmt))
		continue;

	      /* Handle direct calls to external functions.  */
	      decl = gimple_call_fndecl (stmt);
	      if (decl
		  && (!(fi = lookup_vi_for_tree (decl))
		      || !fi->is_fn_info))
		{
		  pt = gimple_call_use_set (stmt);
		  /* Const functions get an empty use set.  */
		  if (gimple_call_flags (stmt) & ECF_CONST)
		    memset (pt, 0, sizeof (struct pt_solution));
		  else if ((vi = lookup_call_use_vi (stmt)) != NULL)
		    {
		      *pt = find_what_var_points_to (vi);
		      /* Escaped (and thus nonlocal) variables are always
			 implicitly used by calls.  */
		      /* ???  ESCAPED can be empty even though NONLOCAL
			 always escaped.  */
		      pt->nonlocal = 1;
		      pt->ipa_escaped = 1;
		    }
		  else
		    {
		      /* If there is nothing special about this call then
			 we have made everything that is used also escape.  */
		      *pt = ipa_escaped_pt;
		      pt->nonlocal = 1;
		    }

		  pt = gimple_call_clobber_set (stmt);
		  /* Const, pure and no-VOPS functions clobber nothing.  */
		  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
		    memset (pt, 0, sizeof (struct pt_solution));
		  else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
		    {
		      *pt = find_what_var_points_to (vi);
		      /* Escaped (and thus nonlocal) variables are always
			 implicitly clobbered by calls.  */
		      /* ???  ESCAPED can be empty even though NONLOCAL
			 always escaped.  */
		      pt->nonlocal = 1;
		      pt->ipa_escaped = 1;
		    }
		  else
		    {
		      /* If there is nothing special about this call then
			 we have made everything that is used also escape.  */
		      *pt = ipa_escaped_pt;
		      pt->nonlocal = 1;
		    }
		}

	      /* Handle indirect calls.  */
	      if (!decl
		  && (fi = get_fi_for_callee (stmt)))
		{
		  /* We need to accumulate all clobbers/uses of all possible
		     callees.  */
		  fi = get_varinfo (find (fi->id));
		  /* If we cannot constrain the set of functions we'll end up
		     calling we end up using/clobbering everything.  */
		  if (bitmap_bit_p (fi->solution, anything_id)
		      || bitmap_bit_p (fi->solution, nonlocal_id)
		      || bitmap_bit_p (fi->solution, escaped_id))
		    {
		      pt_solution_reset (gimple_call_clobber_set (stmt));
		      pt_solution_reset (gimple_call_use_set (stmt));
		    }
		  else
		    {
		      bitmap_iterator bi;
		      unsigned i;
		      struct pt_solution *uses, *clobbers;

		      uses = gimple_call_use_set (stmt);
		      clobbers = gimple_call_clobber_set (stmt);
		      memset (uses, 0, sizeof (struct pt_solution));
		      memset (clobbers, 0, sizeof (struct pt_solution));
		      /* Union the use/clobber solutions of every function
			 varinfo appearing in the callee solution.  */
		      EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
			{
			  struct pt_solution sol;

			  vi = get_varinfo (i);
			  if (!vi->is_fn_info)
			    {
			      /* ???  We could be more precise here?  */
			      uses->nonlocal = 1;
			      uses->ipa_escaped = 1;
			      clobbers->nonlocal = 1;
			      clobbers->ipa_escaped = 1;
			      continue;
			    }

			  /* Once the set contains anything, further
			     unions cannot add information.  */
			  if (!uses->anything)
			    {
			      sol = find_what_var_points_to
				      (first_vi_for_offset (vi, fi_uses));
			      pt_solution_ior_into (uses, &sol);
			    }
			  if (!clobbers->anything)
			    {
			      sol = find_what_var_points_to
				      (first_vi_for_offset (vi, fi_clobbers));
			      pt_solution_ior_into (clobbers, &sol);
			    }
			}
		    }
		}
	    }
	}

      /* Record that this function has IPA points-to information so
	 compute_may_aliases will not recompute it.  */
      fn->gimple_df->ipa_pta = true;
    }

  delete_points_to_sets ();

  in_ipa_mode = 0;

  return 0;
}
7235 
/* The IPA points-to analysis pass; gated on -fipa-pta with -O.
   TODO_update_ssa is requested because ipa_pta_execute pushes into and
   modifies per-function state across the whole unit.  */
struct simple_ipa_opt_pass pass_ipa_pta =
{
 {
  SIMPLE_IPA_PASS,
  "pta",		                /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  gate_ipa_pta,			/* gate */
  ipa_pta_execute,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_PTA,		        /* tv_id */
  0,	                                /* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_update_ssa                       /* todo_flags_finish */
 }
};
7255