/* Alias analysis for trees.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "target.h"
#include "basic-block.h"
#include "timevar.h"
#include "ggc.h"
#include "langhooks.h"
#include "flags.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "tree-dump.h"
#include "gimple.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "convert.h"
#include "params.h"
#include "vec.h"
#include "bitmap.h"
#include "vecprim.h"
#include "pointer-set.h"
#include "alloc-pool.h"
#include "tree-ssa-alias.h"

/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function; its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.

   This file contains functions for disambiguating memory references,
   the so-called alias oracle, and tools for walking the gimple IL.

   The main alias-oracle entry points are

   bool stmt_may_clobber_ref_p (gimple, tree)

     This function queries if a statement may invalidate (parts of)
     the memory designated by the reference tree argument.

   bool ref_maybe_used_by_stmt_p (gimple, tree)

     This function queries if a statement may need (parts of) the
     memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

   bool refs_may_alias_p (tree, tree)

     This function tries to disambiguate two reference trees.

   bool ptr_deref_may_alias_global_p (tree)

     This function queries if dereferencing a pointer variable may
     alias global memory.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.c.  */

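/* As a small usage sketch (illustrative only; STMT and REF stand for a
   gimple statement and a reference tree from the IL), a pass that wants
   to move a load of REF across STMT would ask

     if (!stmt_may_clobber_ref_p (stmt, ref)
	 && !ref_maybe_used_by_stmt_p (stmt, ref))
       ...  STMT neither writes nor reads REF ...

   A conservative "true" answer from the oracle never licenses a
   transformation; only "false" results do.  */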

/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
} alias_stats;

void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.refs_may_alias_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.ref_maybe_used_by_call_p_no_alias
	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.call_may_clobber_ref_p_no_alias,
	   alias_stats.call_may_clobber_ref_p_no_alias
	   + alias_stats.call_may_clobber_ref_p_may_alias);
}


/* Return true if dereferencing PTR may alias a global variable.  */

bool
ptr_deref_may_alias_global_p (tree ptr)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here, it may point
     to global memory.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to punt.  */
  if (!pi)
    return true;

  /* ???  This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt);
}

/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (TREE_CODE (decl) != VAR_DECL
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	return base == decl;
      else if (base
	       && CONSTANT_CLASS_P (base))
	return false;
      else
	return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}

/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with empty points-to solutions for two identical
     pointers.  Their intersection would then be empty even though the
     pointers trivially alias, so shortcut that case here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ???  This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}
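
/* For instance (an illustrative sketch, not code from this file), for

     int a, b;
     int *p = &a, *q = cond ? &a : &b;

   the points-to solution of p is {a} and that of q is {a, b}; the
   solutions intersect, so ptr_derefs_may_alias_p (p, q) stays true,
   while a pointer r with solution {b} would be disambiguated
   against p.  */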

/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}


/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  size_t i;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  referenced_var_iterator rvi;
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  for (i = 1; i < num_ssa_names; i++)
    {
      tree ptr = ssa_name (i);
      struct ptr_info_def *pi;

      if (ptr == NULL_TREE
	  || SSA_NAME_IN_FREE_LIST (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}


/* Dump alias information on stderr.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}


/* Dump the points-to set *PT into FILE.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      if (pt->vars_contains_global)
	fprintf (file, " (includes global vars)");
    }
}

/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    dump_points_to_solution (file, &pi->pt);
  else
    fprintf (file, ", points-to anything");

  fprintf (file, "\n");
}


/* Dump points-to information for VAR into stderr.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}

/* Initializes the alias-oracle reference representation *R from REF.  */

void
ao_ref_init (ao_ref *r, tree ref)
{
  r->ref = ref;
  r->base = NULL_TREE;
  r->offset = 0;
  r->size = -1;
  r->max_size = -1;
  r->ref_alias_set = -1;
  r->base_alias_set = -1;
  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}
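
/* A typical client (a sketch mirroring refs_may_alias_p below) wraps
   two reference trees and queries the oracle once; base, extent and
   alias sets are computed lazily and cached in the ao_refs:

     ao_ref r1, r2;
     ao_ref_init (&r1, ref1);
     ao_ref_init (&r2, ref2);
     if (!refs_may_alias_p_1 (&r1, &r2, true))
       ...  the two references are independent ...  */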

/* Returns the base object of the memory reference *REF.  */

tree
ao_ref_base (ao_ref *ref)
{
  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
				       &ref->max_size);
  return ref->base;
}

/* Returns the base object alias set of the memory reference *REF.  */

static alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;
  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  base_ref = ref->ref;
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}

/* Returns the reference alias set of the memory reference *REF.  */

alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
  if (ref->ref_alias_set != -1)
    return ref->ref_alias_set;
  ref->ref_alias_set = get_alias_set (ref->ref);
  return ref->ref_alias_set;
}
/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE the
   size is assumed to be unknown.  The access is assumed to be only
   to or after the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  HOST_WIDE_INT t1, t2;
  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == ADDR_EXPR)
    ref->base = get_ref_base_and_extent (TREE_OPERAND (ptr, 0),
					 &ref->offset, &t1, &t2);
  else
    {
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  if (size
      && host_integerp (size, 0)
      /* Guard against overflow when converting the byte count to bits.  */
      && TREE_INT_CST_LOW (size) * 8 / 8 == TREE_INT_CST_LOW (size))
    ref->max_size = ref->size = TREE_INT_CST_LOW (size) * 8;
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}
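
/* For example, the builtin handling in ref_maybe_used_by_call_p_1
   below models the source of memcpy (dst, src, n) this way (sketch):

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref,
				    gimple_call_arg (call, 1),
				    gimple_call_arg (call, 2));

   and then asks refs_may_alias_p_1 (&dref, ref, false).  */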

/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static inline int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* If we would have to do structural comparison bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  if (get_alias_set (type1) == get_alias_set (type2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}
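
/* For example (illustrative): 'int' and 'const int' share the main
   variant 'int' and thus the same canonical type, so they compare as 1;
   'int' and 'float' have distinct canonical types and distinct alias
   sets, so they compare as 0; two array types that fail the
   canonical-type check conservatively yield -1 per the ??? comment
   above rather than 0.  */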

/* Determine if the two component references REF1 and REF2 which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference may alias.  REF2 is the only one that can
   be a decl in which case REF2_IS_DECL is true.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */

static bool
aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
			   HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			   bool ref2_is_decl)
{
  /* If one reference is a component reference through pointers try to find a
     common base and apply offset based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  tree *refp;
  int same_p;

  /* Choose bases and base types to search for.  */
  base1 = ref1;
  while (handled_component_p (base1))
    base1 = TREE_OPERAND (base1, 0);
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    base2 = TREE_OPERAND (base2, 0);
  type2 = TREE_TYPE (base2);

  /* Now search for the type1 in the access path of ref2.  This
     would be a common base for doing offset based disambiguation on.  */
  refp = &ref2;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type1) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type1);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      HOST_WIDE_INT offadj, sztmp, msztmp;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
      offset2 -= offadj;
      get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp);
      offset1 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  /* If we didn't find a common base, try the other way around.  */
  refp = &ref1;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type2) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type2);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      HOST_WIDE_INT offadj, sztmp, msztmp;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
      offset1 -= offadj;
      get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp);
      offset2 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }

  /* If we have two type access paths B1.path1 and B2.path2 they may
     only alias if either B1 is in B2.path2 or B2 is in B1.path1.
     But we can still have a path that goes B1.path1...B2.path2 with
     a part that we do not see.  So we can only disambiguate now
     if there is no B2 in the tail of path1 and no B1 in the
     tail of path2.  */
  if (base1_alias_set == ref2_alias_set
      || alias_set_subset_of (base1_alias_set, ref2_alias_set))
    return true;
  /* If this is ptr vs. decl then we know there is no ptr ... decl path.  */
  if (!ref2_is_decl)
    return (base2_alias_set == ref1_alias_set
	    || alias_set_subset_of (base2_alias_set, ref1_alias_set));
  return false;
}

/* Return true if two memory references based on the variables BASE1
   and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  */

static bool
decl_refs_may_alias_p (tree base1,
		       HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
		       tree base2,
		       HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2)
{
  gcc_checking_assert (DECL_P (base1) && DECL_P (base2));

  /* If both references are based on different variables, they cannot alias.  */
  if (base1 != base2)
    return false;

  /* If both references are based on the same variable, they cannot alias if
     the accesses do not overlap.  */
  return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
}
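
/* For instance (illustrative, on a target with 32-bit int), given

     struct { int i; int j; } s;

   an access to s.i spans bits [0, 32) and one to s.j spans [32, 64);
   the ranges do not overlap, so the two references are disambiguated
   even though they share the base S.  */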

/* Return true if an indirect reference based on *PTR1 constrained
   to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
   constrained to [OFFSET2, OFFSET2 + MAX_SIZE2).  *PTR1 and BASE2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			       HOST_WIDE_INT offset1,
			       HOST_WIDE_INT max_size1 ATTRIBUTE_UNUSED,
			       alias_set_type ref1_alias_set,
			       alias_set_type base1_alias_set,
			       tree ref2 ATTRIBUTE_UNUSED, tree base2,
			       HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			       alias_set_type ref2_alias_set,
			       alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptrtype1, dbase2;
  HOST_WIDE_INT offset1p = offset1, offset2p = offset2;
  HOST_WIDE_INT doffset1, doffset2;
  double_int moff;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && DECL_P (base2));

  ptr1 = TREE_OPERAND (base1, 0);

  /* The offset embedded in MEM_REFs can be negative.  Bias them
     so that the resulting offset adjustment is positive.  */
  moff = mem_ref_offset (base1);
  moff = double_int_lshift (moff,
			    BITS_PER_UNIT == 8
			    ? 3 : exact_log2 (BITS_PER_UNIT),
			    HOST_BITS_PER_DOUBLE_INT, true);
  if (double_int_negative_p (moff))
    offset2p += double_int_neg (moff).low;
  else
    offset1p += moff.low;
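
  /* E.g. (illustrative, with 8-bit units): for MEM_REF [ptr + -4B]
     moff becomes -32 bits; rather than subtracting 32 from offset1p
     we add 32 to offset2p, keeping both offsets non-negative for the
     overlap check below.  */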

  /* If only one reference is based on a variable, they cannot alias if
     the pointer access is beyond the extent of the variable access.
     (the pointer base cannot validly point to an offset less than zero
     of the variable).
     ???  IVOPTs creates bases that do not honor this restriction,
     so do not apply this optimization for TARGET_MEM_REFs.  */
  if (TREE_CODE (base1) != TARGET_MEM_REF
      && !ranges_overlap_p (MAX (0, offset1p), -1, offset2p, max_size2))
    return false;
  /* They also cannot alias if the pointer may not point to the decl.  */
  if (!ptr_deref_may_alias_decl_p (ptr1, base2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == -1)
    base1_alias_set = get_deref_alias_set (ptrtype1);
  if (base1_alias_set == 0)
    return true;
  if (base2_alias_set == -1)
    base2_alias_set = get_alias_set (base2);

  /* When we are trying to disambiguate an access with a pointer dereference
     as base versus one with a decl as base we can use both the size
     of the decl and its dynamic type for extra disambiguation.
     ???  We do not know anything about the dynamic type of the decl
     other than that its alias-set contains base2_alias_set as a subset
     which does not help us here.  */
  /* As we know nothing useful about the dynamic type of the decl just
     use the usual conflict check rather than a subset test.
     ???  We could introduce -fvery-strict-aliasing when the language
     does not allow decls to have a dynamic type that differs from their
     static type.  Then we can check
     !alias_set_subset_of (base1_alias_set, base2_alias_set) instead.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;
  /* If the size of the access relevant for TBAA through the pointer
     is bigger than the size of the decl we can't possibly access the
     decl via that pointer.  */
  if (DECL_SIZE (base2) && COMPLETE_TYPE_P (TREE_TYPE (ptrtype1))
      && TREE_CODE (DECL_SIZE (base2)) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (ptrtype1))) == INTEGER_CST
      /* ???  This in turn goes wrong when a decl of type T which is
	 a member of union type U is accessed through a pointer to
	 type U and sizeof T is smaller than sizeof U.  */
      && TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
      && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
      && tree_int_cst_lt (DECL_SIZE (base2), TYPE_SIZE (TREE_TYPE (ptrtype1))))
    return false;

  if (!ref2)
    return true;

  /* If the decl is accessed via a MEM_REF, reconstruct the base
     we can use for TBAA and an appropriately adjusted offset.  */
  dbase2 = ref2;
  while (handled_component_p (dbase2))
    dbase2 = TREE_OPERAND (dbase2, 0);
  doffset1 = offset1;
  doffset2 = offset2;
  if (TREE_CODE (dbase2) == MEM_REF
      || TREE_CODE (dbase2) == TARGET_MEM_REF)
    {
      double_int moff = mem_ref_offset (dbase2);
      moff = double_int_lshift (moff,
				BITS_PER_UNIT == 8
				? 3 : exact_log2 (BITS_PER_UNIT),
				HOST_BITS_PER_DOUBLE_INT, true);
      if (double_int_negative_p (moff))
	doffset1 -= double_int_neg (moff).low;
      else
	doffset2 -= moff.low;
    }

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (base2)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.
     For MEM_REFs we require that the component-ref offset we computed
     is relative to the start of the type which we ensure by
     comparing rvalue and access type and disregarding the constant
     pointer offset.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
    return ranges_overlap_p (doffset1, max_size1, doffset2, max_size2);

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, true);

  return true;
}

/* Return true if two indirect references based on *PTR1
   and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  *PTR1 and *PTR2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			   HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   tree ref2 ATTRIBUTE_UNUSED, tree base2,
			   HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptr2;
  tree ptrtype1, ptrtype2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && (TREE_CODE (base2) == MEM_REF
			   || TREE_CODE (base2) == TARGET_MEM_REF));

  ptr1 = TREE_OPERAND (base1, 0);
  ptr2 = TREE_OPERAND (base2, 0);

  /* If both bases are based on pointers they cannot alias if they may not
     point to the same memory object or if they point to the same object
     and the accesses do not overlap.  */
  if ((!cfun || gimple_in_ssa_p (cfun))
      && operand_equal_p (ptr1, ptr2, 0)
      && (((TREE_CODE (base1) != TARGET_MEM_REF
	    || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
	   && (TREE_CODE (base2) != TARGET_MEM_REF
	       || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
	  || (TREE_CODE (base1) == TARGET_MEM_REF
	      && TREE_CODE (base2) == TARGET_MEM_REF
	      && (TMR_STEP (base1) == TMR_STEP (base2)
		  || (TMR_STEP (base1) && TMR_STEP (base2)
		      && operand_equal_p (TMR_STEP (base1),
					  TMR_STEP (base2), 0)))
	      && (TMR_INDEX (base1) == TMR_INDEX (base2)
		  || (TMR_INDEX (base1) && TMR_INDEX (base2)
		      && operand_equal_p (TMR_INDEX (base1),
					  TMR_INDEX (base2), 0)))
	      && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
		  || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
		      && operand_equal_p (TMR_INDEX2 (base1),
					  TMR_INDEX2 (base2), 0))))))
    {
      double_int moff;
      /* The offset embedded in MEM_REFs can be negative.  Bias them
	 so that the resulting offset adjustment is positive.  */
      moff = mem_ref_offset (base1);
      moff = double_int_lshift (moff,
				BITS_PER_UNIT == 8
				? 3 : exact_log2 (BITS_PER_UNIT),
				HOST_BITS_PER_DOUBLE_INT, true);
      if (double_int_negative_p (moff))
	offset2 += double_int_neg (moff).low;
      else
	offset1 += moff.low;
      moff = mem_ref_offset (base2);
      moff = double_int_lshift (moff,
				BITS_PER_UNIT == 8
				? 3 : exact_log2 (BITS_PER_UNIT),
				HOST_BITS_PER_DOUBLE_INT, true);
      if (double_int_negative_p (moff))
	offset1 += double_int_neg (moff).low;
      else
	offset2 += moff.low;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  if (!ptr_derefs_may_alias_p (ptr1, ptr2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
  ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == -1)
    base1_alias_set = get_deref_alias_set (ptrtype1);
  if (base1_alias_set == 0)
    return true;
  if (base2_alias_set == -1)
    base2_alias_set = get_deref_alias_set (ptrtype2);
  if (base2_alias_set == 0)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && (TREE_CODE (base2) != TARGET_MEM_REF
	  || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
      && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1
      && same_type_for_tbaa (TREE_TYPE (ptrtype1),
			     TREE_TYPE (ptrtype2)) == 1)
    return ranges_overlap_p (offset1, max_size1, offset2, max_size2);

  /* Do type-based disambiguation.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
      && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1)
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, false);

  return true;
}

/* Return true if the two memory references REF1 and REF2 may alias.  */

bool
refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
  tree base1, base2;
  HOST_WIDE_INT offset1 = 0, offset2 = 0;
  HOST_WIDE_INT max_size1 = -1, max_size2 = -1;
  bool var1_p, var2_p, ind1_p, ind2_p;

  gcc_checking_assert ((!ref1->ref
			|| TREE_CODE (ref1->ref) == SSA_NAME
			|| DECL_P (ref1->ref)
			|| TREE_CODE (ref1->ref) == STRING_CST
			|| handled_component_p (ref1->ref)
			|| TREE_CODE (ref1->ref) == MEM_REF
			|| TREE_CODE (ref1->ref) == TARGET_MEM_REF)
		       && (!ref2->ref
			   || TREE_CODE (ref2->ref) == SSA_NAME
			   || DECL_P (ref2->ref)
			   || TREE_CODE (ref2->ref) == STRING_CST
			   || handled_component_p (ref2->ref)
			   || TREE_CODE (ref2->ref) == MEM_REF
			   || TREE_CODE (ref2->ref) == TARGET_MEM_REF));

  /* Decompose the references into their base objects and the access.  */
  base1 = ao_ref_base (ref1);
  offset1 = ref1->offset;
  max_size1 = ref1->max_size;
  base2 = ao_ref_base (ref2);
  offset2 = ref2->offset;
  max_size2 = ref2->max_size;

  /* We can end up with registers or constants as bases for example from
     *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
     which is seen as a struct copy.  */
  if (TREE_CODE (base1) == SSA_NAME
      || TREE_CODE (base1) == CONST_DECL
      || TREE_CODE (base1) == CONSTRUCTOR
      || TREE_CODE (base1) == ADDR_EXPR
      || CONSTANT_CLASS_P (base1)
      || TREE_CODE (base2) == SSA_NAME
      || TREE_CODE (base2) == CONST_DECL
      || TREE_CODE (base2) == CONSTRUCTOR
      || TREE_CODE (base2) == ADDR_EXPR
      || CONSTANT_CLASS_P (base2))
    return false;

  /* We can end up referring to code via function and label decls.
     As we likely do not properly track code aliases, conservatively
     bail out.  */
  if (TREE_CODE (base1) == FUNCTION_DECL
      || TREE_CODE (base1) == LABEL_DECL
      || TREE_CODE (base2) == FUNCTION_DECL
      || TREE_CODE (base2) == LABEL_DECL)
    return true;

  /* Two volatile accesses always conflict.  */
  if (ref1->volatile_p
      && ref2->volatile_p)
    return true;

  /* Defer to simple offset based disambiguation if we have
     references based on two decls.  Do this before deferring to
     TBAA to handle must-alias cases in conformance with the
     GCC extension of allowing type-punning through unions.  */
  var1_p = DECL_P (base1);
  var2_p = DECL_P (base2);
  if (var1_p && var2_p)
    return decl_refs_may_alias_p (base1, offset1, max_size1,
				  base2, offset2, max_size2);

  ind1_p = (TREE_CODE (base1) == MEM_REF
	    || TREE_CODE (base1) == TARGET_MEM_REF);
  ind2_p = (TREE_CODE (base2) == MEM_REF
	    || TREE_CODE (base2) == TARGET_MEM_REF);

  /* Canonicalize the pointer-vs-decl case.  */
  if (ind1_p && var2_p)
    {
      HOST_WIDE_INT tmp1;
      tree tmp2;
      ao_ref *tmp3;
      tmp1 = offset1; offset1 = offset2; offset2 = tmp1;
      tmp1 = max_size1; max_size1 = max_size2; max_size2 = tmp1;
      tmp2 = base1; base1 = base2; base2 = tmp2;
      tmp3 = ref1; ref1 = ref2; ref2 = tmp3;
      var1_p = true;
      ind1_p = false;
      var2_p = false;
      ind2_p = true;
    }

  /* First defer to TBAA if possible.  */
  if (tbaa_p
      && flag_strict_aliasing
      && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
				 ao_ref_alias_set (ref2)))
    return false;

  /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators.  */
  if (var1_p && ind2_p)
    return indirect_ref_may_alias_decl_p (ref2->ref, base2,
					  offset2, max_size2,
					  ao_ref_alias_set (ref2), -1,
					  ref1->ref, base1,
					  offset1, max_size1,
					  ao_ref_alias_set (ref1),
					  ao_ref_base_alias_set (ref1),
					  tbaa_p);
  else if (ind1_p && ind2_p)
    return indirect_refs_may_alias_p (ref1->ref, base1,
				      offset1, max_size1,
				      ao_ref_alias_set (ref1), -1,
				      ref2->ref, base2,
				      offset2, max_size2,
				      ao_ref_alias_set (ref2), -1,
				      tbaa_p);

  /* We really do not want to end up here, but returning true is safe.  */
#ifdef ENABLE_CHECKING
  gcc_unreachable ();
#else
  return true;
#endif
}

/* Wrapper around refs_may_alias_p_1 that also maintains the query
   statistics.  */

bool
refs_may_alias_p (tree ref1, tree ref2)
{
  ao_ref r1, r2;
  bool res;
  ao_ref_init (&r1, ref1);
  ao_ref_init (&r2, ref2);
  res = refs_may_alias_p_1 (&r1, &r2, true);
  if (res)
    ++alias_stats.refs_may_alias_p_may_alias;
  else
    ++alias_stats.refs_may_alias_p_no_alias;
  return res;
}

/* Returns true if there is an anti-dependence for the STORE that
   executes after the LOAD.  */

bool
refs_anti_dependent_p (tree load, tree store)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, load);
  ao_ref_init (&r2, store);
  return refs_may_alias_p_1 (&r1, &r2, false);
}

/* Returns true if there is an output dependence for the stores
   STORE1 and STORE2.  */

bool
refs_output_dependent_p (tree store1, tree store2)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, store1);
  ao_ref_init (&r2, store2);
  return refs_may_alias_p_1 (&r1, &r2, false);
}

/* If the call CALL may use the memory reference REF return true,
   otherwise return false.  */

static bool
ref_maybe_used_by_call_p_1 (gimple call, ao_ref *ref)
{
  tree base, callee;
  unsigned i;
  int flags = gimple_call_flags (call);

  /* Const functions without a static chain do not implicitly use memory.  */
  if (!gimple_call_chain (call)
      && (flags & (ECF_CONST|ECF_NOVOPS)))
    goto process_args;

  base = ao_ref_base (ref);
  if (!base)
    return true;

  /* A call that is not without side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;

  /* If the reference is based on a decl that is not aliased the call
     cannot possibly use it.  */
  if (DECL_P (base)
      && !may_be_aliased (base)
      /* But local statics can be used through recursion.  */
      && !is_global_var (base))
    goto process_args;

  callee = gimple_call_fndecl (call);

  /* Explicitly handle those builtin functions that do not act as
     escape points.  See tree-ssa-structalias.c:find_func_aliases
     for the list of builtins we might need to handle here.  */
  if (callee != NULL_TREE
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
	/* All the following functions read memory pointed to by
	   their second argument.  strcat/strncat additionally
	   read memory pointed to by the first argument.  */
	case BUILT_IN_STRCAT:
	case BUILT_IN_STRNCAT:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	  }
	  /* FALLTHRU */
	case BUILT_IN_STRCPY:
	case BUILT_IN_STRNCPY:
	case BUILT_IN_MEMCPY:
	case BUILT_IN_MEMMOVE:
	case BUILT_IN_MEMPCPY:
	case BUILT_IN_STPCPY:
	case BUILT_IN_STPNCPY:
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 3)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_STRCAT_CHK:
	case BUILT_IN_STRNCAT_CHK:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	  }
	  /* FALLTHRU */
	case BUILT_IN_STRCPY_CHK:
	case BUILT_IN_STRNCPY_CHK:
	case BUILT_IN_MEMCPY_CHK:
	case BUILT_IN_MEMMOVE_CHK:
	case BUILT_IN_MEMPCPY_CHK:
	case BUILT_IN_STPCPY_CHK:
	case BUILT_IN_STPNCPY_CHK:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 4)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_BCOPY:
	  {
	    ao_ref dref;
	    tree size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }

	/* The following functions read memory pointed to by their
	   first argument.  */
	CASE_BUILT_IN_TM_LOAD (1):
	CASE_BUILT_IN_TM_LOAD (2):
	CASE_BUILT_IN_TM_LOAD (4):
	CASE_BUILT_IN_TM_LOAD (8):
	CASE_BUILT_IN_TM_LOAD (FLOAT):
	CASE_BUILT_IN_TM_LOAD (DOUBLE):
	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
	CASE_BUILT_IN_TM_LOAD (M64):
	CASE_BUILT_IN_TM_LOAD (M128):
	CASE_BUILT_IN_TM_LOAD (M256):
	case BUILT_IN_TM_LOG:
	case BUILT_IN_TM_LOG_1:
	case BUILT_IN_TM_LOG_2:
	case BUILT_IN_TM_LOG_4:
	case BUILT_IN_TM_LOG_8:
	case BUILT_IN_TM_LOG_FLOAT:
	case BUILT_IN_TM_LOG_DOUBLE:
	case BUILT_IN_TM_LOG_LDOUBLE:
	case BUILT_IN_TM_LOG_M64:
	case BUILT_IN_TM_LOG_M128:
	case BUILT_IN_TM_LOG_M256:
	  return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);

	/* These read memory pointed to by the first argument.  */
	case BUILT_IN_STRDUP:
	case BUILT_IN_STRNDUP:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 2)
	      size = gimple_call_arg (call, 1);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* The following builtins do not read from memory.  */
	case BUILT_IN_FREE:
	case BUILT_IN_MALLOC:
	case BUILT_IN_CALLOC:
	case BUILT_IN_ALLOCA:
	case BUILT_IN_ALLOCA_WITH_ALIGN:
	case BUILT_IN_STACK_SAVE:
	case BUILT_IN_STACK_RESTORE:
	case BUILT_IN_MEMSET:
	case BUILT_IN_TM_MEMSET:
	case BUILT_IN_MEMSET_CHK:
	case BUILT_IN_FREXP:
	case BUILT_IN_FREXPF:
	case BUILT_IN_FREXPL:
	case BUILT_IN_GAMMA_R:
	case BUILT_IN_GAMMAF_R:
	case BUILT_IN_GAMMAL_R:
	case BUILT_IN_LGAMMA_R:
	case BUILT_IN_LGAMMAF_R:
	case BUILT_IN_LGAMMAL_R:
	case BUILT_IN_MODF:
	case BUILT_IN_MODFF:
	case BUILT_IN_MODFL:
	case BUILT_IN_REMQUO:
	case BUILT_IN_REMQUOF:
	case BUILT_IN_REMQUOL:
	case BUILT_IN_SINCOS:
	case BUILT_IN_SINCOSF:
	case BUILT_IN_SINCOSL:
	case BUILT_IN_ASSUME_ALIGNED:
	case BUILT_IN_VA_END:
	  return false;
	/* __sync_* builtins and some OpenMP builtins act as threading
	   barriers.  */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
#include "sync-builtins.def"
#undef DEF_SYNC_BUILTIN
	case BUILT_IN_GOMP_ATOMIC_START:
	case BUILT_IN_GOMP_ATOMIC_END:
	case BUILT_IN_GOMP_BARRIER:
	case BUILT_IN_GOMP_TASKWAIT:
	case BUILT_IN_GOMP_CRITICAL_START:
	case BUILT_IN_GOMP_CRITICAL_END:
	case BUILT_IN_GOMP_CRITICAL_NAME_START:
	case BUILT_IN_GOMP_CRITICAL_NAME_END:
	case BUILT_IN_GOMP_LOOP_END:
	case BUILT_IN_GOMP_ORDERED_START:
	case BUILT_IN_GOMP_ORDERED_END:
	case BUILT_IN_GOMP_PARALLEL_END:
	case BUILT_IN_GOMP_SECTIONS_END:
	case BUILT_IN_GOMP_SINGLE_COPY_START:
	case BUILT_IN_GOMP_SINGLE_COPY_END:
	  return true;

	default:
	  /* Fallthru to general call handling.  */;
      }

  /* Check if base is a global static variable that is not read
     by the function.  */
  if (callee != NULL_TREE
      && TREE_CODE (base) == VAR_DECL
      && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_get_node (callee);
      bitmap not_read;

      /* FIXME: Callee can be an OMP builtin that does not have a call graph
	 node yet.  We should enforce that there are nodes for all decls in the
	 IL and remove this check instead.  */
      if (node
	  && (not_read = ipa_reference_get_not_read_global (node))
	  && bitmap_bit_p (not_read, DECL_UID (base)))
	goto process_args;
    }

  /* Check if the base variable is call-used.  */
  if (DECL_P (base))
    {
      if (pt_solution_includes (gimple_call_use_set (call), base))
	return true;
    }
  else if ((TREE_CODE (base) == MEM_REF
	    || TREE_CODE (base) == TARGET_MEM_REF)
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      if (!pi)
	return true;

      if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
	return true;
    }
  else
    return true;

  /* Inspect call arguments for passed-by-value aliases.  */
process_args:
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree op = gimple_call_arg (call, i);
      int flags = gimple_call_arg_flags (call, i);

      if (flags & EAF_UNUSED)
	continue;

      if (TREE_CODE (op) == WITH_SIZE_EXPR)
	op = TREE_OPERAND (op, 0);

      if (TREE_CODE (op) != SSA_NAME
	  && !is_gimple_min_invariant (op))
	{
	  ao_ref r;
	  ao_ref_init (&r, op);
	  if (refs_may_alias_p_1 (&r, ref, true))
	    return true;
	}
    }

  return false;
}

/* Wrapper around ref_maybe_used_by_call_p_1 that also maintains the
   query statistics.  */

static bool
ref_maybe_used_by_call_p (gimple call, tree ref)
{
  ao_ref r;
  bool res;
  ao_ref_init (&r, ref);
  res = ref_maybe_used_by_call_p_1 (call, &r);
  if (res)
    ++alias_stats.ref_maybe_used_by_call_p_may_alias;
  else
    ++alias_stats.ref_maybe_used_by_call_p_no_alias;
  return res;
}


/* If the statement STMT may use the memory reference REF return
   true, otherwise return false.  */

bool
ref_maybe_used_by_stmt_p (gimple stmt, tree ref)
{
  if (is_gimple_assign (stmt))
    {
      tree rhs;

      /* Assignments that touch memory are always single (non-compound)
	 assignments.  */
      if (!gimple_assign_single_p (stmt))
	return false;

      rhs = gimple_assign_rhs1 (stmt);
      if (is_gimple_reg (rhs)
	  || is_gimple_min_invariant (rhs)
	  || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
	return false;

      return refs_may_alias_p (rhs, ref);
    }
  else if (is_gimple_call (stmt))
    return ref_maybe_used_by_call_p (stmt, ref);
  else if (gimple_code (stmt) == GIMPLE_RETURN)
    {
      tree retval = gimple_return_retval (stmt);
      tree base;
      if (retval
	  && TREE_CODE (retval) != SSA_NAME
	  && !is_gimple_min_invariant (retval)
	  && refs_may_alias_p (retval, ref))
	return true;
      /* If ref escapes the function then the return acts as a use.  */
      base = get_base_address (ref);
      if (!base)
	;
      else if (DECL_P (base))
	return is_global_var (base);
      else if (TREE_CODE (base) == MEM_REF
	       || TREE_CODE (base) == TARGET_MEM_REF)
	return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
      return false;
    }

  return true;
}
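
/* For example (illustrative), for the assignment 'x = *p' the RHS is
   a memory reference and the query reduces to refs_may_alias_p (*p,
   ref), while for 'x = 1' the RHS is a gimple invariant and the
   function returns false for any reference: the store may clobber
   memory, but it uses none.  */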

/* If the call in statement CALL may clobber the memory reference REF
   return true, otherwise return false.  */

static bool
call_may_clobber_ref_p_1 (gimple call, ao_ref *ref)
{
  tree base;
  tree callee;

  /* If the call is pure or const it cannot clobber anything.  */
  if (gimple_call_flags (call)
      & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
    return false;

  base = ao_ref_base (ref);
  if (!base)
    return true;

  if (TREE_CODE (base) == SSA_NAME
      || CONSTANT_CLASS_P (base))
    return false;

  /* A call that is not without side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;

  /* If the reference is based on a decl that is not aliased the call
     cannot possibly clobber it.  */
  if (DECL_P (base)
      && !may_be_aliased (base)
      /* But local non-readonly statics can be modified through recursion,
	 or the call may implement a threading barrier which we must
	 treat as a may-def.  */
      && (TREE_READONLY (base)
	  || !is_global_var (base)))
    return false;

  callee = gimple_call_fndecl (call);

  /* Explicitly handle those builtin functions that do not act as
     escape points.  See tree-ssa-structalias.c:find_func_aliases
     for the list of builtins we might need to handle here.  */
  if (callee != NULL_TREE
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
	/* All the following functions clobber memory pointed to by
	   their first argument.  */
	case BUILT_IN_STRCPY:
	case BUILT_IN_STRNCPY:
	case BUILT_IN_MEMCPY:
	case BUILT_IN_MEMMOVE:
	case BUILT_IN_MEMPCPY:
	case BUILT_IN_STPCPY:
	case BUILT_IN_STPNCPY:
	case BUILT_IN_STRCAT:
	case BUILT_IN_STRNCAT:
	case BUILT_IN_MEMSET:
	case BUILT_IN_TM_MEMSET:
	CASE_BUILT_IN_TM_STORE (1):
	CASE_BUILT_IN_TM_STORE (2):
	CASE_BUILT_IN_TM_STORE (4):
	CASE_BUILT_IN_TM_STORE (8):
	CASE_BUILT_IN_TM_STORE (FLOAT):
	CASE_BUILT_IN_TM_STORE (DOUBLE):
	CASE_BUILT_IN_TM_STORE (LDOUBLE):
	CASE_BUILT_IN_TM_STORE (M64):
	CASE_BUILT_IN_TM_STORE (M128):
	CASE_BUILT_IN_TM_STORE (M256):
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    /* Don't pass a size for strncat: it writes up to n + 1 bytes
	       at dest + strlen (dest), not n bytes at dest, and
	       strlen (dest) isn't known here.  */
	    if (gimple_call_num_args (call) == 3
		&& DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_STRCPY_CHK:
	case BUILT_IN_STRNCPY_CHK:
	case BUILT_IN_MEMCPY_CHK:
	case BUILT_IN_MEMMOVE_CHK:
	case BUILT_IN_MEMPCPY_CHK:
	case BUILT_IN_STPCPY_CHK:
	case BUILT_IN_STPNCPY_CHK:
	case BUILT_IN_STRCAT_CHK:
	case BUILT_IN_STRNCAT_CHK:
	case BUILT_IN_MEMSET_CHK:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    /* Don't pass a size for __strncat_chk: it writes up to
	       n + 1 bytes at dest + strlen (dest), not n bytes at dest,
	       and strlen (dest) isn't known here.  */
	    if (gimple_call_num_args (call) == 4
		&& DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_BCOPY:
	  {
	    ao_ref dref;
	    tree size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* Allocating memory does not have any side-effects apart from
	   being the definition point for the pointer.  */
	case BUILT_IN_MALLOC:
	case BUILT_IN_CALLOC:
	case BUILT_IN_STRDUP:
	case BUILT_IN_STRNDUP:
	  /* Unix98 specifies that errno is set on allocation failure.  */
	  if (flag_errno_math
	      && targetm.ref_may_alias_errno (ref))
	    return true;
	  return false;
	case BUILT_IN_STACK_SAVE:
	case BUILT_IN_ALLOCA:
	case BUILT_IN_ALLOCA_WITH_ALIGN:
	case BUILT_IN_ASSUME_ALIGNED:
	  return false;
	/* Freeing memory kills the pointed-to memory.  More importantly,
	   the call has to serve as a barrier for moving loads and stores
	   across it.  */
	case BUILT_IN_FREE:
	case BUILT_IN_VA_END:
	  {
	    tree ptr = gimple_call_arg (call, 0);
	    return ptr_deref_may_alias_ref_p_1 (ptr, ref);
	  }
	case BUILT_IN_GAMMA_R:
	case BUILT_IN_GAMMAF_R:
	case BUILT_IN_GAMMAL_R:
	case BUILT_IN_LGAMMA_R:
	case BUILT_IN_LGAMMAF_R:
	case BUILT_IN_LGAMMAL_R:
	  {
	    tree out = gimple_call_arg (call, 1);
	    if (ptr_deref_may_alias_ref_p_1 (out, ref))
	      return true;
	    if (flag_errno_math)
	      break;
	    return false;
	  }
	case BUILT_IN_FREXP:
	case BUILT_IN_FREXPF:
	case BUILT_IN_FREXPL:
	case BUILT_IN_MODF:
	case BUILT_IN_MODFF:
	case BUILT_IN_MODFL:
	  {
	    tree out = gimple_call_arg (call, 1);
	    return ptr_deref_may_alias_ref_p_1 (out, ref);
	  }
	case BUILT_IN_REMQUO:
	case BUILT_IN_REMQUOF:
	case BUILT_IN_REMQUOL:
	  {
	    tree out = gimple_call_arg (call, 2);
	    if (ptr_deref_may_alias_ref_p_1 (out, ref))
	      return true;
	    if (flag_errno_math)
	      break;
	    return false;
	  }
	case BUILT_IN_SINCOS:
	case BUILT_IN_SINCOSF:
	case BUILT_IN_SINCOSL:
	  {
	    tree sin = gimple_call_arg (call, 1);
	    tree cos = gimple_call_arg (call, 2);
	    return (ptr_deref_may_alias_ref_p_1 (sin, ref)
		    || ptr_deref_may_alias_ref_p_1 (cos, ref));
	  }
1656 	/* __sync_* builtins and some OpenMP builtins act as threading
1657 	   barriers.  */
1658 #undef DEF_SYNC_BUILTIN
1659 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
1660 #include "sync-builtins.def"
1661 #undef DEF_SYNC_BUILTIN
1662 	case BUILT_IN_GOMP_ATOMIC_START:
1663 	case BUILT_IN_GOMP_ATOMIC_END:
1664 	case BUILT_IN_GOMP_BARRIER:
1665 	case BUILT_IN_GOMP_TASKWAIT:
1666 	case BUILT_IN_GOMP_CRITICAL_START:
1667 	case BUILT_IN_GOMP_CRITICAL_END:
1668 	case BUILT_IN_GOMP_CRITICAL_NAME_START:
1669 	case BUILT_IN_GOMP_CRITICAL_NAME_END:
1670 	case BUILT_IN_GOMP_LOOP_END:
1671 	case BUILT_IN_GOMP_ORDERED_START:
1672 	case BUILT_IN_GOMP_ORDERED_END:
1673 	case BUILT_IN_GOMP_PARALLEL_END:
1674 	case BUILT_IN_GOMP_SECTIONS_END:
1675 	case BUILT_IN_GOMP_SINGLE_COPY_START:
1676 	case BUILT_IN_GOMP_SINGLE_COPY_END:
1677 	  return true;
1678 	default:
1679 	  /* Fallthru to general call handling.  */;
1680       }
1681 
1682   /* Check if base is a global static variable that is not written
1683      by the function.  */
1684   if (callee != NULL_TREE
1685       && TREE_CODE (base) == VAR_DECL
1686       && TREE_STATIC (base))
1687     {
1688       struct cgraph_node *node = cgraph_get_node (callee);
1689       bitmap not_written;
1690 
1691       if (node
1692 	  && (not_written = ipa_reference_get_not_written_global (node))
1693 	  && bitmap_bit_p (not_written, DECL_UID (base)))
1694 	return false;
1695     }
1696 
1697   /* Check if the base variable is call-clobbered.  */
1698   if (DECL_P (base))
1699     return pt_solution_includes (gimple_call_clobber_set (call), base);
1700   else if ((TREE_CODE (base) == MEM_REF
1701 	    || TREE_CODE (base) == TARGET_MEM_REF)
1702 	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
1703     {
1704       struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
1705       if (!pi)
1706 	return true;
1707 
1708       return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
1709     }
1710 
1711   return true;
1712 }

/* If the call in statement CALL may clobber the memory reference REF
   return true, otherwise return false.  */

bool
call_may_clobber_ref_p (gimple call, tree ref)
{
  bool res;
  ao_ref r;
  ao_ref_init (&r, ref);
  res = call_may_clobber_ref_p_1 (call, &r);
  if (res)
    ++alias_stats.call_may_clobber_ref_p_may_alias;
  else
    ++alias_stats.call_may_clobber_ref_p_no_alias;
  return res;
}
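
/* A minimal usage sketch (illustrative only; STMT and DECL below are
   hypothetical names, not taken from this file):

     if (is_gimple_call (stmt)
         && !call_may_clobber_ref_p (stmt, decl))
       ;  // the call itself cannot write to (parts of) DECL

   Note this only disambiguates against the call; a store to the
   call's LHS is handled by stmt_may_clobber_ref_p below.  */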


/* If the statement STMT may clobber the memory reference REF return true,
   otherwise return false.  */

bool
stmt_may_clobber_ref_p_1 (gimple stmt, ao_ref *ref)
{
  if (is_gimple_call (stmt))
    {
      tree lhs = gimple_call_lhs (stmt);
      if (lhs
          && TREE_CODE (lhs) != SSA_NAME)
        {
          ao_ref r;
          ao_ref_init (&r, lhs);
          if (refs_may_alias_p_1 (ref, &r, true))
            return true;
        }

      return call_may_clobber_ref_p_1 (stmt, ref);
    }
  else if (gimple_assign_single_p (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if (TREE_CODE (lhs) != SSA_NAME)
        {
          ao_ref r;
          ao_ref_init (&r, lhs);
          return refs_may_alias_p_1 (ref, &r, true);
        }
    }
  else if (gimple_code (stmt) == GIMPLE_ASM)
    return true;

  return false;
}

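/* Like stmt_may_clobber_ref_p_1 but takes the reference tree REF
   instead of an already initialized ao_ref.  */
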
bool
stmt_may_clobber_ref_p (gimple stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_may_clobber_ref_p_1 (stmt, &r);
}
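
/* For example (an illustrative sketch, not code from this file), given

     # .MEM_3 = VDEF <.MEM_2>
     *p_1 = 1;

   stmt_may_clobber_ref_p returns true for a reference to a variable 'a'
   whenever the points-to set of p_1 contains 'a' (or is unknown), and
   false when points-to analysis proved p_1 cannot point to 'a'.  */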

/* If STMT kills the memory reference REF return true, otherwise
   return false.  */

static bool
stmt_kills_ref_p_1 (gimple stmt, ao_ref *ref)
{
  /* For a must-alias check we need to be able to constrain
     the access properly.  */
  ao_ref_base (ref);
  if (ref->max_size == -1)
    return false;

  if (gimple_has_lhs (stmt)
      && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
         and we can catch it in the current function where we could inspect
         the previous value.
         ???  We only need to care about the RHS throwing.  For aggregate
         assignments or similar calls and non-call exceptions the LHS
         might throw as well.  */
      && !stmt_can_throw_internal (stmt))
    {
      tree base, lhs = gimple_get_lhs (stmt);
      HOST_WIDE_INT size, offset, max_size;
      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
         so base == ref->base does not always hold.  */
      if (base == ref->base)
        {
          /* For a must-alias check we need to be able to constrain
             the access properly.  */
          if (size != -1 && size == max_size)
            {
              if (offset <= ref->offset
                  && offset + size >= ref->offset + ref->max_size)
                return true;
            }
        }
    }

  if (is_gimple_call (stmt))
    {
      tree callee = gimple_call_fndecl (stmt);
      if (callee != NULL_TREE
          && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (callee))
          {
          case BUILT_IN_MEMCPY:
          case BUILT_IN_MEMPCPY:
          case BUILT_IN_MEMMOVE:
          case BUILT_IN_MEMSET:
          case BUILT_IN_MEMCPY_CHK:
          case BUILT_IN_MEMPCPY_CHK:
          case BUILT_IN_MEMMOVE_CHK:
          case BUILT_IN_MEMSET_CHK:
            {
              tree dest = gimple_call_arg (stmt, 0);
              tree len = gimple_call_arg (stmt, 2);
              tree base = NULL_TREE;
              HOST_WIDE_INT offset = 0;
              if (!host_integerp (len, 0))
                return false;
              if (TREE_CODE (dest) == ADDR_EXPR)
                base = get_addr_base_and_unit_offset (TREE_OPERAND (dest, 0),
                                                      &offset);
              else if (TREE_CODE (dest) == SSA_NAME)
                base = dest;
              if (base
                  && base == ao_ref_base (ref))
                {
                  HOST_WIDE_INT size = TREE_INT_CST_LOW (len);
                  if (offset <= ref->offset / BITS_PER_UNIT
                      && (offset + size
                          >= ((ref->offset + ref->max_size + BITS_PER_UNIT - 1)
                              / BITS_PER_UNIT)))
                    return true;
                }
              break;
            }

          case BUILT_IN_VA_END:
            {
              tree ptr = gimple_call_arg (stmt, 0);
              if (TREE_CODE (ptr) == ADDR_EXPR)
                {
                  tree base = ao_ref_base (ref);
                  if (TREE_OPERAND (ptr, 0) == base)
                    return true;
                }
              break;
            }

          default:;
          }
    }
  return false;
}

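/* Like stmt_kills_ref_p_1 but takes the reference tree REF instead of
   an already initialized ao_ref.  */
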
bool
stmt_kills_ref_p (gimple stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_kills_ref_p_1 (stmt, &r);
}
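
/* For example (an illustrative sketch, not code from this file),

     memset (&a, 0, sizeof (a));

   kills any reference into 'a' such as a.x or a.b[2]:  the destination
   is &a at offset 0 and the constant length covers the whole extent
   ref->offset + ref->max_size of the reference.  */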


/* Walk the virtual use-def chain of VUSE until hitting the virtual operand
   TARGET, in which case true is returned, or until hitting a statement
   clobbering the memory reference REF, in which case false is returned.
   The walk starts with VUSE, one argument of PHI.  */

static bool
maybe_skip_until (gimple phi, tree target, ao_ref *ref,
                  tree vuse, bitmap *visited, bool abort_on_visited)
{
  basic_block bb = gimple_bb (phi);

  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);

  bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));

  /* Walk until we hit the target.  */
  while (vuse != target)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
      /* Recurse for PHI nodes.  */
      if (gimple_code (def_stmt) == GIMPLE_PHI)
        {
          /* An already visited PHI node ends the walk successfully.  */
          if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
            return !abort_on_visited;
          vuse = get_continuation_for_phi (def_stmt, ref,
                                           visited, abort_on_visited);
          if (!vuse)
            return false;
          continue;
        }
      /* A clobbering statement or the end of the IL ends the walk
         unsuccessfully.  */
      else if (gimple_nop_p (def_stmt)
               || stmt_may_clobber_ref_p_1 (def_stmt, ref))
        return false;
      /* If we reach a new basic-block see if we already skipped it
         in a previous walk that ended successfully.  */
      if (gimple_bb (def_stmt) != bb)
        {
          if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
            return !abort_on_visited;
          bb = gimple_bb (def_stmt);
        }
      vuse = gimple_vuse (def_stmt);
    }
  return true;
}

/* For two PHI arguments ARG0 and ARG1 try to skip non-aliasing code
   until we hit the phi argument definition that dominates the other one.
   Return that, or NULL_TREE if there is no such definition.  */

static tree
get_continuation_for_phi_1 (gimple phi, tree arg0, tree arg1,
                            ao_ref *ref, bitmap *visited,
                            bool abort_on_visited)
{
  gimple def0 = SSA_NAME_DEF_STMT (arg0);
  gimple def1 = SSA_NAME_DEF_STMT (arg1);
  tree common_vuse;

  if (arg0 == arg1)
    return arg0;
  else if (gimple_nop_p (def0)
           || (!gimple_nop_p (def1)
               && dominated_by_p (CDI_DOMINATORS,
                                  gimple_bb (def1), gimple_bb (def0))))
    {
      if (maybe_skip_until (phi, arg0, ref, arg1, visited, abort_on_visited))
        return arg0;
    }
  else if (gimple_nop_p (def1)
           || dominated_by_p (CDI_DOMINATORS,
                              gimple_bb (def0), gimple_bb (def1)))
    {
      if (maybe_skip_until (phi, arg1, ref, arg0, visited, abort_on_visited))
        return arg1;
    }
  /* Special case of a diamond:
       MEM_1 = ...
       goto (cond) ? L1 : L2
       L1: store1 = ...    #MEM_2 = vuse(MEM_1)
           goto L3
       L2: store2 = ...    #MEM_3 = vuse(MEM_1)
       L3: MEM_4 = PHI<MEM_2, MEM_3>
     We were called with the PHI at L3, MEM_2 and MEM_3 don't
     dominate each other, but still we can easily skip this PHI node
     if we recognize that the vuse MEM operand is the same for both,
     and that we can skip both statements (they don't clobber us).
     This is still linear.  Don't use maybe_skip_until, that might
     potentially be slow.  */
  else if ((common_vuse = gimple_vuse (def0))
           && common_vuse == gimple_vuse (def1))
    {
      if (!stmt_may_clobber_ref_p_1 (def0, ref)
          && !stmt_may_clobber_ref_p_1 (def1, ref))
        return common_vuse;
    }

  return NULL_TREE;
}


/* Starting from a PHI node for the virtual operand of the memory reference
   REF find a continuation virtual operand that allows us to continue
   walking statements dominating PHI, skipping only statements that cannot
   possibly clobber REF.  Returns NULL_TREE if no suitable virtual operand
   can be found.  */

tree
get_continuation_for_phi (gimple phi, ao_ref *ref, bitmap *visited,
                          bool abort_on_visited)
{
  unsigned nargs = gimple_phi_num_args (phi);

  /* A single-argument PHI can simply be looked through.  */
  if (nargs == 1)
    return PHI_ARG_DEF (phi, 0);

  /* For two or more arguments try to pairwise skip non-aliasing code
     until we hit the phi argument definition that dominates the other one.  */
  else if (nargs >= 2)
    {
      tree arg0, arg1;
      unsigned i;

      /* Find a candidate for the virtual operand whose definition
         dominates those of all others.  */
      arg0 = PHI_ARG_DEF (phi, 0);
      if (!SSA_NAME_IS_DEFAULT_DEF (arg0))
        for (i = 1; i < nargs; ++i)
          {
            arg1 = PHI_ARG_DEF (phi, i);
            if (SSA_NAME_IS_DEFAULT_DEF (arg1))
              {
                arg0 = arg1;
                break;
              }
            if (dominated_by_p (CDI_DOMINATORS,
                                gimple_bb (SSA_NAME_DEF_STMT (arg0)),
                                gimple_bb (SSA_NAME_DEF_STMT (arg1))))
              arg0 = arg1;
          }

      /* Then pairwise reduce against the found candidate.  */
      for (i = 0; i < nargs; ++i)
        {
          arg1 = PHI_ARG_DEF (phi, i);
          arg0 = get_continuation_for_phi_1 (phi, arg0, arg1, ref, visited,
                                             abort_on_visited);
          if (!arg0)
            return NULL_TREE;
        }

      return arg0;
    }

  return NULL_TREE;
}

/* Based on the memory reference REF and its virtual use VUSE call
   WALKER for each virtual use that is equivalent to VUSE, including VUSE
   itself.  That is, for each virtual use whose defining statement does
   not clobber REF.

   WALKER is called with REF, the current virtual use and DATA.  If
   WALKER returns non-NULL the walk stops and its result is returned.
   At the end of a non-successful walk NULL is returned.

   TRANSLATE, if non-NULL, is called with a pointer to REF, the virtual
   use whose definition is a statement that may clobber REF, and DATA.
   If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
   If TRANSLATE returns non-NULL the walk stops and its result is returned.
   If TRANSLATE returns NULL the walk continues; in that case TRANSLATE
   is supposed to have adjusted REF and *DATA so that continuing is valid.

   TODO: Cache the vector of equivalent vuses per ref, vuse pair.  */

void *
walk_non_aliased_vuses (ao_ref *ref, tree vuse,
                        void *(*walker)(ao_ref *, tree, void *),
                        void *(*translate)(ao_ref *, tree, void *), void *data)
{
  bitmap visited = NULL;
  void *res;
  bool translated = false;

  timevar_push (TV_ALIAS_STMT_WALK);

  do
    {
      gimple def_stmt;

      /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
      res = (*walker) (ref, vuse, data);
      if (res)
        break;

      def_stmt = SSA_NAME_DEF_STMT (vuse);
      if (gimple_nop_p (def_stmt))
        break;
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
        vuse = get_continuation_for_phi (def_stmt, ref, &visited, translated);
      else
        {
          if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
            {
              if (!translate)
                break;
              res = (*translate) (ref, vuse, data);
              /* Failed lookup and translation.  */
              if (res == (void *)-1)
                {
                  res = NULL;
                  break;
                }
              /* Lookup succeeded.  */
              else if (res != NULL)
                break;
              /* Translation succeeded, continue walking.  */
              translated = true;
            }
          vuse = gimple_vuse (def_stmt);
        }
    }
  while (vuse);

  if (visited)
    BITMAP_FREE (visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return res;
}
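
/* A minimal usage sketch, assuming hypothetical names (find_stored_value,
   struct lookup_data); nothing below is part of this file.  The walker
   returns non-NULL to stop the walk, here when the defining statement of
   the current vuse is a store that fully covers the looked-up location:

     static void *
     find_stored_value (ao_ref *ref, tree vuse, void *data_)
     {
       struct lookup_data *data = (struct lookup_data *) data_;
       gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
       if (gimple_assign_single_p (def_stmt)
           && stmt_kills_ref_p (def_stmt, data->lookup_ref))
         return (void *) gimple_assign_rhs1 (def_stmt);
       return NULL;  // not found here, keep walking
     }

     ...
     void *val = walk_non_aliased_vuses (&r, gimple_vuse (use_stmt),
                                         find_stored_value, NULL, &data);

   Passing NULL for TRANSLATE makes the walk stop at the first statement
   that may clobber the reference.  */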


/* Based on the memory reference REF call WALKER for each vdef whose
   defining statement may clobber REF, starting with VDEF.  If REF
   is NULL_TREE, each defining statement is visited.

   WALKER is called with REF, the current vdef and DATA.  If WALKER
   returns true the walk is stopped, otherwise it continues.

   At PHI nodes walk_aliased_vdefs forks into one walk for each
   PHI argument (but the visited bitmap makes sure only one walk
   continues beyond merge points).

   The function returns the number of statements walked.  */

static unsigned int
walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
                      bool (*walker)(ao_ref *, tree, void *), void *data,
                      bitmap *visited, unsigned int cnt)
{
  do
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

      if (*visited
          && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
        return cnt;

      if (gimple_nop_p (def_stmt))
        return cnt;
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
        {
          unsigned i;
          if (!*visited)
            *visited = BITMAP_ALLOC (NULL);
          for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
            cnt += walk_aliased_vdefs_1 (ref, gimple_phi_arg_def (def_stmt, i),
                                         walker, data, visited, 0);
          return cnt;
        }

      /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
      cnt++;
      if ((!ref
           || stmt_may_clobber_ref_p_1 (def_stmt, ref))
          && (*walker) (ref, vdef, data))
        return cnt;

      vdef = gimple_vuse (def_stmt);
    }
  while (1);
}

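/* Like walk_aliased_vdefs_1 but allocates a local visited bitmap if the
   caller passes NULL for VISITED and accounts the walk to
   TV_ALIAS_STMT_WALK.  */
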
unsigned int
walk_aliased_vdefs (ao_ref *ref, tree vdef,
                    bool (*walker)(ao_ref *, tree, void *), void *data,
                    bitmap *visited)
{
  bitmap local_visited = NULL;
  unsigned int ret;

  timevar_push (TV_ALIAS_STMT_WALK);

  ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
                              visited ? visited : &local_visited, 0);
  if (local_visited)
    BITMAP_FREE (local_visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return ret;
}
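
/* A minimal usage sketch, assuming hypothetical names (mark_clobberer,
   struct clobber_data); nothing below is part of this file.  The walker
   returns true to stop the walk, here after recording the first aliasing
   store:

     static bool
     mark_clobberer (ao_ref *ref, tree vdef, void *data_)
     {
       struct clobber_data *data = (struct clobber_data *) data_;
       data->clobber_stmt = SSA_NAME_DEF_STMT (vdef);
       return true;  // stop at the first may-clobbering vdef
     }

     ...
     walk_aliased_vdefs (&r, gimple_vuse (stmt), mark_clobberer, &data, NULL);

   Starting from gimple_vuse (stmt) this visits the may-clobbering stores
   reaching STMT; passing NULL for REF would visit every vdef instead.  */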