/* Alias analysis for trees.
   Copyright (C) 2004-2013 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "target.h"
#include "basic-block.h"
#include "timevar.h"	/* for TV_ALIAS_STMT_WALK */
#include "ggc.h"
#include "langhooks.h"
#include "flags.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "dumpfile.h"
#include "gimple.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "params.h"
#include "vec.h"
#include "bitmap.h"
#include "pointer-set.h"
#include "alloc-pool.h"
#include "tree-ssa-alias.h"

/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function; its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.

   This file contains functions for disambiguating memory references,
   the so-called alias oracle, and tools for walking the gimple IL.

   The main alias-oracle entry points are

   bool stmt_may_clobber_ref_p (gimple, tree)

     This function queries if a statement may invalidate (parts of)
     the memory designated by the reference tree argument.

   bool ref_maybe_used_by_stmt_p (gimple, tree)

     This function queries if a statement may need (parts of) the
     memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

   bool refs_may_alias_p (tree, tree)

     This function tries to disambiguate two reference trees.

   bool ptr_deref_may_alias_global_p (tree)

     This function queries if dereferencing a pointer variable may
     alias global memory.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.c.  */
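
/* A minimal usage sketch of the two main entry points (illustrative
   only; STMT and EXPR are placeholders for a statement and a memory
   reference tree of the pass at hand):

     if (!stmt_may_clobber_ref_p (stmt, expr)
	 && !ref_maybe_used_by_stmt_p (stmt, expr))
       {
	 ... STMT neither writes nor reads (parts of) the memory
	     designated by EXPR ...
       }

   Passes issuing many queries against the same reference should
   initialize an ao_ref once via ao_ref_init (see below) and use the
   ao_ref-based worker functions such as refs_may_alias_p_1.  */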


/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
} alias_stats;

void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.refs_may_alias_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.ref_maybe_used_by_call_p_no_alias
	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.call_may_clobber_ref_p_no_alias,
	   alias_stats.call_may_clobber_ref_p_no_alias
	   + alias_stats.call_may_clobber_ref_p_may_alias);
}


/* Return true if dereferencing PTR may alias a global variable.  */

bool
ptr_deref_may_alias_global_p (tree ptr)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here, it may point
     to global memory.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to punt.  */
  if (!pi)
    return true;

  /* ???  This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt);
}

/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (TREE_CODE (decl) != VAR_DECL
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	return base == decl;
      else if (base
	       && CONSTANT_CLASS_P (base))
	return false;
      else
	return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}

/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up comparing a pointer against itself, possibly with an
     empty points-to solution.  In this case we still want to say the
     pointers alias, so shortcut that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ???  This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}
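
/* For example (illustrative): for PTR1 = (char *) p + 4 and PTR2 = q the
   conversion and the offset are stripped above, so the query reduces to
   whether the points-to solutions of p and q intersect.  */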

/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}

/* Return true if REF may refer to global memory.  */

bool
ref_may_alias_global_p (tree ref)
{
  tree base = get_base_address (ref);
  if (DECL_P (base))
    return is_global_var (base);
  else if (TREE_CODE (base) == MEM_REF
	   || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
  return true;
}

/* Return true if STMT may clobber global memory.  */

bool
stmt_may_clobber_global_p (gimple stmt)
{
  tree lhs;

  if (!gimple_vdef (stmt))
    return false;

  /* ???  We could ask the oracle whether an artificial pointer
     dereference with a pointer with points-to information covering
     all global memory (what about non-address taken memory?) may be
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage, do some manual
     checking instead.
     ???  We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      lhs = gimple_assign_lhs (stmt);
      return (TREE_CODE (lhs) != SSA_NAME
	      && ref_may_alias_global_p (lhs));
    case GIMPLE_CALL:
      return true;
    default:
      return true;
    }
}


/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  unsigned i;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  for (i = 1; i < num_ssa_names; i++)
    {
      tree ptr = ssa_name (i);
      struct ptr_info_def *pi;

      if (ptr == NULL_TREE
	  || SSA_NAME_IN_FREE_LIST (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}


/* Dump alias information on stderr.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}


/* Dump the points-to set *PT into FILE.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      if (pt->vars_contains_global)
	fprintf (file, " (includes global vars)");
    }
}

/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    dump_points_to_solution (file, &pi->pt);
  else
    fprintf (file, ", points-to anything");

  fprintf (file, "\n");
}


/* Dump points-to information for VAR into stderr.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}


/* Initializes the alias-oracle reference representation *R from REF.  */

void
ao_ref_init (ao_ref *r, tree ref)
{
  r->ref = ref;
  r->base = NULL_TREE;
  r->offset = 0;
  r->size = -1;
  r->max_size = -1;
  r->ref_alias_set = -1;
  r->base_alias_set = -1;
  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}

/* Returns the base object of the memory reference *REF.  */

tree
ao_ref_base (ao_ref *ref)
{
  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
				       &ref->max_size);
  return ref->base;
}

/* Returns the base object alias set of the memory reference *REF.  */

static alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;
  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  base_ref = ref->ref;
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}

/* Returns the reference alias set of the memory reference *REF.  */

alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
  if (ref->ref_alias_set != -1)
    return ref->ref_alias_set;
  ref->ref_alias_set = get_alias_set (ref->ref);
  return ref->ref_alias_set;
}

/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   to or after the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  HOST_WIDE_INT t1, t2;
  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == ADDR_EXPR)
    ref->base = get_ref_base_and_extent (TREE_OPERAND (ptr, 0),
					 &ref->offset, &t1, &t2);
  else
    {
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  if (size
      && host_integerp (size, 0)
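      /* Reject sizes whose conversion from bytes to bits would
	 overflow.  */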
      && TREE_INT_CST_LOW (size) * 8 / 8 == TREE_INT_CST_LOW (size))
    ref->max_size = ref->size = TREE_INT_CST_LOW (size) * 8;
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}
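
/* A minimal usage sketch (illustrative; SRC and LEN stand for gimple
   values, e.g. the source pointer and length arguments of a memcpy
   call):

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref, src, len);
     ... refs_may_alias_p_1 (&dref, ref, false) ...

   This is the pattern the builtin handlers below use to query the
   oracle about memory accessed through a bare pointer.  */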

/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static inline int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* If we would have to do structural comparison, bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  if (get_alias_set (type1) == get_alias_set (type2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}
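
/* For example (illustrative): int and const int share a main variant and
   thus compare as 1; int and float have different canonical types and
   disjoint alias sets and compare as 0; two array types that are not
   known equal compare as -1 rather than 0 due to the workaround above.  */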

/* Determine if the two component references REF1 and REF2 which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference may alias.  REF2 is the only one that can
   be a decl in which case REF2_IS_DECL is true.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */

static bool
aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
			   HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			   bool ref2_is_decl)
{
  /* If one reference is a component reference through pointers try to find
     a common base and apply offset based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  tree *refp;
  int same_p;

  /* Choose bases and base types to search for.  */
  base1 = ref1;
  while (handled_component_p (base1))
    base1 = TREE_OPERAND (base1, 0);
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    base2 = TREE_OPERAND (base2, 0);
  type2 = TREE_TYPE (base2);

  /* Now search for the type1 in the access path of ref2.  This
     would be a common base for doing offset based disambiguation on.  */
  refp = &ref2;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type1) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type1);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      HOST_WIDE_INT offadj, sztmp, msztmp;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
      offset2 -= offadj;
      get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp);
      offset1 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  /* If we didn't find a common base, try the other way around.  */
  refp = &ref1;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type2) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type2);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      HOST_WIDE_INT offadj, sztmp, msztmp;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
      offset1 -= offadj;
      get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp);
      offset2 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }

  /* If we have two type access paths B1.path1 and B2.path2 they may
     only alias if either B1 is in B2.path2 or B2 is in B1.path1.
     But we can still have a path that goes B1.path1...B2.path2 with
     a part that we do not see.  So we can only disambiguate now
     if there is no B2 in the tail of path1 and no B1 on the
     tail of path2.  */
  if (base1_alias_set == ref2_alias_set
      || alias_set_subset_of (base1_alias_set, ref2_alias_set))
    return true;
  /* If this is ptr vs. decl then we know there is no ptr ... decl path.  */
  if (!ref2_is_decl)
    return (base2_alias_set == ref1_alias_set
	    || alias_set_subset_of (base2_alias_set, ref1_alias_set));
  return false;
}

/* Return true if two memory references based on the variables BASE1
   and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  */

static bool
decl_refs_may_alias_p (tree base1,
		       HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
		       tree base2,
		       HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2)
{
  gcc_checking_assert (DECL_P (base1) && DECL_P (base2));

  /* If both references are based on different variables, they cannot alias.  */
  if (base1 != base2)
    return false;

  /* If both references are based on the same variable, they cannot alias if
     the accesses do not overlap.  */
  return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
}
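
/* For example (illustrative): two accesses to the same decl covering the
   bit ranges [0, 32) and [32, 64) do not overlap and are disambiguated,
   while [0, 64) and [32, 40) do overlap and may alias.  */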

/* Return true if an indirect reference based on *PTR1 constrained
   to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
   constrained to [OFFSET2, OFFSET2 + MAX_SIZE2).  *PTR1 and BASE2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			       HOST_WIDE_INT offset1,
			       HOST_WIDE_INT max_size1 ATTRIBUTE_UNUSED,
			       alias_set_type ref1_alias_set,
			       alias_set_type base1_alias_set,
			       tree ref2 ATTRIBUTE_UNUSED, tree base2,
			       HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			       alias_set_type ref2_alias_set,
			       alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptrtype1, dbase2;
  HOST_WIDE_INT offset1p = offset1, offset2p = offset2;
  HOST_WIDE_INT doffset1, doffset2;
  double_int moff;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && DECL_P (base2));

  ptr1 = TREE_OPERAND (base1, 0);

  /* The offset embedded in MEM_REFs can be negative.  Bias them
     so that the resulting offset adjustment is positive.  */
  moff = mem_ref_offset (base1);
  moff = moff.alshift (BITS_PER_UNIT == 8
		       ? 3 : exact_log2 (BITS_PER_UNIT),
		       HOST_BITS_PER_DOUBLE_INT);
  if (moff.is_negative ())
    offset2p += (-moff).low;
  else
    offset1p += moff.low;
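  /* For example (illustrative): for MEM[ptr1 + -8B] the embedded offset
     is -64 bits; instead of lowering offset1p below zero we raise
     offset2p by 64, which leaves the relative position of the two
     ranges, and hence the overlap test below, unchanged.  */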

  /* If only one reference is based on a variable, they cannot alias if
     the pointer access is beyond the extent of the variable access.
     (the pointer base cannot validly point to an offset less than zero
     of the variable).
     ???  IVOPTs creates bases that do not honor this restriction,
     so do not apply this optimization for TARGET_MEM_REFs.  */
  if (TREE_CODE (base1) != TARGET_MEM_REF
      && !ranges_overlap_p (MAX (0, offset1p), -1, offset2p, max_size2))
    return false;
  /* They also cannot alias if the pointer may not point to the decl.  */
  if (!ptr_deref_may_alias_decl_p (ptr1, base2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == -1)
    base1_alias_set = get_deref_alias_set (ptrtype1);
  if (base1_alias_set == 0)
    return true;
  if (base2_alias_set == -1)
    base2_alias_set = get_alias_set (base2);

  /* When we are trying to disambiguate an access with a pointer dereference
     as base versus one with a decl as base we can use both the size
     of the decl and its dynamic type for extra disambiguation.
     ???  We do not know anything about the dynamic type of the decl
     other than that its alias-set contains base2_alias_set as a subset
     which does not help us here.  */
  /* As we know nothing useful about the dynamic type of the decl just
     use the usual conflict check rather than a subset test.
     ???  We could introduce -fvery-strict-aliasing when the language
     does not allow decls to have a dynamic type that differs from their
     static type.  Then we can check
     !alias_set_subset_of (base1_alias_set, base2_alias_set) instead.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;
  /* If the size of the access relevant for TBAA through the pointer
     is bigger than the size of the decl we can't possibly access the
     decl via that pointer.  */
  if (DECL_SIZE (base2) && COMPLETE_TYPE_P (TREE_TYPE (ptrtype1))
      && TREE_CODE (DECL_SIZE (base2)) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (ptrtype1))) == INTEGER_CST
      /* ???  This in turn may run afoul when a decl of type T which is
	 a member of union type U is accessed through a pointer to
	 type U and sizeof T is smaller than sizeof U.  */
      && TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
      && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
      && tree_int_cst_lt (DECL_SIZE (base2), TYPE_SIZE (TREE_TYPE (ptrtype1))))
    return false;

  if (!ref2)
    return true;

  /* If the decl is accessed via a MEM_REF, reconstruct the base
     we can use for TBAA and an appropriately adjusted offset.  */
  dbase2 = ref2;
  while (handled_component_p (dbase2))
    dbase2 = TREE_OPERAND (dbase2, 0);
  doffset1 = offset1;
  doffset2 = offset2;
  if (TREE_CODE (dbase2) == MEM_REF
      || TREE_CODE (dbase2) == TARGET_MEM_REF)
    {
      double_int moff = mem_ref_offset (dbase2);
      moff = moff.alshift (BITS_PER_UNIT == 8
			   ? 3 : exact_log2 (BITS_PER_UNIT),
			   HOST_BITS_PER_DOUBLE_INT);
      if (moff.is_negative ())
	doffset1 -= (-moff).low;
      else
	doffset2 -= moff.low;
    }

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (base2)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.
     For MEM_REFs we require that the component-ref offset we computed
     is relative to the start of the type which we ensure by
     comparing rvalue and access type and disregarding the constant
     pointer offset.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
    return ranges_overlap_p (doffset1, max_size1, doffset2, max_size2);

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, true);

  return true;
}

/* Return true if two indirect references based on *PTR1
   and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  *PTR1 and *PTR2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			   HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   tree ref2 ATTRIBUTE_UNUSED, tree base2,
			   HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptr2;
  tree ptrtype1, ptrtype2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && (TREE_CODE (base2) == MEM_REF
			   || TREE_CODE (base2) == TARGET_MEM_REF));

  ptr1 = TREE_OPERAND (base1, 0);
  ptr2 = TREE_OPERAND (base2, 0);

  /* If both bases are based on pointers they cannot alias if they may not
     point to the same memory object or if they point to the same object
     and the accesses do not overlap.  */
  if ((!cfun || gimple_in_ssa_p (cfun))
      && operand_equal_p (ptr1, ptr2, 0)
      && (((TREE_CODE (base1) != TARGET_MEM_REF
	    || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
	   && (TREE_CODE (base2) != TARGET_MEM_REF
	       || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
	  || (TREE_CODE (base1) == TARGET_MEM_REF
	      && TREE_CODE (base2) == TARGET_MEM_REF
	      && (TMR_STEP (base1) == TMR_STEP (base2)
		  || (TMR_STEP (base1) && TMR_STEP (base2)
		      && operand_equal_p (TMR_STEP (base1),
					  TMR_STEP (base2), 0)))
	      && (TMR_INDEX (base1) == TMR_INDEX (base2)
		  || (TMR_INDEX (base1) && TMR_INDEX (base2)
		      && operand_equal_p (TMR_INDEX (base1),
					  TMR_INDEX (base2), 0)))
	      && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
		  || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
		      && operand_equal_p (TMR_INDEX2 (base1),
					  TMR_INDEX2 (base2), 0))))))
    {
      double_int moff;
      /* The offset embedded in MEM_REFs can be negative.  Bias them
	 so that the resulting offset adjustment is positive.  */
      moff = mem_ref_offset (base1);
      moff = moff.alshift (BITS_PER_UNIT == 8
			   ? 3 : exact_log2 (BITS_PER_UNIT),
			   HOST_BITS_PER_DOUBLE_INT);
      if (moff.is_negative ())
	offset2 += (-moff).low;
      else
	offset1 += moff.low;
      moff = mem_ref_offset (base2);
      moff = moff.alshift (BITS_PER_UNIT == 8
			   ? 3 : exact_log2 (BITS_PER_UNIT),
			   HOST_BITS_PER_DOUBLE_INT);
      if (moff.is_negative ())
	offset1 += (-moff).low;
      else
	offset2 += moff.low;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  if (!ptr_derefs_may_alias_p (ptr1, ptr2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
  ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == -1)
    base1_alias_set = get_deref_alias_set (ptrtype1);
  if (base1_alias_set == 0)
    return true;
  if (base2_alias_set == -1)
    base2_alias_set = get_deref_alias_set (ptrtype2);
  if (base2_alias_set == 0)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && (TREE_CODE (base2) != TARGET_MEM_REF
	  || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
      && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1
      && same_type_for_tbaa (TREE_TYPE (ptrtype1),
			     TREE_TYPE (ptrtype2)) == 1)
    return ranges_overlap_p (offset1, max_size1, offset2, max_size2);

  /* Do type-based disambiguation.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
      && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1)
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, false);

  return true;
}

/* Return true if the two memory references REF1 and REF2 may alias.  */

bool
refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
  tree base1, base2;
  HOST_WIDE_INT offset1 = 0, offset2 = 0;
  HOST_WIDE_INT max_size1 = -1, max_size2 = -1;
  bool var1_p, var2_p, ind1_p, ind2_p;

  gcc_checking_assert ((!ref1->ref
			|| TREE_CODE (ref1->ref) == SSA_NAME
			|| DECL_P (ref1->ref)
			|| TREE_CODE (ref1->ref) == STRING_CST
			|| handled_component_p (ref1->ref)
			|| TREE_CODE (ref1->ref) == MEM_REF
			|| TREE_CODE (ref1->ref) == TARGET_MEM_REF)
		       && (!ref2->ref
			   || TREE_CODE (ref2->ref) == SSA_NAME
			   || DECL_P (ref2->ref)
			   || TREE_CODE (ref2->ref) == STRING_CST
			   || handled_component_p (ref2->ref)
			   || TREE_CODE (ref2->ref) == MEM_REF
			   || TREE_CODE (ref2->ref) == TARGET_MEM_REF));

  /* Decompose the references into their base objects and the access.  */
  base1 = ao_ref_base (ref1);
  offset1 = ref1->offset;
  max_size1 = ref1->max_size;
  base2 = ao_ref_base (ref2);
  offset2 = ref2->offset;
  max_size2 = ref2->max_size;

  /* We can end up with registers or constants as bases for example from
     *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
     which is seen as a struct copy.  */
  if (TREE_CODE (base1) == SSA_NAME
      || TREE_CODE (base1) == CONST_DECL
      || TREE_CODE (base1) == CONSTRUCTOR
      || TREE_CODE (base1) == ADDR_EXPR
      || CONSTANT_CLASS_P (base1)
      || TREE_CODE (base2) == SSA_NAME
      || TREE_CODE (base2) == CONST_DECL
      || TREE_CODE (base2) == CONSTRUCTOR
      || TREE_CODE (base2) == ADDR_EXPR
      || CONSTANT_CLASS_P (base2))
    return false;

  /* We can end up referring to code via function and label decls.
     As we likely do not properly track code aliases conservatively
     bail out.  */
  if (TREE_CODE (base1) == FUNCTION_DECL
      || TREE_CODE (base1) == LABEL_DECL
      || TREE_CODE (base2) == FUNCTION_DECL
      || TREE_CODE (base2) == LABEL_DECL)
    return true;

  /* Two volatile accesses always conflict.  */
  if (ref1->volatile_p
      && ref2->volatile_p)
    return true;

  /* Defer to simple offset based disambiguation if we have
     references based on two decls.  Do this before deferring to
     TBAA to handle must-alias cases in conformance with the
     GCC extension of allowing type-punning through unions.  */
  var1_p = DECL_P (base1);
  var2_p = DECL_P (base2);
  if (var1_p && var2_p)
    return decl_refs_may_alias_p (base1, offset1, max_size1,
				  base2, offset2, max_size2);

  ind1_p = (TREE_CODE (base1) == MEM_REF
	    || TREE_CODE (base1) == TARGET_MEM_REF);
  ind2_p = (TREE_CODE (base2) == MEM_REF
	    || TREE_CODE (base2) == TARGET_MEM_REF);

  /* Canonicalize the pointer-vs-decl case.  */
  if (ind1_p && var2_p)
    {
      HOST_WIDE_INT tmp1;
      tree tmp2;
      ao_ref *tmp3;
      tmp1 = offset1; offset1 = offset2; offset2 = tmp1;
      tmp1 = max_size1; max_size1 = max_size2; max_size2 = tmp1;
      tmp2 = base1; base1 = base2; base2 = tmp2;
      tmp3 = ref1; ref1 = ref2; ref2 = tmp3;
      var1_p = true;
      ind1_p = false;
      var2_p = false;
      ind2_p = true;
    }

  /* First defer to TBAA if possible.  */
  if (tbaa_p
      && flag_strict_aliasing
      && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
				 ao_ref_alias_set (ref2)))
    return false;

  /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators.  */
  if (var1_p && ind2_p)
    return indirect_ref_may_alias_decl_p (ref2->ref, base2,
					  offset2, max_size2,
					  ao_ref_alias_set (ref2), -1,
					  ref1->ref, base1,
					  offset1, max_size1,
					  ao_ref_alias_set (ref1),
					  ao_ref_base_alias_set (ref1),
					  tbaa_p);
  else if (ind1_p && ind2_p)
    return indirect_refs_may_alias_p (ref1->ref, base1,
				      offset1, max_size1,
				      ao_ref_alias_set (ref1), -1,
				      ref2->ref, base2,
				      offset2, max_size2,
				      ao_ref_alias_set (ref2), -1,
				      tbaa_p);

  /* We really do not want to end up here, but returning true is safe.  */
#ifdef ENABLE_CHECKING
  gcc_unreachable ();
#else
  return true;
#endif
}

bool
refs_may_alias_p (tree ref1, tree ref2)
{
  ao_ref r1, r2;
  bool res;
  ao_ref_init (&r1, ref1);
  ao_ref_init (&r2, ref2);
  res = refs_may_alias_p_1 (&r1, &r2, true);
  if (res)
    ++alias_stats.refs_may_alias_p_may_alias;
  else
    ++alias_stats.refs_may_alias_p_no_alias;
  return res;
}

/* Returns true if there is an anti-dependence for the STORE that
   executes after the LOAD.  */

bool
refs_anti_dependent_p (tree load, tree store)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, load);
  ao_ref_init (&r2, store);
  return refs_may_alias_p_1 (&r1, &r2, false);
}

/* Returns true if there is an output dependence for the stores
   STORE1 and STORE2.  */

bool
refs_output_dependent_p (tree store1, tree store2)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, store1);
  ao_ref_init (&r2, store2);
  return refs_may_alias_p_1 (&r1, &r2, false);
}

/* If the call CALL may use the memory reference REF return true,
   otherwise return false.  */

static bool
ref_maybe_used_by_call_p_1 (gimple call, ao_ref *ref)
{
  tree base, callee;
  unsigned i;
  int flags = gimple_call_flags (call);

  /* Const functions without a static chain do not implicitly use memory.  */
  if (!gimple_call_chain (call)
      && (flags & (ECF_CONST|ECF_NOVOPS)))
    goto process_args;

  base = ao_ref_base (ref);
  if (!base)
    return true;

  /* A call that is not without side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;

  /* If the reference is based on a decl that is not aliased the call
     cannot possibly use it.  */
  if (DECL_P (base)
      && !may_be_aliased (base)
      /* But local statics can be used through recursion.  */
      && !is_global_var (base))
    goto process_args;

  callee = gimple_call_fndecl (call);

  /* Handle those builtin functions explicitly that do not act as
     escape points.  See tree-ssa-structalias.c:find_func_aliases
     for the list of builtins we might need to handle here.  */
  if (callee != NULL_TREE
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
	/* All the following functions read memory pointed to by
	   their second argument.  strcat/strncat additionally
	   reads memory pointed to by the first argument.  */
	case BUILT_IN_STRCAT:
	case BUILT_IN_STRNCAT:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	  }
	  /* FALLTHRU */
	case BUILT_IN_STRCPY:
	case BUILT_IN_STRNCPY:
	case BUILT_IN_MEMCPY:
	case BUILT_IN_MEMMOVE:
	case BUILT_IN_MEMPCPY:
	case BUILT_IN_STPCPY:
	case BUILT_IN_STPNCPY:
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 3)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_STRCAT_CHK:
	case BUILT_IN_STRNCAT_CHK:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	  }
	  /* FALLTHRU */
	case BUILT_IN_STRCPY_CHK:
	case BUILT_IN_STRNCPY_CHK:
	case BUILT_IN_MEMCPY_CHK:
	case BUILT_IN_MEMMOVE_CHK:
	case BUILT_IN_MEMPCPY_CHK:
	case BUILT_IN_STPCPY_CHK:
	case BUILT_IN_STPNCPY_CHK:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 4)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_BCOPY:
	  {
	    ao_ref dref;
	    tree size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }

	/* The following functions read memory pointed to by their
	   first argument.  */
	CASE_BUILT_IN_TM_LOAD (1):
	CASE_BUILT_IN_TM_LOAD (2):
	CASE_BUILT_IN_TM_LOAD (4):
	CASE_BUILT_IN_TM_LOAD (8):
	CASE_BUILT_IN_TM_LOAD (FLOAT):
	CASE_BUILT_IN_TM_LOAD (DOUBLE):
	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
	CASE_BUILT_IN_TM_LOAD (M64):
	CASE_BUILT_IN_TM_LOAD (M128):
	CASE_BUILT_IN_TM_LOAD (M256):
	case BUILT_IN_TM_LOG:
	case BUILT_IN_TM_LOG_1:
	case BUILT_IN_TM_LOG_2:
	case BUILT_IN_TM_LOG_4:
	case BUILT_IN_TM_LOG_8:
	case BUILT_IN_TM_LOG_FLOAT:
	case BUILT_IN_TM_LOG_DOUBLE:
	case BUILT_IN_TM_LOG_LDOUBLE:
	case BUILT_IN_TM_LOG_M64:
	case BUILT_IN_TM_LOG_M128:
	case BUILT_IN_TM_LOG_M256:
	  return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);

	/* These read memory pointed to by the first argument.  */
	case BUILT_IN_STRDUP:
	case BUILT_IN_STRNDUP:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 2)
	      size = gimple_call_arg (call, 1);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* The following builtins do not read from memory.  */
	case BUILT_IN_FREE:
	case BUILT_IN_MALLOC:
	case BUILT_IN_CALLOC:
	case BUILT_IN_ALLOCA:
	case BUILT_IN_ALLOCA_WITH_ALIGN:
	case BUILT_IN_STACK_SAVE:
	case BUILT_IN_STACK_RESTORE:
	case BUILT_IN_MEMSET:
	case BUILT_IN_TM_MEMSET:
	case BUILT_IN_MEMSET_CHK:
	case BUILT_IN_FREXP:
	case BUILT_IN_FREXPF:
	case BUILT_IN_FREXPL:
	case BUILT_IN_GAMMA_R:
	case BUILT_IN_GAMMAF_R:
	case BUILT_IN_GAMMAL_R:
	case BUILT_IN_LGAMMA_R:
	case BUILT_IN_LGAMMAF_R:
	case BUILT_IN_LGAMMAL_R:
	case BUILT_IN_MODF:
	case BUILT_IN_MODFF:
	case BUILT_IN_MODFL:
	case BUILT_IN_REMQUO:
	case BUILT_IN_REMQUOF:
	case BUILT_IN_REMQUOL:
	case BUILT_IN_SINCOS:
	case BUILT_IN_SINCOSF:
	case BUILT_IN_SINCOSL:
	case BUILT_IN_ASSUME_ALIGNED:
	case BUILT_IN_VA_END:
	  return false;
	/* __sync_* builtins and some OpenMP builtins act as threading
	   barriers.  */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
#include "sync-builtins.def"
#undef DEF_SYNC_BUILTIN
	case BUILT_IN_GOMP_ATOMIC_START:
	case BUILT_IN_GOMP_ATOMIC_END:
	case BUILT_IN_GOMP_BARRIER:
	case BUILT_IN_GOMP_TASKWAIT:
	case BUILT_IN_GOMP_CRITICAL_START:
	case BUILT_IN_GOMP_CRITICAL_END:
	case BUILT_IN_GOMP_CRITICAL_NAME_START:
	case BUILT_IN_GOMP_CRITICAL_NAME_END:
	case BUILT_IN_GOMP_LOOP_END:
	case BUILT_IN_GOMP_ORDERED_START:
	case BUILT_IN_GOMP_ORDERED_END:
	case BUILT_IN_GOMP_PARALLEL_END:
	case BUILT_IN_GOMP_SECTIONS_END:
	case BUILT_IN_GOMP_SINGLE_COPY_START:
	case BUILT_IN_GOMP_SINGLE_COPY_END:
	  return true;

	default:
	  /* Fallthru to general call handling.  */;
      }

  /* Check if base is a global static variable that is not read
     by the function.  */
  if (callee != NULL_TREE
      && TREE_CODE (base) == VAR_DECL
      && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_get_node (callee);
      bitmap not_read;

      /* FIXME: Callee can be an OMP builtin that does not have a call graph
	 node yet.  We should enforce that there are nodes for all decls in the
	 IL and remove this check instead.  */
      if (node
	  && (not_read = ipa_reference_get_not_read_global (node))
	  && bitmap_bit_p (not_read, DECL_UID (base)))
	goto process_args;
    }

  /* Check if the base variable is call-used.  */
  if (DECL_P (base))
    {
      if (pt_solution_includes (gimple_call_use_set (call), base))
	return true;
    }
  else if ((TREE_CODE (base) == MEM_REF
	    || TREE_CODE (base) == TARGET_MEM_REF)
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      if (!pi)
	return true;

      if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
	return true;
    }
  else
    return true;

  /* Inspect call arguments for passed-by-value aliases.  */
process_args:
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree op = gimple_call_arg (call, i);
      int flags = gimple_call_arg_flags (call, i);

      if (flags & EAF_UNUSED)
	continue;

      if (TREE_CODE (op) == WITH_SIZE_EXPR)
	op = TREE_OPERAND (op, 0);

      if (TREE_CODE (op) != SSA_NAME
	  && !is_gimple_min_invariant (op))
	{
	  ao_ref r;
	  ao_ref_init (&r, op);
	  if (refs_may_alias_p_1 (&r, ref, true))
	    return true;
	}
    }

  return false;
}

static bool
ref_maybe_used_by_call_p (gimple call, tree ref)
{
  ao_ref r;
  bool res;
  ao_ref_init (&r, ref);
  res = ref_maybe_used_by_call_p_1 (call, &r);
  if (res)
    ++alias_stats.ref_maybe_used_by_call_p_may_alias;
  else
    ++alias_stats.ref_maybe_used_by_call_p_no_alias;
  return res;
}


/* If the statement STMT may use the memory reference REF return
   true, otherwise return false.  */

bool
ref_maybe_used_by_stmt_p (gimple stmt, tree ref)
{
  if (is_gimple_assign (stmt))
    {
      tree rhs;

      /* All assignments that touch memory have a single rhs.  */
      if (!gimple_assign_single_p (stmt))
	return false;

      rhs = gimple_assign_rhs1 (stmt);
      if (is_gimple_reg (rhs)
	  || is_gimple_min_invariant (rhs)
	  || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
	return false;

      return refs_may_alias_p (rhs, ref);
    }
  else if (is_gimple_call (stmt))
    return ref_maybe_used_by_call_p (stmt, ref);
  else if (gimple_code (stmt) == GIMPLE_RETURN)
    {
      tree retval = gimple_return_retval (stmt);
      tree base;
      if (retval
	  && TREE_CODE (retval) != SSA_NAME
	  && !is_gimple_min_invariant (retval)
	  && refs_may_alias_p (retval, ref))
	return true;
      /* If ref escapes the function then the return acts as a use.  */
      base = get_base_address (ref);
      if (!base)
	;
      else if (DECL_P (base))
	return is_global_var (base);
      else if (TREE_CODE (base) == MEM_REF
	       || TREE_CODE (base) == TARGET_MEM_REF)
	return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
      return false;
    }

  return true;
}
1502 
1503 /* If the call in statement CALL may clobber the memory reference REF
1504    return true, otherwise return false.  */
1505 
1506 static bool
call_may_clobber_ref_p_1(gimple call,ao_ref * ref)1507 call_may_clobber_ref_p_1 (gimple call, ao_ref *ref)
1508 {
1509   tree base;
1510   tree callee;
1511 
1512   /* If the call is pure or const it cannot clobber anything.  */
1513   if (gimple_call_flags (call)
1514       & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
1515     return false;
1516 
1517   base = ao_ref_base (ref);
1518   if (!base)
1519     return true;
1520 
1521   if (TREE_CODE (base) == SSA_NAME
1522       || CONSTANT_CLASS_P (base))
1523     return false;
1524 
1525   /* A call that is not without side-effects might involve volatile
1526      accesses and thus conflicts with all other volatile accesses.  */
1527   if (ref->volatile_p)
1528     return true;
1529 
1530   /* If the reference is based on a decl that is not aliased the call
1531      cannot possibly clobber it.  */
1532   if (DECL_P (base)
1533       && !may_be_aliased (base)
1534       /* But local non-readonly statics can be modified through recursion
1535          or the call may implement a threading barrier which we must
1536 	 treat as may-def.  */
1537       && (TREE_READONLY (base)
1538 	  || !is_global_var (base)))
1539     return false;
1540 
1541   callee = gimple_call_fndecl (call);
1542 
1543   /* Handle those builtin functions explicitly that do not act as
1544      escape points.  See tree-ssa-structalias.c:find_func_aliases
1545      for the list of builtins we might need to handle here.  */
1546   if (callee != NULL_TREE
1547       && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
1548     switch (DECL_FUNCTION_CODE (callee))
1549       {
1550 	/* All the following functions clobber memory pointed to by
1551 	   their first argument.  */
1552 	case BUILT_IN_STRCPY:
1553 	case BUILT_IN_STRNCPY:
1554 	case BUILT_IN_MEMCPY:
1555 	case BUILT_IN_MEMMOVE:
1556 	case BUILT_IN_MEMPCPY:
1557 	case BUILT_IN_STPCPY:
1558 	case BUILT_IN_STPNCPY:
1559 	case BUILT_IN_STRCAT:
1560 	case BUILT_IN_STRNCAT:
1561 	case BUILT_IN_MEMSET:
1562 	case BUILT_IN_TM_MEMSET:
1563 	CASE_BUILT_IN_TM_STORE (1):
1564 	CASE_BUILT_IN_TM_STORE (2):
1565 	CASE_BUILT_IN_TM_STORE (4):
1566 	CASE_BUILT_IN_TM_STORE (8):
1567 	CASE_BUILT_IN_TM_STORE (FLOAT):
1568 	CASE_BUILT_IN_TM_STORE (DOUBLE):
1569 	CASE_BUILT_IN_TM_STORE (LDOUBLE):
1570 	CASE_BUILT_IN_TM_STORE (M64):
1571 	CASE_BUILT_IN_TM_STORE (M128):
1572 	CASE_BUILT_IN_TM_STORE (M256):
1573 	case BUILT_IN_TM_MEMCPY:
1574 	case BUILT_IN_TM_MEMMOVE:
1575 	  {
1576 	    ao_ref dref;
1577 	    tree size = NULL_TREE;
1578 	    /* Don't pass in a size for strncat: it may write up to
1579 	       strlen (dest) + n + 1 bytes in total, i.e. up to n + 1
1580 	       bytes starting at dest + strlen (dest), and strlen (dest)
1581 	       isn't known here.  */
1582 	    if (gimple_call_num_args (call) == 3
1583 		&& DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
1584 	      size = gimple_call_arg (call, 2);
1585 	    ao_ref_init_from_ptr_and_size (&dref,
1586 					   gimple_call_arg (call, 0),
1587 					   size);
1588 	    return refs_may_alias_p_1 (&dref, ref, false);
1589 	  }
1590 	case BUILT_IN_STRCPY_CHK:
1591 	case BUILT_IN_STRNCPY_CHK:
1592 	case BUILT_IN_MEMCPY_CHK:
1593 	case BUILT_IN_MEMMOVE_CHK:
1594 	case BUILT_IN_MEMPCPY_CHK:
1595 	case BUILT_IN_STPCPY_CHK:
1596 	case BUILT_IN_STPNCPY_CHK:
1597 	case BUILT_IN_STRCAT_CHK:
1598 	case BUILT_IN_STRNCAT_CHK:
1599 	case BUILT_IN_MEMSET_CHK:
1600 	  {
1601 	    ao_ref dref;
1602 	    tree size = NULL_TREE;
1603 	    /* Don't pass in a size for __strncat_chk: it may write up to
1604 	       strlen (dest) + n + 1 bytes in total, i.e. up to n + 1
1605 	       bytes starting at dest + strlen (dest), and strlen (dest)
1606 	       isn't known here.  */
1607 	    if (gimple_call_num_args (call) == 4
1608 		&& DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
1609 	      size = gimple_call_arg (call, 2);
1610 	    ao_ref_init_from_ptr_and_size (&dref,
1611 					   gimple_call_arg (call, 0),
1612 					   size);
1613 	    return refs_may_alias_p_1 (&dref, ref, false);
1614 	  }
1615 	case BUILT_IN_BCOPY:
1616 	  {
1617 	    ao_ref dref;
1618 	    tree size = gimple_call_arg (call, 2);
1619 	    ao_ref_init_from_ptr_and_size (&dref,
1620 					   gimple_call_arg (call, 1),
1621 					   size);
1622 	    return refs_may_alias_p_1 (&dref, ref, false);
1623 	  }
1624 	/* Allocating memory does not have any side-effects apart from
1625 	   being the definition point for the pointer.  */
1626 	case BUILT_IN_MALLOC:
1627 	case BUILT_IN_CALLOC:
1628 	case BUILT_IN_STRDUP:
1629 	case BUILT_IN_STRNDUP:
1630 	  /* Unix98 specifies that errno is set on allocation failure.  */
1631 	  if (flag_errno_math
1632 	      && targetm.ref_may_alias_errno (ref))
1633 	    return true;
1634 	  return false;
1635 	case BUILT_IN_STACK_SAVE:
1636 	case BUILT_IN_ALLOCA:
1637 	case BUILT_IN_ALLOCA_WITH_ALIGN:
1638 	case BUILT_IN_ASSUME_ALIGNED:
1639 	  return false;
1640 	/* Freeing memory kills the pointed-to memory.  More importantly
1641 	   the call has to serve as a barrier for moving loads and stores
1642 	   across it.  */
1643 	case BUILT_IN_FREE:
1644 	case BUILT_IN_VA_END:
1645 	  {
1646 	    tree ptr = gimple_call_arg (call, 0);
1647 	    return ptr_deref_may_alias_ref_p_1 (ptr, ref);
1648 	  }
1649 	case BUILT_IN_GAMMA_R:
1650 	case BUILT_IN_GAMMAF_R:
1651 	case BUILT_IN_GAMMAL_R:
1652 	case BUILT_IN_LGAMMA_R:
1653 	case BUILT_IN_LGAMMAF_R:
1654 	case BUILT_IN_LGAMMAL_R:
1655 	  {
1656 	    tree out = gimple_call_arg (call, 1);
1657 	    if (ptr_deref_may_alias_ref_p_1 (out, ref))
1658 	      return true;
1659 	    if (flag_errno_math)
1660 	      break;
1661 	    return false;
1662 	  }
1663 	case BUILT_IN_FREXP:
1664 	case BUILT_IN_FREXPF:
1665 	case BUILT_IN_FREXPL:
1666 	case BUILT_IN_MODF:
1667 	case BUILT_IN_MODFF:
1668 	case BUILT_IN_MODFL:
1669 	  {
1670 	    tree out = gimple_call_arg (call, 1);
1671 	    return ptr_deref_may_alias_ref_p_1 (out, ref);
1672 	  }
1673 	case BUILT_IN_REMQUO:
1674 	case BUILT_IN_REMQUOF:
1675 	case BUILT_IN_REMQUOL:
1676 	  {
1677 	    tree out = gimple_call_arg (call, 2);
1678 	    if (ptr_deref_may_alias_ref_p_1 (out, ref))
1679 	      return true;
1680 	    if (flag_errno_math)
1681 	      break;
1682 	    return false;
1683 	  }
1684 	case BUILT_IN_SINCOS:
1685 	case BUILT_IN_SINCOSF:
1686 	case BUILT_IN_SINCOSL:
1687 	  {
1688 	    tree sin = gimple_call_arg (call, 1);
1689 	    tree cos = gimple_call_arg (call, 2);
1690 	    return (ptr_deref_may_alias_ref_p_1 (sin, ref)
1691 		    || ptr_deref_may_alias_ref_p_1 (cos, ref));
1692 	  }
1693 	/* __sync_* builtins and some OpenMP builtins act as threading
1694 	   barriers.  */
1695 #undef DEF_SYNC_BUILTIN
1696 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
1697 #include "sync-builtins.def"
1698 #undef DEF_SYNC_BUILTIN
1699 	case BUILT_IN_GOMP_ATOMIC_START:
1700 	case BUILT_IN_GOMP_ATOMIC_END:
1701 	case BUILT_IN_GOMP_BARRIER:
1702 	case BUILT_IN_GOMP_TASKWAIT:
1703 	case BUILT_IN_GOMP_CRITICAL_START:
1704 	case BUILT_IN_GOMP_CRITICAL_END:
1705 	case BUILT_IN_GOMP_CRITICAL_NAME_START:
1706 	case BUILT_IN_GOMP_CRITICAL_NAME_END:
1707 	case BUILT_IN_GOMP_LOOP_END:
1708 	case BUILT_IN_GOMP_ORDERED_START:
1709 	case BUILT_IN_GOMP_ORDERED_END:
1710 	case BUILT_IN_GOMP_PARALLEL_END:
1711 	case BUILT_IN_GOMP_SECTIONS_END:
1712 	case BUILT_IN_GOMP_SINGLE_COPY_START:
1713 	case BUILT_IN_GOMP_SINGLE_COPY_END:
1714 	  return true;
1715 	default:
1716 	  /* Fallthru to general call handling.  */;
1717       }
1718 
1719   /* Check if base is a global static variable that is not written
1720      by the function.  */
1721   if (callee != NULL_TREE
1722       && TREE_CODE (base) == VAR_DECL
1723       && TREE_STATIC (base))
1724     {
1725       struct cgraph_node *node = cgraph_get_node (callee);
1726       bitmap not_written;
1727 
1728       if (node
1729 	  && (not_written = ipa_reference_get_not_written_global (node))
1730 	  && bitmap_bit_p (not_written, DECL_UID (base)))
1731 	return false;
1732     }
1733 
1734   /* Check if the base variable is call-clobbered.  */
1735   if (DECL_P (base))
1736     return pt_solution_includes (gimple_call_clobber_set (call), base);
1737   else if ((TREE_CODE (base) == MEM_REF
1738 	    || TREE_CODE (base) == TARGET_MEM_REF)
1739 	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
1740     {
1741       struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
1742       if (!pi)
1743 	return true;
1744 
1745       return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
1746     }
1747 
1748   return true;
1749 }
1750 
1751 /* If the call in statement CALL may clobber the memory reference REF
1752    return true, otherwise return false.  */
1753 
1754 bool
1755 call_may_clobber_ref_p (gimple call, tree ref)
1756 {
1757   bool res;
1758   ao_ref r;
1759   ao_ref_init (&r, ref);
1760   res = call_may_clobber_ref_p_1 (call, &r);
1761   if (res)
1762     ++alias_stats.call_may_clobber_ref_p_may_alias;
1763   else
1764     ++alias_stats.call_may_clobber_ref_p_no_alias;
1765   return res;
1766 }
1767 
1768 
1769 /* If the statement STMT may clobber the memory reference REF return true,
1770    otherwise return false.  */
1771 
1772 bool
1773 stmt_may_clobber_ref_p_1 (gimple stmt, ao_ref *ref)
1774 {
1775   if (is_gimple_call (stmt))
1776     {
1777       tree lhs = gimple_call_lhs (stmt);
1778       if (lhs
1779 	  && TREE_CODE (lhs) != SSA_NAME)
1780 	{
1781 	  ao_ref r;
1782 	  ao_ref_init (&r, lhs);
1783 	  if (refs_may_alias_p_1 (ref, &r, true))
1784 	    return true;
1785 	}
1786 
1787       return call_may_clobber_ref_p_1 (stmt, ref);
1788     }
1789   else if (gimple_assign_single_p (stmt))
1790     {
1791       tree lhs = gimple_assign_lhs (stmt);
1792       if (TREE_CODE (lhs) != SSA_NAME)
1793 	{
1794 	  ao_ref r;
1795 	  ao_ref_init (&r, lhs);
1796 	  return refs_may_alias_p_1 (ref, &r, true);
1797 	}
1798     }
1799   else if (gimple_code (stmt) == GIMPLE_ASM)
1800     return true;
1801 
1802   return false;
1803 }
1804 
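/* If the statement STMT may clobber the memory reference REF return true,
   otherwise return false.  Convenience wrapper around
   stmt_may_clobber_ref_p_1 that builds the ao_ref for REF.  */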
1805 bool
1806 stmt_may_clobber_ref_p (gimple stmt, tree ref)
1807 {
1808   ao_ref r;
1809   ao_ref_init (&r, ref);
1810   return stmt_may_clobber_ref_p_1 (stmt, &r);
1811 }
1812 
1813 /* If STMT kills the memory reference REF return true, otherwise
1814    return false.  */
1815 
1816 static bool
1817 stmt_kills_ref_p_1 (gimple stmt, ao_ref *ref)
1818 {
1819   /* For a must-alias check we need to be able to constrain
1820      the access properly.  */
1821   ao_ref_base (ref);
1822   if (ref->max_size == -1)
1823     return false;
1824 
1825   if (gimple_has_lhs (stmt)
1826       && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
1827       /* The assignment is not necessarily carried out if it can throw
1828 	 and we can catch it in the current function where we could inspect
1829 	 the previous value.
1830 	 ???  We only need to care about the RHS throwing.  For aggregate
1831 	 assignments or similar calls and non-call exceptions the LHS
1832 	 might throw as well.  */
1833       && !stmt_can_throw_internal (stmt))
1834     {
1835       tree base, lhs = gimple_get_lhs (stmt);
1836       HOST_WIDE_INT size, offset, max_size;
1837       base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
1838       /* We can get MEM[symbol: sZ, index: D.8862_1] here,
1839 	 so base == ref->base does not always hold.  */
1840       if (base == ref->base)
1841 	{
1842 	  /* For a must-alias check we need to be able to constrain
1843 	     the access properly.  */
1844 	  if (size != -1 && size == max_size)
1845 	    {
1846 	      if (offset <= ref->offset
1847 		  && offset + size >= ref->offset + ref->max_size)
1848 		return true;
1849 	    }
1850 	}
1851     }
1852 
1853   if (is_gimple_call (stmt))
1854     {
1855       tree callee = gimple_call_fndecl (stmt);
1856       if (callee != NULL_TREE
1857 	  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
1858 	switch (DECL_FUNCTION_CODE (callee))
1859 	  {
1860 	  case BUILT_IN_MEMCPY:
1861 	  case BUILT_IN_MEMPCPY:
1862 	  case BUILT_IN_MEMMOVE:
1863 	  case BUILT_IN_MEMSET:
1864 	  case BUILT_IN_MEMCPY_CHK:
1865 	  case BUILT_IN_MEMPCPY_CHK:
1866 	  case BUILT_IN_MEMMOVE_CHK:
1867 	  case BUILT_IN_MEMSET_CHK:
1868 	    {
1869 	      tree dest = gimple_call_arg (stmt, 0);
1870 	      tree len = gimple_call_arg (stmt, 2);
1871 	      tree base = NULL_TREE;
1872 	      HOST_WIDE_INT offset = 0;
1873 	      if (!host_integerp (len, 0))
1874 		return false;
1875 	      if (TREE_CODE (dest) == ADDR_EXPR)
1876 		base = get_addr_base_and_unit_offset (TREE_OPERAND (dest, 0),
1877 						      &offset);
1878 	      else if (TREE_CODE (dest) == SSA_NAME)
1879 		base = dest;
1880 	      if (base
1881 		  && base == ao_ref_base (ref))
1882 		{
1883 		  HOST_WIDE_INT size = TREE_INT_CST_LOW (len);
1884 		  if (offset <= ref->offset / BITS_PER_UNIT
1885 		      && (offset + size
1886 		          >= ((ref->offset + ref->max_size + BITS_PER_UNIT - 1)
1887 			      / BITS_PER_UNIT)))
1888 		    return true;
1889 		}
1890 	      break;
1891 	    }
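	    /* Worked example (illustrative numbers only): for
	       memset (p, 0, 16) we have OFFSET 0 and SIZE 16 (bytes);
	       a REF with ref->offset == 32 and ref->max_size == 32
	       (bits, i.e. bytes 4 to 7) is fully covered because
	       0 <= 32 / 8 and 0 + 16 >= (32 + 32 + 8 - 1) / 8 == 8.  */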
1892 
1893 	  case BUILT_IN_VA_END:
1894 	    {
1895 	      tree ptr = gimple_call_arg (stmt, 0);
1896 	      if (TREE_CODE (ptr) == ADDR_EXPR)
1897 		{
1898 		  tree base = ao_ref_base (ref);
1899 		  if (TREE_OPERAND (ptr, 0) == base)
1900 		    return true;
1901 		}
1902 	      break;
1903 	    }
1904 
1905 	  default:;
1906 	  }
1907     }
1908   return false;
1909 }
1910 
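/* If STMT kills the memory reference REF return true, otherwise return
   false.  Convenience wrapper around stmt_kills_ref_p_1.  */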
1911 bool
1912 stmt_kills_ref_p (gimple stmt, tree ref)
1913 {
1914   ao_ref r;
1915   ao_ref_init (&r, ref);
1916   return stmt_kills_ref_p_1 (stmt, &r);
1917 }
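
/* For example, given the GIMPLE sequence

       a.f = 0;		<-- REF taken from this store's LHS
       ...
       a = b;		<-- STMT, overwrites the whole of a

   stmt_kills_ref_p (STMT, REF) is true because the aggregate copy
   covers every byte of a.f; a dead store elimination client may then
   remove the first store.  (Illustrative sketch only.)  */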
1918 
1919 
1920 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
1921    TARGET, returning true, or a statement clobbering the memory reference
1922    REF, returning false.  The walk starts with VUSE, one argument of PHI.  */
1923 
1924 static bool
1925 maybe_skip_until (gimple phi, tree target, ao_ref *ref,
1926 		  tree vuse, unsigned int *cnt, bitmap *visited,
1927 		  bool abort_on_visited)
1928 {
1929   basic_block bb = gimple_bb (phi);
1930 
1931   if (!*visited)
1932     *visited = BITMAP_ALLOC (NULL);
1933 
1934   bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
1935 
1936   /* Walk until we hit the target.  */
1937   while (vuse != target)
1938     {
1939       gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
1940       /* Recurse for PHI nodes.  */
1941       if (gimple_code (def_stmt) == GIMPLE_PHI)
1942 	{
1943 	  /* An already visited PHI node ends the walk successfully.  */
1944 	  if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
1945 	    return !abort_on_visited;
1946 	  vuse = get_continuation_for_phi (def_stmt, ref, cnt,
1947 					   visited, abort_on_visited);
1948 	  if (!vuse)
1949 	    return false;
1950 	  continue;
1951 	}
1952       else if (gimple_nop_p (def_stmt))
1953 	return false;
1954       else
1955 	{
1956 	  /* A clobbering statement or the end of the IL ends the walk, failing.  */
1957 	  ++*cnt;
1958 	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
1959 	    return false;
1960 	}
1961       /* If we reach a new basic-block, see if we already skipped it
1962          in a previous walk that ended successfully.  */
1963       if (gimple_bb (def_stmt) != bb)
1964 	{
1965 	  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
1966 	    return !abort_on_visited;
1967 	  bb = gimple_bb (def_stmt);
1968 	}
1969       vuse = gimple_vuse (def_stmt);
1970     }
1971   return true;
1972 }
1973 
1974 /* For two PHI arguments ARG0 and ARG1 try to skip non-aliasing code
1975    until we hit the phi argument definition that dominates the other one.
1976    Return that, or NULL_TREE if there is no such definition.  */
1977 
1978 static tree
1979 get_continuation_for_phi_1 (gimple phi, tree arg0, tree arg1,
1980 			    ao_ref *ref, unsigned int *cnt,
1981 			    bitmap *visited, bool abort_on_visited)
1982 {
1983   gimple def0 = SSA_NAME_DEF_STMT (arg0);
1984   gimple def1 = SSA_NAME_DEF_STMT (arg1);
1985   tree common_vuse;
1986 
1987   if (arg0 == arg1)
1988     return arg0;
1989   else if (gimple_nop_p (def0)
1990 	   || (!gimple_nop_p (def1)
1991 	       && dominated_by_p (CDI_DOMINATORS,
1992 				  gimple_bb (def1), gimple_bb (def0))))
1993     {
1994       if (maybe_skip_until (phi, arg0, ref, arg1, cnt,
1995 			    visited, abort_on_visited))
1996 	return arg0;
1997     }
1998   else if (gimple_nop_p (def1)
1999 	   || dominated_by_p (CDI_DOMINATORS,
2000 			      gimple_bb (def0), gimple_bb (def1)))
2001     {
2002       if (maybe_skip_until (phi, arg1, ref, arg0, cnt,
2003 			    visited, abort_on_visited))
2004 	return arg1;
2005     }
2006   /* Special case of a diamond:
2007        MEM_1 = ...
2008        goto (cond) ? L1 : L2
2009        L1: store1 = ...    #MEM_2 = vuse(MEM_1)
2010 	   goto L3
2011        L2: store2 = ...    #MEM_3 = vuse(MEM_1)
2012        L3: MEM_4 = PHI<MEM_2, MEM_3>
2013      We were called with the PHI at L3; MEM_2 and MEM_3 don't
2014      dominate each other, but still we can easily skip this PHI node
2015      if we recognize that the vuse MEM operand is the same for both,
2016      and that we can skip both statements (they don't clobber us).
2017      This is still linear.  Don't use maybe_skip_until, which might
2018      be slow here.  */
2019   else if ((common_vuse = gimple_vuse (def0))
2020 	   && common_vuse == gimple_vuse (def1))
2021     {
2022       *cnt += 2;
2023       if (!stmt_may_clobber_ref_p_1 (def0, ref)
2024 	  && !stmt_may_clobber_ref_p_1 (def1, ref))
2025 	return common_vuse;
2026     }
2027 
2028   return NULL_TREE;
2029 }
2030 
2031 
2032 /* Starting from a PHI node for the virtual operand of the memory reference
2033    REF, find a continuation virtual operand that allows the walk over
2034    statements dominating PHI to continue, skipping only statements that
2035    cannot possibly clobber REF.  Increments *CNT for each alias disambiguation
2036    done.  Returns NULL_TREE if no suitable virtual operand can be found.  */
2037 
2038 tree
2039 get_continuation_for_phi (gimple phi, ao_ref *ref,
2040 			  unsigned int *cnt, bitmap *visited,
2041 			  bool abort_on_visited)
2042 {
2043   unsigned nargs = gimple_phi_num_args (phi);
2044 
2045   /* We can simply look through a single-argument PHI.  */
2046   if (nargs == 1)
2047     return PHI_ARG_DEF (phi, 0);
2048 
2049   /* For two or more arguments try to pairwise skip non-aliasing code
2050      until we hit the phi argument definition that dominates all the others.  */
2051   else if (nargs >= 2)
2052     {
2053       tree arg0, arg1;
2054       unsigned i;
2055 
2056       /* Find a candidate for the virtual operand whose definition
2057 	 dominates those of all others.  */
2058       arg0 = PHI_ARG_DEF (phi, 0);
2059       if (!SSA_NAME_IS_DEFAULT_DEF (arg0))
2060 	for (i = 1; i < nargs; ++i)
2061 	  {
2062 	    arg1 = PHI_ARG_DEF (phi, i);
2063 	    if (SSA_NAME_IS_DEFAULT_DEF (arg1))
2064 	      {
2065 		arg0 = arg1;
2066 		break;
2067 	      }
2068 	    if (dominated_by_p (CDI_DOMINATORS,
2069 				gimple_bb (SSA_NAME_DEF_STMT (arg0)),
2070 				gimple_bb (SSA_NAME_DEF_STMT (arg1))))
2071 	      arg0 = arg1;
2072 	  }
2073 
2074       /* Then pairwise reduce against the found candidate.  */
2075       for (i = 0; i < nargs; ++i)
2076 	{
2077 	  arg1 = PHI_ARG_DEF (phi, i);
2078 	  arg0 = get_continuation_for_phi_1 (phi, arg0, arg1, ref,
2079 					     cnt, visited, abort_on_visited);
2080 	  if (!arg0)
2081 	    return NULL_TREE;
2082 	}
2083 
2084       return arg0;
2085     }
2086 
2087   return NULL_TREE;
2088 }
2089 
2090 /* Based on the memory reference REF and its virtual use VUSE call
2091    WALKER for each virtual use that is equivalent to VUSE, including VUSE
2092    itself.  That is, for each virtual use whose defining statement
2093    does not clobber REF.
2094 
2095    WALKER is called with REF, the current virtual use and DATA.  If WALKER
2096    returns (void *)-1 the walk is aborted and NULL returned; any other
2097    non-NULL result stops the walk and is returned.  A failed walk returns NULL.
2098 
2099    TRANSLATE, if non-NULL, is called with a pointer to REF, the virtual
2100    use whose defining statement may clobber REF, and DATA.
2101    If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
2102    If TRANSLATE returns non-NULL the walk stops and its result is returned.
2103    If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
2104    to adjust REF and *DATA to make that valid.
2105 
2106    TODO: Cache the vector of equivalent vuses per ref, vuse pair.  */
2107 
2108 void *
2109 walk_non_aliased_vuses (ao_ref *ref, tree vuse,
2110 			void *(*walker)(ao_ref *, tree, unsigned int, void *),
2111 			void *(*translate)(ao_ref *, tree, void *), void *data)
2112 {
2113   bitmap visited = NULL;
2114   void *res;
2115   unsigned int cnt = 0;
2116   bool translated = false;
2117 
2118   timevar_push (TV_ALIAS_STMT_WALK);
2119 
2120   do
2121     {
2122       gimple def_stmt;
2123 
2124       /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
2125       res = (*walker) (ref, vuse, cnt, data);
2126       /* Abort walk.  */
2127       if (res == (void *)-1)
2128 	{
2129 	  res = NULL;
2130 	  break;
2131 	}
2132       /* Lookup succeeded.  */
2133       else if (res != NULL)
2134 	break;
2135 
2136       def_stmt = SSA_NAME_DEF_STMT (vuse);
2137       if (gimple_nop_p (def_stmt))
2138 	break;
2139       else if (gimple_code (def_stmt) == GIMPLE_PHI)
2140 	vuse = get_continuation_for_phi (def_stmt, ref, &cnt,
2141 					 &visited, translated);
2142       else
2143 	{
2144 	  cnt++;
2145 	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2146 	    {
2147 	      if (!translate)
2148 		break;
2149 	      res = (*translate) (ref, vuse, data);
2150 	      /* Failed lookup and translation.  */
2151 	      if (res == (void *)-1)
2152 		{
2153 		  res = NULL;
2154 		  break;
2155 		}
2156 	      /* Lookup succeeded.  */
2157 	      else if (res != NULL)
2158 		break;
2159 	      /* Translation succeeded, continue walking.  */
2160 	      translated = true;
2161 	    }
2162 	  vuse = gimple_vuse (def_stmt);
2163 	}
2164     }
2165   while (vuse);
2166 
2167   if (visited)
2168     BITMAP_FREE (visited);
2169 
2170   timevar_pop (TV_ALIAS_STMT_WALK);
2171 
2172   return res;
2173 }
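
/* Usage sketch for walk_non_aliased_vuses (hypothetical client code,
   illustrative only; the main real consumer of this interface is value
   numbering, see tree-ssa-sccvn.c):

       static void *
       my_walker (ao_ref *ref, tree vuse, unsigned int cnt, void *data)
       {
	 if (cnt > 100)
	   return (void *)-1;	 <-- too costly, abort, overall result NULL
	 if (my_lookup (vuse, data))
	   return vuse;		 <-- success, walk stops and returns VUSE
	 return NULL;		 <-- keep walking
       }

       res = walk_non_aliased_vuses (&r, gimple_vuse (stmt),
				     my_walker, NULL, data);

   MY_WALKER, MY_LOOKUP and DATA are made-up client names; passing a
   NULL TRANSLATE simply stops the walk at the first clobber.  */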
2174 
2175 
2176 /* Based on the memory reference REF call WALKER for each vdef whose
2177    defining statement may clobber REF, starting with VDEF.  If REF
2178    is NULL_TREE, each defining statement is visited.
2179 
2180    WALKER is called with REF, the current vdef and DATA.  If WALKER
2181    returns true the walk is stopped, otherwise it continues.
2182 
2183    At PHI nodes walk_aliased_vdefs forks into one walk for each
2184    PHI argument (but only one walk continues on merge points); a
2185    WALKER returning true terminates only the walk it was called from.
2186 
2187    The function returns the number of statements walked.  */
2188 
2189 static unsigned int
2190 walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
2191 		      bool (*walker)(ao_ref *, tree, void *), void *data,
2192 		      bitmap *visited, unsigned int cnt)
2193 {
2194   do
2195     {
2196       gimple def_stmt = SSA_NAME_DEF_STMT (vdef);
2197 
2198       if (*visited
2199 	  && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
2200 	return cnt;
2201 
2202       if (gimple_nop_p (def_stmt))
2203 	return cnt;
2204       else if (gimple_code (def_stmt) == GIMPLE_PHI)
2205 	{
2206 	  unsigned i;
2207 	  if (!*visited)
2208 	    *visited = BITMAP_ALLOC (NULL);
2209 	  for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
2210 	    cnt += walk_aliased_vdefs_1 (ref, gimple_phi_arg_def (def_stmt, i),
2211 					 walker, data, visited, 0);
2212 	  return cnt;
2213 	}
2214 
2215       /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
2216       cnt++;
2217       if ((!ref
2218 	   || stmt_may_clobber_ref_p_1 (def_stmt, ref))
2219 	  && (*walker) (ref, vdef, data))
2220 	return cnt;
2221 
2222       vdef = gimple_vuse (def_stmt);
2223     }
2224   while (1);
2225 }
2226 
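/* Entry point for the walk described above.  If VISITED is NULL a
   local visited bitmap is allocated and freed, otherwise the caller's
   bitmap is used so visited state can be shared across walks.  */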
2227 unsigned int
2228 walk_aliased_vdefs (ao_ref *ref, tree vdef,
2229 		    bool (*walker)(ao_ref *, tree, void *), void *data,
2230 		    bitmap *visited)
2231 {
2232   bitmap local_visited = NULL;
2233   unsigned int ret;
2234 
2235   timevar_push (TV_ALIAS_STMT_WALK);
2236 
2237   ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
2238 			      visited ? visited : &local_visited, 0);
2239   if (local_visited)
2240     BITMAP_FREE (local_visited);
2241 
2242   timevar_pop (TV_ALIAS_STMT_WALK);
2243 
2244   return ret;
2245 }
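
/* Usage sketch (hypothetical client code, illustrative only): collect
   the SSA versions of all vdefs whose statements may clobber R:

       static bool
       collect_vdef (ao_ref *ref, tree vdef, void *data)
       {
	 bitmap_set_bit ((bitmap) data, SSA_NAME_VERSION (vdef));
	 return false;	 <-- false continues the walk
       }

       ao_ref r;
       bitmap clobbers = BITMAP_ALLOC (NULL);
       ao_ref_init (&r, ref_tree);
       walk_aliased_vdefs (&r, gimple_vuse (stmt), collect_vdef,
			   clobbers, NULL);

   COLLECT_VDEF, REF_TREE and CLOBBERS are made-up names.  */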
2246 
2247