/* Alias analysis for trees.
   Copyright (C) 2004-2014 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "target.h"
#include "basic-block.h"
#include "timevar.h"	/* for TV_ALIAS_STMT_WALK */
#include "langhooks.h"
#include "flags.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "dumpfile.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-ssa.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "params.h"
#include "alloc-pool.h"
#include "ipa-reference.h"

/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function, its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.

   This file contains functions for disambiguating memory references,
   the so-called alias-oracle, and tools for walking the gimple IL.

   The main alias-oracle entry-points are

   bool stmt_may_clobber_ref_p (gimple, tree)

     This function queries if a statement may invalidate (parts of)
     the memory designated by the reference tree argument.

   bool ref_maybe_used_by_stmt_p (gimple, tree)

     This function queries if a statement may need (parts of) the
     memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

   bool refs_may_alias_p (tree, tree)

     This function tries to disambiguate two reference trees.

   bool ptr_deref_may_alias_global_p (tree)

     This function queries if dereferencing a pointer variable may
     alias global memory.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.c.  */
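
/* For illustration only (a sketch, not part of the oracle): a client
   pass typically asks

     if (!stmt_may_clobber_ref_p (store_stmt, ref_tree))
       ...  // STORE_STMT provably leaves the memory behind REF_TREE intact

   where STORE_STMT and REF_TREE are hypothetical names for a gimple
   statement and a memory reference tree.  Clients that issue many
   queries against the same reference usually build an ao_ref once
   with ao_ref_init (defined below) and call the _1 worker variants
   such as refs_may_alias_p_1 so base and extent are computed only
   once.  */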


/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
} alias_stats;

void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.refs_may_alias_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.ref_maybe_used_by_call_p_no_alias
	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.call_may_clobber_ref_p_no_alias,
	   alias_stats.call_may_clobber_ref_p_no_alias
	   + alias_stats.call_may_clobber_ref_p_may_alias);
}


/* Return true if dereferencing PTR may alias a global variable.  */

bool
ptr_deref_may_alias_global_p (tree ptr)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here, it may point
     to global memory.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to punt.  */
  if (!pi)
    return true;

  /* ???  This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt);
}

/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (TREE_CODE (decl) != VAR_DECL
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	return base == decl;
      else if (base
	       && CONSTANT_CLASS_P (base))
	return false;
      else
	return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}
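
/* A worked example for the predicate above (a sketch using hypothetical
   SSA names of the usual p_1 form): for

     int a, b;
     int *p_1 = &a;

   ptr_deref_may_alias_decl_p (p_1, b) is false once points-to analysis
   has recorded that p_1 points only to A; and if B never has its
   address taken, may_be_aliased (b) already fails and the query
   returns false without consulting the points-to solution at all.  */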

/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions when both pointers
     are in fact the same SSA name.  In this case we still want to say
     the pointers alias, so shortcut that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ???  This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}
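
/* Examples of the stripping done above (hypothetical SSA names): both
   ptr_derefs_may_alias_p (p_1 + 4, p_1) and
   ptr_derefs_may_alias_p ((char *) p_1, p_1) reduce to comparing p_1
   against itself, because POINTER_PLUS_EXPR offsets and conversions
   are disregarded, and so return true via the PTR1 == PTR2 shortcut.
   Two SSA pointers whose points-to solutions do not intersect yield
   false.  */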

/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}

/* Return true if REF may refer to global memory.  */

bool
ref_may_alias_global_p (tree ref)
{
  tree base = get_base_address (ref);
  if (DECL_P (base))
    return is_global_var (base);
  else if (TREE_CODE (base) == MEM_REF
	   || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
  return true;
}

/* Return true if STMT may clobber global memory.  */

bool
stmt_may_clobber_global_p (gimple stmt)
{
  tree lhs;

  if (!gimple_vdef (stmt))
    return false;

  /* ???  We could ask the oracle whether an artificial pointer
     dereference, using a pointer whose points-to information covers
     all global memory (what about non-address-taken memory?), may be
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage, do some manual
     checking instead.
     ???  We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      lhs = gimple_assign_lhs (stmt);
      return (TREE_CODE (lhs) != SSA_NAME
	      && ref_may_alias_global_p (lhs));
    case GIMPLE_CALL:
      return true;
    default:
      return true;
    }
}


/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  unsigned i;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  for (i = 1; i < num_ssa_names; i++)
    {
      tree ptr = ssa_name (i);
      struct ptr_info_def *pi;

      if (ptr == NULL_TREE
	  || !POINTER_TYPE_P (TREE_TYPE (ptr))
	  || SSA_NAME_IN_FREE_LIST (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}


/* Dump alias information on stderr.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}


/* Dump the points-to set *PT into FILE.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      if (pt->vars_contains_nonlocal
	  && pt->vars_contains_escaped_heap)
	fprintf (file, " (nonlocal, escaped heap)");
      else if (pt->vars_contains_nonlocal
	       && pt->vars_contains_escaped)
	fprintf (file, " (nonlocal, escaped)");
      else if (pt->vars_contains_nonlocal)
	fprintf (file, " (nonlocal)");
      else if (pt->vars_contains_escaped_heap)
	fprintf (file, " (escaped heap)");
      else if (pt->vars_contains_escaped)
	fprintf (file, " (escaped)");
    }
}


/* Unified dump function for pt_solution.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
}

DEBUG_FUNCTION void
debug (pt_solution *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}


/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    dump_points_to_solution (file, &pi->pt);
  else
    fprintf (file, ", points-to anything");

  fprintf (file, "\n");
}


/* Dump points-to information for VAR into stderr.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}


/* Initializes the alias-oracle reference representation *R from REF.  */

void
ao_ref_init (ao_ref *r, tree ref)
{
  r->ref = ref;
  r->base = NULL_TREE;
  r->offset = 0;
  r->size = -1;
  r->max_size = -1;
  r->ref_alias_set = -1;
  r->base_alias_set = -1;
  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}
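
/* A minimal usage sketch (REF1 and REF2 are hypothetical reference
   trees).  An ao_ref is cheap to set up because base, extent and
   alias sets are filled in lazily by ao_ref_base and ao_ref_alias_set
   on first use:

     ao_ref r1, r2;
     ao_ref_init (&r1, ref1);
     ao_ref_init (&r2, ref2);
     if (!refs_may_alias_p_1 (&r1, &r2, true))
       ...  // the accesses are provably independent

   Re-using the ao_refs for further queries amortizes the
   get_ref_base_and_extent work.  */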

/* Returns the base object of the memory reference *REF.  */

tree
ao_ref_base (ao_ref *ref)
{
  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
				       &ref->max_size);
  return ref->base;
}

/* Returns the base object alias set of the memory reference *REF.  */

static alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;
  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  base_ref = ref->ref;
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}

/* Returns the reference alias set of the memory reference *REF.  */

alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
  if (ref->ref_alias_set != -1)
    return ref->ref_alias_set;
  ref->ref_alias_set = get_alias_set (ref->ref);
  return ref->ref_alias_set;
}

/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   to or after the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  HOST_WIDE_INT t, size_hwi, extra_offset = 0;
  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  extra_offset = BITS_PER_UNIT
			 * int_cst_value (gimple_assign_rhs2 (stmt));
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  size = NULL_TREE;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset;
  if (size
      && tree_fits_shwi_p (size)
      && (size_hwi = tree_to_shwi (size)) <= HOST_WIDE_INT_MAX / BITS_PER_UNIT)
    ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}
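
/* For example, the builtin-call handling later in this file models the
   source operand of memcpy (dest, src, len) as

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref, src, len);

   which describes LEN bytes starting at *SRC, or a reference of
   unknown extent when LEN is not a suitable compile-time constant.  */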

/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static inline int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* If we would have to do structural comparison, bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  if (get_alias_set (type1) == get_alias_set (type2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}
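
/* Note the tri-state result must be handled asymmetrically by callers:
   1 licenses offset-based reasoning within a common type, 0 lets a
   caller keep searching an access path for a matching type, and -1
   must be treated conservatively.  A sketch of the idiom used below:

     int same_p = same_type_for_tbaa (t1, t2);
     if (same_p == -1)
       return true;	// cannot decide, assume the refs may alias
  */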

/* Determine if the two component references REF1 and REF2 which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference may alias.  REF2 is the only one that can
   be a decl in which case REF2_IS_DECL is true.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */

static bool
aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
			   HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			   bool ref2_is_decl)
{
  /* If one reference is a component reference through pointers, try to find
     a common base and apply offset-based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  tree *refp;
  int same_p;

  /* Choose bases and base types to search for.  */
  base1 = ref1;
  while (handled_component_p (base1))
    base1 = TREE_OPERAND (base1, 0);
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    base2 = TREE_OPERAND (base2, 0);
  type2 = TREE_TYPE (base2);

  /* Now search for type1 in the access path of ref2.  This
     would be a common base for doing offset-based disambiguation on.  */
  refp = &ref2;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type1) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type1);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      HOST_WIDE_INT offadj, sztmp, msztmp;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
      offset2 -= offadj;
      get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp);
      offset1 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  /* If we didn't find a common base, try the other way around.  */
  refp = &ref1;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type2) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type2);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      HOST_WIDE_INT offadj, sztmp, msztmp;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
      offset1 -= offadj;
      get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp);
      offset2 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }

  /* If we have two type access paths B1.path1 and B2.path2 they may
     only alias if either B1 is in B2.path2 or B2 is in B1.path1.
     But we can still have a path that goes B1.path1...B2.path2 with
     a part that we do not see.  So we can only disambiguate now
     if there is no B2 in the tail of path1 and no B1 on the
     tail of path2.  */
  if (base1_alias_set == ref2_alias_set
      || alias_set_subset_of (base1_alias_set, ref2_alias_set))
    return true;
  /* If this is ptr vs. decl then we know there is no ptr ... decl path.  */
  if (!ref2_is_decl)
    return (base2_alias_set == ref1_alias_set
	    || alias_set_subset_of (base2_alias_set, ref1_alias_set));
  return false;
}
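
/* Worked example for the function above, using the struct A/B sketch
   from its own comment: for q->i versus p->a.j the search finds
   struct A on the access path of p->a.j, rebases both offsets relative
   to that common struct A (q->i covers bits [0, 32), p->a.j bits
   [32, 64) on a target with 32-bit int), and ranges_overlap_p then
   proves the accesses disjoint.  */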

/* Return true if we can determine that component references REF1 and REF2,
   that are within a common DECL, cannot overlap.  */

static bool
nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
{
  auto_vec<tree, 16> component_refs1;
  auto_vec<tree, 16> component_refs2;

  /* Create the stack of handled components for REF1.  */
  while (handled_component_p (ref1))
    {
      component_refs1.safe_push (ref1);
      ref1 = TREE_OPERAND (ref1, 0);
    }
  if (TREE_CODE (ref1) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref1, 1)))
	goto may_overlap;
      ref1 = TREE_OPERAND (TREE_OPERAND (ref1, 0), 0);
    }

  /* Create the stack of handled components for REF2.  */
  while (handled_component_p (ref2))
    {
      component_refs2.safe_push (ref2);
      ref2 = TREE_OPERAND (ref2, 0);
    }
  if (TREE_CODE (ref2) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref2, 1)))
	goto may_overlap;
      ref2 = TREE_OPERAND (TREE_OPERAND (ref2, 0), 0);
    }

  /* We must have the same base DECL.  */
  gcc_assert (ref1 == ref2);

  /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
     rank.  This is sufficient because we start from the same DECL and you
     cannot reference several fields at a time with COMPONENT_REFs (unlike
     with ARRAY_RANGE_REFs for arrays) so you always need the same number
     of them to access a sub-component, unless you're in a union, in which
     case the return value will precisely be false.  */
  while (true)
    {
      do
	{
	  if (component_refs1.is_empty ())
	    goto may_overlap;
	  ref1 = component_refs1.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));

      do
	{
	  if (component_refs2.is_empty ())
	    goto may_overlap;
	  ref2 = component_refs2.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));

      /* Beware of BIT_FIELD_REF.  */
      if (TREE_CODE (ref1) != COMPONENT_REF
	  || TREE_CODE (ref2) != COMPONENT_REF)
	goto may_overlap;

      tree field1 = TREE_OPERAND (ref1, 1);
      tree field2 = TREE_OPERAND (ref2, 1);

      /* ??? We cannot simply use the type of operand #0 of the refs here
	 as the Fortran compiler smuggles type punning into COMPONENT_REFs
	 for common blocks instead of using unions like everyone else.  */
      tree type1 = DECL_CONTEXT (field1);
      tree type2 = DECL_CONTEXT (field2);

      /* We cannot disambiguate fields in a union or qualified union.  */
      if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE)
	goto may_overlap;

      /* Different fields of the same record type cannot overlap.
	 ??? Bitfields can overlap at RTL level so punt on them.  */
      if (field1 != field2)
	{
	  component_refs1.release ();
	  component_refs2.release ();
	  return !(DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2));
	}
    }

may_overlap:
  component_refs1.release ();
  component_refs2.release ();
  return false;
}

/* Return true if two memory references based on the variables BASE1
   and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
decl_refs_may_alias_p (tree ref1, tree base1,
		       HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
		       tree ref2, tree base2,
		       HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2)
{
  gcc_checking_assert (DECL_P (base1) && DECL_P (base2));

  /* If both references are based on different variables, they cannot alias.  */
  if (base1 != base2)
    return false;

  /* If both references are based on the same variable, they cannot alias if
     the accesses do not overlap.  */
  if (!ranges_overlap_p (offset1, max_size1, offset2, max_size2))
    return false;

  /* For components with variable position, the above test isn't sufficient,
     so we disambiguate component references manually.  */
  if (ref1 && ref2
      && handled_component_p (ref1) && handled_component_p (ref2)
      && nonoverlapping_component_refs_of_decl_p (ref1, ref2))
    return false;

  return true;
}
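
/* Example for the decl-vs-decl case: given

     struct { int i; int j; } s;

   the accesses s.i and s.j share the base S but occupy the disjoint
   bit ranges [0, 32) and [32, 64) (for 32-bit int), so
   ranges_overlap_p fails and the references are disambiguated without
   any TBAA.  */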

/* Return true if an indirect reference based on *PTR1 constrained
   to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
   constrained to [OFFSET2, OFFSET2 + MAX_SIZE2).  *PTR1 and BASE2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			       HOST_WIDE_INT offset1,
			       HOST_WIDE_INT max_size1 ATTRIBUTE_UNUSED,
			       alias_set_type ref1_alias_set,
			       alias_set_type base1_alias_set,
			       tree ref2 ATTRIBUTE_UNUSED, tree base2,
			       HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			       alias_set_type ref2_alias_set,
			       alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptrtype1, dbase2;
  HOST_WIDE_INT offset1p = offset1, offset2p = offset2;
  HOST_WIDE_INT doffset1, doffset2;
  double_int moff;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && DECL_P (base2));

  ptr1 = TREE_OPERAND (base1, 0);

  /* The offset embedded in MEM_REFs can be negative.  Bias them
     so that the resulting offset adjustment is positive.  */
  moff = mem_ref_offset (base1);
  moff = moff.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
  if (moff.is_negative ())
    offset2p += (-moff).low;
  else
    offset1p += moff.low;

  /* If only one reference is based on a variable, they cannot alias if
     the pointer access is beyond the extent of the variable access.
     (the pointer base cannot validly point to an offset less than zero
     of the variable).
     ???  IVOPTs creates bases that do not honor this restriction,
     so do not apply this optimization for TARGET_MEM_REFs.  */
  if (TREE_CODE (base1) != TARGET_MEM_REF
      && !ranges_overlap_p (MAX (0, offset1p), -1, offset2p, max_size2))
    return false;
  /* They also cannot alias if the pointer may not point to the decl.  */
  if (!ptr_deref_may_alias_decl_p (ptr1, base2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));

  /* If the alias set for a pointer access is zero, all bets are off.  */
  if (base1_alias_set == -1)
    base1_alias_set = get_deref_alias_set (ptrtype1);
  if (base1_alias_set == 0)
    return true;
  if (base2_alias_set == -1)
    base2_alias_set = get_alias_set (base2);

  /* When we are trying to disambiguate an access with a pointer dereference
     as base versus one with a decl as base we can use both the size
     of the decl and its dynamic type for extra disambiguation.
     ???  We do not know anything about the dynamic type of the decl
     other than that its alias-set contains base2_alias_set as a subset
     which does not help us here.  */
  /* As we know nothing useful about the dynamic type of the decl just
     use the usual conflict check rather than a subset test.
     ???  We could introduce -fvery-strict-aliasing when the language
     does not allow decls to have a dynamic type that differs from their
     static type.  Then we can check
     !alias_set_subset_of (base1_alias_set, base2_alias_set) instead.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;
  /* If the size of the access relevant for TBAA through the pointer
     is bigger than the size of the decl we can't possibly access the
     decl via that pointer.  */
  if (DECL_SIZE (base2) && COMPLETE_TYPE_P (TREE_TYPE (ptrtype1))
      && TREE_CODE (DECL_SIZE (base2)) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (ptrtype1))) == INTEGER_CST
      /* ???  This in turn may run afoul when a decl of type T which is
	 a member of union type U is accessed through a pointer to
	 type U and sizeof T is smaller than sizeof U.  */
      && TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
      && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
      && tree_int_cst_lt (DECL_SIZE (base2), TYPE_SIZE (TREE_TYPE (ptrtype1))))
    return false;

  if (!ref2)
    return true;

  /* If the decl is accessed via a MEM_REF, reconstruct the base
     we can use for TBAA and an appropriately adjusted offset.  */
  dbase2 = ref2;
  while (handled_component_p (dbase2))
    dbase2 = TREE_OPERAND (dbase2, 0);
  doffset1 = offset1;
  doffset2 = offset2;
  if (TREE_CODE (dbase2) == MEM_REF
      || TREE_CODE (dbase2) == TARGET_MEM_REF)
    {
      double_int moff = mem_ref_offset (dbase2);
      moff = moff.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
      if (moff.is_negative ())
	doffset1 -= (-moff).low;
      else
	doffset2 -= moff.low;
    }

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (base2)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.
     For MEM_REFs we require that the component-ref offset we computed
     is relative to the start of the type which we ensure by
     comparing rvalue and access type and disregarding the constant
     pointer offset.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
    return ranges_overlap_p (doffset1, max_size1, doffset2, max_size2);

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, true);

  return true;
}

/* Return true if two indirect references based on *PTR1
   and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  *PTR1 and *PTR2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees. */

static bool
indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			   HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   tree ref2 ATTRIBUTE_UNUSED, tree base2,
			   HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptr2;
  tree ptrtype1, ptrtype2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && (TREE_CODE (base2) == MEM_REF
			   || TREE_CODE (base2) == TARGET_MEM_REF));

  ptr1 = TREE_OPERAND (base1, 0);
  ptr2 = TREE_OPERAND (base2, 0);

  /* If both bases are based on pointers they cannot alias if they may not
     point to the same memory object or if they point to the same object
     and the accesses do not overlap.  */
  if ((!cfun || gimple_in_ssa_p (cfun))
      && operand_equal_p (ptr1, ptr2, 0)
      && (((TREE_CODE (base1) != TARGET_MEM_REF
	    || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
	   && (TREE_CODE (base2) != TARGET_MEM_REF
	       || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
	  || (TREE_CODE (base1) == TARGET_MEM_REF
	      && TREE_CODE (base2) == TARGET_MEM_REF
	      && (TMR_STEP (base1) == TMR_STEP (base2)
		  || (TMR_STEP (base1) && TMR_STEP (base2)
		      && operand_equal_p (TMR_STEP (base1),
					  TMR_STEP (base2), 0)))
	      && (TMR_INDEX (base1) == TMR_INDEX (base2)
		  || (TMR_INDEX (base1) && TMR_INDEX (base2)
		      && operand_equal_p (TMR_INDEX (base1),
					  TMR_INDEX (base2), 0)))
	      && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
		  || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
		      && operand_equal_p (TMR_INDEX2 (base1),
					  TMR_INDEX2 (base2), 0))))))
    {
      double_int moff;
      /* The offset embedded in MEM_REFs can be negative.  Bias them
	 so that the resulting offset adjustment is positive.  */
      moff = mem_ref_offset (base1);
      moff = moff.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
      if (moff.is_negative ())
	offset2 += (-moff).low;
      else
	offset1 += moff.low;
      moff = mem_ref_offset (base2);
      moff = moff.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
      if (moff.is_negative ())
	offset1 += (-moff).low;
      else
	offset2 += moff.low;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  if (!ptr_derefs_may_alias_p (ptr1, ptr2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
  ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));

  /* If the alias set for a pointer access is zero, all bets are off.  */
  if (base1_alias_set == -1)
    base1_alias_set = get_deref_alias_set (ptrtype1);
  if (base1_alias_set == 0)
    return true;
  if (base2_alias_set == -1)
    base2_alias_set = get_deref_alias_set (ptrtype2);
  if (base2_alias_set == 0)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && (TREE_CODE (base2) != TARGET_MEM_REF
	  || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
      && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1
      && same_type_for_tbaa (TREE_TYPE (ptrtype1),
			     TREE_TYPE (ptrtype2)) == 1)
    return ranges_overlap_p (offset1, max_size1, offset2, max_size2);

  /* Do type-based disambiguation.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
      && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1)
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, false);

  return true;
}

/* Return true if the two memory references REF1 and REF2 may alias.  */

bool
refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
  tree base1, base2;
  HOST_WIDE_INT offset1 = 0, offset2 = 0;
  HOST_WIDE_INT max_size1 = -1, max_size2 = -1;
  bool var1_p, var2_p, ind1_p, ind2_p;

  gcc_checking_assert ((!ref1->ref
			|| TREE_CODE (ref1->ref) == SSA_NAME
			|| DECL_P (ref1->ref)
			|| TREE_CODE (ref1->ref) == STRING_CST
			|| handled_component_p (ref1->ref)
			|| TREE_CODE (ref1->ref) == MEM_REF
			|| TREE_CODE (ref1->ref) == TARGET_MEM_REF)
		       && (!ref2->ref
			   || TREE_CODE (ref2->ref) == SSA_NAME
			   || DECL_P (ref2->ref)
			   || TREE_CODE (ref2->ref) == STRING_CST
			   || handled_component_p (ref2->ref)
			   || TREE_CODE (ref2->ref) == MEM_REF
			   || TREE_CODE (ref2->ref) == TARGET_MEM_REF));

  /* Decompose the references into their base objects and the access.  */
  base1 = ao_ref_base (ref1);
  offset1 = ref1->offset;
  max_size1 = ref1->max_size;
  base2 = ao_ref_base (ref2);
  offset2 = ref2->offset;
  max_size2 = ref2->max_size;

  /* We can end up with registers or constants as bases for example from
     *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
     which is seen as a struct copy.  */
  if (TREE_CODE (base1) == SSA_NAME
      || TREE_CODE (base1) == CONST_DECL
      || TREE_CODE (base1) == CONSTRUCTOR
      || TREE_CODE (base1) == ADDR_EXPR
      || CONSTANT_CLASS_P (base1)
      || TREE_CODE (base2) == SSA_NAME
      || TREE_CODE (base2) == CONST_DECL
      || TREE_CODE (base2) == CONSTRUCTOR
      || TREE_CODE (base2) == ADDR_EXPR
      || CONSTANT_CLASS_P (base2))
    return false;

  /* We can end up referring to code via function and label decls.
     As we likely do not properly track code aliases, conservatively
     bail out.  */
  if (TREE_CODE (base1) == FUNCTION_DECL
      || TREE_CODE (base1) == LABEL_DECL
      || TREE_CODE (base2) == FUNCTION_DECL
      || TREE_CODE (base2) == LABEL_DECL)
    return true;

  /* Two volatile accesses always conflict.  */
  if (ref1->volatile_p
      && ref2->volatile_p)
    return true;

  /* Defer to simple offset based disambiguation if we have
     references based on two decls.  Do this before deferring to
     TBAA to handle must-alias cases in conformance with the
     GCC extension of allowing type-punning through unions.  */
  var1_p = DECL_P (base1);
  var2_p = DECL_P (base2);
  if (var1_p && var2_p)
    return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
				  ref2->ref, base2, offset2, max_size2);

  ind1_p = (TREE_CODE (base1) == MEM_REF
	    || TREE_CODE (base1) == TARGET_MEM_REF);
  ind2_p = (TREE_CODE (base2) == MEM_REF
	    || TREE_CODE (base2) == TARGET_MEM_REF);

  /* Canonicalize the pointer-vs-decl case.  */
  if (ind1_p && var2_p)
    {
      HOST_WIDE_INT tmp1;
      tree tmp2;
      ao_ref *tmp3;
      tmp1 = offset1; offset1 = offset2; offset2 = tmp1;
      tmp1 = max_size1; max_size1 = max_size2; max_size2 = tmp1;
      tmp2 = base1; base1 = base2; base2 = tmp2;
      tmp3 = ref1; ref1 = ref2; ref2 = tmp3;
      var1_p = true;
      ind1_p = false;
      var2_p = false;
      ind2_p = true;
    }

  /* First defer to TBAA if possible.  */
  if (tbaa_p
      && flag_strict_aliasing
      && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
				 ao_ref_alias_set (ref2)))
    return false;

  /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators.  */
  if (var1_p && ind2_p)
    return indirect_ref_may_alias_decl_p (ref2->ref, base2,
					  offset2, max_size2,
					  ao_ref_alias_set (ref2), -1,
					  ref1->ref, base1,
					  offset1, max_size1,
					  ao_ref_alias_set (ref1),
					  ao_ref_base_alias_set (ref1),
					  tbaa_p);
  else if (ind1_p && ind2_p)
    return indirect_refs_may_alias_p (ref1->ref, base1,
				      offset1, max_size1,
				      ao_ref_alias_set (ref1), -1,
				      ref2->ref, base2,
				      offset2, max_size2,
				      ao_ref_alias_set (ref2), -1,
				      tbaa_p);

  /* We really do not want to end up here, but returning true is safe.  */
#ifdef ENABLE_CHECKING
  gcc_unreachable ();
#else
  return true;
#endif
}

bool
refs_may_alias_p (tree ref1, tree ref2)
{
  ao_ref r1, r2;
  bool res;
  ao_ref_init (&r1, ref1);
  ao_ref_init (&r2, ref2);
  res = refs_may_alias_p_1 (&r1, &r2, true);
  if (res)
    ++alias_stats.refs_may_alias_p_may_alias;
  else
    ++alias_stats.refs_may_alias_p_no_alias;
  return res;
}
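
/* A sketch of typical use of the wrapper above (REF1 and REF2 being
   hypothetical reference trees):

     if (!refs_may_alias_p (ref1, ref2))
       ...  // safe to reorder the two accesses

   Each call also updates the alias_stats counters reported by
   dump_alias_stats.  */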

/* Returns true if there is an anti-dependence for the STORE that
   executes after the LOAD.  */

bool
refs_anti_dependent_p (tree load, tree store)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, load);
  ao_ref_init (&r2, store);
  return refs_may_alias_p_1 (&r1, &r2, false);
}

/* Returns true if there is an output dependence for the stores
   STORE1 and STORE2.  */

bool
refs_output_dependent_p (tree store1, tree store2)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, store1);
  ao_ref_init (&r2, store2);
  return refs_may_alias_p_1 (&r1, &r2, false);
}
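
/* Both dependence predicates pass TBAA_P == false: a later store may
   legitimately change the effective type of a memory location (think
   union type-punning), so write-after-read and write-after-write
   relations are computed without type-based rules.  For example

     refs_anti_dependent_p (load, store)

   must not be disambiguated merely because LOAD and STORE use
   differently typed lvalues.  */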
1327 
1328 /* If the call CALL may use the memory reference REF return true,
1329    otherwise return false.  */
1330 
1331 static bool
ref_maybe_used_by_call_p_1(gimple call,ao_ref * ref)1332 ref_maybe_used_by_call_p_1 (gimple call, ao_ref *ref)
1333 {
1334   tree base, callee;
1335   unsigned i;
1336   int flags = gimple_call_flags (call);
1337 
1338   /* Const functions without a static chain do not implicitly use memory.  */
1339   if (!gimple_call_chain (call)
1340       && (flags & (ECF_CONST|ECF_NOVOPS)))
1341     goto process_args;
1342 
1343   base = ao_ref_base (ref);
1344   if (!base)
1345     return true;
1346 
1347   /* A call that is not without side-effects might involve volatile
1348      accesses and thus conflicts with all other volatile accesses.  */
1349   if (ref->volatile_p)
1350     return true;
1351 
1352   /* If the reference is based on a decl that is not aliased the call
1353      cannot possibly use it.  */
1354   if (DECL_P (base)
1355       && !may_be_aliased (base)
1356       /* But local statics can be used through recursion.  */
1357       && !is_global_var (base))
1358     goto process_args;
1359 
1360   callee = gimple_call_fndecl (call);
1361 
1362   /* Handle those builtin functions explicitly that do not act as
1363      escape points.  See tree-ssa-structalias.c:find_func_aliases
1364      for the list of builtins we might need to handle here.  */
1365   if (callee != NULL_TREE
1366       && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
1367     switch (DECL_FUNCTION_CODE (callee))
1368       {
1369 	/* All the following functions read memory pointed to by
1370 	   their second argument.  strcat/strncat additionally
1371 	   reads memory pointed to by the first argument.  */
1372 	case BUILT_IN_STRCAT:
1373 	case BUILT_IN_STRNCAT:
1374 	  {
1375 	    ao_ref dref;
1376 	    ao_ref_init_from_ptr_and_size (&dref,
1377 					   gimple_call_arg (call, 0),
1378 					   NULL_TREE);
1379 	    if (refs_may_alias_p_1 (&dref, ref, false))
1380 	      return true;
1381 	  }
1382 	  /* FALLTHRU */
1383 	case BUILT_IN_STRCPY:
1384 	case BUILT_IN_STRNCPY:
1385 	case BUILT_IN_MEMCPY:
1386 	case BUILT_IN_MEMMOVE:
1387 	case BUILT_IN_MEMPCPY:
1388 	case BUILT_IN_STPCPY:
1389 	case BUILT_IN_STPNCPY:
1390 	case BUILT_IN_TM_MEMCPY:
1391 	case BUILT_IN_TM_MEMMOVE:
1392 	  {
1393 	    ao_ref dref;
1394 	    tree size = NULL_TREE;
1395 	    if (gimple_call_num_args (call) == 3)
1396 	      size = gimple_call_arg (call, 2);
1397 	    ao_ref_init_from_ptr_and_size (&dref,
1398 					   gimple_call_arg (call, 1),
1399 					   size);
1400 	    return refs_may_alias_p_1 (&dref, ref, false);
1401 	  }
1402 	case BUILT_IN_STRCAT_CHK:
1403 	case BUILT_IN_STRNCAT_CHK:
1404 	  {
1405 	    ao_ref dref;
1406 	    ao_ref_init_from_ptr_and_size (&dref,
1407 					   gimple_call_arg (call, 0),
1408 					   NULL_TREE);
1409 	    if (refs_may_alias_p_1 (&dref, ref, false))
1410 	      return true;
1411 	  }
1412 	  /* FALLTHRU */
1413 	case BUILT_IN_STRCPY_CHK:
	case BUILT_IN_STRNCPY_CHK:
	case BUILT_IN_MEMCPY_CHK:
	case BUILT_IN_MEMMOVE_CHK:
	case BUILT_IN_MEMPCPY_CHK:
	case BUILT_IN_STPCPY_CHK:
	case BUILT_IN_STPNCPY_CHK:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 4)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_BCOPY:
	  {
	    ao_ref dref;
	    tree size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }

	/* The following functions read memory pointed to by their
	   first argument.  */
	CASE_BUILT_IN_TM_LOAD (1):
	CASE_BUILT_IN_TM_LOAD (2):
	CASE_BUILT_IN_TM_LOAD (4):
	CASE_BUILT_IN_TM_LOAD (8):
	CASE_BUILT_IN_TM_LOAD (FLOAT):
	CASE_BUILT_IN_TM_LOAD (DOUBLE):
	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
	CASE_BUILT_IN_TM_LOAD (M64):
	CASE_BUILT_IN_TM_LOAD (M128):
	CASE_BUILT_IN_TM_LOAD (M256):
	case BUILT_IN_TM_LOG:
	case BUILT_IN_TM_LOG_1:
	case BUILT_IN_TM_LOG_2:
	case BUILT_IN_TM_LOG_4:
	case BUILT_IN_TM_LOG_8:
	case BUILT_IN_TM_LOG_FLOAT:
	case BUILT_IN_TM_LOG_DOUBLE:
	case BUILT_IN_TM_LOG_LDOUBLE:
	case BUILT_IN_TM_LOG_M64:
	case BUILT_IN_TM_LOG_M128:
	case BUILT_IN_TM_LOG_M256:
	  return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);

	/* These read memory pointed to by the first argument.  */
	case BUILT_IN_STRDUP:
	case BUILT_IN_STRNDUP:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 2)
	      size = gimple_call_arg (call, 1);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first argument.  */
	case BUILT_IN_INDEX:
	case BUILT_IN_STRCHR:
	case BUILT_IN_STRRCHR:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first argument with size
	   in the third argument.  */
	case BUILT_IN_MEMCHR:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   gimple_call_arg (call, 2));
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first and second arguments.  */
	case BUILT_IN_STRSTR:
	case BUILT_IN_STRPBRK:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   NULL_TREE);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }

	/* The following builtins do not read from memory.  */
	case BUILT_IN_FREE:
	case BUILT_IN_MALLOC:
	case BUILT_IN_POSIX_MEMALIGN:
	case BUILT_IN_CALLOC:
	case BUILT_IN_ALLOCA:
	case BUILT_IN_ALLOCA_WITH_ALIGN:
	case BUILT_IN_STACK_SAVE:
	case BUILT_IN_STACK_RESTORE:
	case BUILT_IN_MEMSET:
	case BUILT_IN_TM_MEMSET:
	case BUILT_IN_MEMSET_CHK:
	case BUILT_IN_FREXP:
	case BUILT_IN_FREXPF:
	case BUILT_IN_FREXPL:
	case BUILT_IN_GAMMA_R:
	case BUILT_IN_GAMMAF_R:
	case BUILT_IN_GAMMAL_R:
	case BUILT_IN_LGAMMA_R:
	case BUILT_IN_LGAMMAF_R:
	case BUILT_IN_LGAMMAL_R:
	case BUILT_IN_MODF:
	case BUILT_IN_MODFF:
	case BUILT_IN_MODFL:
	case BUILT_IN_REMQUO:
	case BUILT_IN_REMQUOF:
	case BUILT_IN_REMQUOL:
	case BUILT_IN_SINCOS:
	case BUILT_IN_SINCOSF:
	case BUILT_IN_SINCOSL:
	case BUILT_IN_ASSUME_ALIGNED:
	case BUILT_IN_VA_END:
	  return false;
	/* __sync_* builtins and some OpenMP builtins act as threading
	   barriers.  */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
#include "sync-builtins.def"
#undef DEF_SYNC_BUILTIN
	case BUILT_IN_GOMP_ATOMIC_START:
	case BUILT_IN_GOMP_ATOMIC_END:
	case BUILT_IN_GOMP_BARRIER:
	case BUILT_IN_GOMP_BARRIER_CANCEL:
	case BUILT_IN_GOMP_TASKWAIT:
	case BUILT_IN_GOMP_TASKGROUP_END:
	case BUILT_IN_GOMP_CRITICAL_START:
	case BUILT_IN_GOMP_CRITICAL_END:
	case BUILT_IN_GOMP_CRITICAL_NAME_START:
	case BUILT_IN_GOMP_CRITICAL_NAME_END:
	case BUILT_IN_GOMP_LOOP_END:
	case BUILT_IN_GOMP_LOOP_END_CANCEL:
	case BUILT_IN_GOMP_ORDERED_START:
	case BUILT_IN_GOMP_ORDERED_END:
	case BUILT_IN_GOMP_SECTIONS_END:
	case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
	case BUILT_IN_GOMP_SINGLE_COPY_START:
	case BUILT_IN_GOMP_SINGLE_COPY_END:
	  return true;

	default:
	  /* Fallthru to general call handling.  */;
      }

  /* Check if base is a global static variable that is not read
     by the function.  */
  if (callee != NULL_TREE
      && TREE_CODE (base) == VAR_DECL
      && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_get_node (callee);
      bitmap not_read;

      /* FIXME: Callee can be an OMP builtin that does not have a call graph
	 node yet.  We should enforce that there are nodes for all decls in the
	 IL and remove this check instead.  */
      if (node
	  && (not_read = ipa_reference_get_not_read_global (node))
	  && bitmap_bit_p (not_read, DECL_UID (base)))
	goto process_args;
    }
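
  /* For illustration (a hypothetical example, not code from this file):
     given

       static int counter;
       void log_event (const char *msg);   (never reads 'counter')

     ipa-reference can record 'counter' in log_event's not_read bitmap.
     A query against a call to log_event with a reference based on
     'counter' then skips directly to the argument inspection below,
     and a load of 'counter' may stay live across the call.  */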

  /* Check if the base variable is call-used.  */
  if (DECL_P (base))
    {
      if (pt_solution_includes (gimple_call_use_set (call), base))
	return true;
    }
  else if ((TREE_CODE (base) == MEM_REF
	    || TREE_CODE (base) == TARGET_MEM_REF)
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      if (!pi)
	return true;

      if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
	return true;
    }
  else
    return true;

  /* Inspect call arguments for passed-by-value aliases.  */
process_args:
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree op = gimple_call_arg (call, i);
      int flags = gimple_call_arg_flags (call, i);

      if (flags & EAF_UNUSED)
	continue;

      if (TREE_CODE (op) == WITH_SIZE_EXPR)
	op = TREE_OPERAND (op, 0);

      if (TREE_CODE (op) != SSA_NAME
	  && !is_gimple_min_invariant (op))
	{
	  ao_ref r;
	  ao_ref_init (&r, op);
	  if (refs_may_alias_p_1 (&r, ref, true))
	    return true;
	}
    }

  return false;
}

static bool
ref_maybe_used_by_call_p (gimple call, tree ref)
{
  ao_ref r;
  bool res;
  ao_ref_init (&r, ref);
  res = ref_maybe_used_by_call_p_1 (call, &r);
  if (res)
    ++alias_stats.ref_maybe_used_by_call_p_may_alias;
  else
    ++alias_stats.ref_maybe_used_by_call_p_no_alias;
  return res;
}


/* If the statement STMT may use the memory reference REF return
   true, otherwise return false.  */

bool
ref_maybe_used_by_stmt_p (gimple stmt, tree ref)
{
  if (is_gimple_assign (stmt))
    {
      tree rhs;

      /* All memory-accessing assign statements are single
	 (gimple_assign_single_p); other assignments cannot
	 touch memory.  */
      if (!gimple_assign_single_p (stmt))
	return false;

      rhs = gimple_assign_rhs1 (stmt);
      if (is_gimple_reg (rhs)
	  || is_gimple_min_invariant (rhs)
	  || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
	return false;

      return refs_may_alias_p (rhs, ref);
    }
  else if (is_gimple_call (stmt))
    return ref_maybe_used_by_call_p (stmt, ref);
  else if (gimple_code (stmt) == GIMPLE_RETURN)
    {
      tree retval = gimple_return_retval (stmt);
      tree base;
      if (retval
	  && TREE_CODE (retval) != SSA_NAME
	  && !is_gimple_min_invariant (retval)
	  && refs_may_alias_p (retval, ref))
	return true;
      /* If REF escapes the function then the return acts as a use.  */
      base = get_base_address (ref);
      if (!base)
	;
      else if (DECL_P (base))
	return is_global_var (base);
      else if (TREE_CODE (base) == MEM_REF
	       || TREE_CODE (base) == TARGET_MEM_REF)
	return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
      return false;
    }

  return true;
}
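
/* Behavior sketch for the predicate above (hypothetical GIMPLE, for
   illustration only; 's' is a local aggregate, 'g' a global):

     tmp_1 = s.x;	uses memory iff 's.x' may alias REF
     s.y = tmp_1;	RHS is a register, never a use
     return s;		returning an aggregate reads it
     return tmp_1;	still a use if REF is based on 'g', since
			global memory stays visible to the caller

   Statement kinds not handled explicitly are conservatively reported
   as uses.  */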

/* If the call in statement CALL may clobber the memory reference REF
   return true, otherwise return false.  */

static bool
call_may_clobber_ref_p_1 (gimple call, ao_ref *ref)
{
  tree base;
  tree callee;

  /* If the call is pure or const it cannot clobber anything.  */
  if (gimple_call_flags (call)
      & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
    return false;

  base = ao_ref_base (ref);
  if (!base)
    return true;

  if (TREE_CODE (base) == SSA_NAME
      || CONSTANT_CLASS_P (base))
    return false;

  /* A call that is not without side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;

  /* If the reference is based on a decl that is not aliased the call
     cannot possibly clobber it.  */
  if (DECL_P (base)
      && !may_be_aliased (base)
      /* But local non-readonly statics can be modified through recursion
	 or the call may implement a threading barrier which we must
	 treat as may-def.  */
      && (TREE_READONLY (base)
	  || !is_global_var (base)))
    return false;

  callee = gimple_call_fndecl (call);

  /* Handle those builtin functions explicitly that do not act as
     escape points.  See tree-ssa-structalias.c:find_func_aliases
     for the list of builtins we might need to handle here.  */
  if (callee != NULL_TREE
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
	/* All the following functions clobber memory pointed to by
	   their first argument.  */
	case BUILT_IN_STRCPY:
	case BUILT_IN_STRNCPY:
	case BUILT_IN_MEMCPY:
	case BUILT_IN_MEMMOVE:
	case BUILT_IN_MEMPCPY:
	case BUILT_IN_STPCPY:
	case BUILT_IN_STPNCPY:
	case BUILT_IN_STRCAT:
	case BUILT_IN_STRNCAT:
	case BUILT_IN_MEMSET:
	case BUILT_IN_TM_MEMSET:
	CASE_BUILT_IN_TM_STORE (1):
	CASE_BUILT_IN_TM_STORE (2):
	CASE_BUILT_IN_TM_STORE (4):
	CASE_BUILT_IN_TM_STORE (8):
	CASE_BUILT_IN_TM_STORE (FLOAT):
	CASE_BUILT_IN_TM_STORE (DOUBLE):
	CASE_BUILT_IN_TM_STORE (LDOUBLE):
	CASE_BUILT_IN_TM_STORE (M64):
	CASE_BUILT_IN_TM_STORE (M128):
	CASE_BUILT_IN_TM_STORE (M256):
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    /* Don't pass in a size for strncat: it writes up to
	       n + 1 bytes starting at dest + strlen (dest), i.e.
	       strlen (dest) + n + 1 bytes measured from dest, and
	       strlen (dest) isn't known here.  */
	    if (gimple_call_num_args (call) == 3
		&& DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
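	/* A worked example for the strncat handling above (hypothetical
	   values): with strlen (dest) == 2 and n == 3, strncat may
	   write up to n + 1 == 4 bytes, but at offsets 2..5 from dest.
	   Using n as the size would wrongly bound the clobber to
	   offsets 0..2.  */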
	case BUILT_IN_STRCPY_CHK:
	case BUILT_IN_STRNCPY_CHK:
	case BUILT_IN_MEMCPY_CHK:
	case BUILT_IN_MEMMOVE_CHK:
	case BUILT_IN_MEMPCPY_CHK:
	case BUILT_IN_STPCPY_CHK:
	case BUILT_IN_STPNCPY_CHK:
	case BUILT_IN_STRCAT_CHK:
	case BUILT_IN_STRNCAT_CHK:
	case BUILT_IN_MEMSET_CHK:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    /* Don't pass in a size for __strncat_chk either; as for
	       strncat above, the write extends to strlen (dest) + n + 1
	       bytes from dest and strlen (dest) isn't known here.  */
	    if (gimple_call_num_args (call) == 4
		&& DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_BCOPY:
	  {
	    ao_ref dref;
	    tree size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* Allocating memory does not have any side-effects apart from
	   being the definition point for the pointer.  */
	case BUILT_IN_MALLOC:
	case BUILT_IN_CALLOC:
	case BUILT_IN_STRDUP:
	case BUILT_IN_STRNDUP:
	  /* Unix98 specifies that errno is set on allocation failure.  */
	  if (flag_errno_math
	      && targetm.ref_may_alias_errno (ref))
	    return true;
	  return false;
	case BUILT_IN_STACK_SAVE:
	case BUILT_IN_ALLOCA:
	case BUILT_IN_ALLOCA_WITH_ALIGN:
	case BUILT_IN_ASSUME_ALIGNED:
	  return false;
	/* But posix_memalign stores a pointer into the memory pointed to
	   by its first argument.  */
	case BUILT_IN_POSIX_MEMALIGN:
	  {
	    tree ptrptr = gimple_call_arg (call, 0);
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref, ptrptr,
					   TYPE_SIZE_UNIT (ptr_type_node));
	    return (refs_may_alias_p_1 (&dref, ref, false)
		    || (flag_errno_math
			&& targetm.ref_may_alias_errno (ref)));
	  }
	/* Freeing memory kills the pointed-to memory.  More importantly
	   the call has to serve as a barrier for moving loads and stores
	   across it.  */
	case BUILT_IN_FREE:
	case BUILT_IN_VA_END:
	  {
	    tree ptr = gimple_call_arg (call, 0);
	    return ptr_deref_may_alias_ref_p_1 (ptr, ref);
	  }
	case BUILT_IN_GAMMA_R:
	case BUILT_IN_GAMMAF_R:
	case BUILT_IN_GAMMAL_R:
	case BUILT_IN_LGAMMA_R:
	case BUILT_IN_LGAMMAF_R:
	case BUILT_IN_LGAMMAL_R:
	  {
	    tree out = gimple_call_arg (call, 1);
	    if (ptr_deref_may_alias_ref_p_1 (out, ref))
	      return true;
	    if (flag_errno_math)
	      break;
	    return false;
	  }
	case BUILT_IN_FREXP:
	case BUILT_IN_FREXPF:
	case BUILT_IN_FREXPL:
	case BUILT_IN_MODF:
	case BUILT_IN_MODFF:
	case BUILT_IN_MODFL:
	  {
	    tree out = gimple_call_arg (call, 1);
	    return ptr_deref_may_alias_ref_p_1 (out, ref);
	  }
	case BUILT_IN_REMQUO:
	case BUILT_IN_REMQUOF:
	case BUILT_IN_REMQUOL:
	  {
	    tree out = gimple_call_arg (call, 2);
	    if (ptr_deref_may_alias_ref_p_1 (out, ref))
	      return true;
	    if (flag_errno_math)
	      break;
	    return false;
	  }
	case BUILT_IN_SINCOS:
	case BUILT_IN_SINCOSF:
	case BUILT_IN_SINCOSL:
	  {
	    tree sin = gimple_call_arg (call, 1);
	    tree cos = gimple_call_arg (call, 2);
	    return (ptr_deref_may_alias_ref_p_1 (sin, ref)
		    || ptr_deref_may_alias_ref_p_1 (cos, ref));
	  }
	/* __sync_* builtins and some OpenMP builtins act as threading
	   barriers.  */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
#include "sync-builtins.def"
#undef DEF_SYNC_BUILTIN
	case BUILT_IN_GOMP_ATOMIC_START:
	case BUILT_IN_GOMP_ATOMIC_END:
	case BUILT_IN_GOMP_BARRIER:
	case BUILT_IN_GOMP_BARRIER_CANCEL:
	case BUILT_IN_GOMP_TASKWAIT:
	case BUILT_IN_GOMP_TASKGROUP_END:
	case BUILT_IN_GOMP_CRITICAL_START:
	case BUILT_IN_GOMP_CRITICAL_END:
	case BUILT_IN_GOMP_CRITICAL_NAME_START:
	case BUILT_IN_GOMP_CRITICAL_NAME_END:
	case BUILT_IN_GOMP_LOOP_END:
	case BUILT_IN_GOMP_LOOP_END_CANCEL:
	case BUILT_IN_GOMP_ORDERED_START:
	case BUILT_IN_GOMP_ORDERED_END:
	case BUILT_IN_GOMP_SECTIONS_END:
	case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
	case BUILT_IN_GOMP_SINGLE_COPY_START:
	case BUILT_IN_GOMP_SINGLE_COPY_END:
	  return true;
	default:
	  /* Fallthru to general call handling.  */;
      }

  /* Check if base is a global static variable that is not written
     by the function.  */
  if (callee != NULL_TREE
      && TREE_CODE (base) == VAR_DECL
      && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_get_node (callee);
      bitmap not_written;

      if (node
	  && (not_written = ipa_reference_get_not_written_global (node))
	  && bitmap_bit_p (not_written, DECL_UID (base)))
	return false;
    }

  /* Check if the base variable is call-clobbered.  */
  if (DECL_P (base))
    return pt_solution_includes (gimple_call_clobber_set (call), base);
  else if ((TREE_CODE (base) == MEM_REF
	    || TREE_CODE (base) == TARGET_MEM_REF)
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      if (!pi)
	return true;

      return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
    }

  return true;
}

/* If the call in statement CALL may clobber the memory reference REF
   return true, otherwise return false.  */

bool
call_may_clobber_ref_p (gimple call, tree ref)
{
  bool res;
  ao_ref r;
  ao_ref_init (&r, ref);
  res = call_may_clobber_ref_p_1 (call, &r);
  if (res)
    ++alias_stats.call_may_clobber_ref_p_may_alias;
  else
    ++alias_stats.call_may_clobber_ref_p_no_alias;
  return res;
}


/* If the statement STMT may clobber the memory reference REF return true,
   otherwise return false.  */

bool
stmt_may_clobber_ref_p_1 (gimple stmt, ao_ref *ref)
{
  if (is_gimple_call (stmt))
    {
      tree lhs = gimple_call_lhs (stmt);
      if (lhs
	  && TREE_CODE (lhs) != SSA_NAME)
	{
	  ao_ref r;
	  ao_ref_init (&r, lhs);
	  if (refs_may_alias_p_1 (ref, &r, true))
	    return true;
	}

      return call_may_clobber_ref_p_1 (stmt, ref);
    }
  else if (gimple_assign_single_p (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if (TREE_CODE (lhs) != SSA_NAME)
	{
	  ao_ref r;
	  ao_ref_init (&r, lhs);
	  return refs_may_alias_p_1 (ref, &r, true);
	}
    }
  else if (gimple_code (stmt) == GIMPLE_ASM)
    return true;

  return false;
}

bool
stmt_may_clobber_ref_p (gimple stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_may_clobber_ref_p_1 (stmt, &r);
}
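
/* A minimal usage sketch (hypothetical caller, for illustration):
   verify that no statement in a sequence may invalidate a load we
   would like to hoist.

     static bool
     load_invariant_in_seq_p (gimple_seq seq, tree loadref)
     {
       gimple_stmt_iterator gsi;
       for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
	 if (stmt_may_clobber_ref_p (gsi_stmt (gsi), loadref))
	   return false;
       return true;
     }

   For repeated queries against the same reference it is cheaper to
   set up the ao_ref once with ao_ref_init and use
   stmt_may_clobber_ref_p_1 directly.  */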

/* If STMT kills the memory reference REF return true, otherwise
   return false.  */

static bool
stmt_kills_ref_p_1 (gimple stmt, ao_ref *ref)
{
  /* For a must-alias check we need to be able to constrain
     the access properly.
     FIXME: except for BUILTIN_FREE.  */
  if (!ao_ref_base (ref)
      || ref->max_size == -1)
    return false;

  if (gimple_has_lhs (stmt)
      && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
	 and we can catch it in the current function where we could inspect
	 the previous value.
	 ???  We only need to care about the RHS throwing.  For aggregate
	 assignments or similar calls and non-call exceptions the LHS
	 might throw as well.  */
      && !stmt_can_throw_internal (stmt))
    {
      tree base, lhs = gimple_get_lhs (stmt);
      HOST_WIDE_INT size, offset, max_size, ref_offset = ref->offset;
      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
	 so base == ref->base does not always hold.  */
      if (base != ref->base)
	{
	  /* If both base and ref->base are MEM_REFs, only compare the
	     first operand, and if the second operand isn't equal constant,
	     try to add the offsets into offset and ref_offset.  */
	  if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
	      && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
	    {
	      if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
				       TREE_OPERAND (ref->base, 1)))
		{
		  double_int off1 = mem_ref_offset (base);
		  off1 = off1.lshift (BITS_PER_UNIT == 8
				      ? 3 : exact_log2 (BITS_PER_UNIT));
		  off1 = off1 + double_int::from_shwi (offset);
		  double_int off2 = mem_ref_offset (ref->base);
		  off2 = off2.lshift (BITS_PER_UNIT == 8
				      ? 3 : exact_log2 (BITS_PER_UNIT));
		  off2 = off2 + double_int::from_shwi (ref_offset);
		  if (off1.fits_shwi () && off2.fits_shwi ())
		    {
		      offset = off1.to_shwi ();
		      ref_offset = off2.to_shwi ();
		    }
		  else
		    size = -1;
		}
	    }
	  else
	    size = -1;
	}
      /* For a must-alias check we need to be able to constrain
	 the access properly.  */
      if (size != -1 && size == max_size)
	{
	  if (offset <= ref_offset
	      && offset + size >= ref_offset + ref->max_size)
	    return true;
	}
    }

  if (is_gimple_call (stmt))
    {
      tree callee = gimple_call_fndecl (stmt);
      if (callee != NULL_TREE
	  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee))
	  {
	  case BUILT_IN_FREE:
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      tree base = ao_ref_base (ref);
	      if (base && TREE_CODE (base) == MEM_REF
		  && TREE_OPERAND (base, 0) == ptr)
		return true;
	      break;
	    }

	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMPCPY:
	  case BUILT_IN_MEMMOVE:
	  case BUILT_IN_MEMSET:
	  case BUILT_IN_MEMCPY_CHK:
	  case BUILT_IN_MEMPCPY_CHK:
	  case BUILT_IN_MEMMOVE_CHK:
	  case BUILT_IN_MEMSET_CHK:
	    {
	      tree dest = gimple_call_arg (stmt, 0);
	      tree len = gimple_call_arg (stmt, 2);
	      if (!tree_fits_shwi_p (len))
		return false;
	      tree rbase = ref->base;
	      double_int roffset = double_int::from_shwi (ref->offset);
	      ao_ref dref;
	      ao_ref_init_from_ptr_and_size (&dref, dest, len);
	      tree base = ao_ref_base (&dref);
	      double_int offset = double_int::from_shwi (dref.offset);
	      double_int bpu = double_int::from_uhwi (BITS_PER_UNIT);
	      if (!base || dref.size == -1)
		return false;
	      if (TREE_CODE (base) == MEM_REF)
		{
		  if (TREE_CODE (rbase) != MEM_REF)
		    return false;
		  /* Compare pointers.  */
		  offset += bpu * mem_ref_offset (base);
		  roffset += bpu * mem_ref_offset (rbase);
		  base = TREE_OPERAND (base, 0);
		  rbase = TREE_OPERAND (rbase, 0);
		}
	      if (base == rbase)
		{
		  double_int size = bpu * tree_to_double_int (len);
		  double_int rsize = double_int::from_uhwi (ref->max_size);
		  if (offset.sle (roffset)
		      && (roffset + rsize).sle (offset + size))
		    return true;
		}
	      break;
	    }

	  case BUILT_IN_VA_END:
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      if (TREE_CODE (ptr) == ADDR_EXPR)
		{
		  tree base = ao_ref_base (ref);
		  if (TREE_OPERAND (ptr, 0) == base)
		    return true;
		}
	      break;
	    }

	  default:;
	  }
    }
  return false;
}

bool
stmt_kills_ref_p (gimple stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_kills_ref_p_1 (stmt, &r);
}
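
/* Behavior sketch for the kill predicate above (hypothetical GIMPLE,
   for illustration; 'a' is a struct with fields x and y):

     a = b;			kills 'a.x', the store covers the ref
     a.y = 0;			does not kill 'a.x', disjoint extent
     memset (&a, 0, 8);		kills 'a.x' if the ref lies within
				the first 8 bytes of 'a'

   A kill requires a must-alias relation: base, offset and size of the
   store have to be known to fully cover REF, hence the early exit for
   references without a constrained extent (max_size == -1).  */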


/* Walk the virtual use-def chain of VUSE until hitting the virtual operand
   TARGET, in which case true is returned, or a statement clobbering the
   memory reference REF, in which case false is returned.  The walk starts
   with VUSE, one argument of PHI.  */

static bool
maybe_skip_until (gimple phi, tree target, ao_ref *ref,
		  tree vuse, unsigned int *cnt, bitmap *visited,
		  bool abort_on_visited)
{
  basic_block bb = gimple_bb (phi);

  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);

  bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));

  /* Walk until we hit the target.  */
  while (vuse != target)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
      /* Recurse for PHI nodes.  */
      if (gimple_code (def_stmt) == GIMPLE_PHI)
	{
	  /* An already visited PHI node ends the walk successfully.  */
	  if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
	    return !abort_on_visited;
	  vuse = get_continuation_for_phi (def_stmt, ref, cnt,
					   visited, abort_on_visited);
	  if (!vuse)
	    return false;
	  continue;
	}
      else if (gimple_nop_p (def_stmt))
	return false;
      else
	{
	  /* A clobbering statement ends the walk with failure.  */
	  ++*cnt;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
	    return false;
	}
      /* If we reach a new basic-block see if we already skipped it
         in a previous walk that ended successfully.  */
      if (gimple_bb (def_stmt) != bb)
	{
	  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
	    return !abort_on_visited;
	  bb = gimple_bb (def_stmt);
	}
      vuse = gimple_vuse (def_stmt);
    }
  return true;
}

/* For two PHI arguments ARG0 and ARG1 try to skip non-aliasing code
   until we hit the phi argument definition that dominates the other one.
   Return that, or NULL_TREE if there is no such definition.  */

static tree
get_continuation_for_phi_1 (gimple phi, tree arg0, tree arg1,
			    ao_ref *ref, unsigned int *cnt,
			    bitmap *visited, bool abort_on_visited)
{
  gimple def0 = SSA_NAME_DEF_STMT (arg0);
  gimple def1 = SSA_NAME_DEF_STMT (arg1);
  tree common_vuse;

  if (arg0 == arg1)
    return arg0;
  else if (gimple_nop_p (def0)
	   || (!gimple_nop_p (def1)
	       && dominated_by_p (CDI_DOMINATORS,
				  gimple_bb (def1), gimple_bb (def0))))
    {
      if (maybe_skip_until (phi, arg0, ref, arg1, cnt,
			    visited, abort_on_visited))
	return arg0;
    }
  else if (gimple_nop_p (def1)
	   || dominated_by_p (CDI_DOMINATORS,
			      gimple_bb (def0), gimple_bb (def1)))
    {
      if (maybe_skip_until (phi, arg1, ref, arg0, cnt,
			    visited, abort_on_visited))
	return arg1;
    }
  /* Special case of a diamond:
       MEM_1 = ...
       goto (cond) ? L1 : L2
       L1: store1 = ...    #MEM_2 = vuse(MEM_1)
	   goto L3
       L2: store2 = ...    #MEM_3 = vuse(MEM_1)
       L3: MEM_4 = PHI<MEM_2, MEM_3>
     We were called with the PHI at L3, MEM_2 and MEM_3 don't
     dominate each other, but still we can easily skip this PHI node
     if we recognize that the vuse MEM operand is the same for both,
     and that we can skip both statements (they don't clobber us).
     This is still linear.  Don't use maybe_skip_until, that might
     potentially be slow.  */
  else if ((common_vuse = gimple_vuse (def0))
	   && common_vuse == gimple_vuse (def1))
    {
      *cnt += 2;
      if (!stmt_may_clobber_ref_p_1 (def0, ref)
	  && !stmt_may_clobber_ref_p_1 (def1, ref))
	return common_vuse;
    }

  return NULL_TREE;
}


/* Starting from a PHI node for the virtual operand of the memory reference
   REF find a continuation virtual operand that allows us to continue walking
   statements dominating PHI, skipping only statements that cannot possibly
   clobber REF.  Increments *CNT for each alias disambiguation done.
   Returns NULL_TREE if no suitable virtual operand can be found.  */

tree
get_continuation_for_phi (gimple phi, ao_ref *ref,
			  unsigned int *cnt, bitmap *visited,
			  bool abort_on_visited)
{
  unsigned nargs = gimple_phi_num_args (phi);

  /* A single-argument PHI we can simply look through.  */
  if (nargs == 1)
    return PHI_ARG_DEF (phi, 0);

  /* For two or more arguments try to pairwise skip non-aliasing code
     until we hit the phi argument definition that dominates the other one.  */
  else if (nargs >= 2)
    {
      tree arg0, arg1;
      unsigned i;

      /* Find a candidate for the virtual operand which definition
	 dominates those of all others.  */
      arg0 = PHI_ARG_DEF (phi, 0);
      if (!SSA_NAME_IS_DEFAULT_DEF (arg0))
	for (i = 1; i < nargs; ++i)
	  {
	    arg1 = PHI_ARG_DEF (phi, i);
	    if (SSA_NAME_IS_DEFAULT_DEF (arg1))
	      {
		arg0 = arg1;
		break;
	      }
	    if (dominated_by_p (CDI_DOMINATORS,
				gimple_bb (SSA_NAME_DEF_STMT (arg0)),
				gimple_bb (SSA_NAME_DEF_STMT (arg1))))
	      arg0 = arg1;
	  }

      /* Then pairwise reduce against the found candidate.  */
      for (i = 0; i < nargs; ++i)
	{
	  arg1 = PHI_ARG_DEF (phi, i);
	  arg0 = get_continuation_for_phi_1 (phi, arg0, arg1, ref,
					     cnt, visited, abort_on_visited);
	  if (!arg0)
	    return NULL_TREE;
	}

      return arg0;
    }

  return NULL_TREE;
}
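
/* An illustration of the walk above (hypothetical virtual SSA, not
   taken from real dumps): for a loop

     # .MEM_4 = PHI <.MEM_1(preheader), .MEM_3(latch)>
   body:
     b[i_2] = x_5;    # .MEM_3 = VDEF <.MEM_4>

   a walk for a REF not aliasing 'b' reaches the PHI; the preheader
   argument .MEM_1 dominates the latch argument .MEM_3, so
   maybe_skip_until walks backwards from .MEM_3, disambiguates the
   store against REF and stops at the already-visited PHI.  .MEM_1 is
   returned and the walk continues before the loop.  */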

/* Based on the memory reference REF and its virtual use VUSE call
   WALKER for each virtual use that is equivalent to VUSE, including VUSE
   itself.  That is, for each virtual use for which its defining statement
   does not clobber REF.

   WALKER is called with REF, the current virtual use and DATA.  If
   WALKER returns non-NULL the walk stops and its result is returned.
   At the end of a non-successful walk NULL is returned.

   TRANSLATE, if non-NULL, is called with a pointer to REF, the virtual
   use whose defining statement may clobber REF, and DATA.
   If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
   If TRANSLATE returns non-NULL the walk stops and its result is returned.
   If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
   to adjust REF and *DATA to make that valid.

   TODO: Cache the vector of equivalent vuses per ref, vuse pair.  */

void *
walk_non_aliased_vuses (ao_ref *ref, tree vuse,
			void *(*walker)(ao_ref *, tree, unsigned int, void *),
			void *(*translate)(ao_ref *, tree, void *), void *data)
{
  bitmap visited = NULL;
  void *res;
  unsigned int cnt = 0;
  bool translated = false;

  timevar_push (TV_ALIAS_STMT_WALK);

  do
    {
      gimple def_stmt;

      /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
      res = (*walker) (ref, vuse, cnt, data);
      /* Abort walk.  */
      if (res == (void *)-1)
	{
	  res = NULL;
	  break;
	}
      /* Lookup succeeded.  */
      else if (res != NULL)
	break;

      def_stmt = SSA_NAME_DEF_STMT (vuse);
      if (gimple_nop_p (def_stmt))
	break;
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	vuse = get_continuation_for_phi (def_stmt, ref, &cnt,
					 &visited, translated);
      else
	{
	  cnt++;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
	    {
	      if (!translate)
		break;
	      res = (*translate) (ref, vuse, data);
	      /* Failed lookup and translation.  */
	      if (res == (void *)-1)
		{
		  res = NULL;
		  break;
		}
	      /* Lookup succeeded.  */
	      else if (res != NULL)
		break;
	      /* Translation succeeded, continue walking.  */
	      translated = true;
	    }
	  vuse = gimple_vuse (def_stmt);
	}
    }
  while (vuse);

  if (visited)
    BITMAP_FREE (visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return res;
}
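
/* A minimal usage sketch (hypothetical callbacks, for illustration):
   walk to the function entry unless the chain gets too long or a
   clobber intervenes.

     static void *
     entry_reached (ao_ref *ref ATTRIBUTE_UNUSED, tree vuse,
		    unsigned int cnt, void *data ATTRIBUTE_UNUSED)
     {
       if (cnt > 100)
	 return (void *)-1;	(abort overly expensive walks)
       if (gimple_nop_p (SSA_NAME_DEF_STMT (vuse)))
	 return vuse;		(reached the default definition)
       return NULL;		(keep walking)
     }

     ...
     void *res = walk_non_aliased_vuses (&r, gimple_vuse (stmt),
					 entry_reached, NULL, NULL);

   Passing a NULL TRANSLATE makes the walk stop at the first statement
   that may clobber REF.  */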


/* Based on the memory reference REF call WALKER for each vdef whose
   defining statement may clobber REF, starting with VDEF.  If REF
   is NULL_TREE, each defining statement is visited.

   WALKER is called with REF, the current vdef and DATA.  If WALKER
   returns true the walk is stopped, otherwise it continues.

   At PHI nodes walk_aliased_vdefs forks into one walk for each
   PHI argument (but only one walk continues on merge points), the
   return value is true if any of the walks was successful.

   The function returns the number of statements walked.  */

static unsigned int
walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
		      bool (*walker)(ao_ref *, tree, void *), void *data,
		      bitmap *visited, unsigned int cnt)
{
  do
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

      if (*visited
	  && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
	return cnt;

      if (gimple_nop_p (def_stmt))
	return cnt;
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	{
	  unsigned i;
	  if (!*visited)
	    *visited = BITMAP_ALLOC (NULL);
	  for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
	    cnt += walk_aliased_vdefs_1 (ref, gimple_phi_arg_def (def_stmt, i),
					 walker, data, visited, 0);
	  return cnt;
	}

      /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
      cnt++;
      if ((!ref
	   || stmt_may_clobber_ref_p_1 (def_stmt, ref))
	  && (*walker) (ref, vdef, data))
	return cnt;

      vdef = gimple_vuse (def_stmt);
    }
  while (1);
}

unsigned int
walk_aliased_vdefs (ao_ref *ref, tree vdef,
		    bool (*walker)(ao_ref *, tree, void *), void *data,
		    bitmap *visited)
{
  bitmap local_visited = NULL;
  unsigned int ret;

  timevar_push (TV_ALIAS_STMT_WALK);

  ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
			      visited ? visited : &local_visited, 0);
  if (local_visited)
    BITMAP_FREE (local_visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return ret;
}
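
/* A minimal usage sketch (hypothetical caller, for illustration):
   detect whether any may-clobbering definition of REF is reachable
   backwards from a given use.

     static bool
     mark_clobbered (ao_ref *ref ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
		     void *data)
     {
       *(bool *) data = true;
       return true;		(the first hit suffices, stop the walk)
     }

     ...
     bool clobbered = false;
     walk_aliased_vdefs (&r, gimple_vuse (use_stmt), mark_clobbered,
			 &clobbered, NULL);

   With VISITED == NULL a local bitmap is allocated and freed, so
   repeated calls do not share visited state.  */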