/* Alias analysis for trees.
   Copyright (C) 2004-2019 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "timevar.h"	/* for TV_ALIAS_STMT_WALK */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "langhooks.h"
#include "dumpfile.h"
#include "tree-eh.h"
#include "tree-dfa.h"
#include "ipa-reference.h"
#include "varasm.h"

/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function, its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.

   This file contains functions for disambiguating memory references,
   the so-called alias-oracle, and tools for walking the gimple IL.

   The main alias-oracle entry-points are

   bool stmt_may_clobber_ref_p (gimple *, tree)

     This function queries if a statement may invalidate (parts of)
     the memory designated by the reference tree argument.

   bool ref_maybe_used_by_stmt_p (gimple *, tree)

     This function queries if a statement may need (parts of) the
     memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

   bool refs_may_alias_p (tree, tree)

     This function tries to disambiguate two reference trees.

   bool ptr_deref_may_alias_global_p (tree)

     This function queries if dereferencing a pointer variable may
     alias global memory.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.c.  */
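
/* As a minimal usage sketch (assuming a client pass that has a
   statement STMT and a reference tree REF in scope), a typical
   pair of oracle queries looks like

     if (!stmt_may_clobber_ref_p (stmt, ref)
	 && !ref_maybe_used_by_stmt_p (stmt, ref))
       ...

   i.e. STMT provably neither writes nor reads (parts of) the
   memory designated by REF, so a transform may move the access
   across STMT.  */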


/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
} alias_stats;

void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.refs_may_alias_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.ref_maybe_used_by_call_p_no_alias
	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.call_may_clobber_ref_p_no_alias,
	   alias_stats.call_may_clobber_ref_p_no_alias
	   + alias_stats.call_may_clobber_ref_p_may_alias);
  dump_alias_stats_in_alias_c (s);
}


/* Return true if dereferencing PTR may alias a global variable.  */

bool
ptr_deref_may_alias_global_p (tree ptr)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here, it may point
     to global memory.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to punt.  */
  if (!pi)
    return true;

  /* ???  This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt);
}

/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (!VAR_P (decl)
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	return compare_base_decls (base, decl) != 0;
      else if (base
	       && CONSTANT_CLASS_P (base))
	return false;
      else
	return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}

/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions for the same
     pointer.  In this case we still want to say both pointers alias,
     so shortcut that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ???  This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}
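
/* As an illustrative sketch, a query like

     ptr_derefs_may_alias_p (p_1 + 4, &a.b)

   strips the POINTER_PLUS_EXPR from the first argument and resolves
   the second to the decl 'a', reducing the problem to
   ptr_deref_may_alias_decl_p (p_1, a) and ultimately to a points-to
   solution lookup.  */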

/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}

/* Returns true if PTR1 and PTR2 compare unequal because of points-to.  */

bool
ptrs_compare_unequal (tree ptr1, tree ptr2)
{
  /* First resolve the pointers down to a SSA name pointer base or
     a VAR_DECL, PARM_DECL or RESULT_DECL.  This explicitly does
     not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
     or STRING_CSTs which need points-to adjustments to track them
     in the points-to sets.  */
  tree obj1 = NULL_TREE;
  tree obj2 = NULL_TREE;
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj1 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr1 = TREE_OPERAND (tem, 0);
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj2 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr2 = TREE_OPERAND (tem, 0);
    }

  /* Canonicalize ptr vs. object.  */
  if (TREE_CODE (ptr1) == SSA_NAME && obj2)
    {
      std::swap (ptr1, ptr2);
      std::swap (obj1, obj2);
    }

  if (obj1 && obj2)
    /* Other code handles this correctly, no need to duplicate it here.  */;
  else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
      /* We may not use restrict to optimize pointer comparisons.
         See PR71062.  So we have to assume that restrict-pointed-to
	 may in fact be obj1.  */
      if (!pi
	  || pi->pt.vars_contains_restrict
	  || pi->pt.vars_contains_interposable)
	return false;
      if (VAR_P (obj1)
	  && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
	{
	  varpool_node *node = varpool_node::get (obj1);
	  /* If obj1 may bind to NULL give up (see below).  */
	  if (! node
	      || ! node->nonzero_address ()
	      || ! decl_binds_to_current_def_p (obj1))
	    return false;
	}
      return !pt_solution_includes (&pi->pt, obj1);
    }

  /* ???  We'd like to handle ptr1 != NULL and ptr1 != ptr2
     but those require pt.null to be conservatively correct.  */

  return false;
}
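
/* As an illustrative example, for

     static int a;
     int *p;
     ... &a != p ...

   the comparison folds to "unequal" when the points-to solution of p
   provably does not contain 'a' and 'a' is known to bind to the
   current definition with a nonzero address, per the checks above.  */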

/* Return true if a memory reference with base BASE may refer to
   global memory.  */

static bool
ref_may_alias_global_p_1 (tree base)
{
  if (DECL_P (base))
    return is_global_var (base);
  else if (TREE_CODE (base) == MEM_REF
	   || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
  return true;
}

bool
ref_may_alias_global_p (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  return ref_may_alias_global_p_1 (base);
}

bool
ref_may_alias_global_p (tree ref)
{
  tree base = get_base_address (ref);
  return ref_may_alias_global_p_1 (base);
}

/* Return true if STMT may clobber global memory.  */

bool
stmt_may_clobber_global_p (gimple *stmt)
{
  tree lhs;

  if (!gimple_vdef (stmt))
    return false;

  /* ???  We can ask the oracle whether an artificial pointer
     dereference with a pointer with points-to information covering
     all global memory (what about non-address taken memory?) may be
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage, do some manual
     checking instead.
     ???  We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      lhs = gimple_assign_lhs (stmt);
      return (TREE_CODE (lhs) != SSA_NAME
	      && ref_may_alias_global_p (lhs));
    case GIMPLE_CALL:
      return true;
    default:
      return true;
    }
}


/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  unsigned i;
  tree ptr;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  FOR_EACH_SSA_NAME (i, ptr, cfun)
    {
      struct ptr_info_def *pi;

      if (!POINTER_TYPE_P (TREE_TYPE (ptr))
	  || SSA_NAME_IN_FREE_LIST (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}


/* Dump alias information on stderr.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}


/* Dump the points-to set *PT into FILE.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      if (pt->vars_contains_nonlocal
	  || pt->vars_contains_escaped
	  || pt->vars_contains_escaped_heap
	  || pt->vars_contains_restrict
	  || pt->vars_contains_interposable)
	{
	  const char *comma = "";
	  fprintf (file, " (");
	  if (pt->vars_contains_nonlocal)
	    {
	      fprintf (file, "nonlocal");
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped)
	    {
	      fprintf (file, "%sescaped", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped_heap)
	    {
	      fprintf (file, "%sescaped heap", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_restrict)
	    {
	      fprintf (file, "%srestrict", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_interposable)
	    fprintf (file, "%sinterposable", comma);
	  fprintf (file, ")");
	}
    }
}


/* Unified dump function for pt_solution.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
}

DEBUG_FUNCTION void
debug (pt_solution *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}


/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    dump_points_to_solution (file, &pi->pt);
  else
    fprintf (file, ", points-to anything");

  fprintf (file, "\n");
}


/* Dump points-to information for VAR into stderr.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}


/* Initializes the alias-oracle reference representation *R from REF.  */

void
ao_ref_init (ao_ref *r, tree ref)
{
  r->ref = ref;
  r->base = NULL_TREE;
  r->offset = 0;
  r->size = -1;
  r->max_size = -1;
  r->ref_alias_set = -1;
  r->base_alias_set = -1;
  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}
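
/* A minimal usage sketch: the representation is lazy, so the base,
   extent and alias sets are only computed when first queried:

     ao_ref r1, r2;
     ao_ref_init (&r1, ref1);
     ao_ref_init (&r2, ref2);
     if (!refs_may_alias_p_1 (&r1, &r2, true))
       ...the two accesses are independent...

   which is exactly what the tree-based refs_may_alias_p wrapper
   further below does.  */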

/* Returns the base object of the memory reference *REF.  */

tree
ao_ref_base (ao_ref *ref)
{
  bool reverse;

  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
				       &ref->max_size, &reverse);
  return ref->base;
}

/* Returns the base object alias set of the memory reference *REF.  */

alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;
  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  base_ref = ref->ref;
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}

/* Returns the reference alias set of the memory reference *REF.  */

alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
  if (ref->ref_alias_set != -1)
    return ref->ref_alias_set;
  ref->ref_alias_set = get_alias_set (ref->ref);
  return ref->ref_alias_set;
}

/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   to or after the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  poly_int64 t, size_hwi, extra_offset = 0;
  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  extra_offset *= BITS_PER_UNIT;
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  size = NULL_TREE;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset;
  if (size
      && poly_int_tree_p (size, &size_hwi)
      && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
    ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}
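
/* For example (a sketch; CALL is assumed to be a gcall to a
   memcpy-like builtin, with the libc argument order), the memory
   read through the source argument can be modeled as

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref,
				    gimple_call_arg (call, 1),
				    gimple_call_arg (call, 2));

   while passing NULL_TREE as the size, as done for strcat at the
   end of this file, leaves the extent unknown.  */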

/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static inline int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* If we would have to do structural comparison bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  if (get_alias_set (type1) == get_alias_set (type2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}

/* Determine if the two component references REF1 and REF2 which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference may alias.  REF2 is the only one that can
   be a decl in which case REF2_IS_DECL is true.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */

static bool
aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   poly_int64 offset1, poly_int64 max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
			   poly_int64 offset2, poly_int64 max_size2,
			   bool ref2_is_decl)
{
  /* If one reference is a component reference through pointers try to find a
     common base and apply offset based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  tree *refp;
  int same_p;

  /* Choose bases and base types to search for.  */
  base1 = ref1;
  while (handled_component_p (base1))
    base1 = TREE_OPERAND (base1, 0);
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    base2 = TREE_OPERAND (base2, 0);
  type2 = TREE_TYPE (base2);

  /* Now search for type1 in the access path of ref2.  This
     would be a common base for doing offset based disambiguation on.  */
  refp = &ref2;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type1) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type1);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      poly_int64 offadj, sztmp, msztmp;
      bool reverse;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
      offset2 -= offadj;
      get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp, &reverse);
      offset1 -= offadj;
      return ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  /* If we didn't find a common base, try the other way around.  */
  refp = &ref1;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type2) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type2);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      poly_int64 offadj, sztmp, msztmp;
      bool reverse;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
      offset1 -= offadj;
      get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp, &reverse);
      offset2 -= offadj;
      return ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2);
    }

  /* If we have two type access paths B1.path1 and B2.path2 they may
     only alias if either B1 is in B2.path2 or B2 is in B1.path1.
     But we can still have a path that goes B1.path1...B2.path2 with
     a part that we do not see.  So we can only disambiguate now
     if there is no B2 in the tail of path1 and no B1 on the
     tail of path2.  */
  if (base1_alias_set == ref2_alias_set
      || alias_set_subset_of (base1_alias_set, ref2_alias_set))
    return true;
  /* If this is ptr vs. decl then we know there is no ptr ... decl path.  */
  if (!ref2_is_decl)
    return (base2_alias_set == ref1_alias_set
	    || alias_set_subset_of (base2_alias_set, ref1_alias_set));
  return false;
}

/* Return true if we can determine that component references REF1 and REF2,
   that are within a common DECL, cannot overlap.  */

static bool
nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
{
  auto_vec<tree, 16> component_refs1;
  auto_vec<tree, 16> component_refs2;

  /* Create the stack of handled components for REF1.  */
  while (handled_component_p (ref1))
    {
      component_refs1.safe_push (ref1);
      ref1 = TREE_OPERAND (ref1, 0);
    }
  if (TREE_CODE (ref1) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref1, 1)))
	return false;
      ref1 = TREE_OPERAND (TREE_OPERAND (ref1, 0), 0);
    }

  /* Create the stack of handled components for REF2.  */
  while (handled_component_p (ref2))
    {
      component_refs2.safe_push (ref2);
      ref2 = TREE_OPERAND (ref2, 0);
    }
  if (TREE_CODE (ref2) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref2, 1)))
	return false;
      ref2 = TREE_OPERAND (TREE_OPERAND (ref2, 0), 0);
    }

  /* Bases must be either same or uncomparable.  */
  gcc_checking_assert (ref1 == ref2
		       || (DECL_P (ref1) && DECL_P (ref2)
			   && compare_base_decls (ref1, ref2) != 0));

  /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
     rank.  This is sufficient because we start from the same DECL and you
     cannot reference several fields at a time with COMPONENT_REFs (unlike
     with ARRAY_RANGE_REFs for arrays) so you always need the same number
     of them to access a sub-component, unless you're in a union, in which
     case the return value will precisely be false.  */
  while (true)
    {
      do
	{
	  if (component_refs1.is_empty ())
	    return false;
	  ref1 = component_refs1.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));

      do
	{
	  if (component_refs2.is_empty ())
	     return false;
	  ref2 = component_refs2.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));

      /* Beware of BIT_FIELD_REF.  */
      if (TREE_CODE (ref1) != COMPONENT_REF
	  || TREE_CODE (ref2) != COMPONENT_REF)
	return false;

      tree field1 = TREE_OPERAND (ref1, 1);
      tree field2 = TREE_OPERAND (ref2, 1);

      /* ??? We cannot simply use the type of operand #0 of the refs here
	 as the Fortran compiler smuggles type punning into COMPONENT_REFs
	 for common blocks instead of using unions like everyone else.  */
      tree type1 = DECL_CONTEXT (field1);
      tree type2 = DECL_CONTEXT (field2);

      /* We cannot disambiguate fields in a union or qualified union.  */
      if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE)
	 return false;

      if (field1 != field2)
	{
	  /* A field and its representative need to be considered the
	     same.  */
	  if (DECL_BIT_FIELD_REPRESENTATIVE (field1) == field2
	      || DECL_BIT_FIELD_REPRESENTATIVE (field2) == field1)
	    return false;
	  /* Different fields of the same record type cannot overlap.
	     ??? Bitfields can overlap at RTL level so punt on them.  */
	  if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
	    return false;
	  return true;
	}
    }

  return false;
}
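
/* As an illustrative example, for

     struct S { int a; int b; } s;

   the accesses s.a and s.b pop COMPONENT_REFs of the same
   RECORD_TYPE with distinct FIELD_DECLs and are disambiguated,
   whereas for

     union U { int a; float b; } u;

   u.a and u.b hit the union check above and the function
   conservatively returns false.  */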

/* qsort compare function to sort FIELD_DECLs by the TYPE_UID of
   their DECL_FIELD_CONTEXT.  */

static inline int
ncr_compar (const void *field1_, const void *field2_)
{
  const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
  const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
  unsigned int uid1 = TYPE_UID (DECL_FIELD_CONTEXT (field1));
  unsigned int uid2 = TYPE_UID (DECL_FIELD_CONTEXT (field2));
  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  return 0;
}

/* Return true if we can determine that the fields referenced cannot
   overlap for any pair of objects.  */

static bool
nonoverlapping_component_refs_p (const_tree x, const_tree y)
{
  if (!flag_strict_aliasing
      || !x || !y
      || TREE_CODE (x) != COMPONENT_REF
      || TREE_CODE (y) != COMPONENT_REF)
    return false;

  auto_vec<const_tree, 16> fieldsx;
  while (TREE_CODE (x) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (x, 1);
      tree type = DECL_FIELD_CONTEXT (field);
      if (TREE_CODE (type) == RECORD_TYPE)
	fieldsx.safe_push (field);
      x = TREE_OPERAND (x, 0);
    }
  if (fieldsx.length () == 0)
    return false;
  auto_vec<const_tree, 16> fieldsy;
  while (TREE_CODE (y) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (y, 1);
      tree type = DECL_FIELD_CONTEXT (field);
      if (TREE_CODE (type) == RECORD_TYPE)
	fieldsy.safe_push (field);
      y = TREE_OPERAND (y, 0);
    }
  if (fieldsy.length () == 0)
    return false;

  /* Most common case first.  */
  if (fieldsx.length () == 1
      && fieldsy.length () == 1)
    return ((DECL_FIELD_CONTEXT (fieldsx[0])
	     == DECL_FIELD_CONTEXT (fieldsy[0]))
	    && fieldsx[0] != fieldsy[0]
	    && !(DECL_BIT_FIELD (fieldsx[0]) && DECL_BIT_FIELD (fieldsy[0])));

  if (fieldsx.length () == 2)
    {
      if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
	std::swap (fieldsx[0], fieldsx[1]);
    }
  else
    fieldsx.qsort (ncr_compar);

  if (fieldsy.length () == 2)
    {
      if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
	std::swap (fieldsy[0], fieldsy[1]);
    }
  else
    fieldsy.qsort (ncr_compar);

  unsigned i = 0, j = 0;
  do
    {
      const_tree fieldx = fieldsx[i];
      const_tree fieldy = fieldsy[j];
      tree typex = DECL_FIELD_CONTEXT (fieldx);
      tree typey = DECL_FIELD_CONTEXT (fieldy);
      if (typex == typey)
	{
	  /* We're left with accessing different fields of a structure,
	     no possible overlap.  */
	  if (fieldx != fieldy)
	    {
	      /* A field and its representative need to be considered the
		 same.  */
	      if (DECL_BIT_FIELD_REPRESENTATIVE (fieldx) == fieldy
		  || DECL_BIT_FIELD_REPRESENTATIVE (fieldy) == fieldx)
		return false;
	      /* Different fields of the same record type cannot overlap.
		 ??? Bitfields can overlap at RTL level so punt on them.  */
	      if (DECL_BIT_FIELD (fieldx) && DECL_BIT_FIELD (fieldy))
		return false;
	      return true;
	    }
	}
      if (TYPE_UID (typex) < TYPE_UID (typey))
	{
	  i++;
	  if (i == fieldsx.length ())
	    break;
	}
      else
	{
	  j++;
	  if (j == fieldsy.length ())
	    break;
	}
    }
  while (1);

  return false;
}


/* Return true if two memory references based on the variables BASE1
   and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
decl_refs_may_alias_p (tree ref1, tree base1,
		       poly_int64 offset1, poly_int64 max_size1,
		       tree ref2, tree base2,
		       poly_int64 offset2, poly_int64 max_size2)
{
  gcc_checking_assert (DECL_P (base1) && DECL_P (base2));

  /* If the references are based on different variables, they cannot
     alias.  */
  if (compare_base_decls (base1, base2) == 0)
    return false;

  /* If both references are based on the same variable, they cannot alias if
     the accesses do not overlap.  */
  if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
    return false;

  /* For components with variable position, the above test isn't sufficient,
     so we disambiguate component references manually.  */
  if (ref1 && ref2
      && handled_component_p (ref1) && handled_component_p (ref2)
      && nonoverlapping_component_refs_of_decl_p (ref1, ref2))
    return false;

  return true;
}

/* Return true if an indirect reference based on *PTR1 constrained
   to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
   constrained to [OFFSET2, OFFSET2 + MAX_SIZE2).  *PTR1 and BASE2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			       poly_int64 offset1, poly_int64 max_size1,
			       alias_set_type ref1_alias_set,
			       alias_set_type base1_alias_set,
			       tree ref2 ATTRIBUTE_UNUSED, tree base2,
			       poly_int64 offset2, poly_int64 max_size2,
			       alias_set_type ref2_alias_set,
			       alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptrtype1, dbase2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && DECL_P (base2));

  ptr1 = TREE_OPERAND (base1, 0);
  poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;

  /* If only one reference is based on a variable, they cannot alias if
     the pointer access is beyond the extent of the variable access.
     (the pointer base cannot validly point to an offset less than zero
     of the variable).
     ???  IVOPTs creates bases that do not honor this restriction,
     so do not apply this optimization for TARGET_MEM_REFs.  */
  if (TREE_CODE (base1) != TARGET_MEM_REF
      && !ranges_maybe_overlap_p (offset1 + moff, -1, offset2, max_size2))
    return false;
  /* They also cannot alias if the pointer may not point to the decl.  */
  if (!ptr_deref_may_alias_decl_p (ptr1, base2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == 0)
    return true;

  /* When we are trying to disambiguate an access with a pointer dereference
     as base versus one with a decl as base we can use both the size
     of the decl and its dynamic type for extra disambiguation.
     ???  We do not know anything about the dynamic type of the decl
     other than that its alias-set contains base2_alias_set as a subset
     which does not help us here.  */
  /* As we know nothing useful about the dynamic type of the decl just
     use the usual conflict check rather than a subset test.
     ???  We could introduce -fvery-strict-aliasing when the language
     does not allow decls to have a dynamic type that differs from their
     static type.  Then we can check
     !alias_set_subset_of (base1_alias_set, base2_alias_set) instead.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;
  /* If the size of the access relevant for TBAA through the pointer
     is bigger than the size of the decl we can't possibly access the
     decl via that pointer.  */
  if (DECL_SIZE (base2) && COMPLETE_TYPE_P (TREE_TYPE (ptrtype1))
      && poly_int_tree_p (DECL_SIZE (base2))
      && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (ptrtype1)))
      /* ???  This in turn may run afoul when a decl of type T which is
	 a member of union type U is accessed through a pointer to
	 type U and sizeof T is smaller than sizeof U.  */
      && TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
      && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
      && known_lt (wi::to_poly_widest (DECL_SIZE (base2)),
		   wi::to_poly_widest (TYPE_SIZE (TREE_TYPE (ptrtype1)))))
    return false;

  if (!ref2)
    return true;

  /* If the decl is accessed via a MEM_REF, reconstruct the base
     we can use for TBAA and an appropriately adjusted offset.  */
  dbase2 = ref2;
  while (handled_component_p (dbase2))
    dbase2 = TREE_OPERAND (dbase2, 0);
  poly_int64 doffset1 = offset1;
  poly_offset_int doffset2 = offset2;
  if (TREE_CODE (dbase2) == MEM_REF
      || TREE_CODE (dbase2) == TARGET_MEM_REF)
    doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (base2)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.
     For MEM_REFs we require that the component-ref offset we computed
     is relative to the start of the type which we ensure by
     comparing rvalue and access type and disregarding the constant
     pointer offset.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
    return ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2);

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, true);

  return true;
}

/* Return true if two indirect references based on *PTR1
   and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  *PTR1 and *PTR2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees. */

static bool
indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			   poly_int64 offset1, poly_int64 max_size1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   tree ref2 ATTRIBUTE_UNUSED, tree base2,
			   poly_int64 offset2, poly_int64 max_size2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptr2;
  tree ptrtype1, ptrtype2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && (TREE_CODE (base2) == MEM_REF
			   || TREE_CODE (base2) == TARGET_MEM_REF));

  ptr1 = TREE_OPERAND (base1, 0);
  ptr2 = TREE_OPERAND (base2, 0);

  /* If both bases are based on pointers they cannot alias if they may not
     point to the same memory object or if they point to the same object
     and the accesses do not overlap.  */
  if ((!cfun || gimple_in_ssa_p (cfun))
      && operand_equal_p (ptr1, ptr2, 0)
      && (((TREE_CODE (base1) != TARGET_MEM_REF
	    || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
	   && (TREE_CODE (base2) != TARGET_MEM_REF
	       || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
	  || (TREE_CODE (base1) == TARGET_MEM_REF
	      && TREE_CODE (base2) == TARGET_MEM_REF
	      && (TMR_STEP (base1) == TMR_STEP (base2)
		  || (TMR_STEP (base1) && TMR_STEP (base2)
		      && operand_equal_p (TMR_STEP (base1),
					  TMR_STEP (base2), 0)))
	      && (TMR_INDEX (base1) == TMR_INDEX (base2)
		  || (TMR_INDEX (base1) && TMR_INDEX (base2)
		      && operand_equal_p (TMR_INDEX (base1),
					  TMR_INDEX (base2), 0)))
	      && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
		  || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
		      && operand_equal_p (TMR_INDEX2 (base1),
					  TMR_INDEX2 (base2), 0))))))
    {
      poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
      poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
      return ranges_maybe_overlap_p (offset1 + moff1, max_size1,
				     offset2 + moff2, max_size2);
    }
  if (!ptr_derefs_may_alias_p (ptr1, ptr2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
  ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == 0
      || base2_alias_set == 0)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && (TREE_CODE (base2) != TARGET_MEM_REF
	  || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
      && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1
      && same_type_for_tbaa (TREE_TYPE (ptrtype1),
			     TREE_TYPE (ptrtype2)) == 1
      /* But avoid treating arrays as "objects", instead assume they
         can overlap by an exact multiple of their element size.  */
      && TREE_CODE (TREE_TYPE (ptrtype1)) != ARRAY_TYPE)
    return ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2);

  /* Do type-based disambiguation.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
    return true;

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, false);

  return true;
}
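
/* For example (a sketch), two accesses MEM[ptr_1 + 4] and
   MEM[ptr_1 + 8] of four bytes each share the same SSA pointer, so
   the equal-base path above disambiguates them purely by constant
   offset, without consulting points-to information or TBAA.  */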
1361 
1362 /* Return true, if the two memory references REF1 and REF2 may alias.  */
1363 
1364 bool
refs_may_alias_p_1(ao_ref * ref1,ao_ref * ref2,bool tbaa_p)1365 refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
1366 {
1367   tree base1, base2;
1368   poly_int64 offset1 = 0, offset2 = 0;
1369   poly_int64 max_size1 = -1, max_size2 = -1;
1370   bool var1_p, var2_p, ind1_p, ind2_p;
1371 
1372   gcc_checking_assert ((!ref1->ref
1373 			|| TREE_CODE (ref1->ref) == SSA_NAME
1374 			|| DECL_P (ref1->ref)
1375 			|| TREE_CODE (ref1->ref) == STRING_CST
1376 			|| handled_component_p (ref1->ref)
1377 			|| TREE_CODE (ref1->ref) == MEM_REF
1378 			|| TREE_CODE (ref1->ref) == TARGET_MEM_REF)
1379 		       && (!ref2->ref
1380 			   || TREE_CODE (ref2->ref) == SSA_NAME
1381 			   || DECL_P (ref2->ref)
1382 			   || TREE_CODE (ref2->ref) == STRING_CST
1383 			   || handled_component_p (ref2->ref)
1384 			   || TREE_CODE (ref2->ref) == MEM_REF
1385 			   || TREE_CODE (ref2->ref) == TARGET_MEM_REF));
1386 
1387   /* Decompose the references into their base objects and the access.  */
1388   base1 = ao_ref_base (ref1);
1389   offset1 = ref1->offset;
1390   max_size1 = ref1->max_size;
1391   base2 = ao_ref_base (ref2);
1392   offset2 = ref2->offset;
1393   max_size2 = ref2->max_size;
1394 
1395   /* We can end up with registers or constants as bases for example from
1396      *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
1397      which is seen as a struct copy.  */
1398   if (TREE_CODE (base1) == SSA_NAME
1399       || TREE_CODE (base1) == CONST_DECL
1400       || TREE_CODE (base1) == CONSTRUCTOR
1401       || TREE_CODE (base1) == ADDR_EXPR
1402       || CONSTANT_CLASS_P (base1)
1403       || TREE_CODE (base2) == SSA_NAME
1404       || TREE_CODE (base2) == CONST_DECL
1405       || TREE_CODE (base2) == CONSTRUCTOR
1406       || TREE_CODE (base2) == ADDR_EXPR
1407       || CONSTANT_CLASS_P (base2))
1408     return false;
1409 
1410   /* We can end up referring to code via function and label decls.
1411      As we likely do not properly track code aliases conservatively
1412      bail out.  */
1413   if (TREE_CODE (base1) == FUNCTION_DECL
1414       || TREE_CODE (base1) == LABEL_DECL
1415       || TREE_CODE (base2) == FUNCTION_DECL
1416       || TREE_CODE (base2) == LABEL_DECL)
1417     return true;
1418 
1419   /* Two volatile accesses always conflict.  */
1420   if (ref1->volatile_p
1421       && ref2->volatile_p)
1422     return true;
1423 
1424   /* Defer to simple offset based disambiguation if we have
1425      references based on two decls.  Do this before defering to
1426      TBAA to handle must-alias cases in conformance with the
1427      GCC extension of allowing type-punning through unions.  */
1428   var1_p = DECL_P (base1);
1429   var2_p = DECL_P (base2);
1430   if (var1_p && var2_p)
1431     return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
1432 				  ref2->ref, base2, offset2, max_size2);
1433 
1434   /* Handle restrict based accesses.
1435      ???  ao_ref_base strips inner MEM_REF [&decl], recover from that
1436      here.  */
1437   tree rbase1 = base1;
1438   tree rbase2 = base2;
1439   if (var1_p)
1440     {
1441       rbase1 = ref1->ref;
1442       if (rbase1)
1443 	while (handled_component_p (rbase1))
1444 	  rbase1 = TREE_OPERAND (rbase1, 0);
1445     }
1446   if (var2_p)
1447     {
1448       rbase2 = ref2->ref;
1449       if (rbase2)
1450 	while (handled_component_p (rbase2))
1451 	  rbase2 = TREE_OPERAND (rbase2, 0);
1452     }
1453   if (rbase1 && rbase2
1454       && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
1455       && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
1456       /* If the accesses are in the same restrict clique... */
1457       && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
1458       /* But based on different pointers they do not alias.  */
1459       && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
1460     return false;
1461 
1462   ind1_p = (TREE_CODE (base1) == MEM_REF
1463 	    || TREE_CODE (base1) == TARGET_MEM_REF);
1464   ind2_p = (TREE_CODE (base2) == MEM_REF
1465 	    || TREE_CODE (base2) == TARGET_MEM_REF);
1466 
1467   /* Canonicalize the pointer-vs-decl case.  */
1468   if (ind1_p && var2_p)
1469     {
1470       std::swap (offset1, offset2);
1471       std::swap (max_size1, max_size2);
1472       std::swap (base1, base2);
1473       std::swap (ref1, ref2);
1474       var1_p = true;
1475       ind1_p = false;
1476       var2_p = false;
1477       ind2_p = true;
1478     }
1479 
1480   /* First defer to TBAA if possible.  */
1481   if (tbaa_p
1482       && flag_strict_aliasing
1483       && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
1484 				 ao_ref_alias_set (ref2)))
1485     return false;
1486 
1487   /* If the reference is based on a pointer that points to memory
1488      that may not be written to then the other reference cannot possibly
1489      clobber it.  */
1490   if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
1491        && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
1492       || (ind1_p
1493 	  && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
1494 	  && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
1495     return false;
1496 
1497   /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators.  */
1498   if (var1_p && ind2_p)
1499     return indirect_ref_may_alias_decl_p (ref2->ref, base2,
1500 					  offset2, max_size2,
1501 					  ao_ref_alias_set (ref2),
1502 					  ao_ref_base_alias_set (ref2),
1503 					  ref1->ref, base1,
1504 					  offset1, max_size1,
1505 					  ao_ref_alias_set (ref1),
1506 					  ao_ref_base_alias_set (ref1),
1507 					  tbaa_p);
1508   else if (ind1_p && ind2_p)
1509     return indirect_refs_may_alias_p (ref1->ref, base1,
1510 				      offset1, max_size1,
1511 				      ao_ref_alias_set (ref1),
1512 				      ao_ref_base_alias_set (ref1),
1513 				      ref2->ref, base2,
1514 				      offset2, max_size2,
1515 				      ao_ref_alias_set (ref2),
1516 				      ao_ref_base_alias_set (ref2),
1517 				      tbaa_p);
1518 
1519   gcc_unreachable ();
1520 }
1521 
1522 static bool
1523 refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
1524 {
1525   ao_ref r1;
1526   ao_ref_init (&r1, ref1);
1527   return refs_may_alias_p_1 (&r1, ref2, tbaa_p);
1528 }
1529 
1530 bool
1531 refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
1532 {
1533   ao_ref r1, r2;
1534   bool res;
1535   ao_ref_init (&r1, ref1);
1536   ao_ref_init (&r2, ref2);
1537   res = refs_may_alias_p_1 (&r1, &r2, tbaa_p);
1538   if (res)
1539     ++alias_stats.refs_may_alias_p_may_alias;
1540   else
1541     ++alias_stats.refs_may_alias_p_no_alias;
1542   return res;
1543 }
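/* A minimal usage sketch for the public overload above (illustrative;
   STORE_STMT and LOAD_STMT are hypothetical statements):

     tree lhs = gimple_assign_lhs (store_stmt);
     tree rhs = gimple_assign_rhs1 (load_stmt);
     if (!refs_may_alias_p (lhs, rhs, true))
       ;  // The accesses are independent; reordering them is safe.

   Passing false for TBAA_P disables type-based disambiguation, as the
   dependence wrappers below do.  */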
1544 
1545 /* Returns true if there is an anti-dependence for the STORE that
1546    executes after the LOAD.  */
1547 
1548 bool
1549 refs_anti_dependent_p (tree load, tree store)
1550 {
1551   ao_ref r1, r2;
1552   ao_ref_init (&r1, load);
1553   ao_ref_init (&r2, store);
1554   return refs_may_alias_p_1 (&r1, &r2, false);
1555 }
1556 
1557 /* Returns true if there is an output dependence for the stores
1558    STORE1 and STORE2.  */
1559 
1560 bool
1561 refs_output_dependent_p (tree store1, tree store2)
1562 {
1563   ao_ref r1, r2;
1564   ao_ref_init (&r1, store1);
1565   ao_ref_init (&r2, store2);
1566   return refs_may_alias_p_1 (&r1, &r2, false);
1567 }
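/* Both dependence predicates above are thin wrappers around the oracle
   with TBAA disabled.  A hedged usage sketch (hypothetical statements):

     if (refs_anti_dependent_p (gimple_assign_rhs1 (load_stmt),
				gimple_assign_lhs (store_stmt)))
       ;  // The store may not be moved before the load.
 */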
1568 
1569 /* If the call CALL may use the memory reference REF return true,
1570    otherwise return false.  */
1571 
1572 static bool
1573 ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
1574 {
1575   tree base, callee;
1576   unsigned i;
1577   int flags = gimple_call_flags (call);
1578 
1579   /* Const functions without a static chain do not implicitly use memory.  */
1580   if (!gimple_call_chain (call)
1581       && (flags & (ECF_CONST|ECF_NOVOPS)))
1582     goto process_args;
1583 
1584   base = ao_ref_base (ref);
1585   if (!base)
1586     return true;
1587 
1588   /* A call that is not without side-effects might involve volatile
1589      accesses and thus conflicts with all other volatile accesses.  */
1590   if (ref->volatile_p)
1591     return true;
1592 
1593   /* If the reference is based on a decl that is not aliased the call
1594      cannot possibly use it.  */
1595   if (DECL_P (base)
1596       && !may_be_aliased (base)
1597       /* But local statics can be used through recursion.  */
1598       && !is_global_var (base))
1599     goto process_args;
1600 
1601   callee = gimple_call_fndecl (call);
1602 
1603   /* Handle those builtin functions explicitly that do not act as
1604      escape points.  See tree-ssa-structalias.c:find_func_aliases
1605      for the list of builtins we might need to handle here.  */
1606   if (callee != NULL_TREE
1607       && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1608     switch (DECL_FUNCTION_CODE (callee))
1609       {
1610 	/* All the following functions read memory pointed to by
1611 	   their second argument.  strcat/strncat additionally
1612 	   read memory pointed to by the first argument.  */
1613 	case BUILT_IN_STRCAT:
1614 	case BUILT_IN_STRNCAT:
1615 	  {
1616 	    ao_ref dref;
1617 	    ao_ref_init_from_ptr_and_size (&dref,
1618 					   gimple_call_arg (call, 0),
1619 					   NULL_TREE);
1620 	    if (refs_may_alias_p_1 (&dref, ref, false))
1621 	      return true;
1622 	  }
1623 	  /* FALLTHRU */
1624 	case BUILT_IN_STRCPY:
1625 	case BUILT_IN_STRNCPY:
1626 	case BUILT_IN_MEMCPY:
1627 	case BUILT_IN_MEMMOVE:
1628 	case BUILT_IN_MEMPCPY:
1629 	case BUILT_IN_STPCPY:
1630 	case BUILT_IN_STPNCPY:
1631 	case BUILT_IN_TM_MEMCPY:
1632 	case BUILT_IN_TM_MEMMOVE:
1633 	  {
1634 	    ao_ref dref;
1635 	    tree size = NULL_TREE;
1636 	    if (gimple_call_num_args (call) == 3)
1637 	      size = gimple_call_arg (call, 2);
1638 	    ao_ref_init_from_ptr_and_size (&dref,
1639 					   gimple_call_arg (call, 1),
1640 					   size);
1641 	    return refs_may_alias_p_1 (&dref, ref, false);
1642 	  }
1643 	case BUILT_IN_STRCAT_CHK:
1644 	case BUILT_IN_STRNCAT_CHK:
1645 	  {
1646 	    ao_ref dref;
1647 	    ao_ref_init_from_ptr_and_size (&dref,
1648 					   gimple_call_arg (call, 0),
1649 					   NULL_TREE);
1650 	    if (refs_may_alias_p_1 (&dref, ref, false))
1651 	      return true;
1652 	  }
1653 	  /* FALLTHRU */
1654 	case BUILT_IN_STRCPY_CHK:
1655 	case BUILT_IN_STRNCPY_CHK:
1656 	case BUILT_IN_MEMCPY_CHK:
1657 	case BUILT_IN_MEMMOVE_CHK:
1658 	case BUILT_IN_MEMPCPY_CHK:
1659 	case BUILT_IN_STPCPY_CHK:
1660 	case BUILT_IN_STPNCPY_CHK:
1661 	  {
1662 	    ao_ref dref;
1663 	    tree size = NULL_TREE;
1664 	    if (gimple_call_num_args (call) == 4)
1665 	      size = gimple_call_arg (call, 2);
1666 	    ao_ref_init_from_ptr_and_size (&dref,
1667 					   gimple_call_arg (call, 1),
1668 					   size);
1669 	    return refs_may_alias_p_1 (&dref, ref, false);
1670 	  }
1671 	case BUILT_IN_BCOPY:
1672 	  {
1673 	    ao_ref dref;
1674 	    tree size = gimple_call_arg (call, 2);
1675 	    ao_ref_init_from_ptr_and_size (&dref,
1676 					   gimple_call_arg (call, 0),
1677 					   size);
1678 	    return refs_may_alias_p_1 (&dref, ref, false);
1679 	  }
1680 
1681 	/* The following functions read memory pointed to by their
1682 	   first argument.  */
1683 	CASE_BUILT_IN_TM_LOAD (1):
1684 	CASE_BUILT_IN_TM_LOAD (2):
1685 	CASE_BUILT_IN_TM_LOAD (4):
1686 	CASE_BUILT_IN_TM_LOAD (8):
1687 	CASE_BUILT_IN_TM_LOAD (FLOAT):
1688 	CASE_BUILT_IN_TM_LOAD (DOUBLE):
1689 	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
1690 	CASE_BUILT_IN_TM_LOAD (M64):
1691 	CASE_BUILT_IN_TM_LOAD (M128):
1692 	CASE_BUILT_IN_TM_LOAD (M256):
1693 	case BUILT_IN_TM_LOG:
1694 	case BUILT_IN_TM_LOG_1:
1695 	case BUILT_IN_TM_LOG_2:
1696 	case BUILT_IN_TM_LOG_4:
1697 	case BUILT_IN_TM_LOG_8:
1698 	case BUILT_IN_TM_LOG_FLOAT:
1699 	case BUILT_IN_TM_LOG_DOUBLE:
1700 	case BUILT_IN_TM_LOG_LDOUBLE:
1701 	case BUILT_IN_TM_LOG_M64:
1702 	case BUILT_IN_TM_LOG_M128:
1703 	case BUILT_IN_TM_LOG_M256:
1704 	  return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);
1705 
1706 	/* These read memory pointed to by the first argument.  */
1707 	case BUILT_IN_STRDUP:
1708 	case BUILT_IN_STRNDUP:
1709 	case BUILT_IN_REALLOC:
1710 	  {
1711 	    ao_ref dref;
1712 	    tree size = NULL_TREE;
1713 	    if (gimple_call_num_args (call) == 2)
1714 	      size = gimple_call_arg (call, 1);
1715 	    ao_ref_init_from_ptr_and_size (&dref,
1716 					   gimple_call_arg (call, 0),
1717 					   size);
1718 	    return refs_may_alias_p_1 (&dref, ref, false);
1719 	  }
1720 	/* These read memory pointed to by the first argument.  */
1721 	case BUILT_IN_INDEX:
1722 	case BUILT_IN_STRCHR:
1723 	case BUILT_IN_STRRCHR:
1724 	  {
1725 	    ao_ref dref;
1726 	    ao_ref_init_from_ptr_and_size (&dref,
1727 					   gimple_call_arg (call, 0),
1728 					   NULL_TREE);
1729 	    return refs_may_alias_p_1 (&dref, ref, false);
1730 	  }
1731 	/* These read memory pointed to by the first argument with size
1732 	   in the third argument.  */
1733 	case BUILT_IN_MEMCHR:
1734 	  {
1735 	    ao_ref dref;
1736 	    ao_ref_init_from_ptr_and_size (&dref,
1737 					   gimple_call_arg (call, 0),
1738 					   gimple_call_arg (call, 2));
1739 	    return refs_may_alias_p_1 (&dref, ref, false);
1740 	  }
1741 	/* These read memory pointed to by the first and second arguments.  */
1742 	case BUILT_IN_STRSTR:
1743 	case BUILT_IN_STRPBRK:
1744 	  {
1745 	    ao_ref dref;
1746 	    ao_ref_init_from_ptr_and_size (&dref,
1747 					   gimple_call_arg (call, 0),
1748 					   NULL_TREE);
1749 	    if (refs_may_alias_p_1 (&dref, ref, false))
1750 	      return true;
1751 	    ao_ref_init_from_ptr_and_size (&dref,
1752 					   gimple_call_arg (call, 1),
1753 					   NULL_TREE);
1754 	    return refs_may_alias_p_1 (&dref, ref, false);
1755 	  }
1756 
1757 	/* The following builtins do not read from memory.  */
1758 	case BUILT_IN_FREE:
1759 	case BUILT_IN_MALLOC:
1760 	case BUILT_IN_POSIX_MEMALIGN:
1761 	case BUILT_IN_ALIGNED_ALLOC:
1762 	case BUILT_IN_CALLOC:
1763 	CASE_BUILT_IN_ALLOCA:
1764 	case BUILT_IN_STACK_SAVE:
1765 	case BUILT_IN_STACK_RESTORE:
1766 	case BUILT_IN_MEMSET:
1767 	case BUILT_IN_TM_MEMSET:
1768 	case BUILT_IN_MEMSET_CHK:
1769 	case BUILT_IN_FREXP:
1770 	case BUILT_IN_FREXPF:
1771 	case BUILT_IN_FREXPL:
1772 	case BUILT_IN_GAMMA_R:
1773 	case BUILT_IN_GAMMAF_R:
1774 	case BUILT_IN_GAMMAL_R:
1775 	case BUILT_IN_LGAMMA_R:
1776 	case BUILT_IN_LGAMMAF_R:
1777 	case BUILT_IN_LGAMMAL_R:
1778 	case BUILT_IN_MODF:
1779 	case BUILT_IN_MODFF:
1780 	case BUILT_IN_MODFL:
1781 	case BUILT_IN_REMQUO:
1782 	case BUILT_IN_REMQUOF:
1783 	case BUILT_IN_REMQUOL:
1784 	case BUILT_IN_SINCOS:
1785 	case BUILT_IN_SINCOSF:
1786 	case BUILT_IN_SINCOSL:
1787 	case BUILT_IN_ASSUME_ALIGNED:
1788 	case BUILT_IN_VA_END:
1789 	  return false;
1790 	/* __sync_* builtins and some OpenMP builtins act as threading
1791 	   barriers.  */
1792 #undef DEF_SYNC_BUILTIN
1793 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
1794 #include "sync-builtins.def"
1795 #undef DEF_SYNC_BUILTIN
1796 	case BUILT_IN_GOMP_ATOMIC_START:
1797 	case BUILT_IN_GOMP_ATOMIC_END:
1798 	case BUILT_IN_GOMP_BARRIER:
1799 	case BUILT_IN_GOMP_BARRIER_CANCEL:
1800 	case BUILT_IN_GOMP_TASKWAIT:
1801 	case BUILT_IN_GOMP_TASKGROUP_END:
1802 	case BUILT_IN_GOMP_CRITICAL_START:
1803 	case BUILT_IN_GOMP_CRITICAL_END:
1804 	case BUILT_IN_GOMP_CRITICAL_NAME_START:
1805 	case BUILT_IN_GOMP_CRITICAL_NAME_END:
1806 	case BUILT_IN_GOMP_LOOP_END:
1807 	case BUILT_IN_GOMP_LOOP_END_CANCEL:
1808 	case BUILT_IN_GOMP_ORDERED_START:
1809 	case BUILT_IN_GOMP_ORDERED_END:
1810 	case BUILT_IN_GOMP_SECTIONS_END:
1811 	case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
1812 	case BUILT_IN_GOMP_SINGLE_COPY_START:
1813 	case BUILT_IN_GOMP_SINGLE_COPY_END:
1814 	  return true;
1815 
1816 	default:
1817 	  /* Fallthru to general call handling.  */;
1818       }
1819 
1820   /* Check if base is a global static variable that is not read
1821      by the function.  */
1822   if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
1823     {
1824       struct cgraph_node *node = cgraph_node::get (callee);
1825       bitmap not_read;
1826 
1827       /* FIXME: Callee can be an OMP builtin that does not have a call graph
1828 	 node yet.  We should enforce that there are nodes for all decls in the
1829 	 IL and remove this check instead.  */
1830       if (node
1831 	  && (not_read = ipa_reference_get_not_read_global (node))
1832 	  && bitmap_bit_p (not_read, ipa_reference_var_uid (base)))
1833 	goto process_args;
1834     }
1835 
1836   /* Check if the base variable is call-used.  */
1837   if (DECL_P (base))
1838     {
1839       if (pt_solution_includes (gimple_call_use_set (call), base))
1840 	return true;
1841     }
1842   else if ((TREE_CODE (base) == MEM_REF
1843 	    || TREE_CODE (base) == TARGET_MEM_REF)
1844 	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
1845     {
1846       struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
1847       if (!pi)
1848 	return true;
1849 
1850       if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
1851 	return true;
1852     }
1853   else
1854     return true;
1855 
1856   /* Inspect call arguments for passed-by-value aliases.  */
1857 process_args:
1858   for (i = 0; i < gimple_call_num_args (call); ++i)
1859     {
1860       tree op = gimple_call_arg (call, i);
1861       int flags = gimple_call_arg_flags (call, i);
1862 
1863       if (flags & EAF_UNUSED)
1864 	continue;
1865 
1866       if (TREE_CODE (op) == WITH_SIZE_EXPR)
1867 	op = TREE_OPERAND (op, 0);
1868 
1869       if (TREE_CODE (op) != SSA_NAME
1870 	  && !is_gimple_min_invariant (op))
1871 	{
1872 	  ao_ref r;
1873 	  ao_ref_init (&r, op);
1874 	  if (refs_may_alias_p_1 (&r, ref, tbaa_p))
1875 	    return true;
1876 	}
1877     }
1878 
1879   return false;
1880 }
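/* Illustrative example for the builtin handling above (not part of the
   original file): for the call

     memcpy (d, s, 32);

   the BUILT_IN_MEMCPY case builds an ao_ref covering the 32 bytes at S
   (the second argument) and intersects it with REF; the destination D
   is irrelevant for a use-query, so a REF based only on D yields
   false.  */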
1881 
1882 static bool
1883 ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
1884 {
1885   bool res;
1886   res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
1887   if (res)
1888     ++alias_stats.ref_maybe_used_by_call_p_may_alias;
1889   else
1890     ++alias_stats.ref_maybe_used_by_call_p_no_alias;
1891   return res;
1892 }
1893 
1894 
1895 /* If the statement STMT may use the memory reference REF return
1896    true, otherwise return false.  */
1897 
1898 bool
1899 ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
1900 {
1901   if (is_gimple_assign (stmt))
1902     {
1903       tree rhs;
1904 
1905       /* Assignments that reference memory are always single.  */
1906       if (!gimple_assign_single_p (stmt))
1907 	return false;
1908 
1909       rhs = gimple_assign_rhs1 (stmt);
1910       if (is_gimple_reg (rhs)
1911 	  || is_gimple_min_invariant (rhs)
1912 	  || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
1913 	return false;
1914 
1915       return refs_may_alias_p (rhs, ref, tbaa_p);
1916     }
1917   else if (is_gimple_call (stmt))
1918     return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref, tbaa_p);
1919   else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
1920     {
1921       tree retval = gimple_return_retval (return_stmt);
1922       if (retval
1923 	  && TREE_CODE (retval) != SSA_NAME
1924 	  && !is_gimple_min_invariant (retval)
1925 	  && refs_may_alias_p (retval, ref, tbaa_p))
1926 	return true;
1927       /* If ref escapes the function then the return acts as a use.  */
1928       tree base = ao_ref_base (ref);
1929       if (!base)
1930 	;
1931       else if (DECL_P (base))
1932 	return is_global_var (base);
1933       else if (TREE_CODE (base) == MEM_REF
1934 	       || TREE_CODE (base) == TARGET_MEM_REF)
1935 	return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
1936       return false;
1937     }
1938 
1939   return true;
1940 }
1941 
1942 bool
1943 ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
1944 {
1945   ao_ref r;
1946   ao_ref_init (&r, ref);
1947   return ref_maybe_used_by_stmt_p (stmt, &r, tbaa_p);
1948 }
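/* A minimal usage sketch (illustrative; OTHER_STMT and STORE_STMT are
   hypothetical): dead-store style clients ask whether an intervening
   statement reads the stored-to memory:

     tree lhs = gimple_assign_lhs (store_stmt);
     if (!ref_maybe_used_by_stmt_p (other_stmt, lhs, true))
       ;  // other_stmt cannot observe the value written by store_stmt
 */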
1949 
1950 /* If the call in statement CALL may clobber the memory reference REF
1951    return true, otherwise return false.  */
1952 
1953 bool
1954 call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref)
1955 {
1956   tree base;
1957   tree callee;
1958 
1959   /* If the call is pure or const it cannot clobber anything.  */
1960   if (gimple_call_flags (call)
1961       & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
1962     return false;
1963   if (gimple_call_internal_p (call))
1964     switch (gimple_call_internal_fn (call))
1965       {
1966 	/* Treat these internal calls like ECF_PURE for aliasing,
1967 	   they don't write to any memory the program should care about.
1968 	   They have important other side-effects, and read memory,
1969 	   so can't be ECF_NOVOPS.  */
1970       case IFN_UBSAN_NULL:
1971       case IFN_UBSAN_BOUNDS:
1972       case IFN_UBSAN_VPTR:
1973       case IFN_UBSAN_OBJECT_SIZE:
1974       case IFN_UBSAN_PTR:
1975       case IFN_ASAN_CHECK:
1976 	return false;
1977       default:
1978 	break;
1979       }
1980 
1981   base = ao_ref_base (ref);
1982   if (!base)
1983     return true;
1984 
1985   if (TREE_CODE (base) == SSA_NAME
1986       || CONSTANT_CLASS_P (base))
1987     return false;
1988 
1989   /* A call that is not without side-effects might involve volatile
1990      accesses and thus conflicts with all other volatile accesses.  */
1991   if (ref->volatile_p)
1992     return true;
1993 
1994   /* If the reference is based on a decl that is not aliased the call
1995      cannot possibly clobber it.  */
1996   if (DECL_P (base)
1997       && !may_be_aliased (base)
1998       /* But local non-readonly statics can be modified through recursion
1999          or the call may implement a threading barrier, which we must
2000 	 treat as may-def.  */
2001       && (TREE_READONLY (base)
2002 	  || !is_global_var (base)))
2003     return false;
2004 
2005   /* If the reference is based on a pointer that points to memory
2006      that may not be written to then the call cannot possibly clobber it.  */
2007   if ((TREE_CODE (base) == MEM_REF
2008        || TREE_CODE (base) == TARGET_MEM_REF)
2009       && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
2010       && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
2011     return false;
2012 
2013   callee = gimple_call_fndecl (call);
2014 
2015   /* Handle those builtin functions explicitly that do not act as
2016      escape points.  See tree-ssa-structalias.c:find_func_aliases
2017      for the list of builtins we might need to handle here.  */
2018   if (callee != NULL_TREE
2019       && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2020     switch (DECL_FUNCTION_CODE (callee))
2021       {
2022 	/* All the following functions clobber memory pointed to by
2023 	   their first argument.  */
2024 	case BUILT_IN_STRCPY:
2025 	case BUILT_IN_STRNCPY:
2026 	case BUILT_IN_MEMCPY:
2027 	case BUILT_IN_MEMMOVE:
2028 	case BUILT_IN_MEMPCPY:
2029 	case BUILT_IN_STPCPY:
2030 	case BUILT_IN_STPNCPY:
2031 	case BUILT_IN_STRCAT:
2032 	case BUILT_IN_STRNCAT:
2033 	case BUILT_IN_MEMSET:
2034 	case BUILT_IN_TM_MEMSET:
2035 	CASE_BUILT_IN_TM_STORE (1):
2036 	CASE_BUILT_IN_TM_STORE (2):
2037 	CASE_BUILT_IN_TM_STORE (4):
2038 	CASE_BUILT_IN_TM_STORE (8):
2039 	CASE_BUILT_IN_TM_STORE (FLOAT):
2040 	CASE_BUILT_IN_TM_STORE (DOUBLE):
2041 	CASE_BUILT_IN_TM_STORE (LDOUBLE):
2042 	CASE_BUILT_IN_TM_STORE (M64):
2043 	CASE_BUILT_IN_TM_STORE (M128):
2044 	CASE_BUILT_IN_TM_STORE (M256):
2045 	case BUILT_IN_TM_MEMCPY:
2046 	case BUILT_IN_TM_MEMMOVE:
2047 	  {
2048 	    ao_ref dref;
2049 	    tree size = NULL_TREE;
2050 	    /* Don't pass in size for strncat: the clobbered region is
2051 	       not the n bytes at dest but the n + 1 bytes starting
2052 	       at dest + strlen (dest), and strlen (dest) isn't
2053 	       known.  */
2054 	    if (gimple_call_num_args (call) == 3
2055 		&& DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
2056 	      size = gimple_call_arg (call, 2);
2057 	    ao_ref_init_from_ptr_and_size (&dref,
2058 					   gimple_call_arg (call, 0),
2059 					   size);
2060 	    return refs_may_alias_p_1 (&dref, ref, false);
2061 	  }
2062 	case BUILT_IN_STRCPY_CHK:
2063 	case BUILT_IN_STRNCPY_CHK:
2064 	case BUILT_IN_MEMCPY_CHK:
2065 	case BUILT_IN_MEMMOVE_CHK:
2066 	case BUILT_IN_MEMPCPY_CHK:
2067 	case BUILT_IN_STPCPY_CHK:
2068 	case BUILT_IN_STPNCPY_CHK:
2069 	case BUILT_IN_STRCAT_CHK:
2070 	case BUILT_IN_STRNCAT_CHK:
2071 	case BUILT_IN_MEMSET_CHK:
2072 	  {
2073 	    ao_ref dref;
2074 	    tree size = NULL_TREE;
2075 	    /* Don't pass in size for __strncat_chk: the clobbered
2076 	       region is not the n bytes at dest but the n + 1 bytes
2077 	       starting at dest + strlen (dest), and strlen (dest)
2078 	       isn't known.  */
2079 	    if (gimple_call_num_args (call) == 4
2080 		&& DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
2081 	      size = gimple_call_arg (call, 2);
2082 	    ao_ref_init_from_ptr_and_size (&dref,
2083 					   gimple_call_arg (call, 0),
2084 					   size);
2085 	    return refs_may_alias_p_1 (&dref, ref, false);
2086 	  }
2087 	case BUILT_IN_BCOPY:
2088 	  {
2089 	    ao_ref dref;
2090 	    tree size = gimple_call_arg (call, 2);
2091 	    ao_ref_init_from_ptr_and_size (&dref,
2092 					   gimple_call_arg (call, 1),
2093 					   size);
2094 	    return refs_may_alias_p_1 (&dref, ref, false);
2095 	  }
2096 	/* Allocating memory does not have any side-effects apart from
2097 	   being the definition point for the pointer.  */
2098 	case BUILT_IN_MALLOC:
2099 	case BUILT_IN_ALIGNED_ALLOC:
2100 	case BUILT_IN_CALLOC:
2101 	case BUILT_IN_STRDUP:
2102 	case BUILT_IN_STRNDUP:
2103 	  /* Unix98 specifies that errno is set on allocation failure.  */
2104 	  if (flag_errno_math
2105 	      && targetm.ref_may_alias_errno (ref))
2106 	    return true;
2107 	  return false;
2108 	case BUILT_IN_STACK_SAVE:
2109 	CASE_BUILT_IN_ALLOCA:
2110 	case BUILT_IN_ASSUME_ALIGNED:
2111 	  return false;
2112 	/* But posix_memalign stores a pointer into the memory pointed to
2113 	   by its first argument.  */
2114 	case BUILT_IN_POSIX_MEMALIGN:
2115 	  {
2116 	    tree ptrptr = gimple_call_arg (call, 0);
2117 	    ao_ref dref;
2118 	    ao_ref_init_from_ptr_and_size (&dref, ptrptr,
2119 					   TYPE_SIZE_UNIT (ptr_type_node));
2120 	    return (refs_may_alias_p_1 (&dref, ref, false)
2121 		    || (flag_errno_math
2122 			&& targetm.ref_may_alias_errno (ref)));
2123 	  }
2124 	/* Freeing memory kills the pointed-to memory.  More importantly
2125 	   the call has to serve as a barrier for moving loads and stores
2126 	   across it.  */
2127 	case BUILT_IN_FREE:
2128 	case BUILT_IN_VA_END:
2129 	  {
2130 	    tree ptr = gimple_call_arg (call, 0);
2131 	    return ptr_deref_may_alias_ref_p_1 (ptr, ref);
2132 	  }
2133 	/* Realloc serves both as allocation point and deallocation point.  */
2134 	case BUILT_IN_REALLOC:
2135 	  {
2136 	    tree ptr = gimple_call_arg (call, 0);
2137 	    /* Unix98 specifies that errno is set on allocation failure.  */
2138 	    return ((flag_errno_math
2139 		     && targetm.ref_may_alias_errno (ref))
2140 		    || ptr_deref_may_alias_ref_p_1 (ptr, ref));
2141 	  }
2142 	case BUILT_IN_GAMMA_R:
2143 	case BUILT_IN_GAMMAF_R:
2144 	case BUILT_IN_GAMMAL_R:
2145 	case BUILT_IN_LGAMMA_R:
2146 	case BUILT_IN_LGAMMAF_R:
2147 	case BUILT_IN_LGAMMAL_R:
2148 	  {
2149 	    tree out = gimple_call_arg (call, 1);
2150 	    if (ptr_deref_may_alias_ref_p_1 (out, ref))
2151 	      return true;
2152 	    if (flag_errno_math)
2153 	      break;
2154 	    return false;
2155 	  }
2156 	case BUILT_IN_FREXP:
2157 	case BUILT_IN_FREXPF:
2158 	case BUILT_IN_FREXPL:
2159 	case BUILT_IN_MODF:
2160 	case BUILT_IN_MODFF:
2161 	case BUILT_IN_MODFL:
2162 	  {
2163 	    tree out = gimple_call_arg (call, 1);
2164 	    return ptr_deref_may_alias_ref_p_1 (out, ref);
2165 	  }
2166 	case BUILT_IN_REMQUO:
2167 	case BUILT_IN_REMQUOF:
2168 	case BUILT_IN_REMQUOL:
2169 	  {
2170 	    tree out = gimple_call_arg (call, 2);
2171 	    if (ptr_deref_may_alias_ref_p_1 (out, ref))
2172 	      return true;
2173 	    if (flag_errno_math)
2174 	      break;
2175 	    return false;
2176 	  }
2177 	case BUILT_IN_SINCOS:
2178 	case BUILT_IN_SINCOSF:
2179 	case BUILT_IN_SINCOSL:
2180 	  {
2181 	    tree sin = gimple_call_arg (call, 1);
2182 	    tree cos = gimple_call_arg (call, 2);
2183 	    return (ptr_deref_may_alias_ref_p_1 (sin, ref)
2184 		    || ptr_deref_may_alias_ref_p_1 (cos, ref));
2185 	  }
2186 	/* __sync_* builtins and some OpenMP builtins act as threading
2187 	   barriers.  */
2188 #undef DEF_SYNC_BUILTIN
2189 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2190 #include "sync-builtins.def"
2191 #undef DEF_SYNC_BUILTIN
2192 	case BUILT_IN_GOMP_ATOMIC_START:
2193 	case BUILT_IN_GOMP_ATOMIC_END:
2194 	case BUILT_IN_GOMP_BARRIER:
2195 	case BUILT_IN_GOMP_BARRIER_CANCEL:
2196 	case BUILT_IN_GOMP_TASKWAIT:
2197 	case BUILT_IN_GOMP_TASKGROUP_END:
2198 	case BUILT_IN_GOMP_CRITICAL_START:
2199 	case BUILT_IN_GOMP_CRITICAL_END:
2200 	case BUILT_IN_GOMP_CRITICAL_NAME_START:
2201 	case BUILT_IN_GOMP_CRITICAL_NAME_END:
2202 	case BUILT_IN_GOMP_LOOP_END:
2203 	case BUILT_IN_GOMP_LOOP_END_CANCEL:
2204 	case BUILT_IN_GOMP_ORDERED_START:
2205 	case BUILT_IN_GOMP_ORDERED_END:
2206 	case BUILT_IN_GOMP_SECTIONS_END:
2207 	case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2208 	case BUILT_IN_GOMP_SINGLE_COPY_START:
2209 	case BUILT_IN_GOMP_SINGLE_COPY_END:
2210 	  return true;
2211 	default:
2212 	  /* Fallthru to general call handling.  */;
2213       }
2214 
2215   /* Check if base is a global static variable that is not written
2216      by the function.  */
2217   if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2218     {
2219       struct cgraph_node *node = cgraph_node::get (callee);
2220       bitmap not_written;
2221 
2222       if (node
2223 	  && (not_written = ipa_reference_get_not_written_global (node))
2224 	  && bitmap_bit_p (not_written, ipa_reference_var_uid (base)))
2225 	return false;
2226     }
2227 
2228   /* Check if the base variable is call-clobbered.  */
2229   if (DECL_P (base))
2230     return pt_solution_includes (gimple_call_clobber_set (call), base);
2231   else if ((TREE_CODE (base) == MEM_REF
2232 	    || TREE_CODE (base) == TARGET_MEM_REF)
2233 	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2234     {
2235       struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2236       if (!pi)
2237 	return true;
2238 
2239       return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
2240     }
2241 
2242   return true;
2243 }
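/* Illustrative examples for the cases above (not part of the original
   file):

     p = malloc (n);     // clobbers nothing, except errno with -fmath-errno
     memset (q, 0, 16);  // clobbers exactly the 16 bytes at q
     foo ();             // unknown call: handled by the points-to checks

   Note the errno handling: with -fmath-errno an allocation failure may
   set errno, so a REF that may alias errno is treated as clobbered.  */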
2244 
2245 /* If the call in statement CALL may clobber the memory reference REF
2246    return true, otherwise return false.  */
2247 
2248 bool
2249 call_may_clobber_ref_p (gcall *call, tree ref)
2250 {
2251   bool res;
2252   ao_ref r;
2253   ao_ref_init (&r, ref);
2254   res = call_may_clobber_ref_p_1 (call, &r);
2255   if (res)
2256     ++alias_stats.call_may_clobber_ref_p_may_alias;
2257   else
2258     ++alias_stats.call_may_clobber_ref_p_no_alias;
2259   return res;
2260 }
2261 
2262 
2263 /* If the statement STMT may clobber the memory reference REF return true,
2264    otherwise return false.  */
2265 
2266 bool
2267 stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
2268 {
2269   if (is_gimple_call (stmt))
2270     {
2271       tree lhs = gimple_call_lhs (stmt);
2272       if (lhs
2273 	  && TREE_CODE (lhs) != SSA_NAME)
2274 	{
2275 	  ao_ref r;
2276 	  ao_ref_init (&r, lhs);
2277 	  if (refs_may_alias_p_1 (ref, &r, tbaa_p))
2278 	    return true;
2279 	}
2280 
2281       return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref);
2282     }
2283   else if (gimple_assign_single_p (stmt))
2284     {
2285       tree lhs = gimple_assign_lhs (stmt);
2286       if (TREE_CODE (lhs) != SSA_NAME)
2287 	{
2288 	  ao_ref r;
2289 	  ao_ref_init (&r, lhs);
2290 	  return refs_may_alias_p_1 (ref, &r, tbaa_p);
2291 	}
2292     }
2293   else if (gimple_code (stmt) == GIMPLE_ASM)
2294     return true;
2295 
2296   return false;
2297 }
2298 
2299 bool
2300 stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
2301 {
2302   ao_ref r;
2303   ao_ref_init (&r, ref);
2304   return stmt_may_clobber_ref_p_1 (stmt, &r, tbaa_p);
2305 }
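/* A minimal usage sketch (illustrative; ignores PHIs and limits, uses
   hypothetical LOAD_STMT/REF_TREE): walk the virtual use-def chain to
   find the first statement that may provide the loaded value:

     gimple *def = SSA_NAME_DEF_STMT (gimple_vuse (load_stmt));
     while (!gimple_nop_p (def)
	    && gimple_code (def) != GIMPLE_PHI
	    && !stmt_may_clobber_ref_p (def, ref_tree, true))
       def = SSA_NAME_DEF_STMT (gimple_vuse (def));

   The walk_non_aliased_vuses machinery below implements this with PHI
   handling, translation hooks and limits.  */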
2306 
2307 /* Return true if store1 and store2 described by corresponding tuples
2308    <BASE, OFFSET, SIZE, MAX_SIZE> have the same size and store to the same
2309    address.  */
2310 
2311 static bool
2312 same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
2313 			 poly_int64 max_size1,
2314 			 tree base2, poly_int64 offset2, poly_int64 size2,
2315 			 poly_int64 max_size2)
2316 {
2317   /* Offsets need to be 0.  */
2318   if (maybe_ne (offset1, 0)
2319       || maybe_ne (offset2, 0))
2320     return false;
2321 
2322   bool base1_obj_p = SSA_VAR_P (base1);
2323   bool base2_obj_p = SSA_VAR_P (base2);
2324 
2325   /* Exactly one of the bases needs to be an object.  */
2326   if (base1_obj_p == base2_obj_p)
2327     return false;
2328   tree obj = base1_obj_p ? base1 : base2;
2329 
2330   /* And exactly one needs to be a MEM_REF.  */
2331   bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
2332   bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
2333   if (base1_memref_p == base2_memref_p)
2334     return false;
2335   tree memref = base1_memref_p ? base1 : base2;
2336 
2337   /* Sizes need to be valid.  */
2338   if (!known_size_p (max_size1)
2339       || !known_size_p (max_size2)
2340       || !known_size_p (size1)
2341       || !known_size_p (size2))
2342     return false;
2343 
2344   /* Max_size needs to match size.  */
2345   if (maybe_ne (max_size1, size1)
2346       || maybe_ne (max_size2, size2))
2347     return false;
2348 
2349   /* Sizes need to match.  */
2350   if (maybe_ne (size1, size2))
2351     return false;
2352 
2353 
2354   /* Check that memref is a store through a pointer with singleton points-to info.  */
2355   if (!integer_zerop (TREE_OPERAND (memref, 1)))
2356     return false;
2357   tree ptr = TREE_OPERAND (memref, 0);
2358   if (TREE_CODE (ptr) != SSA_NAME)
2359     return false;
2360   struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2361   unsigned int pt_uid;
2362   if (pi == NULL
2363       || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
2364     return false;
2365 
2366   /* Be conservative with non-call exceptions when the address might
2367      be NULL.  */
2368   if (cfun->can_throw_non_call_exceptions && pi->pt.null)
2369     return false;
2370 
2371   /* Check that ptr points relative to obj.  */
2372   unsigned int obj_uid = DECL_PT_UID (obj);
2373   if (obj_uid != pt_uid)
2374     return false;
2375 
2376   /* Check that the object size is the same as the store size.  That
2377      ensures that ptr points to the start of obj.  */
2378   return (DECL_SIZE (obj)
2379 	  && poly_int_tree_p (DECL_SIZE (obj))
2380 	  && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
2381 }
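/* An illustrative instance of the pattern matched above (hypothetical
   GIMPLE):

     char buf[16];              // obj, DECL_SIZE of 128 bits
     p_1 = &buf;                // points-to of p_1 is the singleton { buf }
     MEM[(char *)p_1] = ...;    // 16-byte store at offset 0
     buf = ...;                 // 16-byte store at offset 0

   Both stores cover all of BUF starting at offset zero, so they have
   the same address and size even though their bases differ.  */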
2382 
2383 /* If STMT kills the memory reference REF return true, otherwise
2384    return false.  */
2385 
2386 bool
2387 stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
2388 {
2389   if (!ao_ref_base (ref))
2390     return false;
2391 
2392   if (gimple_has_lhs (stmt)
2393       && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
2394       /* The assignment is not necessarily carried out if it can throw
2395 	 and we can catch it in the current function where we could inspect
2396 	 the previous value.
2397 	 ???  We only need to care about the RHS throwing.  For aggregate
2398 	 assignments or similar calls and non-call exceptions the LHS
2399 	 might throw as well.  */
2400       && !stmt_can_throw_internal (cfun, stmt))
2401     {
2402       tree lhs = gimple_get_lhs (stmt);
2403       /* If LHS is literally a base of the access we are done.  */
2404       if (ref->ref)
2405 	{
2406 	  tree base = ref->ref;
2407 	  tree innermost_dropped_array_ref = NULL_TREE;
2408 	  if (handled_component_p (base))
2409 	    {
2410 	      tree saved_lhs0 = NULL_TREE;
2411 	      if (handled_component_p (lhs))
2412 		{
2413 		  saved_lhs0 = TREE_OPERAND (lhs, 0);
2414 		  TREE_OPERAND (lhs, 0) = integer_zero_node;
2415 		}
2416 	      do
2417 		{
2418 		  /* Just compare the outermost handled component; if
2419 		     they are equal we have found a possible common
2420 		     base.  */
2421 		  tree saved_base0 = TREE_OPERAND (base, 0);
2422 		  TREE_OPERAND (base, 0) = integer_zero_node;
2423 		  bool res = operand_equal_p (lhs, base, 0);
2424 		  TREE_OPERAND (base, 0) = saved_base0;
2425 		  if (res)
2426 		    break;
2427 		  /* Remember if we drop an array-ref that we need to
2428 		     double-check not being at struct end.  */
2429 		  if (TREE_CODE (base) == ARRAY_REF
2430 		      || TREE_CODE (base) == ARRAY_RANGE_REF)
2431 		    innermost_dropped_array_ref = base;
2432 		  /* Otherwise drop handled components of the access.  */
2433 		  base = saved_base0;
2434 		}
2435 	      while (handled_component_p (base));
2436 	      if (saved_lhs0)
2437 		TREE_OPERAND (lhs, 0) = saved_lhs0;
2438 	    }
2439 	  /* Finally check if the lhs has the same address and size as the
2440 	     base candidate of the access.  Watch out if we have dropped
2441 	     an array-ref that was at struct end, this means ref->ref may
2442 	     be outside of the TYPE_SIZE of its base.  */
2443 	  if ((! innermost_dropped_array_ref
2444 	       || ! array_at_struct_end_p (innermost_dropped_array_ref))
2445 	      && (lhs == base
2446 		  || (((TYPE_SIZE (TREE_TYPE (lhs))
2447 			== TYPE_SIZE (TREE_TYPE (base)))
2448 		       || (TYPE_SIZE (TREE_TYPE (lhs))
2449 			   && TYPE_SIZE (TREE_TYPE (base))
2450 			   && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
2451 					       TYPE_SIZE (TREE_TYPE (base)),
2452 					       0)))
2453 		      && operand_equal_p (lhs, base,
2454 					  OEP_ADDRESS_OF
2455 					  | OEP_MATCH_SIDE_EFFECTS))))
2456 	    return true;
2457 	}
2458 
2459       /* Now look for non-literal equal bases, with the restriction
2460          of handling only constant offset and size.  */
2461       /* For a must-alias check we need to be able to constrain
2462 	 the access properly.  */
2463       if (!ref->max_size_known_p ())
2464 	return false;
2465       poly_int64 size, offset, max_size, ref_offset = ref->offset;
2466       bool reverse;
2467       tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
2468 					   &reverse);
2469       /* We can get MEM[symbol: sZ, index: D.8862_1] here,
2470 	 so base == ref->base does not always hold.  */
2471       if (base != ref->base)
2472 	{
2473 	  /* Try using points-to info.  */
2474 	  if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
2475 				       ref->offset, ref->size, ref->max_size))
2476 	    return true;
2477 
2478 	  /* If both base and ref->base are MEM_REFs, only compare the
2479 	     first operand, and if the second operand isn't equal constant,
2480 	     try to add the offsets into offset and ref_offset.  */
2481 	  if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
2482 	      && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
2483 	    {
2484 	      if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
2485 				       TREE_OPERAND (ref->base, 1)))
2486 		{
2487 		  poly_offset_int off1 = mem_ref_offset (base);
2488 		  off1 <<= LOG2_BITS_PER_UNIT;
2489 		  off1 += offset;
2490 		  poly_offset_int off2 = mem_ref_offset (ref->base);
2491 		  off2 <<= LOG2_BITS_PER_UNIT;
2492 		  off2 += ref_offset;
2493 		  if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
2494 		    size = -1;
2495 		}
2496 	    }
2497 	  else
2498 	    size = -1;
2499 	}
2500       /* For a must-alias check we need to be able to constrain
2501 	 the access properly.  */
2502       if (known_eq (size, max_size)
2503 	  && known_subrange_p (ref_offset, ref->max_size, offset, size))
2504 	return true;
2505     }
2506 
2507   if (is_gimple_call (stmt))
2508     {
2509       tree callee = gimple_call_fndecl (stmt);
2510       if (callee != NULL_TREE
2511 	  && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
2512 	switch (DECL_FUNCTION_CODE (callee))
2513 	  {
2514 	  case BUILT_IN_FREE:
2515 	    {
2516 	      tree ptr = gimple_call_arg (stmt, 0);
2517 	      tree base = ao_ref_base (ref);
2518 	      if (base && TREE_CODE (base) == MEM_REF
2519 		  && TREE_OPERAND (base, 0) == ptr)
2520 		return true;
2521 	      break;
2522 	    }
2523 
2524 	  case BUILT_IN_MEMCPY:
2525 	  case BUILT_IN_MEMPCPY:
2526 	  case BUILT_IN_MEMMOVE:
2527 	  case BUILT_IN_MEMSET:
2528 	  case BUILT_IN_MEMCPY_CHK:
2529 	  case BUILT_IN_MEMPCPY_CHK:
2530 	  case BUILT_IN_MEMMOVE_CHK:
2531 	  case BUILT_IN_MEMSET_CHK:
2532 	  case BUILT_IN_STRNCPY:
2533 	  case BUILT_IN_STPNCPY:
2534 	    {
2535 	      /* For a must-alias check we need to be able to constrain
2536 		 the access properly.  */
2537 	      if (!ref->max_size_known_p ())
2538 		return false;
2539 	      tree dest = gimple_call_arg (stmt, 0);
2540 	      tree len = gimple_call_arg (stmt, 2);
2541 	      if (!poly_int_tree_p (len))
2542 		return false;
2543 	      tree rbase = ref->base;
2544 	      poly_offset_int roffset = ref->offset;
2545 	      ao_ref dref;
2546 	      ao_ref_init_from_ptr_and_size (&dref, dest, len);
2547 	      tree base = ao_ref_base (&dref);
2548 	      poly_offset_int offset = dref.offset;
2549 	      if (!base || !known_size_p (dref.size))
2550 		return false;
2551 	      if (TREE_CODE (base) == MEM_REF)
2552 		{
2553 		  if (TREE_CODE (rbase) != MEM_REF)
2554 		    return false;
2555 		  // Compare pointers.
2556 		  offset += mem_ref_offset (base) << LOG2_BITS_PER_UNIT;
2557 		  roffset += mem_ref_offset (rbase) << LOG2_BITS_PER_UNIT;
2558 		  base = TREE_OPERAND (base, 0);
2559 		  rbase = TREE_OPERAND (rbase, 0);
2560 		}
2561 	      if (base == rbase
2562 		  && known_subrange_p (roffset, ref->max_size, offset,
2563 				       wi::to_poly_offset (len)
2564 				       << LOG2_BITS_PER_UNIT))
2565 		return true;
2566 	      break;
2567 	    }
2568 
2569 	  case BUILT_IN_VA_END:
2570 	    {
2571 	      tree ptr = gimple_call_arg (stmt, 0);
2572 	      if (TREE_CODE (ptr) == ADDR_EXPR)
2573 		{
2574 		  tree base = ao_ref_base (ref);
2575 		  if (TREE_OPERAND (ptr, 0) == base)
2576 		    return true;
2577 		}
2578 	      break;
2579 	    }
2580 
2581 	  default:;
2582 	  }
2583     }
2584   return false;
2585 }
2586 
2587 bool
2588 stmt_kills_ref_p (gimple *stmt, tree ref)
2589 {
2590   ao_ref r;
2591   ao_ref_init (&r, ref);
2592   return stmt_kills_ref_p (stmt, &r);
2593 }
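/* Illustrative example (not part of the original file): the call

     memset (&a, 0, sizeof a);

   kills any REF into A because the BUILT_IN_MEMSET case above proves
   the destination range covers REF's maximum extent.  Clients such as
   dead store elimination use this must-alias query to remove earlier
   stores into A.  */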
2594 
2595 
2596 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
2597    TARGET or a statement clobbering the memory reference REF, in which
2598    case false is returned.  The walk starts with VUSE, one argument of PHI.  */
2599 
2600 static bool
2601 maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
2602 		  ao_ref *ref, tree vuse, bool tbaa_p, unsigned int &limit,
2603 		  bitmap *visited, bool abort_on_visited,
2604 		  void *(*translate)(ao_ref *, tree, void *, bool *),
2605 		  void *data)
2606 {
2607   basic_block bb = gimple_bb (phi);
2608 
2609   if (!*visited)
2610     *visited = BITMAP_ALLOC (NULL);
2611 
2612   bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
2613 
2614   /* Walk until we hit the target.  */
2615   while (vuse != target)
2616     {
2617       gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2618       /* If we are searching for the target VUSE by walking up to
2619          TARGET_BB dominating the original PHI we are finished once
2620 	 we reach a default def or a definition in a block dominating
2621 	 that block.  Update TARGET and return.  */
2622       if (!target
2623 	  && (gimple_nop_p (def_stmt)
2624 	      || dominated_by_p (CDI_DOMINATORS,
2625 				 target_bb, gimple_bb (def_stmt))))
2626 	{
2627 	  target = vuse;
2628 	  return true;
2629 	}
2630 
2631       /* Recurse for PHI nodes.  */
2632       if (gimple_code (def_stmt) == GIMPLE_PHI)
2633 	{
2634 	  /* An already visited PHI node ends the walk successfully.  */
2635 	  if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
2636 	    return !abort_on_visited;
2637 	  vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
2638 					   visited, abort_on_visited,
2639 					   translate, data);
2640 	  if (!vuse)
2641 	    return false;
2642 	  continue;
2643 	}
2644       else if (gimple_nop_p (def_stmt))
2645 	return false;
2646       else
2647 	{
2648 	  /* A clobbering statement or the end of the IL ends the walk, failing.  */
2649 	  if ((int)limit <= 0)
2650 	    return false;
2651 	  --limit;
2652 	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
2653 	    {
2654 	      bool disambiguate_only = true;
2655 	      if (translate
2656 		  && (*translate) (ref, vuse, data, &disambiguate_only) == NULL)
2657 		;
2658 	      else
2659 		return false;
2660 	    }
2661 	}
2662       /* If we reach a new basic-block see if we already skipped it
2663          in a previous walk that ended successfully.  */
2664       if (gimple_bb (def_stmt) != bb)
2665 	{
2666 	  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
2667 	    return !abort_on_visited;
2668 	  bb = gimple_bb (def_stmt);
2669 	}
2670       vuse = gimple_vuse (def_stmt);
2671     }
2672   return true;
2673 }
2674 
2675 
2676 /* Starting from a PHI node for the virtual operand of the memory reference
2677    REF, find a continuation virtual operand that allows us to continue walking
2678    statements dominating PHI skipping only statements that cannot possibly
2679    clobber REF.  Decrements LIMIT for each alias disambiguation done
2680    and aborts the walk, returning NULL_TREE if it reaches zero.
2681    Returns NULL_TREE if no suitable virtual operand can be found.  */
2682 
2683 tree
2684 get_continuation_for_phi (gimple *phi, ao_ref *ref, bool tbaa_p,
2685 			  unsigned int &limit, bitmap *visited,
2686 			  bool abort_on_visited,
2687 			  void *(*translate)(ao_ref *, tree, void *, bool *),
2688 			  void *data)
2689 {
2690   unsigned nargs = gimple_phi_num_args (phi);
2691 
2692   /* We can simply look through a single-argument PHI.  */
2693   if (nargs == 1)
2694     return PHI_ARG_DEF (phi, 0);
2695 
2696   /* For two or more arguments try to pairwise skip non-aliasing code
2697      until we hit the phi argument definition that dominates the other one.  */
2698   basic_block phi_bb = gimple_bb (phi);
2699   tree arg0, arg1;
2700   unsigned i;
2701 
2702   /* Find a candidate for the virtual operand whose definition
2703      dominates those of all others.  */
2704   /* First check whether any of the args themselves satisfies this.  */
2705   for (i = 0; i < nargs; ++i)
2706     {
2707       arg0 = PHI_ARG_DEF (phi, i);
2708       if (SSA_NAME_IS_DEFAULT_DEF (arg0))
2709 	break;
2710       basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
2711       if (def_bb != phi_bb
2712 	  && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
2713 	break;
2714       arg0 = NULL_TREE;
2715     }
2716   /* If not, check whether we can reach such a candidate by walking defs
2717      until we hit the immediate dominator.  maybe_skip_until will
2718      do that for us.  */
2719   basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);
2720 
2721   /* Then check against the (to be) found candidate.  */
2722   for (i = 0; i < nargs; ++i)
2723     {
2724       arg1 = PHI_ARG_DEF (phi, i);
2725       if (arg1 == arg0)
2726 	;
2727       else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, tbaa_p,
2728 				   limit, visited,
2729 				   abort_on_visited,
2730 				   /* Do not translate when walking over
2731 				      backedges.  */
2732 				   dominated_by_p
2733 				     (CDI_DOMINATORS,
2734 				      gimple_bb (SSA_NAME_DEF_STMT (arg1)),
2735 				      phi_bb)
2736 				   ? NULL : translate, data))
2737 	return NULL_TREE;
2738     }
2739 
2740   return arg0;
2741 }
2742 
2743 /* Based on the memory reference REF and its virtual use VUSE call
2744    WALKER for each virtual use that is equivalent to VUSE, including VUSE
2745    itself.  That is, for each virtual use whose defining statement
2746    does not clobber REF.
2747 
2748    WALKER is called with REF, the current virtual use and DATA.  If
2749    WALKER returns non-NULL the walk stops and its result is returned.
2750    At the end of a non-successful walk NULL is returned.
2751 
2752    TRANSLATE if non-NULL is called with a pointer to REF, the virtual
2753    use whose definition is a statement that may clobber REF, and DATA.
2754    If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
2755    If TRANSLATE returns non-NULL the walk stops and its result is returned.
2756    If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
2757    to adjust REF and *DATA to make that valid.
2758 
2759    VALUEIZE if non-NULL is called with the next VUSE that is considered
2760    and its return value is substituted for it.  This can be used to
2761    implement optimistic value-numbering for example.  Note that the
2762    VUSE argument is assumed to be valueized already.
2763 
2764    LIMIT specifies the number of alias queries we are allowed to do,
2765    the walk stops when it reaches zero and NULL is returned.  LIMIT
2766    is decremented by the number of alias queries (plus adjustments
2767    done by the callbacks) upon return.
2768 
2769    TODO: Cache the vector of equivalent vuses per ref, vuse pair.  */
2770 
2771 void *
2772 walk_non_aliased_vuses (ao_ref *ref, tree vuse, bool tbaa_p,
2773 			void *(*walker)(ao_ref *, tree, void *),
2774 			void *(*translate)(ao_ref *, tree, void *, bool *),
2775 			tree (*valueize)(tree),
2776 			unsigned &limit, void *data)
2777 {
2778   bitmap visited = NULL;
2779   void *res;
2780   bool translated = false;
2781 
2782   timevar_push (TV_ALIAS_STMT_WALK);
2783 
2784   do
2785     {
2786       gimple *def_stmt;
2787 
2788       /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
2789       res = (*walker) (ref, vuse, data);
2790       /* Abort walk.  */
2791       if (res == (void *)-1)
2792 	{
2793 	  res = NULL;
2794 	  break;
2795 	}
2796       /* Lookup succeeded.  */
2797       else if (res != NULL)
2798 	break;
2799 
2800       if (valueize)
2801 	{
2802 	  vuse = valueize (vuse);
2803 	  if (!vuse)
2804 	    {
2805 	      res = NULL;
2806 	      break;
2807 	    }
2808 	}
2809       def_stmt = SSA_NAME_DEF_STMT (vuse);
2810       if (gimple_nop_p (def_stmt))
2811 	break;
2812       else if (gimple_code (def_stmt) == GIMPLE_PHI)
2813 	vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
2814 					 &visited, translated, translate, data);
2815       else
2816 	{
2817 	  if ((int)limit <= 0)
2818 	    {
2819 	      res = NULL;
2820 	      break;
2821 	    }
2822 	  --limit;
2823 	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
2824 	    {
2825 	      if (!translate)
2826 		break;
2827 	      bool disambiguate_only = false;
2828 	      res = (*translate) (ref, vuse, data, &disambiguate_only);
2829 	      /* Failed lookup and translation.  */
2830 	      if (res == (void *)-1)
2831 		{
2832 		  res = NULL;
2833 		  break;
2834 		}
2835 	      /* Lookup succeeded.  */
2836 	      else if (res != NULL)
2837 		break;
2838 	      /* Translation succeeded, continue walking.  */
2839 	      translated = translated || !disambiguate_only;
2840 	    }
2841 	  vuse = gimple_vuse (def_stmt);
2842 	}
2843     }
2844   while (vuse);
2845 
2846   if (visited)
2847     BITMAP_FREE (visited);
2848 
2849   timevar_pop (TV_ALIAS_STMT_WALK);
2850 
2851   return res;
2852 }
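/* A minimal walker sketch for walk_non_aliased_vuses (illustrative;
   the callback name, REF_TREE and the limit value are made up):

     static void *
     my_walker (ao_ref *ref, tree vuse, void *data)
     {
       // Return non-NULL to stop the walk with that result,
       // (void *)-1 to abort it, NULL to continue past this vuse.
       return NULL;
     }

     ao_ref r;
     ao_ref_init (&r, ref_tree);  // ref_tree: hypothetical reference tree
     unsigned limit = 100;
     void *res = walk_non_aliased_vuses (&r, vuse, true, my_walker,
					 NULL, NULL, limit, NULL);
 */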
2853 
2854 
2855 /* Based on the memory reference REF call WALKER for each vdef whose
2856    defining statement may clobber REF, starting with VDEF.  If REF
2857    is NULL_TREE, each defining statement is visited.
2858 
2859    WALKER is called with REF, the current vdef and DATA.  If WALKER
2860    returns true the walk is stopped, otherwise it continues.
2861 
2862    If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
2863    The pointer may be NULL, in which case we do not track this information.
2864 
2865    At PHI nodes walk_aliased_vdefs forks into one walk for each
2866    PHI argument (but only one walk continues at merge points); the
2867    return value is true if any of the walks was successful.
2868 
2869    The function returns the number of statements walked or -1 if
2870    LIMIT stmts were walked and the walk was aborted at this point.
2871    If LIMIT is zero the walk is not aborted.  */
2872 
2873 static int
2874 walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
2875 		      bool (*walker)(ao_ref *, tree, void *), void *data,
2876 		      bitmap *visited, unsigned int cnt,
2877 		      bool *function_entry_reached, unsigned limit)
2878 {
2879   do
2880     {
2881       gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
2882 
2883       if (*visited
2884 	  && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
2885 	return cnt;
2886 
2887       if (gimple_nop_p (def_stmt))
2888 	{
2889 	  if (function_entry_reached)
2890 	    *function_entry_reached = true;
2891 	  return cnt;
2892 	}
2893       else if (gimple_code (def_stmt) == GIMPLE_PHI)
2894 	{
2895 	  unsigned i;
2896 	  if (!*visited)
2897 	    *visited = BITMAP_ALLOC (NULL);
2898 	  for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
2899 	    {
2900 	      int res = walk_aliased_vdefs_1 (ref,
2901 					      gimple_phi_arg_def (def_stmt, i),
2902 					      walker, data, visited, cnt,
2903 					      function_entry_reached, limit);
2904 	      if (res == -1)
2905 		return -1;
2906 	      cnt = res;
2907 	    }
2908 	  return cnt;
2909 	}
2910 
2911       /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
2912       cnt++;
2913       if (cnt == limit)
2914 	return -1;
2915       if ((!ref
2916 	   || stmt_may_clobber_ref_p_1 (def_stmt, ref))
2917 	  && (*walker) (ref, vdef, data))
2918 	return cnt;
2919 
2920       vdef = gimple_vuse (def_stmt);
2921     }
2922   while (1);
2923 }
2924 
2925 int
2926 walk_aliased_vdefs (ao_ref *ref, tree vdef,
2927 		    bool (*walker)(ao_ref *, tree, void *), void *data,
2928 		    bitmap *visited,
2929 		    bool *function_entry_reached, unsigned int limit)
2930 {
2931   bitmap local_visited = NULL;
2932   int ret;
2933 
2934   timevar_push (TV_ALIAS_STMT_WALK);
2935 
2936   if (function_entry_reached)
2937     *function_entry_reached = false;
2938 
2939   ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
2940 			      visited ? visited : &local_visited, 0,
2941 			      function_entry_reached, limit);
2942   if (local_visited)
2943     BITMAP_FREE (local_visited);
2944 
2945   timevar_pop (TV_ALIAS_STMT_WALK);
2946 
2947   return ret;
2948 }
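/* A minimal walker sketch for walk_aliased_vdefs (illustrative; the
   callback, REF_TREE and the limit of 100 are made up): count the
   stores that may clobber REF.

     static bool
     count_clobbers (ao_ref *ref, tree vdef, void *data)
     {
       ++*(unsigned *) data;
       return false;  // false continues the walk, true stops it
     }

     ao_ref r;
     ao_ref_init (&r, ref_tree);  // ref_tree: hypothetical reference tree
     unsigned n = 0;
     walk_aliased_vdefs (&r, gimple_vuse (stmt), count_clobbers, &n,
			 NULL, NULL, 100);
 */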
2949 
2950