/* Alias analysis for trees.
   Copyright (C) 2004-2020 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "timevar.h"	/* for TV_ALIAS_STMT_WALK */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "langhooks.h"
#include "dumpfile.h"
#include "tree-eh.h"
#include "tree-dfa.h"
#include "ipa-reference.h"
#include "varasm.h"

/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function, its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.

   This file contains functions for disambiguating memory references,
   the so-called alias oracle, and tools for walking the gimple IL.

   The main alias-oracle entry points are

   bool stmt_may_clobber_ref_p (gimple *, tree)

     This function queries if a statement may invalidate (parts of)
     the memory designated by the reference tree argument.

   bool ref_maybe_used_by_stmt_p (gimple *, tree)

     This function queries if a statement may need (parts of) the
     memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

   bool refs_may_alias_p (tree, tree)

     This function tries to disambiguate two reference trees.

   bool ptr_deref_may_alias_global_p (tree)

     This function queries if dereferencing a pointer variable may
     alias global memory.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.c.  */
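
/* As a usage sketch (illustrative only, not part of the sources): a
   pass holding a statement STMT and a reference tree REF would query
   the oracle like

     if (ref_maybe_used_by_stmt_p (stmt, ref)
	 || stmt_may_clobber_ref_p (stmt, ref))
       ;  /* STMT may read or overwrite (parts of) *REF.  */

   Both predicates are conservative; a true result only means the
   disambiguators could not prove independence.  */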

static int nonoverlapping_refs_since_match_p (tree, tree, tree, tree, bool);
static bool nonoverlapping_component_refs_p (const_tree, const_tree);

/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_may_alias;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_must_overlap;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_no_alias;
} alias_stats;

void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.refs_may_alias_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.ref_maybe_used_by_call_p_no_alias
	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.call_may_clobber_ref_p_no_alias,
	   alias_stats.call_may_clobber_ref_p_no_alias
	   + alias_stats.call_may_clobber_ref_p_may_alias);
  fprintf (s, "  nonoverlapping_component_refs_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.nonoverlapping_component_refs_p_no_alias,
	   alias_stats.nonoverlapping_component_refs_p_no_alias
	   + alias_stats.nonoverlapping_component_refs_p_may_alias);
  fprintf (s, "  nonoverlapping_refs_since_match_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" must overlaps, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.nonoverlapping_refs_since_match_p_no_alias,
	   alias_stats.nonoverlapping_refs_since_match_p_must_overlap,
	   alias_stats.nonoverlapping_refs_since_match_p_no_alias
	   + alias_stats.nonoverlapping_refs_since_match_p_may_alias
	   + alias_stats.nonoverlapping_refs_since_match_p_must_overlap);
  fprintf (s, "  aliasing_component_refs_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.aliasing_component_refs_p_no_alias,
	   alias_stats.aliasing_component_refs_p_no_alias
	   + alias_stats.aliasing_component_refs_p_may_alias);
  dump_alias_stats_in_alias_c (s);
}


/* Return true if dereferencing PTR may alias a global variable.  */

bool
ptr_deref_may_alias_global_p (tree ptr)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here, it may point
     to global memory.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to punt.  */
  if (!pi)
    return true;

  /* ???  This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt);
}

/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (!VAR_P (decl)
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	return compare_base_decls (base, decl) != 0;
      else if (base
	       && CONSTANT_CLASS_P (base))
	return false;
      else
	return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}
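
/* For instance (an illustrative sketch only): for

     int a, b;
     int *p = &a;

   the points-to set of the SSA name for P is {a}, so
   ptr_deref_may_alias_decl_p (p, a) has to return true while
   ptr_deref_may_alias_decl_p (p, b) can return false.  */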

/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions for two occurrences
     of the same pointer.  In this case we still want to say the pointers
     alias, so shortcut that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ???  This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}

/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}

/* Returns true if PTR1 and PTR2 compare unequal because of points-to.  */

bool
ptrs_compare_unequal (tree ptr1, tree ptr2)
{
  /* First resolve the pointers down to a SSA name pointer base or
     a VAR_DECL, PARM_DECL or RESULT_DECL.  This explicitly does
     not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
     or STRING_CSTs, which would need points-to adjustments to track
     them in the points-to sets.  */
  tree obj1 = NULL_TREE;
  tree obj2 = NULL_TREE;
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj1 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr1 = TREE_OPERAND (tem, 0);
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj2 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr2 = TREE_OPERAND (tem, 0);
    }

  /* Canonicalize ptr vs. object.  */
  if (TREE_CODE (ptr1) == SSA_NAME && obj2)
    {
      std::swap (ptr1, ptr2);
      std::swap (obj1, obj2);
    }

  if (obj1 && obj2)
    /* Other code handles this correctly, no need to duplicate it here.  */;
  else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
      /* We may not use restrict to optimize pointer comparisons.
         See PR71062.  So we have to assume that the restrict-pointed-to
	 object may in fact be obj1.  */
      if (!pi
	  || pi->pt.vars_contains_restrict
	  || pi->pt.vars_contains_interposable)
	return false;
      if (VAR_P (obj1)
	  && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
	{
	  varpool_node *node = varpool_node::get (obj1);
	  /* If obj1 may bind to NULL give up (see below).  */
	  if (! node
	      || ! node->nonzero_address ()
	      || ! decl_binds_to_current_def_p (obj1))
	    return false;
	}
      return !pt_solution_includes (&pi->pt, obj1);
    }

  /* ???  We'd like to handle ptr1 != NULL and ptr1 != ptr2
     but those require pt.null to be conservatively correct.  */

  return false;
}
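
/* A hedged example of the positive case above: given

     static int a;
     int *foo (int *p) { if (p != &a) ... }

   points-to analysis may prove that P never points to A; then
   ptrs_compare_unequal (&a, p) returns true and callers of this
   predicate can fold the comparison.  */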

/* Returns whether reference REF to BASE may refer to global memory.  */

static bool
ref_may_alias_global_p_1 (tree base)
{
  if (DECL_P (base))
    return is_global_var (base);
  else if (TREE_CODE (base) == MEM_REF
	   || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
  return true;
}

bool
ref_may_alias_global_p (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  return ref_may_alias_global_p_1 (base);
}

bool
ref_may_alias_global_p (tree ref)
{
  tree base = get_base_address (ref);
  return ref_may_alias_global_p_1 (base);
}

/* Return true if STMT may clobber global memory.  */

bool
stmt_may_clobber_global_p (gimple *stmt)
{
  tree lhs;

  if (!gimple_vdef (stmt))
    return false;

  /* ???  We can ask the oracle whether an artificial pointer
     dereference with a pointer with points-to information covering
     all global memory (what about non-address taken memory?) may be
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage, do some manual
     checking instead.
     ???  We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      lhs = gimple_assign_lhs (stmt);
      return (TREE_CODE (lhs) != SSA_NAME
	      && ref_may_alias_global_p (lhs));
    case GIMPLE_CALL:
      return true;
    default:
      return true;
    }
}


/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  unsigned i;
  tree ptr;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  FOR_EACH_SSA_NAME (i, ptr, cfun)
    {
      struct ptr_info_def *pi;

      if (!POINTER_TYPE_P (TREE_TYPE (ptr))
	  || SSA_NAME_IN_FREE_LIST (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}


/* Dump alias information on stderr.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}


/* Dump the points-to set *PT into FILE.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      if (pt->vars_contains_nonlocal
	  || pt->vars_contains_escaped
	  || pt->vars_contains_escaped_heap
	  || pt->vars_contains_restrict)
	{
	  const char *comma = "";
	  fprintf (file, " (");
	  if (pt->vars_contains_nonlocal)
	    {
	      fprintf (file, "nonlocal");
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped)
	    {
	      fprintf (file, "%sescaped", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped_heap)
	    {
	      fprintf (file, "%sescaped heap", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_restrict)
	    {
	      fprintf (file, "%srestrict", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_interposable)
	    fprintf (file, "%sinterposable", comma);
	  fprintf (file, ")");
	}
    }
}


/* Unified dump function for pt_solution.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
}

DEBUG_FUNCTION void
debug (pt_solution *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}


/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    dump_points_to_solution (file, &pi->pt);
  else
    fprintf (file, ", points-to anything");

  fprintf (file, "\n");
}


/* Dump points-to information for VAR into stderr.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}


/* Initializes the alias-oracle reference representation *R from REF.  */

void
ao_ref_init (ao_ref *r, tree ref)
{
  r->ref = ref;
  r->base = NULL_TREE;
  r->offset = 0;
  r->size = -1;
  r->max_size = -1;
  r->ref_alias_set = -1;
  r->base_alias_set = -1;
  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}

/* Returns the base object of the memory reference *REF.  */

tree
ao_ref_base (ao_ref *ref)
{
  bool reverse;

  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
				       &ref->max_size, &reverse);
  return ref->base;
}

/* Returns the base object alias set of the memory reference *REF.  */

alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;
  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  base_ref = ref->ref;
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}

/* Returns the reference alias set of the memory reference *REF.  */

alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
  if (ref->ref_alias_set != -1)
    return ref->ref_alias_set;
  if (!ref->ref)
    return 0;
  ref->ref_alias_set = get_alias_set (ref->ref);
  return ref->ref_alias_set;
}

/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   to or after the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  poly_int64 t, size_hwi, extra_offset = 0;
  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  extra_offset *= BITS_PER_UNIT;
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  size = NULL_TREE;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset;
  if (size
      && poly_int_tree_p (size, &size_hwi)
      && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
    ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}
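
/* For example (a sketch, not a definitive recipe): to model the
   destination of a memset-like call CALL one would build

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref, gimple_call_arg (call, 0),
				    gimple_call_arg (call, 2));

   and hand DREF to the disambiguators above.  */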

/* S1 and S2 are TYPE_SIZE or DECL_SIZE.  Compare them:
   Return -1 if S1 < S2
   Return 1 if S1 > S2
   Return 0 if equal or incomparable.  */

static int
compare_sizes (tree s1, tree s2)
{
  if (!s1 || !s2)
    return 0;

  poly_uint64 size1;
  poly_uint64 size2;

  if (!poly_int_tree_p (s1, &size1) || !poly_int_tree_p (s2, &size2))
    return 0;
  if (known_lt (size1, size2))
    return -1;
  if (known_lt (size2, size1))
    return 1;
  return 0;
}

/* Compare TYPE1 and TYPE2 by their sizes.
   Return -1 if size of TYPE1 < size of TYPE2
   Return 1 if size of TYPE1 > size of TYPE2
   Return 0 if types are of equal sizes or we cannot compare them.  */

static int
compare_type_sizes (tree type1, tree type2)
{
  /* Be conservative for arrays and vectors.  We want to support partial
     overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c.  */
  while (TREE_CODE (type1) == ARRAY_TYPE
	 || TREE_CODE (type1) == VECTOR_TYPE)
    type1 = TREE_TYPE (type1);
  while (TREE_CODE (type2) == ARRAY_TYPE
	 || TREE_CODE (type2) == VECTOR_TYPE)
    type2 = TREE_TYPE (type2);
  return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2));
}
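
/* A consequence of the array stripping above (a sketch, assuming a
   4-byte int and an 8-byte double):

     compare_type_sizes (int[3], int)  == 0   (element types compared)
     compare_type_sizes (char, int)    == -1
     compare_type_sizes (double, int)  == 1

   Arrays deliberately compare by element size so that callers stay
   conservative about partial array overlaps.  */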

/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static inline int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* Handle the most common case first.  */
  if (type1 == type2)
    return 1;

  /* If we would have to do structural comparison bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  alias_set_type set1 = get_alias_set (type1);
  alias_set_type set2 = get_alias_set (type2);
  if (set1 == set2)
    return -1;

  /* Pointers to void are considered compatible with all other pointers,
     so for two pointers see what the alias set resolution thinks.  */
  if (POINTER_TYPE_P (type1)
      && POINTER_TYPE_P (type2)
      && alias_sets_conflict_p (set1, set2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}

/* Return true if TYPE is a composite type (i.e. we may apply one of the
   handled component references to it).  */

static bool
type_has_components_p (tree type)
{
  return AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type)
	 || TREE_CODE (type) == COMPLEX_TYPE;
}

/* MATCH1 and MATCH2, which are part of the access paths of REF1 and REF2
   respectively, either point to the same address or are completely
   disjoint.  If PARTIAL_OVERLAP is true, assume that the outermost arrays
   may just partly overlap.

   Try to disambiguate using the access path starting from the match
   and return false if there is no conflict.

   Helper for aliasing_component_refs_p.  */

static bool
aliasing_matching_component_refs_p (tree match1, tree ref1,
				    poly_int64 offset1, poly_int64 max_size1,
				    tree match2, tree ref2,
				    poly_int64 offset2, poly_int64 max_size2,
				    bool partial_overlap)
{
  poly_int64 offadj, sztmp, msztmp;
  bool reverse;

  if (!partial_overlap)
    {
      get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse);
      offset2 -= offadj;
      get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse);
      offset1 -= offadj;
      if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
	{
	  ++alias_stats.aliasing_component_refs_p_no_alias;
	  return false;
	}
    }

  int cmp = nonoverlapping_refs_since_match_p (match1, ref1, match2, ref2,
					       partial_overlap);
  if (cmp == 1
      || (cmp == -1 && nonoverlapping_component_refs_p (ref1, ref2)))
    {
      ++alias_stats.aliasing_component_refs_p_no_alias;
      return false;
    }
  ++alias_stats.aliasing_component_refs_p_may_alias;
  return true;
}

/* Return true if REF is a reference to a zero-sized trailing array, i.e.
   struct foo {int bar; int array[0];} *fooptr;
   fooptr->array.  */

static bool
component_ref_to_zero_sized_trailing_array_p (tree ref)
{
  return (TREE_CODE (ref) == COMPONENT_REF
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE
	  && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))
	      || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))))
	  && array_at_struct_end_p (ref));
}

/* Worker for aliasing_component_refs_p.  Most parameters match parameters of
   aliasing_component_refs_p.

   Walk access path REF2 and try to find a type matching TYPE1
   (which is a start of the possibly aliasing access path REF1).
   If a match is found, try to disambiguate.

   Return 0 for successful disambiguation.
   Return 1 if a match was found but disambiguation failed.
   Return -1 if there is no match.
   In this case MAYBE_MATCH is set to false if there is no type matching
   TYPE1 in the access path REF2 and to true if we are not sure.  */

static int
aliasing_component_refs_walk (tree ref1, tree type1, tree base1,
			      poly_int64 offset1, poly_int64 max_size1,
			      tree end_struct_ref1,
			      tree ref2, tree base2,
			      poly_int64 offset2, poly_int64 max_size2,
			      bool *maybe_match)
{
  tree ref = ref2;
  int same_p = 0;

  while (true)
    {
      /* We walk from the inner type to the outer types.  If the type we see
	 is already too large to be part of type1, terminate the search.  */
      int cmp = compare_type_sizes (type1, TREE_TYPE (ref));

      if (cmp < 0
	  && (!end_struct_ref1
	      || compare_type_sizes (TREE_TYPE (end_struct_ref1),
				     TREE_TYPE (ref)) < 0))
	break;
      /* If types may be of the same size, see if we can decide about their
	 equality.  */
      if (cmp == 0)
	{
	  same_p = same_type_for_tbaa (TREE_TYPE (ref), type1);
	  if (same_p == 1)
	    break;
	  /* In case we can't decide whether the types are the same, try to
	     continue looking for the exact match.
	     Remember, however, that we possibly saw a match
	     to bypass the access path continuation tests we do later.  */
	  if (same_p == -1)
	    *maybe_match = true;
	}
      if (!handled_component_p (ref))
	break;
      ref = TREE_OPERAND (ref, 0);
    }
  if (same_p == 1)
    {
      bool partial_overlap = false;

      /* We assume that arrays can overlap by a multiple of their element
	 size as tested in gcc.dg/torture/alias-2.c.
	 This partial overlap happens only when both arrays are bases of
	 the access and not contained within another component ref.
	 To be safe we also assume partial overlap for VLAs.  */
      if (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
	  && (!TYPE_SIZE (TREE_TYPE (base1))
	      || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST
	      || ref == base2))
	{
	  /* Setting maybe_match to true triggers the
	     nonoverlapping_component_refs_p test later that still may do
	     useful disambiguation.  */
	  *maybe_match = true;
	  partial_overlap = true;
	}
      return aliasing_matching_component_refs_p (base1, ref1,
						 offset1, max_size1,
						 ref, ref2,
						 offset2, max_size2,
						 partial_overlap);
    }
  return -1;
}

/* Consider access path1 base1....ref1 and access path2 base2...ref2.
   Return true if they can be composed to a single access path
   base1...ref1...base2...ref2.

   REF_TYPE1 is the type of REF1.  END_STRUCT_PAST_END1 is true if there is
   a trailing array access after REF1 in the non-TBAA part of the access.
   REF1_ALIAS_SET is the alias set of REF1.

   BASE_TYPE2 is the type of base2.  END_STRUCT_REF2 is non-NULL if there is
   a trailing array access in the TBAA part of access path2.
   BASE2_ALIAS_SET is the alias set of base2.  */

bool
access_path_may_continue_p (tree ref_type1, bool end_struct_past_end1,
			    alias_set_type ref1_alias_set,
			    tree base_type2, tree end_struct_ref2,
			    alias_set_type base2_alias_set)
{
  /* An access path cannot continue past types with no components.  */
  if (!type_has_components_p (ref_type1))
    return false;

  /* If the first access path ends with a type too small to hold the base
     of the second access path, typically the paths cannot continue.

     Punt if end_struct_past_end1 is true.  We want to support arbitrary
     type punning past the first COMPONENT_REF to a union because redundant
     store elimination depends on this, see PR92152.  For this reason we
     cannot check the size of the reference because types may partially
     overlap.  */
  if (!end_struct_past_end1)
    {
      if (compare_type_sizes (ref_type1, base_type2) < 0)
	return false;
      /* If path2 contains a trailing array access we can strengthen the
	 check to verify that the size of the element of the trailing array
	 fits as well.  In fact we could check for offset + type_size, but
	 we do not track offsets and this is quite a corner case.  */
      if (end_struct_ref2
	  && compare_type_sizes (ref_type1, TREE_TYPE (end_struct_ref2)) < 0)
	return false;
    }
  return (base2_alias_set == ref1_alias_set
	  || alias_set_subset_of (base2_alias_set, ref1_alias_set));
}

/* Determine if the two component references REF1 and REF2 which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference may alias.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */

static bool
aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   poly_int64 offset1, poly_int64 max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
			   poly_int64 offset2, poly_int64 max_size2)
{
  /* If one reference is a component reference through pointers, try to find
     a common base and apply offset based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  bool maybe_match = false;
  tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
  bool end_struct_past_end1 = false;
  bool end_struct_past_end2 = false;

  /* Choose bases and base types to search for.
     The access path is as follows:
       base....end_of_tbaa_ref...actual_ref
     At one place in the access path there may be a reference to a
     zero-sized or trailing array.

     We generally discard the segment after end_of_tbaa_ref; however,
     we need to be careful in case it contains a zero-sized or trailing
     array.  These may happen after a reference to a union and in this
     case we must not disambiguate type punning scenarios.

     We set:
	base1 to point to base

	ref1 to point to end_of_tbaa_ref

	end_struct_ref1 to point to the trailing reference (if it exists)
	in the range base....end_of_tbaa_ref

	end_struct_past_end1 is true if this trailing reference occurs in
	end_of_tbaa_ref...actual_ref.  */
  base1 = ref1;
  while (handled_component_p (base1))
    {
      /* Generally access paths are monotone in the size of the object.  The
	 exception are trailing arrays of structures.  I.e.
	   struct a {int array[0];};
	 or
	   struct a {int array1[0]; int array[];};
	 Such a struct has size 0 but accesses to a.array may have non-zero
	 size.  In this case the size of TREE_TYPE (base1) is smaller than
	 the size of TREE_TYPE (TREE_OPERAND (base1, 0)).

	 Because we compare sizes of arrays just by sizes of their elements,
	 we only need to care about zero sized array fields here.  */
      if (component_ref_to_zero_sized_trailing_array_p (base1))
	{
	  gcc_checking_assert (!end_struct_ref1);
	  end_struct_ref1 = base1;
	}
      if (ends_tbaa_access_path_p (base1))
	{
	  ref1 = TREE_OPERAND (base1, 0);
	  if (end_struct_ref1)
	    {
	      end_struct_past_end1 = true;
	      end_struct_ref1 = NULL;
	    }
	}
      base1 = TREE_OPERAND (base1, 0);
    }
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    {
      if (component_ref_to_zero_sized_trailing_array_p (base2))
	{
	  gcc_checking_assert (!end_struct_ref2);
	  end_struct_ref2 = base2;
	}
      if (ends_tbaa_access_path_p (base2))
	{
	  ref2 = TREE_OPERAND (base2, 0);
	  if (end_struct_ref2)
	    {
	      end_struct_past_end2 = true;
	      end_struct_ref2 = NULL;
	    }
	}
      base2 = TREE_OPERAND (base2, 0);
    }
  type2 = TREE_TYPE (base2);

  /* Now search for type1 in the access path of ref2.  This
     would be a common base for doing offset based disambiguation on.
     This however only makes sense if type2 is big enough to hold type1.  */
  int cmp_outer = compare_type_sizes (type2, type1);

  /* If type2 is big enough to contain type1 walk its access path.
     We also need to care of arrays at the end of structs that may extend
     beyond the end of the structure.  If this occurs in the TBAA part of
     the access path, we need to consider the increased type as well.  */
  if (cmp_outer >= 0
      || (end_struct_ref2
	  && compare_type_sizes (TREE_TYPE (end_struct_ref2), type1) >= 0))
    {
      int res = aliasing_component_refs_walk (ref1, type1, base1,
					      offset1, max_size1,
					      end_struct_ref1,
					      ref2, base2, offset2, max_size2,
					      &maybe_match);
      if (res != -1)
	return res;
    }

  /* If we didn't find a common base, try the other way around.  */
  if (cmp_outer <= 0
      || (end_struct_ref1
	  && compare_type_sizes (TREE_TYPE (end_struct_ref1), type1) <= 0))
    {
      int res = aliasing_component_refs_walk (ref2, type2, base2,
					      offset2, max_size2,
					      end_struct_ref2,
					      ref1, base1, offset1, max_size1,
					      &maybe_match);
      if (res != -1)
	return res;
    }

  /* In the following code we make an assumption that the types in access
     paths do not overlap and thus accesses alias only if one path can be
     a continuation of the other.  If we were not able to decide about
     equivalence, we need to give up.  */
  if (maybe_match)
    {
      if (!nonoverlapping_component_refs_p (ref1, ref2))
	{
	  ++alias_stats.aliasing_component_refs_p_may_alias;
	  return true;
	}
      ++alias_stats.aliasing_component_refs_p_no_alias;
      return false;
    }

  if (access_path_may_continue_p (TREE_TYPE (ref1), end_struct_past_end1,
				  ref1_alias_set,
				  type2, end_struct_ref2,
				  base2_alias_set)
      || access_path_may_continue_p (TREE_TYPE (ref2), end_struct_past_end2,
				     ref2_alias_set,
				     type1, end_struct_ref1,
				     base1_alias_set))
    {
      ++alias_stats.aliasing_component_refs_p_may_alias;
      return true;
    }
  ++alias_stats.aliasing_component_refs_p_no_alias;
  return false;
}

/* FIELD1 and FIELD2 are two fields of component refs.  We assume
   that bases of both component refs are either equivalent or nonoverlapping.
   We do not assume that the containers of FIELD1 and FIELD2 are of the
   same type or size.

   Return 0 if, in the case the base addresses of the component refs are
   the same, FIELD1 and FIELD2 have the same address too.  Note that FIELD1
   and FIELD2 may not be of the same type or size.

   Return 1 if FIELD1 and FIELD2 are non-overlapping.

   Return -1 otherwise.

   The main difference between 0 and -1 is to let
   nonoverlapping_component_refs_since_match_p discover the semantically
   equivalent part of the access path.

   Note that this function is used even with -fno-strict-aliasing
   and makes use of no TBAA assumptions.  */

static int
nonoverlapping_component_refs_p_1 (const_tree field1, const_tree field2)
{
  /* If both fields are of the same type, we could save the hard work of
     comparing offsets.  */
  tree type1 = DECL_CONTEXT (field1);
  tree type2 = DECL_CONTEXT (field2);

  if (TREE_CODE (type1) == RECORD_TYPE
      && DECL_BIT_FIELD_REPRESENTATIVE (field1))
    field1 = DECL_BIT_FIELD_REPRESENTATIVE (field1);
  if (TREE_CODE (type2) == RECORD_TYPE
      && DECL_BIT_FIELD_REPRESENTATIVE (field2))
    field2 = DECL_BIT_FIELD_REPRESENTATIVE (field2);

  /* ??? Bitfields can overlap at RTL level so punt on them.
     FIXME: RTL expansion should be fixed by adjusting the access path
     when producing MEM_ATTRs for MEMs which are wider than
     the bitfields similarly as done in set_mem_attrs_minus_bitpos.  */
  if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
    return -1;

  /* Assume that different FIELD_DECLs never overlap within a RECORD_TYPE.  */
  if (type1 == type2 && TREE_CODE (type1) == RECORD_TYPE)
    return field1 != field2;

  /* In the common case the offsets and bit offsets will be the same.
     However if frontends do not agree on the alignment, they may be
     different even if they actually represent the same address.
     Try the common case first and if that fails calculate the
     actual bit offset.  */
  if (tree_int_cst_equal (DECL_FIELD_OFFSET (field1),
			  DECL_FIELD_OFFSET (field2))
      && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (field1),
			     DECL_FIELD_BIT_OFFSET (field2)))
    return 0;

  /* Note that it may be possible to use component_ref_field_offset
     which would provide offsets as trees.  However constructing and folding
     trees is expensive and does not seem to be worth the compile time
     cost.  */

  poly_uint64 offset1, offset2;
  poly_uint64 bit_offset1, bit_offset2;

  if (poly_int_tree_p (DECL_FIELD_OFFSET (field1), &offset1)
      && poly_int_tree_p (DECL_FIELD_OFFSET (field2), &offset2)
      && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field1), &bit_offset1)
      && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field2), &bit_offset2))
    {
      offset1 = (offset1 << LOG2_BITS_PER_UNIT) + bit_offset1;
      offset2 = (offset2 << LOG2_BITS_PER_UNIT) + bit_offset2;

      if (known_eq (offset1, offset2))
	return 0;

      poly_uint64 size1, size2;

      if (poly_int_tree_p (DECL_SIZE (field1), &size1)
	  && poly_int_tree_p (DECL_SIZE (field2), &size2)
	  && !ranges_maybe_overlap_p (offset1, size1, offset2, size2))
	return 1;
    }
  /* Resort to slower overlap checking by looking for matching types in
     the middle of the access path.  */
  return -1;
}
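
/* A small worked example (illustrative only, assuming a 4-byte int):
   for

     struct S { int a; int b; } s;

   the fields a and b are distinct fields of the same record, so the
   function returns 1 for (a, b), 0 for (a, a), and -1 e.g. when both
   arguments are bitfields that may overlap at the RTL level.  */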

/* Return the low bound of an array ref.  Do not produce new trees
   and thus do not care about the particular type of the integer constant
   and placeholder exprs.  */

static tree
cheap_array_ref_low_bound (tree ref)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));

  /* Avoid expensive array_ref_low_bound.
     The low bound is either stored in operand2, or it is TYPE_MIN_VALUE
     of the domain type or it is zero.  */
  if (TREE_OPERAND (ref, 2))
    return TREE_OPERAND (ref, 2);
  else if (domain_type && TYPE_MIN_VALUE (domain_type))
    return TYPE_MIN_VALUE (domain_type);
  else
    return integer_zero_node;
}

/* REF1 and REF2 are ARRAY_REFs with either the same base address or which
   are completely disjoint.

   Return 1 if the refs are non-overlapping.
   Return 0 if they are possibly overlapping but if so the overlap again
   starts on the same address.
   Return -1 otherwise.  */

int
nonoverlapping_array_refs_p (tree ref1, tree ref2)
{
  tree index1 = TREE_OPERAND (ref1, 1);
  tree index2 = TREE_OPERAND (ref2, 1);
  tree low_bound1 = cheap_array_ref_low_bound (ref1);
  tree low_bound2 = cheap_array_ref_low_bound (ref2);

  /* Handle zero offsets first: we do not need to match type size in this
     case.  */
  if (operand_equal_p (index1, low_bound1, 0)
      && operand_equal_p (index2, low_bound2, 0))
    return 0;

  /* If type sizes are different, give up.

     Avoid expensive array_ref_element_size.
     If operand 3 is present it denotes the size in alignment units.
     Otherwise the size is TYPE_SIZE of the element type.
     Handle only common cases where types are of the same "kind".  */
  if ((TREE_OPERAND (ref1, 3) == NULL) != (TREE_OPERAND (ref2, 3) == NULL))
    return -1;

  tree elmt_type1 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref1, 0)));
  tree elmt_type2 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref2, 0)));

  if (TREE_OPERAND (ref1, 3))
    {
      if (TYPE_ALIGN (elmt_type1) != TYPE_ALIGN (elmt_type2)
	  || !operand_equal_p (TREE_OPERAND (ref1, 3),
			       TREE_OPERAND (ref2, 3), 0))
	return -1;
    }
  else
    {
      if (!operand_equal_p (TYPE_SIZE_UNIT (elmt_type1),
			    TYPE_SIZE_UNIT (elmt_type2), 0))
	return -1;
    }

  /* Since we know that type sizes are the same, there is no need to return
     -1 after this point.  Partial overlap cannot be introduced.  */

  /* We may need to fold trees in this case.
     TODO: Handle the integer constant case at least.  */
  if (!operand_equal_p (low_bound1, low_bound2, 0))
    return 0;

  if (TREE_CODE (index1) == INTEGER_CST && TREE_CODE (index2) == INTEGER_CST)
    {
      if (tree_int_cst_equal (index1, index2))
	return 0;
      return 1;
    }
  /* TODO: We can use VRP to further disambiguate here.  */
  return 0;
}
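
/* Illustrative behavior (a sketch, for an array int a[10]):

     a[1] vs. a[2]  -> 1   (distinct constant indexes, disjoint)
     a[i] vs. a[i]  -> 0   (same address)
     a[i] vs. a[j]  -> 0   (may overlap, but then at the same address)

   Refs with mismatched element sizes yield -1 and the callers fall
   back to the other disambiguators.  */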
1396 
1397 /* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
1398    MATCH2 either point to the same address or are disjoint.
1399    MATCH1 and MATCH2 are assumed to be ref in the access path of REF1 and REF2
1400    respectively or NULL in the case we established equivalence of bases.
1401    If PARTIAL_OVERLAP is true assume that the toplevel arrays may actually
1402    overlap by exact multiply of their element size.
1403 
1404    This test works by matching the initial segment of the access path
1405    and does not rely on TBAA thus is safe for !flag_strict_aliasing if
1406    match was determined without use of TBAA oracle.
1407 
1408    Return 1 if we can determine that component references REF1 and REF2,
1409    that are within a common DECL, cannot overlap.
1410 
1411    Return 0 if paths are same and thus there is nothing to disambiguate more
1412    (i.e. there is must alias assuming there is must alias between MATCH1 and
1413    MATCH2)
1414 
1415    Return -1 if we can not determine 0 or 1 - this happens when we met
1416    non-matching types was met in the path.
1417    In this case it may make sense to continue by other disambiguation
1418    oracles.  */
1419 
1420 static int
nonoverlapping_refs_since_match_p(tree match1,tree ref1,tree match2,tree ref2,bool partial_overlap)1421 nonoverlapping_refs_since_match_p (tree match1, tree ref1,
1422 				   tree match2, tree ref2,
1423 				   bool partial_overlap)
1424 {
1425   int ntbaa1 = 0, ntbaa2 = 0;
1426   /* Early return if there are no references to match, we do not need
1427      to walk the access paths.
1428 
1429      Do not consider this as may-alias for stats - it is more useful
1430      to have information how many disambiguations happened provided that
1431      the query was meaningful.  */
1432 
1433   if (match1 == ref1 || !handled_component_p (ref1)
1434       || match2 == ref2 || !handled_component_p (ref2))
1435     return -1;
1436 
1437   auto_vec<tree, 16> component_refs1;
1438   auto_vec<tree, 16> component_refs2;
1439 
1440   /* Create the stack of handled components for REF1.  */
1441   while (handled_component_p (ref1) && ref1 != match1)
1442     {
1443       /* We use TBAA only to re-synchronize after mismatched refs.  So we
1444 	 do not need to truncate access path after TBAA part ends.  */
1445       if (ends_tbaa_access_path_p (ref1))
1446 	ntbaa1 = 0;
1447       else
1448 	ntbaa1++;
1449       component_refs1.safe_push (ref1);
1450       ref1 = TREE_OPERAND (ref1, 0);
1451     }
1452 
1453   /* Create the stack of handled components for REF2.  */
1454   while (handled_component_p (ref2) && ref2 != match2)
1455     {
1456       if (ends_tbaa_access_path_p (ref2))
1457 	ntbaa2 = 0;
1458       else
1459 	ntbaa2++;
1460       component_refs2.safe_push (ref2);
1461       ref2 = TREE_OPERAND (ref2, 0);
1462     }
1463 
1464   if (!flag_strict_aliasing)
1465     {
1466       ntbaa1 = 0;
1467       ntbaa2 = 0;
1468     }
1469 
1470   bool mem_ref1 = TREE_CODE (ref1) == MEM_REF && ref1 != match1;
1471   bool mem_ref2 = TREE_CODE (ref2) == MEM_REF && ref2 != match2;
1472 
1473   /* If only one of access path starts with MEM_REF check that offset is 0
1474      so the addresses stays the same after stripping it.
1475      TODO: In this case we may walk the other access path until we get same
1476      offset.
1477 
1478      If both starts with MEM_REF, offset has to be same.  */
1479   if ((mem_ref1 && !mem_ref2 && !integer_zerop (TREE_OPERAND (ref1, 1)))
1480       || (mem_ref2 && !mem_ref1 && !integer_zerop (TREE_OPERAND (ref2, 1)))
1481       || (mem_ref1 && mem_ref2
1482 	  && !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
1483 				  TREE_OPERAND (ref2, 1))))
1484     {
1485       ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1486       return -1;
1487     }
1488 
1489   /* TARGET_MEM_REF are never wrapped in handled components, so we do not need
1490      to handle them here at all.  */
1491   gcc_checking_assert (TREE_CODE (ref1) != TARGET_MEM_REF
1492 		       && TREE_CODE (ref2) != TARGET_MEM_REF);
1493 
1494   /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
1495      rank.  This is sufficient because we start from the same DECL and you
1496      cannot reference several fields at a time with COMPONENT_REFs (unlike
1497      with ARRAY_RANGE_REFs for arrays) so you always need the same number
1498      of them to access a sub-component, unless you're in a union, in which
1499      case the return value will precisely be false.  */
1500   while (true)
1501     {
1502       /* Track if we seen unmatched ref with non-zero offset.  In this case
1503 	 we must look for partial overlaps.  */
1504       bool seen_unmatched_ref_p = false;
1505 
1506       /* First match ARRAY_REFs an try to disambiguate.  */
1507       if (!component_refs1.is_empty ()
1508 	  && !component_refs2.is_empty ())
1509 	{
1510 	  unsigned int narray_refs1=0, narray_refs2=0;
1511 
1512 	  /* We generally assume that both access paths starts by same sequence
1513 	     of refs.  However if number of array refs is not in sync, try
1514 	     to recover and pop elts until number match.  This helps the case
1515 	     where one access path starts by array and other by element.  */
1516 	  for (narray_refs1 = 0; narray_refs1 < component_refs1.length ();
1517 	       narray_refs1++)
1518 	    if (TREE_CODE (component_refs1 [component_refs1.length()
1519 					    - 1 - narray_refs1]) != ARRAY_REF)
1520 	      break;
1521 
1522 	  for (narray_refs2 = 0; narray_refs2 < component_refs2.length ();
1523 	       narray_refs2++)
1524 	    if (TREE_CODE (component_refs2 [component_refs2.length()
1525 					    - 1 - narray_refs2]) != ARRAY_REF)
1526 	      break;
1527 	  for (; narray_refs1 > narray_refs2; narray_refs1--)
1528 	    {
1529 	      ref1 = component_refs1.pop ();
1530 	      ntbaa1--;
1531 
1532 	      /* If index is non-zero we need to check whether the reference
1533 		 does not break the main invariant that bases are either
1534 		 disjoint or equal.  Consider the example:
1535 
1536 		 unsigned char out[][1];
1537 		 out[1]="a";
1538 		 out[i][0];
1539 
1540 		 Here the bases out and out are the same, but after removing the
1541 		 [i] index, this invariant no longer holds, because
1542 		 out[i] points into the middle of the array out.
1543 
1544 		 TODO: If the size of the type of the skipped reference is an
1545 		 integer multiple of the size of the type of the other reference
1546 		 this invariant can be verified, but even then it is not
1547 		 completely safe with !flag_strict_aliasing if the other
1548 		 reference contains unbounded array accesses.
1549 		 See   */
1550 
1551 	      if (!operand_equal_p (TREE_OPERAND (ref1, 1),
1552 				    cheap_array_ref_low_bound (ref1), 0))
1553 		return 0;
1554 	    }
1555 	  for (; narray_refs2 > narray_refs1; narray_refs2--)
1556 	    {
1557 	      ref2 = component_refs2.pop ();
1558 	      ntbaa2--;
1559 	      if (!operand_equal_p (TREE_OPERAND (ref2, 1),
1560 				    cheap_array_ref_low_bound (ref2), 0))
1561 		return 0;
1562 	    }
1563 	  /* Try to disambiguate matched arrays.  */
1564 	  for (unsigned int i = 0; i < narray_refs1; i++)
1565 	    {
1566 	      int cmp = nonoverlapping_array_refs_p (component_refs1.pop (),
1567 						     component_refs2.pop ());
1568 	      ntbaa1--;
1569 	      ntbaa2--;
1570 	      if (cmp == 1 && !partial_overlap)
1571 		{
1572 		  ++alias_stats
1573 		    .nonoverlapping_refs_since_match_p_no_alias;
1574 		  return 1;
1575 		}
1576 	      if (cmp == -1)
1577 		{
1578 		  seen_unmatched_ref_p = true;
1579 		  /* We cannot maintain the invariant that bases are either the
1580 		     same or completely disjoint.  However we can still recover
1581 		     from type-based alias analysis if we reach references of the
1582 		     same size.  We do not attempt to match array sizes, so
1583 		     just finish array walking and look for component refs.  */
1584 		  if (ntbaa1 < 0 || ntbaa2 < 0)
1585 		    {
1586 		      ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1587 		      return -1;
1588 		    }
1589 		  for (i++; i < narray_refs1; i++)
1590 		    {
1591 		      component_refs1.pop ();
1592 		      component_refs2.pop ();
1593 		      ntbaa1--;
1594 		      ntbaa2--;
1595 		    }
1596 		  break;
1597 		}
1598 	      partial_overlap = false;
1599 	    }
1600 	}
1601 
1602       /* Next look for component_refs.  */
1603       do
1604 	{
1605 	  if (component_refs1.is_empty ())
1606 	    {
1607 	      ++alias_stats
1608 		.nonoverlapping_refs_since_match_p_must_overlap;
1609 	      return 0;
1610 	    }
1611 	  ref1 = component_refs1.pop ();
1612 	  ntbaa1--;
1613 	  if (TREE_CODE (ref1) != COMPONENT_REF)
1614 	    {
1615 	      seen_unmatched_ref_p = true;
1616 	      if (ntbaa1 < 0 || ntbaa2 < 0)
1617 		{
1618 		  ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1619 		  return -1;
1620 		}
1621 	    }
1622 	}
1623       while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));
1624 
1625       do
1626 	{
1627 	  if (component_refs2.is_empty ())
1628 	    {
1629 	      ++alias_stats
1630 		.nonoverlapping_refs_since_match_p_must_overlap;
1631 	      return 0;
1632 	    }
1633 	  ref2 = component_refs2.pop ();
1634 	  ntbaa2--;
1635 	  if (TREE_CODE (ref2) != COMPONENT_REF)
1636 	    {
1637 	      if (ntbaa1 < 0 || ntbaa2 < 0)
1638 		{
1639 		  ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1640 		  return -1;
1641 		}
1642 	      seen_unmatched_ref_p = true;
1643 	    }
1644 	}
1645       while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));
1646 
1647       /* BIT_FIELD_REF and VIEW_CONVERT_EXPR are taken off the vectors
1648 	 earlier.  */
1649       gcc_checking_assert (TREE_CODE (ref1) == COMPONENT_REF
1650 			   && TREE_CODE (ref2) == COMPONENT_REF);
1651 
1652       tree field1 = TREE_OPERAND (ref1, 1);
1653       tree field2 = TREE_OPERAND (ref2, 1);
1654 
1655       /* ??? We cannot simply use the type of operand #0 of the refs here
1656 	 as the Fortran compiler smuggles type punning into COMPONENT_REFs
1657 	 for common blocks instead of using unions like everyone else.  */
1658       tree type1 = DECL_CONTEXT (field1);
1659       tree type2 = DECL_CONTEXT (field2);
1660 
1661       partial_overlap = false;
1662 
1663       /* If we skipped array refs on types of different sizes, we can
1664 	 no longer be sure that there are no partial overlaps.  */
1665       if (seen_unmatched_ref_p && ntbaa1 >= 0 && ntbaa2 >= 0
1666 	  && !operand_equal_p (TYPE_SIZE (type1), TYPE_SIZE (type2), 0))
1667 	{
1668 	  ++alias_stats
1669 	    .nonoverlapping_refs_since_match_p_may_alias;
1670 	  return -1;
1671 	}
1672 
1673       int cmp = nonoverlapping_component_refs_p_1 (field1, field2);
1674       if (cmp == -1)
1675 	{
1676 	  ++alias_stats
1677 	    .nonoverlapping_refs_since_match_p_may_alias;
1678 	  return -1;
1679 	}
1680       else if (cmp == 1)
1681 	{
1682 	  ++alias_stats
1683 	    .nonoverlapping_refs_since_match_p_no_alias;
1684 	  return 1;
1685 	}
1686     }
1687 
1688   ++alias_stats.nonoverlapping_refs_since_match_p_must_overlap;
1689   return 0;
1690 }
1691 
1692 /* Return TYPE_UID which can be used to match record types we consider
1693    the same for TBAA purposes.  */
1694 
1695 static inline int
1696 ncr_type_uid (const_tree field)
1697 {
1698   /* ??? We cannot simply use the type of operand #0 of the refs here
1699      as the Fortran compiler smuggles type punning into COMPONENT_REFs
1700      for common blocks instead of using unions like everyone else.  */
1701   tree type = DECL_FIELD_CONTEXT (field);
1702   /* With LTO, types considered the same by same_type_for_tbaa_p
1703      but coming from different translation units may not have the
1704      same main variant.  They do, however, have the same TYPE_CANONICAL.  */
1705   if (TYPE_CANONICAL (type))
1706     return TYPE_UID (TYPE_CANONICAL (type));
1707   return TYPE_UID (type);
1708 }
1709 
1710 /* qsort compare function to sort FIELD_DECLs by the TYPE_UID of their
1711    DECL_FIELD_CONTEXT.  */
1712 
1713 static inline int
1714 ncr_compar (const void *field1_, const void *field2_)
1715 {
1716   const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
1717   const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
1718   unsigned int uid1 = ncr_type_uid (field1);
1719   unsigned int uid2 = ncr_type_uid (field2);
1720 
1721   if (uid1 < uid2)
1722     return -1;
1723   else if (uid1 > uid2)
1724     return 1;
1725   return 0;
1726 }
1727 
1728 /* Return true if we can determine that the fields referenced cannot
1729    overlap for any pair of objects.  This relies on TBAA.  */
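/* A sketch of the idea (the type and fields below are hypothetical):

     struct S { int a; int b; };
     int f (struct S *x, struct S *y) { x->a = 1; return y->b; }

   Whatever objects X and Y point to, an access to field a and an access
   to field b of the same record type S occupy disjoint byte ranges, so
   the store and the load can never overlap.  */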
1730 
1731 static bool
1732 nonoverlapping_component_refs_p (const_tree x, const_tree y)
1733 {
1734   /* Early return if we have nothing to do.
1735 
1736      Do not consider this as may-alias for stats - it is more useful
1737      to have information on how many disambiguations happened, provided
1738      that the query was meaningful.  */
1739   if (!flag_strict_aliasing
1740       || !x || !y
1741       || !handled_component_p (x)
1742       || !handled_component_p (y))
1743     return false;
1744 
1745   auto_vec<const_tree, 16> fieldsx;
1746   while (handled_component_p (x))
1747     {
1748       if (TREE_CODE (x) == COMPONENT_REF)
1749 	{
1750 	  tree field = TREE_OPERAND (x, 1);
1751 	  tree type = DECL_FIELD_CONTEXT (field);
1752 	  if (TREE_CODE (type) == RECORD_TYPE)
1753 	    fieldsx.safe_push (field);
1754 	}
1755       else if (ends_tbaa_access_path_p (x))
1756 	fieldsx.truncate (0);
1757       x = TREE_OPERAND (x, 0);
1758     }
1759   if (fieldsx.length () == 0)
1760     return false;
1761   auto_vec<const_tree, 16> fieldsy;
1762   while (handled_component_p (y))
1763     {
1764       if (TREE_CODE (y) == COMPONENT_REF)
1765 	{
1766 	  tree field = TREE_OPERAND (y, 1);
1767 	  tree type = DECL_FIELD_CONTEXT (field);
1768 	  if (TREE_CODE (type) == RECORD_TYPE)
1769 	    fieldsy.safe_push (field);
1770 	}
1771       else if (ends_tbaa_access_path_p (y))
1772 	fieldsy.truncate (0);
1773       y = TREE_OPERAND (y, 0);
1774     }
1775   if (fieldsy.length () == 0)
1776     {
1777       ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1778       return false;
1779     }
1780 
1781   /* Most common case first.  */
1782   if (fieldsx.length () == 1
1783       && fieldsy.length () == 1)
1784    {
1785      if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldsx[0]),
1786 			     DECL_FIELD_CONTEXT (fieldsy[0])) == 1
1787 	 && nonoverlapping_component_refs_p_1 (fieldsx[0], fieldsy[0]) == 1)
1788       {
1789          ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1790          return true;
1791       }
1792      else
1793       {
1794          ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1795          return false;
1796       }
1797    }
1798 
1799   if (fieldsx.length () == 2)
1800     {
1801       if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
1802 	std::swap (fieldsx[0], fieldsx[1]);
1803     }
1804   else
1805     fieldsx.qsort (ncr_compar);
1806 
1807   if (fieldsy.length () == 2)
1808     {
1809       if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
1810 	std::swap (fieldsy[0], fieldsy[1]);
1811     }
1812   else
1813     fieldsy.qsort (ncr_compar);
1814 
1815   unsigned i = 0, j = 0;
1816   do
1817     {
1818       const_tree fieldx = fieldsx[i];
1819       const_tree fieldy = fieldsy[j];
1820 
1821       /* We're left with accessing different fields of a structure;
1822 	 no possible overlap.  */
1823       if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldx),
1824 			      DECL_FIELD_CONTEXT (fieldy)) == 1
1825 	  && nonoverlapping_component_refs_p_1 (fieldx, fieldy) == 1)
1826 	{
1827 	  ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1828 	  return true;
1829 	}
1830 
1831       if (ncr_type_uid (fieldx) < ncr_type_uid (fieldy))
1832 	{
1833 	  i++;
1834 	  if (i == fieldsx.length ())
1835 	    break;
1836 	}
1837       else
1838 	{
1839 	  j++;
1840 	  if (j == fieldsy.length ())
1841 	    break;
1842 	}
1843     }
1844   while (1);
1845 
1846   ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1847   return false;
1848 }
1849 
1850 
1851 /* Return true if two memory references based on the variables BASE1
1852    and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1853    [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  REF1 and REF2
1854    if non-NULL are the complete memory reference trees.  */
1855 
1856 static bool
1857 decl_refs_may_alias_p (tree ref1, tree base1,
1858 		       poly_int64 offset1, poly_int64 max_size1,
1859 		       poly_int64 size1,
1860 		       tree ref2, tree base2,
1861 		       poly_int64 offset2, poly_int64 max_size2,
1862 		       poly_int64 size2)
1863 {
1864   gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
1865 
1866   /* If both references are based on different variables, they cannot alias.  */
1867   if (compare_base_decls (base1, base2) == 0)
1868     return false;
1869 
1870   /* If both references are based on the same variable, they cannot alias if
1871      the accesses do not overlap.  */
1872   if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
1873     return false;
1874 
1875   /* If there is a must-alias, there is no use disambiguating further.  */
1876   if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
1877     return true;
1878 
1879   /* For components with variable position, the above test isn't sufficient,
1880      so we disambiguate component references manually.  */
1881   if (ref1 && ref2
1882       && handled_component_p (ref1) && handled_component_p (ref2)
1883       && nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2, false) == 1)
1884     return false;
1885 
1886   return true;
1887 }
1888 
1889 /* Return true if an indirect reference based on *PTR1 constrained
1890    to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
1891    constrained to [OFFSET2, OFFSET2 + MAX_SIZE2).  *PTR1 and BASE2 have
1892    the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
1893    in which case they are computed on-demand.  REF1 and REF2
1894    if non-NULL are the complete memory reference trees.  */
1895 
1896 static bool
1897 indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
1898 			       poly_int64 offset1, poly_int64 max_size1,
1899 			       poly_int64 size1,
1900 			       alias_set_type ref1_alias_set,
1901 			       alias_set_type base1_alias_set,
1902 			       tree ref2 ATTRIBUTE_UNUSED, tree base2,
1903 			       poly_int64 offset2, poly_int64 max_size2,
1904 			       poly_int64 size2,
1905 			       alias_set_type ref2_alias_set,
1906 			       alias_set_type base2_alias_set, bool tbaa_p)
1907 {
1908   tree ptr1;
1909   tree ptrtype1, dbase2;
1910 
1911   gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
1912 			|| TREE_CODE (base1) == TARGET_MEM_REF)
1913 		       && DECL_P (base2));
1914 
1915   ptr1 = TREE_OPERAND (base1, 0);
1916   poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
1917 
1918   /* If only one reference is based on a variable, they cannot alias if
1919      the pointer access is beyond the extent of the variable access.
1920      (the pointer base cannot validly point to an offset less than zero
1921      of the variable).
1922      ???  IVOPTs creates bases that do not honor this restriction,
1923      so do not apply this optimization for TARGET_MEM_REFs.  */
1924   if (TREE_CODE (base1) != TARGET_MEM_REF
1925       && !ranges_maybe_overlap_p (offset1 + moff, -1, offset2, max_size2))
1926     return false;
1927   /* They also cannot alias if the pointer may not point to the decl.  */
1928   if (!ptr_deref_may_alias_decl_p (ptr1, base2))
1929     return false;
1930 
1931   /* Disambiguations that rely on strict aliasing rules follow.  */
1932   if (!flag_strict_aliasing || !tbaa_p)
1933     return true;
1934 
1935   /* If the alias set for a pointer access is zero all bets are off.  */
1936   if (base1_alias_set == 0 || base2_alias_set == 0)
1937     return true;
1938 
1939   /* When we are trying to disambiguate an access with a pointer dereference
1940      as base versus one with a decl as base we can use both the size
1941      of the decl and its dynamic type for extra disambiguation.
1942      ???  We do not know anything about the dynamic type of the decl
1943      other than that its alias-set contains base2_alias_set as a subset
1944      which does not help us here.  */
1945   /* As we know nothing useful about the dynamic type of the decl just
1946      use the usual conflict check rather than a subset test.
1947      ???  We could introduce -fvery-strict-aliasing when the language
1948      does not allow decls to have a dynamic type that differs from their
1949      static type.  Then we can check
1950      !alias_set_subset_of (base1_alias_set, base2_alias_set) instead.  */
1951   if (base1_alias_set != base2_alias_set
1952       && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
1953     return false;
1954 
1955   ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
1956 
1957   /* If the size of the access relevant for TBAA through the pointer
1958      is bigger than the size of the decl we can't possibly access the
1959      decl via that pointer.  */
1960   if (/* ???  This in turn may run afoul when a decl of type T which is
1961 	 a member of union type U is accessed through a pointer to
1962 	 type U and sizeof T is smaller than sizeof U.  */
1963       TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
1964       && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
1965       && compare_sizes (DECL_SIZE (base2),
1966 		        TYPE_SIZE (TREE_TYPE (ptrtype1))) < 0)
1967     return false;
1968 
1969   if (!ref2)
1970     return true;
1971 
1972   /* If the decl is accessed via a MEM_REF, reconstruct the base
1973      we can use for TBAA and an appropriately adjusted offset.  */
1974   dbase2 = ref2;
1975   while (handled_component_p (dbase2))
1976     dbase2 = TREE_OPERAND (dbase2, 0);
1977   poly_int64 doffset1 = offset1;
1978   poly_offset_int doffset2 = offset2;
1979   if (TREE_CODE (dbase2) == MEM_REF
1980       || TREE_CODE (dbase2) == TARGET_MEM_REF)
1981     {
1982       doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;
1983       tree ptrtype2 = TREE_TYPE (TREE_OPERAND (dbase2, 1));
1984       /* If second reference is view-converted, give up now.  */
1985       if (same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (ptrtype2)) != 1)
1986 	return true;
1987     }
1988 
1989   /* If first reference is view-converted, give up now.  */
1990   if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1)
1991     return true;
1992 
1993   /* If both references are through the same type, they do not alias
1994      if the accesses do not overlap.  This does extra disambiguation
1995      for mixed/pointer accesses but requires strict aliasing.
1996      For MEM_REFs we require that the component-ref offset we computed
1997      is relative to the start of the type which we ensure by
1998      comparing rvalue and access type and disregarding the constant
1999      pointer offset.
2000 
2001      But avoid treating variable length arrays as "objects"; instead assume
2002      they can overlap by an exact multiple of their element size.
2003      See gcc.dg/torture/alias-2.c.  */
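  /* An illustrative case (hypothetical code): given

       void f (int n, int (*p)[n], int (*q)[n]);

     the accesses *p and *q may overlap shifted by a multiple of
     sizeof (int) when both point into one flat buffer, so for such
     variable-length array types we compute PARTIAL_OVERLAP instead of
     trusting the offset range check alone.  */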
2004   if (((TREE_CODE (base1) != TARGET_MEM_REF
2005        || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2006        && (TREE_CODE (dbase2) != TARGET_MEM_REF
2007 	   || (!TMR_INDEX (dbase2) && !TMR_INDEX2 (dbase2))))
2008       && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
2009     {
2010       bool partial_overlap = (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
2011 			      && (TYPE_SIZE (TREE_TYPE (base1))
2012 			      && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1)))
2013 				 != INTEGER_CST));
2014       if (!partial_overlap
2015 	  && !ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2))
2016 	return false;
2017       if (!ref1 || !ref2
2018 	  /* If there is a must-alias, there is no use disambiguating further.  */
2019 	  || (!partial_overlap
2020 	      && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2021 	return true;
2022       int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
2023 						   partial_overlap);
2024       if (res == -1)
2025 	return !nonoverlapping_component_refs_p (ref1, ref2);
2026       return !res;
2027     }
2028 
2029   /* Do access-path based disambiguation.  */
2030   if (ref1 && ref2
2031       && (handled_component_p (ref1) || handled_component_p (ref2)))
2032     return aliasing_component_refs_p (ref1,
2033 				      ref1_alias_set, base1_alias_set,
2034 				      offset1, max_size1,
2035 				      ref2,
2036 				      ref2_alias_set, base2_alias_set,
2037 				      offset2, max_size2);
2038 
2039   return true;
2040 }
2041 
2042 /* Return true if two indirect references based on *PTR1
2043    and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
2044    [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  *PTR1 and *PTR2 have
2045    the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
2046    in which case they are computed on-demand.  REF1 and REF2
2047    if non-NULL are the complete memory reference trees. */
2048 
2049 static bool
2050 indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
2051 			   poly_int64 offset1, poly_int64 max_size1,
2052 			   poly_int64 size1,
2053 			   alias_set_type ref1_alias_set,
2054 			   alias_set_type base1_alias_set,
2055 			   tree ref2 ATTRIBUTE_UNUSED, tree base2,
2056 			   poly_int64 offset2, poly_int64 max_size2,
2057 			   poly_int64 size2,
2058 			   alias_set_type ref2_alias_set,
2059 			   alias_set_type base2_alias_set, bool tbaa_p)
2060 {
2061   tree ptr1;
2062   tree ptr2;
2063   tree ptrtype1, ptrtype2;
2064 
2065   gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
2066 			|| TREE_CODE (base1) == TARGET_MEM_REF)
2067 		       && (TREE_CODE (base2) == MEM_REF
2068 			   || TREE_CODE (base2) == TARGET_MEM_REF));
2069 
2070   ptr1 = TREE_OPERAND (base1, 0);
2071   ptr2 = TREE_OPERAND (base2, 0);
2072 
2073   /* If both bases are based on pointers they cannot alias if they may not
2074      point to the same memory object or if they point to the same object
2075      and the accesses do not overlap.  */
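  /* E.g. (illustrative): for accesses p_1->a and p_1->b the pointers
     compare equal, so the bases denote the same object and the field
     offsets alone can decide the question; typically no points-to
     query is needed for this case.  */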
2076   if ((!cfun || gimple_in_ssa_p (cfun))
2077       && operand_equal_p (ptr1, ptr2, 0)
2078       && (((TREE_CODE (base1) != TARGET_MEM_REF
2079 	    || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2080 	   && (TREE_CODE (base2) != TARGET_MEM_REF
2081 	       || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
2082 	  || (TREE_CODE (base1) == TARGET_MEM_REF
2083 	      && TREE_CODE (base2) == TARGET_MEM_REF
2084 	      && (TMR_STEP (base1) == TMR_STEP (base2)
2085 		  || (TMR_STEP (base1) && TMR_STEP (base2)
2086 		      && operand_equal_p (TMR_STEP (base1),
2087 					  TMR_STEP (base2), 0)))
2088 	      && (TMR_INDEX (base1) == TMR_INDEX (base2)
2089 		  || (TMR_INDEX (base1) && TMR_INDEX (base2)
2090 		      && operand_equal_p (TMR_INDEX (base1),
2091 					  TMR_INDEX (base2), 0)))
2092 	      && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
2093 		  || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
2094 		      && operand_equal_p (TMR_INDEX2 (base1),
2095 					  TMR_INDEX2 (base2), 0))))))
2096     {
2097       poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
2098       poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
2099       if (!ranges_maybe_overlap_p (offset1 + moff1, max_size1,
2100 				   offset2 + moff2, max_size2))
2101 	return false;
2102       /* If there is a must-alias, there is no use disambiguating further.  */
2103       if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
2104 	return true;
2105       if (ref1 && ref2)
2106 	{
2107 	  int res = nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2,
2108 						       false);
2109 	  if (res != -1)
2110 	    return !res;
2111 	}
2112     }
2113   if (!ptr_derefs_may_alias_p (ptr1, ptr2))
2114     return false;
2115 
2116   /* Disambiguations that rely on strict aliasing rules follow.  */
2117   if (!flag_strict_aliasing || !tbaa_p)
2118     return true;
2119 
2120   ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
2121   ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
2122 
2123   /* If the alias set for a pointer access is zero all bets are off.  */
2124   if (base1_alias_set == 0
2125       || base2_alias_set == 0)
2126     return true;
2127 
2128   /* Do type-based disambiguation.  */
2129   if (base1_alias_set != base2_alias_set
2130       && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
2131     return false;
2132 
2133   /* If either reference is view-converted, give up now.  */
2134   if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
2135       || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
2136     return true;
2137 
2138   /* If both references are through the same type, they do not alias
2139      if the accesses do not overlap.  This does extra disambiguation
2140      for mixed/pointer accesses but requires strict aliasing.  */
2141   if ((TREE_CODE (base1) != TARGET_MEM_REF
2142        || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2143       && (TREE_CODE (base2) != TARGET_MEM_REF
2144 	  || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
2145       && same_type_for_tbaa (TREE_TYPE (ptrtype1),
2146 			     TREE_TYPE (ptrtype2)) == 1)
2147     {
2148       /* But avoid treating arrays as "objects"; instead assume they
2149          can overlap by an exact multiple of their element size.
2150          See gcc.dg/torture/alias-2.c.  */
2151       bool partial_overlap = TREE_CODE (TREE_TYPE (ptrtype1)) == ARRAY_TYPE;
2152 
2153       if (!partial_overlap
2154 	  && !ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
2155 	return false;
2156       if (!ref1 || !ref2
2157 	  || (!partial_overlap
2158 	      && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2159 	return true;
2160       int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
2161 						   partial_overlap);
2162       if (res == -1)
2163 	return !nonoverlapping_component_refs_p (ref1, ref2);
2164       return !res;
2165     }
2166 
2167   /* Do access-path based disambiguation.  */
2168   if (ref1 && ref2
2169       && (handled_component_p (ref1) || handled_component_p (ref2)))
2170     return aliasing_component_refs_p (ref1,
2171 				      ref1_alias_set, base1_alias_set,
2172 				      offset1, max_size1,
2173 				      ref2,
2174 				      ref2_alias_set, base2_alias_set,
2175 				      offset2, max_size2);
2176 
2177   return true;
2178 }
2179 
2180 /* Return true, if the two memory references REF1 and REF2 may alias.  */
2181 
2182 static bool
2183 refs_may_alias_p_2 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
2184 {
2185   tree base1, base2;
2186   poly_int64 offset1 = 0, offset2 = 0;
2187   poly_int64 max_size1 = -1, max_size2 = -1;
2188   bool var1_p, var2_p, ind1_p, ind2_p;
2189 
2190   gcc_checking_assert ((!ref1->ref
2191 			|| TREE_CODE (ref1->ref) == SSA_NAME
2192 			|| DECL_P (ref1->ref)
2193 			|| TREE_CODE (ref1->ref) == STRING_CST
2194 			|| handled_component_p (ref1->ref)
2195 			|| TREE_CODE (ref1->ref) == MEM_REF
2196 			|| TREE_CODE (ref1->ref) == TARGET_MEM_REF)
2197 		       && (!ref2->ref
2198 			   || TREE_CODE (ref2->ref) == SSA_NAME
2199 			   || DECL_P (ref2->ref)
2200 			   || TREE_CODE (ref2->ref) == STRING_CST
2201 			   || handled_component_p (ref2->ref)
2202 			   || TREE_CODE (ref2->ref) == MEM_REF
2203 			   || TREE_CODE (ref2->ref) == TARGET_MEM_REF));
2204 
2205   /* Decompose the references into their base objects and the access.  */
2206   base1 = ao_ref_base (ref1);
2207   offset1 = ref1->offset;
2208   max_size1 = ref1->max_size;
2209   base2 = ao_ref_base (ref2);
2210   offset2 = ref2->offset;
2211   max_size2 = ref2->max_size;
2212 
2213   /* We can end up with registers or constants as bases for example from
2214      *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
2215      which is seen as a struct copy.  */
2216   if (TREE_CODE (base1) == SSA_NAME
2217       || TREE_CODE (base1) == CONST_DECL
2218       || TREE_CODE (base1) == CONSTRUCTOR
2219       || TREE_CODE (base1) == ADDR_EXPR
2220       || CONSTANT_CLASS_P (base1)
2221       || TREE_CODE (base2) == SSA_NAME
2222       || TREE_CODE (base2) == CONST_DECL
2223       || TREE_CODE (base2) == CONSTRUCTOR
2224       || TREE_CODE (base2) == ADDR_EXPR
2225       || CONSTANT_CLASS_P (base2))
2226     return false;
2227 
2228   /* We can end up referring to code via function and label decls.
2229      As we likely do not properly track code aliases, conservatively
2230      bail out.  */
2231   if (TREE_CODE (base1) == FUNCTION_DECL
2232       || TREE_CODE (base1) == LABEL_DECL
2233       || TREE_CODE (base2) == FUNCTION_DECL
2234       || TREE_CODE (base2) == LABEL_DECL)
2235     return true;
2236 
2237   /* Two volatile accesses always conflict.  */
2238   if (ref1->volatile_p
2239       && ref2->volatile_p)
2240     return true;
2241 
2242   /* Defer to simple offset based disambiguation if we have
2243      references based on two decls.  Do this before deferring to
2244      TBAA to handle must-alias cases in conformance with the
2245      GCC extension of allowing type-punning through unions.  */
2246   var1_p = DECL_P (base1);
2247   var2_p = DECL_P (base2);
2248   if (var1_p && var2_p)
2249     return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
2250 				  ref1->size,
2251 				  ref2->ref, base2, offset2, max_size2,
2252 				  ref2->size);
2253 
2254   /* Handle restrict based accesses.
2255      ???  ao_ref_base strips inner MEM_REF [&decl], recover from that
2256      here.  */
2257   tree rbase1 = base1;
2258   tree rbase2 = base2;
2259   if (var1_p)
2260     {
2261       rbase1 = ref1->ref;
2262       if (rbase1)
2263 	while (handled_component_p (rbase1))
2264 	  rbase1 = TREE_OPERAND (rbase1, 0);
2265     }
2266   if (var2_p)
2267     {
2268       rbase2 = ref2->ref;
2269       if (rbase2)
2270 	while (handled_component_p (rbase2))
2271 	  rbase2 = TREE_OPERAND (rbase2, 0);
2272     }
2273   if (rbase1 && rbase2
2274       && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
2275       && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
2276       /* If the accesses are in the same restrict clique... */
2277       && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
2278       /* But based on different pointers they do not alias.  */
2279       && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
2280     return false;
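  /* For example (illustrative only): in

       void f (int *restrict p, int *restrict q) { *p = 1; *q = 2; }

     both dereferences end up in the same dependence clique but with
     distinct bases, so the clique check above disambiguates them
     without any points-to information.  */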
2281 
2282   ind1_p = (TREE_CODE (base1) == MEM_REF
2283 	    || TREE_CODE (base1) == TARGET_MEM_REF);
2284   ind2_p = (TREE_CODE (base2) == MEM_REF
2285 	    || TREE_CODE (base2) == TARGET_MEM_REF);
2286 
2287   /* Canonicalize the pointer-vs-decl case.  */
2288   if (ind1_p && var2_p)
2289     {
2290       std::swap (offset1, offset2);
2291       std::swap (max_size1, max_size2);
2292       std::swap (base1, base2);
2293       std::swap (ref1, ref2);
2294       var1_p = true;
2295       ind1_p = false;
2296       var2_p = false;
2297       ind2_p = true;
2298     }
2299 
2300   /* First defer to TBAA if possible.  */
2301   if (tbaa_p
2302       && flag_strict_aliasing
2303       && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
2304 				 ao_ref_alias_set (ref2)))
2305     return false;
2306 
2307   /* If the reference is based on a pointer that points to memory
2308      that may not be written to then the other reference cannot possibly
2309      clobber it.  */
2310   if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
2311        && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
2312       || (ind1_p
2313 	  && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
2314 	  && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
2315     return false;
2316 
2317   /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators.  */
2318   if (var1_p && ind2_p)
2319     return indirect_ref_may_alias_decl_p (ref2->ref, base2,
2320 					  offset2, max_size2, ref2->size,
2321 					  ao_ref_alias_set (ref2),
2322 					  ao_ref_base_alias_set (ref2),
2323 					  ref1->ref, base1,
2324 					  offset1, max_size1, ref1->size,
2325 					  ao_ref_alias_set (ref1),
2326 					  ao_ref_base_alias_set (ref1),
2327 					  tbaa_p);
2328   else if (ind1_p && ind2_p)
2329     return indirect_refs_may_alias_p (ref1->ref, base1,
2330 				      offset1, max_size1, ref1->size,
2331 				      ao_ref_alias_set (ref1),
2332 				      ao_ref_base_alias_set (ref1),
2333 				      ref2->ref, base2,
2334 				      offset2, max_size2, ref2->size,
2335 				      ao_ref_alias_set (ref2),
2336 				      ao_ref_base_alias_set (ref2),
2337 				      tbaa_p);
2338 
2339   gcc_unreachable ();
2340 }
2341 
2342 /* Return true, if the two memory references REF1 and REF2 may alias
2343    and update statistics.  */
2344 
2345 bool
2346 refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
2347 {
2348   bool res = refs_may_alias_p_2 (ref1, ref2, tbaa_p);
2349   if (res)
2350     ++alias_stats.refs_may_alias_p_may_alias;
2351   else
2352     ++alias_stats.refs_may_alias_p_no_alias;
2353   return res;
2354 }
2355 
2356 static bool
2357 refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
2358 {
2359   ao_ref r1;
2360   ao_ref_init (&r1, ref1);
2361   return refs_may_alias_p_1 (&r1, ref2, tbaa_p);
2362 }
2363 
2364 bool
2365 refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
2366 {
2367   ao_ref r1, r2;
2368   ao_ref_init (&r1, ref1);
2369   ao_ref_init (&r2, ref2);
2370   return refs_may_alias_p_1 (&r1, &r2, tbaa_p);
2371 }
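
/* Example use of the oracle (illustrative; ref1 and ref2 stand for any
   two reference trees built elsewhere):

     if (!refs_may_alias_p (ref1, ref2, true))
       reorder_the_accesses ();

   where reorder_the_accesses is a stand-in for whatever transformation
   the caller guards.  Passing false for TBAA_P disables the type-based
   rules, as the dependence queries below do.  */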
2372 
2373 /* Returns true if there is an anti-dependence for the STORE that
2374    executes after the LOAD.  */
2375 
2376 bool
2377 refs_anti_dependent_p (tree load, tree store)
2378 {
2379   ao_ref r1, r2;
2380   ao_ref_init (&r1, load);
2381   ao_ref_init (&r2, store);
2382   return refs_may_alias_p_1 (&r1, &r2, false);
2383 }
2384 
2385 /* Returns true if there is an output dependence for the stores
2386    STORE1 and STORE2.  */
2387 
2388 bool
2389 refs_output_dependent_p (tree store1, tree store2)
2390 {
2391   ao_ref r1, r2;
2392   ao_ref_init (&r1, store1);
2393   ao_ref_init (&r2, store2);
2394   return refs_may_alias_p_1 (&r1, &r2, false);
2395 }
2396 
2397 /* If the call CALL may use the memory reference REF return true,
2398    otherwise return false.  */
2399 
2400 static bool
2401 ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
2402 {
2403   tree base, callee;
2404   unsigned i;
2405   int flags = gimple_call_flags (call);
2406 
2407   /* Const functions without a static chain do not implicitly use memory.  */
2408   if (!gimple_call_chain (call)
2409       && (flags & (ECF_CONST|ECF_NOVOPS)))
2410     goto process_args;
2411 
2412   base = ao_ref_base (ref);
2413   if (!base)
2414     return true;
2415 
2416   /* A call that is not without side-effects might involve volatile
2417      accesses and thus conflicts with all other volatile accesses.  */
2418   if (ref->volatile_p)
2419     return true;
2420 
2421   /* If the reference is based on a decl that is not aliased the call
2422      cannot possibly use it.  */
2423   if (DECL_P (base)
2424       && !may_be_aliased (base)
2425       /* But local statics can be used through recursion.  */
2426       && !is_global_var (base))
2427     goto process_args;
2428 
2429   callee = gimple_call_fndecl (call);
2430 
2431   /* Handle those builtin functions explicitly that do not act as
2432      escape points.  See tree-ssa-structalias.c:find_func_aliases
2433      for the list of builtins we might need to handle here.  */
2434   if (callee != NULL_TREE
2435       && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2436     switch (DECL_FUNCTION_CODE (callee))
2437       {
2438 	/* All the following functions read memory pointed to by
2439 	   their second argument.  strcat/strncat additionally
2440 	   read memory pointed to by the first argument.  */
2441 	case BUILT_IN_STRCAT:
2442 	case BUILT_IN_STRNCAT:
2443 	  {
2444 	    ao_ref dref;
2445 	    ao_ref_init_from_ptr_and_size (&dref,
2446 					   gimple_call_arg (call, 0),
2447 					   NULL_TREE);
2448 	    if (refs_may_alias_p_1 (&dref, ref, false))
2449 	      return true;
2450 	  }
2451 	  /* FALLTHRU */
2452 	case BUILT_IN_STRCPY:
2453 	case BUILT_IN_STRNCPY:
2454 	case BUILT_IN_MEMCPY:
2455 	case BUILT_IN_MEMMOVE:
2456 	case BUILT_IN_MEMPCPY:
2457 	case BUILT_IN_STPCPY:
2458 	case BUILT_IN_STPNCPY:
2459 	case BUILT_IN_TM_MEMCPY:
2460 	case BUILT_IN_TM_MEMMOVE:
2461 	  {
2462 	    ao_ref dref;
2463 	    tree size = NULL_TREE;
2464 	    if (gimple_call_num_args (call) == 3)
2465 	      size = gimple_call_arg (call, 2);
2466 	    ao_ref_init_from_ptr_and_size (&dref,
2467 					   gimple_call_arg (call, 1),
2468 					   size);
2469 	    return refs_may_alias_p_1 (&dref, ref, false);
2470 	  }
2471 	case BUILT_IN_STRCAT_CHK:
2472 	case BUILT_IN_STRNCAT_CHK:
2473 	  {
2474 	    ao_ref dref;
2475 	    ao_ref_init_from_ptr_and_size (&dref,
2476 					   gimple_call_arg (call, 0),
2477 					   NULL_TREE);
2478 	    if (refs_may_alias_p_1 (&dref, ref, false))
2479 	      return true;
2480 	  }
2481 	  /* FALLTHRU */
2482 	case BUILT_IN_STRCPY_CHK:
2483 	case BUILT_IN_STRNCPY_CHK:
2484 	case BUILT_IN_MEMCPY_CHK:
2485 	case BUILT_IN_MEMMOVE_CHK:
2486 	case BUILT_IN_MEMPCPY_CHK:
2487 	case BUILT_IN_STPCPY_CHK:
2488 	case BUILT_IN_STPNCPY_CHK:
2489 	  {
2490 	    ao_ref dref;
2491 	    tree size = NULL_TREE;
2492 	    if (gimple_call_num_args (call) == 4)
2493 	      size = gimple_call_arg (call, 2);
2494 	    ao_ref_init_from_ptr_and_size (&dref,
2495 					   gimple_call_arg (call, 1),
2496 					   size);
2497 	    return refs_may_alias_p_1 (&dref, ref, false);
2498 	  }
2499 	case BUILT_IN_BCOPY:
2500 	  {
2501 	    ao_ref dref;
2502 	    tree size = gimple_call_arg (call, 2);
2503 	    ao_ref_init_from_ptr_and_size (&dref,
2504 					   gimple_call_arg (call, 0),
2505 					   size);
2506 	    return refs_may_alias_p_1 (&dref, ref, false);
2507 	  }
2508 
2509 	/* The following functions read memory pointed to by their
2510 	   first argument.  */
2511 	CASE_BUILT_IN_TM_LOAD (1):
2512 	CASE_BUILT_IN_TM_LOAD (2):
2513 	CASE_BUILT_IN_TM_LOAD (4):
2514 	CASE_BUILT_IN_TM_LOAD (8):
2515 	CASE_BUILT_IN_TM_LOAD (FLOAT):
2516 	CASE_BUILT_IN_TM_LOAD (DOUBLE):
2517 	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
2518 	CASE_BUILT_IN_TM_LOAD (M64):
2519 	CASE_BUILT_IN_TM_LOAD (M128):
2520 	CASE_BUILT_IN_TM_LOAD (M256):
2521 	case BUILT_IN_TM_LOG:
2522 	case BUILT_IN_TM_LOG_1:
2523 	case BUILT_IN_TM_LOG_2:
2524 	case BUILT_IN_TM_LOG_4:
2525 	case BUILT_IN_TM_LOG_8:
2526 	case BUILT_IN_TM_LOG_FLOAT:
2527 	case BUILT_IN_TM_LOG_DOUBLE:
2528 	case BUILT_IN_TM_LOG_LDOUBLE:
2529 	case BUILT_IN_TM_LOG_M64:
2530 	case BUILT_IN_TM_LOG_M128:
2531 	case BUILT_IN_TM_LOG_M256:
2532 	  return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);
2533 
2534 	/* These read memory pointed to by the first argument.  */
2535 	case BUILT_IN_STRDUP:
2536 	case BUILT_IN_STRNDUP:
2537 	case BUILT_IN_REALLOC:
2538 	  {
2539 	    ao_ref dref;
2540 	    tree size = NULL_TREE;
2541 	    if (gimple_call_num_args (call) == 2)
2542 	      size = gimple_call_arg (call, 1);
2543 	    ao_ref_init_from_ptr_and_size (&dref,
2544 					   gimple_call_arg (call, 0),
2545 					   size);
2546 	    return refs_may_alias_p_1 (&dref, ref, false);
2547 	  }
2548 	/* These read memory pointed to by the first argument.  */
2549 	case BUILT_IN_INDEX:
2550 	case BUILT_IN_STRCHR:
2551 	case BUILT_IN_STRRCHR:
2552 	  {
2553 	    ao_ref dref;
2554 	    ao_ref_init_from_ptr_and_size (&dref,
2555 					   gimple_call_arg (call, 0),
2556 					   NULL_TREE);
2557 	    return refs_may_alias_p_1 (&dref, ref, false);
2558 	  }
2559 	/* These read memory pointed to by the first argument with size
2560 	   in the third argument.  */
2561 	case BUILT_IN_MEMCHR:
2562 	  {
2563 	    ao_ref dref;
2564 	    ao_ref_init_from_ptr_and_size (&dref,
2565 					   gimple_call_arg (call, 0),
2566 					   gimple_call_arg (call, 2));
2567 	    return refs_may_alias_p_1 (&dref, ref, false);
2568 	  }
2569 	/* These read memory pointed to by the first and second arguments.  */
2570 	case BUILT_IN_STRSTR:
2571 	case BUILT_IN_STRPBRK:
2572 	  {
2573 	    ao_ref dref;
2574 	    ao_ref_init_from_ptr_and_size (&dref,
2575 					   gimple_call_arg (call, 0),
2576 					   NULL_TREE);
2577 	    if (refs_may_alias_p_1 (&dref, ref, false))
2578 	      return true;
2579 	    ao_ref_init_from_ptr_and_size (&dref,
2580 					   gimple_call_arg (call, 1),
2581 					   NULL_TREE);
2582 	    return refs_may_alias_p_1 (&dref, ref, false);
2583 	  }
2584 
2585 	/* The following builtins do not read from memory.  */
2586 	case BUILT_IN_FREE:
2587 	case BUILT_IN_MALLOC:
2588 	case BUILT_IN_POSIX_MEMALIGN:
2589 	case BUILT_IN_ALIGNED_ALLOC:
2590 	case BUILT_IN_CALLOC:
2591 	CASE_BUILT_IN_ALLOCA:
2592 	case BUILT_IN_STACK_SAVE:
2593 	case BUILT_IN_STACK_RESTORE:
2594 	case BUILT_IN_MEMSET:
2595 	case BUILT_IN_TM_MEMSET:
2596 	case BUILT_IN_MEMSET_CHK:
2597 	case BUILT_IN_FREXP:
2598 	case BUILT_IN_FREXPF:
2599 	case BUILT_IN_FREXPL:
2600 	case BUILT_IN_GAMMA_R:
2601 	case BUILT_IN_GAMMAF_R:
2602 	case BUILT_IN_GAMMAL_R:
2603 	case BUILT_IN_LGAMMA_R:
2604 	case BUILT_IN_LGAMMAF_R:
2605 	case BUILT_IN_LGAMMAL_R:
2606 	case BUILT_IN_MODF:
2607 	case BUILT_IN_MODFF:
2608 	case BUILT_IN_MODFL:
2609 	case BUILT_IN_REMQUO:
2610 	case BUILT_IN_REMQUOF:
2611 	case BUILT_IN_REMQUOL:
2612 	case BUILT_IN_SINCOS:
2613 	case BUILT_IN_SINCOSF:
2614 	case BUILT_IN_SINCOSL:
2615 	case BUILT_IN_ASSUME_ALIGNED:
2616 	case BUILT_IN_VA_END:
2617 	  return false;
2618 	/* __sync_* builtins and some OpenMP builtins act as threading
2619 	   barriers.  */
2620 #undef DEF_SYNC_BUILTIN
2621 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2622 #include "sync-builtins.def"
2623 #undef DEF_SYNC_BUILTIN
2624 	case BUILT_IN_GOMP_ATOMIC_START:
2625 	case BUILT_IN_GOMP_ATOMIC_END:
2626 	case BUILT_IN_GOMP_BARRIER:
2627 	case BUILT_IN_GOMP_BARRIER_CANCEL:
2628 	case BUILT_IN_GOMP_TASKWAIT:
2629 	case BUILT_IN_GOMP_TASKGROUP_END:
2630 	case BUILT_IN_GOMP_CRITICAL_START:
2631 	case BUILT_IN_GOMP_CRITICAL_END:
2632 	case BUILT_IN_GOMP_CRITICAL_NAME_START:
2633 	case BUILT_IN_GOMP_CRITICAL_NAME_END:
2634 	case BUILT_IN_GOMP_LOOP_END:
2635 	case BUILT_IN_GOMP_LOOP_END_CANCEL:
2636 	case BUILT_IN_GOMP_ORDERED_START:
2637 	case BUILT_IN_GOMP_ORDERED_END:
2638 	case BUILT_IN_GOMP_SECTIONS_END:
2639 	case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2640 	case BUILT_IN_GOMP_SINGLE_COPY_START:
2641 	case BUILT_IN_GOMP_SINGLE_COPY_END:
2642 	  return true;
2643 
2644 	default:
2645 	  /* Fallthru to general call handling.  */;
2646       }
2647 
2648   /* Check if base is a global static variable that is not read
2649      by the function.  */
2650   if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2651     {
2652       struct cgraph_node *node = cgraph_node::get (callee);
2653       bitmap read;
2654       int id;
2655 
2656       /* FIXME: Callee can be an OMP builtin that does not have a call graph
2657 	 node yet.  We should enforce that there are nodes for all decls in the
2658 	 IL and remove this check instead.  */
2659       if (node
2660 	  && (id = ipa_reference_var_uid (base)) != -1
2661 	  && (read = ipa_reference_get_read_global (node))
2662 	  && !bitmap_bit_p (read, id))
2663 	goto process_args;
2664     }
2665 
2666   /* Check if the base variable is call-used.  */
2667   if (DECL_P (base))
2668     {
2669       if (pt_solution_includes (gimple_call_use_set (call), base))
2670 	return true;
2671     }
2672   else if ((TREE_CODE (base) == MEM_REF
2673 	    || TREE_CODE (base) == TARGET_MEM_REF)
2674 	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2675     {
2676       struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2677       if (!pi)
2678 	return true;
2679 
2680       if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
2681 	return true;
2682     }
2683   else
2684     return true;
2685 
2686   /* Inspect call arguments for passed-by-value aliases.  */
2687 process_args:
2688   for (i = 0; i < gimple_call_num_args (call); ++i)
2689     {
2690       tree op = gimple_call_arg (call, i);
2691       int flags = gimple_call_arg_flags (call, i);
2692 
2693       if (flags & EAF_UNUSED)
2694 	continue;
2695 
2696       if (TREE_CODE (op) == WITH_SIZE_EXPR)
2697 	op = TREE_OPERAND (op, 0);
2698 
2699       if (TREE_CODE (op) != SSA_NAME
2700 	  && !is_gimple_min_invariant (op))
2701 	{
2702 	  ao_ref r;
2703 	  ao_ref_init (&r, op);
2704 	  if (refs_may_alias_p_1 (&r, ref, tbaa_p))
2705 	    return true;
2706 	}
2707     }
2708 
2709   return false;
2710 }
2711 
2712 static bool
2713 ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
2714 {
2715   bool res;
2716   res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
2717   if (res)
2718     ++alias_stats.ref_maybe_used_by_call_p_may_alias;
2719   else
2720     ++alias_stats.ref_maybe_used_by_call_p_no_alias;
2721   return res;
2722 }
2723 
2724 
2725 /* If the statement STMT may use the memory reference REF return
2726    true, otherwise return false.  */
2727 
2728 bool
2729 ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
2730 {
2731   if (is_gimple_assign (stmt))
2732     {
2733       tree rhs;
2734 
2735       /* Only single assignments can reference memory.  */
2736       if (!gimple_assign_single_p (stmt))
2737 	return false;
2738 
2739       rhs = gimple_assign_rhs1 (stmt);
2740       if (is_gimple_reg (rhs)
2741 	  || is_gimple_min_invariant (rhs)
2742 	  || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
2743 	return false;
2744 
2745       return refs_may_alias_p (rhs, ref, tbaa_p);
2746     }
2747   else if (is_gimple_call (stmt))
2748     return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref, tbaa_p);
2749   else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
2750     {
2751       tree retval = gimple_return_retval (return_stmt);
2752       if (retval
2753 	  && TREE_CODE (retval) != SSA_NAME
2754 	  && !is_gimple_min_invariant (retval)
2755 	  && refs_may_alias_p (retval, ref, tbaa_p))
2756 	return true;
2757       /* If ref escapes the function then the return acts as a use.  */
2758       tree base = ao_ref_base (ref);
2759       if (!base)
2760 	;
2761       else if (DECL_P (base))
2762 	return is_global_var (base);
2763       else if (TREE_CODE (base) == MEM_REF
2764 	       || TREE_CODE (base) == TARGET_MEM_REF)
2765 	return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
2766       return false;
2767     }
2768 
2769   return true;
2770 }
2771 
2772 bool
2773 ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
2774 {
2775   ao_ref r;
2776   ao_ref_init (&r, ref);
2777   return ref_maybe_used_by_stmt_p (stmt, &r, tbaa_p);
2778 }
2779 
2780 /* If the call in statement CALL may clobber the memory reference REF
2781    return true, otherwise return false.  */
2782 
2783 bool
2784 call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref)
2785 {
2786   tree base;
2787   tree callee;
2788 
2789   /* If the call is pure or const it cannot clobber anything.  */
2790   if (gimple_call_flags (call)
2791       & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
2792     return false;
2793   if (gimple_call_internal_p (call))
2794     switch (gimple_call_internal_fn (call))
2795       {
2796 	/* Treat these internal calls like ECF_PURE for aliasing,
2797 	   they don't write to any memory the program should care about.
2798 	   They have important other side-effects, and read memory,
2799 	   so can't be ECF_NOVOPS.  */
2800       case IFN_UBSAN_NULL:
2801       case IFN_UBSAN_BOUNDS:
2802       case IFN_UBSAN_VPTR:
2803       case IFN_UBSAN_OBJECT_SIZE:
2804       case IFN_UBSAN_PTR:
2805       case IFN_ASAN_CHECK:
2806 	return false;
2807       default:
2808 	break;
2809       }
2810 
2811   base = ao_ref_base (ref);
2812   if (!base)
2813     return true;
2814 
2815   if (TREE_CODE (base) == SSA_NAME
2816       || CONSTANT_CLASS_P (base))
2817     return false;
2818 
2819   /* A call that is not without side-effects might involve volatile
2820      accesses and thus conflicts with all other volatile accesses.  */
2821   if (ref->volatile_p)
2822     return true;
2823 
2824   /* If the reference is based on a decl that is not aliased the call
2825      cannot possibly clobber it.  */
2826   if (DECL_P (base)
2827       && !may_be_aliased (base)
2828       /* But local non-readonly statics can be modified through recursion
2829          or the call may implement a threading barrier which we must
2830 	 treat as may-def.  */
2831       && (TREE_READONLY (base)
2832 	  || !is_global_var (base)))
2833     return false;
2834 
2835   /* If the reference is based on a pointer that points to memory
2836      that may not be written to then the call cannot possibly clobber it.  */
2837   if ((TREE_CODE (base) == MEM_REF
2838        || TREE_CODE (base) == TARGET_MEM_REF)
2839       && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
2840       && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
2841     return false;
2842 
2843   callee = gimple_call_fndecl (call);
2844 
2845   /* Handle those builtin functions explicitly that do not act as
2846      escape points.  See tree-ssa-structalias.c:find_func_aliases
2847      for the list of builtins we might need to handle here.  */
2848   if (callee != NULL_TREE
2849       && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2850     switch (DECL_FUNCTION_CODE (callee))
2851       {
2852 	/* All the following functions clobber memory pointed to by
2853 	   their first argument.  */
2854 	case BUILT_IN_STRCPY:
2855 	case BUILT_IN_STRNCPY:
2856 	case BUILT_IN_MEMCPY:
2857 	case BUILT_IN_MEMMOVE:
2858 	case BUILT_IN_MEMPCPY:
2859 	case BUILT_IN_STPCPY:
2860 	case BUILT_IN_STPNCPY:
2861 	case BUILT_IN_STRCAT:
2862 	case BUILT_IN_STRNCAT:
2863 	case BUILT_IN_MEMSET:
2864 	case BUILT_IN_TM_MEMSET:
2865 	CASE_BUILT_IN_TM_STORE (1):
2866 	CASE_BUILT_IN_TM_STORE (2):
2867 	CASE_BUILT_IN_TM_STORE (4):
2868 	CASE_BUILT_IN_TM_STORE (8):
2869 	CASE_BUILT_IN_TM_STORE (FLOAT):
2870 	CASE_BUILT_IN_TM_STORE (DOUBLE):
2871 	CASE_BUILT_IN_TM_STORE (LDOUBLE):
2872 	CASE_BUILT_IN_TM_STORE (M64):
2873 	CASE_BUILT_IN_TM_STORE (M128):
2874 	CASE_BUILT_IN_TM_STORE (M256):
2875 	case BUILT_IN_TM_MEMCPY:
2876 	case BUILT_IN_TM_MEMMOVE:
2877 	  {
2878 	    ao_ref dref;
2879 	    tree size = NULL_TREE;
2880 	    /* Don't pass in the size for strncat, as the maximum size
2881 	       written is strlen (dest) + n + 1 bytes from dest, i.e.
2882 	       n + 1 bytes at dest + strlen (dest), and strlen (dest)
2883 	       isn't known.  */
2884 	    if (gimple_call_num_args (call) == 3
2885 		&& DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
2886 	      size = gimple_call_arg (call, 2);
2887 	    ao_ref_init_from_ptr_and_size (&dref,
2888 					   gimple_call_arg (call, 0),
2889 					   size);
2890 	    return refs_may_alias_p_1 (&dref, ref, false);
2891 	  }
2892 	case BUILT_IN_STRCPY_CHK:
2893 	case BUILT_IN_STRNCPY_CHK:
2894 	case BUILT_IN_MEMCPY_CHK:
2895 	case BUILT_IN_MEMMOVE_CHK:
2896 	case BUILT_IN_MEMPCPY_CHK:
2897 	case BUILT_IN_STPCPY_CHK:
2898 	case BUILT_IN_STPNCPY_CHK:
2899 	case BUILT_IN_STRCAT_CHK:
2900 	case BUILT_IN_STRNCAT_CHK:
2901 	case BUILT_IN_MEMSET_CHK:
2902 	  {
2903 	    ao_ref dref;
2904 	    tree size = NULL_TREE;
2905 	    /* Don't pass in the size for __strncat_chk, as the maximum
2906 	       size written is strlen (dest) + n + 1 bytes from dest, i.e.
2907 	       n + 1 bytes at dest + strlen (dest), and strlen (dest)
2908 	       isn't known.  */
2909 	    if (gimple_call_num_args (call) == 4
2910 		&& DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
2911 	      size = gimple_call_arg (call, 2);
2912 	    ao_ref_init_from_ptr_and_size (&dref,
2913 					   gimple_call_arg (call, 0),
2914 					   size);
2915 	    return refs_may_alias_p_1 (&dref, ref, false);
2916 	  }
2917 	case BUILT_IN_BCOPY:
2918 	  {
2919 	    ao_ref dref;
2920 	    tree size = gimple_call_arg (call, 2);
2921 	    ao_ref_init_from_ptr_and_size (&dref,
2922 					   gimple_call_arg (call, 1),
2923 					   size);
2924 	    return refs_may_alias_p_1 (&dref, ref, false);
2925 	  }
2926 	/* Allocating memory does not have any side-effects apart from
2927 	   being the definition point for the pointer.  */
2928 	case BUILT_IN_MALLOC:
2929 	case BUILT_IN_ALIGNED_ALLOC:
2930 	case BUILT_IN_CALLOC:
2931 	case BUILT_IN_STRDUP:
2932 	case BUILT_IN_STRNDUP:
2933 	  /* Unix98 specifies that errno is set on allocation failure.  */
2934 	  if (flag_errno_math
2935 	      && targetm.ref_may_alias_errno (ref))
2936 	    return true;
2937 	  return false;
2938 	case BUILT_IN_STACK_SAVE:
	CASE_BUILT_IN_ALLOCA:
	case BUILT_IN_ASSUME_ALIGNED:
	  return false;
	/* But posix_memalign stores a pointer into the memory pointed to
	   by its first argument.  */
	case BUILT_IN_POSIX_MEMALIGN:
	  {
	    tree ptrptr = gimple_call_arg (call, 0);
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref, ptrptr,
					   TYPE_SIZE_UNIT (ptr_type_node));
	    return (refs_may_alias_p_1 (&dref, ref, false)
		    || (flag_errno_math
			&& targetm.ref_may_alias_errno (ref)));
	  }
	/* Freeing memory kills the pointed-to memory.  More importantly
	   the call has to serve as a barrier for moving loads and stores
	   across it.  */
	case BUILT_IN_FREE:
	case BUILT_IN_VA_END:
	  {
	    tree ptr = gimple_call_arg (call, 0);
	    return ptr_deref_may_alias_ref_p_1 (ptr, ref);
	  }
	/* Realloc serves both as allocation point and deallocation point.  */
	case BUILT_IN_REALLOC:
	  {
	    tree ptr = gimple_call_arg (call, 0);
	    /* Unix98 specifies that errno is set on allocation failure.  */
	    return ((flag_errno_math
		     && targetm.ref_may_alias_errno (ref))
		    || ptr_deref_may_alias_ref_p_1 (ptr, ref));
	  }
	case BUILT_IN_GAMMA_R:
	case BUILT_IN_GAMMAF_R:
	case BUILT_IN_GAMMAL_R:
	case BUILT_IN_LGAMMA_R:
	case BUILT_IN_LGAMMAF_R:
	case BUILT_IN_LGAMMAL_R:
	  {
	    tree out = gimple_call_arg (call, 1);
	    if (ptr_deref_may_alias_ref_p_1 (out, ref))
	      return true;
	    if (flag_errno_math)
	      break;
	    return false;
	  }
	case BUILT_IN_FREXP:
	case BUILT_IN_FREXPF:
	case BUILT_IN_FREXPL:
	case BUILT_IN_MODF:
	case BUILT_IN_MODFF:
	case BUILT_IN_MODFL:
	  {
	    tree out = gimple_call_arg (call, 1);
	    return ptr_deref_may_alias_ref_p_1 (out, ref);
	  }
	case BUILT_IN_REMQUO:
	case BUILT_IN_REMQUOF:
	case BUILT_IN_REMQUOL:
	  {
	    tree out = gimple_call_arg (call, 2);
	    if (ptr_deref_may_alias_ref_p_1 (out, ref))
	      return true;
	    if (flag_errno_math)
	      break;
	    return false;
	  }
	case BUILT_IN_SINCOS:
	case BUILT_IN_SINCOSF:
	case BUILT_IN_SINCOSL:
	  {
	    tree sin = gimple_call_arg (call, 1);
	    tree cos = gimple_call_arg (call, 2);
	    return (ptr_deref_may_alias_ref_p_1 (sin, ref)
		    || ptr_deref_may_alias_ref_p_1 (cos, ref));
	  }
	/* __sync_* builtins and some OpenMP builtins act as threading
	   barriers.  */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
#include "sync-builtins.def"
#undef DEF_SYNC_BUILTIN
	case BUILT_IN_GOMP_ATOMIC_START:
	case BUILT_IN_GOMP_ATOMIC_END:
	case BUILT_IN_GOMP_BARRIER:
	case BUILT_IN_GOMP_BARRIER_CANCEL:
	case BUILT_IN_GOMP_TASKWAIT:
	case BUILT_IN_GOMP_TASKGROUP_END:
	case BUILT_IN_GOMP_CRITICAL_START:
	case BUILT_IN_GOMP_CRITICAL_END:
	case BUILT_IN_GOMP_CRITICAL_NAME_START:
	case BUILT_IN_GOMP_CRITICAL_NAME_END:
	case BUILT_IN_GOMP_LOOP_END:
	case BUILT_IN_GOMP_LOOP_END_CANCEL:
	case BUILT_IN_GOMP_ORDERED_START:
	case BUILT_IN_GOMP_ORDERED_END:
	case BUILT_IN_GOMP_SECTIONS_END:
	case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
	case BUILT_IN_GOMP_SINGLE_COPY_START:
	case BUILT_IN_GOMP_SINGLE_COPY_END:
	  return true;
	default:
	  /* Fallthru to general call handling.  */;
      }

  /* Check if base is a global static variable that is not written
     by the function.  */
  if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_node::get (callee);
      bitmap written;
      int id;

      if (node
	  && (id = ipa_reference_var_uid (base)) != -1
	  && (written = ipa_reference_get_written_global (node))
	  && !bitmap_bit_p (written, id))
	return false;
    }

  /* Check if the base variable is call-clobbered.  */
  if (DECL_P (base))
    return pt_solution_includes (gimple_call_clobber_set (call), base);
  else if ((TREE_CODE (base) == MEM_REF
	    || TREE_CODE (base) == TARGET_MEM_REF)
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      if (!pi)
	return true;

      return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
    }

  return true;
}

/* If the call in statement CALL may clobber the memory reference REF
   return true, otherwise return false.  */

bool
call_may_clobber_ref_p (gcall *call, tree ref)
{
  bool res;
  ao_ref r;
  ao_ref_init (&r, ref);
  res = call_may_clobber_ref_p_1 (call, &r);
  if (res)
    ++alias_stats.call_may_clobber_ref_p_may_alias;
  else
    ++alias_stats.call_may_clobber_ref_p_no_alias;
  return res;
}
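
/* A minimal usage sketch (STMT and REF_TREE are placeholder names, not
   part of this file): a pass that wants to know whether a call may
   overwrite the memory behind a reference would query the oracle
   roughly like so, or use the tree-taking wrapper above directly when
   no ao_ref is cached:

     ao_ref r;
     ao_ref_init (&r, ref_tree);
     if (is_gimple_call (stmt)
	 && call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), &r))
       ... treat the reference as possibly overwritten ...  */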

/* If the statement STMT may clobber the memory reference REF return true,
   otherwise return false.  */

bool
stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
{
  if (is_gimple_call (stmt))
    {
      tree lhs = gimple_call_lhs (stmt);
      if (lhs
	  && TREE_CODE (lhs) != SSA_NAME)
	{
	  ao_ref r;
	  ao_ref_init (&r, lhs);
	  if (refs_may_alias_p_1 (ref, &r, tbaa_p))
	    return true;
	}

      return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref);
    }
  else if (gimple_assign_single_p (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if (TREE_CODE (lhs) != SSA_NAME)
	{
	  ao_ref r;
	  ao_ref_init (&r, lhs);
	  return refs_may_alias_p_1 (ref, &r, tbaa_p);
	}
    }
  else if (gimple_code (stmt) == GIMPLE_ASM)
    return true;

  return false;
}

/* Convenience wrapper around the above for a bare reference tree REF.  */

bool
stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_may_clobber_ref_p_1 (stmt, &r, tbaa_p);
}
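
/* An illustrative sketch (BB and REF_TREE are placeholders): scanning
   the statements of a block and conservatively stopping at the first
   one that may clobber a reference might look like

     ao_ref r;
     ao_ref_init (&r, ref_tree);
     for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	  !gsi_end_p (gsi); gsi_next (&gsi))
       if (stmt_may_clobber_ref_p_1 (gsi_stmt (gsi), &r, true))
	 break;  */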

/* Return true if store1 and store2 described by corresponding tuples
   <BASE, OFFSET, SIZE, MAX_SIZE> have the same size and store to the same
   address.  */

static bool
same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
			 poly_int64 max_size1,
			 tree base2, poly_int64 offset2, poly_int64 size2,
			 poly_int64 max_size2)
{
  /* Offsets need to be 0.  */
  if (maybe_ne (offset1, 0)
      || maybe_ne (offset2, 0))
    return false;

  bool base1_obj_p = SSA_VAR_P (base1);
  bool base2_obj_p = SSA_VAR_P (base2);

  /* We need one object.  */
  if (base1_obj_p == base2_obj_p)
    return false;
  tree obj = base1_obj_p ? base1 : base2;

  /* And we need one MEM_REF.  */
  bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
  bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
  if (base1_memref_p == base2_memref_p)
    return false;
  tree memref = base1_memref_p ? base1 : base2;

  /* Sizes need to be valid.  */
  if (!known_size_p (max_size1)
      || !known_size_p (max_size2)
      || !known_size_p (size1)
      || !known_size_p (size2))
    return false;

  /* Max_size needs to match size.  */
  if (maybe_ne (max_size1, size1)
      || maybe_ne (max_size2, size2))
    return false;

  /* Sizes need to match.  */
  if (maybe_ne (size1, size2))
    return false;

  /* Check that memref is a store to a pointer with singleton points-to
     info.  */
  if (!integer_zerop (TREE_OPERAND (memref, 1)))
    return false;
  tree ptr = TREE_OPERAND (memref, 0);
  if (TREE_CODE (ptr) != SSA_NAME)
    return false;
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
  unsigned int pt_uid;
  if (pi == NULL
      || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
    return false;

  /* Be conservative with non-call exceptions when the address might
     be NULL.  */
  if (cfun->can_throw_non_call_exceptions && pi->pt.null)
    return false;

  /* Check that ptr points relative to obj.  */
  unsigned int obj_uid = DECL_PT_UID (obj);
  if (obj_uid != pt_uid)
    return false;

  /* Check that the object size is the same as the store size.  That
     ensures that ptr points to the start of obj.  */
  return (DECL_SIZE (obj)
	  && poly_int_tree_p (DECL_SIZE (obj))
	  && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
}
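
/* Illustration (a hand-written sketch, not from the sources): the
   predicate above is meant to match a pair of stores like

     struct S s;	(obj; DECL_SIZE of s equals the store size)
     ...
     *p_1 = x;		(MEM_REF store; p_1's points-to set is {s})
     s = y;		(store to the decl itself)

   Both stores start at offset zero, have equal known sizes matching
   the size of s, and p_1 can only point to s, so the two stores write
   exactly the same bytes.  */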

/* If STMT kills the memory reference REF return true, otherwise
   return false.  */

bool
stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
{
  if (!ao_ref_base (ref))
    return false;

  if (gimple_has_lhs (stmt)
      && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
	 and we can catch it in the current function where we could inspect
	 the previous value.
	 ???  We only need to care about the RHS throwing.  For aggregate
	 assignments or similar calls and non-call exceptions the LHS
	 might throw as well.  */
      && !stmt_can_throw_internal (cfun, stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      /* If LHS is literally a base of the access we are done.  */
      if (ref->ref)
	{
	  tree base = ref->ref;
	  tree innermost_dropped_array_ref = NULL_TREE;
	  if (handled_component_p (base))
	    {
	      tree saved_lhs0 = NULL_TREE;
	      if (handled_component_p (lhs))
		{
		  saved_lhs0 = TREE_OPERAND (lhs, 0);
		  TREE_OPERAND (lhs, 0) = integer_zero_node;
		}
	      do
		{
		  /* Just compare the outermost handled component, if
		     they are equal we have found a possible common
		     base.  */
		  tree saved_base0 = TREE_OPERAND (base, 0);
		  TREE_OPERAND (base, 0) = integer_zero_node;
		  bool res = operand_equal_p (lhs, base, 0);
		  TREE_OPERAND (base, 0) = saved_base0;
		  if (res)
		    break;
		  /* Remember if we drop an array-ref that we need to
		     double-check not being at struct end.  */
		  if (TREE_CODE (base) == ARRAY_REF
		      || TREE_CODE (base) == ARRAY_RANGE_REF)
		    innermost_dropped_array_ref = base;
		  /* Otherwise drop handled components of the access.  */
		  base = saved_base0;
		}
	      while (handled_component_p (base));
	      if (saved_lhs0)
		TREE_OPERAND (lhs, 0) = saved_lhs0;
	    }
	  /* Finally check if the lhs has the same address and size as the
	     base candidate of the access.  Watch out if we have dropped
	     an array-ref that was at struct end, this means ref->ref may
	     be outside of the TYPE_SIZE of its base.  */
	  if ((! innermost_dropped_array_ref
	       || ! array_at_struct_end_p (innermost_dropped_array_ref))
	      && (lhs == base
		  || (((TYPE_SIZE (TREE_TYPE (lhs))
			== TYPE_SIZE (TREE_TYPE (base)))
		       || (TYPE_SIZE (TREE_TYPE (lhs))
			   && TYPE_SIZE (TREE_TYPE (base))
			   && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
					       TYPE_SIZE (TREE_TYPE (base)),
					       0)))
		      && operand_equal_p (lhs, base,
					  OEP_ADDRESS_OF
					  | OEP_MATCH_SIDE_EFFECTS))))
	    return true;
	}

      /* Now look for non-literal equal bases with the restriction of
	 handling constant offset and size.  */
      /* For a must-alias check we need to be able to constrain
	 the access properly.  */
      if (!ref->max_size_known_p ())
	return false;
      poly_int64 size, offset, max_size, ref_offset = ref->offset;
      bool reverse;
      tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
					   &reverse);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
	 so base == ref->base does not always hold.  */
      if (base != ref->base)
	{
	  /* Try using points-to info.  */
	  if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
				       ref->offset, ref->size, ref->max_size))
	    return true;

	  /* If both base and ref->base are MEM_REFs, only compare the
	     first operand, and if the second operand isn't equal constant,
	     try to add the offsets into offset and ref_offset.  */
	  if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
	      && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
	    {
	      if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
				       TREE_OPERAND (ref->base, 1)))
		{
		  poly_offset_int off1 = mem_ref_offset (base);
		  off1 <<= LOG2_BITS_PER_UNIT;
		  off1 += offset;
		  poly_offset_int off2 = mem_ref_offset (ref->base);
		  off2 <<= LOG2_BITS_PER_UNIT;
		  off2 += ref_offset;
		  if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
		    size = -1;
		}
	    }
	  else
	    size = -1;
	}
      /* For a must-alias check we need to be able to constrain
	 the access properly.  */
      if (known_eq (size, max_size)
	  && known_subrange_p (ref_offset, ref->max_size, offset, size))
	return true;
    }

  if (is_gimple_call (stmt))
    {
      tree callee = gimple_call_fndecl (stmt);
      if (callee != NULL_TREE
	  && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (callee))
	  {
	  case BUILT_IN_FREE:
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      tree base = ao_ref_base (ref);
	      if (base && TREE_CODE (base) == MEM_REF
		  && TREE_OPERAND (base, 0) == ptr)
		return true;
	      break;
	    }

	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMPCPY:
	  case BUILT_IN_MEMMOVE:
	  case BUILT_IN_MEMSET:
	  case BUILT_IN_MEMCPY_CHK:
	  case BUILT_IN_MEMPCPY_CHK:
	  case BUILT_IN_MEMMOVE_CHK:
	  case BUILT_IN_MEMSET_CHK:
	  case BUILT_IN_STRNCPY:
	  case BUILT_IN_STPNCPY:
	  case BUILT_IN_CALLOC:
	    {
	      /* For a must-alias check we need to be able to constrain
		 the access properly.  */
	      if (!ref->max_size_known_p ())
		return false;
	      tree dest;
	      tree len;

	      /* In execution order a calloc call will never kill
		 anything.  However, DSE will (ab)use this interface
		 to ask if a calloc call writes the same memory locations
		 as a later assignment, memset, etc.  So handle calloc
		 in the expected way.  */
	      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC)
		{
		  tree arg0 = gimple_call_arg (stmt, 0);
		  tree arg1 = gimple_call_arg (stmt, 1);
		  if (TREE_CODE (arg0) != INTEGER_CST
		      || TREE_CODE (arg1) != INTEGER_CST)
		    return false;

		  dest = gimple_call_lhs (stmt);
		  if (!dest)
		    return false;
		  len = fold_build2 (MULT_EXPR, TREE_TYPE (arg0), arg0, arg1);
		}
	      else
		{
		  dest = gimple_call_arg (stmt, 0);
		  len = gimple_call_arg (stmt, 2);
		}
	      if (!poly_int_tree_p (len))
		return false;
	      tree rbase = ref->base;
	      poly_offset_int roffset = ref->offset;
	      ao_ref dref;
	      ao_ref_init_from_ptr_and_size (&dref, dest, len);
	      tree base = ao_ref_base (&dref);
	      poly_offset_int offset = dref.offset;
	      if (!base || !known_size_p (dref.size))
		return false;
	      if (TREE_CODE (base) == MEM_REF)
		{
		  if (TREE_CODE (rbase) != MEM_REF)
		    return false;
		  /* Compare pointers.  */
		  offset += mem_ref_offset (base) << LOG2_BITS_PER_UNIT;
		  roffset += mem_ref_offset (rbase) << LOG2_BITS_PER_UNIT;
		  base = TREE_OPERAND (base, 0);
		  rbase = TREE_OPERAND (rbase, 0);
		}
	      if (base == rbase
		  && known_subrange_p (roffset, ref->max_size, offset,
				       wi::to_poly_offset (len)
				       << LOG2_BITS_PER_UNIT))
		return true;
	      break;
	    }

	  case BUILT_IN_VA_END:
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      if (TREE_CODE (ptr) == ADDR_EXPR)
		{
		  tree base = ao_ref_base (ref);
		  if (TREE_OPERAND (ptr, 0) == base)
		    return true;
		}
	      break;
	    }

	  default:;
	  }
    }
  return false;
}

/* Convenience overload of the above taking a bare reference tree REF.  */

bool
stmt_kills_ref_p (gimple *stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_kills_ref_p (stmt, &r);
}
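
/* A worked example (hypothetical snippet): given

     a.x = 1;		(the reference REF)
     a = b;		(STMT, an aggregate copy covering all of a)

   stmt_kills_ref_p returns true for STMT and a.x because the LHS of
   the copy is a base of the reference and covers all of its bytes,
   which is what lets DSE delete the first store.  */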


/* Walk the virtual use-def chain of VUSE until hitting the virtual operand
   TARGET, or a statement clobbering the memory reference REF, in which
   case false is returned.  The walk starts with VUSE, one argument of PHI.  */

static bool
maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
		  ao_ref *ref, tree vuse, bool tbaa_p, unsigned int &limit,
		  bitmap *visited, bool abort_on_visited,
		  void *(*translate)(ao_ref *, tree, void *, translate_flags *),
		  translate_flags disambiguate_only,
		  void *data)
{
  basic_block bb = gimple_bb (phi);

  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);

  bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));

  /* Walk until we hit the target.  */
  while (vuse != target)
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
      /* If we are searching for the target VUSE by walking up to
	 TARGET_BB dominating the original PHI we are finished once
	 we reach a default def or a definition in a block dominating
	 that block.  Update TARGET and return.  */
      if (!target
	  && (gimple_nop_p (def_stmt)
	      || dominated_by_p (CDI_DOMINATORS,
				 target_bb, gimple_bb (def_stmt))))
	{
	  target = vuse;
	  return true;
	}

      /* Recurse for PHI nodes.  */
      if (gimple_code (def_stmt) == GIMPLE_PHI)
	{
	  /* An already visited PHI node ends the walk successfully.  */
	  if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
	    return !abort_on_visited;
	  vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
					   visited, abort_on_visited,
					   translate, data, disambiguate_only);
	  if (!vuse)
	    return false;
	  continue;
	}
      else if (gimple_nop_p (def_stmt))
	return false;
      else
	{
	  /* A clobbering statement or the end of the IL ends the walk
	     as a failure.  */
	  if ((int)limit <= 0)
	    return false;
	  --limit;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
	    {
	      translate_flags tf = disambiguate_only;
	      if (translate
		  && (*translate) (ref, vuse, data, &tf) == NULL)
		;
	      else
		return false;
	    }
	}
      /* If we reach a new basic-block see if we already skipped it
	 in a previous walk that ended successfully.  */
      if (gimple_bb (def_stmt) != bb)
	{
	  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
	    return !abort_on_visited;
	  bb = gimple_bb (def_stmt);
	}
      vuse = gimple_vuse (def_stmt);
    }
  return true;
}


/* Starting from a PHI node for the virtual operand of the memory reference
   REF find a continuation virtual operand that allows us to continue walking
   statements dominating PHI while skipping only statements that cannot
   possibly clobber REF.  Decrements LIMIT for each alias disambiguation done
   and aborts the walk, returning NULL_TREE, if LIMIT reaches zero.
   Returns NULL_TREE if no suitable virtual operand can be found.  */

tree
get_continuation_for_phi (gimple *phi, ao_ref *ref, bool tbaa_p,
			  unsigned int &limit, bitmap *visited,
			  bool abort_on_visited,
			  void *(*translate)(ao_ref *, tree, void *,
					     translate_flags *),
			  void *data,
			  translate_flags disambiguate_only)
{
  unsigned nargs = gimple_phi_num_args (phi);

  /* A single-argument PHI we can simply look through.  */
  if (nargs == 1)
    return PHI_ARG_DEF (phi, 0);

  /* For two or more arguments try to pairwise skip non-aliasing code
     until we hit the phi argument definition that dominates the other one.  */
  basic_block phi_bb = gimple_bb (phi);
  tree arg0, arg1;
  unsigned i;

  /* Find a candidate for the virtual operand whose definition
     dominates those of all others.  */
  /* First look if any of the args themselves satisfy this.  */
  for (i = 0; i < nargs; ++i)
    {
      arg0 = PHI_ARG_DEF (phi, i);
      if (SSA_NAME_IS_DEFAULT_DEF (arg0))
	break;
      basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
      if (def_bb != phi_bb
	  && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
	break;
      arg0 = NULL_TREE;
    }
  /* If not, look if we can reach such a candidate by walking defs
     until we hit the immediate dominator.  maybe_skip_until will
     do that for us.  */
  basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);

  /* Then check against the (to be) found candidate.  */
  for (i = 0; i < nargs; ++i)
    {
      arg1 = PHI_ARG_DEF (phi, i);
      if (arg1 == arg0)
	;
      else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, tbaa_p,
				   limit, visited,
				   abort_on_visited,
				   translate,
				   /* Do not valueize when walking over
				      backedges.  */
				   dominated_by_p
				     (CDI_DOMINATORS,
				      gimple_bb (SSA_NAME_DEF_STMT (arg1)),
				      phi_bb)
				   ? TR_DISAMBIGUATE
				   : disambiguate_only, data))
	return NULL_TREE;
    }

  return arg0;
}
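
/* Illustration (a schematic sketch): for a CFG diamond

	   bb1  (.MEM_1 live)
	   /  \
	 bb2  bb3	(bb2 contains a store defining .MEM_2)
	   \  /
	   bb4:  .MEM_4 = PHI <.MEM_2(bb2), .MEM_1(bb3)>

   a walk for a reference not clobbered by the store in bb2 can continue
   at .MEM_1, the argument whose definition dominates the definitions of
   the others; maybe_skip_until verifies that the statements skipped on
   the way really do not clobber the reference.  */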

/* Based on the memory reference REF and its virtual use VUSE call
   WALKER for each virtual use that is equivalent to VUSE, including VUSE
   itself.  That is, for each virtual use whose defining statement does
   not clobber REF.

   WALKER is called with REF, the current virtual use and DATA.  If
   WALKER returns non-NULL the walk stops and its result is returned.
   At the end of a non-successful walk NULL is returned.

   TRANSLATE if non-NULL is called with a pointer to REF, the virtual
   use whose definition is a statement that may clobber REF, and DATA.
   If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
   If TRANSLATE returns non-NULL the walk stops and its result is returned.
   If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
   to adjust REF and *DATA to make that valid.

   VALUEIZE if non-NULL is called with the next VUSE that is considered
   and its return value is substituted for that.  This can be used to
   implement optimistic value-numbering for example.  Note that the
   VUSE argument is assumed to be valueized already.

   LIMIT specifies the number of alias queries we are allowed to do;
   the walk stops when it reaches zero and NULL is returned.  LIMIT
   is decremented by the number of alias queries (plus adjustments
   done by the callbacks) upon return.

   TODO: Cache the vector of equivalent vuses per ref, vuse pair.  */

void *
walk_non_aliased_vuses (ao_ref *ref, tree vuse, bool tbaa_p,
			void *(*walker)(ao_ref *, tree, void *),
			void *(*translate)(ao_ref *, tree, void *,
					   translate_flags *),
			tree (*valueize)(tree),
			unsigned &limit, void *data)
{
  bitmap visited = NULL;
  void *res;
  bool translated = false;

  timevar_push (TV_ALIAS_STMT_WALK);

  do
    {
      gimple *def_stmt;

      /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
      res = (*walker) (ref, vuse, data);
      /* Abort walk.  */
      if (res == (void *)-1)
	{
	  res = NULL;
	  break;
	}
      /* Lookup succeeded.  */
      else if (res != NULL)
	break;

      if (valueize)
	{
	  vuse = valueize (vuse);
	  if (!vuse)
	    {
	      res = NULL;
	      break;
	    }
	}
      def_stmt = SSA_NAME_DEF_STMT (vuse);
      if (gimple_nop_p (def_stmt))
	break;
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
					 &visited, translated, translate, data);
      else
	{
	  if ((int)limit <= 0)
	    {
	      res = NULL;
	      break;
	    }
	  --limit;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
	    {
	      if (!translate)
		break;
	      translate_flags disambiguate_only = TR_TRANSLATE;
	      res = (*translate) (ref, vuse, data, &disambiguate_only);
	      /* Failed lookup and translation.  */
	      if (res == (void *)-1)
		{
		  res = NULL;
		  break;
		}
	      /* Lookup succeeded.  */
	      else if (res != NULL)
		break;
	      /* Translation succeeded, continue walking.  */
	      translated = translated || disambiguate_only == TR_TRANSLATE;
	    }
	  vuse = gimple_vuse (def_stmt);
	}
    }
  while (vuse);

  if (visited)
    BITMAP_FREE (visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return res;
}
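
/* A minimal caller sketch under stated assumptions: my_walker and
   my_data are placeholders, the query budget is arbitrary, and real
   callers (value-numbering, for instance) also pass translate and
   valueize callbacks rather than NULL.

     static void *
     my_walker (ao_ref *ref, tree vuse, void *data)
     {
       (return non-NULL to stop with a result, (void *)-1 to abort,
	NULL to keep walking)
       return NULL;
     }

     unsigned limit = 1000;
     void *res = walk_non_aliased_vuses (&r, vuse, true, my_walker,
					 NULL, NULL, limit, &my_data);  */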


/* Based on the memory reference REF call WALKER for each vdef whose
   defining statement may clobber REF, starting with VDEF.  If REF
   is NULL_TREE, each defining statement is visited.

   WALKER is called with REF, the current vdef and DATA.  If WALKER
   returns true the walk is stopped, otherwise it continues.

   If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
   The pointer may be NULL and then we do not track this information.

   At PHI nodes walk_aliased_vdefs forks into one walk for each
   PHI argument (but only one walk continues on merge points), the
   return value is true if any of the walks was successful.

   The function returns the number of statements walked or -1 if
   LIMIT stmts were walked and the walk was aborted at this point.
   If LIMIT is zero the walk is not aborted.  */

static int
walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
		      bool (*walker)(ao_ref *, tree, void *), void *data,
		      bitmap *visited, unsigned int cnt,
		      bool *function_entry_reached, unsigned limit)
{
  do
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

      if (*visited
	  && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
	return cnt;

      if (gimple_nop_p (def_stmt))
	{
	  if (function_entry_reached)
	    *function_entry_reached = true;
	  return cnt;
	}
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	{
	  unsigned i;
	  if (!*visited)
	    *visited = BITMAP_ALLOC (NULL);
	  for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
	    {
	      int res = walk_aliased_vdefs_1 (ref,
					      gimple_phi_arg_def (def_stmt, i),
					      walker, data, visited, cnt,
					      function_entry_reached, limit);
	      if (res == -1)
		return -1;
	      cnt = res;
	    }
	  return cnt;
	}

      /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
      cnt++;
      if (cnt == limit)
	return -1;
      if ((!ref
	   || stmt_may_clobber_ref_p_1 (def_stmt, ref))
	  && (*walker) (ref, vdef, data))
	return cnt;

      vdef = gimple_vuse (def_stmt);
    }
  while (1);
}

/* Entry point; see the function comment above walk_aliased_vdefs_1.  */

int
walk_aliased_vdefs (ao_ref *ref, tree vdef,
		    bool (*walker)(ao_ref *, tree, void *), void *data,
		    bitmap *visited,
		    bool *function_entry_reached, unsigned int limit)
{
  bitmap local_visited = NULL;
  int ret;

  timevar_push (TV_ALIAS_STMT_WALK);

  if (function_entry_reached)
    *function_entry_reached = false;

  ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
			      visited ? visited : &local_visited, 0,
			      function_entry_reached, limit);
  if (local_visited)
    BITMAP_FREE (local_visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return ret;
}
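
/* A caller sketch (the callback name and the budget are assumptions,
   not part of GCC's API): count how many statements may clobber the
   reference R, giving up after 32 walked statements.

     static bool
     count_clobbers (ao_ref *ref, tree vdef, void *data)
     {
       ++*(unsigned *) data;
       return false;	(false keeps the walk going)
     }

     unsigned n = 0;
     int walked = walk_aliased_vdefs (&r, gimple_vuse (stmt),
				      count_clobbers, &n, NULL, NULL, 32);  */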