/* Alias analysis for GNU C
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003
   Free Software Foundation, Inc.
   Contributed by John Carr (jfc@mit.edu).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "flags.h"
#include "output.h"
#include "toplev.h"
#include "cselib.h"
#include "splay-tree.h"
#include "ggc.h"
#include "langhooks.h"
#include "target.h"

/* The alias sets assigned to MEMs assist the back-end in determining
   which MEMs can alias which other MEMs.  In general, two MEMs in
   different alias sets cannot alias each other, with one important
   exception.  Consider something like:

     struct S { int i; double d; };

   a store to an `S' can alias something of either type `int' or type
   `double'.  (However, a store to an `int' cannot alias a `double'
   and vice versa.)  We indicate this via a tree structure that looks
   like:
	   struct S
	    /   \
	   /     \
	 |/_     _\|
	 int    double

   (The arrows are directed and point downwards.)
   In this situation we say the alias set for `struct S' is the
   `superset' and that those for `int' and `double' are `subsets'.

   To see whether two alias sets can point to the same memory, we must
   see if either alias set is a subset of the other.  We need not trace
   past immediate descendants, however, since we propagate all
   grandchildren up one level.

   Alias set zero is implicitly a superset of all other alias sets.
   However, there is no actual entry for alias set zero.  It is an
   error to attempt to explicitly construct a subset of zero.  */
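/* Illustrative sketch (not used by the compiler): with the tree above,
   the subset relation answers may-alias queries as follows.

     struct S *ps;
     int *pi;
     double *pd;

   A store through `ps' may alias `*pi' or `*pd', because `int' and
   `double' are subsets of `struct S'; but `*pi' and `*pd' can never
   alias each other, since neither alias set is a subset of the other.
   Anything in alias set zero may alias all three.  */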

typedef struct alias_set_entry
{
  /* The alias set number, as stored in MEM_ALIAS_SET.  */
  HOST_WIDE_INT alias_set;

  /* The children of the alias set.  These are not just the immediate
     children, but, in fact, all descendants.  So, if we have:

       struct T { struct S s; float f; }

     continuing our example above, the children here will be all of
     `int', `double', `float', and `struct S'.  */
  splay_tree children;

  /* Nonzero if this set would have a child of zero: this effectively
     makes it the same as alias set zero.  */
  int has_zero_child;
} *alias_set_entry;

static int rtx_equal_for_memref_p	PARAMS ((rtx, rtx));
static rtx find_symbolic_term		PARAMS ((rtx));
rtx get_addr				PARAMS ((rtx));
static int memrefs_conflict_p		PARAMS ((int, rtx, int, rtx,
						 HOST_WIDE_INT));
static void record_set			PARAMS ((rtx, rtx, void *));
static rtx find_base_term		PARAMS ((rtx));
static int base_alias_check		PARAMS ((rtx, rtx, enum machine_mode,
						 enum machine_mode));
static rtx find_base_value		PARAMS ((rtx));
static int mems_in_disjoint_alias_sets_p PARAMS ((rtx, rtx));
static int insert_subset_children       PARAMS ((splay_tree_node, void*));
static tree find_base_decl		PARAMS ((tree));
static alias_set_entry get_alias_set_entry PARAMS ((HOST_WIDE_INT));
static rtx fixed_scalar_and_varying_struct_p PARAMS ((rtx, rtx, rtx, rtx,
						      int (*) (rtx, int)));
static int aliases_everything_p         PARAMS ((rtx));
static bool nonoverlapping_component_refs_p PARAMS ((tree, tree));
static tree decl_for_component_ref	PARAMS ((tree));
static rtx adjust_offset_for_component_ref PARAMS ((tree, rtx));
static int nonoverlapping_memrefs_p	PARAMS ((rtx, rtx));
static int write_dependence_p           PARAMS ((rtx, rtx, int));
static void set_reg_known_value		PARAMS ((unsigned int, rtx));
static void set_reg_known_equiv_p	PARAMS ((unsigned int, int));

static int nonlocal_mentioned_p_1       PARAMS ((rtx *, void *));
static int nonlocal_mentioned_p         PARAMS ((rtx));
static int nonlocal_referenced_p_1      PARAMS ((rtx *, void *));
static int nonlocal_referenced_p        PARAMS ((rtx));
static int nonlocal_set_p_1             PARAMS ((rtx *, void *));
static int nonlocal_set_p               PARAMS ((rtx));

/* Set up all info needed to perform alias analysis on memory references.  */

/* Returns the size in bytes of the mode of X.  */
#define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))

/* Returns nonzero if MEM1 and MEM2 do not alias because they are in
   different alias sets.  We ignore alias sets in functions making use
   of variable arguments because the va_arg macros on some systems are
   not legal ANSI C.  */
#define DIFFERENT_ALIAS_SETS_P(MEM1, MEM2)			\
  mems_in_disjoint_alias_sets_p (MEM1, MEM2)

/* Cap the number of passes we make over the insns propagating alias
   information through set chains.  10 is a completely arbitrary choice.  */
#define MAX_ALIAS_LOOP_PASSES 10

/* reg_base_value[N] gives an address to which register N is related.
   If all sets after the first add or subtract to the current value
   or otherwise modify it so it does not point to a different top level
   object, reg_base_value[N] is equal to the address part of the source
   of the first set.

   A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF.  ADDRESS
   expressions represent certain special values: function arguments and
   the stack, frame, and argument pointers.

   The contents of an ADDRESS are not normally used; the mode of the
   ADDRESS determines whether the ADDRESS is a function argument or some
   other special value.  Pointer equality, not rtx_equal_p, determines
   whether two ADDRESS expressions refer to the same base address.

   The only use of the contents of an ADDRESS is for determining if the
   current function performs nonlocal memory references for the
   purposes of marking the function as a constant function.  */

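/* Illustrative sketch (the names P and `x' are hypothetical): after

     p = &x + 4;   i.e. (set (reg P) (plus (symbol_ref "x") (const_int 4)))

   reg_base_value[P] is the SYMBOL_REF for `x'.  Special values use
   ADDRESS expressions instead; as base_alias_check below relies on,
   a stack or frame reference gets a Pmode ADDRESS, while an incoming
   argument gets a VOIDmode one.  */
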
static GTY((length ("reg_base_value_size"))) rtx *reg_base_value;
static rtx *new_reg_base_value;
static unsigned int reg_base_value_size; /* size of reg_base_value array */

/* Static hunks of RTL used by the aliasing code; these are initialized
   once per function to avoid unnecessary RTL allocations.  */
static GTY (()) rtx static_reg_base_value[FIRST_PSEUDO_REGISTER];

#define REG_BASE_VALUE(X) \
  (REGNO (X) < reg_base_value_size \
   ? reg_base_value[REGNO (X)] : 0)

/* Vector of known invariant relationships between registers.  Set in
   loop unrolling.  Indexed by register number; if nonzero, the value
   is an expression describing this register in terms of another.

   The length of this array is REG_BASE_VALUE_SIZE.

   Because this array contains only pseudo registers it has no effect
   after reload.  */
static GTY((length("alias_invariant_size"))) rtx *alias_invariant;
unsigned GTY(()) int alias_invariant_size;

/* Vector indexed by N giving the initial (unchanging) value known for
   pseudo-register N.  This array is initialized in init_alias_analysis,
   and does not change until end_alias_analysis is called.  */
static GTY((length("reg_known_value_size"))) rtx *reg_known_value;

/* Indicates number of valid entries in reg_known_value.  */
static GTY(()) unsigned int reg_known_value_size;

/* Vector recording for each reg_known_value whether it is due to a
   REG_EQUIV note.  Future passes (viz., reload) may replace the
   pseudo with the equivalent expression and so we account for the
   dependences that would be introduced if that happens.

   The REG_EQUIV notes created in assign_parms may mention the arg
   pointer, and there are explicit insns in the RTL that modify the
   arg pointer.  Thus we must ensure that such insns don't get
   scheduled across each other because that would invalidate the
   REG_EQUIV notes.  One could argue that the REG_EQUIV notes are
   wrong, but solving the problem in the scheduler will likely give
   better code, so we do it here.  */
static bool *reg_known_equiv_p;

/* True when scanning insns from the start of the rtl to the
   NOTE_INSN_FUNCTION_BEG note.  */
static bool copying_arguments;

/* The splay-tree used to store the various alias set entries.  */
static splay_tree alias_sets;

/* Returns a pointer to the alias set entry for ALIAS_SET, if there is
   such an entry, or NULL otherwise.  */

static alias_set_entry
get_alias_set_entry (alias_set)
     HOST_WIDE_INT alias_set;
{
  splay_tree_node sn
    = splay_tree_lookup (alias_sets, (splay_tree_key) alias_set);

  return sn != 0 ? ((alias_set_entry) sn->value) : 0;
}

/* Returns nonzero if the alias sets for MEM1 and MEM2 are such that
   the two MEMs cannot alias each other.  */

static int
mems_in_disjoint_alias_sets_p (mem1, mem2)
     rtx mem1;
     rtx mem2;
{
#ifdef ENABLE_CHECKING
/* Perform a basic sanity check.  Namely, that there are no alias sets
   if we're not using strict aliasing.  This helps to catch bugs
   whereby someone uses PUT_CODE, but doesn't clear MEM_ALIAS_SET, or
   where a MEM is allocated in some way other than by the use of
   gen_rtx_MEM, and the MEM_ALIAS_SET is not cleared.  If we begin to
   use alias sets to indicate that spilled registers cannot alias each
   other, we might need to remove this check.  */
  if (! flag_strict_aliasing
      && (MEM_ALIAS_SET (mem1) != 0 || MEM_ALIAS_SET (mem2) != 0))
    abort ();
#endif

  return ! alias_sets_conflict_p (MEM_ALIAS_SET (mem1), MEM_ALIAS_SET (mem2));
}

/* Insert the NODE into the splay tree given by DATA.  Used by
   record_alias_subset via splay_tree_foreach.  */

static int
insert_subset_children (node, data)
     splay_tree_node node;
     void *data;
{
  splay_tree_insert ((splay_tree) data, node->key, node->value);

  return 0;
}

/* Return 1 if the two specified alias sets may conflict.  */

int
alias_sets_conflict_p (set1, set2)
     HOST_WIDE_INT set1, set2;
{
  alias_set_entry ase;

  /* If we have no alias set information for one of the operands, we have
     to assume it can alias anything.  */
  if (set1 == 0 || set2 == 0
      /* If the two alias sets are the same, they may alias.  */
      || set1 == set2)
    return 1;

  /* See if the first alias set is a subset of the second.  */
  ase = get_alias_set_entry (set1);
  if (ase != 0
      && (ase->has_zero_child
	  || splay_tree_lookup (ase->children,
				(splay_tree_key) set2)))
    return 1;

  /* Now do the same, but with the alias sets reversed.  */
  ase = get_alias_set_entry (set2);
  if (ase != 0
      && (ase->has_zero_child
	  || splay_tree_lookup (ase->children,
				(splay_tree_key) set1)))
    return 1;

  /* The two alias sets are distinct and neither one is the
     child of the other.  Therefore, they cannot alias.  */
  return 0;
}

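/* Illustrative only: continuing the `struct S' example, once `int' has
   been recorded as a subset of `struct S',

     alias_sets_conflict_p (s_set, int_set) == 1       (subset lookup)
     alias_sets_conflict_p (int_set, double_set) == 0  (unrelated sets)
     alias_sets_conflict_p (0, double_set) == 1        (set zero aliases all)

   where s_set, int_set and double_set stand for the respective alias
   set numbers.  */
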
/* Return 1 if TYPE is a RECORD_TYPE, UNION_TYPE, or QUAL_UNION_TYPE and
   has any readonly fields.  If any of the fields have types that
   contain readonly fields, return true as well.  */

int
readonly_fields_p (type)
     tree type;
{
  tree field;

  if (TREE_CODE (type) != RECORD_TYPE && TREE_CODE (type) != UNION_TYPE
      && TREE_CODE (type) != QUAL_UNION_TYPE)
    return 0;

  for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL
	&& (TREE_READONLY (field)
	    || readonly_fields_p (TREE_TYPE (field))))
      return 1;

  return 0;
}

/* Return 1 if any MEM object of type T1 will always conflict (using the
   dependency routines in this file) with any MEM object of type T2.
   This is used when allocating temporary storage.  If T1 and/or T2 are
   NULL_TREE, it means we know nothing about the storage.  */

int
objects_must_conflict_p (t1, t2)
     tree t1, t2;
{
  HOST_WIDE_INT set1, set2;

  /* If neither has a type specified, we don't know if they'll conflict
     because we may be using them to store objects of various types, for
     example the argument and local variables areas of inlined functions.  */
  if (t1 == 0 && t2 == 0)
    return 0;

  /* If one or the other has readonly fields or is readonly,
     then they may not conflict.  */
  if ((t1 != 0 && readonly_fields_p (t1))
      || (t2 != 0 && readonly_fields_p (t2))
      || (t1 != 0 && lang_hooks.honor_readonly && TYPE_READONLY (t1))
      || (t2 != 0 && lang_hooks.honor_readonly && TYPE_READONLY (t2)))
    return 0;

  /* If they are the same type, they must conflict.  */
  if (t1 == t2
      /* Likewise if both are volatile.  */
      || (t1 != 0 && TYPE_VOLATILE (t1) && t2 != 0 && TYPE_VOLATILE (t2)))
    return 1;

  set1 = t1 ? get_alias_set (t1) : 0;
  set2 = t2 ? get_alias_set (t2) : 0;

  /* Otherwise they conflict if they have no alias set or the same one.
     We can't simply use alias_sets_conflict_p here, because we must make
     sure that every subtype of t1 will conflict with every subtype of
     t2 for which a pair of subobjects of these respective subtypes
     overlaps on the stack.  */
  return set1 == 0 || set2 == 0 || set1 == set2;
}

/* T is an expression with pointer type.  Find the DECL on which this
   expression is based.  (For example, in `a[i]' this would be `a'.)
   If there is no such DECL, or a unique decl cannot be determined,
   NULL_TREE is returned.  */

static tree
find_base_decl (t)
     tree t;
{
  tree d0, d1, d2;

  if (t == 0 || t == error_mark_node || ! POINTER_TYPE_P (TREE_TYPE (t)))
    return 0;

  /* If this is a declaration, return it.  */
  if (TREE_CODE_CLASS (TREE_CODE (t)) == 'd')
    return t;

  /* Handle general expressions.  It would be nice to deal with
     COMPONENT_REFs here.  If we could tell that `a' and `b' were the
     same, then `a->f' and `b->f' are also the same.  */
  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case '1':
      return find_base_decl (TREE_OPERAND (t, 0));

    case '2':
      /* Return 0 if no decl is found in either operand or if the two
	 operands yield different decls.  */
      d0 = find_base_decl (TREE_OPERAND (t, 0));
      d1 = find_base_decl (TREE_OPERAND (t, 1));
      if (d0 == d1)
	return d0;
      else if (d0 == 0)
	return d1;
      else if (d1 == 0)
	return d0;
      else
	return 0;

    case '3':
      d0 = find_base_decl (TREE_OPERAND (t, 0));
      d1 = find_base_decl (TREE_OPERAND (t, 1));
      d2 = find_base_decl (TREE_OPERAND (t, 2));

      /* Set any nonzero values from the last, then from the first.  */
      if (d1 == 0) d1 = d2;
      if (d0 == 0) d0 = d1;
      if (d1 == 0) d1 = d0;
      if (d2 == 0) d2 = d1;

      /* At this point all are nonzero or all are zero.  If all three are the
	 same, return it.  Otherwise, return zero.  */
      return (d0 == d1 && d1 == d2) ? d0 : 0;

    default:
      return 0;
    }
}

/* Return 1 if all the nested component references handled by
   get_inner_reference in T are such that we can address the object in T.  */

int
can_address_p (t)
     tree t;
{
  /* If we're at the end, it is vacuously addressable.  */
  if (! handled_component_p (t))
    return 1;

  /* Bitfields are never addressable.  */
  else if (TREE_CODE (t) == BIT_FIELD_REF)
    return 0;

  /* Fields are addressable unless they are marked as nonaddressable or
     the containing type has alias set 0.  */
  else if (TREE_CODE (t) == COMPONENT_REF
	   && ! DECL_NONADDRESSABLE_P (TREE_OPERAND (t, 1))
	   && get_alias_set (TREE_TYPE (TREE_OPERAND (t, 0))) != 0
	   && can_address_p (TREE_OPERAND (t, 0)))
    return 1;

  /* Likewise for arrays.  */
  else if ((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	   && ! TYPE_NONALIASED_COMPONENT (TREE_TYPE (TREE_OPERAND (t, 0)))
	   && get_alias_set (TREE_TYPE (TREE_OPERAND (t, 0))) != 0
	   && can_address_p (TREE_OPERAND (t, 0)))
    return 1;

  return 0;
}

/* Return the alias set for T, which may be either a type or an
   expression.  Call language-specific routine for help, if needed.  */

HOST_WIDE_INT
get_alias_set (t)
     tree t;
{
  HOST_WIDE_INT set;

  /* If we're not doing any alias analysis, just assume everything
     aliases everything else.  Also return 0 if this or its type is
     an error.  */
  if (! flag_strict_aliasing || t == error_mark_node
      || (! TYPE_P (t)
	  && (TREE_TYPE (t) == 0 || TREE_TYPE (t) == error_mark_node)))
    return 0;

  /* We can be passed either an expression or a type.  This and the
     language-specific routine may make mutually-recursive calls to each other
     to figure out what to do.  At each juncture, we see if this is a tree
     that the language may need to handle specially.  First handle things that
     aren't types.  */
  if (! TYPE_P (t))
    {
      tree inner = t;
      tree placeholder_ptr = 0;

      /* Remove any nops, then give the language a chance to do
	 something with this tree before we look at it.  */
      STRIP_NOPS (t);
      set = (*lang_hooks.get_alias_set) (t);
      if (set != -1)
	return set;

      /* First see if the actual object referenced is an INDIRECT_REF from a
	 restrict-qualified pointer or a "void *".  Replace
	 PLACEHOLDER_EXPRs.  */
      while (TREE_CODE (inner) == PLACEHOLDER_EXPR
	     || handled_component_p (inner))
	{
	  if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
	    inner = find_placeholder (inner, &placeholder_ptr);
	  else
	    inner = TREE_OPERAND (inner, 0);

	  STRIP_NOPS (inner);
	}

      /* Check for accesses through restrict-qualified pointers.  */
      if (TREE_CODE (inner) == INDIRECT_REF)
	{
	  tree decl = find_base_decl (TREE_OPERAND (inner, 0));

	  if (decl && DECL_POINTER_ALIAS_SET_KNOWN_P (decl))
	    {
	      /* If we haven't computed the actual alias set, do it now.  */
	      if (DECL_POINTER_ALIAS_SET (decl) == -2)
		{
		  tree pointed_to_type = TREE_TYPE (TREE_TYPE (decl));

		  /* No two restricted pointers can point at the same thing.
		     However, a restricted pointer can point at the same thing
		     as an unrestricted pointer, if that unrestricted pointer
		     is based on the restricted pointer.  So, we make the
		     alias set for the restricted pointer a subset of the
		     alias set for the type pointed to by the type of the
		     decl.  */
		  HOST_WIDE_INT pointed_to_alias_set
		    = get_alias_set (pointed_to_type);

		  if (pointed_to_alias_set == 0)
		    /* It's not legal to make a subset of alias set zero.  */
		    DECL_POINTER_ALIAS_SET (decl) = 0;
		  else if (AGGREGATE_TYPE_P (pointed_to_type))
		    /* For an aggregate, we must treat the restricted
		       pointer the same as an ordinary pointer.  If we
		       were to make the type pointed to by the
		       restricted pointer a subset of the pointed-to
		       type, then we would believe that other subsets
		       of the pointed-to type (such as fields of that
		       type) do not conflict with the type pointed to
		       by the restricted pointer.  */
		    DECL_POINTER_ALIAS_SET (decl)
		      = pointed_to_alias_set;
		  else
		    {
		      DECL_POINTER_ALIAS_SET (decl) = new_alias_set ();
		      record_alias_subset (pointed_to_alias_set,
					   DECL_POINTER_ALIAS_SET (decl));
		    }
		}

	      /* We use the alias set indicated in the declaration.  */
	      return DECL_POINTER_ALIAS_SET (decl);
	    }

	  /* If we have an INDIRECT_REF via a void pointer, we don't
	     know anything about what that might alias.  */
	  else if (TREE_CODE (TREE_TYPE (inner)) == VOID_TYPE)
	    return 0;
	}

      /* Otherwise, pick up the outermost object that we could have a pointer
	 to, processing conversion and PLACEHOLDER_EXPR as above.  */
      placeholder_ptr = 0;
      while (TREE_CODE (t) == PLACEHOLDER_EXPR
	     || (handled_component_p (t) && ! can_address_p (t)))
	{
	  if (TREE_CODE (t) == PLACEHOLDER_EXPR)
	    t = find_placeholder (t, &placeholder_ptr);
	  else
	    t = TREE_OPERAND (t, 0);

	  STRIP_NOPS (t);
	}

      /* If we've already determined the alias set for a decl, just return
	 it.  This is necessary for C++ anonymous unions, whose component
	 variables don't look like union members (boo!).  */
      if (TREE_CODE (t) == VAR_DECL
	  && DECL_RTL_SET_P (t) && GET_CODE (DECL_RTL (t)) == MEM)
	return MEM_ALIAS_SET (DECL_RTL (t));

      /* Now all we care about is the type.  */
      t = TREE_TYPE (t);
    }

  /* Variant qualifiers don't affect the alias set, so get the main
     variant.  If this is a type with a known alias set, return it.  */
  t = TYPE_MAIN_VARIANT (t);
  if (TYPE_ALIAS_SET_KNOWN_P (t))
    return TYPE_ALIAS_SET (t);

  /* See if the language has special handling for this type.  */
  set = (*lang_hooks.get_alias_set) (t);
  if (set != -1)
    return set;

  /* There are no objects of FUNCTION_TYPE, so there's no point in
     using up an alias set for them.  (There are, of course, pointers
     and references to functions, but that's different.)  */
  else if (TREE_CODE (t) == FUNCTION_TYPE)
    set = 0;

  /* Unless the language specifies otherwise, let vector types alias
     their components.  This avoids some nasty type punning issues in
     normal usage.  And indeed lets vectors be treated more like an
     array slice.  */
  else if (TREE_CODE (t) == VECTOR_TYPE)
    set = get_alias_set (TREE_TYPE (t));

  else
    /* Otherwise make a new alias set for this type.  */
    set = new_alias_set ();

  TYPE_ALIAS_SET (t) = set;

  /* If this is an aggregate type, we must record any component aliasing
     information.  */
  if (AGGREGATE_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
    record_component_aliases (t);

  return set;
}

/* Return a brand-new alias set.  */

HOST_WIDE_INT
new_alias_set ()
{
  static HOST_WIDE_INT last_alias_set;

  if (flag_strict_aliasing)
    return ++last_alias_set;
  else
    return 0;
}

/* Indicate that things in SUBSET can alias things in SUPERSET, but
   not vice versa.  For example, in C, a store to an `int' can alias a
   structure containing an `int', but not vice versa.  Here, the
   structure would be the SUPERSET and `int' the SUBSET.  This
   function should be called only once per SUPERSET/SUBSET pair.

   It is illegal for SUPERSET to be zero; everything is implicitly a
   subset of alias set zero.  */

void
record_alias_subset (superset, subset)
     HOST_WIDE_INT superset;
     HOST_WIDE_INT subset;
{
  alias_set_entry superset_entry;
  alias_set_entry subset_entry;

  /* It is possible in complex type situations for both sets to be the same,
     in which case we can ignore this operation.  */
  if (superset == subset)
    return;

  if (superset == 0)
    abort ();

  superset_entry = get_alias_set_entry (superset);
  if (superset_entry == 0)
    {
      /* Create an entry for the SUPERSET, so that we have a place to
	 attach the SUBSET.  */
      superset_entry
	= (alias_set_entry) xmalloc (sizeof (struct alias_set_entry));
      superset_entry->alias_set = superset;
      superset_entry->children
	= splay_tree_new (splay_tree_compare_ints, 0, 0);
      superset_entry->has_zero_child = 0;
      splay_tree_insert (alias_sets, (splay_tree_key) superset,
			 (splay_tree_value) superset_entry);
    }

  if (subset == 0)
    superset_entry->has_zero_child = 1;
  else
    {
      subset_entry = get_alias_set_entry (subset);
      /* If there is an entry for the subset, enter all of its children
	 (if they are not already present) as children of the SUPERSET.  */
      if (subset_entry)
	{
	  if (subset_entry->has_zero_child)
	    superset_entry->has_zero_child = 1;

	  splay_tree_foreach (subset_entry->children, insert_subset_children,
			      superset_entry->children);
	}

      /* Enter the SUBSET itself as a child of the SUPERSET.  */
      splay_tree_insert (superset_entry->children,
			 (splay_tree_key) subset, 0);
    }
}

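/* Illustrative only: for the `struct S' example at the top of this file,
   the effect of record_component_aliases (below) amounts to

     HOST_WIDE_INT s_set = get_alias_set (s_type);

     record_alias_subset (s_set, get_alias_set (int_type));
     record_alias_subset (s_set, get_alias_set (double_type));

   where s_type, int_type and double_type stand for the corresponding
   type nodes.  */
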
/* Record that component types of TYPE, if any, are part of that type for
   aliasing purposes.  For record types, we only record component types
   for fields that are marked addressable.  For array types, we always
   record the component types, so the front end should not call this
   function if the individual components aren't addressable.  */

void
record_component_aliases (type)
     tree type;
{
  HOST_WIDE_INT superset = get_alias_set (type);
  tree field;

  if (superset == 0)
    return;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      if (! TYPE_NONALIASED_COMPONENT (type))
	record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* Recursively record aliases for the base classes, if there
	 are any.  */
      if (TYPE_BINFO (type) != NULL && TYPE_BINFO_BASETYPES (type) != NULL)
	{
	  int i;
	  for (i = 0; i < TREE_VEC_LENGTH (TYPE_BINFO_BASETYPES (type)); i++)
	    {
	      tree binfo = TREE_VEC_ELT (TYPE_BINFO_BASETYPES (type), i);
	      record_alias_subset (superset,
				   get_alias_set (BINFO_TYPE (binfo)));
	    }
	}
      for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL && ! DECL_NONADDRESSABLE_P (field))
	  record_alias_subset (superset, get_alias_set (TREE_TYPE (field)));
      break;

    case COMPLEX_TYPE:
      record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
      break;

    default:
      break;
    }
}

/* Allocate an alias set for use in storing and reading from the varargs
   spill area.  */

HOST_WIDE_INT
get_varargs_alias_set ()
{
  static HOST_WIDE_INT set = -1;

  if (set == -1)
    set = new_alias_set ();

  return set;
}

/* Likewise, but used for the fixed portions of the frame, e.g., register
   save areas.  */

HOST_WIDE_INT
get_frame_alias_set ()
{
  static HOST_WIDE_INT set = -1;

  if (set == -1)
    set = new_alias_set ();

  return set;
}

/* Inside SRC, the source of a SET, find a base address.  */

static rtx
find_base_value (src)
     rtx src;
{
  unsigned int regno;

  switch (GET_CODE (src))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return src;

    case REG:
      regno = REGNO (src);
      /* At the start of a function, argument registers have known base
	 values which may be lost later.  Returning an ADDRESS
	 expression here allows optimization based on argument values
	 even when the argument registers are used for other purposes.  */
      if (regno < FIRST_PSEUDO_REGISTER && copying_arguments)
	return new_reg_base_value[regno];

      /* If a pseudo has a known base value, return it.  Do not do this
	 for non-fixed hard regs since it can result in a circular
	 dependency chain for registers which have values at function entry.

	 The test above is not sufficient because the scheduler may move
	 a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEGIN.  */
      if ((regno >= FIRST_PSEUDO_REGISTER || fixed_regs[regno])
	  && regno < reg_base_value_size)
	{
	  /* If we're inside init_alias_analysis, use new_reg_base_value
	     to reduce the number of relaxation iterations.  */
	  if (new_reg_base_value && new_reg_base_value[regno]
	      && REG_N_SETS (regno) == 1)
	    return new_reg_base_value[regno];

	  if (reg_base_value[regno])
	    return reg_base_value[regno];
	}

      return 0;

    case MEM:
      /* Check for an argument passed in memory.  Only record in the
	 copying-arguments block; it is too hard to track changes
	 otherwise.  */
      if (copying_arguments
	  && (XEXP (src, 0) == arg_pointer_rtx
	      || (GET_CODE (XEXP (src, 0)) == PLUS
		  && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
	return gen_rtx_ADDRESS (VOIDmode, src);
      return 0;

    case CONST:
      src = XEXP (src, 0);
      if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
	break;

      /* ... fall through ...  */

    case PLUS:
    case MINUS:
      {
	rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);

	/* If either operand is a REG that is a known pointer, then it
	   is the base.  */
	if (REG_P (src_0) && REG_POINTER (src_0))
	  return find_base_value (src_0);
	if (REG_P (src_1) && REG_POINTER (src_1))
	  return find_base_value (src_1);

	/* If either operand is a REG, then see if we already have
	   a known value for it.  */
	if (REG_P (src_0))
	  {
	    temp = find_base_value (src_0);
	    if (temp != 0)
	      src_0 = temp;
	  }

	if (REG_P (src_1))
	  {
	    temp = find_base_value (src_1);
	    if (temp != 0)
	      src_1 = temp;
	  }

	/* If either base is a named object or a special address
	   (like an argument or stack reference), then use it for the
	   base term.  */
	if (src_0 != 0
	    && (GET_CODE (src_0) == SYMBOL_REF
		|| GET_CODE (src_0) == LABEL_REF
		|| (GET_CODE (src_0) == ADDRESS
		    && GET_MODE (src_0) != VOIDmode)))
	  return src_0;

	if (src_1 != 0
	    && (GET_CODE (src_1) == SYMBOL_REF
		|| GET_CODE (src_1) == LABEL_REF
		|| (GET_CODE (src_1) == ADDRESS
		    && GET_MODE (src_1) != VOIDmode)))
	  return src_1;

	/* Guess which operand is the base address:
	   If either operand is a symbol, then it is the base.  If
	   either operand is a CONST_INT, then the other is the base.  */
	if (GET_CODE (src_1) == CONST_INT || CONSTANT_P (src_0))
	  return find_base_value (src_0);
	else if (GET_CODE (src_0) == CONST_INT || CONSTANT_P (src_1))
	  return find_base_value (src_1);

	return 0;
      }

    case LO_SUM:
      /* The standard form is (lo_sum reg sym) so look only at the
	 second operand.  */
      return find_base_value (XEXP (src, 1));

    case AND:
      /* If the second operand is constant, set the base
	 address to the first operand.  */
      if (GET_CODE (XEXP (src, 1)) == CONST_INT && INTVAL (XEXP (src, 1)) != 0)
	return find_base_value (XEXP (src, 0));
      return 0;

    case TRUNCATE:
      if (GET_MODE_SIZE (GET_MODE (src)) < GET_MODE_SIZE (Pmode))
	break;
      /* Fall through.  */
    case HIGH:
    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return find_base_value (XEXP (src, 0));

    case ZERO_EXTEND:
    case SIGN_EXTEND:	/* used for NT/Alpha pointers */
      {
	rtx temp = find_base_value (XEXP (src, 0));

#ifdef POINTERS_EXTEND_UNSIGNED
	if (temp != 0 && CONSTANT_P (temp) && GET_MODE (temp) != Pmode)
	  temp = convert_memory_address (Pmode, temp);
#endif

	return temp;
      }

    default:
      break;
    }

  return 0;
}

/* Called from init_alias_analysis indirectly through note_stores.  */

/* While scanning insns to find base values, reg_seen[N] is nonzero if
   register N has been set in this function.  */
static char *reg_seen;

/* Addresses which are known not to alias anything else are identified
   by a unique integer.  */
static int unique_id;

static void
record_set (dest, set, data)
     rtx dest, set;
     void *data ATTRIBUTE_UNUSED;
{
  unsigned regno;
  rtx src;

  if (GET_CODE (dest) != REG)
    return;

  regno = REGNO (dest);

  if (regno >= reg_base_value_size)
    abort ();

  if (set)
    {
      /* A CLOBBER wipes out any old value but does not prevent a previously
	 unset register from acquiring a base address (i.e. reg_seen is not
	 set).  */
      if (GET_CODE (set) == CLOBBER)
	{
	  new_reg_base_value[regno] = 0;
	  return;
	}
      src = SET_SRC (set);
    }
  else
    {
      if (reg_seen[regno])
	{
	  new_reg_base_value[regno] = 0;
	  return;
	}
      reg_seen[regno] = 1;
      new_reg_base_value[regno] = gen_rtx_ADDRESS (Pmode,
						   GEN_INT (unique_id++));
      return;
    }

  /* This is not the first set.  If the new value is not related to the
     old value, forget the base value.  Note that the following case is
     not detected:
     extern int x, y;  int *p = &x; p += (&y-&x);
     ANSI C does not allow computing the difference of addresses
     of distinct top level objects.  */
  if (new_reg_base_value[regno])
    switch (GET_CODE (src))
      {
      case LO_SUM:
      case MINUS:
	if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
	  new_reg_base_value[regno] = 0;
	break;
      case PLUS:
	/* If the value we add in the PLUS is also a valid base value,
	   this might be the actual base value, and the original value
	   an index.  */
	{
	  rtx other = NULL_RTX;

	  if (XEXP (src, 0) == dest)
	    other = XEXP (src, 1);
	  else if (XEXP (src, 1) == dest)
	    other = XEXP (src, 0);

	  if (! other || find_base_value (other))
	    new_reg_base_value[regno] = 0;
	  break;
	}
      case AND:
	if (XEXP (src, 0) != dest || GET_CODE (XEXP (src, 1)) != CONST_INT)
	  new_reg_base_value[regno] = 0;
	break;
      default:
	new_reg_base_value[regno] = 0;
	break;
      }
  /* If this is the first set of a register, record the value.  */
  else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
	   && ! reg_seen[regno] && new_reg_base_value[regno] == 0)
    new_reg_base_value[regno] = find_base_value (src);

  reg_seen[regno] = 1;
}

/* Called from loop optimization when a new pseudo-register is
   created.  It indicates that REGNO is being set to VAL.  If INVARIANT
   is true then this value also describes an invariant relationship
   which can be used to deduce that two registers with unknown values
   are different.  */

void
record_base_value (regno, val, invariant)
     unsigned int regno;
     rtx val;
     int invariant;
{
  if (regno >= reg_base_value_size)
    return;

  if (invariant && alias_invariant && regno < alias_invariant_size)
    alias_invariant[regno] = val;

  if (GET_CODE (val) == REG)
    {
      if (REGNO (val) < reg_base_value_size)
	reg_base_value[regno] = reg_base_value[REGNO (val)];

      return;
    }

  reg_base_value[regno] = find_base_value (val);
}

/* Clear alias info for a register.  This is used if an RTL transformation
   changes the value of a register.  It is used in flow by AUTO_INC_DEC
   optimizations.  We don't need to clear reg_base_value, since flow only
   changes the offset.  */

void
clear_reg_alias_info (reg)
     rtx reg;
{
  unsigned int regno = REGNO (reg);

  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	{
	  reg_known_value[regno] = reg;
	  reg_known_equiv_p[regno] = false;
	}
    }
}

/* If a value is known for REGNO, return it.  */

rtx
get_reg_known_value (regno)
     unsigned int regno;
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	return reg_known_value[regno];
    }
  return NULL;
}

/* Set it.  */

static void
set_reg_known_value (regno, val)
     unsigned int regno;
     rtx val;
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	reg_known_value[regno] = val;
    }
}

/* Similarly for reg_known_equiv_p.  */

bool
get_reg_known_equiv_p (regno)
     unsigned int regno;
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	return reg_known_equiv_p[regno];
    }
  return false;
}

static void
set_reg_known_equiv_p (regno, val)
     unsigned int regno;
     int val;
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	reg_known_equiv_p[regno] = val;
    }
}

/* Returns a canonical version of X, from the point of view of alias
   analysis.  (For example, if X is a MEM whose address is a register,
   and the register has a known value (say a SYMBOL_REF), then a MEM
   whose address is the SYMBOL_REF is returned.)  */

rtx
canon_rtx (x)
     rtx x;
{
  /* Recursively look for equivalences.  */
  if (GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER)
    {
      rtx t = get_reg_known_value (REGNO (x));
      if (t == x)
	return x;
      if (t)
	return canon_rtx (t);
    }

  if (GET_CODE (x) == PLUS)
    {
      rtx x0 = canon_rtx (XEXP (x, 0));
      rtx x1 = canon_rtx (XEXP (x, 1));

      if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
	{
	  if (GET_CODE (x0) == CONST_INT)
	    return plus_constant (x1, INTVAL (x0));
	  else if (GET_CODE (x1) == CONST_INT)
	    return plus_constant (x0, INTVAL (x1));
	  return gen_rtx_PLUS (GET_MODE (x), x0, x1);
	}
    }

  /* This gives us much better alias analysis when called from
     the loop optimizer.  Note we want to leave the original
     MEM alone, but need to return the canonicalized MEM with
     all the flags with their original values.  */
  else if (GET_CODE (x) == MEM)
    x = replace_equiv_address_nv (x, canon_rtx (XEXP (x, 0)));

  return x;
}

/* Return 1 if X and Y are identical-looking rtx's.

   We use the data in reg_known_value above to see if two registers with
   different numbers are, in fact, equivalent.  */

static int
rtx_equal_for_memref_p (x, y)
     rtx x, y;
{
  int i;
  int j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0 && y == 0)
    return 1;
  if (x == 0 || y == 0)
    return 0;

  x = canon_rtx (x);
  y = canon_rtx (y);

  if (x == y)
    return 1;

  code = GET_CODE (x);
  /* Rtx's of different codes cannot be equal.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
     (REG:SI x) and (REG:HI x) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* Some RTL can be compared without a recursive examination.  */
  switch (code)
    {
    case VALUE:
      return CSELIB_VAL_PTR (x) == CSELIB_VAL_PTR (y);

    case REG:
      return REGNO (x) == REGNO (y);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case CONST_INT:
    case CONST_DOUBLE:
      /* There's no need to compare the contents of CONST_DOUBLEs or
	 CONST_INTs because pointer equality is a good enough
	 comparison for these nodes.  */
      return 0;

    case ADDRESSOF:
      return (XINT (x, 1) == XINT (y, 1)
	      && rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0)));

    default:
      break;
    }

  /* For commutative operations, the RTXs match if the operands match in
     either order.  Also handle the simple binary and unary cases without
     a loop.  */
  if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
    return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
    return (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
	    && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)));
  else if (GET_RTX_CLASS (code) == '1')
    return rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.

     Limit cases to types which actually appear in addresses.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (rtx_equal_for_memref_p (XVECEXP (x, i, j),
					XVECEXP (y, i, j)) == 0)
	      return 0;
	  break;

	case 'e':
	  if (rtx_equal_for_memref_p (XEXP (x, i), XEXP (y, i)) == 0)
	    return 0;
	  break;

	  /* This can happen for asm operands.  */
	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	/* This can happen for an asm which clobbers memory.  */
	case '0':
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  abort ();
	}
    }
  return 1;
}

/* Given an rtx X, find a SYMBOL_REF or LABEL_REF within
   X and return it, or return 0 if none found.  */

static rtx
find_symbolic_term (x)
     rtx x;
{
  int i;
  enum rtx_code code;
  const char *fmt;

  code = GET_CODE (x);
  if (code == SYMBOL_REF || code == LABEL_REF)
    return x;
  if (GET_RTX_CLASS (code) == 'o')
    return 0;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      rtx t;

      if (fmt[i] == 'e')
	{
	  t = find_symbolic_term (XEXP (x, i));
	  if (t != 0)
	    return t;
	}
      else if (fmt[i] == 'E')
	break;
    }
  return 0;
}

static rtx
find_base_term (x)
     rtx x;
{
  cselib_val *val;
  struct elt_loc_list *l;

#if defined (FIND_BASE_TERM)
  /* Try machine-dependent ways to find the base term.  */
  x = FIND_BASE_TERM (x);
#endif

  switch (GET_CODE (x))
    {
    case REG:
      return REG_BASE_VALUE (x);

    case TRUNCATE:
      if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (Pmode))
	return 0;
      /* Fall through.  */
    case HIGH:
    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return find_base_term (XEXP (x, 0));

    case ZERO_EXTEND:
    case SIGN_EXTEND:	/* Used for Alpha/NT pointers */
      {
	rtx temp = find_base_term (XEXP (x, 0));

#ifdef POINTERS_EXTEND_UNSIGNED
	if (temp != 0 && CONSTANT_P (temp) && GET_MODE (temp) != Pmode)
	  temp = convert_memory_address (Pmode, temp);
#endif

	return temp;
      }

    case VALUE:
      val = CSELIB_VAL_PTR (x);
      for (l = val->locs; l; l = l->next)
	if ((x = find_base_term (l->loc)) != 0)
	  return x;
      return 0;

    case CONST:
      x = XEXP (x, 0);
      if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
	return 0;
      /* Fall through.  */
    case LO_SUM:
    case PLUS:
    case MINUS:
      {
	rtx tmp1 = XEXP (x, 0);
	rtx tmp2 = XEXP (x, 1);

	/* This is a little bit tricky since we have to determine which of
	   the two operands represents the real base address.  Otherwise this
	   routine may return the index register instead of the base register.

	   That may cause us to believe no aliasing was possible, when in
	   fact aliasing is possible.

	   We use a few simple tests to guess the base register.  Additional
	   tests can certainly be added.  For example, if one of the operands
	   is a shift or multiply, then it must be the index register and the
	   other operand is the base register.  */

	if (tmp1 == pic_offset_table_rtx && CONSTANT_P (tmp2))
	  return find_base_term (tmp2);

	/* If either operand is known to be a pointer, then use it
	   to determine the base term.  */
	if (REG_P (tmp1) && REG_POINTER (tmp1))
	  return find_base_term (tmp1);

	if (REG_P (tmp2) && REG_POINTER (tmp2))
	  return find_base_term (tmp2);

	/* Neither operand was known to be a pointer.  Go ahead and find the
	   base term for both operands.  */
	tmp1 = find_base_term (tmp1);
	tmp2 = find_base_term (tmp2);

	/* If either base term is a named object or a special address
	   (like an argument or stack reference), then use it for the
	   base term.  */
	if (tmp1 != 0
	    && (GET_CODE (tmp1) == SYMBOL_REF
		|| GET_CODE (tmp1) == LABEL_REF
		|| (GET_CODE (tmp1) == ADDRESS
		    && GET_MODE (tmp1) != VOIDmode)))
	  return tmp1;

	if (tmp2 != 0
	    && (GET_CODE (tmp2) == SYMBOL_REF
		|| GET_CODE (tmp2) == LABEL_REF
		|| (GET_CODE (tmp2) == ADDRESS
		    && GET_MODE (tmp2) != VOIDmode)))
	  return tmp2;

	/* We could not determine which of the two operands was the
	   base register and which was the index.  So we can determine
	   nothing from the base alias check.  */
	return 0;
      }

    case AND:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) != 0)
	return find_base_term (XEXP (x, 0));
      return 0;

    case SYMBOL_REF:
    case LABEL_REF:
      return x;

    case ADDRESSOF:
      return REG_BASE_VALUE (frame_pointer_rtx);

    default:
      return 0;
    }
}

/* Return 0 if the addresses X and Y are known to point to different
   objects, 1 if they might be pointers to the same object.  */

static int
base_alias_check (x, y, x_mode, y_mode)
     rtx x, y;
     enum machine_mode x_mode, y_mode;
{
  rtx x_base = find_base_term (x);
  rtx y_base = find_base_term (y);

  /* If the address itself has no known base, see if a known equivalent
     value has one.  If either address still has no known base, nothing
     is known about aliasing.  */
  if (x_base == 0)
    {
      rtx x_c;

      if (! flag_expensive_optimizations || (x_c = canon_rtx (x)) == x)
	return 1;

      x_base = find_base_term (x_c);
      if (x_base == 0)
	return 1;
    }

  if (y_base == 0)
    {
      rtx y_c;
      if (! flag_expensive_optimizations || (y_c = canon_rtx (y)) == y)
	return 1;

      y_base = find_base_term (y_c);
      if (y_base == 0)
	return 1;
    }

  /* If the base addresses are equal nothing is known about aliasing.  */
  if (rtx_equal_p (x_base, y_base))
    return 1;

  /* The base addresses of the read and write are different expressions.
     If they are both symbols and they are not accessed via AND, there is
     no conflict.  We can bring knowledge of object alignment into play
     here.  For example, on alpha, "char a, b;" can alias one another,
     though "char a; long b;" cannot.  */
  if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
    {
      if (GET_CODE (x) == AND && GET_CODE (y) == AND)
	return 1;
      if (GET_CODE (x) == AND
	  && (GET_CODE (XEXP (x, 1)) != CONST_INT
	      || (int) GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1))))
	return 1;
      if (GET_CODE (y) == AND
	  && (GET_CODE (XEXP (y, 1)) != CONST_INT
	      || (int) GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1))))
	return 1;
      /* Differing symbols never alias.  */
      return 0;
    }

  /* If one address is a stack reference there can be no alias:
     stack references using different base registers do not alias,
     a stack reference cannot alias a parameter, and a stack reference
     cannot alias a global.  */
  if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
      || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
    return 0;

  if (! flag_argument_noalias)
    return 1;

  if (flag_argument_noalias > 1)
    return 0;

  /* Weak noalias assertion (arguments are distinct, but may match globals).  */
  return ! (GET_MODE (x_base) == VOIDmode && GET_MODE (y_base) == VOIDmode);
}

/* Convert the address X into something we can use.  This is done by returning
   it unchanged unless it is a value; in the latter case we call cselib to get
   a more useful rtx.  */

rtx
get_addr (x)
     rtx x;
{
  cselib_val *v;
  struct elt_loc_list *l;

  if (GET_CODE (x) != VALUE)
    return x;
  v = CSELIB_VAL_PTR (x);
  for (l = v->locs; l; l = l->next)
    if (CONSTANT_P (l->loc))
      return l->loc;
  for (l = v->locs; l; l = l->next)
    if (GET_CODE (l->loc) != REG && GET_CODE (l->loc) != MEM)
      return l->loc;
  if (v->locs)
    return v->locs->loc;
  return x;
}

/* Return the address of the (N_REFS + 1)th memory reference to ADDR
   where SIZE is the size in bytes of the memory reference.  If ADDR
   is not modified by the memory reference then ADDR is returned.  */

rtx
addr_side_effect_eval (addr, size, n_refs)
     rtx addr;
     int size;
     int n_refs;
{
  int offset = 0;

  switch (GET_CODE (addr))
    {
    case PRE_INC:
      offset = (n_refs + 1) * size;
      break;
    case PRE_DEC:
      offset = -(n_refs + 1) * size;
      break;
    case POST_INC:
      offset = n_refs * size;
      break;
    case POST_DEC:
      offset = -n_refs * size;
      break;

    default:
      return addr;
    }

  if (offset)
    addr = gen_rtx_PLUS (GET_MODE (addr), XEXP (addr, 0), GEN_INT (offset));
  else
    addr = XEXP (addr, 0);

  return addr;
}

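/* Worked example (illustrative only): for ADDR == (post_inc (reg R))
   and SIZE == 4,

     addr_side_effect_eval (addr, 4, 0)  ==>  (reg R)
     addr_side_effect_eval (addr, 4, 1)  ==>  (plus (reg R) (const_int 4))

   while a pre-increment address includes its increment from the first
   reference on:

     addr == (pre_inc (reg R))
     addr_side_effect_eval (addr, 4, 0)  ==>  (plus (reg R) (const_int 4))  */
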
/* Return nonzero if X and Y (memory addresses) could reference the
   same location in memory.  C is an offset accumulator.  When
   C is nonzero, we are testing aliases between X and Y + C.
   XSIZE is the size in bytes of the X reference,
   similarly YSIZE is the size in bytes for Y.

   If XSIZE or YSIZE is zero, we do not know the amount of memory being
   referenced (the reference was BLKmode), so make the most pessimistic
   assumptions.

   If XSIZE or YSIZE is negative, we may access memory outside the object
   being referenced as a side effect.  This can happen when using AND to
   align memory references, as is done on the Alpha.

   Nice to notice that varying addresses cannot conflict with fp if no
   local variables had their addresses taken, but that's too hard now.  */

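/* Worked example (illustrative only): when X and Y canonicalize to the
   same address, the bytes referenced are X[0 .. XSIZE) and Y[C .. C+YSIZE),
   so with XSIZE == YSIZE == 4 the tests below report a conflict exactly
   for C in -3 .. 3:

     c >= 0 && xsize > c      covers C ==  0 .. 3
     c < 0  && ysize + c > 0  covers C == -3 .. -1  */
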
static int
memrefs_conflict_p (xsize, x, ysize, y, c)
     rtx x, y;
     int xsize, ysize;
     HOST_WIDE_INT c;
{
  if (GET_CODE (x) == VALUE)
    x = get_addr (x);
  if (GET_CODE (y) == VALUE)
    y = get_addr (y);
  if (GET_CODE (x) == HIGH)
    x = XEXP (x, 0);
  else if (GET_CODE (x) == LO_SUM)
    x = XEXP (x, 1);
  else
    x = canon_rtx (addr_side_effect_eval (x, xsize, 0));
  if (GET_CODE (y) == HIGH)
    y = XEXP (y, 0);
  else if (GET_CODE (y) == LO_SUM)
    y = XEXP (y, 1);
  else
    y = canon_rtx (addr_side_effect_eval (y, ysize, 0));

  if (rtx_equal_for_memref_p (x, y))
    {
      if (xsize <= 0 || ysize <= 0)
	return 1;
      if (c >= 0 && xsize > c)
	return 1;
      if (c < 0 && ysize + c > 0)
	return 1;
      return 0;
    }

  /* This code used to check for conflicts involving stack references and
     globals but the base address alias code now handles these cases.  */

  if (GET_CODE (x) == PLUS)
    {
      /* The fact that X is canonicalized means that this
	 PLUS rtx is canonicalized.  */
      rtx x0 = XEXP (x, 0);
      rtx x1 = XEXP (x, 1);

      if (GET_CODE (y) == PLUS)
	{
	  /* The fact that Y is canonicalized means that this
	     PLUS rtx is canonicalized.  */
	  rtx y0 = XEXP (y, 0);
	  rtx y1 = XEXP (y, 1);

	  if (rtx_equal_for_memref_p (x1, y1))
	    return memrefs_conflict_p (xsize, x0, ysize, y0, c);
	  if (rtx_equal_for_memref_p (x0, y0))
	    return memrefs_conflict_p (xsize, x1, ysize, y1, c);
	  if (GET_CODE (x1) == CONST_INT)
	    {
	      if (GET_CODE (y1) == CONST_INT)
		return memrefs_conflict_p (xsize, x0, ysize, y0,
					   c - INTVAL (x1) + INTVAL (y1));
	      else
		return memrefs_conflict_p (xsize, x0, ysize, y,
					   c - INTVAL (x1));
	    }
	  else if (GET_CODE (y1) == CONST_INT)
	    return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));

	  return 1;
	}
      else if (GET_CODE (x1) == CONST_INT)
	return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
    }
  else if (GET_CODE (y) == PLUS)
    {
      /* The fact that Y is canonicalized means that this
	 PLUS rtx is canonicalized.  */
      rtx y0 = XEXP (y, 0);
      rtx y1 = XEXP (y, 1);

      if (GET_CODE (y1) == CONST_INT)
	return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
      else
	return 1;
    }

  if (GET_CODE (x) == GET_CODE (y))
    switch (GET_CODE (x))
      {
      case MULT:
	{
	  /* Handle cases where we expect the second operands to be the
	     same, and check only whether the first operand would conflict
	     or not.  */
	  rtx x0, y0;
	  rtx x1 = canon_rtx (XEXP (x, 1));
	  rtx y1 = canon_rtx (XEXP (y, 1));
	  if (! rtx_equal_for_memref_p (x1, y1))
	    return 1;
	  x0 = canon_rtx (XEXP (x, 0));
	  y0 = canon_rtx (XEXP (y, 0));
	  if (rtx_equal_for_memref_p (x0, y0))
	    return (xsize == 0 || ysize == 0
		    || (c >= 0 && xsize > c) || (c < 0 && ysize + c > 0));

	  /* Can't properly adjust our sizes.  */
	  if (GET_CODE (x1) != CONST_INT)
	    return 1;
	  xsize /= INTVAL (x1);
	  ysize /= INTVAL (x1);
	  c /= INTVAL (x1);
	  return memrefs_conflict_p (xsize, x0, ysize, y0, c);
	}

      case REG:
	/* Are these registers known not to be equal?  */
	if (alias_invariant)
1758 	  {
1759 	    unsigned int r_x = REGNO (x), r_y = REGNO (y);
1760 	    rtx i_x, i_y;	/* invariant relationships of X and Y */
1761 
1762 	    i_x = r_x >= reg_base_value_size ? 0 : alias_invariant[r_x];
1763 	    i_y = r_y >= reg_base_value_size ? 0 : alias_invariant[r_y];
1764 
1765 	    if (i_x == 0 && i_y == 0)
1766 	      break;
1767 
1768 	    if (! memrefs_conflict_p (xsize, i_x ? i_x : x,
1769 				      ysize, i_y ? i_y : y, c))
1770 	      return 0;
1771 	  }
1772 	break;
1773 
1774       default:
1775 	break;
1776       }
1777 
1778   /* Treat an access through an AND (e.g. a subword access on an Alpha)
1779      as an access with indeterminate size.  Assume that references
1780      besides AND are aligned, so if the size of the other reference is
1781      at least as large as the alignment, assume no other overlap.  */
1782   if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT)
1783     {
1784       if (GET_CODE (y) == AND || ysize < -INTVAL (XEXP (x, 1)))
1785 	xsize = -1;
1786       return memrefs_conflict_p (xsize, XEXP (x, 0), ysize, y, c);
1787     }
1788   if (GET_CODE (y) == AND && GET_CODE (XEXP (y, 1)) == CONST_INT)
1789     {
1790       /* ??? If we are indexing far enough into the array/structure, we
1791 	 may yet be able to determine that we cannot overlap.  But we
1792 	 also need to know that we are far enough from the end not to overlap
1793 	 a following reference, so we do nothing with that for now.  */
1794       if (GET_CODE (x) == AND || xsize < -INTVAL (XEXP (y, 1)))
1795 	ysize = -1;
1796       return memrefs_conflict_p (xsize, x, ysize, XEXP (y, 0), c);
1797     }
1798 
1799   if (GET_CODE (x) == ADDRESSOF)
1800     {
1801       if (y == frame_pointer_rtx
1802 	  || GET_CODE (y) == ADDRESSOF)
1803 	return xsize <= 0 || ysize <= 0;
1804     }
1805   if (GET_CODE (y) == ADDRESSOF)
1806     {
1807       if (x == frame_pointer_rtx)
1808 	return xsize <= 0 || ysize <= 0;
1809     }
1810 
1811   if (CONSTANT_P (x))
1812     {
1813       if (GET_CODE (x) == CONST_INT && GET_CODE (y) == CONST_INT)
1814 	{
1815 	  c += (INTVAL (y) - INTVAL (x));
1816 	  return (xsize <= 0 || ysize <= 0
1817 		  || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
1818 	}
1819 
1820       if (GET_CODE (x) == CONST)
1821 	{
1822 	  if (GET_CODE (y) == CONST)
1823 	    return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
1824 				       ysize, canon_rtx (XEXP (y, 0)), c);
1825 	  else
1826 	    return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
1827 				       ysize, y, c);
1828 	}
1829       if (GET_CODE (y) == CONST)
1830 	return memrefs_conflict_p (xsize, x, ysize,
1831 				   canon_rtx (XEXP (y, 0)), c);
1832 
1833       if (CONSTANT_P (y))
1834 	return (xsize <= 0 || ysize <= 0
1835 		|| (rtx_equal_for_memref_p (x, y)
1836 		    && ((c >= 0 && xsize > c) || (c < 0 && ysize+c > 0))));
1837 
1838       return 1;
1839     }
1840   return 1;
1841 }
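
/* To illustrate the offset accumulator: for X = (plus:SI (reg:SI 3)
   (const_int 4)) with XSIZE = 4 and Y = (reg:SI 3) with YSIZE = 4,
   the PLUS case above recurses with C = 0 - 4 = -4.  The remaining
   terms compare equal, and since C < 0 and YSIZE + C = 0 is not
   positive, the references (bytes 4..7 versus bytes 0..3) are
   reported as non-conflicting.  */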
1842 
1843 /* Functions to compute memory dependencies.
1844 
1845    Since we process the insns in execution order, we can build tables
1846    to keep track of what registers are fixed (and not aliased), what registers
1847    are varying in known ways, and what registers are varying in unknown
1848    ways.
1849 
1850    If both memory references are volatile, then there must always be a
1851    dependence between the two references, since their order cannot be
1852    changed.  A volatile and non-volatile reference can be interchanged
1853    though.
1854 
1855    A MEM_IN_STRUCT reference at a non-AND varying address can never
1856    conflict with a non-MEM_IN_STRUCT reference at a fixed address.  We
1857    also must allow AND addresses, because they may generate accesses
1858    outside the object being referenced.  This is used to generate
1859    aligned addresses from unaligned addresses, for instance, the alpha
1860    storeqi_unaligned pattern.  */
1861 
1862 /* Read dependence: X is read after read in MEM takes place.  There can
1863    only be a dependence here if both reads are volatile.  */
1864 
1865 int
1866 read_dependence (mem, x)
1867      rtx mem;
1868      rtx x;
1869 {
1870   return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
1871 }
1872 
1873 /* Returns MEM1 if and only if MEM1 is a scalar at a fixed address and
1874    MEM2 is a reference to a structure at a varying address, or returns
1875    MEM2 if vice versa.  Otherwise, returns NULL_RTX.  If a non-NULL
1876    value is returned MEM1 and MEM2 can never alias.  VARIES_P is used
1877    to decide whether or not an address may vary; it should return
1878    nonzero whenever variation is possible.
1879    MEM1_ADDR and MEM2_ADDR are the addresses of MEM1 and MEM2.  */
1880 
1881 static rtx
1882 fixed_scalar_and_varying_struct_p (mem1, mem2, mem1_addr, mem2_addr, varies_p)
1883      rtx mem1, mem2;
1884      rtx mem1_addr, mem2_addr;
1885      int (*varies_p) PARAMS ((rtx, int));
1886 {
1887   if (! flag_strict_aliasing)
1888     return NULL_RTX;
1889 
1890   if (MEM_SCALAR_P (mem1) && MEM_IN_STRUCT_P (mem2)
1891       && !varies_p (mem1_addr, 1) && varies_p (mem2_addr, 1))
1892     /* MEM1 is a scalar at a fixed address; MEM2 is a struct at a
1893        varying address.  */
1894     return mem1;
1895 
1896   if (MEM_IN_STRUCT_P (mem1) && MEM_SCALAR_P (mem2)
1897       && varies_p (mem1_addr, 1) && !varies_p (mem2_addr, 1))
1898     /* MEM2 is a scalar at a fixed address; MEM1 is a struct at a
1899        varying address.  */
1900     return mem2;
1901 
1902   return NULL_RTX;
1903 }
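
/* For instance, if MEM1 is a scalar `int' local in a fixed frame slot
   and MEM2 is a structure field accessed through a pointer (so its
   address varies), MEM1 is returned and the two references are known
   not to alias.  Note that this relies on -fstrict-aliasing; without
   it NULL_RTX is always returned.  */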
1904 
1905 /* Returns nonzero if something about the mode or address format of MEM
1906    indicates that it might well alias *anything*.  */
1907 
1908 static int
1909 aliases_everything_p (mem)
1910      rtx mem;
1911 {
1912   if (GET_CODE (XEXP (mem, 0)) == AND)
1913     /* If the address is an AND, it is very hard to know what it is
1914        actually pointing at.  */
1915     return 1;
1916 
1917   return 0;
1918 }
1919 
1920 /* Return true if we can determine that the fields referenced cannot
1921    overlap for any pair of objects.  */
1922 
1923 static bool
1924 nonoverlapping_component_refs_p (x, y)
1925      tree x, y;
1926 {
1927   tree fieldx, fieldy, typex, typey, orig_y;
1928 
1929   do
1930     {
1931       /* The comparison has to be done at a common type, since we don't
1932 	 know how the inheritance hierarchy works.  */
1933       orig_y = y;
1934       do
1935 	{
1936 	  fieldx = TREE_OPERAND (x, 1);
1937 	  typex = DECL_FIELD_CONTEXT (fieldx);
1938 
1939 	  y = orig_y;
1940 	  do
1941 	    {
1942 	      fieldy = TREE_OPERAND (y, 1);
1943 	      typey = DECL_FIELD_CONTEXT (fieldy);
1944 
1945 	      if (typex == typey)
1946 		goto found;
1947 
1948 	      y = TREE_OPERAND (y, 0);
1949 	    }
1950 	  while (y && TREE_CODE (y) == COMPONENT_REF);
1951 
1952 	  x = TREE_OPERAND (x, 0);
1953 	}
1954       while (x && TREE_CODE (x) == COMPONENT_REF);
1955 
1956       /* Never found a common type.  */
1957       return false;
1958 
1959     found:
1960       /* If we're left with accessing different fields of a structure,
1961 	 then no overlap.  */
1962       if (TREE_CODE (typex) == RECORD_TYPE
1963 	  && fieldx != fieldy)
1964 	return true;
1965 
1966       /* The comparison on the current field failed.  If we're accessing
1967 	 a deeply nested structure, look at the next outer level.  */
1968       x = TREE_OPERAND (x, 0);
1969       y = TREE_OPERAND (y, 0);
1970     }
1971   while (x && y
1972 	 && TREE_CODE (x) == COMPONENT_REF
1973 	 && TREE_CODE (y) == COMPONENT_REF);
1974 
1975   return false;
1976 }
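
/* As an illustration, given

     struct pair { int first; int second; } *p, *q;

   the accesses p->first and q->second reach the common type `struct
   pair' with different FIELD_DECLs, so we return true: the fields
   cannot overlap no matter how P and Q alias.  Accesses to the same
   field, or to fields of a union, fall through and return false.  */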
1977 
1978 /* Look at the bottom of the COMPONENT_REF list for a DECL, and return it.  */
1979 
1980 static tree
1981 decl_for_component_ref (x)
1982      tree x;
1983 {
1984   do
1985     {
1986       x = TREE_OPERAND (x, 0);
1987     }
1988   while (x && TREE_CODE (x) == COMPONENT_REF);
1989 
1990   return x && DECL_P (x) ? x : NULL_TREE;
1991 }
1992 
1993 /* Walk up the COMPONENT_REF list and adjust OFFSET to compensate for the
1994    offset of the field reference.  */
1995 
1996 static rtx
1997 adjust_offset_for_component_ref (x, offset)
1998      tree x;
1999      rtx offset;
2000 {
2001   HOST_WIDE_INT ioffset;
2002 
2003   if (! offset)
2004     return NULL_RTX;
2005 
2006   ioffset = INTVAL (offset);
2007   do
2008     {
2009       tree field = TREE_OPERAND (x, 1);
2010 
2011       if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
2012 	return NULL_RTX;
2013       ioffset += (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
2014 		  + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2015 		     / BITS_PER_UNIT));
2016 
2017       x = TREE_OPERAND (x, 0);
2018     }
2019   while (x && TREE_CODE (x) == COMPONENT_REF);
2020 
2021   return GEN_INT (ioffset);
2022 }
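
/* Example of the arithmetic: for a field with a DECL_FIELD_OFFSET of
   8 bytes and a DECL_FIELD_BIT_OFFSET of 32 bits, one iteration adds
   8 + 32 / BITS_PER_UNIT = 12 (with 8-bit units) to IOFFSET, so an
   incoming OFFSET of (const_int 4) becomes (const_int 16).  */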
2023 
2024 /* Return nonzero if we can determine the exprs corresponding to memrefs
2025    X and Y and they do not overlap.  */
2026 
2027 static int
2028 nonoverlapping_memrefs_p (x, y)
2029      rtx x, y;
2030 {
2031   tree exprx = MEM_EXPR (x), expry = MEM_EXPR (y);
2032   rtx rtlx, rtly;
2033   rtx basex, basey;
2034   rtx moffsetx, moffsety;
2035   HOST_WIDE_INT offsetx = 0, offsety = 0, sizex, sizey, tem;
2036 
2037   /* Unless both have exprs, we can't tell anything.  */
2038   if (exprx == 0 || expry == 0)
2039     return 0;
2040 
2041   /* If both are field references, we may be able to determine something.  */
2042   if (TREE_CODE (exprx) == COMPONENT_REF
2043       && TREE_CODE (expry) == COMPONENT_REF
2044       && nonoverlapping_component_refs_p (exprx, expry))
2045     return 1;
2046 
2047   /* If the field reference test failed, look at the DECLs involved.  */
2048   moffsetx = MEM_OFFSET (x);
2049   if (TREE_CODE (exprx) == COMPONENT_REF)
2050     {
2051       tree t = decl_for_component_ref (exprx);
2052       if (! t)
2053 	return 0;
2054       moffsetx = adjust_offset_for_component_ref (exprx, moffsetx);
2055       exprx = t;
2056     }
2057   else if (TREE_CODE (exprx) == INDIRECT_REF)
2058     {
2059       exprx = TREE_OPERAND (exprx, 0);
2060       if (flag_argument_noalias < 2
2061 	  || TREE_CODE (exprx) != PARM_DECL)
2062 	return 0;
2063     }
2064 
2065   moffsety = MEM_OFFSET (y);
2066   if (TREE_CODE (expry) == COMPONENT_REF)
2067     {
2068       tree t = decl_for_component_ref (expry);
2069       if (! t)
2070 	return 0;
2071       moffsety = adjust_offset_for_component_ref (expry, moffsety);
2072       expry = t;
2073     }
2074   else if (TREE_CODE (expry) == INDIRECT_REF)
2075     {
2076       expry = TREE_OPERAND (expry, 0);
2077       if (flag_argument_noalias < 2
2078 	  || TREE_CODE (expry) != PARM_DECL)
2079 	return 0;
2080     }
2081 
2082   if (! DECL_P (exprx) || ! DECL_P (expry))
2083     return 0;
2084 
2085   rtlx = DECL_RTL (exprx);
2086   rtly = DECL_RTL (expry);
2087 
2088   /* If either RTL is not a MEM, it must be a REG or CONCAT, meaning they
2089      can't overlap unless they are the same, because we never reuse the part
2090      of the stack frame used for locals for spilled pseudos.  */
2091   if ((GET_CODE (rtlx) != MEM || GET_CODE (rtly) != MEM)
2092       && ! rtx_equal_p (rtlx, rtly))
2093     return 1;
2094 
2095   /* Get the base and offsets of both decls.  If either is a register, we
2096      know both are and are the same, so use that as the base.  The only way
2097      we can avoid overlap is if we can deduce that they are nonoverlapping
2098      pieces of that decl, which is very rare.  */
2099   basex = GET_CODE (rtlx) == MEM ? XEXP (rtlx, 0) : rtlx;
2100   if (GET_CODE (basex) == PLUS && GET_CODE (XEXP (basex, 1)) == CONST_INT)
2101     offsetx = INTVAL (XEXP (basex, 1)), basex = XEXP (basex, 0);
2102 
2103   basey = GET_CODE (rtly) == MEM ? XEXP (rtly, 0) : rtly;
2104   if (GET_CODE (basey) == PLUS && GET_CODE (XEXP (basey, 1)) == CONST_INT)
2105     offsety = INTVAL (XEXP (basey, 1)), basey = XEXP (basey, 0);
2106 
2107   /* If the bases are different, we know they do not overlap if both
2108      are constants or if one is a constant and the other a pointer into the
2109      stack frame.  Otherwise a different base means we can't tell if they
2110      overlap or not.  */
2111   if (! rtx_equal_p (basex, basey))
2112     return ((CONSTANT_P (basex) && CONSTANT_P (basey))
2113 	    || (CONSTANT_P (basex) && REG_P (basey)
2114 		&& REGNO_PTR_FRAME_P (REGNO (basey)))
2115 	    || (CONSTANT_P (basey) && REG_P (basex)
2116 		&& REGNO_PTR_FRAME_P (REGNO (basex))));
2117 
2118   sizex = (GET_CODE (rtlx) != MEM ? (int) GET_MODE_SIZE (GET_MODE (rtlx))
2119 	   : MEM_SIZE (rtlx) ? INTVAL (MEM_SIZE (rtlx))
2120 	   : -1);
2121   sizey = (GET_CODE (rtly) != MEM ? (int) GET_MODE_SIZE (GET_MODE (rtly))
2122 	   : MEM_SIZE (rtly) ? INTVAL (MEM_SIZE (rtly))
2123 	   : -1);
2124 
2125   /* If we have an offset for either memref, it can update the values computed
2126      above.  */
2127   if (moffsetx)
2128     offsetx += INTVAL (moffsetx), sizex -= INTVAL (moffsetx);
2129   if (moffsety)
2130     offsety += INTVAL (moffsety), sizey -= INTVAL (moffsety);
2131 
2132   /* If a memref has both a size and an offset, we can use the smaller size.
2133      We can't do this if the offset isn't known because we must view this
2134      memref as being anywhere inside the DECL's MEM.  */
2135   if (MEM_SIZE (x) && moffsetx)
2136     sizex = INTVAL (MEM_SIZE (x));
2137   if (MEM_SIZE (y) && moffsety)
2138     sizey = INTVAL (MEM_SIZE (y));
2139 
2140   /* Put the values of the memref with the lower offset in X's values.  */
2141   if (offsetx > offsety)
2142     {
2143       tem = offsetx, offsetx = offsety, offsety = tem;
2144       tem = sizex, sizex = sizey, sizey = tem;
2145     }
2146 
2147   /* If we don't know the size of the lower-offset value, we can't tell
2148      if they conflict.  Otherwise, we do the test.  */
2149   return sizex >= 0 && offsety >= offsetx + sizex;
2150 }
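
/* The final test above is a simple interval check.  For example, with
   OFFSETX = 0, SIZEX = 4 and OFFSETY = 8, the references cover bytes
   0..3 and bytes from 8 onward of the same decl, and 8 >= 0 + 4, so
   they do not overlap.  If SIZEX were unknown (-1), we would have to
   assume a possible conflict.  */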
2151 
2152 /* True dependence: X is read after store in MEM takes place.  */
2153 
2154 int
2155 true_dependence (mem, mem_mode, x, varies)
2156      rtx mem;
2157      enum machine_mode mem_mode;
2158      rtx x;
2159      int (*varies) PARAMS ((rtx, int));
2160 {
2161   rtx x_addr, mem_addr;
2162   rtx base;
2163 
2164   if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2165     return 1;
2166 
2167   /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
2168      This is used in epilogue deallocation functions.  */
2169   if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
2170     return 1;
2171   if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
2172     return 1;
2173 
2174   if (DIFFERENT_ALIAS_SETS_P (x, mem))
2175     return 0;
2176 
2177   /* Unchanging memory can't conflict with non-unchanging memory.
2178      A non-unchanging read can conflict with a non-unchanging write.
2179      An unchanging read can conflict with an unchanging write since
2180      there may be a single store to this address to initialize it.
2181      Note that an unchanging store can conflict with a non-unchanging read
2182      since we have to make conservative assumptions when we have a
2183      record with readonly fields and we are copying the whole thing.
2184      Just fall through to the code below to resolve potential conflicts.
2185      This won't handle all cases optimally, but the possible performance
2186      loss should be negligible.  */
2187   if (RTX_UNCHANGING_P (x) && ! RTX_UNCHANGING_P (mem))
2188     return 0;
2189 
2190   if (nonoverlapping_memrefs_p (mem, x))
2191     return 0;
2192 
2193   if (mem_mode == VOIDmode)
2194     mem_mode = GET_MODE (mem);
2195 
2196   x_addr = get_addr (XEXP (x, 0));
2197   mem_addr = get_addr (XEXP (mem, 0));
2198 
2199   base = find_base_term (x_addr);
2200   if (base && (GET_CODE (base) == LABEL_REF
2201 	       || (GET_CODE (base) == SYMBOL_REF
2202 		   && CONSTANT_POOL_ADDRESS_P (base))))
2203     return 0;
2204 
2205   if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
2206     return 0;
2207 
2208   x_addr = canon_rtx (x_addr);
2209   mem_addr = canon_rtx (mem_addr);
2210 
2211   if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
2212 			    SIZE_FOR_MODE (x), x_addr, 0))
2213     return 0;
2214 
2215   if (aliases_everything_p (x))
2216     return 1;
2217 
2218   /* We cannot use aliases_everything_p to test MEM, since we must look
2219      at MEM_MODE, rather than GET_MODE (MEM).  */
2220   if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
2221     return 1;
2222 
2223   /* In true_dependence we also allow BLKmode to alias anything.  Why
2224      don't we do this in anti_dependence and output_dependence?  */
2225   if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
2226     return 1;
2227 
2228   return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
2229 					      varies);
2230 }
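
/* A typical use: the scheduler asks whether a load may be moved above
   an earlier store.  For a store to (mem:SI (symbol_ref "a")) and a
   load from (mem:SI (symbol_ref "b")), find_base_term yields two
   distinct symbolic bases, base_alias_check rejects the pair, and
   true_dependence returns 0, so the load can be hoisted past the
   store.  (The symbol names are illustrative.)  */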
2231 
2232 /* Canonical true dependence: X is read after store in MEM takes place.
2233    Variant of true_dependence which assumes MEM has already been
2234    canonicalized (hence we no longer do that here).
2235    The mem_addr argument has been added, since true_dependence computed
2236    this value prior to canonicalizing.  */
2237 
2238 int
2239 canon_true_dependence (mem, mem_mode, mem_addr, x, varies)
2240      rtx mem, mem_addr, x;
2241      enum machine_mode mem_mode;
2242      int (*varies) PARAMS ((rtx, int));
2243 {
2244   rtx x_addr;
2245 
2246   if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2247     return 1;
2248 
2249   /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
2250      This is used in epilogue deallocation functions.  */
2251   if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
2252     return 1;
2253   if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
2254     return 1;
2255 
2256   if (DIFFERENT_ALIAS_SETS_P (x, mem))
2257     return 0;
2258 
2259   /* If X is an unchanging read, then it can't possibly conflict with any
2260      non-unchanging store.  It may conflict with an unchanging write though,
2261      because there may be a single store to this address to initialize it.
2262      Just fall through to the code below to resolve the case where we have
2263      both an unchanging read and an unchanging write.  This won't handle all
2264      cases optimally, but the possible performance loss should be
2265      negligible.  */
2266   if (RTX_UNCHANGING_P (x) && ! RTX_UNCHANGING_P (mem))
2267     return 0;
2268 
2269   if (nonoverlapping_memrefs_p (x, mem))
2270     return 0;
2271 
2272   x_addr = get_addr (XEXP (x, 0));
2273 
2274   if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
2275     return 0;
2276 
2277   x_addr = canon_rtx (x_addr);
2278   if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
2279 			    SIZE_FOR_MODE (x), x_addr, 0))
2280     return 0;
2281 
2282   if (aliases_everything_p (x))
2283     return 1;
2284 
2285   /* We cannot use aliases_everything_p to test MEM, since we must look
2286      at MEM_MODE, rather than GET_MODE (MEM).  */
2287   if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
2288     return 1;
2289 
2290   /* In true_dependence we also allow BLKmode to alias anything.  Why
2291      don't we do this in anti_dependence and output_dependence?  */
2292   if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
2293     return 1;
2294 
2295   return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
2296 					      varies);
2297 }
2298 
2299 /* Returns nonzero if a write to X might alias a previous read from
2300    (or, if WRITEP is nonzero, a write to) MEM.  */
2301 
2302 static int
2303 write_dependence_p (mem, x, writep)
2304      rtx mem;
2305      rtx x;
2306      int writep;
2307 {
2308   rtx x_addr, mem_addr;
2309   rtx fixed_scalar;
2310   rtx base;
2311 
2312   if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2313     return 1;
2314 
2315   /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
2316      This is used in epilogue deallocation functions.  */
2317   if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
2318     return 1;
2319   if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
2320     return 1;
2321 
2322   if (DIFFERENT_ALIAS_SETS_P (x, mem))
2323     return 0;
2324 
2325   /* Unchanging memory can't conflict with non-unchanging memory.  */
2326   if (RTX_UNCHANGING_P (x) != RTX_UNCHANGING_P (mem))
2327     return 0;
2328 
2329   /* If MEM is an unchanging read, then it can't possibly conflict with
2330      the store to X, because there is at most one store to MEM, and it must
2331      have occurred somewhere before MEM.  */
2332   if (! writep && RTX_UNCHANGING_P (mem))
2333     return 0;
2334 
2335   if (nonoverlapping_memrefs_p (x, mem))
2336     return 0;
2337 
2338   x_addr = get_addr (XEXP (x, 0));
2339   mem_addr = get_addr (XEXP (mem, 0));
2340 
2341   if (! writep)
2342     {
2343       base = find_base_term (mem_addr);
2344       if (base && (GET_CODE (base) == LABEL_REF
2345 		   || (GET_CODE (base) == SYMBOL_REF
2346 		       && CONSTANT_POOL_ADDRESS_P (base))))
2347 	return 0;
2348     }
2349 
2350   if (! base_alias_check (x_addr, mem_addr, GET_MODE (x),
2351 			  GET_MODE (mem)))
2352     return 0;
2353 
2354   x_addr = canon_rtx (x_addr);
2355   mem_addr = canon_rtx (mem_addr);
2356 
2357   if (!memrefs_conflict_p (SIZE_FOR_MODE (mem), mem_addr,
2358 			   SIZE_FOR_MODE (x), x_addr, 0))
2359     return 0;
2360 
2361   fixed_scalar
2362     = fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
2363 					 rtx_addr_varies_p);
2364 
2365   return (!(fixed_scalar == mem && !aliases_everything_p (x))
2366 	  && !(fixed_scalar == x && !aliases_everything_p (mem)));
2367 }
2368 
2369 /* Anti dependence: X is written after read in MEM takes place.  */
2370 
2371 int
2372 anti_dependence (mem, x)
2373      rtx mem;
2374      rtx x;
2375 {
2376   return write_dependence_p (mem, x, /*writep=*/0);
2377 }
2378 
2379 /* Output dependence: X is written after store in MEM takes place.  */
2380 
2381 int
2382 output_dependence (mem, x)
2383      rtx mem;
2384      rtx x;
2385 {
2386   return write_dependence_p (mem, x, /*writep=*/1);
2387 }
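
/* For example, given a read R from and a later store S to the same
   location, anti_dependence (R, S) is nonzero, so S may not be moved
   above R; given two stores S1 and then S2 to the same location,
   output_dependence (S1, S2) is nonzero, so they may not be
   reordered.  Both reduce to write_dependence_p with WRITEP of 0 and
   1 respectively.  */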
2388 
2389 /* A subroutine of nonlocal_mentioned_p, returns 1 if *LOC mentions
2390    something which is not local to the function and is not constant.  */
2391 
2392 static int
2393 nonlocal_mentioned_p_1 (loc, data)
2394      rtx *loc;
2395      void *data ATTRIBUTE_UNUSED;
2396 {
2397   rtx x = *loc;
2398   rtx base;
2399   int regno;
2400 
2401   if (! x)
2402     return 0;
2403 
2404   switch (GET_CODE (x))
2405     {
2406     case SUBREG:
2407       if (GET_CODE (SUBREG_REG (x)) == REG)
2408 	{
2409 	  /* Global registers are not local.  */
2410 	  if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
2411 	      && global_regs[subreg_regno (x)])
2412 	    return 1;
2413 	  return 0;
2414 	}
2415       break;
2416 
2417     case REG:
2418       regno = REGNO (x);
2419       /* Global registers are not local.  */
2420       if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
2421 	return 1;
2422       return 0;
2423 
2424     case SCRATCH:
2425     case PC:
2426     case CC0:
2427     case CONST_INT:
2428     case CONST_DOUBLE:
2429     case CONST_VECTOR:
2430     case CONST:
2431     case LABEL_REF:
2432       return 0;
2433 
2434     case SYMBOL_REF:
2435       /* Constants in the function's constant pool are constant.  */
2436       if (CONSTANT_POOL_ADDRESS_P (x))
2437 	return 0;
2438       return 1;
2439 
2440     case CALL:
2441       /* Non-constant calls and recursion are not local.  */
2442       return 1;
2443 
2444     case MEM:
2445       /* Be overly conservative and consider any volatile memory
2446 	 reference as not local.  */
2447       if (MEM_VOLATILE_P (x))
2448 	return 1;
2449       base = find_base_term (XEXP (x, 0));
2450       if (base)
2451 	{
2452 	  /* A Pmode ADDRESS could be a reference via the structure value
2453 	     address or static chain.  Such memory references are nonlocal.
2454 
2455 	     Thus, we have to examine the contents of the ADDRESS to find
2456 	     out if this is a local reference or not.  */
2457 	  if (GET_CODE (base) == ADDRESS
2458 	      && GET_MODE (base) == Pmode
2459 	      && (XEXP (base, 0) == stack_pointer_rtx
2460 		  || XEXP (base, 0) == arg_pointer_rtx
2461 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2462 		  || XEXP (base, 0) == hard_frame_pointer_rtx
2463 #endif
2464 		  || XEXP (base, 0) == frame_pointer_rtx))
2465 	    return 0;
2466 	  /* Constants in the function's constant pool are constant.  */
2467 	  if (GET_CODE (base) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (base))
2468 	    return 0;
2469 	}
2470       return 1;
2471 
2472     case UNSPEC_VOLATILE:
2473     case ASM_INPUT:
2474       return 1;
2475 
2476     case ASM_OPERANDS:
2477       if (MEM_VOLATILE_P (x))
2478 	return 1;
2479 
2480     /* FALLTHROUGH */
2481 
2482     default:
2483       break;
2484     }
2485 
2486   return 0;
2487 }
2488 
2489 /* Returns nonzero if X might mention something which is not
2490    local to the function and is not constant.  */
2491 
2492 static int
2493 nonlocal_mentioned_p (x)
2494      rtx x;
2495 {
2496 
2497   if (INSN_P (x))
2498     {
2499       if (GET_CODE (x) == CALL_INSN)
2500 	{
2501 	  if (! CONST_OR_PURE_CALL_P (x))
2502 	    return 1;
2503 	  x = CALL_INSN_FUNCTION_USAGE (x);
2504 	  if (x == 0)
2505 	    return 0;
2506 	}
2507       else
2508 	x = PATTERN (x);
2509     }
2510 
2511   return for_each_rtx (&x, nonlocal_mentioned_p_1, NULL);
2512 }
2513 
2514 /* A subroutine of nonlocal_referenced_p, returns 1 if *LOC references
2515    something which is not local to the function and is not constant.  */
2516 
2517 static int
2518 nonlocal_referenced_p_1 (loc, data)
2519      rtx *loc;
2520      void *data ATTRIBUTE_UNUSED;
2521 {
2522   rtx x = *loc;
2523 
2524   if (! x)
2525     return 0;
2526 
2527   switch (GET_CODE (x))
2528     {
2529     case MEM:
2530     case REG:
2531     case SYMBOL_REF:
2532     case SUBREG:
2533       return nonlocal_mentioned_p (x);
2534 
2535     case CALL:
2536       /* Non-constant calls and recursion are not local.  */
2537       return 1;
2538 
2539     case SET:
2540       if (nonlocal_mentioned_p (SET_SRC (x)))
2541 	return 1;
2542 
2543       if (GET_CODE (SET_DEST (x)) == MEM)
2544 	return nonlocal_mentioned_p (XEXP (SET_DEST (x), 0));
2545 
2546       /* If the destination is anything other than a CC0, PC,
2547 	 MEM, REG, or a SUBREG of a REG that occupies all of
2548 	 the REG, then X references nonlocal memory if it is
2549 	 mentioned in the destination.  */
2550       if (GET_CODE (SET_DEST (x)) != CC0
2551 	  && GET_CODE (SET_DEST (x)) != PC
2552 	  && GET_CODE (SET_DEST (x)) != REG
2553 	  && ! (GET_CODE (SET_DEST (x)) == SUBREG
2554 		&& GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
2555 		&& (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
2556 		      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
2557 		    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2558 			 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
2559 	return nonlocal_mentioned_p (SET_DEST (x));
2560       return 0;
2561 
2562     case CLOBBER:
2563       if (GET_CODE (XEXP (x, 0)) == MEM)
2564 	return nonlocal_mentioned_p (XEXP (XEXP (x, 0), 0));
2565       return 0;
2566 
2567     case USE:
2568       return nonlocal_mentioned_p (XEXP (x, 0));
2569 
2570     case ASM_INPUT:
2571     case UNSPEC_VOLATILE:
2572       return 1;
2573 
2574     case ASM_OPERANDS:
2575       if (MEM_VOLATILE_P (x))
2576 	return 1;
2577 
2578     /* FALLTHROUGH */
2579 
2580     default:
2581       break;
2582     }
2583 
2584   return 0;
2585 }
2586 
2587 /* Returns nonzero if X might reference something which is not
2588    local to the function and is not constant.  */
2589 
2590 static int
2591 nonlocal_referenced_p (x)
2592      rtx x;
2593 {
2594 
2595   if (INSN_P (x))
2596     {
2597       if (GET_CODE (x) == CALL_INSN)
2598 	{
2599 	  if (! CONST_OR_PURE_CALL_P (x))
2600 	    return 1;
2601 	  x = CALL_INSN_FUNCTION_USAGE (x);
2602 	  if (x == 0)
2603 	    return 0;
2604 	}
2605       else
2606 	x = PATTERN (x);
2607     }
2608 
2609   return for_each_rtx (&x, nonlocal_referenced_p_1, NULL);
2610 }
2611 
2612 /* A subroutine of nonlocal_set_p, returns 1 if *LOC sets
2613    something which is not local to the function and is not constant.  */
2614 
2615 static int
2616 nonlocal_set_p_1 (loc, data)
2617      rtx *loc;
2618      void *data ATTRIBUTE_UNUSED;
2619 {
2620   rtx x = *loc;
2621 
2622   if (! x)
2623     return 0;
2624 
2625   switch (GET_CODE (x))
2626     {
2627     case CALL:
2628       /* Non-constant calls and recursion are not local.  */
2629       return 1;
2630 
2631     case PRE_INC:
2632     case PRE_DEC:
2633     case POST_INC:
2634     case POST_DEC:
2635     case PRE_MODIFY:
2636     case POST_MODIFY:
2637       return nonlocal_mentioned_p (XEXP (x, 0));
2638 
2639     case SET:
2640       if (nonlocal_mentioned_p (SET_DEST (x)))
2641 	return 1;
2642       return nonlocal_set_p (SET_SRC (x));
2643 
2644     case CLOBBER:
2645       return nonlocal_mentioned_p (XEXP (x, 0));
2646 
2647     case USE:
2648       return 0;
2649 
2650     case ASM_INPUT:
2651     case UNSPEC_VOLATILE:
2652       return 1;
2653 
2654     case ASM_OPERANDS:
2655       if (MEM_VOLATILE_P (x))
2656 	return 1;
2657 
2658     /* FALLTHROUGH */
2659 
2660     default:
2661       break;
2662     }
2663 
2664   return 0;
2665 }
2666 
2667 /* Returns nonzero if X might set something which is not
2668    local to the function and is not constant.  */
2669 
2670 static int
2671 nonlocal_set_p (x)
2672      rtx x;
2673 {
2674 
2675   if (INSN_P (x))
2676     {
2677       if (GET_CODE (x) == CALL_INSN)
2678 	{
2679 	  if (! CONST_OR_PURE_CALL_P (x))
2680 	    return 1;
2681 	  x = CALL_INSN_FUNCTION_USAGE (x);
2682 	  if (x == 0)
2683 	    return 0;
2684 	}
2685       else
2686 	x = PATTERN (x);
2687     }
2688 
2689   return for_each_rtx (&x, nonlocal_set_p_1, NULL);
2690 }
2691 
2692 /* Mark the function if it is constant.  */
2693 
2694 void
2695 mark_constant_function ()
2696 {
2697   rtx insn;
2698   int nonlocal_memory_referenced;
2699 
2700   if (TREE_READONLY (current_function_decl)
2701       || DECL_IS_PURE (current_function_decl)
2702       || TREE_THIS_VOLATILE (current_function_decl)
2703       || TYPE_MODE (TREE_TYPE (current_function_decl)) == VOIDmode
2704       || current_function_has_nonlocal_goto
2705       || !(*targetm.binds_local_p) (current_function_decl))
2706     return;
2707 
2708   /* A loop might not return, which counts as a side effect.  */
2709   if (mark_dfs_back_edges ())
2710     return;
2711 
2712   nonlocal_memory_referenced = 0;
2713 
2714   init_alias_analysis ();
2715 
2716   /* Determine if this is a constant or pure function.  */
2717 
2718   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2719     {
2720       if (! INSN_P (insn))
2721 	continue;
2722 
2723       if (nonlocal_set_p (insn) || global_reg_mentioned_p (insn)
2724 	  || volatile_refs_p (PATTERN (insn)))
2725 	break;
2726 
2727       if (! nonlocal_memory_referenced)
2728 	nonlocal_memory_referenced = nonlocal_referenced_p (insn);
2729     }
2730 
2731   end_alias_analysis ();
2732 
2733   /* Mark the function.  */
2734 
2735   if (insn)
2736     ;
2737   else if (nonlocal_memory_referenced)
2738     DECL_IS_PURE (current_function_decl) = 1;
2739   else
2740     TREE_READONLY (current_function_decl) = 1;
2741 }
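
/* To illustrate: a function such as

     int add (int a, int b) { return a + b; }

   mentions nothing nonlocal and is marked TREE_READONLY (a `const'
   function), while one that merely reads a global variable references
   but never sets nonlocal memory and is marked DECL_IS_PURE.  Any
   nonlocal store, volatile reference or global register use stops the
   scan early and leaves the function unmarked.  */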
2742 
2743 
2744 void
2745 init_alias_once ()
2746 {
2747   int i;
2748 
2749 #ifndef OUTGOING_REGNO
2750 #define OUTGOING_REGNO(N) N
2751 #endif
2752   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2753     /* Check whether this register can hold an incoming pointer
2754        argument.  FUNCTION_ARG_REGNO_P tests outgoing register
2755        numbers, so translate if necessary due to register windows.  */
2756     if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
2757 	&& HARD_REGNO_MODE_OK (i, Pmode))
2758       static_reg_base_value[i]
2759 	= gen_rtx_ADDRESS (VOIDmode, gen_rtx_REG (Pmode, i));
2760 
2761   static_reg_base_value[STACK_POINTER_REGNUM]
2762     = gen_rtx_ADDRESS (Pmode, stack_pointer_rtx);
2763   static_reg_base_value[ARG_POINTER_REGNUM]
2764     = gen_rtx_ADDRESS (Pmode, arg_pointer_rtx);
2765   static_reg_base_value[FRAME_POINTER_REGNUM]
2766     = gen_rtx_ADDRESS (Pmode, frame_pointer_rtx);
2767 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2768   static_reg_base_value[HARD_FRAME_POINTER_REGNUM]
2769     = gen_rtx_ADDRESS (Pmode, hard_frame_pointer_rtx);
2770 #endif
2771 
2772   alias_sets = splay_tree_new (splay_tree_compare_ints, 0, 0);
2773 }
2774 
2775 /* Initialize the aliasing machinery.  Initialize the REG_KNOWN_VALUE
2776    array.  */
2777 
2778 void
2779 init_alias_analysis ()
2780 {
2781   int maxreg = max_reg_num ();
2782   int changed, pass;
2783   int i;
2784   unsigned int ui;
2785   rtx insn;
2786 
2787   reg_known_value_size = maxreg - FIRST_PSEUDO_REGISTER;
2788   reg_known_value = ggc_calloc (reg_known_value_size, sizeof (rtx));
2789   reg_known_equiv_p = xcalloc (reg_known_value_size, sizeof (bool));
2790 
2791   /* Overallocate reg_base_value to allow some growth during loop
2792      optimization.  Loop unrolling can create a large number of
2793      registers.  */
2794   reg_base_value_size = maxreg * 2;
2795   reg_base_value = (rtx *) ggc_alloc_cleared (reg_base_value_size
2796 					      * sizeof (rtx));
2797 
2798   new_reg_base_value = (rtx *) xmalloc (reg_base_value_size * sizeof (rtx));
2799   reg_seen = (char *) xmalloc (reg_base_value_size);
2800   if (! reload_completed && flag_unroll_loops)
2801     {
2802       alias_invariant = (rtx *) ggc_alloc_cleared (reg_base_value_size
2803 						   * sizeof (rtx));
2804       alias_invariant_size = reg_base_value_size;
2805     }
2806 
2807   /* The basic idea is that each pass through this loop will use the
2808      "constant" information from the previous pass to propagate alias
2809      information through another level of assignments.
2810 
2811      This could get expensive if the assignment chains are long.  Maybe
2812      we should throttle the number of iterations, possibly based on
2813      the optimization level or flag_expensive_optimizations.
2814 
2815      We could propagate more information in the first pass by making use
2816      of REG_N_SETS to determine immediately that the alias information
2817      for a pseudo is "constant".
2818 
2819      A program with an uninitialized variable can cause an infinite loop
2820      here.  Instead of doing a full dataflow analysis to detect such problems
2821      we just cap the number of iterations for the loop.
2822 
2823      The state of the arrays for the set chain in question does not matter
2824      since the program has undefined behavior.  */
2825 
2826   pass = 0;
2827   do
2828     {
2829       /* Assume nothing will change this iteration of the loop.  */
2830       changed = 0;
2831 
2832       /* We want to assign the same IDs each iteration of this loop, so
2833 	 start counting from zero each iteration of the loop.  */
2834       unique_id = 0;
2835 
2836       /* We're at the start of the function each iteration through the
2837 	 loop, so we're copying arguments.  */
2838       copying_arguments = true;
2839 
2840       /* Wipe the potential alias information clean for this pass.  */
2841       memset ((char *) new_reg_base_value, 0, reg_base_value_size * sizeof (rtx));
2842 
2843       /* Wipe the reg_seen array clean.  */
2844       memset ((char *) reg_seen, 0, reg_base_value_size);
2845 
2846       /* Mark all hard registers which may contain an address.
2847 	 The stack, frame and argument pointers may contain an address.
2848 	 An argument register which can hold a Pmode value may contain
2849 	 an address even if it is not in BASE_REGS.
2850 
2851 	 The address expression is VOIDmode for an argument and
2852 	 Pmode for other registers.  */
2853 
2854       memcpy (new_reg_base_value, static_reg_base_value,
2855 	      FIRST_PSEUDO_REGISTER * sizeof (rtx));
2856 
2857       /* Walk the insns adding values to the new_reg_base_value array.  */
2858       for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2859 	{
2860 	  if (INSN_P (insn))
2861 	    {
2862 	      rtx note, set;
2863 
2864 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
2865 	      /* The prologue/epilogue insns are not threaded onto the
2866 		 insn chain until after reload has completed.  Thus,
2867 		 there is no sense wasting time checking if INSN is in
2868 		 the prologue/epilogue until after reload has completed.  */
2869 	      if (reload_completed
2870 		  && prologue_epilogue_contains (insn))
2871 		continue;
2872 #endif
2873 
2874 	      /* If this insn has a noalias note, process it.  Otherwise,
2875 	         scan for sets.  A simple set will have no side effects
2876 	         which could change the base value of any other register.  */
2877 
2878 	      if (GET_CODE (PATTERN (insn)) == SET
2879 		  && REG_NOTES (insn) != 0
2880 		  && find_reg_note (insn, REG_NOALIAS, NULL_RTX))
2881 		record_set (SET_DEST (PATTERN (insn)), NULL_RTX, NULL);
2882 	      else
2883 		note_stores (PATTERN (insn), record_set, NULL);
2884 
2885 	      set = single_set (insn);
2886 
2887 	      if (set != 0
2888 		  && GET_CODE (SET_DEST (set)) == REG
2889 		  && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
2890 		{
2891 		  unsigned int regno = REGNO (SET_DEST (set));
2892 		  rtx src = SET_SRC (set);
2893 		  rtx t;
2894 
2895 		  if (REG_NOTES (insn) != 0
2896 		      && (((note = find_reg_note (insn, REG_EQUAL, 0)) != 0
2897 			   && REG_N_SETS (regno) == 1)
2898 			  || (note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
2899 		      && GET_CODE (XEXP (note, 0)) != EXPR_LIST
2900 		      && ! rtx_varies_p (XEXP (note, 0), 1)
2901 		      && ! reg_overlap_mentioned_p (SET_DEST (set),
2902 						    XEXP (note, 0)))
2903 		    {
2904 		      set_reg_known_value (regno, XEXP (note, 0));
2905 		      set_reg_known_equiv_p (regno,
2906 			REG_NOTE_KIND (note) == REG_EQUIV);
2907 		    }
2908 		  else if (REG_N_SETS (regno) == 1
2909 			   && GET_CODE (src) == PLUS
2910 			   && GET_CODE (XEXP (src, 0)) == REG
2911 			   && (t = get_reg_known_value (REGNO (XEXP (src, 0))))
2912 			   && GET_CODE (XEXP (src, 1)) == CONST_INT)
2913 		    {
2914 		      t = plus_constant (t, INTVAL (XEXP (src, 1)));
2915 		      set_reg_known_value (regno, t);
2916 		      set_reg_known_equiv_p (regno, 0);
2917 		    }
2918 		  else if (REG_N_SETS (regno) == 1
2919 			   && ! rtx_varies_p (src, 1))
2920 		    {
2921 		      set_reg_known_value (regno, src);
2922 		      set_reg_known_equiv_p (regno, 0);
2923 		    }
2924 		}
2925 	    }
2926 	  else if (GET_CODE (insn) == NOTE
2927 		   && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
2928 	    copying_arguments = false;
2929 	}
2930 
2931       /* Now propagate values from new_reg_base_value to reg_base_value.  */
2932       for (ui = 0; ui < reg_base_value_size; ui++)
2933 	{
2934 	  if (new_reg_base_value[ui]
2935 	      && new_reg_base_value[ui] != reg_base_value[ui]
2936 	      && ! rtx_equal_p (new_reg_base_value[ui], reg_base_value[ui]))
2937 	    {
2938 	      reg_base_value[ui] = new_reg_base_value[ui];
2939 	      changed = 1;
2940 	    }
2941 	}
2942     }
2943   while (changed && ++pass < MAX_ALIAS_LOOP_PASSES);
2944 
2945   /* Fill in the remaining entries.  */
2946   for (i = 0; i < (int)reg_known_value_size; i++)
2947     if (reg_known_value[i] == 0)
2948       reg_known_value[i] = regno_reg_rtx[i + FIRST_PSEUDO_REGISTER];
2949 
2950   /* Simplify the reg_base_value array so that no register refers to
2951      another register, except to special registers indirectly through
2952      ADDRESS expressions.
2953 
2954      In theory this loop can take as long as O(registers^2), but unless
2955      there are very long dependency chains it will run in close to linear
2956      time.
2957 
2958      This loop may not be needed any longer now that the main loop does
2959      a better job at propagating alias information.  */
2960   pass = 0;
2961   do
2962     {
2963       changed = 0;
2964       pass++;
2965       for (ui = 0; ui < reg_base_value_size; ui++)
2966 	{
2967 	  rtx base = reg_base_value[ui];
2968 	  if (base && GET_CODE (base) == REG)
2969 	    {
2970 	      unsigned int base_regno = REGNO (base);
2971 	      if (base_regno == ui)		/* register set from itself */
2972 		reg_base_value[ui] = 0;
2973 	      else
2974 		reg_base_value[ui] = reg_base_value[base_regno];
2975 	      changed = 1;
2976 	    }
2977 	}
2978     }
2979   while (changed && pass < MAX_ALIAS_LOOP_PASSES);
2980 
2981   /* Clean up.  */
2982   free (new_reg_base_value);
2983   new_reg_base_value = 0;
2984   free (reg_seen);
2985   reg_seen = 0;
2986 }
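
/* As an example of the propagation above, for a pseudo set exactly
   once by

     (set (reg:SI 70) (plus:SI (reg:SI 69) (const_int 16)))

   where reg 69 already has known value T, reg 70's known value becomes
   plus_constant (T, 16), which lets memrefs_conflict_p reason about
   the two registers as offsets from a common base.  (The register
   numbers are illustrative.)  */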
2987 
2988 void
2989 end_alias_analysis ()
2990 {
2991   reg_known_value = 0;
2992   reg_known_value_size = 0;
2993   free (reg_known_equiv_p);
2994   reg_known_equiv_p = 0;
2995   reg_base_value = 0;
2996   reg_base_value_size = 0;
2997   if (alias_invariant)
2998     {
2999       alias_invariant = 0;
3000       alias_invariant_size = 0;
3001     }
3002 }
3003 
3004 #include "gt-alias.h"
3005