1 /* Common subexpression elimination library for GNU compiler.
2    Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3    1999, 2000, 2001, 2003, 2004 Free Software Foundation, Inc.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING.  If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA.  */
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "flags.h"
32 #include "real.h"
33 #include "insn-config.h"
34 #include "recog.h"
35 #include "function.h"
36 #include "expr.h"
37 #include "toplev.h"
38 #include "output.h"
39 #include "ggc.h"
40 #include "hashtab.h"
41 #include "cselib.h"
42 #include "params.h"
43 #include "alloc-pool.h"
44 
45 static int entry_and_rtx_equal_p (const void *, const void *);
46 static hashval_t get_value_hash (const void *);
47 static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
48 static struct elt_loc_list *new_elt_loc_list (struct elt_loc_list *, rtx);
49 static void unchain_one_value (cselib_val *);
50 static void unchain_one_elt_list (struct elt_list **);
51 static void unchain_one_elt_loc_list (struct elt_loc_list **);
52 static void clear_table (void);
53 static int discard_useless_locs (void **, void *);
54 static int discard_useless_values (void **, void *);
55 static void remove_useless_values (void);
56 static rtx wrap_constant (enum machine_mode, rtx);
57 static unsigned int hash_rtx (rtx, enum machine_mode, int);
58 static cselib_val *new_cselib_val (unsigned int, enum machine_mode);
59 static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
60 static cselib_val *cselib_lookup_mem (rtx, int);
61 static void cselib_invalidate_regno (unsigned int, enum machine_mode);
62 static void cselib_invalidate_mem (rtx);
63 static void cselib_invalidate_rtx (rtx, rtx, void *);
64 static void cselib_record_set (rtx, cselib_val *, cselib_val *);
65 static void cselib_record_sets (rtx);
66 
67 /* There are three ways in which cselib can look up an rtx:
68    - for a REG, the reg_values table (which is indexed by regno) is used
69    - for a MEM, we recursively look up its address and then follow the
70      addr_list of that value
71    - for everything else, we compute a hash value and go through the hash
72      table.  Since different rtx's can still have the same hash value,
73      this involves walking the table entries for a given value and comparing
74      the locations of the entries with the rtx we are looking up.  */
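/* A minimal sketch of how a pass typically drives this library; the exact
   call sites differ per pass and the loop below is illustrative only:

	cselib_init ();
	for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	  {
	    cselib_process_insn (insn);
	    ...
	    val = cselib_lookup (x, GET_MODE (x), 0);
	    ...
	  }
	cselib_finish ();

   Callers must also set up alias analysis; see the comment above
   cselib_init near the end of this file.  */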
75 
76 /* A table that enables us to look up elts by their value.  */
77 static GTY((param_is (cselib_val))) htab_t hash_table;
78 
79 /* This is a global so we don't have to pass it through every function.
80    It is used in new_elt_loc_list to set SETTING_INSN.  */
81 static rtx cselib_current_insn;
82 static bool cselib_current_insn_in_libcall;
83 
84 /* Every new unknown value gets a unique number.  */
85 static unsigned int next_unknown_value;
86 
87 /* The number of registers we had when the varrays were last resized.  */
88 static unsigned int cselib_nregs;
89 
90 /* Count values without known locations.  Whenever this grows too big, we
91    remove these useless values from the table.  */
92 static int n_useless_values;
93 
94 /* Number of useless values before we remove them from the hash table.  */
95 #define MAX_USELESS_VALUES 32
96 
97 /* This table maps from register number to values.  It does not
98    contain pointers to cselib_val structures, but rather elt_lists.
99    The purpose is to be able to refer to the same register in
100    different modes.  The first element of the list defines the mode in
101    which the register was set; if the mode is unknown or the value is
102    no longer valid in that mode, ELT will be NULL for the first
103    element.  */
104 static GTY(()) varray_type reg_values;
105 static GTY((deletable (""))) varray_type reg_values_old;
106 #define REG_VALUES(I) VARRAY_ELT_LIST (reg_values, (I))
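/* For example (illustrative only): after (set (reg:SI 100) ...) has been
   recorded and reg 100 is later looked up in QImode, REG_VALUES (100) is a
   two-element list whose first element holds the SImode value the register
   was set to and whose second element holds the QImode value created by the
   lookup.  */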
107 
108 /* The largest number of hard regs used by any entry added to the
109    REG_VALUES table.  Cleared on each clear_table() invocation.  */
110 static unsigned int max_value_regs;
111 
112 /* The set of indices I for which REG_VALUES(I) != 0 is saved here.  This is
113    used in clear_table() for fast emptying.  */
114 static GTY(()) varray_type used_regs;
115 static GTY((deletable (""))) varray_type used_regs_old;
116 
117 /* We pass this to cselib_invalidate_mem to invalidate all of
118    memory for a non-const call instruction.  */
119 static GTY(()) rtx callmem;
120 
121 /* Set by discard_useless_locs if it deleted the last location of any
122    value.  */
123 static int values_became_useless;
124 
125 /* Used as stop element of the containing_mem list so we can check
126    presence in the list by checking the next pointer.  */
127 static cselib_val dummy_val;
128 
129 /* Used to list all values that contain a memory reference.
130    May or may not contain useless values - the list is compacted
131    each time memory is invalidated.  */
132 static cselib_val *first_containing_mem = &dummy_val;
133 static alloc_pool elt_loc_list_pool, elt_list_pool, cselib_val_pool, value_pool;
134 
135 
136 /* Allocate a struct elt_list and fill in its two elements with the
137    arguments.  */
138 
139 static inline struct elt_list *
140 new_elt_list (struct elt_list *next, cselib_val *elt)
141 {
142   struct elt_list *el;
143   el = pool_alloc (elt_list_pool);
144   el->next = next;
145   el->elt = elt;
146   return el;
147 }
148 
149 /* Allocate a struct elt_loc_list and fill in its two elements with the
150    arguments.  */
151 
152 static inline struct elt_loc_list *
153 new_elt_loc_list (struct elt_loc_list *next, rtx loc)
154 {
155   struct elt_loc_list *el;
156   el = pool_alloc (elt_loc_list_pool);
157   el->next = next;
158   el->loc = loc;
159   el->canon_loc = NULL;
160   el->setting_insn = cselib_current_insn;
161   el->in_libcall = cselib_current_insn_in_libcall;
162   return el;
163 }
164 
165 /* The elt_list at *PL is no longer needed.  Unchain it and free its
166    storage.  */
167 
168 static inline void
169 unchain_one_elt_list (struct elt_list **pl)
170 {
171   struct elt_list *l = *pl;
172 
173   *pl = l->next;
174   pool_free (elt_list_pool, l);
175 }
176 
177 /* Likewise for elt_loc_lists.  */
178 
179 static void
180 unchain_one_elt_loc_list (struct elt_loc_list **pl)
181 {
182   struct elt_loc_list *l = *pl;
183 
184   *pl = l->next;
185   pool_free (elt_loc_list_pool, l);
186 }
187 
188 /* Likewise for cselib_vals.  This also frees the addr_list associated with
189    V.  */
190 
191 static void
192 unchain_one_value (cselib_val *v)
193 {
194   while (v->addr_list)
195     unchain_one_elt_list (&v->addr_list);
196 
197   pool_free (cselib_val_pool, v);
198 }
199 
200 /* Remove all entries from the hash table and reset the entries of
201    REG_VALUES that are known to have been used.  This is also used
202    during initialization.  */
203 
204 static void
205 clear_table (void)
206 {
207   unsigned int i;
208 
209   for (i = 0; i < VARRAY_ACTIVE_SIZE (used_regs); i++)
210     REG_VALUES (VARRAY_UINT (used_regs, i)) = 0;
211 
212   max_value_regs = 0;
213 
214   VARRAY_POP_ALL (used_regs);
215 
216   htab_empty (hash_table);
217 
218   n_useless_values = 0;
219 
220   next_unknown_value = 0;
221 
222   first_containing_mem = &dummy_val;
223 }
224 
225 /* The equality test for our hash table.  The first argument ENTRY is a table
226    element (i.e. a cselib_val), while the second arg X is an rtx.  We know
227    that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
228    CONST of an appropriate mode.  */
229 
230 static int
231 entry_and_rtx_equal_p (const void *entry, const void *x_arg)
232 {
233   struct elt_loc_list *l;
234   const cselib_val *v = (const cselib_val *) entry;
235   rtx x = (rtx) x_arg;
236   enum machine_mode mode = GET_MODE (x);
237 
238   if (GET_CODE (x) == CONST_INT
239       || (mode == VOIDmode && GET_CODE (x) == CONST_DOUBLE))
240     abort ();
241   if (mode != GET_MODE (v->u.val_rtx))
242     return 0;
243 
244   /* Unwrap X if necessary.  */
245   if (GET_CODE (x) == CONST
246       && (GET_CODE (XEXP (x, 0)) == CONST_INT
247 	  || GET_CODE (XEXP (x, 0)) == CONST_DOUBLE))
248     x = XEXP (x, 0);
249 
250   /* We don't guarantee that distinct rtx's have different hash values,
251      so we need to do a comparison.  */
252   for (l = v->locs; l; l = l->next)
253     if (rtx_equal_for_cselib_p (l->loc, x))
254       return 1;
255 
256   return 0;
257 }
258 
259 /* The hash function for our hash table.  The value is always computed with
260    hash_rtx when adding an element; this function just extracts the hash
261    value from a cselib_val structure.  */
262 
263 static hashval_t
264 get_value_hash (const void *entry)
265 {
266   const cselib_val *v = (const cselib_val *) entry;
267   return v->value;
268 }
269 
270 /* Return true if X contains a VALUE rtx.  If ONLY_USELESS is set, we
271    only return true for values which point to a cselib_val whose value
272    element has been set to zero, which implies the cselib_val will be
273    removed.  */
274 
275 int
276 references_value_p (rtx x, int only_useless)
277 {
278   enum rtx_code code = GET_CODE (x);
279   const char *fmt = GET_RTX_FORMAT (code);
280   int i, j;
281 
282   if (GET_CODE (x) == VALUE
283       && (! only_useless || CSELIB_VAL_PTR (x)->locs == 0))
284     return 1;
285 
286   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
287     {
288       if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
289 	return 1;
290       else if (fmt[i] == 'E')
291 	for (j = 0; j < XVECLEN (x, i); j++)
292 	  if (references_value_p (XVECEXP (x, i, j), only_useless))
293 	    return 1;
294     }
295 
296   return 0;
297 }
298 
299 /* For all locations found in X, delete locations that reference useless
300    values (i.e. values without any location).  Called through
301    htab_traverse.  */
302 
303 static int
304 discard_useless_locs (void **x, void *info ATTRIBUTE_UNUSED)
305 {
306   cselib_val *v = (cselib_val *)*x;
307   struct elt_loc_list **p = &v->locs;
308   int had_locs = v->locs != 0;
309 
310   while (*p)
311     {
312       if (references_value_p ((*p)->loc, 1))
313 	unchain_one_elt_loc_list (p);
314       else
315 	p = &(*p)->next;
316     }
317 
318   if (had_locs && v->locs == 0)
319     {
320       n_useless_values++;
321       values_became_useless = 1;
322     }
323   return 1;
324 }
325 
326 /* If X is a value with no locations, remove it from the hashtable.  */
327 
328 static int
329 discard_useless_values (void **x, void *info ATTRIBUTE_UNUSED)
330 {
331   cselib_val *v = (cselib_val *)*x;
332 
333   if (v->locs == 0)
334     {
335       CSELIB_VAL_PTR (v->u.val_rtx) = NULL;
336       htab_clear_slot (hash_table, x);
337       unchain_one_value (v);
338       n_useless_values--;
339     }
340 
341   return 1;
342 }
343 
344 /* Clean out useless values (i.e. those which no longer have locations
345    associated with them) from the hash table.  */
346 
347 static void
348 remove_useless_values (void)
349 {
350   cselib_val **p, *v;
351   /* First pass: eliminate locations that reference useless values.  That
352      in turn can make more values useless.  */
353   do
354     {
355       values_became_useless = 0;
356       htab_traverse (hash_table, discard_useless_locs, 0);
357     }
358   while (values_became_useless);
359 
360   /* Second pass: actually remove the values.  */
361   p = &first_containing_mem;
362   for (v = *p; v != &dummy_val; v = v->next_containing_mem)
363     if (v->locs)
364       {
365 	*p = v;
366 	p = &(*p)->next_containing_mem;
367       }
368   *p = &dummy_val;
369 
370   htab_traverse (hash_table, discard_useless_values, 0);
371 
372   if (n_useless_values != 0)
373     abort ();
374 }
375 
376 /* Return the mode in which a register was last set.  If X is not a
377    register, return its mode.  If the mode in which the register was
378    set is not known, or the value was already clobbered, return
379    VOIDmode.  */
380 
381 enum machine_mode
382 cselib_reg_set_mode (rtx x)
383 {
384   if (GET_CODE (x) != REG)
385     return GET_MODE (x);
386 
387   if (REG_VALUES (REGNO (x)) == NULL
388       || REG_VALUES (REGNO (x))->elt == NULL)
389     return VOIDmode;
390 
391   return GET_MODE (REG_VALUES (REGNO (x))->elt->u.val_rtx);
392 }
393 
394 /* Return nonzero if we can prove that X and Y contain the same value, taking
395    our gathered information into account.  */
396 
397 int
398 rtx_equal_for_cselib_p (rtx x, rtx y)
399 {
400   enum rtx_code code;
401   const char *fmt;
402   int i;
403 
404   if (GET_CODE (x) == REG || GET_CODE (x) == MEM)
405     {
406       cselib_val *e = cselib_lookup (x, GET_MODE (x), 0);
407 
408       if (e)
409 	x = e->u.val_rtx;
410     }
411 
412   if (GET_CODE (y) == REG || GET_CODE (y) == MEM)
413     {
414       cselib_val *e = cselib_lookup (y, GET_MODE (y), 0);
415 
416       if (e)
417 	y = e->u.val_rtx;
418     }
419 
420   if (x == y)
421     return 1;
422 
423   if (GET_CODE (x) == VALUE && GET_CODE (y) == VALUE)
424     return CSELIB_VAL_PTR (x) == CSELIB_VAL_PTR (y);
425 
426   if (GET_CODE (x) == VALUE)
427     {
428       cselib_val *e = CSELIB_VAL_PTR (x);
429       struct elt_loc_list *l;
430 
431       for (l = e->locs; l; l = l->next)
432 	{
433 	  rtx t = l->loc;
434 
435 	  /* Avoid infinite recursion.  */
436 	  if (GET_CODE (t) == REG || GET_CODE (t) == MEM)
437 	    continue;
438 	  else if (rtx_equal_for_cselib_p (t, y))
439 	    return 1;
440 	}
441 
442       return 0;
443     }
444 
445   if (GET_CODE (y) == VALUE)
446     {
447       cselib_val *e = CSELIB_VAL_PTR (y);
448       struct elt_loc_list *l;
449 
450       for (l = e->locs; l; l = l->next)
451 	{
452 	  rtx t = l->loc;
453 
454 	  if (GET_CODE (t) == REG || GET_CODE (t) == MEM)
455 	    continue;
456 	  else if (rtx_equal_for_cselib_p (x, t))
457 	    return 1;
458 	}
459 
460       return 0;
461     }
462 
463   if (GET_CODE (x) != GET_CODE (y) || GET_MODE (x) != GET_MODE (y))
464     return 0;
465 
466   /* This won't be handled correctly by the code below.  */
467   if (GET_CODE (x) == LABEL_REF)
468     return XEXP (x, 0) == XEXP (y, 0);
469 
470   code = GET_CODE (x);
471   fmt = GET_RTX_FORMAT (code);
472 
473   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
474     {
475       int j;
476 
477       switch (fmt[i])
478 	{
479 	case 'w':
480 	  if (XWINT (x, i) != XWINT (y, i))
481 	    return 0;
482 	  break;
483 
484 	case 'n':
485 	case 'i':
486 	  if (XINT (x, i) != XINT (y, i))
487 	    return 0;
488 	  break;
489 
490 	case 'V':
491 	case 'E':
492 	  /* Two vectors must have the same length.  */
493 	  if (XVECLEN (x, i) != XVECLEN (y, i))
494 	    return 0;
495 
496 	  /* And the corresponding elements must match.  */
497 	  for (j = 0; j < XVECLEN (x, i); j++)
498 	    if (! rtx_equal_for_cselib_p (XVECEXP (x, i, j),
499 					  XVECEXP (y, i, j)))
500 	      return 0;
501 	  break;
502 
503 	case 'e':
504 	  if (! rtx_equal_for_cselib_p (XEXP (x, i), XEXP (y, i)))
505 	    return 0;
506 	  break;
507 
508 	case 'S':
509 	case 's':
510 	  if (strcmp (XSTR (x, i), XSTR (y, i)))
511 	    return 0;
512 	  break;
513 
514 	case 'u':
515 	  /* These are just backpointers, so they don't matter.  */
516 	  break;
517 
518 	case '0':
519 	case 't':
520 	  break;
521 
522 	  /* It is believed that rtx's at this level will never
523 	     contain anything but integers and other rtx's,
524 	     except for within LABEL_REFs and SYMBOL_REFs.  */
525 	default:
526 	  abort ();
527 	}
528     }
529   return 1;
530 }
531 
532 /* We need to pass down the mode of constants through the hash table
533    functions.  For that purpose, wrap them in a CONST of the appropriate
534    mode.  */
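/* For example, a (const_int 1) being entered in SImode is passed to the hash
   table as (const:SI (const_int 1)), so that entry_and_rtx_equal_p can
   recover the mode; it unwraps the CONST again before comparing.  */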
535 static rtx
536 wrap_constant (enum machine_mode mode, rtx x)
537 {
538   if (GET_CODE (x) != CONST_INT
539       && (GET_CODE (x) != CONST_DOUBLE || GET_MODE (x) != VOIDmode))
540     return x;
541   if (mode == VOIDmode)
542     abort ();
543   return gen_rtx_CONST (mode, x);
544 }
545 
546 /* Hash an rtx.  Return 0 if we couldn't hash the rtx.
547    For registers and memory locations, we look up their cselib_val structure
548    and return its VALUE element.
549    Possible reasons for returning 0 are: the object is volatile, or we couldn't
550    find a register or memory location in the table and CREATE is zero.  If
551    CREATE is nonzero, table elts are created for regs and mem.
552    MODE is used in hashing for CONST_INTs only;
553    otherwise the mode of X is used.  */
554 
555 static unsigned int
556 hash_rtx (rtx x, enum machine_mode mode, int create)
557 {
558   cselib_val *e;
559   int i, j;
560   enum rtx_code code;
561   const char *fmt;
562   unsigned int hash = 0;
563 
564   code = GET_CODE (x);
565   hash += (unsigned) code + (unsigned) GET_MODE (x);
566 
567   switch (code)
568     {
569     case MEM:
570     case REG:
571       e = cselib_lookup (x, GET_MODE (x), create);
572       if (! e)
573 	return 0;
574 
575       return e->value;
576 
577     case CONST_INT:
578       hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + INTVAL (x);
579       return hash ? hash : (unsigned int) CONST_INT;
580 
581     case CONST_DOUBLE:
582       /* This is like the general case, except that it only counts
583 	 the integers representing the constant.  */
584       hash += (unsigned) code + (unsigned) GET_MODE (x);
585       if (GET_MODE (x) != VOIDmode)
586 	hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
587       else
588 	hash += ((unsigned) CONST_DOUBLE_LOW (x)
589 		 + (unsigned) CONST_DOUBLE_HIGH (x));
590       return hash ? hash : (unsigned int) CONST_DOUBLE;
591 
592     case CONST_VECTOR:
593       {
594 	int units;
595 	rtx elt;
596 
597 	units = CONST_VECTOR_NUNITS (x);
598 
599 	for (i = 0; i < units; ++i)
600 	  {
601 	    elt = CONST_VECTOR_ELT (x, i);
602 	    hash += hash_rtx (elt, GET_MODE (elt), 0);
603 	  }
604 
605 	return hash;
606       }
607 
608       /* Assume there is only one rtx object for any given label.  */
609     case LABEL_REF:
610       hash
611 	+= ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
612       return hash ? hash : (unsigned int) LABEL_REF;
613 
614     case SYMBOL_REF:
615       hash
616 	+= ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
617       return hash ? hash : (unsigned int) SYMBOL_REF;
618 
619     case PRE_DEC:
620     case PRE_INC:
621     case POST_DEC:
622     case POST_INC:
623     case POST_MODIFY:
624     case PRE_MODIFY:
625     case PC:
626     case CC0:
627     case CALL:
628     case UNSPEC_VOLATILE:
629       return 0;
630 
631     case ASM_OPERANDS:
632       if (MEM_VOLATILE_P (x))
633 	return 0;
634 
635       break;
636 
637     default:
638       break;
639     }
640 
641   i = GET_RTX_LENGTH (code) - 1;
642   fmt = GET_RTX_FORMAT (code);
643   for (; i >= 0; i--)
644     {
645       if (fmt[i] == 'e')
646 	{
647 	  rtx tem = XEXP (x, i);
648 	  unsigned int tem_hash = hash_rtx (tem, 0, create);
649 
650 	  if (tem_hash == 0)
651 	    return 0;
652 
653 	  hash += tem_hash;
654 	}
655       else if (fmt[i] == 'E')
656 	for (j = 0; j < XVECLEN (x, i); j++)
657 	  {
658 	    unsigned int tem_hash = hash_rtx (XVECEXP (x, i, j), 0, create);
659 
660 	    if (tem_hash == 0)
661 	      return 0;
662 
663 	    hash += tem_hash;
664 	  }
665       else if (fmt[i] == 's')
666 	{
667 	  const unsigned char *p = (const unsigned char *) XSTR (x, i);
668 
669 	  if (p)
670 	    while (*p)
671 	      hash += *p++;
672 	}
673       else if (fmt[i] == 'i')
674 	hash += XINT (x, i);
675       else if (fmt[i] == '0' || fmt[i] == 't')
676 	/* unused */;
677       else
678 	abort ();
679     }
680 
681   return hash ? hash : 1 + (unsigned int) GET_CODE (x);
682 }
683 
684 /* Create a new value structure for VALUE and initialize it.  The mode of the
685    value is MODE.  */
686 
687 static inline cselib_val *
688 new_cselib_val (unsigned int value, enum machine_mode mode)
689 {
690   cselib_val *e = pool_alloc (cselib_val_pool);
691 
692 #ifdef ENABLE_CHECKING
693   if (value == 0)
694     abort ();
695 #endif
696 
697   e->value = value;
698   /* We use a custom method to allocate this RTL construct because it
699      accounts for about 8% of the overall memory usage.  */
700   e->u.val_rtx = pool_alloc (value_pool);
701   memset (e->u.val_rtx, 0, RTX_HDR_SIZE);
702   PUT_CODE (e->u.val_rtx, VALUE);
703   PUT_MODE (e->u.val_rtx, mode);
704   CSELIB_VAL_PTR (e->u.val_rtx) = e;
705   e->addr_list = 0;
706   e->locs = 0;
707   e->next_containing_mem = 0;
708   return e;
709 }
710 
711 /* ADDR_ELT is a value that is used as an address.  MEM_ELT is the value that
712    contains the data at this address.  X is a MEM that represents the
713    value.  Update the two value structures to represent this situation.  */
714 
715 static void
716 add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x)
717 {
718   struct elt_loc_list *l;
719 
720   /* Avoid duplicates.  */
721   for (l = mem_elt->locs; l; l = l->next)
722     if (GET_CODE (l->loc) == MEM
723 	&& CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt)
724       return;
725 
726   addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
727   mem_elt->locs
728     = new_elt_loc_list (mem_elt->locs,
729 			replace_equiv_address_nv (x, addr_elt->u.val_rtx));
730   if (mem_elt->next_containing_mem == NULL)
731     {
732       mem_elt->next_containing_mem = first_containing_mem;
733       first_containing_mem = mem_elt;
734     }
735 }
736 
737 /* Subroutine of cselib_lookup.  Return a value for X, which is a MEM rtx.
738    If CREATE, make a new one if we haven't seen it before.  */
739 
740 static cselib_val *
741 cselib_lookup_mem (rtx x, int create)
742 {
743   enum machine_mode mode = GET_MODE (x);
744   void **slot;
745   cselib_val *addr;
746   cselib_val *mem_elt;
747   struct elt_list *l;
748 
749   if (MEM_VOLATILE_P (x) || mode == BLKmode
750       || (FLOAT_MODE_P (mode) && flag_float_store))
751     return 0;
752 
753   /* Look up the value for the address.  */
754   addr = cselib_lookup (XEXP (x, 0), mode, create);
755   if (! addr)
756     return 0;
757 
758   /* Find a value that describes a value of our mode at that address.  */
759   for (l = addr->addr_list; l; l = l->next)
760     if (GET_MODE (l->elt->u.val_rtx) == mode)
761       return l->elt;
762 
763   if (! create)
764     return 0;
765 
766   mem_elt = new_cselib_val (++next_unknown_value, mode);
767   add_mem_for_addr (addr, mem_elt, x);
768   slot = htab_find_slot_with_hash (hash_table, wrap_constant (mode, x),
769 				   mem_elt->value, INSERT);
770   *slot = mem_elt;
771   return mem_elt;
772 }
773 
774 /* Walk rtx X and replace all occurrences of REG and MEM subexpressions
775    with VALUE expressions.  This way, it becomes independent of changes
776    to registers and memory.
777    X isn't actually modified; if modifications are needed, new rtl is
778    allocated.  However, the return value can share rtl with X.  */
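/* For example, (plus:SI (reg:SI 100) (const_int 8)) is rewritten, in a fresh
   rtx that leaves the original untouched, as (plus:SI VAL (const_int 8)),
   where VAL is the VALUE rtx currently recorded for register 100; constants
   are returned unchanged.  */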
779 
780 rtx
781 cselib_subst_to_values (rtx x)
782 {
783   enum rtx_code code = GET_CODE (x);
784   const char *fmt = GET_RTX_FORMAT (code);
785   cselib_val *e;
786   struct elt_list *l;
787   rtx copy = x;
788   int i;
789 
790   switch (code)
791     {
792     case REG:
793       l = REG_VALUES (REGNO (x));
794       if (l && l->elt == NULL)
795 	l = l->next;
796       for (; l; l = l->next)
797 	if (GET_MODE (l->elt->u.val_rtx) == GET_MODE (x))
798 	  return l->elt->u.val_rtx;
799 
800       abort ();
801 
802     case MEM:
803       e = cselib_lookup_mem (x, 0);
804       if (! e)
805 	{
806 	  /* This happens for autoincrements.  Assign a value that doesn't
807 	     match any other.  */
808 	  e = new_cselib_val (++next_unknown_value, GET_MODE (x));
809 	}
810       return e->u.val_rtx;
811 
812     case CONST_DOUBLE:
813     case CONST_VECTOR:
814     case CONST_INT:
815       return x;
816 
817     case POST_INC:
818     case PRE_INC:
819     case POST_DEC:
820     case PRE_DEC:
821     case POST_MODIFY:
822     case PRE_MODIFY:
823       e = new_cselib_val (++next_unknown_value, GET_MODE (x));
824       return e->u.val_rtx;
825 
826     default:
827       break;
828     }
829 
830   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
831     {
832       if (fmt[i] == 'e')
833 	{
834 	  rtx t = cselib_subst_to_values (XEXP (x, i));
835 
836 	  if (t != XEXP (x, i) && x == copy)
837 	    copy = shallow_copy_rtx (x);
838 
839 	  XEXP (copy, i) = t;
840 	}
841       else if (fmt[i] == 'E')
842 	{
843 	  int j, k;
844 
845 	  for (j = 0; j < XVECLEN (x, i); j++)
846 	    {
847 	      rtx t = cselib_subst_to_values (XVECEXP (x, i, j));
848 
849 	      if (t != XVECEXP (x, i, j) && XVEC (x, i) == XVEC (copy, i))
850 		{
851 		  if (x == copy)
852 		    copy = shallow_copy_rtx (x);
853 
854 		  XVEC (copy, i) = rtvec_alloc (XVECLEN (x, i));
855 		  for (k = 0; k < j; k++)
856 		    XVECEXP (copy, i, k) = XVECEXP (x, i, k);
857 		}
858 
859 	      XVECEXP (copy, i, j) = t;
860 	    }
861 	}
862     }
863 
864   return copy;
865 }
866 
867 /* Look up the rtl expression X in our tables and return the value it has.
868    If CREATE is zero, we return NULL if we don't know the value.  Otherwise,
869    we create a new one if possible, using mode MODE if X doesn't have a mode
870    (i.e. because it's a constant).  */
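/* For example, cselib_lookup (stack_pointer_rtx, Pmode, 1) returns the
   cselib_val currently associated with the stack pointer, creating a fresh
   value for it if none is recorded yet.  */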
871 
872 cselib_val *
873 cselib_lookup (rtx x, enum machine_mode mode, int create)
874 {
875   void **slot;
876   cselib_val *e;
877   unsigned int hashval;
878 
879   if (GET_MODE (x) != VOIDmode)
880     mode = GET_MODE (x);
881 
882   if (GET_CODE (x) == VALUE)
883     return CSELIB_VAL_PTR (x);
884 
885   if (GET_CODE (x) == REG)
886     {
887       struct elt_list *l;
888       unsigned int i = REGNO (x);
889 
890       l = REG_VALUES (i);
891       if (l && l->elt == NULL)
892 	l = l->next;
893       for (; l; l = l->next)
894 	if (mode == GET_MODE (l->elt->u.val_rtx))
895 	  return l->elt;
896 
897       if (! create)
898 	return 0;
899 
900       if (i < FIRST_PSEUDO_REGISTER)
901 	{
902 	  unsigned int n = HARD_REGNO_NREGS (i, mode);
903 
904 	  if (n > max_value_regs)
905 	    max_value_regs = n;
906 	}
907 
908       e = new_cselib_val (++next_unknown_value, GET_MODE (x));
909       e->locs = new_elt_loc_list (e->locs, x);
910       if (REG_VALUES (i) == 0)
911 	{
912 	  /* Maintain the invariant that the first entry of
913 	     REG_VALUES, if present, must be the value used to set the
914 	     register, or NULL.  */
915 	  VARRAY_PUSH_UINT (used_regs, i);
916 	  REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
917 	}
918       REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
919       slot = htab_find_slot_with_hash (hash_table, x, e->value, INSERT);
920       *slot = e;
921       return e;
922     }
923 
924   if (GET_CODE (x) == MEM)
925     return cselib_lookup_mem (x, create);
926 
927   hashval = hash_rtx (x, mode, create);
928   /* Can't even create if hashing is not possible.  */
929   if (! hashval)
930     return 0;
931 
932   slot = htab_find_slot_with_hash (hash_table, wrap_constant (mode, x),
933 				   hashval, create ? INSERT : NO_INSERT);
934   if (slot == 0)
935     return 0;
936 
937   e = (cselib_val *) *slot;
938   if (e)
939     return e;
940 
941   e = new_cselib_val (hashval, mode);
942 
943   /* We have to fill the slot before calling cselib_subst_to_values:
944      the hash table is inconsistent until we do so, and
945      cselib_subst_to_values will need to do lookups.  */
946   *slot = (void *) e;
947   e->locs = new_elt_loc_list (e->locs, cselib_subst_to_values (x));
948   return e;
949 }
950 
951 /* Invalidate any entries in reg_values that overlap REGNO.  This is called
952    if REGNO is changing.  MODE is the mode of the assignment to REGNO, which
953    is used to determine how many hard registers are being changed.  If MODE
954    is VOIDmode, then only REGNO is being changed; this is used when
955    invalidating call clobbered registers across a call.  */
956 
957 static void
958 cselib_invalidate_regno (unsigned int regno, enum machine_mode mode)
959 {
960   unsigned int endregno;
961   unsigned int i;
962 
963   /* If we see pseudos after reload, something is _wrong_.  */
964   if (reload_completed && regno >= FIRST_PSEUDO_REGISTER
965       && reg_renumber[regno] >= 0)
966     abort ();
967 
968   /* Determine the range of registers that must be invalidated.  For
969      pseudos, only REGNO is affected.  For hard regs, we must take MODE
970      into account, and we must also invalidate lower register numbers
971      if they contain values that overlap REGNO.  */
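  /* For example, on a target where a DImode value spans two hard registers,
     a DImode value recorded for hard reg 2 also occupies reg 3, so a store
     to reg 3 must invalidate it; starting the scan at REGNO - MAX_VALUE_REGS
     is a cheap, conservative way to catch such overlaps.  */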
972   if (regno < FIRST_PSEUDO_REGISTER)
973     {
974       if (mode == VOIDmode)
975 	abort ();
976 
977       if (regno < max_value_regs)
978 	i = 0;
979       else
980 	i = regno - max_value_regs;
981 
982       endregno = regno + HARD_REGNO_NREGS (regno, mode);
983     }
984   else
985     {
986       i = regno;
987       endregno = regno + 1;
988     }
989 
990   for (; i < endregno; i++)
991     {
992       struct elt_list **l = &REG_VALUES (i);
993 
994       /* Go through all known values for this reg; if it overlaps the range
995 	 we're invalidating, remove the value.  */
996       while (*l)
997 	{
998 	  cselib_val *v = (*l)->elt;
999 	  struct elt_loc_list **p;
1000 	  unsigned int this_last = i;
1001 
1002 	  if (i < FIRST_PSEUDO_REGISTER && v != NULL)
1003 	    this_last += HARD_REGNO_NREGS (i, GET_MODE (v->u.val_rtx)) - 1;
1004 
1005 	  if (this_last < regno || v == NULL)
1006 	    {
1007 	      l = &(*l)->next;
1008 	      continue;
1009 	    }
1010 
1011 	  /* We have an overlap.  */
1012 	  if (*l == REG_VALUES (i))
1013 	    {
1014 	      /* Maintain the invariant that the first entry of
1015 		 REG_VALUES, if present, must be the value used to set
1016 		 the register, or NULL.  This is also nice because
1017 		 then we won't push the same regno onto used_regs
1018 		 multiple times.  */
1019 	      (*l)->elt = NULL;
1020 	      l = &(*l)->next;
1021 	    }
1022 	  else
1023 	    unchain_one_elt_list (l);
1024 
1025 	  /* Now, we clear the mapping from value to reg.  It must exist, so
1026 	     this code will crash intentionally if it doesn't.  */
1027 	  for (p = &v->locs; ; p = &(*p)->next)
1028 	    {
1029 	      rtx x = (*p)->loc;
1030 
1031 	      if (GET_CODE (x) == REG && REGNO (x) == i)
1032 		{
1033 		  unchain_one_elt_loc_list (p);
1034 		  break;
1035 		}
1036 	    }
1037 	  if (v->locs == 0)
1038 	    n_useless_values++;
1039 	}
1040     }
1041 }
1042 
1043 /* Return 1 if X has a value that can vary even between two
1044    executions of the program.  0 means X can be compared reliably
1045    against certain constants or near-constants.  */
1046 
1047 static int
1048 cselib_rtx_varies_p (rtx x ATTRIBUTE_UNUSED, int from_alias ATTRIBUTE_UNUSED)
1049 {
1050   /* We actually don't need to verify very hard.  This is because
1051      if X has actually changed, we invalidate the memory anyway,
1052      so assume that all common memory addresses are
1053      invariant.  */
1054   return 0;
1055 }
1056 
1057 /* Invalidate any locations in the table which are changed because of a
1058    store to MEM_RTX.  If this is called because of a non-const call
1059    instruction, MEM_RTX is (mem:BLK const0_rtx).  */
1060 
1061 static void
1062 cselib_invalidate_mem (rtx mem_rtx)
1063 {
1064   cselib_val **vp, *v, *next;
1065   int num_mems = 0;
1066   rtx mem_addr;
1067 
1068   mem_addr = canon_rtx (get_addr (XEXP (mem_rtx, 0)));
1069   mem_rtx = canon_rtx (mem_rtx);
1070 
1071   vp = &first_containing_mem;
1072   for (v = *vp; v != &dummy_val; v = next)
1073     {
1074       bool has_mem = false;
1075       struct elt_loc_list **p = &v->locs;
1076       int had_locs = v->locs != 0;
1077 
1078       while (*p)
1079 	{
1080 	  rtx x = (*p)->loc;
1081 	  rtx canon_x = (*p)->canon_loc;
1082 	  cselib_val *addr;
1083 	  struct elt_list **mem_chain;
1084 
1085 	  /* MEMs may occur in locations only at the top level; below
1086 	     that every MEM or REG is substituted by its VALUE.  */
1087 	  if (GET_CODE (x) != MEM)
1088 	    {
1089 	      p = &(*p)->next;
1090 	      continue;
1091 	    }
1092 	  if (!canon_x)
1093 	    canon_x = (*p)->canon_loc = canon_rtx (x);
1094 	  if (num_mems < PARAM_VALUE (PARAM_MAX_CSELIB_MEMORY_LOCATIONS)
1095 	      && ! canon_true_dependence (mem_rtx, GET_MODE (mem_rtx), mem_addr,
1096 		      			  x, cselib_rtx_varies_p))
1097 	    {
1098 	      has_mem = true;
1099 	      num_mems++;
1100 	      p = &(*p)->next;
1101 	      continue;
1102 	    }
1103 
1104 	  /* This one overlaps.  */
1105 	  /* We must have a mapping from this MEM's address to the
1106 	     value (E).  Remove that, too.  */
1107 	  addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0);
1108 	  mem_chain = &addr->addr_list;
1109 	  for (;;)
1110 	    {
1111 	      if ((*mem_chain)->elt == v)
1112 		{
1113 		  unchain_one_elt_list (mem_chain);
1114 		  break;
1115 		}
1116 
1117 	      mem_chain = &(*mem_chain)->next;
1118 	    }
1119 
1120 	  unchain_one_elt_loc_list (p);
1121 	}
1122 
1123       if (had_locs && v->locs == 0)
1124 	n_useless_values++;
1125 
1126       next = v->next_containing_mem;
1127       if (has_mem)
1128 	{
1129 	  *vp = v;
1130 	  vp = &(*vp)->next_containing_mem;
1131 	}
1132       else
1133 	v->next_containing_mem = NULL;
1134     }
1135   *vp = &dummy_val;
1136 }
1137 
1138 /* Invalidate DEST, which is being assigned to or clobbered.  The second
1139    and third parameters exist so that this function can be passed to
1140    note_stores; they are ignored.  */
1141 
1142 static void
1143 cselib_invalidate_rtx (rtx dest, rtx ignore ATTRIBUTE_UNUSED,
1144 		       void *data ATTRIBUTE_UNUSED)
1145 {
1146   while (GET_CODE (dest) == STRICT_LOW_PART || GET_CODE (dest) == SIGN_EXTRACT
1147 	 || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SUBREG)
1148     dest = XEXP (dest, 0);
1149 
1150   if (GET_CODE (dest) == REG)
1151     cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
1152   else if (GET_CODE (dest) == MEM)
1153     cselib_invalidate_mem (dest);
1154 
1155   /* Some machines don't define AUTO_INC_DEC, but they still use push
1156      instructions.  We need to catch that case here in order to
1157      invalidate the stack pointer correctly.  Note that invalidating
1158      the stack pointer is different from invalidating DEST.  */
1159   if (push_operand (dest, GET_MODE (dest)))
1160     cselib_invalidate_rtx (stack_pointer_rtx, NULL_RTX, NULL);
1161 }
1162 
1163 /* Record the result of a SET instruction.  DEST is being set; the source
1164    contains the value described by SRC_ELT.  If DEST is a MEM, DEST_ADDR_ELT
1165    describes its address.  */
1166 
1167 static void
1168 cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt)
1169 {
1170   int dreg = GET_CODE (dest) == REG ? (int) REGNO (dest) : -1;
1171 
1172   if (src_elt == 0 || side_effects_p (dest))
1173     return;
1174 
1175   if (dreg >= 0)
1176     {
1177       if (dreg < FIRST_PSEUDO_REGISTER)
1178 	{
1179 	  unsigned int n = HARD_REGNO_NREGS (dreg, GET_MODE (dest));
1180 
1181 	  if (n > max_value_regs)
1182 	    max_value_regs = n;
1183 	}
1184 
1185       if (REG_VALUES (dreg) == 0)
1186 	{
1187 	  VARRAY_PUSH_UINT (used_regs, dreg);
1188 	  REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
1189 	}
1190       else
1191 	{
1192 	  if (REG_VALUES (dreg)->elt == 0)
1193 	    REG_VALUES (dreg)->elt = src_elt;
1194 	  else
1195 	    /* The register should have been invalidated.  */
1196 	    abort ();
1197 	}
1198 
1199       if (src_elt->locs == 0)
1200 	n_useless_values--;
1201       src_elt->locs = new_elt_loc_list (src_elt->locs, dest);
1202     }
1203   else if (GET_CODE (dest) == MEM && dest_addr_elt != 0)
1204     {
1205       if (src_elt->locs == 0)
1206 	n_useless_values--;
1207       add_mem_for_addr (dest_addr_elt, src_elt, dest);
1208     }
1209 }
1210 
1211 /* Describe a single set that is part of an insn.  */
1212 struct set
1213 {
1214   rtx src;
1215   rtx dest;
1216   cselib_val *src_elt;
1217   cselib_val *dest_addr_elt;
1218 };
1219 
1220 /* There is no good way to determine how many elements there can be
1221    in a PARALLEL.  Since it's fairly cheap, use a really large number.  */
1222 #define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)
1223 
1224 /* Record the effects of any sets in INSN.  */
1225 static void
1226 cselib_record_sets (rtx insn)
1227 {
1228   int n_sets = 0;
1229   int i;
1230   struct set sets[MAX_SETS];
1231   rtx body = PATTERN (insn);
1232   rtx cond = 0;
1233 
1234   /* Strip a COND_EXEC wrapper, if any, remembering its condition.  */
1235   if (GET_CODE (body) == COND_EXEC)
1236     {
1237       cond = COND_EXEC_TEST (body);
1238       body = COND_EXEC_CODE (body);
1239     }
1240 
1241   /* Find all sets.  */
1242   if (GET_CODE (body) == SET)
1243     {
1244       sets[0].src = SET_SRC (body);
1245       sets[0].dest = SET_DEST (body);
1246       n_sets = 1;
1247     }
1248   else if (GET_CODE (body) == PARALLEL)
1249     {
1250       /* Look through the PARALLEL and record the values being
1251 	 set, if possible.  Also handle any CLOBBERs.  */
1252       for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
1253 	{
1254 	  rtx x = XVECEXP (body, 0, i);
1255 
1256 	  if (GET_CODE (x) == SET)
1257 	    {
1258 	      sets[n_sets].src = SET_SRC (x);
1259 	      sets[n_sets].dest = SET_DEST (x);
1260 	      n_sets++;
1261 	    }
1262 	}
1263     }
1264 
1265   /* Look up the values that are read.  Do this before invalidating the
1266      locations that are written.  */
1267   for (i = 0; i < n_sets; i++)
1268     {
1269       rtx dest = sets[i].dest;
1270 
1271       /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
1272          the low part after invalidating any knowledge about larger modes.  */
1273       if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
1274 	sets[i].dest = dest = XEXP (dest, 0);
1275 
1276       /* We don't know how to record anything but REG or MEM.  */
1277       if (GET_CODE (dest) == REG || GET_CODE (dest) == MEM)
1278         {
1279 	  rtx src = sets[i].src;
1280 	  if (cond)
1281 	    src = gen_rtx_IF_THEN_ELSE (GET_MODE (src), cond, src, dest);
1282 	  sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1);
1283 	  if (GET_CODE (dest) == MEM)
1284 	    sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0), Pmode, 1);
1285 	  else
1286 	    sets[i].dest_addr_elt = 0;
1287 	}
1288     }
1289 
1290   /* Invalidate all locations written by this insn.  Note that the elts we
1291      looked up in the previous loop aren't affected, just some of their
1292      locations may go away.  */
1293   note_stores (body, cselib_invalidate_rtx, NULL);
1294 
1295   /* If this is an asm, look for duplicate sets.  This can happen when the
1296      user uses the same value as an output multiple times.  This is valid
1297      if the outputs are not actually used thereafter.  Treat this case as
1298      if the value isn't actually set.  We do this by smashing the destination
1299      to pc_rtx, so that we won't record the value later.  */
1300   if (n_sets >= 2 && asm_noperands (body) >= 0)
1301     {
1302       for (i = 0; i < n_sets; i++)
1303 	{
1304 	  rtx dest = sets[i].dest;
1305 	  if (GET_CODE (dest) == REG || GET_CODE (dest) == MEM)
1306 	    {
1307 	      int j;
1308 	      for (j = i + 1; j < n_sets; j++)
1309 		if (rtx_equal_p (dest, sets[j].dest))
1310 		  {
1311 		    sets[i].dest = pc_rtx;
1312 		    sets[j].dest = pc_rtx;
1313 		  }
1314 	    }
1315 	}
1316     }
1317 
1318   /* Now enter the equivalences in our tables.  */
1319   for (i = 0; i < n_sets; i++)
1320     {
1321       rtx dest = sets[i].dest;
1322       if (GET_CODE (dest) == REG || GET_CODE (dest) == MEM)
1323 	cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
1324     }
1325 }
1326 
1327 /* Record the effects of INSN.  */
1328 
1329 void
1330 cselib_process_insn (rtx insn)
1331 {
1332   int i;
1333   rtx x;
1334 
1335   if (find_reg_note (insn, REG_LIBCALL, NULL))
1336     cselib_current_insn_in_libcall = true;
1337   if (find_reg_note (insn, REG_RETVAL, NULL))
1338     cselib_current_insn_in_libcall = false;
1339   cselib_current_insn = insn;
1340 
1341   /* Forget everything at a CODE_LABEL, a volatile asm, or a setjmp.  */
1342   if (GET_CODE (insn) == CODE_LABEL
1343       || (GET_CODE (insn) == CALL_INSN
1344 	  && find_reg_note (insn, REG_SETJMP, NULL))
1345       || (GET_CODE (insn) == INSN
1346 	  && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
1347 	  && MEM_VOLATILE_P (PATTERN (insn))))
1348     {
1349       clear_table ();
1350       return;
1351     }
1352 
1353   if (! INSN_P (insn))
1354     {
1355       cselib_current_insn = 0;
1356       return;
1357     }
1358 
1359   /* If this is a call instruction, forget anything stored in a
1360      call clobbered register, or, if this is not a const call, in
1361      memory.  */
1362   if (GET_CODE (insn) == CALL_INSN)
1363     {
1364       for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1365 	if (call_used_regs[i])
1366 	  cselib_invalidate_regno (i, reg_raw_mode[i]);
1367 
1368       if (! CONST_OR_PURE_CALL_P (insn))
1369 	cselib_invalidate_mem (callmem);
1370     }
1371 
1372   cselib_record_sets (insn);
1373 
1374 #ifdef AUTO_INC_DEC
1375   /* Clobber any registers which appear in REG_INC notes.  We
1376      could keep track of the changes to their values, but it is
1377      unlikely to help.  */
1378   for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
1379     if (REG_NOTE_KIND (x) == REG_INC)
1380       cselib_invalidate_rtx (XEXP (x, 0), NULL_RTX, NULL);
1381 #endif
1382 
1383   /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
1384      after we have processed the insn.  */
1385   if (GET_CODE (insn) == CALL_INSN)
1386     for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
1387       if (GET_CODE (XEXP (x, 0)) == CLOBBER)
1388 	cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0), NULL_RTX, NULL);
1389 
1390   cselib_current_insn = 0;
1391 
1392   if (n_useless_values > MAX_USELESS_VALUES)
1393     remove_useless_values ();
1394 }
1395 
1396 /* Make sure our varrays are big enough.  Not called from any cselib routines;
1397    it must be called by the user if it allocated new registers.  */
1398 
1399 void
1400 cselib_update_varray_sizes (void)
1401 {
1402   unsigned int nregs = max_reg_num ();
1403 
1404   if (nregs == cselib_nregs)
1405     return;
1406 
1407   cselib_nregs = nregs;
1408   VARRAY_GROW (reg_values, nregs);
1409   VARRAY_GROW (used_regs, nregs);
1410 }
1411 
1412 /* Initialize cselib for one pass.  The caller must also call
1413    init_alias_analysis.  */
1414 
1415 void
1416 cselib_init (void)
1417 {
1418   elt_list_pool = create_alloc_pool ("elt_list",
1419 				     sizeof (struct elt_list), 10);
1420   elt_loc_list_pool = create_alloc_pool ("elt_loc_list",
1421 				         sizeof (struct elt_loc_list), 10);
1422   cselib_val_pool = create_alloc_pool ("cselib_val_list",
1423 				       sizeof (cselib_val), 10);
1424   value_pool = create_alloc_pool ("value",
1425 				  RTX_SIZE (VALUE), 10);
1426   /* This is only created once.  */
1427   if (! callmem)
1428     callmem = gen_rtx_MEM (BLKmode, const0_rtx);
1429 
1430   cselib_nregs = max_reg_num ();
1431   if (reg_values_old != NULL && VARRAY_SIZE (reg_values_old) >= cselib_nregs)
1432     {
1433       reg_values = reg_values_old;
1434       used_regs = used_regs_old;
1435     }
1436   else
1437     {
1438       VARRAY_ELT_LIST_INIT (reg_values, cselib_nregs, "reg_values");
1439       VARRAY_UINT_INIT (used_regs, cselib_nregs, "used_regs");
1440     }
1441   hash_table = htab_create_ggc (31, get_value_hash, entry_and_rtx_equal_p,
1442 				NULL);
1443   cselib_current_insn_in_libcall = false;
1444 }
1445 
1446 /* Called when the current user is done with cselib.  */
1447 
1448 void
1449 cselib_finish (void)
1450 {
1451   free_alloc_pool (elt_list_pool);
1452   free_alloc_pool (elt_loc_list_pool);
1453   free_alloc_pool (cselib_val_pool);
1454   free_alloc_pool (value_pool);
1455   clear_table ();
1456   reg_values_old = reg_values;
1457   reg_values = 0;
1458   used_regs_old = used_regs;
1459   used_regs = 0;
1460   hash_table = 0;
1461   n_useless_values = 0;
1462   next_unknown_value = 0;
1463 }
1464 
1465 #include "gt-cselib.h"
1466