1 /* Common subexpression elimination library for GNU compiler.
2    Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3    1999, 2000, 2001 Free Software Foundation, Inc.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING.  If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA.  */
21 
22 #include "config.h"
23 #include "system.h"
24 
25 #include "rtl.h"
26 #include "tm_p.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "flags.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "recog.h"
33 #include "function.h"
34 #include "expr.h"
35 #include "toplev.h"
36 #include "output.h"
37 #include "ggc.h"
38 #include "hashtab.h"
39 #include "cselib.h"
40 
41 static int entry_and_rtx_equal_p	PARAMS ((const void *, const void *));
42 static hashval_t get_value_hash		PARAMS ((const void *));
43 static struct elt_list *new_elt_list	PARAMS ((struct elt_list *,
44 						 cselib_val *));
45 static struct elt_loc_list *new_elt_loc_list PARAMS ((struct elt_loc_list *,
46 						      rtx));
47 static void unchain_one_value		PARAMS ((cselib_val *));
48 static void unchain_one_elt_list	PARAMS ((struct elt_list **));
49 static void unchain_one_elt_loc_list	PARAMS ((struct elt_loc_list **));
50 static void clear_table			PARAMS ((int));
51 static int discard_useless_locs		PARAMS ((void **, void *));
52 static int discard_useless_values	PARAMS ((void **, void *));
53 static void remove_useless_values	PARAMS ((void));
54 static rtx wrap_constant		PARAMS ((enum machine_mode, rtx));
55 static unsigned int hash_rtx		PARAMS ((rtx, enum machine_mode, int));
56 static cselib_val *new_cselib_val	PARAMS ((unsigned int,
57 						 enum machine_mode));
58 static void add_mem_for_addr		PARAMS ((cselib_val *, cselib_val *,
59 						 rtx));
60 static cselib_val *cselib_lookup_mem	PARAMS ((rtx, int));
61 static void cselib_invalidate_regno	PARAMS ((unsigned int,
62 						 enum machine_mode));
63 static int cselib_mem_conflict_p	PARAMS ((rtx, rtx));
64 static int cselib_invalidate_mem_1	PARAMS ((void **, void *));
65 static void cselib_invalidate_mem	PARAMS ((rtx));
66 static void cselib_invalidate_rtx_note_stores PARAMS ((rtx, rtx, void *));
67 static void cselib_record_set		PARAMS ((rtx, cselib_val *,
68 						 cselib_val *));
69 static void cselib_record_sets		PARAMS ((rtx));
70 
71 /* There are three ways in which cselib can look up an rtx:
72    - for a REG, the reg_values table (which is indexed by regno) is used
73    - for a MEM, we recursively look up its address and then follow the
74      addr_list of that value
75    - for everything else, we compute a hash value and go through the hash
76      table.  Since different rtx's can still have the same hash value,
77      this involves walking the table entries for a given value and comparing
78      the locations of the entries with the rtx we are looking up.  */
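/* A minimal usage sketch (illustrative only; FIRST and LAST are placeholders
   for whatever insn range the caller walks):

     cselib_init ();
     for (insn = FIRST; insn != NEXT_INSN (LAST); insn = NEXT_INSN (insn))
       cselib_process_insn (insn);
     ... queries such as cselib_lookup () or rtx_equal_for_cselib_p () ...
     cselib_finish ();

   The entry points used above are the ones defined later in this file.  */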
79 
80 /* A table that enables us to look up elts by their value.  */
81 static GTY((param_is (cselib_val))) htab_t hash_table;
82 
83 /* This is a global so we don't have to pass it through every function.
84    It is used in new_elt_loc_list to set SETTING_INSN.  */
85 static rtx cselib_current_insn;
86 static bool cselib_current_insn_in_libcall;
87 
88 /* Every new unknown value gets a unique number.  */
89 static unsigned int next_unknown_value;
90 
91 /* The number of registers we had when the varrays were last resized.  */
92 static unsigned int cselib_nregs;
93 
94 /* Count values without known locations.  Whenever this grows too big, we
95    remove these useless values from the table.  */
96 static int n_useless_values;
97 
98 /* Number of useless values before we remove them from the hash table.  */
99 #define MAX_USELESS_VALUES 32
100 
101 /* This table maps from register number to values.  It does not contain
102    pointers to cselib_val structures, but rather elt_lists.  The purpose is
103    to be able to refer to the same register in different modes.  */
104 static GTY(()) varray_type reg_values;
105 static GTY((deletable (""))) varray_type reg_values_old;
106 #define REG_VALUES(I) VARRAY_ELT_LIST (reg_values, (I))
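/* Illustrative example: after a lookup of (reg:SI 3) and a later lookup of
   (reg:QI 3), REG_VALUES (3) is an elt_list with one cselib_val whose VALUE
   rtx has mode SImode and another whose VALUE rtx has mode QImode;
   cselib_lookup returns whichever entry matches the requested mode.  */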
107 
108 /* The largest number of hard regs used by any entry added to the
109    REG_VALUES table.  Cleared on each clear_table() invocation.  */
110 static unsigned int max_value_regs;
111 
112 /* The set of indices I with REG_VALUES (I) != 0 is saved here; it lets
113    clear_table() empty REG_VALUES quickly.  */
114 static GTY(()) varray_type used_regs;
115 static GTY((deletable (""))) varray_type used_regs_old;
116 
117 /* We pass this to cselib_invalidate_mem to invalidate all of
118    memory for a non-const call instruction.  */
119 static GTY(()) rtx callmem;
120 
121 /* Caches for unused structures.  */
122 static GTY((deletable (""))) cselib_val *empty_vals;
123 static GTY((deletable (""))) struct elt_list *empty_elt_lists;
124 static GTY((deletable (""))) struct elt_loc_list *empty_elt_loc_lists;
125 
126 /* Set by discard_useless_locs if it deleted the last location of any
127    value.  */
128 static int values_became_useless;
129 
130 
131 /* Allocate a struct elt_list and fill in its two elements with the
132    arguments.  */
133 
134 static struct elt_list *
135 new_elt_list (next, elt)
136      struct elt_list *next;
137      cselib_val *elt;
138 {
139   struct elt_list *el = empty_elt_lists;
140 
141   if (el)
142     empty_elt_lists = el->next;
143   else
144     el = (struct elt_list *) ggc_alloc (sizeof (struct elt_list));
145   el->next = next;
146   el->elt = elt;
147   return el;
148 }
149 
150 /* Allocate a struct elt_loc_list and fill in its two elements with the
151    arguments.  */
152 
153 static struct elt_loc_list *
154 new_elt_loc_list (next, loc)
155      struct elt_loc_list *next;
156      rtx loc;
157 {
158   struct elt_loc_list *el = empty_elt_loc_lists;
159 
160   if (el)
161     empty_elt_loc_lists = el->next;
162   else
163     el = (struct elt_loc_list *) ggc_alloc (sizeof (struct elt_loc_list));
164   el->next = next;
165   el->loc = loc;
166   el->setting_insn = cselib_current_insn;
167   el->in_libcall = cselib_current_insn_in_libcall;
168   return el;
169 }
170 
171 /* The elt_list at *PL is no longer needed.  Unchain it and free its
172    storage.  */
173 
174 static void
175 unchain_one_elt_list (pl)
176      struct elt_list **pl;
177 {
178   struct elt_list *l = *pl;
179 
180   *pl = l->next;
181   l->next = empty_elt_lists;
182   empty_elt_lists = l;
183 }
184 
185 /* Likewise for elt_loc_lists.  */
186 
187 static void
188 unchain_one_elt_loc_list (pl)
189      struct elt_loc_list **pl;
190 {
191   struct elt_loc_list *l = *pl;
192 
193   *pl = l->next;
194   l->next = empty_elt_loc_lists;
195   empty_elt_loc_lists = l;
196 }
197 
198 /* Likewise for cselib_vals.  This also frees the addr_list associated with
199    V.  */
200 
201 static void
202 unchain_one_value (v)
203      cselib_val *v;
204 {
205   while (v->addr_list)
206     unchain_one_elt_list (&v->addr_list);
207 
208   v->u.next_free = empty_vals;
209   empty_vals = v;
210 }
211 
212 /* Remove all entries from the hash table.  Also used during
213    initialization.  If CLEAR_ALL isn't set, then only clear the entries
214    which are known to have been used.  */
215 
216 static void
217 clear_table (clear_all)
218      int clear_all;
219 {
220   unsigned int i;
221 
222   if (clear_all)
223     for (i = 0; i < cselib_nregs; i++)
224       REG_VALUES (i) = 0;
225   else
226     for (i = 0; i < VARRAY_ACTIVE_SIZE (used_regs); i++)
227       REG_VALUES (VARRAY_UINT (used_regs, i)) = 0;
228 
229   max_value_regs = 0;
230 
231   VARRAY_POP_ALL (used_regs);
232 
233   htab_empty (hash_table);
234 
235   n_useless_values = 0;
236 
237   next_unknown_value = 0;
238 }
239 
240 /* The equality test for our hash table.  The first argument ENTRY is a table
241    element (i.e. a cselib_val), while the second arg X is an rtx.  We know
242    that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
243    CONST of an appropriate mode.  */
244 
245 static int
246 entry_and_rtx_equal_p (entry, x_arg)
247      const void *entry, *x_arg;
248 {
249   struct elt_loc_list *l;
250   const cselib_val *v = (const cselib_val *) entry;
251   rtx x = (rtx) x_arg;
252   enum machine_mode mode = GET_MODE (x);
253 
254   if (GET_CODE (x) == CONST_INT
255       || (mode == VOIDmode && GET_CODE (x) == CONST_DOUBLE))
256     abort ();
257   if (mode != GET_MODE (v->u.val_rtx))
258     return 0;
259 
260   /* Unwrap X if necessary.  */
261   if (GET_CODE (x) == CONST
262       && (GET_CODE (XEXP (x, 0)) == CONST_INT
263 	  || GET_CODE (XEXP (x, 0)) == CONST_DOUBLE))
264     x = XEXP (x, 0);
265 
266   /* We don't guarantee that distinct rtx's have different hash values,
267      so we need to do a comparison.  */
268   for (l = v->locs; l; l = l->next)
269     if (rtx_equal_for_cselib_p (l->loc, x))
270       return 1;
271 
272   return 0;
273 }
274 
275 /* The hash function for our hash table.  The value is always computed with
276    hash_rtx when adding an element; this function just extracts the hash
277    value from a cselib_val structure.  */
278 
279 static hashval_t
280 get_value_hash (entry)
281      const void *entry;
282 {
283   const cselib_val *v = (const cselib_val *) entry;
284   return v->value;
285 }
286 
287 /* Return true if X contains a VALUE rtx.  If ONLY_USELESS is set, we
288    only return true for values which point to a cselib_val with no
289    remaining locations, which implies the cselib_val will be
290    removed.  */
291 
292 int
293 references_value_p (x, only_useless)
294      rtx x;
295      int only_useless;
296 {
297   enum rtx_code code = GET_CODE (x);
298   const char *fmt = GET_RTX_FORMAT (code);
299   int i, j;
300 
301   if (GET_CODE (x) == VALUE
302       && (! only_useless || CSELIB_VAL_PTR (x)->locs == 0))
303     return 1;
304 
305   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
306     {
307       if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
308 	return 1;
309       else if (fmt[i] == 'E')
310 	for (j = 0; j < XVECLEN (x, i); j++)
311 	  if (references_value_p (XVECEXP (x, i, j), only_useless))
312 	    return 1;
313     }
314 
315   return 0;
316 }
317 
318 /* For all locations found in X, delete locations that reference useless
319    values (i.e. values without any location).  Called through
320    htab_traverse.  */
321 
322 static int
323 discard_useless_locs (x, info)
324      void **x;
325      void *info ATTRIBUTE_UNUSED;
326 {
327   cselib_val *v = (cselib_val *)*x;
328   struct elt_loc_list **p = &v->locs;
329   int had_locs = v->locs != 0;
330 
331   while (*p)
332     {
333       if (references_value_p ((*p)->loc, 1))
334 	unchain_one_elt_loc_list (p);
335       else
336 	p = &(*p)->next;
337     }
338 
339   if (had_locs && v->locs == 0)
340     {
341       n_useless_values++;
342       values_became_useless = 1;
343     }
344   return 1;
345 }
346 
347 /* If X is a value with no locations, remove it from the hashtable.  */
348 
349 static int
350 discard_useless_values (x, info)
351      void **x;
352      void *info ATTRIBUTE_UNUSED;
353 {
354   cselib_val *v = (cselib_val *)*x;
355 
356   if (v->locs == 0)
357     {
358       htab_clear_slot (hash_table, x);
359       unchain_one_value (v);
360       n_useless_values--;
361     }
362 
363   return 1;
364 }
365 
366 /* Clean out useless values (i.e. those which no longer have locations
367    associated with them) from the hash table.  */
368 
369 static void
370 remove_useless_values ()
371 {
372   /* First pass: discard locations that reference useless values.  Doing so
373      can leave other values without locations, so iterate to a fixed point.  */
374   do
375     {
376       values_became_useless = 0;
377       htab_traverse (hash_table, discard_useless_locs, 0);
378     }
379   while (values_became_useless);
380 
381   /* Second pass: actually remove the values.  */
382   htab_traverse (hash_table, discard_useless_values, 0);
383 
384   if (n_useless_values != 0)
385     abort ();
386 }
387 
388 /* Return nonzero if we can prove that X and Y contain the same value, taking
389    our gathered information into account.  */
390 
391 int
392 rtx_equal_for_cselib_p (x, y)
393      rtx x, y;
394 {
395   enum rtx_code code;
396   const char *fmt;
397   int i;
398 
399   if (GET_CODE (x) == REG || GET_CODE (x) == MEM)
400     {
401       cselib_val *e = cselib_lookup (x, GET_MODE (x), 0);
402 
403       if (e)
404 	x = e->u.val_rtx;
405     }
406 
407   if (GET_CODE (y) == REG || GET_CODE (y) == MEM)
408     {
409       cselib_val *e = cselib_lookup (y, GET_MODE (y), 0);
410 
411       if (e)
412 	y = e->u.val_rtx;
413     }
414 
415   if (x == y)
416     return 1;
417 
418   if (GET_CODE (x) == VALUE && GET_CODE (y) == VALUE)
419     return CSELIB_VAL_PTR (x) == CSELIB_VAL_PTR (y);
420 
421   if (GET_CODE (x) == VALUE)
422     {
423       cselib_val *e = CSELIB_VAL_PTR (x);
424       struct elt_loc_list *l;
425 
426       for (l = e->locs; l; l = l->next)
427 	{
428 	  rtx t = l->loc;
429 
430 	  /* Avoid infinite recursion.  */
431 	  if (GET_CODE (t) == REG || GET_CODE (t) == MEM)
432 	    continue;
433 	  else if (rtx_equal_for_cselib_p (t, y))
434 	    return 1;
435 	}
436 
437       return 0;
438     }
439 
440   if (GET_CODE (y) == VALUE)
441     {
442       cselib_val *e = CSELIB_VAL_PTR (y);
443       struct elt_loc_list *l;
444 
445       for (l = e->locs; l; l = l->next)
446 	{
447 	  rtx t = l->loc;
448 
449 	  if (GET_CODE (t) == REG || GET_CODE (t) == MEM)
450 	    continue;
451 	  else if (rtx_equal_for_cselib_p (x, t))
452 	    return 1;
453 	}
454 
455       return 0;
456     }
457 
458   if (GET_CODE (x) != GET_CODE (y) || GET_MODE (x) != GET_MODE (y))
459     return 0;
460 
461   /* This won't be handled correctly by the code below.  */
462   if (GET_CODE (x) == LABEL_REF)
463     return XEXP (x, 0) == XEXP (y, 0);
464 
465   code = GET_CODE (x);
466   fmt = GET_RTX_FORMAT (code);
467 
468   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
469     {
470       int j;
471 
472       switch (fmt[i])
473 	{
474 	case 'w':
475 	  if (XWINT (x, i) != XWINT (y, i))
476 	    return 0;
477 	  break;
478 
479 	case 'n':
480 	case 'i':
481 	  if (XINT (x, i) != XINT (y, i))
482 	    return 0;
483 	  break;
484 
485 	case 'V':
486 	case 'E':
487 	  /* Two vectors must have the same length.  */
488 	  if (XVECLEN (x, i) != XVECLEN (y, i))
489 	    return 0;
490 
491 	  /* And the corresponding elements must match.  */
492 	  for (j = 0; j < XVECLEN (x, i); j++)
493 	    if (! rtx_equal_for_cselib_p (XVECEXP (x, i, j),
494 					  XVECEXP (y, i, j)))
495 	      return 0;
496 	  break;
497 
498 	case 'e':
499 	  if (! rtx_equal_for_cselib_p (XEXP (x, i), XEXP (y, i)))
500 	    return 0;
501 	  break;
502 
503 	case 'S':
504 	case 's':
505 	  if (strcmp (XSTR (x, i), XSTR (y, i)))
506 	    return 0;
507 	  break;
508 
509 	case 'u':
510 	  /* These are just backpointers, so they don't matter.  */
511 	  break;
512 
513 	case '0':
514 	case 't':
515 	  break;
516 
517 	  /* It is believed that rtx's at this level will never
518 	     contain anything but integers and other rtx's,
519 	     except for within LABEL_REFs and SYMBOL_REFs.  */
520 	default:
521 	  abort ();
522 	}
523     }
524   return 1;
525 }
526 
527 /* We need to pass down the mode of constants through the hash table
528    functions.  For that purpose, wrap them in a CONST of the appropriate
529    mode.  */
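/* Illustrative example: a lookup of (const_int 1) in SImode hashes with that
   mode (see hash_rtx) and, for the equality test, wraps the constant as
   (const:SI (const_int 1)) so that entry_and_rtx_equal_p can check the mode
   and then unwrap the CONST before comparing against stored locations.  */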
530 static rtx
531 wrap_constant (mode, x)
532      enum machine_mode mode;
533      rtx x;
534 {
535   if (GET_CODE (x) != CONST_INT
536       && (GET_CODE (x) != CONST_DOUBLE || GET_MODE (x) != VOIDmode))
537     return x;
538   if (mode == VOIDmode)
539     abort ();
540   return gen_rtx_CONST (mode, x);
541 }
542 
543 /* Hash an rtx.  Return 0 if we couldn't hash the rtx.
544    For registers and memory locations, we look up their cselib_val structure
545    and return its VALUE element.
546    Possible reasons for returning 0 are: the object is volatile, or we couldn't
547    find a register or memory location in the table and CREATE is zero.  If
548    CREATE is nonzero, table elts are created for regs and mem.
549    MODE is used in hashing for CONST_INTs only;
550    otherwise the mode of X is used.  */
551 
552 static unsigned int
553 hash_rtx (x, mode, create)
554      rtx x;
555      enum machine_mode mode;
556      int create;
557 {
558   cselib_val *e;
559   int i, j;
560   enum rtx_code code;
561   const char *fmt;
562   unsigned int hash = 0;
563 
564   code = GET_CODE (x);
565   hash += (unsigned) code + (unsigned) GET_MODE (x);
566 
567   switch (code)
568     {
569     case MEM:
570     case REG:
571       e = cselib_lookup (x, GET_MODE (x), create);
572       if (! e)
573 	return 0;
574 
575       return e->value;
576 
577     case CONST_INT:
578       hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + INTVAL (x);
579       return hash ? hash : (unsigned int) CONST_INT;
580 
581     case CONST_DOUBLE:
582       /* This is like the general case, except that it only counts
583 	 the integers representing the constant.  */
584       hash += (unsigned) code + (unsigned) GET_MODE (x);
585       if (GET_MODE (x) != VOIDmode)
586 	hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
587       else
588 	hash += ((unsigned) CONST_DOUBLE_LOW (x)
589 		 + (unsigned) CONST_DOUBLE_HIGH (x));
590       return hash ? hash : (unsigned int) CONST_DOUBLE;
591 
592     case CONST_VECTOR:
593       {
594 	int units;
595 	rtx elt;
596 
597 	units = CONST_VECTOR_NUNITS (x);
598 
599 	for (i = 0; i < units; ++i)
600 	  {
601 	    elt = CONST_VECTOR_ELT (x, i);
602 	    hash += hash_rtx (elt, GET_MODE (elt), 0);
603 	  }
604 
605 	return hash;
606       }
607 
608       /* Assume there is only one rtx object for any given label.  */
609     case LABEL_REF:
610       hash
611 	+= ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
612       return hash ? hash : (unsigned int) LABEL_REF;
613 
614     case SYMBOL_REF:
615       hash
616 	+= ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
617       return hash ? hash : (unsigned int) SYMBOL_REF;
618 
619     case PRE_DEC:
620     case PRE_INC:
621     case POST_DEC:
622     case POST_INC:
623     case POST_MODIFY:
624     case PRE_MODIFY:
625     case PC:
626     case CC0:
627     case CALL:
628     case UNSPEC_VOLATILE:
629       return 0;
630 
631     case ASM_OPERANDS:
632       if (MEM_VOLATILE_P (x))
633 	return 0;
634 
635       break;
636 
637     default:
638       break;
639     }
640 
641   i = GET_RTX_LENGTH (code) - 1;
642   fmt = GET_RTX_FORMAT (code);
643   for (; i >= 0; i--)
644     {
645       if (fmt[i] == 'e')
646 	{
647 	  rtx tem = XEXP (x, i);
648 	  unsigned int tem_hash = hash_rtx (tem, 0, create);
649 
650 	  if (tem_hash == 0)
651 	    return 0;
652 
653 	  hash += tem_hash;
654 	}
655       else if (fmt[i] == 'E')
656 	for (j = 0; j < XVECLEN (x, i); j++)
657 	  {
658 	    unsigned int tem_hash = hash_rtx (XVECEXP (x, i, j), 0, create);
659 
660 	    if (tem_hash == 0)
661 	      return 0;
662 
663 	    hash += tem_hash;
664 	  }
665       else if (fmt[i] == 's')
666 	{
667 	  const unsigned char *p = (const unsigned char *) XSTR (x, i);
668 
669 	  if (p)
670 	    while (*p)
671 	      hash += *p++;
672 	}
673       else if (fmt[i] == 'i')
674 	hash += XINT (x, i);
675       else if (fmt[i] == '0' || fmt[i] == 't')
676 	/* unused */;
677       else
678 	abort ();
679     }
680 
681   return hash ? hash : 1 + (unsigned int) GET_CODE (x);
682 }
683 
684 /* Create a new value structure for VALUE and initialize it.  The mode of the
685    value is MODE.  */
686 
687 static cselib_val *
688 new_cselib_val (value, mode)
689      unsigned int value;
690      enum machine_mode mode;
691 {
692   cselib_val *e = empty_vals;
693 
694   if (e)
695     empty_vals = e->u.next_free;
696   else
697     e = (cselib_val *) ggc_alloc (sizeof (cselib_val));
698 
699   if (value == 0)
700     abort ();
701 
702   e->value = value;
703   e->u.val_rtx = gen_rtx_VALUE (mode);
704   CSELIB_VAL_PTR (e->u.val_rtx) = e;
705   e->addr_list = 0;
706   e->locs = 0;
707   return e;
708 }
709 
710 /* ADDR_ELT is a value that is used as address.  MEM_ELT is the value that
711    contains the data at this address.  X is a MEM that represents the
712    value.  Update the two value structures to represent this situation.  */
713 
714 static void
715 add_mem_for_addr (addr_elt, mem_elt, x)
716      cselib_val *addr_elt, *mem_elt;
717      rtx x;
718 {
719   struct elt_loc_list *l;
720 
721   /* Avoid duplicates.  */
722   for (l = mem_elt->locs; l; l = l->next)
723     if (GET_CODE (l->loc) == MEM
724 	&& CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt)
725       return;
726 
727   addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
728   mem_elt->locs
729     = new_elt_loc_list (mem_elt->locs,
730 			replace_equiv_address_nv (x, addr_elt->u.val_rtx));
731 }
732 
733 /* Subroutine of cselib_lookup.  Return a value for X, which is a MEM rtx.
734    If CREATE, make a new one if we haven't seen it before.  */
735 
736 static cselib_val *
737 cselib_lookup_mem (x, create)
738      rtx x;
739      int create;
740 {
741   enum machine_mode mode = GET_MODE (x);
742   void **slot;
743   cselib_val *addr;
744   cselib_val *mem_elt;
745   struct elt_list *l;
746 
747   if (MEM_VOLATILE_P (x) || mode == BLKmode
748       || (FLOAT_MODE_P (mode) && flag_float_store))
749     return 0;
750 
751   /* Look up the value for the address.  */
752   addr = cselib_lookup (XEXP (x, 0), mode, create);
753   if (! addr)
754     return 0;
755 
756   /* Find a value that describes a value of our mode at that address.  */
757   for (l = addr->addr_list; l; l = l->next)
758     if (GET_MODE (l->elt->u.val_rtx) == mode)
759       return l->elt;
760 
761   if (! create)
762     return 0;
763 
764   mem_elt = new_cselib_val (++next_unknown_value, mode);
765   add_mem_for_addr (addr, mem_elt, x);
766   slot = htab_find_slot_with_hash (hash_table, wrap_constant (mode, x),
767 				   mem_elt->value, INSERT);
768   *slot = mem_elt;
769   return mem_elt;
770 }
771 
772 /* Walk rtx X and replace all occurrences of REG and MEM subexpressions
773    with VALUE expressions.  This way, it becomes independent of changes
774    to registers and memory.
775    X isn't actually modified; if modifications are needed, new rtl is
776    allocated.  However, the return value can share rtl with X.  */
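/* Illustrative example: if (reg:SI 3) has a known value, then
   (plus:SI (reg:SI 3) (const_int 4)) is rewritten to
   (plus:SI (value:SI ...) (const_int 4)); only the PLUS is copied, while the
   CONST_INT operand is shared with the original expression.  */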
777 
778 rtx
779 cselib_subst_to_values (x)
780      rtx x;
781 {
782   enum rtx_code code = GET_CODE (x);
783   const char *fmt = GET_RTX_FORMAT (code);
784   cselib_val *e;
785   struct elt_list *l;
786   rtx copy = x;
787   int i;
788 
789   switch (code)
790     {
791     case REG:
792       for (l = REG_VALUES (REGNO (x)); l; l = l->next)
793 	if (GET_MODE (l->elt->u.val_rtx) == GET_MODE (x))
794 	  return l->elt->u.val_rtx;
795 
796       abort ();
797 
798     case MEM:
799       e = cselib_lookup_mem (x, 0);
800       if (! e)
801 	{
802 	  /* This happens for autoincrements.  Assign a value that doesn't
803 	     match any other.  */
804 	  e = new_cselib_val (++next_unknown_value, GET_MODE (x));
805 	}
806       return e->u.val_rtx;
807 
808     case CONST_DOUBLE:
809     case CONST_VECTOR:
810     case CONST_INT:
811       return x;
812 
813     case POST_INC:
814     case PRE_INC:
815     case POST_DEC:
816     case PRE_DEC:
817     case POST_MODIFY:
818     case PRE_MODIFY:
819       e = new_cselib_val (++next_unknown_value, GET_MODE (x));
820       return e->u.val_rtx;
821 
822     default:
823       break;
824     }
825 
826   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
827     {
828       if (fmt[i] == 'e')
829 	{
830 	  rtx t = cselib_subst_to_values (XEXP (x, i));
831 
832 	  if (t != XEXP (x, i) && x == copy)
833 	    copy = shallow_copy_rtx (x);
834 
835 	  XEXP (copy, i) = t;
836 	}
837       else if (fmt[i] == 'E')
838 	{
839 	  int j, k;
840 
841 	  for (j = 0; j < XVECLEN (x, i); j++)
842 	    {
843 	      rtx t = cselib_subst_to_values (XVECEXP (x, i, j));
844 
845 	      if (t != XVECEXP (x, i, j) && XVEC (x, i) == XVEC (copy, i))
846 		{
847 		  if (x == copy)
848 		    copy = shallow_copy_rtx (x);
849 
850 		  XVEC (copy, i) = rtvec_alloc (XVECLEN (x, i));
851 		  for (k = 0; k < j; k++)
852 		    XVECEXP (copy, i, k) = XVECEXP (x, i, k);
853 		}
854 
855 	      XVECEXP (copy, i, j) = t;
856 	    }
857 	}
858     }
859 
860   return copy;
861 }
862 
863 /* Look up the rtl expression X in our tables and return the value it has.
864    If CREATE is zero, we return NULL if we don't know the value.  Otherwise,
865    we create a new one if possible, using mode MODE if X doesn't have a mode
866    (i.e. because it's a constant).  */
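/* Typical use inside this file (cf. rtx_equal_for_cselib_p above):

     cselib_val *e = cselib_lookup (x, GET_MODE (x), 0);
     if (e)
       x = e->u.val_rtx;

   i.e. a non-creating lookup that canonicalizes X to its VALUE rtx when the
   value is already known.  */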
867 
868 cselib_val *
869 cselib_lookup (x, mode, create)
870      rtx x;
871      enum machine_mode mode;
872      int create;
873 {
874   void **slot;
875   cselib_val *e;
876   unsigned int hashval;
877 
878   if (GET_MODE (x) != VOIDmode)
879     mode = GET_MODE (x);
880 
881   if (GET_CODE (x) == VALUE)
882     return CSELIB_VAL_PTR (x);
883 
884   if (GET_CODE (x) == REG)
885     {
886       struct elt_list *l;
887       unsigned int i = REGNO (x);
888 
889       for (l = REG_VALUES (i); l; l = l->next)
890 	if (mode == GET_MODE (l->elt->u.val_rtx))
891 	  return l->elt;
892 
893       if (! create)
894 	return 0;
895 
896       if (i < FIRST_PSEUDO_REGISTER)
897 	{
898 	  unsigned int n = HARD_REGNO_NREGS (i, mode);
899 
900 	  if (n > max_value_regs)
901 	    max_value_regs = n;
902 	}
903 
904       e = new_cselib_val (++next_unknown_value, GET_MODE (x));
905       e->locs = new_elt_loc_list (e->locs, x);
906       if (REG_VALUES (i) == 0)
907         VARRAY_PUSH_UINT (used_regs, i);
908       REG_VALUES (i) = new_elt_list (REG_VALUES (i), e);
909       slot = htab_find_slot_with_hash (hash_table, x, e->value, INSERT);
910       *slot = e;
911       return e;
912     }
913 
914   if (GET_CODE (x) == MEM)
915     return cselib_lookup_mem (x, create);
916 
917   hashval = hash_rtx (x, mode, create);
918   /* Can't even create if hashing is not possible.  */
919   if (! hashval)
920     return 0;
921 
922   slot = htab_find_slot_with_hash (hash_table, wrap_constant (mode, x),
923 				   hashval, create ? INSERT : NO_INSERT);
924   if (slot == 0)
925     return 0;
926 
927   e = (cselib_val *) *slot;
928   if (e)
929     return e;
930 
931   e = new_cselib_val (hashval, mode);
932 
933   /* We have to fill the slot before calling cselib_subst_to_values:
934      the hash table is inconsistent until we do so, and
935      cselib_subst_to_values will need to do lookups.  */
936   *slot = (void *) e;
937   e->locs = new_elt_loc_list (e->locs, cselib_subst_to_values (x));
938   return e;
939 }
940 
941 /* Invalidate any entries in reg_values that overlap REGNO.  This is called
942    if REGNO is changing.  MODE is the mode of the assignment to REGNO, which
943    is used to determine how many hard registers are being changed.  If MODE
944    is VOIDmode, then only REGNO is being changed; this is used when
945    invalidating call clobbered registers across a call.  */
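/* Worked example (illustrative, for a target where DImode occupies two hard
   registers): if a DImode value was recorded for hard reg 4, it covers regs
   4 and 5 and max_value_regs is at least 2.  Invalidating reg 5 therefore
   starts the scan at reg 5 - max_value_regs, so the overlapping DImode value
   recorded in reg 4 is removed as well.  */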
946 
947 static void
948 cselib_invalidate_regno (regno, mode)
949      unsigned int regno;
950      enum machine_mode mode;
951 {
952   unsigned int endregno;
953   unsigned int i;
954 
955   /* If we see pseudos after reload, something is _wrong_.  */
956   if (reload_completed && regno >= FIRST_PSEUDO_REGISTER
957       && reg_renumber[regno] >= 0)
958     abort ();
959 
960   /* Determine the range of registers that must be invalidated.  For
961      pseudos, only REGNO is affected.  For hard regs, we must take MODE
962      into account, and we must also invalidate lower register numbers
963      if they contain values that overlap REGNO.  */
964   if (regno < FIRST_PSEUDO_REGISTER)
965     {
966       if (mode == VOIDmode)
967 	abort ();
968 
969       if (regno < max_value_regs)
970 	i = 0;
971       else
972 	i = regno - max_value_regs;
973 
974       endregno = regno + HARD_REGNO_NREGS (regno, mode);
975     }
976   else
977     {
978       i = regno;
979       endregno = regno + 1;
980     }
981 
982   for (; i < endregno; i++)
983     {
984       struct elt_list **l = &REG_VALUES (i);
985 
986       /* Go through all known values for this reg; if it overlaps the range
987 	 we're invalidating, remove the value.  */
988       while (*l)
989 	{
990 	  cselib_val *v = (*l)->elt;
991 	  struct elt_loc_list **p;
992 	  unsigned int this_last = i;
993 
994 	  if (i < FIRST_PSEUDO_REGISTER)
995 	    this_last += HARD_REGNO_NREGS (i, GET_MODE (v->u.val_rtx)) - 1;
996 
997 	  if (this_last < regno)
998 	    {
999 	      l = &(*l)->next;
1000 	      continue;
1001 	    }
1002 
1003 	  /* We have an overlap.  */
1004 	  unchain_one_elt_list (l);
1005 
1006 	  /* Now, we clear the mapping from value to reg.  It must exist, so
1007 	     this code will crash intentionally if it doesn't.  */
1008 	  for (p = &v->locs; ; p = &(*p)->next)
1009 	    {
1010 	      rtx x = (*p)->loc;
1011 
1012 	      if (GET_CODE (x) == REG && REGNO (x) == i)
1013 		{
1014 		  unchain_one_elt_loc_list (p);
1015 		  break;
1016 		}
1017 	    }
1018 	  if (v->locs == 0)
1019 	    n_useless_values++;
1020 	}
1021     }
1022 }
1023 
1024 /* The memory at address MEM_BASE is being changed.
1025    Return whether this change will invalidate VAL.  */
1026 
1027 static int
1028 cselib_mem_conflict_p (mem_base, val)
1029      rtx mem_base;
1030      rtx val;
1031 {
1032   enum rtx_code code;
1033   const char *fmt;
1034   int i, j;
1035 
1036   code = GET_CODE (val);
1037   switch (code)
1038     {
1039       /* Get rid of a few simple cases quickly.  */
1040     case REG:
1041     case PC:
1042     case CC0:
1043     case SCRATCH:
1044     case CONST:
1045     case CONST_INT:
1046     case CONST_DOUBLE:
1047     case CONST_VECTOR:
1048     case SYMBOL_REF:
1049     case LABEL_REF:
1050       return 0;
1051 
1052     case MEM:
1053       if (GET_MODE (mem_base) == BLKmode
1054 	  || GET_MODE (val) == BLKmode
1055 	  || anti_dependence (val, mem_base))
1056 	return 1;
1057 
1058       /* The address may contain nested MEMs.  */
1059       break;
1060 
1061     default:
1062       break;
1063     }
1064 
1065   fmt = GET_RTX_FORMAT (code);
1066   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1067     {
1068       if (fmt[i] == 'e')
1069 	{
1070 	  if (cselib_mem_conflict_p (mem_base, XEXP (val, i)))
1071 	    return 1;
1072 	}
1073       else if (fmt[i] == 'E')
1074 	for (j = 0; j < XVECLEN (val, i); j++)
1075 	  if (cselib_mem_conflict_p (mem_base, XVECEXP (val, i, j)))
1076 	    return 1;
1077     }
1078 
1079   return 0;
1080 }
1081 
1082 /* For the value found in SLOT, walk its locations to determine if any overlap
1083    INFO (which is a MEM rtx).  */
1084 
1085 static int
1086 cselib_invalidate_mem_1 (slot, info)
1087      void **slot;
1088      void *info;
1089 {
1090   cselib_val *v = (cselib_val *) *slot;
1091   rtx mem_rtx = (rtx) info;
1092   struct elt_loc_list **p = &v->locs;
1093   int had_locs = v->locs != 0;
1094 
1095   while (*p)
1096     {
1097       rtx x = (*p)->loc;
1098       cselib_val *addr;
1099       struct elt_list **mem_chain;
1100 
1101       /* MEMs may occur in locations only at the top level; below
1102 	 that every MEM or REG is substituted by its VALUE.  */
1103       if (GET_CODE (x) != MEM
1104 	  || ! cselib_mem_conflict_p (mem_rtx, x))
1105 	{
1106 	  p = &(*p)->next;
1107 	  continue;
1108 	}
1109 
1110       /* This one overlaps.  */
1111       /* We must have a mapping from this MEM's address to the
1112 	 value (E).  Remove that, too.  */
1113       addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0);
1114       mem_chain = &addr->addr_list;
1115       for (;;)
1116 	{
1117 	  if ((*mem_chain)->elt == v)
1118 	    {
1119 	      unchain_one_elt_list (mem_chain);
1120 	      break;
1121 	    }
1122 
1123 	  mem_chain = &(*mem_chain)->next;
1124 	}
1125 
1126       unchain_one_elt_loc_list (p);
1127     }
1128 
1129   if (had_locs && v->locs == 0)
1130     n_useless_values++;
1131 
1132   return 1;
1133 }
1134 
1135 /* Invalidate any locations in the table which are changed because of a
1136    store to MEM_RTX.  If this is called because of a non-const call
1137    instruction, MEM_RTX is (mem:BLK const0_rtx).  */
1138 
1139 static void
1140 cselib_invalidate_mem (mem_rtx)
1141      rtx mem_rtx;
1142 {
1143   htab_traverse (hash_table, cselib_invalidate_mem_1, mem_rtx);
1144 }
1145 
1146 /* Invalidate DEST, which is being assigned to or clobbered.  */
1147 
1148 void
1149 cselib_invalidate_rtx (dest)
1150      rtx dest;
1151 {
1152   while (GET_CODE (dest) == STRICT_LOW_PART || GET_CODE (dest) == SIGN_EXTRACT
1153 	 || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SUBREG)
1154     dest = XEXP (dest, 0);
1155 
1156   if (GET_CODE (dest) == REG)
1157     cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
1158   else if (GET_CODE (dest) == MEM)
1159     cselib_invalidate_mem (dest);
1160 
1161   /* Some machines don't define AUTO_INC_DEC, but they still use push
1162      instructions.  We need to catch that case here in order to
1163      invalidate the stack pointer correctly.  Note that invalidating
1164      the stack pointer is different from invalidating DEST.  */
1165   if (push_operand (dest, GET_MODE (dest)))
1166     cselib_invalidate_rtx (stack_pointer_rtx);
1167 }
1168 
1169 /* A wrapper for cselib_invalidate_rtx to be called via note_stores.  */
1170 
1171 static void
1172 cselib_invalidate_rtx_note_stores (dest, ignore, data)
1173      rtx dest;
1174      rtx ignore ATTRIBUTE_UNUSED;
1175      void *data ATTRIBUTE_UNUSED;
1176 {
1177   cselib_invalidate_rtx (dest);
1178 }
1179 
1180 /* Record the result of a SET instruction.  DEST is being set; the source
1181    contains the value described by SRC_ELT.  If DEST is a MEM, DEST_ADDR_ELT
1182    describes its address.  */
1183 
1184 static void
1185 cselib_record_set (dest, src_elt, dest_addr_elt)
1186      rtx dest;
1187      cselib_val *src_elt, *dest_addr_elt;
1188 {
1189   int dreg = GET_CODE (dest) == REG ? (int) REGNO (dest) : -1;
1190 
1191   if (src_elt == 0 || side_effects_p (dest))
1192     return;
1193 
1194   if (dreg >= 0)
1195     {
1196       if (REG_VALUES (dreg) == 0)
1197         VARRAY_PUSH_UINT (used_regs, dreg);
1198 
1199       if (dreg < FIRST_PSEUDO_REGISTER)
1200 	{
1201 	  unsigned int n = HARD_REGNO_NREGS (dreg, GET_MODE (dest));
1202 
1203 	  if (n > max_value_regs)
1204 	    max_value_regs = n;
1205 	}
1206 
1207       REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
1208       if (src_elt->locs == 0)
1209 	n_useless_values--;
1210       src_elt->locs = new_elt_loc_list (src_elt->locs, dest);
1211     }
1212   else if (GET_CODE (dest) == MEM && dest_addr_elt != 0)
1213     {
1214       if (src_elt->locs == 0)
1215 	n_useless_values--;
1216       add_mem_for_addr (dest_addr_elt, src_elt, dest);
1217     }
1218 }
1219 
1220 /* Describe a single set that is part of an insn.  */
1221 struct set
1222 {
1223   rtx src;
1224   rtx dest;
1225   cselib_val *src_elt;
1226   cselib_val *dest_addr_elt;
1227 };
1228 
1229 /* There is no good way to determine how many elements there can be
1230    in a PARALLEL.  Since it's fairly cheap, use a really large number.  */
1231 #define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)
1232 
1233 /* Record the effects of any sets in INSN.  */
1234 static void
1235 cselib_record_sets (insn)
1236      rtx insn;
1237 {
1238   int n_sets = 0;
1239   int i;
1240   struct set sets[MAX_SETS];
1241   rtx body = PATTERN (insn);
1242   rtx cond = 0;
1243 
1244   body = PATTERN (insn);
1245   if (GET_CODE (body) == COND_EXEC)
1246     {
1247       cond = COND_EXEC_TEST (body);
1248       body = COND_EXEC_CODE (body);
1249     }
1250 
1251   /* Find all sets.  */
1252   if (GET_CODE (body) == SET)
1253     {
1254       sets[0].src = SET_SRC (body);
1255       sets[0].dest = SET_DEST (body);
1256       n_sets = 1;
1257     }
1258   else if (GET_CODE (body) == PARALLEL)
1259     {
1260       /* Look through the PARALLEL and record the values being
1261 	 set, if possible.  Also handle any CLOBBERs.  */
1262       for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
1263 	{
1264 	  rtx x = XVECEXP (body, 0, i);
1265 
1266 	  if (GET_CODE (x) == SET)
1267 	    {
1268 	      sets[n_sets].src = SET_SRC (x);
1269 	      sets[n_sets].dest = SET_DEST (x);
1270 	      n_sets++;
1271 	    }
1272 	}
1273     }
1274 
1275   /* Look up the values that are read.  Do this before invalidating the
1276      locations that are written.  */
1277   for (i = 0; i < n_sets; i++)
1278     {
1279       rtx dest = sets[i].dest;
1280 
1281       /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
1282          the low part after invalidating any knowledge about larger modes.  */
1283       if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
1284 	sets[i].dest = dest = XEXP (dest, 0);
1285 
1286       /* We don't know how to record anything but REG or MEM.  */
1287       if (GET_CODE (dest) == REG || GET_CODE (dest) == MEM)
1288         {
1289 	  rtx src = sets[i].src;
1290 	  if (cond)
1291 	    src = gen_rtx_IF_THEN_ELSE (GET_MODE (src), cond, src, dest);
1292 	  sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1);
1293 	  if (GET_CODE (dest) == MEM)
1294 	    sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0), Pmode, 1);
1295 	  else
1296 	    sets[i].dest_addr_elt = 0;
1297 	}
1298     }
1299 
1300   /* Invalidate all locations written by this insn.  Note that the elts we
1301      looked up in the previous loop aren't affected, just some of their
1302      locations may go away.  */
1303   note_stores (body, cselib_invalidate_rtx_note_stores, NULL);
1304 
1305   /* Now enter the equivalences in our tables.  */
1306   for (i = 0; i < n_sets; i++)
1307     {
1308       rtx dest = sets[i].dest;
1309       if (GET_CODE (dest) == REG || GET_CODE (dest) == MEM)
1310 	cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
1311     }
1312 }
1313 
1314 /* Record the effects of INSN.  */
1315 
1316 void
1317 cselib_process_insn (insn)
1318      rtx insn;
1319 {
1320   int i;
1321   rtx x;
1322 
1323   if (find_reg_note (insn, REG_LIBCALL, NULL))
1324     cselib_current_insn_in_libcall = true;
1325   if (find_reg_note (insn, REG_RETVAL, NULL))
1326     cselib_current_insn_in_libcall = false;
1327   cselib_current_insn = insn;
1328 
1329   /* Forget everything at a CODE_LABEL, a volatile asm, or a setjmp.  */
1330   if (GET_CODE (insn) == CODE_LABEL
1331       || (GET_CODE (insn) == CALL_INSN
1332 	  && find_reg_note (insn, REG_SETJMP, NULL))
1333       || (GET_CODE (insn) == INSN
1334 	  && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
1335 	  && MEM_VOLATILE_P (PATTERN (insn))))
1336     {
1337       clear_table (0);
1338       return;
1339     }
1340 
1341   if (! INSN_P (insn))
1342     {
1343       cselib_current_insn = 0;
1344       return;
1345     }
1346 
1347   /* If this is a call instruction, forget anything stored in a
1348      call clobbered register, or, if this is not a const call, in
1349      memory.  */
1350   if (GET_CODE (insn) == CALL_INSN)
1351     {
1352       for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1353 	if (call_used_regs[i])
1354 	  cselib_invalidate_regno (i, reg_raw_mode[i]);
1355 
1356       if (! CONST_OR_PURE_CALL_P (insn))
1357 	cselib_invalidate_mem (callmem);
1358     }
1359 
1360   cselib_record_sets (insn);
1361 
1362 #ifdef AUTO_INC_DEC
1363   /* Clobber any registers which appear in REG_INC notes.  We
1364      could keep track of the changes to their values, but it is
1365      unlikely to help.  */
1366   for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
1367     if (REG_NOTE_KIND (x) == REG_INC)
1368       cselib_invalidate_rtx (XEXP (x, 0));
1369 #endif
1370 
1371   /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
1372      after we have processed the insn.  */
1373   if (GET_CODE (insn) == CALL_INSN)
1374     for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
1375       if (GET_CODE (XEXP (x, 0)) == CLOBBER)
1376 	cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));
1377 
1378   cselib_current_insn = 0;
1379 
1380   if (n_useless_values > MAX_USELESS_VALUES)
1381     remove_useless_values ();
1382 }
1383 
1384 /* Make sure our varrays are big enough.  Not called from any cselib routines;
1385    it must be called by the user if it allocated new registers.  */
1386 
1387 void
1388 cselib_update_varray_sizes ()
1389 {
1390   unsigned int nregs = max_reg_num ();
1391 
1392   if (nregs == cselib_nregs)
1393     return;
1394 
1395   cselib_nregs = nregs;
1396   VARRAY_GROW (reg_values, nregs);
1397   VARRAY_GROW (used_regs, nregs);
1398 }
1399 
1400 /* Initialize cselib for one pass.  The caller must also call
1401    init_alias_analysis.  */
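/* The alias-analysis requirement exists because cselib_invalidate_mem, via
   cselib_mem_conflict_p, uses anti_dependence, which relies on the alias
   machinery having been initialized.  */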
1402 
1403 void
1404 cselib_init ()
1405 {
1406   /* This is only created once.  */
1407   if (! callmem)
1408     callmem = gen_rtx_MEM (BLKmode, const0_rtx);
1409 
1410   cselib_nregs = max_reg_num ();
1411   if (reg_values_old != NULL && VARRAY_SIZE (reg_values_old) >= cselib_nregs)
1412     {
1413       reg_values = reg_values_old;
1414       used_regs = used_regs_old;
1415       VARRAY_CLEAR (reg_values);
1416       VARRAY_CLEAR (used_regs);
1417     }
1418   else
1419     {
1420       VARRAY_ELT_LIST_INIT (reg_values, cselib_nregs, "reg_values");
1421       VARRAY_UINT_INIT (used_regs, cselib_nregs, "used_regs");
1422     }
1423   hash_table = htab_create_ggc (31, get_value_hash, entry_and_rtx_equal_p,
1424 				NULL);
1425   clear_table (1);
1426   cselib_current_insn_in_libcall = false;
1427 }
1428 
1429 /* Called when the current user is done with cselib.  */
1430 
1431 void
1432 cselib_finish ()
1433 {
1434   reg_values_old = reg_values;
1435   reg_values = 0;
1436   used_regs_old = used_regs;
1437   used_regs = 0;
1438   hash_table = 0;
1439   n_useless_values = 0;
1440   next_unknown_value = 0;
1441 }
1442 
1443 #include "gt-cselib.h"
1444