xref: /dragonfly/contrib/gcc-8.0/gcc/cselib.c (revision 8bf5b238)
1 /* Common subexpression elimination library for GNU compiler.
2    Copyright (C) 1987-2018 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "df.h"
28 #include "memmodel.h"
29 #include "tm_p.h"
30 #include "regs.h"
31 #include "emit-rtl.h"
32 #include "dumpfile.h"
33 #include "cselib.h"
34 #include "params.h"
35 
36 /* A list of cselib_val structures.  */
37 struct elt_list
38 {
39   struct elt_list *next;
40   cselib_val *elt;
41 };
42 
43 static bool cselib_record_memory;
44 static bool cselib_preserve_constants;
45 static bool cselib_any_perm_equivs;
46 static inline void promote_debug_loc (struct elt_loc_list *l);
47 static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
48 static void new_elt_loc_list (cselib_val *, rtx);
49 static void unchain_one_value (cselib_val *);
50 static void unchain_one_elt_list (struct elt_list **);
51 static void unchain_one_elt_loc_list (struct elt_loc_list **);
52 static void remove_useless_values (void);
53 static unsigned int cselib_hash_rtx (rtx, int, machine_mode);
54 static cselib_val *new_cselib_val (unsigned int, machine_mode, rtx);
55 static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
56 static cselib_val *cselib_lookup_mem (rtx, int);
57 static void cselib_invalidate_regno (unsigned int, machine_mode);
58 static void cselib_invalidate_mem (rtx);
59 static void cselib_record_set (rtx, cselib_val *, cselib_val *);
60 static void cselib_record_sets (rtx_insn *);
61 
62 struct expand_value_data
63 {
64   bitmap regs_active;
65   cselib_expand_callback callback;
66   void *callback_arg;
67   bool dummy;
68 };
69 
70 static rtx cselib_expand_value_rtx_1 (rtx, struct expand_value_data *, int);
71 
72 /* There are three ways in which cselib can look up an rtx:
73    - for a REG, the reg_values table (which is indexed by regno) is used
74    - for a MEM, we recursively look up its address and then follow the
75      addr_list of that value
76    - for everything else, we compute a hash value and go through the hash
77      table.  Since different rtx's can still have the same hash value,
78      this involves walking the table entries for a given value and comparing
79      the locations of the entries with the rtx we are looking up.  */
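
/* Illustrative sketch, not part of GCC (the function name is made up):
   all three lookup paths above are reached through the same entry point,
   cselib_lookup; the dispatch on the rtx code happens internally.  */
#if 0
static cselib_val *
example_lookup_paths (rtx reg, rtx mem, rtx expr)
{
  /* REG: indexed through the reg_values table by REGNO.  */
  cselib_val *v1 = cselib_lookup (reg, GET_MODE (reg), 1, VOIDmode);
  /* MEM: the address is looked up recursively, then its addr_list
     is followed.  */
  cselib_val *v2 = cselib_lookup (mem, GET_MODE (mem), 1, VOIDmode);
  /* Everything else: hashed with cselib_hash_rtx, then matched against
     entries with the same hash value in cselib_hash_table.  */
  cselib_val *v3 = cselib_lookup (expr, GET_MODE (expr), 1, VOIDmode);
  return v1 && v2 ? v3 : NULL;
}
#endif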
80 
81 struct cselib_hasher : nofree_ptr_hash <cselib_val>
82 {
83   struct key {
84     /* The rtx value and its mode (needed separately for constant
85        integers).  */
86     machine_mode mode;
87     rtx x;
88     /* The mode of the containing MEM, if any, otherwise VOIDmode.  */
89     machine_mode memmode;
90   };
91   typedef key *compare_type;
92   static inline hashval_t hash (const cselib_val *);
93   static inline bool equal (const cselib_val *, const key *);
94 };
95 
96 /* The hash function for our hash table.  The value is always computed with
97    cselib_hash_rtx when adding an element; this function just extracts the
98    hash value from a cselib_val structure.  */
99 
100 inline hashval_t
101 cselib_hasher::hash (const cselib_val *v)
102 {
103   return v->hash;
104 }
105 
106 /* The equality test for our hash table.  The first argument V is a table
107    element (i.e. a cselib_val), while the second arg X_ARG is a lookup key
108    holding the rtx, its mode and the containing MEM's mode.  We know that
109    all callers of htab_find_slot_with_hash wrap CONST_INTs into a CONST of an appropriate mode.  */
110 
111 inline bool
112 cselib_hasher::equal (const cselib_val *v, const key *x_arg)
113 {
114   struct elt_loc_list *l;
115   rtx x = x_arg->x;
116   machine_mode mode = x_arg->mode;
117   machine_mode memmode = x_arg->memmode;
118 
119   if (mode != GET_MODE (v->val_rtx))
120     return false;
121 
122   if (GET_CODE (x) == VALUE)
123     return x == v->val_rtx;
124 
125   /* We don't guarantee that distinct rtx's have different hash values,
126      so we need to do a comparison.  */
127   for (l = v->locs; l; l = l->next)
128     if (rtx_equal_for_cselib_1 (l->loc, x, memmode, 0))
129       {
130 	promote_debug_loc (l);
131 	return true;
132       }
133 
134   return false;
135 }
136 
137 /* A table that enables us to look up elts by their value.  */
138 static hash_table<cselib_hasher> *cselib_hash_table;
139 
140 /* A table to hold preserved values.  */
141 static hash_table<cselib_hasher> *cselib_preserved_hash_table;
142 
143 /* This is a global so we don't have to pass it through every function.
144    It is used in new_elt_loc_list to set SETTING_INSN.  */
145 static rtx_insn *cselib_current_insn;
146 
147 /* The unique id that the next created value will take.  */
148 static unsigned int next_uid;
149 
150 /* The number of registers we had when the varrays were last resized.  */
151 static unsigned int cselib_nregs;
152 
153 /* Count values without known locations, or with only locations that
154    wouldn't have been known except for debug insns.  Whenever this
155    grows too big, we remove these useless values from the table.
156 
157    Counting values with only debug locations is a bit tricky.  We don't
158    want to increment n_useless_values when we create a value for a
159    debug insn, for this would get n_useless_values out of sync, but we
160    do want to increment it if all locs in the list that were ever
161    referenced in nondebug insns are removed from the list.
162 
163    In the general case, once we do that, we'd have to stop accepting
164    nondebug expressions in the loc list, to avoid having two values
165    equivalent that, without debug insns, would have been made into
166    separate values.  However, because debug insns never introduce
167    equivalences themselves (no assignments), the only means for
168    growing loc lists is through nondebug assignments.  If the locs
169    also happen to be referenced in debug insns, it will work just fine.
170 
171    A consequence of this is that there's at most one debug-only loc in
172    each loc list.  If we keep it in the first entry, testing whether
173    we have a debug-only loc list takes O(1).
174 
175    Furthermore, since any additional entry in a loc list containing a
176    debug loc would have to come from an assignment (nondebug) that
177    references both the initial debug loc and the newly-equivalent loc,
178    the initial debug loc would be promoted to a nondebug loc, and the
179    loc list would not contain debug locs any more.
180 
181    So the only case we have to be careful with in order to keep
182    n_useless_values in sync between debug and nondebug compilations is
183    to avoid incrementing n_useless_values when removing the single loc
184    from a value that turns out to not appear outside debug values.  We
185    increment n_useless_debug_values instead, and leave such values
186    alone until, for other reasons, we garbage-collect useless
187    values.  */
188 static int n_useless_values;
189 static int n_useless_debug_values;
190 
191 /* Count values whose locs have been taken exclusively from debug
192    insns for the entire life of the value.  */
193 static int n_debug_values;
194 
195 /* Number of useless values before we remove them from the hash table.  */
196 #define MAX_USELESS_VALUES 32
197 
198 /* This table maps from register number to values.  It does not
199    contain pointers to cselib_val structures, but rather elt_lists.
200    The purpose is to be able to refer to the same register in
201    different modes.  The first element of the list defines the mode in
202    which the register was set; if the mode is unknown or the value is
203    no longer valid in that mode, ELT will be NULL for the first
204    element.  */
205 static struct elt_list **reg_values;
206 static unsigned int reg_values_size;
207 #define REG_VALUES(i) reg_values[i]
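
/* Illustrative sketch, not part of GCC (the name is hypothetical): how a
   value for register REGNO in mode MODE would be found by walking its
   elt_list, mirroring what the lookup code does internally for REGs.  */
#if 0
static cselib_val *
example_reg_value_in_mode (unsigned int regno, machine_mode mode)
{
  struct elt_list *l = REG_VALUES (regno);

  /* A NULL elt in the first entry only records that the mode of the
     original SET is unknown or no longer valid; skip it.  */
  if (l && l->elt == NULL)
    l = l->next;
  for (; l; l = l->next)
    if (GET_MODE (l->elt->val_rtx) == mode)
      return l->elt;
  return NULL;
}
#endif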
208 
209 /* The largest number of hard regs used by any entry added to the
210    REG_VALUES table.  Cleared on each cselib_clear_table() invocation.  */
211 static unsigned int max_value_regs;
212 
213 /* The set of indices I with REG_VALUES(I) != 0 is saved here.  This is used
214    in cselib_clear_table() for fast emptying.  */
215 static unsigned int *used_regs;
216 static unsigned int n_used_regs;
217 
218 /* We pass this to cselib_invalidate_mem to invalidate all of
219    memory for a non-const call instruction.  */
220 static GTY(()) rtx callmem;
221 
222 /* Set by discard_useless_locs if it deleted the last location of any
223    value.  */
224 static int values_became_useless;
225 
226 /* Used as stop element of the containing_mem list so we can check
227    presence in the list by checking the next pointer.  */
228 static cselib_val dummy_val;
229 
230 /* If non-NULL, value of the eliminated arg_pointer_rtx or frame_pointer_rtx
231    that is constant through the whole function and should never be
232    eliminated.  */
233 static cselib_val *cfa_base_preserved_val;
234 static unsigned int cfa_base_preserved_regno = INVALID_REGNUM;
235 
236 /* Used to list all values that contain a memory reference.
237    May or may not contain the useless values - the list is compacted
238    each time memory is invalidated.  */
239 static cselib_val *first_containing_mem = &dummy_val;
240 
241 static object_allocator<elt_list> elt_list_pool ("elt_list");
242 static object_allocator<elt_loc_list> elt_loc_list_pool ("elt_loc_list");
243 static object_allocator<cselib_val> cselib_val_pool ("cselib_val_list");
244 
245 static pool_allocator value_pool ("value", RTX_CODE_SIZE (VALUE));
246 
247 /* If nonnull, cselib will call this function before freeing useless
248    VALUEs.  A VALUE is deemed useless if its "locs" field is null.  */
249 void (*cselib_discard_hook) (cselib_val *);
250 
251 /* If nonnull, cselib will call this function before recording sets or
252    even clobbering outputs of INSN.  All the recorded sets are
253    represented in the array SETS, which has N_SETS elements.  Comparing
254    value uids against cselib_get_next_uid () taken beforehand tells
255    whether values present in SETS were introduced by this instruction.  */
256 void (*cselib_record_sets_hook) (rtx_insn *insn, struct cselib_set *sets,
257 				 int n_sets);
258 
259 #define PRESERVED_VALUE_P(RTX) \
260   (RTL_FLAG_CHECK1 ("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)
261 
262 #define SP_BASED_VALUE_P(RTX) \
263   (RTL_FLAG_CHECK1 ("SP_BASED_VALUE_P", (RTX), VALUE)->jump)
264 
265 
266 
267 /* Allocate a struct elt_list and fill in its two elements with the
268    arguments.  */
269 
270 static inline struct elt_list *
271 new_elt_list (struct elt_list *next, cselib_val *elt)
272 {
273   elt_list *el = elt_list_pool.allocate ();
274   el->next = next;
275   el->elt = elt;
276   return el;
277 }
278 
279 /* Allocate a struct elt_loc_list with LOC and prepend it to VAL's loc
280    list.  */
281 
282 static inline void
283 new_elt_loc_list (cselib_val *val, rtx loc)
284 {
285   struct elt_loc_list *el, *next = val->locs;
286 
287   gcc_checking_assert (!next || !next->setting_insn
288 		       || !DEBUG_INSN_P (next->setting_insn)
289 		       || cselib_current_insn == next->setting_insn);
290 
291   /* If we're creating the first loc in a debug insn context, we've
292      just created a debug value.  Count it.  */
293   if (!next && cselib_current_insn && DEBUG_INSN_P (cselib_current_insn))
294     n_debug_values++;
295 
296   val = canonical_cselib_val (val);
297   next = val->locs;
298 
299   if (GET_CODE (loc) == VALUE)
300     {
301       loc = canonical_cselib_val (CSELIB_VAL_PTR (loc))->val_rtx;
302 
303       gcc_checking_assert (PRESERVED_VALUE_P (loc)
304 			   == PRESERVED_VALUE_P (val->val_rtx));
305 
306       if (val->val_rtx == loc)
307 	return;
308       else if (val->uid > CSELIB_VAL_PTR (loc)->uid)
309 	{
310 	  /* Reverse the insertion.  */
311 	  new_elt_loc_list (CSELIB_VAL_PTR (loc), val->val_rtx);
312 	  return;
313 	}
314 
315       gcc_checking_assert (val->uid < CSELIB_VAL_PTR (loc)->uid);
316 
317       if (CSELIB_VAL_PTR (loc)->locs)
318 	{
319 	  /* Bring all locs from LOC to VAL.  */
320 	  for (el = CSELIB_VAL_PTR (loc)->locs; el->next; el = el->next)
321 	    {
322 	      /* Adjust values that have LOC as canonical so that VAL
323 		 becomes their canonical.  */
324 	      if (el->loc && GET_CODE (el->loc) == VALUE)
325 		{
326 		  gcc_checking_assert (CSELIB_VAL_PTR (el->loc)->locs->loc
327 				       == loc);
328 		  CSELIB_VAL_PTR (el->loc)->locs->loc = val->val_rtx;
329 		}
330 	    }
331 	  el->next = val->locs;
332 	  next = val->locs = CSELIB_VAL_PTR (loc)->locs;
333 	}
334 
335       if (CSELIB_VAL_PTR (loc)->addr_list)
336 	{
337 	  /* Bring in addr_list into canonical node.  */
338 	  struct elt_list *last = CSELIB_VAL_PTR (loc)->addr_list;
339 	  while (last->next)
340 	    last = last->next;
341 	  last->next = val->addr_list;
342 	  val->addr_list = CSELIB_VAL_PTR (loc)->addr_list;
343 	  CSELIB_VAL_PTR (loc)->addr_list = NULL;
344 	}
345 
346       if (CSELIB_VAL_PTR (loc)->next_containing_mem != NULL
347 	  && val->next_containing_mem == NULL)
348 	{
349 	  /* Add VAL to the containing_mem list after LOC.  LOC will
350 	     be removed when we notice it doesn't contain any
351 	     MEMs.  */
352 	  val->next_containing_mem = CSELIB_VAL_PTR (loc)->next_containing_mem;
353 	  CSELIB_VAL_PTR (loc)->next_containing_mem = val;
354 	}
355 
356       /* Chain LOC back to VAL.  */
357       el = elt_loc_list_pool.allocate ();
358       el->loc = val->val_rtx;
359       el->setting_insn = cselib_current_insn;
360       el->next = NULL;
361       CSELIB_VAL_PTR (loc)->locs = el;
362     }
363 
364   el = elt_loc_list_pool.allocate ();
365   el->loc = loc;
366   el->setting_insn = cselib_current_insn;
367   el->next = next;
368   val->locs = el;
369 }
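
/* Worked example for the VALUE case above, with hypothetical uids:
   suppose VALUE 9 is found equivalent to the older VALUE 5.  A call
   new_elt_loc_list (v9, v5->val_rtx) reverses itself into
   new_elt_loc_list (v5, v9->val_rtx), so the value with the smaller uid
   becomes canonical: v9's locs, addr_list and containing_mem entry all
   migrate to v5, and v9 is left with a single loc naming v5.  */
#if 0
static void
example_check_canonical (cselib_val *v5, cselib_val *v9)
{
  gcc_checking_assert (canonical_cselib_val (v9) == v5);
  gcc_checking_assert (v9->locs
		       && v9->locs->loc == v5->val_rtx
		       && v9->locs->next == NULL);
}
#endif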
370 
371 /* Promote loc L to a nondebug cselib_current_insn if L is marked as
372    originating from a debug insn, maintaining the debug values
373    count.  */
374 
375 static inline void
376 promote_debug_loc (struct elt_loc_list *l)
377 {
378   if (l && l->setting_insn && DEBUG_INSN_P (l->setting_insn)
379       && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
380     {
381       n_debug_values--;
382       l->setting_insn = cselib_current_insn;
383       if (cselib_preserve_constants && l->next)
384 	{
385 	  gcc_assert (l->next->setting_insn
386 		      && DEBUG_INSN_P (l->next->setting_insn)
387 		      && !l->next->next);
388 	  l->next->setting_insn = cselib_current_insn;
389 	}
390       else
391 	gcc_assert (!l->next);
392     }
393 }
394 
395 /* The elt_list at *PL is no longer needed.  Unchain it and free its
396    storage.  */
397 
398 static inline void
399 unchain_one_elt_list (struct elt_list **pl)
400 {
401   struct elt_list *l = *pl;
402 
403   *pl = l->next;
404   elt_list_pool.remove (l);
405 }
406 
407 /* Likewise for elt_loc_lists.  */
408 
409 static void
410 unchain_one_elt_loc_list (struct elt_loc_list **pl)
411 {
412   struct elt_loc_list *l = *pl;
413 
414   *pl = l->next;
415   elt_loc_list_pool.remove (l);
416 }
417 
418 /* Likewise for cselib_vals.  This also frees the addr_list associated with
419    V.  */
420 
421 static void
422 unchain_one_value (cselib_val *v)
423 {
424   while (v->addr_list)
425     unchain_one_elt_list (&v->addr_list);
426 
427   cselib_val_pool.remove (v);
428 }
429 
430 /* Remove all entries from the hash table.  Also used during
431    initialization.  */
432 
433 void
434 cselib_clear_table (void)
435 {
436   cselib_reset_table (1);
437 }
438 
439 /* Return TRUE if V is a constant, a function invariant or a VALUE
440    equivalence; FALSE otherwise.  */
441 
442 static bool
443 invariant_or_equiv_p (cselib_val *v)
444 {
445   struct elt_loc_list *l;
446 
447   if (v == cfa_base_preserved_val)
448     return true;
449 
450   /* Keep VALUE equivalences around.  */
451   for (l = v->locs; l; l = l->next)
452     if (GET_CODE (l->loc) == VALUE)
453       return true;
454 
455   if (v->locs != NULL
456       && v->locs->next == NULL)
457     {
458       if (CONSTANT_P (v->locs->loc)
459 	  && (GET_CODE (v->locs->loc) != CONST
460 	      || !references_value_p (v->locs->loc, 0)))
461 	return true;
462       /* Although a debug expr may be bound to different expressions,
463 	 we can preserve it as if it were constant, to get unification
464 	 and proper merging within var-tracking.  */
465       if (GET_CODE (v->locs->loc) == DEBUG_EXPR
466 	  || GET_CODE (v->locs->loc) == DEBUG_IMPLICIT_PTR
467 	  || GET_CODE (v->locs->loc) == ENTRY_VALUE
468 	  || GET_CODE (v->locs->loc) == DEBUG_PARAMETER_REF)
469 	return true;
470 
471       /* (plus (value V) (const_int C)) is invariant iff V is invariant.  */
472       if (GET_CODE (v->locs->loc) == PLUS
473 	  && CONST_INT_P (XEXP (v->locs->loc, 1))
474 	  && GET_CODE (XEXP (v->locs->loc, 0)) == VALUE
475 	  && invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (v->locs->loc, 0))))
476 	return true;
477     }
478 
479   return false;
480 }
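
/* For illustration, a hypothetical chain accepted by the test above:
   with cselib_preserve_constants set, a value V1 whose only loc is
     (plus:SI (value:SI V0) (const_int 16))
   is invariant provided V0 is itself invariant, e.g. when V0's single
   loc is (symbol_ref "x") or (const_int 42).  A loc such as
     (plus:SI (reg:SI 2) (const_int 16))
   does not qualify.  */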
481 
482 /* Remove from hash table all VALUEs except constants, function
483    invariants and VALUE equivalences.  */
484 
485 int
486 preserve_constants_and_equivs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
487 {
488   cselib_val *v = *x;
489 
490   if (invariant_or_equiv_p (v))
491     {
492       cselib_hasher::key lookup = {
493 	GET_MODE (v->val_rtx), v->val_rtx, VOIDmode
494       };
495       cselib_val **slot
496 	= cselib_preserved_hash_table->find_slot_with_hash (&lookup,
497 							   v->hash, INSERT);
498       gcc_assert (!*slot);
499       *slot = v;
500     }
501 
502   cselib_hash_table->clear_slot (x);
503 
504   return 1;
505 }
506 
507 /* Remove all entries from the hash table, arranging for the next
508    value to be numbered NUM.  */
509 
510 void
511 cselib_reset_table (unsigned int num)
512 {
513   unsigned int i;
514 
515   max_value_regs = 0;
516 
517   if (cfa_base_preserved_val)
518     {
519       unsigned int regno = cfa_base_preserved_regno;
520       unsigned int new_used_regs = 0;
521       for (i = 0; i < n_used_regs; i++)
522 	if (used_regs[i] == regno)
523 	  {
524 	    new_used_regs = 1;
525 	    continue;
526 	  }
527 	else
528 	  REG_VALUES (used_regs[i]) = 0;
529       gcc_assert (new_used_regs == 1);
530       n_used_regs = new_used_regs;
531       used_regs[0] = regno;
532       max_value_regs
533 	= hard_regno_nregs (regno,
534 			    GET_MODE (cfa_base_preserved_val->locs->loc));
535     }
536   else
537     {
538       for (i = 0; i < n_used_regs; i++)
539 	REG_VALUES (used_regs[i]) = 0;
540       n_used_regs = 0;
541     }
542 
543   if (cselib_preserve_constants)
544     cselib_hash_table->traverse <void *, preserve_constants_and_equivs>
545       (NULL);
546   else
547     {
548       cselib_hash_table->empty ();
549       gcc_checking_assert (!cselib_any_perm_equivs);
550     }
551 
552   n_useless_values = 0;
553   n_useless_debug_values = 0;
554   n_debug_values = 0;
555 
556   next_uid = num;
557 
558   first_containing_mem = &dummy_val;
559 }
560 
561 /* Return the number of the next value that will be generated.  */
562 
563 unsigned int
564 cselib_get_next_uid (void)
565 {
566   return next_uid;
567 }
568 
569 /* Search for X, whose hashcode is HASH, in CSELIB_HASH_TABLE,
570    INSERTing if requested.  When X is part of the address of a MEM,
571    MEMMODE should specify the mode of the MEM.  */
572 
573 static cselib_val **
574 cselib_find_slot (machine_mode mode, rtx x, hashval_t hash,
575 		  enum insert_option insert, machine_mode memmode)
576 {
577   cselib_val **slot = NULL;
578   cselib_hasher::key lookup = { mode, x, memmode };
579   if (cselib_preserve_constants)
580     slot = cselib_preserved_hash_table->find_slot_with_hash (&lookup, hash,
581 							     NO_INSERT);
582   if (!slot)
583     slot = cselib_hash_table->find_slot_with_hash (&lookup, hash, insert);
584   return slot;
585 }
586 
587 /* Return true if X contains a VALUE rtx.  If ONLY_USELESS is set, we
588    only return true for VALUEs pointing to a cselib_val that has no
589    locations left and is not preserved, which implies the cselib_val
590    will be removed.  */
591 
592 int
593 references_value_p (const_rtx x, int only_useless)
594 {
595   const enum rtx_code code = GET_CODE (x);
596   const char *fmt = GET_RTX_FORMAT (code);
597   int i, j;
598 
599   if (GET_CODE (x) == VALUE
600       && (! only_useless ||
601 	  (CSELIB_VAL_PTR (x)->locs == 0 && !PRESERVED_VALUE_P (x))))
602     return 1;
603 
604   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
605     {
606       if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
607 	return 1;
608       else if (fmt[i] == 'E')
609 	for (j = 0; j < XVECLEN (x, i); j++)
610 	  if (references_value_p (XVECEXP (x, i, j), only_useless))
611 	    return 1;
612     }
613 
614   return 0;
615 }
616 
617 /* For all locations found in X, delete locations that reference useless
618    values (i.e. values without any location).  Called through
619    htab_traverse.  */
620 
621 int
622 discard_useless_locs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
623 {
624   cselib_val *v = *x;
625   struct elt_loc_list **p = &v->locs;
626   bool had_locs = v->locs != NULL;
627   rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;
628 
629   while (*p)
630     {
631       if (references_value_p ((*p)->loc, 1))
632 	unchain_one_elt_loc_list (p);
633       else
634 	p = &(*p)->next;
635     }
636 
637   if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
638     {
639       if (setting_insn && DEBUG_INSN_P (setting_insn))
640 	n_useless_debug_values++;
641       else
642 	n_useless_values++;
643       values_became_useless = 1;
644     }
645   return 1;
646 }
647 
648 /* If X is a value with no locations, remove it from the hashtable.  */
649 
650 int
651 discard_useless_values (cselib_val **x, void *info ATTRIBUTE_UNUSED)
652 {
653   cselib_val *v = *x;
654 
655   if (v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
656     {
657       if (cselib_discard_hook)
658 	cselib_discard_hook (v);
659 
660       CSELIB_VAL_PTR (v->val_rtx) = NULL;
661       cselib_hash_table->clear_slot (x);
662       unchain_one_value (v);
663       n_useless_values--;
664     }
665 
666   return 1;
667 }
668 
669 /* Clean out useless values (i.e. those which no longer have locations
670    associated with them) from the hash table.  */
671 
672 static void
673 remove_useless_values (void)
674 {
675   cselib_val **p, *v;
676 
677   /* First pass: eliminate locations that reference useless values.
678      That in turn can make more values useless.  */
679   do
680     {
681       values_became_useless = 0;
682       cselib_hash_table->traverse <void *, discard_useless_locs> (NULL);
683     }
684   while (values_became_useless);
685 
686   /* Second pass: actually remove the values.  */
687 
688   p = &first_containing_mem;
689   for (v = *p; v != &dummy_val; v = v->next_containing_mem)
690     if (v->locs && v == canonical_cselib_val (v))
691       {
692 	*p = v;
693 	p = &(*p)->next_containing_mem;
694       }
695   *p = &dummy_val;
696 
697   n_useless_values += n_useless_debug_values;
698   n_debug_values -= n_useless_debug_values;
699   n_useless_debug_values = 0;
700 
701   cselib_hash_table->traverse <void *, discard_useless_values> (NULL);
702 
703   gcc_assert (!n_useless_values);
704 }
705 
706 /* Arrange for a value to not be removed from the hash table even if
707    it becomes useless.  */
708 
709 void
710 cselib_preserve_value (cselib_val *v)
711 {
712   PRESERVED_VALUE_P (v->val_rtx) = 1;
713 }
714 
715 /* Test whether a value is preserved.  */
716 
717 bool
718 cselib_preserved_value_p (cselib_val *v)
719 {
720   return PRESERVED_VALUE_P (v->val_rtx);
721 }
722 
723 /* Arrange for a REG value to be assumed constant through the whole function,
724    never invalidated and preserved across cselib_reset_table calls.  */
725 
726 void
727 cselib_preserve_cfa_base_value (cselib_val *v, unsigned int regno)
728 {
729   if (cselib_preserve_constants
730       && v->locs
731       && REG_P (v->locs->loc))
732     {
733       cfa_base_preserved_val = v;
734       cfa_base_preserved_regno = regno;
735     }
736 }
737 
738 /* Clean all non-constant expressions in the hash table, but retain
739    their values.  */
740 
741 void
742 cselib_preserve_only_values (void)
743 {
744   int i;
745 
746   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
747     cselib_invalidate_regno (i, reg_raw_mode[i]);
748 
749   cselib_invalidate_mem (callmem);
750 
751   remove_useless_values ();
752 
753   gcc_assert (first_containing_mem == &dummy_val);
754 }
755 
756 /* Arrange for a value to be marked as based on stack pointer
757    for find_base_term purposes.  */
758 
759 void
760 cselib_set_value_sp_based (cselib_val *v)
761 {
762   SP_BASED_VALUE_P (v->val_rtx) = 1;
763 }
764 
765 /* Test whether a value is based on stack pointer for
766    find_base_term purposes.  */
767 
768 bool
769 cselib_sp_based_value_p (cselib_val *v)
770 {
771   return SP_BASED_VALUE_P (v->val_rtx);
772 }
773 
774 /* Return the mode in which a register was last set.  If X is not a
775    register, return its mode.  If the mode in which the register was
776    set is not known, or the value was already clobbered, return
777    VOIDmode.  */
778 
779 machine_mode
780 cselib_reg_set_mode (const_rtx x)
781 {
782   if (!REG_P (x))
783     return GET_MODE (x);
784 
785   if (REG_VALUES (REGNO (x)) == NULL
786       || REG_VALUES (REGNO (x))->elt == NULL)
787     return VOIDmode;
788 
789   return GET_MODE (REG_VALUES (REGNO (x))->elt->val_rtx);
790 }
791 
792 /* If X is a PLUS or an autoinc operation, expand the operation,
793    storing the offset, if any, in *OFF.  */
794 
795 static rtx
796 autoinc_split (rtx x, rtx *off, machine_mode memmode)
797 {
798   switch (GET_CODE (x))
799     {
800     case PLUS:
801       *off = XEXP (x, 1);
802       return XEXP (x, 0);
803 
804     case PRE_DEC:
805       if (memmode == VOIDmode)
806 	return x;
807 
808       *off = gen_int_mode (-GET_MODE_SIZE (memmode), GET_MODE (x));
809       return XEXP (x, 0);
810 
811     case PRE_INC:
812       if (memmode == VOIDmode)
813 	return x;
814 
815       *off = gen_int_mode (GET_MODE_SIZE (memmode), GET_MODE (x));
816       return XEXP (x, 0);
817 
818     case PRE_MODIFY:
819       return XEXP (x, 1);
820 
821     case POST_DEC:
822     case POST_INC:
823     case POST_MODIFY:
824       return XEXP (x, 0);
825 
826     default:
827       return x;
828     }
829 }
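
/* Hypothetical example of the above: inside (mem:SI (pre_dec:SI (reg R))),
   the address splits into the base register and a constant offset of
   minus the size of the accessed mode, assuming 4-byte SImode here.
   This is what lets autoinc addresses compare and hash like their
   PLUS equivalents.  */
#if 0
static void
example_autoinc_split (rtx addr /* (pre_dec:SI (reg R)) */)
{
  rtx off = NULL_RTX;
  rtx base = autoinc_split (addr, &off, SImode);
  gcc_checking_assert (REG_P (base)
		       && CONST_INT_P (off)
		       && INTVAL (off) == -4);
}
#endif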
830 
831 /* Return nonzero if we can prove that X and Y contain the same value,
832    taking our gathered information into account.  MEMMODE holds the
833    mode of the enclosing MEM, if any, as required to deal with autoinc
834    addressing modes.  If X and Y are not (known to be) part of
835    addresses, MEMMODE should be VOIDmode.  */
836 
837 int
838 rtx_equal_for_cselib_1 (rtx x, rtx y, machine_mode memmode, int depth)
839 {
840   enum rtx_code code;
841   const char *fmt;
842   int i;
843 
844   if (REG_P (x) || MEM_P (x))
845     {
846       cselib_val *e = cselib_lookup (x, GET_MODE (x), 0, memmode);
847 
848       if (e)
849 	x = e->val_rtx;
850     }
851 
852   if (REG_P (y) || MEM_P (y))
853     {
854       cselib_val *e = cselib_lookup (y, GET_MODE (y), 0, memmode);
855 
856       if (e)
857 	y = e->val_rtx;
858     }
859 
860   if (x == y)
861     return 1;
862 
863   if (GET_CODE (x) == VALUE)
864     {
865       cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (x));
866       struct elt_loc_list *l;
867 
868       if (GET_CODE (y) == VALUE)
869 	return e == canonical_cselib_val (CSELIB_VAL_PTR (y));
870 
871       if (depth == 128)
872 	return 0;
873 
874       for (l = e->locs; l; l = l->next)
875 	{
876 	  rtx t = l->loc;
877 
878 	  /* Avoid infinite recursion.  We know we have the canonical
879 	     value, so we can just skip any values in the equivalence
880 	     list.  */
881 	  if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
882 	    continue;
883 	  else if (rtx_equal_for_cselib_1 (t, y, memmode, depth + 1))
884 	    return 1;
885 	}
886 
887       return 0;
888     }
889   else if (GET_CODE (y) == VALUE)
890     {
891       cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (y));
892       struct elt_loc_list *l;
893 
894       if (depth == 128)
895 	return 0;
896 
897       for (l = e->locs; l; l = l->next)
898 	{
899 	  rtx t = l->loc;
900 
901 	  if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
902 	    continue;
903 	  else if (rtx_equal_for_cselib_1 (x, t, memmode, depth + 1))
904 	    return 1;
905 	}
906 
907       return 0;
908     }
909 
910   if (GET_MODE (x) != GET_MODE (y))
911     return 0;
912 
913   if (GET_CODE (x) != GET_CODE (y))
914     {
915       rtx xorig = x, yorig = y;
916       rtx xoff = NULL, yoff = NULL;
917 
918       x = autoinc_split (x, &xoff, memmode);
919       y = autoinc_split (y, &yoff, memmode);
920 
921       if (!xoff != !yoff)
922 	return 0;
923 
924       if (xoff && !rtx_equal_for_cselib_1 (xoff, yoff, memmode, depth))
925 	return 0;
926 
927       /* Don't recurse if nothing changed.  */
928       if (x != xorig || y != yorig)
929 	return rtx_equal_for_cselib_1 (x, y, memmode, depth);
930 
931       return 0;
932     }
933 
934   /* These won't be handled correctly by the code below.  */
935   switch (GET_CODE (x))
936     {
937     CASE_CONST_UNIQUE:
938     case DEBUG_EXPR:
939       return 0;
940 
941     case DEBUG_IMPLICIT_PTR:
942       return DEBUG_IMPLICIT_PTR_DECL (x)
943 	     == DEBUG_IMPLICIT_PTR_DECL (y);
944 
945     case DEBUG_PARAMETER_REF:
946       return DEBUG_PARAMETER_REF_DECL (x)
947 	     == DEBUG_PARAMETER_REF_DECL (y);
948 
949     case ENTRY_VALUE:
950       /* ENTRY_VALUEs are function invariant, it is thus undesirable to
951 	 use rtx_equal_for_cselib_1 to compare the operands.  */
952       return rtx_equal_p (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
953 
954     case LABEL_REF:
955       return label_ref_label (x) == label_ref_label (y);
956 
957     case REG:
958       return REGNO (x) == REGNO (y);
959 
960     case MEM:
961       /* We have to compare any autoinc operations in the addresses
962 	 using this MEM's mode.  */
963       return rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 0), GET_MODE (x),
964 				     depth);
965 
966     default:
967       break;
968     }
969 
970   code = GET_CODE (x);
971   fmt = GET_RTX_FORMAT (code);
972 
973   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
974     {
975       int j;
976 
977       switch (fmt[i])
978 	{
979 	case 'w':
980 	  if (XWINT (x, i) != XWINT (y, i))
981 	    return 0;
982 	  break;
983 
984 	case 'n':
985 	case 'i':
986 	  if (XINT (x, i) != XINT (y, i))
987 	    return 0;
988 	  break;
989 
990 	case 'p':
991 	  if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
992 	    return 0;
993 	  break;
994 
995 	case 'V':
996 	case 'E':
997 	  /* Two vectors must have the same length.  */
998 	  if (XVECLEN (x, i) != XVECLEN (y, i))
999 	    return 0;
1000 
1001 	  /* And the corresponding elements must match.  */
1002 	  for (j = 0; j < XVECLEN (x, i); j++)
1003 	    if (! rtx_equal_for_cselib_1 (XVECEXP (x, i, j),
1004 					  XVECEXP (y, i, j), memmode, depth))
1005 	      return 0;
1006 	  break;
1007 
1008 	case 'e':
1009 	  if (i == 1
1010 	      && targetm.commutative_p (x, UNKNOWN)
1011 	      && rtx_equal_for_cselib_1 (XEXP (x, 1), XEXP (y, 0), memmode,
1012 					 depth)
1013 	      && rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 1), memmode,
1014 					 depth))
1015 	    return 1;
1016 	  if (! rtx_equal_for_cselib_1 (XEXP (x, i), XEXP (y, i), memmode,
1017 					depth))
1018 	    return 0;
1019 	  break;
1020 
1021 	case 'S':
1022 	case 's':
1023 	  if (strcmp (XSTR (x, i), XSTR (y, i)))
1024 	    return 0;
1025 	  break;
1026 
1027 	case 'u':
1028 	  /* These are just backpointers, so they don't matter.  */
1029 	  break;
1030 
1031 	case '0':
1032 	case 't':
1033 	  break;
1034 
1035 	  /* It is believed that rtx's at this level will never
1036 	     contain anything but integers and other rtx's,
1037 	     except for within LABEL_REFs and SYMBOL_REFs.  */
1038 	default:
1039 	  gcc_unreachable ();
1040 	}
1041     }
1042   return 1;
1043 }
1044 
1045 /* Hash an rtx.  Return 0 if we couldn't hash the rtx.
1046    For registers and memory locations, we look up their cselib_val structure
1047    and return its VALUE element.
1048    Possible reasons for returning 0 are: the object is volatile, or we couldn't
1049    find a register or memory location in the table and CREATE is zero.  If
1050    CREATE is nonzero, table elts are created for regs and mem.
1051    N.B. this hash function returns the same hash value for RTXes that
1052    differ only in the order of operands, thus it is suitable for comparisons
1053    that take commutativity into account.
1054    If we wanted to also support associative rules, we'd have to use a different
1055    strategy to avoid returning spurious 0, e.g. return ~(~0U >> 1).
1056    MEMMODE indicates the mode of an enclosing MEM, and it's only
1057    used to compute autoinc values.
1058    We used to have a MODE argument for hashing for CONST_INTs, but that
1059    didn't make sense, since it caused spurious hash differences between
1060     (set (reg:SI 1) (const_int))
1061     (plus:SI (reg:SI 2) (reg:SI 1))
1062    and
1063     (plus:SI (reg:SI 2) (const_int))
1064    If the mode is important in any context, it must be checked specifically
1065    in a comparison anyway, since relying on hash differences is unsafe.  */
1066 
1067 static unsigned int
1068 cselib_hash_rtx (rtx x, int create, machine_mode memmode)
1069 {
1070   cselib_val *e;
1071   poly_int64 offset;
1072   int i, j;
1073   enum rtx_code code;
1074   const char *fmt;
1075   unsigned int hash = 0;
1076 
1077   code = GET_CODE (x);
1078   hash += (unsigned) code + (unsigned) GET_MODE (x);
1079 
1080   switch (code)
1081     {
1082     case VALUE:
1083       e = CSELIB_VAL_PTR (x);
1084       return e->hash;
1085 
1086     case MEM:
1087     case REG:
1088       e = cselib_lookup (x, GET_MODE (x), create, memmode);
1089       if (! e)
1090 	return 0;
1091 
1092       return e->hash;
1093 
1094     case DEBUG_EXPR:
1095       hash += ((unsigned) DEBUG_EXPR << 7)
1096 	      + DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x));
1097       return hash ? hash : (unsigned int) DEBUG_EXPR;
1098 
1099     case DEBUG_IMPLICIT_PTR:
1100       hash += ((unsigned) DEBUG_IMPLICIT_PTR << 7)
1101 	      + DECL_UID (DEBUG_IMPLICIT_PTR_DECL (x));
1102       return hash ? hash : (unsigned int) DEBUG_IMPLICIT_PTR;
1103 
1104     case DEBUG_PARAMETER_REF:
1105       hash += ((unsigned) DEBUG_PARAMETER_REF << 7)
1106 	      + DECL_UID (DEBUG_PARAMETER_REF_DECL (x));
1107       return hash ? hash : (unsigned int) DEBUG_PARAMETER_REF;
1108 
1109     case ENTRY_VALUE:
1110       /* ENTRY_VALUEs are function invariant, thus try to avoid
1111 	 recursing on the argument if ENTRY_VALUE is one of the
1112 	 forms emitted by expand_debug_expr, otherwise
1113 	 ENTRY_VALUE hash would depend on the current value
1114 	 in some register or memory.  */
1115       if (REG_P (ENTRY_VALUE_EXP (x)))
1116 	hash += (unsigned int) REG
1117 		+ (unsigned int) GET_MODE (ENTRY_VALUE_EXP (x))
1118 		+ (unsigned int) REGNO (ENTRY_VALUE_EXP (x));
1119       else if (MEM_P (ENTRY_VALUE_EXP (x))
1120 	       && REG_P (XEXP (ENTRY_VALUE_EXP (x), 0)))
1121 	hash += (unsigned int) MEM
1122 		+ (unsigned int) GET_MODE (XEXP (ENTRY_VALUE_EXP (x), 0))
1123 		+ (unsigned int) REGNO (XEXP (ENTRY_VALUE_EXP (x), 0));
1124       else
1125 	hash += cselib_hash_rtx (ENTRY_VALUE_EXP (x), create, memmode);
1126       return hash ? hash : (unsigned int) ENTRY_VALUE;
1127 
1128     case CONST_INT:
1129       hash += ((unsigned) CONST_INT << 7) + UINTVAL (x);
1130       return hash ? hash : (unsigned int) CONST_INT;
1131 
1132     case CONST_WIDE_INT:
1133       for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
1134 	hash += CONST_WIDE_INT_ELT (x, i);
1135       return hash;
1136 
1137     case CONST_POLY_INT:
1138       {
1139 	inchash::hash h;
1140 	h.add_int (hash);
1141 	for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1142 	  h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
1143 	return h.end ();
1144       }
1145 
1146     case CONST_DOUBLE:
1147       /* This is like the general case, except that it only counts
1148 	 the integers representing the constant.  */
1149       hash += (unsigned) code + (unsigned) GET_MODE (x);
1150       if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (x) == VOIDmode)
1151 	hash += ((unsigned) CONST_DOUBLE_LOW (x)
1152 		 + (unsigned) CONST_DOUBLE_HIGH (x));
1153       else
1154 	hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
1155       return hash ? hash : (unsigned int) CONST_DOUBLE;
1156 
1157     case CONST_FIXED:
1158       hash += (unsigned int) code + (unsigned int) GET_MODE (x);
1159       hash += fixed_hash (CONST_FIXED_VALUE (x));
1160       return hash ? hash : (unsigned int) CONST_FIXED;
1161 
1162     case CONST_VECTOR:
1163       {
1164 	int units;
1165 	rtx elt;
1166 
1167 	units = const_vector_encoded_nelts (x);
1168 
1169 	for (i = 0; i < units; ++i)
1170 	  {
1171 	    elt = CONST_VECTOR_ENCODED_ELT (x, i);
1172 	    hash += cselib_hash_rtx (elt, 0, memmode);
1173 	  }
1174 
1175 	return hash;
1176       }
1177 
1178       /* Assume there is only one rtx object for any given label.  */
1179     case LABEL_REF:
1180       /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
1181 	 differences and differences between each stage's debugging dumps.  */
1182       hash += (((unsigned int) LABEL_REF << 7)
1183 	       + CODE_LABEL_NUMBER (label_ref_label (x)));
1184       return hash ? hash : (unsigned int) LABEL_REF;
1185 
1186     case SYMBOL_REF:
1187       {
1188 	/* Don't hash on the symbol's address to avoid bootstrap differences.
1189 	   Different hash values may cause expressions to be recorded in
1190 	   different orders and thus different registers to be used in the
1191 	   final assembler.  This also avoids differences in the dump files
1192 	   between various stages.  */
1193 	unsigned int h = 0;
1194 	const unsigned char *p = (const unsigned char *) XSTR (x, 0);
1195 
1196 	while (*p)
1197 	  h += (h << 7) + *p++; /* ??? revisit */
1198 
1199 	hash += ((unsigned int) SYMBOL_REF << 7) + h;
1200 	return hash ? hash : (unsigned int) SYMBOL_REF;
1201       }
1202 
1203     case PRE_DEC:
1204     case PRE_INC:
1205       /* We can't compute these without knowing the MEM mode.  */
1206       gcc_assert (memmode != VOIDmode);
1207       offset = GET_MODE_SIZE (memmode);
1208       if (code == PRE_DEC)
1209 	offset = -offset;
1210       /* Adjust the hash so that (mem:MEMMODE (pre_* (reg))) hashes
1211 	 like (mem:MEMMODE (plus (reg) (const_int I))).  */
1212       hash += (unsigned) PLUS - (unsigned)code
1213 	+ cselib_hash_rtx (XEXP (x, 0), create, memmode)
1214 	+ cselib_hash_rtx (gen_int_mode (offset, GET_MODE (x)),
1215 			   create, memmode);
1216       return hash ? hash : 1 + (unsigned) PLUS;
1217 
1218     case PRE_MODIFY:
1219       gcc_assert (memmode != VOIDmode);
1220       return cselib_hash_rtx (XEXP (x, 1), create, memmode);
1221 
1222     case POST_DEC:
1223     case POST_INC:
1224     case POST_MODIFY:
1225       gcc_assert (memmode != VOIDmode);
1226       return cselib_hash_rtx (XEXP (x, 0), create, memmode);
1227 
1228     case PC:
1229     case CC0:
1230     case CALL:
1231     case UNSPEC_VOLATILE:
1232       return 0;
1233 
1234     case ASM_OPERANDS:
1235       if (MEM_VOLATILE_P (x))
1236 	return 0;
1237 
1238       break;
1239 
1240     default:
1241       break;
1242     }
1243 
1244   i = GET_RTX_LENGTH (code) - 1;
1245   fmt = GET_RTX_FORMAT (code);
1246   for (; i >= 0; i--)
1247     {
1248       switch (fmt[i])
1249 	{
1250 	case 'e':
1251 	  {
1252 	    rtx tem = XEXP (x, i);
1253 	    unsigned int tem_hash = cselib_hash_rtx (tem, create, memmode);
1254 
1255 	    if (tem_hash == 0)
1256 	      return 0;
1257 
1258 	    hash += tem_hash;
1259 	  }
1260 	  break;
1261 	case 'E':
1262 	  for (j = 0; j < XVECLEN (x, i); j++)
1263 	    {
1264 	      unsigned int tem_hash
1265 		= cselib_hash_rtx (XVECEXP (x, i, j), create, memmode);
1266 
1267 	      if (tem_hash == 0)
1268 		return 0;
1269 
1270 	      hash += tem_hash;
1271 	    }
1272 	  break;
1273 
1274 	case 's':
1275 	  {
1276 	    const unsigned char *p = (const unsigned char *) XSTR (x, i);
1277 
1278 	    if (p)
1279 	      while (*p)
1280 		hash += *p++;
1281 	    break;
1282 	  }
1283 
1284 	case 'i':
1285 	  hash += XINT (x, i);
1286 	  break;
1287 
1288 	case 'p':
1289 	  hash += constant_lower_bound (SUBREG_BYTE (x));
1290 	  break;
1291 
1292 	case '0':
1293 	case 't':
1294 	  /* unused */
1295 	  break;
1296 
1297 	default:
1298 	  gcc_unreachable ();
1299 	}
1300     }
1301 
1302   return hash ? hash : 1 + (unsigned int) GET_CODE (x);
1303 }
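
/* Hypothetical check of the commutativity property documented before the
   function: operand hashes are combined by addition, so swapping the
   operands of a commutative code leaves the hash unchanged.  */
#if 0
static void
example_commutative_hash (rtx a, rtx b)
{
  rtx x = gen_rtx_PLUS (SImode, a, b);
  rtx y = gen_rtx_PLUS (SImode, b, a);
  gcc_checking_assert (cselib_hash_rtx (x, 0, VOIDmode)
		       == cselib_hash_rtx (y, 0, VOIDmode));
}
#endif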
1304 
1305 /* Create a new cselib_val structure with hash value HASH and mode MODE,
1306    and initialize it.  X is used only for dumping purposes.  */
1307 
1308 static inline cselib_val *
1309 new_cselib_val (unsigned int hash, machine_mode mode, rtx x)
1310 {
1311   cselib_val *e = cselib_val_pool.allocate ();
1312 
1313   gcc_assert (hash);
1314   gcc_assert (next_uid);
1315 
1316   e->hash = hash;
1317   e->uid = next_uid++;
1318   /* We use an alloc pool to allocate this RTL construct because it
1319      accounts for about 8% of the overall memory usage.  We know
1320      precisely when we can have VALUE RTXen (when cselib is active)
1321      so we don't need to put them in garbage collected memory.
1322      ??? Why should a VALUE be an RTX in the first place?  */
1323   e->val_rtx = (rtx_def*) value_pool.allocate ();
1324   memset (e->val_rtx, 0, RTX_HDR_SIZE);
1325   PUT_CODE (e->val_rtx, VALUE);
1326   PUT_MODE (e->val_rtx, mode);
1327   CSELIB_VAL_PTR (e->val_rtx) = e;
1328   e->addr_list = 0;
1329   e->locs = 0;
1330   e->next_containing_mem = 0;
1331 
1332   if (dump_file && (dump_flags & TDF_CSELIB))
1333     {
1334       fprintf (dump_file, "cselib value %u:%u ", e->uid, hash);
1335       if (flag_dump_noaddr || flag_dump_unnumbered)
1336 	fputs ("# ", dump_file);
1337       else
1338 	fprintf (dump_file, "%p ", (void*)e);
1339       print_rtl_single (dump_file, x);
1340       fputc ('\n', dump_file);
1341     }
1342 
1343   return e;
1344 }
1345 
1346 /* ADDR_ELT is a value that is used as address.  MEM_ELT is the value that
1347    contains the data at this address.  X is a MEM that represents the
1348    value.  Update the two value structures to represent this situation.  */
1349 
1350 static void
1351 add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x)
1352 {
1353   addr_elt = canonical_cselib_val (addr_elt);
1354   mem_elt = canonical_cselib_val (mem_elt);
1355 
1356   /* Avoid duplicates.  */
1357   addr_space_t as = MEM_ADDR_SPACE (x);
1358   for (elt_loc_list *l = mem_elt->locs; l; l = l->next)
1359     if (MEM_P (l->loc)
1360 	&& CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt
1361         && MEM_ADDR_SPACE (l->loc) == as)
1362       {
1363 	promote_debug_loc (l);
1364 	return;
1365       }
1366 
1367   addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
1368   new_elt_loc_list (mem_elt,
1369 		    replace_equiv_address_nv (x, addr_elt->val_rtx));
1370   if (mem_elt->next_containing_mem == NULL)
1371     {
1372       mem_elt->next_containing_mem = first_containing_mem;
1373       first_containing_mem = mem_elt;
1374     }
1375 }
1376 
1377 /* Subroutine of cselib_lookup.  Return a value for X, which is a MEM rtx.
1378    If CREATE, make a new one if we haven't seen it before.  */
1379 
1380 static cselib_val *
1381 cselib_lookup_mem (rtx x, int create)
1382 {
1383   machine_mode mode = GET_MODE (x);
1384   machine_mode addr_mode;
1385   cselib_val **slot;
1386   cselib_val *addr;
1387   cselib_val *mem_elt;
1388 
1389   if (MEM_VOLATILE_P (x) || mode == BLKmode
1390       || !cselib_record_memory
1391       || (FLOAT_MODE_P (mode) && flag_float_store))
1392     return 0;
1393 
1394   addr_mode = GET_MODE (XEXP (x, 0));
1395   if (addr_mode == VOIDmode)
1396     addr_mode = Pmode;
1397 
1398   /* Look up the value for the address.  */
1399   addr = cselib_lookup (XEXP (x, 0), addr_mode, create, mode);
1400   if (! addr)
1401     return 0;
1402   addr = canonical_cselib_val (addr);
1403 
1404   /* Find a value that describes a value of our mode at that address.  */
1405   addr_space_t as = MEM_ADDR_SPACE (x);
1406   for (elt_list *l = addr->addr_list; l; l = l->next)
1407     if (GET_MODE (l->elt->val_rtx) == mode)
1408       {
1409 	for (elt_loc_list *l2 = l->elt->locs; l2; l2 = l2->next)
1410 	  if (MEM_P (l2->loc) && MEM_ADDR_SPACE (l2->loc) == as)
1411 	    {
1412 	      promote_debug_loc (l->elt->locs);
1413 	      return l->elt;
1414 	    }
1415       }
1416 
1417   if (! create)
1418     return 0;
1419 
1420   mem_elt = new_cselib_val (next_uid, mode, x);
1421   add_mem_for_addr (addr, mem_elt, x);
1422   slot = cselib_find_slot (mode, x, mem_elt->hash, INSERT, VOIDmode);
1423   *slot = mem_elt;
1424   return mem_elt;
1425 }
1426 
1427 /* Search through the possible substitutions in P.  We prefer a non-reg
1428    substitution because this allows us to expand the tree further.  If
1429    we find just a reg, take the lowest regno.  There may be several
1430    non-reg results; we just take the first one because they will all
1431    expand to the same place.  */
1432 
1433 static rtx
1434 expand_loc (struct elt_loc_list *p, struct expand_value_data *evd,
1435 	    int max_depth)
1436 {
1437   rtx reg_result = NULL;
1438   unsigned int regno = UINT_MAX;
1439   struct elt_loc_list *p_in = p;
1440 
1441   for (; p; p = p->next)
1442     {
1443       /* Return these right away to avoid returning stack pointer based
1444 	 expressions for frame pointer and vice versa, which is something
1445 	 that would confuse DSE.  See the comment in cselib_expand_value_rtx_1
1446 	 for more details.  */
1447       if (REG_P (p->loc)
1448 	  && (REGNO (p->loc) == STACK_POINTER_REGNUM
1449 	      || REGNO (p->loc) == FRAME_POINTER_REGNUM
1450 	      || REGNO (p->loc) == HARD_FRAME_POINTER_REGNUM
1451 	      || REGNO (p->loc) == cfa_base_preserved_regno))
1452 	return p->loc;
1453       /* Avoid infinite recursion trying to expand a reg into
1454 	 the same reg.  */
1455       if ((REG_P (p->loc))
1456 	  && (REGNO (p->loc) < regno)
1457 	  && !bitmap_bit_p (evd->regs_active, REGNO (p->loc)))
1458 	{
1459 	  reg_result = p->loc;
1460 	  regno = REGNO (p->loc);
1461 	}
1462       /* Avoid infinite recursion and do not try to expand the
1463 	 value.  */
1464       else if (GET_CODE (p->loc) == VALUE
1465 	       && CSELIB_VAL_PTR (p->loc)->locs == p_in)
1466 	continue;
1467       else if (!REG_P (p->loc))
1468 	{
1469 	  rtx result, note;
1470 	  if (dump_file && (dump_flags & TDF_CSELIB))
1471 	    {
1472 	      print_inline_rtx (dump_file, p->loc, 0);
1473 	      fprintf (dump_file, "\n");
1474 	    }
1475 	  if (GET_CODE (p->loc) == LO_SUM
1476 	      && GET_CODE (XEXP (p->loc, 1)) == SYMBOL_REF
1477 	      && p->setting_insn
1478 	      && (note = find_reg_note (p->setting_insn, REG_EQUAL, NULL_RTX))
1479 	      && XEXP (note, 0) == XEXP (p->loc, 1))
1480 	    return XEXP (p->loc, 1);
1481 	  result = cselib_expand_value_rtx_1 (p->loc, evd, max_depth - 1);
1482 	  if (result)
1483 	    return result;
1484 	}
1485 
1486     }
1487 
1488   if (regno != UINT_MAX)
1489     {
1490       rtx result;
1491       if (dump_file && (dump_flags & TDF_CSELIB))
1492 	fprintf (dump_file, "r%d\n", regno);
1493 
1494       result = cselib_expand_value_rtx_1 (reg_result, evd, max_depth - 1);
1495       if (result)
1496 	return result;
1497     }
1498 
1499   if (dump_file && (dump_flags & TDF_CSELIB))
1500     {
1501       if (reg_result)
1502 	{
1503 	  print_inline_rtx (dump_file, reg_result, 0);
1504 	  fprintf (dump_file, "\n");
1505 	}
1506       else
1507 	fprintf (dump_file, "NULL\n");
1508     }
1509   return reg_result;
1510 }
1511 
1512 
1513 /* Forward substitute and expand an expression out to its roots.
1514    This is the opposite of common subexpression elimination.  Because
1515    local value numbering is such a weak optimization, the expanded
1516    expression is pretty much unique (not from a pointer-equality point
1517    of view but from a tree-shape point of view).
1518 
1519    This function returns NULL if the expansion fails.  The expansion
1520    will fail if there is no value number for one of the operands or if
1521    one of the operands has been overwritten between the current insn
1522    and the beginning of the basic block.  For instance x has no
1523    expansion in:
1524 
1525    r1 <- r1 + 3
1526    x <- r1 + 8
1527 
1528    REGS_ACTIVE is a scratch bitmap that should be clear when passed in;
1529    it is clear again on return.  */
1530 
1531 rtx
1532 cselib_expand_value_rtx (rtx orig, bitmap regs_active, int max_depth)
1533 {
1534   struct expand_value_data evd;
1535 
1536   evd.regs_active = regs_active;
1537   evd.callback = NULL;
1538   evd.callback_arg = NULL;
1539   evd.dummy = false;
1540 
1541   return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
1542 }
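
/* Hypothetical caller (names are made up): expanding a MEM address with
   an empty scratch bitmap and a small depth limit, roughly the way dse
   uses this interface.  */
#if 0
static rtx
example_expand_address (rtx mem)
{
  bitmap regs_active = BITMAP_ALLOC (NULL);
  rtx expanded = cselib_expand_value_rtx (XEXP (mem, 0), regs_active, 5);
  BITMAP_FREE (regs_active);
  /* NULL here means some operand had no known value or was overwritten
     since the start of the basic block.  */
  return expanded;
}
#endif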
1543 
1544 /* Same as cselib_expand_value_rtx, but using a callback to try to
1545    resolve some expressions.  The CB function should return ORIG if it
1546    can't or does not want to deal with a certain RTX.  Any other
1547    return value, including NULL, will be used as the expansion for
1548    VALUE, without any further changes.  */
1549 
1550 rtx
1551 cselib_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
1552 			    cselib_expand_callback cb, void *data)
1553 {
1554   struct expand_value_data evd;
1555 
1556   evd.regs_active = regs_active;
1557   evd.callback = cb;
1558   evd.callback_arg = data;
1559   evd.dummy = false;
1560 
1561   return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
1562 }
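
/* Hypothetical callback (the name is made up): make DEBUG_EXPRs expand
   to NULL, and decline everything else by returning ORIG so that the
   default expansion is used.  */
#if 0
static rtx
example_expand_cb (rtx orig, bitmap regs_active ATTRIBUTE_UNUSED,
		   int max_depth ATTRIBUTE_UNUSED,
		   void *data ATTRIBUTE_UNUSED)
{
  if (GET_CODE (orig) == DEBUG_EXPR)
    return NULL_RTX;
  return orig;
}
#endif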
1563 
1564 /* Similar to cselib_expand_value_rtx_cb, but no rtxs are actually copied
1565    or simplified.  Useful to find out whether cselib_expand_value_rtx_cb
1566    would return NULL or non-NULL, without allocating new rtx.  */
1567 
1568 bool
1569 cselib_dummy_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
1570 				  cselib_expand_callback cb, void *data)
1571 {
1572   struct expand_value_data evd;
1573 
1574   evd.regs_active = regs_active;
1575   evd.callback = cb;
1576   evd.callback_arg = data;
1577   evd.dummy = true;
1578 
1579   return cselib_expand_value_rtx_1 (orig, &evd, max_depth) != NULL;
1580 }
1581 
1582 /* Internal implementation of cselib_expand_value_rtx and
1583    cselib_expand_value_rtx_cb.  */
1584 
1585 static rtx
1586 cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd,
1587 			   int max_depth)
1588 {
1589   rtx copy, scopy;
1590   int i, j;
1591   RTX_CODE code;
1592   const char *format_ptr;
1593   machine_mode mode;
1594 
1595   code = GET_CODE (orig);
1596 
1597   /* In the context of dse, if we end up expanding into a huge tree, we
1598      will not have a useful address, so we might as well just give up
1599      quickly.  */
1600   if (max_depth <= 0)
1601     return NULL;
1602 
1603   switch (code)
1604     {
1605     case REG:
1606       {
1607 	struct elt_list *l = REG_VALUES (REGNO (orig));
1608 
1609 	if (l && l->elt == NULL)
1610 	  l = l->next;
1611 	for (; l; l = l->next)
1612 	  if (GET_MODE (l->elt->val_rtx) == GET_MODE (orig))
1613 	    {
1614 	      rtx result;
1615 	      unsigned regno = REGNO (orig);
1616 
1617 	      /* The only thing that we are not willing to do (this
1618 		 is a requirement of dse; if other potential uses
1619 		 need this function we should add a parm to control
1620 		 it) is that we will not substitute the
1621 		 STACK_POINTER_REGNUM, FRAME_POINTER or the
1622 		 HARD_FRAME_POINTER.
1623 
1624 		 These expansions confuse the code that notices that
1625 		 stores into the frame go dead at the end of the
1626 		 function and that the frame is not affected by calls
1627 		 to subroutines.  If you allow the
1628 		 STACK_POINTER_REGNUM substitution, then dse will
1629 		 think that parameter pushing also goes dead which is
1630 		 wrong.  If you allow the FRAME_POINTER or the
1631 		 HARD_FRAME_POINTER then you lose the opportunity to
1632 		 make the frame assumptions.  */
1633 	      if (regno == STACK_POINTER_REGNUM
1634 		  || regno == FRAME_POINTER_REGNUM
1635 		  || regno == HARD_FRAME_POINTER_REGNUM
1636 		  || regno == cfa_base_preserved_regno)
1637 		return orig;
1638 
1639 	      bitmap_set_bit (evd->regs_active, regno);
1640 
1641 	      if (dump_file && (dump_flags & TDF_CSELIB))
1642 		fprintf (dump_file, "expanding: r%d into: ", regno);
1643 
1644 	      result = expand_loc (l->elt->locs, evd, max_depth);
1645 	      bitmap_clear_bit (evd->regs_active, regno);
1646 
1647 	      if (result)
1648 		return result;
1649 	      else
1650 		return orig;
1651 	    }
1652 	return orig;
1653       }
1654 
1655     CASE_CONST_ANY:
1656     case SYMBOL_REF:
1657     case CODE_LABEL:
1658     case PC:
1659     case CC0:
1660     case SCRATCH:
1661       /* SCRATCH rtxen must be shared because they represent distinct values.  */
1662       return orig;
1663     case CLOBBER:
1664       if (REG_P (XEXP (orig, 0)) && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))))
1665 	return orig;
1666       break;
1667 
1668     case CONST:
1669       if (shared_const_p (orig))
1670 	return orig;
1671       break;
1672 
1673     case SUBREG:
1674       {
1675 	rtx subreg;
1676 
1677 	if (evd->callback)
1678 	  {
1679 	    subreg = evd->callback (orig, evd->regs_active, max_depth,
1680 				    evd->callback_arg);
1681 	    if (subreg != orig)
1682 	      return subreg;
1683 	  }
1684 
1685 	subreg = cselib_expand_value_rtx_1 (SUBREG_REG (orig), evd,
1686 					    max_depth - 1);
1687 	if (!subreg)
1688 	  return NULL;
1689 	scopy = simplify_gen_subreg (GET_MODE (orig), subreg,
1690 				     GET_MODE (SUBREG_REG (orig)),
1691 				     SUBREG_BYTE (orig));
1692 	if (scopy == NULL
1693 	    || (GET_CODE (scopy) == SUBREG
1694 		&& !REG_P (SUBREG_REG (scopy))
1695 		&& !MEM_P (SUBREG_REG (scopy))))
1696 	  return NULL;
1697 
1698 	return scopy;
1699       }
1700 
1701     case VALUE:
1702       {
1703 	rtx result;
1704 
1705 	if (dump_file && (dump_flags & TDF_CSELIB))
1706 	  {
1707 	    fputs ("\nexpanding ", dump_file);
1708 	    print_rtl_single (dump_file, orig);
1709 	    fputs (" into...", dump_file);
1710 	  }
1711 
1712 	if (evd->callback)
1713 	  {
1714 	    result = evd->callback (orig, evd->regs_active, max_depth,
1715 				    evd->callback_arg);
1716 
1717 	    if (result != orig)
1718 	      return result;
1719 	  }
1720 
1721 	result = expand_loc (CSELIB_VAL_PTR (orig)->locs, evd, max_depth);
1722 	return result;
1723       }
1724 
1725     case DEBUG_EXPR:
1726       if (evd->callback)
1727 	return evd->callback (orig, evd->regs_active, max_depth,
1728 			      evd->callback_arg);
1729       return orig;
1730 
1731     default:
1732       break;
1733     }
1734 
1735   /* Copy the various flags, fields, and other information.  We assume
1736      that all fields need copying, and then clear the fields that should
1737      not be copied.  That is the sensible default behavior, and forces
1738      us to explicitly document why we are *not* copying a flag.  */
1739   if (evd->dummy)
1740     copy = NULL;
1741   else
1742     copy = shallow_copy_rtx (orig);
1743 
1744   format_ptr = GET_RTX_FORMAT (code);
1745 
1746   for (i = 0; i < GET_RTX_LENGTH (code); i++)
1747     switch (*format_ptr++)
1748       {
1749       case 'e':
1750 	if (XEXP (orig, i) != NULL)
1751 	  {
1752 	    rtx result = cselib_expand_value_rtx_1 (XEXP (orig, i), evd,
1753 						    max_depth - 1);
1754 	    if (!result)
1755 	      return NULL;
1756 	    if (copy)
1757 	      XEXP (copy, i) = result;
1758 	  }
1759 	break;
1760 
1761       case 'E':
1762       case 'V':
1763 	if (XVEC (orig, i) != NULL)
1764 	  {
1765 	    if (copy)
1766 	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
1767 	    for (j = 0; j < XVECLEN (orig, i); j++)
1768 	      {
1769 		rtx result = cselib_expand_value_rtx_1 (XVECEXP (orig, i, j),
1770 							evd, max_depth - 1);
1771 		if (!result)
1772 		  return NULL;
1773 		if (copy)
1774 		  XVECEXP (copy, i, j) = result;
1775 	      }
1776 	  }
1777 	break;
1778 
1779       case 't':
1780       case 'w':
1781       case 'i':
1782       case 's':
1783       case 'S':
1784       case 'T':
1785       case 'u':
1786       case 'B':
1787       case '0':
1788 	/* These are left unchanged.  */
1789 	break;
1790 
1791       default:
1792 	gcc_unreachable ();
1793       }
1794 
1795   if (evd->dummy)
1796     return orig;
1797 
1798   mode = GET_MODE (copy);
1799   /* If an operand has been simplified into a CONST_INT, which doesn't
1800      have a mode, and the mode isn't derivable from the whole rtx's mode,
1801      try simplify_*_operation first with the mode of the original operand,
1802      and as a fallback wrap the CONST_INT in gen_rtx_CONST.  */
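  /* Illustrative case: if (zero_extend:DI (reg:SI 1)) had its operand
     expanded to (const_int 42), the CONST_INT carries no mode of its
     own, so SImode is recovered from the original operand before
     calling simplify_unary_operation below.  */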
1803   scopy = copy;
1804   switch (GET_RTX_CLASS (code))
1805     {
1806     case RTX_UNARY:
1807       if (CONST_INT_P (XEXP (copy, 0))
1808 	  && GET_MODE (XEXP (orig, 0)) != VOIDmode)
1809 	{
1810 	  scopy = simplify_unary_operation (code, mode, XEXP (copy, 0),
1811 					    GET_MODE (XEXP (orig, 0)));
1812 	  if (scopy)
1813 	    return scopy;
1814 	}
1815       break;
1816     case RTX_COMM_ARITH:
1817     case RTX_BIN_ARITH:
1818       /* These expressions can derive operand modes from the whole rtx's mode.  */
1819       break;
1820     case RTX_TERNARY:
1821     case RTX_BITFIELD_OPS:
1822       if (CONST_INT_P (XEXP (copy, 0))
1823 	  && GET_MODE (XEXP (orig, 0)) != VOIDmode)
1824 	{
1825 	  scopy = simplify_ternary_operation (code, mode,
1826 					      GET_MODE (XEXP (orig, 0)),
1827 					      XEXP (copy, 0), XEXP (copy, 1),
1828 					      XEXP (copy, 2));
1829 	  if (scopy)
1830 	    return scopy;
1831 	}
1832       break;
1833     case RTX_COMPARE:
1834     case RTX_COMM_COMPARE:
1835       if (CONST_INT_P (XEXP (copy, 0))
1836 	  && GET_MODE (XEXP (copy, 1)) == VOIDmode
1837 	  && (GET_MODE (XEXP (orig, 0)) != VOIDmode
1838 	      || GET_MODE (XEXP (orig, 1)) != VOIDmode))
1839 	{
1840 	  scopy = simplify_relational_operation (code, mode,
1841 						 (GET_MODE (XEXP (orig, 0))
1842 						  != VOIDmode)
1843 						 ? GET_MODE (XEXP (orig, 0))
1844 						 : GET_MODE (XEXP (orig, 1)),
1845 						 XEXP (copy, 0),
1846 						 XEXP (copy, 1));
1847 	  if (scopy)
1848 	    return scopy;
1849 	}
1850       break;
1851     default:
1852       break;
1853     }
1854   scopy = simplify_rtx (copy);
1855   if (scopy)
1856     return scopy;
1857   return copy;
1858 }
1859 
1860 /* Walk rtx X and replace all occurrences of REG and MEM subexpressions
1861    with VALUE expressions.  This way, it becomes independent of changes
1862    to registers and memory.
1863    X isn't actually modified; if modifications are needed, new rtl is
1864    allocated.  However, the return value can share rtl with X.
1865    If X is within a MEM, MEMMODE must be the mode of the MEM.  */
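/* As an illustration, (plus:SI (reg:SI 1) (const_int 4)) would
   typically become (plus:SI (value:SI Vn) (const_int 4)), where Vn
   stands for the VALUE currently recorded for (reg:SI 1).  */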
1866 
1867 rtx
1868 cselib_subst_to_values (rtx x, machine_mode memmode)
1869 {
1870   enum rtx_code code = GET_CODE (x);
1871   const char *fmt = GET_RTX_FORMAT (code);
1872   cselib_val *e;
1873   struct elt_list *l;
1874   rtx copy = x;
1875   int i;
1876   poly_int64 offset;
1877 
1878   switch (code)
1879     {
1880     case REG:
1881       l = REG_VALUES (REGNO (x));
1882       if (l && l->elt == NULL)
1883 	l = l->next;
1884       for (; l; l = l->next)
1885 	if (GET_MODE (l->elt->val_rtx) == GET_MODE (x))
1886 	  return l->elt->val_rtx;
1887 
1888       gcc_unreachable ();
1889 
1890     case MEM:
1891       e = cselib_lookup_mem (x, 0);
1892       /* This used to happen for autoincrements, but we deal with them
1893 		 properly now.  Remove the if statement for the next release.  */
1894       if (! e)
1895 	{
1896 	  /* Assign a value that doesn't match any other.  */
1897 	  e = new_cselib_val (next_uid, GET_MODE (x), x);
1898 	}
1899       return e->val_rtx;
1900 
1901     case ENTRY_VALUE:
1902       e = cselib_lookup (x, GET_MODE (x), 0, memmode);
1903       if (! e)
1904 	break;
1905       return e->val_rtx;
1906 
1907     CASE_CONST_ANY:
1908       return x;
1909 
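    /* Autoincrement addresses are equated with their expanded form:
       e.g. with a 4-byte MEMMODE, (pre_dec:SI (reg:SI sp)) is looked
       up as if it were (plus:SI (reg:SI sp) (const_int -4)).  */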
1910     case PRE_DEC:
1911     case PRE_INC:
1912       gcc_assert (memmode != VOIDmode);
1913       offset = GET_MODE_SIZE (memmode);
1914       if (code == PRE_DEC)
1915 	offset = -offset;
1916       return cselib_subst_to_values (plus_constant (GET_MODE (x),
1917 						    XEXP (x, 0), offset),
1918 				     memmode);
1919 
1920     case PRE_MODIFY:
1921       gcc_assert (memmode != VOIDmode);
1922       return cselib_subst_to_values (XEXP (x, 1), memmode);
1923 
1924     case POST_DEC:
1925     case POST_INC:
1926     case POST_MODIFY:
1927       gcc_assert (memmode != VOIDmode);
1928       return cselib_subst_to_values (XEXP (x, 0), memmode);
1929 
1930     default:
1931       break;
1932     }
1933 
1934   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1935     {
1936       if (fmt[i] == 'e')
1937 	{
1938 	  rtx t = cselib_subst_to_values (XEXP (x, i), memmode);
1939 
1940 	  if (t != XEXP (x, i))
1941 	    {
1942 	      if (x == copy)
1943 		copy = shallow_copy_rtx (x);
1944 	      XEXP (copy, i) = t;
1945 	    }
1946 	}
1947       else if (fmt[i] == 'E')
1948 	{
1949 	  int j;
1950 
1951 	  for (j = 0; j < XVECLEN (x, i); j++)
1952 	    {
1953 	      rtx t = cselib_subst_to_values (XVECEXP (x, i, j), memmode);
1954 
1955 	      if (t != XVECEXP (x, i, j))
1956 		{
1957 		  if (XVEC (x, i) == XVEC (copy, i))
1958 		    {
1959 		      if (x == copy)
1960 			copy = shallow_copy_rtx (x);
1961 		      XVEC (copy, i) = shallow_copy_rtvec (XVEC (x, i));
1962 		    }
1963 		  XVECEXP (copy, i, j) = t;
1964 		}
1965 	    }
1966 	}
1967     }
1968 
1969   return copy;
1970 }
1971 
1972 /* Wrapper for cselib_subst_to_values, that indicates X is in INSN.  */
1973 
1974 rtx
1975 cselib_subst_to_values_from_insn (rtx x, machine_mode memmode, rtx_insn *insn)
1976 {
1977   rtx ret;
1978   gcc_assert (!cselib_current_insn);
1979   cselib_current_insn = insn;
1980   ret = cselib_subst_to_values (x, memmode);
1981   cselib_current_insn = NULL;
1982   return ret;
1983 }
1984 
1985 /* Look up the rtl expression X in our tables and return the value it
1986    has.  If CREATE is zero, we return NULL if we don't know the value.
1987    Otherwise, we create a new one if possible, using mode MODE if X
1988    doesn't have a mode (i.e. because it's a constant).  When X is part
1989    of an address, MEMMODE should be the mode of the enclosing MEM if
1990    we're tracking autoinc expressions.  */
1991 
1992 static cselib_val *
1993 cselib_lookup_1 (rtx x, machine_mode mode,
1994 		 int create, machine_mode memmode)
1995 {
1996   cselib_val **slot;
1997   cselib_val *e;
1998   unsigned int hashval;
1999 
2000   if (GET_MODE (x) != VOIDmode)
2001     mode = GET_MODE (x);
2002 
2003   if (GET_CODE (x) == VALUE)
2004     return CSELIB_VAL_PTR (x);
2005 
2006   if (REG_P (x))
2007     {
2008       struct elt_list *l;
2009       unsigned int i = REGNO (x);
2010 
2011       l = REG_VALUES (i);
2012       if (l && l->elt == NULL)
2013 	l = l->next;
2014       for (; l; l = l->next)
2015 	if (mode == GET_MODE (l->elt->val_rtx))
2016 	  {
2017 	    promote_debug_loc (l->elt->locs);
2018 	    return l->elt;
2019 	  }
2020 
2021       if (! create)
2022 	return 0;
2023 
2024       if (i < FIRST_PSEUDO_REGISTER)
2025 	{
2026 	  unsigned int n = hard_regno_nregs (i, mode);
2027 
2028 	  if (n > max_value_regs)
2029 	    max_value_regs = n;
2030 	}
2031 
2032       e = new_cselib_val (next_uid, GET_MODE (x), x);
2033       new_elt_loc_list (e, x);
2034 
2035       scalar_int_mode int_mode;
2036       if (REG_VALUES (i) == 0)
2037 	{
2038 	  /* Maintain the invariant that the first entry of
2039 	     REG_VALUES, if present, must be the value used to set the
2040 	     register, or NULL.  */
2041 	  used_regs[n_used_regs++] = i;
2042 	  REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
2043 	}
2044       else if (cselib_preserve_constants
2045 	       && is_int_mode (mode, &int_mode))
2046 	{
2047 	  /* During var-tracking, try harder to find equivalences
2048 	     for SUBREGs.  If a setter sets, say, a DImode register
2049 	     and the user uses that register only in SImode, add a
2050 	     lowpart subreg location.  */
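	  /* E.g. if the setter wrote all of (reg:DI 5) and X is
	     (reg:SI 5), a lowpart SUBREG of the wider DImode VALUE is
	     added below as an extra location of the new value.  */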
2051 	  struct elt_list *lwider = NULL;
2052 	  scalar_int_mode lmode;
2053 	  l = REG_VALUES (i);
2054 	  if (l && l->elt == NULL)
2055 	    l = l->next;
2056 	  for (; l; l = l->next)
2057 	    if (is_int_mode (GET_MODE (l->elt->val_rtx), &lmode)
2058 		&& GET_MODE_SIZE (lmode) > GET_MODE_SIZE (int_mode)
2059 		&& (lwider == NULL
2060 		    || partial_subreg_p (lmode,
2061 					 GET_MODE (lwider->elt->val_rtx))))
2062 	      {
2063 		struct elt_loc_list *el;
2064 		if (i < FIRST_PSEUDO_REGISTER
2065 		    && hard_regno_nregs (i, lmode) != 1)
2066 		  continue;
2067 		for (el = l->elt->locs; el; el = el->next)
2068 		  if (!REG_P (el->loc))
2069 		    break;
2070 		if (el)
2071 		  lwider = l;
2072 	      }
2073 	  if (lwider)
2074 	    {
2075 	      rtx sub = lowpart_subreg (int_mode, lwider->elt->val_rtx,
2076 					GET_MODE (lwider->elt->val_rtx));
2077 	      if (sub)
2078 		new_elt_loc_list (e, sub);
2079 	    }
2080 	}
2081       REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
2082       slot = cselib_find_slot (mode, x, e->hash, INSERT, memmode);
2083       *slot = e;
2084       return e;
2085     }
2086 
2087   if (MEM_P (x))
2088     return cselib_lookup_mem (x, create);
2089 
2090   hashval = cselib_hash_rtx (x, create, memmode);
2091   /* Can't even create if hashing is not possible.  */
2092   if (! hashval)
2093     return 0;
2094 
2095   slot = cselib_find_slot (mode, x, hashval,
2096 			   create ? INSERT : NO_INSERT, memmode);
2097   if (slot == 0)
2098     return 0;
2099 
2100   e = (cselib_val *) *slot;
2101   if (e)
2102     return e;
2103 
2104   e = new_cselib_val (hashval, mode, x);
2105 
2106   /* We have to fill the slot before calling cselib_subst_to_values:
2107      the hash table is inconsistent until we do so, and
2108      cselib_subst_to_values will need to do lookups.  */
2109   *slot = e;
2110   new_elt_loc_list (e, cselib_subst_to_values (x, memmode));
2111   return e;
2112 }
2113 
2114 /* Wrapper for cselib_lookup, that indicates X is in INSN.  */
2115 
2116 cselib_val *
2117 cselib_lookup_from_insn (rtx x, machine_mode mode,
2118 			 int create, machine_mode memmode, rtx_insn *insn)
2119 {
2120   cselib_val *ret;
2121 
2122   gcc_assert (!cselib_current_insn);
2123   cselib_current_insn = insn;
2124 
2125   ret = cselib_lookup (x, mode, create, memmode);
2126 
2127   cselib_current_insn = NULL;
2128 
2129   return ret;
2130 }
2131 
2132 /* Wrapper for cselib_lookup_1, that logs the lookup result and
2133    maintains invariants related with debug insns.  */
2134 
2135 cselib_val *
2136 cselib_lookup (rtx x, machine_mode mode,
2137 	       int create, machine_mode memmode)
2138 {
2139   cselib_val *ret = cselib_lookup_1 (x, mode, create, memmode);
2140 
2141   /* ??? Should we return NULL if we're not to create an entry, the
2142 	     found loc is a debug loc, and cselib_current_insn is not DEBUG?
2143 	     If so, we should also avoid converting val to non-DEBUG; probably
2144 	     easiest by setting cselib_current_insn to NULL before the call
2145      above.  */
2146 
2147   if (dump_file && (dump_flags & TDF_CSELIB))
2148     {
2149       fputs ("cselib lookup ", dump_file);
2150       print_inline_rtx (dump_file, x, 2);
2151       fprintf (dump_file, " => %u:%u\n",
2152 	       ret ? ret->uid : 0,
2153 	       ret ? ret->hash : 0);
2154     }
2155 
2156   return ret;
2157 }
2158 
2159 /* Invalidate any entries in reg_values that overlap REGNO.  This is called
2160    if REGNO is changing.  MODE is the mode of the assignment to REGNO, which
2161    is used to determine how many hard registers are being changed.  If MODE
2162    is VOIDmode, then only REGNO is being changed; this is used when
2163    invalidating call clobbered registers across a call.  */
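/* E.g. if a DImode value occupies hard registers 0 and 1, an
   assignment to (reg:SI 1) must invalidate it too; this is why the
   scan below starts max_value_regs registers below REGNO.  */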
2164 
2165 static void
2166 cselib_invalidate_regno (unsigned int regno, machine_mode mode)
2167 {
2168   unsigned int endregno;
2169   unsigned int i;
2170 
2171   /* If we see pseudos after reload, something is _wrong_.  */
2172   gcc_assert (!reload_completed || regno < FIRST_PSEUDO_REGISTER
2173 	      || reg_renumber[regno] < 0);
2174 
2175   /* Determine the range of registers that must be invalidated.  For
2176      pseudos, only REGNO is affected.  For hard regs, we must take MODE
2177      into account, and we must also invalidate lower register numbers
2178      if they contain values that overlap REGNO.  */
2179   if (regno < FIRST_PSEUDO_REGISTER)
2180     {
2181       gcc_assert (mode != VOIDmode);
2182 
2183       if (regno < max_value_regs)
2184 	i = 0;
2185       else
2186 	i = regno - max_value_regs;
2187 
2188       endregno = end_hard_regno (mode, regno);
2189     }
2190   else
2191     {
2192       i = regno;
2193       endregno = regno + 1;
2194     }
2195 
2196   for (; i < endregno; i++)
2197     {
2198       struct elt_list **l = &REG_VALUES (i);
2199 
2200       /* Go through all known values for this reg; if it overlaps the range
2201 	 we're invalidating, remove the value.  */
2202       while (*l)
2203 	{
2204 	  cselib_val *v = (*l)->elt;
2205 	  bool had_locs;
2206 	  rtx_insn *setting_insn;
2207 	  struct elt_loc_list **p;
2208 	  unsigned int this_last = i;
2209 
2210 	  if (i < FIRST_PSEUDO_REGISTER && v != NULL)
2211 	    this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;
2212 
2213 	  if (this_last < regno || v == NULL
2214 	      || (v == cfa_base_preserved_val
2215 		  && i == cfa_base_preserved_regno))
2216 	    {
2217 	      l = &(*l)->next;
2218 	      continue;
2219 	    }
2220 
2221 	  /* We have an overlap.  */
2222 	  if (*l == REG_VALUES (i))
2223 	    {
2224 	      /* Maintain the invariant that the first entry of
2225 		 REG_VALUES, if present, must be the value used to set
2226 		 the register, or NULL.  This is also nice because
2227 		 then we won't push the same regno onto used_regs
2228 		 multiple times.  */
2229 	      (*l)->elt = NULL;
2230 	      l = &(*l)->next;
2231 	    }
2232 	  else
2233 	    unchain_one_elt_list (l);
2234 
2235 	  v = canonical_cselib_val (v);
2236 
2237 	  had_locs = v->locs != NULL;
2238 	  setting_insn = v->locs ? v->locs->setting_insn : NULL;
2239 
2240 	  /* Now, we clear the mapping from value to reg.  It must exist, so
2241 	     this code will crash intentionally if it doesn't.  */
2242 	  for (p = &v->locs; ; p = &(*p)->next)
2243 	    {
2244 	      rtx x = (*p)->loc;
2245 
2246 	      if (REG_P (x) && REGNO (x) == i)
2247 		{
2248 		  unchain_one_elt_loc_list (p);
2249 		  break;
2250 		}
2251 	    }
2252 
2253 	  if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
2254 	    {
2255 	      if (setting_insn && DEBUG_INSN_P (setting_insn))
2256 		n_useless_debug_values++;
2257 	      else
2258 		n_useless_values++;
2259 	    }
2260 	}
2261     }
2262 }
2263 
2264 /* Invalidate any locations in the table which are changed because of a
2265    store to MEM_RTX.  If this is called because of a non-const call
2266    instruction, MEM_RTX is (mem:BLK const0_rtx).  */
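/* Note that once PARAM_MAX_CSELIB_MEMORY_LOCATIONS independent MEM
   locations have been retained during one invalidation, any further
   MEM location is discarded conservatively, even when no overlap with
   the store is proven.  */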
2267 
2268 static void
2269 cselib_invalidate_mem (rtx mem_rtx)
2270 {
2271   cselib_val **vp, *v, *next;
2272   int num_mems = 0;
2273   rtx mem_addr;
2274 
2275   mem_addr = canon_rtx (get_addr (XEXP (mem_rtx, 0)));
2276   mem_rtx = canon_rtx (mem_rtx);
2277 
2278   vp = &first_containing_mem;
2279   for (v = *vp; v != &dummy_val; v = next)
2280     {
2281       bool has_mem = false;
2282       struct elt_loc_list **p = &v->locs;
2283       bool had_locs = v->locs != NULL;
2284       rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;
2285 
2286       while (*p)
2287 	{
2288 	  rtx x = (*p)->loc;
2289 	  cselib_val *addr;
2290 	  struct elt_list **mem_chain;
2291 
2292 	  /* MEMs may occur in locations only at the top level; below
2293 	     that every MEM or REG is substituted by its VALUE.  */
2294 	  if (!MEM_P (x))
2295 	    {
2296 	      p = &(*p)->next;
2297 	      continue;
2298 	    }
2299 	  if (num_mems < PARAM_VALUE (PARAM_MAX_CSELIB_MEMORY_LOCATIONS)
2300 	      && ! canon_anti_dependence (x, false, mem_rtx,
2301 					  GET_MODE (mem_rtx), mem_addr))
2302 	    {
2303 	      has_mem = true;
2304 	      num_mems++;
2305 	      p = &(*p)->next;
2306 	      continue;
2307 	    }
2308 
2309 	  /* This one overlaps.  */
2310 	  /* We must have a mapping from this MEM's address to the
2311 	     value (V).  Remove that, too.  */
2312 	  addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0, GET_MODE (x));
2313 	  addr = canonical_cselib_val (addr);
2314 	  gcc_checking_assert (v == canonical_cselib_val (v));
2315 	  mem_chain = &addr->addr_list;
2316 	  for (;;)
2317 	    {
2318 	      cselib_val *canon = canonical_cselib_val ((*mem_chain)->elt);
2319 
2320 	      if (canon == v)
2321 		{
2322 		  unchain_one_elt_list (mem_chain);
2323 		  break;
2324 		}
2325 
2326 	      /* Record canonicalized elt.  */
2327 	      (*mem_chain)->elt = canon;
2328 
2329 	      mem_chain = &(*mem_chain)->next;
2330 	    }
2331 
2332 	  unchain_one_elt_loc_list (p);
2333 	}
2334 
2335       if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
2336 	{
2337 	  if (setting_insn && DEBUG_INSN_P (setting_insn))
2338 	    n_useless_debug_values++;
2339 	  else
2340 	    n_useless_values++;
2341 	}
2342 
2343       next = v->next_containing_mem;
2344       if (has_mem)
2345 	{
2346 	  *vp = v;
2347 	  vp = &(*vp)->next_containing_mem;
2348 	}
2349       else
2350 	v->next_containing_mem = NULL;
2351     }
2352   *vp = &dummy_val;
2353 }
2354 
2355 /* Invalidate DEST, which is being assigned to or clobbered.  */
2356 
2357 void
2358 cselib_invalidate_rtx (rtx dest)
2359 {
2360   while (GET_CODE (dest) == SUBREG
2361 	 || GET_CODE (dest) == ZERO_EXTRACT
2362 	 || GET_CODE (dest) == STRICT_LOW_PART)
2363     dest = XEXP (dest, 0);
2364 
2365   if (REG_P (dest))
2366     cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
2367   else if (MEM_P (dest))
2368     cselib_invalidate_mem (dest);
2369 }
2370 
2371 /* A wrapper for cselib_invalidate_rtx to be called via note_stores.  */
2372 
2373 static void
2374 cselib_invalidate_rtx_note_stores (rtx dest, const_rtx ignore ATTRIBUTE_UNUSED,
2375 				   void *data ATTRIBUTE_UNUSED)
2376 {
2377   cselib_invalidate_rtx (dest);
2378 }
2379 
2380 /* Record the result of a SET instruction.  DEST is being set; the source
2381    contains the value described by SRC_ELT.  If DEST is a MEM, DEST_ADDR_ELT
2382    describes its address.  */
2383 
2384 static void
2385 cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt)
2386 {
2387   if (src_elt == 0 || side_effects_p (dest))
2388     return;
2389 
2390   if (REG_P (dest))
2391     {
2392       unsigned int dreg = REGNO (dest);
2393       if (dreg < FIRST_PSEUDO_REGISTER)
2394 	{
2395 	  unsigned int n = REG_NREGS (dest);
2396 
2397 	  if (n > max_value_regs)
2398 	    max_value_regs = n;
2399 	}
2400 
2401       if (REG_VALUES (dreg) == 0)
2402 	{
2403 	  used_regs[n_used_regs++] = dreg;
2404 	  REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
2405 	}
2406       else
2407 	{
2408 	  /* The register should have been invalidated.  */
2409 	  gcc_assert (REG_VALUES (dreg)->elt == 0);
2410 	  REG_VALUES (dreg)->elt = src_elt;
2411 	}
2412 
2413       if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
2414 	n_useless_values--;
2415       new_elt_loc_list (src_elt, dest);
2416     }
2417   else if (MEM_P (dest) && dest_addr_elt != 0
2418 	   && cselib_record_memory)
2419     {
2420       if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
2421 	n_useless_values--;
2422       add_mem_for_addr (dest_addr_elt, src_elt, dest);
2423     }
2424 }
2425 
2426 /* Make ELT and X's VALUE equivalent to each other at INSN.  */
2427 
2428 void
2429 cselib_add_permanent_equiv (cselib_val *elt, rtx x, rtx_insn *insn)
2430 {
2431   cselib_val *nelt;
2432   rtx_insn *save_cselib_current_insn = cselib_current_insn;
2433 
2434   gcc_checking_assert (elt);
2435   gcc_checking_assert (PRESERVED_VALUE_P (elt->val_rtx));
2436   gcc_checking_assert (!side_effects_p (x));
2437 
2438   cselib_current_insn = insn;
2439 
2440   nelt = cselib_lookup (x, GET_MODE (elt->val_rtx), 1, VOIDmode);
2441 
2442   if (nelt != elt)
2443     {
2444       cselib_any_perm_equivs = true;
2445 
2446       if (!PRESERVED_VALUE_P (nelt->val_rtx))
2447 	cselib_preserve_value (nelt);
2448 
2449       new_elt_loc_list (nelt, elt->val_rtx);
2450     }
2451 
2452   cselib_current_insn = save_cselib_current_insn;
2453 }
2454 
2455 /* Return TRUE if any permanent equivalences have been recorded since
2456    the table was last initialized.  */
2457 bool
2458 cselib_have_permanent_equivalences (void)
2459 {
2460   return cselib_any_perm_equivs;
2461 }
2462 
2463 /* There is no good way to determine how many elements there can be
2464 	   in a PARALLEL.  Since a large array is fairly cheap, use a really large number.  */
2465 #define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)
2466 
2467 struct cselib_record_autoinc_data
2468 {
2469   struct cselib_set *sets;
2470   int n_sets;
2471 };
2472 
2473 /* Callback for for_each_inc_dec.  Records in ARG the SETs implied by
2474 	   autoinc RTXs: SRC, plus SRCOFF if that is non-NULL, is stored in DEST.  */
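/* E.g. (mem:SI (post_inc:SI (reg:SI 1))) on a 32-bit target implies a
   recorded set of (reg:SI 1) from (plus:SI (reg:SI 1) (const_int 4)).  */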
2475 
2476 static int
2477 cselib_record_autoinc_cb (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
2478 			  rtx dest, rtx src, rtx srcoff, void *arg)
2479 {
2480   struct cselib_record_autoinc_data *data;
2481   data = (struct cselib_record_autoinc_data *)arg;
2482 
2483   data->sets[data->n_sets].dest = dest;
2484 
2485   if (srcoff)
2486     data->sets[data->n_sets].src = gen_rtx_PLUS (GET_MODE (src), src, srcoff);
2487   else
2488     data->sets[data->n_sets].src = src;
2489 
2490   data->n_sets++;
2491 
2492   return 0;
2493 }
2494 
2495 /* Record the effects of any sets and autoincs in INSN.  */
2496 static void
2497 cselib_record_sets (rtx_insn *insn)
2498 {
2499   int n_sets = 0;
2500   int i;
2501   struct cselib_set sets[MAX_SETS];
2502   rtx body = PATTERN (insn);
2503   rtx cond = 0;
2504   int n_sets_before_autoinc;
2505   int n_strict_low_parts = 0;
2506   struct cselib_record_autoinc_data data;
2507 
2509   if (GET_CODE (body) == COND_EXEC)
2510     {
2511       cond = COND_EXEC_TEST (body);
2512       body = COND_EXEC_CODE (body);
2513     }
2514 
2515   /* Find all sets.  */
2516   if (GET_CODE (body) == SET)
2517     {
2518       sets[0].src = SET_SRC (body);
2519       sets[0].dest = SET_DEST (body);
2520       n_sets = 1;
2521     }
2522   else if (GET_CODE (body) == PARALLEL)
2523     {
2524       /* Look through the PARALLEL and record the values being
2525 	 set, if possible.  Also handle any CLOBBERs.  */
2526       for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
2527 	{
2528 	  rtx x = XVECEXP (body, 0, i);
2529 
2530 	  if (GET_CODE (x) == SET)
2531 	    {
2532 	      sets[n_sets].src = SET_SRC (x);
2533 	      sets[n_sets].dest = SET_DEST (x);
2534 	      n_sets++;
2535 	    }
2536 	}
2537     }
2538 
2539   if (n_sets == 1
2540       && MEM_P (sets[0].src)
2541       && !cselib_record_memory
2542       && MEM_READONLY_P (sets[0].src))
2543     {
2544       rtx note = find_reg_equal_equiv_note (insn);
2545 
2546       if (note && CONSTANT_P (XEXP (note, 0)))
2547 	sets[0].src = XEXP (note, 0);
2548     }
2549 
2550   data.sets = sets;
2551   data.n_sets = n_sets_before_autoinc = n_sets;
2552   for_each_inc_dec (PATTERN (insn), cselib_record_autoinc_cb, &data);
2553   n_sets = data.n_sets;
2554 
2555   /* Look up the values that are read.  Do this before invalidating the
2556      locations that are written.  */
2557   for (i = 0; i < n_sets; i++)
2558     {
2559       rtx dest = sets[i].dest;
2560       rtx orig = dest;
2561 
2562       /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
2563          the low part after invalidating any knowledge about larger modes.  */
2564       if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
2565 	sets[i].dest = dest = XEXP (dest, 0);
2566 
2567       /* We don't know how to record anything but REG or MEM.  */
2568       if (REG_P (dest)
2569 	  || (MEM_P (dest) && cselib_record_memory))
2570         {
2571 	  rtx src = sets[i].src;
2572 	  if (cond)
2573 	    src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest);
2574 	  sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1, VOIDmode);
2575 	  if (MEM_P (dest))
2576 	    {
2577 	      machine_mode address_mode = get_address_mode (dest);
2578 
2579 	      sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
2580 						     address_mode, 1,
2581 						     GET_MODE (dest));
2582 	    }
2583 	  else
2584 	    sets[i].dest_addr_elt = 0;
2585 	}
2586 
2587       /* Improve handling of STRICT_LOW_PART if the current value is known
2588 	 to be const0_rtx, then the low bits will be set to dest and higher
2589 	 bits will remain zero.  Used in code like:
2590 
2591 	 {di:SI=0;clobber flags:CC;}
2592 	 flags:CCNO=cmp(bx:SI,0)
2593 	 strict_low_part(di:QI)=flags:CCNO<=0
2594 
2595 	 where we can note both that di:QI=flags:CCNO<=0 and
2596 	 also that because di:SI is known to be 0 and strict_low_part(di:QI)
2597 	 preserves the upper bits that di:SI=zero_extend(flags:CCNO<=0).  */
2598       scalar_int_mode mode;
2599       if (dest != orig
2600 	  && cselib_record_sets_hook
2601 	  && REG_P (dest)
2602 	  && HARD_REGISTER_P (dest)
2603 	  && is_a <scalar_int_mode> (GET_MODE (dest), &mode)
2604 	  && n_sets + n_strict_low_parts < MAX_SETS)
2605 	{
2606 	  opt_scalar_int_mode wider_mode_iter;
2607 	  FOR_EACH_WIDER_MODE (wider_mode_iter, mode)
2608 	    {
2609 	      scalar_int_mode wider_mode = wider_mode_iter.require ();
2610 	      if (GET_MODE_PRECISION (wider_mode) > BITS_PER_WORD)
2611 		break;
2612 
2613 	      rtx reg = gen_lowpart (wider_mode, dest);
2614 	      if (!REG_P (reg))
2615 		break;
2616 
2617 	      cselib_val *v = cselib_lookup (reg, wider_mode, 0, VOIDmode);
2618 	      if (!v)
2619 		continue;
2620 
2621 	      struct elt_loc_list *l;
2622 	      for (l = v->locs; l; l = l->next)
2623 		if (l->loc == const0_rtx)
2624 		  break;
2625 
2626 	      if (!l)
2627 		continue;
2628 
2629 	      sets[n_sets + n_strict_low_parts].dest = reg;
2630 	      sets[n_sets + n_strict_low_parts].src = dest;
2631 	      sets[n_sets + n_strict_low_parts++].src_elt = sets[i].src_elt;
2632 	      break;
2633 	    }
2634 	}
2635     }
2636 
2637   if (cselib_record_sets_hook)
2638     cselib_record_sets_hook (insn, sets, n_sets);
2639 
2640   /* Invalidate all locations written by this insn.  Note that the elts we
2641      looked up in the previous loop aren't affected, just some of their
2642      locations may go away.  */
2643   note_stores (body, cselib_invalidate_rtx_note_stores, NULL);
2644 
2645   for (i = n_sets_before_autoinc; i < n_sets; i++)
2646     cselib_invalidate_rtx (sets[i].dest);
2647 
2648   /* If this is an asm, look for duplicate sets.  This can happen when the
2649      user uses the same value as an output multiple times.  This is valid
2650      if the outputs are not actually used thereafter.  Treat this case as
2651      if the value isn't actually set.  We do this by smashing the destination
2652      to pc_rtx, so that we won't record the value later.  */
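  /* E.g. an asm statement naming the same lvalue as two "=r" outputs
     can yield two SETs of one register; both destinations are then
     smashed to pc_rtx below.  */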
2653   if (n_sets >= 2 && asm_noperands (body) >= 0)
2654     {
2655       for (i = 0; i < n_sets; i++)
2656 	{
2657 	  rtx dest = sets[i].dest;
2658 	  if (REG_P (dest) || MEM_P (dest))
2659 	    {
2660 	      int j;
2661 	      for (j = i + 1; j < n_sets; j++)
2662 		if (rtx_equal_p (dest, sets[j].dest))
2663 		  {
2664 		    sets[i].dest = pc_rtx;
2665 		    sets[j].dest = pc_rtx;
2666 		  }
2667 	    }
2668 	}
2669     }
2670 
2671   /* Now enter the equivalences in our tables.  */
2672   for (i = 0; i < n_sets; i++)
2673     {
2674       rtx dest = sets[i].dest;
2675       if (REG_P (dest)
2676 	  || (MEM_P (dest) && cselib_record_memory))
2677 	cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
2678     }
2679 
2680   /* And deal with STRICT_LOW_PART.  */
2681   for (i = 0; i < n_strict_low_parts; i++)
2682     {
2683       if (! PRESERVED_VALUE_P (sets[n_sets + i].src_elt->val_rtx))
2684 	continue;
2685       machine_mode dest_mode = GET_MODE (sets[n_sets + i].dest);
2686       cselib_val *v
2687 	= cselib_lookup (sets[n_sets + i].dest, dest_mode, 1, VOIDmode);
2688       cselib_preserve_value (v);
2689       rtx r = gen_rtx_ZERO_EXTEND (dest_mode,
2690 				   sets[n_sets + i].src_elt->val_rtx);
2691       cselib_add_permanent_equiv (v, r, insn);
2692     }
2693 }
2694 
2695 /* Return true if INSN in the prologue initializes hard_frame_pointer_rtx.  */
2696 
2697 bool
2698 fp_setter_insn (rtx_insn *insn)
2699 {
2700   rtx expr, pat = NULL_RTX;
2701 
2702   if (!RTX_FRAME_RELATED_P (insn))
2703     return false;
2704 
2705   expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
2706   if (expr)
2707     pat = XEXP (expr, 0);
2708   if (!modified_in_p (hard_frame_pointer_rtx, pat ? pat : insn))
2709     return false;
2710 
2711   /* Don't return true for frame pointer restores in the epilogue.  */
2712   if (find_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx))
2713     return false;
2714   return true;
2715 }
2716 
2717 /* Record the effects of INSN.  */
2718 
2719 void
2720 cselib_process_insn (rtx_insn *insn)
2721 {
2722   int i;
2723   rtx x;
2724 
2725   cselib_current_insn = insn;
2726 
2727   /* Forget everything at a CODE_LABEL or a setjmp.  */
2728   if ((LABEL_P (insn)
2729        || (CALL_P (insn)
2730 	   && find_reg_note (insn, REG_SETJMP, NULL)))
2731       && !cselib_preserve_constants)
2732     {
2733       cselib_reset_table (next_uid);
2734       cselib_current_insn = NULL;
2735       return;
2736     }
2737 
2738   if (! INSN_P (insn))
2739     {
2740       cselib_current_insn = NULL;
2741       return;
2742     }
2743 
2744   /* If this is a call instruction, forget anything stored in a
2745      call clobbered register, or, if this is not a const call, in
2746      memory.  */
2747   if (CALL_P (insn))
2748     {
2749       for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2750 	if (call_used_regs[i]
2751 	    || (REG_VALUES (i) && REG_VALUES (i)->elt
2752 		&& (targetm.hard_regno_call_part_clobbered
2753 		    (i, GET_MODE (REG_VALUES (i)->elt->val_rtx)))))
2754 	  cselib_invalidate_regno (i, reg_raw_mode[i]);
2755 
2756       /* Since it is not clear how cselib is going to be used, be
2757 	 conservative here and treat looping pure or const functions
2758 	 as if they were regular functions.  */
2759       if (RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)
2760 	  || !(RTL_CONST_OR_PURE_CALL_P (insn)))
2761 	cselib_invalidate_mem (callmem);
2762       else
2763 	/* For const/pure calls, invalidate any argument slots because
2764 	   they are owned by the callee.  */
2765 	for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
2766 	  if (GET_CODE (XEXP (x, 0)) == USE
2767 	      && MEM_P (XEXP (XEXP (x, 0), 0)))
2768 	    cselib_invalidate_mem (XEXP (XEXP (x, 0), 0));
2769     }
2770 
2771   cselib_record_sets (insn);
2772 
2773   /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
2774      after we have processed the insn.  */
2775   if (CALL_P (insn))
2776     {
2777       for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
2778 	if (GET_CODE (XEXP (x, 0)) == CLOBBER)
2779 	  cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));
2780 	      /* Flush everything on setjmp.  */
2781       if (cselib_preserve_constants
2782 	  && find_reg_note (insn, REG_SETJMP, NULL))
2783 	{
2784 	  cselib_preserve_only_values ();
2785 	  cselib_reset_table (next_uid);
2786 	}
2787     }
2788 
2789   /* On a setter of the hard frame pointer, if frame_pointer_needed,
2790      invalidate stack_pointer_rtx, so that sp-based and {,h}fp-based
2791      VALUEs are distinct.  */
2792   if (reload_completed
2793       && frame_pointer_needed
2794       && fp_setter_insn (insn))
2795     cselib_invalidate_rtx (stack_pointer_rtx);
2796 
2797   cselib_current_insn = NULL;
2798 
2799   if (n_useless_values > MAX_USELESS_VALUES
2800       /* remove_useless_values is linear in the hash table size.  Avoid
2801          quadratic behavior for very large hashtables with very few
2802 	 useless elements.  */
2803       && ((unsigned int)n_useless_values
2804 	  > (cselib_hash_table->elements () - n_debug_values) / 4))
2805     remove_useless_values ();
2806 }
2807 
2808 /* Initialize cselib for one pass.  The caller must also call
2809    init_alias_analysis.  */
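/* A minimal usage sketch (hypothetical driver; var-tracking, for
   instance, passes CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS):

     init_alias_analysis ();
     cselib_init (CSELIB_RECORD_MEMORY);
     FOR_BB_INSNS (bb, insn)
       cselib_process_insn (insn);
     cselib_finish ();
     end_alias_analysis ();  */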
2810 
2811 void
2812 cselib_init (int record_what)
2813 {
2814   cselib_record_memory = record_what & CSELIB_RECORD_MEMORY;
2815   cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS;
2816   cselib_any_perm_equivs = false;
2817 
2818   /* (mem:BLK (scratch)) is a special mechanism to conflict with everything,
2819      see canon_true_dependence.  This is only created once.  */
2820   if (! callmem)
2821     callmem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2822 
2823   cselib_nregs = max_reg_num ();
2824 
2825   /* We preserve reg_values to allow expensive clearing of the whole thing.
2826      However, reallocate it if it happens to be too large.  */
2827   if (!reg_values || reg_values_size < cselib_nregs
2828       || (reg_values_size > 10 && reg_values_size > cselib_nregs * 4))
2829     {
2830       free (reg_values);
2831 	      /* Some space for newly emitted instructions so we don't end up
2832 	 reallocating in between passes.  */
2833       reg_values_size = cselib_nregs + (63 + cselib_nregs) / 16;
2834       reg_values = XCNEWVEC (struct elt_list *, reg_values_size);
2835     }
2836   used_regs = XNEWVEC (unsigned int, cselib_nregs);
2837   n_used_regs = 0;
2838   cselib_hash_table = new hash_table<cselib_hasher> (31);
2839   if (cselib_preserve_constants)
2840     cselib_preserved_hash_table = new hash_table<cselib_hasher> (31);
2841   next_uid = 1;
2842 }
2843 
2844 /* Called when the current user is done with cselib.  */
2845 
2846 void
2847 cselib_finish (void)
2848 {
2849   bool preserved = cselib_preserve_constants;
2850   cselib_discard_hook = NULL;
2851   cselib_preserve_constants = false;
2852   cselib_any_perm_equivs = false;
2853   cfa_base_preserved_val = NULL;
2854   cfa_base_preserved_regno = INVALID_REGNUM;
2855   elt_list_pool.release ();
2856   elt_loc_list_pool.release ();
2857   cselib_val_pool.release ();
2858   value_pool.release ();
2859   cselib_clear_table ();
2860   delete cselib_hash_table;
2861   cselib_hash_table = NULL;
2862   if (preserved)
2863     delete cselib_preserved_hash_table;
2864   cselib_preserved_hash_table = NULL;
2865   free (used_regs);
2866   used_regs = 0;
2867   n_useless_values = 0;
2868   n_useless_debug_values = 0;
2869   n_debug_values = 0;
2870   next_uid = 0;
2871 }
2872 
2873 /* Dump the cselib_val *X to FILE *OUT.  */
2874 
2875 int
2876 dump_cselib_val (cselib_val **x, FILE *out)
2877 {
2878   cselib_val *v = *x;
2879   bool need_lf = true;
2880 
2881   print_inline_rtx (out, v->val_rtx, 0);
2882 
2883   if (v->locs)
2884     {
2885       struct elt_loc_list *l = v->locs;
2886       if (need_lf)
2887 	{
2888 	  fputc ('\n', out);
2889 	  need_lf = false;
2890 	}
2891       fputs (" locs:", out);
2892       do
2893 	{
2894 	  if (l->setting_insn)
2895 	    fprintf (out, "\n  from insn %i ",
2896 		     INSN_UID (l->setting_insn));
2897 	  else
2898 	    fprintf (out, "\n   ");
2899 	  print_inline_rtx (out, l->loc, 4);
2900 	}
2901       while ((l = l->next));
2902       fputc ('\n', out);
2903     }
2904   else
2905     {
2906       fputs (" no locs", out);
2907       need_lf = true;
2908     }
2909 
2910   if (v->addr_list)
2911     {
2912       struct elt_list *e = v->addr_list;
2913       if (need_lf)
2914 	{
2915 	  fputc ('\n', out);
2916 	  need_lf = false;
2917 	}
2918       fputs (" addr list:", out);
2919       do
2920 	{
2921 	  fputs ("\n  ", out);
2922 	  print_inline_rtx (out, e->elt->val_rtx, 2);
2923 	}
2924       while ((e = e->next));
2925       fputc ('\n', out);
2926     }
2927   else
2928     {
2929       fputs (" no addrs", out);
2930       need_lf = true;
2931     }
2932 
2933   if (v->next_containing_mem == &dummy_val)
2934     fputs (" last mem\n", out);
2935   else if (v->next_containing_mem)
2936     {
2937       fputs (" next mem ", out);
2938       print_inline_rtx (out, v->next_containing_mem->val_rtx, 2);
2939       fputc ('\n', out);
2940     }
2941   else if (need_lf)
2942     fputc ('\n', out);
2943 
2944   return 1;
2945 }
2946 
2947 /* Dump to OUT everything in the CSELIB table.  */
2948 
2949 void
2950 dump_cselib_table (FILE *out)
2951 {
2952   fprintf (out, "cselib hash table:\n");
2953   cselib_hash_table->traverse <FILE *, dump_cselib_val> (out);
2954   fprintf (out, "cselib preserved hash table:\n");
2955   cselib_preserved_hash_table->traverse <FILE *, dump_cselib_val> (out);
2956   if (first_containing_mem != &dummy_val)
2957     {
2958       fputs ("first mem ", out);
2959       print_inline_rtx (out, first_containing_mem->val_rtx, 2);
2960       fputc ('\n', out);
2961     }
2962   fprintf (out, "next uid %i\n", next_uid);
2963 }
2964 
2965 #include "gt-cselib.h"
2966