/* Common subexpression elimination library for GNU compiler.
   Copyright (C) 1987-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "regs.h"
#include "emit-rtl.h"
#include "dumpfile.h"
#include "cselib.h"
#include "function-abi.h"

/* A list of cselib_val structures.  */
struct elt_list
{
  struct elt_list *next;
  cselib_val *elt;
};

static bool cselib_record_memory;
static bool cselib_preserve_constants;
static bool cselib_any_perm_equivs;
static inline void promote_debug_loc (struct elt_loc_list *l);
static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
static void new_elt_loc_list (cselib_val *, rtx);
static void unchain_one_value (cselib_val *);
static void unchain_one_elt_list (struct elt_list **);
static void unchain_one_elt_loc_list (struct elt_loc_list **);
static void remove_useless_values (void);
static unsigned int cselib_hash_rtx (rtx, int, machine_mode);
static cselib_val *new_cselib_val (unsigned int, machine_mode, rtx);
static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
static cselib_val *cselib_lookup_mem (rtx, int);
static void cselib_invalidate_regno (unsigned int, machine_mode);
static void cselib_invalidate_mem (rtx);
static void cselib_record_set (rtx, cselib_val *, cselib_val *);
static void cselib_record_sets (rtx_insn *);
static rtx autoinc_split (rtx, rtx *, machine_mode);

#define PRESERVED_VALUE_P(RTX) \
  (RTL_FLAG_CHECK1 ("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)

#define SP_BASED_VALUE_P(RTX) \
  (RTL_FLAG_CHECK1 ("SP_BASED_VALUE_P", (RTX), VALUE)->jump)

#define SP_DERIVED_VALUE_P(RTX) \
  (RTL_FLAG_CHECK1 ("SP_DERIVED_VALUE_P", (RTX), VALUE)->call)

struct expand_value_data
{
  bitmap regs_active;
  cselib_expand_callback callback;
  void *callback_arg;
  bool dummy;
};

static rtx cselib_expand_value_rtx_1 (rtx, struct expand_value_data *, int);

/* There are three ways in which cselib can look up an rtx:
   - for a REG, the reg_values table (which is indexed by regno) is used
   - for a MEM, we recursively look up its address and then follow the
     addr_list of that value
   - for everything else, we compute a hash value and go through the hash
     table.  Since different rtx's can still have the same hash value,
     this involves walking the table entries for a given value and comparing
     the locations of the entries with the rtx we are looking up.  */

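/* For instance, looking up (mem:SI (reg:SI 1)) first consults
   REG_VALUES (1) to find the VALUE recorded for register 1, and then
   scans that VALUE's addr_list for an SImode memory value at that
   address; only other expressions, such as (plus:SI (reg:SI 1)
   (const_int 4)), go through the hash table proper.  (Illustrative
   example only.)  */
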
struct cselib_hasher : nofree_ptr_hash <cselib_val>
{
  struct key {
    /* The rtx value and its mode (needed separately for constant
       integers).  */
    machine_mode mode;
    rtx x;
    /* The mode of the containing MEM, if any, otherwise VOIDmode.  */
    machine_mode memmode;
  };
  typedef key *compare_type;
  static inline hashval_t hash (const cselib_val *);
  static inline bool equal (const cselib_val *, const key *);
};

/* The hash function for our hash table.  The value is always computed with
   cselib_hash_rtx when adding an element; this function just extracts the
   hash value from a cselib_val structure.  */

inline hashval_t
cselib_hasher::hash (const cselib_val *v)
{
  return v->hash;
}

/* The equality test for our hash table.  The first argument V is a table
   element (i.e. a cselib_val), while the second arg X is an rtx.  We know
   that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
   CONST of an appropriate mode.  */

inline bool
cselib_hasher::equal (const cselib_val *v, const key *x_arg)
{
  struct elt_loc_list *l;
  rtx x = x_arg->x;
  machine_mode mode = x_arg->mode;
  machine_mode memmode = x_arg->memmode;

  if (mode != GET_MODE (v->val_rtx))
    return false;

  if (GET_CODE (x) == VALUE)
    return x == v->val_rtx;

  if (SP_DERIVED_VALUE_P (v->val_rtx) && GET_MODE (x) == Pmode)
    {
      rtx xoff = NULL;
      if (autoinc_split (x, &xoff, memmode) == v->val_rtx && xoff == NULL_RTX)
	return true;
    }

  /* We don't guarantee that distinct rtx's have different hash values,
     so we need to do a comparison.  */
  for (l = v->locs; l; l = l->next)
    if (rtx_equal_for_cselib_1 (l->loc, x, memmode, 0))
      {
	promote_debug_loc (l);
	return true;
      }

  return false;
}

/* A table that enables us to look up elts by their value.  */
static hash_table<cselib_hasher> *cselib_hash_table;

/* A table to hold preserved values.  */
static hash_table<cselib_hasher> *cselib_preserved_hash_table;

/* This is a global so we don't have to pass it through every function.
   It is used in new_elt_loc_list to set SETTING_INSN.  */
static rtx_insn *cselib_current_insn;

/* The unique id that the next created value will take.  */
static unsigned int next_uid;

/* The number of registers we had when the varrays were last resized.  */
static unsigned int cselib_nregs;

/* Count values without known locations, or with only locations that
   wouldn't have been known except for debug insns.  Whenever this
   grows too big, we remove these useless values from the table.

   Counting values with only debug values is a bit tricky.  We don't
   want to increment n_useless_values when we create a value for a
   debug insn, for this would get n_useless_values out of sync, but we
   want to increment it if all locs in the list that were ever referenced
   in nondebug insns are removed from the list.

   In the general case, once we do that, we'd have to stop accepting
   nondebug expressions in the loc list, to avoid having two values
   equivalent that, without debug insns, would have been made into
   separate values.  However, because debug insns never introduce
   equivalences themselves (no assignments), the only means for
   growing loc lists is through nondebug assignments.  If the locs
   also happen to be referenced in debug insns, it will work just fine.

   A consequence of this is that there's at most one debug-only loc in
   each loc list.  If we keep it in the first entry, testing whether
   we have a debug-only loc list takes O(1).

   Furthermore, since any additional entry in a loc list containing a
   debug loc would have to come from an assignment (nondebug) that
   references both the initial debug loc and the newly-equivalent loc,
   the initial debug loc would be promoted to a nondebug loc, and the
   loc list would not contain debug locs any more.

   So the only case we have to be careful with in order to keep
   n_useless_values in sync between debug and nondebug compilations is
   to avoid incrementing n_useless_values when removing the single loc
   from a value that turns out to not appear outside debug values.  We
   increment n_useless_debug_values instead, and leave such values
   alone until, for other reasons, we garbage-collect useless
   values.  */
static int n_useless_values;
static int n_useless_debug_values;

/* Count values whose locs have been taken exclusively from debug
   insns for the entire life of the value.  */
static int n_debug_values;

/* Number of useless values before we remove them from the hash table.  */
#define MAX_USELESS_VALUES 32

/* This table maps from register number to values.  It does not
   contain pointers to cselib_val structures, but rather elt_lists.
   The purpose is to be able to refer to the same register in
   different modes.  The first element of the list defines the mode in
   which the register was set; if the mode is unknown or the value is
   no longer valid in that mode, ELT will be NULL for the first
   element.  */
static struct elt_list **reg_values;
static unsigned int reg_values_size;
#define REG_VALUES(i) reg_values[i]
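
/* For example, after (set (reg:DI 1) ...) is recorded, REG_VALUES (1)
   starts with an elt for the DImode value of the register; a later
   lookup of (reg:SI 1) would add another elt, for an SImode value of
   the same register.  (Illustrative sketch; see cselib_lookup.)  */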

/* The largest number of hard regs used by any entry added to the
   REG_VALUES table.  Cleared on each cselib_clear_table() invocation.  */
static unsigned int max_value_regs;

/* Here the set of indices I with REG_VALUES(I) != 0 is saved.  This is used
   in cselib_clear_table() for fast emptying.  */
static unsigned int *used_regs;
static unsigned int n_used_regs;

/* We pass this to cselib_invalidate_mem to invalidate all of
   memory for a non-const call instruction.  */
static GTY(()) rtx callmem;

/* Set by discard_useless_locs if it deleted the last location of any
   value.  */
static int values_became_useless;

/* Used as stop element of the containing_mem list so we can check
   presence in the list by checking the next pointer.  */
static cselib_val dummy_val;

/* If non-NULL, value of the eliminated arg_pointer_rtx or frame_pointer_rtx
   that is constant through the whole function and should never be
   eliminated.  */
static cselib_val *cfa_base_preserved_val;
static unsigned int cfa_base_preserved_regno = INVALID_REGNUM;

/* Used to list all values that contain a memory reference.
   May or may not contain the useless values - the list is compacted
   each time memory is invalidated.  */
static cselib_val *first_containing_mem = &dummy_val;

static object_allocator<elt_list> elt_list_pool ("elt_list");
static object_allocator<elt_loc_list> elt_loc_list_pool ("elt_loc_list");
static object_allocator<cselib_val> cselib_val_pool ("cselib_val_list");

static pool_allocator value_pool ("value", RTX_CODE_SIZE (VALUE));

/* If nonnull, cselib will call this function before freeing useless
   VALUEs.  A VALUE is deemed useless if its "locs" field is null.  */
void (*cselib_discard_hook) (cselib_val *);

/* If nonnull, cselib will call this function before recording sets or
   even clobbering outputs of INSN.  All the recorded sets will be
   represented in the array SETS, which has N_SETS elements.  new_val_min
   can be used to tell whether values present in SETS are introduced by
   this instruction.  */
void (*cselib_record_sets_hook) (rtx_insn *insn, struct cselib_set *sets,
				 int n_sets);



/* Allocate a struct elt_list and fill in its two elements with the
   arguments.  */

static inline struct elt_list *
new_elt_list (struct elt_list *next, cselib_val *elt)
{
  elt_list *el = elt_list_pool.allocate ();
  el->next = next;
  el->elt = elt;
  return el;
}

/* Allocate a struct elt_loc_list with LOC and prepend it to VAL's loc
   list.  */

static inline void
new_elt_loc_list (cselib_val *val, rtx loc)
{
  struct elt_loc_list *el, *next = val->locs;

  gcc_checking_assert (!next || !next->setting_insn
		       || !DEBUG_INSN_P (next->setting_insn)
		       || cselib_current_insn == next->setting_insn);

  /* If we're creating the first loc in a debug insn context, we've
     just created a debug value.  Count it.  */
  if (!next && cselib_current_insn && DEBUG_INSN_P (cselib_current_insn))
    n_debug_values++;

  val = canonical_cselib_val (val);
  next = val->locs;

  if (GET_CODE (loc) == VALUE)
    {
      loc = canonical_cselib_val (CSELIB_VAL_PTR (loc))->val_rtx;

      gcc_checking_assert (PRESERVED_VALUE_P (loc)
			   == PRESERVED_VALUE_P (val->val_rtx));

      if (val->val_rtx == loc)
	return;
      else if (val->uid > CSELIB_VAL_PTR (loc)->uid)
	{
	  /* Reverse the insertion.  */
	  new_elt_loc_list (CSELIB_VAL_PTR (loc), val->val_rtx);
	  return;
	}

      gcc_checking_assert (val->uid < CSELIB_VAL_PTR (loc)->uid);

      if (CSELIB_VAL_PTR (loc)->locs)
	{
	  /* Bring all locs from LOC to VAL.  */
	  for (el = CSELIB_VAL_PTR (loc)->locs; el->next; el = el->next)
	    {
	      /* Adjust values that have LOC as canonical so that VAL
		 becomes their canonical.  */
	      if (el->loc && GET_CODE (el->loc) == VALUE)
		{
		  gcc_checking_assert (CSELIB_VAL_PTR (el->loc)->locs->loc
				       == loc);
		  CSELIB_VAL_PTR (el->loc)->locs->loc = val->val_rtx;
		}
	    }
	  el->next = val->locs;
	  next = val->locs = CSELIB_VAL_PTR (loc)->locs;
	}

      if (CSELIB_VAL_PTR (loc)->addr_list)
	{
	  /* Bring the addr_list into the canonical node.  */
	  struct elt_list *last = CSELIB_VAL_PTR (loc)->addr_list;
	  while (last->next)
	    last = last->next;
	  last->next = val->addr_list;
	  val->addr_list = CSELIB_VAL_PTR (loc)->addr_list;
	  CSELIB_VAL_PTR (loc)->addr_list = NULL;
	}

      if (CSELIB_VAL_PTR (loc)->next_containing_mem != NULL
	  && val->next_containing_mem == NULL)
	{
	  /* Add VAL to the containing_mem list after LOC.  LOC will
	     be removed when we notice it doesn't contain any
	     MEMs.  */
	  val->next_containing_mem = CSELIB_VAL_PTR (loc)->next_containing_mem;
	  CSELIB_VAL_PTR (loc)->next_containing_mem = val;
	}

      /* Chain LOC back to VAL.  */
      el = elt_loc_list_pool.allocate ();
      el->loc = val->val_rtx;
      el->setting_insn = cselib_current_insn;
      el->next = NULL;
      CSELIB_VAL_PTR (loc)->locs = el;
    }

  el = elt_loc_list_pool.allocate ();
  el->loc = loc;
  el->setting_insn = cselib_current_insn;
  el->next = next;
  val->locs = el;
}
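
/* For example, if VALUEs V1 (uid 1) and V2 (uid 2) are discovered to be
   equivalent, new_elt_loc_list (V2, V1->val_rtx) reverses itself into
   new_elt_loc_list (V1, V2->val_rtx), merges V2's locs and addr_list
   into V1, and leaves V2 with a single loc pointing back at V1, so the
   lower-uid V1 becomes the canonical value.  (Illustrative example
   only.)  */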

/* Promote loc L to a nondebug cselib_current_insn if L is marked as
   originating from a debug insn, maintaining the debug values
   count.  */

static inline void
promote_debug_loc (struct elt_loc_list *l)
{
  if (l && l->setting_insn && DEBUG_INSN_P (l->setting_insn)
      && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
    {
      n_debug_values--;
      l->setting_insn = cselib_current_insn;
      if (cselib_preserve_constants && l->next)
	{
	  gcc_assert (l->next->setting_insn
		      && DEBUG_INSN_P (l->next->setting_insn)
		      && !l->next->next);
	  l->next->setting_insn = cselib_current_insn;
	}
      else
	gcc_assert (!l->next);
    }
}

/* The elt_list at *PL is no longer needed.  Unchain it and free its
   storage.  */

static inline void
unchain_one_elt_list (struct elt_list **pl)
{
  struct elt_list *l = *pl;

  *pl = l->next;
  elt_list_pool.remove (l);
}

/* Likewise for elt_loc_lists.  */

static void
unchain_one_elt_loc_list (struct elt_loc_list **pl)
{
  struct elt_loc_list *l = *pl;

  *pl = l->next;
  elt_loc_list_pool.remove (l);
}

/* Likewise for cselib_vals.  This also frees the addr_list associated with
   V.  */

static void
unchain_one_value (cselib_val *v)
{
  while (v->addr_list)
    unchain_one_elt_list (&v->addr_list);

  cselib_val_pool.remove (v);
}

/* Remove all entries from the hash table.  Also used during
   initialization.  */

void
cselib_clear_table (void)
{
  cselib_reset_table (1);
}

/* Return TRUE if V is a constant, a function invariant or a VALUE
   equivalence; FALSE otherwise.  */

static bool
invariant_or_equiv_p (cselib_val *v)
{
  struct elt_loc_list *l;

  if (v == cfa_base_preserved_val)
    return true;

  /* Keep VALUE equivalences around.  */
  for (l = v->locs; l; l = l->next)
    if (GET_CODE (l->loc) == VALUE)
      return true;

  if (v->locs != NULL
      && v->locs->next == NULL)
    {
      if (CONSTANT_P (v->locs->loc)
	  && (GET_CODE (v->locs->loc) != CONST
	      || !references_value_p (v->locs->loc, 0)))
	return true;
      /* Although a debug expr may be bound to different expressions,
	 we can preserve it as if it was constant, to get unification
	 and proper merging within var-tracking.  */
      if (GET_CODE (v->locs->loc) == DEBUG_EXPR
	  || GET_CODE (v->locs->loc) == DEBUG_IMPLICIT_PTR
	  || GET_CODE (v->locs->loc) == ENTRY_VALUE
	  || GET_CODE (v->locs->loc) == DEBUG_PARAMETER_REF)
	return true;

      /* (plus (value V) (const_int C)) is invariant iff V is invariant.  */
      if (GET_CODE (v->locs->loc) == PLUS
	  && CONST_INT_P (XEXP (v->locs->loc, 1))
	  && GET_CODE (XEXP (v->locs->loc, 0)) == VALUE
	  && invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (v->locs->loc, 0))))
	return true;
    }

  return false;
}

/* Remove from hash table all VALUEs except constants, function
   invariants and VALUE equivalences.  */

int
preserve_constants_and_equivs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = *x;

  if (invariant_or_equiv_p (v))
    {
      cselib_hasher::key lookup = {
	GET_MODE (v->val_rtx), v->val_rtx, VOIDmode
      };
      cselib_val **slot
	= cselib_preserved_hash_table->find_slot_with_hash (&lookup,
							    v->hash, INSERT);
      gcc_assert (!*slot);
      *slot = v;
    }

  cselib_hash_table->clear_slot (x);

  return 1;
}

/* Remove all entries from the hash table, arranging for the next
   value to be numbered NUM.  */

void
cselib_reset_table (unsigned int num)
{
  unsigned int i;

  max_value_regs = 0;

  if (cfa_base_preserved_val)
    {
      unsigned int regno = cfa_base_preserved_regno;
      unsigned int new_used_regs = 0;
      for (i = 0; i < n_used_regs; i++)
	if (used_regs[i] == regno)
	  {
	    new_used_regs = 1;
	    continue;
	  }
	else
	  REG_VALUES (used_regs[i]) = 0;
      gcc_assert (new_used_regs == 1);
      n_used_regs = new_used_regs;
      used_regs[0] = regno;
      max_value_regs
	= hard_regno_nregs (regno,
			    GET_MODE (cfa_base_preserved_val->locs->loc));

      /* If cfa_base is sp + const_int, we also need to preserve the
	 SP_DERIVED_VALUE_P value.  */
      for (struct elt_loc_list *l = cfa_base_preserved_val->locs;
	   l; l = l->next)
	if (GET_CODE (l->loc) == PLUS
	    && GET_CODE (XEXP (l->loc, 0)) == VALUE
	    && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
	    && CONST_INT_P (XEXP (l->loc, 1)))
	  {
	    if (! invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (l->loc, 0))))
	      {
		rtx val = cfa_base_preserved_val->val_rtx;
		rtx_insn *save_cselib_current_insn = cselib_current_insn;
		cselib_current_insn = l->setting_insn;
		new_elt_loc_list (CSELIB_VAL_PTR (XEXP (l->loc, 0)),
				  plus_constant (Pmode, val,
						 -UINTVAL (XEXP (l->loc, 1))));
		cselib_current_insn = save_cselib_current_insn;
	      }
	    break;
	  }
    }
  else
    {
      for (i = 0; i < n_used_regs; i++)
	REG_VALUES (used_regs[i]) = 0;
      n_used_regs = 0;
    }

  if (cselib_preserve_constants)
    cselib_hash_table->traverse <void *, preserve_constants_and_equivs> (NULL);
  else
    {
      cselib_hash_table->empty ();
      gcc_checking_assert (!cselib_any_perm_equivs);
    }

  n_useless_values = 0;
  n_useless_debug_values = 0;
  n_debug_values = 0;

  next_uid = num;

  first_containing_mem = &dummy_val;
}

/* Return the number of the next value that will be generated.  */

unsigned int
cselib_get_next_uid (void)
{
  return next_uid;
}

/* Search for X, whose hashcode is HASH, in CSELIB_HASH_TABLE,
   INSERTing if requested.  When X is part of the address of a MEM,
   MEMMODE should specify the mode of the MEM.  */

static cselib_val **
cselib_find_slot (machine_mode mode, rtx x, hashval_t hash,
		  enum insert_option insert, machine_mode memmode)
{
  cselib_val **slot = NULL;
  cselib_hasher::key lookup = { mode, x, memmode };
  if (cselib_preserve_constants)
    slot = cselib_preserved_hash_table->find_slot_with_hash (&lookup, hash,
							     NO_INSERT);
  if (!slot)
    slot = cselib_hash_table->find_slot_with_hash (&lookup, hash, insert);
  return slot;
}

/* Return true if X contains a VALUE rtx.  If ONLY_USELESS is set, we
   only return true for values which point to a cselib_val whose value
   element has been set to zero, which implies the cselib_val will be
   removed.  */

int
references_value_p (const_rtx x, int only_useless)
{
  const enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  int i, j;

  if (GET_CODE (x) == VALUE
      && (! only_useless
	  || (CSELIB_VAL_PTR (x)->locs == 0 && !PRESERVED_VALUE_P (x))))
    return 1;

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
	return 1;
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (references_value_p (XVECEXP (x, i, j), only_useless))
	    return 1;
    }

  return 0;
}

/* Return true if V is a useless VALUE and can be discarded as such.  */

static bool
cselib_useless_value_p (cselib_val *v)
{
  return (v->locs == 0
	  && !PRESERVED_VALUE_P (v->val_rtx)
	  && !SP_DERIVED_VALUE_P (v->val_rtx));
}

/* For all locations found in X, delete locations that reference useless
   values (i.e. values without any location).  Called through
   htab_traverse.  */

int
discard_useless_locs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = *x;
  struct elt_loc_list **p = &v->locs;
  bool had_locs = v->locs != NULL;
  rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;

  while (*p)
    {
      if (references_value_p ((*p)->loc, 1))
	unchain_one_elt_loc_list (p);
      else
	p = &(*p)->next;
    }

  if (had_locs && cselib_useless_value_p (v))
    {
      if (setting_insn && DEBUG_INSN_P (setting_insn))
	n_useless_debug_values++;
      else
	n_useless_values++;
      values_became_useless = 1;
    }
  return 1;
}

/* If X is a value with no locations, remove it from the hashtable.  */

int
discard_useless_values (cselib_val **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = *x;

  if (v->locs == 0 && cselib_useless_value_p (v))
    {
      if (cselib_discard_hook)
	cselib_discard_hook (v);

      CSELIB_VAL_PTR (v->val_rtx) = NULL;
      cselib_hash_table->clear_slot (x);
      unchain_one_value (v);
      n_useless_values--;
    }

  return 1;
}

/* Clean out useless values (i.e. those which no longer have locations
   associated with them) from the hash table.  */

static void
remove_useless_values (void)
{
  cselib_val **p, *v;

  /* First pass: eliminate locations that reference useless values.
     That in turn can make more values useless.  */
  do
    {
      values_became_useless = 0;
      cselib_hash_table->traverse <void *, discard_useless_locs> (NULL);
    }
  while (values_became_useless);

  /* Second pass: actually remove the values.  */

  p = &first_containing_mem;
  for (v = *p; v != &dummy_val; v = v->next_containing_mem)
    if (v->locs && v == canonical_cselib_val (v))
      {
	*p = v;
	p = &(*p)->next_containing_mem;
      }
  *p = &dummy_val;

  n_useless_values += n_useless_debug_values;
  n_debug_values -= n_useless_debug_values;
  n_useless_debug_values = 0;

  cselib_hash_table->traverse <void *, discard_useless_values> (NULL);

  gcc_assert (!n_useless_values);
}

/* Arrange for a value to not be removed from the hash table even if
   it becomes useless.  */

void
cselib_preserve_value (cselib_val *v)
{
  PRESERVED_VALUE_P (v->val_rtx) = 1;
}

/* Test whether a value is preserved.  */

bool
cselib_preserved_value_p (cselib_val *v)
{
  return PRESERVED_VALUE_P (v->val_rtx);
}

/* Arrange for a REG value to be assumed constant through the whole function,
   never invalidated and preserved across cselib_reset_table calls.  */

void
cselib_preserve_cfa_base_value (cselib_val *v, unsigned int regno)
{
  if (cselib_preserve_constants
      && v->locs
      && REG_P (v->locs->loc))
    {
      cfa_base_preserved_val = v;
      cfa_base_preserved_regno = regno;
    }
}

/* Clean all non-constant expressions in the hash table, but retain
   their values.  */

void
cselib_preserve_only_values (void)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    cselib_invalidate_regno (i, reg_raw_mode[i]);

  cselib_invalidate_mem (callmem);

  remove_useless_values ();

  gcc_assert (first_containing_mem == &dummy_val);
}

/* Arrange for a value to be marked as based on stack pointer
   for find_base_term purposes.  */

void
cselib_set_value_sp_based (cselib_val *v)
{
  SP_BASED_VALUE_P (v->val_rtx) = 1;
}

/* Test whether a value is based on stack pointer for
   find_base_term purposes.  */

bool
cselib_sp_based_value_p (cselib_val *v)
{
  return SP_BASED_VALUE_P (v->val_rtx);
}

/* Return the mode in which a register was last set.  If X is not a
   register, return its mode.  If the mode in which the register was
   set is not known, or the value was already clobbered, return
   VOIDmode.  */

machine_mode
cselib_reg_set_mode (const_rtx x)
{
  if (!REG_P (x))
    return GET_MODE (x);

  if (REG_VALUES (REGNO (x)) == NULL
      || REG_VALUES (REGNO (x))->elt == NULL)
    return VOIDmode;

  return GET_MODE (REG_VALUES (REGNO (x))->elt->val_rtx);
}

/* If X is a PLUS or an autoinc operation, expand the operation,
   storing the offset, if any, in *OFF.  */

static rtx
autoinc_split (rtx x, rtx *off, machine_mode memmode)
{
  switch (GET_CODE (x))
    {
    case PLUS:
      *off = XEXP (x, 1);
      x = XEXP (x, 0);
      break;

    case PRE_DEC:
      if (memmode == VOIDmode)
	return x;

      *off = gen_int_mode (-GET_MODE_SIZE (memmode), GET_MODE (x));
      x = XEXP (x, 0);
      break;

    case PRE_INC:
      if (memmode == VOIDmode)
	return x;

      *off = gen_int_mode (GET_MODE_SIZE (memmode), GET_MODE (x));
      x = XEXP (x, 0);
      break;

    case PRE_MODIFY:
      x = XEXP (x, 1);
      break;

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      x = XEXP (x, 0);
      break;

    default:
      break;
    }

  if (GET_MODE (x) == Pmode
      && (REG_P (x) || MEM_P (x) || GET_CODE (x) == VALUE)
      && (*off == NULL_RTX || CONST_INT_P (*off)))
    {
      cselib_val *e;
      if (GET_CODE (x) == VALUE)
	e = CSELIB_VAL_PTR (x);
      else
	e = cselib_lookup (x, GET_MODE (x), 0, memmode);
      if (e)
	{
	  if (SP_DERIVED_VALUE_P (e->val_rtx)
	      && (*off == NULL_RTX || *off == const0_rtx))
	    {
	      *off = NULL_RTX;
	      return e->val_rtx;
	    }
	  for (struct elt_loc_list *l = e->locs; l; l = l->next)
	    if (GET_CODE (l->loc) == PLUS
		&& GET_CODE (XEXP (l->loc, 0)) == VALUE
		&& SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
		&& CONST_INT_P (XEXP (l->loc, 1)))
	      {
		if (*off == NULL_RTX)
		  *off = XEXP (l->loc, 1);
		else
		  *off = plus_constant (Pmode, *off,
					INTVAL (XEXP (l->loc, 1)));
		if (*off == const0_rtx)
		  *off = NULL_RTX;
		return XEXP (l->loc, 0);
	      }
	}
    }
  return x;
}
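
/* For example, for a (mem:SI ...) whose address is (pre_dec:P (reg sp))
   on a target where SImode is 4 bytes wide, autoinc_split returns the
   base (or its VALUE, if one is recorded) and sets *OFF to
   (const_int -4), i.e. the same decomposition as for the address
   (plus:P (reg sp) (const_int -4)).  (Illustrative example only.)  */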

/* Return nonzero if we can prove that X and Y contain the same value,
   taking our gathered information into account.  MEMMODE holds the
   mode of the enclosing MEM, if any, as required to deal with autoinc
   addressing modes.  If X and Y are not (known to be) part of
   addresses, MEMMODE should be VOIDmode.  */

int
rtx_equal_for_cselib_1 (rtx x, rtx y, machine_mode memmode, int depth)
{
  enum rtx_code code;
  const char *fmt;
  int i;

  if (REG_P (x) || MEM_P (x))
    {
      cselib_val *e = cselib_lookup (x, GET_MODE (x), 0, memmode);

      if (e)
	x = e->val_rtx;
    }

  if (REG_P (y) || MEM_P (y))
    {
      cselib_val *e = cselib_lookup (y, GET_MODE (y), 0, memmode);

      if (e)
	y = e->val_rtx;
    }

  if (x == y)
    return 1;

  if (GET_CODE (x) == VALUE)
    {
      cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (x));
      struct elt_loc_list *l;

      if (GET_CODE (y) == VALUE)
	return e == canonical_cselib_val (CSELIB_VAL_PTR (y));

      if ((SP_DERIVED_VALUE_P (x)
	   || SP_DERIVED_VALUE_P (e->val_rtx))
	  && GET_MODE (y) == Pmode)
	{
	  rtx yoff = NULL;
	  rtx yr = autoinc_split (y, &yoff, memmode);
	  if ((yr == x || yr == e->val_rtx) && yoff == NULL_RTX)
	    return 1;
	}

      if (depth == 128)
	return 0;

      for (l = e->locs; l; l = l->next)
	{
	  rtx t = l->loc;

	  /* Avoid infinite recursion.  We know we have the canonical
	     value, so we can just skip any values in the equivalence
	     list.  */
	  if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
	    continue;
	  else if (rtx_equal_for_cselib_1 (t, y, memmode, depth + 1))
	    return 1;
	}

      return 0;
    }
  else if (GET_CODE (y) == VALUE)
    {
      cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (y));
      struct elt_loc_list *l;

      if ((SP_DERIVED_VALUE_P (y)
	   || SP_DERIVED_VALUE_P (e->val_rtx))
	  && GET_MODE (x) == Pmode)
	{
	  rtx xoff = NULL;
	  rtx xr = autoinc_split (x, &xoff, memmode);
	  if ((xr == y || xr == e->val_rtx) && xoff == NULL_RTX)
	    return 1;
	}

      if (depth == 128)
	return 0;

      for (l = e->locs; l; l = l->next)
	{
	  rtx t = l->loc;

	  if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
	    continue;
	  else if (rtx_equal_for_cselib_1 (x, t, memmode, depth + 1))
	    return 1;
	}

      return 0;
    }

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  if (GET_CODE (x) != GET_CODE (y)
      || (GET_CODE (x) == PLUS
	  && GET_MODE (x) == Pmode
	  && CONST_INT_P (XEXP (x, 1))
	  && CONST_INT_P (XEXP (y, 1))))
    {
      rtx xorig = x, yorig = y;
      rtx xoff = NULL, yoff = NULL;

      x = autoinc_split (x, &xoff, memmode);
      y = autoinc_split (y, &yoff, memmode);

      /* Don't recurse if nothing changed.  */
      if (x != xorig || y != yorig)
	{
	  if (!xoff != !yoff)
	    return 0;

	  if (xoff && !rtx_equal_for_cselib_1 (xoff, yoff, memmode, depth))
	    return 0;

	  return rtx_equal_for_cselib_1 (x, y, memmode, depth);
	}

      if (GET_CODE (xorig) != GET_CODE (yorig))
	return 0;
    }

  /* These won't be handled correctly by the code below.  */
  switch (GET_CODE (x))
    {
    CASE_CONST_UNIQUE:
    case DEBUG_EXPR:
      return 0;

    case CONST_VECTOR:
      if (!same_vector_encodings_p (x, y))
	return false;
      break;

    case DEBUG_IMPLICIT_PTR:
      return DEBUG_IMPLICIT_PTR_DECL (x)
	     == DEBUG_IMPLICIT_PTR_DECL (y);

    case DEBUG_PARAMETER_REF:
      return DEBUG_PARAMETER_REF_DECL (x)
	     == DEBUG_PARAMETER_REF_DECL (y);

    case ENTRY_VALUE:
      /* ENTRY_VALUEs are function invariant; it is thus undesirable to
	 use rtx_equal_for_cselib_1 to compare the operands.  */
      return rtx_equal_p (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));

    case LABEL_REF:
      return label_ref_label (x) == label_ref_label (y);

    case REG:
      return REGNO (x) == REGNO (y);

    case MEM:
      /* We have to compare any autoinc operations in the addresses
	 using this MEM's mode.  */
      return rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 0), GET_MODE (x),
				     depth);

    default:
      break;
    }

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'n':
	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'p':
	  if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
	    return 0;
	  break;

	case 'V':
	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! rtx_equal_for_cselib_1 (XVECEXP (x, i, j),
					  XVECEXP (y, i, j), memmode, depth))
	      return 0;
	  break;

	case 'e':
	  if (i == 1
	      && targetm.commutative_p (x, UNKNOWN)
	      && rtx_equal_for_cselib_1 (XEXP (x, 1), XEXP (y, 0), memmode,
					 depth)
	      && rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 1), memmode,
					 depth))
	    return 1;
	  if (! rtx_equal_for_cselib_1 (XEXP (x, i), XEXP (y, i), memmode,
					depth))
	    return 0;
	  break;

	case 'S':
	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'u':
	  /* These are just backpointers, so they don't matter.  */
	  break;

	case '0':
	case 't':
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  gcc_unreachable ();
	}
    }
  return 1;
}

/* Helper function for cselib_hash_rtx.  Arguments like for cselib_hash_rtx,
   except that it hashes (plus:P x c).  */

static unsigned int
cselib_hash_plus_const_int (rtx x, HOST_WIDE_INT c, int create,
			    machine_mode memmode)
{
  cselib_val *e = cselib_lookup (x, GET_MODE (x), create, memmode);
  if (! e)
    return 0;

  if (! SP_DERIVED_VALUE_P (e->val_rtx))
    for (struct elt_loc_list *l = e->locs; l; l = l->next)
      if (GET_CODE (l->loc) == PLUS
	  && GET_CODE (XEXP (l->loc, 0)) == VALUE
	  && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
	  && CONST_INT_P (XEXP (l->loc, 1)))
	{
	  e = CSELIB_VAL_PTR (XEXP (l->loc, 0));
	  c = trunc_int_for_mode (c + UINTVAL (XEXP (l->loc, 1)), Pmode);
	  break;
	}
  if (c == 0)
    return e->hash;

  unsigned hash = (unsigned) PLUS + (unsigned) GET_MODE (x);
  hash += e->hash;
  unsigned int tem_hash = (unsigned) CONST_INT + (unsigned) VOIDmode;
  tem_hash += ((unsigned) CONST_INT << 7) + (unsigned HOST_WIDE_INT) c;
  if (tem_hash == 0)
    tem_hash = (unsigned int) CONST_INT;
  hash += tem_hash;
  return hash ? hash : 1 + (unsigned int) PLUS;
}
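
/* For example, if X already has a location (plus (value V) (const_int 8))
   where V is SP_DERIVED_VALUE_P, then hashing (plus:P X (const_int -8))
   rebases onto V with a combined offset of 0 and simply returns V's hash,
   so stack-derived addresses hash consistently however they are spelled.
   (Illustrative example only.)  */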

/* Hash an rtx.  Return 0 if we couldn't hash the rtx.
   For registers and memory locations, we look up their cselib_val structure
   and return its VALUE element.
   Possible reasons for returning 0 are: the object is volatile, or we
   couldn't find a register or memory location in the table and CREATE is
   zero.  If CREATE is nonzero, table elts are created for regs and mem.
   N.B. this hash function returns the same hash value for RTXes that
   differ only in the order of operands, thus it is suitable for comparisons
   that take commutativity into account.
   If we wanted to also support associative rules, we'd have to use a different
   strategy to avoid returning spurious 0, e.g. return ~(~0U >> 1) .
   MEMMODE indicates the mode of an enclosing MEM, and it's only
   used to compute autoinc values.
   We used to have a MODE argument for hashing for CONST_INTs, but that
   didn't make sense, since it caused spurious hash differences between
    (set (reg:SI 1) (const_int))
    (plus:SI (reg:SI 2) (reg:SI 1))
   and
    (plus:SI (reg:SI 2) (const_int))
   If the mode is important in any context, it must be checked specifically
   in a comparison anyway, since relying on hash differences is unsafe.  */

static unsigned int
cselib_hash_rtx (rtx x, int create, machine_mode memmode)
{
  cselib_val *e;
  poly_int64 offset;
  int i, j;
  enum rtx_code code;
  const char *fmt;
  unsigned int hash = 0;

  code = GET_CODE (x);
  hash += (unsigned) code + (unsigned) GET_MODE (x);

  switch (code)
    {
    case VALUE:
      e = CSELIB_VAL_PTR (x);
      return e->hash;

    case MEM:
    case REG:
      e = cselib_lookup (x, GET_MODE (x), create, memmode);
      if (! e)
	return 0;

      return e->hash;

    case DEBUG_EXPR:
      hash += ((unsigned) DEBUG_EXPR << 7)
	      + DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_EXPR;

    case DEBUG_IMPLICIT_PTR:
      hash += ((unsigned) DEBUG_IMPLICIT_PTR << 7)
	      + DECL_UID (DEBUG_IMPLICIT_PTR_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_IMPLICIT_PTR;

    case DEBUG_PARAMETER_REF:
      hash += ((unsigned) DEBUG_PARAMETER_REF << 7)
	      + DECL_UID (DEBUG_PARAMETER_REF_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_PARAMETER_REF;

    case ENTRY_VALUE:
      /* ENTRY_VALUEs are function invariant, thus try to avoid
	 recursing on argument if ENTRY_VALUE is one of the
	 forms emitted by expand_debug_expr, otherwise
	 ENTRY_VALUE hash would depend on the current value
	 in some register or memory.  */
      if (REG_P (ENTRY_VALUE_EXP (x)))
	hash += (unsigned int) REG
		+ (unsigned int) GET_MODE (ENTRY_VALUE_EXP (x))
		+ (unsigned int) REGNO (ENTRY_VALUE_EXP (x));
      else if (MEM_P (ENTRY_VALUE_EXP (x))
	       && REG_P (XEXP (ENTRY_VALUE_EXP (x), 0)))
	hash += (unsigned int) MEM
		+ (unsigned int) GET_MODE (XEXP (ENTRY_VALUE_EXP (x), 0))
		+ (unsigned int) REGNO (XEXP (ENTRY_VALUE_EXP (x), 0));
      else
	hash += cselib_hash_rtx (ENTRY_VALUE_EXP (x), create, memmode);
      return hash ? hash : (unsigned int) ENTRY_VALUE;

    case CONST_INT:
      hash += ((unsigned) CONST_INT << 7) + UINTVAL (x);
      return hash ? hash : (unsigned int) CONST_INT;

    case CONST_WIDE_INT:
      for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
	hash += CONST_WIDE_INT_ELT (x, i);
      return hash;

    case CONST_POLY_INT:
      {
	inchash::hash h;
	h.add_int (hash);
	for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	  h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
	return h.end ();
      }

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
	 the integers representing the constant.  */
      hash += (unsigned) code + (unsigned) GET_MODE (x);
      if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (x) == VOIDmode)
	hash += ((unsigned) CONST_DOUBLE_LOW (x)
		 + (unsigned) CONST_DOUBLE_HIGH (x));
      else
	hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
      return hash ? hash : (unsigned int) CONST_DOUBLE;

    case CONST_FIXED:
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      hash += fixed_hash (CONST_FIXED_VALUE (x));
      return hash ? hash : (unsigned int) CONST_FIXED;

    case CONST_VECTOR:
      {
	int units;
	rtx elt;

	units = const_vector_encoded_nelts (x);

	for (i = 0; i < units; ++i)
	  {
	    elt = CONST_VECTOR_ENCODED_ELT (x, i);
	    hash += cselib_hash_rtx (elt, 0, memmode);
	  }

	return hash;
      }

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
	 differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
	       + CODE_LABEL_NUMBER (label_ref_label (x)));
      return hash ? hash : (unsigned int) LABEL_REF;

    case SYMBOL_REF:
      {
	/* Don't hash on the symbol's address to avoid bootstrap differences.
	   Different hash values may cause expressions to be recorded in
	   different orders and thus different registers to be used in the
	   final assembler.  This also avoids differences in the dump files
	   between various stages.  */
	unsigned int h = 0;
	const unsigned char *p = (const unsigned char *) XSTR (x, 0);

	while (*p)
	  h += (h << 7) + *p++; /* ??? revisit */

	hash += ((unsigned int) SYMBOL_REF << 7) + h;
	return hash ? hash : (unsigned int) SYMBOL_REF;
      }

    case PRE_DEC:
    case PRE_INC:
      /* We can't compute these without knowing the MEM mode.  */
      gcc_assert (memmode != VOIDmode);
      offset = GET_MODE_SIZE (memmode);
      if (code == PRE_DEC)
	offset = -offset;
      /* Adjust the hash so that (mem:MEMMODE (pre_* (reg))) hashes
	 like (mem:MEMMODE (plus (reg) (const_int I))).  */
      if (GET_MODE (x) == Pmode
	  && (REG_P (XEXP (x, 0))
	      || MEM_P (XEXP (x, 0))
	      || GET_CODE (XEXP (x, 0)) == VALUE))
	{
	  HOST_WIDE_INT c;
	  if (offset.is_constant (&c))
	    return cselib_hash_plus_const_int (XEXP (x, 0),
					       trunc_int_for_mode (c, Pmode),
					       create, memmode);
	}
      hash = ((unsigned) PLUS + (unsigned) GET_MODE (x)
	      + cselib_hash_rtx (XEXP (x, 0), create, memmode)
	      + cselib_hash_rtx (gen_int_mode (offset, GET_MODE (x)),
				 create, memmode));
      return hash ? hash : 1 + (unsigned) PLUS;

    case PRE_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_hash_rtx (XEXP (x, 1), create, memmode);

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_hash_rtx (XEXP (x, 0), create, memmode);

    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
	return 0;

      break;

    case PLUS:
      if (GET_MODE (x) == Pmode
	  && (REG_P (XEXP (x, 0))
	      || MEM_P (XEXP (x, 0))
	      || GET_CODE (XEXP (x, 0)) == VALUE)
	  && CONST_INT_P (XEXP (x, 1)))
	return cselib_hash_plus_const_int (XEXP (x, 0), INTVAL (XEXP (x, 1)),
					   create, memmode);
      break;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'e':
	  {
	    rtx tem = XEXP (x, i);
	    unsigned int tem_hash = cselib_hash_rtx (tem, create, memmode);

	    if (tem_hash == 0)
	      return 0;

	    hash += tem_hash;
	  }
	  break;
	case 'E':
	  for (j = 0; j < XVECLEN (x, i); j++)
	    {
	      unsigned int tem_hash
		= cselib_hash_rtx (XVECEXP (x, i, j), create, memmode);

	      if (tem_hash == 0)
		return 0;

	      hash += tem_hash;
	    }
	  break;

	case 's':
	  {
	    const unsigned char *p = (const unsigned char *) XSTR (x, i);

	    if (p)
	      while (*p)
		hash += *p++;
	    break;
	  }

	case 'i':
	  hash += XINT (x, i);
	  break;

	case 'p':
	  hash += constant_lower_bound (SUBREG_BYTE (x));
	  break;

	case '0':
	case 't':
	  /* unused */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  return hash ? hash : 1 + (unsigned int) GET_CODE (x);
}

/* Create a new value structure for VALUE and initialize it.  The mode of the
   value is MODE.  */

static inline cselib_val *
new_cselib_val (unsigned int hash, machine_mode mode, rtx x)
{
  cselib_val *e = cselib_val_pool.allocate ();

  gcc_assert (hash);
  gcc_assert (next_uid);

  e->hash = hash;
  e->uid = next_uid++;
  /* We use an alloc pool to allocate this RTL construct because it
     accounts for about 8% of the overall memory usage.  We know
     precisely when we can have VALUE RTXen (when cselib is active)
     so we don't need to put them in garbage collected memory.
     ??? Why should a VALUE be an RTX in the first place?  */
  e->val_rtx = (rtx_def*) value_pool.allocate ();
  memset (e->val_rtx, 0, RTX_HDR_SIZE);
  PUT_CODE (e->val_rtx, VALUE);
  PUT_MODE (e->val_rtx, mode);
  CSELIB_VAL_PTR (e->val_rtx) = e;
  e->addr_list = 0;
  e->locs = 0;
  e->next_containing_mem = 0;

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      fprintf (dump_file, "cselib value %u:%u ", e->uid, hash);
      if (flag_dump_noaddr || flag_dump_unnumbered)
	fputs ("# ", dump_file);
      else
	fprintf (dump_file, "%p ", (void*)e);
      print_rtl_single (dump_file, x);
      fputc ('\n', dump_file);
    }

  return e;
}

/* ADDR_ELT is a value that is used as address.  MEM_ELT is the value that
   contains the data at this address.  X is a MEM that represents the
   value.  Update the two value structures to represent this situation.  */

static void
add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x)
{
  addr_elt = canonical_cselib_val (addr_elt);
  mem_elt = canonical_cselib_val (mem_elt);

  /* Avoid duplicates.  */
  addr_space_t as = MEM_ADDR_SPACE (x);
  for (elt_loc_list *l = mem_elt->locs; l; l = l->next)
    if (MEM_P (l->loc)
	&& CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt
	&& MEM_ADDR_SPACE (l->loc) == as)
      {
	promote_debug_loc (l);
	return;
      }

  addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
  new_elt_loc_list (mem_elt,
		    replace_equiv_address_nv (x, addr_elt->val_rtx));
  if (mem_elt->next_containing_mem == NULL)
    {
      mem_elt->next_containing_mem = first_containing_mem;
      first_containing_mem = mem_elt;
    }
}

/* Subroutine of cselib_lookup.  Return a value for X, which is a MEM rtx.
   If CREATE, make a new one if we haven't seen it before.  */

static cselib_val *
cselib_lookup_mem (rtx x, int create)
{
  machine_mode mode = GET_MODE (x);
  machine_mode addr_mode;
  cselib_val **slot;
  cselib_val *addr;
  cselib_val *mem_elt;

  if (MEM_VOLATILE_P (x) || mode == BLKmode
      || !cselib_record_memory
      || (FLOAT_MODE_P (mode) && flag_float_store))
    return 0;

  addr_mode = GET_MODE (XEXP (x, 0));
  if (addr_mode == VOIDmode)
    addr_mode = Pmode;

  /* Look up the value for the address.  */
  addr = cselib_lookup (XEXP (x, 0), addr_mode, create, mode);
  if (! addr)
    return 0;
  addr = canonical_cselib_val (addr);

  /* Find a value that describes a value of our mode at that address.  */
  addr_space_t as = MEM_ADDR_SPACE (x);
  for (elt_list *l = addr->addr_list; l; l = l->next)
    if (GET_MODE (l->elt->val_rtx) == mode)
      {
	for (elt_loc_list *l2 = l->elt->locs; l2; l2 = l2->next)
	  if (MEM_P (l2->loc) && MEM_ADDR_SPACE (l2->loc) == as)
	    {
	      promote_debug_loc (l->elt->locs);
	      return l->elt;
	    }
      }

  if (! create)
    return 0;

  mem_elt = new_cselib_val (next_uid, mode, x);
  add_mem_for_addr (addr, mem_elt, x);
  slot = cselib_find_slot (mode, x, mem_elt->hash, INSERT, VOIDmode);
  *slot = mem_elt;
  return mem_elt;
}

/* Search through the possible substitutions in P.  We prefer a non-reg
   substitution because this allows us to expand the tree further.  If
   we find just a reg, take the lowest regno.  There may be several
   non-reg results; we just take the first one because they will all
   expand to the same place.  */

static rtx
expand_loc (struct elt_loc_list *p, struct expand_value_data *evd,
	    int max_depth)
{
  rtx reg_result = NULL;
  unsigned int regno = UINT_MAX;
  struct elt_loc_list *p_in = p;

  for (; p; p = p->next)
    {
      /* Return these right away to avoid returning stack pointer based
	 expressions for frame pointer and vice versa, which is something
	 that would confuse DSE.  See the comment in cselib_expand_value_rtx_1
	 for more details.  */
      if (REG_P (p->loc)
	  && (REGNO (p->loc) == STACK_POINTER_REGNUM
	      || REGNO (p->loc) == FRAME_POINTER_REGNUM
	      || REGNO (p->loc) == HARD_FRAME_POINTER_REGNUM
	      || REGNO (p->loc) == cfa_base_preserved_regno))
	return p->loc;
      /* Avoid infinite recursion trying to expand a reg into
	 the same reg.  */
      if ((REG_P (p->loc))
	  && (REGNO (p->loc) < regno)
	  && !bitmap_bit_p (evd->regs_active, REGNO (p->loc)))
	{
	  reg_result = p->loc;
	  regno = REGNO (p->loc);
	}
      /* Avoid infinite recursion and do not try to expand the
	 value.  */
      else if (GET_CODE (p->loc) == VALUE
	       && CSELIB_VAL_PTR (p->loc)->locs == p_in)
	continue;
      else if (!REG_P (p->loc))
	{
	  rtx result, note;
	  if (dump_file && (dump_flags & TDF_CSELIB))
	    {
	      print_inline_rtx (dump_file, p->loc, 0);
	      fprintf (dump_file, "\n");
	    }
	  if (GET_CODE (p->loc) == LO_SUM
	      && GET_CODE (XEXP (p->loc, 1)) == SYMBOL_REF
	      && p->setting_insn
	      && (note = find_reg_note (p->setting_insn, REG_EQUAL, NULL_RTX))
	      && XEXP (note, 0) == XEXP (p->loc, 1))
	    return XEXP (p->loc, 1);
	  result = cselib_expand_value_rtx_1 (p->loc, evd, max_depth - 1);
	  if (result)
	    return result;
	}
    }

  if (regno != UINT_MAX)
    {
      rtx result;
      if (dump_file && (dump_flags & TDF_CSELIB))
	fprintf (dump_file, "r%d\n", regno);

      result = cselib_expand_value_rtx_1 (reg_result, evd, max_depth - 1);
      if (result)
	return result;
    }

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      if (reg_result)
	{
	  print_inline_rtx (dump_file, reg_result, 0);
	  fprintf (dump_file, "\n");
	}
      else
	fprintf (dump_file, "NULL\n");
    }
  return reg_result;
}
1682 
1683 
1684 /* Forward substitute and expand an expression out to its roots.
1685    This is the opposite of common subexpression.  Because local value
1686    numbering is such a weak optimization, the expanded expression is
1687    pretty much unique (not from a pointer equals point of view but
1688    from a tree shape point of view.
1689 
1690    This function returns NULL if the expansion fails.  The expansion
1691    will fail if there is no value number for one of the operands or if
1692    one of the operands has been overwritten between the current insn
1693    and the beginning of the basic block.  For instance x has no
1694    expansion in:
1695 
1696    r1 <- r1 + 3
1697    x <- r1 + 8
1698 
1699    REGS_ACTIVE is a scratch bitmap that should be clear when passing in.
1700    It is clear on return.  */
1701 
1702 rtx
cselib_expand_value_rtx(rtx orig,bitmap regs_active,int max_depth)1703 cselib_expand_value_rtx (rtx orig, bitmap regs_active, int max_depth)
1704 {
1705   struct expand_value_data evd;
1706 
1707   evd.regs_active = regs_active;
1708   evd.callback = NULL;
1709   evd.callback_arg = NULL;
1710   evd.dummy = false;
1711 
1712   return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
1713 }
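
/* A usage sketch for illustration (hypothetical caller, not from the
   original sources): a DSE-like client expands an address ADDR back to
   its roots with a small depth budget, using a scratch bitmap that
   must be empty on entry and is left empty on return:

     bitmap regs_active = BITMAP_ALLOC (NULL);
     rtx expanded = cselib_expand_value_rtx (addr, regs_active, 5);
     BITMAP_FREE (regs_active);

   A NULL result means the expansion failed, e.g. because an operand
   had no value number or was overwritten earlier in the block.  */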

/* Same as cselib_expand_value_rtx, but using a callback to try to
   resolve some expressions.  The CB function should return ORIG if it
   can't or does not want to deal with a certain RTX.  Any other
   return value, including NULL, will be used as the expansion for
   VALUE, without any further changes.  */

rtx
cselib_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
			    cselib_expand_callback cb, void *data)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = cb;
  evd.callback_arg = data;
  evd.dummy = false;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
}
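
/* An illustrative callback honoring the contract above (hypothetical,
   not from the original sources).  It must return ORIG for anything it
   does not handle; returning NULL vetoes the expansion of that rtx:

     static rtx
     my_expand_cb (rtx orig, bitmap regs_active ATTRIBUTE_UNUSED,
		   int max_depth ATTRIBUTE_UNUSED,
		   void *data ATTRIBUTE_UNUSED)
     {
       if (GET_CODE (orig) == DEBUG_EXPR)
	 return NULL;		(refuse to expand debug expressions)
       return orig;		(defer to the default machinery)
     }

   cselib_dummy_expand_value_rtx_cb below performs the same walk
   without allocating any rtl, which is handy to merely test whether
   an expansion would succeed.  */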

/* Similar to cselib_expand_value_rtx_cb, but no rtxs are actually copied
   or simplified.  Useful to find out whether cselib_expand_value_rtx_cb
   would return NULL or non-NULL, without allocating new rtx.  */

bool
cselib_dummy_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
				  cselib_expand_callback cb, void *data)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = cb;
  evd.callback_arg = data;
  evd.dummy = true;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth) != NULL;
}

/* Internal implementation of cselib_expand_value_rtx and
   cselib_expand_value_rtx_cb.  */

static rtx
cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd,
			   int max_depth)
{
  rtx copy, scopy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;
  machine_mode mode;

  code = GET_CODE (orig);

  /* For the context of dse, if we end up expanding into a huge tree, we
     will not have a useful address, so we might as well just give up
     quickly.  */
  if (max_depth <= 0)
    return NULL;

  switch (code)
    {
    case REG:
      {
	struct elt_list *l = REG_VALUES (REGNO (orig));

	if (l && l->elt == NULL)
	  l = l->next;
	for (; l; l = l->next)
	  if (GET_MODE (l->elt->val_rtx) == GET_MODE (orig))
	    {
	      rtx result;
	      unsigned regno = REGNO (orig);

	      /* The only thing that we are not willing to do (this
		 is a requirement of dse; if other potential uses
		 need this function we should add a parm to control
		 it) is that we will not substitute the
		 STACK_POINTER_REGNUM, FRAME_POINTER or the
		 HARD_FRAME_POINTER.

		 These expansions confuse the code that notices that
		 stores into the frame go dead at the end of the
		 function and that the frame is not affected by calls
		 to subroutines.  If you allow the
		 STACK_POINTER_REGNUM substitution, then dse will
		 think that parameter pushing also goes dead which is
		 wrong.  If you allow the FRAME_POINTER or the
		 HARD_FRAME_POINTER then you lose the opportunity to
		 make the frame assumptions.  */
	      if (regno == STACK_POINTER_REGNUM
		  || regno == FRAME_POINTER_REGNUM
		  || regno == HARD_FRAME_POINTER_REGNUM
		  || regno == cfa_base_preserved_regno)
		return orig;

	      bitmap_set_bit (evd->regs_active, regno);

	      if (dump_file && (dump_flags & TDF_CSELIB))
		fprintf (dump_file, "expanding: r%d into: ", regno);

	      result = expand_loc (l->elt->locs, evd, max_depth);
	      bitmap_clear_bit (evd->regs_active, regno);

	      if (result)
		return result;
	      else
		return orig;
	    }
	return orig;
      }

    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* SCRATCHes must be shared because they represent distinct values.  */
      return orig;
    case CLOBBER:
      if (REG_P (XEXP (orig, 0)) && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))))
	return orig;
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

    case SUBREG:
      {
	rtx subreg;

	if (evd->callback)
	  {
	    subreg = evd->callback (orig, evd->regs_active, max_depth,
				    evd->callback_arg);
	    if (subreg != orig)
	      return subreg;
	  }

	subreg = cselib_expand_value_rtx_1 (SUBREG_REG (orig), evd,
					    max_depth - 1);
	if (!subreg)
	  return NULL;
	scopy = simplify_gen_subreg (GET_MODE (orig), subreg,
				     GET_MODE (SUBREG_REG (orig)),
				     SUBREG_BYTE (orig));
	if (scopy == NULL
	    || (GET_CODE (scopy) == SUBREG
		&& !REG_P (SUBREG_REG (scopy))
		&& !MEM_P (SUBREG_REG (scopy))))
	  return NULL;

	return scopy;
      }

    case VALUE:
      {
	rtx result;

	if (dump_file && (dump_flags & TDF_CSELIB))
	  {
	    fputs ("\nexpanding ", dump_file);
	    print_rtl_single (dump_file, orig);
	    fputs (" into...", dump_file);
	  }

	if (evd->callback)
	  {
	    result = evd->callback (orig, evd->regs_active, max_depth,
				    evd->callback_arg);

	    if (result != orig)
	      return result;
	  }

	result = expand_loc (CSELIB_VAL_PTR (orig)->locs, evd, max_depth);
	return result;
      }

    case DEBUG_EXPR:
      if (evd->callback)
	return evd->callback (orig, evd->regs_active, max_depth,
			      evd->callback_arg);
      return orig;

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  if (evd->dummy)
    copy = NULL;
  else
    copy = shallow_copy_rtx (orig);

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  {
	    rtx result = cselib_expand_value_rtx_1 (XEXP (orig, i), evd,
						    max_depth - 1);
	    if (!result)
	      return NULL;
	    if (copy)
	      XEXP (copy, i) = result;
	  }
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) != NULL)
	  {
	    if (copy)
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (orig, i); j++)
	      {
		rtx result = cselib_expand_value_rtx_1 (XVECEXP (orig, i, j),
							evd, max_depth - 1);
		if (!result)
		  return NULL;
		if (copy)
		  XVECEXP (copy, i, j) = result;
	      }
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'T':
      case 'u':
      case 'B':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  if (evd->dummy)
    return orig;

  mode = GET_MODE (copy);
  /* If an operand has been simplified into CONST_INT, which doesn't
     have a mode and the mode isn't derivable from the whole rtx's mode,
     try simplify_*_operation first with the mode from the original's
     operand and as a fallback wrap the CONST_INT into gen_rtx_CONST.  */
  scopy = copy;
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONST_INT_P (XEXP (copy, 0))
	  && GET_MODE (XEXP (orig, 0)) != VOIDmode)
	{
	  scopy = simplify_unary_operation (code, mode, XEXP (copy, 0),
					    GET_MODE (XEXP (orig, 0)));
	  if (scopy)
	    return scopy;
	}
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      /* These expressions can derive operand modes from the whole rtx's
	 mode.  */
      break;
    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      if (CONST_INT_P (XEXP (copy, 0))
	  && GET_MODE (XEXP (orig, 0)) != VOIDmode)
	{
	  scopy = simplify_ternary_operation (code, mode,
					      GET_MODE (XEXP (orig, 0)),
					      XEXP (copy, 0), XEXP (copy, 1),
					      XEXP (copy, 2));
	  if (scopy)
	    return scopy;
	}
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONST_INT_P (XEXP (copy, 0))
	  && GET_MODE (XEXP (copy, 1)) == VOIDmode
	  && (GET_MODE (XEXP (orig, 0)) != VOIDmode
	      || GET_MODE (XEXP (orig, 1)) != VOIDmode))
	{
	  scopy = simplify_relational_operation (code, mode,
						 (GET_MODE (XEXP (orig, 0))
						  != VOIDmode)
						 ? GET_MODE (XEXP (orig, 0))
						 : GET_MODE (XEXP (orig, 1)),
						 XEXP (copy, 0),
						 XEXP (copy, 1));
	  if (scopy)
	    return scopy;
	}
      break;
    default:
      break;
    }
  scopy = simplify_rtx (copy);
  if (scopy)
    return scopy;
  return copy;
}

/* Walk rtx X and replace all occurrences of REG and MEM subexpressions
   with VALUE expressions.  This way, it becomes independent of changes
   to registers and memory.
   X isn't actually modified; if modifications are needed, new rtl is
   allocated.  However, the return value can share rtl with X.
   If X is within a MEM, MEMMODE must be the mode of the MEM.  */

rtx
cselib_subst_to_values (rtx x, machine_mode memmode)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  cselib_val *e;
  struct elt_list *l;
  rtx copy = x;
  int i;
  poly_int64 offset;

  switch (code)
    {
    case REG:
      l = REG_VALUES (REGNO (x));
      if (l && l->elt == NULL)
	l = l->next;
      for (; l; l = l->next)
	if (GET_MODE (l->elt->val_rtx) == GET_MODE (x))
	  return l->elt->val_rtx;

      gcc_unreachable ();

    case MEM:
      e = cselib_lookup_mem (x, 0);
      /* This used to happen for autoincrements, but we deal with them
	 properly now.  Remove the if stmt for the next release.  */
      if (! e)
	{
	  /* Assign a value that doesn't match any other.  */
	  e = new_cselib_val (next_uid, GET_MODE (x), x);
	}
      return e->val_rtx;

    case ENTRY_VALUE:
      e = cselib_lookup (x, GET_MODE (x), 0, memmode);
      if (! e)
	break;
      return e->val_rtx;

    CASE_CONST_ANY:
      return x;

    case PRE_DEC:
    case PRE_INC:
      gcc_assert (memmode != VOIDmode);
      offset = GET_MODE_SIZE (memmode);
      if (code == PRE_DEC)
	offset = -offset;
      return cselib_subst_to_values (plus_constant (GET_MODE (x),
						    XEXP (x, 0), offset),
				     memmode);

    case PRE_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_subst_to_values (XEXP (x, 1), memmode);

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_subst_to_values (XEXP (x, 0), memmode);

    case PLUS:
      if (GET_MODE (x) == Pmode && CONST_INT_P (XEXP (x, 1)))
	{
	  rtx t = cselib_subst_to_values (XEXP (x, 0), memmode);
	  if (GET_CODE (t) == VALUE)
	    {
	      if (SP_DERIVED_VALUE_P (t) && XEXP (x, 1) == const0_rtx)
		return t;
	      for (struct elt_loc_list *l = CSELIB_VAL_PTR (t)->locs;
		   l; l = l->next)
		if (GET_CODE (l->loc) == PLUS
		    && GET_CODE (XEXP (l->loc, 0)) == VALUE
		    && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
		    && CONST_INT_P (XEXP (l->loc, 1)))
		  return plus_constant (Pmode, l->loc, INTVAL (XEXP (x, 1)));
	    }
	  if (t != XEXP (x, 0))
	    {
	      copy = shallow_copy_rtx (x);
	      XEXP (copy, 0) = t;
	    }
	  return copy;
	}

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx t = cselib_subst_to_values (XEXP (x, i), memmode);

	  if (t != XEXP (x, i))
	    {
	      if (x == copy)
		copy = shallow_copy_rtx (x);
	      XEXP (copy, i) = t;
	    }
	}
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = 0; j < XVECLEN (x, i); j++)
	    {
	      rtx t = cselib_subst_to_values (XVECEXP (x, i, j), memmode);

	      if (t != XVECEXP (x, i, j))
		{
		  if (XVEC (x, i) == XVEC (copy, i))
		    {
		      if (x == copy)
			copy = shallow_copy_rtx (x);
		      XVEC (copy, i) = shallow_copy_rtvec (XVEC (x, i));
		    }
		  XVECEXP (copy, i, j) = t;
		}
	    }
	}
    }

  return copy;
}
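
/* As an informal example (hypothetical value numbers, not from the
   original sources): given a source expression

     (plus:SI (reg:SI 2) (mem:SI (reg:SI 3)))

   cselib_subst_to_values yields something shaped like

     (plus:SI (value:SI 5) (value:SI 9))

   where value 5 stands for whatever reg 2 currently holds and value 9
   for the loaded memory slot, so the result stays meaningful even
   after reg 2 or that memory is overwritten.  */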

/* Wrapper for cselib_subst_to_values, that indicates X is in INSN.  */

rtx
cselib_subst_to_values_from_insn (rtx x, machine_mode memmode, rtx_insn *insn)
{
  rtx ret;
  gcc_assert (!cselib_current_insn);
  cselib_current_insn = insn;
  ret = cselib_subst_to_values (x, memmode);
  cselib_current_insn = NULL;
  return ret;
}

/* Look up the rtl expression X in our tables and return the value it
   has.  If CREATE is zero, we return NULL if we don't know the value.
   Otherwise, we create a new one if possible, using mode MODE if X
   doesn't have a mode (i.e. because it's a constant).  When X is part
   of an address, MEMMODE should be the mode of the enclosing MEM if
   we're tracking autoinc expressions.  */

static cselib_val *
cselib_lookup_1 (rtx x, machine_mode mode,
		 int create, machine_mode memmode)
{
  cselib_val **slot;
  cselib_val *e;
  unsigned int hashval;

  if (GET_MODE (x) != VOIDmode)
    mode = GET_MODE (x);

  if (GET_CODE (x) == VALUE)
    return CSELIB_VAL_PTR (x);

  if (REG_P (x))
    {
      struct elt_list *l;
      unsigned int i = REGNO (x);

      l = REG_VALUES (i);
      if (l && l->elt == NULL)
	l = l->next;
      for (; l; l = l->next)
	if (mode == GET_MODE (l->elt->val_rtx))
	  {
	    promote_debug_loc (l->elt->locs);
	    return l->elt;
	  }

      if (! create)
	return 0;

      if (i < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int n = hard_regno_nregs (i, mode);

	  if (n > max_value_regs)
	    max_value_regs = n;
	}

      e = new_cselib_val (next_uid, GET_MODE (x), x);
      if (GET_MODE (x) == Pmode && x == stack_pointer_rtx)
	SP_DERIVED_VALUE_P (e->val_rtx) = 1;
      new_elt_loc_list (e, x);

      scalar_int_mode int_mode;
      if (REG_VALUES (i) == 0)
	{
	  /* Maintain the invariant that the first entry of
	     REG_VALUES, if present, must be the value used to set the
	     register, or NULL.  */
	  used_regs[n_used_regs++] = i;
	  REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
	}
      else if (cselib_preserve_constants
	       && is_int_mode (mode, &int_mode))
	{
	  /* During var-tracking, try harder to find equivalences
	     for SUBREGs.  If a setter sets say a DImode register
	     and the user uses that register only in SImode, add a
	     lowpart subreg location.  */
	  struct elt_list *lwider = NULL;
	  scalar_int_mode lmode;
	  l = REG_VALUES (i);
	  if (l && l->elt == NULL)
	    l = l->next;
	  for (; l; l = l->next)
	    if (is_int_mode (GET_MODE (l->elt->val_rtx), &lmode)
		&& GET_MODE_SIZE (lmode) > GET_MODE_SIZE (int_mode)
		&& (lwider == NULL
		    || partial_subreg_p (lmode,
					 GET_MODE (lwider->elt->val_rtx))))
	      {
		struct elt_loc_list *el;
		if (i < FIRST_PSEUDO_REGISTER
		    && hard_regno_nregs (i, lmode) != 1)
		  continue;
		for (el = l->elt->locs; el; el = el->next)
		  if (!REG_P (el->loc))
		    break;
		if (el)
		  lwider = l;
	      }
	  if (lwider)
	    {
	      rtx sub = lowpart_subreg (int_mode, lwider->elt->val_rtx,
					GET_MODE (lwider->elt->val_rtx));
	      if (sub)
		new_elt_loc_list (e, sub);
	    }
	}
      REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
      slot = cselib_find_slot (mode, x, e->hash, INSERT, memmode);
      *slot = e;
      return e;
    }

  if (MEM_P (x))
    return cselib_lookup_mem (x, create);

  hashval = cselib_hash_rtx (x, create, memmode);
  /* Can't even create if hashing is not possible.  */
  if (! hashval)
    return 0;

  slot = cselib_find_slot (mode, x, hashval,
			   create ? INSERT : NO_INSERT, memmode);
  if (slot == 0)
    return 0;

  e = (cselib_val *) *slot;
  if (e)
    return e;

  e = new_cselib_val (hashval, mode, x);

  /* We have to fill the slot before calling cselib_subst_to_values:
     the hash table is inconsistent until we do so, and
     cselib_subst_to_values will need to do lookups.  */
  *slot = e;
  rtx v = cselib_subst_to_values (x, memmode);

  /* If cselib_preserve_constants, we might get a SP_DERIVED_VALUE_P
     VALUE that isn't in the hash tables anymore.  */
  if (GET_CODE (v) == VALUE && SP_DERIVED_VALUE_P (v) && PRESERVED_VALUE_P (v))
    PRESERVED_VALUE_P (e->val_rtx) = 1;

  new_elt_loc_list (e, v);
  return e;
}

/* Wrapper for cselib_lookup, that indicates X is in INSN.  */

cselib_val *
cselib_lookup_from_insn (rtx x, machine_mode mode,
			 int create, machine_mode memmode, rtx_insn *insn)
{
  cselib_val *ret;

  gcc_assert (!cselib_current_insn);
  cselib_current_insn = insn;

  ret = cselib_lookup (x, mode, create, memmode);

  cselib_current_insn = NULL;

  return ret;
}

/* Wrapper for cselib_lookup_1, that logs the lookup result and
   maintains invariants related with debug insns.  */

cselib_val *
cselib_lookup (rtx x, machine_mode mode,
	       int create, machine_mode memmode)
{
  cselib_val *ret = cselib_lookup_1 (x, mode, create, memmode);

  /* ??? Should we return NULL if we're not to create an entry, the
     found loc is a debug loc and cselib_current_insn is not DEBUG?
     If so, we should also avoid converting val to non-DEBUG; probably
     easiest setting cselib_current_insn to NULL before the call
     above.  */

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      fputs ("cselib lookup ", dump_file);
      print_inline_rtx (dump_file, x, 2);
      fprintf (dump_file, " => %u:%u\n",
	       ret ? ret->uid : 0,
	       ret ? ret->hash : 0);
    }

  return ret;
}
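
/* Usage sketch (hypothetical caller, for illustration only): to test
   whether two rtxs A and B are known to hold the same value without
   creating new table entries, pass CREATE == 0 and compare canonical
   values:

     cselib_val *va = cselib_lookup (a, GET_MODE (a), 0, VOIDmode);
     cselib_val *vb = cselib_lookup (b, GET_MODE (b), 0, VOIDmode);
     if (va && vb
	 && canonical_cselib_val (va) == canonical_cselib_val (vb))
       ... treat A and B as equivalent ...

   With CREATE == 1 the lookup instead guarantees a value, allocating
   a fresh one when X has not been seen before.  */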

/* Invalidate the value at *L, which is part of REG_VALUES (REGNO).  */

static void
cselib_invalidate_regno_val (unsigned int regno, struct elt_list **l)
{
  cselib_val *v = (*l)->elt;
  if (*l == REG_VALUES (regno))
    {
      /* Maintain the invariant that the first entry of
	 REG_VALUES, if present, must be the value used to set
	 the register, or NULL.  This is also nice because
	 then we won't push the same regno onto used_regs
	 multiple times.  */
      (*l)->elt = NULL;
      l = &(*l)->next;
    }
  else
    unchain_one_elt_list (l);

  v = canonical_cselib_val (v);

  bool had_locs = v->locs != NULL;
  rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;

  /* Now, we clear the mapping from value to reg.  It must exist, so
     this code will crash intentionally if it doesn't.  */
  for (elt_loc_list **p = &v->locs; ; p = &(*p)->next)
    {
      rtx x = (*p)->loc;

      if (REG_P (x) && REGNO (x) == regno)
	{
	  unchain_one_elt_loc_list (p);
	  break;
	}
    }

  if (had_locs && cselib_useless_value_p (v))
    {
      if (setting_insn && DEBUG_INSN_P (setting_insn))
	n_useless_debug_values++;
      else
	n_useless_values++;
    }
}

/* Invalidate any entries in reg_values that overlap REGNO.  This is called
   if REGNO is changing.  MODE is the mode of the assignment to REGNO, which
   is used to determine how many hard registers are being changed.  If MODE
   is VOIDmode, then only REGNO is being changed; this is used when
   invalidating call clobbered registers across a call.  */

static void
cselib_invalidate_regno (unsigned int regno, machine_mode mode)
{
  unsigned int endregno;
  unsigned int i;

  /* If we see pseudos after reload, something is _wrong_.  */
  gcc_assert (!reload_completed || regno < FIRST_PSEUDO_REGISTER
	      || reg_renumber[regno] < 0);

  /* Determine the range of registers that must be invalidated.  For
     pseudos, only REGNO is affected.  For hard regs, we must take MODE
     into account, and we must also invalidate lower register numbers
     if they contain values that overlap REGNO.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      gcc_assert (mode != VOIDmode);

      if (regno < max_value_regs)
	i = 0;
      else
	i = regno - max_value_regs;

      endregno = end_hard_regno (mode, regno);
    }
  else
    {
      i = regno;
      endregno = regno + 1;
    }

  for (; i < endregno; i++)
    {
      struct elt_list **l = &REG_VALUES (i);

      /* Go through all known values for this reg; if it overlaps the range
	 we're invalidating, remove the value.  */
      while (*l)
	{
	  cselib_val *v = (*l)->elt;
	  unsigned int this_last = i;

	  if (i < FIRST_PSEUDO_REGISTER && v != NULL)
	    this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;

	  if (this_last < regno || v == NULL
	      || (v == cfa_base_preserved_val
		  && i == cfa_base_preserved_regno))
	    {
	      l = &(*l)->next;
	      continue;
	    }

	  /* We have an overlap.  */
	  cselib_invalidate_regno_val (i, l);
	}
    }
}
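
/* Informal example (hypothetical register numbers): on a target where
   (reg:DI 10) occupies hard regs 10 and 11, an assignment to
   (reg:SI 11) must also invalidate any value recorded for (reg:DI 10).
   That is why the loop above starts scanning max_value_regs below
   REGNO and compares against the last hard reg covered by each
   recorded value's mode.  */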

/* Invalidate any locations in the table which are changed because of a
   store to MEM_RTX.  If this is called because of a non-const call
   instruction, MEM_RTX is (mem:BLK const0_rtx).  */

static void
cselib_invalidate_mem (rtx mem_rtx)
{
  cselib_val **vp, *v, *next;
  int num_mems = 0;
  rtx mem_addr;

  mem_addr = canon_rtx (get_addr (XEXP (mem_rtx, 0)));
  mem_rtx = canon_rtx (mem_rtx);

  vp = &first_containing_mem;
  for (v = *vp; v != &dummy_val; v = next)
    {
      bool has_mem = false;
      struct elt_loc_list **p = &v->locs;
      bool had_locs = v->locs != NULL;
      rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;

      while (*p)
	{
	  rtx x = (*p)->loc;
	  cselib_val *addr;
	  struct elt_list **mem_chain;

	  /* MEMs may occur in locations only at the top level; below
	     that every MEM or REG is substituted by its VALUE.  */
	  if (!MEM_P (x))
	    {
	      p = &(*p)->next;
	      continue;
	    }
	  if (num_mems < param_max_cselib_memory_locations
	      && ! canon_anti_dependence (x, false, mem_rtx,
					  GET_MODE (mem_rtx), mem_addr))
	    {
	      has_mem = true;
	      num_mems++;
	      p = &(*p)->next;
	      continue;
	    }

	  /* This one overlaps.  */
	  /* We must have a mapping from this MEM's address to the
	     value (E).  Remove that, too.  */
	  addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0, GET_MODE (x));
	  addr = canonical_cselib_val (addr);
	  gcc_checking_assert (v == canonical_cselib_val (v));
	  mem_chain = &addr->addr_list;
	  for (;;)
	    {
	      cselib_val *canon = canonical_cselib_val ((*mem_chain)->elt);

	      if (canon == v)
		{
		  unchain_one_elt_list (mem_chain);
		  break;
		}

	      /* Record canonicalized elt.  */
	      (*mem_chain)->elt = canon;

	      mem_chain = &(*mem_chain)->next;
	    }

	  unchain_one_elt_loc_list (p);
	}

      if (had_locs && cselib_useless_value_p (v))
	{
	  if (setting_insn && DEBUG_INSN_P (setting_insn))
	    n_useless_debug_values++;
	  else
	    n_useless_values++;
	}

      next = v->next_containing_mem;
      if (has_mem)
	{
	  *vp = v;
	  vp = &(*vp)->next_containing_mem;
	}
      else
	v->next_containing_mem = NULL;
    }
  *vp = &dummy_val;
}
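
/* Informal example: after a store such as
     (set (mem:SI (reg:SI 4)) (reg:SI 5))
   every tracked MEM location that canon_anti_dependence cannot prove
   disjoint from the stored slot is dropped, while provably disjoint
   MEMs (e.g. distinct frame slots) keep their values.  A non-const
   call invalidates all of memory by passing the special callmem rtx
   here.  */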

/* Invalidate DEST.  */

void
cselib_invalidate_rtx (rtx dest)
{
  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  if (REG_P (dest))
    cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
  else if (MEM_P (dest))
    cselib_invalidate_mem (dest);
}

/* A wrapper for cselib_invalidate_rtx to be called via note_stores.  */

static void
cselib_invalidate_rtx_note_stores (rtx dest, const_rtx,
				   void *data ATTRIBUTE_UNUSED)
{
  cselib_invalidate_rtx (dest);
}

/* Record the result of a SET instruction.  DEST is being set; the source
   contains the value described by SRC_ELT.  If DEST is a MEM, DEST_ADDR_ELT
   describes its address.  */

static void
cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt)
{
  if (src_elt == 0 || side_effects_p (dest))
    return;

  if (REG_P (dest))
    {
      unsigned int dreg = REGNO (dest);
      if (dreg < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int n = REG_NREGS (dest);

	  if (n > max_value_regs)
	    max_value_regs = n;
	}

      if (REG_VALUES (dreg) == 0)
	{
	  used_regs[n_used_regs++] = dreg;
	  REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
	}
      else
	{
	  /* The register should have been invalidated.  */
	  gcc_assert (REG_VALUES (dreg)->elt == 0);
	  REG_VALUES (dreg)->elt = src_elt;
	}

      if (cselib_useless_value_p (src_elt))
	n_useless_values--;
      new_elt_loc_list (src_elt, dest);
    }
  else if (MEM_P (dest) && dest_addr_elt != 0
	   && cselib_record_memory)
    {
      if (cselib_useless_value_p (src_elt))
	n_useless_values--;
      add_mem_for_addr (dest_addr_elt, src_elt, dest);
    }
}

/* Make ELT and X's VALUE equivalent to each other at INSN.  */

void
cselib_add_permanent_equiv (cselib_val *elt, rtx x, rtx_insn *insn)
{
  cselib_val *nelt;
  rtx_insn *save_cselib_current_insn = cselib_current_insn;

  gcc_checking_assert (elt);
  gcc_checking_assert (PRESERVED_VALUE_P (elt->val_rtx));
  gcc_checking_assert (!side_effects_p (x));

  cselib_current_insn = insn;

  nelt = cselib_lookup (x, GET_MODE (elt->val_rtx), 1, VOIDmode);

  if (nelt != elt)
    {
      cselib_any_perm_equivs = true;

      if (!PRESERVED_VALUE_P (nelt->val_rtx))
	cselib_preserve_value (nelt);

      new_elt_loc_list (nelt, elt->val_rtx);
    }

  cselib_current_insn = save_cselib_current_insn;
}
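
/* Sketch of a typical use (hypothetical; modeled on how var-tracking
   records equivalences).  The value must already be preserved before
   an equivalence is pinned to it:

     cselib_preserve_value (v);
     cselib_add_permanent_equiv (v, x, insn);

   The equivalence then stays available for the rest of the pass,
   provided CSELIB_PRESERVE_CONSTANTS was requested at cselib_init
   time.  */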

/* Return TRUE if any permanent equivalences have been recorded since
   the table was last initialized.  */
bool
cselib_have_permanent_equivalences (void)
{
  return cselib_any_perm_equivs;
}

/* Record stack_pointer_rtx to be equal to
   (plus:P cfa_base_preserved_val offset).  Used by var-tracking
   at the start of basic blocks for !frame_pointer_needed functions.  */

void
cselib_record_sp_cfa_base_equiv (HOST_WIDE_INT offset, rtx_insn *insn)
{
  rtx sp_derived_value = NULL_RTX;
  for (struct elt_loc_list *l = cfa_base_preserved_val->locs; l; l = l->next)
    if (GET_CODE (l->loc) == VALUE
	&& SP_DERIVED_VALUE_P (l->loc))
      {
	sp_derived_value = l->loc;
	break;
      }
    else if (GET_CODE (l->loc) == PLUS
	     && GET_CODE (XEXP (l->loc, 0)) == VALUE
	     && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
	     && CONST_INT_P (XEXP (l->loc, 1)))
      {
	sp_derived_value = XEXP (l->loc, 0);
	offset = offset + UINTVAL (XEXP (l->loc, 1));
	break;
      }
  if (sp_derived_value == NULL_RTX)
    return;
  cselib_val *val
    = cselib_lookup_from_insn (plus_constant (Pmode, sp_derived_value, offset),
			       Pmode, 1, VOIDmode, insn);
  if (val != NULL)
    {
      PRESERVED_VALUE_P (val->val_rtx) = 1;
      cselib_record_set (stack_pointer_rtx, val, NULL);
    }
}

/* Return true if V is SP_DERIVED_VALUE_P (or SP_DERIVED_VALUE_P + CONST_INT)
   that can be expressed using cfa_base_preserved_val + CONST_INT.  */

bool
cselib_sp_derived_value_p (cselib_val *v)
{
  if (!SP_DERIVED_VALUE_P (v->val_rtx))
    for (struct elt_loc_list *l = v->locs; l; l = l->next)
      if (GET_CODE (l->loc) == PLUS
	  && GET_CODE (XEXP (l->loc, 0)) == VALUE
	  && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
	  && CONST_INT_P (XEXP (l->loc, 1)))
	v = CSELIB_VAL_PTR (XEXP (l->loc, 0));
  if (!SP_DERIVED_VALUE_P (v->val_rtx))
    return false;
  for (struct elt_loc_list *l = v->locs; l; l = l->next)
    if (l->loc == cfa_base_preserved_val->val_rtx)
      return true;
    else if (GET_CODE (l->loc) == PLUS
	     && XEXP (l->loc, 0) == cfa_base_preserved_val->val_rtx
	     && CONST_INT_P (XEXP (l->loc, 1)))
      return true;
  return false;
}
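
/* For illustration, the two location shapes the predicate above
   accepts are

     (value:P V)				the cfa_base value itself
     (plus:P (value:P V) (const_int N))		that value plus a constant

   which lets consumers rewrite a stack address like sp + 16 as
   cfa_base + constant in !frame_pointer_needed functions.  */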

/* There is no good way to determine how many elements there can be
   in a PARALLEL.  Since it's fairly cheap, use a really large number.  */
#define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)

struct cselib_record_autoinc_data
{
  struct cselib_set *sets;
  int n_sets;
};

/* Callback for for_each_inc_dec.  Records in ARG the SETs implied by
   autoinc RTXs: SRC plus SRCOFF if non-NULL is stored in DEST.  */

static int
cselib_record_autoinc_cb (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
			  rtx dest, rtx src, rtx srcoff, void *arg)
{
  struct cselib_record_autoinc_data *data;
  data = (struct cselib_record_autoinc_data *) arg;

  data->sets[data->n_sets].dest = dest;

  if (srcoff)
    data->sets[data->n_sets].src = gen_rtx_PLUS (GET_MODE (src), src, srcoff);
  else
    data->sets[data->n_sets].src = src;

  data->n_sets++;

  return 0;
}
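
/* Informal example: for an insn containing
     (mem:SI (pre_inc:SI (reg:SI 3)))
   for_each_inc_dec invokes the callback above with DEST == (reg:SI 3),
   SRC == (reg:SI 3) and SRCOFF == (const_int 4), so the implied
   side-effect r3 = r3 + 4 is appended to the SETS array exactly like
   an explicit set.  */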

/* Record the effects of any sets and autoincs in INSN.  */
static void
cselib_record_sets (rtx_insn *insn)
{
  int n_sets = 0;
  int i;
  struct cselib_set sets[MAX_SETS];
  rtx cond = 0;
  int n_sets_before_autoinc;
  int n_strict_low_parts = 0;
  struct cselib_record_autoinc_data data;

  rtx body = PATTERN (insn);
  if (GET_CODE (body) == COND_EXEC)
    {
      cond = COND_EXEC_TEST (body);
      body = COND_EXEC_CODE (body);
    }

  /* Find all sets.  */
  if (GET_CODE (body) == SET)
    {
      sets[0].src = SET_SRC (body);
      sets[0].dest = SET_DEST (body);
      n_sets = 1;
    }
  else if (GET_CODE (body) == PARALLEL)
    {
      /* Look through the PARALLEL and record the values being
	 set, if possible.  Also handle any CLOBBERs.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	{
	  rtx x = XVECEXP (body, 0, i);

	  if (GET_CODE (x) == SET)
	    {
	      sets[n_sets].src = SET_SRC (x);
	      sets[n_sets].dest = SET_DEST (x);
	      n_sets++;
	    }
	}
    }

  if (n_sets == 1
      && MEM_P (sets[0].src)
      && !cselib_record_memory
      && MEM_READONLY_P (sets[0].src))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
	sets[0].src = XEXP (note, 0);
    }

  data.sets = sets;
  data.n_sets = n_sets_before_autoinc = n_sets;
  for_each_inc_dec (PATTERN (insn), cselib_record_autoinc_cb, &data);
  n_sets = data.n_sets;

  /* Look up the values that are read.  Do this before invalidating the
     locations that are written.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;
      rtx orig = dest;

      /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
	 the low part after invalidating any knowledge about larger modes.  */
      if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
	sets[i].dest = dest = XEXP (dest, 0);

      /* We don't know how to record anything but REG or MEM.  */
      if (REG_P (dest)
	  || (MEM_P (dest) && cselib_record_memory))
	{
	  rtx src = sets[i].src;
	  if (cond)
	    src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest);
	  sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1, VOIDmode);
	  if (MEM_P (dest))
	    {
	      machine_mode address_mode = get_address_mode (dest);

	      sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
						     address_mode, 1,
						     GET_MODE (dest));
	    }
	  else
	    sets[i].dest_addr_elt = 0;
	}

      /* Improve handling of STRICT_LOW_PART: if the current value is known
	 to be const0_rtx, then the low bits will be set to dest and the
	 higher bits will remain zero.  Used in code like:

	 {di:SI=0;clobber flags:CC;}
	 flags:CCNO=cmp(bx:SI,0)
	 strict_low_part(di:QI)=flags:CCNO<=0

	 where we can note both that di:QI=flags:CCNO<=0 and
	 also that because di:SI is known to be 0 and strict_low_part(di:QI)
	 preserves the upper bits that di:SI=zero_extend(flags:CCNO<=0).  */
      scalar_int_mode mode;
      if (dest != orig
	  && cselib_record_sets_hook
	  && REG_P (dest)
	  && HARD_REGISTER_P (dest)
	  && sets[i].src_elt
	  && is_a <scalar_int_mode> (GET_MODE (dest), &mode)
	  && n_sets + n_strict_low_parts < MAX_SETS)
	{
	  opt_scalar_int_mode wider_mode_iter;
	  FOR_EACH_WIDER_MODE (wider_mode_iter, mode)
	    {
	      scalar_int_mode wider_mode = wider_mode_iter.require ();
	      if (GET_MODE_PRECISION (wider_mode) > BITS_PER_WORD)
		break;

	      rtx reg = gen_lowpart (wider_mode, dest);
	      if (!REG_P (reg))
		break;

	      cselib_val *v = cselib_lookup (reg, wider_mode, 0, VOIDmode);
	      if (!v)
		continue;

	      struct elt_loc_list *l;
	      for (l = v->locs; l; l = l->next)
		if (l->loc == const0_rtx)
		  break;

	      if (!l)
		continue;

	      sets[n_sets + n_strict_low_parts].dest = reg;
	      sets[n_sets + n_strict_low_parts].src = dest;
	      sets[n_sets + n_strict_low_parts++].src_elt = sets[i].src_elt;
	      break;
	    }
	}
    }

  if (cselib_record_sets_hook)
    cselib_record_sets_hook (insn, sets, n_sets);

  /* Invalidate all locations written by this insn.  Note that the elts we
     looked up in the previous loop aren't affected, just some of their
     locations may go away.  */
  note_pattern_stores (body, cselib_invalidate_rtx_note_stores, NULL);

  for (i = n_sets_before_autoinc; i < n_sets; i++)
    cselib_invalidate_rtx (sets[i].dest);

  /* If this is an asm, look for duplicate sets.  This can happen when the
     user uses the same value as an output multiple times.  This is valid
     if the outputs are not actually used thereafter.  Treat this case as
     if the value isn't actually set.  We do this by smashing the destination
     to pc_rtx, so that we won't record the value later.  */
  if (n_sets >= 2 && asm_noperands (body) >= 0)
    {
      for (i = 0; i < n_sets; i++)
	{
	  rtx dest = sets[i].dest;
	  if (REG_P (dest) || MEM_P (dest))
	    {
	      int j;
	      for (j = i + 1; j < n_sets; j++)
		if (rtx_equal_p (dest, sets[j].dest))
		  {
		    sets[i].dest = pc_rtx;
		    sets[j].dest = pc_rtx;
		  }
	    }
	}
    }

  /* Now enter the equivalences in our tables.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;
      if (REG_P (dest)
	  || (MEM_P (dest) && cselib_record_memory))
	cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
    }

  /* And deal with STRICT_LOW_PART.  */
  for (i = 0; i < n_strict_low_parts; i++)
    {
      if (! PRESERVED_VALUE_P (sets[n_sets + i].src_elt->val_rtx))
	continue;
      machine_mode dest_mode = GET_MODE (sets[n_sets + i].dest);
      cselib_val *v
	= cselib_lookup (sets[n_sets + i].dest, dest_mode, 1, VOIDmode);
      cselib_preserve_value (v);
      rtx r = gen_rtx_ZERO_EXTEND (dest_mode,
				   sets[n_sets + i].src_elt->val_rtx);
      cselib_add_permanent_equiv (v, r, insn);
    }
}

/* Return true if INSN in the prologue initializes hard_frame_pointer_rtx.  */

bool
fp_setter_insn (rtx_insn *insn)
{
  rtx expr, pat = NULL_RTX;

  if (!RTX_FRAME_RELATED_P (insn))
    return false;

  expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
  if (expr)
    pat = XEXP (expr, 0);
  if (!modified_in_p (hard_frame_pointer_rtx, pat ? pat : insn))
    return false;

  /* Don't return true for frame pointer restores in the epilogue.  */
  if (find_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx))
    return false;
  return true;
}

/* V is one of the values in REG_VALUES (REGNO).  Return true if it
   would be invalidated by CALLEE_ABI.  */

static bool
cselib_invalidated_by_call_p (const function_abi &callee_abi,
			      unsigned int regno, cselib_val *v)
{
  machine_mode mode = GET_MODE (v->val_rtx);
  if (mode == VOIDmode)
    {
      v = REG_VALUES (regno)->elt;
      if (!v)
	/* If we don't know what the mode of the constant value is, and we
	   don't know what mode the register was set in, conservatively
	   assume that the register is clobbered.  The value's going to be
	   essentially useless in this case anyway.  */
	return true;
      mode = GET_MODE (v->val_rtx);
    }
  return callee_abi.clobbers_reg_p (mode, regno);
}

/* Record the effects of INSN.  */

void
cselib_process_insn (rtx_insn *insn)
{
  int i;
  rtx x;

  cselib_current_insn = insn;

  /* Forget everything at a CODE_LABEL or a setjmp.  */
  if ((LABEL_P (insn)
       || (CALL_P (insn)
	   && find_reg_note (insn, REG_SETJMP, NULL)))
      && !cselib_preserve_constants)
    {
      cselib_reset_table (next_uid);
      cselib_current_insn = NULL;
      return;
    }

  if (! INSN_P (insn))
    {
      cselib_current_insn = NULL;
      return;
    }

  /* If this is a call instruction, forget anything stored in a
     call clobbered register, or, if this is not a const call, in
     memory.  */
  if (CALL_P (insn))
    {
      function_abi callee_abi = insn_callee_abi (insn);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	{
	  elt_list **l = &REG_VALUES (i);
	  while (*l)
	    {
	      cselib_val *v = (*l)->elt;
	      if (v && cselib_invalidated_by_call_p (callee_abi, i, v))
		cselib_invalidate_regno_val (i, l);
	      else
		l = &(*l)->next;
	    }
	}

      /* Since it is not clear how cselib is going to be used, be
	 conservative here and treat looping pure or const functions
	 as if they were regular functions.  */
      if (RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)
	  || !(RTL_CONST_OR_PURE_CALL_P (insn)))
	cselib_invalidate_mem (callmem);
      else
	/* For const/pure calls, invalidate any argument slots because
	   they are owned by the callee.  */
	for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
	  if (GET_CODE (XEXP (x, 0)) == USE
	      && MEM_P (XEXP (XEXP (x, 0), 0)))
	    cselib_invalidate_mem (XEXP (XEXP (x, 0), 0));
    }

  cselib_record_sets (insn);

  /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
     after we have processed the insn.  */
  if (CALL_P (insn))
    {
      for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
	if (GET_CODE (XEXP (x, 0)) == CLOBBER)
	  cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));

      /* Flush everything on setjmp.  */
      if (cselib_preserve_constants
	  && find_reg_note (insn, REG_SETJMP, NULL))
	{
	  cselib_preserve_only_values ();
	  cselib_reset_table (next_uid);
	}
    }

  /* When frame_pointer_needed and this insn sets the hard frame pointer,
     invalidate stack_pointer_rtx, so that sp and {,h}fp based
     VALUEs are distinct.  */
  if (reload_completed
      && frame_pointer_needed
      && fp_setter_insn (insn))
    cselib_invalidate_rtx (stack_pointer_rtx);

  cselib_current_insn = NULL;

  if (n_useless_values > MAX_USELESS_VALUES
      /* remove_useless_values is linear in the hash table size.  Avoid
	 quadratic behavior for very large hashtables with very few
	 useless elements.  */
      && ((unsigned int) n_useless_values
	  > (cselib_hash_table->elements () - n_debug_values) / 4))
    remove_useless_values ();
}

/* Initialize cselib for one pass.  The caller must also call
   init_alias_analysis.  */

void
cselib_init (int record_what)
{
  cselib_record_memory = record_what & CSELIB_RECORD_MEMORY;
  cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS;
  cselib_any_perm_equivs = false;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything,
     see canon_true_dependence.  This is only created once.  */
  if (! callmem)
    callmem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));

  cselib_nregs = max_reg_num ();

  /* We preserve reg_values to allow expensive clearing of the whole thing.
     Reallocate it however if it happens to be too large.  */
  if (!reg_values || reg_values_size < cselib_nregs
      || (reg_values_size > 10 && reg_values_size > cselib_nregs * 4))
    {
      free (reg_values);
      /* Some space for newly emitted instructions so we don't end up
	 reallocating in between passes.  */
      reg_values_size = cselib_nregs + (63 + cselib_nregs) / 16;
      reg_values = XCNEWVEC (struct elt_list *, reg_values_size);
    }
  used_regs = XNEWVEC (unsigned int, cselib_nregs);
  n_used_regs = 0;
  /* FIXME: enable sanitization (PR87845) */
  cselib_hash_table
    = new hash_table<cselib_hasher> (31, /* ggc */ false,
				     /* sanitize_eq_and_hash */ false);
  if (cselib_preserve_constants)
    cselib_preserved_hash_table
      = new hash_table<cselib_hasher> (31, /* ggc */ false,
				       /* sanitize_eq_and_hash */ false);
  next_uid = 1;
}
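
/* Shape of a typical client, as a sketch (hypothetical; a real pass
   such as DSE also calls init_alias_analysis and does its own work per
   insn).  Local value numbering is per basic block, so the table is
   cleared between blocks:

     basic_block bb;
     rtx_insn *insn;

     cselib_init (CSELIB_RECORD_MEMORY);
     FOR_EACH_BB_FN (bb, cfun)
       {
	 FOR_BB_INSNS (bb, insn)
	   cselib_process_insn (insn);
	 cselib_clear_table ();
       }
     cselib_finish ();  */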

/* Called when the current user is done with cselib.  */

void
cselib_finish (void)
{
  bool preserved = cselib_preserve_constants;
  cselib_discard_hook = NULL;
  cselib_preserve_constants = false;
  cselib_any_perm_equivs = false;
  cfa_base_preserved_val = NULL;
  cfa_base_preserved_regno = INVALID_REGNUM;
  elt_list_pool.release ();
  elt_loc_list_pool.release ();
  cselib_val_pool.release ();
  value_pool.release ();
  cselib_clear_table ();
  delete cselib_hash_table;
  cselib_hash_table = NULL;
  if (preserved)
    delete cselib_preserved_hash_table;
  cselib_preserved_hash_table = NULL;
  free (used_regs);
  used_regs = 0;
  n_useless_values = 0;
  n_useless_debug_values = 0;
  n_debug_values = 0;
  next_uid = 0;
}

/* Dump the cselib_val *X to FILE *OUT.  */

int
dump_cselib_val (cselib_val **x, FILE *out)
{
  cselib_val *v = *x;
  bool need_lf = true;

  print_inline_rtx (out, v->val_rtx, 0);

  if (v->locs)
    {
      struct elt_loc_list *l = v->locs;
      if (need_lf)
	{
	  fputc ('\n', out);
	  need_lf = false;
	}
      fputs (" locs:", out);
      do
	{
	  if (l->setting_insn)
	    fprintf (out, "\n  from insn %i ",
		     INSN_UID (l->setting_insn));
	  else
	    fprintf (out, "\n   ");
	  print_inline_rtx (out, l->loc, 4);
	}
      while ((l = l->next));
      fputc ('\n', out);
    }
  else
    {
      fputs (" no locs", out);
      need_lf = true;
    }

  if (v->addr_list)
    {
      struct elt_list *e = v->addr_list;
      if (need_lf)
	{
	  fputc ('\n', out);
	  need_lf = false;
	}
      fputs (" addr list:", out);
      do
	{
	  fputs ("\n  ", out);
	  print_inline_rtx (out, e->elt->val_rtx, 2);
	}
      while ((e = e->next));
      fputc ('\n', out);
    }
  else
    {
      fputs (" no addrs", out);
      need_lf = true;
    }

  if (v->next_containing_mem == &dummy_val)
    fputs (" last mem\n", out);
  else if (v->next_containing_mem)
    {
      fputs (" next mem ", out);
      print_inline_rtx (out, v->next_containing_mem->val_rtx, 2);
      fputc ('\n', out);
    }
  else if (need_lf)
    fputc ('\n', out);

  return 1;
}

/* Dump to OUT everything in the CSELIB table.  */

void
dump_cselib_table (FILE *out)
{
  fprintf (out, "cselib hash table:\n");
  cselib_hash_table->traverse <FILE *, dump_cselib_val> (out);
  fprintf (out, "cselib preserved hash table:\n");
  cselib_preserved_hash_table->traverse <FILE *, dump_cselib_val> (out);
  if (first_containing_mem != &dummy_val)
    {
      fputs ("first mem ", out);
      print_inline_rtx (out, first_containing_mem->val_rtx, 2);
      fputc ('\n', out);
    }
  fprintf (out, "next uid %i\n", next_uid);
}

#include "gt-cselib.h"