/* Common subexpression elimination library for GNU compiler.
   Copyright (C) 1987-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "regs.h"
#include "emit-rtl.h"
#include "dumpfile.h"
#include "cselib.h"
#include "params.h"

/* A list of cselib_val structures.  */
struct elt_list
{
  struct elt_list *next;
  cselib_val *elt;
};

static bool cselib_record_memory;
static bool cselib_preserve_constants;
static bool cselib_any_perm_equivs;
static inline void promote_debug_loc (struct elt_loc_list *l);
static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
static void new_elt_loc_list (cselib_val *, rtx);
static void unchain_one_value (cselib_val *);
static void unchain_one_elt_list (struct elt_list **);
static void unchain_one_elt_loc_list (struct elt_loc_list **);
static void remove_useless_values (void);
static unsigned int cselib_hash_rtx (rtx, int, machine_mode);
static cselib_val *new_cselib_val (unsigned int, machine_mode, rtx);
static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
static cselib_val *cselib_lookup_mem (rtx, int);
static void cselib_invalidate_regno (unsigned int, machine_mode,
				     const_rtx = NULL);
static void cselib_invalidate_mem (rtx);
static void cselib_record_set (rtx, cselib_val *, cselib_val *);
static void cselib_record_sets (rtx_insn *);

struct expand_value_data
{
  bitmap regs_active;
  cselib_expand_callback callback;
  void *callback_arg;
  bool dummy;
};

static rtx cselib_expand_value_rtx_1 (rtx, struct expand_value_data *, int);

/* There are three ways in which cselib can look up an rtx:
   - for a REG, the reg_values table (which is indexed by regno) is used
   - for a MEM, we recursively look up its address and then follow the
     addr_list of that value
   - for everything else, we compute a hash value and go through the hash
     table.  Since different rtx's can still have the same hash value,
     this involves walking the table entries for a given value and comparing
     the locations of the entries with the rtx we are looking up.  */
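
/* For example (an illustrative sketch only, not part of cselib): a
   client pass that wants to know whether two registers currently hold
   the same value can look both up with CREATE == 0, so that nothing
   new is allocated, and compare the canonical values.  VOIDmode says
   the regs are not part of a MEM address:

     static bool
     regs_hold_same_value_p (rtx reg1, rtx reg2)
     {
       cselib_val *v1 = cselib_lookup (reg1, GET_MODE (reg1), 0, VOIDmode);
       cselib_val *v2 = cselib_lookup (reg2, GET_MODE (reg2), 0, VOIDmode);
       return v1 && v2
	      && canonical_cselib_val (v1) == canonical_cselib_val (v2);
     }  */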

struct cselib_hasher : nofree_ptr_hash <cselib_val>
{
  struct key {
    /* The rtx value and its mode (needed separately for constant
       integers).  */
    machine_mode mode;
    rtx x;
    /* The mode of the containing MEM, if any, otherwise VOIDmode.  */
    machine_mode memmode;
  };
  typedef key *compare_type;
  static inline hashval_t hash (const cselib_val *);
  static inline bool equal (const cselib_val *, const key *);
};

/* The hash function for our hash table.  The value is always computed with
   cselib_hash_rtx when adding an element; this function just extracts the
   hash value from a cselib_val structure.  */

inline hashval_t
cselib_hasher::hash (const cselib_val *v)
{
  return v->hash;
}

/* The equality test for our hash table.  The first argument V is a table
   element (i.e. a cselib_val), while the second arg X is an rtx.  We know
   that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
   CONST of an appropriate mode.  */

inline bool
cselib_hasher::equal (const cselib_val *v, const key *x_arg)
{
  struct elt_loc_list *l;
  rtx x = x_arg->x;
  machine_mode mode = x_arg->mode;
  machine_mode memmode = x_arg->memmode;

  if (mode != GET_MODE (v->val_rtx))
    return false;

  if (GET_CODE (x) == VALUE)
    return x == v->val_rtx;

  /* We don't guarantee that distinct rtx's have different hash values,
     so we need to do a comparison.  */
  for (l = v->locs; l; l = l->next)
    if (rtx_equal_for_cselib_1 (l->loc, x, memmode, 0))
      {
	promote_debug_loc (l);
	return true;
      }

  return false;
}

/* A table that enables us to look up elts by their value.  */
static hash_table<cselib_hasher> *cselib_hash_table;

/* A table to hold preserved values.  */
static hash_table<cselib_hasher> *cselib_preserved_hash_table;

/* This is a global so we don't have to pass it through every function.
   It is used in new_elt_loc_list to set SETTING_INSN.  */
static rtx_insn *cselib_current_insn;

/* The unique id that the next value created will take.  */
static unsigned int next_uid;

/* The number of registers we had when the varrays were last resized.  */
static unsigned int cselib_nregs;

/* Count values without known locations, or with only locations that
   wouldn't have been known except for debug insns.  Whenever this
   grows too big, we remove these useless values from the table.

   Counting values with only debug values is a bit tricky.  We don't
   want to increment n_useless_values when we create a value for a
   debug insn, for this would get n_useless_values out of sync, but we
   want to increment it if all locs in the list that were ever referenced
   in nondebug insns are removed from the list.

   In the general case, once we do that, we'd have to stop accepting
   nondebug expressions in the loc list, to avoid having two values
   equivalent that, without debug insns, would have been made into
   separate values.  However, because debug insns never introduce
   equivalences themselves (no assignments), the only means for
   growing loc lists is through nondebug assignments.  If the locs
   also happen to be referenced in debug insns, it will work just fine.

   A consequence of this is that there's at most one debug-only loc in
   each loc list.  If we keep it in the first entry, testing whether
   we have a debug-only loc list takes O(1).

   Furthermore, since any additional entry in a loc list containing a
   debug loc would have to come from an assignment (nondebug) that
   references both the initial debug loc and the newly-equivalent loc,
   the initial debug loc would be promoted to a nondebug loc, and the
   loc list would not contain debug locs any more.

   So the only case we have to be careful with in order to keep
   n_useless_values in sync between debug and nondebug compilations is
   to avoid incrementing n_useless_values when removing the single loc
   from a value that turns out to not appear outside debug values.  We
   increment n_useless_debug_values instead, and leave such values
   alone until, for other reasons, we garbage-collect useless
   values.  */
static int n_useless_values;
static int n_useless_debug_values;

/* Count values whose locs have been taken exclusively from debug
   insns for the entire life of the value.  */
static int n_debug_values;

/* Number of useless values before we remove them from the hash table.  */
#define MAX_USELESS_VALUES 32

/* This table maps from register number to values.  It does not
   contain pointers to cselib_val structures, but rather elt_lists.
   The purpose is to be able to refer to the same register in
   different modes.  The first element of the list defines the mode in
   which the register was set; if the mode is unknown or the value is
   no longer valid in that mode, ELT will be NULL for the first
   element.  */
static struct elt_list **reg_values;
static unsigned int reg_values_size;
#define REG_VALUES(i) reg_values[i]
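
/* For instance, after (set (reg:SI 1) ...) is processed, REG_VALUES (1)
   starts with an entry whose ELT is the SImode value of register 1; a
   later lookup of (reg:HI 1) appends a second entry for the HImode
   value of the same register.  (Illustrative; the exact list contents
   depend on the order of lookups.)  */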

/* The largest number of hard regs used by any entry added to the
   REG_VALUES table.  Cleared on each cselib_clear_table() invocation.  */
static unsigned int max_value_regs;

/* Here the set of indices I with REG_VALUES(I) != 0 is saved.  This is used
   in cselib_clear_table() for fast emptying.  */
static unsigned int *used_regs;
static unsigned int n_used_regs;

/* We pass this to cselib_invalidate_mem to invalidate all of
   memory for a non-const call instruction.  */
static GTY(()) rtx callmem;

/* Set by discard_useless_locs if it deleted the last location of any
   value.  */
static int values_became_useless;

/* Used as stop element of the containing_mem list so we can check
   presence in the list by checking the next pointer.  */
static cselib_val dummy_val;

/* If non-NULL, value of the eliminated arg_pointer_rtx or frame_pointer_rtx
   that is constant through the whole function and should never be
   eliminated.  */
static cselib_val *cfa_base_preserved_val;
static unsigned int cfa_base_preserved_regno = INVALID_REGNUM;

/* Used to list all values that contain a memory reference.
   May or may not contain the useless values - the list is compacted
   each time memory is invalidated.  */
static cselib_val *first_containing_mem = &dummy_val;

static object_allocator<elt_list> elt_list_pool ("elt_list");
static object_allocator<elt_loc_list> elt_loc_list_pool ("elt_loc_list");
static object_allocator<cselib_val> cselib_val_pool ("cselib_val_list");

static pool_allocator value_pool ("value", RTX_CODE_SIZE (VALUE));

/* If nonnull, cselib will call this function before freeing useless
   VALUEs.  A VALUE is deemed useless if its "locs" field is null.  */
void (*cselib_discard_hook) (cselib_val *);

/* If nonnull, cselib will call this function before recording sets or
   even clobbering outputs of INSN.  All the recorded sets will be
   represented in the array sets[n_sets].  new_val_min can be used to
   tell whether values present in sets are introduced by this
   instruction.  */
void (*cselib_record_sets_hook) (rtx_insn *insn, struct cselib_set *sets,
				 int n_sets);

#define PRESERVED_VALUE_P(RTX) \
  (RTL_FLAG_CHECK1 ("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)

#define SP_BASED_VALUE_P(RTX) \
  (RTL_FLAG_CHECK1 ("SP_BASED_VALUE_P", (RTX), VALUE)->jump)



/* Allocate a struct elt_list and fill in its two elements with the
   arguments.  */

static inline struct elt_list *
new_elt_list (struct elt_list *next, cselib_val *elt)
{
  elt_list *el = elt_list_pool.allocate ();
  el->next = next;
  el->elt = elt;
  return el;
}

/* Allocate a struct elt_loc_list with LOC and prepend it to VAL's loc
   list.  */

static inline void
new_elt_loc_list (cselib_val *val, rtx loc)
{
  struct elt_loc_list *el, *next = val->locs;

  gcc_checking_assert (!next || !next->setting_insn
		       || !DEBUG_INSN_P (next->setting_insn)
		       || cselib_current_insn == next->setting_insn);

  /* If we're creating the first loc in a debug insn context, we've
     just created a debug value.  Count it.  */
  if (!next && cselib_current_insn && DEBUG_INSN_P (cselib_current_insn))
    n_debug_values++;

  val = canonical_cselib_val (val);
  next = val->locs;

  if (GET_CODE (loc) == VALUE)
    {
      loc = canonical_cselib_val (CSELIB_VAL_PTR (loc))->val_rtx;

      gcc_checking_assert (PRESERVED_VALUE_P (loc)
			   == PRESERVED_VALUE_P (val->val_rtx));

      if (val->val_rtx == loc)
	return;
      else if (val->uid > CSELIB_VAL_PTR (loc)->uid)
	{
	  /* Reverse the insertion.  */
	  new_elt_loc_list (CSELIB_VAL_PTR (loc), val->val_rtx);
	  return;
	}

      gcc_checking_assert (val->uid < CSELIB_VAL_PTR (loc)->uid);

      if (CSELIB_VAL_PTR (loc)->locs)
	{
	  /* Bring all locs from LOC to VAL.  */
	  for (el = CSELIB_VAL_PTR (loc)->locs; el->next; el = el->next)
	    {
	      /* Adjust values that have LOC as canonical so that VAL
		 becomes their canonical.  */
	      if (el->loc && GET_CODE (el->loc) == VALUE)
		{
		  gcc_checking_assert (CSELIB_VAL_PTR (el->loc)->locs->loc
				       == loc);
		  CSELIB_VAL_PTR (el->loc)->locs->loc = val->val_rtx;
		}
	    }
	  el->next = val->locs;
	  next = val->locs = CSELIB_VAL_PTR (loc)->locs;
	}

      if (CSELIB_VAL_PTR (loc)->addr_list)
	{
	  /* Bring LOC's addr_list into the canonical node.  */
	  struct elt_list *last = CSELIB_VAL_PTR (loc)->addr_list;
	  while (last->next)
	    last = last->next;
	  last->next = val->addr_list;
	  val->addr_list = CSELIB_VAL_PTR (loc)->addr_list;
	  CSELIB_VAL_PTR (loc)->addr_list = NULL;
	}

      if (CSELIB_VAL_PTR (loc)->next_containing_mem != NULL
	  && val->next_containing_mem == NULL)
	{
	  /* Add VAL to the containing_mem list after LOC.  LOC will
	     be removed when we notice it doesn't contain any
	     MEMs.  */
	  val->next_containing_mem = CSELIB_VAL_PTR (loc)->next_containing_mem;
	  CSELIB_VAL_PTR (loc)->next_containing_mem = val;
	}

      /* Chain LOC back to VAL.  */
      el = elt_loc_list_pool.allocate ();
      el->loc = val->val_rtx;
      el->setting_insn = cselib_current_insn;
      el->next = NULL;
      CSELIB_VAL_PTR (loc)->locs = el;
    }

  el = elt_loc_list_pool.allocate ();
  el->loc = loc;
  el->setting_insn = cselib_current_insn;
  el->next = next;
  val->locs = el;
}
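
/* Illustration of the canonicalization above: if values V1 (uid 1) and
   V2 (uid 2) are discovered to be equivalent, the insertion is always
   oriented so that V1, the value with the smaller uid, becomes
   canonical: V2's locs and addr_list are moved over to V1, and V2 is
   left with the single location (value V1) chaining it back to its
   canonical value.  */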

/* Promote loc L to a nondebug cselib_current_insn if L is marked as
   originating from a debug insn, maintaining the debug values
   count.  */

static inline void
promote_debug_loc (struct elt_loc_list *l)
{
  if (l && l->setting_insn && DEBUG_INSN_P (l->setting_insn)
      && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
    {
      n_debug_values--;
      l->setting_insn = cselib_current_insn;
      if (cselib_preserve_constants && l->next)
	{
	  gcc_assert (l->next->setting_insn
		      && DEBUG_INSN_P (l->next->setting_insn)
		      && !l->next->next);
	  l->next->setting_insn = cselib_current_insn;
	}
      else
	gcc_assert (!l->next);
    }
}

/* The elt_list at *PL is no longer needed.  Unchain it and free its
   storage.  */

static inline void
unchain_one_elt_list (struct elt_list **pl)
{
  struct elt_list *l = *pl;

  *pl = l->next;
  elt_list_pool.remove (l);
}

/* Likewise for elt_loc_lists.  */

static void
unchain_one_elt_loc_list (struct elt_loc_list **pl)
{
  struct elt_loc_list *l = *pl;

  *pl = l->next;
  elt_loc_list_pool.remove (l);
}

/* Likewise for cselib_vals.  This also frees the addr_list associated with
   V.  */

static void
unchain_one_value (cselib_val *v)
{
  while (v->addr_list)
    unchain_one_elt_list (&v->addr_list);

  cselib_val_pool.remove (v);
}

/* Remove all entries from the hash table.  Also used during
   initialization.  */

void
cselib_clear_table (void)
{
  cselib_reset_table (1);
}

/* Return TRUE if V is a constant, a function invariant or a VALUE
   equivalence; FALSE otherwise.  */

static bool
invariant_or_equiv_p (cselib_val *v)
{
  struct elt_loc_list *l;

  if (v == cfa_base_preserved_val)
    return true;

  /* Keep VALUE equivalences around.  */
  for (l = v->locs; l; l = l->next)
    if (GET_CODE (l->loc) == VALUE)
      return true;

  if (v->locs != NULL
      && v->locs->next == NULL)
    {
      if (CONSTANT_P (v->locs->loc)
	  && (GET_CODE (v->locs->loc) != CONST
	      || !references_value_p (v->locs->loc, 0)))
	return true;
      /* Although a debug expr may be bound to different expressions,
	 we can preserve it as if it was constant, to get unification
	 and proper merging within var-tracking.  */
      if (GET_CODE (v->locs->loc) == DEBUG_EXPR
	  || GET_CODE (v->locs->loc) == DEBUG_IMPLICIT_PTR
	  || GET_CODE (v->locs->loc) == ENTRY_VALUE
	  || GET_CODE (v->locs->loc) == DEBUG_PARAMETER_REF)
	return true;

      /* (plus (value V) (const_int C)) is invariant iff V is invariant.  */
      if (GET_CODE (v->locs->loc) == PLUS
	  && CONST_INT_P (XEXP (v->locs->loc, 1))
	  && GET_CODE (XEXP (v->locs->loc, 0)) == VALUE
	  && invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (v->locs->loc, 0))))
	return true;
    }

  return false;
}
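
/* For example, if V1's single location is (plus (value V2) (const_int 8))
   and V2's single location is (symbol_ref "x"), both values are kept:
   V2 directly as a constant, and V1 through the recursive PLUS case
   above.  (Illustrative only.)  */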

/* Remove from hash table all VALUEs except constants, function
   invariants and VALUE equivalences.  */

int
preserve_constants_and_equivs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = *x;

  if (invariant_or_equiv_p (v))
    {
      cselib_hasher::key lookup = {
	GET_MODE (v->val_rtx), v->val_rtx, VOIDmode
      };
      cselib_val **slot
	= cselib_preserved_hash_table->find_slot_with_hash (&lookup,
							    v->hash, INSERT);
      gcc_assert (!*slot);
      *slot = v;
    }

  cselib_hash_table->clear_slot (x);

  return 1;
}

/* Remove all entries from the hash table, arranging for the next
   value to be numbered NUM.  */

void
cselib_reset_table (unsigned int num)
{
  unsigned int i;

  max_value_regs = 0;

  if (cfa_base_preserved_val)
    {
      unsigned int regno = cfa_base_preserved_regno;
      unsigned int new_used_regs = 0;
      for (i = 0; i < n_used_regs; i++)
	if (used_regs[i] == regno)
	  {
	    new_used_regs = 1;
	    continue;
	  }
	else
	  REG_VALUES (used_regs[i]) = 0;
      gcc_assert (new_used_regs == 1);
      n_used_regs = new_used_regs;
      used_regs[0] = regno;
      max_value_regs
	= hard_regno_nregs (regno,
			    GET_MODE (cfa_base_preserved_val->locs->loc));
    }
  else
    {
      for (i = 0; i < n_used_regs; i++)
	REG_VALUES (used_regs[i]) = 0;
      n_used_regs = 0;
    }

  if (cselib_preserve_constants)
    cselib_hash_table->traverse <void *, preserve_constants_and_equivs>
      (NULL);
  else
    {
      cselib_hash_table->empty ();
      gcc_checking_assert (!cselib_any_perm_equivs);
    }

  n_useless_values = 0;
  n_useless_debug_values = 0;
  n_debug_values = 0;

  next_uid = num;

  first_containing_mem = &dummy_val;
}

/* Return the number of the next value that will be generated.  */

unsigned int
cselib_get_next_uid (void)
{
  return next_uid;
}

/* Search for X, whose hashcode is HASH, in CSELIB_HASH_TABLE,
   INSERTing if requested.  When X is part of the address of a MEM,
   MEMMODE should specify the mode of the MEM.  */

static cselib_val **
cselib_find_slot (machine_mode mode, rtx x, hashval_t hash,
		  enum insert_option insert, machine_mode memmode)
{
  cselib_val **slot = NULL;
  cselib_hasher::key lookup = { mode, x, memmode };
  if (cselib_preserve_constants)
    slot = cselib_preserved_hash_table->find_slot_with_hash (&lookup, hash,
							     NO_INSERT);
  if (!slot)
    slot = cselib_hash_table->find_slot_with_hash (&lookup, hash, insert);
  return slot;
}

/* Return true if X contains a VALUE rtx.  If ONLY_USELESS is set, we
   only return true for values which point to a cselib_val whose value
   element has been set to zero, which implies the cselib_val will be
   removed.  */

int
references_value_p (const_rtx x, int only_useless)
{
  const enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  int i, j;

  if (GET_CODE (x) == VALUE
      && (! only_useless ||
	  (CSELIB_VAL_PTR (x)->locs == 0 && !PRESERVED_VALUE_P (x))))
    return 1;

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
	return 1;
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (references_value_p (XVECEXP (x, i, j), only_useless))
	    return 1;
    }

  return 0;
}

/* For all locations found in X, delete locations that reference useless
   values (i.e. values without any location).  Called through
   htab_traverse.  */

int
discard_useless_locs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = *x;
  struct elt_loc_list **p = &v->locs;
  bool had_locs = v->locs != NULL;
  rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;

  while (*p)
    {
      if (references_value_p ((*p)->loc, 1))
	unchain_one_elt_loc_list (p);
      else
	p = &(*p)->next;
    }

  if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
    {
      if (setting_insn && DEBUG_INSN_P (setting_insn))
	n_useless_debug_values++;
      else
	n_useless_values++;
      values_became_useless = 1;
    }
  return 1;
}

/* If X is a value with no locations, remove it from the hashtable.  */

int
discard_useless_values (cselib_val **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = *x;

  if (v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
    {
      if (cselib_discard_hook)
	cselib_discard_hook (v);

      CSELIB_VAL_PTR (v->val_rtx) = NULL;
      cselib_hash_table->clear_slot (x);
      unchain_one_value (v);
      n_useless_values--;
    }

  return 1;
}

/* Clean out useless values (i.e. those which no longer have locations
   associated with them) from the hash table.  */

static void
remove_useless_values (void)
{
  cselib_val **p, *v;

  /* First pass: eliminate locations that reference the value.  That in
     turn can make more values useless.  */
  do
    {
      values_became_useless = 0;
      cselib_hash_table->traverse <void *, discard_useless_locs> (NULL);
    }
  while (values_became_useless);

  /* Second pass: actually remove the values.  */

  p = &first_containing_mem;
  for (v = *p; v != &dummy_val; v = v->next_containing_mem)
    if (v->locs && v == canonical_cselib_val (v))
      {
	*p = v;
	p = &(*p)->next_containing_mem;
      }
  *p = &dummy_val;

  n_useless_values += n_useless_debug_values;
  n_debug_values -= n_useless_debug_values;
  n_useless_debug_values = 0;

  cselib_hash_table->traverse <void *, discard_useless_values> (NULL);

  gcc_assert (!n_useless_values);
}

/* Arrange for a value to not be removed from the hash table even if
   it becomes useless.  */

void
cselib_preserve_value (cselib_val *v)
{
  PRESERVED_VALUE_P (v->val_rtx) = 1;
}

/* Test whether a value is preserved.  */

bool
cselib_preserved_value_p (cselib_val *v)
{
  return PRESERVED_VALUE_P (v->val_rtx);
}

/* Arrange for a REG value to be assumed constant through the whole function,
   never invalidated and preserved across cselib_reset_table calls.  */

void
cselib_preserve_cfa_base_value (cselib_val *v, unsigned int regno)
{
  if (cselib_preserve_constants
      && v->locs
      && REG_P (v->locs->loc))
    {
      cfa_base_preserved_val = v;
      cfa_base_preserved_regno = regno;
    }
}

/* Clean all non-constant expressions in the hash table, but retain
   their values.  */

void
cselib_preserve_only_values (void)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    cselib_invalidate_regno (i, reg_raw_mode[i]);

  cselib_invalidate_mem (callmem);

  remove_useless_values ();

  gcc_assert (first_containing_mem == &dummy_val);
}

/* Arrange for a value to be marked as based on stack pointer
   for find_base_term purposes.  */

void
cselib_set_value_sp_based (cselib_val *v)
{
  SP_BASED_VALUE_P (v->val_rtx) = 1;
}

/* Test whether a value is based on stack pointer for
   find_base_term purposes.  */

bool
cselib_sp_based_value_p (cselib_val *v)
{
  return SP_BASED_VALUE_P (v->val_rtx);
}

/* Return the mode in which a register was last set.  If X is not a
   register, return its mode.  If the mode in which the register was
   set is not known, or the value was already clobbered, return
   VOIDmode.  */

machine_mode
cselib_reg_set_mode (const_rtx x)
{
  if (!REG_P (x))
    return GET_MODE (x);

  if (REG_VALUES (REGNO (x)) == NULL
      || REG_VALUES (REGNO (x))->elt == NULL)
    return VOIDmode;

  return GET_MODE (REG_VALUES (REGNO (x))->elt->val_rtx);
}

/* If X is a PLUS or an autoinc operation, expand the operation,
   storing the offset, if any, in *OFF.  */

static rtx
autoinc_split (rtx x, rtx *off, machine_mode memmode)
{
  switch (GET_CODE (x))
    {
    case PLUS:
      *off = XEXP (x, 1);
      return XEXP (x, 0);

    case PRE_DEC:
      if (memmode == VOIDmode)
	return x;

      *off = gen_int_mode (-GET_MODE_SIZE (memmode), GET_MODE (x));
      return XEXP (x, 0);

    case PRE_INC:
      if (memmode == VOIDmode)
	return x;

      *off = gen_int_mode (GET_MODE_SIZE (memmode), GET_MODE (x));
      return XEXP (x, 0);

    case PRE_MODIFY:
      return XEXP (x, 1);

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return XEXP (x, 0);

    default:
      return x;
    }
}
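
/* For example, on a target where SImode is 4 bytes wide,
   (pre_dec:P (reg:P sp)) inside an SImode MEM splits into the base
   (reg:P sp) with *OFF = (const_int -4), i.e. the same base/offset
   pair as the explicit form (plus:P (reg:P sp) (const_int -4)).
   (Illustrative only.)  */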

/* Return nonzero if we can prove that X and Y contain the same value,
   taking our gathered information into account.  MEMMODE holds the
   mode of the enclosing MEM, if any, as required to deal with autoinc
   addressing modes.  If X and Y are not (known to be) part of
   addresses, MEMMODE should be VOIDmode.  */

int
rtx_equal_for_cselib_1 (rtx x, rtx y, machine_mode memmode, int depth)
{
  enum rtx_code code;
  const char *fmt;
  int i;

  if (REG_P (x) || MEM_P (x))
    {
      cselib_val *e = cselib_lookup (x, GET_MODE (x), 0, memmode);

      if (e)
	x = e->val_rtx;
    }

  if (REG_P (y) || MEM_P (y))
    {
      cselib_val *e = cselib_lookup (y, GET_MODE (y), 0, memmode);

      if (e)
	y = e->val_rtx;
    }

  if (x == y)
    return 1;

  if (GET_CODE (x) == VALUE)
    {
      cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (x));
      struct elt_loc_list *l;

      if (GET_CODE (y) == VALUE)
	return e == canonical_cselib_val (CSELIB_VAL_PTR (y));

      if (depth == 128)
	return 0;

      for (l = e->locs; l; l = l->next)
	{
	  rtx t = l->loc;

	  /* Avoid infinite recursion.  We know we have the canonical
	     value, so we can just skip any values in the equivalence
	     list.  */
	  if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
	    continue;
	  else if (rtx_equal_for_cselib_1 (t, y, memmode, depth + 1))
	    return 1;
	}

      return 0;
    }
  else if (GET_CODE (y) == VALUE)
    {
      cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (y));
      struct elt_loc_list *l;

      if (depth == 128)
	return 0;

      for (l = e->locs; l; l = l->next)
	{
	  rtx t = l->loc;

	  if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
	    continue;
	  else if (rtx_equal_for_cselib_1 (x, t, memmode, depth + 1))
	    return 1;
	}

      return 0;
    }

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  if (GET_CODE (x) != GET_CODE (y))
    {
      rtx xorig = x, yorig = y;
      rtx xoff = NULL, yoff = NULL;

      x = autoinc_split (x, &xoff, memmode);
      y = autoinc_split (y, &yoff, memmode);

      if (!xoff != !yoff)
	return 0;

      if (xoff && !rtx_equal_for_cselib_1 (xoff, yoff, memmode, depth))
	return 0;

      /* Don't recurse if nothing changed.  */
      if (x != xorig || y != yorig)
	return rtx_equal_for_cselib_1 (x, y, memmode, depth);

      return 0;
    }

  /* These won't be handled correctly by the code below.  */
  switch (GET_CODE (x))
    {
    CASE_CONST_UNIQUE:
    case DEBUG_EXPR:
      return 0;

    case DEBUG_IMPLICIT_PTR:
      return DEBUG_IMPLICIT_PTR_DECL (x)
	     == DEBUG_IMPLICIT_PTR_DECL (y);

    case DEBUG_PARAMETER_REF:
      return DEBUG_PARAMETER_REF_DECL (x)
	     == DEBUG_PARAMETER_REF_DECL (y);

    case ENTRY_VALUE:
      /* ENTRY_VALUEs are function invariant, it is thus undesirable to
	 use rtx_equal_for_cselib_1 to compare the operands.  */
      return rtx_equal_p (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));

    case LABEL_REF:
      return label_ref_label (x) == label_ref_label (y);

    case REG:
      return REGNO (x) == REGNO (y);

    case MEM:
      /* We have to compare any autoinc operations in the addresses
	 using this MEM's mode.  */
      return rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 0), GET_MODE (x),
				     depth);

    default:
      break;
    }

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'n':
	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'p':
	  if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
	    return 0;
	  break;

	case 'V':
	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! rtx_equal_for_cselib_1 (XVECEXP (x, i, j),
					  XVECEXP (y, i, j), memmode, depth))
	      return 0;
	  break;

	case 'e':
	  if (i == 1
	      && targetm.commutative_p (x, UNKNOWN)
	      && rtx_equal_for_cselib_1 (XEXP (x, 1), XEXP (y, 0), memmode,
					 depth)
	      && rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 1), memmode,
					 depth))
	    return 1;
	  if (! rtx_equal_for_cselib_1 (XEXP (x, i), XEXP (y, i), memmode,
					depth))
	    return 0;
	  break;

	case 'S':
	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'u':
	  /* These are just backpointers, so they don't matter.  */
	  break;

	case '0':
	case 't':
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  gcc_unreachable ();
	}
    }
  return 1;
}
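
/* Note that the commutative case above makes, for example,
   (plus:SI (reg:SI 1) (reg:SI 2)) compare equal to
   (plus:SI (reg:SI 2) (reg:SI 1)), which matches the order-insensitive
   hash computed by cselib_hash_rtx below.  (Illustrative only.)  */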

/* Hash an rtx.  Return 0 if we couldn't hash the rtx.
   For registers and memory locations, we look up their cselib_val structure
   and return its VALUE element.
   Possible reasons for returning 0 are: the object is volatile, or we
   couldn't find a register or memory location in the table and CREATE is
   zero.  If CREATE is nonzero, table elts are created for regs and mem.
   N.B. this hash function returns the same hash value for RTXes that
   differ only in the order of operands, thus it is suitable for comparisons
   that take commutativity into account.
   If we wanted to also support associative rules, we'd have to use a different
   strategy to avoid returning spurious 0, e.g. return ~(~0U >> 1) .
   MEMMODE indicates the mode of an enclosing MEM, and it's only
   used to compute autoinc values.
   We used to have a MODE argument for hashing for CONST_INTs, but that
   didn't make sense, since it caused spurious hash differences between
    (set (reg:SI 1) (const_int))
    (plus:SI (reg:SI 2) (reg:SI 1))
   and
    (plus:SI (reg:SI 2) (const_int))
   If the mode is important in any context, it must be checked specifically
   in a comparison anyway, since relying on hash differences is unsafe.  */

static unsigned int
cselib_hash_rtx (rtx x, int create, machine_mode memmode)
{
  cselib_val *e;
  poly_int64 offset;
  int i, j;
  enum rtx_code code;
  const char *fmt;
  unsigned int hash = 0;

  code = GET_CODE (x);
  hash += (unsigned) code + (unsigned) GET_MODE (x);

  switch (code)
    {
    case VALUE:
      e = CSELIB_VAL_PTR (x);
      return e->hash;

    case MEM:
    case REG:
      e = cselib_lookup (x, GET_MODE (x), create, memmode);
      if (! e)
	return 0;

      return e->hash;

    case DEBUG_EXPR:
      hash += ((unsigned) DEBUG_EXPR << 7)
	      + DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_EXPR;

    case DEBUG_IMPLICIT_PTR:
      hash += ((unsigned) DEBUG_IMPLICIT_PTR << 7)
	      + DECL_UID (DEBUG_IMPLICIT_PTR_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_IMPLICIT_PTR;

    case DEBUG_PARAMETER_REF:
      hash += ((unsigned) DEBUG_PARAMETER_REF << 7)
	      + DECL_UID (DEBUG_PARAMETER_REF_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_PARAMETER_REF;

    case ENTRY_VALUE:
      /* ENTRY_VALUEs are function invariant, thus try to avoid
	 recursing on argument if ENTRY_VALUE is one of the
	 forms emitted by expand_debug_expr, otherwise
	 ENTRY_VALUE hash would depend on the current value
	 in some register or memory.  */
      if (REG_P (ENTRY_VALUE_EXP (x)))
	hash += (unsigned int) REG
		+ (unsigned int) GET_MODE (ENTRY_VALUE_EXP (x))
		+ (unsigned int) REGNO (ENTRY_VALUE_EXP (x));
      else if (MEM_P (ENTRY_VALUE_EXP (x))
	       && REG_P (XEXP (ENTRY_VALUE_EXP (x), 0)))
	hash += (unsigned int) MEM
		+ (unsigned int) GET_MODE (XEXP (ENTRY_VALUE_EXP (x), 0))
		+ (unsigned int) REGNO (XEXP (ENTRY_VALUE_EXP (x), 0));
      else
	hash += cselib_hash_rtx (ENTRY_VALUE_EXP (x), create, memmode);
      return hash ? hash : (unsigned int) ENTRY_VALUE;

    case CONST_INT:
      hash += ((unsigned) CONST_INT << 7) + UINTVAL (x);
      return hash ? hash : (unsigned int) CONST_INT;

    case CONST_WIDE_INT:
      for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
	hash += CONST_WIDE_INT_ELT (x, i);
      return hash;

    case CONST_POLY_INT:
      {
	inchash::hash h;
	h.add_int (hash);
	for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	  h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
	return h.end ();
      }

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
	 the integers representing the constant.  */
      hash += (unsigned) code + (unsigned) GET_MODE (x);
      if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (x) == VOIDmode)
	hash += ((unsigned) CONST_DOUBLE_LOW (x)
		 + (unsigned) CONST_DOUBLE_HIGH (x));
      else
	hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
      return hash ? hash : (unsigned int) CONST_DOUBLE;

    case CONST_FIXED:
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      hash += fixed_hash (CONST_FIXED_VALUE (x));
      return hash ? hash : (unsigned int) CONST_FIXED;

    case CONST_VECTOR:
      {
	int units;
	rtx elt;

	units = const_vector_encoded_nelts (x);

	for (i = 0; i < units; ++i)
	  {
	    elt = CONST_VECTOR_ENCODED_ELT (x, i);
	    hash += cselib_hash_rtx (elt, 0, memmode);
	  }

	return hash;
      }

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
	 differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
	       + CODE_LABEL_NUMBER (label_ref_label (x)));
      return hash ? hash : (unsigned int) LABEL_REF;

    case SYMBOL_REF:
      {
	/* Don't hash on the symbol's address to avoid bootstrap differences.
	   Different hash values may cause expressions to be recorded in
	   different orders and thus different registers to be used in the
	   final assembler.  This also avoids differences in the dump files
	   between various stages.  */
	unsigned int h = 0;
	const unsigned char *p = (const unsigned char *) XSTR (x, 0);

	while (*p)
	  h += (h << 7) + *p++; /* ??? revisit */

	hash += ((unsigned int) SYMBOL_REF << 7) + h;
	return hash ? hash : (unsigned int) SYMBOL_REF;
      }

    case PRE_DEC:
    case PRE_INC:
      /* We can't compute these without knowing the MEM mode.  */
      gcc_assert (memmode != VOIDmode);
      offset = GET_MODE_SIZE (memmode);
      if (code == PRE_DEC)
	offset = -offset;
      /* Adjust the hash so that (mem:MEMMODE (pre_* (reg))) hashes
	 like (mem:MEMMODE (plus (reg) (const_int I))).  */
      hash += (unsigned) PLUS - (unsigned) code
	+ cselib_hash_rtx (XEXP (x, 0), create, memmode)
	+ cselib_hash_rtx (gen_int_mode (offset, GET_MODE (x)),
			   create, memmode);
      return hash ? hash : 1 + (unsigned) PLUS;

    case PRE_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_hash_rtx (XEXP (x, 1), create, memmode);

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_hash_rtx (XEXP (x, 0), create, memmode);

    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
	return 0;

      break;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'e':
	  {
	    rtx tem = XEXP (x, i);
	    unsigned int tem_hash = cselib_hash_rtx (tem, create, memmode);

	    if (tem_hash == 0)
	      return 0;

	    hash += tem_hash;
	  }
	  break;
	case 'E':
	  for (j = 0; j < XVECLEN (x, i); j++)
	    {
	      unsigned int tem_hash
		= cselib_hash_rtx (XVECEXP (x, i, j), create, memmode);

	      if (tem_hash == 0)
		return 0;

	      hash += tem_hash;
	    }
	  break;

	case 's':
	  {
	    const unsigned char *p = (const unsigned char *) XSTR (x, i);

	    if (p)
	      while (*p)
		hash += *p++;
	    break;
	  }

	case 'i':
	  hash += XINT (x, i);
	  break;

	case 'p':
	  hash += constant_lower_bound (SUBREG_BYTE (x));
	  break;

	case '0':
	case 't':
	  /* unused */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  return hash ? hash : 1 + (unsigned int) GET_CODE (x);
}

/* Create a new value structure for VALUE and initialize it.  The mode of the
   value is MODE.  */

static inline cselib_val *
new_cselib_val (unsigned int hash, machine_mode mode, rtx x)
{
  cselib_val *e = cselib_val_pool.allocate ();

  gcc_assert (hash);
  gcc_assert (next_uid);

  e->hash = hash;
  e->uid = next_uid++;
  /* We use an alloc pool to allocate this RTL construct because it
     accounts for about 8% of the overall memory usage.  We know
     precisely when we can have VALUE RTXen (when cselib is active)
     so we don't need to put them in garbage collected memory.
     ??? Why should a VALUE be an RTX in the first place?  */
  e->val_rtx = (rtx_def*) value_pool.allocate ();
  memset (e->val_rtx, 0, RTX_HDR_SIZE);
  PUT_CODE (e->val_rtx, VALUE);
  PUT_MODE (e->val_rtx, mode);
  CSELIB_VAL_PTR (e->val_rtx) = e;
  e->addr_list = 0;
  e->locs = 0;
  e->next_containing_mem = 0;

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      fprintf (dump_file, "cselib value %u:%u ", e->uid, hash);
      if (flag_dump_noaddr || flag_dump_unnumbered)
	fputs ("# ", dump_file);
      else
	fprintf (dump_file, "%p ", (void*)e);
      print_rtl_single (dump_file, x);
      fputc ('\n', dump_file);
    }

  return e;
}

/* ADDR_ELT is a value that is used as address.  MEM_ELT is the value that
   contains the data at this address.  X is a MEM that represents the
   value.  Update the two value structures to represent this situation.  */

static void
add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x)
{
  addr_elt = canonical_cselib_val (addr_elt);
  mem_elt = canonical_cselib_val (mem_elt);

  /* Avoid duplicates.  */
  addr_space_t as = MEM_ADDR_SPACE (x);
  for (elt_loc_list *l = mem_elt->locs; l; l = l->next)
    if (MEM_P (l->loc)
	&& CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt
	&& MEM_ADDR_SPACE (l->loc) == as)
      {
	promote_debug_loc (l);
	return;
      }

  addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
  new_elt_loc_list (mem_elt,
		    replace_equiv_address_nv (x, addr_elt->val_rtx));
  if (mem_elt->next_containing_mem == NULL)
    {
      mem_elt->next_containing_mem = first_containing_mem;
      first_containing_mem = mem_elt;
    }
}

/* Subroutine of cselib_lookup.  Return a value for X, which is a MEM rtx.
   If CREATE, make a new one if we haven't seen it before.  */

static cselib_val *
cselib_lookup_mem (rtx x, int create)
{
  machine_mode mode = GET_MODE (x);
  machine_mode addr_mode;
  cselib_val **slot;
  cselib_val *addr;
  cselib_val *mem_elt;

  if (MEM_VOLATILE_P (x) || mode == BLKmode
      || !cselib_record_memory
      || (FLOAT_MODE_P (mode) && flag_float_store))
    return 0;

  addr_mode = GET_MODE (XEXP (x, 0));
  if (addr_mode == VOIDmode)
    addr_mode = Pmode;

  /* Look up the value for the address.  */
  addr = cselib_lookup (XEXP (x, 0), addr_mode, create, mode);
  if (! addr)
    return 0;
  addr = canonical_cselib_val (addr);

  /* Find a value that describes a value of our mode at that address.  */
  addr_space_t as = MEM_ADDR_SPACE (x);
  for (elt_list *l = addr->addr_list; l; l = l->next)
    if (GET_MODE (l->elt->val_rtx) == mode)
      {
	for (elt_loc_list *l2 = l->elt->locs; l2; l2 = l2->next)
	  if (MEM_P (l2->loc) && MEM_ADDR_SPACE (l2->loc) == as)
	    {
	      promote_debug_loc (l->elt->locs);
	      return l->elt;
	    }
      }

  if (! create)
    return 0;

  mem_elt = new_cselib_val (next_uid, mode, x);
  add_mem_for_addr (addr, mem_elt, x);
  slot = cselib_find_slot (mode, x, mem_elt->hash, INSERT, VOIDmode);
  *slot = mem_elt;
  return mem_elt;
}
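
/* Illustrative example: looking up (mem:SI (reg:P 1)) with CREATE
   nonzero first looks up (or creates) the value of the address
   (reg:P 1), then either reuses an SImode value already present on
   that address value's addr_list or creates a fresh one whose
   location is (mem:SI (value ADDR)).  */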

/* Search through the possible substitutions in P.  We prefer a non-reg
   substitution because this allows us to expand the tree further.  If
   we find just a reg, take the lowest regno.  There may be several
   non-reg results; we just take the first one because they will all
   expand to the same place.  */

static rtx
expand_loc (struct elt_loc_list *p, struct expand_value_data *evd,
	    int max_depth)
{
  rtx reg_result = NULL;
  unsigned int regno = UINT_MAX;
  struct elt_loc_list *p_in = p;

  for (; p; p = p->next)
    {
      /* Return these right away to avoid returning stack pointer based
	 expressions for frame pointer and vice versa, which is something
	 that would confuse DSE.  See the comment in cselib_expand_value_rtx_1
	 for more details.  */
      if (REG_P (p->loc)
	  && (REGNO (p->loc) == STACK_POINTER_REGNUM
	      || REGNO (p->loc) == FRAME_POINTER_REGNUM
	      || REGNO (p->loc) == HARD_FRAME_POINTER_REGNUM
	      || REGNO (p->loc) == cfa_base_preserved_regno))
	return p->loc;
      /* Avoid infinite recursion trying to expand a reg into
	 the same reg.  */
      if ((REG_P (p->loc))
	  && (REGNO (p->loc) < regno)
	  && !bitmap_bit_p (evd->regs_active, REGNO (p->loc)))
	{
	  reg_result = p->loc;
	  regno = REGNO (p->loc);
	}
      /* Avoid infinite recursion and do not try to expand the
	 value.  */
      else if (GET_CODE (p->loc) == VALUE
	       && CSELIB_VAL_PTR (p->loc)->locs == p_in)
	continue;
      else if (!REG_P (p->loc))
	{
	  rtx result, note;
	  if (dump_file && (dump_flags & TDF_CSELIB))
	    {
	      print_inline_rtx (dump_file, p->loc, 0);
	      fprintf (dump_file, "\n");
	    }
	  if (GET_CODE (p->loc) == LO_SUM
	      && GET_CODE (XEXP (p->loc, 1)) == SYMBOL_REF
	      && p->setting_insn
	      && (note = find_reg_note (p->setting_insn, REG_EQUAL, NULL_RTX))
	      && XEXP (note, 0) == XEXP (p->loc, 1))
	    return XEXP (p->loc, 1);
	  result = cselib_expand_value_rtx_1 (p->loc, evd, max_depth - 1);
	  if (result)
	    return result;
	}
    }

  if (regno != UINT_MAX)
    {
      rtx result;
      if (dump_file && (dump_flags & TDF_CSELIB))
	fprintf (dump_file, "r%d\n", regno);

      result = cselib_expand_value_rtx_1 (reg_result, evd, max_depth - 1);
      if (result)
	return result;
    }

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      if (reg_result)
	{
	  print_inline_rtx (dump_file, reg_result, 0);
	  fprintf (dump_file, "\n");
	}
      else
	fprintf (dump_file, "NULL\n");
    }
  return reg_result;
}


/* Forward substitute and expand an expression out to its roots.
   This is the opposite of common subexpression elimination.  Because
   local value numbering is such a weak optimization, the expanded
   expression is pretty much unique (not from a pointer equals point of
   view but from a tree shape point of view).

   This function returns NULL if the expansion fails.  The expansion
   will fail if there is no value number for one of the operands or if
   one of the operands has been overwritten between the current insn
   and the beginning of the basic block.  For instance x has no
   expansion in:

   r1 <- r1 + 3
   x <- r1 + 8

   REGS_ACTIVE is a scratch bitmap that should be clear when passing in.
   It is clear on return.  */

rtx
cselib_expand_value_rtx (rtx orig, bitmap regs_active, int max_depth)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = NULL;
  evd.callback_arg = NULL;
  evd.dummy = false;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
}
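
/* A minimal calling sketch (illustrative only; the depth limit of 5 is
   an arbitrary choice here, not a cselib requirement):

     bitmap scratch = BITMAP_ALLOC (NULL);
     rtx expanded = cselib_expand_value_rtx (addr, scratch, 5);
     BITMAP_FREE (scratch);

   A NULL result means some operand had no value number or was
   overwritten since it was recorded.  */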

/* Same as cselib_expand_value_rtx, but using a callback to try to
   resolve some expressions.  The CB function should return ORIG if it
   can't or does not want to deal with a certain RTX.  Any other
   return value, including NULL, will be used as the expansion for
   VALUE, without any further changes.  */

rtx
cselib_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
			    cselib_expand_callback cb, void *data)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = cb;
  evd.callback_arg = data;
  evd.dummy = false;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
}

/* Similar to cselib_expand_value_rtx_cb, but no rtxs are actually copied
   or simplified.  Useful to find out whether cselib_expand_value_rtx_cb
   would return NULL or non-NULL, without allocating new rtx.  */

bool
cselib_dummy_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
				  cselib_expand_callback cb, void *data)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = cb;
  evd.callback_arg = data;
  evd.dummy = true;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth) != NULL;
}

/* Internal implementation of cselib_expand_value_rtx and
   cselib_expand_value_rtx_cb.  */

static rtx
cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd,
			   int max_depth)
{
  rtx copy, scopy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;
  machine_mode mode;

  code = GET_CODE (orig);

  /* For the context of dse, if we end up expanding into a huge tree, we
     will not have a useful address, so we might as well just give up
     quickly.  */
  if (max_depth <= 0)
    return NULL;

  switch (code)
    {
    case REG:
      {
	struct elt_list *l = REG_VALUES (REGNO (orig));

	if (l && l->elt == NULL)
	  l = l->next;
	for (; l; l = l->next)
	  if (GET_MODE (l->elt->val_rtx) == GET_MODE (orig))
	    {
	      rtx result;
	      unsigned regno = REGNO (orig);

	      /* The only thing that we are not willing to do (this
		 is a requirement of dse; if other potential uses
		 need this function we should add a parm to control
		 it) is that we will not substitute the
		 STACK_POINTER_REGNUM, FRAME_POINTER or the
		 HARD_FRAME_POINTER.

		 These expansions confuse the code that notices that
		 stores into the frame go dead at the end of the
		 function and that the frame is not affected by calls
		 to subroutines.  If you allow the
		 STACK_POINTER_REGNUM substitution, then dse will
		 think that parameter pushing also goes dead, which is
		 wrong.  If you allow the FRAME_POINTER or the
		 HARD_FRAME_POINTER then you lose the opportunity to
		 make the frame assumptions.  */
	      if (regno == STACK_POINTER_REGNUM
		  || regno == FRAME_POINTER_REGNUM
		  || regno == HARD_FRAME_POINTER_REGNUM
		  || regno == cfa_base_preserved_regno)
		return orig;

	      bitmap_set_bit (evd->regs_active, regno);

	      if (dump_file && (dump_flags & TDF_CSELIB))
		fprintf (dump_file, "expanding: r%d into: ", regno);

	      result = expand_loc (l->elt->locs, evd, max_depth);
	      bitmap_clear_bit (evd->regs_active, regno);

	      if (result)
		return result;
	      else
		return orig;
	    }
	return orig;
      }

    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* SCRATCH rtxen must be shared because they represent distinct
	 values.  */
      return orig;
    case CLOBBER:
    case CLOBBER_HIGH:
      if (REG_P (XEXP (orig, 0)) && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))))
	return orig;
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

    case SUBREG:
      {
	rtx subreg;

	if (evd->callback)
	  {
	    subreg = evd->callback (orig, evd->regs_active, max_depth,
				    evd->callback_arg);
	    if (subreg != orig)
	      return subreg;
	  }

	subreg = cselib_expand_value_rtx_1 (SUBREG_REG (orig), evd,
					    max_depth - 1);
	if (!subreg)
	  return NULL;
	scopy = simplify_gen_subreg (GET_MODE (orig), subreg,
				     GET_MODE (SUBREG_REG (orig)),
				     SUBREG_BYTE (orig));
	if (scopy == NULL
	    || (GET_CODE (scopy) == SUBREG
		&& !REG_P (SUBREG_REG (scopy))
		&& !MEM_P (SUBREG_REG (scopy))))
	  return NULL;

	return scopy;
      }

    case VALUE:
      {
	rtx result;

	if (dump_file && (dump_flags & TDF_CSELIB))
	  {
	    fputs ("\nexpanding ", dump_file);
	    print_rtl_single (dump_file, orig);
	    fputs (" into...", dump_file);
	  }

	if (evd->callback)
	  {
	    result = evd->callback (orig, evd->regs_active, max_depth,
				    evd->callback_arg);

	    if (result != orig)
	      return result;
	  }

	result = expand_loc (CSELIB_VAL_PTR (orig)->locs, evd, max_depth);
	return result;
      }

    case DEBUG_EXPR:
      if (evd->callback)
	return evd->callback (orig, evd->regs_active, max_depth,
			      evd->callback_arg);
      return orig;

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  if (evd->dummy)
    copy = NULL;
  else
    copy = shallow_copy_rtx (orig);

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  {
	    rtx result = cselib_expand_value_rtx_1 (XEXP (orig, i), evd,
						    max_depth - 1);
	    if (!result)
	      return NULL;
	    if (copy)
	      XEXP (copy, i) = result;
	  }
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) != NULL)
	  {
	    if (copy)
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (orig, i); j++)
	      {
		rtx result = cselib_expand_value_rtx_1 (XVECEXP (orig, i, j),
							evd, max_depth - 1);
		if (!result)
		  return NULL;
		if (copy)
		  XVECEXP (copy, i, j) = result;
	      }
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'T':
      case 'u':
      case 'B':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  if (evd->dummy)
    return orig;

  mode = GET_MODE (copy);
  /* If an operand has been simplified into CONST_INT, which doesn't
     have a mode and the mode isn't derivable from whole rtx's mode,
     try simplify_*_operation first with mode from original's operand
     and as a fallback wrap CONST_INT into gen_rtx_CONST.  */
  scopy = copy;
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONST_INT_P (XEXP (copy, 0))
	  && GET_MODE (XEXP (orig, 0)) != VOIDmode)
	{
	  scopy = simplify_unary_operation (code, mode, XEXP (copy, 0),
					    GET_MODE (XEXP (orig, 0)));
	  if (scopy)
	    return scopy;
	}
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      /* These expressions can derive operand modes from the whole rtx's
	 mode.  */
      break;
    case RTX_TERNARY:
1823     case RTX_BITFIELD_OPS:
1824       if (CONST_INT_P (XEXP (copy, 0))
1825 	  && GET_MODE (XEXP (orig, 0)) != VOIDmode)
1826 	{
1827 	  scopy = simplify_ternary_operation (code, mode,
1828 					      GET_MODE (XEXP (orig, 0)),
1829 					      XEXP (copy, 0), XEXP (copy, 1),
1830 					      XEXP (copy, 2));
1831 	  if (scopy)
1832 	    return scopy;
1833 	}
1834       break;
1835     case RTX_COMPARE:
1836     case RTX_COMM_COMPARE:
1837       if (CONST_INT_P (XEXP (copy, 0))
1838 	  && GET_MODE (XEXP (copy, 1)) == VOIDmode
1839 	  && (GET_MODE (XEXP (orig, 0)) != VOIDmode
1840 	      || GET_MODE (XEXP (orig, 1)) != VOIDmode))
1841 	{
1842 	  scopy = simplify_relational_operation (code, mode,
1843 						 (GET_MODE (XEXP (orig, 0))
1844 						  != VOIDmode)
1845 						 ? GET_MODE (XEXP (orig, 0))
1846 						 : GET_MODE (XEXP (orig, 1)),
1847 						 XEXP (copy, 0),
1848 						 XEXP (copy, 1));
1849 	  if (scopy)
1850 	    return scopy;
1851 	}
1852       break;
1853     default:
1854       break;
1855     }
1856   scopy = simplify_rtx (copy);
1857   if (scopy)
1858     return scopy;
1859   return copy;
1860 }
1861 
1862 /* Walk rtx X and replace all occurrences of REG and MEM subexpressions
1863    with VALUE expressions.  This way, it becomes independent of changes
1864    to registers and memory.
1865    X isn't actually modified; if modifications are needed, new rtl is
1866    allocated.  However, the return value can share rtl with X.
1867    If X is within a MEM, MEMMODE must be the mode of the MEM.  */
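
/* For example, (plus:SI (reg:SI 100) (const_int 4)) is rewritten as
   (plus:SI (value:SI V) (const_int 4)), where V stands for the cselib
   value currently recorded for register 100; the original rtx is left
   untouched.  (Illustrative rtl; the printed form of VALUEs varies.)  */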

rtx
cselib_subst_to_values (rtx x, machine_mode memmode)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  cselib_val *e;
  struct elt_list *l;
  rtx copy = x;
  int i;
  poly_int64 offset;

  switch (code)
    {
    case REG:
      l = REG_VALUES (REGNO (x));
      if (l && l->elt == NULL)
	l = l->next;
      for (; l; l = l->next)
	if (GET_MODE (l->elt->val_rtx) == GET_MODE (x))
	  return l->elt->val_rtx;

      gcc_unreachable ();

    case MEM:
      e = cselib_lookup_mem (x, 0);
      /* This used to happen for autoincrements, but we deal with them
	 properly now.  Remove the if stmt for the next release.  */
      if (! e)
	{
	  /* Assign a value that doesn't match any other.  */
	  e = new_cselib_val (next_uid, GET_MODE (x), x);
	}
      return e->val_rtx;

    case ENTRY_VALUE:
      e = cselib_lookup (x, GET_MODE (x), 0, memmode);
      if (! e)
	break;
      return e->val_rtx;

    CASE_CONST_ANY:
      return x;

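    /* Autoincrement addresses are rewritten in terms of the base
       register: e.g. with MEMMODE == SImode (4 bytes on common targets),
       (pre_dec:P (reg:P 7)) is substituted as if it were
       (plus:P (reg:P 7) (const_int -4)).  */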
    case PRE_DEC:
    case PRE_INC:
      gcc_assert (memmode != VOIDmode);
      offset = GET_MODE_SIZE (memmode);
      if (code == PRE_DEC)
	offset = -offset;
      return cselib_subst_to_values (plus_constant (GET_MODE (x),
						    XEXP (x, 0), offset),
				     memmode);

    case PRE_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_subst_to_values (XEXP (x, 1), memmode);

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_subst_to_values (XEXP (x, 0), memmode);

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx t = cselib_subst_to_values (XEXP (x, i), memmode);

	  if (t != XEXP (x, i))
	    {
	      if (x == copy)
		copy = shallow_copy_rtx (x);
	      XEXP (copy, i) = t;
	    }
	}
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = 0; j < XVECLEN (x, i); j++)
	    {
	      rtx t = cselib_subst_to_values (XVECEXP (x, i, j), memmode);

	      if (t != XVECEXP (x, i, j))
		{
		  if (XVEC (x, i) == XVEC (copy, i))
		    {
		      if (x == copy)
			copy = shallow_copy_rtx (x);
		      XVEC (copy, i) = shallow_copy_rtvec (XVEC (x, i));
		    }
		  XVECEXP (copy, i, j) = t;
		}
	    }
	}
    }

  return copy;
}

/* Wrapper for cselib_subst_to_values that indicates X is in INSN.  */

rtx
cselib_subst_to_values_from_insn (rtx x, machine_mode memmode, rtx_insn *insn)
{
  rtx ret;
  gcc_assert (!cselib_current_insn);
  cselib_current_insn = insn;
  ret = cselib_subst_to_values (x, memmode);
  cselib_current_insn = NULL;
  return ret;
}

/* Look up the rtl expression X in our tables and return the value it
   has.  If CREATE is zero, we return NULL if we don't know the value.
   Otherwise, we create a new one if possible, using mode MODE if X
   doesn't have a mode (i.e. because it's a constant).  When X is part
   of an address, MEMMODE should be the mode of the enclosing MEM if
   we're tracking autoinc expressions.  */
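
/* For example, cselib_lookup (x, GET_MODE (x), 1, VOIDmode), via the
   wrapper below, is the usual call for an rtx that is not part of an
   address; for a CONST_INT the caller must supply MODE, since the
   constant itself has VOIDmode.  */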

static cselib_val *
cselib_lookup_1 (rtx x, machine_mode mode,
		 int create, machine_mode memmode)
{
  cselib_val **slot;
  cselib_val *e;
  unsigned int hashval;

  if (GET_MODE (x) != VOIDmode)
    mode = GET_MODE (x);

  if (GET_CODE (x) == VALUE)
    return CSELIB_VAL_PTR (x);

  if (REG_P (x))
    {
      struct elt_list *l;
      unsigned int i = REGNO (x);

      l = REG_VALUES (i);
      if (l && l->elt == NULL)
	l = l->next;
      for (; l; l = l->next)
	if (mode == GET_MODE (l->elt->val_rtx))
	  {
	    promote_debug_loc (l->elt->locs);
	    return l->elt;
	  }

      if (! create)
	return 0;

      if (i < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int n = hard_regno_nregs (i, mode);

	  if (n > max_value_regs)
	    max_value_regs = n;
	}

      e = new_cselib_val (next_uid, GET_MODE (x), x);
      new_elt_loc_list (e, x);

      scalar_int_mode int_mode;
      if (REG_VALUES (i) == 0)
	{
	  /* Maintain the invariant that the first entry of
	     REG_VALUES, if present, must be the value used to set the
	     register, or NULL.  */
	  used_regs[n_used_regs++] = i;
	  REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
	}
      else if (cselib_preserve_constants
	       && is_int_mode (mode, &int_mode))
	{
	  /* During var-tracking, try harder to find equivalences
	     for SUBREGs.  If a setter sets, say, a DImode register
	     and the register is then used only in SImode, add a
	     lowpart subreg location.  */
	  struct elt_list *lwider = NULL;
	  scalar_int_mode lmode;
	  l = REG_VALUES (i);
	  if (l && l->elt == NULL)
	    l = l->next;
	  for (; l; l = l->next)
	    if (is_int_mode (GET_MODE (l->elt->val_rtx), &lmode)
		&& GET_MODE_SIZE (lmode) > GET_MODE_SIZE (int_mode)
		&& (lwider == NULL
		    || partial_subreg_p (lmode,
					 GET_MODE (lwider->elt->val_rtx))))
	      {
		struct elt_loc_list *el;
		if (i < FIRST_PSEUDO_REGISTER
		    && hard_regno_nregs (i, lmode) != 1)
		  continue;
		for (el = l->elt->locs; el; el = el->next)
		  if (!REG_P (el->loc))
		    break;
		if (el)
		  lwider = l;
	      }
	  if (lwider)
	    {
	      rtx sub = lowpart_subreg (int_mode, lwider->elt->val_rtx,
					GET_MODE (lwider->elt->val_rtx));
	      if (sub)
		new_elt_loc_list (e, sub);
	    }
	}
      REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
      slot = cselib_find_slot (mode, x, e->hash, INSERT, memmode);
      *slot = e;
      return e;
    }

  if (MEM_P (x))
    return cselib_lookup_mem (x, create);

  hashval = cselib_hash_rtx (x, create, memmode);
  /* Can't even create if hashing is not possible.  */
  if (! hashval)
    return 0;

  slot = cselib_find_slot (mode, x, hashval,
			   create ? INSERT : NO_INSERT, memmode);
  if (slot == 0)
    return 0;

  e = (cselib_val *) *slot;
  if (e)
    return e;

  e = new_cselib_val (hashval, mode, x);

  /* We have to fill the slot before calling cselib_subst_to_values:
     the hash table is inconsistent until we do so, and
     cselib_subst_to_values will need to do lookups.  */
  *slot = e;
  new_elt_loc_list (e, cselib_subst_to_values (x, memmode));
  return e;
}

/* Wrapper for cselib_lookup that indicates X is in INSN.  */

cselib_val *
cselib_lookup_from_insn (rtx x, machine_mode mode,
			 int create, machine_mode memmode, rtx_insn *insn)
{
  cselib_val *ret;

  gcc_assert (!cselib_current_insn);
  cselib_current_insn = insn;

  ret = cselib_lookup (x, mode, create, memmode);

  cselib_current_insn = NULL;

  return ret;
}

/* Wrapper for cselib_lookup_1 that logs the lookup result and
   maintains invariants related to debug insns.  */

cselib_val *
cselib_lookup (rtx x, machine_mode mode,
	       int create, machine_mode memmode)
{
  cselib_val *ret = cselib_lookup_1 (x, mode, create, memmode);

  /* ??? Should we return NULL if we're not supposed to create an entry,
     the found loc is a debug loc and cselib_current_insn is not DEBUG?
     If so, we should also avoid converting val to non-DEBUG; it would
     probably be easiest to set cselib_current_insn to NULL before the
     call above.  */

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      fputs ("cselib lookup ", dump_file);
      print_inline_rtx (dump_file, x, 2);
      fprintf (dump_file, " => %u:%u\n",
	       ret ? ret->uid : 0,
	       ret ? ret->hash : 0);
    }

  return ret;
}

/* Invalidate any entries in reg_values that overlap REGNO.  This is called
   if REGNO is changing.  MODE is the mode of the assignment to REGNO, which
   is used to determine how many hard registers are being changed.  If MODE
   is VOIDmode, then only REGNO is being changed; this is used when
   invalidating call clobbered registers across a call.  */
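
/* For example, on a target where a DImode value spans two hard
   registers, writing to hard reg 1 must also invalidate a DImode value
   recorded in hard reg 0, since that value overlaps reg 1;
   max_value_regs bounds how far below REGNO the scan has to start.  */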

static void
cselib_invalidate_regno (unsigned int regno, machine_mode mode,
			 const_rtx setter)
{
  unsigned int endregno;
  unsigned int i;

  /* If we see pseudos after reload, something is _wrong_.  */
  gcc_assert (!reload_completed || regno < FIRST_PSEUDO_REGISTER
	      || reg_renumber[regno] < 0);

  /* Determine the range of registers that must be invalidated.  For
     pseudos, only REGNO is affected.  For hard regs, we must take MODE
     into account, and we must also invalidate lower register numbers
     if they contain values that overlap REGNO.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      gcc_assert (mode != VOIDmode);

      if (regno < max_value_regs)
	i = 0;
      else
	i = regno - max_value_regs;

      endregno = end_hard_regno (mode, regno);

      if (setter && GET_CODE (setter) == CLOBBER_HIGH)
	gcc_assert (endregno == regno + 1);
    }
  else
    {
      i = regno;
      endregno = regno + 1;
    }

  for (; i < endregno; i++)
    {
      struct elt_list **l = &REG_VALUES (i);

      /* Go through all known values for this reg; if it overlaps the range
	 we're invalidating, remove the value.  */
      while (*l)
	{
	  cselib_val *v = (*l)->elt;
	  bool had_locs;
	  rtx_insn *setting_insn;
	  struct elt_loc_list **p;
	  unsigned int this_last = i;

	  if (i < FIRST_PSEUDO_REGISTER && v != NULL)
	    this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;

	  if (this_last < regno || v == NULL
	      || (v == cfa_base_preserved_val
		  && i == cfa_base_preserved_regno))
	    {
	      l = &(*l)->next;
	      continue;
	    }

	  /* Ignore a CLOBBER_HIGH if the register isn't actually
	     clobbered.  */
	  if (setter && GET_CODE (setter) == CLOBBER_HIGH)
	    {
	      gcc_assert (endregno == regno + 1);
	      const_rtx x = XEXP (setter, 0);
	      if (!reg_is_clobbered_by_clobber_high (i, GET_MODE (v->val_rtx),
						     x))
		{
		  l = &(*l)->next;
		  continue;
		}
	    }

	  /* We have an overlap.  */
	  if (*l == REG_VALUES (i))
	    {
	      /* Maintain the invariant that the first entry of
		 REG_VALUES, if present, must be the value used to set
		 the register, or NULL.  This is also nice because
		 then we won't push the same regno onto used_regs
		 multiple times.  */
	      (*l)->elt = NULL;
	      l = &(*l)->next;
	    }
	  else
	    unchain_one_elt_list (l);

	  v = canonical_cselib_val (v);

	  had_locs = v->locs != NULL;
	  setting_insn = v->locs ? v->locs->setting_insn : NULL;

	  /* Now, we clear the mapping from value to reg.  It must exist, so
	     this code will crash intentionally if it doesn't.  */
	  for (p = &v->locs; ; p = &(*p)->next)
	    {
	      rtx x = (*p)->loc;

	      if (REG_P (x) && REGNO (x) == i)
		{
		  unchain_one_elt_loc_list (p);
		  break;
		}
	    }

	  if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
	    {
	      if (setting_insn && DEBUG_INSN_P (setting_insn))
		n_useless_debug_values++;
	      else
		n_useless_values++;
	    }
	}
    }
}

/* Invalidate any locations in the table which are changed because of a
   store to MEM_RTX.  If this is called because of a non-const call
   instruction, MEM_RTX is (mem:BLK const0_rtx).  */
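
/* E.g. a store to (mem:SI (reg:P 7)) removes every recorded MEM
   location that canon_anti_dependence says may overlap it; provably
   disjoint MEMs survive, up to the PARAM_MAX_CSELIB_MEMORY_LOCATIONS
   cap applied below.  */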

static void
cselib_invalidate_mem (rtx mem_rtx)
{
  cselib_val **vp, *v, *next;
  int num_mems = 0;
  rtx mem_addr;

  mem_addr = canon_rtx (get_addr (XEXP (mem_rtx, 0)));
  mem_rtx = canon_rtx (mem_rtx);

  vp = &first_containing_mem;
  for (v = *vp; v != &dummy_val; v = next)
    {
      bool has_mem = false;
      struct elt_loc_list **p = &v->locs;
      bool had_locs = v->locs != NULL;
      rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;

      while (*p)
	{
	  rtx x = (*p)->loc;
	  cselib_val *addr;
	  struct elt_list **mem_chain;

	  /* MEMs may occur in locations only at the top level; below
	     that every MEM or REG is substituted by its VALUE.  */
	  if (!MEM_P (x))
	    {
	      p = &(*p)->next;
	      continue;
	    }
	  if (num_mems < PARAM_VALUE (PARAM_MAX_CSELIB_MEMORY_LOCATIONS)
	      && ! canon_anti_dependence (x, false, mem_rtx,
					  GET_MODE (mem_rtx), mem_addr))
	    {
	      has_mem = true;
	      num_mems++;
	      p = &(*p)->next;
	      continue;
	    }

	  /* This one overlaps.  */
	  /* We must have a mapping from this MEM's address to the
	     value (E).  Remove that, too.  */
	  addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0, GET_MODE (x));
	  addr = canonical_cselib_val (addr);
	  gcc_checking_assert (v == canonical_cselib_val (v));
	  mem_chain = &addr->addr_list;
	  for (;;)
	    {
	      cselib_val *canon = canonical_cselib_val ((*mem_chain)->elt);

	      if (canon == v)
		{
		  unchain_one_elt_list (mem_chain);
		  break;
		}

	      /* Record canonicalized elt.  */
	      (*mem_chain)->elt = canon;

	      mem_chain = &(*mem_chain)->next;
	    }

	  unchain_one_elt_loc_list (p);
	}

      if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
	{
	  if (setting_insn && DEBUG_INSN_P (setting_insn))
	    n_useless_debug_values++;
	  else
	    n_useless_values++;
	}

      next = v->next_containing_mem;
      if (has_mem)
	{
	  *vp = v;
	  vp = &(*vp)->next_containing_mem;
	}
      else
	v->next_containing_mem = NULL;
    }
  *vp = &dummy_val;
}

/* Invalidate DEST, which is being assigned to or clobbered by SETTER.  */

void
cselib_invalidate_rtx (rtx dest, const_rtx setter)
{
  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  if (REG_P (dest))
    cselib_invalidate_regno (REGNO (dest), GET_MODE (dest), setter);
  else if (MEM_P (dest))
    cselib_invalidate_mem (dest);
}

/* A wrapper for cselib_invalidate_rtx to be called via note_stores.  */

static void
cselib_invalidate_rtx_note_stores (rtx dest, const_rtx setter,
				   void *data ATTRIBUTE_UNUSED)
{
  cselib_invalidate_rtx (dest, setter);
}

/* Record the result of a SET instruction.  DEST is being set; the source
   contains the value described by SRC_ELT.  If DEST is a MEM, DEST_ADDR_ELT
   describes its address.  */
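
/* For instance, given (set (reg:SI 100) (plus:SI (reg:SI 101)
   (const_int 1))), SRC_ELT is the VALUE computed for the PLUS and
   (reg:SI 100) is added to its location list; DEST_ADDR_ELT matters
   only for MEM destinations.  */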

static void
cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt)
{
  if (src_elt == 0 || side_effects_p (dest))
    return;

  if (REG_P (dest))
    {
      unsigned int dreg = REGNO (dest);
      if (dreg < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int n = REG_NREGS (dest);

	  if (n > max_value_regs)
	    max_value_regs = n;
	}

      if (REG_VALUES (dreg) == 0)
	{
	  used_regs[n_used_regs++] = dreg;
	  REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
	}
      else
	{
	  /* The register should have been invalidated.  */
	  gcc_assert (REG_VALUES (dreg)->elt == 0);
	  REG_VALUES (dreg)->elt = src_elt;
	}

      if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
	n_useless_values--;
      new_elt_loc_list (src_elt, dest);
    }
  else if (MEM_P (dest) && dest_addr_elt != 0
	   && cselib_record_memory)
    {
      if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
	n_useless_values--;
      add_mem_for_addr (dest_addr_elt, src_elt, dest);
    }
}

/* Make ELT and X's VALUE equivalent to each other at INSN.  */
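/* One known client is var-tracking, which ties preserved VALUEs for
   incoming arguments to their ENTRY_VALUE expressions.  */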

void
cselib_add_permanent_equiv (cselib_val *elt, rtx x, rtx_insn *insn)
{
  cselib_val *nelt;
  rtx_insn *save_cselib_current_insn = cselib_current_insn;

  gcc_checking_assert (elt);
  gcc_checking_assert (PRESERVED_VALUE_P (elt->val_rtx));
  gcc_checking_assert (!side_effects_p (x));

  cselib_current_insn = insn;

  nelt = cselib_lookup (x, GET_MODE (elt->val_rtx), 1, VOIDmode);

  if (nelt != elt)
    {
      cselib_any_perm_equivs = true;

      if (!PRESERVED_VALUE_P (nelt->val_rtx))
	cselib_preserve_value (nelt);

      new_elt_loc_list (nelt, elt->val_rtx);
    }

  cselib_current_insn = save_cselib_current_insn;
}

/* Return TRUE if any permanent equivalences have been recorded since
   the table was last initialized.  */
bool
cselib_have_permanent_equivalences (void)
{
  return cselib_any_perm_equivs;
}

/* There is no good way to determine how many elements there can be
   in a PARALLEL.  Since it's fairly cheap, use a really large number.  */
#define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)

struct cselib_record_autoinc_data
{
  struct cselib_set *sets;
  int n_sets;
};

/* Callback for for_each_inc_dec.  Records in ARG the SETs implied by
   autoinc RTXs: SRC plus SRCOFF, if non-NULL, is stored in DEST.  */
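
/* E.g. for (mem:SI (post_inc:P (reg:P 7))), DEST and SRC are both
   (reg:P 7) and SRCOFF is (const_int 4) (assuming 4-byte SImode), so
   the implied set is reg 7 = reg 7 + 4.  */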

static int
cselib_record_autoinc_cb (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
			  rtx dest, rtx src, rtx srcoff, void *arg)
{
  struct cselib_record_autoinc_data *data;
  data = (struct cselib_record_autoinc_data *)arg;

  data->sets[data->n_sets].dest = dest;

  if (srcoff)
    data->sets[data->n_sets].src = gen_rtx_PLUS (GET_MODE (src), src, srcoff);
  else
    data->sets[data->n_sets].src = src;

  data->n_sets++;

  return 0;
}

/* Record the effects of any sets and autoincs in INSN.  */
static void
cselib_record_sets (rtx_insn *insn)
{
  int n_sets = 0;
  int i;
  struct cselib_set sets[MAX_SETS];
  rtx body = PATTERN (insn);
  rtx cond = 0;
  int n_sets_before_autoinc;
  int n_strict_low_parts = 0;
  struct cselib_record_autoinc_data data;

  if (GET_CODE (body) == COND_EXEC)
    {
      cond = COND_EXEC_TEST (body);
      body = COND_EXEC_CODE (body);
    }

  /* Find all sets.  */
  if (GET_CODE (body) == SET)
    {
      sets[0].src = SET_SRC (body);
      sets[0].dest = SET_DEST (body);
      n_sets = 1;
    }
  else if (GET_CODE (body) == PARALLEL)
    {
      /* Look through the PARALLEL and record the values being
	 set, if possible.  Also handle any CLOBBERs.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	{
	  rtx x = XVECEXP (body, 0, i);

	  if (GET_CODE (x) == SET)
	    {
	      sets[n_sets].src = SET_SRC (x);
	      sets[n_sets].dest = SET_DEST (x);
	      n_sets++;
	    }
	}
    }

  if (n_sets == 1
      && MEM_P (sets[0].src)
      && !cselib_record_memory
      && MEM_READONLY_P (sets[0].src))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
	sets[0].src = XEXP (note, 0);
    }

  data.sets = sets;
  data.n_sets = n_sets_before_autoinc = n_sets;
  for_each_inc_dec (PATTERN (insn), cselib_record_autoinc_cb, &data);
  n_sets = data.n_sets;

  /* Look up the values that are read.  Do this before invalidating the
     locations that are written.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;
      rtx orig = dest;

      /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
         the low part after invalidating any knowledge about larger modes.  */
      if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
	sets[i].dest = dest = XEXP (dest, 0);

      /* We don't know how to record anything but REG or MEM.  */
      if (REG_P (dest)
	  || (MEM_P (dest) && cselib_record_memory))
        {
	  rtx src = sets[i].src;
	  if (cond)
	    src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest);
	  sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1, VOIDmode);
	  if (MEM_P (dest))
	    {
	      machine_mode address_mode = get_address_mode (dest);

	      sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
						     address_mode, 1,
						     GET_MODE (dest));
	    }
	  else
	    sets[i].dest_addr_elt = 0;
	}

      /* Improve handling of STRICT_LOW_PART: if the current value is
	 known to be const0_rtx, then the low bits are set from the source
	 and the higher bits remain zero.  Used in code like:

	 {di:SI=0;clobber flags:CC;}
	 flags:CCNO=cmp(bx:SI,0)
	 strict_low_part(di:QI)=flags:CCNO<=0

	 where we can note both that di:QI=flags:CCNO<=0 and also that,
	 because di:SI is known to be 0 and strict_low_part(di:QI)
	 preserves the upper bits, di:SI=zero_extend(flags:CCNO<=0).  */
      scalar_int_mode mode;
      if (dest != orig
	  && cselib_record_sets_hook
	  && REG_P (dest)
	  && HARD_REGISTER_P (dest)
	  && sets[i].src_elt
	  && is_a <scalar_int_mode> (GET_MODE (dest), &mode)
	  && n_sets + n_strict_low_parts < MAX_SETS)
	{
	  opt_scalar_int_mode wider_mode_iter;
	  FOR_EACH_WIDER_MODE (wider_mode_iter, mode)
	    {
	      scalar_int_mode wider_mode = wider_mode_iter.require ();
	      if (GET_MODE_PRECISION (wider_mode) > BITS_PER_WORD)
		break;

	      rtx reg = gen_lowpart (wider_mode, dest);
	      if (!REG_P (reg))
		break;

	      cselib_val *v = cselib_lookup (reg, wider_mode, 0, VOIDmode);
	      if (!v)
		continue;

	      struct elt_loc_list *l;
	      for (l = v->locs; l; l = l->next)
		if (l->loc == const0_rtx)
		  break;

	      if (!l)
		continue;

	      sets[n_sets + n_strict_low_parts].dest = reg;
	      sets[n_sets + n_strict_low_parts].src = dest;
	      sets[n_sets + n_strict_low_parts++].src_elt = sets[i].src_elt;
	      break;
	    }
	}
    }

  if (cselib_record_sets_hook)
    cselib_record_sets_hook (insn, sets, n_sets);

  /* Invalidate all locations written by this insn.  Note that the elts we
     looked up in the previous loop aren't affected, just some of their
     locations may go away.  */
  note_stores (body, cselib_invalidate_rtx_note_stores, NULL);

  for (i = n_sets_before_autoinc; i < n_sets; i++)
    cselib_invalidate_rtx (sets[i].dest);

  /* If this is an asm, look for duplicate sets.  This can happen when the
     user uses the same value as an output multiple times.  This is valid
     if the outputs are not actually used thereafter.  Treat this case as
     if the value isn't actually set.  We do this by smashing the destination
     to pc_rtx, so that we won't record the value later.  */
  if (n_sets >= 2 && asm_noperands (body) >= 0)
    {
      for (i = 0; i < n_sets; i++)
	{
	  rtx dest = sets[i].dest;
	  if (REG_P (dest) || MEM_P (dest))
	    {
	      int j;
	      for (j = i + 1; j < n_sets; j++)
		if (rtx_equal_p (dest, sets[j].dest))
		  {
		    sets[i].dest = pc_rtx;
		    sets[j].dest = pc_rtx;
		  }
	    }
	}
    }

  /* Now enter the equivalences in our tables.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;
      if (REG_P (dest)
	  || (MEM_P (dest) && cselib_record_memory))
	cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
    }

  /* And deal with STRICT_LOW_PART.  */
  for (i = 0; i < n_strict_low_parts; i++)
    {
      if (! PRESERVED_VALUE_P (sets[n_sets + i].src_elt->val_rtx))
	continue;
      machine_mode dest_mode = GET_MODE (sets[n_sets + i].dest);
      cselib_val *v
	= cselib_lookup (sets[n_sets + i].dest, dest_mode, 1, VOIDmode);
      cselib_preserve_value (v);
      rtx r = gen_rtx_ZERO_EXTEND (dest_mode,
				   sets[n_sets + i].src_elt->val_rtx);
      cselib_add_permanent_equiv (v, r, insn);
    }
}

/* Return true if INSN in the prologue initializes hard_frame_pointer_rtx.  */

bool
fp_setter_insn (rtx_insn *insn)
{
  rtx expr, pat = NULL_RTX;

  if (!RTX_FRAME_RELATED_P (insn))
    return false;

  expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
  if (expr)
    pat = XEXP (expr, 0);
  if (!modified_in_p (hard_frame_pointer_rtx, pat ? pat : insn))
    return false;

  /* Don't return true for frame pointer restores in the epilogue.  */
  if (find_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx))
    return false;
  return true;
}

/* Record the effects of INSN.  */

void
cselib_process_insn (rtx_insn *insn)
{
  int i;
  rtx x;

  cselib_current_insn = insn;

  /* Forget everything at a CODE_LABEL or a setjmp.  */
  if ((LABEL_P (insn)
       || (CALL_P (insn)
	   && find_reg_note (insn, REG_SETJMP, NULL)))
      && !cselib_preserve_constants)
    {
      cselib_reset_table (next_uid);
      cselib_current_insn = NULL;
      return;
    }

  if (! INSN_P (insn))
    {
      cselib_current_insn = NULL;
      return;
    }

  /* If this is a call instruction, forget anything stored in a
     call clobbered register, or, if this is not a const call, in
     memory.  */
  if (CALL_P (insn))
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (call_used_regs[i]
	    || (REG_VALUES (i) && REG_VALUES (i)->elt
		&& (targetm.hard_regno_call_part_clobbered
		    (insn, i, GET_MODE (REG_VALUES (i)->elt->val_rtx)))))
	  cselib_invalidate_regno (i, reg_raw_mode[i]);

      /* Since it is not clear how cselib is going to be used, be
	 conservative here and treat looping pure or const functions
	 as if they were regular functions.  */
      if (RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)
	  || !(RTL_CONST_OR_PURE_CALL_P (insn)))
	cselib_invalidate_mem (callmem);
      else
	/* For const/pure calls, invalidate any argument slots because
	   they are owned by the callee.  */
	for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
	  if (GET_CODE (XEXP (x, 0)) == USE
	      && MEM_P (XEXP (XEXP (x, 0), 0)))
	    cselib_invalidate_mem (XEXP (XEXP (x, 0), 0));
    }

  cselib_record_sets (insn);

  /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
     after we have processed the insn.  */
  if (CALL_P (insn))
    {
      for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
	{
	  gcc_assert (GET_CODE (XEXP (x, 0)) != CLOBBER_HIGH);
	  if (GET_CODE (XEXP (x, 0)) == CLOBBER)
	    cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));
	}
      /* Flush everything on setjmp.  */
      if (cselib_preserve_constants
	  && find_reg_note (insn, REG_SETJMP, NULL))
	{
	  cselib_preserve_only_values ();
	  cselib_reset_table (next_uid);
	}
    }

  /* When the hard frame pointer is set and frame_pointer_needed,
     invalidate stack_pointer_rtx, so that sp-based and {,h}fp-based
     VALUEs are distinct.  */
  if (reload_completed
      && frame_pointer_needed
      && fp_setter_insn (insn))
    cselib_invalidate_rtx (stack_pointer_rtx);

  cselib_current_insn = NULL;

  if (n_useless_values > MAX_USELESS_VALUES
      /* remove_useless_values is linear in the hash table size.  Avoid
         quadratic behavior for very large hashtables with very few
	 useless elements.  */
      && ((unsigned int) n_useless_values
	  > (cselib_hash_table->elements () - n_debug_values) / 4))
    remove_useless_values ();
}

/* Initialize cselib for one pass.  The caller must also call
   init_alias_analysis.  */
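
/* A typical client looks roughly like this (a sketch; the flags and
   traversal depend on the pass):

     init_alias_analysis ();
     cselib_init (CSELIB_RECORD_MEMORY);
     FOR_EACH_BB_FN (bb, cfun)
       FOR_BB_INSNS (bb, insn)
	 cselib_process_insn (insn);
     cselib_finish ();
     end_alias_analysis ();  */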

void
cselib_init (int record_what)
{
  cselib_record_memory = record_what & CSELIB_RECORD_MEMORY;
  cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS;
  cselib_any_perm_equivs = false;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything,
     see canon_true_dependence.  This is only created once.  */
  if (! callmem)
    callmem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));

  cselib_nregs = max_reg_num ();

  /* We preserve reg_values to allow expensive clearing of the whole thing.
     Reallocate it, however, if it happens to be too large.  */
  if (!reg_values || reg_values_size < cselib_nregs
      || (reg_values_size > 10 && reg_values_size > cselib_nregs * 4))
    {
      free (reg_values);
      /* Leave some space for newly emitted instructions, so we don't
	 end up reallocating in between passes.  */
      reg_values_size = cselib_nregs + (63 + cselib_nregs) / 16;
      reg_values = XCNEWVEC (struct elt_list *, reg_values_size);
    }
  used_regs = XNEWVEC (unsigned int, cselib_nregs);
  n_used_regs = 0;
  cselib_hash_table = new hash_table<cselib_hasher> (31);
  if (cselib_preserve_constants)
    cselib_preserved_hash_table = new hash_table<cselib_hasher> (31);
  next_uid = 1;
}

/* Called when the current user is done with cselib.  */

void
cselib_finish (void)
{
  bool preserved = cselib_preserve_constants;
  cselib_discard_hook = NULL;
  cselib_preserve_constants = false;
  cselib_any_perm_equivs = false;
  cfa_base_preserved_val = NULL;
  cfa_base_preserved_regno = INVALID_REGNUM;
  elt_list_pool.release ();
  elt_loc_list_pool.release ();
  cselib_val_pool.release ();
  value_pool.release ();
  cselib_clear_table ();
  delete cselib_hash_table;
  cselib_hash_table = NULL;
  if (preserved)
    delete cselib_preserved_hash_table;
  cselib_preserved_hash_table = NULL;
  free (used_regs);
  used_regs = 0;
  n_useless_values = 0;
  n_useless_debug_values = 0;
  n_debug_values = 0;
  next_uid = 0;
}

/* Dump the cselib_val *X to FILE *OUT.  */

int
dump_cselib_val (cselib_val **x, FILE *out)
{
  cselib_val *v = *x;
  bool need_lf = true;

  print_inline_rtx (out, v->val_rtx, 0);

  if (v->locs)
    {
      struct elt_loc_list *l = v->locs;
      if (need_lf)
	{
	  fputc ('\n', out);
	  need_lf = false;
	}
      fputs (" locs:", out);
      do
	{
	  if (l->setting_insn)
	    fprintf (out, "\n  from insn %i ",
		     INSN_UID (l->setting_insn));
	  else
	    fprintf (out, "\n   ");
	  print_inline_rtx (out, l->loc, 4);
	}
      while ((l = l->next));
      fputc ('\n', out);
    }
  else
    {
      fputs (" no locs", out);
      need_lf = true;
    }

  if (v->addr_list)
    {
      struct elt_list *e = v->addr_list;
      if (need_lf)
	{
	  fputc ('\n', out);
	  need_lf = false;
	}
      fputs (" addr list:", out);
      do
	{
	  fputs ("\n  ", out);
	  print_inline_rtx (out, e->elt->val_rtx, 2);
	}
      while ((e = e->next));
      fputc ('\n', out);
    }
  else
    {
      fputs (" no addrs", out);
      need_lf = true;
    }

  if (v->next_containing_mem == &dummy_val)
    fputs (" last mem\n", out);
  else if (v->next_containing_mem)
    {
      fputs (" next mem ", out);
      print_inline_rtx (out, v->next_containing_mem->val_rtx, 2);
      fputc ('\n', out);
    }
  else if (need_lf)
    fputc ('\n', out);

  return 1;
}

/* Dump to OUT everything in the CSELIB table.  */

void
dump_cselib_table (FILE *out)
{
  fprintf (out, "cselib hash table:\n");
  cselib_hash_table->traverse <FILE *, dump_cselib_val> (out);
  fprintf (out, "cselib preserved hash table:\n");
  cselib_preserved_hash_table->traverse <FILE *, dump_cselib_val> (out);
  if (first_containing_mem != &dummy_val)
    {
      fputs ("first mem ", out);
      print_inline_rtx (out, first_containing_mem->val_rtx, 2);
      fputc ('\n', out);
    }
  fprintf (out, "next uid %i\n", next_uid);
}

#include "gt-cselib.h"