1 /* Variable tracking routines for the GNU compiler.
2    Copyright (C) 2002-2014 Free Software Foundation, Inc.
3 
4    This file is part of GCC.
5 
6    GCC is free software; you can redistribute it and/or modify it
7    under the terms of the GNU General Public License as published by
8    the Free Software Foundation; either version 3, or (at your option)
9    any later version.
10 
11    GCC is distributed in the hope that it will be useful, but WITHOUT
12    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13    or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
14    License for more details.
15 
16    You should have received a copy of the GNU General Public License
17    along with GCC; see the file COPYING3.  If not see
18    <http://www.gnu.org/licenses/>.  */
19 
20 /* This file contains the variable tracking pass.  It computes where
21    variables are located (which registers or where in memory) at each position
22    in instruction stream and emits notes describing the locations.
23    Debug information (DWARF2 location lists) is finally generated from
24    these notes.
25    With this debug information, it is possible to show variables
26    even when debugging optimized code.
27 
28    How does the variable tracking pass work?
29 
30    First, it scans RTL code for uses, stores and clobbers (register/memory
31    references in instructions), for call insns and for stack adjustments
32    separately for each basic block and saves them to an array of micro
33    operations.
34    The micro operations of one instruction are ordered so that
35    pre-modifying stack adjustment < use < use with no var < call insn <
36      < clobber < set < post-modifying stack adjustment
37 
38    Then, a forward dataflow analysis is performed to find out how locations
39    of variables change through code and to propagate the variable locations
40    along control flow graph.
41    The IN set for basic block BB is computed as a union of OUT sets of BB's
42    predecessors, the OUT set for BB is copied from the IN set for BB and
43    is changed according to micro operations in BB.
44 
45    The IN and OUT sets for basic blocks consist of a current stack adjustment
46    (used for adjusting offset of variables addressed using stack pointer),
47    the table of structures describing the locations of parts of a variable
48    and for each physical register a linked list for each physical register.
49    The linked list is a list of variable parts stored in the register,
50    i.e. it is a list of triplets (reg, decl, offset) where decl is
51    REG_EXPR (reg) and offset is REG_OFFSET (reg).  The linked list is used for
52    effective deleting appropriate variable parts when we set or clobber the
53    register.
54 
55    There may be more than one variable part in a register.  The linked lists
56    should be pretty short so it is a good data structure here.
57    For example in the following code, register allocator may assign same
58    register to variables A and B, and both of them are stored in the same
59    register in CODE:
60 
61      if (cond)
62        set A;
63      else
64        set B;
65      CODE;
66      if (cond)
67        use A;
68      else
69        use B;
70 
71    Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
72    are emitted to appropriate positions in RTL code.  Each such a note describes
73    the location of one variable at the point in instruction stream where the
74    note is.  There is no need to emit a note for each variable before each
75    instruction, we only emit these notes where the location of variable changes
76    (this means that we also emit notes for changes between the OUT set of the
77    previous block and the IN set of the current block).
78 
79    The notes consist of two parts:
80    1. the declaration (from REG_EXPR or MEM_EXPR)
81    2. the location of a variable - it is either a simple register/memory
82       reference (for simple variables, for example int),
83       or a parallel of register/memory references (for a large variables
84       which consist of several parts, for example long long).
85 
86 */
87 
88 #include "config.h"
89 #include "system.h"
90 #include "coretypes.h"
91 #include "tm.h"
92 #include "rtl.h"
93 #include "tree.h"
94 #include "varasm.h"
95 #include "stor-layout.h"
96 #include "pointer-set.h"
97 #include "hash-table.h"
98 #include "basic-block.h"
99 #include "tm_p.h"
100 #include "hard-reg-set.h"
101 #include "flags.h"
102 #include "insn-config.h"
103 #include "reload.h"
104 #include "sbitmap.h"
105 #include "alloc-pool.h"
106 #include "fibheap.h"
107 #include "regs.h"
108 #include "expr.h"
109 #include "tree-pass.h"
110 #include "bitmap.h"
111 #include "tree-dfa.h"
112 #include "tree-ssa.h"
113 #include "cselib.h"
114 #include "target.h"
115 #include "params.h"
116 #include "diagnostic.h"
117 #include "tree-pretty-print.h"
118 #include "recog.h"
119 #include "tm_p.h"
120 #include "alias.h"
121 
122 /* var-tracking.c assumes that tree code with the same value as VALUE rtx code
123    has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
124    Currently the value is the same as IDENTIFIER_NODE, which has such
125    a property.  If this compile time assertion ever fails, make sure that
126    the new tree code that equals (int) VALUE has the same property.  */
127 extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
128 
129 /* Type of micro operation.  */
130 enum micro_operation_type
131 {
132   MO_USE,	/* Use location (REG or MEM).  */
133   MO_USE_NO_VAR,/* Use location which is not associated with a variable
134 		   or the variable is not trackable.  */
135   MO_VAL_USE,	/* Use location which is associated with a value.  */
136   MO_VAL_LOC,   /* Use location which appears in a debug insn.  */
137   MO_VAL_SET,	/* Set location associated with a value.  */
138   MO_SET,	/* Set location.  */
139   MO_COPY,	/* Copy the same portion of a variable from one
140 		   location to another.  */
141   MO_CLOBBER,	/* Clobber location.  */
142   MO_CALL,	/* Call insn.  */
143   MO_ADJUST	/* Adjust stack pointer.  */
144 
145 };
146 
147 static const char * const ATTRIBUTE_UNUSED
148 micro_operation_type_name[] = {
149   "MO_USE",
150   "MO_USE_NO_VAR",
151   "MO_VAL_USE",
152   "MO_VAL_LOC",
153   "MO_VAL_SET",
154   "MO_SET",
155   "MO_COPY",
156   "MO_CLOBBER",
157   "MO_CALL",
158   "MO_ADJUST"
159 };
160 
161 /* Where shall the note be emitted?  BEFORE or AFTER the instruction.
162    Notes emitted as AFTER_CALL are to take effect during the call,
163    rather than after the call.  */
164 enum emit_note_where
165 {
166   EMIT_NOTE_BEFORE_INSN,
167   EMIT_NOTE_AFTER_INSN,
168   EMIT_NOTE_AFTER_CALL_INSN
169 };
170 
171 /* Structure holding information about micro operation.  */
172 typedef struct micro_operation_def
173 {
174   /* Type of micro operation.  */
175   enum micro_operation_type type;
176 
177   /* The instruction which the micro operation is in, for MO_USE,
178      MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
179      instruction or note in the original flow (before any var-tracking
180      notes are inserted, to simplify emission of notes), for MO_SET
181      and MO_CLOBBER.  */
182   rtx insn;
183 
184   union {
185     /* Location.  For MO_SET and MO_COPY, this is the SET that
186        performs the assignment, if known, otherwise it is the target
187        of the assignment.  For MO_VAL_USE and MO_VAL_SET, it is a
188        CONCAT of the VALUE and the LOC associated with it.  For
189        MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
190        associated with it.  */
191     rtx loc;
192 
193     /* Stack adjustment.  */
194     HOST_WIDE_INT adjust;
195   } u;
196 } micro_operation;
197 
198 
199 /* A declaration of a variable, or an RTL value being handled like a
200    declaration.  */
201 typedef void *decl_or_value;
202 
203 /* Return true if a decl_or_value DV is a DECL or NULL.  */
204 static inline bool
dv_is_decl_p(decl_or_value dv)205 dv_is_decl_p (decl_or_value dv)
206 {
207   return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
208 }
209 
210 /* Return true if a decl_or_value is a VALUE rtl.  */
211 static inline bool
dv_is_value_p(decl_or_value dv)212 dv_is_value_p (decl_or_value dv)
213 {
214   return dv && !dv_is_decl_p (dv);
215 }
216 
217 /* Return the decl in the decl_or_value.  */
218 static inline tree
dv_as_decl(decl_or_value dv)219 dv_as_decl (decl_or_value dv)
220 {
221   gcc_checking_assert (dv_is_decl_p (dv));
222   return (tree) dv;
223 }
224 
225 /* Return the value in the decl_or_value.  */
226 static inline rtx
dv_as_value(decl_or_value dv)227 dv_as_value (decl_or_value dv)
228 {
229   gcc_checking_assert (dv_is_value_p (dv));
230   return (rtx)dv;
231 }
232 
233 /* Return the opaque pointer in the decl_or_value.  */
234 static inline void *
dv_as_opaque(decl_or_value dv)235 dv_as_opaque (decl_or_value dv)
236 {
237   return dv;
238 }
239 
240 
241 /* Description of location of a part of a variable.  The content of a physical
242    register is described by a chain of these structures.
243    The chains are pretty short (usually 1 or 2 elements) and thus
244    chain is the best data structure.  */
245 typedef struct attrs_def
246 {
247   /* Pointer to next member of the list.  */
248   struct attrs_def *next;
249 
250   /* The rtx of register.  */
251   rtx loc;
252 
253   /* The declaration corresponding to LOC.  */
254   decl_or_value dv;
255 
256   /* Offset from start of DECL.  */
257   HOST_WIDE_INT offset;
258 } *attrs;
259 
260 /* Structure for chaining the locations.  */
261 typedef struct location_chain_def
262 {
263   /* Next element in the chain.  */
264   struct location_chain_def *next;
265 
266   /* The location (REG, MEM or VALUE).  */
267   rtx loc;
268 
269   /* The "value" stored in this location.  */
270   rtx set_src;
271 
272   /* Initialized? */
273   enum var_init_status init;
274 } *location_chain;
275 
276 /* A vector of loc_exp_dep holds the active dependencies of a one-part
277    DV on VALUEs, i.e., the VALUEs expanded so as to form the current
278    location of DV.  Each entry is also part of VALUE' s linked-list of
279    backlinks back to DV.  */
280 typedef struct loc_exp_dep_s
281 {
282   /* The dependent DV.  */
283   decl_or_value dv;
284   /* The dependency VALUE or DECL_DEBUG.  */
285   rtx value;
286   /* The next entry in VALUE's backlinks list.  */
287   struct loc_exp_dep_s *next;
288   /* A pointer to the pointer to this entry (head or prev's next) in
289      the doubly-linked list.  */
290   struct loc_exp_dep_s **pprev;
291 } loc_exp_dep;
292 
293 
294 /* This data structure holds information about the depth of a variable
295    expansion.  */
296 typedef struct expand_depth_struct
297 {
298   /* This measures the complexity of the expanded expression.  It
299      grows by one for each level of expansion that adds more than one
300      operand.  */
301   int complexity;
302   /* This counts the number of ENTRY_VALUE expressions in an
303      expansion.  We want to minimize their use.  */
304   int entryvals;
305 } expand_depth;
306 
307 /* This data structure is allocated for one-part variables at the time
308    of emitting notes.  */
309 struct onepart_aux
310 {
311   /* Doubly-linked list of dependent DVs.  These are DVs whose cur_loc
312      computation used the expansion of this variable, and that ought
313      to be notified should this variable change.  If the DV's cur_loc
314      expanded to NULL, all components of the loc list are regarded as
315      active, so that any changes in them give us a chance to get a
316      location.  Otherwise, only components of the loc that expanded to
317      non-NULL are regarded as active dependencies.  */
318   loc_exp_dep *backlinks;
319   /* This holds the LOC that was expanded into cur_loc.  We need only
320      mark a one-part variable as changed if the FROM loc is removed,
321      or if it has no known location and a loc is added, or if it gets
322      a change notification from any of its active dependencies.  */
323   rtx from;
324   /* The depth of the cur_loc expression.  */
325   expand_depth depth;
326   /* Dependencies actively used when expand FROM into cur_loc.  */
327   vec<loc_exp_dep, va_heap, vl_embed> deps;
328 };
329 
330 /* Structure describing one part of variable.  */
331 typedef struct variable_part_def
332 {
333   /* Chain of locations of the part.  */
334   location_chain loc_chain;
335 
336   /* Location which was last emitted to location list.  */
337   rtx cur_loc;
338 
339   union variable_aux
340   {
341     /* The offset in the variable, if !var->onepart.  */
342     HOST_WIDE_INT offset;
343 
344     /* Pointer to auxiliary data, if var->onepart and emit_notes.  */
345     struct onepart_aux *onepaux;
346   } aux;
347 } variable_part;
348 
349 /* Maximum number of location parts.  */
350 #define MAX_VAR_PARTS 16
351 
352 /* Enumeration type used to discriminate various types of one-part
353    variables.  */
354 typedef enum onepart_enum
355 {
356   /* Not a one-part variable.  */
357   NOT_ONEPART = 0,
358   /* A one-part DECL that is not a DEBUG_EXPR_DECL.  */
359   ONEPART_VDECL = 1,
360   /* A DEBUG_EXPR_DECL.  */
361   ONEPART_DEXPR = 2,
362   /* A VALUE.  */
363   ONEPART_VALUE = 3
364 } onepart_enum_t;
365 
366 /* Structure describing where the variable is located.  */
367 typedef struct variable_def
368 {
369   /* The declaration of the variable, or an RTL value being handled
370      like a declaration.  */
371   decl_or_value dv;
372 
373   /* Reference count.  */
374   int refcount;
375 
376   /* Number of variable parts.  */
377   char n_var_parts;
378 
379   /* What type of DV this is, according to enum onepart_enum.  */
380   ENUM_BITFIELD (onepart_enum) onepart : CHAR_BIT;
381 
382   /* True if this variable_def struct is currently in the
383      changed_variables hash table.  */
384   bool in_changed_variables;
385 
386   /* The variable parts.  */
387   variable_part var_part[1];
388 } *variable;
389 typedef const struct variable_def *const_variable;
390 
391 /* Pointer to the BB's information specific to variable tracking pass.  */
392 #define VTI(BB) ((variable_tracking_info) (BB)->aux)
393 
394 /* Macro to access MEM_OFFSET as an HOST_WIDE_INT.  Evaluates MEM twice.  */
395 #define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0)
396 
397 #if ENABLE_CHECKING && (GCC_VERSION >= 2007)
398 
399 /* Access VAR's Ith part's offset, checking that it's not a one-part
400    variable.  */
401 #define VAR_PART_OFFSET(var, i) __extension__			\
402 (*({  variable const __v = (var);				\
403       gcc_checking_assert (!__v->onepart);			\
404       &__v->var_part[(i)].aux.offset; }))
405 
406 /* Access VAR's one-part auxiliary data, checking that it is a
407    one-part variable.  */
408 #define VAR_LOC_1PAUX(var) __extension__			\
409 (*({  variable const __v = (var);				\
410       gcc_checking_assert (__v->onepart);			\
411       &__v->var_part[0].aux.onepaux; }))
412 
413 #else
414 #define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
415 #define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
416 #endif
417 
418 /* These are accessor macros for the one-part auxiliary data.  When
419    convenient for users, they're guarded by tests that the data was
420    allocated.  */
421 #define VAR_LOC_DEP_LST(var) (VAR_LOC_1PAUX (var)		  \
422 			      ? VAR_LOC_1PAUX (var)->backlinks	  \
423 			      : NULL)
424 #define VAR_LOC_DEP_LSTP(var) (VAR_LOC_1PAUX (var)		  \
425 			       ? &VAR_LOC_1PAUX (var)->backlinks  \
426 			       : NULL)
427 #define VAR_LOC_FROM(var) (VAR_LOC_1PAUX (var)->from)
428 #define VAR_LOC_DEPTH(var) (VAR_LOC_1PAUX (var)->depth)
429 #define VAR_LOC_DEP_VEC(var) (VAR_LOC_1PAUX (var)		  \
430 			      ? &VAR_LOC_1PAUX (var)->deps	  \
431 			      : NULL)
432 
433 
434 
435 typedef unsigned int dvuid;
436 
437 /* Return the uid of DV.  */
438 
439 static inline dvuid
dv_uid(decl_or_value dv)440 dv_uid (decl_or_value dv)
441 {
442   if (dv_is_value_p (dv))
443     return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
444   else
445     return DECL_UID (dv_as_decl (dv));
446 }
447 
448 /* Compute the hash from the uid.  */
449 
450 static inline hashval_t
dv_uid2hash(dvuid uid)451 dv_uid2hash (dvuid uid)
452 {
453   return uid;
454 }
455 
456 /* The hash function for a mask table in a shared_htab chain.  */
457 
458 static inline hashval_t
dv_htab_hash(decl_or_value dv)459 dv_htab_hash (decl_or_value dv)
460 {
461   return dv_uid2hash (dv_uid (dv));
462 }
463 
464 static void variable_htab_free (void *);
465 
466 /* Variable hashtable helpers.  */
467 
468 struct variable_hasher
469 {
470   typedef variable_def value_type;
471   typedef void compare_type;
472   static inline hashval_t hash (const value_type *);
473   static inline bool equal (const value_type *, const compare_type *);
474   static inline void remove (value_type *);
475 };
476 
477 /* The hash function for variable_htab, computes the hash value
478    from the declaration of variable X.  */
479 
480 inline hashval_t
hash(const value_type * v)481 variable_hasher::hash (const value_type *v)
482 {
483   return dv_htab_hash (v->dv);
484 }
485 
486 /* Compare the declaration of variable X with declaration Y.  */
487 
488 inline bool
equal(const value_type * v,const compare_type * y)489 variable_hasher::equal (const value_type *v, const compare_type *y)
490 {
491   decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
492 
493   return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
494 }
495 
496 /* Free the element of VARIABLE_HTAB (its type is struct variable_def).  */
497 
498 inline void
remove(value_type * var)499 variable_hasher::remove (value_type *var)
500 {
501   variable_htab_free (var);
502 }
503 
504 typedef hash_table <variable_hasher> variable_table_type;
505 typedef variable_table_type::iterator variable_iterator_type;
506 
507 /* Structure for passing some other parameters to function
508    emit_note_insn_var_location.  */
509 typedef struct emit_note_data_def
510 {
511   /* The instruction which the note will be emitted before/after.  */
512   rtx insn;
513 
514   /* Where the note will be emitted (before/after insn)?  */
515   enum emit_note_where where;
516 
517   /* The variables and values active at this point.  */
518   variable_table_type vars;
519 } emit_note_data;
520 
521 /* Structure holding a refcounted hash table.  If refcount > 1,
522    it must be first unshared before modified.  */
523 typedef struct shared_hash_def
524 {
525   /* Reference count.  */
526   int refcount;
527 
528   /* Actual hash table.  */
529   variable_table_type htab;
530 } *shared_hash;
531 
532 /* Structure holding the IN or OUT set for a basic block.  */
533 typedef struct dataflow_set_def
534 {
535   /* Adjustment of stack offset.  */
536   HOST_WIDE_INT stack_adjust;
537 
538   /* Attributes for registers (lists of attrs).  */
539   attrs regs[FIRST_PSEUDO_REGISTER];
540 
541   /* Variable locations.  */
542   shared_hash vars;
543 
544   /* Vars that is being traversed.  */
545   shared_hash traversed_vars;
546 } dataflow_set;
547 
548 /* The structure (one for each basic block) containing the information
549    needed for variable tracking.  */
550 typedef struct variable_tracking_info_def
551 {
552   /* The vector of micro operations.  */
553   vec<micro_operation> mos;
554 
555   /* The IN and OUT set for dataflow analysis.  */
556   dataflow_set in;
557   dataflow_set out;
558 
559   /* The permanent-in dataflow set for this block.  This is used to
560      hold values for which we had to compute entry values.  ??? This
561      should probably be dynamically allocated, to avoid using more
562      memory in non-debug builds.  */
563   dataflow_set *permp;
564 
565   /* Has the block been visited in DFS?  */
566   bool visited;
567 
568   /* Has the block been flooded in VTA?  */
569   bool flooded;
570 
571 } *variable_tracking_info;
572 
573 /* Alloc pool for struct attrs_def.  */
574 static alloc_pool attrs_pool;
575 
576 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries.  */
577 static alloc_pool var_pool;
578 
579 /* Alloc pool for struct variable_def with a single var_part entry.  */
580 static alloc_pool valvar_pool;
581 
582 /* Alloc pool for struct location_chain_def.  */
583 static alloc_pool loc_chain_pool;
584 
585 /* Alloc pool for struct shared_hash_def.  */
586 static alloc_pool shared_hash_pool;
587 
588 /* Alloc pool for struct loc_exp_dep_s for NOT_ONEPART variables.  */
589 static alloc_pool loc_exp_dep_pool;
590 
591 /* Changed variables, notes will be emitted for them.  */
592 static variable_table_type changed_variables;
593 
594 /* Shall notes be emitted?  */
595 static bool emit_notes;
596 
597 /* Values whose dynamic location lists have gone empty, but whose
598    cselib location lists are still usable.  Use this to hold the
599    current location, the backlinks, etc, during emit_notes.  */
600 static variable_table_type dropped_values;
601 
602 /* Empty shared hashtable.  */
603 static shared_hash empty_shared_hash;
604 
605 /* Scratch register bitmap used by cselib_expand_value_rtx.  */
606 static bitmap scratch_regs = NULL;
607 
608 #ifdef HAVE_window_save
609 typedef struct GTY(()) parm_reg {
610   rtx outgoing;
611   rtx incoming;
612 } parm_reg_t;
613 
614 
615 /* Vector of windowed parameter registers, if any.  */
616 static vec<parm_reg_t, va_gc> *windowed_parm_regs = NULL;
617 #endif
618 
619 /* Variable used to tell whether cselib_process_insn called our hook.  */
620 static bool cselib_hook_called;
621 
622 /* Local function prototypes.  */
623 static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
624 					  HOST_WIDE_INT *);
625 static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
626 					       HOST_WIDE_INT *);
627 static bool vt_stack_adjustments (void);
628 
629 static void init_attrs_list_set (attrs *);
630 static void attrs_list_clear (attrs *);
631 static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
632 static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
633 static void attrs_list_copy (attrs *, attrs);
634 static void attrs_list_union (attrs *, attrs);
635 
636 static variable_def **unshare_variable (dataflow_set *set, variable_def **slot,
637 					variable var, enum var_init_status);
638 static void vars_copy (variable_table_type, variable_table_type);
639 static tree var_debug_decl (tree);
640 static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
641 static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
642 				    enum var_init_status, rtx);
643 static void var_reg_delete (dataflow_set *, rtx, bool);
644 static void var_regno_delete (dataflow_set *, int);
645 static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
646 static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
647 				    enum var_init_status, rtx);
648 static void var_mem_delete (dataflow_set *, rtx, bool);
649 
650 static void dataflow_set_init (dataflow_set *);
651 static void dataflow_set_clear (dataflow_set *);
652 static void dataflow_set_copy (dataflow_set *, dataflow_set *);
653 static int variable_union_info_cmp_pos (const void *, const void *);
654 static void dataflow_set_union (dataflow_set *, dataflow_set *);
655 static location_chain find_loc_in_1pdv (rtx, variable, variable_table_type);
656 static bool canon_value_cmp (rtx, rtx);
657 static int loc_cmp (rtx, rtx);
658 static bool variable_part_different_p (variable_part *, variable_part *);
659 static bool onepart_variable_different_p (variable, variable);
660 static bool variable_different_p (variable, variable);
661 static bool dataflow_set_different (dataflow_set *, dataflow_set *);
662 static void dataflow_set_destroy (dataflow_set *);
663 
664 static bool contains_symbol_ref (rtx);
665 static bool track_expr_p (tree, bool);
666 static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
667 static int add_uses (rtx *, void *);
668 static void add_uses_1 (rtx *, void *);
669 static void add_stores (rtx, const_rtx, void *);
670 static bool compute_bb_dataflow (basic_block);
671 static bool vt_find_locations (void);
672 
673 static void dump_attrs_list (attrs);
674 static void dump_var (variable);
675 static void dump_vars (variable_table_type);
676 static void dump_dataflow_set (dataflow_set *);
677 static void dump_dataflow_sets (void);
678 
679 static void set_dv_changed (decl_or_value, bool);
680 static void variable_was_changed (variable, dataflow_set *);
681 static variable_def **set_slot_part (dataflow_set *, rtx, variable_def **,
682 				     decl_or_value, HOST_WIDE_INT,
683 				     enum var_init_status, rtx);
684 static void set_variable_part (dataflow_set *, rtx,
685 			       decl_or_value, HOST_WIDE_INT,
686 			       enum var_init_status, rtx, enum insert_option);
687 static variable_def **clobber_slot_part (dataflow_set *, rtx,
688 					 variable_def **, HOST_WIDE_INT, rtx);
689 static void clobber_variable_part (dataflow_set *, rtx,
690 				   decl_or_value, HOST_WIDE_INT, rtx);
691 static variable_def **delete_slot_part (dataflow_set *, rtx, variable_def **,
692 					HOST_WIDE_INT);
693 static void delete_variable_part (dataflow_set *, rtx,
694 				  decl_or_value, HOST_WIDE_INT);
695 static void emit_notes_in_bb (basic_block, dataflow_set *);
696 static void vt_emit_notes (void);
697 
698 static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
699 static void vt_add_function_parameters (void);
700 static bool vt_initialize (void);
701 static void vt_finalize (void);
702 
703 /* Given a SET, calculate the amount of stack adjustment it contains
704    PRE- and POST-modifying stack pointer.
705    This function is similar to stack_adjust_offset.  */
706 
707 static void
stack_adjust_offset_pre_post(rtx pattern,HOST_WIDE_INT * pre,HOST_WIDE_INT * post)708 stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
709 			      HOST_WIDE_INT *post)
710 {
711   rtx src = SET_SRC (pattern);
712   rtx dest = SET_DEST (pattern);
713   enum rtx_code code;
714 
715   if (dest == stack_pointer_rtx)
716     {
717       /* (set (reg sp) (plus (reg sp) (const_int))) */
718       code = GET_CODE (src);
719       if (! (code == PLUS || code == MINUS)
720 	  || XEXP (src, 0) != stack_pointer_rtx
721 	  || !CONST_INT_P (XEXP (src, 1)))
722 	return;
723 
724       if (code == MINUS)
725 	*post += INTVAL (XEXP (src, 1));
726       else
727 	*post -= INTVAL (XEXP (src, 1));
728     }
729   else if (MEM_P (dest))
730     {
731       /* (set (mem (pre_dec (reg sp))) (foo)) */
732       src = XEXP (dest, 0);
733       code = GET_CODE (src);
734 
735       switch (code)
736 	{
737 	case PRE_MODIFY:
738 	case POST_MODIFY:
739 	  if (XEXP (src, 0) == stack_pointer_rtx)
740 	    {
741 	      rtx val = XEXP (XEXP (src, 1), 1);
742 	      /* We handle only adjustments by constant amount.  */
743 	      gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS &&
744 			  CONST_INT_P (val));
745 
746 	      if (code == PRE_MODIFY)
747 		*pre -= INTVAL (val);
748 	      else
749 		*post -= INTVAL (val);
750 	      break;
751 	    }
752 	  return;
753 
754 	case PRE_DEC:
755 	  if (XEXP (src, 0) == stack_pointer_rtx)
756 	    {
757 	      *pre += GET_MODE_SIZE (GET_MODE (dest));
758 	      break;
759 	    }
760 	  return;
761 
762 	case POST_DEC:
763 	  if (XEXP (src, 0) == stack_pointer_rtx)
764 	    {
765 	      *post += GET_MODE_SIZE (GET_MODE (dest));
766 	      break;
767 	    }
768 	  return;
769 
770 	case PRE_INC:
771 	  if (XEXP (src, 0) == stack_pointer_rtx)
772 	    {
773 	      *pre -= GET_MODE_SIZE (GET_MODE (dest));
774 	      break;
775 	    }
776 	  return;
777 
778 	case POST_INC:
779 	  if (XEXP (src, 0) == stack_pointer_rtx)
780 	    {
781 	      *post -= GET_MODE_SIZE (GET_MODE (dest));
782 	      break;
783 	    }
784 	  return;
785 
786 	default:
787 	  return;
788 	}
789     }
790 }
791 
792 /* Given an INSN, calculate the amount of stack adjustment it contains
793    PRE- and POST-modifying stack pointer.  */
794 
795 static void
insn_stack_adjust_offset_pre_post(rtx insn,HOST_WIDE_INT * pre,HOST_WIDE_INT * post)796 insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre,
797 				   HOST_WIDE_INT *post)
798 {
799   rtx pattern;
800 
801   *pre = 0;
802   *post = 0;
803 
804   pattern = PATTERN (insn);
805   if (RTX_FRAME_RELATED_P (insn))
806     {
807       rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
808       if (expr)
809 	pattern = XEXP (expr, 0);
810     }
811 
812   if (GET_CODE (pattern) == SET)
813     stack_adjust_offset_pre_post (pattern, pre, post);
814   else if (GET_CODE (pattern) == PARALLEL
815 	   || GET_CODE (pattern) == SEQUENCE)
816     {
817       int i;
818 
819       /* There may be stack adjustments inside compound insns.  Search
820 	 for them.  */
821       for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
822 	if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
823 	  stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
824     }
825 }
826 
827 /* Compute stack adjustments for all blocks by traversing DFS tree.
828    Return true when the adjustments on all incoming edges are consistent.
829    Heavily borrowed from pre_and_rev_post_order_compute.  */
830 
831 static bool
vt_stack_adjustments(void)832 vt_stack_adjustments (void)
833 {
834   edge_iterator *stack;
835   int sp;
836 
837   /* Initialize entry block.  */
838   VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->visited = true;
839   VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->in.stack_adjust =
840  INCOMING_FRAME_SP_OFFSET;
841   VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out.stack_adjust =
842  INCOMING_FRAME_SP_OFFSET;
843 
844   /* Allocate stack for back-tracking up CFG.  */
845   stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
846   sp = 0;
847 
848   /* Push the first edge on to the stack.  */
849   stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
850 
851   while (sp)
852     {
853       edge_iterator ei;
854       basic_block src;
855       basic_block dest;
856 
857       /* Look at the edge on the top of the stack.  */
858       ei = stack[sp - 1];
859       src = ei_edge (ei)->src;
860       dest = ei_edge (ei)->dest;
861 
862       /* Check if the edge destination has been visited yet.  */
863       if (!VTI (dest)->visited)
864 	{
865 	  rtx insn;
866 	  HOST_WIDE_INT pre, post, offset;
867 	  VTI (dest)->visited = true;
868 	  VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;
869 
870 	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
871 	    for (insn = BB_HEAD (dest);
872 		 insn != NEXT_INSN (BB_END (dest));
873 		 insn = NEXT_INSN (insn))
874 	      if (INSN_P (insn))
875 		{
876 		  insn_stack_adjust_offset_pre_post (insn, &pre, &post);
877 		  offset += pre + post;
878 		}
879 
880 	  VTI (dest)->out.stack_adjust = offset;
881 
882 	  if (EDGE_COUNT (dest->succs) > 0)
883 	    /* Since the DEST node has been visited for the first
884 	       time, check its successors.  */
885 	    stack[sp++] = ei_start (dest->succs);
886 	}
887       else
888 	{
889 	  /* We can end up with different stack adjustments for the exit block
890 	     of a shrink-wrapped function if stack_adjust_offset_pre_post
891 	     doesn't understand the rtx pattern used to restore the stack
892 	     pointer in the epilogue.  For example, on s390(x), the stack
893 	     pointer is often restored via a load-multiple instruction
894 	     and so no stack_adjust offset is recorded for it.  This means
895 	     that the stack offset at the end of the epilogue block is the
896 	     the same as the offset before the epilogue, whereas other paths
897 	     to the exit block will have the correct stack_adjust.
898 
899 	     It is safe to ignore these differences because (a) we never
900 	     use the stack_adjust for the exit block in this pass and
901 	     (b) dwarf2cfi checks whether the CFA notes in a shrink-wrapped
902 	     function are correct.
903 
904 	     We must check whether the adjustments on other edges are
905 	     the same though.  */
906 	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
907 	      && VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
908 	    {
909 	      free (stack);
910 	      return false;
911 	    }
912 
913 	  if (! ei_one_before_end_p (ei))
914 	    /* Go to the next edge.  */
915 	    ei_next (&stack[sp - 1]);
916 	  else
917 	    /* Return to previous level if there are no more edges.  */
918 	    sp--;
919 	}
920     }
921 
922   free (stack);
923   return true;
924 }
925 
926 /* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or
927    hard_frame_pointer_rtx is being mapped to it and offset for it.  */
928 static rtx cfa_base_rtx;
929 static HOST_WIDE_INT cfa_base_offset;
930 
931 /* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
932    or hard_frame_pointer_rtx.  */
933 
934 static inline rtx
compute_cfa_pointer(HOST_WIDE_INT adjustment)935 compute_cfa_pointer (HOST_WIDE_INT adjustment)
936 {
937   return plus_constant (Pmode, cfa_base_rtx, adjustment + cfa_base_offset);
938 }
939 
940 /* Adjustment for hard_frame_pointer_rtx to cfa base reg,
941    or -1 if the replacement shouldn't be done.  */
942 static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;
943 
944 /* Data for adjust_mems callback.  */
945 
946 struct adjust_mem_data
947 {
948   bool store;
949   enum machine_mode mem_mode;
950   HOST_WIDE_INT stack_adjust;
951   rtx side_effects;
952 };
953 
954 /* Helper for adjust_mems.  Return 1 if *loc is unsuitable for
955    transformation of wider mode arithmetics to narrower mode,
956    -1 if it is suitable and subexpressions shouldn't be
957    traversed and 0 if it is suitable and subexpressions should
958    be traversed.  Called through for_each_rtx.  */
959 
960 static int
use_narrower_mode_test(rtx * loc,void * data)961 use_narrower_mode_test (rtx *loc, void *data)
962 {
963   rtx subreg = (rtx) data;
964 
965   if (CONSTANT_P (*loc))
966     return -1;
967   switch (GET_CODE (*loc))
968     {
969     case REG:
970       if (cselib_lookup (*loc, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
971 	return 1;
972       if (!validate_subreg (GET_MODE (subreg), GET_MODE (*loc),
973 			    *loc, subreg_lowpart_offset (GET_MODE (subreg),
974 							 GET_MODE (*loc))))
975 	return 1;
976       return -1;
977     case PLUS:
978     case MINUS:
979     case MULT:
980       return 0;
981     case ASHIFT:
982       if (for_each_rtx (&XEXP (*loc, 0), use_narrower_mode_test, data))
983 	return 1;
984       else
985 	return -1;
986     default:
987       return 1;
988     }
989 }
990 
991 /* Transform X into narrower mode MODE from wider mode WMODE.  */
992 
993 static rtx
use_narrower_mode(rtx x,enum machine_mode mode,enum machine_mode wmode)994 use_narrower_mode (rtx x, enum machine_mode mode, enum machine_mode wmode)
995 {
996   rtx op0, op1;
997   if (CONSTANT_P (x))
998     return lowpart_subreg (mode, x, wmode);
999   switch (GET_CODE (x))
1000     {
1001     case REG:
1002       return lowpart_subreg (mode, x, wmode);
1003     case PLUS:
1004     case MINUS:
1005     case MULT:
1006       op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
1007       op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
1008       return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
1009     case ASHIFT:
1010       op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
1011       return simplify_gen_binary (ASHIFT, mode, op0, XEXP (x, 1));
1012     default:
1013       gcc_unreachable ();
1014     }
1015 }
1016 
1017 /* Helper function for adjusting used MEMs.  */
1018 
1019 static rtx
adjust_mems(rtx loc,const_rtx old_rtx,void * data)1020 adjust_mems (rtx loc, const_rtx old_rtx, void *data)
1021 {
1022   struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
1023   rtx mem, addr = loc, tem;
1024   enum machine_mode mem_mode_save;
1025   bool store_save;
1026   switch (GET_CODE (loc))
1027     {
1028     case REG:
1029       /* Don't do any sp or fp replacements outside of MEM addresses
1030          on the LHS.  */
1031       if (amd->mem_mode == VOIDmode && amd->store)
1032 	return loc;
1033       if (loc == stack_pointer_rtx
1034 	  && !frame_pointer_needed
1035 	  && cfa_base_rtx)
1036 	return compute_cfa_pointer (amd->stack_adjust);
1037       else if (loc == hard_frame_pointer_rtx
1038 	       && frame_pointer_needed
1039 	       && hard_frame_pointer_adjustment != -1
1040 	       && cfa_base_rtx)
1041 	return compute_cfa_pointer (hard_frame_pointer_adjustment);
1042       gcc_checking_assert (loc != virtual_incoming_args_rtx);
1043       return loc;
1044     case MEM:
1045       mem = loc;
1046       if (!amd->store)
1047 	{
1048 	  mem = targetm.delegitimize_address (mem);
1049 	  if (mem != loc && !MEM_P (mem))
1050 	    return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
1051 	}
1052 
1053       addr = XEXP (mem, 0);
1054       mem_mode_save = amd->mem_mode;
1055       amd->mem_mode = GET_MODE (mem);
1056       store_save = amd->store;
1057       amd->store = false;
1058       addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1059       amd->store = store_save;
1060       amd->mem_mode = mem_mode_save;
1061       if (mem == loc)
1062 	addr = targetm.delegitimize_address (addr);
1063       if (addr != XEXP (mem, 0))
1064 	mem = replace_equiv_address_nv (mem, addr);
1065       if (!amd->store)
1066 	mem = avoid_constant_pool_reference (mem);
1067       return mem;
1068     case PRE_INC:
1069     case PRE_DEC:
1070       addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
1071 			   gen_int_mode (GET_CODE (loc) == PRE_INC
1072 					 ? GET_MODE_SIZE (amd->mem_mode)
1073 					 : -GET_MODE_SIZE (amd->mem_mode),
1074 					 GET_MODE (loc)));
1075     case POST_INC:
1076     case POST_DEC:
1077       if (addr == loc)
1078 	addr = XEXP (loc, 0);
1079       gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
1080       addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1081       tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
1082 			  gen_int_mode ((GET_CODE (loc) == PRE_INC
1083 					 || GET_CODE (loc) == POST_INC)
1084 					? GET_MODE_SIZE (amd->mem_mode)
1085 					: -GET_MODE_SIZE (amd->mem_mode),
1086 					GET_MODE (loc)));
1087       store_save = amd->store;
1088       amd->store = false;
1089       tem = simplify_replace_fn_rtx (tem, old_rtx, adjust_mems, data);
1090       amd->store = store_save;
1091       amd->side_effects = alloc_EXPR_LIST (0,
1092 					   gen_rtx_SET (VOIDmode,
1093 							XEXP (loc, 0), tem),
1094 					   amd->side_effects);
1095       return addr;
1096     case PRE_MODIFY:
1097       addr = XEXP (loc, 1);
1098     case POST_MODIFY:
1099       if (addr == loc)
1100 	addr = XEXP (loc, 0);
1101       gcc_assert (amd->mem_mode != VOIDmode);
1102       addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1103       store_save = amd->store;
1104       amd->store = false;
1105       tem = simplify_replace_fn_rtx (XEXP (loc, 1), old_rtx,
1106 				     adjust_mems, data);
1107       amd->store = store_save;
1108       amd->side_effects = alloc_EXPR_LIST (0,
1109 					   gen_rtx_SET (VOIDmode,
1110 							XEXP (loc, 0), tem),
1111 					   amd->side_effects);
1112       return addr;
1113     case SUBREG:
1114       /* First try without delegitimization of whole MEMs and
1115 	 avoid_constant_pool_reference, which is more likely to succeed.  */
1116       store_save = amd->store;
1117       amd->store = true;
1118       addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
1119 				      data);
1120       amd->store = store_save;
1121       mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1122       if (mem == SUBREG_REG (loc))
1123 	{
1124 	  tem = loc;
1125 	  goto finish_subreg;
1126 	}
1127       tem = simplify_gen_subreg (GET_MODE (loc), mem,
1128 				 GET_MODE (SUBREG_REG (loc)),
1129 				 SUBREG_BYTE (loc));
1130       if (tem)
1131 	goto finish_subreg;
1132       tem = simplify_gen_subreg (GET_MODE (loc), addr,
1133 				 GET_MODE (SUBREG_REG (loc)),
1134 				 SUBREG_BYTE (loc));
1135       if (tem == NULL_RTX)
1136 	tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
1137     finish_subreg:
1138       if (MAY_HAVE_DEBUG_INSNS
1139 	  && GET_CODE (tem) == SUBREG
1140 	  && (GET_CODE (SUBREG_REG (tem)) == PLUS
1141 	      || GET_CODE (SUBREG_REG (tem)) == MINUS
1142 	      || GET_CODE (SUBREG_REG (tem)) == MULT
1143 	      || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
1144 	  && GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
1145 	  && GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
1146 	  && GET_MODE_SIZE (GET_MODE (tem))
1147 	     < GET_MODE_SIZE (GET_MODE (SUBREG_REG (tem)))
1148 	  && subreg_lowpart_p (tem)
1149 	  && !for_each_rtx (&SUBREG_REG (tem), use_narrower_mode_test, tem))
1150 	return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
1151 				  GET_MODE (SUBREG_REG (tem)));
1152       return tem;
1153     case ASM_OPERANDS:
1154       /* Don't do any replacements in second and following
1155 	 ASM_OPERANDS of inline-asm with multiple sets.
1156 	 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
1157 	 and ASM_OPERANDS_LABEL_VEC need to be equal between
1158 	 all the ASM_OPERANDs in the insn and adjust_insn will
1159 	 fix this up.  */
1160       if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
1161 	return loc;
1162       break;
1163     default:
1164       break;
1165     }
1166   return NULL_RTX;
1167 }
1168 
1169 /* Helper function for replacement of uses.  */
1170 
1171 static void
adjust_mem_uses(rtx * x,void * data)1172 adjust_mem_uses (rtx *x, void *data)
1173 {
1174   rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
1175   if (new_x != *x)
1176     validate_change (NULL_RTX, x, new_x, true);
1177 }
1178 
1179 /* Helper function for replacement of stores.  */
1180 
1181 static void
adjust_mem_stores(rtx loc,const_rtx expr,void * data)1182 adjust_mem_stores (rtx loc, const_rtx expr, void *data)
1183 {
1184   if (MEM_P (loc))
1185     {
1186       rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
1187 					      adjust_mems, data);
1188       if (new_dest != SET_DEST (expr))
1189 	{
1190 	  rtx xexpr = CONST_CAST_RTX (expr);
1191 	  validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
1192 	}
1193     }
1194 }
1195 
1196 /* Simplify INSN.  Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
1197    replace them with their value in the insn and add the side-effects
1198    as other sets to the insn.  */
1199 
1200 static void
adjust_insn(basic_block bb,rtx insn)1201 adjust_insn (basic_block bb, rtx insn)
1202 {
1203   struct adjust_mem_data amd;
1204   rtx set;
1205 
1206 #ifdef HAVE_window_save
1207   /* If the target machine has an explicit window save instruction, the
1208      transformation OUTGOING_REGNO -> INCOMING_REGNO is done there.  */
1209   if (RTX_FRAME_RELATED_P (insn)
1210       && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
1211     {
1212       unsigned int i, nregs = vec_safe_length (windowed_parm_regs);
1213       rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
1214       parm_reg_t *p;
1215 
1216       FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs, i, p)
1217 	{
1218 	  XVECEXP (rtl, 0, i * 2)
1219 	    = gen_rtx_SET (VOIDmode, p->incoming, p->outgoing);
1220 	  /* Do not clobber the attached DECL, but only the REG.  */
1221 	  XVECEXP (rtl, 0, i * 2 + 1)
1222 	    = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
1223 			       gen_raw_REG (GET_MODE (p->outgoing),
1224 					    REGNO (p->outgoing)));
1225 	}
1226 
1227       validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
1228       return;
1229     }
1230 #endif
1231 
1232   amd.mem_mode = VOIDmode;
1233   amd.stack_adjust = -VTI (bb)->out.stack_adjust;
1234   amd.side_effects = NULL_RTX;
1235 
1236   amd.store = true;
1237   note_stores (PATTERN (insn), adjust_mem_stores, &amd);
1238 
1239   amd.store = false;
1240   if (GET_CODE (PATTERN (insn)) == PARALLEL
1241       && asm_noperands (PATTERN (insn)) > 0
1242       && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1243     {
1244       rtx body, set0;
1245       int i;
1246 
1247       /* inline-asm with multiple sets is tiny bit more complicated,
1248 	 because the 3 vectors in ASM_OPERANDS need to be shared between
1249 	 all ASM_OPERANDS in the instruction.  adjust_mems will
1250 	 not touch ASM_OPERANDS other than the first one, asm_noperands
1251 	 test above needs to be called before that (otherwise it would fail)
1252 	 and afterwards this code fixes it up.  */
1253       note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
1254       body = PATTERN (insn);
1255       set0 = XVECEXP (body, 0, 0);
1256       gcc_checking_assert (GET_CODE (set0) == SET
1257 			   && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
1258 			   && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
1259       for (i = 1; i < XVECLEN (body, 0); i++)
1260 	if (GET_CODE (XVECEXP (body, 0, i)) != SET)
1261 	  break;
1262 	else
1263 	  {
1264 	    set = XVECEXP (body, 0, i);
1265 	    gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
1266 				 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
1267 				    == i);
1268 	    if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
1269 		!= ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
1270 		|| ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
1271 		   != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
1272 		|| ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
1273 		   != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
1274 	      {
1275 		rtx newsrc = shallow_copy_rtx (SET_SRC (set));
1276 		ASM_OPERANDS_INPUT_VEC (newsrc)
1277 		  = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
1278 		ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
1279 		  = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
1280 		ASM_OPERANDS_LABEL_VEC (newsrc)
1281 		  = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
1282 		validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
1283 	      }
1284 	  }
1285     }
1286   else
1287     note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
1288 
1289   /* For read-only MEMs containing some constant, prefer those
1290      constants.  */
1291   set = single_set (insn);
1292   if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
1293     {
1294       rtx note = find_reg_equal_equiv_note (insn);
1295 
1296       if (note && CONSTANT_P (XEXP (note, 0)))
1297 	validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
1298     }
1299 
1300   if (amd.side_effects)
1301     {
1302       rtx *pat, new_pat, s;
1303       int i, oldn, newn;
1304 
1305       pat = &PATTERN (insn);
1306       if (GET_CODE (*pat) == COND_EXEC)
1307 	pat = &COND_EXEC_CODE (*pat);
1308       if (GET_CODE (*pat) == PARALLEL)
1309 	oldn = XVECLEN (*pat, 0);
1310       else
1311 	oldn = 1;
1312       for (s = amd.side_effects, newn = 0; s; newn++)
1313 	s = XEXP (s, 1);
1314       new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
1315       if (GET_CODE (*pat) == PARALLEL)
1316 	for (i = 0; i < oldn; i++)
1317 	  XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
1318       else
1319 	XVECEXP (new_pat, 0, 0) = *pat;
1320       for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
1321 	XVECEXP (new_pat, 0, i) = XEXP (s, 0);
1322       free_EXPR_LIST_list (&amd.side_effects);
1323       validate_change (NULL_RTX, pat, new_pat, true);
1324     }
1325 }
1326 
1327 /* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV.  */
1328 static inline rtx
dv_as_rtx(decl_or_value dv)1329 dv_as_rtx (decl_or_value dv)
1330 {
1331   tree decl;
1332 
1333   if (dv_is_value_p (dv))
1334     return dv_as_value (dv);
1335 
1336   decl = dv_as_decl (dv);
1337 
1338   gcc_checking_assert (TREE_CODE (decl) == DEBUG_EXPR_DECL);
1339   return DECL_RTL_KNOWN_SET (decl);
1340 }
1341 
1342 /* Return nonzero if a decl_or_value must not have more than one
1343    variable part.  The returned value discriminates among various
1344    kinds of one-part DVs ccording to enum onepart_enum.  */
1345 static inline onepart_enum_t
dv_onepart_p(decl_or_value dv)1346 dv_onepart_p (decl_or_value dv)
1347 {
1348   tree decl;
1349 
1350   if (!MAY_HAVE_DEBUG_INSNS)
1351     return NOT_ONEPART;
1352 
1353   if (dv_is_value_p (dv))
1354     return ONEPART_VALUE;
1355 
1356   decl = dv_as_decl (dv);
1357 
1358   if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
1359     return ONEPART_DEXPR;
1360 
1361   if (target_for_debug_bind (decl) != NULL_TREE)
1362     return ONEPART_VDECL;
1363 
1364   return NOT_ONEPART;
1365 }
1366 
1367 /* Return the variable pool to be used for a dv of type ONEPART.  */
1368 static inline alloc_pool
onepart_pool(onepart_enum_t onepart)1369 onepart_pool (onepart_enum_t onepart)
1370 {
1371   return onepart ? valvar_pool : var_pool;
1372 }
1373 
1374 /* Build a decl_or_value out of a decl.  */
1375 static inline decl_or_value
dv_from_decl(tree decl)1376 dv_from_decl (tree decl)
1377 {
1378   decl_or_value dv;
1379   dv = decl;
1380   gcc_checking_assert (dv_is_decl_p (dv));
1381   return dv;
1382 }
1383 
1384 /* Build a decl_or_value out of a value.  */
1385 static inline decl_or_value
dv_from_value(rtx value)1386 dv_from_value (rtx value)
1387 {
1388   decl_or_value dv;
1389   dv = value;
1390   gcc_checking_assert (dv_is_value_p (dv));
1391   return dv;
1392 }
1393 
1394 /* Return a value or the decl of a debug_expr as a decl_or_value.  */
1395 static inline decl_or_value
dv_from_rtx(rtx x)1396 dv_from_rtx (rtx x)
1397 {
1398   decl_or_value dv;
1399 
1400   switch (GET_CODE (x))
1401     {
1402     case DEBUG_EXPR:
1403       dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
1404       gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x)) == x);
1405       break;
1406 
1407     case VALUE:
1408       dv = dv_from_value (x);
1409       break;
1410 
1411     default:
1412       gcc_unreachable ();
1413     }
1414 
1415   return dv;
1416 }
1417 
1418 extern void debug_dv (decl_or_value dv);
1419 
1420 DEBUG_FUNCTION void
debug_dv(decl_or_value dv)1421 debug_dv (decl_or_value dv)
1422 {
1423   if (dv_is_value_p (dv))
1424     debug_rtx (dv_as_value (dv));
1425   else
1426     debug_generic_stmt (dv_as_decl (dv));
1427 }
1428 
1429 static void loc_exp_dep_clear (variable var);
1430 
1431 /* Free the element of VARIABLE_HTAB (its type is struct variable_def).  */
1432 
1433 static void
variable_htab_free(void * elem)1434 variable_htab_free (void *elem)
1435 {
1436   int i;
1437   variable var = (variable) elem;
1438   location_chain node, next;
1439 
1440   gcc_checking_assert (var->refcount > 0);
1441 
1442   var->refcount--;
1443   if (var->refcount > 0)
1444     return;
1445 
1446   for (i = 0; i < var->n_var_parts; i++)
1447     {
1448       for (node = var->var_part[i].loc_chain; node; node = next)
1449 	{
1450 	  next = node->next;
1451 	  pool_free (loc_chain_pool, node);
1452 	}
1453       var->var_part[i].loc_chain = NULL;
1454     }
1455   if (var->onepart && VAR_LOC_1PAUX (var))
1456     {
1457       loc_exp_dep_clear (var);
1458       if (VAR_LOC_DEP_LST (var))
1459 	VAR_LOC_DEP_LST (var)->pprev = NULL;
1460       XDELETE (VAR_LOC_1PAUX (var));
1461       /* These may be reused across functions, so reset
1462 	 e.g. NO_LOC_P.  */
1463       if (var->onepart == ONEPART_DEXPR)
1464 	set_dv_changed (var->dv, true);
1465     }
1466   pool_free (onepart_pool (var->onepart), var);
1467 }
1468 
1469 /* Initialize the set (array) SET of attrs to empty lists.  */
1470 
1471 static void
init_attrs_list_set(attrs * set)1472 init_attrs_list_set (attrs *set)
1473 {
1474   int i;
1475 
1476   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1477     set[i] = NULL;
1478 }
1479 
1480 /* Make the list *LISTP empty.  */
1481 
1482 static void
attrs_list_clear(attrs * listp)1483 attrs_list_clear (attrs *listp)
1484 {
1485   attrs list, next;
1486 
1487   for (list = *listp; list; list = next)
1488     {
1489       next = list->next;
1490       pool_free (attrs_pool, list);
1491     }
1492   *listp = NULL;
1493 }
1494 
1495 /* Return true if the pair of DECL and OFFSET is the member of the LIST.  */
1496 
1497 static attrs
attrs_list_member(attrs list,decl_or_value dv,HOST_WIDE_INT offset)1498 attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
1499 {
1500   for (; list; list = list->next)
1501     if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
1502       return list;
1503   return NULL;
1504 }
1505 
1506 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP.  */
1507 
1508 static void
attrs_list_insert(attrs * listp,decl_or_value dv,HOST_WIDE_INT offset,rtx loc)1509 attrs_list_insert (attrs *listp, decl_or_value dv,
1510 		   HOST_WIDE_INT offset, rtx loc)
1511 {
1512   attrs list;
1513 
1514   list = (attrs) pool_alloc (attrs_pool);
1515   list->loc = loc;
1516   list->dv = dv;
1517   list->offset = offset;
1518   list->next = *listp;
1519   *listp = list;
1520 }
1521 
1522 /* Copy all nodes from SRC and create a list *DSTP of the copies.  */
1523 
1524 static void
attrs_list_copy(attrs * dstp,attrs src)1525 attrs_list_copy (attrs *dstp, attrs src)
1526 {
1527   attrs n;
1528 
1529   attrs_list_clear (dstp);
1530   for (; src; src = src->next)
1531     {
1532       n = (attrs) pool_alloc (attrs_pool);
1533       n->loc = src->loc;
1534       n->dv = src->dv;
1535       n->offset = src->offset;
1536       n->next = *dstp;
1537       *dstp = n;
1538     }
1539 }
1540 
1541 /* Add all nodes from SRC which are not in *DSTP to *DSTP.  */
1542 
1543 static void
attrs_list_union(attrs * dstp,attrs src)1544 attrs_list_union (attrs *dstp, attrs src)
1545 {
1546   for (; src; src = src->next)
1547     {
1548       if (!attrs_list_member (*dstp, src->dv, src->offset))
1549 	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1550     }
1551 }
1552 
1553 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1554    *DSTP.  */
1555 
1556 static void
attrs_list_mpdv_union(attrs * dstp,attrs src,attrs src2)1557 attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
1558 {
1559   gcc_assert (!*dstp);
1560   for (; src; src = src->next)
1561     {
1562       if (!dv_onepart_p (src->dv))
1563 	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1564     }
1565   for (src = src2; src; src = src->next)
1566     {
1567       if (!dv_onepart_p (src->dv)
1568 	  && !attrs_list_member (*dstp, src->dv, src->offset))
1569 	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1570     }
1571 }
1572 
1573 /* Shared hashtable support.  */
1574 
1575 /* Return true if VARS is shared.  */
1576 
1577 static inline bool
shared_hash_shared(shared_hash vars)1578 shared_hash_shared (shared_hash vars)
1579 {
1580   return vars->refcount > 1;
1581 }
1582 
1583 /* Return the hash table for VARS.  */
1584 
1585 static inline variable_table_type
shared_hash_htab(shared_hash vars)1586 shared_hash_htab (shared_hash vars)
1587 {
1588   return vars->htab;
1589 }
1590 
1591 /* Return true if VAR is shared, or maybe because VARS is shared.  */
1592 
1593 static inline bool
shared_var_p(variable var,shared_hash vars)1594 shared_var_p (variable var, shared_hash vars)
1595 {
1596   /* Don't count an entry in the changed_variables table as a duplicate.  */
1597   return ((var->refcount > 1 + (int) var->in_changed_variables)
1598 	  || shared_hash_shared (vars));
1599 }
1600 
1601 /* Copy variables into a new hash table.  */
1602 
1603 static shared_hash
shared_hash_unshare(shared_hash vars)1604 shared_hash_unshare (shared_hash vars)
1605 {
1606   shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
1607   gcc_assert (vars->refcount > 1);
1608   new_vars->refcount = 1;
1609   new_vars->htab.create (vars->htab.elements () + 3);
1610   vars_copy (new_vars->htab, vars->htab);
1611   vars->refcount--;
1612   return new_vars;
1613 }
1614 
1615 /* Increment reference counter on VARS and return it.  */
1616 
1617 static inline shared_hash
shared_hash_copy(shared_hash vars)1618 shared_hash_copy (shared_hash vars)
1619 {
1620   vars->refcount++;
1621   return vars;
1622 }
1623 
1624 /* Decrement reference counter and destroy hash table if not shared
1625    anymore.  */
1626 
1627 static void
shared_hash_destroy(shared_hash vars)1628 shared_hash_destroy (shared_hash vars)
1629 {
1630   gcc_checking_assert (vars->refcount > 0);
1631   if (--vars->refcount == 0)
1632     {
1633       vars->htab.dispose ();
1634       pool_free (shared_hash_pool, vars);
1635     }
1636 }
1637 
1638 /* Unshare *PVARS if shared and return slot for DV.  If INS is
1639    INSERT, insert it if not already present.  */
1640 
1641 static inline variable_def **
shared_hash_find_slot_unshare_1(shared_hash * pvars,decl_or_value dv,hashval_t dvhash,enum insert_option ins)1642 shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
1643 				 hashval_t dvhash, enum insert_option ins)
1644 {
1645   if (shared_hash_shared (*pvars))
1646     *pvars = shared_hash_unshare (*pvars);
1647   return shared_hash_htab (*pvars).find_slot_with_hash (dv, dvhash, ins);
1648 }
1649 
1650 static inline variable_def **
shared_hash_find_slot_unshare(shared_hash * pvars,decl_or_value dv,enum insert_option ins)1651 shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
1652 			       enum insert_option ins)
1653 {
1654   return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
1655 }
1656 
1657 /* Return slot for DV, if it is already present in the hash table.
1658    If it is not present, insert it only VARS is not shared, otherwise
1659    return NULL.  */
1660 
1661 static inline variable_def **
shared_hash_find_slot_1(shared_hash vars,decl_or_value dv,hashval_t dvhash)1662 shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1663 {
1664   return shared_hash_htab (vars).find_slot_with_hash (dv, dvhash,
1665 						      shared_hash_shared (vars)
1666 						      ? NO_INSERT : INSERT);
1667 }
1668 
1669 static inline variable_def **
shared_hash_find_slot(shared_hash vars,decl_or_value dv)1670 shared_hash_find_slot (shared_hash vars, decl_or_value dv)
1671 {
1672   return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
1673 }
1674 
1675 /* Return slot for DV only if it is already present in the hash table.  */
1676 
1677 static inline variable_def **
shared_hash_find_slot_noinsert_1(shared_hash vars,decl_or_value dv,hashval_t dvhash)1678 shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
1679 				  hashval_t dvhash)
1680 {
1681   return shared_hash_htab (vars).find_slot_with_hash (dv, dvhash, NO_INSERT);
1682 }
1683 
1684 static inline variable_def **
shared_hash_find_slot_noinsert(shared_hash vars,decl_or_value dv)1685 shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
1686 {
1687   return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
1688 }
1689 
1690 /* Return variable for DV or NULL if not already present in the hash
1691    table.  */
1692 
1693 static inline variable
shared_hash_find_1(shared_hash vars,decl_or_value dv,hashval_t dvhash)1694 shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1695 {
1696   return shared_hash_htab (vars).find_with_hash (dv, dvhash);
1697 }
1698 
1699 static inline variable
shared_hash_find(shared_hash vars,decl_or_value dv)1700 shared_hash_find (shared_hash vars, decl_or_value dv)
1701 {
1702   return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
1703 }
1704 
1705 /* Return true if TVAL is better than CVAL as a canonival value.  We
1706    choose lowest-numbered VALUEs, using the RTX address as a
1707    tie-breaker.  The idea is to arrange them into a star topology,
1708    such that all of them are at most one step away from the canonical
1709    value, and the canonical value has backlinks to all of them, in
1710    addition to all the actual locations.  We don't enforce this
1711    topology throughout the entire dataflow analysis, though.
1712  */
1713 
1714 static inline bool
canon_value_cmp(rtx tval,rtx cval)1715 canon_value_cmp (rtx tval, rtx cval)
1716 {
1717   return !cval
1718     || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
1719 }
1720 
1721 static bool dst_can_be_shared;
1722 
1723 /* Return a copy of a variable VAR and insert it to dataflow set SET.  */
1724 
1725 static variable_def **
unshare_variable(dataflow_set * set,variable_def ** slot,variable var,enum var_init_status initialized)1726 unshare_variable (dataflow_set *set, variable_def **slot, variable var,
1727 		  enum var_init_status initialized)
1728 {
1729   variable new_var;
1730   int i;
1731 
1732   new_var = (variable) pool_alloc (onepart_pool (var->onepart));
1733   new_var->dv = var->dv;
1734   new_var->refcount = 1;
1735   var->refcount--;
1736   new_var->n_var_parts = var->n_var_parts;
1737   new_var->onepart = var->onepart;
1738   new_var->in_changed_variables = false;
1739 
1740   if (! flag_var_tracking_uninit)
1741     initialized = VAR_INIT_STATUS_INITIALIZED;
1742 
1743   for (i = 0; i < var->n_var_parts; i++)
1744     {
1745       location_chain node;
1746       location_chain *nextp;
1747 
1748       if (i == 0 && var->onepart)
1749 	{
1750 	  /* One-part auxiliary data is only used while emitting
1751 	     notes, so propagate it to the new variable in the active
1752 	     dataflow set.  If we're not emitting notes, this will be
1753 	     a no-op.  */
1754 	  gcc_checking_assert (!VAR_LOC_1PAUX (var) || emit_notes);
1755 	  VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (var);
1756 	  VAR_LOC_1PAUX (var) = NULL;
1757 	}
1758       else
1759 	VAR_PART_OFFSET (new_var, i) = VAR_PART_OFFSET (var, i);
1760       nextp = &new_var->var_part[i].loc_chain;
1761       for (node = var->var_part[i].loc_chain; node; node = node->next)
1762 	{
1763 	  location_chain new_lc;
1764 
1765 	  new_lc = (location_chain) pool_alloc (loc_chain_pool);
1766 	  new_lc->next = NULL;
1767 	  if (node->init > initialized)
1768 	    new_lc->init = node->init;
1769 	  else
1770 	    new_lc->init = initialized;
1771 	  if (node->set_src && !(MEM_P (node->set_src)))
1772 	    new_lc->set_src = node->set_src;
1773 	  else
1774 	    new_lc->set_src = NULL;
1775 	  new_lc->loc = node->loc;
1776 
1777 	  *nextp = new_lc;
1778 	  nextp = &new_lc->next;
1779 	}
1780 
1781       new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
1782     }
1783 
1784   dst_can_be_shared = false;
1785   if (shared_hash_shared (set->vars))
1786     slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
1787   else if (set->traversed_vars && set->vars != set->traversed_vars)
1788     slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
1789   *slot = new_var;
1790   if (var->in_changed_variables)
1791     {
1792       variable_def **cslot
1793 	= changed_variables.find_slot_with_hash (var->dv,
1794 				    dv_htab_hash (var->dv), NO_INSERT);
1795       gcc_assert (*cslot == (void *) var);
1796       var->in_changed_variables = false;
1797       variable_htab_free (var);
1798       *cslot = new_var;
1799       new_var->in_changed_variables = true;
1800     }
1801   return slot;
1802 }
1803 
1804 /* Copy all variables from hash table SRC to hash table DST.  */
1805 
1806 static void
vars_copy(variable_table_type dst,variable_table_type src)1807 vars_copy (variable_table_type dst, variable_table_type src)
1808 {
1809   variable_iterator_type hi;
1810   variable var;
1811 
1812   FOR_EACH_HASH_TABLE_ELEMENT (src, var, variable, hi)
1813     {
1814       variable_def **dstp;
1815       var->refcount++;
1816       dstp = dst.find_slot_with_hash (var->dv, dv_htab_hash (var->dv), INSERT);
1817       *dstp = var;
1818     }
1819 }
1820 
1821 /* Map a decl to its main debug decl.  */
1822 
1823 static inline tree
var_debug_decl(tree decl)1824 var_debug_decl (tree decl)
1825 {
1826   if (decl && TREE_CODE (decl) == VAR_DECL
1827       && DECL_HAS_DEBUG_EXPR_P (decl))
1828     {
1829       tree debugdecl = DECL_DEBUG_EXPR (decl);
1830       if (DECL_P (debugdecl))
1831 	decl = debugdecl;
1832     }
1833 
1834   return decl;
1835 }
1836 
1837 /* Set the register LOC to contain DV, OFFSET.  */
1838 
1839 static void
var_reg_decl_set(dataflow_set * set,rtx loc,enum var_init_status initialized,decl_or_value dv,HOST_WIDE_INT offset,rtx set_src,enum insert_option iopt)1840 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1841 		  decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1842 		  enum insert_option iopt)
1843 {
1844   attrs node;
1845   bool decl_p = dv_is_decl_p (dv);
1846 
1847   if (decl_p)
1848     dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1849 
1850   for (node = set->regs[REGNO (loc)]; node; node = node->next)
1851     if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1852 	&& node->offset == offset)
1853       break;
1854   if (!node)
1855     attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1856   set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1857 }
1858 
1859 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC).  */
1860 
1861 static void
var_reg_set(dataflow_set * set,rtx loc,enum var_init_status initialized,rtx set_src)1862 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1863 	     rtx set_src)
1864 {
1865   tree decl = REG_EXPR (loc);
1866   HOST_WIDE_INT offset = REG_OFFSET (loc);
1867 
1868   var_reg_decl_set (set, loc, initialized,
1869 		    dv_from_decl (decl), offset, set_src, INSERT);
1870 }
1871 
1872 static enum var_init_status
get_init_value(dataflow_set * set,rtx loc,decl_or_value dv)1873 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1874 {
1875   variable var;
1876   int i;
1877   enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1878 
1879   if (! flag_var_tracking_uninit)
1880     return VAR_INIT_STATUS_INITIALIZED;
1881 
1882   var = shared_hash_find (set->vars, dv);
1883   if (var)
1884     {
1885       for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1886 	{
1887 	  location_chain nextp;
1888 	  for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1889 	    if (rtx_equal_p (nextp->loc, loc))
1890 	      {
1891 		ret_val = nextp->init;
1892 		break;
1893 	      }
1894 	}
1895     }
1896 
1897   return ret_val;
1898 }
1899 
1900 /* Delete current content of register LOC in dataflow set SET and set
1901    the register to contain REG_EXPR (LOC), REG_OFFSET (LOC).  If
1902    MODIFY is true, any other live copies of the same variable part are
1903    also deleted from the dataflow set, otherwise the variable part is
1904    assumed to be copied from another location holding the same
1905    part.  */
1906 
1907 static void
var_reg_delete_and_set(dataflow_set * set,rtx loc,bool modify,enum var_init_status initialized,rtx set_src)1908 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1909 			enum var_init_status initialized, rtx set_src)
1910 {
1911   tree decl = REG_EXPR (loc);
1912   HOST_WIDE_INT offset = REG_OFFSET (loc);
1913   attrs node, next;
1914   attrs *nextp;
1915 
1916   decl = var_debug_decl (decl);
1917 
1918   if (initialized == VAR_INIT_STATUS_UNKNOWN)
1919     initialized = get_init_value (set, loc, dv_from_decl (decl));
1920 
1921   nextp = &set->regs[REGNO (loc)];
1922   for (node = *nextp; node; node = next)
1923     {
1924       next = node->next;
1925       if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1926 	{
1927 	  delete_variable_part (set, node->loc, node->dv, node->offset);
1928 	  pool_free (attrs_pool, node);
1929 	  *nextp = next;
1930 	}
1931       else
1932 	{
1933 	  node->loc = loc;
1934 	  nextp = &node->next;
1935 	}
1936     }
1937   if (modify)
1938     clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1939   var_reg_set (set, loc, initialized, set_src);
1940 }
1941 
1942 /* Delete the association of register LOC in dataflow set SET with any
1943    variables that aren't onepart.  If CLOBBER is true, also delete any
1944    other live copies of the same variable part, and delete the
1945    association with onepart dvs too.  */
1946 
1947 static void
var_reg_delete(dataflow_set * set,rtx loc,bool clobber)1948 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1949 {
1950   attrs *nextp = &set->regs[REGNO (loc)];
1951   attrs node, next;
1952 
1953   if (clobber)
1954     {
1955       tree decl = REG_EXPR (loc);
1956       HOST_WIDE_INT offset = REG_OFFSET (loc);
1957 
1958       decl = var_debug_decl (decl);
1959 
1960       clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1961     }
1962 
1963   for (node = *nextp; node; node = next)
1964     {
1965       next = node->next;
1966       if (clobber || !dv_onepart_p (node->dv))
1967 	{
1968 	  delete_variable_part (set, node->loc, node->dv, node->offset);
1969 	  pool_free (attrs_pool, node);
1970 	  *nextp = next;
1971 	}
1972       else
1973 	nextp = &node->next;
1974     }
1975 }
1976 
1977 /* Delete content of register with number REGNO in dataflow set SET.  */
1978 
1979 static void
var_regno_delete(dataflow_set * set,int regno)1980 var_regno_delete (dataflow_set *set, int regno)
1981 {
1982   attrs *reg = &set->regs[regno];
1983   attrs node, next;
1984 
1985   for (node = *reg; node; node = next)
1986     {
1987       next = node->next;
1988       delete_variable_part (set, node->loc, node->dv, node->offset);
1989       pool_free (attrs_pool, node);
1990     }
1991   *reg = NULL;
1992 }
1993 
1994 /* Return true if I is the negated value of a power of two.  */
1995 static bool
negative_power_of_two_p(HOST_WIDE_INT i)1996 negative_power_of_two_p (HOST_WIDE_INT i)
1997 {
1998   unsigned HOST_WIDE_INT x = -(unsigned HOST_WIDE_INT)i;
1999   return x == (x & -x);
2000 }
2001 
2002 /* Strip constant offsets and alignments off of LOC.  Return the base
2003    expression.  */
2004 
2005 static rtx
vt_get_canonicalize_base(rtx loc)2006 vt_get_canonicalize_base (rtx loc)
2007 {
2008   while ((GET_CODE (loc) == PLUS
2009 	  || GET_CODE (loc) == AND)
2010 	 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2011 	 && (GET_CODE (loc) != AND
2012 	     || negative_power_of_two_p (INTVAL (XEXP (loc, 1)))))
2013     loc = XEXP (loc, 0);
2014 
2015   return loc;
2016 }
2017 
2018 /* This caches canonicalized addresses for VALUEs, computed using
2019    information in the global cselib table.  */
2020 static struct pointer_map_t *global_get_addr_cache;
2021 
2022 /* This caches canonicalized addresses for VALUEs, computed using
2023    information from the global cache and information pertaining to a
2024    basic block being analyzed.  */
2025 static struct pointer_map_t *local_get_addr_cache;
2026 
2027 static rtx vt_canonicalize_addr (dataflow_set *, rtx);
2028 
2029 /* Return the canonical address for LOC, that must be a VALUE, using a
2030    cached global equivalence or computing it and storing it in the
2031    global cache.  */
2032 
2033 static rtx
get_addr_from_global_cache(rtx const loc)2034 get_addr_from_global_cache (rtx const loc)
2035 {
2036   rtx x;
2037   void **slot;
2038 
2039   gcc_checking_assert (GET_CODE (loc) == VALUE);
2040 
2041   slot = pointer_map_insert (global_get_addr_cache, loc);
2042   if (*slot)
2043     return (rtx)*slot;
2044 
2045   x = canon_rtx (get_addr (loc));
2046 
2047   /* Tentative, avoiding infinite recursion.  */
2048   *slot = x;
2049 
2050   if (x != loc)
2051     {
2052       rtx nx = vt_canonicalize_addr (NULL, x);
2053       if (nx != x)
2054 	{
2055 	  /* The table may have moved during recursion, recompute
2056 	     SLOT.  */
2057 	  slot = pointer_map_contains (global_get_addr_cache, loc);
2058 	  *slot = x = nx;
2059 	}
2060     }
2061 
2062   return x;
2063 }
2064 
2065 /* Return the canonical address for LOC, that must be a VALUE, using a
2066    cached local equivalence or computing it and storing it in the
2067    local cache.  */
2068 
2069 static rtx
get_addr_from_local_cache(dataflow_set * set,rtx const loc)2070 get_addr_from_local_cache (dataflow_set *set, rtx const loc)
2071 {
2072   rtx x;
2073   void **slot;
2074   decl_or_value dv;
2075   variable var;
2076   location_chain l;
2077 
2078   gcc_checking_assert (GET_CODE (loc) == VALUE);
2079 
2080   slot = pointer_map_insert (local_get_addr_cache, loc);
2081   if (*slot)
2082     return (rtx)*slot;
2083 
2084   x = get_addr_from_global_cache (loc);
2085 
2086   /* Tentative, avoiding infinite recursion.  */
2087   *slot = x;
2088 
2089   /* Recurse to cache local expansion of X, or if we need to search
2090      for a VALUE in the expansion.  */
2091   if (x != loc)
2092     {
2093       rtx nx = vt_canonicalize_addr (set, x);
2094       if (nx != x)
2095 	{
2096 	  slot = pointer_map_contains (local_get_addr_cache, loc);
2097 	  *slot = x = nx;
2098 	}
2099       return x;
2100     }
2101 
2102   dv = dv_from_rtx (x);
2103   var = shared_hash_find (set->vars, dv);
2104   if (!var)
2105     return x;
2106 
2107   /* Look for an improved equivalent expression.  */
2108   for (l = var->var_part[0].loc_chain; l; l = l->next)
2109     {
2110       rtx base = vt_get_canonicalize_base (l->loc);
2111       if (GET_CODE (base) == VALUE
2112 	  && canon_value_cmp (base, loc))
2113 	{
2114 	  rtx nx = vt_canonicalize_addr (set, l->loc);
2115 	  if (x != nx)
2116 	    {
2117 	      slot = pointer_map_contains (local_get_addr_cache, loc);
2118 	      *slot = x = nx;
2119 	    }
2120 	  break;
2121 	}
2122     }
2123 
2124   return x;
2125 }
2126 
2127 /* Canonicalize LOC using equivalences from SET in addition to those
2128    in the cselib static table.  It expects a VALUE-based expression,
2129    and it will only substitute VALUEs with other VALUEs or
2130    function-global equivalences, so that, if two addresses have base
2131    VALUEs that are locally or globally related in ways that
2132    memrefs_conflict_p cares about, they will both canonicalize to
2133    expressions that have the same base VALUE.
2134 
2135    The use of VALUEs as canonical base addresses enables the canonical
2136    RTXs to remain unchanged globally, if they resolve to a constant,
2137    or throughout a basic block otherwise, so that they can be cached
2138    and the cache needs not be invalidated when REGs, MEMs or such
2139    change.  */
2140 
2141 static rtx
vt_canonicalize_addr(dataflow_set * set,rtx oloc)2142 vt_canonicalize_addr (dataflow_set *set, rtx oloc)
2143 {
2144   HOST_WIDE_INT ofst = 0;
2145   enum machine_mode mode = GET_MODE (oloc);
2146   rtx loc = oloc;
2147   rtx x;
2148   bool retry = true;
2149 
2150   while (retry)
2151     {
2152       while (GET_CODE (loc) == PLUS
2153 	     && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2154 	{
2155 	  ofst += INTVAL (XEXP (loc, 1));
2156 	  loc = XEXP (loc, 0);
2157 	}
2158 
2159       /* Alignment operations can't normally be combined, so just
2160 	 canonicalize the base and we're done.  We'll normally have
2161 	 only one stack alignment anyway.  */
2162       if (GET_CODE (loc) == AND
2163 	  && GET_CODE (XEXP (loc, 1)) == CONST_INT
2164 	  && negative_power_of_two_p (INTVAL (XEXP (loc, 1))))
2165 	{
2166 	  x = vt_canonicalize_addr (set, XEXP (loc, 0));
2167 	  if (x != XEXP (loc, 0))
2168 	    loc = gen_rtx_AND (mode, x, XEXP (loc, 1));
2169 	  retry = false;
2170 	}
2171 
2172       if (GET_CODE (loc) == VALUE)
2173 	{
2174 	  if (set)
2175 	    loc = get_addr_from_local_cache (set, loc);
2176 	  else
2177 	    loc = get_addr_from_global_cache (loc);
2178 
2179 	  /* Consolidate plus_constants.  */
2180 	  while (ofst && GET_CODE (loc) == PLUS
2181 		 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2182 	    {
2183 	      ofst += INTVAL (XEXP (loc, 1));
2184 	      loc = XEXP (loc, 0);
2185 	    }
2186 
2187 	  retry = false;
2188 	}
2189       else
2190 	{
2191 	  x = canon_rtx (loc);
2192 	  if (retry)
2193 	    retry = (x != loc);
2194 	  loc = x;
2195 	}
2196     }
2197 
2198   /* Add OFST back in.  */
2199   if (ofst)
2200     {
2201       /* Don't build new RTL if we can help it.  */
2202       if (GET_CODE (oloc) == PLUS
2203 	  && XEXP (oloc, 0) == loc
2204 	  && INTVAL (XEXP (oloc, 1)) == ofst)
2205 	return oloc;
2206 
2207       loc = plus_constant (mode, loc, ofst);
2208     }
2209 
2210   return loc;
2211 }
2212 
2213 /* Return true iff there's a true dependence between MLOC and LOC.
2214    MADDR must be a canonicalized version of MLOC's address.  */
2215 
2216 static inline bool
vt_canon_true_dep(dataflow_set * set,rtx mloc,rtx maddr,rtx loc)2217 vt_canon_true_dep (dataflow_set *set, rtx mloc, rtx maddr, rtx loc)
2218 {
2219   if (GET_CODE (loc) != MEM)
2220     return false;
2221 
2222   rtx addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2223   if (!canon_true_dependence (mloc, GET_MODE (mloc), maddr, loc, addr))
2224     return false;
2225 
2226   return true;
2227 }
2228 
2229 /* Hold parameters for the hashtab traversal function
2230    drop_overlapping_mem_locs, see below.  */
2231 
2232 struct overlapping_mems
2233 {
2234   dataflow_set *set;
2235   rtx loc, addr;
2236 };
2237 
2238 /* Remove all MEMs that overlap with COMS->LOC from the location list
2239    of a hash table entry for a value.  COMS->ADDR must be a
2240    canonicalized form of COMS->LOC's address, and COMS->LOC must be
2241    canonicalized itself.  */
2242 
2243 int
drop_overlapping_mem_locs(variable_def ** slot,overlapping_mems * coms)2244 drop_overlapping_mem_locs (variable_def **slot, overlapping_mems *coms)
2245 {
2246   dataflow_set *set = coms->set;
2247   rtx mloc = coms->loc, addr = coms->addr;
2248   variable var = *slot;
2249 
2250   if (var->onepart == ONEPART_VALUE)
2251     {
2252       location_chain loc, *locp;
2253       bool changed = false;
2254       rtx cur_loc;
2255 
2256       gcc_assert (var->n_var_parts == 1);
2257 
2258       if (shared_var_p (var, set->vars))
2259 	{
2260 	  for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
2261 	    if (vt_canon_true_dep (set, mloc, addr, loc->loc))
2262 	      break;
2263 
2264 	  if (!loc)
2265 	    return 1;
2266 
2267 	  slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
2268 	  var = *slot;
2269 	  gcc_assert (var->n_var_parts == 1);
2270 	}
2271 
2272       if (VAR_LOC_1PAUX (var))
2273 	cur_loc = VAR_LOC_FROM (var);
2274       else
2275 	cur_loc = var->var_part[0].cur_loc;
2276 
2277       for (locp = &var->var_part[0].loc_chain, loc = *locp;
2278 	   loc; loc = *locp)
2279 	{
2280 	  if (!vt_canon_true_dep (set, mloc, addr, loc->loc))
2281 	    {
2282 	      locp = &loc->next;
2283 	      continue;
2284 	    }
2285 
2286 	  *locp = loc->next;
2287 	  /* If we have deleted the location which was last emitted
2288 	     we have to emit new location so add the variable to set
2289 	     of changed variables.  */
2290 	  if (cur_loc == loc->loc)
2291 	    {
2292 	      changed = true;
2293 	      var->var_part[0].cur_loc = NULL;
2294 	      if (VAR_LOC_1PAUX (var))
2295 		VAR_LOC_FROM (var) = NULL;
2296 	    }
2297 	  pool_free (loc_chain_pool, loc);
2298 	}
2299 
2300       if (!var->var_part[0].loc_chain)
2301 	{
2302 	  var->n_var_parts--;
2303 	  changed = true;
2304 	}
2305       if (changed)
2306 	variable_was_changed (var, set);
2307     }
2308 
2309   return 1;
2310 }
2311 
2312 /* Remove from SET all VALUE bindings to MEMs that overlap with LOC.  */
2313 
2314 static void
clobber_overlapping_mems(dataflow_set * set,rtx loc)2315 clobber_overlapping_mems (dataflow_set *set, rtx loc)
2316 {
2317   struct overlapping_mems coms;
2318 
2319   gcc_checking_assert (GET_CODE (loc) == MEM);
2320 
2321   coms.set = set;
2322   coms.loc = canon_rtx (loc);
2323   coms.addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2324 
2325   set->traversed_vars = set->vars;
2326   shared_hash_htab (set->vars)
2327     .traverse <overlapping_mems*, drop_overlapping_mem_locs> (&coms);
2328   set->traversed_vars = NULL;
2329 }
2330 
2331 /* Set the location of DV, OFFSET as the MEM LOC.  */
2332 
2333 static void
var_mem_decl_set(dataflow_set * set,rtx loc,enum var_init_status initialized,decl_or_value dv,HOST_WIDE_INT offset,rtx set_src,enum insert_option iopt)2334 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2335 		  decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
2336 		  enum insert_option iopt)
2337 {
2338   if (dv_is_decl_p (dv))
2339     dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
2340 
2341   set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
2342 }
2343 
2344 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
2345    SET to LOC.
2346    Adjust the address first if it is stack pointer based.  */
2347 
2348 static void
var_mem_set(dataflow_set * set,rtx loc,enum var_init_status initialized,rtx set_src)2349 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2350 	     rtx set_src)
2351 {
2352   tree decl = MEM_EXPR (loc);
2353   HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2354 
2355   var_mem_decl_set (set, loc, initialized,
2356 		    dv_from_decl (decl), offset, set_src, INSERT);
2357 }
2358 
2359 /* Delete and set the location part of variable MEM_EXPR (LOC) in
2360    dataflow set SET to LOC.  If MODIFY is true, any other live copies
2361    of the same variable part are also deleted from the dataflow set,
2362    otherwise the variable part is assumed to be copied from another
2363    location holding the same part.
2364    Adjust the address first if it is stack pointer based.  */
2365 
2366 static void
var_mem_delete_and_set(dataflow_set * set,rtx loc,bool modify,enum var_init_status initialized,rtx set_src)2367 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
2368 			enum var_init_status initialized, rtx set_src)
2369 {
2370   tree decl = MEM_EXPR (loc);
2371   HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2372 
2373   clobber_overlapping_mems (set, loc);
2374   decl = var_debug_decl (decl);
2375 
2376   if (initialized == VAR_INIT_STATUS_UNKNOWN)
2377     initialized = get_init_value (set, loc, dv_from_decl (decl));
2378 
2379   if (modify)
2380     clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
2381   var_mem_set (set, loc, initialized, set_src);
2382 }
2383 
2384 /* Delete the location part LOC from dataflow set SET.  If CLOBBER is
2385    true, also delete any other live copies of the same variable part.
2386    Adjust the address first if it is stack pointer based.  */
2387 
2388 static void
var_mem_delete(dataflow_set * set,rtx loc,bool clobber)2389 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
2390 {
2391   tree decl = MEM_EXPR (loc);
2392   HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2393 
2394   clobber_overlapping_mems (set, loc);
2395   decl = var_debug_decl (decl);
2396   if (clobber)
2397     clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
2398   delete_variable_part (set, loc, dv_from_decl (decl), offset);
2399 }
2400 
2401 /* Return true if LOC should not be expanded for location expressions,
2402    or used in them.  */
2403 
2404 static inline bool
unsuitable_loc(rtx loc)2405 unsuitable_loc (rtx loc)
2406 {
2407   switch (GET_CODE (loc))
2408     {
2409     case PC:
2410     case SCRATCH:
2411     case CC0:
2412     case ASM_INPUT:
2413     case ASM_OPERANDS:
2414       return true;
2415 
2416     default:
2417       return false;
2418     }
2419 }
2420 
2421 /* Bind VAL to LOC in SET.  If MODIFIED, detach LOC from any values
2422    bound to it.  */
2423 
2424 static inline void
val_bind(dataflow_set * set,rtx val,rtx loc,bool modified)2425 val_bind (dataflow_set *set, rtx val, rtx loc, bool modified)
2426 {
2427   if (REG_P (loc))
2428     {
2429       if (modified)
2430 	var_regno_delete (set, REGNO (loc));
2431       var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2432 			dv_from_value (val), 0, NULL_RTX, INSERT);
2433     }
2434   else if (MEM_P (loc))
2435     {
2436       struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs;
2437 
2438       if (modified)
2439 	clobber_overlapping_mems (set, loc);
2440 
2441       if (l && GET_CODE (l->loc) == VALUE)
2442 	l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs;
2443 
2444       /* If this MEM is a global constant, we don't need it in the
2445 	 dynamic tables.  ??? We should test this before emitting the
2446 	 micro-op in the first place.  */
2447       while (l)
2448 	if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0))
2449 	  break;
2450 	else
2451 	  l = l->next;
2452 
2453       if (!l)
2454 	var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2455 			  dv_from_value (val), 0, NULL_RTX, INSERT);
2456     }
2457   else
2458     {
2459       /* Other kinds of equivalences are necessarily static, at least
2460 	 so long as we do not perform substitutions while merging
2461 	 expressions.  */
2462       gcc_unreachable ();
2463       set_variable_part (set, loc, dv_from_value (val), 0,
2464 			 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2465     }
2466 }
2467 
2468 /* Bind a value to a location it was just stored in.  If MODIFIED
2469    holds, assume the location was modified, detaching it from any
2470    values bound to it.  */
2471 
2472 static void
val_store(dataflow_set * set,rtx val,rtx loc,rtx insn,bool modified)2473 val_store (dataflow_set *set, rtx val, rtx loc, rtx insn, bool modified)
2474 {
2475   cselib_val *v = CSELIB_VAL_PTR (val);
2476 
2477   gcc_assert (cselib_preserved_value_p (v));
2478 
2479   if (dump_file)
2480     {
2481       fprintf (dump_file, "%i: ", insn ? INSN_UID (insn) : 0);
2482       print_inline_rtx (dump_file, loc, 0);
2483       fprintf (dump_file, " evaluates to ");
2484       print_inline_rtx (dump_file, val, 0);
2485       if (v->locs)
2486 	{
2487 	  struct elt_loc_list *l;
2488 	  for (l = v->locs; l; l = l->next)
2489 	    {
2490 	      fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
2491 	      print_inline_rtx (dump_file, l->loc, 0);
2492 	    }
2493 	}
2494       fprintf (dump_file, "\n");
2495     }
2496 
2497   gcc_checking_assert (!unsuitable_loc (loc));
2498 
2499   val_bind (set, val, loc, modified);
2500 }
2501 
2502 /* Clear (canonical address) slots that reference X.  */
2503 
2504 static bool
local_get_addr_clear_given_value(const void * v ATTRIBUTE_UNUSED,void ** slot,void * x)2505 local_get_addr_clear_given_value (const void *v ATTRIBUTE_UNUSED,
2506 				  void **slot, void *x)
2507 {
2508   if (vt_get_canonicalize_base ((rtx)*slot) == x)
2509     *slot = NULL;
2510   return true;
2511 }
2512 
2513 /* Reset this node, detaching all its equivalences.  Return the slot
2514    in the variable hash table that holds dv, if there is one.  */
2515 
2516 static void
val_reset(dataflow_set * set,decl_or_value dv)2517 val_reset (dataflow_set *set, decl_or_value dv)
2518 {
2519   variable var = shared_hash_find (set->vars, dv) ;
2520   location_chain node;
2521   rtx cval;
2522 
2523   if (!var || !var->n_var_parts)
2524     return;
2525 
2526   gcc_assert (var->n_var_parts == 1);
2527 
2528   if (var->onepart == ONEPART_VALUE)
2529     {
2530       rtx x = dv_as_value (dv);
2531       void **slot;
2532 
2533       /* Relationships in the global cache don't change, so reset the
2534 	 local cache entry only.  */
2535       slot = pointer_map_contains (local_get_addr_cache, x);
2536       if (slot)
2537 	{
2538 	  /* If the value resolved back to itself, odds are that other
2539 	     values may have cached it too.  These entries now refer
2540 	     to the old X, so detach them too.  Entries that used the
2541 	     old X but resolved to something else remain ok as long as
2542 	     that something else isn't also reset.  */
2543 	  if (*slot == x)
2544 	    pointer_map_traverse (local_get_addr_cache,
2545 				  local_get_addr_clear_given_value, x);
2546 	  *slot = NULL;
2547 	}
2548     }
2549 
2550   cval = NULL;
2551   for (node = var->var_part[0].loc_chain; node; node = node->next)
2552     if (GET_CODE (node->loc) == VALUE
2553 	&& canon_value_cmp (node->loc, cval))
2554       cval = node->loc;
2555 
2556   for (node = var->var_part[0].loc_chain; node; node = node->next)
2557     if (GET_CODE (node->loc) == VALUE && cval != node->loc)
2558       {
2559 	/* Redirect the equivalence link to the new canonical
2560 	   value, or simply remove it if it would point at
2561 	   itself.  */
2562 	if (cval)
2563 	  set_variable_part (set, cval, dv_from_value (node->loc),
2564 			     0, node->init, node->set_src, NO_INSERT);
2565 	delete_variable_part (set, dv_as_value (dv),
2566 			      dv_from_value (node->loc), 0);
2567       }
2568 
2569   if (cval)
2570     {
2571       decl_or_value cdv = dv_from_value (cval);
2572 
2573       /* Keep the remaining values connected, accummulating links
2574 	 in the canonical value.  */
2575       for (node = var->var_part[0].loc_chain; node; node = node->next)
2576 	{
2577 	  if (node->loc == cval)
2578 	    continue;
2579 	  else if (GET_CODE (node->loc) == REG)
2580 	    var_reg_decl_set (set, node->loc, node->init, cdv, 0,
2581 			      node->set_src, NO_INSERT);
2582 	  else if (GET_CODE (node->loc) == MEM)
2583 	    var_mem_decl_set (set, node->loc, node->init, cdv, 0,
2584 			      node->set_src, NO_INSERT);
2585 	  else
2586 	    set_variable_part (set, node->loc, cdv, 0,
2587 			       node->init, node->set_src, NO_INSERT);
2588 	}
2589     }
2590 
2591   /* We remove this last, to make sure that the canonical value is not
2592      removed to the point of requiring reinsertion.  */
2593   if (cval)
2594     delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
2595 
2596   clobber_variable_part (set, NULL, dv, 0, NULL);
2597 }
2598 
2599 /* Find the values in a given location and map the val to another
2600    value, if it is unique, or add the location as one holding the
2601    value.  */
2602 
2603 static void
val_resolve(dataflow_set * set,rtx val,rtx loc,rtx insn)2604 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx insn)
2605 {
2606   decl_or_value dv = dv_from_value (val);
2607 
2608   if (dump_file && (dump_flags & TDF_DETAILS))
2609     {
2610       if (insn)
2611 	fprintf (dump_file, "%i: ", INSN_UID (insn));
2612       else
2613 	fprintf (dump_file, "head: ");
2614       print_inline_rtx (dump_file, val, 0);
2615       fputs (" is at ", dump_file);
2616       print_inline_rtx (dump_file, loc, 0);
2617       fputc ('\n', dump_file);
2618     }
2619 
2620   val_reset (set, dv);
2621 
2622   gcc_checking_assert (!unsuitable_loc (loc));
2623 
2624   if (REG_P (loc))
2625     {
2626       attrs node, found = NULL;
2627 
2628       for (node = set->regs[REGNO (loc)]; node; node = node->next)
2629 	if (dv_is_value_p (node->dv)
2630 	    && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
2631 	  {
2632 	    found = node;
2633 
2634 	    /* Map incoming equivalences.  ??? Wouldn't it be nice if
2635 	     we just started sharing the location lists?  Maybe a
2636 	     circular list ending at the value itself or some
2637 	     such.  */
2638 	    set_variable_part (set, dv_as_value (node->dv),
2639 			       dv_from_value (val), node->offset,
2640 			       VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2641 	    set_variable_part (set, val, node->dv, node->offset,
2642 			       VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2643 	  }
2644 
2645       /* If we didn't find any equivalence, we need to remember that
2646 	 this value is held in the named register.  */
2647       if (found)
2648 	return;
2649     }
2650   /* ??? Attempt to find and merge equivalent MEMs or other
2651      expressions too.  */
2652 
2653   val_bind (set, val, loc, false);
2654 }
2655 
2656 /* Initialize dataflow set SET to be empty.
2657    VARS_SIZE is the initial size of hash table VARS.  */
2658 
2659 static void
dataflow_set_init(dataflow_set * set)2660 dataflow_set_init (dataflow_set *set)
2661 {
2662   init_attrs_list_set (set->regs);
2663   set->vars = shared_hash_copy (empty_shared_hash);
2664   set->stack_adjust = 0;
2665   set->traversed_vars = NULL;
2666 }
2667 
2668 /* Delete the contents of dataflow set SET.  */
2669 
2670 static void
dataflow_set_clear(dataflow_set * set)2671 dataflow_set_clear (dataflow_set *set)
2672 {
2673   int i;
2674 
2675   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2676     attrs_list_clear (&set->regs[i]);
2677 
2678   shared_hash_destroy (set->vars);
2679   set->vars = shared_hash_copy (empty_shared_hash);
2680 }
2681 
2682 /* Copy the contents of dataflow set SRC to DST.  */
2683 
2684 static void
dataflow_set_copy(dataflow_set * dst,dataflow_set * src)2685 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
2686 {
2687   int i;
2688 
2689   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2690     attrs_list_copy (&dst->regs[i], src->regs[i]);
2691 
2692   shared_hash_destroy (dst->vars);
2693   dst->vars = shared_hash_copy (src->vars);
2694   dst->stack_adjust = src->stack_adjust;
2695 }
2696 
2697 /* Information for merging lists of locations for a given offset of variable.
2698  */
2699 struct variable_union_info
2700 {
2701   /* Node of the location chain.  */
2702   location_chain lc;
2703 
2704   /* The sum of positions in the input chains.  */
2705   int pos;
2706 
2707   /* The position in the chain of DST dataflow set.  */
2708   int pos_dst;
2709 };
2710 
2711 /* Buffer for location list sorting and its allocated size.  */
2712 static struct variable_union_info *vui_vec;
2713 static int vui_allocated;
2714 
2715 /* Compare function for qsort, order the structures by POS element.  */
2716 
2717 static int
variable_union_info_cmp_pos(const void * n1,const void * n2)2718 variable_union_info_cmp_pos (const void *n1, const void *n2)
2719 {
2720   const struct variable_union_info *const i1 =
2721     (const struct variable_union_info *) n1;
2722   const struct variable_union_info *const i2 =
2723     ( const struct variable_union_info *) n2;
2724 
2725   if (i1->pos != i2->pos)
2726     return i1->pos - i2->pos;
2727 
2728   return (i1->pos_dst - i2->pos_dst);
2729 }
2730 
2731 /* Compute union of location parts of variable *SLOT and the same variable
2732    from hash table DATA.  Compute "sorted" union of the location chains
2733    for common offsets, i.e. the locations of a variable part are sorted by
2734    a priority where the priority is the sum of the positions in the 2 chains
2735    (if a location is only in one list the position in the second list is
2736    defined to be larger than the length of the chains).
2737    When we are updating the location parts the newest location is in the
2738    beginning of the chain, so when we do the described "sorted" union
2739    we keep the newest locations in the beginning.  */
2740 
2741 static int
variable_union(variable src,dataflow_set * set)2742 variable_union (variable src, dataflow_set *set)
2743 {
2744   variable dst;
2745   variable_def **dstp;
2746   int i, j, k;
2747 
2748   dstp = shared_hash_find_slot (set->vars, src->dv);
2749   if (!dstp || !*dstp)
2750     {
2751       src->refcount++;
2752 
2753       dst_can_be_shared = false;
2754       if (!dstp)
2755 	dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
2756 
2757       *dstp = src;
2758 
2759       /* Continue traversing the hash table.  */
2760       return 1;
2761     }
2762   else
2763     dst = *dstp;
2764 
2765   gcc_assert (src->n_var_parts);
2766   gcc_checking_assert (src->onepart == dst->onepart);
2767 
2768   /* We can combine one-part variables very efficiently, because their
2769      entries are in canonical order.  */
2770   if (src->onepart)
2771     {
2772       location_chain *nodep, dnode, snode;
2773 
2774       gcc_assert (src->n_var_parts == 1
2775 		  && dst->n_var_parts == 1);
2776 
2777       snode = src->var_part[0].loc_chain;
2778       gcc_assert (snode);
2779 
2780     restart_onepart_unshared:
2781       nodep = &dst->var_part[0].loc_chain;
2782       dnode = *nodep;
2783       gcc_assert (dnode);
2784 
2785       while (snode)
2786 	{
2787 	  int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
2788 
2789 	  if (r > 0)
2790 	    {
2791 	      location_chain nnode;
2792 
2793 	      if (shared_var_p (dst, set->vars))
2794 		{
2795 		  dstp = unshare_variable (set, dstp, dst,
2796 					   VAR_INIT_STATUS_INITIALIZED);
2797 		  dst = *dstp;
2798 		  goto restart_onepart_unshared;
2799 		}
2800 
2801 	      *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
2802 	      nnode->loc = snode->loc;
2803 	      nnode->init = snode->init;
2804 	      if (!snode->set_src || MEM_P (snode->set_src))
2805 		nnode->set_src = NULL;
2806 	      else
2807 		nnode->set_src = snode->set_src;
2808 	      nnode->next = dnode;
2809 	      dnode = nnode;
2810 	    }
2811 	  else if (r == 0)
2812 	    gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc));
2813 
2814 	  if (r >= 0)
2815 	    snode = snode->next;
2816 
2817 	  nodep = &dnode->next;
2818 	  dnode = *nodep;
2819 	}
2820 
2821       return 1;
2822     }
2823 
2824   gcc_checking_assert (!src->onepart);
2825 
2826   /* Count the number of location parts, result is K.  */
2827   for (i = 0, j = 0, k = 0;
2828        i < src->n_var_parts && j < dst->n_var_parts; k++)
2829     {
2830       if (VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2831 	{
2832 	  i++;
2833 	  j++;
2834 	}
2835       else if (VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
2836 	i++;
2837       else
2838 	j++;
2839     }
2840   k += src->n_var_parts - i;
2841   k += dst->n_var_parts - j;
2842 
2843   /* We track only variables whose size is <= MAX_VAR_PARTS bytes
2844      thus there are at most MAX_VAR_PARTS different offsets.  */
2845   gcc_checking_assert (dst->onepart ? k == 1 : k <= MAX_VAR_PARTS);
2846 
2847   if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
2848     {
2849       dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
2850       dst = *dstp;
2851     }
2852 
2853   i = src->n_var_parts - 1;
2854   j = dst->n_var_parts - 1;
2855   dst->n_var_parts = k;
2856 
2857   for (k--; k >= 0; k--)
2858     {
2859       location_chain node, node2;
2860 
2861       if (i >= 0 && j >= 0
2862 	  && VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2863 	{
2864 	  /* Compute the "sorted" union of the chains, i.e. the locations which
2865 	     are in both chains go first, they are sorted by the sum of
2866 	     positions in the chains.  */
2867 	  int dst_l, src_l;
2868 	  int ii, jj, n;
2869 	  struct variable_union_info *vui;
2870 
2871 	  /* If DST is shared compare the location chains.
2872 	     If they are different we will modify the chain in DST with
2873 	     high probability so make a copy of DST.  */
2874 	  if (shared_var_p (dst, set->vars))
2875 	    {
2876 	      for (node = src->var_part[i].loc_chain,
2877 		   node2 = dst->var_part[j].loc_chain; node && node2;
2878 		   node = node->next, node2 = node2->next)
2879 		{
2880 		  if (!((REG_P (node2->loc)
2881 			 && REG_P (node->loc)
2882 			 && REGNO (node2->loc) == REGNO (node->loc))
2883 			|| rtx_equal_p (node2->loc, node->loc)))
2884 		    {
2885 		      if (node2->init < node->init)
2886 		        node2->init = node->init;
2887 		      break;
2888 		    }
2889 		}
2890 	      if (node || node2)
2891 		{
2892 		  dstp = unshare_variable (set, dstp, dst,
2893 					   VAR_INIT_STATUS_UNKNOWN);
2894 		  dst = (variable)*dstp;
2895 		}
2896 	    }
2897 
2898 	  src_l = 0;
2899 	  for (node = src->var_part[i].loc_chain; node; node = node->next)
2900 	    src_l++;
2901 	  dst_l = 0;
2902 	  for (node = dst->var_part[j].loc_chain; node; node = node->next)
2903 	    dst_l++;
2904 
2905 	  if (dst_l == 1)
2906 	    {
2907 	      /* The most common case, much simpler, no qsort is needed.  */
2908 	      location_chain dstnode = dst->var_part[j].loc_chain;
2909 	      dst->var_part[k].loc_chain = dstnode;
2910 	      VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
2911 	      node2 = dstnode;
2912 	      for (node = src->var_part[i].loc_chain; node; node = node->next)
2913 		if (!((REG_P (dstnode->loc)
2914 		       && REG_P (node->loc)
2915 		       && REGNO (dstnode->loc) == REGNO (node->loc))
2916 		      || rtx_equal_p (dstnode->loc, node->loc)))
2917 		  {
2918 		    location_chain new_node;
2919 
2920 		    /* Copy the location from SRC.  */
2921 		    new_node = (location_chain) pool_alloc (loc_chain_pool);
2922 		    new_node->loc = node->loc;
2923 		    new_node->init = node->init;
2924 		    if (!node->set_src || MEM_P (node->set_src))
2925 		      new_node->set_src = NULL;
2926 		    else
2927 		      new_node->set_src = node->set_src;
2928 		    node2->next = new_node;
2929 		    node2 = new_node;
2930 		  }
2931 	      node2->next = NULL;
2932 	    }
2933 	  else
2934 	    {
2935 	      if (src_l + dst_l > vui_allocated)
2936 		{
2937 		  vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
2938 		  vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2939 					vui_allocated);
2940 		}
2941 	      vui = vui_vec;
2942 
2943 	      /* Fill in the locations from DST.  */
2944 	      for (node = dst->var_part[j].loc_chain, jj = 0; node;
2945 		   node = node->next, jj++)
2946 		{
2947 		  vui[jj].lc = node;
2948 		  vui[jj].pos_dst = jj;
2949 
2950 		  /* Pos plus value larger than a sum of 2 valid positions.  */
2951 		  vui[jj].pos = jj + src_l + dst_l;
2952 		}
2953 
2954 	      /* Fill in the locations from SRC.  */
2955 	      n = dst_l;
2956 	      for (node = src->var_part[i].loc_chain, ii = 0; node;
2957 		   node = node->next, ii++)
2958 		{
2959 		  /* Find location from NODE.  */
2960 		  for (jj = 0; jj < dst_l; jj++)
2961 		    {
2962 		      if ((REG_P (vui[jj].lc->loc)
2963 			   && REG_P (node->loc)
2964 			   && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2965 			  || rtx_equal_p (vui[jj].lc->loc, node->loc))
2966 			{
2967 			  vui[jj].pos = jj + ii;
2968 			  break;
2969 			}
2970 		    }
2971 		  if (jj >= dst_l)	/* The location has not been found.  */
2972 		    {
2973 		      location_chain new_node;
2974 
2975 		      /* Copy the location from SRC.  */
2976 		      new_node = (location_chain) pool_alloc (loc_chain_pool);
2977 		      new_node->loc = node->loc;
2978 		      new_node->init = node->init;
2979 		      if (!node->set_src || MEM_P (node->set_src))
2980 			new_node->set_src = NULL;
2981 		      else
2982 			new_node->set_src = node->set_src;
2983 		      vui[n].lc = new_node;
2984 		      vui[n].pos_dst = src_l + dst_l;
2985 		      vui[n].pos = ii + src_l + dst_l;
2986 		      n++;
2987 		    }
2988 		}
2989 
2990 	      if (dst_l == 2)
2991 		{
2992 		  /* Special case still very common case.  For dst_l == 2
2993 		     all entries dst_l ... n-1 are sorted, with for i >= dst_l
2994 		     vui[i].pos == i + src_l + dst_l.  */
2995 		  if (vui[0].pos > vui[1].pos)
2996 		    {
2997 		      /* Order should be 1, 0, 2... */
2998 		      dst->var_part[k].loc_chain = vui[1].lc;
2999 		      vui[1].lc->next = vui[0].lc;
3000 		      if (n >= 3)
3001 			{
3002 			  vui[0].lc->next = vui[2].lc;
3003 			  vui[n - 1].lc->next = NULL;
3004 			}
3005 		      else
3006 			vui[0].lc->next = NULL;
3007 		      ii = 3;
3008 		    }
3009 		  else
3010 		    {
3011 		      dst->var_part[k].loc_chain = vui[0].lc;
3012 		      if (n >= 3 && vui[2].pos < vui[1].pos)
3013 			{
3014 			  /* Order should be 0, 2, 1, 3... */
3015 			  vui[0].lc->next = vui[2].lc;
3016 			  vui[2].lc->next = vui[1].lc;
3017 			  if (n >= 4)
3018 			    {
3019 			      vui[1].lc->next = vui[3].lc;
3020 			      vui[n - 1].lc->next = NULL;
3021 			    }
3022 			  else
3023 			    vui[1].lc->next = NULL;
3024 			  ii = 4;
3025 			}
3026 		      else
3027 			{
3028 			  /* Order should be 0, 1, 2... */
3029 			  ii = 1;
3030 			  vui[n - 1].lc->next = NULL;
3031 			}
3032 		    }
3033 		  for (; ii < n; ii++)
3034 		    vui[ii - 1].lc->next = vui[ii].lc;
3035 		}
3036 	      else
3037 		{
3038 		  qsort (vui, n, sizeof (struct variable_union_info),
3039 			 variable_union_info_cmp_pos);
3040 
3041 		  /* Reconnect the nodes in sorted order.  */
3042 		  for (ii = 1; ii < n; ii++)
3043 		    vui[ii - 1].lc->next = vui[ii].lc;
3044 		  vui[n - 1].lc->next = NULL;
3045 		  dst->var_part[k].loc_chain = vui[0].lc;
3046 		}
3047 
3048 	      VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
3049 	    }
3050 	  i--;
3051 	  j--;
3052 	}
3053       else if ((i >= 0 && j >= 0
3054 		&& VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
3055 	       || i < 0)
3056 	{
3057 	  dst->var_part[k] = dst->var_part[j];
3058 	  j--;
3059 	}
3060       else if ((i >= 0 && j >= 0
3061 		&& VAR_PART_OFFSET (src, i) > VAR_PART_OFFSET (dst, j))
3062 	       || j < 0)
3063 	{
3064 	  location_chain *nextp;
3065 
3066 	  /* Copy the chain from SRC.  */
3067 	  nextp = &dst->var_part[k].loc_chain;
3068 	  for (node = src->var_part[i].loc_chain; node; node = node->next)
3069 	    {
3070 	      location_chain new_lc;
3071 
3072 	      new_lc = (location_chain) pool_alloc (loc_chain_pool);
3073 	      new_lc->next = NULL;
3074 	      new_lc->init = node->init;
3075 	      if (!node->set_src || MEM_P (node->set_src))
3076 		new_lc->set_src = NULL;
3077 	      else
3078 		new_lc->set_src = node->set_src;
3079 	      new_lc->loc = node->loc;
3080 
3081 	      *nextp = new_lc;
3082 	      nextp = &new_lc->next;
3083 	    }
3084 
3085 	  VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (src, i);
3086 	  i--;
3087 	}
3088       dst->var_part[k].cur_loc = NULL;
3089     }
3090 
3091   if (flag_var_tracking_uninit)
3092     for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
3093       {
3094 	location_chain node, node2;
3095 	for (node = src->var_part[i].loc_chain; node; node = node->next)
3096 	  for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
3097 	    if (rtx_equal_p (node->loc, node2->loc))
3098 	      {
3099 		if (node->init > node2->init)
3100 		  node2->init = node->init;
3101 	      }
3102       }
3103 
3104   /* Continue traversing the hash table.  */
3105   return 1;
3106 }
3107 
3108 /* Compute union of dataflow sets SRC and DST and store it to DST.  */
3109 
3110 static void
dataflow_set_union(dataflow_set * dst,dataflow_set * src)3111 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
3112 {
3113   int i;
3114 
3115   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3116     attrs_list_union (&dst->regs[i], src->regs[i]);
3117 
3118   if (dst->vars == empty_shared_hash)
3119     {
3120       shared_hash_destroy (dst->vars);
3121       dst->vars = shared_hash_copy (src->vars);
3122     }
3123   else
3124     {
3125       variable_iterator_type hi;
3126       variable var;
3127 
3128       FOR_EACH_HASH_TABLE_ELEMENT (shared_hash_htab (src->vars),
3129 				   var, variable, hi)
3130 	variable_union (var, dst);
3131     }
3132 }
3133 
3134 /* Whether the value is currently being expanded.  */
3135 #define VALUE_RECURSED_INTO(x) \
3136   (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
3137 
3138 /* Whether no expansion was found, saving useless lookups.
3139    It must only be set when VALUE_CHANGED is clear.  */
3140 #define NO_LOC_P(x) \
3141   (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)
3142 
3143 /* Whether cur_loc in the value needs to be (re)computed.  */
3144 #define VALUE_CHANGED(x) \
3145   (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
3146 /* Whether cur_loc in the decl needs to be (re)computed.  */
3147 #define DECL_CHANGED(x) TREE_VISITED (x)
3148 
3149 /* Record (if NEWV) that DV needs to have its cur_loc recomputed.  For
3150    user DECLs, this means they're in changed_variables.  Values and
3151    debug exprs may be left with this flag set if no user variable
3152    requires them to be evaluated.  */
3153 
3154 static inline void
set_dv_changed(decl_or_value dv,bool newv)3155 set_dv_changed (decl_or_value dv, bool newv)
3156 {
3157   switch (dv_onepart_p (dv))
3158     {
3159     case ONEPART_VALUE:
3160       if (newv)
3161 	NO_LOC_P (dv_as_value (dv)) = false;
3162       VALUE_CHANGED (dv_as_value (dv)) = newv;
3163       break;
3164 
3165     case ONEPART_DEXPR:
3166       if (newv)
3167 	NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv))) = false;
3168       /* Fall through...  */
3169 
3170     default:
3171       DECL_CHANGED (dv_as_decl (dv)) = newv;
3172       break;
3173     }
3174 }
3175 
3176 /* Return true if DV needs to have its cur_loc recomputed.  */
3177 
3178 static inline bool
dv_changed_p(decl_or_value dv)3179 dv_changed_p (decl_or_value dv)
3180 {
3181   return (dv_is_value_p (dv)
3182 	  ? VALUE_CHANGED (dv_as_value (dv))
3183 	  : DECL_CHANGED (dv_as_decl (dv)));
3184 }
3185 
3186 /* Return a location list node whose loc is rtx_equal to LOC, in the
3187    location list of a one-part variable or value VAR, or in that of
3188    any values recursively mentioned in the location lists.  VARS must
3189    be in star-canonical form.  */
3190 
3191 static location_chain
find_loc_in_1pdv(rtx loc,variable var,variable_table_type vars)3192 find_loc_in_1pdv (rtx loc, variable var, variable_table_type vars)
3193 {
3194   location_chain node;
3195   enum rtx_code loc_code;
3196 
3197   if (!var)
3198     return NULL;
3199 
3200   gcc_checking_assert (var->onepart);
3201 
3202   if (!var->n_var_parts)
3203     return NULL;
3204 
3205   gcc_checking_assert (loc != dv_as_opaque (var->dv));
3206 
3207   loc_code = GET_CODE (loc);
3208   for (node = var->var_part[0].loc_chain; node; node = node->next)
3209     {
3210       decl_or_value dv;
3211       variable rvar;
3212 
3213       if (GET_CODE (node->loc) != loc_code)
3214 	{
3215 	  if (GET_CODE (node->loc) != VALUE)
3216 	    continue;
3217 	}
3218       else if (loc == node->loc)
3219 	return node;
3220       else if (loc_code != VALUE)
3221 	{
3222 	  if (rtx_equal_p (loc, node->loc))
3223 	    return node;
3224 	  continue;
3225 	}
3226 
3227       /* Since we're in star-canonical form, we don't need to visit
3228 	 non-canonical nodes: one-part variables and non-canonical
3229 	 values would only point back to the canonical node.  */
3230       if (dv_is_value_p (var->dv)
3231 	  && !canon_value_cmp (node->loc, dv_as_value (var->dv)))
3232 	{
3233 	  /* Skip all subsequent VALUEs.  */
3234 	  while (node->next && GET_CODE (node->next->loc) == VALUE)
3235 	    {
3236 	      node = node->next;
3237 	      gcc_checking_assert (!canon_value_cmp (node->loc,
3238 						     dv_as_value (var->dv)));
3239 	      if (loc == node->loc)
3240 		return node;
3241 	    }
3242 	  continue;
3243 	}
3244 
3245       gcc_checking_assert (node == var->var_part[0].loc_chain);
3246       gcc_checking_assert (!node->next);
3247 
3248       dv = dv_from_value (node->loc);
3249       rvar = vars.find_with_hash (dv, dv_htab_hash (dv));
3250       return find_loc_in_1pdv (loc, rvar, vars);
3251     }
3252 
3253   /* ??? Gotta look in cselib_val locations too.  */
3254 
3255   return NULL;
3256 }
3257 
3258 /* Hash table iteration argument passed to variable_merge.  */
3259 struct dfset_merge
3260 {
3261   /* The set in which the merge is to be inserted.  */
3262   dataflow_set *dst;
3263   /* The set that we're iterating in.  */
3264   dataflow_set *cur;
3265   /* The set that may contain the other dv we are to merge with.  */
3266   dataflow_set *src;
3267   /* Number of onepart dvs in src.  */
3268   int src_onepart_cnt;
3269 };
3270 
3271 /* Insert LOC in *DNODE, if it's not there yet.  The list must be in
3272    loc_cmp order, and it is maintained as such.  */
3273 
3274 static void
insert_into_intersection(location_chain * nodep,rtx loc,enum var_init_status status)3275 insert_into_intersection (location_chain *nodep, rtx loc,
3276 			  enum var_init_status status)
3277 {
3278   location_chain node;
3279   int r;
3280 
3281   for (node = *nodep; node; nodep = &node->next, node = *nodep)
3282     if ((r = loc_cmp (node->loc, loc)) == 0)
3283       {
3284 	node->init = MIN (node->init, status);
3285 	return;
3286       }
3287     else if (r > 0)
3288       break;
3289 
3290   node = (location_chain) pool_alloc (loc_chain_pool);
3291 
3292   node->loc = loc;
3293   node->set_src = NULL;
3294   node->init = status;
3295   node->next = *nodep;
3296   *nodep = node;
3297 }
3298 
3299 /* Insert in DEST the intersection of the locations present in both
3300    S1NODE and S2VAR, directly or indirectly.  S1NODE is from a
3301    variable in DSM->cur, whereas S2VAR is from DSM->src.  dvar is in
3302    DSM->dst.  */
3303 
3304 static void
intersect_loc_chains(rtx val,location_chain * dest,struct dfset_merge * dsm,location_chain s1node,variable s2var)3305 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
3306 		      location_chain s1node, variable s2var)
3307 {
3308   dataflow_set *s1set = dsm->cur;
3309   dataflow_set *s2set = dsm->src;
3310   location_chain found;
3311 
3312   if (s2var)
3313     {
3314       location_chain s2node;
3315 
3316       gcc_checking_assert (s2var->onepart);
3317 
3318       if (s2var->n_var_parts)
3319 	{
3320 	  s2node = s2var->var_part[0].loc_chain;
3321 
3322 	  for (; s1node && s2node;
3323 	       s1node = s1node->next, s2node = s2node->next)
3324 	    if (s1node->loc != s2node->loc)
3325 	      break;
3326 	    else if (s1node->loc == val)
3327 	      continue;
3328 	    else
3329 	      insert_into_intersection (dest, s1node->loc,
3330 					MIN (s1node->init, s2node->init));
3331 	}
3332     }
3333 
3334   for (; s1node; s1node = s1node->next)
3335     {
3336       if (s1node->loc == val)
3337 	continue;
3338 
3339       if ((found = find_loc_in_1pdv (s1node->loc, s2var,
3340 				     shared_hash_htab (s2set->vars))))
3341 	{
3342 	  insert_into_intersection (dest, s1node->loc,
3343 				    MIN (s1node->init, found->init));
3344 	  continue;
3345 	}
3346 
3347       if (GET_CODE (s1node->loc) == VALUE
3348 	  && !VALUE_RECURSED_INTO (s1node->loc))
3349 	{
3350 	  decl_or_value dv = dv_from_value (s1node->loc);
3351 	  variable svar = shared_hash_find (s1set->vars, dv);
3352 	  if (svar)
3353 	    {
3354 	      if (svar->n_var_parts == 1)
3355 		{
3356 		  VALUE_RECURSED_INTO (s1node->loc) = true;
3357 		  intersect_loc_chains (val, dest, dsm,
3358 					svar->var_part[0].loc_chain,
3359 					s2var);
3360 		  VALUE_RECURSED_INTO (s1node->loc) = false;
3361 		}
3362 	    }
3363 	}
3364 
3365       /* ??? gotta look in cselib_val locations too.  */
3366 
3367       /* ??? if the location is equivalent to any location in src,
3368 	 searched recursively
3369 
3370 	   add to dst the values needed to represent the equivalence
3371 
3372      telling whether locations S is equivalent to another dv's
3373      location list:
3374 
3375        for each location D in the list
3376 
3377          if S and D satisfy rtx_equal_p, then it is present
3378 
3379 	 else if D is a value, recurse without cycles
3380 
3381 	 else if S and D have the same CODE and MODE
3382 
3383 	   for each operand oS and the corresponding oD
3384 
3385 	     if oS and oD are not equivalent, then S an D are not equivalent
3386 
3387 	     else if they are RTX vectors
3388 
3389 	       if any vector oS element is not equivalent to its respective oD,
3390 	       then S and D are not equivalent
3391 
3392    */
3393 
3394 
3395     }
3396 }
3397 
3398 /* Return -1 if X should be before Y in a location list for a 1-part
3399    variable, 1 if Y should be before X, and 0 if they're equivalent
3400    and should not appear in the list.  */
3401 
3402 static int
loc_cmp(rtx x,rtx y)3403 loc_cmp (rtx x, rtx y)
3404 {
3405   int i, j, r;
3406   RTX_CODE code = GET_CODE (x);
3407   const char *fmt;
3408 
3409   if (x == y)
3410     return 0;
3411 
3412   if (REG_P (x))
3413     {
3414       if (!REG_P (y))
3415 	return -1;
3416       gcc_assert (GET_MODE (x) == GET_MODE (y));
3417       if (REGNO (x) == REGNO (y))
3418 	return 0;
3419       else if (REGNO (x) < REGNO (y))
3420 	return -1;
3421       else
3422 	return 1;
3423     }
3424 
3425   if (REG_P (y))
3426     return 1;
3427 
3428   if (MEM_P (x))
3429     {
3430       if (!MEM_P (y))
3431 	return -1;
3432       gcc_assert (GET_MODE (x) == GET_MODE (y));
3433       return loc_cmp (XEXP (x, 0), XEXP (y, 0));
3434     }
3435 
3436   if (MEM_P (y))
3437     return 1;
3438 
3439   if (GET_CODE (x) == VALUE)
3440     {
3441       if (GET_CODE (y) != VALUE)
3442 	return -1;
3443       /* Don't assert the modes are the same, that is true only
3444 	 when not recursing.  (subreg:QI (value:SI 1:1) 0)
3445 	 and (subreg:QI (value:DI 2:2) 0) can be compared,
3446 	 even when the modes are different.  */
3447       if (canon_value_cmp (x, y))
3448 	return -1;
3449       else
3450 	return 1;
3451     }
3452 
3453   if (GET_CODE (y) == VALUE)
3454     return 1;
3455 
3456   /* Entry value is the least preferable kind of expression.  */
3457   if (GET_CODE (x) == ENTRY_VALUE)
3458     {
3459       if (GET_CODE (y) != ENTRY_VALUE)
3460 	return 1;
3461       gcc_assert (GET_MODE (x) == GET_MODE (y));
3462       return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
3463     }
3464 
3465   if (GET_CODE (y) == ENTRY_VALUE)
3466     return -1;
3467 
3468   if (GET_CODE (x) == GET_CODE (y))
3469     /* Compare operands below.  */;
3470   else if (GET_CODE (x) < GET_CODE (y))
3471     return -1;
3472   else
3473     return 1;
3474 
3475   gcc_assert (GET_MODE (x) == GET_MODE (y));
3476 
3477   if (GET_CODE (x) == DEBUG_EXPR)
3478     {
3479       if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3480 	  < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
3481 	return -1;
3482       gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3483 			   > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
3484       return 1;
3485     }
3486 
3487   fmt = GET_RTX_FORMAT (code);
3488   for (i = 0; i < GET_RTX_LENGTH (code); i++)
3489     switch (fmt[i])
3490       {
3491       case 'w':
3492 	if (XWINT (x, i) == XWINT (y, i))
3493 	  break;
3494 	else if (XWINT (x, i) < XWINT (y, i))
3495 	  return -1;
3496 	else
3497 	  return 1;
3498 
3499       case 'n':
3500       case 'i':
3501 	if (XINT (x, i) == XINT (y, i))
3502 	  break;
3503 	else if (XINT (x, i) < XINT (y, i))
3504 	  return -1;
3505 	else
3506 	  return 1;
3507 
3508       case 'V':
3509       case 'E':
3510 	/* Compare the vector length first.  */
3511 	if (XVECLEN (x, i) == XVECLEN (y, i))
3512 	  /* Compare the vectors elements.  */;
3513 	else if (XVECLEN (x, i) < XVECLEN (y, i))
3514 	  return -1;
3515 	else
3516 	  return 1;
3517 
3518 	for (j = 0; j < XVECLEN (x, i); j++)
3519 	  if ((r = loc_cmp (XVECEXP (x, i, j),
3520 			    XVECEXP (y, i, j))))
3521 	    return r;
3522 	break;
3523 
3524       case 'e':
3525 	if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
3526 	  return r;
3527 	break;
3528 
3529       case 'S':
3530       case 's':
3531 	if (XSTR (x, i) == XSTR (y, i))
3532 	  break;
3533 	if (!XSTR (x, i))
3534 	  return -1;
3535 	if (!XSTR (y, i))
3536 	  return 1;
3537 	if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
3538 	  break;
3539 	else if (r < 0)
3540 	  return -1;
3541 	else
3542 	  return 1;
3543 
3544       case 'u':
3545 	/* These are just backpointers, so they don't matter.  */
3546 	break;
3547 
3548       case '0':
3549       case 't':
3550 	break;
3551 
3552 	/* It is believed that rtx's at this level will never
3553 	   contain anything but integers and other rtx's,
3554 	   except for within LABEL_REFs and SYMBOL_REFs.  */
3555       default:
3556 	gcc_unreachable ();
3557       }
3558 
3559   return 0;
3560 }
3561 
3562 #if ENABLE_CHECKING
3563 /* Check the order of entries in one-part variables.   */
3564 
3565 int
canonicalize_loc_order_check(variable_def ** slot,dataflow_set * data ATTRIBUTE_UNUSED)3566 canonicalize_loc_order_check (variable_def **slot,
3567 			      dataflow_set *data ATTRIBUTE_UNUSED)
3568 {
3569   variable var = *slot;
3570   location_chain node, next;
3571 
3572 #ifdef ENABLE_RTL_CHECKING
3573   int i;
3574   for (i = 0; i < var->n_var_parts; i++)
3575     gcc_assert (var->var_part[0].cur_loc == NULL);
3576   gcc_assert (!var->in_changed_variables);
3577 #endif
3578 
3579   if (!var->onepart)
3580     return 1;
3581 
3582   gcc_assert (var->n_var_parts == 1);
3583   node = var->var_part[0].loc_chain;
3584   gcc_assert (node);
3585 
3586   while ((next = node->next))
3587     {
3588       gcc_assert (loc_cmp (node->loc, next->loc) < 0);
3589       node = next;
3590     }
3591 
3592   return 1;
3593 }
3594 #endif
3595 
3596 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3597    more likely to be chosen as canonical for an equivalence set.
3598    Ensure less likely values can reach more likely neighbors, making
3599    the connections bidirectional.  */
3600 
3601 int
canonicalize_values_mark(variable_def ** slot,dataflow_set * set)3602 canonicalize_values_mark (variable_def **slot, dataflow_set *set)
3603 {
3604   variable var = *slot;
3605   decl_or_value dv = var->dv;
3606   rtx val;
3607   location_chain node;
3608 
3609   if (!dv_is_value_p (dv))
3610     return 1;
3611 
3612   gcc_checking_assert (var->n_var_parts == 1);
3613 
3614   val = dv_as_value (dv);
3615 
3616   for (node = var->var_part[0].loc_chain; node; node = node->next)
3617     if (GET_CODE (node->loc) == VALUE)
3618       {
3619 	if (canon_value_cmp (node->loc, val))
3620 	  VALUE_RECURSED_INTO (val) = true;
3621 	else
3622 	  {
3623 	    decl_or_value odv = dv_from_value (node->loc);
3624 	    variable_def **oslot;
3625 	    oslot = shared_hash_find_slot_noinsert (set->vars, odv);
3626 
3627 	    set_slot_part (set, val, oslot, odv, 0,
3628 			   node->init, NULL_RTX);
3629 
3630 	    VALUE_RECURSED_INTO (node->loc) = true;
3631 	  }
3632       }
3633 
3634   return 1;
3635 }
3636 
3637 /* Remove redundant entries from equivalence lists in onepart
3638    variables, canonicalizing equivalence sets into star shapes.  */
3639 
3640 int
canonicalize_values_star(variable_def ** slot,dataflow_set * set)3641 canonicalize_values_star (variable_def **slot, dataflow_set *set)
3642 {
3643   variable var = *slot;
3644   decl_or_value dv = var->dv;
3645   location_chain node;
3646   decl_or_value cdv;
3647   rtx val, cval;
3648   variable_def **cslot;
3649   bool has_value;
3650   bool has_marks;
3651 
3652   if (!var->onepart)
3653     return 1;
3654 
3655   gcc_checking_assert (var->n_var_parts == 1);
3656 
3657   if (dv_is_value_p (dv))
3658     {
3659       cval = dv_as_value (dv);
3660       if (!VALUE_RECURSED_INTO (cval))
3661 	return 1;
3662       VALUE_RECURSED_INTO (cval) = false;
3663     }
3664   else
3665     cval = NULL_RTX;
3666 
3667  restart:
3668   val = cval;
3669   has_value = false;
3670   has_marks = false;
3671 
3672   gcc_assert (var->n_var_parts == 1);
3673 
3674   for (node = var->var_part[0].loc_chain; node; node = node->next)
3675     if (GET_CODE (node->loc) == VALUE)
3676       {
3677 	has_value = true;
3678 	if (VALUE_RECURSED_INTO (node->loc))
3679 	  has_marks = true;
3680 	if (canon_value_cmp (node->loc, cval))
3681 	  cval = node->loc;
3682       }
3683 
3684   if (!has_value)
3685     return 1;
3686 
3687   if (cval == val)
3688     {
3689       if (!has_marks || dv_is_decl_p (dv))
3690 	return 1;
3691 
3692       /* Keep it marked so that we revisit it, either after visiting a
3693 	 child node, or after visiting a new parent that might be
3694 	 found out.  */
3695       VALUE_RECURSED_INTO (val) = true;
3696 
3697       for (node = var->var_part[0].loc_chain; node; node = node->next)
3698 	if (GET_CODE (node->loc) == VALUE
3699 	    && VALUE_RECURSED_INTO (node->loc))
3700 	  {
3701 	    cval = node->loc;
3702 	  restart_with_cval:
3703 	    VALUE_RECURSED_INTO (cval) = false;
3704 	    dv = dv_from_value (cval);
3705 	    slot = shared_hash_find_slot_noinsert (set->vars, dv);
3706 	    if (!slot)
3707 	      {
3708 		gcc_assert (dv_is_decl_p (var->dv));
3709 		/* The canonical value was reset and dropped.
3710 		   Remove it.  */
3711 		clobber_variable_part (set, NULL, var->dv, 0, NULL);
3712 		return 1;
3713 	      }
3714 	    var = *slot;
3715 	    gcc_assert (dv_is_value_p (var->dv));
3716 	    if (var->n_var_parts == 0)
3717 	      return 1;
3718 	    gcc_assert (var->n_var_parts == 1);
3719 	    goto restart;
3720 	  }
3721 
3722       VALUE_RECURSED_INTO (val) = false;
3723 
3724       return 1;
3725     }
3726 
3727   /* Push values to the canonical one.  */
3728   cdv = dv_from_value (cval);
3729   cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3730 
3731   for (node = var->var_part[0].loc_chain; node; node = node->next)
3732     if (node->loc != cval)
3733       {
3734 	cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3735 			       node->init, NULL_RTX);
3736 	if (GET_CODE (node->loc) == VALUE)
3737 	  {
3738 	    decl_or_value ndv = dv_from_value (node->loc);
3739 
3740 	    set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3741 			       NO_INSERT);
3742 
3743 	    if (canon_value_cmp (node->loc, val))
3744 	      {
3745 		/* If it could have been a local minimum, it's not any more,
3746 		   since it's now neighbor to cval, so it may have to push
3747 		   to it.  Conversely, if it wouldn't have prevailed over
3748 		   val, then whatever mark it has is fine: if it was to
3749 		   push, it will now push to a more canonical node, but if
3750 		   it wasn't, then it has already pushed any values it might
3751 		   have to.  */
3752 		VALUE_RECURSED_INTO (node->loc) = true;
3753 		/* Make sure we visit node->loc by ensuring we cval is
3754 		   visited too.  */
3755 		VALUE_RECURSED_INTO (cval) = true;
3756 	      }
3757 	    else if (!VALUE_RECURSED_INTO (node->loc))
3758 	      /* If we have no need to "recurse" into this node, it's
3759 		 already "canonicalized", so drop the link to the old
3760 		 parent.  */
3761 	      clobber_variable_part (set, cval, ndv, 0, NULL);
3762 	  }
3763 	else if (GET_CODE (node->loc) == REG)
3764 	  {
3765 	    attrs list = set->regs[REGNO (node->loc)], *listp;
3766 
3767 	    /* Change an existing attribute referring to dv so that it
3768 	       refers to cdv, removing any duplicate this might
3769 	       introduce, and checking that no previous duplicates
3770 	       existed, all in a single pass.  */
3771 
3772 	    while (list)
3773 	      {
3774 		if (list->offset == 0
3775 		    && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3776 			|| dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3777 		  break;
3778 
3779 		list = list->next;
3780 	      }
3781 
3782 	    gcc_assert (list);
3783 	    if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3784 	      {
3785 		list->dv = cdv;
3786 		for (listp = &list->next; (list = *listp); listp = &list->next)
3787 		  {
3788 		    if (list->offset)
3789 		      continue;
3790 
3791 		    if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3792 		      {
3793 			*listp = list->next;
3794 			pool_free (attrs_pool, list);
3795 			list = *listp;
3796 			break;
3797 		      }
3798 
3799 		    gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3800 		  }
3801 	      }
3802 	    else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3803 	      {
3804 		for (listp = &list->next; (list = *listp); listp = &list->next)
3805 		  {
3806 		    if (list->offset)
3807 		      continue;
3808 
3809 		    if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3810 		      {
3811 			*listp = list->next;
3812 			pool_free (attrs_pool, list);
3813 			list = *listp;
3814 			break;
3815 		      }
3816 
3817 		    gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3818 		  }
3819 	      }
3820 	    else
3821 	      gcc_unreachable ();
3822 
3823 #if ENABLE_CHECKING
3824 	    while (list)
3825 	      {
3826 		if (list->offset == 0
3827 		    && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3828 			|| dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3829 		  gcc_unreachable ();
3830 
3831 		list = list->next;
3832 	      }
3833 #endif
3834 	  }
3835       }
3836 
3837   if (val)
3838     set_slot_part (set, val, cslot, cdv, 0,
3839 		   VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
3840 
3841   slot = clobber_slot_part (set, cval, slot, 0, NULL);
3842 
3843   /* Variable may have been unshared.  */
3844   var = *slot;
3845   gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3846 		       && var->var_part[0].loc_chain->next == NULL);
3847 
3848   if (VALUE_RECURSED_INTO (cval))
3849     goto restart_with_cval;
3850 
3851   return 1;
3852 }
3853 
3854 /* Bind one-part variables to the canonical value in an equivalence
3855    set.  Not doing this causes dataflow convergence failure in rare
3856    circumstances, see PR42873.  Unfortunately we can't do this
3857    efficiently as part of canonicalize_values_star, since we may not
3858    have determined or even seen the canonical value of a set when we
3859    get to a variable that references another member of the set.  */
3860 
3861 int
canonicalize_vars_star(variable_def ** slot,dataflow_set * set)3862 canonicalize_vars_star (variable_def **slot, dataflow_set *set)
3863 {
3864   variable var = *slot;
3865   decl_or_value dv = var->dv;
3866   location_chain node;
3867   rtx cval;
3868   decl_or_value cdv;
3869   variable_def **cslot;
3870   variable cvar;
3871   location_chain cnode;
3872 
3873   if (!var->onepart || var->onepart == ONEPART_VALUE)
3874     return 1;
3875 
3876   gcc_assert (var->n_var_parts == 1);
3877 
3878   node = var->var_part[0].loc_chain;
3879 
3880   if (GET_CODE (node->loc) != VALUE)
3881     return 1;
3882 
3883   gcc_assert (!node->next);
3884   cval = node->loc;
3885 
3886   /* Push values to the canonical one.  */
3887   cdv = dv_from_value (cval);
3888   cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3889   if (!cslot)
3890     return 1;
3891   cvar = *cslot;
3892   gcc_assert (cvar->n_var_parts == 1);
3893 
3894   cnode = cvar->var_part[0].loc_chain;
3895 
3896   /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3897      that are not “more canonical” than it.  */
3898   if (GET_CODE (cnode->loc) != VALUE
3899       || !canon_value_cmp (cnode->loc, cval))
3900     return 1;
3901 
3902   /* CVAL was found to be non-canonical.  Change the variable to point
3903      to the canonical VALUE.  */
3904   gcc_assert (!cnode->next);
3905   cval = cnode->loc;
3906 
3907   slot = set_slot_part (set, cval, slot, dv, 0,
3908 			node->init, node->set_src);
3909   clobber_slot_part (set, cval, slot, 0, node->set_src);
3910 
3911   return 1;
3912 }
3913 
3914 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3915    corresponding entry in DSM->src.  Multi-part variables are combined
3916    with variable_union, whereas onepart dvs are combined with
3917    intersection.  */
3918 
3919 static int
variable_merge_over_cur(variable s1var,struct dfset_merge * dsm)3920 variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
3921 {
3922   dataflow_set *dst = dsm->dst;
3923   variable_def **dstslot;
3924   variable s2var, dvar = NULL;
3925   decl_or_value dv = s1var->dv;
3926   onepart_enum_t onepart = s1var->onepart;
3927   rtx val;
3928   hashval_t dvhash;
3929   location_chain node, *nodep;
3930 
3931   /* If the incoming onepart variable has an empty location list, then
3932      the intersection will be just as empty.  For other variables,
3933      it's always union.  */
3934   gcc_checking_assert (s1var->n_var_parts
3935 		       && s1var->var_part[0].loc_chain);
3936 
3937   if (!onepart)
3938     return variable_union (s1var, dst);
3939 
3940   gcc_checking_assert (s1var->n_var_parts == 1);
3941 
3942   dvhash = dv_htab_hash (dv);
3943   if (dv_is_value_p (dv))
3944     val = dv_as_value (dv);
3945   else
3946     val = NULL;
3947 
3948   s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
3949   if (!s2var)
3950     {
3951       dst_can_be_shared = false;
3952       return 1;
3953     }
3954 
3955   dsm->src_onepart_cnt--;
3956   gcc_assert (s2var->var_part[0].loc_chain
3957 	      && s2var->onepart == onepart
3958 	      && s2var->n_var_parts == 1);
3959 
3960   dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3961   if (dstslot)
3962     {
3963       dvar = *dstslot;
3964       gcc_assert (dvar->refcount == 1
3965 		  && dvar->onepart == onepart
3966 		  && dvar->n_var_parts == 1);
3967       nodep = &dvar->var_part[0].loc_chain;
3968     }
3969   else
3970     {
3971       nodep = &node;
3972       node = NULL;
3973     }
3974 
3975   if (!dstslot && !onepart_variable_different_p (s1var, s2var))
3976     {
3977       dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
3978 						 dvhash, INSERT);
3979       *dstslot = dvar = s2var;
3980       dvar->refcount++;
3981     }
3982   else
3983     {
3984       dst_can_be_shared = false;
3985 
3986       intersect_loc_chains (val, nodep, dsm,
3987 			    s1var->var_part[0].loc_chain, s2var);
3988 
3989       if (!dstslot)
3990 	{
3991 	  if (node)
3992 	    {
3993 	      dvar = (variable) pool_alloc (onepart_pool (onepart));
3994 	      dvar->dv = dv;
3995 	      dvar->refcount = 1;
3996 	      dvar->n_var_parts = 1;
3997 	      dvar->onepart = onepart;
3998 	      dvar->in_changed_variables = false;
3999 	      dvar->var_part[0].loc_chain = node;
4000 	      dvar->var_part[0].cur_loc = NULL;
4001 	      if (onepart)
4002 		VAR_LOC_1PAUX (dvar) = NULL;
4003 	      else
4004 		VAR_PART_OFFSET (dvar, 0) = 0;
4005 
4006 	      dstslot
4007 		= shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
4008 						   INSERT);
4009 	      gcc_assert (!*dstslot);
4010 	      *dstslot = dvar;
4011 	    }
4012 	  else
4013 	    return 1;
4014 	}
4015     }
4016 
4017   nodep = &dvar->var_part[0].loc_chain;
4018   while ((node = *nodep))
4019     {
4020       location_chain *nextp = &node->next;
4021 
4022       if (GET_CODE (node->loc) == REG)
4023 	{
4024 	  attrs list;
4025 
4026 	  for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
4027 	    if (GET_MODE (node->loc) == GET_MODE (list->loc)
4028 		&& dv_is_value_p (list->dv))
4029 	      break;
4030 
4031 	  if (!list)
4032 	    attrs_list_insert (&dst->regs[REGNO (node->loc)],
4033 			       dv, 0, node->loc);
4034 	  /* If this value became canonical for another value that had
4035 	     this register, we want to leave it alone.  */
4036 	  else if (dv_as_value (list->dv) != val)
4037 	    {
4038 	      dstslot = set_slot_part (dst, dv_as_value (list->dv),
4039 				       dstslot, dv, 0,
4040 				       node->init, NULL_RTX);
4041 	      dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
4042 
4043 	      /* Since nextp points into the removed node, we can't
4044 		 use it.  The pointer to the next node moved to nodep.
4045 		 However, if the variable we're walking is unshared
4046 		 during our walk, we'll keep walking the location list
4047 		 of the previously-shared variable, in which case the
4048 		 node won't have been removed, and we'll want to skip
4049 		 it.  That's why we test *nodep here.  */
4050 	      if (*nodep != node)
4051 		nextp = nodep;
4052 	    }
4053 	}
4054       else
4055 	/* Canonicalization puts registers first, so we don't have to
4056 	   walk it all.  */
4057 	break;
4058       nodep = nextp;
4059     }
4060 
4061   if (dvar != *dstslot)
4062     dvar = *dstslot;
4063   nodep = &dvar->var_part[0].loc_chain;
4064 
4065   if (val)
4066     {
4067       /* Mark all referenced nodes for canonicalization, and make sure
4068 	 we have mutual equivalence links.  */
4069       VALUE_RECURSED_INTO (val) = true;
4070       for (node = *nodep; node; node = node->next)
4071 	if (GET_CODE (node->loc) == VALUE)
4072 	  {
4073 	    VALUE_RECURSED_INTO (node->loc) = true;
4074 	    set_variable_part (dst, val, dv_from_value (node->loc), 0,
4075 			       node->init, NULL, INSERT);
4076 	  }
4077 
4078       dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4079       gcc_assert (*dstslot == dvar);
4080       canonicalize_values_star (dstslot, dst);
4081       gcc_checking_assert (dstslot
4082 			   == shared_hash_find_slot_noinsert_1 (dst->vars,
4083 								dv, dvhash));
4084       dvar = *dstslot;
4085     }
4086   else
4087     {
4088       bool has_value = false, has_other = false;
4089 
4090       /* If we have one value and anything else, we're going to
4091 	 canonicalize this, so make sure all values have an entry in
4092 	 the table and are marked for canonicalization.  */
4093       for (node = *nodep; node; node = node->next)
4094 	{
4095 	  if (GET_CODE (node->loc) == VALUE)
4096 	    {
4097 	      /* If this was marked during register canonicalization,
4098 		 we know we have to canonicalize values.  */
4099 	      if (has_value)
4100 		has_other = true;
4101 	      has_value = true;
4102 	      if (has_other)
4103 		break;
4104 	    }
4105 	  else
4106 	    {
4107 	      has_other = true;
4108 	      if (has_value)
4109 		break;
4110 	    }
4111 	}
4112 
4113       if (has_value && has_other)
4114 	{
4115 	  for (node = *nodep; node; node = node->next)
4116 	    {
4117 	      if (GET_CODE (node->loc) == VALUE)
4118 		{
4119 		  decl_or_value dv = dv_from_value (node->loc);
4120 		  variable_def **slot = NULL;
4121 
4122 		  if (shared_hash_shared (dst->vars))
4123 		    slot = shared_hash_find_slot_noinsert (dst->vars, dv);
4124 		  if (!slot)
4125 		    slot = shared_hash_find_slot_unshare (&dst->vars, dv,
4126 							  INSERT);
4127 		  if (!*slot)
4128 		    {
4129 		      variable var = (variable) pool_alloc (onepart_pool
4130 							    (ONEPART_VALUE));
4131 		      var->dv = dv;
4132 		      var->refcount = 1;
4133 		      var->n_var_parts = 1;
4134 		      var->onepart = ONEPART_VALUE;
4135 		      var->in_changed_variables = false;
4136 		      var->var_part[0].loc_chain = NULL;
4137 		      var->var_part[0].cur_loc = NULL;
4138 		      VAR_LOC_1PAUX (var) = NULL;
4139 		      *slot = var;
4140 		    }
4141 
4142 		  VALUE_RECURSED_INTO (node->loc) = true;
4143 		}
4144 	    }
4145 
4146 	  dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4147 	  gcc_assert (*dstslot == dvar);
4148 	  canonicalize_values_star (dstslot, dst);
4149 	  gcc_checking_assert (dstslot
4150 			       == shared_hash_find_slot_noinsert_1 (dst->vars,
4151 								    dv, dvhash));
4152 	  dvar = *dstslot;
4153 	}
4154     }
4155 
4156   if (!onepart_variable_different_p (dvar, s2var))
4157     {
4158       variable_htab_free (dvar);
4159       *dstslot = dvar = s2var;
4160       dvar->refcount++;
4161     }
4162   else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
4163     {
4164       variable_htab_free (dvar);
4165       *dstslot = dvar = s1var;
4166       dvar->refcount++;
4167       dst_can_be_shared = false;
4168     }
4169   else
4170     dst_can_be_shared = false;
4171 
4172   return 1;
4173 }
4174 
4175 /* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
4176    multi-part variable.  Unions of multi-part variables and
4177    intersections of one-part ones will be handled in
4178    variable_merge_over_cur().  */
4179 
4180 static int
variable_merge_over_src(variable s2var,struct dfset_merge * dsm)4181 variable_merge_over_src (variable s2var, struct dfset_merge *dsm)
4182 {
4183   dataflow_set *dst = dsm->dst;
4184   decl_or_value dv = s2var->dv;
4185 
4186   if (!s2var->onepart)
4187     {
4188       variable_def **dstp = shared_hash_find_slot (dst->vars, dv);
4189       *dstp = s2var;
4190       s2var->refcount++;
4191       return 1;
4192     }
4193 
4194   dsm->src_onepart_cnt++;
4195   return 1;
4196 }
4197 
4198 /* Combine dataflow set information from SRC2 into DST, using PDST
4199    to carry over information across passes.  */
4200 
4201 static void
dataflow_set_merge(dataflow_set * dst,dataflow_set * src2)4202 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
4203 {
4204   dataflow_set cur = *dst;
4205   dataflow_set *src1 = &cur;
4206   struct dfset_merge dsm;
4207   int i;
4208   size_t src1_elems, src2_elems;
4209   variable_iterator_type hi;
4210   variable var;
4211 
4212   src1_elems = shared_hash_htab (src1->vars).elements ();
4213   src2_elems = shared_hash_htab (src2->vars).elements ();
4214   dataflow_set_init (dst);
4215   dst->stack_adjust = cur.stack_adjust;
4216   shared_hash_destroy (dst->vars);
4217   dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
4218   dst->vars->refcount = 1;
4219   dst->vars->htab.create (MAX (src1_elems, src2_elems));
4220 
4221   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4222     attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
4223 
4224   dsm.dst = dst;
4225   dsm.src = src2;
4226   dsm.cur = src1;
4227   dsm.src_onepart_cnt = 0;
4228 
4229   FOR_EACH_HASH_TABLE_ELEMENT (shared_hash_htab (dsm.src->vars),
4230 			       var, variable, hi)
4231     variable_merge_over_src (var, &dsm);
4232   FOR_EACH_HASH_TABLE_ELEMENT (shared_hash_htab (dsm.cur->vars),
4233 			       var, variable, hi)
4234     variable_merge_over_cur (var, &dsm);
4235 
4236   if (dsm.src_onepart_cnt)
4237     dst_can_be_shared = false;
4238 
4239   dataflow_set_destroy (src1);
4240 }
4241 
4242 /* Mark register equivalences.  */
4243 
4244 static void
dataflow_set_equiv_regs(dataflow_set * set)4245 dataflow_set_equiv_regs (dataflow_set *set)
4246 {
4247   int i;
4248   attrs list, *listp;
4249 
4250   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4251     {
4252       rtx canon[NUM_MACHINE_MODES];
4253 
4254       /* If the list is empty or one entry, no need to canonicalize
4255 	 anything.  */
4256       if (set->regs[i] == NULL || set->regs[i]->next == NULL)
4257 	continue;
4258 
4259       memset (canon, 0, sizeof (canon));
4260 
4261       for (list = set->regs[i]; list; list = list->next)
4262 	if (list->offset == 0 && dv_is_value_p (list->dv))
4263 	  {
4264 	    rtx val = dv_as_value (list->dv);
4265 	    rtx *cvalp = &canon[(int)GET_MODE (val)];
4266 	    rtx cval = *cvalp;
4267 
4268 	    if (canon_value_cmp (val, cval))
4269 	      *cvalp = val;
4270 	  }
4271 
4272       for (list = set->regs[i]; list; list = list->next)
4273 	if (list->offset == 0 && dv_onepart_p (list->dv))
4274 	  {
4275 	    rtx cval = canon[(int)GET_MODE (list->loc)];
4276 
4277 	    if (!cval)
4278 	      continue;
4279 
4280 	    if (dv_is_value_p (list->dv))
4281 	      {
4282 		rtx val = dv_as_value (list->dv);
4283 
4284 		if (val == cval)
4285 		  continue;
4286 
4287 		VALUE_RECURSED_INTO (val) = true;
4288 		set_variable_part (set, val, dv_from_value (cval), 0,
4289 				   VAR_INIT_STATUS_INITIALIZED,
4290 				   NULL, NO_INSERT);
4291 	      }
4292 
4293 	    VALUE_RECURSED_INTO (cval) = true;
4294 	    set_variable_part (set, cval, list->dv, 0,
4295 			       VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
4296 	  }
4297 
4298       for (listp = &set->regs[i]; (list = *listp);
4299 	   listp = list ? &list->next : listp)
4300 	if (list->offset == 0 && dv_onepart_p (list->dv))
4301 	  {
4302 	    rtx cval = canon[(int)GET_MODE (list->loc)];
4303 	    variable_def **slot;
4304 
4305 	    if (!cval)
4306 	      continue;
4307 
4308 	    if (dv_is_value_p (list->dv))
4309 	      {
4310 		rtx val = dv_as_value (list->dv);
4311 		if (!VALUE_RECURSED_INTO (val))
4312 		  continue;
4313 	      }
4314 
4315 	    slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
4316 	    canonicalize_values_star (slot, set);
4317 	    if (*listp != list)
4318 	      list = NULL;
4319 	  }
4320     }
4321 }
4322 
4323 /* Remove any redundant values in the location list of VAR, which must
4324    be unshared and 1-part.  */
4325 
4326 static void
remove_duplicate_values(variable var)4327 remove_duplicate_values (variable var)
4328 {
4329   location_chain node, *nodep;
4330 
4331   gcc_assert (var->onepart);
4332   gcc_assert (var->n_var_parts == 1);
4333   gcc_assert (var->refcount == 1);
4334 
4335   for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
4336     {
4337       if (GET_CODE (node->loc) == VALUE)
4338 	{
4339 	  if (VALUE_RECURSED_INTO (node->loc))
4340 	    {
4341 	      /* Remove duplicate value node.  */
4342 	      *nodep = node->next;
4343 	      pool_free (loc_chain_pool, node);
4344 	      continue;
4345 	    }
4346 	  else
4347 	    VALUE_RECURSED_INTO (node->loc) = true;
4348 	}
4349       nodep = &node->next;
4350     }
4351 
4352   for (node = var->var_part[0].loc_chain; node; node = node->next)
4353     if (GET_CODE (node->loc) == VALUE)
4354       {
4355 	gcc_assert (VALUE_RECURSED_INTO (node->loc));
4356 	VALUE_RECURSED_INTO (node->loc) = false;
4357       }
4358 }
4359 
4360 
4361 /* Hash table iteration argument passed to variable_post_merge.  */
4362 struct dfset_post_merge
4363 {
4364   /* The new input set for the current block.  */
4365   dataflow_set *set;
4366   /* Pointer to the permanent input set for the current block, or
4367      NULL.  */
4368   dataflow_set **permp;
4369 };
4370 
4371 /* Create values for incoming expressions associated with one-part
4372    variables that don't have value numbers for them.  */
4373 
4374 int
variable_post_merge_new_vals(variable_def ** slot,dfset_post_merge * dfpm)4375 variable_post_merge_new_vals (variable_def **slot, dfset_post_merge *dfpm)
4376 {
4377   dataflow_set *set = dfpm->set;
4378   variable var = *slot;
4379   location_chain node;
4380 
4381   if (!var->onepart || !var->n_var_parts)
4382     return 1;
4383 
4384   gcc_assert (var->n_var_parts == 1);
4385 
4386   if (dv_is_decl_p (var->dv))
4387     {
4388       bool check_dupes = false;
4389 
4390     restart:
4391       for (node = var->var_part[0].loc_chain; node; node = node->next)
4392 	{
4393 	  if (GET_CODE (node->loc) == VALUE)
4394 	    gcc_assert (!VALUE_RECURSED_INTO (node->loc));
4395 	  else if (GET_CODE (node->loc) == REG)
4396 	    {
4397 	      attrs att, *attp, *curp = NULL;
4398 
4399 	      if (var->refcount != 1)
4400 		{
4401 		  slot = unshare_variable (set, slot, var,
4402 					   VAR_INIT_STATUS_INITIALIZED);
4403 		  var = *slot;
4404 		  goto restart;
4405 		}
4406 
4407 	      for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
4408 		   attp = &att->next)
4409 		if (att->offset == 0
4410 		    && GET_MODE (att->loc) == GET_MODE (node->loc))
4411 		  {
4412 		    if (dv_is_value_p (att->dv))
4413 		      {
4414 			rtx cval = dv_as_value (att->dv);
4415 			node->loc = cval;
4416 			check_dupes = true;
4417 			break;
4418 		      }
4419 		    else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
4420 		      curp = attp;
4421 		  }
4422 
4423 	      if (!curp)
4424 		{
4425 		  curp = attp;
4426 		  while (*curp)
4427 		    if ((*curp)->offset == 0
4428 			&& GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
4429 			&& dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
4430 		      break;
4431 		    else
4432 		      curp = &(*curp)->next;
4433 		  gcc_assert (*curp);
4434 		}
4435 
4436 	      if (!att)
4437 		{
4438 		  decl_or_value cdv;
4439 		  rtx cval;
4440 
4441 		  if (!*dfpm->permp)
4442 		    {
4443 		      *dfpm->permp = XNEW (dataflow_set);
4444 		      dataflow_set_init (*dfpm->permp);
4445 		    }
4446 
4447 		  for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
4448 		       att; att = att->next)
4449 		    if (GET_MODE (att->loc) == GET_MODE (node->loc))
4450 		      {
4451 			gcc_assert (att->offset == 0
4452 				    && dv_is_value_p (att->dv));
4453 			val_reset (set, att->dv);
4454 			break;
4455 		      }
4456 
4457 		  if (att)
4458 		    {
4459 		      cdv = att->dv;
4460 		      cval = dv_as_value (cdv);
4461 		    }
4462 		  else
4463 		    {
4464 		      /* Create a unique value to hold this register,
4465 			 that ought to be found and reused in
4466 			 subsequent rounds.  */
4467 		      cselib_val *v;
4468 		      gcc_assert (!cselib_lookup (node->loc,
4469 						  GET_MODE (node->loc), 0,
4470 						  VOIDmode));
4471 		      v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
4472 					 VOIDmode);
4473 		      cselib_preserve_value (v);
4474 		      cselib_invalidate_rtx (node->loc);
4475 		      cval = v->val_rtx;
4476 		      cdv = dv_from_value (cval);
4477 		      if (dump_file)
4478 			fprintf (dump_file,
4479 				 "Created new value %u:%u for reg %i\n",
4480 				 v->uid, v->hash, REGNO (node->loc));
4481 		    }
4482 
4483 		  var_reg_decl_set (*dfpm->permp, node->loc,
4484 				    VAR_INIT_STATUS_INITIALIZED,
4485 				    cdv, 0, NULL, INSERT);
4486 
4487 		  node->loc = cval;
4488 		  check_dupes = true;
4489 		}
4490 
4491 	      /* Remove attribute referring to the decl, which now
4492 		 uses the value for the register, already existing or
4493 		 to be added when we bring perm in.  */
4494 	      att = *curp;
4495 	      *curp = att->next;
4496 	      pool_free (attrs_pool, att);
4497 	    }
4498 	}
4499 
4500       if (check_dupes)
4501 	remove_duplicate_values (var);
4502     }
4503 
4504   return 1;
4505 }
4506 
4507 /* Reset values in the permanent set that are not associated with the
4508    chosen expression.  */
4509 
4510 int
variable_post_merge_perm_vals(variable_def ** pslot,dfset_post_merge * dfpm)4511 variable_post_merge_perm_vals (variable_def **pslot, dfset_post_merge *dfpm)
4512 {
4513   dataflow_set *set = dfpm->set;
4514   variable pvar = *pslot, var;
4515   location_chain pnode;
4516   decl_or_value dv;
4517   attrs att;
4518 
4519   gcc_assert (dv_is_value_p (pvar->dv)
4520 	      && pvar->n_var_parts == 1);
4521   pnode = pvar->var_part[0].loc_chain;
4522   gcc_assert (pnode
4523 	      && !pnode->next
4524 	      && REG_P (pnode->loc));
4525 
4526   dv = pvar->dv;
4527 
4528   var = shared_hash_find (set->vars, dv);
4529   if (var)
4530     {
4531       /* Although variable_post_merge_new_vals may have made decls
4532 	 non-star-canonical, values that pre-existed in canonical form
4533 	 remain canonical, and newly-created values reference a single
4534 	 REG, so they are canonical as well.  Since VAR has the
4535 	 location list for a VALUE, using find_loc_in_1pdv for it is
4536 	 fine, since VALUEs don't map back to DECLs.  */
4537       if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
4538 	return 1;
4539       val_reset (set, dv);
4540     }
4541 
4542   for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
4543     if (att->offset == 0
4544 	&& GET_MODE (att->loc) == GET_MODE (pnode->loc)
4545 	&& dv_is_value_p (att->dv))
4546       break;
4547 
4548   /* If there is a value associated with this register already, create
4549      an equivalence.  */
4550   if (att && dv_as_value (att->dv) != dv_as_value (dv))
4551     {
4552       rtx cval = dv_as_value (att->dv);
4553       set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
4554       set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
4555 			 NULL, INSERT);
4556     }
4557   else if (!att)
4558     {
4559       attrs_list_insert (&set->regs[REGNO (pnode->loc)],
4560 			 dv, 0, pnode->loc);
4561       variable_union (pvar, set);
4562     }
4563 
4564   return 1;
4565 }
4566 
4567 /* Just checking stuff and registering register attributes for
4568    now.  */
4569 
4570 static void
dataflow_post_merge_adjust(dataflow_set * set,dataflow_set ** permp)4571 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
4572 {
4573   struct dfset_post_merge dfpm;
4574 
4575   dfpm.set = set;
4576   dfpm.permp = permp;
4577 
4578   shared_hash_htab (set->vars)
4579     .traverse <dfset_post_merge*, variable_post_merge_new_vals> (&dfpm);
4580   if (*permp)
4581     shared_hash_htab ((*permp)->vars)
4582       .traverse <dfset_post_merge*, variable_post_merge_perm_vals> (&dfpm);
4583   shared_hash_htab (set->vars)
4584     .traverse <dataflow_set *, canonicalize_values_star> (set);
4585   shared_hash_htab (set->vars)
4586     .traverse <dataflow_set *, canonicalize_vars_star> (set);
4587 }
4588 
4589 /* Return a node whose loc is a MEM that refers to EXPR in the
4590    location list of a one-part variable or value VAR, or in that of
4591    any values recursively mentioned in the location lists.  */
4592 
4593 static location_chain
find_mem_expr_in_1pdv(tree expr,rtx val,variable_table_type vars)4594 find_mem_expr_in_1pdv (tree expr, rtx val, variable_table_type vars)
4595 {
4596   location_chain node;
4597   decl_or_value dv;
4598   variable var;
4599   location_chain where = NULL;
4600 
4601   if (!val)
4602     return NULL;
4603 
4604   gcc_assert (GET_CODE (val) == VALUE
4605 	      && !VALUE_RECURSED_INTO (val));
4606 
4607   dv = dv_from_value (val);
4608   var = vars.find_with_hash (dv, dv_htab_hash (dv));
4609 
4610   if (!var)
4611     return NULL;
4612 
4613   gcc_assert (var->onepart);
4614 
4615   if (!var->n_var_parts)
4616     return NULL;
4617 
4618   VALUE_RECURSED_INTO (val) = true;
4619 
4620   for (node = var->var_part[0].loc_chain; node; node = node->next)
4621     if (MEM_P (node->loc)
4622 	&& MEM_EXPR (node->loc) == expr
4623 	&& INT_MEM_OFFSET (node->loc) == 0)
4624       {
4625 	where = node;
4626 	break;
4627       }
4628     else if (GET_CODE (node->loc) == VALUE
4629 	     && !VALUE_RECURSED_INTO (node->loc)
4630 	     && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
4631       break;
4632 
4633   VALUE_RECURSED_INTO (val) = false;
4634 
4635   return where;
4636 }
4637 
4638 /* Return TRUE if the value of MEM may vary across a call.  */
4639 
4640 static bool
mem_dies_at_call(rtx mem)4641 mem_dies_at_call (rtx mem)
4642 {
4643   tree expr = MEM_EXPR (mem);
4644   tree decl;
4645 
4646   if (!expr)
4647     return true;
4648 
4649   decl = get_base_address (expr);
4650 
4651   if (!decl)
4652     return true;
4653 
4654   if (!DECL_P (decl))
4655     return true;
4656 
4657   return (may_be_aliased (decl)
4658 	  || (!TREE_READONLY (decl) && is_global_var (decl)));
4659 }
4660 
4661 /* Remove all MEMs from the location list of a hash table entry for a
4662    one-part variable, except those whose MEM attributes map back to
4663    the variable itself, directly or within a VALUE.  */
4664 
4665 int
dataflow_set_preserve_mem_locs(variable_def ** slot,dataflow_set * set)4666 dataflow_set_preserve_mem_locs (variable_def **slot, dataflow_set *set)
4667 {
4668   variable var = *slot;
4669 
4670   if (var->onepart == ONEPART_VDECL || var->onepart == ONEPART_DEXPR)
4671     {
4672       tree decl = dv_as_decl (var->dv);
4673       location_chain loc, *locp;
4674       bool changed = false;
4675 
4676       if (!var->n_var_parts)
4677 	return 1;
4678 
4679       gcc_assert (var->n_var_parts == 1);
4680 
4681       if (shared_var_p (var, set->vars))
4682 	{
4683 	  for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4684 	    {
4685 	      /* We want to remove dying MEMs that doesn't refer to DECL.  */
4686 	      if (GET_CODE (loc->loc) == MEM
4687 		  && (MEM_EXPR (loc->loc) != decl
4688 		      || INT_MEM_OFFSET (loc->loc) != 0)
4689 		  && !mem_dies_at_call (loc->loc))
4690 		break;
4691 	      /* We want to move here MEMs that do refer to DECL.  */
4692 	      else if (GET_CODE (loc->loc) == VALUE
4693 		       && find_mem_expr_in_1pdv (decl, loc->loc,
4694 						 shared_hash_htab (set->vars)))
4695 		break;
4696 	    }
4697 
4698 	  if (!loc)
4699 	    return 1;
4700 
4701 	  slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4702 	  var = *slot;
4703 	  gcc_assert (var->n_var_parts == 1);
4704 	}
4705 
4706       for (locp = &var->var_part[0].loc_chain, loc = *locp;
4707 	   loc; loc = *locp)
4708 	{
4709 	  rtx old_loc = loc->loc;
4710 	  if (GET_CODE (old_loc) == VALUE)
4711 	    {
4712 	      location_chain mem_node
4713 		= find_mem_expr_in_1pdv (decl, loc->loc,
4714 					 shared_hash_htab (set->vars));
4715 
4716 	      /* ??? This picks up only one out of multiple MEMs that
4717 		 refer to the same variable.  Do we ever need to be
4718 		 concerned about dealing with more than one, or, given
4719 		 that they should all map to the same variable
4720 		 location, their addresses will have been merged and
4721 		 they will be regarded as equivalent?  */
4722 	      if (mem_node)
4723 		{
4724 		  loc->loc = mem_node->loc;
4725 		  loc->set_src = mem_node->set_src;
4726 		  loc->init = MIN (loc->init, mem_node->init);
4727 		}
4728 	    }
4729 
4730 	  if (GET_CODE (loc->loc) != MEM
4731 	      || (MEM_EXPR (loc->loc) == decl
4732 		  && INT_MEM_OFFSET (loc->loc) == 0)
4733 	      || !mem_dies_at_call (loc->loc))
4734 	    {
4735 	      if (old_loc != loc->loc && emit_notes)
4736 		{
4737 		  if (old_loc == var->var_part[0].cur_loc)
4738 		    {
4739 		      changed = true;
4740 		      var->var_part[0].cur_loc = NULL;
4741 		    }
4742 		}
4743 	      locp = &loc->next;
4744 	      continue;
4745 	    }
4746 
4747 	  if (emit_notes)
4748 	    {
4749 	      if (old_loc == var->var_part[0].cur_loc)
4750 		{
4751 		  changed = true;
4752 		  var->var_part[0].cur_loc = NULL;
4753 		}
4754 	    }
4755 	  *locp = loc->next;
4756 	  pool_free (loc_chain_pool, loc);
4757 	}
4758 
4759       if (!var->var_part[0].loc_chain)
4760 	{
4761 	  var->n_var_parts--;
4762 	  changed = true;
4763 	}
4764       if (changed)
4765 	variable_was_changed (var, set);
4766     }
4767 
4768   return 1;
4769 }
4770 
4771 /* Remove all MEMs from the location list of a hash table entry for a
4772    value.  */
4773 
4774 int
dataflow_set_remove_mem_locs(variable_def ** slot,dataflow_set * set)4775 dataflow_set_remove_mem_locs (variable_def **slot, dataflow_set *set)
4776 {
4777   variable var = *slot;
4778 
4779   if (var->onepart == ONEPART_VALUE)
4780     {
4781       location_chain loc, *locp;
4782       bool changed = false;
4783       rtx cur_loc;
4784 
4785       gcc_assert (var->n_var_parts == 1);
4786 
4787       if (shared_var_p (var, set->vars))
4788 	{
4789 	  for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4790 	    if (GET_CODE (loc->loc) == MEM
4791 		&& mem_dies_at_call (loc->loc))
4792 	      break;
4793 
4794 	  if (!loc)
4795 	    return 1;
4796 
4797 	  slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4798 	  var = *slot;
4799 	  gcc_assert (var->n_var_parts == 1);
4800 	}
4801 
4802       if (VAR_LOC_1PAUX (var))
4803 	cur_loc = VAR_LOC_FROM (var);
4804       else
4805 	cur_loc = var->var_part[0].cur_loc;
4806 
4807       for (locp = &var->var_part[0].loc_chain, loc = *locp;
4808 	   loc; loc = *locp)
4809 	{
4810 	  if (GET_CODE (loc->loc) != MEM
4811 	      || !mem_dies_at_call (loc->loc))
4812 	    {
4813 	      locp = &loc->next;
4814 	      continue;
4815 	    }
4816 
4817 	  *locp = loc->next;
4818 	  /* If we have deleted the location which was last emitted
4819 	     we have to emit new location so add the variable to set
4820 	     of changed variables.  */
4821 	  if (cur_loc == loc->loc)
4822 	    {
4823 	      changed = true;
4824 	      var->var_part[0].cur_loc = NULL;
4825 	      if (VAR_LOC_1PAUX (var))
4826 		VAR_LOC_FROM (var) = NULL;
4827 	    }
4828 	  pool_free (loc_chain_pool, loc);
4829 	}
4830 
4831       if (!var->var_part[0].loc_chain)
4832 	{
4833 	  var->n_var_parts--;
4834 	  changed = true;
4835 	}
4836       if (changed)
4837 	variable_was_changed (var, set);
4838     }
4839 
4840   return 1;
4841 }
4842 
4843 /* Remove all variable-location information about call-clobbered
4844    registers, as well as associations between MEMs and VALUEs.  */
4845 
4846 static void
dataflow_set_clear_at_call(dataflow_set * set)4847 dataflow_set_clear_at_call (dataflow_set *set)
4848 {
4849   unsigned int r;
4850   hard_reg_set_iterator hrsi;
4851 
4852   EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, r, hrsi)
4853     var_regno_delete (set, r);
4854 
4855   if (MAY_HAVE_DEBUG_INSNS)
4856     {
4857       set->traversed_vars = set->vars;
4858       shared_hash_htab (set->vars)
4859 	.traverse <dataflow_set *, dataflow_set_preserve_mem_locs> (set);
4860       set->traversed_vars = set->vars;
4861       shared_hash_htab (set->vars)
4862 	.traverse <dataflow_set *, dataflow_set_remove_mem_locs> (set);
4863       set->traversed_vars = NULL;
4864     }
4865 }
4866 
4867 static bool
variable_part_different_p(variable_part * vp1,variable_part * vp2)4868 variable_part_different_p (variable_part *vp1, variable_part *vp2)
4869 {
4870   location_chain lc1, lc2;
4871 
4872   for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4873     {
4874       for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4875 	{
4876 	  if (REG_P (lc1->loc) && REG_P (lc2->loc))
4877 	    {
4878 	      if (REGNO (lc1->loc) == REGNO (lc2->loc))
4879 		break;
4880 	    }
4881 	  if (rtx_equal_p (lc1->loc, lc2->loc))
4882 	    break;
4883 	}
4884       if (!lc2)
4885 	return true;
4886     }
4887   return false;
4888 }
4889 
4890 /* Return true if one-part variables VAR1 and VAR2 are different.
4891    They must be in canonical order.  */
4892 
4893 static bool
onepart_variable_different_p(variable var1,variable var2)4894 onepart_variable_different_p (variable var1, variable var2)
4895 {
4896   location_chain lc1, lc2;
4897 
4898   if (var1 == var2)
4899     return false;
4900 
4901   gcc_assert (var1->n_var_parts == 1
4902 	      && var2->n_var_parts == 1);
4903 
4904   lc1 = var1->var_part[0].loc_chain;
4905   lc2 = var2->var_part[0].loc_chain;
4906 
4907   gcc_assert (lc1 && lc2);
4908 
4909   while (lc1 && lc2)
4910     {
4911       if (loc_cmp (lc1->loc, lc2->loc))
4912 	return true;
4913       lc1 = lc1->next;
4914       lc2 = lc2->next;
4915     }
4916 
4917   return lc1 != lc2;
4918 }
4919 
4920 /* Return true if variables VAR1 and VAR2 are different.  */
4921 
4922 static bool
variable_different_p(variable var1,variable var2)4923 variable_different_p (variable var1, variable var2)
4924 {
4925   int i;
4926 
4927   if (var1 == var2)
4928     return false;
4929 
4930   if (var1->onepart != var2->onepart)
4931     return true;
4932 
4933   if (var1->n_var_parts != var2->n_var_parts)
4934     return true;
4935 
4936   if (var1->onepart && var1->n_var_parts)
4937     {
4938       gcc_checking_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv)
4939 			   && var1->n_var_parts == 1);
4940       /* One-part values have locations in a canonical order.  */
4941       return onepart_variable_different_p (var1, var2);
4942     }
4943 
4944   for (i = 0; i < var1->n_var_parts; i++)
4945     {
4946       if (VAR_PART_OFFSET (var1, i) != VAR_PART_OFFSET (var2, i))
4947 	return true;
4948       if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
4949 	return true;
4950       if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
4951 	return true;
4952     }
4953   return false;
4954 }
4955 
4956 /* Return true if dataflow sets OLD_SET and NEW_SET differ.  */
4957 
4958 static bool
dataflow_set_different(dataflow_set * old_set,dataflow_set * new_set)4959 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
4960 {
4961   variable_iterator_type hi;
4962   variable var1;
4963 
4964   if (old_set->vars == new_set->vars)
4965     return false;
4966 
4967   if (shared_hash_htab (old_set->vars).elements ()
4968       != shared_hash_htab (new_set->vars).elements ())
4969     return true;
4970 
4971   FOR_EACH_HASH_TABLE_ELEMENT (shared_hash_htab (old_set->vars),
4972 			       var1, variable, hi)
4973     {
4974       variable_table_type htab = shared_hash_htab (new_set->vars);
4975       variable var2 = htab.find_with_hash (var1->dv, dv_htab_hash (var1->dv));
4976       if (!var2)
4977 	{
4978 	  if (dump_file && (dump_flags & TDF_DETAILS))
4979 	    {
4980 	      fprintf (dump_file, "dataflow difference found: removal of:\n");
4981 	      dump_var (var1);
4982 	    }
4983 	  return true;
4984 	}
4985 
4986       if (variable_different_p (var1, var2))
4987 	{
4988 	  if (dump_file && (dump_flags & TDF_DETAILS))
4989 	    {
4990 	      fprintf (dump_file, "dataflow difference found: "
4991 		       "old and new follow:\n");
4992 	      dump_var (var1);
4993 	      dump_var (var2);
4994 	    }
4995 	  return true;
4996 	}
4997     }
4998 
4999   /* No need to traverse the second hashtab, if both have the same number
5000      of elements and the second one had all entries found in the first one,
5001      then it can't have any extra entries.  */
5002   return false;
5003 }
5004 
5005 /* Free the contents of dataflow set SET.  */
5006 
5007 static void
dataflow_set_destroy(dataflow_set * set)5008 dataflow_set_destroy (dataflow_set *set)
5009 {
5010   int i;
5011 
5012   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5013     attrs_list_clear (&set->regs[i]);
5014 
5015   shared_hash_destroy (set->vars);
5016   set->vars = NULL;
5017 }
5018 
5019 /* Return true if RTL X contains a SYMBOL_REF.  */
5020 
5021 static bool
contains_symbol_ref(rtx x)5022 contains_symbol_ref (rtx x)
5023 {
5024   const char *fmt;
5025   RTX_CODE code;
5026   int i;
5027 
5028   if (!x)
5029     return false;
5030 
5031   code = GET_CODE (x);
5032   if (code == SYMBOL_REF)
5033     return true;
5034 
5035   fmt = GET_RTX_FORMAT (code);
5036   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5037     {
5038       if (fmt[i] == 'e')
5039 	{
5040 	  if (contains_symbol_ref (XEXP (x, i)))
5041 	    return true;
5042 	}
5043       else if (fmt[i] == 'E')
5044 	{
5045 	  int j;
5046 	  for (j = 0; j < XVECLEN (x, i); j++)
5047 	    if (contains_symbol_ref (XVECEXP (x, i, j)))
5048 	      return true;
5049 	}
5050     }
5051 
5052   return false;
5053 }
5054 
5055 /* Shall EXPR be tracked?  */
5056 
5057 static bool
track_expr_p(tree expr,bool need_rtl)5058 track_expr_p (tree expr, bool need_rtl)
5059 {
5060   rtx decl_rtl;
5061   tree realdecl;
5062 
5063   if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
5064     return DECL_RTL_SET_P (expr);
5065 
5066   /* If EXPR is not a parameter or a variable do not track it.  */
5067   if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
5068     return 0;
5069 
5070   /* It also must have a name...  */
5071   if (!DECL_NAME (expr) && need_rtl)
5072     return 0;
5073 
5074   /* ... and a RTL assigned to it.  */
5075   decl_rtl = DECL_RTL_IF_SET (expr);
5076   if (!decl_rtl && need_rtl)
5077     return 0;
5078 
5079   /* If this expression is really a debug alias of some other declaration, we
5080      don't need to track this expression if the ultimate declaration is
5081      ignored.  */
5082   realdecl = expr;
5083   if (TREE_CODE (realdecl) == VAR_DECL && DECL_HAS_DEBUG_EXPR_P (realdecl))
5084     {
5085       realdecl = DECL_DEBUG_EXPR (realdecl);
5086       if (!DECL_P (realdecl))
5087 	{
5088 	  if (handled_component_p (realdecl)
5089 	      || (TREE_CODE (realdecl) == MEM_REF
5090 		  && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
5091 	    {
5092 	      HOST_WIDE_INT bitsize, bitpos, maxsize;
5093 	      tree innerdecl
5094 		= get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
5095 					   &maxsize);
5096 	      if (!DECL_P (innerdecl)
5097 		  || DECL_IGNORED_P (innerdecl)
5098 		  /* Do not track declarations for parts of tracked parameters
5099 		     since we want to track them as a whole instead.  */
5100 		  || (TREE_CODE (innerdecl) == PARM_DECL
5101 		      && DECL_MODE (innerdecl) != BLKmode
5102 		      && TREE_CODE (TREE_TYPE (innerdecl)) != UNION_TYPE)
5103 		  || TREE_STATIC (innerdecl)
5104 		  || bitsize <= 0
5105 		  || bitpos + bitsize > 256
5106 		  || bitsize != maxsize)
5107 		return 0;
5108 	      else
5109 		realdecl = expr;
5110 	    }
5111 	  else
5112 	    return 0;
5113 	}
5114     }
5115 
5116   /* Do not track EXPR if REALDECL it should be ignored for debugging
5117      purposes.  */
5118   if (DECL_IGNORED_P (realdecl))
5119     return 0;
5120 
5121   /* Do not track global variables until we are able to emit correct location
5122      list for them.  */
5123   if (TREE_STATIC (realdecl))
5124     return 0;
5125 
5126   /* When the EXPR is a DECL for alias of some variable (see example)
5127      the TREE_STATIC flag is not used.  Disable tracking all DECLs whose
5128      DECL_RTL contains SYMBOL_REF.
5129 
5130      Example:
5131      extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
5132      char **_dl_argv;
5133   */
5134   if (decl_rtl && MEM_P (decl_rtl)
5135       && contains_symbol_ref (XEXP (decl_rtl, 0)))
5136     return 0;
5137 
5138   /* If RTX is a memory it should not be very large (because it would be
5139      an array or struct).  */
5140   if (decl_rtl && MEM_P (decl_rtl))
5141     {
5142       /* Do not track structures and arrays.  */
5143       if (GET_MODE (decl_rtl) == BLKmode
5144 	  || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
5145 	return 0;
5146       if (MEM_SIZE_KNOWN_P (decl_rtl)
5147 	  && MEM_SIZE (decl_rtl) > MAX_VAR_PARTS)
5148 	return 0;
5149     }
5150 
5151   DECL_CHANGED (expr) = 0;
5152   DECL_CHANGED (realdecl) = 0;
5153   return 1;
5154 }
5155 
5156 /* Determine whether a given LOC refers to the same variable part as
5157    EXPR+OFFSET.  */
5158 
5159 static bool
same_variable_part_p(rtx loc,tree expr,HOST_WIDE_INT offset)5160 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
5161 {
5162   tree expr2;
5163   HOST_WIDE_INT offset2;
5164 
5165   if (! DECL_P (expr))
5166     return false;
5167 
5168   if (REG_P (loc))
5169     {
5170       expr2 = REG_EXPR (loc);
5171       offset2 = REG_OFFSET (loc);
5172     }
5173   else if (MEM_P (loc))
5174     {
5175       expr2 = MEM_EXPR (loc);
5176       offset2 = INT_MEM_OFFSET (loc);
5177     }
5178   else
5179     return false;
5180 
5181   if (! expr2 || ! DECL_P (expr2))
5182     return false;
5183 
5184   expr = var_debug_decl (expr);
5185   expr2 = var_debug_decl (expr2);
5186 
5187   return (expr == expr2 && offset == offset2);
5188 }
5189 
5190 /* LOC is a REG or MEM that we would like to track if possible.
5191    If EXPR is null, we don't know what expression LOC refers to,
5192    otherwise it refers to EXPR + OFFSET.  STORE_REG_P is true if
5193    LOC is an lvalue register.
5194 
5195    Return true if EXPR is nonnull and if LOC, or some lowpart of it,
5196    is something we can track.  When returning true, store the mode of
5197    the lowpart we can track in *MODE_OUT (if nonnull) and its offset
5198    from EXPR in *OFFSET_OUT (if nonnull).  */
5199 
5200 static bool
track_loc_p(rtx loc,tree expr,HOST_WIDE_INT offset,bool store_reg_p,enum machine_mode * mode_out,HOST_WIDE_INT * offset_out)5201 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
5202 	     enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
5203 {
5204   enum machine_mode mode;
5205 
5206   if (expr == NULL || !track_expr_p (expr, true))
5207     return false;
5208 
5209   /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
5210      whole subreg, but only the old inner part is really relevant.  */
5211   mode = GET_MODE (loc);
5212   if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
5213     {
5214       enum machine_mode pseudo_mode;
5215 
5216       pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
5217       if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
5218 	{
5219 	  offset += byte_lowpart_offset (pseudo_mode, mode);
5220 	  mode = pseudo_mode;
5221 	}
5222     }
5223 
5224   /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
5225      Do the same if we are storing to a register and EXPR occupies
5226      the whole of register LOC; in that case, the whole of EXPR is
5227      being changed.  We exclude complex modes from the second case
5228      because the real and imaginary parts are represented as separate
5229      pseudo registers, even if the whole complex value fits into one
5230      hard register.  */
5231   if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
5232        || (store_reg_p
5233 	   && !COMPLEX_MODE_P (DECL_MODE (expr))
5234 	   && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
5235       && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
5236     {
5237       mode = DECL_MODE (expr);
5238       offset = 0;
5239     }
5240 
5241   if (offset < 0 || offset >= MAX_VAR_PARTS)
5242     return false;
5243 
5244   if (mode_out)
5245     *mode_out = mode;
5246   if (offset_out)
5247     *offset_out = offset;
5248   return true;
5249 }
5250 
5251 /* Return the MODE lowpart of LOC, or null if LOC is not something we
5252    want to track.  When returning nonnull, make sure that the attributes
5253    on the returned value are updated.  */
5254 
5255 static rtx
var_lowpart(enum machine_mode mode,rtx loc)5256 var_lowpart (enum machine_mode mode, rtx loc)
5257 {
5258   unsigned int offset, reg_offset, regno;
5259 
5260   if (GET_MODE (loc) == mode)
5261     return loc;
5262 
5263   if (!REG_P (loc) && !MEM_P (loc))
5264     return NULL;
5265 
5266   offset = byte_lowpart_offset (mode, GET_MODE (loc));
5267 
5268   if (MEM_P (loc))
5269     return adjust_address_nv (loc, mode, offset);
5270 
5271   reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
5272   regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
5273 					     reg_offset, mode);
5274   return gen_rtx_REG_offset (loc, mode, regno, offset);
5275 }
5276 
5277 /* Carry information about uses and stores while walking rtx.  */
5278 
5279 struct count_use_info
5280 {
5281   /* The insn where the RTX is.  */
5282   rtx insn;
5283 
5284   /* The basic block where insn is.  */
5285   basic_block bb;
5286 
5287   /* The array of n_sets sets in the insn, as determined by cselib.  */
5288   struct cselib_set *sets;
5289   int n_sets;
5290 
5291   /* True if we're counting stores, false otherwise.  */
5292   bool store_p;
5293 };
5294 
5295 /* Find a VALUE corresponding to X.   */
5296 
5297 static inline cselib_val *
find_use_val(rtx x,enum machine_mode mode,struct count_use_info * cui)5298 find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
5299 {
5300   int i;
5301 
5302   if (cui->sets)
5303     {
5304       /* This is called after uses are set up and before stores are
5305 	 processed by cselib, so it's safe to look up srcs, but not
5306 	 dsts.  So we look up expressions that appear in srcs or in
5307 	 dest expressions, but we search the sets array for dests of
5308 	 stores.  */
5309       if (cui->store_p)
5310 	{
5311 	  /* Some targets represent memset and memcpy patterns
5312 	     by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
5313 	     (set (mem:BLK ...) (const_int ...)) or
5314 	     (set (mem:BLK ...) (mem:BLK ...)).  Don't return anything
5315 	     in that case, otherwise we end up with mode mismatches.  */
5316 	  if (mode == BLKmode && MEM_P (x))
5317 	    return NULL;
5318 	  for (i = 0; i < cui->n_sets; i++)
5319 	    if (cui->sets[i].dest == x)
5320 	      return cui->sets[i].src_elt;
5321 	}
5322       else
5323 	return cselib_lookup (x, mode, 0, VOIDmode);
5324     }
5325 
5326   return NULL;
5327 }
5328 
5329 /* Replace all registers and addresses in an expression with VALUE
5330    expressions that map back to them, unless the expression is a
5331    register.  If no mapping is or can be performed, returns NULL.  */
5332 
5333 static rtx
replace_expr_with_values(rtx loc)5334 replace_expr_with_values (rtx loc)
5335 {
5336   if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
5337     return NULL;
5338   else if (MEM_P (loc))
5339     {
5340       cselib_val *addr = cselib_lookup (XEXP (loc, 0),
5341 					get_address_mode (loc), 0,
5342 					GET_MODE (loc));
5343       if (addr)
5344 	return replace_equiv_address_nv (loc, addr->val_rtx);
5345       else
5346 	return NULL;
5347     }
5348   else
5349     return cselib_subst_to_values (loc, VOIDmode);
5350 }
5351 
5352 /* Return true if *X is a DEBUG_EXPR.  Usable as an argument to
5353    for_each_rtx to tell whether there are any DEBUG_EXPRs within
5354    RTX.  */
5355 
5356 static int
rtx_debug_expr_p(rtx * x,void * data ATTRIBUTE_UNUSED)5357 rtx_debug_expr_p (rtx *x, void *data ATTRIBUTE_UNUSED)
5358 {
5359   rtx loc = *x;
5360 
5361   return GET_CODE (loc) == DEBUG_EXPR;
5362 }
5363 
5364 /* Determine what kind of micro operation to choose for a USE.  Return
5365    MO_CLOBBER if no micro operation is to be generated.  */
5366 
5367 static enum micro_operation_type
use_type(rtx loc,struct count_use_info * cui,enum machine_mode * modep)5368 use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
5369 {
5370   tree expr;
5371 
5372   if (cui && cui->sets)
5373     {
5374       if (GET_CODE (loc) == VAR_LOCATION)
5375 	{
5376 	  if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
5377 	    {
5378 	      rtx ploc = PAT_VAR_LOCATION_LOC (loc);
5379 	      if (! VAR_LOC_UNKNOWN_P (ploc))
5380 		{
5381 		  cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
5382 						   VOIDmode);
5383 
5384 		  /* ??? flag_float_store and volatile mems are never
5385 		     given values, but we could in theory use them for
5386 		     locations.  */
5387 		  gcc_assert (val || 1);
5388 		}
5389 	      return MO_VAL_LOC;
5390 	    }
5391 	  else
5392 	    return MO_CLOBBER;
5393 	}
5394 
5395       if (REG_P (loc) || MEM_P (loc))
5396 	{
5397 	  if (modep)
5398 	    *modep = GET_MODE (loc);
5399 	  if (cui->store_p)
5400 	    {
5401 	      if (REG_P (loc)
5402 		  || (find_use_val (loc, GET_MODE (loc), cui)
5403 		      && cselib_lookup (XEXP (loc, 0),
5404 					get_address_mode (loc), 0,
5405 					GET_MODE (loc))))
5406 		return MO_VAL_SET;
5407 	    }
5408 	  else
5409 	    {
5410 	      cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5411 
5412 	      if (val && !cselib_preserved_value_p (val))
5413 		return MO_VAL_USE;
5414 	    }
5415 	}
5416     }
5417 
5418   if (REG_P (loc))
5419     {
5420       gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
5421 
5422       if (loc == cfa_base_rtx)
5423 	return MO_CLOBBER;
5424       expr = REG_EXPR (loc);
5425 
5426       if (!expr)
5427 	return MO_USE_NO_VAR;
5428       else if (target_for_debug_bind (var_debug_decl (expr)))
5429 	return MO_CLOBBER;
5430       else if (track_loc_p (loc, expr, REG_OFFSET (loc),
5431 			    false, modep, NULL))
5432 	return MO_USE;
5433       else
5434 	return MO_USE_NO_VAR;
5435     }
5436   else if (MEM_P (loc))
5437     {
5438       expr = MEM_EXPR (loc);
5439 
5440       if (!expr)
5441 	return MO_CLOBBER;
5442       else if (target_for_debug_bind (var_debug_decl (expr)))
5443 	return MO_CLOBBER;
5444       else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
5445 			    false, modep, NULL)
5446 	       /* Multi-part variables shouldn't refer to one-part
5447 		  variable names such as VALUEs (never happens) or
5448 		  DEBUG_EXPRs (only happens in the presence of debug
5449 		  insns).  */
5450 	       && (!MAY_HAVE_DEBUG_INSNS
5451 		   || !for_each_rtx (&XEXP (loc, 0), rtx_debug_expr_p, NULL)))
5452 	return MO_USE;
5453       else
5454 	return MO_CLOBBER;
5455     }
5456 
5457   return MO_CLOBBER;
5458 }
5459 
5460 /* Log to OUT information about micro-operation MOPT involving X in
5461    INSN of BB.  */
5462 
5463 static inline void
log_op_type(rtx x,basic_block bb,rtx insn,enum micro_operation_type mopt,FILE * out)5464 log_op_type (rtx x, basic_block bb, rtx insn,
5465 	     enum micro_operation_type mopt, FILE *out)
5466 {
5467   fprintf (out, "bb %i op %i insn %i %s ",
5468 	   bb->index, VTI (bb)->mos.length (),
5469 	   INSN_UID (insn), micro_operation_type_name[mopt]);
5470   print_inline_rtx (out, x, 2);
5471   fputc ('\n', out);
5472 }
5473 
5474 /* Tell whether the CONCAT used to holds a VALUE and its location
5475    needs value resolution, i.e., an attempt of mapping the location
5476    back to other incoming values.  */
5477 #define VAL_NEEDS_RESOLUTION(x) \
5478   (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5479 /* Whether the location in the CONCAT is a tracked expression, that
5480    should also be handled like a MO_USE.  */
5481 #define VAL_HOLDS_TRACK_EXPR(x) \
5482   (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5483 /* Whether the location in the CONCAT should be handled like a MO_COPY
5484    as well.  */
5485 #define VAL_EXPR_IS_COPIED(x) \
5486   (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5487 /* Whether the location in the CONCAT should be handled like a
5488    MO_CLOBBER as well.  */
5489 #define VAL_EXPR_IS_CLOBBERED(x) \
5490   (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
5491 
5492 /* All preserved VALUEs.  */
5493 static vec<rtx> preserved_values;
5494 
5495 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes.  */
5496 
5497 static void
preserve_value(cselib_val * val)5498 preserve_value (cselib_val *val)
5499 {
5500   cselib_preserve_value (val);
5501   preserved_values.safe_push (val->val_rtx);
5502 }
5503 
5504 /* Helper function for MO_VAL_LOC handling.  Return non-zero if
5505    any rtxes not suitable for CONST use not replaced by VALUEs
5506    are discovered.  */
5507 
5508 static int
non_suitable_const(rtx * x,void * data ATTRIBUTE_UNUSED)5509 non_suitable_const (rtx *x, void *data ATTRIBUTE_UNUSED)
5510 {
5511   if (*x == NULL_RTX)
5512     return 0;
5513 
5514   switch (GET_CODE (*x))
5515     {
5516     case REG:
5517     case DEBUG_EXPR:
5518     case PC:
5519     case SCRATCH:
5520     case CC0:
5521     case ASM_INPUT:
5522     case ASM_OPERANDS:
5523       return 1;
5524     case MEM:
5525       return !MEM_READONLY_P (*x);
5526     default:
5527       return 0;
5528     }
5529 }
5530 
5531 /* Add uses (register and memory references) LOC which will be tracked
5532    to VTI (bb)->mos.  INSN is instruction which the LOC is part of.  */
5533 
5534 static int
add_uses(rtx * ploc,void * data)5535 add_uses (rtx *ploc, void *data)
5536 {
5537   rtx loc = *ploc;
5538   enum machine_mode mode = VOIDmode;
5539   struct count_use_info *cui = (struct count_use_info *)data;
5540   enum micro_operation_type type = use_type (loc, cui, &mode);
5541 
5542   if (type != MO_CLOBBER)
5543     {
5544       basic_block bb = cui->bb;
5545       micro_operation mo;
5546 
5547       mo.type = type;
5548       mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
5549       mo.insn = cui->insn;
5550 
5551       if (type == MO_VAL_LOC)
5552 	{
5553 	  rtx oloc = loc;
5554 	  rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
5555 	  cselib_val *val;
5556 
5557 	  gcc_assert (cui->sets);
5558 
5559 	  if (MEM_P (vloc)
5560 	      && !REG_P (XEXP (vloc, 0))
5561 	      && !MEM_P (XEXP (vloc, 0)))
5562 	    {
5563 	      rtx mloc = vloc;
5564 	      enum machine_mode address_mode = get_address_mode (mloc);
5565 	      cselib_val *val
5566 		= cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5567 				 GET_MODE (mloc));
5568 
5569 	      if (val && !cselib_preserved_value_p (val))
5570 		preserve_value (val);
5571 	    }
5572 
5573 	  if (CONSTANT_P (vloc)
5574 	      && (GET_CODE (vloc) != CONST
5575 		  || for_each_rtx (&vloc, non_suitable_const, NULL)))
5576 	    /* For constants don't look up any value.  */;
5577 	  else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (vloc)
5578 		   && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
5579 	    {
5580 	      enum machine_mode mode2;
5581 	      enum micro_operation_type type2;
5582 	      rtx nloc = NULL;
5583 	      bool resolvable = REG_P (vloc) || MEM_P (vloc);
5584 
5585 	      if (resolvable)
5586 		nloc = replace_expr_with_values (vloc);
5587 
5588 	      if (nloc)
5589 		{
5590 		  oloc = shallow_copy_rtx (oloc);
5591 		  PAT_VAR_LOCATION_LOC (oloc) = nloc;
5592 		}
5593 
5594 	      oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
5595 
5596 	      type2 = use_type (vloc, 0, &mode2);
5597 
5598 	      gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5599 			  || type2 == MO_CLOBBER);
5600 
5601 	      if (type2 == MO_CLOBBER
5602 		  && !cselib_preserved_value_p (val))
5603 		{
5604 		  VAL_NEEDS_RESOLUTION (oloc) = resolvable;
5605 		  preserve_value (val);
5606 		}
5607 	    }
5608 	  else if (!VAR_LOC_UNKNOWN_P (vloc))
5609 	    {
5610 	      oloc = shallow_copy_rtx (oloc);
5611 	      PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
5612 	    }
5613 
5614 	  mo.u.loc = oloc;
5615 	}
5616       else if (type == MO_VAL_USE)
5617 	{
5618 	  enum machine_mode mode2 = VOIDmode;
5619 	  enum micro_operation_type type2;
5620 	  cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5621 	  rtx vloc, oloc = loc, nloc;
5622 
5623 	  gcc_assert (cui->sets);
5624 
5625 	  if (MEM_P (oloc)
5626 	      && !REG_P (XEXP (oloc, 0))
5627 	      && !MEM_P (XEXP (oloc, 0)))
5628 	    {
5629 	      rtx mloc = oloc;
5630 	      enum machine_mode address_mode = get_address_mode (mloc);
5631 	      cselib_val *val
5632 		= cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5633 				 GET_MODE (mloc));
5634 
5635 	      if (val && !cselib_preserved_value_p (val))
5636 		preserve_value (val);
5637 	    }
5638 
5639 	  type2 = use_type (loc, 0, &mode2);
5640 
5641 	  gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5642 		      || type2 == MO_CLOBBER);
5643 
5644 	  if (type2 == MO_USE)
5645 	    vloc = var_lowpart (mode2, loc);
5646 	  else
5647 	    vloc = oloc;
5648 
5649 	  /* The loc of a MO_VAL_USE may have two forms:
5650 
5651 	     (concat val src): val is at src, a value-based
5652 	     representation.
5653 
5654 	     (concat (concat val use) src): same as above, with use as
5655 	     the MO_USE tracked value, if it differs from src.
5656 
5657 	  */
5658 
5659 	  gcc_checking_assert (REG_P (loc) || MEM_P (loc));
5660 	  nloc = replace_expr_with_values (loc);
5661 	  if (!nloc)
5662 	    nloc = oloc;
5663 
5664 	  if (vloc != nloc)
5665 	    oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
5666 	  else
5667 	    oloc = val->val_rtx;
5668 
5669 	  mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
5670 
5671 	  if (type2 == MO_USE)
5672 	    VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
5673 	  if (!cselib_preserved_value_p (val))
5674 	    {
5675 	      VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
5676 	      preserve_value (val);
5677 	    }
5678 	}
5679       else
5680 	gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
5681 
5682       if (dump_file && (dump_flags & TDF_DETAILS))
5683 	log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5684       VTI (bb)->mos.safe_push (mo);
5685     }
5686 
5687   return 0;
5688 }
5689 
5690 /* Helper function for finding all uses of REG/MEM in X in insn INSN.  */
5691 
5692 static void
add_uses_1(rtx * x,void * cui)5693 add_uses_1 (rtx *x, void *cui)
5694 {
5695   for_each_rtx (x, add_uses, cui);
5696 }
5697 
5698 /* This is the value used during expansion of locations.  We want it
5699    to be unbounded, so that variables expanded deep in a recursion
5700    nest are fully evaluated, so that their values are cached
5701    correctly.  We avoid recursion cycles through other means, and we
5702    don't unshare RTL, so excess complexity is not a problem.  */
5703 #define EXPR_DEPTH (INT_MAX)
5704 /* We use this to keep too-complex expressions from being emitted as
5705    location notes, and then to debug information.  Users can trade
5706    compile time for ridiculously complex expressions, although they're
5707    seldom useful, and they may often have to be discarded as not
5708    representable anyway.  */
5709 #define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
5710 
5711 /* Attempt to reverse the EXPR operation in the debug info and record
5712    it in the cselib table.  Say for reg1 = reg2 + 6 even when reg2 is
5713    no longer live we can express its value as VAL - 6.  */
5714 
5715 static void
reverse_op(rtx val,const_rtx expr,rtx insn)5716 reverse_op (rtx val, const_rtx expr, rtx insn)
5717 {
5718   rtx src, arg, ret;
5719   cselib_val *v;
5720   struct elt_loc_list *l;
5721   enum rtx_code code;
5722   int count;
5723 
5724   if (GET_CODE (expr) != SET)
5725     return;
5726 
5727   if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
5728     return;
5729 
5730   src = SET_SRC (expr);
5731   switch (GET_CODE (src))
5732     {
5733     case PLUS:
5734     case MINUS:
5735     case XOR:
5736     case NOT:
5737     case NEG:
5738       if (!REG_P (XEXP (src, 0)))
5739 	return;
5740       break;
5741     case SIGN_EXTEND:
5742     case ZERO_EXTEND:
5743       if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
5744 	return;
5745       break;
5746     default:
5747       return;
5748     }
5749 
5750   if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
5751     return;
5752 
5753   v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
5754   if (!v || !cselib_preserved_value_p (v))
5755     return;
5756 
5757   /* Use canonical V to avoid creating multiple redundant expressions
5758      for different VALUES equivalent to V.  */
5759   v = canonical_cselib_val (v);
5760 
5761   /* Adding a reverse op isn't useful if V already has an always valid
5762      location.  Ignore ENTRY_VALUE, while it is always constant, we should
5763      prefer non-ENTRY_VALUE locations whenever possible.  */
5764   for (l = v->locs, count = 0; l; l = l->next, count++)
5765     if (CONSTANT_P (l->loc)
5766 	&& (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0)))
5767       return;
5768     /* Avoid creating too large locs lists.  */
5769     else if (count == PARAM_VALUE (PARAM_MAX_VARTRACK_REVERSE_OP_SIZE))
5770       return;
5771 
5772   switch (GET_CODE (src))
5773     {
5774     case NOT:
5775     case NEG:
5776       if (GET_MODE (v->val_rtx) != GET_MODE (val))
5777 	return;
5778       ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
5779       break;
5780     case SIGN_EXTEND:
5781     case ZERO_EXTEND:
5782       ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
5783       break;
5784     case XOR:
5785       code = XOR;
5786       goto binary;
5787     case PLUS:
5788       code = MINUS;
5789       goto binary;
5790     case MINUS:
5791       code = PLUS;
5792       goto binary;
5793     binary:
5794       if (GET_MODE (v->val_rtx) != GET_MODE (val))
5795 	return;
5796       arg = XEXP (src, 1);
5797       if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5798 	{
5799 	  arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
5800 	  if (arg == NULL_RTX)
5801 	    return;
5802 	  if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5803 	    return;
5804 	}
5805       ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
5806       if (ret == val)
5807 	/* Ensure ret isn't VALUE itself (which can happen e.g. for
5808 	   (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5809 	   breaks a lot of routines during var-tracking.  */
5810 	ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx);
5811       break;
5812     default:
5813       gcc_unreachable ();
5814     }
5815 
5816   cselib_add_permanent_equiv (v, ret, insn);
5817 }
5818 
5819 /* Add stores (register and memory references) LOC which will be tracked
5820    to VTI (bb)->mos.  EXPR is the RTL expression containing the store.
5821    CUIP->insn is instruction which the LOC is part of.  */
5822 
5823 static void
add_stores(rtx loc,const_rtx expr,void * cuip)5824 add_stores (rtx loc, const_rtx expr, void *cuip)
5825 {
5826   enum machine_mode mode = VOIDmode, mode2;
5827   struct count_use_info *cui = (struct count_use_info *)cuip;
5828   basic_block bb = cui->bb;
5829   micro_operation mo;
5830   rtx oloc = loc, nloc, src = NULL;
5831   enum micro_operation_type type = use_type (loc, cui, &mode);
5832   bool track_p = false;
5833   cselib_val *v;
5834   bool resolve, preserve;
5835 
5836   if (type == MO_CLOBBER)
5837     return;
5838 
5839   mode2 = mode;
5840 
5841   if (REG_P (loc))
5842     {
5843       gcc_assert (loc != cfa_base_rtx);
5844       if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
5845 	  || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
5846 	  || GET_CODE (expr) == CLOBBER)
5847 	{
5848 	  mo.type = MO_CLOBBER;
5849 	  mo.u.loc = loc;
5850 	  if (GET_CODE (expr) == SET
5851 	      && SET_DEST (expr) == loc
5852 	      && !unsuitable_loc (SET_SRC (expr))
5853 	      && find_use_val (loc, mode, cui))
5854 	    {
5855 	      gcc_checking_assert (type == MO_VAL_SET);
5856 	      mo.u.loc = gen_rtx_SET (VOIDmode, loc, SET_SRC (expr));
5857 	    }
5858 	}
5859       else
5860 	{
5861 	  if (GET_CODE (expr) == SET
5862 	      && SET_DEST (expr) == loc
5863 	      && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5864 	    src = var_lowpart (mode2, SET_SRC (expr));
5865 	  loc = var_lowpart (mode2, loc);
5866 
5867 	  if (src == NULL)
5868 	    {
5869 	      mo.type = MO_SET;
5870 	      mo.u.loc = loc;
5871 	    }
5872 	  else
5873 	    {
5874 	      rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5875 	      if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
5876 		{
5877 		  /* If this is an instruction copying (part of) a parameter
5878 		     passed by invisible reference to its register location,
5879 		     pretend it's a SET so that the initial memory location
5880 		     is discarded, as the parameter register can be reused
5881 		     for other purposes and we do not track locations based
5882 		     on generic registers.  */
5883 		  if (MEM_P (src)
5884 		      && REG_EXPR (loc)
5885 		      && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
5886 		      && DECL_MODE (REG_EXPR (loc)) != BLKmode
5887 		      && MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
5888 		      && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0)
5889 			 != arg_pointer_rtx)
5890 		    mo.type = MO_SET;
5891 		  else
5892 		    mo.type = MO_COPY;
5893 		}
5894 	      else
5895 		mo.type = MO_SET;
5896 	      mo.u.loc = xexpr;
5897 	    }
5898 	}
5899       mo.insn = cui->insn;
5900     }
5901   else if (MEM_P (loc)
5902 	   && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
5903 	       || cui->sets))
5904     {
5905       if (MEM_P (loc) && type == MO_VAL_SET
5906 	  && !REG_P (XEXP (loc, 0))
5907 	  && !MEM_P (XEXP (loc, 0)))
5908 	{
5909 	  rtx mloc = loc;
5910 	  enum machine_mode address_mode = get_address_mode (mloc);
5911 	  cselib_val *val = cselib_lookup (XEXP (mloc, 0),
5912 					   address_mode, 0,
5913 					   GET_MODE (mloc));
5914 
5915 	  if (val && !cselib_preserved_value_p (val))
5916 	    preserve_value (val);
5917 	}
5918 
5919       if (GET_CODE (expr) == CLOBBER || !track_p)
5920 	{
5921 	  mo.type = MO_CLOBBER;
5922 	  mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
5923 	}
5924       else
5925 	{
5926 	  if (GET_CODE (expr) == SET
5927 	      && SET_DEST (expr) == loc
5928 	      && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5929 	    src = var_lowpart (mode2, SET_SRC (expr));
5930 	  loc = var_lowpart (mode2, loc);
5931 
5932 	  if (src == NULL)
5933 	    {
5934 	      mo.type = MO_SET;
5935 	      mo.u.loc = loc;
5936 	    }
5937 	  else
5938 	    {
5939 	      rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5940 	      if (same_variable_part_p (SET_SRC (xexpr),
5941 					MEM_EXPR (loc),
5942 					INT_MEM_OFFSET (loc)))
5943 		mo.type = MO_COPY;
5944 	      else
5945 		mo.type = MO_SET;
5946 	      mo.u.loc = xexpr;
5947 	    }
5948 	}
5949       mo.insn = cui->insn;
5950     }
5951   else
5952     return;
5953 
5954   if (type != MO_VAL_SET)
5955     goto log_and_return;
5956 
5957   v = find_use_val (oloc, mode, cui);
5958 
5959   if (!v)
5960     goto log_and_return;
5961 
5962   resolve = preserve = !cselib_preserved_value_p (v);
5963 
5964   /* We cannot track values for multiple-part variables, so we track only
5965      locations for tracked parameters passed either by invisible reference
5966      or directly in multiple locations.  */
5967   if (track_p
5968       && REG_P (loc)
5969       && REG_EXPR (loc)
5970       && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
5971       && DECL_MODE (REG_EXPR (loc)) != BLKmode
5972       && TREE_CODE (TREE_TYPE (REG_EXPR (loc))) != UNION_TYPE
5973       && ((MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
5974 	   && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0) != arg_pointer_rtx)
5975           || (GET_CODE (DECL_INCOMING_RTL (REG_EXPR (loc))) == PARALLEL
5976 	      && XVECLEN (DECL_INCOMING_RTL (REG_EXPR (loc)), 0) > 1)))
5977     {
5978       /* Although we don't use the value here, it could be used later by the
5979 	 mere virtue of its existence as the operand of the reverse operation
5980 	 that gave rise to it (typically extension/truncation).  Make sure it
5981 	 is preserved as required by vt_expand_var_loc_chain.  */
5982       if (preserve)
5983 	preserve_value (v);
5984       goto log_and_return;
5985     }
5986 
5987   if (loc == stack_pointer_rtx
5988       && hard_frame_pointer_adjustment != -1
5989       && preserve)
5990     cselib_set_value_sp_based (v);
5991 
5992   nloc = replace_expr_with_values (oloc);
5993   if (nloc)
5994     oloc = nloc;
5995 
5996   if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
5997     {
5998       cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);
5999 
6000       gcc_assert (oval != v);
6001       gcc_assert (REG_P (oloc) || MEM_P (oloc));
6002 
6003       if (oval && !cselib_preserved_value_p (oval))
6004 	{
6005 	  micro_operation moa;
6006 
6007 	  preserve_value (oval);
6008 
6009 	  moa.type = MO_VAL_USE;
6010 	  moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
6011 	  VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
6012 	  moa.insn = cui->insn;
6013 
6014 	  if (dump_file && (dump_flags & TDF_DETAILS))
6015 	    log_op_type (moa.u.loc, cui->bb, cui->insn,
6016 			 moa.type, dump_file);
6017 	  VTI (bb)->mos.safe_push (moa);
6018 	}
6019 
6020       resolve = false;
6021     }
6022   else if (resolve && GET_CODE (mo.u.loc) == SET)
6023     {
6024       if (REG_P (SET_SRC (expr)) || MEM_P (SET_SRC (expr)))
6025 	nloc = replace_expr_with_values (SET_SRC (expr));
6026       else
6027 	nloc = NULL_RTX;
6028 
6029       /* Avoid the mode mismatch between oexpr and expr.  */
6030       if (!nloc && mode != mode2)
6031 	{
6032 	  nloc = SET_SRC (expr);
6033 	  gcc_assert (oloc == SET_DEST (expr));
6034 	}
6035 
6036       if (nloc && nloc != SET_SRC (mo.u.loc))
6037 	oloc = gen_rtx_SET (GET_MODE (mo.u.loc), oloc, nloc);
6038       else
6039 	{
6040 	  if (oloc == SET_DEST (mo.u.loc))
6041 	    /* No point in duplicating.  */
6042 	    oloc = mo.u.loc;
6043 	  if (!REG_P (SET_SRC (mo.u.loc)))
6044 	    resolve = false;
6045 	}
6046     }
6047   else if (!resolve)
6048     {
6049       if (GET_CODE (mo.u.loc) == SET
6050 	  && oloc == SET_DEST (mo.u.loc))
6051 	/* No point in duplicating.  */
6052 	oloc = mo.u.loc;
6053     }
6054   else
6055     resolve = false;
6056 
6057   loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
6058 
6059   if (mo.u.loc != oloc)
6060     loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
6061 
6062   /* The loc of a MO_VAL_SET may have various forms:
6063 
6064      (concat val dst): dst now holds val
6065 
6066      (concat val (set dst src)): dst now holds val, copied from src
6067 
6068      (concat (concat val dstv) dst): dst now holds val; dstv is dst
6069      after replacing mems and non-top-level regs with values.
6070 
6071      (concat (concat val dstv) (set dst src)): dst now holds val,
6072      copied from src.  dstv is a value-based representation of dst, if
6073      it differs from dst.  If resolution is needed, src is a REG, and
6074      its mode is the same as that of val.
6075 
6076      (concat (concat val (set dstv srcv)) (set dst src)): src
6077      copied to dst, holding val.  dstv and srcv are value-based
6078      representations of dst and src, respectively.
6079 
6080   */
6081 
6082   if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
6083     reverse_op (v->val_rtx, expr, cui->insn);
6084 
6085   mo.u.loc = loc;
6086 
6087   if (track_p)
6088     VAL_HOLDS_TRACK_EXPR (loc) = 1;
6089   if (preserve)
6090     {
6091       VAL_NEEDS_RESOLUTION (loc) = resolve;
6092       preserve_value (v);
6093     }
6094   if (mo.type == MO_CLOBBER)
6095     VAL_EXPR_IS_CLOBBERED (loc) = 1;
6096   if (mo.type == MO_COPY)
6097     VAL_EXPR_IS_COPIED (loc) = 1;
6098 
6099   mo.type = MO_VAL_SET;
6100 
6101  log_and_return:
6102   if (dump_file && (dump_flags & TDF_DETAILS))
6103     log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
6104   VTI (bb)->mos.safe_push (mo);
6105 }
6106 
6107 /* Arguments to the call.  */
6108 static rtx call_arguments;
6109 
6110 /* Compute call_arguments.  */
6111 
6112 static void
prepare_call_arguments(basic_block bb,rtx insn)6113 prepare_call_arguments (basic_block bb, rtx insn)
6114 {
6115   rtx link, x, call;
6116   rtx prev, cur, next;
6117   rtx this_arg = NULL_RTX;
6118   tree type = NULL_TREE, t, fndecl = NULL_TREE;
6119   tree obj_type_ref = NULL_TREE;
6120   CUMULATIVE_ARGS args_so_far_v;
6121   cumulative_args_t args_so_far;
6122 
6123   memset (&args_so_far_v, 0, sizeof (args_so_far_v));
6124   args_so_far = pack_cumulative_args (&args_so_far_v);
6125   call = get_call_rtx_from (insn);
6126   if (call)
6127     {
6128       if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
6129 	{
6130 	  rtx symbol = XEXP (XEXP (call, 0), 0);
6131 	  if (SYMBOL_REF_DECL (symbol))
6132 	    fndecl = SYMBOL_REF_DECL (symbol);
6133 	}
6134       if (fndecl == NULL_TREE)
6135 	fndecl = MEM_EXPR (XEXP (call, 0));
6136       if (fndecl
6137 	  && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
6138 	  && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
6139 	fndecl = NULL_TREE;
6140       if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
6141 	type = TREE_TYPE (fndecl);
6142       if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
6143 	{
6144 	  if (TREE_CODE (fndecl) == INDIRECT_REF
6145 	      && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
6146 	    obj_type_ref = TREE_OPERAND (fndecl, 0);
6147 	  fndecl = NULL_TREE;
6148 	}
6149       if (type)
6150 	{
6151 	  for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
6152 	       t = TREE_CHAIN (t))
6153 	    if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
6154 		&& INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
6155 	      break;
6156 	  if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
6157 	    type = NULL;
6158 	  else
6159 	    {
6160 	      int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
6161 	      link = CALL_INSN_FUNCTION_USAGE (insn);
6162 #ifndef PCC_STATIC_STRUCT_RETURN
6163 	      if (aggregate_value_p (TREE_TYPE (type), type)
6164 		  && targetm.calls.struct_value_rtx (type, 0) == 0)
6165 		{
6166 		  tree struct_addr = build_pointer_type (TREE_TYPE (type));
6167 		  enum machine_mode mode = TYPE_MODE (struct_addr);
6168 		  rtx reg;
6169 		  INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6170 					nargs + 1);
6171 		  reg = targetm.calls.function_arg (args_so_far, mode,
6172 						    struct_addr, true);
6173 		  targetm.calls.function_arg_advance (args_so_far, mode,
6174 						      struct_addr, true);
6175 		  if (reg == NULL_RTX)
6176 		    {
6177 		      for (; link; link = XEXP (link, 1))
6178 			if (GET_CODE (XEXP (link, 0)) == USE
6179 			    && MEM_P (XEXP (XEXP (link, 0), 0)))
6180 			  {
6181 			    link = XEXP (link, 1);
6182 			    break;
6183 			  }
6184 		    }
6185 		}
6186 	      else
6187 #endif
6188 		INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6189 				      nargs);
6190 	      if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
6191 		{
6192 		  enum machine_mode mode;
6193 		  t = TYPE_ARG_TYPES (type);
6194 		  mode = TYPE_MODE (TREE_VALUE (t));
6195 		  this_arg = targetm.calls.function_arg (args_so_far, mode,
6196 							 TREE_VALUE (t), true);
6197 		  if (this_arg && !REG_P (this_arg))
6198 		    this_arg = NULL_RTX;
6199 		  else if (this_arg == NULL_RTX)
6200 		    {
6201 		      for (; link; link = XEXP (link, 1))
6202 			if (GET_CODE (XEXP (link, 0)) == USE
6203 			    && MEM_P (XEXP (XEXP (link, 0), 0)))
6204 			  {
6205 			    this_arg = XEXP (XEXP (link, 0), 0);
6206 			    break;
6207 			  }
6208 		    }
6209 		}
6210 	    }
6211 	}
6212     }
6213   t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;
6214 
6215   for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
6216     if (GET_CODE (XEXP (link, 0)) == USE)
6217       {
6218 	rtx item = NULL_RTX;
6219 	x = XEXP (XEXP (link, 0), 0);
6220 	if (GET_MODE (link) == VOIDmode
6221 	    || GET_MODE (link) == BLKmode
6222 	    || (GET_MODE (link) != GET_MODE (x)
6223 		&& (GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
6224 		    || GET_MODE_CLASS (GET_MODE (x)) != MODE_INT)))
6225 	  /* Can't do anything for these, if the original type mode
6226 	     isn't known or can't be converted.  */;
6227 	else if (REG_P (x))
6228 	  {
6229 	    cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6230 	    if (val && cselib_preserved_value_p (val))
6231 	      item = val->val_rtx;
6232 	    else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
6233 	      {
6234 		enum machine_mode mode = GET_MODE (x);
6235 
6236 		while ((mode = GET_MODE_WIDER_MODE (mode)) != VOIDmode
6237 		       && GET_MODE_BITSIZE (mode) <= BITS_PER_WORD)
6238 		  {
6239 		    rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
6240 
6241 		    if (reg == NULL_RTX || !REG_P (reg))
6242 		      continue;
6243 		    val = cselib_lookup (reg, mode, 0, VOIDmode);
6244 		    if (val && cselib_preserved_value_p (val))
6245 		      {
6246 			item = val->val_rtx;
6247 			break;
6248 		      }
6249 		  }
6250 	      }
6251 	  }
6252 	else if (MEM_P (x))
6253 	  {
6254 	    rtx mem = x;
6255 	    cselib_val *val;
6256 
6257 	    if (!frame_pointer_needed)
6258 	      {
6259 		struct adjust_mem_data amd;
6260 		amd.mem_mode = VOIDmode;
6261 		amd.stack_adjust = -VTI (bb)->out.stack_adjust;
6262 		amd.side_effects = NULL_RTX;
6263 		amd.store = true;
6264 		mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
6265 					       &amd);
6266 		gcc_assert (amd.side_effects == NULL_RTX);
6267 	      }
6268 	    val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
6269 	    if (val && cselib_preserved_value_p (val))
6270 	      item = val->val_rtx;
6271 	    else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT)
6272 	      {
6273 		/* For non-integer stack argument see also if they weren't
6274 		   initialized by integers.  */
6275 		enum machine_mode imode = int_mode_for_mode (GET_MODE (mem));
6276 		if (imode != GET_MODE (mem) && imode != BLKmode)
6277 		  {
6278 		    val = cselib_lookup (adjust_address_nv (mem, imode, 0),
6279 					 imode, 0, VOIDmode);
6280 		    if (val && cselib_preserved_value_p (val))
6281 		      item = lowpart_subreg (GET_MODE (x), val->val_rtx,
6282 					     imode);
6283 		  }
6284 	      }
6285 	  }
6286 	if (item)
6287 	  {
6288 	    rtx x2 = x;
6289 	    if (GET_MODE (item) != GET_MODE (link))
6290 	      item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item));
6291 	    if (GET_MODE (x2) != GET_MODE (link))
6292 	      x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2));
6293 	    item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
6294 	    call_arguments
6295 	      = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
6296 	  }
6297 	if (t && t != void_list_node)
6298 	  {
6299 	    tree argtype = TREE_VALUE (t);
6300 	    enum machine_mode mode = TYPE_MODE (argtype);
6301 	    rtx reg;
6302 	    if (pass_by_reference (&args_so_far_v, mode, argtype, true))
6303 	      {
6304 		argtype = build_pointer_type (argtype);
6305 		mode = TYPE_MODE (argtype);
6306 	      }
6307 	    reg = targetm.calls.function_arg (args_so_far, mode,
6308 					      argtype, true);
6309 	    if (TREE_CODE (argtype) == REFERENCE_TYPE
6310 		&& INTEGRAL_TYPE_P (TREE_TYPE (argtype))
6311 		&& reg
6312 		&& REG_P (reg)
6313 		&& GET_MODE (reg) == mode
6314 		&& GET_MODE_CLASS (mode) == MODE_INT
6315 		&& REG_P (x)
6316 		&& REGNO (x) == REGNO (reg)
6317 		&& GET_MODE (x) == mode
6318 		&& item)
6319 	      {
6320 		enum machine_mode indmode
6321 		  = TYPE_MODE (TREE_TYPE (argtype));
6322 		rtx mem = gen_rtx_MEM (indmode, x);
6323 		cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
6324 		if (val && cselib_preserved_value_p (val))
6325 		  {
6326 		    item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
6327 		    call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6328 							call_arguments);
6329 		  }
6330 		else
6331 		  {
6332 		    struct elt_loc_list *l;
6333 		    tree initial;
6334 
6335 		    /* Try harder, when passing address of a constant
6336 		       pool integer it can be easily read back.  */
6337 		    item = XEXP (item, 1);
6338 		    if (GET_CODE (item) == SUBREG)
6339 		      item = SUBREG_REG (item);
6340 		    gcc_assert (GET_CODE (item) == VALUE);
6341 		    val = CSELIB_VAL_PTR (item);
6342 		    for (l = val->locs; l; l = l->next)
6343 		      if (GET_CODE (l->loc) == SYMBOL_REF
6344 			  && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
6345 			  && SYMBOL_REF_DECL (l->loc)
6346 			  && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
6347 			{
6348 			  initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
6349 			  if (tree_fits_shwi_p (initial))
6350 			    {
6351 			      item = GEN_INT (tree_to_shwi (initial));
6352 			      item = gen_rtx_CONCAT (indmode, mem, item);
6353 			      call_arguments
6354 				= gen_rtx_EXPR_LIST (VOIDmode, item,
6355 						     call_arguments);
6356 			    }
6357 			  break;
6358 			}
6359 		  }
6360 	      }
6361 	    targetm.calls.function_arg_advance (args_so_far, mode,
6362 						argtype, true);
6363 	    t = TREE_CHAIN (t);
6364 	  }
6365       }
6366 
6367   /* Add debug arguments.  */
6368   if (fndecl
6369       && TREE_CODE (fndecl) == FUNCTION_DECL
6370       && DECL_HAS_DEBUG_ARGS_P (fndecl))
6371     {
6372       vec<tree, va_gc> **debug_args = decl_debug_args_lookup (fndecl);
6373       if (debug_args)
6374 	{
6375 	  unsigned int ix;
6376 	  tree param;
6377 	  for (ix = 0; vec_safe_iterate (*debug_args, ix, &param); ix += 2)
6378 	    {
6379 	      rtx item;
6380 	      tree dtemp = (**debug_args)[ix + 1];
6381 	      enum machine_mode mode = DECL_MODE (dtemp);
6382 	      item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
6383 	      item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
6384 	      call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6385 						  call_arguments);
6386 	    }
6387 	}
6388     }
6389 
6390   /* Reverse call_arguments chain.  */
6391   prev = NULL_RTX;
6392   for (cur = call_arguments; cur; cur = next)
6393     {
6394       next = XEXP (cur, 1);
6395       XEXP (cur, 1) = prev;
6396       prev = cur;
6397     }
6398   call_arguments = prev;
6399 
6400   x = get_call_rtx_from (insn);
6401   if (x)
6402     {
6403       x = XEXP (XEXP (x, 0), 0);
6404       if (GET_CODE (x) == SYMBOL_REF)
6405 	/* Don't record anything.  */;
6406       else if (CONSTANT_P (x))
6407 	{
6408 	  x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
6409 			      pc_rtx, x);
6410 	  call_arguments
6411 	    = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6412 	}
6413       else
6414 	{
6415 	  cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6416 	  if (val && cselib_preserved_value_p (val))
6417 	    {
6418 	      x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
6419 	      call_arguments
6420 		= gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6421 	    }
6422 	}
6423     }
6424   if (this_arg)
6425     {
6426       enum machine_mode mode
6427 	= TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
6428       rtx clobbered = gen_rtx_MEM (mode, this_arg);
6429       HOST_WIDE_INT token
6430 	= tree_to_shwi (OBJ_TYPE_REF_TOKEN (obj_type_ref));
6431       if (token)
6432 	clobbered = plus_constant (mode, clobbered,
6433 				   token * GET_MODE_SIZE (mode));
6434       clobbered = gen_rtx_MEM (mode, clobbered);
6435       x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
6436       call_arguments
6437 	= gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6438     }
6439 }
6440 
6441 /* Callback for cselib_record_sets_hook, that records as micro
6442    operations uses and stores in an insn after cselib_record_sets has
6443    analyzed the sets in an insn, but before it modifies the stored
6444    values in the internal tables, unless cselib_record_sets doesn't
6445    call it directly (perhaps because we're not doing cselib in the
6446    first place, in which case sets and n_sets will be 0).  */
6447 
6448 static void
add_with_sets(rtx insn,struct cselib_set * sets,int n_sets)6449 add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
6450 {
6451   basic_block bb = BLOCK_FOR_INSN (insn);
6452   int n1, n2;
6453   struct count_use_info cui;
6454   micro_operation *mos;
6455 
6456   cselib_hook_called = true;
6457 
6458   cui.insn = insn;
6459   cui.bb = bb;
6460   cui.sets = sets;
6461   cui.n_sets = n_sets;
6462 
6463   n1 = VTI (bb)->mos.length ();
6464   cui.store_p = false;
6465   note_uses (&PATTERN (insn), add_uses_1, &cui);
6466   n2 = VTI (bb)->mos.length () - 1;
6467   mos = VTI (bb)->mos.address ();
6468 
6469   /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
6470      MO_VAL_LOC last.  */
6471   while (n1 < n2)
6472     {
6473       while (n1 < n2 && mos[n1].type == MO_USE)
6474 	n1++;
6475       while (n1 < n2 && mos[n2].type != MO_USE)
6476 	n2--;
6477       if (n1 < n2)
6478 	{
6479 	  micro_operation sw;
6480 
6481 	  sw = mos[n1];
6482 	  mos[n1] = mos[n2];
6483 	  mos[n2] = sw;
6484 	}
6485     }
6486 
6487   n2 = VTI (bb)->mos.length () - 1;
6488   while (n1 < n2)
6489     {
6490       while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
6491 	n1++;
6492       while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
6493 	n2--;
6494       if (n1 < n2)
6495 	{
6496 	  micro_operation sw;
6497 
6498 	  sw = mos[n1];
6499 	  mos[n1] = mos[n2];
6500 	  mos[n2] = sw;
6501 	}
6502     }
6503 
6504   if (CALL_P (insn))
6505     {
6506       micro_operation mo;
6507 
6508       mo.type = MO_CALL;
6509       mo.insn = insn;
6510       mo.u.loc = call_arguments;
6511       call_arguments = NULL_RTX;
6512 
6513       if (dump_file && (dump_flags & TDF_DETAILS))
6514 	log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
6515       VTI (bb)->mos.safe_push (mo);
6516     }
6517 
6518   n1 = VTI (bb)->mos.length ();
6519   /* This will record NEXT_INSN (insn), such that we can
6520      insert notes before it without worrying about any
6521      notes that MO_USEs might emit after the insn.  */
6522   cui.store_p = true;
6523   note_stores (PATTERN (insn), add_stores, &cui);
6524   n2 = VTI (bb)->mos.length () - 1;
6525   mos = VTI (bb)->mos.address ();
6526 
6527   /* Order the MO_VAL_USEs first (note_stores does nothing
6528      on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6529      insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET.  */
6530   while (n1 < n2)
6531     {
6532       while (n1 < n2 && mos[n1].type == MO_VAL_USE)
6533 	n1++;
6534       while (n1 < n2 && mos[n2].type != MO_VAL_USE)
6535 	n2--;
6536       if (n1 < n2)
6537 	{
6538 	  micro_operation sw;
6539 
6540 	  sw = mos[n1];
6541 	  mos[n1] = mos[n2];
6542 	  mos[n2] = sw;
6543 	}
6544     }
6545 
6546   n2 = VTI (bb)->mos.length () - 1;
6547   while (n1 < n2)
6548     {
6549       while (n1 < n2 && mos[n1].type == MO_CLOBBER)
6550 	n1++;
6551       while (n1 < n2 && mos[n2].type != MO_CLOBBER)
6552 	n2--;
6553       if (n1 < n2)
6554 	{
6555 	  micro_operation sw;
6556 
6557 	  sw = mos[n1];
6558 	  mos[n1] = mos[n2];
6559 	  mos[n2] = sw;
6560 	}
6561     }
6562 }
6563 
6564 static enum var_init_status
find_src_status(dataflow_set * in,rtx src)6565 find_src_status (dataflow_set *in, rtx src)
6566 {
6567   tree decl = NULL_TREE;
6568   enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
6569 
6570   if (! flag_var_tracking_uninit)
6571     status = VAR_INIT_STATUS_INITIALIZED;
6572 
6573   if (src && REG_P (src))
6574     decl = var_debug_decl (REG_EXPR (src));
6575   else if (src && MEM_P (src))
6576     decl = var_debug_decl (MEM_EXPR (src));
6577 
6578   if (src && decl)
6579     status = get_init_value (in, src, dv_from_decl (decl));
6580 
6581   return status;
6582 }
6583 
6584 /* SRC is the source of an assignment.  Use SET to try to find what
6585    was ultimately assigned to SRC.  Return that value if known,
6586    otherwise return SRC itself.  */
6587 
6588 static rtx
find_src_set_src(dataflow_set * set,rtx src)6589 find_src_set_src (dataflow_set *set, rtx src)
6590 {
6591   tree decl = NULL_TREE;   /* The variable being copied around.          */
6592   rtx set_src = NULL_RTX;  /* The value for "decl" stored in "src".      */
6593   variable var;
6594   location_chain nextp;
6595   int i;
6596   bool found;
6597 
6598   if (src && REG_P (src))
6599     decl = var_debug_decl (REG_EXPR (src));
6600   else if (src && MEM_P (src))
6601     decl = var_debug_decl (MEM_EXPR (src));
6602 
6603   if (src && decl)
6604     {
6605       decl_or_value dv = dv_from_decl (decl);
6606 
6607       var = shared_hash_find (set->vars, dv);
6608       if (var)
6609 	{
6610 	  found = false;
6611 	  for (i = 0; i < var->n_var_parts && !found; i++)
6612 	    for (nextp = var->var_part[i].loc_chain; nextp && !found;
6613 		 nextp = nextp->next)
6614 	      if (rtx_equal_p (nextp->loc, src))
6615 		{
6616 		  set_src = nextp->set_src;
6617 		  found = true;
6618 		}
6619 
6620 	}
6621     }
6622 
6623   return set_src;
6624 }
6625 
6626 /* Compute the changes of variable locations in the basic block BB.  */
6627 
6628 static bool
compute_bb_dataflow(basic_block bb)6629 compute_bb_dataflow (basic_block bb)
6630 {
6631   unsigned int i;
6632   micro_operation *mo;
6633   bool changed;
6634   dataflow_set old_out;
6635   dataflow_set *in = &VTI (bb)->in;
6636   dataflow_set *out = &VTI (bb)->out;
6637 
6638   dataflow_set_init (&old_out);
6639   dataflow_set_copy (&old_out, out);
6640   dataflow_set_copy (out, in);
6641 
6642   if (MAY_HAVE_DEBUG_INSNS)
6643     local_get_addr_cache = pointer_map_create ();
6644 
6645   FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
6646     {
6647       rtx insn = mo->insn;
6648 
6649       switch (mo->type)
6650 	{
6651 	  case MO_CALL:
6652 	    dataflow_set_clear_at_call (out);
6653 	    break;
6654 
6655 	  case MO_USE:
6656 	    {
6657 	      rtx loc = mo->u.loc;
6658 
6659 	      if (REG_P (loc))
6660 		var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6661 	      else if (MEM_P (loc))
6662 		var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6663 	    }
6664 	    break;
6665 
6666 	  case MO_VAL_LOC:
6667 	    {
6668 	      rtx loc = mo->u.loc;
6669 	      rtx val, vloc;
6670 	      tree var;
6671 
6672 	      if (GET_CODE (loc) == CONCAT)
6673 		{
6674 		  val = XEXP (loc, 0);
6675 		  vloc = XEXP (loc, 1);
6676 		}
6677 	      else
6678 		{
6679 		  val = NULL_RTX;
6680 		  vloc = loc;
6681 		}
6682 
6683 	      var = PAT_VAR_LOCATION_DECL (vloc);
6684 
6685 	      clobber_variable_part (out, NULL_RTX,
6686 				     dv_from_decl (var), 0, NULL_RTX);
6687 	      if (val)
6688 		{
6689 		  if (VAL_NEEDS_RESOLUTION (loc))
6690 		    val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6691 		  set_variable_part (out, val, dv_from_decl (var), 0,
6692 				     VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6693 				     INSERT);
6694 		}
6695 	      else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
6696 		set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
6697 				   dv_from_decl (var), 0,
6698 				   VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6699 				   INSERT);
6700 	    }
6701 	    break;
6702 
6703 	  case MO_VAL_USE:
6704 	    {
6705 	      rtx loc = mo->u.loc;
6706 	      rtx val, vloc, uloc;
6707 
6708 	      vloc = uloc = XEXP (loc, 1);
6709 	      val = XEXP (loc, 0);
6710 
6711 	      if (GET_CODE (val) == CONCAT)
6712 		{
6713 		  uloc = XEXP (val, 1);
6714 		  val = XEXP (val, 0);
6715 		}
6716 
6717 	      if (VAL_NEEDS_RESOLUTION (loc))
6718 		val_resolve (out, val, vloc, insn);
6719 	      else
6720 		val_store (out, val, uloc, insn, false);
6721 
6722 	      if (VAL_HOLDS_TRACK_EXPR (loc))
6723 		{
6724 		  if (GET_CODE (uloc) == REG)
6725 		    var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6726 				 NULL);
6727 		  else if (GET_CODE (uloc) == MEM)
6728 		    var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6729 				 NULL);
6730 		}
6731 	    }
6732 	    break;
6733 
6734 	  case MO_VAL_SET:
6735 	    {
6736 	      rtx loc = mo->u.loc;
6737 	      rtx val, vloc, uloc;
6738 	      rtx dstv, srcv;
6739 
6740 	      vloc = loc;
6741 	      uloc = XEXP (vloc, 1);
6742 	      val = XEXP (vloc, 0);
6743 	      vloc = uloc;
6744 
6745 	      if (GET_CODE (uloc) == SET)
6746 		{
6747 		  dstv = SET_DEST (uloc);
6748 		  srcv = SET_SRC (uloc);
6749 		}
6750 	      else
6751 		{
6752 		  dstv = uloc;
6753 		  srcv = NULL;
6754 		}
6755 
6756 	      if (GET_CODE (val) == CONCAT)
6757 		{
6758 		  dstv = vloc = XEXP (val, 1);
6759 		  val = XEXP (val, 0);
6760 		}
6761 
6762 	      if (GET_CODE (vloc) == SET)
6763 		{
6764 		  srcv = SET_SRC (vloc);
6765 
6766 		  gcc_assert (val != srcv);
6767 		  gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6768 
6769 		  dstv = vloc = SET_DEST (vloc);
6770 
6771 		  if (VAL_NEEDS_RESOLUTION (loc))
6772 		    val_resolve (out, val, srcv, insn);
6773 		}
6774 	      else if (VAL_NEEDS_RESOLUTION (loc))
6775 		{
6776 		  gcc_assert (GET_CODE (uloc) == SET
6777 			      && GET_CODE (SET_SRC (uloc)) == REG);
6778 		  val_resolve (out, val, SET_SRC (uloc), insn);
6779 		}
6780 
6781 	      if (VAL_HOLDS_TRACK_EXPR (loc))
6782 		{
6783 		  if (VAL_EXPR_IS_CLOBBERED (loc))
6784 		    {
6785 		      if (REG_P (uloc))
6786 			var_reg_delete (out, uloc, true);
6787 		      else if (MEM_P (uloc))
6788 			{
6789 			  gcc_assert (MEM_P (dstv));
6790 			  gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
6791 			  var_mem_delete (out, dstv, true);
6792 			}
6793 		    }
6794 		  else
6795 		    {
6796 		      bool copied_p = VAL_EXPR_IS_COPIED (loc);
6797 		      rtx src = NULL, dst = uloc;
6798 		      enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6799 
6800 		      if (GET_CODE (uloc) == SET)
6801 			{
6802 			  src = SET_SRC (uloc);
6803 			  dst = SET_DEST (uloc);
6804 			}
6805 
6806 		      if (copied_p)
6807 			{
6808 			  if (flag_var_tracking_uninit)
6809 			    {
6810 			      status = find_src_status (in, src);
6811 
6812 			      if (status == VAR_INIT_STATUS_UNKNOWN)
6813 				status = find_src_status (out, src);
6814 			    }
6815 
6816 			  src = find_src_set_src (in, src);
6817 			}
6818 
6819 		      if (REG_P (dst))
6820 			var_reg_delete_and_set (out, dst, !copied_p,
6821 						status, srcv);
6822 		      else if (MEM_P (dst))
6823 			{
6824 			  gcc_assert (MEM_P (dstv));
6825 			  gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
6826 			  var_mem_delete_and_set (out, dstv, !copied_p,
6827 						  status, srcv);
6828 			}
6829 		    }
6830 		}
6831 	      else if (REG_P (uloc))
6832 		var_regno_delete (out, REGNO (uloc));
6833 	      else if (MEM_P (uloc))
6834 		{
6835 		  gcc_checking_assert (GET_CODE (vloc) == MEM);
6836 		  gcc_checking_assert (dstv == vloc);
6837 		  if (dstv != vloc)
6838 		    clobber_overlapping_mems (out, vloc);
6839 		}
6840 
6841 	      val_store (out, val, dstv, insn, true);
6842 	    }
6843 	    break;
6844 
6845 	  case MO_SET:
6846 	    {
6847 	      rtx loc = mo->u.loc;
6848 	      rtx set_src = NULL;
6849 
6850 	      if (GET_CODE (loc) == SET)
6851 		{
6852 		  set_src = SET_SRC (loc);
6853 		  loc = SET_DEST (loc);
6854 		}
6855 
6856 	      if (REG_P (loc))
6857 		var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6858 					set_src);
6859 	      else if (MEM_P (loc))
6860 		var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6861 					set_src);
6862 	    }
6863 	    break;
6864 
6865 	  case MO_COPY:
6866 	    {
6867 	      rtx loc = mo->u.loc;
6868 	      enum var_init_status src_status;
6869 	      rtx set_src = NULL;
6870 
6871 	      if (GET_CODE (loc) == SET)
6872 		{
6873 		  set_src = SET_SRC (loc);
6874 		  loc = SET_DEST (loc);
6875 		}
6876 
6877 	      if (! flag_var_tracking_uninit)
6878 		src_status = VAR_INIT_STATUS_INITIALIZED;
6879 	      else
6880 		{
6881 		  src_status = find_src_status (in, set_src);
6882 
6883 		  if (src_status == VAR_INIT_STATUS_UNKNOWN)
6884 		    src_status = find_src_status (out, set_src);
6885 		}
6886 
6887 	      set_src = find_src_set_src (in, set_src);
6888 
6889 	      if (REG_P (loc))
6890 		var_reg_delete_and_set (out, loc, false, src_status, set_src);
6891 	      else if (MEM_P (loc))
6892 		var_mem_delete_and_set (out, loc, false, src_status, set_src);
6893 	    }
6894 	    break;
6895 
6896 	  case MO_USE_NO_VAR:
6897 	    {
6898 	      rtx loc = mo->u.loc;
6899 
6900 	      if (REG_P (loc))
6901 		var_reg_delete (out, loc, false);
6902 	      else if (MEM_P (loc))
6903 		var_mem_delete (out, loc, false);
6904 	    }
6905 	    break;
6906 
6907 	  case MO_CLOBBER:
6908 	    {
6909 	      rtx loc = mo->u.loc;
6910 
6911 	      if (REG_P (loc))
6912 		var_reg_delete (out, loc, true);
6913 	      else if (MEM_P (loc))
6914 		var_mem_delete (out, loc, true);
6915 	    }
6916 	    break;
6917 
6918 	  case MO_ADJUST:
6919 	    out->stack_adjust += mo->u.adjust;
6920 	    break;
6921 	}
6922     }
6923 
6924   if (MAY_HAVE_DEBUG_INSNS)
6925     {
6926       pointer_map_destroy (local_get_addr_cache);
6927       local_get_addr_cache = NULL;
6928 
6929       dataflow_set_equiv_regs (out);
6930       shared_hash_htab (out->vars)
6931 	.traverse <dataflow_set *, canonicalize_values_mark> (out);
6932       shared_hash_htab (out->vars)
6933 	.traverse <dataflow_set *, canonicalize_values_star> (out);
6934 #if ENABLE_CHECKING
6935       shared_hash_htab (out->vars)
6936 	.traverse <dataflow_set *, canonicalize_loc_order_check> (out);
6937 #endif
6938     }
6939   changed = dataflow_set_different (&old_out, out);
6940   dataflow_set_destroy (&old_out);
6941   return changed;
6942 }
6943 
6944 /* Find the locations of variables in the whole function.  */
6945 
6946 static bool
vt_find_locations(void)6947 vt_find_locations (void)
6948 {
6949   fibheap_t worklist, pending, fibheap_swap;
6950   sbitmap visited, in_worklist, in_pending, sbitmap_swap;
6951   basic_block bb;
6952   edge e;
6953   int *bb_order;
6954   int *rc_order;
6955   int i;
6956   int htabsz = 0;
6957   int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
6958   bool success = true;
6959 
6960   timevar_push (TV_VAR_TRACKING_DATAFLOW);
6961   /* Compute reverse completion order of depth first search of the CFG
6962      so that the data-flow runs faster.  */
6963   rc_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
6964   bb_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
6965   pre_and_rev_post_order_compute (NULL, rc_order, false);
6966   for (i = 0; i < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; i++)
6967     bb_order[rc_order[i]] = i;
6968   free (rc_order);
6969 
6970   worklist = fibheap_new ();
6971   pending = fibheap_new ();
6972   visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
6973   in_worklist = sbitmap_alloc (last_basic_block_for_fn (cfun));
6974   in_pending = sbitmap_alloc (last_basic_block_for_fn (cfun));
6975   bitmap_clear (in_worklist);
6976 
6977   FOR_EACH_BB_FN (bb, cfun)
6978     fibheap_insert (pending, bb_order[bb->index], bb);
6979   bitmap_ones (in_pending);
6980 
6981   while (success && !fibheap_empty (pending))
6982     {
6983       fibheap_swap = pending;
6984       pending = worklist;
6985       worklist = fibheap_swap;
6986       sbitmap_swap = in_pending;
6987       in_pending = in_worklist;
6988       in_worklist = sbitmap_swap;
6989 
6990       bitmap_clear (visited);
6991 
6992       while (!fibheap_empty (worklist))
6993 	{
6994 	  bb = (basic_block) fibheap_extract_min (worklist);
6995 	  bitmap_clear_bit (in_worklist, bb->index);
6996 	  gcc_assert (!bitmap_bit_p (visited, bb->index));
6997 	  if (!bitmap_bit_p (visited, bb->index))
6998 	    {
6999 	      bool changed;
7000 	      edge_iterator ei;
7001 	      int oldinsz, oldoutsz;
7002 
7003 	      bitmap_set_bit (visited, bb->index);
7004 
7005 	      if (VTI (bb)->in.vars)
7006 		{
7007 		  htabsz
7008 		    -= shared_hash_htab (VTI (bb)->in.vars).size ()
7009 			+ shared_hash_htab (VTI (bb)->out.vars).size ();
7010 		  oldinsz = shared_hash_htab (VTI (bb)->in.vars).elements ();
7011 		  oldoutsz = shared_hash_htab (VTI (bb)->out.vars).elements ();
7012 		}
7013 	      else
7014 		oldinsz = oldoutsz = 0;
7015 
7016 	      if (MAY_HAVE_DEBUG_INSNS)
7017 		{
7018 		  dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
7019 		  bool first = true, adjust = false;
7020 
7021 		  /* Calculate the IN set as the intersection of
7022 		     predecessor OUT sets.  */
7023 
7024 		  dataflow_set_clear (in);
7025 		  dst_can_be_shared = true;
7026 
7027 		  FOR_EACH_EDGE (e, ei, bb->preds)
7028 		    if (!VTI (e->src)->flooded)
7029 		      gcc_assert (bb_order[bb->index]
7030 				  <= bb_order[e->src->index]);
7031 		    else if (first)
7032 		      {
7033 			dataflow_set_copy (in, &VTI (e->src)->out);
7034 			first_out = &VTI (e->src)->out;
7035 			first = false;
7036 		      }
7037 		    else
7038 		      {
7039 			dataflow_set_merge (in, &VTI (e->src)->out);
7040 			adjust = true;
7041 		      }
7042 
7043 		  if (adjust)
7044 		    {
7045 		      dataflow_post_merge_adjust (in, &VTI (bb)->permp);
7046 #if ENABLE_CHECKING
7047 		      /* Merge and merge_adjust should keep entries in
7048 			 canonical order.  */
7049 		      shared_hash_htab (in->vars)
7050 			.traverse <dataflow_set *,
7051 				   canonicalize_loc_order_check> (in);
7052 #endif
7053 		      if (dst_can_be_shared)
7054 			{
7055 			  shared_hash_destroy (in->vars);
7056 			  in->vars = shared_hash_copy (first_out->vars);
7057 			}
7058 		    }
7059 
7060 		  VTI (bb)->flooded = true;
7061 		}
7062 	      else
7063 		{
7064 		  /* Calculate the IN set as union of predecessor OUT sets.  */
7065 		  dataflow_set_clear (&VTI (bb)->in);
7066 		  FOR_EACH_EDGE (e, ei, bb->preds)
7067 		    dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
7068 		}
7069 
7070 	      changed = compute_bb_dataflow (bb);
7071 	      htabsz += shared_hash_htab (VTI (bb)->in.vars).size ()
7072 			 + shared_hash_htab (VTI (bb)->out.vars).size ();
7073 
7074 	      if (htabmax && htabsz > htabmax)
7075 		{
7076 		  if (MAY_HAVE_DEBUG_INSNS)
7077 		    inform (DECL_SOURCE_LOCATION (cfun->decl),
7078 			    "variable tracking size limit exceeded with "
7079 			    "-fvar-tracking-assignments, retrying without");
7080 		  else
7081 		    inform (DECL_SOURCE_LOCATION (cfun->decl),
7082 			    "variable tracking size limit exceeded");
7083 		  success = false;
7084 		  break;
7085 		}
7086 
7087 	      if (changed)
7088 		{
7089 		  FOR_EACH_EDGE (e, ei, bb->succs)
7090 		    {
7091 		      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7092 			continue;
7093 
7094 		      if (bitmap_bit_p (visited, e->dest->index))
7095 			{
7096 			  if (!bitmap_bit_p (in_pending, e->dest->index))
7097 			    {
7098 			      /* Send E->DEST to next round.  */
7099 			      bitmap_set_bit (in_pending, e->dest->index);
7100 			      fibheap_insert (pending,
7101 					      bb_order[e->dest->index],
7102 					      e->dest);
7103 			    }
7104 			}
7105 		      else if (!bitmap_bit_p (in_worklist, e->dest->index))
7106 			{
7107 			  /* Add E->DEST to current round.  */
7108 			  bitmap_set_bit (in_worklist, e->dest->index);
7109 			  fibheap_insert (worklist, bb_order[e->dest->index],
7110 					  e->dest);
7111 			}
7112 		    }
7113 		}
7114 
7115 	      if (dump_file)
7116 		fprintf (dump_file,
7117 			 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
7118 			 bb->index,
7119 			 (int)shared_hash_htab (VTI (bb)->in.vars).size (),
7120 			 oldinsz,
7121 			 (int)shared_hash_htab (VTI (bb)->out.vars).size (),
7122 			 oldoutsz,
7123 			 (int)worklist->nodes, (int)pending->nodes, htabsz);
7124 
7125 	      if (dump_file && (dump_flags & TDF_DETAILS))
7126 		{
7127 		  fprintf (dump_file, "BB %i IN:\n", bb->index);
7128 		  dump_dataflow_set (&VTI (bb)->in);
7129 		  fprintf (dump_file, "BB %i OUT:\n", bb->index);
7130 		  dump_dataflow_set (&VTI (bb)->out);
7131 		}
7132 	    }
7133 	}
7134     }
7135 
7136   if (success && MAY_HAVE_DEBUG_INSNS)
7137     FOR_EACH_BB_FN (bb, cfun)
7138       gcc_assert (VTI (bb)->flooded);
7139 
7140   free (bb_order);
7141   fibheap_delete (worklist);
7142   fibheap_delete (pending);
7143   sbitmap_free (visited);
7144   sbitmap_free (in_worklist);
7145   sbitmap_free (in_pending);
7146 
7147   timevar_pop (TV_VAR_TRACKING_DATAFLOW);
7148   return success;
7149 }
7150 
7151 /* Print the content of the LIST to dump file.  */
7152 
7153 static void
dump_attrs_list(attrs list)7154 dump_attrs_list (attrs list)
7155 {
7156   for (; list; list = list->next)
7157     {
7158       if (dv_is_decl_p (list->dv))
7159 	print_mem_expr (dump_file, dv_as_decl (list->dv));
7160       else
7161 	print_rtl_single (dump_file, dv_as_value (list->dv));
7162       fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
7163     }
7164   fprintf (dump_file, "\n");
7165 }
7166 
7167 /* Print the information about variable *SLOT to dump file.  */
7168 
7169 int
dump_var_tracking_slot(variable_def ** slot,void * data ATTRIBUTE_UNUSED)7170 dump_var_tracking_slot (variable_def **slot, void *data ATTRIBUTE_UNUSED)
7171 {
7172   variable var = *slot;
7173 
7174   dump_var (var);
7175 
7176   /* Continue traversing the hash table.  */
7177   return 1;
7178 }
7179 
7180 /* Print the information about variable VAR to dump file.  */
7181 
7182 static void
dump_var(variable var)7183 dump_var (variable var)
7184 {
7185   int i;
7186   location_chain node;
7187 
7188   if (dv_is_decl_p (var->dv))
7189     {
7190       const_tree decl = dv_as_decl (var->dv);
7191 
7192       if (DECL_NAME (decl))
7193 	{
7194 	  fprintf (dump_file, "  name: %s",
7195 		   IDENTIFIER_POINTER (DECL_NAME (decl)));
7196 	  if (dump_flags & TDF_UID)
7197 	    fprintf (dump_file, "D.%u", DECL_UID (decl));
7198 	}
7199       else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
7200 	fprintf (dump_file, "  name: D#%u", DEBUG_TEMP_UID (decl));
7201       else
7202 	fprintf (dump_file, "  name: D.%u", DECL_UID (decl));
7203       fprintf (dump_file, "\n");
7204     }
7205   else
7206     {
7207       fputc (' ', dump_file);
7208       print_rtl_single (dump_file, dv_as_value (var->dv));
7209     }
7210 
7211   for (i = 0; i < var->n_var_parts; i++)
7212     {
7213       fprintf (dump_file, "    offset %ld\n",
7214 	       (long)(var->onepart ? 0 : VAR_PART_OFFSET (var, i)));
7215       for (node = var->var_part[i].loc_chain; node; node = node->next)
7216 	{
7217 	  fprintf (dump_file, "      ");
7218 	  if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
7219 	    fprintf (dump_file, "[uninit]");
7220 	  print_rtl_single (dump_file, node->loc);
7221 	}
7222     }
7223 }
7224 
7225 /* Print the information about variables from hash table VARS to dump file.  */
7226 
7227 static void
dump_vars(variable_table_type vars)7228 dump_vars (variable_table_type vars)
7229 {
7230   if (vars.elements () > 0)
7231     {
7232       fprintf (dump_file, "Variables:\n");
7233       vars.traverse <void *, dump_var_tracking_slot> (NULL);
7234     }
7235 }
7236 
7237 /* Print the dataflow set SET to dump file.  */
7238 
7239 static void
dump_dataflow_set(dataflow_set * set)7240 dump_dataflow_set (dataflow_set *set)
7241 {
7242   int i;
7243 
7244   fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
7245 	   set->stack_adjust);
7246   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7247     {
7248       if (set->regs[i])
7249 	{
7250 	  fprintf (dump_file, "Reg %d:", i);
7251 	  dump_attrs_list (set->regs[i]);
7252 	}
7253     }
7254   dump_vars (shared_hash_htab (set->vars));
7255   fprintf (dump_file, "\n");
7256 }
7257 
7258 /* Print the IN and OUT sets for each basic block to dump file.  */
7259 
7260 static void
dump_dataflow_sets(void)7261 dump_dataflow_sets (void)
7262 {
7263   basic_block bb;
7264 
7265   FOR_EACH_BB_FN (bb, cfun)
7266     {
7267       fprintf (dump_file, "\nBasic block %d:\n", bb->index);
7268       fprintf (dump_file, "IN:\n");
7269       dump_dataflow_set (&VTI (bb)->in);
7270       fprintf (dump_file, "OUT:\n");
7271       dump_dataflow_set (&VTI (bb)->out);
7272     }
7273 }
7274 
7275 /* Return the variable for DV in dropped_values, inserting one if
7276    requested with INSERT.  */
7277 
7278 static inline variable
variable_from_dropped(decl_or_value dv,enum insert_option insert)7279 variable_from_dropped (decl_or_value dv, enum insert_option insert)
7280 {
7281   variable_def **slot;
7282   variable empty_var;
7283   onepart_enum_t onepart;
7284 
7285   slot = dropped_values.find_slot_with_hash (dv, dv_htab_hash (dv), insert);
7286 
7287   if (!slot)
7288     return NULL;
7289 
7290   if (*slot)
7291     return *slot;
7292 
7293   gcc_checking_assert (insert == INSERT);
7294 
7295   onepart = dv_onepart_p (dv);
7296 
7297   gcc_checking_assert (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR);
7298 
7299   empty_var = (variable) pool_alloc (onepart_pool (onepart));
7300   empty_var->dv = dv;
7301   empty_var->refcount = 1;
7302   empty_var->n_var_parts = 0;
7303   empty_var->onepart = onepart;
7304   empty_var->in_changed_variables = false;
7305   empty_var->var_part[0].loc_chain = NULL;
7306   empty_var->var_part[0].cur_loc = NULL;
7307   VAR_LOC_1PAUX (empty_var) = NULL;
7308   set_dv_changed (dv, true);
7309 
7310   *slot = empty_var;
7311 
7312   return empty_var;
7313 }
7314 
7315 /* Recover the one-part aux from dropped_values.  */
7316 
7317 static struct onepart_aux *
recover_dropped_1paux(variable var)7318 recover_dropped_1paux (variable var)
7319 {
7320   variable dvar;
7321 
7322   gcc_checking_assert (var->onepart);
7323 
7324   if (VAR_LOC_1PAUX (var))
7325     return VAR_LOC_1PAUX (var);
7326 
7327   if (var->onepart == ONEPART_VDECL)
7328     return NULL;
7329 
7330   dvar = variable_from_dropped (var->dv, NO_INSERT);
7331 
7332   if (!dvar)
7333     return NULL;
7334 
7335   VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (dvar);
7336   VAR_LOC_1PAUX (dvar) = NULL;
7337 
7338   return VAR_LOC_1PAUX (var);
7339 }
7340 
7341 /* Add variable VAR to the hash table of changed variables and
7342    if it has no locations delete it from SET's hash table.  */
7343 
7344 static void
variable_was_changed(variable var,dataflow_set * set)7345 variable_was_changed (variable var, dataflow_set *set)
7346 {
7347   hashval_t hash = dv_htab_hash (var->dv);
7348 
7349   if (emit_notes)
7350     {
7351       variable_def **slot;
7352 
7353       /* Remember this decl or VALUE has been added to changed_variables.  */
7354       set_dv_changed (var->dv, true);
7355 
7356       slot = changed_variables.find_slot_with_hash (var->dv, hash, INSERT);
7357 
7358       if (*slot)
7359 	{
7360 	  variable old_var = *slot;
7361 	  gcc_assert (old_var->in_changed_variables);
7362 	  old_var->in_changed_variables = false;
7363 	  if (var != old_var && var->onepart)
7364 	    {
7365 	      /* Restore the auxiliary info from an empty variable
7366 		 previously created for changed_variables, so it is
7367 		 not lost.  */
7368 	      gcc_checking_assert (!VAR_LOC_1PAUX (var));
7369 	      VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (old_var);
7370 	      VAR_LOC_1PAUX (old_var) = NULL;
7371 	    }
7372 	  variable_htab_free (*slot);
7373 	}
7374 
7375       if (set && var->n_var_parts == 0)
7376 	{
7377 	  onepart_enum_t onepart = var->onepart;
7378 	  variable empty_var = NULL;
7379 	  variable_def **dslot = NULL;
7380 
7381 	  if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR)
7382 	    {
7383 	      dslot = dropped_values.find_slot_with_hash (var->dv,
7384 						dv_htab_hash (var->dv),
7385 						INSERT);
7386 	      empty_var = *dslot;
7387 
7388 	      if (empty_var)
7389 		{
7390 		  gcc_checking_assert (!empty_var->in_changed_variables);
7391 		  if (!VAR_LOC_1PAUX (var))
7392 		    {
7393 		      VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (empty_var);
7394 		      VAR_LOC_1PAUX (empty_var) = NULL;
7395 		    }
7396 		  else
7397 		    gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
7398 		}
7399 	    }
7400 
7401 	  if (!empty_var)
7402 	    {
7403 	      empty_var = (variable) pool_alloc (onepart_pool (onepart));
7404 	      empty_var->dv = var->dv;
7405 	      empty_var->refcount = 1;
7406 	      empty_var->n_var_parts = 0;
7407 	      empty_var->onepart = onepart;
7408 	      if (dslot)
7409 		{
7410 		  empty_var->refcount++;
7411 		  *dslot = empty_var;
7412 		}
7413 	    }
7414 	  else
7415 	    empty_var->refcount++;
7416 	  empty_var->in_changed_variables = true;
7417 	  *slot = empty_var;
7418 	  if (onepart)
7419 	    {
7420 	      empty_var->var_part[0].loc_chain = NULL;
7421 	      empty_var->var_part[0].cur_loc = NULL;
7422 	      VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (var);
7423 	      VAR_LOC_1PAUX (var) = NULL;
7424 	    }
7425 	  goto drop_var;
7426 	}
7427       else
7428 	{
7429 	  if (var->onepart && !VAR_LOC_1PAUX (var))
7430 	    recover_dropped_1paux (var);
7431 	  var->refcount++;
7432 	  var->in_changed_variables = true;
7433 	  *slot = var;
7434 	}
7435     }
7436   else
7437     {
7438       gcc_assert (set);
7439       if (var->n_var_parts == 0)
7440 	{
7441 	  variable_def **slot;
7442 
7443 	drop_var:
7444 	  slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
7445 	  if (slot)
7446 	    {
7447 	      if (shared_hash_shared (set->vars))
7448 		slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
7449 						      NO_INSERT);
7450 	      shared_hash_htab (set->vars).clear_slot (slot);
7451 	    }
7452 	}
7453     }
7454 }
7455 
7456 /* Look for the index in VAR->var_part corresponding to OFFSET.
7457    Return -1 if not found.  If INSERTION_POINT is non-NULL, the
7458    referenced int will be set to the index that the part has or should
7459    have, if it should be inserted.  */
7460 
7461 static inline int
find_variable_location_part(variable var,HOST_WIDE_INT offset,int * insertion_point)7462 find_variable_location_part (variable var, HOST_WIDE_INT offset,
7463 			     int *insertion_point)
7464 {
7465   int pos, low, high;
7466 
7467   if (var->onepart)
7468     {
7469       if (offset != 0)
7470 	return -1;
7471 
7472       if (insertion_point)
7473 	*insertion_point = 0;
7474 
7475       return var->n_var_parts - 1;
7476     }
7477 
7478   /* Find the location part.  */
7479   low = 0;
7480   high = var->n_var_parts;
7481   while (low != high)
7482     {
7483       pos = (low + high) / 2;
7484       if (VAR_PART_OFFSET (var, pos) < offset)
7485 	low = pos + 1;
7486       else
7487 	high = pos;
7488     }
7489   pos = low;
7490 
7491   if (insertion_point)
7492     *insertion_point = pos;
7493 
7494   if (pos < var->n_var_parts && VAR_PART_OFFSET (var, pos) == offset)
7495     return pos;
7496 
7497   return -1;
7498 }
7499 
7500 static variable_def **
set_slot_part(dataflow_set * set,rtx loc,variable_def ** slot,decl_or_value dv,HOST_WIDE_INT offset,enum var_init_status initialized,rtx set_src)7501 set_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7502 	       decl_or_value dv, HOST_WIDE_INT offset,
7503 	       enum var_init_status initialized, rtx set_src)
7504 {
7505   int pos;
7506   location_chain node, next;
7507   location_chain *nextp;
7508   variable var;
7509   onepart_enum_t onepart;
7510 
7511   var = *slot;
7512 
7513   if (var)
7514     onepart = var->onepart;
7515   else
7516     onepart = dv_onepart_p (dv);
7517 
7518   gcc_checking_assert (offset == 0 || !onepart);
7519   gcc_checking_assert (loc != dv_as_opaque (dv));
7520 
7521   if (! flag_var_tracking_uninit)
7522     initialized = VAR_INIT_STATUS_INITIALIZED;
7523 
7524   if (!var)
7525     {
7526       /* Create new variable information.  */
7527       var = (variable) pool_alloc (onepart_pool (onepart));
7528       var->dv = dv;
7529       var->refcount = 1;
7530       var->n_var_parts = 1;
7531       var->onepart = onepart;
7532       var->in_changed_variables = false;
7533       if (var->onepart)
7534 	VAR_LOC_1PAUX (var) = NULL;
7535       else
7536 	VAR_PART_OFFSET (var, 0) = offset;
7537       var->var_part[0].loc_chain = NULL;
7538       var->var_part[0].cur_loc = NULL;
7539       *slot = var;
7540       pos = 0;
7541       nextp = &var->var_part[0].loc_chain;
7542     }
7543   else if (onepart)
7544     {
7545       int r = -1, c = 0;
7546 
7547       gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
7548 
7549       pos = 0;
7550 
7551       if (GET_CODE (loc) == VALUE)
7552 	{
7553 	  for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7554 	       nextp = &node->next)
7555 	    if (GET_CODE (node->loc) == VALUE)
7556 	      {
7557 		if (node->loc == loc)
7558 		  {
7559 		    r = 0;
7560 		    break;
7561 		  }
7562 		if (canon_value_cmp (node->loc, loc))
7563 		  c++;
7564 		else
7565 		  {
7566 		    r = 1;
7567 		    break;
7568 		  }
7569 	      }
7570 	    else if (REG_P (node->loc) || MEM_P (node->loc))
7571 	      c++;
7572 	    else
7573 	      {
7574 		r = 1;
7575 		break;
7576 	      }
7577 	}
7578       else if (REG_P (loc))
7579 	{
7580 	  for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7581 	       nextp = &node->next)
7582 	    if (REG_P (node->loc))
7583 	      {
7584 		if (REGNO (node->loc) < REGNO (loc))
7585 		  c++;
7586 		else
7587 		  {
7588 		    if (REGNO (node->loc) == REGNO (loc))
7589 		      r = 0;
7590 		    else
7591 		      r = 1;
7592 		    break;
7593 		  }
7594 	      }
7595 	    else
7596 	      {
7597 		r = 1;
7598 		break;
7599 	      }
7600 	}
7601       else if (MEM_P (loc))
7602 	{
7603 	  for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7604 	       nextp = &node->next)
7605 	    if (REG_P (node->loc))
7606 	      c++;
7607 	    else if (MEM_P (node->loc))
7608 	      {
7609 		if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
7610 		  break;
7611 		else
7612 		  c++;
7613 	      }
7614 	    else
7615 	      {
7616 		r = 1;
7617 		break;
7618 	      }
7619 	}
7620       else
7621 	for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7622 	     nextp = &node->next)
7623 	  if ((r = loc_cmp (node->loc, loc)) >= 0)
7624 	    break;
7625 	  else
7626 	    c++;
7627 
7628       if (r == 0)
7629 	return slot;
7630 
7631       if (shared_var_p (var, set->vars))
7632 	{
7633 	  slot = unshare_variable (set, slot, var, initialized);
7634 	  var = *slot;
7635 	  for (nextp = &var->var_part[0].loc_chain; c;
7636 	       nextp = &(*nextp)->next)
7637 	    c--;
7638 	  gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
7639 	}
7640     }
7641   else
7642     {
7643       int inspos = 0;
7644 
7645       gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
7646 
7647       pos = find_variable_location_part (var, offset, &inspos);
7648 
7649       if (pos >= 0)
7650 	{
7651 	  node = var->var_part[pos].loc_chain;
7652 
7653 	  if (node
7654 	      && ((REG_P (node->loc) && REG_P (loc)
7655 		   && REGNO (node->loc) == REGNO (loc))
7656 		  || rtx_equal_p (node->loc, loc)))
7657 	    {
7658 	      /* LOC is in the beginning of the chain so we have nothing
7659 		 to do.  */
7660 	      if (node->init < initialized)
7661 		node->init = initialized;
7662 	      if (set_src != NULL)
7663 		node->set_src = set_src;
7664 
7665 	      return slot;
7666 	    }
7667 	  else
7668 	    {
7669 	      /* We have to make a copy of a shared variable.  */
7670 	      if (shared_var_p (var, set->vars))
7671 		{
7672 		  slot = unshare_variable (set, slot, var, initialized);
7673 		  var = *slot;
7674 		}
7675 	    }
7676 	}
7677       else
7678 	{
7679 	  /* We have not found the location part, new one will be created.  */
7680 
7681 	  /* We have to make a copy of the shared variable.  */
7682 	  if (shared_var_p (var, set->vars))
7683 	    {
7684 	      slot = unshare_variable (set, slot, var, initialized);
7685 	      var = *slot;
7686 	    }
7687 
7688 	  /* We track only variables whose size is <= MAX_VAR_PARTS bytes
7689 	     thus there are at most MAX_VAR_PARTS different offsets.  */
7690 	  gcc_assert (var->n_var_parts < MAX_VAR_PARTS
7691 		      && (!var->n_var_parts || !onepart));
7692 
7693 	  /* We have to move the elements of array starting at index
7694 	     inspos to the next position.  */
7695 	  for (pos = var->n_var_parts; pos > inspos; pos--)
7696 	    var->var_part[pos] = var->var_part[pos - 1];
7697 
7698 	  var->n_var_parts++;
7699 	  gcc_checking_assert (!onepart);
7700 	  VAR_PART_OFFSET (var, pos) = offset;
7701 	  var->var_part[pos].loc_chain = NULL;
7702 	  var->var_part[pos].cur_loc = NULL;
7703 	}
7704 
7705       /* Delete the location from the list.  */
7706       nextp = &var->var_part[pos].loc_chain;
7707       for (node = var->var_part[pos].loc_chain; node; node = next)
7708 	{
7709 	  next = node->next;
7710 	  if ((REG_P (node->loc) && REG_P (loc)
7711 	       && REGNO (node->loc) == REGNO (loc))
7712 	      || rtx_equal_p (node->loc, loc))
7713 	    {
7714 	      /* Save these values, to assign to the new node, before
7715 		 deleting this one.  */
7716 	      if (node->init > initialized)
7717 		initialized = node->init;
7718 	      if (node->set_src != NULL && set_src == NULL)
7719 		set_src = node->set_src;
7720 	      if (var->var_part[pos].cur_loc == node->loc)
7721 		var->var_part[pos].cur_loc = NULL;
7722 	      pool_free (loc_chain_pool, node);
7723 	      *nextp = next;
7724 	      break;
7725 	    }
7726 	  else
7727 	    nextp = &node->next;
7728 	}
7729 
7730       nextp = &var->var_part[pos].loc_chain;
7731     }
7732 
7733   /* Add the location to the beginning.  */
7734   node = (location_chain) pool_alloc (loc_chain_pool);
7735   node->loc = loc;
7736   node->init = initialized;
7737   node->set_src = set_src;
7738   node->next = *nextp;
7739   *nextp = node;
7740 
7741   /* If no location was emitted do so.  */
7742   if (var->var_part[pos].cur_loc == NULL)
7743     variable_was_changed (var, set);
7744 
7745   return slot;
7746 }
7747 
7748 /* Set the part of variable's location in the dataflow set SET.  The
7749    variable part is specified by variable's declaration in DV and
7750    offset OFFSET and the part's location by LOC.  IOPT should be
7751    NO_INSERT if the variable is known to be in SET already and the
7752    variable hash table must not be resized, and INSERT otherwise.  */
7753 
7754 static void
set_variable_part(dataflow_set * set,rtx loc,decl_or_value dv,HOST_WIDE_INT offset,enum var_init_status initialized,rtx set_src,enum insert_option iopt)7755 set_variable_part (dataflow_set *set, rtx loc,
7756 		   decl_or_value dv, HOST_WIDE_INT offset,
7757 		   enum var_init_status initialized, rtx set_src,
7758 		   enum insert_option iopt)
7759 {
7760   variable_def **slot;
7761 
7762   if (iopt == NO_INSERT)
7763     slot = shared_hash_find_slot_noinsert (set->vars, dv);
7764   else
7765     {
7766       slot = shared_hash_find_slot (set->vars, dv);
7767       if (!slot)
7768 	slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
7769     }
7770   set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
7771 }
7772 
7773 /* Remove all recorded register locations for the given variable part
7774    from dataflow set SET, except for those that are identical to loc.
7775    The variable part is specified by variable's declaration or value
7776    DV and offset OFFSET.  */
7777 
7778 static variable_def **
clobber_slot_part(dataflow_set * set,rtx loc,variable_def ** slot,HOST_WIDE_INT offset,rtx set_src)7779 clobber_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7780 		   HOST_WIDE_INT offset, rtx set_src)
7781 {
7782   variable var = *slot;
7783   int pos = find_variable_location_part (var, offset, NULL);
7784 
7785   if (pos >= 0)
7786     {
7787       location_chain node, next;
7788 
7789       /* Remove the register locations from the dataflow set.  */
7790       next = var->var_part[pos].loc_chain;
7791       for (node = next; node; node = next)
7792 	{
7793 	  next = node->next;
7794 	  if (node->loc != loc
7795 	      && (!flag_var_tracking_uninit
7796 		  || !set_src
7797 		  || MEM_P (set_src)
7798 		  || !rtx_equal_p (set_src, node->set_src)))
7799 	    {
7800 	      if (REG_P (node->loc))
7801 		{
7802 		  attrs anode, anext;
7803 		  attrs *anextp;
7804 
7805 		  /* Remove the variable part from the register's
7806 		     list, but preserve any other variable parts
7807 		     that might be regarded as live in that same
7808 		     register.  */
7809 		  anextp = &set->regs[REGNO (node->loc)];
7810 		  for (anode = *anextp; anode; anode = anext)
7811 		    {
7812 		      anext = anode->next;
7813 		      if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
7814 			  && anode->offset == offset)
7815 			{
7816 			  pool_free (attrs_pool, anode);
7817 			  *anextp = anext;
7818 			}
7819 		      else
7820 			anextp = &anode->next;
7821 		    }
7822 		}
7823 
7824 	      slot = delete_slot_part (set, node->loc, slot, offset);
7825 	    }
7826 	}
7827     }
7828 
7829   return slot;
7830 }
7831 
7832 /* Remove all recorded register locations for the given variable part
7833    from dataflow set SET, except for those that are identical to loc.
7834    The variable part is specified by variable's declaration or value
7835    DV and offset OFFSET.  */
7836 
7837 static void
clobber_variable_part(dataflow_set * set,rtx loc,decl_or_value dv,HOST_WIDE_INT offset,rtx set_src)7838 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7839 		       HOST_WIDE_INT offset, rtx set_src)
7840 {
7841   variable_def **slot;
7842 
7843   if (!dv_as_opaque (dv)
7844       || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
7845     return;
7846 
7847   slot = shared_hash_find_slot_noinsert (set->vars, dv);
7848   if (!slot)
7849     return;
7850 
7851   clobber_slot_part (set, loc, slot, offset, set_src);
7852 }
7853 
7854 /* Delete the part of variable's location from dataflow set SET.  The
7855    variable part is specified by its SET->vars slot SLOT and offset
7856    OFFSET and the part's location by LOC.  */
7857 
7858 static variable_def **
delete_slot_part(dataflow_set * set,rtx loc,variable_def ** slot,HOST_WIDE_INT offset)7859 delete_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7860 		  HOST_WIDE_INT offset)
7861 {
7862   variable var = *slot;
7863   int pos = find_variable_location_part (var, offset, NULL);
7864 
7865   if (pos >= 0)
7866     {
7867       location_chain node, next;
7868       location_chain *nextp;
7869       bool changed;
7870       rtx cur_loc;
7871 
7872       if (shared_var_p (var, set->vars))
7873 	{
7874 	  /* If the variable contains the location part we have to
7875 	     make a copy of the variable.  */
7876 	  for (node = var->var_part[pos].loc_chain; node;
7877 	       node = node->next)
7878 	    {
7879 	      if ((REG_P (node->loc) && REG_P (loc)
7880 		   && REGNO (node->loc) == REGNO (loc))
7881 		  || rtx_equal_p (node->loc, loc))
7882 		{
7883 		  slot = unshare_variable (set, slot, var,
7884 					   VAR_INIT_STATUS_UNKNOWN);
7885 		  var = *slot;
7886 		  break;
7887 		}
7888 	    }
7889 	}
7890 
7891       if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7892 	cur_loc = VAR_LOC_FROM (var);
7893       else
7894 	cur_loc = var->var_part[pos].cur_loc;
7895 
7896       /* Delete the location part.  */
7897       changed = false;
7898       nextp = &var->var_part[pos].loc_chain;
7899       for (node = *nextp; node; node = next)
7900 	{
7901 	  next = node->next;
7902 	  if ((REG_P (node->loc) && REG_P (loc)
7903 	       && REGNO (node->loc) == REGNO (loc))
7904 	      || rtx_equal_p (node->loc, loc))
7905 	    {
7906 	      /* If we have deleted the location which was last emitted
7907 		 we have to emit new location so add the variable to set
7908 		 of changed variables.  */
7909 	      if (cur_loc == node->loc)
7910 		{
7911 		  changed = true;
7912 		  var->var_part[pos].cur_loc = NULL;
7913 		  if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7914 		    VAR_LOC_FROM (var) = NULL;
7915 		}
7916 	      pool_free (loc_chain_pool, node);
7917 	      *nextp = next;
7918 	      break;
7919 	    }
7920 	  else
7921 	    nextp = &node->next;
7922 	}
7923 
7924       if (var->var_part[pos].loc_chain == NULL)
7925 	{
7926 	  changed = true;
7927 	  var->n_var_parts--;
7928 	  while (pos < var->n_var_parts)
7929 	    {
7930 	      var->var_part[pos] = var->var_part[pos + 1];
7931 	      pos++;
7932 	    }
7933 	}
7934       if (changed)
7935 	variable_was_changed (var, set);
7936     }
7937 
7938   return slot;
7939 }
7940 
7941 /* Delete the part of variable's location from dataflow set SET.  The
7942    variable part is specified by variable's declaration or value DV
7943    and offset OFFSET and the part's location by LOC.  */
7944 
7945 static void
delete_variable_part(dataflow_set * set,rtx loc,decl_or_value dv,HOST_WIDE_INT offset)7946 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7947 		      HOST_WIDE_INT offset)
7948 {
7949   variable_def **slot = shared_hash_find_slot_noinsert (set->vars, dv);
7950   if (!slot)
7951     return;
7952 
7953   delete_slot_part (set, loc, slot, offset);
7954 }
7955 
7956 
7957 /* Structure for passing some other parameters to function
7958    vt_expand_loc_callback.  */
7959 struct expand_loc_callback_data
7960 {
7961   /* The variables and values active at this point.  */
7962   variable_table_type vars;
7963 
7964   /* Stack of values and debug_exprs under expansion, and their
7965      children.  */
7966   auto_vec<rtx, 4> expanding;
7967 
7968   /* Stack of values and debug_exprs whose expansion hit recursion
7969      cycles.  They will have VALUE_RECURSED_INTO marked when added to
7970      this list.  This flag will be cleared if any of its dependencies
7971      resolves to a valid location.  So, if the flag remains set at the
7972      end of the search, we know no valid location for this one can
7973      possibly exist.  */
7974   auto_vec<rtx, 4> pending;
7975 
7976   /* The maximum depth among the sub-expressions under expansion.
7977      Zero indicates no expansion so far.  */
7978   expand_depth depth;
7979 };
7980 
7981 /* Allocate the one-part auxiliary data structure for VAR, with enough
7982    room for COUNT dependencies.  */
7983 
7984 static void
loc_exp_dep_alloc(variable var,int count)7985 loc_exp_dep_alloc (variable var, int count)
7986 {
7987   size_t allocsize;
7988 
7989   gcc_checking_assert (var->onepart);
7990 
7991   /* We can be called with COUNT == 0 to allocate the data structure
7992      without any dependencies, e.g. for the backlinks only.  However,
7993      if we are specifying a COUNT, then the dependency list must have
7994      been emptied before.  It would be possible to adjust pointers or
7995      force it empty here, but this is better done at an earlier point
7996      in the algorithm, so we instead leave an assertion to catch
7997      errors.  */
7998   gcc_checking_assert (!count
7999 		       || VAR_LOC_DEP_VEC (var) == NULL
8000 		       || VAR_LOC_DEP_VEC (var)->is_empty ());
8001 
8002   if (VAR_LOC_1PAUX (var) && VAR_LOC_DEP_VEC (var)->space (count))
8003     return;
8004 
8005   allocsize = offsetof (struct onepart_aux, deps)
8006 	      + vec<loc_exp_dep, va_heap, vl_embed>::embedded_size (count);
8007 
8008   if (VAR_LOC_1PAUX (var))
8009     {
8010       VAR_LOC_1PAUX (var) = XRESIZEVAR (struct onepart_aux,
8011 					VAR_LOC_1PAUX (var), allocsize);
8012       /* If the reallocation moves the onepaux structure, the
8013 	 back-pointer to BACKLINKS in the first list member will still
8014 	 point to its old location.  Adjust it.  */
8015       if (VAR_LOC_DEP_LST (var))
8016 	VAR_LOC_DEP_LST (var)->pprev = VAR_LOC_DEP_LSTP (var);
8017     }
8018   else
8019     {
8020       VAR_LOC_1PAUX (var) = XNEWVAR (struct onepart_aux, allocsize);
8021       *VAR_LOC_DEP_LSTP (var) = NULL;
8022       VAR_LOC_FROM (var) = NULL;
8023       VAR_LOC_DEPTH (var).complexity = 0;
8024       VAR_LOC_DEPTH (var).entryvals = 0;
8025     }
8026   VAR_LOC_DEP_VEC (var)->embedded_init (count);
8027 }
8028 
8029 /* Remove all entries from the vector of active dependencies of VAR,
8030    removing them from the back-links lists too.  */
8031 
8032 static void
loc_exp_dep_clear(variable var)8033 loc_exp_dep_clear (variable var)
8034 {
8035   while (VAR_LOC_DEP_VEC (var) && !VAR_LOC_DEP_VEC (var)->is_empty ())
8036     {
8037       loc_exp_dep *led = &VAR_LOC_DEP_VEC (var)->last ();
8038       if (led->next)
8039 	led->next->pprev = led->pprev;
8040       if (led->pprev)
8041 	*led->pprev = led->next;
8042       VAR_LOC_DEP_VEC (var)->pop ();
8043     }
8044 }
8045 
8046 /* Insert an active dependency from VAR on X to the vector of
8047    dependencies, and add the corresponding back-link to X's list of
8048    back-links in VARS.  */
8049 
8050 static void
loc_exp_insert_dep(variable var,rtx x,variable_table_type vars)8051 loc_exp_insert_dep (variable var, rtx x, variable_table_type vars)
8052 {
8053   decl_or_value dv;
8054   variable xvar;
8055   loc_exp_dep *led;
8056 
8057   dv = dv_from_rtx (x);
8058 
8059   /* ??? Build a vector of variables parallel to EXPANDING, to avoid
8060      an additional look up?  */
8061   xvar = vars.find_with_hash (dv, dv_htab_hash (dv));
8062 
8063   if (!xvar)
8064     {
8065       xvar = variable_from_dropped (dv, NO_INSERT);
8066       gcc_checking_assert (xvar);
8067     }
8068 
8069   /* No point in adding the same backlink more than once.  This may
8070      arise if say the same value appears in two complex expressions in
8071      the same loc_list, or even more than once in a single
8072      expression.  */
8073   if (VAR_LOC_DEP_LST (xvar) && VAR_LOC_DEP_LST (xvar)->dv == var->dv)
8074     return;
8075 
8076   if (var->onepart == NOT_ONEPART)
8077     led = (loc_exp_dep *) pool_alloc (loc_exp_dep_pool);
8078   else
8079     {
8080       loc_exp_dep empty;
8081       memset (&empty, 0, sizeof (empty));
8082       VAR_LOC_DEP_VEC (var)->quick_push (empty);
8083       led = &VAR_LOC_DEP_VEC (var)->last ();
8084     }
8085   led->dv = var->dv;
8086   led->value = x;
8087 
8088   loc_exp_dep_alloc (xvar, 0);
8089   led->pprev = VAR_LOC_DEP_LSTP (xvar);
8090   led->next = *led->pprev;
8091   if (led->next)
8092     led->next->pprev = &led->next;
8093   *led->pprev = led;
8094 }
8095 
8096 /* Create active dependencies of VAR on COUNT values starting at
8097    VALUE, and corresponding back-links to the entries in VARS.  Return
8098    true if we found any pending-recursion results.  */
8099 
8100 static bool
loc_exp_dep_set(variable var,rtx result,rtx * value,int count,variable_table_type vars)8101 loc_exp_dep_set (variable var, rtx result, rtx *value, int count,
8102 		 variable_table_type vars)
8103 {
8104   bool pending_recursion = false;
8105 
8106   gcc_checking_assert (VAR_LOC_DEP_VEC (var) == NULL
8107 		       || VAR_LOC_DEP_VEC (var)->is_empty ());
8108 
8109   /* Set up all dependencies from last_child (as set up at the end of
8110      the loop above) to the end.  */
8111   loc_exp_dep_alloc (var, count);
8112 
8113   while (count--)
8114     {
8115       rtx x = *value++;
8116 
8117       if (!pending_recursion)
8118 	pending_recursion = !result && VALUE_RECURSED_INTO (x);
8119 
8120       loc_exp_insert_dep (var, x, vars);
8121     }
8122 
8123   return pending_recursion;
8124 }
8125 
8126 /* Notify the back-links of IVAR that are pending recursion that we
8127    have found a non-NIL value for it, so they are cleared for another
8128    attempt to compute a current location.  */
8129 
8130 static void
notify_dependents_of_resolved_value(variable ivar,variable_table_type vars)8131 notify_dependents_of_resolved_value (variable ivar, variable_table_type vars)
8132 {
8133   loc_exp_dep *led, *next;
8134 
8135   for (led = VAR_LOC_DEP_LST (ivar); led; led = next)
8136     {
8137       decl_or_value dv = led->dv;
8138       variable var;
8139 
8140       next = led->next;
8141 
8142       if (dv_is_value_p (dv))
8143 	{
8144 	  rtx value = dv_as_value (dv);
8145 
8146 	  /* If we have already resolved it, leave it alone.  */
8147 	  if (!VALUE_RECURSED_INTO (value))
8148 	    continue;
8149 
8150 	  /* Check that VALUE_RECURSED_INTO, true from the test above,
8151 	     implies NO_LOC_P.  */
8152 	  gcc_checking_assert (NO_LOC_P (value));
8153 
8154 	  /* We won't notify variables that are being expanded,
8155 	     because their dependency list is cleared before
8156 	     recursing.  */
8157 	  NO_LOC_P (value) = false;
8158 	  VALUE_RECURSED_INTO (value) = false;
8159 
8160 	  gcc_checking_assert (dv_changed_p (dv));
8161 	}
8162       else
8163 	{
8164 	  gcc_checking_assert (dv_onepart_p (dv) != NOT_ONEPART);
8165 	  if (!dv_changed_p (dv))
8166 	    continue;
8167       }
8168 
8169       var = vars.find_with_hash (dv, dv_htab_hash (dv));
8170 
8171       if (!var)
8172 	var = variable_from_dropped (dv, NO_INSERT);
8173 
8174       if (var)
8175 	notify_dependents_of_resolved_value (var, vars);
8176 
8177       if (next)
8178 	next->pprev = led->pprev;
8179       if (led->pprev)
8180 	*led->pprev = next;
8181       led->next = NULL;
8182       led->pprev = NULL;
8183     }
8184 }
8185 
8186 static rtx vt_expand_loc_callback (rtx x, bitmap regs,
8187 				   int max_depth, void *data);
8188 
8189 /* Return the combined depth, when one sub-expression evaluated to
8190    BEST_DEPTH and the previous known depth was SAVED_DEPTH.  */
8191 
8192 static inline expand_depth
update_depth(expand_depth saved_depth,expand_depth best_depth)8193 update_depth (expand_depth saved_depth, expand_depth best_depth)
8194 {
8195   /* If we didn't find anything, stick with what we had.  */
8196   if (!best_depth.complexity)
8197     return saved_depth;
8198 
8199   /* If we found hadn't found anything, use the depth of the current
8200      expression.  Do NOT add one extra level, we want to compute the
8201      maximum depth among sub-expressions.  We'll increment it later,
8202      if appropriate.  */
8203   if (!saved_depth.complexity)
8204     return best_depth;
8205 
8206   /* Combine the entryval count so that regardless of which one we
8207      return, the entryval count is accurate.  */
8208   best_depth.entryvals = saved_depth.entryvals
8209     = best_depth.entryvals + saved_depth.entryvals;
8210 
8211   if (saved_depth.complexity < best_depth.complexity)
8212     return best_depth;
8213   else
8214     return saved_depth;
8215 }
8216 
8217 /* Expand VAR to a location RTX, updating its cur_loc.  Use REGS and
8218    DATA for cselib expand callback.  If PENDRECP is given, indicate in
8219    it whether any sub-expression couldn't be fully evaluated because
8220    it is pending recursion resolution.  */
8221 
8222 static inline rtx
vt_expand_var_loc_chain(variable var,bitmap regs,void * data,bool * pendrecp)8223 vt_expand_var_loc_chain (variable var, bitmap regs, void *data, bool *pendrecp)
8224 {
8225   struct expand_loc_callback_data *elcd
8226     = (struct expand_loc_callback_data *) data;
8227   location_chain loc, next;
8228   rtx result = NULL;
8229   int first_child, result_first_child, last_child;
8230   bool pending_recursion;
8231   rtx loc_from = NULL;
8232   struct elt_loc_list *cloc = NULL;
8233   expand_depth depth = { 0, 0 }, saved_depth = elcd->depth;
8234   int wanted_entryvals, found_entryvals = 0;
8235 
8236   /* Clear all backlinks pointing at this, so that we're not notified
8237      while we're active.  */
8238   loc_exp_dep_clear (var);
8239 
8240  retry:
8241   if (var->onepart == ONEPART_VALUE)
8242     {
8243       cselib_val *val = CSELIB_VAL_PTR (dv_as_value (var->dv));
8244 
8245       gcc_checking_assert (cselib_preserved_value_p (val));
8246 
8247       cloc = val->locs;
8248     }
8249 
8250   first_child = result_first_child = last_child
8251     = elcd->expanding.length ();
8252 
8253   wanted_entryvals = found_entryvals;
8254 
8255   /* Attempt to expand each available location in turn.  */
8256   for (next = loc = var->n_var_parts ? var->var_part[0].loc_chain : NULL;
8257        loc || cloc; loc = next)
8258     {
8259       result_first_child = last_child;
8260 
8261       if (!loc)
8262 	{
8263 	  loc_from = cloc->loc;
8264 	  next = loc;
8265 	  cloc = cloc->next;
8266 	  if (unsuitable_loc (loc_from))
8267 	    continue;
8268 	}
8269       else
8270 	{
8271 	  loc_from = loc->loc;
8272 	  next = loc->next;
8273 	}
8274 
8275       gcc_checking_assert (!unsuitable_loc (loc_from));
8276 
8277       elcd->depth.complexity = elcd->depth.entryvals = 0;
8278       result = cselib_expand_value_rtx_cb (loc_from, regs, EXPR_DEPTH,
8279 					   vt_expand_loc_callback, data);
8280       last_child = elcd->expanding.length ();
8281 
8282       if (result)
8283 	{
8284 	  depth = elcd->depth;
8285 
8286 	  gcc_checking_assert (depth.complexity
8287 			       || result_first_child == last_child);
8288 
8289 	  if (last_child - result_first_child != 1)
8290 	    {
8291 	      if (!depth.complexity && GET_CODE (result) == ENTRY_VALUE)
8292 		depth.entryvals++;
8293 	      depth.complexity++;
8294 	    }
8295 
8296 	  if (depth.complexity <= EXPR_USE_DEPTH)
8297 	    {
8298 	      if (depth.entryvals <= wanted_entryvals)
8299 		break;
8300 	      else if (!found_entryvals || depth.entryvals < found_entryvals)
8301 		found_entryvals = depth.entryvals;
8302 	    }
8303 
8304 	  result = NULL;
8305 	}
8306 
8307       /* Set it up in case we leave the loop.  */
8308       depth.complexity = depth.entryvals = 0;
8309       loc_from = NULL;
8310       result_first_child = first_child;
8311     }
8312 
8313   if (!loc_from && wanted_entryvals < found_entryvals)
8314     {
8315       /* We found entries with ENTRY_VALUEs and skipped them.  Since
8316 	 we could not find any expansions without ENTRY_VALUEs, but we
8317 	 found at least one with them, go back and get an entry with
8318 	 the minimum number ENTRY_VALUE count that we found.  We could
8319 	 avoid looping, but since each sub-loc is already resolved,
8320 	 the re-expansion should be trivial.  ??? Should we record all
8321 	 attempted locs as dependencies, so that we retry the
8322 	 expansion should any of them change, in the hope it can give
8323 	 us a new entry without an ENTRY_VALUE?  */
8324       elcd->expanding.truncate (first_child);
8325       goto retry;
8326     }
8327 
8328   /* Register all encountered dependencies as active.  */
8329   pending_recursion = loc_exp_dep_set
8330     (var, result, elcd->expanding.address () + result_first_child,
8331      last_child - result_first_child, elcd->vars);
8332 
8333   elcd->expanding.truncate (first_child);
8334 
8335   /* Record where the expansion came from.  */
8336   gcc_checking_assert (!result || !pending_recursion);
8337   VAR_LOC_FROM (var) = loc_from;
8338   VAR_LOC_DEPTH (var) = depth;
8339 
8340   gcc_checking_assert (!depth.complexity == !result);
8341 
8342   elcd->depth = update_depth (saved_depth, depth);
8343 
8344   /* Indicate whether any of the dependencies are pending recursion
8345      resolution.  */
8346   if (pendrecp)
8347     *pendrecp = pending_recursion;
8348 
8349   if (!pendrecp || !pending_recursion)
8350     var->var_part[0].cur_loc = result;
8351 
8352   return result;
8353 }
8354 
8355 /* Callback for cselib_expand_value, that looks for expressions
8356    holding the value in the var-tracking hash tables.  Return X for
8357    standard processing, anything else is to be used as-is.  */
8358 
8359 static rtx
vt_expand_loc_callback(rtx x,bitmap regs,int max_depth ATTRIBUTE_UNUSED,void * data)8360 vt_expand_loc_callback (rtx x, bitmap regs,
8361 			int max_depth ATTRIBUTE_UNUSED,
8362 			void *data)
8363 {
8364   struct expand_loc_callback_data *elcd
8365     = (struct expand_loc_callback_data *) data;
8366   decl_or_value dv;
8367   variable var;
8368   rtx result, subreg;
8369   bool pending_recursion = false;
8370   bool from_empty = false;
8371 
8372   switch (GET_CODE (x))
8373     {
8374     case SUBREG:
8375       subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
8376 					   EXPR_DEPTH,
8377 					   vt_expand_loc_callback, data);
8378 
8379       if (!subreg)
8380 	return NULL;
8381 
8382       result = simplify_gen_subreg (GET_MODE (x), subreg,
8383 				    GET_MODE (SUBREG_REG (x)),
8384 				    SUBREG_BYTE (x));
8385 
8386       /* Invalid SUBREGs are ok in debug info.  ??? We could try
8387 	 alternate expansions for the VALUE as well.  */
8388       if (!result)
8389 	result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
8390 
8391       return result;
8392 
8393     case DEBUG_EXPR:
8394     case VALUE:
8395       dv = dv_from_rtx (x);
8396       break;
8397 
8398     default:
8399       return x;
8400     }
8401 
8402   elcd->expanding.safe_push (x);
8403 
8404   /* Check that VALUE_RECURSED_INTO implies NO_LOC_P.  */
8405   gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x));
8406 
8407   if (NO_LOC_P (x))
8408     {
8409       gcc_checking_assert (VALUE_RECURSED_INTO (x) || !dv_changed_p (dv));
8410       return NULL;
8411     }
8412 
8413   var = elcd->vars.find_with_hash (dv, dv_htab_hash (dv));
8414 
8415   if (!var)
8416     {
8417       from_empty = true;
8418       var = variable_from_dropped (dv, INSERT);
8419     }
8420 
8421   gcc_checking_assert (var);
8422 
8423   if (!dv_changed_p (dv))
8424     {
8425       gcc_checking_assert (!NO_LOC_P (x));
8426       gcc_checking_assert (var->var_part[0].cur_loc);
8427       gcc_checking_assert (VAR_LOC_1PAUX (var));
8428       gcc_checking_assert (VAR_LOC_1PAUX (var)->depth.complexity);
8429 
8430       elcd->depth = update_depth (elcd->depth, VAR_LOC_1PAUX (var)->depth);
8431 
8432       return var->var_part[0].cur_loc;
8433     }
8434 
8435   VALUE_RECURSED_INTO (x) = true;
8436   /* This is tentative, but it makes some tests simpler.  */
8437   NO_LOC_P (x) = true;
8438 
8439   gcc_checking_assert (var->n_var_parts == 1 || from_empty);
8440 
8441   result = vt_expand_var_loc_chain (var, regs, data, &pending_recursion);
8442 
8443   if (pending_recursion)
8444     {
8445       gcc_checking_assert (!result);
8446       elcd->pending.safe_push (x);
8447     }
8448   else
8449     {
8450       NO_LOC_P (x) = !result;
8451       VALUE_RECURSED_INTO (x) = false;
8452       set_dv_changed (dv, false);
8453 
8454       if (result)
8455 	notify_dependents_of_resolved_value (var, elcd->vars);
8456     }
8457 
8458   return result;
8459 }
8460 
8461 /* While expanding variables, we may encounter recursion cycles
8462    because of mutual (possibly indirect) dependencies between two
8463    particular variables (or values), say A and B.  If we're trying to
8464    expand A when we get to B, which in turn attempts to expand A, if
8465    we can't find any other expansion for B, we'll add B to this
8466    pending-recursion stack, and tentatively return NULL for its
8467    location.  This tentative value will be used for any other
8468    occurrences of B, unless A gets some other location, in which case
8469    it will notify B that it is worth another try at computing a
8470    location for it, and it will use the location computed for A then.
8471    At the end of the expansion, the tentative NULL locations become
8472    final for all members of PENDING that didn't get a notification.
8473    This function performs this finalization of NULL locations.  */
8474 
8475 static void
resolve_expansions_pending_recursion(vec<rtx,va_heap> * pending)8476 resolve_expansions_pending_recursion (vec<rtx, va_heap> *pending)
8477 {
8478   while (!pending->is_empty ())
8479     {
8480       rtx x = pending->pop ();
8481       decl_or_value dv;
8482 
8483       if (!VALUE_RECURSED_INTO (x))
8484 	continue;
8485 
8486       gcc_checking_assert (NO_LOC_P (x));
8487       VALUE_RECURSED_INTO (x) = false;
8488       dv = dv_from_rtx (x);
8489       gcc_checking_assert (dv_changed_p (dv));
8490       set_dv_changed (dv, false);
8491     }
8492 }
8493 
8494 /* Initialize expand_loc_callback_data D with variable hash table V.
8495    It must be a macro because of alloca (vec stack).  */
8496 #define INIT_ELCD(d, v)						\
8497   do								\
8498     {								\
8499       (d).vars = (v);						\
8500       (d).depth.complexity = (d).depth.entryvals = 0;		\
8501     }								\
8502   while (0)
8503 /* Finalize expand_loc_callback_data D, resolved to location L.  */
8504 #define FINI_ELCD(d, l)						\
8505   do								\
8506     {								\
8507       resolve_expansions_pending_recursion (&(d).pending);	\
8508       (d).pending.release ();					\
8509       (d).expanding.release ();					\
8510 								\
8511       if ((l) && MEM_P (l))					\
8512 	(l) = targetm.delegitimize_address (l);			\
8513     }								\
8514   while (0)
8515 
8516 /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
8517    equivalences in VARS, updating their CUR_LOCs in the process.  */
8518 
8519 static rtx
vt_expand_loc(rtx loc,variable_table_type vars)8520 vt_expand_loc (rtx loc, variable_table_type vars)
8521 {
8522   struct expand_loc_callback_data data;
8523   rtx result;
8524 
8525   if (!MAY_HAVE_DEBUG_INSNS)
8526     return loc;
8527 
8528   INIT_ELCD (data, vars);
8529 
8530   result = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
8531 				       vt_expand_loc_callback, &data);
8532 
8533   FINI_ELCD (data, result);
8534 
8535   return result;
8536 }
8537 
8538 /* Expand the one-part VARiable to a location, using the equivalences
8539    in VARS, updating their CUR_LOCs in the process.  */
8540 
8541 static rtx
vt_expand_1pvar(variable var,variable_table_type vars)8542 vt_expand_1pvar (variable var, variable_table_type vars)
8543 {
8544   struct expand_loc_callback_data data;
8545   rtx loc;
8546 
8547   gcc_checking_assert (var->onepart && var->n_var_parts == 1);
8548 
8549   if (!dv_changed_p (var->dv))
8550     return var->var_part[0].cur_loc;
8551 
8552   INIT_ELCD (data, vars);
8553 
8554   loc = vt_expand_var_loc_chain (var, scratch_regs, &data, NULL);
8555 
8556   gcc_checking_assert (data.expanding.is_empty ());
8557 
8558   FINI_ELCD (data, loc);
8559 
8560   return loc;
8561 }
8562 
8563 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP.  DATA contains
8564    additional parameters: WHERE specifies whether the note shall be emitted
8565    before or after instruction INSN.  */
8566 
8567 int
emit_note_insn_var_location(variable_def ** varp,emit_note_data * data)8568 emit_note_insn_var_location (variable_def **varp, emit_note_data *data)
8569 {
8570   variable var = *varp;
8571   rtx insn = data->insn;
8572   enum emit_note_where where = data->where;
8573   variable_table_type vars = data->vars;
8574   rtx note, note_vl;
8575   int i, j, n_var_parts;
8576   bool complete;
8577   enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
8578   HOST_WIDE_INT last_limit;
8579   tree type_size_unit;
8580   HOST_WIDE_INT offsets[MAX_VAR_PARTS];
8581   rtx loc[MAX_VAR_PARTS];
8582   tree decl;
8583   location_chain lc;
8584 
8585   gcc_checking_assert (var->onepart == NOT_ONEPART
8586 		       || var->onepart == ONEPART_VDECL);
8587 
8588   decl = dv_as_decl (var->dv);
8589 
8590   complete = true;
8591   last_limit = 0;
8592   n_var_parts = 0;
8593   if (!var->onepart)
8594     for (i = 0; i < var->n_var_parts; i++)
8595       if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
8596 	var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
8597   for (i = 0; i < var->n_var_parts; i++)
8598     {
8599       enum machine_mode mode, wider_mode;
8600       rtx loc2;
8601       HOST_WIDE_INT offset;
8602 
8603       if (i == 0 && var->onepart)
8604 	{
8605 	  gcc_checking_assert (var->n_var_parts == 1);
8606 	  offset = 0;
8607 	  initialized = VAR_INIT_STATUS_INITIALIZED;
8608 	  loc2 = vt_expand_1pvar (var, vars);
8609 	}
8610       else
8611 	{
8612 	  if (last_limit < VAR_PART_OFFSET (var, i))
8613 	    {
8614 	      complete = false;
8615 	      break;
8616 	    }
8617 	  else if (last_limit > VAR_PART_OFFSET (var, i))
8618 	    continue;
8619 	  offset = VAR_PART_OFFSET (var, i);
8620 	  loc2 = var->var_part[i].cur_loc;
8621 	  if (loc2 && GET_CODE (loc2) == MEM
8622 	      && GET_CODE (XEXP (loc2, 0)) == VALUE)
8623 	    {
8624 	      rtx depval = XEXP (loc2, 0);
8625 
8626 	      loc2 = vt_expand_loc (loc2, vars);
8627 
8628 	      if (loc2)
8629 		loc_exp_insert_dep (var, depval, vars);
8630 	    }
8631 	  if (!loc2)
8632 	    {
8633 	      complete = false;
8634 	      continue;
8635 	    }
8636 	  gcc_checking_assert (GET_CODE (loc2) != VALUE);
8637 	  for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
8638 	    if (var->var_part[i].cur_loc == lc->loc)
8639 	      {
8640 		initialized = lc->init;
8641 		break;
8642 	      }
8643 	  gcc_assert (lc);
8644 	}
8645 
8646       offsets[n_var_parts] = offset;
8647       if (!loc2)
8648 	{
8649 	  complete = false;
8650 	  continue;
8651 	}
8652       loc[n_var_parts] = loc2;
8653       mode = GET_MODE (var->var_part[i].cur_loc);
8654       if (mode == VOIDmode && var->onepart)
8655 	mode = DECL_MODE (decl);
8656       last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8657 
8658       /* Attempt to merge adjacent registers or memory.  */
8659       wider_mode = GET_MODE_WIDER_MODE (mode);
8660       for (j = i + 1; j < var->n_var_parts; j++)
8661 	if (last_limit <= VAR_PART_OFFSET (var, j))
8662 	  break;
8663       if (j < var->n_var_parts
8664 	  && wider_mode != VOIDmode
8665 	  && var->var_part[j].cur_loc
8666 	  && mode == GET_MODE (var->var_part[j].cur_loc)
8667 	  && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
8668 	  && last_limit == (var->onepart ? 0 : VAR_PART_OFFSET (var, j))
8669 	  && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
8670 	  && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
8671 	{
8672 	  rtx new_loc = NULL;
8673 
8674 	  if (REG_P (loc[n_var_parts])
8675 	      && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
8676 		 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
8677 	      && end_hard_regno (mode, REGNO (loc[n_var_parts]))
8678 		 == REGNO (loc2))
8679 	    {
8680 	      if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
8681 		new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
8682 					   mode, 0);
8683 	      else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
8684 		new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
8685 	      if (new_loc)
8686 		{
8687 		  if (!REG_P (new_loc)
8688 		      || REGNO (new_loc) != REGNO (loc[n_var_parts]))
8689 		    new_loc = NULL;
8690 		  else
8691 		    REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
8692 		}
8693 	    }
8694 	  else if (MEM_P (loc[n_var_parts])
8695 		   && GET_CODE (XEXP (loc2, 0)) == PLUS
8696 		   && REG_P (XEXP (XEXP (loc2, 0), 0))
8697 		   && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
8698 	    {
8699 	      if ((REG_P (XEXP (loc[n_var_parts], 0))
8700 		   && rtx_equal_p (XEXP (loc[n_var_parts], 0),
8701 				   XEXP (XEXP (loc2, 0), 0))
8702 		   && INTVAL (XEXP (XEXP (loc2, 0), 1))
8703 		      == GET_MODE_SIZE (mode))
8704 		  || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
8705 		      && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
8706 		      && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
8707 				      XEXP (XEXP (loc2, 0), 0))
8708 		      && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
8709 			 + GET_MODE_SIZE (mode)
8710 			 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
8711 		new_loc = adjust_address_nv (loc[n_var_parts],
8712 					     wider_mode, 0);
8713 	    }
8714 
8715 	  if (new_loc)
8716 	    {
8717 	      loc[n_var_parts] = new_loc;
8718 	      mode = wider_mode;
8719 	      last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8720 	      i = j;
8721 	    }
8722 	}
8723       ++n_var_parts;
8724     }
8725   type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8726   if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
8727     complete = false;
8728 
8729   if (! flag_var_tracking_uninit)
8730     initialized = VAR_INIT_STATUS_INITIALIZED;
8731 
8732   note_vl = NULL_RTX;
8733   if (!complete)
8734     note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX,
8735 				    (int) initialized);
8736   else if (n_var_parts == 1)
8737     {
8738       rtx expr_list;
8739 
8740       if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
8741 	expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
8742       else
8743 	expr_list = loc[0];
8744 
8745       note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list,
8746 				      (int) initialized);
8747     }
8748   else if (n_var_parts)
8749     {
8750       rtx parallel;
8751 
8752       for (i = 0; i < n_var_parts; i++)
8753 	loc[i]
8754 	  = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
8755 
8756       parallel = gen_rtx_PARALLEL (VOIDmode,
8757 				   gen_rtvec_v (n_var_parts, loc));
8758       note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
8759 				      parallel, (int) initialized);
8760     }
8761 
8762   if (where != EMIT_NOTE_BEFORE_INSN)
8763     {
8764       note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8765       if (where == EMIT_NOTE_AFTER_CALL_INSN)
8766 	NOTE_DURING_CALL_P (note) = true;
8767     }
8768   else
8769     {
8770       /* Make sure that the call related notes come first.  */
8771       while (NEXT_INSN (insn)
8772 	     && NOTE_P (insn)
8773 	     && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8774 		  && NOTE_DURING_CALL_P (insn))
8775 		 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8776 	insn = NEXT_INSN (insn);
8777       if (NOTE_P (insn)
8778 	  && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8779 	       && NOTE_DURING_CALL_P (insn))
8780 	      || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8781 	note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8782       else
8783 	note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
8784     }
8785   NOTE_VAR_LOCATION (note) = note_vl;
8786 
8787   set_dv_changed (var->dv, false);
8788   gcc_assert (var->in_changed_variables);
8789   var->in_changed_variables = false;
8790   changed_variables.clear_slot (varp);
8791 
8792   /* Continue traversing the hash table.  */
8793   return 1;
8794 }
8795 
8796 /* While traversing changed_variables, push onto DATA (a stack of RTX
8797    values) entries that aren't user variables.  */
8798 
8799 int
var_track_values_to_stack(variable_def ** slot,vec<rtx,va_heap> * changed_values_stack)8800 var_track_values_to_stack (variable_def **slot,
8801 			   vec<rtx, va_heap> *changed_values_stack)
8802 {
8803   variable var = *slot;
8804 
8805   if (var->onepart == ONEPART_VALUE)
8806     changed_values_stack->safe_push (dv_as_value (var->dv));
8807   else if (var->onepart == ONEPART_DEXPR)
8808     changed_values_stack->safe_push (DECL_RTL_KNOWN_SET (dv_as_decl (var->dv)));
8809 
8810   return 1;
8811 }
8812 
8813 /* Remove from changed_variables the entry whose DV corresponds to
8814    value or debug_expr VAL.  */
8815 static void
remove_value_from_changed_variables(rtx val)8816 remove_value_from_changed_variables (rtx val)
8817 {
8818   decl_or_value dv = dv_from_rtx (val);
8819   variable_def **slot;
8820   variable var;
8821 
8822   slot = changed_variables.find_slot_with_hash (dv, dv_htab_hash (dv),
8823 						NO_INSERT);
8824   var = *slot;
8825   var->in_changed_variables = false;
8826   changed_variables.clear_slot (slot);
8827 }
8828 
8829 /* If VAL (a value or debug_expr) has backlinks to variables actively
8830    dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
8831    changed, adding to CHANGED_VALUES_STACK any dependencies that may
8832    have dependencies of their own to notify.  */
8833 
8834 static void
notify_dependents_of_changed_value(rtx val,variable_table_type htab,vec<rtx,va_heap> * changed_values_stack)8835 notify_dependents_of_changed_value (rtx val, variable_table_type htab,
8836 				    vec<rtx, va_heap> *changed_values_stack)
8837 {
8838   variable_def **slot;
8839   variable var;
8840   loc_exp_dep *led;
8841   decl_or_value dv = dv_from_rtx (val);
8842 
8843   slot = changed_variables.find_slot_with_hash (dv, dv_htab_hash (dv),
8844 						NO_INSERT);
8845   if (!slot)
8846     slot = htab.find_slot_with_hash (dv, dv_htab_hash (dv), NO_INSERT);
8847   if (!slot)
8848     slot = dropped_values.find_slot_with_hash (dv, dv_htab_hash (dv),
8849 					       NO_INSERT);
8850   var = *slot;
8851 
8852   while ((led = VAR_LOC_DEP_LST (var)))
8853     {
8854       decl_or_value ldv = led->dv;
8855       variable ivar;
8856 
8857       /* Deactivate and remove the backlink, as it was “used up”.  It
8858 	 makes no sense to attempt to notify the same entity again:
8859 	 either it will be recomputed and re-register an active
8860 	 dependency, or it will still have the changed mark.  */
8861       if (led->next)
8862 	led->next->pprev = led->pprev;
8863       if (led->pprev)
8864 	*led->pprev = led->next;
8865       led->next = NULL;
8866       led->pprev = NULL;
8867 
8868       if (dv_changed_p (ldv))
8869 	continue;
8870 
8871       switch (dv_onepart_p (ldv))
8872 	{
8873 	case ONEPART_VALUE:
8874 	case ONEPART_DEXPR:
8875 	  set_dv_changed (ldv, true);
8876 	  changed_values_stack->safe_push (dv_as_rtx (ldv));
8877 	  break;
8878 
8879 	case ONEPART_VDECL:
8880 	  ivar = htab.find_with_hash (ldv, dv_htab_hash (ldv));
8881 	  gcc_checking_assert (!VAR_LOC_DEP_LST (ivar));
8882 	  variable_was_changed (ivar, NULL);
8883 	  break;
8884 
8885 	case NOT_ONEPART:
8886 	  pool_free (loc_exp_dep_pool, led);
8887 	  ivar = htab.find_with_hash (ldv, dv_htab_hash (ldv));
8888 	  if (ivar)
8889 	    {
8890 	      int i = ivar->n_var_parts;
8891 	      while (i--)
8892 		{
8893 		  rtx loc = ivar->var_part[i].cur_loc;
8894 
8895 		  if (loc && GET_CODE (loc) == MEM
8896 		      && XEXP (loc, 0) == val)
8897 		    {
8898 		      variable_was_changed (ivar, NULL);
8899 		      break;
8900 		    }
8901 		}
8902 	    }
8903 	  break;
8904 
8905 	default:
8906 	  gcc_unreachable ();
8907 	}
8908     }
8909 }
8910 
8911 /* Take out of changed_variables any entries that don't refer to use
8912    variables.  Back-propagate change notifications from values and
8913    debug_exprs to their active dependencies in HTAB or in
8914    CHANGED_VARIABLES.  */
8915 
8916 static void
process_changed_values(variable_table_type htab)8917 process_changed_values (variable_table_type htab)
8918 {
8919   int i, n;
8920   rtx val;
8921   auto_vec<rtx, 20> changed_values_stack;
8922 
8923   /* Move values from changed_variables to changed_values_stack.  */
8924   changed_variables
8925     .traverse <vec<rtx, va_heap>*, var_track_values_to_stack>
8926       (&changed_values_stack);
8927 
8928   /* Back-propagate change notifications in values while popping
8929      them from the stack.  */
8930   for (n = i = changed_values_stack.length ();
8931        i > 0; i = changed_values_stack.length ())
8932     {
8933       val = changed_values_stack.pop ();
8934       notify_dependents_of_changed_value (val, htab, &changed_values_stack);
8935 
8936       /* This condition will hold when visiting each of the entries
8937 	 originally in changed_variables.  We can't remove them
8938 	 earlier because this could drop the backlinks before we got a
8939 	 chance to use them.  */
8940       if (i == n)
8941 	{
8942 	  remove_value_from_changed_variables (val);
8943 	  n--;
8944 	}
8945     }
8946 }
8947 
8948 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
8949    CHANGED_VARIABLES and delete this chain.  WHERE specifies whether
8950    the notes shall be emitted before of after instruction INSN.  */
8951 
8952 static void
emit_notes_for_changes(rtx insn,enum emit_note_where where,shared_hash vars)8953 emit_notes_for_changes (rtx insn, enum emit_note_where where,
8954 			shared_hash vars)
8955 {
8956   emit_note_data data;
8957   variable_table_type htab = shared_hash_htab (vars);
8958 
8959   if (!changed_variables.elements ())
8960     return;
8961 
8962   if (MAY_HAVE_DEBUG_INSNS)
8963     process_changed_values (htab);
8964 
8965   data.insn = insn;
8966   data.where = where;
8967   data.vars = htab;
8968 
8969   changed_variables
8970     .traverse <emit_note_data*, emit_note_insn_var_location> (&data);
8971 }
8972 
8973 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
8974    same variable in hash table DATA or is not there at all.  */
8975 
8976 int
emit_notes_for_differences_1(variable_def ** slot,variable_table_type new_vars)8977 emit_notes_for_differences_1 (variable_def **slot, variable_table_type new_vars)
8978 {
8979   variable old_var, new_var;
8980 
8981   old_var = *slot;
8982   new_var = new_vars.find_with_hash (old_var->dv, dv_htab_hash (old_var->dv));
8983 
8984   if (!new_var)
8985     {
8986       /* Variable has disappeared.  */
8987       variable empty_var = NULL;
8988 
8989       if (old_var->onepart == ONEPART_VALUE
8990 	  || old_var->onepart == ONEPART_DEXPR)
8991 	{
8992 	  empty_var = variable_from_dropped (old_var->dv, NO_INSERT);
8993 	  if (empty_var)
8994 	    {
8995 	      gcc_checking_assert (!empty_var->in_changed_variables);
8996 	      if (!VAR_LOC_1PAUX (old_var))
8997 		{
8998 		  VAR_LOC_1PAUX (old_var) = VAR_LOC_1PAUX (empty_var);
8999 		  VAR_LOC_1PAUX (empty_var) = NULL;
9000 		}
9001 	      else
9002 		gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
9003 	    }
9004 	}
9005 
9006       if (!empty_var)
9007 	{
9008 	  empty_var = (variable) pool_alloc (onepart_pool (old_var->onepart));
9009 	  empty_var->dv = old_var->dv;
9010 	  empty_var->refcount = 0;
9011 	  empty_var->n_var_parts = 0;
9012 	  empty_var->onepart = old_var->onepart;
9013 	  empty_var->in_changed_variables = false;
9014 	}
9015 
9016       if (empty_var->onepart)
9017 	{
9018 	  /* Propagate the auxiliary data to (ultimately)
9019 	     changed_variables.  */
9020 	  empty_var->var_part[0].loc_chain = NULL;
9021 	  empty_var->var_part[0].cur_loc = NULL;
9022 	  VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (old_var);
9023 	  VAR_LOC_1PAUX (old_var) = NULL;
9024 	}
9025       variable_was_changed (empty_var, NULL);
9026       /* Continue traversing the hash table.  */
9027       return 1;
9028     }
9029   /* Update cur_loc and one-part auxiliary data, before new_var goes
9030      through variable_was_changed.  */
9031   if (old_var != new_var && new_var->onepart)
9032     {
9033       gcc_checking_assert (VAR_LOC_1PAUX (new_var) == NULL);
9034       VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (old_var);
9035       VAR_LOC_1PAUX (old_var) = NULL;
9036       new_var->var_part[0].cur_loc = old_var->var_part[0].cur_loc;
9037     }
9038   if (variable_different_p (old_var, new_var))
9039     variable_was_changed (new_var, NULL);
9040 
9041   /* Continue traversing the hash table.  */
9042   return 1;
9043 }
9044 
9045 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
9046    table DATA.  */
9047 
9048 int
emit_notes_for_differences_2(variable_def ** slot,variable_table_type old_vars)9049 emit_notes_for_differences_2 (variable_def **slot, variable_table_type old_vars)
9050 {
9051   variable old_var, new_var;
9052 
9053   new_var = *slot;
9054   old_var = old_vars.find_with_hash (new_var->dv, dv_htab_hash (new_var->dv));
9055   if (!old_var)
9056     {
9057       int i;
9058       for (i = 0; i < new_var->n_var_parts; i++)
9059 	new_var->var_part[i].cur_loc = NULL;
9060       variable_was_changed (new_var, NULL);
9061     }
9062 
9063   /* Continue traversing the hash table.  */
9064   return 1;
9065 }
9066 
9067 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
9068    NEW_SET.  */
9069 
9070 static void
emit_notes_for_differences(rtx insn,dataflow_set * old_set,dataflow_set * new_set)9071 emit_notes_for_differences (rtx insn, dataflow_set *old_set,
9072 			    dataflow_set *new_set)
9073 {
9074   shared_hash_htab (old_set->vars)
9075     .traverse <variable_table_type, emit_notes_for_differences_1>
9076       (shared_hash_htab (new_set->vars));
9077   shared_hash_htab (new_set->vars)
9078     .traverse <variable_table_type, emit_notes_for_differences_2>
9079       (shared_hash_htab (old_set->vars));
9080   emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
9081 }
9082 
9083 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION.  */
9084 
9085 static rtx
next_non_note_insn_var_location(rtx insn)9086 next_non_note_insn_var_location (rtx insn)
9087 {
9088   while (insn)
9089     {
9090       insn = NEXT_INSN (insn);
9091       if (insn == 0
9092 	  || !NOTE_P (insn)
9093 	  || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION)
9094 	break;
9095     }
9096 
9097   return insn;
9098 }
9099 
9100 /* Emit the notes for changes of location parts in the basic block BB.  */
9101 
9102 static void
emit_notes_in_bb(basic_block bb,dataflow_set * set)9103 emit_notes_in_bb (basic_block bb, dataflow_set *set)
9104 {
9105   unsigned int i;
9106   micro_operation *mo;
9107 
9108   dataflow_set_clear (set);
9109   dataflow_set_copy (set, &VTI (bb)->in);
9110 
9111   FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
9112     {
9113       rtx insn = mo->insn;
9114       rtx next_insn = next_non_note_insn_var_location (insn);
9115 
9116       switch (mo->type)
9117 	{
9118 	  case MO_CALL:
9119 	    dataflow_set_clear_at_call (set);
9120 	    emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
9121 	    {
9122 	      rtx arguments = mo->u.loc, *p = &arguments, note;
9123 	      while (*p)
9124 		{
9125 		  XEXP (XEXP (*p, 0), 1)
9126 		    = vt_expand_loc (XEXP (XEXP (*p, 0), 1),
9127 				     shared_hash_htab (set->vars));
9128 		  /* If expansion is successful, keep it in the list.  */
9129 		  if (XEXP (XEXP (*p, 0), 1))
9130 		    p = &XEXP (*p, 1);
9131 		  /* Otherwise, if the following item is data_value for it,
9132 		     drop it too too.  */
9133 		  else if (XEXP (*p, 1)
9134 			   && REG_P (XEXP (XEXP (*p, 0), 0))
9135 			   && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
9136 			   && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
9137 					   0))
9138 			   && REGNO (XEXP (XEXP (*p, 0), 0))
9139 			      == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
9140 						    0), 0)))
9141 		    *p = XEXP (XEXP (*p, 1), 1);
9142 		  /* Just drop this item.  */
9143 		  else
9144 		    *p = XEXP (*p, 1);
9145 		}
9146 	      note = emit_note_after (NOTE_INSN_CALL_ARG_LOCATION, insn);
9147 	      NOTE_VAR_LOCATION (note) = arguments;
9148 	    }
9149 	    break;
9150 
9151 	  case MO_USE:
9152 	    {
9153 	      rtx loc = mo->u.loc;
9154 
9155 	      if (REG_P (loc))
9156 		var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9157 	      else
9158 		var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9159 
9160 	      emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9161 	    }
9162 	    break;
9163 
9164 	  case MO_VAL_LOC:
9165 	    {
9166 	      rtx loc = mo->u.loc;
9167 	      rtx val, vloc;
9168 	      tree var;
9169 
9170 	      if (GET_CODE (loc) == CONCAT)
9171 		{
9172 		  val = XEXP (loc, 0);
9173 		  vloc = XEXP (loc, 1);
9174 		}
9175 	      else
9176 		{
9177 		  val = NULL_RTX;
9178 		  vloc = loc;
9179 		}
9180 
9181 	      var = PAT_VAR_LOCATION_DECL (vloc);
9182 
9183 	      clobber_variable_part (set, NULL_RTX,
9184 				     dv_from_decl (var), 0, NULL_RTX);
9185 	      if (val)
9186 		{
9187 		  if (VAL_NEEDS_RESOLUTION (loc))
9188 		    val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
9189 		  set_variable_part (set, val, dv_from_decl (var), 0,
9190 				     VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9191 				     INSERT);
9192 		}
9193 	      else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
9194 		set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
9195 				   dv_from_decl (var), 0,
9196 				   VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9197 				   INSERT);
9198 
9199 	      emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9200 	    }
9201 	    break;
9202 
9203 	  case MO_VAL_USE:
9204 	    {
9205 	      rtx loc = mo->u.loc;
9206 	      rtx val, vloc, uloc;
9207 
9208 	      vloc = uloc = XEXP (loc, 1);
9209 	      val = XEXP (loc, 0);
9210 
9211 	      if (GET_CODE (val) == CONCAT)
9212 		{
9213 		  uloc = XEXP (val, 1);
9214 		  val = XEXP (val, 0);
9215 		}
9216 
9217 	      if (VAL_NEEDS_RESOLUTION (loc))
9218 		val_resolve (set, val, vloc, insn);
9219 	      else
9220 		val_store (set, val, uloc, insn, false);
9221 
9222 	      if (VAL_HOLDS_TRACK_EXPR (loc))
9223 		{
9224 		  if (GET_CODE (uloc) == REG)
9225 		    var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9226 				 NULL);
9227 		  else if (GET_CODE (uloc) == MEM)
9228 		    var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9229 				 NULL);
9230 		}
9231 
9232 	      emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9233 	    }
9234 	    break;
9235 
9236 	  case MO_VAL_SET:
9237 	    {
9238 	      rtx loc = mo->u.loc;
9239 	      rtx val, vloc, uloc;
9240 	      rtx dstv, srcv;
9241 
9242 	      vloc = loc;
9243 	      uloc = XEXP (vloc, 1);
9244 	      val = XEXP (vloc, 0);
9245 	      vloc = uloc;
9246 
9247 	      if (GET_CODE (uloc) == SET)
9248 		{
9249 		  dstv = SET_DEST (uloc);
9250 		  srcv = SET_SRC (uloc);
9251 		}
9252 	      else
9253 		{
9254 		  dstv = uloc;
9255 		  srcv = NULL;
9256 		}
9257 
9258 	      if (GET_CODE (val) == CONCAT)
9259 		{
9260 		  dstv = vloc = XEXP (val, 1);
9261 		  val = XEXP (val, 0);
9262 		}
9263 
9264 	      if (GET_CODE (vloc) == SET)
9265 		{
9266 		  srcv = SET_SRC (vloc);
9267 
9268 		  gcc_assert (val != srcv);
9269 		  gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
9270 
9271 		  dstv = vloc = SET_DEST (vloc);
9272 
9273 		  if (VAL_NEEDS_RESOLUTION (loc))
9274 		    val_resolve (set, val, srcv, insn);
9275 		}
9276 	      else if (VAL_NEEDS_RESOLUTION (loc))
9277 		{
9278 		  gcc_assert (GET_CODE (uloc) == SET
9279 			      && GET_CODE (SET_SRC (uloc)) == REG);
9280 		  val_resolve (set, val, SET_SRC (uloc), insn);
9281 		}
9282 
9283 	      if (VAL_HOLDS_TRACK_EXPR (loc))
9284 		{
9285 		  if (VAL_EXPR_IS_CLOBBERED (loc))
9286 		    {
9287 		      if (REG_P (uloc))
9288 			var_reg_delete (set, uloc, true);
9289 		      else if (MEM_P (uloc))
9290 			{
9291 			  gcc_assert (MEM_P (dstv));
9292 			  gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
9293 			  var_mem_delete (set, dstv, true);
9294 			}
9295 		    }
9296 		  else
9297 		    {
9298 		      bool copied_p = VAL_EXPR_IS_COPIED (loc);
9299 		      rtx src = NULL, dst = uloc;
9300 		      enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
9301 
9302 		      if (GET_CODE (uloc) == SET)
9303 			{
9304 			  src = SET_SRC (uloc);
9305 			  dst = SET_DEST (uloc);
9306 			}
9307 
9308 		      if (copied_p)
9309 			{
9310 			  status = find_src_status (set, src);
9311 
9312 			  src = find_src_set_src (set, src);
9313 			}
9314 
9315 		      if (REG_P (dst))
9316 			var_reg_delete_and_set (set, dst, !copied_p,
9317 						status, srcv);
9318 		      else if (MEM_P (dst))
9319 			{
9320 			  gcc_assert (MEM_P (dstv));
9321 			  gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
9322 			  var_mem_delete_and_set (set, dstv, !copied_p,
9323 						  status, srcv);
9324 			}
9325 		    }
9326 		}
9327 	      else if (REG_P (uloc))
9328 		var_regno_delete (set, REGNO (uloc));
9329 	      else if (MEM_P (uloc))
9330 		{
9331 		  gcc_checking_assert (GET_CODE (vloc) == MEM);
9332 		  gcc_checking_assert (vloc == dstv);
9333 		  if (vloc != dstv)
9334 		    clobber_overlapping_mems (set, vloc);
9335 		}
9336 
9337 	      val_store (set, val, dstv, insn, true);
9338 
9339 	      emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9340 				      set->vars);
9341 	    }
9342 	    break;
9343 
9344 	  case MO_SET:
9345 	    {
9346 	      rtx loc = mo->u.loc;
9347 	      rtx set_src = NULL;
9348 
9349 	      if (GET_CODE (loc) == SET)
9350 		{
9351 		  set_src = SET_SRC (loc);
9352 		  loc = SET_DEST (loc);
9353 		}
9354 
9355 	      if (REG_P (loc))
9356 		var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9357 					set_src);
9358 	      else
9359 		var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9360 					set_src);
9361 
9362 	      emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9363 				      set->vars);
9364 	    }
9365 	    break;
9366 
9367 	  case MO_COPY:
9368 	    {
9369 	      rtx loc = mo->u.loc;
9370 	      enum var_init_status src_status;
9371 	      rtx set_src = NULL;
9372 
9373 	      if (GET_CODE (loc) == SET)
9374 		{
9375 		  set_src = SET_SRC (loc);
9376 		  loc = SET_DEST (loc);
9377 		}
9378 
9379 	      src_status = find_src_status (set, set_src);
9380 	      set_src = find_src_set_src (set, set_src);
9381 
9382 	      if (REG_P (loc))
9383 		var_reg_delete_and_set (set, loc, false, src_status, set_src);
9384 	      else
9385 		var_mem_delete_and_set (set, loc, false, src_status, set_src);
9386 
9387 	      emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9388 				      set->vars);
9389 	    }
9390 	    break;
9391 
9392 	  case MO_USE_NO_VAR:
9393 	    {
9394 	      rtx loc = mo->u.loc;
9395 
9396 	      if (REG_P (loc))
9397 		var_reg_delete (set, loc, false);
9398 	      else
9399 		var_mem_delete (set, loc, false);
9400 
9401 	      emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9402 	    }
9403 	    break;
9404 
9405 	  case MO_CLOBBER:
9406 	    {
9407 	      rtx loc = mo->u.loc;
9408 
9409 	      if (REG_P (loc))
9410 		var_reg_delete (set, loc, true);
9411 	      else
9412 		var_mem_delete (set, loc, true);
9413 
9414 	      emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9415 				      set->vars);
9416 	    }
9417 	    break;
9418 
9419 	  case MO_ADJUST:
9420 	    set->stack_adjust += mo->u.adjust;
9421 	    break;
9422 	}
9423     }
9424 }
9425 
9426 /* Emit notes for the whole function.  */
9427 
9428 static void
vt_emit_notes(void)9429 vt_emit_notes (void)
9430 {
9431   basic_block bb;
9432   dataflow_set cur;
9433 
9434   gcc_assert (!changed_variables.elements ());
9435 
9436   /* Free memory occupied by the out hash tables, as they aren't used
9437      anymore.  */
9438   FOR_EACH_BB_FN (bb, cfun)
9439     dataflow_set_clear (&VTI (bb)->out);
9440 
9441   /* Enable emitting notes by functions (mainly by set_variable_part and
9442      delete_variable_part).  */
9443   emit_notes = true;
9444 
9445   if (MAY_HAVE_DEBUG_INSNS)
9446     {
9447       dropped_values.create (cselib_get_next_uid () * 2);
9448       loc_exp_dep_pool = create_alloc_pool ("loc_exp_dep pool",
9449 					    sizeof (loc_exp_dep), 64);
9450     }
9451 
9452   dataflow_set_init (&cur);
9453 
9454   FOR_EACH_BB_FN (bb, cfun)
9455     {
9456       /* Emit the notes for changes of variable locations between two
9457 	 subsequent basic blocks.  */
9458       emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
9459 
9460       if (MAY_HAVE_DEBUG_INSNS)
9461 	local_get_addr_cache = pointer_map_create ();
9462 
9463       /* Emit the notes for the changes in the basic block itself.  */
9464       emit_notes_in_bb (bb, &cur);
9465 
9466       if (MAY_HAVE_DEBUG_INSNS)
9467 	pointer_map_destroy (local_get_addr_cache);
9468       local_get_addr_cache = NULL;
9469 
9470       /* Free memory occupied by the in hash table, we won't need it
9471 	 again.  */
9472       dataflow_set_clear (&VTI (bb)->in);
9473     }
9474 #ifdef ENABLE_CHECKING
9475   shared_hash_htab (cur.vars)
9476     .traverse <variable_table_type, emit_notes_for_differences_1>
9477       (shared_hash_htab (empty_shared_hash));
9478 #endif
9479   dataflow_set_destroy (&cur);
9480 
9481   if (MAY_HAVE_DEBUG_INSNS)
9482     dropped_values.dispose ();
9483 
9484   emit_notes = false;
9485 }
9486 
9487 /* If there is a declaration and offset associated with register/memory RTL
9488    assign declaration to *DECLP and offset to *OFFSETP, and return true.  */
9489 
9490 static bool
vt_get_decl_and_offset(rtx rtl,tree * declp,HOST_WIDE_INT * offsetp)9491 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
9492 {
9493   if (REG_P (rtl))
9494     {
9495       if (REG_ATTRS (rtl))
9496 	{
9497 	  *declp = REG_EXPR (rtl);
9498 	  *offsetp = REG_OFFSET (rtl);
9499 	  return true;
9500 	}
9501     }
9502   else if (GET_CODE (rtl) == PARALLEL)
9503     {
9504       tree decl = NULL_TREE;
9505       HOST_WIDE_INT offset = MAX_VAR_PARTS;
9506       int len = XVECLEN (rtl, 0), i;
9507 
9508       for (i = 0; i < len; i++)
9509 	{
9510 	  rtx reg = XEXP (XVECEXP (rtl, 0, i), 0);
9511 	  if (!REG_P (reg) || !REG_ATTRS (reg))
9512 	    break;
9513 	  if (!decl)
9514 	    decl = REG_EXPR (reg);
9515 	  if (REG_EXPR (reg) != decl)
9516 	    break;
9517 	  if (REG_OFFSET (reg) < offset)
9518 	    offset = REG_OFFSET (reg);
9519 	}
9520 
9521       if (i == len)
9522 	{
9523 	  *declp = decl;
9524 	  *offsetp = offset;
9525 	  return true;
9526 	}
9527     }
9528   else if (MEM_P (rtl))
9529     {
9530       if (MEM_ATTRS (rtl))
9531 	{
9532 	  *declp = MEM_EXPR (rtl);
9533 	  *offsetp = INT_MEM_OFFSET (rtl);
9534 	  return true;
9535 	}
9536     }
9537   return false;
9538 }
9539 
9540 /* Record the value for the ENTRY_VALUE of RTL as a global equivalence
9541    of VAL.  */
9542 
9543 static void
record_entry_value(cselib_val * val,rtx rtl)9544 record_entry_value (cselib_val *val, rtx rtl)
9545 {
9546   rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));
9547 
9548   ENTRY_VALUE_EXP (ev) = rtl;
9549 
9550   cselib_add_permanent_equiv (val, ev, get_insns ());
9551 }
9552 
9553 /* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK.  */
9554 
9555 static void
vt_add_function_parameter(tree parm)9556 vt_add_function_parameter (tree parm)
9557 {
9558   rtx decl_rtl = DECL_RTL_IF_SET (parm);
9559   rtx incoming = DECL_INCOMING_RTL (parm);
9560   tree decl;
9561   enum machine_mode mode;
9562   HOST_WIDE_INT offset;
9563   dataflow_set *out;
9564   decl_or_value dv;
9565 
9566   if (TREE_CODE (parm) != PARM_DECL)
9567     return;
9568 
9569   if (!decl_rtl || !incoming)
9570     return;
9571 
9572   if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
9573     return;
9574 
9575   /* If there is a DRAP register or a pseudo in internal_arg_pointer,
9576      rewrite the incoming location of parameters passed on the stack
9577      into MEMs based on the argument pointer, so that incoming doesn't
9578      depend on a pseudo.  */
9579   if (MEM_P (incoming)
9580       && (XEXP (incoming, 0) == crtl->args.internal_arg_pointer
9581 	  || (GET_CODE (XEXP (incoming, 0)) == PLUS
9582 	      && XEXP (XEXP (incoming, 0), 0)
9583 		 == crtl->args.internal_arg_pointer
9584 	      && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
9585     {
9586       HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
9587       if (GET_CODE (XEXP (incoming, 0)) == PLUS)
9588 	off += INTVAL (XEXP (XEXP (incoming, 0), 1));
9589       incoming
9590 	= replace_equiv_address_nv (incoming,
9591 				    plus_constant (Pmode,
9592 						   arg_pointer_rtx, off));
9593     }
9594 
9595 #ifdef HAVE_window_save
9596   /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
9597      If the target machine has an explicit window save instruction, the
9598      actual entry value is the corresponding OUTGOING_REGNO instead.  */
9599   if (HAVE_window_save && !crtl->uses_only_leaf_regs)
9600     {
9601       if (REG_P (incoming)
9602 	  && HARD_REGISTER_P (incoming)
9603 	  && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
9604 	{
9605 	  parm_reg_t p;
9606 	  p.incoming = incoming;
9607 	  incoming
9608 	    = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
9609 				  OUTGOING_REGNO (REGNO (incoming)), 0);
9610 	  p.outgoing = incoming;
9611 	  vec_safe_push (windowed_parm_regs, p);
9612 	}
9613       else if (GET_CODE (incoming) == PARALLEL)
9614 	{
9615 	  rtx outgoing
9616 	    = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (XVECLEN (incoming, 0)));
9617 	  int i;
9618 
9619 	  for (i = 0; i < XVECLEN (incoming, 0); i++)
9620 	    {
9621 	      rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9622 	      parm_reg_t p;
9623 	      p.incoming = reg;
9624 	      reg = gen_rtx_REG_offset (reg, GET_MODE (reg),
9625 					OUTGOING_REGNO (REGNO (reg)), 0);
9626 	      p.outgoing = reg;
9627 	      XVECEXP (outgoing, 0, i)
9628 		= gen_rtx_EXPR_LIST (VOIDmode, reg,
9629 				     XEXP (XVECEXP (incoming, 0, i), 1));
9630 	      vec_safe_push (windowed_parm_regs, p);
9631 	    }
9632 
9633 	  incoming = outgoing;
9634 	}
9635       else if (MEM_P (incoming)
9636 	       && REG_P (XEXP (incoming, 0))
9637 	       && HARD_REGISTER_P (XEXP (incoming, 0)))
9638 	{
9639 	  rtx reg = XEXP (incoming, 0);
9640 	  if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
9641 	    {
9642 	      parm_reg_t p;
9643 	      p.incoming = reg;
9644 	      reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
9645 	      p.outgoing = reg;
9646 	      vec_safe_push (windowed_parm_regs, p);
9647 	      incoming = replace_equiv_address_nv (incoming, reg);
9648 	    }
9649 	}
9650     }
9651 #endif
9652 
9653   if (!vt_get_decl_and_offset (incoming, &decl, &offset))
9654     {
9655       if (MEM_P (incoming))
9656 	{
9657 	  /* This means argument is passed by invisible reference.  */
9658 	  offset = 0;
9659 	  decl = parm;
9660 	}
9661       else
9662 	{
9663 	  if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
9664 	    return;
9665 	  offset += byte_lowpart_offset (GET_MODE (incoming),
9666 					 GET_MODE (decl_rtl));
9667 	}
9668     }
9669 
9670   if (!decl)
9671     return;
9672 
9673   if (parm != decl)
9674     {
9675       /* If that DECL_RTL wasn't a pseudo that got spilled to
9676 	 memory, bail out.  Otherwise, the spill slot sharing code
9677 	 will force the memory to reference spill_slot_decl (%sfp),
9678 	 so we don't match above.  That's ok, the pseudo must have
9679 	 referenced the entire parameter, so just reset OFFSET.  */
9680       if (decl != get_spill_slot_decl (false))
9681         return;
9682       offset = 0;
9683     }
9684 
9685   if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
9686     return;
9687 
9688   out = &VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out;
9689 
9690   dv = dv_from_decl (parm);
9691 
9692   if (target_for_debug_bind (parm)
9693       /* We can't deal with these right now, because this kind of
9694 	 variable is single-part.  ??? We could handle parallels
9695 	 that describe multiple locations for the same single
9696 	 value, but ATM we don't.  */
9697       && GET_CODE (incoming) != PARALLEL)
9698     {
9699       cselib_val *val;
9700       rtx lowpart;
9701 
9702       /* ??? We shouldn't ever hit this, but it may happen because
9703 	 arguments passed by invisible reference aren't dealt with
9704 	 above: incoming-rtl will have Pmode rather than the
9705 	 expected mode for the type.  */
9706       if (offset)
9707 	return;
9708 
9709       lowpart = var_lowpart (mode, incoming);
9710       if (!lowpart)
9711 	return;
9712 
9713       val = cselib_lookup_from_insn (lowpart, mode, true,
9714 				     VOIDmode, get_insns ());
9715 
9716       /* ??? Float-typed values in memory are not handled by
9717 	 cselib.  */
9718       if (val)
9719 	{
9720 	  preserve_value (val);
9721 	  set_variable_part (out, val->val_rtx, dv, offset,
9722 			     VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9723 	  dv = dv_from_value (val->val_rtx);
9724 	}
9725 
9726       if (MEM_P (incoming))
9727 	{
9728 	  val = cselib_lookup_from_insn (XEXP (incoming, 0), mode, true,
9729 					 VOIDmode, get_insns ());
9730 	  if (val)
9731 	    {
9732 	      preserve_value (val);
9733 	      incoming = replace_equiv_address_nv (incoming, val->val_rtx);
9734 	    }
9735 	}
9736     }
9737 
9738   if (REG_P (incoming))
9739     {
9740       incoming = var_lowpart (mode, incoming);
9741       gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
9742       attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
9743 			 incoming);
9744       set_variable_part (out, incoming, dv, offset,
9745 			 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9746       if (dv_is_value_p (dv))
9747 	{
9748 	  record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv)), incoming);
9749 	  if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
9750 	      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
9751 	    {
9752 	      enum machine_mode indmode
9753 		= TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
9754 	      rtx mem = gen_rtx_MEM (indmode, incoming);
9755 	      cselib_val *val = cselib_lookup_from_insn (mem, indmode, true,
9756 							 VOIDmode,
9757 							 get_insns ());
9758 	      if (val)
9759 		{
9760 		  preserve_value (val);
9761 		  record_entry_value (val, mem);
9762 		  set_variable_part (out, mem, dv_from_value (val->val_rtx), 0,
9763 				     VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9764 		}
9765 	    }
9766 	}
9767     }
9768   else if (GET_CODE (incoming) == PARALLEL && !dv_onepart_p (dv))
9769     {
9770       int i;
9771 
9772       for (i = 0; i < XVECLEN (incoming, 0); i++)
9773 	{
9774 	  rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9775 	  offset = REG_OFFSET (reg);
9776 	  gcc_assert (REGNO (reg) < FIRST_PSEUDO_REGISTER);
9777 	  attrs_list_insert (&out->regs[REGNO (reg)], dv, offset, reg);
9778 	  set_variable_part (out, reg, dv, offset,
9779 			     VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9780 	}
9781     }
9782   else if (MEM_P (incoming))
9783     {
9784       incoming = var_lowpart (mode, incoming);
9785       set_variable_part (out, incoming, dv, offset,
9786 			 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9787     }
9788 }
9789 
9790 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK.  */
9791 
9792 static void
vt_add_function_parameters(void)9793 vt_add_function_parameters (void)
9794 {
9795   tree parm;
9796 
9797   for (parm = DECL_ARGUMENTS (current_function_decl);
9798        parm; parm = DECL_CHAIN (parm))
9799     vt_add_function_parameter (parm);
9800 
9801   if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
9802     {
9803       tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));
9804 
9805       if (TREE_CODE (vexpr) == INDIRECT_REF)
9806 	vexpr = TREE_OPERAND (vexpr, 0);
9807 
9808       if (TREE_CODE (vexpr) == PARM_DECL
9809 	  && DECL_ARTIFICIAL (vexpr)
9810 	  && !DECL_IGNORED_P (vexpr)
9811 	  && DECL_NAMELESS (vexpr))
9812 	vt_add_function_parameter (vexpr);
9813     }
9814 }
9815 
9816 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
9817    ensure it isn't flushed during cselib_reset_table.
9818    Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
9819    has been eliminated.  */
9820 
9821 static void
vt_init_cfa_base(void)9822 vt_init_cfa_base (void)
9823 {
9824   cselib_val *val;
9825 
9826 #ifdef FRAME_POINTER_CFA_OFFSET
9827   cfa_base_rtx = frame_pointer_rtx;
9828   cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
9829 #else
9830   cfa_base_rtx = arg_pointer_rtx;
9831   cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
9832 #endif
9833   if (cfa_base_rtx == hard_frame_pointer_rtx
9834       || !fixed_regs[REGNO (cfa_base_rtx)])
9835     {
9836       cfa_base_rtx = NULL_RTX;
9837       return;
9838     }
9839   if (!MAY_HAVE_DEBUG_INSNS)
9840     return;
9841 
9842   /* Tell alias analysis that cfa_base_rtx should share
9843      find_base_term value with stack pointer or hard frame pointer.  */
9844   if (!frame_pointer_needed)
9845     vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
9846   else if (!crtl->stack_realign_tried)
9847     vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);
9848 
9849   val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
9850 				 VOIDmode, get_insns ());
9851   preserve_value (val);
9852   cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
9853 }
9854 
9855 /* Allocate and initialize the data structures for variable tracking
9856    and parse the RTL to get the micro operations.  */
9857 
9858 static bool
vt_initialize(void)9859 vt_initialize (void)
9860 {
9861   basic_block bb;
9862   HOST_WIDE_INT fp_cfa_offset = -1;
9863 
9864   alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
9865 
9866   attrs_pool = create_alloc_pool ("attrs_def pool",
9867 				  sizeof (struct attrs_def), 1024);
9868   var_pool = create_alloc_pool ("variable_def pool",
9869 				sizeof (struct variable_def)
9870 				+ (MAX_VAR_PARTS - 1)
9871 				* sizeof (((variable)NULL)->var_part[0]), 64);
9872   loc_chain_pool = create_alloc_pool ("location_chain_def pool",
9873 				      sizeof (struct location_chain_def),
9874 				      1024);
9875   shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
9876 					sizeof (struct shared_hash_def), 256);
9877   empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
9878   empty_shared_hash->refcount = 1;
9879   empty_shared_hash->htab.create (1);
9880   changed_variables.create (10);
9881 
9882   /* Init the IN and OUT sets.  */
9883   FOR_ALL_BB_FN (bb, cfun)
9884     {
9885       VTI (bb)->visited = false;
9886       VTI (bb)->flooded = false;
9887       dataflow_set_init (&VTI (bb)->in);
9888       dataflow_set_init (&VTI (bb)->out);
9889       VTI (bb)->permp = NULL;
9890     }
9891 
9892   if (MAY_HAVE_DEBUG_INSNS)
9893     {
9894       cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
9895       scratch_regs = BITMAP_ALLOC (NULL);
9896       valvar_pool = create_alloc_pool ("small variable_def pool",
9897 				       sizeof (struct variable_def), 256);
9898       preserved_values.create (256);
9899       global_get_addr_cache = pointer_map_create ();
9900     }
9901   else
9902     {
9903       scratch_regs = NULL;
9904       valvar_pool = NULL;
9905       global_get_addr_cache = NULL;
9906     }
9907 
9908   if (MAY_HAVE_DEBUG_INSNS)
9909     {
9910       rtx reg, expr;
9911       int ofst;
9912       cselib_val *val;
9913 
9914 #ifdef FRAME_POINTER_CFA_OFFSET
9915       reg = frame_pointer_rtx;
9916       ofst = FRAME_POINTER_CFA_OFFSET (current_function_decl);
9917 #else
9918       reg = arg_pointer_rtx;
9919       ofst = ARG_POINTER_CFA_OFFSET (current_function_decl);
9920 #endif
9921 
9922       ofst -= INCOMING_FRAME_SP_OFFSET;
9923 
9924       val = cselib_lookup_from_insn (reg, GET_MODE (reg), 1,
9925 				     VOIDmode, get_insns ());
9926       preserve_value (val);
9927       if (reg != hard_frame_pointer_rtx && fixed_regs[REGNO (reg)])
9928 	cselib_preserve_cfa_base_value (val, REGNO (reg));
9929       expr = plus_constant (GET_MODE (stack_pointer_rtx),
9930 			    stack_pointer_rtx, -ofst);
9931       cselib_add_permanent_equiv (val, expr, get_insns ());
9932 
9933       if (ofst)
9934 	{
9935 	  val = cselib_lookup_from_insn (stack_pointer_rtx,
9936 					 GET_MODE (stack_pointer_rtx), 1,
9937 					 VOIDmode, get_insns ());
9938 	  preserve_value (val);
9939 	  expr = plus_constant (GET_MODE (reg), reg, ofst);
9940 	  cselib_add_permanent_equiv (val, expr, get_insns ());
9941 	}
9942     }
9943 
9944   /* In order to factor out the adjustments made to the stack pointer or to
9945      the hard frame pointer and thus be able to use DW_OP_fbreg operations
9946      instead of individual location lists, we're going to rewrite MEMs based
9947      on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
9948      or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
9949      resp. arg_pointer_rtx.  We can do this either when there is no frame
9950      pointer in the function and stack adjustments are consistent for all
9951      basic blocks or when there is a frame pointer and no stack realignment.
9952      But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
9953      has been eliminated.  */
9954   if (!frame_pointer_needed)
9955     {
9956       rtx reg, elim;
9957 
9958       if (!vt_stack_adjustments ())
9959 	return false;
9960 
9961 #ifdef FRAME_POINTER_CFA_OFFSET
9962       reg = frame_pointer_rtx;
9963 #else
9964       reg = arg_pointer_rtx;
9965 #endif
9966       elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9967       if (elim != reg)
9968 	{
9969 	  if (GET_CODE (elim) == PLUS)
9970 	    elim = XEXP (elim, 0);
9971 	  if (elim == stack_pointer_rtx)
9972 	    vt_init_cfa_base ();
9973 	}
9974     }
9975   else if (!crtl->stack_realign_tried)
9976     {
9977       rtx reg, elim;
9978 
9979 #ifdef FRAME_POINTER_CFA_OFFSET
9980       reg = frame_pointer_rtx;
9981       fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
9982 #else
9983       reg = arg_pointer_rtx;
9984       fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
9985 #endif
9986       elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9987       if (elim != reg)
9988 	{
9989 	  if (GET_CODE (elim) == PLUS)
9990 	    {
9991 	      fp_cfa_offset -= INTVAL (XEXP (elim, 1));
9992 	      elim = XEXP (elim, 0);
9993 	    }
9994 	  if (elim != hard_frame_pointer_rtx)
9995 	    fp_cfa_offset = -1;
9996 	}
9997       else
9998 	fp_cfa_offset = -1;
9999     }
10000 
10001   /* If the stack is realigned and a DRAP register is used, we're going to
10002      rewrite MEMs based on it representing incoming locations of parameters
10003      passed on the stack into MEMs based on the argument pointer.  Although
10004      we aren't going to rewrite other MEMs, we still need to initialize the
10005      virtual CFA pointer in order to ensure that the argument pointer will
10006      be seen as a constant throughout the function.
10007 
10008      ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined.  */
10009   else if (stack_realign_drap)
10010     {
10011       rtx reg, elim;
10012 
10013 #ifdef FRAME_POINTER_CFA_OFFSET
10014       reg = frame_pointer_rtx;
10015 #else
10016       reg = arg_pointer_rtx;
10017 #endif
10018       elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10019       if (elim != reg)
10020 	{
10021 	  if (GET_CODE (elim) == PLUS)
10022 	    elim = XEXP (elim, 0);
10023 	  if (elim == hard_frame_pointer_rtx)
10024 	    vt_init_cfa_base ();
10025 	}
10026     }
10027 
10028   hard_frame_pointer_adjustment = -1;
10029 
10030   vt_add_function_parameters ();
10031 
10032   FOR_EACH_BB_FN (bb, cfun)
10033     {
10034       rtx insn;
10035       HOST_WIDE_INT pre, post = 0;
10036       basic_block first_bb, last_bb;
10037 
10038       if (MAY_HAVE_DEBUG_INSNS)
10039 	{
10040 	  cselib_record_sets_hook = add_with_sets;
10041 	  if (dump_file && (dump_flags & TDF_DETAILS))
10042 	    fprintf (dump_file, "first value: %i\n",
10043 		     cselib_get_next_uid ());
10044 	}
10045 
10046       first_bb = bb;
10047       for (;;)
10048 	{
10049 	  edge e;
10050 	  if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
10051 	      || ! single_pred_p (bb->next_bb))
10052 	    break;
10053 	  e = find_edge (bb, bb->next_bb);
10054 	  if (! e || (e->flags & EDGE_FALLTHRU) == 0)
10055 	    break;
10056 	  bb = bb->next_bb;
10057 	}
10058       last_bb = bb;
10059 
10060       /* Add the micro-operations to the vector.  */
10061       FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
10062 	{
10063 	  HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
10064 	  VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
10065 	  for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
10066 	       insn = NEXT_INSN (insn))
10067 	    {
10068 	      if (INSN_P (insn))
10069 		{
10070 		  if (!frame_pointer_needed)
10071 		    {
10072 		      insn_stack_adjust_offset_pre_post (insn, &pre, &post);
10073 		      if (pre)
10074 			{
10075 			  micro_operation mo;
10076 			  mo.type = MO_ADJUST;
10077 			  mo.u.adjust = pre;
10078 			  mo.insn = insn;
10079 			  if (dump_file && (dump_flags & TDF_DETAILS))
10080 			    log_op_type (PATTERN (insn), bb, insn,
10081 					 MO_ADJUST, dump_file);
10082 			  VTI (bb)->mos.safe_push (mo);
10083 			  VTI (bb)->out.stack_adjust += pre;
10084 			}
10085 		    }
10086 
10087 		  cselib_hook_called = false;
10088 		  adjust_insn (bb, insn);
10089 		  if (MAY_HAVE_DEBUG_INSNS)
10090 		    {
10091 		      if (CALL_P (insn))
10092 			prepare_call_arguments (bb, insn);
10093 		      cselib_process_insn (insn);
10094 		      if (dump_file && (dump_flags & TDF_DETAILS))
10095 			{
10096 			  print_rtl_single (dump_file, insn);
10097 			  dump_cselib_table (dump_file);
10098 			}
10099 		    }
10100 		  if (!cselib_hook_called)
10101 		    add_with_sets (insn, 0, 0);
10102 		  cancel_changes (0);
10103 
10104 		  if (!frame_pointer_needed && post)
10105 		    {
10106 		      micro_operation mo;
10107 		      mo.type = MO_ADJUST;
10108 		      mo.u.adjust = post;
10109 		      mo.insn = insn;
10110 		      if (dump_file && (dump_flags & TDF_DETAILS))
10111 			log_op_type (PATTERN (insn), bb, insn,
10112 				     MO_ADJUST, dump_file);
10113 		      VTI (bb)->mos.safe_push (mo);
10114 		      VTI (bb)->out.stack_adjust += post;
10115 		    }
10116 
10117 		  if (fp_cfa_offset != -1
10118 		      && hard_frame_pointer_adjustment == -1
10119 		      && fp_setter_insn (insn))
10120 		    {
10121 		      vt_init_cfa_base ();
10122 		      hard_frame_pointer_adjustment = fp_cfa_offset;
10123 		      /* Disassociate sp from fp now.  */
10124 		      if (MAY_HAVE_DEBUG_INSNS)
10125 			{
10126 			  cselib_val *v;
10127 			  cselib_invalidate_rtx (stack_pointer_rtx);
10128 			  v = cselib_lookup (stack_pointer_rtx, Pmode, 1,
10129 					     VOIDmode);
10130 			  if (v && !cselib_preserved_value_p (v))
10131 			    {
10132 			      cselib_set_value_sp_based (v);
10133 			      preserve_value (v);
10134 			    }
10135 			}
10136 		    }
10137 		}
10138 	    }
10139 	  gcc_assert (offset == VTI (bb)->out.stack_adjust);
10140 	}
10141 
10142       bb = last_bb;
10143 
10144       if (MAY_HAVE_DEBUG_INSNS)
10145 	{
10146 	  cselib_preserve_only_values ();
10147 	  cselib_reset_table (cselib_get_next_uid ());
10148 	  cselib_record_sets_hook = NULL;
10149 	}
10150     }
10151 
10152   hard_frame_pointer_adjustment = -1;
10153   VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->flooded = true;
10154   cfa_base_rtx = NULL_RTX;
10155   return true;
10156 }
10157 
10158 /* This is *not* reset after each function.  It gives each
10159    NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
10160    a unique label number.  */
10161 
10162 static int debug_label_num = 1;
10163 
10164 /* Get rid of all debug insns from the insn stream.  */
10165 
10166 static void
delete_debug_insns(void)10167 delete_debug_insns (void)
10168 {
10169   basic_block bb;
10170   rtx insn, next;
10171 
10172   if (!MAY_HAVE_DEBUG_INSNS)
10173     return;
10174 
10175   FOR_EACH_BB_FN (bb, cfun)
10176     {
10177       FOR_BB_INSNS_SAFE (bb, insn, next)
10178 	if (DEBUG_INSN_P (insn))
10179 	  {
10180 	    tree decl = INSN_VAR_LOCATION_DECL (insn);
10181 	    if (TREE_CODE (decl) == LABEL_DECL
10182 		&& DECL_NAME (decl)
10183 		&& !DECL_RTL_SET_P (decl))
10184 	      {
10185 		PUT_CODE (insn, NOTE);
10186 		NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
10187 		NOTE_DELETED_LABEL_NAME (insn)
10188 		  = IDENTIFIER_POINTER (DECL_NAME (decl));
10189 		SET_DECL_RTL (decl, insn);
10190 		CODE_LABEL_NUMBER (insn) = debug_label_num++;
10191 	      }
10192 	    else
10193 	      delete_insn (insn);
10194 	  }
10195     }
10196 }
10197 
10198 /* Run a fast, BB-local only version of var tracking, to take care of
10199    information that we don't do global analysis on, such that not all
10200    information is lost.  If SKIPPED holds, we're skipping the global
10201    pass entirely, so we should try to use information it would have
10202    handled as well..  */
10203 
10204 static void
vt_debug_insns_local(bool skipped ATTRIBUTE_UNUSED)10205 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
10206 {
10207   /* ??? Just skip it all for now.  */
10208   delete_debug_insns ();
10209 }
10210 
10211 /* Free the data structures needed for variable tracking.  */
10212 
10213 static void
vt_finalize(void)10214 vt_finalize (void)
10215 {
10216   basic_block bb;
10217 
10218   FOR_EACH_BB_FN (bb, cfun)
10219     {
10220       VTI (bb)->mos.release ();
10221     }
10222 
10223   FOR_ALL_BB_FN (bb, cfun)
10224     {
10225       dataflow_set_destroy (&VTI (bb)->in);
10226       dataflow_set_destroy (&VTI (bb)->out);
10227       if (VTI (bb)->permp)
10228 	{
10229 	  dataflow_set_destroy (VTI (bb)->permp);
10230 	  XDELETE (VTI (bb)->permp);
10231 	}
10232     }
10233   free_aux_for_blocks ();
10234   empty_shared_hash->htab.dispose ();
10235   changed_variables.dispose ();
10236   free_alloc_pool (attrs_pool);
10237   free_alloc_pool (var_pool);
10238   free_alloc_pool (loc_chain_pool);
10239   free_alloc_pool (shared_hash_pool);
10240 
10241   if (MAY_HAVE_DEBUG_INSNS)
10242     {
10243       if (global_get_addr_cache)
10244 	pointer_map_destroy (global_get_addr_cache);
10245       global_get_addr_cache = NULL;
10246       if (loc_exp_dep_pool)
10247 	free_alloc_pool (loc_exp_dep_pool);
10248       loc_exp_dep_pool = NULL;
10249       free_alloc_pool (valvar_pool);
10250       preserved_values.release ();
10251       cselib_finish ();
10252       BITMAP_FREE (scratch_regs);
10253       scratch_regs = NULL;
10254     }
10255 
10256 #ifdef HAVE_window_save
10257   vec_free (windowed_parm_regs);
10258 #endif
10259 
10260   if (vui_vec)
10261     XDELETEVEC (vui_vec);
10262   vui_vec = NULL;
10263   vui_allocated = 0;
10264 }
10265 
10266 /* The entry point to variable tracking pass.  */
10267 
10268 static inline unsigned int
variable_tracking_main_1(void)10269 variable_tracking_main_1 (void)
10270 {
10271   bool success;
10272 
10273   if (flag_var_tracking_assignments < 0)
10274     {
10275       delete_debug_insns ();
10276       return 0;
10277     }
10278 
10279   if (n_basic_blocks_for_fn (cfun) > 500 &&
10280       n_edges_for_fn (cfun) / n_basic_blocks_for_fn (cfun) >= 20)
10281     {
10282       vt_debug_insns_local (true);
10283       return 0;
10284     }
10285 
10286   mark_dfs_back_edges ();
10287   if (!vt_initialize ())
10288     {
10289       vt_finalize ();
10290       vt_debug_insns_local (true);
10291       return 0;
10292     }
10293 
10294   success = vt_find_locations ();
10295 
10296   if (!success && flag_var_tracking_assignments > 0)
10297     {
10298       vt_finalize ();
10299 
10300       delete_debug_insns ();
10301 
10302       /* This is later restored by our caller.  */
10303       flag_var_tracking_assignments = 0;
10304 
10305       success = vt_initialize ();
10306       gcc_assert (success);
10307 
10308       success = vt_find_locations ();
10309     }
10310 
10311   if (!success)
10312     {
10313       vt_finalize ();
10314       vt_debug_insns_local (false);
10315       return 0;
10316     }
10317 
10318   if (dump_file && (dump_flags & TDF_DETAILS))
10319     {
10320       dump_dataflow_sets ();
10321       dump_reg_info (dump_file);
10322       dump_flow_info (dump_file, dump_flags);
10323     }
10324 
10325   timevar_push (TV_VAR_TRACKING_EMIT);
10326   vt_emit_notes ();
10327   timevar_pop (TV_VAR_TRACKING_EMIT);
10328 
10329   vt_finalize ();
10330   vt_debug_insns_local (false);
10331   return 0;
10332 }
10333 
10334 unsigned int
variable_tracking_main(void)10335 variable_tracking_main (void)
10336 {
10337   unsigned int ret;
10338   int save = flag_var_tracking_assignments;
10339 
10340   ret = variable_tracking_main_1 ();
10341 
10342   flag_var_tracking_assignments = save;
10343 
10344   return ret;
10345 }
10346 
10347 static bool
gate_handle_var_tracking(void)10348 gate_handle_var_tracking (void)
10349 {
10350   return (flag_var_tracking && !targetm.delay_vartrack);
10351 }
10352 
10353 
10354 
10355 namespace {
10356 
10357 const pass_data pass_data_variable_tracking =
10358 {
10359   RTL_PASS, /* type */
10360   "vartrack", /* name */
10361   OPTGROUP_NONE, /* optinfo_flags */
10362   true, /* has_gate */
10363   true, /* has_execute */
10364   TV_VAR_TRACKING, /* tv_id */
10365   0, /* properties_required */
10366   0, /* properties_provided */
10367   0, /* properties_destroyed */
10368   0, /* todo_flags_start */
10369   ( TODO_verify_rtl_sharing | TODO_verify_flow ), /* todo_flags_finish */
10370 };
10371 
10372 class pass_variable_tracking : public rtl_opt_pass
10373 {
10374 public:
pass_variable_tracking(gcc::context * ctxt)10375   pass_variable_tracking (gcc::context *ctxt)
10376     : rtl_opt_pass (pass_data_variable_tracking, ctxt)
10377   {}
10378 
10379   /* opt_pass methods: */
gate()10380   bool gate () { return gate_handle_var_tracking (); }
execute()10381   unsigned int execute () { return variable_tracking_main (); }
10382 
10383 }; // class pass_variable_tracking
10384 
10385 } // anon namespace
10386 
10387 rtl_opt_pass *
make_pass_variable_tracking(gcc::context * ctxt)10388 make_pass_variable_tracking (gcc::context *ctxt)
10389 {
10390   return new pass_variable_tracking (ctxt);
10391 }
10392