1 /* Variable tracking routines for the GNU compiler.
2    Copyright (C) 2002-2019 Free Software Foundation, Inc.
3 
4    This file is part of GCC.
5 
6    GCC is free software; you can redistribute it and/or modify it
7    under the terms of the GNU General Public License as published by
8    the Free Software Foundation; either version 3, or (at your option)
9    any later version.
10 
11    GCC is distributed in the hope that it will be useful, but WITHOUT
12    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13    or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
14    License for more details.
15 
16    You should have received a copy of the GNU General Public License
17    along with GCC; see the file COPYING3.  If not see
18    <http://www.gnu.org/licenses/>.  */
19 
20 /* This file contains the variable tracking pass.  It computes where
21    variables are located (which registers or where in memory) at each position
22    in instruction stream and emits notes describing the locations.
23    Debug information (DWARF2 location lists) is finally generated from
24    these notes.
25    With this debug information, it is possible to show variables
26    even when debugging optimized code.
27 
28    How does the variable tracking pass work?
29 
30    First, it scans RTL code for uses, stores and clobbers (register/memory
31    references in instructions), for call insns and for stack adjustments
32    separately for each basic block and saves them to an array of micro
33    operations.
34    The micro operations of one instruction are ordered so that
35    pre-modifying stack adjustment < use < use with no var < call insn <
36      < clobber < set < post-modifying stack adjustment
37 
38    Then, a forward dataflow analysis is performed to find out how locations
39    of variables change through code and to propagate the variable locations
40    along control flow graph.
41    The IN set for basic block BB is computed as a union of OUT sets of BB's
42    predecessors, the OUT set for BB is copied from the IN set for BB and
43    is changed according to micro operations in BB.
44 
45    The IN and OUT sets for basic blocks consist of a current stack adjustment
46    (used for adjusting offset of variables addressed using stack pointer),
47    the table of structures describing the locations of parts of a variable
   and, for each physical register, a linked list describing its contents.
49    The linked list is a list of variable parts stored in the register,
50    i.e. it is a list of triplets (reg, decl, offset) where decl is
51    REG_EXPR (reg) and offset is REG_OFFSET (reg).  The linked list is used for
   efficient deletion of the appropriate variable parts when we set or clobber the
53    register.
54 
55    There may be more than one variable part in a register.  The linked lists
56    should be pretty short so it is a good data structure here.
57    For example in the following code, register allocator may assign same
58    register to variables A and B, and both of them are stored in the same
59    register in CODE:
60 
61      if (cond)
62        set A;
63      else
64        set B;
65      CODE;
66      if (cond)
67        use A;
68      else
69        use B;
70 
71    Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
   are emitted to appropriate positions in RTL code.  Each such note describes
73    the location of one variable at the point in instruction stream where the
74    note is.  There is no need to emit a note for each variable before each
75    instruction, we only emit these notes where the location of variable changes
76    (this means that we also emit notes for changes between the OUT set of the
77    previous block and the IN set of the current block).
78 
79    The notes consist of two parts:
80    1. the declaration (from REG_EXPR or MEM_EXPR)
81    2. the location of a variable - it is either a simple register/memory
82       reference (for simple variables, for example int),
      or a parallel of register/memory references (for large variables
84       which consist of several parts, for example long long).
85 
86 */
87 
88 #include "config.h"
89 #include "system.h"
90 #include "coretypes.h"
91 #include "backend.h"
92 #include "target.h"
93 #include "rtl.h"
94 #include "tree.h"
95 #include "cfghooks.h"
96 #include "alloc-pool.h"
97 #include "tree-pass.h"
98 #include "memmodel.h"
99 #include "tm_p.h"
100 #include "insn-config.h"
101 #include "regs.h"
102 #include "emit-rtl.h"
103 #include "recog.h"
104 #include "diagnostic.h"
105 #include "varasm.h"
106 #include "stor-layout.h"
107 #include "cfgrtl.h"
108 #include "cfganal.h"
109 #include "reload.h"
110 #include "calls.h"
111 #include "tree-dfa.h"
112 #include "tree-ssa.h"
113 #include "cselib.h"
114 #include "params.h"
115 #include "tree-pretty-print.h"
116 #include "rtl-iter.h"
117 #include "fibonacci_heap.h"
118 #include "print-rtl.h"
119 
/* Priority heap (and its node type) over basic blocks, keyed by a
   long integer priority.  */
typedef fibonacci_heap <long, basic_block_def> bb_heap_t;
typedef fibonacci_node <long, basic_block_def> bb_heap_node_t;

/* var-tracking.c assumes that tree code with the same value as VALUE rtx code
   has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
   Currently the value is the same as IDENTIFIER_NODE, which has such
   a property.  If this compile time assertion ever fails, make sure that
   the new tree code that equals (int) VALUE has the same property.  */
extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
129 
/* Type of micro operation.  A micro operation describes a single
   location-relevant effect of an instruction; see the overview comment
   at the top of this file for the ordering of the micro operations of
   one instruction.  */
enum micro_operation_type
{
  MO_USE,	/* Use location (REG or MEM).  */
  MO_USE_NO_VAR,/* Use location which is not associated with a variable
		   or the variable is not trackable.  */
  MO_VAL_USE,	/* Use location which is associated with a value.  */
  MO_VAL_LOC,   /* Use location which appears in a debug insn.  */
  MO_VAL_SET,	/* Set location associated with a value.  */
  MO_SET,	/* Set location.  */
  MO_COPY,	/* Copy the same portion of a variable from one
		   location to another.  */
  MO_CLOBBER,	/* Clobber location.  */
  MO_CALL,	/* Call insn.  */
  MO_ADJUST	/* Adjust stack pointer.  */

};
147 
/* Printable names of the micro operation types, for debug output.
   Must be kept in the same order as the enumerators of
   micro_operation_type above.  */
static const char * const ATTRIBUTE_UNUSED
micro_operation_type_name[] = {
  "MO_USE",
  "MO_USE_NO_VAR",
  "MO_VAL_USE",
  "MO_VAL_LOC",
  "MO_VAL_SET",
  "MO_SET",
  "MO_COPY",
  "MO_CLOBBER",
  "MO_CALL",
  "MO_ADJUST"
};
161 
/* Where shall the note be emitted?  BEFORE or AFTER the instruction.
   Notes emitted as AFTER_CALL are to take effect during the call,
   rather than after the call.  */
enum emit_note_where
{
  EMIT_NOTE_BEFORE_INSN,	/* Emit immediately before the insn.  */
  EMIT_NOTE_AFTER_INSN,		/* Emit immediately after the insn.  */
  EMIT_NOTE_AFTER_CALL_INSN	/* Emit after a call insn, describing the
				   state during the call.  */
};
171 
/* Structure holding information about micro operation.  */
struct micro_operation
{
  /* Type of micro operation.  */
  enum micro_operation_type type;

  /* The instruction which the micro operation is in, for MO_USE,
     MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
     instruction or note in the original flow (before any var-tracking
     notes are inserted, to simplify emission of notes), for MO_SET
     and MO_CLOBBER.  */
  rtx_insn *insn;

  /* Operand of the micro operation; which union member is meaningful
     depends on TYPE (ADJUST for MO_ADJUST, LOC otherwise).  */
  union {
    /* Location.  For MO_SET and MO_COPY, this is the SET that
       performs the assignment, if known, otherwise it is the target
       of the assignment.  For MO_VAL_USE and MO_VAL_SET, it is a
       CONCAT of the VALUE and the LOC associated with it.  For
       MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
       associated with it.  */
    rtx loc;

    /* Stack adjustment.  */
    HOST_WIDE_INT adjust;
  } u;
};
198 
199 
/* A declaration of a variable, or an RTL value being handled like a
   declaration.  The two cases are distinguished by comparing TREE_CODE
   of the pointed-to object against VALUE (see dv_is_decl_p); the
   compile-time assertion on check_value_val above guarantees this is
   unambiguous.  */
typedef void *decl_or_value;
203 
204 /* Return true if a decl_or_value DV is a DECL or NULL.  */
205 static inline bool
dv_is_decl_p(decl_or_value dv)206 dv_is_decl_p (decl_or_value dv)
207 {
208   return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
209 }
210 
211 /* Return true if a decl_or_value is a VALUE rtl.  */
212 static inline bool
dv_is_value_p(decl_or_value dv)213 dv_is_value_p (decl_or_value dv)
214 {
215   return dv && !dv_is_decl_p (dv);
216 }
217 
218 /* Return the decl in the decl_or_value.  */
219 static inline tree
dv_as_decl(decl_or_value dv)220 dv_as_decl (decl_or_value dv)
221 {
222   gcc_checking_assert (dv_is_decl_p (dv));
223   return (tree) dv;
224 }
225 
226 /* Return the value in the decl_or_value.  */
227 static inline rtx
dv_as_value(decl_or_value dv)228 dv_as_value (decl_or_value dv)
229 {
230   gcc_checking_assert (dv_is_value_p (dv));
231   return (rtx)dv;
232 }
233 
/* Return the opaque pointer in the decl_or_value.  Used for
   pointer-identity comparisons regardless of which kind DV holds
   (see e.g. variable_hasher::equal).  */
static inline void *
dv_as_opaque (decl_or_value dv)
{
  return dv;
}
240 
241 
/* Description of location of a part of a variable.  The content of a physical
   register is described by a chain of these structures.
   The chains are pretty short (usually 1 or 2 elements) and thus
   chain is the best data structure.  */
struct attrs
{
  /* Pointer to next member of the list.  */
  attrs *next;

  /* The rtx of register.  */
  rtx loc;

  /* The declaration (or value) corresponding to LOC.  */
  decl_or_value dv;

  /* Offset of this part from the start of DV's decl.  */
  HOST_WIDE_INT offset;
};
260 
/* Structure for chaining the locations.  */
struct location_chain
{
  /* Next element in the chain.  */
  location_chain *next;

  /* The location (REG, MEM or VALUE).  */
  rtx loc;

  /* The "value" stored in this location.  */
  rtx set_src;

  /* Initialization status of this location.  */
  enum var_init_status init;
};
276 
/* A vector of loc_exp_dep holds the active dependencies of a one-part
   DV on VALUEs, i.e., the VALUEs expanded so as to form the current
   location of DV.  Each entry is also part of VALUE's linked-list of
   backlinks back to DV.  */
struct loc_exp_dep
{
  /* The dependent DV.  */
  decl_or_value dv;
  /* The dependency VALUE or DECL_DEBUG.  */
  rtx value;
  /* The next entry in VALUE's backlinks list.  */
  struct loc_exp_dep *next;
  /* A pointer to the pointer to this entry (head or prev's next) in
     the doubly-linked list, so the entry can unlink itself in O(1).  */
  struct loc_exp_dep **pprev;
};
293 
294 
/* This data structure holds information about the depth of a variable
   expansion.  */
struct expand_depth
{
  /* This measures the complexity of the expanded expression.  It
     grows by one for each level of expansion that adds more than one
     operand.  */
  int complexity;
  /* This counts the number of ENTRY_VALUE expressions in an
     expansion.  We want to minimize their use.  */
  int entryvals;
};
307 
/* This data structure is allocated for one-part variables at the time
   of emitting notes.  */
struct onepart_aux
{
  /* Doubly-linked list of dependent DVs.  These are DVs whose cur_loc
     computation used the expansion of this variable, and that ought
     to be notified should this variable change.  If the DV's cur_loc
     expanded to NULL, all components of the loc list are regarded as
     active, so that any changes in them give us a chance to get a
     location.  Otherwise, only components of the loc that expanded to
     non-NULL are regarded as active dependencies.  */
  loc_exp_dep *backlinks;
  /* This holds the LOC that was expanded into cur_loc.  We need only
     mark a one-part variable as changed if the FROM loc is removed,
     or if it has no known location and a loc is added, or if it gets
     a change notification from any of its active dependencies.  */
  rtx from;
  /* The depth of the cur_loc expression.  */
  expand_depth depth;
  /* Dependencies actively used when expanding FROM into cur_loc.  */
  vec<loc_exp_dep, va_heap, vl_embed> deps;
};
330 
/* Structure describing one part of variable.  */
struct variable_part
{
  /* Chain of locations of the part.  */
  location_chain *loc_chain;

  /* Location which was last emitted to location list.  */
  rtx cur_loc;

  /* Auxiliary data; the active member is selected by the enclosing
     variable's ONEPART field.  */
  union variable_aux
  {
    /* The offset in the variable, if !var->onepart.  */
    HOST_WIDE_INT offset;

    /* Pointer to auxiliary data, if var->onepart and emit_notes.  */
    struct onepart_aux *onepaux;
  } aux;
};

/* Maximum number of location parts.  */
#define MAX_VAR_PARTS 16
352 
/* Enumeration type used to discriminate various types of one-part
   variables.  */
enum onepart_enum
{
  /* Not a one-part variable.  */
  NOT_ONEPART = 0,
  /* A one-part DECL that is not a DEBUG_EXPR_DECL.  */
  ONEPART_VDECL = 1,
  /* A DEBUG_EXPR_DECL.  */
  ONEPART_DEXPR = 2,
  /* A VALUE.  */
  ONEPART_VALUE = 3
};
366 
/* Structure describing where the variable is located.  */
struct variable
{
  /* The declaration of the variable, or an RTL value being handled
     like a declaration.  */
  decl_or_value dv;

  /* Reference count.  */
  int refcount;

  /* Number of variable parts.  */
  char n_var_parts;

  /* What type of DV this is, according to enum onepart_enum.  */
  ENUM_BITFIELD (onepart_enum) onepart : CHAR_BIT;

  /* True if this variable_def struct is currently in the
     changed_variables hash table.  */
  bool in_changed_variables;

  /* The variable parts.  Trailing array: the struct is allocated with
     room for up to MAX_VAR_PARTS elements (see var_pool), of which
     N_VAR_PARTS are in use.  */
  variable_part var_part[1];
};
390 
/* Pointer to the BB's information specific to variable tracking pass,
   stashed in the basic block's AUX field.  */
#define VTI(BB) ((variable_tracking_info *) (BB)->aux)
393 
394 /* Return MEM_OFFSET (MEM) as a HOST_WIDE_INT, or 0 if we can't.  */
395 
396 static inline HOST_WIDE_INT
int_mem_offset(const_rtx mem)397 int_mem_offset (const_rtx mem)
398 {
399   HOST_WIDE_INT offset;
400   if (MEM_OFFSET_KNOWN_P (mem) && MEM_OFFSET (mem).is_constant (&offset))
401     return offset;
402   return 0;
403 }
404 
#if CHECKING_P && (GCC_VERSION >= 2007)

/* Access VAR's Ith part's offset, checking that it's not a one-part
   variable.  The statement expression evaluates VAR exactly once and
   asserts the ONEPART discriminant before touching the union.  */
#define VAR_PART_OFFSET(var, i) __extension__			\
(*({  variable *const __v = (var);				\
      gcc_checking_assert (!__v->onepart);			\
      &__v->var_part[(i)].aux.offset; }))

/* Access VAR's one-part auxiliary data, checking that it is a
   one-part variable.  */
#define VAR_LOC_1PAUX(var) __extension__			\
(*({  variable *const __v = (var);				\
      gcc_checking_assert (__v->onepart);			\
      &__v->var_part[0].aux.onepaux; }))

#else
/* Unchecked variants, for release builds or for compilers without
   statement expressions.  */
#define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
#define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
#endif

/* These are accessor macros for the one-part auxiliary data.  When
   convenient for users, they're guarded by tests that the data was
   allocated.  */
#define VAR_LOC_DEP_LST(var) (VAR_LOC_1PAUX (var)		  \
			      ? VAR_LOC_1PAUX (var)->backlinks	  \
			      : NULL)
#define VAR_LOC_DEP_LSTP(var) (VAR_LOC_1PAUX (var)		  \
			       ? &VAR_LOC_1PAUX (var)->backlinks  \
			       : NULL)
#define VAR_LOC_FROM(var) (VAR_LOC_1PAUX (var)->from)
#define VAR_LOC_DEPTH(var) (VAR_LOC_1PAUX (var)->depth)
#define VAR_LOC_DEP_VEC(var) (VAR_LOC_1PAUX (var)		  \
			      ? &VAR_LOC_1PAUX (var)->deps	  \
			      : NULL)
440 
441 
442 
443 typedef unsigned int dvuid;
444 
445 /* Return the uid of DV.  */
446 
447 static inline dvuid
dv_uid(decl_or_value dv)448 dv_uid (decl_or_value dv)
449 {
450   if (dv_is_value_p (dv))
451     return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
452   else
453     return DECL_UID (dv_as_decl (dv));
454 }
455 
/* Compute the hash from the uid.  The uid is used directly as the
   hash value.  */

static inline hashval_t
dv_uid2hash (dvuid uid)
{
  return uid;
}
463 
/* The hash function for a mask table in a shared_htab chain: hash DV
   via its uid.  */

static inline hashval_t
dv_htab_hash (decl_or_value dv)
{
  return dv_uid2hash (dv_uid (dv));
}
471 
static void variable_htab_free (void *);

/* Variable hashtable helpers.  Lookup keys are decl_or_value pointers
   passed as opaque void *.  */

struct variable_hasher : pointer_hash <variable>
{
  typedef void *compare_type;
  static inline hashval_t hash (const variable *);
  static inline bool equal (const variable *, const void *);
  static inline void remove (variable *);
};
483 
/* The hash function for variable_htab, computes the hash value
   from the declaration of variable V.  */

inline hashval_t
variable_hasher::hash (const variable *v)
{
  return dv_htab_hash (v->dv);
}
492 
493 /* Compare the declaration of variable X with declaration Y.  */
494 
495 inline bool
equal(const variable * v,const void * y)496 variable_hasher::equal (const variable *v, const void *y)
497 {
498   decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
499 
500   return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
501 }
502 
/* Free the element of VARIABLE_HTAB (its type is struct variable_def),
   delegating to variable_htab_free.  */

inline void
variable_hasher::remove (variable *var)
{
  variable_htab_free (var);
}
510 
typedef hash_table<variable_hasher> variable_table_type;
typedef variable_table_type::iterator variable_iterator_type;

/* Structure for passing some other parameters to function
   emit_note_insn_var_location.  */
struct emit_note_data
{
  /* The instruction which the note will be emitted before/after.  */
  rtx_insn *insn;

  /* Where the note will be emitted (before or after INSN; see
     enum emit_note_where).  */
  enum emit_note_where where;

  /* The variables and values active at this point.  */
  variable_table_type *vars;
};
527 
/* Structure holding a refcounted hash table.  If refcount > 1,
   it must be first unshared before modified.  */
struct shared_hash
{
  /* Reference count; when greater than 1 the table is shared among
     several dataflow sets.  */
  int refcount;

  /* Actual hash table.  */
  variable_table_type *htab;
};
538 
/* Structure holding the IN or OUT set for a basic block.  */
struct dataflow_set
{
  /* Adjustment of stack offset.  */
  HOST_WIDE_INT stack_adjust;

  /* Attributes for registers (lists of attrs).  */
  attrs *regs[FIRST_PSEUDO_REGISTER];

  /* Variable locations.  */
  shared_hash *vars;

  /* Variables that are being traversed.  */
  shared_hash *traversed_vars;
};
554 
/* The structure (one for each basic block) containing the information
   needed for variable tracking.  Accessed via the VTI macro through
   the block's AUX field.  */
struct variable_tracking_info
{
  /* The vector of micro operations.  */
  vec<micro_operation> mos;

  /* The IN and OUT set for dataflow analysis.  */
  dataflow_set in;
  dataflow_set out;

  /* The permanent-in dataflow set for this block.  This is used to
     hold values for which we had to compute entry values.  ??? This
     should probably be dynamically allocated, to avoid using more
     memory in non-debug builds.  */
  dataflow_set *permp;

  /* Has the block been visited in DFS?  */
  bool visited;

  /* Has the block been flooded in VTA?  */
  bool flooded;

};
579 
/* Alloc pool for struct attrs.  */
object_allocator<attrs> attrs_pool ("attrs pool");

/* Alloc pool for struct variable with room for MAX_VAR_PARTS
   var_part entries (the struct declares a one-element trailing
   array, hence the MAX_VAR_PARTS - 1 extra elements).  */

static pool_allocator var_pool
  ("variable_def pool", sizeof (variable) +
   (MAX_VAR_PARTS - 1) * sizeof (((variable *)NULL)->var_part[0]));

/* Alloc pool for struct variable with a single var_part entry.  */
static pool_allocator valvar_pool
  ("small variable_def pool", sizeof (variable));

/* Alloc pool for struct location_chain.  */
static object_allocator<location_chain> location_chain_pool
  ("location_chain pool");

/* Alloc pool for struct shared_hash.  */
static object_allocator<shared_hash> shared_hash_pool ("shared_hash pool");

/* Alloc pool for struct loc_exp_dep for NOT_ONEPART variables.  */
object_allocator<loc_exp_dep> loc_exp_dep_pool ("loc_exp_dep pool");

/* Changed variables, notes will be emitted for them.  */
static variable_table_type *changed_variables;

/* Shall notes be emitted?  */
static bool emit_notes;

/* Values whose dynamic location lists have gone empty, but whose
   cselib location lists are still usable.  Use this to hold the
   current location, the backlinks, etc, during emit_notes.  */
static variable_table_type *dropped_values;

/* Empty shared hashtable.  */
static shared_hash *empty_shared_hash;

/* Scratch register bitmap used by cselib_expand_value_rtx.  */
static bitmap scratch_regs = NULL;

#ifdef HAVE_window_save
/* Pairing of an outgoing windowed parameter register with the
   corresponding incoming register.  */
struct GTY(()) parm_reg {
  rtx outgoing;
  rtx incoming;
};


/* Vector of windowed parameter registers, if any.  */
static vec<parm_reg, va_gc> *windowed_parm_regs = NULL;
#endif

/* Variable used to tell whether cselib_process_insn called our hook.  */
static bool cselib_hook_called;
633 
634 /* Local function prototypes.  */
635 static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
636 					  HOST_WIDE_INT *);
637 static void insn_stack_adjust_offset_pre_post (rtx_insn *, HOST_WIDE_INT *,
638 					       HOST_WIDE_INT *);
639 static bool vt_stack_adjustments (void);
640 
641 static void init_attrs_list_set (attrs **);
642 static void attrs_list_clear (attrs **);
643 static attrs *attrs_list_member (attrs *, decl_or_value, HOST_WIDE_INT);
644 static void attrs_list_insert (attrs **, decl_or_value, HOST_WIDE_INT, rtx);
645 static void attrs_list_copy (attrs **, attrs *);
646 static void attrs_list_union (attrs **, attrs *);
647 
648 static variable **unshare_variable (dataflow_set *set, variable **slot,
649 					variable *var, enum var_init_status);
650 static void vars_copy (variable_table_type *, variable_table_type *);
651 static tree var_debug_decl (tree);
652 static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
653 static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
654 				    enum var_init_status, rtx);
655 static void var_reg_delete (dataflow_set *, rtx, bool);
656 static void var_regno_delete (dataflow_set *, int);
657 static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
658 static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
659 				    enum var_init_status, rtx);
660 static void var_mem_delete (dataflow_set *, rtx, bool);
661 
662 static void dataflow_set_init (dataflow_set *);
663 static void dataflow_set_clear (dataflow_set *);
664 static void dataflow_set_copy (dataflow_set *, dataflow_set *);
665 static int variable_union_info_cmp_pos (const void *, const void *);
666 static void dataflow_set_union (dataflow_set *, dataflow_set *);
667 static location_chain *find_loc_in_1pdv (rtx, variable *,
668 					 variable_table_type *);
669 static bool canon_value_cmp (rtx, rtx);
670 static int loc_cmp (rtx, rtx);
671 static bool variable_part_different_p (variable_part *, variable_part *);
672 static bool onepart_variable_different_p (variable *, variable *);
673 static bool variable_different_p (variable *, variable *);
674 static bool dataflow_set_different (dataflow_set *, dataflow_set *);
675 static void dataflow_set_destroy (dataflow_set *);
676 
677 static bool track_expr_p (tree, bool);
678 static void add_uses_1 (rtx *, void *);
679 static void add_stores (rtx, const_rtx, void *);
680 static bool compute_bb_dataflow (basic_block);
681 static bool vt_find_locations (void);
682 
683 static void dump_attrs_list (attrs *);
684 static void dump_var (variable *);
685 static void dump_vars (variable_table_type *);
686 static void dump_dataflow_set (dataflow_set *);
687 static void dump_dataflow_sets (void);
688 
689 static void set_dv_changed (decl_or_value, bool);
690 static void variable_was_changed (variable *, dataflow_set *);
691 static variable **set_slot_part (dataflow_set *, rtx, variable **,
692 				 decl_or_value, HOST_WIDE_INT,
693 				 enum var_init_status, rtx);
694 static void set_variable_part (dataflow_set *, rtx,
695 			       decl_or_value, HOST_WIDE_INT,
696 			       enum var_init_status, rtx, enum insert_option);
697 static variable **clobber_slot_part (dataflow_set *, rtx,
698 				     variable **, HOST_WIDE_INT, rtx);
699 static void clobber_variable_part (dataflow_set *, rtx,
700 				   decl_or_value, HOST_WIDE_INT, rtx);
701 static variable **delete_slot_part (dataflow_set *, rtx, variable **,
702 				    HOST_WIDE_INT);
703 static void delete_variable_part (dataflow_set *, rtx,
704 				  decl_or_value, HOST_WIDE_INT);
705 static void emit_notes_in_bb (basic_block, dataflow_set *);
706 static void vt_emit_notes (void);
707 
708 static void vt_add_function_parameters (void);
709 static bool vt_initialize (void);
710 static void vt_finalize (void);
711 
712 /* Callback for stack_adjust_offset_pre_post, called via for_each_inc_dec.  */
713 
714 static int
stack_adjust_offset_pre_post_cb(rtx,rtx op,rtx dest,rtx src,rtx srcoff,void * arg)715 stack_adjust_offset_pre_post_cb (rtx, rtx op, rtx dest, rtx src, rtx srcoff,
716 				 void *arg)
717 {
718   if (dest != stack_pointer_rtx)
719     return 0;
720 
721   switch (GET_CODE (op))
722     {
723     case PRE_INC:
724     case PRE_DEC:
725       ((HOST_WIDE_INT *)arg)[0] -= INTVAL (srcoff);
726       return 0;
727     case POST_INC:
728     case POST_DEC:
729       ((HOST_WIDE_INT *)arg)[1] -= INTVAL (srcoff);
730       return 0;
731     case PRE_MODIFY:
732     case POST_MODIFY:
733       /* We handle only adjustments by constant amount.  */
734       gcc_assert (GET_CODE (src) == PLUS
735 		  && CONST_INT_P (XEXP (src, 1))
736 		  && XEXP (src, 0) == stack_pointer_rtx);
737       ((HOST_WIDE_INT *)arg)[GET_CODE (op) == POST_MODIFY]
738 	-= INTVAL (XEXP (src, 1));
739       return 0;
740     default:
741       gcc_unreachable ();
742     }
743 }
744 
745 /* Given a SET, calculate the amount of stack adjustment it contains
746    PRE- and POST-modifying stack pointer.
747    This function is similar to stack_adjust_offset.  */
748 
749 static void
stack_adjust_offset_pre_post(rtx pattern,HOST_WIDE_INT * pre,HOST_WIDE_INT * post)750 stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
751 			      HOST_WIDE_INT *post)
752 {
753   rtx src = SET_SRC (pattern);
754   rtx dest = SET_DEST (pattern);
755   enum rtx_code code;
756 
757   if (dest == stack_pointer_rtx)
758     {
759       /* (set (reg sp) (plus (reg sp) (const_int))) */
760       code = GET_CODE (src);
761       if (! (code == PLUS || code == MINUS)
762 	  || XEXP (src, 0) != stack_pointer_rtx
763 	  || !CONST_INT_P (XEXP (src, 1)))
764 	return;
765 
766       if (code == MINUS)
767 	*post += INTVAL (XEXP (src, 1));
768       else
769 	*post -= INTVAL (XEXP (src, 1));
770       return;
771     }
772   HOST_WIDE_INT res[2] = { 0, 0 };
773   for_each_inc_dec (pattern, stack_adjust_offset_pre_post_cb, res);
774   *pre += res[0];
775   *post += res[1];
776 }
777 
778 /* Given an INSN, calculate the amount of stack adjustment it contains
779    PRE- and POST-modifying stack pointer.  */
780 
781 static void
insn_stack_adjust_offset_pre_post(rtx_insn * insn,HOST_WIDE_INT * pre,HOST_WIDE_INT * post)782 insn_stack_adjust_offset_pre_post (rtx_insn *insn, HOST_WIDE_INT *pre,
783 				   HOST_WIDE_INT *post)
784 {
785   rtx pattern;
786 
787   *pre = 0;
788   *post = 0;
789 
790   pattern = PATTERN (insn);
791   if (RTX_FRAME_RELATED_P (insn))
792     {
793       rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
794       if (expr)
795 	pattern = XEXP (expr, 0);
796     }
797 
798   if (GET_CODE (pattern) == SET)
799     stack_adjust_offset_pre_post (pattern, pre, post);
800   else if (GET_CODE (pattern) == PARALLEL
801 	   || GET_CODE (pattern) == SEQUENCE)
802     {
803       int i;
804 
805       /* There may be stack adjustments inside compound insns.  Search
806 	 for them.  */
807       for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
808 	if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
809 	  stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
810     }
811 }
812 
/* Compute stack adjustments for all blocks by traversing DFS tree.
   Return true when the adjustments on all incoming edges are consistent.
   Heavily borrowed from pre_and_rev_post_order_compute.  */

static bool
vt_stack_adjustments (void)
{
  /* Explicit DFS stack of edge iterators, and its current depth.  */
  edge_iterator *stack;
  int sp;

  /* Initialize entry block.  */
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->visited = true;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->in.stack_adjust
    = INCOMING_FRAME_SP_OFFSET;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out.stack_adjust
    = INCOMING_FRAME_SP_OFFSET;

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
  sp = 0;

  /* Push the first edge on to the stack.  */
  stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (!VTI (dest)->visited)
	{
	  rtx_insn *insn;
	  HOST_WIDE_INT pre, post, offset;
	  VTI (dest)->visited = true;
	  /* DEST inherits the out-adjustment of the predecessor we
	     arrived from.  */
	  VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;

	  /* Accumulate the adjustments made by each insn in DEST to
	     obtain DEST's out-adjustment.  */
	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	    for (insn = BB_HEAD (dest);
		 insn != NEXT_INSN (BB_END (dest));
		 insn = NEXT_INSN (insn))
	      if (INSN_P (insn))
		{
		  insn_stack_adjust_offset_pre_post (insn, &pre, &post);
		  offset += pre + post;
		}

	  VTI (dest)->out.stack_adjust = offset;

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = ei_start (dest->succs);
	}
      else
	{
	  /* We can end up with different stack adjustments for the exit block
	     of a shrink-wrapped function if stack_adjust_offset_pre_post
	     doesn't understand the rtx pattern used to restore the stack
	     pointer in the epilogue.  For example, on s390(x), the stack
	     pointer is often restored via a load-multiple instruction
	     and so no stack_adjust offset is recorded for it.  This means
	     that the stack offset at the end of the epilogue block is the
	     same as the offset before the epilogue, whereas other paths
	     to the exit block will have the correct stack_adjust.

	     It is safe to ignore these differences because (a) we never
	     use the stack_adjust for the exit block in this pass and
	     (b) dwarf2cfi checks whether the CFA notes in a shrink-wrapped
	     function are correct.

	     We must check whether the adjustments on other edges are
	     the same though.  */
	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
	    {
	      /* Inconsistent incoming adjustments: give up.  */
	      free (stack);
	      return false;
	    }

	  if (! ei_one_before_end_p (ei))
	    /* Go to the next edge.  */
	    ei_next (&stack[sp - 1]);
	  else
	    /* Return to previous level if there are no more edges.  */
	    sp--;
	}
    }

  free (stack);
  return true;
}
911 
912 /* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or
913    hard_frame_pointer_rtx is being mapped to it and offset for it.  */
914 static rtx cfa_base_rtx;
915 static HOST_WIDE_INT cfa_base_offset;
916 
917 /* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
918    or hard_frame_pointer_rtx.  */
919 
920 static inline rtx
compute_cfa_pointer(poly_int64 adjustment)921 compute_cfa_pointer (poly_int64 adjustment)
922 {
923   return plus_constant (Pmode, cfa_base_rtx, adjustment + cfa_base_offset);
924 }
925 
926 /* Adjustment for hard_frame_pointer_rtx to cfa base reg,
927    or -1 if the replacement shouldn't be done.  */
928 static poly_int64 hard_frame_pointer_adjustment = -1;
929 
930 /* Data for adjust_mems callback.  */
931 
struct adjust_mem_data
{
  /* True while processing the destination of a SET (set by the callers
     before note_stores/note_uses), false while processing uses.  */
  bool store;
  /* Mode of the enclosing MEM while its address is being processed,
     VOIDmode outside of a MEM address.  */
  machine_mode mem_mode;
  /* Negated out.stack_adjust of the insn's basic block; used when
     rewriting stack_pointer_rtx to a CFA-based expression.  */
  HOST_WIDE_INT stack_adjust;
  /* SETs recording the register updates of eliminated auto-inc/dec and
     {PRE,POST}_MODIFY addresses; adjust_insn appends them to the insn
     pattern afterwards.  */
  auto_vec<rtx> side_effects;
};
939 
940 /* Helper for adjust_mems.  Return true if X is suitable for
941    transformation of wider mode arithmetics to narrower mode.  */
942 
static bool
use_narrower_mode_test (rtx x, const_rtx subreg)
{
  subrtx_var_iterator::array_type array;
  /* Walk every sub-rtx of X and verify each one can legitimately be
     rewritten in the narrower mode of SUBREG.  */
  FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
    {
      /* NB: shadows the parameter; refers to the current sub-rtx.  */
      rtx x = *iter;
      if (CONSTANT_P (x))
	/* Constants can always be taken in a narrower mode.  */
	iter.skip_subrtxes ();
      else
	switch (GET_CODE (x))
	  {
	  case REG:
	    /* Don't narrow a register that cselib already tracks in the
	       wide mode, and make sure the lowpart subreg of it would be
	       valid.  */
	    if (cselib_lookup (x, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
	      return false;
	    if (!validate_subreg (GET_MODE (subreg), GET_MODE (x), x,
				  subreg_lowpart_offset (GET_MODE (subreg),
							 GET_MODE (x))))
	      return false;
	    break;
	  case PLUS:
	  case MINUS:
	  case MULT:
	    /* The lowpart of these can be computed from the lowparts of
	       the operands (see use_narrower_mode); just recurse.  */
	    break;
	  case ASHIFT:
	    if (GET_MODE (XEXP (x, 1)) != VOIDmode)
	      {
		enum machine_mode mode = GET_MODE (subreg);
		rtx op1 = XEXP (x, 1);
		enum machine_mode op1_mode = GET_MODE (op1);
		/* The shift count may itself need narrowing; verify the
		   lowpart subreg of it can be formed.  */
		if (GET_MODE_PRECISION (as_a <scalar_int_mode> (mode))
		    < GET_MODE_PRECISION (as_a <scalar_int_mode> (op1_mode)))
		  {
		    poly_uint64 byte = subreg_lowpart_offset (mode, op1_mode);
		    if (GET_CODE (op1) == SUBREG || GET_CODE (op1) == CONCAT)
		      {
			if (!simplify_subreg (mode, op1, op1_mode, byte))
			  return false;
		      }
		    else if (!validate_subreg (mode, op1_mode, op1, byte))
		      return false;
		  }
	      }
	    /* Only the shifted operand is narrowed; don't iterate into
	       the shift count.  */
	    iter.substitute (XEXP (x, 0));
	    break;
	  default:
	    /* Anything else is not known to be safe to narrow.  */
	    return false;
	  }
    }
  return true;
}
994 
995 /* Transform X into narrower mode MODE from wider mode WMODE.  */
996 
static rtx
use_narrower_mode (rtx x, scalar_int_mode mode, scalar_int_mode wmode)
{
  rtx op0, op1;
  /* Constants are simply taken in the narrower mode.  */
  if (CONSTANT_P (x))
    return lowpart_subreg (mode, x, wmode);
  switch (GET_CODE (x))
    {
    case REG:
      return lowpart_subreg (mode, x, wmode);
    case PLUS:
    case MINUS:
    case MULT:
      /* Narrow both operands and redo the operation in MODE.  */
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
      return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
    case ASHIFT:
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      op1 = XEXP (x, 1);
      /* Ensure shift amount is not wider than mode.  */
      if (GET_MODE (op1) == VOIDmode)
	op1 = lowpart_subreg (mode, op1, wmode);
      else if (GET_MODE_PRECISION (mode)
	       < GET_MODE_PRECISION (as_a <scalar_int_mode> (GET_MODE (op1))))
	op1 = lowpart_subreg (mode, op1, GET_MODE (op1));
      return simplify_gen_binary (ASHIFT, mode, op0, op1);
    default:
      /* use_narrower_mode_test should have rejected everything else.  */
      gcc_unreachable ();
    }
}
1027 
1028 /* Helper function for adjusting used MEMs.  */
1029 
static rtx
adjust_mems (rtx loc, const_rtx old_rtx, void *data)
{
  struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
  rtx mem, addr = loc, tem;
  machine_mode mem_mode_save;
  bool store_save;
  scalar_int_mode tem_mode, tem_subreg_mode;
  poly_int64 size;
  switch (GET_CODE (loc))
    {
    case REG:
      /* Don't do any sp or fp replacements outside of MEM addresses
         on the LHS.  */
      if (amd->mem_mode == VOIDmode && amd->store)
	return loc;
      /* Rewrite the stack pointer as a CFA-based expression when the
	 CFA base register is available and no frame pointer is used.  */
      if (loc == stack_pointer_rtx
	  && !frame_pointer_needed
	  && cfa_base_rtx)
	return compute_cfa_pointer (amd->stack_adjust);
      /* Likewise for the hard frame pointer, once its adjustment
	 relative to the CFA base is known.  */
      else if (loc == hard_frame_pointer_rtx
	       && frame_pointer_needed
	       && maybe_ne (hard_frame_pointer_adjustment, -1)
	       && cfa_base_rtx)
	return compute_cfa_pointer (hard_frame_pointer_adjustment);
      gcc_checking_assert (loc != virtual_incoming_args_rtx);
      return loc;
    case MEM:
      mem = loc;
      if (!amd->store)
	{
	  /* For uses, try delegitimizing the whole MEM first; if that
	     yields a non-MEM, recurse into the replacement instead.  */
	  mem = targetm.delegitimize_address (mem);
	  if (mem != loc && !MEM_P (mem))
	    return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
	}

      addr = XEXP (mem, 0);
      /* Process the address with mem_mode set to the MEM's mode and
	 store cleared — an address is always a use.  */
      mem_mode_save = amd->mem_mode;
      amd->mem_mode = GET_MODE (mem);
      store_save = amd->store;
      amd->store = false;
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      amd->store = store_save;
      amd->mem_mode = mem_mode_save;
      if (mem == loc)
	addr = targetm.delegitimize_address (addr);
      if (addr != XEXP (mem, 0))
	mem = replace_equiv_address_nv (mem, addr);
      if (!amd->store)
	mem = avoid_constant_pool_reference (mem);
      return mem;
    case PRE_INC:
    case PRE_DEC:
      /* Pre-modification: the effective address is the operand offset
	 by the access size.  */
      size = GET_MODE_SIZE (amd->mem_mode);
      addr = plus_constant (GET_MODE (loc), XEXP (loc, 0),
			    GET_CODE (loc) == PRE_INC ? size : -size);
      /* FALLTHRU */
    case POST_INC:
    case POST_DEC:
      /* Post-modification: the effective address is the plain operand
	 (ADDR == LOC means we did not come through the PRE_* cases).  */
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      /* Queue the register update as a separate SET side-effect so the
	 auto-inc/dec can be dropped from the address itself.  */
      size = GET_MODE_SIZE (amd->mem_mode);
      tem = plus_constant (GET_MODE (loc), XEXP (loc, 0),
			   (GET_CODE (loc) == PRE_INC
			    || GET_CODE (loc) == POST_INC) ? size : -size);
      store_save = amd->store;
      amd->store = false;
      tem = simplify_replace_fn_rtx (tem, old_rtx, adjust_mems, data);
      amd->store = store_save;
      amd->side_effects.safe_push (gen_rtx_SET (XEXP (loc, 0), tem));
      return addr;
    case PRE_MODIFY:
      /* Pre-modify: the effective address is the explicit expression
	 in operand 1.  */
      addr = XEXP (loc, 1);
      /* FALLTHRU */
    case POST_MODIFY:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      /* As above, queue the register modification as a SET.  */
      store_save = amd->store;
      amd->store = false;
      tem = simplify_replace_fn_rtx (XEXP (loc, 1), old_rtx,
				     adjust_mems, data);
      amd->store = store_save;
      amd->side_effects.safe_push (gen_rtx_SET (XEXP (loc, 0), tem));
      return addr;
    case SUBREG:
      /* First try without delegitimization of whole MEMs and
	 avoid_constant_pool_reference, which is more likely to succeed.  */
      store_save = amd->store;
      amd->store = true;
      addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
				      data);
      amd->store = store_save;
      /* Then retry as a use, which may delegitimize further.  */
      mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      if (mem == SUBREG_REG (loc))
	{
	  tem = loc;
	  goto finish_subreg;
	}
      tem = simplify_gen_subreg (GET_MODE (loc), mem,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem)
	goto finish_subreg;
      /* Fall back to the store-mode replacement ADDR; as a last resort
	 wrap it in a raw SUBREG.  */
      tem = simplify_gen_subreg (GET_MODE (loc), addr,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem == NULL_RTX)
	tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
    finish_subreg:
      /* A lowpart SUBREG of wider-mode arithmetic may be rewritten to
	 do the arithmetic directly in the narrower mode.  */
      if (MAY_HAVE_DEBUG_BIND_INSNS
	  && GET_CODE (tem) == SUBREG
	  && (GET_CODE (SUBREG_REG (tem)) == PLUS
	      || GET_CODE (SUBREG_REG (tem)) == MINUS
	      || GET_CODE (SUBREG_REG (tem)) == MULT
	      || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
	  && is_a <scalar_int_mode> (GET_MODE (tem), &tem_mode)
	  && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (tem)),
				     &tem_subreg_mode)
	  && (GET_MODE_PRECISION (tem_mode)
	      < GET_MODE_PRECISION (tem_subreg_mode))
	  && subreg_lowpart_p (tem)
	  && use_narrower_mode_test (SUBREG_REG (tem), tem))
	return use_narrower_mode (SUBREG_REG (tem), tem_mode, tem_subreg_mode);
      return tem;
    case ASM_OPERANDS:
      /* Don't do any replacements in second and following
	 ASM_OPERANDS of inline-asm with multiple sets.
	 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
	 and ASM_OPERANDS_LABEL_VEC need to be equal between
	 all the ASM_OPERANDs in the insn and adjust_insn will
	 fix this up.  */
      if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
	return loc;
      break;
    default:
      break;
    }
  /* Returning NULL tells simplify_replace_fn_rtx to keep LOC and
     recurse into its operands.  */
  return NULL_RTX;
}
1173 
1174 /* Helper function for replacement of uses.  */
1175 
1176 static void
adjust_mem_uses(rtx * x,void * data)1177 adjust_mem_uses (rtx *x, void *data)
1178 {
1179   rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
1180   if (new_x != *x)
1181     validate_change (NULL_RTX, x, new_x, true);
1182 }
1183 
1184 /* Helper function for replacement of stores.  */
1185 
1186 static void
adjust_mem_stores(rtx loc,const_rtx expr,void * data)1187 adjust_mem_stores (rtx loc, const_rtx expr, void *data)
1188 {
1189   if (MEM_P (loc))
1190     {
1191       rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
1192 					      adjust_mems, data);
1193       if (new_dest != SET_DEST (expr))
1194 	{
1195 	  rtx xexpr = CONST_CAST_RTX (expr);
1196 	  validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
1197 	}
1198     }
1199 }
1200 
1201 /* Simplify INSN.  Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
1202    replace them with their value in the insn and add the side-effects
1203    as other sets to the insn.  */
1204 
static void
adjust_insn (basic_block bb, rtx_insn *insn)
{
  rtx set;

#ifdef HAVE_window_save
  /* If the target machine has an explicit window save instruction, the
     transformation OUTGOING_REGNO -> INCOMING_REGNO is done there.  */
  if (RTX_FRAME_RELATED_P (insn)
      && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
    {
      unsigned int i, nregs = vec_safe_length (windowed_parm_regs);
      rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
      parm_reg *p;

      /* Build a PARALLEL of incoming := outgoing copies, each followed
	 by a clobber of the raw outgoing register.  */
      FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs, i, p)
	{
	  XVECEXP (rtl, 0, i * 2)
	    = gen_rtx_SET (p->incoming, p->outgoing);
	  /* Do not clobber the attached DECL, but only the REG.  */
	  XVECEXP (rtl, 0, i * 2 + 1)
	    = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
			       gen_raw_REG (GET_MODE (p->outgoing),
					    REGNO (p->outgoing)));
	}

      validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
      return;
    }
#endif

  /* Rewrite stores first, then uses, through adjust_mems.  */
  adjust_mem_data amd;
  amd.mem_mode = VOIDmode;
  /* Stack offset relative to the end of this basic block, negated.  */
  amd.stack_adjust = -VTI (bb)->out.stack_adjust;

  amd.store = true;
  note_stores (PATTERN (insn), adjust_mem_stores, &amd);

  amd.store = false;
  if (GET_CODE (PATTERN (insn)) == PARALLEL
      && asm_noperands (PATTERN (insn)) > 0
      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      rtx body, set0;
      int i;

      /* inline-asm with multiple sets is tiny bit more complicated,
	 because the 3 vectors in ASM_OPERANDS need to be shared between
	 all ASM_OPERANDS in the instruction.  adjust_mems will
	 not touch ASM_OPERANDS other than the first one, asm_noperands
	 test above needs to be called before that (otherwise it would fail)
	 and afterwards this code fixes it up.  */
      note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
      body = PATTERN (insn);
      set0 = XVECEXP (body, 0, 0);
      gcc_checking_assert (GET_CODE (set0) == SET
			   && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
			   && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
      for (i = 1; i < XVECLEN (body, 0); i++)
	if (GET_CODE (XVECEXP (body, 0, i)) != SET)
	  break;
	else
	  {
	    set = XVECEXP (body, 0, i);
	    gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
				 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
				    == i);
	    /* If adjust_mems changed the first ASM_OPERANDS, re-share
	       its three vectors into each following ASM_OPERANDS.  */
	    if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
		!= ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
		   != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
		   != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
	      {
		rtx newsrc = shallow_copy_rtx (SET_SRC (set));
		ASM_OPERANDS_INPUT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
		ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
		ASM_OPERANDS_LABEL_VEC (newsrc)
		  = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
		validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
	      }
	  }
    }
  else
    note_uses (&PATTERN (insn), adjust_mem_uses, &amd);

  /* For read-only MEMs containing some constant, prefer those
     constants.  */
  set = single_set (insn);
  if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
	validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
    }

  /* Append the SETs queued by adjust_mems (register updates of removed
     auto-inc/dec addresses) as extra parallel elements.  */
  if (!amd.side_effects.is_empty ())
    {
      rtx *pat, new_pat;
      int i, oldn;

      pat = &PATTERN (insn);
      if (GET_CODE (*pat) == COND_EXEC)
	pat = &COND_EXEC_CODE (*pat);
      if (GET_CODE (*pat) == PARALLEL)
	oldn = XVECLEN (*pat, 0);
      else
	oldn = 1;
      unsigned int newn = amd.side_effects.length ();
      new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
      /* Copy the existing element(s) first...  */
      if (GET_CODE (*pat) == PARALLEL)
	for (i = 0; i < oldn; i++)
	  XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
      else
	XVECEXP (new_pat, 0, 0) = *pat;

      /* ...then the queued side effects.  */
      rtx effect;
      unsigned int j;
      FOR_EACH_VEC_ELT_REVERSE (amd.side_effects, j, effect)
	XVECEXP (new_pat, 0, j + oldn) = effect;
      validate_change (NULL_RTX, pat, new_pat, true);
    }
}
1331 
1332 /* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV.  */
1333 static inline rtx
dv_as_rtx(decl_or_value dv)1334 dv_as_rtx (decl_or_value dv)
1335 {
1336   tree decl;
1337 
1338   if (dv_is_value_p (dv))
1339     return dv_as_value (dv);
1340 
1341   decl = dv_as_decl (dv);
1342 
1343   gcc_checking_assert (TREE_CODE (decl) == DEBUG_EXPR_DECL);
1344   return DECL_RTL_KNOWN_SET (decl);
1345 }
1346 
1347 /* Return nonzero if a decl_or_value must not have more than one
1348    variable part.  The returned value discriminates among various
   kinds of one-part DVs according to enum onepart_enum.  */
1350 static inline onepart_enum
dv_onepart_p(decl_or_value dv)1351 dv_onepart_p (decl_or_value dv)
1352 {
1353   tree decl;
1354 
1355   if (!MAY_HAVE_DEBUG_BIND_INSNS)
1356     return NOT_ONEPART;
1357 
1358   if (dv_is_value_p (dv))
1359     return ONEPART_VALUE;
1360 
1361   decl = dv_as_decl (dv);
1362 
1363   if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
1364     return ONEPART_DEXPR;
1365 
1366   if (target_for_debug_bind (decl) != NULL_TREE)
1367     return ONEPART_VDECL;
1368 
1369   return NOT_ONEPART;
1370 }
1371 
1372 /* Return the variable pool to be used for a dv of type ONEPART.  */
1373 static inline pool_allocator &
onepart_pool(onepart_enum onepart)1374 onepart_pool (onepart_enum onepart)
1375 {
1376   return onepart ? valvar_pool : var_pool;
1377 }
1378 
1379 /* Allocate a variable_def from the corresponding variable pool.  */
1380 static inline variable *
onepart_pool_allocate(onepart_enum onepart)1381 onepart_pool_allocate (onepart_enum onepart)
1382 {
1383   return (variable*) onepart_pool (onepart).allocate ();
1384 }
1385 
1386 /* Build a decl_or_value out of a decl.  */
1387 static inline decl_or_value
dv_from_decl(tree decl)1388 dv_from_decl (tree decl)
1389 {
1390   decl_or_value dv;
1391   dv = decl;
1392   gcc_checking_assert (dv_is_decl_p (dv));
1393   return dv;
1394 }
1395 
1396 /* Build a decl_or_value out of a value.  */
1397 static inline decl_or_value
dv_from_value(rtx value)1398 dv_from_value (rtx value)
1399 {
1400   decl_or_value dv;
1401   dv = value;
1402   gcc_checking_assert (dv_is_value_p (dv));
1403   return dv;
1404 }
1405 
1406 /* Return a value or the decl of a debug_expr as a decl_or_value.  */
1407 static inline decl_or_value
dv_from_rtx(rtx x)1408 dv_from_rtx (rtx x)
1409 {
1410   decl_or_value dv;
1411 
1412   switch (GET_CODE (x))
1413     {
1414     case DEBUG_EXPR:
1415       dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
1416       gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x)) == x);
1417       break;
1418 
1419     case VALUE:
1420       dv = dv_from_value (x);
1421       break;
1422 
1423     default:
1424       gcc_unreachable ();
1425     }
1426 
1427   return dv;
1428 }
1429 
1430 extern void debug_dv (decl_or_value dv);
1431 
1432 DEBUG_FUNCTION void
debug_dv(decl_or_value dv)1433 debug_dv (decl_or_value dv)
1434 {
1435   if (dv_is_value_p (dv))
1436     debug_rtx (dv_as_value (dv));
1437   else
1438     debug_generic_stmt (dv_as_decl (dv));
1439 }
1440 
1441 static void loc_exp_dep_clear (variable *var);
1442 
1443 /* Free the element of VARIABLE_HTAB (its type is struct variable_def).  */
1444 
static void
variable_htab_free (void *elem)
{
  int i;
  variable *var = (variable *) elem;
  location_chain *node, *next;

  gcc_checking_assert (var->refcount > 0);

  /* Only actually free once the last reference is dropped.  */
  var->refcount--;
  if (var->refcount > 0)
    return;

  /* Free the location chain of every variable part.  */
  for (i = 0; i < var->n_var_parts; i++)
    {
      for (node = var->var_part[i].loc_chain; node; node = next)
	{
	  next = node->next;
	  delete node;
	}
      var->var_part[i].loc_chain = NULL;
    }
  /* One-part variables may carry auxiliary data (dependency lists used
     while emitting notes) that must be torn down too.  */
  if (var->onepart && VAR_LOC_1PAUX (var))
    {
      loc_exp_dep_clear (var);
      if (VAR_LOC_DEP_LST (var))
	VAR_LOC_DEP_LST (var)->pprev = NULL;
      XDELETE (VAR_LOC_1PAUX (var));
      /* These may be reused across functions, so reset
	 e.g. NO_LOC_P.  */
      if (var->onepart == ONEPART_DEXPR)
	set_dv_changed (var->dv, true);
    }
  /* Return the variable to the pool it was allocated from.  */
  onepart_pool (var->onepart).remove (var);
}
1480 
1481 /* Initialize the set (array) SET of attrs to empty lists.  */
1482 
1483 static void
init_attrs_list_set(attrs ** set)1484 init_attrs_list_set (attrs **set)
1485 {
1486   int i;
1487 
1488   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1489     set[i] = NULL;
1490 }
1491 
1492 /* Make the list *LISTP empty.  */
1493 
1494 static void
attrs_list_clear(attrs ** listp)1495 attrs_list_clear (attrs **listp)
1496 {
1497   attrs *list, *next;
1498 
1499   for (list = *listp; list; list = next)
1500     {
1501       next = list->next;
1502       delete list;
1503     }
1504   *listp = NULL;
1505 }
1506 
1507 /* Return true if the pair of DECL and OFFSET is the member of the LIST.  */
1508 
1509 static attrs *
attrs_list_member(attrs * list,decl_or_value dv,HOST_WIDE_INT offset)1510 attrs_list_member (attrs *list, decl_or_value dv, HOST_WIDE_INT offset)
1511 {
1512   for (; list; list = list->next)
1513     if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
1514       return list;
1515   return NULL;
1516 }
1517 
1518 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP.  */
1519 
1520 static void
attrs_list_insert(attrs ** listp,decl_or_value dv,HOST_WIDE_INT offset,rtx loc)1521 attrs_list_insert (attrs **listp, decl_or_value dv,
1522 		   HOST_WIDE_INT offset, rtx loc)
1523 {
1524   attrs *list = new attrs;
1525   list->loc = loc;
1526   list->dv = dv;
1527   list->offset = offset;
1528   list->next = *listp;
1529   *listp = list;
1530 }
1531 
1532 /* Copy all nodes from SRC and create a list *DSTP of the copies.  */
1533 
1534 static void
attrs_list_copy(attrs ** dstp,attrs * src)1535 attrs_list_copy (attrs **dstp, attrs *src)
1536 {
1537   attrs_list_clear (dstp);
1538   for (; src; src = src->next)
1539     {
1540       attrs *n = new attrs;
1541       n->loc = src->loc;
1542       n->dv = src->dv;
1543       n->offset = src->offset;
1544       n->next = *dstp;
1545       *dstp = n;
1546     }
1547 }
1548 
1549 /* Add all nodes from SRC which are not in *DSTP to *DSTP.  */
1550 
1551 static void
attrs_list_union(attrs ** dstp,attrs * src)1552 attrs_list_union (attrs **dstp, attrs *src)
1553 {
1554   for (; src; src = src->next)
1555     {
1556       if (!attrs_list_member (*dstp, src->dv, src->offset))
1557 	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1558     }
1559 }
1560 
1561 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1562    *DSTP.  */
1563 
1564 static void
attrs_list_mpdv_union(attrs ** dstp,attrs * src,attrs * src2)1565 attrs_list_mpdv_union (attrs **dstp, attrs *src, attrs *src2)
1566 {
1567   gcc_assert (!*dstp);
1568   for (; src; src = src->next)
1569     {
1570       if (!dv_onepart_p (src->dv))
1571 	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1572     }
1573   for (src = src2; src; src = src->next)
1574     {
1575       if (!dv_onepart_p (src->dv)
1576 	  && !attrs_list_member (*dstp, src->dv, src->offset))
1577 	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1578     }
1579 }
1580 
1581 /* Shared hashtable support.  */
1582 
1583 /* Return true if VARS is shared.  */
1584 
static inline bool
shared_hash_shared (shared_hash *vars)
{
  /* More than one reference means another dataflow set also points at
     this table, so it must be unshared before modification.  */
  return vars->refcount > 1;
}
1590 
1591 /* Return the hash table for VARS.  */
1592 
static inline variable_table_type *
shared_hash_htab (shared_hash *vars)
{
  /* Plain accessor; the returned table may be shared with other sets.  */
  return vars->htab;
}
1598 
1599 /* Return true if VAR is shared, or maybe because VARS is shared.  */
1600 
1601 static inline bool
shared_var_p(variable * var,shared_hash * vars)1602 shared_var_p (variable *var, shared_hash *vars)
1603 {
1604   /* Don't count an entry in the changed_variables table as a duplicate.  */
1605   return ((var->refcount > 1 + (int) var->in_changed_variables)
1606 	  || shared_hash_shared (vars));
1607 }
1608 
1609 /* Copy variables into a new hash table.  */
1610 
1611 static shared_hash *
shared_hash_unshare(shared_hash * vars)1612 shared_hash_unshare (shared_hash *vars)
1613 {
1614   shared_hash *new_vars = new shared_hash;
1615   gcc_assert (vars->refcount > 1);
1616   new_vars->refcount = 1;
1617   new_vars->htab = new variable_table_type (vars->htab->elements () + 3);
1618   vars_copy (new_vars->htab, vars->htab);
1619   vars->refcount--;
1620   return new_vars;
1621 }
1622 
1623 /* Increment reference counter on VARS and return it.  */
1624 
1625 static inline shared_hash *
shared_hash_copy(shared_hash * vars)1626 shared_hash_copy (shared_hash *vars)
1627 {
1628   vars->refcount++;
1629   return vars;
1630 }
1631 
1632 /* Decrement reference counter and destroy hash table if not shared
1633    anymore.  */
1634 
1635 static void
shared_hash_destroy(shared_hash * vars)1636 shared_hash_destroy (shared_hash *vars)
1637 {
1638   gcc_checking_assert (vars->refcount > 0);
1639   if (--vars->refcount == 0)
1640     {
1641       delete vars->htab;
1642       delete vars;
1643     }
1644 }
1645 
1646 /* Unshare *PVARS if shared and return slot for DV.  If INS is
1647    INSERT, insert it if not already present.  */
1648 
1649 static inline variable **
shared_hash_find_slot_unshare_1(shared_hash ** pvars,decl_or_value dv,hashval_t dvhash,enum insert_option ins)1650 shared_hash_find_slot_unshare_1 (shared_hash **pvars, decl_or_value dv,
1651 				 hashval_t dvhash, enum insert_option ins)
1652 {
1653   if (shared_hash_shared (*pvars))
1654     *pvars = shared_hash_unshare (*pvars);
1655   return shared_hash_htab (*pvars)->find_slot_with_hash (dv, dvhash, ins);
1656 }
1657 
/* Convenience wrapper that computes DV's hash itself.  */

static inline variable **
shared_hash_find_slot_unshare (shared_hash **pvars, decl_or_value dv,
			       enum insert_option ins)
{
  return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
}
1664 
1665 /* Return slot for DV, if it is already present in the hash table.
   If it is not present, insert it only if VARS is not shared, otherwise
1667    return NULL.  */
1668 
1669 static inline variable **
shared_hash_find_slot_1(shared_hash * vars,decl_or_value dv,hashval_t dvhash)1670 shared_hash_find_slot_1 (shared_hash *vars, decl_or_value dv, hashval_t dvhash)
1671 {
1672   return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash,
1673 						       shared_hash_shared (vars)
1674 						       ? NO_INSERT : INSERT);
1675 }
1676 
/* Convenience wrapper that computes DV's hash itself.  */

static inline variable **
shared_hash_find_slot (shared_hash *vars, decl_or_value dv)
{
  return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
}
1682 
1683 /* Return slot for DV only if it is already present in the hash table.  */
1684 
static inline variable **
shared_hash_find_slot_noinsert_1 (shared_hash *vars, decl_or_value dv,
				  hashval_t dvhash)
{
  /* NO_INSERT: return NULL rather than creating a missing entry.  */
  return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash, NO_INSERT);
}
1691 
/* Convenience wrapper that computes DV's hash itself.  */

static inline variable **
shared_hash_find_slot_noinsert (shared_hash *vars, decl_or_value dv)
{
  return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
}
1697 
1698 /* Return variable for DV or NULL if not already present in the hash
1699    table.  */
1700 
static inline variable *
shared_hash_find_1 (shared_hash *vars, decl_or_value dv, hashval_t dvhash)
{
  /* Lookup only; never inserts.  */
  return shared_hash_htab (vars)->find_with_hash (dv, dvhash);
}
1706 
/* Convenience wrapper that computes DV's hash itself.  */

static inline variable *
shared_hash_find (shared_hash *vars, decl_or_value dv)
{
  return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
}
1712 
/* Return true if TVAL is better than CVAL as a canonical value.  We
1714    choose lowest-numbered VALUEs, using the RTX address as a
1715    tie-breaker.  The idea is to arrange them into a star topology,
1716    such that all of them are at most one step away from the canonical
1717    value, and the canonical value has backlinks to all of them, in
1718    addition to all the actual locations.  We don't enforce this
1719    topology throughout the entire dataflow analysis, though.
1720  */
1721 
1722 static inline bool
canon_value_cmp(rtx tval,rtx cval)1723 canon_value_cmp (rtx tval, rtx cval)
1724 {
1725   return !cval
1726     || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
1727 }
1728 
1729 static bool dst_can_be_shared;
1730 
1731 /* Return a copy of a variable VAR and insert it to dataflow set SET.  */
1732 
static variable **
unshare_variable (dataflow_set *set, variable **slot, variable *var,
		  enum var_init_status initialized)
{
  variable *new_var;
  int i;

  /* Make a fresh copy holding a single reference, and drop one
     reference from the original.  */
  new_var = onepart_pool_allocate (var->onepart);
  new_var->dv = var->dv;
  new_var->refcount = 1;
  var->refcount--;
  new_var->n_var_parts = var->n_var_parts;
  new_var->onepart = var->onepart;
  new_var->in_changed_variables = false;

  if (! flag_var_tracking_uninit)
    initialized = VAR_INIT_STATUS_INITIALIZED;

  for (i = 0; i < var->n_var_parts; i++)
    {
      location_chain *node;
      location_chain **nextp;

      if (i == 0 && var->onepart)
	{
	  /* One-part auxiliary data is only used while emitting
	     notes, so propagate it to the new variable in the active
	     dataflow set.  If we're not emitting notes, this will be
	     a no-op.  */
	  gcc_checking_assert (!VAR_LOC_1PAUX (var) || emit_notes);
	  VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (var);
	  VAR_LOC_1PAUX (var) = NULL;
	}
      else
	VAR_PART_OFFSET (new_var, i) = VAR_PART_OFFSET (var, i);
      /* Deep-copy the location chain, raising each node's init status
	 to at least INITIALIZED and dropping MEM set_src values.  */
      nextp = &new_var->var_part[i].loc_chain;
      for (node = var->var_part[i].loc_chain; node; node = node->next)
	{
	  location_chain *new_lc;

	  new_lc = new location_chain;
	  new_lc->next = NULL;
	  if (node->init > initialized)
	    new_lc->init = node->init;
	  else
	    new_lc->init = initialized;
	  if (node->set_src && !(MEM_P (node->set_src)))
	    new_lc->set_src = node->set_src;
	  else
	    new_lc->set_src = NULL;
	  new_lc->loc = node->loc;

	  *nextp = new_lc;
	  nextp = &new_lc->next;
	}

      new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
    }

  dst_can_be_shared = false;
  /* Re-find the slot if the containing hash table itself had to be
     unshared, or if we are traversing a different table.  */
  if (shared_hash_shared (set->vars))
    slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
  else if (set->traversed_vars && set->vars != set->traversed_vars)
    slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
  *slot = new_var;
  /* If the old variable is listed in changed_variables, transfer that
     entry (and its reference) to the copy.  */
  if (var->in_changed_variables)
    {
      variable **cslot
	= changed_variables->find_slot_with_hash (var->dv,
						  dv_htab_hash (var->dv),
						  NO_INSERT);
      gcc_assert (*cslot == (void *) var);
      var->in_changed_variables = false;
      variable_htab_free (var);
      *cslot = new_var;
      new_var->in_changed_variables = true;
    }
  return slot;
}
1812 
1813 /* Copy all variables from hash table SRC to hash table DST.  */
1814 
1815 static void
vars_copy(variable_table_type * dst,variable_table_type * src)1816 vars_copy (variable_table_type *dst, variable_table_type *src)
1817 {
1818   variable_iterator_type hi;
1819   variable *var;
1820 
1821   FOR_EACH_HASH_TABLE_ELEMENT (*src, var, variable, hi)
1822     {
1823       variable **dstp;
1824       var->refcount++;
1825       dstp = dst->find_slot_with_hash (var->dv, dv_htab_hash (var->dv),
1826 				       INSERT);
1827       *dstp = var;
1828     }
1829 }
1830 
1831 /* Map a decl to its main debug decl.  */
1832 
1833 static inline tree
var_debug_decl(tree decl)1834 var_debug_decl (tree decl)
1835 {
1836   if (decl && VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
1837     {
1838       tree debugdecl = DECL_DEBUG_EXPR (decl);
1839       if (DECL_P (debugdecl))
1840 	decl = debugdecl;
1841     }
1842 
1843   return decl;
1844 }
1845 
1846 /* Set the register LOC to contain DV, OFFSET.  */
1847 
1848 static void
var_reg_decl_set(dataflow_set * set,rtx loc,enum var_init_status initialized,decl_or_value dv,HOST_WIDE_INT offset,rtx set_src,enum insert_option iopt)1849 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1850 		  decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1851 		  enum insert_option iopt)
1852 {
1853   attrs *node;
1854   bool decl_p = dv_is_decl_p (dv);
1855 
1856   if (decl_p)
1857     dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1858 
1859   for (node = set->regs[REGNO (loc)]; node; node = node->next)
1860     if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1861 	&& node->offset == offset)
1862       break;
1863   if (!node)
1864     attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1865   set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1866 }
1867 
1868 /* Return true if we should track a location that is OFFSET bytes from
1869    a variable.  Store the constant offset in *OFFSET_OUT if so.  */
1870 
1871 static bool
track_offset_p(poly_int64 offset,HOST_WIDE_INT * offset_out)1872 track_offset_p (poly_int64 offset, HOST_WIDE_INT *offset_out)
1873 {
1874   HOST_WIDE_INT const_offset;
1875   if (!offset.is_constant (&const_offset)
1876       || !IN_RANGE (const_offset, 0, MAX_VAR_PARTS - 1))
1877     return false;
1878   *offset_out = const_offset;
1879   return true;
1880 }
1881 
1882 /* Return the offset of a register that track_offset_p says we
1883    should track.  */
1884 
1885 static HOST_WIDE_INT
get_tracked_reg_offset(rtx loc)1886 get_tracked_reg_offset (rtx loc)
1887 {
1888   HOST_WIDE_INT offset;
1889   if (!track_offset_p (REG_OFFSET (loc), &offset))
1890     gcc_unreachable ();
1891   return offset;
1892 }
1893 
1894 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC).  */
1895 
1896 static void
var_reg_set(dataflow_set * set,rtx loc,enum var_init_status initialized,rtx set_src)1897 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1898 	     rtx set_src)
1899 {
1900   tree decl = REG_EXPR (loc);
1901   HOST_WIDE_INT offset = get_tracked_reg_offset (loc);
1902 
1903   var_reg_decl_set (set, loc, initialized,
1904 		    dv_from_decl (decl), offset, set_src, INSERT);
1905 }
1906 
1907 static enum var_init_status
get_init_value(dataflow_set * set,rtx loc,decl_or_value dv)1908 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1909 {
1910   variable *var;
1911   int i;
1912   enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1913 
1914   if (! flag_var_tracking_uninit)
1915     return VAR_INIT_STATUS_INITIALIZED;
1916 
1917   var = shared_hash_find (set->vars, dv);
1918   if (var)
1919     {
1920       for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1921 	{
1922 	  location_chain *nextp;
1923 	  for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1924 	    if (rtx_equal_p (nextp->loc, loc))
1925 	      {
1926 		ret_val = nextp->init;
1927 		break;
1928 	      }
1929 	}
1930     }
1931 
1932   return ret_val;
1933 }
1934 
1935 /* Delete current content of register LOC in dataflow set SET and set
1936    the register to contain REG_EXPR (LOC), REG_OFFSET (LOC).  If
1937    MODIFY is true, any other live copies of the same variable part are
1938    also deleted from the dataflow set, otherwise the variable part is
1939    assumed to be copied from another location holding the same
1940    part.  */
1941 
static void
var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
			enum var_init_status initialized, rtx set_src)
{
  tree decl = REG_EXPR (loc);
  HOST_WIDE_INT offset = get_tracked_reg_offset (loc);
  attrs *node, *next;
  attrs **nextp;

  decl = var_debug_decl (decl);

  if (initialized == VAR_INIT_STATUS_UNKNOWN)
    initialized = get_init_value (set, loc, dv_from_decl (decl));

  /* Walk the register's attribute list: drop every binding for a
     different variable part, keep (and refresh the location of) the
     one for DECL, OFFSET.  NEXTP always points at the link that will
     hold the next surviving node, so unlinking is O(1).  */
  nextp = &set->regs[REGNO (loc)];
  for (node = *nextp; node; node = next)
    {
      next = node->next;
      if (dv_as_opaque (node->dv) != decl || node->offset != offset)
	{
	  delete_variable_part (set, node->loc, node->dv, node->offset);
	  delete node;
	  *nextp = next;
	}
      else
	{
	  node->loc = loc;
	  nextp = &node->next;
	}
    }
  /* When this is a modifying store, other live copies of the same
     variable part become stale as well.  */
  if (modify)
    clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
  var_reg_set (set, loc, initialized, set_src);
}
1976 
1977 /* Delete the association of register LOC in dataflow set SET with any
1978    variables that aren't onepart.  If CLOBBER is true, also delete any
1979    other live copies of the same variable part, and delete the
1980    association with onepart dvs too.  */
1981 
static void
var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
{
  attrs **nextp = &set->regs[REGNO (loc)];
  attrs *node, *next;

  /* When clobbering, also remove any other live copies of the
     variable part named by REG_EXPR/REG_OFFSET, provided the offset
     is trackable.  */
  HOST_WIDE_INT offset;
  if (clobber && track_offset_p (REG_OFFSET (loc), &offset))
    {
      tree decl = REG_EXPR (loc);

      decl = var_debug_decl (decl);

      clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
    }

  /* Drop this register's bindings: all of them when clobbering,
     otherwise only the non-onepart ones.  */
  for (node = *nextp; node; node = next)
    {
      next = node->next;
      if (clobber || !dv_onepart_p (node->dv))
	{
	  delete_variable_part (set, node->loc, node->dv, node->offset);
	  delete node;
	  *nextp = next;
	}
      else
	nextp = &node->next;
    }
}
2011 
2012 /* Delete content of register with number REGNO in dataflow set SET.  */
2013 
2014 static void
var_regno_delete(dataflow_set * set,int regno)2015 var_regno_delete (dataflow_set *set, int regno)
2016 {
2017   attrs **reg = &set->regs[regno];
2018   attrs *node, *next;
2019 
2020   for (node = *reg; node; node = next)
2021     {
2022       next = node->next;
2023       delete_variable_part (set, node->loc, node->dv, node->offset);
2024       delete node;
2025     }
2026   *reg = NULL;
2027 }
2028 
2029 /* Return true if I is the negated value of a power of two.  */
2030 static bool
negative_power_of_two_p(HOST_WIDE_INT i)2031 negative_power_of_two_p (HOST_WIDE_INT i)
2032 {
2033   unsigned HOST_WIDE_INT x = -(unsigned HOST_WIDE_INT)i;
2034   return pow2_or_zerop (x);
2035 }
2036 
2037 /* Strip constant offsets and alignments off of LOC.  Return the base
2038    expression.  */
2039 
2040 static rtx
vt_get_canonicalize_base(rtx loc)2041 vt_get_canonicalize_base (rtx loc)
2042 {
2043   while ((GET_CODE (loc) == PLUS
2044 	  || GET_CODE (loc) == AND)
2045 	 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2046 	 && (GET_CODE (loc) != AND
2047 	     || negative_power_of_two_p (INTVAL (XEXP (loc, 1)))))
2048     loc = XEXP (loc, 0);
2049 
2050   return loc;
2051 }
2052 
/* This caches canonicalized addresses for VALUEs, computed using
   information in the global cselib table.  Valid for the whole
   function being compiled.  */
static hash_map<rtx, rtx> *global_get_addr_cache;

/* This caches canonicalized addresses for VALUEs, computed using
   information from the global cache and information pertaining to a
   basic block being analyzed.  Reset as blocks change.  */
static hash_map<rtx, rtx> *local_get_addr_cache;

/* Defined below; forward-declared because the cache lookups and the
   canonicalizer recurse into each other.  */
static rtx vt_canonicalize_addr (dataflow_set *, rtx);
2063 
2064 /* Return the canonical address for LOC, that must be a VALUE, using a
2065    cached global equivalence or computing it and storing it in the
2066    global cache.  */
2067 
static rtx
get_addr_from_global_cache (rtx const loc)
{
  rtx x;

  gcc_checking_assert (GET_CODE (loc) == VALUE);

  bool existed;
  rtx *slot = &global_get_addr_cache->get_or_insert (loc, &existed);
  if (existed)
    return *slot;

  x = canon_rtx (get_addr (loc));

  /* Tentative, avoiding infinite recursion.  */
  *slot = x;

  if (x != loc)
    {
      /* Canonicalize recursively; SET is NULL so only global
	 (function-wide) equivalences are used.  */
      rtx nx = vt_canonicalize_addr (NULL, x);
      if (nx != x)
	{
	  /* The table may have moved during recursion, recompute
	     SLOT.  */
	  *global_get_addr_cache->get (loc) = x = nx;
	}
    }

  return x;
}
2098 
2099 /* Return the canonical address for LOC, that must be a VALUE, using a
2100    cached local equivalence or computing it and storing it in the
2101    local cache.  */
2102 
static rtx
get_addr_from_local_cache (dataflow_set *set, rtx const loc)
{
  rtx x;
  decl_or_value dv;
  variable *var;
  location_chain *l;

  gcc_checking_assert (GET_CODE (loc) == VALUE);

  bool existed;
  rtx *slot = &local_get_addr_cache->get_or_insert (loc, &existed);
  if (existed)
    return *slot;

  /* Start from the function-wide canonical address.  */
  x = get_addr_from_global_cache (loc);

  /* Tentative, avoiding infinite recursion.  */
  *slot = x;

  /* Recurse to cache local expansion of X, or if we need to search
     for a VALUE in the expansion.  */
  if (x != loc)
    {
      rtx nx = vt_canonicalize_addr (set, x);
      if (nx != x)
	{
	  /* The hash map may have been resized during the recursion;
	     re-fetch the slot before storing through it.  */
	  slot = local_get_addr_cache->get (loc);
	  *slot = x = nx;
	}
      return x;
    }

  dv = dv_from_rtx (x);
  var = shared_hash_find (set->vars, dv);
  if (!var)
    return x;

  /* Look for an improved equivalent expression: a location whose
     canonicalized base is a VALUE that compares as more canonical
     than LOC itself.  */
  for (l = var->var_part[0].loc_chain; l; l = l->next)
    {
      rtx base = vt_get_canonicalize_base (l->loc);
      if (GET_CODE (base) == VALUE
	  && canon_value_cmp (base, loc))
	{
	  rtx nx = vt_canonicalize_addr (set, l->loc);
	  if (x != nx)
	    {
	      /* Re-fetch the slot; the recursion above may have
		 resized the table.  */
	      slot = local_get_addr_cache->get (loc);
	      *slot = x = nx;
	    }
	  break;
	}
    }

  return x;
}
2160 
2161 /* Canonicalize LOC using equivalences from SET in addition to those
2162    in the cselib static table.  It expects a VALUE-based expression,
2163    and it will only substitute VALUEs with other VALUEs or
2164    function-global equivalences, so that, if two addresses have base
2165    VALUEs that are locally or globally related in ways that
2166    memrefs_conflict_p cares about, they will both canonicalize to
2167    expressions that have the same base VALUE.
2168 
2169    The use of VALUEs as canonical base addresses enables the canonical
2170    RTXs to remain unchanged globally, if they resolve to a constant,
2171    or throughout a basic block otherwise, so that they can be cached
2172    and the cache needs not be invalidated when REGs, MEMs or such
2173    change.  */
2174 
static rtx
vt_canonicalize_addr (dataflow_set *set, rtx oloc)
{
  poly_int64 ofst = 0, term;
  machine_mode mode = GET_MODE (oloc);
  rtx loc = oloc;
  rtx x;
  bool retry = true;

  while (retry)
    {
      /* Peel constant offsets, accumulating them in OFST so they can
	 be re-applied to the canonical base at the end.  */
      while (GET_CODE (loc) == PLUS
	     && poly_int_rtx_p (XEXP (loc, 1), &term))
	{
	  ofst += term;
	  loc = XEXP (loc, 0);
	}

      /* Alignment operations can't normally be combined, so just
	 canonicalize the base and we're done.  We'll normally have
	 only one stack alignment anyway.  */
      if (GET_CODE (loc) == AND
	  && GET_CODE (XEXP (loc, 1)) == CONST_INT
	  && negative_power_of_two_p (INTVAL (XEXP (loc, 1))))
	{
	  x = vt_canonicalize_addr (set, XEXP (loc, 0));
	  if (x != XEXP (loc, 0))
	    loc = gen_rtx_AND (mode, x, XEXP (loc, 1));
	  retry = false;
	}

      if (GET_CODE (loc) == VALUE)
	{
	  /* Substitute the VALUE with its cached canonical address:
	     local equivalences when SET is available, global
	     otherwise.  */
	  if (set)
	    loc = get_addr_from_local_cache (set, loc);
	  else
	    loc = get_addr_from_global_cache (loc);

	  /* Consolidate plus_constants.  */
	  while (maybe_ne (ofst, 0)
		 && GET_CODE (loc) == PLUS
		 && poly_int_rtx_p (XEXP (loc, 1), &term))
	    {
	      ofst += term;
	      loc = XEXP (loc, 0);
	    }

	  retry = false;
	}
      else
	{
	  x = canon_rtx (loc);
	  /* Keep iterating as long as canon_rtx makes progress,
	     unless the AND case above already decided to stop.  */
	  if (retry)
	    retry = (x != loc);
	  loc = x;
	}
    }

  /* Add OFST back in.  */
  if (maybe_ne (ofst, 0))
    {
      /* Don't build new RTL if we can help it.  */
      if (strip_offset (oloc, &term) == loc && known_eq (term, ofst))
	return oloc;

      loc = plus_constant (mode, loc, ofst);
    }

  return loc;
}
2245 
2246 /* Return true iff there's a true dependence between MLOC and LOC.
2247    MADDR must be a canonicalized version of MLOC's address.  */
2248 
2249 static inline bool
vt_canon_true_dep(dataflow_set * set,rtx mloc,rtx maddr,rtx loc)2250 vt_canon_true_dep (dataflow_set *set, rtx mloc, rtx maddr, rtx loc)
2251 {
2252   if (GET_CODE (loc) != MEM)
2253     return false;
2254 
2255   rtx addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2256   if (!canon_true_dependence (mloc, GET_MODE (mloc), maddr, loc, addr))
2257     return false;
2258 
2259   return true;
2260 }
2261 
2262 /* Hold parameters for the hashtab traversal function
2263    drop_overlapping_mem_locs, see below.  */
2264 
struct overlapping_mems
{
  /* The dataflow set being traversed.  */
  dataflow_set *set;
  /* The canonicalized MEM being stored, and the canonicalized form
     of its address.  */
  rtx loc, addr;
};
2270 
2271 /* Remove all MEMs that overlap with COMS->LOC from the location list
2272    of a hash table entry for a onepart variable.  COMS->ADDR must be a
2273    canonicalized form of COMS->LOC's address, and COMS->LOC must be
2274    canonicalized itself.  */
2275 
int
drop_overlapping_mem_locs (variable **slot, overlapping_mems *coms)
{
  dataflow_set *set = coms->set;
  rtx mloc = coms->loc, addr = coms->addr;
  variable *var = *slot;

  /* Only onepart variables are processed here; others are left
     untouched.  */
  if (var->onepart != NOT_ONEPART)
    {
      location_chain *loc, **locp;
      bool changed = false;
      rtx cur_loc;

      gcc_assert (var->n_var_parts == 1);

      /* If the variable structure is shared, only pay for unsharing
	 it when there actually is an overlapping MEM to remove.  */
      if (shared_var_p (var, set->vars))
	{
	  for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
	    if (vt_canon_true_dep (set, mloc, addr, loc->loc))
	      break;

	  if (!loc)
	    return 1;

	  slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
	  var = *slot;
	  gcc_assert (var->n_var_parts == 1);
	}

      if (VAR_LOC_1PAUX (var))
	cur_loc = VAR_LOC_FROM (var);
      else
	cur_loc = var->var_part[0].cur_loc;

      /* Unlink and free every location in the chain that truly
	 depends on MLOC.  LOCP tracks the link to the current node so
	 removal is O(1).  */
      for (locp = &var->var_part[0].loc_chain, loc = *locp;
	   loc; loc = *locp)
	{
	  if (!vt_canon_true_dep (set, mloc, addr, loc->loc))
	    {
	      locp = &loc->next;
	      continue;
	    }

	  *locp = loc->next;
	  /* If we have deleted the location which was last emitted
	     we have to emit new location so add the variable to set
	     of changed variables.  */
	  if (cur_loc == loc->loc)
	    {
	      changed = true;
	      var->var_part[0].cur_loc = NULL;
	      if (VAR_LOC_1PAUX (var))
		VAR_LOC_FROM (var) = NULL;
	    }
	  delete loc;
	}

      /* An emptied chain means the variable lost its only part.  */
      if (!var->var_part[0].loc_chain)
	{
	  var->n_var_parts--;
	  changed = true;
	}
      if (changed)
	variable_was_changed (var, set);
    }

  /* Continue traversing the hash table.  */
  return 1;
}
2344 
2345 /* Remove from SET all VALUE bindings to MEMs that overlap with LOC.  */
2346 
2347 static void
clobber_overlapping_mems(dataflow_set * set,rtx loc)2348 clobber_overlapping_mems (dataflow_set *set, rtx loc)
2349 {
2350   struct overlapping_mems coms;
2351 
2352   gcc_checking_assert (GET_CODE (loc) == MEM);
2353 
2354   coms.set = set;
2355   coms.loc = canon_rtx (loc);
2356   coms.addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2357 
2358   set->traversed_vars = set->vars;
2359   shared_hash_htab (set->vars)
2360     ->traverse <overlapping_mems*, drop_overlapping_mem_locs> (&coms);
2361   set->traversed_vars = NULL;
2362 }
2363 
2364 /* Set the location of DV, OFFSET as the MEM LOC.  */
2365 
2366 static void
var_mem_decl_set(dataflow_set * set,rtx loc,enum var_init_status initialized,decl_or_value dv,HOST_WIDE_INT offset,rtx set_src,enum insert_option iopt)2367 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2368 		  decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
2369 		  enum insert_option iopt)
2370 {
2371   if (dv_is_decl_p (dv))
2372     dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
2373 
2374   set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
2375 }
2376 
2377 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
2378    SET to LOC.
2379    Adjust the address first if it is stack pointer based.  */
2380 
2381 static void
var_mem_set(dataflow_set * set,rtx loc,enum var_init_status initialized,rtx set_src)2382 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2383 	     rtx set_src)
2384 {
2385   tree decl = MEM_EXPR (loc);
2386   HOST_WIDE_INT offset = int_mem_offset (loc);
2387 
2388   var_mem_decl_set (set, loc, initialized,
2389 		    dv_from_decl (decl), offset, set_src, INSERT);
2390 }
2391 
2392 /* Delete and set the location part of variable MEM_EXPR (LOC) in
2393    dataflow set SET to LOC.  If MODIFY is true, any other live copies
2394    of the same variable part are also deleted from the dataflow set,
2395    otherwise the variable part is assumed to be copied from another
2396    location holding the same part.
2397    Adjust the address first if it is stack pointer based.  */
2398 
static void
var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
			enum var_init_status initialized, rtx set_src)
{
  tree decl = MEM_EXPR (loc);
  HOST_WIDE_INT offset = int_mem_offset (loc);

  /* Storing to LOC invalidates VALUE bindings to MEMs that overlap
     it; drop those first.  */
  clobber_overlapping_mems (set, loc);
  decl = var_debug_decl (decl);

  if (initialized == VAR_INIT_STATUS_UNKNOWN)
    initialized = get_init_value (set, loc, dv_from_decl (decl));

  /* A modifying store also kills other live copies of the same
     variable part.  */
  if (modify)
    clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
  var_mem_set (set, loc, initialized, set_src);
}
2416 
2417 /* Delete the location part LOC from dataflow set SET.  If CLOBBER is
2418    true, also delete any other live copies of the same variable part.
2419    Adjust the address first if it is stack pointer based.  */
2420 
static void
var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
{
  tree decl = MEM_EXPR (loc);
  HOST_WIDE_INT offset = int_mem_offset (loc);

  /* Invalidating LOC also invalidates VALUE bindings to MEMs that
     overlap it.  */
  clobber_overlapping_mems (set, loc);
  decl = var_debug_decl (decl);
  if (clobber)
    clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
  delete_variable_part (set, loc, dv_from_decl (decl), offset);
}
2433 
2434 /* Return true if LOC should not be expanded for location expressions,
2435    or used in them.  */
2436 
2437 static inline bool
unsuitable_loc(rtx loc)2438 unsuitable_loc (rtx loc)
2439 {
2440   switch (GET_CODE (loc))
2441     {
2442     case PC:
2443     case SCRATCH:
2444     case CC0:
2445     case ASM_INPUT:
2446     case ASM_OPERANDS:
2447       return true;
2448 
2449     default:
2450       return false;
2451     }
2452 }
2453 
2454 /* Bind VAL to LOC in SET.  If MODIFIED, detach LOC from any values
2455    bound to it.  */
2456 
static inline void
val_bind (dataflow_set *set, rtx val, rtx loc, bool modified)
{
  if (REG_P (loc))
    {
      /* A modified register no longer holds any of its previous
	 contents.  */
      if (modified)
	var_regno_delete (set, REGNO (loc));
      var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
			dv_from_value (val), 0, NULL_RTX, INSERT);
    }
  else if (MEM_P (loc))
    {
      struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs;

      if (modified)
	clobber_overlapping_mems (set, loc);

      /* Chase one level of VALUE indirection so we scan the
	 canonical value's location list.  */
      if (l && GET_CODE (l->loc) == VALUE)
	l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs;

      /* If this MEM is a global constant, we don't need it in the
	 dynamic tables.  ??? We should test this before emitting the
	 micro-op in the first place.  */
      while (l)
	if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0))
	  break;
	else
	  l = l->next;

      if (!l)
	var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
			  dv_from_value (val), 0, NULL_RTX, INSERT);
    }
  else
    {
      /* Other kinds of equivalences are necessarily static, at least
	 so long as we do not perform substitutions while merging
	 expressions.  (The set_variable_part call below is
	 intentionally unreachable; it documents what binding a
	 non-REG non-MEM location would look like.)  */
      gcc_unreachable ();
      set_variable_part (set, loc, dv_from_value (val), 0,
			 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
    }
}
2500 
2501 /* Bind a value to a location it was just stored in.  If MODIFIED
2502    holds, assume the location was modified, detaching it from any
2503    values bound to it.  */
2504 
2505 static void
val_store(dataflow_set * set,rtx val,rtx loc,rtx_insn * insn,bool modified)2506 val_store (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn,
2507 	   bool modified)
2508 {
2509   cselib_val *v = CSELIB_VAL_PTR (val);
2510 
2511   gcc_assert (cselib_preserved_value_p (v));
2512 
2513   if (dump_file)
2514     {
2515       fprintf (dump_file, "%i: ", insn ? INSN_UID (insn) : 0);
2516       print_inline_rtx (dump_file, loc, 0);
2517       fprintf (dump_file, " evaluates to ");
2518       print_inline_rtx (dump_file, val, 0);
2519       if (v->locs)
2520 	{
2521 	  struct elt_loc_list *l;
2522 	  for (l = v->locs; l; l = l->next)
2523 	    {
2524 	      fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
2525 	      print_inline_rtx (dump_file, l->loc, 0);
2526 	    }
2527 	}
2528       fprintf (dump_file, "\n");
2529     }
2530 
2531   gcc_checking_assert (!unsuitable_loc (loc));
2532 
2533   val_bind (set, val, loc, modified);
2534 }
2535 
2536 /* Clear (canonical address) slots that reference X.  */
2537 
2538 bool
local_get_addr_clear_given_value(rtx const &,rtx * slot,rtx x)2539 local_get_addr_clear_given_value (rtx const &, rtx *slot, rtx x)
2540 {
2541   if (vt_get_canonicalize_base (*slot) == x)
2542     *slot = NULL;
2543   return true;
2544 }
2545 
/* Reset the variable for DV in SET, detaching all its equivalences
   and rewiring the surviving ones to the remaining canonical
   value, if any.  */
2548 
static void
val_reset (dataflow_set *set, decl_or_value dv)
{
  variable *var = shared_hash_find (set->vars, dv) ;
  location_chain *node;
  rtx cval;

  if (!var || !var->n_var_parts)
    return;

  gcc_assert (var->n_var_parts == 1);

  if (var->onepart == ONEPART_VALUE)
    {
      rtx x = dv_as_value (dv);

      /* Relationships in the global cache don't change, so reset the
	 local cache entry only.  */
      rtx *slot = local_get_addr_cache->get (x);
      if (slot)
	{
	  /* If the value resolved back to itself, odds are that other
	     values may have cached it too.  These entries now refer
	     to the old X, so detach them too.  Entries that used the
	     old X but resolved to something else remain ok as long as
	     that something else isn't also reset.  */
	  if (*slot == x)
	    local_get_addr_cache
	      ->traverse<rtx, local_get_addr_clear_given_value> (x);
	  *slot = NULL;
	}
    }

  /* Pick the most-canonical VALUE among the locations to serve as
     the surviving hub for the equivalences.  */
  cval = NULL;
  for (node = var->var_part[0].loc_chain; node; node = node->next)
    if (GET_CODE (node->loc) == VALUE
	&& canon_value_cmp (node->loc, cval))
      cval = node->loc;

  for (node = var->var_part[0].loc_chain; node; node = node->next)
    if (GET_CODE (node->loc) == VALUE && cval != node->loc)
      {
	/* Redirect the equivalence link to the new canonical
	   value, or simply remove it if it would point at
	   itself.  */
	if (cval)
	  set_variable_part (set, cval, dv_from_value (node->loc),
			     0, node->init, node->set_src, NO_INSERT);
	delete_variable_part (set, dv_as_value (dv),
			      dv_from_value (node->loc), 0);
      }

  if (cval)
    {
      decl_or_value cdv = dv_from_value (cval);

      /* Keep the remaining values connected, accumulating links
	 in the canonical value.  */
      for (node = var->var_part[0].loc_chain; node; node = node->next)
	{
	  if (node->loc == cval)
	    continue;
	  else if (GET_CODE (node->loc) == REG)
	    var_reg_decl_set (set, node->loc, node->init, cdv, 0,
			      node->set_src, NO_INSERT);
	  else if (GET_CODE (node->loc) == MEM)
	    var_mem_decl_set (set, node->loc, node->init, cdv, 0,
			      node->set_src, NO_INSERT);
	  else
	    set_variable_part (set, node->loc, cdv, 0,
			       node->init, node->set_src, NO_INSERT);
	}
    }

  /* We remove this last, to make sure that the canonical value is not
     removed to the point of requiring reinsertion.  */
  if (cval)
    delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);

  clobber_variable_part (set, NULL, dv, 0, NULL);
}
2630 
2631 /* Find the values in a given location and map the val to another
2632    value, if it is unique, or add the location as one holding the
2633    value.  */
2634 
static void
val_resolve (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn)
{
  decl_or_value dv = dv_from_value (val);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (insn)
	fprintf (dump_file, "%i: ", INSN_UID (insn));
      else
	fprintf (dump_file, "head: ");
      print_inline_rtx (dump_file, val, 0);
      fputs (" is at ", dump_file);
      print_inline_rtx (dump_file, loc, 0);
      fputc ('\n', dump_file);
    }

  /* Detach VAL from any previous equivalences before rebinding.  */
  val_reset (set, dv);

  gcc_checking_assert (!unsuitable_loc (loc));

  if (REG_P (loc))
    {
      attrs *node, *found = NULL;

      /* Cross-link VAL with every same-mode VALUE already bound to
	 this register.  */
      for (node = set->regs[REGNO (loc)]; node; node = node->next)
	if (dv_is_value_p (node->dv)
	    && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
	  {
	    found = node;

	    /* Map incoming equivalences.  ??? Wouldn't it be nice if
	     we just started sharing the location lists?  Maybe a
	     circular list ending at the value itself or some
	     such.  */
	    set_variable_part (set, dv_as_value (node->dv),
			       dv_from_value (val), node->offset,
			       VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
	    set_variable_part (set, val, node->dv, node->offset,
			       VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
	  }

      /* If we didn't find any equivalence, we need to remember that
	 this value is held in the named register.  */
      if (found)
	return;
    }
  /* ??? Attempt to find and merge equivalent MEMs or other
     expressions too.  */

  val_bind (set, val, loc, false);
}
2687 
/* Initialize dataflow set SET to be empty.  */
2690 
2691 static void
dataflow_set_init(dataflow_set * set)2692 dataflow_set_init (dataflow_set *set)
2693 {
2694   init_attrs_list_set (set->regs);
2695   set->vars = shared_hash_copy (empty_shared_hash);
2696   set->stack_adjust = 0;
2697   set->traversed_vars = NULL;
2698 }
2699 
2700 /* Delete the contents of dataflow set SET.  */
2701 
2702 static void
dataflow_set_clear(dataflow_set * set)2703 dataflow_set_clear (dataflow_set *set)
2704 {
2705   int i;
2706 
2707   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2708     attrs_list_clear (&set->regs[i]);
2709 
2710   shared_hash_destroy (set->vars);
2711   set->vars = shared_hash_copy (empty_shared_hash);
2712 }
2713 
2714 /* Copy the contents of dataflow set SRC to DST.  */
2715 
2716 static void
dataflow_set_copy(dataflow_set * dst,dataflow_set * src)2717 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
2718 {
2719   int i;
2720 
2721   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2722     attrs_list_copy (&dst->regs[i], src->regs[i]);
2723 
2724   shared_hash_destroy (dst->vars);
2725   dst->vars = shared_hash_copy (src->vars);
2726   dst->stack_adjust = src->stack_adjust;
2727 }
2728 
/* Information for merging lists of locations for a given offset of
   a variable.  */
struct variable_union_info
{
  /* Node of the location chain.  */
  location_chain *lc;

  /* The sum of positions in the input chains; used as the primary
     sort key when merging.  */
  int pos;

  /* The position in the chain of the DST dataflow set; used as the
     tie-breaker.  */
  int pos_dst;
};

/* Buffer for location list sorting and its allocated size (in
   elements); grown on demand and reused across merges.  */
static struct variable_union_info *vui_vec;
static int vui_allocated;
2746 
2747 /* Compare function for qsort, order the structures by POS element.  */
2748 
2749 static int
variable_union_info_cmp_pos(const void * n1,const void * n2)2750 variable_union_info_cmp_pos (const void *n1, const void *n2)
2751 {
2752   const struct variable_union_info *const i1 =
2753     (const struct variable_union_info *) n1;
2754   const struct variable_union_info *const i2 =
2755     ( const struct variable_union_info *) n2;
2756 
2757   if (i1->pos != i2->pos)
2758     return i1->pos - i2->pos;
2759 
2760   return (i1->pos_dst - i2->pos_dst);
2761 }
2762 
/* Compute union of location parts of variable *SLOT and the same variable
   from hash table DATA.  Compute "sorted" union of the location chains
   for common offsets, i.e. the locations of a variable part are sorted by
   a priority where the priority is the sum of the positions in the 2 chains
   (if a location is only in one list the position in the second list is
   defined to be larger than the length of the chains).
   When we are updating the location parts the newest location is in the
   beginning of the chain, so when we do the described "sorted" union
   we keep the newest locations in the beginning.  */

static int
variable_union (variable *src, dataflow_set *set)
{
  variable *dst;
  variable **dstp;
  int i, j, k;

  dstp = shared_hash_find_slot (set->vars, src->dv);
  if (!dstp || !*dstp)
    {
      /* SRC's dv is not yet in SET: share SRC's entry directly (bump
	 its refcount) instead of copying it.  */
      src->refcount++;

      dst_can_be_shared = false;
      if (!dstp)
	dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);

      *dstp = src;

      /* Continue traversing the hash table.  */
      return 1;
    }
  else
    dst = *dstp;

  gcc_assert (src->n_var_parts);
  gcc_checking_assert (src->onepart == dst->onepart);

  /* We can combine one-part variables very efficiently, because their
     entries are in canonical order.  */
  if (src->onepart)
    {
      location_chain **nodep, *dnode, *snode;

      gcc_assert (src->n_var_parts == 1
		  && dst->n_var_parts == 1);

      snode = src->var_part[0].loc_chain;
      gcc_assert (snode);

      /* Merge the two loc_cmp-sorted chains in place into DST,
	 inserting SRC-only nodes.  Restart here after unsharing DST,
	 since unshare_variable rebuilds its chain.  */
    restart_onepart_unshared:
      nodep = &dst->var_part[0].loc_chain;
      dnode = *nodep;
      gcc_assert (dnode);

      while (snode)
	{
	  /* r > 0: SNODE's loc sorts before DNODE's (or DST's chain is
	     exhausted) and must be inserted; r == 0: same loc.  */
	  int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;

	  if (r > 0)
	    {
	      location_chain *nnode;

	      if (shared_var_p (dst, set->vars))
		{
		  /* DST is shared with another set; unshare before
		     modifying its chain.  */
		  dstp = unshare_variable (set, dstp, dst,
					   VAR_INIT_STATUS_INITIALIZED);
		  dst = *dstp;
		  goto restart_onepart_unshared;
		}

	      *nodep = nnode = new location_chain;
	      nnode->loc = snode->loc;
	      nnode->init = snode->init;
	      /* MEM set_src values are not propagated across sets.  */
	      if (!snode->set_src || MEM_P (snode->set_src))
		nnode->set_src = NULL;
	      else
		nnode->set_src = snode->set_src;
	      nnode->next = dnode;
	      dnode = nnode;
	    }
	  else if (r == 0)
	    gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc));

	  if (r >= 0)
	    snode = snode->next;

	  nodep = &dnode->next;
	  dnode = *nodep;
	}

      return 1;
    }

  gcc_checking_assert (!src->onepart);

  /* Count the number of location parts, result is K.  This is a merge
     walk over the two offset-sorted part arrays: common offsets count
     once, distinct offsets once each.  */
  for (i = 0, j = 0, k = 0;
       i < src->n_var_parts && j < dst->n_var_parts; k++)
    {
      if (VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
	{
	  i++;
	  j++;
	}
      else if (VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
	i++;
      else
	j++;
    }
  k += src->n_var_parts - i;
  k += dst->n_var_parts - j;

  /* We track only variables whose size is <= MAX_VAR_PARTS bytes
     thus there are at most MAX_VAR_PARTS different offsets.  */
  gcc_checking_assert (dst->onepart ? k == 1 : k <= MAX_VAR_PARTS);

  if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
    {
      dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
      dst = *dstp;
    }

  /* Fill DST's parts from the highest offset downwards so that
     dst->var_part[j] is never overwritten before it is read.  */
  i = src->n_var_parts - 1;
  j = dst->n_var_parts - 1;
  dst->n_var_parts = k;

  for (k--; k >= 0; k--)
    {
      location_chain *node, *node2;

      if (i >= 0 && j >= 0
	  && VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
	{
	  /* Compute the "sorted" union of the chains, i.e. the locations which
	     are in both chains go first, they are sorted by the sum of
	     positions in the chains.  */
	  int dst_l, src_l;
	  int ii, jj, n;
	  struct variable_union_info *vui;

	  /* If DST is shared compare the location chains.
	     If they are different we will modify the chain in DST with
	     high probability so make a copy of DST.  */
	  if (shared_var_p (dst, set->vars))
	    {
	      for (node = src->var_part[i].loc_chain,
		   node2 = dst->var_part[j].loc_chain; node && node2;
		   node = node->next, node2 = node2->next)
		{
		  if (!((REG_P (node2->loc)
			 && REG_P (node->loc)
			 && REGNO (node2->loc) == REGNO (node->loc))
			|| rtx_equal_p (node2->loc, node->loc)))
		    {
		      if (node2->init < node->init)
		        node2->init = node->init;
		      break;
		    }
		}
	      if (node || node2)
		{
		  dstp = unshare_variable (set, dstp, dst,
					   VAR_INIT_STATUS_UNKNOWN);
		  dst = (variable *)*dstp;
		}
	    }

	  src_l = 0;
	  for (node = src->var_part[i].loc_chain; node; node = node->next)
	    src_l++;
	  dst_l = 0;
	  for (node = dst->var_part[j].loc_chain; node; node = node->next)
	    dst_l++;

	  if (dst_l == 1)
	    {
	      /* The most common case, much simpler, no qsort is needed.  */
	      location_chain *dstnode = dst->var_part[j].loc_chain;
	      dst->var_part[k].loc_chain = dstnode;
	      VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
	      node2 = dstnode;
	      /* Append every SRC location not matching the single DST
		 location.  */
	      for (node = src->var_part[i].loc_chain; node; node = node->next)
		if (!((REG_P (dstnode->loc)
		       && REG_P (node->loc)
		       && REGNO (dstnode->loc) == REGNO (node->loc))
		      || rtx_equal_p (dstnode->loc, node->loc)))
		  {
		    location_chain *new_node;

		    /* Copy the location from SRC.  */
		    new_node = new location_chain;
		    new_node->loc = node->loc;
		    new_node->init = node->init;
		    if (!node->set_src || MEM_P (node->set_src))
		      new_node->set_src = NULL;
		    else
		      new_node->set_src = node->set_src;
		    node2->next = new_node;
		    node2 = new_node;
		  }
	      node2->next = NULL;
	    }
	  else
	    {
	      if (src_l + dst_l > vui_allocated)
		{
		  vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
		  vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
					vui_allocated);
		}
	      vui = vui_vec;

	      /* Fill in the locations from DST.  */
	      for (node = dst->var_part[j].loc_chain, jj = 0; node;
		   node = node->next, jj++)
		{
		  vui[jj].lc = node;
		  vui[jj].pos_dst = jj;

		  /* Pos plus value larger than a sum of 2 valid positions.  */
		  vui[jj].pos = jj + src_l + dst_l;
		}

	      /* Fill in the locations from SRC.  */
	      n = dst_l;
	      for (node = src->var_part[i].loc_chain, ii = 0; node;
		   node = node->next, ii++)
		{
		  /* Find location from NODE.  */
		  for (jj = 0; jj < dst_l; jj++)
		    {
		      if ((REG_P (vui[jj].lc->loc)
			   && REG_P (node->loc)
			   && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
			  || rtx_equal_p (vui[jj].lc->loc, node->loc))
			{
			  vui[jj].pos = jj + ii;
			  break;
			}
		    }
		  if (jj >= dst_l)	/* The location has not been found.  */
		    {
		      location_chain *new_node;

		      /* Copy the location from SRC.  */
		      new_node = new location_chain;
		      new_node->loc = node->loc;
		      new_node->init = node->init;
		      if (!node->set_src || MEM_P (node->set_src))
			new_node->set_src = NULL;
		      else
			new_node->set_src = node->set_src;
		      vui[n].lc = new_node;
		      vui[n].pos_dst = src_l + dst_l;
		      vui[n].pos = ii + src_l + dst_l;
		      n++;
		    }
		}

	      if (dst_l == 2)
		{
		  /* Special case still very common case.  For dst_l == 2
		     all entries dst_l ... n-1 are sorted, with for i >= dst_l
		     vui[i].pos == i + src_l + dst_l.  */
		  if (vui[0].pos > vui[1].pos)
		    {
		      /* Order should be 1, 0, 2... */
		      dst->var_part[k].loc_chain = vui[1].lc;
		      vui[1].lc->next = vui[0].lc;
		      if (n >= 3)
			{
			  vui[0].lc->next = vui[2].lc;
			  vui[n - 1].lc->next = NULL;
			}
		      else
			vui[0].lc->next = NULL;
		      ii = 3;
		    }
		  else
		    {
		      dst->var_part[k].loc_chain = vui[0].lc;
		      if (n >= 3 && vui[2].pos < vui[1].pos)
			{
			  /* Order should be 0, 2, 1, 3... */
			  vui[0].lc->next = vui[2].lc;
			  vui[2].lc->next = vui[1].lc;
			  if (n >= 4)
			    {
			      vui[1].lc->next = vui[3].lc;
			      vui[n - 1].lc->next = NULL;
			    }
			  else
			    vui[1].lc->next = NULL;
			  ii = 4;
			}
		      else
			{
			  /* Order should be 0, 1, 2... */
			  ii = 1;
			  vui[n - 1].lc->next = NULL;
			}
		    }
		  for (; ii < n; ii++)
		    vui[ii - 1].lc->next = vui[ii].lc;
		}
	      else
		{
		  qsort (vui, n, sizeof (struct variable_union_info),
			 variable_union_info_cmp_pos);

		  /* Reconnect the nodes in sorted order.  */
		  for (ii = 1; ii < n; ii++)
		    vui[ii - 1].lc->next = vui[ii].lc;
		  vui[n - 1].lc->next = NULL;
		  dst->var_part[k].loc_chain = vui[0].lc;
		}

	      VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
	    }
	  i--;
	  j--;
	}
      else if ((i >= 0 && j >= 0
		&& VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
	       || i < 0)
	{
	  /* Offset only present in DST: just move the part down.  */
	  dst->var_part[k] = dst->var_part[j];
	  j--;
	}
      else if ((i >= 0 && j >= 0
		&& VAR_PART_OFFSET (src, i) > VAR_PART_OFFSET (dst, j))
	       || j < 0)
	{
	  location_chain **nextp;

	  /* Copy the chain from SRC.  */
	  nextp = &dst->var_part[k].loc_chain;
	  for (node = src->var_part[i].loc_chain; node; node = node->next)
	    {
	      location_chain *new_lc;

	      new_lc = new location_chain;
	      new_lc->next = NULL;
	      new_lc->init = node->init;
	      if (!node->set_src || MEM_P (node->set_src))
		new_lc->set_src = NULL;
	      else
		new_lc->set_src = node->set_src;
	      new_lc->loc = node->loc;

	      *nextp = new_lc;
	      nextp = &new_lc->next;
	    }

	  VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (src, i);
	  i--;
	}
      dst->var_part[k].cur_loc = NULL;
    }

  /* Propagate stronger initialization status from SRC to matching
     locations in DST.  */
  if (flag_var_tracking_uninit)
    for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
      {
	location_chain *node, *node2;
	for (node = src->var_part[i].loc_chain; node; node = node->next)
	  for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
	    if (rtx_equal_p (node->loc, node2->loc))
	      {
		if (node->init > node2->init)
		  node2->init = node->init;
	      }
      }

  /* Continue traversing the hash table.  */
  return 1;
}
3139 
3140 /* Compute union of dataflow sets SRC and DST and store it to DST.  */
3141 
3142 static void
dataflow_set_union(dataflow_set * dst,dataflow_set * src)3143 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
3144 {
3145   int i;
3146 
3147   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3148     attrs_list_union (&dst->regs[i], src->regs[i]);
3149 
3150   if (dst->vars == empty_shared_hash)
3151     {
3152       shared_hash_destroy (dst->vars);
3153       dst->vars = shared_hash_copy (src->vars);
3154     }
3155   else
3156     {
3157       variable_iterator_type hi;
3158       variable *var;
3159 
3160       FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (src->vars),
3161 				   var, variable, hi)
3162 	variable_union (var, dst);
3163     }
3164 }
3165 
/* Whether the value is currently being expanded.  Stored in the spare
   `used' rtx flag bit of a VALUE or DEBUG_EXPR.  */
#define VALUE_RECURSED_INTO(x) \
  (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)

/* Whether no expansion was found, saving useless lookups.
   It must only be set when VALUE_CHANGED is clear.  Stored in the
   `return_val' rtx flag bit.  */
#define NO_LOC_P(x) \
  (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)

/* Whether cur_loc in the value needs to be (re)computed.  Stored in
   the `frame_related' rtx flag bit of a VALUE.  */
#define VALUE_CHANGED(x) \
  (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
/* Whether cur_loc in the decl needs to be (re)computed.  Reuses the
   tree's TREE_VISITED bit.  */
#define DECL_CHANGED(x) TREE_VISITED (x)
3180 
3181 /* Record (if NEWV) that DV needs to have its cur_loc recomputed.  For
3182    user DECLs, this means they're in changed_variables.  Values and
3183    debug exprs may be left with this flag set if no user variable
3184    requires them to be evaluated.  */
3185 
3186 static inline void
set_dv_changed(decl_or_value dv,bool newv)3187 set_dv_changed (decl_or_value dv, bool newv)
3188 {
3189   switch (dv_onepart_p (dv))
3190     {
3191     case ONEPART_VALUE:
3192       if (newv)
3193 	NO_LOC_P (dv_as_value (dv)) = false;
3194       VALUE_CHANGED (dv_as_value (dv)) = newv;
3195       break;
3196 
3197     case ONEPART_DEXPR:
3198       if (newv)
3199 	NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv))) = false;
3200       /* Fall through.  */
3201 
3202     default:
3203       DECL_CHANGED (dv_as_decl (dv)) = newv;
3204       break;
3205     }
3206 }
3207 
3208 /* Return true if DV needs to have its cur_loc recomputed.  */
3209 
3210 static inline bool
dv_changed_p(decl_or_value dv)3211 dv_changed_p (decl_or_value dv)
3212 {
3213   return (dv_is_value_p (dv)
3214 	  ? VALUE_CHANGED (dv_as_value (dv))
3215 	  : DECL_CHANGED (dv_as_decl (dv)));
3216 }
3217 
/* Return a location list node whose loc is rtx_equal to LOC, in the
   location list of a one-part variable or value VAR, or in that of
   any values recursively mentioned in the location lists.  VARS must
   be in star-canonical form.  Returns NULL when LOC is not found.  */

static location_chain *
find_loc_in_1pdv (rtx loc, variable *var, variable_table_type *vars)
{
  location_chain *node;
  enum rtx_code loc_code;

  if (!var)
    return NULL;

  gcc_checking_assert (var->onepart);

  if (!var->n_var_parts)
    return NULL;

  gcc_checking_assert (loc != dv_as_opaque (var->dv));

  loc_code = GET_CODE (loc);
  for (node = var->var_part[0].loc_chain; node; node = node->next)
    {
      decl_or_value dv;
      variable *rvar;

      if (GET_CODE (node->loc) != loc_code)
	{
	  /* Different code: no match, but a VALUE node may still lead
	     to LOC recursively, so fall through for VALUEs only.  */
	  if (GET_CODE (node->loc) != VALUE)
	    continue;
	}
      else if (loc == node->loc)
	return node;
      else if (loc_code != VALUE)
	{
	  if (rtx_equal_p (loc, node->loc))
	    return node;
	  continue;
	}

      /* Since we're in star-canonical form, we don't need to visit
	 non-canonical nodes: one-part variables and non-canonical
	 values would only point back to the canonical node.  */
      if (dv_is_value_p (var->dv)
	  && !canon_value_cmp (node->loc, dv_as_value (var->dv)))
	{
	  /* Skip all subsequent VALUEs.  */
	  while (node->next && GET_CODE (node->next->loc) == VALUE)
	    {
	      node = node->next;
	      gcc_checking_assert (!canon_value_cmp (node->loc,
						     dv_as_value (var->dv)));
	      if (loc == node->loc)
		return node;
	    }
	  continue;
	}

      /* A canonical VALUE in the chain: in star-canonical form it must
	 be the sole node; recurse into its own location list.  */
      gcc_checking_assert (node == var->var_part[0].loc_chain);
      gcc_checking_assert (!node->next);

      dv = dv_from_value (node->loc);
      rvar = vars->find_with_hash (dv, dv_htab_hash (dv));
      return find_loc_in_1pdv (loc, rvar, vars);
    }

  /* ??? Gotta look in cselib_val locations too.  */

  return NULL;
}
3289 
/* Hash table iteration argument passed to variable_merge.  */
struct dfset_merge
{
  /* The set in which the merge is to be inserted.  */
  dataflow_set *dst;
  /* The set that we're iterating in.  */
  dataflow_set *cur;
  /* The set that may contain the other dv we are to merge with.  */
  dataflow_set *src;
  /* Number of onepart dvs in src.  */
  int src_onepart_cnt;
};
3302 
3303 /* Insert LOC in *DNODE, if it's not there yet.  The list must be in
3304    loc_cmp order, and it is maintained as such.  */
3305 
3306 static void
insert_into_intersection(location_chain ** nodep,rtx loc,enum var_init_status status)3307 insert_into_intersection (location_chain **nodep, rtx loc,
3308 			  enum var_init_status status)
3309 {
3310   location_chain *node;
3311   int r;
3312 
3313   for (node = *nodep; node; nodep = &node->next, node = *nodep)
3314     if ((r = loc_cmp (node->loc, loc)) == 0)
3315       {
3316 	node->init = MIN (node->init, status);
3317 	return;
3318       }
3319     else if (r > 0)
3320       break;
3321 
3322   node = new location_chain;
3323 
3324   node->loc = loc;
3325   node->set_src = NULL;
3326   node->init = status;
3327   node->next = *nodep;
3328   *nodep = node;
3329 }
3330 
/* Insert in DEST the intersection of the locations present in both
   S1NODE and S2VAR, directly or indirectly.  S1NODE is from a
   variable in DSM->cur, whereas S2VAR is from DSM->src.  dvar is in
   DSM->dst.  */

static void
intersect_loc_chains (rtx val, location_chain **dest, struct dfset_merge *dsm,
		      location_chain *s1node, variable *s2var)
{
  dataflow_set *s1set = dsm->cur;
  dataflow_set *s2set = dsm->src;
  location_chain *found;

  if (s2var)
    {
      location_chain *s2node;

      gcc_checking_assert (s2var->onepart);

      if (s2var->n_var_parts)
	{
	  s2node = s2var->var_part[0].loc_chain;

	  /* Fast path: walk the two chains in lockstep while their loc
	     pointers are identical, inserting each common loc (except
	     VAL itself).  Stop at the first mismatch.  */
	  for (; s1node && s2node;
	       s1node = s1node->next, s2node = s2node->next)
	    if (s1node->loc != s2node->loc)
	      break;
	    else if (s1node->loc == val)
	      continue;
	    else
	      insert_into_intersection (dest, s1node->loc,
					MIN (s1node->init, s2node->init));
	}
    }

  /* Slow path for the remaining S1 nodes: look each one up in S2VAR's
     list (recursively, through VALUEs).  */
  for (; s1node; s1node = s1node->next)
    {
      if (s1node->loc == val)
	continue;

      if ((found = find_loc_in_1pdv (s1node->loc, s2var,
				     shared_hash_htab (s2set->vars))))
	{
	  insert_into_intersection (dest, s1node->loc,
				    MIN (s1node->init, found->init));
	  continue;
	}

      /* A VALUE location may match via its own location list; recurse
	 into it, using VALUE_RECURSED_INTO to avoid cycles.  */
      if (GET_CODE (s1node->loc) == VALUE
	  && !VALUE_RECURSED_INTO (s1node->loc))
	{
	  decl_or_value dv = dv_from_value (s1node->loc);
	  variable *svar = shared_hash_find (s1set->vars, dv);
	  if (svar)
	    {
	      if (svar->n_var_parts == 1)
		{
		  VALUE_RECURSED_INTO (s1node->loc) = true;
		  intersect_loc_chains (val, dest, dsm,
					svar->var_part[0].loc_chain,
					s2var);
		  VALUE_RECURSED_INTO (s1node->loc) = false;
		}
	    }
	}

      /* ??? gotta look in cselib_val locations too.  */

      /* ??? if the location is equivalent to any location in src,
	 searched recursively

	   add to dst the values needed to represent the equivalence

     telling whether locations S is equivalent to another dv's
     location list:

       for each location D in the list

         if S and D satisfy rtx_equal_p, then it is present

	 else if D is a value, recurse without cycles

	 else if S and D have the same CODE and MODE

	   for each operand oS and the corresponding oD

	     if oS and oD are not equivalent, then S an D are not equivalent

	     else if they are RTX vectors

	       if any vector oS element is not equivalent to its respective oD,
	       then S and D are not equivalent

   */


    }
}
3429 
3430 /* Return -1 if X should be before Y in a location list for a 1-part
3431    variable, 1 if Y should be before X, and 0 if they're equivalent
3432    and should not appear in the list.  */
3433 
3434 static int
loc_cmp(rtx x,rtx y)3435 loc_cmp (rtx x, rtx y)
3436 {
3437   int i, j, r;
3438   RTX_CODE code = GET_CODE (x);
3439   const char *fmt;
3440 
3441   if (x == y)
3442     return 0;
3443 
3444   if (REG_P (x))
3445     {
3446       if (!REG_P (y))
3447 	return -1;
3448       gcc_assert (GET_MODE (x) == GET_MODE (y));
3449       if (REGNO (x) == REGNO (y))
3450 	return 0;
3451       else if (REGNO (x) < REGNO (y))
3452 	return -1;
3453       else
3454 	return 1;
3455     }
3456 
3457   if (REG_P (y))
3458     return 1;
3459 
3460   if (MEM_P (x))
3461     {
3462       if (!MEM_P (y))
3463 	return -1;
3464       gcc_assert (GET_MODE (x) == GET_MODE (y));
3465       return loc_cmp (XEXP (x, 0), XEXP (y, 0));
3466     }
3467 
3468   if (MEM_P (y))
3469     return 1;
3470 
3471   if (GET_CODE (x) == VALUE)
3472     {
3473       if (GET_CODE (y) != VALUE)
3474 	return -1;
3475       /* Don't assert the modes are the same, that is true only
3476 	 when not recursing.  (subreg:QI (value:SI 1:1) 0)
3477 	 and (subreg:QI (value:DI 2:2) 0) can be compared,
3478 	 even when the modes are different.  */
3479       if (canon_value_cmp (x, y))
3480 	return -1;
3481       else
3482 	return 1;
3483     }
3484 
3485   if (GET_CODE (y) == VALUE)
3486     return 1;
3487 
3488   /* Entry value is the least preferable kind of expression.  */
3489   if (GET_CODE (x) == ENTRY_VALUE)
3490     {
3491       if (GET_CODE (y) != ENTRY_VALUE)
3492 	return 1;
3493       gcc_assert (GET_MODE (x) == GET_MODE (y));
3494       return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
3495     }
3496 
3497   if (GET_CODE (y) == ENTRY_VALUE)
3498     return -1;
3499 
3500   if (GET_CODE (x) == GET_CODE (y))
3501     /* Compare operands below.  */;
3502   else if (GET_CODE (x) < GET_CODE (y))
3503     return -1;
3504   else
3505     return 1;
3506 
3507   gcc_assert (GET_MODE (x) == GET_MODE (y));
3508 
3509   if (GET_CODE (x) == DEBUG_EXPR)
3510     {
3511       if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3512 	  < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
3513 	return -1;
3514       gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3515 			   > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
3516       return 1;
3517     }
3518 
3519   fmt = GET_RTX_FORMAT (code);
3520   for (i = 0; i < GET_RTX_LENGTH (code); i++)
3521     switch (fmt[i])
3522       {
3523       case 'w':
3524 	if (XWINT (x, i) == XWINT (y, i))
3525 	  break;
3526 	else if (XWINT (x, i) < XWINT (y, i))
3527 	  return -1;
3528 	else
3529 	  return 1;
3530 
3531       case 'n':
3532       case 'i':
3533 	if (XINT (x, i) == XINT (y, i))
3534 	  break;
3535 	else if (XINT (x, i) < XINT (y, i))
3536 	  return -1;
3537 	else
3538 	  return 1;
3539 
3540       case 'p':
3541 	r = compare_sizes_for_sort (SUBREG_BYTE (x), SUBREG_BYTE (y));
3542 	if (r != 0)
3543 	  return r;
3544 	break;
3545 
3546       case 'V':
3547       case 'E':
3548 	/* Compare the vector length first.  */
3549 	if (XVECLEN (x, i) == XVECLEN (y, i))
3550 	  /* Compare the vectors elements.  */;
3551 	else if (XVECLEN (x, i) < XVECLEN (y, i))
3552 	  return -1;
3553 	else
3554 	  return 1;
3555 
3556 	for (j = 0; j < XVECLEN (x, i); j++)
3557 	  if ((r = loc_cmp (XVECEXP (x, i, j),
3558 			    XVECEXP (y, i, j))))
3559 	    return r;
3560 	break;
3561 
3562       case 'e':
3563 	if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
3564 	  return r;
3565 	break;
3566 
3567       case 'S':
3568       case 's':
3569 	if (XSTR (x, i) == XSTR (y, i))
3570 	  break;
3571 	if (!XSTR (x, i))
3572 	  return -1;
3573 	if (!XSTR (y, i))
3574 	  return 1;
3575 	if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
3576 	  break;
3577 	else if (r < 0)
3578 	  return -1;
3579 	else
3580 	  return 1;
3581 
3582       case 'u':
3583 	/* These are just backpointers, so they don't matter.  */
3584 	break;
3585 
3586       case '0':
3587       case 't':
3588 	break;
3589 
3590 	/* It is believed that rtx's at this level will never
3591 	   contain anything but integers and other rtx's,
3592 	   except for within LABEL_REFs and SYMBOL_REFs.  */
3593       default:
3594 	gcc_unreachable ();
3595       }
3596   if (CONST_WIDE_INT_P (x))
3597     {
3598       /* Compare the vector length first.  */
3599       if (CONST_WIDE_INT_NUNITS (x) >= CONST_WIDE_INT_NUNITS (y))
3600 	return 1;
3601       else if (CONST_WIDE_INT_NUNITS (x) < CONST_WIDE_INT_NUNITS (y))
3602 	return -1;
3603 
3604       /* Compare the vectors elements.  */;
3605       for (j = CONST_WIDE_INT_NUNITS (x) - 1; j >= 0 ; j--)
3606 	{
3607 	  if (CONST_WIDE_INT_ELT (x, j) < CONST_WIDE_INT_ELT (y, j))
3608 	    return -1;
3609 	  if (CONST_WIDE_INT_ELT (x, j) > CONST_WIDE_INT_ELT (y, j))
3610 	    return 1;
3611 	}
3612     }
3613 
3614   return 0;
3615 }
3616 
3617 /* Check the order of entries in one-part variables.   */
3618 
3619 int
canonicalize_loc_order_check(variable ** slot,dataflow_set * data ATTRIBUTE_UNUSED)3620 canonicalize_loc_order_check (variable **slot,
3621 			      dataflow_set *data ATTRIBUTE_UNUSED)
3622 {
3623   variable *var = *slot;
3624   location_chain *node, *next;
3625 
3626 #ifdef ENABLE_RTL_CHECKING
3627   int i;
3628   for (i = 0; i < var->n_var_parts; i++)
3629     gcc_assert (var->var_part[0].cur_loc == NULL);
3630   gcc_assert (!var->in_changed_variables);
3631 #endif
3632 
3633   if (!var->onepart)
3634     return 1;
3635 
3636   gcc_assert (var->n_var_parts == 1);
3637   node = var->var_part[0].loc_chain;
3638   gcc_assert (node);
3639 
3640   while ((next = node->next))
3641     {
3642       gcc_assert (loc_cmp (node->loc, next->loc) < 0);
3643       node = next;
3644     }
3645 
3646   return 1;
3647 }
3648 
/* Mark with VALUE_RECURSED_INTO values that have neighbors that are
   more likely to be chosen as canonical for an equivalence set.
   Ensure less likely values can reach more likely neighbors, making
   the connections bidirectional.  Hash table traversal callback;
   always returns 1 to continue.  */

int
canonicalize_values_mark (variable **slot, dataflow_set *set)
{
  variable *var = *slot;
  decl_or_value dv = var->dv;
  rtx val;
  location_chain *node;

  /* Only VALUE-keyed entries participate in canonicalization.  */
  if (!dv_is_value_p (dv))
    return 1;

  gcc_checking_assert (var->n_var_parts == 1);

  val = dv_as_value (dv);

  for (node = var->var_part[0].loc_chain; node; node = node->next)
    if (GET_CODE (node->loc) == VALUE)
      {
	if (canon_value_cmp (node->loc, val))
	  /* A more-canonical neighbor exists: VAL itself must be
	     revisited by canonicalize_values_star.  */
	  VALUE_RECURSED_INTO (val) = true;
	else
	  {
	    /* VAL is the more canonical of the pair: add VAL to the
	       neighbor's location list (making the edge bidirectional)
	       and mark the neighbor for revisiting.  */
	    decl_or_value odv = dv_from_value (node->loc);
	    variable **oslot;
	    oslot = shared_hash_find_slot_noinsert (set->vars, odv);

	    set_slot_part (set, val, oslot, odv, 0,
			   node->init, NULL_RTX);

	    VALUE_RECURSED_INTO (node->loc) = true;
	  }
      }

  return 1;
}
3689 
3690 /* Remove redundant entries from equivalence lists in onepart
3691    variables, canonicalizing equivalence sets into star shapes.  */
3692 
3693 int
canonicalize_values_star(variable ** slot,dataflow_set * set)3694 canonicalize_values_star (variable **slot, dataflow_set *set)
3695 {
3696   variable *var = *slot;
3697   decl_or_value dv = var->dv;
3698   location_chain *node;
3699   decl_or_value cdv;
3700   rtx val, cval;
3701   variable **cslot;
3702   bool has_value;
3703   bool has_marks;
3704 
3705   if (!var->onepart)
3706     return 1;
3707 
3708   gcc_checking_assert (var->n_var_parts == 1);
3709 
3710   if (dv_is_value_p (dv))
3711     {
3712       cval = dv_as_value (dv);
3713       if (!VALUE_RECURSED_INTO (cval))
3714 	return 1;
3715       VALUE_RECURSED_INTO (cval) = false;
3716     }
3717   else
3718     cval = NULL_RTX;
3719 
3720  restart:
3721   val = cval;
3722   has_value = false;
3723   has_marks = false;
3724 
3725   gcc_assert (var->n_var_parts == 1);
3726 
3727   for (node = var->var_part[0].loc_chain; node; node = node->next)
3728     if (GET_CODE (node->loc) == VALUE)
3729       {
3730 	has_value = true;
3731 	if (VALUE_RECURSED_INTO (node->loc))
3732 	  has_marks = true;
3733 	if (canon_value_cmp (node->loc, cval))
3734 	  cval = node->loc;
3735       }
3736 
3737   if (!has_value)
3738     return 1;
3739 
3740   if (cval == val)
3741     {
3742       if (!has_marks || dv_is_decl_p (dv))
3743 	return 1;
3744 
3745       /* Keep it marked so that we revisit it, either after visiting a
3746 	 child node, or after visiting a new parent that might be
3747 	 found out.  */
3748       VALUE_RECURSED_INTO (val) = true;
3749 
3750       for (node = var->var_part[0].loc_chain; node; node = node->next)
3751 	if (GET_CODE (node->loc) == VALUE
3752 	    && VALUE_RECURSED_INTO (node->loc))
3753 	  {
3754 	    cval = node->loc;
3755 	  restart_with_cval:
3756 	    VALUE_RECURSED_INTO (cval) = false;
3757 	    dv = dv_from_value (cval);
3758 	    slot = shared_hash_find_slot_noinsert (set->vars, dv);
3759 	    if (!slot)
3760 	      {
3761 		gcc_assert (dv_is_decl_p (var->dv));
3762 		/* The canonical value was reset and dropped.
3763 		   Remove it.  */
3764 		clobber_variable_part (set, NULL, var->dv, 0, NULL);
3765 		return 1;
3766 	      }
3767 	    var = *slot;
3768 	    gcc_assert (dv_is_value_p (var->dv));
3769 	    if (var->n_var_parts == 0)
3770 	      return 1;
3771 	    gcc_assert (var->n_var_parts == 1);
3772 	    goto restart;
3773 	  }
3774 
3775       VALUE_RECURSED_INTO (val) = false;
3776 
3777       return 1;
3778     }
3779 
3780   /* Push values to the canonical one.  */
3781   cdv = dv_from_value (cval);
3782   cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3783 
3784   for (node = var->var_part[0].loc_chain; node; node = node->next)
3785     if (node->loc != cval)
3786       {
3787 	cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3788 			       node->init, NULL_RTX);
3789 	if (GET_CODE (node->loc) == VALUE)
3790 	  {
3791 	    decl_or_value ndv = dv_from_value (node->loc);
3792 
3793 	    set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3794 			       NO_INSERT);
3795 
3796 	    if (canon_value_cmp (node->loc, val))
3797 	      {
3798 		/* If it could have been a local minimum, it's not any more,
3799 		   since it's now neighbor to cval, so it may have to push
3800 		   to it.  Conversely, if it wouldn't have prevailed over
3801 		   val, then whatever mark it has is fine: if it was to
3802 		   push, it will now push to a more canonical node, but if
3803 		   it wasn't, then it has already pushed any values it might
3804 		   have to.  */
3805 		VALUE_RECURSED_INTO (node->loc) = true;
3806 		/* Make sure we visit node->loc by ensuring we cval is
3807 		   visited too.  */
3808 		VALUE_RECURSED_INTO (cval) = true;
3809 	      }
3810 	    else if (!VALUE_RECURSED_INTO (node->loc))
3811 	      /* If we have no need to "recurse" into this node, it's
3812 		 already "canonicalized", so drop the link to the old
3813 		 parent.  */
3814 	      clobber_variable_part (set, cval, ndv, 0, NULL);
3815 	  }
3816 	else if (GET_CODE (node->loc) == REG)
3817 	  {
3818 	    attrs *list = set->regs[REGNO (node->loc)], **listp;
3819 
3820 	    /* Change an existing attribute referring to dv so that it
3821 	       refers to cdv, removing any duplicate this might
3822 	       introduce, and checking that no previous duplicates
3823 	       existed, all in a single pass.  */
3824 
3825 	    while (list)
3826 	      {
3827 		if (list->offset == 0
3828 		    && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3829 			|| dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3830 		  break;
3831 
3832 		list = list->next;
3833 	      }
3834 
3835 	    gcc_assert (list);
3836 	    if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3837 	      {
3838 		list->dv = cdv;
3839 		for (listp = &list->next; (list = *listp); listp = &list->next)
3840 		  {
3841 		    if (list->offset)
3842 		      continue;
3843 
3844 		    if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3845 		      {
3846 			*listp = list->next;
3847 			delete list;
3848 			list = *listp;
3849 			break;
3850 		      }
3851 
3852 		    gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3853 		  }
3854 	      }
3855 	    else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3856 	      {
3857 		for (listp = &list->next; (list = *listp); listp = &list->next)
3858 		  {
3859 		    if (list->offset)
3860 		      continue;
3861 
3862 		    if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3863 		      {
3864 			*listp = list->next;
3865 			delete list;
3866 			list = *listp;
3867 			break;
3868 		      }
3869 
3870 		    gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3871 		  }
3872 	      }
3873 	    else
3874 	      gcc_unreachable ();
3875 
3876 	    if (flag_checking)
3877 	      while (list)
3878 		{
3879 		  if (list->offset == 0
3880 		      && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3881 			  || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3882 		    gcc_unreachable ();
3883 
3884 		  list = list->next;
3885 		}
3886 	  }
3887       }
3888 
3889   if (val)
3890     set_slot_part (set, val, cslot, cdv, 0,
3891 		   VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
3892 
3893   slot = clobber_slot_part (set, cval, slot, 0, NULL);
3894 
3895   /* Variable may have been unshared.  */
3896   var = *slot;
3897   gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3898 		       && var->var_part[0].loc_chain->next == NULL);
3899 
3900   if (VALUE_RECURSED_INTO (cval))
3901     goto restart_with_cval;
3902 
3903   return 1;
3904 }
3905 
3906 /* Bind one-part variables to the canonical value in an equivalence
3907    set.  Not doing this causes dataflow convergence failure in rare
3908    circumstances, see PR42873.  Unfortunately we can't do this
3909    efficiently as part of canonicalize_values_star, since we may not
3910    have determined or even seen the canonical value of a set when we
3911    get to a variable that references another member of the set.  */
3912 
3913 int
canonicalize_vars_star(variable ** slot,dataflow_set * set)3914 canonicalize_vars_star (variable **slot, dataflow_set *set)
3915 {
3916   variable *var = *slot;
3917   decl_or_value dv = var->dv;
3918   location_chain *node;
3919   rtx cval;
3920   decl_or_value cdv;
3921   variable **cslot;
3922   variable *cvar;
3923   location_chain *cnode;
3924 
       /* Only decl-based one-part variables need rebinding here; VALUEs
	  themselves are canonicalized by canonicalize_values_star.
	  Returning 1 keeps a hash-table traversal going.  */
3925   if (!var->onepart || var->onepart == ONEPART_VALUE)
3926     return 1;
3927 
3928   gcc_assert (var->n_var_parts == 1);
3929 
3930   node = var->var_part[0].loc_chain;
3931 
3932   if (GET_CODE (node->loc) != VALUE)
3933     return 1;
3934 
       /* A one-part decl bound to a VALUE has exactly that one location.  */
3935   gcc_assert (!node->next);
3936   cval = node->loc;
3937 
3938   /* Push values to the canonical one.  */
3939   cdv = dv_from_value (cval);
3940   cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3941   if (!cslot)
3942     return 1;
3943   cvar = *cslot;
3944   gcc_assert (cvar->n_var_parts == 1);
3945 
3946   cnode = cvar->var_part[0].loc_chain;
3947 
3948   /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3949      that are not "more canonical" than it.  */
3950   if (GET_CODE (cnode->loc) != VALUE
3951       || !canon_value_cmp (cnode->loc, cval))
3952     return 1;
3953 
3954   /* CVAL was found to be non-canonical.  Change the variable to point
3955      to the canonical VALUE.  */
3956   gcc_assert (!cnode->next);
3957   cval = cnode->loc;
3958 
       /* Add the canonical VALUE to VAR's location list, then drop the
	  old non-canonical one, leaving a single canonical binding.  */
3959   slot = set_slot_part (set, cval, slot, dv, 0,
3960 			node->init, node->set_src);
3961   clobber_slot_part (set, cval, slot, 0, node->set_src);
3962 
3963   return 1;
3964 }
3965 
3966 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3967    corresponding entry in DSM->src.  Multi-part variables are combined
3968    with variable_union, whereas onepart dvs are combined with
3969    intersection.  Always returns 1 so it can drive a full hash-table
3970    traversal.  */
3971 
3972 static int
variable_merge_over_cur(variable * s1var,struct dfset_merge * dsm)3972 variable_merge_over_cur (variable *s1var, struct dfset_merge *dsm)
3973 {
3974   dataflow_set *dst = dsm->dst;
3975   variable **dstslot;
3976   variable *s2var, *dvar = NULL;
3977   decl_or_value dv = s1var->dv;
3978   onepart_enum onepart = s1var->onepart;
3979   rtx val;
3980   hashval_t dvhash;
3981   location_chain *node, **nodep;
3982 
3983   /* If the incoming onepart variable has an empty location list, then
3984      the intersection will be just as empty.  For other variables,
3985      it's always union.  */
3986   gcc_checking_assert (s1var->n_var_parts
3987 		       && s1var->var_part[0].loc_chain);
3988 
3989   if (!onepart)
3990     return variable_union (s1var, dst);
3991 
3992   gcc_checking_assert (s1var->n_var_parts == 1);
3993 
       /* Hash DV once; it is reused for several lookups below.  VAL is
	  the VALUE rtx if DV names a value, NULL for a decl.  */
3994   dvhash = dv_htab_hash (dv);
3995   if (dv_is_value_p (dv))
3996     val = dv_as_value (dv);
3997   else
3998     val = NULL;
3999 
4000   s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
4001   if (!s2var)
4002     {
         /* DV is present only in cur: the intersection drops it, so dst
	    no longer mirrors either input and can't share tables.  */
4003       dst_can_be_shared = false;
4004       return 1;
4005     }
4006 
4007   dsm->src_onepart_cnt--;
4008   gcc_assert (s2var->var_part[0].loc_chain
4009 	      && s2var->onepart == onepart
4010 	      && s2var->n_var_parts == 1);
4011 
4012   dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4013   if (dstslot)
4014     {
4015       dvar = *dstslot;
4016       gcc_assert (dvar->refcount == 1
4017 		  && dvar->onepart == onepart
4018 		  && dvar->n_var_parts == 1);
4019       nodep = &dvar->var_part[0].loc_chain;
4020     }
4021   else
4022     {
         /* No dst entry yet; intersect into a local chain head and only
	    create a variable if the intersection is nonempty.  */
4023       nodep = &node;
4024       node = NULL;
4025     }
4026 
4027   if (!dstslot && !onepart_variable_different_p (s1var, s2var))
4028     {
         /* Both inputs agree: reuse s2var by reference instead of
	    building an identical copy.  */
4029       dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
4030 						 dvhash, INSERT);
4031       *dstslot = dvar = s2var;
4032       dvar->refcount++;
4033     }
4034   else
4035     {
4036       dst_can_be_shared = false;
4037 
4038       intersect_loc_chains (val, nodep, dsm,
4039 			    s1var->var_part[0].loc_chain, s2var);
4040 
4041       if (!dstslot)
4042 	{
4043 	  if (node)
4044 	    {
	      /* Materialize a fresh one-part variable holding the
		 intersection chain built above.  */
4045 	      dvar = onepart_pool_allocate (onepart);
4046 	      dvar->dv = dv;
4047 	      dvar->refcount = 1;
4048 	      dvar->n_var_parts = 1;
4049 	      dvar->onepart = onepart;
4050 	      dvar->in_changed_variables = false;
4051 	      dvar->var_part[0].loc_chain = node;
4052 	      dvar->var_part[0].cur_loc = NULL;
4053 	      if (onepart)
4054 		VAR_LOC_1PAUX (dvar) = NULL;
4055 	      else
4056 		VAR_PART_OFFSET (dvar, 0) = 0;
4057 
4058 	      dstslot
4059 		= shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
4060 						   INSERT);
4061 	      gcc_assert (!*dstslot);
4062 	      *dstslot = dvar;
4063 	    }
4064 	  else
4065 	    return 1;
4066 	}
4067     }
4068 
       /* Register the REG locations (canonicalization puts them first)
	  in dst's per-register attribute lists, redirecting to an
	  already-canonical VALUE when one holds the register.  */
4069   nodep = &dvar->var_part[0].loc_chain;
4070   while ((node = *nodep))
4071     {
4072       location_chain **nextp = &node->next;
4073 
4074       if (GET_CODE (node->loc) == REG)
4075 	{
4076 	  attrs *list;
4077 
4078 	  for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
4079 	    if (GET_MODE (node->loc) == GET_MODE (list->loc)
4080 		&& dv_is_value_p (list->dv))
4081 	      break;
4082 
4083 	  if (!list)
4084 	    attrs_list_insert (&dst->regs[REGNO (node->loc)],
4085 			       dv, 0, node->loc);
4086 	  /* If this value became canonical for another value that had
4087 	     this register, we want to leave it alone.  */
4088 	  else if (dv_as_value (list->dv) != val)
4089 	    {
4090 	      dstslot = set_slot_part (dst, dv_as_value (list->dv),
4091 				       dstslot, dv, 0,
4092 				       node->init, NULL_RTX);
4093 	      dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
4094 
4095 	      /* Since nextp points into the removed node, we can't
4096 		 use it.  The pointer to the next node moved to nodep.
4097 		 However, if the variable we're walking is unshared
4098 		 during our walk, we'll keep walking the location list
4099 		 of the previously-shared variable, in which case the
4100 		 node won't have been removed, and we'll want to skip
4101 		 it.  That's why we test *nodep here.  */
4102 	      if (*nodep != node)
4103 		nextp = nodep;
4104 	    }
4105 	}
4106       else
4107 	/* Canonicalization puts registers first, so we don't have to
4108 	   walk it all.  */
4109 	break;
4110       nodep = nextp;
4111     }
4112 
       /* The walk above may have unshared/replaced the variable; pick up
	  the current slot contents.  */
4113   if (dvar != *dstslot)
4114     dvar = *dstslot;
4115   nodep = &dvar->var_part[0].loc_chain;
4116 
4117   if (val)
4118     {
4119       /* Mark all referenced nodes for canonicalization, and make sure
4120 	 we have mutual equivalence links.  */
4121       VALUE_RECURSED_INTO (val) = true;
4122       for (node = *nodep; node; node = node->next)
4123 	if (GET_CODE (node->loc) == VALUE)
4124 	  {
4125 	    VALUE_RECURSED_INTO (node->loc) = true;
4126 	    set_variable_part (dst, val, dv_from_value (node->loc), 0,
4127 			       node->init, NULL, INSERT);
4128 	  }
4129 
4130       dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4131       gcc_assert (*dstslot == dvar);
4132       canonicalize_values_star (dstslot, dst);
4133       gcc_checking_assert (dstslot
4134 			   == shared_hash_find_slot_noinsert_1 (dst->vars,
4135 								dv, dvhash));
4136       dvar = *dstslot;
4137     }
4138   else
4139     {
4140       bool has_value = false, has_other = false;
4141 
4142       /* If we have one value and anything else, we're going to
4143 	 canonicalize this, so make sure all values have an entry in
4144 	 the table and are marked for canonicalization.  */
4145       for (node = *nodep; node; node = node->next)
4146 	{
4147 	  if (GET_CODE (node->loc) == VALUE)
4148 	    {
4149 	      /* If this was marked during register canonicalization,
4150 		 we know we have to canonicalize values.  */
4151 	      if (has_value)
4152 		has_other = true;
4153 	      has_value = true;
4154 	      if (has_other)
4155 		break;
4156 	    }
4157 	  else
4158 	    {
4159 	      has_other = true;
4160 	      if (has_value)
4161 		break;
4162 	    }
4163 	}
4164 
4165       if (has_value && has_other)
4166 	{
4167 	  for (node = *nodep; node; node = node->next)
4168 	    {
4169 	      if (GET_CODE (node->loc) == VALUE)
4170 		{
4171 		  decl_or_value dv = dv_from_value (node->loc);
4172 		  variable **slot = NULL;
4173 
4174 		  if (shared_hash_shared (dst->vars))
4175 		    slot = shared_hash_find_slot_noinsert (dst->vars, dv);
4176 		  if (!slot)
4177 		    slot = shared_hash_find_slot_unshare (&dst->vars, dv,
4178 							  INSERT);
4179 		  if (!*slot)
4180 		    {
		      /* Create an empty entry so canonicalization has a
			 table slot for this VALUE.  */
4181 		      variable *var = onepart_pool_allocate (ONEPART_VALUE);
4182 		      var->dv = dv;
4183 		      var->refcount = 1;
4184 		      var->n_var_parts = 1;
4185 		      var->onepart = ONEPART_VALUE;
4186 		      var->in_changed_variables = false;
4187 		      var->var_part[0].loc_chain = NULL;
4188 		      var->var_part[0].cur_loc = NULL;
4189 		      VAR_LOC_1PAUX (var) = NULL;
4190 		      *slot = var;
4191 		    }
4192 
4193 		  VALUE_RECURSED_INTO (node->loc) = true;
4194 		}
4195 	    }
4196 
4197 	  dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4198 	  gcc_assert (*dstslot == dvar);
4199 	  canonicalize_values_star (dstslot, dst);
4200 	  gcc_checking_assert (dstslot
4201 			       == shared_hash_find_slot_noinsert_1 (dst->vars,
4202 								    dv, dvhash));
4203 	  dvar = *dstslot;
4204 	}
4205     }
4206 
       /* If the merged result turned out equal to one of the inputs,
	  share that input's variable instead of keeping a copy.  */
4207   if (!onepart_variable_different_p (dvar, s2var))
4208     {
4209       variable_htab_free (dvar);
4210       *dstslot = dvar = s2var;
4211       dvar->refcount++;
4212     }
4213   else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
4214     {
4215       variable_htab_free (dvar);
4216       *dstslot = dvar = s1var;
4217       dvar->refcount++;
4218       dst_can_be_shared = false;
4219     }
4220   else
4221     dst_can_be_shared = false;
4222 
4223   return 1;
4224 }
4225 
4226 /* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
4227    multi-part variable.  Unions of multi-part variables and
4228    intersections of one-part ones will be handled in
4229    variable_merge_over_cur().  Always returns 1 so it can drive a full
4230    hash-table traversal.  */
4231 
4232 static int
variable_merge_over_src(variable * s2var,struct dfset_merge * dsm)4232 variable_merge_over_src (variable *s2var, struct dfset_merge *dsm)
4233 {
4234   dataflow_set *dst = dsm->dst;
4235   decl_or_value dv = s2var->dv;
4236 
4237   if (!s2var->onepart)
4238     {
         /* Multi-part: share S2VAR into dst directly (refcounted);
	    variable_merge_over_cur will union cur's entry into it.  */
4239       variable **dstp = shared_hash_find_slot (dst->vars, dv);
4240       *dstp = s2var;
4241       s2var->refcount++;
4242       return 1;
4243     }
4244 
       /* One-part: just count it.  If variable_merge_over_cur doesn't
	  consume the count for every src entry, some one-part variable
	  exists only in src and dst cannot share src's table.  */
4245   dsm->src_onepart_cnt++;
4246   return 1;
4247 }
4248 
4249 /* Combine dataflow set information from SRC2 into DST, using PDST
4250    to carry over information across passes.  */
4251 
4252 static void
dataflow_set_merge(dataflow_set * dst,dataflow_set * src2)4253 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
4254 {
       /* Snapshot DST as CUR/SRC1; DST itself is reinitialized and then
	  rebuilt as the merge of SRC1 and SRC2.  */
4255   dataflow_set cur = *dst;
4256   dataflow_set *src1 = &cur;
4257   struct dfset_merge dsm;
4258   int i;
4259   size_t src1_elems, src2_elems;
4260   variable_iterator_type hi;
4261   variable *var;
4262 
4263   src1_elems = shared_hash_htab (src1->vars)->elements ();
4264   src2_elems = shared_hash_htab (src2->vars)->elements ();
4265   dataflow_set_init (dst);
4266   dst->stack_adjust = cur.stack_adjust;
4267   shared_hash_destroy (dst->vars);
4268   dst->vars = new shared_hash;
4269   dst->vars->refcount = 1;
       /* Size the new table for the larger input to limit rehashing.  */
4270   dst->vars->htab = new variable_table_type (MAX (src1_elems, src2_elems));
4271 
4272   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4273     attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
4274 
4275   dsm.dst = dst;
4276   dsm.src = src2;
4277   dsm.cur = src1;
4278   dsm.src_onepart_cnt = 0;
4279 
       /* First pass copies multi-part vars from src2 and counts its
	  one-part vars; second pass unions/intersects cur's entries.  */
4280   FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.src->vars),
4281 			       var, variable, hi)
4282     variable_merge_over_src (var, &dsm);
4283   FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.cur->vars),
4284 			       var, variable, hi)
4285     variable_merge_over_cur (var, &dsm);
4286 
       /* Leftover count means src2 had one-part vars absent from cur, so
	  the merged table differs from src2's and can't be shared.  */
4287   if (dsm.src_onepart_cnt)
4288     dst_can_be_shared = false;
4289 
4290   dataflow_set_destroy (src1);
4291 }
4292 
4293 /* Mark register equivalences.  For each hard register with multiple
4294    bindings, pick per mode the most canonical VALUE bound to it, link
4295    every other binding to that VALUE, and star-canonicalize.  */
4296 
4297 static void
dataflow_set_equiv_regs(dataflow_set * set)4296 dataflow_set_equiv_regs (dataflow_set *set)
4298 {
4299   int i;
4300   attrs *list, **listp;
4301 
4302   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4303     {
4304       rtx canon[NUM_MACHINE_MODES];
4305 
4306       /* If the list is empty or one entry, no need to canonicalize
4307 	 anything.  */
4308       if (set->regs[i] == NULL || set->regs[i]->next == NULL)
4309 	continue;
4310 
4311       memset (canon, 0, sizeof (canon));
4312 
         /* Pass 1: per machine mode, find the most canonical VALUE
	    bound at offset 0 of this register.  */
4313       for (list = set->regs[i]; list; list = list->next)
4314 	if (list->offset == 0 && dv_is_value_p (list->dv))
4315 	  {
4316 	    rtx val = dv_as_value (list->dv);
4317 	    rtx *cvalp = &canon[(int)GET_MODE (val)];
4318 	    rtx cval = *cvalp;
4319 
4320 	    if (canon_value_cmp (val, cval))
4321 	      *cvalp = val;
4322 	  }
4323 
         /* Pass 2: record mutual equivalences between each one-part
	    binding and its mode's canonical VALUE, marking both sides
	    with VALUE_RECURSED_INTO for pass 3.  */
4323       for (list = set->regs[i]; list; list = list->next)
4324 	if (list->offset == 0 && dv_onepart_p (list->dv))
4325 	  {
4326 	    rtx cval = canon[(int)GET_MODE (list->loc)];
4327 
4328 	    if (!cval)
4329 	      continue;
4330 
4331 	    if (dv_is_value_p (list->dv))
4332 	      {
4333 		rtx val = dv_as_value (list->dv);
4334 
4335 		if (val == cval)
4336 		  continue;
4337 
4338 		VALUE_RECURSED_INTO (val) = true;
4339 		set_variable_part (set, val, dv_from_value (cval), 0,
4340 				   VAR_INIT_STATUS_INITIALIZED,
4341 				   NULL, NO_INSERT);
4342 	      }
4343 
4344 	    VALUE_RECURSED_INTO (cval) = true;
4345 	    set_variable_part (set, cval, list->dv, 0,
4346 			       VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
4347 	  }
4348 
         /* Pass 3: star-canonicalize the marked entries.  The walk must
	    cope with canonicalize_values_star removing LIST from the
	    attr chain, hence the *listp re-check.  */
4349       for (listp = &set->regs[i]; (list = *listp);
4350 	   listp = list ? &list->next : listp)
4351 	if (list->offset == 0 && dv_onepart_p (list->dv))
4352 	  {
4353 	    rtx cval = canon[(int)GET_MODE (list->loc)];
4354 	    variable **slot;
4355 
4356 	    if (!cval)
4357 	      continue;
4358 
4359 	    if (dv_is_value_p (list->dv))
4360 	      {
4361 		rtx val = dv_as_value (list->dv);
4362 		if (!VALUE_RECURSED_INTO (val))
4363 		  continue;
4364 	      }
4365 
4366 	    slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
4367 	    canonicalize_values_star (slot, set);
4368 	    if (*listp != list)
4369 	      list = NULL;
4370 	  }
4371     }
4372 }
4373 
4374 /* Remove any redundant values in the location list of VAR, which must
4375    be unshared and 1-part.  Uses VALUE_RECURSED_INTO as a seen-flag:
4376    first occurrence of a VALUE is kept and marked, later occurrences
4377    are deleted; a second walk clears the marks.  */
4378 
4379 static void
remove_duplicate_values(variable * var)4378 remove_duplicate_values (variable *var)
4380 {
4381   location_chain *node, **nodep;
4382 
4383   gcc_assert (var->onepart);
4384   gcc_assert (var->n_var_parts == 1);
       /* Unshared: safe to splice nodes out of the chain in place.  */
4385   gcc_assert (var->refcount == 1);
4386 
4387   for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
4388     {
4389       if (GET_CODE (node->loc) == VALUE)
4390 	{
4391 	  if (VALUE_RECURSED_INTO (node->loc))
4392 	    {
4393 	      /* Remove duplicate value node.  */
4394 	      *nodep = node->next;
4395 	      delete node;
4396 	      continue;
4397 	    }
4398 	  else
4399 	    VALUE_RECURSED_INTO (node->loc) = true;
4400 	}
4401       nodep = &node->next;
4402     }
4403 
       /* Second walk: reset the marks so VALUE_RECURSED_INTO is clean
	  for other users of the flag.  */
4404   for (node = var->var_part[0].loc_chain; node; node = node->next)
4405     if (GET_CODE (node->loc) == VALUE)
4406       {
4407 	gcc_assert (VALUE_RECURSED_INTO (node->loc));
4408 	VALUE_RECURSED_INTO (node->loc) = false;
4409       }
4410 }
4410 
4411 
4412 /* Hash table iteration argument passed to the variable_post_merge_*
4413    traversal callbacks below.  */
4413 struct dfset_post_merge
4414 {
4415   /* The new input set for the current block.  */
4416   dataflow_set *set;
4417   /* Pointer to the permanent input set for the current block, or
4418      NULL.  Created lazily by variable_post_merge_new_vals.  */
4419   dataflow_set **permp;
4420 };
4421 
4422 /* Create values for incoming expressions associated with one-part
4423    variables that don't have value numbers for them.  Always returns 1
4424    so it can drive a full hash-table traversal.  */
4425 
4426 int
variable_post_merge_new_vals(variable ** slot,dfset_post_merge * dfpm)4426 variable_post_merge_new_vals (variable **slot, dfset_post_merge *dfpm)
4427 {
4428   dataflow_set *set = dfpm->set;
4429   variable *var = *slot;
4430   location_chain *node;
4431 
4432   if (!var->onepart || !var->n_var_parts)
4433     return 1;
4434 
4435   gcc_assert (var->n_var_parts == 1);
4436 
4437   if (dv_is_decl_p (var->dv))
4438     {
4439       bool check_dupes = false;
4440 
4441     restart:
4442       for (node = var->var_part[0].loc_chain; node; node = node->next)
4443 	{
4444 	  if (GET_CODE (node->loc) == VALUE)
4445 	    gcc_assert (!VALUE_RECURSED_INTO (node->loc));
4446 	  else if (GET_CODE (node->loc) == REG)
4447 	    {
4448 	      attrs *att, **attp, **curp = NULL;
4449 
	      /* We are about to rewrite node->loc in place, so the
		 variable must be unshared first; then rescan from the
		 top since the chain may differ in the copy.  */
4450 	      if (var->refcount != 1)
4451 		{
4452 		  slot = unshare_variable (set, slot, var,
4453 					   VAR_INIT_STATUS_INITIALIZED);
4454 		  var = *slot;
4455 		  goto restart;
4456 		}
4457 
	      /* Look for a same-mode VALUE already bound to this
		 register; remember the attr for VAR's own decl so it
		 can be removed below.  */
4458 	      for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
4459 		   attp = &att->next)
4460 		if (att->offset == 0
4461 		    && GET_MODE (att->loc) == GET_MODE (node->loc))
4462 		  {
4463 		    if (dv_is_value_p (att->dv))
4464 		      {
4465 			rtx cval = dv_as_value (att->dv);
4466 			node->loc = cval;
4467 			check_dupes = true;
4468 			break;
4469 		      }
4470 		    else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
4471 		      curp = attp;
4472 		  }
4473 
4474 	      if (!curp)
4475 		{
		  /* The decl's own attr must come after the point where
		     the scan above stopped; find it.  */
4476 		  curp = attp;
4477 		  while (*curp)
4478 		    if ((*curp)->offset == 0
4479 			&& GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
4480 			&& dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
4481 		      break;
4482 		    else
4483 		      curp = &(*curp)->next;
4484 		  gcc_assert (*curp);
4485 		}
4486 
	      /* No VALUE bound to the register yet: reuse one from the
		 permanent set, or create a fresh cselib value.  */
4487 	      if (!att)
4488 		{
4489 		  decl_or_value cdv;
4490 		  rtx cval;
4491 
4492 		  if (!*dfpm->permp)
4493 		    {
4494 		      *dfpm->permp = XNEW (dataflow_set);
4495 		      dataflow_set_init (*dfpm->permp);
4496 		    }
4497 
4498 		  for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
4499 		       att; att = att->next)
4500 		    if (GET_MODE (att->loc) == GET_MODE (node->loc))
4501 		      {
4502 			gcc_assert (att->offset == 0
4503 				    && dv_is_value_p (att->dv));
4504 			val_reset (set, att->dv);
4505 			break;
4506 		      }
4507 
4508 		  if (att)
4509 		    {
4510 		      cdv = att->dv;
4511 		      cval = dv_as_value (cdv);
4512 		    }
4513 		  else
4514 		    {
4515 		      /* Create a unique value to hold this register,
4516 			 that ought to be found and reused in
4517 			 subsequent rounds.  */
4518 		      cselib_val *v;
4519 		      gcc_assert (!cselib_lookup (node->loc,
4520 						  GET_MODE (node->loc), 0,
4521 						  VOIDmode));
4522 		      v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
4523 					 VOIDmode);
4524 		      cselib_preserve_value (v);
4525 		      cselib_invalidate_rtx (node->loc);
4526 		      cval = v->val_rtx;
4527 		      cdv = dv_from_value (cval);
4528 		      if (dump_file)
4529 			fprintf (dump_file,
4530 				 "Created new value %u:%u for reg %i\n",
4531 				 v->uid, v->hash, REGNO (node->loc));
4532 		    }
4533 
4534 		  var_reg_decl_set (*dfpm->permp, node->loc,
4535 				    VAR_INIT_STATUS_INITIALIZED,
4536 				    cdv, 0, NULL, INSERT);
4537 
4538 		  node->loc = cval;
4539 		  check_dupes = true;
4540 		}
4541 
4542 	      /* Remove attribute referring to the decl, which now
4543 		 uses the value for the register, already existing or
4544 		 to be added when we bring perm in.  */
4545 	      att = *curp;
4546 	      *curp = att->next;
4547 	      delete att;
4548 	    }
4549 	}
4550 
         /* Rewriting REGs to VALUEs may have introduced duplicates.  */
4551       if (check_dupes)
4552 	remove_duplicate_values (var);
4553     }
4554 
4555   return 1;
4556 }
4557 
4558 /* Reset values in the permanent set that are not associated with the
4559    chosen expression.  PSLOT is an entry of the permanent set: a VALUE
4560    whose single location is a REG.  Always returns 1 so it can drive a
4561    full hash-table traversal.  */
4562 
4563 int
variable_post_merge_perm_vals(variable ** pslot,dfset_post_merge * dfpm)4562 variable_post_merge_perm_vals (variable **pslot, dfset_post_merge *dfpm)
4564 {
4565   dataflow_set *set = dfpm->set;
4566   variable *pvar = *pslot, *var;
4567   location_chain *pnode;
4568   decl_or_value dv;
4569   attrs *att;
4570 
4571   gcc_assert (dv_is_value_p (pvar->dv)
4572 	      && pvar->n_var_parts == 1);
4573   pnode = pvar->var_part[0].loc_chain;
4574   gcc_assert (pnode
4575 	      && !pnode->next
4576 	      && REG_P (pnode->loc));
4577 
4578   dv = pvar->dv;
4579 
4580   var = shared_hash_find (set->vars, dv);
4581   if (var)
4582     {
4583       /* Although variable_post_merge_new_vals may have made decls
4584 	 non-star-canonical, values that pre-existed in canonical form
4585 	 remain canonical, and newly-created values reference a single
4586 	 REG, so they are canonical as well.  Since VAR has the
4587 	 location list for a VALUE, using find_loc_in_1pdv for it is
4588 	 fine, since VALUEs don't map back to DECLs.  */
4589       if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
4590 	return 1;
         /* The permanent REG is not among VAR's locations: the value is
	    stale in SET, so reset it before re-associating.  */
4590       val_reset (set, dv);
4591     }
4592 
4593   for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
4594     if (att->offset == 0
4595 	&& GET_MODE (att->loc) == GET_MODE (pnode->loc)
4596 	&& dv_is_value_p (att->dv))
4597       break;
4598 
4599   /* If there is a value associated with this register already, create
4600      an equivalence.  */
4601   if (att && dv_as_value (att->dv) != dv_as_value (dv))
4602     {
4603       rtx cval = dv_as_value (att->dv);
4604       set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
4605       set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
4606 			 NULL, INSERT);
4607     }
4608   else if (!att)
4609     {
         /* No value holds the register in SET: bring the permanent
	    binding in wholesale.  */
4610       attrs_list_insert (&set->regs[REGNO (pnode->loc)],
4611 			 dv, 0, pnode->loc);
4612       variable_union (pvar, set);
4613     }
4614 
4615   return 1;
4616 }
4617 
4618 /* Just checking stuff and registering register attributes for
4619    now.  Runs the post-merge fixups over SET (and the lazily-created
4620    permanent set *PERMP) in a fixed order: create missing VALUEs,
4621    re-associate permanent values, then star- and var-canonicalize.  */
4622 
4623 static void
dataflow_post_merge_adjust(dataflow_set * set,dataflow_set ** permp)4622 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
4624 {
4625   struct dfset_post_merge dfpm;
4626 
4627   dfpm.set = set;
4628   dfpm.permp = permp;
4629 
4630   shared_hash_htab (set->vars)
4631     ->traverse <dfset_post_merge*, variable_post_merge_new_vals> (&dfpm);
       /* *PERMP may have been created by the traversal above.  */
4632   if (*permp)
4633     shared_hash_htab ((*permp)->vars)
4634       ->traverse <dfset_post_merge*, variable_post_merge_perm_vals> (&dfpm);
4635   shared_hash_htab (set->vars)
4636     ->traverse <dataflow_set *, canonicalize_values_star> (set);
4637   shared_hash_htab (set->vars)
4638     ->traverse <dataflow_set *, canonicalize_vars_star> (set);
4639 }
4639 
4640 /* Return a node whose loc is a MEM that refers to EXPR in the
4641    location list of a one-part variable or value VAR, or in that of
4642    any values recursively mentioned in the location lists.  Returns
4643    NULL when no such MEM is found.  */
4644 
4645 static location_chain *
find_mem_expr_in_1pdv(tree expr,rtx val,variable_table_type * vars)4645 find_mem_expr_in_1pdv (tree expr, rtx val, variable_table_type *vars)
4646 {
4647   location_chain *node;
4648   decl_or_value dv;
4649   variable *var;
4650   location_chain *where = NULL;
4651 
4652   if (!val)
4653     return NULL;
4654 
4655   gcc_assert (GET_CODE (val) == VALUE
4656 	      && !VALUE_RECURSED_INTO (val));
4657 
4658   dv = dv_from_value (val);
4659   var = vars->find_with_hash (dv, dv_htab_hash (dv));
4660 
4661   if (!var)
4662     return NULL;
4663 
4664   gcc_assert (var->onepart);
4665 
4666   if (!var->n_var_parts)
4667     return NULL;
4668 
       /* VALUE_RECURSED_INTO serves as the visited flag that stops the
	  recursion below from looping on cyclic value references; it is
	  cleared again before returning.  */
4669   VALUE_RECURSED_INTO (val) = true;
4670 
4671   for (node = var->var_part[0].loc_chain; node; node = node->next)
4672     if (MEM_P (node->loc)
4673 	&& MEM_EXPR (node->loc) == expr
4674 	&& int_mem_offset (node->loc) == 0)
4675       {
4676 	where = node;
4677 	break;
4678       }
4679     else if (GET_CODE (node->loc) == VALUE
4680 	     && !VALUE_RECURSED_INTO (node->loc)
4681 	     && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
4682       break;
4683 
4684   VALUE_RECURSED_INTO (val) = false;
4685 
4686   return where;
4687 }
4688 
4689 /* Return TRUE if the value of MEM may vary across a call.  Errs on
4690    the side of TRUE whenever the underlying object can't be
4691    identified.  */
4692 
4693 static bool
mem_dies_at_call(rtx mem)4692 mem_dies_at_call (rtx mem)
4694 {
4695   tree expr = MEM_EXPR (mem);
4696   tree decl;
4697 
       /* No tree expression attached: assume the worst.  */
4698   if (!expr)
4699     return true;
4700 
4701   decl = get_base_address (expr);
4702 
4703   if (!decl)
4704     return true;
4705 
4706   if (!DECL_P (decl))
4707     return true;
4708 
       /* A decl the callee could reach — aliased, or writable and
	  global — may be modified by the call.  */
4709   return (may_be_aliased (decl)
4710 	  || (!TREE_READONLY (decl) && is_global_var (decl)));
4711 }
4711 
4712 /* Remove all MEMs from the location list of a hash table entry for a
4713    one-part variable, except those whose MEM attributes map back to
4714    the variable itself, directly or within a VALUE.  Always returns 1
4715    so it can drive a full hash-table traversal.  */
4716 
4717 int
dataflow_set_preserve_mem_locs(variable ** slot,dataflow_set * set)4717 dataflow_set_preserve_mem_locs (variable **slot, dataflow_set *set)
4718 {
4719   variable *var = *slot;
4720 
4721   if (var->onepart == ONEPART_VDECL || var->onepart == ONEPART_DEXPR)
4722     {
4723       tree decl = dv_as_decl (var->dv);
4724       location_chain *loc, **locp;
4725       bool changed = false;
4726 
4727       if (!var->n_var_parts)
4728 	return 1;
4729 
4730       gcc_assert (var->n_var_parts == 1);
4731 
         /* If VAR is shared, first check whether any change is needed at
	    all, and only unshare when something must be modified.  */
4732       if (shared_var_p (var, set->vars))
4733 	{
4734 	  for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4735 	    {
4736 	      /* We want to remove dying MEMs that don't refer to DECL.  */
4737 	      if (GET_CODE (loc->loc) == MEM
4738 		  && (MEM_EXPR (loc->loc) != decl
4739 		      || int_mem_offset (loc->loc) != 0)
4740 		  && mem_dies_at_call (loc->loc))
4741 		break;
4742 	      /* We want to move here MEMs that do refer to DECL.  */
4743 	      else if (GET_CODE (loc->loc) == VALUE
4744 		       && find_mem_expr_in_1pdv (decl, loc->loc,
4745 						 shared_hash_htab (set->vars)))
4746 		break;
4747 	    }
4748 
	  /* Nothing to change: leave the shared variable untouched.  */
4749 	  if (!loc)
4750 	    return 1;
4751 
4752 	  slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4753 	  var = *slot;
4754 	  gcc_assert (var->n_var_parts == 1);
4755 	}
4756 
4757       for (locp = &var->var_part[0].loc_chain, loc = *locp;
4758 	   loc; loc = *locp)
4759 	{
4760 	  rtx old_loc = loc->loc;
	  /* Replace a VALUE location by a MEM for DECL found through
	     it, so the MEM survives the call.  */
4761 	  if (GET_CODE (old_loc) == VALUE)
4762 	    {
4763 	      location_chain *mem_node
4764 		= find_mem_expr_in_1pdv (decl, loc->loc,
4765 					 shared_hash_htab (set->vars));
4766 
4767 	      /* ??? This picks up only one out of multiple MEMs that
4768 		 refer to the same variable.  Do we ever need to be
4769 		 concerned about dealing with more than one, or, given
4770 		 that they should all map to the same variable
4771 		 location, their addresses will have been merged and
4772 		 they will be regarded as equivalent?  */
4773 	      if (mem_node)
4774 		{
4775 		  loc->loc = mem_node->loc;
4776 		  loc->set_src = mem_node->set_src;
4777 		  loc->init = MIN (loc->init, mem_node->init);
4778 		}
4779 	    }
4780 
	  /* Keep non-MEMs, MEMs for DECL itself, and MEMs that survive
	     the call; anything else is deleted below.  */
4781 	  if (GET_CODE (loc->loc) != MEM
4782 	      || (MEM_EXPR (loc->loc) == decl
4783 		  && int_mem_offset (loc->loc) == 0)
4784 	      || !mem_dies_at_call (loc->loc))
4785 	    {
4786 	      if (old_loc != loc->loc && emit_notes)
4787 		{
4788 		  if (old_loc == var->var_part[0].cur_loc)
4789 		    {
4790 		      changed = true;
4791 		      var->var_part[0].cur_loc = NULL;
4792 		    }
4793 		}
4794 	      locp = &loc->next;
4795 	      continue;
4796 	    }
4797 
4798 	  if (emit_notes)
4799 	    {
4800 	      if (old_loc == var->var_part[0].cur_loc)
4801 		{
4802 		  changed = true;
4803 		  var->var_part[0].cur_loc = NULL;
4804 		}
4805 	    }
4806 	  *locp = loc->next;
4807 	  delete loc;
4808 	}
4809 
4810       if (!var->var_part[0].loc_chain)
4811 	{
4812 	  var->n_var_parts--;
4813 	  changed = true;
4814 	}
         /* Queue the variable for note re-emission if its current
	    location was invalidated.  */
4815       if (changed)
4816 	variable_was_changed (var, set);
4817     }
4818 
4819   return 1;
4820 }
4821 
4822 /* Remove all MEMs that die at a call from the location list of a hash
4823    table entry for a onepart variable.  Always returns 1 so it can
4824    drive a full hash-table traversal.  */
4825 
4826 int
dataflow_set_remove_mem_locs(variable ** slot,dataflow_set * set)4826 dataflow_set_remove_mem_locs (variable **slot, dataflow_set *set)
4827 {
4828   variable *var = *slot;
4829 
4830   if (var->onepart != NOT_ONEPART)
4831     {
4832       location_chain *loc, **locp;
4833       bool changed = false;
4834       rtx cur_loc;
4835 
4836       gcc_assert (var->n_var_parts == 1);
4837 
         /* If VAR is shared, only unshare when at least one MEM must
	    actually be removed.  */
4838       if (shared_var_p (var, set->vars))
4839 	{
4840 	  for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4841 	    if (GET_CODE (loc->loc) == MEM
4842 		&& mem_dies_at_call (loc->loc))
4843 	      break;
4844 
4845 	  if (!loc)
4846 	    return 1;
4847 
4848 	  slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4849 	  var = *slot;
4850 	  gcc_assert (var->n_var_parts == 1);
4851 	}
4852 
4853       if (VAR_LOC_1PAUX (var))
4854 	cur_loc = VAR_LOC_FROM (var);
4855       else
4856 	cur_loc = var->var_part[0].cur_loc;
4857 
4858       for (locp = &var->var_part[0].loc_chain, loc = *locp;
4859 	   loc; loc = *locp)
4860 	{
4861 	  if (GET_CODE (loc->loc) != MEM
4862 	      || !mem_dies_at_call (loc->loc))
4863 	    {
4864 	      locp = &loc->next;
4865 	      continue;
4866 	    }
4867 
4868 	  *locp = loc->next;
4869 	  /* If we have deleted the location which was last emitted
4870 	     we have to emit new location so add the variable to set
4871 	     of changed variables.  */
4872 	  if (cur_loc == loc->loc)
4873 	    {
4874 	      changed = true;
4875 	      var->var_part[0].cur_loc = NULL;
4876 	      if (VAR_LOC_1PAUX (var))
4877 		VAR_LOC_FROM (var) = NULL;
4878 	    }
4879 	  delete loc;
4880 	}
4881 
4882       if (!var->var_part[0].loc_chain)
4883 	{
4884 	  var->n_var_parts--;
4885 	  changed = true;
4886 	}
4887       if (changed)
4888 	variable_was_changed (var, set);
4889     }
4890 
4891   return 1;
4892 }
4893 
/* Remove all variable-location information about call-clobbered
   registers, as well as associations between MEMs and VALUEs.  */

static void
dataflow_set_clear_at_call (dataflow_set *set, rtx_insn *call_insn)
{
  unsigned int r;
  hard_reg_set_iterator hrsi;
  HARD_REG_SET invalidated_regs;

  /* Compute the set of hard registers clobbered by this particular
     call, starting from the generic regs_invalidated_by_call set.  */
  get_call_reg_set_usage (call_insn, &invalidated_regs,
			  regs_invalidated_by_call);

  EXECUTE_IF_SET_IN_HARD_REG_SET (invalidated_regs, 0, r, hrsi)
    var_regno_delete (set, r);

  if (MAY_HAVE_DEBUG_BIND_INSNS)
    {
      /* First try to preserve MEM locations of one-part variables,
	 then remove the MEMs that die at the call.  NOTE(review):
	 traversed_vars appears to mark the table being traversed for
	 the callbacks — confirm against the callbacks' use of it.  */
      set->traversed_vars = set->vars;
      shared_hash_htab (set->vars)
	->traverse <dataflow_set *, dataflow_set_preserve_mem_locs> (set);
      set->traversed_vars = set->vars;
      shared_hash_htab (set->vars)
	->traverse <dataflow_set *, dataflow_set_remove_mem_locs> (set);
      set->traversed_vars = NULL;
    }
}
4921 
4922 static bool
variable_part_different_p(variable_part * vp1,variable_part * vp2)4923 variable_part_different_p (variable_part *vp1, variable_part *vp2)
4924 {
4925   location_chain *lc1, *lc2;
4926 
4927   for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4928     {
4929       for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4930 	{
4931 	  if (REG_P (lc1->loc) && REG_P (lc2->loc))
4932 	    {
4933 	      if (REGNO (lc1->loc) == REGNO (lc2->loc))
4934 		break;
4935 	    }
4936 	  if (rtx_equal_p (lc1->loc, lc2->loc))
4937 	    break;
4938 	}
4939       if (!lc2)
4940 	return true;
4941     }
4942   return false;
4943 }
4944 
4945 /* Return true if one-part variables VAR1 and VAR2 are different.
4946    They must be in canonical order.  */
4947 
4948 static bool
onepart_variable_different_p(variable * var1,variable * var2)4949 onepart_variable_different_p (variable *var1, variable *var2)
4950 {
4951   location_chain *lc1, *lc2;
4952 
4953   if (var1 == var2)
4954     return false;
4955 
4956   gcc_assert (var1->n_var_parts == 1
4957 	      && var2->n_var_parts == 1);
4958 
4959   lc1 = var1->var_part[0].loc_chain;
4960   lc2 = var2->var_part[0].loc_chain;
4961 
4962   gcc_assert (lc1 && lc2);
4963 
4964   while (lc1 && lc2)
4965     {
4966       if (loc_cmp (lc1->loc, lc2->loc))
4967 	return true;
4968       lc1 = lc1->next;
4969       lc2 = lc2->next;
4970     }
4971 
4972   return lc1 != lc2;
4973 }
4974 
/* Dump to the dump file the differences between the location chains
   of one-part variables VAR1 and VAR2, which must be in canonical
   order.  */
4977 
static void
dump_onepart_variable_differences (variable *var1, variable *var2)
{
  location_chain *lc1, *lc2;

  gcc_assert (var1 != var2);
  gcc_assert (dump_file);
  gcc_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv));
  gcc_assert (var1->n_var_parts == 1
	      && var2->n_var_parts == 1);

  lc1 = var1->var_part[0].loc_chain;
  lc2 = var2->var_part[0].loc_chain;

  gcc_assert (lc1 && lc2);

  /* Walk both canonically-ordered chains in lockstep: when loc_cmp
     says LC1's location sorts first it only exists in VAR1 ("removed"),
     when LC2's sorts first it only exists in VAR2 ("added"), and on a
     tie both chains advance together.  */
  while (lc1 && lc2)
    {
      switch (loc_cmp (lc1->loc, lc2->loc))
	{
	case -1:
	  fprintf (dump_file, "removed: ");
	  print_rtl_single (dump_file, lc1->loc);
	  lc1 = lc1->next;
	  continue;
	case 0:
	  break;
	case 1:
	  fprintf (dump_file, "added: ");
	  print_rtl_single (dump_file, lc2->loc);
	  lc2 = lc2->next;
	  continue;
	default:
	  gcc_unreachable ();
	}
      /* Locations matched; advance both chains.  */
      lc1 = lc1->next;
      lc2 = lc2->next;
    }

  /* Whatever remains on either chain after the lockstep walk is
     one-sided by definition.  */
  while (lc1)
    {
      fprintf (dump_file, "removed: ");
      print_rtl_single (dump_file, lc1->loc);
      lc1 = lc1->next;
    }

  while (lc2)
    {
      fprintf (dump_file, "added: ");
      print_rtl_single (dump_file, lc2->loc);
      lc2 = lc2->next;
    }
}
5031 
5032 /* Return true if variables VAR1 and VAR2 are different.  */
5033 
5034 static bool
variable_different_p(variable * var1,variable * var2)5035 variable_different_p (variable *var1, variable *var2)
5036 {
5037   int i;
5038 
5039   if (var1 == var2)
5040     return false;
5041 
5042   if (var1->onepart != var2->onepart)
5043     return true;
5044 
5045   if (var1->n_var_parts != var2->n_var_parts)
5046     return true;
5047 
5048   if (var1->onepart && var1->n_var_parts)
5049     {
5050       gcc_checking_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv)
5051 			   && var1->n_var_parts == 1);
5052       /* One-part values have locations in a canonical order.  */
5053       return onepart_variable_different_p (var1, var2);
5054     }
5055 
5056   for (i = 0; i < var1->n_var_parts; i++)
5057     {
5058       if (VAR_PART_OFFSET (var1, i) != VAR_PART_OFFSET (var2, i))
5059 	return true;
5060       if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
5061 	return true;
5062       if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
5063 	return true;
5064     }
5065   return false;
5066 }
5067 
/* Return true if dataflow sets OLD_SET and NEW_SET differ.  */

static bool
dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
{
  variable_iterator_type hi;
  variable *var1;
  bool diffound = false;
  bool details = (dump_file && (dump_flags & TDF_DETAILS));

  /* Record that a difference was found.  Return immediately unless
     detailed dumping was requested, in which case keep going so that
     every difference gets reported.  */
#define RETRUE					\
  do						\
    {						\
      if (!details)				\
	return true;				\
      else					\
	diffound = true;			\
    }						\
  while (0)

  /* Identical shared hash tables mean identical contents.  */
  if (old_set->vars == new_set->vars)
    return false;

  if (shared_hash_htab (old_set->vars)->elements ()
      != shared_hash_htab (new_set->vars)->elements ())
    RETRUE;

  /* Check that every variable of OLD_SET is present and equal in
     NEW_SET.  */
  FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (old_set->vars),
			       var1, variable, hi)
    {
      variable_table_type *htab = shared_hash_htab (new_set->vars);
      variable *var2 = htab->find_with_hash (var1->dv, dv_htab_hash (var1->dv));

      if (!var2)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "dataflow difference found: removal of:\n");
	      dump_var (var1);
	    }
	  RETRUE;
	}
      else if (variable_different_p (var1, var2))
	{
	  if (details)
	    {
	      fprintf (dump_file, "dataflow difference found: "
		       "old and new follow:\n");
	      dump_var (var1);
	      if (dv_onepart_p (var1->dv))
		dump_onepart_variable_differences (var1, var2);
	      dump_var (var2);
	    }
	  RETRUE;
	}
    }

  /* There's no need to traverse the second hashtab unless we want to
     print the details.  If both have the same number of elements and
     the second one had all entries found in the first one, then the
     second can't have any extra entries.  */
  if (!details)
    return diffound;

  FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (new_set->vars),
			       var1, variable, hi)
    {
      variable_table_type *htab = shared_hash_htab (old_set->vars);
      variable *var2 = htab->find_with_hash (var1->dv, dv_htab_hash (var1->dv));
      if (!var2)
	{
	  if (details)
	    {
	      fprintf (dump_file, "dataflow difference found: addition of:\n");
	      dump_var (var1);
	    }
	  RETRUE;
	}
    }

#undef RETRUE

  return diffound;
}
5152 
5153 /* Free the contents of dataflow set SET.  */
5154 
5155 static void
dataflow_set_destroy(dataflow_set * set)5156 dataflow_set_destroy (dataflow_set *set)
5157 {
5158   int i;
5159 
5160   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5161     attrs_list_clear (&set->regs[i]);
5162 
5163   shared_hash_destroy (set->vars);
5164   set->vars = NULL;
5165 }
5166 
5167 /* Return true if T is a tracked parameter with non-degenerate record type.  */
5168 
5169 static bool
tracked_record_parameter_p(tree t)5170 tracked_record_parameter_p (tree t)
5171 {
5172   if (TREE_CODE (t) != PARM_DECL)
5173     return false;
5174 
5175   if (DECL_MODE (t) == BLKmode)
5176     return false;
5177 
5178   tree type = TREE_TYPE (t);
5179   if (TREE_CODE (type) != RECORD_TYPE)
5180     return false;
5181 
5182   if (TYPE_FIELDS (type) == NULL_TREE
5183       || DECL_CHAIN (TYPE_FIELDS (type)) == NULL_TREE)
5184     return false;
5185 
5186   return true;
5187 }
5188 
/* Shall EXPR be tracked?  When NEED_RTL is true, additionally require
   EXPR to have a name and an RTL location assigned.  */

static bool
track_expr_p (tree expr, bool need_rtl)
{
  rtx decl_rtl;
  tree realdecl;

  if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
    return DECL_RTL_SET_P (expr);

  /* If EXPR is not a parameter or a variable do not track it.  */
  if (!VAR_P (expr) && TREE_CODE (expr) != PARM_DECL)
    return 0;

  /* It also must have a name...  */
  if (!DECL_NAME (expr) && need_rtl)
    return 0;

  /* ... and a RTL assigned to it.  */
  decl_rtl = DECL_RTL_IF_SET (expr);
  if (!decl_rtl && need_rtl)
    return 0;

  /* If this expression is really a debug alias of some other declaration, we
     don't need to track this expression if the ultimate declaration is
     ignored.  */
  realdecl = expr;
  if (VAR_P (realdecl) && DECL_HAS_DEBUG_EXPR_P (realdecl))
    {
      realdecl = DECL_DEBUG_EXPR (realdecl);
      if (!DECL_P (realdecl))
	{
	  /* The debug expr may be a component reference or a MEM_REF
	     of an address; dig out the underlying decl and check it.  */
	  if (handled_component_p (realdecl)
	      || (TREE_CODE (realdecl) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
	    {
	      HOST_WIDE_INT bitsize, bitpos;
	      bool reverse;
	      tree innerdecl
		= get_ref_base_and_extent_hwi (realdecl, &bitpos,
					       &bitsize, &reverse);
	      /* The 256-bit cap limits how far into the base object a
		 tracked fragment may reach — presumably to bound the
		 cost of fragment tracking; confirm before relying on
		 it.  */
	      if (!innerdecl
		  || !DECL_P (innerdecl)
		  || DECL_IGNORED_P (innerdecl)
		  /* Do not track declarations for parts of tracked record
		     parameters since we want to track them as a whole.  */
		  || tracked_record_parameter_p (innerdecl)
		  || TREE_STATIC (innerdecl)
		  || bitsize == 0
		  || bitpos + bitsize > 256)
		return 0;
	      else
		realdecl = expr;
	    }
	  else
	    return 0;
	}
    }

  /* Do not track EXPR if its REALDECL should be ignored for debugging
     purposes.  */
  if (DECL_IGNORED_P (realdecl))
    return 0;

  /* Do not track global variables until we are able to emit correct location
     list for them.  */
  if (TREE_STATIC (realdecl))
    return 0;

  /* When the EXPR is a DECL for alias of some variable (see example)
     the TREE_STATIC flag is not used.  Disable tracking all DECLs whose
     DECL_RTL contains SYMBOL_REF.

     Example:
     extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
     char **_dl_argv;
  */
  if (decl_rtl && MEM_P (decl_rtl)
      && contains_symbol_ref_p (XEXP (decl_rtl, 0)))
    return 0;

  /* If RTX is a memory it should not be very large (because it would be
     an array or struct).  */
  if (decl_rtl && MEM_P (decl_rtl))
    {
      /* Do not track structures and arrays.  */
      if ((GET_MODE (decl_rtl) == BLKmode
	   || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
	  && !tracked_record_parameter_p (realdecl))
	return 0;
      if (MEM_SIZE_KNOWN_P (decl_rtl)
	  && maybe_gt (MEM_SIZE (decl_rtl), MAX_VAR_PARTS))
	return 0;
    }

  /* Clear the changed-flags so note emission starts from a clean
     state for both the alias and the ultimate decl.  */
  DECL_CHANGED (expr) = 0;
  DECL_CHANGED (realdecl) = 0;
  return 1;
}
5289 
5290 /* Determine whether a given LOC refers to the same variable part as
5291    EXPR+OFFSET.  */
5292 
5293 static bool
same_variable_part_p(rtx loc,tree expr,poly_int64 offset)5294 same_variable_part_p (rtx loc, tree expr, poly_int64 offset)
5295 {
5296   tree expr2;
5297   poly_int64 offset2;
5298 
5299   if (! DECL_P (expr))
5300     return false;
5301 
5302   if (REG_P (loc))
5303     {
5304       expr2 = REG_EXPR (loc);
5305       offset2 = REG_OFFSET (loc);
5306     }
5307   else if (MEM_P (loc))
5308     {
5309       expr2 = MEM_EXPR (loc);
5310       offset2 = int_mem_offset (loc);
5311     }
5312   else
5313     return false;
5314 
5315   if (! expr2 || ! DECL_P (expr2))
5316     return false;
5317 
5318   expr = var_debug_decl (expr);
5319   expr2 = var_debug_decl (expr2);
5320 
5321   return (expr == expr2 && known_eq (offset, offset2));
5322 }
5323 
/* LOC is a REG or MEM that we would like to track if possible.
   If EXPR is null, we don't know what expression LOC refers to,
   otherwise it refers to EXPR + OFFSET.  STORE_REG_P is true if
   LOC is an lvalue register.

   Return true if EXPR is nonnull and if LOC, or some lowpart of it,
   is something we can track.  When returning true, store the mode of
   the lowpart we can track in *MODE_OUT (if nonnull) and its offset
   from EXPR in *OFFSET_OUT (if nonnull).  */

static bool
track_loc_p (rtx loc, tree expr, poly_int64 offset, bool store_reg_p,
	     machine_mode *mode_out, HOST_WIDE_INT *offset_out)
{
  machine_mode mode;

  if (expr == NULL || !track_expr_p (expr, true))
    return false;

  /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
     whole subreg, but only the old inner part is really relevant.  */
  mode = GET_MODE (loc);
  if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
    {
      machine_mode pseudo_mode;

      pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
      if (paradoxical_subreg_p (mode, pseudo_mode))
	{
	  /* Shift the offset to the inner (narrower) part and use the
	     pseudo's own mode from here on.  */
	  offset += byte_lowpart_offset (pseudo_mode, mode);
	  mode = pseudo_mode;
	}
    }

  /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
     Do the same if we are storing to a register and EXPR occupies
     the whole of register LOC; in that case, the whole of EXPR is
     being changed.  We exclude complex modes from the second case
     because the real and imaginary parts are represented as separate
     pseudo registers, even if the whole complex value fits into one
     hard register.  */
  if ((paradoxical_subreg_p (mode, DECL_MODE (expr))
       || (store_reg_p
	   && !COMPLEX_MODE_P (DECL_MODE (expr))
	   && hard_regno_nregs (REGNO (loc), DECL_MODE (expr)) == 1))
      && known_eq (offset + byte_lowpart_offset (DECL_MODE (expr), mode), 0))
    {
      mode = DECL_MODE (expr);
      offset = 0;
    }

  /* Variable tracking only handles constant offsets; reject
     polynomial ones.  */
  HOST_WIDE_INT const_offset;
  if (!track_offset_p (offset, &const_offset))
    return false;

  if (mode_out)
    *mode_out = mode;
  if (offset_out)
    *offset_out = const_offset;
  return true;
}
5385 
/* Return the MODE lowpart of LOC, or null if LOC is not something we
   want to track.  When returning nonnull, make sure that the attributes
   on the returned value are updated.  */

static rtx
var_lowpart (machine_mode mode, rtx loc)
{
  unsigned int regno;

  /* Already in the requested mode: nothing to do.  */
  if (GET_MODE (loc) == mode)
    return loc;

  /* Only registers and memory references can be narrowed here.  */
  if (!REG_P (loc) && !MEM_P (loc))
    return NULL;

  poly_uint64 offset = byte_lowpart_offset (mode, GET_MODE (loc));

  if (MEM_P (loc))
    return adjust_address_nv (loc, mode, offset);

  /* For a REG, translate the lowpart byte offset into a hard register
     number adjustment and build a REG carrying LOC's attributes.  */
  poly_uint64 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
  regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
					     reg_offset, mode);
  return gen_rtx_REG_offset (loc, mode, regno, offset);
}
5411 
/* Carry information about uses and stores while walking rtx.  */

struct count_use_info
{
  /* The insn where the RTX is.  */
  rtx_insn *insn;

  /* The basic block where insn is.  */
  basic_block bb;

  /* The array of n_sets sets in the insn, as determined by cselib.
     NULL when no cselib information is available (see use_type).  */
  struct cselib_set *sets;
  int n_sets;

  /* True if we're counting stores, false otherwise.  */
  bool store_p;
};
5429 
5430 /* Find a VALUE corresponding to X.   */
5431 
5432 static inline cselib_val *
find_use_val(rtx x,machine_mode mode,struct count_use_info * cui)5433 find_use_val (rtx x, machine_mode mode, struct count_use_info *cui)
5434 {
5435   int i;
5436 
5437   if (cui->sets)
5438     {
5439       /* This is called after uses are set up and before stores are
5440 	 processed by cselib, so it's safe to look up srcs, but not
5441 	 dsts.  So we look up expressions that appear in srcs or in
5442 	 dest expressions, but we search the sets array for dests of
5443 	 stores.  */
5444       if (cui->store_p)
5445 	{
5446 	  /* Some targets represent memset and memcpy patterns
5447 	     by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
5448 	     (set (mem:BLK ...) (const_int ...)) or
5449 	     (set (mem:BLK ...) (mem:BLK ...)).  Don't return anything
5450 	     in that case, otherwise we end up with mode mismatches.  */
5451 	  if (mode == BLKmode && MEM_P (x))
5452 	    return NULL;
5453 	  for (i = 0; i < cui->n_sets; i++)
5454 	    if (cui->sets[i].dest == x)
5455 	      return cui->sets[i].src_elt;
5456 	}
5457       else
5458 	return cselib_lookup (x, mode, 0, VOIDmode);
5459     }
5460 
5461   return NULL;
5462 }
5463 
5464 /* Replace all registers and addresses in an expression with VALUE
5465    expressions that map back to them, unless the expression is a
5466    register.  If no mapping is or can be performed, returns NULL.  */
5467 
5468 static rtx
replace_expr_with_values(rtx loc)5469 replace_expr_with_values (rtx loc)
5470 {
5471   if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
5472     return NULL;
5473   else if (MEM_P (loc))
5474     {
5475       cselib_val *addr = cselib_lookup (XEXP (loc, 0),
5476 					get_address_mode (loc), 0,
5477 					GET_MODE (loc));
5478       if (addr)
5479 	return replace_equiv_address_nv (loc, addr->val_rtx);
5480       else
5481 	return NULL;
5482     }
5483   else
5484     return cselib_subst_to_values (loc, VOIDmode);
5485 }
5486 
/* Return true if X contains a DEBUG_EXPR.  */

static bool
rtx_debug_expr_p (const_rtx x)
{
  subrtx_iterator::array_type array;
  /* Scan X and every sub-rtx of it for a DEBUG_EXPR code.  */
  FOR_EACH_SUBRTX (iter, array, x, ALL)
    if (GET_CODE (*iter) == DEBUG_EXPR)
      return true;
  return false;
}
5498 
/* Determine what kind of micro operation to choose for a USE.  Return
   MO_CLOBBER if no micro operation is to be generated.  */

static enum micro_operation_type
use_type (rtx loc, struct count_use_info *cui, machine_mode *modep)
{
  tree expr;

  /* When cselib set information is available, prefer the VALUE-based
     micro operations (MO_VAL_*).  */
  if (cui && cui->sets)
    {
      if (GET_CODE (loc) == VAR_LOCATION)
	{
	  if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
	    {
	      rtx ploc = PAT_VAR_LOCATION_LOC (loc);
	      if (! VAR_LOC_UNKNOWN_P (ploc))
		{
		  cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
						   VOIDmode);

		  /* ??? flag_float_store and volatile mems are never
		     given values, but we could in theory use them for
		     locations.  */
		  gcc_assert (val || 1);
		}
	      return MO_VAL_LOC;
	    }
	  else
	    return MO_CLOBBER;
	}

      if (REG_P (loc) || MEM_P (loc))
	{
	  if (modep)
	    *modep = GET_MODE (loc);
	  if (cui->store_p)
	    {
	      /* A store is value-tracked when its destination is a
		 register, or a MEM whose stored value and address
		 both have cselib VALUEs.  */
	      if (REG_P (loc)
		  || (find_use_val (loc, GET_MODE (loc), cui)
		      && cselib_lookup (XEXP (loc, 0),
					get_address_mode (loc), 0,
					GET_MODE (loc))))
		return MO_VAL_SET;
	    }
	  else
	    {
	      cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);

	      /* Values already preserved need no MO_VAL_USE; fall
		 through to the plain classification below.  */
	      if (val && !cselib_preserved_value_p (val))
		return MO_VAL_USE;
	    }
	}
    }

  if (REG_P (loc))
    {
      gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);

      /* The register holding the CFA base is never treated as a
	 variable location.  */
      if (loc == cfa_base_rtx)
	return MO_CLOBBER;
      expr = REG_EXPR (loc);

      if (!expr)
	return MO_USE_NO_VAR;
      else if (target_for_debug_bind (var_debug_decl (expr)))
	return MO_CLOBBER;
      else if (track_loc_p (loc, expr, REG_OFFSET (loc),
			    false, modep, NULL))
	return MO_USE;
      else
	return MO_USE_NO_VAR;
    }
  else if (MEM_P (loc))
    {
      expr = MEM_EXPR (loc);

      if (!expr)
	return MO_CLOBBER;
      else if (target_for_debug_bind (var_debug_decl (expr)))
	return MO_CLOBBER;
      else if (track_loc_p (loc, expr, int_mem_offset (loc),
			    false, modep, NULL)
	       /* Multi-part variables shouldn't refer to one-part
		  variable names such as VALUEs (never happens) or
		  DEBUG_EXPRs (only happens in the presence of debug
		  insns).  */
	       && (!MAY_HAVE_DEBUG_BIND_INSNS
		   || !rtx_debug_expr_p (XEXP (loc, 0))))
	return MO_USE;
      else
	return MO_CLOBBER;
    }

  return MO_CLOBBER;
}
5594 
/* Log to OUT information about micro-operation MOPT involving X in
   INSN of BB.  */

static inline void
log_op_type (rtx x, basic_block bb, rtx_insn *insn,
	     enum micro_operation_type mopt, FILE *out)
{
  /* The op index reported is the slot the micro-operation will occupy
     in BB's mos array, i.e. its current length.  */
  fprintf (out, "bb %i op %i insn %i %s ",
	   bb->index, VTI (bb)->mos.length (),
	   INSN_UID (insn), micro_operation_type_name[mopt]);
  print_inline_rtx (out, x, 2);
  fputc ('\n', out);
}
5608 
/* The following accessors reuse spare rtx flag bits on the CONCATs
   built for MO_VAL_* micro-operations.  */

/* Tell whether the CONCAT used to hold a VALUE and its location
   needs value resolution, i.e., an attempt of mapping the location
   back to other incoming values.  */
#define VAL_NEEDS_RESOLUTION(x) \
  (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
/* Whether the location in the CONCAT is a tracked expression, that
   should also be handled like a MO_USE.  */
#define VAL_HOLDS_TRACK_EXPR(x) \
  (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
/* Whether the location in the CONCAT should be handled like a MO_COPY
   as well.  */
#define VAL_EXPR_IS_COPIED(x) \
  (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
/* Whether the location in the CONCAT should be handled like a
   MO_CLOBBER as well.  */
#define VAL_EXPR_IS_CLOBBERED(x) \
  (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)

/* All preserved VALUEs.  */
static vec<rtx> preserved_values;
5629 
/* Ensure VAL is preserved and remember it in a vector for vt_emit_notes.  */

static void
preserve_value (cselib_val *val)
{
  /* Ask cselib to keep VAL alive...  */
  cselib_preserve_value (val);
  /* ...and record it in preserved_values for later processing.  */
  preserved_values.safe_push (val->val_rtx);
}
5638 
5639 /* Helper function for MO_VAL_LOC handling.  Return non-zero if
5640    any rtxes not suitable for CONST use not replaced by VALUEs
5641    are discovered.  */
5642 
5643 static bool
non_suitable_const(const_rtx x)5644 non_suitable_const (const_rtx x)
5645 {
5646   subrtx_iterator::array_type array;
5647   FOR_EACH_SUBRTX (iter, array, x, ALL)
5648     {
5649       const_rtx x = *iter;
5650       switch (GET_CODE (x))
5651 	{
5652 	case REG:
5653 	case DEBUG_EXPR:
5654 	case PC:
5655 	case SCRATCH:
5656 	case CC0:
5657 	case ASM_INPUT:
5658 	case ASM_OPERANDS:
5659 	  return true;
5660 	case MEM:
5661 	  if (!MEM_READONLY_P (x))
5662 	    return true;
5663 	  break;
5664 	default:
5665 	  break;
5666 	}
5667     }
5668   return false;
5669 }
5670 
/* Add uses (register and memory references) LOC which will be tracked
   to VTI (bb)->mos.  */

static void
add_uses (rtx loc, struct count_use_info *cui)
{
  machine_mode mode = VOIDmode;
  enum micro_operation_type type = use_type (loc, cui, &mode);

  if (type != MO_CLOBBER)
    {
      basic_block bb = cui->bb;
      micro_operation mo;

      mo.type = type;
      mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
      mo.insn = cui->insn;

      if (type == MO_VAL_LOC)
	{
	  /* LOC is a VAR_LOCATION pattern (from a debug bind insn).  */
	  rtx oloc = loc;
	  rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
	  cselib_val *val;

	  gcc_assert (cui->sets);

	  /* If the location is a MEM whose address is neither a REG
	     nor a MEM, look up the VALUE of the address and preserve
	     it if it isn't already.  */
	  if (MEM_P (vloc)
	      && !REG_P (XEXP (vloc, 0))
	      && !MEM_P (XEXP (vloc, 0)))
	    {
	      rtx mloc = vloc;
	      machine_mode address_mode = get_address_mode (mloc);
	      cselib_val *val
		= cselib_lookup (XEXP (mloc, 0), address_mode, 0,
				 GET_MODE (mloc));

	      if (val && !cselib_preserved_value_p (val))
		preserve_value (val);
	    }

	  if (CONSTANT_P (vloc)
	      && (GET_CODE (vloc) != CONST || non_suitable_const (vloc)))
	    /* For constants don't look up any value.  */;
	  else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (vloc)
		   && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
	    {
	      machine_mode mode2;
	      enum micro_operation_type type2;
	      rtx nloc = NULL;
	      bool resolvable = REG_P (vloc) || MEM_P (vloc);

	      if (resolvable)
		nloc = replace_expr_with_values (vloc);

	      if (nloc)
		{
		  /* Don't modify the insn's pattern in place; build a
		     copy holding the value-substituted location.  */
		  oloc = shallow_copy_rtx (oloc);
		  PAT_VAR_LOCATION_LOC (oloc) = nloc;
		}

	      /* Pair the VALUE with the (possibly rewritten)
		 VAR_LOCATION.  */
	      oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);

	      type2 = use_type (vloc, 0, &mode2);

	      gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
			  || type2 == MO_CLOBBER);

	      if (type2 == MO_CLOBBER
		  && !cselib_preserved_value_p (val))
		{
		  VAL_NEEDS_RESOLUTION (oloc) = resolvable;
		  preserve_value (val);
		}
	    }
	  else if (!VAR_LOC_UNKNOWN_P (vloc))
	    {
	      /* The location cannot be expressed with a VALUE; record
		 the variable's location as unknown instead.  */
	      oloc = shallow_copy_rtx (oloc);
	      PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
	    }

	  mo.u.loc = oloc;
	}
      else if (type == MO_VAL_USE)
	{
	  machine_mode mode2 = VOIDmode;
	  enum micro_operation_type type2;
	  cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
	  rtx vloc, oloc = loc, nloc;

	  gcc_assert (cui->sets);

	  /* As above: preserve the VALUE of a complex MEM address.  */
	  if (MEM_P (oloc)
	      && !REG_P (XEXP (oloc, 0))
	      && !MEM_P (XEXP (oloc, 0)))
	    {
	      rtx mloc = oloc;
	      machine_mode address_mode = get_address_mode (mloc);
	      cselib_val *val
		= cselib_lookup (XEXP (mloc, 0), address_mode, 0,
				 GET_MODE (mloc));

	      if (val && !cselib_preserved_value_p (val))
		preserve_value (val);
	    }

	  type2 = use_type (loc, 0, &mode2);

	  gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
		      || type2 == MO_CLOBBER);

	  if (type2 == MO_USE)
	    vloc = var_lowpart (mode2, loc);
	  else
	    vloc = oloc;

	  /* The loc of a MO_VAL_USE may have two forms:

	     (concat val src): val is at src, a value-based
	     representation.

	     (concat (concat val use) src): same as above, with use as
	     the MO_USE tracked value, if it differs from src.

	  */

	  gcc_checking_assert (REG_P (loc) || MEM_P (loc));
	  nloc = replace_expr_with_values (loc);
	  if (!nloc)
	    nloc = oloc;

	  if (vloc != nloc)
	    oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
	  else
	    oloc = val->val_rtx;

	  mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);

	  if (type2 == MO_USE)
	    VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
	  if (!cselib_preserved_value_p (val))
	    {
	      VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
	      preserve_value (val);
	    }
	}
      else
	gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);

      if (dump_file && (dump_flags & TDF_DETAILS))
	log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
      VTI (bb)->mos.safe_push (mo);
    }
}
5824 
/* Helper function for finding all uses of REG/MEM in X in insn INSN.  */

static void
add_uses_1 (rtx *x, void *cui)
{
  subrtx_var_iterator::array_type array;
  /* Visit *X and every sub-rtx of it, recording any trackable use
     found (add_uses ignores anything that classifies as MO_CLOBBER).  */
  FOR_EACH_SUBRTX_VAR (iter, array, *x, NONCONST)
    add_uses (*iter, (struct count_use_info *) cui);
}
5834 
/* This is the value used during expansion of locations.  We want it
   to be unbounded, so that variables expanded deep in a recursion
   nest are fully evaluated, so that their values are cached
   correctly.  We avoid recursion cycles through other means, and we
   don't unshare RTL, so excess complexity is not a problem.  */
#define EXPR_DEPTH (INT_MAX)
/* We use this to keep too-complex expressions from being emitted as
   location notes, and then to debug information.  Users can trade
   compile time for ridiculously complex expressions, although they're
   seldom useful, and they may often have to be discarded as not
   representable anyway.  The limit comes from the
   --param max-vartrack-expr-depth knob.  */
#define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
5847 
/* Attempt to reverse the EXPR operation in the debug info and record
   it in the cselib table.  Say for reg1 = reg2 + 6 even when reg2 is
   no longer live we can express its value as VAL - 6.  */

static void
reverse_op (rtx val, const_rtx expr, rtx_insn *insn)
{
  rtx src, arg, ret;
  cselib_val *v;
  struct elt_loc_list *l;
  enum rtx_code code;
  int count;

  /* Only a plain SET of a register, in the same mode as VAL, is a
     candidate for reversal.  */
  if (GET_CODE (expr) != SET)
    return;

  if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
    return;

  /* Restrict to operations we know how to invert, and whose (first)
     operand is a register -- or a register or memory in the case of
     the extensions.  */
  src = SET_SRC (expr);
  switch (GET_CODE (src))
    {
    case PLUS:
    case MINUS:
    case XOR:
    case NOT:
    case NEG:
      if (!REG_P (XEXP (src, 0)))
	return;
      break;
    case SIGN_EXTEND:
    case ZERO_EXTEND:
      if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
	return;
      break;
    default:
      return;
    }

  /* Only scalar integer operations are reversed, and the CFA base
     register is left alone.  */
  if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
    return;

  /* The reverse expression is only useful if the overwritten operand
     has a preserved VALUE to attach it to.  */
  v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
  if (!v || !cselib_preserved_value_p (v))
    return;

  /* Use canonical V to avoid creating multiple redundant expressions
     for different VALUES equivalent to V.  */
  v = canonical_cselib_val (v);

  /* Adding a reverse op isn't useful if V already has an always valid
     location.  Ignore ENTRY_VALUE, while it is always constant, we should
     prefer non-ENTRY_VALUE locations whenever possible.  */
  for (l = v->locs, count = 0; l; l = l->next, count++)
    if (CONSTANT_P (l->loc)
	&& (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0)))
      return;
    /* Avoid creating too large locs lists.  */
    else if (count == PARAM_VALUE (PARAM_MAX_VARTRACK_REVERSE_OP_SIZE))
      return;

  /* Build RET, an expression computing the original operand's value
     back from VAL.  */
  switch (GET_CODE (src))
    {
    case NOT:
    case NEG:
      if (GET_MODE (v->val_rtx) != GET_MODE (val))
	return;
      /* NOT and NEG are their own inverses.  */
      ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
      break;
    case SIGN_EXTEND:
    case ZERO_EXTEND:
      /* The inverse of an extension is a truncation to the narrower
	 mode of the original operand.  */
      ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
      break;
    case XOR:
      code = XOR;
      goto binary;
    case PLUS:
      code = MINUS;
      goto binary;
    case MINUS:
      code = PLUS;
      goto binary;
    binary:
      if (GET_MODE (v->val_rtx) != GET_MODE (val))
	return;
      /* The second operand must be (expandable to) a constant for the
	 reverse expression to remain valid.  */
      arg = XEXP (src, 1);
      if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
	{
	  arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
	  if (arg == NULL_RTX)
	    return;
	  if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
	    return;
	}
      ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
      break;
    default:
      gcc_unreachable ();
    }

  /* Record RET as an additional, permanent equivalence for V.  */
  cselib_add_permanent_equiv (v, ret, insn);
}
5950 
/* Add stores (register and memory references) LOC which will be tracked
   to VTI (bb)->mos.  EXPR is the RTL expression containing the store.
   CUIP->insn is instruction which the LOC is part of.  */

static void
add_stores (rtx loc, const_rtx expr, void *cuip)
{
  machine_mode mode = VOIDmode, mode2;
  struct count_use_info *cui = (struct count_use_info *)cuip;
  basic_block bb = cui->bb;
  micro_operation mo;
  rtx oloc = loc, nloc, src = NULL;
  enum micro_operation_type type = use_type (loc, cui, &mode);
  bool track_p = false;
  cselib_val *v;
  bool resolve, preserve;

  if (type == MO_CLOBBER)
    return;

  mode2 = mode;

  if (REG_P (loc))
    {
      gcc_assert (loc != cfa_base_rtx);
      /* An untracked register, or an explicit CLOBBER, is recorded as
	 MO_CLOBBER.  */
      if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
	  || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
	  || GET_CODE (expr) == CLOBBER)
	{
	  mo.type = MO_CLOBBER;
	  mo.u.loc = loc;
	  /* Keep the whole SET around when the source is usable, so
	     the stored value can still be recorded below.  */
	  if (GET_CODE (expr) == SET
	      && (SET_DEST (expr) == loc
		  || (GET_CODE (SET_DEST (expr)) == STRICT_LOW_PART
		      && XEXP (SET_DEST (expr), 0) == loc))
	      && !unsuitable_loc (SET_SRC (expr))
	      && find_use_val (loc, mode, cui))
	    {
	      gcc_checking_assert (type == MO_VAL_SET);
	      mo.u.loc = gen_rtx_SET (loc, SET_SRC (expr));
	    }
	}
      else
	{
	  if (GET_CODE (expr) == SET
	      && SET_DEST (expr) == loc
	      && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
	    src = var_lowpart (mode2, SET_SRC (expr));
	  loc = var_lowpart (mode2, loc);

	  if (src == NULL)
	    {
	      mo.type = MO_SET;
	      mo.u.loc = loc;
	    }
	  else
	    {
	      rtx xexpr = gen_rtx_SET (loc, src);
	      /* A store whose source holds the same variable part is a
		 copy, not a new value -- except for the special case
		 documented below.  */
	      if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
		{
		  /* If this is an instruction copying (part of) a parameter
		     passed by invisible reference to its register location,
		     pretend it's a SET so that the initial memory location
		     is discarded, as the parameter register can be reused
		     for other purposes and we do not track locations based
		     on generic registers.  */
		  if (MEM_P (src)
		      && REG_EXPR (loc)
		      && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
		      && DECL_MODE (REG_EXPR (loc)) != BLKmode
		      && MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
		      && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0)
			 != arg_pointer_rtx)
		    mo.type = MO_SET;
		  else
		    mo.type = MO_COPY;
		}
	      else
		mo.type = MO_SET;
	      mo.u.loc = xexpr;
	    }
	}
      mo.insn = cui->insn;
    }
  else if (MEM_P (loc)
	   && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
	       || cui->sets))
    {
      /* For a store through a non-trivial address, preserve the VALUE
	 of the address so the MEM's location can be expressed later.  */
      if (MEM_P (loc) && type == MO_VAL_SET
	  && !REG_P (XEXP (loc, 0))
	  && !MEM_P (XEXP (loc, 0)))
	{
	  rtx mloc = loc;
	  machine_mode address_mode = get_address_mode (mloc);
	  cselib_val *val = cselib_lookup (XEXP (mloc, 0),
					   address_mode, 0,
					   GET_MODE (mloc));

	  if (val && !cselib_preserved_value_p (val))
	    preserve_value (val);
	}

      if (GET_CODE (expr) == CLOBBER || !track_p)
	{
	  mo.type = MO_CLOBBER;
	  mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
	}
      else
	{
	  if (GET_CODE (expr) == SET
	      && SET_DEST (expr) == loc
	      && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
	    src = var_lowpart (mode2, SET_SRC (expr));
	  loc = var_lowpart (mode2, loc);

	  if (src == NULL)
	    {
	      mo.type = MO_SET;
	      mo.u.loc = loc;
	    }
	  else
	    {
	      rtx xexpr = gen_rtx_SET (loc, src);
	      if (same_variable_part_p (SET_SRC (xexpr),
					MEM_EXPR (loc),
					int_mem_offset (loc)))
		mo.type = MO_COPY;
	      else
		mo.type = MO_SET;
	      mo.u.loc = xexpr;
	    }
	}
      mo.insn = cui->insn;
    }
  else
    return;

  /* The rest of this function upgrades the micro operation built above
     to a MO_VAL_SET that also tracks the stored VALUE.  */
  if (type != MO_VAL_SET)
    goto log_and_return;

  v = find_use_val (oloc, mode, cui);

  if (!v)
    goto log_and_return;

  resolve = preserve = !cselib_preserved_value_p (v);

  /* We cannot track values for multiple-part variables, so we track only
     locations for tracked record parameters.  */
  if (track_p
      && REG_P (loc)
      && REG_EXPR (loc)
      && tracked_record_parameter_p (REG_EXPR (loc)))
    {
      /* Although we don't use the value here, it could be used later by the
	 mere virtue of its existence as the operand of the reverse operation
	 that gave rise to it (typically extension/truncation).  Make sure it
	 is preserved as required by vt_expand_var_loc_chain.  */
      if (preserve)
	preserve_value (v);
      goto log_and_return;
    }

  if (loc == stack_pointer_rtx
      && maybe_ne (hard_frame_pointer_adjustment, -1)
      && preserve)
    cselib_set_value_sp_based (v);

  /* Use a value-based representation of the destination if one
     exists.  */
  nloc = replace_expr_with_values (oloc);
  if (nloc)
    oloc = nloc;

  if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
    {
      cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);

      /* A conditional store that would not change the value needs no
	 micro operation at all.  */
      if (oval == v)
	return;
      gcc_assert (REG_P (oloc) || MEM_P (oloc));

      if (oval && !cselib_preserved_value_p (oval))
	{
	  micro_operation moa;

	  preserve_value (oval);

	  /* Record a use of the destination's previous value, since a
	     conditional store may leave it live.  */
	  moa.type = MO_VAL_USE;
	  moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
	  VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
	  moa.insn = cui->insn;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    log_op_type (moa.u.loc, cui->bb, cui->insn,
			 moa.type, dump_file);
	  VTI (bb)->mos.safe_push (moa);
	}

      resolve = false;
    }
  else if (resolve && GET_CODE (mo.u.loc) == SET)
    {
      if (REG_P (SET_SRC (expr)) || MEM_P (SET_SRC (expr)))
	nloc = replace_expr_with_values (SET_SRC (expr));
      else
	nloc = NULL_RTX;

      /* Avoid the mode mismatch between oexpr and expr.  */
      if (!nloc && mode != mode2)
	{
	  nloc = SET_SRC (expr);
	  gcc_assert (oloc == SET_DEST (expr));
	}

      if (nloc && nloc != SET_SRC (mo.u.loc))
	oloc = gen_rtx_SET (oloc, nloc);
      else
	{
	  if (oloc == SET_DEST (mo.u.loc))
	    /* No point in duplicating.  */
	    oloc = mo.u.loc;
	  if (!REG_P (SET_SRC (mo.u.loc)))
	    resolve = false;
	}
    }
  else if (!resolve)
    {
      if (GET_CODE (mo.u.loc) == SET
	  && oloc == SET_DEST (mo.u.loc))
	/* No point in duplicating.  */
	oloc = mo.u.loc;
    }
  else
    resolve = false;

  loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);

  if (mo.u.loc != oloc)
    loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);

  /* The loc of a MO_VAL_SET may have various forms:

     (concat val dst): dst now holds val

     (concat val (set dst src)): dst now holds val, copied from src

     (concat (concat val dstv) dst): dst now holds val; dstv is dst
     after replacing mems and non-top-level regs with values.

     (concat (concat val dstv) (set dst src)): dst now holds val,
     copied from src.  dstv is a value-based representation of dst, if
     it differs from dst.  If resolution is needed, src is a REG, and
     its mode is the same as that of val.

     (concat (concat val (set dstv srcv)) (set dst src)): src
     copied to dst, holding val.  dstv and srcv are value-based
     representations of dst and src, respectively.

  */

  if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
    reverse_op (v->val_rtx, expr, cui->insn);

  mo.u.loc = loc;

  if (track_p)
    VAL_HOLDS_TRACK_EXPR (loc) = 1;
  if (preserve)
    {
      VAL_NEEDS_RESOLUTION (loc) = resolve;
      preserve_value (v);
    }
  if (mo.type == MO_CLOBBER)
    VAL_EXPR_IS_CLOBBERED (loc) = 1;
  if (mo.type == MO_COPY)
    VAL_EXPR_IS_COPIED (loc) = 1;

  mo.type = MO_VAL_SET;

 log_and_return:
  if (dump_file && (dump_flags & TDF_DETAILS))
    log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
  VTI (bb)->mos.safe_push (mo);
}
6234 
/* Arguments to the call.  */
static rtx call_arguments;

/* Compute call_arguments.  */

static void
prepare_call_arguments (basic_block bb, rtx_insn *insn)
{
  rtx link, x, call;
  rtx prev, cur, next;
  rtx this_arg = NULL_RTX;
  tree type = NULL_TREE, t, fndecl = NULL_TREE;
  tree obj_type_ref = NULL_TREE;
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;

  memset (&args_so_far_v, 0, sizeof (args_so_far_v));
  args_so_far = pack_cumulative_args (&args_so_far_v);
  call = get_call_rtx_from (insn);
  if (call)
    {
      /* Identify the callee declaration, either from the SYMBOL_REF in
	 the call address or from the MEM_EXPR of the address.  */
      if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
	{
	  rtx symbol = XEXP (XEXP (call, 0), 0);
	  if (SYMBOL_REF_DECL (symbol))
	    fndecl = SYMBOL_REF_DECL (symbol);
	}
      if (fndecl == NULL_TREE)
	fndecl = MEM_EXPR (XEXP (call, 0));
      if (fndecl
	  && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
	  && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
	fndecl = NULL_TREE;
      if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
	type = TREE_TYPE (fndecl);
      if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
	{
	  /* For an indirect virtual call, remember the OBJ_TYPE_REF so
	     the vtable slot can be recorded at the end.  */
	  if (TREE_CODE (fndecl) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
	    obj_type_ref = TREE_OPERAND (fndecl, 0);
	  fndecl = NULL_TREE;
	}
      if (type)
	{
	  /* TYPE is only of interest if some argument is a reference to
	     an integral type (or this is a virtual call); otherwise
	     walking the argument types below is unnecessary.  */
	  for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
	       t = TREE_CHAIN (t))
	    if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
		&& INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
	      break;
	  if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
	    type = NULL;
	  else
	    {
	      int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
	      link = CALL_INSN_FUNCTION_USAGE (insn);
#ifndef PCC_STATIC_STRUCT_RETURN
	      /* Account for a hidden leading struct-return pointer
		 argument, which may consume a register or a stack
		 slot.  */
	      if (aggregate_value_p (TREE_TYPE (type), type)
		  && targetm.calls.struct_value_rtx (type, 0) == 0)
		{
		  tree struct_addr = build_pointer_type (TREE_TYPE (type));
		  machine_mode mode = TYPE_MODE (struct_addr);
		  rtx reg;
		  INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
					nargs + 1);
		  reg = targetm.calls.function_arg (args_so_far, mode,
						    struct_addr, true);
		  targetm.calls.function_arg_advance (args_so_far, mode,
						      struct_addr, true);
		  if (reg == NULL_RTX)
		    {
		      /* The hidden argument was passed in memory; skip
			 its USE in the function usage chain.  */
		      for (; link; link = XEXP (link, 1))
			if (GET_CODE (XEXP (link, 0)) == USE
			    && MEM_P (XEXP (XEXP (link, 0), 0)))
			  {
			    link = XEXP (link, 1);
			    break;
			  }
		    }
		}
	      else
#endif
		INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
				      nargs);
	      if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
		{
		  /* Find where the `this' pointer of the virtual call is
		     passed; only a register (or a stack slot found in
		     the usage chain) is usable.  */
		  machine_mode mode;
		  t = TYPE_ARG_TYPES (type);
		  mode = TYPE_MODE (TREE_VALUE (t));
		  this_arg = targetm.calls.function_arg (args_so_far, mode,
							 TREE_VALUE (t), true);
		  if (this_arg && !REG_P (this_arg))
		    this_arg = NULL_RTX;
		  else if (this_arg == NULL_RTX)
		    {
		      for (; link; link = XEXP (link, 1))
			if (GET_CODE (XEXP (link, 0)) == USE
			    && MEM_P (XEXP (XEXP (link, 0), 0)))
			  {
			    this_arg = XEXP (XEXP (link, 0), 0);
			    break;
			  }
		    }
		}
	    }
	}
    }
  t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;

  /* For each argument mentioned in CALL_INSN_FUNCTION_USAGE, try to
     express the passed value via a preserved cselib VALUE and record
     it as a (concat arg value) in call_arguments.  */
  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    if (GET_CODE (XEXP (link, 0)) == USE)
      {
	rtx item = NULL_RTX;
	x = XEXP (XEXP (link, 0), 0);
	if (GET_MODE (link) == VOIDmode
	    || GET_MODE (link) == BLKmode
	    || (GET_MODE (link) != GET_MODE (x)
		&& ((GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
		     && GET_MODE_CLASS (GET_MODE (link)) != MODE_PARTIAL_INT)
		    || (GET_MODE_CLASS (GET_MODE (x)) != MODE_INT
			&& GET_MODE_CLASS (GET_MODE (x)) != MODE_PARTIAL_INT))))
	  /* Can't do anything for these, if the original type mode
	     isn't known or can't be converted.  */;
	else if (REG_P (x))
	  {
	    cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
	    scalar_int_mode mode;
	    if (val && cselib_preserved_value_p (val))
	      item = val->val_rtx;
	    else if (is_a <scalar_int_mode> (GET_MODE (x), &mode))
	      {
		/* No preserved VALUE for the register itself; try the
		   same register in wider integer modes, up to a word.  */
		opt_scalar_int_mode mode_iter;
		FOR_EACH_WIDER_MODE (mode_iter, mode)
		  {
		    mode = mode_iter.require ();
		    if (GET_MODE_BITSIZE (mode) > BITS_PER_WORD)
		      break;

		    rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
		    if (reg == NULL_RTX || !REG_P (reg))
		      continue;
		    val = cselib_lookup (reg, mode, 0, VOIDmode);
		    if (val && cselib_preserved_value_p (val))
		      {
			item = val->val_rtx;
			break;
		      }
		  }
	      }
	  }
	else if (MEM_P (x))
	  {
	    rtx mem = x;
	    cselib_val *val;

	    if (!frame_pointer_needed)
	      {
		/* Rewrite sp-relative addresses to account for the
		   stack adjustment at the end of the block, so the MEM
		   can be looked up in the cselib table.  */
		struct adjust_mem_data amd;
		amd.mem_mode = VOIDmode;
		amd.stack_adjust = -VTI (bb)->out.stack_adjust;
		amd.store = true;
		mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
					       &amd);
		gcc_assert (amd.side_effects.is_empty ());
	      }
	    val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
	    if (val && cselib_preserved_value_p (val))
	      item = val->val_rtx;
	    else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT
		     && GET_MODE_CLASS (GET_MODE (mem)) != MODE_PARTIAL_INT)
	      {
		/* For non-integer stack argument see also if they weren't
		   initialized by integers.  */
		scalar_int_mode imode;
		if (int_mode_for_mode (GET_MODE (mem)).exists (&imode)
		    && imode != GET_MODE (mem))
		  {
		    val = cselib_lookup (adjust_address_nv (mem, imode, 0),
					 imode, 0, VOIDmode);
		    if (val && cselib_preserved_value_p (val))
		      item = lowpart_subreg (GET_MODE (x), val->val_rtx,
					     imode);
		  }
	      }
	  }
	if (item)
	  {
	    /* Record (concat arg value), converting both sides to the
	       mode of the original argument type if needed.  */
	    rtx x2 = x;
	    if (GET_MODE (item) != GET_MODE (link))
	      item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item));
	    if (GET_MODE (x2) != GET_MODE (link))
	      x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2));
	    item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
	    call_arguments
	      = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
	  }
	if (t && t != void_list_node)
	  {
	    /* Walk the argument types in parallel; for a reference to
	       an integral passed in this register, also try to record
	       the pointed-to value.  */
	    tree argtype = TREE_VALUE (t);
	    machine_mode mode = TYPE_MODE (argtype);
	    rtx reg;
	    if (pass_by_reference (&args_so_far_v, mode, argtype, true))
	      {
		argtype = build_pointer_type (argtype);
		mode = TYPE_MODE (argtype);
	      }
	    reg = targetm.calls.function_arg (args_so_far, mode,
					      argtype, true);
	    if (TREE_CODE (argtype) == REFERENCE_TYPE
		&& INTEGRAL_TYPE_P (TREE_TYPE (argtype))
		&& reg
		&& REG_P (reg)
		&& GET_MODE (reg) == mode
		&& (GET_MODE_CLASS (mode) == MODE_INT
		    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
		&& REG_P (x)
		&& REGNO (x) == REGNO (reg)
		&& GET_MODE (x) == mode
		&& item)
	      {
		machine_mode indmode
		  = TYPE_MODE (TREE_TYPE (argtype));
		rtx mem = gen_rtx_MEM (indmode, x);
		cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
		if (val && cselib_preserved_value_p (val))
		  {
		    item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
		    call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
							call_arguments);
		  }
		else
		  {
		    struct elt_loc_list *l;
		    tree initial;

		    /* Try harder, when passing address of a constant
		       pool integer it can be easily read back.  */
		    item = XEXP (item, 1);
		    if (GET_CODE (item) == SUBREG)
		      item = SUBREG_REG (item);
		    gcc_assert (GET_CODE (item) == VALUE);
		    val = CSELIB_VAL_PTR (item);
		    for (l = val->locs; l; l = l->next)
		      if (GET_CODE (l->loc) == SYMBOL_REF
			  && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
			  && SYMBOL_REF_DECL (l->loc)
			  && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
			{
			  initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
			  if (tree_fits_shwi_p (initial))
			    {
			      item = GEN_INT (tree_to_shwi (initial));
			      item = gen_rtx_CONCAT (indmode, mem, item);
			      call_arguments
				= gen_rtx_EXPR_LIST (VOIDmode, item,
						     call_arguments);
			    }
			  break;
			}
		  }
	      }
	    targetm.calls.function_arg_advance (args_so_far, mode,
						argtype, true);
	    t = TREE_CHAIN (t);
	  }
      }

  /* Add debug arguments.  */
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_HAS_DEBUG_ARGS_P (fndecl))
    {
      vec<tree, va_gc> **debug_args = decl_debug_args_lookup (fndecl);
      if (debug_args)
	{
	  unsigned int ix;
	  tree param;
	  for (ix = 0; vec_safe_iterate (*debug_args, ix, &param); ix += 2)
	    {
	      rtx item;
	      tree dtemp = (**debug_args)[ix + 1];
	      machine_mode mode = DECL_MODE (dtemp);
	      item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
	      item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
	      call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
						  call_arguments);
	    }
	}
    }

  /* Reverse call_arguments chain.  */
  prev = NULL_RTX;
  for (cur = call_arguments; cur; cur = next)
    {
      next = XEXP (cur, 1);
      XEXP (cur, 1) = prev;
      prev = cur;
    }
  call_arguments = prev;

  /* For an indirect call, record the callee address, paired with
     pc_rtx to distinguish it from argument entries.  */
  x = get_call_rtx_from (insn);
  if (x)
    {
      x = XEXP (XEXP (x, 0), 0);
      if (GET_CODE (x) == SYMBOL_REF)
	/* Don't record anything.  */;
      else if (CONSTANT_P (x))
	{
	  x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
			      pc_rtx, x);
	  call_arguments
	    = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
	}
      else
	{
	  cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
	  if (val && cselib_preserved_value_p (val))
	    {
	      x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
	      call_arguments
		= gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
	    }
	}
    }
  if (this_arg)
    {
      /* Record the vtable slot used by the virtual call: the MEM at
	 offset TOKEN * slot size within the table pointed to by
	 *this_arg, paired with (clobber pc).  */
      machine_mode mode
	= TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
      rtx clobbered = gen_rtx_MEM (mode, this_arg);
      HOST_WIDE_INT token
	= tree_to_shwi (OBJ_TYPE_REF_TOKEN (obj_type_ref));
      if (token)
	clobbered = plus_constant (mode, clobbered,
				   token * GET_MODE_SIZE (mode));
      clobbered = gen_rtx_MEM (mode, clobbered);
      x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
      call_arguments
	= gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
    }
}
6574 
/* Callback for cselib_record_sets_hook, that records as micro
   operations uses and stores in an insn after cselib_record_sets has
   analyzed the sets in an insn, but before it modifies the stored
   values in the internal tables, unless cselib_record_sets doesn't
   call it directly (perhaps because we're not doing cselib in the
   first place, in which case sets and n_sets will be 0).  */

static void
add_with_sets (rtx_insn *insn, struct cselib_set *sets, int n_sets)
{
  basic_block bb = BLOCK_FOR_INSN (insn);
  int n1, n2;
  struct count_use_info cui;
  micro_operation *mos;

  cselib_hook_called = true;

  cui.insn = insn;
  cui.bb = bb;
  cui.sets = sets;
  cui.n_sets = n_sets;

  /* First collect the uses; N1 marks where this insn's micro
     operations begin in the block's vector.  */
  n1 = VTI (bb)->mos.length ();
  cui.store_p = false;
  note_uses (&PATTERN (insn), add_uses_1, &cui);
  n2 = VTI (bb)->mos.length () - 1;
  mos = VTI (bb)->mos.address ();

  /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
     MO_VAL_LOC last.  */
  while (n1 < n2)
    {
      /* Two-pointer partition: swap misplaced elements from each end
	 toward the middle.  */
      while (n1 < n2 && mos[n1].type == MO_USE)
	n1++;
      while (n1 < n2 && mos[n2].type != MO_USE)
	n2--;
      if (n1 < n2)
	std::swap (mos[n1], mos[n2]);
    }

  /* Second partition, within the remaining tail: push MO_VAL_LOCs to
     the end.  */
  n2 = VTI (bb)->mos.length () - 1;
  while (n1 < n2)
    {
      while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
	n1++;
      while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
	n2--;
      if (n1 < n2)
	std::swap (mos[n1], mos[n2]);
    }

  if (CALL_P (insn))
    {
      micro_operation mo;

      mo.type = MO_CALL;
      mo.insn = insn;
      mo.u.loc = call_arguments;
      call_arguments = NULL_RTX;

      if (dump_file && (dump_flags & TDF_DETAILS))
	log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
      VTI (bb)->mos.safe_push (mo);
    }

  n1 = VTI (bb)->mos.length ();
  /* This will record NEXT_INSN (insn), such that we can
     insert notes before it without worrying about any
     notes that MO_USEs might emit after the insn.  */
  cui.store_p = true;
  note_stores (PATTERN (insn), add_stores, &cui);
  n2 = VTI (bb)->mos.length () - 1;
  mos = VTI (bb)->mos.address ();

  /* Order the MO_VAL_USEs first (note_stores does nothing
     on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
     insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET.  */
  while (n1 < n2)
    {
      while (n1 < n2 && mos[n1].type == MO_VAL_USE)
	n1++;
      while (n1 < n2 && mos[n2].type != MO_VAL_USE)
	n2--;
      if (n1 < n2)
	std::swap (mos[n1], mos[n2]);
    }

  /* Within the tail, move MO_CLOBBERs before everything else.  */
  n2 = VTI (bb)->mos.length () - 1;
  while (n1 < n2)
    {
      while (n1 < n2 && mos[n1].type == MO_CLOBBER)
	n1++;
      while (n1 < n2 && mos[n2].type != MO_CLOBBER)
	n2--;
      if (n1 < n2)
	std::swap (mos[n1], mos[n2]);
    }
}
6673 
6674 static enum var_init_status
find_src_status(dataflow_set * in,rtx src)6675 find_src_status (dataflow_set *in, rtx src)
6676 {
6677   tree decl = NULL_TREE;
6678   enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
6679 
6680   if (! flag_var_tracking_uninit)
6681     status = VAR_INIT_STATUS_INITIALIZED;
6682 
6683   if (src && REG_P (src))
6684     decl = var_debug_decl (REG_EXPR (src));
6685   else if (src && MEM_P (src))
6686     decl = var_debug_decl (MEM_EXPR (src));
6687 
6688   if (src && decl)
6689     status = get_init_value (in, src, dv_from_decl (decl));
6690 
6691   return status;
6692 }
6693 
6694 /* SRC is the source of an assignment.  Use SET to try to find what
6695    was ultimately assigned to SRC.  Return that value if known,
6696    otherwise return SRC itself.  */
6697 
6698 static rtx
find_src_set_src(dataflow_set * set,rtx src)6699 find_src_set_src (dataflow_set *set, rtx src)
6700 {
6701   tree decl = NULL_TREE;   /* The variable being copied around.          */
6702   rtx set_src = NULL_RTX;  /* The value for "decl" stored in "src".      */
6703   variable *var;
6704   location_chain *nextp;
6705   int i;
6706   bool found;
6707 
6708   if (src && REG_P (src))
6709     decl = var_debug_decl (REG_EXPR (src));
6710   else if (src && MEM_P (src))
6711     decl = var_debug_decl (MEM_EXPR (src));
6712 
6713   if (src && decl)
6714     {
6715       decl_or_value dv = dv_from_decl (decl);
6716 
6717       var = shared_hash_find (set->vars, dv);
6718       if (var)
6719 	{
6720 	  found = false;
6721 	  for (i = 0; i < var->n_var_parts && !found; i++)
6722 	    for (nextp = var->var_part[i].loc_chain; nextp && !found;
6723 		 nextp = nextp->next)
6724 	      if (rtx_equal_p (nextp->loc, src))
6725 		{
6726 		  set_src = nextp->set_src;
6727 		  found = true;
6728 		}
6729 
6730 	}
6731     }
6732 
6733   return set_src;
6734 }
6735 
6736 /* Compute the changes of variable locations in the basic block BB.  */
6737 
6738 static bool
compute_bb_dataflow(basic_block bb)6739 compute_bb_dataflow (basic_block bb)
6740 {
6741   unsigned int i;
6742   micro_operation *mo;
6743   bool changed;
6744   dataflow_set old_out;
6745   dataflow_set *in = &VTI (bb)->in;
6746   dataflow_set *out = &VTI (bb)->out;
6747 
6748   dataflow_set_init (&old_out);
6749   dataflow_set_copy (&old_out, out);
6750   dataflow_set_copy (out, in);
6751 
6752   if (MAY_HAVE_DEBUG_BIND_INSNS)
6753     local_get_addr_cache = new hash_map<rtx, rtx>;
6754 
6755   FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
6756     {
6757       rtx_insn *insn = mo->insn;
6758 
6759       switch (mo->type)
6760 	{
6761 	  case MO_CALL:
6762 	    dataflow_set_clear_at_call (out, insn);
6763 	    break;
6764 
6765 	  case MO_USE:
6766 	    {
6767 	      rtx loc = mo->u.loc;
6768 
6769 	      if (REG_P (loc))
6770 		var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6771 	      else if (MEM_P (loc))
6772 		var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6773 	    }
6774 	    break;
6775 
6776 	  case MO_VAL_LOC:
6777 	    {
6778 	      rtx loc = mo->u.loc;
6779 	      rtx val, vloc;
6780 	      tree var;
6781 
6782 	      if (GET_CODE (loc) == CONCAT)
6783 		{
6784 		  val = XEXP (loc, 0);
6785 		  vloc = XEXP (loc, 1);
6786 		}
6787 	      else
6788 		{
6789 		  val = NULL_RTX;
6790 		  vloc = loc;
6791 		}
6792 
6793 	      var = PAT_VAR_LOCATION_DECL (vloc);
6794 
6795 	      clobber_variable_part (out, NULL_RTX,
6796 				     dv_from_decl (var), 0, NULL_RTX);
6797 	      if (val)
6798 		{
6799 		  if (VAL_NEEDS_RESOLUTION (loc))
6800 		    val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6801 		  set_variable_part (out, val, dv_from_decl (var), 0,
6802 				     VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6803 				     INSERT);
6804 		}
6805 	      else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
6806 		set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
6807 				   dv_from_decl (var), 0,
6808 				   VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6809 				   INSERT);
6810 	    }
6811 	    break;
6812 
6813 	  case MO_VAL_USE:
6814 	    {
6815 	      rtx loc = mo->u.loc;
6816 	      rtx val, vloc, uloc;
6817 
6818 	      vloc = uloc = XEXP (loc, 1);
6819 	      val = XEXP (loc, 0);
6820 
6821 	      if (GET_CODE (val) == CONCAT)
6822 		{
6823 		  uloc = XEXP (val, 1);
6824 		  val = XEXP (val, 0);
6825 		}
6826 
6827 	      if (VAL_NEEDS_RESOLUTION (loc))
6828 		val_resolve (out, val, vloc, insn);
6829 	      else
6830 		val_store (out, val, uloc, insn, false);
6831 
6832 	      if (VAL_HOLDS_TRACK_EXPR (loc))
6833 		{
6834 		  if (GET_CODE (uloc) == REG)
6835 		    var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6836 				 NULL);
6837 		  else if (GET_CODE (uloc) == MEM)
6838 		    var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6839 				 NULL);
6840 		}
6841 	    }
6842 	    break;
6843 
6844 	  case MO_VAL_SET:
6845 	    {
6846 	      rtx loc = mo->u.loc;
6847 	      rtx val, vloc, uloc;
6848 	      rtx dstv, srcv;
6849 
6850 	      vloc = loc;
6851 	      uloc = XEXP (vloc, 1);
6852 	      val = XEXP (vloc, 0);
6853 	      vloc = uloc;
6854 
6855 	      if (GET_CODE (uloc) == SET)
6856 		{
6857 		  dstv = SET_DEST (uloc);
6858 		  srcv = SET_SRC (uloc);
6859 		}
6860 	      else
6861 		{
6862 		  dstv = uloc;
6863 		  srcv = NULL;
6864 		}
6865 
6866 	      if (GET_CODE (val) == CONCAT)
6867 		{
6868 		  dstv = vloc = XEXP (val, 1);
6869 		  val = XEXP (val, 0);
6870 		}
6871 
6872 	      if (GET_CODE (vloc) == SET)
6873 		{
6874 		  srcv = SET_SRC (vloc);
6875 
6876 		  gcc_assert (val != srcv);
6877 		  gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6878 
6879 		  dstv = vloc = SET_DEST (vloc);
6880 
6881 		  if (VAL_NEEDS_RESOLUTION (loc))
6882 		    val_resolve (out, val, srcv, insn);
6883 		}
6884 	      else if (VAL_NEEDS_RESOLUTION (loc))
6885 		{
6886 		  gcc_assert (GET_CODE (uloc) == SET
6887 			      && GET_CODE (SET_SRC (uloc)) == REG);
6888 		  val_resolve (out, val, SET_SRC (uloc), insn);
6889 		}
6890 
6891 	      if (VAL_HOLDS_TRACK_EXPR (loc))
6892 		{
6893 		  if (VAL_EXPR_IS_CLOBBERED (loc))
6894 		    {
6895 		      if (REG_P (uloc))
6896 			var_reg_delete (out, uloc, true);
6897 		      else if (MEM_P (uloc))
6898 			{
6899 			  gcc_assert (MEM_P (dstv));
6900 			  gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
6901 			  var_mem_delete (out, dstv, true);
6902 			}
6903 		    }
6904 		  else
6905 		    {
6906 		      bool copied_p = VAL_EXPR_IS_COPIED (loc);
6907 		      rtx src = NULL, dst = uloc;
6908 		      enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6909 
6910 		      if (GET_CODE (uloc) == SET)
6911 			{
6912 			  src = SET_SRC (uloc);
6913 			  dst = SET_DEST (uloc);
6914 			}
6915 
6916 		      if (copied_p)
6917 			{
6918 			  if (flag_var_tracking_uninit)
6919 			    {
6920 			      status = find_src_status (in, src);
6921 
6922 			      if (status == VAR_INIT_STATUS_UNKNOWN)
6923 				status = find_src_status (out, src);
6924 			    }
6925 
6926 			  src = find_src_set_src (in, src);
6927 			}
6928 
6929 		      if (REG_P (dst))
6930 			var_reg_delete_and_set (out, dst, !copied_p,
6931 						status, srcv);
6932 		      else if (MEM_P (dst))
6933 			{
6934 			  gcc_assert (MEM_P (dstv));
6935 			  gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
6936 			  var_mem_delete_and_set (out, dstv, !copied_p,
6937 						  status, srcv);
6938 			}
6939 		    }
6940 		}
6941 	      else if (REG_P (uloc))
6942 		var_regno_delete (out, REGNO (uloc));
6943 	      else if (MEM_P (uloc))
6944 		{
6945 		  gcc_checking_assert (GET_CODE (vloc) == MEM);
6946 		  gcc_checking_assert (dstv == vloc);
6947 		  if (dstv != vloc)
6948 		    clobber_overlapping_mems (out, vloc);
6949 		}
6950 
6951 	      val_store (out, val, dstv, insn, true);
6952 	    }
6953 	    break;
6954 
6955 	  case MO_SET:
6956 	    {
6957 	      rtx loc = mo->u.loc;
6958 	      rtx set_src = NULL;
6959 
6960 	      if (GET_CODE (loc) == SET)
6961 		{
6962 		  set_src = SET_SRC (loc);
6963 		  loc = SET_DEST (loc);
6964 		}
6965 
6966 	      if (REG_P (loc))
6967 		var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6968 					set_src);
6969 	      else if (MEM_P (loc))
6970 		var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6971 					set_src);
6972 	    }
6973 	    break;
6974 
6975 	  case MO_COPY:
6976 	    {
6977 	      rtx loc = mo->u.loc;
6978 	      enum var_init_status src_status;
6979 	      rtx set_src = NULL;
6980 
6981 	      if (GET_CODE (loc) == SET)
6982 		{
6983 		  set_src = SET_SRC (loc);
6984 		  loc = SET_DEST (loc);
6985 		}
6986 
6987 	      if (! flag_var_tracking_uninit)
6988 		src_status = VAR_INIT_STATUS_INITIALIZED;
6989 	      else
6990 		{
6991 		  src_status = find_src_status (in, set_src);
6992 
6993 		  if (src_status == VAR_INIT_STATUS_UNKNOWN)
6994 		    src_status = find_src_status (out, set_src);
6995 		}
6996 
6997 	      set_src = find_src_set_src (in, set_src);
6998 
6999 	      if (REG_P (loc))
7000 		var_reg_delete_and_set (out, loc, false, src_status, set_src);
7001 	      else if (MEM_P (loc))
7002 		var_mem_delete_and_set (out, loc, false, src_status, set_src);
7003 	    }
7004 	    break;
7005 
7006 	  case MO_USE_NO_VAR:
7007 	    {
7008 	      rtx loc = mo->u.loc;
7009 
7010 	      if (REG_P (loc))
7011 		var_reg_delete (out, loc, false);
7012 	      else if (MEM_P (loc))
7013 		var_mem_delete (out, loc, false);
7014 	    }
7015 	    break;
7016 
7017 	  case MO_CLOBBER:
7018 	    {
7019 	      rtx loc = mo->u.loc;
7020 
7021 	      if (REG_P (loc))
7022 		var_reg_delete (out, loc, true);
7023 	      else if (MEM_P (loc))
7024 		var_mem_delete (out, loc, true);
7025 	    }
7026 	    break;
7027 
7028 	  case MO_ADJUST:
7029 	    out->stack_adjust += mo->u.adjust;
7030 	    break;
7031 	}
7032     }
7033 
7034   if (MAY_HAVE_DEBUG_BIND_INSNS)
7035     {
7036       delete local_get_addr_cache;
7037       local_get_addr_cache = NULL;
7038 
7039       dataflow_set_equiv_regs (out);
7040       shared_hash_htab (out->vars)
7041 	->traverse <dataflow_set *, canonicalize_values_mark> (out);
7042       shared_hash_htab (out->vars)
7043 	->traverse <dataflow_set *, canonicalize_values_star> (out);
7044       if (flag_checking)
7045 	shared_hash_htab (out->vars)
7046 	  ->traverse <dataflow_set *, canonicalize_loc_order_check> (out);
7047     }
7048   changed = dataflow_set_different (&old_out, out);
7049   dataflow_set_destroy (&old_out);
7050   return changed;
7051 }
7052 
7053 /* Find the locations of variables in the whole function.  */
7054 
static bool
vt_find_locations (void)
{
  /* Two priority queues implement a two-phase worklist: WORKLIST holds
     the blocks of the current iteration round, PENDING those deferred
     to the next round.  Both are keyed by reverse completion order so
     blocks are visited in an order favorable to fast convergence.  */
  bb_heap_t *worklist = new bb_heap_t (LONG_MIN);
  bb_heap_t *pending = new bb_heap_t (LONG_MIN);
  sbitmap in_worklist, in_pending;
  basic_block bb;
  edge e;
  int *bb_order;
  int *rc_order;
  int i;
  int htabsz = 0;	/* Running total of all blocks' IN/OUT htab sizes.  */
  int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
  bool success = true;

  timevar_push (TV_VAR_TRACKING_DATAFLOW);
  /* Compute reverse completion order of depth first search of the CFG
     so that the data-flow runs faster.  */
  rc_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
  bb_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
  pre_and_rev_post_order_compute (NULL, rc_order, false);
  /* Invert RC_ORDER into a block-index -> priority map.  */
  for (i = 0; i < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; i++)
    bb_order[rc_order[i]] = i;
  free (rc_order);

  auto_sbitmap visited (last_basic_block_for_fn (cfun));
  in_worklist = sbitmap_alloc (last_basic_block_for_fn (cfun));
  in_pending = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (in_worklist);

  /* Seed the first round with every basic block.  */
  FOR_EACH_BB_FN (bb, cfun)
    pending->insert (bb_order[bb->index], bb);
  bitmap_ones (in_pending);

  while (success && !pending->empty ())
    {
      /* Start a new round: last round's deferred blocks become the
	 current worklist.  */
      std::swap (worklist, pending);
      std::swap (in_worklist, in_pending);

      bitmap_clear (visited);

      while (!worklist->empty ())
	{
	  bb = worklist->extract_min ();
	  bitmap_clear_bit (in_worklist, bb->index);
	  /* A block is queued at most once per round, so it can never
	     already be marked visited here; the `if' merely keeps the
	     check alive in release builds where the assert is a no-op.  */
	  gcc_assert (!bitmap_bit_p (visited, bb->index));
	  if (!bitmap_bit_p (visited, bb->index))
	    {
	      bool changed;
	      edge_iterator ei;
	      int oldinsz, oldoutsz;

	      bitmap_set_bit (visited, bb->index);

	      /* Retire this block's previous contribution to the global
		 size accounting; it is re-added after recomputation.  */
	      if (VTI (bb)->in.vars)
		{
		  htabsz
		    -= shared_hash_htab (VTI (bb)->in.vars)->size ()
			+ shared_hash_htab (VTI (bb)->out.vars)->size ();
		  oldinsz = shared_hash_htab (VTI (bb)->in.vars)->elements ();
		  oldoutsz
		    = shared_hash_htab (VTI (bb)->out.vars)->elements ();
		}
	      else
		oldinsz = oldoutsz = 0;

	      if (MAY_HAVE_DEBUG_BIND_INSNS)
		{
		  dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
		  bool first = true, adjust = false;

		  /* Calculate the IN set as the intersection of
		     predecessor OUT sets.  */

		  dataflow_set_clear (in);
		  dst_can_be_shared = true;

		  /* Predecessors not yet flooded must come later in
		     BB_ORDER (back edges); they contribute nothing.  */
		  FOR_EACH_EDGE (e, ei, bb->preds)
		    if (!VTI (e->src)->flooded)
		      gcc_assert (bb_order[bb->index]
				  <= bb_order[e->src->index]);
		    else if (first)
		      {
			dataflow_set_copy (in, &VTI (e->src)->out);
			first_out = &VTI (e->src)->out;
			first = false;
		      }
		    else
		      {
			dataflow_set_merge (in, &VTI (e->src)->out);
			adjust = true;
		      }

		  if (adjust)
		    {
		      dataflow_post_merge_adjust (in, &VTI (bb)->permp);

		      if (flag_checking)
			/* Merge and merge_adjust should keep entries in
			   canonical order.  */
			shared_hash_htab (in->vars)
			  ->traverse <dataflow_set *,
				      canonicalize_loc_order_check> (in);

		      /* If the merged result ended up equivalent to the
			 first predecessor's OUT, share its table instead
			 of keeping a private copy.  */
		      if (dst_can_be_shared)
			{
			  shared_hash_destroy (in->vars);
			  in->vars = shared_hash_copy (first_out->vars);
			}
		    }

		  VTI (bb)->flooded = true;
		}
	      else
		{
		  /* Calculate the IN set as union of predecessor OUT sets.  */
		  dataflow_set_clear (&VTI (bb)->in);
		  FOR_EACH_EDGE (e, ei, bb->preds)
		    dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
		}

	      changed = compute_bb_dataflow (bb);
	      htabsz += shared_hash_htab (VTI (bb)->in.vars)->size ()
			 + shared_hash_htab (VTI (bb)->out.vars)->size ();

	      /* Give up when the tables grow beyond the param limit;
		 the caller can retry with cheaper tracking.  */
	      if (htabmax && htabsz > htabmax)
		{
		  if (MAY_HAVE_DEBUG_BIND_INSNS)
		    inform (DECL_SOURCE_LOCATION (cfun->decl),
			    "variable tracking size limit exceeded with "
			    "%<-fvar-tracking-assignments%>, retrying without");
		  else
		    inform (DECL_SOURCE_LOCATION (cfun->decl),
			    "variable tracking size limit exceeded");
		  success = false;
		  break;
		}

	      /* OUT changed, so successors must be recomputed: blocks
		 already visited this round go to the next round, the
		 rest join the current one.  */
	      if (changed)
		{
		  FOR_EACH_EDGE (e, ei, bb->succs)
		    {
		      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
			continue;

		      if (bitmap_bit_p (visited, e->dest->index))
			{
			  if (!bitmap_bit_p (in_pending, e->dest->index))
			    {
			      /* Send E->DEST to next round.  */
			      bitmap_set_bit (in_pending, e->dest->index);
			      pending->insert (bb_order[e->dest->index],
					       e->dest);
			    }
			}
		      else if (!bitmap_bit_p (in_worklist, e->dest->index))
			{
			  /* Add E->DEST to current round.  */
			  bitmap_set_bit (in_worklist, e->dest->index);
			  worklist->insert (bb_order[e->dest->index],
					    e->dest);
			}
		    }
		}

	      if (dump_file)
		fprintf (dump_file,
			 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
			 bb->index,
			 (int)shared_hash_htab (VTI (bb)->in.vars)->size (),
			 oldinsz,
			 (int)shared_hash_htab (VTI (bb)->out.vars)->size (),
			 oldoutsz,
			 (int)worklist->nodes (), (int)pending->nodes (),
			 htabsz);

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "BB %i IN:\n", bb->index);
		  dump_dataflow_set (&VTI (bb)->in);
		  fprintf (dump_file, "BB %i OUT:\n", bb->index);
		  dump_dataflow_set (&VTI (bb)->out);
		}
	    }
	}
    }

  /* In the debug-bind mode every reachable block must have been
     flooded at least once.  */
  if (success && MAY_HAVE_DEBUG_BIND_INSNS)
    FOR_EACH_BB_FN (bb, cfun)
      gcc_assert (VTI (bb)->flooded);

  free (bb_order);
  delete worklist;
  delete pending;
  sbitmap_free (in_worklist);
  sbitmap_free (in_pending);

  timevar_pop (TV_VAR_TRACKING_DATAFLOW);
  return success;
}
7255 
7256 /* Print the content of the LIST to dump file.  */
7257 
7258 static void
dump_attrs_list(attrs * list)7259 dump_attrs_list (attrs *list)
7260 {
7261   for (; list; list = list->next)
7262     {
7263       if (dv_is_decl_p (list->dv))
7264 	print_mem_expr (dump_file, dv_as_decl (list->dv));
7265       else
7266 	print_rtl_single (dump_file, dv_as_value (list->dv));
7267       fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
7268     }
7269   fprintf (dump_file, "\n");
7270 }
7271 
7272 /* Print the information about variable *SLOT to dump file.  */
7273 
7274 int
dump_var_tracking_slot(variable ** slot,void * data ATTRIBUTE_UNUSED)7275 dump_var_tracking_slot (variable **slot, void *data ATTRIBUTE_UNUSED)
7276 {
7277   variable *var = *slot;
7278 
7279   dump_var (var);
7280 
7281   /* Continue traversing the hash table.  */
7282   return 1;
7283 }
7284 
7285 /* Print the information about variable VAR to dump file.  */
7286 
7287 static void
dump_var(variable * var)7288 dump_var (variable *var)
7289 {
7290   int i;
7291   location_chain *node;
7292 
7293   if (dv_is_decl_p (var->dv))
7294     {
7295       const_tree decl = dv_as_decl (var->dv);
7296 
7297       if (DECL_NAME (decl))
7298 	{
7299 	  fprintf (dump_file, "  name: %s",
7300 		   IDENTIFIER_POINTER (DECL_NAME (decl)));
7301 	  if (dump_flags & TDF_UID)
7302 	    fprintf (dump_file, "D.%u", DECL_UID (decl));
7303 	}
7304       else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
7305 	fprintf (dump_file, "  name: D#%u", DEBUG_TEMP_UID (decl));
7306       else
7307 	fprintf (dump_file, "  name: D.%u", DECL_UID (decl));
7308       fprintf (dump_file, "\n");
7309     }
7310   else
7311     {
7312       fputc (' ', dump_file);
7313       print_rtl_single (dump_file, dv_as_value (var->dv));
7314     }
7315 
7316   for (i = 0; i < var->n_var_parts; i++)
7317     {
7318       fprintf (dump_file, "    offset %ld\n",
7319 	       (long)(var->onepart ? 0 : VAR_PART_OFFSET (var, i)));
7320       for (node = var->var_part[i].loc_chain; node; node = node->next)
7321 	{
7322 	  fprintf (dump_file, "      ");
7323 	  if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
7324 	    fprintf (dump_file, "[uninit]");
7325 	  print_rtl_single (dump_file, node->loc);
7326 	}
7327     }
7328 }
7329 
7330 /* Print the information about variables from hash table VARS to dump file.  */
7331 
7332 static void
dump_vars(variable_table_type * vars)7333 dump_vars (variable_table_type *vars)
7334 {
7335   if (vars->elements () > 0)
7336     {
7337       fprintf (dump_file, "Variables:\n");
7338       vars->traverse <void *, dump_var_tracking_slot> (NULL);
7339     }
7340 }
7341 
7342 /* Print the dataflow set SET to dump file.  */
7343 
7344 static void
dump_dataflow_set(dataflow_set * set)7345 dump_dataflow_set (dataflow_set *set)
7346 {
7347   int i;
7348 
7349   fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
7350 	   set->stack_adjust);
7351   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7352     {
7353       if (set->regs[i])
7354 	{
7355 	  fprintf (dump_file, "Reg %d:", i);
7356 	  dump_attrs_list (set->regs[i]);
7357 	}
7358     }
7359   dump_vars (shared_hash_htab (set->vars));
7360   fprintf (dump_file, "\n");
7361 }
7362 
7363 /* Print the IN and OUT sets for each basic block to dump file.  */
7364 
static void
dump_dataflow_sets (void)
{
  basic_block bb;

  /* Walk every basic block of the current function and dump both of
     its dataflow sets, as computed by vt_find_locations.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      fprintf (dump_file, "\nBasic block %d:\n", bb->index);
      fprintf (dump_file, "IN:\n");
      dump_dataflow_set (&VTI (bb)->in);
      fprintf (dump_file, "OUT:\n");
      dump_dataflow_set (&VTI (bb)->out);
    }
}
7379 
7380 /* Return the variable for DV in dropped_values, inserting one if
7381    requested with INSERT.  */
7382 
7383 static inline variable *
variable_from_dropped(decl_or_value dv,enum insert_option insert)7384 variable_from_dropped (decl_or_value dv, enum insert_option insert)
7385 {
7386   variable **slot;
7387   variable *empty_var;
7388   onepart_enum onepart;
7389 
7390   slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv), insert);
7391 
7392   if (!slot)
7393     return NULL;
7394 
7395   if (*slot)
7396     return *slot;
7397 
7398   gcc_checking_assert (insert == INSERT);
7399 
7400   onepart = dv_onepart_p (dv);
7401 
7402   gcc_checking_assert (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR);
7403 
7404   empty_var = onepart_pool_allocate (onepart);
7405   empty_var->dv = dv;
7406   empty_var->refcount = 1;
7407   empty_var->n_var_parts = 0;
7408   empty_var->onepart = onepart;
7409   empty_var->in_changed_variables = false;
7410   empty_var->var_part[0].loc_chain = NULL;
7411   empty_var->var_part[0].cur_loc = NULL;
7412   VAR_LOC_1PAUX (empty_var) = NULL;
7413   set_dv_changed (dv, true);
7414 
7415   *slot = empty_var;
7416 
7417   return empty_var;
7418 }
7419 
7420 /* Recover the one-part aux from dropped_values.  */
7421 
7422 static struct onepart_aux *
recover_dropped_1paux(variable * var)7423 recover_dropped_1paux (variable *var)
7424 {
7425   variable *dvar;
7426 
7427   gcc_checking_assert (var->onepart);
7428 
7429   if (VAR_LOC_1PAUX (var))
7430     return VAR_LOC_1PAUX (var);
7431 
7432   if (var->onepart == ONEPART_VDECL)
7433     return NULL;
7434 
7435   dvar = variable_from_dropped (var->dv, NO_INSERT);
7436 
7437   if (!dvar)
7438     return NULL;
7439 
7440   VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (dvar);
7441   VAR_LOC_1PAUX (dvar) = NULL;
7442 
7443   return VAR_LOC_1PAUX (var);
7444 }
7445 
7446 /* Add variable VAR to the hash table of changed variables and
7447    if it has no locations delete it from SET's hash table.  */
7448 
static void
variable_was_changed (variable *var, dataflow_set *set)
{
  hashval_t hash = dv_htab_hash (var->dv);

  if (emit_notes)
    {
      variable **slot;

      /* Remember this decl or VALUE has been added to changed_variables.  */
      set_dv_changed (var->dv, true);

      slot = changed_variables->find_slot_with_hash (var->dv, hash, INSERT);

      /* An entry for DV was already queued: release it, rescuing its
	 one-part auxiliary info first.  */
      if (*slot)
	{
	  variable *old_var = *slot;
	  gcc_assert (old_var->in_changed_variables);
	  old_var->in_changed_variables = false;
	  if (var != old_var && var->onepart)
	    {
	      /* Restore the auxiliary info from an empty variable
		 previously created for changed_variables, so it is
		 not lost.  */
	      gcc_checking_assert (!VAR_LOC_1PAUX (var));
	      VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (old_var);
	      VAR_LOC_1PAUX (old_var) = NULL;
	    }
	  variable_htab_free (*slot);
	}

      /* A variable left with no locations is queued as an empty
	 variable and dropped from SET (at drop_var below).  */
      if (set && var->n_var_parts == 0)
	{
	  onepart_enum onepart = var->onepart;
	  variable *empty_var = NULL;
	  variable **dslot = NULL;

	  if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR)
	    {
	      /* Share the empty variable with the dropped_values entry
		 for DV, creating that entry if needed.  */
	      dslot = dropped_values->find_slot_with_hash (var->dv,
							   dv_htab_hash (var->dv),
							   INSERT);
	      empty_var = *dslot;

	      if (empty_var)
		{
		  gcc_checking_assert (!empty_var->in_changed_variables);
		  /* Exactly one of the two variables may carry the
		     auxiliary info; keep it on VAR.  */
		  if (!VAR_LOC_1PAUX (var))
		    {
		      VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (empty_var);
		      VAR_LOC_1PAUX (empty_var) = NULL;
		    }
		  else
		    gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
		}
	    }

	  if (!empty_var)
	    {
	      /* No reusable entry: allocate a fresh empty variable.  */
	      empty_var = onepart_pool_allocate (onepart);
	      empty_var->dv = var->dv;
	      empty_var->refcount = 1;
	      empty_var->n_var_parts = 0;
	      empty_var->onepart = onepart;
	      if (dslot)
		{
		  /* dropped_values holds its own reference.  */
		  empty_var->refcount++;
		  *dslot = empty_var;
		}
	    }
	  else
	    empty_var->refcount++;
	  empty_var->in_changed_variables = true;
	  *slot = empty_var;
	  if (onepart)
	    {
	      /* Hand VAR's auxiliary info over to the queued entry.  */
	      empty_var->var_part[0].loc_chain = NULL;
	      empty_var->var_part[0].cur_loc = NULL;
	      VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (var);
	      VAR_LOC_1PAUX (var) = NULL;
	    }
	  goto drop_var;
	}
      else
	{
	  /* VAR still has locations: queue VAR itself, taking a
	     reference for changed_variables.  */
	  if (var->onepart && !VAR_LOC_1PAUX (var))
	    recover_dropped_1paux (var);
	  var->refcount++;
	  var->in_changed_variables = true;
	  *slot = var;
	}
    }
  else
    {
      gcc_assert (set);
      if (var->n_var_parts == 0)
	{
	  variable **slot;

	drop_var:
	  /* Remove the location-less variable from SET's hash table,
	     unsharing the table first if other sets still share it.  */
	  slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
	  if (slot)
	    {
	      if (shared_hash_shared (set->vars))
		slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
						      NO_INSERT);
	      shared_hash_htab (set->vars)->clear_slot (slot);
	    }
	}
    }
}
7560 
7561 /* Look for the index in VAR->var_part corresponding to OFFSET.
7562    Return -1 if not found.  If INSERTION_POINT is non-NULL, the
7563    referenced int will be set to the index that the part has or should
7564    have, if it should be inserted.  */
7565 
7566 static inline int
find_variable_location_part(variable * var,HOST_WIDE_INT offset,int * insertion_point)7567 find_variable_location_part (variable *var, HOST_WIDE_INT offset,
7568 			     int *insertion_point)
7569 {
7570   int pos, low, high;
7571 
7572   if (var->onepart)
7573     {
7574       if (offset != 0)
7575 	return -1;
7576 
7577       if (insertion_point)
7578 	*insertion_point = 0;
7579 
7580       return var->n_var_parts - 1;
7581     }
7582 
7583   /* Find the location part.  */
7584   low = 0;
7585   high = var->n_var_parts;
7586   while (low != high)
7587     {
7588       pos = (low + high) / 2;
7589       if (VAR_PART_OFFSET (var, pos) < offset)
7590 	low = pos + 1;
7591       else
7592 	high = pos;
7593     }
7594   pos = low;
7595 
7596   if (insertion_point)
7597     *insertion_point = pos;
7598 
7599   if (pos < var->n_var_parts && VAR_PART_OFFSET (var, pos) == offset)
7600     return pos;
7601 
7602   return -1;
7603 }
7604 
/* Add location LOC (with initialization status INITIALIZED and source
   SET_SRC) for the part of DV at OFFSET to the variable in *SLOT of
   SET's hash table, keeping the location chain in canonical order.
   Returns the slot, which may have moved if the variable had to be
   unshared.  */

static variable **
set_slot_part (dataflow_set *set, rtx loc, variable **slot,
	       decl_or_value dv, HOST_WIDE_INT offset,
	       enum var_init_status initialized, rtx set_src)
{
  int pos;
  location_chain *node, *next;
  location_chain **nextp;
  variable *var;
  onepart_enum onepart;

  var = *slot;

  if (var)
    onepart = var->onepart;
  else
    onepart = dv_onepart_p (dv);

  gcc_checking_assert (offset == 0 || !onepart);
  gcc_checking_assert (loc != dv_as_opaque (dv));

  /* Without -fvar-tracking-uninit everything counts as initialized.  */
  if (! flag_var_tracking_uninit)
    initialized = VAR_INIT_STATUS_INITIALIZED;

  if (!var)
    {
      /* Create new variable information.  */
      var = onepart_pool_allocate (onepart);
      var->dv = dv;
      var->refcount = 1;
      var->n_var_parts = 1;
      var->onepart = onepart;
      var->in_changed_variables = false;
      if (var->onepart)
	VAR_LOC_1PAUX (var) = NULL;
      else
	VAR_PART_OFFSET (var, 0) = offset;
      var->var_part[0].loc_chain = NULL;
      var->var_part[0].cur_loc = NULL;
      *slot = var;
      pos = 0;
      nextp = &var->var_part[0].loc_chain;
    }
  else if (onepart)
    {
      /* One-part variables keep their location chain in canonical
	 order.  Walk it to find where LOC belongs: afterwards R is 0
	 if LOC is already present, 1 if the insertion point *NEXTP
	 was found before the chain end, -1 if LOC sorts last; C counts
	 the entries skipped (used to recompute NEXTP after
	 unsharing).  */
      int r = -1, c = 0;

      gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));

      pos = 0;

      if (GET_CODE (loc) == VALUE)
	{
	  /* VALUEs sort among themselves by canon_value_cmp and come
	     after REGs and MEMs.  */
	  for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
	       nextp = &node->next)
	    if (GET_CODE (node->loc) == VALUE)
	      {
		if (node->loc == loc)
		  {
		    r = 0;
		    break;
		  }
		if (canon_value_cmp (node->loc, loc))
		  c++;
		else
		  {
		    r = 1;
		    break;
		  }
	      }
	    else if (REG_P (node->loc) || MEM_P (node->loc))
	      c++;
	    else
	      {
		r = 1;
		break;
	      }
	}
      else if (REG_P (loc))
	{
	  /* REGs come first, ordered by register number.  */
	  for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
	       nextp = &node->next)
	    if (REG_P (node->loc))
	      {
		if (REGNO (node->loc) < REGNO (loc))
		  c++;
		else
		  {
		    if (REGNO (node->loc) == REGNO (loc))
		      r = 0;
		    else
		      r = 1;
		    break;
		  }
	      }
	    else
	      {
		r = 1;
		break;
	      }
	}
      else if (MEM_P (loc))
	{
	  /* MEMs follow REGs, ordered by loc_cmp on their addresses.  */
	  for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
	       nextp = &node->next)
	    if (REG_P (node->loc))
	      c++;
	    else if (MEM_P (node->loc))
	      {
		if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
		  break;
		else
		  c++;
	      }
	    else
	      {
		r = 1;
		break;
	      }
	}
      else
	/* Anything else is ordered by loc_cmp.  */
	for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
	     nextp = &node->next)
	  if ((r = loc_cmp (node->loc, loc)) >= 0)
	    break;
	  else
	    c++;

      /* LOC is already in the chain; nothing to do.  */
      if (r == 0)
	return slot;

      if (shared_var_p (var, set->vars))
	{
	  slot = unshare_variable (set, slot, var, initialized);
	  var = *slot;
	  /* Re-derive the insertion point in the unshared copy by
	     skipping the C entries counted above.  */
	  for (nextp = &var->var_part[0].loc_chain; c;
	       nextp = &(*nextp)->next)
	    c--;
	  gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
	}
    }
  else
    {
      /* Multi-part variable: locate (or make room for) the part at
	 OFFSET.  */
      int inspos = 0;

      gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));

      pos = find_variable_location_part (var, offset, &inspos);

      if (pos >= 0)
	{
	  node = var->var_part[pos].loc_chain;

	  if (node
	      && ((REG_P (node->loc) && REG_P (loc)
		   && REGNO (node->loc) == REGNO (loc))
		  || rtx_equal_p (node->loc, loc)))
	    {
	      /* LOC is in the beginning of the chain so we have nothing
		 to do.  */
	      if (node->init < initialized)
		node->init = initialized;
	      if (set_src != NULL)
		node->set_src = set_src;

	      return slot;
	    }
	  else
	    {
	      /* We have to make a copy of a shared variable.  */
	      if (shared_var_p (var, set->vars))
		{
		  slot = unshare_variable (set, slot, var, initialized);
		  var = *slot;
		}
	    }
	}
      else
	{
	  /* We have not found the location part, new one will be created.  */

	  /* We have to make a copy of the shared variable.  */
	  if (shared_var_p (var, set->vars))
	    {
	      slot = unshare_variable (set, slot, var, initialized);
	      var = *slot;
	    }

	  /* We track only variables whose size is <= MAX_VAR_PARTS bytes
	     thus there are at most MAX_VAR_PARTS different offsets.  */
	  gcc_assert (var->n_var_parts < MAX_VAR_PARTS
		      && (!var->n_var_parts || !onepart));

	  /* We have to move the elements of array starting at index
	     inspos to the next position.  */
	  for (pos = var->n_var_parts; pos > inspos; pos--)
	    var->var_part[pos] = var->var_part[pos - 1];

	  var->n_var_parts++;
	  gcc_checking_assert (!onepart);
	  VAR_PART_OFFSET (var, pos) = offset;
	  var->var_part[pos].loc_chain = NULL;
	  var->var_part[pos].cur_loc = NULL;
	}

      /* Delete the location from the list.  */
      nextp = &var->var_part[pos].loc_chain;
      for (node = var->var_part[pos].loc_chain; node; node = next)
	{
	  next = node->next;
	  if ((REG_P (node->loc) && REG_P (loc)
	       && REGNO (node->loc) == REGNO (loc))
	      || rtx_equal_p (node->loc, loc))
	    {
	      /* Save these values, to assign to the new node, before
		 deleting this one.  */
	      if (node->init > initialized)
		initialized = node->init;
	      if (node->set_src != NULL && set_src == NULL)
		set_src = node->set_src;
	      if (var->var_part[pos].cur_loc == node->loc)
		var->var_part[pos].cur_loc = NULL;
	      delete node;
	      *nextp = next;
	      break;
	    }
	  else
	    nextp = &node->next;
	}

      nextp = &var->var_part[pos].loc_chain;
    }

  /* Add the location to the beginning.  */
  node = new location_chain;
  node->loc = loc;
  node->init = initialized;
  node->set_src = set_src;
  node->next = *nextp;
  *nextp = node;

  /* If no location was emitted do so.  */
  if (var->var_part[pos].cur_loc == NULL)
    variable_was_changed (var, set);

  return slot;
}
7852 
7853 /* Set the part of variable's location in the dataflow set SET.  The
7854    variable part is specified by variable's declaration in DV and
7855    offset OFFSET and the part's location by LOC.  IOPT should be
7856    NO_INSERT if the variable is known to be in SET already and the
7857    variable hash table must not be resized, and INSERT otherwise.  */
7858 
7859 static void
set_variable_part(dataflow_set * set,rtx loc,decl_or_value dv,HOST_WIDE_INT offset,enum var_init_status initialized,rtx set_src,enum insert_option iopt)7860 set_variable_part (dataflow_set *set, rtx loc,
7861 		   decl_or_value dv, HOST_WIDE_INT offset,
7862 		   enum var_init_status initialized, rtx set_src,
7863 		   enum insert_option iopt)
7864 {
7865   variable **slot;
7866 
7867   if (iopt == NO_INSERT)
7868     slot = shared_hash_find_slot_noinsert (set->vars, dv);
7869   else
7870     {
7871       slot = shared_hash_find_slot (set->vars, dv);
7872       if (!slot)
7873 	slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
7874     }
7875   set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
7876 }
7877 
7878 /* Remove all recorded register locations for the given variable part
7879    from dataflow set SET, except for those that are identical to loc.
7880    The variable part is specified by variable's declaration or value
7881    DV and offset OFFSET.  */
7882 
static variable **
clobber_slot_part (dataflow_set *set, rtx loc, variable **slot,
		   HOST_WIDE_INT offset, rtx set_src)
{
  variable *var = *slot;
  int pos = find_variable_location_part (var, offset, NULL);

  if (pos >= 0)
    {
      location_chain *node, *next;

      /* Remove the register locations from the dataflow set.  */
      next = var->var_part[pos].loc_chain;
      for (node = next; node; node = next)
	{
	  next = node->next;
	  /* Keep LOC itself; with -fvar-tracking-uninit also keep
	     locations whose recorded non-MEM set_src equals SET_SRC.  */
	  if (node->loc != loc
	      && (!flag_var_tracking_uninit
		  || !set_src
		  || MEM_P (set_src)
		  || !rtx_equal_p (set_src, node->set_src)))
	    {
	      if (REG_P (node->loc))
		{
		  attrs *anode, *anext;
		  attrs **anextp;

		  /* Remove the variable part from the register's
		     list, but preserve any other variable parts
		     that might be regarded as live in that same
		     register.  */
		  anextp = &set->regs[REGNO (node->loc)];
		  for (anode = *anextp; anode; anode = anext)
		    {
		      anext = anode->next;
		      if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
			  && anode->offset == offset)
			{
			  delete anode;
			  *anextp = anext;
			}
		      else
			anextp = &anode->next;
		    }
		}

	      /* Drop the location from the variable part; this may
		 unshare the variable and thus move SLOT.  */
	      slot = delete_slot_part (set, node->loc, slot, offset);
	    }
	}
    }

  return slot;
}
7936 
7937 /* Remove all recorded register locations for the given variable part
7938    from dataflow set SET, except for those that are identical to loc.
7939    The variable part is specified by variable's declaration or value
7940    DV and offset OFFSET.  */
7941 
7942 static void
clobber_variable_part(dataflow_set * set,rtx loc,decl_or_value dv,HOST_WIDE_INT offset,rtx set_src)7943 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7944 		       HOST_WIDE_INT offset, rtx set_src)
7945 {
7946   variable **slot;
7947 
7948   if (!dv_as_opaque (dv)
7949       || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
7950     return;
7951 
7952   slot = shared_hash_find_slot_noinsert (set->vars, dv);
7953   if (!slot)
7954     return;
7955 
7956   clobber_slot_part (set, loc, slot, offset, set_src);
7957 }
7958 
/* Delete the part of variable's location from dataflow set SET.  The
   variable part is specified by its SET->vars slot SLOT and offset
   OFFSET and the part's location by LOC.  Return the slot, which may
   have been relocated if the variable had to be unshared.  */

static variable **
delete_slot_part (dataflow_set *set, rtx loc, variable **slot,
		  HOST_WIDE_INT offset)
{
  variable *var = *slot;
  int pos = find_variable_location_part (var, offset, NULL);

  if (pos >= 0)
    {
      location_chain *node, *next;
      location_chain **nextp;
      bool changed;
      rtx cur_loc;

      if (shared_var_p (var, set->vars))
	{
	  /* If the variable contains the location part we have to
	     make a copy of the variable.  */
	  for (node = var->var_part[pos].loc_chain; node;
	       node = node->next)
	    {
	      if ((REG_P (node->loc) && REG_P (loc)
		   && REGNO (node->loc) == REGNO (loc))
		  || rtx_equal_p (node->loc, loc))
		{
		  slot = unshare_variable (set, slot, var,
					   VAR_INIT_STATUS_UNKNOWN);
		  var = *slot;
		  break;
		}
	    }
	}

      /* For a one-part variable with auxiliary data, the last-emitted
	 location lives in VAR_LOC_FROM rather than cur_loc.  */
      if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
	cur_loc = VAR_LOC_FROM (var);
      else
	cur_loc = var->var_part[pos].cur_loc;

      /* Delete the location part.  */
      changed = false;
      nextp = &var->var_part[pos].loc_chain;
      for (node = *nextp; node; node = next)
	{
	  next = node->next;
	  /* Registers are matched by number, everything else by rtx
	     equality.  */
	  if ((REG_P (node->loc) && REG_P (loc)
	       && REGNO (node->loc) == REGNO (loc))
	      || rtx_equal_p (node->loc, loc))
	    {
	      /* If we have deleted the location which was last emitted
		 we have to emit new location so add the variable to set
		 of changed variables.  */
	      if (cur_loc == node->loc)
		{
		  changed = true;
		  var->var_part[pos].cur_loc = NULL;
		  if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
		    VAR_LOC_FROM (var) = NULL;
		}
	      delete node;
	      *nextp = next;
	      break;
	    }
	  else
	    nextp = &node->next;
	}

      if (var->var_part[pos].loc_chain == NULL)
	{
	  /* The part lost its last location: drop it and shift the
	     remaining parts down to keep the array dense.  */
	  changed = true;
	  var->n_var_parts--;
	  while (pos < var->n_var_parts)
	    {
	      var->var_part[pos] = var->var_part[pos + 1];
	      pos++;
	    }
	}
      if (changed)
	variable_was_changed (var, set);
    }

  return slot;
}
8045 
8046 /* Delete the part of variable's location from dataflow set SET.  The
8047    variable part is specified by variable's declaration or value DV
8048    and offset OFFSET and the part's location by LOC.  */
8049 
8050 static void
delete_variable_part(dataflow_set * set,rtx loc,decl_or_value dv,HOST_WIDE_INT offset)8051 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
8052 		      HOST_WIDE_INT offset)
8053 {
8054   variable **slot = shared_hash_find_slot_noinsert (set->vars, dv);
8055   if (!slot)
8056     return;
8057 
8058   delete_slot_part (set, loc, slot, offset);
8059 }
8060 
8061 
/* Structure for passing some other parameters to function
   vt_expand_loc_callback.  Instances are set up with INIT_ELCD and
   torn down with FINI_ELCD.  */
struct expand_loc_callback_data
{
  /* The variables and values active at this point.  */
  variable_table_type *vars;

  /* Stack of values and debug_exprs under expansion, and their
     children.  */
  auto_vec<rtx, 4> expanding;

  /* Stack of values and debug_exprs whose expansion hit recursion
     cycles.  They will have VALUE_RECURSED_INTO marked when added to
     this list.  This flag will be cleared if any of its dependencies
     resolves to a valid location.  So, if the flag remains set at the
     end of the search, we know no valid location for this one can
     possibly exist.  */
  auto_vec<rtx, 4> pending;

  /* The maximum depth among the sub-expressions under expansion.
     Zero indicates no expansion so far.  */
  expand_depth depth;
};
8085 
/* Allocate the one-part auxiliary data structure for VAR, with enough
   room for COUNT dependencies.  */

static void
loc_exp_dep_alloc (variable *var, int count)
{
  size_t allocsize;

  gcc_checking_assert (var->onepart);

  /* We can be called with COUNT == 0 to allocate the data structure
     without any dependencies, e.g. for the backlinks only.  However,
     if we are specifying a COUNT, then the dependency list must have
     been emptied before.  It would be possible to adjust pointers or
     force it empty here, but this is better done at an earlier point
     in the algorithm, so we instead leave an assertion to catch
     errors.  */
  gcc_checking_assert (!count
		       || VAR_LOC_DEP_VEC (var) == NULL
		       || VAR_LOC_DEP_VEC (var)->is_empty ());

  /* Reuse the current allocation if it already has room for COUNT
     dependencies.  */
  if (VAR_LOC_1PAUX (var) && VAR_LOC_DEP_VEC (var)->space (count))
    return;

  /* The dependency vector is embedded at the tail of onepart_aux, so
     size the block for the header plus COUNT embedded elements.  */
  allocsize = offsetof (struct onepart_aux, deps)
	      + vec<loc_exp_dep, va_heap, vl_embed>::embedded_size (count);

  if (VAR_LOC_1PAUX (var))
    {
      VAR_LOC_1PAUX (var) = XRESIZEVAR (struct onepart_aux,
					VAR_LOC_1PAUX (var), allocsize);
      /* If the reallocation moves the onepaux structure, the
	 back-pointer to BACKLINKS in the first list member will still
	 point to its old location.  Adjust it.  */
      if (VAR_LOC_DEP_LST (var))
	VAR_LOC_DEP_LST (var)->pprev = VAR_LOC_DEP_LSTP (var);
    }
  else
    {
      /* Fresh allocation: clear the backlinks list head and the
	 cached expansion state.  */
      VAR_LOC_1PAUX (var) = XNEWVAR (struct onepart_aux, allocsize);
      *VAR_LOC_DEP_LSTP (var) = NULL;
      VAR_LOC_FROM (var) = NULL;
      VAR_LOC_DEPTH (var).complexity = 0;
      VAR_LOC_DEPTH (var).entryvals = 0;
    }
  VAR_LOC_DEP_VEC (var)->embedded_init (count);
}
8133 
8134 /* Remove all entries from the vector of active dependencies of VAR,
8135    removing them from the back-links lists too.  */
8136 
8137 static void
loc_exp_dep_clear(variable * var)8138 loc_exp_dep_clear (variable *var)
8139 {
8140   while (VAR_LOC_DEP_VEC (var) && !VAR_LOC_DEP_VEC (var)->is_empty ())
8141     {
8142       loc_exp_dep *led = &VAR_LOC_DEP_VEC (var)->last ();
8143       if (led->next)
8144 	led->next->pprev = led->pprev;
8145       if (led->pprev)
8146 	*led->pprev = led->next;
8147       VAR_LOC_DEP_VEC (var)->pop ();
8148     }
8149 }
8150 
/* Insert an active dependency from VAR on X to the vector of
   dependencies, and add the corresponding back-link to X's list of
   back-links in VARS.  */

static void
loc_exp_insert_dep (variable *var, rtx x, variable_table_type *vars)
{
  decl_or_value dv;
  variable *xvar;
  loc_exp_dep *led;

  dv = dv_from_rtx (x);

  /* ??? Build a vector of variables parallel to EXPANDING, to avoid
     an additional look up?  */
  xvar = vars->find_with_hash (dv, dv_htab_hash (dv));

  if (!xvar)
    {
      /* X is not in VARS; fall back to the dropped-values table.  */
      xvar = variable_from_dropped (dv, NO_INSERT);
      gcc_checking_assert (xvar);
    }

  /* No point in adding the same backlink more than once.  This may
     arise if say the same value appears in two complex expressions in
     the same loc_list, or even more than once in a single
     expression.  */
  if (VAR_LOC_DEP_LST (xvar) && VAR_LOC_DEP_LST (xvar)->dv == var->dv)
    return;

  /* NOT_ONEPART variables get a heap-allocated dependency; one-part
     variables store it in their embedded dependency vector, which
     must already have room (see loc_exp_dep_alloc).  */
  if (var->onepart == NOT_ONEPART)
    led = new loc_exp_dep;
  else
    {
      loc_exp_dep empty;
      memset (&empty, 0, sizeof (empty));
      VAR_LOC_DEP_VEC (var)->quick_push (empty);
      led = &VAR_LOC_DEP_VEC (var)->last ();
    }
  led->dv = var->dv;
  led->value = x;

  /* Make sure XVAR has auxiliary data, then splice LED at the head of
     XVAR's back-links list.  */
  loc_exp_dep_alloc (xvar, 0);
  led->pprev = VAR_LOC_DEP_LSTP (xvar);
  led->next = *led->pprev;
  if (led->next)
    led->next->pprev = &led->next;
  *led->pprev = led;
}
8200 
8201 /* Create active dependencies of VAR on COUNT values starting at
8202    VALUE, and corresponding back-links to the entries in VARS.  Return
8203    true if we found any pending-recursion results.  */
8204 
8205 static bool
loc_exp_dep_set(variable * var,rtx result,rtx * value,int count,variable_table_type * vars)8206 loc_exp_dep_set (variable *var, rtx result, rtx *value, int count,
8207 		 variable_table_type *vars)
8208 {
8209   bool pending_recursion = false;
8210 
8211   gcc_checking_assert (VAR_LOC_DEP_VEC (var) == NULL
8212 		       || VAR_LOC_DEP_VEC (var)->is_empty ());
8213 
8214   /* Set up all dependencies from last_child (as set up at the end of
8215      the loop above) to the end.  */
8216   loc_exp_dep_alloc (var, count);
8217 
8218   while (count--)
8219     {
8220       rtx x = *value++;
8221 
8222       if (!pending_recursion)
8223 	pending_recursion = !result && VALUE_RECURSED_INTO (x);
8224 
8225       loc_exp_insert_dep (var, x, vars);
8226     }
8227 
8228   return pending_recursion;
8229 }
8230 
/* Notify the back-links of IVAR that are pending recursion that we
   have found a non-NIL value for it, so they are cleared for another
   attempt to compute a current location.  Notification propagates
   recursively, and each visited back-link is unlinked from IVAR's
   list.  */

static void
notify_dependents_of_resolved_value (variable *ivar, variable_table_type *vars)
{
  loc_exp_dep *led, *next;

  for (led = VAR_LOC_DEP_LST (ivar); led; led = next)
    {
      decl_or_value dv = led->dv;
      variable *var;

      next = led->next;

      if (dv_is_value_p (dv))
	{
	  rtx value = dv_as_value (dv);

	  /* If we have already resolved it, leave it alone.  */
	  if (!VALUE_RECURSED_INTO (value))
	    continue;

	  /* Check that VALUE_RECURSED_INTO, true from the test above,
	     implies NO_LOC_P.  */
	  gcc_checking_assert (NO_LOC_P (value));

	  /* We won't notify variables that are being expanded,
	     because their dependency list is cleared before
	     recursing.  */
	  NO_LOC_P (value) = false;
	  VALUE_RECURSED_INTO (value) = false;

	  gcc_checking_assert (dv_changed_p (dv));
	}
      else
	{
	  gcc_checking_assert (dv_onepart_p (dv) != NOT_ONEPART);
	  if (!dv_changed_p (dv))
	    continue;
	}

      var = vars->find_with_hash (dv, dv_htab_hash (dv));

      if (!var)
	var = variable_from_dropped (dv, NO_INSERT);

      /* Recursively notify everything that depends on VAR too.  */
      if (var)
	notify_dependents_of_resolved_value (var, vars);

      /* Detach LED from the back-links list; it has served its
	 purpose.  */
      if (next)
	next->pprev = led->pprev;
      if (led->pprev)
	*led->pprev = next;
      led->next = NULL;
      led->pprev = NULL;
    }
}
8290 
8291 static rtx vt_expand_loc_callback (rtx x, bitmap regs,
8292 				   int max_depth, void *data);
8293 
8294 /* Return the combined depth, when one sub-expression evaluated to
8295    BEST_DEPTH and the previous known depth was SAVED_DEPTH.  */
8296 
8297 static inline expand_depth
update_depth(expand_depth saved_depth,expand_depth best_depth)8298 update_depth (expand_depth saved_depth, expand_depth best_depth)
8299 {
8300   /* If we didn't find anything, stick with what we had.  */
8301   if (!best_depth.complexity)
8302     return saved_depth;
8303 
8304   /* If we found hadn't found anything, use the depth of the current
8305      expression.  Do NOT add one extra level, we want to compute the
8306      maximum depth among sub-expressions.  We'll increment it later,
8307      if appropriate.  */
8308   if (!saved_depth.complexity)
8309     return best_depth;
8310 
8311   /* Combine the entryval count so that regardless of which one we
8312      return, the entryval count is accurate.  */
8313   best_depth.entryvals = saved_depth.entryvals
8314     = best_depth.entryvals + saved_depth.entryvals;
8315 
8316   if (saved_depth.complexity < best_depth.complexity)
8317     return best_depth;
8318   else
8319     return saved_depth;
8320 }
8321 
/* Expand VAR to a location RTX, updating its cur_loc.  Use REGS and
   DATA for cselib expand callback.  If PENDRECP is given, indicate in
   it whether any sub-expression couldn't be fully evaluated because
   it is pending recursion resolution.  */

static inline rtx
vt_expand_var_loc_chain (variable *var, bitmap regs, void *data,
			 bool *pendrecp)
{
  struct expand_loc_callback_data *elcd
    = (struct expand_loc_callback_data *) data;
  location_chain *loc, *next;
  rtx result = NULL;
  int first_child, result_first_child, last_child;
  bool pending_recursion;
  rtx loc_from = NULL;
  struct elt_loc_list *cloc = NULL;
  expand_depth depth = { 0, 0 }, saved_depth = elcd->depth;
  int wanted_entryvals, found_entryvals = 0;

  /* Clear all backlinks pointing at this, so that we're not notified
     while we're active.  */
  loc_exp_dep_clear (var);

 retry:
  if (var->onepart == ONEPART_VALUE)
    {
      /* For VALUEs, also consider the cselib locations of the
	 underlying value.  */
      cselib_val *val = CSELIB_VAL_PTR (dv_as_value (var->dv));

      gcc_checking_assert (cselib_preserved_value_p (val));

      cloc = val->locs;
    }

  /* Remember where our sub-expressions start on the EXPANDING stack,
     so we can register them as dependencies (and truncate) later.  */
  first_child = result_first_child = last_child
    = elcd->expanding.length ();

  /* On retry, accept up to the minimal ENTRY_VALUE count found so
     far; initially this is zero, i.e. no ENTRY_VALUEs wanted.  */
  wanted_entryvals = found_entryvals;

  /* Attempt to expand each available location in turn.  */
  for (next = loc = var->n_var_parts ? var->var_part[0].loc_chain : NULL;
       loc || cloc; loc = next)
    {
      result_first_child = last_child;

      if (!loc)
	{
	  /* The variable's own chain is exhausted; walk the cselib
	     locations, skipping ones unsuitable for debug info.  */
	  loc_from = cloc->loc;
	  next = loc;
	  cloc = cloc->next;
	  if (unsuitable_loc (loc_from))
	    continue;
	}
      else
	{
	  loc_from = loc->loc;
	  next = loc->next;
	}

      gcc_checking_assert (!unsuitable_loc (loc_from));

      /* Reset the depth accumulator before each expansion attempt.  */
      elcd->depth.complexity = elcd->depth.entryvals = 0;
      result = cselib_expand_value_rtx_cb (loc_from, regs, EXPR_DEPTH,
					   vt_expand_loc_callback, data);
      last_child = elcd->expanding.length ();

      if (result)
	{
	  depth = elcd->depth;

	  gcc_checking_assert (depth.complexity
			       || result_first_child == last_child);

	  /* Count this expression itself, unless it was a single
	     already-counted child.  */
	  if (last_child - result_first_child != 1)
	    {
	      if (!depth.complexity && GET_CODE (result) == ENTRY_VALUE)
		depth.entryvals++;
	      depth.complexity++;
	    }

	  if (depth.complexity <= EXPR_USE_DEPTH)
	    {
	      /* Accept the expansion if it doesn't use more
		 ENTRY_VALUEs than we're willing to; otherwise record
		 the smallest ENTRY_VALUE count seen for a retry.  */
	      if (depth.entryvals <= wanted_entryvals)
		break;
	      else if (!found_entryvals || depth.entryvals < found_entryvals)
		found_entryvals = depth.entryvals;
	    }

	  result = NULL;
	}

      /* Set it up in case we leave the loop.  */
      depth.complexity = depth.entryvals = 0;
      loc_from = NULL;
      result_first_child = first_child;
    }

  if (!loc_from && wanted_entryvals < found_entryvals)
    {
      /* We found entries with ENTRY_VALUEs and skipped them.  Since
	 we could not find any expansions without ENTRY_VALUEs, but we
	 found at least one with them, go back and get an entry with
	 the minimum number ENTRY_VALUE count that we found.  We could
	 avoid looping, but since each sub-loc is already resolved,
	 the re-expansion should be trivial.  ??? Should we record all
	 attempted locs as dependencies, so that we retry the
	 expansion should any of them change, in the hope it can give
	 us a new entry without an ENTRY_VALUE?  */
      elcd->expanding.truncate (first_child);
      goto retry;
    }

  /* Register all encountered dependencies as active.  */
  pending_recursion = loc_exp_dep_set
    (var, result, elcd->expanding.address () + result_first_child,
     last_child - result_first_child, elcd->vars);

  elcd->expanding.truncate (first_child);

  /* Record where the expansion came from.  */
  gcc_checking_assert (!result || !pending_recursion);
  VAR_LOC_FROM (var) = loc_from;
  VAR_LOC_DEPTH (var) = depth;

  gcc_checking_assert (!depth.complexity == !result);

  elcd->depth = update_depth (saved_depth, depth);

  /* Indicate whether any of the dependencies are pending recursion
     resolution.  */
  if (pendrecp)
    *pendrecp = pending_recursion;

  if (!pendrecp || !pending_recursion)
    var->var_part[0].cur_loc = result;

  return result;
}
8460 
/* Callback for cselib_expand_value, that looks for expressions
   holding the value in the var-tracking hash tables.  Return X for
   standard processing, anything else is to be used as-is.  */

static rtx
vt_expand_loc_callback (rtx x, bitmap regs,
			int max_depth ATTRIBUTE_UNUSED,
			void *data)
{
  struct expand_loc_callback_data *elcd
    = (struct expand_loc_callback_data *) data;
  decl_or_value dv;
  variable *var;
  rtx result, subreg;
  bool pending_recursion = false;
  bool from_empty = false;

  switch (GET_CODE (x))
    {
    case SUBREG:
      /* Expand the inner expression first, then re-wrap it.  */
      subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
					   EXPR_DEPTH,
					   vt_expand_loc_callback, data);

      if (!subreg)
	return NULL;

      result = simplify_gen_subreg (GET_MODE (x), subreg,
				    GET_MODE (SUBREG_REG (x)),
				    SUBREG_BYTE (x));

      /* Invalid SUBREGs are ok in debug info.  ??? We could try
	 alternate expansions for the VALUE as well.  */
      if (!result)
	result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));

      return result;

    case DEBUG_EXPR:
    case VALUE:
      /* These are the only codes we expand through the hash tables;
	 fall through to the lookup below.  */
      dv = dv_from_rtx (x);
      break;

    default:
      return x;
    }

  /* Record X as a child of the expression under expansion.  */
  elcd->expanding.safe_push (x);

  /* Check that VALUE_RECURSED_INTO implies NO_LOC_P.  */
  gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x));

  if (NO_LOC_P (x))
    {
      gcc_checking_assert (VALUE_RECURSED_INTO (x) || !dv_changed_p (dv));
      return NULL;
    }

  var = elcd->vars->find_with_hash (dv, dv_htab_hash (dv));

  if (!var)
    {
      from_empty = true;
      var = variable_from_dropped (dv, INSERT);
    }

  gcc_checking_assert (var);

  if (!dv_changed_p (dv))
    {
      /* Already expanded since the last change: reuse the cached
	 location and fold its depth into the accumulator.  */
      gcc_checking_assert (!NO_LOC_P (x));
      gcc_checking_assert (var->var_part[0].cur_loc);
      gcc_checking_assert (VAR_LOC_1PAUX (var));
      gcc_checking_assert (VAR_LOC_1PAUX (var)->depth.complexity);

      elcd->depth = update_depth (elcd->depth, VAR_LOC_1PAUX (var)->depth);

      return var->var_part[0].cur_loc;
    }

  VALUE_RECURSED_INTO (x) = true;
  /* This is tentative, but it makes some tests simpler.  */
  NO_LOC_P (x) = true;

  gcc_checking_assert (var->n_var_parts == 1 || from_empty);

  result = vt_expand_var_loc_chain (var, regs, data, &pending_recursion);

  if (pending_recursion)
    {
      /* Leave the tentative NULL location in place; it will be
	 finalized or retried by resolve_expansions_pending_recursion
	 or a resolved-value notification.  */
      gcc_checking_assert (!result);
      elcd->pending.safe_push (x);
    }
  else
    {
      NO_LOC_P (x) = !result;
      VALUE_RECURSED_INTO (x) = false;
      set_dv_changed (dv, false);

      if (result)
	notify_dependents_of_resolved_value (var, elcd->vars);
    }

  return result;
}
8566 
8567 /* While expanding variables, we may encounter recursion cycles
8568    because of mutual (possibly indirect) dependencies between two
8569    particular variables (or values), say A and B.  If we're trying to
8570    expand A when we get to B, which in turn attempts to expand A, if
8571    we can't find any other expansion for B, we'll add B to this
8572    pending-recursion stack, and tentatively return NULL for its
8573    location.  This tentative value will be used for any other
8574    occurrences of B, unless A gets some other location, in which case
8575    it will notify B that it is worth another try at computing a
8576    location for it, and it will use the location computed for A then.
8577    At the end of the expansion, the tentative NULL locations become
8578    final for all members of PENDING that didn't get a notification.
8579    This function performs this finalization of NULL locations.  */
8580 
8581 static void
resolve_expansions_pending_recursion(vec<rtx,va_heap> * pending)8582 resolve_expansions_pending_recursion (vec<rtx, va_heap> *pending)
8583 {
8584   while (!pending->is_empty ())
8585     {
8586       rtx x = pending->pop ();
8587       decl_or_value dv;
8588 
8589       if (!VALUE_RECURSED_INTO (x))
8590 	continue;
8591 
8592       gcc_checking_assert (NO_LOC_P (x));
8593       VALUE_RECURSED_INTO (x) = false;
8594       dv = dv_from_rtx (x);
8595       gcc_checking_assert (dv_changed_p (dv));
8596       set_dv_changed (dv, false);
8597     }
8598 }
8599 
/* Initialize expand_loc_callback_data D with variable hash table V.
   It must be a macro because of alloca (vec stack).  */
#define INIT_ELCD(d, v)						\
  do								\
    {								\
      (d).vars = (v);						\
      (d).depth.complexity = (d).depth.entryvals = 0;		\
    }								\
  while (0)
/* Finalize expand_loc_callback_data D, resolved to location L.
   Pending NULL expansions are made final, the auto_vecs released,
   and a MEM location is delegitimized for the target.  */
#define FINI_ELCD(d, l)						\
  do								\
    {								\
      resolve_expansions_pending_recursion (&(d).pending);	\
      (d).pending.release ();					\
      (d).expanding.release ();					\
								\
      if ((l) && MEM_P (l))					\
	(l) = targetm.delegitimize_address (l);			\
    }								\
  while (0)
8621 
8622 /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
8623    equivalences in VARS, updating their CUR_LOCs in the process.  */
8624 
8625 static rtx
vt_expand_loc(rtx loc,variable_table_type * vars)8626 vt_expand_loc (rtx loc, variable_table_type *vars)
8627 {
8628   struct expand_loc_callback_data data;
8629   rtx result;
8630 
8631   if (!MAY_HAVE_DEBUG_BIND_INSNS)
8632     return loc;
8633 
8634   INIT_ELCD (data, vars);
8635 
8636   result = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
8637 				       vt_expand_loc_callback, &data);
8638 
8639   FINI_ELCD (data, result);
8640 
8641   return result;
8642 }
8643 
8644 /* Expand the one-part VARiable to a location, using the equivalences
8645    in VARS, updating their CUR_LOCs in the process.  */
8646 
8647 static rtx
vt_expand_1pvar(variable * var,variable_table_type * vars)8648 vt_expand_1pvar (variable *var, variable_table_type *vars)
8649 {
8650   struct expand_loc_callback_data data;
8651   rtx loc;
8652 
8653   gcc_checking_assert (var->onepart && var->n_var_parts == 1);
8654 
8655   if (!dv_changed_p (var->dv))
8656     return var->var_part[0].cur_loc;
8657 
8658   INIT_ELCD (data, vars);
8659 
8660   loc = vt_expand_var_loc_chain (var, scratch_regs, &data, NULL);
8661 
8662   gcc_checking_assert (data.expanding.is_empty ());
8663 
8664   FINI_ELCD (data, loc);
8665 
8666   return loc;
8667 }
8668 
8669 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP.  DATA contains
8670    additional parameters: WHERE specifies whether the note shall be emitted
8671    before or after instruction INSN.  */
8672 
8673 int
emit_note_insn_var_location(variable ** varp,emit_note_data * data)8674 emit_note_insn_var_location (variable **varp, emit_note_data *data)
8675 {
8676   variable *var = *varp;
8677   rtx_insn *insn = data->insn;
8678   enum emit_note_where where = data->where;
8679   variable_table_type *vars = data->vars;
8680   rtx_note *note;
8681   rtx note_vl;
8682   int i, j, n_var_parts;
8683   bool complete;
8684   enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
8685   HOST_WIDE_INT last_limit;
8686   HOST_WIDE_INT offsets[MAX_VAR_PARTS];
8687   rtx loc[MAX_VAR_PARTS];
8688   tree decl;
8689   location_chain *lc;
8690 
8691   gcc_checking_assert (var->onepart == NOT_ONEPART
8692 		       || var->onepart == ONEPART_VDECL);
8693 
8694   decl = dv_as_decl (var->dv);
8695 
8696   complete = true;
8697   last_limit = 0;
8698   n_var_parts = 0;
8699   if (!var->onepart)
8700     for (i = 0; i < var->n_var_parts; i++)
8701       if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
8702 	var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
8703   for (i = 0; i < var->n_var_parts; i++)
8704     {
8705       machine_mode mode, wider_mode;
8706       rtx loc2;
8707       HOST_WIDE_INT offset, size, wider_size;
8708 
8709       if (i == 0 && var->onepart)
8710 	{
8711 	  gcc_checking_assert (var->n_var_parts == 1);
8712 	  offset = 0;
8713 	  initialized = VAR_INIT_STATUS_INITIALIZED;
8714 	  loc2 = vt_expand_1pvar (var, vars);
8715 	}
8716       else
8717 	{
8718 	  if (last_limit < VAR_PART_OFFSET (var, i))
8719 	    {
8720 	      complete = false;
8721 	      break;
8722 	    }
8723 	  else if (last_limit > VAR_PART_OFFSET (var, i))
8724 	    continue;
8725 	  offset = VAR_PART_OFFSET (var, i);
8726 	  loc2 = var->var_part[i].cur_loc;
8727 	  if (loc2 && GET_CODE (loc2) == MEM
8728 	      && GET_CODE (XEXP (loc2, 0)) == VALUE)
8729 	    {
8730 	      rtx depval = XEXP (loc2, 0);
8731 
8732 	      loc2 = vt_expand_loc (loc2, vars);
8733 
8734 	      if (loc2)
8735 		loc_exp_insert_dep (var, depval, vars);
8736 	    }
8737 	  if (!loc2)
8738 	    {
8739 	      complete = false;
8740 	      continue;
8741 	    }
8742 	  gcc_checking_assert (GET_CODE (loc2) != VALUE);
8743 	  for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
8744 	    if (var->var_part[i].cur_loc == lc->loc)
8745 	      {
8746 		initialized = lc->init;
8747 		break;
8748 	      }
8749 	  gcc_assert (lc);
8750 	}
8751 
8752       offsets[n_var_parts] = offset;
8753       if (!loc2)
8754 	{
8755 	  complete = false;
8756 	  continue;
8757 	}
8758       loc[n_var_parts] = loc2;
8759       mode = GET_MODE (var->var_part[i].cur_loc);
8760       if (mode == VOIDmode && var->onepart)
8761 	mode = DECL_MODE (decl);
8762       /* We ony track subparts of constant-sized objects, since at present
8763 	 there's no representation for polynomial pieces.  */
8764       if (!GET_MODE_SIZE (mode).is_constant (&size))
8765 	{
8766 	  complete = false;
8767 	  continue;
8768 	}
8769       last_limit = offsets[n_var_parts] + size;
8770 
8771       /* Attempt to merge adjacent registers or memory.  */
8772       for (j = i + 1; j < var->n_var_parts; j++)
8773 	if (last_limit <= VAR_PART_OFFSET (var, j))
8774 	  break;
8775       if (j < var->n_var_parts
8776 	  && GET_MODE_WIDER_MODE (mode).exists (&wider_mode)
8777 	  && GET_MODE_SIZE (wider_mode).is_constant (&wider_size)
8778 	  && var->var_part[j].cur_loc
8779 	  && mode == GET_MODE (var->var_part[j].cur_loc)
8780 	  && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
8781 	  && last_limit == (var->onepart ? 0 : VAR_PART_OFFSET (var, j))
8782 	  && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
8783 	  && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
8784 	{
8785 	  rtx new_loc = NULL;
8786 	  poly_int64 offset2;
8787 
8788 	  if (REG_P (loc[n_var_parts])
8789 	      && hard_regno_nregs (REGNO (loc[n_var_parts]), mode) * 2
8790 		 == hard_regno_nregs (REGNO (loc[n_var_parts]), wider_mode)
8791 	      && end_hard_regno (mode, REGNO (loc[n_var_parts]))
8792 		 == REGNO (loc2))
8793 	    {
8794 	      if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
8795 		new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
8796 					   mode, 0);
8797 	      else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
8798 		new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
8799 	      if (new_loc)
8800 		{
8801 		  if (!REG_P (new_loc)
8802 		      || REGNO (new_loc) != REGNO (loc[n_var_parts]))
8803 		    new_loc = NULL;
8804 		  else
8805 		    REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
8806 		}
8807 	    }
8808 	  else if (MEM_P (loc[n_var_parts])
8809 		   && GET_CODE (XEXP (loc2, 0)) == PLUS
8810 		   && REG_P (XEXP (XEXP (loc2, 0), 0))
8811 		   && poly_int_rtx_p (XEXP (XEXP (loc2, 0), 1), &offset2))
8812 	    {
8813 	      poly_int64 end1 = size;
8814 	      rtx base1 = strip_offset_and_add (XEXP (loc[n_var_parts], 0),
8815 						&end1);
8816 	      if (rtx_equal_p (base1, XEXP (XEXP (loc2, 0), 0))
8817 		  && known_eq (end1, offset2))
8818 		new_loc = adjust_address_nv (loc[n_var_parts],
8819 					     wider_mode, 0);
8820 	    }
8821 
8822 	  if (new_loc)
8823 	    {
8824 	      loc[n_var_parts] = new_loc;
8825 	      mode = wider_mode;
8826 	      last_limit = offsets[n_var_parts] + wider_size;
8827 	      i = j;
8828 	    }
8829 	}
8830       ++n_var_parts;
8831     }
8832   poly_uint64 type_size_unit
8833     = tree_to_poly_uint64 (TYPE_SIZE_UNIT (TREE_TYPE (decl)));
8834   if (maybe_lt (poly_uint64 (last_limit), type_size_unit))
8835     complete = false;
8836 
8837   if (! flag_var_tracking_uninit)
8838     initialized = VAR_INIT_STATUS_INITIALIZED;
8839 
8840   note_vl = NULL_RTX;
8841   if (!complete)
8842     note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX, initialized);
8843   else if (n_var_parts == 1)
8844     {
8845       rtx expr_list;
8846 
8847       if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
8848 	expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
8849       else
8850 	expr_list = loc[0];
8851 
8852       note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list, initialized);
8853     }
8854   else if (n_var_parts)
8855     {
8856       rtx parallel;
8857 
8858       for (i = 0; i < n_var_parts; i++)
8859 	loc[i]
8860 	  = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
8861 
8862       parallel = gen_rtx_PARALLEL (VOIDmode,
8863 				   gen_rtvec_v (n_var_parts, loc));
8864       note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
8865 				      parallel, initialized);
8866     }
8867 
8868   if (where != EMIT_NOTE_BEFORE_INSN)
8869     {
8870       note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8871       if (where == EMIT_NOTE_AFTER_CALL_INSN)
8872 	NOTE_DURING_CALL_P (note) = true;
8873     }
8874   else
8875     {
8876       /* Make sure that the call related notes come first.  */
8877       while (NEXT_INSN (insn)
8878 	     && NOTE_P (insn)
8879 	     && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8880 	     && NOTE_DURING_CALL_P (insn))
8881 	insn = NEXT_INSN (insn);
8882       if (NOTE_P (insn)
8883 	  && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8884 	  && NOTE_DURING_CALL_P (insn))
8885 	note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8886       else
8887 	note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
8888     }
8889   NOTE_VAR_LOCATION (note) = note_vl;
8890 
8891   set_dv_changed (var->dv, false);
8892   gcc_assert (var->in_changed_variables);
8893   var->in_changed_variables = false;
8894   changed_variables->clear_slot (varp);
8895 
8896   /* Continue traversing the hash table.  */
8897   return 1;
8898 }
8899 
8900 /* While traversing changed_variables, push onto DATA (a stack of RTX
8901    values) entries that aren't user variables.  */
8902 
8903 int
var_track_values_to_stack(variable ** slot,vec<rtx,va_heap> * changed_values_stack)8904 var_track_values_to_stack (variable **slot,
8905 			   vec<rtx, va_heap> *changed_values_stack)
8906 {
8907   variable *var = *slot;
8908 
8909   if (var->onepart == ONEPART_VALUE)
8910     changed_values_stack->safe_push (dv_as_value (var->dv));
8911   else if (var->onepart == ONEPART_DEXPR)
8912     changed_values_stack->safe_push (DECL_RTL_KNOWN_SET (dv_as_decl (var->dv)));
8913 
8914   return 1;
8915 }
8916 
8917 /* Remove from changed_variables the entry whose DV corresponds to
8918    value or debug_expr VAL.  */
8919 static void
remove_value_from_changed_variables(rtx val)8920 remove_value_from_changed_variables (rtx val)
8921 {
8922   decl_or_value dv = dv_from_rtx (val);
8923   variable **slot;
8924   variable *var;
8925 
8926   slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
8927 						NO_INSERT);
8928   var = *slot;
8929   var->in_changed_variables = false;
8930   changed_variables->clear_slot (slot);
8931 }
8932 
8933 /* If VAL (a value or debug_expr) has backlinks to variables actively
8934    dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
8935    changed, adding to CHANGED_VALUES_STACK any dependencies that may
8936    have dependencies of their own to notify.  */
8937 
static void
notify_dependents_of_changed_value (rtx val, variable_table_type *htab,
				    vec<rtx, va_heap> *changed_values_stack)
{
  variable **slot;
  variable *var;
  loc_exp_dep *led;
  decl_or_value dv = dv_from_rtx (val);

  /* VAL's variable may live in changed_variables, in HTAB, or among
     dropped values; try the three tables in that order.  */
  slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
						NO_INSERT);
  if (!slot)
    slot = htab->find_slot_with_hash (dv, dv_htab_hash (dv), NO_INSERT);
  if (!slot)
    slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv),
						NO_INSERT);
  var = *slot;

  /* Consume the backlink list: each LED names an entity whose expanded
     location used VAL and thus needs renotification.  */
  while ((led = VAR_LOC_DEP_LST (var)))
    {
      decl_or_value ldv = led->dv;
      variable *ivar;

      /* Deactivate and remove the backlink, as it was "used up".  It
	 makes no sense to attempt to notify the same entity again:
	 either it will be recomputed and re-register an active
	 dependency, or it will still have the changed mark.  */
      if (led->next)
	led->next->pprev = led->pprev;
      if (led->pprev)
	*led->pprev = led->next;
      led->next = NULL;
      led->pprev = NULL;

      /* Already marked changed; no further notification needed.  */
      if (dv_changed_p (ldv))
	continue;

      switch (dv_onepart_p (ldv))
	{
	case ONEPART_VALUE:
	case ONEPART_DEXPR:
	  /* Values and debug exprs may themselves have dependents;
	     queue them so they are processed recursively.  */
	  set_dv_changed (ldv, true);
	  changed_values_stack->safe_push (dv_as_rtx (ldv));
	  break;

	case ONEPART_VDECL:
	  ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
	  gcc_checking_assert (!VAR_LOC_DEP_LST (ivar));
	  variable_was_changed (ivar, NULL);
	  break;

	case NOT_ONEPART:
	  /* Multi-part entries own their dep record, so it can be freed
	     now that it is detached from both lists.  */
	  delete led;
	  ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
	  if (ivar)
	    {
	      int i = ivar->n_var_parts;
	      while (i--)
		{
		  rtx loc = ivar->var_part[i].cur_loc;

		  /* Only a MEM whose address is exactly VAL actually
		     depends on VAL's location.  */
		  if (loc && GET_CODE (loc) == MEM
		      && XEXP (loc, 0) == val)
		    {
		      variable_was_changed (ivar, NULL);
		      break;
		    }
		}
	    }
	  break;

	default:
	  gcc_unreachable ();
	}
    }
}
9014 
/* Take out of changed_variables any entries that don't refer to user
   variables.  Back-propagate change notifications from values and
9017    debug_exprs to their active dependencies in HTAB or in
9018    CHANGED_VARIABLES.  */
9019 
9020 static void
process_changed_values(variable_table_type * htab)9021 process_changed_values (variable_table_type *htab)
9022 {
9023   int i, n;
9024   rtx val;
9025   auto_vec<rtx, 20> changed_values_stack;
9026 
9027   /* Move values from changed_variables to changed_values_stack.  */
9028   changed_variables
9029     ->traverse <vec<rtx, va_heap>*, var_track_values_to_stack>
9030       (&changed_values_stack);
9031 
9032   /* Back-propagate change notifications in values while popping
9033      them from the stack.  */
9034   for (n = i = changed_values_stack.length ();
9035        i > 0; i = changed_values_stack.length ())
9036     {
9037       val = changed_values_stack.pop ();
9038       notify_dependents_of_changed_value (val, htab, &changed_values_stack);
9039 
9040       /* This condition will hold when visiting each of the entries
9041 	 originally in changed_variables.  We can't remove them
9042 	 earlier because this could drop the backlinks before we got a
9043 	 chance to use them.  */
9044       if (i == n)
9045 	{
9046 	  remove_value_from_changed_variables (val);
9047 	  n--;
9048 	}
9049     }
9050 }
9051 
9052 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
9053    CHANGED_VARIABLES and delete this chain.  WHERE specifies whether
   the notes shall be emitted before or after instruction INSN.  */
9055 
9056 static void
emit_notes_for_changes(rtx_insn * insn,enum emit_note_where where,shared_hash * vars)9057 emit_notes_for_changes (rtx_insn *insn, enum emit_note_where where,
9058 			shared_hash *vars)
9059 {
9060   emit_note_data data;
9061   variable_table_type *htab = shared_hash_htab (vars);
9062 
9063   if (!changed_variables->elements ())
9064     return;
9065 
9066   if (MAY_HAVE_DEBUG_BIND_INSNS)
9067     process_changed_values (htab);
9068 
9069   data.insn = insn;
9070   data.where = where;
9071   data.vars = htab;
9072 
9073   changed_variables
9074     ->traverse <emit_note_data*, emit_note_insn_var_location> (&data);
9075 }
9076 
9077 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
9078    same variable in hash table DATA or is not there at all.  */
9079 
int
emit_notes_for_differences_1 (variable **slot, variable_table_type *new_vars)
{
  variable *old_var, *new_var;

  old_var = *slot;
  /* Look up the counterpart of OLD_VAR in the new table.  */
  new_var = new_vars->find_with_hash (old_var->dv, dv_htab_hash (old_var->dv));

  if (!new_var)
    {
      /* Variable has disappeared.  Queue a variable with no location
	 parts so that a note withdrawing the old location gets
	 emitted.  */
      variable *empty_var = NULL;

      if (old_var->onepart == ONEPART_VALUE
	  || old_var->onepart == ONEPART_DEXPR)
	{
	  /* For values and debug exprs, reuse the entry kept in
	     dropped_values, if any.  */
	  empty_var = variable_from_dropped (old_var->dv, NO_INSERT);
	  if (empty_var)
	    {
	      gcc_checking_assert (!empty_var->in_changed_variables);
	      /* At most one of the two should carry the one-part
		 auxiliary data; keep it on OLD_VAR so it is propagated
		 below.  */
	      if (!VAR_LOC_1PAUX (old_var))
		{
		  VAR_LOC_1PAUX (old_var) = VAR_LOC_1PAUX (empty_var);
		  VAR_LOC_1PAUX (empty_var) = NULL;
		}
	      else
		gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
	    }
	}

      /* Otherwise allocate a fresh, empty stand-in.  */
      if (!empty_var)
	{
	  empty_var = onepart_pool_allocate (old_var->onepart);
	  empty_var->dv = old_var->dv;
	  empty_var->refcount = 0;
	  empty_var->n_var_parts = 0;
	  empty_var->onepart = old_var->onepart;
	  empty_var->in_changed_variables = false;
	}

      if (empty_var->onepart)
	{
	  /* Propagate the auxiliary data to (ultimately)
	     changed_variables.  */
	  empty_var->var_part[0].loc_chain = NULL;
	  empty_var->var_part[0].cur_loc = NULL;
	  VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (old_var);
	  VAR_LOC_1PAUX (old_var) = NULL;
	}
      variable_was_changed (empty_var, NULL);
      /* Continue traversing the hash table.  */
      return 1;
    }
  /* Update cur_loc and one-part auxiliary data, before new_var goes
     through variable_was_changed.  */
  if (old_var != new_var && new_var->onepart)
    {
      gcc_checking_assert (VAR_LOC_1PAUX (new_var) == NULL);
      VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (old_var);
      VAR_LOC_1PAUX (old_var) = NULL;
      new_var->var_part[0].cur_loc = old_var->var_part[0].cur_loc;
    }
  if (variable_different_p (old_var, new_var))
    variable_was_changed (new_var, NULL);

  /* Continue traversing the hash table.  */
  return 1;
}
9148 
9149 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
9150    table DATA.  */
9151 
9152 int
emit_notes_for_differences_2(variable ** slot,variable_table_type * old_vars)9153 emit_notes_for_differences_2 (variable **slot, variable_table_type *old_vars)
9154 {
9155   variable *old_var, *new_var;
9156 
9157   new_var = *slot;
9158   old_var = old_vars->find_with_hash (new_var->dv, dv_htab_hash (new_var->dv));
9159   if (!old_var)
9160     {
9161       int i;
9162       for (i = 0; i < new_var->n_var_parts; i++)
9163 	new_var->var_part[i].cur_loc = NULL;
9164       variable_was_changed (new_var, NULL);
9165     }
9166 
9167   /* Continue traversing the hash table.  */
9168   return 1;
9169 }
9170 
9171 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
9172    NEW_SET.  */
9173 
9174 static void
emit_notes_for_differences(rtx_insn * insn,dataflow_set * old_set,dataflow_set * new_set)9175 emit_notes_for_differences (rtx_insn *insn, dataflow_set *old_set,
9176 			    dataflow_set *new_set)
9177 {
9178   shared_hash_htab (old_set->vars)
9179     ->traverse <variable_table_type *, emit_notes_for_differences_1>
9180       (shared_hash_htab (new_set->vars));
9181   shared_hash_htab (new_set->vars)
9182     ->traverse <variable_table_type *, emit_notes_for_differences_2>
9183       (shared_hash_htab (old_set->vars));
9184   emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
9185 }
9186 
9187 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION.  */
9188 
9189 static rtx_insn *
next_non_note_insn_var_location(rtx_insn * insn)9190 next_non_note_insn_var_location (rtx_insn *insn)
9191 {
9192   while (insn)
9193     {
9194       insn = NEXT_INSN (insn);
9195       if (insn == 0
9196 	  || !NOTE_P (insn)
9197 	  || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION)
9198 	break;
9199     }
9200 
9201   return insn;
9202 }
9203 
9204 /* Emit the notes for changes of location parts in the basic block BB.  */
9205 
static void
emit_notes_in_bb (basic_block bb, dataflow_set *set)
{
  unsigned int i;
  micro_operation *mo;

  /* Replay BB's micro operations on top of the locations live on entry
     to BB, emitting location notes as the tracked state changes.  */
  dataflow_set_clear (set);
  dataflow_set_copy (set, &VTI (bb)->in);

  FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
    {
      rtx_insn *insn = mo->insn;
      /* Insertion point for notes describing INSN's after-effects:
	 just past any var-location notes already following INSN.  */
      rtx_insn *next_insn = next_non_note_insn_var_location (insn);

      switch (mo->type)
	{
	  case MO_CALL:
	    /* The call clobbers call-clobbered locations; notes for the
	       resulting changes go right after the call insn.  */
	    dataflow_set_clear_at_call (set, insn);
	    emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
	    {
	      /* Expand the recorded argument locations, pruning entries
		 that no longer expand, and attach the surviving list to
		 the call as a REG_CALL_ARG_LOCATION note.  */
	      rtx arguments = mo->u.loc, *p = &arguments;
	      while (*p)
		{
		  XEXP (XEXP (*p, 0), 1)
		    = vt_expand_loc (XEXP (XEXP (*p, 0), 1),
				     shared_hash_htab (set->vars));
		  /* If expansion is successful, keep it in the list.  */
		  if (XEXP (XEXP (*p, 0), 1))
		    {
		      XEXP (XEXP (*p, 0), 1)
			= copy_rtx_if_shared (XEXP (XEXP (*p, 0), 1));
		      p = &XEXP (*p, 1);
		    }
		  /* Otherwise, if the following item is data_value for it,
		     drop it too.  */
		  else if (XEXP (*p, 1)
			   && REG_P (XEXP (XEXP (*p, 0), 0))
			   && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
			   && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
					   0))
			   && REGNO (XEXP (XEXP (*p, 0), 0))
			      == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
						    0), 0)))
		    *p = XEXP (XEXP (*p, 1), 1);
		  /* Just drop this item.  */
		  else
		    *p = XEXP (*p, 1);
		}
	      add_reg_note (insn, REG_CALL_ARG_LOCATION, arguments);
	    }
	    break;

	  case MO_USE:
	    {
	      rtx loc = mo->u.loc;

	      /* Record LOC as a (possibly uninitialized) location of
		 its tracked variable, effective before INSN.  */
	      if (REG_P (loc))
		var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
	      else
		var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);

	      emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
	    }
	    break;

	  case MO_VAL_LOC:
	    {
	      rtx loc = mo->u.loc;
	      rtx val, vloc;
	      tree var;

	      /* LOC is either a bare VAR_LOCATION pattern, or a CONCAT
		 of a value and the VAR_LOCATION bound to it.  */
	      if (GET_CODE (loc) == CONCAT)
		{
		  val = XEXP (loc, 0);
		  vloc = XEXP (loc, 1);
		}
	      else
		{
		  val = NULL_RTX;
		  vloc = loc;
		}

	      var = PAT_VAR_LOCATION_DECL (vloc);

	      /* A debug bind supersedes any earlier binding of VAR.  */
	      clobber_variable_part (set, NULL_RTX,
				     dv_from_decl (var), 0, NULL_RTX);
	      if (val)
		{
		  if (VAL_NEEDS_RESOLUTION (loc))
		    val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
		  set_variable_part (set, val, dv_from_decl (var), 0,
				     VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
				     INSERT);
		}
	      else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
		set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
				   dv_from_decl (var), 0,
				   VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
				   INSERT);

	      emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
	    }
	    break;

	  case MO_VAL_USE:
	    {
	      rtx loc = mo->u.loc;
	      rtx val, vloc, uloc;

	      /* LOC is (concat VAL ULOC) or (concat (concat VAL ULOC)
		 VLOC); split it into the value and its locations.  */
	      vloc = uloc = XEXP (loc, 1);
	      val = XEXP (loc, 0);

	      if (GET_CODE (val) == CONCAT)
		{
		  uloc = XEXP (val, 1);
		  val = XEXP (val, 0);
		}

	      if (VAL_NEEDS_RESOLUTION (loc))
		val_resolve (set, val, vloc, insn);
	      else
		val_store (set, val, uloc, insn, false);

	      if (VAL_HOLDS_TRACK_EXPR (loc))
		{
		  /* The use location also binds a tracked expression.  */
		  if (GET_CODE (uloc) == REG)
		    var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
				 NULL);
		  else if (GET_CODE (uloc) == MEM)
		    var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
				 NULL);
		}

	      emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
	    }
	    break;

	  case MO_VAL_SET:
	    {
	      rtx loc = mo->u.loc;
	      rtx val, vloc, uloc;
	      rtx dstv, srcv;

	      /* Decompose LOC into the stored value (VAL), the
		 underlying location/SET (ULOC), the value-annotated
		 variant (VLOC), and the destination/source (DSTV,
		 SRCV) the store affects.  */
	      vloc = loc;
	      uloc = XEXP (vloc, 1);
	      val = XEXP (vloc, 0);
	      vloc = uloc;

	      if (GET_CODE (uloc) == SET)
		{
		  dstv = SET_DEST (uloc);
		  srcv = SET_SRC (uloc);
		}
	      else
		{
		  dstv = uloc;
		  srcv = NULL;
		}

	      if (GET_CODE (val) == CONCAT)
		{
		  dstv = vloc = XEXP (val, 1);
		  val = XEXP (val, 0);
		}

	      if (GET_CODE (vloc) == SET)
		{
		  srcv = SET_SRC (vloc);

		  gcc_assert (val != srcv);
		  gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));

		  dstv = vloc = SET_DEST (vloc);

		  if (VAL_NEEDS_RESOLUTION (loc))
		    val_resolve (set, val, srcv, insn);
		}
	      else if (VAL_NEEDS_RESOLUTION (loc))
		{
		  gcc_assert (GET_CODE (uloc) == SET
			      && GET_CODE (SET_SRC (uloc)) == REG);
		  val_resolve (set, val, SET_SRC (uloc), insn);
		}

	      if (VAL_HOLDS_TRACK_EXPR (loc))
		{
		  /* The destination also binds a tracked expression;
		     distinguish clobbers from (copy) assignments.  */
		  if (VAL_EXPR_IS_CLOBBERED (loc))
		    {
		      if (REG_P (uloc))
			var_reg_delete (set, uloc, true);
		      else if (MEM_P (uloc))
			{
			  gcc_assert (MEM_P (dstv));
			  gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
			  var_mem_delete (set, dstv, true);
			}
		    }
		  else
		    {
		      bool copied_p = VAL_EXPR_IS_COPIED (loc);
		      rtx src = NULL, dst = uloc;
		      enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;

		      if (GET_CODE (uloc) == SET)
			{
			  src = SET_SRC (uloc);
			  dst = SET_DEST (uloc);
			}

		      if (copied_p)
			{
			  /* A copy inherits the source's init status
			     and (resolved) source location.  */
			  status = find_src_status (set, src);

			  src = find_src_set_src (set, src);
			}

		      if (REG_P (dst))
			var_reg_delete_and_set (set, dst, !copied_p,
						status, srcv);
		      else if (MEM_P (dst))
			{
			  gcc_assert (MEM_P (dstv));
			  gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
			  var_mem_delete_and_set (set, dstv, !copied_p,
						  status, srcv);
			}
		    }
		}
	      else if (REG_P (uloc))
		var_regno_delete (set, REGNO (uloc));
	      else if (MEM_P (uloc))
		{
		  gcc_checking_assert (GET_CODE (vloc) == MEM);
		  gcc_checking_assert (vloc == dstv);
		  if (vloc != dstv)
		    clobber_overlapping_mems (set, vloc);
		}

	      val_store (set, val, dstv, insn, true);

	      emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
				      set->vars);
	    }
	    break;

	  case MO_SET:
	    {
	      rtx loc = mo->u.loc;
	      rtx set_src = NULL;

	      if (GET_CODE (loc) == SET)
		{
		  set_src = SET_SRC (loc);
		  loc = SET_DEST (loc);
		}

	      /* An (exclusive) assignment: LOC becomes the variable's
		 only location.  */
	      if (REG_P (loc))
		var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
					set_src);
	      else
		var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
					set_src);

	      emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
				      set->vars);
	    }
	    break;

	  case MO_COPY:
	    {
	      rtx loc = mo->u.loc;
	      enum var_init_status src_status;
	      rtx set_src = NULL;

	      if (GET_CODE (loc) == SET)
		{
		  set_src = SET_SRC (loc);
		  loc = SET_DEST (loc);
		}

	      /* A copy adds LOC as an additional location, inheriting
		 the source's init status.  */
	      src_status = find_src_status (set, set_src);
	      set_src = find_src_set_src (set, set_src);

	      if (REG_P (loc))
		var_reg_delete_and_set (set, loc, false, src_status, set_src);
	      else
		var_mem_delete_and_set (set, loc, false, src_status, set_src);

	      emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
				      set->vars);
	    }
	    break;

	  case MO_USE_NO_VAR:
	    {
	      rtx loc = mo->u.loc;

	      /* A use not tied to a tracked variable still invalidates
		 LOC (non-clobbering).  */
	      if (REG_P (loc))
		var_reg_delete (set, loc, false);
	      else
		var_mem_delete (set, loc, false);

	      emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
	    }
	    break;

	  case MO_CLOBBER:
	    {
	      rtx loc = mo->u.loc;

	      /* LOC is clobbered outright.  */
	      if (REG_P (loc))
		var_reg_delete (set, loc, true);
	      else
		var_mem_delete (set, loc, true);

	      emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
				      set->vars);
	    }
	    break;

	  case MO_ADJUST:
	    /* Track stack pointer adjustments so frame-relative MEMs
	       stay meaningful.  */
	    set->stack_adjust += mo->u.adjust;
	    break;
	}
    }
}
9532 
9533 /* Emit notes for the whole function.  */
9534 
9535 static void
vt_emit_notes(void)9536 vt_emit_notes (void)
9537 {
9538   basic_block bb;
9539   dataflow_set cur;
9540 
9541   gcc_assert (!changed_variables->elements ());
9542 
9543   /* Free memory occupied by the out hash tables, as they aren't used
9544      anymore.  */
9545   FOR_EACH_BB_FN (bb, cfun)
9546     dataflow_set_clear (&VTI (bb)->out);
9547 
9548   /* Enable emitting notes by functions (mainly by set_variable_part and
9549      delete_variable_part).  */
9550   emit_notes = true;
9551 
9552   if (MAY_HAVE_DEBUG_BIND_INSNS)
9553     dropped_values = new variable_table_type (cselib_get_next_uid () * 2);
9554 
9555   dataflow_set_init (&cur);
9556 
9557   FOR_EACH_BB_FN (bb, cfun)
9558     {
9559       /* Emit the notes for changes of variable locations between two
9560 	 subsequent basic blocks.  */
9561       emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
9562 
9563       if (MAY_HAVE_DEBUG_BIND_INSNS)
9564 	local_get_addr_cache = new hash_map<rtx, rtx>;
9565 
9566       /* Emit the notes for the changes in the basic block itself.  */
9567       emit_notes_in_bb (bb, &cur);
9568 
9569       if (MAY_HAVE_DEBUG_BIND_INSNS)
9570 	delete local_get_addr_cache;
9571       local_get_addr_cache = NULL;
9572 
9573       /* Free memory occupied by the in hash table, we won't need it
9574 	 again.  */
9575       dataflow_set_clear (&VTI (bb)->in);
9576     }
9577 
9578   if (flag_checking)
9579     shared_hash_htab (cur.vars)
9580       ->traverse <variable_table_type *, emit_notes_for_differences_1>
9581 	(shared_hash_htab (empty_shared_hash));
9582 
9583   dataflow_set_destroy (&cur);
9584 
9585   if (MAY_HAVE_DEBUG_BIND_INSNS)
9586     delete dropped_values;
9587   dropped_values = NULL;
9588 
9589   emit_notes = false;
9590 }
9591 
9592 /* If there is a declaration and offset associated with register/memory RTL
9593    assign declaration to *DECLP and offset to *OFFSETP, and return true.  */
9594 
9595 static bool
vt_get_decl_and_offset(rtx rtl,tree * declp,poly_int64 * offsetp)9596 vt_get_decl_and_offset (rtx rtl, tree *declp, poly_int64 *offsetp)
9597 {
9598   if (REG_P (rtl))
9599     {
9600       if (REG_ATTRS (rtl))
9601 	{
9602 	  *declp = REG_EXPR (rtl);
9603 	  *offsetp = REG_OFFSET (rtl);
9604 	  return true;
9605 	}
9606     }
9607   else if (GET_CODE (rtl) == PARALLEL)
9608     {
9609       tree decl = NULL_TREE;
9610       HOST_WIDE_INT offset = MAX_VAR_PARTS;
9611       int len = XVECLEN (rtl, 0), i;
9612 
9613       for (i = 0; i < len; i++)
9614 	{
9615 	  rtx reg = XEXP (XVECEXP (rtl, 0, i), 0);
9616 	  if (!REG_P (reg) || !REG_ATTRS (reg))
9617 	    break;
9618 	  if (!decl)
9619 	    decl = REG_EXPR (reg);
9620 	  if (REG_EXPR (reg) != decl)
9621 	    break;
9622 	  HOST_WIDE_INT this_offset;
9623 	  if (!track_offset_p (REG_OFFSET (reg), &this_offset))
9624 	    break;
9625 	  offset = MIN (offset, this_offset);
9626 	}
9627 
9628       if (i == len)
9629 	{
9630 	  *declp = decl;
9631 	  *offsetp = offset;
9632 	  return true;
9633 	}
9634     }
9635   else if (MEM_P (rtl))
9636     {
9637       if (MEM_ATTRS (rtl))
9638 	{
9639 	  *declp = MEM_EXPR (rtl);
9640 	  *offsetp = int_mem_offset (rtl);
9641 	  return true;
9642 	}
9643     }
9644   return false;
9645 }
9646 
9647 /* Record the value for the ENTRY_VALUE of RTL as a global equivalence
9648    of VAL.  */
9649 
9650 static void
record_entry_value(cselib_val * val,rtx rtl)9651 record_entry_value (cselib_val *val, rtx rtl)
9652 {
9653   rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));
9654 
9655   ENTRY_VALUE_EXP (ev) = rtl;
9656 
9657   cselib_add_permanent_equiv (val, ev, get_insns ());
9658 }
9659 
9660 /* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK.  */
9661 
9662 static void
vt_add_function_parameter(tree parm)9663 vt_add_function_parameter (tree parm)
9664 {
9665   rtx decl_rtl = DECL_RTL_IF_SET (parm);
9666   rtx incoming = DECL_INCOMING_RTL (parm);
9667   tree decl;
9668   machine_mode mode;
9669   poly_int64 offset;
9670   dataflow_set *out;
9671   decl_or_value dv;
9672   bool incoming_ok = true;
9673 
9674   if (TREE_CODE (parm) != PARM_DECL)
9675     return;
9676 
9677   if (!decl_rtl || !incoming)
9678     return;
9679 
9680   if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
9681     return;
9682 
9683   /* If there is a DRAP register or a pseudo in internal_arg_pointer,
9684      rewrite the incoming location of parameters passed on the stack
9685      into MEMs based on the argument pointer, so that incoming doesn't
9686      depend on a pseudo.  */
9687   poly_int64 incoming_offset = 0;
9688   if (MEM_P (incoming)
9689       && (strip_offset (XEXP (incoming, 0), &incoming_offset)
9690 	  == crtl->args.internal_arg_pointer))
9691     {
9692       HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
9693       incoming
9694 	= replace_equiv_address_nv (incoming,
9695 				    plus_constant (Pmode,
9696 						   arg_pointer_rtx,
9697 						   off + incoming_offset));
9698     }
9699 
9700 #ifdef HAVE_window_save
9701   /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
9702      If the target machine has an explicit window save instruction, the
9703      actual entry value is the corresponding OUTGOING_REGNO instead.  */
9704   if (HAVE_window_save && !crtl->uses_only_leaf_regs)
9705     {
9706       if (REG_P (incoming)
9707 	  && HARD_REGISTER_P (incoming)
9708 	  && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
9709 	{
9710 	  parm_reg p;
9711 	  p.incoming = incoming;
9712 	  incoming
9713 	    = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
9714 				  OUTGOING_REGNO (REGNO (incoming)), 0);
9715 	  p.outgoing = incoming;
9716 	  vec_safe_push (windowed_parm_regs, p);
9717 	}
9718       else if (GET_CODE (incoming) == PARALLEL)
9719 	{
9720 	  rtx outgoing
9721 	    = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (XVECLEN (incoming, 0)));
9722 	  int i;
9723 
9724 	  for (i = 0; i < XVECLEN (incoming, 0); i++)
9725 	    {
9726 	      rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9727 	      parm_reg p;
9728 	      p.incoming = reg;
9729 	      reg = gen_rtx_REG_offset (reg, GET_MODE (reg),
9730 					OUTGOING_REGNO (REGNO (reg)), 0);
9731 	      p.outgoing = reg;
9732 	      XVECEXP (outgoing, 0, i)
9733 		= gen_rtx_EXPR_LIST (VOIDmode, reg,
9734 				     XEXP (XVECEXP (incoming, 0, i), 1));
9735 	      vec_safe_push (windowed_parm_regs, p);
9736 	    }
9737 
9738 	  incoming = outgoing;
9739 	}
9740       else if (MEM_P (incoming)
9741 	       && REG_P (XEXP (incoming, 0))
9742 	       && HARD_REGISTER_P (XEXP (incoming, 0)))
9743 	{
9744 	  rtx reg = XEXP (incoming, 0);
9745 	  if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
9746 	    {
9747 	      parm_reg p;
9748 	      p.incoming = reg;
9749 	      reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
9750 	      p.outgoing = reg;
9751 	      vec_safe_push (windowed_parm_regs, p);
9752 	      incoming = replace_equiv_address_nv (incoming, reg);
9753 	    }
9754 	}
9755     }
9756 #endif
9757 
9758   if (!vt_get_decl_and_offset (incoming, &decl, &offset))
9759     {
9760       incoming_ok = false;
9761       if (MEM_P (incoming))
9762 	{
9763 	  /* This means argument is passed by invisible reference.  */
9764 	  offset = 0;
9765 	  decl = parm;
9766 	}
9767       else
9768 	{
9769 	  if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
9770 	    return;
9771 	  offset += byte_lowpart_offset (GET_MODE (incoming),
9772 					 GET_MODE (decl_rtl));
9773 	}
9774     }
9775 
9776   if (!decl)
9777     return;
9778 
9779   if (parm != decl)
9780     {
9781       /* If that DECL_RTL wasn't a pseudo that got spilled to
9782 	 memory, bail out.  Otherwise, the spill slot sharing code
9783 	 will force the memory to reference spill_slot_decl (%sfp),
9784 	 so we don't match above.  That's ok, the pseudo must have
9785 	 referenced the entire parameter, so just reset OFFSET.  */
9786       if (decl != get_spill_slot_decl (false))
9787         return;
9788       offset = 0;
9789     }
9790 
9791   HOST_WIDE_INT const_offset;
9792   if (!track_loc_p (incoming, parm, offset, false, &mode, &const_offset))
9793     return;
9794 
9795   out = &VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out;
9796 
9797   dv = dv_from_decl (parm);
9798 
9799   if (target_for_debug_bind (parm)
9800       /* We can't deal with these right now, because this kind of
9801 	 variable is single-part.  ??? We could handle parallels
9802 	 that describe multiple locations for the same single
9803 	 value, but ATM we don't.  */
9804       && GET_CODE (incoming) != PARALLEL)
9805     {
9806       cselib_val *val;
9807       rtx lowpart;
9808 
9809       /* ??? We shouldn't ever hit this, but it may happen because
9810 	 arguments passed by invisible reference aren't dealt with
9811 	 above: incoming-rtl will have Pmode rather than the
9812 	 expected mode for the type.  */
9813       if (const_offset)
9814 	return;
9815 
9816       lowpart = var_lowpart (mode, incoming);
9817       if (!lowpart)
9818 	return;
9819 
9820       val = cselib_lookup_from_insn (lowpart, mode, true,
9821 				     VOIDmode, get_insns ());
9822 
9823       /* ??? Float-typed values in memory are not handled by
9824 	 cselib.  */
9825       if (val)
9826 	{
9827 	  preserve_value (val);
9828 	  set_variable_part (out, val->val_rtx, dv, const_offset,
9829 			     VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9830 	  dv = dv_from_value (val->val_rtx);
9831 	}
9832 
9833       if (MEM_P (incoming))
9834 	{
9835 	  val = cselib_lookup_from_insn (XEXP (incoming, 0), mode, true,
9836 					 VOIDmode, get_insns ());
9837 	  if (val)
9838 	    {
9839 	      preserve_value (val);
9840 	      incoming = replace_equiv_address_nv (incoming, val->val_rtx);
9841 	    }
9842 	}
9843     }
9844 
9845   if (REG_P (incoming))
9846     {
9847       incoming = var_lowpart (mode, incoming);
9848       gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
9849       attrs_list_insert (&out->regs[REGNO (incoming)], dv, const_offset,
9850 			 incoming);
9851       set_variable_part (out, incoming, dv, const_offset,
9852 			 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9853       if (dv_is_value_p (dv))
9854 	{
9855 	  record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv)), incoming);
9856 	  if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
9857 	      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
9858 	    {
9859 	      machine_mode indmode
9860 		= TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
9861 	      rtx mem = gen_rtx_MEM (indmode, incoming);
9862 	      cselib_val *val = cselib_lookup_from_insn (mem, indmode, true,
9863 							 VOIDmode,
9864 							 get_insns ());
9865 	      if (val)
9866 		{
9867 		  preserve_value (val);
9868 		  record_entry_value (val, mem);
9869 		  set_variable_part (out, mem, dv_from_value (val->val_rtx), 0,
9870 				     VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9871 		}
9872 	    }
9873 	}
9874     }
9875   else if (GET_CODE (incoming) == PARALLEL && !dv_onepart_p (dv))
9876     {
9877       int i;
9878 
9879       /* The following code relies on vt_get_decl_and_offset returning true for
9880 	 incoming, which might not be always the case.  */
9881       if (!incoming_ok)
9882 	return;
9883       for (i = 0; i < XVECLEN (incoming, 0); i++)
9884 	{
9885 	  rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9886 	  /* vt_get_decl_and_offset has already checked that the offset
9887 	     is a valid variable part.  */
9888 	  const_offset = get_tracked_reg_offset (reg);
9889 	  gcc_assert (REGNO (reg) < FIRST_PSEUDO_REGISTER);
9890 	  attrs_list_insert (&out->regs[REGNO (reg)], dv, const_offset, reg);
9891 	  set_variable_part (out, reg, dv, const_offset,
9892 			     VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9893 	}
9894     }
9895   else if (MEM_P (incoming))
9896     {
9897       incoming = var_lowpart (mode, incoming);
9898       set_variable_part (out, incoming, dv, const_offset,
9899 			 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9900     }
9901 }
9902 
9903 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK.  */
9904 
9905 static void
vt_add_function_parameters(void)9906 vt_add_function_parameters (void)
9907 {
9908   tree parm;
9909 
9910   for (parm = DECL_ARGUMENTS (current_function_decl);
9911        parm; parm = DECL_CHAIN (parm))
9912     vt_add_function_parameter (parm);
9913 
9914   if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
9915     {
9916       tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));
9917 
9918       if (TREE_CODE (vexpr) == INDIRECT_REF)
9919 	vexpr = TREE_OPERAND (vexpr, 0);
9920 
9921       if (TREE_CODE (vexpr) == PARM_DECL
9922 	  && DECL_ARTIFICIAL (vexpr)
9923 	  && !DECL_IGNORED_P (vexpr)
9924 	  && DECL_NAMELESS (vexpr))
9925 	vt_add_function_parameter (vexpr);
9926     }
9927 }
9928 
/* Initialize cfa_base_rtx, create a preserved VALUE for it and
   ensure it isn't flushed during cselib_reset_table.
   Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
   has been eliminated.  */

static void
vt_init_cfa_base (void)
{
  cselib_val *val;

  /* Pick the register the CFA is expressed from; which one depends on
     whether the target defines FRAME_POINTER_CFA_OFFSET.  */
#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_base_rtx = frame_pointer_rtx;
  cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_base_rtx = arg_pointer_rtx;
  cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif
  /* Give up on using a CFA base unless it is a fixed register distinct
     from the hard frame pointer.  */
  if (cfa_base_rtx == hard_frame_pointer_rtx
      || !fixed_regs[REGNO (cfa_base_rtx)])
    {
      cfa_base_rtx = NULL_RTX;
      return;
    }
  /* The cselib-based setup below is only needed for debug bind insns.  */
  if (!MAY_HAVE_DEBUG_BIND_INSNS)
    return;

  /* Tell alias analysis that cfa_base_rtx should share
     find_base_term value with stack pointer or hard frame pointer.  */
  if (!frame_pointer_needed)
    vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
  else if (!crtl->stack_realign_tried)
    vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);

  /* Create a VALUE for the CFA base and preserve it so it survives
     cselib_reset_table.  */
  val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
				 VOIDmode, get_insns ());
  preserve_value (val);
  cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
}
9967 
9968 /* Reemit INSN, a MARKER_DEBUG_INSN, as a note.  */
9969 
9970 static rtx_insn *
reemit_marker_as_note(rtx_insn * insn)9971 reemit_marker_as_note (rtx_insn *insn)
9972 {
9973   gcc_checking_assert (DEBUG_MARKER_INSN_P (insn));
9974 
9975   enum insn_note kind = INSN_DEBUG_MARKER_KIND (insn);
9976 
9977   switch (kind)
9978     {
9979     case NOTE_INSN_BEGIN_STMT:
9980     case NOTE_INSN_INLINE_ENTRY:
9981       {
9982 	rtx_insn *note = NULL;
9983 	if (cfun->debug_nonbind_markers)
9984 	  {
9985 	    note = emit_note_before (kind, insn);
9986 	    NOTE_MARKER_LOCATION (note) = INSN_LOCATION (insn);
9987 	  }
9988 	delete_insn (insn);
9989 	return note;
9990       }
9991 
9992     default:
9993       gcc_unreachable ();
9994     }
9995 }
9996 
/* Allocate and initialize the data structures for variable tracking
   and parse the RTL to get the micro operations.  Returns false if
   the stack adjustments are inconsistent and the pass must bail.  */

static bool
vt_initialize (void)
{
  basic_block bb;
  poly_int64 fp_cfa_offset = -1;

  alloc_aux_for_blocks (sizeof (variable_tracking_info));

  empty_shared_hash = shared_hash_pool.allocate ();
  empty_shared_hash->refcount = 1;
  empty_shared_hash->htab = new variable_table_type (1);
  changed_variables = new variable_table_type (10);

  /* Init the IN and OUT sets.  */
  FOR_ALL_BB_FN (bb, cfun)
    {
      VTI (bb)->visited = false;
      VTI (bb)->flooded = false;
      dataflow_set_init (&VTI (bb)->in);
      dataflow_set_init (&VTI (bb)->out);
      VTI (bb)->permp = NULL;
    }

  /* The cselib machinery and its helpers are only needed when debug
     bind insns may exist.  */
  if (MAY_HAVE_DEBUG_BIND_INSNS)
    {
      cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
      scratch_regs = BITMAP_ALLOC (NULL);
      preserved_values.create (256);
      global_get_addr_cache = new hash_map<rtx, rtx>;
    }
  else
    {
      scratch_regs = NULL;
      global_get_addr_cache = NULL;
    }

  if (MAY_HAVE_DEBUG_BIND_INSNS)
    {
      rtx reg, expr;
      int ofst;
      cselib_val *val;

#ifdef FRAME_POINTER_CFA_OFFSET
      reg = frame_pointer_rtx;
      ofst = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
      reg = arg_pointer_rtx;
      ofst = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

      ofst -= INCOMING_FRAME_SP_OFFSET;

      /* Record a preserved, permanent equivalence REG == SP - OFST so
	 cselib can relate CFA-base and stack-pointer addresses.  */
      val = cselib_lookup_from_insn (reg, GET_MODE (reg), 1,
				     VOIDmode, get_insns ());
      preserve_value (val);
      if (reg != hard_frame_pointer_rtx && fixed_regs[REGNO (reg)])
	cselib_preserve_cfa_base_value (val, REGNO (reg));
      expr = plus_constant (GET_MODE (stack_pointer_rtx),
			    stack_pointer_rtx, -ofst);
      cselib_add_permanent_equiv (val, expr, get_insns ());

      /* And the reverse equivalence SP == REG + OFST when the offset
	 is nonzero.  */
      if (ofst)
	{
	  val = cselib_lookup_from_insn (stack_pointer_rtx,
					 GET_MODE (stack_pointer_rtx), 1,
					 VOIDmode, get_insns ());
	  preserve_value (val);
	  expr = plus_constant (GET_MODE (reg), reg, ofst);
	  cselib_add_permanent_equiv (val, expr, get_insns ());
	}
    }

  /* In order to factor out the adjustments made to the stack pointer or to
     the hard frame pointer and thus be able to use DW_OP_fbreg operations
     instead of individual location lists, we're going to rewrite MEMs based
     on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
     or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
     resp. arg_pointer_rtx.  We can do this either when there is no frame
     pointer in the function and stack adjustments are consistent for all
     basic blocks or when there is a frame pointer and no stack realignment.
     But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
     has been eliminated.  */
  if (!frame_pointer_needed)
    {
      rtx reg, elim;

      if (!vt_stack_adjustments ())
	return false;

#ifdef FRAME_POINTER_CFA_OFFSET
      reg = frame_pointer_rtx;
#else
      reg = arg_pointer_rtx;
#endif
      elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
      if (elim != reg)
	{
	  if (GET_CODE (elim) == PLUS)
	    elim = XEXP (elim, 0);
	  if (elim == stack_pointer_rtx)
	    vt_init_cfa_base ();
	}
    }
  else if (!crtl->stack_realign_tried)
    {
      rtx reg, elim;

#ifdef FRAME_POINTER_CFA_OFFSET
      reg = frame_pointer_rtx;
      fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
      reg = arg_pointer_rtx;
      fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif
      elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
      if (elim != reg)
	{
	  if (GET_CODE (elim) == PLUS)
	    {
	      fp_cfa_offset -= rtx_to_poly_int64 (XEXP (elim, 1));
	      elim = XEXP (elim, 0);
	    }
	  /* fp_cfa_offset == -1 means "no usable fp-relative CFA".  */
	  if (elim != hard_frame_pointer_rtx)
	    fp_cfa_offset = -1;
	}
      else
	fp_cfa_offset = -1;
    }

  /* If the stack is realigned and a DRAP register is used, we're going to
     rewrite MEMs based on it representing incoming locations of parameters
     passed on the stack into MEMs based on the argument pointer.  Although
     we aren't going to rewrite other MEMs, we still need to initialize the
     virtual CFA pointer in order to ensure that the argument pointer will
     be seen as a constant throughout the function.

     ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined.  */
  else if (stack_realign_drap)
    {
      rtx reg, elim;

#ifdef FRAME_POINTER_CFA_OFFSET
      reg = frame_pointer_rtx;
#else
      reg = arg_pointer_rtx;
#endif
      elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
      if (elim != reg)
	{
	  if (GET_CODE (elim) == PLUS)
	    elim = XEXP (elim, 0);
	  if (elim == hard_frame_pointer_rtx)
	    vt_init_cfa_base ();
	}
    }

  hard_frame_pointer_adjustment = -1;

  vt_add_function_parameters ();

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn;
      HOST_WIDE_INT pre, post = 0;
      basic_block first_bb, last_bb;

      if (MAY_HAVE_DEBUG_BIND_INSNS)
	{
	  cselib_record_sets_hook = add_with_sets;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "first value: %i\n",
		     cselib_get_next_uid ());
	}

      /* Extend the region to process to a maximal chain of blocks
	 linked by fallthru edges with a single predecessor each, so
	 the cselib table is only reset at region boundaries.  */
      first_bb = bb;
      for (;;)
	{
	  edge e;
	  if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
	      || ! single_pred_p (bb->next_bb))
	    break;
	  e = find_edge (bb, bb->next_bb);
	  if (! e || (e->flags & EDGE_FALLTHRU) == 0)
	    break;
	  bb = bb->next_bb;
	}
      last_bb = bb;

      /* Add the micro-operations to the vector.  */
      FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
	{
	  /* OFFSET is the expected stack_adjust at block exit; it is
	     re-derived below and checked against this at the end.  */
	  HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
	  VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;

	  rtx_insn *next;
	  FOR_BB_INSNS_SAFE (bb, insn, next)
	    {
	      if (INSN_P (insn))
		{
		  if (!frame_pointer_needed)
		    {
		      /* Record a pre-modification stack adjustment
			 before the insn's own micro-operations.  */
		      insn_stack_adjust_offset_pre_post (insn, &pre, &post);
		      if (pre)
			{
			  micro_operation mo;
			  mo.type = MO_ADJUST;
			  mo.u.adjust = pre;
			  mo.insn = insn;
			  if (dump_file && (dump_flags & TDF_DETAILS))
			    log_op_type (PATTERN (insn), bb, insn,
					 MO_ADJUST, dump_file);
			  VTI (bb)->mos.safe_push (mo);
			}
		    }

		  cselib_hook_called = false;
		  adjust_insn (bb, insn);

		  if (!frame_pointer_needed && pre)
		    VTI (bb)->out.stack_adjust += pre;

		  /* Debug markers contribute no micro-operations; turn
		     them back into notes and move on.  */
		  if (DEBUG_MARKER_INSN_P (insn))
		    {
		      reemit_marker_as_note (insn);
		      continue;
		    }

		  if (MAY_HAVE_DEBUG_BIND_INSNS)
		    {
		      if (CALL_P (insn))
			prepare_call_arguments (bb, insn);
		      cselib_process_insn (insn);
		      if (dump_file && (dump_flags & TDF_DETAILS))
			{
			  if (dump_flags & TDF_SLIM)
			    dump_insn_slim (dump_file, insn);
			  else
			    print_rtl_single (dump_file, insn);
			  dump_cselib_table (dump_file);
			}
		    }
		  /* If cselib didn't invoke our hook for this insn,
		     record its micro-operations directly.  */
		  if (!cselib_hook_called)
		    add_with_sets (insn, 0, 0);
		  cancel_changes (0);

		  /* Post-modification stack adjustment goes after the
		     insn's micro-operations.  */
		  if (!frame_pointer_needed && post)
		    {
		      micro_operation mo;
		      mo.type = MO_ADJUST;
		      mo.u.adjust = post;
		      mo.insn = insn;
		      if (dump_file && (dump_flags & TDF_DETAILS))
			log_op_type (PATTERN (insn), bb, insn,
				     MO_ADJUST, dump_file);
		      VTI (bb)->mos.safe_push (mo);
		      VTI (bb)->out.stack_adjust += post;
		    }

		  /* Once the insn establishing the frame pointer is
		     seen, switch to the fp-relative CFA base.  */
		  if (maybe_ne (fp_cfa_offset, -1)
		      && known_eq (hard_frame_pointer_adjustment, -1)
		      && fp_setter_insn (insn))
		    {
		      vt_init_cfa_base ();
		      hard_frame_pointer_adjustment = fp_cfa_offset;
		      /* Disassociate sp from fp now.  */
		      if (MAY_HAVE_DEBUG_BIND_INSNS)
			{
			  cselib_val *v;
			  cselib_invalidate_rtx (stack_pointer_rtx);
			  v = cselib_lookup (stack_pointer_rtx, Pmode, 1,
					     VOIDmode);
			  if (v && !cselib_preserved_value_p (v))
			    {
			      cselib_set_value_sp_based (v);
			      preserve_value (v);
			    }
			}
		    }
		}
	    }
	  gcc_assert (offset == VTI (bb)->out.stack_adjust);
	}

      bb = last_bb;

      /* Reset the cselib table at the end of the region, keeping only
	 the preserved values.  */
      if (MAY_HAVE_DEBUG_BIND_INSNS)
	{
	  cselib_preserve_only_values ();
	  cselib_reset_table (cselib_get_next_uid ());
	  cselib_record_sets_hook = NULL;
	}
    }

  hard_frame_pointer_adjustment = -1;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->flooded = true;
  cfa_base_rtx = NULL_RTX;
  return true;
}
10298 
/* Counter for CODE_LABEL_NUMBER of reemitted debug labels.  This is
   *not* reset after each function.  It gives each
   NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
   a unique label number.  */

static int debug_label_num = 1;
10304 
/* Remove from the insn stream a single debug insn used for
   variable tracking at assignments.  */

static inline void
delete_vta_debug_insn (rtx_insn *insn)
{
  /* Debug markers are reemitted as notes rather than deleted.  */
  if (DEBUG_MARKER_INSN_P (insn))
    {
      reemit_marker_as_note (insn);
      return;
    }

  tree decl = INSN_VAR_LOCATION_DECL (insn);
  /* A bind for a named label without DECL_RTL set: transform the insn
     in place into a NOTE_INSN_DELETED_DEBUG_LABEL so the label name
     survives into debug info, record the note as the label's DECL_RTL,
     and give it a compilation-wide unique label number.  */
  if (TREE_CODE (decl) == LABEL_DECL
      && DECL_NAME (decl)
      && !DECL_RTL_SET_P (decl))
    {
      PUT_CODE (insn, NOTE);
      NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
      NOTE_DELETED_LABEL_NAME (insn)
	= IDENTIFIER_POINTER (DECL_NAME (decl));
      SET_DECL_RTL (decl, insn);
      CODE_LABEL_NUMBER (insn) = debug_label_num++;
    }
  else
    /* Any other debug insn is simply dropped from the stream.  */
    delete_insn (insn);
}
10332 
10333 /* Remove from the insn stream all debug insns used for variable
10334    tracking at assignments.  USE_CFG should be false if the cfg is no
10335    longer usable.  */
10336 
10337 void
delete_vta_debug_insns(bool use_cfg)10338 delete_vta_debug_insns (bool use_cfg)
10339 {
10340   basic_block bb;
10341   rtx_insn *insn, *next;
10342 
10343   if (!MAY_HAVE_DEBUG_INSNS)
10344     return;
10345 
10346   if (use_cfg)
10347     FOR_EACH_BB_FN (bb, cfun)
10348       {
10349 	FOR_BB_INSNS_SAFE (bb, insn, next)
10350 	  if (DEBUG_INSN_P (insn))
10351 	    delete_vta_debug_insn (insn);
10352       }
10353   else
10354     for (insn = get_insns (); insn; insn = next)
10355       {
10356 	next = NEXT_INSN (insn);
10357 	if (DEBUG_INSN_P (insn))
10358 	  delete_vta_debug_insn (insn);
10359       }
10360 }
10361 
10362 /* Run a fast, BB-local only version of var tracking, to take care of
10363    information that we don't do global analysis on, such that not all
10364    information is lost.  If SKIPPED holds, we're skipping the global
10365    pass entirely, so we should try to use information it would have
10366    handled as well..  */
10367 
10368 static void
vt_debug_insns_local(bool skipped ATTRIBUTE_UNUSED)10369 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
10370 {
10371   /* ??? Just skip it all for now.  */
10372   delete_vta_debug_insns (true);
10373 }
10374 
/* Free the data structures needed for variable tracking.  */

static void
vt_finalize (void)
{
  basic_block bb;

  /* Release the per-block micro-operation vectors.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      VTI (bb)->mos.release ();
    }

  /* Destroy the per-block dataflow sets, including the ENTRY and EXIT
     blocks, plus the permanent set where one was allocated.  */
  FOR_ALL_BB_FN (bb, cfun)
    {
      dataflow_set_destroy (&VTI (bb)->in);
      dataflow_set_destroy (&VTI (bb)->out);
      if (VTI (bb)->permp)
	{
	  dataflow_set_destroy (VTI (bb)->permp);
	  XDELETE (VTI (bb)->permp);
	}
    }
  free_aux_for_blocks ();
  delete empty_shared_hash->htab;
  empty_shared_hash->htab = NULL;
  delete changed_variables;
  changed_variables = NULL;
  attrs_pool.release ();
  var_pool.release ();
  location_chain_pool.release ();
  shared_hash_pool.release ();

  /* Tear down what vt_initialize set up for debug bind insns.  */
  if (MAY_HAVE_DEBUG_BIND_INSNS)
    {
      if (global_get_addr_cache)
	delete global_get_addr_cache;
      global_get_addr_cache = NULL;
      loc_exp_dep_pool.release ();
      valvar_pool.release ();
      preserved_values.release ();
      cselib_finish ();
      BITMAP_FREE (scratch_regs);
      scratch_regs = NULL;
    }

#ifdef HAVE_window_save
  vec_free (windowed_parm_regs);
#endif

  if (vui_vec)
    XDELETEVEC (vui_vec);
  vui_vec = NULL;
  vui_allocated = 0;
}
10429 
/* The entry point to variable tracking pass.  */

static inline unsigned int
variable_tracking_main_1 (void)
{
  bool success;

  /* We won't be called as a separate pass if flag_var_tracking is not
     set, but final may call us to turn debug markers into notes.  */
  if ((!flag_var_tracking && MAY_HAVE_DEBUG_INSNS)
      || flag_var_tracking_assignments < 0
      /* Var-tracking right now assumes the IR doesn't contain
	 any pseudos at this point.  */
      || targetm.no_register_allocation)
    {
      delete_vta_debug_insns (true);
      return 0;
    }

  if (!flag_var_tracking)
    return 0;

  /* Size/density cutoff: skip the global analysis on functions with
     many blocks and a high average edge count per block.  */
  if (n_basic_blocks_for_fn (cfun) > 500
      && n_edges_for_fn (cfun) / n_basic_blocks_for_fn (cfun) >= 20)
    {
      vt_debug_insns_local (true);
      return 0;
    }

  mark_dfs_back_edges ();
  if (!vt_initialize ())
    {
      vt_finalize ();
      vt_debug_insns_local (true);
      return 0;
    }

  success = vt_find_locations ();

  /* If the dataflow analysis failed with variable tracking at
     assignments enabled, drop the VTA debug insns and retry once
     without it.  */
  if (!success && flag_var_tracking_assignments > 0)
    {
      vt_finalize ();

      delete_vta_debug_insns (true);

      /* This is later restored by our caller.  */
      flag_var_tracking_assignments = 0;

      success = vt_initialize ();
      gcc_assert (success);

      success = vt_find_locations ();
    }

  if (!success)
    {
      vt_finalize ();
      vt_debug_insns_local (false);
      return 0;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_dataflow_sets ();
      dump_reg_info (dump_file);
      dump_flow_info (dump_file, dump_flags);
    }

  /* Emit the var-location notes from the computed locations.  */
  timevar_push (TV_VAR_TRACKING_EMIT);
  vt_emit_notes ();
  timevar_pop (TV_VAR_TRACKING_EMIT);

  vt_finalize ();
  vt_debug_insns_local (false);
  return 0;
}
10506 
10507 unsigned int
variable_tracking_main(void)10508 variable_tracking_main (void)
10509 {
10510   unsigned int ret;
10511   int save = flag_var_tracking_assignments;
10512 
10513   ret = variable_tracking_main_1 ();
10514 
10515   flag_var_tracking_assignments = save;
10516 
10517   return ret;
10518 }
10519 
namespace {

/* Pass registration data for the "vartrack" RTL pass.  */
const pass_data pass_data_variable_tracking =
{
  RTL_PASS, /* type */
  "vartrack", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_VAR_TRACKING, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass object wrapping variable_tracking_main.  */
class pass_variable_tracking : public rtl_opt_pass
{
public:
  pass_variable_tracking (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_variable_tracking, ctxt)
  {}

  /* opt_pass methods: */
  /* Run only when var-tracking is enabled and the target doesn't
     delay it (targetm.delay_vartrack).  */
  virtual bool gate (function *)
    {
      return (flag_var_tracking && !targetm.delay_vartrack);
    }

  virtual unsigned int execute (function *)
    {
      return variable_tracking_main ();
    }

}; // class pass_variable_tracking

} // anon namespace
10556 
10557 rtl_opt_pass *
make_pass_variable_tracking(gcc::context * ctxt)10558 make_pass_variable_tracking (gcc::context *ctxt)
10559 {
10560   return new pass_variable_tracking (ctxt);
10561 }
10562