/* Variable tracking routines for the GNU compiler.
   Copyright (C) 2002-2021 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

/* This file contains the variable tracking pass.  It computes where
   variables are located (which registers or where in memory) at each
   position in the instruction stream and emits notes describing the
   locations.  Debug information (DWARF2 location lists) is finally
   generated from these notes.
   With this debug information, it is possible to show variables
   even when debugging optimized code.

   How does the variable tracking pass work?

   First, it scans the RTL code for uses, stores and clobbers (register/memory
   references in instructions), for call insns and for stack adjustments,
   separately for each basic block, and saves them to an array of micro
   operations.
   The micro operations of one instruction are ordered so that
   pre-modifying stack adjustment < use < use with no var < call insn
     < clobber < set < post-modifying stack adjustment
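
   For instance (an illustrative sketch, not actual output of the pass),
   a call insn that uses some argument registers, clobbers the
   call-clobbered registers and pops its stack arguments on return would
   yield MO_USE records for the argument registers, then MO_CALL, then
   MO_CLOBBER records, and finally an MO_ADJUST for the post-call stack
   adjustment, in that order.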

   Then, a forward dataflow analysis is performed to find out how the
   locations of variables change through the code and to propagate those
   locations along the control flow graph.
   The IN set for basic block BB is computed as a union of the OUT sets
   of BB's predecessors; the OUT set for BB is copied from the IN set for
   BB and is changed according to the micro operations in BB.
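
   Schematically (restating the paragraph above, with "transfer" standing
   for the application of BB's micro operations in order):

     IN (BB)  = union of OUT (P) over all predecessors P of BB
     OUT (BB) = transfer (BB, IN (BB))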

   The IN and OUT sets for basic blocks consist of a current stack
   adjustment (used for adjusting the offset of variables addressed
   relative to the stack pointer), the table of structures describing the
   locations of parts of a variable, and, for each physical register, a
   linked list of the variable parts stored in that register.
   Each such list is a list of triplets (reg, decl, offset) where decl is
   REG_EXPR (reg) and offset is REG_OFFSET (reg).  The linked list is used
   for efficiently deleting the appropriate variable parts when the
   register is set or clobbered.

   There may be more than one variable part in a register.  The linked
   lists should be pretty short, so a list is a good data structure here.
   For example, in the following code the register allocator may assign
   the same register to variables A and B, and both of them are stored in
   the same register in CODE:

     if (cond)
       set A;
     else
       set B;
     CODE;
     if (cond)
       use A;
     else
       use B;

   Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable
   locations are emitted at the appropriate positions in the RTL code.
   Each such note describes the location of one variable at the point in
   the instruction stream where the note is.  There is no need to emit a
   note for each variable before each instruction; we only emit these
   notes where the location of a variable changes (this means that we
   also emit notes for changes between the OUT set of the previous block
   and the IN set of the current block).

   The notes consist of two parts:
   1. the declaration (from REG_EXPR or MEM_EXPR)
   2. the location of the variable - it is either a simple register/memory
      reference (for simple variables, for example an int),
      or a parallel of register/memory references (for large variables
      which consist of several parts, for example a long long).

*/
87 
88 #include "config.h"
89 #include "system.h"
90 #include "coretypes.h"
91 #include "backend.h"
92 #include "target.h"
93 #include "rtl.h"
94 #include "tree.h"
95 #include "cfghooks.h"
96 #include "alloc-pool.h"
97 #include "tree-pass.h"
98 #include "memmodel.h"
99 #include "tm_p.h"
100 #include "insn-config.h"
101 #include "regs.h"
102 #include "emit-rtl.h"
103 #include "recog.h"
104 #include "diagnostic.h"
105 #include "varasm.h"
106 #include "stor-layout.h"
107 #include "cfgrtl.h"
108 #include "cfganal.h"
109 #include "reload.h"
110 #include "calls.h"
111 #include "tree-dfa.h"
112 #include "tree-ssa.h"
113 #include "cselib.h"
114 #include "tree-pretty-print.h"
115 #include "rtl-iter.h"
116 #include "fibonacci_heap.h"
117 #include "print-rtl.h"
118 #include "function-abi.h"
119 
120 typedef fibonacci_heap <long, basic_block_def> bb_heap_t;
121 
/* var-tracking.c assumes that a tree code with the same value as the VALUE
   rtx code has no chance to appear in REG_EXPRs/MEM_EXPRs and isn't a decl.
   Currently the value is the same as IDENTIFIER_NODE, which has such
   a property.  If this compile-time assertion ever fails, make sure that
   the new tree code that equals (int) VALUE has the same property.  */
extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
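
/* Note: with C++11 the same check could be written as a static_assert;
   a hypothetical equivalent of the declaration above would be:

     static_assert ((int) VALUE == (int) IDENTIFIER_NODE,
		    "VALUE rtx code must equal IDENTIFIER_NODE tree code");  */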

/* Type of micro operation.  */
enum micro_operation_type
{
  MO_USE,	/* Use location (REG or MEM).  */
  MO_USE_NO_VAR,/* Use location which is not associated with a variable
		   or the variable is not trackable.  */
  MO_VAL_USE,	/* Use location which is associated with a value.  */
  MO_VAL_LOC,   /* Use location which appears in a debug insn.  */
  MO_VAL_SET,	/* Set location associated with a value.  */
  MO_SET,	/* Set location.  */
  MO_COPY,	/* Copy the same portion of a variable from one
		   location to another.  */
  MO_CLOBBER,	/* Clobber location.  */
  MO_CALL,	/* Call insn.  */
  MO_ADJUST	/* Adjust stack pointer.  */

};

static const char * const ATTRIBUTE_UNUSED
micro_operation_type_name[] = {
  "MO_USE",
  "MO_USE_NO_VAR",
  "MO_VAL_USE",
  "MO_VAL_LOC",
  "MO_VAL_SET",
  "MO_SET",
  "MO_COPY",
  "MO_CLOBBER",
  "MO_CALL",
  "MO_ADJUST"
};

/* Where shall the note be emitted?  BEFORE or AFTER the instruction.
   Notes emitted as AFTER_CALL are to take effect during the call,
   rather than after the call.  */
enum emit_note_where
{
  EMIT_NOTE_BEFORE_INSN,
  EMIT_NOTE_AFTER_INSN,
  EMIT_NOTE_AFTER_CALL_INSN
};

/* Structure holding information about micro operation.  */
struct micro_operation
{
  /* Type of micro operation.  */
  enum micro_operation_type type;

  /* The instruction which the micro operation is in, for MO_USE,
     MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
     instruction or note in the original flow (before any var-tracking
     notes are inserted, to simplify emission of notes), for MO_SET
     and MO_CLOBBER.  */
  rtx_insn *insn;

  union {
    /* Location.  For MO_SET and MO_COPY, this is the SET that
       performs the assignment, if known, otherwise it is the target
       of the assignment.  For MO_VAL_USE and MO_VAL_SET, it is a
       CONCAT of the VALUE and the LOC associated with it.  For
       MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
       associated with it.  */
    rtx loc;

    /* Stack adjustment.  */
    HOST_WIDE_INT adjust;
  } u;
};


/* A declaration of a variable, or an RTL value being handled like a
   declaration.  */
typedef void *decl_or_value;

/* Return true if a decl_or_value DV is a DECL or NULL.  */
static inline bool
dv_is_decl_p (decl_or_value dv)
{
  return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
}

/* Return true if a decl_or_value is a VALUE rtl.  */
static inline bool
dv_is_value_p (decl_or_value dv)
{
  return dv && !dv_is_decl_p (dv);
}

/* Return the decl in the decl_or_value.  */
static inline tree
dv_as_decl (decl_or_value dv)
{
  gcc_checking_assert (dv_is_decl_p (dv));
  return (tree) dv;
}

/* Return the value in the decl_or_value.  */
static inline rtx
dv_as_value (decl_or_value dv)
{
  gcc_checking_assert (dv_is_value_p (dv));
  return (rtx)dv;
}

/* Return the opaque pointer in the decl_or_value.  */
static inline void *
dv_as_opaque (decl_or_value dv)
{
  return dv;
}
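
/* A minimal usage sketch of the accessors above (DECL and VAL are assumed
   to be a decl tree and a VALUE rtx in scope; the checked constructors
   dv_from_decl and dv_from_value appear further below):

     decl_or_value dv = dv_from_decl (decl);
     if (dv_is_decl_p (dv))
       {
	 tree t = dv_as_decl (dv);	// checked accessor
	 ...
       }
     dv = dv_from_value (val);
     gcc_checking_assert (dv_is_value_p (dv));  */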


/* Description of the location of a part of a variable.  The content of a
   physical register is described by a chain of these structures.
   The chains are pretty short (usually 1 or 2 elements) and thus
   a chain is a good data structure here.  */
struct attrs
{
  /* Pointer to next member of the list.  */
  attrs *next;

  /* The rtx of register.  */
  rtx loc;

  /* The declaration corresponding to LOC.  */
  decl_or_value dv;

  /* Offset from start of DECL.  */
  HOST_WIDE_INT offset;
};

/* Structure for chaining the locations.  */
struct location_chain
{
  /* Next element in the chain.  */
  location_chain *next;

  /* The location (REG, MEM or VALUE).  */
  rtx loc;

  /* The "value" stored in this location.  */
  rtx set_src;

  /* Initialized? */
  enum var_init_status init;
};

/* A vector of loc_exp_dep holds the active dependencies of a one-part
   DV on VALUEs, i.e., the VALUEs expanded so as to form the current
   location of DV.  Each entry is also part of VALUE's linked-list of
   backlinks back to DV.  */
struct loc_exp_dep
{
  /* The dependent DV.  */
  decl_or_value dv;
  /* The dependency VALUE or DECL_DEBUG.  */
  rtx value;
  /* The next entry in VALUE's backlinks list.  */
  struct loc_exp_dep *next;
  /* A pointer to the pointer to this entry (head or prev's next) in
     the doubly-linked list.  */
  struct loc_exp_dep **pprev;
};


/* This data structure holds information about the depth of a variable
   expansion.  */
struct expand_depth
{
  /* This measures the complexity of the expanded expression.  It
     grows by one for each level of expansion that adds more than one
     operand.  */
  int complexity;
  /* This counts the number of ENTRY_VALUE expressions in an
     expansion.  We want to minimize their use.  */
  int entryvals;
};

/* Type for dependencies actively used when expanding FROM into cur_loc.  */
typedef vec<loc_exp_dep, va_heap, vl_embed> deps_vec;

/* This data structure is allocated for one-part variables at the time
   of emitting notes.  */
struct onepart_aux
{
  /* Doubly-linked list of dependent DVs.  These are DVs whose cur_loc
     computation used the expansion of this variable, and that ought
     to be notified should this variable change.  If the DV's cur_loc
     expanded to NULL, all components of the loc list are regarded as
     active, so that any changes in them give us a chance to get a
     location.  Otherwise, only components of the loc that expanded to
     non-NULL are regarded as active dependencies.  */
  loc_exp_dep *backlinks;
  /* This holds the LOC that was expanded into cur_loc.  We need only
     mark a one-part variable as changed if the FROM loc is removed,
     or if it has no known location and a loc is added, or if it gets
     a change notification from any of its active dependencies.  */
  rtx from;
  /* The depth of the cur_loc expression.  */
  expand_depth depth;
  /* Dependencies actively used when expanding FROM into cur_loc.  */
  deps_vec deps;
};

/* Structure describing one part of a variable.  */
struct variable_part
{
  /* Chain of locations of the part.  */
  location_chain *loc_chain;

  /* Location which was last emitted to location list.  */
  rtx cur_loc;

  union variable_aux
  {
    /* The offset in the variable, if !var->onepart.  */
    HOST_WIDE_INT offset;

    /* Pointer to auxiliary data, if var->onepart and emit_notes.  */
    struct onepart_aux *onepaux;
  } aux;
};

/* Maximum number of location parts.  */
#define MAX_VAR_PARTS 16

/* Enumeration type used to discriminate various types of one-part
   variables.  */
enum onepart_enum
{
  /* Not a one-part variable.  */
  NOT_ONEPART = 0,
  /* A one-part DECL that is not a DEBUG_EXPR_DECL.  */
  ONEPART_VDECL = 1,
  /* A DEBUG_EXPR_DECL.  */
  ONEPART_DEXPR = 2,
  /* A VALUE.  */
  ONEPART_VALUE = 3
};

/* Structure describing where the variable is located.  */
struct variable
{
  /* The declaration of the variable, or an RTL value being handled
     like a declaration.  */
  decl_or_value dv;

  /* Reference count.  */
  int refcount;

  /* Number of variable parts.  */
  char n_var_parts;

  /* What type of DV this is, according to enum onepart_enum.  */
  ENUM_BITFIELD (onepart_enum) onepart : CHAR_BIT;

  /* True if this variable struct is currently in the
     changed_variables hash table.  */
  bool in_changed_variables;

  /* The variable parts.  */
  variable_part var_part[1];
};

/* Pointer to the BB's information specific to variable tracking pass.  */
#define VTI(BB) ((variable_tracking_info *) (BB)->aux)

/* Return MEM_OFFSET (MEM) as a HOST_WIDE_INT, or 0 if we can't.  */

static inline HOST_WIDE_INT
int_mem_offset (const_rtx mem)
{
  HOST_WIDE_INT offset;
  if (MEM_OFFSET_KNOWN_P (mem) && MEM_OFFSET (mem).is_constant (&offset))
    return offset;
  return 0;
}

#if CHECKING_P && (GCC_VERSION >= 2007)

/* Access VAR's Ith part's offset, checking that it's not a one-part
   variable.  */
#define VAR_PART_OFFSET(var, i) __extension__			\
(*({  variable *const __v = (var);				\
      gcc_checking_assert (!__v->onepart);			\
      &__v->var_part[(i)].aux.offset; }))

/* Access VAR's one-part auxiliary data, checking that it is a
   one-part variable.  */
#define VAR_LOC_1PAUX(var) __extension__			\
(*({  variable *const __v = (var);				\
      gcc_checking_assert (__v->onepart);			\
      &__v->var_part[0].aux.onepaux; }))

#else
#define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
#define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
#endif
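
/* Note that both macros expand to lvalues, so they can be assigned to as
   well as read; e.g. (an illustrative use, assuming VAR is known not to
   be a one-part variable):

     VAR_PART_OFFSET (var, 0) = new_offset;

   In checking builds the embedded gcc_checking_assert validates the
   onepart discriminator before every such access.  */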

/* These are accessor macros for the one-part auxiliary data.  When
   convenient for users, they're guarded by tests that the data was
   allocated.  */
#define VAR_LOC_DEP_LST(var) (VAR_LOC_1PAUX (var)		  \
			      ? VAR_LOC_1PAUX (var)->backlinks	  \
			      : NULL)
#define VAR_LOC_DEP_LSTP(var) (VAR_LOC_1PAUX (var)		  \
			       ? &VAR_LOC_1PAUX (var)->backlinks  \
			       : NULL)
#define VAR_LOC_FROM(var) (VAR_LOC_1PAUX (var)->from)
#define VAR_LOC_DEPTH(var) (VAR_LOC_1PAUX (var)->depth)
#define VAR_LOC_DEP_VEC(var) var_loc_dep_vec (var)

/* Implements the VAR_LOC_DEP_VEC above as a function to work around
   a bogus -Wnonnull (PR c/95554). */

static inline deps_vec*
var_loc_dep_vec (variable *var)
{
  return VAR_LOC_1PAUX (var) ? &VAR_LOC_1PAUX (var)->deps : NULL;
}


typedef unsigned int dvuid;

/* Return the uid of DV.  */

static inline dvuid
dv_uid (decl_or_value dv)
{
  if (dv_is_value_p (dv))
    return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
  else
    return DECL_UID (dv_as_decl (dv));
}

/* Compute the hash from the uid.  */

static inline hashval_t
dv_uid2hash (dvuid uid)
{
  return uid;
}

/* The hash function for a mask table in a shared_htab chain.  */

static inline hashval_t
dv_htab_hash (decl_or_value dv)
{
  return dv_uid2hash (dv_uid (dv));
}

static void variable_htab_free (void *);

/* Variable hashtable helpers.  */

struct variable_hasher : pointer_hash <variable>
{
  typedef void *compare_type;
  static inline hashval_t hash (const variable *);
  static inline bool equal (const variable *, const void *);
  static inline void remove (variable *);
};

/* The hash function for variable_htab, computes the hash value
   from the declaration of variable V.  */

inline hashval_t
variable_hasher::hash (const variable *v)
{
  return dv_htab_hash (v->dv);
}

/* Compare the declaration of variable V with declaration Y.  */

inline bool
variable_hasher::equal (const variable *v, const void *y)
{
  decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);

  return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
}

/* Free the element of VARIABLE_HTAB (its type is struct variable).  */

inline void
variable_hasher::remove (variable *var)
{
  variable_htab_free (var);
}

typedef hash_table<variable_hasher> variable_table_type;
typedef variable_table_type::iterator variable_iterator_type;

/* Structure for passing some other parameters to function
   emit_note_insn_var_location.  */
struct emit_note_data
{
  /* The instruction which the note will be emitted before/after.  */
  rtx_insn *insn;

  /* Where will the note be emitted (before/after insn)?  */
  enum emit_note_where where;

  /* The variables and values active at this point.  */
  variable_table_type *vars;
};

/* Structure holding a refcounted hash table.  If refcount > 1,
   it must first be unshared before being modified.  */
struct shared_hash
{
  /* Reference count.  */
  int refcount;

  /* Actual hash table.  */
  variable_table_type *htab;
};
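
/* A sketch of the intended copy-on-write discipline, using the helpers
   defined further below (DV, SET and SRC are assumed to be in scope):

     set->vars = shared_hash_copy (src->vars);	// share; bump refcount
     ...
     // Before a modification, obtain the slot through the unsharing
     // variant, which copies the table first iff refcount > 1:
     variable **slot
       = shared_hash_find_slot_unshare (&set->vars, dv, INSERT);  */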

/* Structure holding the IN or OUT set for a basic block.  */
struct dataflow_set
{
  /* Adjustment of stack offset.  */
  HOST_WIDE_INT stack_adjust;

  /* Attributes for registers (lists of attrs).  */
  attrs *regs[FIRST_PSEUDO_REGISTER];

  /* Variable locations.  */
  shared_hash *vars;

  /* The variables being traversed.  */
  shared_hash *traversed_vars;
};

/* The structure (one for each basic block) containing the information
   needed for variable tracking.  */
struct variable_tracking_info
{
  /* The vector of micro operations.  */
  vec<micro_operation> mos;

  /* The IN and OUT set for dataflow analysis.  */
  dataflow_set in;
  dataflow_set out;

  /* The permanent-in dataflow set for this block.  This is used to
     hold values for which we had to compute entry values.  ??? This
     should probably be dynamically allocated, to avoid using more
     memory in non-debug builds.  */
  dataflow_set *permp;

  /* Has the block been visited in DFS?  */
  bool visited;

  /* Has the block been flooded in VTA?  */
  bool flooded;

};

/* Alloc pool for struct attrs.  */
object_allocator<attrs> attrs_pool ("attrs pool");

/* Alloc pool for struct variable with MAX_VAR_PARTS entries.  */

static pool_allocator var_pool
  ("variable_def pool", sizeof (variable) +
   (MAX_VAR_PARTS - 1) * sizeof (((variable *)NULL)->var_part[0]));

/* Alloc pool for struct variable with a single var_part entry.  */
static pool_allocator valvar_pool
  ("small variable_def pool", sizeof (variable));

/* Alloc pool for struct location_chain.  */
static object_allocator<location_chain> location_chain_pool
  ("location_chain pool");

/* Alloc pool for struct shared_hash.  */
static object_allocator<shared_hash> shared_hash_pool ("shared_hash pool");

/* Alloc pool for struct loc_exp_dep for NOT_ONEPART variables.  */
object_allocator<loc_exp_dep> loc_exp_dep_pool ("loc_exp_dep pool");

/* Changed variables for which notes will be emitted.  */
static variable_table_type *changed_variables;

/* Shall notes be emitted?  */
static bool emit_notes;

/* Values whose dynamic location lists have gone empty, but whose
   cselib location lists are still usable.  Use this to hold the
   current location, the backlinks, etc, during emit_notes.  */
static variable_table_type *dropped_values;

/* Empty shared hashtable.  */
static shared_hash *empty_shared_hash;

/* Scratch register bitmap used by cselib_expand_value_rtx.  */
static bitmap scratch_regs = NULL;

#ifdef HAVE_window_save
struct GTY(()) parm_reg {
  rtx outgoing;
  rtx incoming;
};


/* Vector of windowed parameter registers, if any.  */
static vec<parm_reg, va_gc> *windowed_parm_regs = NULL;
#endif

/* Variable used to tell whether cselib_process_insn called our hook.  */
static bool cselib_hook_called;

/* Local function prototypes.  */
static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
					  HOST_WIDE_INT *);
static void insn_stack_adjust_offset_pre_post (rtx_insn *, HOST_WIDE_INT *,
					       HOST_WIDE_INT *);
static bool vt_stack_adjustments (void);

static void init_attrs_list_set (attrs **);
static void attrs_list_clear (attrs **);
static attrs *attrs_list_member (attrs *, decl_or_value, HOST_WIDE_INT);
static void attrs_list_insert (attrs **, decl_or_value, HOST_WIDE_INT, rtx);
static void attrs_list_copy (attrs **, attrs *);
static void attrs_list_union (attrs **, attrs *);

static variable **unshare_variable (dataflow_set *set, variable **slot,
					variable *var, enum var_init_status);
static void vars_copy (variable_table_type *, variable_table_type *);
static tree var_debug_decl (tree);
static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_reg_delete (dataflow_set *, rtx, bool);
static void var_regno_delete (dataflow_set *, int);
static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_mem_delete (dataflow_set *, rtx, bool);

static void dataflow_set_init (dataflow_set *);
static void dataflow_set_clear (dataflow_set *);
static void dataflow_set_copy (dataflow_set *, dataflow_set *);
static int variable_union_info_cmp_pos (const void *, const void *);
static void dataflow_set_union (dataflow_set *, dataflow_set *);
static location_chain *find_loc_in_1pdv (rtx, variable *,
					 variable_table_type *);
static bool canon_value_cmp (rtx, rtx);
static int loc_cmp (rtx, rtx);
static bool variable_part_different_p (variable_part *, variable_part *);
static bool onepart_variable_different_p (variable *, variable *);
static bool variable_different_p (variable *, variable *);
static bool dataflow_set_different (dataflow_set *, dataflow_set *);
static void dataflow_set_destroy (dataflow_set *);

static bool track_expr_p (tree, bool);
static void add_uses_1 (rtx *, void *);
static void add_stores (rtx, const_rtx, void *);
static bool compute_bb_dataflow (basic_block);
static bool vt_find_locations (void);

static void dump_attrs_list (attrs *);
static void dump_var (variable *);
static void dump_vars (variable_table_type *);
static void dump_dataflow_set (dataflow_set *);
static void dump_dataflow_sets (void);

static void set_dv_changed (decl_or_value, bool);
static void variable_was_changed (variable *, dataflow_set *);
static variable **set_slot_part (dataflow_set *, rtx, variable **,
				 decl_or_value, HOST_WIDE_INT,
				 enum var_init_status, rtx);
static void set_variable_part (dataflow_set *, rtx,
			       decl_or_value, HOST_WIDE_INT,
			       enum var_init_status, rtx, enum insert_option);
static variable **clobber_slot_part (dataflow_set *, rtx,
				     variable **, HOST_WIDE_INT, rtx);
static void clobber_variable_part (dataflow_set *, rtx,
				   decl_or_value, HOST_WIDE_INT, rtx);
static variable **delete_slot_part (dataflow_set *, rtx, variable **,
				    HOST_WIDE_INT);
static void delete_variable_part (dataflow_set *, rtx,
				  decl_or_value, HOST_WIDE_INT);
static void emit_notes_in_bb (basic_block, dataflow_set *);
static void vt_emit_notes (void);

static void vt_add_function_parameters (void);
static bool vt_initialize (void);
static void vt_finalize (void);

/* Callback for stack_adjust_offset_pre_post, called via for_each_inc_dec.  */

static int
stack_adjust_offset_pre_post_cb (rtx, rtx op, rtx dest, rtx src, rtx srcoff,
				 void *arg)
{
  if (dest != stack_pointer_rtx)
    return 0;

  switch (GET_CODE (op))
    {
    case PRE_INC:
    case PRE_DEC:
      ((HOST_WIDE_INT *)arg)[0] -= INTVAL (srcoff);
      return 0;
    case POST_INC:
    case POST_DEC:
      ((HOST_WIDE_INT *)arg)[1] -= INTVAL (srcoff);
      return 0;
    case PRE_MODIFY:
    case POST_MODIFY:
      /* We handle only adjustments by constant amount.  */
      gcc_assert (GET_CODE (src) == PLUS
		  && CONST_INT_P (XEXP (src, 1))
		  && XEXP (src, 0) == stack_pointer_rtx);
      ((HOST_WIDE_INT *)arg)[GET_CODE (op) == POST_MODIFY]
	-= INTVAL (XEXP (src, 1));
      return 0;
    default:
      gcc_unreachable ();
    }
}

/* Given a SET, calculate the amounts of stack adjustment it contains
   PRE- and POST-modifying the stack pointer.
   This function is similar to stack_adjust_offset.  */

static void
stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
			      HOST_WIDE_INT *post)
{
  rtx src = SET_SRC (pattern);
  rtx dest = SET_DEST (pattern);
  enum rtx_code code;

  if (dest == stack_pointer_rtx)
    {
      /* (set (reg sp) (plus (reg sp) (const_int))) */
      code = GET_CODE (src);
      if (! (code == PLUS || code == MINUS)
	  || XEXP (src, 0) != stack_pointer_rtx
	  || !CONST_INT_P (XEXP (src, 1)))
	return;

      if (code == MINUS)
	*post += INTVAL (XEXP (src, 1));
      else
	*post -= INTVAL (XEXP (src, 1));
      return;
    }
  HOST_WIDE_INT res[2] = { 0, 0 };
  for_each_inc_dec (pattern, stack_adjust_offset_pre_post_cb, res);
  *pre += res[0];
  *post += res[1];
}
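
/* For example, on a target whose stack grows downward, the pattern
   (set (reg sp) (plus (reg sp) (const_int -16))) allocates 16 bytes;
   the code above records it as *post += 16, i.e. allocation is tracked
   as a positive adjustment.  */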

/* Given an INSN, calculate the amounts of stack adjustment it contains
   PRE- and POST-modifying the stack pointer.  */

static void
insn_stack_adjust_offset_pre_post (rtx_insn *insn, HOST_WIDE_INT *pre,
				   HOST_WIDE_INT *post)
{
  rtx pattern;

  *pre = 0;
  *post = 0;

  pattern = PATTERN (insn);
  if (RTX_FRAME_RELATED_P (insn))
    {
      rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
      if (expr)
	pattern = XEXP (expr, 0);
    }

  if (GET_CODE (pattern) == SET)
    stack_adjust_offset_pre_post (pattern, pre, post);
  else if (GET_CODE (pattern) == PARALLEL
	   || GET_CODE (pattern) == SEQUENCE)
    {
      int i;

      /* There may be stack adjustments inside compound insns.  Search
	 for them.  */
      for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
	if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
	  stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
    }
}

/* Compute stack adjustments for all blocks by traversing DFS tree.
   Return true when the adjustments on all incoming edges are consistent.
   Heavily borrowed from pre_and_rev_post_order_compute.  */

static bool
vt_stack_adjustments (void)
{
  edge_iterator *stack;
  int sp;

  /* Initialize entry block.  */
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->visited = true;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->in.stack_adjust
    = INCOMING_FRAME_SP_OFFSET;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out.stack_adjust
    = INCOMING_FRAME_SP_OFFSET;

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
  sp = 0;

  /* Push the first edge on to the stack.  */
  stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (!VTI (dest)->visited)
	{
	  rtx_insn *insn;
	  HOST_WIDE_INT pre, post, offset;
	  VTI (dest)->visited = true;
	  VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;

	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	    for (insn = BB_HEAD (dest);
		 insn != NEXT_INSN (BB_END (dest));
		 insn = NEXT_INSN (insn))
	      if (INSN_P (insn))
		{
		  insn_stack_adjust_offset_pre_post (insn, &pre, &post);
		  offset += pre + post;
		}

	  VTI (dest)->out.stack_adjust = offset;

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = ei_start (dest->succs);
	}
      else
	{
	  /* We can end up with different stack adjustments for the exit block
	     of a shrink-wrapped function if stack_adjust_offset_pre_post
	     doesn't understand the rtx pattern used to restore the stack
	     pointer in the epilogue.  For example, on s390(x), the stack
	     pointer is often restored via a load-multiple instruction
	     and so no stack_adjust offset is recorded for it.  This means
	     that the stack offset at the end of the epilogue block is the
	     same as the offset before the epilogue, whereas other paths
	     to the exit block will have the correct stack_adjust.

	     It is safe to ignore these differences because (a) we never
	     use the stack_adjust for the exit block in this pass and
	     (b) dwarf2cfi checks whether the CFA notes in a shrink-wrapped
	     function are correct.

	     We must check whether the adjustments on other edges are
	     the same though.  */
	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
	    {
	      free (stack);
	      return false;
	    }

	  if (! ei_one_before_end_p (ei))
	    /* Go to the next edge.  */
	    ei_next (&stack[sp - 1]);
	  else
	    /* Return to previous level if there are no more edges.  */
	    sp--;
	}
    }

  free (stack);
  return true;
}

/* The register (arg_pointer_rtx or frame_pointer_rtx) that
   stack_pointer_rtx or hard_frame_pointer_rtx is being mapped to,
   together with the offset used for the mapping.  */
static rtx cfa_base_rtx;
static HOST_WIDE_INT cfa_base_offset;

/* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
   or hard_frame_pointer_rtx.  */

static inline rtx
compute_cfa_pointer (poly_int64 adjustment)
{
  return plus_constant (Pmode, cfa_base_rtx, adjustment + cfa_base_offset);
}
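
/* For instance, with cfa_base_rtx being arg_pointer_rtx and a
   cfa_base_offset of -16 (illustrative values, not from any particular
   target), compute_cfa_pointer (8) yields
   (plus (reg argp) (const_int -8)).  */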

/* Adjustment for hard_frame_pointer_rtx to cfa base reg,
   or -1 if the replacement shouldn't be done.  */
static poly_int64 hard_frame_pointer_adjustment = -1;

/* Data for adjust_mems callback.  */

class adjust_mem_data
{
public:
  bool store;
  machine_mode mem_mode;
  HOST_WIDE_INT stack_adjust;
  auto_vec<rtx> side_effects;
};

/* Helper for adjust_mems.  Return true if X is suitable for
   transformation of wider-mode arithmetic to a narrower mode.  */

static bool
use_narrower_mode_test (rtx x, const_rtx subreg)
{
  subrtx_var_iterator::array_type array;
  FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
    {
      rtx x = *iter;
      if (CONSTANT_P (x))
	iter.skip_subrtxes ();
      else
	switch (GET_CODE (x))
	  {
	  case REG:
	    if (cselib_lookup (x, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
	      return false;
	    if (!validate_subreg (GET_MODE (subreg), GET_MODE (x), x,
				  subreg_lowpart_offset (GET_MODE (subreg),
							 GET_MODE (x))))
	      return false;
	    break;
	  case PLUS:
	  case MINUS:
	  case MULT:
	    break;
	  case ASHIFT:
	    if (GET_MODE (XEXP (x, 1)) != VOIDmode)
	      {
		enum machine_mode mode = GET_MODE (subreg);
		rtx op1 = XEXP (x, 1);
		enum machine_mode op1_mode = GET_MODE (op1);
		if (GET_MODE_PRECISION (as_a <scalar_int_mode> (mode))
		    < GET_MODE_PRECISION (as_a <scalar_int_mode> (op1_mode)))
		  {
		    poly_uint64 byte = subreg_lowpart_offset (mode, op1_mode);
		    if (GET_CODE (op1) == SUBREG || GET_CODE (op1) == CONCAT)
		      {
			if (!simplify_subreg (mode, op1, op1_mode, byte))
			  return false;
		      }
		    else if (!validate_subreg (mode, op1_mode, op1, byte))
		      return false;
		  }
	      }
	    iter.substitute (XEXP (x, 0));
	    break;
	  default:
	    return false;
	  }
    }
  return true;
}

/* Transform X into narrower mode MODE from wider mode WMODE.  */

static rtx
use_narrower_mode (rtx x, scalar_int_mode mode, scalar_int_mode wmode)
{
  rtx op0, op1;
  if (CONSTANT_P (x))
    return lowpart_subreg (mode, x, wmode);
  switch (GET_CODE (x))
    {
    case REG:
      return lowpart_subreg (mode, x, wmode);
    case PLUS:
    case MINUS:
    case MULT:
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
      return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
    case ASHIFT:
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      op1 = XEXP (x, 1);
      /* Ensure shift amount is not wider than mode.  */
      if (GET_MODE (op1) == VOIDmode)
	op1 = lowpart_subreg (mode, op1, wmode);
      else if (GET_MODE_PRECISION (mode)
	       < GET_MODE_PRECISION (as_a <scalar_int_mode> (GET_MODE (op1))))
	op1 = lowpart_subreg (mode, op1, GET_MODE (op1));
      return simplify_gen_binary (ASHIFT, mode, op0, op1);
    default:
      gcc_unreachable ();
    }
}
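
/* As an illustration (assuming a little-endian target, so the lowpart is
   at byte 0): narrowing (plus:DI (reg:DI R) (const_int 1)) from DImode
   to SImode yields (plus:SI (subreg:SI (reg:DI R) 0) (const_int 1)),
   modulo further simplification by simplify_gen_binary.  */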

/* Helper function for adjusting used MEMs.  */

static rtx
adjust_mems (rtx loc, const_rtx old_rtx, void *data)
{
  class adjust_mem_data *amd = (class adjust_mem_data *) data;
  rtx mem, addr = loc, tem;
  machine_mode mem_mode_save;
  bool store_save;
  scalar_int_mode tem_mode, tem_subreg_mode;
  poly_int64 size;
  switch (GET_CODE (loc))
    {
    case REG:
      /* Don't do any sp or fp replacements outside of MEM addresses
         on the LHS.  */
      if (amd->mem_mode == VOIDmode && amd->store)
	return loc;
      if (loc == stack_pointer_rtx
	  && !frame_pointer_needed
	  && cfa_base_rtx)
	return compute_cfa_pointer (amd->stack_adjust);
      else if (loc == hard_frame_pointer_rtx
	       && frame_pointer_needed
	       && maybe_ne (hard_frame_pointer_adjustment, -1)
	       && cfa_base_rtx)
	return compute_cfa_pointer (hard_frame_pointer_adjustment);
      gcc_checking_assert (loc != virtual_incoming_args_rtx);
      return loc;
    case MEM:
      mem = loc;
      if (!amd->store)
	{
	  mem = targetm.delegitimize_address (mem);
	  if (mem != loc && !MEM_P (mem))
	    return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
	}

      addr = XEXP (mem, 0);
      mem_mode_save = amd->mem_mode;
      amd->mem_mode = GET_MODE (mem);
      store_save = amd->store;
      amd->store = false;
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      amd->store = store_save;
      amd->mem_mode = mem_mode_save;
      if (mem == loc)
	addr = targetm.delegitimize_address (addr);
      if (addr != XEXP (mem, 0))
	mem = replace_equiv_address_nv (mem, addr);
      if (!amd->store)
	mem = avoid_constant_pool_reference (mem);
      return mem;
    case PRE_INC:
    case PRE_DEC:
      size = GET_MODE_SIZE (amd->mem_mode);
      addr = plus_constant (GET_MODE (loc), XEXP (loc, 0),
			    GET_CODE (loc) == PRE_INC ? size : -size);
      /* FALLTHRU */
    case POST_INC:
    case POST_DEC:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      size = GET_MODE_SIZE (amd->mem_mode);
      tem = plus_constant (GET_MODE (loc), XEXP (loc, 0),
			   (GET_CODE (loc) == PRE_INC
			    || GET_CODE (loc) == POST_INC) ? size : -size);
      store_save = amd->store;
      amd->store = false;
      tem = simplify_replace_fn_rtx (tem, old_rtx, adjust_mems, data);
      amd->store = store_save;
      amd->side_effects.safe_push (gen_rtx_SET (XEXP (loc, 0), tem));
      return addr;
    case PRE_MODIFY:
      addr = XEXP (loc, 1);
      /* FALLTHRU */
    case POST_MODIFY:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      store_save = amd->store;
      amd->store = false;
      tem = simplify_replace_fn_rtx (XEXP (loc, 1), old_rtx,
				     adjust_mems, data);
      amd->store = store_save;
      amd->side_effects.safe_push (gen_rtx_SET (XEXP (loc, 0), tem));
      return addr;
    case SUBREG:
      /* First try without delegitimization of whole MEMs and
	 avoid_constant_pool_reference, which is more likely to succeed.  */
      store_save = amd->store;
      amd->store = true;
      addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
				      data);
      amd->store = store_save;
      mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      if (mem == SUBREG_REG (loc))
	{
	  tem = loc;
	  goto finish_subreg;
	}
      tem = simplify_gen_subreg (GET_MODE (loc), mem,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem)
	goto finish_subreg;
      tem = simplify_gen_subreg (GET_MODE (loc), addr,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem == NULL_RTX)
	tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
    finish_subreg:
      if (MAY_HAVE_DEBUG_BIND_INSNS
	  && GET_CODE (tem) == SUBREG
	  && (GET_CODE (SUBREG_REG (tem)) == PLUS
	      || GET_CODE (SUBREG_REG (tem)) == MINUS
	      || GET_CODE (SUBREG_REG (tem)) == MULT
	      || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
	  && is_a <scalar_int_mode> (GET_MODE (tem), &tem_mode)
	  && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (tem)),
				     &tem_subreg_mode)
	  && (GET_MODE_PRECISION (tem_mode)
	      < GET_MODE_PRECISION (tem_subreg_mode))
	  && subreg_lowpart_p (tem)
	  && use_narrower_mode_test (SUBREG_REG (tem), tem))
	return use_narrower_mode (SUBREG_REG (tem), tem_mode, tem_subreg_mode);
      return tem;
    case ASM_OPERANDS:
      /* Don't do any replacements in second and following
	 ASM_OPERANDS of inline-asm with multiple sets.
	 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
	 and ASM_OPERANDS_LABEL_VEC need to be equal between
	 all the ASM_OPERANDs in the insn and adjust_insn will
	 fix this up.  */
      if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
	return loc;
      break;
    default:
      break;
    }
  return NULL_RTX;
}
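
/* As an example of the auto-inc handling above: a use of
   (mem:SI (pre_dec (reg sp))) is rewritten into a MEM addressed by
   sp - 4 (assuming 4-byte SImode), and the side-effect
   (set (reg sp) (plus (reg sp) (const_int -4))) is queued in
   AMD->side_effects so that adjust_insn can attach it to the insn as an
   explicit extra set.  */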

/* Helper function for replacement of uses.  */

static void
adjust_mem_uses (rtx *x, void *data)
{
  rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
  if (new_x != *x)
    validate_change (NULL_RTX, x, new_x, true);
}

/* Helper function for replacement of stores.  */

static void
adjust_mem_stores (rtx loc, const_rtx expr, void *data)
{
  if (MEM_P (loc))
    {
      rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
					      adjust_mems, data);
      if (new_dest != SET_DEST (expr))
	{
	  rtx xexpr = CONST_CAST_RTX (expr);
	  validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
	}
    }
}

/* Simplify INSN.  Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
   replace them with their value in the insn and add the side-effects
   as other sets to the insn.  */

static void
adjust_insn (basic_block bb, rtx_insn *insn)
{
  rtx set;

#ifdef HAVE_window_save
  /* If the target machine has an explicit window save instruction, the
     transformation OUTGOING_REGNO -> INCOMING_REGNO is done there.  */
  if (RTX_FRAME_RELATED_P (insn)
      && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
    {
      unsigned int i, nregs = vec_safe_length (windowed_parm_regs);
      rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
      parm_reg *p;

      FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs, i, p)
	{
	  XVECEXP (rtl, 0, i * 2)
	    = gen_rtx_SET (p->incoming, p->outgoing);
	  /* Do not clobber the attached DECL, but only the REG.  */
	  XVECEXP (rtl, 0, i * 2 + 1)
	    = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
			       gen_raw_REG (GET_MODE (p->outgoing),
					    REGNO (p->outgoing)));
	}

      validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
      return;
    }
#endif

  adjust_mem_data amd;
  amd.mem_mode = VOIDmode;
  amd.stack_adjust = -VTI (bb)->out.stack_adjust;

  amd.store = true;
  note_stores (insn, adjust_mem_stores, &amd);

  amd.store = false;
  if (GET_CODE (PATTERN (insn)) == PARALLEL
      && asm_noperands (PATTERN (insn)) > 0
      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      rtx body, set0;
      int i;

      /* Inline-asm with multiple sets is a tiny bit more complicated,
	 because the 3 vectors in ASM_OPERANDS need to be shared between
	 all ASM_OPERANDS in the instruction.  adjust_mems will
	 not touch ASM_OPERANDS other than the first one; the asm_noperands
	 test above needs to be done before that (otherwise it would fail)
	 and afterwards this code fixes things up.  */
      note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
      body = PATTERN (insn);
      set0 = XVECEXP (body, 0, 0);
      gcc_checking_assert (GET_CODE (set0) == SET
			   && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
			   && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
      for (i = 1; i < XVECLEN (body, 0); i++)
	if (GET_CODE (XVECEXP (body, 0, i)) != SET)
	  break;
	else
	  {
	    set = XVECEXP (body, 0, i);
	    gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
				 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
				    == i);
	    if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
		!= ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
		   != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
		   != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
	      {
		rtx newsrc = shallow_copy_rtx (SET_SRC (set));
		ASM_OPERANDS_INPUT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
		ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
		ASM_OPERANDS_LABEL_VEC (newsrc)
		  = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
		validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
	      }
	  }
    }
  else
    note_uses (&PATTERN (insn), adjust_mem_uses, &amd);

  /* For read-only MEMs containing some constant, prefer those
     constants.  */
  set = single_set (insn);
  if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
	validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
    }

  if (!amd.side_effects.is_empty ())
    {
      rtx *pat, new_pat;
      int i, oldn;

      pat = &PATTERN (insn);
      if (GET_CODE (*pat) == COND_EXEC)
	pat = &COND_EXEC_CODE (*pat);
      if (GET_CODE (*pat) == PARALLEL)
	oldn = XVECLEN (*pat, 0);
      else
	oldn = 1;
      unsigned int newn = amd.side_effects.length ();
      new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
      if (GET_CODE (*pat) == PARALLEL)
	for (i = 0; i < oldn; i++)
	  XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
      else
	XVECEXP (new_pat, 0, 0) = *pat;

      rtx effect;
      unsigned int j;
      FOR_EACH_VEC_ELT_REVERSE (amd.side_effects, j, effect)
	XVECEXP (new_pat, 0, j + oldn) = effect;
      validate_change (NULL_RTX, pat, new_pat, true);
    }
}

/* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV.  */
static inline rtx
dv_as_rtx (decl_or_value dv)
{
  tree decl;

  if (dv_is_value_p (dv))
    return dv_as_value (dv);

  decl = dv_as_decl (dv);

  gcc_checking_assert (TREE_CODE (decl) == DEBUG_EXPR_DECL);
  return DECL_RTL_KNOWN_SET (decl);
}

/* Return nonzero if a decl_or_value must not have more than one
   variable part.  The returned value discriminates among various
   kinds of one-part DVs according to enum onepart_enum.  */
static inline onepart_enum
dv_onepart_p (decl_or_value dv)
{
  tree decl;

  if (!MAY_HAVE_DEBUG_BIND_INSNS)
    return NOT_ONEPART;

  if (dv_is_value_p (dv))
    return ONEPART_VALUE;

  decl = dv_as_decl (dv);

  if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
    return ONEPART_DEXPR;

  if (target_for_debug_bind (decl) != NULL_TREE)
    return ONEPART_VDECL;

  return NOT_ONEPART;
}

/* Return the variable pool to be used for a dv of type ONEPART.  */
static inline pool_allocator &
onepart_pool (onepart_enum onepart)
{
  return onepart ? valvar_pool : var_pool;
}

/* Allocate a variable from the corresponding variable pool.  */
static inline variable *
onepart_pool_allocate (onepart_enum onepart)
{
  return (variable*) onepart_pool (onepart).allocate ();
}

/* Build a decl_or_value out of a decl.  */
static inline decl_or_value
dv_from_decl (tree decl)
{
  decl_or_value dv;
  dv = decl;
  gcc_checking_assert (dv_is_decl_p (dv));
  return dv;
}

/* Build a decl_or_value out of a value.  */
static inline decl_or_value
dv_from_value (rtx value)
{
  decl_or_value dv;
  dv = value;
  gcc_checking_assert (dv_is_value_p (dv));
  return dv;
}

/* Return a value or the decl of a debug_expr as a decl_or_value.  */
static inline decl_or_value
dv_from_rtx (rtx x)
{
  decl_or_value dv;

  switch (GET_CODE (x))
    {
    case DEBUG_EXPR:
      dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
      gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x)) == x);
      break;

    case VALUE:
      dv = dv_from_value (x);
      break;

    default:
      gcc_unreachable ();
    }

  return dv;
}

extern void debug_dv (decl_or_value dv);

DEBUG_FUNCTION void
debug_dv (decl_or_value dv)
{
  if (dv_is_value_p (dv))
    debug_rtx (dv_as_value (dv));
  else
    debug_generic_stmt (dv_as_decl (dv));
}

static void loc_exp_dep_clear (variable *var);

/* Free the element of VARIABLE_HTAB (its type is struct variable).  */

static void
variable_htab_free (void *elem)
{
  int i;
  variable *var = (variable *) elem;
  location_chain *node, *next;

  gcc_checking_assert (var->refcount > 0);

  var->refcount--;
  if (var->refcount > 0)
    return;

  for (i = 0; i < var->n_var_parts; i++)
    {
      for (node = var->var_part[i].loc_chain; node; node = next)
	{
	  next = node->next;
	  delete node;
	}
      var->var_part[i].loc_chain = NULL;
    }
  if (var->onepart && VAR_LOC_1PAUX (var))
    {
      loc_exp_dep_clear (var);
      if (VAR_LOC_DEP_LST (var))
	VAR_LOC_DEP_LST (var)->pprev = NULL;
      XDELETE (VAR_LOC_1PAUX (var));
      /* These may be reused across functions, so reset
	 e.g. NO_LOC_P.  */
      if (var->onepart == ONEPART_DEXPR)
	set_dv_changed (var->dv, true);
    }
  onepart_pool (var->onepart).remove (var);
}

/* Initialize the set (array) SET of attrs to empty lists.  */

static void
init_attrs_list_set (attrs **set)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    set[i] = NULL;
}

/* Make the list *LISTP empty.  */

static void
attrs_list_clear (attrs **listp)
{
  attrs *list, *next;

  for (list = *listp; list; list = next)
    {
      next = list->next;
      delete list;
    }
  *listp = NULL;
}

/* Return the member of LIST matching the pair of DV and OFFSET, or NULL
   if there is none.  */

static attrs *
attrs_list_member (attrs *list, decl_or_value dv, HOST_WIDE_INT offset)
{
  for (; list; list = list->next)
    if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
      return list;
  return NULL;
}

/* Insert the triplet DV, OFFSET, LOC into the list *LISTP.  */

static void
attrs_list_insert (attrs **listp, decl_or_value dv,
		   HOST_WIDE_INT offset, rtx loc)
{
  attrs *list = new attrs;
  list->loc = loc;
  list->dv = dv;
  list->offset = offset;
  list->next = *listp;
  *listp = list;
}

/* Copy all nodes from SRC and create a list *DSTP of the copies.  */

static void
attrs_list_copy (attrs **dstp, attrs *src)
{
  attrs_list_clear (dstp);
  for (; src; src = src->next)
    {
      attrs *n = new attrs;
      n->loc = src->loc;
      n->dv = src->dv;
      n->offset = src->offset;
      n->next = *dstp;
      *dstp = n;
    }
}
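
/* Note that the loop above prepends each copy, so *DSTP ends up in the
   reverse order of SRC; this appears to be harmless, since attrs lists
   are searched as unordered sets (see attrs_list_member).  */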
1557 
1558 /* Add all nodes from SRC which are not in *DSTP to *DSTP.  */
1559 
1560 static void
attrs_list_union(attrs ** dstp,attrs * src)1561 attrs_list_union (attrs **dstp, attrs *src)
1562 {
1563   for (; src; src = src->next)
1564     {
1565       if (!attrs_list_member (*dstp, src->dv, src->offset))
1566 	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1567     }
1568 }
1569 
1570 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1571    *DSTP.  */
1572 
1573 static void
attrs_list_mpdv_union(attrs ** dstp,attrs * src,attrs * src2)1574 attrs_list_mpdv_union (attrs **dstp, attrs *src, attrs *src2)
1575 {
1576   gcc_assert (!*dstp);
1577   for (; src; src = src->next)
1578     {
1579       if (!dv_onepart_p (src->dv))
1580 	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1581     }
1582   for (src = src2; src; src = src->next)
1583     {
1584       if (!dv_onepart_p (src->dv)
1585 	  && !attrs_list_member (*dstp, src->dv, src->offset))
1586 	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1587     }
1588 }
1589 
1590 /* Shared hashtable support.  */
1591 
1592 /* Return true if VARS is shared.  */
1593 
1594 static inline bool
shared_hash_shared(shared_hash * vars)1595 shared_hash_shared (shared_hash *vars)
1596 {
1597   return vars->refcount > 1;
1598 }
1599 
1600 /* Return the hash table for VARS.  */
1601 
1602 static inline variable_table_type *
shared_hash_htab(shared_hash * vars)1603 shared_hash_htab (shared_hash *vars)
1604 {
1605   return vars->htab;
1606 }
1607 
1608 /* Return true if VAR is shared, or maybe because VARS is shared.  */
1609 
1610 static inline bool
shared_var_p(variable * var,shared_hash * vars)1611 shared_var_p (variable *var, shared_hash *vars)
1612 {
1613   /* Don't count an entry in the changed_variables table as a duplicate.  */
1614   return ((var->refcount > 1 + (int) var->in_changed_variables)
1615 	  || shared_hash_shared (vars));
1616 }
1617 
1618 /* Copy variables into a new hash table.  */
1619 
1620 static shared_hash *
shared_hash_unshare(shared_hash * vars)1621 shared_hash_unshare (shared_hash *vars)
1622 {
1623   shared_hash *new_vars = new shared_hash;
1624   gcc_assert (vars->refcount > 1);
1625   new_vars->refcount = 1;
1626   new_vars->htab = new variable_table_type (vars->htab->elements () + 3);
1627   vars_copy (new_vars->htab, vars->htab);
1628   vars->refcount--;
1629   return new_vars;
1630 }
1631 
1632 /* Increment reference counter on VARS and return it.  */
1633 
1634 static inline shared_hash *
shared_hash_copy(shared_hash * vars)1635 shared_hash_copy (shared_hash *vars)
1636 {
1637   vars->refcount++;
1638   return vars;
1639 }
1640 
1641 /* Decrement reference counter and destroy hash table if not shared
1642    anymore.  */
1643 
1644 static void
shared_hash_destroy(shared_hash * vars)1645 shared_hash_destroy (shared_hash *vars)
1646 {
1647   gcc_checking_assert (vars->refcount > 0);
1648   if (--vars->refcount == 0)
1649     {
1650       delete vars->htab;
1651       delete vars;
1652     }
1653 }
1654 
1655 /* Unshare *PVARS if shared and return slot for DV.  If INS is
1656    INSERT, insert it if not already present.  */
1657 
1658 static inline variable **
shared_hash_find_slot_unshare_1(shared_hash ** pvars,decl_or_value dv,hashval_t dvhash,enum insert_option ins)1659 shared_hash_find_slot_unshare_1 (shared_hash **pvars, decl_or_value dv,
1660 				 hashval_t dvhash, enum insert_option ins)
1661 {
1662   if (shared_hash_shared (*pvars))
1663     *pvars = shared_hash_unshare (*pvars);
1664   return shared_hash_htab (*pvars)->find_slot_with_hash (dv, dvhash, ins);
1665 }
1666 
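/* Wrapper for shared_hash_find_slot_unshare_1 that computes the hash
   code for DV.  */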
static inline variable **
shared_hash_find_slot_unshare (shared_hash **pvars, decl_or_value dv,
			       enum insert_option ins)
{
  return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
}

/* Return slot for DV, if it is already present in the hash table.
   If it is not present, insert it only if VARS is not shared, otherwise
   return NULL.  */

static inline variable **
shared_hash_find_slot_1 (shared_hash *vars, decl_or_value dv, hashval_t dvhash)
{
  return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash,
						       shared_hash_shared (vars)
						       ? NO_INSERT : INSERT);
}

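/* Wrapper for shared_hash_find_slot_1 that computes the hash code
   for DV.  */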
static inline variable **
shared_hash_find_slot (shared_hash *vars, decl_or_value dv)
{
  return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
}

/* Return slot for DV only if it is already present in the hash table.  */

static inline variable **
shared_hash_find_slot_noinsert_1 (shared_hash *vars, decl_or_value dv,
				  hashval_t dvhash)
{
  return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash, NO_INSERT);
}

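/* Wrapper for shared_hash_find_slot_noinsert_1 that computes the hash
   code for DV.  */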
static inline variable **
shared_hash_find_slot_noinsert (shared_hash *vars, decl_or_value dv)
{
  return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
}

/* Return variable for DV or NULL if not already present in the hash
   table.  */

static inline variable *
shared_hash_find_1 (shared_hash *vars, decl_or_value dv, hashval_t dvhash)
{
  return shared_hash_htab (vars)->find_with_hash (dv, dvhash);
}

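/* Wrapper for shared_hash_find_1 that computes the hash code for DV.  */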
static inline variable *
shared_hash_find (shared_hash *vars, decl_or_value dv)
{
  return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
}

/* Return true if TVAL is better than CVAL as a canonical value.  We
   choose lowest-numbered VALUEs, using the RTX address as a
   tie-breaker.  The idea is to arrange them into a star topology,
   such that all of them are at most one step away from the canonical
   value, and the canonical value has backlinks to all of them, in
   addition to all the actual locations.  We don't enforce this
   topology throughout the entire dataflow analysis, though.  */

static inline bool
canon_value_cmp (rtx tval, rtx cval)
{
  return !cval
    || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
}

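/* Whether the destination variable of the last union or merge may
   still share its representation; cleared whenever the destination
   had to be modified or unshared.  */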
static bool dst_can_be_shared;

/* Return a copy of a variable VAR and insert it into dataflow set SET.  */

static variable **
unshare_variable (dataflow_set *set, variable **slot, variable *var,
		  enum var_init_status initialized)
{
  variable *new_var;
  int i;

  new_var = onepart_pool_allocate (var->onepart);
  new_var->dv = var->dv;
  new_var->refcount = 1;
  var->refcount--;
  new_var->n_var_parts = var->n_var_parts;
  new_var->onepart = var->onepart;
  new_var->in_changed_variables = false;

  if (! flag_var_tracking_uninit)
    initialized = VAR_INIT_STATUS_INITIALIZED;

  for (i = 0; i < var->n_var_parts; i++)
    {
      location_chain *node;
      location_chain **nextp;

      if (i == 0 && var->onepart)
	{
	  /* One-part auxiliary data is only used while emitting
	     notes, so propagate it to the new variable in the active
	     dataflow set.  If we're not emitting notes, this will be
	     a no-op.  */
	  gcc_checking_assert (!VAR_LOC_1PAUX (var) || emit_notes);
	  VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (var);
	  VAR_LOC_1PAUX (var) = NULL;
	}
      else
	VAR_PART_OFFSET (new_var, i) = VAR_PART_OFFSET (var, i);
      nextp = &new_var->var_part[i].loc_chain;
      for (node = var->var_part[i].loc_chain; node; node = node->next)
	{
	  location_chain *new_lc;

	  new_lc = new location_chain;
	  new_lc->next = NULL;
	  if (node->init > initialized)
	    new_lc->init = node->init;
	  else
	    new_lc->init = initialized;
	  if (node->set_src && !(MEM_P (node->set_src)))
	    new_lc->set_src = node->set_src;
	  else
	    new_lc->set_src = NULL;
	  new_lc->loc = node->loc;

	  *nextp = new_lc;
	  nextp = &new_lc->next;
	}

      new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
    }

  dst_can_be_shared = false;
  if (shared_hash_shared (set->vars))
    slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
  else if (set->traversed_vars && set->vars != set->traversed_vars)
    slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
  *slot = new_var;
  if (var->in_changed_variables)
    {
      variable **cslot
	= changed_variables->find_slot_with_hash (var->dv,
						  dv_htab_hash (var->dv),
						  NO_INSERT);
      gcc_assert (*cslot == (void *) var);
      var->in_changed_variables = false;
      variable_htab_free (var);
      *cslot = new_var;
      new_var->in_changed_variables = true;
    }
  return slot;
}

/* Copy all variables from hash table SRC to hash table DST.  */

static void
vars_copy (variable_table_type *dst, variable_table_type *src)
{
  variable_iterator_type hi;
  variable *var;

  FOR_EACH_HASH_TABLE_ELEMENT (*src, var, variable, hi)
    {
      variable **dstp;
      var->refcount++;
      dstp = dst->find_slot_with_hash (var->dv, dv_htab_hash (var->dv),
				       INSERT);
      *dstp = var;
    }
}

/* Map a decl to its main debug decl.  */

static inline tree
var_debug_decl (tree decl)
{
  if (decl && VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
    {
      tree debugdecl = DECL_DEBUG_EXPR (decl);
      if (DECL_P (debugdecl))
	decl = debugdecl;
    }

  return decl;
}

/* Set the register LOC to contain DV, OFFSET.  */

static void
var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
		  decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
		  enum insert_option iopt)
{
  attrs *node;
  bool decl_p = dv_is_decl_p (dv);

  if (decl_p)
    dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));

  for (node = set->regs[REGNO (loc)]; node; node = node->next)
    if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
	&& node->offset == offset)
      break;
  if (!node)
    attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
  set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
}

/* Return true if we should track a location that is OFFSET bytes from
   a variable.  Store the constant offset in *OFFSET_OUT if so.  */

static bool
track_offset_p (poly_int64 offset, HOST_WIDE_INT *offset_out)
{
  HOST_WIDE_INT const_offset;
  if (!offset.is_constant (&const_offset)
      || !IN_RANGE (const_offset, 0, MAX_VAR_PARTS - 1))
    return false;
  *offset_out = const_offset;
  return true;
}

/* Return the offset of a register that track_offset_p says we
   should track.  */

static HOST_WIDE_INT
get_tracked_reg_offset (rtx loc)
{
  HOST_WIDE_INT offset;
  if (!track_offset_p (REG_OFFSET (loc), &offset))
    gcc_unreachable ();
  return offset;
}

/* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC).  */

static void
var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
	     rtx set_src)
{
  tree decl = REG_EXPR (loc);
  HOST_WIDE_INT offset = get_tracked_reg_offset (loc);

  var_reg_decl_set (set, loc, initialized,
		    dv_from_decl (decl), offset, set_src, INSERT);
}

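/* Return the initialization status recorded in SET for a location of
   DV that is rtx_equal to LOC, or VAR_INIT_STATUS_UNKNOWN if there is
   none.  Always return VAR_INIT_STATUS_INITIALIZED when uninitialized
   variable tracking is disabled.  */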
static enum var_init_status
get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
{
  variable *var;
  int i;
  enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;

  if (! flag_var_tracking_uninit)
    return VAR_INIT_STATUS_INITIALIZED;

  var = shared_hash_find (set->vars, dv);
  if (var)
    {
      for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
	{
	  location_chain *nextp;
	  for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
	    if (rtx_equal_p (nextp->loc, loc))
	      {
		ret_val = nextp->init;
		break;
	      }
	}
    }

  return ret_val;
}

/* Delete current content of register LOC in dataflow set SET and set
   the register to contain REG_EXPR (LOC), REG_OFFSET (LOC).  If
   MODIFY is true, any other live copies of the same variable part are
   also deleted from the dataflow set, otherwise the variable part is
   assumed to be copied from another location holding the same
   part.  */

static void
var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
			enum var_init_status initialized, rtx set_src)
{
  tree decl = REG_EXPR (loc);
  HOST_WIDE_INT offset = get_tracked_reg_offset (loc);
  attrs *node, *next;
  attrs **nextp;

  decl = var_debug_decl (decl);

  if (initialized == VAR_INIT_STATUS_UNKNOWN)
    initialized = get_init_value (set, loc, dv_from_decl (decl));

  nextp = &set->regs[REGNO (loc)];
  for (node = *nextp; node; node = next)
    {
      next = node->next;
      if (dv_as_opaque (node->dv) != decl || node->offset != offset)
	{
	  delete_variable_part (set, node->loc, node->dv, node->offset);
	  delete node;
	  *nextp = next;
	}
      else
	{
	  node->loc = loc;
	  nextp = &node->next;
	}
    }
  if (modify)
    clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
  var_reg_set (set, loc, initialized, set_src);
}

/* Delete the association of register LOC in dataflow set SET with any
   variables that aren't onepart.  If CLOBBER is true, also delete any
   other live copies of the same variable part, and delete the
   association with onepart dvs too.  */

static void
var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
{
  attrs **nextp = &set->regs[REGNO (loc)];
  attrs *node, *next;

  HOST_WIDE_INT offset;
  if (clobber && track_offset_p (REG_OFFSET (loc), &offset))
    {
      tree decl = REG_EXPR (loc);

      decl = var_debug_decl (decl);

      clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
    }

  for (node = *nextp; node; node = next)
    {
      next = node->next;
      if (clobber || !dv_onepart_p (node->dv))
	{
	  delete_variable_part (set, node->loc, node->dv, node->offset);
	  delete node;
	  *nextp = next;
	}
      else
	nextp = &node->next;
    }
}

/* Delete content of register with number REGNO in dataflow set SET.  */

static void
var_regno_delete (dataflow_set *set, int regno)
{
  attrs **reg = &set->regs[regno];
  attrs *node, *next;

  for (node = *reg; node; node = next)
    {
      next = node->next;
      delete_variable_part (set, node->loc, node->dv, node->offset);
      delete node;
    }
  *reg = NULL;
}

/* Return true if I is the negated value of a power of two.  */
static bool
negative_power_of_two_p (HOST_WIDE_INT i)
{
  unsigned HOST_WIDE_INT x = -(unsigned HOST_WIDE_INT)i;
  return pow2_or_zerop (x);
}

/* Strip constant offsets and alignments off of LOC.  Return the base
   expression.  */

static rtx
vt_get_canonicalize_base (rtx loc)
{
  while ((GET_CODE (loc) == PLUS
	  || GET_CODE (loc) == AND)
	 && GET_CODE (XEXP (loc, 1)) == CONST_INT
	 && (GET_CODE (loc) != AND
	     || negative_power_of_two_p (INTVAL (XEXP (loc, 1)))))
    loc = XEXP (loc, 0);

  return loc;
}

/* This caches canonicalized addresses for VALUEs, computed using
   information in the global cselib table.  */
static hash_map<rtx, rtx> *global_get_addr_cache;

/* This caches canonicalized addresses for VALUEs, computed using
   information from the global cache and information pertaining to a
   basic block being analyzed.  */
static hash_map<rtx, rtx> *local_get_addr_cache;

static rtx vt_canonicalize_addr (dataflow_set *, rtx);

/* Return the canonical address for LOC, which must be a VALUE, using a
   cached global equivalence or computing it and storing it in the
   global cache.  */

static rtx
get_addr_from_global_cache (rtx const loc)
{
  rtx x;

  gcc_checking_assert (GET_CODE (loc) == VALUE);

  bool existed;
  rtx *slot = &global_get_addr_cache->get_or_insert (loc, &existed);
  if (existed)
    return *slot;

  x = canon_rtx (get_addr (loc));

  /* Tentative, avoiding infinite recursion.  */
  *slot = x;

  if (x != loc)
    {
      rtx nx = vt_canonicalize_addr (NULL, x);
      if (nx != x)
	{
	  /* The table may have moved during recursion, recompute
	     SLOT.  */
	  *global_get_addr_cache->get (loc) = x = nx;
	}
    }

  return x;
}

/* Return the canonical address for LOC, which must be a VALUE, using a
   cached local equivalence or computing it and storing it in the
   local cache.  */

static rtx
get_addr_from_local_cache (dataflow_set *set, rtx const loc)
{
  rtx x;
  decl_or_value dv;
  variable *var;
  location_chain *l;

  gcc_checking_assert (GET_CODE (loc) == VALUE);

  bool existed;
  rtx *slot = &local_get_addr_cache->get_or_insert (loc, &existed);
  if (existed)
    return *slot;

  x = get_addr_from_global_cache (loc);

  /* Tentative, avoiding infinite recursion.  */
  *slot = x;

  /* Recurse to cache a local expansion of X, or to search for a VALUE
     in the expansion.  */
  if (x != loc)
    {
      rtx nx = vt_canonicalize_addr (set, x);
      if (nx != x)
	{
	  slot = local_get_addr_cache->get (loc);
	  *slot = x = nx;
	}
      return x;
    }

  dv = dv_from_rtx (x);
  var = shared_hash_find (set->vars, dv);
  if (!var)
    return x;

  /* Look for an improved equivalent expression.  */
  for (l = var->var_part[0].loc_chain; l; l = l->next)
    {
      rtx base = vt_get_canonicalize_base (l->loc);
      if (GET_CODE (base) == VALUE
	  && canon_value_cmp (base, loc))
	{
	  rtx nx = vt_canonicalize_addr (set, l->loc);
	  if (x != nx)
	    {
	      slot = local_get_addr_cache->get (loc);
	      *slot = x = nx;
	    }
	  break;
	}
    }

  return x;
}

/* Canonicalize LOC using equivalences from SET in addition to those
   in the cselib static table.  It expects a VALUE-based expression,
   and it will only substitute VALUEs with other VALUEs or
   function-global equivalences, so that, if two addresses have base
   VALUEs that are locally or globally related in ways that
   memrefs_conflict_p cares about, they will both canonicalize to
   expressions that have the same base VALUE.

   The use of VALUEs as canonical base addresses enables the canonical
   RTXs to remain unchanged globally, if they resolve to a constant,
   or throughout a basic block otherwise, so that they can be cached
   and the cache need not be invalidated when REGs, MEMs or such
   change.  */

static rtx
vt_canonicalize_addr (dataflow_set *set, rtx oloc)
{
  poly_int64 ofst = 0, term;
  machine_mode mode = GET_MODE (oloc);
  rtx loc = oloc;
  rtx x;
  bool retry = true;

  while (retry)
    {
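      /* Peel constant offsets off the base, accumulating them in OFST.  */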
      while (GET_CODE (loc) == PLUS
	     && poly_int_rtx_p (XEXP (loc, 1), &term))
	{
	  ofst += term;
	  loc = XEXP (loc, 0);
	}

      /* Alignment operations can't normally be combined, so just
	 canonicalize the base and we're done.  We'll normally have
	 only one stack alignment anyway.  */
      if (GET_CODE (loc) == AND
	  && GET_CODE (XEXP (loc, 1)) == CONST_INT
	  && negative_power_of_two_p (INTVAL (XEXP (loc, 1))))
	{
	  x = vt_canonicalize_addr (set, XEXP (loc, 0));
	  if (x != XEXP (loc, 0))
	    loc = gen_rtx_AND (mode, x, XEXP (loc, 1));
	  retry = false;
	}

      if (GET_CODE (loc) == VALUE)
	{
	  if (set)
	    loc = get_addr_from_local_cache (set, loc);
	  else
	    loc = get_addr_from_global_cache (loc);

	  /* Consolidate plus_constants.  */
	  while (maybe_ne (ofst, 0)
		 && GET_CODE (loc) == PLUS
		 && poly_int_rtx_p (XEXP (loc, 1), &term))
	    {
	      ofst += term;
	      loc = XEXP (loc, 0);
	    }

	  retry = false;
	}
      else
	{
	  x = canon_rtx (loc);
	  if (retry)
	    retry = (x != loc);
	  loc = x;
	}
    }

  /* Add OFST back in.  */
  if (maybe_ne (ofst, 0))
    {
      /* Don't build new RTL if we can help it.  */
      if (strip_offset (oloc, &term) == loc && known_eq (term, ofst))
	return oloc;

      loc = plus_constant (mode, loc, ofst);
    }

  return loc;
}

/* Return true iff there's a true dependence between MLOC and LOC.
   MADDR must be a canonicalized version of MLOC's address.  */

static inline bool
vt_canon_true_dep (dataflow_set *set, rtx mloc, rtx maddr, rtx loc)
{
  if (GET_CODE (loc) != MEM)
    return false;

  rtx addr = vt_canonicalize_addr (set, XEXP (loc, 0));
  if (!canon_true_dependence (mloc, GET_MODE (mloc), maddr, loc, addr))
    return false;

  return true;
}

/* Hold parameters for the hashtab traversal function
   drop_overlapping_mem_locs, see below.  */

struct overlapping_mems
{
  dataflow_set *set;
  rtx loc, addr;
};

/* Remove all MEMs that overlap with COMS->LOC from the location list
   of a hash table entry for a onepart variable.  COMS->ADDR must be a
   canonicalized form of COMS->LOC's address, and COMS->LOC must be
   canonicalized itself.  */

int
drop_overlapping_mem_locs (variable **slot, overlapping_mems *coms)
{
  dataflow_set *set = coms->set;
  rtx mloc = coms->loc, addr = coms->addr;
  variable *var = *slot;

  if (var->onepart != NOT_ONEPART)
    {
      location_chain *loc, **locp;
      bool changed = false;
      rtx cur_loc;

      gcc_assert (var->n_var_parts == 1);

      if (shared_var_p (var, set->vars))
	{
	  for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
	    if (vt_canon_true_dep (set, mloc, addr, loc->loc))
	      break;

	  if (!loc)
	    return 1;

	  slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
	  var = *slot;
	  gcc_assert (var->n_var_parts == 1);
	}

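      /* Find the location last emitted for VAR, so that deleting it
	 below can flag VAR as changed.  */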
      if (VAR_LOC_1PAUX (var))
	cur_loc = VAR_LOC_FROM (var);
      else
	cur_loc = var->var_part[0].cur_loc;

      for (locp = &var->var_part[0].loc_chain, loc = *locp;
	   loc; loc = *locp)
	{
	  if (!vt_canon_true_dep (set, mloc, addr, loc->loc))
	    {
	      locp = &loc->next;
	      continue;
	    }

	  *locp = loc->next;
	  /* If we have deleted the location which was last emitted,
	     we have to emit a new location, so add the variable to the
	     set of changed variables.  */
	  if (cur_loc == loc->loc)
	    {
	      changed = true;
	      var->var_part[0].cur_loc = NULL;
	      if (VAR_LOC_1PAUX (var))
		VAR_LOC_FROM (var) = NULL;
	    }
	  delete loc;
	}

      if (!var->var_part[0].loc_chain)
	{
	  var->n_var_parts--;
	  changed = true;
	}
      if (changed)
	variable_was_changed (var, set);
    }

  return 1;
}

/* Remove from SET all VALUE bindings to MEMs that overlap with LOC.  */

static void
clobber_overlapping_mems (dataflow_set *set, rtx loc)
{
  struct overlapping_mems coms;

  gcc_checking_assert (GET_CODE (loc) == MEM);

  coms.set = set;
  coms.loc = canon_rtx (loc);
  coms.addr = vt_canonicalize_addr (set, XEXP (loc, 0));

  set->traversed_vars = set->vars;
  shared_hash_htab (set->vars)
    ->traverse <overlapping_mems*, drop_overlapping_mem_locs> (&coms);
  set->traversed_vars = NULL;
}

/* Set the location of DV, OFFSET as the MEM LOC.  */

static void
var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
		  decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
		  enum insert_option iopt)
{
  if (dv_is_decl_p (dv))
    dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));

  set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
}

/* Set the location part of variable MEM_EXPR (LOC) in dataflow set
   SET to LOC.
   Adjust the address first if it is stack pointer based.  */

static void
var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
	     rtx set_src)
{
  tree decl = MEM_EXPR (loc);
  HOST_WIDE_INT offset = int_mem_offset (loc);

  var_mem_decl_set (set, loc, initialized,
		    dv_from_decl (decl), offset, set_src, INSERT);
}

/* Delete and set the location part of variable MEM_EXPR (LOC) in
   dataflow set SET to LOC.  If MODIFY is true, any other live copies
   of the same variable part are also deleted from the dataflow set,
   otherwise the variable part is assumed to be copied from another
   location holding the same part.
   Adjust the address first if it is stack pointer based.  */

static void
var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
			enum var_init_status initialized, rtx set_src)
{
  tree decl = MEM_EXPR (loc);
  HOST_WIDE_INT offset = int_mem_offset (loc);

  clobber_overlapping_mems (set, loc);
  decl = var_debug_decl (decl);

  if (initialized == VAR_INIT_STATUS_UNKNOWN)
    initialized = get_init_value (set, loc, dv_from_decl (decl));

  if (modify)
    clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
  var_mem_set (set, loc, initialized, set_src);
}

/* Delete the location part LOC from dataflow set SET.  If CLOBBER is
   true, also delete any other live copies of the same variable part.
   Adjust the address first if it is stack pointer based.  */

static void
var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
{
  tree decl = MEM_EXPR (loc);
  HOST_WIDE_INT offset = int_mem_offset (loc);

  clobber_overlapping_mems (set, loc);
  decl = var_debug_decl (decl);
  if (clobber)
    clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
  delete_variable_part (set, loc, dv_from_decl (decl), offset);
}

/* Return true if LOC should not be expanded for location expressions,
   or used in them.  */

static inline bool
unsuitable_loc (rtx loc)
{
  switch (GET_CODE (loc))
    {
    case PC:
    case SCRATCH:
    case CC0:
    case ASM_INPUT:
    case ASM_OPERANDS:
      return true;

    default:
      return false;
    }
}

/* Bind VAL to LOC in SET.  If MODIFIED, detach LOC from any values
   bound to it.  */

static inline void
val_bind (dataflow_set *set, rtx val, rtx loc, bool modified)
{
  if (REG_P (loc))
    {
      if (modified)
	var_regno_delete (set, REGNO (loc));
      var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
			dv_from_value (val), 0, NULL_RTX, INSERT);
    }
  else if (MEM_P (loc))
    {
      struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs;

      if (modified)
	clobber_overlapping_mems (set, loc);

      if (l && GET_CODE (l->loc) == VALUE)
	l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs;

      /* If this MEM is a global constant, we don't need it in the
	 dynamic tables.  ??? We should test this before emitting the
	 micro-op in the first place.  */
      while (l)
	if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0))
	  break;
	else
	  l = l->next;

      if (!l)
	var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
			  dv_from_value (val), 0, NULL_RTX, INSERT);
    }
  else
    {
      /* Other kinds of equivalences are necessarily static, at least
	 so long as we do not perform substitutions while merging
	 expressions.  */
      gcc_unreachable ();
      set_variable_part (set, loc, dv_from_value (val), 0,
			 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
    }
}

/* Bind a value to a location it was just stored in.  If MODIFIED
   holds, assume the location was modified, detaching it from any
   values bound to it.  */

static void
val_store (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn,
	   bool modified)
{
  cselib_val *v = CSELIB_VAL_PTR (val);

  gcc_assert (cselib_preserved_value_p (v));

  if (dump_file)
    {
      fprintf (dump_file, "%i: ", insn ? INSN_UID (insn) : 0);
      print_inline_rtx (dump_file, loc, 0);
      fprintf (dump_file, " evaluates to ");
      print_inline_rtx (dump_file, val, 0);
      if (v->locs)
	{
	  struct elt_loc_list *l;
	  for (l = v->locs; l; l = l->next)
	    {
	      fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
	      print_inline_rtx (dump_file, l->loc, 0);
	    }
	}
      fprintf (dump_file, "\n");
    }

  gcc_checking_assert (!unsuitable_loc (loc));

  val_bind (set, val, loc, modified);
}

/* Clear (canonical address) slots that reference X.  */

bool
local_get_addr_clear_given_value (rtx const &, rtx *slot, rtx x)
{
  if (vt_get_canonicalize_base (*slot) == x)
    *slot = NULL;
  return true;
}

/* Reset this node, detaching all its equivalences.  */

static void
val_reset (dataflow_set *set, decl_or_value dv)
{
  variable *var = shared_hash_find (set->vars, dv);
  location_chain *node;
  rtx cval;

  if (!var || !var->n_var_parts)
    return;

  gcc_assert (var->n_var_parts == 1);

  if (var->onepart == ONEPART_VALUE)
    {
      rtx x = dv_as_value (dv);

      /* Relationships in the global cache don't change, so reset the
	 local cache entry only.  */
      rtx *slot = local_get_addr_cache->get (x);
      if (slot)
	{
	  /* If the value resolved back to itself, odds are that other
	     values may have cached it too.  These entries now refer
	     to the old X, so detach them too.  Entries that used the
	     old X but resolved to something else remain ok as long as
	     that something else isn't also reset.  */
	  if (*slot == x)
	    local_get_addr_cache
	      ->traverse<rtx, local_get_addr_clear_given_value> (x);
	  *slot = NULL;
	}
    }

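  /* Pick the most canonical of the node's VALUE equivalences.  */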
  cval = NULL;
  for (node = var->var_part[0].loc_chain; node; node = node->next)
    if (GET_CODE (node->loc) == VALUE
	&& canon_value_cmp (node->loc, cval))
      cval = node->loc;

  for (node = var->var_part[0].loc_chain; node; node = node->next)
    if (GET_CODE (node->loc) == VALUE && cval != node->loc)
      {
	/* Redirect the equivalence link to the new canonical
	   value, or simply remove it if it would point at
	   itself.  */
	if (cval)
	  set_variable_part (set, cval, dv_from_value (node->loc),
			     0, node->init, node->set_src, NO_INSERT);
	delete_variable_part (set, dv_as_value (dv),
			      dv_from_value (node->loc), 0);
      }

  if (cval)
    {
      decl_or_value cdv = dv_from_value (cval);

      /* Keep the remaining values connected, accumulating links
	 in the canonical value.  */
      for (node = var->var_part[0].loc_chain; node; node = node->next)
	{
	  if (node->loc == cval)
	    continue;
	  else if (GET_CODE (node->loc) == REG)
	    var_reg_decl_set (set, node->loc, node->init, cdv, 0,
			      node->set_src, NO_INSERT);
	  else if (GET_CODE (node->loc) == MEM)
	    var_mem_decl_set (set, node->loc, node->init, cdv, 0,
			      node->set_src, NO_INSERT);
	  else
	    set_variable_part (set, node->loc, cdv, 0,
			       node->init, node->set_src, NO_INSERT);
	}
    }

  /* We remove this last, to make sure that the canonical value is not
     removed to the point of requiring reinsertion.  */
  if (cval)
    delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);

  clobber_variable_part (set, NULL, dv, 0, NULL);
}

/* Find the values in a given location, and map VAL to another value
   if the mapping is unique, or else add the location as one holding
   VAL.  */

static void
val_resolve (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn)
{
  decl_or_value dv = dv_from_value (val);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (insn)
	fprintf (dump_file, "%i: ", INSN_UID (insn));
      else
	fprintf (dump_file, "head: ");
      print_inline_rtx (dump_file, val, 0);
      fputs (" is at ", dump_file);
      print_inline_rtx (dump_file, loc, 0);
      fputc ('\n', dump_file);
    }

  val_reset (set, dv);

  gcc_checking_assert (!unsuitable_loc (loc));

  if (REG_P (loc))
    {
      attrs *node, *found = NULL;

      for (node = set->regs[REGNO (loc)]; node; node = node->next)
	if (dv_is_value_p (node->dv)
	    && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
	  {
	    found = node;

	    /* Map incoming equivalences.  ??? Wouldn't it be nice if
	       we just started sharing the location lists?  Maybe a
	       circular list ending at the value itself or some
	       such.  */
	    set_variable_part (set, dv_as_value (node->dv),
			       dv_from_value (val), node->offset,
			       VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
	    set_variable_part (set, val, node->dv, node->offset,
			       VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
	  }

      /* If we didn't find any equivalence, we need to remember that
	 this value is held in the named register.  */
      if (found)
	return;
    }
  /* ??? Attempt to find and merge equivalent MEMs or other
     expressions too.  */

  val_bind (set, val, loc, false);
}

/* Initialize dataflow set SET to be empty.  */

static void
dataflow_set_init (dataflow_set *set)
{
  init_attrs_list_set (set->regs);
  set->vars = shared_hash_copy (empty_shared_hash);
  set->stack_adjust = 0;
  set->traversed_vars = NULL;
}

/* Delete the contents of dataflow set SET.  */

static void
dataflow_set_clear (dataflow_set *set)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    attrs_list_clear (&set->regs[i]);

  shared_hash_destroy (set->vars);
  set->vars = shared_hash_copy (empty_shared_hash);
}

/* Copy the contents of dataflow set SRC to DST.  */

static void
dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    attrs_list_copy (&dst->regs[i], src->regs[i]);

  shared_hash_destroy (dst->vars);
  dst->vars = shared_hash_copy (src->vars);
  dst->stack_adjust = src->stack_adjust;
}

/* Information for merging lists of locations for a given offset of
   a variable.  */
struct variable_union_info
{
  /* Node of the location chain.  */
  location_chain *lc;

  /* The sum of positions in the input chains.  */
  int pos;

  /* The position in the chain of DST dataflow set.  */
  int pos_dst;
};

/* Buffer for location list sorting and its allocated size.  */
static struct variable_union_info *vui_vec;
static int vui_allocated;

/* Compare function for qsort, order the structures by POS element.  */

static int
variable_union_info_cmp_pos (const void *n1, const void *n2)
{
  const struct variable_union_info *const i1 =
    (const struct variable_union_info *) n1;
  const struct variable_union_info *const i2 =
    (const struct variable_union_info *) n2;

  if (i1->pos != i2->pos)
    return i1->pos - i2->pos;

  return (i1->pos_dst - i2->pos_dst);
}

/* Compute union of location parts of variable *SLOT and the same variable
   from hash table DATA.  Compute "sorted" union of the location chains
   for common offsets, i.e. the locations of a variable part are sorted by
   a priority where the priority is the sum of the positions in the 2 chains
   (if a location is only in one list the position in the second list is
   defined to be larger than the length of the chains).
   When we are updating the location parts the newest location is in the
   beginning of the chain, so when we do the described "sorted" union
   we keep the newest locations in the beginning.  */

static int
variable_union (variable *src, dataflow_set *set)
{
  variable *dst;
  variable **dstp;
  int i, j, k;

  dstp = shared_hash_find_slot (set->vars, src->dv);
  if (!dstp || !*dstp)
    {
      src->refcount++;

      dst_can_be_shared = false;
      if (!dstp)
	dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);

      *dstp = src;

      /* Continue traversing the hash table.  */
      return 1;
    }
  else
    dst = *dstp;

  gcc_assert (src->n_var_parts);
  gcc_checking_assert (src->onepart == dst->onepart);

  /* We can combine one-part variables very efficiently, because their
     entries are in canonical order.  */
  if (src->onepart)
    {
      location_chain **nodep, *dnode, *snode;

      gcc_assert (src->n_var_parts == 1
		  && dst->n_var_parts == 1);

      snode = src->var_part[0].loc_chain;
      gcc_assert (snode);

    restart_onepart_unshared:
      nodep = &dst->var_part[0].loc_chain;
      dnode = *nodep;
      gcc_assert (dnode);

      while (snode)
	{
	  int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;

	  if (r > 0)
	    {
	      location_chain *nnode;

	      if (shared_var_p (dst, set->vars))
		{
		  dstp = unshare_variable (set, dstp, dst,
					   VAR_INIT_STATUS_INITIALIZED);
		  dst = *dstp;
		  goto restart_onepart_unshared;
		}

	      *nodep = nnode = new location_chain;
	      nnode->loc = snode->loc;
	      nnode->init = snode->init;
	      if (!snode->set_src || MEM_P (snode->set_src))
		nnode->set_src = NULL;
	      else
		nnode->set_src = snode->set_src;
	      nnode->next = dnode;
	      dnode = nnode;
	    }
	  else if (r == 0)
	    gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc));

	  if (r >= 0)
	    snode = snode->next;

	  nodep = &dnode->next;
	  dnode = *nodep;
	}

      return 1;
    }

  gcc_checking_assert (!src->onepart);

  /* Count the number of location parts, result is K.  */
  for (i = 0, j = 0, k = 0;
       i < src->n_var_parts && j < dst->n_var_parts; k++)
    {
      if (VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
	{
	  i++;
	  j++;
	}
      else if (VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
	i++;
      else
	j++;
    }
  k += src->n_var_parts - i;
  k += dst->n_var_parts - j;

  /* We track only variables whose size is <= MAX_VAR_PARTS bytes
     thus there are at most MAX_VAR_PARTS different offsets.  */
  gcc_checking_assert (dst->onepart ? k == 1 : k <= MAX_VAR_PARTS);

  if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
    {
      dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
      dst = *dstp;
    }

  i = src->n_var_parts - 1;
  j = dst->n_var_parts - 1;
  dst->n_var_parts = k;

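  /* Build the location parts of DST from the highest offset down,
     merging the chains of SRC and DST at matching offsets.  */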
  for (k--; k >= 0; k--)
    {
      location_chain *node, *node2;

      if (i >= 0 && j >= 0
	  && VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
	{
	  /* Compute the "sorted" union of the chains, i.e. the locations which
	     are in both chains go first, they are sorted by the sum of
	     positions in the chains.  */
	  int dst_l, src_l;
	  int ii, jj, n;
	  struct variable_union_info *vui;

	  /* If DST is shared compare the location chains.
	     If they are different we will modify the chain in DST with
	     high probability so make a copy of DST.  */
	  if (shared_var_p (dst, set->vars))
	    {
	      for (node = src->var_part[i].loc_chain,
		   node2 = dst->var_part[j].loc_chain; node && node2;
		   node = node->next, node2 = node2->next)
		{
		  if (!((REG_P (node2->loc)
			 && REG_P (node->loc)
			 && REGNO (node2->loc) == REGNO (node->loc))
			|| rtx_equal_p (node2->loc, node->loc)))
		    {
		      if (node2->init < node->init)
			node2->init = node->init;
		      break;
		    }
		}
	      if (node || node2)
		{
		  dstp = unshare_variable (set, dstp, dst,
					   VAR_INIT_STATUS_UNKNOWN);
		  dst = (variable *)*dstp;
		}
	    }

	  src_l = 0;
	  for (node = src->var_part[i].loc_chain; node; node = node->next)
	    src_l++;
	  dst_l = 0;
	  for (node = dst->var_part[j].loc_chain; node; node = node->next)
	    dst_l++;

	  if (dst_l == 1)
	    {
	      /* The most common case, much simpler, no qsort is needed.  */
	      location_chain *dstnode = dst->var_part[j].loc_chain;
	      dst->var_part[k].loc_chain = dstnode;
	      VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
	      node2 = dstnode;
	      for (node = src->var_part[i].loc_chain; node; node = node->next)
		if (!((REG_P (dstnode->loc)
		       && REG_P (node->loc)
		       && REGNO (dstnode->loc) == REGNO (node->loc))
		      || rtx_equal_p (dstnode->loc, node->loc)))
		  {
		    location_chain *new_node;

		    /* Copy the location from SRC.  */
		    new_node = new location_chain;
		    new_node->loc = node->loc;
		    new_node->init = node->init;
		    if (!node->set_src || MEM_P (node->set_src))
		      new_node->set_src = NULL;
		    else
		      new_node->set_src = node->set_src;
		    node2->next = new_node;
		    node2 = new_node;
		  }
	      node2->next = NULL;
	    }
	  else
	    {
	      if (src_l + dst_l > vui_allocated)
		{
		  vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
		  vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
					vui_allocated);
		}
	      vui = vui_vec;

	      /* Fill in the locations from DST.  */
	      for (node = dst->var_part[j].loc_chain, jj = 0; node;
		   node = node->next, jj++)
		{
		  vui[jj].lc = node;
		  vui[jj].pos_dst = jj;

		  /* Pos plus value larger than a sum of 2 valid positions.  */
		  vui[jj].pos = jj + src_l + dst_l;
		}

	      /* Fill in the locations from SRC.  */
	      n = dst_l;
	      for (node = src->var_part[i].loc_chain, ii = 0; node;
		   node = node->next, ii++)
		{
		  /* Find location from NODE.  */
		  for (jj = 0; jj < dst_l; jj++)
		    {
		      if ((REG_P (vui[jj].lc->loc)
			   && REG_P (node->loc)
			   && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
			  || rtx_equal_p (vui[jj].lc->loc, node->loc))
			{
			  vui[jj].pos = jj + ii;
			  break;
			}
		    }
		  if (jj >= dst_l)	/* The location has not been found.  */
		    {
		      location_chain *new_node;

		      /* Copy the location from SRC.  */
		      new_node = new location_chain;
		      new_node->loc = node->loc;
		      new_node->init = node->init;
		      if (!node->set_src || MEM_P (node->set_src))
			new_node->set_src = NULL;
		      else
			new_node->set_src = node->set_src;
		      vui[n].lc = new_node;
		      vui[n].pos_dst = src_l + dst_l;
		      vui[n].pos = ii + src_l + dst_l;
		      n++;
		    }
		}

	      if (dst_l == 2)
		{
		  /* Special-case the still very common case of dst_l == 2:
		     all entries dst_l ... n-1 are sorted, and for i >= dst_l
		     vui[i].pos == i + src_l + dst_l.  */
		  if (vui[0].pos > vui[1].pos)
		    {
		      /* Order should be 1, 0, 2... */
		      dst->var_part[k].loc_chain = vui[1].lc;
		      vui[1].lc->next = vui[0].lc;
		      if (n >= 3)
			{
			  vui[0].lc->next = vui[2].lc;
			  vui[n - 1].lc->next = NULL;
			}
		      else
			vui[0].lc->next = NULL;
		      ii = 3;
		    }
		  else
		    {
		      dst->var_part[k].loc_chain = vui[0].lc;
		      if (n >= 3 && vui[2].pos < vui[1].pos)
			{
			  /* Order should be 0, 2, 1, 3... */
			  vui[0].lc->next = vui[2].lc;
			  vui[2].lc->next = vui[1].lc;
			  if (n >= 4)
			    {
			      vui[1].lc->next = vui[3].lc;
			      vui[n - 1].lc->next = NULL;
			    }
			  else
			    vui[1].lc->next = NULL;
			  ii = 4;
			}
		      else
			{
			  /* Order should be 0, 1, 2... */
			  ii = 1;
			  vui[n - 1].lc->next = NULL;
			}
		    }
		  for (; ii < n; ii++)
		    vui[ii - 1].lc->next = vui[ii].lc;
		}
	      else
		{
		  qsort (vui, n, sizeof (struct variable_union_info),
			 variable_union_info_cmp_pos);

		  /* Reconnect the nodes in sorted order.  */
		  for (ii = 1; ii < n; ii++)
		    vui[ii - 1].lc->next = vui[ii].lc;
		  vui[n - 1].lc->next = NULL;
		  dst->var_part[k].loc_chain = vui[0].lc;
		}

	      VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
	    }
	  i--;
	  j--;
	}
      else if ((i >= 0 && j >= 0
		&& VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
	       || i < 0)
	{
	  dst->var_part[k] = dst->var_part[j];
	  j--;
	}
      else if ((i >= 0 && j >= 0
		&& VAR_PART_OFFSET (src, i) > VAR_PART_OFFSET (dst, j))
	       || j < 0)
	{
	  location_chain **nextp;

	  /* Copy the chain from SRC.  */
	  nextp = &dst->var_part[k].loc_chain;
	  for (node = src->var_part[i].loc_chain; node; node = node->next)
	    {
	      location_chain *new_lc;

	      new_lc = new location_chain;
	      new_lc->next = NULL;
	      new_lc->init = node->init;
	      if (!node->set_src || MEM_P (node->set_src))
		new_lc->set_src = NULL;
	      else
		new_lc->set_src = node->set_src;
	      new_lc->loc = node->loc;

	      *nextp = new_lc;
	      nextp = &new_lc->next;
	    }

	  VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (src, i);
	  i--;
	}
      dst->var_part[k].cur_loc = NULL;
    }

  if (flag_var_tracking_uninit)
    for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
      {
	location_chain *node, *node2;
	for (node = src->var_part[i].loc_chain; node; node = node->next)
	  for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
	    if (rtx_equal_p (node->loc, node2->loc))
	      {
		if (node->init > node2->init)
		  node2->init = node->init;
	      }
      }

  /* Continue traversing the hash table.  */
  return 1;
}

/* Compute union of dataflow sets SRC and DST and store it to DST.  */

static void
dataflow_set_union (dataflow_set *dst, dataflow_set *src)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    attrs_list_union (&dst->regs[i], src->regs[i]);

  if (dst->vars == empty_shared_hash)
    {
      shared_hash_destroy (dst->vars);
      dst->vars = shared_hash_copy (src->vars);
    }
  else
    {
      variable_iterator_type hi;
      variable *var;

      FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (src->vars),
				   var, variable, hi)
	variable_union (var, dst);
    }
}

/* Whether the value is currently being expanded.  */
#define VALUE_RECURSED_INTO(x) \
  (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)

/* Whether no expansion was found, saving useless lookups.
   It must only be set when VALUE_CHANGED is clear.  */
#define NO_LOC_P(x) \
  (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)

/* Whether cur_loc in the value needs to be (re)computed.  */
#define VALUE_CHANGED(x) \
  (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
/* Whether cur_loc in the decl needs to be (re)computed.  */
#define DECL_CHANGED(x) TREE_VISITED (x)

/* Record (if NEWV) that DV needs to have its cur_loc recomputed.  For
   user DECLs, this means they're in changed_variables.  Values and
   debug exprs may be left with this flag set if no user variable
   requires them to be evaluated.  */

static inline void
set_dv_changed (decl_or_value dv, bool newv)
{
  switch (dv_onepart_p (dv))
    {
    case ONEPART_VALUE:
      if (newv)
	NO_LOC_P (dv_as_value (dv)) = false;
      VALUE_CHANGED (dv_as_value (dv)) = newv;
      break;

    case ONEPART_DEXPR:
      if (newv)
	NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv))) = false;
      /* Fall through.  */

    default:
      DECL_CHANGED (dv_as_decl (dv)) = newv;
      break;
    }
}

/* Return true if DV needs to have its cur_loc recomputed.  */

static inline bool
dv_changed_p (decl_or_value dv)
{
  return (dv_is_value_p (dv)
	  ? VALUE_CHANGED (dv_as_value (dv))
	  : DECL_CHANGED (dv_as_decl (dv)));
}

/* Return a location list node whose loc is rtx_equal to LOC, in the
   location list of a one-part variable or value VAR, or in that of
   any values recursively mentioned in the location lists.  VARS must
   be in star-canonical form.  */

static location_chain *
find_loc_in_1pdv (rtx loc, variable *var, variable_table_type *vars)
{
  location_chain *node;
  enum rtx_code loc_code;

  if (!var)
    return NULL;

  gcc_checking_assert (var->onepart);

  if (!var->n_var_parts)
    return NULL;

  gcc_checking_assert (loc != dv_as_opaque (var->dv));

  loc_code = GET_CODE (loc);
  for (node = var->var_part[0].loc_chain; node; node = node->next)
    {
      decl_or_value dv;
      variable *rvar;

      if (GET_CODE (node->loc) != loc_code)
	{
	  if (GET_CODE (node->loc) != VALUE)
	    continue;
	}
      else if (loc == node->loc)
	return node;
      else if (loc_code != VALUE)
	{
	  if (rtx_equal_p (loc, node->loc))
	    return node;
	  continue;
	}

      /* Since we're in star-canonical form, we don't need to visit
	 non-canonical nodes: one-part variables and non-canonical
	 values would only point back to the canonical node.  */
      if (dv_is_value_p (var->dv)
	  && !canon_value_cmp (node->loc, dv_as_value (var->dv)))
	{
	  /* Skip all subsequent VALUEs.  */
	  while (node->next && GET_CODE (node->next->loc) == VALUE)
	    {
	      node = node->next;
	      gcc_checking_assert (!canon_value_cmp (node->loc,
						     dv_as_value (var->dv)));
	      if (loc == node->loc)
		return node;
	    }
	  continue;
	}

      gcc_checking_assert (node == var->var_part[0].loc_chain);
      gcc_checking_assert (!node->next);

      dv = dv_from_value (node->loc);
      rvar = vars->find_with_hash (dv, dv_htab_hash (dv));
      return find_loc_in_1pdv (loc, rvar, vars);
    }

  /* ??? Gotta look in cselib_val locations too.  */

  return NULL;
}

3299 /* Hash table iteration argument passed to variable_merge.  */
3300 struct dfset_merge
3301 {
3302   /* The set in which the merge is to be inserted.  */
3303   dataflow_set *dst;
3304   /* The set that we're iterating in.  */
3305   dataflow_set *cur;
3306   /* The set that may contain the other dv we are to merge with.  */
3307   dataflow_set *src;
3308   /* Number of onepart dvs in src.  */
3309   int src_onepart_cnt;
3310 };
3311 
3312 /* Insert LOC in *DNODE, if it's not there yet.  The list must be in
3313    loc_cmp order, and it is maintained as such.  */
3314 
3315 static void
insert_into_intersection(location_chain ** nodep,rtx loc,enum var_init_status status)3316 insert_into_intersection (location_chain **nodep, rtx loc,
3317 			  enum var_init_status status)
3318 {
3319   location_chain *node;
3320   int r;
3321 
3322   for (node = *nodep; node; nodep = &node->next, node = *nodep)
3323     if ((r = loc_cmp (node->loc, loc)) == 0)
3324       {
3325 	node->init = MIN (node->init, status);
3326 	return;
3327       }
3328     else if (r > 0)
3329       break;
3330 
3331   node = new location_chain;
3332 
3333   node->loc = loc;
3334   node->set_src = NULL;
3335   node->init = status;
3336   node->next = *nodep;
3337   *nodep = node;
3338 }
3339 
3340 /* Insert in DEST the intersection of the locations present in both
3341    S1NODE and S2VAR, directly or indirectly.  S1NODE is from a
3342    variable in DSM->cur, whereas S2VAR is from DSM->src.  dvar is in
3343    DSM->dst.  */
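/* A hypothetical example: intersecting

     cur: (reg:SI 0), (value:SI 5)
     src: (reg:SI 0), (mem:SI (reg:SI 7))

   where the location list of VALUE 5 in DSM->cur contains
   (mem:SI (reg:SI 7)), inserts (reg:SI 0) as a direct match and
   (mem:SI (reg:SI 7)) as an indirect one, found by recursing into
   VALUE 5's location list.  */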
3344 
3345 static void
3346 intersect_loc_chains (rtx val, location_chain **dest, struct dfset_merge *dsm,
3347 		      location_chain *s1node, variable *s2var)
3348 {
3349   dataflow_set *s1set = dsm->cur;
3350   dataflow_set *s2set = dsm->src;
3351   location_chain *found;
3352 
3353   if (s2var)
3354     {
3355       location_chain *s2node;
3356 
3357       gcc_checking_assert (s2var->onepart);
3358 
3359       if (s2var->n_var_parts)
3360 	{
3361 	  s2node = s2var->var_part[0].loc_chain;
3362 
3363 	  for (; s1node && s2node;
3364 	       s1node = s1node->next, s2node = s2node->next)
3365 	    if (s1node->loc != s2node->loc)
3366 	      break;
3367 	    else if (s1node->loc == val)
3368 	      continue;
3369 	    else
3370 	      insert_into_intersection (dest, s1node->loc,
3371 					MIN (s1node->init, s2node->init));
3372 	}
3373     }
3374 
3375   for (; s1node; s1node = s1node->next)
3376     {
3377       if (s1node->loc == val)
3378 	continue;
3379 
3380       if ((found = find_loc_in_1pdv (s1node->loc, s2var,
3381 				     shared_hash_htab (s2set->vars))))
3382 	{
3383 	  insert_into_intersection (dest, s1node->loc,
3384 				    MIN (s1node->init, found->init));
3385 	  continue;
3386 	}
3387 
3388       if (GET_CODE (s1node->loc) == VALUE
3389 	  && !VALUE_RECURSED_INTO (s1node->loc))
3390 	{
3391 	  decl_or_value dv = dv_from_value (s1node->loc);
3392 	  variable *svar = shared_hash_find (s1set->vars, dv);
3393 	  if (svar)
3394 	    {
3395 	      if (svar->n_var_parts == 1)
3396 		{
3397 		  VALUE_RECURSED_INTO (s1node->loc) = true;
3398 		  intersect_loc_chains (val, dest, dsm,
3399 					svar->var_part[0].loc_chain,
3400 					s2var);
3401 		  VALUE_RECURSED_INTO (s1node->loc) = false;
3402 		}
3403 	    }
3404 	}
3405 
3406       /* ??? Gotta look in cselib_val locations too.  */
3407 
3408       /* ??? if the location is equivalent to any location in src,
3409 	 searched recursively
3410 
3411 	   add to dst the values needed to represent the equivalence
3412 
3413      telling whether location S is equivalent to another dv's
3414      location list:
3415 
3416        for each location D in the list
3417 
3418          if S and D satisfy rtx_equal_p, then it is present
3419 
3420 	 else if D is a value, recurse without cycles
3421 
3422 	 else if S and D have the same CODE and MODE
3423 
3424 	   for each operand oS and the corresponding oD
3425 
3426 	     if oS and oD are not equivalent, then S and D are not equivalent
3427 
3428 	     else if they are RTX vectors
3429 
3430 	       if any vector oS element is not equivalent to its respective oD,
3431 	       then S and D are not equivalent
3432 
3433    */
3434 
3435 
3436     }
3437 }
3438 
3439 /* Return -1 if X should be before Y in a location list for a 1-part
3440    variable, 1 if Y should be before X, and 0 if they're equivalent
3441    and should not appear in the list.  */
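/* The resulting order is, roughly: registers first (by REGNO), then
   MEMs (recursively, by address), then VALUEs (more canonical ones
   first), then the remaining codes (by code, then by operands), with
   ENTRY_VALUEs at the very end.  A sorted chain could thus look like

     (reg:SI 0) < (mem:SI (reg:SI 7)) < (value:SI 5)
       < (plus:SI (value:SI 5) (const_int 4))
       < (entry_value:SI (reg:SI 0))

   (hypothetical RTL, for exposition only).  */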
3442 
3443 static int
3444 loc_cmp (rtx x, rtx y)
3445 {
3446   int i, j, r;
3447   RTX_CODE code = GET_CODE (x);
3448   const char *fmt;
3449 
3450   if (x == y)
3451     return 0;
3452 
3453   if (REG_P (x))
3454     {
3455       if (!REG_P (y))
3456 	return -1;
3457       gcc_assert (GET_MODE (x) == GET_MODE (y));
3458       if (REGNO (x) == REGNO (y))
3459 	return 0;
3460       else if (REGNO (x) < REGNO (y))
3461 	return -1;
3462       else
3463 	return 1;
3464     }
3465 
3466   if (REG_P (y))
3467     return 1;
3468 
3469   if (MEM_P (x))
3470     {
3471       if (!MEM_P (y))
3472 	return -1;
3473       gcc_assert (GET_MODE (x) == GET_MODE (y));
3474       return loc_cmp (XEXP (x, 0), XEXP (y, 0));
3475     }
3476 
3477   if (MEM_P (y))
3478     return 1;
3479 
3480   if (GET_CODE (x) == VALUE)
3481     {
3482       if (GET_CODE (y) != VALUE)
3483 	return -1;
3484       /* Don't assert the modes are the same; that is true only
3485 	 when not recursing.  (subreg:QI (value:SI 1:1) 0)
3486 	 and (subreg:QI (value:DI 2:2) 0) can be compared,
3487 	 even when the modes are different.  */
3488       if (canon_value_cmp (x, y))
3489 	return -1;
3490       else
3491 	return 1;
3492     }
3493 
3494   if (GET_CODE (y) == VALUE)
3495     return 1;
3496 
3497   /* Entry value is the least preferable kind of expression.  */
3498   if (GET_CODE (x) == ENTRY_VALUE)
3499     {
3500       if (GET_CODE (y) != ENTRY_VALUE)
3501 	return 1;
3502       gcc_assert (GET_MODE (x) == GET_MODE (y));
3503       return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
3504     }
3505 
3506   if (GET_CODE (y) == ENTRY_VALUE)
3507     return -1;
3508 
3509   if (GET_CODE (x) == GET_CODE (y))
3510     /* Compare operands below.  */;
3511   else if (GET_CODE (x) < GET_CODE (y))
3512     return -1;
3513   else
3514     return 1;
3515 
3516   gcc_assert (GET_MODE (x) == GET_MODE (y));
3517 
3518   if (GET_CODE (x) == DEBUG_EXPR)
3519     {
3520       if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3521 	  < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
3522 	return -1;
3523       gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3524 			   > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
3525       return 1;
3526     }
3527 
3528   fmt = GET_RTX_FORMAT (code);
3529   for (i = 0; i < GET_RTX_LENGTH (code); i++)
3530     switch (fmt[i])
3531       {
3532       case 'w':
3533 	if (XWINT (x, i) == XWINT (y, i))
3534 	  break;
3535 	else if (XWINT (x, i) < XWINT (y, i))
3536 	  return -1;
3537 	else
3538 	  return 1;
3539 
3540       case 'n':
3541       case 'i':
3542 	if (XINT (x, i) == XINT (y, i))
3543 	  break;
3544 	else if (XINT (x, i) < XINT (y, i))
3545 	  return -1;
3546 	else
3547 	  return 1;
3548 
3549       case 'p':
3550 	r = compare_sizes_for_sort (SUBREG_BYTE (x), SUBREG_BYTE (y));
3551 	if (r != 0)
3552 	  return r;
3553 	break;
3554 
3555       case 'V':
3556       case 'E':
3557 	/* Compare the vector length first.  */
3558 	if (XVECLEN (x, i) == XVECLEN (y, i))
3559 	  /* Compare the vector's elements.  */;
3560 	else if (XVECLEN (x, i) < XVECLEN (y, i))
3561 	  return -1;
3562 	else
3563 	  return 1;
3564 
3565 	for (j = 0; j < XVECLEN (x, i); j++)
3566 	  if ((r = loc_cmp (XVECEXP (x, i, j),
3567 			    XVECEXP (y, i, j))))
3568 	    return r;
3569 	break;
3570 
3571       case 'e':
3572 	if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
3573 	  return r;
3574 	break;
3575 
3576       case 'S':
3577       case 's':
3578 	if (XSTR (x, i) == XSTR (y, i))
3579 	  break;
3580 	if (!XSTR (x, i))
3581 	  return -1;
3582 	if (!XSTR (y, i))
3583 	  return 1;
3584 	if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
3585 	  break;
3586 	else if (r < 0)
3587 	  return -1;
3588 	else
3589 	  return 1;
3590 
3591       case 'u':
3592 	/* These are just backpointers, so they don't matter.  */
3593 	break;
3594 
3595       case '0':
3596       case 't':
3597 	break;
3598 
3599 	/* It is believed that rtx's at this level will never
3600 	   contain anything but integers and other rtx's,
3601 	   except for within LABEL_REFs and SYMBOL_REFs.  */
3602       default:
3603 	gcc_unreachable ();
3604       }
3605   if (CONST_WIDE_INT_P (x))
3606     {
3607       /* Compare the vector length first.  */
3608       if (CONST_WIDE_INT_NUNITS (x) > CONST_WIDE_INT_NUNITS (y))
3609 	return 1;
3610       else if (CONST_WIDE_INT_NUNITS (x) < CONST_WIDE_INT_NUNITS (y))
3611 	return -1;
3612 
3613       /* Compare the vector's elements.  */
3614       for (j = CONST_WIDE_INT_NUNITS (x) - 1; j >= 0 ; j--)
3615 	{
3616 	  if (CONST_WIDE_INT_ELT (x, j) < CONST_WIDE_INT_ELT (y, j))
3617 	    return -1;
3618 	  if (CONST_WIDE_INT_ELT (x, j) > CONST_WIDE_INT_ELT (y, j))
3619 	    return 1;
3620 	}
3621     }
3622 
3623   return 0;
3624 }
3625 
3626 /* Check the order of entries in one-part variables.   */
3627 
3628 int
3629 canonicalize_loc_order_check (variable **slot,
3630 			      dataflow_set *data ATTRIBUTE_UNUSED)
3631 {
3632   variable *var = *slot;
3633   location_chain *node, *next;
3634 
3635 #ifdef ENABLE_RTL_CHECKING
3636   int i;
3637   for (i = 0; i < var->n_var_parts; i++)
3638     gcc_assert (var->var_part[i].cur_loc == NULL);
3639   gcc_assert (!var->in_changed_variables);
3640 #endif
3641 
3642   if (!var->onepart)
3643     return 1;
3644 
3645   gcc_assert (var->n_var_parts == 1);
3646   node = var->var_part[0].loc_chain;
3647   gcc_assert (node);
3648 
3649   while ((next = node->next))
3650     {
3651       gcc_assert (loc_cmp (node->loc, next->loc) < 0);
3652       node = next;
3653     }
3654 
3655   return 1;
3656 }
3657 
3658 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3659    more likely to be chosen as canonical for an equivalence set.
3660    Ensure less likely values can reach more likely neighbors, making
3661    the connections bidirectional.  */
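/* A hypothetical example: suppose the location list of VALUE 2
   contains VALUE 1.  If VALUE 1 is the more likely canonical choice,
   VALUE 2 is marked so that canonicalize_values_star revisits it;
   otherwise VALUE 2 is added to VALUE 1's location list, making the
   link bidirectional, and VALUE 1 is marked instead.  */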
3662 
3663 int
3664 canonicalize_values_mark (variable **slot, dataflow_set *set)
3665 {
3666   variable *var = *slot;
3667   decl_or_value dv = var->dv;
3668   rtx val;
3669   location_chain *node;
3670 
3671   if (!dv_is_value_p (dv))
3672     return 1;
3673 
3674   gcc_checking_assert (var->n_var_parts == 1);
3675 
3676   val = dv_as_value (dv);
3677 
3678   for (node = var->var_part[0].loc_chain; node; node = node->next)
3679     if (GET_CODE (node->loc) == VALUE)
3680       {
3681 	if (canon_value_cmp (node->loc, val))
3682 	  VALUE_RECURSED_INTO (val) = true;
3683 	else
3684 	  {
3685 	    decl_or_value odv = dv_from_value (node->loc);
3686 	    variable **oslot;
3687 	    oslot = shared_hash_find_slot_noinsert (set->vars, odv);
3688 
3689 	    set_slot_part (set, val, oslot, odv, 0,
3690 			   node->init, NULL_RTX);
3691 
3692 	    VALUE_RECURSED_INTO (node->loc) = true;
3693 	  }
3694       }
3695 
3696   return 1;
3697 }
3698 
3699 /* Remove redundant entries from equivalence lists in onepart
3700    variables, canonicalizing equivalence sets into star shapes.  */
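/* A hypothetical before/after illustration:

     before:  VALUE 1: (reg:SI 0)
              VALUE 2: VALUE 1, (mem:SI (reg:SI 7))

     after:   VALUE 1: (reg:SI 0), (mem:SI (reg:SI 7)), VALUE 2
              VALUE 2: VALUE 1

   assuming VALUE 1 is the canonical choice: VALUE 2's locations are
   pushed onto VALUE 1 and VALUE 2 is left pointing only at VALUE 1,
   so the equivalence set becomes a star centered on VALUE 1.  */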
3701 
3702 int
3703 canonicalize_values_star (variable **slot, dataflow_set *set)
3704 {
3705   variable *var = *slot;
3706   decl_or_value dv = var->dv;
3707   location_chain *node;
3708   decl_or_value cdv;
3709   rtx val, cval;
3710   variable **cslot;
3711   bool has_value;
3712   bool has_marks;
3713 
3714   if (!var->onepart)
3715     return 1;
3716 
3717   gcc_checking_assert (var->n_var_parts == 1);
3718 
3719   if (dv_is_value_p (dv))
3720     {
3721       cval = dv_as_value (dv);
3722       if (!VALUE_RECURSED_INTO (cval))
3723 	return 1;
3724       VALUE_RECURSED_INTO (cval) = false;
3725     }
3726   else
3727     cval = NULL_RTX;
3728 
3729  restart:
3730   val = cval;
3731   has_value = false;
3732   has_marks = false;
3733 
3734   gcc_assert (var->n_var_parts == 1);
3735 
3736   for (node = var->var_part[0].loc_chain; node; node = node->next)
3737     if (GET_CODE (node->loc) == VALUE)
3738       {
3739 	has_value = true;
3740 	if (VALUE_RECURSED_INTO (node->loc))
3741 	  has_marks = true;
3742 	if (canon_value_cmp (node->loc, cval))
3743 	  cval = node->loc;
3744       }
3745 
3746   if (!has_value)
3747     return 1;
3748 
3749   if (cval == val)
3750     {
3751       if (!has_marks || dv_is_decl_p (dv))
3752 	return 1;
3753 
3754       /* Keep it marked so that we revisit it, either after visiting a
3755 	 child node, or after visiting a newly discovered parent.  */
3757       VALUE_RECURSED_INTO (val) = true;
3758 
3759       for (node = var->var_part[0].loc_chain; node; node = node->next)
3760 	if (GET_CODE (node->loc) == VALUE
3761 	    && VALUE_RECURSED_INTO (node->loc))
3762 	  {
3763 	    cval = node->loc;
3764 	  restart_with_cval:
3765 	    VALUE_RECURSED_INTO (cval) = false;
3766 	    dv = dv_from_value (cval);
3767 	    slot = shared_hash_find_slot_noinsert (set->vars, dv);
3768 	    if (!slot)
3769 	      {
3770 		gcc_assert (dv_is_decl_p (var->dv));
3771 		/* The canonical value was reset and dropped.
3772 		   Remove it.  */
3773 		clobber_variable_part (set, NULL, var->dv, 0, NULL);
3774 		return 1;
3775 	      }
3776 	    var = *slot;
3777 	    gcc_assert (dv_is_value_p (var->dv));
3778 	    if (var->n_var_parts == 0)
3779 	      return 1;
3780 	    gcc_assert (var->n_var_parts == 1);
3781 	    goto restart;
3782 	  }
3783 
3784       VALUE_RECURSED_INTO (val) = false;
3785 
3786       return 1;
3787     }
3788 
3789   /* Push values to the canonical one.  */
3790   cdv = dv_from_value (cval);
3791   cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3792 
3793   for (node = var->var_part[0].loc_chain; node; node = node->next)
3794     if (node->loc != cval)
3795       {
3796 	cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3797 			       node->init, NULL_RTX);
3798 	if (GET_CODE (node->loc) == VALUE)
3799 	  {
3800 	    decl_or_value ndv = dv_from_value (node->loc);
3801 
3802 	    set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3803 			       NO_INSERT);
3804 
3805 	    if (canon_value_cmp (node->loc, val))
3806 	      {
3807 		/* If it could have been a local minimum, it's not one
3808 		   anymore, since it's now a neighbor of cval, so it may
3809 		   have to push to it.  Conversely, if it wouldn't have
3810 		   prevailed over val, then whatever mark it has is fine:
3811 		   if it was going to push, it will now push to a more
3812 		   canonical node, but if it wasn't, then it has already
3813 		   pushed any values it might have had to.  */
3814 		VALUE_RECURSED_INTO (node->loc) = true;
3815 		/* Make sure we visit node->loc by ensuring that cval is
3816 		   visited too.  */
3817 		VALUE_RECURSED_INTO (cval) = true;
3818 	      }
3819 	    else if (!VALUE_RECURSED_INTO (node->loc))
3820 	      /* If we have no need to "recurse" into this node, it's
3821 		 already "canonicalized", so drop the link to the old
3822 		 parent.  */
3823 	      clobber_variable_part (set, cval, ndv, 0, NULL);
3824 	  }
3825 	else if (GET_CODE (node->loc) == REG)
3826 	  {
3827 	    attrs *list = set->regs[REGNO (node->loc)], **listp;
3828 
3829 	    /* Change an existing attribute referring to dv so that it
3830 	       refers to cdv, removing any duplicate this might
3831 	       introduce, and checking that no previous duplicates
3832 	       existed, all in a single pass.  */
3833 
3834 	    while (list)
3835 	      {
3836 		if (list->offset == 0
3837 		    && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3838 			|| dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3839 		  break;
3840 
3841 		list = list->next;
3842 	      }
3843 
3844 	    gcc_assert (list);
3845 	    if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3846 	      {
3847 		list->dv = cdv;
3848 		for (listp = &list->next; (list = *listp); listp = &list->next)
3849 		  {
3850 		    if (list->offset)
3851 		      continue;
3852 
3853 		    if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3854 		      {
3855 			*listp = list->next;
3856 			delete list;
3857 			list = *listp;
3858 			break;
3859 		      }
3860 
3861 		    gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3862 		  }
3863 	      }
3864 	    else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3865 	      {
3866 		for (listp = &list->next; (list = *listp); listp = &list->next)
3867 		  {
3868 		    if (list->offset)
3869 		      continue;
3870 
3871 		    if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3872 		      {
3873 			*listp = list->next;
3874 			delete list;
3875 			list = *listp;
3876 			break;
3877 		      }
3878 
3879 		    gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3880 		  }
3881 	      }
3882 	    else
3883 	      gcc_unreachable ();
3884 
3885 	    if (flag_checking)
3886 	      while (list)
3887 		{
3888 		  if (list->offset == 0
3889 		      && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3890 			  || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3891 		    gcc_unreachable ();
3892 
3893 		  list = list->next;
3894 		}
3895 	  }
3896       }
3897 
3898   if (val)
3899     set_slot_part (set, val, cslot, cdv, 0,
3900 		   VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
3901 
3902   slot = clobber_slot_part (set, cval, slot, 0, NULL);
3903 
3904   /* Variable may have been unshared.  */
3905   var = *slot;
3906   gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3907 		       && var->var_part[0].loc_chain->next == NULL);
3908 
3909   if (VALUE_RECURSED_INTO (cval))
3910     goto restart_with_cval;
3911 
3912   return 1;
3913 }
3914 
3915 /* Bind one-part variables to the canonical value in an equivalence
3916    set.  Not doing this causes dataflow convergence failure in rare
3917    circumstances, see PR42873.  Unfortunately we can't do this
3918    efficiently as part of canonicalize_values_star, since we may not
3919    have determined or even seen the canonical value of a set when we
3920    get to a variable that references another member of the set.  */
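/* A hypothetical example: if the sole location of variable A is
   VALUE 2, and VALUE 2's own location list starts with a more
   canonical VALUE 1, then A is rebound so that it refers to VALUE 1
   directly, instead of reaching it through VALUE 2.  */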
3921 
3922 int
3923 canonicalize_vars_star (variable **slot, dataflow_set *set)
3924 {
3925   variable *var = *slot;
3926   decl_or_value dv = var->dv;
3927   location_chain *node;
3928   rtx cval;
3929   decl_or_value cdv;
3930   variable **cslot;
3931   variable *cvar;
3932   location_chain *cnode;
3933 
3934   if (!var->onepart || var->onepart == ONEPART_VALUE)
3935     return 1;
3936 
3937   gcc_assert (var->n_var_parts == 1);
3938 
3939   node = var->var_part[0].loc_chain;
3940 
3941   if (GET_CODE (node->loc) != VALUE)
3942     return 1;
3943 
3944   gcc_assert (!node->next);
3945   cval = node->loc;
3946 
3947   /* Push values to the canonical one.  */
3948   cdv = dv_from_value (cval);
3949   cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3950   if (!cslot)
3951     return 1;
3952   cvar = *cslot;
3953   gcc_assert (cvar->n_var_parts == 1);
3954 
3955   cnode = cvar->var_part[0].loc_chain;
3956 
3957   /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3958      that are not "more canonical" than it.  */
3959   if (GET_CODE (cnode->loc) != VALUE
3960       || !canon_value_cmp (cnode->loc, cval))
3961     return 1;
3962 
3963   /* CVAL was found to be non-canonical.  Change the variable to point
3964      to the canonical VALUE.  */
3965   gcc_assert (!cnode->next);
3966   cval = cnode->loc;
3967 
3968   slot = set_slot_part (set, cval, slot, dv, 0,
3969 			node->init, node->set_src);
3970   clobber_slot_part (set, cval, slot, 0, node->set_src);
3971 
3972   return 1;
3973 }
3974 
3975 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3976    corresponding entry in DSM->src.  Multi-part variables are combined
3977    with variable_union, whereas onepart dvs are combined with
3978    intersection.  */
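/* A hypothetical example: at a join point where one-part variable A
   lives in (reg:SI 0) on one incoming path, and in both (reg:SI 0)
   and (mem:SI (reg:SI 7)) on the other, the merged set keeps only
   (reg:SI 0), their intersection; a multi-part variable would
   instead keep both locations, via variable_union.  */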
3979 
3980 static int
3981 variable_merge_over_cur (variable *s1var, struct dfset_merge *dsm)
3982 {
3983   dataflow_set *dst = dsm->dst;
3984   variable **dstslot;
3985   variable *s2var, *dvar = NULL;
3986   decl_or_value dv = s1var->dv;
3987   onepart_enum onepart = s1var->onepart;
3988   rtx val;
3989   hashval_t dvhash;
3990   location_chain *node, **nodep;
3991 
3992   /* If the incoming onepart variable has an empty location list, then
3993      the intersection will be just as empty.  For other variables,
3994      it's always union.  */
3995   gcc_checking_assert (s1var->n_var_parts
3996 		       && s1var->var_part[0].loc_chain);
3997 
3998   if (!onepart)
3999     return variable_union (s1var, dst);
4000 
4001   gcc_checking_assert (s1var->n_var_parts == 1);
4002 
4003   dvhash = dv_htab_hash (dv);
4004   if (dv_is_value_p (dv))
4005     val = dv_as_value (dv);
4006   else
4007     val = NULL;
4008 
4009   s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
4010   if (!s2var)
4011     {
4012       dst_can_be_shared = false;
4013       return 1;
4014     }
4015 
4016   dsm->src_onepart_cnt--;
4017   gcc_assert (s2var->var_part[0].loc_chain
4018 	      && s2var->onepart == onepart
4019 	      && s2var->n_var_parts == 1);
4020 
4021   dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4022   if (dstslot)
4023     {
4024       dvar = *dstslot;
4025       gcc_assert (dvar->refcount == 1
4026 		  && dvar->onepart == onepart
4027 		  && dvar->n_var_parts == 1);
4028       nodep = &dvar->var_part[0].loc_chain;
4029     }
4030   else
4031     {
4032       nodep = &node;
4033       node = NULL;
4034     }
4035 
4036   if (!dstslot && !onepart_variable_different_p (s1var, s2var))
4037     {
4038       dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
4039 						 dvhash, INSERT);
4040       *dstslot = dvar = s2var;
4041       dvar->refcount++;
4042     }
4043   else
4044     {
4045       dst_can_be_shared = false;
4046 
4047       intersect_loc_chains (val, nodep, dsm,
4048 			    s1var->var_part[0].loc_chain, s2var);
4049 
4050       if (!dstslot)
4051 	{
4052 	  if (node)
4053 	    {
4054 	      dvar = onepart_pool_allocate (onepart);
4055 	      dvar->dv = dv;
4056 	      dvar->refcount = 1;
4057 	      dvar->n_var_parts = 1;
4058 	      dvar->onepart = onepart;
4059 	      dvar->in_changed_variables = false;
4060 	      dvar->var_part[0].loc_chain = node;
4061 	      dvar->var_part[0].cur_loc = NULL;
4062 	      if (onepart)
4063 		VAR_LOC_1PAUX (dvar) = NULL;
4064 	      else
4065 		VAR_PART_OFFSET (dvar, 0) = 0;
4066 
4067 	      dstslot
4068 		= shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
4069 						   INSERT);
4070 	      gcc_assert (!*dstslot);
4071 	      *dstslot = dvar;
4072 	    }
4073 	  else
4074 	    return 1;
4075 	}
4076     }
4077 
4078   nodep = &dvar->var_part[0].loc_chain;
4079   while ((node = *nodep))
4080     {
4081       location_chain **nextp = &node->next;
4082 
4083       if (GET_CODE (node->loc) == REG)
4084 	{
4085 	  attrs *list;
4086 
4087 	  for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
4088 	    if (GET_MODE (node->loc) == GET_MODE (list->loc)
4089 		&& dv_is_value_p (list->dv))
4090 	      break;
4091 
4092 	  if (!list)
4093 	    attrs_list_insert (&dst->regs[REGNO (node->loc)],
4094 			       dv, 0, node->loc);
4095 	  /* If this value became canonical for another value that had
4096 	     this register, we want to leave it alone.  */
4097 	  else if (dv_as_value (list->dv) != val)
4098 	    {
4099 	      dstslot = set_slot_part (dst, dv_as_value (list->dv),
4100 				       dstslot, dv, 0,
4101 				       node->init, NULL_RTX);
4102 	      dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
4103 
4104 	      /* Since nextp points into the removed node, we can't
4105 		 use it.  The pointer to the next node moved to nodep.
4106 		 However, if the variable we're walking is unshared
4107 		 during our walk, we'll keep walking the location list
4108 		 of the previously-shared variable, in which case the
4109 		 node won't have been removed, and we'll want to skip
4110 		 it.  That's why we test *nodep here.  */
4111 	      if (*nodep != node)
4112 		nextp = nodep;
4113 	    }
4114 	}
4115       else
4116 	/* Canonicalization puts registers first, so we don't have to
4117 	   walk it all.  */
4118 	break;
4119       nodep = nextp;
4120     }
4121 
4122   if (dvar != *dstslot)
4123     dvar = *dstslot;
4124   nodep = &dvar->var_part[0].loc_chain;
4125 
4126   if (val)
4127     {
4128       /* Mark all referenced nodes for canonicalization, and make sure
4129 	 we have mutual equivalence links.  */
4130       VALUE_RECURSED_INTO (val) = true;
4131       for (node = *nodep; node; node = node->next)
4132 	if (GET_CODE (node->loc) == VALUE)
4133 	  {
4134 	    VALUE_RECURSED_INTO (node->loc) = true;
4135 	    set_variable_part (dst, val, dv_from_value (node->loc), 0,
4136 			       node->init, NULL, INSERT);
4137 	  }
4138 
4139       dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4140       gcc_assert (*dstslot == dvar);
4141       canonicalize_values_star (dstslot, dst);
4142       gcc_checking_assert (dstslot
4143 			   == shared_hash_find_slot_noinsert_1 (dst->vars,
4144 								dv, dvhash));
4145       dvar = *dstslot;
4146     }
4147   else
4148     {
4149       bool has_value = false, has_other = false;
4150 
4151       /* If we have one value and anything else, we're going to
4152 	 canonicalize this, so make sure all values have an entry in
4153 	 the table and are marked for canonicalization.  */
4154       for (node = *nodep; node; node = node->next)
4155 	{
4156 	  if (GET_CODE (node->loc) == VALUE)
4157 	    {
4158 	      /* If this was marked during register canonicalization,
4159 		 we know we have to canonicalize values.  */
4160 	      if (has_value)
4161 		has_other = true;
4162 	      has_value = true;
4163 	      if (has_other)
4164 		break;
4165 	    }
4166 	  else
4167 	    {
4168 	      has_other = true;
4169 	      if (has_value)
4170 		break;
4171 	    }
4172 	}
4173 
4174       if (has_value && has_other)
4175 	{
4176 	  for (node = *nodep; node; node = node->next)
4177 	    {
4178 	      if (GET_CODE (node->loc) == VALUE)
4179 		{
4180 		  decl_or_value dv = dv_from_value (node->loc);
4181 		  variable **slot = NULL;
4182 
4183 		  if (shared_hash_shared (dst->vars))
4184 		    slot = shared_hash_find_slot_noinsert (dst->vars, dv);
4185 		  if (!slot)
4186 		    slot = shared_hash_find_slot_unshare (&dst->vars, dv,
4187 							  INSERT);
4188 		  if (!*slot)
4189 		    {
4190 		      variable *var = onepart_pool_allocate (ONEPART_VALUE);
4191 		      var->dv = dv;
4192 		      var->refcount = 1;
4193 		      var->n_var_parts = 1;
4194 		      var->onepart = ONEPART_VALUE;
4195 		      var->in_changed_variables = false;
4196 		      var->var_part[0].loc_chain = NULL;
4197 		      var->var_part[0].cur_loc = NULL;
4198 		      VAR_LOC_1PAUX (var) = NULL;
4199 		      *slot = var;
4200 		    }
4201 
4202 		  VALUE_RECURSED_INTO (node->loc) = true;
4203 		}
4204 	    }
4205 
4206 	  dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4207 	  gcc_assert (*dstslot == dvar);
4208 	  canonicalize_values_star (dstslot, dst);
4209 	  gcc_checking_assert (dstslot
4210 			       == shared_hash_find_slot_noinsert_1 (dst->vars,
4211 								    dv, dvhash));
4212 	  dvar = *dstslot;
4213 	}
4214     }
4215 
4216   if (!onepart_variable_different_p (dvar, s2var))
4217     {
4218       variable_htab_free (dvar);
4219       *dstslot = dvar = s2var;
4220       dvar->refcount++;
4221     }
4222   else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
4223     {
4224       variable_htab_free (dvar);
4225       *dstslot = dvar = s1var;
4226       dvar->refcount++;
4227       dst_can_be_shared = false;
4228     }
4229   else
4230     dst_can_be_shared = false;
4231 
4232   return 1;
4233 }
4234 
4235 /* Copy S2VAR (in DSM->src) to DSM->dst if the variable is a
4236    multi-part variable.  Unions of multi-part variables and
4237    intersections of one-part ones will be handled in
4238    variable_merge_over_cur().  */
4239 
4240 static int
4241 variable_merge_over_src (variable *s2var, struct dfset_merge *dsm)
4242 {
4243   dataflow_set *dst = dsm->dst;
4244   decl_or_value dv = s2var->dv;
4245 
4246   if (!s2var->onepart)
4247     {
4248       variable **dstp = shared_hash_find_slot (dst->vars, dv);
4249       *dstp = s2var;
4250       s2var->refcount++;
4251       return 1;
4252     }
4253 
4254   dsm->src_onepart_cnt++;
4255   return 1;
4256 }
4257 
4258 /* Combine dataflow set information from SRC2 into DST, merging it
4259    with DST's previous contents.  */
4260 
4261 static void
4262 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
4263 {
4264   dataflow_set cur = *dst;
4265   dataflow_set *src1 = &cur;
4266   struct dfset_merge dsm;
4267   int i;
4268   size_t src1_elems, src2_elems;
4269   variable_iterator_type hi;
4270   variable *var;
4271 
4272   src1_elems = shared_hash_htab (src1->vars)->elements ();
4273   src2_elems = shared_hash_htab (src2->vars)->elements ();
4274   dataflow_set_init (dst);
4275   dst->stack_adjust = cur.stack_adjust;
4276   shared_hash_destroy (dst->vars);
4277   dst->vars = new shared_hash;
4278   dst->vars->refcount = 1;
4279   dst->vars->htab = new variable_table_type (MAX (src1_elems, src2_elems));
4280 
4281   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4282     attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
4283 
4284   dsm.dst = dst;
4285   dsm.src = src2;
4286   dsm.cur = src1;
4287   dsm.src_onepart_cnt = 0;
4288 
4289   FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.src->vars),
4290 			       var, variable, hi)
4291     variable_merge_over_src (var, &dsm);
4292   FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.cur->vars),
4293 			       var, variable, hi)
4294     variable_merge_over_cur (var, &dsm);
4295 
4296   if (dsm.src_onepart_cnt)
4297     dst_can_be_shared = false;
4298 
4299   dataflow_set_destroy (src1);
4300 }
4301 
4302 /* Record equivalences among the values bound to each register.  */
4303 
4304 static void
4305 dataflow_set_equiv_regs (dataflow_set *set)
4306 {
4307   int i;
4308   attrs *list, **listp;
4309 
4310   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4311     {
4312       rtx canon[NUM_MACHINE_MODES];
4313 
4314       /* If the list is empty or has a single entry, there is nothing
4315 	 to canonicalize.  */
4316       if (set->regs[i] == NULL || set->regs[i]->next == NULL)
4317 	continue;
4318 
4319       memset (canon, 0, sizeof (canon));
4320 
4321       for (list = set->regs[i]; list; list = list->next)
4322 	if (list->offset == 0 && dv_is_value_p (list->dv))
4323 	  {
4324 	    rtx val = dv_as_value (list->dv);
4325 	    rtx *cvalp = &canon[(int)GET_MODE (val)];
4326 	    rtx cval = *cvalp;
4327 
4328 	    if (canon_value_cmp (val, cval))
4329 	      *cvalp = val;
4330 	  }
4331 
4332       for (list = set->regs[i]; list; list = list->next)
4333 	if (list->offset == 0 && dv_onepart_p (list->dv))
4334 	  {
4335 	    rtx cval = canon[(int)GET_MODE (list->loc)];
4336 
4337 	    if (!cval)
4338 	      continue;
4339 
4340 	    if (dv_is_value_p (list->dv))
4341 	      {
4342 		rtx val = dv_as_value (list->dv);
4343 
4344 		if (val == cval)
4345 		  continue;
4346 
4347 		VALUE_RECURSED_INTO (val) = true;
4348 		set_variable_part (set, val, dv_from_value (cval), 0,
4349 				   VAR_INIT_STATUS_INITIALIZED,
4350 				   NULL, NO_INSERT);
4351 	      }
4352 
4353 	    VALUE_RECURSED_INTO (cval) = true;
4354 	    set_variable_part (set, cval, list->dv, 0,
4355 			       VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
4356 	  }
4357 
4358       for (listp = &set->regs[i]; (list = *listp);
4359 	   listp = list ? &list->next : listp)
4360 	if (list->offset == 0 && dv_onepart_p (list->dv))
4361 	  {
4362 	    rtx cval = canon[(int)GET_MODE (list->loc)];
4363 	    variable **slot;
4364 
4365 	    if (!cval)
4366 	      continue;
4367 
4368 	    if (dv_is_value_p (list->dv))
4369 	      {
4370 		rtx val = dv_as_value (list->dv);
4371 		if (!VALUE_RECURSED_INTO (val))
4372 		  continue;
4373 	      }
4374 
4375 	    slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
4376 	    canonicalize_values_star (slot, set);
4377 	    if (*listp != list)
4378 	      list = NULL;
4379 	  }
4380     }
4381 }
4382 
4383 /* Remove any redundant values in the location list of VAR, which must
4384    be unshared and 1-part.  */
4385 
4386 static void
4387 remove_duplicate_values (variable *var)
4388 {
4389   location_chain *node, **nodep;
4390 
4391   gcc_assert (var->onepart);
4392   gcc_assert (var->n_var_parts == 1);
4393   gcc_assert (var->refcount == 1);
4394 
4395   for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
4396     {
4397       if (GET_CODE (node->loc) == VALUE)
4398 	{
4399 	  if (VALUE_RECURSED_INTO (node->loc))
4400 	    {
4401 	      /* Remove duplicate value node.  */
4402 	      *nodep = node->next;
4403 	      delete node;
4404 	      continue;
4405 	    }
4406 	  else
4407 	    VALUE_RECURSED_INTO (node->loc) = true;
4408 	}
4409       nodep = &node->next;
4410     }
4411 
4412   for (node = var->var_part[0].loc_chain; node; node = node->next)
4413     if (GET_CODE (node->loc) == VALUE)
4414       {
4415 	gcc_assert (VALUE_RECURSED_INTO (node->loc));
4416 	VALUE_RECURSED_INTO (node->loc) = false;
4417       }
4418 }
4419 
4420 
4421 /* Hash table iteration argument passed to variable_post_merge.  */
4422 struct dfset_post_merge
4423 {
4424   /* The new input set for the current block.  */
4425   dataflow_set *set;
4426   /* Pointer to the permanent input set for the current block, or
4427      NULL.  */
4428   dataflow_set **permp;
4429 };
4430 
4431 /* Create values for incoming expressions associated with one-part
4432    variables that don't have value numbers for them.  */
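/* A hypothetical example: if decl A is located in (reg:SI 0), but no
   VALUE is associated with that register in SET and the permanent
   set has none either, then a fresh VALUE, say VALUE 7, is created
   and preserved, bound to the register in the permanent set, and A's
   location is rewritten from (reg:SI 0) to VALUE 7.  */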
4433 
4434 int
4435 variable_post_merge_new_vals (variable **slot, dfset_post_merge *dfpm)
4436 {
4437   dataflow_set *set = dfpm->set;
4438   variable *var = *slot;
4439   location_chain *node;
4440 
4441   if (!var->onepart || !var->n_var_parts)
4442     return 1;
4443 
4444   gcc_assert (var->n_var_parts == 1);
4445 
4446   if (dv_is_decl_p (var->dv))
4447     {
4448       bool check_dupes = false;
4449 
4450     restart:
4451       for (node = var->var_part[0].loc_chain; node; node = node->next)
4452 	{
4453 	  if (GET_CODE (node->loc) == VALUE)
4454 	    gcc_assert (!VALUE_RECURSED_INTO (node->loc));
4455 	  else if (GET_CODE (node->loc) == REG)
4456 	    {
4457 	      attrs *att, **attp, **curp = NULL;
4458 
4459 	      if (var->refcount != 1)
4460 		{
4461 		  slot = unshare_variable (set, slot, var,
4462 					   VAR_INIT_STATUS_INITIALIZED);
4463 		  var = *slot;
4464 		  goto restart;
4465 		}
4466 
4467 	      for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
4468 		   attp = &att->next)
4469 		if (att->offset == 0
4470 		    && GET_MODE (att->loc) == GET_MODE (node->loc))
4471 		  {
4472 		    if (dv_is_value_p (att->dv))
4473 		      {
4474 			rtx cval = dv_as_value (att->dv);
4475 			node->loc = cval;
4476 			check_dupes = true;
4477 			break;
4478 		      }
4479 		    else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
4480 		      curp = attp;
4481 		  }
4482 
4483 	      if (!curp)
4484 		{
4485 		  curp = attp;
4486 		  while (*curp)
4487 		    if ((*curp)->offset == 0
4488 			&& GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
4489 			&& dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
4490 		      break;
4491 		    else
4492 		      curp = &(*curp)->next;
4493 		  gcc_assert (*curp);
4494 		}
4495 
4496 	      if (!att)
4497 		{
4498 		  decl_or_value cdv;
4499 		  rtx cval;
4500 
4501 		  if (!*dfpm->permp)
4502 		    {
4503 		      *dfpm->permp = XNEW (dataflow_set);
4504 		      dataflow_set_init (*dfpm->permp);
4505 		    }
4506 
4507 		  for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
4508 		       att; att = att->next)
4509 		    if (GET_MODE (att->loc) == GET_MODE (node->loc))
4510 		      {
4511 			gcc_assert (att->offset == 0
4512 				    && dv_is_value_p (att->dv));
4513 			val_reset (set, att->dv);
4514 			break;
4515 		      }
4516 
4517 		  if (att)
4518 		    {
4519 		      cdv = att->dv;
4520 		      cval = dv_as_value (cdv);
4521 		    }
4522 		  else
4523 		    {
4524 		      /* Create a unique value to hold this register,
4525 			 one that ought to be found and reused in
4526 			 subsequent rounds.  */
4527 		      cselib_val *v;
4528 		      gcc_assert (!cselib_lookup (node->loc,
4529 						  GET_MODE (node->loc), 0,
4530 						  VOIDmode));
4531 		      v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
4532 					 VOIDmode);
4533 		      cselib_preserve_value (v);
4534 		      cselib_invalidate_rtx (node->loc);
4535 		      cval = v->val_rtx;
4536 		      cdv = dv_from_value (cval);
4537 		      if (dump_file)
4538 			fprintf (dump_file,
4539 				 "Created new value %u:%u for reg %i\n",
4540 				 v->uid, v->hash, REGNO (node->loc));
4541 		    }
4542 
4543 		  var_reg_decl_set (*dfpm->permp, node->loc,
4544 				    VAR_INIT_STATUS_INITIALIZED,
4545 				    cdv, 0, NULL, INSERT);
4546 
4547 		  node->loc = cval;
4548 		  check_dupes = true;
4549 		}
4550 
4551 	      /* Remove the attribute referring to the decl, which now
4552 		 uses the value for the register, whether that value
4553 		 already exists or is to be added when we bring perm in.  */
4554 	      att = *curp;
4555 	      *curp = att->next;
4556 	      delete att;
4557 	    }
4558 	}
4559 
4560       if (check_dupes)
4561 	remove_duplicate_values (var);
4562     }
4563 
4564   return 1;
4565 }
4566 
4567 /* Reset values in the permanent set that are not associated with the
4568    chosen expression.  */
4569 
4570 int
4571 variable_post_merge_perm_vals (variable **pslot, dfset_post_merge *dfpm)
4572 {
4573   dataflow_set *set = dfpm->set;
4574   variable *pvar = *pslot, *var;
4575   location_chain *pnode;
4576   decl_or_value dv;
4577   attrs *att;
4578 
4579   gcc_assert (dv_is_value_p (pvar->dv)
4580 	      && pvar->n_var_parts == 1);
4581   pnode = pvar->var_part[0].loc_chain;
4582   gcc_assert (pnode
4583 	      && !pnode->next
4584 	      && REG_P (pnode->loc));
4585 
4586   dv = pvar->dv;
4587 
4588   var = shared_hash_find (set->vars, dv);
4589   if (var)
4590     {
4591       /* Although variable_post_merge_new_vals may have made decls
4592 	 non-star-canonical, values that pre-existed in canonical form
4593 	 remain canonical, and newly-created values reference a single
4594 	 REG, so they are canonical as well.  Since VAR has the
4595 	 location list for a VALUE, using find_loc_in_1pdv for it is
4596 	 fine, since VALUEs don't map back to DECLs.  */
4597       if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
4598 	return 1;
4599       val_reset (set, dv);
4600     }
4601 
4602   for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
4603     if (att->offset == 0
4604 	&& GET_MODE (att->loc) == GET_MODE (pnode->loc)
4605 	&& dv_is_value_p (att->dv))
4606       break;
4607 
4608   /* If there is a value associated with this register already, create
4609      an equivalence.  */
4610   if (att && dv_as_value (att->dv) != dv_as_value (dv))
4611     {
4612       rtx cval = dv_as_value (att->dv);
4613       set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
4614       set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
4615 			 NULL, INSERT);
4616     }
4617   else if (!att)
4618     {
4619       attrs_list_insert (&set->regs[REGNO (pnode->loc)],
4620 			 dv, 0, pnode->loc);
4621       variable_union (pvar, set);
4622     }
4623 
4624   return 1;
4625 }
4626 
4627 /* Adjust SET after a merge: create values for incoming expressions, bring
4628    in equivalences from the permanent set *PERMP, and canonicalize.  */
4629 
4630 static void
4631 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
4632 {
4633   struct dfset_post_merge dfpm;
4634 
4635   dfpm.set = set;
4636   dfpm.permp = permp;
4637 
4638   shared_hash_htab (set->vars)
4639     ->traverse <dfset_post_merge*, variable_post_merge_new_vals> (&dfpm);
4640   if (*permp)
4641     shared_hash_htab ((*permp)->vars)
4642       ->traverse <dfset_post_merge*, variable_post_merge_perm_vals> (&dfpm);
4643   shared_hash_htab (set->vars)
4644     ->traverse <dataflow_set *, canonicalize_values_star> (set);
4645   shared_hash_htab (set->vars)
4646     ->traverse <dataflow_set *, canonicalize_vars_star> (set);
4647 }
4648 
4649 /* Return a node whose loc is a MEM that refers to EXPR in the
4650    location list of a one-part variable or value VAR, or in that of
4651    any values recursively mentioned in the location lists.  */
4652 
4653 static location_chain *
4654 find_mem_expr_in_1pdv (tree expr, rtx val, variable_table_type *vars)
4655 {
4656   location_chain *node;
4657   decl_or_value dv;
4658   variable *var;
4659   location_chain *where = NULL;
4660 
4661   if (!val)
4662     return NULL;
4663 
4664   gcc_assert (GET_CODE (val) == VALUE
4665 	      && !VALUE_RECURSED_INTO (val));
4666 
4667   dv = dv_from_value (val);
4668   var = vars->find_with_hash (dv, dv_htab_hash (dv));
4669 
4670   if (!var)
4671     return NULL;
4672 
4673   gcc_assert (var->onepart);
4674 
4675   if (!var->n_var_parts)
4676     return NULL;
4677 
4678   VALUE_RECURSED_INTO (val) = true;
4679 
4680   for (node = var->var_part[0].loc_chain; node; node = node->next)
4681     if (MEM_P (node->loc)
4682 	&& MEM_EXPR (node->loc) == expr
4683 	&& int_mem_offset (node->loc) == 0)
4684       {
4685 	where = node;
4686 	break;
4687       }
4688     else if (GET_CODE (node->loc) == VALUE
4689 	     && !VALUE_RECURSED_INTO (node->loc)
4690 	     && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
4691       break;
4692 
4693   VALUE_RECURSED_INTO (val) = false;
4694 
4695   return where;
4696 }
4697 
4698 /* Return TRUE if the value of MEM may vary across a call.  */
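/* E.g. a MEM for a local variable whose address is never taken is
   assumed to survive a call, whereas a MEM for a global variable, or
   for a local that may be aliased, is assumed to be clobbered, since
   the callee may write to it.  */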
4699 
4700 static bool
4701 mem_dies_at_call (rtx mem)
4702 {
4703   tree expr = MEM_EXPR (mem);
4704   tree decl;
4705 
4706   if (!expr)
4707     return true;
4708 
4709   decl = get_base_address (expr);
4710 
4711   if (!decl)
4712     return true;
4713 
4714   if (!DECL_P (decl))
4715     return true;
4716 
4717   return (may_be_aliased (decl)
4718 	  || (!TREE_READONLY (decl) && is_global_var (decl)));
4719 }
4720 
4721 /* Remove all MEMs from the location list of a hash table entry for a
4722    one-part variable, except those whose MEM attributes map back to
4723    the variable itself, directly or within a VALUE.  */
4724 
4725 int
4726 dataflow_set_preserve_mem_locs (variable **slot, dataflow_set *set)
4727 {
4728   variable *var = *slot;
4729 
4730   if (var->onepart == ONEPART_VDECL || var->onepart == ONEPART_DEXPR)
4731     {
4732       tree decl = dv_as_decl (var->dv);
4733       location_chain *loc, **locp;
4734       bool changed = false;
4735 
4736       if (!var->n_var_parts)
4737 	return 1;
4738 
4739       gcc_assert (var->n_var_parts == 1);
4740 
4741       if (shared_var_p (var, set->vars))
4742 	{
4743 	  for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4744 	    {
4745 	      /* We want to remove dying MEMs that don't refer to DECL.  */
4746 	      if (GET_CODE (loc->loc) == MEM
4747 		  && (MEM_EXPR (loc->loc) != decl
4748 		      || int_mem_offset (loc->loc) != 0)
4749 		  && mem_dies_at_call (loc->loc))
4750 		break;
4751 	      /* We want to move here MEMs that do refer to DECL.  */
4752 	      else if (GET_CODE (loc->loc) == VALUE
4753 		       && find_mem_expr_in_1pdv (decl, loc->loc,
4754 						 shared_hash_htab (set->vars)))
4755 		break;
4756 	    }
4757 
4758 	  if (!loc)
4759 	    return 1;
4760 
4761 	  slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4762 	  var = *slot;
4763 	  gcc_assert (var->n_var_parts == 1);
4764 	}
4765 
4766       for (locp = &var->var_part[0].loc_chain, loc = *locp;
4767 	   loc; loc = *locp)
4768 	{
4769 	  rtx old_loc = loc->loc;
4770 	  if (GET_CODE (old_loc) == VALUE)
4771 	    {
4772 	      location_chain *mem_node
4773 		= find_mem_expr_in_1pdv (decl, loc->loc,
4774 					 shared_hash_htab (set->vars));
4775 
4776 	      /* ??? This picks up only one out of multiple MEMs that
4777 		 refer to the same variable.  Do we ever need to be
4778 		 concerned about dealing with more than one, or, given
4779 		 that they should all map to the same variable
4780 		 location, their addresses will have been merged and
4781 		 they will be regarded as equivalent?  */
4782 	      if (mem_node)
4783 		{
4784 		  loc->loc = mem_node->loc;
4785 		  loc->set_src = mem_node->set_src;
4786 		  loc->init = MIN (loc->init, mem_node->init);
4787 		}
4788 	    }
4789 
4790 	  if (GET_CODE (loc->loc) != MEM
4791 	      || (MEM_EXPR (loc->loc) == decl
4792 		  && int_mem_offset (loc->loc) == 0)
4793 	      || !mem_dies_at_call (loc->loc))
4794 	    {
4795 	      if (old_loc != loc->loc && emit_notes)
4796 		{
4797 		  if (old_loc == var->var_part[0].cur_loc)
4798 		    {
4799 		      changed = true;
4800 		      var->var_part[0].cur_loc = NULL;
4801 		    }
4802 		}
4803 	      locp = &loc->next;
4804 	      continue;
4805 	    }
4806 
4807 	  if (emit_notes)
4808 	    {
4809 	      if (old_loc == var->var_part[0].cur_loc)
4810 		{
4811 		  changed = true;
4812 		  var->var_part[0].cur_loc = NULL;
4813 		}
4814 	    }
4815 	  *locp = loc->next;
4816 	  delete loc;
4817 	}
4818 
4819       if (!var->var_part[0].loc_chain)
4820 	{
4821 	  var->n_var_parts--;
4822 	  changed = true;
4823 	}
4824       if (changed)
4825 	variable_was_changed (var, set);
4826     }
4827 
4828   return 1;
4829 }
4830 
4831 /* Remove all MEMs from the location list of a hash table entry for a
4832    onepart variable.  */
4833 
4834 int
4835 dataflow_set_remove_mem_locs (variable **slot, dataflow_set *set)
4836 {
4837   variable *var = *slot;
4838 
4839   if (var->onepart != NOT_ONEPART)
4840     {
4841       location_chain *loc, **locp;
4842       bool changed = false;
4843       rtx cur_loc;
4844 
4845       gcc_assert (var->n_var_parts == 1);
4846 
4847       if (shared_var_p (var, set->vars))
4848 	{
4849 	  for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4850 	    if (GET_CODE (loc->loc) == MEM
4851 		&& mem_dies_at_call (loc->loc))
4852 	      break;
4853 
4854 	  if (!loc)
4855 	    return 1;
4856 
4857 	  slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4858 	  var = *slot;
4859 	  gcc_assert (var->n_var_parts == 1);
4860 	}
4861 
4862       if (VAR_LOC_1PAUX (var))
4863 	cur_loc = VAR_LOC_FROM (var);
4864       else
4865 	cur_loc = var->var_part[0].cur_loc;
4866 
4867       for (locp = &var->var_part[0].loc_chain, loc = *locp;
4868 	   loc; loc = *locp)
4869 	{
4870 	  if (GET_CODE (loc->loc) != MEM
4871 	      || !mem_dies_at_call (loc->loc))
4872 	    {
4873 	      locp = &loc->next;
4874 	      continue;
4875 	    }
4876 
4877 	  *locp = loc->next;
4878 	  /* If we have deleted the location which was last emitted,
4879 	     we have to emit a new location, so add the variable to the
4880 	     set of changed variables.  */
4881 	  if (cur_loc == loc->loc)
4882 	    {
4883 	      changed = true;
4884 	      var->var_part[0].cur_loc = NULL;
4885 	      if (VAR_LOC_1PAUX (var))
4886 		VAR_LOC_FROM (var) = NULL;
4887 	    }
4888 	  delete loc;
4889 	}
4890 
4891       if (!var->var_part[0].loc_chain)
4892 	{
4893 	  var->n_var_parts--;
4894 	  changed = true;
4895 	}
4896       if (changed)
4897 	variable_was_changed (var, set);
4898     }
4899 
4900   return 1;
4901 }
4902 
4903 /* Remove all variable-location information about call-clobbered
4904    registers, as well as associations between MEMs and VALUEs.  */
4905 
4906 static void
4907 dataflow_set_clear_at_call (dataflow_set *set, rtx_insn *call_insn)
4908 {
4909   unsigned int r;
4910   hard_reg_set_iterator hrsi;
4911 
4912   HARD_REG_SET callee_clobbers
4913     = insn_callee_abi (call_insn).full_reg_clobbers ();
4914 
4915   EXECUTE_IF_SET_IN_HARD_REG_SET (callee_clobbers, 0, r, hrsi)
4916     var_regno_delete (set, r);
4917 
4918   if (MAY_HAVE_DEBUG_BIND_INSNS)
4919     {
4920       set->traversed_vars = set->vars;
4921       shared_hash_htab (set->vars)
4922 	->traverse <dataflow_set *, dataflow_set_preserve_mem_locs> (set);
4923       set->traversed_vars = set->vars;
4924       shared_hash_htab (set->vars)
4925 	->traverse <dataflow_set *, dataflow_set_remove_mem_locs> (set);
4926       set->traversed_vars = NULL;
4927     }
4928 }
4929 
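/* Return true if the location chain of VP1 contains a location that
   has no match in the location chain of VP2.  */
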
4930 static bool
4931 variable_part_different_p (variable_part *vp1, variable_part *vp2)
4932 {
4933   location_chain *lc1, *lc2;
4934 
4935   for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4936     {
4937       for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4938 	{
4939 	  if (REG_P (lc1->loc) && REG_P (lc2->loc))
4940 	    {
4941 	      if (REGNO (lc1->loc) == REGNO (lc2->loc))
4942 		break;
4943 	    }
4944 	  if (rtx_equal_p (lc1->loc, lc2->loc))
4945 	    break;
4946 	}
4947       if (!lc2)
4948 	return true;
4949     }
4950   return false;
4951 }
4952 
4953 /* Return true if one-part variables VAR1 and VAR2 are different.
4954    They must be in canonical order.  */
4955 
4956 static bool
4957 onepart_variable_different_p (variable *var1, variable *var2)
4958 {
4959   location_chain *lc1, *lc2;
4960 
4961   if (var1 == var2)
4962     return false;
4963 
4964   gcc_assert (var1->n_var_parts == 1
4965 	      && var2->n_var_parts == 1);
4966 
4967   lc1 = var1->var_part[0].loc_chain;
4968   lc2 = var2->var_part[0].loc_chain;
4969 
4970   gcc_assert (lc1 && lc2);
4971 
4972   while (lc1 && lc2)
4973     {
4974       if (loc_cmp (lc1->loc, lc2->loc))
4975 	return true;
4976       lc1 = lc1->next;
4977       lc2 = lc2->next;
4978     }
4979 
4980   return lc1 != lc2;
4981 }
4982 
4983 /* Dump the differences between the location lists of one-part
4984    variables VAR1 and VAR2.  They must be in canonical order.  */
4985 
4986 static void
4987 dump_onepart_variable_differences (variable *var1, variable *var2)
4988 {
4989   location_chain *lc1, *lc2;
4990 
4991   gcc_assert (var1 != var2);
4992   gcc_assert (dump_file);
4993   gcc_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv));
4994   gcc_assert (var1->n_var_parts == 1
4995 	      && var2->n_var_parts == 1);
4996 
4997   lc1 = var1->var_part[0].loc_chain;
4998   lc2 = var2->var_part[0].loc_chain;
4999 
5000   gcc_assert (lc1 && lc2);
5001 
5002   while (lc1 && lc2)
5003     {
5004       switch (loc_cmp (lc1->loc, lc2->loc))
5005 	{
5006 	case -1:
5007 	  fprintf (dump_file, "removed: ");
5008 	  print_rtl_single (dump_file, lc1->loc);
5009 	  lc1 = lc1->next;
5010 	  continue;
5011 	case 0:
5012 	  break;
5013 	case 1:
5014 	  fprintf (dump_file, "added: ");
5015 	  print_rtl_single (dump_file, lc2->loc);
5016 	  lc2 = lc2->next;
5017 	  continue;
5018 	default:
5019 	  gcc_unreachable ();
5020 	}
5021       lc1 = lc1->next;
5022       lc2 = lc2->next;
5023     }
5024 
5025   while (lc1)
5026     {
5027       fprintf (dump_file, "removed: ");
5028       print_rtl_single (dump_file, lc1->loc);
5029       lc1 = lc1->next;
5030     }
5031 
5032   while (lc2)
5033     {
5034       fprintf (dump_file, "added: ");
5035       print_rtl_single (dump_file, lc2->loc);
5036       lc2 = lc2->next;
5037     }
5038 }
5039 
5040 /* Return true if variables VAR1 and VAR2 are different.  */
5041 
5042 static bool
5043 variable_different_p (variable *var1, variable *var2)
5044 {
5045   int i;
5046 
5047   if (var1 == var2)
5048     return false;
5049 
5050   if (var1->onepart != var2->onepart)
5051     return true;
5052 
5053   if (var1->n_var_parts != var2->n_var_parts)
5054     return true;
5055 
5056   if (var1->onepart && var1->n_var_parts)
5057     {
5058       gcc_checking_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv)
5059 			   && var1->n_var_parts == 1);
5060       /* One-part values have locations in a canonical order.  */
5061       return onepart_variable_different_p (var1, var2);
5062     }
5063 
5064   for (i = 0; i < var1->n_var_parts; i++)
5065     {
5066       if (VAR_PART_OFFSET (var1, i) != VAR_PART_OFFSET (var2, i))
5067 	return true;
5068       if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
5069 	return true;
5070       if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
5071 	return true;
5072     }
5073   return false;
5074 }
5075 
5076 /* Return true if dataflow sets OLD_SET and NEW_SET differ.  */
5077 
5078 static bool
5079 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
5080 {
5081   variable_iterator_type hi;
5082   variable *var1;
5083   bool diffound = false;
5084   bool details = (dump_file && (dump_flags & TDF_DETAILS));
5085 
5086 #define RETRUE					\
5087   do						\
5088     {						\
5089       if (!details)				\
5090 	return true;				\
5091       else					\
5092 	diffound = true;			\
5093     }						\
5094   while (0)
5095 
5096   if (old_set->vars == new_set->vars)
5097     return false;
5098 
5099   if (shared_hash_htab (old_set->vars)->elements ()
5100       != shared_hash_htab (new_set->vars)->elements ())
5101     RETRUE;
5102 
5103   FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (old_set->vars),
5104 			       var1, variable, hi)
5105     {
5106       variable_table_type *htab = shared_hash_htab (new_set->vars);
5107       variable *var2 = htab->find_with_hash (var1->dv, dv_htab_hash (var1->dv));
5108 
5109       if (!var2)
5110 	{
5111 	  if (dump_file && (dump_flags & TDF_DETAILS))
5112 	    {
5113 	      fprintf (dump_file, "dataflow difference found: removal of:\n");
5114 	      dump_var (var1);
5115 	    }
5116 	  RETRUE;
5117 	}
5118       else if (variable_different_p (var1, var2))
5119 	{
5120 	  if (details)
5121 	    {
5122 	      fprintf (dump_file, "dataflow difference found: "
5123 		       "old and new follow:\n");
5124 	      dump_var (var1);
5125 	      if (dv_onepart_p (var1->dv))
5126 		dump_onepart_variable_differences (var1, var2);
5127 	      dump_var (var2);
5128 	    }
5129 	  RETRUE;
5130 	}
5131     }
5132 
5133   /* There's no need to traverse the second hashtab unless we want to
5134      print the details.  If both have the same number of elements and
5135      the second one had all entries found in the first one, then the
5136      second can't have any extra entries.  */
5137   if (!details)
5138     return diffound;
5139 
5140   FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (new_set->vars),
5141 			       var1, variable, hi)
5142     {
5143       variable_table_type *htab = shared_hash_htab (old_set->vars);
5144       variable *var2 = htab->find_with_hash (var1->dv, dv_htab_hash (var1->dv));
5145       if (!var2)
5146 	{
5147 	  if (details)
5148 	    {
5149 	      fprintf (dump_file, "dataflow difference found: addition of:\n");
5150 	      dump_var (var1);
5151 	    }
5152 	  RETRUE;
5153 	}
5154     }
5155 
5156 #undef RETRUE
5157 
5158   return diffound;
5159 }
5160 
5161 /* Free the contents of dataflow set SET.  */
5162 
5163 static void
5164 dataflow_set_destroy (dataflow_set *set)
5165 {
5166   int i;
5167 
5168   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5169     attrs_list_clear (&set->regs[i]);
5170 
5171   shared_hash_destroy (set->vars);
5172   set->vars = NULL;
5173 }
5174 
5175 /* Return true if T is a tracked parameter with non-degenerate record type.  */
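/* For example (a hypothetical type), a PARM_DECL of type
   struct { int x; int y; } qualifies, provided its mode is not
   BLKmode, whereas struct { int x; } has a single field and is
   considered degenerate, so it is tracked like a scalar.  */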
5176 
5177 static bool
5178 tracked_record_parameter_p (tree t)
5179 {
5180   if (TREE_CODE (t) != PARM_DECL)
5181     return false;
5182 
5183   if (DECL_MODE (t) == BLKmode)
5184     return false;
5185 
5186   tree type = TREE_TYPE (t);
5187   if (TREE_CODE (type) != RECORD_TYPE)
5188     return false;
5189 
5190   if (TYPE_FIELDS (type) == NULL_TREE
5191       || DECL_CHAIN (TYPE_FIELDS (type)) == NULL_TREE)
5192     return false;
5193 
5194   return true;
5195 }
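
/* An illustrative sketch (not from the sources): given

     struct pair { int a; int b; };
     void f (struct pair p);	// tracked: two fields, non-BLKmode
     struct single { int a; };
     void g (struct single q);	// not tracked: degenerate record

   the parameter P qualifies while Q does not, since TYPE_FIELDS of Q's
   type has no DECL_CHAIN.  */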
5196 
5197 /* Shall EXPR be tracked?  */
5198 
5199 static bool
5200 track_expr_p (tree expr, bool need_rtl)
5201 {
5202   rtx decl_rtl;
5203   tree realdecl;
5204 
5205   if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
5206     return DECL_RTL_SET_P (expr);
5207 
5208   /* If EXPR is not a parameter or a variable do not track it.  */
5209   if (!VAR_P (expr) && TREE_CODE (expr) != PARM_DECL)
5210     return 0;
5211 
5212   /* It also must have a name...  */
5213   if (!DECL_NAME (expr) && need_rtl)
5214     return 0;
5215 
5216   /* ... and a RTL assigned to it.  */
5217   decl_rtl = DECL_RTL_IF_SET (expr);
5218   if (!decl_rtl && need_rtl)
5219     return 0;
5220 
5221   /* If this expression is really a debug alias of some other declaration, we
5222      don't need to track this expression if the ultimate declaration is
5223      ignored.  */
5224   realdecl = expr;
5225   if (VAR_P (realdecl) && DECL_HAS_DEBUG_EXPR_P (realdecl))
5226     {
5227       realdecl = DECL_DEBUG_EXPR (realdecl);
5228       if (!DECL_P (realdecl))
5229 	{
5230 	  if (handled_component_p (realdecl)
5231 	      || (TREE_CODE (realdecl) == MEM_REF
5232 		  && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
5233 	    {
5234 	      HOST_WIDE_INT bitsize, bitpos;
5235 	      bool reverse;
5236 	      tree innerdecl
5237 		= get_ref_base_and_extent_hwi (realdecl, &bitpos,
5238 					       &bitsize, &reverse);
5239 	      if (!innerdecl
5240 		  || !DECL_P (innerdecl)
5241 		  || DECL_IGNORED_P (innerdecl)
5242 		  /* Do not track declarations for parts of tracked record
5243 		     parameters since we want to track them as a whole.  */
5244 		  || tracked_record_parameter_p (innerdecl)
5245 		  || TREE_STATIC (innerdecl)
5246 		  || bitsize == 0
5247 		  || bitpos + bitsize > 256)
5248 		return 0;
5249 	      else
5250 		realdecl = expr;
5251 	    }
5252 	  else
5253 	    return 0;
5254 	}
5255     }
5256 
5257   /* Do not track EXPR if REALDECL should be ignored for debugging
5258      purposes.  */
5259   if (DECL_IGNORED_P (realdecl))
5260     return 0;
5261 
5262   /* Do not track global variables until we are able to emit correct location
5263      list for them.  */
5264   if (TREE_STATIC (realdecl))
5265     return 0;
5266 
5267   /* When the EXPR is a DECL for an alias of some variable (see example)
5268      the TREE_STATIC flag is not used.  Disable tracking of all DECLs whose
5269      DECL_RTL contains a SYMBOL_REF.
5270 
5271      Example:
5272      extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
5273      char **_dl_argv;
5274   */
5275   if (decl_rtl && MEM_P (decl_rtl)
5276       && contains_symbol_ref_p (XEXP (decl_rtl, 0)))
5277     return 0;
5278 
5279   /* If RTX is a memory it should not be very large (because it would be
5280      an array or struct).  */
5281   if (decl_rtl && MEM_P (decl_rtl))
5282     {
5283       /* Do not track structures and arrays.  */
5284       if ((GET_MODE (decl_rtl) == BLKmode
5285 	   || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
5286 	  && !tracked_record_parameter_p (realdecl))
5287 	return 0;
5288       if (MEM_SIZE_KNOWN_P (decl_rtl)
5289 	  && maybe_gt (MEM_SIZE (decl_rtl), MAX_VAR_PARTS))
5290 	return 0;
5291     }
5292 
5293   DECL_CHANGED (expr) = 0;
5294   DECL_CHANGED (realdecl) = 0;
5295   return 1;
5296 }
5297 
5298 /* Determine whether a given LOC refers to the same variable part as
5299    EXPR+OFFSET.  */
5300 
5301 static bool
5302 same_variable_part_p (rtx loc, tree expr, poly_int64 offset)
5303 {
5304   tree expr2;
5305   poly_int64 offset2;
5306 
5307   if (! DECL_P (expr))
5308     return false;
5309 
5310   if (REG_P (loc))
5311     {
5312       expr2 = REG_EXPR (loc);
5313       offset2 = REG_OFFSET (loc);
5314     }
5315   else if (MEM_P (loc))
5316     {
5317       expr2 = MEM_EXPR (loc);
5318       offset2 = int_mem_offset (loc);
5319     }
5320   else
5321     return false;
5322 
5323   if (! expr2 || ! DECL_P (expr2))
5324     return false;
5325 
5326   expr = var_debug_decl (expr);
5327   expr2 = var_debug_decl (expr2);
5328 
5329   return (expr == expr2 && known_eq (offset, offset2));
5330 }
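
/* For instance (illustrative only): for an insn copying variable A from
   its stack slot into a register, both sides carry A at offset 0 in their
   MEM_EXPR/REG_EXPR, so the copy stays within a single variable part;
   add_stores uses this to emit MO_COPY rather than MO_SET.  */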
5331 
5332 /* LOC is a REG or MEM that we would like to track if possible.
5333    If EXPR is null, we don't know what expression LOC refers to,
5334    otherwise it refers to EXPR + OFFSET.  STORE_REG_P is true if
5335    LOC is an lvalue register.
5336 
5337    Return true if EXPR is nonnull and if LOC, or some lowpart of it,
5338    is something we can track.  When returning true, store the mode of
5339    the lowpart we can track in *MODE_OUT (if nonnull) and its offset
5340    from EXPR in *OFFSET_OUT (if nonnull).  */
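
/* For instance (an illustrative sketch): if a QImode pseudo was assigned
   to a wider hard register through a paradoxical subreg, LOC's mode is
   the wider one even though only the QImode lowpart is meaningful, so the
   code below switches back to the pseudo's own mode and adjusts the
   offset accordingly.  */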
5341 
5342 static bool
5343 track_loc_p (rtx loc, tree expr, poly_int64 offset, bool store_reg_p,
5344 	     machine_mode *mode_out, HOST_WIDE_INT *offset_out)
5345 {
5346   machine_mode mode;
5347 
5348   if (expr == NULL || !track_expr_p (expr, true))
5349     return false;
5350 
5351   /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
5352      whole subreg, but only the old inner part is really relevant.  */
5353   mode = GET_MODE (loc);
5354   if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
5355     {
5356       machine_mode pseudo_mode;
5357 
5358       pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
5359       if (paradoxical_subreg_p (mode, pseudo_mode))
5360 	{
5361 	  offset += byte_lowpart_offset (pseudo_mode, mode);
5362 	  mode = pseudo_mode;
5363 	}
5364     }
5365 
5366   /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
5367      Do the same if we are storing to a register and EXPR occupies
5368      the whole of register LOC; in that case, the whole of EXPR is
5369      being changed.  We exclude complex modes from the second case
5370      because the real and imaginary parts are represented as separate
5371      pseudo registers, even if the whole complex value fits into one
5372      hard register.  */
5373   if ((paradoxical_subreg_p (mode, DECL_MODE (expr))
5374        || (store_reg_p
5375 	   && !COMPLEX_MODE_P (DECL_MODE (expr))
5376 	   && hard_regno_nregs (REGNO (loc), DECL_MODE (expr)) == 1))
5377       && known_eq (offset + byte_lowpart_offset (DECL_MODE (expr), mode), 0))
5378     {
5379       mode = DECL_MODE (expr);
5380       offset = 0;
5381     }
5382 
5383   HOST_WIDE_INT const_offset;
5384   if (!track_offset_p (offset, &const_offset))
5385     return false;
5386 
5387   if (mode_out)
5388     *mode_out = mode;
5389   if (offset_out)
5390     *offset_out = const_offset;
5391   return true;
5392 }
5393 
5394 /* Return the MODE lowpart of LOC, or null if LOC is not something we
5395    want to track.  When returning nonnull, make sure that the attributes
5396    on the returned value are updated.  */
5397 
5398 static rtx
5399 var_lowpart (machine_mode mode, rtx loc)
5400 {
5401   unsigned int regno;
5402 
5403   if (GET_MODE (loc) == mode)
5404     return loc;
5405 
5406   if (!REG_P (loc) && !MEM_P (loc))
5407     return NULL;
5408 
5409   poly_uint64 offset = byte_lowpart_offset (mode, GET_MODE (loc));
5410 
5411   if (MEM_P (loc))
5412     return adjust_address_nv (loc, mode, offset);
5413 
5414   poly_uint64 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
5415   regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
5416 					     reg_offset, mode);
5417   return gen_rtx_REG_offset (loc, mode, regno, offset);
5418 }
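
/* For example (a hypothetical sketch): requesting the SImode lowpart of
   (reg:DI 0) yields (reg:SI 0) on a little-endian target (the register
   number may shift on big-endian ones), with REG_ATTRS updated through
   gen_rtx_REG_offset; for a MEM, the address is instead adjusted by the
   lowpart byte offset.  */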
5419 
5420 /* Carry information about uses and stores while walking rtx.  */
5421 
5422 struct count_use_info
5423 {
5424   /* The insn where the RTX is.  */
5425   rtx_insn *insn;
5426 
5427   /* The basic block where insn is.  */
5428   basic_block bb;
5429 
5430   /* The array of n_sets sets in the insn, as determined by cselib.  */
5431   struct cselib_set *sets;
5432   int n_sets;
5433 
5434   /* True if we're counting stores, false otherwise.  */
5435   bool store_p;
5436 };
5437 
5438 /* Find a VALUE corresponding to X.  */
5439 
5440 static inline cselib_val *
5441 find_use_val (rtx x, machine_mode mode, struct count_use_info *cui)
5442 {
5443   int i;
5444 
5445   if (cui->sets)
5446     {
5447       /* This is called after uses are set up and before stores are
5448 	 processed by cselib, so it's safe to look up srcs, but not
5449 	 dsts.  So we look up expressions that appear in srcs or in
5450 	 dest expressions, but we search the sets array for dests of
5451 	 stores.  */
5452       if (cui->store_p)
5453 	{
5454 	  /* Some targets represent memset and memcpy patterns
5455 	     by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
5456 	     (set (mem:BLK ...) (const_int ...)) or
5457 	     (set (mem:BLK ...) (mem:BLK ...)).  Don't return anything
5458 	     in that case, otherwise we end up with mode mismatches.  */
5459 	  if (mode == BLKmode && MEM_P (x))
5460 	    return NULL;
5461 	  for (i = 0; i < cui->n_sets; i++)
5462 	    if (cui->sets[i].dest == x)
5463 	      return cui->sets[i].src_elt;
5464 	}
5465       else
5466 	return cselib_lookup (x, mode, 0, VOIDmode);
5467     }
5468 
5469   return NULL;
5470 }
5471 
5472 /* Replace all registers and addresses in an expression with VALUE
5473    expressions that map back to them, unless the expression is a
5474    register.  If no mapping is or can be performed, returns NULL.  */
5475 
5476 static rtx
5477 replace_expr_with_values (rtx loc)
5478 {
5479   if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
5480     return NULL;
5481   else if (MEM_P (loc))
5482     {
5483       cselib_val *addr = cselib_lookup (XEXP (loc, 0),
5484 					get_address_mode (loc), 0,
5485 					GET_MODE (loc));
5486       if (addr)
5487 	return replace_equiv_address_nv (loc, addr->val_rtx);
5488       else
5489 	return NULL;
5490     }
5491   else
5492     return cselib_subst_to_values (loc, VOIDmode);
5493 }
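
/* For example (an illustrative sketch): for (mem:SI (plus:SI (reg:SI sp)
   (const_int 16))) the address is replaced with the VALUE cselib has
   recorded for it, giving (mem:SI (value:SI Vn)); a plain REG or
   ENTRY_VALUE is never mapped and yields NULL instead.  */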
5494 
5495 /* Return true if X contains a DEBUG_EXPR.  */
5496 
5497 static bool
5498 rtx_debug_expr_p (const_rtx x)
5499 {
5500   subrtx_iterator::array_type array;
5501   FOR_EACH_SUBRTX (iter, array, x, ALL)
5502     if (GET_CODE (*iter) == DEBUG_EXPR)
5503       return true;
5504   return false;
5505 }
5506 
5507 /* Determine what kind of micro operation to choose for a USE.  Return
5508    MO_CLOBBER if no micro operation is to be generated.  */
5509 
5510 static enum micro_operation_type
5511 use_type (rtx loc, struct count_use_info *cui, machine_mode *modep)
5512 {
5513   tree expr;
5514 
5515   if (cui && cui->sets)
5516     {
5517       if (GET_CODE (loc) == VAR_LOCATION)
5518 	{
5519 	  if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
5520 	    {
5521 	      rtx ploc = PAT_VAR_LOCATION_LOC (loc);
5522 	      if (! VAR_LOC_UNKNOWN_P (ploc))
5523 		{
5524 		  cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
5525 						   VOIDmode);
5526 
5527 		  /* ??? flag_float_store and volatile mems are never
5528 		     given values, but we could in theory use them for
5529 		     locations.  */
5530 		  gcc_assert (val || 1);
5531 		}
5532 	      return MO_VAL_LOC;
5533 	    }
5534 	  else
5535 	    return MO_CLOBBER;
5536 	}
5537 
5538       if (REG_P (loc) || MEM_P (loc))
5539 	{
5540 	  if (modep)
5541 	    *modep = GET_MODE (loc);
5542 	  if (cui->store_p)
5543 	    {
5544 	      if (REG_P (loc)
5545 		  || (find_use_val (loc, GET_MODE (loc), cui)
5546 		      && cselib_lookup (XEXP (loc, 0),
5547 					get_address_mode (loc), 0,
5548 					GET_MODE (loc))))
5549 		return MO_VAL_SET;
5550 	    }
5551 	  else
5552 	    {
5553 	      cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5554 
5555 	      if (val && !cselib_preserved_value_p (val))
5556 		return MO_VAL_USE;
5557 	    }
5558 	}
5559     }
5560 
5561   if (REG_P (loc))
5562     {
5563       gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
5564 
5565       if (loc == cfa_base_rtx)
5566 	return MO_CLOBBER;
5567       expr = REG_EXPR (loc);
5568 
5569       if (!expr)
5570 	return MO_USE_NO_VAR;
5571       else if (target_for_debug_bind (var_debug_decl (expr)))
5572 	return MO_CLOBBER;
5573       else if (track_loc_p (loc, expr, REG_OFFSET (loc),
5574 			    false, modep, NULL))
5575 	return MO_USE;
5576       else
5577 	return MO_USE_NO_VAR;
5578     }
5579   else if (MEM_P (loc))
5580     {
5581       expr = MEM_EXPR (loc);
5582 
5583       if (!expr)
5584 	return MO_CLOBBER;
5585       else if (target_for_debug_bind (var_debug_decl (expr)))
5586 	return MO_CLOBBER;
5587       else if (track_loc_p (loc, expr, int_mem_offset (loc),
5588 			    false, modep, NULL)
5589 	       /* Multi-part variables shouldn't refer to one-part
5590 		  variable names such as VALUEs (never happens) or
5591 		  DEBUG_EXPRs (only happens in the presence of debug
5592 		  insns).  */
5593 	       && (!MAY_HAVE_DEBUG_BIND_INSNS
5594 		   || !rtx_debug_expr_p (XEXP (loc, 0))))
5595 	return MO_USE;
5596       else
5597 	return MO_CLOBBER;
5598     }
5599 
5600   return MO_CLOBBER;
5601 }
5602 
5603 /* Log to OUT information about micro-operation MOPT involving X in
5604    INSN of BB.  */
5605 
5606 static inline void
5607 log_op_type (rtx x, basic_block bb, rtx_insn *insn,
5608 	     enum micro_operation_type mopt, FILE *out)
5609 {
5610   fprintf (out, "bb %i op %i insn %i %s ",
5611 	   bb->index, VTI (bb)->mos.length (),
5612 	   INSN_UID (insn), micro_operation_type_name[mopt]);
5613   print_inline_rtx (out, x, 2);
5614   fputc ('\n', out);
5615 }
5616 
5617 /* Tell whether the CONCAT used to hold a VALUE and its location
5618    needs value resolution, i.e., an attempt of mapping the location
5619    back to other incoming values.  */
5620 #define VAL_NEEDS_RESOLUTION(x) \
5621   (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5622 /* Whether the location in the CONCAT is a tracked expression that
5623    should also be handled like a MO_USE.  */
5624 #define VAL_HOLDS_TRACK_EXPR(x) \
5625   (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5626 /* Whether the location in the CONCAT should be handled like a MO_COPY
5627    as well.  */
5628 #define VAL_EXPR_IS_COPIED(x) \
5629   (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5630 /* Whether the location in the CONCAT should be handled like a
5631    MO_CLOBBER as well.  */
5632 #define VAL_EXPR_IS_CLOBBERED(x) \
5633   (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
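
/* These four flags reuse otherwise-unused RTL flag bits of the CONCAT
   (volatil, used, jump and unchanging respectively), with RTL_FLAG_CHECK1
   verifying that they are only ever consulted on CONCATs.  */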
5634 
5635 /* All preserved VALUEs.  */
5636 static vec<rtx> preserved_values;
5637 
5638 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes.  */
5639 
5640 static void
5641 preserve_value (cselib_val *val)
5642 {
5643   cselib_preserve_value (val);
5644   preserved_values.safe_push (val->val_rtx);
5645 }
5646 
5647 /* Helper function for MO_VAL_LOC handling.  Return true if any
5648    rtxes unsuitable for CONST use and not replaced by VALUEs are
5649    discovered.  */
5650 
5651 static bool
5652 non_suitable_const (const_rtx x)
5653 {
5654   subrtx_iterator::array_type array;
5655   FOR_EACH_SUBRTX (iter, array, x, ALL)
5656     {
5657       const_rtx x = *iter;
5658       switch (GET_CODE (x))
5659 	{
5660 	case REG:
5661 	case DEBUG_EXPR:
5662 	case PC:
5663 	case SCRATCH:
5664 	case CC0:
5665 	case ASM_INPUT:
5666 	case ASM_OPERANDS:
5667 	  return true;
5668 	case MEM:
5669 	  if (!MEM_READONLY_P (x))
5670 	    return true;
5671 	  break;
5672 	default:
5673 	  break;
5674 	}
5675     }
5676   return false;
5677 }
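
/* E.g. (const_int 42), (symbol_ref "x") or a CONST combining only such
   operands are suitable, while anything containing a REG, DEBUG_EXPR or
   non-read-only MEM is not (an illustrative summary of the cases above).  */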
5678 
5679 /* Add use (register or memory reference) LOC, which will be tracked,
5680    to VTI (bb)->mos.  */
5681 
5682 static void
5683 add_uses (rtx loc, struct count_use_info *cui)
5684 {
5685   machine_mode mode = VOIDmode;
5686   enum micro_operation_type type = use_type (loc, cui, &mode);
5687 
5688   if (type != MO_CLOBBER)
5689     {
5690       basic_block bb = cui->bb;
5691       micro_operation mo;
5692 
5693       mo.type = type;
5694       mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
5695       mo.insn = cui->insn;
5696 
5697       if (type == MO_VAL_LOC)
5698 	{
5699 	  rtx oloc = loc;
5700 	  rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
5701 	  cselib_val *val;
5702 
5703 	  gcc_assert (cui->sets);
5704 
5705 	  if (MEM_P (vloc)
5706 	      && !REG_P (XEXP (vloc, 0))
5707 	      && !MEM_P (XEXP (vloc, 0)))
5708 	    {
5709 	      rtx mloc = vloc;
5710 	      machine_mode address_mode = get_address_mode (mloc);
5711 	      cselib_val *val
5712 		= cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5713 				 GET_MODE (mloc));
5714 
5715 	      if (val && !cselib_preserved_value_p (val))
5716 		preserve_value (val);
5717 	    }
5718 
5719 	  if (CONSTANT_P (vloc)
5720 	      && (GET_CODE (vloc) != CONST || non_suitable_const (vloc)))
5721 	    /* For constants don't look up any value.  */;
5722 	  else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (vloc)
5723 		   && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
5724 	    {
5725 	      machine_mode mode2;
5726 	      enum micro_operation_type type2;
5727 	      rtx nloc = NULL;
5728 	      bool resolvable = REG_P (vloc) || MEM_P (vloc);
5729 
5730 	      if (resolvable)
5731 		nloc = replace_expr_with_values (vloc);
5732 
5733 	      if (nloc)
5734 		{
5735 		  oloc = shallow_copy_rtx (oloc);
5736 		  PAT_VAR_LOCATION_LOC (oloc) = nloc;
5737 		}
5738 
5739 	      oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
5740 
5741 	      type2 = use_type (vloc, 0, &mode2);
5742 
5743 	      gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5744 			  || type2 == MO_CLOBBER);
5745 
5746 	      if (type2 == MO_CLOBBER
5747 		  && !cselib_preserved_value_p (val))
5748 		{
5749 		  VAL_NEEDS_RESOLUTION (oloc) = resolvable;
5750 		  preserve_value (val);
5751 		}
5752 	    }
5753 	  else if (!VAR_LOC_UNKNOWN_P (vloc))
5754 	    {
5755 	      oloc = shallow_copy_rtx (oloc);
5756 	      PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
5757 	    }
5758 
5759 	  mo.u.loc = oloc;
5760 	}
5761       else if (type == MO_VAL_USE)
5762 	{
5763 	  machine_mode mode2 = VOIDmode;
5764 	  enum micro_operation_type type2;
5765 	  cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5766 	  rtx vloc, oloc = loc, nloc;
5767 
5768 	  gcc_assert (cui->sets);
5769 
5770 	  if (MEM_P (oloc)
5771 	      && !REG_P (XEXP (oloc, 0))
5772 	      && !MEM_P (XEXP (oloc, 0)))
5773 	    {
5774 	      rtx mloc = oloc;
5775 	      machine_mode address_mode = get_address_mode (mloc);
5776 	      cselib_val *val
5777 		= cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5778 				 GET_MODE (mloc));
5779 
5780 	      if (val && !cselib_preserved_value_p (val))
5781 		preserve_value (val);
5782 	    }
5783 
5784 	  type2 = use_type (loc, 0, &mode2);
5785 
5786 	  gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5787 		      || type2 == MO_CLOBBER);
5788 
5789 	  if (type2 == MO_USE)
5790 	    vloc = var_lowpart (mode2, loc);
5791 	  else
5792 	    vloc = oloc;
5793 
5794 	  /* The loc of a MO_VAL_USE may have two forms:
5795 
5796 	     (concat val src): val is at src, a value-based
5797 	     representation.
5798 
5799 	     (concat (concat val use) src): same as above, with use as
5800 	     the MO_USE tracked value, if it differs from src.
5801 
5802 	  */
5803 
5804 	  gcc_checking_assert (REG_P (loc) || MEM_P (loc));
5805 	  nloc = replace_expr_with_values (loc);
5806 	  if (!nloc)
5807 	    nloc = oloc;
5808 
5809 	  if (vloc != nloc)
5810 	    oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
5811 	  else
5812 	    oloc = val->val_rtx;
5813 
5814 	  mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
5815 
5816 	  if (type2 == MO_USE)
5817 	    VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
5818 	  if (!cselib_preserved_value_p (val))
5819 	    {
5820 	      VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
5821 	      preserve_value (val);
5822 	    }
5823 	}
5824       else
5825 	gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
5826 
5827       if (dump_file && (dump_flags & TDF_DETAILS))
5828 	log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5829       VTI (bb)->mos.safe_push (mo);
5830     }
5831 }
5832 
5833 /* Helper function for finding all uses of REG/MEM in X in insn INSN.  */
5834 
5835 static void
5836 add_uses_1 (rtx *x, void *cui)
5837 {
5838   subrtx_var_iterator::array_type array;
5839   FOR_EACH_SUBRTX_VAR (iter, array, *x, NONCONST)
5840     add_uses (*iter, (struct count_use_info *) cui);
5841 }
5842 
5843 /* This is the value used during expansion of locations.  We want it
5844    to be unbounded, so that variables expanded deep in a recursion
5845    nest are fully evaluated, so that their values are cached
5846    correctly.  We avoid recursion cycles through other means, and we
5847    don't unshare RTL, so excess complexity is not a problem.  */
5848 #define EXPR_DEPTH (INT_MAX)
5849 /* We use this to keep too-complex expressions from being emitted as
5850    location notes, and then to debug information.  Users can trade
5851    compile time for ridiculously complex expressions, although they're
5852    seldom useful, and they may often have to be discarded as not
5853    representable anyway.  */
5854 #define EXPR_USE_DEPTH (param_max_vartrack_expr_depth)
5855 
5856 /* Attempt to reverse the EXPR operation in the debug info and record it
5857    in the cselib table.  E.g. for reg1 = reg2 + 6, even when reg2 is no
5858    longer live we can express its value as VAL - 6 (VAL being reg1's VALUE).  */
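
/* A sketch of the reversals performed below: PLUS becomes MINUS and vice
   versa, while XOR, NOT and NEG undo themselves, and SIGN_EXTEND and
   ZERO_EXTEND are undone by taking the lowpart SUBREG in the narrower
   mode.  */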
5859 
5860 static void
5861 reverse_op (rtx val, const_rtx expr, rtx_insn *insn)
5862 {
5863   rtx src, arg, ret;
5864   cselib_val *v;
5865   struct elt_loc_list *l;
5866   enum rtx_code code;
5867   int count;
5868 
5869   if (GET_CODE (expr) != SET)
5870     return;
5871 
5872   if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
5873     return;
5874 
5875   src = SET_SRC (expr);
5876   switch (GET_CODE (src))
5877     {
5878     case PLUS:
5879     case MINUS:
5880     case XOR:
5881     case NOT:
5882     case NEG:
5883       if (!REG_P (XEXP (src, 0)))
5884 	return;
5885       break;
5886     case SIGN_EXTEND:
5887     case ZERO_EXTEND:
5888       if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
5889 	return;
5890       break;
5891     default:
5892       return;
5893     }
5894 
5895   if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
5896     return;
5897 
5898   v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
5899   if (!v || !cselib_preserved_value_p (v))
5900     return;
5901 
5902   /* Use canonical V to avoid creating multiple redundant expressions
5903      for different VALUES equivalent to V.  */
5904   v = canonical_cselib_val (v);
5905 
5906   /* Adding a reverse op isn't useful if V already has an always valid
5907      location.  Ignore ENTRY_VALUE, while it is always constant, we should
5908      prefer non-ENTRY_VALUE locations whenever possible.  */
5909   for (l = v->locs, count = 0; l; l = l->next, count++)
5910     if (CONSTANT_P (l->loc)
5911 	&& (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0)))
5912       return;
5913     /* Avoid creating too large locs lists.  */
5914     else if (count == param_max_vartrack_reverse_op_size)
5915       return;
5916 
5917   switch (GET_CODE (src))
5918     {
5919     case NOT:
5920     case NEG:
5921       if (GET_MODE (v->val_rtx) != GET_MODE (val))
5922 	return;
5923       ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
5924       break;
5925     case SIGN_EXTEND:
5926     case ZERO_EXTEND:
5927       ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
5928       break;
5929     case XOR:
5930       code = XOR;
5931       goto binary;
5932     case PLUS:
5933       code = MINUS;
5934       goto binary;
5935     case MINUS:
5936       code = PLUS;
5937       goto binary;
5938     binary:
5939       if (GET_MODE (v->val_rtx) != GET_MODE (val))
5940 	return;
5941       arg = XEXP (src, 1);
5942       if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5943 	{
5944 	  arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
5945 	  if (arg == NULL_RTX)
5946 	    return;
5947 	  if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5948 	    return;
5949 	}
5950       ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
5951       break;
5952     default:
5953       gcc_unreachable ();
5954     }
5955 
5956   cselib_add_permanent_equiv (v, ret, insn);
5957 }
5958 
5959 /* Add store (register or memory reference) LOC, which will be tracked,
5960    to VTI (bb)->mos.  EXPR is the RTL expression containing the store.
5961    CUIP->insn is the instruction which LOC is part of.  */
5962 
5963 static void
5964 add_stores (rtx loc, const_rtx expr, void *cuip)
5965 {
5966   machine_mode mode = VOIDmode, mode2;
5967   struct count_use_info *cui = (struct count_use_info *)cuip;
5968   basic_block bb = cui->bb;
5969   micro_operation mo;
5970   rtx oloc = loc, nloc, src = NULL;
5971   enum micro_operation_type type = use_type (loc, cui, &mode);
5972   bool track_p = false;
5973   cselib_val *v;
5974   bool resolve, preserve;
5975 
5976   if (type == MO_CLOBBER)
5977     return;
5978 
5979   mode2 = mode;
5980 
5981   if (REG_P (loc))
5982     {
5983       gcc_assert (loc != cfa_base_rtx);
5984       if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
5985 	  || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
5986 	  || GET_CODE (expr) == CLOBBER)
5987 	{
5988 	  mo.type = MO_CLOBBER;
5989 	  mo.u.loc = loc;
5990 	  if (GET_CODE (expr) == SET
5991 	      && (SET_DEST (expr) == loc
5992 		  || (GET_CODE (SET_DEST (expr)) == STRICT_LOW_PART
5993 		      && XEXP (SET_DEST (expr), 0) == loc))
5994 	      && !unsuitable_loc (SET_SRC (expr))
5995 	      && find_use_val (loc, mode, cui))
5996 	    {
5997 	      gcc_checking_assert (type == MO_VAL_SET);
5998 	      mo.u.loc = gen_rtx_SET (loc, SET_SRC (expr));
5999 	    }
6000 	}
6001       else
6002 	{
6003 	  if (GET_CODE (expr) == SET
6004 	      && SET_DEST (expr) == loc
6005 	      && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
6006 	    src = var_lowpart (mode2, SET_SRC (expr));
6007 	  loc = var_lowpart (mode2, loc);
6008 
6009 	  if (src == NULL)
6010 	    {
6011 	      mo.type = MO_SET;
6012 	      mo.u.loc = loc;
6013 	    }
6014 	  else
6015 	    {
6016 	      rtx xexpr = gen_rtx_SET (loc, src);
6017 	      if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
6018 		{
6019 		  /* If this is an instruction copying (part of) a parameter
6020 		     passed by invisible reference to its register location,
6021 		     pretend it's a SET so that the initial memory location
6022 		     is discarded, as the parameter register can be reused
6023 		     for other purposes and we do not track locations based
6024 		     on generic registers.  */
6025 		  if (MEM_P (src)
6026 		      && REG_EXPR (loc)
6027 		      && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
6028 		      && DECL_MODE (REG_EXPR (loc)) != BLKmode
6029 		      && MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
6030 		      && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0)
6031 			 != arg_pointer_rtx)
6032 		    mo.type = MO_SET;
6033 		  else
6034 		    mo.type = MO_COPY;
6035 		}
6036 	      else
6037 		mo.type = MO_SET;
6038 	      mo.u.loc = xexpr;
6039 	    }
6040 	}
6041       mo.insn = cui->insn;
6042     }
6043   else if (MEM_P (loc)
6044 	   && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
6045 	       || cui->sets))
6046     {
6047       if (MEM_P (loc) && type == MO_VAL_SET
6048 	  && !REG_P (XEXP (loc, 0))
6049 	  && !MEM_P (XEXP (loc, 0)))
6050 	{
6051 	  rtx mloc = loc;
6052 	  machine_mode address_mode = get_address_mode (mloc);
6053 	  cselib_val *val = cselib_lookup (XEXP (mloc, 0),
6054 					   address_mode, 0,
6055 					   GET_MODE (mloc));
6056 
6057 	  if (val && !cselib_preserved_value_p (val))
6058 	    preserve_value (val);
6059 	}
6060 
6061       if (GET_CODE (expr) == CLOBBER || !track_p)
6062 	{
6063 	  mo.type = MO_CLOBBER;
6064 	  mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
6065 	}
6066       else
6067 	{
6068 	  if (GET_CODE (expr) == SET
6069 	      && SET_DEST (expr) == loc
6070 	      && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
6071 	    src = var_lowpart (mode2, SET_SRC (expr));
6072 	  loc = var_lowpart (mode2, loc);
6073 
6074 	  if (src == NULL)
6075 	    {
6076 	      mo.type = MO_SET;
6077 	      mo.u.loc = loc;
6078 	    }
6079 	  else
6080 	    {
6081 	      rtx xexpr = gen_rtx_SET (loc, src);
6082 	      if (same_variable_part_p (SET_SRC (xexpr),
6083 					MEM_EXPR (loc),
6084 					int_mem_offset (loc)))
6085 		mo.type = MO_COPY;
6086 	      else
6087 		mo.type = MO_SET;
6088 	      mo.u.loc = xexpr;
6089 	    }
6090 	}
6091       mo.insn = cui->insn;
6092     }
6093   else
6094     return;
6095 
6096   if (type != MO_VAL_SET)
6097     goto log_and_return;
6098 
6099   v = find_use_val (oloc, mode, cui);
6100 
6101   if (!v)
6102     goto log_and_return;
6103 
6104   resolve = preserve = !cselib_preserved_value_p (v);
6105 
6106   /* We cannot track values for multiple-part variables, so we track only
6107      locations for tracked record parameters.  */
6108   if (track_p
6109       && REG_P (loc)
6110       && REG_EXPR (loc)
6111       && tracked_record_parameter_p (REG_EXPR (loc)))
6112     {
6113       /* Although we don't use the value here, it could be used later by the
6114 	 mere virtue of its existence as the operand of the reverse operation
6115 	 that gave rise to it (typically extension/truncation).  Make sure it
6116 	 is preserved as required by vt_expand_var_loc_chain.  */
6117       if (preserve)
6118 	preserve_value (v);
6119       goto log_and_return;
6120     }
6121 
6122   if (loc == stack_pointer_rtx
6123       && (maybe_ne (hard_frame_pointer_adjustment, -1)
6124 	  || (!frame_pointer_needed && !ACCUMULATE_OUTGOING_ARGS))
6125       && preserve)
6126     cselib_set_value_sp_based (v);
6127 
6128   /* Don't record MO_VAL_SET for VALUEs that can be described using
6129      cfa_base_rtx or cfa_base_rtx + CONST_INT, cselib already knows
6130      all the needed equivalences and they shouldn't change depending
6131      on which register holds that VALUE in some instruction.  */
6132   if (!frame_pointer_needed
6133       && cfa_base_rtx
6134       && cselib_sp_derived_value_p (v))
6135     {
6136       if (preserve)
6137 	preserve_value (v);
6138       return;
6139     }
6140 
6141   nloc = replace_expr_with_values (oloc);
6142   if (nloc)
6143     oloc = nloc;
6144 
6145   if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
6146     {
6147       cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);
6148 
6149       if (oval == v)
6150 	return;
6151       gcc_assert (REG_P (oloc) || MEM_P (oloc));
6152 
6153       if (oval && !cselib_preserved_value_p (oval))
6154 	{
6155 	  micro_operation moa;
6156 
6157 	  preserve_value (oval);
6158 
6159 	  moa.type = MO_VAL_USE;
6160 	  moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
6161 	  VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
6162 	  moa.insn = cui->insn;
6163 
6164 	  if (dump_file && (dump_flags & TDF_DETAILS))
6165 	    log_op_type (moa.u.loc, cui->bb, cui->insn,
6166 			 moa.type, dump_file);
6167 	  VTI (bb)->mos.safe_push (moa);
6168 	}
6169 
6170       resolve = false;
6171     }
6172   else if (resolve && GET_CODE (mo.u.loc) == SET)
6173     {
6174       if (REG_P (SET_SRC (expr)) || MEM_P (SET_SRC (expr)))
6175 	nloc = replace_expr_with_values (SET_SRC (expr));
6176       else
6177 	nloc = NULL_RTX;
6178 
6179       /* Avoid the mode mismatch between oexpr and expr.  */
6180       if (!nloc && mode != mode2)
6181 	{
6182 	  nloc = SET_SRC (expr);
6183 	  gcc_assert (oloc == SET_DEST (expr));
6184 	}
6185 
6186       if (nloc && nloc != SET_SRC (mo.u.loc))
6187 	oloc = gen_rtx_SET (oloc, nloc);
6188       else
6189 	{
6190 	  if (oloc == SET_DEST (mo.u.loc))
6191 	    /* No point in duplicating.  */
6192 	    oloc = mo.u.loc;
6193 	  if (!REG_P (SET_SRC (mo.u.loc)))
6194 	    resolve = false;
6195 	}
6196     }
6197   else if (!resolve)
6198     {
6199       if (GET_CODE (mo.u.loc) == SET
6200 	  && oloc == SET_DEST (mo.u.loc))
6201 	/* No point in duplicating.  */
6202 	oloc = mo.u.loc;
6203     }
6204   else
6205     resolve = false;
6206 
6207   loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
6208 
6209   if (mo.u.loc != oloc)
6210     loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
6211 
6212   /* The loc of a MO_VAL_SET may have various forms:
6213 
6214      (concat val dst): dst now holds val
6215 
6216      (concat val (set dst src)): dst now holds val, copied from src
6217 
6218      (concat (concat val dstv) dst): dst now holds val; dstv is dst
6219      after replacing mems and non-top-level regs with values.
6220 
6221      (concat (concat val dstv) (set dst src)): dst now holds val,
6222      copied from src.  dstv is a value-based representation of dst, if
6223      it differs from dst.  If resolution is needed, src is a REG, and
6224      its mode is the same as that of val.
6225 
6226      (concat (concat val (set dstv srcv)) (set dst src)): src
6227      copied to dst, holding val.  dstv and srcv are value-based
6228      representations of dst and src, respectively.
6229 
6230   */
6231 
6232   if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
6233     reverse_op (v->val_rtx, expr, cui->insn);
6234 
6235   mo.u.loc = loc;
6236 
6237   if (track_p)
6238     VAL_HOLDS_TRACK_EXPR (loc) = 1;
6239   if (preserve)
6240     {
6241       VAL_NEEDS_RESOLUTION (loc) = resolve;
6242       preserve_value (v);
6243     }
6244   if (mo.type == MO_CLOBBER)
6245     VAL_EXPR_IS_CLOBBERED (loc) = 1;
6246   if (mo.type == MO_COPY)
6247     VAL_EXPR_IS_COPIED (loc) = 1;
6248 
6249   mo.type = MO_VAL_SET;
6250 
6251  log_and_return:
6252   if (dump_file && (dump_flags & TDF_DETAILS))
6253     log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
6254   VTI (bb)->mos.safe_push (mo);
6255 }
6256 
6257 /* Arguments to the call.  */
6258 static rtx call_arguments;
6259 
6260 /* Compute call_arguments.  */
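/* (In outline: the result is a chain of EXPR_LIST nodes, one CONCAT per
   argument whose contents could be recovered, pairing the argument's
   location with a preserved VALUE or a constant; a summary of the code
   below.)  */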
6261 
6262 static void
6263 prepare_call_arguments (basic_block bb, rtx_insn *insn)
6264 {
6265   rtx link, x, call;
6266   rtx prev, cur, next;
6267   rtx this_arg = NULL_RTX;
6268   tree type = NULL_TREE, t, fndecl = NULL_TREE;
6269   tree obj_type_ref = NULL_TREE;
6270   CUMULATIVE_ARGS args_so_far_v;
6271   cumulative_args_t args_so_far;
6272 
6273   memset (&args_so_far_v, 0, sizeof (args_so_far_v));
6274   args_so_far = pack_cumulative_args (&args_so_far_v);
6275   call = get_call_rtx_from (insn);
6276   if (call)
6277     {
6278       if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
6279 	{
6280 	  rtx symbol = XEXP (XEXP (call, 0), 0);
6281 	  if (SYMBOL_REF_DECL (symbol))
6282 	    fndecl = SYMBOL_REF_DECL (symbol);
6283 	}
6284       if (fndecl == NULL_TREE)
6285 	fndecl = MEM_EXPR (XEXP (call, 0));
6286       if (fndecl
6287 	  && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
6288 	  && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
6289 	fndecl = NULL_TREE;
6290       if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
6291 	type = TREE_TYPE (fndecl);
6292       if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
6293 	{
6294 	  if (TREE_CODE (fndecl) == INDIRECT_REF
6295 	      && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
6296 	    obj_type_ref = TREE_OPERAND (fndecl, 0);
6297 	  fndecl = NULL_TREE;
6298 	}
6299       if (type)
6300 	{
6301 	  for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
6302 	       t = TREE_CHAIN (t))
6303 	    if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
6304 		&& INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
6305 	      break;
6306 	  if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
6307 	    type = NULL;
6308 	  else
6309 	    {
6310 	      int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
6311 	      link = CALL_INSN_FUNCTION_USAGE (insn);
6312 #ifndef PCC_STATIC_STRUCT_RETURN
6313 	      if (aggregate_value_p (TREE_TYPE (type), type)
6314 		  && targetm.calls.struct_value_rtx (type, 0) == 0)
6315 		{
6316 		  tree struct_addr = build_pointer_type (TREE_TYPE (type));
6317 		  function_arg_info arg (struct_addr, /*named=*/true);
6318 		  rtx reg;
6319 		  INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6320 					nargs + 1);
6321 		  reg = targetm.calls.function_arg (args_so_far, arg);
6322 		  targetm.calls.function_arg_advance (args_so_far, arg);
6323 		  if (reg == NULL_RTX)
6324 		    {
6325 		      for (; link; link = XEXP (link, 1))
6326 			if (GET_CODE (XEXP (link, 0)) == USE
6327 			    && MEM_P (XEXP (XEXP (link, 0), 0)))
6328 			  {
6329 			    link = XEXP (link, 1);
6330 			    break;
6331 			  }
6332 		    }
6333 		}
6334 	      else
6335 #endif
6336 		INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6337 				      nargs);
6338 	      if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
6339 		{
6340 		  t = TYPE_ARG_TYPES (type);
6341 		  function_arg_info arg (TREE_VALUE (t), /*named=*/true);
6342 		  this_arg = targetm.calls.function_arg (args_so_far, arg);
6343 		  if (this_arg && !REG_P (this_arg))
6344 		    this_arg = NULL_RTX;
6345 		  else if (this_arg == NULL_RTX)
6346 		    {
6347 		      for (; link; link = XEXP (link, 1))
6348 			if (GET_CODE (XEXP (link, 0)) == USE
6349 			    && MEM_P (XEXP (XEXP (link, 0), 0)))
6350 			  {
6351 			    this_arg = XEXP (XEXP (link, 0), 0);
6352 			    break;
6353 			  }
6354 		    }
6355 		}
6356 	    }
6357 	}
6358     }
6359   t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;
6360 
6361   for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
6362     if (GET_CODE (XEXP (link, 0)) == USE)
6363       {
6364 	rtx item = NULL_RTX;
6365 	x = XEXP (XEXP (link, 0), 0);
6366 	if (GET_MODE (link) == VOIDmode
6367 	    || GET_MODE (link) == BLKmode
6368 	    || (GET_MODE (link) != GET_MODE (x)
6369 		&& ((GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
6370 		     && GET_MODE_CLASS (GET_MODE (link)) != MODE_PARTIAL_INT)
6371 		    || (GET_MODE_CLASS (GET_MODE (x)) != MODE_INT
6372 			&& GET_MODE_CLASS (GET_MODE (x)) != MODE_PARTIAL_INT))))
6373 	  /* Can't do anything for these, if the original type mode
6374 	     isn't known or can't be converted.  */;
6375 	else if (REG_P (x))
6376 	  {
6377 	    cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6378 	    scalar_int_mode mode;
6379 	    if (val && cselib_preserved_value_p (val))
6380 	      item = val->val_rtx;
6381 	    else if (is_a <scalar_int_mode> (GET_MODE (x), &mode))
6382 	      {
6383 		opt_scalar_int_mode mode_iter;
6384 		FOR_EACH_WIDER_MODE (mode_iter, mode)
6385 		  {
6386 		    mode = mode_iter.require ();
6387 		    if (GET_MODE_BITSIZE (mode) > BITS_PER_WORD)
6388 		      break;
6389 
6390 		    rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
6391 		    if (reg == NULL_RTX || !REG_P (reg))
6392 		      continue;
6393 		    val = cselib_lookup (reg, mode, 0, VOIDmode);
6394 		    if (val && cselib_preserved_value_p (val))
6395 		      {
6396 			item = val->val_rtx;
6397 			break;
6398 		      }
6399 		  }
6400 	      }
6401 	  }
6402 	else if (MEM_P (x))
6403 	  {
6404 	    rtx mem = x;
6405 	    cselib_val *val;
6406 
6407 	    if (!frame_pointer_needed)
6408 	      {
6409 		class adjust_mem_data amd;
6410 		amd.mem_mode = VOIDmode;
6411 		amd.stack_adjust = -VTI (bb)->out.stack_adjust;
6412 		amd.store = true;
6413 		mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
6414 					       &amd);
6415 		gcc_assert (amd.side_effects.is_empty ());
6416 	      }
6417 	    val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
6418 	    if (val && cselib_preserved_value_p (val))
6419 	      item = val->val_rtx;
6420 	    else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT
6421 		     && GET_MODE_CLASS (GET_MODE (mem)) != MODE_PARTIAL_INT)
6422 	      {
6423 		/* For a non-integer stack argument, also check whether it
6424 		   was initialized by an integer.  */
6425 		scalar_int_mode imode;
6426 		if (int_mode_for_mode (GET_MODE (mem)).exists (&imode)
6427 		    && imode != GET_MODE (mem))
6428 		  {
6429 		    val = cselib_lookup (adjust_address_nv (mem, imode, 0),
6430 					 imode, 0, VOIDmode);
6431 		    if (val && cselib_preserved_value_p (val))
6432 		      item = lowpart_subreg (GET_MODE (x), val->val_rtx,
6433 					     imode);
6434 		  }
6435 	      }
6436 	  }
6437 	if (item)
6438 	  {
6439 	    rtx x2 = x;
6440 	    if (GET_MODE (item) != GET_MODE (link))
6441 	      item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item));
6442 	    if (GET_MODE (x2) != GET_MODE (link))
6443 	      x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2));
6444 	    item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
6445 	    call_arguments
6446 	      = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
6447 	  }
6448 	if (t && t != void_list_node)
6449 	  {
6450 	    rtx reg;
6451 	    function_arg_info arg (TREE_VALUE (t), /*named=*/true);
6452 	    apply_pass_by_reference_rules (&args_so_far_v, arg);
6453 	    reg = targetm.calls.function_arg (args_so_far, arg);
6454 	    if (TREE_CODE (arg.type) == REFERENCE_TYPE
6455 		&& INTEGRAL_TYPE_P (TREE_TYPE (arg.type))
6456 		&& reg
6457 		&& REG_P (reg)
6458 		&& GET_MODE (reg) == arg.mode
6459 		&& (GET_MODE_CLASS (arg.mode) == MODE_INT
6460 		    || GET_MODE_CLASS (arg.mode) == MODE_PARTIAL_INT)
6461 		&& REG_P (x)
6462 		&& REGNO (x) == REGNO (reg)
6463 		&& GET_MODE (x) == arg.mode
6464 		&& item)
6465 	      {
6466 		machine_mode indmode
6467 		  = TYPE_MODE (TREE_TYPE (arg.type));
6468 		rtx mem = gen_rtx_MEM (indmode, x);
6469 		cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
6470 		if (val && cselib_preserved_value_p (val))
6471 		  {
6472 		    item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
6473 		    call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6474 							call_arguments);
6475 		  }
6476 		else
6477 		  {
6478 		    struct elt_loc_list *l;
6479 		    tree initial;
6480 
6481 		    /* Try harder: when passing the address of a constant
6482 		       pool integer it can easily be read back.  */
6483 		    item = XEXP (item, 1);
6484 		    if (GET_CODE (item) == SUBREG)
6485 		      item = SUBREG_REG (item);
6486 		    gcc_assert (GET_CODE (item) == VALUE);
6487 		    val = CSELIB_VAL_PTR (item);
6488 		    for (l = val->locs; l; l = l->next)
6489 		      if (GET_CODE (l->loc) == SYMBOL_REF
6490 			  && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
6491 			  && SYMBOL_REF_DECL (l->loc)
6492 			  && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
6493 			{
6494 			  initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
6495 			  if (tree_fits_shwi_p (initial))
6496 			    {
6497 			      item = GEN_INT (tree_to_shwi (initial));
6498 			      item = gen_rtx_CONCAT (indmode, mem, item);
6499 			      call_arguments
6500 				= gen_rtx_EXPR_LIST (VOIDmode, item,
6501 						     call_arguments);
6502 			    }
6503 			  break;
6504 			}
6505 		  }
6506 	      }
6507 	    targetm.calls.function_arg_advance (args_so_far, arg);
6508 	    t = TREE_CHAIN (t);
6509 	  }
6510       }
6511 
6512   /* Add debug arguments.  */
6513   if (fndecl
6514       && TREE_CODE (fndecl) == FUNCTION_DECL
6515       && DECL_HAS_DEBUG_ARGS_P (fndecl))
6516     {
6517       vec<tree, va_gc> **debug_args = decl_debug_args_lookup (fndecl);
6518       if (debug_args)
6519 	{
6520 	  unsigned int ix;
6521 	  tree param;
6522 	  for (ix = 0; vec_safe_iterate (*debug_args, ix, &param); ix += 2)
6523 	    {
6524 	      rtx item;
6525 	      tree dtemp = (**debug_args)[ix + 1];
6526 	      machine_mode mode = DECL_MODE (dtemp);
6527 	      item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
6528 	      item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
6529 	      call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6530 						  call_arguments);
6531 	    }
6532 	}
6533     }
6534 
6535   /* Reverse call_arguments chain.  */
6536   prev = NULL_RTX;
6537   for (cur = call_arguments; cur; cur = next)
6538     {
6539       next = XEXP (cur, 1);
6540       XEXP (cur, 1) = prev;
6541       prev = cur;
6542     }
6543   call_arguments = prev;
6544 
6545   x = get_call_rtx_from (insn);
6546   if (x)
6547     {
6548       x = XEXP (XEXP (x, 0), 0);
6549       if (GET_CODE (x) == SYMBOL_REF)
6550 	/* Don't record anything.  */;
6551       else if (CONSTANT_P (x))
6552 	{
6553 	  x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
6554 			      pc_rtx, x);
6555 	  call_arguments
6556 	    = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6557 	}
6558       else
6559 	{
6560 	  cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6561 	  if (val && cselib_preserved_value_p (val))
6562 	    {
6563 	      x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
6564 	      call_arguments
6565 		= gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6566 	    }
6567 	}
6568     }
6569   if (this_arg)
6570     {
6571       machine_mode mode
6572 	= TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
6573       rtx clobbered = gen_rtx_MEM (mode, this_arg);
6574       HOST_WIDE_INT token
6575 	= tree_to_shwi (OBJ_TYPE_REF_TOKEN (obj_type_ref));
6576       if (token)
6577 	clobbered = plus_constant (mode, clobbered,
6578 				   token * GET_MODE_SIZE (mode));
6579       clobbered = gen_rtx_MEM (mode, clobbered);
6580       x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
6581       call_arguments
6582 	= gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6583     }
6584 }
6585 
6586 /* Callback for cselib_record_sets_hook, which records as micro
6587    operations the uses and stores in an insn after cselib_record_sets
6588    has analyzed the sets in the insn, but before it modifies the
6589    stored values in the internal tables.  It is also reached when
6590    cselib_record_sets doesn't call it directly (perhaps because we're
6591    not doing cselib in the first place), in which case SETS and N_SETS
   will be 0.  */
6592 
6593 static void
6594 add_with_sets (rtx_insn *insn, struct cselib_set *sets, int n_sets)
6595 {
6596   basic_block bb = BLOCK_FOR_INSN (insn);
6597   int n1, n2;
6598   struct count_use_info cui;
6599   micro_operation *mos;
6600 
6601   cselib_hook_called = true;
6602 
6603   cui.insn = insn;
6604   cui.bb = bb;
6605   cui.sets = sets;
6606   cui.n_sets = n_sets;
6607 
6608   n1 = VTI (bb)->mos.length ();
6609   cui.store_p = false;
6610   note_uses (&PATTERN (insn), add_uses_1, &cui);
6611   n2 = VTI (bb)->mos.length () - 1;
6612   mos = VTI (bb)->mos.address ();
6613 
6614   /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
6615      MO_VAL_LOC last.  */
6616   while (n1 < n2)
6617     {
6618       while (n1 < n2 && mos[n1].type == MO_USE)
6619 	n1++;
6620       while (n1 < n2 && mos[n2].type != MO_USE)
6621 	n2--;
6622       if (n1 < n2)
6623 	std::swap (mos[n1], mos[n2]);
6624     }
6625 
6626   n2 = VTI (bb)->mos.length () - 1;
6627   while (n1 < n2)
6628     {
6629       while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
6630 	n1++;
6631       while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
6632 	n2--;
6633       if (n1 < n2)
6634 	std::swap (mos[n1], mos[n2]);
6635     }
6636 
6637   if (CALL_P (insn))
6638     {
6639       micro_operation mo;
6640 
6641       mo.type = MO_CALL;
6642       mo.insn = insn;
6643       mo.u.loc = call_arguments;
6644       call_arguments = NULL_RTX;
6645 
6646       if (dump_file && (dump_flags & TDF_DETAILS))
6647 	log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
6648       VTI (bb)->mos.safe_push (mo);
6649     }
6650 
6651   n1 = VTI (bb)->mos.length ();
6652   /* This will record NEXT_INSN (insn), such that we can
6653      insert notes before it without worrying about any
6654      notes that MO_USEs might emit after the insn.  */
6655   cui.store_p = true;
6656   note_stores (insn, add_stores, &cui);
6657   n2 = VTI (bb)->mos.length () - 1;
6658   mos = VTI (bb)->mos.address ();
6659 
6660   /* Order the MO_VAL_USEs first (note_stores does nothing
6661      on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6662      insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET.  */
6663   while (n1 < n2)
6664     {
6665       while (n1 < n2 && mos[n1].type == MO_VAL_USE)
6666 	n1++;
6667       while (n1 < n2 && mos[n2].type != MO_VAL_USE)
6668 	n2--;
6669       if (n1 < n2)
6670 	std::swap (mos[n1], mos[n2]);
6671     }
6672 
6673   n2 = VTI (bb)->mos.length () - 1;
6674   while (n1 < n2)
6675     {
6676       while (n1 < n2 && mos[n1].type == MO_CLOBBER)
6677 	n1++;
6678       while (n1 < n2 && mos[n2].type != MO_CLOBBER)
6679 	n2--;
6680       if (n1 < n2)
6681 	std::swap (mos[n1], mos[n2]);
6682     }
6683 }
6684 
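/* Return the initialization status of the variable (if any) that SRC, a
   REG or MEM, refers to in dataflow set IN.  */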
6685 static enum var_init_status
6686 find_src_status (dataflow_set *in, rtx src)
6687 {
6688   tree decl = NULL_TREE;
6689   enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
6690 
6691   if (! flag_var_tracking_uninit)
6692     status = VAR_INIT_STATUS_INITIALIZED;
6693 
6694   if (src && REG_P (src))
6695     decl = var_debug_decl (REG_EXPR (src));
6696   else if (src && MEM_P (src))
6697     decl = var_debug_decl (MEM_EXPR (src));
6698 
6699   if (src && decl)
6700     status = get_init_value (in, src, dv_from_decl (decl));
6701 
6702   return status;
6703 }
6704 
6705 /* SRC is the source of an assignment.  Use SET to try to find what
6706    was ultimately assigned to SRC.  Return that value if known,
6707    otherwise return NULL_RTX.  */
6708 
6709 static rtx
6710 find_src_set_src (dataflow_set *set, rtx src)
6711 {
6712   tree decl = NULL_TREE;   /* The variable being copied around.          */
6713   rtx set_src = NULL_RTX;  /* The value for "decl" stored in "src".      */
6714   variable *var;
6715   location_chain *nextp;
6716   int i;
6717   bool found;
6718 
6719   if (src && REG_P (src))
6720     decl = var_debug_decl (REG_EXPR (src));
6721   else if (src && MEM_P (src))
6722     decl = var_debug_decl (MEM_EXPR (src));
6723 
6724   if (src && decl)
6725     {
6726       decl_or_value dv = dv_from_decl (decl);
6727 
6728       var = shared_hash_find (set->vars, dv);
6729       if (var)
6730 	{
6731 	  found = false;
6732 	  for (i = 0; i < var->n_var_parts && !found; i++)
6733 	    for (nextp = var->var_part[i].loc_chain; nextp && !found;
6734 		 nextp = nextp->next)
6735 	      if (rtx_equal_p (nextp->loc, src))
6736 		{
6737 		  set_src = nextp->set_src;
6738 		  found = true;
6739 		}
6740 
6741 	}
6742     }
6743 
6744   return set_src;
6745 }
6746 
6747 /* Compute the changes of variable locations in the basic block BB.  */
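/* (In outline: OUT is seeded with a copy of IN, every micro operation of
   the block is applied to OUT in order, and the return value reports
   whether OUT changed from its previous contents, so the caller knows
   whether to keep iterating.)  */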
6748 
6749 static bool
6750 compute_bb_dataflow (basic_block bb)
6751 {
6752   unsigned int i;
6753   micro_operation *mo;
6754   bool changed;
6755   dataflow_set old_out;
6756   dataflow_set *in = &VTI (bb)->in;
6757   dataflow_set *out = &VTI (bb)->out;
6758 
6759   dataflow_set_init (&old_out);
6760   dataflow_set_copy (&old_out, out);
6761   dataflow_set_copy (out, in);
6762 
6763   if (MAY_HAVE_DEBUG_BIND_INSNS)
6764     local_get_addr_cache = new hash_map<rtx, rtx>;
6765 
6766   FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
6767     {
6768       rtx_insn *insn = mo->insn;
6769 
6770       switch (mo->type)
6771 	{
6772 	  case MO_CALL:
6773 	    dataflow_set_clear_at_call (out, insn);
6774 	    break;
6775 
6776 	  case MO_USE:
6777 	    {
6778 	      rtx loc = mo->u.loc;
6779 
6780 	      if (REG_P (loc))
6781 		var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6782 	      else if (MEM_P (loc))
6783 		var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6784 	    }
6785 	    break;
6786 
6787 	  case MO_VAL_LOC:
6788 	    {
6789 	      rtx loc = mo->u.loc;
6790 	      rtx val, vloc;
6791 	      tree var;
6792 
6793 	      if (GET_CODE (loc) == CONCAT)
6794 		{
6795 		  val = XEXP (loc, 0);
6796 		  vloc = XEXP (loc, 1);
6797 		}
6798 	      else
6799 		{
6800 		  val = NULL_RTX;
6801 		  vloc = loc;
6802 		}
6803 
6804 	      var = PAT_VAR_LOCATION_DECL (vloc);
6805 
6806 	      clobber_variable_part (out, NULL_RTX,
6807 				     dv_from_decl (var), 0, NULL_RTX);
6808 	      if (val)
6809 		{
6810 		  if (VAL_NEEDS_RESOLUTION (loc))
6811 		    val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6812 		  set_variable_part (out, val, dv_from_decl (var), 0,
6813 				     VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6814 				     INSERT);
6815 		}
6816 	      else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
6817 		set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
6818 				   dv_from_decl (var), 0,
6819 				   VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6820 				   INSERT);
6821 	    }
6822 	    break;
6823 
6824 	  case MO_VAL_USE:
6825 	    {
6826 	      rtx loc = mo->u.loc;
6827 	      rtx val, vloc, uloc;
6828 
6829 	      vloc = uloc = XEXP (loc, 1);
6830 	      val = XEXP (loc, 0);
6831 
6832 	      if (GET_CODE (val) == CONCAT)
6833 		{
6834 		  uloc = XEXP (val, 1);
6835 		  val = XEXP (val, 0);
6836 		}
6837 
6838 	      if (VAL_NEEDS_RESOLUTION (loc))
6839 		val_resolve (out, val, vloc, insn);
6840 	      else
6841 		val_store (out, val, uloc, insn, false);
6842 
6843 	      if (VAL_HOLDS_TRACK_EXPR (loc))
6844 		{
6845 		  if (GET_CODE (uloc) == REG)
6846 		    var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6847 				 NULL);
6848 		  else if (GET_CODE (uloc) == MEM)
6849 		    var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6850 				 NULL);
6851 		}
6852 	    }
6853 	    break;
6854 
6855 	  case MO_VAL_SET:
6856 	    {
6857 	      rtx loc = mo->u.loc;
6858 	      rtx val, vloc, uloc;
6859 	      rtx dstv, srcv;
6860 
6861 	      vloc = loc;
6862 	      uloc = XEXP (vloc, 1);
6863 	      val = XEXP (vloc, 0);
6864 	      vloc = uloc;
6865 
6866 	      if (GET_CODE (uloc) == SET)
6867 		{
6868 		  dstv = SET_DEST (uloc);
6869 		  srcv = SET_SRC (uloc);
6870 		}
6871 	      else
6872 		{
6873 		  dstv = uloc;
6874 		  srcv = NULL;
6875 		}
6876 
6877 	      if (GET_CODE (val) == CONCAT)
6878 		{
6879 		  dstv = vloc = XEXP (val, 1);
6880 		  val = XEXP (val, 0);
6881 		}
6882 
6883 	      if (GET_CODE (vloc) == SET)
6884 		{
6885 		  srcv = SET_SRC (vloc);
6886 
6887 		  gcc_assert (val != srcv);
6888 		  gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6889 
6890 		  dstv = vloc = SET_DEST (vloc);
6891 
6892 		  if (VAL_NEEDS_RESOLUTION (loc))
6893 		    val_resolve (out, val, srcv, insn);
6894 		}
6895 	      else if (VAL_NEEDS_RESOLUTION (loc))
6896 		{
6897 		  gcc_assert (GET_CODE (uloc) == SET
6898 			      && GET_CODE (SET_SRC (uloc)) == REG);
6899 		  val_resolve (out, val, SET_SRC (uloc), insn);
6900 		}
6901 
6902 	      if (VAL_HOLDS_TRACK_EXPR (loc))
6903 		{
6904 		  if (VAL_EXPR_IS_CLOBBERED (loc))
6905 		    {
6906 		      if (REG_P (uloc))
6907 			var_reg_delete (out, uloc, true);
6908 		      else if (MEM_P (uloc))
6909 			{
6910 			  gcc_assert (MEM_P (dstv));
6911 			  gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
6912 			  var_mem_delete (out, dstv, true);
6913 			}
6914 		    }
6915 		  else
6916 		    {
6917 		      bool copied_p = VAL_EXPR_IS_COPIED (loc);
6918 		      rtx src = NULL, dst = uloc;
6919 		      enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6920 
6921 		      if (GET_CODE (uloc) == SET)
6922 			{
6923 			  src = SET_SRC (uloc);
6924 			  dst = SET_DEST (uloc);
6925 			}
6926 
6927 		      if (copied_p)
6928 			{
6929 			  if (flag_var_tracking_uninit)
6930 			    {
6931 			      status = find_src_status (in, src);
6932 
6933 			      if (status == VAR_INIT_STATUS_UNKNOWN)
6934 				status = find_src_status (out, src);
6935 			    }
6936 
6937 			  src = find_src_set_src (in, src);
6938 			}
6939 
6940 		      if (REG_P (dst))
6941 			var_reg_delete_and_set (out, dst, !copied_p,
6942 						status, srcv);
6943 		      else if (MEM_P (dst))
6944 			{
6945 			  gcc_assert (MEM_P (dstv));
6946 			  gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
6947 			  var_mem_delete_and_set (out, dstv, !copied_p,
6948 						  status, srcv);
6949 			}
6950 		    }
6951 		}
6952 	      else if (REG_P (uloc))
6953 		var_regno_delete (out, REGNO (uloc));
6954 	      else if (MEM_P (uloc))
6955 		{
6956 		  gcc_checking_assert (GET_CODE (vloc) == MEM);
6957 		  gcc_checking_assert (dstv == vloc);
6958 		  if (dstv != vloc)
6959 		    clobber_overlapping_mems (out, vloc);
6960 		}
6961 
6962 	      val_store (out, val, dstv, insn, true);
6963 	    }
6964 	    break;
6965 
6966 	  case MO_SET:
6967 	    {
6968 	      rtx loc = mo->u.loc;
6969 	      rtx set_src = NULL;
6970 
6971 	      if (GET_CODE (loc) == SET)
6972 		{
6973 		  set_src = SET_SRC (loc);
6974 		  loc = SET_DEST (loc);
6975 		}
6976 
6977 	      if (REG_P (loc))
6978 		var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6979 					set_src);
6980 	      else if (MEM_P (loc))
6981 		var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6982 					set_src);
6983 	    }
6984 	    break;
6985 
6986 	  case MO_COPY:
6987 	    {
6988 	      rtx loc = mo->u.loc;
6989 	      enum var_init_status src_status;
6990 	      rtx set_src = NULL;
6991 
6992 	      if (GET_CODE (loc) == SET)
6993 		{
6994 		  set_src = SET_SRC (loc);
6995 		  loc = SET_DEST (loc);
6996 		}
6997 
6998 	      if (! flag_var_tracking_uninit)
6999 		src_status = VAR_INIT_STATUS_INITIALIZED;
7000 	      else
7001 		{
7002 		  src_status = find_src_status (in, set_src);
7003 
7004 		  if (src_status == VAR_INIT_STATUS_UNKNOWN)
7005 		    src_status = find_src_status (out, set_src);
7006 		}
7007 
7008 	      set_src = find_src_set_src (in, set_src);
7009 
7010 	      if (REG_P (loc))
7011 		var_reg_delete_and_set (out, loc, false, src_status, set_src);
7012 	      else if (MEM_P (loc))
7013 		var_mem_delete_and_set (out, loc, false, src_status, set_src);
7014 	    }
7015 	    break;
7016 
7017 	  case MO_USE_NO_VAR:
7018 	    {
7019 	      rtx loc = mo->u.loc;
7020 
7021 	      if (REG_P (loc))
7022 		var_reg_delete (out, loc, false);
7023 	      else if (MEM_P (loc))
7024 		var_mem_delete (out, loc, false);
7025 	    }
7026 	    break;
7027 
7028 	  case MO_CLOBBER:
7029 	    {
7030 	      rtx loc = mo->u.loc;
7031 
7032 	      if (REG_P (loc))
7033 		var_reg_delete (out, loc, true);
7034 	      else if (MEM_P (loc))
7035 		var_mem_delete (out, loc, true);
7036 	    }
7037 	    break;
7038 
7039 	  case MO_ADJUST:
7040 	    out->stack_adjust += mo->u.adjust;
7041 	    break;
7042 	}
7043     }
7044 
7045   if (MAY_HAVE_DEBUG_BIND_INSNS)
7046     {
7047       delete local_get_addr_cache;
7048       local_get_addr_cache = NULL;
7049 
7050       dataflow_set_equiv_regs (out);
7051       shared_hash_htab (out->vars)
7052 	->traverse <dataflow_set *, canonicalize_values_mark> (out);
7053       shared_hash_htab (out->vars)
7054 	->traverse <dataflow_set *, canonicalize_values_star> (out);
7055       if (flag_checking)
7056 	shared_hash_htab (out->vars)
7057 	  ->traverse <dataflow_set *, canonicalize_loc_order_check> (out);
7058     }
7059   changed = dataflow_set_different (&old_out, out);
7060   dataflow_set_destroy (&old_out);
7061   return changed;
7062 }
7063 
7064 /* Find the locations of variables in the whole function.  */
7065 
7066 static bool
7067 vt_find_locations (void)
7068 {
7069   bb_heap_t *worklist = new bb_heap_t (LONG_MIN);
7070   bb_heap_t *pending = new bb_heap_t (LONG_MIN);
7071   sbitmap in_worklist, in_pending;
7072   basic_block bb;
7073   edge e;
7074   int *bb_order;
7075   int *rc_order;
7076   int i;
7077   int htabsz = 0;
7078   int htabmax = param_max_vartrack_size;
7079   bool success = true;
7080   unsigned int n_blocks_processed = 0;
7081 
7082   timevar_push (TV_VAR_TRACKING_DATAFLOW);
7083   /* Compute the reverse completion order of a depth-first search of
7084      the CFG so that the dataflow analysis runs faster.  */
7085   rc_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
7086   bb_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
7087   auto_bitmap exit_bbs;
7088   bitmap_set_bit (exit_bbs, EXIT_BLOCK);
7089   auto_vec<std::pair<int, int> > toplevel_scc_extents;
7090   int n = rev_post_order_and_mark_dfs_back_seme
7091     (cfun, single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), exit_bbs, true,
7092      rc_order, &toplevel_scc_extents);
7093   for (i = 0; i < n; i++)
7094     bb_order[rc_order[i]] = i;
7095 
7096   in_worklist = sbitmap_alloc (last_basic_block_for_fn (cfun));
7097   in_pending = sbitmap_alloc (last_basic_block_for_fn (cfun));
7098   bitmap_clear (in_worklist);
7099   bitmap_clear (in_pending);
7100 
7101   /* We're performing the dataflow iteration independently over the
7102      toplevel SCCs plus leading non-cyclic entry blocks and separately
7103      over the tail.  That ensures best memory locality and the least
7104      number of visited blocks.  */
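  /* A hedged example of the extent walk below (block counts made up):
     with N == 10 and toplevel_scc_extents == {(2,4), (6,8)}, the
     do-while iterates over the RPO index ranges [0,4], [5,8] and
     [9,9], seeding PENDING with each range in turn and running that
     range's worklist to a fixed point before moving on.  */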
7105   unsigned extent = 0;
7106   int curr_start = -1;
7107   int curr_end = -1;
7108   do
7109     {
7110       curr_start = curr_end + 1;
7111       if (toplevel_scc_extents.length () <= extent)
7112 	curr_end = n - 1;
7113       else
7114 	curr_end = toplevel_scc_extents[extent++].second;
7115 
7116       for (int i = curr_start; i <= curr_end; ++i)
7117 	{
7118 	  pending->insert (i, BASIC_BLOCK_FOR_FN (cfun, rc_order[i]));
7119 	  bitmap_set_bit (in_pending, rc_order[i]);
7120 	}
7121 
7122       while (success && !pending->empty ())
7123 	{
7124 	  std::swap (worklist, pending);
7125 	  std::swap (in_worklist, in_pending);
7126 
7127 	  while (!worklist->empty ())
7128 	    {
7129 	      bool changed;
7130 	      edge_iterator ei;
7131 	      int oldinsz, oldoutsz;
7132 
7133 	      bb = worklist->extract_min ();
7134 	      bitmap_clear_bit (in_worklist, bb->index);
7135 
7136 	      if (VTI (bb)->in.vars)
7137 		{
7138 		  htabsz -= (shared_hash_htab (VTI (bb)->in.vars)->size ()
7139 			     + shared_hash_htab (VTI (bb)->out.vars)->size ());
7140 		  oldinsz = shared_hash_htab (VTI (bb)->in.vars)->elements ();
7141 		  oldoutsz = shared_hash_htab (VTI (bb)->out.vars)->elements ();
7142 		}
7143 	      else
7144 		oldinsz = oldoutsz = 0;
7145 
7146 	      if (MAY_HAVE_DEBUG_BIND_INSNS)
7147 		{
7148 		  dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
7149 		  bool first = true, adjust = false;
7150 
7151 		  /* Calculate the IN set as the intersection of
7152 		     predecessor OUT sets.  */
7153 
7154 		  dataflow_set_clear (in);
7155 		  dst_can_be_shared = true;
7156 
7157 		  FOR_EACH_EDGE (e, ei, bb->preds)
7158 		    if (!VTI (e->src)->flooded)
7159 		      gcc_assert (bb_order[bb->index]
7160 				  <= bb_order[e->src->index]);
7161 		    else if (first)
7162 		      {
7163 			dataflow_set_copy (in, &VTI (e->src)->out);
7164 			first_out = &VTI (e->src)->out;
7165 			first = false;
7166 		      }
7167 		    else
7168 		      {
7169 			dataflow_set_merge (in, &VTI (e->src)->out);
7170 			adjust = true;
7171 		      }
7172 
7173 		  if (adjust)
7174 		    {
7175 		      dataflow_post_merge_adjust (in, &VTI (bb)->permp);
7176 
7177 		      if (flag_checking)
7178 			/* Merge and merge_adjust should keep entries in
7179 			   canonical order.  */
7180 			shared_hash_htab (in->vars)
7181 			  ->traverse <dataflow_set *,
7182 				      canonicalize_loc_order_check> (in);
7183 
7184 		      if (dst_can_be_shared)
7185 			{
7186 			  shared_hash_destroy (in->vars);
7187 			  in->vars = shared_hash_copy (first_out->vars);
7188 			}
7189 		    }
7190 
7191 		  VTI (bb)->flooded = true;
7192 		}
7193 	      else
7194 		{
7195 		  /* Calculate the IN set as union of predecessor OUT sets.  */
7196 		  dataflow_set_clear (&VTI (bb)->in);
7197 		  FOR_EACH_EDGE (e, ei, bb->preds)
7198 		    dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
7199 		}
7200 
7201 	      changed = compute_bb_dataflow (bb);
7202 	      n_blocks_processed++;
7203 	      htabsz += (shared_hash_htab (VTI (bb)->in.vars)->size ()
7204 			 + shared_hash_htab (VTI (bb)->out.vars)->size ());
7205 
7206 	      if (htabmax && htabsz > htabmax)
7207 		{
7208 		  if (MAY_HAVE_DEBUG_BIND_INSNS)
7209 		    inform (DECL_SOURCE_LOCATION (cfun->decl),
7210 			    "variable tracking size limit exceeded with "
7211 			    "%<-fvar-tracking-assignments%>, retrying without");
7212 		  else
7213 		    inform (DECL_SOURCE_LOCATION (cfun->decl),
7214 			    "variable tracking size limit exceeded");
7215 		  success = false;
7216 		  break;
7217 		}
7218 
7219 	      if (changed)
7220 		{
7221 		  FOR_EACH_EDGE (e, ei, bb->succs)
7222 		    {
7223 		      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7224 			continue;
7225 
7226 		      /* Iterate to an earlier block in RPO in the next
7227 			 round; revisit equal or later blocks immediately.  */
7228 		      if (bb_order[e->dest->index] < bb_order[bb->index])
7229 			{
7230 			  gcc_assert (bb_order[e->dest->index] >= curr_start);
7231 			  if (!bitmap_bit_p (in_pending, e->dest->index))
7232 			    {
7233 			      /* Send E->DEST to next round.  */
7234 			      bitmap_set_bit (in_pending, e->dest->index);
7235 			      pending->insert (bb_order[e->dest->index],
7236 					       e->dest);
7237 			    }
7238 			}
7239 		      else if (bb_order[e->dest->index] <= curr_end
7240 			       && !bitmap_bit_p (in_worklist, e->dest->index))
7241 			{
7242 			  /* Add E->DEST to current round or delay
7243 			     processing if it is in the next SCC.  */
7244 			  bitmap_set_bit (in_worklist, e->dest->index);
7245 			  worklist->insert (bb_order[e->dest->index],
7246 					    e->dest);
7247 			}
7248 		    }
7249 		}
7250 
7251 	      if (dump_file)
7252 		fprintf (dump_file,
7253 			 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, "
7254 			 "tsz %i\n", bb->index,
7255 			 (int)shared_hash_htab (VTI (bb)->in.vars)->size (),
7256 			 oldinsz,
7257 			 (int)shared_hash_htab (VTI (bb)->out.vars)->size (),
7258 			 oldoutsz,
7259 			 (int)worklist->nodes (), (int)pending->nodes (),
7260 			 htabsz);
7261 
7262 	      if (dump_file && (dump_flags & TDF_DETAILS))
7263 		{
7264 		  fprintf (dump_file, "BB %i IN:\n", bb->index);
7265 		  dump_dataflow_set (&VTI (bb)->in);
7266 		  fprintf (dump_file, "BB %i OUT:\n", bb->index);
7267 		  dump_dataflow_set (&VTI (bb)->out);
7268 		}
7269 	    }
7270 	}
7271     }
7272   while (curr_end != n - 1);
7273 
7274   statistics_counter_event (cfun, "compute_bb_dataflow times",
7275 			    n_blocks_processed);
7276 
7277   if (success && MAY_HAVE_DEBUG_BIND_INSNS)
7278     FOR_EACH_BB_FN (bb, cfun)
7279       gcc_assert (VTI (bb)->flooded);
7280 
7281   free (rc_order);
7282   free (bb_order);
7283   delete worklist;
7284   delete pending;
7285   sbitmap_free (in_worklist);
7286   sbitmap_free (in_pending);
7287 
7288   timevar_pop (TV_VAR_TRACKING_DATAFLOW);
7289   return success;
7290 }
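
/* A minimal sketch of the two-heap discipline used in vt_find_locations
   above, with process_block standing in for the per-block body (it is
   not a real function in this file): both heaps are keyed by RPO index,
   so extract_min always yields the earliest unprocessed block.

     while (!pending->empty ())
       {
	 std::swap (worklist, pending);
	 std::swap (in_worklist, in_pending);
	 while (!worklist->empty ())
	   process_block (worklist->extract_min ());
       }
*/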
7291 
7292 /* Print the content of the LIST to dump file.  */
7293 
7294 static void
7295 dump_attrs_list (attrs *list)
7296 {
7297   for (; list; list = list->next)
7298     {
7299       if (dv_is_decl_p (list->dv))
7300 	print_mem_expr (dump_file, dv_as_decl (list->dv));
7301       else
7302 	print_rtl_single (dump_file, dv_as_value (list->dv));
7303       fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
7304     }
7305   fprintf (dump_file, "\n");
7306 }
7307 
7308 /* Print the information about variable *SLOT to dump file.  */
7309 
7310 int
7311 dump_var_tracking_slot (variable **slot, void *data ATTRIBUTE_UNUSED)
7312 {
7313   variable *var = *slot;
7314 
7315   dump_var (var);
7316 
7317   /* Continue traversing the hash table.  */
7318   return 1;
7319 }
7320 
7321 /* Print the information about variable VAR to dump file.  */
7322 
7323 static void
7324 dump_var (variable *var)
7325 {
7326   int i;
7327   location_chain *node;
7328 
7329   if (dv_is_decl_p (var->dv))
7330     {
7331       const_tree decl = dv_as_decl (var->dv);
7332 
7333       if (DECL_NAME (decl))
7334 	{
7335 	  fprintf (dump_file, "  name: %s",
7336 		   IDENTIFIER_POINTER (DECL_NAME (decl)));
7337 	  if (dump_flags & TDF_UID)
7338 	    fprintf (dump_file, "D.%u", DECL_UID (decl));
7339 	}
7340       else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
7341 	fprintf (dump_file, "  name: D#%u", DEBUG_TEMP_UID (decl));
7342       else
7343 	fprintf (dump_file, "  name: D.%u", DECL_UID (decl));
7344       fprintf (dump_file, "\n");
7345     }
7346   else
7347     {
7348       fputc (' ', dump_file);
7349       print_rtl_single (dump_file, dv_as_value (var->dv));
7350     }
7351 
7352   for (i = 0; i < var->n_var_parts; i++)
7353     {
7354       fprintf (dump_file, "    offset %ld\n",
7355 	       (long)(var->onepart ? 0 : VAR_PART_OFFSET (var, i)));
7356       for (node = var->var_part[i].loc_chain; node; node = node->next)
7357 	{
7358 	  fprintf (dump_file, "      ");
7359 	  if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
7360 	    fprintf (dump_file, "[uninit]");
7361 	  print_rtl_single (dump_file, node->loc);
7362 	}
7363     }
7364 }
7365 
7366 /* Print the information about variables from hash table VARS to dump file.  */
7367 
7368 static void
7369 dump_vars (variable_table_type *vars)
7370 {
7371   if (!vars->is_empty ())
7372     {
7373       fprintf (dump_file, "Variables:\n");
7374       vars->traverse <void *, dump_var_tracking_slot> (NULL);
7375     }
7376 }
7377 
7378 /* Print the dataflow set SET to dump file.  */
7379 
7380 static void
7381 dump_dataflow_set (dataflow_set *set)
7382 {
7383   int i;
7384 
7385   fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
7386 	   set->stack_adjust);
7387   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7388     {
7389       if (set->regs[i])
7390 	{
7391 	  fprintf (dump_file, "Reg %d:", i);
7392 	  dump_attrs_list (set->regs[i]);
7393 	}
7394     }
7395   dump_vars (shared_hash_htab (set->vars));
7396   fprintf (dump_file, "\n");
7397 }
7398 
7399 /* Print the IN and OUT sets for each basic block to dump file.  */
7400 
7401 static void
7402 dump_dataflow_sets (void)
7403 {
7404   basic_block bb;
7405 
7406   FOR_EACH_BB_FN (bb, cfun)
7407     {
7408       fprintf (dump_file, "\nBasic block %d:\n", bb->index);
7409       fprintf (dump_file, "IN:\n");
7410       dump_dataflow_set (&VTI (bb)->in);
7411       fprintf (dump_file, "OUT:\n");
7412       dump_dataflow_set (&VTI (bb)->out);
7413     }
7414 }
7415 
7416 /* Return the variable for DV in dropped_values, inserting one if
7417    requested with INSERT.  */
7418 
7419 static inline variable *
7420 variable_from_dropped (decl_or_value dv, enum insert_option insert)
7421 {
7422   variable **slot;
7423   variable *empty_var;
7424   onepart_enum onepart;
7425 
7426   slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv), insert);
7427 
7428   if (!slot)
7429     return NULL;
7430 
7431   if (*slot)
7432     return *slot;
7433 
7434   gcc_checking_assert (insert == INSERT);
7435 
7436   onepart = dv_onepart_p (dv);
7437 
7438   gcc_checking_assert (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR);
7439 
7440   empty_var = onepart_pool_allocate (onepart);
7441   empty_var->dv = dv;
7442   empty_var->refcount = 1;
7443   empty_var->n_var_parts = 0;
7444   empty_var->onepart = onepart;
7445   empty_var->in_changed_variables = false;
7446   empty_var->var_part[0].loc_chain = NULL;
7447   empty_var->var_part[0].cur_loc = NULL;
7448   VAR_LOC_1PAUX (empty_var) = NULL;
7449   set_dv_changed (dv, true);
7450 
7451   *slot = empty_var;
7452 
7453   return empty_var;
7454 }
7455 
7456 /* Recover the one-part aux from dropped_values.  */
7457 
7458 static struct onepart_aux *
7459 recover_dropped_1paux (variable *var)
7460 {
7461   variable *dvar;
7462 
7463   gcc_checking_assert (var->onepart);
7464 
7465   if (VAR_LOC_1PAUX (var))
7466     return VAR_LOC_1PAUX (var);
7467 
7468   if (var->onepart == ONEPART_VDECL)
7469     return NULL;
7470 
7471   dvar = variable_from_dropped (var->dv, NO_INSERT);
7472 
7473   if (!dvar)
7474     return NULL;
7475 
7476   VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (dvar);
7477   VAR_LOC_1PAUX (dvar) = NULL;
7478 
7479   return VAR_LOC_1PAUX (var);
7480 }
7481 
7482 /* Add variable VAR to the hash table of changed variables and
7483    if it has no locations delete it from SET's hash table.  */
7484 
7485 static void
7486 variable_was_changed (variable *var, dataflow_set *set)
7487 {
7488   hashval_t hash = dv_htab_hash (var->dv);
7489 
7490   if (emit_notes)
7491     {
7492       variable **slot;
7493 
7494       /* Remember this decl or VALUE has been added to changed_variables.  */
7495       set_dv_changed (var->dv, true);
7496 
7497       slot = changed_variables->find_slot_with_hash (var->dv, hash, INSERT);
7498 
7499       if (*slot)
7500 	{
7501 	  variable *old_var = *slot;
7502 	  gcc_assert (old_var->in_changed_variables);
7503 	  old_var->in_changed_variables = false;
7504 	  if (var != old_var && var->onepart)
7505 	    {
7506 	      /* Restore the auxiliary info from an empty variable
7507 		 previously created for changed_variables, so it is
7508 		 not lost.  */
7509 	      gcc_checking_assert (!VAR_LOC_1PAUX (var));
7510 	      VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (old_var);
7511 	      VAR_LOC_1PAUX (old_var) = NULL;
7512 	    }
7513 	  variable_htab_free (*slot);
7514 	}
7515 
7516       if (set && var->n_var_parts == 0)
7517 	{
7518 	  onepart_enum onepart = var->onepart;
7519 	  variable *empty_var = NULL;
7520 	  variable **dslot = NULL;
7521 
7522 	  if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR)
7523 	    {
7524 	      dslot = dropped_values->find_slot_with_hash (var->dv,
7525 							   dv_htab_hash (var->dv),
7526 							   INSERT);
7527 	      empty_var = *dslot;
7528 
7529 	      if (empty_var)
7530 		{
7531 		  gcc_checking_assert (!empty_var->in_changed_variables);
7532 		  if (!VAR_LOC_1PAUX (var))
7533 		    {
7534 		      VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (empty_var);
7535 		      VAR_LOC_1PAUX (empty_var) = NULL;
7536 		    }
7537 		  else
7538 		    gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
7539 		}
7540 	    }
7541 
7542 	  if (!empty_var)
7543 	    {
7544 	      empty_var = onepart_pool_allocate (onepart);
7545 	      empty_var->dv = var->dv;
7546 	      empty_var->refcount = 1;
7547 	      empty_var->n_var_parts = 0;
7548 	      empty_var->onepart = onepart;
7549 	      if (dslot)
7550 		{
7551 		  empty_var->refcount++;
7552 		  *dslot = empty_var;
7553 		}
7554 	    }
7555 	  else
7556 	    empty_var->refcount++;
7557 	  empty_var->in_changed_variables = true;
7558 	  *slot = empty_var;
7559 	  if (onepart)
7560 	    {
7561 	      empty_var->var_part[0].loc_chain = NULL;
7562 	      empty_var->var_part[0].cur_loc = NULL;
7563 	      VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (var);
7564 	      VAR_LOC_1PAUX (var) = NULL;
7565 	    }
7566 	  goto drop_var;
7567 	}
7568       else
7569 	{
7570 	  if (var->onepart && !VAR_LOC_1PAUX (var))
7571 	    recover_dropped_1paux (var);
7572 	  var->refcount++;
7573 	  var->in_changed_variables = true;
7574 	  *slot = var;
7575 	}
7576     }
7577   else
7578     {
7579       gcc_assert (set);
7580       if (var->n_var_parts == 0)
7581 	{
7582 	  variable **slot;
7583 
7584 	drop_var:
7585 	  slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
7586 	  if (slot)
7587 	    {
7588 	      if (shared_hash_shared (set->vars))
7589 		slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
7590 						      NO_INSERT);
7591 	      shared_hash_htab (set->vars)->clear_slot (slot);
7592 	    }
7593 	}
7594     }
7595 }
7596 
7597 /* Look for the index in VAR->var_part corresponding to OFFSET.
7598    Return -1 if not found.  If INSERTION_POINT is non-NULL, the
7599    referenced int will be set to the index that the part has or should
7600    have, if it should be inserted.  */
7601 
7602 static inline int
7603 find_variable_location_part (variable *var, HOST_WIDE_INT offset,
7604 			     int *insertion_point)
7605 {
7606   int pos, low, high;
7607 
7608   if (var->onepart)
7609     {
7610       if (offset != 0)
7611 	return -1;
7612 
7613       if (insertion_point)
7614 	*insertion_point = 0;
7615 
7616       return var->n_var_parts - 1;
7617     }
7618 
7619   /* Find the location part.  */
7620   low = 0;
7621   high = var->n_var_parts;
7622   while (low != high)
7623     {
7624       pos = (low + high) / 2;
7625       if (VAR_PART_OFFSET (var, pos) < offset)
7626 	low = pos + 1;
7627       else
7628 	high = pos;
7629     }
7630   pos = low;
7631 
7632   if (insertion_point)
7633     *insertion_point = pos;
7634 
7635   if (pos < var->n_var_parts && VAR_PART_OFFSET (var, pos) == offset)
7636     return pos;
7637 
7638   return -1;
7639 }
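
/* An illustration of find_variable_location_part for a multi-part
   variable (offsets hypothetical): with parts at offsets {0, 4, 8},
   looking up offset 4 returns index 1 and sets *INSERTION_POINT to 1,
   while looking up offset 6 returns -1 and sets *INSERTION_POINT to 2,
   the index where a part for offset 6 would have to be inserted.  */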
7640 
7641 static variable **
7642 set_slot_part (dataflow_set *set, rtx loc, variable **slot,
7643 	       decl_or_value dv, HOST_WIDE_INT offset,
7644 	       enum var_init_status initialized, rtx set_src)
7645 {
7646   int pos;
7647   location_chain *node, *next;
7648   location_chain **nextp;
7649   variable *var;
7650   onepart_enum onepart;
7651 
7652   var = *slot;
7653 
7654   if (var)
7655     onepart = var->onepart;
7656   else
7657     onepart = dv_onepart_p (dv);
7658 
7659   gcc_checking_assert (offset == 0 || !onepart);
7660   gcc_checking_assert (loc != dv_as_opaque (dv));
7661 
7662   if (! flag_var_tracking_uninit)
7663     initialized = VAR_INIT_STATUS_INITIALIZED;
7664 
7665   if (!var)
7666     {
7667       /* Create new variable information.  */
7668       var = onepart_pool_allocate (onepart);
7669       var->dv = dv;
7670       var->refcount = 1;
7671       var->n_var_parts = 1;
7672       var->onepart = onepart;
7673       var->in_changed_variables = false;
7674       if (var->onepart)
7675 	VAR_LOC_1PAUX (var) = NULL;
7676       else
7677 	VAR_PART_OFFSET (var, 0) = offset;
7678       var->var_part[0].loc_chain = NULL;
7679       var->var_part[0].cur_loc = NULL;
7680       *slot = var;
7681       pos = 0;
7682       nextp = &var->var_part[0].loc_chain;
7683     }
7684   else if (onepart)
7685     {
7686       int r = -1, c = 0;
7687 
7688       gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
7689 
7690       pos = 0;
7691 
7692       if (GET_CODE (loc) == VALUE)
7693 	{
7694 	  for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7695 	       nextp = &node->next)
7696 	    if (GET_CODE (node->loc) == VALUE)
7697 	      {
7698 		if (node->loc == loc)
7699 		  {
7700 		    r = 0;
7701 		    break;
7702 		  }
7703 		if (canon_value_cmp (node->loc, loc))
7704 		  c++;
7705 		else
7706 		  {
7707 		    r = 1;
7708 		    break;
7709 		  }
7710 	      }
7711 	    else if (REG_P (node->loc) || MEM_P (node->loc))
7712 	      c++;
7713 	    else
7714 	      {
7715 		r = 1;
7716 		break;
7717 	      }
7718 	}
7719       else if (REG_P (loc))
7720 	{
7721 	  for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7722 	       nextp = &node->next)
7723 	    if (REG_P (node->loc))
7724 	      {
7725 		if (REGNO (node->loc) < REGNO (loc))
7726 		  c++;
7727 		else
7728 		  {
7729 		    if (REGNO (node->loc) == REGNO (loc))
7730 		      r = 0;
7731 		    else
7732 		      r = 1;
7733 		    break;
7734 		  }
7735 	      }
7736 	    else
7737 	      {
7738 		r = 1;
7739 		break;
7740 	      }
7741 	}
7742       else if (MEM_P (loc))
7743 	{
7744 	  for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7745 	       nextp = &node->next)
7746 	    if (REG_P (node->loc))
7747 	      c++;
7748 	    else if (MEM_P (node->loc))
7749 	      {
7750 		if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
7751 		  break;
7752 		else
7753 		  c++;
7754 	      }
7755 	    else
7756 	      {
7757 		r = 1;
7758 		break;
7759 	      }
7760 	}
7761       else
7762 	for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7763 	     nextp = &node->next)
7764 	  if ((r = loc_cmp (node->loc, loc)) >= 0)
7765 	    break;
7766 	  else
7767 	    c++;
7768 
7769       if (r == 0)
7770 	return slot;
7771 
7772       if (shared_var_p (var, set->vars))
7773 	{
7774 	  slot = unshare_variable (set, slot, var, initialized);
7775 	  var = *slot;
7776 	  for (nextp = &var->var_part[0].loc_chain; c;
7777 	       nextp = &(*nextp)->next)
7778 	    c--;
7779 	  gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
7780 	}
7781     }
7782   else
7783     {
7784       int inspos = 0;
7785 
7786       gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
7787 
7788       pos = find_variable_location_part (var, offset, &inspos);
7789 
7790       if (pos >= 0)
7791 	{
7792 	  node = var->var_part[pos].loc_chain;
7793 
7794 	  if (node
7795 	      && ((REG_P (node->loc) && REG_P (loc)
7796 		   && REGNO (node->loc) == REGNO (loc))
7797 		  || rtx_equal_p (node->loc, loc)))
7798 	    {
7799 	      /* LOC is at the beginning of the chain, so we have nothing
7800 		 to do.  */
7801 	      if (node->init < initialized)
7802 		node->init = initialized;
7803 	      if (set_src != NULL)
7804 		node->set_src = set_src;
7805 
7806 	      return slot;
7807 	    }
7808 	  else
7809 	    {
7810 	      /* We have to make a copy of a shared variable.  */
7811 	      if (shared_var_p (var, set->vars))
7812 		{
7813 		  slot = unshare_variable (set, slot, var, initialized);
7814 		  var = *slot;
7815 		}
7816 	    }
7817 	}
7818       else
7819 	{
7820 	  /* The location part was not found, so a new one will be created.  */
7821 
7822 	  /* We have to make a copy of the shared variable.  */
7823 	  if (shared_var_p (var, set->vars))
7824 	    {
7825 	      slot = unshare_variable (set, slot, var, initialized);
7826 	      var = *slot;
7827 	    }
7828 
7829 	  /* We track only variables whose size is <= MAX_VAR_PARTS bytes,
7830 	     thus there are at most MAX_VAR_PARTS different offsets.  */
7831 	  gcc_assert (var->n_var_parts < MAX_VAR_PARTS
7832 		      && (!var->n_var_parts || !onepart));
7833 
7834 	  /* We have to move the elements of array starting at index
7835 	     inspos to the next position.  */
7836 	  for (pos = var->n_var_parts; pos > inspos; pos--)
7837 	    var->var_part[pos] = var->var_part[pos - 1];
7838 
7839 	  var->n_var_parts++;
7840 	  gcc_checking_assert (!onepart);
7841 	  VAR_PART_OFFSET (var, pos) = offset;
7842 	  var->var_part[pos].loc_chain = NULL;
7843 	  var->var_part[pos].cur_loc = NULL;
7844 	}
7845 
7846       /* Delete the location from the list.  */
7847       nextp = &var->var_part[pos].loc_chain;
7848       for (node = var->var_part[pos].loc_chain; node; node = next)
7849 	{
7850 	  next = node->next;
7851 	  if ((REG_P (node->loc) && REG_P (loc)
7852 	       && REGNO (node->loc) == REGNO (loc))
7853 	      || rtx_equal_p (node->loc, loc))
7854 	    {
7855 	      /* Save these values, to assign to the new node, before
7856 		 deleting this one.  */
7857 	      if (node->init > initialized)
7858 		initialized = node->init;
7859 	      if (node->set_src != NULL && set_src == NULL)
7860 		set_src = node->set_src;
7861 	      if (var->var_part[pos].cur_loc == node->loc)
7862 		var->var_part[pos].cur_loc = NULL;
7863 	      delete node;
7864 	      *nextp = next;
7865 	      break;
7866 	    }
7867 	  else
7868 	    nextp = &node->next;
7869 	}
7870 
7871       nextp = &var->var_part[pos].loc_chain;
7872     }
7873 
7874   /* Add the location to the beginning.  */
7875   node = new location_chain;
7876   node->loc = loc;
7877   node->init = initialized;
7878   node->set_src = set_src;
7879   node->next = *nextp;
7880   *nextp = node;
7881 
7882   /* If no location was emitted for this part yet, note the change.  */
7883   if (var->var_part[pos].cur_loc == NULL)
7884     variable_was_changed (var, set);
7885 
7886   return slot;
7887 }
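
/* To illustrate the canonical chain order maintained by set_slot_part
   above (locations made up): in a one-part variable's chain, registers
   come first, sorted by REGNO, then MEMs sorted by loc_cmp on their
   addresses, then VALUEs and other locations, e.g.

     (reg 3) -> (reg 7) -> (mem (reg 5)) -> (value V1) -> (value V2)

   so the loops above only count, in C, the nodes that sort before the
   incoming LOC.  */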
7888 
7889 /* Set the part of variable's location in the dataflow set SET.  The
7890    variable part is specified by variable's declaration in DV and
7891    offset OFFSET and the part's location by LOC.  IOPT should be
7892    NO_INSERT if the variable is known to be in SET already and the
7893    variable hash table must not be resized, and INSERT otherwise.  */
7894 
7895 static void
7896 set_variable_part (dataflow_set *set, rtx loc,
7897 		   decl_or_value dv, HOST_WIDE_INT offset,
7898 		   enum var_init_status initialized, rtx set_src,
7899 		   enum insert_option iopt)
7900 {
7901   variable **slot;
7902 
7903   if (iopt == NO_INSERT)
7904     slot = shared_hash_find_slot_noinsert (set->vars, dv);
7905   else
7906     {
7907       slot = shared_hash_find_slot (set->vars, dv);
7908       if (!slot)
7909 	slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
7910     }
7911   set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
7912 }
7913 
7914 /* Remove all recorded register locations for the given variable part
7915    from dataflow set SET, except for those that are identical to loc.
7916    The variable part is specified by variable's declaration or value
7917    DV and offset OFFSET.  */
7918 
7919 static variable **
7920 clobber_slot_part (dataflow_set *set, rtx loc, variable **slot,
7921 		   HOST_WIDE_INT offset, rtx set_src)
7922 {
7923   variable *var = *slot;
7924   int pos = find_variable_location_part (var, offset, NULL);
7925 
7926   if (pos >= 0)
7927     {
7928       location_chain *node, *next;
7929 
7930       /* Remove the register locations from the dataflow set.  */
7931       next = var->var_part[pos].loc_chain;
7932       for (node = next; node; node = next)
7933 	{
7934 	  next = node->next;
7935 	  if (node->loc != loc
7936 	      && (!flag_var_tracking_uninit
7937 		  || !set_src
7938 		  || MEM_P (set_src)
7939 		  || !rtx_equal_p (set_src, node->set_src)))
7940 	    {
7941 	      if (REG_P (node->loc))
7942 		{
7943 		  attrs *anode, *anext;
7944 		  attrs **anextp;
7945 
7946 		  /* Remove the variable part from the register's
7947 		     list, but preserve any other variable parts
7948 		     that might be regarded as live in that same
7949 		     register.  */
7950 		  anextp = &set->regs[REGNO (node->loc)];
7951 		  for (anode = *anextp; anode; anode = anext)
7952 		    {
7953 		      anext = anode->next;
7954 		      if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
7955 			  && anode->offset == offset)
7956 			{
7957 			  delete anode;
7958 			  *anextp = anext;
7959 			}
7960 		      else
7961 			anextp = &anode->next;
7962 		    }
7963 		}
7964 
7965 	      slot = delete_slot_part (set, node->loc, slot, offset);
7966 	    }
7967 	}
7968     }
7969 
7970   return slot;
7971 }
7972 
7973 /* Remove all recorded register locations for the given variable part
7974    from dataflow set SET, except for those that are identical to loc.
7975    The variable part is specified by variable's declaration or value
7976    DV and offset OFFSET.  */
7977 
7978 static void
7979 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7980 		       HOST_WIDE_INT offset, rtx set_src)
7981 {
7982   variable **slot;
7983 
7984   if (!dv_as_opaque (dv)
7985       || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
7986     return;
7987 
7988   slot = shared_hash_find_slot_noinsert (set->vars, dv);
7989   if (!slot)
7990     return;
7991 
7992   clobber_slot_part (set, loc, slot, offset, set_src);
7993 }
7994 
7995 /* Delete the part of variable's location from dataflow set SET.  The
7996    variable part is specified by its SET->vars slot SLOT and offset
7997    OFFSET and the part's location by LOC.  */
7998 
7999 static variable **
8000 delete_slot_part (dataflow_set *set, rtx loc, variable **slot,
8001 		  HOST_WIDE_INT offset)
8002 {
8003   variable *var = *slot;
8004   int pos = find_variable_location_part (var, offset, NULL);
8005 
8006   if (pos >= 0)
8007     {
8008       location_chain *node, *next;
8009       location_chain **nextp;
8010       bool changed;
8011       rtx cur_loc;
8012 
8013       if (shared_var_p (var, set->vars))
8014 	{
8015 	  /* If the variable contains the location part we have to
8016 	     make a copy of the variable.  */
8017 	  for (node = var->var_part[pos].loc_chain; node;
8018 	       node = node->next)
8019 	    {
8020 	      if ((REG_P (node->loc) && REG_P (loc)
8021 		   && REGNO (node->loc) == REGNO (loc))
8022 		  || rtx_equal_p (node->loc, loc))
8023 		{
8024 		  slot = unshare_variable (set, slot, var,
8025 					   VAR_INIT_STATUS_UNKNOWN);
8026 		  var = *slot;
8027 		  break;
8028 		}
8029 	    }
8030 	}
8031 
8032       if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
8033 	cur_loc = VAR_LOC_FROM (var);
8034       else
8035 	cur_loc = var->var_part[pos].cur_loc;
8036 
8037       /* Delete the location part.  */
8038       changed = false;
8039       nextp = &var->var_part[pos].loc_chain;
8040       for (node = *nextp; node; node = next)
8041 	{
8042 	  next = node->next;
8043 	  if ((REG_P (node->loc) && REG_P (loc)
8044 	       && REGNO (node->loc) == REGNO (loc))
8045 	      || rtx_equal_p (node->loc, loc))
8046 	    {
8047 	      /* If we have deleted the location which was last emitted,
8048 		 we have to emit a new location, so add the variable to
8049 		 the set of changed variables.  */
8050 	      if (cur_loc == node->loc)
8051 		{
8052 		  changed = true;
8053 		  var->var_part[pos].cur_loc = NULL;
8054 		  if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
8055 		    VAR_LOC_FROM (var) = NULL;
8056 		}
8057 	      delete node;
8058 	      *nextp = next;
8059 	      break;
8060 	    }
8061 	  else
8062 	    nextp = &node->next;
8063 	}
8064 
8065       if (var->var_part[pos].loc_chain == NULL)
8066 	{
8067 	  changed = true;
8068 	  var->n_var_parts--;
8069 	  while (pos < var->n_var_parts)
8070 	    {
8071 	      var->var_part[pos] = var->var_part[pos + 1];
8072 	      pos++;
8073 	    }
8074 	}
8075       if (changed)
8076 	variable_was_changed (var, set);
8077     }
8078 
8079   return slot;
8080 }
8081 
8082 /* Delete the part of variable's location from dataflow set SET.  The
8083    variable part is specified by variable's declaration or value DV
8084    and offset OFFSET and the part's location by LOC.  */
8085 
8086 static void
8087 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
8088 		      HOST_WIDE_INT offset)
8089 {
8090   variable **slot = shared_hash_find_slot_noinsert (set->vars, dv);
8091   if (!slot)
8092     return;
8093 
8094   delete_slot_part (set, loc, slot, offset);
8095 }
8096 
8097 
8098 /* Structure for passing some other parameters to function
8099    vt_expand_loc_callback.  */
8100 class expand_loc_callback_data
8101 {
8102 public:
8103   /* The variables and values active at this point.  */
8104   variable_table_type *vars;
8105 
8106   /* Stack of values and debug_exprs under expansion, and their
8107      children.  */
8108   auto_vec<rtx, 4> expanding;
8109 
8110   /* Stack of values and debug_exprs whose expansion hit recursion
8111      cycles.  They will have VALUE_RECURSED_INTO marked when added to
8112      this list.  This flag will be cleared if any of its dependencies
8113      resolves to a valid location.  So, if the flag remains set at the
8114      end of the search, we know no valid location for this one can
8115      possibly exist.  */
8116   auto_vec<rtx, 4> pending;
8117 
8118   /* The maximum depth among the sub-expressions under expansion.
8119      Zero indicates no expansion so far.  */
8120   expand_depth depth;
8121 };
8122 
8123 /* Allocate the one-part auxiliary data structure for VAR, with enough
8124    room for COUNT dependencies.  */
8125 
8126 static void
8127 loc_exp_dep_alloc (variable *var, int count)
8128 {
8129   size_t allocsize;
8130 
8131   gcc_checking_assert (var->onepart);
8132 
8133   /* We can be called with COUNT == 0 to allocate the data structure
8134      without any dependencies, e.g. for the backlinks only.  However,
8135      if we are specifying a COUNT, then the dependency list must have
8136      been emptied before.  It would be possible to adjust pointers or
8137      force it empty here, but this is better done at an earlier point
8138      in the algorithm, so we instead leave an assertion to catch
8139      errors.  */
8140   gcc_checking_assert (!count
8141 		       || VAR_LOC_DEP_VEC (var) == NULL
8142 		       || VAR_LOC_DEP_VEC (var)->is_empty ());
8143 
8144   if (VAR_LOC_1PAUX (var) && VAR_LOC_DEP_VEC (var)->space (count))
8145     return;
8146 
8147   allocsize = offsetof (struct onepart_aux, deps)
8148 	      + deps_vec::embedded_size (count);
8149 
8150   if (VAR_LOC_1PAUX (var))
8151     {
8152       VAR_LOC_1PAUX (var) = XRESIZEVAR (struct onepart_aux,
8153 					VAR_LOC_1PAUX (var), allocsize);
8154       /* If the reallocation moves the onepaux structure, the
8155 	 back-pointer to BACKLINKS in the first list member will still
8156 	 point to its old location.  Adjust it.  */
8157       if (VAR_LOC_DEP_LST (var))
8158 	VAR_LOC_DEP_LST (var)->pprev = VAR_LOC_DEP_LSTP (var);
8159     }
8160   else
8161     {
8162       VAR_LOC_1PAUX (var) = XNEWVAR (struct onepart_aux, allocsize);
8163       *VAR_LOC_DEP_LSTP (var) = NULL;
8164       VAR_LOC_FROM (var) = NULL;
8165       VAR_LOC_DEPTH (var).complexity = 0;
8166       VAR_LOC_DEPTH (var).entryvals = 0;
8167     }
8168   VAR_LOC_DEP_VEC (var)->embedded_init (count);
8169 }
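
/* A rough picture of the single allocation made by loc_exp_dep_alloc
   above: the fixed part of struct onepart_aux is followed in the same
   block by the embedded dependency vector, which is why

     allocsize = offsetof (struct onepart_aux, deps)
		 + deps_vec::embedded_size (count);

   reserves space for the header plus COUNT loc_exp_dep entries.  */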
8170 
8171 /* Remove all entries from the vector of active dependencies of VAR,
8172    removing them from the back-links lists too.  */
8173 
8174 static void
8175 loc_exp_dep_clear (variable *var)
8176 {
8177   while (VAR_LOC_DEP_VEC (var) && !VAR_LOC_DEP_VEC (var)->is_empty ())
8178     {
8179       loc_exp_dep *led = &VAR_LOC_DEP_VEC (var)->last ();
8180       if (led->next)
8181 	led->next->pprev = led->pprev;
8182       if (led->pprev)
8183 	*led->pprev = led->next;
8184       VAR_LOC_DEP_VEC (var)->pop ();
8185     }
8186 }
8187 
8188 /* Insert an active dependency from VAR on X to the vector of
8189    dependencies, and add the corresponding back-link to X's list of
8190    back-links in VARS.  */
8191 
8192 static void
8193 loc_exp_insert_dep (variable *var, rtx x, variable_table_type *vars)
8194 {
8195   decl_or_value dv;
8196   variable *xvar;
8197   loc_exp_dep *led;
8198 
8199   dv = dv_from_rtx (x);
8200 
8201   /* ??? Build a vector of variables parallel to EXPANDING, to avoid
8202      an additional look up?  */
8203   xvar = vars->find_with_hash (dv, dv_htab_hash (dv));
8204 
8205   if (!xvar)
8206     {
8207       xvar = variable_from_dropped (dv, NO_INSERT);
8208       gcc_checking_assert (xvar);
8209     }
8210 
8211   /* No point in adding the same backlink more than once.  This may
8212      arise if say the same value appears in two complex expressions in
8213      the same loc_list, or even more than once in a single
8214      expression.  */
8215   if (VAR_LOC_DEP_LST (xvar) && VAR_LOC_DEP_LST (xvar)->dv == var->dv)
8216     return;
8217 
8218   if (var->onepart == NOT_ONEPART)
8219     led = new loc_exp_dep;
8220   else
8221     {
8222       loc_exp_dep empty;
8223       memset (&empty, 0, sizeof (empty));
8224       VAR_LOC_DEP_VEC (var)->quick_push (empty);
8225       led = &VAR_LOC_DEP_VEC (var)->last ();
8226     }
8227   led->dv = var->dv;
8228   led->value = x;
8229 
8230   loc_exp_dep_alloc (xvar, 0);
8231   led->pprev = VAR_LOC_DEP_LSTP (xvar);
8232   led->next = *led->pprev;
8233   if (led->next)
8234     led->next->pprev = &led->next;
8235   *led->pprev = led;
8236 }
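
/* The splice at the end of loc_exp_insert_dep is the usual pprev
   idiom; a minimal sketch of the head insertion into XVAR's back-link
   list:

     led->pprev = VAR_LOC_DEP_LSTP (xvar);   address of the list head
     led->next = *led->pprev;		      old first element or NULL
     if (led->next)
       led->next->pprev = &led->next;
     *led->pprev = led;			      LED becomes the new head

   Keeping *LED->PPREV == LED lets a node unlink itself without knowing
   its predecessor.  */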
8237 
8238 /* Create active dependencies of VAR on COUNT values starting at
8239    VALUE, and corresponding back-links to the entries in VARS.  Return
8240    true if we found any pending-recursion results.  */
8241 
8242 static bool
8243 loc_exp_dep_set (variable *var, rtx result, rtx *value, int count,
8244 		 variable_table_type *vars)
8245 {
8246   bool pending_recursion = false;
8247 
8248   gcc_checking_assert (VAR_LOC_DEP_VEC (var) == NULL
8249 		       || VAR_LOC_DEP_VEC (var)->is_empty ());
8250 
8251   /* Allocate room for, and set up, a dependency on each of the
8252      COUNT values starting at VALUE.  */
8253   loc_exp_dep_alloc (var, count);
8254 
8255   while (count--)
8256     {
8257       rtx x = *value++;
8258 
8259       if (!pending_recursion)
8260 	pending_recursion = !result && VALUE_RECURSED_INTO (x);
8261 
8262       loc_exp_insert_dep (var, x, vars);
8263     }
8264 
8265   return pending_recursion;
8266 }
8267 
8268 /* Notify the back-links of IVAR that are pending recursion that we
8269    have found a non-NIL value for it, so they are cleared for another
8270    attempt to compute a current location.  */
8271 
8272 static void
8273 notify_dependents_of_resolved_value (variable *ivar, variable_table_type *vars)
8274 {
8275   loc_exp_dep *led, *next;
8276 
8277   for (led = VAR_LOC_DEP_LST (ivar); led; led = next)
8278     {
8279       decl_or_value dv = led->dv;
8280       variable *var;
8281 
8282       next = led->next;
8283 
8284       if (dv_is_value_p (dv))
8285 	{
8286 	  rtx value = dv_as_value (dv);
8287 
8288 	  /* If we have already resolved it, leave it alone.  */
8289 	  if (!VALUE_RECURSED_INTO (value))
8290 	    continue;
8291 
8292 	  /* Check that VALUE_RECURSED_INTO, true from the test above,
8293 	     implies NO_LOC_P.  */
8294 	  gcc_checking_assert (NO_LOC_P (value));
8295 
8296 	  /* We won't notify variables that are being expanded,
8297 	     because their dependency list is cleared before
8298 	     recursing.  */
8299 	  NO_LOC_P (value) = false;
8300 	  VALUE_RECURSED_INTO (value) = false;
8301 
8302 	  gcc_checking_assert (dv_changed_p (dv));
8303 	}
8304       else
8305 	{
8306 	  gcc_checking_assert (dv_onepart_p (dv) != NOT_ONEPART);
8307 	  if (!dv_changed_p (dv))
8308 	    continue;
8309 	}
8310 
8311       var = vars->find_with_hash (dv, dv_htab_hash (dv));
8312 
8313       if (!var)
8314 	var = variable_from_dropped (dv, NO_INSERT);
8315 
8316       if (var)
8317 	notify_dependents_of_resolved_value (var, vars);
8318 
8319       if (next)
8320 	next->pprev = led->pprev;
8321       if (led->pprev)
8322 	*led->pprev = next;
8323       led->next = NULL;
8324       led->pprev = NULL;
8325     }
8326 }
8327 
8328 static rtx vt_expand_loc_callback (rtx x, bitmap regs,
8329 				   int max_depth, void *data);
8330 
8331 /* Return the combined depth, when one sub-expression evaluated to
8332    BEST_DEPTH and the previous known depth was SAVED_DEPTH.  */
8333 
8334 static inline expand_depth
8335 update_depth (expand_depth saved_depth, expand_depth best_depth)
8336 {
8337   /* If we didn't find anything, stick with what we had.  */
8338   if (!best_depth.complexity)
8339     return saved_depth;
8340 
8341 	  /* If we hadn't found anything before, use the depth of the current
8342      expression.  Do NOT add one extra level, we want to compute the
8343      maximum depth among sub-expressions.  We'll increment it later,
8344      if appropriate.  */
8345   if (!saved_depth.complexity)
8346     return best_depth;
8347 
8348   /* Combine the entryval count so that regardless of which one we
8349      return, the entryval count is accurate.  */
8350   best_depth.entryvals = saved_depth.entryvals
8351     = best_depth.entryvals + saved_depth.entryvals;
8352 
8353   if (saved_depth.complexity < best_depth.complexity)
8354     return best_depth;
8355   else
8356     return saved_depth;
8357 }
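
/* A worked example for update_depth: combining SAVED_DEPTH == {2, 1}
   with BEST_DEPTH == {3, 0} (as {complexity, entryvals} pairs) yields
   {3, 1}; the larger complexity wins while the entryval counts
   accumulate.  */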
8358 
8359 /* Expand VAR to a location RTX, updating its cur_loc.  Use REGS and
8360    DATA for cselib expand callback.  If PENDRECP is given, indicate in
8361    it whether any sub-expression couldn't be fully evaluated because
8362    it is pending recursion resolution.  */
8363 
8364 static inline rtx
8365 vt_expand_var_loc_chain (variable *var, bitmap regs, void *data,
8366 			 bool *pendrecp)
8367 {
8368   class expand_loc_callback_data *elcd
8369     = (class expand_loc_callback_data *) data;
8370   location_chain *loc, *next;
8371   rtx result = NULL;
8372   int first_child, result_first_child, last_child;
8373   bool pending_recursion;
8374   rtx loc_from = NULL;
8375   struct elt_loc_list *cloc = NULL;
8376   expand_depth depth = { 0, 0 }, saved_depth = elcd->depth;
8377   int wanted_entryvals, found_entryvals = 0;
8378 
8379   /* Clear all backlinks pointing at this, so that we're not notified
8380      while we're active.  */
8381   loc_exp_dep_clear (var);
8382 
8383  retry:
8384   if (var->onepart == ONEPART_VALUE)
8385     {
8386       cselib_val *val = CSELIB_VAL_PTR (dv_as_value (var->dv));
8387 
8388       gcc_checking_assert (cselib_preserved_value_p (val));
8389 
8390       cloc = val->locs;
8391     }
8392 
8393   first_child = result_first_child = last_child
8394     = elcd->expanding.length ();
8395 
8396   wanted_entryvals = found_entryvals;
8397 
8398   /* Attempt to expand each available location in turn.  */
8399   for (next = loc = var->n_var_parts ? var->var_part[0].loc_chain : NULL;
8400        loc || cloc; loc = next)
8401     {
8402       result_first_child = last_child;
8403 
8404       if (!loc)
8405 	{
8406 	  loc_from = cloc->loc;
8407 	  next = loc;
8408 	  cloc = cloc->next;
8409 	  if (unsuitable_loc (loc_from))
8410 	    continue;
8411 	}
8412       else
8413 	{
8414 	  loc_from = loc->loc;
8415 	  next = loc->next;
8416 	}
8417 
8418       gcc_checking_assert (!unsuitable_loc (loc_from));
8419 
8420       elcd->depth.complexity = elcd->depth.entryvals = 0;
8421       result = cselib_expand_value_rtx_cb (loc_from, regs, EXPR_DEPTH,
8422 					   vt_expand_loc_callback, data);
8423       last_child = elcd->expanding.length ();
8424 
8425       if (result)
8426 	{
8427 	  depth = elcd->depth;
8428 
8429 	  gcc_checking_assert (depth.complexity
8430 			       || result_first_child == last_child);
8431 
8432 	  if (last_child - result_first_child != 1)
8433 	    {
8434 	      if (!depth.complexity && GET_CODE (result) == ENTRY_VALUE)
8435 		depth.entryvals++;
8436 	      depth.complexity++;
8437 	    }
8438 
8439 	  if (depth.complexity <= EXPR_USE_DEPTH)
8440 	    {
8441 	      if (depth.entryvals <= wanted_entryvals)
8442 		break;
8443 	      else if (!found_entryvals || depth.entryvals < found_entryvals)
8444 		found_entryvals = depth.entryvals;
8445 	    }
8446 
8447 	  result = NULL;
8448 	}
8449 
8450       /* Set it up in case we leave the loop.  */
8451       depth.complexity = depth.entryvals = 0;
8452       loc_from = NULL;
8453       result_first_child = first_child;
8454     }
8455 
8456   if (!loc_from && wanted_entryvals < found_entryvals)
8457     {
8458       /* We found entries with ENTRY_VALUEs and skipped them.  Since
8459 	 we could not find any expansions without ENTRY_VALUEs but we
8460 	 found at least one with them, go back and get an entry with
8461 	 the minimum ENTRY_VALUE count that we found.  We could
8462 	 avoid looping, but since each sub-loc is already resolved,
8463 	 the re-expansion should be trivial.  ??? Should we record all
8464 	 attempted locs as dependencies, so that we retry the
8465 	 expansion should any of them change, in the hope it can give
8466 	 us a new entry without an ENTRY_VALUE?  */
8467       elcd->expanding.truncate (first_child);
8468       goto retry;
8469     }
8470 
8471   /* Register all encountered dependencies as active.  */
8472   pending_recursion = loc_exp_dep_set
8473     (var, result, elcd->expanding.address () + result_first_child,
8474      last_child - result_first_child, elcd->vars);
8475 
8476   elcd->expanding.truncate (first_child);
8477 
8478   /* Record where the expansion came from.  */
8479   gcc_checking_assert (!result || !pending_recursion);
8480   VAR_LOC_FROM (var) = loc_from;
8481   VAR_LOC_DEPTH (var) = depth;
8482 
8483   gcc_checking_assert (!depth.complexity == !result);
8484 
8485   elcd->depth = update_depth (saved_depth, depth);
8486 
8487   /* Indicate whether any of the dependencies are pending recursion
8488      resolution.  */
8489   if (pendrecp)
8490     *pendrecp = pending_recursion;
8491 
8492   if (!pendrecp || !pending_recursion)
8493     var->var_part[0].cur_loc = result;
8494 
8495   return result;
8496 }
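
/* A hypothetical run of the retry logic in vt_expand_var_loc_chain: if
   every expansion found on the first pass contains ENTRY_VALUEs, with
   3 and 2 of them respectively, FOUND_ENTRYVALS ends up 2; the retry
   then raises WANTED_ENTRYVALS to 2 and accepts the first expansion
   with at most that many ENTRY_VALUEs instead of insisting on none.  */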
8497 
8498 /* Callback for cselib_expand_value, that looks for expressions
8499    holding the value in the var-tracking hash tables.  Return X for
8500    standard processing, anything else is to be used as-is.  */
8501 
8502 static rtx
8503 vt_expand_loc_callback (rtx x, bitmap regs,
8504 			int max_depth ATTRIBUTE_UNUSED,
8505 			void *data)
8506 {
8507   class expand_loc_callback_data *elcd
8508     = (class expand_loc_callback_data *) data;
8509   decl_or_value dv;
8510   variable *var;
8511   rtx result, subreg;
8512   bool pending_recursion = false;
8513   bool from_empty = false;
8514 
8515   switch (GET_CODE (x))
8516     {
8517     case SUBREG:
8518       subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
8519 					   EXPR_DEPTH,
8520 					   vt_expand_loc_callback, data);
8521 
8522       if (!subreg)
8523 	return NULL;
8524 
8525       result = simplify_gen_subreg (GET_MODE (x), subreg,
8526 				    GET_MODE (SUBREG_REG (x)),
8527 				    SUBREG_BYTE (x));
8528 
8529       /* Invalid SUBREGs are ok in debug info.  ??? We could try
8530 	 alternate expansions for the VALUE as well.  */
8531       if (!result && GET_MODE (subreg) != VOIDmode)
8532 	result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
8533 
8534       return result;
8535 
8536     case DEBUG_EXPR:
8537     case VALUE:
8538       dv = dv_from_rtx (x);
8539       break;
8540 
8541     default:
8542       return x;
8543     }
8544 
8545   elcd->expanding.safe_push (x);
8546 
8547   /* Check that VALUE_RECURSED_INTO implies NO_LOC_P.  */
8548   gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x));
8549 
8550   if (NO_LOC_P (x))
8551     {
8552       gcc_checking_assert (VALUE_RECURSED_INTO (x) || !dv_changed_p (dv));
8553       return NULL;
8554     }
8555 
8556   var = elcd->vars->find_with_hash (dv, dv_htab_hash (dv));
8557 
8558   if (!var)
8559     {
8560       from_empty = true;
8561       var = variable_from_dropped (dv, INSERT);
8562     }
8563 
8564   gcc_checking_assert (var);
8565 
8566   if (!dv_changed_p (dv))
8567     {
8568       gcc_checking_assert (!NO_LOC_P (x));
8569       gcc_checking_assert (var->var_part[0].cur_loc);
8570       gcc_checking_assert (VAR_LOC_1PAUX (var));
8571       gcc_checking_assert (VAR_LOC_1PAUX (var)->depth.complexity);
8572 
8573       elcd->depth = update_depth (elcd->depth, VAR_LOC_1PAUX (var)->depth);
8574 
8575       return var->var_part[0].cur_loc;
8576     }
8577 
8578   VALUE_RECURSED_INTO (x) = true;
8579   /* This is tentative, but it makes some tests simpler.  */
8580   NO_LOC_P (x) = true;
8581 
8582   gcc_checking_assert (var->n_var_parts == 1 || from_empty);
8583 
8584   result = vt_expand_var_loc_chain (var, regs, data, &pending_recursion);
8585 
8586   if (pending_recursion)
8587     {
8588       gcc_checking_assert (!result);
8589       elcd->pending.safe_push (x);
8590     }
8591   else
8592     {
8593       NO_LOC_P (x) = !result;
8594       VALUE_RECURSED_INTO (x) = false;
8595       set_dv_changed (dv, false);
8596 
8597       if (result)
8598 	notify_dependents_of_resolved_value (var, elcd->vars);
8599     }
8600 
8601   return result;
8602 }
8603 
8604 /* While expanding variables, we may encounter recursion cycles
8605    because of mutual (possibly indirect) dependencies between two
8606    particular variables (or values), say A and B.  If we're trying to
8607    expand A when we get to B, which in turn attempts to expand A, if
8608    we can't find any other expansion for B, we'll add B to this
8609    pending-recursion stack, and tentatively return NULL for its
8610    location.  This tentative value will be used for any other
8611    occurrences of B, unless A gets some other location, in which case
8612    it will notify B that it is worth another try at computing a
8613    location for it, and it will use the location computed for A then.
8614    At the end of the expansion, the tentative NULL locations become
8615    final for all members of PENDING that didn't get a notification.
8616    This function performs this finalization of NULL locations.  */
8617 
8618 static void
8619 resolve_expansions_pending_recursion (vec<rtx, va_heap> *pending)
8620 {
8621   while (!pending->is_empty ())
8622     {
8623       rtx x = pending->pop ();
8624       decl_or_value dv;
8625 
8626       if (!VALUE_RECURSED_INTO (x))
8627 	continue;
8628 
8629       gcc_checking_assert (NO_LOC_P (x));
8630       VALUE_RECURSED_INTO (x) = false;
8631       dv = dv_from_rtx (x);
8632       gcc_checking_assert (dv_changed_p (dv));
8633       set_dv_changed (dv, false);
8634     }
8635 }
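
/* Continuing the A/B example above: if expanding A reaches B, which
   circles back to A, B is pushed on PENDING with VALUE_RECURSED_INTO
   set; if A then finds no alternative location either, nothing
   notifies B, so the loop above finalizes it: NO_LOC_P stays set while
   the recursion flag and the changed bit are cleared.  */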
8636 
8637 /* Initialize expand_loc_callback_data D with variable hash table V.
8638    It must be a macro because of alloca (vec stack).  */
8639 #define INIT_ELCD(d, v)						\
8640   do								\
8641     {								\
8642       (d).vars = (v);						\
8643       (d).depth.complexity = (d).depth.entryvals = 0;		\
8644     }								\
8645   while (0)
8646 /* Finalize expand_loc_callback_data D, resolved to location L.  */
8647 #define FINI_ELCD(d, l)						\
8648   do								\
8649     {								\
8650       resolve_expansions_pending_recursion (&(d).pending);	\
8651       (d).pending.release ();					\
8652       (d).expanding.release ();					\
8653 								\
8654       if ((l) && MEM_P (l))					\
8655 	(l) = targetm.delegitimize_address (l);			\
8656     }								\
8657   while (0)
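
/* The intended use of the macro pair, as in vt_expand_loc and
   vt_expand_1pvar below:

     class expand_loc_callback_data data;
     INIT_ELCD (data, vars);
     ... expand LOC, collecting dependencies in DATA ...
     FINI_ELCD (data, result);
*/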
8658 
8659 /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
8660    equivalences in VARS, updating their CUR_LOCs in the process.  */
8661 
8662 static rtx
8663 vt_expand_loc (rtx loc, variable_table_type *vars)
8664 {
8665   class expand_loc_callback_data data;
8666   rtx result;
8667 
8668   if (!MAY_HAVE_DEBUG_BIND_INSNS)
8669     return loc;
8670 
8671   INIT_ELCD (data, vars);
8672 
8673   result = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
8674 				       vt_expand_loc_callback, &data);
8675 
8676   FINI_ELCD (data, result);
8677 
8678   return result;
8679 }
8680 
8681 /* Expand the one-part VARiable to a location, using the equivalences
8682    in VARS, updating their CUR_LOCs in the process.  */
8683 
8684 static rtx
8685 vt_expand_1pvar (variable *var, variable_table_type *vars)
8686 {
8687   class expand_loc_callback_data data;
8688   rtx loc;
8689 
8690   gcc_checking_assert (var->onepart && var->n_var_parts == 1);
8691 
8692   if (!dv_changed_p (var->dv))
8693     return var->var_part[0].cur_loc;
8694 
8695   INIT_ELCD (data, vars);
8696 
8697   loc = vt_expand_var_loc_chain (var, scratch_regs, &data, NULL);
8698 
8699   gcc_checking_assert (data.expanding.is_empty ());
8700 
8701   FINI_ELCD (data, loc);
8702 
8703   return loc;
8704 }
8705 
8706 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP.  DATA contains
8707    additional parameters: WHERE specifies whether the note shall be emitted
8708    before or after instruction INSN.  */
8709 
8710 int
8711 emit_note_insn_var_location (variable **varp, emit_note_data *data)
8712 {
8713   variable *var = *varp;
8714   rtx_insn *insn = data->insn;
8715   enum emit_note_where where = data->where;
8716   variable_table_type *vars = data->vars;
8717   rtx_note *note;
8718   rtx note_vl;
8719   int i, j, n_var_parts;
8720   bool complete;
8721   enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
8722   HOST_WIDE_INT last_limit;
8723   HOST_WIDE_INT offsets[MAX_VAR_PARTS];
8724   rtx loc[MAX_VAR_PARTS];
8725   tree decl;
8726   location_chain *lc;
8727 
8728   gcc_checking_assert (var->onepart == NOT_ONEPART
8729 		       || var->onepart == ONEPART_VDECL);
8730 
8731   decl = dv_as_decl (var->dv);
8732 
8733   complete = true;
8734   last_limit = 0;
8735   n_var_parts = 0;
8736   if (!var->onepart)
8737     for (i = 0; i < var->n_var_parts; i++)
8738       if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
8739 	var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
8740   for (i = 0; i < var->n_var_parts; i++)
8741     {
8742       machine_mode mode, wider_mode;
8743       rtx loc2;
8744       HOST_WIDE_INT offset, size, wider_size;
8745 
8746       if (i == 0 && var->onepart)
8747 	{
8748 	  gcc_checking_assert (var->n_var_parts == 1);
8749 	  offset = 0;
8750 	  initialized = VAR_INIT_STATUS_INITIALIZED;
8751 	  loc2 = vt_expand_1pvar (var, vars);
8752 	}
8753       else
8754 	{
8755 	  if (last_limit < VAR_PART_OFFSET (var, i))
8756 	    {
8757 	      complete = false;
8758 	      break;
8759 	    }
8760 	  else if (last_limit > VAR_PART_OFFSET (var, i))
8761 	    continue;
8762 	  offset = VAR_PART_OFFSET (var, i);
8763 	  loc2 = var->var_part[i].cur_loc;
8764 	  if (loc2 && GET_CODE (loc2) == MEM
8765 	      && GET_CODE (XEXP (loc2, 0)) == VALUE)
8766 	    {
8767 	      rtx depval = XEXP (loc2, 0);
8768 
8769 	      loc2 = vt_expand_loc (loc2, vars);
8770 
8771 	      if (loc2)
8772 		loc_exp_insert_dep (var, depval, vars);
8773 	    }
8774 	  if (!loc2)
8775 	    {
8776 	      complete = false;
8777 	      continue;
8778 	    }
8779 	  gcc_checking_assert (GET_CODE (loc2) != VALUE);
8780 	  for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
8781 	    if (var->var_part[i].cur_loc == lc->loc)
8782 	      {
8783 		initialized = lc->init;
8784 		break;
8785 	      }
8786 	  gcc_assert (lc);
8787 	}
8788 
8789       offsets[n_var_parts] = offset;
8790       if (!loc2)
8791 	{
8792 	  complete = false;
8793 	  continue;
8794 	}
8795       loc[n_var_parts] = loc2;
8796       mode = GET_MODE (var->var_part[i].cur_loc);
8797       if (mode == VOIDmode && var->onepart)
8798 	mode = DECL_MODE (decl);
8799       /* We only track subparts of constant-sized objects, since at present
8800 	 there's no representation for polynomial pieces.  */
8801       if (!GET_MODE_SIZE (mode).is_constant (&size))
8802 	{
8803 	  complete = false;
8804 	  continue;
8805 	}
8806       last_limit = offsets[n_var_parts] + size;
8807 
8808       /* Attempt to merge adjacent registers or memory.  */
8809       for (j = i + 1; j < var->n_var_parts; j++)
8810 	if (last_limit <= VAR_PART_OFFSET (var, j))
8811 	  break;
8812       if (j < var->n_var_parts
8813 	  && GET_MODE_WIDER_MODE (mode).exists (&wider_mode)
8814 	  && GET_MODE_SIZE (wider_mode).is_constant (&wider_size)
8815 	  && var->var_part[j].cur_loc
8816 	  && mode == GET_MODE (var->var_part[j].cur_loc)
8817 	  && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
8818 	  && last_limit == (var->onepart ? 0 : VAR_PART_OFFSET (var, j))
8819 	  && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
8820 	  && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
8821 	{
8822 	  rtx new_loc = NULL;
8823 	  poly_int64 offset2;
8824 
8825 	  if (REG_P (loc[n_var_parts])
8826 	      && hard_regno_nregs (REGNO (loc[n_var_parts]), mode) * 2
8827 		 == hard_regno_nregs (REGNO (loc[n_var_parts]), wider_mode)
8828 	      && end_hard_regno (mode, REGNO (loc[n_var_parts]))
8829 		 == REGNO (loc2))
8830 	    {
8831 	      if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
8832 		new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
8833 					   mode, 0);
8834 	      else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
8835 		new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
8836 	      if (new_loc)
8837 		{
8838 		  if (!REG_P (new_loc)
8839 		      || REGNO (new_loc) != REGNO (loc[n_var_parts]))
8840 		    new_loc = NULL;
8841 		  else
8842 		    REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
8843 		}
8844 	    }
8845 	  else if (MEM_P (loc[n_var_parts])
8846 		   && GET_CODE (XEXP (loc2, 0)) == PLUS
8847 		   && REG_P (XEXP (XEXP (loc2, 0), 0))
8848 		   && poly_int_rtx_p (XEXP (XEXP (loc2, 0), 1), &offset2))
8849 	    {
8850 	      poly_int64 end1 = size;
8851 	      rtx base1 = strip_offset_and_add (XEXP (loc[n_var_parts], 0),
8852 						&end1);
8853 	      if (rtx_equal_p (base1, XEXP (XEXP (loc2, 0), 0))
8854 		  && known_eq (end1, offset2))
8855 		new_loc = adjust_address_nv (loc[n_var_parts],
8856 					     wider_mode, 0);
8857 	    }
8858 
8859 	  if (new_loc)
8860 	    {
8861 	      loc[n_var_parts] = new_loc;
8862 	      mode = wider_mode;
8863 	      last_limit = offsets[n_var_parts] + wider_size;
8864 	      i = j;
8865 	    }
8866 	}
8867       ++n_var_parts;
8868     }
8869   poly_uint64 type_size_unit
8870     = tree_to_poly_uint64 (TYPE_SIZE_UNIT (TREE_TYPE (decl)));
8871   if (maybe_lt (poly_uint64 (last_limit), type_size_unit))
8872     complete = false;
8873 
8874   if (! flag_var_tracking_uninit)
8875     initialized = VAR_INIT_STATUS_INITIALIZED;
8876 
8877   note_vl = NULL_RTX;
8878   if (!complete)
8879     note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX, initialized);
8880   else if (n_var_parts == 1)
8881     {
8882       rtx expr_list;
8883 
8884       if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
8885 	expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
8886       else
8887 	expr_list = loc[0];
8888 
8889       note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list, initialized);
8890     }
8891   else if (n_var_parts)
8892     {
8893       rtx parallel;
8894 
8895       for (i = 0; i < n_var_parts; i++)
8896 	loc[i]
8897 	  = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
8898 
8899       parallel = gen_rtx_PARALLEL (VOIDmode,
8900 				   gen_rtvec_v (n_var_parts, loc));
8901       note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
8902 				      parallel, initialized);
8903     }
8904 
8905   if (where != EMIT_NOTE_BEFORE_INSN)
8906     {
8907       note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8908       if (where == EMIT_NOTE_AFTER_CALL_INSN)
8909 	NOTE_DURING_CALL_P (note) = true;
8910     }
8911   else
8912     {
8913       /* Make sure that the call related notes come first.  */
8914       while (NEXT_INSN (insn)
8915 	     && NOTE_P (insn)
8916 	     && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8917 	     && NOTE_DURING_CALL_P (insn))
8918 	insn = NEXT_INSN (insn);
8919       if (NOTE_P (insn)
8920 	  && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8921 	  && NOTE_DURING_CALL_P (insn))
8922 	note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8923       else
8924 	note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
8925     }
8926   NOTE_VAR_LOCATION (note) = note_vl;
8927 
8928   set_dv_changed (var->dv, false);
8929   gcc_assert (var->in_changed_variables);
8930   var->in_changed_variables = false;
8931   changed_variables->clear_slot (varp);
8932 
8933   /* Continue traversing the hash table.  */
8934   return 1;
8935 }
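/* To recap the note payloads built above: an incomplete variable gets
   a VAR_LOCATION with a NULL location; a single part at offset 0 gets
   the bare location; a single part at a nonzero offset gets an
   EXPR_LIST of the location and the offset; and several parts get a
   PARALLEL of such EXPR_LISTs, e.g. roughly

     (var_location D (parallel [(expr_list (reg:SI 0) (const_int 0))
				(expr_list (mem:SI ...) (const_int 4))]))

   from which the DWARF location lists are generated later.  */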
8936 
8937 /* While traversing changed_variables, push onto CHANGED_VALUES_STACK
8938    (a stack of RTX values) entries that aren't user variables.  */
8939 
8940 int
8941 var_track_values_to_stack (variable **slot,
8942 			   vec<rtx, va_heap> *changed_values_stack)
8943 {
8944   variable *var = *slot;
8945 
8946   if (var->onepart == ONEPART_VALUE)
8947     changed_values_stack->safe_push (dv_as_value (var->dv));
8948   else if (var->onepart == ONEPART_DEXPR)
8949     changed_values_stack->safe_push (DECL_RTL_KNOWN_SET (dv_as_decl (var->dv)));
8950 
8951   return 1;
8952 }
8953 
8954 /* Remove from changed_variables the entry whose DV corresponds to
8955    value or debug_expr VAL.  */
8956 static void
8957 remove_value_from_changed_variables (rtx val)
8958 {
8959   decl_or_value dv = dv_from_rtx (val);
8960   variable **slot;
8961   variable *var;
8962 
8963   slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
8964 						NO_INSERT);
8965   var = *slot;
8966   var->in_changed_variables = false;
8967   changed_variables->clear_slot (slot);
8968 }
8969 
8970 /* If VAL (a value or debug_expr) has backlinks to variables actively
8971    dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
8972    changed, adding to CHANGED_VALUES_STACK any dependencies that may
8973    have dependencies of their own to notify.  */
8974 
8975 static void
8976 notify_dependents_of_changed_value (rtx val, variable_table_type *htab,
8977 				    vec<rtx, va_heap> *changed_values_stack)
8978 {
8979   variable **slot;
8980   variable *var;
8981   loc_exp_dep *led;
8982   decl_or_value dv = dv_from_rtx (val);
8983 
8984   slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
8985 						NO_INSERT);
8986   if (!slot)
8987     slot = htab->find_slot_with_hash (dv, dv_htab_hash (dv), NO_INSERT);
8988   if (!slot)
8989     slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv),
8990 						NO_INSERT);
8991   var = *slot;
8992 
8993   while ((led = VAR_LOC_DEP_LST (var)))
8994     {
8995       decl_or_value ldv = led->dv;
8996       variable *ivar;
8997 
8998       /* Deactivate and remove the backlink, as it was "used up".  It
8999 	 makes no sense to attempt to notify the same entity again:
9000 	 either it will be recomputed and re-register an active
9001 	 dependency, or it will still have the changed mark.  */
9002       if (led->next)
9003 	led->next->pprev = led->pprev;
9004       if (led->pprev)
9005 	*led->pprev = led->next;
9006       led->next = NULL;
9007       led->pprev = NULL;
9008 
9009       if (dv_changed_p (ldv))
9010 	continue;
9011 
9012       switch (dv_onepart_p (ldv))
9013 	{
9014 	case ONEPART_VALUE:
9015 	case ONEPART_DEXPR:
9016 	  set_dv_changed (ldv, true);
9017 	  changed_values_stack->safe_push (dv_as_rtx (ldv));
9018 	  break;
9019 
9020 	case ONEPART_VDECL:
9021 	  ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
9022 	  gcc_checking_assert (!VAR_LOC_DEP_LST (ivar));
9023 	  variable_was_changed (ivar, NULL);
9024 	  break;
9025 
9026 	case NOT_ONEPART:
9027 	  delete led;
9028 	  ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
9029 	  if (ivar)
9030 	    {
9031 	      int i = ivar->n_var_parts;
9032 	      while (i--)
9033 		{
9034 		  rtx loc = ivar->var_part[i].cur_loc;
9035 
9036 		  if (loc && GET_CODE (loc) == MEM
9037 		      && XEXP (loc, 0) == val)
9038 		    {
9039 		      variable_was_changed (ivar, NULL);
9040 		      break;
9041 		    }
9042 		}
9043 	    }
9044 	  break;
9045 
9046 	default:
9047 	  gcc_unreachable ();
9048 	}
9049     }
9050 }
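/* Summing up the switch above: one-part values and debug exprs are
   queued on CHANGED_VALUES_STACK so that their own dependents get
   visited in turn; one-part variable decls carry no backlinks of
   their own and are marked changed directly; and a NOT_ONEPART
   variable is only notified when the changed value appears as the
   address of one of its current MEM locations.  */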
9051 
9052 /* Take out of changed_variables any entries that don't refer to user
9053    variables.  Back-propagate change notifications from values and
9054    debug_exprs to their active dependencies in HTAB or in
9055    CHANGED_VARIABLES.  */
9056 
9057 static void
9058 process_changed_values (variable_table_type *htab)
9059 {
9060   int i, n;
9061   rtx val;
9062   auto_vec<rtx, 20> changed_values_stack;
9063 
9064   /* Move values from changed_variables to changed_values_stack.  */
9065   changed_variables
9066     ->traverse <vec<rtx, va_heap>*, var_track_values_to_stack>
9067       (&changed_values_stack);
9068 
9069   /* Back-propagate change notifications in values while popping
9070      them from the stack.  */
9071   for (n = i = changed_values_stack.length ();
9072        i > 0; i = changed_values_stack.length ())
9073     {
9074       val = changed_values_stack.pop ();
9075       notify_dependents_of_changed_value (val, htab, &changed_values_stack);
9076 
9077       /* This condition will hold when visiting each of the entries
9078 	 originally in changed_variables.  We can't remove them
9079 	 earlier because this could drop the backlinks before we got a
9080 	 chance to use them.  */
9081       if (i == n)
9082 	{
9083 	  remove_value_from_changed_variables (val);
9084 	  n--;
9085 	}
9086     }
9087 }
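/* A short trace, assuming VALUEs V1 and V2 start out in
   changed_variables and V1 has an active dependent V3: the stack
   starts as [V1, V2] with N == 2; popping V2 pushes nothing and,
   since I == N, V2 is removed (N becomes 1); popping V1 pushes V3 and
   V1 is likewise removed (N becomes 0); popping V3 happens with
   I > N, so V3, which was never in changed_variables, is left
   alone.  */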
9088 
9089 /* Emit a NOTE_INSN_VAR_LOCATION note for each variable in the chain
9090    CHANGED_VARIABLES and delete this chain.  WHERE specifies whether
9091    the notes shall be emitted before or after instruction INSN.  */
9092 
9093 static void
9094 emit_notes_for_changes (rtx_insn *insn, enum emit_note_where where,
9095 			shared_hash *vars)
9096 {
9097   emit_note_data data;
9098   variable_table_type *htab = shared_hash_htab (vars);
9099 
9100   if (changed_variables->is_empty ())
9101     return;
9102 
9103   if (MAY_HAVE_DEBUG_BIND_INSNS)
9104     process_changed_values (htab);
9105 
9106   data.insn = insn;
9107   data.where = where;
9108   data.vars = htab;
9109 
9110   changed_variables
9111     ->traverse <emit_note_data*, emit_note_insn_var_location> (&data);
9112 }
9113 
9114 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
9115    same variable in hash table NEW_VARS or is not there at all.  */
9116 
9117 int
9118 emit_notes_for_differences_1 (variable **slot, variable_table_type *new_vars)
9119 {
9120   variable *old_var, *new_var;
9121 
9122   old_var = *slot;
9123   new_var = new_vars->find_with_hash (old_var->dv, dv_htab_hash (old_var->dv));
9124 
9125   if (!new_var)
9126     {
9127       /* Variable has disappeared.  */
9128       variable *empty_var = NULL;
9129 
9130       if (old_var->onepart == ONEPART_VALUE
9131 	  || old_var->onepart == ONEPART_DEXPR)
9132 	{
9133 	  empty_var = variable_from_dropped (old_var->dv, NO_INSERT);
9134 	  if (empty_var)
9135 	    {
9136 	      gcc_checking_assert (!empty_var->in_changed_variables);
9137 	      if (!VAR_LOC_1PAUX (old_var))
9138 		{
9139 		  VAR_LOC_1PAUX (old_var) = VAR_LOC_1PAUX (empty_var);
9140 		  VAR_LOC_1PAUX (empty_var) = NULL;
9141 		}
9142 	      else
9143 		gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
9144 	    }
9145 	}
9146 
9147       if (!empty_var)
9148 	{
9149 	  empty_var = onepart_pool_allocate (old_var->onepart);
9150 	  empty_var->dv = old_var->dv;
9151 	  empty_var->refcount = 0;
9152 	  empty_var->n_var_parts = 0;
9153 	  empty_var->onepart = old_var->onepart;
9154 	  empty_var->in_changed_variables = false;
9155 	}
9156 
9157       if (empty_var->onepart)
9158 	{
9159 	  /* Propagate the auxiliary data to (ultimately)
9160 	     changed_variables.  */
9161 	  empty_var->var_part[0].loc_chain = NULL;
9162 	  empty_var->var_part[0].cur_loc = NULL;
9163 	  VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (old_var);
9164 	  VAR_LOC_1PAUX (old_var) = NULL;
9165 	}
9166       variable_was_changed (empty_var, NULL);
9167       /* Continue traversing the hash table.  */
9168       return 1;
9169     }
9170   /* Update cur_loc and one-part auxiliary data, before new_var goes
9171      through variable_was_changed.  */
9172   if (old_var != new_var && new_var->onepart)
9173     {
9174       gcc_checking_assert (VAR_LOC_1PAUX (new_var) == NULL);
9175       VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (old_var);
9176       VAR_LOC_1PAUX (old_var) = NULL;
9177       new_var->var_part[0].cur_loc = old_var->var_part[0].cur_loc;
9178     }
9179   if (variable_different_p (old_var, new_var))
9180     variable_was_changed (new_var, NULL);
9181 
9182   /* Continue traversing the hash table.  */
9183   return 1;
9184 }
9185 
9186 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
9187    table OLD_VARS.  */
9188 
9189 int
9190 emit_notes_for_differences_2 (variable **slot, variable_table_type *old_vars)
9191 {
9192   variable *old_var, *new_var;
9193 
9194   new_var = *slot;
9195   old_var = old_vars->find_with_hash (new_var->dv, dv_htab_hash (new_var->dv));
9196   if (!old_var)
9197     {
9198       int i;
9199       for (i = 0; i < new_var->n_var_parts; i++)
9200 	new_var->var_part[i].cur_loc = NULL;
9201       variable_was_changed (new_var, NULL);
9202     }
9203 
9204   /* Continue traversing the hash table.  */
9205   return 1;
9206 }
9207 
9208 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
9209    NEW_SET.  */
9210 
9211 static void
9212 emit_notes_for_differences (rtx_insn *insn, dataflow_set *old_set,
9213 			    dataflow_set *new_set)
9214 {
9215   shared_hash_htab (old_set->vars)
9216     ->traverse <variable_table_type *, emit_notes_for_differences_1>
9217       (shared_hash_htab (new_set->vars));
9218   shared_hash_htab (new_set->vars)
9219     ->traverse <variable_table_type *, emit_notes_for_differences_2>
9220       (shared_hash_htab (old_set->vars));
9221   emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
9222 }
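/* The two traversals together cover the symmetric difference of the
   sets: the first flags variables that disappeared from or changed
   between OLD_SET and NEW_SET, the second flags variables that only
   appear in NEW_SET; emit_notes_for_changes then emits one note per
   flagged variable.  */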
9223 
9224 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION.  */
9225 
9226 static rtx_insn *
9227 next_non_note_insn_var_location (rtx_insn *insn)
9228 {
9229   while (insn)
9230     {
9231       insn = NEXT_INSN (insn);
9232       if (insn == 0
9233 	  || !NOTE_P (insn)
9234 	  || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION)
9235 	break;
9236     }
9237 
9238   return insn;
9239 }
9240 
9241 /* Emit the notes for changes of location parts in the basic block BB.  */
9242 
9243 static void
9244 emit_notes_in_bb (basic_block bb, dataflow_set *set)
9245 {
9246   unsigned int i;
9247   micro_operation *mo;
9248 
9249   dataflow_set_clear (set);
9250   dataflow_set_copy (set, &VTI (bb)->in);
9251 
9252   FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
9253     {
9254       rtx_insn *insn = mo->insn;
9255       rtx_insn *next_insn = next_non_note_insn_var_location (insn);
9256 
9257       switch (mo->type)
9258 	{
9259 	  case MO_CALL:
9260 	    dataflow_set_clear_at_call (set, insn);
9261 	    emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
9262 	    {
9263 	      rtx arguments = mo->u.loc, *p = &arguments;
9264 	      while (*p)
9265 		{
9266 		  XEXP (XEXP (*p, 0), 1)
9267 		    = vt_expand_loc (XEXP (XEXP (*p, 0), 1),
9268 				     shared_hash_htab (set->vars));
9269 		  /* If expansion is successful, keep it in the list.  */
9270 		  if (XEXP (XEXP (*p, 0), 1))
9271 		    {
9272 		      XEXP (XEXP (*p, 0), 1)
9273 			= copy_rtx_if_shared (XEXP (XEXP (*p, 0), 1));
9274 		      p = &XEXP (*p, 1);
9275 		    }
9276 		  /* Otherwise, if the following item is the data_value for it,
9277 		     drop it too.  */
9278 		  else if (XEXP (*p, 1)
9279 			   && REG_P (XEXP (XEXP (*p, 0), 0))
9280 			   && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
9281 			   && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
9282 					   0))
9283 			   && REGNO (XEXP (XEXP (*p, 0), 0))
9284 			      == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
9285 						    0), 0)))
9286 		    *p = XEXP (XEXP (*p, 1), 1);
9287 		  /* Just drop this item.  */
9288 		  else
9289 		    *p = XEXP (*p, 1);
9290 		}
9291 	      add_reg_note (insn, REG_CALL_ARG_LOCATION, arguments);
9292 	    }
9293 	    break;
9294 
9295 	  case MO_USE:
9296 	    {
9297 	      rtx loc = mo->u.loc;
9298 
9299 	      if (REG_P (loc))
9300 		var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9301 	      else
9302 		var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9303 
9304 	      emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9305 	    }
9306 	    break;
9307 
9308 	  case MO_VAL_LOC:
9309 	    {
9310 	      rtx loc = mo->u.loc;
9311 	      rtx val, vloc;
9312 	      tree var;
9313 
9314 	      if (GET_CODE (loc) == CONCAT)
9315 		{
9316 		  val = XEXP (loc, 0);
9317 		  vloc = XEXP (loc, 1);
9318 		}
9319 	      else
9320 		{
9321 		  val = NULL_RTX;
9322 		  vloc = loc;
9323 		}
9324 
9325 	      var = PAT_VAR_LOCATION_DECL (vloc);
9326 
9327 	      clobber_variable_part (set, NULL_RTX,
9328 				     dv_from_decl (var), 0, NULL_RTX);
9329 	      if (val)
9330 		{
9331 		  if (VAL_NEEDS_RESOLUTION (loc))
9332 		    val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
9333 		  set_variable_part (set, val, dv_from_decl (var), 0,
9334 				     VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9335 				     INSERT);
9336 		}
9337 	      else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
9338 		set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
9339 				   dv_from_decl (var), 0,
9340 				   VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9341 				   INSERT);
9342 
9343 	      emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9344 	    }
9345 	    break;
9346 
9347 	  case MO_VAL_USE:
9348 	    {
9349 	      rtx loc = mo->u.loc;
9350 	      rtx val, vloc, uloc;
9351 
9352 	      vloc = uloc = XEXP (loc, 1);
9353 	      val = XEXP (loc, 0);
9354 
9355 	      if (GET_CODE (val) == CONCAT)
9356 		{
9357 		  uloc = XEXP (val, 1);
9358 		  val = XEXP (val, 0);
9359 		}
9360 
9361 	      if (VAL_NEEDS_RESOLUTION (loc))
9362 		val_resolve (set, val, vloc, insn);
9363 	      else
9364 		val_store (set, val, uloc, insn, false);
9365 
9366 	      if (VAL_HOLDS_TRACK_EXPR (loc))
9367 		{
9368 		  if (GET_CODE (uloc) == REG)
9369 		    var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9370 				 NULL);
9371 		  else if (GET_CODE (uloc) == MEM)
9372 		    var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9373 				 NULL);
9374 		}
9375 
9376 	      emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9377 	    }
9378 	    break;
9379 
9380 	  case MO_VAL_SET:
9381 	    {
9382 	      rtx loc = mo->u.loc;
9383 	      rtx val, vloc, uloc;
9384 	      rtx dstv, srcv;
9385 
9386 	      vloc = loc;
9387 	      uloc = XEXP (vloc, 1);
9388 	      val = XEXP (vloc, 0);
9389 	      vloc = uloc;
9390 
9391 	      if (GET_CODE (uloc) == SET)
9392 		{
9393 		  dstv = SET_DEST (uloc);
9394 		  srcv = SET_SRC (uloc);
9395 		}
9396 	      else
9397 		{
9398 		  dstv = uloc;
9399 		  srcv = NULL;
9400 		}
9401 
9402 	      if (GET_CODE (val) == CONCAT)
9403 		{
9404 		  dstv = vloc = XEXP (val, 1);
9405 		  val = XEXP (val, 0);
9406 		}
9407 
9408 	      if (GET_CODE (vloc) == SET)
9409 		{
9410 		  srcv = SET_SRC (vloc);
9411 
9412 		  gcc_assert (val != srcv);
9413 		  gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
9414 
9415 		  dstv = vloc = SET_DEST (vloc);
9416 
9417 		  if (VAL_NEEDS_RESOLUTION (loc))
9418 		    val_resolve (set, val, srcv, insn);
9419 		}
9420 	      else if (VAL_NEEDS_RESOLUTION (loc))
9421 		{
9422 		  gcc_assert (GET_CODE (uloc) == SET
9423 			      && GET_CODE (SET_SRC (uloc)) == REG);
9424 		  val_resolve (set, val, SET_SRC (uloc), insn);
9425 		}
9426 
9427 	      if (VAL_HOLDS_TRACK_EXPR (loc))
9428 		{
9429 		  if (VAL_EXPR_IS_CLOBBERED (loc))
9430 		    {
9431 		      if (REG_P (uloc))
9432 			var_reg_delete (set, uloc, true);
9433 		      else if (MEM_P (uloc))
9434 			{
9435 			  gcc_assert (MEM_P (dstv));
9436 			  gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
9437 			  var_mem_delete (set, dstv, true);
9438 			}
9439 		    }
9440 		  else
9441 		    {
9442 		      bool copied_p = VAL_EXPR_IS_COPIED (loc);
9443 		      rtx src = NULL, dst = uloc;
9444 		      enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
9445 
9446 		      if (GET_CODE (uloc) == SET)
9447 			{
9448 			  src = SET_SRC (uloc);
9449 			  dst = SET_DEST (uloc);
9450 			}
9451 
9452 		      if (copied_p)
9453 			{
9454 			  status = find_src_status (set, src);
9455 
9456 			  src = find_src_set_src (set, src);
9457 			}
9458 
9459 		      if (REG_P (dst))
9460 			var_reg_delete_and_set (set, dst, !copied_p,
9461 						status, srcv);
9462 		      else if (MEM_P (dst))
9463 			{
9464 			  gcc_assert (MEM_P (dstv));
9465 			  gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
9466 			  var_mem_delete_and_set (set, dstv, !copied_p,
9467 						  status, srcv);
9468 			}
9469 		    }
9470 		}
9471 	      else if (REG_P (uloc))
9472 		var_regno_delete (set, REGNO (uloc));
9473 	      else if (MEM_P (uloc))
9474 		{
9475 		  gcc_checking_assert (GET_CODE (vloc) == MEM);
9476 		  gcc_checking_assert (vloc == dstv);
9477 		  if (vloc != dstv)
9478 		    clobber_overlapping_mems (set, vloc);
9479 		}
9480 
9481 	      val_store (set, val, dstv, insn, true);
9482 
9483 	      emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9484 				      set->vars);
9485 	    }
9486 	    break;
9487 
9488 	  case MO_SET:
9489 	    {
9490 	      rtx loc = mo->u.loc;
9491 	      rtx set_src = NULL;
9492 
9493 	      if (GET_CODE (loc) == SET)
9494 		{
9495 		  set_src = SET_SRC (loc);
9496 		  loc = SET_DEST (loc);
9497 		}
9498 
9499 	      if (REG_P (loc))
9500 		var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9501 					set_src);
9502 	      else
9503 		var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9504 					set_src);
9505 
9506 	      emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9507 				      set->vars);
9508 	    }
9509 	    break;
9510 
9511 	  case MO_COPY:
9512 	    {
9513 	      rtx loc = mo->u.loc;
9514 	      enum var_init_status src_status;
9515 	      rtx set_src = NULL;
9516 
9517 	      if (GET_CODE (loc) == SET)
9518 		{
9519 		  set_src = SET_SRC (loc);
9520 		  loc = SET_DEST (loc);
9521 		}
9522 
9523 	      src_status = find_src_status (set, set_src);
9524 	      set_src = find_src_set_src (set, set_src);
9525 
9526 	      if (REG_P (loc))
9527 		var_reg_delete_and_set (set, loc, false, src_status, set_src);
9528 	      else
9529 		var_mem_delete_and_set (set, loc, false, src_status, set_src);
9530 
9531 	      emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9532 				      set->vars);
9533 	    }
9534 	    break;
9535 
9536 	  case MO_USE_NO_VAR:
9537 	    {
9538 	      rtx loc = mo->u.loc;
9539 
9540 	      if (REG_P (loc))
9541 		var_reg_delete (set, loc, false);
9542 	      else
9543 		var_mem_delete (set, loc, false);
9544 
9545 	      emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9546 	    }
9547 	    break;
9548 
9549 	  case MO_CLOBBER:
9550 	    {
9551 	      rtx loc = mo->u.loc;
9552 
9553 	      if (REG_P (loc))
9554 		var_reg_delete (set, loc, true);
9555 	      else
9556 		var_mem_delete (set, loc, true);
9557 
9558 	      emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9559 				      set->vars);
9560 	    }
9561 	    break;
9562 
9563 	  case MO_ADJUST:
9564 	    set->stack_adjust += mo->u.adjust;
9565 	    break;
9566 	}
9567     }
9568 }
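/* Note that the loop above re-simulates BB's micro operations, in the
   order in which they were collected, against SET (a copy of the
   block's IN set made at the top), emitting notes whenever the
   simulated locations change; the OUT sets computed by the dataflow
   analysis are not consulted again here.  */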
9569 
9570 /* Emit notes for the whole function.  */
9571 
9572 static void
9573 vt_emit_notes (void)
9574 {
9575   basic_block bb;
9576   dataflow_set cur;
9577 
9578   gcc_assert (changed_variables->is_empty ());
9579 
9580   /* Free memory occupied by the out hash tables, as they aren't used
9581      anymore.  */
9582   FOR_EACH_BB_FN (bb, cfun)
9583     dataflow_set_clear (&VTI (bb)->out);
9584 
9585   /* Enable emitting notes by functions (mainly by set_variable_part and
9586      delete_variable_part).  */
9587   emit_notes = true;
9588 
9589   if (MAY_HAVE_DEBUG_BIND_INSNS)
9590     dropped_values = new variable_table_type (cselib_get_next_uid () * 2);
9591 
9592   dataflow_set_init (&cur);
9593 
9594   FOR_EACH_BB_FN (bb, cfun)
9595     {
9596       /* Emit the notes for changes of variable locations between two
9597 	 subsequent basic blocks.  */
9598       emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
9599 
9600       if (MAY_HAVE_DEBUG_BIND_INSNS)
9601 	local_get_addr_cache = new hash_map<rtx, rtx>;
9602 
9603       /* Emit the notes for the changes in the basic block itself.  */
9604       emit_notes_in_bb (bb, &cur);
9605 
9606       if (MAY_HAVE_DEBUG_BIND_INSNS)
9607 	delete local_get_addr_cache;
9608       local_get_addr_cache = NULL;
9609 
9610       /* Free memory occupied by the in hash table; we won't need it
9611 	 again.  */
9612       dataflow_set_clear (&VTI (bb)->in);
9613     }
9614 
9615   if (flag_checking)
9616     shared_hash_htab (cur.vars)
9617       ->traverse <variable_table_type *, emit_notes_for_differences_1>
9618 	(shared_hash_htab (empty_shared_hash));
9619 
9620   dataflow_set_destroy (&cur);
9621 
9622   if (MAY_HAVE_DEBUG_BIND_INSNS)
9623     delete dropped_values;
9624   dropped_values = NULL;
9625 
9626   emit_notes = false;
9627 }
9628 
9629 /* If there is a declaration and offset associated with register/memory RTL,
9630    assign the declaration to *DECLP and the offset to *OFFSETP, and return true.  */
9631 
9632 static bool
9633 vt_get_decl_and_offset (rtx rtl, tree *declp, poly_int64 *offsetp)
9634 {
9635   if (REG_P (rtl))
9636     {
9637       if (REG_ATTRS (rtl))
9638 	{
9639 	  *declp = REG_EXPR (rtl);
9640 	  *offsetp = REG_OFFSET (rtl);
9641 	  return true;
9642 	}
9643     }
9644   else if (GET_CODE (rtl) == PARALLEL)
9645     {
9646       tree decl = NULL_TREE;
9647       HOST_WIDE_INT offset = MAX_VAR_PARTS;
9648       int len = XVECLEN (rtl, 0), i;
9649 
9650       for (i = 0; i < len; i++)
9651 	{
9652 	  rtx reg = XEXP (XVECEXP (rtl, 0, i), 0);
9653 	  if (!REG_P (reg) || !REG_ATTRS (reg))
9654 	    break;
9655 	  if (!decl)
9656 	    decl = REG_EXPR (reg);
9657 	  if (REG_EXPR (reg) != decl)
9658 	    break;
9659 	  HOST_WIDE_INT this_offset;
9660 	  if (!track_offset_p (REG_OFFSET (reg), &this_offset))
9661 	    break;
9662 	  offset = MIN (offset, this_offset);
9663 	}
9664 
9665       if (i == len)
9666 	{
9667 	  *declp = decl;
9668 	  *offsetp = offset;
9669 	  return true;
9670 	}
9671     }
9672   else if (MEM_P (rtl))
9673     {
9674       if (MEM_ATTRS (rtl))
9675 	{
9676 	  *declp = MEM_EXPR (rtl);
9677 	  *offsetp = int_mem_offset (rtl);
9678 	  return true;
9679 	}
9680     }
9681   return false;
9682 }
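/* For instance, given a register with attached attributes such as
   (reg:SI 1 dx [ x+4 ]), this yields x and offset 4 from REG_EXPR and
   REG_OFFSET; for a PARALLEL, all pieces must agree on a single decl,
   and the smallest tracked offset among the pieces is returned.  */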
9683 
9684 /* Record the value for the ENTRY_VALUE of RTL as a global equivalence
9685    of VAL.  */
9686 
9687 static void
9688 record_entry_value (cselib_val *val, rtx rtl)
9689 {
9690   rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));
9691 
9692   ENTRY_VALUE_EXP (ev) = rtl;
9693 
9694   cselib_add_permanent_equiv (val, ev, get_insns ());
9695 }
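/* The equivalence recorded here is permanent, i.e. it holds for the
   whole function: VAL equals whatever RTL contained on entry.  Debug
   info emission can later express such locations with a DWARF
   entry-value operation.  */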
9696 
9697 /* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK.  */
9698 
9699 static void
9700 vt_add_function_parameter (tree parm)
9701 {
9702   rtx decl_rtl = DECL_RTL_IF_SET (parm);
9703   rtx incoming = DECL_INCOMING_RTL (parm);
9704   tree decl;
9705   machine_mode mode;
9706   poly_int64 offset;
9707   dataflow_set *out;
9708   decl_or_value dv;
9709   bool incoming_ok = true;
9710 
9711   if (TREE_CODE (parm) != PARM_DECL)
9712     return;
9713 
9714   if (!decl_rtl || !incoming)
9715     return;
9716 
9717   if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
9718     return;
9719 
9720   /* If there is a DRAP register or a pseudo in internal_arg_pointer,
9721      rewrite the incoming location of parameters passed on the stack
9722      into MEMs based on the argument pointer, so that incoming doesn't
9723      depend on a pseudo.  */
9724   poly_int64 incoming_offset = 0;
9725   if (MEM_P (incoming)
9726       && (strip_offset (XEXP (incoming, 0), &incoming_offset)
9727 	  == crtl->args.internal_arg_pointer))
9728     {
9729       HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
9730       incoming
9731 	= replace_equiv_address_nv (incoming,
9732 				    plus_constant (Pmode,
9733 						   arg_pointer_rtx,
9734 						   off + incoming_offset));
9735     }
9736 
9737 #ifdef HAVE_window_save
9738   /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
9739      If the target machine has an explicit window save instruction, the
9740      actual entry value is the corresponding OUTGOING_REGNO instead.  */
9741   if (HAVE_window_save && !crtl->uses_only_leaf_regs)
9742     {
9743       if (REG_P (incoming)
9744 	  && HARD_REGISTER_P (incoming)
9745 	  && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
9746 	{
9747 	  parm_reg p;
9748 	  p.incoming = incoming;
9749 	  incoming
9750 	    = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
9751 				  OUTGOING_REGNO (REGNO (incoming)), 0);
9752 	  p.outgoing = incoming;
9753 	  vec_safe_push (windowed_parm_regs, p);
9754 	}
9755       else if (GET_CODE (incoming) == PARALLEL)
9756 	{
9757 	  rtx outgoing
9758 	    = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (XVECLEN (incoming, 0)));
9759 	  int i;
9760 
9761 	  for (i = 0; i < XVECLEN (incoming, 0); i++)
9762 	    {
9763 	      rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9764 	      parm_reg p;
9765 	      p.incoming = reg;
9766 	      reg = gen_rtx_REG_offset (reg, GET_MODE (reg),
9767 					OUTGOING_REGNO (REGNO (reg)), 0);
9768 	      p.outgoing = reg;
9769 	      XVECEXP (outgoing, 0, i)
9770 		= gen_rtx_EXPR_LIST (VOIDmode, reg,
9771 				     XEXP (XVECEXP (incoming, 0, i), 1));
9772 	      vec_safe_push (windowed_parm_regs, p);
9773 	    }
9774 
9775 	  incoming = outgoing;
9776 	}
9777       else if (MEM_P (incoming)
9778 	       && REG_P (XEXP (incoming, 0))
9779 	       && HARD_REGISTER_P (XEXP (incoming, 0)))
9780 	{
9781 	  rtx reg = XEXP (incoming, 0);
9782 	  if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
9783 	    {
9784 	      parm_reg p;
9785 	      p.incoming = reg;
9786 	      reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
9787 	      p.outgoing = reg;
9788 	      vec_safe_push (windowed_parm_regs, p);
9789 	      incoming = replace_equiv_address_nv (incoming, reg);
9790 	    }
9791 	}
9792     }
9793 #endif
9794 
9795   if (!vt_get_decl_and_offset (incoming, &decl, &offset))
9796     {
9797       incoming_ok = false;
9798       if (MEM_P (incoming))
9799 	{
9800 	  /* This means the argument is passed by invisible reference.  */
9801 	  offset = 0;
9802 	  decl = parm;
9803 	}
9804       else
9805 	{
9806 	  if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
9807 	    return;
9808 	  offset += byte_lowpart_offset (GET_MODE (incoming),
9809 					 GET_MODE (decl_rtl));
9810 	}
9811     }
9812 
9813   if (!decl)
9814     return;
9815 
9816   if (parm != decl)
9817     {
9818       /* If that DECL_RTL wasn't a pseudo that got spilled to
9819 	 memory, bail out.  Otherwise, the spill slot sharing code
9820 	 will force the memory to reference spill_slot_decl (%sfp),
9821 	 so we don't match above.  That's ok, the pseudo must have
9822 	 referenced the entire parameter, so just reset OFFSET.  */
9823       if (decl != get_spill_slot_decl (false))
9824         return;
9825       offset = 0;
9826     }
9827 
9828   HOST_WIDE_INT const_offset;
9829   if (!track_loc_p (incoming, parm, offset, false, &mode, &const_offset))
9830     return;
9831 
9832   out = &VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out;
9833 
9834   dv = dv_from_decl (parm);
9835 
9836   if (target_for_debug_bind (parm)
9837       /* We can't deal with these right now, because this kind of
9838 	 variable is single-part.  ??? We could handle parallels
9839 	 that describe multiple locations for the same single
9840 	 value, but ATM we don't.  */
9841       && GET_CODE (incoming) != PARALLEL)
9842     {
9843       cselib_val *val;
9844       rtx lowpart;
9845 
9846       /* ??? We shouldn't ever hit this, but it may happen because
9847 	 arguments passed by invisible reference aren't dealt with
9848 	 above: incoming-rtl will have Pmode rather than the
9849 	 expected mode for the type.  */
9850       if (const_offset)
9851 	return;
9852 
9853       lowpart = var_lowpart (mode, incoming);
9854       if (!lowpart)
9855 	return;
9856 
9857       val = cselib_lookup_from_insn (lowpart, mode, true,
9858 				     VOIDmode, get_insns ());
9859 
9860       /* ??? Float-typed values in memory are not handled by
9861 	 cselib.  */
9862       if (val)
9863 	{
9864 	  preserve_value (val);
9865 	  set_variable_part (out, val->val_rtx, dv, const_offset,
9866 			     VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9867 	  dv = dv_from_value (val->val_rtx);
9868 	}
9869 
9870       if (MEM_P (incoming))
9871 	{
9872 	  val = cselib_lookup_from_insn (XEXP (incoming, 0), mode, true,
9873 					 VOIDmode, get_insns ());
9874 	  if (val)
9875 	    {
9876 	      preserve_value (val);
9877 	      incoming = replace_equiv_address_nv (incoming, val->val_rtx);
9878 	    }
9879 	}
9880     }
9881 
9882   if (REG_P (incoming))
9883     {
9884       incoming = var_lowpart (mode, incoming);
9885       gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
9886       attrs_list_insert (&out->regs[REGNO (incoming)], dv, const_offset,
9887 			 incoming);
9888       set_variable_part (out, incoming, dv, const_offset,
9889 			 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9890       if (dv_is_value_p (dv))
9891 	{
9892 	  record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv)), incoming);
9893 	  if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
9894 	      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
9895 	    {
9896 	      machine_mode indmode
9897 		= TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
9898 	      rtx mem = gen_rtx_MEM (indmode, incoming);
9899 	      cselib_val *val = cselib_lookup_from_insn (mem, indmode, true,
9900 							 VOIDmode,
9901 							 get_insns ());
9902 	      if (val)
9903 		{
9904 		  preserve_value (val);
9905 		  record_entry_value (val, mem);
9906 		  set_variable_part (out, mem, dv_from_value (val->val_rtx), 0,
9907 				     VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9908 		}
9909 	    }
9910 	}
9911     }
9912   else if (GET_CODE (incoming) == PARALLEL && !dv_onepart_p (dv))
9913     {
9914       int i;
9915 
9916       /* The following code relies on vt_get_decl_and_offset returning true for
9917 	 incoming, which might not always be the case.  */
9918       if (!incoming_ok)
9919 	return;
9920       for (i = 0; i < XVECLEN (incoming, 0); i++)
9921 	{
9922 	  rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9923 	  /* vt_get_decl_and_offset has already checked that the offset
9924 	     is a valid variable part.  */
9925 	  const_offset = get_tracked_reg_offset (reg);
9926 	  gcc_assert (REGNO (reg) < FIRST_PSEUDO_REGISTER);
9927 	  attrs_list_insert (&out->regs[REGNO (reg)], dv, const_offset, reg);
9928 	  set_variable_part (out, reg, dv, const_offset,
9929 			     VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9930 	}
9931     }
9932   else if (MEM_P (incoming))
9933     {
9934       incoming = var_lowpart (mode, incoming);
9935       set_variable_part (out, incoming, dv, const_offset,
9936 			 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9937     }
9938 }
9939 
9940 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK.  */
9941 
9942 static void
9943 vt_add_function_parameters (void)
9944 {
9945   tree parm;
9946 
9947   for (parm = DECL_ARGUMENTS (current_function_decl);
9948        parm; parm = DECL_CHAIN (parm))
9949     vt_add_function_parameter (parm);
9950 
9951   if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
9952     {
9953       tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));
9954 
9955       if (TREE_CODE (vexpr) == INDIRECT_REF)
9956 	vexpr = TREE_OPERAND (vexpr, 0);
9957 
9958       if (TREE_CODE (vexpr) == PARM_DECL
9959 	  && DECL_ARTIFICIAL (vexpr)
9960 	  && !DECL_IGNORED_P (vexpr)
9961 	  && DECL_NAMELESS (vexpr))
9962 	vt_add_function_parameter (vexpr);
9963     }
9964 }
9965 
9966 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
9967    ensure it isn't flushed during cselib_reset_table.
9968    Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
9969    has been eliminated.  */
9970 
9971 static void
9972 vt_init_cfa_base (void)
9973 {
9974   cselib_val *val;
9975 
9976 #ifdef FRAME_POINTER_CFA_OFFSET
9977   cfa_base_rtx = frame_pointer_rtx;
9978   cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
9979 #else
9980   cfa_base_rtx = arg_pointer_rtx;
9981   cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
9982 #endif
9983   if (cfa_base_rtx == hard_frame_pointer_rtx
9984       || !fixed_regs[REGNO (cfa_base_rtx)])
9985     {
9986       cfa_base_rtx = NULL_RTX;
9987       return;
9988     }
9989   if (!MAY_HAVE_DEBUG_BIND_INSNS)
9990     return;
9991 
9992   /* Tell alias analysis that cfa_base_rtx should share
9993      find_base_term value with stack pointer or hard frame pointer.  */
9994   if (!frame_pointer_needed)
9995     vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
9996   else if (!crtl->stack_realign_tried)
9997     vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);
9998 
9999   val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
10000 				 VOIDmode, get_insns ());
10001   preserve_value (val);
10002   cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
10003 }
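/* Note the early return above: if the chosen base register is the
   hard frame pointer or is not a fixed register, ordinary code could
   modify it, so no CFA base register is used at all in that case.  */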
10004 
10005 /* Reemit INSN, a MARKER_DEBUG_INSN, as a note.  */
10006 
10007 static rtx_insn *
10008 reemit_marker_as_note (rtx_insn *insn)
10009 {
10010   gcc_checking_assert (DEBUG_MARKER_INSN_P (insn));
10011 
10012   enum insn_note kind = INSN_DEBUG_MARKER_KIND (insn);
10013 
10014   switch (kind)
10015     {
10016     case NOTE_INSN_BEGIN_STMT:
10017     case NOTE_INSN_INLINE_ENTRY:
10018       {
10019 	rtx_insn *note = NULL;
10020 	if (cfun->debug_nonbind_markers)
10021 	  {
10022 	    note = emit_note_before (kind, insn);
10023 	    NOTE_MARKER_LOCATION (note) = INSN_LOCATION (insn);
10024 	  }
10025 	delete_insn (insn);
10026 	return note;
10027       }
10028 
10029     default:
10030       gcc_unreachable ();
10031     }
10032 }
10033 
10034 /* Allocate and initialize the data structures for variable tracking
10035    and parse the RTL to get the micro operations.  */
10036 
10037 static bool
10038 vt_initialize (void)
10039 {
10040   basic_block bb;
10041   poly_int64 fp_cfa_offset = -1;
10042 
10043   alloc_aux_for_blocks (sizeof (variable_tracking_info));
10044 
10045   empty_shared_hash = shared_hash_pool.allocate ();
10046   empty_shared_hash->refcount = 1;
10047   empty_shared_hash->htab = new variable_table_type (1);
10048   changed_variables = new variable_table_type (10);
10049 
10050   /* Init the IN and OUT sets.  */
10051   FOR_ALL_BB_FN (bb, cfun)
10052     {
10053       VTI (bb)->visited = false;
10054       VTI (bb)->flooded = false;
10055       dataflow_set_init (&VTI (bb)->in);
10056       dataflow_set_init (&VTI (bb)->out);
10057       VTI (bb)->permp = NULL;
10058     }
10059 
10060   if (MAY_HAVE_DEBUG_BIND_INSNS)
10061     {
10062       cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
10063       scratch_regs = BITMAP_ALLOC (NULL);
10064       preserved_values.create (256);
10065       global_get_addr_cache = new hash_map<rtx, rtx>;
10066     }
10067   else
10068     {
10069       scratch_regs = NULL;
10070       global_get_addr_cache = NULL;
10071     }
10072 
10073   if (MAY_HAVE_DEBUG_BIND_INSNS)
10074     {
10075       rtx reg, expr;
10076       int ofst;
10077       cselib_val *val;
10078 
10079 #ifdef FRAME_POINTER_CFA_OFFSET
10080       reg = frame_pointer_rtx;
10081       ofst = FRAME_POINTER_CFA_OFFSET (current_function_decl);
10082 #else
10083       reg = arg_pointer_rtx;
10084       ofst = ARG_POINTER_CFA_OFFSET (current_function_decl);
10085 #endif
10086 
10087       ofst -= INCOMING_FRAME_SP_OFFSET;
10088 
10089       val = cselib_lookup_from_insn (reg, GET_MODE (reg), 1,
10090 				     VOIDmode, get_insns ());
10091       preserve_value (val);
10092       if (reg != hard_frame_pointer_rtx && fixed_regs[REGNO (reg)])
10093 	cselib_preserve_cfa_base_value (val, REGNO (reg));
10094       if (ofst)
10095 	{
10096 	  cselib_val *valsp
10097 	    = cselib_lookup_from_insn (stack_pointer_rtx,
10098 				       GET_MODE (stack_pointer_rtx), 1,
10099 				       VOIDmode, get_insns ());
10100 	  preserve_value (valsp);
10101 	  expr = plus_constant (GET_MODE (reg), reg, ofst);
10102 	  /* This cselib_add_permanent_equiv call needs to be done before
10103 	     the other cselib_add_permanent_equiv a few lines later,
10104 	     because after that one is done, cselib_lookup on this expr
10105 	     will, due to the cselib SP_DERIVED_VALUE_P optimizations,
10106 	     return valsp and so no permanent equivalency will be added.  */
10107 	  cselib_add_permanent_equiv (valsp, expr, get_insns ());
10108 	}
10109 
10110       expr = plus_constant (GET_MODE (stack_pointer_rtx),
10111 			    stack_pointer_rtx, -ofst);
10112       cselib_add_permanent_equiv (val, expr, get_insns ());
10113     }
10114 
10115   /* In order to factor out the adjustments made to the stack pointer or to
10116      the hard frame pointer and thus be able to use DW_OP_fbreg operations
10117      instead of individual location lists, we're going to rewrite MEMs based
10118      on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
10119      or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
10120      resp. arg_pointer_rtx.  We can do this either when there is no frame
10121      pointer in the function and stack adjustments are consistent for all
10122      basic blocks or when there is a frame pointer and no stack realignment.
10123      But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
10124      has been eliminated.  */
10125   if (!frame_pointer_needed)
10126     {
10127       rtx reg, elim;
10128 
10129       if (!vt_stack_adjustments ())
10130 	return false;
10131 
10132 #ifdef FRAME_POINTER_CFA_OFFSET
10133       reg = frame_pointer_rtx;
10134 #else
10135       reg = arg_pointer_rtx;
10136 #endif
10137       elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10138       if (elim != reg)
10139 	{
10140 	  if (GET_CODE (elim) == PLUS)
10141 	    elim = XEXP (elim, 0);
10142 	  if (elim == stack_pointer_rtx)
10143 	    vt_init_cfa_base ();
10144 	}
10145     }
10146   else if (!crtl->stack_realign_tried)
10147     {
10148       rtx reg, elim;
10149 
10150 #ifdef FRAME_POINTER_CFA_OFFSET
10151       reg = frame_pointer_rtx;
10152       fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
10153 #else
10154       reg = arg_pointer_rtx;
10155       fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
10156 #endif
10157       elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10158       if (elim != reg)
10159 	{
10160 	  if (GET_CODE (elim) == PLUS)
10161 	    {
10162 	      fp_cfa_offset -= rtx_to_poly_int64 (XEXP (elim, 1));
10163 	      elim = XEXP (elim, 0);
10164 	    }
10165 	  if (elim != hard_frame_pointer_rtx)
10166 	    fp_cfa_offset = -1;
10167 	}
10168       else
10169 	fp_cfa_offset = -1;
10170     }
10171 
10172   /* If the stack is realigned and a DRAP register is used, we're going to
10173      rewrite MEMs based on it representing incoming locations of parameters
10174      passed on the stack into MEMs based on the argument pointer.  Although
10175      we aren't going to rewrite other MEMs, we still need to initialize the
10176      virtual CFA pointer in order to ensure that the argument pointer will
10177      be seen as a constant throughout the function.
10178 
10179      ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined.  */
10180   else if (stack_realign_drap)
10181     {
10182       rtx reg, elim;
10183 
10184 #ifdef FRAME_POINTER_CFA_OFFSET
10185       reg = frame_pointer_rtx;
10186 #else
10187       reg = arg_pointer_rtx;
10188 #endif
10189       elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10190       if (elim != reg)
10191 	{
10192 	  if (GET_CODE (elim) == PLUS)
10193 	    elim = XEXP (elim, 0);
10194 	  if (elim == hard_frame_pointer_rtx)
10195 	    vt_init_cfa_base ();
10196 	}
10197     }
10198 
10199   hard_frame_pointer_adjustment = -1;
10200 
10201   vt_add_function_parameters ();
10202 
10203   bool record_sp_value = false;
10204   FOR_EACH_BB_FN (bb, cfun)
10205     {
10206       rtx_insn *insn;
10207       basic_block first_bb, last_bb;
10208 
10209       if (MAY_HAVE_DEBUG_BIND_INSNS)
10210 	{
10211 	  cselib_record_sets_hook = add_with_sets;
10212 	  if (dump_file && (dump_flags & TDF_DETAILS))
10213 	    fprintf (dump_file, "first value: %i\n",
10214 		     cselib_get_next_uid ());
10215 	}
10216 
10217       if (MAY_HAVE_DEBUG_BIND_INSNS
10218 	  && cfa_base_rtx
10219 	  && !frame_pointer_needed
10220 	  && record_sp_value)
10221 	cselib_record_sp_cfa_base_equiv (-cfa_base_offset
10222 					 - VTI (bb)->in.stack_adjust,
10223 					 BB_HEAD (bb));
10224       record_sp_value = true;
10225 
10226       first_bb = bb;
10227       for (;;)
10228 	{
10229 	  edge e;
10230 	  if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
10231 	      || ! single_pred_p (bb->next_bb))
10232 	    break;
10233 	  e = find_edge (bb, bb->next_bb);
10234 	  if (! e || (e->flags & EDGE_FALLTHRU) == 0)
10235 	    break;
10236 	  bb = bb->next_bb;
10237 	}
10238       last_bb = bb;
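      /* BB now spans an extended basic block: a maximal chain of
	 blocks connected by fallthru edges whose successors have a
	 single predecessor.  cselib state is kept live across the
	 whole chain and is only reset once the chain has been
	 processed (see the cselib_reset_table call below).  */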
10239 
10240       /* Add the micro-operations to the vector.  */
10241       FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
10242 	{
10243 	  HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
10244 	  VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
10245 
10246 	  rtx_insn *next;
10247 	  FOR_BB_INSNS_SAFE (bb, insn, next)
10248 	    {
10249 	      if (INSN_P (insn))
10250 		{
10251 		  HOST_WIDE_INT pre = 0, post = 0;
10252 
10253 		  if (!frame_pointer_needed)
10254 		    {
10255 		      insn_stack_adjust_offset_pre_post (insn, &pre, &post);
10256 		      if (pre)
10257 			{
10258 			  micro_operation mo;
10259 			  mo.type = MO_ADJUST;
10260 			  mo.u.adjust = pre;
10261 			  mo.insn = insn;
10262 			  if (dump_file && (dump_flags & TDF_DETAILS))
10263 			    log_op_type (PATTERN (insn), bb, insn,
10264 					 MO_ADJUST, dump_file);
10265 			  VTI (bb)->mos.safe_push (mo);
10266 			}
10267 		    }
10268 
10269 		  cselib_hook_called = false;
10270 		  adjust_insn (bb, insn);
10271 
10272 		  if (pre)
10273 		    VTI (bb)->out.stack_adjust += pre;
10274 
10275 		  if (DEBUG_MARKER_INSN_P (insn))
10276 		    {
10277 		      reemit_marker_as_note (insn);
10278 		      continue;
10279 		    }
10280 
10281 		  if (MAY_HAVE_DEBUG_BIND_INSNS)
10282 		    {
10283 		      if (CALL_P (insn))
10284 			prepare_call_arguments (bb, insn);
10285 		      cselib_process_insn (insn);
10286 		      if (dump_file && (dump_flags & TDF_DETAILS))
10287 			{
10288 			  if (dump_flags & TDF_SLIM)
10289 			    dump_insn_slim (dump_file, insn);
10290 			  else
10291 			    print_rtl_single (dump_file, insn);
10292 			  dump_cselib_table (dump_file);
10293 			}
10294 		    }
10295 		  if (!cselib_hook_called)
10296 		    add_with_sets (insn, 0, 0);
10297 		  cancel_changes (0);
10298 
10299 		  if (post)
10300 		    {
10301 		      micro_operation mo;
10302 		      mo.type = MO_ADJUST;
10303 		      mo.u.adjust = post;
10304 		      mo.insn = insn;
10305 		      if (dump_file && (dump_flags & TDF_DETAILS))
10306 			log_op_type (PATTERN (insn), bb, insn,
10307 				     MO_ADJUST, dump_file);
10308 		      VTI (bb)->mos.safe_push (mo);
10309 		      VTI (bb)->out.stack_adjust += post;
10310 		    }
10311 
10312 		  if (maybe_ne (fp_cfa_offset, -1)
10313 		      && known_eq (hard_frame_pointer_adjustment, -1)
10314 		      && fp_setter_insn (insn))
10315 		    {
10316 		      vt_init_cfa_base ();
10317 		      hard_frame_pointer_adjustment = fp_cfa_offset;
10318 		      /* Disassociate sp from fp now.  */
10319 		      if (MAY_HAVE_DEBUG_BIND_INSNS)
10320 			{
10321 			  cselib_val *v;
10322 			  cselib_invalidate_rtx (stack_pointer_rtx);
10323 			  v = cselib_lookup (stack_pointer_rtx, Pmode, 1,
10324 					     VOIDmode);
10325 			  if (v && !cselib_preserved_value_p (v))
10326 			    {
10327 			      cselib_set_value_sp_based (v);
10328 			      preserve_value (v);
10329 			    }
10330 			}
10331 		    }
10332 		}
10333 	    }
10334 	  gcc_assert (offset == VTI (bb)->out.stack_adjust);
10335 	}
10336 
10337       bb = last_bb;
10338 
10339       if (MAY_HAVE_DEBUG_BIND_INSNS)
10340 	{
10341 	  cselib_preserve_only_values ();
10342 	  cselib_reset_table (cselib_get_next_uid ());
10343 	  cselib_record_sets_hook = NULL;
10344 	}
10345     }
10346 
10347   hard_frame_pointer_adjustment = -1;
10348   VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->flooded = true;
10349   cfa_base_rtx = NULL_RTX;
10350   return true;
10351 }
10352 
10353 /* This is *not* reset after each function.  It gives each
10354    NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
10355    a unique label number.  */
10356 
10357 static int debug_label_num = 1;
10358 
10359 /* Remove from the insn stream a single debug insn used for
10360    variable tracking at assignments.  */
10361 
10362 static inline void
10363 delete_vta_debug_insn (rtx_insn *insn)
10364 {
10365   if (DEBUG_MARKER_INSN_P (insn))
10366     {
10367       reemit_marker_as_note (insn);
10368       return;
10369     }
10370 
10371   tree decl = INSN_VAR_LOCATION_DECL (insn);
10372   if (TREE_CODE (decl) == LABEL_DECL
10373       && DECL_NAME (decl)
10374       && !DECL_RTL_SET_P (decl))
10375     {
10376       PUT_CODE (insn, NOTE);
10377       NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
10378       NOTE_DELETED_LABEL_NAME (insn)
10379 	= IDENTIFIER_POINTER (DECL_NAME (decl));
10380       SET_DECL_RTL (decl, insn);
10381       CODE_LABEL_NUMBER (insn) = debug_label_num++;
10382     }
10383   else
10384     delete_insn (insn);
10385 }
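/* Note the special case above: a debug bind whose decl is a named
   label is not simply deleted; turning it into a
   NOTE_INSN_DELETED_DEBUG_LABEL (with a unique number) preserves the
   label's name so that debug info can still refer to it.  */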
10386 
10387 /* Remove from the insn stream all debug insns used for variable
10388    tracking at assignments.  USE_CFG should be false if the cfg is no
10389    longer usable.  */
10390 
10391 void
10392 delete_vta_debug_insns (bool use_cfg)
10393 {
10394   basic_block bb;
10395   rtx_insn *insn, *next;
10396 
10397   if (!MAY_HAVE_DEBUG_INSNS)
10398     return;
10399 
10400   if (use_cfg)
10401     FOR_EACH_BB_FN (bb, cfun)
10402       {
10403 	FOR_BB_INSNS_SAFE (bb, insn, next)
10404 	  if (DEBUG_INSN_P (insn))
10405 	    delete_vta_debug_insn (insn);
10406       }
10407   else
10408     for (insn = get_insns (); insn; insn = next)
10409       {
10410 	next = NEXT_INSN (insn);
10411 	if (DEBUG_INSN_P (insn))
10412 	  delete_vta_debug_insn (insn);
10413       }
10414 }
10415 
10416 /* Run a fast, BB-local-only version of var tracking, to take care of
10417    information that we don't do global analysis on, so that not all
10418    information is lost.  If SKIPPED holds, we're skipping the global
10419    pass entirely, so we should try to use information it would have
10420    handled as well.  */
10421 
10422 static void
10423 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
10424 {
10425   /* ??? Just skip it all for now.  */
10426   delete_vta_debug_insns (true);
10427 }
10428 
10429 /* Free the data structures needed for variable tracking.  */
10430 
10431 static void
10432 vt_finalize (void)
10433 {
10434   basic_block bb;
10435 
10436   FOR_EACH_BB_FN (bb, cfun)
10437     {
10438       VTI (bb)->mos.release ();
10439     }
10440 
  FOR_ALL_BB_FN (bb, cfun)
    {
      dataflow_set_destroy (&VTI (bb)->in);
      dataflow_set_destroy (&VTI (bb)->out);
      if (VTI (bb)->permp)
	{
	  dataflow_set_destroy (VTI (bb)->permp);
	  XDELETE (VTI (bb)->permp);
	}
    }
  free_aux_for_blocks ();
  delete empty_shared_hash->htab;
  empty_shared_hash->htab = NULL;
  delete changed_variables;
  changed_variables = NULL;
  attrs_pool.release ();
  var_pool.release ();
  location_chain_pool.release ();
  shared_hash_pool.release ();

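  /* The following structures are only allocated when tracking
     variables at assignments, backing the cselib-based value
     tracking.  */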
  if (MAY_HAVE_DEBUG_BIND_INSNS)
    {
      if (global_get_addr_cache)
	delete global_get_addr_cache;
      global_get_addr_cache = NULL;
      loc_exp_dep_pool.release ();
      valvar_pool.release ();
      preserved_values.release ();
      cselib_finish ();
      BITMAP_FREE (scratch_regs);
      scratch_regs = NULL;
    }

#ifdef HAVE_window_save
  vec_free (windowed_parm_regs);
#endif

  if (vui_vec)
    XDELETEVEC (vui_vec);
  vui_vec = NULL;
  vui_allocated = 0;
}

/* The entry point to the variable tracking pass.  */

static inline unsigned int
variable_tracking_main_1 (void)
{
  bool success;

  /* We won't be called as a separate pass if flag_var_tracking is not
     set, but final may call us to turn debug markers into notes.  */
  if ((!flag_var_tracking && MAY_HAVE_DEBUG_INSNS)
      || flag_var_tracking_assignments < 0
      /* Var-tracking right now assumes the IR doesn't contain
	 any pseudos at this point.  */
      || targetm.no_register_allocation)
    {
      delete_vta_debug_insns (true);
      return 0;
    }

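  /* If we get here without -fvar-tracking, we were only called to
     deal with debug markers, and there were none.  */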
  if (!flag_var_tracking)
    return 0;

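  /* Punt on very large, dense CFGs: more than 500 basic blocks with
     an average of 20 or more edges per block, where the global
     dataflow analysis would be too expensive.  */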
  if (n_basic_blocks_for_fn (cfun) > 500
      && n_edges_for_fn (cfun) / n_basic_blocks_for_fn (cfun) >= 20)
    {
      vt_debug_insns_local (true);
      return 0;
    }

  if (!vt_initialize ())
    {
      vt_finalize ();
      vt_debug_insns_local (true);
      return 0;
    }

  success = vt_find_locations ();

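  /* If tracking at assignments failed (e.g. the analysis exceeded
     its size limits), fall back to plain var-tracking: remove the
     debug insns and redo the whole analysis without them.  */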
  if (!success && flag_var_tracking_assignments > 0)
    {
      vt_finalize ();

      delete_vta_debug_insns (true);

      /* This is later restored by our caller.  */
      flag_var_tracking_assignments = 0;

      success = vt_initialize ();
      gcc_assert (success);

      success = vt_find_locations ();
    }

  if (!success)
    {
      vt_finalize ();
      vt_debug_insns_local (false);
      return 0;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_dataflow_sets ();
      dump_reg_info (dump_file);
      dump_flow_info (dump_file, dump_flags);
    }

  timevar_push (TV_VAR_TRACKING_EMIT);
  vt_emit_notes ();
  timevar_pop (TV_VAR_TRACKING_EMIT);

  vt_finalize ();
  vt_debug_insns_local (false);
  return 0;
}

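/* Wrapper around variable_tracking_main_1 that restores
   flag_var_tracking_assignments, which the fallback above may have
   cleared.  */
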
unsigned int
variable_tracking_main (void)
{
  unsigned int ret;
  int save = flag_var_tracking_assignments;

  ret = variable_tracking_main_1 ();

  flag_var_tracking_assignments = save;

  return ret;
}

namespace {

const pass_data pass_data_variable_tracking =
{
  RTL_PASS, /* type */
  "vartrack", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_VAR_TRACKING, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_variable_tracking : public rtl_opt_pass
{
public:
  pass_variable_tracking (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_variable_tracking, ctxt)
  {}

  /* opt_pass methods: */
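  /* Targets that set delay_vartrack run variable tracking themselves
     at a later point, calling variable_tracking_main directly, so do
     not gate the pass in for them here.  */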
  virtual bool gate (function *)
    {
      return (flag_var_tracking && !targetm.delay_vartrack);
    }

  virtual unsigned int execute (function *)
    {
      return variable_tracking_main ();
    }

}; // class pass_variable_tracking

} // anon namespace

rtl_opt_pass *
make_pass_variable_tracking (gcc::context *ctxt)
{
  return new pass_variable_tracking (ctxt);
}