1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002-2018 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the variable tracking pass. It computes where
21 variables are located (which registers or where in memory) at each position
22 in the instruction stream and emits notes describing the locations.
23 Debug information (DWARF2 location lists) is finally generated from
24 these notes.
25 With this debug information, it is possible to show variables
26 even when debugging optimized code.
27
28 How does the variable tracking pass work?
29
30 First, it scans RTL code for uses, stores and clobbers (register/memory
31 references in instructions), for call insns and for stack adjustments
32 separately for each basic block and saves them to an array of micro
33 operations.
34 The micro operations of one instruction are ordered so that
35 pre-modifying stack adjustment < use < use with no var < call insn
36 < clobber < set < post-modifying stack adjustment
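   As an illustrative sketch only (not taken from any particular target):
   for a call insn that pre-decrements the stack pointer, reads argument
   registers and writes call-used registers, the micro operations would be
   recorded roughly as MO_ADJUST (the pre-modification), then MO_USE /
   MO_USE_NO_VAR entries for the argument registers, then MO_CALL, and
   finally MO_CLOBBER and MO_SET entries for the registers the call writes.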
37
38 Then, a forward dataflow analysis is performed to find out how locations
39 of variables change through code and to propagate the variable locations
40 along control flow graph.
41 The IN set for basic block BB is computed as the union of the OUT sets of
42 BB's predecessors; the OUT set for BB is initialized from BB's IN set and
43 then updated according to the micro operations in BB.
44
45 The IN and OUT sets for basic blocks consist of a current stack adjustment
46 (used for adjusting offsets of variables addressed using the stack pointer),
47 the table of structures describing the locations of parts of a variable
48 and, for each physical register, a linked list of the variable parts
49 currently stored in that register. The linked list is a list of
50 triplets (reg, decl, offset) where decl is
51 REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list makes it
52 efficient to delete the appropriate variable parts when we set or clobber
53 the register.
54
55 There may be more than one variable part in a register. The linked lists
56 should be pretty short, so a linked list is a good data structure here.
57 For example, in the following code the register allocator may assign the
58 same register to variables A and B, and both of them are stored in that
59 register in CODE:
60
61 if (cond)
62 set A;
63 else
64 set B;
65 CODE;
66 if (cond)
67 use A;
68 else
69 use B;
70
71 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
72 are emitted at appropriate positions in the RTL code. Each such note describes
73 the location of one variable at the point in the instruction stream where the
74 note is. There is no need to emit a note for each variable before each
75 instruction; we only emit these notes where the location of a variable changes
76 (this means that we also emit notes for changes between the OUT set of the
77 previous block and the IN set of the current block).
78
79 The notes consist of two parts:
80 1. the declaration (from REG_EXPR or MEM_EXPR)
81 2. the location of a variable - it is either a simple register/memory
82 reference (for simple variables, for example int),
83 or a parallel of register/memory references (for large variables
84 which consist of several parts, for example long long).
85
86 */
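/* Illustrative sketch (not part of the implementation): for a variable "x"
   that lives in hard register 3 over some range, the pass emits, before the
   first insn of that range, a note along the lines of

     (note ... NOTE_INSN_VAR_LOCATION (var_location x (reg:SI 3)))

   and dwarf2out later turns such notes into DWARF location list entries.
   The exact printed form depends on the target and on dump flags.  */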
87
88 #include "config.h"
89 #include "system.h"
90 #include "coretypes.h"
91 #include "backend.h"
92 #include "target.h"
93 #include "rtl.h"
94 #include "tree.h"
95 #include "cfghooks.h"
96 #include "alloc-pool.h"
97 #include "tree-pass.h"
98 #include "memmodel.h"
99 #include "tm_p.h"
100 #include "insn-config.h"
101 #include "regs.h"
102 #include "emit-rtl.h"
103 #include "recog.h"
104 #include "diagnostic.h"
105 #include "varasm.h"
106 #include "stor-layout.h"
107 #include "cfgrtl.h"
108 #include "cfganal.h"
109 #include "reload.h"
110 #include "calls.h"
111 #include "tree-dfa.h"
112 #include "tree-ssa.h"
113 #include "cselib.h"
114 #include "params.h"
115 #include "tree-pretty-print.h"
116 #include "rtl-iter.h"
117 #include "fibonacci_heap.h"
118
119 typedef fibonacci_heap <long, basic_block_def> bb_heap_t;
120 typedef fibonacci_node <long, basic_block_def> bb_heap_node_t;
121
122 /* var-tracking.c assumes that the tree code with the same value as the VALUE
123 rtx code cannot appear in REG_EXPR/MEM_EXPRs and is not a decl.
124 Currently the value is the same as IDENTIFIER_NODE, which has such
125 a property. If this compile time assertion ever fails, make sure that
126 the new tree code that equals (int) VALUE has the same property. */
127 extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
128
129 /* Type of micro operation. */
130 enum micro_operation_type
131 {
132 MO_USE, /* Use location (REG or MEM). */
133 MO_USE_NO_VAR, /* Use location which is not associated with a variable
134 or the variable is not trackable. */
135 MO_VAL_USE, /* Use location which is associated with a value. */
136 MO_VAL_LOC, /* Use location which appears in a debug insn. */
137 MO_VAL_SET, /* Set location associated with a value. */
138 MO_SET, /* Set location. */
139 MO_COPY, /* Copy the same portion of a variable from one
140 location to another. */
141 MO_CLOBBER, /* Clobber location. */
142 MO_CALL, /* Call insn. */
143 MO_ADJUST /* Adjust stack pointer. */
144
145 };
146
147 static const char * const ATTRIBUTE_UNUSED
148 micro_operation_type_name[] = {
149 "MO_USE",
150 "MO_USE_NO_VAR",
151 "MO_VAL_USE",
152 "MO_VAL_LOC",
153 "MO_VAL_SET",
154 "MO_SET",
155 "MO_COPY",
156 "MO_CLOBBER",
157 "MO_CALL",
158 "MO_ADJUST"
159 };
160
161 /* Where shall the note be emitted? BEFORE or AFTER the instruction.
162 Notes emitted as AFTER_CALL are to take effect during the call,
163 rather than after the call. */
164 enum emit_note_where
165 {
166 EMIT_NOTE_BEFORE_INSN,
167 EMIT_NOTE_AFTER_INSN,
168 EMIT_NOTE_AFTER_CALL_INSN
169 };
170
171 /* Structure holding information about micro operation. */
172 struct micro_operation
173 {
174 /* Type of micro operation. */
175 enum micro_operation_type type;
176
177 /* The instruction which the micro operation is in, for MO_USE,
178 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
179 instruction or note in the original flow (before any var-tracking
180 notes are inserted, to simplify emission of notes), for MO_SET
181 and MO_CLOBBER. */
182 rtx_insn *insn;
183
184 union {
185 /* Location. For MO_SET and MO_COPY, this is the SET that
186 performs the assignment, if known, otherwise it is the target
187 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
188 CONCAT of the VALUE and the LOC associated with it. For
189 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
190 associated with it. */
191 rtx loc;
192
193 /* Stack adjustment. */
194 HOST_WIDE_INT adjust;
195 } u;
196 };
197
198
199 /* A declaration of a variable, or an RTL value being handled like a
200 declaration. */
201 typedef void *decl_or_value;
202
203 /* Return true if a decl_or_value DV is a DECL or NULL. */
204 static inline bool
205 dv_is_decl_p (decl_or_value dv)
206 {
207 return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
208 }
209
210 /* Return true if a decl_or_value is a VALUE rtl. */
211 static inline bool
212 dv_is_value_p (decl_or_value dv)
213 {
214 return dv && !dv_is_decl_p (dv);
215 }
216
217 /* Return the decl in the decl_or_value. */
218 static inline tree
219 dv_as_decl (decl_or_value dv)
220 {
221 gcc_checking_assert (dv_is_decl_p (dv));
222 return (tree) dv;
223 }
224
225 /* Return the value in the decl_or_value. */
226 static inline rtx
227 dv_as_value (decl_or_value dv)
228 {
229 gcc_checking_assert (dv_is_value_p (dv));
230 return (rtx)dv;
231 }
232
233 /* Return the opaque pointer in the decl_or_value. */
234 static inline void *
235 dv_as_opaque (decl_or_value dv)
236 {
237 return dv;
238 }
239
240
241 /* Description of location of a part of a variable. The content of a physical
242 register is described by a chain of these structures.
243 The chains are pretty short (usually 1 or 2 elements) and thus a
244 chain is a good data structure here. */
245 struct attrs
246 {
247 /* Pointer to next member of the list. */
248 attrs *next;
249
250 /* The rtx of register. */
251 rtx loc;
252
253 /* The declaration corresponding to LOC. */
254 decl_or_value dv;
255
256 /* Offset from start of DECL. */
257 HOST_WIDE_INT offset;
258 };
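/* For illustration (a hypothetical situation): if the register allocator
   keeps both "a" and "b" in (reg:SI 3), the chain hanging off
   dataflow_set->regs[3] could look like

     { loc = (reg:SI 3), dv = a, offset = 0 }
       -> { loc = (reg:SI 3), dv = b, offset = 0 } -> NULL  */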
259
260 /* Structure for chaining the locations. */
261 struct location_chain
262 {
263 /* Next element in the chain. */
264 location_chain *next;
265
266 /* The location (REG, MEM or VALUE). */
267 rtx loc;
268
269 /* The "value" stored in this location. */
270 rtx set_src;
271
272 /* Initialized? */
273 enum var_init_status init;
274 };
275
276 /* A vector of loc_exp_dep holds the active dependencies of a one-part
277 DV on VALUEs, i.e., the VALUEs expanded so as to form the current
278 location of DV. Each entry is also part of VALUE's linked-list of
279 backlinks back to DV. */
280 struct loc_exp_dep
281 {
282 /* The dependent DV. */
283 decl_or_value dv;
284 /* The dependency VALUE or DECL_DEBUG. */
285 rtx value;
286 /* The next entry in VALUE's backlinks list. */
287 struct loc_exp_dep *next;
288 /* A pointer to the pointer to this entry (head or prev's next) in
289 the doubly-linked list. */
290 struct loc_exp_dep **pprev;
291 };
292
293
294 /* This data structure holds information about the depth of a variable
295 expansion. */
296 struct expand_depth
297 {
298 /* This measures the complexity of the expanded expression. It
299 grows by one for each level of expansion that adds more than one
300 operand. */
301 int complexity;
302 /* This counts the number of ENTRY_VALUE expressions in an
303 expansion. We want to minimize their use. */
304 int entryvals;
305 };
306
307 /* This data structure is allocated for one-part variables at the time
308 of emitting notes. */
309 struct onepart_aux
310 {
311 /* Doubly-linked list of dependent DVs. These are DVs whose cur_loc
312 computation used the expansion of this variable, and that ought
313 to be notified should this variable change. If the DV's cur_loc
314 expanded to NULL, all components of the loc list are regarded as
315 active, so that any changes in them give us a chance to get a
316 location. Otherwise, only components of the loc that expanded to
317 non-NULL are regarded as active dependencies. */
318 loc_exp_dep *backlinks;
319 /* This holds the LOC that was expanded into cur_loc. We need only
320 mark a one-part variable as changed if the FROM loc is removed,
321 or if it has no known location and a loc is added, or if it gets
322 a change notification from any of its active dependencies. */
323 rtx from;
324 /* The depth of the cur_loc expression. */
325 expand_depth depth;
326 /* Dependencies actively used when expanding FROM into cur_loc. */
327 vec<loc_exp_dep, va_heap, vl_embed> deps;
328 };
329
330 /* Structure describing one part of a variable. */
331 struct variable_part
332 {
333 /* Chain of locations of the part. */
334 location_chain *loc_chain;
335
336 /* Location which was last emitted to location list. */
337 rtx cur_loc;
338
339 union variable_aux
340 {
341 /* The offset in the variable, if !var->onepart. */
342 HOST_WIDE_INT offset;
343
344 /* Pointer to auxiliary data, if var->onepart and emit_notes. */
345 struct onepart_aux *onepaux;
346 } aux;
347 };
348
349 /* Maximum number of location parts. */
350 #define MAX_VAR_PARTS 16
351
352 /* Enumeration type used to discriminate various types of one-part
353 variables. */
354 enum onepart_enum
355 {
356 /* Not a one-part variable. */
357 NOT_ONEPART = 0,
358 /* A one-part DECL that is not a DEBUG_EXPR_DECL. */
359 ONEPART_VDECL = 1,
360 /* A DEBUG_EXPR_DECL. */
361 ONEPART_DEXPR = 2,
362 /* A VALUE. */
363 ONEPART_VALUE = 3
364 };
365
366 /* Structure describing where the variable is located. */
367 struct variable
368 {
369 /* The declaration of the variable, or an RTL value being handled
370 like a declaration. */
371 decl_or_value dv;
372
373 /* Reference count. */
374 int refcount;
375
376 /* Number of variable parts. */
377 char n_var_parts;
378
379 /* What type of DV this is, according to enum onepart_enum. */
380 ENUM_BITFIELD (onepart_enum) onepart : CHAR_BIT;
381
382 /* True if this variable_def struct is currently in the
383 changed_variables hash table. */
384 bool in_changed_variables;
385
386 /* The variable parts. */
387 variable_part var_part[1];
388 };
389
390 /* Pointer to the BB's information specific to variable tracking pass. */
391 #define VTI(BB) ((variable_tracking_info *) (BB)->aux)
392
393 /* Return MEM_OFFSET (MEM) as a HOST_WIDE_INT, or 0 if we can't. */
394
395 static inline HOST_WIDE_INT
396 int_mem_offset (const_rtx mem)
397 {
398 HOST_WIDE_INT offset;
399 if (MEM_OFFSET_KNOWN_P (mem) && MEM_OFFSET (mem).is_constant (&offset))
400 return offset;
401 return 0;
402 }
403
404 #if CHECKING_P && (GCC_VERSION >= 2007)
405
406 /* Access VAR's Ith part's offset, checking that it's not a one-part
407 variable. */
408 #define VAR_PART_OFFSET(var, i) __extension__ \
409 (*({ variable *const __v = (var); \
410 gcc_checking_assert (!__v->onepart); \
411 &__v->var_part[(i)].aux.offset; }))
412
413 /* Access VAR's one-part auxiliary data, checking that it is a
414 one-part variable. */
415 #define VAR_LOC_1PAUX(var) __extension__ \
416 (*({ variable *const __v = (var); \
417 gcc_checking_assert (__v->onepart); \
418 &__v->var_part[0].aux.onepaux; }))
419
420 #else
421 #define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
422 #define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
423 #endif
424
425 /* These are accessor macros for the one-part auxiliary data. When
426 convenient for users, they're guarded by tests that the data was
427 allocated. */
428 #define VAR_LOC_DEP_LST(var) (VAR_LOC_1PAUX (var) \
429 ? VAR_LOC_1PAUX (var)->backlinks \
430 : NULL)
431 #define VAR_LOC_DEP_LSTP(var) (VAR_LOC_1PAUX (var) \
432 ? &VAR_LOC_1PAUX (var)->backlinks \
433 : NULL)
434 #define VAR_LOC_FROM(var) (VAR_LOC_1PAUX (var)->from)
435 #define VAR_LOC_DEPTH(var) (VAR_LOC_1PAUX (var)->depth)
436 #define VAR_LOC_DEP_VEC(var) (VAR_LOC_1PAUX (var) \
437 ? &VAR_LOC_1PAUX (var)->deps \
438 : NULL)
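/* Usage sketch for the checking accessors above (for illustration only):
   for a multi-part variable V, VAR_PART_OFFSET (V, 1) yields the offset of
   its second part and, in checking builds, asserts that V is not one-part;
   conversely, VAR_LOC_1PAUX (V) asserts that V *is* one-part before handing
   back its auxiliary data.  */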
439
440
441
442 typedef unsigned int dvuid;
443
444 /* Return the uid of DV. */
445
446 static inline dvuid
447 dv_uid (decl_or_value dv)
448 {
449 if (dv_is_value_p (dv))
450 return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
451 else
452 return DECL_UID (dv_as_decl (dv));
453 }
454
455 /* Compute the hash from the uid. */
456
457 static inline hashval_t
458 dv_uid2hash (dvuid uid)
459 {
460 return uid;
461 }
462
463 /* The hash function for a decl_or_value, computed from its uid. */
464
465 static inline hashval_t
466 dv_htab_hash (decl_or_value dv)
467 {
468 return dv_uid2hash (dv_uid (dv));
469 }
470
471 static void variable_htab_free (void *);
472
473 /* Variable hashtable helpers. */
474
475 struct variable_hasher : pointer_hash <variable>
476 {
477 typedef void *compare_type;
478 static inline hashval_t hash (const variable *);
479 static inline bool equal (const variable *, const void *);
480 static inline void remove (variable *);
481 };
482
483 /* The hash function for variable_htab, computes the hash value
484 from the declaration of variable X. */
485
486 inline hashval_t
487 variable_hasher::hash (const variable *v)
488 {
489 return dv_htab_hash (v->dv);
490 }
491
492 /* Compare the declaration of variable V with declaration Y. */
493
494 inline bool
495 variable_hasher::equal (const variable *v, const void *y)
496 {
497 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
498
499 return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
500 }
501
502 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
503
504 inline void
505 variable_hasher::remove (variable *var)
506 {
507 variable_htab_free (var);
508 }
509
510 typedef hash_table<variable_hasher> variable_table_type;
511 typedef variable_table_type::iterator variable_iterator_type;
512
513 /* Structure for passing some other parameters to function
514 emit_note_insn_var_location. */
515 struct emit_note_data
516 {
517 /* The instruction which the note will be emitted before/after. */
518 rtx_insn *insn;
519
520 /* Where the note will be emitted (before/after insn)? */
521 enum emit_note_where where;
522
523 /* The variables and values active at this point. */
524 variable_table_type *vars;
525 };
526
527 /* Structure holding a refcounted hash table. If refcount > 1,
528 it must first be unshared before being modified. */
529 struct shared_hash
530 {
531 /* Reference count. */
532 int refcount;
533
534 /* Actual hash table. */
535 variable_table_type *htab;
536 };
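/* Sketch of the intended copy-on-write discipline (see the shared hashtable
   support functions further down): shared_hash_copy only bumps the refcount;
   a writer goes through shared_hash_find_slot_unshare, which clones the
   table via shared_hash_unshare when refcount > 1; shared_hash_destroy
   frees the table once the last reference is dropped.  */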
537
538 /* Structure holding the IN or OUT set for a basic block. */
539 struct dataflow_set
540 {
541 /* Adjustment of stack offset. */
542 HOST_WIDE_INT stack_adjust;
543
544 /* Attributes for registers (lists of attrs). */
545 attrs *regs[FIRST_PSEUDO_REGISTER];
546
547 /* Variable locations. */
548 shared_hash *vars;
549
550 /* Vars that are being traversed. */
551 shared_hash *traversed_vars;
552 };
553
554 /* The structure (one for each basic block) containing the information
555 needed for variable tracking. */
556 struct variable_tracking_info
557 {
558 /* The vector of micro operations. */
559 vec<micro_operation> mos;
560
561 /* The IN and OUT set for dataflow analysis. */
562 dataflow_set in;
563 dataflow_set out;
564
565 /* The permanent-in dataflow set for this block. This is used to
566 hold values for which we had to compute entry values. ??? This
567 should probably be dynamically allocated, to avoid using more
568 memory in non-debug builds. */
569 dataflow_set *permp;
570
571 /* Has the block been visited in DFS? */
572 bool visited;
573
574 /* Has the block been flooded in VTA? */
575 bool flooded;
576
577 };
578
579 /* Alloc pool for struct attrs_def. */
580 object_allocator<attrs> attrs_pool ("attrs pool");
581
582 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
583
584 static pool_allocator var_pool
585 ("variable_def pool", sizeof (variable) +
586 (MAX_VAR_PARTS - 1) * sizeof (((variable *)NULL)->var_part[0]));
587
588 /* Alloc pool for struct variable_def with a single var_part entry. */
589 static pool_allocator valvar_pool
590 ("small variable_def pool", sizeof (variable));
591
592 /* Alloc pool for struct location_chain. */
593 static object_allocator<location_chain> location_chain_pool
594 ("location_chain pool");
595
596 /* Alloc pool for struct shared_hash. */
597 static object_allocator<shared_hash> shared_hash_pool ("shared_hash pool");
598
599 /* Alloc pool for struct loc_exp_dep_s for NOT_ONEPART variables. */
600 object_allocator<loc_exp_dep> loc_exp_dep_pool ("loc_exp_dep pool");
601
602 /* Changed variables; notes will be emitted for them. */
603 static variable_table_type *changed_variables;
604
605 /* Shall notes be emitted? */
606 static bool emit_notes;
607
608 /* Values whose dynamic location lists have gone empty, but whose
609 cselib location lists are still usable. Use this to hold the
610 current location, the backlinks, etc., during emit_notes. */
611 static variable_table_type *dropped_values;
612
613 /* Empty shared hashtable. */
614 static shared_hash *empty_shared_hash;
615
616 /* Scratch register bitmap used by cselib_expand_value_rtx. */
617 static bitmap scratch_regs = NULL;
618
619 #ifdef HAVE_window_save
620 struct GTY(()) parm_reg {
621 rtx outgoing;
622 rtx incoming;
623 };
624
625
626 /* Vector of windowed parameter registers, if any. */
627 static vec<parm_reg, va_gc> *windowed_parm_regs = NULL;
628 #endif
629
630 /* Variable used to tell whether cselib_process_insn called our hook. */
631 static bool cselib_hook_called;
632
633 /* Local function prototypes. */
634 static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
635 HOST_WIDE_INT *);
636 static void insn_stack_adjust_offset_pre_post (rtx_insn *, HOST_WIDE_INT *,
637 HOST_WIDE_INT *);
638 static bool vt_stack_adjustments (void);
639
640 static void init_attrs_list_set (attrs **);
641 static void attrs_list_clear (attrs **);
642 static attrs *attrs_list_member (attrs *, decl_or_value, HOST_WIDE_INT);
643 static void attrs_list_insert (attrs **, decl_or_value, HOST_WIDE_INT, rtx);
644 static void attrs_list_copy (attrs **, attrs *);
645 static void attrs_list_union (attrs **, attrs *);
646
647 static variable **unshare_variable (dataflow_set *set, variable **slot,
648 variable *var, enum var_init_status);
649 static void vars_copy (variable_table_type *, variable_table_type *);
650 static tree var_debug_decl (tree);
651 static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
652 static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
653 enum var_init_status, rtx);
654 static void var_reg_delete (dataflow_set *, rtx, bool);
655 static void var_regno_delete (dataflow_set *, int);
656 static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
657 static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
658 enum var_init_status, rtx);
659 static void var_mem_delete (dataflow_set *, rtx, bool);
660
661 static void dataflow_set_init (dataflow_set *);
662 static void dataflow_set_clear (dataflow_set *);
663 static void dataflow_set_copy (dataflow_set *, dataflow_set *);
664 static int variable_union_info_cmp_pos (const void *, const void *);
665 static void dataflow_set_union (dataflow_set *, dataflow_set *);
666 static location_chain *find_loc_in_1pdv (rtx, variable *,
667 variable_table_type *);
668 static bool canon_value_cmp (rtx, rtx);
669 static int loc_cmp (rtx, rtx);
670 static bool variable_part_different_p (variable_part *, variable_part *);
671 static bool onepart_variable_different_p (variable *, variable *);
672 static bool variable_different_p (variable *, variable *);
673 static bool dataflow_set_different (dataflow_set *, dataflow_set *);
674 static void dataflow_set_destroy (dataflow_set *);
675
676 static bool track_expr_p (tree, bool);
677 static void add_uses_1 (rtx *, void *);
678 static void add_stores (rtx, const_rtx, void *);
679 static bool compute_bb_dataflow (basic_block);
680 static bool vt_find_locations (void);
681
682 static void dump_attrs_list (attrs *);
683 static void dump_var (variable *);
684 static void dump_vars (variable_table_type *);
685 static void dump_dataflow_set (dataflow_set *);
686 static void dump_dataflow_sets (void);
687
688 static void set_dv_changed (decl_or_value, bool);
689 static void variable_was_changed (variable *, dataflow_set *);
690 static variable **set_slot_part (dataflow_set *, rtx, variable **,
691 decl_or_value, HOST_WIDE_INT,
692 enum var_init_status, rtx);
693 static void set_variable_part (dataflow_set *, rtx,
694 decl_or_value, HOST_WIDE_INT,
695 enum var_init_status, rtx, enum insert_option);
696 static variable **clobber_slot_part (dataflow_set *, rtx,
697 variable **, HOST_WIDE_INT, rtx);
698 static void clobber_variable_part (dataflow_set *, rtx,
699 decl_or_value, HOST_WIDE_INT, rtx);
700 static variable **delete_slot_part (dataflow_set *, rtx, variable **,
701 HOST_WIDE_INT);
702 static void delete_variable_part (dataflow_set *, rtx,
703 decl_or_value, HOST_WIDE_INT);
704 static void emit_notes_in_bb (basic_block, dataflow_set *);
705 static void vt_emit_notes (void);
706
707 static void vt_add_function_parameters (void);
708 static bool vt_initialize (void);
709 static void vt_finalize (void);
710
711 /* Callback for stack_adjust_offset_pre_post, called via for_each_inc_dec. */
712
713 static int
714 stack_adjust_offset_pre_post_cb (rtx, rtx op, rtx dest, rtx src, rtx srcoff,
715 void *arg)
716 {
717 if (dest != stack_pointer_rtx)
718 return 0;
719
720 switch (GET_CODE (op))
721 {
722 case PRE_INC:
723 case PRE_DEC:
724 ((HOST_WIDE_INT *)arg)[0] -= INTVAL (srcoff);
725 return 0;
726 case POST_INC:
727 case POST_DEC:
728 ((HOST_WIDE_INT *)arg)[1] -= INTVAL (srcoff);
729 return 0;
730 case PRE_MODIFY:
731 case POST_MODIFY:
732 /* We handle only adjustments by constant amount. */
733 gcc_assert (GET_CODE (src) == PLUS
734 && CONST_INT_P (XEXP (src, 1))
735 && XEXP (src, 0) == stack_pointer_rtx);
736 ((HOST_WIDE_INT *)arg)[GET_CODE (op) == POST_MODIFY]
737 -= INTVAL (XEXP (src, 1));
738 return 0;
739 default:
740 gcc_unreachable ();
741 }
742 }
743
744 /* Given a SET, calculate the amounts by which it PRE- and POST-modifies
745 the stack pointer.
746 This function is similar to stack_adjust_offset. */
747
748 static void
749 stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
750 HOST_WIDE_INT *post)
751 {
752 rtx src = SET_SRC (pattern);
753 rtx dest = SET_DEST (pattern);
754 enum rtx_code code;
755
756 if (dest == stack_pointer_rtx)
757 {
758 /* (set (reg sp) (plus (reg sp) (const_int))) */
759 code = GET_CODE (src);
760 if (! (code == PLUS || code == MINUS)
761 || XEXP (src, 0) != stack_pointer_rtx
762 || !CONST_INT_P (XEXP (src, 1)))
763 return;
764
765 if (code == MINUS)
766 *post += INTVAL (XEXP (src, 1));
767 else
768 *post -= INTVAL (XEXP (src, 1));
769 return;
770 }
771 HOST_WIDE_INT res[2] = { 0, 0 };
772 for_each_inc_dec (pattern, stack_adjust_offset_pre_post_cb, res);
773 *pre += res[0];
774 *post += res[1];
775 }
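/* For example, a plain 16-byte allocation

     (set (reg sp) (plus (reg sp) (const_int -16)))

   takes the early-return path above and increases *POST by 16, while the
   auto-modify forms (PRE_DEC, POST_MODIFY, ...) are handled through
   stack_adjust_offset_pre_post_cb via for_each_inc_dec.  */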
776
777 /* Given an INSN, calculate the amounts by which it PRE- and POST-modifies
778 the stack pointer. */
779
780 static void
781 insn_stack_adjust_offset_pre_post (rtx_insn *insn, HOST_WIDE_INT *pre,
782 HOST_WIDE_INT *post)
783 {
784 rtx pattern;
785
786 *pre = 0;
787 *post = 0;
788
789 pattern = PATTERN (insn);
790 if (RTX_FRAME_RELATED_P (insn))
791 {
792 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
793 if (expr)
794 pattern = XEXP (expr, 0);
795 }
796
797 if (GET_CODE (pattern) == SET)
798 stack_adjust_offset_pre_post (pattern, pre, post);
799 else if (GET_CODE (pattern) == PARALLEL
800 || GET_CODE (pattern) == SEQUENCE)
801 {
802 int i;
803
804 /* There may be stack adjustments inside compound insns. Search
805 for them. */
806 for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
807 if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
808 stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
809 }
810 }
811
812 /* Compute stack adjustments for all blocks by traversing DFS tree.
813 Return true when the adjustments on all incoming edges are consistent.
814 Heavily borrowed from pre_and_rev_post_order_compute. */
815
816 static bool
817 vt_stack_adjustments (void)
818 {
819 edge_iterator *stack;
820 int sp;
821
822 /* Initialize entry block. */
823 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->visited = true;
824 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->in.stack_adjust
825 = INCOMING_FRAME_SP_OFFSET;
826 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out.stack_adjust
827 = INCOMING_FRAME_SP_OFFSET;
828
829 /* Allocate stack for back-tracking up CFG. */
830 stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
831 sp = 0;
832
833 /* Push the first edge on to the stack. */
834 stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
835
836 while (sp)
837 {
838 edge_iterator ei;
839 basic_block src;
840 basic_block dest;
841
842 /* Look at the edge on the top of the stack. */
843 ei = stack[sp - 1];
844 src = ei_edge (ei)->src;
845 dest = ei_edge (ei)->dest;
846
847 /* Check if the edge destination has been visited yet. */
848 if (!VTI (dest)->visited)
849 {
850 rtx_insn *insn;
851 HOST_WIDE_INT pre, post, offset;
852 VTI (dest)->visited = true;
853 VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;
854
855 if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
856 for (insn = BB_HEAD (dest);
857 insn != NEXT_INSN (BB_END (dest));
858 insn = NEXT_INSN (insn))
859 if (INSN_P (insn))
860 {
861 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
862 offset += pre + post;
863 }
864
865 VTI (dest)->out.stack_adjust = offset;
866
867 if (EDGE_COUNT (dest->succs) > 0)
868 /* Since the DEST node has been visited for the first
869 time, check its successors. */
870 stack[sp++] = ei_start (dest->succs);
871 }
872 else
873 {
874 /* We can end up with different stack adjustments for the exit block
875 of a shrink-wrapped function if stack_adjust_offset_pre_post
876 doesn't understand the rtx pattern used to restore the stack
877 pointer in the epilogue. For example, on s390(x), the stack
878 pointer is often restored via a load-multiple instruction
879 and so no stack_adjust offset is recorded for it. This means
880 that the stack offset at the end of the epilogue block is the
881 same as the offset before the epilogue, whereas other paths
882 to the exit block will have the correct stack_adjust.
883
884 It is safe to ignore these differences because (a) we never
885 use the stack_adjust for the exit block in this pass and
886 (b) dwarf2cfi checks whether the CFA notes in a shrink-wrapped
887 function are correct.
888
889 We must check whether the adjustments on other edges are
890 the same though. */
891 if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
892 && VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
893 {
894 free (stack);
895 return false;
896 }
897
898 if (! ei_one_before_end_p (ei))
899 /* Go to the next edge. */
900 ei_next (&stack[sp - 1]);
901 else
902 /* Return to previous level if there are no more edges. */
903 sp--;
904 }
905 }
906
907 free (stack);
908 return true;
909 }
910
911 /* arg_pointer_rtx or frame_pointer_rtx, respectively, if stack_pointer_rtx or
912 hard_frame_pointer_rtx is being mapped to it, and the offset for it. */
913 static rtx cfa_base_rtx;
914 static HOST_WIDE_INT cfa_base_offset;
915
916 /* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
917 or hard_frame_pointer_rtx. */
918
919 static inline rtx
920 compute_cfa_pointer (HOST_WIDE_INT adjustment)
921 {
922 return plus_constant (Pmode, cfa_base_rtx, adjustment + cfa_base_offset);
923 }
924
925 /* Adjustment for hard_frame_pointer_rtx to cfa base reg,
926 or -1 if the replacement shouldn't be done. */
927 static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;
928
929 /* Data for adjust_mems callback. */
930
931 struct adjust_mem_data
932 {
933 bool store;
934 machine_mode mem_mode;
935 HOST_WIDE_INT stack_adjust;
936 auto_vec<rtx> side_effects;
937 };
938
939 /* Helper for adjust_mems. Return true if X is suitable for
940 transformation of wider-mode arithmetic to a narrower mode. */
941
942 static bool
943 use_narrower_mode_test (rtx x, const_rtx subreg)
944 {
945 subrtx_var_iterator::array_type array;
946 FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
947 {
948 rtx x = *iter;
949 if (CONSTANT_P (x))
950 iter.skip_subrtxes ();
951 else
952 switch (GET_CODE (x))
953 {
954 case REG:
955 if (cselib_lookup (x, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
956 return false;
957 if (!validate_subreg (GET_MODE (subreg), GET_MODE (x), x,
958 subreg_lowpart_offset (GET_MODE (subreg),
959 GET_MODE (x))))
960 return false;
961 break;
962 case PLUS:
963 case MINUS:
964 case MULT:
965 break;
966 case ASHIFT:
967 if (GET_MODE (XEXP (x, 1)) != VOIDmode)
968 {
969 enum machine_mode mode = GET_MODE (subreg);
970 rtx op1 = XEXP (x, 1);
971 enum machine_mode op1_mode = GET_MODE (op1);
972 if (GET_MODE_PRECISION (as_a <scalar_int_mode> (mode))
973 < GET_MODE_PRECISION (as_a <scalar_int_mode> (op1_mode)))
974 {
975 poly_uint64 byte = subreg_lowpart_offset (mode, op1_mode);
976 if (GET_CODE (op1) == SUBREG || GET_CODE (op1) == CONCAT)
977 {
978 if (!simplify_subreg (mode, op1, op1_mode, byte))
979 return false;
980 }
981 else if (!validate_subreg (mode, op1_mode, op1, byte))
982 return false;
983 }
984 }
985 iter.substitute (XEXP (x, 0));
986 break;
987 default:
988 return false;
989 }
990 }
991 return true;
992 }
993
994 /* Transform X into narrower mode MODE from wider mode WMODE. */
995
996 static rtx
997 use_narrower_mode (rtx x, scalar_int_mode mode, scalar_int_mode wmode)
998 {
999 rtx op0, op1;
1000 if (CONSTANT_P (x))
1001 return lowpart_subreg (mode, x, wmode);
1002 switch (GET_CODE (x))
1003 {
1004 case REG:
1005 return lowpart_subreg (mode, x, wmode);
1006 case PLUS:
1007 case MINUS:
1008 case MULT:
1009 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
1010 op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
1011 return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
1012 case ASHIFT:
1013 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
1014 op1 = XEXP (x, 1);
1015 /* Ensure shift amount is not wider than mode. */
1016 if (GET_MODE (op1) == VOIDmode)
1017 op1 = lowpart_subreg (mode, op1, wmode);
1018 else if (GET_MODE_PRECISION (mode)
1019 < GET_MODE_PRECISION (as_a <scalar_int_mode> (GET_MODE (op1))))
1020 op1 = lowpart_subreg (mode, op1, GET_MODE (op1));
1021 return simplify_gen_binary (ASHIFT, mode, op0, op1);
1022 default:
1023 gcc_unreachable ();
1024 }
1025 }
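/* For instance, narrowing (plus:DI (reg:DI R) (const_int 8)) from DImode to
   SImode on a little-endian target would typically yield
   (plus:SI (subreg:SI (reg:DI R) 0) (const_int 8)), unless the subreg is
   further simplified.  Shift counts are narrowed separately above so that
   the ASHIFT operand modes stay consistent.  */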
1026
1027 /* Helper function for adjusting used MEMs. */
1028
1029 static rtx
1030 adjust_mems (rtx loc, const_rtx old_rtx, void *data)
1031 {
1032 struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
1033 rtx mem, addr = loc, tem;
1034 machine_mode mem_mode_save;
1035 bool store_save;
1036 scalar_int_mode tem_mode, tem_subreg_mode;
1037 poly_int64 size;
1038 switch (GET_CODE (loc))
1039 {
1040 case REG:
1041 /* Don't do any sp or fp replacements outside of MEM addresses
1042 on the LHS. */
1043 if (amd->mem_mode == VOIDmode && amd->store)
1044 return loc;
1045 if (loc == stack_pointer_rtx
1046 && !frame_pointer_needed
1047 && cfa_base_rtx)
1048 return compute_cfa_pointer (amd->stack_adjust);
1049 else if (loc == hard_frame_pointer_rtx
1050 && frame_pointer_needed
1051 && hard_frame_pointer_adjustment != -1
1052 && cfa_base_rtx)
1053 return compute_cfa_pointer (hard_frame_pointer_adjustment);
1054 gcc_checking_assert (loc != virtual_incoming_args_rtx);
1055 return loc;
1056 case MEM:
1057 mem = loc;
1058 if (!amd->store)
1059 {
1060 mem = targetm.delegitimize_address (mem);
1061 if (mem != loc && !MEM_P (mem))
1062 return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
1063 }
1064
1065 addr = XEXP (mem, 0);
1066 mem_mode_save = amd->mem_mode;
1067 amd->mem_mode = GET_MODE (mem);
1068 store_save = amd->store;
1069 amd->store = false;
1070 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1071 amd->store = store_save;
1072 amd->mem_mode = mem_mode_save;
1073 if (mem == loc)
1074 addr = targetm.delegitimize_address (addr);
1075 if (addr != XEXP (mem, 0))
1076 mem = replace_equiv_address_nv (mem, addr);
1077 if (!amd->store)
1078 mem = avoid_constant_pool_reference (mem);
1079 return mem;
1080 case PRE_INC:
1081 case PRE_DEC:
1082 size = GET_MODE_SIZE (amd->mem_mode);
1083 addr = plus_constant (GET_MODE (loc), XEXP (loc, 0),
1084 GET_CODE (loc) == PRE_INC ? size : -size);
1085 /* FALLTHRU */
1086 case POST_INC:
1087 case POST_DEC:
1088 if (addr == loc)
1089 addr = XEXP (loc, 0);
1090 gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
1091 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1092 size = GET_MODE_SIZE (amd->mem_mode);
1093 tem = plus_constant (GET_MODE (loc), XEXP (loc, 0),
1094 (GET_CODE (loc) == PRE_INC
1095 || GET_CODE (loc) == POST_INC) ? size : -size);
1096 store_save = amd->store;
1097 amd->store = false;
1098 tem = simplify_replace_fn_rtx (tem, old_rtx, adjust_mems, data);
1099 amd->store = store_save;
1100 amd->side_effects.safe_push (gen_rtx_SET (XEXP (loc, 0), tem));
1101 return addr;
1102 case PRE_MODIFY:
1103 addr = XEXP (loc, 1);
1104 /* FALLTHRU */
1105 case POST_MODIFY:
1106 if (addr == loc)
1107 addr = XEXP (loc, 0);
1108 gcc_assert (amd->mem_mode != VOIDmode);
1109 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1110 store_save = amd->store;
1111 amd->store = false;
1112 tem = simplify_replace_fn_rtx (XEXP (loc, 1), old_rtx,
1113 adjust_mems, data);
1114 amd->store = store_save;
1115 amd->side_effects.safe_push (gen_rtx_SET (XEXP (loc, 0), tem));
1116 return addr;
1117 case SUBREG:
1118 /* First try without delegitimization of whole MEMs and
1119 avoid_constant_pool_reference, which is more likely to succeed. */
1120 store_save = amd->store;
1121 amd->store = true;
1122 addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
1123 data);
1124 amd->store = store_save;
1125 mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1126 if (mem == SUBREG_REG (loc))
1127 {
1128 tem = loc;
1129 goto finish_subreg;
1130 }
1131 tem = simplify_gen_subreg (GET_MODE (loc), mem,
1132 GET_MODE (SUBREG_REG (loc)),
1133 SUBREG_BYTE (loc));
1134 if (tem)
1135 goto finish_subreg;
1136 tem = simplify_gen_subreg (GET_MODE (loc), addr,
1137 GET_MODE (SUBREG_REG (loc)),
1138 SUBREG_BYTE (loc));
1139 if (tem == NULL_RTX)
1140 tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
1141 finish_subreg:
1142 if (MAY_HAVE_DEBUG_BIND_INSNS
1143 && GET_CODE (tem) == SUBREG
1144 && (GET_CODE (SUBREG_REG (tem)) == PLUS
1145 || GET_CODE (SUBREG_REG (tem)) == MINUS
1146 || GET_CODE (SUBREG_REG (tem)) == MULT
1147 || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
1148 && is_a <scalar_int_mode> (GET_MODE (tem), &tem_mode)
1149 && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (tem)),
1150 &tem_subreg_mode)
1151 && (GET_MODE_PRECISION (tem_mode)
1152 < GET_MODE_PRECISION (tem_subreg_mode))
1153 && subreg_lowpart_p (tem)
1154 && use_narrower_mode_test (SUBREG_REG (tem), tem))
1155 return use_narrower_mode (SUBREG_REG (tem), tem_mode, tem_subreg_mode);
1156 return tem;
1157 case ASM_OPERANDS:
1158 /* Don't do any replacements in second and following
1159 ASM_OPERANDS of inline-asm with multiple sets.
1160 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
1161 and ASM_OPERANDS_LABEL_VEC need to be equal between
1162 all the ASM_OPERANDs in the insn and adjust_insn will
1163 fix this up. */
1164 if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
1165 return loc;
1166 break;
1167 default:
1168 break;
1169 }
1170 return NULL_RTX;
1171 }
1172
1173 /* Helper function for replacement of uses. */
1174
1175 static void
1176 adjust_mem_uses (rtx *x, void *data)
1177 {
1178 rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
1179 if (new_x != *x)
1180 validate_change (NULL_RTX, x, new_x, true);
1181 }
1182
1183 /* Helper function for replacement of stores. */
1184
1185 static void
1186 adjust_mem_stores (rtx loc, const_rtx expr, void *data)
1187 {
1188 if (MEM_P (loc))
1189 {
1190 rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
1191 adjust_mems, data);
1192 if (new_dest != SET_DEST (expr))
1193 {
1194 rtx xexpr = CONST_CAST_RTX (expr);
1195 validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
1196 }
1197 }
1198 }
1199
1200 /* Simplify INSN. Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
1201 replace them with their value in the insn and add the side-effects
1202 as other sets to the insn. */
1203
1204 static void
1205 adjust_insn (basic_block bb, rtx_insn *insn)
1206 {
1207 rtx set;
1208
1209 #ifdef HAVE_window_save
1210 /* If the target machine has an explicit window save instruction, the
1211 transformation OUTGOING_REGNO -> INCOMING_REGNO is done there. */
1212 if (RTX_FRAME_RELATED_P (insn)
1213 && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
1214 {
1215 unsigned int i, nregs = vec_safe_length (windowed_parm_regs);
1216 rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
1217 parm_reg *p;
1218
1219 FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs, i, p)
1220 {
1221 XVECEXP (rtl, 0, i * 2)
1222 = gen_rtx_SET (p->incoming, p->outgoing);
1223 /* Do not clobber the attached DECL, but only the REG. */
1224 XVECEXP (rtl, 0, i * 2 + 1)
1225 = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
1226 gen_raw_REG (GET_MODE (p->outgoing),
1227 REGNO (p->outgoing)));
1228 }
1229
1230 validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
1231 return;
1232 }
1233 #endif
1234
1235 adjust_mem_data amd;
1236 amd.mem_mode = VOIDmode;
1237 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
1238
1239 amd.store = true;
1240 note_stores (PATTERN (insn), adjust_mem_stores, &amd);
1241
1242 amd.store = false;
1243 if (GET_CODE (PATTERN (insn)) == PARALLEL
1244 && asm_noperands (PATTERN (insn)) > 0
1245 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1246 {
1247 rtx body, set0;
1248 int i;
1249
1250 /* inline-asm with multiple sets is a tiny bit more complicated,
1251 because the 3 vectors in ASM_OPERANDS need to be shared between
1252 all ASM_OPERANDS in the instruction. adjust_mems will
1253 not touch ASM_OPERANDS other than the first one; the asm_noperands
1254 test above needs to be called before that (otherwise it would fail),
1255 and afterwards this code fixes it up. */
1256 note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
1257 body = PATTERN (insn);
1258 set0 = XVECEXP (body, 0, 0);
1259 gcc_checking_assert (GET_CODE (set0) == SET
1260 && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
1261 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
1262 for (i = 1; i < XVECLEN (body, 0); i++)
1263 if (GET_CODE (XVECEXP (body, 0, i)) != SET)
1264 break;
1265 else
1266 {
1267 set = XVECEXP (body, 0, i);
1268 gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
1269 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
1270 == i);
1271 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
1272 != ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
1273 || ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
1274 != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
1275 || ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
1276 != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
1277 {
1278 rtx newsrc = shallow_copy_rtx (SET_SRC (set));
1279 ASM_OPERANDS_INPUT_VEC (newsrc)
1280 = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
1281 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
1282 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
1283 ASM_OPERANDS_LABEL_VEC (newsrc)
1284 = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
1285 validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
1286 }
1287 }
1288 }
1289 else
1290 note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
1291
1292 /* For read-only MEMs containing some constant, prefer those
1293 constants. */
1294 set = single_set (insn);
1295 if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
1296 {
1297 rtx note = find_reg_equal_equiv_note (insn);
1298
1299 if (note && CONSTANT_P (XEXP (note, 0)))
1300 validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
1301 }
1302
1303 if (!amd.side_effects.is_empty ())
1304 {
1305 rtx *pat, new_pat;
1306 int i, oldn;
1307
1308 pat = &PATTERN (insn);
1309 if (GET_CODE (*pat) == COND_EXEC)
1310 pat = &COND_EXEC_CODE (*pat);
1311 if (GET_CODE (*pat) == PARALLEL)
1312 oldn = XVECLEN (*pat, 0);
1313 else
1314 oldn = 1;
1315 unsigned int newn = amd.side_effects.length ();
1316 new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
1317 if (GET_CODE (*pat) == PARALLEL)
1318 for (i = 0; i < oldn; i++)
1319 XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
1320 else
1321 XVECEXP (new_pat, 0, 0) = *pat;
1322
1323 rtx effect;
1324 unsigned int j;
1325 FOR_EACH_VEC_ELT_REVERSE (amd.side_effects, j, effect)
1326 XVECEXP (new_pat, 0, j + oldn) = effect;
1327 validate_change (NULL_RTX, pat, new_pat, true);
1328 }
1329 }
1330
1331 /* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV. */
1332 static inline rtx
1333 dv_as_rtx (decl_or_value dv)
1334 {
1335 tree decl;
1336
1337 if (dv_is_value_p (dv))
1338 return dv_as_value (dv);
1339
1340 decl = dv_as_decl (dv);
1341
1342 gcc_checking_assert (TREE_CODE (decl) == DEBUG_EXPR_DECL);
1343 return DECL_RTL_KNOWN_SET (decl);
1344 }
1345
1346 /* Return nonzero if a decl_or_value must not have more than one
1347 variable part. The returned value discriminates among various
1348 kinds of one-part DVs according to enum onepart_enum. */
1349 static inline onepart_enum
1350 dv_onepart_p (decl_or_value dv)
1351 {
1352 tree decl;
1353
1354 if (!MAY_HAVE_DEBUG_BIND_INSNS)
1355 return NOT_ONEPART;
1356
1357 if (dv_is_value_p (dv))
1358 return ONEPART_VALUE;
1359
1360 decl = dv_as_decl (dv);
1361
1362 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
1363 return ONEPART_DEXPR;
1364
1365 if (target_for_debug_bind (decl) != NULL_TREE)
1366 return ONEPART_VDECL;
1367
1368 return NOT_ONEPART;
1369 }
1370
1371 /* Return the variable pool to be used for a dv of type ONEPART. */
1372 static inline pool_allocator &
1373 onepart_pool (onepart_enum onepart)
1374 {
1375 return onepart ? valvar_pool : var_pool;
1376 }
1377
1378 /* Allocate a variable_def from the corresponding variable pool. */
1379 static inline variable *
1380 onepart_pool_allocate (onepart_enum onepart)
1381 {
1382 return (variable*) onepart_pool (onepart).allocate ();
1383 }
1384
1385 /* Build a decl_or_value out of a decl. */
1386 static inline decl_or_value
1387 dv_from_decl (tree decl)
1388 {
1389 decl_or_value dv;
1390 dv = decl;
1391 gcc_checking_assert (dv_is_decl_p (dv));
1392 return dv;
1393 }
1394
1395 /* Build a decl_or_value out of a value. */
1396 static inline decl_or_value
1397 dv_from_value (rtx value)
1398 {
1399 decl_or_value dv;
1400 dv = value;
1401 gcc_checking_assert (dv_is_value_p (dv));
1402 return dv;
1403 }
1404
1405 /* Return a value or the decl of a debug_expr as a decl_or_value. */
1406 static inline decl_or_value
1407 dv_from_rtx (rtx x)
1408 {
1409 decl_or_value dv;
1410
1411 switch (GET_CODE (x))
1412 {
1413 case DEBUG_EXPR:
1414 dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
1415 gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x)) == x);
1416 break;
1417
1418 case VALUE:
1419 dv = dv_from_value (x);
1420 break;
1421
1422 default:
1423 gcc_unreachable ();
1424 }
1425
1426 return dv;
1427 }
1428
1429 extern void debug_dv (decl_or_value dv);
1430
1431 DEBUG_FUNCTION void
1432 debug_dv (decl_or_value dv)
1433 {
1434 if (dv_is_value_p (dv))
1435 debug_rtx (dv_as_value (dv));
1436 else
1437 debug_generic_stmt (dv_as_decl (dv));
1438 }
1439
1440 static void loc_exp_dep_clear (variable *var);
1441
1442 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
1443
1444 static void
1445 variable_htab_free (void *elem)
1446 {
1447 int i;
1448 variable *var = (variable *) elem;
1449 location_chain *node, *next;
1450
1451 gcc_checking_assert (var->refcount > 0);
1452
1453 var->refcount--;
1454 if (var->refcount > 0)
1455 return;
1456
1457 for (i = 0; i < var->n_var_parts; i++)
1458 {
1459 for (node = var->var_part[i].loc_chain; node; node = next)
1460 {
1461 next = node->next;
1462 delete node;
1463 }
1464 var->var_part[i].loc_chain = NULL;
1465 }
1466 if (var->onepart && VAR_LOC_1PAUX (var))
1467 {
1468 loc_exp_dep_clear (var);
1469 if (VAR_LOC_DEP_LST (var))
1470 VAR_LOC_DEP_LST (var)->pprev = NULL;
1471 XDELETE (VAR_LOC_1PAUX (var));
1472 /* These may be reused across functions, so reset
1473 e.g. NO_LOC_P. */
1474 if (var->onepart == ONEPART_DEXPR)
1475 set_dv_changed (var->dv, true);
1476 }
1477 onepart_pool (var->onepart).remove (var);
1478 }
1479
1480 /* Initialize the set (array) SET of attrs to empty lists. */
1481
1482 static void
1483 init_attrs_list_set (attrs **set)
1484 {
1485 int i;
1486
1487 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1488 set[i] = NULL;
1489 }
1490
1491 /* Make the list *LISTP empty. */
1492
1493 static void
1494 attrs_list_clear (attrs **listp)
1495 {
1496 attrs *list, *next;
1497
1498 for (list = *listp; list; list = next)
1499 {
1500 next = list->next;
1501 delete list;
1502 }
1503 *listp = NULL;
1504 }
1505
1506 /* Return the node of LIST matching DV and OFFSET, or NULL if there is none. */
1507
1508 static attrs *
1509 attrs_list_member (attrs *list, decl_or_value dv, HOST_WIDE_INT offset)
1510 {
1511 for (; list; list = list->next)
1512 if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
1513 return list;
1514 return NULL;
1515 }
1516
1517 /* Insert the triplet DV, OFFSET, LOC into the list *LISTP. */
1518
1519 static void
1520 attrs_list_insert (attrs **listp, decl_or_value dv,
1521 HOST_WIDE_INT offset, rtx loc)
1522 {
1523 attrs *list = new attrs;
1524 list->loc = loc;
1525 list->dv = dv;
1526 list->offset = offset;
1527 list->next = *listp;
1528 *listp = list;
1529 }
1530
1531 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
1532
1533 static void
1534 attrs_list_copy (attrs **dstp, attrs *src)
1535 {
1536 attrs_list_clear (dstp);
1537 for (; src; src = src->next)
1538 {
1539 attrs *n = new attrs;
1540 n->loc = src->loc;
1541 n->dv = src->dv;
1542 n->offset = src->offset;
1543 n->next = *dstp;
1544 *dstp = n;
1545 }
1546 }
1547
1548 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
1549
1550 static void
1551 attrs_list_union (attrs **dstp, attrs *src)
1552 {
1553 for (; src; src = src->next)
1554 {
1555 if (!attrs_list_member (*dstp, src->dv, src->offset))
1556 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1557 }
1558 }
1559
1560 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1561 *DSTP. */
1562
1563 static void
1564 attrs_list_mpdv_union (attrs **dstp, attrs *src, attrs *src2)
1565 {
1566 gcc_assert (!*dstp);
1567 for (; src; src = src->next)
1568 {
1569 if (!dv_onepart_p (src->dv))
1570 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1571 }
1572 for (src = src2; src; src = src->next)
1573 {
1574 if (!dv_onepart_p (src->dv)
1575 && !attrs_list_member (*dstp, src->dv, src->offset))
1576 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1577 }
1578 }
1579
1580 /* Shared hashtable support. */
1581
1582 /* Return true if VARS is shared. */
1583
1584 static inline bool
1585 shared_hash_shared (shared_hash *vars)
1586 {
1587 return vars->refcount > 1;
1588 }
1589
1590 /* Return the hash table for VARS. */
1591
1592 static inline variable_table_type *
1593 shared_hash_htab (shared_hash *vars)
1594 {
1595 return vars->htab;
1596 }
1597
1598 /* Return true if VAR may be shared, either directly or because VARS is shared. */
1599
1600 static inline bool
1601 shared_var_p (variable *var, shared_hash *vars)
1602 {
1603 /* Don't count an entry in the changed_variables table as a duplicate. */
1604 return ((var->refcount > 1 + (int) var->in_changed_variables)
1605 || shared_hash_shared (vars));
1606 }
1607
1608 /* Copy variables into a new hash table. */
1609
1610 static shared_hash *
1611 shared_hash_unshare (shared_hash *vars)
1612 {
1613 shared_hash *new_vars = new shared_hash;
1614 gcc_assert (vars->refcount > 1);
1615 new_vars->refcount = 1;
1616 new_vars->htab = new variable_table_type (vars->htab->elements () + 3);
1617 vars_copy (new_vars->htab, vars->htab);
1618 vars->refcount--;
1619 return new_vars;
1620 }
1621
1622 /* Increment reference counter on VARS and return it. */
1623
1624 static inline shared_hash *
1625 shared_hash_copy (shared_hash *vars)
1626 {
1627 vars->refcount++;
1628 return vars;
1629 }
1630
1631 /* Decrement reference counter and destroy hash table if not shared
1632 anymore. */
1633
1634 static void
1635 shared_hash_destroy (shared_hash *vars)
1636 {
1637 gcc_checking_assert (vars->refcount > 0);
1638 if (--vars->refcount == 0)
1639 {
1640 delete vars->htab;
1641 delete vars;
1642 }
1643 }
1644
1645 /* Unshare *PVARS if shared and return slot for DV. If INS is
1646 INSERT, insert it if not already present. */
1647
1648 static inline variable **
1649 shared_hash_find_slot_unshare_1 (shared_hash **pvars, decl_or_value dv,
1650 hashval_t dvhash, enum insert_option ins)
1651 {
1652 if (shared_hash_shared (*pvars))
1653 *pvars = shared_hash_unshare (*pvars);
1654 return shared_hash_htab (*pvars)->find_slot_with_hash (dv, dvhash, ins);
1655 }
1656
1657 static inline variable **
1658 shared_hash_find_slot_unshare (shared_hash **pvars, decl_or_value dv,
1659 enum insert_option ins)
1660 {
1661 return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
1662 }
1663
1664 /* Return slot for DV, if it is already present in the hash table.
1665 If it is not present, insert it only if VARS is not shared, otherwise
1666 return NULL. */
1667
1668 static inline variable **
1669 shared_hash_find_slot_1 (shared_hash *vars, decl_or_value dv, hashval_t dvhash)
1670 {
1671 return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash,
1672 shared_hash_shared (vars)
1673 ? NO_INSERT : INSERT);
1674 }
1675
1676 static inline variable **
1677 shared_hash_find_slot (shared_hash *vars, decl_or_value dv)
1678 {
1679 return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
1680 }
1681
1682 /* Return slot for DV only if it is already present in the hash table. */
1683
1684 static inline variable **
1685 shared_hash_find_slot_noinsert_1 (shared_hash *vars, decl_or_value dv,
1686 hashval_t dvhash)
1687 {
1688 return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash, NO_INSERT);
1689 }
1690
1691 static inline variable **
1692 shared_hash_find_slot_noinsert (shared_hash *vars, decl_or_value dv)
1693 {
1694 return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
1695 }
1696
1697 /* Return variable for DV or NULL if not already present in the hash
1698 table. */
1699
1700 static inline variable *
1701 shared_hash_find_1 (shared_hash *vars, decl_or_value dv, hashval_t dvhash)
1702 {
1703 return shared_hash_htab (vars)->find_with_hash (dv, dvhash);
1704 }
1705
1706 static inline variable *
1707 shared_hash_find (shared_hash *vars, decl_or_value dv)
1708 {
1709 return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
1710 }
1711
1712 /* Return true if TVAL is better than CVAL as a canonical value. We
1713 choose lowest-numbered VALUEs, using the RTX address as a
1714 tie-breaker. The idea is to arrange them into a star topology,
1715 such that all of them are at most one step away from the canonical
1716 value, and the canonical value has backlinks to all of them, in
1717 addition to all the actual locations. We don't enforce this
1718 topology throughout the entire dataflow analysis, though.
1719 */
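/* For instance, among VALUEs with uids 3, 5 and 7, the one with uid 3 is
   preferred as canonical: canon_value_cmp returns true when TVAL has the
   lower uid (or when CVAL is NULL), so the other two end up linked to it
   rather than to each other.  */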
1720
1721 static inline bool
1722 canon_value_cmp (rtx tval, rtx cval)
1723 {
1724 return !cval
1725 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
1726 }
1727
1728 static bool dst_can_be_shared;
1729
1730 /* Return a copy of variable VAR and insert it into dataflow set SET. */
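/* This implements copy-on-write for shared variable entries: the new copy
   starts with a refcount of 1, VAR's refcount is decremented, and the slot
   in SET is redirected to the copy, so other dataflow sets that still
   share VAR are unaffected.  */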
1731
1732 static variable **
1733 unshare_variable (dataflow_set *set, variable **slot, variable *var,
1734 enum var_init_status initialized)
1735 {
1736 variable *new_var;
1737 int i;
1738
1739 new_var = onepart_pool_allocate (var->onepart);
1740 new_var->dv = var->dv;
1741 new_var->refcount = 1;
1742 var->refcount--;
1743 new_var->n_var_parts = var->n_var_parts;
1744 new_var->onepart = var->onepart;
1745 new_var->in_changed_variables = false;
1746
1747 if (! flag_var_tracking_uninit)
1748 initialized = VAR_INIT_STATUS_INITIALIZED;
1749
1750 for (i = 0; i < var->n_var_parts; i++)
1751 {
1752 location_chain *node;
1753 location_chain **nextp;
1754
1755 if (i == 0 && var->onepart)
1756 {
1757 /* One-part auxiliary data is only used while emitting
1758 notes, so propagate it to the new variable in the active
1759 dataflow set. If we're not emitting notes, this will be
1760 a no-op. */
1761 gcc_checking_assert (!VAR_LOC_1PAUX (var) || emit_notes);
1762 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (var);
1763 VAR_LOC_1PAUX (var) = NULL;
1764 }
1765 else
1766 VAR_PART_OFFSET (new_var, i) = VAR_PART_OFFSET (var, i);
1767 nextp = &new_var->var_part[i].loc_chain;
1768 for (node = var->var_part[i].loc_chain; node; node = node->next)
1769 {
1770 location_chain *new_lc;
1771
1772 new_lc = new location_chain;
1773 new_lc->next = NULL;
1774 if (node->init > initialized)
1775 new_lc->init = node->init;
1776 else
1777 new_lc->init = initialized;
1778 if (node->set_src && !(MEM_P (node->set_src)))
1779 new_lc->set_src = node->set_src;
1780 else
1781 new_lc->set_src = NULL;
1782 new_lc->loc = node->loc;
1783
1784 *nextp = new_lc;
1785 nextp = &new_lc->next;
1786 }
1787
1788 new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
1789 }
1790
1791 dst_can_be_shared = false;
1792 if (shared_hash_shared (set->vars))
1793 slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
1794 else if (set->traversed_vars && set->vars != set->traversed_vars)
1795 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
1796 *slot = new_var;
1797 if (var->in_changed_variables)
1798 {
1799 variable **cslot
1800 = changed_variables->find_slot_with_hash (var->dv,
1801 dv_htab_hash (var->dv),
1802 NO_INSERT);
1803 gcc_assert (*cslot == (void *) var);
1804 var->in_changed_variables = false;
1805 variable_htab_free (var);
1806 *cslot = new_var;
1807 new_var->in_changed_variables = true;
1808 }
1809 return slot;
1810 }
1811
1812 /* Copy all variables from hash table SRC to hash table DST. */
1813
1814 static void
1815 vars_copy (variable_table_type *dst, variable_table_type *src)
1816 {
1817 variable_iterator_type hi;
1818 variable *var;
1819
1820 FOR_EACH_HASH_TABLE_ELEMENT (*src, var, variable, hi)
1821 {
1822 variable **dstp;
1823 var->refcount++;
1824 dstp = dst->find_slot_with_hash (var->dv, dv_htab_hash (var->dv),
1825 INSERT);
1826 *dstp = var;
1827 }
1828 }
1829
1830 /* Map a decl to its main debug decl. */
1831
1832 static inline tree
1833 var_debug_decl (tree decl)
1834 {
1835 if (decl && VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
1836 {
1837 tree debugdecl = DECL_DEBUG_EXPR (decl);
1838 if (DECL_P (debugdecl))
1839 decl = debugdecl;
1840 }
1841
1842 return decl;
1843 }
1844
1845 /* Set the register LOC to contain DV, OFFSET. */
1846
1847 static void
1848 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1849 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1850 enum insert_option iopt)
1851 {
1852 attrs *node;
1853 bool decl_p = dv_is_decl_p (dv);
1854
1855 if (decl_p)
1856 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1857
1858 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1859 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1860 && node->offset == offset)
1861 break;
1862 if (!node)
1863 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1864 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1865 }
1866
1867 /* Return true if we should track a location that is OFFSET bytes from
1868 a variable. Store the constant offset in *OFFSET_OUT if so. */
1869
1870 static bool
1871 track_offset_p (poly_int64 offset, HOST_WIDE_INT *offset_out)
1872 {
1873 HOST_WIDE_INT const_offset;
1874 if (!offset.is_constant (&const_offset)
1875 || !IN_RANGE (const_offset, 0, MAX_VAR_PARTS - 1))
1876 return false;
1877 *offset_out = const_offset;
1878 return true;
1879 }
1880
1881 /* Return the offset of a register that track_offset_p says we
1882 should track. */
1883
1884 static HOST_WIDE_INT
1885 get_tracked_reg_offset (rtx loc)
1886 {
1887 HOST_WIDE_INT offset;
1888 if (!track_offset_p (REG_OFFSET (loc), &offset))
1889 gcc_unreachable ();
1890 return offset;
1891 }
1892
1893 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1894
1895 static void
1896 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1897 rtx set_src)
1898 {
1899 tree decl = REG_EXPR (loc);
1900 HOST_WIDE_INT offset = get_tracked_reg_offset (loc);
1901
1902 var_reg_decl_set (set, loc, initialized,
1903 dv_from_decl (decl), offset, set_src, INSERT);
1904 }
1905
1906 static enum var_init_status
1907 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1908 {
1909 variable *var;
1910 int i;
1911 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1912
1913 if (! flag_var_tracking_uninit)
1914 return VAR_INIT_STATUS_INITIALIZED;
1915
1916 var = shared_hash_find (set->vars, dv);
1917 if (var)
1918 {
1919 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1920 {
1921 location_chain *nextp;
1922 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1923 if (rtx_equal_p (nextp->loc, loc))
1924 {
1925 ret_val = nextp->init;
1926 break;
1927 }
1928 }
1929 }
1930
1931 return ret_val;
1932 }
1933
1934 /* Delete current content of register LOC in dataflow set SET and set
1935 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1936 MODIFY is true, any other live copies of the same variable part are
1937 also deleted from the dataflow set, otherwise the variable part is
1938 assumed to be copied from another location holding the same
1939 part. */
1940
1941 static void
1942 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1943 enum var_init_status initialized, rtx set_src)
1944 {
1945 tree decl = REG_EXPR (loc);
1946 HOST_WIDE_INT offset = get_tracked_reg_offset (loc);
1947 attrs *node, *next;
1948 attrs **nextp;
1949
1950 decl = var_debug_decl (decl);
1951
1952 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1953 initialized = get_init_value (set, loc, dv_from_decl (decl));
1954
1955 nextp = &set->regs[REGNO (loc)];
1956 for (node = *nextp; node; node = next)
1957 {
1958 next = node->next;
1959 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1960 {
1961 delete_variable_part (set, node->loc, node->dv, node->offset);
1962 delete node;
1963 *nextp = next;
1964 }
1965 else
1966 {
1967 node->loc = loc;
1968 nextp = &node->next;
1969 }
1970 }
1971 if (modify)
1972 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1973 var_reg_set (set, loc, initialized, set_src);
1974 }
1975
1976 /* Delete the association of register LOC in dataflow set SET with any
1977 variables that aren't onepart. If CLOBBER is true, also delete any
1978 other live copies of the same variable part, and delete the
1979 association with onepart dvs too. */
1980
1981 static void
1982 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1983 {
1984 attrs **nextp = &set->regs[REGNO (loc)];
1985 attrs *node, *next;
1986
1987 HOST_WIDE_INT offset;
1988 if (clobber && track_offset_p (REG_OFFSET (loc), &offset))
1989 {
1990 tree decl = REG_EXPR (loc);
1991
1992 decl = var_debug_decl (decl);
1993
1994 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1995 }
1996
1997 for (node = *nextp; node; node = next)
1998 {
1999 next = node->next;
2000 if (clobber || !dv_onepart_p (node->dv))
2001 {
2002 delete_variable_part (set, node->loc, node->dv, node->offset);
2003 delete node;
2004 *nextp = next;
2005 }
2006 else
2007 nextp = &node->next;
2008 }
2009 }
2010
2011 /* Delete content of register with number REGNO in dataflow set SET. */
2012
2013 static void
2014 var_regno_delete (dataflow_set *set, int regno)
2015 {
2016 attrs **reg = &set->regs[regno];
2017 attrs *node, *next;
2018
2019 for (node = *reg; node; node = next)
2020 {
2021 next = node->next;
2022 delete_variable_part (set, node->loc, node->dv, node->offset);
2023 delete node;
2024 }
2025 *reg = NULL;
2026 }
2027
2028 /* Return true if I is the negated value of a power of two. */
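/* For example, -8 (the mask of an (and ... (const_int -8)) used to align
   an address to 8 bytes) qualifies, since -(-8) == 8 is a power of two,
   whereas -6 does not.  */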
2029 static bool
2030 negative_power_of_two_p (HOST_WIDE_INT i)
2031 {
2032 unsigned HOST_WIDE_INT x = -(unsigned HOST_WIDE_INT)i;
2033 return pow2_or_zerop (x);
2034 }
2035
2036 /* Strip constant offsets and alignments off of LOC. Return the base
2037 expression. */
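/* For instance, given something like
     (and:P (plus:P (value:P V) (const_int 13)) (const_int -8))
   both the alignment AND and the constant PLUS are stripped and the base
   VALUE V is returned.  (V and the constants here are only an
   illustration.)  */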
2038
2039 static rtx
2040 vt_get_canonicalize_base (rtx loc)
2041 {
2042 while ((GET_CODE (loc) == PLUS
2043 || GET_CODE (loc) == AND)
2044 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2045 && (GET_CODE (loc) != AND
2046 || negative_power_of_two_p (INTVAL (XEXP (loc, 1)))))
2047 loc = XEXP (loc, 0);
2048
2049 return loc;
2050 }
2051
2052 /* This caches canonicalized addresses for VALUEs, computed using
2053 information in the global cselib table. */
2054 static hash_map<rtx, rtx> *global_get_addr_cache;
2055
2056 /* This caches canonicalized addresses for VALUEs, computed using
2057 information from the global cache and information pertaining to a
2058 basic block being analyzed. */
2059 static hash_map<rtx, rtx> *local_get_addr_cache;
2060
2061 static rtx vt_canonicalize_addr (dataflow_set *, rtx);
2062
2063 /* Return the canonical address for LOC, which must be a VALUE, using a
2064 cached global equivalence or computing it and storing it in the
2065 global cache. */
2066
2067 static rtx
2068 get_addr_from_global_cache (rtx const loc)
2069 {
2070 rtx x;
2071
2072 gcc_checking_assert (GET_CODE (loc) == VALUE);
2073
2074 bool existed;
2075 rtx *slot = &global_get_addr_cache->get_or_insert (loc, &existed);
2076 if (existed)
2077 return *slot;
2078
2079 x = canon_rtx (get_addr (loc));
2080
2081 /* Tentative, avoiding infinite recursion. */
2082 *slot = x;
2083
2084 if (x != loc)
2085 {
2086 rtx nx = vt_canonicalize_addr (NULL, x);
2087 if (nx != x)
2088 {
2089 /* The table may have moved during recursion, recompute
2090 SLOT. */
2091 *global_get_addr_cache->get (loc) = x = nx;
2092 }
2093 }
2094
2095 return x;
2096 }
2097
2098 /* Return the canonical address for LOC, which must be a VALUE, using a
2099 cached local equivalence or computing it and storing it in the
2100 local cache. */
2101
2102 static rtx
2103 get_addr_from_local_cache (dataflow_set *set, rtx const loc)
2104 {
2105 rtx x;
2106 decl_or_value dv;
2107 variable *var;
2108 location_chain *l;
2109
2110 gcc_checking_assert (GET_CODE (loc) == VALUE);
2111
2112 bool existed;
2113 rtx *slot = &local_get_addr_cache->get_or_insert (loc, &existed);
2114 if (existed)
2115 return *slot;
2116
2117 x = get_addr_from_global_cache (loc);
2118
2119 /* Tentative, avoiding infinite recursion. */
2120 *slot = x;
2121
2122 /* Recurse to cache local expansion of X, or if we need to search
2123 for a VALUE in the expansion. */
2124 if (x != loc)
2125 {
2126 rtx nx = vt_canonicalize_addr (set, x);
2127 if (nx != x)
2128 {
2129 slot = local_get_addr_cache->get (loc);
2130 *slot = x = nx;
2131 }
2132 return x;
2133 }
2134
2135 dv = dv_from_rtx (x);
2136 var = shared_hash_find (set->vars, dv);
2137 if (!var)
2138 return x;
2139
2140 /* Look for an improved equivalent expression. */
2141 for (l = var->var_part[0].loc_chain; l; l = l->next)
2142 {
2143 rtx base = vt_get_canonicalize_base (l->loc);
2144 if (GET_CODE (base) == VALUE
2145 && canon_value_cmp (base, loc))
2146 {
2147 rtx nx = vt_canonicalize_addr (set, l->loc);
2148 if (x != nx)
2149 {
2150 slot = local_get_addr_cache->get (loc);
2151 *slot = x = nx;
2152 }
2153 break;
2154 }
2155 }
2156
2157 return x;
2158 }
2159
2160 /* Canonicalize LOC using equivalences from SET in addition to those
2161 in the cselib static table. It expects a VALUE-based expression,
2162 and it will only substitute VALUEs with other VALUEs or
2163 function-global equivalences, so that, if two addresses have base
2164 VALUEs that are locally or globally related in ways that
2165 memrefs_conflict_p cares about, they will both canonicalize to
2166 expressions that have the same base VALUE.
2167
2168 The use of VALUEs as canonical base addresses enables the canonical
2169 RTXs to remain unchanged globally, if they resolve to a constant,
2170 or throughout a basic block otherwise, so that they can be cached
2171 and the cache need not be invalidated when REGs, MEMs or such
2172 change. */
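/* As a sketch of the effect: if V1's canonical address expands to
   (plus (value V0) (const_int 4)), then an input of
   (plus (value V1) (const_int 8)) canonicalizes to
   (plus (value V0) (const_int 12)) -- constant offsets are accumulated
   in OFST while the base is resolved through the caches, and OFST is
   added back at the end.  (V0 and V1 are hypothetical VALUEs used only
   for illustration.)  */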
2173
2174 static rtx
2175 vt_canonicalize_addr (dataflow_set *set, rtx oloc)
2176 {
2177 HOST_WIDE_INT ofst = 0;
2178 machine_mode mode = GET_MODE (oloc);
2179 rtx loc = oloc;
2180 rtx x;
2181 bool retry = true;
2182
2183 while (retry)
2184 {
2185 while (GET_CODE (loc) == PLUS
2186 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2187 {
2188 ofst += INTVAL (XEXP (loc, 1));
2189 loc = XEXP (loc, 0);
2190 }
2191
2192 /* Alignment operations can't normally be combined, so just
2193 canonicalize the base and we're done. We'll normally have
2194 only one stack alignment anyway. */
2195 if (GET_CODE (loc) == AND
2196 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2197 && negative_power_of_two_p (INTVAL (XEXP (loc, 1))))
2198 {
2199 x = vt_canonicalize_addr (set, XEXP (loc, 0));
2200 if (x != XEXP (loc, 0))
2201 loc = gen_rtx_AND (mode, x, XEXP (loc, 1));
2202 retry = false;
2203 }
2204
2205 if (GET_CODE (loc) == VALUE)
2206 {
2207 if (set)
2208 loc = get_addr_from_local_cache (set, loc);
2209 else
2210 loc = get_addr_from_global_cache (loc);
2211
2212 /* Consolidate plus_constants. */
2213 while (ofst && GET_CODE (loc) == PLUS
2214 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2215 {
2216 ofst += INTVAL (XEXP (loc, 1));
2217 loc = XEXP (loc, 0);
2218 }
2219
2220 retry = false;
2221 }
2222 else
2223 {
2224 x = canon_rtx (loc);
2225 if (retry)
2226 retry = (x != loc);
2227 loc = x;
2228 }
2229 }
2230
2231 /* Add OFST back in. */
2232 if (ofst)
2233 {
2234 /* Don't build new RTL if we can help it. */
2235 if (GET_CODE (oloc) == PLUS
2236 && XEXP (oloc, 0) == loc
2237 && INTVAL (XEXP (oloc, 1)) == ofst)
2238 return oloc;
2239
2240 loc = plus_constant (mode, loc, ofst);
2241 }
2242
2243 return loc;
2244 }
2245
2246 /* Return true iff there's a true dependence between MLOC and LOC.
2247 MADDR must be a canonicalized version of MLOC's address. */
2248
2249 static inline bool
2250 vt_canon_true_dep (dataflow_set *set, rtx mloc, rtx maddr, rtx loc)
2251 {
2252 if (GET_CODE (loc) != MEM)
2253 return false;
2254
2255 rtx addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2256 if (!canon_true_dependence (mloc, GET_MODE (mloc), maddr, loc, addr))
2257 return false;
2258
2259 return true;
2260 }
2261
2262 /* Hold parameters for the hashtab traversal function
2263 drop_overlapping_mem_locs, see below. */
2264
2265 struct overlapping_mems
2266 {
2267 dataflow_set *set;
2268 rtx loc, addr;
2269 };
2270
2271 /* Remove all MEMs that overlap with COMS->LOC from the location list
2272 of a hash table entry for a onepart variable. COMS->ADDR must be a
2273 canonicalized form of COMS->LOC's address, and COMS->LOC must be
2274 canonicalized itself. */
2275
2276 int
2277 drop_overlapping_mem_locs (variable **slot, overlapping_mems *coms)
2278 {
2279 dataflow_set *set = coms->set;
2280 rtx mloc = coms->loc, addr = coms->addr;
2281 variable *var = *slot;
2282
2283 if (var->onepart != NOT_ONEPART)
2284 {
2285 location_chain *loc, **locp;
2286 bool changed = false;
2287 rtx cur_loc;
2288
2289 gcc_assert (var->n_var_parts == 1);
2290
2291 if (shared_var_p (var, set->vars))
2292 {
2293 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
2294 if (vt_canon_true_dep (set, mloc, addr, loc->loc))
2295 break;
2296
2297 if (!loc)
2298 return 1;
2299
2300 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
2301 var = *slot;
2302 gcc_assert (var->n_var_parts == 1);
2303 }
2304
2305 if (VAR_LOC_1PAUX (var))
2306 cur_loc = VAR_LOC_FROM (var);
2307 else
2308 cur_loc = var->var_part[0].cur_loc;
2309
2310 for (locp = &var->var_part[0].loc_chain, loc = *locp;
2311 loc; loc = *locp)
2312 {
2313 if (!vt_canon_true_dep (set, mloc, addr, loc->loc))
2314 {
2315 locp = &loc->next;
2316 continue;
2317 }
2318
2319 *locp = loc->next;
2320 /* If we have deleted the location which was last emitted
2321 we have to emit a new location, so add the variable to the set
2322 of changed variables. */
2323 if (cur_loc == loc->loc)
2324 {
2325 changed = true;
2326 var->var_part[0].cur_loc = NULL;
2327 if (VAR_LOC_1PAUX (var))
2328 VAR_LOC_FROM (var) = NULL;
2329 }
2330 delete loc;
2331 }
2332
2333 if (!var->var_part[0].loc_chain)
2334 {
2335 var->n_var_parts--;
2336 changed = true;
2337 }
2338 if (changed)
2339 variable_was_changed (var, set);
2340 }
2341
2342 return 1;
2343 }
2344
2345 /* Remove from SET all VALUE bindings to MEMs that overlap with LOC. */
2346
2347 static void
2348 clobber_overlapping_mems (dataflow_set *set, rtx loc)
2349 {
2350 struct overlapping_mems coms;
2351
2352 gcc_checking_assert (GET_CODE (loc) == MEM);
2353
2354 coms.set = set;
2355 coms.loc = canon_rtx (loc);
2356 coms.addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2357
2358 set->traversed_vars = set->vars;
2359 shared_hash_htab (set->vars)
2360 ->traverse <overlapping_mems*, drop_overlapping_mem_locs> (&coms);
2361 set->traversed_vars = NULL;
2362 }
2363
2364 /* Set the location of DV, OFFSET as the MEM LOC. */
2365
2366 static void
2367 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2368 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
2369 enum insert_option iopt)
2370 {
2371 if (dv_is_decl_p (dv))
2372 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
2373
2374 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
2375 }
2376
2377 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
2378 SET to LOC.
2379 Adjust the address first if it is stack pointer based. */
2380
2381 static void
2382 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2383 rtx set_src)
2384 {
2385 tree decl = MEM_EXPR (loc);
2386 HOST_WIDE_INT offset = int_mem_offset (loc);
2387
2388 var_mem_decl_set (set, loc, initialized,
2389 dv_from_decl (decl), offset, set_src, INSERT);
2390 }
2391
2392 /* Delete and set the location part of variable MEM_EXPR (LOC) in
2393 dataflow set SET to LOC. If MODIFY is true, any other live copies
2394 of the same variable part are also deleted from the dataflow set,
2395 otherwise the variable part is assumed to be copied from another
2396 location holding the same part.
2397 Adjust the address first if it is stack pointer based. */
2398
2399 static void
2400 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
2401 enum var_init_status initialized, rtx set_src)
2402 {
2403 tree decl = MEM_EXPR (loc);
2404 HOST_WIDE_INT offset = int_mem_offset (loc);
2405
2406 clobber_overlapping_mems (set, loc);
2407 decl = var_debug_decl (decl);
2408
2409 if (initialized == VAR_INIT_STATUS_UNKNOWN)
2410 initialized = get_init_value (set, loc, dv_from_decl (decl));
2411
2412 if (modify)
2413 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
2414 var_mem_set (set, loc, initialized, set_src);
2415 }
2416
2417 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
2418 true, also delete any other live copies of the same variable part.
2419 Adjust the address first if it is stack pointer based. */
2420
2421 static void
2422 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
2423 {
2424 tree decl = MEM_EXPR (loc);
2425 HOST_WIDE_INT offset = int_mem_offset (loc);
2426
2427 clobber_overlapping_mems (set, loc);
2428 decl = var_debug_decl (decl);
2429 if (clobber)
2430 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
2431 delete_variable_part (set, loc, dv_from_decl (decl), offset);
2432 }
2433
2434 /* Return true if LOC should not be expanded for location expressions,
2435 or used in them. */
2436
2437 static inline bool
2438 unsuitable_loc (rtx loc)
2439 {
2440 switch (GET_CODE (loc))
2441 {
2442 case PC:
2443 case SCRATCH:
2444 case CC0:
2445 case ASM_INPUT:
2446 case ASM_OPERANDS:
2447 return true;
2448
2449 default:
2450 return false;
2451 }
2452 }
2453
2454 /* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values
2455 bound to it. */
2456
2457 static inline void
2458 val_bind (dataflow_set *set, rtx val, rtx loc, bool modified)
2459 {
2460 if (REG_P (loc))
2461 {
2462 if (modified)
2463 var_regno_delete (set, REGNO (loc));
2464 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2465 dv_from_value (val), 0, NULL_RTX, INSERT);
2466 }
2467 else if (MEM_P (loc))
2468 {
2469 struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs;
2470
2471 if (modified)
2472 clobber_overlapping_mems (set, loc);
2473
2474 if (l && GET_CODE (l->loc) == VALUE)
2475 l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs;
2476
2477 /* If this MEM is a global constant, we don't need it in the
2478 dynamic tables. ??? We should test this before emitting the
2479 micro-op in the first place. */
2480 while (l)
2481 if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0))
2482 break;
2483 else
2484 l = l->next;
2485
2486 if (!l)
2487 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2488 dv_from_value (val), 0, NULL_RTX, INSERT);
2489 }
2490 else
2491 {
2492 /* Other kinds of equivalences are necessarily static, at least
2493 so long as we do not perform substitutions while merging
2494 expressions. */
2495 gcc_unreachable ();
2496 set_variable_part (set, loc, dv_from_value (val), 0,
2497 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2498 }
2499 }
2500
2501 /* Bind a value to a location it was just stored in. If MODIFIED
2502 holds, assume the location was modified, detaching it from any
2503 values bound to it. */
2504
2505 static void
2506 val_store (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn,
2507 bool modified)
2508 {
2509 cselib_val *v = CSELIB_VAL_PTR (val);
2510
2511 gcc_assert (cselib_preserved_value_p (v));
2512
2513 if (dump_file)
2514 {
2515 fprintf (dump_file, "%i: ", insn ? INSN_UID (insn) : 0);
2516 print_inline_rtx (dump_file, loc, 0);
2517 fprintf (dump_file, " evaluates to ");
2518 print_inline_rtx (dump_file, val, 0);
2519 if (v->locs)
2520 {
2521 struct elt_loc_list *l;
2522 for (l = v->locs; l; l = l->next)
2523 {
2524 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
2525 print_inline_rtx (dump_file, l->loc, 0);
2526 }
2527 }
2528 fprintf (dump_file, "\n");
2529 }
2530
2531 gcc_checking_assert (!unsuitable_loc (loc));
2532
2533 val_bind (set, val, loc, modified);
2534 }
2535
2536 /* Clear (canonical address) slots that reference X. */
2537
2538 bool
2539 local_get_addr_clear_given_value (rtx const &, rtx *slot, rtx x)
2540 {
2541 if (vt_get_canonicalize_base (*slot) == x)
2542 *slot = NULL;
2543 return true;
2544 }
2545
2546 /* Reset this node, detaching all its equivalences. */
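/* For example, if the value being reset has equivalences V5, V7 and
   (reg R) in its location chain, the lowest-uid VALUE (say V5) becomes
   the canonical one: V7 and R are re-attached under V5, the remaining
   links to the reset value are deleted, and finally its own variable
   part is clobbered.  (The names here are illustrative only.)  */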
2548
2549 static void
2550 val_reset (dataflow_set *set, decl_or_value dv)
2551 {
2552 variable *var = shared_hash_find (set->vars, dv) ;
2553 location_chain *node;
2554 rtx cval;
2555
2556 if (!var || !var->n_var_parts)
2557 return;
2558
2559 gcc_assert (var->n_var_parts == 1);
2560
2561 if (var->onepart == ONEPART_VALUE)
2562 {
2563 rtx x = dv_as_value (dv);
2564
2565 /* Relationships in the global cache don't change, so reset the
2566 local cache entry only. */
2567 rtx *slot = local_get_addr_cache->get (x);
2568 if (slot)
2569 {
2570 /* If the value resolved back to itself, odds are that other
2571 values may have cached it too. These entries now refer
2572 to the old X, so detach them too. Entries that used the
2573 old X but resolved to something else remain ok as long as
2574 that something else isn't also reset. */
2575 if (*slot == x)
2576 local_get_addr_cache
2577 ->traverse<rtx, local_get_addr_clear_given_value> (x);
2578 *slot = NULL;
2579 }
2580 }
2581
2582 cval = NULL;
2583 for (node = var->var_part[0].loc_chain; node; node = node->next)
2584 if (GET_CODE (node->loc) == VALUE
2585 && canon_value_cmp (node->loc, cval))
2586 cval = node->loc;
2587
2588 for (node = var->var_part[0].loc_chain; node; node = node->next)
2589 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
2590 {
2591 /* Redirect the equivalence link to the new canonical
2592 value, or simply remove it if it would point at
2593 itself. */
2594 if (cval)
2595 set_variable_part (set, cval, dv_from_value (node->loc),
2596 0, node->init, node->set_src, NO_INSERT);
2597 delete_variable_part (set, dv_as_value (dv),
2598 dv_from_value (node->loc), 0);
2599 }
2600
2601 if (cval)
2602 {
2603 decl_or_value cdv = dv_from_value (cval);
2604
2605 /* Keep the remaining values connected, accumulating links
2606 in the canonical value. */
2607 for (node = var->var_part[0].loc_chain; node; node = node->next)
2608 {
2609 if (node->loc == cval)
2610 continue;
2611 else if (GET_CODE (node->loc) == REG)
2612 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
2613 node->set_src, NO_INSERT);
2614 else if (GET_CODE (node->loc) == MEM)
2615 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
2616 node->set_src, NO_INSERT);
2617 else
2618 set_variable_part (set, node->loc, cdv, 0,
2619 node->init, node->set_src, NO_INSERT);
2620 }
2621 }
2622
2623 /* We remove this last, to make sure that the canonical value is not
2624 removed to the point of requiring reinsertion. */
2625 if (cval)
2626 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
2627
2628 clobber_variable_part (set, NULL, dv, 0, NULL);
2629 }
2630
2631 /* Find the values in a given location and map VAL to another
2632 value, if it is unique, or add the location as one holding
2633 the value. */
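/* For instance, when LOC is a register that already holds another VALUE
   of the same mode, the two VALUEs are recorded as equivalent to each
   other; only if no such VALUE is found is LOC itself bound as a
   location of VAL via val_bind.  */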
2634
2635 static void
2636 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn)
2637 {
2638 decl_or_value dv = dv_from_value (val);
2639
2640 if (dump_file && (dump_flags & TDF_DETAILS))
2641 {
2642 if (insn)
2643 fprintf (dump_file, "%i: ", INSN_UID (insn));
2644 else
2645 fprintf (dump_file, "head: ");
2646 print_inline_rtx (dump_file, val, 0);
2647 fputs (" is at ", dump_file);
2648 print_inline_rtx (dump_file, loc, 0);
2649 fputc ('\n', dump_file);
2650 }
2651
2652 val_reset (set, dv);
2653
2654 gcc_checking_assert (!unsuitable_loc (loc));
2655
2656 if (REG_P (loc))
2657 {
2658 attrs *node, *found = NULL;
2659
2660 for (node = set->regs[REGNO (loc)]; node; node = node->next)
2661 if (dv_is_value_p (node->dv)
2662 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
2663 {
2664 found = node;
2665
2666 /* Map incoming equivalences. ??? Wouldn't it be nice if
2667 we just started sharing the location lists? Maybe a
2668 circular list ending at the value itself or some
2669 such. */
2670 set_variable_part (set, dv_as_value (node->dv),
2671 dv_from_value (val), node->offset,
2672 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2673 set_variable_part (set, val, node->dv, node->offset,
2674 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2675 }
2676
2677 /* If we didn't find any equivalence, we need to remember that
2678 this value is held in the named register. */
2679 if (found)
2680 return;
2681 }
2682 /* ??? Attempt to find and merge equivalent MEMs or other
2683 expressions too. */
2684
2685 val_bind (set, val, loc, false);
2686 }
2687
2688 /* Initialize dataflow set SET to be empty. */
2690
2691 static void
2692 dataflow_set_init (dataflow_set *set)
2693 {
2694 init_attrs_list_set (set->regs);
2695 set->vars = shared_hash_copy (empty_shared_hash);
2696 set->stack_adjust = 0;
2697 set->traversed_vars = NULL;
2698 }
2699
2700 /* Delete the contents of dataflow set SET. */
2701
2702 static void
2703 dataflow_set_clear (dataflow_set *set)
2704 {
2705 int i;
2706
2707 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2708 attrs_list_clear (&set->regs[i]);
2709
2710 shared_hash_destroy (set->vars);
2711 set->vars = shared_hash_copy (empty_shared_hash);
2712 }
2713
2714 /* Copy the contents of dataflow set SRC to DST. */
2715
2716 static void
2717 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
2718 {
2719 int i;
2720
2721 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2722 attrs_list_copy (&dst->regs[i], src->regs[i]);
2723
2724 shared_hash_destroy (dst->vars);
2725 dst->vars = shared_hash_copy (src->vars);
2726 dst->stack_adjust = src->stack_adjust;
2727 }
2728
2729 /* Information for merging lists of locations for a given offset of a
2730 variable. */
2731 struct variable_union_info
2732 {
2733 /* Node of the location chain. */
2734 location_chain *lc;
2735
2736 /* The sum of positions in the input chains. */
2737 int pos;
2738
2739 /* The position in the chain of DST dataflow set. */
2740 int pos_dst;
2741 };
2742
2743 /* Buffer for location list sorting and its allocated size. */
2744 static struct variable_union_info *vui_vec;
2745 static int vui_allocated;
2746
2747 /* Comparison function for qsort; order the structures by the POS element. */
2748
2749 static int
2750 variable_union_info_cmp_pos (const void *n1, const void *n2)
2751 {
2752 const struct variable_union_info *const i1 =
2753 (const struct variable_union_info *) n1;
2754 const struct variable_union_info *const i2 =
2755 ( const struct variable_union_info *) n2;
2756
2757 if (i1->pos != i2->pos)
2758 return i1->pos - i2->pos;
2759
2760 return (i1->pos_dst - i2->pos_dst);
2761 }
2762
2763 /* Compute union of location parts of variable SRC and the same variable
2764 from dataflow set SET. Compute "sorted" union of the location chains
2765 for common offsets, i.e. the locations of a variable part are sorted by
2766 a priority where the priority is the sum of the positions in the 2 chains
2767 (if a location is only in one list the position in the second list is
2768 defined to be larger than the length of the chains).
2769 When we are updating the location parts the newest location is in the
2770 beginning of the chain, so when we do the described "sorted" union
2771 we keep the newest locations in the beginning. */
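/* A small worked example of the "sorted" union (names hypothetical):
   with DST chain (A, B) and SRC chain (B, A), A gets priority 0+1 and B
   gets 1+0; the tie is broken by the position in DST, so the result is
   (A, B).  With DST chain (A, C) and SRC chain (B, A), C and B are each
   present in only one chain and get priorities past the chain lengths,
   yielding (A, B, C).  */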
2772
2773 static int
2774 variable_union (variable *src, dataflow_set *set)
2775 {
2776 variable *dst;
2777 variable **dstp;
2778 int i, j, k;
2779
2780 dstp = shared_hash_find_slot (set->vars, src->dv);
2781 if (!dstp || !*dstp)
2782 {
2783 src->refcount++;
2784
2785 dst_can_be_shared = false;
2786 if (!dstp)
2787 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
2788
2789 *dstp = src;
2790
2791 /* Continue traversing the hash table. */
2792 return 1;
2793 }
2794 else
2795 dst = *dstp;
2796
2797 gcc_assert (src->n_var_parts);
2798 gcc_checking_assert (src->onepart == dst->onepart);
2799
2800 /* We can combine one-part variables very efficiently, because their
2801 entries are in canonical order. */
2802 if (src->onepart)
2803 {
2804 location_chain **nodep, *dnode, *snode;
2805
2806 gcc_assert (src->n_var_parts == 1
2807 && dst->n_var_parts == 1);
2808
2809 snode = src->var_part[0].loc_chain;
2810 gcc_assert (snode);
2811
2812 restart_onepart_unshared:
2813 nodep = &dst->var_part[0].loc_chain;
2814 dnode = *nodep;
2815 gcc_assert (dnode);
2816
2817 while (snode)
2818 {
2819 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
2820
2821 if (r > 0)
2822 {
2823 location_chain *nnode;
2824
2825 if (shared_var_p (dst, set->vars))
2826 {
2827 dstp = unshare_variable (set, dstp, dst,
2828 VAR_INIT_STATUS_INITIALIZED);
2829 dst = *dstp;
2830 goto restart_onepart_unshared;
2831 }
2832
2833 *nodep = nnode = new location_chain;
2834 nnode->loc = snode->loc;
2835 nnode->init = snode->init;
2836 if (!snode->set_src || MEM_P (snode->set_src))
2837 nnode->set_src = NULL;
2838 else
2839 nnode->set_src = snode->set_src;
2840 nnode->next = dnode;
2841 dnode = nnode;
2842 }
2843 else if (r == 0)
2844 gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc));
2845
2846 if (r >= 0)
2847 snode = snode->next;
2848
2849 nodep = &dnode->next;
2850 dnode = *nodep;
2851 }
2852
2853 return 1;
2854 }
2855
2856 gcc_checking_assert (!src->onepart);
2857
2858 /* Count the number of location parts, result is K. */
2859 for (i = 0, j = 0, k = 0;
2860 i < src->n_var_parts && j < dst->n_var_parts; k++)
2861 {
2862 if (VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2863 {
2864 i++;
2865 j++;
2866 }
2867 else if (VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
2868 i++;
2869 else
2870 j++;
2871 }
2872 k += src->n_var_parts - i;
2873 k += dst->n_var_parts - j;
2874
2875 /* We track only variables whose size is <= MAX_VAR_PARTS bytes,
2876 thus there are at most MAX_VAR_PARTS different offsets. */
2877 gcc_checking_assert (dst->onepart ? k == 1 : k <= MAX_VAR_PARTS);
2878
2879 if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
2880 {
2881 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
2882 dst = *dstp;
2883 }
2884
2885 i = src->n_var_parts - 1;
2886 j = dst->n_var_parts - 1;
2887 dst->n_var_parts = k;
2888
2889 for (k--; k >= 0; k--)
2890 {
2891 location_chain *node, *node2;
2892
2893 if (i >= 0 && j >= 0
2894 && VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2895 {
2896 /* Compute the "sorted" union of the chains, i.e. the locations which
2897 are in both chains go first; they are sorted by the sum of
2898 positions in the chains. */
2899 int dst_l, src_l;
2900 int ii, jj, n;
2901 struct variable_union_info *vui;
2902
2903 /* If DST is shared, compare the location chains.
2904 If they are different we will modify the chain in DST with
2905 high probability, so make a copy of DST. */
2906 if (shared_var_p (dst, set->vars))
2907 {
2908 for (node = src->var_part[i].loc_chain,
2909 node2 = dst->var_part[j].loc_chain; node && node2;
2910 node = node->next, node2 = node2->next)
2911 {
2912 if (!((REG_P (node2->loc)
2913 && REG_P (node->loc)
2914 && REGNO (node2->loc) == REGNO (node->loc))
2915 || rtx_equal_p (node2->loc, node->loc)))
2916 {
2917 if (node2->init < node->init)
2918 node2->init = node->init;
2919 break;
2920 }
2921 }
2922 if (node || node2)
2923 {
2924 dstp = unshare_variable (set, dstp, dst,
2925 VAR_INIT_STATUS_UNKNOWN);
2926 dst = (variable *)*dstp;
2927 }
2928 }
2929
2930 src_l = 0;
2931 for (node = src->var_part[i].loc_chain; node; node = node->next)
2932 src_l++;
2933 dst_l = 0;
2934 for (node = dst->var_part[j].loc_chain; node; node = node->next)
2935 dst_l++;
2936
2937 if (dst_l == 1)
2938 {
2939 /* The most common case, much simpler, no qsort is needed. */
2940 location_chain *dstnode = dst->var_part[j].loc_chain;
2941 dst->var_part[k].loc_chain = dstnode;
2942 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
2943 node2 = dstnode;
2944 for (node = src->var_part[i].loc_chain; node; node = node->next)
2945 if (!((REG_P (dstnode->loc)
2946 && REG_P (node->loc)
2947 && REGNO (dstnode->loc) == REGNO (node->loc))
2948 || rtx_equal_p (dstnode->loc, node->loc)))
2949 {
2950 location_chain *new_node;
2951
2952 /* Copy the location from SRC. */
2953 new_node = new location_chain;
2954 new_node->loc = node->loc;
2955 new_node->init = node->init;
2956 if (!node->set_src || MEM_P (node->set_src))
2957 new_node->set_src = NULL;
2958 else
2959 new_node->set_src = node->set_src;
2960 node2->next = new_node;
2961 node2 = new_node;
2962 }
2963 node2->next = NULL;
2964 }
2965 else
2966 {
2967 if (src_l + dst_l > vui_allocated)
2968 {
2969 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
2970 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2971 vui_allocated);
2972 }
2973 vui = vui_vec;
2974
2975 /* Fill in the locations from DST. */
2976 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2977 node = node->next, jj++)
2978 {
2979 vui[jj].lc = node;
2980 vui[jj].pos_dst = jj;
2981
2982 /* Position plus a value larger than the sum of any two valid positions. */
2983 vui[jj].pos = jj + src_l + dst_l;
2984 }
2985
2986 /* Fill in the locations from SRC. */
2987 n = dst_l;
2988 for (node = src->var_part[i].loc_chain, ii = 0; node;
2989 node = node->next, ii++)
2990 {
2991 /* Find location from NODE. */
2992 for (jj = 0; jj < dst_l; jj++)
2993 {
2994 if ((REG_P (vui[jj].lc->loc)
2995 && REG_P (node->loc)
2996 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2997 || rtx_equal_p (vui[jj].lc->loc, node->loc))
2998 {
2999 vui[jj].pos = jj + ii;
3000 break;
3001 }
3002 }
3003 if (jj >= dst_l) /* The location has not been found. */
3004 {
3005 location_chain *new_node;
3006
3007 /* Copy the location from SRC. */
3008 new_node = new location_chain;
3009 new_node->loc = node->loc;
3010 new_node->init = node->init;
3011 if (!node->set_src || MEM_P (node->set_src))
3012 new_node->set_src = NULL;
3013 else
3014 new_node->set_src = node->set_src;
3015 vui[n].lc = new_node;
3016 vui[n].pos_dst = src_l + dst_l;
3017 vui[n].pos = ii + src_l + dst_l;
3018 n++;
3019 }
3020 }
3021
3022 if (dst_l == 2)
3023 {
3024 /* A special case that is still a very common case. For dst_l == 2,
3025 all entries dst_l ... n-1 are already sorted, with
3026 vui[i].pos == i + src_l + dst_l for i >= dst_l. */
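		  /* E.g. if the two DST-derived entries have priorities 7
		     and 3, the chain is relinked starting with entry 1,
		     then 0, followed by the already-sorted SRC-only
		     entries; the branches below just pick among the few
		     possible interleavings without calling qsort.  */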
3027 if (vui[0].pos > vui[1].pos)
3028 {
3029 /* Order should be 1, 0, 2... */
3030 dst->var_part[k].loc_chain = vui[1].lc;
3031 vui[1].lc->next = vui[0].lc;
3032 if (n >= 3)
3033 {
3034 vui[0].lc->next = vui[2].lc;
3035 vui[n - 1].lc->next = NULL;
3036 }
3037 else
3038 vui[0].lc->next = NULL;
3039 ii = 3;
3040 }
3041 else
3042 {
3043 dst->var_part[k].loc_chain = vui[0].lc;
3044 if (n >= 3 && vui[2].pos < vui[1].pos)
3045 {
3046 /* Order should be 0, 2, 1, 3... */
3047 vui[0].lc->next = vui[2].lc;
3048 vui[2].lc->next = vui[1].lc;
3049 if (n >= 4)
3050 {
3051 vui[1].lc->next = vui[3].lc;
3052 vui[n - 1].lc->next = NULL;
3053 }
3054 else
3055 vui[1].lc->next = NULL;
3056 ii = 4;
3057 }
3058 else
3059 {
3060 /* Order should be 0, 1, 2... */
3061 ii = 1;
3062 vui[n - 1].lc->next = NULL;
3063 }
3064 }
3065 for (; ii < n; ii++)
3066 vui[ii - 1].lc->next = vui[ii].lc;
3067 }
3068 else
3069 {
3070 qsort (vui, n, sizeof (struct variable_union_info),
3071 variable_union_info_cmp_pos);
3072
3073 /* Reconnect the nodes in sorted order. */
3074 for (ii = 1; ii < n; ii++)
3075 vui[ii - 1].lc->next = vui[ii].lc;
3076 vui[n - 1].lc->next = NULL;
3077 dst->var_part[k].loc_chain = vui[0].lc;
3078 }
3079
3080 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
3081 }
3082 i--;
3083 j--;
3084 }
3085 else if ((i >= 0 && j >= 0
3086 && VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
3087 || i < 0)
3088 {
3089 dst->var_part[k] = dst->var_part[j];
3090 j--;
3091 }
3092 else if ((i >= 0 && j >= 0
3093 && VAR_PART_OFFSET (src, i) > VAR_PART_OFFSET (dst, j))
3094 || j < 0)
3095 {
3096 location_chain **nextp;
3097
3098 /* Copy the chain from SRC. */
3099 nextp = &dst->var_part[k].loc_chain;
3100 for (node = src->var_part[i].loc_chain; node; node = node->next)
3101 {
3102 location_chain *new_lc;
3103
3104 new_lc = new location_chain;
3105 new_lc->next = NULL;
3106 new_lc->init = node->init;
3107 if (!node->set_src || MEM_P (node->set_src))
3108 new_lc->set_src = NULL;
3109 else
3110 new_lc->set_src = node->set_src;
3111 new_lc->loc = node->loc;
3112
3113 *nextp = new_lc;
3114 nextp = &new_lc->next;
3115 }
3116
3117 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (src, i);
3118 i--;
3119 }
3120 dst->var_part[k].cur_loc = NULL;
3121 }
3122
3123 if (flag_var_tracking_uninit)
3124 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
3125 {
3126 location_chain *node, *node2;
3127 for (node = src->var_part[i].loc_chain; node; node = node->next)
3128 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
3129 if (rtx_equal_p (node->loc, node2->loc))
3130 {
3131 if (node->init > node2->init)
3132 node2->init = node->init;
3133 }
3134 }
3135
3136 /* Continue traversing the hash table. */
3137 return 1;
3138 }
3139
3140 /* Compute union of dataflow sets SRC and DST and store it to DST. */
3141
3142 static void
3143 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
3144 {
3145 int i;
3146
3147 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3148 attrs_list_union (&dst->regs[i], src->regs[i]);
3149
3150 if (dst->vars == empty_shared_hash)
3151 {
3152 shared_hash_destroy (dst->vars);
3153 dst->vars = shared_hash_copy (src->vars);
3154 }
3155 else
3156 {
3157 variable_iterator_type hi;
3158 variable *var;
3159
3160 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (src->vars),
3161 var, variable, hi)
3162 variable_union (var, dst);
3163 }
3164 }
3165
3166 /* Whether the value is currently being expanded. */
3167 #define VALUE_RECURSED_INTO(x) \
3168 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
3169
3170 /* Whether no expansion was found, saving useless lookups.
3171 It must only be set when VALUE_CHANGED is clear. */
3172 #define NO_LOC_P(x) \
3173 (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)
3174
3175 /* Whether cur_loc in the value needs to be (re)computed. */
3176 #define VALUE_CHANGED(x) \
3177 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
3178 /* Whether cur_loc in the decl needs to be (re)computed. */
3179 #define DECL_CHANGED(x) TREE_VISITED (x)
3180
3181 /* Record (if NEWV) that DV needs to have its cur_loc recomputed. For
3182 user DECLs, this means they're in changed_variables. Values and
3183 debug exprs may be left with this flag set if no user variable
3184 requires them to be evaluated. */
3185
3186 static inline void
3187 set_dv_changed (decl_or_value dv, bool newv)
3188 {
3189 switch (dv_onepart_p (dv))
3190 {
3191 case ONEPART_VALUE:
3192 if (newv)
3193 NO_LOC_P (dv_as_value (dv)) = false;
3194 VALUE_CHANGED (dv_as_value (dv)) = newv;
3195 break;
3196
3197 case ONEPART_DEXPR:
3198 if (newv)
3199 NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv))) = false;
3200 /* Fall through. */
3201
3202 default:
3203 DECL_CHANGED (dv_as_decl (dv)) = newv;
3204 break;
3205 }
3206 }
3207
3208 /* Return true if DV needs to have its cur_loc recomputed. */
3209
3210 static inline bool
3211 dv_changed_p (decl_or_value dv)
3212 {
3213 return (dv_is_value_p (dv)
3214 ? VALUE_CHANGED (dv_as_value (dv))
3215 : DECL_CHANGED (dv_as_decl (dv)));
3216 }
3217
3218 /* Return a location list node whose loc is rtx_equal to LOC, in the
3219 location list of a one-part variable or value VAR, or in that of
3220 any values recursively mentioned in the location lists. VARS must
3221 be in star-canonical form. */
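/* For example, looking up (reg R) in the chain of a non-canonical VALUE
   V7 whose only remaining entry is the canonical VALUE V3 will recurse
   into V3's chain, where all of the equivalence set's real locations are
   kept in star-canonical form.  (V3, V7 and R are illustrative.)  */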
3222
3223 static location_chain *
3224 find_loc_in_1pdv (rtx loc, variable *var, variable_table_type *vars)
3225 {
3226 location_chain *node;
3227 enum rtx_code loc_code;
3228
3229 if (!var)
3230 return NULL;
3231
3232 gcc_checking_assert (var->onepart);
3233
3234 if (!var->n_var_parts)
3235 return NULL;
3236
3237 gcc_checking_assert (loc != dv_as_opaque (var->dv));
3238
3239 loc_code = GET_CODE (loc);
3240 for (node = var->var_part[0].loc_chain; node; node = node->next)
3241 {
3242 decl_or_value dv;
3243 variable *rvar;
3244
3245 if (GET_CODE (node->loc) != loc_code)
3246 {
3247 if (GET_CODE (node->loc) != VALUE)
3248 continue;
3249 }
3250 else if (loc == node->loc)
3251 return node;
3252 else if (loc_code != VALUE)
3253 {
3254 if (rtx_equal_p (loc, node->loc))
3255 return node;
3256 continue;
3257 }
3258
3259 /* Since we're in star-canonical form, we don't need to visit
3260 non-canonical nodes: one-part variables and non-canonical
3261 values would only point back to the canonical node. */
3262 if (dv_is_value_p (var->dv)
3263 && !canon_value_cmp (node->loc, dv_as_value (var->dv)))
3264 {
3265 /* Skip all subsequent VALUEs. */
3266 while (node->next && GET_CODE (node->next->loc) == VALUE)
3267 {
3268 node = node->next;
3269 gcc_checking_assert (!canon_value_cmp (node->loc,
3270 dv_as_value (var->dv)));
3271 if (loc == node->loc)
3272 return node;
3273 }
3274 continue;
3275 }
3276
3277 gcc_checking_assert (node == var->var_part[0].loc_chain);
3278 gcc_checking_assert (!node->next);
3279
3280 dv = dv_from_value (node->loc);
3281 rvar = vars->find_with_hash (dv, dv_htab_hash (dv));
3282 return find_loc_in_1pdv (loc, rvar, vars);
3283 }
3284
3285 /* ??? Gotta look in cselib_val locations too. */
3286
3287 return NULL;
3288 }
3289
3290 /* Hash table iteration argument passed to variable_merge. */
3291 struct dfset_merge
3292 {
3293 /* The set in which the merge is to be inserted. */
3294 dataflow_set *dst;
3295 /* The set that we're iterating in. */
3296 dataflow_set *cur;
3297 /* The set that may contain the other dv we are to merge with. */
3298 dataflow_set *src;
3299 /* Number of onepart dvs in src. */
3300 int src_onepart_cnt;
3301 };
3302
3303 /* Insert LOC in *NODEP, if it's not there yet. The list must be in
3304 loc_cmp order, and it is maintained as such. */
3305
3306 static void
3307 insert_into_intersection (location_chain **nodep, rtx loc,
3308 enum var_init_status status)
3309 {
3310 location_chain *node;
3311 int r;
3312
3313 for (node = *nodep; node; nodep = &node->next, node = *nodep)
3314 if ((r = loc_cmp (node->loc, loc)) == 0)
3315 {
3316 node->init = MIN (node->init, status);
3317 return;
3318 }
3319 else if (r > 0)
3320 break;
3321
3322 node = new location_chain;
3323
3324 node->loc = loc;
3325 node->set_src = NULL;
3326 node->init = status;
3327 node->next = *nodep;
3328 *nodep = node;
3329 }
3330
3331 /* Insert in DEST the intersection of the locations present in both
3332 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
3333 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
3334 DSM->dst. */
3335
3336 static void
3337 intersect_loc_chains (rtx val, location_chain **dest, struct dfset_merge *dsm,
3338 location_chain *s1node, variable *s2var)
3339 {
3340 dataflow_set *s1set = dsm->cur;
3341 dataflow_set *s2set = dsm->src;
3342 location_chain *found;
3343
3344 if (s2var)
3345 {
3346 location_chain *s2node;
3347
3348 gcc_checking_assert (s2var->onepart);
3349
3350 if (s2var->n_var_parts)
3351 {
3352 s2node = s2var->var_part[0].loc_chain;
3353
3354 for (; s1node && s2node;
3355 s1node = s1node->next, s2node = s2node->next)
3356 if (s1node->loc != s2node->loc)
3357 break;
3358 else if (s1node->loc == val)
3359 continue;
3360 else
3361 insert_into_intersection (dest, s1node->loc,
3362 MIN (s1node->init, s2node->init));
3363 }
3364 }
3365
3366 for (; s1node; s1node = s1node->next)
3367 {
3368 if (s1node->loc == val)
3369 continue;
3370
3371 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
3372 shared_hash_htab (s2set->vars))))
3373 {
3374 insert_into_intersection (dest, s1node->loc,
3375 MIN (s1node->init, found->init));
3376 continue;
3377 }
3378
3379 if (GET_CODE (s1node->loc) == VALUE
3380 && !VALUE_RECURSED_INTO (s1node->loc))
3381 {
3382 decl_or_value dv = dv_from_value (s1node->loc);
3383 variable *svar = shared_hash_find (s1set->vars, dv);
3384 if (svar)
3385 {
3386 if (svar->n_var_parts == 1)
3387 {
3388 VALUE_RECURSED_INTO (s1node->loc) = true;
3389 intersect_loc_chains (val, dest, dsm,
3390 svar->var_part[0].loc_chain,
3391 s2var);
3392 VALUE_RECURSED_INTO (s1node->loc) = false;
3393 }
3394 }
3395 }
3396
3397 /* ??? gotta look in cselib_val locations too. */
3398
3399 /* ??? if the location is equivalent to any location in src,
3400 searched recursively
3401
3402 add to dst the values needed to represent the equivalence
3403
3404 telling whether location S is equivalent to another dv's
3405 location list:
3406
3407 for each location D in the list
3408
3409 if S and D satisfy rtx_equal_p, then it is present
3410
3411 else if D is a value, recurse without cycles
3412
3413 else if S and D have the same CODE and MODE
3414
3415 for each operand oS and the corresponding oD
3416
3417 if oS and oD are not equivalent, then S and D are not equivalent
3418
3419 else if they are RTX vectors
3420
3421 if any vector oS element is not equivalent to its respective oD,
3422 then S and D are not equivalent
3423
3424 */
3425
3426
3427 }
3428 }
3429
3430 /* Return -1 if X should be before Y in a location list for a 1-part
3431 variable, 1 if Y should be before X, and 0 if they're equivalent
3432 and should not appear in the list. */
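/* The resulting order is roughly: REGs (by REGNO), then MEMs (by their
   addresses), then VALUEs (canonical, i.e. lowest-uid, first), then other
   expressions ordered by code and operands, with ENTRY_VALUEs sorted last
   among them.  */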
3433
3434 static int
3435 loc_cmp (rtx x, rtx y)
3436 {
3437 int i, j, r;
3438 RTX_CODE code = GET_CODE (x);
3439 const char *fmt;
3440
3441 if (x == y)
3442 return 0;
3443
3444 if (REG_P (x))
3445 {
3446 if (!REG_P (y))
3447 return -1;
3448 gcc_assert (GET_MODE (x) == GET_MODE (y));
3449 if (REGNO (x) == REGNO (y))
3450 return 0;
3451 else if (REGNO (x) < REGNO (y))
3452 return -1;
3453 else
3454 return 1;
3455 }
3456
3457 if (REG_P (y))
3458 return 1;
3459
3460 if (MEM_P (x))
3461 {
3462 if (!MEM_P (y))
3463 return -1;
3464 gcc_assert (GET_MODE (x) == GET_MODE (y));
3465 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
3466 }
3467
3468 if (MEM_P (y))
3469 return 1;
3470
3471 if (GET_CODE (x) == VALUE)
3472 {
3473 if (GET_CODE (y) != VALUE)
3474 return -1;
3475 /* Don't assert the modes are the same, that is true only
3476 when not recursing. (subreg:QI (value:SI 1:1) 0)
3477 and (subreg:QI (value:DI 2:2) 0) can be compared,
3478 even when the modes are different. */
3479 if (canon_value_cmp (x, y))
3480 return -1;
3481 else
3482 return 1;
3483 }
3484
3485 if (GET_CODE (y) == VALUE)
3486 return 1;
3487
3488 /* Entry value is the least preferable kind of expression. */
3489 if (GET_CODE (x) == ENTRY_VALUE)
3490 {
3491 if (GET_CODE (y) != ENTRY_VALUE)
3492 return 1;
3493 gcc_assert (GET_MODE (x) == GET_MODE (y));
3494 return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
3495 }
3496
3497 if (GET_CODE (y) == ENTRY_VALUE)
3498 return -1;
3499
3500 if (GET_CODE (x) == GET_CODE (y))
3501 /* Compare operands below. */;
3502 else if (GET_CODE (x) < GET_CODE (y))
3503 return -1;
3504 else
3505 return 1;
3506
3507 gcc_assert (GET_MODE (x) == GET_MODE (y));
3508
3509 if (GET_CODE (x) == DEBUG_EXPR)
3510 {
3511 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3512 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
3513 return -1;
3514 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3515 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
3516 return 1;
3517 }
3518
3519 fmt = GET_RTX_FORMAT (code);
3520 for (i = 0; i < GET_RTX_LENGTH (code); i++)
3521 switch (fmt[i])
3522 {
3523 case 'w':
3524 if (XWINT (x, i) == XWINT (y, i))
3525 break;
3526 else if (XWINT (x, i) < XWINT (y, i))
3527 return -1;
3528 else
3529 return 1;
3530
3531 case 'n':
3532 case 'i':
3533 if (XINT (x, i) == XINT (y, i))
3534 break;
3535 else if (XINT (x, i) < XINT (y, i))
3536 return -1;
3537 else
3538 return 1;
3539
3540 case 'p':
3541 r = compare_sizes_for_sort (SUBREG_BYTE (x), SUBREG_BYTE (y));
3542 if (r != 0)
3543 return r;
3544 break;
3545
3546 case 'V':
3547 case 'E':
3548 /* Compare the vector length first. */
3549 if (XVECLEN (x, i) == XVECLEN (y, i))
3550 /* Compare the vector elements. */;
3551 else if (XVECLEN (x, i) < XVECLEN (y, i))
3552 return -1;
3553 else
3554 return 1;
3555
3556 for (j = 0; j < XVECLEN (x, i); j++)
3557 if ((r = loc_cmp (XVECEXP (x, i, j),
3558 XVECEXP (y, i, j))))
3559 return r;
3560 break;
3561
3562 case 'e':
3563 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
3564 return r;
3565 break;
3566
3567 case 'S':
3568 case 's':
3569 if (XSTR (x, i) == XSTR (y, i))
3570 break;
3571 if (!XSTR (x, i))
3572 return -1;
3573 if (!XSTR (y, i))
3574 return 1;
3575 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
3576 break;
3577 else if (r < 0)
3578 return -1;
3579 else
3580 return 1;
3581
3582 case 'u':
3583 /* These are just backpointers, so they don't matter. */
3584 break;
3585
3586 case '0':
3587 case 't':
3588 break;
3589
3590 /* It is believed that rtx's at this level will never
3591 contain anything but integers and other rtx's,
3592 except for within LABEL_REFs and SYMBOL_REFs. */
3593 default:
3594 gcc_unreachable ();
3595 }
3596 if (CONST_WIDE_INT_P (x))
3597 {
3598 /* Compare the vector length first. */
3599 if (CONST_WIDE_INT_NUNITS (x) > CONST_WIDE_INT_NUNITS (y))
3600 return 1;
3601 else if (CONST_WIDE_INT_NUNITS (x) < CONST_WIDE_INT_NUNITS (y))
3602 return -1;
3603
3604 /* Compare the vector elements. */;
3605 for (j = CONST_WIDE_INT_NUNITS (x) - 1; j >= 0 ; j--)
3606 {
3607 if (CONST_WIDE_INT_ELT (x, j) < CONST_WIDE_INT_ELT (y, j))
3608 return -1;
3609 if (CONST_WIDE_INT_ELT (x, j) > CONST_WIDE_INT_ELT (y, j))
3610 return 1;
3611 }
3612 }
3613
3614 return 0;
3615 }
3616
3617 /* Check the order of entries in one-part variables. */
3618
3619 int
3620 canonicalize_loc_order_check (variable **slot,
3621 dataflow_set *data ATTRIBUTE_UNUSED)
3622 {
3623 variable *var = *slot;
3624 location_chain *node, *next;
3625
3626 #ifdef ENABLE_RTL_CHECKING
3627 int i;
3628 for (i = 0; i < var->n_var_parts; i++)
3629 gcc_assert (var->var_part[0].cur_loc == NULL);
3630 gcc_assert (!var->in_changed_variables);
3631 #endif
3632
3633 if (!var->onepart)
3634 return 1;
3635
3636 gcc_assert (var->n_var_parts == 1);
3637 node = var->var_part[0].loc_chain;
3638 gcc_assert (node);
3639
3640 while ((next = node->next))
3641 {
3642 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
3643 node = next;
3644 }
3645
3646 return 1;
3647 }
3648
3649 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3650 more likely to be chosen as canonical for an equivalence set.
3651 Ensure less likely values can reach more likely neighbors, making
3652 the connections bidirectional. */
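/* For instance, if VALUE V7's location chain mentions V3 and V3 has the
   lower uid, V7 is marked with VALUE_RECURSED_INTO; conversely, if V7's
   chain mentions a less-preferred V9, V7 is added to V9's chain and V9 is
   marked, so the later star-canonicalization pass can walk from either
   side.  (The uids here are illustrative.)  */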
3653
3654 int
3655 canonicalize_values_mark (variable **slot, dataflow_set *set)
3656 {
3657 variable *var = *slot;
3658 decl_or_value dv = var->dv;
3659 rtx val;
3660 location_chain *node;
3661
3662 if (!dv_is_value_p (dv))
3663 return 1;
3664
3665 gcc_checking_assert (var->n_var_parts == 1);
3666
3667 val = dv_as_value (dv);
3668
3669 for (node = var->var_part[0].loc_chain; node; node = node->next)
3670 if (GET_CODE (node->loc) == VALUE)
3671 {
3672 if (canon_value_cmp (node->loc, val))
3673 VALUE_RECURSED_INTO (val) = true;
3674 else
3675 {
3676 decl_or_value odv = dv_from_value (node->loc);
3677 variable **oslot;
3678 oslot = shared_hash_find_slot_noinsert (set->vars, odv);
3679
3680 set_slot_part (set, val, oslot, odv, 0,
3681 node->init, NULL_RTX);
3682
3683 VALUE_RECURSED_INTO (node->loc) = true;
3684 }
3685 }
3686
3687 return 1;
3688 }
3689
3690 /* Remove redundant entries from equivalence lists in onepart
3691 variables, canonicalizing equivalence sets into star shapes. */
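/* Illustration only: an equivalence set {V1, V2, V3} recorded as a chain
   V3 -> V2 -> V1, with V1 the most canonical value, ends up with V1
   holding all the locations and V2 and V3 each holding just a link to
   V1 -- a star centered on the canonical value.  */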
3692
3693 int
3694 canonicalize_values_star (variable **slot, dataflow_set *set)
3695 {
3696 variable *var = *slot;
3697 decl_or_value dv = var->dv;
3698 location_chain *node;
3699 decl_or_value cdv;
3700 rtx val, cval;
3701 variable **cslot;
3702 bool has_value;
3703 bool has_marks;
3704
3705 if (!var->onepart)
3706 return 1;
3707
3708 gcc_checking_assert (var->n_var_parts == 1);
3709
3710 if (dv_is_value_p (dv))
3711 {
3712 cval = dv_as_value (dv);
3713 if (!VALUE_RECURSED_INTO (cval))
3714 return 1;
3715 VALUE_RECURSED_INTO (cval) = false;
3716 }
3717 else
3718 cval = NULL_RTX;
3719
3720 restart:
3721 val = cval;
3722 has_value = false;
3723 has_marks = false;
3724
3725 gcc_assert (var->n_var_parts == 1);
3726
3727 for (node = var->var_part[0].loc_chain; node; node = node->next)
3728 if (GET_CODE (node->loc) == VALUE)
3729 {
3730 has_value = true;
3731 if (VALUE_RECURSED_INTO (node->loc))
3732 has_marks = true;
3733 if (canon_value_cmp (node->loc, cval))
3734 cval = node->loc;
3735 }
3736
3737 if (!has_value)
3738 return 1;
3739
3740 if (cval == val)
3741 {
3742 if (!has_marks || dv_is_decl_p (dv))
3743 return 1;
3744
3745 /* Keep it marked so that we revisit it, either after visiting a
3746 child node, or after visiting a new parent that might be
3747 found out. */
3748 VALUE_RECURSED_INTO (val) = true;
3749
3750 for (node = var->var_part[0].loc_chain; node; node = node->next)
3751 if (GET_CODE (node->loc) == VALUE
3752 && VALUE_RECURSED_INTO (node->loc))
3753 {
3754 cval = node->loc;
3755 restart_with_cval:
3756 VALUE_RECURSED_INTO (cval) = false;
3757 dv = dv_from_value (cval);
3758 slot = shared_hash_find_slot_noinsert (set->vars, dv);
3759 if (!slot)
3760 {
3761 gcc_assert (dv_is_decl_p (var->dv));
3762 /* The canonical value was reset and dropped.
3763 Remove it. */
3764 clobber_variable_part (set, NULL, var->dv, 0, NULL);
3765 return 1;
3766 }
3767 var = *slot;
3768 gcc_assert (dv_is_value_p (var->dv));
3769 if (var->n_var_parts == 0)
3770 return 1;
3771 gcc_assert (var->n_var_parts == 1);
3772 goto restart;
3773 }
3774
3775 VALUE_RECURSED_INTO (val) = false;
3776
3777 return 1;
3778 }
3779
3780 /* Push values to the canonical one. */
3781 cdv = dv_from_value (cval);
3782 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3783
3784 for (node = var->var_part[0].loc_chain; node; node = node->next)
3785 if (node->loc != cval)
3786 {
3787 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3788 node->init, NULL_RTX);
3789 if (GET_CODE (node->loc) == VALUE)
3790 {
3791 decl_or_value ndv = dv_from_value (node->loc);
3792
3793 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3794 NO_INSERT);
3795
3796 if (canon_value_cmp (node->loc, val))
3797 {
3798 /* If it could have been a local minimum, it's not any more,
3799 since it's now neighbor to cval, so it may have to push
3800 to it. Conversely, if it wouldn't have prevailed over
3801 val, then whatever mark it has is fine: if it was to
3802 push, it will now push to a more canonical node, but if
3803 it wasn't, then it has already pushed any values it might
3804 have to. */
3805 VALUE_RECURSED_INTO (node->loc) = true;
3806 		  /* Make sure we visit node->loc by ensuring that cval is
3807 		     visited too.  */
3808 VALUE_RECURSED_INTO (cval) = true;
3809 }
3810 else if (!VALUE_RECURSED_INTO (node->loc))
3811 /* If we have no need to "recurse" into this node, it's
3812 already "canonicalized", so drop the link to the old
3813 parent. */
3814 clobber_variable_part (set, cval, ndv, 0, NULL);
3815 }
3816 else if (GET_CODE (node->loc) == REG)
3817 {
3818 attrs *list = set->regs[REGNO (node->loc)], **listp;
3819
3820 /* Change an existing attribute referring to dv so that it
3821 refers to cdv, removing any duplicate this might
3822 introduce, and checking that no previous duplicates
3823 existed, all in a single pass. */
3824
3825 while (list)
3826 {
3827 if (list->offset == 0
3828 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3829 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3830 break;
3831
3832 list = list->next;
3833 }
3834
3835 gcc_assert (list);
3836 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3837 {
3838 list->dv = cdv;
3839 for (listp = &list->next; (list = *listp); listp = &list->next)
3840 {
3841 if (list->offset)
3842 continue;
3843
3844 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3845 {
3846 *listp = list->next;
3847 delete list;
3848 list = *listp;
3849 break;
3850 }
3851
3852 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3853 }
3854 }
3855 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3856 {
3857 for (listp = &list->next; (list = *listp); listp = &list->next)
3858 {
3859 if (list->offset)
3860 continue;
3861
3862 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3863 {
3864 *listp = list->next;
3865 delete list;
3866 list = *listp;
3867 break;
3868 }
3869
3870 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3871 }
3872 }
3873 else
3874 gcc_unreachable ();
3875
3876 if (flag_checking)
3877 while (list)
3878 {
3879 if (list->offset == 0
3880 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3881 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3882 gcc_unreachable ();
3883
3884 list = list->next;
3885 }
3886 }
3887 }
3888
3889 if (val)
3890 set_slot_part (set, val, cslot, cdv, 0,
3891 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
3892
3893 slot = clobber_slot_part (set, cval, slot, 0, NULL);
3894
3895 /* Variable may have been unshared. */
3896 var = *slot;
3897 gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3898 && var->var_part[0].loc_chain->next == NULL);
3899
3900 if (VALUE_RECURSED_INTO (cval))
3901 goto restart_with_cval;
3902
3903 return 1;
3904 }
3905
3906 /* Bind one-part variables to the canonical value in an equivalence
3907 set. Not doing this causes dataflow convergence failure in rare
3908 circumstances, see PR42873. Unfortunately we can't do this
3909 efficiently as part of canonicalize_values_star, since we may not
3910 have determined or even seen the canonical value of a set when we
3911 get to a variable that references another member of the set. */
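/* Sketch with hypothetical names: if decl D's only location is VALUE V2,
   but V2's own chain starts with a more canonical V1, D is rebound to V1
   so that every one-part variable points at the center of the star.  */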
3912
3913 int
3914 canonicalize_vars_star (variable **slot, dataflow_set *set)
3915 {
3916 variable *var = *slot;
3917 decl_or_value dv = var->dv;
3918 location_chain *node;
3919 rtx cval;
3920 decl_or_value cdv;
3921 variable **cslot;
3922 variable *cvar;
3923 location_chain *cnode;
3924
3925 if (!var->onepart || var->onepart == ONEPART_VALUE)
3926 return 1;
3927
3928 gcc_assert (var->n_var_parts == 1);
3929
3930 node = var->var_part[0].loc_chain;
3931
3932 if (GET_CODE (node->loc) != VALUE)
3933 return 1;
3934
3935 gcc_assert (!node->next);
3936 cval = node->loc;
3937
3938 /* Push values to the canonical one. */
3939 cdv = dv_from_value (cval);
3940 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3941 if (!cslot)
3942 return 1;
3943 cvar = *cslot;
3944 gcc_assert (cvar->n_var_parts == 1);
3945
3946 cnode = cvar->var_part[0].loc_chain;
3947
3948 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3949    that are not "more canonical" than it.  */
3950 if (GET_CODE (cnode->loc) != VALUE
3951 || !canon_value_cmp (cnode->loc, cval))
3952 return 1;
3953
3954 /* CVAL was found to be non-canonical. Change the variable to point
3955 to the canonical VALUE. */
3956 gcc_assert (!cnode->next);
3957 cval = cnode->loc;
3958
3959 slot = set_slot_part (set, cval, slot, dv, 0,
3960 node->init, node->set_src);
3961 clobber_slot_part (set, cval, slot, 0, node->set_src);
3962
3963 return 1;
3964 }
3965
3966 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3967 corresponding entry in DSM->src. Multi-part variables are combined
3968 with variable_union, whereas onepart dvs are combined with
3969 intersection. */
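/* For example (locations hypothetical): if DSM->cur lists (reg:SI 0) and
   (mem:SI ...) for a one-part dv while DSM->src confirms only (reg:SI 0),
   the merged entry keeps just (reg:SI 0); a multi-part variable would
   instead keep the union of both lists via variable_union.  */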
3970
3971 static int
3972 variable_merge_over_cur (variable *s1var, struct dfset_merge *dsm)
3973 {
3974 dataflow_set *dst = dsm->dst;
3975 variable **dstslot;
3976 variable *s2var, *dvar = NULL;
3977 decl_or_value dv = s1var->dv;
3978 onepart_enum onepart = s1var->onepart;
3979 rtx val;
3980 hashval_t dvhash;
3981 location_chain *node, **nodep;
3982
3983 /* If the incoming onepart variable has an empty location list, then
3984 the intersection will be just as empty. For other variables,
3985 it's always union. */
3986 gcc_checking_assert (s1var->n_var_parts
3987 && s1var->var_part[0].loc_chain);
3988
3989 if (!onepart)
3990 return variable_union (s1var, dst);
3991
3992 gcc_checking_assert (s1var->n_var_parts == 1);
3993
3994 dvhash = dv_htab_hash (dv);
3995 if (dv_is_value_p (dv))
3996 val = dv_as_value (dv);
3997 else
3998 val = NULL;
3999
4000 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
4001 if (!s2var)
4002 {
4003 dst_can_be_shared = false;
4004 return 1;
4005 }
4006
4007 dsm->src_onepart_cnt--;
4008 gcc_assert (s2var->var_part[0].loc_chain
4009 && s2var->onepart == onepart
4010 && s2var->n_var_parts == 1);
4011
4012 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4013 if (dstslot)
4014 {
4015 dvar = *dstslot;
4016 gcc_assert (dvar->refcount == 1
4017 && dvar->onepart == onepart
4018 && dvar->n_var_parts == 1);
4019 nodep = &dvar->var_part[0].loc_chain;
4020 }
4021 else
4022 {
4023 nodep = &node;
4024 node = NULL;
4025 }
4026
4027 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
4028 {
4029 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
4030 dvhash, INSERT);
4031 *dstslot = dvar = s2var;
4032 dvar->refcount++;
4033 }
4034 else
4035 {
4036 dst_can_be_shared = false;
4037
4038 intersect_loc_chains (val, nodep, dsm,
4039 s1var->var_part[0].loc_chain, s2var);
4040
4041 if (!dstslot)
4042 {
4043 if (node)
4044 {
4045 dvar = onepart_pool_allocate (onepart);
4046 dvar->dv = dv;
4047 dvar->refcount = 1;
4048 dvar->n_var_parts = 1;
4049 dvar->onepart = onepart;
4050 dvar->in_changed_variables = false;
4051 dvar->var_part[0].loc_chain = node;
4052 dvar->var_part[0].cur_loc = NULL;
4053 if (onepart)
4054 VAR_LOC_1PAUX (dvar) = NULL;
4055 else
4056 VAR_PART_OFFSET (dvar, 0) = 0;
4057
4058 dstslot
4059 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
4060 INSERT);
4061 gcc_assert (!*dstslot);
4062 *dstslot = dvar;
4063 }
4064 else
4065 return 1;
4066 }
4067 }
4068
4069 nodep = &dvar->var_part[0].loc_chain;
4070 while ((node = *nodep))
4071 {
4072 location_chain **nextp = &node->next;
4073
4074 if (GET_CODE (node->loc) == REG)
4075 {
4076 attrs *list;
4077
4078 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
4079 if (GET_MODE (node->loc) == GET_MODE (list->loc)
4080 && dv_is_value_p (list->dv))
4081 break;
4082
4083 if (!list)
4084 attrs_list_insert (&dst->regs[REGNO (node->loc)],
4085 dv, 0, node->loc);
4086 /* If this value became canonical for another value that had
4087 this register, we want to leave it alone. */
4088 else if (dv_as_value (list->dv) != val)
4089 {
4090 dstslot = set_slot_part (dst, dv_as_value (list->dv),
4091 dstslot, dv, 0,
4092 node->init, NULL_RTX);
4093 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
4094
4095 /* Since nextp points into the removed node, we can't
4096 use it. The pointer to the next node moved to nodep.
4097 However, if the variable we're walking is unshared
4098 during our walk, we'll keep walking the location list
4099 of the previously-shared variable, in which case the
4100 node won't have been removed, and we'll want to skip
4101 it. That's why we test *nodep here. */
4102 if (*nodep != node)
4103 nextp = nodep;
4104 }
4105 }
4106 else
4107 /* Canonicalization puts registers first, so we don't have to
4108 walk it all. */
4109 break;
4110 nodep = nextp;
4111 }
4112
4113 if (dvar != *dstslot)
4114 dvar = *dstslot;
4115 nodep = &dvar->var_part[0].loc_chain;
4116
4117 if (val)
4118 {
4119 /* Mark all referenced nodes for canonicalization, and make sure
4120 we have mutual equivalence links. */
4121 VALUE_RECURSED_INTO (val) = true;
4122 for (node = *nodep; node; node = node->next)
4123 if (GET_CODE (node->loc) == VALUE)
4124 {
4125 VALUE_RECURSED_INTO (node->loc) = true;
4126 set_variable_part (dst, val, dv_from_value (node->loc), 0,
4127 node->init, NULL, INSERT);
4128 }
4129
4130 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4131 gcc_assert (*dstslot == dvar);
4132 canonicalize_values_star (dstslot, dst);
4133 gcc_checking_assert (dstslot
4134 == shared_hash_find_slot_noinsert_1 (dst->vars,
4135 dv, dvhash));
4136 dvar = *dstslot;
4137 }
4138 else
4139 {
4140 bool has_value = false, has_other = false;
4141
4142 /* If we have one value and anything else, we're going to
4143 canonicalize this, so make sure all values have an entry in
4144 the table and are marked for canonicalization. */
4145 for (node = *nodep; node; node = node->next)
4146 {
4147 if (GET_CODE (node->loc) == VALUE)
4148 {
4149 /* If this was marked during register canonicalization,
4150 we know we have to canonicalize values. */
4151 if (has_value)
4152 has_other = true;
4153 has_value = true;
4154 if (has_other)
4155 break;
4156 }
4157 else
4158 {
4159 has_other = true;
4160 if (has_value)
4161 break;
4162 }
4163 }
4164
4165 if (has_value && has_other)
4166 {
4167 for (node = *nodep; node; node = node->next)
4168 {
4169 if (GET_CODE (node->loc) == VALUE)
4170 {
4171 decl_or_value dv = dv_from_value (node->loc);
4172 variable **slot = NULL;
4173
4174 if (shared_hash_shared (dst->vars))
4175 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
4176 if (!slot)
4177 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
4178 INSERT);
4179 if (!*slot)
4180 {
4181 variable *var = onepart_pool_allocate (ONEPART_VALUE);
4182 var->dv = dv;
4183 var->refcount = 1;
4184 var->n_var_parts = 1;
4185 var->onepart = ONEPART_VALUE;
4186 var->in_changed_variables = false;
4187 var->var_part[0].loc_chain = NULL;
4188 var->var_part[0].cur_loc = NULL;
4189 VAR_LOC_1PAUX (var) = NULL;
4190 *slot = var;
4191 }
4192
4193 VALUE_RECURSED_INTO (node->loc) = true;
4194 }
4195 }
4196
4197 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4198 gcc_assert (*dstslot == dvar);
4199 canonicalize_values_star (dstslot, dst);
4200 gcc_checking_assert (dstslot
4201 == shared_hash_find_slot_noinsert_1 (dst->vars,
4202 dv, dvhash));
4203 dvar = *dstslot;
4204 }
4205 }
4206
4207 if (!onepart_variable_different_p (dvar, s2var))
4208 {
4209 variable_htab_free (dvar);
4210 *dstslot = dvar = s2var;
4211 dvar->refcount++;
4212 }
4213 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
4214 {
4215 variable_htab_free (dvar);
4216 *dstslot = dvar = s1var;
4217 dvar->refcount++;
4218 dst_can_be_shared = false;
4219 }
4220 else
4221 dst_can_be_shared = false;
4222
4223 return 1;
4224 }
4225
4226 /* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
4227 multi-part variable. Unions of multi-part variables and
4228 intersections of one-part ones will be handled in
4229 variable_merge_over_cur(). */
4230
4231 static int
4232 variable_merge_over_src (variable *s2var, struct dfset_merge *dsm)
4233 {
4234 dataflow_set *dst = dsm->dst;
4235 decl_or_value dv = s2var->dv;
4236
4237 if (!s2var->onepart)
4238 {
4239 variable **dstp = shared_hash_find_slot (dst->vars, dv);
4240 *dstp = s2var;
4241 s2var->refcount++;
4242 return 1;
4243 }
4244
4245 dsm->src_onepart_cnt++;
4246 return 1;
4247 }
4248
4249 /* Combine dataflow set information from SRC2 into DST; DST's previous
4250    contents serve as the other merge input and are destroyed afterwards.  */
4251
4252 static void
4253 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
4254 {
4255 dataflow_set cur = *dst;
4256 dataflow_set *src1 = &cur;
4257 struct dfset_merge dsm;
4258 int i;
4259 size_t src1_elems, src2_elems;
4260 variable_iterator_type hi;
4261 variable *var;
4262
4263 src1_elems = shared_hash_htab (src1->vars)->elements ();
4264 src2_elems = shared_hash_htab (src2->vars)->elements ();
4265 dataflow_set_init (dst);
4266 dst->stack_adjust = cur.stack_adjust;
4267 shared_hash_destroy (dst->vars);
4268 dst->vars = new shared_hash;
4269 dst->vars->refcount = 1;
4270 dst->vars->htab = new variable_table_type (MAX (src1_elems, src2_elems));
4271
4272 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4273 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
4274
4275 dsm.dst = dst;
4276 dsm.src = src2;
4277 dsm.cur = src1;
4278 dsm.src_onepart_cnt = 0;
4279
4280 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.src->vars),
4281 var, variable, hi)
4282 variable_merge_over_src (var, &dsm);
4283 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.cur->vars),
4284 var, variable, hi)
4285 variable_merge_over_cur (var, &dsm);
4286
4287 if (dsm.src_onepart_cnt)
4288 dst_can_be_shared = false;
4289
4290 dataflow_set_destroy (src1);
4291 }
4292
4293 /* Mark register equivalences. */
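/* Illustration (values hypothetical): if a register holds both V1 and V2
   in the same mode, the more canonical of the two per canon_value_cmp is
   chosen for that mode, the two values are linked to each other, and
   canonicalize_values_star then collapses the set into a star.  */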
4294
4295 static void
4296 dataflow_set_equiv_regs (dataflow_set *set)
4297 {
4298 int i;
4299 attrs *list, **listp;
4300
4301 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4302 {
4303 rtx canon[NUM_MACHINE_MODES];
4304
4305 /* If the list is empty or one entry, no need to canonicalize
4306 anything. */
4307 if (set->regs[i] == NULL || set->regs[i]->next == NULL)
4308 continue;
4309
4310 memset (canon, 0, sizeof (canon));
4311
4312 for (list = set->regs[i]; list; list = list->next)
4313 if (list->offset == 0 && dv_is_value_p (list->dv))
4314 {
4315 rtx val = dv_as_value (list->dv);
4316 rtx *cvalp = &canon[(int)GET_MODE (val)];
4317 rtx cval = *cvalp;
4318
4319 if (canon_value_cmp (val, cval))
4320 *cvalp = val;
4321 }
4322
4323 for (list = set->regs[i]; list; list = list->next)
4324 if (list->offset == 0 && dv_onepart_p (list->dv))
4325 {
4326 rtx cval = canon[(int)GET_MODE (list->loc)];
4327
4328 if (!cval)
4329 continue;
4330
4331 if (dv_is_value_p (list->dv))
4332 {
4333 rtx val = dv_as_value (list->dv);
4334
4335 if (val == cval)
4336 continue;
4337
4338 VALUE_RECURSED_INTO (val) = true;
4339 set_variable_part (set, val, dv_from_value (cval), 0,
4340 VAR_INIT_STATUS_INITIALIZED,
4341 NULL, NO_INSERT);
4342 }
4343
4344 VALUE_RECURSED_INTO (cval) = true;
4345 set_variable_part (set, cval, list->dv, 0,
4346 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
4347 }
4348
4349 for (listp = &set->regs[i]; (list = *listp);
4350 listp = list ? &list->next : listp)
4351 if (list->offset == 0 && dv_onepart_p (list->dv))
4352 {
4353 rtx cval = canon[(int)GET_MODE (list->loc)];
4354 variable **slot;
4355
4356 if (!cval)
4357 continue;
4358
4359 if (dv_is_value_p (list->dv))
4360 {
4361 rtx val = dv_as_value (list->dv);
4362 if (!VALUE_RECURSED_INTO (val))
4363 continue;
4364 }
4365
4366 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
4367 canonicalize_values_star (slot, set);
4368 if (*listp != list)
4369 list = NULL;
4370 }
4371 }
4372 }
4373
4374 /* Remove any redundant values in the location list of VAR, which must
4375 be unshared and 1-part. */
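/* E.g. a chain that ends up listing the same VALUE twice after
   substitution is trimmed to a single entry; VALUE_RECURSED_INTO serves
   as a temporary visited flag and is cleared again below.  */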
4376
4377 static void
4378 remove_duplicate_values (variable *var)
4379 {
4380 location_chain *node, **nodep;
4381
4382 gcc_assert (var->onepart);
4383 gcc_assert (var->n_var_parts == 1);
4384 gcc_assert (var->refcount == 1);
4385
4386 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
4387 {
4388 if (GET_CODE (node->loc) == VALUE)
4389 {
4390 if (VALUE_RECURSED_INTO (node->loc))
4391 {
4392 /* Remove duplicate value node. */
4393 *nodep = node->next;
4394 delete node;
4395 continue;
4396 }
4397 else
4398 VALUE_RECURSED_INTO (node->loc) = true;
4399 }
4400 nodep = &node->next;
4401 }
4402
4403 for (node = var->var_part[0].loc_chain; node; node = node->next)
4404 if (GET_CODE (node->loc) == VALUE)
4405 {
4406 gcc_assert (VALUE_RECURSED_INTO (node->loc));
4407 VALUE_RECURSED_INTO (node->loc) = false;
4408 }
4409 }
4410
4411
4412 /* Hash table iteration argument passed to variable_post_merge. */
4413 struct dfset_post_merge
4414 {
4415 /* The new input set for the current block. */
4416 dataflow_set *set;
4417 /* Pointer to the permanent input set for the current block, or
4418 NULL. */
4419 dataflow_set **permp;
4420 };
4421
4422 /* Create values for incoming expressions associated with one-part
4423 variables that don't have value numbers for them. */
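/* Sketch (register and mode illustrative): if decl D comes out of the
   merge bound directly to (reg:SI 3) with no VALUE, a value for that
   register is found in or added to the permanent set, D's location is
   rewritten to that VALUE, and the register attribute that referred to
   D directly is removed.  */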
4424
4425 int
4426 variable_post_merge_new_vals (variable **slot, dfset_post_merge *dfpm)
4427 {
4428 dataflow_set *set = dfpm->set;
4429 variable *var = *slot;
4430 location_chain *node;
4431
4432 if (!var->onepart || !var->n_var_parts)
4433 return 1;
4434
4435 gcc_assert (var->n_var_parts == 1);
4436
4437 if (dv_is_decl_p (var->dv))
4438 {
4439 bool check_dupes = false;
4440
4441 restart:
4442 for (node = var->var_part[0].loc_chain; node; node = node->next)
4443 {
4444 if (GET_CODE (node->loc) == VALUE)
4445 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
4446 else if (GET_CODE (node->loc) == REG)
4447 {
4448 attrs *att, **attp, **curp = NULL;
4449
4450 if (var->refcount != 1)
4451 {
4452 slot = unshare_variable (set, slot, var,
4453 VAR_INIT_STATUS_INITIALIZED);
4454 var = *slot;
4455 goto restart;
4456 }
4457
4458 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
4459 attp = &att->next)
4460 if (att->offset == 0
4461 && GET_MODE (att->loc) == GET_MODE (node->loc))
4462 {
4463 if (dv_is_value_p (att->dv))
4464 {
4465 rtx cval = dv_as_value (att->dv);
4466 node->loc = cval;
4467 check_dupes = true;
4468 break;
4469 }
4470 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
4471 curp = attp;
4472 }
4473
4474 if (!curp)
4475 {
4476 curp = attp;
4477 while (*curp)
4478 if ((*curp)->offset == 0
4479 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
4480 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
4481 break;
4482 else
4483 curp = &(*curp)->next;
4484 gcc_assert (*curp);
4485 }
4486
4487 if (!att)
4488 {
4489 decl_or_value cdv;
4490 rtx cval;
4491
4492 if (!*dfpm->permp)
4493 {
4494 *dfpm->permp = XNEW (dataflow_set);
4495 dataflow_set_init (*dfpm->permp);
4496 }
4497
4498 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
4499 att; att = att->next)
4500 if (GET_MODE (att->loc) == GET_MODE (node->loc))
4501 {
4502 gcc_assert (att->offset == 0
4503 && dv_is_value_p (att->dv));
4504 val_reset (set, att->dv);
4505 break;
4506 }
4507
4508 if (att)
4509 {
4510 cdv = att->dv;
4511 cval = dv_as_value (cdv);
4512 }
4513 else
4514 {
4515 /* Create a unique value to hold this register,
4516 that ought to be found and reused in
4517 subsequent rounds. */
4518 cselib_val *v;
4519 gcc_assert (!cselib_lookup (node->loc,
4520 GET_MODE (node->loc), 0,
4521 VOIDmode));
4522 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
4523 VOIDmode);
4524 cselib_preserve_value (v);
4525 cselib_invalidate_rtx (node->loc);
4526 cval = v->val_rtx;
4527 cdv = dv_from_value (cval);
4528 if (dump_file)
4529 fprintf (dump_file,
4530 "Created new value %u:%u for reg %i\n",
4531 v->uid, v->hash, REGNO (node->loc));
4532 }
4533
4534 var_reg_decl_set (*dfpm->permp, node->loc,
4535 VAR_INIT_STATUS_INITIALIZED,
4536 cdv, 0, NULL, INSERT);
4537
4538 node->loc = cval;
4539 check_dupes = true;
4540 }
4541
4542 	      /* Remove the attribute referring to the decl, which now
4543 		 uses the value for the register; that value either already
4544 		 exists or will be added when we bring the permanent set in.  */
4545 att = *curp;
4546 *curp = att->next;
4547 delete att;
4548 }
4549 }
4550
4551 if (check_dupes)
4552 remove_duplicate_values (var);
4553 }
4554
4555 return 1;
4556 }
4557
4558 /* Reset values in the permanent set that are not associated with the
4559 chosen expression. */
4560
4561 int
4562 variable_post_merge_perm_vals (variable **pslot, dfset_post_merge *dfpm)
4563 {
4564 dataflow_set *set = dfpm->set;
4565 variable *pvar = *pslot, *var;
4566 location_chain *pnode;
4567 decl_or_value dv;
4568 attrs *att;
4569
4570 gcc_assert (dv_is_value_p (pvar->dv)
4571 && pvar->n_var_parts == 1);
4572 pnode = pvar->var_part[0].loc_chain;
4573 gcc_assert (pnode
4574 && !pnode->next
4575 && REG_P (pnode->loc));
4576
4577 dv = pvar->dv;
4578
4579 var = shared_hash_find (set->vars, dv);
4580 if (var)
4581 {
4582 /* Although variable_post_merge_new_vals may have made decls
4583 non-star-canonical, values that pre-existed in canonical form
4584 remain canonical, and newly-created values reference a single
4585 REG, so they are canonical as well. Since VAR has the
4586 location list for a VALUE, using find_loc_in_1pdv for it is
4587 fine, since VALUEs don't map back to DECLs. */
4588 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
4589 return 1;
4590 val_reset (set, dv);
4591 }
4592
4593 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
4594 if (att->offset == 0
4595 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
4596 && dv_is_value_p (att->dv))
4597 break;
4598
4599 /* If there is a value associated with this register already, create
4600 an equivalence. */
4601 if (att && dv_as_value (att->dv) != dv_as_value (dv))
4602 {
4603 rtx cval = dv_as_value (att->dv);
4604 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
4605 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
4606 NULL, INSERT);
4607 }
4608 else if (!att)
4609 {
4610 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
4611 dv, 0, pnode->loc);
4612 variable_union (pvar, set);
4613 }
4614
4615 return 1;
4616 }
4617
4618 /* Fix up the merged set: create values for one-part variables' incoming
4619    expressions, process the permanent set's register values, and
      re-canonicalize values and variables.  */
4620
4621 static void
4622 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
4623 {
4624 struct dfset_post_merge dfpm;
4625
4626 dfpm.set = set;
4627 dfpm.permp = permp;
4628
4629 shared_hash_htab (set->vars)
4630 ->traverse <dfset_post_merge*, variable_post_merge_new_vals> (&dfpm);
4631 if (*permp)
4632 shared_hash_htab ((*permp)->vars)
4633 ->traverse <dfset_post_merge*, variable_post_merge_perm_vals> (&dfpm);
4634 shared_hash_htab (set->vars)
4635 ->traverse <dataflow_set *, canonicalize_values_star> (set);
4636 shared_hash_htab (set->vars)
4637 ->traverse <dataflow_set *, canonicalize_vars_star> (set);
4638 }
4639
4640 /* Return a node whose loc is a MEM that refers to EXPR in the
4641 location list of a one-part variable or value VAR, or in that of
4642 any values recursively mentioned in the location lists. */
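/* E.g. a search for a MEM whose MEM_EXPR is EXPR starting at VALUE V
   also descends into any VALUEs listed in V's chain, with
   VALUE_RECURSED_INTO preventing a revisit and hence an infinite loop.  */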
4643
4644 static location_chain *
4645 find_mem_expr_in_1pdv (tree expr, rtx val, variable_table_type *vars)
4646 {
4647 location_chain *node;
4648 decl_or_value dv;
4649 variable *var;
4650 location_chain *where = NULL;
4651
4652 if (!val)
4653 return NULL;
4654
4655 gcc_assert (GET_CODE (val) == VALUE
4656 && !VALUE_RECURSED_INTO (val));
4657
4658 dv = dv_from_value (val);
4659 var = vars->find_with_hash (dv, dv_htab_hash (dv));
4660
4661 if (!var)
4662 return NULL;
4663
4664 gcc_assert (var->onepart);
4665
4666 if (!var->n_var_parts)
4667 return NULL;
4668
4669 VALUE_RECURSED_INTO (val) = true;
4670
4671 for (node = var->var_part[0].loc_chain; node; node = node->next)
4672 if (MEM_P (node->loc)
4673 && MEM_EXPR (node->loc) == expr
4674 && int_mem_offset (node->loc) == 0)
4675 {
4676 where = node;
4677 break;
4678 }
4679 else if (GET_CODE (node->loc) == VALUE
4680 && !VALUE_RECURSED_INTO (node->loc)
4681 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
4682 break;
4683
4684 VALUE_RECURSED_INTO (val) = false;
4685
4686 return where;
4687 }
4688
4689 /* Return TRUE if the value of MEM may vary across a call. */
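/* E.g. a local whose address is never taken keeps its value across the
   call, whereas a MEM with no MEM_EXPR, a possibly aliased object, or a
   writable global is conservatively assumed to be clobbered.  */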
4690
4691 static bool
4692 mem_dies_at_call (rtx mem)
4693 {
4694 tree expr = MEM_EXPR (mem);
4695 tree decl;
4696
4697 if (!expr)
4698 return true;
4699
4700 decl = get_base_address (expr);
4701
4702 if (!decl)
4703 return true;
4704
4705 if (!DECL_P (decl))
4706 return true;
4707
4708 return (may_be_aliased (decl)
4709 || (!TREE_READONLY (decl) && is_global_var (decl)));
4710 }
4711
4712 /* Remove all MEMs from the location list of a hash table entry for a
4713 one-part variable, except those whose MEM attributes map back to
4714 the variable itself, directly or within a VALUE. */
4715
4716 int
4717 dataflow_set_preserve_mem_locs (variable **slot, dataflow_set *set)
4718 {
4719 variable *var = *slot;
4720
4721 if (var->onepart == ONEPART_VDECL || var->onepart == ONEPART_DEXPR)
4722 {
4723 tree decl = dv_as_decl (var->dv);
4724 location_chain *loc, **locp;
4725 bool changed = false;
4726
4727 if (!var->n_var_parts)
4728 return 1;
4729
4730 gcc_assert (var->n_var_parts == 1);
4731
4732 if (shared_var_p (var, set->vars))
4733 {
4734 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4735 {
4736 /* We want to remove dying MEMs that don't refer to DECL. */
4737 if (GET_CODE (loc->loc) == MEM
4738 && (MEM_EXPR (loc->loc) != decl
4739 || int_mem_offset (loc->loc) != 0)
4740 && mem_dies_at_call (loc->loc))
4741 break;
4742 /* We want to move here MEMs that do refer to DECL. */
4743 else if (GET_CODE (loc->loc) == VALUE
4744 && find_mem_expr_in_1pdv (decl, loc->loc,
4745 shared_hash_htab (set->vars)))
4746 break;
4747 }
4748
4749 if (!loc)
4750 return 1;
4751
4752 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4753 var = *slot;
4754 gcc_assert (var->n_var_parts == 1);
4755 }
4756
4757 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4758 loc; loc = *locp)
4759 {
4760 rtx old_loc = loc->loc;
4761 if (GET_CODE (old_loc) == VALUE)
4762 {
4763 location_chain *mem_node
4764 = find_mem_expr_in_1pdv (decl, loc->loc,
4765 shared_hash_htab (set->vars));
4766
4767 /* ??? This picks up only one out of multiple MEMs that
4768 refer to the same variable. Do we ever need to be
4769 concerned about dealing with more than one, or, given
4770 that they should all map to the same variable
4771 location, their addresses will have been merged and
4772 they will be regarded as equivalent? */
4773 if (mem_node)
4774 {
4775 loc->loc = mem_node->loc;
4776 loc->set_src = mem_node->set_src;
4777 loc->init = MIN (loc->init, mem_node->init);
4778 }
4779 }
4780
4781 if (GET_CODE (loc->loc) != MEM
4782 || (MEM_EXPR (loc->loc) == decl
4783 && int_mem_offset (loc->loc) == 0)
4784 || !mem_dies_at_call (loc->loc))
4785 {
4786 if (old_loc != loc->loc && emit_notes)
4787 {
4788 if (old_loc == var->var_part[0].cur_loc)
4789 {
4790 changed = true;
4791 var->var_part[0].cur_loc = NULL;
4792 }
4793 }
4794 locp = &loc->next;
4795 continue;
4796 }
4797
4798 if (emit_notes)
4799 {
4800 if (old_loc == var->var_part[0].cur_loc)
4801 {
4802 changed = true;
4803 var->var_part[0].cur_loc = NULL;
4804 }
4805 }
4806 *locp = loc->next;
4807 delete loc;
4808 }
4809
4810 if (!var->var_part[0].loc_chain)
4811 {
4812 var->n_var_parts--;
4813 changed = true;
4814 }
4815 if (changed)
4816 variable_was_changed (var, set);
4817 }
4818
4819 return 1;
4820 }
4821
4822 /* Remove all MEMs from the location list of a hash table entry for a
4823 onepart variable. */
4824
4825 int
4826 dataflow_set_remove_mem_locs (variable **slot, dataflow_set *set)
4827 {
4828 variable *var = *slot;
4829
4830 if (var->onepart != NOT_ONEPART)
4831 {
4832 location_chain *loc, **locp;
4833 bool changed = false;
4834 rtx cur_loc;
4835
4836 gcc_assert (var->n_var_parts == 1);
4837
4838 if (shared_var_p (var, set->vars))
4839 {
4840 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4841 if (GET_CODE (loc->loc) == MEM
4842 && mem_dies_at_call (loc->loc))
4843 break;
4844
4845 if (!loc)
4846 return 1;
4847
4848 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4849 var = *slot;
4850 gcc_assert (var->n_var_parts == 1);
4851 }
4852
4853 if (VAR_LOC_1PAUX (var))
4854 cur_loc = VAR_LOC_FROM (var);
4855 else
4856 cur_loc = var->var_part[0].cur_loc;
4857
4858 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4859 loc; loc = *locp)
4860 {
4861 if (GET_CODE (loc->loc) != MEM
4862 || !mem_dies_at_call (loc->loc))
4863 {
4864 locp = &loc->next;
4865 continue;
4866 }
4867
4868 *locp = loc->next;
4869 	  /* If we have deleted the location that was last emitted,
4870 	     we have to emit a new location, so add the variable to the
4871 	     set of changed variables.  */
4872 if (cur_loc == loc->loc)
4873 {
4874 changed = true;
4875 var->var_part[0].cur_loc = NULL;
4876 if (VAR_LOC_1PAUX (var))
4877 VAR_LOC_FROM (var) = NULL;
4878 }
4879 delete loc;
4880 }
4881
4882 if (!var->var_part[0].loc_chain)
4883 {
4884 var->n_var_parts--;
4885 changed = true;
4886 }
4887 if (changed)
4888 variable_was_changed (var, set);
4889 }
4890
4891 return 1;
4892 }
4893
4894 /* Remove all variable-location information about call-clobbered
4895 registers, as well as associations between MEMs and VALUEs. */
4896
4897 static void
4898 dataflow_set_clear_at_call (dataflow_set *set, rtx_insn *call_insn)
4899 {
4900 unsigned int r;
4901 hard_reg_set_iterator hrsi;
4902 HARD_REG_SET invalidated_regs;
4903
4904 get_call_reg_set_usage (call_insn, &invalidated_regs,
4905 regs_invalidated_by_call);
4906
4907 EXECUTE_IF_SET_IN_HARD_REG_SET (invalidated_regs, 0, r, hrsi)
4908 var_regno_delete (set, r);
4909
4910 if (MAY_HAVE_DEBUG_BIND_INSNS)
4911 {
4912 set->traversed_vars = set->vars;
4913 shared_hash_htab (set->vars)
4914 ->traverse <dataflow_set *, dataflow_set_preserve_mem_locs> (set);
4915 set->traversed_vars = set->vars;
4916 shared_hash_htab (set->vars)
4917 ->traverse <dataflow_set *, dataflow_set_remove_mem_locs> (set);
4918 set->traversed_vars = NULL;
4919 }
4920 }
4921
4922 static bool
4923 variable_part_different_p (variable_part *vp1, variable_part *vp2)
4924 {
4925 location_chain *lc1, *lc2;
4926
4927 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4928 {
4929 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4930 {
4931 if (REG_P (lc1->loc) && REG_P (lc2->loc))
4932 {
4933 if (REGNO (lc1->loc) == REGNO (lc2->loc))
4934 break;
4935 }
4936 if (rtx_equal_p (lc1->loc, lc2->loc))
4937 break;
4938 }
4939 if (!lc2)
4940 return true;
4941 }
4942 return false;
4943 }
4944
4945 /* Return true if one-part variables VAR1 and VAR2 are different.
4946 They must be in canonical order. */
4947
4948 static bool
4949 onepart_variable_different_p (variable *var1, variable *var2)
4950 {
4951 location_chain *lc1, *lc2;
4952
4953 if (var1 == var2)
4954 return false;
4955
4956 gcc_assert (var1->n_var_parts == 1
4957 && var2->n_var_parts == 1);
4958
4959 lc1 = var1->var_part[0].loc_chain;
4960 lc2 = var2->var_part[0].loc_chain;
4961
4962 gcc_assert (lc1 && lc2);
4963
4964 while (lc1 && lc2)
4965 {
4966 if (loc_cmp (lc1->loc, lc2->loc))
4967 return true;
4968 lc1 = lc1->next;
4969 lc2 = lc2->next;
4970 }
4971
4972 return lc1 != lc2;
4973 }
4974
4975 /* Dump to dump_file the differences between the location lists of
4976    one-part variables VAR1 and VAR2.  They must be in canonical order.  */
4977
4978 static void
4979 dump_onepart_variable_differences (variable *var1, variable *var2)
4980 {
4981 location_chain *lc1, *lc2;
4982
4983 gcc_assert (var1 != var2);
4984 gcc_assert (dump_file);
4985 gcc_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv));
4986 gcc_assert (var1->n_var_parts == 1
4987 && var2->n_var_parts == 1);
4988
4989 lc1 = var1->var_part[0].loc_chain;
4990 lc2 = var2->var_part[0].loc_chain;
4991
4992 gcc_assert (lc1 && lc2);
4993
4994 while (lc1 && lc2)
4995 {
4996 switch (loc_cmp (lc1->loc, lc2->loc))
4997 {
4998 case -1:
4999 fprintf (dump_file, "removed: ");
5000 print_rtl_single (dump_file, lc1->loc);
5001 lc1 = lc1->next;
5002 continue;
5003 case 0:
5004 break;
5005 case 1:
5006 fprintf (dump_file, "added: ");
5007 print_rtl_single (dump_file, lc2->loc);
5008 lc2 = lc2->next;
5009 continue;
5010 default:
5011 gcc_unreachable ();
5012 }
5013 lc1 = lc1->next;
5014 lc2 = lc2->next;
5015 }
5016
5017 while (lc1)
5018 {
5019 fprintf (dump_file, "removed: ");
5020 print_rtl_single (dump_file, lc1->loc);
5021 lc1 = lc1->next;
5022 }
5023
5024 while (lc2)
5025 {
5026 fprintf (dump_file, "added: ");
5027 print_rtl_single (dump_file, lc2->loc);
5028 lc2 = lc2->next;
5029 }
5030 }
5031
5032 /* Return true if variables VAR1 and VAR2 are different. */
5033
5034 static bool
5035 variable_different_p (variable *var1, variable *var2)
5036 {
5037 int i;
5038
5039 if (var1 == var2)
5040 return false;
5041
5042 if (var1->onepart != var2->onepart)
5043 return true;
5044
5045 if (var1->n_var_parts != var2->n_var_parts)
5046 return true;
5047
5048 if (var1->onepart && var1->n_var_parts)
5049 {
5050 gcc_checking_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv)
5051 && var1->n_var_parts == 1);
5052 /* One-part values have locations in a canonical order. */
5053 return onepart_variable_different_p (var1, var2);
5054 }
5055
5056 for (i = 0; i < var1->n_var_parts; i++)
5057 {
5058 if (VAR_PART_OFFSET (var1, i) != VAR_PART_OFFSET (var2, i))
5059 return true;
5060 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
5061 return true;
5062 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
5063 return true;
5064 }
5065 return false;
5066 }
5067
5068 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
5069
5070 static bool
5071 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
5072 {
5073 variable_iterator_type hi;
5074 variable *var1;
5075 bool diffound = false;
5076 bool details = (dump_file && (dump_flags & TDF_DETAILS));
5077
5078 #define RETRUE \
5079 do \
5080 { \
5081 if (!details) \
5082 return true; \
5083 else \
5084 diffound = true; \
5085 } \
5086 while (0)
5087
5088 if (old_set->vars == new_set->vars)
5089 return false;
5090
5091 if (shared_hash_htab (old_set->vars)->elements ()
5092 != shared_hash_htab (new_set->vars)->elements ())
5093 RETRUE;
5094
5095 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (old_set->vars),
5096 var1, variable, hi)
5097 {
5098 variable_table_type *htab = shared_hash_htab (new_set->vars);
5099 variable *var2 = htab->find_with_hash (var1->dv, dv_htab_hash (var1->dv));
5100
5101 if (!var2)
5102 {
5103 if (dump_file && (dump_flags & TDF_DETAILS))
5104 {
5105 fprintf (dump_file, "dataflow difference found: removal of:\n");
5106 dump_var (var1);
5107 }
5108 RETRUE;
5109 }
5110 else if (variable_different_p (var1, var2))
5111 {
5112 if (details)
5113 {
5114 fprintf (dump_file, "dataflow difference found: "
5115 "old and new follow:\n");
5116 dump_var (var1);
5117 if (dv_onepart_p (var1->dv))
5118 dump_onepart_variable_differences (var1, var2);
5119 dump_var (var2);
5120 }
5121 RETRUE;
5122 }
5123 }
5124
5125 /* There's no need to traverse the second hashtab unless we want to
5126 print the details. If both have the same number of elements and
5127 the second one had all entries found in the first one, then the
5128 second can't have any extra entries. */
5129 if (!details)
5130 return diffound;
5131
5132 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (new_set->vars),
5133 var1, variable, hi)
5134 {
5135 variable_table_type *htab = shared_hash_htab (old_set->vars);
5136 variable *var2 = htab->find_with_hash (var1->dv, dv_htab_hash (var1->dv));
5137 if (!var2)
5138 {
5139 if (details)
5140 {
5141 fprintf (dump_file, "dataflow difference found: addition of:\n");
5142 dump_var (var1);
5143 }
5144 RETRUE;
5145 }
5146 }
5147
5148 #undef RETRUE
5149
5150 return diffound;
5151 }
5152
5153 /* Free the contents of dataflow set SET. */
5154
5155 static void
5156 dataflow_set_destroy (dataflow_set *set)
5157 {
5158 int i;
5159
5160 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5161 attrs_list_clear (&set->regs[i]);
5162
5163 shared_hash_destroy (set->vars);
5164 set->vars = NULL;
5165 }
5166
5167 /* Return true if T is a tracked parameter with non-degenerate record type. */
5168
5169 static bool
5170 tracked_record_parameter_p (tree t)
5171 {
5172 if (TREE_CODE (t) != PARM_DECL)
5173 return false;
5174
5175 if (DECL_MODE (t) == BLKmode)
5176 return false;
5177
5178 tree type = TREE_TYPE (t);
5179 if (TREE_CODE (type) != RECORD_TYPE)
5180 return false;
5181
5182 if (TYPE_FIELDS (type) == NULL_TREE
5183 || DECL_CHAIN (TYPE_FIELDS (type)) == NULL_TREE)
5184 return false;
5185
5186 return true;
5187 }
5188
5189 /* Shall EXPR be tracked? */
5190
5191 static bool
5192 track_expr_p (tree expr, bool need_rtl)
5193 {
5194 rtx decl_rtl;
5195 tree realdecl;
5196
5197 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
5198 return DECL_RTL_SET_P (expr);
5199
5200 /* If EXPR is not a parameter or a variable do not track it. */
5201 if (!VAR_P (expr) && TREE_CODE (expr) != PARM_DECL)
5202 return 0;
5203
5204 /* It also must have a name... */
5205 if (!DECL_NAME (expr) && need_rtl)
5206 return 0;
5207
5208 /* ... and a RTL assigned to it. */
5209 decl_rtl = DECL_RTL_IF_SET (expr);
5210 if (!decl_rtl && need_rtl)
5211 return 0;
5212
5213 /* If this expression is really a debug alias of some other declaration, we
5214 don't need to track this expression if the ultimate declaration is
5215 ignored. */
5216 realdecl = expr;
5217 if (VAR_P (realdecl) && DECL_HAS_DEBUG_EXPR_P (realdecl))
5218 {
5219 realdecl = DECL_DEBUG_EXPR (realdecl);
5220 if (!DECL_P (realdecl))
5221 {
5222 if (handled_component_p (realdecl)
5223 || (TREE_CODE (realdecl) == MEM_REF
5224 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
5225 {
5226 HOST_WIDE_INT bitsize, bitpos;
5227 bool reverse;
5228 tree innerdecl
5229 = get_ref_base_and_extent_hwi (realdecl, &bitpos,
5230 &bitsize, &reverse);
5231 if (!innerdecl
5232 || !DECL_P (innerdecl)
5233 || DECL_IGNORED_P (innerdecl)
5234 /* Do not track declarations for parts of tracked record
5235 parameters since we want to track them as a whole. */
5236 || tracked_record_parameter_p (innerdecl)
5237 || TREE_STATIC (innerdecl)
5238 || bitsize == 0
5239 || bitpos + bitsize > 256)
5240 return 0;
5241 else
5242 realdecl = expr;
5243 }
5244 else
5245 return 0;
5246 }
5247 }
5248
5249 /* Do not track EXPR if REALDECL should be ignored for debugging
5250    purposes.  */
5251 if (DECL_IGNORED_P (realdecl))
5252 return 0;
5253
5254 /* Do not track global variables until we are able to emit correct location
5255 list for them. */
5256 if (TREE_STATIC (realdecl))
5257 return 0;
5258
5259 /* When EXPR is a DECL for an alias of some variable (see the example
5260    below), the TREE_STATIC flag is not set.  Disable tracking of all
5261    DECLs whose DECL_RTL contains a SYMBOL_REF.
5262
5263 Example:
5264 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
5265 char **_dl_argv;
5266 */
5267 if (decl_rtl && MEM_P (decl_rtl)
5268 && contains_symbol_ref_p (XEXP (decl_rtl, 0)))
5269 return 0;
5270
5271 /* If RTX is a memory it should not be very large (because it would be
5272 an array or struct). */
5273 if (decl_rtl && MEM_P (decl_rtl))
5274 {
5275 /* Do not track structures and arrays. */
5276 if ((GET_MODE (decl_rtl) == BLKmode
5277 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
5278 && !tracked_record_parameter_p (realdecl))
5279 return 0;
5280 if (MEM_SIZE_KNOWN_P (decl_rtl)
5281 && maybe_gt (MEM_SIZE (decl_rtl), MAX_VAR_PARTS))
5282 return 0;
5283 }
5284
5285 DECL_CHANGED (expr) = 0;
5286 DECL_CHANGED (realdecl) = 0;
5287 return 1;
5288 }
5289
5290 /* Determine whether a given LOC refers to the same variable part as
5291 EXPR+OFFSET. */
5292
5293 static bool
5294 same_variable_part_p (rtx loc, tree expr, poly_int64 offset)
5295 {
5296 tree expr2;
5297 poly_int64 offset2;
5298
5299 if (! DECL_P (expr))
5300 return false;
5301
5302 if (REG_P (loc))
5303 {
5304 expr2 = REG_EXPR (loc);
5305 offset2 = REG_OFFSET (loc);
5306 }
5307 else if (MEM_P (loc))
5308 {
5309 expr2 = MEM_EXPR (loc);
5310 offset2 = int_mem_offset (loc);
5311 }
5312 else
5313 return false;
5314
5315 if (! expr2 || ! DECL_P (expr2))
5316 return false;
5317
5318 expr = var_debug_decl (expr);
5319 expr2 = var_debug_decl (expr2);
5320
5321 return (expr == expr2 && known_eq (offset, offset2));
5322 }
5323
5324 /* LOC is a REG or MEM that we would like to track if possible.
5325 If EXPR is null, we don't know what expression LOC refers to,
5326 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
5327 LOC is an lvalue register.
5328
5329 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
5330 is something we can track. When returning true, store the mode of
5331 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
5332 from EXPR in *OFFSET_OUT (if nonnull). */
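/* Sketch (modes illustrative): a DImode hard register that is a
   paradoxical copy of an SImode pseudo is narrowed back to the pseudo's
   SImode, and if the lowpart covers all of EXPR the tracked mode becomes
   DECL_MODE (EXPR) at offset 0, i.e. the whole variable.  */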
5333
5334 static bool
5335 track_loc_p (rtx loc, tree expr, poly_int64 offset, bool store_reg_p,
5336 machine_mode *mode_out, HOST_WIDE_INT *offset_out)
5337 {
5338 machine_mode mode;
5339
5340 if (expr == NULL || !track_expr_p (expr, true))
5341 return false;
5342
5343 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
5344 whole subreg, but only the old inner part is really relevant. */
5345 mode = GET_MODE (loc);
5346 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
5347 {
5348 machine_mode pseudo_mode;
5349
5350 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
5351 if (paradoxical_subreg_p (mode, pseudo_mode))
5352 {
5353 offset += byte_lowpart_offset (pseudo_mode, mode);
5354 mode = pseudo_mode;
5355 }
5356 }
5357
5358 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
5359 Do the same if we are storing to a register and EXPR occupies
5360 the whole of register LOC; in that case, the whole of EXPR is
5361 being changed. We exclude complex modes from the second case
5362 because the real and imaginary parts are represented as separate
5363 pseudo registers, even if the whole complex value fits into one
5364 hard register. */
5365 if ((paradoxical_subreg_p (mode, DECL_MODE (expr))
5366 || (store_reg_p
5367 && !COMPLEX_MODE_P (DECL_MODE (expr))
5368 && hard_regno_nregs (REGNO (loc), DECL_MODE (expr)) == 1))
5369 && known_eq (offset + byte_lowpart_offset (DECL_MODE (expr), mode), 0))
5370 {
5371 mode = DECL_MODE (expr);
5372 offset = 0;
5373 }
5374
5375 HOST_WIDE_INT const_offset;
5376 if (!track_offset_p (offset, &const_offset))
5377 return false;
5378
5379 if (mode_out)
5380 *mode_out = mode;
5381 if (offset_out)
5382 *offset_out = const_offset;
5383 return true;
5384 }
5385
5386 /* Return the MODE lowpart of LOC, or null if LOC is not something we
5387 want to track. When returning nonnull, make sure that the attributes
5388 on the returned value are updated. */
5389
5390 static rtx
5391 var_lowpart (machine_mode mode, rtx loc)
5392 {
5393 unsigned int regno;
5394
5395 if (GET_MODE (loc) == mode)
5396 return loc;
5397
5398 if (!REG_P (loc) && !MEM_P (loc))
5399 return NULL;
5400
5401 poly_uint64 offset = byte_lowpart_offset (mode, GET_MODE (loc));
5402
5403 if (MEM_P (loc))
5404 return adjust_address_nv (loc, mode, offset);
5405
5406 poly_uint64 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
5407 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
5408 reg_offset, mode);
5409 return gen_rtx_REG_offset (loc, mode, regno, offset);
5410 }
5411
5412 /* Carry information about uses and stores while walking rtx. */
5413
5414 struct count_use_info
5415 {
5416 /* The insn where the RTX is. */
5417 rtx_insn *insn;
5418
5419 /* The basic block where insn is. */
5420 basic_block bb;
5421
5422 /* The array of n_sets sets in the insn, as determined by cselib. */
5423 struct cselib_set *sets;
5424 int n_sets;
5425
5426 /* True if we're counting stores, false otherwise. */
5427 bool store_p;
5428 };
5429
5430 /* Find a VALUE corresponding to X. */
5431
5432 static inline cselib_val *
5433 find_use_val (rtx x, machine_mode mode, struct count_use_info *cui)
5434 {
5435 int i;
5436
5437 if (cui->sets)
5438 {
5439 /* This is called after uses are set up and before stores are
5440 processed by cselib, so it's safe to look up srcs, but not
5441 dsts. So we look up expressions that appear in srcs or in
5442 dest expressions, but we search the sets array for dests of
5443 stores. */
5444 if (cui->store_p)
5445 {
5446 /* Some targets represent memset and memcpy patterns
5447 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
5448 (set (mem:BLK ...) (const_int ...)) or
5449 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
5450 in that case, otherwise we end up with mode mismatches. */
5451 if (mode == BLKmode && MEM_P (x))
5452 return NULL;
5453 for (i = 0; i < cui->n_sets; i++)
5454 if (cui->sets[i].dest == x)
5455 return cui->sets[i].src_elt;
5456 }
5457 else
5458 return cselib_lookup (x, mode, 0, VOIDmode);
5459 }
5460
5461 return NULL;
5462 }
5463
5464 /* Replace all registers and addresses in an expression with VALUE
5465 expressions that map back to them, unless the expression is a
5466 register. If no mapping is or can be performed, returns NULL. */
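/* E.g. (VALUE name hypothetical): (mem:SI (plus:SI (reg:SI 6) (const_int 4)))
   may become (mem:SI (value:SI V)) once cselib knows a VALUE for the
   address, whereas a bare register or ENTRY_VALUE yields NULL.  */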
5467
5468 static rtx
5469 replace_expr_with_values (rtx loc)
5470 {
5471 if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
5472 return NULL;
5473 else if (MEM_P (loc))
5474 {
5475 cselib_val *addr = cselib_lookup (XEXP (loc, 0),
5476 get_address_mode (loc), 0,
5477 GET_MODE (loc));
5478 if (addr)
5479 return replace_equiv_address_nv (loc, addr->val_rtx);
5480 else
5481 return NULL;
5482 }
5483 else
5484 return cselib_subst_to_values (loc, VOIDmode);
5485 }
5486
5487 /* Return true if X contains a DEBUG_EXPR. */
5488
5489 static bool
5490 rtx_debug_expr_p (const_rtx x)
5491 {
5492 subrtx_iterator::array_type array;
5493 FOR_EACH_SUBRTX (iter, array, x, ALL)
5494 if (GET_CODE (*iter) == DEBUG_EXPR)
5495 return true;
5496 return false;
5497 }
5498
5499 /* Determine what kind of micro operation to choose for a USE. Return
5500 MO_CLOBBER if no micro operation is to be generated. */
5501
5502 static enum micro_operation_type
5503 use_type (rtx loc, struct count_use_info *cui, machine_mode *modep)
5504 {
5505 tree expr;
5506
5507 if (cui && cui->sets)
5508 {
5509 if (GET_CODE (loc) == VAR_LOCATION)
5510 {
5511 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
5512 {
5513 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
5514 if (! VAR_LOC_UNKNOWN_P (ploc))
5515 {
5516 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
5517 VOIDmode);
5518
5519 /* ??? flag_float_store and volatile mems are never
5520 given values, but we could in theory use them for
5521 locations. */
5522 gcc_assert (val || 1);
5523 }
5524 return MO_VAL_LOC;
5525 }
5526 else
5527 return MO_CLOBBER;
5528 }
5529
5530 if (REG_P (loc) || MEM_P (loc))
5531 {
5532 if (modep)
5533 *modep = GET_MODE (loc);
5534 if (cui->store_p)
5535 {
5536 if (REG_P (loc)
5537 || (find_use_val (loc, GET_MODE (loc), cui)
5538 && cselib_lookup (XEXP (loc, 0),
5539 get_address_mode (loc), 0,
5540 GET_MODE (loc))))
5541 return MO_VAL_SET;
5542 }
5543 else
5544 {
5545 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5546
5547 if (val && !cselib_preserved_value_p (val))
5548 return MO_VAL_USE;
5549 }
5550 }
5551 }
5552
5553 if (REG_P (loc))
5554 {
5555 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
5556
5557 if (loc == cfa_base_rtx)
5558 return MO_CLOBBER;
5559 expr = REG_EXPR (loc);
5560
5561 if (!expr)
5562 return MO_USE_NO_VAR;
5563 else if (target_for_debug_bind (var_debug_decl (expr)))
5564 return MO_CLOBBER;
5565 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
5566 false, modep, NULL))
5567 return MO_USE;
5568 else
5569 return MO_USE_NO_VAR;
5570 }
5571 else if (MEM_P (loc))
5572 {
5573 expr = MEM_EXPR (loc);
5574
5575 if (!expr)
5576 return MO_CLOBBER;
5577 else if (target_for_debug_bind (var_debug_decl (expr)))
5578 return MO_CLOBBER;
5579 else if (track_loc_p (loc, expr, int_mem_offset (loc),
5580 false, modep, NULL)
5581 /* Multi-part variables shouldn't refer to one-part
5582 variable names such as VALUEs (never happens) or
5583 DEBUG_EXPRs (only happens in the presence of debug
5584 insns). */
5585 && (!MAY_HAVE_DEBUG_BIND_INSNS
5586 || !rtx_debug_expr_p (XEXP (loc, 0))))
5587 return MO_USE;
5588 else
5589 return MO_CLOBBER;
5590 }
5591
5592 return MO_CLOBBER;
5593 }
5594
5595 /* Log to OUT information about micro-operation MOPT involving X in
5596 INSN of BB. */
5597
5598 static inline void
5599 log_op_type (rtx x, basic_block bb, rtx_insn *insn,
5600 enum micro_operation_type mopt, FILE *out)
5601 {
5602 fprintf (out, "bb %i op %i insn %i %s ",
5603 bb->index, VTI (bb)->mos.length (),
5604 INSN_UID (insn), micro_operation_type_name[mopt]);
5605 print_inline_rtx (out, x, 2);
5606 fputc ('\n', out);
5607 }
5608
5609 /* Tell whether the CONCAT used to hold a VALUE and its location
5610    needs value resolution, i.e., an attempt at mapping the location
5611    back to other incoming values.  */
5612 #define VAL_NEEDS_RESOLUTION(x) \
5613 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5614 /* Whether the location in the CONCAT is a tracked expression, that
5615 should also be handled like a MO_USE. */
5616 #define VAL_HOLDS_TRACK_EXPR(x) \
5617 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5618 /* Whether the location in the CONCAT should be handled like a MO_COPY
5619 as well. */
5620 #define VAL_EXPR_IS_COPIED(x) \
5621 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5622 /* Whether the location in the CONCAT should be handled like a
5623 MO_CLOBBER as well. */
5624 #define VAL_EXPR_IS_CLOBBERED(x) \
5625 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
5626
5627 /* All preserved VALUEs. */
5628 static vec<rtx> preserved_values;
5629
5630 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
5631
5632 static void
5633 preserve_value (cselib_val *val)
5634 {
5635 cselib_preserve_value (val);
5636 preserved_values.safe_push (val->val_rtx);
5637 }
5638
5639 /* Helper function for MO_VAL_LOC handling.  Return true if X contains
5640    any rtxes that are not suitable for CONST use and have not been
5641    replaced by VALUEs.  */
5642
5643 static bool
5644 non_suitable_const (const_rtx x)
5645 {
5646 subrtx_iterator::array_type array;
5647 FOR_EACH_SUBRTX (iter, array, x, ALL)
5648 {
5649 const_rtx x = *iter;
5650 switch (GET_CODE (x))
5651 {
5652 case REG:
5653 case DEBUG_EXPR:
5654 case PC:
5655 case SCRATCH:
5656 case CC0:
5657 case ASM_INPUT:
5658 case ASM_OPERANDS:
5659 return true;
5660 case MEM:
5661 if (!MEM_READONLY_P (x))
5662 return true;
5663 break;
5664 default:
5665 break;
5666 }
5667 }
5668 return false;
5669 }
5670
5671 /* Add uses (register and memory references) LOC which will be tracked
5672 to VTI (bb)->mos. */
5673
5674 static void
5675 add_uses (rtx loc, struct count_use_info *cui)
5676 {
5677 machine_mode mode = VOIDmode;
5678 enum micro_operation_type type = use_type (loc, cui, &mode);
5679
5680 if (type != MO_CLOBBER)
5681 {
5682 basic_block bb = cui->bb;
5683 micro_operation mo;
5684
5685 mo.type = type;
5686 mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
5687 mo.insn = cui->insn;
5688
5689 if (type == MO_VAL_LOC)
5690 {
5691 rtx oloc = loc;
5692 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
5693 cselib_val *val;
5694
5695 gcc_assert (cui->sets);
5696
5697 if (MEM_P (vloc)
5698 && !REG_P (XEXP (vloc, 0))
5699 && !MEM_P (XEXP (vloc, 0)))
5700 {
5701 rtx mloc = vloc;
5702 machine_mode address_mode = get_address_mode (mloc);
5703 cselib_val *val
5704 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5705 GET_MODE (mloc));
5706
5707 if (val && !cselib_preserved_value_p (val))
5708 preserve_value (val);
5709 }
5710
5711 if (CONSTANT_P (vloc)
5712 && (GET_CODE (vloc) != CONST || non_suitable_const (vloc)))
5713 /* For constants don't look up any value. */;
5714 else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (vloc)
5715 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
5716 {
5717 machine_mode mode2;
5718 enum micro_operation_type type2;
5719 rtx nloc = NULL;
5720 bool resolvable = REG_P (vloc) || MEM_P (vloc);
5721
5722 if (resolvable)
5723 nloc = replace_expr_with_values (vloc);
5724
5725 if (nloc)
5726 {
5727 oloc = shallow_copy_rtx (oloc);
5728 PAT_VAR_LOCATION_LOC (oloc) = nloc;
5729 }
5730
5731 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
5732
5733 type2 = use_type (vloc, 0, &mode2);
5734
5735 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5736 || type2 == MO_CLOBBER);
5737
5738 if (type2 == MO_CLOBBER
5739 && !cselib_preserved_value_p (val))
5740 {
5741 VAL_NEEDS_RESOLUTION (oloc) = resolvable;
5742 preserve_value (val);
5743 }
5744 }
5745 else if (!VAR_LOC_UNKNOWN_P (vloc))
5746 {
5747 oloc = shallow_copy_rtx (oloc);
5748 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
5749 }
5750
5751 mo.u.loc = oloc;
5752 }
5753 else if (type == MO_VAL_USE)
5754 {
5755 machine_mode mode2 = VOIDmode;
5756 enum micro_operation_type type2;
5757 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5758 rtx vloc, oloc = loc, nloc;
5759
5760 gcc_assert (cui->sets);
5761
5762 if (MEM_P (oloc)
5763 && !REG_P (XEXP (oloc, 0))
5764 && !MEM_P (XEXP (oloc, 0)))
5765 {
5766 rtx mloc = oloc;
5767 machine_mode address_mode = get_address_mode (mloc);
5768 cselib_val *val
5769 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5770 GET_MODE (mloc));
5771
5772 if (val && !cselib_preserved_value_p (val))
5773 preserve_value (val);
5774 }
5775
5776 type2 = use_type (loc, 0, &mode2);
5777
5778 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5779 || type2 == MO_CLOBBER);
5780
5781 if (type2 == MO_USE)
5782 vloc = var_lowpart (mode2, loc);
5783 else
5784 vloc = oloc;
5785
5786 /* The loc of a MO_VAL_USE may have two forms:
5787
5788 (concat val src): val is at src, a value-based
5789 representation.
5790
5791 (concat (concat val use) src): same as above, with use as
5792 the MO_USE tracked value, if it differs from src.
5793
5794 */
5795
5796 gcc_checking_assert (REG_P (loc) || MEM_P (loc));
5797 nloc = replace_expr_with_values (loc);
5798 if (!nloc)
5799 nloc = oloc;
5800
5801 if (vloc != nloc)
5802 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
5803 else
5804 oloc = val->val_rtx;
5805
5806 mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
5807
5808 if (type2 == MO_USE)
5809 VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
5810 if (!cselib_preserved_value_p (val))
5811 {
5812 VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
5813 preserve_value (val);
5814 }
5815 }
5816 else
5817 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
5818
5819 if (dump_file && (dump_flags & TDF_DETAILS))
5820 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5821 VTI (bb)->mos.safe_push (mo);
5822 }
5823 }
5824
5825 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
5826
5827 static void
5828 add_uses_1 (rtx *x, void *cui)
5829 {
5830 subrtx_var_iterator::array_type array;
5831 FOR_EACH_SUBRTX_VAR (iter, array, *x, NONCONST)
5832 add_uses (*iter, (struct count_use_info *) cui);
5833 }
5834
5835 /* This is the value used during expansion of locations.  We want it
5836 to be unbounded, so that variables expanded deep in a recursion
5837 nest are fully evaluated and their values are cached correctly.
5838 We avoid recursion cycles through other means, and we don't
5839 unshare RTL, so excess complexity is not a problem.  */
5840 #define EXPR_DEPTH (INT_MAX)
5841 /* We use this to keep too-complex expressions from being emitted as
5842 location notes and ending up in the debug information.  Users can
5843 trade compile time for ridiculously complex expressions, although
5844 they're seldom useful, and they may often have to be discarded as
5845 not representable anyway.  */
5846 #define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
5847
5848 /* Attempt to reverse the EXPR operation in the debug info and record
5849 it in the cselib table. Say for reg1 = reg2 + 6 even when reg2 is
5850 no longer live we can express its value as VAL - 6. */
5851
5852 static void
5853 reverse_op (rtx val, const_rtx expr, rtx_insn *insn)
5854 {
5855 rtx src, arg, ret;
5856 cselib_val *v;
5857 struct elt_loc_list *l;
5858 enum rtx_code code;
5859 int count;
5860
5861 if (GET_CODE (expr) != SET)
5862 return;
5863
5864 if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
5865 return;
5866
5867 src = SET_SRC (expr);
5868 switch (GET_CODE (src))
5869 {
5870 case PLUS:
5871 case MINUS:
5872 case XOR:
5873 case NOT:
5874 case NEG:
5875 if (!REG_P (XEXP (src, 0)))
5876 return;
5877 break;
5878 case SIGN_EXTEND:
5879 case ZERO_EXTEND:
5880 if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
5881 return;
5882 break;
5883 default:
5884 return;
5885 }
5886
5887 if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
5888 return;
5889
5890 v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
5891 if (!v || !cselib_preserved_value_p (v))
5892 return;
5893
5894 /* Use canonical V to avoid creating multiple redundant expressions
5895 for different VALUES equivalent to V. */
5896 v = canonical_cselib_val (v);
5897
5898 /* Adding a reverse op isn't useful if V already has an always valid
5899 location.  Ignore ENTRY_VALUE: while it is always constant, we should
5900 prefer non-ENTRY_VALUE locations whenever possible.  */
5901 for (l = v->locs, count = 0; l; l = l->next, count++)
5902 if (CONSTANT_P (l->loc)
5903 && (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0)))
5904 return;
5905 /* Avoid creating too large locs lists. */
5906 else if (count == PARAM_VALUE (PARAM_MAX_VARTRACK_REVERSE_OP_SIZE))
5907 return;
5908
5909 switch (GET_CODE (src))
5910 {
5911 case NOT:
5912 case NEG:
5913 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5914 return;
5915 ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
5916 break;
5917 case SIGN_EXTEND:
5918 case ZERO_EXTEND:
5919 ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
5920 break;
5921 case XOR:
5922 code = XOR;
5923 goto binary;
5924 case PLUS:
5925 code = MINUS;
5926 goto binary;
5927 case MINUS:
5928 code = PLUS;
5929 goto binary;
5930 binary:
5931 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5932 return;
5933 arg = XEXP (src, 1);
5934 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5935 {
5936 arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
5937 if (arg == NULL_RTX)
5938 return;
5939 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5940 return;
5941 }
5942 ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
5943 break;
5944 default:
5945 gcc_unreachable ();
5946 }
5947
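/* RET now expresses the preserved source value V in terms of VAL, e.g. for
   reg1 = reg2 + 6 it is (minus VAL 6).  Record it as a permanent
   equivalence so V can still be expanded after its register dies.  */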
5948 cselib_add_permanent_equiv (v, ret, insn);
5949 }
5950
5951 /* Add the store (register or memory reference) LOC, which will be
5952 tracked, to VTI (bb)->mos.  EXPR is the RTL expression containing the
5953 store.  CUIP->insn is the instruction that LOC is part of.  */
5954
5955 static void
5956 add_stores (rtx loc, const_rtx expr, void *cuip)
5957 {
5958 machine_mode mode = VOIDmode, mode2;
5959 struct count_use_info *cui = (struct count_use_info *)cuip;
5960 basic_block bb = cui->bb;
5961 micro_operation mo;
5962 rtx oloc = loc, nloc, src = NULL;
5963 enum micro_operation_type type = use_type (loc, cui, &mode);
5964 bool track_p = false;
5965 cselib_val *v;
5966 bool resolve, preserve;
5967
5968 if (type == MO_CLOBBER)
5969 return;
5970
5971 mode2 = mode;
5972
5973 if (REG_P (loc))
5974 {
5975 gcc_assert (loc != cfa_base_rtx);
5976 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
5977 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
5978 || GET_CODE (expr) == CLOBBER)
5979 {
5980 mo.type = MO_CLOBBER;
5981 mo.u.loc = loc;
5982 if (GET_CODE (expr) == SET
5983 && (SET_DEST (expr) == loc
5984 || (GET_CODE (SET_DEST (expr)) == STRICT_LOW_PART
5985 && XEXP (SET_DEST (expr), 0) == loc))
5986 && !unsuitable_loc (SET_SRC (expr))
5987 && find_use_val (loc, mode, cui))
5988 {
5989 gcc_checking_assert (type == MO_VAL_SET);
5990 mo.u.loc = gen_rtx_SET (loc, SET_SRC (expr));
5991 }
5992 }
5993 else
5994 {
5995 if (GET_CODE (expr) == SET
5996 && SET_DEST (expr) == loc
5997 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5998 src = var_lowpart (mode2, SET_SRC (expr));
5999 loc = var_lowpart (mode2, loc);
6000
6001 if (src == NULL)
6002 {
6003 mo.type = MO_SET;
6004 mo.u.loc = loc;
6005 }
6006 else
6007 {
6008 rtx xexpr = gen_rtx_SET (loc, src);
6009 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
6010 {
6011 /* If this is an instruction copying (part of) a parameter
6012 passed by invisible reference to its register location,
6013 pretend it's a SET so that the initial memory location
6014 is discarded, as the parameter register can be reused
6015 for other purposes and we do not track locations based
6016 on generic registers. */
6017 if (MEM_P (src)
6018 && REG_EXPR (loc)
6019 && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
6020 && DECL_MODE (REG_EXPR (loc)) != BLKmode
6021 && MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
6022 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0)
6023 != arg_pointer_rtx)
6024 mo.type = MO_SET;
6025 else
6026 mo.type = MO_COPY;
6027 }
6028 else
6029 mo.type = MO_SET;
6030 mo.u.loc = xexpr;
6031 }
6032 }
6033 mo.insn = cui->insn;
6034 }
6035 else if (MEM_P (loc)
6036 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
6037 || cui->sets))
6038 {
6039 if (MEM_P (loc) && type == MO_VAL_SET
6040 && !REG_P (XEXP (loc, 0))
6041 && !MEM_P (XEXP (loc, 0)))
6042 {
6043 rtx mloc = loc;
6044 machine_mode address_mode = get_address_mode (mloc);
6045 cselib_val *val = cselib_lookup (XEXP (mloc, 0),
6046 address_mode, 0,
6047 GET_MODE (mloc));
6048
6049 if (val && !cselib_preserved_value_p (val))
6050 preserve_value (val);
6051 }
6052
6053 if (GET_CODE (expr) == CLOBBER || !track_p)
6054 {
6055 mo.type = MO_CLOBBER;
6056 mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
6057 }
6058 else
6059 {
6060 if (GET_CODE (expr) == SET
6061 && SET_DEST (expr) == loc
6062 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
6063 src = var_lowpart (mode2, SET_SRC (expr));
6064 loc = var_lowpart (mode2, loc);
6065
6066 if (src == NULL)
6067 {
6068 mo.type = MO_SET;
6069 mo.u.loc = loc;
6070 }
6071 else
6072 {
6073 rtx xexpr = gen_rtx_SET (loc, src);
6074 if (same_variable_part_p (SET_SRC (xexpr),
6075 MEM_EXPR (loc),
6076 int_mem_offset (loc)))
6077 mo.type = MO_COPY;
6078 else
6079 mo.type = MO_SET;
6080 mo.u.loc = xexpr;
6081 }
6082 }
6083 mo.insn = cui->insn;
6084 }
6085 else
6086 return;
6087
6088 if (type != MO_VAL_SET)
6089 goto log_and_return;
6090
6091 v = find_use_val (oloc, mode, cui);
6092
6093 if (!v)
6094 goto log_and_return;
6095
6096 resolve = preserve = !cselib_preserved_value_p (v);
6097
6098 /* We cannot track values for multiple-part variables, so we track only
6099 locations for tracked record parameters. */
6100 if (track_p
6101 && REG_P (loc)
6102 && REG_EXPR (loc)
6103 && tracked_record_parameter_p (REG_EXPR (loc)))
6104 {
6105 /* Although we don't use the value here, it could be used later by the
6106 mere virtue of its existence as the operand of the reverse operation
6107 that gave rise to it (typically extension/truncation). Make sure it
6108 is preserved as required by vt_expand_var_loc_chain. */
6109 if (preserve)
6110 preserve_value (v);
6111 goto log_and_return;
6112 }
6113
6114 if (loc == stack_pointer_rtx
6115 && hard_frame_pointer_adjustment != -1
6116 && preserve)
6117 cselib_set_value_sp_based (v);
6118
6119 nloc = replace_expr_with_values (oloc);
6120 if (nloc)
6121 oloc = nloc;
6122
6123 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
6124 {
6125 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);
6126
6127 if (oval == v)
6128 return;
6129 gcc_assert (REG_P (oloc) || MEM_P (oloc));
6130
6131 if (oval && !cselib_preserved_value_p (oval))
6132 {
6133 micro_operation moa;
6134
6135 preserve_value (oval);
6136
6137 moa.type = MO_VAL_USE;
6138 moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
6139 VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
6140 moa.insn = cui->insn;
6141
6142 if (dump_file && (dump_flags & TDF_DETAILS))
6143 log_op_type (moa.u.loc, cui->bb, cui->insn,
6144 moa.type, dump_file);
6145 VTI (bb)->mos.safe_push (moa);
6146 }
6147
6148 resolve = false;
6149 }
6150 else if (resolve && GET_CODE (mo.u.loc) == SET)
6151 {
6152 if (REG_P (SET_SRC (expr)) || MEM_P (SET_SRC (expr)))
6153 nloc = replace_expr_with_values (SET_SRC (expr));
6154 else
6155 nloc = NULL_RTX;
6156
6157 /* Avoid the mode mismatch between oexpr and expr. */
6158 if (!nloc && mode != mode2)
6159 {
6160 nloc = SET_SRC (expr);
6161 gcc_assert (oloc == SET_DEST (expr));
6162 }
6163
6164 if (nloc && nloc != SET_SRC (mo.u.loc))
6165 oloc = gen_rtx_SET (oloc, nloc);
6166 else
6167 {
6168 if (oloc == SET_DEST (mo.u.loc))
6169 /* No point in duplicating. */
6170 oloc = mo.u.loc;
6171 if (!REG_P (SET_SRC (mo.u.loc)))
6172 resolve = false;
6173 }
6174 }
6175 else if (!resolve)
6176 {
6177 if (GET_CODE (mo.u.loc) == SET
6178 && oloc == SET_DEST (mo.u.loc))
6179 /* No point in duplicating. */
6180 oloc = mo.u.loc;
6181 }
6182 else
6183 resolve = false;
6184
6185 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
6186
6187 if (mo.u.loc != oloc)
6188 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
6189
6190 /* The loc of a MO_VAL_SET may have various forms:
6191
6192 (concat val dst): dst now holds val
6193
6194 (concat val (set dst src)): dst now holds val, copied from src
6195
6196 (concat (concat val dstv) dst): dst now holds val; dstv is dst
6197 after replacing mems and non-top-level regs with values.
6198
6199 (concat (concat val dstv) (set dst src)): dst now holds val,
6200 copied from src. dstv is a value-based representation of dst, if
6201 it differs from dst. If resolution is needed, src is a REG, and
6202 its mode is the same as that of val.
6203
6204 (concat (concat val (set dstv srcv)) (set dst src)): src
6205 copied to dst, holding val. dstv and srcv are value-based
6206 representations of dst and src, respectively.
6207
6208 */
6209
6210 if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
6211 reverse_op (v->val_rtx, expr, cui->insn);
6212
6213 mo.u.loc = loc;
6214
6215 if (track_p)
6216 VAL_HOLDS_TRACK_EXPR (loc) = 1;
6217 if (preserve)
6218 {
6219 VAL_NEEDS_RESOLUTION (loc) = resolve;
6220 preserve_value (v);
6221 }
6222 if (mo.type == MO_CLOBBER)
6223 VAL_EXPR_IS_CLOBBERED (loc) = 1;
6224 if (mo.type == MO_COPY)
6225 VAL_EXPR_IS_COPIED (loc) = 1;
6226
6227 mo.type = MO_VAL_SET;
6228
6229 log_and_return:
6230 if (dump_file && (dump_flags & TDF_DETAILS))
6231 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
6232 VTI (bb)->mos.safe_push (mo);
6233 }
6234
6235 /* Arguments to the call. */
6236 static rtx call_arguments;
6237
6238 /* Compute call_arguments. */
6239
6240 static void
6241 prepare_call_arguments (basic_block bb, rtx_insn *insn)
6242 {
6243 rtx link, x, call;
6244 rtx prev, cur, next;
6245 rtx this_arg = NULL_RTX;
6246 tree type = NULL_TREE, t, fndecl = NULL_TREE;
6247 tree obj_type_ref = NULL_TREE;
6248 CUMULATIVE_ARGS args_so_far_v;
6249 cumulative_args_t args_so_far;
6250
6251 memset (&args_so_far_v, 0, sizeof (args_so_far_v));
6252 args_so_far = pack_cumulative_args (&args_so_far_v);
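/* The loop over CALL_INSN_FUNCTION_USAGE below pairs each argument
   location with a cselib VALUE (as a CONCAT) whenever a preserved value
   is known for it, chaining the pairs onto call_arguments.  */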
6253 call = get_call_rtx_from (insn);
6254 if (call)
6255 {
6256 if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
6257 {
6258 rtx symbol = XEXP (XEXP (call, 0), 0);
6259 if (SYMBOL_REF_DECL (symbol))
6260 fndecl = SYMBOL_REF_DECL (symbol);
6261 }
6262 if (fndecl == NULL_TREE)
6263 fndecl = MEM_EXPR (XEXP (call, 0));
6264 if (fndecl
6265 && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
6266 && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
6267 fndecl = NULL_TREE;
6268 if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
6269 type = TREE_TYPE (fndecl);
6270 if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
6271 {
6272 if (TREE_CODE (fndecl) == INDIRECT_REF
6273 && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
6274 obj_type_ref = TREE_OPERAND (fndecl, 0);
6275 fndecl = NULL_TREE;
6276 }
6277 if (type)
6278 {
6279 for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
6280 t = TREE_CHAIN (t))
6281 if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
6282 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
6283 break;
6284 if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
6285 type = NULL;
6286 else
6287 {
6288 int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
6289 link = CALL_INSN_FUNCTION_USAGE (insn);
6290 #ifndef PCC_STATIC_STRUCT_RETURN
6291 if (aggregate_value_p (TREE_TYPE (type), type)
6292 && targetm.calls.struct_value_rtx (type, 0) == 0)
6293 {
6294 tree struct_addr = build_pointer_type (TREE_TYPE (type));
6295 machine_mode mode = TYPE_MODE (struct_addr);
6296 rtx reg;
6297 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6298 nargs + 1);
6299 reg = targetm.calls.function_arg (args_so_far, mode,
6300 struct_addr, true);
6301 targetm.calls.function_arg_advance (args_so_far, mode,
6302 struct_addr, true);
6303 if (reg == NULL_RTX)
6304 {
6305 for (; link; link = XEXP (link, 1))
6306 if (GET_CODE (XEXP (link, 0)) == USE
6307 && MEM_P (XEXP (XEXP (link, 0), 0)))
6308 {
6309 link = XEXP (link, 1);
6310 break;
6311 }
6312 }
6313 }
6314 else
6315 #endif
6316 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6317 nargs);
6318 if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
6319 {
6320 machine_mode mode;
6321 t = TYPE_ARG_TYPES (type);
6322 mode = TYPE_MODE (TREE_VALUE (t));
6323 this_arg = targetm.calls.function_arg (args_so_far, mode,
6324 TREE_VALUE (t), true);
6325 if (this_arg && !REG_P (this_arg))
6326 this_arg = NULL_RTX;
6327 else if (this_arg == NULL_RTX)
6328 {
6329 for (; link; link = XEXP (link, 1))
6330 if (GET_CODE (XEXP (link, 0)) == USE
6331 && MEM_P (XEXP (XEXP (link, 0), 0)))
6332 {
6333 this_arg = XEXP (XEXP (link, 0), 0);
6334 break;
6335 }
6336 }
6337 }
6338 }
6339 }
6340 }
6341 t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;
6342
6343 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
6344 if (GET_CODE (XEXP (link, 0)) == USE)
6345 {
6346 rtx item = NULL_RTX;
6347 x = XEXP (XEXP (link, 0), 0);
6348 if (GET_MODE (link) == VOIDmode
6349 || GET_MODE (link) == BLKmode
6350 || (GET_MODE (link) != GET_MODE (x)
6351 && ((GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
6352 && GET_MODE_CLASS (GET_MODE (link)) != MODE_PARTIAL_INT)
6353 || (GET_MODE_CLASS (GET_MODE (x)) != MODE_INT
6354 && GET_MODE_CLASS (GET_MODE (x)) != MODE_PARTIAL_INT))))
6355 /* Can't do anything for these, if the original type mode
6356 isn't known or can't be converted. */;
6357 else if (REG_P (x))
6358 {
6359 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6360 scalar_int_mode mode;
6361 if (val && cselib_preserved_value_p (val))
6362 item = val->val_rtx;
6363 else if (is_a <scalar_int_mode> (GET_MODE (x), &mode))
6364 {
6365 opt_scalar_int_mode mode_iter;
6366 FOR_EACH_WIDER_MODE (mode_iter, mode)
6367 {
6368 mode = mode_iter.require ();
6369 if (GET_MODE_BITSIZE (mode) > BITS_PER_WORD)
6370 break;
6371
6372 rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
6373 if (reg == NULL_RTX || !REG_P (reg))
6374 continue;
6375 val = cselib_lookup (reg, mode, 0, VOIDmode);
6376 if (val && cselib_preserved_value_p (val))
6377 {
6378 item = val->val_rtx;
6379 break;
6380 }
6381 }
6382 }
6383 }
6384 else if (MEM_P (x))
6385 {
6386 rtx mem = x;
6387 cselib_val *val;
6388
6389 if (!frame_pointer_needed)
6390 {
6391 struct adjust_mem_data amd;
6392 amd.mem_mode = VOIDmode;
6393 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
6394 amd.store = true;
6395 mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
6396 &amd);
6397 gcc_assert (amd.side_effects.is_empty ());
6398 }
6399 val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
6400 if (val && cselib_preserved_value_p (val))
6401 item = val->val_rtx;
6402 else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT
6403 && GET_MODE_CLASS (GET_MODE (mem)) != MODE_PARTIAL_INT)
6404 {
6405 /* For a non-integer stack argument, also check whether it was
6406 initialized in an integer mode.  */
6407 scalar_int_mode imode;
6408 if (int_mode_for_mode (GET_MODE (mem)).exists (&imode)
6409 && imode != GET_MODE (mem))
6410 {
6411 val = cselib_lookup (adjust_address_nv (mem, imode, 0),
6412 imode, 0, VOIDmode);
6413 if (val && cselib_preserved_value_p (val))
6414 item = lowpart_subreg (GET_MODE (x), val->val_rtx,
6415 imode);
6416 }
6417 }
6418 }
6419 if (item)
6420 {
6421 rtx x2 = x;
6422 if (GET_MODE (item) != GET_MODE (link))
6423 item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item));
6424 if (GET_MODE (x2) != GET_MODE (link))
6425 x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2));
6426 item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
6427 call_arguments
6428 = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
6429 }
6430 if (t && t != void_list_node)
6431 {
6432 tree argtype = TREE_VALUE (t);
6433 machine_mode mode = TYPE_MODE (argtype);
6434 rtx reg;
6435 if (pass_by_reference (&args_so_far_v, mode, argtype, true))
6436 {
6437 argtype = build_pointer_type (argtype);
6438 mode = TYPE_MODE (argtype);
6439 }
6440 reg = targetm.calls.function_arg (args_so_far, mode,
6441 argtype, true);
6442 if (TREE_CODE (argtype) == REFERENCE_TYPE
6443 && INTEGRAL_TYPE_P (TREE_TYPE (argtype))
6444 && reg
6445 && REG_P (reg)
6446 && GET_MODE (reg) == mode
6447 && (GET_MODE_CLASS (mode) == MODE_INT
6448 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
6449 && REG_P (x)
6450 && REGNO (x) == REGNO (reg)
6451 && GET_MODE (x) == mode
6452 && item)
6453 {
6454 machine_mode indmode
6455 = TYPE_MODE (TREE_TYPE (argtype));
6456 rtx mem = gen_rtx_MEM (indmode, x);
6457 cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
6458 if (val && cselib_preserved_value_p (val))
6459 {
6460 item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
6461 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6462 call_arguments);
6463 }
6464 else
6465 {
6466 struct elt_loc_list *l;
6467 tree initial;
6468
6469 /* Try harder: when the address of a constant-pool
6470 integer is passed, its value can easily be read back.  */
6471 item = XEXP (item, 1);
6472 if (GET_CODE (item) == SUBREG)
6473 item = SUBREG_REG (item);
6474 gcc_assert (GET_CODE (item) == VALUE);
6475 val = CSELIB_VAL_PTR (item);
6476 for (l = val->locs; l; l = l->next)
6477 if (GET_CODE (l->loc) == SYMBOL_REF
6478 && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
6479 && SYMBOL_REF_DECL (l->loc)
6480 && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
6481 {
6482 initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
6483 if (tree_fits_shwi_p (initial))
6484 {
6485 item = GEN_INT (tree_to_shwi (initial));
6486 item = gen_rtx_CONCAT (indmode, mem, item);
6487 call_arguments
6488 = gen_rtx_EXPR_LIST (VOIDmode, item,
6489 call_arguments);
6490 }
6491 break;
6492 }
6493 }
6494 }
6495 targetm.calls.function_arg_advance (args_so_far, mode,
6496 argtype, true);
6497 t = TREE_CHAIN (t);
6498 }
6499 }
6500
6501 /* Add debug arguments. */
6502 if (fndecl
6503 && TREE_CODE (fndecl) == FUNCTION_DECL
6504 && DECL_HAS_DEBUG_ARGS_P (fndecl))
6505 {
6506 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (fndecl);
6507 if (debug_args)
6508 {
6509 unsigned int ix;
6510 tree param;
6511 for (ix = 0; vec_safe_iterate (*debug_args, ix, &param); ix += 2)
6512 {
6513 rtx item;
6514 tree dtemp = (**debug_args)[ix + 1];
6515 machine_mode mode = DECL_MODE (dtemp);
6516 item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
6517 item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
6518 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6519 call_arguments);
6520 }
6521 }
6522 }
6523
6524 /* Reverse call_arguments chain. */
6525 prev = NULL_RTX;
6526 for (cur = call_arguments; cur; cur = next)
6527 {
6528 next = XEXP (cur, 1);
6529 XEXP (cur, 1) = prev;
6530 prev = cur;
6531 }
6532 call_arguments = prev;
6533
6534 x = get_call_rtx_from (insn);
6535 if (x)
6536 {
6537 x = XEXP (XEXP (x, 0), 0);
6538 if (GET_CODE (x) == SYMBOL_REF)
6539 /* Don't record anything. */;
6540 else if (CONSTANT_P (x))
6541 {
6542 x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
6543 pc_rtx, x);
6544 call_arguments
6545 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6546 }
6547 else
6548 {
6549 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6550 if (val && cselib_preserved_value_p (val))
6551 {
6552 x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
6553 call_arguments
6554 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6555 }
6556 }
6557 }
6558 if (this_arg)
6559 {
6560 machine_mode mode
6561 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
6562 rtx clobbered = gen_rtx_MEM (mode, this_arg);
6563 HOST_WIDE_INT token
6564 = tree_to_shwi (OBJ_TYPE_REF_TOKEN (obj_type_ref));
6565 if (token)
6566 clobbered = plus_constant (mode, clobbered,
6567 token * GET_MODE_SIZE (mode));
6568 clobbered = gen_rtx_MEM (mode, clobbered);
6569 x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
6570 call_arguments
6571 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6572 }
6573 }
6574
6575 /* Callback for cselib_record_sets_hook.  It records the uses and
6576 stores in an insn as micro operations, after cselib_record_sets has
6577 analyzed the sets in the insn but before it modifies the stored
6578 values in its internal tables.  If cselib_record_sets does not call
6579 it (perhaps because we're not doing cselib in the first place), it is
6580 called directly instead, with SETS and N_SETS both 0.  */
6581
6582 static void
6583 add_with_sets (rtx_insn *insn, struct cselib_set *sets, int n_sets)
6584 {
6585 basic_block bb = BLOCK_FOR_INSN (insn);
6586 int n1, n2;
6587 struct count_use_info cui;
6588 micro_operation *mos;
6589
6590 cselib_hook_called = true;
6591
6592 cui.insn = insn;
6593 cui.bb = bb;
6594 cui.sets = sets;
6595 cui.n_sets = n_sets;
6596
6597 n1 = VTI (bb)->mos.length ();
6598 cui.store_p = false;
6599 note_uses (&PATTERN (insn), add_uses_1, &cui);
6600 n2 = VTI (bb)->mos.length () - 1;
6601 mos = VTI (bb)->mos.address ();
6602
6603 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
6604 MO_VAL_LOC last. */
6605 while (n1 < n2)
6606 {
6607 while (n1 < n2 && mos[n1].type == MO_USE)
6608 n1++;
6609 while (n1 < n2 && mos[n2].type != MO_USE)
6610 n2--;
6611 if (n1 < n2)
6612 std::swap (mos[n1], mos[n2]);
6613 }
6614
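/* Now move any MO_VAL_LOCs in the remaining range to the end.  */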
6615 n2 = VTI (bb)->mos.length () - 1;
6616 while (n1 < n2)
6617 {
6618 while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
6619 n1++;
6620 while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
6621 n2--;
6622 if (n1 < n2)
6623 std::swap (mos[n1], mos[n2]);
6624 }
6625
6626 if (CALL_P (insn))
6627 {
6628 micro_operation mo;
6629
6630 mo.type = MO_CALL;
6631 mo.insn = insn;
6632 mo.u.loc = call_arguments;
6633 call_arguments = NULL_RTX;
6634
6635 if (dump_file && (dump_flags & TDF_DETAILS))
6636 log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
6637 VTI (bb)->mos.safe_push (mo);
6638 }
6639
6640 n1 = VTI (bb)->mos.length ();
6641 /* This will record NEXT_INSN (insn), such that we can
6642 insert notes before it without worrying about any
6643 notes that MO_USEs might emit after the insn. */
6644 cui.store_p = true;
6645 note_stores (PATTERN (insn), add_stores, &cui);
6646 n2 = VTI (bb)->mos.length () - 1;
6647 mos = VTI (bb)->mos.address ();
6648
6649 /* Order the MO_VAL_USEs first (note_stores does nothing
6650 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6651 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
6652 while (n1 < n2)
6653 {
6654 while (n1 < n2 && mos[n1].type == MO_VAL_USE)
6655 n1++;
6656 while (n1 < n2 && mos[n2].type != MO_VAL_USE)
6657 n2--;
6658 if (n1 < n2)
6659 std::swap (mos[n1], mos[n2]);
6660 }
6661
6662 n2 = VTI (bb)->mos.length () - 1;
6663 while (n1 < n2)
6664 {
6665 while (n1 < n2 && mos[n1].type == MO_CLOBBER)
6666 n1++;
6667 while (n1 < n2 && mos[n2].type != MO_CLOBBER)
6668 n2--;
6669 if (n1 < n2)
6670 std::swap (mos[n1], mos[n2]);
6671 }
6672 }
6673
6674 static enum var_init_status
6675 find_src_status (dataflow_set *in, rtx src)
6676 {
6677 tree decl = NULL_TREE;
6678 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
6679
6680 if (! flag_var_tracking_uninit)
6681 status = VAR_INIT_STATUS_INITIALIZED;
6682
6683 if (src && REG_P (src))
6684 decl = var_debug_decl (REG_EXPR (src));
6685 else if (src && MEM_P (src))
6686 decl = var_debug_decl (MEM_EXPR (src));
6687
6688 if (src && decl)
6689 status = get_init_value (in, src, dv_from_decl (decl));
6690
6691 return status;
6692 }
6693
6694 /* SRC is the source of an assignment. Use SET to try to find what
6695 was ultimately assigned to SRC. Return that value if known,
6696 otherwise return SRC itself. */
6697
6698 static rtx
6699 find_src_set_src (dataflow_set *set, rtx src)
6700 {
6701 tree decl = NULL_TREE; /* The variable being copied around. */
6702 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
6703 variable *var;
6704 location_chain *nextp;
6705 int i;
6706 bool found;
6707
6708 if (src && REG_P (src))
6709 decl = var_debug_decl (REG_EXPR (src));
6710 else if (src && MEM_P (src))
6711 decl = var_debug_decl (MEM_EXPR (src));
6712
6713 if (src && decl)
6714 {
6715 decl_or_value dv = dv_from_decl (decl);
6716
6717 var = shared_hash_find (set->vars, dv);
6718 if (var)
6719 {
6720 found = false;
6721 for (i = 0; i < var->n_var_parts && !found; i++)
6722 for (nextp = var->var_part[i].loc_chain; nextp && !found;
6723 nextp = nextp->next)
6724 if (rtx_equal_p (nextp->loc, src))
6725 {
6726 set_src = nextp->set_src;
6727 found = true;
6728 }
6729
6730 }
6731 }
6732
6733 return set_src;
6734 }
6735
6736 /* Compute the changes of variable locations in the basic block BB. */
6737
6738 static bool
6739 compute_bb_dataflow (basic_block bb)
6740 {
6741 unsigned int i;
6742 micro_operation *mo;
6743 bool changed;
6744 dataflow_set old_out;
6745 dataflow_set *in = &VTI (bb)->in;
6746 dataflow_set *out = &VTI (bb)->out;
6747
6748 dataflow_set_init (&old_out);
6749 dataflow_set_copy (&old_out, out);
6750 dataflow_set_copy (out, in);
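/* Simulate this block's micro operations on a copy of the IN set; the
   result becomes the new OUT set, compared against the old OUT below.  */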
6751
6752 if (MAY_HAVE_DEBUG_BIND_INSNS)
6753 local_get_addr_cache = new hash_map<rtx, rtx>;
6754
6755 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
6756 {
6757 rtx_insn *insn = mo->insn;
6758
6759 switch (mo->type)
6760 {
6761 case MO_CALL:
6762 dataflow_set_clear_at_call (out, insn);
6763 break;
6764
6765 case MO_USE:
6766 {
6767 rtx loc = mo->u.loc;
6768
6769 if (REG_P (loc))
6770 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6771 else if (MEM_P (loc))
6772 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6773 }
6774 break;
6775
6776 case MO_VAL_LOC:
6777 {
6778 rtx loc = mo->u.loc;
6779 rtx val, vloc;
6780 tree var;
6781
6782 if (GET_CODE (loc) == CONCAT)
6783 {
6784 val = XEXP (loc, 0);
6785 vloc = XEXP (loc, 1);
6786 }
6787 else
6788 {
6789 val = NULL_RTX;
6790 vloc = loc;
6791 }
6792
6793 var = PAT_VAR_LOCATION_DECL (vloc);
6794
6795 clobber_variable_part (out, NULL_RTX,
6796 dv_from_decl (var), 0, NULL_RTX);
6797 if (val)
6798 {
6799 if (VAL_NEEDS_RESOLUTION (loc))
6800 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6801 set_variable_part (out, val, dv_from_decl (var), 0,
6802 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6803 INSERT);
6804 }
6805 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
6806 set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
6807 dv_from_decl (var), 0,
6808 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6809 INSERT);
6810 }
6811 break;
6812
6813 case MO_VAL_USE:
6814 {
6815 rtx loc = mo->u.loc;
6816 rtx val, vloc, uloc;
6817
6818 vloc = uloc = XEXP (loc, 1);
6819 val = XEXP (loc, 0);
6820
6821 if (GET_CODE (val) == CONCAT)
6822 {
6823 uloc = XEXP (val, 1);
6824 val = XEXP (val, 0);
6825 }
6826
6827 if (VAL_NEEDS_RESOLUTION (loc))
6828 val_resolve (out, val, vloc, insn);
6829 else
6830 val_store (out, val, uloc, insn, false);
6831
6832 if (VAL_HOLDS_TRACK_EXPR (loc))
6833 {
6834 if (GET_CODE (uloc) == REG)
6835 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6836 NULL);
6837 else if (GET_CODE (uloc) == MEM)
6838 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6839 NULL);
6840 }
6841 }
6842 break;
6843
6844 case MO_VAL_SET:
6845 {
6846 rtx loc = mo->u.loc;
6847 rtx val, vloc, uloc;
6848 rtx dstv, srcv;
6849
6850 vloc = loc;
6851 uloc = XEXP (vloc, 1);
6852 val = XEXP (vloc, 0);
6853 vloc = uloc;
6854
6855 if (GET_CODE (uloc) == SET)
6856 {
6857 dstv = SET_DEST (uloc);
6858 srcv = SET_SRC (uloc);
6859 }
6860 else
6861 {
6862 dstv = uloc;
6863 srcv = NULL;
6864 }
6865
6866 if (GET_CODE (val) == CONCAT)
6867 {
6868 dstv = vloc = XEXP (val, 1);
6869 val = XEXP (val, 0);
6870 }
6871
6872 if (GET_CODE (vloc) == SET)
6873 {
6874 srcv = SET_SRC (vloc);
6875
6876 gcc_assert (val != srcv);
6877 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6878
6879 dstv = vloc = SET_DEST (vloc);
6880
6881 if (VAL_NEEDS_RESOLUTION (loc))
6882 val_resolve (out, val, srcv, insn);
6883 }
6884 else if (VAL_NEEDS_RESOLUTION (loc))
6885 {
6886 gcc_assert (GET_CODE (uloc) == SET
6887 && GET_CODE (SET_SRC (uloc)) == REG);
6888 val_resolve (out, val, SET_SRC (uloc), insn);
6889 }
6890
6891 if (VAL_HOLDS_TRACK_EXPR (loc))
6892 {
6893 if (VAL_EXPR_IS_CLOBBERED (loc))
6894 {
6895 if (REG_P (uloc))
6896 var_reg_delete (out, uloc, true);
6897 else if (MEM_P (uloc))
6898 {
6899 gcc_assert (MEM_P (dstv));
6900 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
6901 var_mem_delete (out, dstv, true);
6902 }
6903 }
6904 else
6905 {
6906 bool copied_p = VAL_EXPR_IS_COPIED (loc);
6907 rtx src = NULL, dst = uloc;
6908 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6909
6910 if (GET_CODE (uloc) == SET)
6911 {
6912 src = SET_SRC (uloc);
6913 dst = SET_DEST (uloc);
6914 }
6915
6916 if (copied_p)
6917 {
6918 if (flag_var_tracking_uninit)
6919 {
6920 status = find_src_status (in, src);
6921
6922 if (status == VAR_INIT_STATUS_UNKNOWN)
6923 status = find_src_status (out, src);
6924 }
6925
6926 src = find_src_set_src (in, src);
6927 }
6928
6929 if (REG_P (dst))
6930 var_reg_delete_and_set (out, dst, !copied_p,
6931 status, srcv);
6932 else if (MEM_P (dst))
6933 {
6934 gcc_assert (MEM_P (dstv));
6935 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
6936 var_mem_delete_and_set (out, dstv, !copied_p,
6937 status, srcv);
6938 }
6939 }
6940 }
6941 else if (REG_P (uloc))
6942 var_regno_delete (out, REGNO (uloc));
6943 else if (MEM_P (uloc))
6944 {
6945 gcc_checking_assert (GET_CODE (vloc) == MEM);
6946 gcc_checking_assert (dstv == vloc);
6947 if (dstv != vloc)
6948 clobber_overlapping_mems (out, vloc);
6949 }
6950
6951 val_store (out, val, dstv, insn, true);
6952 }
6953 break;
6954
6955 case MO_SET:
6956 {
6957 rtx loc = mo->u.loc;
6958 rtx set_src = NULL;
6959
6960 if (GET_CODE (loc) == SET)
6961 {
6962 set_src = SET_SRC (loc);
6963 loc = SET_DEST (loc);
6964 }
6965
6966 if (REG_P (loc))
6967 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6968 set_src);
6969 else if (MEM_P (loc))
6970 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6971 set_src);
6972 }
6973 break;
6974
6975 case MO_COPY:
6976 {
6977 rtx loc = mo->u.loc;
6978 enum var_init_status src_status;
6979 rtx set_src = NULL;
6980
6981 if (GET_CODE (loc) == SET)
6982 {
6983 set_src = SET_SRC (loc);
6984 loc = SET_DEST (loc);
6985 }
6986
6987 if (! flag_var_tracking_uninit)
6988 src_status = VAR_INIT_STATUS_INITIALIZED;
6989 else
6990 {
6991 src_status = find_src_status (in, set_src);
6992
6993 if (src_status == VAR_INIT_STATUS_UNKNOWN)
6994 src_status = find_src_status (out, set_src);
6995 }
6996
6997 set_src = find_src_set_src (in, set_src);
6998
6999 if (REG_P (loc))
7000 var_reg_delete_and_set (out, loc, false, src_status, set_src);
7001 else if (MEM_P (loc))
7002 var_mem_delete_and_set (out, loc, false, src_status, set_src);
7003 }
7004 break;
7005
7006 case MO_USE_NO_VAR:
7007 {
7008 rtx loc = mo->u.loc;
7009
7010 if (REG_P (loc))
7011 var_reg_delete (out, loc, false);
7012 else if (MEM_P (loc))
7013 var_mem_delete (out, loc, false);
7014 }
7015 break;
7016
7017 case MO_CLOBBER:
7018 {
7019 rtx loc = mo->u.loc;
7020
7021 if (REG_P (loc))
7022 var_reg_delete (out, loc, true);
7023 else if (MEM_P (loc))
7024 var_mem_delete (out, loc, true);
7025 }
7026 break;
7027
7028 case MO_ADJUST:
7029 out->stack_adjust += mo->u.adjust;
7030 break;
7031 }
7032 }
7033
7034 if (MAY_HAVE_DEBUG_BIND_INSNS)
7035 {
7036 delete local_get_addr_cache;
7037 local_get_addr_cache = NULL;
7038
7039 dataflow_set_equiv_regs (out);
7040 shared_hash_htab (out->vars)
7041 ->traverse <dataflow_set *, canonicalize_values_mark> (out);
7042 shared_hash_htab (out->vars)
7043 ->traverse <dataflow_set *, canonicalize_values_star> (out);
7044 if (flag_checking)
7045 shared_hash_htab (out->vars)
7046 ->traverse <dataflow_set *, canonicalize_loc_order_check> (out);
7047 }
7048 changed = dataflow_set_different (&old_out, out);
7049 dataflow_set_destroy (&old_out);
7050 return changed;
7051 }
7052
7053 /* Find the locations of variables in the whole function. */
7054
7055 static bool
7056 vt_find_locations (void)
7057 {
7058 bb_heap_t *worklist = new bb_heap_t (LONG_MIN);
7059 bb_heap_t *pending = new bb_heap_t (LONG_MIN);
7060 sbitmap in_worklist, in_pending;
7061 basic_block bb;
7062 edge e;
7063 int *bb_order;
7064 int *rc_order;
7065 int i;
7066 int htabsz = 0;
7067 int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
7068 bool success = true;
7069
7070 timevar_push (TV_VAR_TRACKING_DATAFLOW);
7071 /* Compute the reverse completion order of a depth-first search of the
7072 CFG so that the data-flow iteration runs faster.  */
7073 rc_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
7074 bb_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
7075 pre_and_rev_post_order_compute (NULL, rc_order, false);
7076 for (i = 0; i < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; i++)
7077 bb_order[rc_order[i]] = i;
7078 free (rc_order);
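/* bb_order[] now maps a basic block index to its position in the reverse
   completion order; it is used as the priority key for the worklists.  */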
7079
7080 auto_sbitmap visited (last_basic_block_for_fn (cfun));
7081 in_worklist = sbitmap_alloc (last_basic_block_for_fn (cfun));
7082 in_pending = sbitmap_alloc (last_basic_block_for_fn (cfun));
7083 bitmap_clear (in_worklist);
7084
7085 FOR_EACH_BB_FN (bb, cfun)
7086 pending->insert (bb_order[bb->index], bb);
7087 bitmap_ones (in_pending);
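/* Iterate to a fixed point: blocks whose OUT set changed requeue their
   successors, either on the current worklist (if not yet visited in this
   round) or on the pending list for the next round.  */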
7088
7089 while (success && !pending->empty ())
7090 {
7091 std::swap (worklist, pending);
7092 std::swap (in_worklist, in_pending);
7093
7094 bitmap_clear (visited);
7095
7096 while (!worklist->empty ())
7097 {
7098 bb = worklist->extract_min ();
7099 bitmap_clear_bit (in_worklist, bb->index);
7100 gcc_assert (!bitmap_bit_p (visited, bb->index));
7101 if (!bitmap_bit_p (visited, bb->index))
7102 {
7103 bool changed;
7104 edge_iterator ei;
7105 int oldinsz, oldoutsz;
7106
7107 bitmap_set_bit (visited, bb->index);
7108
7109 if (VTI (bb)->in.vars)
7110 {
7111 htabsz
7112 -= shared_hash_htab (VTI (bb)->in.vars)->size ()
7113 + shared_hash_htab (VTI (bb)->out.vars)->size ();
7114 oldinsz = shared_hash_htab (VTI (bb)->in.vars)->elements ();
7115 oldoutsz
7116 = shared_hash_htab (VTI (bb)->out.vars)->elements ();
7117 }
7118 else
7119 oldinsz = oldoutsz = 0;
7120
7121 if (MAY_HAVE_DEBUG_BIND_INSNS)
7122 {
7123 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
7124 bool first = true, adjust = false;
7125
7126 /* Calculate the IN set as the intersection of
7127 predecessor OUT sets. */
7128
7129 dataflow_set_clear (in);
7130 dst_can_be_shared = true;
7131
7132 FOR_EACH_EDGE (e, ei, bb->preds)
7133 if (!VTI (e->src)->flooded)
7134 gcc_assert (bb_order[bb->index]
7135 <= bb_order[e->src->index]);
7136 else if (first)
7137 {
7138 dataflow_set_copy (in, &VTI (e->src)->out);
7139 first_out = &VTI (e->src)->out;
7140 first = false;
7141 }
7142 else
7143 {
7144 dataflow_set_merge (in, &VTI (e->src)->out);
7145 adjust = true;
7146 }
7147
7148 if (adjust)
7149 {
7150 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
7151
7152 if (flag_checking)
7153 /* Merge and merge_adjust should keep entries in
7154 canonical order. */
7155 shared_hash_htab (in->vars)
7156 ->traverse <dataflow_set *,
7157 canonicalize_loc_order_check> (in);
7158
7159 if (dst_can_be_shared)
7160 {
7161 shared_hash_destroy (in->vars);
7162 in->vars = shared_hash_copy (first_out->vars);
7163 }
7164 }
7165
7166 VTI (bb)->flooded = true;
7167 }
7168 else
7169 {
7170 /* Calculate the IN set as union of predecessor OUT sets. */
7171 dataflow_set_clear (&VTI (bb)->in);
7172 FOR_EACH_EDGE (e, ei, bb->preds)
7173 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
7174 }
7175
7176 changed = compute_bb_dataflow (bb);
7177 htabsz += shared_hash_htab (VTI (bb)->in.vars)->size ()
7178 + shared_hash_htab (VTI (bb)->out.vars)->size ();
7179
7180 if (htabmax && htabsz > htabmax)
7181 {
7182 if (MAY_HAVE_DEBUG_BIND_INSNS)
7183 inform (DECL_SOURCE_LOCATION (cfun->decl),
7184 "variable tracking size limit exceeded with "
7185 "-fvar-tracking-assignments, retrying without");
7186 else
7187 inform (DECL_SOURCE_LOCATION (cfun->decl),
7188 "variable tracking size limit exceeded");
7189 success = false;
7190 break;
7191 }
7192
7193 if (changed)
7194 {
7195 FOR_EACH_EDGE (e, ei, bb->succs)
7196 {
7197 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7198 continue;
7199
7200 if (bitmap_bit_p (visited, e->dest->index))
7201 {
7202 if (!bitmap_bit_p (in_pending, e->dest->index))
7203 {
7204 /* Send E->DEST to next round. */
7205 bitmap_set_bit (in_pending, e->dest->index);
7206 pending->insert (bb_order[e->dest->index],
7207 e->dest);
7208 }
7209 }
7210 else if (!bitmap_bit_p (in_worklist, e->dest->index))
7211 {
7212 /* Add E->DEST to current round. */
7213 bitmap_set_bit (in_worklist, e->dest->index);
7214 worklist->insert (bb_order[e->dest->index],
7215 e->dest);
7216 }
7217 }
7218 }
7219
7220 if (dump_file)
7221 fprintf (dump_file,
7222 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
7223 bb->index,
7224 (int)shared_hash_htab (VTI (bb)->in.vars)->size (),
7225 oldinsz,
7226 (int)shared_hash_htab (VTI (bb)->out.vars)->size (),
7227 oldoutsz,
7228 (int)worklist->nodes (), (int)pending->nodes (),
7229 htabsz);
7230
7231 if (dump_file && (dump_flags & TDF_DETAILS))
7232 {
7233 fprintf (dump_file, "BB %i IN:\n", bb->index);
7234 dump_dataflow_set (&VTI (bb)->in);
7235 fprintf (dump_file, "BB %i OUT:\n", bb->index);
7236 dump_dataflow_set (&VTI (bb)->out);
7237 }
7238 }
7239 }
7240 }
7241
7242 if (success && MAY_HAVE_DEBUG_BIND_INSNS)
7243 FOR_EACH_BB_FN (bb, cfun)
7244 gcc_assert (VTI (bb)->flooded);
7245
7246 free (bb_order);
7247 delete worklist;
7248 delete pending;
7249 sbitmap_free (in_worklist);
7250 sbitmap_free (in_pending);
7251
7252 timevar_pop (TV_VAR_TRACKING_DATAFLOW);
7253 return success;
7254 }
7255
7256 /* Print the content of the LIST to dump file. */
7257
7258 static void
7259 dump_attrs_list (attrs *list)
7260 {
7261 for (; list; list = list->next)
7262 {
7263 if (dv_is_decl_p (list->dv))
7264 print_mem_expr (dump_file, dv_as_decl (list->dv));
7265 else
7266 print_rtl_single (dump_file, dv_as_value (list->dv));
7267 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
7268 }
7269 fprintf (dump_file, "\n");
7270 }
7271
7272 /* Print the information about variable *SLOT to dump file. */
7273
7274 int
7275 dump_var_tracking_slot (variable **slot, void *data ATTRIBUTE_UNUSED)
7276 {
7277 variable *var = *slot;
7278
7279 dump_var (var);
7280
7281 /* Continue traversing the hash table. */
7282 return 1;
7283 }
7284
7285 /* Print the information about variable VAR to dump file. */
7286
7287 static void
7288 dump_var (variable *var)
7289 {
7290 int i;
7291 location_chain *node;
7292
7293 if (dv_is_decl_p (var->dv))
7294 {
7295 const_tree decl = dv_as_decl (var->dv);
7296
7297 if (DECL_NAME (decl))
7298 {
7299 fprintf (dump_file, " name: %s",
7300 IDENTIFIER_POINTER (DECL_NAME (decl)));
7301 if (dump_flags & TDF_UID)
7302 fprintf (dump_file, "D.%u", DECL_UID (decl));
7303 }
7304 else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
7305 fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
7306 else
7307 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
7308 fprintf (dump_file, "\n");
7309 }
7310 else
7311 {
7312 fputc (' ', dump_file);
7313 print_rtl_single (dump_file, dv_as_value (var->dv));
7314 }
7315
7316 for (i = 0; i < var->n_var_parts; i++)
7317 {
7318 fprintf (dump_file, " offset %ld\n",
7319 (long)(var->onepart ? 0 : VAR_PART_OFFSET (var, i)));
7320 for (node = var->var_part[i].loc_chain; node; node = node->next)
7321 {
7322 fprintf (dump_file, " ");
7323 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
7324 fprintf (dump_file, "[uninit]");
7325 print_rtl_single (dump_file, node->loc);
7326 }
7327 }
7328 }
7329
7330 /* Print the information about variables from hash table VARS to dump file. */
7331
7332 static void
7333 dump_vars (variable_table_type *vars)
7334 {
7335 if (vars->elements () > 0)
7336 {
7337 fprintf (dump_file, "Variables:\n");
7338 vars->traverse <void *, dump_var_tracking_slot> (NULL);
7339 }
7340 }
7341
7342 /* Print the dataflow set SET to dump file. */
7343
7344 static void
7345 dump_dataflow_set (dataflow_set *set)
7346 {
7347 int i;
7348
7349 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
7350 set->stack_adjust);
7351 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7352 {
7353 if (set->regs[i])
7354 {
7355 fprintf (dump_file, "Reg %d:", i);
7356 dump_attrs_list (set->regs[i]);
7357 }
7358 }
7359 dump_vars (shared_hash_htab (set->vars));
7360 fprintf (dump_file, "\n");
7361 }
7362
7363 /* Print the IN and OUT sets for each basic block to dump file. */
7364
7365 static void
7366 dump_dataflow_sets (void)
7367 {
7368 basic_block bb;
7369
7370 FOR_EACH_BB_FN (bb, cfun)
7371 {
7372 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
7373 fprintf (dump_file, "IN:\n");
7374 dump_dataflow_set (&VTI (bb)->in);
7375 fprintf (dump_file, "OUT:\n");
7376 dump_dataflow_set (&VTI (bb)->out);
7377 }
7378 }
7379
7380 /* Return the variable for DV in dropped_values, inserting one if
7381 requested with INSERT. */
7382
7383 static inline variable *
7384 variable_from_dropped (decl_or_value dv, enum insert_option insert)
7385 {
7386 variable **slot;
7387 variable *empty_var;
7388 onepart_enum onepart;
7389
7390 slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv), insert);
7391
7392 if (!slot)
7393 return NULL;
7394
7395 if (*slot)
7396 return *slot;
7397
7398 gcc_checking_assert (insert == INSERT);
7399
7400 onepart = dv_onepart_p (dv);
7401
7402 gcc_checking_assert (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR);
7403
7404 empty_var = onepart_pool_allocate (onepart);
7405 empty_var->dv = dv;
7406 empty_var->refcount = 1;
7407 empty_var->n_var_parts = 0;
7408 empty_var->onepart = onepart;
7409 empty_var->in_changed_variables = false;
7410 empty_var->var_part[0].loc_chain = NULL;
7411 empty_var->var_part[0].cur_loc = NULL;
7412 VAR_LOC_1PAUX (empty_var) = NULL;
7413 set_dv_changed (dv, true);
7414
7415 *slot = empty_var;
7416
7417 return empty_var;
7418 }
7419
7420 /* Recover the one-part aux from dropped_values. */
7421
7422 static struct onepart_aux *
7423 recover_dropped_1paux (variable *var)
7424 {
7425 variable *dvar;
7426
7427 gcc_checking_assert (var->onepart);
7428
7429 if (VAR_LOC_1PAUX (var))
7430 return VAR_LOC_1PAUX (var);
7431
7432 if (var->onepart == ONEPART_VDECL)
7433 return NULL;
7434
7435 dvar = variable_from_dropped (var->dv, NO_INSERT);
7436
7437 if (!dvar)
7438 return NULL;
7439
7440 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (dvar);
7441 VAR_LOC_1PAUX (dvar) = NULL;
7442
7443 return VAR_LOC_1PAUX (var);
7444 }
7445
7446 /* Add variable VAR to the hash table of changed variables and
7447 if it has no locations delete it from SET's hash table. */
7448
7449 static void
7450 variable_was_changed (variable *var, dataflow_set *set)
7451 {
7452 hashval_t hash = dv_htab_hash (var->dv);
7453
7454 if (emit_notes)
7455 {
7456 variable **slot;
7457
7458 /* Remember this decl or VALUE has been added to changed_variables. */
7459 set_dv_changed (var->dv, true);
7460
7461 slot = changed_variables->find_slot_with_hash (var->dv, hash, INSERT);
7462
7463 if (*slot)
7464 {
7465 variable *old_var = *slot;
7466 gcc_assert (old_var->in_changed_variables);
7467 old_var->in_changed_variables = false;
7468 if (var != old_var && var->onepart)
7469 {
7470 /* Restore the auxiliary info from an empty variable
7471 previously created for changed_variables, so it is
7472 not lost. */
7473 gcc_checking_assert (!VAR_LOC_1PAUX (var));
7474 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (old_var);
7475 VAR_LOC_1PAUX (old_var) = NULL;
7476 }
7477 variable_htab_free (*slot);
7478 }
7479
7480 if (set && var->n_var_parts == 0)
7481 {
7482 onepart_enum onepart = var->onepart;
7483 variable *empty_var = NULL;
7484 variable **dslot = NULL;
7485
7486 if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR)
7487 {
7488 dslot = dropped_values->find_slot_with_hash (var->dv,
7489 dv_htab_hash (var->dv),
7490 INSERT);
7491 empty_var = *dslot;
7492
7493 if (empty_var)
7494 {
7495 gcc_checking_assert (!empty_var->in_changed_variables);
7496 if (!VAR_LOC_1PAUX (var))
7497 {
7498 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (empty_var);
7499 VAR_LOC_1PAUX (empty_var) = NULL;
7500 }
7501 else
7502 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
7503 }
7504 }
7505
7506 if (!empty_var)
7507 {
7508 empty_var = onepart_pool_allocate (onepart);
7509 empty_var->dv = var->dv;
7510 empty_var->refcount = 1;
7511 empty_var->n_var_parts = 0;
7512 empty_var->onepart = onepart;
7513 if (dslot)
7514 {
7515 empty_var->refcount++;
7516 *dslot = empty_var;
7517 }
7518 }
7519 else
7520 empty_var->refcount++;
7521 empty_var->in_changed_variables = true;
7522 *slot = empty_var;
7523 if (onepart)
7524 {
7525 empty_var->var_part[0].loc_chain = NULL;
7526 empty_var->var_part[0].cur_loc = NULL;
7527 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (var);
7528 VAR_LOC_1PAUX (var) = NULL;
7529 }
7530 goto drop_var;
7531 }
7532 else
7533 {
7534 if (var->onepart && !VAR_LOC_1PAUX (var))
7535 recover_dropped_1paux (var);
7536 var->refcount++;
7537 var->in_changed_variables = true;
7538 *slot = var;
7539 }
7540 }
7541 else
7542 {
7543 gcc_assert (set);
7544 if (var->n_var_parts == 0)
7545 {
7546 variable **slot;
7547
7548 drop_var:
7549 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
7550 if (slot)
7551 {
7552 if (shared_hash_shared (set->vars))
7553 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
7554 NO_INSERT);
7555 shared_hash_htab (set->vars)->clear_slot (slot);
7556 }
7557 }
7558 }
7559 }
7560
7561 /* Look for the index in VAR->var_part corresponding to OFFSET.
7562 Return -1 if not found. If INSERTION_POINT is non-NULL, the
7563 referenced int will be set to the index that the part has or should
7564 have, if it should be inserted. */
7565
7566 static inline int
7567 find_variable_location_part (variable *var, HOST_WIDE_INT offset,
7568 int *insertion_point)
7569 {
7570 int pos, low, high;
7571
7572 if (var->onepart)
7573 {
7574 if (offset != 0)
7575 return -1;
7576
7577 if (insertion_point)
7578 *insertion_point = 0;
7579
7580 return var->n_var_parts - 1;
7581 }
7582
7583 /* Find the location part. */
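/* Lower-bound binary search over the var_part array, which is kept
   sorted by VAR_PART_OFFSET.  */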
7584 low = 0;
7585 high = var->n_var_parts;
7586 while (low != high)
7587 {
7588 pos = (low + high) / 2;
7589 if (VAR_PART_OFFSET (var, pos) < offset)
7590 low = pos + 1;
7591 else
7592 high = pos;
7593 }
7594 pos = low;
7595
7596 if (insertion_point)
7597 *insertion_point = pos;
7598
7599 if (pos < var->n_var_parts && VAR_PART_OFFSET (var, pos) == offset)
7600 return pos;
7601
7602 return -1;
7603 }
7604
7605 static variable **
7606 set_slot_part (dataflow_set *set, rtx loc, variable **slot,
7607 decl_or_value dv, HOST_WIDE_INT offset,
7608 enum var_init_status initialized, rtx set_src)
7609 {
7610 int pos;
7611 location_chain *node, *next;
7612 location_chain **nextp;
7613 variable *var;
7614 onepart_enum onepart;
7615
7616 var = *slot;
7617
7618 if (var)
7619 onepart = var->onepart;
7620 else
7621 onepart = dv_onepart_p (dv);
7622
7623 gcc_checking_assert (offset == 0 || !onepart);
7624 gcc_checking_assert (loc != dv_as_opaque (dv));
7625
7626 if (! flag_var_tracking_uninit)
7627 initialized = VAR_INIT_STATUS_INITIALIZED;
7628
7629 if (!var)
7630 {
7631 /* Create new variable information. */
7632 var = onepart_pool_allocate (onepart);
7633 var->dv = dv;
7634 var->refcount = 1;
7635 var->n_var_parts = 1;
7636 var->onepart = onepart;
7637 var->in_changed_variables = false;
7638 if (var->onepart)
7639 VAR_LOC_1PAUX (var) = NULL;
7640 else
7641 VAR_PART_OFFSET (var, 0) = offset;
7642 var->var_part[0].loc_chain = NULL;
7643 var->var_part[0].cur_loc = NULL;
7644 *slot = var;
7645 pos = 0;
7646 nextp = &var->var_part[0].loc_chain;
7647 }
7648 else if (onepart)
7649 {
7650 int r = -1, c = 0;
7651
7652 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
7653
7654 pos = 0;
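/* Walk the single location chain looking for LOC.  R becomes 0 if LOC is
   already in the chain, or 1 at the node before which LOC should be
   inserted; C counts the preceding nodes so the chain can be re-walked
   after the variable is unshared below.  */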
7655
7656 if (GET_CODE (loc) == VALUE)
7657 {
7658 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7659 nextp = &node->next)
7660 if (GET_CODE (node->loc) == VALUE)
7661 {
7662 if (node->loc == loc)
7663 {
7664 r = 0;
7665 break;
7666 }
7667 if (canon_value_cmp (node->loc, loc))
7668 c++;
7669 else
7670 {
7671 r = 1;
7672 break;
7673 }
7674 }
7675 else if (REG_P (node->loc) || MEM_P (node->loc))
7676 c++;
7677 else
7678 {
7679 r = 1;
7680 break;
7681 }
7682 }
7683 else if (REG_P (loc))
7684 {
7685 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7686 nextp = &node->next)
7687 if (REG_P (node->loc))
7688 {
7689 if (REGNO (node->loc) < REGNO (loc))
7690 c++;
7691 else
7692 {
7693 if (REGNO (node->loc) == REGNO (loc))
7694 r = 0;
7695 else
7696 r = 1;
7697 break;
7698 }
7699 }
7700 else
7701 {
7702 r = 1;
7703 break;
7704 }
7705 }
7706 else if (MEM_P (loc))
7707 {
7708 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7709 nextp = &node->next)
7710 if (REG_P (node->loc))
7711 c++;
7712 else if (MEM_P (node->loc))
7713 {
7714 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
7715 break;
7716 else
7717 c++;
7718 }
7719 else
7720 {
7721 r = 1;
7722 break;
7723 }
7724 }
7725 else
7726 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7727 nextp = &node->next)
7728 if ((r = loc_cmp (node->loc, loc)) >= 0)
7729 break;
7730 else
7731 c++;
7732
7733 if (r == 0)
7734 return slot;
7735
7736 if (shared_var_p (var, set->vars))
7737 {
7738 slot = unshare_variable (set, slot, var, initialized);
7739 var = *slot;
7740 for (nextp = &var->var_part[0].loc_chain; c;
7741 nextp = &(*nextp)->next)
7742 c--;
7743 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
7744 }
7745 }
7746 else
7747 {
7748 int inspos = 0;
7749
7750 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
7751
7752 pos = find_variable_location_part (var, offset, &inspos);
7753
7754 if (pos >= 0)
7755 {
7756 node = var->var_part[pos].loc_chain;
7757
7758 if (node
7759 && ((REG_P (node->loc) && REG_P (loc)
7760 && REGNO (node->loc) == REGNO (loc))
7761 || rtx_equal_p (node->loc, loc)))
7762 {
7763 /* LOC is at the beginning of the chain, so we have nothing
7764 to do. */
7765 if (node->init < initialized)
7766 node->init = initialized;
7767 if (set_src != NULL)
7768 node->set_src = set_src;
7769
7770 return slot;
7771 }
7772 else
7773 {
7774 /* We have to make a copy of a shared variable. */
7775 if (shared_var_p (var, set->vars))
7776 {
7777 slot = unshare_variable (set, slot, var, initialized);
7778 var = *slot;
7779 }
7780 }
7781 }
7782 else
7783 {
7784 /* We have not found the location part, so a new one will be created. */
7785
7786 /* We have to make a copy of the shared variable. */
7787 if (shared_var_p (var, set->vars))
7788 {
7789 slot = unshare_variable (set, slot, var, initialized);
7790 var = *slot;
7791 }
7792
7793 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
7794 thus there are at most MAX_VAR_PARTS different offsets. */
7795 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
7796 && (!var->n_var_parts || !onepart));
7797
7798 /* We have to move the elements of array starting at index
7799 inspos to the next position. */
7800 for (pos = var->n_var_parts; pos > inspos; pos--)
7801 var->var_part[pos] = var->var_part[pos - 1];
7802
7803 var->n_var_parts++;
7804 gcc_checking_assert (!onepart);
7805 VAR_PART_OFFSET (var, pos) = offset;
7806 var->var_part[pos].loc_chain = NULL;
7807 var->var_part[pos].cur_loc = NULL;
7808 }
7809
7810 /* Delete the location from the list. */
7811 nextp = &var->var_part[pos].loc_chain;
7812 for (node = var->var_part[pos].loc_chain; node; node = next)
7813 {
7814 next = node->next;
7815 if ((REG_P (node->loc) && REG_P (loc)
7816 && REGNO (node->loc) == REGNO (loc))
7817 || rtx_equal_p (node->loc, loc))
7818 {
7819 /* Save these values, to assign to the new node, before
7820 deleting this one. */
7821 if (node->init > initialized)
7822 initialized = node->init;
7823 if (node->set_src != NULL && set_src == NULL)
7824 set_src = node->set_src;
7825 if (var->var_part[pos].cur_loc == node->loc)
7826 var->var_part[pos].cur_loc = NULL;
7827 delete node;
7828 *nextp = next;
7829 break;
7830 }
7831 else
7832 nextp = &node->next;
7833 }
7834
7835 nextp = &var->var_part[pos].loc_chain;
7836 }
7837
7838 /* Add the location to the beginning. */
7839 node = new location_chain;
7840 node->loc = loc;
7841 node->init = initialized;
7842 node->set_src = set_src;
7843 node->next = *nextp;
7844 *nextp = node;
7845
7846 /* If no location was emitted yet, do so now. */
7847 if (var->var_part[pos].cur_loc == NULL)
7848 variable_was_changed (var, set);
7849
7850 return slot;
7851 }
7852
7853 /* Set the part of variable's location in the dataflow set SET. The
7854 variable part is specified by variable's declaration in DV and
7855 offset OFFSET and the part's location by LOC. IOPT should be
7856 NO_INSERT if the variable is known to be in SET already and the
7857 variable hash table must not be resized, and INSERT otherwise. */
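/* As a rough illustration (a hypothetical caller, not a quote from this
   file): when a micro operation records that hard register 3 now holds
   the tracked declaration X_DECL at offset 0, the recording would look
   roughly like

     set_variable_part (set, regno_reg_rtx[3], dv_from_decl (x_decl), 0,
			VAR_INIT_STATUS_INITIALIZED, src_rtx, INSERT);

   where x_decl and src_rtx stand in for the declaration and the SET_SRC
   of the store.  */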
7858
7859 static void
7860 set_variable_part (dataflow_set *set, rtx loc,
7861 decl_or_value dv, HOST_WIDE_INT offset,
7862 enum var_init_status initialized, rtx set_src,
7863 enum insert_option iopt)
7864 {
7865 variable **slot;
7866
7867 if (iopt == NO_INSERT)
7868 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7869 else
7870 {
7871 slot = shared_hash_find_slot (set->vars, dv);
7872 if (!slot)
7873 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
7874 }
7875 set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
7876 }
7877
7878 /* Remove all recorded register locations for the given variable part
7879 from dataflow set SET, except for those that are identical to LOC.
7880 The variable part is specified by its SET->vars slot SLOT and offset
7881 OFFSET. */
7882
7883 static variable **
7884 clobber_slot_part (dataflow_set *set, rtx loc, variable **slot,
7885 HOST_WIDE_INT offset, rtx set_src)
7886 {
7887 variable *var = *slot;
7888 int pos = find_variable_location_part (var, offset, NULL);
7889
7890 if (pos >= 0)
7891 {
7892 location_chain *node, *next;
7893
7894 /* Remove the register locations from the dataflow set. */
7895 next = var->var_part[pos].loc_chain;
7896 for (node = next; node; node = next)
7897 {
7898 next = node->next;
7899 if (node->loc != loc
7900 && (!flag_var_tracking_uninit
7901 || !set_src
7902 || MEM_P (set_src)
7903 || !rtx_equal_p (set_src, node->set_src)))
7904 {
7905 if (REG_P (node->loc))
7906 {
7907 attrs *anode, *anext;
7908 attrs **anextp;
7909
7910 /* Remove the variable part from the register's
7911 list, but preserve any other variable parts
7912 that might be regarded as live in that same
7913 register. */
7914 anextp = &set->regs[REGNO (node->loc)];
7915 for (anode = *anextp; anode; anode = anext)
7916 {
7917 anext = anode->next;
7918 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
7919 && anode->offset == offset)
7920 {
7921 delete anode;
7922 *anextp = anext;
7923 }
7924 else
7925 anextp = &anode->next;
7926 }
7927 }
7928
7929 slot = delete_slot_part (set, node->loc, slot, offset);
7930 }
7931 }
7932 }
7933
7934 return slot;
7935 }
7936
7937 /* Remove all recorded register locations for the given variable part
7938 from dataflow set SET, except for those that are identical to LOC.
7939 The variable part is specified by variable's declaration or value
7940 DV and offset OFFSET. */
7941
7942 static void
7943 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7944 HOST_WIDE_INT offset, rtx set_src)
7945 {
7946 variable **slot;
7947
7948 if (!dv_as_opaque (dv)
7949 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
7950 return;
7951
7952 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7953 if (!slot)
7954 return;
7955
7956 clobber_slot_part (set, loc, slot, offset, set_src);
7957 }
7958
7959 /* Delete the part of variable's location from dataflow set SET. The
7960 variable part is specified by its SET->vars slot SLOT and offset
7961 OFFSET and the part's location by LOC. */
7962
7963 static variable **
7964 delete_slot_part (dataflow_set *set, rtx loc, variable **slot,
7965 HOST_WIDE_INT offset)
7966 {
7967 variable *var = *slot;
7968 int pos = find_variable_location_part (var, offset, NULL);
7969
7970 if (pos >= 0)
7971 {
7972 location_chain *node, *next;
7973 location_chain **nextp;
7974 bool changed;
7975 rtx cur_loc;
7976
7977 if (shared_var_p (var, set->vars))
7978 {
7979 /* If the variable contains the location part we have to
7980 make a copy of the variable. */
7981 for (node = var->var_part[pos].loc_chain; node;
7982 node = node->next)
7983 {
7984 if ((REG_P (node->loc) && REG_P (loc)
7985 && REGNO (node->loc) == REGNO (loc))
7986 || rtx_equal_p (node->loc, loc))
7987 {
7988 slot = unshare_variable (set, slot, var,
7989 VAR_INIT_STATUS_UNKNOWN);
7990 var = *slot;
7991 break;
7992 }
7993 }
7994 }
7995
7996 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7997 cur_loc = VAR_LOC_FROM (var);
7998 else
7999 cur_loc = var->var_part[pos].cur_loc;
8000
8001 /* Delete the location part. */
8002 changed = false;
8003 nextp = &var->var_part[pos].loc_chain;
8004 for (node = *nextp; node; node = next)
8005 {
8006 next = node->next;
8007 if ((REG_P (node->loc) && REG_P (loc)
8008 && REGNO (node->loc) == REGNO (loc))
8009 || rtx_equal_p (node->loc, loc))
8010 {
8011 /* If we have deleted the location which was last emitted
8012 we have to emit a new location, so add the variable to the
8013 set of changed variables. */
8014 if (cur_loc == node->loc)
8015 {
8016 changed = true;
8017 var->var_part[pos].cur_loc = NULL;
8018 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
8019 VAR_LOC_FROM (var) = NULL;
8020 }
8021 delete node;
8022 *nextp = next;
8023 break;
8024 }
8025 else
8026 nextp = &node->next;
8027 }
8028
8029 if (var->var_part[pos].loc_chain == NULL)
8030 {
8031 changed = true;
8032 var->n_var_parts--;
8033 while (pos < var->n_var_parts)
8034 {
8035 var->var_part[pos] = var->var_part[pos + 1];
8036 pos++;
8037 }
8038 }
8039 if (changed)
8040 variable_was_changed (var, set);
8041 }
8042
8043 return slot;
8044 }
8045
8046 /* Delete the part of variable's location from dataflow set SET. The
8047 variable part is specified by variable's declaration or value DV
8048 and offset OFFSET and the part's location by LOC. */
8049
8050 static void
8051 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
8052 HOST_WIDE_INT offset)
8053 {
8054 variable **slot = shared_hash_find_slot_noinsert (set->vars, dv);
8055 if (!slot)
8056 return;
8057
8058 delete_slot_part (set, loc, slot, offset);
8059 }
8060
8061
8062 /* Structure for passing some other parameters to function
8063 vt_expand_loc_callback. */
8064 struct expand_loc_callback_data
8065 {
8066 /* The variables and values active at this point. */
8067 variable_table_type *vars;
8068
8069 /* Stack of values and debug_exprs under expansion, and their
8070 children. */
8071 auto_vec<rtx, 4> expanding;
8072
8073 /* Stack of values and debug_exprs whose expansion hit recursion
8074 cycles. They will have VALUE_RECURSED_INTO marked when added to
8075 this list. This flag will be cleared if any of its dependencies
8076 resolves to a valid location. So, if the flag remains set at the
8077 end of the search, we know no valid location for this one can
8078 possibly exist. */
8079 auto_vec<rtx, 4> pending;
8080
8081 /* The maximum depth among the sub-expressions under expansion.
8082 Zero indicates no expansion so far. */
8083 expand_depth depth;
8084 };
8085
8086 /* Allocate the one-part auxiliary data structure for VAR, with enough
8087 room for COUNT dependencies. */
8088
8089 static void
8090 loc_exp_dep_alloc (variable *var, int count)
8091 {
8092 size_t allocsize;
8093
8094 gcc_checking_assert (var->onepart);
8095
8096 /* We can be called with COUNT == 0 to allocate the data structure
8097 without any dependencies, e.g. for the backlinks only. However,
8098 if we are specifying a COUNT, then the dependency list must have
8099 been emptied before. It would be possible to adjust pointers or
8100 force it empty here, but this is better done at an earlier point
8101 in the algorithm, so we instead leave an assertion to catch
8102 errors. */
8103 gcc_checking_assert (!count
8104 || VAR_LOC_DEP_VEC (var) == NULL
8105 || VAR_LOC_DEP_VEC (var)->is_empty ());
8106
8107 if (VAR_LOC_1PAUX (var) && VAR_LOC_DEP_VEC (var)->space (count))
8108 return;
8109
8110 allocsize = offsetof (struct onepart_aux, deps)
8111 + vec<loc_exp_dep, va_heap, vl_embed>::embedded_size (count);
8112
8113 if (VAR_LOC_1PAUX (var))
8114 {
8115 VAR_LOC_1PAUX (var) = XRESIZEVAR (struct onepart_aux,
8116 VAR_LOC_1PAUX (var), allocsize);
8117 /* If the reallocation moves the onepaux structure, the
8118 back-pointer to BACKLINKS in the first list member will still
8119 point to its old location. Adjust it. */
8120 if (VAR_LOC_DEP_LST (var))
8121 VAR_LOC_DEP_LST (var)->pprev = VAR_LOC_DEP_LSTP (var);
8122 }
8123 else
8124 {
8125 VAR_LOC_1PAUX (var) = XNEWVAR (struct onepart_aux, allocsize);
8126 *VAR_LOC_DEP_LSTP (var) = NULL;
8127 VAR_LOC_FROM (var) = NULL;
8128 VAR_LOC_DEPTH (var).complexity = 0;
8129 VAR_LOC_DEPTH (var).entryvals = 0;
8130 }
8131 VAR_LOC_DEP_VEC (var)->embedded_init (count);
8132 }
8133
8134 /* Remove all entries from the vector of active dependencies of VAR,
8135 removing them from the back-links lists too. */
8136
8137 static void
8138 loc_exp_dep_clear (variable *var)
8139 {
8140 while (VAR_LOC_DEP_VEC (var) && !VAR_LOC_DEP_VEC (var)->is_empty ())
8141 {
8142 loc_exp_dep *led = &VAR_LOC_DEP_VEC (var)->last ();
8143 if (led->next)
8144 led->next->pprev = led->pprev;
8145 if (led->pprev)
8146 *led->pprev = led->next;
8147 VAR_LOC_DEP_VEC (var)->pop ();
8148 }
8149 }
8150
8151 /* Insert an active dependency from VAR on X to the vector of
8152 dependencies, and add the corresponding back-link to X's list of
8153 back-links in VARS. */
8154
8155 static void
8156 loc_exp_insert_dep (variable *var, rtx x, variable_table_type *vars)
8157 {
8158 decl_or_value dv;
8159 variable *xvar;
8160 loc_exp_dep *led;
8161
8162 dv = dv_from_rtx (x);
8163
8164 /* ??? Build a vector of variables parallel to EXPANDING, to avoid
8165 an additional look up? */
8166 xvar = vars->find_with_hash (dv, dv_htab_hash (dv));
8167
8168 if (!xvar)
8169 {
8170 xvar = variable_from_dropped (dv, NO_INSERT);
8171 gcc_checking_assert (xvar);
8172 }
8173
8174 /* No point in adding the same backlink more than once. This may
8175 arise if say the same value appears in two complex expressions in
8176 the same loc_list, or even more than once in a single
8177 expression. */
8178 if (VAR_LOC_DEP_LST (xvar) && VAR_LOC_DEP_LST (xvar)->dv == var->dv)
8179 return;
8180
8181 if (var->onepart == NOT_ONEPART)
8182 led = new loc_exp_dep;
8183 else
8184 {
8185 loc_exp_dep empty;
8186 memset (&empty, 0, sizeof (empty));
8187 VAR_LOC_DEP_VEC (var)->quick_push (empty);
8188 led = &VAR_LOC_DEP_VEC (var)->last ();
8189 }
8190 led->dv = var->dv;
8191 led->value = x;
8192
8193 loc_exp_dep_alloc (xvar, 0);
8194 led->pprev = VAR_LOC_DEP_LSTP (xvar);
8195 led->next = *led->pprev;
8196 if (led->next)
8197 led->next->pprev = &led->next;
8198 *led->pprev = led;
8199 }
8200
8201 /* Create active dependencies of VAR on COUNT values starting at
8202 VALUE, and corresponding back-links to the entries in VARS. Return
8203 true if we found any pending-recursion results. */
8204
8205 static bool
8206 loc_exp_dep_set (variable *var, rtx result, rtx *value, int count,
8207 variable_table_type *vars)
8208 {
8209 bool pending_recursion = false;
8210
8211 gcc_checking_assert (VAR_LOC_DEP_VEC (var) == NULL
8212 || VAR_LOC_DEP_VEC (var)->is_empty ());
8213
8214 /* Set up a dependency for each of the COUNT values in VALUE (the
8215 sub-expressions recorded by the caller during expansion). */
8216 loc_exp_dep_alloc (var, count);
8217
8218 while (count--)
8219 {
8220 rtx x = *value++;
8221
8222 if (!pending_recursion)
8223 pending_recursion = !result && VALUE_RECURSED_INTO (x);
8224
8225 loc_exp_insert_dep (var, x, vars);
8226 }
8227
8228 return pending_recursion;
8229 }
8230
8231 /* Notify the back-links of IVAR that are pending recursion that we
8232 have found a non-NIL value for it, so they are cleared for another
8233 attempt to compute a current location. */
8234
8235 static void
8236 notify_dependents_of_resolved_value (variable *ivar, variable_table_type *vars)
8237 {
8238 loc_exp_dep *led, *next;
8239
8240 for (led = VAR_LOC_DEP_LST (ivar); led; led = next)
8241 {
8242 decl_or_value dv = led->dv;
8243 variable *var;
8244
8245 next = led->next;
8246
8247 if (dv_is_value_p (dv))
8248 {
8249 rtx value = dv_as_value (dv);
8250
8251 /* If we have already resolved it, leave it alone. */
8252 if (!VALUE_RECURSED_INTO (value))
8253 continue;
8254
8255 /* Check that VALUE_RECURSED_INTO, true from the test above,
8256 implies NO_LOC_P. */
8257 gcc_checking_assert (NO_LOC_P (value));
8258
8259 /* We won't notify variables that are being expanded,
8260 because their dependency list is cleared before
8261 recursing. */
8262 NO_LOC_P (value) = false;
8263 VALUE_RECURSED_INTO (value) = false;
8264
8265 gcc_checking_assert (dv_changed_p (dv));
8266 }
8267 else
8268 {
8269 gcc_checking_assert (dv_onepart_p (dv) != NOT_ONEPART);
8270 if (!dv_changed_p (dv))
8271 continue;
8272 }
8273
8274 var = vars->find_with_hash (dv, dv_htab_hash (dv));
8275
8276 if (!var)
8277 var = variable_from_dropped (dv, NO_INSERT);
8278
8279 if (var)
8280 notify_dependents_of_resolved_value (var, vars);
8281
8282 if (next)
8283 next->pprev = led->pprev;
8284 if (led->pprev)
8285 *led->pprev = next;
8286 led->next = NULL;
8287 led->pprev = NULL;
8288 }
8289 }
8290
8291 static rtx vt_expand_loc_callback (rtx x, bitmap regs,
8292 int max_depth, void *data);
8293
8294 /* Return the combined depth, when one sub-expression evaluated to
8295 BEST_DEPTH and the previous known depth was SAVED_DEPTH. */
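/* A small worked example (values chosen only for illustration): with
   SAVED_DEPTH = { complexity 2, entryvals 1 } and BEST_DEPTH =
   { complexity 3, entryvals 0 }, the entryval counts are first summed
   into both operands, and the operand with the larger complexity is
   returned, i.e. { complexity 3, entryvals 1 }.  */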
8296
8297 static inline expand_depth
8298 update_depth (expand_depth saved_depth, expand_depth best_depth)
8299 {
8300 /* If we didn't find anything, stick with what we had. */
8301 if (!best_depth.complexity)
8302 return saved_depth;
8303
8304 /* If we hadn't found anything before, use the depth of the current
8305 expression. Do NOT add one extra level, we want to compute the
8306 maximum depth among sub-expressions. We'll increment it later,
8307 if appropriate. */
8308 if (!saved_depth.complexity)
8309 return best_depth;
8310
8311 /* Combine the entryval count so that regardless of which one we
8312 return, the entryval count is accurate. */
8313 best_depth.entryvals = saved_depth.entryvals
8314 = best_depth.entryvals + saved_depth.entryvals;
8315
8316 if (saved_depth.complexity < best_depth.complexity)
8317 return best_depth;
8318 else
8319 return saved_depth;
8320 }
8321
8322 /* Expand VAR to a location RTX, updating its cur_loc. Use REGS and
8323 DATA for cselib expand callback. If PENDRECP is given, indicate in
8324 it whether any sub-expression couldn't be fully evaluated because
8325 it is pending recursion resolution. */
8326
8327 static inline rtx
8328 vt_expand_var_loc_chain (variable *var, bitmap regs, void *data,
8329 bool *pendrecp)
8330 {
8331 struct expand_loc_callback_data *elcd
8332 = (struct expand_loc_callback_data *) data;
8333 location_chain *loc, *next;
8334 rtx result = NULL;
8335 int first_child, result_first_child, last_child;
8336 bool pending_recursion;
8337 rtx loc_from = NULL;
8338 struct elt_loc_list *cloc = NULL;
8339 expand_depth depth = { 0, 0 }, saved_depth = elcd->depth;
8340 int wanted_entryvals, found_entryvals = 0;
8341
8342 /* Clear all backlinks pointing at this, so that we're not notified
8343 while we're active. */
8344 loc_exp_dep_clear (var);
8345
8346 retry:
8347 if (var->onepart == ONEPART_VALUE)
8348 {
8349 cselib_val *val = CSELIB_VAL_PTR (dv_as_value (var->dv));
8350
8351 gcc_checking_assert (cselib_preserved_value_p (val));
8352
8353 cloc = val->locs;
8354 }
8355
8356 first_child = result_first_child = last_child
8357 = elcd->expanding.length ();
8358
8359 wanted_entryvals = found_entryvals;
8360
8361 /* Attempt to expand each available location in turn. */
8362 for (next = loc = var->n_var_parts ? var->var_part[0].loc_chain : NULL;
8363 loc || cloc; loc = next)
8364 {
8365 result_first_child = last_child;
8366
8367 if (!loc)
8368 {
8369 loc_from = cloc->loc;
8370 next = loc;
8371 cloc = cloc->next;
8372 if (unsuitable_loc (loc_from))
8373 continue;
8374 }
8375 else
8376 {
8377 loc_from = loc->loc;
8378 next = loc->next;
8379 }
8380
8381 gcc_checking_assert (!unsuitable_loc (loc_from));
8382
8383 elcd->depth.complexity = elcd->depth.entryvals = 0;
8384 result = cselib_expand_value_rtx_cb (loc_from, regs, EXPR_DEPTH,
8385 vt_expand_loc_callback, data);
8386 last_child = elcd->expanding.length ();
8387
8388 if (result)
8389 {
8390 depth = elcd->depth;
8391
8392 gcc_checking_assert (depth.complexity
8393 || result_first_child == last_child);
8394
8395 if (last_child - result_first_child != 1)
8396 {
8397 if (!depth.complexity && GET_CODE (result) == ENTRY_VALUE)
8398 depth.entryvals++;
8399 depth.complexity++;
8400 }
8401
8402 if (depth.complexity <= EXPR_USE_DEPTH)
8403 {
8404 if (depth.entryvals <= wanted_entryvals)
8405 break;
8406 else if (!found_entryvals || depth.entryvals < found_entryvals)
8407 found_entryvals = depth.entryvals;
8408 }
8409
8410 result = NULL;
8411 }
8412
8413 /* Set it up in case we leave the loop. */
8414 depth.complexity = depth.entryvals = 0;
8415 loc_from = NULL;
8416 result_first_child = first_child;
8417 }
8418
8419 if (!loc_from && wanted_entryvals < found_entryvals)
8420 {
8421 /* We found entries with ENTRY_VALUEs and skipped them. Since
8422 we could not find any expansions without ENTRY_VALUEs, but we
8423 found at least one with them, go back and get an entry with
8424 the minimum ENTRY_VALUE count that we found. We could
8425 avoid looping, but since each sub-loc is already resolved,
8426 the re-expansion should be trivial. ??? Should we record all
8427 attempted locs as dependencies, so that we retry the
8428 expansion should any of them change, in the hope it can give
8429 us a new entry without an ENTRY_VALUE? */
8430 elcd->expanding.truncate (first_child);
8431 goto retry;
8432 }
8433
8434 /* Register all encountered dependencies as active. */
8435 pending_recursion = loc_exp_dep_set
8436 (var, result, elcd->expanding.address () + result_first_child,
8437 last_child - result_first_child, elcd->vars);
8438
8439 elcd->expanding.truncate (first_child);
8440
8441 /* Record where the expansion came from. */
8442 gcc_checking_assert (!result || !pending_recursion);
8443 VAR_LOC_FROM (var) = loc_from;
8444 VAR_LOC_DEPTH (var) = depth;
8445
8446 gcc_checking_assert (!depth.complexity == !result);
8447
8448 elcd->depth = update_depth (saved_depth, depth);
8449
8450 /* Indicate whether any of the dependencies are pending recursion
8451 resolution. */
8452 if (pendrecp)
8453 *pendrecp = pending_recursion;
8454
8455 if (!pendrecp || !pending_recursion)
8456 var->var_part[0].cur_loc = result;
8457
8458 return result;
8459 }
8460
8461 /* Callback for cselib_expand_value, that looks for expressions
8462 holding the value in the var-tracking hash tables. Return X for
8463 standard processing, anything else is to be used as-is. */
8464
8465 static rtx
8466 vt_expand_loc_callback (rtx x, bitmap regs,
8467 int max_depth ATTRIBUTE_UNUSED,
8468 void *data)
8469 {
8470 struct expand_loc_callback_data *elcd
8471 = (struct expand_loc_callback_data *) data;
8472 decl_or_value dv;
8473 variable *var;
8474 rtx result, subreg;
8475 bool pending_recursion = false;
8476 bool from_empty = false;
8477
8478 switch (GET_CODE (x))
8479 {
8480 case SUBREG:
8481 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
8482 EXPR_DEPTH,
8483 vt_expand_loc_callback, data);
8484
8485 if (!subreg)
8486 return NULL;
8487
8488 result = simplify_gen_subreg (GET_MODE (x), subreg,
8489 GET_MODE (SUBREG_REG (x)),
8490 SUBREG_BYTE (x));
8491
8492 /* Invalid SUBREGs are ok in debug info. ??? We could try
8493 alternate expansions for the VALUE as well. */
8494 if (!result)
8495 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
8496
8497 return result;
8498
8499 case DEBUG_EXPR:
8500 case VALUE:
8501 dv = dv_from_rtx (x);
8502 break;
8503
8504 default:
8505 return x;
8506 }
8507
8508 elcd->expanding.safe_push (x);
8509
8510 /* Check that VALUE_RECURSED_INTO implies NO_LOC_P. */
8511 gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x));
8512
8513 if (NO_LOC_P (x))
8514 {
8515 gcc_checking_assert (VALUE_RECURSED_INTO (x) || !dv_changed_p (dv));
8516 return NULL;
8517 }
8518
8519 var = elcd->vars->find_with_hash (dv, dv_htab_hash (dv));
8520
8521 if (!var)
8522 {
8523 from_empty = true;
8524 var = variable_from_dropped (dv, INSERT);
8525 }
8526
8527 gcc_checking_assert (var);
8528
8529 if (!dv_changed_p (dv))
8530 {
8531 gcc_checking_assert (!NO_LOC_P (x));
8532 gcc_checking_assert (var->var_part[0].cur_loc);
8533 gcc_checking_assert (VAR_LOC_1PAUX (var));
8534 gcc_checking_assert (VAR_LOC_1PAUX (var)->depth.complexity);
8535
8536 elcd->depth = update_depth (elcd->depth, VAR_LOC_1PAUX (var)->depth);
8537
8538 return var->var_part[0].cur_loc;
8539 }
8540
8541 VALUE_RECURSED_INTO (x) = true;
8542 /* This is tentative, but it makes some tests simpler. */
8543 NO_LOC_P (x) = true;
8544
8545 gcc_checking_assert (var->n_var_parts == 1 || from_empty);
8546
8547 result = vt_expand_var_loc_chain (var, regs, data, &pending_recursion);
8548
8549 if (pending_recursion)
8550 {
8551 gcc_checking_assert (!result);
8552 elcd->pending.safe_push (x);
8553 }
8554 else
8555 {
8556 NO_LOC_P (x) = !result;
8557 VALUE_RECURSED_INTO (x) = false;
8558 set_dv_changed (dv, false);
8559
8560 if (result)
8561 notify_dependents_of_resolved_value (var, elcd->vars);
8562 }
8563
8564 return result;
8565 }
8566
8567 /* While expanding variables, we may encounter recursion cycles
8568 because of mutual (possibly indirect) dependencies between two
8569 particular variables (or values), say A and B. If we're trying to
8570 expand A when we get to B, which in turn attempts to expand A, if
8571 we can't find any other expansion for B, we'll add B to this
8572 pending-recursion stack, and tentatively return NULL for its
8573 location. This tentative value will be used for any other
8574 occurrences of B, unless A gets some other location, in which case
8575 it will notify B that it is worth another try at computing a
8576 location for it, and it will use the location computed for A then.
8577 At the end of the expansion, the tentative NULL locations become
8578 final for all members of PENDING that didn't get a notification.
8579 This function performs this finalization of NULL locations. */
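/* A schematic example of such a cycle (purely illustrative): suppose
   VALUE A is only known to equal (plus B (const_int 4)) while VALUE B
   is only known to equal (plus A (const_int -4)).  Expanding A recurses
   into B, which recurses back into A; finding no other expansion, B is
   pushed on PENDING with VALUE_RECURSED_INTO set.  If nothing later
   resolves A to a concrete location, the flag is still set when we get
   here and B's NULL location becomes final.  */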
8580
8581 static void
8582 resolve_expansions_pending_recursion (vec<rtx, va_heap> *pending)
8583 {
8584 while (!pending->is_empty ())
8585 {
8586 rtx x = pending->pop ();
8587 decl_or_value dv;
8588
8589 if (!VALUE_RECURSED_INTO (x))
8590 continue;
8591
8592 gcc_checking_assert (NO_LOC_P (x));
8593 VALUE_RECURSED_INTO (x) = false;
8594 dv = dv_from_rtx (x);
8595 gcc_checking_assert (dv_changed_p (dv));
8596 set_dv_changed (dv, false);
8597 }
8598 }
8599
8600 /* Initialize expand_loc_callback_data D with variable hash table V.
8601 It must be a macro because of alloca (vec stack). */
8602 #define INIT_ELCD(d, v) \
8603 do \
8604 { \
8605 (d).vars = (v); \
8606 (d).depth.complexity = (d).depth.entryvals = 0; \
8607 } \
8608 while (0)
8609 /* Finalize expand_loc_callback_data D, resolved to location L. */
8610 #define FINI_ELCD(d, l) \
8611 do \
8612 { \
8613 resolve_expansions_pending_recursion (&(d).pending); \
8614 (d).pending.release (); \
8615 (d).expanding.release (); \
8616 \
8617 if ((l) && MEM_P (l)) \
8618 (l) = targetm.delegitimize_address (l); \
8619 } \
8620 while (0)
8621
8622 /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
8623 equivalences in VARS, updating their CUR_LOCs in the process. */
8624
8625 static rtx
8626 vt_expand_loc (rtx loc, variable_table_type *vars)
8627 {
8628 struct expand_loc_callback_data data;
8629 rtx result;
8630
8631 if (!MAY_HAVE_DEBUG_BIND_INSNS)
8632 return loc;
8633
8634 INIT_ELCD (data, vars);
8635
8636 result = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
8637 vt_expand_loc_callback, &data);
8638
8639 FINI_ELCD (data, result);
8640
8641 return result;
8642 }
8643
8644 /* Expand the one-part VARiable to a location, using the equivalences
8645 in VARS, updating their CUR_LOCs in the process. */
8646
8647 static rtx
8648 vt_expand_1pvar (variable *var, variable_table_type *vars)
8649 {
8650 struct expand_loc_callback_data data;
8651 rtx loc;
8652
8653 gcc_checking_assert (var->onepart && var->n_var_parts == 1);
8654
8655 if (!dv_changed_p (var->dv))
8656 return var->var_part[0].cur_loc;
8657
8658 INIT_ELCD (data, vars);
8659
8660 loc = vt_expand_var_loc_chain (var, scratch_regs, &data, NULL);
8661
8662 gcc_checking_assert (data.expanding.is_empty ());
8663
8664 FINI_ELCD (data, loc);
8665
8666 return loc;
8667 }
8668
8669 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
8670 additional parameters: WHERE specifies whether the note shall be emitted
8671 before or after instruction INSN. */
8672
8673 int
8674 emit_note_insn_var_location (variable **varp, emit_note_data *data)
8675 {
8676 variable *var = *varp;
8677 rtx_insn *insn = data->insn;
8678 enum emit_note_where where = data->where;
8679 variable_table_type *vars = data->vars;
8680 rtx_note *note;
8681 rtx note_vl;
8682 int i, j, n_var_parts;
8683 bool complete;
8684 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
8685 HOST_WIDE_INT last_limit;
8686 tree type_size_unit;
8687 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
8688 rtx loc[MAX_VAR_PARTS];
8689 tree decl;
8690 location_chain *lc;
8691
8692 gcc_checking_assert (var->onepart == NOT_ONEPART
8693 || var->onepart == ONEPART_VDECL);
8694
8695 decl = dv_as_decl (var->dv);
8696
8697 complete = true;
8698 last_limit = 0;
8699 n_var_parts = 0;
8700 if (!var->onepart)
8701 for (i = 0; i < var->n_var_parts; i++)
8702 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
8703 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
8704 for (i = 0; i < var->n_var_parts; i++)
8705 {
8706 machine_mode mode, wider_mode;
8707 rtx loc2;
8708 HOST_WIDE_INT offset, size, wider_size;
8709
8710 if (i == 0 && var->onepart)
8711 {
8712 gcc_checking_assert (var->n_var_parts == 1);
8713 offset = 0;
8714 initialized = VAR_INIT_STATUS_INITIALIZED;
8715 loc2 = vt_expand_1pvar (var, vars);
8716 }
8717 else
8718 {
8719 if (last_limit < VAR_PART_OFFSET (var, i))
8720 {
8721 complete = false;
8722 break;
8723 }
8724 else if (last_limit > VAR_PART_OFFSET (var, i))
8725 continue;
8726 offset = VAR_PART_OFFSET (var, i);
8727 loc2 = var->var_part[i].cur_loc;
8728 if (loc2 && GET_CODE (loc2) == MEM
8729 && GET_CODE (XEXP (loc2, 0)) == VALUE)
8730 {
8731 rtx depval = XEXP (loc2, 0);
8732
8733 loc2 = vt_expand_loc (loc2, vars);
8734
8735 if (loc2)
8736 loc_exp_insert_dep (var, depval, vars);
8737 }
8738 if (!loc2)
8739 {
8740 complete = false;
8741 continue;
8742 }
8743 gcc_checking_assert (GET_CODE (loc2) != VALUE);
8744 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
8745 if (var->var_part[i].cur_loc == lc->loc)
8746 {
8747 initialized = lc->init;
8748 break;
8749 }
8750 gcc_assert (lc);
8751 }
8752
8753 offsets[n_var_parts] = offset;
8754 if (!loc2)
8755 {
8756 complete = false;
8757 continue;
8758 }
8759 loc[n_var_parts] = loc2;
8760 mode = GET_MODE (var->var_part[i].cur_loc);
8761 if (mode == VOIDmode && var->onepart)
8762 mode = DECL_MODE (decl);
8763 /* We only track subparts of constant-sized objects, since at present
8764 there's no representation for polynomial pieces. */
8765 if (!GET_MODE_SIZE (mode).is_constant (&size))
8766 {
8767 complete = false;
8768 continue;
8769 }
8770 last_limit = offsets[n_var_parts] + size;
8771
8772 /* Attempt to merge adjacent registers or memory. */
8773 for (j = i + 1; j < var->n_var_parts; j++)
8774 if (last_limit <= VAR_PART_OFFSET (var, j))
8775 break;
8776 if (j < var->n_var_parts
8777 && GET_MODE_WIDER_MODE (mode).exists (&wider_mode)
8778 && GET_MODE_SIZE (wider_mode).is_constant (&wider_size)
8779 && var->var_part[j].cur_loc
8780 && mode == GET_MODE (var->var_part[j].cur_loc)
8781 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
8782 && last_limit == (var->onepart ? 0 : VAR_PART_OFFSET (var, j))
8783 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
8784 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
8785 {
8786 rtx new_loc = NULL;
8787
8788 if (REG_P (loc[n_var_parts])
8789 && hard_regno_nregs (REGNO (loc[n_var_parts]), mode) * 2
8790 == hard_regno_nregs (REGNO (loc[n_var_parts]), wider_mode)
8791 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
8792 == REGNO (loc2))
8793 {
8794 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
8795 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
8796 mode, 0);
8797 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
8798 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
8799 if (new_loc)
8800 {
8801 if (!REG_P (new_loc)
8802 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
8803 new_loc = NULL;
8804 else
8805 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
8806 }
8807 }
8808 else if (MEM_P (loc[n_var_parts])
8809 && GET_CODE (XEXP (loc2, 0)) == PLUS
8810 && REG_P (XEXP (XEXP (loc2, 0), 0))
8811 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
8812 {
8813 if ((REG_P (XEXP (loc[n_var_parts], 0))
8814 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
8815 XEXP (XEXP (loc2, 0), 0))
8816 && INTVAL (XEXP (XEXP (loc2, 0), 1)) == size)
8817 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
8818 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
8819 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
8820 XEXP (XEXP (loc2, 0), 0))
8821 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1)) + size
8822 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
8823 new_loc = adjust_address_nv (loc[n_var_parts],
8824 wider_mode, 0);
8825 }
8826
8827 if (new_loc)
8828 {
8829 loc[n_var_parts] = new_loc;
8830 mode = wider_mode;
8831 last_limit = offsets[n_var_parts] + wider_size;
8832 i = j;
8833 }
8834 }
8835 ++n_var_parts;
8836 }
8837 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8838 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
8839 complete = false;
8840
8841 if (! flag_var_tracking_uninit)
8842 initialized = VAR_INIT_STATUS_INITIALIZED;
8843
8844 note_vl = NULL_RTX;
8845 if (!complete)
8846 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX, initialized);
8847 else if (n_var_parts == 1)
8848 {
8849 rtx expr_list;
8850
8851 if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
8852 expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
8853 else
8854 expr_list = loc[0];
8855
8856 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list, initialized);
8857 }
8858 else if (n_var_parts)
8859 {
8860 rtx parallel;
8861
8862 for (i = 0; i < n_var_parts; i++)
8863 loc[i]
8864 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
8865
8866 parallel = gen_rtx_PARALLEL (VOIDmode,
8867 gen_rtvec_v (n_var_parts, loc));
8868 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
8869 parallel, initialized);
8870 }
8871
8872 if (where != EMIT_NOTE_BEFORE_INSN)
8873 {
8874 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8875 if (where == EMIT_NOTE_AFTER_CALL_INSN)
8876 NOTE_DURING_CALL_P (note) = true;
8877 }
8878 else
8879 {
8880 /* Make sure that the call related notes come first. */
8881 while (NEXT_INSN (insn)
8882 && NOTE_P (insn)
8883 && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8884 && NOTE_DURING_CALL_P (insn))
8885 insn = NEXT_INSN (insn);
8886 if (NOTE_P (insn)
8887 && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8888 && NOTE_DURING_CALL_P (insn))
8889 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8890 else
8891 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
8892 }
8893 NOTE_VAR_LOCATION (note) = note_vl;
8894
8895 set_dv_changed (var->dv, false);
8896 gcc_assert (var->in_changed_variables);
8897 var->in_changed_variables = false;
8898 changed_variables->clear_slot (varp);
8899
8900 /* Continue traversing the hash table. */
8901 return 1;
8902 }
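/* To give a feel for what the function above builds (a hand-written
   sketch, not a dump from a real compilation): a variable D with two
   adjacent four-byte parts would get a note whose NOTE_VAR_LOCATION is
   roughly

     (var_location D (parallel [(expr_list (reg:SI 3) (const_int 0))
				(expr_list (mem:SI (reg:SI sp)) (const_int 4))]))

   while a single-part variable located at offset 0 gets simply
   (var_location D (reg:SI 3)).  */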
8903
8904 /* While traversing changed_variables, push onto DATA (a stack of RTX
8905 values) entries that aren't user variables. */
8906
8907 int
8908 var_track_values_to_stack (variable **slot,
8909 vec<rtx, va_heap> *changed_values_stack)
8910 {
8911 variable *var = *slot;
8912
8913 if (var->onepart == ONEPART_VALUE)
8914 changed_values_stack->safe_push (dv_as_value (var->dv));
8915 else if (var->onepart == ONEPART_DEXPR)
8916 changed_values_stack->safe_push (DECL_RTL_KNOWN_SET (dv_as_decl (var->dv)));
8917
8918 return 1;
8919 }
8920
8921 /* Remove from changed_variables the entry whose DV corresponds to
8922 value or debug_expr VAL. */
8923 static void
8924 remove_value_from_changed_variables (rtx val)
8925 {
8926 decl_or_value dv = dv_from_rtx (val);
8927 variable **slot;
8928 variable *var;
8929
8930 slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
8931 NO_INSERT);
8932 var = *slot;
8933 var->in_changed_variables = false;
8934 changed_variables->clear_slot (slot);
8935 }
8936
8937 /* If VAL (a value or debug_expr) has backlinks to variables actively
8938 dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
8939 changed, adding to CHANGED_VALUES_STACK any dependencies that may
8940 have dependencies of their own to notify. */
8941
8942 static void
8943 notify_dependents_of_changed_value (rtx val, variable_table_type *htab,
8944 vec<rtx, va_heap> *changed_values_stack)
8945 {
8946 variable **slot;
8947 variable *var;
8948 loc_exp_dep *led;
8949 decl_or_value dv = dv_from_rtx (val);
8950
8951 slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
8952 NO_INSERT);
8953 if (!slot)
8954 slot = htab->find_slot_with_hash (dv, dv_htab_hash (dv), NO_INSERT);
8955 if (!slot)
8956 slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv),
8957 NO_INSERT);
8958 var = *slot;
8959
8960 while ((led = VAR_LOC_DEP_LST (var)))
8961 {
8962 decl_or_value ldv = led->dv;
8963 variable *ivar;
8964
8965 /* Deactivate and remove the backlink, as it was "used up". It
8966 makes no sense to attempt to notify the same entity again:
8967 either it will be recomputed and re-register an active
8968 dependency, or it will still have the changed mark. */
8969 if (led->next)
8970 led->next->pprev = led->pprev;
8971 if (led->pprev)
8972 *led->pprev = led->next;
8973 led->next = NULL;
8974 led->pprev = NULL;
8975
8976 if (dv_changed_p (ldv))
8977 continue;
8978
8979 switch (dv_onepart_p (ldv))
8980 {
8981 case ONEPART_VALUE:
8982 case ONEPART_DEXPR:
8983 set_dv_changed (ldv, true);
8984 changed_values_stack->safe_push (dv_as_rtx (ldv));
8985 break;
8986
8987 case ONEPART_VDECL:
8988 ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
8989 gcc_checking_assert (!VAR_LOC_DEP_LST (ivar));
8990 variable_was_changed (ivar, NULL);
8991 break;
8992
8993 case NOT_ONEPART:
8994 delete led;
8995 ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
8996 if (ivar)
8997 {
8998 int i = ivar->n_var_parts;
8999 while (i--)
9000 {
9001 rtx loc = ivar->var_part[i].cur_loc;
9002
9003 if (loc && GET_CODE (loc) == MEM
9004 && XEXP (loc, 0) == val)
9005 {
9006 variable_was_changed (ivar, NULL);
9007 break;
9008 }
9009 }
9010 }
9011 break;
9012
9013 default:
9014 gcc_unreachable ();
9015 }
9016 }
9017 }
9018
9019 /* Take out of changed_variables any entries that don't refer to user
9020 variables. Back-propagate change notifications from values and
9021 debug_exprs to their active dependencies in HTAB or in
9022 CHANGED_VARIABLES. */
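/* For example (schematically): if the note for value V1 was last expanded
   in terms of value V2 and V2 now changes, V2's back-link to V1 marks V1
   as changed and pushes it on the stack, which in turn may notify any
   user variables whose cur_loc was expressed in terms of V1.  */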
9023
9024 static void
9025 process_changed_values (variable_table_type *htab)
9026 {
9027 int i, n;
9028 rtx val;
9029 auto_vec<rtx, 20> changed_values_stack;
9030
9031 /* Move values from changed_variables to changed_values_stack. */
9032 changed_variables
9033 ->traverse <vec<rtx, va_heap>*, var_track_values_to_stack>
9034 (&changed_values_stack);
9035
9036 /* Back-propagate change notifications in values while popping
9037 them from the stack. */
9038 for (n = i = changed_values_stack.length ();
9039 i > 0; i = changed_values_stack.length ())
9040 {
9041 val = changed_values_stack.pop ();
9042 notify_dependents_of_changed_value (val, htab, &changed_values_stack);
9043
9044 /* This condition will hold when visiting each of the entries
9045 originally in changed_variables. We can't remove them
9046 earlier because this could drop the backlinks before we got a
9047 chance to use them. */
9048 if (i == n)
9049 {
9050 remove_value_from_changed_variables (val);
9051 n--;
9052 }
9053 }
9054 }
9055
9056 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
9057 CHANGED_VARIABLES and delete this chain. WHERE specifies whether
9058 the notes shall be emitted before or after instruction INSN. */
9059
9060 static void
9061 emit_notes_for_changes (rtx_insn *insn, enum emit_note_where where,
9062 shared_hash *vars)
9063 {
9064 emit_note_data data;
9065 variable_table_type *htab = shared_hash_htab (vars);
9066
9067 if (!changed_variables->elements ())
9068 return;
9069
9070 if (MAY_HAVE_DEBUG_BIND_INSNS)
9071 process_changed_values (htab);
9072
9073 data.insn = insn;
9074 data.where = where;
9075 data.vars = htab;
9076
9077 changed_variables
9078 ->traverse <emit_note_data*, emit_note_insn_var_location> (&data);
9079 }
9080
9081 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
9082 same variable in hash table DATA or is not there at all. */
9083
9084 int
9085 emit_notes_for_differences_1 (variable **slot, variable_table_type *new_vars)
9086 {
9087 variable *old_var, *new_var;
9088
9089 old_var = *slot;
9090 new_var = new_vars->find_with_hash (old_var->dv, dv_htab_hash (old_var->dv));
9091
9092 if (!new_var)
9093 {
9094 /* Variable has disappeared. */
9095 variable *empty_var = NULL;
9096
9097 if (old_var->onepart == ONEPART_VALUE
9098 || old_var->onepart == ONEPART_DEXPR)
9099 {
9100 empty_var = variable_from_dropped (old_var->dv, NO_INSERT);
9101 if (empty_var)
9102 {
9103 gcc_checking_assert (!empty_var->in_changed_variables);
9104 if (!VAR_LOC_1PAUX (old_var))
9105 {
9106 VAR_LOC_1PAUX (old_var) = VAR_LOC_1PAUX (empty_var);
9107 VAR_LOC_1PAUX (empty_var) = NULL;
9108 }
9109 else
9110 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
9111 }
9112 }
9113
9114 if (!empty_var)
9115 {
9116 empty_var = onepart_pool_allocate (old_var->onepart);
9117 empty_var->dv = old_var->dv;
9118 empty_var->refcount = 0;
9119 empty_var->n_var_parts = 0;
9120 empty_var->onepart = old_var->onepart;
9121 empty_var->in_changed_variables = false;
9122 }
9123
9124 if (empty_var->onepart)
9125 {
9126 /* Propagate the auxiliary data to (ultimately)
9127 changed_variables. */
9128 empty_var->var_part[0].loc_chain = NULL;
9129 empty_var->var_part[0].cur_loc = NULL;
9130 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (old_var);
9131 VAR_LOC_1PAUX (old_var) = NULL;
9132 }
9133 variable_was_changed (empty_var, NULL);
9134 /* Continue traversing the hash table. */
9135 return 1;
9136 }
9137 /* Update cur_loc and one-part auxiliary data, before new_var goes
9138 through variable_was_changed. */
9139 if (old_var != new_var && new_var->onepart)
9140 {
9141 gcc_checking_assert (VAR_LOC_1PAUX (new_var) == NULL);
9142 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (old_var);
9143 VAR_LOC_1PAUX (old_var) = NULL;
9144 new_var->var_part[0].cur_loc = old_var->var_part[0].cur_loc;
9145 }
9146 if (variable_different_p (old_var, new_var))
9147 variable_was_changed (new_var, NULL);
9148
9149 /* Continue traversing the hash table. */
9150 return 1;
9151 }
9152
9153 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
9154 table DATA. */
9155
9156 int
9157 emit_notes_for_differences_2 (variable **slot, variable_table_type *old_vars)
9158 {
9159 variable *old_var, *new_var;
9160
9161 new_var = *slot;
9162 old_var = old_vars->find_with_hash (new_var->dv, dv_htab_hash (new_var->dv));
9163 if (!old_var)
9164 {
9165 int i;
9166 for (i = 0; i < new_var->n_var_parts; i++)
9167 new_var->var_part[i].cur_loc = NULL;
9168 variable_was_changed (new_var, NULL);
9169 }
9170
9171 /* Continue traversing the hash table. */
9172 return 1;
9173 }
9174
9175 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
9176 NEW_SET. */
9177
9178 static void
9179 emit_notes_for_differences (rtx_insn *insn, dataflow_set *old_set,
9180 dataflow_set *new_set)
9181 {
9182 shared_hash_htab (old_set->vars)
9183 ->traverse <variable_table_type *, emit_notes_for_differences_1>
9184 (shared_hash_htab (new_set->vars));
9185 shared_hash_htab (new_set->vars)
9186 ->traverse <variable_table_type *, emit_notes_for_differences_2>
9187 (shared_hash_htab (old_set->vars));
9188 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
9189 }
9190
9191 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
9192
9193 static rtx_insn *
9194 next_non_note_insn_var_location (rtx_insn *insn)
9195 {
9196 while (insn)
9197 {
9198 insn = NEXT_INSN (insn);
9199 if (insn == 0
9200 || !NOTE_P (insn)
9201 || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION)
9202 break;
9203 }
9204
9205 return insn;
9206 }
9207
9208 /* Emit the notes for changes of location parts in the basic block BB. */
9209
9210 static void
9211 emit_notes_in_bb (basic_block bb, dataflow_set *set)
9212 {
9213 unsigned int i;
9214 micro_operation *mo;
9215
9216 dataflow_set_clear (set);
9217 dataflow_set_copy (set, &VTI (bb)->in);
9218
9219 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
9220 {
9221 rtx_insn *insn = mo->insn;
9222 rtx_insn *next_insn = next_non_note_insn_var_location (insn);
9223
9224 switch (mo->type)
9225 {
9226 case MO_CALL:
9227 dataflow_set_clear_at_call (set, insn);
9228 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
9229 {
9230 rtx arguments = mo->u.loc, *p = &arguments;
9231 while (*p)
9232 {
9233 XEXP (XEXP (*p, 0), 1)
9234 = vt_expand_loc (XEXP (XEXP (*p, 0), 1),
9235 shared_hash_htab (set->vars));
9236 /* If expansion is successful, keep it in the list. */
9237 if (XEXP (XEXP (*p, 0), 1))
9238 {
9239 XEXP (XEXP (*p, 0), 1)
9240 = copy_rtx_if_shared (XEXP (XEXP (*p, 0), 1));
9241 p = &XEXP (*p, 1);
9242 }
9243 /* Otherwise, if the following item is data_value for it,
9244 drop it too. */
9245 else if (XEXP (*p, 1)
9246 && REG_P (XEXP (XEXP (*p, 0), 0))
9247 && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
9248 && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
9249 0))
9250 && REGNO (XEXP (XEXP (*p, 0), 0))
9251 == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
9252 0), 0)))
9253 *p = XEXP (XEXP (*p, 1), 1);
9254 /* Just drop this item. */
9255 else
9256 *p = XEXP (*p, 1);
9257 }
9258 add_reg_note (insn, REG_CALL_ARG_LOCATION, arguments);
9259 }
9260 break;
9261
9262 case MO_USE:
9263 {
9264 rtx loc = mo->u.loc;
9265
9266 if (REG_P (loc))
9267 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9268 else
9269 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9270
9271 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9272 }
9273 break;
9274
9275 case MO_VAL_LOC:
9276 {
9277 rtx loc = mo->u.loc;
9278 rtx val, vloc;
9279 tree var;
9280
9281 if (GET_CODE (loc) == CONCAT)
9282 {
9283 val = XEXP (loc, 0);
9284 vloc = XEXP (loc, 1);
9285 }
9286 else
9287 {
9288 val = NULL_RTX;
9289 vloc = loc;
9290 }
9291
9292 var = PAT_VAR_LOCATION_DECL (vloc);
9293
9294 clobber_variable_part (set, NULL_RTX,
9295 dv_from_decl (var), 0, NULL_RTX);
9296 if (val)
9297 {
9298 if (VAL_NEEDS_RESOLUTION (loc))
9299 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
9300 set_variable_part (set, val, dv_from_decl (var), 0,
9301 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9302 INSERT);
9303 }
9304 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
9305 set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
9306 dv_from_decl (var), 0,
9307 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9308 INSERT);
9309
9310 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9311 }
9312 break;
9313
9314 case MO_VAL_USE:
9315 {
9316 rtx loc = mo->u.loc;
9317 rtx val, vloc, uloc;
9318
9319 vloc = uloc = XEXP (loc, 1);
9320 val = XEXP (loc, 0);
9321
9322 if (GET_CODE (val) == CONCAT)
9323 {
9324 uloc = XEXP (val, 1);
9325 val = XEXP (val, 0);
9326 }
9327
9328 if (VAL_NEEDS_RESOLUTION (loc))
9329 val_resolve (set, val, vloc, insn);
9330 else
9331 val_store (set, val, uloc, insn, false);
9332
9333 if (VAL_HOLDS_TRACK_EXPR (loc))
9334 {
9335 if (GET_CODE (uloc) == REG)
9336 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9337 NULL);
9338 else if (GET_CODE (uloc) == MEM)
9339 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9340 NULL);
9341 }
9342
9343 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9344 }
9345 break;
9346
9347 case MO_VAL_SET:
9348 {
9349 rtx loc = mo->u.loc;
9350 rtx val, vloc, uloc;
9351 rtx dstv, srcv;
9352
9353 vloc = loc;
9354 uloc = XEXP (vloc, 1);
9355 val = XEXP (vloc, 0);
9356 vloc = uloc;
9357
9358 if (GET_CODE (uloc) == SET)
9359 {
9360 dstv = SET_DEST (uloc);
9361 srcv = SET_SRC (uloc);
9362 }
9363 else
9364 {
9365 dstv = uloc;
9366 srcv = NULL;
9367 }
9368
9369 if (GET_CODE (val) == CONCAT)
9370 {
9371 dstv = vloc = XEXP (val, 1);
9372 val = XEXP (val, 0);
9373 }
9374
9375 if (GET_CODE (vloc) == SET)
9376 {
9377 srcv = SET_SRC (vloc);
9378
9379 gcc_assert (val != srcv);
9380 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
9381
9382 dstv = vloc = SET_DEST (vloc);
9383
9384 if (VAL_NEEDS_RESOLUTION (loc))
9385 val_resolve (set, val, srcv, insn);
9386 }
9387 else if (VAL_NEEDS_RESOLUTION (loc))
9388 {
9389 gcc_assert (GET_CODE (uloc) == SET
9390 && GET_CODE (SET_SRC (uloc)) == REG);
9391 val_resolve (set, val, SET_SRC (uloc), insn);
9392 }
9393
9394 if (VAL_HOLDS_TRACK_EXPR (loc))
9395 {
9396 if (VAL_EXPR_IS_CLOBBERED (loc))
9397 {
9398 if (REG_P (uloc))
9399 var_reg_delete (set, uloc, true);
9400 else if (MEM_P (uloc))
9401 {
9402 gcc_assert (MEM_P (dstv));
9403 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
9404 var_mem_delete (set, dstv, true);
9405 }
9406 }
9407 else
9408 {
9409 bool copied_p = VAL_EXPR_IS_COPIED (loc);
9410 rtx src = NULL, dst = uloc;
9411 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
9412
9413 if (GET_CODE (uloc) == SET)
9414 {
9415 src = SET_SRC (uloc);
9416 dst = SET_DEST (uloc);
9417 }
9418
9419 if (copied_p)
9420 {
9421 status = find_src_status (set, src);
9422
9423 src = find_src_set_src (set, src);
9424 }
9425
9426 if (REG_P (dst))
9427 var_reg_delete_and_set (set, dst, !copied_p,
9428 status, srcv);
9429 else if (MEM_P (dst))
9430 {
9431 gcc_assert (MEM_P (dstv));
9432 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
9433 var_mem_delete_and_set (set, dstv, !copied_p,
9434 status, srcv);
9435 }
9436 }
9437 }
9438 else if (REG_P (uloc))
9439 var_regno_delete (set, REGNO (uloc));
9440 else if (MEM_P (uloc))
9441 {
9442 gcc_checking_assert (GET_CODE (vloc) == MEM);
9443 gcc_checking_assert (vloc == dstv);
9444 if (vloc != dstv)
9445 clobber_overlapping_mems (set, vloc);
9446 }
9447
9448 val_store (set, val, dstv, insn, true);
9449
9450 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9451 set->vars);
9452 }
9453 break;
9454
9455 case MO_SET:
9456 {
9457 rtx loc = mo->u.loc;
9458 rtx set_src = NULL;
9459
9460 if (GET_CODE (loc) == SET)
9461 {
9462 set_src = SET_SRC (loc);
9463 loc = SET_DEST (loc);
9464 }
9465
9466 if (REG_P (loc))
9467 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9468 set_src);
9469 else
9470 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9471 set_src);
9472
9473 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9474 set->vars);
9475 }
9476 break;
9477
9478 case MO_COPY:
9479 {
9480 rtx loc = mo->u.loc;
9481 enum var_init_status src_status;
9482 rtx set_src = NULL;
9483
9484 if (GET_CODE (loc) == SET)
9485 {
9486 set_src = SET_SRC (loc);
9487 loc = SET_DEST (loc);
9488 }
9489
9490 src_status = find_src_status (set, set_src);
9491 set_src = find_src_set_src (set, set_src);
9492
9493 if (REG_P (loc))
9494 var_reg_delete_and_set (set, loc, false, src_status, set_src);
9495 else
9496 var_mem_delete_and_set (set, loc, false, src_status, set_src);
9497
9498 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9499 set->vars);
9500 }
9501 break;
9502
9503 case MO_USE_NO_VAR:
9504 {
9505 rtx loc = mo->u.loc;
9506
9507 if (REG_P (loc))
9508 var_reg_delete (set, loc, false);
9509 else
9510 var_mem_delete (set, loc, false);
9511
9512 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9513 }
9514 break;
9515
9516 case MO_CLOBBER:
9517 {
9518 rtx loc = mo->u.loc;
9519
9520 if (REG_P (loc))
9521 var_reg_delete (set, loc, true);
9522 else
9523 var_mem_delete (set, loc, true);
9524
9525 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9526 set->vars);
9527 }
9528 break;
9529
9530 case MO_ADJUST:
9531 set->stack_adjust += mo->u.adjust;
9532 break;
9533 }
9534 }
9535 }
9536
9537 /* Emit notes for the whole function. */
9538
9539 static void
9540 vt_emit_notes (void)
9541 {
9542 basic_block bb;
9543 dataflow_set cur;
9544
9545 gcc_assert (!changed_variables->elements ());
9546
9547 /* Free memory occupied by the out hash tables, as they aren't used
9548 anymore. */
9549 FOR_EACH_BB_FN (bb, cfun)
9550 dataflow_set_clear (&VTI (bb)->out);
9551
9552 /* Enable emitting notes by functions (mainly by set_variable_part and
9553 delete_variable_part). */
9554 emit_notes = true;
9555
9556 if (MAY_HAVE_DEBUG_BIND_INSNS)
9557 dropped_values = new variable_table_type (cselib_get_next_uid () * 2);
9558
9559 dataflow_set_init (&cur);
9560
9561 FOR_EACH_BB_FN (bb, cfun)
9562 {
9563 /* Emit the notes for changes of variable locations between two
9564 subsequent basic blocks. */
9565 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
9566
9567 if (MAY_HAVE_DEBUG_BIND_INSNS)
9568 local_get_addr_cache = new hash_map<rtx, rtx>;
9569
9570 /* Emit the notes for the changes in the basic block itself. */
9571 emit_notes_in_bb (bb, &cur);
9572
9573 if (MAY_HAVE_DEBUG_BIND_INSNS)
9574 delete local_get_addr_cache;
9575 local_get_addr_cache = NULL;
9576
9577 /* Free memory occupied by the in hash table, we won't need it
9578 again. */
9579 dataflow_set_clear (&VTI (bb)->in);
9580 }
9581
9582 if (flag_checking)
9583 shared_hash_htab (cur.vars)
9584 ->traverse <variable_table_type *, emit_notes_for_differences_1>
9585 (shared_hash_htab (empty_shared_hash));
9586
9587 dataflow_set_destroy (&cur);
9588
9589 if (MAY_HAVE_DEBUG_BIND_INSNS)
9590 delete dropped_values;
9591 dropped_values = NULL;
9592
9593 emit_notes = false;
9594 }
9595
9596 /* If there is a declaration and offset associated with register/memory RTL,
9597 assign the declaration to *DECLP and the offset to *OFFSETP, and return true. */
9598
9599 static bool
9600 vt_get_decl_and_offset (rtx rtl, tree *declp, poly_int64 *offsetp)
9601 {
9602 if (REG_P (rtl))
9603 {
9604 if (REG_ATTRS (rtl))
9605 {
9606 *declp = REG_EXPR (rtl);
9607 *offsetp = REG_OFFSET (rtl);
9608 return true;
9609 }
9610 }
9611 else if (GET_CODE (rtl) == PARALLEL)
9612 {
9613 tree decl = NULL_TREE;
9614 HOST_WIDE_INT offset = MAX_VAR_PARTS;
9615 int len = XVECLEN (rtl, 0), i;
9616
9617 for (i = 0; i < len; i++)
9618 {
9619 rtx reg = XEXP (XVECEXP (rtl, 0, i), 0);
9620 if (!REG_P (reg) || !REG_ATTRS (reg))
9621 break;
9622 if (!decl)
9623 decl = REG_EXPR (reg);
9624 if (REG_EXPR (reg) != decl)
9625 break;
9626 HOST_WIDE_INT this_offset;
9627 if (!track_offset_p (REG_OFFSET (reg), &this_offset))
9628 break;
9629 offset = MIN (offset, this_offset);
9630 }
9631
9632 if (i == len)
9633 {
9634 *declp = decl;
9635 *offsetp = offset;
9636 return true;
9637 }
9638 }
9639 else if (MEM_P (rtl))
9640 {
9641 if (MEM_ATTRS (rtl))
9642 {
9643 *declp = MEM_EXPR (rtl);
9644 *offsetp = int_mem_offset (rtl);
9645 return true;
9646 }
9647 }
9648 return false;
9649 }
9650
9651 /* Record the value for the ENTRY_VALUE of RTL as a global equivalence
9652 of VAL. */
9653
9654 static void
9655 record_entry_value (cselib_val *val, rtx rtl)
9656 {
9657 rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));
9658
9659 ENTRY_VALUE_EXP (ev) = rtl;
9660
9661 cselib_add_permanent_equiv (val, ev, get_insns ());
9662 }
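/* Schematically (an illustrative example only): for an argument that
   arrives in hard register 5 in SImode, RTL is (reg:SI 5), and the
   equivalence recorded above lets cselib treat (entry_value:SI (reg:SI 5))
   as a location of VAL that is valid throughout the function.  */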
9663
9664 /* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK. */
9665
9666 static void
9667 vt_add_function_parameter (tree parm)
9668 {
9669 rtx decl_rtl = DECL_RTL_IF_SET (parm);
9670 rtx incoming = DECL_INCOMING_RTL (parm);
9671 tree decl;
9672 machine_mode mode;
9673 poly_int64 offset;
9674 dataflow_set *out;
9675 decl_or_value dv;
9676 bool incoming_ok = true;
9677
9678 if (TREE_CODE (parm) != PARM_DECL)
9679 return;
9680
9681 if (!decl_rtl || !incoming)
9682 return;
9683
9684 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
9685 return;
9686
9687 /* If there is a DRAP register or a pseudo in internal_arg_pointer,
9688 rewrite the incoming location of parameters passed on the stack
9689 into MEMs based on the argument pointer, so that incoming doesn't
9690 depend on a pseudo. */
9691 if (MEM_P (incoming)
9692 && (XEXP (incoming, 0) == crtl->args.internal_arg_pointer
9693 || (GET_CODE (XEXP (incoming, 0)) == PLUS
9694 && XEXP (XEXP (incoming, 0), 0)
9695 == crtl->args.internal_arg_pointer
9696 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
9697 {
9698 HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
9699 if (GET_CODE (XEXP (incoming, 0)) == PLUS)
9700 off += INTVAL (XEXP (XEXP (incoming, 0), 1));
9701 incoming
9702 = replace_equiv_address_nv (incoming,
9703 plus_constant (Pmode,
9704 arg_pointer_rtx, off));
9705 }
9706
9707 #ifdef HAVE_window_save
9708 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
9709 If the target machine has an explicit window save instruction, the
9710 actual entry value is the corresponding OUTGOING_REGNO instead. */
9711 if (HAVE_window_save && !crtl->uses_only_leaf_regs)
9712 {
9713 if (REG_P (incoming)
9714 && HARD_REGISTER_P (incoming)
9715 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
9716 {
9717 parm_reg p;
9718 p.incoming = incoming;
9719 incoming
9720 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
9721 OUTGOING_REGNO (REGNO (incoming)), 0);
9722 p.outgoing = incoming;
9723 vec_safe_push (windowed_parm_regs, p);
9724 }
9725 else if (GET_CODE (incoming) == PARALLEL)
9726 {
9727 rtx outgoing
9728 = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (XVECLEN (incoming, 0)));
9729 int i;
9730
9731 for (i = 0; i < XVECLEN (incoming, 0); i++)
9732 {
9733 rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9734 parm_reg p;
9735 p.incoming = reg;
9736 reg = gen_rtx_REG_offset (reg, GET_MODE (reg),
9737 OUTGOING_REGNO (REGNO (reg)), 0);
9738 p.outgoing = reg;
9739 XVECEXP (outgoing, 0, i)
9740 = gen_rtx_EXPR_LIST (VOIDmode, reg,
9741 XEXP (XVECEXP (incoming, 0, i), 1));
9742 vec_safe_push (windowed_parm_regs, p);
9743 }
9744
9745 incoming = outgoing;
9746 }
9747 else if (MEM_P (incoming)
9748 && REG_P (XEXP (incoming, 0))
9749 && HARD_REGISTER_P (XEXP (incoming, 0)))
9750 {
9751 rtx reg = XEXP (incoming, 0);
9752 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
9753 {
9754 parm_reg p;
9755 p.incoming = reg;
9756 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
9757 p.outgoing = reg;
9758 vec_safe_push (windowed_parm_regs, p);
9759 incoming = replace_equiv_address_nv (incoming, reg);
9760 }
9761 }
9762 }
9763 #endif
9764
9765 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
9766 {
9767 incoming_ok = false;
9768 if (MEM_P (incoming))
9769 {
9770 /* This means the argument is passed by invisible reference. */
9771 offset = 0;
9772 decl = parm;
9773 }
9774 else
9775 {
9776 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
9777 return;
9778 offset += byte_lowpart_offset (GET_MODE (incoming),
9779 GET_MODE (decl_rtl));
9780 }
9781 }
9782
9783 if (!decl)
9784 return;
9785
9786 if (parm != decl)
9787 {
9788 /* If that DECL_RTL wasn't a pseudo that got spilled to
9789 memory, bail out. Otherwise, the spill slot sharing code
9790 will force the memory to reference spill_slot_decl (%sfp),
9791 so we don't match above. That's ok, the pseudo must have
9792 referenced the entire parameter, so just reset OFFSET. */
9793 if (decl != get_spill_slot_decl (false))
9794 return;
9795 offset = 0;
9796 }
9797
9798 HOST_WIDE_INT const_offset;
9799 if (!track_loc_p (incoming, parm, offset, false, &mode, &const_offset))
9800 return;
9801
9802 out = &VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out;
9803
9804 dv = dv_from_decl (parm);
9805
9806 if (target_for_debug_bind (parm)
9807 /* We can't deal with these right now, because this kind of
9808 variable is single-part. ??? We could handle parallels
9809 that describe multiple locations for the same single
9810 value, but ATM we don't. */
9811 && GET_CODE (incoming) != PARALLEL)
9812 {
9813 cselib_val *val;
9814 rtx lowpart;
9815
9816 /* ??? We shouldn't ever hit this, but it may happen because
9817 arguments passed by invisible reference aren't dealt with
9818 above: incoming-rtl will have Pmode rather than the
9819 expected mode for the type. */
9820 if (const_offset)
9821 return;
9822
9823 lowpart = var_lowpart (mode, incoming);
9824 if (!lowpart)
9825 return;
9826
9827 val = cselib_lookup_from_insn (lowpart, mode, true,
9828 VOIDmode, get_insns ());
9829
9830 /* ??? Float-typed values in memory are not handled by
9831 cselib. */
9832 if (val)
9833 {
9834 preserve_value (val);
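/* Record the parameter's location as this VALUE, then switch DV to
   the VALUE so the incoming register or memory below is tracked as a
   location of the VALUE rather than of the parameter directly.  */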
9835 set_variable_part (out, val->val_rtx, dv, const_offset,
9836 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9837 dv = dv_from_value (val->val_rtx);
9838 }
9839
9840 if (MEM_P (incoming))
9841 {
9842 val = cselib_lookup_from_insn (XEXP (incoming, 0), mode, true,
9843 VOIDmode, get_insns ());
9844 if (val)
9845 {
9846 preserve_value (val);
9847 incoming = replace_equiv_address_nv (incoming, val->val_rtx);
9848 }
9849 }
9850 }
9851
9852 if (REG_P (incoming))
9853 {
9854 incoming = var_lowpart (mode, incoming);
9855 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
9856 attrs_list_insert (&out->regs[REGNO (incoming)], dv, const_offset,
9857 incoming);
9858 set_variable_part (out, incoming, dv, const_offset,
9859 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9860 if (dv_is_value_p (dv))
9861 {
9862 record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv)), incoming);
9863 if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
9864 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
9865 {
9866 machine_mode indmode
9867 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
9868 rtx mem = gen_rtx_MEM (indmode, incoming);
9869 cselib_val *val = cselib_lookup_from_insn (mem, indmode, true,
9870 VOIDmode,
9871 get_insns ());
9872 if (val)
9873 {
9874 preserve_value (val);
9875 record_entry_value (val, mem);
9876 set_variable_part (out, mem, dv_from_value (val->val_rtx), 0,
9877 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9878 }
9879 }
9880 }
9881 }
9882 else if (GET_CODE (incoming) == PARALLEL && !dv_onepart_p (dv))
9883 {
9884 int i;
9885
9886 /* The following code relies on vt_get_decl_and_offset returning true for
9887 incoming, which might not always be the case. */
9888 if (!incoming_ok)
9889 return;
9890 for (i = 0; i < XVECLEN (incoming, 0); i++)
9891 {
9892 rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9893 /* vt_get_decl_and_offset has already checked that the offset
9894 is a valid variable part. */
9895 const_offset = get_tracked_reg_offset (reg);
9896 gcc_assert (REGNO (reg) < FIRST_PSEUDO_REGISTER);
9897 attrs_list_insert (&out->regs[REGNO (reg)], dv, const_offset, reg);
9898 set_variable_part (out, reg, dv, const_offset,
9899 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9900 }
9901 }
9902 else if (MEM_P (incoming))
9903 {
9904 incoming = var_lowpart (mode, incoming);
9905 set_variable_part (out, incoming, dv, const_offset,
9906 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9907 }
9908 }
9909
9910 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
9911
9912 static void
9913 vt_add_function_parameters (void)
9914 {
9915 tree parm;
9916
9917 for (parm = DECL_ARGUMENTS (current_function_decl);
9918 parm; parm = DECL_CHAIN (parm))
9919 if (!POINTER_BOUNDS_P (parm))
9920 vt_add_function_parameter (parm);
9921
9922 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
9923 {
9924 tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));
9925
9926 if (TREE_CODE (vexpr) == INDIRECT_REF)
9927 vexpr = TREE_OPERAND (vexpr, 0);
9928
9929 if (TREE_CODE (vexpr) == PARM_DECL
9930 && DECL_ARTIFICIAL (vexpr)
9931 && !DECL_IGNORED_P (vexpr)
9932 && DECL_NAMELESS (vexpr))
9933 vt_add_function_parameter (vexpr);
9934 }
9935 }
9936
9937 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
9938 ensure it isn't flushed during cselib_reset_table.
9939 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
9940 has been eliminated. */
9941
9942 static void
9943 vt_init_cfa_base (void)
9944 {
9945 cselib_val *val;
9946
9947 #ifdef FRAME_POINTER_CFA_OFFSET
9948 cfa_base_rtx = frame_pointer_rtx;
9949 cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
9950 #else
9951 cfa_base_rtx = arg_pointer_rtx;
9952 cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
9953 #endif
9954 if (cfa_base_rtx == hard_frame_pointer_rtx
9955 || !fixed_regs[REGNO (cfa_base_rtx)])
9956 {
9957 cfa_base_rtx = NULL_RTX;
9958 return;
9959 }
9960 if (!MAY_HAVE_DEBUG_BIND_INSNS)
9961 return;
9962
9963 /* Tell alias analysis that cfa_base_rtx should share
9964 find_base_term value with stack pointer or hard frame pointer. */
9965 if (!frame_pointer_needed)
9966 vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
9967 else if (!crtl->stack_realign_tried)
9968 vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);
9969
9970 val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
9971 VOIDmode, get_insns ());
9972 preserve_value (val);
9973 cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
9974 }
9975
9976 /* Reemit INSN, a MARKER_DEBUG_INSN, as a note. */
9977
9978 static rtx_insn *
9979 reemit_marker_as_note (rtx_insn *insn)
9980 {
9981 gcc_checking_assert (DEBUG_MARKER_INSN_P (insn));
9982
9983 enum insn_note kind = INSN_DEBUG_MARKER_KIND (insn);
9984
9985 switch (kind)
9986 {
9987 case NOTE_INSN_BEGIN_STMT:
9988 case NOTE_INSN_INLINE_ENTRY:
9989 {
9990 rtx_insn *note = NULL;
9991 if (cfun->debug_nonbind_markers)
9992 {
9993 note = emit_note_before (kind, insn);
9994 NOTE_MARKER_LOCATION (note) = INSN_LOCATION (insn);
9995 }
9996 delete_insn (insn);
9997 return note;
9998 }
9999
10000 default:
10001 gcc_unreachable ();
10002 }
10003 }
10004
10005 /* Allocate and initialize the data structures for variable tracking
10006 and parse the RTL to get the micro operations. */
10007
10008 static bool
10009 vt_initialize (void)
10010 {
10011 basic_block bb;
10012 HOST_WIDE_INT fp_cfa_offset = -1;
10013
10014 alloc_aux_for_blocks (sizeof (variable_tracking_info));
10015
10016 empty_shared_hash = shared_hash_pool.allocate ();
10017 empty_shared_hash->refcount = 1;
10018 empty_shared_hash->htab = new variable_table_type (1);
10019 changed_variables = new variable_table_type (10);
10020
10021 /* Init the IN and OUT sets. */
10022 FOR_ALL_BB_FN (bb, cfun)
10023 {
10024 VTI (bb)->visited = false;
10025 VTI (bb)->flooded = false;
10026 dataflow_set_init (&VTI (bb)->in);
10027 dataflow_set_init (&VTI (bb)->out);
10028 VTI (bb)->permp = NULL;
10029 }
10030
10031 if (MAY_HAVE_DEBUG_BIND_INSNS)
10032 {
10033 cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
10034 scratch_regs = BITMAP_ALLOC (NULL);
10035 preserved_values.create (256);
10036 global_get_addr_cache = new hash_map<rtx, rtx>;
10037 }
10038 else
10039 {
10040 scratch_regs = NULL;
10041 global_get_addr_cache = NULL;
10042 }
10043
10044 if (MAY_HAVE_DEBUG_BIND_INSNS)
10045 {
10046 rtx reg, expr;
10047 int ofst;
10048 cselib_val *val;
10049
10050 #ifdef FRAME_POINTER_CFA_OFFSET
10051 reg = frame_pointer_rtx;
10052 ofst = FRAME_POINTER_CFA_OFFSET (current_function_decl);
10053 #else
10054 reg = arg_pointer_rtx;
10055 ofst = ARG_POINTER_CFA_OFFSET (current_function_decl);
10056 #endif
10057
10058 ofst -= INCOMING_FRAME_SP_OFFSET;
10059
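/* OFST is now the amount by which REG differs from the stack pointer
   at function entry (both expressed via the CFA); the code below
   records REG == SP - OFST at the first insn as a permanent cselib
   equivalence.  */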
10060 val = cselib_lookup_from_insn (reg, GET_MODE (reg), 1,
10061 VOIDmode, get_insns ());
10062 preserve_value (val);
10063 if (reg != hard_frame_pointer_rtx && fixed_regs[REGNO (reg)])
10064 cselib_preserve_cfa_base_value (val, REGNO (reg));
10065 expr = plus_constant (GET_MODE (stack_pointer_rtx),
10066 stack_pointer_rtx, -ofst);
10067 cselib_add_permanent_equiv (val, expr, get_insns ());
10068
10069 if (ofst)
10070 {
10071 val = cselib_lookup_from_insn (stack_pointer_rtx,
10072 GET_MODE (stack_pointer_rtx), 1,
10073 VOIDmode, get_insns ());
10074 preserve_value (val);
10075 expr = plus_constant (GET_MODE (reg), reg, ofst);
10076 cselib_add_permanent_equiv (val, expr, get_insns ());
10077 }
10078 }
10079
10080 /* In order to factor out the adjustments made to the stack pointer or to
10081 the hard frame pointer and thus be able to use DW_OP_fbreg operations
10082 instead of individual location lists, we're going to rewrite MEMs based
10083 on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
10084 or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
10085 resp. arg_pointer_rtx. We can do this either when there is no frame
10086 pointer in the function and stack adjustments are consistent for all
10087 basic blocks or when there is a frame pointer and no stack realignment.
10088 But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
10089 has been eliminated. */
10090 if (!frame_pointer_needed)
10091 {
10092 rtx reg, elim;
10093
10094 if (!vt_stack_adjustments ())
10095 return false;
10096
10097 #ifdef FRAME_POINTER_CFA_OFFSET
10098 reg = frame_pointer_rtx;
10099 #else
10100 reg = arg_pointer_rtx;
10101 #endif
10102 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10103 if (elim != reg)
10104 {
10105 if (GET_CODE (elim) == PLUS)
10106 elim = XEXP (elim, 0);
10107 if (elim == stack_pointer_rtx)
10108 vt_init_cfa_base ();
10109 }
10110 }
10111 else if (!crtl->stack_realign_tried)
10112 {
10113 rtx reg, elim;
10114
10115 #ifdef FRAME_POINTER_CFA_OFFSET
10116 reg = frame_pointer_rtx;
10117 fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
10118 #else
10119 reg = arg_pointer_rtx;
10120 fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
10121 #endif
10122 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10123 if (elim != reg)
10124 {
10125 if (GET_CODE (elim) == PLUS)
10126 {
10127 fp_cfa_offset -= INTVAL (XEXP (elim, 1));
10128 elim = XEXP (elim, 0);
10129 }
10130 if (elim != hard_frame_pointer_rtx)
10131 fp_cfa_offset = -1;
10132 }
10133 else
10134 fp_cfa_offset = -1;
10135 }
10136
10137 /* If the stack is realigned and a DRAP register is used, we're going to
10138 rewrite MEMs based on it representing incoming locations of parameters
10139 passed on the stack into MEMs based on the argument pointer. Although
10140 we aren't going to rewrite other MEMs, we still need to initialize the
10141 virtual CFA pointer in order to ensure that the argument pointer will
10142 be seen as a constant throughout the function.
10143
10144 ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
10145 else if (stack_realign_drap)
10146 {
10147 rtx reg, elim;
10148
10149 #ifdef FRAME_POINTER_CFA_OFFSET
10150 reg = frame_pointer_rtx;
10151 #else
10152 reg = arg_pointer_rtx;
10153 #endif
10154 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10155 if (elim != reg)
10156 {
10157 if (GET_CODE (elim) == PLUS)
10158 elim = XEXP (elim, 0);
10159 if (elim == hard_frame_pointer_rtx)
10160 vt_init_cfa_base ();
10161 }
10162 }
10163
10164 hard_frame_pointer_adjustment = -1;
10165
10166 vt_add_function_parameters ();
10167
10168 FOR_EACH_BB_FN (bb, cfun)
10169 {
10170 rtx_insn *insn;
10171 HOST_WIDE_INT pre, post = 0;
10172 basic_block first_bb, last_bb;
10173
10174 if (MAY_HAVE_DEBUG_BIND_INSNS)
10175 {
10176 cselib_record_sets_hook = add_with_sets;
10177 if (dump_file && (dump_flags & TDF_DETAILS))
10178 fprintf (dump_file, "first value: %i\n",
10179 cselib_get_next_uid ());
10180 }
10181
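/* Extend the region to a maximal chain of blocks connected by
   single-predecessor fallthru edges; cselib state is kept live across
   the whole chain and only reset once it has been processed.  */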
10182 first_bb = bb;
10183 for (;;)
10184 {
10185 edge e;
10186 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
10187 || ! single_pred_p (bb->next_bb))
10188 break;
10189 e = find_edge (bb, bb->next_bb);
10190 if (! e || (e->flags & EDGE_FALLTHRU) == 0)
10191 break;
10192 bb = bb->next_bb;
10193 }
10194 last_bb = bb;
10195
10196 /* Add the micro-operations to the vector. */
10197 FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
10198 {
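/* Remember the block's final stack adjustment and restart from the
   incoming one; the MO_ADJUST operations recorded below rebuild it,
   and the assert at the end of the block checks that the two agree.  */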
10199 HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
10200 VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
10201
10202 rtx_insn *next;
10203 FOR_BB_INSNS_SAFE (bb, insn, next)
10204 {
10205 if (INSN_P (insn))
10206 {
10207 if (!frame_pointer_needed)
10208 {
10209 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
10210 if (pre)
10211 {
10212 micro_operation mo;
10213 mo.type = MO_ADJUST;
10214 mo.u.adjust = pre;
10215 mo.insn = insn;
10216 if (dump_file && (dump_flags & TDF_DETAILS))
10217 log_op_type (PATTERN (insn), bb, insn,
10218 MO_ADJUST, dump_file);
10219 VTI (bb)->mos.safe_push (mo);
10220 VTI (bb)->out.stack_adjust += pre;
10221 }
10222 }
10223
10224 cselib_hook_called = false;
10225 adjust_insn (bb, insn);
10226 if (DEBUG_MARKER_INSN_P (insn))
10227 {
10228 reemit_marker_as_note (insn);
10229 continue;
10230 }
10231
10232 if (MAY_HAVE_DEBUG_BIND_INSNS)
10233 {
10234 if (CALL_P (insn))
10235 prepare_call_arguments (bb, insn);
10236 cselib_process_insn (insn);
10237 if (dump_file && (dump_flags & TDF_DETAILS))
10238 {
10239 print_rtl_single (dump_file, insn);
10240 dump_cselib_table (dump_file);
10241 }
10242 }
10243 if (!cselib_hook_called)
10244 add_with_sets (insn, 0, 0);
10245 cancel_changes (0);
10246
10247 if (!frame_pointer_needed && post)
10248 {
10249 micro_operation mo;
10250 mo.type = MO_ADJUST;
10251 mo.u.adjust = post;
10252 mo.insn = insn;
10253 if (dump_file && (dump_flags & TDF_DETAILS))
10254 log_op_type (PATTERN (insn), bb, insn,
10255 MO_ADJUST, dump_file);
10256 VTI (bb)->mos.safe_push (mo);
10257 VTI (bb)->out.stack_adjust += post;
10258 }
10259
10260 if (fp_cfa_offset != -1
10261 && hard_frame_pointer_adjustment == -1
10262 && fp_setter_insn (insn))
10263 {
10264 vt_init_cfa_base ();
10265 hard_frame_pointer_adjustment = fp_cfa_offset;
10266 /* Disassociate sp from fp now. */
10267 if (MAY_HAVE_DEBUG_BIND_INSNS)
10268 {
10269 cselib_val *v;
10270 cselib_invalidate_rtx (stack_pointer_rtx);
10271 v = cselib_lookup (stack_pointer_rtx, Pmode, 1,
10272 VOIDmode);
10273 if (v && !cselib_preserved_value_p (v))
10274 {
10275 cselib_set_value_sp_based (v);
10276 preserve_value (v);
10277 }
10278 }
10279 }
10280 }
10281 }
10282 gcc_assert (offset == VTI (bb)->out.stack_adjust);
10283 }
10284
10285 bb = last_bb;
10286
10287 if (MAY_HAVE_DEBUG_BIND_INSNS)
10288 {
10289 cselib_preserve_only_values ();
10290 cselib_reset_table (cselib_get_next_uid ());
10291 cselib_record_sets_hook = NULL;
10292 }
10293 }
10294
10295 hard_frame_pointer_adjustment = -1;
10296 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->flooded = true;
10297 cfa_base_rtx = NULL_RTX;
10298 return true;
10299 }
10300
10301 /* This is *not* reset after each function. It gives each
10302 NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
10303 a unique label number. */
10304
10305 static int debug_label_num = 1;
10306
10307 /* Remove from the insn stream a single debug insn used for
10308 variable tracking at assignments. */
10309
10310 static inline void
10311 delete_vta_debug_insn (rtx_insn *insn)
10312 {
10313 if (DEBUG_MARKER_INSN_P (insn))
10314 {
10315 reemit_marker_as_note (insn);
10316 return;
10317 }
10318
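/* Debug binds for named labels that never got RTL are turned into
   NOTE_INSN_DELETED_DEBUG_LABEL notes so the label name can still be
   emitted for debug info; any other debug bind is simply deleted.  */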
10319 tree decl = INSN_VAR_LOCATION_DECL (insn);
10320 if (TREE_CODE (decl) == LABEL_DECL
10321 && DECL_NAME (decl)
10322 && !DECL_RTL_SET_P (decl))
10323 {
10324 PUT_CODE (insn, NOTE);
10325 NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
10326 NOTE_DELETED_LABEL_NAME (insn)
10327 = IDENTIFIER_POINTER (DECL_NAME (decl));
10328 SET_DECL_RTL (decl, insn);
10329 CODE_LABEL_NUMBER (insn) = debug_label_num++;
10330 }
10331 else
10332 delete_insn (insn);
10333 }
10334
10335 /* Remove from the insn stream all debug insns used for variable
10336 tracking at assignments. USE_CFG should be false if the cfg is no
10337 longer usable. */
10338
10339 void
10340 delete_vta_debug_insns (bool use_cfg)
10341 {
10342 basic_block bb;
10343 rtx_insn *insn, *next;
10344
10345 if (!MAY_HAVE_DEBUG_INSNS)
10346 return;
10347
10348 if (use_cfg)
10349 FOR_EACH_BB_FN (bb, cfun)
10350 {
10351 FOR_BB_INSNS_SAFE (bb, insn, next)
10352 if (DEBUG_INSN_P (insn))
10353 delete_vta_debug_insn (insn);
10354 }
10355 else
10356 for (insn = get_insns (); insn; insn = next)
10357 {
10358 next = NEXT_INSN (insn);
10359 if (DEBUG_INSN_P (insn))
10360 delete_vta_debug_insn (insn);
10361 }
10362 }
10363
10364 /* Run a fast, BB-local-only version of var tracking, to take care of
10365 information that we don't do global analysis on, so that not all
10366 information is lost. If SKIPPED holds, we're skipping the global
10367 pass entirely, so we should try to use information it would have
10368 handled as well. */
10369
10370 static void
10371 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
10372 {
10373 /* ??? Just skip it all for now. */
10374 delete_vta_debug_insns (true);
10375 }
10376
10377 /* Free the data structures needed for variable tracking. */
10378
10379 static void
10380 vt_finalize (void)
10381 {
10382 basic_block bb;
10383
10384 FOR_EACH_BB_FN (bb, cfun)
10385 {
10386 VTI (bb)->mos.release ();
10387 }
10388
10389 FOR_ALL_BB_FN (bb, cfun)
10390 {
10391 dataflow_set_destroy (&VTI (bb)->in);
10392 dataflow_set_destroy (&VTI (bb)->out);
10393 if (VTI (bb)->permp)
10394 {
10395 dataflow_set_destroy (VTI (bb)->permp);
10396 XDELETE (VTI (bb)->permp);
10397 }
10398 }
10399 free_aux_for_blocks ();
10400 delete empty_shared_hash->htab;
10401 empty_shared_hash->htab = NULL;
10402 delete changed_variables;
10403 changed_variables = NULL;
10404 attrs_pool.release ();
10405 var_pool.release ();
10406 location_chain_pool.release ();
10407 shared_hash_pool.release ();
10408
10409 if (MAY_HAVE_DEBUG_BIND_INSNS)
10410 {
10411 if (global_get_addr_cache)
10412 delete global_get_addr_cache;
10413 global_get_addr_cache = NULL;
10414 loc_exp_dep_pool.release ();
10415 valvar_pool.release ();
10416 preserved_values.release ();
10417 cselib_finish ();
10418 BITMAP_FREE (scratch_regs);
10419 scratch_regs = NULL;
10420 }
10421
10422 #ifdef HAVE_window_save
10423 vec_free (windowed_parm_regs);
10424 #endif
10425
10426 if (vui_vec)
10427 XDELETEVEC (vui_vec);
10428 vui_vec = NULL;
10429 vui_allocated = 0;
10430 }
10431
10432 /* The entry point to variable tracking pass. */
10433
10434 static inline unsigned int
10435 variable_tracking_main_1 (void)
10436 {
10437 bool success;
10438
10439 /* We won't be called as a separate pass if flag_var_tracking is not
10440 set, but final may call us to turn debug markers into notes. */
10441 if ((!flag_var_tracking && MAY_HAVE_DEBUG_INSNS)
10442 || flag_var_tracking_assignments < 0
10443 /* Var-tracking right now assumes the IR doesn't contain
10444 any pseudos at this point. */
10445 || targetm.no_register_allocation)
10446 {
10447 delete_vta_debug_insns (true);
10448 return 0;
10449 }
10450
10451 if (!flag_var_tracking)
10452 return 0;
10453
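/* Heuristically punt on very large, dense CFGs; fall back to the fast
   BB-local handling of debug insns instead of the global analysis.  */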
10454 if (n_basic_blocks_for_fn (cfun) > 500
10455 && n_edges_for_fn (cfun) / n_basic_blocks_for_fn (cfun) >= 20)
10456 {
10457 vt_debug_insns_local (true);
10458 return 0;
10459 }
10460
10461 mark_dfs_back_edges ();
10462 if (!vt_initialize ())
10463 {
10464 vt_finalize ();
10465 vt_debug_insns_local (true);
10466 return 0;
10467 }
10468
10469 success = vt_find_locations ();
10470
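/* If the analysis failed while tracking at the assignment level
   (typically because the dataflow sets grew beyond the allowed
   limits), drop the debug bind insns and retry with plain variable
   tracking only.  */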
10471 if (!success && flag_var_tracking_assignments > 0)
10472 {
10473 vt_finalize ();
10474
10475 delete_vta_debug_insns (true);
10476
10477 /* This is later restored by our caller. */
10478 flag_var_tracking_assignments = 0;
10479
10480 success = vt_initialize ();
10481 gcc_assert (success);
10482
10483 success = vt_find_locations ();
10484 }
10485
10486 if (!success)
10487 {
10488 vt_finalize ();
10489 vt_debug_insns_local (false);
10490 return 0;
10491 }
10492
10493 if (dump_file && (dump_flags & TDF_DETAILS))
10494 {
10495 dump_dataflow_sets ();
10496 dump_reg_info (dump_file);
10497 dump_flow_info (dump_file, dump_flags);
10498 }
10499
10500 timevar_push (TV_VAR_TRACKING_EMIT);
10501 vt_emit_notes ();
10502 timevar_pop (TV_VAR_TRACKING_EMIT);
10503
10504 vt_finalize ();
10505 vt_debug_insns_local (false);
10506 return 0;
10507 }
10508
10509 unsigned int
10510 variable_tracking_main (void)
10511 {
10512 unsigned int ret;
10513 int save = flag_var_tracking_assignments;
10514
10515 ret = variable_tracking_main_1 ();
10516
10517 flag_var_tracking_assignments = save;
10518
10519 return ret;
10520 }
10521
10522 namespace {
10523
10524 const pass_data pass_data_variable_tracking =
10525 {
10526 RTL_PASS, /* type */
10527 "vartrack", /* name */
10528 OPTGROUP_NONE, /* optinfo_flags */
10529 TV_VAR_TRACKING, /* tv_id */
10530 0, /* properties_required */
10531 0, /* properties_provided */
10532 0, /* properties_destroyed */
10533 0, /* todo_flags_start */
10534 0, /* todo_flags_finish */
10535 };
10536
10537 class pass_variable_tracking : public rtl_opt_pass
10538 {
10539 public:
10540 pass_variable_tracking (gcc::context *ctxt)
10541 : rtl_opt_pass (pass_data_variable_tracking, ctxt)
10542 {}
10543
10544 /* opt_pass methods: */
10545 virtual bool gate (function *)
10546 {
10547 return (flag_var_tracking && !targetm.delay_vartrack);
10548 }
10549
10550 virtual unsigned int execute (function *)
10551 {
10552 return variable_tracking_main ();
10553 }
10554
10555 }; // class pass_variable_tracking
10556
10557 } // anon namespace
10558
10559 rtl_opt_pass *
10560 make_pass_variable_tracking (gcc::context *ctxt)
10561 {
10562 return new pass_variable_tracking (ctxt);
10563 }
10564