//===- llvm/CodeGen/LiveInterval.h - Interval representation ----*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the LiveRange and LiveInterval classes.  Given some
// numbering of each of the machine instructions, an interval [i, j) is said to
// be a live range for register v if there is no instruction with number j' >= j
// such that v is live at j' and there is no instruction with number i' < i such
// that v is live at i'. In this implementation ranges can have holes,
// i.e. a range might look like [1,20), [50,65), [1000,1001).  Each
// individual segment is represented as an instance of LiveRange::Segment,
// and the whole range is represented as an instance of LiveRange.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_CODEGEN_LIVEINTERVAL_H
#define LLVM_CODEGEN_LIVEINTERVAL_H

#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/IntEqClasses.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/CodeGen/Register.h"
#include "llvm/CodeGen/SlotIndexes.h"
#include "llvm/MC/LaneBitmask.h"
#include "llvm/Support/Allocator.h"
#include "llvm/Support/MathExtras.h"
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <functional>
#include <memory>
#include <set>
#include <tuple>
#include <utility>

namespace llvm {

  class CoalescerPair;
  class LiveIntervals;
  class MachineRegisterInfo;
  class raw_ostream;

  /// VNInfo - Value Number Information.
  /// This class holds information about a machine level value, including
  /// definition and use points.
  ///
  class VNInfo {
  public:
    using Allocator = BumpPtrAllocator;

    /// The ID number of this value.
    unsigned id;

    /// The index of the defining instruction.
    SlotIndex def;

    /// VNInfo constructor.
    VNInfo(unsigned i, SlotIndex d) : id(i), def(d) {}

    /// VNInfo constructor, copies values from orig, except for the value number.
    VNInfo(unsigned i, const VNInfo &orig) : id(i), def(orig.def) {}

    /// Copy from the parameter into this VNInfo.
    void copyFrom(VNInfo &src) {
      def = src.def;
    }

    /// Returns true if this value is defined by a PHI instruction (or was,
    /// PHI instructions may have been eliminated).
    /// PHI-defs begin at a block boundary, all other defs begin at register or
    /// EC slots.
    bool isPHIDef() const { return def.isBlock(); }

    /// Returns true if this value is unused.
    bool isUnused() const { return !def.isValid(); }

    /// Mark this value as unused.
    void markUnused() { def = SlotIndex(); }
  };

  /// Result of a LiveRange query. This class hides the implementation details
  /// of live ranges, and it should be used as the primary interface for
  /// examining live ranges around instructions.
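  ///
  /// For example, a minimal sketch of a typical query (LR and Idx are
  /// placeholder names for an existing LiveRange and the SlotIndex of the
  /// instruction being examined):
  ///
  ///   LiveQueryResult LRQ = LR.Query(Idx);
  ///   VNInfo *LiveIn = LRQ.valueIn();    // Value read by use operands, if any.
  ///   bool Killed = LRQ.isKill();        // Live-in value does not survive.
  ///   VNInfo *LiveOut = LRQ.valueOut();  // Value leaving the instruction.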
  class LiveQueryResult {
    VNInfo *const EarlyVal;
    VNInfo *const LateVal;
    const SlotIndex EndPoint;
    const bool Kill;

  public:
    LiveQueryResult(VNInfo *EarlyVal, VNInfo *LateVal, SlotIndex EndPoint,
                    bool Kill)
      : EarlyVal(EarlyVal), LateVal(LateVal), EndPoint(EndPoint), Kill(Kill)
    {}

    /// Return the value that is live-in to the instruction. This is the value
    /// that will be read by the instruction's use operands. Return NULL if no
    /// value is live-in.
    VNInfo *valueIn() const {
      return EarlyVal;
    }

    /// Return true if the live-in value is killed by this instruction. This
    /// means that either the live range ends at the instruction, or it changes
    /// value.
    bool isKill() const {
      return Kill;
    }

    /// Return true if this instruction has a dead def.
    bool isDeadDef() const {
      return EndPoint.isDead();
    }

    /// Return the value leaving the instruction, if any. This can be a
    /// live-through value, or a live def. A dead def returns NULL.
    VNInfo *valueOut() const {
      return isDeadDef() ? nullptr : LateVal;
    }

    /// Returns the value alive at the end of the instruction, if any. This can
    /// be a live-through value, a live def or a dead def.
    VNInfo *valueOutOrDead() const {
      return LateVal;
    }

    /// Return the value defined by this instruction, if any. This includes
    /// dead defs, it is the value created by the instruction's def operands.
    VNInfo *valueDefined() const {
      return EarlyVal == LateVal ? nullptr : LateVal;
    }

    /// Return the end point of the last live range segment to interact with
    /// the instruction, if any.
    ///
    /// The end point is an invalid SlotIndex only if the live range doesn't
    /// intersect the instruction at all.
    ///
    /// The end point may be at or past the end of the instruction's basic
    /// block. That means the value was live out of the block.
    SlotIndex endPoint() const {
      return EndPoint;
    }
  };

  /// This class represents the liveness of a register, stack slot, etc.
  /// It manages an ordered list of Segment objects.
  /// The Segments are organized in a static single assignment form: At places
  /// where a new value is defined or different values reach a CFG join, a new
  /// segment with a new value number is used.
  class LiveRange {
  public:
    /// This represents a simple continuous liveness interval for a value.
    /// The start point is inclusive, the end point exclusive. These intervals
    /// are rendered as [start,end).
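    ///
    /// For example (a sketch with hypothetical indexes), a segment covering
    /// [16r,32r) contains 16r and 24r but not 32r, because the end point is
    /// exclusive.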
    struct Segment {
      SlotIndex start;  // Start point of the interval (inclusive)
      SlotIndex end;    // End point of the interval (exclusive)
      VNInfo *valno = nullptr; // identifier for the value contained in this
                               // segment.

      Segment() = default;

      Segment(SlotIndex S, SlotIndex E, VNInfo *V)
        : start(S), end(E), valno(V) {
        assert(S < E && "Cannot create empty or backwards segment");
      }

      /// Return true if the index is covered by this segment.
      bool contains(SlotIndex I) const {
        return start <= I && I < end;
      }

      /// Return true if the given interval, [S, E), is covered by this segment.
      bool containsInterval(SlotIndex S, SlotIndex E) const {
        assert((S < E) && "Backwards interval?");
        return (start <= S && S < end) && (start < E && E <= end);
      }

      bool operator<(const Segment &Other) const {
        return std::tie(start, end) < std::tie(Other.start, Other.end);
      }
      bool operator==(const Segment &Other) const {
        return start == Other.start && end == Other.end;
      }

      bool operator!=(const Segment &Other) const {
        return !(*this == Other);
      }

      void dump() const;
    };

    using Segments = SmallVector<Segment, 2>;
    using VNInfoList = SmallVector<VNInfo *, 2>;

    Segments segments;   // the liveness segments
    VNInfoList valnos;   // value#'s

    // The segment set is used temporarily to accelerate initial computation
    // of live ranges of physical registers in computeRegUnitRange.
    // After that the set is flushed to the segment vector and deleted.
    using SegmentSet = std::set<Segment>;
    std::unique_ptr<SegmentSet> segmentSet;

    using iterator = Segments::iterator;
    using const_iterator = Segments::const_iterator;

    iterator begin() { return segments.begin(); }
    iterator end()   { return segments.end(); }

    const_iterator begin() const { return segments.begin(); }
    const_iterator end() const  { return segments.end(); }

    using vni_iterator = VNInfoList::iterator;
    using const_vni_iterator = VNInfoList::const_iterator;

    vni_iterator vni_begin() { return valnos.begin(); }
    vni_iterator vni_end()   { return valnos.end(); }

    const_vni_iterator vni_begin() const { return valnos.begin(); }
    const_vni_iterator vni_end() const   { return valnos.end(); }

    iterator_range<vni_iterator> vnis() {
      return make_range(vni_begin(), vni_end());
    }

    iterator_range<const_vni_iterator> vnis() const {
      return make_range(vni_begin(), vni_end());
    }

    /// Constructs a new LiveRange object.
    LiveRange(bool UseSegmentSet = false)
        : segmentSet(UseSegmentSet ? std::make_unique<SegmentSet>()
                                   : nullptr) {}

    /// Constructs a new LiveRange object by copying segments and valnos from
    /// another LiveRange.
    LiveRange(const LiveRange &Other, BumpPtrAllocator &Allocator) {
      assert(Other.segmentSet == nullptr &&
             "Copying of LiveRanges with active SegmentSets is not supported");
      assign(Other, Allocator);
    }

    /// Copies value numbers and live segments from \p Other into this range.
    void assign(const LiveRange &Other, BumpPtrAllocator &Allocator) {
      if (this == &Other)
        return;

      assert(Other.segmentSet == nullptr &&
             "Copying of LiveRanges with active SegmentSets is not supported");
      // Duplicate valnos.
      for (const VNInfo *VNI : Other.valnos)
        createValueCopy(VNI, Allocator);
      // Now we can copy segments and remap their valnos.
      for (const Segment &S : Other.segments)
        segments.push_back(Segment(S.start, S.end, valnos[S.valno->id]));
    }

    /// advanceTo - Advance the specified iterator to point to the Segment
    /// containing the specified position, or end() if the position is past the
    /// end of the range.  If no Segment contains this position, but the
    /// position is in a hole, this method returns an iterator pointing to the
    /// Segment immediately after the hole.
    iterator advanceTo(iterator I, SlotIndex Pos) {
      assert(I != end());
      if (Pos >= endIndex())
        return end();
      while (I->end <= Pos) ++I;
      return I;
    }

    const_iterator advanceTo(const_iterator I, SlotIndex Pos) const {
      assert(I != end());
      if (Pos >= endIndex())
        return end();
      while (I->end <= Pos) ++I;
      return I;
    }

    /// find - Return an iterator pointing to the first segment that ends after
    /// Pos, or end(). This is the same as advanceTo(begin(), Pos), but faster
    /// when searching large ranges.
    ///
    /// If Pos is contained in a Segment, that segment is returned.
    /// If Pos is in a hole, the following Segment is returned.
    /// If Pos is beyond endIndex, end() is returned.
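    ///
    /// For example, a sketch of scanning all segments that overlap a window
    /// [Start, End), where LR, Start and End are placeholder names:
    ///
    ///   for (LiveRange::const_iterator I = LR.find(Start), E = LR.end();
    ///        I != E && I->start < End; ++I) {
    ///     // *I overlaps [Start, End).
    ///   }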
    iterator find(SlotIndex Pos);

    const_iterator find(SlotIndex Pos) const {
      return const_cast<LiveRange*>(this)->find(Pos);
    }

    void clear() {
      valnos.clear();
      segments.clear();
    }

    size_t size() const {
      return segments.size();
    }

    bool hasAtLeastOneValue() const { return !valnos.empty(); }

    bool containsOneValue() const { return valnos.size() == 1; }

    unsigned getNumValNums() const { return (unsigned)valnos.size(); }

    /// getValNumInfo - Returns pointer to the specified val#.
    ///
    inline VNInfo *getValNumInfo(unsigned ValNo) {
      return valnos[ValNo];
    }
    inline const VNInfo *getValNumInfo(unsigned ValNo) const {
      return valnos[ValNo];
    }

    /// containsValue - Returns true if VNI belongs to this range.
    bool containsValue(const VNInfo *VNI) const {
      return VNI && VNI->id < getNumValNums() && VNI == getValNumInfo(VNI->id);
    }

    /// getNextValue - Create a new value number and return it.  \p def
    /// specifies the index of the instruction that defines the value number.
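    ///
    /// For example, a sketch of the typical pattern for defining a new value
    /// and making it live over [DefIdx, KillIdx), where the indexes and the
    /// allocator Alloc are placeholder names:
    ///
    ///   VNInfo *VNI = LR.getNextValue(DefIdx, Alloc);
    ///   LR.addSegment(LiveRange::Segment(DefIdx, KillIdx, VNI));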
    VNInfo *getNextValue(SlotIndex def, VNInfo::Allocator &VNInfoAllocator) {
      VNInfo *VNI =
        new (VNInfoAllocator) VNInfo((unsigned)valnos.size(), def);
      valnos.push_back(VNI);
      return VNI;
    }

    /// createDeadDef - Make sure the range has a value defined at Def.
    /// If one already exists, return it. Otherwise allocate a new value and
    /// add liveness for a dead def.
    VNInfo *createDeadDef(SlotIndex Def, VNInfo::Allocator &VNIAlloc);

    /// Create a def of value @p VNI. Return @p VNI. If there already exists
    /// a definition at VNI->def, the value defined there must be @p VNI.
    VNInfo *createDeadDef(VNInfo *VNI);

    /// Create a copy of the given value. The new value will be identical except
    /// for the Value number.
    VNInfo *createValueCopy(const VNInfo *orig,
                            VNInfo::Allocator &VNInfoAllocator) {
      VNInfo *VNI =
        new (VNInfoAllocator) VNInfo((unsigned)valnos.size(), *orig);
      valnos.push_back(VNI);
      return VNI;
    }

    /// RenumberValues - Renumber all values in order of appearance and remove
    /// unused values.
    void RenumberValues();

    /// MergeValueNumberInto - This method is called when two value numbers
    /// are found to be equivalent.  This eliminates V1, replacing all
    /// segments with the V1 value number with the V2 value number.  This can
    /// cause merging of V1/V2 value numbers and compaction of the value space.
    VNInfo* MergeValueNumberInto(VNInfo *V1, VNInfo *V2);

    /// Merge all of the live segments of a specific val# in RHS into this live
    /// range as the specified value number. The segments in RHS are allowed
    /// to overlap with segments in the current range; the value numbers of the
    /// overlapping live segments are replaced with the specified value number.
    void MergeSegmentsInAsValue(const LiveRange &RHS, VNInfo *LHSValNo);

    /// MergeValueInAsValue - Merge all of the segments of a specific val#
    /// in RHS into this live range as the specified value number.
    /// The segments in RHS are allowed to overlap with segments in the
    /// current range, but only if the overlapping segments have the
    /// specified value number.
    void MergeValueInAsValue(const LiveRange &RHS,
                             const VNInfo *RHSValNo, VNInfo *LHSValNo);

    bool empty() const { return segments.empty(); }

    /// beginIndex - Return the lowest numbered slot covered.
    SlotIndex beginIndex() const {
      assert(!empty() && "Call to beginIndex() on empty range.");
      return segments.front().start;
    }

    /// endIndex - Return the maximum point of the whole range, exclusive.
    SlotIndex endIndex() const {
      assert(!empty() && "Call to endIndex() on empty range.");
      return segments.back().end;
    }

    bool expiredAt(SlotIndex index) const {
      return index >= endIndex();
    }

    bool liveAt(SlotIndex index) const {
      const_iterator r = find(index);
      return r != end() && r->start <= index;
    }

    /// Return the segment that contains the specified index, or null if there
    /// is none.
    const Segment *getSegmentContaining(SlotIndex Idx) const {
      const_iterator I = FindSegmentContaining(Idx);
      return I == end() ? nullptr : &*I;
    }

    /// Return the live segment that contains the specified index, or null if
    /// there is none.
    Segment *getSegmentContaining(SlotIndex Idx) {
      iterator I = FindSegmentContaining(Idx);
      return I == end() ? nullptr : &*I;
    }

    /// getVNInfoAt - Return the VNInfo that is live at Idx, or NULL.
    VNInfo *getVNInfoAt(SlotIndex Idx) const {
      const_iterator I = FindSegmentContaining(Idx);
      return I == end() ? nullptr : I->valno;
    }

    /// getVNInfoBefore - Return the VNInfo that is live up to but not
    /// necessarily including Idx, or NULL. Use this to find the reaching def
    /// used by an instruction at this SlotIndex position.
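    ///
    /// For example, one possible way (a sketch; LIS, MI and LR are placeholder
    /// names, assuming a LiveIntervals analysis is available) to find the def
    /// reaching a use in machine instruction MI:
    ///
    ///   SlotIndex UseIdx = LIS.getInstructionIndex(MI).getRegSlot();
    ///   VNInfo *Reaching = LR.getVNInfoBefore(UseIdx);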
    VNInfo *getVNInfoBefore(SlotIndex Idx) const {
      const_iterator I = FindSegmentContaining(Idx.getPrevSlot());
      return I == end() ? nullptr : I->valno;
    }

    /// Return an iterator to the segment that contains the specified index, or
    /// end() if there is none.
    iterator FindSegmentContaining(SlotIndex Idx) {
      iterator I = find(Idx);
      return I != end() && I->start <= Idx ? I : end();
    }

    const_iterator FindSegmentContaining(SlotIndex Idx) const {
      const_iterator I = find(Idx);
      return I != end() && I->start <= Idx ? I : end();
    }

    /// overlaps - Return true if the intersection of the two live ranges is
    /// not empty.
    bool overlaps(const LiveRange &other) const {
      if (other.empty())
        return false;
      return overlapsFrom(other, other.begin());
    }

    /// overlaps - Return true if the two ranges have overlapping segments
    /// that are not coalescable according to CP.
    ///
    /// Overlapping segments where one range is defined by a coalescable
    /// copy are allowed.
    bool overlaps(const LiveRange &Other, const CoalescerPair &CP,
                  const SlotIndexes&) const;

    /// overlaps - Return true if the live range overlaps an interval specified
    /// by [Start, End).
    bool overlaps(SlotIndex Start, SlotIndex End) const;

    /// overlapsFrom - Return true if the intersection of the two live ranges
    /// is not empty.  The specified iterator is a hint that we can begin
    /// scanning the Other range starting at I.
    bool overlapsFrom(const LiveRange &Other, const_iterator StartPos) const;

    /// Returns true if all segments of the @p Other live range are completely
    /// covered by this live range.
    /// Adjacent segments do not affect the covering: the live range
    /// [1,5), [5,10) covers [3,7).
    bool covers(const LiveRange &Other) const;

    /// Add the specified Segment to this range, merging segments as
    /// appropriate.  This returns an iterator to the inserted segment (which
    /// may have grown since it was inserted).
    iterator addSegment(Segment S);

    /// Attempt to extend a value defined after @p StartIdx to include @p Use.
    /// Both @p StartIdx and @p Use should be in the same basic block. In case
    /// of subranges, an extension could be prevented by an explicit "undef"
    /// caused by a <def,read-undef> on a non-overlapping lane. The list of
    /// locations of such "undefs" should be provided in @p Undefs.
    /// The return value is a pair: the first element is the VNInfo of the
    /// value that was extended (possibly nullptr), the second is a boolean
    /// value indicating whether an "undef" was encountered.
    /// If this range is live before @p Use in the basic block that starts at
    /// @p StartIdx, and there is no intervening "undef", extend it to be live
    /// up to @p Use, and return the pair {value, false}. If there is no
    /// segment before @p Use and there is no "undef" between @p StartIdx and
    /// @p Use, return {nullptr, false}. If there is an "undef" before @p Use,
    /// return {nullptr, true}.
    std::pair<VNInfo*,bool> extendInBlock(ArrayRef<SlotIndex> Undefs,
        SlotIndex StartIdx, SlotIndex Kill);

    /// Simplified version of the above "extendInBlock", which assumes that
    /// no register lanes are undefined by <def,read-undef> operands.
    /// If this range is live before @p Use in the basic block that starts
    /// at @p StartIdx, extend it to be live up to @p Use, and return the
    /// value. If there is no segment before @p Use, return nullptr.
    VNInfo *extendInBlock(SlotIndex StartIdx, SlotIndex Kill);

    /// join - Join two live ranges (this, and other) together.  This applies
    /// mappings to the value numbers in the LHS/RHS ranges as specified.  If
    /// the ranges are not joinable, this aborts.
    void join(LiveRange &Other,
              const int *ValNoAssignments,
              const int *RHSValNoAssignments,
              SmallVectorImpl<VNInfo *> &NewVNInfo);

    /// True iff this live range consists of a single segment that lies
    /// strictly between the specified boundaries. Vregs live across a backedge
    /// are not considered local. The boundaries are expected to lie within an
    /// extended basic block, so vregs that are not live out should contain no
    /// holes.
    bool isLocal(SlotIndex Start, SlotIndex End) const {
      return beginIndex() > Start.getBaseIndex() &&
        endIndex() < End.getBoundaryIndex();
    }

    /// Remove the specified segment from this range.  Note that the segment
    /// must be a single Segment in its entirety.
    void removeSegment(SlotIndex Start, SlotIndex End,
                       bool RemoveDeadValNo = false);

    void removeSegment(Segment S, bool RemoveDeadValNo = false) {
      removeSegment(S.start, S.end, RemoveDeadValNo);
    }

    /// Remove segment pointed to by iterator @p I from this range.
    iterator removeSegment(iterator I, bool RemoveDeadValNo = false);

    /// Mark \p ValNo for deletion if no segments in this range use it.
    void removeValNoIfDead(VNInfo *ValNo);

    /// Query liveness at Idx.
    /// The sub-instruction slot of Idx doesn't matter, only the instruction
    /// it refers to is considered.
    LiveQueryResult Query(SlotIndex Idx) const {
      // Find the segment that enters the instruction.
      const_iterator I = find(Idx.getBaseIndex());
      const_iterator E = end();
      if (I == E)
        return LiveQueryResult(nullptr, nullptr, SlotIndex(), false);

      // Is this an instruction live-in segment?
      // If Idx is the start index of a basic block, include live-in segments
      // that start at Idx.getBaseIndex().
      VNInfo *EarlyVal = nullptr;
      VNInfo *LateVal  = nullptr;
      SlotIndex EndPoint;
      bool Kill = false;
      if (I->start <= Idx.getBaseIndex()) {
        EarlyVal = I->valno;
        EndPoint = I->end;
        // Move to the potentially live-out segment.
        if (SlotIndex::isSameInstr(Idx, I->end)) {
          Kill = true;
          if (++I == E)
            return LiveQueryResult(EarlyVal, LateVal, EndPoint, Kill);
        }
        // Special case: A PHIDef value can have its def in the middle of a
        // segment if the value happens to be live out of the layout
        // predecessor.
        // Such a value is not live-in.
        if (EarlyVal->def == Idx.getBaseIndex())
          EarlyVal = nullptr;
      }
      // I now points to the segment that may be live-through, or defined by
      // this instr. Ignore segments starting after the current instr.
      if (!SlotIndex::isEarlierInstr(Idx, I->start)) {
        LateVal = I->valno;
        EndPoint = I->end;
      }
      return LiveQueryResult(EarlyVal, LateVal, EndPoint, Kill);
    }

    /// removeValNo - Remove all the segments defined by the specified value#.
    /// Also remove the value# from the value# list.
    void removeValNo(VNInfo *ValNo);

    /// Returns true if the live range is zero length, i.e. no live segments
    /// span instructions. It doesn't pay to spill such a range.
    bool isZeroLength(SlotIndexes *Indexes) const {
      for (const Segment &S : segments)
        if (Indexes->getNextNonNullIndex(S.start).getBaseIndex() <
            S.end.getBaseIndex())
          return false;
      return true;
    }

    // Returns true if any segment in the live range contains any of the
    // provided slot indexes.  Slots which occur in holes between
    // segments will not cause the function to return true.
    bool isLiveAtIndexes(ArrayRef<SlotIndex> Slots) const;

    bool operator<(const LiveRange& other) const {
      const SlotIndex &thisIndex = beginIndex();
      const SlotIndex &otherIndex = other.beginIndex();
      return thisIndex < otherIndex;
    }

    /// Returns true if there is an explicit "undef" between @p Begin
    /// and @p End.
    bool isUndefIn(ArrayRef<SlotIndex> Undefs, SlotIndex Begin,
                   SlotIndex End) const {
      return llvm::any_of(Undefs, [Begin, End](SlotIndex Idx) -> bool {
        return Begin <= Idx && Idx < End;
      });
    }

    /// Flush the segment set into the regular segment vector.
    /// This method should be called after the live range has been created, if
    /// use of the segment set was enabled in the constructor of the live range.
    void flushSegmentSet();

    /// Stores the indexes from the input index sequence R at which this
    /// LiveRange is live into the output iterator O.
    /// R must be an ascending-sorted range with random-access iterators to the
    /// input indexes. The indexes stored at O are ascending-sorted as well, so
    /// they can be used directly in a subsequent search (for example, for
    /// subranges). Returns true if at least one index was found.
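    ///
    /// For example, a sketch of collecting the live indexes into a vector,
    /// where LR and Slots (an ascending-sorted sequence of SlotIndexes) are
    /// placeholder names:
    ///
    ///   SmallVector<SlotIndex, 8> LiveIdxs;
    ///   bool AnyLive = LR.findIndexesLiveAt(Slots, std::back_inserter(LiveIdxs));
    ///   // LiveIdxs now holds the subset of Slots at which LR is live.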
    template <typename Range, typename OutputIt>
    bool findIndexesLiveAt(Range &&R, OutputIt O) const {
      assert(llvm::is_sorted(R));
      auto Idx = R.begin(), EndIdx = R.end();
      auto Seg = segments.begin(), EndSeg = segments.end();
      bool Found = false;
      while (Idx != EndIdx && Seg != EndSeg) {
        // If Seg ends at or before *Idx, binary search for the first segment
        // that ends after *Idx.
        if (Seg->end <= *Idx) {
          Seg =
              std::upper_bound(++Seg, EndSeg, *Idx, [=](auto V, const auto &S) {
                return V < S.end;
              });
          if (Seg == EndSeg)
            break;
        }
        auto NotLessStart = std::lower_bound(Idx, EndIdx, Seg->start);
        if (NotLessStart == EndIdx)
          break;
        auto NotLessEnd = std::lower_bound(NotLessStart, EndIdx, Seg->end);
        if (NotLessEnd != NotLessStart) {
          Found = true;
          O = std::copy(NotLessStart, NotLessEnd, O);
        }
        Idx = NotLessEnd;
        ++Seg;
      }
      return Found;
    }

    void print(raw_ostream &OS) const;
    void dump() const;

    /// Walk the range and assert if any invariants fail to hold.
    ///
    /// Note that this is a no-op when asserts are disabled.
#ifdef NDEBUG
    void verify() const {}
#else
    void verify() const;
#endif

  protected:
    /// Append a segment to the list of segments.
    void append(const LiveRange::Segment S);

  private:
    friend class LiveRangeUpdater;
    void addSegmentToSet(Segment S);
    void markValNoForDeletion(VNInfo *V);
  };

  inline raw_ostream &operator<<(raw_ostream &OS, const LiveRange &LR) {
    LR.print(OS);
    return OS;
  }

  /// LiveInterval - This class represents the liveness of a register
  /// or stack slot.
  class LiveInterval : public LiveRange {
  public:
    using super = LiveRange;

    /// A live range for subregisters. The LaneMask specifies which parts of the
    /// super register are covered by the interval.
    /// (@sa TargetRegisterInfo::getSubRegIndexLaneMask()).
    class SubRange : public LiveRange {
    public:
      SubRange *Next = nullptr;
      LaneBitmask LaneMask;

      /// Constructs a new SubRange object.
      SubRange(LaneBitmask LaneMask) : LaneMask(LaneMask) {}

      /// Constructs a new SubRange object by copying liveness from @p Other.
      SubRange(LaneBitmask LaneMask, const LiveRange &Other,
               BumpPtrAllocator &Allocator)
        : LiveRange(Other, Allocator), LaneMask(LaneMask) {}

      void print(raw_ostream &OS) const;
      void dump() const;
    };

  private:
    SubRange *SubRanges = nullptr; ///< Singly linked list of subregister live
                                   /// ranges.
    const Register Reg; // the register or stack slot of this interval.
    float Weight = 0.0; // weight of this interval

  public:
    Register reg() const { return Reg; }
    float weight() const { return Weight; }
    void incrementWeight(float Inc) { Weight += Inc; }
    void setWeight(float Value) { Weight = Value; }

    LiveInterval(unsigned Reg, float Weight) : Reg(Reg), Weight(Weight) {}

    ~LiveInterval() {
      clearSubRanges();
    }

    template<typename T>
    class SingleLinkedListIterator {
      T *P;

    public:
      SingleLinkedListIterator(T *P) : P(P) {}

      SingleLinkedListIterator<T> &operator++() {
        P = P->Next;
        return *this;
      }
      SingleLinkedListIterator<T> operator++(int) {
        SingleLinkedListIterator res = *this;
        ++*this;
        return res;
      }
      bool operator!=(const SingleLinkedListIterator<T> &Other) const {
        return P != Other.operator->();
      }
      bool operator==(const SingleLinkedListIterator<T> &Other) const {
        return P == Other.operator->();
      }
      T &operator*() const {
        return *P;
      }
      T *operator->() const {
        return P;
      }
    };

    using subrange_iterator = SingleLinkedListIterator<SubRange>;
    using const_subrange_iterator = SingleLinkedListIterator<const SubRange>;

    subrange_iterator subrange_begin() {
      return subrange_iterator(SubRanges);
    }
    subrange_iterator subrange_end() {
      return subrange_iterator(nullptr);
    }

    const_subrange_iterator subrange_begin() const {
      return const_subrange_iterator(SubRanges);
    }
    const_subrange_iterator subrange_end() const {
      return const_subrange_iterator(nullptr);
    }

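    /// Range over all subregister live ranges of this interval. For example,
    /// a sketch of visiting the subranges that carry liveness for any lane in
    /// a mask, where LI and Mask are placeholder names:
    ///
    ///   for (LiveInterval::SubRange &SR : LI.subranges()) {
    ///     if ((SR.LaneMask & Mask).none())
    ///       continue;
    ///     // SR describes liveness for at least one lane in Mask.
    ///   }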
    iterator_range<subrange_iterator> subranges() {
      return make_range(subrange_begin(), subrange_end());
    }

    iterator_range<const_subrange_iterator> subranges() const {
      return make_range(subrange_begin(), subrange_end());
    }

    /// Creates a new empty subregister live range. The range is added at the
    /// beginning of the subrange list; subrange iterators stay valid.
    SubRange *createSubRange(BumpPtrAllocator &Allocator,
                             LaneBitmask LaneMask) {
      SubRange *Range = new (Allocator) SubRange(LaneMask);
      appendSubRange(Range);
      return Range;
    }

    /// Like createSubRange() but the new range is filled with a copy of the
    /// liveness information in @p CopyFrom.
    SubRange *createSubRangeFrom(BumpPtrAllocator &Allocator,
                                 LaneBitmask LaneMask,
                                 const LiveRange &CopyFrom) {
      SubRange *Range = new (Allocator) SubRange(LaneMask, CopyFrom, Allocator);
      appendSubRange(Range);
      return Range;
    }

    /// Returns true if subregister liveness information is available.
    bool hasSubRanges() const {
      return SubRanges != nullptr;
    }

    /// Removes all subregister liveness information.
    void clearSubRanges();

    /// Removes all subranges without any segments (subranges without segments
    /// are not considered valid and should only exist temporarily).
    void removeEmptySubRanges();

    /// getSize - Returns the sum of sizes of all the LiveRange's segments.
    ///
    unsigned getSize() const;

    /// isSpillable - Can this interval be spilled?
    bool isSpillable() const { return Weight != huge_valf; }

    /// markNotSpillable - Mark interval as not spillable
    void markNotSpillable() { Weight = huge_valf; }

    /// For a given lane mask @p LaneMask, compute indexes at which the
    /// lane is marked undefined by subregister <def,read-undef> definitions.
    void computeSubRangeUndefs(SmallVectorImpl<SlotIndex> &Undefs,
                               LaneBitmask LaneMask,
                               const MachineRegisterInfo &MRI,
                               const SlotIndexes &Indexes) const;

    /// Refines the subranges to support \p LaneMask. This may only be called
    /// for LI.hasSubRanges()==true. Subregister ranges are split or created
    /// until \p LaneMask can be matched exactly. \p Apply is executed on the
    /// matching subranges.
    ///
    /// Example:
    ///    Given an interval with subranges with lanemasks L0F00, L00F0 and
    ///    L000F, refining for mask L0018 will split the L00F0 lane into
    ///    L00E0 and L0010, and the L000F lane into L0007 and L0008. The
    ///    \p Apply function will be applied to the L0010 and L0008 subranges.
    ///
    /// \p Indexes and \p TRI are required to clean up the VNIs that
    /// don't define the related lane masks after they get shrunk. E.g.,
    /// when L000F gets split into L0007 and L0008, maybe only a subset
    /// of the VNIs that defined L000F defines L0007.
    ///
    /// The cleanup of the VNIs needs to look at the actual instructions
    /// to decide what is or is not live at a definition point. If the
    /// update of the subranges occurs while the IR does not reflect these
    /// changes, \p ComposeSubRegIdx can be used to specify how the
    /// definitions are going to be rewritten.
    /// E.g., let's say we want to merge:
    ///     V1.sub1:<2 x s32> = COPY V2.sub3:<4 x s32>
    /// We do that by choosing a class where sub1:<2 x s32> and sub3:<4 x s32>
    /// overlap, i.e., by choosing a class where we can find "offset + 1 == 3".
    /// Put differently, we align V2's sub3 with V1's sub1:
    /// V2: sub0 sub1 sub2 sub3
    /// V1: <offset>  sub0 sub1
    ///
    /// This offset will look like a composed subregidx in the class:
    ///     V1.(composed sub2 with sub1):<4 x s32> = COPY V2.sub3:<4 x s32>
    /// =>  V1.(composed sub2 with sub1):<4 x s32> = COPY V2.sub3:<4 x s32>
    ///
    /// Now if we didn't rewrite the uses and defs of V1, all the checks for V1
    /// need to account for this offset.
    /// This happens during coalescing where we update the live-ranges while
    /// still having the old IR around because updating the IR on-the-fly
    /// would actually clobber some information about what the live-ranges
    /// being updated look like.
    void refineSubRanges(BumpPtrAllocator &Allocator, LaneBitmask LaneMask,
                         std::function<void(LiveInterval::SubRange &)> Apply,
                         const SlotIndexes &Indexes,
                         const TargetRegisterInfo &TRI,
                         unsigned ComposeSubRegIdx = 0);

    bool operator<(const LiveInterval& other) const {
      const SlotIndex &thisIndex = beginIndex();
      const SlotIndex &otherIndex = other.beginIndex();
      return std::tie(thisIndex, Reg) < std::tie(otherIndex, other.Reg);
    }

    void print(raw_ostream &OS) const;
    void dump() const;

    /// Walks the interval and asserts if any invariants fail to hold.
    ///
    /// Note that this is a no-op when asserts are disabled.
#ifdef NDEBUG
    void verify(const MachineRegisterInfo *MRI = nullptr) const {}
#else
    void verify(const MachineRegisterInfo *MRI = nullptr) const;
#endif

  private:
    /// Appends @p Range to SubRanges list.
    void appendSubRange(SubRange *Range) {
      Range->Next = SubRanges;
      SubRanges = Range;
    }

    /// Free memory held by SubRange.
    void freeSubRange(SubRange *S);
  };

  inline raw_ostream &operator<<(raw_ostream &OS,
                                 const LiveInterval::SubRange &SR) {
    SR.print(OS);
    return OS;
  }

  inline raw_ostream &operator<<(raw_ostream &OS, const LiveInterval &LI) {
    LI.print(OS);
    return OS;
  }

  raw_ostream &operator<<(raw_ostream &OS, const LiveRange::Segment &S);

  inline bool operator<(SlotIndex V, const LiveRange::Segment &S) {
    return V < S.start;
  }

  inline bool operator<(const LiveRange::Segment &S, SlotIndex V) {
    return S.start < V;
  }

  /// Helper class for performant LiveRange bulk updates.
  ///
  /// Calling LiveRange::addSegment() repeatedly can be expensive on large
  /// live ranges because segments after the insertion point may need to be
  /// shifted. The LiveRangeUpdater class can defer the shifting when adding
  /// many segments in order.
  ///
  /// The LiveRange will be in an invalid state until flush() is called.
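  ///
  /// For example, a sketch of bulk-adding segments in increasing start order,
  /// where LR and Segs are placeholder names:
  ///
  ///   {
  ///     LiveRangeUpdater Updater(&LR);
  ///     for (const LiveRange::Segment &S : Segs)
  ///       Updater.add(S);
  ///   } // The destructor calls flush(), leaving LR valid again.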
  class LiveRangeUpdater {
    LiveRange *LR;
    SlotIndex LastStart;
    LiveRange::iterator WriteI;
    LiveRange::iterator ReadI;
    SmallVector<LiveRange::Segment, 16> Spills;
    void mergeSpills();

  public:
    /// Create a LiveRangeUpdater for adding segments to LR.
    /// LR will temporarily be in an invalid state until flush() is called.
    LiveRangeUpdater(LiveRange *lr = nullptr) : LR(lr) {}

    ~LiveRangeUpdater() { flush(); }

    /// Add a segment to LR and coalesce when possible, just like
    /// LR.addSegment(). Segments should be added in increasing start order for
    /// best performance.
    void add(LiveRange::Segment);

    void add(SlotIndex Start, SlotIndex End, VNInfo *VNI) {
      add(LiveRange::Segment(Start, End, VNI));
    }

    /// Return true if the LR is currently in an invalid state, and flush()
    /// needs to be called.
    bool isDirty() const { return LastStart.isValid(); }

    /// Flush the updater state to LR so it is valid and contains all added
    /// segments.
    void flush();

    /// Select a different destination live range.
    void setDest(LiveRange *lr) {
      if (LR != lr && isDirty())
        flush();
      LR = lr;
    }

    /// Get the current destination live range.
    LiveRange *getDest() const { return LR; }

    void dump() const;
    void print(raw_ostream&) const;
  };

  inline raw_ostream &operator<<(raw_ostream &OS, const LiveRangeUpdater &X) {
    X.print(OS);
    return OS;
  }

  /// ConnectedVNInfoEqClasses - Helper class that can divide VNInfos in a
  /// LiveInterval into equivalence classes of connected components. A
  /// LiveInterval that has multiple connected components can be broken into
  /// multiple LiveIntervals.
  ///
  /// Given a LiveInterval that may have multiple connected components, run:
  ///
  ///   unsigned numComps = ConEQ.Classify(LI);
  ///   if (numComps > 1) {
  ///     // allocate numComps-1 new LiveIntervals into LIV[1..]
  ///     ConEQ.Distribute(LI, LIV, MRI);
  ///   }

  class ConnectedVNInfoEqClasses {
    LiveIntervals &LIS;
    IntEqClasses EqClass;

  public:
    explicit ConnectedVNInfoEqClasses(LiveIntervals &lis) : LIS(lis) {}

    /// Classify the values in \p LR into connected components.
    /// Returns the number of connected components.
    unsigned Classify(const LiveRange &LR);

    /// getEqClass - Classify creates equivalence classes numbered 0..N. Return
    /// the equivalence class assigned to the VNI.
    unsigned getEqClass(const VNInfo *VNI) const { return EqClass[VNI->id]; }

    /// Distribute the values in \p LI into a separate LiveInterval for each
    /// connected component. LIV must have an empty LiveInterval for
    /// each additional connected component. The first connected component is
    /// left in \p LI.
    void Distribute(LiveInterval &LI, LiveInterval *LIV[],
                    MachineRegisterInfo &MRI);
  };

} // end namespace llvm

#endif // LLVM_CODEGEN_LIVEINTERVAL_H