1 //===- llvm/CodeGen/LiveInterval.h - Interval representation ----*- C++ -*-===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This file implements the LiveRange and LiveInterval classes.  Given some
// numbering of each of the machine instructions, an interval [i, j) is said to
// be a live range for register v if there is no instruction with number j' >= j
// such that v is live at j' and there is no instruction with number i' < i such
// that v is live at i'. In this implementation ranges can have holes,
14 // i.e. a range might look like [1,20), [50,65), [1000,1001).  Each
15 // individual segment is represented as an instance of LiveRange::Segment,
16 // and the whole range is represented as an instance of LiveRange.
17 //
18 //===----------------------------------------------------------------------===//
19 
20 #ifndef LLVM_CODEGEN_LIVEINTERVAL_H
21 #define LLVM_CODEGEN_LIVEINTERVAL_H
22 
23 #include "llvm/ADT/ArrayRef.h"
24 #include "llvm/ADT/IntEqClasses.h"
25 #include "llvm/ADT/STLExtras.h"
26 #include "llvm/ADT/SmallVector.h"
27 #include "llvm/ADT/iterator_range.h"
28 #include "llvm/CodeGen/Register.h"
29 #include "llvm/CodeGen/SlotIndexes.h"
30 #include "llvm/MC/LaneBitmask.h"
31 #include "llvm/Support/Allocator.h"
32 #include "llvm/Support/MathExtras.h"
33 #include <algorithm>
34 #include <cassert>
35 #include <cstddef>
36 #include <functional>
37 #include <memory>
38 #include <set>
39 #include <tuple>
40 #include <utility>
41 
42 namespace llvm {
43 
44   class CoalescerPair;
45   class LiveIntervals;
46   class MachineRegisterInfo;
47   class raw_ostream;
48 
49   /// VNInfo - Value Number Information.
  /// This class holds information about a machine level value, including
51   /// definition and use points.
52   ///
53   class VNInfo {
54   public:
55     using Allocator = BumpPtrAllocator;
56 
57     /// The ID number of this value.
58     unsigned id;
59 
60     /// The index of the defining instruction.
61     SlotIndex def;
62 
63     /// VNInfo constructor.
64     VNInfo(unsigned i, SlotIndex d) : id(i), def(d) {}
65 
66     /// VNInfo constructor, copies values from orig, except for the value number.
67     VNInfo(unsigned i, const VNInfo &orig) : id(i), def(orig.def) {}
68 
69     /// Copy from the parameter into this VNInfo.
70     void copyFrom(VNInfo &src) {
71       def = src.def;
72     }
73 
    /// Returns true if this value is defined by a PHI instruction (or was;
75     /// PHI instructions may have been eliminated).
76     /// PHI-defs begin at a block boundary, all other defs begin at register or
77     /// EC slots.
78     bool isPHIDef() const { return def.isBlock(); }
79 
80     /// Returns true if this value is unused.
81     bool isUnused() const { return !def.isValid(); }
82 
83     /// Mark this value as unused.
84     void markUnused() { def = SlotIndex(); }
85   };
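
  // For example, a pass inspecting a value number can distinguish the common
  // cases like this (a sketch; 'VNI' is an assumed VNInfo* taken from some
  // live range):
  //
  //   if (VNI->isUnused()) {
  //     // No segment of the range uses this value number.
  //   } else if (VNI->isPHIDef()) {
  //     // The value is (or was) defined by a PHI at a block boundary.
  //   }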
86 
87   /// Result of a LiveRange query. This class hides the implementation details
88   /// of live ranges, and it should be used as the primary interface for
89   /// examining live ranges around instructions.
90   class LiveQueryResult {
91     VNInfo *const EarlyVal;
92     VNInfo *const LateVal;
93     const SlotIndex EndPoint;
94     const bool Kill;
95 
96   public:
97     LiveQueryResult(VNInfo *EarlyVal, VNInfo *LateVal, SlotIndex EndPoint,
98                     bool Kill)
99       : EarlyVal(EarlyVal), LateVal(LateVal), EndPoint(EndPoint), Kill(Kill)
100     {}
101 
102     /// Return the value that is live-in to the instruction. This is the value
103     /// that will be read by the instruction's use operands. Return NULL if no
104     /// value is live-in.
105     VNInfo *valueIn() const {
106       return EarlyVal;
107     }
108 
109     /// Return true if the live-in value is killed by this instruction. This
110     /// means that either the live range ends at the instruction, or it changes
111     /// value.
112     bool isKill() const {
113       return Kill;
114     }
115 
116     /// Return true if this instruction has a dead def.
117     bool isDeadDef() const {
118       return EndPoint.isDead();
119     }
120 
121     /// Return the value leaving the instruction, if any. This can be a
122     /// live-through value, or a live def. A dead def returns NULL.
123     VNInfo *valueOut() const {
124       return isDeadDef() ? nullptr : LateVal;
125     }
126 
127     /// Returns the value alive at the end of the instruction, if any. This can
128     /// be a live-through value, a live def or a dead def.
129     VNInfo *valueOutOrDead() const {
130       return LateVal;
131     }
132 
    /// Return the value defined by this instruction, if any. This includes
    /// dead defs; it is the value created by the instruction's def operands.
135     VNInfo *valueDefined() const {
136       return EarlyVal == LateVal ? nullptr : LateVal;
137     }
138 
139     /// Return the end point of the last live range segment to interact with
140     /// the instruction, if any.
141     ///
142     /// The end point is an invalid SlotIndex only if the live range doesn't
143     /// intersect the instruction at all.
144     ///
145     /// The end point may be at or past the end of the instruction's basic
146     /// block. That means the value was live out of the block.
147     SlotIndex endPoint() const {
148       return EndPoint;
149     }
150   };
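
  // Typical use goes through LiveRange::Query(), declared below. A minimal
  // sketch (assumes a LiveRange 'LR' and the SlotIndex 'Idx' of an
  // instruction, e.g. obtained from the LiveIntervals and SlotIndexes
  // analyses):
  //
  //   LiveQueryResult LRQ = LR.Query(Idx);
  //   if (VNInfo *LiveIn = LRQ.valueIn()) {
  //     // 'LiveIn' is the value read by the instruction's use operands.
  //     if (LRQ.isKill()) {
  //       // The live-in value is not live after this instruction.
  //     }
  //   }
  //   if (VNInfo *Def = LRQ.valueDefined()) {
  //     // The instruction defines a new value (possibly a dead def).
  //   }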
151 
152   /// This class represents the liveness of a register, stack slot, etc.
153   /// It manages an ordered list of Segment objects.
  /// The Segments are organized in a static single assignment form: at places
  /// where a new value is defined or where different values reach a CFG join,
  /// a new segment with a new value number is used.
157   class LiveRange {
158   public:
159     /// This represents a simple continuous liveness interval for a value.
160     /// The start point is inclusive, the end point exclusive. These intervals
161     /// are rendered as [start,end).
162     struct Segment {
163       SlotIndex start;  // Start point of the interval (inclusive)
164       SlotIndex end;    // End point of the interval (exclusive)
165       VNInfo *valno = nullptr; // identifier for the value contained in this
166                                // segment.
167 
168       Segment() = default;
169 
170       Segment(SlotIndex S, SlotIndex E, VNInfo *V)
171         : start(S), end(E), valno(V) {
172         assert(S < E && "Cannot create empty or backwards segment");
173       }
174 
175       /// Return true if the index is covered by this segment.
176       bool contains(SlotIndex I) const {
177         return start <= I && I < end;
178       }
179 
180       /// Return true if the given interval, [S, E), is covered by this segment.
181       bool containsInterval(SlotIndex S, SlotIndex E) const {
182         assert((S < E) && "Backwards interval?");
183         return (start <= S && S < end) && (start < E && E <= end);
184       }
185 
186       bool operator<(const Segment &Other) const {
187         return std::tie(start, end) < std::tie(Other.start, Other.end);
188       }
189       bool operator==(const Segment &Other) const {
190         return start == Other.start && end == Other.end;
191       }
192 
193       bool operator!=(const Segment &Other) const {
194         return !(*this == Other);
195       }
196 
197       void dump() const;
198     };
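
    // Because segments are half-open, a segment contains its start index but
    // not its end index. A small sketch (assumes SlotIndexes 'A' < 'B' and a
    // VNInfo* 'VNI'):
    //
    //   LiveRange::Segment S(A, B, VNI); // asserts A < B
    //   bool HasA = S.contains(A);       // true
    //   bool HasB = S.contains(B);       // false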
199 
200     using Segments = SmallVector<Segment, 2>;
201     using VNInfoList = SmallVector<VNInfo *, 2>;
202 
203     Segments segments;   // the liveness segments
204     VNInfoList valnos;   // value#'s
205 
206     // The segment set is used temporarily to accelerate initial computation
207     // of live ranges of physical registers in computeRegUnitRange.
208     // After that the set is flushed to the segment vector and deleted.
209     using SegmentSet = std::set<Segment>;
210     std::unique_ptr<SegmentSet> segmentSet;
211 
212     using iterator = Segments::iterator;
213     using const_iterator = Segments::const_iterator;
214 
215     iterator begin() { return segments.begin(); }
216     iterator end()   { return segments.end(); }
217 
218     const_iterator begin() const { return segments.begin(); }
219     const_iterator end() const  { return segments.end(); }
220 
221     using vni_iterator = VNInfoList::iterator;
222     using const_vni_iterator = VNInfoList::const_iterator;
223 
224     vni_iterator vni_begin() { return valnos.begin(); }
225     vni_iterator vni_end()   { return valnos.end(); }
226 
227     const_vni_iterator vni_begin() const { return valnos.begin(); }
228     const_vni_iterator vni_end() const   { return valnos.end(); }
229 
230     iterator_range<vni_iterator> vnis() {
231       return make_range(vni_begin(), vni_end());
232     }
233 
234     iterator_range<const_vni_iterator> vnis() const {
235       return make_range(vni_begin(), vni_end());
236     }
237 
238     /// Constructs a new LiveRange object.
239     LiveRange(bool UseSegmentSet = false)
240         : segmentSet(UseSegmentSet ? std::make_unique<SegmentSet>()
241                                    : nullptr) {}
242 
243     /// Constructs a new LiveRange object by copying segments and valnos from
244     /// another LiveRange.
245     LiveRange(const LiveRange &Other, BumpPtrAllocator &Allocator) {
246       assert(Other.segmentSet == nullptr &&
247              "Copying of LiveRanges with active SegmentSets is not supported");
248       assign(Other, Allocator);
249     }
250 
    /// Copies value numbers and live segments from \p Other into this range.
252     void assign(const LiveRange &Other, BumpPtrAllocator &Allocator) {
253       if (this == &Other)
254         return;
255 
256       assert(Other.segmentSet == nullptr &&
257              "Copying of LiveRanges with active SegmentSets is not supported");
258       // Duplicate valnos.
259       for (const VNInfo *VNI : Other.valnos)
260         createValueCopy(VNI, Allocator);
261       // Now we can copy segments and remap their valnos.
262       for (const Segment &S : Other.segments)
263         segments.push_back(Segment(S.start, S.end, valnos[S.valno->id]));
264     }
265 
266     /// advanceTo - Advance the specified iterator to point to the Segment
267     /// containing the specified position, or end() if the position is past the
268     /// end of the range.  If no Segment contains this position, but the
269     /// position is in a hole, this method returns an iterator pointing to the
270     /// Segment immediately after the hole.
271     iterator advanceTo(iterator I, SlotIndex Pos) {
272       assert(I != end());
273       if (Pos >= endIndex())
274         return end();
275       while (I->end <= Pos) ++I;
276       return I;
277     }
278 
279     const_iterator advanceTo(const_iterator I, SlotIndex Pos) const {
280       assert(I != end());
281       if (Pos >= endIndex())
282         return end();
283       while (I->end <= Pos) ++I;
284       return I;
285     }
286 
287     /// find - Return an iterator pointing to the first segment that ends after
288     /// Pos, or end(). This is the same as advanceTo(begin(), Pos), but faster
289     /// when searching large ranges.
290     ///
291     /// If Pos is contained in a Segment, that segment is returned.
292     /// If Pos is in a hole, the following Segment is returned.
293     /// If Pos is beyond endIndex, end() is returned.
294     iterator find(SlotIndex Pos);
295 
296     const_iterator find(SlotIndex Pos) const {
297       return const_cast<LiveRange*>(this)->find(Pos);
298     }
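
    // For example, find() makes it easy to visit every segment overlapping a
    // window (a sketch; 'LR' is a LiveRange and 'WinStart' < 'WinEnd' are
    // assumed SlotIndexes):
    //
    //   for (LiveRange::const_iterator I = LR.find(WinStart), E = LR.end();
    //        I != E && I->start < WinEnd; ++I) {
    //     // [I->start, I->end) is one live segment overlapping the window.
    //   }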
299 
300     void clear() {
301       valnos.clear();
302       segments.clear();
303     }
304 
305     size_t size() const {
306       return segments.size();
307     }
308 
309     bool hasAtLeastOneValue() const { return !valnos.empty(); }
310 
311     bool containsOneValue() const { return valnos.size() == 1; }
312 
313     unsigned getNumValNums() const { return (unsigned)valnos.size(); }
314 
315     /// getValNumInfo - Returns pointer to the specified val#.
316     ///
317     inline VNInfo *getValNumInfo(unsigned ValNo) {
318       return valnos[ValNo];
319     }
320     inline const VNInfo *getValNumInfo(unsigned ValNo) const {
321       return valnos[ValNo];
322     }
323 
324     /// containsValue - Returns true if VNI belongs to this range.
325     bool containsValue(const VNInfo *VNI) const {
326       return VNI && VNI->id < getNumValNums() && VNI == getValNumInfo(VNI->id);
327     }
328 
329     /// getNextValue - Create a new value number and return it.
    /// @p Def is the index of the instruction that defines the value number.
331     VNInfo *getNextValue(SlotIndex Def, VNInfo::Allocator &VNInfoAllocator) {
332       VNInfo *VNI =
333         new (VNInfoAllocator) VNInfo((unsigned)valnos.size(), Def);
334       valnos.push_back(VNI);
335       return VNI;
336     }
337 
338     /// createDeadDef - Make sure the range has a value defined at Def.
339     /// If one already exists, return it. Otherwise allocate a new value and
340     /// add liveness for a dead def.
341     VNInfo *createDeadDef(SlotIndex Def, VNInfo::Allocator &VNIAlloc);
342 
343     /// Create a def of value @p VNI. Return @p VNI. If there already exists
344     /// a definition at VNI->def, the value defined there must be @p VNI.
345     VNInfo *createDeadDef(VNInfo *VNI);
346 
347     /// Create a copy of the given value. The new value will be identical except
    /// for the value number.
349     VNInfo *createValueCopy(const VNInfo *orig,
350                             VNInfo::Allocator &VNInfoAllocator) {
351       VNInfo *VNI =
352         new (VNInfoAllocator) VNInfo((unsigned)valnos.size(), *orig);
353       valnos.push_back(VNI);
354       return VNI;
355     }
356 
357     /// RenumberValues - Renumber all values in order of appearance and remove
358     /// unused values.
359     void RenumberValues();
360 
361     /// MergeValueNumberInto - This method is called when two value numbers
362     /// are found to be equivalent.  This eliminates V1, replacing all
363     /// segments with the V1 value number with the V2 value number.  This can
    /// cause merging of V1/V2 value numbers and compaction of the value space.
365     VNInfo* MergeValueNumberInto(VNInfo *V1, VNInfo *V2);
366 
367     /// Merge all of the live segments of a specific val# in RHS into this live
    /// range as the specified value number. The segments in RHS are allowed
    /// to overlap with segments in the current range; the value numbers of
    /// the overlapped live segments are replaced with the specified value
    /// number.
372     void MergeSegmentsInAsValue(const LiveRange &RHS, VNInfo *LHSValNo);
373 
374     /// MergeValueInAsValue - Merge all of the segments of a specific val#
375     /// in RHS into this live range as the specified value number.
376     /// The segments in RHS are allowed to overlap with segments in the
377     /// current range, but only if the overlapping segments have the
378     /// specified value number.
379     void MergeValueInAsValue(const LiveRange &RHS,
380                              const VNInfo *RHSValNo, VNInfo *LHSValNo);
381 
382     bool empty() const { return segments.empty(); }
383 
384     /// beginIndex - Return the lowest numbered slot covered.
385     SlotIndex beginIndex() const {
386       assert(!empty() && "Call to beginIndex() on empty range.");
387       return segments.front().start;
388     }
389 
    /// endIndex - Return the maximum point of the whole range, exclusive.
392     SlotIndex endIndex() const {
393       assert(!empty() && "Call to endIndex() on empty range.");
394       return segments.back().end;
395     }
396 
397     bool expiredAt(SlotIndex index) const {
398       return index >= endIndex();
399     }
400 
401     bool liveAt(SlotIndex index) const {
402       const_iterator r = find(index);
403       return r != end() && r->start <= index;
404     }
405 
406     /// Return the segment that contains the specified index, or null if there
407     /// is none.
408     const Segment *getSegmentContaining(SlotIndex Idx) const {
409       const_iterator I = FindSegmentContaining(Idx);
410       return I == end() ? nullptr : &*I;
411     }
412 
413     /// Return the live segment that contains the specified index, or null if
414     /// there is none.
415     Segment *getSegmentContaining(SlotIndex Idx) {
416       iterator I = FindSegmentContaining(Idx);
417       return I == end() ? nullptr : &*I;
418     }
419 
420     /// getVNInfoAt - Return the VNInfo that is live at Idx, or NULL.
421     VNInfo *getVNInfoAt(SlotIndex Idx) const {
422       const_iterator I = FindSegmentContaining(Idx);
423       return I == end() ? nullptr : I->valno;
424     }
425 
426     /// getVNInfoBefore - Return the VNInfo that is live up to but not
    /// necessarily including Idx, or NULL. Use this to find the reaching def
428     /// used by an instruction at this SlotIndex position.
429     VNInfo *getVNInfoBefore(SlotIndex Idx) const {
430       const_iterator I = FindSegmentContaining(Idx.getPrevSlot());
431       return I == end() ? nullptr : I->valno;
432     }
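
    // For instance, at an instruction that both reads and redefines a
    // register (a sketch; 'UseIdx' is assumed to be the register slot of that
    // instruction in some LiveRange 'LR'):
    //
    //   VNInfo *Incoming = LR.getVNInfoBefore(UseIdx); // reaching def
    //   VNInfo *Defined  = LR.getVNInfoAt(UseIdx);     // value live at UseIdx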
433 
434     /// Return an iterator to the segment that contains the specified index, or
435     /// end() if there is none.
436     iterator FindSegmentContaining(SlotIndex Idx) {
437       iterator I = find(Idx);
438       return I != end() && I->start <= Idx ? I : end();
439     }
440 
441     const_iterator FindSegmentContaining(SlotIndex Idx) const {
442       const_iterator I = find(Idx);
443       return I != end() && I->start <= Idx ? I : end();
444     }
445 
446     /// overlaps - Return true if the intersection of the two live ranges is
447     /// not empty.
448     bool overlaps(const LiveRange &other) const {
449       if (other.empty())
450         return false;
451       return overlapsFrom(other, other.begin());
452     }
453 
454     /// overlaps - Return true if the two ranges have overlapping segments
455     /// that are not coalescable according to CP.
456     ///
457     /// Overlapping segments where one range is defined by a coalescable
458     /// copy are allowed.
459     bool overlaps(const LiveRange &Other, const CoalescerPair &CP,
460                   const SlotIndexes&) const;
461 
462     /// overlaps - Return true if the live range overlaps an interval specified
463     /// by [Start, End).
464     bool overlaps(SlotIndex Start, SlotIndex End) const;
465 
466     /// overlapsFrom - Return true if the intersection of the two live ranges
467     /// is not empty.  The specified iterator is a hint that we can begin
468     /// scanning the Other range starting at I.
469     bool overlapsFrom(const LiveRange &Other, const_iterator StartPos) const;
470 
471     /// Returns true if all segments of the @p Other live range are completely
472     /// covered by this live range.
    /// Adjacent live ranges do not affect the covering: the live range
    /// [1,5) [5,10) covers [3,7).
475     bool covers(const LiveRange &Other) const;
476 
477     /// Add the specified Segment to this range, merging segments as
478     /// appropriate.  This returns an iterator to the inserted segment (which
479     /// may have grown since it was inserted).
480     iterator addSegment(Segment S);
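
    // Building liveness from scratch typically pairs getNextValue() with
    // addSegment(). A minimal sketch (assumes a VNInfo::Allocator 'Alloc' and
    // SlotIndexes 'DefIdx' < 'KillIdx' obtained elsewhere):
    //
    //   LiveRange LR;
    //   VNInfo *VNI = LR.getNextValue(DefIdx, Alloc);
    //   LR.addSegment(LiveRange::Segment(DefIdx, KillIdx, VNI));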
481 
482     /// Attempt to extend a value defined after @p StartIdx to include @p Use.
483     /// Both @p StartIdx and @p Use should be in the same basic block. In case
484     /// of subranges, an extension could be prevented by an explicit "undef"
    /// caused by a <def,read-undef> on a non-overlapping lane. The list of
    /// locations of such "undefs" should be provided in @p Undefs.
487     /// The return value is a pair: the first element is VNInfo of the value
488     /// that was extended (possibly nullptr), the second is a boolean value
489     /// indicating whether an "undef" was encountered.
490     /// If this range is live before @p Use in the basic block that starts at
491     /// @p StartIdx, and there is no intervening "undef", extend it to be live
492     /// up to @p Use, and return the pair {value, false}. If there is no
493     /// segment before @p Use and there is no "undef" between @p StartIdx and
494     /// @p Use, return {nullptr, false}. If there is an "undef" before @p Use,
495     /// return {nullptr, true}.
    std::pair<VNInfo*,bool> extendInBlock(ArrayRef<SlotIndex> Undefs,
        SlotIndex StartIdx, SlotIndex Use);
498 
499     /// Simplified version of the above "extendInBlock", which assumes that
500     /// no register lanes are undefined by <def,read-undef> operands.
501     /// If this range is live before @p Use in the basic block that starts
502     /// at @p StartIdx, extend it to be live up to @p Use, and return the
503     /// value. If there is no segment before @p Use, return nullptr.
    VNInfo *extendInBlock(SlotIndex StartIdx, SlotIndex Use);
505 
506     /// join - Join two live ranges (this, and other) together.  This applies
507     /// mappings to the value numbers in the LHS/RHS ranges as specified.  If
508     /// the ranges are not joinable, this aborts.
509     void join(LiveRange &Other,
510               const int *ValNoAssignments,
511               const int *RHSValNoAssignments,
512               SmallVectorImpl<VNInfo *> &NewVNInfo);
513 
    /// True iff this live range is a single segment that lies between the
515     /// specified boundaries, exclusively. Vregs live across a backedge are not
516     /// considered local. The boundaries are expected to lie within an extended
517     /// basic block, so vregs that are not live out should contain no holes.
518     bool isLocal(SlotIndex Start, SlotIndex End) const {
519       return beginIndex() > Start.getBaseIndex() &&
520         endIndex() < End.getBoundaryIndex();
521     }
522 
523     /// Remove the specified interval from this live range.
524     /// Does nothing if interval is not part of this live range.
525     /// Note that the interval must be within a single Segment in its entirety.
526     void removeSegment(SlotIndex Start, SlotIndex End,
527                        bool RemoveDeadValNo = false);
528 
529     void removeSegment(Segment S, bool RemoveDeadValNo = false) {
530       removeSegment(S.start, S.end, RemoveDeadValNo);
531     }
532 
533     /// Remove segment pointed to by iterator @p I from this range.
534     iterator removeSegment(iterator I, bool RemoveDeadValNo = false);
535 
536     /// Mark \p ValNo for deletion if no segments in this range use it.
537     void removeValNoIfDead(VNInfo *ValNo);
538 
539     /// Query Liveness at Idx.
    /// The sub-instruction slot of Idx doesn't matter; only the instruction
    /// it refers to is considered.
542     LiveQueryResult Query(SlotIndex Idx) const {
543       // Find the segment that enters the instruction.
544       const_iterator I = find(Idx.getBaseIndex());
545       const_iterator E = end();
546       if (I == E)
547         return LiveQueryResult(nullptr, nullptr, SlotIndex(), false);
548 
549       // Is this an instruction live-in segment?
550       // If Idx is the start index of a basic block, include live-in segments
551       // that start at Idx.getBaseIndex().
552       VNInfo *EarlyVal = nullptr;
553       VNInfo *LateVal  = nullptr;
554       SlotIndex EndPoint;
555       bool Kill = false;
556       if (I->start <= Idx.getBaseIndex()) {
557         EarlyVal = I->valno;
558         EndPoint = I->end;
559         // Move to the potentially live-out segment.
560         if (SlotIndex::isSameInstr(Idx, I->end)) {
561           Kill = true;
562           if (++I == E)
563             return LiveQueryResult(EarlyVal, LateVal, EndPoint, Kill);
564         }
565         // Special case: A PHIDef value can have its def in the middle of a
566         // segment if the value happens to be live out of the layout
567         // predecessor.
568         // Such a value is not live-in.
569         if (EarlyVal->def == Idx.getBaseIndex())
570           EarlyVal = nullptr;
571       }
572       // I now points to the segment that may be live-through, or defined by
573       // this instr. Ignore segments starting after the current instr.
574       if (!SlotIndex::isEarlierInstr(Idx, I->start)) {
575         LateVal = I->valno;
576         EndPoint = I->end;
577       }
578       return LiveQueryResult(EarlyVal, LateVal, EndPoint, Kill);
579     }
580 
581     /// removeValNo - Remove all the segments defined by the specified value#.
    /// Also remove the value# from the value# list.
583     void removeValNo(VNInfo *ValNo);
584 
585     /// Returns true if the live range is zero length, i.e. no live segments
586     /// span instructions. It doesn't pay to spill such a range.
587     bool isZeroLength(SlotIndexes *Indexes) const {
588       for (const Segment &S : segments)
589         if (Indexes->getNextNonNullIndex(S.start).getBaseIndex() <
590             S.end.getBaseIndex())
591           return false;
592       return true;
593     }
594 
    /// Returns true if any segment in the live range contains any of the
    /// provided slot indexes.  Slots which occur in holes between
    /// segments will not cause the function to return true.
598     bool isLiveAtIndexes(ArrayRef<SlotIndex> Slots) const;
599 
600     bool operator<(const LiveRange& other) const {
601       const SlotIndex &thisIndex = beginIndex();
602       const SlotIndex &otherIndex = other.beginIndex();
603       return thisIndex < otherIndex;
604     }
605 
    /// Returns true if there is an explicit "undef" between @p Begin
    /// and @p End.
608     bool isUndefIn(ArrayRef<SlotIndex> Undefs, SlotIndex Begin,
609                    SlotIndex End) const {
610       return llvm::any_of(Undefs, [Begin, End](SlotIndex Idx) -> bool {
611         return Begin <= Idx && Idx < End;
612       });
613     }
614 
615     /// Flush segment set into the regular segment vector.
616     /// The method is to be called after the live range
617     /// has been created, if use of the segment set was
618     /// activated in the constructor of the live range.
619     void flushSegmentSet();
620 
621     /// Stores indexes from the input index sequence R at which this LiveRange
622     /// is live to the output O iterator.
    /// R is a range of random-access iterators over input indexes sorted in
    /// ascending order. Indexes stored at O are also sorted in ascending
    /// order, so they can be used directly in a subsequent search (for
    /// example for subranges). Returns true if at least one index was found.
627     template <typename Range, typename OutputIt>
628     bool findIndexesLiveAt(Range &&R, OutputIt O) const {
629       assert(llvm::is_sorted(R));
630       auto Idx = R.begin(), EndIdx = R.end();
631       auto Seg = segments.begin(), EndSeg = segments.end();
632       bool Found = false;
633       while (Idx != EndIdx && Seg != EndSeg) {
        // If the current segment ends at or before *Idx, binary search for
        // the first segment that ends after *Idx.
636         if (Seg->end <= *Idx) {
637           Seg =
638               std::upper_bound(++Seg, EndSeg, *Idx, [=](auto V, const auto &S) {
639                 return V < S.end;
640               });
641           if (Seg == EndSeg)
642             break;
643         }
644         auto NotLessStart = std::lower_bound(Idx, EndIdx, Seg->start);
645         if (NotLessStart == EndIdx)
646           break;
647         auto NotLessEnd = std::lower_bound(NotLessStart, EndIdx, Seg->end);
648         if (NotLessEnd != NotLessStart) {
649           Found = true;
650           O = std::copy(NotLessStart, NotLessEnd, O);
651         }
652         Idx = NotLessEnd;
653         ++Seg;
654       }
655       return Found;
656     }
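
    // Usage sketch (assumes a LiveRange 'LR' and a container 'SortedIdxs' of
    // SlotIndexes sorted in ascending order):
    //
    //   SmallVector<SlotIndex, 8> LiveIdxs;
    //   if (LR.findIndexesLiveAt(SortedIdxs, std::back_inserter(LiveIdxs))) {
    //     // LiveIdxs now holds, in ascending order, the subset of SortedIdxs
    //     // covered by some segment of LR.
    //   }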
657 
658     void print(raw_ostream &OS) const;
659     void dump() const;
660 
661     /// Walk the range and assert if any invariants fail to hold.
662     ///
663     /// Note that this is a no-op when asserts are disabled.
664 #ifdef NDEBUG
665     void verify() const {}
666 #else
667     void verify() const;
668 #endif
669 
670   protected:
671     /// Append a segment to the list of segments.
672     void append(const LiveRange::Segment S);
673 
674   private:
675     friend class LiveRangeUpdater;
676     void addSegmentToSet(Segment S);
677     void markValNoForDeletion(VNInfo *V);
678   };
679 
680   inline raw_ostream &operator<<(raw_ostream &OS, const LiveRange &LR) {
681     LR.print(OS);
682     return OS;
683   }
684 
  /// LiveInterval - This class represents the liveness of a register
  /// or stack slot.
687   class LiveInterval : public LiveRange {
688   public:
689     using super = LiveRange;
690 
691     /// A live range for subregisters. The LaneMask specifies which parts of the
692     /// super register are covered by the interval.
693     /// (@sa TargetRegisterInfo::getSubRegIndexLaneMask()).
694     class SubRange : public LiveRange {
695     public:
696       SubRange *Next = nullptr;
697       LaneBitmask LaneMask;
698 
699       /// Constructs a new SubRange object.
700       SubRange(LaneBitmask LaneMask) : LaneMask(LaneMask) {}
701 
702       /// Constructs a new SubRange object by copying liveness from @p Other.
703       SubRange(LaneBitmask LaneMask, const LiveRange &Other,
704                BumpPtrAllocator &Allocator)
705         : LiveRange(Other, Allocator), LaneMask(LaneMask) {}
706 
707       void print(raw_ostream &OS) const;
708       void dump() const;
709     };
710 
711   private:
    SubRange *SubRanges = nullptr; ///< Singly linked list of subregister live
                                   ///< ranges.
714     const Register Reg; // the register or stack slot of this interval.
715     float Weight = 0.0; // weight of this interval
716 
717   public:
718     Register reg() const { return Reg; }
719     float weight() const { return Weight; }
720     void incrementWeight(float Inc) { Weight += Inc; }
721     void setWeight(float Value) { Weight = Value; }
722 
723     LiveInterval(unsigned Reg, float Weight) : Reg(Reg), Weight(Weight) {}
724 
725     ~LiveInterval() {
726       clearSubRanges();
727     }
728 
729     template<typename T>
730     class SingleLinkedListIterator {
731       T *P;
732 
733     public:
734       SingleLinkedListIterator(T *P) : P(P) {}
735 
736       SingleLinkedListIterator<T> &operator++() {
737         P = P->Next;
738         return *this;
739       }
740       SingleLinkedListIterator<T> operator++(int) {
741         SingleLinkedListIterator res = *this;
742         ++*this;
743         return res;
744       }
745       bool operator!=(const SingleLinkedListIterator<T> &Other) const {
746         return P != Other.operator->();
747       }
748       bool operator==(const SingleLinkedListIterator<T> &Other) const {
749         return P == Other.operator->();
750       }
751       T &operator*() const {
752         return *P;
753       }
754       T *operator->() const {
755         return P;
756       }
757     };
758 
759     using subrange_iterator = SingleLinkedListIterator<SubRange>;
760     using const_subrange_iterator = SingleLinkedListIterator<const SubRange>;
761 
762     subrange_iterator subrange_begin() {
763       return subrange_iterator(SubRanges);
764     }
765     subrange_iterator subrange_end() {
766       return subrange_iterator(nullptr);
767     }
768 
769     const_subrange_iterator subrange_begin() const {
770       return const_subrange_iterator(SubRanges);
771     }
772     const_subrange_iterator subrange_end() const {
773       return const_subrange_iterator(nullptr);
774     }
775 
776     iterator_range<subrange_iterator> subranges() {
777       return make_range(subrange_begin(), subrange_end());
778     }
779 
780     iterator_range<const_subrange_iterator> subranges() const {
781       return make_range(subrange_begin(), subrange_end());
782     }
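
    // Subranges are commonly visited with a range-based for loop. A sketch
    // (assumes a LiveInterval 'LI', a LaneBitmask 'Mask' and a SlotIndex
    // 'Idx'):
    //
    //   if (LI.hasSubRanges())
    //     for (const LiveInterval::SubRange &SR : LI.subranges())
    //       if ((SR.LaneMask & Mask).any() && SR.liveAt(Idx)) {
    //         // Some lane in Mask is live at Idx.
    //       }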
783 
784     /// Creates a new empty subregister live range. The range is added at the
785     /// beginning of the subrange list; subrange iterators stay valid.
786     SubRange *createSubRange(BumpPtrAllocator &Allocator,
787                              LaneBitmask LaneMask) {
788       SubRange *Range = new (Allocator) SubRange(LaneMask);
789       appendSubRange(Range);
790       return Range;
791     }
792 
793     /// Like createSubRange() but the new range is filled with a copy of the
794     /// liveness information in @p CopyFrom.
795     SubRange *createSubRangeFrom(BumpPtrAllocator &Allocator,
796                                  LaneBitmask LaneMask,
797                                  const LiveRange &CopyFrom) {
798       SubRange *Range = new (Allocator) SubRange(LaneMask, CopyFrom, Allocator);
799       appendSubRange(Range);
800       return Range;
801     }
802 
803     /// Returns true if subregister liveness information is available.
804     bool hasSubRanges() const {
805       return SubRanges != nullptr;
806     }
807 
808     /// Removes all subregister liveness information.
809     void clearSubRanges();
810 
811     /// Removes all subranges without any segments (subranges without segments
812     /// are not considered valid and should only exist temporarily).
813     void removeEmptySubRanges();
814 
    /// getSize - Returns the sum of the sizes of all segments in this
    /// interval.
817     unsigned getSize() const;
818 
819     /// isSpillable - Can this interval be spilled?
820     bool isSpillable() const { return Weight != huge_valf; }
821 
822     /// markNotSpillable - Mark interval as not spillable
823     void markNotSpillable() { Weight = huge_valf; }
824 
825     /// For a given lane mask @p LaneMask, compute indexes at which the
826     /// lane is marked undefined by subregister <def,read-undef> definitions.
827     void computeSubRangeUndefs(SmallVectorImpl<SlotIndex> &Undefs,
828                                LaneBitmask LaneMask,
829                                const MachineRegisterInfo &MRI,
830                                const SlotIndexes &Indexes) const;
831 
    /// Refines the subranges to support \p LaneMask. This may only be called
    /// when hasSubRanges() is true. Subregister ranges are split or created
    /// until \p LaneMask can be matched exactly. \p Apply is executed on the
    /// matching subranges.
    ///
    /// Example:
    ///    Given an interval with subranges with lanemasks L0F00, L00F0 and
    ///    L000F, refining for mask L0018 will split the L00F0 lane into
    ///    L00E0 and L0010 and the L000F lane into L0007 and L0008. The
    ///    \p Apply function will be applied to the L0010 and L0008 subranges.
842     ///
843     /// \p Indexes and \p TRI are required to clean up the VNIs that
844     /// don't define the related lane masks after they get shrunk. E.g.,
845     /// when L000F gets split into L0007 and L0008 maybe only a subset
846     /// of the VNIs that defined L000F defines L0007.
847     ///
    /// The clean-up of the VNIs needs to look at the actual instructions
    /// to decide what is or is not live at a definition point. If the
    /// update of the subranges occurs while the IR does not reflect these
    /// changes, \p ComposeSubRegIdx can be used to specify how the
    /// definitions are going to be rewritten.
    /// E.g., let's say we want to merge:
854     ///     V1.sub1:<2 x s32> = COPY V2.sub3:<4 x s32>
855     /// We do that by choosing a class where sub1:<2 x s32> and sub3:<4 x s32>
856     /// overlap, i.e., by choosing a class where we can find "offset + 1 == 3".
857     /// Put differently we align V2's sub3 with V1's sub1:
858     /// V2: sub0 sub1 sub2 sub3
859     /// V1: <offset>  sub0 sub1
860     ///
    /// This offset will look like a composed subregidx in the class:
    ///     V1.sub1:<2 x s32> = COPY V2.sub3:<4 x s32>
    /// =>  V1.(composed sub2 with sub1):<4 x s32> = COPY V2.sub3:<4 x s32>
864     ///
865     /// Now if we didn't rewrite the uses and def of V1, all the checks for V1
866     /// need to account for this offset.
    /// This happens during coalescing where we update the live-ranges while
    /// still having the old IR around because updating the IR on-the-fly
    /// would actually clobber some information about what the live-ranges
    /// that are being updated look like.
871     void refineSubRanges(BumpPtrAllocator &Allocator, LaneBitmask LaneMask,
872                          std::function<void(LiveInterval::SubRange &)> Apply,
873                          const SlotIndexes &Indexes,
874                          const TargetRegisterInfo &TRI,
875                          unsigned ComposeSubRegIdx = 0);
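
    // A sketch of a typical call (assumes LI.hasSubRanges() is true, plus a
    // BumpPtrAllocator 'Allocator', the SlotIndexes and TargetRegisterInfo
    // analyses 'Indexes' and 'TRI', a lane mask 'Mask', a VNInfo::Allocator
    // 'VNIAlloc', and a hypothetical def position 'DefIdx'):
    //
    //   LI.refineSubRanges(Allocator, Mask,
    //                      [&](LiveInterval::SubRange &SR) {
    //                        SR.createDeadDef(DefIdx, VNIAlloc);
    //                      },
    //                      Indexes, TRI);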
876 
877     bool operator<(const LiveInterval& other) const {
878       const SlotIndex &thisIndex = beginIndex();
879       const SlotIndex &otherIndex = other.beginIndex();
880       return std::tie(thisIndex, Reg) < std::tie(otherIndex, other.Reg);
881     }
882 
883     void print(raw_ostream &OS) const;
884     void dump() const;
885 
    /// Walks the interval and asserts if any invariants fail to hold.
887     ///
888     /// Note that this is a no-op when asserts are disabled.
889 #ifdef NDEBUG
890     void verify(const MachineRegisterInfo *MRI = nullptr) const {}
891 #else
892     void verify(const MachineRegisterInfo *MRI = nullptr) const;
893 #endif
894 
895   private:
896     /// Appends @p Range to SubRanges list.
897     void appendSubRange(SubRange *Range) {
898       Range->Next = SubRanges;
899       SubRanges = Range;
900     }
901 
902     /// Free memory held by SubRange.
903     void freeSubRange(SubRange *S);
904   };
905 
906   inline raw_ostream &operator<<(raw_ostream &OS,
907                                  const LiveInterval::SubRange &SR) {
908     SR.print(OS);
909     return OS;
910   }
911 
912   inline raw_ostream &operator<<(raw_ostream &OS, const LiveInterval &LI) {
913     LI.print(OS);
914     return OS;
915   }
916 
917   raw_ostream &operator<<(raw_ostream &OS, const LiveRange::Segment &S);
918 
919   inline bool operator<(SlotIndex V, const LiveRange::Segment &S) {
920     return V < S.start;
921   }
922 
923   inline bool operator<(const LiveRange::Segment &S, SlotIndex V) {
924     return S.start < V;
925   }
926 
927   /// Helper class for performant LiveRange bulk updates.
928   ///
929   /// Calling LiveRange::addSegment() repeatedly can be expensive on large
930   /// live ranges because segments after the insertion point may need to be
931   /// shifted. The LiveRangeUpdater class can defer the shifting when adding
932   /// many segments in order.
933   ///
934   /// The LiveRange will be in an invalid state until flush() is called.
935   class LiveRangeUpdater {
936     LiveRange *LR;
937     SlotIndex LastStart;
938     LiveRange::iterator WriteI;
939     LiveRange::iterator ReadI;
940     SmallVector<LiveRange::Segment, 16> Spills;
941     void mergeSpills();
942 
943   public:
944     /// Create a LiveRangeUpdater for adding segments to LR.
945     /// LR will temporarily be in an invalid state until flush() is called.
946     LiveRangeUpdater(LiveRange *lr = nullptr) : LR(lr) {}
947 
948     ~LiveRangeUpdater() { flush(); }
949 
950     /// Add a segment to LR and coalesce when possible, just like
951     /// LR.addSegment(). Segments should be added in increasing start order for
952     /// best performance.
953     void add(LiveRange::Segment);
954 
955     void add(SlotIndex Start, SlotIndex End, VNInfo *VNI) {
956       add(LiveRange::Segment(Start, End, VNI));
957     }
958 
959     /// Return true if the LR is currently in an invalid state, and flush()
960     /// needs to be called.
961     bool isDirty() const { return LastStart.isValid(); }
962 
963     /// Flush the updater state to LR so it is valid and contains all added
964     /// segments.
965     void flush();
966 
967     /// Select a different destination live range.
968     void setDest(LiveRange *lr) {
969       if (LR != lr && isDirty())
970         flush();
971       LR = lr;
972     }
973 
974     /// Get the current destination live range.
975     LiveRange *getDest() const { return LR; }
976 
977     void dump() const;
978     void print(raw_ostream&) const;
979   };
980 
981   inline raw_ostream &operator<<(raw_ostream &OS, const LiveRangeUpdater &X) {
982     X.print(OS);
983     return OS;
984   }
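
  // Usage sketch for LiveRangeUpdater (assumes a LiveRange 'LR', a VNInfo*
  // 'VNI', and a container 'NewSegments' of (start, end) SlotIndex pairs in
  // increasing start order):
  //
  //   LiveRangeUpdater Updater(&LR);
  //   for (const std::pair<SlotIndex, SlotIndex> &P : NewSegments)
  //     Updater.add(P.first, P.second, VNI);
  //   Updater.flush(); // The destructor would also flush.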
985 
986   /// ConnectedVNInfoEqClasses - Helper class that can divide VNInfos in a
  /// LiveInterval into equivalence classes of connected components. A
988   /// LiveInterval that has multiple connected components can be broken into
989   /// multiple LiveIntervals.
990   ///
991   /// Given a LiveInterval that may have multiple connected components, run:
992   ///
  ///   unsigned numComps = ConEQ.Classify(LI);
  ///   if (numComps > 1) {
  ///     // allocate numComps-1 new LiveIntervals into LIV[1..]
  ///     ConEQ.Distribute(LI, LIV, MRI);
  ///   }
998 
999   class ConnectedVNInfoEqClasses {
1000     LiveIntervals &LIS;
1001     IntEqClasses EqClass;
1002 
1003   public:
1004     explicit ConnectedVNInfoEqClasses(LiveIntervals &lis) : LIS(lis) {}
1005 
1006     /// Classify the values in \p LR into connected components.
1007     /// Returns the number of connected components.
1008     unsigned Classify(const LiveRange &LR);
1009 
1010     /// getEqClass - Classify creates equivalence classes numbered 0..N. Return
    /// the equivalence class assigned to the VNI.
1012     unsigned getEqClass(const VNInfo *VNI) const { return EqClass[VNI->id]; }
1013 
    /// Distribute values in \p LI into a separate LiveInterval
1015     /// for each connected component. LIV must have an empty LiveInterval for
1016     /// each additional connected component. The first connected component is
1017     /// left in \p LI.
1018     void Distribute(LiveInterval &LI, LiveInterval *LIV[],
1019                     MachineRegisterInfo &MRI);
1020   };
1021 
1022 } // end namespace llvm
1023 
1024 #endif // LLVM_CODEGEN_LIVEINTERVAL_H
1025