//===- llvm/ADT/CoalescingBitVector.h - A coalescing bitvector --*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
///
/// \file A bitvector that uses an IntervalMap to coalesce adjacent elements
/// into intervals.
///
//===----------------------------------------------------------------------===//

#ifndef LLVM_ADT_COALESCINGBITVECTOR_H
#define LLVM_ADT_COALESCINGBITVECTOR_H

#include "llvm/ADT/IntervalMap.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"

#include <algorithm>
#include <cassert>
#include <cstddef>
#include <initializer_list>
#include <tuple>
#include <utility>

26*da58b97aSjoerg namespace llvm {
27*da58b97aSjoerg 
28*da58b97aSjoerg /// A bitvector that, under the hood, relies on an IntervalMap to coalesce
29*da58b97aSjoerg /// elements into intervals. Good for representing sets which predominantly
30*da58b97aSjoerg /// contain contiguous ranges. Bad for representing sets with lots of gaps
31*da58b97aSjoerg /// between elements.
32*da58b97aSjoerg ///
33*da58b97aSjoerg /// Compared to SparseBitVector, CoalescingBitVector offers more predictable
34*da58b97aSjoerg /// performance for non-sequential find() operations.
35*da58b97aSjoerg ///
36*da58b97aSjoerg /// \tparam IndexT - The type of the index into the bitvector.
37*da58b97aSjoerg template <typename IndexT> class CoalescingBitVector {
38*da58b97aSjoerg   static_assert(std::is_unsigned<IndexT>::value,
39*da58b97aSjoerg                 "Index must be an unsigned integer.");
40*da58b97aSjoerg 
41*da58b97aSjoerg   using ThisT = CoalescingBitVector<IndexT>;
42*da58b97aSjoerg 
43*da58b97aSjoerg   /// An interval map for closed integer ranges. The mapped values are unused.
44*da58b97aSjoerg   using MapT = IntervalMap<IndexT, char>;
45*da58b97aSjoerg 
46*da58b97aSjoerg   using UnderlyingIterator = typename MapT::const_iterator;
47*da58b97aSjoerg 
48*da58b97aSjoerg   using IntervalT = std::pair<IndexT, IndexT>;
49*da58b97aSjoerg 
50*da58b97aSjoerg public:
51*da58b97aSjoerg   using Allocator = typename MapT::Allocator;
52*da58b97aSjoerg 
53*da58b97aSjoerg   /// Construct by passing in a CoalescingBitVector<IndexT>::Allocator
54*da58b97aSjoerg   /// reference.
CoalescingBitVector(Allocator & Alloc)55*da58b97aSjoerg   CoalescingBitVector(Allocator &Alloc)
56*da58b97aSjoerg       : Alloc(&Alloc), Intervals(Alloc) {}
57*da58b97aSjoerg 
58*da58b97aSjoerg   /// \name Copy/move constructors and assignment operators.
59*da58b97aSjoerg   /// @{
60*da58b97aSjoerg 
CoalescingBitVector(const ThisT & Other)61*da58b97aSjoerg   CoalescingBitVector(const ThisT &Other)
62*da58b97aSjoerg       : Alloc(Other.Alloc), Intervals(*Other.Alloc) {
63*da58b97aSjoerg     set(Other);
64*da58b97aSjoerg   }
65*da58b97aSjoerg 
66*da58b97aSjoerg   ThisT &operator=(const ThisT &Other) {
67*da58b97aSjoerg     clear();
68*da58b97aSjoerg     set(Other);
69*da58b97aSjoerg     return *this;
70*da58b97aSjoerg   }
71*da58b97aSjoerg 
72*da58b97aSjoerg   CoalescingBitVector(ThisT &&Other) = delete;
73*da58b97aSjoerg   ThisT &operator=(ThisT &&Other) = delete;
74*da58b97aSjoerg 
75*da58b97aSjoerg   /// @}
76*da58b97aSjoerg 
77*da58b97aSjoerg   /// Clear all the bits.
clear()78*da58b97aSjoerg   void clear() { Intervals.clear(); }
79*da58b97aSjoerg 
80*da58b97aSjoerg   /// Check whether no bits are set.
empty()81*da58b97aSjoerg   bool empty() const { return Intervals.empty(); }
82*da58b97aSjoerg 
83*da58b97aSjoerg   /// Count the number of set bits.
count()84*da58b97aSjoerg   unsigned count() const {
85*da58b97aSjoerg     unsigned Bits = 0;
86*da58b97aSjoerg     for (auto It = Intervals.begin(), End = Intervals.end(); It != End; ++It)
87*da58b97aSjoerg       Bits += 1 + It.stop() - It.start();
88*da58b97aSjoerg     return Bits;
89*da58b97aSjoerg   }
90*da58b97aSjoerg 
91*da58b97aSjoerg   /// Set the bit at \p Index.
92*da58b97aSjoerg   ///
93*da58b97aSjoerg   /// This method does /not/ support setting a bit that has already been set,
94*da58b97aSjoerg   /// for efficiency reasons. If possible, restructure your code to not set the
95*da58b97aSjoerg   /// same bit multiple times, or use \ref test_and_set.
set(IndexT Index)96*da58b97aSjoerg   void set(IndexT Index) {
97*da58b97aSjoerg     assert(!test(Index) && "Setting already-set bits not supported/efficient, "
98*da58b97aSjoerg                            "IntervalMap will assert");
99*da58b97aSjoerg     insert(Index, Index);
100*da58b97aSjoerg   }
101*da58b97aSjoerg 
102*da58b97aSjoerg   /// Set the bits set in \p Other.
103*da58b97aSjoerg   ///
104*da58b97aSjoerg   /// This method does /not/ support setting already-set bits, see \ref set
105*da58b97aSjoerg   /// for the rationale. For a safe set union operation, use \ref operator|=.
set(const ThisT & Other)106*da58b97aSjoerg   void set(const ThisT &Other) {
107*da58b97aSjoerg     for (auto It = Other.Intervals.begin(), End = Other.Intervals.end();
108*da58b97aSjoerg          It != End; ++It)
109*da58b97aSjoerg       insert(It.start(), It.stop());
110*da58b97aSjoerg   }
111*da58b97aSjoerg 
112*da58b97aSjoerg   /// Set the bits at \p Indices. Used for testing, primarily.
set(std::initializer_list<IndexT> Indices)113*da58b97aSjoerg   void set(std::initializer_list<IndexT> Indices) {
114*da58b97aSjoerg     for (IndexT Index : Indices)
115*da58b97aSjoerg       set(Index);
116*da58b97aSjoerg   }
117*da58b97aSjoerg 
118*da58b97aSjoerg   /// Check whether the bit at \p Index is set.
test(IndexT Index)119*da58b97aSjoerg   bool test(IndexT Index) const {
120*da58b97aSjoerg     const auto It = Intervals.find(Index);
121*da58b97aSjoerg     if (It == Intervals.end())
122*da58b97aSjoerg       return false;
123*da58b97aSjoerg     assert(It.stop() >= Index && "Interval must end after Index");
124*da58b97aSjoerg     return It.start() <= Index;
125*da58b97aSjoerg   }
126*da58b97aSjoerg 
127*da58b97aSjoerg   /// Set the bit at \p Index. Supports setting an already-set bit.
test_and_set(IndexT Index)128*da58b97aSjoerg   void test_and_set(IndexT Index) {
129*da58b97aSjoerg     if (!test(Index))
130*da58b97aSjoerg       set(Index);
131*da58b97aSjoerg   }
132*da58b97aSjoerg 
133*da58b97aSjoerg   /// Reset the bit at \p Index. Supports resetting an already-unset bit.
reset(IndexT Index)134*da58b97aSjoerg   void reset(IndexT Index) {
135*da58b97aSjoerg     auto It = Intervals.find(Index);
136*da58b97aSjoerg     if (It == Intervals.end())
137*da58b97aSjoerg       return;
138*da58b97aSjoerg 
139*da58b97aSjoerg     // Split the interval containing Index into up to two parts: one from
140*da58b97aSjoerg     // [Start, Index-1] and another from [Index+1, Stop]. If Index is equal to
141*da58b97aSjoerg     // either Start or Stop, we create one new interval. If Index is equal to
142*da58b97aSjoerg     // both Start and Stop, we simply erase the existing interval.
143*da58b97aSjoerg     IndexT Start = It.start();
144*da58b97aSjoerg     if (Index < Start)
145*da58b97aSjoerg       // The index was not set.
146*da58b97aSjoerg       return;
147*da58b97aSjoerg     IndexT Stop = It.stop();
148*da58b97aSjoerg     assert(Index <= Stop && "Wrong interval for index");
149*da58b97aSjoerg     It.erase();
150*da58b97aSjoerg     if (Start < Index)
151*da58b97aSjoerg       insert(Start, Index - 1);
152*da58b97aSjoerg     if (Index < Stop)
153*da58b97aSjoerg       insert(Index + 1, Stop);
154*da58b97aSjoerg   }
155*da58b97aSjoerg 
156*da58b97aSjoerg   /// Set union. If \p RHS is guaranteed to not overlap with this, \ref set may
157*da58b97aSjoerg   /// be a faster alternative.
158*da58b97aSjoerg   void operator|=(const ThisT &RHS) {
159*da58b97aSjoerg     // Get the overlaps between the two interval maps.
160*da58b97aSjoerg     SmallVector<IntervalT, 8> Overlaps;
161*da58b97aSjoerg     getOverlaps(RHS, Overlaps);
162*da58b97aSjoerg 
163*da58b97aSjoerg     // Insert the non-overlapping parts of all the intervals from RHS.
164*da58b97aSjoerg     for (auto It = RHS.Intervals.begin(), End = RHS.Intervals.end();
165*da58b97aSjoerg          It != End; ++It) {
166*da58b97aSjoerg       IndexT Start = It.start();
167*da58b97aSjoerg       IndexT Stop = It.stop();
168*da58b97aSjoerg       SmallVector<IntervalT, 8> NonOverlappingParts;
169*da58b97aSjoerg       getNonOverlappingParts(Start, Stop, Overlaps, NonOverlappingParts);
170*da58b97aSjoerg       for (IntervalT AdditivePortion : NonOverlappingParts)
171*da58b97aSjoerg         insert(AdditivePortion.first, AdditivePortion.second);
172*da58b97aSjoerg     }
173*da58b97aSjoerg   }
174*da58b97aSjoerg 
175*da58b97aSjoerg   /// Set intersection.
176*da58b97aSjoerg   void operator&=(const ThisT &RHS) {
177*da58b97aSjoerg     // Get the overlaps between the two interval maps (i.e. the intersection).
178*da58b97aSjoerg     SmallVector<IntervalT, 8> Overlaps;
179*da58b97aSjoerg     getOverlaps(RHS, Overlaps);
180*da58b97aSjoerg     // Rebuild the interval map, including only the overlaps.
181*da58b97aSjoerg     clear();
182*da58b97aSjoerg     for (IntervalT Overlap : Overlaps)
183*da58b97aSjoerg       insert(Overlap.first, Overlap.second);
184*da58b97aSjoerg   }
185*da58b97aSjoerg 
186*da58b97aSjoerg   /// Reset all bits present in \p Other.
intersectWithComplement(const ThisT & Other)187*da58b97aSjoerg   void intersectWithComplement(const ThisT &Other) {
188*da58b97aSjoerg     SmallVector<IntervalT, 8> Overlaps;
189*da58b97aSjoerg     if (!getOverlaps(Other, Overlaps)) {
190*da58b97aSjoerg       // If there is no overlap with Other, the intersection is empty.
191*da58b97aSjoerg       return;
192*da58b97aSjoerg     }
193*da58b97aSjoerg 
194*da58b97aSjoerg     // Delete the overlapping intervals. Split up intervals that only partially
195*da58b97aSjoerg     // intersect an overlap.
196*da58b97aSjoerg     for (IntervalT Overlap : Overlaps) {
197*da58b97aSjoerg       IndexT OlapStart, OlapStop;
198*da58b97aSjoerg       std::tie(OlapStart, OlapStop) = Overlap;
199*da58b97aSjoerg 
200*da58b97aSjoerg       auto It = Intervals.find(OlapStart);
201*da58b97aSjoerg       IndexT CurrStart = It.start();
202*da58b97aSjoerg       IndexT CurrStop = It.stop();
203*da58b97aSjoerg       assert(CurrStart <= OlapStart && OlapStop <= CurrStop &&
204*da58b97aSjoerg              "Expected some intersection!");
205*da58b97aSjoerg 
206*da58b97aSjoerg       // Split the overlap interval into up to two parts: one from [CurrStart,
207*da58b97aSjoerg       // OlapStart-1] and another from [OlapStop+1, CurrStop]. If OlapStart is
208*da58b97aSjoerg       // equal to CurrStart, the first split interval is unnecessary. Ditto for
209*da58b97aSjoerg       // when OlapStop is equal to CurrStop, we omit the second split interval.
210*da58b97aSjoerg       It.erase();
211*da58b97aSjoerg       if (CurrStart < OlapStart)
212*da58b97aSjoerg         insert(CurrStart, OlapStart - 1);
213*da58b97aSjoerg       if (OlapStop < CurrStop)
214*da58b97aSjoerg         insert(OlapStop + 1, CurrStop);
215*da58b97aSjoerg     }
216*da58b97aSjoerg   }
217*da58b97aSjoerg 
218*da58b97aSjoerg   bool operator==(const ThisT &RHS) const {
219*da58b97aSjoerg     // We cannot just use std::equal because it checks the dereferenced values
220*da58b97aSjoerg     // of an iterator pair for equality, not the iterators themselves. In our
221*da58b97aSjoerg     // case that results in comparison of the (unused) IntervalMap values.
222*da58b97aSjoerg     auto ItL = Intervals.begin();
223*da58b97aSjoerg     auto ItR = RHS.Intervals.begin();
224*da58b97aSjoerg     while (ItL != Intervals.end() && ItR != RHS.Intervals.end() &&
225*da58b97aSjoerg            ItL.start() == ItR.start() && ItL.stop() == ItR.stop()) {
226*da58b97aSjoerg       ++ItL;
227*da58b97aSjoerg       ++ItR;
228*da58b97aSjoerg     }
229*da58b97aSjoerg     return ItL == Intervals.end() && ItR == RHS.Intervals.end();
230*da58b97aSjoerg   }
231*da58b97aSjoerg 
232*da58b97aSjoerg   bool operator!=(const ThisT &RHS) const { return !operator==(RHS); }
233*da58b97aSjoerg 
234*da58b97aSjoerg   class const_iterator {
235*da58b97aSjoerg     friend class CoalescingBitVector;
236*da58b97aSjoerg 
237*da58b97aSjoerg   public:
238*da58b97aSjoerg     using iterator_category = std::forward_iterator_tag;
239*da58b97aSjoerg     using value_type = IndexT;
240*da58b97aSjoerg     using difference_type = std::ptrdiff_t;
241*da58b97aSjoerg     using pointer = value_type *;
242*da58b97aSjoerg     using reference = value_type &;
243*da58b97aSjoerg 
244*da58b97aSjoerg   private:
245*da58b97aSjoerg     // For performance reasons, make the offset at the end different than the
246*da58b97aSjoerg     // one used in \ref begin, to optimize the common `It == end()` pattern.
247*da58b97aSjoerg     static constexpr unsigned kIteratorAtTheEndOffset = ~0u;
248*da58b97aSjoerg 
249*da58b97aSjoerg     UnderlyingIterator MapIterator;
250*da58b97aSjoerg     unsigned OffsetIntoMapIterator = 0;
251*da58b97aSjoerg 
252*da58b97aSjoerg     // Querying the start/stop of an IntervalMap iterator can be very expensive.
253*da58b97aSjoerg     // Cache these values for performance reasons.
254*da58b97aSjoerg     IndexT CachedStart = IndexT();
255*da58b97aSjoerg     IndexT CachedStop = IndexT();
256*da58b97aSjoerg 
setToEnd()257*da58b97aSjoerg     void setToEnd() {
258*da58b97aSjoerg       OffsetIntoMapIterator = kIteratorAtTheEndOffset;
259*da58b97aSjoerg       CachedStart = IndexT();
260*da58b97aSjoerg       CachedStop = IndexT();
261*da58b97aSjoerg     }
262*da58b97aSjoerg 
263*da58b97aSjoerg     /// MapIterator has just changed, reset the cached state to point to the
264*da58b97aSjoerg     /// start of the new underlying iterator.
resetCache()265*da58b97aSjoerg     void resetCache() {
266*da58b97aSjoerg       if (MapIterator.valid()) {
267*da58b97aSjoerg         OffsetIntoMapIterator = 0;
268*da58b97aSjoerg         CachedStart = MapIterator.start();
269*da58b97aSjoerg         CachedStop = MapIterator.stop();
270*da58b97aSjoerg       } else {
271*da58b97aSjoerg         setToEnd();
272*da58b97aSjoerg       }
273*da58b97aSjoerg     }
274*da58b97aSjoerg 
275*da58b97aSjoerg     /// Advance the iterator to \p Index, if it is contained within the current
276*da58b97aSjoerg     /// interval. The public-facing method which supports advancing past the
277*da58b97aSjoerg     /// current interval is \ref advanceToLowerBound.
advanceTo(IndexT Index)278*da58b97aSjoerg     void advanceTo(IndexT Index) {
279*da58b97aSjoerg       assert(Index <= CachedStop && "Cannot advance to OOB index");
280*da58b97aSjoerg       if (Index < CachedStart)
281*da58b97aSjoerg         // We're already past this index.
282*da58b97aSjoerg         return;
283*da58b97aSjoerg       OffsetIntoMapIterator = Index - CachedStart;
284*da58b97aSjoerg     }
285*da58b97aSjoerg 
const_iterator(UnderlyingIterator MapIt)286*da58b97aSjoerg     const_iterator(UnderlyingIterator MapIt) : MapIterator(MapIt) {
287*da58b97aSjoerg       resetCache();
288*da58b97aSjoerg     }
289*da58b97aSjoerg 
290*da58b97aSjoerg   public:
const_iterator()291*da58b97aSjoerg     const_iterator() { setToEnd(); }
292*da58b97aSjoerg 
293*da58b97aSjoerg     bool operator==(const const_iterator &RHS) const {
294*da58b97aSjoerg       // Do /not/ compare MapIterator for equality, as this is very expensive.
295*da58b97aSjoerg       // The cached start/stop values make that check unnecessary.
296*da58b97aSjoerg       return std::tie(OffsetIntoMapIterator, CachedStart, CachedStop) ==
297*da58b97aSjoerg              std::tie(RHS.OffsetIntoMapIterator, RHS.CachedStart,
298*da58b97aSjoerg                       RHS.CachedStop);
299*da58b97aSjoerg     }
300*da58b97aSjoerg 
301*da58b97aSjoerg     bool operator!=(const const_iterator &RHS) const {
302*da58b97aSjoerg       return !operator==(RHS);
303*da58b97aSjoerg     }
304*da58b97aSjoerg 
305*da58b97aSjoerg     IndexT operator*() const { return CachedStart + OffsetIntoMapIterator; }
306*da58b97aSjoerg 
307*da58b97aSjoerg     const_iterator &operator++() { // Pre-increment (++It).
308*da58b97aSjoerg       if (CachedStart + OffsetIntoMapIterator < CachedStop) {
309*da58b97aSjoerg         // Keep going within the current interval.
310*da58b97aSjoerg         ++OffsetIntoMapIterator;
311*da58b97aSjoerg       } else {
312*da58b97aSjoerg         // We reached the end of the current interval: advance.
313*da58b97aSjoerg         ++MapIterator;
314*da58b97aSjoerg         resetCache();
315*da58b97aSjoerg       }
316*da58b97aSjoerg       return *this;
317*da58b97aSjoerg     }
318*da58b97aSjoerg 
319*da58b97aSjoerg     const_iterator operator++(int) { // Post-increment (It++).
320*da58b97aSjoerg       const_iterator tmp = *this;
321*da58b97aSjoerg       operator++();
322*da58b97aSjoerg       return tmp;
323*da58b97aSjoerg     }
324*da58b97aSjoerg 
325*da58b97aSjoerg     /// Advance the iterator to the first set bit AT, OR AFTER, \p Index. If
326*da58b97aSjoerg     /// no such set bit exists, advance to end(). This is like std::lower_bound.
327*da58b97aSjoerg     /// This is useful if \p Index is close to the current iterator position.
328*da58b97aSjoerg     /// However, unlike \ref find(), this has worst-case O(n) performance.
advanceToLowerBound(IndexT Index)329*da58b97aSjoerg     void advanceToLowerBound(IndexT Index) {
330*da58b97aSjoerg       if (OffsetIntoMapIterator == kIteratorAtTheEndOffset)
331*da58b97aSjoerg         return;
332*da58b97aSjoerg 
333*da58b97aSjoerg       // Advance to the first interval containing (or past) Index, or to end().
334*da58b97aSjoerg       while (Index > CachedStop) {
335*da58b97aSjoerg         ++MapIterator;
336*da58b97aSjoerg         resetCache();
337*da58b97aSjoerg         if (OffsetIntoMapIterator == kIteratorAtTheEndOffset)
338*da58b97aSjoerg           return;
339*da58b97aSjoerg       }
340*da58b97aSjoerg 
341*da58b97aSjoerg       advanceTo(Index);
342*da58b97aSjoerg     }
343*da58b97aSjoerg   };
344*da58b97aSjoerg 
begin()345*da58b97aSjoerg   const_iterator begin() const { return const_iterator(Intervals.begin()); }
346*da58b97aSjoerg 
end()347*da58b97aSjoerg   const_iterator end() const { return const_iterator(); }
348*da58b97aSjoerg 
349*da58b97aSjoerg   /// Return an iterator pointing to the first set bit AT, OR AFTER, \p Index.
350*da58b97aSjoerg   /// If no such set bit exists, return end(). This is like std::lower_bound.
351*da58b97aSjoerg   /// This has worst-case logarithmic performance (roughly O(log(gaps between
352*da58b97aSjoerg   /// contiguous ranges))).
find(IndexT Index)353*da58b97aSjoerg   const_iterator find(IndexT Index) const {
354*da58b97aSjoerg     auto UnderlyingIt = Intervals.find(Index);
355*da58b97aSjoerg     if (UnderlyingIt == Intervals.end())
356*da58b97aSjoerg       return end();
357*da58b97aSjoerg     auto It = const_iterator(UnderlyingIt);
358*da58b97aSjoerg     It.advanceTo(Index);
359*da58b97aSjoerg     return It;
360*da58b97aSjoerg   }
361*da58b97aSjoerg 
362*da58b97aSjoerg   /// Return a range iterator which iterates over all of the set bits in the
363*da58b97aSjoerg   /// half-open range [Start, End).
half_open_range(IndexT Start,IndexT End)364*da58b97aSjoerg   iterator_range<const_iterator> half_open_range(IndexT Start,
365*da58b97aSjoerg                                                  IndexT End) const {
366*da58b97aSjoerg     assert(Start < End && "Not a valid range");
367*da58b97aSjoerg     auto StartIt = find(Start);
368*da58b97aSjoerg     if (StartIt == end() || *StartIt >= End)
369*da58b97aSjoerg       return {end(), end()};
370*da58b97aSjoerg     auto EndIt = StartIt;
371*da58b97aSjoerg     EndIt.advanceToLowerBound(End);
372*da58b97aSjoerg     return {StartIt, EndIt};
373*da58b97aSjoerg   }
374*da58b97aSjoerg 
print(raw_ostream & OS)375*da58b97aSjoerg   void print(raw_ostream &OS) const {
376*da58b97aSjoerg     OS << "{";
377*da58b97aSjoerg     for (auto It = Intervals.begin(), End = Intervals.end(); It != End;
378*da58b97aSjoerg          ++It) {
379*da58b97aSjoerg       OS << "[" << It.start();
380*da58b97aSjoerg       if (It.start() != It.stop())
381*da58b97aSjoerg         OS << ", " << It.stop();
382*da58b97aSjoerg       OS << "]";
383*da58b97aSjoerg     }
384*da58b97aSjoerg     OS << "}";
385*da58b97aSjoerg   }
386*da58b97aSjoerg 
387*da58b97aSjoerg #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
dump()388*da58b97aSjoerg   LLVM_DUMP_METHOD void dump() const {
389*da58b97aSjoerg     // LLDB swallows the first line of output after callling dump(). Add
390*da58b97aSjoerg     // newlines before/after the braces to work around this.
391*da58b97aSjoerg     dbgs() << "\n";
392*da58b97aSjoerg     print(dbgs());
393*da58b97aSjoerg     dbgs() << "\n";
394*da58b97aSjoerg   }
395*da58b97aSjoerg #endif
396*da58b97aSjoerg 
397*da58b97aSjoerg private:
insert(IndexT Start,IndexT End)398*da58b97aSjoerg   void insert(IndexT Start, IndexT End) { Intervals.insert(Start, End, 0); }
399*da58b97aSjoerg 
400*da58b97aSjoerg   /// Record the overlaps between \p this and \p Other in \p Overlaps. Return
401*da58b97aSjoerg   /// true if there is any overlap.
getOverlaps(const ThisT & Other,SmallVectorImpl<IntervalT> & Overlaps)402*da58b97aSjoerg   bool getOverlaps(const ThisT &Other,
403*da58b97aSjoerg                    SmallVectorImpl<IntervalT> &Overlaps) const {
404*da58b97aSjoerg     for (IntervalMapOverlaps<MapT, MapT> I(Intervals, Other.Intervals);
405*da58b97aSjoerg          I.valid(); ++I)
406*da58b97aSjoerg       Overlaps.emplace_back(I.start(), I.stop());
407*da58b97aSjoerg     assert(llvm::is_sorted(Overlaps,
408*da58b97aSjoerg                            [](IntervalT LHS, IntervalT RHS) {
409*da58b97aSjoerg                              return LHS.second < RHS.first;
410*da58b97aSjoerg                            }) &&
411*da58b97aSjoerg            "Overlaps must be sorted");
412*da58b97aSjoerg     return !Overlaps.empty();
413*da58b97aSjoerg   }
414*da58b97aSjoerg 
415*da58b97aSjoerg   /// Given the set of overlaps between this and some other bitvector, and an
416*da58b97aSjoerg   /// interval [Start, Stop] from that bitvector, determine the portions of the
417*da58b97aSjoerg   /// interval which do not overlap with this.
getNonOverlappingParts(IndexT Start,IndexT Stop,const SmallVectorImpl<IntervalT> & Overlaps,SmallVectorImpl<IntervalT> & NonOverlappingParts)418*da58b97aSjoerg   void getNonOverlappingParts(IndexT Start, IndexT Stop,
419*da58b97aSjoerg                               const SmallVectorImpl<IntervalT> &Overlaps,
420*da58b97aSjoerg                               SmallVectorImpl<IntervalT> &NonOverlappingParts) {
421*da58b97aSjoerg     IndexT NextUncoveredBit = Start;
422*da58b97aSjoerg     for (IntervalT Overlap : Overlaps) {
423*da58b97aSjoerg       IndexT OlapStart, OlapStop;
424*da58b97aSjoerg       std::tie(OlapStart, OlapStop) = Overlap;
425*da58b97aSjoerg 
426*da58b97aSjoerg       // [Start;Stop] and [OlapStart;OlapStop] overlap iff OlapStart <= Stop
427*da58b97aSjoerg       // and Start <= OlapStop.
428*da58b97aSjoerg       bool DoesOverlap = OlapStart <= Stop && Start <= OlapStop;
429*da58b97aSjoerg       if (!DoesOverlap)
430*da58b97aSjoerg         continue;
431*da58b97aSjoerg 
432*da58b97aSjoerg       // Cover the range [NextUncoveredBit, OlapStart). This puts the start of
433*da58b97aSjoerg       // the next uncovered range at OlapStop+1.
434*da58b97aSjoerg       if (NextUncoveredBit < OlapStart)
435*da58b97aSjoerg         NonOverlappingParts.emplace_back(NextUncoveredBit, OlapStart - 1);
436*da58b97aSjoerg       NextUncoveredBit = OlapStop + 1;
437*da58b97aSjoerg       if (NextUncoveredBit > Stop)
438*da58b97aSjoerg         break;
439*da58b97aSjoerg     }
440*da58b97aSjoerg     if (NextUncoveredBit <= Stop)
441*da58b97aSjoerg       NonOverlappingParts.emplace_back(NextUncoveredBit, Stop);
442*da58b97aSjoerg   }
443*da58b97aSjoerg 
444*da58b97aSjoerg   Allocator *Alloc;
445*da58b97aSjoerg   MapT Intervals;
446*da58b97aSjoerg };
447*da58b97aSjoerg 
448*da58b97aSjoerg } // namespace llvm
449*da58b97aSjoerg 
450*da58b97aSjoerg #endif // LLVM_ADT_COALESCINGBITVECTOR_H
451