//===-- RangeMap.h ----------------------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef LLDB_UTILITY_RANGEMAP_H
#define LLDB_UTILITY_RANGEMAP_H

#include <algorithm>
#include <cassert>
#include <cstdint>
#include <functional>
#include <iterator>
#include <vector>

#include "llvm/ADT/SmallVector.h"

#include "lldb/lldb-private.h"

// Uncomment to make sure all Range objects are sorted when needed
//#define ASSERT_RANGEMAP_ARE_SORTED

namespace lldb_private {

// Templatized classes for dealing with generic ranges, collections of
// ranges, and collections of ranges that have associated data.

// A simple range class where you get to define the type of the range
// base "B" and the type used for the range byte size "S".
template <typename B, typename S> struct Range {
  typedef B BaseType;
  typedef S SizeType;

  BaseType base;
  SizeType size;

  Range() : base(0), size(0) {}

  Range(BaseType b, SizeType s) : base(b), size(s) {}

  void Clear(BaseType b = 0) {
    base = b;
    size = 0;
  }

  BaseType GetRangeBase() const { return base; }

  // Set the start value for the range, keeping the same size.
  void SetRangeBase(BaseType b) { base = b; }

  void Slide(BaseType slide) { base += slide; }

  bool Union(const Range &rhs) {
    if (DoesAdjoinOrIntersect(rhs)) {
      auto new_end = std::max<BaseType>(GetRangeEnd(), rhs.GetRangeEnd());
      base = std::min<BaseType>(base, rhs.base);
      size = new_end - base;
      return true;
    }
    return false;
  }

  Range Intersect(const Range &rhs) const {
    const BaseType lhs_base = this->GetRangeBase();
    const BaseType rhs_base = rhs.GetRangeBase();
    const BaseType lhs_end = this->GetRangeEnd();
    const BaseType rhs_end = rhs.GetRangeEnd();
    Range range;
    range.SetRangeBase(std::max(lhs_base, rhs_base));
    range.SetRangeEnd(std::min(lhs_end, rhs_end));
    return range;
  }

  BaseType GetRangeEnd() const { return base + size; }

  void SetRangeEnd(BaseType end) {
    if (end > base)
      size = end - base;
    else
      size = 0;
  }

  SizeType GetByteSize() const { return size; }

  void SetByteSize(SizeType s) { size = s; }

  bool IsValid() const { return size > 0; }

  bool Contains(BaseType r) const {
    return (GetRangeBase() <= r) && (r < GetRangeEnd());
  }

  bool ContainsEndInclusive(BaseType r) const {
    return (GetRangeBase() <= r) && (r <= GetRangeEnd());
  }

  bool Contains(const Range &range) const {
    return Contains(range.GetRangeBase()) &&
           ContainsEndInclusive(range.GetRangeEnd());
  }

  // Returns true if the two ranges adjoin or intersect.
  bool DoesAdjoinOrIntersect(const Range &rhs) const {
    const BaseType lhs_base = this->GetRangeBase();
    const BaseType rhs_base = rhs.GetRangeBase();
    const BaseType lhs_end = this->GetRangeEnd();
    const BaseType rhs_end = rhs.GetRangeEnd();
    bool result = (lhs_base <= rhs_end) && (lhs_end >= rhs_base);
    return result;
  }

  // Returns true if the two ranges intersect.
  bool DoesIntersect(const Range &rhs) const {
    return Intersect(rhs).IsValid();
  }

  bool operator<(const Range &rhs) const {
    if (base == rhs.base)
      return size < rhs.size;
    return base < rhs.base;
  }

  bool operator==(const Range &rhs) const {
    return base == rhs.base && size == rhs.size;
  }

  bool operator!=(const Range &rhs) const {
    return base != rhs.base || size != rhs.size;
  }
};
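
// Example usage of Range (an illustrative sketch; lldb::addr_t is a common
// choice for both template parameters):
//
//   Range<lldb::addr_t, lldb::addr_t> r1(0x1000, 0x100); // [0x1000, 0x1100)
//   Range<lldb::addr_t, lldb::addr_t> r2(0x1080, 0x100); // [0x1080, 0x1180)
//   r1.Contains(0x10ff);  // true
//   r1.Contains(0x1100);  // false: the end is exclusive
//   r1.Intersect(r2);     // returns [0x1080, 0x1100)
//   r1.Union(r2);         // returns true; r1 becomes [0x1000, 0x1180)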

template <typename B, typename S, unsigned N = 0> class RangeVector {
public:
  typedef B BaseType;
  typedef S SizeType;
  typedef Range<B, S> Entry;
  typedef llvm::SmallVector<Entry, N> Collection;

  RangeVector() = default;

  ~RangeVector() = default;

  // Return the ranges that are covered by both of the given sorted vectors,
  // i.e. the pairwise intersections of their entries.
  static RangeVector GetOverlaps(const RangeVector &vec1,
                                 const RangeVector &vec2) {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(vec1.IsSorted() && vec2.IsSorted());
#endif
    RangeVector result;
    auto pos1 = vec1.begin();
    auto end1 = vec1.end();
    auto pos2 = vec2.begin();
    auto end2 = vec2.end();
    while (pos1 != end1 && pos2 != end2) {
      Entry entry = pos1->Intersect(*pos2);
      if (entry.IsValid())
        result.Append(entry);
      if (pos1->GetRangeEnd() < pos2->GetRangeEnd())
        ++pos1;
      else
        ++pos2;
    }
    return result;
  }

  bool operator==(const RangeVector &rhs) const {
    if (GetSize() != rhs.GetSize())
      return false;
    for (size_t i = 0; i < GetSize(); ++i) {
      if (GetEntryRef(i) != rhs.GetEntryRef(i))
        return false;
    }
    return true;
  }

  void Append(const Entry &entry) { m_entries.push_back(entry); }

  void Append(B base, S size) { m_entries.emplace_back(base, size); }

  // Insert an item into the sorted list, optionally combining it with any
  // adjacent or overlapping entries.
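  // For example (illustrative values): with entries [0x1000, 0x1010) and
  // [0x1010, 0x1020) already present, Insert(Entry(0x1008, 0x10), true)
  // unions the new range into the entry at the insertion point and then
  // folds in the neighbor it now touches, leaving the single entry
  // [0x1000, 0x1020).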
  void Insert(const Entry &entry, bool combine) {
    if (m_entries.empty()) {
      m_entries.push_back(entry);
      return;
    }
    auto begin = m_entries.begin();
    auto end = m_entries.end();
    auto pos = std::lower_bound(begin, end, entry);
    if (combine) {
      if (pos != end && pos->Union(entry)) {
        CombinePrevAndNext(pos);
        return;
      }
      if (pos != begin) {
        auto prev = pos - 1;
        if (prev->Union(entry)) {
          CombinePrevAndNext(prev);
          return;
        }
      }
    }
    m_entries.insert(pos, entry);
  }

  bool RemoveEntryAtIndex(uint32_t idx) {
    if (idx < m_entries.size()) {
      m_entries.erase(m_entries.begin() + idx);
      return true;
    }
    return false;
  }

  void Sort() {
    if (m_entries.size() > 1)
      std::stable_sort(m_entries.begin(), m_entries.end());
  }

#ifdef ASSERT_RANGEMAP_ARE_SORTED
  bool IsSorted() const {
    typename Collection::const_iterator pos, end, prev;
    // Make sure no entry is less than the entry that precedes it.
    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
         prev = pos++) {
      if (prev != end && *pos < *prev)
        return false;
    }
    return true;
  }
#endif

  void CombineConsecutiveRanges() {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    auto first_intersect = std::adjacent_find(
        m_entries.begin(), m_entries.end(), [](const Entry &a, const Entry &b) {
          return a.DoesAdjoinOrIntersect(b);
        });
    if (first_intersect == m_entries.end())
      return;

    // If we can combine at least one entry, then we make a new collection
    // and populate it accordingly, and then swap it into place.
    auto pos = std::next(first_intersect);
    Collection minimal_ranges(m_entries.begin(), pos);
    for (; pos != m_entries.end(); ++pos) {
      Entry &back = minimal_ranges.back();
      if (back.DoesAdjoinOrIntersect(*pos))
        back.SetRangeEnd(std::max(back.GetRangeEnd(), pos->GetRangeEnd()));
      else
        minimal_ranges.push_back(*pos);
    }
    m_entries.swap(minimal_ranges);
  }

  BaseType GetMinRangeBase(BaseType fail_value) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (m_entries.empty())
      return fail_value;
    // m_entries must be sorted, so if we aren't empty, we grab the first
    // range's base
    return m_entries.front().GetRangeBase();
  }

  BaseType GetMaxRangeEnd(BaseType fail_value) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (m_entries.empty())
      return fail_value;
    // m_entries must be sorted, so if we aren't empty, we grab the last
    // range's end
    return m_entries.back().GetRangeEnd();
  }

  void Slide(BaseType slide) {
    typename Collection::iterator pos, end;
    for (pos = m_entries.begin(), end = m_entries.end(); pos != end; ++pos)
      pos->Slide(slide);
  }

  void Clear() { m_entries.clear(); }

  void Reserve(typename Collection::size_type size) { m_entries.reserve(size); }

  bool IsEmpty() const { return m_entries.empty(); }

  size_t GetSize() const { return m_entries.size(); }

  const Entry *GetEntryAtIndex(size_t i) const {
    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
  }

  // Clients must ensure that "i" is a valid index prior to calling this
  // function
  Entry &GetEntryRef(size_t i) { return m_entries[i]; }
  const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }

  Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }

  const Entry *Back() const {
    return (m_entries.empty() ? nullptr : &m_entries.back());
  }

  static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
    return lhs.GetRangeBase() < rhs.GetRangeBase();
  }

  uint32_t FindEntryIndexThatContains(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      Entry entry(addr, 1);
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, entry, BaseLessThan);

      if (pos != end && pos->Contains(addr)) {
        return std::distance(begin, pos);
      } else if (pos != begin) {
        --pos;
        if (pos->Contains(addr))
          return std::distance(begin, pos);
      }
    }
    return UINT32_MAX;
  }

  const Entry *FindEntryThatContains(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      Entry entry(addr, 1);
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, entry, BaseLessThan);

      if (pos != end && pos->Contains(addr)) {
        return &(*pos);
      } else if (pos != begin) {
        --pos;
        if (pos->Contains(addr)) {
          return &(*pos);
        }
      }
    }
    return nullptr;
  }

  const Entry *FindEntryThatContains(const Entry &range) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, range, BaseLessThan);

      if (pos != end && pos->Contains(range)) {
        return &(*pos);
      } else if (pos != begin) {
        --pos;
        if (pos->Contains(range)) {
          return &(*pos);
        }
      }
    }
    return nullptr;
  }

  using const_iterator = typename Collection::const_iterator;
  const_iterator begin() const { return m_entries.begin(); }
  const_iterator end() const { return m_entries.end(); }

protected:
  void CombinePrevAndNext(typename Collection::iterator pos) {
    // Check whether the previous or next entries need to be unioned with
    // the entry pointed to by "pos".
    if (pos != m_entries.begin()) {
      auto prev = pos - 1;
      if (prev->Union(*pos))
        m_entries.erase(pos);
      pos = prev;
    }

    auto end = m_entries.end();
    if (pos != end) {
      auto next = pos + 1;
      if (next != end) {
        if (pos->Union(*next))
          m_entries.erase(next);
      }
    }
  }

  Collection m_entries;
};
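
// Example usage of RangeVector (an illustrative sketch):
//
//   RangeVector<lldb::addr_t, lldb::addr_t> ranges;
//   ranges.Append(0x2000, 0x100);
//   ranges.Append(0x1000, 0x100);
//   ranges.Sort(); // Required before any of the binary-search queries.
//   const auto *entry = ranges.FindEntryThatContains(0x1080);
//   // entry points at [0x1000, 0x1100); addresses outside every range
//   // yield nullptr (or UINT32_MAX from FindEntryIndexThatContains).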

// A simple range-with-data class where you get to define the type of the
// range base "B", the type used for the range byte size "S", and the type
// for the associated data "T".
template <typename B, typename S, typename T>
struct RangeData : public Range<B, S> {
  typedef T DataType;

  DataType data;

  RangeData() : Range<B, S>(), data() {}

  RangeData(B base, S size) : Range<B, S>(base, size), data() {}

  RangeData(B base, S size, DataType d) : Range<B, S>(base, size), data(d) {}
};

// We can treat the sorted vector as a flattened binary search tree;
// augmenting every index with an upper bound (the maximum range endpoint
// within its subtree) lets us answer range-containment queries faster.
template <typename B, typename S, typename T>
struct AugmentedRangeData : public RangeData<B, S, T> {
  B upper_bound;

  AugmentedRangeData(const RangeData<B, S, T> &rd)
      : RangeData<B, S, T>(rd), upper_bound() {}
};
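
// For example (illustrative values), a sorted vector of three entries
//
//   index 0: [0x1000, 0x1100)  index 1: [0x1040, 0x1200)  index 2: [0x1100, 0x1180)
//
// is treated as a tree rooted at the middle index 1, with index 0 as its
// left child and index 2 as its right child. Each node's upper_bound holds
// the maximum GetRangeEnd() in its subtree (0x1200 at the root), so a
// lookup can skip any subtree whose upper_bound is below the address.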

template <typename B, typename S, typename T, unsigned N = 0,
          class Compare = std::less<T>>
class RangeDataVector {
public:
  typedef lldb_private::Range<B, S> Range;
  typedef RangeData<B, S, T> Entry;
  typedef AugmentedRangeData<B, S, T> AugmentedEntry;
  typedef llvm::SmallVector<AugmentedEntry, N> Collection;

  RangeDataVector(Compare compare = Compare()) : m_compare(compare) {}

  ~RangeDataVector() = default;

  void Append(const Entry &entry) { m_entries.emplace_back(entry); }

  void Sort() {
    if (m_entries.size() > 1)
      std::stable_sort(m_entries.begin(), m_entries.end(),
                       [&compare = m_compare](const Entry &a, const Entry &b) {
                         if (a.base != b.base)
                           return a.base < b.base;
                         if (a.size != b.size)
                           return a.size < b.size;
                         return compare(a.data, b.data);
                       });
    if (!m_entries.empty())
      ComputeUpperBounds(0, m_entries.size());
  }

#ifdef ASSERT_RANGEMAP_ARE_SORTED
  bool IsSorted() const {
    typename Collection::const_iterator pos, end, prev;
    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
         prev = pos++) {
      if (prev != end && *pos < *prev)
        return false;
    }
    return true;
  }
#endif

  void CombineConsecutiveEntriesWithEqualData() {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    typename Collection::iterator pos;
    typename Collection::iterator end;
    typename Collection::iterator prev;
    bool can_combine = false;
    // First we determine if we can combine any of the Entry objects so we
    // don't end up allocating and making a new collection for no reason
    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
         prev = pos++) {
      if (prev != end && prev->data == pos->data) {
        can_combine = true;
        break;
      }
    }

    // If we can combine at least one entry, then we make a new collection
    // and populate it accordingly, and then swap it into place.
    if (can_combine) {
      Collection minimal_ranges;
      for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
           pos != end; prev = pos++) {
        if (prev != end && prev->data == pos->data)
          minimal_ranges.back().SetRangeEnd(pos->GetRangeEnd());
        else
          minimal_ranges.push_back(*pos);
      }
      // Use the swap technique in case our new vector is much smaller. We
      // must swap because the container never releases or reduces its memory
      // once it has been allocated/reserved.
      m_entries.swap(minimal_ranges);
    }
  }

  void Clear() { m_entries.clear(); }

  bool IsEmpty() const { return m_entries.empty(); }

  size_t GetSize() const { return m_entries.size(); }

  const Entry *GetEntryAtIndex(size_t i) const {
    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
  }

  Entry *GetMutableEntryAtIndex(size_t i) {
    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
  }

  // Clients must ensure that "i" is a valid index prior to calling this
  // function
  Entry &GetEntryRef(size_t i) { return m_entries[i]; }
  const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }

  static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
    return lhs.GetRangeBase() < rhs.GetRangeBase();
  }

  uint32_t FindEntryIndexThatContains(B addr) const {
    const AugmentedEntry *entry =
        static_cast<const AugmentedEntry *>(FindEntryThatContains(addr));
    if (entry)
      return std::distance(m_entries.begin(), entry);
    return UINT32_MAX;
  }

  uint32_t FindEntryIndexesThatContain(B addr, std::vector<uint32_t> &indexes) {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty())
      FindEntryIndexesThatContain(addr, 0, m_entries.size(), indexes);

    return indexes.size();
  }

  Entry *FindEntryThatContains(B addr) {
    return const_cast<Entry *>(
        static_cast<const RangeDataVector *>(this)->FindEntryThatContains(
            addr));
  }

  const Entry *FindEntryThatContains(B addr) const {
    return FindEntryThatContains(Entry(addr, 1));
  }

  const Entry *FindEntryThatContains(const Entry &range) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, range, BaseLessThan);

      while (pos != begin && pos[-1].Contains(range))
        --pos;

      if (pos != end && pos->Contains(range))
        return &(*pos);
    }
    return nullptr;
  }

  const Entry *FindEntryStartsAt(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      auto begin = m_entries.begin(), end = m_entries.end();
      auto pos = std::lower_bound(begin, end, Entry(addr, 1), BaseLessThan);
      if (pos != end && pos->base == addr)
        return &(*pos);
    }
    return nullptr;
  }

  // This method will return the entry that contains the given address, or
  // the entry following that address. If you give it an address of 0 and
  // the first entry starts at address 0x100, you will get the entry at
  // 0x100.
  //
  // For most uses, FindEntryThatContains is the correct method to use; this
  // is a less commonly needed behavior. It was added for core file memory
  // regions, where we want to present a gap in the memory regions as a
  // distinct region, so we need to know the start address of the next
  // memory section that exists.
  const Entry *FindEntryThatContainsOrFollows(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, addr,
                           [](const Entry &lhs, B rhs_base) -> bool {
                             return lhs.GetRangeEnd() <= rhs_base;
                           });

      while (pos != begin && pos[-1].Contains(addr))
        --pos;

      if (pos != end)
        return &(*pos);
    }
    return nullptr;
  }

  Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }

  const Entry *Back() const {
    return (m_entries.empty() ? nullptr : &m_entries.back());
  }

  using const_iterator = typename Collection::const_iterator;
  const_iterator begin() const { return m_entries.begin(); }
  const_iterator end() const { return m_entries.end(); }

protected:
  Collection m_entries;
  Compare m_compare;

private:
  // Compute extra information needed for search
  B ComputeUpperBounds(size_t lo, size_t hi) {
    size_t mid = (lo + hi) / 2;
    AugmentedEntry &entry = m_entries[mid];

    entry.upper_bound = entry.base + entry.size;

    if (lo < mid)
      entry.upper_bound =
          std::max(entry.upper_bound, ComputeUpperBounds(lo, mid));

    if (mid + 1 < hi)
      entry.upper_bound =
          std::max(entry.upper_bound, ComputeUpperBounds(mid + 1, hi));

    return entry.upper_bound;
  }

  // This is based on the augmented tree implementation found at
  // https://en.wikipedia.org/wiki/Interval_tree#Augmented_tree
  void FindEntryIndexesThatContain(B addr, size_t lo, size_t hi,
                                   std::vector<uint32_t> &indexes) {
    size_t mid = (lo + hi) / 2;
    const AugmentedEntry &entry = m_entries[mid];

    // addr is greater than the rightmost point of any interval below mid
    // so there cannot be any matches.
    if (addr > entry.upper_bound)
      return;

    // Recursively search left subtree
    if (lo < mid)
      FindEntryIndexesThatContain(addr, lo, mid, indexes);

    // If addr is smaller than the start of the current interval it
    // cannot contain it nor can any of its right subtree.
    if (addr < entry.base)
      return;

    // Note that this appends the entry's data (which must be convertible to
    // uint32_t), not the entry's position in the vector.
    if (entry.Contains(addr))
      indexes.push_back(entry.data);

    // Recursively search right subtree
    if (mid + 1 < hi)
      FindEntryIndexesThatContain(addr, mid + 1, hi, indexes);
  }
};
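
// Example usage of RangeDataVector (an illustrative sketch mapping address
// ranges to uint32_t values; the value type is arbitrary):
//
//   RangeDataVector<lldb::addr_t, lldb::addr_t, uint32_t> map;
//   map.Append({0x1000, 0x100, 1});
//   map.Append({0x1080, 0x100, 2}); // Overlapping ranges are allowed.
//   map.Sort(); // Sorts and computes the upper bounds used for searching.
//   const auto *entry = map.FindEntryThatContains(0x10c0);
//   // Both ranges contain 0x10c0; FindEntryThatContains returns the
//   // earliest such entry ([0x1000, 0x1100), data == 1), while
//   // FindEntryIndexesThatContain collects the data of all of them.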

// A simple address-with-data class where you get to define the type of the
// address "B" and the type for the associated data "T".
template <typename B, typename T> struct AddressData {
  typedef B BaseType;
  typedef T DataType;

  BaseType addr;
  DataType data;

  AddressData() : addr(), data() {}

  AddressData(B a, DataType d) : addr(a), data(d) {}

  bool operator<(const AddressData &rhs) const {
    if (this->addr == rhs.addr)
      return this->data < rhs.data;
    return this->addr < rhs.addr;
  }

  bool operator==(const AddressData &rhs) const {
    return this->addr == rhs.addr && this->data == rhs.data;
  }

  bool operator!=(const AddressData &rhs) const {
    return this->addr != rhs.addr || this->data != rhs.data;
  }
};

template <typename B, typename T, unsigned N> class AddressDataArray {
public:
  typedef AddressData<B, T> Entry;
  typedef llvm::SmallVector<Entry, N> Collection;

  AddressDataArray() = default;

  ~AddressDataArray() = default;

  void Append(const Entry &entry) { m_entries.push_back(entry); }

  void Sort() {
    if (m_entries.size() > 1)
      std::stable_sort(m_entries.begin(), m_entries.end());
  }

#ifdef ASSERT_RANGEMAP_ARE_SORTED
  bool IsSorted() const {
    typename Collection::const_iterator pos, end, prev;
    // Make sure no entry is less than the entry that precedes it.
    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
         prev = pos++) {
      if (prev != end && *pos < *prev)
        return false;
    }
    return true;
  }
#endif

  void Clear() { m_entries.clear(); }

  bool IsEmpty() const { return m_entries.empty(); }

  size_t GetSize() const { return m_entries.size(); }

  const Entry *GetEntryAtIndex(size_t i) const {
    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
  }

  // Clients must ensure that "i" is a valid index prior to calling this
  // function
  const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }

  static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
    return lhs.addr < rhs.addr;
  }

  Entry *FindEntry(B addr, bool exact_match_only) {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      Entry entry;
      entry.addr = addr;
      typename Collection::iterator begin = m_entries.begin();
      typename Collection::iterator end = m_entries.end();
      typename Collection::iterator pos =
          std::lower_bound(begin, end, entry, BaseLessThan);

      while (pos != begin && pos[-1].addr == addr)
        --pos;

      if (pos != end) {
        if (pos->addr == addr || !exact_match_only)
          return &(*pos);
      }
    }
    return nullptr;
  }

  const Entry *FindNextEntry(const Entry *entry) {
    // Return the entry that immediately follows "entry", if "entry" points
    // into the collection and is not the last element.
    if (entry >= m_entries.begin() && entry + 1 < m_entries.end())
      return entry + 1;
    return nullptr;
  }

  Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }

  const Entry *Back() const {
    return (m_entries.empty() ? nullptr : &m_entries.back());
  }

protected:
  Collection m_entries;
};
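
// Example usage of AddressDataArray (an illustrative sketch):
//
//   AddressDataArray<lldb::addr_t, const char *, 4> names;
//   names.Append({0x1000, "start"});
//   names.Append({0x2000, "end"});
//   names.Sort(); // Required before FindEntry.
//   const auto *exact = names.FindEntry(0x2000, true);  // -> {0x2000, "end"}
//   const auto *next = names.FindEntry(0x1001, false);  // -> {0x2000, "end"}:
//   // the first entry at or after the address when no exact match exists.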

} // namespace lldb_private

#endif // LLDB_UTILITY_RANGEMAP_H