//===-- RangeMap.h ----------------------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef LLDB_UTILITY_RANGEMAP_H
#define LLDB_UTILITY_RANGEMAP_H

#include <algorithm>
#include <vector>

#include "llvm/ADT/SmallVector.h"

#include "lldb/lldb-private.h"

// Uncomment to make sure all Range objects are sorted when needed
//#define ASSERT_RANGEMAP_ARE_SORTED

namespace lldb_private {

// Templatized classes for dealing with generic ranges and also collections of
// ranges, or collections of ranges that have associated data.

// A simple range class where you get to define the type of the range
// base "B", and the type used for the range byte size "S".
template <typename B, typename S> struct Range {
  typedef B BaseType;
  typedef S SizeType;

  BaseType base;
  SizeType size;

  Range() : base(0), size(0) {}

  Range(BaseType b, SizeType s) : base(b), size(s) {}

  void Clear(BaseType b = 0) {
    base = b;
    size = 0;
  }
  BaseType GetRangeBase() const { return base; }

  // Set the start value for the range and keep the same size.
  void SetRangeBase(BaseType b) { base = b; }

  void Slide(BaseType slide) { base += slide; }
  bool Union(const Range &rhs) {
    if (DoesAdjoinOrIntersect(rhs)) {
      auto new_end = std::max<BaseType>(GetRangeEnd(), rhs.GetRangeEnd());
      base = std::min<BaseType>(base, rhs.base);
      size = new_end - base;
      return true;
    }
    return false;
  }

  BaseType GetRangeEnd() const { return base + size; }

  void SetRangeEnd(BaseType end) {
    if (end > base)
      size = end - base;
    else
      size = 0;
  }

  SizeType GetByteSize() const { return size; }

  void SetByteSize(SizeType s) { size = s; }

  bool IsValid() const { return size > 0; }

  bool Contains(BaseType r) const {
    return (GetRangeBase() <= r) && (r < GetRangeEnd());
  }

  bool ContainsEndInclusive(BaseType r) const {
    return (GetRangeBase() <= r) && (r <= GetRangeEnd());
  }

  bool Contains(const Range &range) const {
    return Contains(range.GetRangeBase()) &&
           ContainsEndInclusive(range.GetRangeEnd());
  }
  // Returns true if the two ranges adjoin or intersect
  bool DoesAdjoinOrIntersect(const Range &rhs) const {
    const BaseType lhs_base = this->GetRangeBase();
    const BaseType rhs_base = rhs.GetRangeBase();
    const BaseType lhs_end = this->GetRangeEnd();
    const BaseType rhs_end = rhs.GetRangeEnd();
    bool result = (lhs_base <= rhs_end) && (lhs_end >= rhs_base);
    return result;
  }

  // Returns true if the two ranges intersect
  bool DoesIntersect(const Range &rhs) const {
    const BaseType lhs_base = this->GetRangeBase();
    const BaseType rhs_base = rhs.GetRangeBase();
    const BaseType lhs_end = this->GetRangeEnd();
    const BaseType rhs_end = rhs.GetRangeEnd();
    bool result = (lhs_base < rhs_end) && (lhs_end > rhs_base);
    return result;
  }

  bool operator<(const Range &rhs) const {
    if (base == rhs.base)
      return size < rhs.size;
    return base < rhs.base;
  }

  bool operator==(const Range &rhs) const {
    return base == rhs.base && size == rhs.size;
  }

  bool operator!=(const Range &rhs) const {
    return base != rhs.base || size != rhs.size;
  }
};
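
// A minimal usage sketch for Range (illustrative only; the
// lldb::addr_t/uint32_t instantiation is an assumption, not something this
// header requires). Ranges are half-open: [base, base + size).
//
//   Range<lldb::addr_t, uint32_t> r(0x1000, 0x100); // covers [0x1000, 0x1100)
//   assert(r.Contains(0x10ff));  // last byte is inside
//   assert(!r.Contains(0x1100)); // the end address is exclusive
//
//   Range<lldb::addr_t, uint32_t> next(0x1100, 0x10);
//   assert(r.Union(next)); // adjoining ranges can be unioned in place
//   assert(r.GetByteSize() == 0x110);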

template <typename B, typename S, unsigned N = 0> class RangeVector {
public:
  typedef B BaseType;
  typedef S SizeType;
  typedef Range<B, S> Entry;
  typedef llvm::SmallVector<Entry, N> Collection;

  RangeVector() = default;

  ~RangeVector() = default;

  void Append(const Entry &entry) { m_entries.push_back(entry); }

  void Append(B base, S size) { m_entries.emplace_back(base, size); }

  // Insert an entry into the sorted list, optionally combining it with any
  // adjacent or overlapping entries if requested.
  void Insert(const Entry &entry, bool combine) {
    if (m_entries.empty()) {
      m_entries.push_back(entry);
      return;
    }
    auto begin = m_entries.begin();
    auto end = m_entries.end();
    auto pos = std::lower_bound(begin, end, entry);
    if (combine) {
      if (pos != end && pos->Union(entry)) {
        CombinePrevAndNext(pos);
        return;
      }
      if (pos != begin) {
        auto prev = pos - 1;
        if (prev->Union(entry)) {
          CombinePrevAndNext(prev);
          return;
        }
      }
    }
    m_entries.insert(pos, entry);
  }
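
  // A usage sketch for Insert with combining enabled (illustrative only; the
  // lldb::addr_t/uint32_t instantiation is an assumption):
  //
  //   RangeVector<lldb::addr_t, uint32_t> ranges;
  //   ranges.Insert({0x1000, 0x10}, /*combine=*/true);
  //   ranges.Insert({0x1010, 0x10}, /*combine=*/true); // merges with the first
  //   assert(ranges.GetSize() == 1);
  //   assert(ranges.GetEntryRef(0).GetByteSize() == 0x20);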

  bool RemoveEntryAtIndex(uint32_t idx) {
    if (idx < m_entries.size()) {
      m_entries.erase(m_entries.begin() + idx);
      return true;
    }
    return false;
  }

  void Sort() {
    if (m_entries.size() > 1)
      std::stable_sort(m_entries.begin(), m_entries.end());
  }

#ifdef ASSERT_RANGEMAP_ARE_SORTED
  bool IsSorted() const {
    typename Collection::const_iterator pos, end, prev;
    // Check that every entry compares greater than or equal to the entry
    // that precedes it.
    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
         prev = pos++) {
      if (prev != end && *pos < *prev)
        return false;
    }
    return true;
  }
#endif

  void CombineConsecutiveRanges() {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    auto first_intersect = std::adjacent_find(
        m_entries.begin(), m_entries.end(), [](const Entry &a, const Entry &b) {
          return a.DoesAdjoinOrIntersect(b);
        });
    if (first_intersect == m_entries.end())
      return;

    // If we can combine at least one entry, then we make a new collection
    // and populate it accordingly, and then swap it into place.
    auto pos = std::next(first_intersect);
    Collection minimal_ranges(m_entries.begin(), pos);
    for (; pos != m_entries.end(); ++pos) {
      Entry &back = minimal_ranges.back();
      if (back.DoesAdjoinOrIntersect(*pos))
        back.SetRangeEnd(std::max(back.GetRangeEnd(), pos->GetRangeEnd()));
      else
        minimal_ranges.push_back(*pos);
    }
    m_entries.swap(minimal_ranges);
  }
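
  // A coalescing sketch (illustrative only):
  //
  //   RangeVector<lldb::addr_t, uint32_t> rv;
  //   rv.Append(0x1000, 0x10);
  //   rv.Append(0x1008, 0x10); // overlaps the first
  //   rv.Append(0x2000, 0x10); // disjoint
  //   rv.Sort();
  //   rv.CombineConsecutiveRanges();
  //   assert(rv.GetSize() == 2); // [0x1000,0x1018) and [0x2000,0x2010)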

  BaseType GetMinRangeBase(BaseType fail_value) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (m_entries.empty())
      return fail_value;
    // m_entries must be sorted, so if we aren't empty, we grab the first
    // range's base
    return m_entries.front().GetRangeBase();
  }

  BaseType GetMaxRangeEnd(BaseType fail_value) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (m_entries.empty())
      return fail_value;
    // m_entries must be sorted, so if we aren't empty, we grab the last
    // range's end
    return m_entries.back().GetRangeEnd();
  }

  void Slide(BaseType slide) {
    typename Collection::iterator pos, end;
    for (pos = m_entries.begin(), end = m_entries.end(); pos != end; ++pos)
      pos->Slide(slide);
  }

  void Clear() { m_entries.clear(); }

  void Reserve(typename Collection::size_type size) { m_entries.reserve(size); }

  bool IsEmpty() const { return m_entries.empty(); }

  size_t GetSize() const { return m_entries.size(); }

  const Entry *GetEntryAtIndex(size_t i) const {
    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
  }

  // Clients must ensure that "i" is a valid index prior to calling this
  // function
  Entry &GetEntryRef(size_t i) { return m_entries[i]; }
  const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }

  Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }

  const Entry *Back() const {
    return (m_entries.empty() ? nullptr : &m_entries.back());
  }

  static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
    return lhs.GetRangeBase() < rhs.GetRangeBase();
  }

  uint32_t FindEntryIndexThatContains(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      Entry entry(addr, 1);
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, entry, BaseLessThan);

      if (pos != end && pos->Contains(addr)) {
        return std::distance(begin, pos);
      } else if (pos != begin) {
        --pos;
        if (pos->Contains(addr))
          return std::distance(begin, pos);
      }
    }
    return UINT32_MAX;
  }

  const Entry *FindEntryThatContains(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      Entry entry(addr, 1);
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, entry, BaseLessThan);

      if (pos != end && pos->Contains(addr)) {
        return &(*pos);
      } else if (pos != begin) {
        --pos;
        if (pos->Contains(addr)) {
          return &(*pos);
        }
      }
    }
    return nullptr;
  }
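
  // A lookup sketch, assuming a sorted RangeVector "rv" holding the two
  // coalesced ranges from the example above (illustrative only):
  //
  //   if (const auto *entry = rv.FindEntryThatContains(0x1004))
  //     assert(entry->GetRangeBase() == 0x1000);
  //   assert(rv.FindEntryThatContains(0x3000) == nullptr); // in no range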

  const Entry *FindEntryThatContains(const Entry &range) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, range, BaseLessThan);

      if (pos != end && pos->Contains(range)) {
        return &(*pos);
      } else if (pos != begin) {
        --pos;
        if (pos->Contains(range)) {
          return &(*pos);
        }
      }
    }
    return nullptr;
  }

  using const_iterator = typename Collection::const_iterator;
  const_iterator begin() const { return m_entries.begin(); }
  const_iterator end() const { return m_entries.end(); }

protected:
  void CombinePrevAndNext(typename Collection::iterator pos) {
    // Check the previous and next entries in case they need to be unioned
    // with the entry pointed to by "pos".
    if (pos != m_entries.begin()) {
      auto prev = pos - 1;
      if (prev->Union(*pos))
        m_entries.erase(pos);
      pos = prev;
    }

    auto end = m_entries.end();
    if (pos != end) {
      auto next = pos + 1;
      if (next != end) {
        if (pos->Union(*next))
          m_entries.erase(next);
      }
    }
  }

  Collection m_entries;
};

// A simple range with data class where you get to define the type of
// the range base "B", the type used for the range byte size "S", and the type
// for the associated data "T".
template <typename B, typename S, typename T>
struct RangeData : public Range<B, S> {
  typedef T DataType;

  DataType data;

  RangeData() : Range<B, S>(), data() {}

  RangeData(B base, S size) : Range<B, S>(base, size), data() {}

  RangeData(B base, S size, DataType d) : Range<B, S>(base, size), data(d) {}
};

// We can treat the vector as a flattened binary search tree. Augmenting it
// with an upper bound (the maximum of the range endpoints) for every index
// allows us to query for range containment more quickly.
template <typename B, typename S, typename T>
struct AugmentedRangeData : public RangeData<B, S, T> {
  B upper_bound;

  AugmentedRangeData(const RangeData<B, S, T> &rd)
      : RangeData<B, S, T>(rd), upper_bound() {}
};
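
// For example, a sorted vector of seven entries forms an implicit tree
// rooted at the midpoint (vector indices shown):
//
//             3
//           /   \
//          1     5
//         / \   / \
//        0   2 4   6
//
// Each node's upper_bound holds the maximum GetRangeEnd() of its entire
// subtree, so a containment query can skip any subtree whose upper_bound is
// below the queried address.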

template <typename B, typename S, typename T, unsigned N = 0,
          class Compare = std::less<T>>
class RangeDataVector {
public:
  typedef lldb_private::Range<B, S> Range;
  typedef RangeData<B, S, T> Entry;
  typedef AugmentedRangeData<B, S, T> AugmentedEntry;
  typedef llvm::SmallVector<AugmentedEntry, N> Collection;

  RangeDataVector(Compare compare = Compare()) : m_compare(compare) {}

  ~RangeDataVector() = default;

  void Append(const Entry &entry) { m_entries.emplace_back(entry); }

  void Sort() {
    if (m_entries.size() > 1)
      std::stable_sort(m_entries.begin(), m_entries.end(),
                       [&compare = m_compare](const Entry &a, const Entry &b) {
                         if (a.base != b.base)
                           return a.base < b.base;
                         if (a.size != b.size)
                           return a.size < b.size;
                         return compare(a.data, b.data);
                       });
    if (!m_entries.empty())
      ComputeUpperBounds(0, m_entries.size());
  }

#ifdef ASSERT_RANGEMAP_ARE_SORTED
  bool IsSorted() const {
    typename Collection::const_iterator pos, end, prev;
    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
         prev = pos++) {
      if (prev != end && *pos < *prev)
        return false;
    }
    return true;
  }
#endif

  void CombineConsecutiveEntriesWithEqualData() {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    typename Collection::iterator pos;
    typename Collection::iterator end;
    typename Collection::iterator prev;
    bool can_combine = false;
    // First we determine if we can combine any of the Entry objects so we
    // don't end up allocating and making a new collection for no reason
    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
         prev = pos++) {
      if (prev != end && prev->data == pos->data) {
        can_combine = true;
        break;
      }
    }

    // If we can combine at least one entry, then we make a new collection
    // and populate it accordingly, and then swap it into place.
    if (can_combine) {
      Collection minimal_ranges;
      for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
           pos != end; prev = pos++) {
        if (prev != end && prev->data == pos->data)
          minimal_ranges.back().SetRangeEnd(pos->GetRangeEnd());
        else
          minimal_ranges.push_back(*pos);
      }
      // Use the swap technique in case our new vector is much smaller. We must
      // swap when using the STL because std::vector objects never release or
      // reduce the memory once it has been allocated/reserved.
      m_entries.swap(minimal_ranges);
    }
  }
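
  // A merging sketch (illustrative only; the instantiation is an assumption):
  //
  //   RangeDataVector<lldb::addr_t, uint32_t, uint32_t> map;
  //   map.Append({0x1000, 0x10, 1});
  //   map.Append({0x1010, 0x10, 1}); // consecutive entry with equal data
  //   map.Append({0x1020, 0x10, 2});
  //   map.Sort();
  //   map.CombineConsecutiveEntriesWithEqualData();
  //   assert(map.GetSize() == 2); // [0x1000,0x1020)->1 and [0x1020,0x1030)->2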

  void Clear() { m_entries.clear(); }

  bool IsEmpty() const { return m_entries.empty(); }

  size_t GetSize() const { return m_entries.size(); }

  const Entry *GetEntryAtIndex(size_t i) const {
    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
  }

  Entry *GetMutableEntryAtIndex(size_t i) {
    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
  }

  // Clients must ensure that "i" is a valid index prior to calling this
  // function
  Entry &GetEntryRef(size_t i) { return m_entries[i]; }
  const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }

  static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
    return lhs.GetRangeBase() < rhs.GetRangeBase();
  }

  uint32_t FindEntryIndexThatContains(B addr) const {
    const AugmentedEntry *entry =
        static_cast<const AugmentedEntry *>(FindEntryThatContains(addr));
    if (entry)
      return std::distance(m_entries.begin(), entry);
    return UINT32_MAX;
  }

  uint32_t FindEntryIndexesThatContain(B addr, std::vector<uint32_t> &indexes) {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty())
      FindEntryIndexesThatContain(addr, 0, m_entries.size(), indexes);

    return indexes.size();
  }

  Entry *FindEntryThatContains(B addr) {
    return const_cast<Entry *>(
        static_cast<const RangeDataVector *>(this)->FindEntryThatContains(
            addr));
  }

  const Entry *FindEntryThatContains(B addr) const {
    return FindEntryThatContains(Entry(addr, 1));
  }

  const Entry *FindEntryThatContains(const Entry &range) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, range, BaseLessThan);

      while (pos != begin && pos[-1].Contains(range))
        --pos;

      if (pos != end && pos->Contains(range))
        return &(*pos);
    }
    return nullptr;
  }

  const Entry *FindEntryStartsAt(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      auto begin = m_entries.begin(), end = m_entries.end();
      auto pos = std::lower_bound(begin, end, Entry(addr, 1), BaseLessThan);
      if (pos != end && pos->base == addr)
        return &(*pos);
    }
    return nullptr;
  }

  // This method will return the entry that contains the given address, or the
  // entry following that address.  If you give it an address of 0 and the
  // first entry starts at address 0x100, you will get the entry at 0x100.
  //
  // For most uses, FindEntryThatContains is the correct one to use; this is a
  // less commonly needed behavior.  It was added for core file memory regions,
  // where we want to present a gap in the memory regions as a distinct region,
  // so we need to know the start address of the next memory section that
  // exists.
  const Entry *FindEntryThatContainsOrFollows(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, addr,
                           [](const Entry &lhs, B rhs_base) -> bool {
                             return lhs.GetRangeEnd() <= rhs_base;
                           });

      while (pos != begin && pos[-1].Contains(addr))
        --pos;

      if (pos != end)
        return &(*pos);
    }
    return nullptr;
  }
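
  // A gap-reporting sketch (illustrative only), assuming "map" holds entries
  // covering [0x1000,0x1010) and [0x3000,0x3010):
  //
  //   const auto *entry = map.FindEntryThatContainsOrFollows(0x2000);
  //   // "entry" is the region starting at 0x3000, which lets a caller
  //   // present [0x2000, 0x3000) as a distinct unmapped region.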

  Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }

  const Entry *Back() const {
    return (m_entries.empty() ? nullptr : &m_entries.back());
  }

protected:
  Collection m_entries;
  Compare m_compare;

private:
  // Compute extra information needed for search
  B ComputeUpperBounds(size_t lo, size_t hi) {
    size_t mid = (lo + hi) / 2;
    AugmentedEntry &entry = m_entries[mid];

    entry.upper_bound = entry.base + entry.size;

    if (lo < mid)
      entry.upper_bound =
          std::max(entry.upper_bound, ComputeUpperBounds(lo, mid));

    if (mid + 1 < hi)
      entry.upper_bound =
          std::max(entry.upper_bound, ComputeUpperBounds(mid + 1, hi));

    return entry.upper_bound;
  }
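
  // A worked sketch for three sorted entries (base, size):
  //   index 0: (0, 20)  -> end 20
  //   index 1: (10, 5)  -> end 15
  //   index 2: (20, 5)  -> end 25
  // ComputeUpperBounds(0, 3) roots the implicit tree at index 1 and stores
  // upper_bound = 20 at index 0, 25 at index 2, and 25 at index 1 (the
  // maximum endpoint anywhere in its subtree, larger than its own end of 15).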

  // This is based on the augmented tree implementation found at
  // https://en.wikipedia.org/wiki/Interval_tree#Augmented_tree
  void FindEntryIndexesThatContain(B addr, size_t lo, size_t hi,
                                   std::vector<uint32_t> &indexes) {
    size_t mid = (lo + hi) / 2;
    const AugmentedEntry &entry = m_entries[mid];

    // addr is greater than the rightmost point of any interval below mid,
    // so there cannot be any matches.
    if (addr > entry.upper_bound)
      return;

    // Recursively search the left subtree.
    if (lo < mid)
      FindEntryIndexesThatContain(addr, lo, mid, indexes);

    // If addr is smaller than the start of the current interval, neither
    // this interval nor anything in its right subtree can contain addr.
    if (addr < entry.base)
      return;

    if (entry.Contains(addr))
      indexes.push_back(entry.data);

    // Recursively search the right subtree.
    if (mid + 1 < hi)
      FindEntryIndexesThatContain(addr, mid + 1, hi, indexes);
  }
};
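
// An overlapping-query sketch for RangeDataVector (illustrative only). Note
// that FindEntryIndexesThatContain collects each matching entry's data value,
// so callers typically store the entry's own index as its data:
//
//   RangeDataVector<lldb::addr_t, uint32_t, uint32_t> sections;
//   sections.Append({0x1000, 0x100, 0});
//   sections.Append({0x1080, 0x100, 1}); // overlaps the first
//   sections.Sort(); // also computes the upper bounds
//   std::vector<uint32_t> hits;
//   sections.FindEntryIndexesThatContain(0x1090, hits);
//   // hits now holds the data values {0, 1} of both overlapping entries.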

// A simple address with data class where you get to define the type of the
// address "B" and the type for the associated data "T".
template <typename B, typename T> struct AddressData {
  typedef B BaseType;
  typedef T DataType;

  BaseType addr;
  DataType data;

  AddressData() : addr(), data() {}

  AddressData(B a, DataType d) : addr(a), data(d) {}

  bool operator<(const AddressData &rhs) const {
    if (this->addr == rhs.addr)
      return this->data < rhs.data;
    return this->addr < rhs.addr;
  }

  bool operator==(const AddressData &rhs) const {
    return this->addr == rhs.addr && this->data == rhs.data;
  }

  bool operator!=(const AddressData &rhs) const {
    return this->addr != rhs.addr || this->data != rhs.data;
  }
};

template <typename B, typename T, unsigned N> class AddressDataArray {
public:
  typedef AddressData<B, T> Entry;
  typedef llvm::SmallVector<Entry, N> Collection;

  AddressDataArray() = default;

  ~AddressDataArray() = default;

  void Append(const Entry &entry) { m_entries.push_back(entry); }

  void Sort() {
    if (m_entries.size() > 1)
      std::stable_sort(m_entries.begin(), m_entries.end());
  }

#ifdef ASSERT_RANGEMAP_ARE_SORTED
  bool IsSorted() const {
    typename Collection::const_iterator pos, end, prev;
    // Check that every entry compares greater than or equal to the entry
    // that precedes it.
    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
         prev = pos++) {
      if (prev != end && *pos < *prev)
        return false;
    }
    return true;
  }
#endif

  void Clear() { m_entries.clear(); }

  bool IsEmpty() const { return m_entries.empty(); }

  size_t GetSize() const { return m_entries.size(); }

  const Entry *GetEntryAtIndex(size_t i) const {
    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
  }

  // Clients must ensure that "i" is a valid index prior to calling this
  // function
  const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }

  static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
    return lhs.addr < rhs.addr;
  }

  Entry *FindEntry(B addr, bool exact_match_only) {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      Entry entry;
      entry.addr = addr;
      typename Collection::iterator begin = m_entries.begin();
      typename Collection::iterator end = m_entries.end();
      typename Collection::iterator pos =
          std::lower_bound(begin, end, entry, BaseLessThan);

      while (pos != begin && pos[-1].addr == addr)
        --pos;

      if (pos != end) {
        if (pos->addr == addr || !exact_match_only)
          return &(*pos);
      }
    }
    return nullptr;
  }
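
  // A lookup sketch (illustrative only; the instantiation is an assumption):
  //
  //   AddressDataArray<lldb::addr_t, const char *, 4> symbols;
  //   symbols.Append({0x1000, "main"});
  //   symbols.Append({0x1020, "helper"});
  //   symbols.Sort();
  //   auto *e = symbols.FindEntry(0x1000, /*exact_match_only=*/true);
  //   // e->data is "main". With exact_match_only = false, a near miss
  //   // returns the first entry whose address is >= the query address,
  //   // or nullptr if the query is past the last entry.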

  const Entry *FindNextEntry(const Entry *entry) {
    if (entry >= &*m_entries.begin() && entry + 1 < &*m_entries.end())
      return entry + 1;
    return nullptr;
  }

  Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }

  const Entry *Back() const {
    return (m_entries.empty() ? nullptr : &m_entries.back());
  }

protected:
  Collection m_entries;
};

} // namespace lldb_private

#endif // LLDB_UTILITY_RANGEMAP_H