//===-- RangeMap.h ----------------------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef LLDB_UTILITY_RANGEMAP_H
#define LLDB_UTILITY_RANGEMAP_H

#include <algorithm>
#include <vector>

#include "llvm/ADT/SmallVector.h"

#include "lldb/lldb-private.h"

// Uncomment to make sure all Range objects are sorted when needed
//#define ASSERT_RANGEMAP_ARE_SORTED

namespace lldb_private {

// Templatized classes for dealing with generic ranges and also collections of
// ranges, or collections of ranges that have associated data.

// A simple range class where you get to define the type of the range
// base "B", and the type used for the range byte size "S".
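//
// A usage sketch (hypothetical values; note the range end is exclusive):
//
//   Range<lldb::addr_t, lldb::addr_t> r(0x1000, 0x100); // [0x1000, 0x1100)
//   r.Contains(0x10ff);             // true: the last byte is inside
//   r.Contains(0x1100);             // false: the end is not contained
//   Range<lldb::addr_t, lldb::addr_t> other(0x1100, 0x10);
//   r.DoesAdjoinOrIntersect(other); // true: the ranges touch at 0x1100
//   r.Union(other);                 // r is now [0x1000, 0x1110)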
template <typename B, typename S> struct Range {
  typedef B BaseType;
  typedef S SizeType;

  BaseType base;
  SizeType size;

  Range() : base(0), size(0) {}

  Range(BaseType b, SizeType s) : base(b), size(s) {}

  void Clear(BaseType b = 0) {
    base = b;
    size = 0;
  }
  BaseType GetRangeBase() const { return base; }

  // Set the start value for the range, and keep the same size
  void SetRangeBase(BaseType b) { base = b; }

  void Slide(BaseType slide) { base += slide; }

  bool Union(const Range &rhs) {
    if (DoesAdjoinOrIntersect(rhs)) {
      auto new_end = std::max<BaseType>(GetRangeEnd(), rhs.GetRangeEnd());
      base = std::min<BaseType>(base, rhs.base);
      size = new_end - base;
      return true;
    }
    return false;
  }

  BaseType GetRangeEnd() const { return base + size; }

  void SetRangeEnd(BaseType end) {
    if (end > base)
      size = end - base;
    else
      size = 0;
  }

  SizeType GetByteSize() const { return size; }

  void SetByteSize(SizeType s) { size = s; }

  bool IsValid() const { return size > 0; }

  bool Contains(BaseType r) const {
    return (GetRangeBase() <= r) && (r < GetRangeEnd());
  }

  bool ContainsEndInclusive(BaseType r) const {
    return (GetRangeBase() <= r) && (r <= GetRangeEnd());
  }

  bool Contains(const Range &range) const {
    return Contains(range.GetRangeBase()) &&
           ContainsEndInclusive(range.GetRangeEnd());
  }

  // Returns true if the two ranges adjoin or intersect
  bool DoesAdjoinOrIntersect(const Range &rhs) const {
    const BaseType lhs_base = this->GetRangeBase();
    const BaseType rhs_base = rhs.GetRangeBase();
    const BaseType lhs_end = this->GetRangeEnd();
    const BaseType rhs_end = rhs.GetRangeEnd();
    bool result = (lhs_base <= rhs_end) && (lhs_end >= rhs_base);
    return result;
  }

  // Returns true if the two ranges intersect
  bool DoesIntersect(const Range &rhs) const {
    const BaseType lhs_base = this->GetRangeBase();
    const BaseType rhs_base = rhs.GetRangeBase();
    const BaseType lhs_end = this->GetRangeEnd();
    const BaseType rhs_end = rhs.GetRangeEnd();
    bool result = (lhs_base < rhs_end) && (lhs_end > rhs_base);
    return result;
  }

  bool operator<(const Range &rhs) const {
    if (base == rhs.base)
      return size < rhs.size;
    return base < rhs.base;
  }

  bool operator==(const Range &rhs) const {
    return base == rhs.base && size == rhs.size;
  }

  bool operator!=(const Range &rhs) const {
    return base != rhs.base || size != rhs.size;
  }
};

template <typename B, typename S, unsigned N = 0> class RangeVector {
public:
  typedef B BaseType;
  typedef S SizeType;
  typedef Range<B, S> Entry;
  typedef llvm::SmallVector<Entry, N> Collection;

  RangeVector() = default;

  ~RangeVector() = default;

  void Append(const Entry &entry) { m_entries.push_back(entry); }

  void Append(B base, S size) { m_entries.emplace_back(base, size); }

  // Insert an item into the sorted list, optionally combining it with any
  // adjacent or overlapping entries when "combine" is true.
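  //
  // For example (hypothetical values), inserting the range [0x1100, 0x1200)
  // with combine = true into a vector that already contains [0x1000, 0x1100)
  // leaves a single combined entry [0x1000, 0x1200).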
  void Insert(const Entry &entry, bool combine) {
    if (m_entries.empty()) {
      m_entries.push_back(entry);
      return;
    }
    auto begin = m_entries.begin();
    auto end = m_entries.end();
    auto pos = std::lower_bound(begin, end, entry);
    if (combine) {
      if (pos != end && pos->Union(entry)) {
        CombinePrevAndNext(pos);
        return;
      }
      if (pos != begin) {
        auto prev = pos - 1;
        if (prev->Union(entry)) {
          CombinePrevAndNext(prev);
          return;
        }
      }
    }
    m_entries.insert(pos, entry);
  }

  bool RemoveEntryAtIndex(uint32_t idx) {
    if (idx < m_entries.size()) {
      m_entries.erase(m_entries.begin() + idx);
      return true;
    }
    return false;
  }

  void Sort() {
    if (m_entries.size() > 1)
      std::stable_sort(m_entries.begin(), m_entries.end());
  }

#ifdef ASSERT_RANGEMAP_ARE_SORTED
  bool IsSorted() const {
    typename Collection::const_iterator pos, end, prev;
    // Check that each entry sorts at or after its predecessor.
    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
         prev = pos++) {
      if (prev != end && *pos < *prev)
        return false;
    }
    return true;
  }
#endif

  void CombineConsecutiveRanges() {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    auto first_intersect = std::adjacent_find(
        m_entries.begin(), m_entries.end(), [](const Entry &a, const Entry &b) {
          return a.DoesAdjoinOrIntersect(b);
        });
    if (first_intersect == m_entries.end())
      return;

    // If we can combine at least one entry, then we make a new collection and
    // populate it accordingly, and then swap it into place.
    auto pos = std::next(first_intersect);
    Collection minimal_ranges(m_entries.begin(), pos);
    for (; pos != m_entries.end(); ++pos) {
      Entry &back = minimal_ranges.back();
      if (back.DoesAdjoinOrIntersect(*pos))
        back.SetRangeEnd(std::max(back.GetRangeEnd(), pos->GetRangeEnd()));
      else
        minimal_ranges.push_back(*pos);
    }
    m_entries.swap(minimal_ranges);
  }

  BaseType GetMinRangeBase(BaseType fail_value) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (m_entries.empty())
      return fail_value;
    // m_entries must be sorted, so if we aren't empty, we grab the first
    // range's base
    return m_entries.front().GetRangeBase();
  }

  BaseType GetMaxRangeEnd(BaseType fail_value) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (m_entries.empty())
      return fail_value;
    // m_entries must be sorted, so if we aren't empty, we grab the last
    // range's end
    return m_entries.back().GetRangeEnd();
  }

  void Slide(BaseType slide) {
    typename Collection::iterator pos, end;
    for (pos = m_entries.begin(), end = m_entries.end(); pos != end; ++pos)
      pos->Slide(slide);
  }

  void Clear() { m_entries.clear(); }

  void Reserve(typename Collection::size_type size) { m_entries.reserve(size); }

  bool IsEmpty() const { return m_entries.empty(); }

  size_t GetSize() const { return m_entries.size(); }

  const Entry *GetEntryAtIndex(size_t i) const {
    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
  }

  // Clients must ensure that "i" is a valid index prior to calling this
  // function
  Entry &GetEntryRef(size_t i) { return m_entries[i]; }
  const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }

  Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }

  const Entry *Back() const {
    return (m_entries.empty() ? nullptr : &m_entries.back());
  }

  static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
    return lhs.GetRangeBase() < rhs.GetRangeBase();
  }

  uint32_t FindEntryIndexThatContains(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      Entry entry(addr, 1);
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, entry, BaseLessThan);

      if (pos != end && pos->Contains(addr)) {
        return std::distance(begin, pos);
      } else if (pos != begin) {
        --pos;
        if (pos->Contains(addr))
          return std::distance(begin, pos);
      }
    }
    return UINT32_MAX;
  }

  const Entry *FindEntryThatContains(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      Entry entry(addr, 1);
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, entry, BaseLessThan);

      if (pos != end && pos->Contains(addr)) {
        return &(*pos);
      } else if (pos != begin) {
        --pos;
        if (pos->Contains(addr)) {
          return &(*pos);
        }
      }
    }
    return nullptr;
  }

  const Entry *FindEntryThatContains(const Entry &range) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, range, BaseLessThan);

      if (pos != end && pos->Contains(range)) {
        return &(*pos);
      } else if (pos != begin) {
        --pos;
        if (pos->Contains(range)) {
          return &(*pos);
        }
      }
    }
    return nullptr;
  }

  using const_iterator = typename Collection::const_iterator;
  const_iterator begin() const { return m_entries.begin(); }
  const_iterator end() const { return m_entries.end(); }

protected:
  void CombinePrevAndNext(typename Collection::iterator pos) {
    // Check the previous and next entries in case they need to be unioned
    // with the entry pointed to by "pos".
    if (pos != m_entries.begin()) {
      auto prev = pos - 1;
      if (prev->Union(*pos))
        m_entries.erase(pos);
      pos = prev;
    }

    auto end = m_entries.end();
    if (pos != end) {
      auto next = pos + 1;
      if (next != end) {
        if (pos->Union(*next))
          m_entries.erase(next);
      }
    }
  }

  Collection m_entries;
};
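
// A usage sketch for RangeVector (hypothetical values):
//
//   RangeVector<lldb::addr_t, lldb::addr_t> ranges;
//   ranges.Append(0x2000, 0x100);
//   ranges.Append(0x1000, 0x100);
//   ranges.Sort(); // the binary-search lookups require sorted entries
//   const auto *entry = ranges.FindEntryThatContains(0x1080);
//   // entry->GetRangeBase() == 0x1000
//   ranges.Insert({0x1100, 0x100}, /*combine=*/true);
//   // [0x1100, 0x1200) merges with [0x1000, 0x1100); two entries remain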

// A simple range-with-data class where you get to define the type of
// the range base "B", the type used for the range byte size "S", and the
// type for the associated data "T".
template <typename B, typename S, typename T>
struct RangeData : public Range<B, S> {
  typedef T DataType;

  DataType data;

  RangeData() : Range<B, S>(), data() {}

  RangeData(B base, S size) : Range<B, S>(base, size), data() {}

  RangeData(B base, S size, DataType d) : Range<B, S>(base, size), data(d) {}
};

// We can treat the vector as a flattened binary search tree, since augmenting
// it with an upper bound (the maximum range endpoint in the subtree rooted at
// each index) lets us answer range-containment queries more quickly.
template <typename B, typename S, typename T>
struct AugmentedRangeData : public RangeData<B, S, T> {
  B upper_bound;

  AugmentedRangeData(const RangeData<B, S, T> &rd)
      : RangeData<B, S, T>(rd), upper_bound() {}
};
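
// For example (hypothetical ranges), the sorted entries [0x1000, 0x1100),
// [0x1040, 0x1240), and [0x1300, 0x1400), viewed as a tree rooted at the
// middle index, get upper_bound values 0x1100, 0x1400, and 0x1400: each
// entry's upper_bound covers every range end in its subtree.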

template <typename B, typename S, typename T, unsigned N = 0,
          class Compare = std::less<T>>
class RangeDataVector {
public:
  typedef lldb_private::Range<B, S> Range;
  typedef RangeData<B, S, T> Entry;
  typedef AugmentedRangeData<B, S, T> AugmentedEntry;
  typedef llvm::SmallVector<AugmentedEntry, N> Collection;

  RangeDataVector(Compare compare = Compare()) : m_compare(compare) {}

  ~RangeDataVector() = default;

  void Append(const Entry &entry) { m_entries.emplace_back(entry); }

  void Sort() {
    if (m_entries.size() > 1)
      std::stable_sort(m_entries.begin(), m_entries.end(),
                       [&compare = m_compare](const Entry &a, const Entry &b) {
                         if (a.base != b.base)
                           return a.base < b.base;
                         if (a.size != b.size)
                           return a.size < b.size;
                         return compare(a.data, b.data);
                       });
    if (!m_entries.empty())
      ComputeUpperBounds(0, m_entries.size());
  }

#ifdef ASSERT_RANGEMAP_ARE_SORTED
  bool IsSorted() const {
    typename Collection::const_iterator pos, end, prev;
    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
         prev = pos++) {
      if (prev != end && *pos < *prev)
        return false;
    }
    return true;
  }
#endif

  void CombineConsecutiveEntriesWithEqualData() {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    typename Collection::iterator pos;
    typename Collection::iterator end;
    typename Collection::iterator prev;
    bool can_combine = false;
    // First we determine if we can combine any of the Entry objects so we
    // don't end up allocating and making a new collection for no reason
    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
         prev = pos++) {
      if (prev != end && prev->data == pos->data) {
        can_combine = true;
        break;
      }
    }

    // If we can combine at least one entry, then we make a new collection and
    // populate it accordingly, and then swap it into place.
    if (can_combine) {
      Collection minimal_ranges;
      for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
           pos != end; prev = pos++) {
        if (prev != end && prev->data == pos->data)
          minimal_ranges.back().SetRangeEnd(pos->GetRangeEnd());
        else
          minimal_ranges.push_back(*pos);
      }
      // Use the swap technique in case our new vector is much smaller. We
      // must swap since the collection never releases or reduces its memory
      // once it has been allocated/reserved.
      m_entries.swap(minimal_ranges);
    }
  }

  void Clear() { m_entries.clear(); }

  bool IsEmpty() const { return m_entries.empty(); }

  size_t GetSize() const { return m_entries.size(); }

  const Entry *GetEntryAtIndex(size_t i) const {
    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
  }

  Entry *GetMutableEntryAtIndex(size_t i) {
    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
  }

  // Clients must ensure that "i" is a valid index prior to calling this
  // function
  Entry &GetEntryRef(size_t i) { return m_entries[i]; }
  const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }

  static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
    return lhs.GetRangeBase() < rhs.GetRangeBase();
  }

  uint32_t FindEntryIndexThatContains(B addr) const {
    const AugmentedEntry *entry =
        static_cast<const AugmentedEntry *>(FindEntryThatContains(addr));
    if (entry)
      return std::distance(m_entries.begin(), entry);
    return UINT32_MAX;
  }

  uint32_t FindEntryIndexesThatContain(B addr, std::vector<uint32_t> &indexes) {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty())
      FindEntryIndexesThatContain(addr, 0, m_entries.size(), indexes);

    return indexes.size();
  }

  Entry *FindEntryThatContains(B addr) {
    return const_cast<Entry *>(
        static_cast<const RangeDataVector *>(this)->FindEntryThatContains(
            addr));
  }

  const Entry *FindEntryThatContains(B addr) const {
    return FindEntryThatContains(Entry(addr, 1));
  }

  const Entry *FindEntryThatContains(const Entry &range) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, range, BaseLessThan);

      while (pos != begin && pos[-1].Contains(range))
        --pos;

      if (pos != end && pos->Contains(range))
        return &(*pos);
    }
    return nullptr;
  }

  const Entry *FindEntryStartsAt(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      auto begin = m_entries.begin(), end = m_entries.end();
      auto pos = std::lower_bound(begin, end, Entry(addr, 1), BaseLessThan);
      if (pos != end && pos->base == addr)
        return &(*pos);
    }
    return nullptr;
  }

  // This method will return the entry that contains the given address, or the
  // entry following that address.  If you give it an address of 0 and the
  // first entry starts at address 0x100, you will get the entry at 0x100.
  //
  // For most uses, FindEntryThatContains is the correct one to use; this is a
  // less commonly needed behavior.  It was added for core file memory regions,
  // where we want to present a gap in the memory regions as a distinct region,
  // so we need to know the start address of the next memory section that
  // exists.
  const Entry *FindEntryThatContainsOrFollows(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, addr,
                           [](const Entry &lhs, B rhs_base) -> bool {
                             return lhs.GetRangeEnd() <= rhs_base;
                           });

      while (pos != begin && pos[-1].Contains(addr))
        --pos;

      if (pos != end)
        return &(*pos);
    }
    return nullptr;
  }

  Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }

  const Entry *Back() const {
    return (m_entries.empty() ? nullptr : &m_entries.back());
  }

protected:
  Collection m_entries;
  Compare m_compare;

private:
  // Compute the upper bound (max range end) of the subtree rooted at each
  // midpoint; this is the extra information the search needs for pruning.
  B ComputeUpperBounds(size_t lo, size_t hi) {
    size_t mid = (lo + hi) / 2;
    AugmentedEntry &entry = m_entries[mid];

    entry.upper_bound = entry.base + entry.size;

    if (lo < mid)
      entry.upper_bound =
          std::max(entry.upper_bound, ComputeUpperBounds(lo, mid));

    if (mid + 1 < hi)
      entry.upper_bound =
          std::max(entry.upper_bound, ComputeUpperBounds(mid + 1, hi));

    return entry.upper_bound;
  }

  // This is based on the augmented tree implementation found at
  // https://en.wikipedia.org/wiki/Interval_tree#Augmented_tree
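  //
  // For example (hypothetical ranges), with entries [0x1000, 0x1100),
  // [0x1040, 0x1240), [0x1300, 0x1400) and upper bounds 0x1100, 0x1400,
  // 0x1400, a query for 0x1500 is pruned at the root since 0x1500 > 0x1400,
  // while a query for 0x1050 recurses left, matches the two overlapping
  // entries, and bails out of the right subtree at the base check since
  // 0x1050 < 0x1300.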
  void FindEntryIndexesThatContain(B addr, size_t lo, size_t hi,
                                   std::vector<uint32_t> &indexes) {
    size_t mid = (lo + hi) / 2;
    const AugmentedEntry &entry = m_entries[mid];

    // addr is greater than the rightmost point of any interval below mid
    // so there cannot be any matches.
    if (addr > entry.upper_bound)
      return;

    // Recursively search left subtree
    if (lo < mid)
      FindEntryIndexesThatContain(addr, lo, mid, indexes);

    // If addr is smaller than the start of the current interval it
    // cannot contain it nor can any of its right subtree.
    if (addr < entry.base)
      return;

    // If the entry contains addr, append the entry's data value, which
    // callers of this method treat as an index.
    if (entry.Contains(addr))
      indexes.push_back(entry.data);

    // Recursively search right subtree
    if (mid + 1 < hi)
      FindEntryIndexesThatContain(addr, mid + 1, hi, indexes);
  }
};
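
// A usage sketch for RangeDataVector (hypothetical values, with a uint32_t
// payload attached to each range):
//
//   RangeDataVector<lldb::addr_t, lldb::addr_t, uint32_t> map;
//   map.Append({0x1000, 0x100, 7});
//   map.Append({0x1040, 0x200, 9});
//   map.Sort(); // sorts the entries and computes the upper bounds
//   const auto *e = map.FindEntryThatContains(0x1050);
//   // Both entries contain 0x1050; the lookup backs up to the first one,
//   // so e->data == 7.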

// A simple address-with-data class where you get to define the type of the
// address "B" and the type for the associated data "T".
template <typename B, typename T> struct AddressData {
  typedef B BaseType;
  typedef T DataType;

  BaseType addr;
  DataType data;

  AddressData() : addr(), data() {}

  AddressData(B a, DataType d) : addr(a), data(d) {}

  bool operator<(const AddressData &rhs) const {
    if (this->addr == rhs.addr)
      return this->data < rhs.data;
    return this->addr < rhs.addr;
  }

  bool operator==(const AddressData &rhs) const {
    return this->addr == rhs.addr && this->data == rhs.data;
  }

  bool operator!=(const AddressData &rhs) const {
    return this->addr != rhs.addr || this->data != rhs.data;
  }
};

template <typename B, typename T, unsigned N> class AddressDataArray {
public:
  typedef AddressData<B, T> Entry;
  typedef llvm::SmallVector<Entry, N> Collection;

  AddressDataArray() = default;

  ~AddressDataArray() = default;

  void Append(const Entry &entry) { m_entries.push_back(entry); }

  void Sort() {
    if (m_entries.size() > 1)
      std::stable_sort(m_entries.begin(), m_entries.end());
  }

#ifdef ASSERT_RANGEMAP_ARE_SORTED
  bool IsSorted() const {
    typename Collection::const_iterator pos, end, prev;
    // Check that each entry sorts at or after its predecessor.
    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
         prev = pos++) {
      if (prev != end && *pos < *prev)
        return false;
    }
    return true;
  }
#endif

  void Clear() { m_entries.clear(); }

  bool IsEmpty() const { return m_entries.empty(); }

  size_t GetSize() const { return m_entries.size(); }

  const Entry *GetEntryAtIndex(size_t i) const {
    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
  }

  // Clients must ensure that "i" is a valid index prior to calling this
  // function
  const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }

  static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
    return lhs.addr < rhs.addr;
  }

  Entry *FindEntry(B addr, bool exact_match_only) {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      Entry entry;
      entry.addr = addr;
      typename Collection::iterator begin = m_entries.begin();
      typename Collection::iterator end = m_entries.end();
      typename Collection::iterator pos =
          std::lower_bound(begin, end, entry, BaseLessThan);

      while (pos != begin && pos[-1].addr == addr)
        --pos;

      if (pos != end) {
        if (pos->addr == addr || !exact_match_only)
          return &(*pos);
      }
    }
    return nullptr;
  }

  const Entry *FindNextEntry(const Entry *entry) {
    // Compare against the underlying storage directly so we never
    // dereference the end iterator.
    if (entry >= m_entries.begin() && entry + 1 < m_entries.end())
      return entry + 1;
    return nullptr;
  }

  Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }

  const Entry *Back() const {
    return (m_entries.empty() ? nullptr : &m_entries.back());
  }

protected:
  Collection m_entries;
};
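
// A usage sketch for AddressDataArray (hypothetical values):
//
//   AddressDataArray<lldb::addr_t, uint32_t, 8> array;
//   array.Append({0x2000, 2});
//   array.Append({0x1000, 1});
//   array.Sort();
//   auto *exact = array.FindEntry(0x1000, /*exact_match_only=*/true);
//   // exact->data == 1
//   auto *following = array.FindEntry(0x1500, /*exact_match_only=*/false);
//   // no entry at 0x1500, so the lookup lands on the first entry whose
//   // address is >= 0x1500: following->data == 2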

} // namespace lldb_private

#endif // LLDB_UTILITY_RANGEMAP_H