1 //===-- RangeMap.h ----------------------------------------------*- C++ -*-===//
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
7 //===----------------------------------------------------------------------===//
9 #ifndef LLDB_UTILITY_RANGEMAP_H
10 #define LLDB_UTILITY_RANGEMAP_H
#include <algorithm>
#include <cstdint>
#include <vector>

#include "llvm/ADT/SmallVector.h"

#include "lldb/lldb-private.h"
19 // Uncomment to make sure all Range objects are sorted when needed
20 //#define ASSERT_RANGEMAP_ARE_SORTED
22 namespace lldb_private {
24 // Templatized classes for dealing with generic ranges and also collections of
25 // ranges, or collections of ranges that have associated data.
27 // A simple range class where you get to define the type of the range
28 // base "B", and the type used for the range byte size "S".
template <typename B, typename S> struct Range {
  typedef B BaseType;
  typedef S SizeType;

  BaseType base; // Start of the range.
  SizeType size; // Number of units (typically bytes) in the range.

  Range() : base(0), size(0) {}

  Range(BaseType b, SizeType s) : base(b), size(s) {}

  // Reset to an empty (zero sized) range starting at "b".
  void Clear(BaseType b = 0) {
    base = b;
    size = 0;
  }

  BaseType GetRangeBase() const { return base; }

  // Set the start value for the range, and keep the same size
  void SetRangeBase(BaseType b) { base = b; }

  void Slide(BaseType slide) { base += slide; }

  // Merge "rhs" into this range if the two adjoin or intersect.
  // Returns true if a union happened, false if the ranges are disjoint and
  // this range was left unmodified.
  bool Union(const Range &rhs) {
    if (DoesAdjoinOrIntersect(rhs)) {
      auto new_end = std::max<BaseType>(GetRangeEnd(), rhs.GetRangeEnd());
      base = std::min<BaseType>(base, rhs.base);
      size = new_end - base;
      return true;
    }
    return false;
  }

  BaseType GetRangeEnd() const { return base + size; }

  // Set the end of the range; the size is clamped to zero when "end" is not
  // greater than the current base.
  void SetRangeEnd(BaseType end) {
    if (end > base)
      size = end - base;
    else
      size = 0;
  }

  SizeType GetByteSize() const { return size; }

  void SetByteSize(SizeType s) { size = s; }

  bool IsValid() const { return size > 0; }

  // End-exclusive containment test for a single value.
  bool Contains(BaseType r) const {
    return (GetRangeBase() <= r) && (r < GetRangeEnd());
  }

  // End-inclusive containment test, used when checking whole sub-ranges.
  bool ContainsEndInclusive(BaseType r) const {
    return (GetRangeBase() <= r) && (r <= GetRangeEnd());
  }

  bool Contains(const Range &range) const {
    return Contains(range.GetRangeBase()) &&
           ContainsEndInclusive(range.GetRangeEnd());
  }

  // Returns true if the two ranges adjoin or intersect
  bool DoesAdjoinOrIntersect(const Range &rhs) const {
    const BaseType lhs_base = this->GetRangeBase();
    const BaseType rhs_base = rhs.GetRangeBase();
    const BaseType lhs_end = this->GetRangeEnd();
    const BaseType rhs_end = rhs.GetRangeEnd();
    bool result = (lhs_base <= rhs_end) && (lhs_end >= rhs_base);
    return result;
  }

  // Returns true if the two ranges intersect
  bool DoesIntersect(const Range &rhs) const {
    const BaseType lhs_base = this->GetRangeBase();
    const BaseType rhs_base = rhs.GetRangeBase();
    const BaseType lhs_end = this->GetRangeEnd();
    const BaseType rhs_end = rhs.GetRangeEnd();
    bool result = (lhs_base < rhs_end) && (lhs_end > rhs_base);
    return result;
  }

  // Order by base first, then by size, so sorted collections of ranges are
  // grouped by start address.
  bool operator<(const Range &rhs) const {
    if (base == rhs.base)
      return size < rhs.size;
    return base < rhs.base;
  }

  bool operator==(const Range &rhs) const {
    return base == rhs.base && size == rhs.size;
  }

  bool operator!=(const Range &rhs) const {
    return base != rhs.base || size != rhs.size;
  }
};
125 // A range array class where you get to define the type of the ranges
126 // that the collection contains.
128 template <typename B, typename S, unsigned N> class RangeArray {
132 typedef Range<B, S> Entry;
133 typedef llvm::SmallVector<Entry, N> Collection;
135 RangeArray() = default;
137 ~RangeArray() = default;
139 void Append(const Entry &entry) { m_entries.push_back(entry); }
141 void Append(B base, S size) { m_entries.emplace_back(base, size); }
143 bool RemoveEntrtAtIndex(uint32_t idx) {
144 if (idx < m_entries.size()) {
145 m_entries.erase(m_entries.begin() + idx);
152 if (m_entries.size() > 1)
153 std::stable_sort(m_entries.begin(), m_entries.end());
156 #ifdef ASSERT_RANGEMAP_ARE_SORTED
157 bool IsSorted() const {
158 typename Collection::const_iterator pos, end, prev;
159 for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
161 if (prev != end && *pos < *prev)
168 void CombineConsecutiveRanges() {
169 #ifdef ASSERT_RANGEMAP_ARE_SORTED
172 // Can't combine if ranges if we have zero or one range
173 if (m_entries.size() > 1) {
174 // The list should be sorted prior to calling this function
175 typename Collection::iterator pos;
176 typename Collection::iterator end;
177 typename Collection::iterator prev;
178 bool can_combine = false;
179 // First we determine if we can combine any of the Entry objects so we
180 // don't end up allocating and making a new collection for no reason
181 for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
182 pos != end; prev = pos++) {
183 if (prev != end && prev->DoesAdjoinOrIntersect(*pos)) {
189 // We we can combine at least one entry, then we make a new collection
190 // and populate it accordingly, and then swap it into place.
192 Collection minimal_ranges;
193 for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
194 pos != end; prev = pos++) {
195 if (prev != end && prev->DoesAdjoinOrIntersect(*pos))
196 minimal_ranges.back().SetRangeEnd(
197 std::max<BaseType>(prev->GetRangeEnd(), pos->GetRangeEnd()));
199 minimal_ranges.push_back(*pos);
201 // Use the swap technique in case our new vector is much smaller. We
202 // must swap when using the STL because std::vector objects never
203 // release or reduce the memory once it has been allocated/reserved.
204 m_entries.swap(minimal_ranges);
209 BaseType GetMinRangeBase(BaseType fail_value) const {
210 #ifdef ASSERT_RANGEMAP_ARE_SORTED
213 if (m_entries.empty())
215 // m_entries must be sorted, so if we aren't empty, we grab the first
217 return m_entries.front().GetRangeBase();
220 BaseType GetMaxRangeEnd(BaseType fail_value) const {
221 #ifdef ASSERT_RANGEMAP_ARE_SORTED
224 if (m_entries.empty())
226 // m_entries must be sorted, so if we aren't empty, we grab the last
228 return m_entries.back().GetRangeEnd();
231 void Slide(BaseType slide) {
232 typename Collection::iterator pos, end;
233 for (pos = m_entries.begin(), end = m_entries.end(); pos != end; ++pos)
237 void Clear() { m_entries.clear(); }
239 bool IsEmpty() const { return m_entries.empty(); }
241 size_t GetSize() const { return m_entries.size(); }
243 const Entry *GetEntryAtIndex(size_t i) const {
244 return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
247 // Clients must ensure that "i" is a valid index prior to calling this
249 const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }
251 Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }
253 const Entry *Back() const {
254 return (m_entries.empty() ? nullptr : &m_entries.back());
257 static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
258 return lhs.GetRangeBase() < rhs.GetRangeBase();
261 uint32_t FindEntryIndexThatContains(B addr) const {
262 #ifdef ASSERT_RANGEMAP_ARE_SORTED
265 if (!m_entries.empty()) {
266 Entry entry(addr, 1);
267 typename Collection::const_iterator begin = m_entries.begin();
268 typename Collection::const_iterator end = m_entries.end();
269 typename Collection::const_iterator pos =
270 std::lower_bound(begin, end, entry, BaseLessThan);
272 if (pos != end && pos->Contains(addr)) {
273 return std::distance(begin, pos);
274 } else if (pos != begin) {
276 if (pos->Contains(addr))
277 return std::distance(begin, pos);
283 const Entry *FindEntryThatContains(B addr) const {
284 #ifdef ASSERT_RANGEMAP_ARE_SORTED
287 if (!m_entries.empty()) {
288 Entry entry(addr, 1);
289 typename Collection::const_iterator begin = m_entries.begin();
290 typename Collection::const_iterator end = m_entries.end();
291 typename Collection::const_iterator pos =
292 std::lower_bound(begin, end, entry, BaseLessThan);
294 if (pos != end && pos->Contains(addr)) {
296 } else if (pos != begin) {
298 if (pos->Contains(addr)) {
306 const Entry *FindEntryThatContains(const Entry &range) const {
307 #ifdef ASSERT_RANGEMAP_ARE_SORTED
310 if (!m_entries.empty()) {
311 typename Collection::const_iterator begin = m_entries.begin();
312 typename Collection::const_iterator end = m_entries.end();
313 typename Collection::const_iterator pos =
314 std::lower_bound(begin, end, range, BaseLessThan);
316 if (pos != end && pos->Contains(range)) {
318 } else if (pos != begin) {
320 if (pos->Contains(range)) {
329 Collection m_entries;
332 template <typename B, typename S> class RangeVector {
336 typedef Range<B, S> Entry;
337 typedef std::vector<Entry> Collection;
339 RangeVector() = default;
341 ~RangeVector() = default;
343 void Append(const Entry &entry) { m_entries.push_back(entry); }
345 void Append(B base, S size) { m_entries.emplace_back(base, size); }
347 // Insert an item into a sorted list and optionally combine it with any
348 // adjacent blocks if requested.
349 void Insert(const Entry &entry, bool combine) {
350 if (m_entries.empty()) {
351 m_entries.push_back(entry);
354 auto begin = m_entries.begin();
355 auto end = m_entries.end();
356 auto pos = std::lower_bound(begin, end, entry);
358 if (pos != end && pos->Union(entry)) {
359 CombinePrevAndNext(pos);
364 if (prev->Union(entry)) {
365 CombinePrevAndNext(prev);
370 m_entries.insert(pos, entry);
373 bool RemoveEntryAtIndex(uint32_t idx) {
374 if (idx < m_entries.size()) {
375 m_entries.erase(m_entries.begin() + idx);
382 if (m_entries.size() > 1)
383 std::stable_sort(m_entries.begin(), m_entries.end());
386 #ifdef ASSERT_RANGEMAP_ARE_SORTED
387 bool IsSorted() const {
388 typename Collection::const_iterator pos, end, prev;
389 // First we determine if we can combine any of the Entry objects so we
390 // don't end up allocating and making a new collection for no reason
391 for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
393 if (prev != end && *pos < *prev)
400 void CombineConsecutiveRanges() {
401 #ifdef ASSERT_RANGEMAP_ARE_SORTED
404 // Can't combine if ranges if we have zero or one range
405 if (m_entries.size() > 1) {
406 // The list should be sorted prior to calling this function
407 typename Collection::iterator pos;
408 typename Collection::iterator end;
409 typename Collection::iterator prev;
410 bool can_combine = false;
411 // First we determine if we can combine any of the Entry objects so we
412 // don't end up allocating and making a new collection for no reason
413 for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
414 pos != end; prev = pos++) {
415 if (prev != end && prev->DoesAdjoinOrIntersect(*pos)) {
421 // We we can combine at least one entry, then we make a new collection
422 // and populate it accordingly, and then swap it into place.
424 Collection minimal_ranges;
425 for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
426 pos != end; prev = pos++) {
427 if (prev != end && prev->DoesAdjoinOrIntersect(*pos))
428 minimal_ranges.back().SetRangeEnd(
429 std::max<BaseType>(prev->GetRangeEnd(), pos->GetRangeEnd()));
431 minimal_ranges.push_back(*pos);
433 // Use the swap technique in case our new vector is much smaller. We
434 // must swap when using the STL because std::vector objects never
435 // release or reduce the memory once it has been allocated/reserved.
436 m_entries.swap(minimal_ranges);
441 BaseType GetMinRangeBase(BaseType fail_value) const {
442 #ifdef ASSERT_RANGEMAP_ARE_SORTED
445 if (m_entries.empty())
447 // m_entries must be sorted, so if we aren't empty, we grab the first
449 return m_entries.front().GetRangeBase();
452 BaseType GetMaxRangeEnd(BaseType fail_value) const {
453 #ifdef ASSERT_RANGEMAP_ARE_SORTED
456 if (m_entries.empty())
458 // m_entries must be sorted, so if we aren't empty, we grab the last
460 return m_entries.back().GetRangeEnd();
463 void Slide(BaseType slide) {
464 typename Collection::iterator pos, end;
465 for (pos = m_entries.begin(), end = m_entries.end(); pos != end; ++pos)
469 void Clear() { m_entries.clear(); }
471 void Reserve(typename Collection::size_type size) { m_entries.reserve(size); }
473 bool IsEmpty() const { return m_entries.empty(); }
475 size_t GetSize() const { return m_entries.size(); }
477 const Entry *GetEntryAtIndex(size_t i) const {
478 return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
481 // Clients must ensure that "i" is a valid index prior to calling this
483 Entry &GetEntryRef(size_t i) { return m_entries[i]; }
484 const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }
486 Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }
488 const Entry *Back() const {
489 return (m_entries.empty() ? nullptr : &m_entries.back());
492 static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
493 return lhs.GetRangeBase() < rhs.GetRangeBase();
496 uint32_t FindEntryIndexThatContains(B addr) const {
497 #ifdef ASSERT_RANGEMAP_ARE_SORTED
500 if (!m_entries.empty()) {
501 Entry entry(addr, 1);
502 typename Collection::const_iterator begin = m_entries.begin();
503 typename Collection::const_iterator end = m_entries.end();
504 typename Collection::const_iterator pos =
505 std::lower_bound(begin, end, entry, BaseLessThan);
507 if (pos != end && pos->Contains(addr)) {
508 return std::distance(begin, pos);
509 } else if (pos != begin) {
511 if (pos->Contains(addr))
512 return std::distance(begin, pos);
518 const Entry *FindEntryThatContains(B addr) const {
519 #ifdef ASSERT_RANGEMAP_ARE_SORTED
522 if (!m_entries.empty()) {
523 Entry entry(addr, 1);
524 typename Collection::const_iterator begin = m_entries.begin();
525 typename Collection::const_iterator end = m_entries.end();
526 typename Collection::const_iterator pos =
527 std::lower_bound(begin, end, entry, BaseLessThan);
529 if (pos != end && pos->Contains(addr)) {
531 } else if (pos != begin) {
533 if (pos->Contains(addr)) {
541 const Entry *FindEntryThatContains(const Entry &range) const {
542 #ifdef ASSERT_RANGEMAP_ARE_SORTED
545 if (!m_entries.empty()) {
546 typename Collection::const_iterator begin = m_entries.begin();
547 typename Collection::const_iterator end = m_entries.end();
548 typename Collection::const_iterator pos =
549 std::lower_bound(begin, end, range, BaseLessThan);
551 if (pos != end && pos->Contains(range)) {
553 } else if (pos != begin) {
555 if (pos->Contains(range)) {
564 void CombinePrevAndNext(typename Collection::iterator pos) {
565 // Check if the prev or next entries in case they need to be unioned with
566 // the entry pointed to by "pos".
567 if (pos != m_entries.begin()) {
569 if (prev->Union(*pos))
570 m_entries.erase(pos);
574 auto end = m_entries.end();
578 if (pos->Union(*next))
579 m_entries.erase(next);
585 Collection m_entries;
588 // A simple range with data class where you get to define the type of
589 // the range base "B", the type used for the range byte size "S", and the type
590 // for the associated data "T".
591 template <typename B, typename S, typename T>
592 struct RangeData : public Range<B, S> {
597 RangeData() : Range<B, S>(), data() {}
599 RangeData(B base, S size) : Range<B, S>(base, size), data() {}
601 RangeData(B base, S size, DataType d) : Range<B, S>(base, size), data(d) {}
603 bool operator<(const RangeData &rhs) const {
604 if (this->base == rhs.base) {
605 if (this->size == rhs.size)
606 return this->data < rhs.data;
608 return this->size < rhs.size;
610 return this->base < rhs.base;
613 bool operator==(const RangeData &rhs) const {
614 return this->GetRangeBase() == rhs.GetRangeBase() &&
615 this->GetByteSize() == rhs.GetByteSize() && this->data == rhs.data;
618 bool operator!=(const RangeData &rhs) const {
619 return this->GetRangeBase() != rhs.GetRangeBase() ||
620 this->GetByteSize() != rhs.GetByteSize() || this->data != rhs.data;
624 template <typename B, typename S, typename T, unsigned N = 0>
625 class RangeDataVector {
627 typedef lldb_private::Range<B, S> Range;
628 typedef RangeData<B, S, T> Entry;
629 typedef llvm::SmallVector<Entry, N> Collection;
631 RangeDataVector() = default;
633 ~RangeDataVector() = default;
635 void Append(const Entry &entry) { m_entries.push_back(entry); }
638 if (m_entries.size() > 1)
639 std::stable_sort(m_entries.begin(), m_entries.end());
642 #ifdef ASSERT_RANGEMAP_ARE_SORTED
643 bool IsSorted() const {
644 typename Collection::const_iterator pos, end, prev;
645 // First we determine if we can combine any of the Entry objects so we
646 // don't end up allocating and making a new collection for no reason
647 for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
649 if (prev != end && *pos < *prev)
656 void CombineConsecutiveEntriesWithEqualData() {
657 #ifdef ASSERT_RANGEMAP_ARE_SORTED
660 typename Collection::iterator pos;
661 typename Collection::iterator end;
662 typename Collection::iterator prev;
663 bool can_combine = false;
664 // First we determine if we can combine any of the Entry objects so we
665 // don't end up allocating and making a new collection for no reason
666 for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
668 if (prev != end && prev->data == pos->data) {
674 // We we can combine at least one entry, then we make a new collection and
675 // populate it accordingly, and then swap it into place.
677 Collection minimal_ranges;
678 for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
679 pos != end; prev = pos++) {
680 if (prev != end && prev->data == pos->data)
681 minimal_ranges.back().SetRangeEnd(pos->GetRangeEnd());
683 minimal_ranges.push_back(*pos);
685 // Use the swap technique in case our new vector is much smaller. We must
686 // swap when using the STL because std::vector objects never release or
687 // reduce the memory once it has been allocated/reserved.
688 m_entries.swap(minimal_ranges);
692 void Clear() { m_entries.clear(); }
694 bool IsEmpty() const { return m_entries.empty(); }
696 size_t GetSize() const { return m_entries.size(); }
698 const Entry *GetEntryAtIndex(size_t i) const {
699 return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
702 Entry *GetMutableEntryAtIndex(size_t i) {
703 return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
706 // Clients must ensure that "i" is a valid index prior to calling this
708 Entry &GetEntryRef(size_t i) { return m_entries[i]; }
709 const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }
711 static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
712 return lhs.GetRangeBase() < rhs.GetRangeBase();
715 uint32_t FindEntryIndexThatContains(B addr) const {
716 const Entry *entry = FindEntryThatContains(addr);
718 return std::distance(m_entries.begin(), entry);
722 uint32_t FindEntryIndexesThatContain(B addr,
723 std::vector<uint32_t> &indexes) const {
724 #ifdef ASSERT_RANGEMAP_ARE_SORTED
728 if (!m_entries.empty()) {
729 for (const auto &entry : m_entries) {
730 if (entry.Contains(addr))
731 indexes.push_back(entry.data);
734 return indexes.size();
737 Entry *FindEntryThatContains(B addr) {
738 return const_cast<Entry *>(
739 static_cast<const RangeDataVector *>(this)->FindEntryThatContains(
743 const Entry *FindEntryThatContains(B addr) const {
744 return FindEntryThatContains(Entry(addr, 1));
747 const Entry *FindEntryThatContains(const Entry &range) const {
748 #ifdef ASSERT_RANGEMAP_ARE_SORTED
751 if (!m_entries.empty()) {
752 typename Collection::const_iterator begin = m_entries.begin();
753 typename Collection::const_iterator end = m_entries.end();
754 typename Collection::const_iterator pos =
755 std::lower_bound(begin, end, range, BaseLessThan);
757 while (pos != begin && pos[-1].Contains(range))
760 if (pos != end && pos->Contains(range))
766 const Entry *FindEntryStartsAt(B addr) const {
767 #ifdef ASSERT_RANGEMAP_ARE_SORTED
770 if (!m_entries.empty()) {
771 auto begin = m_entries.begin(), end = m_entries.end();
772 auto pos = std::lower_bound(begin, end, Entry(addr, 1), BaseLessThan);
773 if (pos != end && pos->base == addr)
779 // This method will return the entry that contains the given address, or the
780 // entry following that address. If you give it an address of 0 and the
781 // first entry starts at address 0x100, you will get the entry at 0x100.
783 // For most uses, FindEntryThatContains is the correct one to use, this is a
784 // less commonly needed behavior. It was added for core file memory regions,
785 // where we want to present a gap in the memory regions as a distinct region,
786 // so we need to know the start address of the next memory section that
788 const Entry *FindEntryThatContainsOrFollows(B addr) const {
789 #ifdef ASSERT_RANGEMAP_ARE_SORTED
792 if (!m_entries.empty()) {
793 typename Collection::const_iterator begin = m_entries.begin();
794 typename Collection::const_iterator end = m_entries.end();
795 typename Collection::const_iterator pos =
796 std::lower_bound(m_entries.begin(), end, addr,
797 [](const Entry &lhs, B rhs_base) -> bool {
798 return lhs.GetRangeEnd() <= rhs_base;
801 while (pos != begin && pos[-1].Contains(addr))
810 Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }
812 const Entry *Back() const {
813 return (m_entries.empty() ? nullptr : &m_entries.back());
817 Collection m_entries;
// An address-with-data class where you get to define the type of the
// address "B" and the type for the associated data "T". Unlike RangeData,
// entries have no size: each one associates data with a single address.
template <typename B, typename T> struct AddressData {
  typedef B BaseType;
  typedef T DataType;

  BaseType addr; // The address this entry describes.
  DataType data; // The data associated with the address.

  AddressData() : addr(), data() {}

  AddressData(B a, DataType d) : addr(a), data(d) {}

  // Order by address first, then by data, so entries with equal addresses
  // still have a deterministic order.
  bool operator<(const AddressData &rhs) const {
    if (this->addr == rhs.addr)
      return this->data < rhs.data;
    return this->addr < rhs.addr;
  }

  bool operator==(const AddressData &rhs) const {
    return this->addr == rhs.addr && this->data == rhs.data;
  }

  // Fixed: the data comparison previously used "==", which inverted the
  // result whenever the addresses matched (equal objects compared unequal
  // and vice versa). operator!= must be the negation of operator==.
  bool operator!=(const AddressData &rhs) const {
    return this->addr != rhs.addr || this->data != rhs.data;
  }
};
849 template <typename B, typename T, unsigned N> class AddressDataArray {
851 typedef AddressData<B, T> Entry;
852 typedef llvm::SmallVector<Entry, N> Collection;
854 AddressDataArray() = default;
856 ~AddressDataArray() = default;
858 void Append(const Entry &entry) { m_entries.push_back(entry); }
861 if (m_entries.size() > 1)
862 std::stable_sort(m_entries.begin(), m_entries.end());
865 #ifdef ASSERT_RANGEMAP_ARE_SORTED
866 bool IsSorted() const {
867 typename Collection::const_iterator pos, end, prev;
868 // First we determine if we can combine any of the Entry objects so we
869 // don't end up allocating and making a new collection for no reason
870 for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
872 if (prev != end && *pos < *prev)
879 void Clear() { m_entries.clear(); }
881 bool IsEmpty() const { return m_entries.empty(); }
883 size_t GetSize() const { return m_entries.size(); }
885 const Entry *GetEntryAtIndex(size_t i) const {
886 return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
889 // Clients must ensure that "i" is a valid index prior to calling this
891 const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }
893 static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
894 return lhs.addr < rhs.addr;
897 Entry *FindEntry(B addr, bool exact_match_only) {
898 #ifdef ASSERT_RANGEMAP_ARE_SORTED
901 if (!m_entries.empty()) {
904 typename Collection::iterator begin = m_entries.begin();
905 typename Collection::iterator end = m_entries.end();
906 typename Collection::iterator pos =
907 std::lower_bound(begin, end, entry, BaseLessThan);
909 while (pos != begin && pos[-1].addr == addr)
913 if (pos->addr == addr || !exact_match_only)
920 const Entry *FindNextEntry(const Entry *entry) {
921 if (entry >= &*m_entries.begin() && entry + 1 < &*m_entries.end())
926 Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }
928 const Entry *Back() const {
929 return (m_entries.empty() ? nullptr : &m_entries.back());
933 Collection m_entries;
936 } // namespace lldb_private
938 #endif // LLDB_UTILITY_RANGEMAP_H