//===-- RangeMap.h ----------------------------------------------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
10 #ifndef liblldb_RangeMap_h_
11 #define liblldb_RangeMap_h_
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <vector>

#include "llvm/ADT/SmallVector.h"

#include "lldb/lldb-private.h"
20 // Uncomment to make sure all Range objects are sorted when needed
21 //#define ASSERT_RANGEMAP_ARE_SORTED
23 namespace lldb_private {
25 //----------------------------------------------------------------------
26 // Templatized classes for dealing with generic ranges and also collections of
27 // ranges, or collections of ranges that have associated data.
28 //----------------------------------------------------------------------
30 //----------------------------------------------------------------------
31 // A simple range class where you get to define the type of the range
32 // base "B", and the type used for the range byte size "S".
33 //----------------------------------------------------------------------
template <typename B, typename S> struct Range {
  typedef B BaseType;
  typedef S SizeType;

  BaseType base;
  SizeType size;

  Range() : base(0), size(0) {}

  Range(BaseType b, SizeType s) : base(b), size(s) {}

  // Reset the range: set the base to "b" and the size to zero.
  void Clear(BaseType b = 0) {
    base = b;
    size = 0;
  }

  // Set the start value for the range, and keep the same size
  BaseType GetRangeBase() const { return base; }

  void SetRangeBase(BaseType b) { base = b; }

  void Slide(BaseType slide) { base += slide; }

  // Merge "rhs" into this range if the two ranges adjoin or intersect.
  // Returns true if a union was performed, false if the ranges are disjoint
  // and this range was left unmodified.
  bool Union(const Range &rhs) {
    if (DoesAdjoinOrIntersect(rhs)) {
      auto new_end = std::max<BaseType>(GetRangeEnd(), rhs.GetRangeEnd());
      base = std::min<BaseType>(base, rhs.base);
      size = new_end - base;
      return true;
    }
    return false;
  }

  BaseType GetRangeEnd() const { return base + size; }

  // Set the (exclusive) end of the range. If "end" is not greater than the
  // base, the range becomes empty (size zero).
  void SetRangeEnd(BaseType end) {
    if (end > base)
      size = end - base;
    else
      size = 0;
  }

  SizeType GetByteSize() const { return size; }

  void SetByteSize(SizeType s) { size = s; }

  bool IsValid() const { return size > 0; }

  // Returns true if "r" lies inside the half-open interval [base, base+size).
  bool Contains(BaseType r) const {
    return (GetRangeBase() <= r) && (r < GetRangeEnd());
  }

  // Like Contains(), but treats the end of the range as inclusive so that a
  // value exactly at GetRangeEnd() still counts as contained.
  bool ContainsEndInclusive(BaseType r) const {
    return (GetRangeBase() <= r) && (r <= GetRangeEnd());
  }

  // Returns true if "range" lies entirely within this range.
  bool Contains(const Range &range) const {
    return Contains(range.GetRangeBase()) &&
           ContainsEndInclusive(range.GetRangeEnd());
  }

  // Returns true if the two ranges adjoin or intersect
  bool DoesAdjoinOrIntersect(const Range &rhs) const {
    const BaseType lhs_base = this->GetRangeBase();
    const BaseType rhs_base = rhs.GetRangeBase();
    const BaseType lhs_end = this->GetRangeEnd();
    const BaseType rhs_end = rhs.GetRangeEnd();
    bool result = (lhs_base <= rhs_end) && (lhs_end >= rhs_base);
    return result;
  }

  // Returns true if the two ranges intersect
  bool DoesIntersect(const Range &rhs) const {
    const BaseType lhs_base = this->GetRangeBase();
    const BaseType rhs_base = rhs.GetRangeBase();
    const BaseType lhs_end = this->GetRangeEnd();
    const BaseType rhs_end = rhs.GetRangeEnd();
    bool result = (lhs_base < rhs_end) && (lhs_end > rhs_base);
    return result;
  }

  // Order by base first, then by size, so sorting is deterministic.
  bool operator<(const Range &rhs) const {
    if (base == rhs.base)
      return size < rhs.size;
    return base < rhs.base;
  }

  bool operator==(const Range &rhs) const {
    return base == rhs.base && size == rhs.size;
  }

  bool operator!=(const Range &rhs) const {
    return base != rhs.base || size != rhs.size;
  }
};
132 //----------------------------------------------------------------------
133 // A range array class where you get to define the type of the ranges
134 // that the collection contains.
135 //----------------------------------------------------------------------
137 template <typename B, typename S, unsigned N> class RangeArray {
141 typedef Range<B, S> Entry;
142 typedef llvm::SmallVector<Entry, N> Collection;
144 RangeArray() = default;
146 ~RangeArray() = default;
148 void Append(const Entry &entry) { m_entries.push_back(entry); }
150 void Append(B base, S size) { m_entries.emplace_back(base, size); }
152 bool RemoveEntrtAtIndex(uint32_t idx) {
153 if (idx < m_entries.size()) {
154 m_entries.erase(m_entries.begin() + idx);
161 if (m_entries.size() > 1)
162 std::stable_sort(m_entries.begin(), m_entries.end());
165 #ifdef ASSERT_RANGEMAP_ARE_SORTED
166 bool IsSorted() const {
167 typename Collection::const_iterator pos, end, prev;
168 for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
170 if (prev != end && *pos < *prev)
177 void CombineConsecutiveRanges() {
178 #ifdef ASSERT_RANGEMAP_ARE_SORTED
181 // Can't combine if ranges if we have zero or one range
182 if (m_entries.size() > 1) {
183 // The list should be sorted prior to calling this function
184 typename Collection::iterator pos;
185 typename Collection::iterator end;
186 typename Collection::iterator prev;
187 bool can_combine = false;
188 // First we determine if we can combine any of the Entry objects so we
189 // don't end up allocating and making a new collection for no reason
190 for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
191 pos != end; prev = pos++) {
192 if (prev != end && prev->DoesAdjoinOrIntersect(*pos)) {
198 // We we can combine at least one entry, then we make a new collection
199 // and populate it accordingly, and then swap it into place.
201 Collection minimal_ranges;
202 for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
203 pos != end; prev = pos++) {
204 if (prev != end && prev->DoesAdjoinOrIntersect(*pos))
205 minimal_ranges.back().SetRangeEnd(
206 std::max<BaseType>(prev->GetRangeEnd(), pos->GetRangeEnd()));
208 minimal_ranges.push_back(*pos);
210 // Use the swap technique in case our new vector is much smaller. We
211 // must swap when using the STL because std::vector objects never
212 // release or reduce the memory once it has been allocated/reserved.
213 m_entries.swap(minimal_ranges);
218 BaseType GetMinRangeBase(BaseType fail_value) const {
219 #ifdef ASSERT_RANGEMAP_ARE_SORTED
222 if (m_entries.empty())
224 // m_entries must be sorted, so if we aren't empty, we grab the first
226 return m_entries.front().GetRangeBase();
229 BaseType GetMaxRangeEnd(BaseType fail_value) const {
230 #ifdef ASSERT_RANGEMAP_ARE_SORTED
233 if (m_entries.empty())
235 // m_entries must be sorted, so if we aren't empty, we grab the last
237 return m_entries.back().GetRangeEnd();
240 void Slide(BaseType slide) {
241 typename Collection::iterator pos, end;
242 for (pos = m_entries.begin(), end = m_entries.end(); pos != end; ++pos)
246 void Clear() { m_entries.clear(); }
248 bool IsEmpty() const { return m_entries.empty(); }
250 size_t GetSize() const { return m_entries.size(); }
252 const Entry *GetEntryAtIndex(size_t i) const {
253 return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
256 // Clients must ensure that "i" is a valid index prior to calling this
258 const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }
260 Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }
262 const Entry *Back() const {
263 return (m_entries.empty() ? nullptr : &m_entries.back());
266 static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
267 return lhs.GetRangeBase() < rhs.GetRangeBase();
270 uint32_t FindEntryIndexThatContains(B addr) const {
271 #ifdef ASSERT_RANGEMAP_ARE_SORTED
274 if (!m_entries.empty()) {
275 Entry entry(addr, 1);
276 typename Collection::const_iterator begin = m_entries.begin();
277 typename Collection::const_iterator end = m_entries.end();
278 typename Collection::const_iterator pos =
279 std::lower_bound(begin, end, entry, BaseLessThan);
281 if (pos != end && pos->Contains(addr)) {
282 return std::distance(begin, pos);
283 } else if (pos != begin) {
285 if (pos->Contains(addr))
286 return std::distance(begin, pos);
292 const Entry *FindEntryThatContains(B addr) const {
293 #ifdef ASSERT_RANGEMAP_ARE_SORTED
296 if (!m_entries.empty()) {
297 Entry entry(addr, 1);
298 typename Collection::const_iterator begin = m_entries.begin();
299 typename Collection::const_iterator end = m_entries.end();
300 typename Collection::const_iterator pos =
301 std::lower_bound(begin, end, entry, BaseLessThan);
303 if (pos != end && pos->Contains(addr)) {
305 } else if (pos != begin) {
307 if (pos->Contains(addr)) {
315 const Entry *FindEntryThatContains(const Entry &range) const {
316 #ifdef ASSERT_RANGEMAP_ARE_SORTED
319 if (!m_entries.empty()) {
320 typename Collection::const_iterator begin = m_entries.begin();
321 typename Collection::const_iterator end = m_entries.end();
322 typename Collection::const_iterator pos =
323 std::lower_bound(begin, end, range, BaseLessThan);
325 if (pos != end && pos->Contains(range)) {
327 } else if (pos != begin) {
329 if (pos->Contains(range)) {
338 Collection m_entries;
341 template <typename B, typename S> class RangeVector {
345 typedef Range<B, S> Entry;
346 typedef std::vector<Entry> Collection;
348 RangeVector() = default;
350 ~RangeVector() = default;
352 void Append(const Entry &entry) { m_entries.push_back(entry); }
354 void Append(B base, S size) { m_entries.emplace_back(base, size); }
356 // Insert an item into a sorted list and optionally combine it with any
357 // adjacent blocks if requested.
358 void Insert(const Entry &entry, bool combine) {
359 if (m_entries.empty()) {
360 m_entries.push_back(entry);
363 auto begin = m_entries.begin();
364 auto end = m_entries.end();
365 auto pos = std::lower_bound(begin, end, entry);
367 if (pos != end && pos->Union(entry)) {
368 CombinePrevAndNext(pos);
373 if (prev->Union(entry)) {
374 CombinePrevAndNext(prev);
379 m_entries.insert(pos, entry);
382 bool RemoveEntryAtIndex(uint32_t idx) {
383 if (idx < m_entries.size()) {
384 m_entries.erase(m_entries.begin() + idx);
391 if (m_entries.size() > 1)
392 std::stable_sort(m_entries.begin(), m_entries.end());
395 #ifdef ASSERT_RANGEMAP_ARE_SORTED
396 bool IsSorted() const {
397 typename Collection::const_iterator pos, end, prev;
398 // First we determine if we can combine any of the Entry objects so we
399 // don't end up allocating and making a new collection for no reason
400 for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
402 if (prev != end && *pos < *prev)
409 void CombineConsecutiveRanges() {
410 #ifdef ASSERT_RANGEMAP_ARE_SORTED
413 // Can't combine if ranges if we have zero or one range
414 if (m_entries.size() > 1) {
415 // The list should be sorted prior to calling this function
416 typename Collection::iterator pos;
417 typename Collection::iterator end;
418 typename Collection::iterator prev;
419 bool can_combine = false;
420 // First we determine if we can combine any of the Entry objects so we
421 // don't end up allocating and making a new collection for no reason
422 for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
423 pos != end; prev = pos++) {
424 if (prev != end && prev->DoesAdjoinOrIntersect(*pos)) {
430 // We we can combine at least one entry, then we make a new collection
431 // and populate it accordingly, and then swap it into place.
433 Collection minimal_ranges;
434 for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
435 pos != end; prev = pos++) {
436 if (prev != end && prev->DoesAdjoinOrIntersect(*pos))
437 minimal_ranges.back().SetRangeEnd(
438 std::max<BaseType>(prev->GetRangeEnd(), pos->GetRangeEnd()));
440 minimal_ranges.push_back(*pos);
442 // Use the swap technique in case our new vector is much smaller. We
443 // must swap when using the STL because std::vector objects never
444 // release or reduce the memory once it has been allocated/reserved.
445 m_entries.swap(minimal_ranges);
450 BaseType GetMinRangeBase(BaseType fail_value) const {
451 #ifdef ASSERT_RANGEMAP_ARE_SORTED
454 if (m_entries.empty())
456 // m_entries must be sorted, so if we aren't empty, we grab the first
458 return m_entries.front().GetRangeBase();
461 BaseType GetMaxRangeEnd(BaseType fail_value) const {
462 #ifdef ASSERT_RANGEMAP_ARE_SORTED
465 if (m_entries.empty())
467 // m_entries must be sorted, so if we aren't empty, we grab the last
469 return m_entries.back().GetRangeEnd();
472 void Slide(BaseType slide) {
473 typename Collection::iterator pos, end;
474 for (pos = m_entries.begin(), end = m_entries.end(); pos != end; ++pos)
478 void Clear() { m_entries.clear(); }
480 void Reserve(typename Collection::size_type size) { m_entries.reserve(size); }
482 bool IsEmpty() const { return m_entries.empty(); }
484 size_t GetSize() const { return m_entries.size(); }
486 const Entry *GetEntryAtIndex(size_t i) const {
487 return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
490 // Clients must ensure that "i" is a valid index prior to calling this
492 Entry &GetEntryRef(size_t i) { return m_entries[i]; }
493 const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }
495 Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }
497 const Entry *Back() const {
498 return (m_entries.empty() ? nullptr : &m_entries.back());
501 static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
502 return lhs.GetRangeBase() < rhs.GetRangeBase();
505 uint32_t FindEntryIndexThatContains(B addr) const {
506 #ifdef ASSERT_RANGEMAP_ARE_SORTED
509 if (!m_entries.empty()) {
510 Entry entry(addr, 1);
511 typename Collection::const_iterator begin = m_entries.begin();
512 typename Collection::const_iterator end = m_entries.end();
513 typename Collection::const_iterator pos =
514 std::lower_bound(begin, end, entry, BaseLessThan);
516 if (pos != end && pos->Contains(addr)) {
517 return std::distance(begin, pos);
518 } else if (pos != begin) {
520 if (pos->Contains(addr))
521 return std::distance(begin, pos);
527 const Entry *FindEntryThatContains(B addr) const {
528 #ifdef ASSERT_RANGEMAP_ARE_SORTED
531 if (!m_entries.empty()) {
532 Entry entry(addr, 1);
533 typename Collection::const_iterator begin = m_entries.begin();
534 typename Collection::const_iterator end = m_entries.end();
535 typename Collection::const_iterator pos =
536 std::lower_bound(begin, end, entry, BaseLessThan);
538 if (pos != end && pos->Contains(addr)) {
540 } else if (pos != begin) {
542 if (pos->Contains(addr)) {
550 const Entry *FindEntryThatContains(const Entry &range) const {
551 #ifdef ASSERT_RANGEMAP_ARE_SORTED
554 if (!m_entries.empty()) {
555 typename Collection::const_iterator begin = m_entries.begin();
556 typename Collection::const_iterator end = m_entries.end();
557 typename Collection::const_iterator pos =
558 std::lower_bound(begin, end, range, BaseLessThan);
560 if (pos != end && pos->Contains(range)) {
562 } else if (pos != begin) {
564 if (pos->Contains(range)) {
574 void CombinePrevAndNext(typename Collection::iterator pos) {
575 // Check if the prev or next entries in case they need to be unioned with
576 // the entry pointed to by "pos".
577 if (pos != m_entries.begin()) {
579 if (prev->Union(*pos))
580 m_entries.erase(pos);
584 auto end = m_entries.end();
588 if (pos->Union(*next))
589 m_entries.erase(next);
595 Collection m_entries;
598 //----------------------------------------------------------------------
599 // A simple range with data class where you get to define the type of
600 // the range base "B", the type used for the range byte size "S", and the type
601 // for the associated data "T".
602 //----------------------------------------------------------------------
603 template <typename B, typename S, typename T>
604 struct RangeData : public Range<B, S> {
609 RangeData() : Range<B, S>(), data() {}
611 RangeData(B base, S size) : Range<B, S>(base, size), data() {}
613 RangeData(B base, S size, DataType d) : Range<B, S>(base, size), data(d) {}
615 bool operator<(const RangeData &rhs) const {
616 if (this->base == rhs.base) {
617 if (this->size == rhs.size)
618 return this->data < rhs.data;
620 return this->size < rhs.size;
622 return this->base < rhs.base;
625 bool operator==(const RangeData &rhs) const {
626 return this->GetRangeBase() == rhs.GetRangeBase() &&
627 this->GetByteSize() == rhs.GetByteSize() && this->data == rhs.data;
630 bool operator!=(const RangeData &rhs) const {
631 return this->GetRangeBase() != rhs.GetRangeBase() ||
632 this->GetByteSize() != rhs.GetByteSize() || this->data != rhs.data;
636 template <typename B, typename S, typename T, unsigned N = 0>
637 class RangeDataVector {
639 typedef RangeData<B, S, T> Entry;
640 typedef llvm::SmallVector<Entry, N> Collection;
642 RangeDataVector() = default;
644 ~RangeDataVector() = default;
646 void Append(const Entry &entry) { m_entries.push_back(entry); }
649 if (m_entries.size() > 1)
650 std::stable_sort(m_entries.begin(), m_entries.end());
653 #ifdef ASSERT_RANGEMAP_ARE_SORTED
654 bool IsSorted() const {
655 typename Collection::const_iterator pos, end, prev;
656 // First we determine if we can combine any of the Entry objects so we
657 // don't end up allocating and making a new collection for no reason
658 for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
660 if (prev != end && *pos < *prev)
667 void CombineConsecutiveEntriesWithEqualData() {
668 #ifdef ASSERT_RANGEMAP_ARE_SORTED
671 typename Collection::iterator pos;
672 typename Collection::iterator end;
673 typename Collection::iterator prev;
674 bool can_combine = false;
675 // First we determine if we can combine any of the Entry objects so we
676 // don't end up allocating and making a new collection for no reason
677 for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
679 if (prev != end && prev->data == pos->data) {
685 // We we can combine at least one entry, then we make a new collection and
686 // populate it accordingly, and then swap it into place.
688 Collection minimal_ranges;
689 for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
690 pos != end; prev = pos++) {
691 if (prev != end && prev->data == pos->data)
692 minimal_ranges.back().SetRangeEnd(pos->GetRangeEnd());
694 minimal_ranges.push_back(*pos);
696 // Use the swap technique in case our new vector is much smaller. We must
697 // swap when using the STL because std::vector objects never release or
698 // reduce the memory once it has been allocated/reserved.
699 m_entries.swap(minimal_ranges);
703 void Clear() { m_entries.clear(); }
705 bool IsEmpty() const { return m_entries.empty(); }
707 size_t GetSize() const { return m_entries.size(); }
709 const Entry *GetEntryAtIndex(size_t i) const {
710 return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
713 Entry *GetMutableEntryAtIndex(size_t i) {
714 return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
717 // Clients must ensure that "i" is a valid index prior to calling this
719 const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }
721 static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
722 return lhs.GetRangeBase() < rhs.GetRangeBase();
725 uint32_t FindEntryIndexThatContains(B addr) const {
726 const Entry *entry = FindEntryThatContains(addr);
728 return std::distance(m_entries.begin(), entry);
732 uint32_t FindEntryIndexesThatContain(B addr,
733 std::vector<uint32_t> &indexes) const {
734 #ifdef ASSERT_RANGEMAP_ARE_SORTED
738 if (!m_entries.empty()) {
739 for (const auto &entry : m_entries) {
740 if (entry.Contains(addr))
741 indexes.push_back(entry.data);
744 return indexes.size();
747 Entry *FindEntryThatContains(B addr) {
748 return const_cast<Entry *>(
749 static_cast<const RangeDataVector *>(this)->FindEntryThatContains(
753 const Entry *FindEntryThatContains(B addr) const {
754 return FindEntryThatContains(Entry(addr, 1));
757 const Entry *FindEntryThatContains(const Entry &range) const {
758 #ifdef ASSERT_RANGEMAP_ARE_SORTED
761 if (!m_entries.empty()) {
762 typename Collection::const_iterator begin = m_entries.begin();
763 typename Collection::const_iterator end = m_entries.end();
764 typename Collection::const_iterator pos =
765 std::lower_bound(begin, end, range, BaseLessThan);
767 while (pos != begin && pos[-1].Contains(range))
770 if (pos != end && pos->Contains(range))
776 const Entry *FindEntryStartsAt(B addr) const {
777 #ifdef ASSERT_RANGEMAP_ARE_SORTED
780 if (!m_entries.empty()) {
781 auto begin = m_entries.begin(), end = m_entries.end();
782 auto pos = std::lower_bound(begin, end, Entry(addr, 1), BaseLessThan);
783 if (pos != end && pos->base == addr)
789 // This method will return the entry that contains the given address, or the
790 // entry following that address. If you give it an address of 0 and the
791 // first entry starts at address 0x100, you will get the entry at 0x100.
793 // For most uses, FindEntryThatContains is the correct one to use, this is a
794 // less commonly needed behavior. It was added for core file memory regions,
795 // where we want to present a gap in the memory regions as a distinct region,
796 // so we need to know the start address of the next memory section that
798 const Entry *FindEntryThatContainsOrFollows(B addr) const {
799 #ifdef ASSERT_RANGEMAP_ARE_SORTED
802 if (!m_entries.empty()) {
803 typename Collection::const_iterator begin = m_entries.begin();
804 typename Collection::const_iterator end = m_entries.end();
805 typename Collection::const_iterator pos =
806 std::lower_bound(m_entries.begin(), end, addr,
807 [](const Entry &lhs, B rhs_base) -> bool {
808 return lhs.GetRangeEnd() <= rhs_base;
811 while (pos != begin && pos[-1].Contains(addr))
820 Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }
822 const Entry *Back() const {
823 return (m_entries.empty() ? nullptr : &m_entries.back());
827 Collection m_entries;
//----------------------------------------------------------------------
// A simple address/data pair class where you get to define the type of
// the address "B" and the type for the associated data "T". Unlike
// RangeData, entries have no size.
//----------------------------------------------------------------------
template <typename B, typename T> struct AddressData {
  typedef B BaseType;
  typedef T DataType;

  BaseType addr;
  DataType data;

  AddressData() : addr(), data() {}

  AddressData(B a, DataType d) : addr(a), data(d) {}

  // Order by address, then by data so sorting is deterministic.
  bool operator<(const AddressData &rhs) const {
    if (this->addr == rhs.addr)
      return this->data < rhs.data;
    return this->addr < rhs.addr;
  }

  bool operator==(const AddressData &rhs) const {
    return this->addr == rhs.addr && this->data == rhs.data;
  }

  // Fixed: the data comparison previously used "==", which made "!=" return
  // false for entries with equal addresses but different data — inconsistent
  // with operator==.
  bool operator!=(const AddressData &rhs) const {
    return this->addr != rhs.addr || this->data != rhs.data;
  }
};
861 template <typename B, typename T, unsigned N> class AddressDataArray {
863 typedef AddressData<B, T> Entry;
864 typedef llvm::SmallVector<Entry, N> Collection;
866 AddressDataArray() = default;
868 ~AddressDataArray() = default;
870 void Append(const Entry &entry) { m_entries.push_back(entry); }
873 if (m_entries.size() > 1)
874 std::stable_sort(m_entries.begin(), m_entries.end());
877 #ifdef ASSERT_RANGEMAP_ARE_SORTED
878 bool IsSorted() const {
879 typename Collection::const_iterator pos, end, prev;
880 // First we determine if we can combine any of the Entry objects so we
881 // don't end up allocating and making a new collection for no reason
882 for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
884 if (prev != end && *pos < *prev)
891 void Clear() { m_entries.clear(); }
893 bool IsEmpty() const { return m_entries.empty(); }
895 size_t GetSize() const { return m_entries.size(); }
897 const Entry *GetEntryAtIndex(size_t i) const {
898 return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
901 // Clients must ensure that "i" is a valid index prior to calling this
903 const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }
905 static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
906 return lhs.addr < rhs.addr;
909 Entry *FindEntry(B addr, bool exact_match_only) {
910 #ifdef ASSERT_RANGEMAP_ARE_SORTED
913 if (!m_entries.empty()) {
916 typename Collection::iterator begin = m_entries.begin();
917 typename Collection::iterator end = m_entries.end();
918 typename Collection::iterator pos =
919 std::lower_bound(begin, end, entry, BaseLessThan);
921 while (pos != begin && pos[-1].addr == addr)
925 if (pos->addr == addr || !exact_match_only)
932 const Entry *FindNextEntry(const Entry *entry) {
933 if (entry >= &*m_entries.begin() && entry + 1 < &*m_entries.end())
938 Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }
940 const Entry *Back() const {
941 return (m_entries.empty() ? nullptr : &m_entries.back());
945 Collection m_entries;
948 } // namespace lldb_private
950 #endif // liblldb_RangeMap_h_