1 //===-- RangeMap.h ----------------------------------------------*- C++ -*-===//
3 // The LLVM Compiler Infrastructure
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
8 //===----------------------------------------------------------------------===//
10 #ifndef liblldb_RangeMap_h_
11 #define liblldb_RangeMap_h_
18 // Other libraries and framework includes
19 #include "llvm/ADT/SmallVector.h"
22 #include "lldb/lldb-private.h"
24 // Uncomment to make sure all Range objects are sorted when needed
25 //#define ASSERT_RANGEMAP_ARE_SORTED
27 namespace lldb_private {
//----------------------------------------------------------------------
// Templatized classes for dealing with generic ranges and also
// collections of ranges, or collections of ranges that have associated
// data.
//----------------------------------------------------------------------
35 //----------------------------------------------------------------------
36 // A simple range class where you get to define the type of the range
37 // base "B", and the type used for the range byte size "S".
38 //----------------------------------------------------------------------
//----------------------------------------------------------------------
// A simple range class where you get to define the type of the range
// base "B", and the type used for the range byte size "S".
//----------------------------------------------------------------------
template <typename B, typename S> struct Range {
  typedef B BaseType;
  typedef S SizeType;

  BaseType base; // Start of the range.
  SizeType size; // Number of units in the range; end is base + size.

  Range() : base(0), size(0) {}

  Range(BaseType b, SizeType s) : base(b), size(s) {}

  // Reset the range to start at \a b with a zero size.
  void Clear(BaseType b = 0) {
    base = b;
    size = 0;
  }

  // Set the start value for the range, and keep the same size
  BaseType GetRangeBase() const { return base; }

  void SetRangeBase(BaseType b) { base = b; }

  void Slide(BaseType slide) { base += slide; }

  // Grow this range to cover \a rhs as well if the two ranges adjoin or
  // intersect. Returns true if the union was performed, false if the
  // ranges are disjoint and this range was left untouched.
  bool Union(const Range &rhs) {
    if (DoesAdjoinOrIntersect(rhs)) {
      auto new_end = std::max<BaseType>(GetRangeEnd(), rhs.GetRangeEnd());
      base = std::min<BaseType>(base, rhs.base);
      size = new_end - base;
      return true;
    }
    return false;
  }

  BaseType GetRangeEnd() const { return base + size; }

  // Move the exclusive end of the range to \a end, keeping the base. An
  // \a end at or before the base yields an empty (invalid) range.
  void SetRangeEnd(BaseType end) {
    if (end > base)
      size = end - base;
    else
      size = 0;
  }

  SizeType GetByteSize() const { return size; }

  void SetByteSize(SizeType s) { size = s; }

  bool IsValid() const { return size > 0; }

  // True if \a r falls within [base, base + size).
  bool Contains(BaseType r) const {
    return (GetRangeBase() <= r) && (r < GetRangeEnd());
  }

  // Like Contains() but also accepts the exclusive end itself.
  bool ContainsEndInclusive(BaseType r) const {
    return (GetRangeBase() <= r) && (r <= GetRangeEnd());
  }

  // True if \a range lies entirely within this range.
  bool Contains(const Range &range) const {
    return Contains(range.GetRangeBase()) &&
           ContainsEndInclusive(range.GetRangeEnd());
  }

  // Returns true if the two ranges adjoin or intersect
  bool DoesAdjoinOrIntersect(const Range &rhs) const {
    const BaseType lhs_base = this->GetRangeBase();
    const BaseType rhs_base = rhs.GetRangeBase();
    const BaseType lhs_end = this->GetRangeEnd();
    const BaseType rhs_end = rhs.GetRangeEnd();
    bool result = (lhs_base <= rhs_end) && (lhs_end >= rhs_base);
    return result;
  }

  // Returns true if the two ranges intersect
  bool DoesIntersect(const Range &rhs) const {
    const BaseType lhs_base = this->GetRangeBase();
    const BaseType rhs_base = rhs.GetRangeBase();
    const BaseType lhs_end = this->GetRangeEnd();
    const BaseType rhs_end = rhs.GetRangeEnd();
    bool result = (lhs_base < rhs_end) && (lhs_end > rhs_base);
    return result;
  }

  // Order by base first, then by size, so equal-base ranges sort
  // smallest-first.
  bool operator<(const Range &rhs) const {
    if (base == rhs.base)
      return size < rhs.size;
    return base < rhs.base;
  }

  bool operator==(const Range &rhs) const {
    return base == rhs.base && size == rhs.size;
  }

  bool operator!=(const Range &rhs) const {
    return base != rhs.base || size != rhs.size;
  }
};
137 //----------------------------------------------------------------------
138 // A range array class where you get to define the type of the ranges
139 // that the collection contains.
140 //----------------------------------------------------------------------
142 template <typename B, typename S, unsigned N> class RangeArray {
146 typedef Range<B, S> Entry;
147 typedef llvm::SmallVector<Entry, N> Collection;
149 RangeArray() = default;
151 ~RangeArray() = default;
153 void Append(const Entry &entry) { m_entries.push_back(entry); }
155 void Append(B base, S size) { m_entries.emplace_back(base, size); }
157 bool RemoveEntrtAtIndex(uint32_t idx) {
158 if (idx < m_entries.size()) {
159 m_entries.erase(m_entries.begin() + idx);
166 if (m_entries.size() > 1)
167 std::stable_sort(m_entries.begin(), m_entries.end());
170 #ifdef ASSERT_RANGEMAP_ARE_SORTED
171 bool IsSorted() const {
172 typename Collection::const_iterator pos, end, prev;
173 // First we determine if we can combine any of the Entry objects so we
174 // don't end up allocating and making a new collection for no reason
175 for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
177 if (prev != end && *pos < *prev)
184 void CombineConsecutiveRanges() {
185 #ifdef ASSERT_RANGEMAP_ARE_SORTED
188 // Can't combine if ranges if we have zero or one range
189 if (m_entries.size() > 1) {
190 // The list should be sorted prior to calling this function
191 typename Collection::iterator pos;
192 typename Collection::iterator end;
193 typename Collection::iterator prev;
194 bool can_combine = false;
195 // First we determine if we can combine any of the Entry objects so we
196 // don't end up allocating and making a new collection for no reason
197 for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
198 pos != end; prev = pos++) {
199 if (prev != end && prev->DoesAdjoinOrIntersect(*pos)) {
205 // We we can combine at least one entry, then we make a new collection
206 // and populate it accordingly, and then swap it into place.
208 Collection minimal_ranges;
209 for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
210 pos != end; prev = pos++) {
211 if (prev != end && prev->DoesAdjoinOrIntersect(*pos))
212 minimal_ranges.back().SetRangeEnd(
213 std::max<BaseType>(prev->GetRangeEnd(), pos->GetRangeEnd()));
215 minimal_ranges.push_back(*pos);
217 // Use the swap technique in case our new vector is much smaller.
218 // We must swap when using the STL because std::vector objects never
219 // release or reduce the memory once it has been allocated/reserved.
220 m_entries.swap(minimal_ranges);
225 BaseType GetMinRangeBase(BaseType fail_value) const {
226 #ifdef ASSERT_RANGEMAP_ARE_SORTED
229 if (m_entries.empty())
231 // m_entries must be sorted, so if we aren't empty, we grab the
232 // first range's base
233 return m_entries.front().GetRangeBase();
236 BaseType GetMaxRangeEnd(BaseType fail_value) const {
237 #ifdef ASSERT_RANGEMAP_ARE_SORTED
240 if (m_entries.empty())
242 // m_entries must be sorted, so if we aren't empty, we grab the
244 return m_entries.back().GetRangeEnd();
247 void Slide(BaseType slide) {
248 typename Collection::iterator pos, end;
249 for (pos = m_entries.begin(), end = m_entries.end(); pos != end; ++pos)
253 void Clear() { m_entries.clear(); }
255 bool IsEmpty() const { return m_entries.empty(); }
257 size_t GetSize() const { return m_entries.size(); }
259 const Entry *GetEntryAtIndex(size_t i) const {
260 return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
263 // Clients must ensure that "i" is a valid index prior to calling this
265 const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }
267 Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }
269 const Entry *Back() const {
270 return (m_entries.empty() ? nullptr : &m_entries.back());
273 static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
274 return lhs.GetRangeBase() < rhs.GetRangeBase();
277 uint32_t FindEntryIndexThatContains(B addr) const {
278 #ifdef ASSERT_RANGEMAP_ARE_SORTED
281 if (!m_entries.empty()) {
282 Entry entry(addr, 1);
283 typename Collection::const_iterator begin = m_entries.begin();
284 typename Collection::const_iterator end = m_entries.end();
285 typename Collection::const_iterator pos =
286 std::lower_bound(begin, end, entry, BaseLessThan);
288 if (pos != end && pos->Contains(addr)) {
289 return std::distance(begin, pos);
290 } else if (pos != begin) {
292 if (pos->Contains(addr))
293 return std::distance(begin, pos);
299 const Entry *FindEntryThatContains(B addr) const {
300 #ifdef ASSERT_RANGEMAP_ARE_SORTED
303 if (!m_entries.empty()) {
304 Entry entry(addr, 1);
305 typename Collection::const_iterator begin = m_entries.begin();
306 typename Collection::const_iterator end = m_entries.end();
307 typename Collection::const_iterator pos =
308 std::lower_bound(begin, end, entry, BaseLessThan);
310 if (pos != end && pos->Contains(addr)) {
312 } else if (pos != begin) {
314 if (pos->Contains(addr)) {
322 const Entry *FindEntryThatContains(const Entry &range) const {
323 #ifdef ASSERT_RANGEMAP_ARE_SORTED
326 if (!m_entries.empty()) {
327 typename Collection::const_iterator begin = m_entries.begin();
328 typename Collection::const_iterator end = m_entries.end();
329 typename Collection::const_iterator pos =
330 std::lower_bound(begin, end, range, BaseLessThan);
332 if (pos != end && pos->Contains(range)) {
334 } else if (pos != begin) {
336 if (pos->Contains(range)) {
345 Collection m_entries;
348 template <typename B, typename S> class RangeVector {
352 typedef Range<B, S> Entry;
353 typedef std::vector<Entry> Collection;
355 RangeVector() = default;
357 ~RangeVector() = default;
359 void Append(const Entry &entry) { m_entries.push_back(entry); }
361 void Append(B base, S size) { m_entries.emplace_back(base, size); }
363 // Insert an item into a sorted list and optionally combine it with any
364 // adjacent blocks if requested.
365 void Insert(const Entry &entry, bool combine) {
366 if (m_entries.empty()) {
367 m_entries.push_back(entry);
370 auto begin = m_entries.begin();
371 auto end = m_entries.end();
372 auto pos = std::lower_bound(begin, end, entry);
374 if (pos != end && pos->Union(entry)) {
375 CombinePrevAndNext(pos);
380 if (prev->Union(entry)) {
381 CombinePrevAndNext(prev);
386 m_entries.insert(pos, entry);
389 bool RemoveEntryAtIndex(uint32_t idx) {
390 if (idx < m_entries.size()) {
391 m_entries.erase(m_entries.begin() + idx);
398 if (m_entries.size() > 1)
399 std::stable_sort(m_entries.begin(), m_entries.end());
402 #ifdef ASSERT_RANGEMAP_ARE_SORTED
403 bool IsSorted() const {
404 typename Collection::const_iterator pos, end, prev;
405 // First we determine if we can combine any of the Entry objects so we
406 // don't end up allocating and making a new collection for no reason
407 for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
409 if (prev != end && *pos < *prev)
416 void CombineConsecutiveRanges() {
417 #ifdef ASSERT_RANGEMAP_ARE_SORTED
420 // Can't combine if ranges if we have zero or one range
421 if (m_entries.size() > 1) {
422 // The list should be sorted prior to calling this function
423 typename Collection::iterator pos;
424 typename Collection::iterator end;
425 typename Collection::iterator prev;
426 bool can_combine = false;
427 // First we determine if we can combine any of the Entry objects so we
428 // don't end up allocating and making a new collection for no reason
429 for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
430 pos != end; prev = pos++) {
431 if (prev != end && prev->DoesAdjoinOrIntersect(*pos)) {
437 // We we can combine at least one entry, then we make a new collection
438 // and populate it accordingly, and then swap it into place.
440 Collection minimal_ranges;
441 for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
442 pos != end; prev = pos++) {
443 if (prev != end && prev->DoesAdjoinOrIntersect(*pos))
444 minimal_ranges.back().SetRangeEnd(
445 std::max<BaseType>(prev->GetRangeEnd(), pos->GetRangeEnd()));
447 minimal_ranges.push_back(*pos);
449 // Use the swap technique in case our new vector is much smaller.
450 // We must swap when using the STL because std::vector objects never
451 // release or reduce the memory once it has been allocated/reserved.
452 m_entries.swap(minimal_ranges);
457 BaseType GetMinRangeBase(BaseType fail_value) const {
458 #ifdef ASSERT_RANGEMAP_ARE_SORTED
461 if (m_entries.empty())
463 // m_entries must be sorted, so if we aren't empty, we grab the
464 // first range's base
465 return m_entries.front().GetRangeBase();
468 BaseType GetMaxRangeEnd(BaseType fail_value) const {
469 #ifdef ASSERT_RANGEMAP_ARE_SORTED
472 if (m_entries.empty())
474 // m_entries must be sorted, so if we aren't empty, we grab the
476 return m_entries.back().GetRangeEnd();
479 void Slide(BaseType slide) {
480 typename Collection::iterator pos, end;
481 for (pos = m_entries.begin(), end = m_entries.end(); pos != end; ++pos)
485 void Clear() { m_entries.clear(); }
487 void Reserve(typename Collection::size_type size) { m_entries.reserve(size); }
489 bool IsEmpty() const { return m_entries.empty(); }
491 size_t GetSize() const { return m_entries.size(); }
493 const Entry *GetEntryAtIndex(size_t i) const {
494 return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
497 // Clients must ensure that "i" is a valid index prior to calling this
499 Entry &GetEntryRef(size_t i) { return m_entries[i]; }
500 const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }
502 Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }
504 const Entry *Back() const {
505 return (m_entries.empty() ? nullptr : &m_entries.back());
508 static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
509 return lhs.GetRangeBase() < rhs.GetRangeBase();
512 uint32_t FindEntryIndexThatContains(B addr) const {
513 #ifdef ASSERT_RANGEMAP_ARE_SORTED
516 if (!m_entries.empty()) {
517 Entry entry(addr, 1);
518 typename Collection::const_iterator begin = m_entries.begin();
519 typename Collection::const_iterator end = m_entries.end();
520 typename Collection::const_iterator pos =
521 std::lower_bound(begin, end, entry, BaseLessThan);
523 if (pos != end && pos->Contains(addr)) {
524 return std::distance(begin, pos);
525 } else if (pos != begin) {
527 if (pos->Contains(addr))
528 return std::distance(begin, pos);
534 const Entry *FindEntryThatContains(B addr) const {
535 #ifdef ASSERT_RANGEMAP_ARE_SORTED
538 if (!m_entries.empty()) {
539 Entry entry(addr, 1);
540 typename Collection::const_iterator begin = m_entries.begin();
541 typename Collection::const_iterator end = m_entries.end();
542 typename Collection::const_iterator pos =
543 std::lower_bound(begin, end, entry, BaseLessThan);
545 if (pos != end && pos->Contains(addr)) {
547 } else if (pos != begin) {
549 if (pos->Contains(addr)) {
557 const Entry *FindEntryThatContains(const Entry &range) const {
558 #ifdef ASSERT_RANGEMAP_ARE_SORTED
561 if (!m_entries.empty()) {
562 typename Collection::const_iterator begin = m_entries.begin();
563 typename Collection::const_iterator end = m_entries.end();
564 typename Collection::const_iterator pos =
565 std::lower_bound(begin, end, range, BaseLessThan);
567 if (pos != end && pos->Contains(range)) {
569 } else if (pos != begin) {
571 if (pos->Contains(range)) {
581 void CombinePrevAndNext(typename Collection::iterator pos) {
582 // Check if the prev or next entries in case they need to be unioned with
583 // the entry pointed to by "pos".
584 if (pos != m_entries.begin()) {
586 if (prev->Union(*pos))
587 m_entries.erase(pos);
591 auto end = m_entries.end();
595 if (pos->Union(*next))
596 m_entries.erase(next);
602 Collection m_entries;
605 //----------------------------------------------------------------------
606 // A simple range with data class where you get to define the type of
607 // the range base "B", the type used for the range byte size "S", and
608 // the type for the associated data "T".
609 //----------------------------------------------------------------------
610 template <typename B, typename S, typename T>
611 struct RangeData : public Range<B, S> {
616 RangeData() : Range<B, S>(), data() {}
618 RangeData(B base, S size) : Range<B, S>(base, size), data() {}
620 RangeData(B base, S size, DataType d) : Range<B, S>(base, size), data(d) {}
622 bool operator<(const RangeData &rhs) const {
623 if (this->base == rhs.base) {
624 if (this->size == rhs.size)
625 return this->data < rhs.data;
627 return this->size < rhs.size;
629 return this->base < rhs.base;
632 bool operator==(const RangeData &rhs) const {
633 return this->GetRangeBase() == rhs.GetRangeBase() &&
634 this->GetByteSize() == rhs.GetByteSize() && this->data == rhs.data;
637 bool operator!=(const RangeData &rhs) const {
638 return this->GetRangeBase() != rhs.GetRangeBase() ||
639 this->GetByteSize() != rhs.GetByteSize() || this->data != rhs.data;
643 template <typename B, typename S, typename T, unsigned N> class RangeDataArray {
645 typedef RangeData<B, S, T> Entry;
646 typedef llvm::SmallVector<Entry, N> Collection;
648 RangeDataArray() = default;
650 ~RangeDataArray() = default;
652 void Append(const Entry &entry) { m_entries.push_back(entry); }
655 if (m_entries.size() > 1)
656 std::stable_sort(m_entries.begin(), m_entries.end());
659 #ifdef ASSERT_RANGEMAP_ARE_SORTED
660 bool IsSorted() const {
661 typename Collection::const_iterator pos, end, prev;
662 // First we determine if we can combine any of the Entry objects so we
663 // don't end up allocating and making a new collection for no reason
664 for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
666 if (prev != end && *pos < *prev)
673 void CombineConsecutiveEntriesWithEqualData() {
674 #ifdef ASSERT_RANGEMAP_ARE_SORTED
677 typename Collection::iterator pos;
678 typename Collection::iterator end;
679 typename Collection::iterator prev;
680 bool can_combine = false;
681 // First we determine if we can combine any of the Entry objects so we
682 // don't end up allocating and making a new collection for no reason
683 for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
685 if (prev != end && prev->data == pos->data) {
691 // We we can combine at least one entry, then we make a new collection
692 // and populate it accordingly, and then swap it into place.
694 Collection minimal_ranges;
695 for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
696 pos != end; prev = pos++) {
697 if (prev != end && prev->data == pos->data)
698 minimal_ranges.back().SetRangeEnd(pos->GetRangeEnd());
700 minimal_ranges.push_back(*pos);
702 // Use the swap technique in case our new vector is much smaller.
703 // We must swap when using the STL because std::vector objects never
704 // release or reduce the memory once it has been allocated/reserved.
705 m_entries.swap(minimal_ranges);
709 void Clear() { m_entries.clear(); }
711 bool IsEmpty() const { return m_entries.empty(); }
713 size_t GetSize() const { return m_entries.size(); }
715 const Entry *GetEntryAtIndex(size_t i) const {
716 return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
719 // Clients must ensure that "i" is a valid index prior to calling this
721 const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }
723 static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
724 return lhs.GetRangeBase() < rhs.GetRangeBase();
727 uint32_t FindEntryIndexThatContains(B addr) const {
728 #ifdef ASSERT_RANGEMAP_ARE_SORTED
731 if (!m_entries.empty()) {
732 Entry entry(addr, 1);
733 typename Collection::const_iterator begin = m_entries.begin();
734 typename Collection::const_iterator end = m_entries.end();
735 typename Collection::const_iterator pos =
736 std::lower_bound(begin, end, entry, BaseLessThan);
738 if (pos != end && pos->Contains(addr)) {
739 return std::distance(begin, pos);
740 } else if (pos != begin) {
742 if (pos->Contains(addr))
743 return std::distance(begin, pos);
749 Entry *FindEntryThatContains(B addr) {
750 #ifdef ASSERT_RANGEMAP_ARE_SORTED
753 if (!m_entries.empty()) {
755 entry.SetRangeBase(addr);
756 entry.SetByteSize(1);
757 typename Collection::iterator begin = m_entries.begin();
758 typename Collection::iterator end = m_entries.end();
759 typename Collection::iterator pos =
760 std::lower_bound(begin, end, entry, BaseLessThan);
762 if (pos != end && pos->Contains(addr)) {
764 } else if (pos != begin) {
766 if (pos->Contains(addr)) {
774 const Entry *FindEntryThatContains(B addr) const {
775 #ifdef ASSERT_RANGEMAP_ARE_SORTED
778 if (!m_entries.empty()) {
780 entry.SetRangeBase(addr);
781 entry.SetByteSize(1);
782 typename Collection::const_iterator begin = m_entries.begin();
783 typename Collection::const_iterator end = m_entries.end();
784 typename Collection::const_iterator pos =
785 std::lower_bound(begin, end, entry, BaseLessThan);
787 if (pos != end && pos->Contains(addr)) {
789 } else if (pos != begin) {
791 if (pos->Contains(addr)) {
799 const Entry *FindEntryThatContains(const Entry &range) const {
800 #ifdef ASSERT_RANGEMAP_ARE_SORTED
803 if (!m_entries.empty()) {
804 typename Collection::const_iterator begin = m_entries.begin();
805 typename Collection::const_iterator end = m_entries.end();
806 typename Collection::const_iterator pos =
807 std::lower_bound(begin, end, range, BaseLessThan);
809 if (pos != end && pos->Contains(range)) {
811 } else if (pos != begin) {
813 if (pos->Contains(range)) {
821 Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }
823 const Entry *Back() const {
824 return (m_entries.empty() ? nullptr : &m_entries.back());
828 Collection m_entries;
831 // Same as RangeDataArray, but uses std::vector as to not
832 // require static storage of N items in the class itself
833 template <typename B, typename S, typename T> class RangeDataVector {
835 typedef RangeData<B, S, T> Entry;
836 typedef std::vector<Entry> Collection;
838 RangeDataVector() = default;
840 ~RangeDataVector() = default;
842 void Append(const Entry &entry) { m_entries.push_back(entry); }
845 if (m_entries.size() > 1)
846 std::stable_sort(m_entries.begin(), m_entries.end());
849 #ifdef ASSERT_RANGEMAP_ARE_SORTED
850 bool IsSorted() const {
851 typename Collection::const_iterator pos, end, prev;
852 // First we determine if we can combine any of the Entry objects so we
853 // don't end up allocating and making a new collection for no reason
854 for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
856 if (prev != end && *pos < *prev)
863 void CombineConsecutiveEntriesWithEqualData() {
864 #ifdef ASSERT_RANGEMAP_ARE_SORTED
867 typename Collection::iterator pos;
868 typename Collection::iterator end;
869 typename Collection::iterator prev;
870 bool can_combine = false;
871 // First we determine if we can combine any of the Entry objects so we
872 // don't end up allocating and making a new collection for no reason
873 for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
875 if (prev != end && prev->data == pos->data) {
881 // We we can combine at least one entry, then we make a new collection
882 // and populate it accordingly, and then swap it into place.
884 Collection minimal_ranges;
885 for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
886 pos != end; prev = pos++) {
887 if (prev != end && prev->data == pos->data)
888 minimal_ranges.back().SetRangeEnd(pos->GetRangeEnd());
890 minimal_ranges.push_back(*pos);
892 // Use the swap technique in case our new vector is much smaller.
893 // We must swap when using the STL because std::vector objects never
894 // release or reduce the memory once it has been allocated/reserved.
895 m_entries.swap(minimal_ranges);
899 // Calculate the byte size of ranges with zero byte sizes by finding
900 // the next entry with a base address > the current base address
901 void CalculateSizesOfZeroByteSizeRanges(S full_size = 0) {
902 #ifdef ASSERT_RANGEMAP_ARE_SORTED
905 typename Collection::iterator pos;
906 typename Collection::iterator end;
907 typename Collection::iterator next;
908 for (pos = m_entries.begin(), end = m_entries.end(); pos != end; ++pos) {
909 if (pos->GetByteSize() == 0) {
910 // Watch out for multiple entries with same address and make sure
911 // we find an entry that is greater than the current base address
912 // before we use that for the size
913 auto curr_base = pos->GetRangeBase();
914 for (next = pos + 1; next != end; ++next) {
915 auto next_base = next->GetRangeBase();
916 if (next_base > curr_base) {
917 pos->SetByteSize(next_base - curr_base);
921 if (next == end && full_size > curr_base)
922 pos->SetByteSize(full_size - curr_base);
927 void Clear() { m_entries.clear(); }
929 void Reserve(typename Collection::size_type size) { m_entries.resize(size); }
931 bool IsEmpty() const { return m_entries.empty(); }
933 size_t GetSize() const { return m_entries.size(); }
935 const Entry *GetEntryAtIndex(size_t i) const {
936 return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
939 Entry *GetMutableEntryAtIndex(size_t i) {
940 return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
943 // Clients must ensure that "i" is a valid index prior to calling this
945 const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }
947 static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
948 return lhs.GetRangeBase() < rhs.GetRangeBase();
951 uint32_t FindEntryIndexThatContains(B addr) const {
952 #ifdef ASSERT_RANGEMAP_ARE_SORTED
955 if (!m_entries.empty()) {
956 Entry entry(addr, 1);
957 typename Collection::const_iterator begin = m_entries.begin();
958 typename Collection::const_iterator end = m_entries.end();
959 typename Collection::const_iterator pos =
960 std::lower_bound(begin, end, entry, BaseLessThan);
962 while (pos != begin && pos[-1].Contains(addr))
965 if (pos != end && pos->Contains(addr))
966 return std::distance(begin, pos);
971 uint32_t FindEntryIndexesThatContain(B addr,
972 std::vector<uint32_t> &indexes) const {
973 #ifdef ASSERT_RANGEMAP_ARE_SORTED
977 if (!m_entries.empty()) {
978 typename Collection::const_iterator pos;
979 for (const auto &entry : m_entries) {
980 if (entry.Contains(addr))
981 indexes.push_back(entry.data);
984 return indexes.size();
987 Entry *FindEntryThatContains(B addr) {
988 #ifdef ASSERT_RANGEMAP_ARE_SORTED
991 if (!m_entries.empty()) {
993 entry.SetRangeBase(addr);
994 entry.SetByteSize(1);
995 typename Collection::iterator begin = m_entries.begin();
996 typename Collection::iterator end = m_entries.end();
997 typename Collection::iterator pos =
998 std::lower_bound(begin, end, entry, BaseLessThan);
1000 while (pos != begin && pos[-1].Contains(addr))
1003 if (pos != end && pos->Contains(addr))
1009 const Entry *FindEntryThatContains(B addr) const {
1010 #ifdef ASSERT_RANGEMAP_ARE_SORTED
1013 if (!m_entries.empty()) {
1015 entry.SetRangeBase(addr);
1016 entry.SetByteSize(1);
1017 typename Collection::const_iterator begin = m_entries.begin();
1018 typename Collection::const_iterator end = m_entries.end();
1019 typename Collection::const_iterator pos =
1020 std::lower_bound(begin, end, entry, BaseLessThan);
1022 while (pos != begin && pos[-1].Contains(addr))
1025 if (pos != end && pos->Contains(addr))
1031 const Entry *FindEntryThatContains(const Entry &range) const {
1032 #ifdef ASSERT_RANGEMAP_ARE_SORTED
1035 if (!m_entries.empty()) {
1036 typename Collection::const_iterator begin = m_entries.begin();
1037 typename Collection::const_iterator end = m_entries.end();
1038 typename Collection::const_iterator pos =
1039 std::lower_bound(begin, end, range, BaseLessThan);
1041 while (pos != begin && pos[-1].Contains(range))
1044 if (pos != end && pos->Contains(range))
1050 const Entry *FindEntryStartsAt(B addr) const {
1051 #ifdef ASSERT_RANGEMAP_ARE_SORTED
1054 if (!m_entries.empty()) {
1055 auto begin = m_entries.begin(), end = m_entries.end();
1056 auto pos = std::lower_bound(begin, end, Entry(addr, 1), BaseLessThan);
1057 if (pos != end && pos->base == addr)
1063 // This method will return the entry that contains the given address, or the
1064 // entry following that address. If you give it an address of 0 and the first
1065 // entry starts at address 0x100, you will get the entry at 0x100.
1067 // For most uses, FindEntryThatContains is the correct one to use, this is a
1068 // less commonly needed behavior. It was added for core file memory regions,
1069 // where we want to present a gap in the memory regions as a distinct region,
1070 // so we need to know the start address of the next memory section that
1072 const Entry *FindEntryThatContainsOrFollows(B addr) const {
1073 #ifdef ASSERT_RANGEMAP_ARE_SORTED
1076 if (!m_entries.empty()) {
1077 typename Collection::const_iterator begin = m_entries.begin();
1078 typename Collection::const_iterator end = m_entries.end();
1079 typename Collection::const_iterator pos =
1080 std::lower_bound(m_entries.begin(), end, addr,
1081 [](const Entry &lhs, B rhs_base) -> bool {
1082 return lhs.GetRangeEnd() <= rhs_base;
1085 while (pos != begin && pos[-1].Contains(addr))
1094 Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }
1096 const Entry *Back() const {
1097 return (m_entries.empty() ? nullptr : &m_entries.back());
1101 Collection m_entries;
1104 //----------------------------------------------------------------------
1105 // A simple range with data class where you get to define the type of
1106 // the range base "B", the type used for the range byte size "S", and
1107 // the type for the associated data "T".
1108 //----------------------------------------------------------------------
//----------------------------------------------------------------------
// A simple address + data pair where you get to define the type of the
// address "B" and the type of the associated data "T".
//----------------------------------------------------------------------
template <typename B, typename T> struct AddressData {
  typedef B BaseType;
  typedef T DataType;

  BaseType addr; // The address this entry describes.
  DataType data; // Payload associated with the address.

  AddressData() : addr(), data() {}

  AddressData(B a, DataType d) : addr(a), data(d) {}

  // Order by address first, then by data, for deterministic sorting.
  bool operator<(const AddressData &rhs) const {
    if (this->addr == rhs.addr)
      return this->data < rhs.data;
    return this->addr < rhs.addr;
  }

  bool operator==(const AddressData &rhs) const {
    return this->addr == rhs.addr && this->data == rhs.data;
  }

  // Fixed: the data comparison previously used "==", which made
  // operator!= disagree with operator== whenever the addresses matched.
  bool operator!=(const AddressData &rhs) const {
    return this->addr != rhs.addr || this->data != rhs.data;
  }
};
1135 template <typename B, typename T, unsigned N> class AddressDataArray {
1137 typedef AddressData<B, T> Entry;
1138 typedef llvm::SmallVector<Entry, N> Collection;
1140 AddressDataArray() = default;
1142 ~AddressDataArray() = default;
1144 void Append(const Entry &entry) { m_entries.push_back(entry); }
1147 if (m_entries.size() > 1)
1148 std::stable_sort(m_entries.begin(), m_entries.end());
1151 #ifdef ASSERT_RANGEMAP_ARE_SORTED
1152 bool IsSorted() const {
1153 typename Collection::const_iterator pos, end, prev;
1154 // First we determine if we can combine any of the Entry objects so we
1155 // don't end up allocating and making a new collection for no reason
1156 for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
1158 if (prev != end && *pos < *prev)
1165 void Clear() { m_entries.clear(); }
1167 bool IsEmpty() const { return m_entries.empty(); }
1169 size_t GetSize() const { return m_entries.size(); }
1171 const Entry *GetEntryAtIndex(size_t i) const {
1172 return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
1175 // Clients must ensure that "i" is a valid index prior to calling this
1177 const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }
1179 static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
1180 return lhs.addr < rhs.addr;
1183 Entry *FindEntry(B addr, bool exact_match_only) {
1184 #ifdef ASSERT_RANGEMAP_ARE_SORTED
1187 if (!m_entries.empty()) {
1190 typename Collection::iterator begin = m_entries.begin();
1191 typename Collection::iterator end = m_entries.end();
1192 typename Collection::iterator pos =
1193 std::lower_bound(begin, end, entry, BaseLessThan);
1195 while (pos != begin && pos[-1].addr == addr)
1199 if (pos->addr == addr || !exact_match_only)
1206 const Entry *FindNextEntry(const Entry *entry) {
1207 if (entry >= &*m_entries.begin() && entry + 1 < &*m_entries.end())
1212 Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }
1214 const Entry *Back() const {
1215 return (m_entries.empty() ? nullptr : &m_entries.back());
1219 Collection m_entries;
1222 } // namespace lldb_private
1224 #endif // liblldb_RangeMap_h_