//===-- xray_segmented_array.h ---------------------------------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of XRay, a dynamic runtime instrumentation system.
//
// Defines the implementation of a segmented array, with fixed-size segments
// backing the segments.
//
//===----------------------------------------------------------------------===//
16 #ifndef XRAY_SEGMENTED_ARRAY_H
17 #define XRAY_SEGMENTED_ARRAY_H
#include "sanitizer_common/sanitizer_allocator.h"
#include "xray_allocator.h"
#include "xray_utils.h"

#include <new>
#include <type_traits>
#include <utility>
/// The Array type provides an interface similar to std::vector<...> but does
/// not shrink in size. Once constructed, elements can be appended but cannot be
/// removed. The implementation is heavily dependent on the contract provided by
/// the Allocator type, in that all memory will be released when the Allocator
/// is destroyed. When an Array is destroyed, it will destroy elements in the
/// backing store but will not free the memory.
34 template <class T> class Array {
40 // We want each segment of the array to be cache-line aligned, and elements of
41 // the array be offset from the beginning of the segment.
42 struct Segment : SegmentBase {
47 // Each segment of the array will be laid out with the following assumptions:
49 // - Each segment will be on a cache-line address boundary (kCacheLineSize
52 // - The elements will be accessed through an aligned pointer, dependent on
53 // the alignment of T.
55 // - Each element is at least two-pointers worth from the beginning of the
56 // Segment, aligned properly, and the rest of the elements are accessed
57 // through appropriate alignment.
59 // We then compute the size of the segment to follow this logic:
61 // - Compute the number of elements that can fit within
62 // kCacheLineSize-multiple segments, minus the size of two pointers.
64 // - Request cacheline-multiple sized elements from the allocator.
65 static constexpr size_t AlignedElementStorageSize =
66 sizeof(typename std::aligned_storage<sizeof(T), alignof(T)>::type);
68 static constexpr size_t SegmentSize =
69 nearest_boundary(sizeof(Segment) + next_pow2(sizeof(T)), kCacheLineSize);
71 using AllocatorType = Allocator<SegmentSize>;
73 static constexpr size_t ElementsPerSegment =
74 (SegmentSize - sizeof(Segment)) / next_pow2(sizeof(T));
76 static_assert(ElementsPerSegment > 0,
77 "Must have at least 1 element per segment.");
79 static SegmentBase SentinelSegment;
83 SegmentBase *Head = &SentinelSegment;
84 SegmentBase *Tail = &SentinelSegment;
87 // Here we keep track of segments in the freelist, to allow us to re-use
88 // segments when elements are trimmed off the end.
89 SegmentBase *Freelist = &SentinelSegment;
91 Segment *NewSegment() {
92 // We need to handle the case in which enough elements have been trimmed to
93 // allow us to re-use segments we've allocated before. For this we look into
94 // the Freelist, to see whether we need to actually allocate new blocks or
95 // just re-use blocks we've already seen before.
96 if (Freelist != &SentinelSegment) {
97 auto *FreeSegment = Freelist;
98 Freelist = FreeSegment->Next;
99 FreeSegment->Next = &SentinelSegment;
100 Freelist->Prev = &SentinelSegment;
101 return static_cast<Segment *>(FreeSegment);
104 auto SegmentBlock = Alloc->Allocate();
105 if (SegmentBlock.Data == nullptr)
108 // Placement-new the Segment element at the beginning of the SegmentBlock.
109 auto S = reinterpret_cast<Segment *>(SegmentBlock.Data);
110 new (S) SegmentBase{&SentinelSegment, &SentinelSegment};
114 Segment *InitHeadAndTail() {
115 DCHECK_EQ(Head, &SentinelSegment);
116 DCHECK_EQ(Tail, &SentinelSegment);
117 auto Segment = NewSegment();
118 if (Segment == nullptr)
120 DCHECK_EQ(Segment->Next, &SentinelSegment);
121 DCHECK_EQ(Segment->Prev, &SentinelSegment);
122 Head = Tail = static_cast<SegmentBase *>(Segment);
126 Segment *AppendNewSegment() {
127 auto S = NewSegment();
130 DCHECK_NE(Tail, &SentinelSegment);
131 DCHECK_EQ(Tail->Next, &SentinelSegment);
132 DCHECK_EQ(S->Prev, &SentinelSegment);
133 DCHECK_EQ(S->Next, &SentinelSegment);
137 return static_cast<Segment *>(Tail);
140 // This Iterator models a BidirectionalIterator.
141 template <class U> class Iterator {
142 SegmentBase *S = &SentinelSegment;
147 Iterator(SegmentBase *IS, size_t Off, size_t S)
148 : S(IS), Offset(Off), Size(S) {}
149 Iterator(const Iterator &) noexcept = default;
150 Iterator() noexcept = default;
151 Iterator(Iterator &&) noexcept = default;
152 Iterator &operator=(const Iterator &) = default;
153 Iterator &operator=(Iterator &&) = default;
154 ~Iterator() = default;
156 Iterator &operator++() {
157 if (++Offset % ElementsPerSegment || Offset == Size)
160 // At this point, we know that Offset % N == 0, so we must advance the
162 DCHECK_EQ(Offset % ElementsPerSegment, 0);
163 DCHECK_NE(Offset, Size);
164 DCHECK_NE(S, &SentinelSegment);
165 DCHECK_NE(S->Next, &SentinelSegment);
167 DCHECK_NE(S, &SentinelSegment);
171 Iterator &operator--() {
172 DCHECK_NE(S, &SentinelSegment);
173 DCHECK_GT(Offset, 0);
175 auto PreviousOffset = Offset--;
176 if (PreviousOffset != Size && PreviousOffset % ElementsPerSegment == 0) {
177 DCHECK_NE(S->Prev, &SentinelSegment);
184 Iterator operator++(int) {
185 Iterator Copy(*this);
190 Iterator operator--(int) {
191 Iterator Copy(*this);
196 template <class V, class W>
197 friend bool operator==(const Iterator<V> &L, const Iterator<W> &R) {
198 return L.S == R.S && L.Offset == R.Offset;
201 template <class V, class W>
202 friend bool operator!=(const Iterator<V> &L, const Iterator<W> &R) {
206 U &operator*() const {
207 DCHECK_NE(S, &SentinelSegment);
208 auto RelOff = Offset % ElementsPerSegment;
210 // We need to compute the character-aligned pointer, offset from the
211 // segment's Data location to get the element in the position of Offset.
212 auto Base = static_cast<Segment *>(S)->Data;
213 auto AlignedOffset = Base + (RelOff * AlignedElementStorageSize);
214 return *reinterpret_cast<U *>(AlignedOffset);
217 U *operator->() const { return &(**this); }
221 explicit Array(AllocatorType &A) : Alloc(&A) {}
223 Array(const Array &) = delete;
224 Array(Array &&O) NOEXCEPT : Alloc(O.Alloc),
228 O.Head = &SentinelSegment;
229 O.Tail = &SentinelSegment;
233 bool empty() const { return Size == 0; }
235 AllocatorType &allocator() const {
236 DCHECK_NE(Alloc, nullptr);
240 size_t size() const { return Size; }
242 T *Append(const T &E) {
243 if (UNLIKELY(Head == &SentinelSegment))
244 if (InitHeadAndTail() == nullptr)
247 auto Offset = Size % ElementsPerSegment;
248 if (UNLIKELY(Size != 0 && Offset == 0))
249 if (AppendNewSegment() == nullptr)
252 auto Base = static_cast<Segment *>(Tail)->Data;
253 auto AlignedOffset = Base + (Offset * AlignedElementStorageSize);
254 auto Position = reinterpret_cast<T *>(AlignedOffset);
260 template <class... Args> T *AppendEmplace(Args &&... args) {
261 if (UNLIKELY(Head == &SentinelSegment))
262 if (InitHeadAndTail() == nullptr)
265 auto Offset = Size % ElementsPerSegment;
266 auto *LatestSegment = Tail;
267 if (UNLIKELY(Size != 0 && Offset == 0)) {
268 LatestSegment = AppendNewSegment();
269 if (LatestSegment == nullptr)
273 DCHECK_NE(Tail, &SentinelSegment);
274 auto Base = static_cast<Segment *>(LatestSegment)->Data;
275 auto AlignedOffset = Base + (Offset * AlignedElementStorageSize);
276 auto Position = reinterpret_cast<T *>(AlignedOffset);
278 // In-place construct at Position.
279 new (Position) T{std::forward<Args>(args)...};
281 return reinterpret_cast<T *>(Position);
284 T &operator[](size_t Offset) const {
285 DCHECK_LE(Offset, Size);
286 // We need to traverse the array enough times to find the element at Offset.
288 while (Offset >= ElementsPerSegment) {
290 Offset -= ElementsPerSegment;
291 DCHECK_NE(S, &SentinelSegment);
293 auto Base = static_cast<Segment *>(S)->Data;
294 auto AlignedOffset = Base + (Offset * AlignedElementStorageSize);
295 auto Position = reinterpret_cast<T *>(AlignedOffset);
296 return *reinterpret_cast<T *>(Position);
300 DCHECK_NE(Head, &SentinelSegment);
306 DCHECK_NE(Tail, &SentinelSegment);
313 template <class Predicate> T *find_element(Predicate P) const {
318 for (auto I = begin(); I != E; ++I)
325 /// Remove N Elements from the end. This leaves the blocks behind, and not
326 /// require allocation of new blocks for new elements added after trimming.
327 void trim(size_t Elements) {
328 DCHECK_LE(Elements, Size);
333 DCHECK_NE(Head, &SentinelSegment);
334 DCHECK_NE(Tail, &SentinelSegment);
336 for (auto SegmentsToTrim = (nearest_boundary(OldSize, ElementsPerSegment) -
337 nearest_boundary(Size, ElementsPerSegment)) /
339 SegmentsToTrim > 0; --SegmentsToTrim) {
340 DCHECK_NE(Head, &SentinelSegment);
341 DCHECK_NE(Tail, &SentinelSegment);
342 // Put the tail into the Freelist.
343 auto *FreeSegment = Tail;
345 if (Tail == &SentinelSegment)
348 Tail->Next = &SentinelSegment;
350 DCHECK_EQ(Tail->Next, &SentinelSegment);
351 FreeSegment->Next = Freelist;
352 FreeSegment->Prev = &SentinelSegment;
353 if (Freelist != &SentinelSegment)
354 Freelist->Prev = FreeSegment;
355 Freelist = FreeSegment;
359 // Provide iterators.
360 Iterator<T> begin() const { return Iterator<T>(Head, 0, Size); }
361 Iterator<T> end() const { return Iterator<T>(Tail, Size, Size); }
362 Iterator<const T> cbegin() const { return Iterator<const T>(Head, 0, Size); }
363 Iterator<const T> cend() const { return Iterator<const T>(Tail, Size, Size); }
366 // We need to have this storage definition out-of-line so that the compiler can
367 // ensure that storage for the SentinelSegment is defined and has a single
370 typename Array<T>::SegmentBase Array<T>::SentinelSegment{
371 &Array<T>::SentinelSegment, &Array<T>::SentinelSegment};
373 } // namespace __xray
375 #endif // XRAY_SEGMENTED_ARRAY_H