//===- ASTVector.h - Vector that uses ASTContext for allocation --*- C++ -*-=//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file provides ASTVector, a vector ADT whose contents are
// allocated using the allocator associated with an ASTContext.
//
//===----------------------------------------------------------------------===//

// FIXME: Most of this is copy-and-paste from BumpVector.h and SmallVector.h.
// We can refactor this core logic into something common.
18 #ifndef LLVM_CLANG_AST_VECTOR
19 #define LLVM_CLANG_AST_VECTOR
#include "clang/AST/AttrIterator.h"
#include "llvm/ADT/PointerIntPair.h"
#include "llvm/Support/Allocator.h"
#include "llvm/Support/type_traits.h"
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <cstring>
#include <iterator>
#include <memory>
#include <new>
// Work around flawed VC++ implementation of std::uninitialized_copy. Define
// additional overloads so that elements with pointer types are recognized as
// scalars and not objects, causing bizarre type conversion errors.
35 template<class T1, class T2>
36 inline _Scalar_ptr_iterator_tag _Ptr_cat(T1 **, T2 **) {
37 _Scalar_ptr_iterator_tag _Cat;
41 template<class T1, class T2>
42 inline _Scalar_ptr_iterator_tag _Ptr_cat(T1* const *, T2 **) {
43 _Scalar_ptr_iterator_tag _Cat;
// FIXME: It is not clear if the problem is fixed in VS 2005. What is clear
// is that the above hack won't work if it wasn't fixed.
58 T *Begin, *End, *Capacity;
60 void setEnd(T *P) { this->End = P; }
63 // Default ctor - Initialize to empty.
64 ASTVector() : Begin(NULL), End(NULL), Capacity(NULL) { }
66 ASTVector(ASTContext &C, unsigned N)
67 : Begin(NULL), End(NULL), Capacity(NULL) {
72 if (llvm::is_class<T>::value) {
73 // Destroy the constructed elements in the vector.
74 destroy_range(Begin, End);
78 typedef size_t size_type;
79 typedef ptrdiff_t difference_type;
82 typedef const T* const_iterator;
84 typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
85 typedef std::reverse_iterator<iterator> reverse_iterator;
88 typedef const T& const_reference;
90 typedef const T* const_pointer;
92 // forward iterator creation methods.
93 iterator begin() { return Begin; }
94 const_iterator begin() const { return Begin; }
95 iterator end() { return End; }
96 const_iterator end() const { return End; }
98 // reverse iterator creation methods.
99 reverse_iterator rbegin() { return reverse_iterator(end()); }
100 const_reverse_iterator rbegin() const{ return const_reverse_iterator(end()); }
101 reverse_iterator rend() { return reverse_iterator(begin()); }
102 const_reverse_iterator rend() const { return const_reverse_iterator(begin());}
104 bool empty() const { return Begin == End; }
105 size_type size() const { return End-Begin; }
107 reference operator[](unsigned idx) {
108 assert(Begin + idx < End);
111 const_reference operator[](unsigned idx) const {
112 assert(Begin + idx < End);
119 const_reference front() const {
126 const_reference back() const {
142 if (llvm::is_class<T>::value) {
143 destroy_range(Begin, End);
148 /// data - Return a pointer to the vector's buffer, even if empty().
150 return pointer(Begin);
153 /// data - Return a pointer to the vector's buffer, even if empty().
154 const_pointer data() const {
155 return const_pointer(Begin);
158 void push_back(const_reference Elt, ASTContext &C) {
159 if (End < Capacity) {
169 void reserve(ASTContext &C, unsigned N) {
170 if (unsigned(Capacity-Begin) < N)
174 /// capacity - Return the total number of elements in the currently allocated
176 size_t capacity() const { return Capacity - Begin; }
178 /// append - Add the specified range to the end of the SmallVector.
180 template<typename in_iter>
181 void append(ASTContext &C, in_iter in_start, in_iter in_end) {
182 size_type NumInputs = std::distance(in_start, in_end);
187 // Grow allocated space if needed.
188 if (NumInputs > size_type(this->capacity_ptr()-this->end()))
189 this->grow(C, this->size()+NumInputs);
191 // Copy the new elements over.
192 // TODO: NEED To compile time dispatch on whether in_iter is a random access
193 // iterator to use the fast uninitialized_copy.
194 std::uninitialized_copy(in_start, in_end, this->end());
195 this->setEnd(this->end() + NumInputs);
198 /// append - Add the specified range to the end of the SmallVector.
200 void append(ASTContext &C, size_type NumInputs, const T &Elt) {
201 // Grow allocated space if needed.
202 if (NumInputs > size_type(this->capacity_ptr()-this->end()))
203 this->grow(C, this->size()+NumInputs);
205 // Copy the new elements over.
206 std::uninitialized_fill_n(this->end(), NumInputs, Elt);
207 this->setEnd(this->end() + NumInputs);
210 /// uninitialized_copy - Copy the range [I, E) onto the uninitialized memory
211 /// starting with "Dest", constructing elements into it as needed.
212 template<typename It1, typename It2>
213 static void uninitialized_copy(It1 I, It1 E, It2 Dest) {
214 std::uninitialized_copy(I, E, Dest);
217 iterator insert(ASTContext &C, iterator I, const T &Elt) {
218 if (I == this->end()) { // Important special case for empty vector.
220 return this->end()-1;
223 if (this->EndX < this->CapacityX) {
225 new (this->end()) T(this->back());
226 this->setEnd(this->end()+1);
227 // Push everything else over.
228 std::copy_backward(I, this->end()-1, this->end());
232 size_t EltNo = I-this->begin();
234 I = this->begin()+EltNo;
238 iterator insert(ASTContext &C, iterator I, size_type NumToInsert,
240 if (I == this->end()) { // Important special case for empty vector.
241 append(C, NumToInsert, Elt);
242 return this->end()-1;
245 // Convert iterator to elt# to avoid invalidating iterator when we reserve()
246 size_t InsertElt = I - this->begin();
248 // Ensure there is enough space.
249 reserve(C, static_cast<unsigned>(this->size() + NumToInsert));
251 // Uninvalidate the iterator.
252 I = this->begin()+InsertElt;
254 // If there are more elements between the insertion point and the end of the
255 // range than there are being inserted, we can use a simple approach to
256 // insertion. Since we already reserved space, we know that this won't
257 // reallocate the vector.
258 if (size_t(this->end()-I) >= NumToInsert) {
259 T *OldEnd = this->end();
260 append(C, this->end()-NumToInsert, this->end());
262 // Copy the existing elements that get replaced.
263 std::copy_backward(I, OldEnd-NumToInsert, OldEnd);
265 std::fill_n(I, NumToInsert, Elt);
269 // Otherwise, we're inserting more elements than exist already, and we're
270 // not inserting at the end.
272 // Copy over the elements that we're about to overwrite.
273 T *OldEnd = this->end();
274 this->setEnd(this->end() + NumToInsert);
275 size_t NumOverwritten = OldEnd-I;
276 this->uninitialized_copy(I, OldEnd, this->end()-NumOverwritten);
278 // Replace the overwritten part.
279 std::fill_n(I, NumOverwritten, Elt);
281 // Insert the non-overwritten middle part.
282 std::uninitialized_fill_n(OldEnd, NumToInsert-NumOverwritten, Elt);
286 template<typename ItTy>
287 iterator insert(ASTContext &C, iterator I, ItTy From, ItTy To) {
288 if (I == this->end()) { // Important special case for empty vector.
290 return this->end()-1;
293 size_t NumToInsert = std::distance(From, To);
294 // Convert iterator to elt# to avoid invalidating iterator when we reserve()
295 size_t InsertElt = I - this->begin();
297 // Ensure there is enough space.
298 reserve(C, static_cast<unsigned>(this->size() + NumToInsert));
300 // Uninvalidate the iterator.
301 I = this->begin()+InsertElt;
303 // If there are more elements between the insertion point and the end of the
304 // range than there are being inserted, we can use a simple approach to
305 // insertion. Since we already reserved space, we know that this won't
306 // reallocate the vector.
307 if (size_t(this->end()-I) >= NumToInsert) {
308 T *OldEnd = this->end();
309 append(C, this->end()-NumToInsert, this->end());
311 // Copy the existing elements that get replaced.
312 std::copy_backward(I, OldEnd-NumToInsert, OldEnd);
314 std::copy(From, To, I);
318 // Otherwise, we're inserting more elements than exist already, and we're
319 // not inserting at the end.
321 // Copy over the elements that we're about to overwrite.
322 T *OldEnd = this->end();
323 this->setEnd(this->end() + NumToInsert);
324 size_t NumOverwritten = OldEnd-I;
325 this->uninitialized_copy(I, OldEnd, this->end()-NumOverwritten);
327 // Replace the overwritten part.
328 for (; NumOverwritten > 0; --NumOverwritten) {
333 // Insert the non-overwritten middle part.
334 this->uninitialized_copy(From, To, OldEnd);
338 void resize(ASTContext &C, unsigned N, const T &NV) {
339 if (N < this->size()) {
340 this->destroy_range(this->begin()+N, this->end());
341 this->setEnd(this->begin()+N);
342 } else if (N > this->size()) {
343 if (this->capacity() < N)
345 construct_range(this->end(), this->begin()+N, NV);
346 this->setEnd(this->begin()+N);
351 /// grow - double the size of the allocated memory, guaranteeing space for at
352 /// least one more element or MinSize if specified.
353 void grow(ASTContext &C, size_type MinSize = 1);
355 void construct_range(T *S, T *E, const T &Elt) {
360 void destroy_range(T *S, T *E) {
368 iterator capacity_ptr() { return (iterator)this->Capacity; }
371 // Define this out-of-line to dissuade the C++ compiler from inlining it.
372 template <typename T>
373 void ASTVector<T>::grow(ASTContext &C, size_t MinSize) {
374 size_t CurCapacity = Capacity-Begin;
375 size_t CurSize = size();
376 size_t NewCapacity = 2*CurCapacity;
377 if (NewCapacity < MinSize)
378 NewCapacity = MinSize;
380 // Allocate the memory from the ASTContext.
381 T *NewElts = new (C, llvm::alignOf<T>()) T[NewCapacity];
383 // Copy the elements over.
384 if (llvm::is_class<T>::value) {
385 std::uninitialized_copy(Begin, End, NewElts);
386 // Destroy the original elements.
387 destroy_range(Begin, End);
390 // Use memcpy for PODs (std::uninitialized_copy optimizes to memmove).
391 memcpy(NewElts, Begin, CurSize * sizeof(T));
394 // ASTContext never frees any memory.
396 End = NewElts+CurSize;
397 Capacity = Begin+NewCapacity;
400 } // end: clang namespace