//===--- Allocator.h - Simple memory allocation abstraction -----*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines the MallocAllocator and BumpPtrAllocator interfaces.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_SUPPORT_ALLOCATOR_H
#define LLVM_SUPPORT_ALLOCATOR_H

#include "llvm/Support/AlignOf.h"
#include "llvm/Support/DataTypes.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/Memory.h"
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <cstdlib>
#include <cstring>

namespace llvm {
template <typename T> struct ReferenceAdder {
  typedef T &result;
};
template <typename T> struct ReferenceAdder<T &> {
  typedef T result;
};

class MallocAllocator {
public:
  void *Allocate(size_t Size, size_t /*Alignment*/) { return malloc(Size); }

  template <typename T> T *Allocate() {
    return static_cast<T *>(malloc(sizeof(T)));
  }

  template <typename T> T *Allocate(size_t Num) {
    return static_cast<T *>(malloc(sizeof(T) * Num));
  }

  void Deallocate(const void *Ptr) { free(const_cast<void *>(Ptr)); }

  void PrintStats() const {}
};

/// MemSlab - This structure lives at the beginning of every slab allocated by
/// the bump allocator.
class MemSlab {
public:
  size_t Size;
  MemSlab *NextPtr;
};

/// SlabAllocator - This class can be used to parameterize the underlying
/// allocation strategy for the bump allocator. In particular, this is used
/// by the JIT to allocate contiguous swathes of executable memory. The
/// interface uses MemSlabs instead of raw void pointers so that the allocator
/// doesn't have to separately track the size of each allocation.
class SlabAllocator {
public:
  virtual ~SlabAllocator();
  virtual MemSlab *Allocate(size_t Size) = 0;
  virtual void Deallocate(MemSlab *Slab) = 0;
};

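// A minimal sketch (not part of LLVM) of how the slab allocation strategy can
// be customized; MySlabAllocator is a hypothetical name. The essential
// contract is that Allocate() returns at least Size bytes and fills in the
// slab's Size header field (the bump allocator manages NextPtr itself):
//
//   class MySlabAllocator : public SlabAllocator {
//   public:
//     MemSlab *Allocate(size_t Size) override {
//       MemSlab *Slab = (MemSlab *)malloc(Size); // or mmap, sys::Memory, ...
//       Slab->Size = Size;
//       Slab->NextPtr = 0;
//       return Slab;
//     }
//     void Deallocate(MemSlab *Slab) override { free(Slab); }
//   };
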
/// MallocSlabAllocator - The default slab allocator for the bump allocator
/// is an adapter class for MallocAllocator that just forwards the method
/// calls and translates the arguments.
class MallocSlabAllocator : public SlabAllocator {
  /// Allocator - The underlying allocator that we forward to.
  MallocAllocator Allocator;

public:
  MallocSlabAllocator() : Allocator() {}
  virtual ~MallocSlabAllocator();
  MemSlab *Allocate(size_t Size) override;
  void Deallocate(MemSlab *Slab) override;
};

/// \brief Non-templated base class for the \c BumpPtrAllocatorImpl template.
class BumpPtrAllocatorBase {
public:
  void Deallocate(const void * /*Ptr*/) {}
  void PrintStats() const;

  /// \brief Returns the total physical memory allocated by this allocator.
  size_t getTotalMemory() const;

protected:
  /// \brief The slab that we are currently allocating into.
  MemSlab *CurSlab;

  /// \brief How many bytes we've allocated.
  ///
  /// Used so that we can compute how much space was wasted.
  size_t BytesAllocated;

  BumpPtrAllocatorBase() : CurSlab(0), BytesAllocated(0) {}
};

/// \brief Allocate memory in an ever growing pool, as if by bump-pointer.
///
/// This isn't strictly a bump-pointer allocator as it uses backing slabs of
/// memory rather than relying on a boundless contiguous heap. However, it has
/// bump-pointer semantics in that it is a monotonically growing pool of memory
/// where every allocation is found by merely allocating the next N bytes in
/// the slab, or the next N bytes in the next slab.
///
/// Note that this also has a threshold for forcing allocations above a certain
/// size into their own slab.
template <size_t SlabSize = 4096, size_t SizeThreshold = SlabSize>
class BumpPtrAllocatorImpl : public BumpPtrAllocatorBase {
  BumpPtrAllocatorImpl(const BumpPtrAllocatorImpl &) LLVM_DELETED_FUNCTION;
  void operator=(const BumpPtrAllocatorImpl &) LLVM_DELETED_FUNCTION;

public:
  static_assert(SizeThreshold <= SlabSize,
                "The SizeThreshold must be at most the SlabSize to ensure "
                "that objects larger than a slab go into their own memory "
                "allocation.");

  BumpPtrAllocatorImpl()
      : Allocator(DefaultSlabAllocator), NumSlabs(0) {}
  BumpPtrAllocatorImpl(SlabAllocator &Allocator)
      : Allocator(Allocator), NumSlabs(0) {}
  ~BumpPtrAllocatorImpl() { DeallocateSlabs(CurSlab); }

  /// \brief Deallocate all but the current slab and reset the current pointer
  /// to the beginning of it, freeing all memory allocated so far.
  void Reset() {
    if (!CurSlab)
      return;
    DeallocateSlabs(CurSlab->NextPtr);
    CurSlab->NextPtr = 0;
    CurPtr = (char *)(CurSlab + 1);
    End = ((char *)CurSlab) + CurSlab->Size;
    BytesAllocated = 0;
  }

  /// \brief Allocate space at the specified alignment.
  void *Allocate(size_t Size, size_t Alignment) {
    if (!CurSlab) // Start a new slab if we haven't allocated one already.
      StartNewSlab();

    // Keep track of how many bytes we've allocated.
    BytesAllocated += Size;

    // 0-byte alignment means 1-byte alignment.
    if (Alignment == 0)
      Alignment = 1;

    // Allocate the aligned space, going forwards from CurPtr.
    char *Ptr = alignPtr(CurPtr, Alignment);

    // Check if we can hold it.
    if (Ptr + Size <= End) {
      CurPtr = Ptr + Size;
      // Update the allocation point of this memory block in MemorySanitizer.
      // Without this, MemorySanitizer messages for values originated from here
      // will point to the allocation of the entire slab.
      __msan_allocated_memory(Ptr, Size);
      return Ptr;
    }

    // If Size is really big, allocate a separate slab for it.
    size_t PaddedSize = Size + sizeof(MemSlab) + Alignment - 1;
    if (PaddedSize > SizeThreshold) {
      ++NumSlabs;
      MemSlab *NewSlab = Allocator.Allocate(PaddedSize);

      // Put the new slab after the current slab, since we are not allocating
      // into it.
      NewSlab->NextPtr = CurSlab->NextPtr;
      CurSlab->NextPtr = NewSlab;

      Ptr = alignPtr((char *)(NewSlab + 1), Alignment);
      assert((uintptr_t)Ptr + Size <= (uintptr_t)NewSlab + NewSlab->Size);
      __msan_allocated_memory(Ptr, Size);
      return Ptr;
    }

    // Otherwise, start a new slab and try again.
    StartNewSlab();
    Ptr = alignPtr(CurPtr, Alignment);
    CurPtr = Ptr + Size;
    assert(CurPtr <= End && "Unable to allocate memory!");
    __msan_allocated_memory(Ptr, Size);
    return Ptr;
  }

  /// \brief Allocate space for one object without constructing it.
  template <typename T> T *Allocate() {
    return static_cast<T *>(Allocate(sizeof(T), AlignOf<T>::Alignment));
  }

  /// \brief Allocate space for an array of objects without constructing them.
  template <typename T> T *Allocate(size_t Num) {
    return static_cast<T *>(Allocate(Num * sizeof(T), AlignOf<T>::Alignment));
  }

  /// \brief Allocate space for an array of objects with the specified alignment
  /// and without constructing them.
  template <typename T> T *Allocate(size_t Num, size_t Alignment) {
    // Round EltSize up to the specified alignment.
    size_t EltSize = (sizeof(T) + Alignment - 1) & (-Alignment);
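    // A worked example of the rounding above (assuming a power-of-two
    // Alignment): sizeof(T) == 12 with Alignment == 8 gives
    // (12 + 7) & -8 == 16, so each array element stays 8-byte aligned.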
    return static_cast<T *>(Allocate(Num * EltSize, Alignment));
  }

  size_t GetNumSlabs() const { return NumSlabs; }

private:
  /// \brief The default allocator used if one is not provided.
  MallocSlabAllocator DefaultSlabAllocator;

  /// \brief The underlying allocator we use to get slabs of memory.
  ///
  /// This defaults to MallocSlabAllocator, which wraps malloc, but it could be
  /// changed to use a custom allocator.
  SlabAllocator &Allocator;

  /// \brief The current pointer into the current slab.
  ///
  /// This points to the next free byte in the slab.
  char *CurPtr;

  /// \brief The end of the current slab.
  char *End;

  /// \brief How many slabs we've allocated.
  ///
  /// Used to scale the size of each slab and reduce the number of allocations
  /// for extremely heavy memory use scenarios.
  size_t NumSlabs;

  /// \brief Allocate a new slab and move the bump pointers over into the new
  /// slab, modifying CurPtr and End.
  void StartNewSlab() {
    ++NumSlabs;
    // Scale the actual allocated slab size based on the number of slabs
    // allocated. Every 128 slabs allocated, we double the allocated size to
    // reduce allocation frequency, but saturate at multiplying the slab size by
    // 2^30.
    // FIXME: Currently, this count includes special slabs for objects above the
    // size threshold. That will be fixed in a subsequent commit to make the
    // growth even more predictable.
    size_t AllocatedSlabSize =
        SlabSize * (1 << std::min<size_t>(30, NumSlabs / 128));
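    // With the default 4096-byte SlabSize, for example, the first slabs are
    // 4096 bytes each, the size doubles roughly every 128 slabs (8192, 16384,
    // ...), and the multiplier is capped at 2^30.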

    MemSlab *NewSlab = Allocator.Allocate(AllocatedSlabSize);
    NewSlab->NextPtr = CurSlab;
    CurSlab = NewSlab;
    CurPtr = (char *)(CurSlab + 1);
    End = ((char *)CurSlab) + CurSlab->Size;
  }

  /// \brief Deallocate all memory slabs after and including this one.
  void DeallocateSlabs(MemSlab *Slab) {
    while (Slab) {
      MemSlab *NextSlab = Slab->NextPtr;
#ifndef NDEBUG
      // Poison the memory so stale pointers crash sooner. Note we must
      // preserve the Size and NextPtr fields at the beginning.
      sys::Memory::setRangeWritable(Slab + 1, Slab->Size - sizeof(MemSlab));
      memset(Slab + 1, 0xCD, Slab->Size - sizeof(MemSlab));
#endif
      Allocator.Deallocate(Slab);
      Slab = NextSlab;
      --NumSlabs;
    }
  }

  template <typename T> friend class SpecificBumpPtrAllocator;
};

/// \brief The standard BumpPtrAllocator which just uses the default template
/// parameters.
typedef BumpPtrAllocatorImpl<> BumpPtrAllocator;

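// Illustrative usage sketch (not part of this header). Memory returned by the
// allocator is uninitialized and no destructors are ever run, so construct in
// place where needed:
//
//   llvm::BumpPtrAllocator Alloc;
//   int *Ints = Alloc.Allocate<int>(16);  // space for 16 ints
//   void *Raw = Alloc.Allocate(128, 16);  // 128 bytes, 16-byte aligned
//   Alloc.Reset();                        // releases all but the current slab
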
/// \brief A BumpPtrAllocator that allows only elements of a specific type to be
/// allocated.
///
/// This allows calling the destructor in DestroyAll() and when the allocator is
/// destroyed.
template <typename T> class SpecificBumpPtrAllocator {
  BumpPtrAllocator Allocator;

public:
  SpecificBumpPtrAllocator() : Allocator() {}
  SpecificBumpPtrAllocator(SlabAllocator &allocator) : Allocator(allocator) {}

  ~SpecificBumpPtrAllocator() { DestroyAll(); }

  /// Call the destructor of each allocated object, then deallocate all but the
  /// current slab and reset the current pointer to the beginning of it, freeing
  /// all memory allocated so far.
  void DestroyAll() {
    MemSlab *Slab = Allocator.CurSlab;
    while (Slab) {
      char *End = Slab == Allocator.CurSlab ? Allocator.CurPtr
                                            : (char *)Slab + Slab->Size;
      for (char *Ptr = (char *)(Slab + 1); Ptr < End; Ptr += sizeof(T)) {
        Ptr = alignPtr(Ptr, alignOf<T>());
        if (Ptr + sizeof(T) <= End)
          reinterpret_cast<T *>(Ptr)->~T();
      }
      Slab = Slab->NextPtr;
    }
    Allocator.Reset();
  }

  /// \brief Allocate space for an array of objects without constructing them.
  T *Allocate(size_t num = 1) { return Allocator.Allocate<T>(num); }
};

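// Illustrative usage sketch (not part of this header), with Widget standing in
// for any type that has a non-trivial destructor:
//
//   llvm::SpecificBumpPtrAllocator<Widget> WidgetAlloc;
//   Widget *W = new (WidgetAlloc.Allocate()) Widget();
//   // ~Widget() runs for every allocated object in DestroyAll() or when
//   // WidgetAlloc itself is destroyed.
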
}  // end namespace llvm

template <size_t SlabSize, size_t SizeThreshold>
void *
operator new(size_t Size,
             llvm::BumpPtrAllocatorImpl<SlabSize, SizeThreshold> &Allocator) {
  // Use offsetof(S, x), the alignment of the most strictly aligned member of
  // the union, as an upper bound so we never request more alignment than a
  // fundamental type could need.
  struct S {
    char c;
    union {
      double D;
      long double LD;
      long long L;
      void *P;
    } x;
  };
  return Allocator.Allocate(
      Size, std::min((size_t)llvm::NextPowerOf2(Size), offsetof(S, x)));
}

template <size_t SlabSize, size_t SizeThreshold>
void operator delete(void *,
                     llvm::BumpPtrAllocatorImpl<SlabSize, SizeThreshold> &) {}

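// Illustrative usage sketch of the operator new overload above (not part of
// this header), with Node standing in for any user-defined type:
//
//   llvm::BumpPtrAllocator Alloc;
//   Node *N = new (Alloc) Node(42);
//   // Do not `delete N`; the matching operator delete above is only invoked
//   // if the constructor throws, and the memory itself is reclaimed when
//   // Alloc is destroyed.
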
#endif // LLVM_SUPPORT_ALLOCATOR_H