//===--- Allocator.cpp - Simple memory allocation abstraction -------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the BumpPtrAllocator interface.
//
//===----------------------------------------------------------------------===//
#include "llvm/Support/Allocator.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/DataTypes.h"
#include "llvm/Support/Memory.h"
#include "llvm/Support/Recycler.h"
#include "llvm/Support/raw_ostream.h"
#include <cstring>
/// Construct an allocator that carves objects out of slabs of \p size bytes
/// obtained from \p allocator.  The per-object size threshold is clamped to
/// the slab size (see std::min below); requests whose padded size exceeds it
/// get a dedicated slab.  No memory is allocated until the first Allocate().
BumpPtrAllocator::BumpPtrAllocator(size_t size, size_t threshold,
                                   SlabAllocator &allocator)
    : SlabSize(size), SizeThreshold(std::min(size, threshold)),
      Allocator(allocator), CurSlab(0), BytesAllocated(0), NumSlabs(0) {}
29 BumpPtrAllocator::BumpPtrAllocator(size_t size, size_t threshold)
30 : SlabSize(size), SizeThreshold(std::min(size, threshold)),
31 Allocator(DefaultSlabAllocator), CurSlab(0), BytesAllocated(0),
34 BumpPtrAllocator::~BumpPtrAllocator() {
35 DeallocateSlabs(CurSlab);
38 /// AlignPtr - Align Ptr to Alignment bytes, rounding up. Alignment should
39 /// be a power of two. This method rounds up, so AlignPtr(7, 4) == 8 and
40 /// AlignPtr(8, 4) == 8.
41 char *BumpPtrAllocator::AlignPtr(char *Ptr, size_t Alignment) {
42 assert(Alignment && (Alignment & (Alignment - 1)) == 0 &&
43 "Alignment is not a power of two!");
46 return (char*)(((uintptr_t)Ptr + Alignment - 1) &
47 ~(uintptr_t)(Alignment - 1));
50 /// StartNewSlab - Allocate a new slab and move the bump pointers over into
51 /// the new slab. Modifies CurPtr and End.
52 void BumpPtrAllocator::StartNewSlab() {
54 // Scale the actual allocated slab size based on the number of slabs
55 // allocated. Every 128 slabs allocated, we double the allocated size to
56 // reduce allocation frequency, but saturate at multiplying the slab size by
58 // FIXME: Currently, this count includes special slabs for objects above the
59 // size threshold. That will be fixed in a subsequent commit to make the
60 // growth even more predictable.
61 size_t AllocatedSlabSize =
62 SlabSize * (1 << std::min<size_t>(30, NumSlabs / 128));
64 MemSlab *NewSlab = Allocator.Allocate(AllocatedSlabSize);
65 NewSlab->NextPtr = CurSlab;
67 CurPtr = (char*)(CurSlab + 1);
68 End = ((char*)CurSlab) + CurSlab->Size;
71 /// DeallocateSlabs - Deallocate all memory slabs after and including this
73 void BumpPtrAllocator::DeallocateSlabs(MemSlab *Slab) {
75 MemSlab *NextSlab = Slab->NextPtr;
77 // Poison the memory so stale pointers crash sooner. Note we must
78 // preserve the Size and NextPtr fields at the beginning.
79 sys::Memory::setRangeWritable(Slab + 1, Slab->Size - sizeof(MemSlab));
80 memset(Slab + 1, 0xCD, Slab->Size - sizeof(MemSlab));
82 Allocator.Deallocate(Slab);
88 /// Reset - Deallocate all but the current slab and reset the current pointer
89 /// to the beginning of it, freeing all memory allocated so far.
90 void BumpPtrAllocator::Reset() {
93 DeallocateSlabs(CurSlab->NextPtr);
95 CurPtr = (char*)(CurSlab + 1);
96 End = ((char*)CurSlab) + CurSlab->Size;
100 /// Allocate - Allocate space at the specified alignment.
102 void *BumpPtrAllocator::Allocate(size_t Size, size_t Alignment) {
103 if (!CurSlab) // Start a new slab if we haven't allocated one already.
106 // Keep track of how many bytes we've allocated.
107 BytesAllocated += Size;
109 // 0-byte alignment means 1-byte alignment.
110 if (Alignment == 0) Alignment = 1;
112 // Allocate the aligned space, going forwards from CurPtr.
113 char *Ptr = AlignPtr(CurPtr, Alignment);
115 // Check if we can hold it.
116 if (Ptr + Size <= End) {
118 // Update the allocation point of this memory block in MemorySanitizer.
119 // Without this, MemorySanitizer messages for values originated from here
120 // will point to the allocation of the entire slab.
121 __msan_allocated_memory(Ptr, Size);
125 // If Size is really big, allocate a separate slab for it.
126 size_t PaddedSize = Size + sizeof(MemSlab) + Alignment - 1;
127 if (PaddedSize > SizeThreshold) {
129 MemSlab *NewSlab = Allocator.Allocate(PaddedSize);
131 // Put the new slab after the current slab, since we are not allocating
133 NewSlab->NextPtr = CurSlab->NextPtr;
134 CurSlab->NextPtr = NewSlab;
136 Ptr = AlignPtr((char*)(NewSlab + 1), Alignment);
137 assert((uintptr_t)Ptr + Size <= (uintptr_t)NewSlab + NewSlab->Size);
138 __msan_allocated_memory(Ptr, Size);
142 // Otherwise, start a new slab and try again.
144 Ptr = AlignPtr(CurPtr, Alignment);
146 assert(CurPtr <= End && "Unable to allocate memory!");
147 __msan_allocated_memory(Ptr, Size);
151 size_t BumpPtrAllocator::getTotalMemory() const {
152 size_t TotalMemory = 0;
153 for (MemSlab *Slab = CurSlab; Slab != 0; Slab = Slab->NextPtr) {
154 TotalMemory += Slab->Size;
159 void BumpPtrAllocator::PrintStats() const {
160 unsigned NumSlabs = 0;
161 size_t TotalMemory = 0;
162 for (MemSlab *Slab = CurSlab; Slab != 0; Slab = Slab->NextPtr) {
163 TotalMemory += Slab->Size;
167 errs() << "\nNumber of memory regions: " << NumSlabs << '\n'
168 << "Bytes used: " << BytesAllocated << '\n'
169 << "Bytes allocated: " << TotalMemory << '\n'
170 << "Bytes wasted: " << (TotalMemory - BytesAllocated)
171 << " (includes alignment, etc)\n";
// Out-of-line virtual destructor definition; gives the vtable a home TU.
SlabAllocator::~SlabAllocator() { }
// Out-of-line destructor for the malloc-backed slab allocator.
MallocSlabAllocator::~MallocSlabAllocator() { }
178 MemSlab *MallocSlabAllocator::Allocate(size_t Size) {
179 MemSlab *Slab = (MemSlab*)Allocator.Allocate(Size, 0);
185 void MallocSlabAllocator::Deallocate(MemSlab *Slab) {
186 Allocator.Deallocate(Slab);
189 void PrintRecyclerStats(size_t Size,
191 size_t FreeListSize) {
192 errs() << "Recycler element size: " << Size << '\n'
193 << "Recycler element alignment: " << Align << '\n'
194 << "Number of elements free for recycling: " << FreeListSize << '\n';