X-Git-Url: http://demsky.eecs.uci.edu/git/?a=blobdiff_plain;f=lib%2FSupport%2FAllocator.cpp;h=90df262336c59a36ddb4e3c11fbab39e32ce25c0;hb=335db223926931db204bf54d4accac6677b8e6b1;hp=4e4a75ee58c50c4f70911d3015733bfeeda491b1;hpb=8f51a62b41a425f7fe262ff20cee835129ecc072;p=oota-llvm.git

diff --git a/lib/Support/Allocator.cpp b/lib/Support/Allocator.cpp
index 4e4a75ee58c..90df262336c 100644
--- a/lib/Support/Allocator.cpp
+++ b/lib/Support/Allocator.cpp
@@ -12,9 +12,10 @@
 //===----------------------------------------------------------------------===//
 
 #include "llvm/Support/Allocator.h"
-#include "llvm/Support/DataTypes.h"
+#include "llvm/System/DataTypes.h"
 #include "llvm/Support/Recycler.h"
-#include "llvm/Support/Streams.h"
+#include "llvm/Support/raw_ostream.h"
+#include "llvm/System/Memory.h"
 #include <cstring>
 
 namespace llvm {
@@ -22,9 +23,7 @@ namespace llvm {
 BumpPtrAllocator::BumpPtrAllocator(size_t size, size_t threshold,
                                    SlabAllocator &allocator)
     : SlabSize(size), SizeThreshold(threshold), Allocator(allocator),
-      CurSlab(0), BytesAllocated(0) {
-  StartNewSlab();
-}
+      CurSlab(0), BytesAllocated(0) { }
 
 BumpPtrAllocator::~BumpPtrAllocator() {
   DeallocateSlabs(CurSlab);
@@ -60,6 +59,7 @@ void BumpPtrAllocator::DeallocateSlabs(MemSlab *Slab) {
 #ifndef NDEBUG
     // Poison the memory so stale pointers crash sooner.  Note we must
     // preserve the Size and NextPtr fields at the beginning.
+    sys::Memory::setRangeWritable(Slab + 1, Slab->Size - sizeof(MemSlab));
     memset(Slab + 1, 0xCD, Slab->Size - sizeof(MemSlab));
 #endif
     Allocator.Deallocate(Slab);
@@ -70,6 +70,8 @@ void BumpPtrAllocator::DeallocateSlabs(MemSlab *Slab) {
 /// Reset - Deallocate all but the current slab and reset the current pointer
 /// to the beginning of it, freeing all memory allocated so far.
 void BumpPtrAllocator::Reset() {
+  if (!CurSlab)
+    return;
   DeallocateSlabs(CurSlab->NextPtr);
   CurSlab->NextPtr = 0;
   CurPtr = (char*)(CurSlab + 1);
@@ -79,6 +81,9 @@ void BumpPtrAllocator::Reset() {
 /// Allocate - Allocate space at the specified alignment.
 ///
 void *BumpPtrAllocator::Allocate(size_t Size, size_t Alignment) {
+  if (!CurSlab) // Start a new slab if we haven't allocated one already.
+    StartNewSlab();
+
   // Keep track of how many bytes we've allocated.
   BytesAllocated += Size;
 
@@ -95,8 +100,8 @@ void *BumpPtrAllocator::Allocate(size_t Size, size_t Alignment) {
   }
 
   // If Size is really big, allocate a separate slab for it.
-  if (Size > SizeThreshold) {
-    size_t PaddedSize = Size + sizeof(MemSlab) + Alignment - 1;
+  size_t PaddedSize = Size + sizeof(MemSlab) + Alignment - 1;
+  if (PaddedSize > SizeThreshold) {
     MemSlab *NewSlab = Allocator.Allocate(PaddedSize);
 
     // Put the new slab after the current slab, since we are not allocating
@@ -133,11 +138,11 @@ void BumpPtrAllocator::PrintStats() const {
     ++NumSlabs;
   }
 
-  cerr << "\nNumber of memory regions: " << NumSlabs << '\n'
-       << "Bytes used: " << BytesAllocated << '\n'
-       << "Bytes allocated: " << TotalMemory << '\n'
-       << "Bytes wasted: " << (TotalMemory - BytesAllocated)
-       << " (includes alignment, etc)\n";
+  errs() << "\nNumber of memory regions: " << NumSlabs << '\n'
+         << "Bytes used: " << BytesAllocated << '\n'
+         << "Bytes allocated: " << TotalMemory << '\n'
+         << "Bytes wasted: " << (TotalMemory - BytesAllocated)
+         << " (includes alignment, etc)\n";
 }
 
 MallocSlabAllocator BumpPtrAllocator::DefaultSlabAllocator =
@@ -161,9 +166,9 @@ void MallocSlabAllocator::Deallocate(MemSlab *Slab) {
 void PrintRecyclerStats(size_t Size,
                         size_t Align,
                         size_t FreeListSize) {
-  cerr << "Recycler element size: " << Size << '\n'
-       << "Recycler element alignment: " << Align << '\n'
-       << "Number of elements free for recycling: " << FreeListSize << '\n';
+  errs() << "Recycler element size: " << Size << '\n'
+         << "Recycler element alignment: " << Align << '\n'
+         << "Number of elements free for recycling: " << FreeListSize << '\n';
 }
 
 }
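
A minimal caller-side sketch (not part of the patch) of the lazy-slab behavior this change introduces, assuming the default constructor arguments declared in llvm/Support/Allocator.h at this revision (4096-byte slab size and threshold, DefaultSlabAllocator): constructing the allocator and calling Reset() no longer touch the heap, and the first Allocate() starts the initial slab.

#include "llvm/Support/Allocator.h"

int main() {
  llvm::BumpPtrAllocator Alloc;      // CurSlab == 0; no slab allocated yet.
  Alloc.Reset();                     // Now a safe no-op: returns early when CurSlab is null.
  void *P = Alloc.Allocate(64, 8);   // First allocation calls StartNewSlab().
  (void)P;
  Alloc.PrintStats();                // Statistics are now written via errs().
  return 0;
}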