/*
 * Copyright 2012 Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
17 #ifndef FOLLY_ARENA_H_
18 #define FOLLY_ARENA_H_
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <utility>

#include <boost/intrusive/slist.hpp>

#include "folly/Likely.h"
#include "folly/Malloc.h"
/**
 * Simple arena: allocate memory which gets freed when the arena gets
 * destroyed.
 *
 * The arena itself allocates memory using a custom allocator which provides
 * the following interface (same as required by StlAllocator in StlAllocator.h)
 *
 *   void* allocate(size_t size);
 *      Allocate a block of size bytes, properly aligned to the maximum
 *      alignment required on your system; throw std::bad_alloc if the
 *      allocation can't be satisfied.
 *
 *   void deallocate(void* ptr);
 *      Deallocate a previously allocated block.
 *
 * You may also specialize ArenaAllocatorTraits for your allocator type to
 * provide:
 *
 *   size_t goodSize(const Allocator& alloc, size_t size) const;
 *      Return a size (>= the provided size) that is considered "good" for your
 *      allocator (for example, if your allocator allocates memory in 4MB
 *      chunks, size should be rounded up to 4MB).  The provided value is
 *      guaranteed to be rounded up to a multiple of the maximum alignment
 *      required on your system; the returned value must be also.
 *
 * An implementation that uses malloc() / free() is defined below, see
 * SysAlloc / SysArena.
 */
58 template <class Alloc> struct ArenaAllocatorTraits;
59 template <class Alloc>
62 explicit Arena(const Alloc& alloc,
63 size_t minBlockSize = kDefaultMinBlockSize)
64 : allocAndSize_(alloc, minBlockSize),
67 totalAllocatedSize_(0) {
72 void* allocate(size_t size) {
75 if (LIKELY(end_ - ptr_ >= size)) {
76 // Fast path: there's enough room in the current block
83 // Not enough room in the current block
84 void* r = allocateSlow(size);
89 void deallocate(void* p) {
93 // Transfer ownership of all memory allocated from "other" to "this".
94 void merge(Arena&& other);
96 // Gets the total memory used by the arena
97 size_t totalSize() const {
98 return totalAllocatedSize_ + sizeof(Arena);
103 Arena(const Arena&) = delete;
104 Arena& operator=(const Arena&) = delete;
107 Arena(Arena&&) = default;
108 Arena& operator=(Arena&&) = default;
111 typedef boost::intrusive::slist_member_hook<
112 boost::intrusive::tag<Arena>> BlockLink;
117 // Allocate a block with at least size bytes of storage.
118 // If allowSlack is true, allocate more than size bytes if convenient
119 // (via ArenaAllocatorTraits::goodSize()) as we'll try to pack small
120 // allocations in this block.
121 static std::pair<Block*, size_t> allocate(
122 Alloc& alloc, size_t size, bool allowSlack);
123 void deallocate(Alloc& alloc);
126 return reinterpret_cast<char*>(this + 1);
132 } __attribute__((aligned));
133 // This should be alignas(std::max_align_t) but neither alignas nor
134 // max_align_t are supported by gcc 4.6.2.
137 static constexpr size_t kDefaultMinBlockSize = 4096 - sizeof(Block);
140 static constexpr size_t maxAlign = alignof(Block);
141 static constexpr bool isAligned(uintptr_t address) {
142 return (address & (maxAlign - 1)) == 0;
144 static bool isAligned(void* p) {
145 return isAligned(reinterpret_cast<uintptr_t>(p));
148 // Round up size so it's properly aligned
149 static constexpr size_t roundUp(size_t size) {
150 return (size + maxAlign - 1) & ~(maxAlign - 1);
153 // cache_last<true> makes the list keep a pointer to the last element, so we
154 // have push_back() and constant time splice_after()
155 typedef boost::intrusive::slist<
157 boost::intrusive::member_hook<Block, BlockLink, &Block::link>,
158 boost::intrusive::constant_time_size<false>,
159 boost::intrusive::cache_last<true>> BlockList;
161 void* allocateSlow(size_t size);
163 // Empty member optimization: package Alloc with a non-empty member
164 // in case Alloc is empty (as it is in the case of SysAlloc).
165 struct AllocAndSize : public Alloc {
166 explicit AllocAndSize(const Alloc& a, size_t s)
167 : Alloc(a), minBlockSize(s) {
173 size_t minBlockSize() const {
174 return allocAndSize_.minBlockSize;
176 Alloc& alloc() { return allocAndSize_; }
177 const Alloc& alloc() const { return allocAndSize_; }
179 AllocAndSize allocAndSize_;
183 size_t totalAllocatedSize_;
/**
 * By default, don't pad the given size.
 */
template <class Alloc>
struct ArenaAllocatorTraits {
  static size_t goodSize(const Alloc& alloc, size_t size) {
    return size;
  }
};
197 * Arena-compatible allocator that calls malloc() and free(); see
198 * goodMallocSize() in Malloc.h for goodSize().
202 void* allocate(size_t size) {
203 return checkedMalloc(size);
206 void deallocate(void* p) {
212 struct ArenaAllocatorTraits<SysAlloc> {
213 static size_t goodSize(const SysAlloc& alloc, size_t size) {
214 return goodMallocSize(size);
219 * Arena that uses the system allocator (malloc / free)
221 class SysArena : public Arena<SysAlloc> {
223 explicit SysArena(size_t minBlockSize = kDefaultMinBlockSize)
224 : Arena<SysAlloc>(SysAlloc(), minBlockSize) {
230 #include "folly/Arena-inl.h"
232 #endif /* FOLLY_ARENA_H_ */