2 * Copyright 2017 Facebook, Inc.
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
8 * http://www.apache.org/licenses/LICENSE-2.0
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
18 * For high-level documentation and usage examples see
19 * folly/docs/small_vector.md
21 * @author Jordan DeLong <delong.j@fb.com>
28 #include <type_traits>
33 #include <boost/operators.hpp>
34 #include <boost/type_traits.hpp>
35 #include <boost/mpl/if.hpp>
36 #include <boost/mpl/eval_if.hpp>
37 #include <boost/mpl/vector.hpp>
38 #include <boost/mpl/front.hpp>
39 #include <boost/mpl/filter_view.hpp>
40 #include <boost/mpl/identity.hpp>
41 #include <boost/mpl/placeholders.hpp>
42 #include <boost/mpl/empty.hpp>
43 #include <boost/mpl/size.hpp>
44 #include <boost/mpl/count.hpp>
46 #include <folly/Assume.h>
47 #include <folly/FormatTraits.h>
48 #include <folly/Malloc.h>
49 #include <folly/Portability.h>
50 #include <folly/SmallLocks.h>
51 #include <folly/Traits.h>
52 #include <folly/portability/BitsFunctexcept.h>
53 #include <folly/portability/Constexpr.h>
54 #include <folly/portability/Malloc.h>
55 #include <folly/portability/TypeTraits.h>
57 // Ignore shadowing warnings within this file, so includers can use -Wshadow.
58 #pragma GCC diagnostic push
59 #pragma GCC diagnostic ignored "-Wshadow"
63 //////////////////////////////////////////////////////////////////////
65 namespace small_vector_policy {
67 //////////////////////////////////////////////////////////////////////
70 * A flag which makes us refuse to use the heap at all. If we
71 * overflow the in situ capacity we throw an exception.
75 //////////////////////////////////////////////////////////////////////
77 } // small_vector_policy
79 //////////////////////////////////////////////////////////////////////
81 template<class T, std::size_t M, class A, class B, class C>
84 //////////////////////////////////////////////////////////////////////
89 * Move a range to a range of uninitialized memory. Assumes the
90 * ranges don't overlap.
93 typename std::enable_if<
94 !FOLLY_IS_TRIVIALLY_COPYABLE(T)
96 moveToUninitialized(T* first, T* last, T* out) {
// Move-construct each source element into the raw destination slot.
99 for (; first != last; ++first, ++idx) {
100 new (&out[idx]) T(std::move(*first));
103 // Even for callers trying to give the strong guarantee
104 // (e.g. push_back) it's ok to assume here that we don't have to
105 // move things back and that it was a copy constructor that
106 // threw: if someone throws from a move constructor the effects
// On exception: destroy the `idx` elements already constructed in `out`.
// NOTE(review): the excerpt skips original lines (numbering jumps), so the
// try/catch scaffolding and the cleanup-loop body are not visible here.
108 for (std::size_t i = 0; i < idx; ++i) {
115 // Specialization for trivially copyable types.
// For trivially copyable T a bulk byte copy is equivalent to element-wise
// move construction; the doc above says the ranges don't overlap, but
// memmove is used, which is safe either way.
117 typename std::enable_if<
118 FOLLY_IS_TRIVIALLY_COPYABLE(T)
120 moveToUninitialized(T* first, T* last, T* out) {
121 std::memmove(out, first, (last - first) * sizeof *first);
125 * Move a range to a range of uninitialized memory. Assumes the
126 * ranges don't overlap. Inserts an element at out + pos using emplaceFunc().
127 * out will contain (end - begin) + 1 elements on success and none on failure.
128 * If emplaceFunc() throws [begin, end) is unmodified.
130 template <class T, class Size, class EmplaceFunc>
131 void moveToUninitializedEmplace(
136 EmplaceFunc&& emplaceFunc) {
137 // Must be called first so that if it throws [begin, end) is unmodified.
138 // We have to support the strong exception guarantee for emplace_back().
139 emplaceFunc(out + pos);
140 // move old elements to the left of the new one
142 detail::moveToUninitialized(begin, begin + pos, out);
147 // move old elements to the right of the new one
149 if (begin + pos < end) {
150 detail::moveToUninitialized(begin + pos, end, out + pos + 1);
// On exception: destroy everything constructed so far in `out`, including the
// emplaced element at `pos`. NOTE(review): the surrounding try/catch lines are
// not visible in this excerpt (original line numbers jump) -- confirm.
153 for (Size i = 0; i <= pos; ++i) {
161 * Move objects in memory to the right into some uninitialized
162 * memory, where the region overlaps. This doesn't just use
163 * std::move_backward because move_backward only works if all the
164 * memory is initialized to type T already.
167 typename std::enable_if<
168 !FOLLY_IS_TRIVIALLY_COPYABLE(T)
170 moveObjectsRight(T* first, T* lastConstructed, T* realLast) {
// Nothing to do when no uninitialized tail exists.
171 if (lastConstructed == realLast) {
175 T* end = first - 1; // Past the end going backwards.
176 T* out = realLast - 1;
177 T* in = lastConstructed - 1;
// Phase 1: move-construct into the uninitialized tail [lastConstructed, realLast).
179 for (; in != end && out >= lastConstructed; --in, --out) {
180 new (out) T(std::move(*in));
// Phase 2: move-assign within the already-constructed region.
182 for (; in != end; --in, --out) {
183 *out = std::move(*in);
// Phase 3: handle slots left over below lastConstructed.
// NOTE(review): this loop's body is not visible in the excerpt -- confirm
// against upstream what is constructed here.
185 for (; out >= lastConstructed; --out) {
189 // We want to make sure the same stuff is uninitialized memory
190 // if we exit via an exception (this is to make sure we provide
191 // the basic exception safety guarantee for insert functions).
192 if (out < lastConstructed) {
193 out = lastConstructed - 1;
// Exception path: destroy everything constructed past `out` so the
// uninitialized region is restored.
195 for (auto it = out + 1; it != realLast; ++it) {
202 // Specialization for trivially copyable types. The call to
203 // std::move_backward here will just turn into a memmove. (TODO:
204 // change to std::is_trivially_copyable when that works.)
206 typename std::enable_if<
207 FOLLY_IS_TRIVIALLY_COPYABLE(T)
209 moveObjectsRight(T* first, T* lastConstructed, T* realLast) {
210 std::move_backward(first, lastConstructed, realLast);
214 * Populate a region of memory using `op' to construct elements. If
215 * anything throws, undo what we did.
217 template<class T, class Function>
218 void populateMemForward(T* mem, std::size_t n, Function const& op) {
// Construct n elements front-to-back; on exception, destroy the `idx`
// elements already built (try/catch lines not visible in this excerpt).
221 for (size_t i = 0; i < n; ++i) {
226 for (std::size_t i = 0; i < idx; ++i) {
// Packs the element count and an "is on the heap" flag into a single
// SizeType: when heap use is allowed, the most significant bit of size_
// is the extern flag (kExternMask) and the remaining bits hold the size.
233 template<class SizeType, bool ShouldUseHeap>
234 struct IntegralSizePolicy {
235 typedef SizeType InternalSizeType;
237 IntegralSizePolicy() : size_(0) {}
// Maximum representable size: all bits except the extern-flag bit.
240 static constexpr std::size_t policyMaxSize() {
241 return SizeType(~kExternMask);
244 std::size_t doSize() const {
245 return size_ & ~kExternMask;
// Nonzero iff the contents live in a heap allocation.
248 std::size_t isExtern() const {
249 return kExternMask & size_;
252 void setExtern(bool b) {
254 size_ |= kExternMask;
256 size_ &= ~kExternMask;
// Stores sz while preserving the extern-flag bit.
260 void setSize(std::size_t sz) {
261 assert(sz <= policyMaxSize());
262 size_ = (kExternMask & size_) | SizeType(sz);
265 void swapSizePolicy(IntegralSizePolicy& o) {
266 std::swap(size_, o.size_);
270 static bool const kShouldUseHeap = ShouldUseHeap;
// MSB of SizeType when heap is allowed; otherwise (line not visible) the
// mask is presumably 0 so the full range is usable -- confirm upstream.
273 static SizeType const kExternMask =
274 kShouldUseHeap ? SizeType(1) << (sizeof(SizeType) * 8 - 1)
281 * If you're just trying to use this class, ignore everything about
282 * this next small_vector_base class thing.
284 * The purpose of this junk is to minimize sizeof(small_vector<>)
285 * and allow specifying the template parameters in whatever order is
286 * convenient for the user. There's a few extra steps here to try
287 * to keep the error messages at least semi-reasonable.
289 * Apologies for all the black magic.
291 namespace mpl = boost::mpl;
292 template<class Value,
293 std::size_t RequestedMaxInline,
297 struct small_vector_base {
298 typedef mpl::vector<InPolicyA,InPolicyB,InPolicyC> PolicyList;
301 * Determine the size type
// Filter the policy list down to integral types; if one was supplied it
// becomes the size type, otherwise default to std::size_t.
303 typedef typename mpl::filter_view<
305 boost::is_integral<mpl::placeholders::_1>
307 typedef typename mpl::eval_if<
308 mpl::empty<Integrals>,
309 mpl::identity<std::size_t>,
310 mpl::front<Integrals>
313 static_assert(std::is_unsigned<SizeType>::value,
314 "Size type should be an unsigned integral type");
315 static_assert(mpl::size<Integrals>::value == 0 ||
316 mpl::size<Integrals>::value == 1,
317 "Multiple size types specified in small_vector<>");
320 * Determine whether we should allow spilling to the heap or not.
322 typedef typename mpl::count<
323 PolicyList,small_vector_policy::NoHeap
326 static_assert(HasNoHeap::value == 0 || HasNoHeap::value == 1,
327 "Multiple copies of small_vector_policy::NoHeap "
328 "supplied; this is probably a mistake");
331 * Make the real policy base classes.
333 typedef IntegralSizePolicy<SizeType,!HasNoHeap::value>
337 * Now inherit from them all. This is done in such a convoluted
338 * way to make sure we get the empty base optimizaton on all these
339 * types to keep sizeof(small_vector<>) minimal.
341 typedef boost::totally_ordered1<
342 small_vector<Value,RequestedMaxInline,InPolicyA,InPolicyB,InPolicyC>,
// Helpers that stash a one-bit flag in the least significant bit of a
// pointer (the allocation is assumed to be at least 2-aligned, see the
// assert in makeSizeInternal below).
348 T* pointerFlagSet(T* p) {
349 return reinterpret_cast<T*>(reinterpret_cast<uintptr_t>(p) | 1);
352 bool pointerFlagGet(T* p) {
353 return reinterpret_cast<uintptr_t>(p) & 1;
356 T* pointerFlagClear(T* p) {
357 return reinterpret_cast<T*>(
358 reinterpret_cast<uintptr_t>(p) & ~uintptr_t(1));
// Advance a void* by sizeBytes; used to skip a heap-stored capacity field.
360 inline void* shiftPointer(void* p, size_t sizeBytes) {
361 return static_cast<char*>(p) + sizeBytes;
365 //////////////////////////////////////////////////////////////////////
367 template<class Value,
368 std::size_t RequestedMaxInline = 1,
369 class PolicyA = void,
370 class PolicyB = void,
371 class PolicyC = void>
373 : public detail::small_vector_base<
374 Value,RequestedMaxInline,PolicyA,PolicyB,PolicyC
377 typedef typename detail::small_vector_base<
378 Value,RequestedMaxInline,PolicyA,PolicyB,PolicyC
380 typedef typename BaseType::InternalSizeType InternalSizeType;
383 * Figure out the max number of elements we should inline. (If
384 * the user asks for less inlined elements than we can fit unioned
385 * into our value_type*, we will inline more than they asked.)
387 static constexpr std::size_t MaxInline{
388 constexpr_max(sizeof(Value*) / sizeof(Value), RequestedMaxInline)};
// Standard container typedefs; iterators are raw pointers into either
// the inline buffer or the heap allocation.
391 typedef std::size_t size_type;
392 typedef Value value_type;
393 typedef value_type& reference;
394 typedef value_type const& const_reference;
395 typedef value_type* iterator;
396 typedef value_type const* const_iterator;
397 typedef std::ptrdiff_t difference_type;
399 typedef std::reverse_iterator<iterator> reverse_iterator;
400 typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
402 explicit small_vector() = default;
// Copy constructor: copy-construct o's elements into our storage.
// NOTE(review): intervening lines (heap allocation when o doesn't fit
// inline, and error cleanup) are not visible in this excerpt.
404 small_vector(small_vector const& o) {
408 std::uninitialized_copy(o.begin(), o.end(), begin());
410 if (this->isExtern()) {
// Move constructor: nothrow iff Value's move constructor is nothrow.
// Visible path move-constructs o's elements element-wise via
// move_iterator; the heap-steal fast path is not visible here.
418 small_vector(small_vector&& o)
419 noexcept(std::is_nothrow_move_constructible<Value>::value) {
423 std::uninitialized_copy(std::make_move_iterator(o.begin()),
424 std::make_move_iterator(o.end()),
426 this->setSize(o.size());
430 small_vector(std::initializer_list<value_type> il) {
431 constructImpl(il.begin(), il.end(), std::false_type());
434 explicit small_vector(size_type n) {
435 doConstruct(n, [&](void* p) { new (p) value_type(); });
438 small_vector(size_type n, value_type const& t) {
439 doConstruct(n, [&](void* p) { new (p) value_type(t); });
443 explicit small_vector(Arg arg1, Arg arg2) {
444 // Forward using std::is_arithmetic to get to the proper
445 // implementation; this disambiguates between the iterators and
446 // (size_t, value_type) meaning for this constructor.
447 constructImpl(arg1, arg2, std::is_arithmetic<Arg>());
// Destructor body (signature not visible in this excerpt): destroy each
// element, then free the heap allocation if we spilled to the heap.
451 for (auto& t : *this) {
454 if (this->isExtern()) {
459 small_vector& operator=(small_vector const& o) {
460 assign(o.begin(), o.end());
464 small_vector& operator=(small_vector&& o) {
465 // TODO: optimization:
466 // if both are internal, use move assignment where possible
467 if (this == &o) return *this;
473 bool operator==(small_vector const& o) const {
474 return size() == o.size() && std::equal(begin(), end(), o.begin());
// operator< plus boost::totally_ordered1 generate the remaining
// comparison operators.
477 bool operator<(small_vector const& o) const {
478 return std::lexicographical_compare(begin(), end(), o.begin(), o.end());
// With NoHeap the capacity is capped at the inline capacity.
481 static constexpr size_type max_size() {
482 return !BaseType::kShouldUseHeap ? static_cast<size_type>(MaxInline)
483 : BaseType::policyMaxSize();
486 size_type size() const { return this->doSize(); }
487 bool empty() const { return !size(); }
// Iterators are raw pointers into data(), valid for either storage mode.
489 iterator begin() { return data(); }
490 iterator end() { return data() + size(); }
491 const_iterator begin() const { return data(); }
492 const_iterator end() const { return data() + size(); }
493 const_iterator cbegin() const { return begin(); }
494 const_iterator cend() const { return end(); }
496 reverse_iterator rbegin() { return reverse_iterator(end()); }
497 reverse_iterator rend() { return reverse_iterator(begin()); }
499 const_reverse_iterator rbegin() const {
500 return const_reverse_iterator(end());
503 const_reverse_iterator rend() const {
504 return const_reverse_iterator(begin());
507 const_reverse_iterator crbegin() const { return rbegin(); }
508 const_reverse_iterator crend() const { return rend(); }
511 * Usually one of the simplest functions in a Container-like class
512 * but a bit more complex here. We have to handle all combinations
513 * of in-place vs. heap between this and o.
515 * Basic guarantee only. Provides the nothrow guarantee iff our
516 * value_type has a nothrow move or copy constructor.
518 void swap(small_vector& o) {
519 using std::swap; // Allow ADL on swap for our value_type.
// Case 1: both on the heap -- just exchange heap pointers, capacities,
// and the packed size/extern words.
521 if (this->isExtern() && o.isExtern()) {
522 this->swapSizePolicy(o);
524 auto thisCapacity = this->capacity();
525 auto oCapacity = o.capacity();
527 auto* tmp = u.pdata_.heap_;
528 u.pdata_.heap_ = o.u.pdata_.heap_;
529 o.u.pdata_.heap_ = tmp;
531 this->setCapacity(oCapacity);
532 o.setCapacity(thisCapacity);
// Case 2: both inline -- element-wise swap of the common prefix, then
// move the larger vector's tail into the smaller one.
537 if (!this->isExtern() && !o.isExtern()) {
538 auto& oldSmall = size() < o.size() ? *this : o;
539 auto& oldLarge = size() < o.size() ? o : *this;
541 for (size_type i = 0; i < oldSmall.size(); ++i) {
542 swap(oldSmall[i], oldLarge[i]);
545 size_type i = oldSmall.size();
546 const size_type ci = i;
548 for (; i < oldLarge.size(); ++i) {
549 auto addr = oldSmall.begin() + i;
550 new (addr) value_type(std::move(oldLarge[i]));
551 oldLarge[i].~value_type();
// On exception mid-tail-move: destroy the rest of oldLarge's tail and
// shrink both to consistent sizes (basic guarantee only).
555 for (; i < oldLarge.size(); ++i) {
556 oldLarge[i].~value_type();
558 oldLarge.setSize(ci);
562 oldLarge.setSize(ci);
// Case 3: one inline, one heap -- move the inline elements into the
// heap-owner's now-unused inline buffer, then hand the heap allocation
// to the formerly-inline vector.
566 // isExtern != o.isExtern()
567 auto& oldExtern = o.isExtern() ? o : *this;
568 auto& oldIntern = o.isExtern() ? *this : o;
570 auto oldExternCapacity = oldExtern.capacity();
571 auto oldExternHeap = oldExtern.u.pdata_.heap_;
573 auto buff = oldExtern.u.buffer();
576 for (; i < oldIntern.size(); ++i) {
577 new (&buff[i]) value_type(std::move(oldIntern[i]));
578 oldIntern[i].~value_type();
// On exception: roll back by destroying what was moved into the buffer
// and restoring oldExtern's heap pointer/capacity.
581 for (size_type kill = 0; kill < i; ++kill) {
582 buff[kill].~value_type();
584 for (; i < oldIntern.size(); ++i) {
585 oldIntern[i].~value_type();
587 oldIntern.setSize(0);
588 oldExtern.u.pdata_.heap_ = oldExternHeap;
589 oldExtern.setCapacity(oldExternCapacity);
592 oldIntern.u.pdata_.heap_ = oldExternHeap;
593 this->swapSizePolicy(o);
594 oldIntern.setCapacity(oldExternCapacity);
// Shrink by erasing the tail, or grow by default-constructing new
// elements (makeSize call between these branches is not visible here).
597 void resize(size_type sz) {
599 erase(begin() + sz, end());
603 detail::populateMemForward(begin() + size(), sz - size(),
604 [&] (void* p) { new (p) value_type(); }
// Same as above but grows with copies of v.
609 void resize(size_type sz, value_type const& v) {
611 erase(begin() + sz, end());
615 detail::populateMemForward(begin() + size(), sz - size(),
616 [&] (void* p) { new (p) value_type(v); }
// Pointer to the first element: heap allocation if extern, else the
// inline buffer.
621 value_type* data() noexcept {
622 return this->isExtern() ? u.heap() : u.buffer();
625 value_type const* data() const noexcept {
626 return this->isExtern() ? u.heap() : u.buffer();
629 template<class ...Args>
630 iterator emplace(const_iterator p, Args&&... args) {
// Fast path when p == end() (condition line not visible): defer to
// emplace_back.
632 emplace_back(std::forward<Args>(args)...);
637 * We implement emplace at places other than at the back with a
638 * temporary for exception safety reasons. It is possible to
639 * avoid having to do this, but it becomes hard to maintain the
640 * basic exception safety guarantee (unless you respond to a copy
641 * constructor throwing by clearing the whole vector).
643 * The reason for this is that otherwise you have to destruct an
644 * element before constructing this one in its place---if the
645 * constructor throws, you either need a nothrow default
646 * constructor or a nothrow copy/move to get something back in the
647 * "gap", and the vector requirements don't guarantee we have any
648 * of these. Clearing the whole vector is a legal response in
649 * this situation, but it seems like this implementation is easy
650 * enough and probably better.
652 return insert(p, value_type(std::forward<Args>(args)...));
655 void reserve(size_type sz) {
// Heap capacity: stored explicitly when available, otherwise derived
// from the usable size of the malloc'd block.
659 size_type capacity() const {
660 if (this->isExtern()) {
661 if (u.hasCapacity()) {
662 return u.getCapacity();
664 return malloc_usable_size(u.pdata_.heap_) / sizeof(value_type);
// Shrink by rebuilding into a fresh small_vector and swapping with it.
669 void shrink_to_fit() {
670 if (!this->isExtern()) {
674 small_vector tmp(begin(), end());
678 template <class... Args>
679 void emplace_back(Args&&... args) {
680 if (capacity() == size()) {
681 // Any of args may be references into the vector.
682 // When we are reallocating, we have to be careful to construct the new
683 // element before modifying the data in the old buffer.
686 [&](void* p) { new (p) value_type(std::forward<Args>(args)...); },
// Non-reallocating path: construct in place at end().
689 new (end()) value_type(std::forward<Args>(args)...);
691 this->setSize(size() + 1);
694 void push_back(value_type&& t) {
695 return emplace_back(std::move(t));
698 void push_back(value_type const& t) {
706 iterator insert(const_iterator constp, value_type&& t) {
707 iterator p = unconst(constp);
// Inserting at the end degenerates to push_back.
710 push_back(std::move(t));
714 auto offset = p - begin();
// Full: reallocate and emplace the new element during the move; the
// makeSize overload taking an emplace function handles this.
716 if (capacity() == size()) {
719 [&t](void* ptr) { new (ptr) value_type(std::move(t)); },
721 this->setSize(this->size() + 1);
// Room available: shift [offset, size()) one slot right, then
// move-assign into the gap.
723 detail::moveObjectsRight(data() + offset,
725 data() + size() + 1);
726 this->setSize(size() + 1);
727 data()[offset] = std::move(t);
729 return begin() + offset;
733 iterator insert(const_iterator p, value_type const& t) {
734 // Make a copy and forward to the rvalue value_type&& overload
736 return insert(p, value_type(t));
// Insert n copies of val at pos: grow, shift right by n, then fill.
739 iterator insert(const_iterator pos, size_type n, value_type const& val) {
740 auto offset = pos - begin();
741 makeSize(size() + n);
742 detail::moveObjectsRight(data() + offset,
744 data() + size() + n);
745 this->setSize(size() + n);
746 std::generate_n(begin() + offset, n, [&] { return val; });
747 return begin() + offset;
751 iterator insert(const_iterator p, Arg arg1, Arg arg2) {
752 // Forward using std::is_arithmetic to get to the proper
753 // implementation; this disambiguates between the iterators and
754 // (size_t, value_type) meaning for this function.
755 return insertImpl(unconst(p), arg1, arg2, std::is_arithmetic<Arg>());
758 iterator insert(const_iterator p, std::initializer_list<value_type> il) {
759 return insert(p, il.begin(), il.end());
// Single-element erase: shift the tail left, destroy the last slot.
762 iterator erase(const_iterator q) {
763 std::move(unconst(q) + 1, end(), unconst(q));
764 (data() + size() - 1)->~value_type();
765 this->setSize(size() - 1);
// Range erase: shift the tail left, destroy the now-unused trailing slots.
769 iterator erase(const_iterator q1, const_iterator q2) {
770 if (q1 == q2) return unconst(q1);
771 std::move(unconst(q2), end(), unconst(q1));
772 for (auto it = (end() - std::distance(q1, q2)); it != end(); ++it) {
775 this->setSize(size() - (q2 - q1));
780 erase(begin(), end());
// assign replaces the contents; the visible path clears then re-inserts.
784 void assign(Arg first, Arg last) {
786 insert(end(), first, last);
789 void assign(std::initializer_list<value_type> il) {
790 assign(il.begin(), il.end());
793 void assign(size_type n, const value_type& t) {
// Unchecked element access (assert-only in debug builds).
798 reference front() { assert(!empty()); return *begin(); }
799 reference back() { assert(!empty()); return *(end() - 1); }
800 const_reference front() const { assert(!empty()); return *begin(); }
801 const_reference back() const { assert(!empty()); return *(end() - 1); }
803 reference operator[](size_type i) {
805 return *(begin() + i);
808 const_reference operator[](size_type i) const {
810 return *(begin() + i);
// at() is the bounds-checked variant; throws std::out_of_range via the
// portability shim.
813 reference at(size_type i) {
815 std::__throw_out_of_range("index out of range");
820 const_reference at(size_type i) const {
822 std::__throw_out_of_range("index out of range");
// Strip constness from our own const_iterator (safe: we own the storage).
829 static iterator unconst(const_iterator it) {
830 return const_cast<iterator>(it);
833 // The std::false_type argument is part of disambiguating the
834 // iterator insert functions from integral types (see insert().)
836 iterator insertImpl(iterator pos, It first, It last, std::false_type) {
837 typedef typename std::iterator_traits<It>::iterator_category categ;
// Single-pass (input) iterators: insert one at a time; we can't know
// the distance up front.
838 if (std::is_same<categ,std::input_iterator_tag>::value) {
839 auto offset = pos - begin();
840 while (first != last) {
841 pos = insert(pos, *first++);
844 return begin() + offset;
// Multi-pass iterators: grow once, shift the tail right, copy in bulk.
847 auto distance = std::distance(first, last);
848 auto offset = pos - begin();
849 makeSize(size() + distance);
850 detail::moveObjectsRight(data() + offset,
852 data() + size() + distance);
853 this->setSize(size() + distance);
854 std::copy_n(first, distance, begin() + offset);
855 return begin() + offset;
858 iterator insertImpl(iterator pos, size_type n, const value_type& val,
860 // The true_type means this should call the size_t,value_type
861 // overload. (See insert().)
862 return insert(pos, n, val);
865 // The std::false_type argument came from std::is_arithmetic as part
866 // of disambiguating an overload (see the comment in the
869 void constructImpl(It first, It last, std::false_type) {
870 typedef typename std::iterator_traits<It>::iterator_category categ;
871 if (std::is_same<categ,std::input_iterator_tag>::value) {
872 // With iterators that only allow a single pass, we can't really
873 // do anything sane here.
874 while (first != last) {
875 emplace_back(*first++);
880 auto distance = std::distance(first, last);
882 this->setSize(distance);
884 detail::populateMemForward(data(), distance,
885 [&] (void* p) { new (p) value_type(*first++); }
// On failure, free the heap allocation if one was made (cleanup lines
// not fully visible in this excerpt).
888 if (this->isExtern()) {
895 template <typename InitFunc>
// Shared constructor helper: size to n, construct each slot with func,
// freeing the heap allocation on failure.
896 void doConstruct(size_type n, InitFunc&& func) {
900 detail::populateMemForward(data(), n, std::forward<InitFunc>(func));
902 if (this->isExtern()) {
909 // The true_type means we should forward to the size_t,value_type
911 void constructImpl(size_type n, value_type const& val, std::true_type) {
912 doConstruct(n, [&](void* p) { new (p) value_type(val); });
916 * Compute the size after growth.
// Growth factor of 1.5x (+1 so it always grows), clamped to max_size().
918 size_type computeNewSize() const {
919 return std::min((3 * capacity()) / 2 + 1, max_size());
922 void makeSize(size_type newSize) {
923 makeSizeInternal(newSize, false, [](void*) { assume_unreachable(); }, 0);
// Overload used by emplace_back/insert during reallocation: constructs
// the new element at index pos while moving to the new buffer.
926 template <typename EmplaceFunc>
927 void makeSize(size_type newSize, EmplaceFunc&& emplaceFunc, size_type pos) {
928 assert(size() == capacity());
930 newSize, true, std::forward<EmplaceFunc>(emplaceFunc), pos);
934 * Ensure we have a large enough memory region to be size `newSize'.
935 * Will move/copy elements if we are spilling to heap_ or needed to
936 * allocate a new region, but if resized in place doesn't initialize
937 * anything in the new region. In any case doesn't change size().
938 * Supports insertion of new element during reallocation by given
939 * pointer to new element and position of new element.
940 * NOTE: If reallocation is not needed, insert must be false,
941 * because we only know how to emplace elements into new memory.
943 template <typename EmplaceFunc>
944 void makeSizeInternal(
947 EmplaceFunc&& emplaceFunc,
949 if (newSize > max_size()) {
950 throw std::length_error("max_size exceeded in small_vector");
// Already big enough: nothing to do (early return not visible here).
952 if (newSize <= capacity()) {
956 newSize = std::max(newSize, computeNewSize());
958 auto needBytes = newSize * sizeof(value_type);
959 // If the capacity isn't explicitly stored inline, but the heap
960 // allocation is grown to over some threshold, we should store
961 // a capacity at the front of the heap allocation.
962 bool heapifyCapacity =
963 !kHasInlineCapacity && needBytes > kHeapifyCapacityThreshold;
964 if (heapifyCapacity) {
965 needBytes += kHeapifyCapacitySize;
967 auto const sizeBytes = goodMallocSize(needBytes);
968 void* newh = checkedMalloc(sizeBytes);
969 // We expect newh to be at least 2-aligned, because we want to
970 // use its least significant bit as a flag.
971 assert(!detail::pointerFlagGet(newh));
// Element storage starts after the heap-stored capacity field, if any.
973 value_type* newp = static_cast<value_type*>(
975 detail::shiftPointer(newh, kHeapifyCapacitySize) :
980 // move and insert the new element
981 detail::moveToUninitializedEmplace(
982 begin(), end(), newp, pos, std::forward<EmplaceFunc>(emplaceFunc));
984 // move without inserting new element
985 detail::moveToUninitialized(begin(), end(), newp);
// Destroy the old elements, free the old heap block if any, then
// install the new pointer (flag-tagged when the capacity is heapified).
991 for (auto& val : *this) {
995 if (this->isExtern()) {
998 auto availableSizeBytes = sizeBytes;
999 if (heapifyCapacity) {
1000 u.pdata_.heap_ = detail::pointerFlagSet(newh);
1001 availableSizeBytes -= kHeapifyCapacitySize;
1003 u.pdata_.heap_ = newh;
1005 this->setExtern(true);
1006 this->setCapacity(availableSizeBytes / sizeof(value_type));
1010 * This will set the capacity field, stored inline in the storage_ field
1011 * if there is sufficient room to store it.
1013 void setCapacity(size_type newCapacity) {
1014 assert(this->isExtern());
1015 if (u.hasCapacity()) {
1016 assert(newCapacity < std::numeric_limits<InternalSizeType>::max());
1017 u.setCapacity(newCapacity);
// Pointer-plus-capacity representation, used when it fits alongside the
// inline storage (kHasInlineCapacity).
1022 struct HeapPtrWithCapacity {
1024 InternalSizeType capacity_;
1026 InternalSizeType getCapacity() const {
1029 void setCapacity(InternalSizeType c) {
1035 // Lower order bit of heap_ is used as flag to indicate whether capacity is
1036 // stored at the front of the heap allocation.
// Capacity-on-heap representation: the InternalSizeType capacity lives
// at the start of the allocation, before the elements.
1039 InternalSizeType getCapacity() const {
1040 assert(detail::pointerFlagGet(heap_));
1041 return *static_cast<InternalSizeType*>(detail::pointerFlagClear(heap_));
1043 void setCapacity(InternalSizeType c) {
1044 *static_cast<InternalSizeType*>(detail::pointerFlagClear(heap_)) = c;
// On x64/ppc64 a plain byte array suffices for the inline buffer;
// elsewhere use aligned_storage for value_type's alignment.
1048 #if (FOLLY_X64 || FOLLY_PPC64)
1049 typedef unsigned char InlineStorageDataType[sizeof(value_type) * MaxInline];
1051 typedef typename std::aligned_storage<
1052 sizeof(value_type) * MaxInline,
1054 >::type InlineStorageDataType;
1057 typedef typename std::conditional<
1058 sizeof(value_type) * MaxInline != 0,
1059 InlineStorageDataType,
1061 >::type InlineStorageType;
// The capacity can ride inline when the pointer+capacity struct is no
// bigger than the inline element buffer.
1063 static bool const kHasInlineCapacity =
1064 sizeof(HeapPtrWithCapacity) < sizeof(InlineStorageType);
1066 // This value should be a multiple of word size.
1067 static size_t const kHeapifyCapacitySize = sizeof(
1068 typename std::aligned_storage<
1069 sizeof(InternalSizeType),
1072 // Threshold to control capacity heapifying.
1073 static size_t const kHeapifyCapacityThreshold =
1074 100 * kHeapifyCapacitySize;
1076 typedef typename std::conditional<
1078 HeapPtrWithCapacity,
1080 >::type PointerType;
// `u` unions the inline element buffer with the heap pointer/capacity.
1083 explicit Data() { pdata_.heap_ = 0; }
1086 InlineStorageType storage_;
1088 value_type* buffer() noexcept {
1089 void* vp = &storage_;
1090 return static_cast<value_type*>(vp);
1092 value_type const* buffer() const noexcept {
1093 return const_cast<Data*>(this)->buffer();
// heap(): skip past the heapified capacity field when the pointer's
// low bit is set.
1095 value_type* heap() noexcept {
1096 if (kHasInlineCapacity || !detail::pointerFlagGet(pdata_.heap_)) {
1097 return static_cast<value_type*>(pdata_.heap_);
1099 return static_cast<value_type*>(
1100 detail::shiftPointer(
1101 detail::pointerFlagClear(pdata_.heap_), kHeapifyCapacitySize));
1103 value_type const* heap() const noexcept {
1104 return const_cast<Data*>(this)->heap();
1107 bool hasCapacity() const {
1108 return kHasInlineCapacity || detail::pointerFlagGet(pdata_.heap_);
1110 InternalSizeType getCapacity() const {
1111 return pdata_.getCapacity();
1113 void setCapacity(InternalSizeType c) {
1114 pdata_.setCapacity(c);
// freeHeap (name not visible): free via the untagged pointer.
1118 auto vp = detail::pointerFlagClear(pdata_.heap_);
1121 } FOLLY_PACK_ATTR u;
1125 //////////////////////////////////////////////////////////////////////
1127 // Basic guarantee only, or provides the nothrow guarantee iff T has a
1128 // nothrow move or copy constructor.
// Free-function swap, found by ADL; forwards to the member swap.
1129 template<class T, std::size_t MaxInline, class A, class B, class C>
1130 void swap(small_vector<T,MaxInline,A,B,C>& a,
1131 small_vector<T,MaxInline,A,B,C>& b) {
1135 //////////////////////////////////////////////////////////////////////
// Format-traits specialization: index small_vector like a sequence
// (see folly/FormatTraits.h).
1140 template <class T, size_t M, class A, class B, class C>
1141 struct IndexableTraits<small_vector<T, M, A, B, C>>
1142 : public IndexableTraitsSeq<small_vector<T, M, A, B, C>> {
1145 } // namespace detail
1147 } // namespace folly
1149 #pragma GCC diagnostic pop