/*
 * Copyright 2016 Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * For high-level documentation and usage examples see
 * folly/docs/small_vector.md
 *
 * @author Jordan DeLong <delong.j@fb.com>
 */
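
// Example (illustrative sketch, not taken verbatim from the docs): a
// small_vector keeps up to N elements in place and only spills to the heap
// beyond that.
//
//   folly::small_vector<int, 2> vec;
//   vec.push_back(0);   // stored in situ
//   vec.push_back(1);   // still in situ
//   vec.push_back(2);   // spills to the heap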
#ifndef FOLLY_SMALL_VECTOR_H_
#define FOLLY_SMALL_VECTOR_H_

#include <algorithm>
#include <cassert>
#include <cstdint>
#include <cstring>
#include <iterator>
#include <limits>
#include <memory>
#include <stdexcept>
#include <type_traits>
#include <boost/operators.hpp>
#include <boost/type_traits.hpp>
#include <boost/mpl/if.hpp>
#include <boost/mpl/eval_if.hpp>
#include <boost/mpl/vector.hpp>
#include <boost/mpl/front.hpp>
#include <boost/mpl/filter_view.hpp>
#include <boost/mpl/identity.hpp>
#include <boost/mpl/placeholders.hpp>
#include <boost/mpl/empty.hpp>
#include <boost/mpl/size.hpp>
#include <boost/mpl/count.hpp>
#include <boost/mpl/max.hpp>

#include <folly/FormatTraits.h>
#include <folly/Malloc.h>
#include <folly/Portability.h>
#if defined(__GNUC__) && (FOLLY_X64 || FOLLY_PPC64)
# include <folly/SmallLocks.h>
# define FB_PACK_ATTR FOLLY_PACK_ATTR
# define FB_PACK_PUSH FOLLY_PACK_PUSH
# define FB_PACK_POP FOLLY_PACK_POP
#else
# define FB_PACK_ATTR
# define FB_PACK_PUSH
# define FB_PACK_POP
#endif

#if FOLLY_HAVE_MALLOC_SIZE
extern "C" std::size_t malloc_size(const void*);
# if !FOLLY_HAVE_MALLOC_USABLE_SIZE
#  define malloc_usable_size malloc_size
# endif
# ifndef malloc_usable_size
#  define malloc_usable_size malloc_size
# endif
#endif
// Ignore shadowing warnings within this file, so includers can use -Wshadow.
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wshadow"

namespace folly {
//////////////////////////////////////////////////////////////////////

namespace small_vector_policy {

//////////////////////////////////////////////////////////////////////

/*
 * A flag which makes us refuse to use the heap at all.  If we
 * overflow the in situ capacity we throw an exception.
 */
struct NoHeap;

//////////////////////////////////////////////////////////////////////

} // small_vector_policy
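
// Illustrative sketch of the NoHeap policy described above: overflowing the
// in situ capacity throws instead of allocating.
//
//   folly::small_vector<int, 4, folly::small_vector_policy::NoHeap> v;
//   for (int i = 0; i < 4; ++i) {
//     v.push_back(i);    // fits in the in situ storage
//   }
//   // v.push_back(4);   // would throw: spilling to the heap is disabled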
//////////////////////////////////////////////////////////////////////

template<class T, std::size_t M, class A, class B, class C>
class small_vector;

namespace detail {
/*
 * Move a range to a range of uninitialized memory.  Assumes the
 * ranges don't overlap.
 */
template<class T>
typename std::enable_if<
  !FOLLY_IS_TRIVIALLY_COPYABLE(T)
>::type
moveToUninitialized(T* first, T* last, T* out) {
  std::size_t idx = 0;
  try {
    for (; first != last; ++first, ++idx) {
      new (&out[idx]) T(std::move(*first));
    }
  } catch (...) {
    // Even for callers trying to give the strong guarantee
    // (e.g. push_back) it's ok to assume here that we don't have to
    // move things back and that it was a copy constructor that
    // threw: if someone throws from a move constructor the effects
    // are unspecified.
    for (std::size_t i = 0; i < idx; ++i) {
      out[i].~T();
    }
    throw;
  }
}

// Specialization for trivially copyable types.
template<class T>
typename std::enable_if<
  FOLLY_IS_TRIVIALLY_COPYABLE(T)
>::type
moveToUninitialized(T* first, T* last, T* out) {
  std::memmove(out, first, (last - first) * sizeof *first);
}
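
// Illustrative sketch of what moveToUninitialized is for (the names below
// are hypothetical, not part of this header): move-construct a range into
// raw, uninitialized storage, e.g. while growing into a new buffer.
//
//   alignas(std::string) unsigned char raw[4 * sizeof(std::string)];
//   std::string src[4] = {"a", "b", "c", "d"};
//   moveToUninitialized(src, src + 4, reinterpret_cast<std::string*>(raw));
//   // raw now holds four move-constructed strings; src is left moved-from.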
/*
 * Move objects in memory to the right into some uninitialized
 * memory, where the region overlaps.  This doesn't just use
 * std::move_backward because move_backward only works if all the
 * memory is initialized to type T already.
 */
template<class T>
typename std::enable_if<
  !FOLLY_IS_TRIVIALLY_COPYABLE(T)
>::type
moveObjectsRight(T* first, T* lastConstructed, T* realLast) {
  if (lastConstructed == realLast) {
    return;
  }

  T* end = first - 1; // Past the end going backwards.
  T* out = realLast - 1;
  T* in = lastConstructed - 1;
  try {
    for (; in != end && out >= lastConstructed; --in, --out) {
      new (out) T(std::move(*in));
    }
    for (; in != end; --in, --out) {
      *out = std::move(*in);
    }
    for (; out >= lastConstructed; --out) {
      new (out) T();
    }
  } catch (...) {
    // We want to make sure the same stuff is uninitialized memory
    // if we exit via an exception (this is to make sure we provide
    // the basic exception safety guarantee for insert functions).
    if (out < lastConstructed) {
      out = lastConstructed - 1;
    }
    for (auto it = out + 1; it != realLast; ++it) {
      it->~T();
    }
    throw;
  }
}

// Specialization for trivially copyable types.  The call to
// std::move_backward here will just turn into a memmove.  (TODO:
// change to std::is_trivially_copyable when that works.)
template<class T>
typename std::enable_if<
  FOLLY_IS_TRIVIALLY_COPYABLE(T)
>::type
moveObjectsRight(T* first, T* lastConstructed, T* realLast) {
  std::move_backward(first, lastConstructed, realLast);
}

/*
 * Populate a region of memory using `op' to construct elements.  If
 * anything throws, undo what we did.
 */
template<class T, class Function>
void populateMemForward(T* mem, std::size_t n, Function const& op) {
  std::size_t idx = 0;
  try {
    for (size_t i = 0; i < n; ++i) {
      op(&mem[idx]);
      ++idx;
    }
  } catch (...) {
    for (std::size_t i = 0; i < idx; ++i) {
      mem[i].~T();
    }
    throw;
  }
}
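
// Illustrative sketch: populateMemForward is driven by a placement-new
// callback, mirroring how resize() and the constructors below use it:
//
//   detail::populateMemForward(data(), n,
//     [&] (void* p) { new (p) value_type(val); }
//   );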
template<class SizeType, bool ShouldUseHeap>
struct IntegralSizePolicy {
  typedef SizeType InternalSizeType;
  IntegralSizePolicy() : size_(0) {}

protected:
  static constexpr std::size_t policyMaxSize() {
    return SizeType(~kExternMask);
  }
  std::size_t doSize() const {
    return size_ & ~kExternMask;
  }
  std::size_t isExtern() const {
    return kExternMask & size_;
  }
  void setExtern(bool b) {
    if (b) {
      size_ |= kExternMask;
    } else {
      size_ &= ~kExternMask;
    }
  }
  void setSize(std::size_t sz) {
    assert(sz <= policyMaxSize());
    size_ = (kExternMask & size_) | SizeType(sz);
  }
  void swapSizePolicy(IntegralSizePolicy& o) {
    std::swap(size_, o.size_);
  }
  static bool const kShouldUseHeap = ShouldUseHeap;

private:
  // The top bit of size_ flags whether we have spilled to the heap; when
  // the heap is disabled no bit is reserved.
  static SizeType const kExternMask =
    kShouldUseHeap ? SizeType(1) << (sizeof(SizeType) * 8 - 1)
                   : 0;

  SizeType size_;
};
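
// Illustrative sketch of the size_ packing above, assuming SizeType ==
// uint32_t and ShouldUseHeap == true:
//
//   kExternMask == 0x80000000
//   size_ == 0x80000005  =>  isExtern() != 0, doSize() == 5
//   size_ == 0x00000005  =>  isExtern() == 0, doSize() == 5
//   policyMaxSize() == 0x7fffffff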
/*
 * If you're just trying to use this class, ignore everything about
 * this next small_vector_base class thing.
 *
 * The purpose of this junk is to minimize sizeof(small_vector<>)
 * and allow specifying the template parameters in whatever order is
 * convenient for the user.  There are a few extra steps here to try
 * to keep the error messages at least semi-reasonable.
 *
 * Apologies for all the black magic.
 */
namespace mpl = boost::mpl;
template<class Value,
         std::size_t RequestedMaxInline,
         class InPolicyA,
         class InPolicyB,
         class InPolicyC>
struct small_vector_base {
  typedef mpl::vector<InPolicyA,InPolicyB,InPolicyC> PolicyList;

  /*
   * Determine the size type
   */
  typedef typename mpl::filter_view<
    PolicyList,
    boost::is_integral<mpl::placeholders::_1>
  >::type Integrals;
  typedef typename mpl::eval_if<
    mpl::empty<Integrals>,
    mpl::identity<std::size_t>,
    mpl::front<Integrals>
  >::type SizeType;

  static_assert(std::is_unsigned<SizeType>::value,
                "Size type should be an unsigned integral type");
  static_assert(mpl::size<Integrals>::value == 0 ||
                  mpl::size<Integrals>::value == 1,
                "Multiple size types specified in small_vector<>");

  /*
   * Determine whether we should allow spilling to the heap or not.
   */
  typedef typename mpl::count<
    PolicyList,small_vector_policy::NoHeap
  >::type HasNoHeap;

  static_assert(HasNoHeap::value == 0 || HasNoHeap::value == 1,
                "Multiple copies of small_vector_policy::NoHeap "
                "supplied; this is probably a mistake");

  /*
   * Make the real policy base classes.
   */
  typedef IntegralSizePolicy<SizeType,!HasNoHeap::value>
    ActualSizePolicy;

  /*
   * Now inherit from them all.  This is done in such a convoluted
   * way to make sure we get the empty base optimization on all these
   * types to keep sizeof(small_vector<>) minimal.
   */
  typedef boost::totally_ordered1<
    small_vector<Value,RequestedMaxInline,InPolicyA,InPolicyB,InPolicyC>,
    ActualSizePolicy
  > type;
};
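
// Illustrative sketch: the policy parameters may be supplied in any order
// after the inline count; small_vector_base picks out the integral size type
// and the NoHeap flag wherever they appear.
//
//   folly::small_vector<int, 2, uint32_t> a;               // uint32_t size type
//   folly::small_vector<int, 2,
//       folly::small_vector_policy::NoHeap, uint32_t> b;   // same size type,
//                                                          // never on the heap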
template<class T>
T* pointerFlagSet(T* p) {
  return reinterpret_cast<T*>(reinterpret_cast<uintptr_t>(p) | 1);
}
template<class T>
bool pointerFlagGet(T* p) {
  return reinterpret_cast<uintptr_t>(p) & 1;
}
template<class T>
T* pointerFlagClear(T* p) {
  return reinterpret_cast<T*>(
    reinterpret_cast<uintptr_t>(p) & ~uintptr_t(1));
}
inline void* shiftPointer(void* p, size_t sizeBytes) {
  return static_cast<char*>(p) + sizeBytes;
}
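
// Illustrative sketch: heap blocks are at least 2-byte aligned, so the low
// bit of the stored pointer can carry a flag ("is a capacity field stored in
// front of the elements?") at no space cost.
//
//   void* raw = ::malloc(64);
//   auto tagged = pointerFlagSet(raw);
//   assert(pointerFlagGet(tagged));
//   assert(pointerFlagClear(tagged) == raw);
//   ::free(raw);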
} // namespace detail

//////////////////////////////////////////////////////////////////////

template<class Value,
         std::size_t RequestedMaxInline = 1,
         class PolicyA = void,
         class PolicyB = void,
         class PolicyC = void>
class small_vector
  : public detail::small_vector_base<
      Value,RequestedMaxInline,PolicyA,PolicyB,PolicyC
    >::type
{
  typedef typename detail::small_vector_base<
    Value,RequestedMaxInline,PolicyA,PolicyB,PolicyC
  >::type BaseType;
  typedef typename BaseType::InternalSizeType InternalSizeType;
  /*
   * Figure out the max number of elements we should inline.  (If
   * the user asks for fewer inline elements than we can fit unioned
   * into our value_type*, we will inline more than they asked.)
   */
  enum {
    MaxInline = boost::mpl::max<
      boost::mpl::int_<sizeof(Value*) / sizeof(Value)>,
      boost::mpl::int_<RequestedMaxInline>
    >::type::value
  };
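
  // Illustrative note: on a typical 64-bit platform sizeof(int*) == 8 and
  // sizeof(int) == 4, so folly::small_vector<int, 1> still gets MaxInline ==
  // 2; two ints fit in the space the heap pointer occupies anyway.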
public:
  typedef std::size_t size_type;
  typedef Value value_type;
  typedef value_type& reference;
  typedef value_type const& const_reference;
  typedef value_type* iterator;
  typedef value_type const* const_iterator;
  typedef std::ptrdiff_t difference_type;

  typedef std::reverse_iterator<iterator> reverse_iterator;
  typedef std::reverse_iterator<const_iterator> const_reverse_iterator;

  explicit small_vector() = default;
  small_vector(small_vector const& o) {
    makeSize(o.size());
    try {
      std::uninitialized_copy(o.begin(), o.end(), begin());
    } catch (...) {
      if (this->isExtern()) {
        u.freeHeap();
      }
      throw;
    }
    this->setSize(o.size());
  }

  small_vector(small_vector&& o)
  noexcept(std::is_nothrow_move_constructible<Value>::value) {
    if (o.isExtern()) {
      swap(o);
    } else {
      std::uninitialized_copy(std::make_move_iterator(o.begin()),
                              std::make_move_iterator(o.end()),
                              begin());
      this->setSize(o.size());
    }
  }

  small_vector(std::initializer_list<value_type> il) {
    constructImpl(il.begin(), il.end(), std::false_type());
  }

  explicit small_vector(size_type n, value_type const& t = value_type()) {
    doConstruct(n, t);
  }

  template<class Arg>
  explicit small_vector(Arg arg1, Arg arg2) {
    // Forward using std::is_arithmetic to get to the proper
    // implementation; this disambiguates between the iterators and
    // (size_t, value_type) meaning for this constructor.
    constructImpl(arg1, arg2, std::is_arithmetic<Arg>());
  }

  ~small_vector() {
    for (auto& t : *this) {
      t.~value_type();
    }
    if (this->isExtern()) {
      u.freeHeap();
    }
  }

  small_vector& operator=(small_vector const& o) {
    assign(o.begin(), o.end());
    return *this;
  }

  small_vector& operator=(small_vector&& o) {
    // TODO: optimization:
    // if both are internal, use move assignment where possible
    if (this == &o) return *this;
    assign(std::make_move_iterator(o.begin()),
           std::make_move_iterator(o.end()));
    return *this;
  }
  bool operator==(small_vector const& o) const {
    return size() == o.size() && std::equal(begin(), end(), o.begin());
  }

  bool operator<(small_vector const& o) const {
    return std::lexicographical_compare(begin(), end(), o.begin(), o.end());
  }

  static constexpr size_type max_size() {
    return !BaseType::kShouldUseHeap ? MaxInline
                                     : BaseType::policyMaxSize();
  }

  size_type size() const { return this->doSize(); }
  bool empty() const { return !size(); }

  iterator begin() { return data(); }
  iterator end() { return data() + size(); }
  const_iterator begin() const { return data(); }
  const_iterator end() const { return data() + size(); }
  const_iterator cbegin() const { return begin(); }
  const_iterator cend() const { return end(); }

  reverse_iterator rbegin() { return reverse_iterator(end()); }
  reverse_iterator rend() { return reverse_iterator(begin()); }

  const_reverse_iterator rbegin() const {
    return const_reverse_iterator(end());
  }

  const_reverse_iterator rend() const {
    return const_reverse_iterator(begin());
  }

  const_reverse_iterator crbegin() const { return rbegin(); }
  const_reverse_iterator crend() const { return rend(); }
  /*
   * Usually one of the simplest functions in a Container-like class
   * but a bit more complex here.  We have to handle all combinations
   * of in-place vs. heap between this and o.
   *
   * Basic guarantee only.  Provides the nothrow guarantee iff our
   * value_type has a nothrow move or copy constructor.
   */
  void swap(small_vector& o) {
    using std::swap; // Allow ADL on swap for our value_type.

    if (this->isExtern() && o.isExtern()) {
      this->swapSizePolicy(o);

      auto thisCapacity = this->capacity();
      auto oCapacity = o.capacity();
      std::swap(unpackHack(&u.pdata_.heap_), unpackHack(&o.u.pdata_.heap_));
      this->setCapacity(oCapacity);
      o.setCapacity(thisCapacity);

      return;
    }

    if (!this->isExtern() && !o.isExtern()) {
      auto& oldSmall = size() < o.size() ? *this : o;
      auto& oldLarge = size() < o.size() ? o : *this;

      for (size_type i = 0; i < oldSmall.size(); ++i) {
        swap(oldSmall[i], oldLarge[i]);
      }

      size_type i = oldSmall.size();
      const size_type ci = i;
      try {
        for (; i < oldLarge.size(); ++i) {
          auto addr = oldSmall.begin() + i;
          new (addr) value_type(std::move(oldLarge[i]));
          oldLarge[i].~value_type();
        }
      } catch (...) {
        oldSmall.setSize(i);
        for (; i < oldLarge.size(); ++i) {
          oldLarge[i].~value_type();
        }
        oldLarge.setSize(ci);
        throw;
      }
      oldSmall.setSize(i);
      oldLarge.setSize(ci);
      return;
    }

    // isExtern != o.isExtern()
    auto& oldExtern = o.isExtern() ? o : *this;
    auto& oldIntern = o.isExtern() ? *this : o;

    auto oldExternCapacity = oldExtern.capacity();
    auto oldExternHeap     = oldExtern.u.pdata_.heap_;

    auto buff = oldExtern.u.buffer();
    size_type i = 0;
    try {
      for (; i < oldIntern.size(); ++i) {
        new (&buff[i]) value_type(std::move(oldIntern[i]));
        oldIntern[i].~value_type();
      }
    } catch (...) {
      for (size_type kill = 0; kill < i; ++kill) {
        buff[kill].~value_type();
      }
      for (; i < oldIntern.size(); ++i) {
        oldIntern[i].~value_type();
      }
      oldIntern.setSize(0);
      oldExtern.u.pdata_.heap_ = oldExternHeap;
      oldExtern.setCapacity(oldExternCapacity);
      throw;
    }
    oldIntern.u.pdata_.heap_ = oldExternHeap;
    this->swapSizePolicy(o);
    oldIntern.setCapacity(oldExternCapacity);
  }
  void resize(size_type sz) {
    if (sz < size()) {
      erase(begin() + sz, end());
      return;
    }
    makeSize(sz);
    detail::populateMemForward(begin() + size(), sz - size(),
      [&] (void* p) { new (p) value_type(); }
    );
    this->setSize(sz);
  }

  void resize(size_type sz, value_type const& v) {
    if (sz < size()) {
      erase(begin() + sz, end());
      return;
    }
    makeSize(sz);
    detail::populateMemForward(begin() + size(), sz - size(),
      [&] (void* p) { new (p) value_type(v); }
    );
    this->setSize(sz);
  }

  value_type* data() noexcept {
    return this->isExtern() ? u.heap() : u.buffer();
  }

  value_type const* data() const noexcept {
    return this->isExtern() ? u.heap() : u.buffer();
  }

  template<class ...Args>
  iterator emplace(const_iterator p, Args&&... args) {
    if (p == cend()) {
      emplace_back(std::forward<Args>(args)...);
      return end() - 1;
    }

    /*
     * We implement emplace at places other than at the back with a
     * temporary for exception safety reasons.  It is possible to
     * avoid having to do this, but it becomes hard to maintain the
     * basic exception safety guarantee (unless you respond to a copy
     * constructor throwing by clearing the whole vector).
     *
     * The reason for this is that otherwise you have to destruct an
     * element before constructing this one in its place---if the
     * constructor throws, you either need a nothrow default
     * constructor or a nothrow copy/move to get something back in the
     * "gap", and the vector requirements don't guarantee we have any
     * of these.  Clearing the whole vector is a legal response in
     * this situation, but it seems like this implementation is easy
     * enough and probably better.
     */
    return insert(p, value_type(std::forward<Args>(args)...));
  }
  void reserve(size_type sz) {
    makeSize(sz);
  }

  size_type capacity() const {
    if (this->isExtern()) {
      if (u.hasCapacity()) {
        return *u.getCapacity();
      }
      return malloc_usable_size(u.pdata_.heap_) / sizeof(value_type);
    }
    return MaxInline;
  }

  void shrink_to_fit() {
    if (!this->isExtern()) {
      return;
    }

    small_vector tmp(begin(), end());
    tmp.swap(*this);
  }

  template<class ...Args>
  void emplace_back(Args&&... args) {
    // call helper function for static dispatch of special cases
    emplaceBack(std::forward<Args>(args)...);
  }

  void emplace_back(const value_type& t) {
    push_back(t);
  }
  void emplace_back(value_type& t) {
    push_back(t);
  }
  void emplace_back(value_type&& t) {
    push_back(std::move(t));
  }

  void push_back(value_type&& t) {
    if (capacity() == size()) {
      makeSize(std::max(size_type(2), 3 * size() / 2), &t, size());
    } else {
      new (end()) value_type(std::move(t));
    }
    this->setSize(size() + 1);
  }

  void push_back(value_type const& t) {
    // TODO: we'd like to make use of makeSize (it can be optimized better,
    // because it manipulates the internals)
    // unfortunately the current implementation only supports moving from
    // a supplied rvalue, and doing an extra move just to reuse it is a perf
    // net loss
    if (size() == capacity()) { // && isInside(&t)) {
      value_type tmp(t);
      emplaceBack(std::move(tmp));
    } else {
      emplaceBack(t);
    }
  }
  iterator insert(const_iterator constp, value_type&& t) {
    iterator p = unconst(constp);

    if (p == end()) {
      push_back(std::move(t));
      return end() - 1;
    }

    auto offset = p - begin();

    if (capacity() == size()) {
      makeSize(size() + 1, &t, offset);
      this->setSize(this->size() + 1);
    } else {
      makeSize(size() + 1);
      detail::moveObjectsRight(data() + offset,
                               data() + size(),
                               data() + size() + 1);
      this->setSize(size() + 1);
      data()[offset] = std::move(t);
    }
    return begin() + offset;
  }

  iterator insert(const_iterator p, value_type const& t) {
    // Make a copy and forward to the rvalue value_type&& overload above.
    return insert(p, value_type(t));
  }
  iterator insert(const_iterator pos, size_type n, value_type const& val) {
    auto offset = pos - begin();
    makeSize(size() + n);
    detail::moveObjectsRight(data() + offset,
                             data() + size(),
                             data() + size() + n);
    this->setSize(size() + n);
    std::generate_n(begin() + offset, n, [&] { return val; });
    return begin() + offset;
  }

  template<class Arg>
  iterator insert(const_iterator p, Arg arg1, Arg arg2) {
    // Forward using std::is_arithmetic to get to the proper
    // implementation; this disambiguates between the iterators and
    // (size_t, value_type) meaning for this function.
    return insertImpl(unconst(p), arg1, arg2, std::is_arithmetic<Arg>());
  }

  iterator insert(const_iterator p, std::initializer_list<value_type> il) {
    return insert(p, il.begin(), il.end());
  }
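
  // Illustrative sketch of the disambiguation above: both calls below
  // compile, and std::is_arithmetic routes each to the right implementation.
  //
  //   folly::small_vector<int> v;
  //   v.insert(v.end(), 3, 42);      // count + value: three copies of 42
  //   int a[] = {1, 2, 3};
  //   v.insert(v.end(), a, a + 3);   // iterator range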
  iterator erase(const_iterator q) {
    std::move(unconst(q) + 1, end(), unconst(q));
    (data() + size() - 1)->~value_type();
    this->setSize(size() - 1);
    return unconst(q);
  }

  iterator erase(const_iterator q1, const_iterator q2) {
    if (q1 == q2) return unconst(q1);
    std::move(unconst(q2), end(), unconst(q1));
    for (auto it = (end() - std::distance(q1, q2)); it != end(); ++it) {
      it->~value_type();
    }
    this->setSize(size() - (q2 - q1));
    return unconst(q1);
  }

  void clear() {
    erase(begin(), end());
  }

  template<class Arg>
  void assign(Arg first, Arg last) {
    clear();
    insert(end(), first, last);
  }

  void assign(std::initializer_list<value_type> il) {
    assign(il.begin(), il.end());
  }

  void assign(size_type n, const value_type& t) {
    clear();
    insert(end(), n, t);
  }

  reference front()             { assert(!empty()); return *begin(); }
  reference back()              { assert(!empty()); return *(end() - 1); }
  const_reference front() const { assert(!empty()); return *begin(); }
  const_reference back() const  { assert(!empty()); return *(end() - 1); }

  reference operator[](size_type i) {
    assert(i < size());
    return *(begin() + i);
  }

  const_reference operator[](size_type i) const {
    assert(i < size());
    return *(begin() + i);
  }

  reference at(size_type i) {
    if (i >= size()) {
      throw std::out_of_range("index out of range");
    }
    return (*this)[i];
  }

  const_reference at(size_type i) const {
    if (i >= size()) {
      throw std::out_of_range("index out of range");
    }
    return (*this)[i];
  }
private:
  /*
   * This does the same thing as emplace_back, but we need this helper
   * to catch the special case; see the next overload function.
   */
  template<class ...Args>
  void emplaceBack(Args&&... args) {
    makeSize(size() + 1);
    new (end()) value_type(std::forward<Args>(args)...);
    this->setSize(size() + 1);
  }

  static iterator unconst(const_iterator it) {
    return const_cast<iterator>(it);
  }

  /*
   * g++ doesn't allow you to bind a non-const reference to a member
   * of a packed structure, presumably because it would make it too
   * easy to accidentally make an unaligned memory access?
   */
  template<class T> static T& unpackHack(T* p) {
    return *p;
  }
  // The std::false_type argument is part of disambiguating the
  // iterator insert functions from integral types (see insert().)
  template<class It>
  iterator insertImpl(iterator pos, It first, It last, std::false_type) {
    typedef typename std::iterator_traits<It>::iterator_category categ;
    if (std::is_same<categ,std::input_iterator_tag>::value) {
      auto offset = pos - begin();
      while (first != last) {
        pos = insert(pos, *first++);
        ++pos;
      }
      return begin() + offset;
    }

    auto distance = std::distance(first, last);
    auto offset = pos - begin();
    makeSize(size() + distance);
    detail::moveObjectsRight(data() + offset,
                             data() + size(),
                             data() + size() + distance);
    this->setSize(size() + distance);
    std::copy_n(first, distance, begin() + offset);
    return begin() + offset;
  }

  iterator insertImpl(iterator pos, size_type n, const value_type& val,
                      std::true_type) {
    // The true_type means this should call the size_t,value_type
    // overload.  (See insert().)
    return insert(pos, n, val);
  }

  // The std::false_type argument came from std::is_arithmetic as part
  // of disambiguating an overload (see the comment in the
  // constructor).
  template<class It>
  void constructImpl(It first, It last, std::false_type) {
    typedef typename std::iterator_traits<It>::iterator_category categ;
    if (std::is_same<categ,std::input_iterator_tag>::value) {
      // With iterators that only allow a single pass, we can't really
      // do anything sane here.
      while (first != last) {
        emplace_back(*first++);
      }
      return;
    }

    auto distance = std::distance(first, last);
    makeSize(distance);
    this->setSize(distance);
    try {
      detail::populateMemForward(data(), distance,
        [&] (void* p) { new (p) value_type(*first++); }
      );
    } catch (...) {
      if (this->isExtern()) {
        u.freeHeap();
      }
      throw;
    }
  }

  void doConstruct(size_type n, value_type const& val) {
    makeSize(n);
    this->setSize(n);
    try {
      detail::populateMemForward(data(), n,
        [&] (void* p) { new (p) value_type(val); }
      );
    } catch (...) {
      if (this->isExtern()) {
        u.freeHeap();
      }
      throw;
    }
  }

  // The true_type means we should forward to the size_t,value_type
  // overload.
  void constructImpl(size_type n, value_type const& val, std::true_type) {
    doConstruct(n, val);
  }
  void makeSize(size_type size, value_type* v = nullptr) {
    makeSize(size, v, size - 1);
  }

  /*
   * Ensure we have a large enough memory region to be size `size'.
   * Will move/copy elements if we are spilling to heap_ or needed to
   * allocate a new region, but if resized in place doesn't initialize
   * anything in the new region.  In any case doesn't change size().
   * Supports inserting a new element during reallocation, given a
   * pointer to the new element and its position.
   * NOTE: If reallocation is not needed and the new element should be
   * inserted in the middle of the vector (not at the end), do the
   * element moves and the insertion outside this function; otherwise
   * an exception is thrown.
   */
  void makeSize(size_type size, value_type* v, size_type pos) {
    if (size > this->max_size()) {
      throw std::length_error("max_size exceeded in small_vector");
    }
    if (size <= this->capacity()) {
      return;
    }

    auto needBytes = size * sizeof(value_type);
    // If the capacity isn't explicitly stored inline, but the heap
    // allocation grows over some threshold, we should store
    // a capacity at the front of the heap allocation.
    bool heapifyCapacity =
      !kHasInlineCapacity && needBytes > kHeapifyCapacityThreshold;
    if (heapifyCapacity) {
      needBytes += kHeapifyCapacitySize;
    }
    auto const sizeBytes = goodMallocSize(needBytes);
    void* newh = checkedMalloc(sizeBytes);
    // We expect newh to be at least 2-aligned, because we want to
    // use its least significant bit as a flag.
    assert(!detail::pointerFlagGet(newh));

    value_type* newp = static_cast<value_type*>(
      heapifyCapacity ?
        detail::shiftPointer(newh, kHeapifyCapacitySize) :
        newh);

    if (v != nullptr) {
      // move new element
      try {
        new (&newp[pos]) value_type(std::move(*v));
      } catch (...) {
        free(newh);
        throw;
      }

      // move old elements to the left of the new one
      try {
        detail::moveToUninitialized(begin(), begin() + pos, newp);
      } catch (...) {
        newp[pos].~value_type();
        free(newh);
        throw;
      }

      // move old elements to the right of the new one
      try {
        detail::moveToUninitialized(begin() + pos, end(), newp + pos + 1);
      } catch (...) {
        for (size_type i = 0; i <= pos; ++i) {
          newp[i].~value_type();
        }
        free(newh);
        throw;
      }
    } else {
      // move without inserting new element
      try {
        detail::moveToUninitialized(begin(), end(), newp);
      } catch (...) {
        free(newh);
        throw;
      }
    }
    for (auto& val : *this) {
      val.~value_type();
    }

    if (this->isExtern()) {
      u.freeHeap();
    }
    auto availableSizeBytes = sizeBytes;
    if (heapifyCapacity) {
      u.pdata_.heap_ = detail::pointerFlagSet(newh);
      availableSizeBytes -= kHeapifyCapacitySize;
    } else {
      u.pdata_.heap_ = newh;
    }
    this->setExtern(true);
    this->setCapacity(availableSizeBytes / sizeof(value_type));
  }
  /*
   * This will set the capacity field, stored inline in the storage_ field
   * if there is sufficient room to store it.
   */
  void setCapacity(size_type newCapacity) {
    assert(this->isExtern());
    if (u.hasCapacity()) {
      assert(newCapacity < std::numeric_limits<InternalSizeType>::max());
      *u.getCapacity() = InternalSizeType(newCapacity);
    }
  }

  struct HeapPtrWithCapacity {
    void* heap_;
    InternalSizeType capacity_;

    InternalSizeType* getCapacity() {
      return &capacity_;
    }
  } FB_PACK_ATTR;

  struct HeapPtr {
    // Lower order bit of heap_ is used as flag to indicate whether capacity is
    // stored at the front of the heap allocation.
    void* heap_;

    InternalSizeType* getCapacity() {
      assert(detail::pointerFlagGet(heap_));
      return static_cast<InternalSizeType*>(
        detail::pointerFlagClear(heap_));
    }
  } FB_PACK_ATTR;
#if (FOLLY_X64 || FOLLY_PPC64)
  typedef unsigned char InlineStorageType[sizeof(value_type) * MaxInline];
#else
  typedef typename std::aligned_storage<
    sizeof(value_type) * MaxInline,
    alignof(value_type)
  >::type InlineStorageType;
#endif

  static bool const kHasInlineCapacity =
    sizeof(HeapPtrWithCapacity) < sizeof(InlineStorageType);

  // This value should be a multiple of the word size.
  static size_t const kHeapifyCapacitySize = sizeof(
    typename std::aligned_storage<
      sizeof(InternalSizeType),
      alignof(value_type)
    >::type);
  // Threshold to control capacity heapifying.
  static size_t const kHeapifyCapacityThreshold =
    100 * kHeapifyCapacitySize;

  typedef typename std::conditional<
    kHasInlineCapacity,
    HeapPtrWithCapacity,
    HeapPtr
  >::type PointerType;
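
  // Illustrative sketch of the two layouts chosen above:
  //
  //   kHasInlineCapacity == true:    pdata_.heap_ -> [ elem0 | elem1 | ... ]
  //                                  (capacity_ lives inline next to heap_)
  //   kHasInlineCapacity == false,   pdata_.heap_ (low bit set) ->
  //   heapified capacity:              [ capacity | elem0 | elem1 | ... ]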
  union Data {
    explicit Data() { pdata_.heap_ = 0; }

    PointerType pdata_;
    InlineStorageType storage_;

    value_type* buffer() noexcept {
      void* vp = &storage_;
      return static_cast<value_type*>(vp);
    }
    value_type const* buffer() const noexcept {
      return const_cast<Data*>(this)->buffer();
    }
    value_type* heap() noexcept {
      if (kHasInlineCapacity || !detail::pointerFlagGet(pdata_.heap_)) {
        return static_cast<value_type*>(pdata_.heap_);
      }
      return static_cast<value_type*>(
        detail::shiftPointer(
          detail::pointerFlagClear(pdata_.heap_), kHeapifyCapacitySize));
    }
    value_type const* heap() const noexcept {
      return const_cast<Data*>(this)->heap();
    }

    bool hasCapacity() const {
      return kHasInlineCapacity || detail::pointerFlagGet(pdata_.heap_);
    }
    InternalSizeType* getCapacity() {
      return pdata_.getCapacity();
    }
    InternalSizeType* getCapacity() const {
      return const_cast<Data*>(this)->getCapacity();
    }

    void freeHeap() {
      auto vp = detail::pointerFlagClear(pdata_.heap_);
      free(vp);
    }
  } FB_PACK_ATTR u;
};
//////////////////////////////////////////////////////////////////////

// Basic guarantee only, or provides the nothrow guarantee iff T has a
// nothrow move or copy constructor.
template<class T, std::size_t MaxInline, class A, class B, class C>
void swap(small_vector<T,MaxInline,A,B,C>& a,
          small_vector<T,MaxInline,A,B,C>& b) {
  a.swap(b);
}

//////////////////////////////////////////////////////////////////////

namespace detail {

// Format support.
template <class T, size_t M, class A, class B, class C>
struct IndexableTraits<small_vector<T, M, A, B, C>>
  : public IndexableTraitsSeq<small_vector<T, M, A, B, C>> {
};

} // namespace detail

} // namespace folly

#pragma GCC diagnostic pop

#ifdef FB_PACK_ATTR
# undef FB_PACK_ATTR
# undef FB_PACK_PUSH
# undef FB_PACK_POP
#endif

#endif // FOLLY_SMALL_VECTOR_H_