2 * Copyright 2013 Facebook, Inc.
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
8 * http://www.apache.org/licenses/LICENSE-2.0
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
18 * Nicholas Ormrod (njormrod)
19 * Andrei Alexandrescu (aalexandre)
21 * FBVector is Facebook's drop-in implementation of std::vector. It has special
22 * optimizations for use with relocatable types and jemalloc.
25 #ifndef FOLLY_FBVECTOR_H
26 #define FOLLY_FBVECTOR_H
28 //=============================================================================
36 #include <type_traits>
39 #include "folly/Likely.h"
40 #include "folly/Malloc.h"
41 #include "folly/Traits.h"
43 #include <boost/operators.hpp>
45 // some files expected these from FBVector
47 #include "folly/Foreach.h"
48 #include <boost/type_traits.hpp>
49 #include <boost/utility/enable_if.hpp>
51 //=============================================================================
52 // forward declaration
54 #ifdef FOLLY_BENCHMARK_USE_NS_IFOLLY
59 template <class T, class Allocator = std::allocator<T>>
63 //=============================================================================
66 #if __GNUC__ < 4 || __GNUC__ == 4 && __GNUC_MINOR__ < 7
67 // PLEASE UPGRADE TO GCC 4.7 or above
68 #define FOLLY_FBV_COMPATIBILITY_MODE
71 #ifndef FOLLY_FBV_COMPATIBILITY_MODE
76 struct fbv_allocator_traits
77 : std::allocator_traits<A> {};
80 struct fbv_is_nothrow_move_constructible
81 : std::is_nothrow_move_constructible<T> {};
// Thin wrapper over std::is_nothrow_constructible, used (when the compiler is
// new enough — see FOLLY_FBV_COMPATIBILITY_MODE above) so fbvector can query
// noexcept-constructibility through one local name on both code paths.
83 template <typename T, typename... Args>
84 struct fbv_is_nothrow_constructible
85 : std::is_nothrow_constructible<T, Args...> {};
88 struct fbv_is_copy_constructible
89 : std::is_copy_constructible<T> {};
98 struct fbv_allocator_traits {
99 static_assert(sizeof(A) == 0,
100 "If you want to use a custom allocator, then you must upgrade to gcc 4.7");
101 // for some old code that deals with this case, see D566719, diff number 10.
104 template <typename T>
105 struct fbv_allocator_traits<std::allocator<T>> {
106 typedef std::allocator<T> A;
109 typedef const T* const_pointer;
110 typedef size_t size_type;
112 typedef std::false_type propagate_on_container_copy_assignment;
113 typedef std::false_type propagate_on_container_move_assignment;
114 typedef std::false_type propagate_on_container_swap;
116 static pointer allocate(A& a, size_type n) {
117 return static_cast<pointer>(::operator new(n * sizeof(T)));
119 static void deallocate(A& a, pointer p, size_type n) {
120 ::operator delete(p);
123 template <typename R, typename... Args>
124 static void construct(A& a, R* p, Args&&... args) {
125 new (p) R(std::forward<Args>(args)...);
127 template <typename R>
128 static void destroy(A& a, R* p) {
132 static A select_on_container_copy_construction(const A& a) {
// Compatibility-mode fallback (pre-gcc-4.7): unconditionally reports
// false_type. This is the conservative answer — callers (e.g. the relocation
// helpers below) will then avoid assuming a noexcept move and fall back to
// the copy-based path.
137 template <typename T>
138 struct fbv_is_nothrow_move_constructible
139 : std::false_type {};
// Compatibility-mode fallback (pre-gcc-4.7): unconditionally false_type,
// mirroring fbv_is_nothrow_move_constructible above — without
// std::is_nothrow_constructible we cannot prove noexcept, so report the
// conservative answer.
141 template <typename T, typename... Args>
142 struct fbv_is_nothrow_constructible
143 : std::false_type {};
145 template <typename T>
146 struct fbv_is_copy_constructible
153 //=============================================================================
156 #define FOLLY_FBV_UNROLL_PTR(first, last, OP) do { \
157 for (; (last) - (first) >= 4; (first) += 4) { \
163 for (; (first) != (last); ++(first)) OP((first)); \
166 //=============================================================================
167 ///////////////////////////////////////////////////////////////////////////////
171 ///////////////////////////////////////////////////////////////////////////////
173 #ifdef FOLLY_BENCHMARK_USE_NS_IFOLLY
179 template <class T, class Allocator>
180 class fbvector : private boost::totally_ordered<fbvector<T, Allocator>> {
182 //===========================================================================
183 //---------------------------------------------------------------------------
187 typedef folly::fbv_allocator_traits<Allocator> A;
189 struct Impl : public Allocator {
191 typedef typename A::pointer pointer;
192 typedef typename A::size_type size_type;
198 Impl() : Allocator(), b_(nullptr), e_(nullptr), z_(nullptr) {}
199 Impl(const Allocator& a)
200 : Allocator(a), b_(nullptr), e_(nullptr), z_(nullptr) {}
202 : Allocator(std::move(a)), b_(nullptr), e_(nullptr), z_(nullptr) {}
204 Impl(size_type n, const Allocator& a = Allocator())
209 : Allocator(std::move(other)),
210 b_(other.b_), e_(other.e_), z_(other.z_)
211 { other.b_ = other.e_ = other.z_ = nullptr; }
219 // note that 'allocate' and 'deallocate' are inherited from Allocator
220 T* D_allocate(size_type n) {
221 if (usingStdAllocator::value) {
222 return static_cast<T*>(malloc(n * sizeof(T)));
224 return folly::fbv_allocator_traits<Allocator>::allocate(*this, n);
228 void D_deallocate(T* p, size_type n) noexcept {
229 if (usingStdAllocator::value) {
232 folly::fbv_allocator_traits<Allocator>::deallocate(*this, p, n);
237 void swapData(Impl& other) {
238 std::swap(b_, other.b_);
239 std::swap(e_, other.e_);
240 std::swap(z_, other.z_);
244 inline void destroy() noexcept {
246 // THIS DISPATCH CODE IS DUPLICATED IN fbvector::D_destroy_range_a.
247 // It has been inlined here for speed. It calls the static fbvector
248 // methods to perform the actual destruction.
249 if (usingStdAllocator::value) {
250 S_destroy_range(b_, e_);
252 S_destroy_range_a(*this, b_, e_);
255 D_deallocate(b_, z_ - b_);
259 void init(size_type n) {
260 if (UNLIKELY(n == 0)) {
261 b_ = e_ = z_ = nullptr;
263 size_type sz = folly::goodMallocSize(n * sizeof(T)) / sizeof(T);
271 set(pointer newB, size_type newSize, size_type newCap) {
277 void reset(size_type newCap) {
286 void reset() { // same as reset(0)
288 b_ = e_ = z_ = nullptr;
292 static void swap(Impl& a, Impl& b) {
294 if (!usingStdAllocator::value) swap<Allocator>(a, b);
298 //===========================================================================
299 //---------------------------------------------------------------------------
300 // types and constants
303 typedef T value_type;
304 typedef value_type& reference;
305 typedef const value_type& const_reference;
307 typedef const T* const_iterator;
308 typedef size_t size_type;
309 typedef typename std::make_signed<size_type>::type difference_type;
310 typedef Allocator allocator_type;
311 typedef typename A::pointer pointer;
312 typedef typename A::const_pointer const_pointer;
313 typedef std::reverse_iterator<iterator> reverse_iterator;
314 typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
318 typedef std::integral_constant<bool,
319 std::has_trivial_copy_constructor<T>::value &&
320 sizeof(T) <= 16 // don't force large structures to be passed by value
321 > should_pass_by_value;
322 typedef typename std::conditional<
323 should_pass_by_value::value, T, const T&>::type VT;
324 typedef typename std::conditional<
325 should_pass_by_value::value, T, T&&>::type MT;
327 typedef std::integral_constant<bool,
328 std::is_same<Allocator, std::allocator<T>>::value> usingStdAllocator;
329 typedef std::integral_constant<bool,
330 usingStdAllocator::value ||
331 A::propagate_on_container_move_assignment::value> moveIsSwap;
333 //===========================================================================
334 //---------------------------------------------------------------------------
338 //---------------------------------------------------------------------------
341 T* M_allocate(size_type n) {
342 return impl_.D_allocate(n);
345 //---------------------------------------------------------------------------
348 void M_deallocate(T* p, size_type n) noexcept {
349 impl_.D_deallocate(p, n);
352 //---------------------------------------------------------------------------
355 // GCC is very sensitive to the exact way that construct is called. For
356 // that reason there are several different specializations of construct.
358 template <typename U, typename... Args>
359 void M_construct(U* p, Args&&... args) {
360 if (usingStdAllocator::value) {
361 new (p) U(std::forward<Args>(args)...);
363 folly::fbv_allocator_traits<Allocator>::construct(
364 impl_, p, std::forward<Args>(args)...);
368 template <typename U, typename... Args>
369 static void S_construct(U* p, Args&&... args) {
370 new (p) U(std::forward<Args>(args)...);
373 template <typename U, typename... Args>
374 static void S_construct_a(Allocator& a, U* p, Args&&... args) {
375 folly::fbv_allocator_traits<Allocator>::construct(
376 a, p, std::forward<Args>(args)...);
379 // scalar optimization
380 // TODO we can expand this optimization to: default copyable and assignable
381 template <typename U, typename Enable = typename
382 std::enable_if<std::is_scalar<U>::value>::type>
383 void M_construct(U* p, U arg) {
384 if (usingStdAllocator::value) {
387 folly::fbv_allocator_traits<Allocator>::construct(impl_, p, arg);
391 template <typename U, typename Enable = typename
392 std::enable_if<std::is_scalar<U>::value>::type>
393 static void S_construct(U* p, U arg) {
397 template <typename U, typename Enable = typename
398 std::enable_if<std::is_scalar<U>::value>::type>
399 static void S_construct_a(Allocator& a, U* p, U arg) {
400 folly::fbv_allocator_traits<Allocator>::construct(a, p, arg);
403 // const& optimization
404 template <typename U, typename Enable = typename
405 std::enable_if<!std::is_scalar<U>::value>::type>
406 void M_construct(U* p, const U& value) {
407 if (usingStdAllocator::value) {
410 folly::fbv_allocator_traits<Allocator>::construct(impl_, p, value);
414 template <typename U, typename Enable = typename
415 std::enable_if<!std::is_scalar<U>::value>::type>
416 static void S_construct(U* p, const U& value) {
420 template <typename U, typename Enable = typename
421 std::enable_if<!std::is_scalar<U>::value>::type>
422 static void S_construct_a(Allocator& a, U* p, const U& value) {
423 folly::fbv_allocator_traits<Allocator>::construct(a, p, value);
426 //---------------------------------------------------------------------------
429 void M_destroy(T* p) noexcept {
430 if (usingStdAllocator::value) {
431 if (!std::has_trivial_destructor<T>::value) p->~T();
433 folly::fbv_allocator_traits<Allocator>::destroy(impl_, p);
437 //===========================================================================
438 //---------------------------------------------------------------------------
439 // algorithmic helpers
442 //---------------------------------------------------------------------------
446 void M_destroy_range_e(T* pos) noexcept {
447 D_destroy_range_a(pos, impl_.e_);
452 // THIS DISPATCH CODE IS DUPLICATED IN IMPL. SEE IMPL FOR DETAILS.
453 void D_destroy_range_a(T* first, T* last) noexcept {
454 if (usingStdAllocator::value) {
455 S_destroy_range(first, last);
457 S_destroy_range_a(impl_, first, last);
462 static void S_destroy_range_a(Allocator& a, T* first, T* last) noexcept {
463 for (; first != last; ++first)
464 folly::fbv_allocator_traits<Allocator>::destroy(a, first);
468 static void S_destroy_range(T* first, T* last) noexcept {
469 if (!std::has_trivial_destructor<T>::value) {
470 // EXPERIMENTAL DATA on fbvector<vector<int>> (where each vector<int> has
472 // The unrolled version seems to work faster for small to medium sized
473 // fbvectors. It gets a 10% speedup on fbvectors of size 1024, 64, and
475 // The simple loop version seems to work faster for large fbvectors. The
476 // unrolled version is about 6% slower on fbvectors on size 16384.
477 // The two methods seem tied for very large fbvectors. The unrolled
478 // version is about 0.5% slower on size 262144.
480 // for (; first != last; ++first) first->~T();
481 #define FOLLY_FBV_OP(p) (p)->~T()
482 FOLLY_FBV_UNROLL_PTR(first, last, FOLLY_FBV_OP)
487 //---------------------------------------------------------------------------
488 // uninitialized_fill_n
491 void M_uninitialized_fill_n_e(size_type sz) {
492 D_uninitialized_fill_n_a(impl_.e_, sz);
496 void M_uninitialized_fill_n_e(size_type sz, VT value) {
497 D_uninitialized_fill_n_a(impl_.e_, sz, value);
502 void D_uninitialized_fill_n_a(T* dest, size_type sz) {
503 if (usingStdAllocator::value) {
504 S_uninitialized_fill_n(dest, sz);
506 S_uninitialized_fill_n_a(impl_, dest, sz);
510 void D_uninitialized_fill_n_a(T* dest, size_type sz, VT value) {
511 if (usingStdAllocator::value) {
512 S_uninitialized_fill_n(dest, sz, value);
514 S_uninitialized_fill_n_a(impl_, dest, sz, value);
519 template <typename... Args>
520 static void S_uninitialized_fill_n_a(Allocator& a, T* dest,
521 size_type sz, Args&&... args) {
526 folly::fbv_allocator_traits<Allocator>::construct(a, b,
527 std::forward<Args>(args)...);
529 S_destroy_range_a(a, dest, b);
535 static void S_uninitialized_fill_n(T* dest, size_type n) {
536 if (folly::IsZeroInitializable<T>::value) {
537 std::memset(dest, 0, sizeof(T) * n);
542 for (; b != e; ++b) S_construct(b);
545 for (; b >= dest; --b) b->~T();
551 static void S_uninitialized_fill_n(T* dest, size_type n, const T& value) {
555 for (; b != e; ++b) S_construct(b, value);
557 S_destroy_range(dest, b);
562 //---------------------------------------------------------------------------
563 // uninitialized_copy
565 // it is possible to add an optimization for the case where
566 // It = move(T*) and IsRelocatable<T> and Is0Initiailizable<T>
569 template <typename It>
570 void M_uninitialized_copy_e(It first, It last) {
571 D_uninitialized_copy_a(impl_.e_, first, last);
572 impl_.e_ += std::distance(first, last);
575 template <typename It>
576 void M_uninitialized_move_e(It first, It last) {
577 D_uninitialized_move_a(impl_.e_, first, last);
578 impl_.e_ += std::distance(first, last);
582 template <typename It>
583 void D_uninitialized_copy_a(T* dest, It first, It last) {
584 if (usingStdAllocator::value) {
585 if (folly::IsTriviallyCopyable<T>::value) {
586 S_uninitialized_copy_bits(dest, first, last);
588 S_uninitialized_copy(dest, first, last);
591 S_uninitialized_copy_a(impl_, dest, first, last);
595 template <typename It>
596 void D_uninitialized_move_a(T* dest, It first, It last) {
597 D_uninitialized_copy_a(dest,
598 std::make_move_iterator(first), std::make_move_iterator(last));
602 template <typename It>
604 S_uninitialized_copy_a(Allocator& a, T* dest, It first, It last) {
607 for (; first != last; ++first, ++b)
608 folly::fbv_allocator_traits<Allocator>::construct(a, b, *first);
610 S_destroy_range_a(a, dest, b);
616 template <typename It>
617 static void S_uninitialized_copy(T* dest, It first, It last) {
620 for (; first != last; ++first, ++b)
621 S_construct(b, *first);
623 S_destroy_range(dest, b);
629 S_uninitialized_copy_bits(T* dest, const T* first, const T* last) {
630 std::memcpy(dest, first, (last - first) * sizeof(T));
634 S_uninitialized_copy_bits(T* dest, std::move_iterator<T*> first,
635 std::move_iterator<T*> last) {
636 T* bFirst = first.base();
637 T* bLast = last.base();
638 std::memcpy(dest, bFirst, (bLast - bFirst) * sizeof(T));
641 template <typename It>
643 S_uninitialized_copy_bits(T* dest, It first, It last) {
644 S_uninitialized_copy(dest, first, last);
647 //---------------------------------------------------------------------------
650 // This function is "unsafe": it assumes that the iterator can be advanced at
651 // least n times. However, as a private function, that unsafety is managed
652 // wholly by fbvector itself.
654 template <typename It>
655 static It S_copy_n(T* dest, It first, size_type n) {
657 for (; dest != e; ++dest, ++first) *dest = *first;
661 static const T* S_copy_n(T* dest, const T* first, size_type n) {
662 if (folly::IsTriviallyCopyable<T>::value) {
663 std::memcpy(dest, first, n * sizeof(T));
666 return S_copy_n<const T*>(dest, first, n);
670 static std::move_iterator<T*>
671 S_copy_n(T* dest, std::move_iterator<T*> mIt, size_type n) {
672 if (folly::IsTriviallyCopyable<T>::value) {
673 T* first = mIt.base();
674 std::memcpy(dest, first, n * sizeof(T));
675 return std::make_move_iterator(first + n);
677 return S_copy_n<std::move_iterator<T*>>(dest, mIt, n);
681 //===========================================================================
682 //---------------------------------------------------------------------------
683 // relocation helpers
686 // Relocation is divided into three parts:
689 // Performs the actual movement of data from point a to point b.
692 // Destroys the old data.
695 * Destroys the new data and restores the old data.
697 // The three steps are used because there may be an exception after part 1
698 // has completed. If that is the case, then relocate_undo can nullify the
699 // initial move. Otherwise, relocate_done performs the last bit of tidying
702 // The relocation trio may use either memcpy, move, or copy. It is decided
703 // by the following case statement:
705 // IsRelocatable && usingStdAllocator -> memcpy
706 // has_nothrow_move && usingStdAllocator -> move
707 // cannot copy -> move
710 // If the class is non-copyable then it must be movable. However, if the
711 // move constructor is not noexcept, i.e. an error could be thrown, then
712 // relocate_undo will be unable to restore the old data, for fear of a
713 // second exception being thrown. This is a known and unavoidable
714 // deficiency. In lieu of a strong exception guarantee, relocate_undo does
715 // the next best thing: it provides a weak exception guarantee by
716 * destroying the new data, but leaving the old data in an indeterminate
717 // state. Note that that indeterminate state will be valid, since the
718 // old data has not been destroyed; it has merely been the source of a
719 // move, which is required to leave the source in a valid state.
722 void M_relocate(T* newB) {
723 relocate_move(newB, impl_.b_, impl_.e_);
724 relocate_done(newB, impl_.b_, impl_.e_);
727 // dispatch type trait
728 typedef std::integral_constant<bool,
729 folly::IsRelocatable<T>::value && usingStdAllocator::value
730 > relocate_use_memcpy;
732 typedef std::integral_constant<bool,
733 (folly::fbv_is_nothrow_move_constructible<T>::value
734 && usingStdAllocator::value)
735 || !folly::fbv_is_copy_constructible<T>::value
739 void relocate_move(T* dest, T* first, T* last) {
740 relocate_move_or_memcpy(dest, first, last, relocate_use_memcpy());
743 void relocate_move_or_memcpy(T* dest, T* first, T* last, std::true_type) {
744 std::memcpy(dest, first, (last - first) * sizeof(T));
747 void relocate_move_or_memcpy(T* dest, T* first, T* last, std::false_type) {
748 relocate_move_or_copy(dest, first, last, relocate_use_move());
751 void relocate_move_or_copy(T* dest, T* first, T* last, std::true_type) {
752 D_uninitialized_move_a(dest, first, last);
755 void relocate_move_or_copy(T* dest, T* first, T* last, std::false_type) {
756 D_uninitialized_copy_a(dest, first, last);
760 void relocate_done(T* dest, T* first, T* last) noexcept {
761 if (folly::IsRelocatable<T>::value && usingStdAllocator::value) {
762 // used memcpy; data has been relocated, do not call destructor
764 D_destroy_range_a(first, last);
769 void relocate_undo(T* dest, T* first, T* last) noexcept {
770 if (folly::IsRelocatable<T>::value && usingStdAllocator::value) {
771 // used memcpy, old data is still valid, nothing to do
772 } else if (folly::fbv_is_nothrow_move_constructible<T>::value &&
773 usingStdAllocator::value) {
774 // noexcept move everything back, aka relocate_move
775 relocate_move(first, dest, dest + (last - first));
776 } else if (!folly::fbv_is_copy_constructible<T>::value) {
778 D_destroy_range_a(dest, dest + (last - first));
780 // used copy, old data is still valid
781 D_destroy_range_a(dest, dest + (last - first));
786 //===========================================================================
787 //---------------------------------------------------------------------------
788 // construct/copy/destroy
791 fbvector() = default;
793 explicit fbvector(const Allocator& a) : impl_(a) {}
795 explicit fbvector(size_type n, const Allocator& a = Allocator())
797 { M_uninitialized_fill_n_e(n); }
799 fbvector(size_type n, VT value, const Allocator& a = Allocator())
801 { M_uninitialized_fill_n_e(n, value); }
803 template <class It, class Category = typename
804 std::iterator_traits<It>::iterator_category>
805 fbvector(It first, It last, const Allocator& a = Allocator())
806 #ifndef FOLLY_FBV_COMPATIBILITY_MODE
807 : fbvector(first, last, a, Category()) {}
809 : impl_(std::distance(first, last), a)
810 { fbvector_init(first, last, Category()); }
813 fbvector(const fbvector& other)
814 : impl_(other.size(), A::select_on_container_copy_construction(other.impl_))
815 { M_uninitialized_copy_e(other.begin(), other.end()); }
817 fbvector(fbvector&& other) noexcept : impl_(std::move(other.impl_)) {}
819 fbvector(const fbvector& other, const Allocator& a)
820 #ifndef FOLLY_FBV_COMPATIBILITY_MODE
821 : fbvector(other.begin(), other.end(), a) {}
823 : impl_(other.size(), a)
824 { fbvector_init(other.begin(), other.end(), std::forward_iterator_tag()); }
827 fbvector(fbvector&& other, const Allocator& a) : impl_(a) {
828 if (impl_ == other.impl_) {
829 impl_.swapData(other.impl_);
831 impl_.init(other.size());
832 M_uninitialized_move_e(other.begin(), other.end());
836 fbvector(std::initializer_list<T> il, const Allocator& a = Allocator())
837 #ifndef FOLLY_FBV_COMPATIBILITY_MODE
838 : fbvector(il.begin(), il.end(), a) {}
840 : impl_(std::distance(il.begin(), il.end()), a)
841 { fbvector_init(il.begin(), il.end(), std::forward_iterator_tag()); }
844 ~fbvector() = default; // the cleanup occurs in impl_
846 fbvector& operator=(const fbvector& other) {
847 if (UNLIKELY(this == &other)) return *this;
849 if (!usingStdAllocator::value &&
850 A::propagate_on_container_copy_assignment::value) {
851 if (impl_ != other.impl_) {
852 // can't use other's different allocator to clean up self
855 (Allocator&)impl_ = (Allocator&)other.impl_;
858 assign(other.begin(), other.end());
862 fbvector& operator=(fbvector&& other) {
863 if (UNLIKELY(this == &other)) return *this;
864 moveFrom(std::move(other), moveIsSwap());
868 fbvector& operator=(std::initializer_list<T> il) {
869 assign(il.begin(), il.end());
873 template <class It, class Category = typename
874 std::iterator_traits<It>::iterator_category>
875 void assign(It first, It last) {
876 assign(first, last, Category());
879 void assign(size_type n, VT value) {
880 if (n > capacity()) {
881 // Not enough space. Do not reserve in place, since we will
882 // discard the old values anyways.
883 if (dataIsInternalAndNotVT(value)) {
884 T copy(std::move(value));
886 M_uninitialized_fill_n_e(n, copy);
889 M_uninitialized_fill_n_e(n, value);
891 } else if (n <= size()) {
892 auto newE = impl_.b_ + n;
893 std::fill(impl_.b_, newE, value);
894 M_destroy_range_e(newE);
896 std::fill(impl_.b_, impl_.e_, value);
897 M_uninitialized_fill_n_e(n - size(), value);
901 void assign(std::initializer_list<T> il) {
902 assign(il.begin(), il.end());
905 allocator_type get_allocator() const noexcept {
911 #ifndef FOLLY_FBV_COMPATIBILITY_MODE
912 // contract dispatch for iterator types fbvector(It first, It last)
913 template <class ForwardIterator>
914 fbvector(ForwardIterator first, ForwardIterator last,
915 const Allocator& a, std::forward_iterator_tag)
916 : impl_(std::distance(first, last), a)
917 { M_uninitialized_copy_e(first, last); }
919 template <class InputIterator>
920 fbvector(InputIterator first, InputIterator last,
921 const Allocator& a, std::input_iterator_tag)
923 { for (; first != last; ++first) emplace_back(*first); }
926 // contract dispatch for iterator types without constructor forwarding
927 template <class ForwardIterator>
929 fbvector_init(ForwardIterator first, ForwardIterator last,
930 std::forward_iterator_tag)
931 { M_uninitialized_copy_e(first, last); }
933 template <class InputIterator>
935 fbvector_init(InputIterator first, InputIterator last,
936 std::input_iterator_tag)
937 { for (; first != last; ++first) emplace_back(*first); }
940 // contract dispatch for allocator movement in operator=(fbvector&&)
942 moveFrom(fbvector&& other, std::true_type) {
943 swap(impl_, other.impl_);
945 void moveFrom(fbvector&& other, std::false_type) {
946 if (impl_ == other.impl_) {
947 impl_.swapData(other.impl_);
949 impl_.reset(other.size());
950 M_uninitialized_move_e(other.begin(), other.end());
954 // contract dispatch for iterator types in assign(It first, It last)
955 template <class ForwardIterator>
956 void assign(ForwardIterator first, ForwardIterator last,
957 std::forward_iterator_tag) {
958 auto const newSize = std::distance(first, last);
959 if (newSize > capacity()) {
960 impl_.reset(newSize);
961 M_uninitialized_copy_e(first, last);
962 } else if (newSize <= size()) {
963 auto newEnd = std::copy(first, last, impl_.b_);
964 M_destroy_range_e(newEnd);
966 auto mid = S_copy_n(impl_.b_, first, size());
967 M_uninitialized_copy_e<decltype(last)>(mid, last);
971 template <class InputIterator>
972 void assign(InputIterator first, InputIterator last,
973 std::input_iterator_tag) {
975 for (; first != last && p != impl_.e_; ++first, ++p) {
979 M_destroy_range_e(p);
981 for (; first != last; ++first) emplace_back(*first);
985 // contract dispatch for aliasing under VT optimization
986 bool dataIsInternalAndNotVT(const T& t) {
987 if (should_pass_by_value::value) return false;
988 return dataIsInternal(t);
990 bool dataIsInternal(const T& t) {
991 return UNLIKELY(impl_.b_ <= std::addressof(t) &&
992 std::addressof(t) < impl_.e_);
996 //===========================================================================
997 //---------------------------------------------------------------------------
1001 iterator begin() noexcept {
1004 const_iterator begin() const noexcept {
1007 iterator end() noexcept {
1010 const_iterator end() const noexcept {
1013 reverse_iterator rbegin() noexcept {
1014 return reverse_iterator(end());
1016 const_reverse_iterator rbegin() const noexcept {
1017 return const_reverse_iterator(end());
1019 reverse_iterator rend() noexcept {
1020 return reverse_iterator(begin());
1022 const_reverse_iterator rend() const noexcept {
1023 return const_reverse_iterator(begin());
1026 const_iterator cbegin() const noexcept {
1029 const_iterator cend() const noexcept {
1032 const_reverse_iterator crbegin() const noexcept {
1033 return const_reverse_iterator(end());
1035 const_reverse_iterator crend() const noexcept {
1036 return const_reverse_iterator(begin());
1039 //===========================================================================
1040 //---------------------------------------------------------------------------
1044 size_type size() const noexcept {
1045 return impl_.e_ - impl_.b_;
1048 size_type max_size() const noexcept {
1049 // good luck gettin' there
1050 return ~size_type(0);
1053 void resize(size_type n) {
1055 M_destroy_range_e(impl_.b_ + n);
1058 M_uninitialized_fill_n_e(n - size());
1062 void resize(size_type n, VT t) {
1064 M_destroy_range_e(impl_.b_ + n);
1065 } else if (dataIsInternalAndNotVT(t) && n > capacity()) {
1068 M_uninitialized_fill_n_e(n - size(), copy);
1071 M_uninitialized_fill_n_e(n - size(), t);
1075 size_type capacity() const noexcept {
1076 return impl_.z_ - impl_.b_;
1079 bool empty() const noexcept {
1080 return impl_.b_ == impl_.e_;
1083 void reserve(size_type n) {
1084 if (n <= capacity()) return;
1085 if (impl_.b_ && reserve_in_place(n)) return;
1087 auto newCap = folly::goodMallocSize(n * sizeof(T)) / sizeof(T);
1088 auto newB = M_allocate(newCap);
1092 M_deallocate(newB, newCap);
1096 M_deallocate(impl_.b_, impl_.z_ - impl_.b_);
1097 impl_.z_ = newB + newCap;
1098 impl_.e_ = newB + (impl_.e_ - impl_.b_);
1102 void shrink_to_fit() noexcept {
1103 auto const newCapacityBytes = folly::goodMallocSize(size() * sizeof(T));
1104 auto const newCap = newCapacityBytes / sizeof(T);
1105 auto const oldCap = capacity();
1107 if (newCap >= oldCap) return;
1110 if ((rallocm && usingStdAllocator::value) &&
1111 newCapacityBytes >= folly::jemallocMinInPlaceExpandable &&
1112 rallocm(&p, NULL, newCapacityBytes, 0, ALLOCM_NO_MOVE)
1113 == ALLOCM_SUCCESS) {
1114 impl_.z_ += newCap - oldCap;
1116 T* newB; // intentionally uninitialized
1118 newB = M_allocate(newCap);
1122 M_deallocate(newB, newCap);
1123 return; // swallow the error
1129 M_deallocate(impl_.b_, impl_.z_ - impl_.b_);
1130 impl_.z_ = newB + newCap;
1131 impl_.e_ = newB + (impl_.e_ - impl_.b_);
1138 bool reserve_in_place(size_type n) {
1139 if (!usingStdAllocator::value || !rallocm) return false;
1141 // jemalloc can never grow in place blocks smaller than 4096 bytes.
1142 if ((impl_.z_ - impl_.b_) * sizeof(T) <
1143 folly::jemallocMinInPlaceExpandable) return false;
1145 auto const newCapacityBytes = folly::goodMallocSize(n * sizeof(T));
1147 if (rallocm(&p, NULL, newCapacityBytes, 0, ALLOCM_NO_MOVE)
1148 == ALLOCM_SUCCESS) {
1149 impl_.z_ = impl_.b_ + newCapacityBytes / sizeof(T);
1155 //===========================================================================
1156 //---------------------------------------------------------------------------
1160 reference operator[](size_type n) {
1164 const_reference operator[](size_type n) const {
1168 const_reference at(size_type n) const {
1169 if (UNLIKELY(n >= size())) {
1170 throw std::out_of_range("fbvector: index is greater than size.");
1174 reference at(size_type n) {
1175 auto const& cThis = *this;
1176 return const_cast<reference>(cThis.at(n));
1182 const_reference front() const {
1188 return impl_.e_[-1];
1190 const_reference back() const {
1192 return impl_.e_[-1];
1195 //===========================================================================
1196 //---------------------------------------------------------------------------
1200 T* data() noexcept {
1203 const T* data() const noexcept {
1207 //===========================================================================
1208 //---------------------------------------------------------------------------
1209 // modifiers (common)
1212 template <class... Args>
1213 void emplace_back(Args&&... args) {
1214 if (impl_.e_ != impl_.z_) {
1215 M_construct(impl_.e_, std::forward<Args>(args)...);
1218 emplace_back_aux(std::forward<Args>(args)...);
1223 push_back(const T& value) {
1224 if (impl_.e_ != impl_.z_) {
1225 M_construct(impl_.e_, value);
1228 emplace_back_aux(value);
1233 push_back(T&& value) {
1234 if (impl_.e_ != impl_.z_) {
1235 M_construct(impl_.e_, std::move(value));
1238 emplace_back_aux(std::move(value));
1245 M_destroy(impl_.e_);
1248 void swap(fbvector& other) noexcept {
1249 if (!usingStdAllocator::value &&
1250 A::propagate_on_container_swap::value)
1251 swap(impl_, other.impl_);
1252 else impl_.swapData(other.impl_);
1255 void clear() noexcept {
1256 M_destroy_range_e(impl_.b_);
1261 // std::vector implements a similar function with a different growth
1262 // strategy: empty() ? 1 : capacity() * 2.
1264 // fbvector grows differently on two counts:
1267 * Instead of growing to size 1 from empty, fbvector allocates at
1268 // least 64 bytes. You may still use reserve to reserve a lesser amount
1271 // For medium-sized vectors, the growth strategy is 1.5x. See the docs
1273 // This does not apply to very small or very large fbvectors. This is a
1275 // A nice addition to fbvector would be the capability of having a user-
1276 // defined growth strategy, probably as part of the allocator.
1279 size_type computePushBackCapacity() const {
1280 return empty() ? std::max(64 / sizeof(T), size_type(1))
1281 : capacity() < folly::jemallocMinInPlaceExpandable / sizeof(T)
1283 : sizeof(T) > folly::jemallocMinInPlaceExpandable / 2 && capacity() == 1
1285 : capacity() > 4096 * 32 / sizeof(T)
1287 : (capacity() * 3 + 1) / 2;
// Out-of-line slow path for emplace_back/push_back; defined after the
// class body (gcc optimizes better with it outlined).
1290 template <class... Args>
1291 void emplace_back_aux(Args&&... args);
1293 //===========================================================================
1294 //---------------------------------------------------------------------------
1295 // modifiers (erase)
// Erases the single element at position by delegating to the range form.
1298 iterator erase(const_iterator position) {
1299 return erase(position, position + 1);
// Erases [first, last) and returns an iterator to the element following
// the erased range.
//   - erasing a suffix: just destroy it (M_destroy_range_e)
//   - relocatable T + std allocator: destroy the gap, then shift the tail
//     down with raw byte copies; memcpy is used when the gap is at least
//     as large as the tail (source and destination cannot overlap),
//     memmove otherwise
//   - general case: move-assign the tail down, then destroy the leftover
//     elements past the new end
// NOTE(review): several else/closing-brace lines are missing from this
// excerpt.
1302 iterator erase(const_iterator first, const_iterator last) {
1303 assert(isValid(first) && isValid(last));
1304 assert(first <= last);
1305 if (first != last) {
1306 if (last == end()) {
1307 M_destroy_range_e((iterator)first);
1309 if (folly::IsRelocatable<T>::value && usingStdAllocator::value) {
1310 D_destroy_range_a((iterator)first, (iterator)last);
1311 if (last - first >= cend() - last) {
1312 std::memcpy((iterator)first, last, (cend() - last) * sizeof(T));
1314 std::memmove((iterator)first, last, (cend() - last) * sizeof(T));
1316 impl_.e_ -= (last - first);
1318 std::copy(std::make_move_iterator((iterator)last),
1319 std::make_move_iterator(end()), (iterator)first);
1320 auto newEnd = impl_.e_ - std::distance(first, last);
1321 M_destroy_range_e(newEnd);
1325 return (iterator)first;
1328 //===========================================================================
1329 //---------------------------------------------------------------------------
1330 // modifiers (insert)
1331 private: // we have the private section first because it defines some macros
// True iff `it` points into this vector's valid iterator range
// [cbegin(), cend()] (end itself is a valid insertion point).
1333 bool isValid(const_iterator it) {
1334 return cbegin() <= it && it <= cend();
// Capacity to allocate for an insertion of n elements: at least the
// push_back growth target and at least size() + n, rounded up to a good
// malloc size. NOTE(review): the return statement is not visible in this
// excerpt.
1337 size_type computeInsertCapacity(size_type n) {
1338 size_type nc = std::max(computePushBackCapacity(), size() + n);
1339 size_type ac = folly::goodMallocSize(nc * sizeof(T)) / sizeof(T);
1343 //---------------------------------------------------------------------------
1345 // make_window takes an fbvector, and creates an uninitialized gap (a
1346 // window) at the given position, of the given size. The fbvector must
1347 // have enough capacity.
1349 // Explanation by picture.
1353 // make_window here of size 3
1357 // If something goes wrong and the window must be destroyed, use
1358 // undo_window to provide a weak exception guarantee. It destroys
1363 //---------------------------------------------------------------------------
1365 // wrap_frame takes an inverse window and relocates an fbvector around it.
1366 // The fbvector must have at least as many elements as the left ledge.
1368 // Explanation by picture.
1371 // fbvector: inverse window:
1372 // 123456789______ _____abcde_______
1376 // _______________ 12345abcde6789___
1378 //---------------------------------------------------------------------------
1380 // insert_use_fresh_memory returns true iff the fbvector should use a fresh
1381 // block of memory for the insertion. If the fbvector does not have enough
1382 // spare capacity, then it must return true. Otherwise either true or false
1385 //---------------------------------------------------------------------------
1387 // These three functions, make_window, wrap_frame, and
1388 // insert_use_fresh_memory, can be combined into a uniform interface.
1389 // Since that interface involves a lot of case-work, it is built into
1390 // some macros: FOLLY_FBVECTOR_INSERT_(START|TRY|END)
1391 // Macros are used in an attempt to let GCC perform better optimizations,
1392 // especially control flow optimization.
1395 //---------------------------------------------------------------------------
// Opens an uninitialized gap ("window") of n elements at `position`;
// the vector must already have enough capacity (asserted).
//   - relocatable T + std allocator: shift the tail up with one memmove
//   - general case: move-construct the last n tail elements into raw
//     space past e_, move-assign the rest backward, then destroy the
//     vacated slots so the window is uninitialized
// NOTE(review): branch structure and the impl_.e_ adjustment are partly
// missing from this excerpt.
1398 void make_window(iterator position, size_type n) {
1399 assert(isValid(position));
1400 assert(size() + n <= capacity());
1403 auto tail = std::distance(position, impl_.e_);
1406 relocate_move(position + n, position, impl_.e_);
1407 relocate_done(position + n, position, impl_.e_);
1410 if (folly::IsRelocatable<T>::value && usingStdAllocator::value) {
1411 std::memmove(position + n, position, tail * sizeof(T));
1414 D_uninitialized_move_a(impl_.e_, impl_.e_ - n, impl_.e_);
1416 std::copy_backward(std::make_move_iterator(position),
1417 std::make_move_iterator(impl_.e_ - n), impl_.e_);
1418 D_destroy_range_a(position, position + n);
// Rolls back make_window for a weak exception guarantee: destroys the
// elements after the window and truncates the vector at `position`.
1423 void undo_window(iterator position, size_type n) noexcept {
1424 D_destroy_range_a(position + n, impl_.e_);
1425 impl_.e_ = position;
1428 //---------------------------------------------------------------------------
// Relocates this vector's elements around an inverse window at `ledge`
// (see the picture above): the first idx elements land before the window,
// the remainder after it. relocate_undo reverses the first relocation if
// the second throws; relocate_done commits both.
// NOTE(review): the try/catch scaffolding is not visible in this excerpt.
1431 void wrap_frame(T* ledge, size_type idx, size_type n) {
1432 assert(size() >= idx);
1435 relocate_move(ledge, impl_.b_, impl_.b_ + idx);
1437 relocate_move(ledge + idx + n, impl_.b_ + idx, impl_.e_);
1439 relocate_undo(ledge, impl_.b_, impl_.b_ + idx);
1442 relocate_done(ledge, impl_.b_, impl_.b_ + idx);
1443 relocate_done(ledge + idx + n, impl_.b_ + idx, impl_.e_);
1446 //---------------------------------------------------------------------------
// Decides whether an insertion of n elements at cposition should use a
// freshly allocated buffer. Appends can stay in place when capacity
// suffices or an in-place reserve succeeds; any insert that exceeds
// capacity must go fresh. NOTE(review): the remaining decision lines are
// missing from this excerpt.
1449 bool insert_use_fresh(const_iterator cposition, size_type n) {
1450 if (cposition == cend()) {
1451 if (size() + n <= capacity()) return false;
1452 if (reserve_in_place(size() + n)) return false;
1456 if (size() + n > capacity()) return true;
1461 //---------------------------------------------------------------------------
// START: validates cpos, decides fresh-vs-in-place (insert_use_fresh),
// allocates a new buffer or opens a window, and exposes `start` — the
// first slot of the gap the caller constructs into. NOTE(review): several
// continuation lines of all three macros are missing from this excerpt;
// do not modify without the full file.
1464 #define FOLLY_FBVECTOR_INSERT_START(cpos, n) \
1465 assert(isValid(cpos)); \
1466 T* position = const_cast<T*>(cpos); \
1467 size_type idx = std::distance(impl_.b_, position); \
1468 bool fresh = insert_use_fresh(position, n); \
1470 size_type newCap = 0; \
1473 newCap = computeInsertCapacity(n); \
1474 b = M_allocate(newCap); \
1476 make_window(position, n); \
1480 T* start = b + idx; \
1484 // construct the inserted elements
// TRY: failure cleanup (deallocate the fresh buffer or undo the window),
// then on success wrap the old elements around the fresh buffer.
1486 #define FOLLY_FBVECTOR_INSERT_TRY(cpos, n) \
1489 M_deallocate(b, newCap); \
1491 undo_window(position, n); \
1498 wrap_frame(b, idx, n); \
1502 // delete the inserted elements (exception has been thrown)
// END: commit — release the old buffer, adopt the new one, and return an
// iterator to the first inserted element.
1504 #define FOLLY_FBVECTOR_INSERT_END(cpos, n) \
1505 M_deallocate(b, newCap); \
1508 if (impl_.b_) M_deallocate(impl_.b_, capacity()); \
1509 impl_.set(b, size() + n, newCap); \
1510 return impl_.b_ + idx; \
1515 //---------------------------------------------------------------------------
// Constructs one element in place at cpos, using the INSERT macro
// machinery above (single-element window/fresh-buffer insert).
1519 template <class... Args>
1520 iterator emplace(const_iterator cpos, Args&&... args) {
1521 FOLLY_FBVECTOR_INSERT_START(cpos, 1)
1522 M_construct(start, std::forward<Args>(args)...);
1523 FOLLY_FBVECTOR_INSERT_TRY(cpos, 1)
1525 FOLLY_FBVECTOR_INSERT_END(cpos, 1)
// Inserts a copy of value at cpos. If value aliases this vector's own
// storage (dataIsInternal), take a copy first — the insert may relocate
// the buffer and invalidate the reference.
1528 iterator insert(const_iterator cpos, const T& value) {
1529 if (dataIsInternal(value)) return insert(cpos, T(value));
1531 FOLLY_FBVECTOR_INSERT_START(cpos, 1)
1532 M_construct(start, value);
1533 FOLLY_FBVECTOR_INSERT_TRY(cpos, 1)
1535 FOLLY_FBVECTOR_INSERT_END(cpos, 1)
// Inserts value by move at cpos; self-referencing values are moved into a
// temporary first to survive a possible buffer relocation.
1538 iterator insert(const_iterator cpos, T&& value) {
1539 if (dataIsInternal(value)) return insert(cpos, T(std::move(value)));
1541 FOLLY_FBVECTOR_INSERT_START(cpos, 1)
1542 M_construct(start, std::move(value));
1543 FOLLY_FBVECTOR_INSERT_TRY(cpos, 1)
1545 FOLLY_FBVECTOR_INSERT_END(cpos, 1)
// Inserts n copies of value at cpos. n == 0 is a no-op returning cpos;
// a value aliasing internal storage is copied out first. On exception,
// the partially filled gap is destroyed before the macro cleanup runs.
1548 iterator insert(const_iterator cpos, size_type n, VT value) {
1549 if (n == 0) return (iterator)cpos;
1550 if (dataIsInternalAndNotVT(value)) return insert(cpos, n, T(value));
1552 FOLLY_FBVECTOR_INSERT_START(cpos, n)
1553 D_uninitialized_fill_n_a(start, n, value);
1554 FOLLY_FBVECTOR_INSERT_TRY(cpos, n)
1555 D_destroy_range_a(start, start + n);
1556 FOLLY_FBVECTOR_INSERT_END(cpos, n)
// Range insert entry point: tag-dispatches on the iterator category to the
// forward-iterator or input-iterator overload below. The initializer_list
// overload simply forwards its begin/end.
1559 template <class It, class Category = typename
1560 std::iterator_traits<It>::iterator_category>
1561 iterator insert(const_iterator cpos, It first, It last) {
1562 return insert(cpos, first, last, Category());
1565 iterator insert(const_iterator cpos, std::initializer_list<T> il) {
1566 return insert(cpos, il.begin(), il.end());
1569 //---------------------------------------------------------------------------
1570 // insert dispatch for iterator types
// Forward iterators allow std::distance, so the gap size is known up
// front and the elements are copy-constructed directly into it; the
// partially built gap is destroyed on exception.
1573 template <class FIt>
1574 iterator insert(const_iterator cpos, FIt first, FIt last,
1575 std::forward_iterator_tag) {
1576 size_type n = std::distance(first, last);
1577 if (n == 0) return (iterator)cpos;
1579 FOLLY_FBVECTOR_INSERT_START(cpos, n)
1580 D_uninitialized_copy_a(start, first, last);
1581 FOLLY_FBVECTOR_INSERT_TRY(cpos, n)
1582 D_destroy_range_a(start, start + n)
1583 FOLLY_FBVECTOR_INSERT_END(cpos, n)
// Input iterators are single-pass, so the count is unknown: move the tail
// [position, end()) into a temporary fbvector, destroy it in place,
// append from [first, last) one element at a time, then move the saved
// tail back with the (forward-iterator) insert at cend().
// NOTE(review): the allocator argument passes impl_ to
// select_on_container_copy_construction — presumably impl_ converts to
// the allocator; confirm against the full file.
1586 template <class IIt>
1587 iterator insert(const_iterator cpos, IIt first, IIt last,
1588 std::input_iterator_tag) {
1589 T* position = const_cast<T*>(cpos);
1590 assert(isValid(position));
1591 size_type idx = std::distance(begin(), position);
1593 fbvector storage(std::make_move_iterator(position),
1594 std::make_move_iterator(end()),
1595 A::select_on_container_copy_construction(impl_));
1596 M_destroy_range_e(position);
1597 for (; first != last; ++first) emplace_back(*first);
1598 insert(cend(), std::make_move_iterator(storage.begin()),
1599 std::make_move_iterator(storage.end()));
1600 return impl_.b_ + idx;
1603 //===========================================================================
1604 //---------------------------------------------------------------------------
1605 // lexicographical functions (others from boost::totally_ordered superclass)
// Equality: same size and elementwise equal. Ordering: lexicographic.
// boost::totally_ordered (see class head) derives !=, >, <=, >= from
// these two.
1608 bool operator==(const fbvector& other) const {
1609 return size() == other.size() && std::equal(begin(), end(), other.begin());
1612 bool operator<(const fbvector& other) const {
1613 return std::lexicographical_compare(
1614 begin(), end(), other.begin(), other.end());
1617 //===========================================================================
1618 //---------------------------------------------------------------------------
// Grant the free functions relinquish/attach (defined near the end of
// this file) access to impl_ so they can steal / adopt the buffer.
1622 template <class _T, class _A>
1623 friend _T* relinquish(fbvector<_T, _A>&);
1625 template <class _T, class _A>
1626 friend void attach(fbvector<_T, _A>&, _T* data, size_t sz, size_t cap);
1628 }; // class fbvector
1631 //=============================================================================
1632 //-----------------------------------------------------------------------------
1633 // outlined functions (gcc, you finicky compiler you)
// Slow path of emplace_back/push_back: grow the buffer, then construct
// the new element. First tries jemalloc's rallocm with ALLOCM_NO_MOVE to
// expand the current allocation in place; on failure falls back to a
// fresh allocation plus relocation. NOTE(review): several lines (the
// rallocm argument setup, else branches, cleanup scaffolding, and final
// pointer updates) are missing from this excerpt.
1635 template <typename T, typename Allocator>
1636 template <class... Args>
1637 void fbvector<T, Allocator>::emplace_back_aux(Args&&... args) {
// Target capacity in bytes, rounded up to a size jemalloc serves exactly.
1638 size_type byte_sz = folly::goodMallocSize(
1639 computePushBackCapacity() * sizeof(T));
// In-place expansion is only worthwhile for std-allocated buffers already
// past jemalloc's minimum in-place-expandable size.
1640 if (usingStdAllocator::value
1642 && ((impl_.z_ - impl_.b_) * sizeof(T) >=
1643 folly::jemallocMinInPlaceExpandable)) {
1644 // Try to reserve in place.
1645 // Ask rallocm to allocate in place at least size()+1 and at most sz space.
1646 // rallocm will allocate as much as possible within that range, which
1647 // is the best possible outcome: if sz space is available, take it all,
1648 // otherwise take as much as possible. If nothing is available, then fail.
1649 // In this fashion, we never relocate if there is a possibility of
1650 // expanding in place, and we never relocate by less than the desired
1651 // amount unless we cannot expand further. Hence we will not relocate
1652 // sub-optimally twice in a row (modulo the blocking memory being freed).
1653 size_type lower = folly::goodMallocSize(sizeof(T) + size() * sizeof(T));
1654 size_type upper = byte_sz;
1655 size_type extra = upper - lower;
1661 if (rallocm(&p, &actual, lower, extra, ALLOCM_NO_MOVE)
1662 == ALLOCM_SUCCESS) {
// In-place expansion succeeded: record the new capacity and construct
// the element in the (unmoved) buffer.
1663 impl_.z_ = impl_.b_ + actual / sizeof(T);
1664 M_construct(impl_.e_, std::forward<Args>(args)...);
1670 // Reallocation failed. Perform a manual relocation.
1671 size_type sz = byte_sz / sizeof(T);
1672 auto newB = M_allocate(sz);
1673 auto newE = newB + size();
1675 if (folly::IsRelocatable<T>::value && usingStdAllocator::value) {
1676 // For linear memory access, relocate before construction.
1677 // By the test condition, relocate is noexcept.
1678 // Note that there is no cleanup to do if M_construct throws - that's
1679 // one of the beauties of relocation.
1680 // Benchmarks for this code have high variance, and seem to be close.
1681 relocate_move(newB, impl_.b_, impl_.e_);
1682 M_construct(newE, std::forward<Args>(args)...);
// Non-relocatable path: construct the new element first, then move the
// old elements over (cleanup fragments below destroy the new element and
// free the new buffer if the move throws).
1685 M_construct(newE, std::forward<Args>(args)...);
1690 M_destroy(newE - 1);
1695 M_deallocate(newB, sz);
// Commit: free the old buffer and adopt the new one.
1698 if (impl_.b_) M_deallocate(impl_.b_, size());
1701 impl_.z_ = newB + sz;
1704 //=============================================================================
1705 //-----------------------------------------------------------------------------
1706 // specialized functions
// ADL swap for fbvector. NOTE(review): body not visible in this excerpt;
// presumably delegates to the member swap — confirm against the full file.
1708 template <class T, class A>
1709 void swap(fbvector<T, A>& lhs, fbvector<T, A>& rhs) noexcept {
1713 //=============================================================================
1714 //-----------------------------------------------------------------------------
// Resizes *v to sz elements. NOTE(review): body not visible in this
// excerpt — the "compact" behavior (presumably shrinking the allocation)
// must be confirmed against the full file.
1717 template <class T, class A>
1718 void compactResize(fbvector<T, A>* v, size_t sz) {
1725 // relinquish and attach are not member functions, specifically so that it is
1726 // awkward to call them. It is very easy to shoot yourself in the foot with
1729 // If you call relinquish, then it is your responsibility to free the data
1730 // and the storage, both of which may have been generated in a non-standard
1731 // way through the fbvector's allocator.
1733 // If you call attach, it is your responsibility to ensure that the fbvector
1734 // is fresh (size and capacity both zero), and that the supplied data is
1735 // capable of being manipulated by the allocator.
1736 // It is acceptable to supply a stack pointer IF:
1737 // (1) The vector's data does not outlive the stack pointer. This includes
1738 // extension of the data's life through a move operation.
1739 // (2) The pointer has enough capacity that the vector will never be
1741 // (3) Insert is not called on the vector; these functions have leeway to
1742 // relocate the vector even if there is enough capacity.
1743 // (4) A stack pointer is compatible with the fbvector's allocator.
// Releases ownership of the buffer to the caller, leaving the vector
// empty (all three impl_ pointers nulled). Per the comment block above,
// the caller must free both the elements and the storage through the
// vector's allocator. NOTE(review): the line returning the saved pointer
// is not visible in this excerpt.
1746 template <class T, class A>
1747 T* relinquish(fbvector<T, A>& v) {
1749 v.impl_.b_ = v.impl_.e_ = v.impl_.z_ = nullptr;
// Adopts an externally supplied buffer of sz constructed elements and
// capacity cap; the vector must be fresh (asserted via null data).
// See the caveats in the comment block above. NOTE(review): the
// assignment of v.impl_.b_ = data is not visible in this excerpt.
1753 template <class T, class A>
1754 void attach(fbvector<T, A>& v, T* data, size_t sz, size_t cap) {
1755 assert(v.data() == nullptr);
1757 v.impl_.e_ = data + sz;
1758 v.impl_.z_ = data + cap;
1761 } // namespace folly
1763 #endif // FOLLY_FBVECTOR_H