/*
 * Copyright 2017 Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include <array>
#include <atomic>
#include <memory>

#include <folly/Enumerate.h>
#include <folly/concurrency/AtomicSharedPtr.h>
#include <folly/concurrency/CacheLocality.h>
#include <folly/experimental/hazptr/hazptr.h>
/**
 * This class creates core-local caches for a given shared_ptr, to
 * mitigate contention when acquiring/releasing it.
 *
 * It has the same thread-safety guarantees as shared_ptr: it is safe
 * to concurrently call get(), but reset()s must be synchronized with
 * reads and other reset()s.
 *
 * @author Giuseppe Ottaviano <ott@fb.com>
 */
39 template <class T, size_t kNumSlots = 64>
40 class CoreCachedSharedPtr {
42 explicit CoreCachedSharedPtr(const std::shared_ptr<T>& p = nullptr) {
46 void reset(const std::shared_ptr<T>& p = nullptr) {
47 // Allocate each Holder in a different CoreAllocator stripe to
48 // prevent false sharing. Their control blocks will be adjacent
49 // thanks to allocate_shared().
50 for (auto slot : folly::enumerate(slots_)) {
51 auto alloc = getCoreAllocatorStl<Holder, kNumSlots>(slot.index);
52 auto holder = std::allocate_shared<Holder>(alloc, p);
53 *slot = std::shared_ptr<T>(holder, p.get());
57 std::shared_ptr<T> get() const {
58 return slots_[AccessSpreader<>::current(kNumSlots)];
62 using Holder = std::shared_ptr<T>;
64 template <class, size_t>
65 friend class CoreCachedWeakPtr;
67 std::array<std::shared_ptr<T>, kNumSlots> slots_;
70 template <class T, size_t kNumSlots = 64>
71 class CoreCachedWeakPtr {
73 explicit CoreCachedWeakPtr(const CoreCachedSharedPtr<T, kNumSlots>& p) {
74 for (auto slot : folly::enumerate(slots_)) {
75 *slot = p.slots_[slot.index];
79 std::weak_ptr<T> get() const {
80 return slots_[AccessSpreader<>::current(kNumSlots)];
84 std::array<std::weak_ptr<T>, kNumSlots> slots_;
/**
 * This class creates core-local caches for a given shared_ptr, to
 * mitigate contention when acquiring/releasing it.
 *
 * All methods are threadsafe. Hazard pointers are used to avoid
 * use-after-free for concurrent reset() and get() operations.
 *
 * Concurrent reset()s are sequenced with respect to each other: the
 * sharded shared_ptrs will always all be set to the same value.
 * get()s will never see a newer pointer on one core, and an older
 * pointer on another after a subsequent thread migration.
 */
99 template <class T, size_t kNumSlots = 64>
100 class AtomicCoreCachedSharedPtr {
102 explicit AtomicCoreCachedSharedPtr(const std::shared_ptr<T>& p = nullptr) {
106 ~AtomicCoreCachedSharedPtr() {
107 auto slots = slots_.load(std::memory_order_acquire);
108 // Delete of AtomicCoreCachedSharedPtr must be synchronized, no
109 // need for stlots->retire().
115 void reset(const std::shared_ptr<T>& p = nullptr) {
116 auto newslots = folly::make_unique<Slots>();
117 // Allocate each Holder in a different CoreAllocator stripe to
118 // prevent false sharing. Their control blocks will be adjacent
119 // thanks to allocate_shared().
120 for (auto slot : folly::enumerate(newslots->slots_)) {
121 auto alloc = getCoreAllocatorStl<Holder, kNumSlots>(slot.index);
122 auto holder = std::allocate_shared<Holder>(alloc, p);
123 *slot = std::shared_ptr<T>(holder, p.get());
126 auto oldslots = slots_.exchange(newslots.release());
132 std::shared_ptr<T> get() const {
133 folly::hazptr::hazptr_holder hazptr;
134 auto slots = hazptr.get_protected(slots_);
138 return (slots->slots_)[AccessSpreader<>::current(kNumSlots)];
142 using Holder = std::shared_ptr<T>;
143 struct Slots : folly::hazptr::hazptr_obj_base<Slots> {
144 std::array<std::shared_ptr<T>, kNumSlots> slots_;
146 std::atomic<Slots*> slots_{nullptr};