/*
 * Copyright 2017 Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
17 /* override-include-guard */
19 #error "This should only be included by hazptr.h"
22 /* quality of implementation switches */
24 // NOTE: The #ifndef pattern is prone to ODR violation. Its use for
25 // quality of implementation options is temporary. Eventually these
26 // options should be added to the API in future API extensions.
29 #define HAZPTR_AMB true
33 #define HAZPTR_TC true
36 #ifndef HAZPTR_TC_SIZE
37 #define HAZPTR_TC_SIZE 10
41 #define HAZPTR_PRIV true
44 #ifndef HAZPTR_ONE_DOMAIN
45 #define HAZPTR_ONE_DOMAIN false
48 #ifndef HAZPTR_SCAN_MULT
49 #define HAZPTR_SCAN_MULT 2
52 #ifndef HAZPTR_SCAN_THRESHOLD
53 #define HAZPTR_SCAN_THRESHOLD 1000
58 #define HAZPTR_STATS false
61 #include <folly/concurrency/CacheLocality.h>
62 #include <folly/experimental/AsymmetricMemoryBarrier.h>
63 #include <folly/experimental/hazptr/debug.h>
65 #include <mutex> // for thread caching
66 #include <unordered_set> // for hash set in bulk reclamation
72 * helper classes and functions
// Forward declaration: accessor for the process-wide stats singleton
// (defined near the end of this file).
78 hazptr_stats& hazptr_stats();
// Counts a stats event when stats are enabled; expands to nothing
// otherwise. NOTE(review): the #if HAZPTR_STATS / #else / #endif lines
// that select between these two definitions are missing from this
// extract (embedded line numbers jump 81 -> 83); as shown, the second
// #define would redefine the first.
81 #define INC_HAZPTR_STATS(x) hazptr_stats().x()
83 #define INC_HAZPTR_STATS(x)
95 * Thread caching of hazptr_rec-s that belong to the default domain.
// One slot of the per-thread cache, holding a single hazptr_rec.
// NOTE(review): this extract is incomplete -- embedded line numbers jump
// (100 -> 103, 103 -> 109), so other members (e.g. get()/evict()
// declarations) and the closing brace of the class are not shown.
98 class hazptr_tc_entry {
99 friend class hazptr_tc;
// The cached hazard-pointer record; presumably null when the slot is
// empty -- TODO confirm against the full class definition.
100 hazptr_rec* hprec_{nullptr};
// Stores hprec into this slot (defined near the end of this file).
103 void fill(hazptr_rec* hprec);
// Fixed-size array of cache slots (HAZPTR_TC_SIZE is defined above).
// NOTE(review): the enclosing "class hazptr_tc {" header line is missing
// from this extract.
109 hazptr_tc_entry tc_[HAZPTR_TC_SIZE];
// Returns hprec to the cache; false when the cache is full (definition
// near the end of this file).
116 bool put(hazptr_rec* hprec);
// Accessor for the thread-local cache instance (defined below).
119 hazptr_tc& hazptr_tc();
122 * Thread private lists of retired objects that belong to the default domain.
// Thread-private singly linked list of retired objects, flushed to the
// domain's shared retired list when a threshold is reached (see push /
// pushAllToDomain below). NOTE(review): the class header and several
// members are missing from this extract (embedded line numbers jump
// 122 -> 126 and 128 -> 135).
126 hazptr_domain* domain_{&default_hazptr_domain()};
127 hazptr_obj* head_{nullptr};
128 hazptr_obj* tail_{nullptr};
// Appends obj to this private list (defined near the end of this file).
135 void push(hazptr_obj* obj);
// Hands the entire private list to the domain (defined below).
138 void pushAllToDomain();
// Accessor for the thread-local private list instance (defined below).
141 hazptr_priv& hazptr_priv();
// hazptr_domain constructor: takes the memory_resource used to allocate
// and deallocate hazptr_rec-s (see mr_->allocate/deallocate below).
// NOTE(review): the initializer list / body lines are missing from this
// extract (embedded line numbers jump 149 -> 152).
149 constexpr hazptr_domain::hazptr_domain(memory_resource* mr) noexcept
152 /** hazptr_obj_base */
// Retires this object for deferred reclamation: stores the deleter and a
// type-erasing reclaim function, then pushes the object onto either the
// thread-private list (default-domain fast path) or the given domain's
// shared retired list. NOTE(review): several body lines are missing from
// this extract (numbers jump 161 -> 164): the tail of the reclaim_
// lambda and the full condition guarding the private-push fast path are
// not shown.
154 template <typename T, typename D>
155 inline void hazptr_obj_base<T, D>::retire(hazptr_domain& domain, D deleter) {
156 DEBUG_PRINT(this << " " << &domain);
157 deleter_ = std::move(deleter);
// reclaim_ erases T and D: it casts the generic hazptr_obj back to the
// concrete object so the stored deleter can destroy it later.
158 reclaim_ = [](hazptr_obj* p) {
159 auto hobp = static_cast<hazptr_obj_base*>(p);
160 auto obj = static_cast<T*>(hobp);
// Thread-private retirement is only valid for the default domain (or
// when all domains are collapsed via HAZPTR_ONE_DOMAIN).
164 (HAZPTR_ONE_DOMAIN || (&domain == &default_hazptr_domain()))) {
165 hazptr_priv().push(this);
167 domain.objRetire(this);
// hazptr_rec: one hazard-pointer slot, linked into its domain's global
// list. NOTE(review): the class header ("class hazptr_rec {") and the
// closing brace are missing from this extract; only the members below
// are visible.
174 friend class hazptr_domain;
175 friend class hazptr_holder;
176 friend class hazptr_tc_entry;
// Cache-line alignment to avoid false sharing between adjacent slots.
178 FOLLY_ALIGN_TO_AVOID_FALSE_SHARING
// The pointer currently protected by this slot; scanned by reclaimers in
// bulkReclaim(). Null when nothing is protected.
179 std::atomic<const void*> hazptr_{nullptr};
// Intrusive link in the domain's hazptrs_ list.
180 hazptr_rec* next_{nullptr};
// True while some hazptr_holder (or the thread cache) owns this slot.
181 std::atomic<bool> active_{false};
183 void set(const void* p) noexcept;
184 const void* get() const noexcept;
185 void clear() noexcept;
186 bool isActive() noexcept;
187 bool tryAcquire() noexcept;
188 void release() noexcept;
// hazptr_holder constructors / destructor / move operations.
// Acquires a hazard-pointer slot from the given domain; throws
// std::bad_alloc if acquisition fails. NOTE(review): the line assigning
// domain_ from the parameter is missing from this extract (numbers jump
// 193 -> 195).
193 FOLLY_ALWAYS_INLINE hazptr_holder::hazptr_holder(hazptr_domain& domain) {
195 hazptr_ = domain_->hazptrAcquire();
196 DEBUG_PRINT(this << " " << domain_ << " " << hazptr_);
197 if (hazptr_ == nullptr) { std::bad_alloc e; throw e; }
// Constructs an empty holder that owns no slot. NOTE(review): the body
// lines that null out domain_/hazptr_ are missing from this extract
// (numbers jump 200 -> 203).
200 inline hazptr_holder::hazptr_holder(std::nullptr_t) {
203 DEBUG_PRINT(this << " " << domain_ << " " << hazptr_);
// Returns the slot to the domain. NOTE(review): the guard for an empty
// (e.g. moved-from) holder is missing from this extract (206 -> 210).
206 FOLLY_ALWAYS_INLINE hazptr_holder::~hazptr_holder() {
210 domain_->hazptrRelease(hazptr_);
// Move construction transfers slot ownership and empties the source so
// its destructor releases nothing.
214 inline hazptr_holder::hazptr_holder(hazptr_holder&& rhs) noexcept {
215 domain_ = rhs.domain_;
216 hazptr_ = rhs.hazptr_;
217 rhs.domain_ = nullptr;
218 rhs.hazptr_ = nullptr;
// Move assignment implemented as destroy + placement-new move-construct.
// NOTE(review): the self-move check (if (this != &rhs)) implied by the
// comment is missing from this extract (numbers jump 222 -> 224); as
// shown, these two lines would run unconditionally.
221 inline hazptr_holder& hazptr_holder::operator=(hazptr_holder&& rhs) noexcept {
222 /* Self-move is a no-op. */
224 this->~hazptr_holder();
225 new (this) hazptr_holder(std::move(rhs));
// try_protect(ptr, src): convenience overload using the identity functor.
230 template <typename T>
231 FOLLY_ALWAYS_INLINE bool hazptr_holder::try_protect(
233 const std::atomic<T*>& src) noexcept {
234 return try_protect(ptr, src, [](T* t) { return t; });
// try_protect(ptr, src, f): publishes the pointer in the hazard slot and
// re-reads src to confirm it is still current. NOTE(review): several
// lines are missing from this extract (the full parameter list, the
// store into the hazard slot, and the success/failure returns).
237 template <typename T, typename Func>
238 FOLLY_ALWAYS_INLINE bool hazptr_holder::try_protect(
240 const std::atomic<T*>& src,
242 DEBUG_PRINT(this << " " << ptr << " " << &src);
// Reader-side light barrier; pairs with hazptr_mb::heavy() issued in
// bulkReclaim() (asymmetric fencing -- see hazptr_mb below).
244 /*** Full fence ***/ hazptr_mb::light();
245 T* p = src.load(std::memory_order_acquire);
// get_protected(src): identity-functor overload.
254 template <typename T>
255 FOLLY_ALWAYS_INLINE T* hazptr_holder::get_protected(
256 const std::atomic<T*>& src) noexcept {
257 return get_protected(src, [](T* t) { return t; });
// get_protected(src, f): retries try_protect until the loaded pointer is
// stably protected. NOTE(review): the loop body and the return statement
// are missing from this extract (numbers jump 265 -> 267).
260 template <typename T, typename Func>
261 FOLLY_ALWAYS_INLINE T* hazptr_holder::get_protected(
262 const std::atomic<T*>& src,
264 T* p = src.load(std::memory_order_relaxed);
265 while (!try_protect(p, src, f)) {
267 DEBUG_PRINT(this << " " << p << " " << &src);
// reset(ptr): points this holder's hazard slot at ptr. NOTE(review): the
// hazptr_->set(p) call is missing from this extract (275 -> 279 jump).
271 template <typename T>
272 FOLLY_ALWAYS_INLINE void hazptr_holder::reset(const T* ptr) noexcept {
273 auto p = static_cast<hazptr_obj*>(const_cast<T*>(ptr));
274 DEBUG_PRINT(this << " " << ptr << " p:" << p);
275 DCHECK(hazptr_); // UB if *this is empty
// reset(nullptr): clears the hazard slot. NOTE(review): body lines are
// missing from this extract.
279 FOLLY_ALWAYS_INLINE void hazptr_holder::reset(std::nullptr_t) noexcept {
281 DCHECK(hazptr_); // UB if *this is empty
// swap: exchanges hazard slots; domains are swapped only when multiple
// domains are possible (with HAZPTR_ONE_DOMAIN both are known equal).
285 FOLLY_ALWAYS_INLINE void hazptr_holder::swap(hazptr_holder& rhs) noexcept {
287 this << " " << this->hazptr_ << " " << this->domain_ << " -- "
288 << &rhs << " " << rhs.hazptr_ << " " << rhs.domain_);
289 if (!HAZPTR_ONE_DOMAIN) {
290 std::swap(this->domain_, rhs.domain_);
292 std::swap(this->hazptr_, rhs.hazptr_);
// Free-function swap. NOTE(review): its body (presumably lhs.swap(rhs))
// is missing from this extract -- confirm against the full file.
295 FOLLY_ALWAYS_INLINE void swap(hazptr_holder& lhs, hazptr_holder& rhs) noexcept {
299 ////////////////////////////////////////////////////////////////////////////////
301 // - Control of reclamation (when and by whom)
302 // - End-to-end lock-free implementation
304 /** Definition of default_hazptr_domain() */
// Returns the process-wide default domain (function-local static
// singleton). NOTE(review): the return statement and closing brace are
// missing from this extract.
306 inline hazptr_domain& default_hazptr_domain() {
307 static hazptr_domain d;
// hazptr_rec member-function definitions. NOTE(review): throughout this
// section the closing braces / return statements are missing from this
// extract (embedded line numbers skip values).
// Publish p as protected; release order pairs with reclaimers' acquire
// loads when scanning slots.
314 inline void hazptr_rec::set(const void* p) noexcept {
315 DEBUG_PRINT(this << " " << p);
316 hazptr_.store(p, std::memory_order_release);
// Read the currently protected pointer.
319 inline const void* hazptr_rec::get() const noexcept {
320 auto p = hazptr_.load(std::memory_order_acquire);
321 DEBUG_PRINT(this << " " << p);
// Stop protecting anything.
325 inline void hazptr_rec::clear() noexcept {
327 hazptr_.store(nullptr, std::memory_order_release);
// True if some holder currently owns this slot.
330 inline bool hazptr_rec::isActive() noexcept {
331 return active_.load(std::memory_order_acquire);
// Claim an inactive slot; the strong CAS makes the claim exclusive.
// NOTE(review): the surrounding condition and the return paths are
// missing from this extract (335 -> 337 jump).
334 inline bool hazptr_rec::tryAcquire() noexcept {
335 bool active = isActive();
337 active_.compare_exchange_strong(
338 active, true, std::memory_order_release, std::memory_order_relaxed)) {
// Return the slot to the domain's pool of inactive slots.
345 inline void hazptr_rec::release() noexcept {
347 active_.store(false, std::memory_order_release);
// Returns the address that bulkReclaim() compares hazard pointers
// against. NOTE(review): the body is missing from this extract.
352 inline const void* hazptr_obj::getObjPtr() const {
// Destructor: reclaims every remaining retired object, then frees every
// hazptr_rec through the domain's memory resource. NOTE(review): several
// loop-body lines are missing from this extract (the capture of the next
// pointer and the per-object reclaim call).
359 inline hazptr_domain::~hazptr_domain() {
361 { /* reclaim all remaining retired objects */
363 auto retired = retired_.exchange(nullptr);
365 for (auto p = retired; p; p = next) {
// Re-read: reclaiming objects may itself retire further objects.
369 retired = retired_.exchange(nullptr);
372 { /* free all hazptr_rec-s */
374 for (auto p = hazptrs_.load(std::memory_order_acquire); p; p = next) {
// No holder may still own a slot when the domain is destroyed.
376 DCHECK(!p->isActive());
377 mr_->deallocate(static_cast<void*>(p), sizeof(hazptr_rec));
// Acquires a hazard-pointer slot: the thread cache first (default-domain
// fast path), then a scan of existing slots for an inactive one, and
// finally allocation of a fresh slot pushed onto the lock-free hazptrs_
// list. NOTE(review): multiple lines are missing from this extract
// (early returns, null checks, loop bodies); only the overall shape is
// visible.
382 FOLLY_ALWAYS_INLINE hazptr_rec* hazptr_domain::hazptrAcquire() {
383 if (HAZPTR_TC && (HAZPTR_ONE_DOMAIN || this == &default_hazptr_domain())) {
384 auto hprec = hazptr_tc().get();
// Try to reuse an inactive slot already on the domain's list.
391 for (p = hazptrs_.load(std::memory_order_acquire); p; p = next) {
393 if (p->tryAcquire()) {
// None available: allocate a fresh slot from the memory resource.
397 p = static_cast<hazptr_rec*>(mr_->allocate(sizeof(hazptr_rec)));
// relaxed suffices here: the slot has not been published yet.
401 p->active_.store(true, std::memory_order_relaxed);
// Lock-free push of the new slot onto the head of hazptrs_.
402 p->next_ = hazptrs_.load(std::memory_order_acquire);
403 while (!hazptrs_.compare_exchange_weak(
404 p->next_, p, std::memory_order_release, std::memory_order_acquire))
406 auto hcount = hcount_.fetch_add(1);
407 DEBUG_PRINT(this << " " << p << " " << sizeof(hazptr_rec) << " " << hcount);
// Releases a slot: on the default-domain fast path the thread cache may
// keep it (put() returning true); otherwise it is released back to the
// domain. NOTE(review): the fallback release call after the if is
// missing from this extract (413 -> 416 jump).
411 FOLLY_ALWAYS_INLINE void hazptr_domain::hazptrRelease(hazptr_rec* p) noexcept {
412 if (HAZPTR_TC && (HAZPTR_ONE_DOMAIN || this == &default_hazptr_domain()) &&
413 hazptr_tc().put(p)) {
416 DEBUG_PRINT(this << " " << p);
// Splices the list [head, tail] onto the shared retired_ list with a
// lock-free CAS loop and returns the updated retired count.
// NOTE(review): the return-type line and the first CAS arguments are
// missing from this extract (numbers jump 424 -> 427).
421 hazptr_domain::pushRetired(hazptr_obj* head, hazptr_obj* tail, int count) {
// Pairs with hazptr_mb::heavy() in bulkReclaim().
422 /*** Full fence ***/ hazptr_mb::light();
423 tail->next_ = retired_.load(std::memory_order_acquire);
424 while (!retired_.compare_exchange_weak(
427 std::memory_order_release,
428 std::memory_order_acquire)) {
430 return rcount_.fetch_add(count) + count;
// Reclamation triggers once the retired count passes both the fixed
// threshold and a multiple of the number of hazard pointers, amortizing
// scan cost. NOTE(review): the "return (" line and closing brace are
// missing from this extract.
433 inline bool hazptr_domain::reachedThreshold(int rcount) {
435 rcount >= HAZPTR_SCAN_THRESHOLD &&
436 rcount >= HAZPTR_SCAN_MULT * hcount_.load(std::memory_order_acquire));
// Retires a single object onto the shared list and triggers bulk
// reclamation when the threshold is reached. NOTE(review): the call
// inside the if (presumably tryBulkReclaim()) is missing from this
// extract.
439 inline void hazptr_domain::objRetire(hazptr_obj* p) {
440 auto rcount = pushRetired(p, p, 1);
441 if (reachedThreshold(rcount)) {
// Attempts to claim the current retired batch: only the thread that
// successfully CASes rcount_ to 0 proceeds to bulkReclaim().
// NOTE(review): the enclosing loop, the bulkReclaim() call, and returns
// are missing from this extract.
446 inline void hazptr_domain::tryBulkReclaim() {
449 auto hcount = hcount_.load(std::memory_order_acquire);
450 auto rcount = rcount_.load(std::memory_order_acquire);
// Re-check the threshold; another thread may already have reclaimed.
451 if (rcount < HAZPTR_SCAN_THRESHOLD || rcount < HAZPTR_SCAN_MULT * hcount) {
// Winning this CAS grants exclusive right to reclaim this batch.
454 if (rcount_.compare_exchange_weak(
455 rcount, 0, std::memory_order_release, std::memory_order_relaxed)) {
// Snapshot all hazard pointers into a hash set, then walk the captured
// retired list: unprotected objects are reclaimed, still-protected ones
// are relinked and pushed back to the shared list. NOTE(review): many
// lines are missing from this extract (the hs.insert call, the reclaim_
// invocation, list relinking, and the rcount bookkeeping).
462 inline void hazptr_domain::bulkReclaim() {
// Heavy barrier pairs with the light barriers in try_protect and
// pushRetired (asymmetric fencing).
464 /*** Full fence ***/ hazptr_mb::heavy();
465 auto p = retired_.exchange(nullptr, std::memory_order_acquire);
466 auto h = hazptrs_.load(std::memory_order_acquire);
467 std::unordered_set<const void*> hs; // TODO lock-free alternative
468 for (; h; h = h->next_) {
// Rebuild a list of objects that are still protected by some hazptr.
472 hazptr_obj* retired = nullptr;
473 hazptr_obj* tail = nullptr;
475 for (; p; p = next) {
// Not found in any hazard slot => safe to reclaim.
477 if (hs.count(p->getObjPtr()) == 0) {
478 DEBUG_PRINT(this << " " << p << " " << p->reclaim_);
483 if (tail == nullptr) {
// Return the still-protected objects to the shared retired list.
490 pushRetired(retired, tail, rcount);
// hazptr_stats: barrier-event counters (enabled via HAZPTR_STATS).
// NOTE(review): the class header, member-function declarations, closing
// brace, and the #if HAZPTR_STATS guards around the increments are
// missing from this extract; only the fragments below are visible.
504 std::atomic<uint64_t> light_{0};
505 std::atomic<uint64_t> heavy_{0};
506 std::atomic<uint64_t> seq_cst_{0};
// Dump the counters when the singleton is destroyed at process exit.
509 inline hazptr_stats::~hazptr_stats() {
510 DEBUG_PRINT(this << " light " << light_.load());
511 DEBUG_PRINT(this << " heavy " << heavy_.load());
512 DEBUG_PRINT(this << " seq_cst " << seq_cst_.load());
// Each event is a single atomic increment (operator++ on std::atomic).
515 FOLLY_ALWAYS_INLINE void hazptr_stats::light() {
517 /* atomic */ ++light_;
521 inline void hazptr_stats::heavy() {
523 /* atomic */ ++heavy_;
527 inline void hazptr_stats::seq_cst() {
529 /* atomic */ ++seq_cst_;
// Singleton accessor; the "class" keyword disambiguates the type from
// this same-named function.
533 inline class hazptr_stats& hazptr_stats() {
534 static class hazptr_stats stats_;
535 DEBUG_PRINT(&stats_);
// hazptr_mb: asymmetric memory barriers. light() is the cheap
// reader-side fence (used in try_protect/pushRetired); heavy() is the
// expensive reclaimer-side fence (used in bulkReclaim). NOTE(review):
// the #if HAZPTR_AMB / #else / #endif lines that select between the
// asymmetric barriers and the plain seq_cst fences are missing from this
// extract; as shown, each function body would contain both variants.
541 inline void hazptr_mb::light() {
544 folly::asymmetricLightBarrier();
545 INC_HAZPTR_STATS(light);
547 atomic_thread_fence(std::memory_order_seq_cst);
548 INC_HAZPTR_STATS(seq_cst);
552 inline void hazptr_mb::heavy() {
555 folly::asymmetricHeavyBarrier(AMBFlags::EXPEDITED);
556 INC_HAZPTR_STATS(heavy);
558 atomic_thread_fence(std::memory_order_seq_cst);
559 INC_HAZPTR_STATS(seq_cst);
563 /** hazptr_tc - functions */
// Store a released hazptr_rec in this cache slot. NOTE(review): the
// assignment to hprec_ is missing from this extract (565 -> 567 jump).
565 inline void hazptr_tc_entry::fill(hazptr_rec* hprec) {
567 DEBUG_PRINT(this << " " << hprec);
// Take the cached record out of this slot. NOTE(review): body lines and
// the return are missing from this extract.
570 inline hazptr_rec* hazptr_tc_entry::get() {
573 DEBUG_PRINT(this << " " << hprec);
// Release the cached record back to its domain. NOTE(review): body lines
// are missing from this extract.
577 inline void hazptr_tc_entry::evict() {
581 DEBUG_PRINT(this << " " << hprec);
// Construct the thread cache. NOTE(review): the body is not shown here.
584 inline hazptr_tc::hazptr_tc() {
// Evict all cached records on thread exit.
588 inline hazptr_tc::~hazptr_tc() {
590 for (int i = 0; i < count_; ++i) {
// Pop a cached record if any; presumably returns nullptr when the cache
// is empty -- the count_ guard and return statements are missing from
// this extract.
595 inline hazptr_rec* hazptr_tc::get() {
597 auto hprec = tc_[--count_].get();
598 DEBUG_PRINT(this << " " << hprec);
601 DEBUG_PRINT(this << " nullptr");
// Cache the released record if a slot is free; returns false when the
// cache is full so the caller releases it to the domain instead.
// NOTE(review): the return statements are missing from this extract
// (608 -> 614 jump).
605 inline bool hazptr_tc::put(hazptr_rec* hprec) {
606 if (count_ < HAZPTR_TC_SIZE) {
607 tc_[count_++].fill(hprec);
608 DEBUG_PRINT(this << " " << count_ - 1);
// Thread-local cache singleton; "class" disambiguates type vs function.
// NOTE(review): the return statement and closing brace are not shown.
614 inline class hazptr_tc& hazptr_tc() {
615 static thread_local class hazptr_tc tc;
620 /** hazptr_priv - functions */
// Construct an empty private retired list. NOTE(review): the body is not
// shown in this extract.
622 inline hazptr_priv::hazptr_priv() {
// Flush any remaining retired objects to the domain on thread exit.
// NOTE(review): the body lines are missing from this extract.
626 inline hazptr_priv::~hazptr_priv() {
// Append obj to the thread-private list, falling back to direct domain
// retirement in some path, and flush to the domain once the threshold is
// reached. NOTE(review): the list-linking lines and the branch structure
// between the visible statements are missing from this extract (numbers
// jump 636 -> 641 -> 648), so the exact conditions cannot be read here.
635 inline void hazptr_priv::push(hazptr_obj* obj) {
636 obj->next_ = nullptr;
641 default_hazptr_domain().objRetire(obj);
648 if (domain_->reachedThreshold(rcount_)) {
// Hand the entire private list to the domain's shared retired list, then
// attempt bulk reclamation. NOTE(review): the head_/tail_/rcount_ reset
// lines are missing from this extract (654 -> 658 jump).
653 inline void hazptr_priv::pushAllToDomain() {
654 domain_->pushRetired(head_, tail_, rcount_);
658 domain_->tryBulkReclaim();
// Thread-local private-list singleton; "class" disambiguates type vs
// function. NOTE(review): the return statement is not shown.
661 inline class hazptr_priv& hazptr_priv() {
662 static thread_local class hazptr_priv priv;
668 } // namespace hazptr