/*
 * Copyright 2016-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
18 #error "This should only be included by hazptr.h"
21 /* quality of implementation switches */
23 // NOTE: The #ifndef pattern is prone to ODR violation. Its use for
24 // quality of implementation options is temporary. Eventually these
25 // options should be added to the API in future API extensions.
28 #define HAZPTR_AMB true
32 #define HAZPTR_TC true
35 #ifndef HAZPTR_TC_SIZE
36 #define HAZPTR_TC_SIZE 10
40 #define HAZPTR_PRIV true
43 #ifndef HAZPTR_PRIV_THRESHOLD
44 #define HAZPTR_PRIV_THRESHOLD 20
47 #ifndef HAZPTR_ONE_DOMAIN
48 #define HAZPTR_ONE_DOMAIN false
51 #ifndef HAZPTR_SCAN_MULT
52 #define HAZPTR_SCAN_MULT 2
55 #ifndef HAZPTR_SCAN_THRESHOLD
56 #define HAZPTR_SCAN_THRESHOLD 1000
61 #define HAZPTR_STATS false
64 #include <folly/concurrency/CacheLocality.h>
65 #include <folly/experimental/hazptr/debug.h>
66 #include <folly/synchronization/AsymmetricMemoryBarrier.h>
68 #include <mutex> // for thread caching
69 #include <unordered_set> // for hash set in bulk reclamation
75 * Helper classes and functions
83 #define INC_HAZPTR_STATS(x) hazptr_stats_.x()
85 #define INC_HAZPTR_STATS(x)
100 /** TLS life state */
102 enum hazptr_tls_state { TLS_ALIVE, TLS_UNINITIALIZED, TLS_DESTROYED };
104 /** hazptr_tc structures
105 * Thread caching of hazptr_rec-s that belong to the default domain.
108 struct hazptr_tc_entry {
111 void fill(hazptr_rec* hprec);
117 std::is_trivial<hazptr_tc_entry>::value,
118 "hazptr_tc_entry must be trivial"
119 " to avoid a branch to check initialization");
122 hazptr_tc_entry entry_[HAZPTR_TC_SIZE];
124 bool local_; // for debug mode only
127 hazptr_tc_entry& operator[](size_t i);
129 bool put(hazptr_rec* hprec);
134 std::is_trivial<hazptr_tc>::value,
135 "hazptr_tc must be trivial to avoid a branch to check initialization");
137 hazptr_tc* hazptr_tc_tls();
138 void hazptr_tc_init();
139 void hazptr_tc_shutdown();
140 hazptr_rec* hazptr_tc_try_get();
141 bool hazptr_tc_try_put(hazptr_rec* hprec);
143 /** hazptr_priv structures
144 * Thread private lists of retired objects that belong to the default domain.
153 void push(hazptr_obj* obj);
154 void pushAllToDomain();
158 std::is_trivial<hazptr_priv>::value,
159 "hazptr_priv must be trivial to avoid a branch to check initialization");
161 void hazptr_priv_init();
162 void hazptr_priv_shutdown();
163 bool hazptr_priv_try_retire(hazptr_obj* obj);
165 /** hazptr_tls_life */
167 struct hazptr_tls_life {
172 void tls_life_odr_use();
176 extern thread_local hazptr_tls_state tls_state_;
177 extern thread_local hazptr_tc tls_tc_data_;
178 extern thread_local hazptr_priv tls_priv_data_;
179 extern thread_local hazptr_tls_life tls_life_; // last
185 inline constexpr hazptr_domain::hazptr_domain(memory_resource* mr) noexcept
192 template <typename T, typename D>
193 inline void hazptr_obj_base<T, D>::retire(hazptr_domain& domain, D deleter) {
194 DEBUG_PRINT(this << " " << &domain);
195 deleter_ = std::move(deleter);
196 reclaim_ = [](hazptr_obj* p) {
197 auto hobp = static_cast<hazptr_obj_base*>(p);
198 auto obj = static_cast<T*>(hobp);
202 (HAZPTR_ONE_DOMAIN || (&domain == &default_hazptr_domain()))) {
203 if (hazptr_priv_try_retire(this)) {
207 domain.objRetire(this);
211 * hazptr_obj_base_refcounted
214 template <typename T, typename D>
215 inline void hazptr_obj_base_refcounted<T, D>::retire(
216 hazptr_domain& domain,
218 DEBUG_PRINT(this << " " << &domain);
221 (HAZPTR_ONE_DOMAIN || (&domain == &default_hazptr_domain()))) {
222 if (hazptr_priv_try_retire(this)) {
226 domain.objRetire(this);
229 template <typename T, typename D>
230 inline void hazptr_obj_base_refcounted<T, D>::acquire_ref() {
232 auto oldval = refcount_.fetch_add(1);
236 template <typename T, typename D>
237 inline void hazptr_obj_base_refcounted<T, D>::acquire_ref_safe() {
239 auto oldval = refcount_.load(std::memory_order_acquire);
241 refcount_.store(oldval + 1, std::memory_order_release);
244 template <typename T, typename D>
245 inline bool hazptr_obj_base_refcounted<T, D>::release_ref() {
247 auto oldval = refcount_.load(std::memory_order_acquire);
249 oldval = refcount_.fetch_sub(1);
255 DEBUG_PRINT(this << " " << oldval);
260 template <typename T, typename D>
261 inline void hazptr_obj_base_refcounted<T, D>::preRetire(D deleter) {
262 DCHECK(next_ == nullptr);
263 deleter_ = std::move(deleter);
264 reclaim_ = [](hazptr_obj* p) {
265 auto hrobp = static_cast<hazptr_obj_base_refcounted*>(p);
266 if (hrobp->release_ref()) {
267 auto obj = static_cast<T*>(hrobp);
268 hrobp->deleter_(obj);
277 class alignas(hardware_destructive_interference_size) hazptr_rec {
278 friend class hazptr_domain;
279 friend class hazptr_holder;
280 friend struct hazptr_tc_entry;
282 std::atomic<const void*> hazptr_{nullptr};
283 hazptr_rec* next_{nullptr};
284 std::atomic<bool> active_{false};
286 void set(const void* p) noexcept;
287 const void* get() const noexcept;
288 void clear() noexcept;
289 bool isActive() noexcept;
290 bool tryAcquire() noexcept;
291 void release() noexcept;
298 FOLLY_ALWAYS_INLINE hazptr_holder::hazptr_holder(hazptr_domain& domain) {
302 (HAZPTR_ONE_DOMAIN || &domain == &default_hazptr_domain()))) {
303 auto hprec = hazptr_tc_try_get();
304 if (LIKELY(hprec != nullptr)) {
306 DEBUG_PRINT(this << " " << domain_ << " " << hazptr_);
310 hazptr_ = domain_->hazptrAcquire();
311 DEBUG_PRINT(this << " " << domain_ << " " << hazptr_);
312 if (hazptr_ == nullptr) { std::bad_alloc e; throw e; }
315 FOLLY_ALWAYS_INLINE hazptr_holder::hazptr_holder(std::nullptr_t) noexcept {
318 DEBUG_PRINT(this << " " << domain_ << " " << hazptr_);
321 FOLLY_ALWAYS_INLINE hazptr_holder::~hazptr_holder() {
323 if (LIKELY(hazptr_ != nullptr)) {
327 (HAZPTR_ONE_DOMAIN || domain_ == &default_hazptr_domain()))) {
328 if (LIKELY(hazptr_tc_try_put(hazptr_))) {
332 domain_->hazptrRelease(hazptr_);
336 FOLLY_ALWAYS_INLINE hazptr_holder::hazptr_holder(hazptr_holder&& rhs) noexcept {
337 domain_ = rhs.domain_;
338 hazptr_ = rhs.hazptr_;
339 rhs.domain_ = nullptr;
340 rhs.hazptr_ = nullptr;
344 hazptr_holder& hazptr_holder::operator=(hazptr_holder&& rhs) noexcept {
345 /* Self-move is a no-op. */
346 if (LIKELY(this != &rhs)) {
347 this->~hazptr_holder();
348 new (this) hazptr_holder(std::move(rhs));
353 template <typename T>
354 FOLLY_ALWAYS_INLINE bool hazptr_holder::try_protect(
356 const std::atomic<T*>& src) noexcept {
357 return try_protect(ptr, src, [](T* t) { return t; });
360 template <typename T, typename Func>
361 FOLLY_ALWAYS_INLINE bool hazptr_holder::try_protect(
363 const std::atomic<T*>& src,
365 DEBUG_PRINT(this << " " << ptr << " " << &src);
367 /*** Full fence ***/ hazptr_mb::light();
368 T* p = src.load(std::memory_order_acquire);
369 if (UNLIKELY(p != ptr)) {
377 template <typename T>
378 FOLLY_ALWAYS_INLINE T* hazptr_holder::get_protected(
379 const std::atomic<T*>& src) noexcept {
380 return get_protected(src, [](T* t) { return t; });
383 template <typename T, typename Func>
384 FOLLY_ALWAYS_INLINE T* hazptr_holder::get_protected(
385 const std::atomic<T*>& src,
387 T* p = src.load(std::memory_order_relaxed);
388 while (!try_protect(p, src, f)) {
390 DEBUG_PRINT(this << " " << p << " " << &src);
394 template <typename T>
395 FOLLY_ALWAYS_INLINE void hazptr_holder::reset(const T* ptr) noexcept {
396 auto p = static_cast<hazptr_obj*>(const_cast<T*>(ptr));
397 DEBUG_PRINT(this << " " << ptr << " p:" << p);
398 DCHECK(hazptr_); // UB if *this is empty
402 FOLLY_ALWAYS_INLINE void hazptr_holder::reset(std::nullptr_t) noexcept {
404 DCHECK(hazptr_); // UB if *this is empty
408 FOLLY_ALWAYS_INLINE void hazptr_holder::swap(hazptr_holder& rhs) noexcept {
410 this << " " << this->hazptr_ << " " << this->domain_ << " -- "
411 << &rhs << " " << rhs.hazptr_ << " " << rhs.domain_);
412 if (!HAZPTR_ONE_DOMAIN) {
413 std::swap(this->domain_, rhs.domain_);
415 std::swap(this->hazptr_, rhs.hazptr_);
418 FOLLY_ALWAYS_INLINE void swap(hazptr_holder& lhs, hazptr_holder& rhs) noexcept {
427 FOLLY_ALWAYS_INLINE hazptr_array<M>::hazptr_array() {
428 auto h = reinterpret_cast<hazptr_holder*>(&raw_);
430 auto ptc = hazptr_tc_tls();
431 if (LIKELY(ptc != nullptr)) {
433 auto count = tc.count();
435 size_t offset = count - M;
436 for (size_t i = 0; i < M; ++i) {
437 auto hprec = tc[offset + i].hprec_;
438 DCHECK(hprec != nullptr);
439 DEBUG_PRINT(i << " " << &h[i]);
440 new (&h[i]) hazptr_holder(nullptr);
441 h[i].hazptr_ = hprec;
443 i << " " << &h[i] << " " << h[i].domain_ << " " << h[i].hazptr_);
451 for (size_t i = 0; i < M; ++i) {
452 new (&h[i]) hazptr_holder;
454 i << " " << &h[i] << " " << h[i].domain_ << " " << h[i].hazptr_);
459 FOLLY_ALWAYS_INLINE hazptr_array<M>::hazptr_array(
460 hazptr_array&& other) noexcept {
461 DEBUG_PRINT(this << " " << M << " " << &other);
462 auto h = reinterpret_cast<hazptr_holder*>(&raw_);
463 auto hother = reinterpret_cast<hazptr_holder*>(&other.raw_);
464 for (size_t i = 0; i < M; ++i) {
465 new (&h[i]) hazptr_holder(std::move(hother[i]));
466 DEBUG_PRINT(i << " " << &h[i] << " " << &hother[i]);
468 empty_ = other.empty_;
473 FOLLY_ALWAYS_INLINE hazptr_array<M>::hazptr_array(std::nullptr_t) noexcept {
474 DEBUG_PRINT(this << " " << M);
475 auto h = reinterpret_cast<hazptr_holder*>(&raw_);
476 for (size_t i = 0; i < M; ++i) {
477 new (&h[i]) hazptr_holder(nullptr);
478 DEBUG_PRINT(i << " " << &h[i]);
484 FOLLY_ALWAYS_INLINE hazptr_array<M>::~hazptr_array() {
488 auto h = reinterpret_cast<hazptr_holder*>(&raw_);
490 auto ptc = hazptr_tc_tls();
491 if (LIKELY(ptc != nullptr)) {
493 auto count = tc.count();
494 if ((M <= HAZPTR_TC_SIZE) && (count + M <= HAZPTR_TC_SIZE)) {
495 for (size_t i = 0; i < M; ++i) {
496 tc[count + i].hprec_ = h[i].hazptr_;
497 DEBUG_PRINT(i << " " << &h[i]);
498 new (&h[i]) hazptr_holder(nullptr);
500 i << " " << &h[i] << " " << h[i].domain_ << " " << h[i].hazptr_);
502 tc.count_ = count + M;
508 for (size_t i = 0; i < M; ++i) {
509 h[i].~hazptr_holder();
514 FOLLY_ALWAYS_INLINE hazptr_array<M>& hazptr_array<M>::operator=(
515 hazptr_array&& other) noexcept {
516 DEBUG_PRINT(this << " " << M << " " << &other);
517 auto h = reinterpret_cast<hazptr_holder*>(&raw_);
518 for (size_t i = 0; i < M; ++i) {
519 h[i] = std::move(other[i]);
520 DEBUG_PRINT(i << " " << &h[i] << " " << &other[i]);
522 empty_ = other.empty_;
528 FOLLY_ALWAYS_INLINE hazptr_holder& hazptr_array<M>::operator[](
530 auto h = reinterpret_cast<hazptr_holder*>(&raw_);
540 FOLLY_ALWAYS_INLINE hazptr_local<M>::hazptr_local() {
541 auto h = reinterpret_cast<hazptr_holder*>(&raw_);
543 auto ptc = hazptr_tc_tls();
544 if (LIKELY(ptc != nullptr)) {
546 auto count = tc.count();
553 for (size_t i = 0; i < M; ++i) {
554 auto hprec = tc[i].hprec_;
555 DCHECK(hprec != nullptr);
556 DEBUG_PRINT(i << " " << &h[i]);
557 new (&h[i]) hazptr_holder(nullptr);
558 h[i].hazptr_ = hprec;
560 i << " " << &h[i] << " " << h[i].domain_ << " " << h[i].hazptr_);
567 need_destruct_ = true;
568 for (size_t i = 0; i < M; ++i) {
569 new (&h[i]) hazptr_holder;
571 i << " " << &h[i] << " " << h[i].domain_ << " " << h[i].hazptr_);
576 FOLLY_ALWAYS_INLINE hazptr_local<M>::~hazptr_local() {
577 if (LIKELY(!need_destruct_)) {
579 auto ptc = hazptr_tc_tls();
580 DCHECK(ptc != nullptr);
588 auto h = reinterpret_cast<hazptr_holder*>(&raw_);
589 for (size_t i = 0; i < M; ++i) {
590 h[i].~hazptr_holder();
595 FOLLY_ALWAYS_INLINE hazptr_holder& hazptr_local<M>::operator[](
597 auto h = reinterpret_cast<hazptr_holder*>(&raw_);
602 ////////////////////////////////////////////////////////////////////////////////
604 // - Control of reclamation (when and by whom)
605 // - End-to-end lock-free implementation
607 /** Definition of default_hazptr_domain() */
609 FOLLY_ALWAYS_INLINE hazptr_domain& default_hazptr_domain() {
610 DEBUG_PRINT(&default_domain_);
611 return default_domain_;
614 template <typename T, typename D>
615 FOLLY_ALWAYS_INLINE void hazptr_retire(T* obj, D reclaim) {
616 default_hazptr_domain().retire(obj, std::move(reclaim));
621 FOLLY_ALWAYS_INLINE void hazptr_rec::set(const void* p) noexcept {
622 DEBUG_PRINT(this << " " << p);
623 hazptr_.store(p, std::memory_order_release);
626 inline const void* hazptr_rec::get() const noexcept {
627 auto p = hazptr_.load(std::memory_order_acquire);
628 DEBUG_PRINT(this << " " << p);
632 FOLLY_ALWAYS_INLINE void hazptr_rec::clear() noexcept {
634 hazptr_.store(nullptr, std::memory_order_release);
637 inline bool hazptr_rec::isActive() noexcept {
638 return active_.load(std::memory_order_acquire);
641 inline bool hazptr_rec::tryAcquire() noexcept {
642 bool active = isActive();
644 active_.compare_exchange_strong(
645 active, true, std::memory_order_release, std::memory_order_relaxed)) {
652 inline void hazptr_rec::release() noexcept {
654 active_.store(false, std::memory_order_release);
659 inline const void* hazptr_obj::getObjPtr() const {
666 template <typename T, typename D>
667 void hazptr_domain::retire(T* obj, D reclaim) {
668 struct hazptr_retire_node : hazptr_obj {
669 std::unique_ptr<T, D> obj_;
671 hazptr_retire_node(T* obj, D reclaim) : obj_{obj, std::move(reclaim)} {}
674 auto node = new hazptr_retire_node(obj, std::move(reclaim));
675 node->reclaim_ = [](hazptr_obj* p) {
676 delete static_cast<hazptr_retire_node*>(p);
681 inline hazptr_domain::~hazptr_domain() {
683 { /* reclaim all remaining retired objects */
685 auto retired = retired_.exchange(nullptr);
687 for (auto p = retired; p; p = next) {
690 DEBUG_PRINT(this << " " << p << " " << p->reclaim_);
693 retired = retired_.exchange(nullptr);
696 /* Leak the data for the default domain to avoid destruction order
697 * issues with thread caches.
699 if (this != &default_hazptr_domain()) {
700 /* free all hazptr_rec-s */
702 for (auto p = hazptrs_.load(std::memory_order_acquire); p; p = next) {
704 DCHECK(!p->isActive());
705 mr_->deallocate(static_cast<void*>(p), sizeof(hazptr_rec));
710 inline hazptr_rec* hazptr_domain::hazptrAcquire() {
713 for (p = hazptrs_.load(std::memory_order_acquire); p; p = next) {
715 if (p->tryAcquire()) {
719 p = static_cast<hazptr_rec*>(mr_->allocate(sizeof(hazptr_rec)));
720 DEBUG_PRINT(this << " " << p << " " << sizeof(hazptr_rec));
724 p->active_.store(true, std::memory_order_relaxed);
725 p->next_ = hazptrs_.load(std::memory_order_acquire);
726 while (!hazptrs_.compare_exchange_weak(
727 p->next_, p, std::memory_order_release, std::memory_order_acquire)) {
730 auto hcount = hcount_.fetch_add(1);
731 DEBUG_PRINT(this << " " << p << " " << sizeof(hazptr_rec) << " " << hcount);
735 inline void hazptr_domain::hazptrRelease(hazptr_rec* p) noexcept {
736 DEBUG_PRINT(this << " " << p);
741 hazptr_domain::pushRetired(hazptr_obj* head, hazptr_obj* tail, int count) {
742 /*** Full fence ***/ hazptr_mb::light();
743 tail->next_ = retired_.load(std::memory_order_acquire);
744 while (!retired_.compare_exchange_weak(
747 std::memory_order_release,
748 std::memory_order_acquire)) {
750 return rcount_.fetch_add(count) + count;
753 inline bool hazptr_domain::reachedThreshold(int rcount) {
755 rcount >= HAZPTR_SCAN_THRESHOLD &&
756 rcount >= HAZPTR_SCAN_MULT * hcount_.load(std::memory_order_acquire));
759 inline void hazptr_domain::objRetire(hazptr_obj* p) {
760 auto rcount = pushRetired(p, p, 1);
761 if (reachedThreshold(rcount)) {
766 inline void hazptr_domain::tryBulkReclaim() {
769 auto hcount = hcount_.load(std::memory_order_acquire);
770 auto rcount = rcount_.load(std::memory_order_acquire);
771 if (rcount < HAZPTR_SCAN_THRESHOLD || rcount < HAZPTR_SCAN_MULT * hcount) {
774 if (rcount_.compare_exchange_weak(
775 rcount, 0, std::memory_order_release, std::memory_order_relaxed)) {
782 inline void hazptr_domain::bulkReclaim() {
784 /*** Full fence ***/ hazptr_mb::heavy();
785 auto p = retired_.exchange(nullptr, std::memory_order_acquire);
786 auto h = hazptrs_.load(std::memory_order_acquire);
787 std::unordered_set<const void*> hs; // TODO lock-free alternative
788 for (; h; h = h->next_) {
792 hazptr_obj* retired = nullptr;
793 hazptr_obj* tail = nullptr;
795 for (; p; p = next) {
798 if (hs.count(p->getObjPtr()) == 0) {
799 DEBUG_PRINT(this << " " << p << " " << p->reclaim_);
804 if (tail == nullptr) {
811 pushRetired(retired, tail, rcount);
825 std::atomic<uint64_t> light_{0};
826 std::atomic<uint64_t> heavy_{0};
827 std::atomic<uint64_t> seq_cst_{0};
830 extern hazptr_stats hazptr_stats_;
832 inline hazptr_stats::~hazptr_stats() {
833 DEBUG_PRINT(this << " light " << light_.load());
834 DEBUG_PRINT(this << " heavy " << heavy_.load());
835 DEBUG_PRINT(this << " seq_cst " << seq_cst_.load());
838 FOLLY_ALWAYS_INLINE void hazptr_stats::light() {
840 /* atomic */ ++light_;
844 inline void hazptr_stats::heavy() {
846 /* atomic */ ++heavy_;
850 inline void hazptr_stats::seq_cst() {
852 /* atomic */ ++seq_cst_;
858 FOLLY_ALWAYS_INLINE void hazptr_mb::light() {
861 folly::asymmetricLightBarrier();
862 INC_HAZPTR_STATS(light);
864 atomic_thread_fence(std::memory_order_seq_cst);
865 INC_HAZPTR_STATS(seq_cst);
869 inline void hazptr_mb::heavy() {
872 folly::asymmetricHeavyBarrier(AMBFlags::EXPEDITED);
873 INC_HAZPTR_STATS(heavy);
875 atomic_thread_fence(std::memory_order_seq_cst);
876 INC_HAZPTR_STATS(seq_cst);
885 * hazptr_tc structures
888 /** hazptr_tc_entry */
890 FOLLY_ALWAYS_INLINE void hazptr_tc_entry::fill(hazptr_rec* hprec) {
892 DEBUG_PRINT(this << " " << hprec);
895 FOLLY_ALWAYS_INLINE hazptr_rec* hazptr_tc_entry::get() {
897 DEBUG_PRINT(this << " " << hprec);
901 inline void hazptr_tc_entry::evict() {
904 DEBUG_PRINT(this << " " << hprec);
909 FOLLY_ALWAYS_INLINE hazptr_tc_entry& hazptr_tc::operator[](size_t i) {
910 DCHECK(i <= HAZPTR_TC_SIZE);
914 FOLLY_ALWAYS_INLINE hazptr_rec* hazptr_tc::get() {
915 if (LIKELY(count_ != 0)) {
916 auto hprec = entry_[--count_].get();
917 DEBUG_PRINT(this << " " << hprec);
920 DEBUG_PRINT(this << " nullptr");
924 FOLLY_ALWAYS_INLINE bool hazptr_tc::put(hazptr_rec* hprec) {
925 if (LIKELY(count_ < HAZPTR_TC_SIZE)) {
926 entry_[count_++].fill(hprec);
927 DEBUG_PRINT(this << " " << count_ - 1);
933 FOLLY_ALWAYS_INLINE size_t hazptr_tc::count() {
937 /** hazptr_tc free functions */
939 FOLLY_ALWAYS_INLINE hazptr_tc* hazptr_tc_tls() {
940 DEBUG_PRINT(tls_state_);
941 if (LIKELY(tls_state_ == TLS_ALIVE)) {
942 DEBUG_PRINT(tls_state_);
943 return &tls_tc_data_;
944 } else if (tls_state_ == TLS_UNINITIALIZED) {
946 return &tls_tc_data_;
951 inline void hazptr_tc_init() {
953 auto& tc = tls_tc_data_;
961 inline void hazptr_tc_shutdown() {
962 auto& tc = tls_tc_data_;
964 for (size_t i = 0; i < tc.count_; ++i) {
965 tc.entry_[i].evict();
969 FOLLY_ALWAYS_INLINE hazptr_rec* hazptr_tc_try_get() {
970 DEBUG_PRINT(TLS_UNINITIALIZED << TLS_ALIVE << TLS_DESTROYED);
971 DEBUG_PRINT(tls_state_);
972 if (LIKELY(tls_state_ == TLS_ALIVE)) {
973 DEBUG_PRINT(tls_state_);
974 return tls_tc_data_.get();
975 } else if (tls_state_ == TLS_UNINITIALIZED) {
977 return tls_tc_data_.get();
982 FOLLY_ALWAYS_INLINE bool hazptr_tc_try_put(hazptr_rec* hprec) {
983 DEBUG_PRINT(tls_state_);
984 if (LIKELY(tls_state_ == TLS_ALIVE)) {
985 DEBUG_PRINT(tls_state_);
986 return tls_tc_data_.put(hprec);
995 inline void hazptr_priv::push(hazptr_obj* obj) {
996 auto& domain = default_hazptr_domain();
997 obj->next_ = nullptr;
1002 domain.objRetire(obj);
1008 if (++rcount_ >= HAZPTR_PRIV_THRESHOLD) {
1013 inline void hazptr_priv::pushAllToDomain() {
1014 auto& domain = default_hazptr_domain();
1015 domain.pushRetired(head_, tail_, rcount_);
1019 domain.tryBulkReclaim();
1022 inline void hazptr_priv_init() {
1023 auto& priv = tls_priv_data_;
1025 priv.head_ = nullptr;
1026 priv.tail_ = nullptr;
1028 priv.active_ = true;
1031 inline void hazptr_priv_shutdown() {
1032 auto& priv = tls_priv_data_;
1034 DCHECK(priv.active_);
1035 priv.active_ = false;
1037 priv.pushAllToDomain();
1041 inline bool hazptr_priv_try_retire(hazptr_obj* obj) {
1042 DEBUG_PRINT(tls_state_);
1043 if (tls_state_ == TLS_ALIVE) {
1044 DEBUG_PRINT(tls_state_);
1045 tls_priv_data_.push(obj);
1047 } else if (tls_state_ == TLS_UNINITIALIZED) {
1048 DEBUG_PRINT(tls_state_);
1050 tls_priv_data_.push(obj);
1056 /** hazptr_tls_life */
1058 inline void tls_life_odr_use() {
1059 DEBUG_PRINT(tls_state_);
1060 CHECK(tls_state_ == TLS_UNINITIALIZED);
1061 auto volatile tlsOdrUse = &tls_life_;
1062 CHECK(tlsOdrUse != nullptr);
1063 DEBUG_PRINT(tlsOdrUse);
1066 inline hazptr_tls_life::hazptr_tls_life() {
1068 CHECK(tls_state_ == TLS_UNINITIALIZED);
1071 tls_state_ = TLS_ALIVE;
1074 inline hazptr_tls_life::~hazptr_tls_life() {
1076 CHECK(tls_state_ == TLS_ALIVE);
1077 hazptr_tc_shutdown();
1078 hazptr_priv_shutdown();
1079 tls_state_ = TLS_DESTROYED;
1082 /** hazptr_obj_batch */
1083 /* Only for default domain. Supports only hazptr_obj_base_refcounted
1084 * and a thread-safe access only, for now. */
1086 class hazptr_obj_batch {
1087 static constexpr size_t DefaultThreshold = 20;
1088 hazptr_obj* head_{nullptr};
1089 hazptr_obj* tail_{nullptr};
1091 size_t threshold_{DefaultThreshold};
1094 hazptr_obj_batch() {}
1095 hazptr_obj_batch(hazptr_obj* head, hazptr_obj* tail, size_t rcount)
1096 : head_(head), tail_(tail), rcount_(rcount) {}
1098 ~hazptr_obj_batch() {
1102 /* Prepare a hazptr_obj_base_refcounted for retirement but don't
1103 push it the domain yet. Return true if the batch is ready. */
1104 template <typename T, typename D = std::default_delete<T>>
1105 hazptr_obj_batch prep_retire_refcounted(
1106 hazptr_obj_base_refcounted<T, D>* obj,
1108 obj->preRetire(deleter);
1111 if (tail_ == nullptr) {
1114 if (++rcount_ < threshold_) {
1115 return hazptr_obj_batch();
1119 auto rcount = rcount_;
1121 return hazptr_obj_batch(head, tail, rcount);
1126 return rcount_ == 0;
1131 auto& domain = default_hazptr_domain();
1132 domain.pushRetired(head_, tail_, rcount_);
1133 domain.tryBulkReclaim();
1138 void set_threshold(size_t thresh) {
1139 threshold_ = thresh;
1150 } // namespace hazptr
1151 } // namespace folly