3 #ifndef CDSLIB_INTRUSIVE_FELDMAN_HASHSET_RCU_H
4 #define CDSLIB_INTRUSIVE_FELDMAN_HASHSET_RCU_H
6 #include <functional> // std::ref
7 #include <iterator> // std::iterator_traits
10 #include <cds/intrusive/details/feldman_hashset_base.h>
11 #include <cds/details/allocator.h>
12 #include <cds/urcu/details/check_deadlock.h>
13 #include <cds/urcu/exempt_ptr.h>
14 #include <cds/intrusive/details/raw_ptr_disposer.h>
17 namespace cds { namespace intrusive {
18 /// Intrusive hash set based on multi-level array, \ref cds_urcu_desc "RCU" specialization
19 /** @ingroup cds_intrusive_map
20 @anchor cds_intrusive_FeldmanHashSet_rcu
23 - [2013] Steven Feldman, Pierre LaBorde, Damian Dechev "Concurrent Multi-level Arrays:
24 Wait-free Extensible Hash Maps"
26 See algorithm short description @ref cds_intrusive_FeldmanHashSet_hp "here"
28 @note Two important things you should keep in mind when you're using \p %FeldmanHashSet:
29 - all keys must be fixed-size. It means that you cannot use \p std::string as a key for \p %FeldmanHashSet.
30 Instead, for the strings you should use well-known hashing algorithms like <a href="https://en.wikipedia.org/wiki/Secure_Hash_Algorithm">SHA1, SHA2</a>,
31 <a href="https://en.wikipedia.org/wiki/MurmurHash">MurmurHash</a>, <a href="https://en.wikipedia.org/wiki/CityHash">CityHash</a>
32 or its successor <a href="https://code.google.com/p/farmhash/">FarmHash</a> and so on, which
33 converts variable-length strings to fixed-length bit-strings, and use that hash as a key in \p %FeldmanHashSet.
34 - \p %FeldmanHashSet uses perfect hashing. It means that if two different keys, for example, of type \p std::string,
35 have identical hash then you cannot insert both that keys in the set. \p %FeldmanHashSet does not maintain the key,
36 it maintains its fixed-size hash value.
39 - \p RCU - one of \ref cds_urcu_gc "RCU type"
40 - \p T - a value type to be stored in the set
41 - \p Traits - type traits, the structure based on \p feldman_hashset::traits or result of \p feldman_hashset::make_traits metafunction.
42 \p Traits is the mandatory argument because it has one mandatory type - an @ref feldman_hashset::traits::hash_accessor "accessor"
43 to hash value of \p T. The set algorithm does not calculate that hash value.
45 @note Before including <tt><cds/intrusive/feldman_hashset_rcu.h></tt> you should include appropriate RCU header file,
46 see \ref cds_urcu_gc "RCU type" for list of existing RCU class and corresponding header files.
48 The set supports @ref cds_intrusive_FeldmanHashSet_rcu_iterators "bidirectional thread-safe iterators"
50 with some restrictions.
55 #ifdef CDS_DOXYGEN_INVOKED
56 class Traits = feldman_hashset::traits
// NOTE(review): the template<...> header, #endif and access-specifier lines are
// elided in this listing; the class derives its slot machinery from multilevel_array.
61 class FeldmanHashSet< cds::urcu::gc< RCU >, T, Traits >: protected feldman_hashset::multilevel_array<T, Traits>
64 typedef feldman_hashset::multilevel_array<T, Traits> base_class;
68 typedef cds::urcu::gc< RCU > gc; ///< RCU garbage collector
69 typedef T value_type; ///< type of value stored in the set
70 typedef Traits traits; ///< Traits template parameter
72 typedef typename traits::hash_accessor hash_accessor; ///< Hash accessor functor
73 typedef typename base_class::hash_type hash_type; ///< Hash type deduced from \p hash_accessor return type
74 typedef typename traits::disposer disposer; ///< data node disposer
75 typedef typename base_class::hash_comparator hash_comparator; ///< hash compare functor based on \p traits::compare and \p traits::less options
77 typedef typename traits::item_counter item_counter; ///< Item counter type
78 typedef typename traits::node_allocator node_allocator; ///< Array node allocator
79 typedef typename traits::memory_model memory_model; ///< Memory model
80 typedef typename traits::back_off back_off; ///< Backoff strategy
81 typedef typename traits::stat stat; ///< Internal statistics type
82 typedef typename traits::rcu_check_deadlock rcu_check_deadlock; ///< Deadlock checking policy
83 typedef typename gc::scoped_lock rcu_lock; ///< RCU scoped lock
84 static CDS_CONSTEXPR const bool c_bExtractLockExternal = false; ///< Group of \p extract_xxx functions does not require external locking
86 using exempt_ptr = cds::urcu::exempt_ptr< gc, value_type, value_type, disposer, void >; ///< pointer to extracted node
// Internal type aliases re-exported from the multi-level array base class.
90 typedef typename base_class::node_ptr node_ptr;
91 typedef typename base_class::atomic_node_ptr atomic_node_ptr;
92 typedef typename base_class::array_node array_node;
93 typedef typename base_class::traverse_data traverse_data;
95 using base_class::to_array;
96 using base_class::to_node;
97 using base_class::stats;
98 using base_class::head;
99 using base_class::metrics;
// Checks that the caller does not already hold the RCU read lock where that would deadlock.
101 typedef cds::urcu::details::check_deadlock_policy< gc, rcu_check_deadlock> check_deadlock_policy;
106 item_counter m_ItemCounter; ///< Item counter
110 /// Creates empty set
112 @param head_bits: 2<sup>head_bits</sup> specifies the size of head array, minimum is 4.
113 @param array_bits: 2<sup>array_bits</sup> specifies the size of array node, minimum is 2.
115 Equation for \p head_bits and \p array_bits:
117 sizeof(hash_type) * 8 == head_bits + N * array_bits
119 where \p N is multi-level array depth.
// Constructor: forwards the head/array sizing exponents to the multi-level
// array base class. NOTE(review): the empty body/closing brace is elided in
// this listing.
121 FeldmanHashSet(size_t head_bits = 8, size_t array_bits = 4)
122 : base_class(head_bits, array_bits)
125 /// Destructs the set and frees all data
133 The function inserts \p val in the set if it does not contain
134 an item with that hash.
136 Returns \p true if \p val is placed into the set, \p false otherwise.
138 The function locks RCU internally.
// Convenience overload: inserts \p val with a no-op initializer functor.
// All real work is done by insert( value_type&, Func ).
140 bool insert( value_type& val )
142 return insert( val, [](value_type&) {} );
147 This function is intended for derived non-intrusive containers.
149 The function allows to split creating of new item into two part:
150 - create item with key only
151 - insert new item into the set
152 - if inserting is success, calls \p f functor to initialize \p val.
154 The functor signature is:
156 void func( value_type& val );
158 where \p val is the item inserted.
160 The user-defined functor is called only if the inserting is success.
162 The function locks RCU internally.
163 @warning See \ref cds_intrusive_item_creating "insert item troubleshooting".
165 template <typename Func>
166 bool insert( value_type& val, Func f )
// The set stores only the fixed-size hash of the value; compute it once up front.
168 hash_type const& hash = hash_accessor()( val );
169 traverse_data pos( hash, *this );
// NOTE(review): intervening lines (RCU lock scope, retry loop header,
// hash_comparator declaration, braces and return statements) are elided in
// this listing — comments below describe only the visible fragments.
175 node_ptr slot = base_class::traverse( pos );
// traverse() must yield a plain data/empty slot, never a marked array-node pointer.
176 assert(slot.bits() == 0);
// Re-read the slot to verify it did not change while we were traversing down.
178 if ( pos.pArr->nodes[pos.nSlot].load(memory_model::memory_order_acquire) == slot) {
180 if ( cmp( hash, hash_accessor()(*slot.ptr())) == 0 ) {
181 // the item with that hash value already exists
182 stats().onInsertFailed();
186 // the slot must be expanded
187 base_class::expand_slot( pos, slot );
190 // the slot is empty, try to insert data node
// Release ordering on success publishes the node to readers that acquire-load the slot.
192 if ( pos.pArr->nodes[pos.nSlot].compare_exchange_strong(pNull, node_ptr(&val), memory_model::memory_order_release, atomics::memory_order_relaxed))
194 // the new data node has been inserted
197 stats().onInsertSuccess();
198 stats().height( pos.nHeight );
202 // insert failed - slot has been changed by another thread
204 stats().onInsertRetry();
208 stats().onSlotChanged();
214 Performs inserting or updating the item with hash value equal to \p val.
215 - If hash value is found then existing item is replaced with \p val, old item is disposed
216 with \p Traits::disposer. Note that the disposer is called by \p GC asynchronously.
217 The function returns <tt> std::pair<true, false> </tt>
218 - If hash value is not found and \p bInsert is \p true then \p val is inserted,
219 the function returns <tt> std::pair<true, true> </tt>
220 - If hash value is not found and \p bInsert is \p false then the set is unchanged,
221 the function returns <tt> std::pair<false, false> </tt>
223 Returns <tt> std::pair<bool, bool> </tt> where \p first is \p true if the operation is successful
224 (i.e. the item has been inserted or updated),
225 \p second is \p true if new item has been added or \p false if the set contains that hash.
227 The function locks RCU internally.
// Insert-or-replace by hash value; delegates to do_update() with a no-op
// "old item replaced" functor. See the doc comment above for the meaning of
// the returned pair.
229 std::pair<bool, bool> update( value_type& val, bool bInsert = true )
231 return do_update(val, [](value_type&, value_type *) {}, bInsert );
234 /// Unlinks the item \p val from the set
236 The function searches the item \p val in the set and unlink it
237 if it is found and its address is equal to <tt>&val</tt>.
239 The function returns \p true if success and \p false otherwise.
241 RCU should not be locked. The function locks RCU internally.
243 bool unlink( value_type const& val )
// This thread must not already hold the RCU read lock (deadlock check).
245 check_deadlock_policy::check();
// Unlink only if the stored item is exactly &val, not merely hash-equal to it.
247 auto pred = [&val](value_type const& item) -> bool { return &item == &val; };
// NOTE(review): the declaration of p and the RCU lock scope are elided in this listing.
251 p = do_erase( hash_accessor()( val ), std::ref( pred ));
// Defer disposal: the disposer runs asynchronously after an RCU grace period.
254 gc::template retire_ptr<disposer>( p );
260 /// Deletes the item from the set
262 The function searches \p hash in the set,
263 unlinks the item found, and returns \p true.
264 If that item is not found the function returns \p false.
266 The \ref disposer specified in \p Traits is called by garbage collector \p GC asynchronously.
268 RCU should not be locked. The function locks RCU internally.
// Convenience overload: erase by hash with a no-op "found item" functor.
270 bool erase( hash_type const& hash )
272 return erase(hash, [](value_type const&) {} );
275 /// Deletes the item from the set
277 The function searches \p hash in the set,
278 call \p f functor with item found, and unlinks it from the set.
279 The \ref disposer specified in \p Traits is called
280 by garbage collector \p GC asynchronously.
282 The \p Func interface is
285 void operator()( value_type& item );
289 If \p hash is not found the function returns \p false.
291 RCU should not be locked. The function locks RCU internally.
293 template <typename Func>
294 bool erase( hash_type const& hash, Func f )
// This thread must not already hold the RCU read lock (deadlock check).
296 check_deadlock_policy::check();
// NOTE(review): the RCU lock scope, declaration of p, the call to f and the
// return statements are elided in this listing.
302 p = do_erase( hash, []( value_type const&) -> bool { return true; } );
305 // p is protected by the enclosing RCU critical section (the original "HP"
305 // wording looks copied from the Hazard Pointer specialization)
// Disposal is deferred via RCU; the disposer runs after a grace period.
308 gc::template retire_ptr<disposer>(p);
314 /// Extracts the item with specified \p hash
316 The function searches \p hash in the set,
317 unlinks it from the set, and returns \ref cds::urcu::exempt_ptr "exempt_ptr" pointer to the item found.
318 If an item with hash value equal to \p hash is not found the function returns an empty \p exempt_ptr.
320 RCU \p synchronize method can be called. RCU should NOT be locked.
321 The function does not call the disposer for the item found.
322 The disposer will be implicitly invoked when the returned object is destroyed or when
323 its \p release() member function is called.
326 typedef cds::intrusive::FeldmanHashSet< cds::urcu::gc< cds::urcu::general_buffered<> >, foo, my_traits > set_type;
330 typename set_type::exempt_ptr ep( theSet.extract( 5 ));
335 // Dispose returned item.
340 exempt_ptr extract( hash_type const& hash )
// This thread must not already hold the RCU read lock (deadlock check).
342 check_deadlock_policy::check();
// NOTE(review): the RCU lock scope and declaration of p are elided in this listing.
347 p = do_erase( hash, []( value_type const&) -> bool {return true;} );
// Ownership passes to the exempt_ptr; the disposer is invoked on its
// release()/destruction, as documented above.
349 return exempt_ptr( p );
352 /// Finds an item by its \p hash
354 The function searches the item by \p hash and calls the functor \p f for item found.
355 The interface of \p Func functor is:
358 void operator()( value_type& item );
361 where \p item is the item found.
363 The functor may change non-key fields of \p item. Note that the functor is only guarantee
364 that \p item cannot be disposed during the functor is executing.
365 The functor does not serialize simultaneous access to the set's \p item. If such access is
366 possible you must provide your own synchronization schema on item level to prevent unsafe item modifications.
368 The function returns \p true if \p hash is found, \p false otherwise.
370 The function applies RCU lock internally.
372 template <typename Func>
373 bool find( hash_type const& hash, Func f )
// NOTE(review): the RCU lock scope, the invocation of f on the found item and
// the return statement are elided in this listing.
377 value_type * p = search( hash );
385 /// Checks whether the set contains \p hash
387 The function searches the item by its \p hash
388 and returns \p true if it is found, or \p false otherwise.
390 The function applies RCU lock internally.
// Membership test: find() with a no-op functor.
392 bool contains( hash_type const& hash )
394 return find( hash, [](value_type&) {} );
397 /// Finds an item by its \p hash and returns the item found
399 The function searches the item by its \p hash
400 and returns the pointer to the item found.
401 If \p hash is not found the function returns \p nullptr.
403 RCU should be locked before the function invocation.
404 Returned pointer is valid only while RCU is locked.
408 typedef cds::intrusive::FeldmanHashSet< your_template_params > my_set;
415 foo * p = theSet.get( 5 );
423 value_type * get( hash_type const& hash )
// The caller must hold the RCU read lock; the returned pointer is only valid
// while that lock is held (see the doc comment above).
425 assert( gc::is_locked());
426 return search( hash );
429 /// Clears the set (non-atomic)
431 The function unlink all data node from the set.
432 The function is not atomic but is thread-safe.
433 After \p %clear() the set may not be empty because another threads may insert items.
435 For each item the \p disposer is called after unlinking.
439 clear_array( head(), head_size());
442 /// Checks if the set is empty
444 Emptiness is checked by item counting: if item count is zero then the set is empty.
445 Thus, the correct item counting feature is an important part of the set implementation.
452 /// Returns item count in the set
// NOTE(review): the function signature line is elided in this listing;
// the value is taken from the traits-selected item counter.
455 return m_ItemCounter;
458 /// Returns const reference to internal statistics
459 stat const& statistics() const
// NOTE(review): the body is elided in this listing — presumably returns
// stats() from the base class; confirm against the full source.
464 /// Returns the size of head node
465 using base_class::head_size;
467 /// Returns the size of the array node
468 using base_class::array_node_size;
470 /// Collects tree level statistics into \p stat
471 /** @copydetails cds::intrusive::FeldmanHashSet::get_level_statistics
// Forwards per-level statistics collection to the multi-level array base.
473 void get_level_statistics(std::vector<feldman_hashset::level_statistics>& stat) const
475 base_class::get_level_statistics(stat);
// Common state and traversal logic shared by all iterator flavors.
// NOTE(review): the class header, access specifiers, many braces and several
// statements are elided in this listing.
482 friend class FeldmanHashSet;
485 array_node * m_pNode; ///< current array node
486 size_t m_idx; ///< current position in m_pNode
487 value_type * m_pValue; ///< current value
488 FeldmanHashSet const* m_set; ///< Hash set
// Default ctor (member initializers elided in this listing).
491 iterator_base() CDS_NOEXCEPT
498 iterator_base(iterator_base const& rhs) CDS_NOEXCEPT
499 : m_pNode(rhs.m_pNode)
501 , m_pValue(rhs.m_pValue)
505 iterator_base& operator=(iterator_base const& rhs) CDS_NOEXCEPT
507 m_pNode = rhs.m_pNode;
509 m_pValue = rhs.m_pValue;
514 iterator_base& operator++()
520 iterator_base& operator--()
// Iterators compare equal iff they reference the same cell of the same set;
// m_pValue intentionally does not participate (see class-level iterator docs).
526 bool operator ==(iterator_base const& rhs) const CDS_NOEXCEPT
528 return m_pNode == rhs.m_pNode && m_idx == rhs.m_idx && m_set == rhs.m_set;
531 bool operator !=(iterator_base const& rhs) const CDS_NOEXCEPT
533 return !(*this == rhs);
// Positioning ctors: the bool-tagged overload places the iterator without
// scanning for an occupied slot (used for end()-style sentinels).
537 iterator_base(FeldmanHashSet const& set, array_node * pNode, size_t idx, bool)
544 iterator_base(FeldmanHashSet const& set, array_node * pNode, size_t idx)
// Dereference helper; valid only under the RCU read lock.
553 value_type * pointer() const CDS_NOEXCEPT
555 assert(gc::is_locked());
// --- forward(): advance to the next occupied slot (signature elided) ---
561 assert( gc::is_locked());
562 assert(m_set != nullptr);
563 assert(m_pNode != nullptr);
565 size_t const arrayNodeSize = m_set->array_node_size();
566 size_t const headSize = m_set->head_size();
567 array_node * pNode = m_pNode;
568 size_t idx = m_idx + 1;
// Only the head array has no parent; it may differ in size from inner nodes.
569 size_t nodeSize = m_pNode->pParent ? arrayNodeSize : headSize;
572 if (idx < nodeSize) {
573 node_ptr slot = pNode->nodes[idx].load(memory_model::memory_order_acquire);
574 if (slot.bits() == base_class::flag_array_node ) {
575 // array node, go down the tree
576 assert(slot.ptr() != nullptr);
577 pNode = to_array(slot.ptr());
579 nodeSize = arrayNodeSize;
581 else if (slot.bits() == base_class::flag_array_converting ) {
582 // the slot is converting to array node right now - skip the node
590 m_pValue = slot.ptr();
// End of current node: pop back up to the parent and continue after our slot.
598 if (pNode->pParent) {
599 idx = pNode->idxParent + 1;
600 pNode = pNode->pParent;
601 nodeSize = pNode->pParent ? arrayNodeSize : headSize;
// Walked off the head array: we are at the end() position.
605 assert(pNode == m_set->head());
606 assert(idx == headSize);
// --- backward(): step to the previous occupied slot (signature elided) ---
618 assert(gc::is_locked());
619 assert(m_set != nullptr);
620 assert(m_pNode != nullptr);
622 size_t const arrayNodeSize = m_set->array_node_size();
623 size_t const headSize = m_set->head_size();
// Sentinel index for "before begin": size_t(0) - 1 wraps to SIZE_MAX.
624 size_t const endIdx = size_t(0) - 1;
626 array_node * pNode = m_pNode;
627 size_t idx = m_idx - 1;
628 size_t nodeSize = m_pNode->pParent ? arrayNodeSize : headSize;
632 node_ptr slot = pNode->nodes[idx].load(memory_model::memory_order_acquire);
633 if (slot.bits() == base_class::flag_array_node ) {
634 // array node, go down the tree
635 assert(slot.ptr() != nullptr);
636 pNode = to_array(slot.ptr());
637 nodeSize = arrayNodeSize;
640 else if (slot.bits() == base_class::flag_array_converting ) {
641 // the slot is converting to array node right now - skip the node
649 m_pValue = slot.ptr();
// Beginning of current node: pop up to the parent and continue before our slot.
657 if (pNode->pParent) {
658 idx = pNode->idxParent - 1;
659 pNode = pNode->pParent;
660 nodeSize = pNode->pParent ? arrayNodeSize : headSize;
// Walked off the front of the head array: rend() position.
664 assert(pNode == m_set->head());
665 assert(idx == endIdx);
// Factory helpers for the four iterator anchor positions. size_t(0) - 1 is
// the "before the first slot" sentinel; the trailing bool selects the
// non-scanning iterator_base ctor. NOTE(review): braces elided in this listing.
676 template <class Iterator>
677 Iterator init_begin() const
679 return Iterator(*this, head(), size_t(0) - 1);
682 template <class Iterator>
683 Iterator init_end() const
685 return Iterator(*this, head(), head_size(), false);
688 template <class Iterator>
689 Iterator init_rbegin() const
691 return Iterator(*this, head(), head_size());
694 template <class Iterator>
695 Iterator init_rend() const
697 return Iterator(*this, head(), size_t(0) - 1, false);
700 /// Bidirectional iterator class
// Forward bidirectional iterator: a thin facade over iterator_base that fixes
// const-ness of the exposed value. NOTE(review): access specifiers, braces and
// several statements (e.g. `return *this;` lines) are elided in this listing.
701 template <bool IsConst>
702 class bidirectional_iterator : protected iterator_base
704 friend class FeldmanHashSet;
707 static CDS_CONSTEXPR bool const c_bConstantIterator = IsConst;
710 typedef typename std::conditional< IsConst, value_type const*, value_type*>::type value_ptr; ///< Value pointer
711 typedef typename std::conditional< IsConst, value_type const&, value_type&>::type value_ref; ///< Value reference
714 bidirectional_iterator() CDS_NOEXCEPT
717 bidirectional_iterator(bidirectional_iterator const& rhs) CDS_NOEXCEPT
721 bidirectional_iterator& operator=(bidirectional_iterator const& rhs) CDS_NOEXCEPT
723 iterator_base::operator=(rhs);
727 bidirectional_iterator& operator++()
729 iterator_base::operator++();
733 bidirectional_iterator& operator--()
735 iterator_base::operator--();
739 value_ptr operator ->() const CDS_NOEXCEPT
741 return iterator_base::pointer();
744 value_ref operator *() const CDS_NOEXCEPT
746 value_ptr p = iterator_base::pointer();
// Cross-constness comparison: const and mutable iterators are comparable.
751 template <bool IsConst2>
752 bool operator ==(bidirectional_iterator<IsConst2> const& rhs) const CDS_NOEXCEPT
754 return iterator_base::operator==(rhs);
757 template <bool IsConst2>
758 bool operator !=(bidirectional_iterator<IsConst2> const& rhs) const CDS_NOEXCEPT
760 return !(*this == rhs);
// Positioning ctors used by the init_* factories above.
764 bidirectional_iterator(FeldmanHashSet& set, array_node * pNode, size_t idx, bool)
765 : iterator_base(set, pNode, idx, false)
768 bidirectional_iterator(FeldmanHashSet& set, array_node * pNode, size_t idx)
769 : iterator_base(set, pNode, idx)
773 /// Reverse bidirectional iterator
// Reverse bidirectional iterator: same facade, with ++/-- mapped onto the
// opposite iterator_base direction. NOTE(review): access specifiers, braces
// and several statements are elided in this listing.
774 template <bool IsConst>
775 class reverse_bidirectional_iterator : public iterator_base
777 friend class FeldmanHashSet;
780 typedef typename std::conditional< IsConst, value_type const*, value_type*>::type value_ptr; ///< Value pointer
781 typedef typename std::conditional< IsConst, value_type const&, value_type&>::type value_ref; ///< Value reference
784 reverse_bidirectional_iterator() CDS_NOEXCEPT
788 reverse_bidirectional_iterator(reverse_bidirectional_iterator const& rhs) CDS_NOEXCEPT
792 reverse_bidirectional_iterator& operator=(reverse_bidirectional_iterator const& rhs) CDS_NOEXCEPT
794 iterator_base::operator=(rhs);
// Reverse semantics: ++ walks backward through the array, -- walks forward.
798 reverse_bidirectional_iterator& operator++()
800 iterator_base::operator--();
804 reverse_bidirectional_iterator& operator--()
806 iterator_base::operator++();
810 value_ptr operator ->() const CDS_NOEXCEPT
812 return iterator_base::pointer();
815 value_ref operator *() const CDS_NOEXCEPT
817 value_ptr p = iterator_base::pointer();
// NOTE(review): unlike bidirectional_iterator, operator== here is not
// CDS_NOEXCEPT and operator!= is neither const nor CDS_NOEXCEPT — looks like
// an upstream inconsistency; confirm before changing.
822 template <bool IsConst2>
823 bool operator ==(reverse_bidirectional_iterator<IsConst2> const& rhs) const
825 return iterator_base::operator==(rhs);
828 template <bool IsConst2>
829 bool operator !=(reverse_bidirectional_iterator<IsConst2> const& rhs)
831 return !(*this == rhs);
835 reverse_bidirectional_iterator(FeldmanHashSet& set, array_node * pNode, size_t idx, bool)
836 : iterator_base(set, pNode, idx, false)
// Non-sentinel ctor: position without scanning, then step back to the first
// occupied slot in reverse order.
839 reverse_bidirectional_iterator(FeldmanHashSet& set, array_node * pNode, size_t idx)
840 : iterator_base(set, pNode, idx, false)
842 iterator_base::backward();
// Public iterator type aliases; the Doxygen branch shows opaque names, the
// real branch binds them to the nested iterator templates above.
// NOTE(review): the #else/#endif lines are elided in this listing.
848 #ifdef CDS_DOXYGEN_INVOKED
849 typedef implementation_defined iterator; ///< @ref cds_intrusive_FeldmanHashSet_rcu_iterators "bidirectional iterator" type
850 typedef implementation_defined const_iterator; ///< @ref cds_intrusive_FeldmanHashSet_rcu_iterators "bidirectional const iterator" type
851 typedef implementation_defined reverse_iterator; ///< @ref cds_intrusive_FeldmanHashSet_rcu_iterators "bidirectional reverse iterator" type
852 typedef implementation_defined const_reverse_iterator; ///< @ref cds_intrusive_FeldmanHashSet_rcu_iterators "bidirectional reverse const iterator" type
854 typedef bidirectional_iterator<false> iterator;
855 typedef bidirectional_iterator<true> const_iterator;
856 typedef reverse_bidirectional_iterator<false> reverse_iterator;
857 typedef reverse_bidirectional_iterator<true> const_reverse_iterator;
860 ///@name Thread-safe iterators
861 /** @anchor cds_intrusive_FeldmanHashSet_rcu_iterators
862 The set supports thread-safe iterators: you may iterate over the set in multi-threaded environment
863 under explicit RCU lock.
864 RCU lock requirement means that inserting or searching is allowed but you must not erase the items from the set
865 since erasing under RCU lock can lead to a deadlock. However, another thread can call \p erase() safely
866 while your thread is iterating.
868 A typical example is:
873 uint32_t payload; // only for example
875 struct set_traits: cds::intrusive::feldman_hashset::traits
877 struct hash_accessor {
878 uint32_t operator()( foo const& src ) const
885 typedef cds::urcu::gc< cds::urcu::general_buffered<>> rcu;
886 typedef cds::intrusive::FeldmanHashSet< rcu, foo, set_traits > set_type;
892 // iterate over the set
895 typename set_type::rcu_lock l; // scoped RCU lock
898 for ( auto i = s.begin(); i != s.end(); ++i ) {
899 // deal with i. Remember, erasing is prohibited here!
902 } // at this point RCU lock is released
905 Each iterator object supports the common interface:
906 - dereference operators:
908 value_type [const] * operator ->() noexcept
909 value_type [const] & operator *() noexcept
911 - pre-increment and pre-decrement. Post-operators are not supported
912 - equality operators <tt>==</tt> and <tt>!=</tt>.
913 Iterators are equal iff they point to the same cell of the same array node.
914 Note that for two iterators \p it1 and \p it2 the condition <tt> it1 == it2 </tt>
915 does not entail <tt> &(*it1) == &(*it2) </tt>: welcome to concurrent containers
917 @note It is possible the item can be iterated more that once, for example, if an iterator points to the item
918 in an array node that is being split.
// Iterator accessors. All positions anchor at the head array: begin-style
// iterators start at the size_t(0) - 1 sentinel, end-style at head_size().
// NOTE(review): several signature lines, braces, and return statements are
// elided in this listing. Also note cbegin()/cend()/crbegin()/crend() are not
// const-qualified here — looks like an upstream wart; confirm before changing.
922 /// Returns an iterator to the beginning of the set
925 return iterator(*this, head(), size_t(0) - 1);
928 /// Returns an const iterator to the beginning of the set
929 const_iterator begin() const
931 return const_iterator(*this, head(), size_t(0) - 1);
934 /// Returns an const iterator to the beginning of the set
935 const_iterator cbegin()
937 return const_iterator(*this, head(), size_t(0) - 1);
940 /// Returns an iterator to the element following the last element of the set. This element acts as a placeholder; attempting to access it results in undefined behavior.
943 return iterator(*this, head(), head_size(), false);
946 /// Returns a const iterator to the element following the last element of the set. This element acts as a placeholder; attempting to access it results in undefined behavior.
947 const_iterator end() const
949 return const_iterator(*this, head(), head_size(), false);
952 /// Returns a const iterator to the element following the last element of the set. This element acts as a placeholder; attempting to access it results in undefined behavior.
953 const_iterator cend()
955 return const_iterator(*this, head(), head_size(), false);
958 /// Returns a reverse iterator to the first element of the reversed set
959 reverse_iterator rbegin()
961 return reverse_iterator(*this, head(), head_size());
964 /// Returns a const reverse iterator to the first element of the reversed set
965 const_reverse_iterator rbegin() const
967 return const_reverse_iterator(*this, head(), head_size());
970 /// Returns a const reverse iterator to the first element of the reversed set
971 const_reverse_iterator crbegin()
973 return const_reverse_iterator(*this, head(), head_size());
976 /// Returns a reverse iterator to the element following the last element of the reversed set
978 It corresponds to the element preceding the first element of the non-reversed container.
979 This element acts as a placeholder, attempting to access it results in undefined behavior.
981 reverse_iterator rend()
983 return reverse_iterator(*this, head(), size_t(0) - 1, false);
986 /// Returns a const reverse iterator to the element following the last element of the reversed set
988 It corresponds to the element preceding the first element of the non-reversed container.
989 This element acts as a placeholder, attempting to access it results in undefined behavior.
991 const_reverse_iterator rend() const
993 return const_reverse_iterator(*this, head(), size_t(0) - 1, false);
996 /// Returns a const reverse iterator to the element following the last element of the reversed set
998 It corresponds to the element preceding the first element of the non-reversed container.
999 This element acts as a placeholder, attempting to access it results in undefined behavior.
1001 const_reverse_iterator crend()
1003 return const_reverse_iterator(*this, head(), size_t(0) - 1, false);
// Core insert-or-replace. Returns {succeeded, inserted-new}. The replaced old
// node (if any) is retired to the disposer after the RCU lock is released.
// NOTE(review): the RCU lock scope, retry loop header, pNull/pOld declarations
// and many braces are elided in this listing.
1009 template <typename Func>
1010 std::pair<bool, bool> do_update(value_type& val, Func f, bool bInsert = true)
1012 hash_type const& hash = hash_accessor()(val);
1013 traverse_data pos( hash, *this );
1014 hash_comparator cmp;
1020 node_ptr slot = base_class::traverse( pos );
// traverse() must yield a plain data/empty slot, never a marked array-node pointer.
1021 assert(slot.bits() == 0);
// Re-read the slot to verify it did not change while we were traversing down.
1024 if ( pos.pArr->nodes[pos.nSlot].load(memory_model::memory_order_acquire) == slot) {
1026 if ( cmp( hash, hash_accessor()(*slot.ptr())) == 0 ) {
1027 // the item with that hash value already exists
1028 // Replace it with val
// Self-replacement is a no-op success (nothing to dispose).
1029 if ( slot.ptr() == &val ) {
1030 stats().onUpdateExisting();
1031 return std::make_pair(true, false);
// Release CAS publishes the new node; the displaced node goes to the goto path below.
1034 if ( pos.pArr->nodes[pos.nSlot].compare_exchange_strong(slot, node_ptr(&val), memory_model::memory_order_release, atomics::memory_order_relaxed)) {
1035 // slot can be disposed
1036 f( val, slot.ptr());
1038 stats().onUpdateExisting();
1039 goto update_existing_done;
1042 stats().onUpdateRetry();
1046 // the slot must be expanded
1047 base_class::expand_slot( pos, slot );
// bInsert == false and hash not present: report failure without inserting.
1050 stats().onUpdateFailed();
1051 return std::make_pair(false, false);
1056 // the slot is empty, try to insert data node
1059 if ( pos.pArr->nodes[pos.nSlot].compare_exchange_strong(pNull, node_ptr(&val), memory_model::memory_order_release, atomics::memory_order_relaxed))
1061 // the new data node has been inserted
1064 stats().onUpdateNew();
1065 stats().height( pos.nHeight );
1066 return std::make_pair(true, true);
1070 stats().onUpdateFailed();
1071 return std::make_pair(false, false);
1074 // insert failed - slot has been changed by another thread
1076 stats().onUpdateRetry();
1080 stats().onSlotChanged();
1084 // retire_ptr must be called only outside of RCU lock
1085 update_existing_done:
1087 gc::template retire_ptr<disposer>(pOld);
1088 return std::make_pair(true, false);
// Core unlink primitive: finds the slot for \p hash and, if the stored item
// is hash-equal AND satisfies \p pred, CAS-es the slot to nullptr. Must be
// called under the RCU read lock; disposal is the caller's responsibility.
// NOTE(review): retry loop header, item-counter decrement, braces and return
// statements are elided in this listing.
1091 template <typename Predicate>
1092 value_type * do_erase( hash_type const& hash, Predicate pred)
1094 assert(gc::is_locked());
1095 traverse_data pos( hash, *this );
1096 hash_comparator cmp;
1099 node_ptr slot = base_class::traverse( pos );
1100 assert( slot.bits() == 0 );
// Re-read the slot to verify it did not change while we were traversing down.
1102 if ( pos.pArr->nodes[pos.nSlot].load( memory_model::memory_order_acquire ) == slot ) {
1104 if ( cmp( hash, hash_accessor()(*slot.ptr())) == 0 && pred( *slot.ptr())) {
1105 // item found - replace it with nullptr
// NOTE(review): success ordering here is acquire (erase hides, not publishes,
// data) — matches upstream; confirm against the full source before changing.
1106 if ( pos.pArr->nodes[pos.nSlot].compare_exchange_strong( slot, node_ptr( nullptr ), memory_model::memory_order_acquire, atomics::memory_order_relaxed )) {
1108 stats().onEraseSuccess();
1112 stats().onEraseRetry();
1115 stats().onEraseFailed();
1119 // the slot is empty
1120 stats().onEraseFailed();
1125 stats().onSlotChanged();
// Lookup primitive: walks the multi-level array to the slot for \p hash and
// returns the stored pointer on a hash match, retrying if the slot changes
// underneath us. Must be called under the RCU read lock.
// NOTE(review): the retry loop header, braces and return statements are
// elided in this listing.
1129 value_type * search(hash_type const& hash )
1131 assert( gc::is_locked());
1132 traverse_data pos( hash, *this );
1133 hash_comparator cmp;
1136 node_ptr slot = base_class::traverse( pos );
1137 assert( slot.bits() == 0 );
1139 if ( pos.pArr->nodes[pos.nSlot].load( memory_model::memory_order_acquire ) != slot ) {
1140 // slot value has been changed - retry
1141 stats().onSlotChanged();
// Non-null slot whose hash matches: found.
1144 else if ( slot.ptr() && cmp( hash, hash_accessor()(*slot.ptr())) == 0 ) {
1146 stats().onFindSuccess();
1149 stats().onFindFailed();
// Recursively empties one array node: descends into child array nodes, waits
// out in-progress slot conversions, and CAS-es data slots to null, retiring
// each unlinked item to the disposer via RCU. Non-atomic overall (see clear()).
// NOTE(review): back_off usage, braces, continue statements and the item
// counter decrement are elided in this listing.
1158 void clear_array(array_node * pArrNode, size_t nSize)
1163 for (atomic_node_ptr * pArr = pArrNode->nodes, *pLast = pArr + nSize; pArr != pLast; ++pArr) {
1165 node_ptr slot = pArr->load(memory_model::memory_order_acquire);
1166 if (slot.bits() == base_class::flag_array_node ) {
1167 // array node, go down the tree
1168 assert(slot.ptr() != nullptr);
1169 clear_array(to_array(slot.ptr()), array_node_size());
1172 else if (slot.bits() == base_class::flag_array_converting ) {
1173 // the slot is converting to array node right now
// Spin until the conversion finishes, then recurse into the new child node.
1174 while ((slot = pArr->load(memory_model::memory_order_acquire)).bits() == base_class::flag_array_converting ) {
1176 stats().onSlotConverting();
1180 assert(slot.ptr() != nullptr);
1181 assert(slot.bits() == base_class::flag_array_node );
1182 clear_array(to_array(slot.ptr()), array_node_size());
// Plain data slot: unlink it and defer disposal past the RCU grace period.
1187 if (pArr->compare_exchange_strong(slot, node_ptr(), memory_model::memory_order_acquire, atomics::memory_order_relaxed)) {
1189 gc::template retire_ptr<disposer>(slot.ptr());
1191 stats().onEraseSuccess();
1202 }} // namespace cds::intrusive
1204 #endif // #ifndef CDSLIB_INTRUSIVE_FELDMAN_HASHSET_RCU_H