3 #ifndef CDSLIB_INTRUSIVE_FELDMAN_HASHSET_RCU_H
4 #define CDSLIB_INTRUSIVE_FELDMAN_HASHSET_RCU_H
6 #include <functional> // std::ref
7 #include <iterator> // std::iterator_traits
10 #include <cds/intrusive/details/feldman_hashset_base.h>
11 #include <cds/details/allocator.h>
12 #include <cds/urcu/details/check_deadlock.h>
13 #include <cds/urcu/exempt_ptr.h>
14 #include <cds/intrusive/details/raw_ptr_disposer.h>
17 namespace cds { namespace intrusive {
18 /// Intrusive hash set based on multi-level array, \ref cds_urcu_desc "RCU" specialization
19 /** @ingroup cds_intrusive_map
20 @anchor cds_intrusive_FeldmanHashSet_rcu
23 - [2013] Steven Feldman, Pierre LaBorde, Damian Dechev "Concurrent Multi-level Arrays:
24 Wait-free Extensible Hash Maps"
26 See algorithm short description @ref cds_intrusive_FeldmanHashSet_hp "here"
28 @note Two important things you should keep in mind when you're using \p %FeldmanHashSet:
29 - all keys must be fixed-size. It means that you cannot use \p std::string as a key for \p %FeldmanHashSet.
30 Instead, for the strings you should use well-known hashing algorithms like <a href="https://en.wikipedia.org/wiki/Secure_Hash_Algorithm">SHA1, SHA2</a>,
31 <a href="https://en.wikipedia.org/wiki/MurmurHash">MurmurHash</a>, <a href="https://en.wikipedia.org/wiki/CityHash">CityHash</a>
32 or its successor <a href="https://code.google.com/p/farmhash/">FarmHash</a> and so on, which
33 converts variable-length strings to fixed-length bit-strings, and use that hash as a key in \p %FeldmanHashSet.
34 - \p %FeldmanHashSet uses a perfect hashing. It means that if two different keys, for example, of type \p std::string,
35 have identical hash then you cannot insert both of those keys into the set. \p %FeldmanHashSet does not maintain the key,
36 it maintains its fixed-size hash value.
39 - \p RCU - one of \ref cds_urcu_gc "RCU type"
40 - \p T - a value type to be stored in the set
41 - \p Traits - type traits, the structure based on \p feldman_hashset::traits or result of \p feldman_hashset::make_traits metafunction.
42 \p Traits is the mandatory argument because it has one mandatory type - an @ref feldman_hashset::traits::hash_accessor "accessor"
43 to hash value of \p T. The set algorithm does not calculate that hash value.
45 @note Before including <tt><cds/intrusive/feldman_hashset_rcu.h></tt> you should include appropriate RCU header file,
46 see \ref cds_urcu_gc "RCU type" for list of existing RCU class and corresponding header files.
48 The set supports @ref cds_intrusive_FeldmanHashSet_rcu_iterators "bidirectional thread-safe iterators"
49 with some restrictions.
54 #ifdef CDS_DOXYGEN_INVOKED
55 class Traits = feldman_hashset::traits
60 class FeldmanHashSet< cds::urcu::gc< RCU >, T, Traits >: protected feldman_hashset::multilevel_array<T, Traits>
62 typedef feldman_hashset::multilevel_array<T, Traits> base_class;
64 typedef cds::urcu::gc< RCU > gc; ///< RCU garbage collector
65 typedef T value_type; ///< type of value stored in the set
66 typedef Traits traits; ///< Traits template parameter
68 typedef typename traits::hash_accessor hash_accessor; ///< Hash accessor functor
69 typedef typename base_class::hash_type hash_type; ///< Hash type deduced from \p hash_accessor return type
70 typedef typename traits::disposer disposer; ///< data node disposer
71 typedef typename base_class::hash_comparator hash_comparator; ///< hash compare functor based on \p traits::compare and \p traits::less options
73 typedef typename traits::item_counter item_counter; ///< Item counter type
74 typedef typename traits::node_allocator node_allocator; ///< Array node allocator
75 typedef typename traits::memory_model memory_model; ///< Memory model
76 typedef typename traits::back_off back_off; ///< Backoff strategy
77 typedef typename traits::stat stat; ///< Internal statistics type
78 typedef typename traits::rcu_check_deadlock rcu_check_deadlock; ///< Deadlock checking policy
79 typedef typename gc::scoped_lock rcu_lock; ///< RCU scoped lock
80 static CDS_CONSTEXPR const bool c_bExtractLockExternal = false; ///< Group of \p extract_xxx functions does not require external locking
82 using exempt_ptr = cds::urcu::exempt_ptr< gc, value_type, value_type, disposer, void >; ///< pointer to extracted node
86 typedef typename base_class::node_ptr node_ptr;
87 typedef typename base_class::atomic_node_ptr atomic_node_ptr;
88 typedef typename base_class::array_node array_node;
89 typedef typename base_class::traverse_data traverse_data;
91 using base_class::to_array;
92 using base_class::to_node;
93 using base_class::stats;
94 using base_class::head;
95 using base_class::metrics;
97 typedef cds::urcu::details::check_deadlock_policy< gc, rcu_check_deadlock> check_deadlock_policy;
102 item_counter m_ItemCounter; ///< Item counter
106 /// Creates empty set
108 @param head_bits: 2<sup>head_bits</sup> specifies the size of head array, minimum is 4.
109 @param array_bits: 2<sup>array_bits</sup> specifies the size of array node, minimum is 2.
111 Equation for \p head_bits and \p array_bits:
113 sizeof(hash_type) * 8 == head_bits + N * array_bits
115 where \p N is multi-level array depth.
// Constructs an empty set: 2^head_bits slots in the head array,
// 2^array_bits slots in each interior array node (see equation above).
117 FeldmanHashSet(size_t head_bits = 8, size_t array_bits = 4)
118 : base_class(head_bits, array_bits)
121 /// Destructs the set and frees all data
129 The function inserts \p val in the set if it does not contain
130 an item with that hash.
132 Returns \p true if \p val is placed into the set, \p false otherwise.
134 The function locks RCU internally.
136 bool insert( value_type& val )
138 return insert( val, [](value_type&) {} );
143 This function is intended for derived non-intrusive containers.
145 The function allows to split creating of new item into two part:
146 - create item with key only
147 - insert new item into the set
148 - if inserting succeeds, calls \p f functor to initialize \p val.
150 The functor signature is:
152 void func( value_type& val );
154 where \p val is the item inserted.
156 The user-defined functor is called only if the inserting succeeds.
158 The function locks RCU internally.
159 @warning See \ref cds_intrusive_item_creating "insert item troubleshooting".
161 template <typename Func>
162 bool insert( value_type& val, Func f )
// Compute the item's hash and walk the multi-level array down to the
// slot where that hash belongs.
164 hash_type const& hash = hash_accessor()( val );
165 traverse_data pos( hash, *this );
169 node_ptr slot = base_class::traverse( pos );
170 assert(slot.bits() == 0);
// Re-read the slot: if it changed since traverse(), another thread raced
// with us and we fall through to onSlotChanged()/retry.
173 if ( pos.pArr->nodes[pos.nSlot].load(memory_model::memory_order_acquire) == slot) {
175 if ( cmp( hash, hash_accessor()(*slot.ptr())) == 0 ) {
176 // the item with that hash value already exists
177 stats().onInsertFailed();
// Occupied by an item with a different hash: convert this data slot
// into a child array node, then retry the traversal.
181 // the slot must be expanded
182 base_class::expand_slot( pos, slot );
185 // the slot is empty, try to insert data node
// CAS nullptr -> &val; release order publishes the node to readers.
187 if ( pos.pArr->nodes[pos.nSlot].compare_exchange_strong(pNull, node_ptr(&val), memory_model::memory_order_release, atomics::memory_order_relaxed))
189 // the new data node has been inserted
// NOTE(review): per the contract above, f(val) should be invoked on
// successful insert — the call site is elided in this excerpt; confirm.
192 stats().onInsertSuccess();
193 stats().height( pos.nHeight );
197 // insert failed - slot has been changed by another thread
199 stats().onInsertRetry();
203 stats().onSlotChanged();
209 Performs inserting or updating the item with hash value equal to \p val.
210 - If hash value is found then existing item is replaced with \p val, old item is disposed
211 with \p Traits::disposer. Note that the disposer is called by \p GC asynchronously.
212 The function returns <tt> std::pair<true, false> </tt>
213 - If hash value is not found and \p bInsert is \p true then \p val is inserted,
214 the function returns <tt> std::pair<true, true> </tt>
215 - If hash value is not found and \p bInsert is \p false then the set is unchanged,
216 the function returns <tt> std::pair<false, false> </tt>
218 Returns <tt> std::pair<bool, bool> </tt> where \p first is \p true if operation is successful
219 (i.e. the item has been inserted or updated),
220 \p second is \p true if new item has been added or \p false if the set contains that hash.
222 The function locks RCU internally.
224 std::pair<bool, bool> update( value_type& val, bool bInsert = true )
226 return do_update(val, [](value_type&, value_type *) {}, bInsert );
229 /// Unlinks the item \p val from the set
231 The function searches the item \p val in the set and unlink it
232 if it is found and its address is equal to <tt>&val</tt>.
234 The function returns \p true if success and \p false otherwise.
236 RCU should not be locked. The function locks RCU internally.
238 bool unlink( value_type const& val )
// Caller must NOT hold the RCU lock (checked here); the lock is taken internally.
240 check_deadlock_policy::check();
// The predicate matches by node address, so only `val` itself can be
// unlinked — not another item that merely has the same hash.
242 auto pred = [&val](value_type const& item) -> bool { return &item == &val; };
246 p = do_erase( hash_accessor()( val ), std::ref( pred ));
// The disposer runs asynchronously via RCU retirement.
249 gc::template retire_ptr<disposer>( p );
255 /// Deletes the item from the set
257 The function searches \p hash in the set,
258 unlinks the item found, and returns \p true.
259 If that item is not found the function returns \p false.
261 The \ref disposer specified in \p Traits is called by garbage collector \p GC asynchronously.
263 RCU should not be locked. The function locks RCU internally.
265 bool erase( hash_type const& hash )
267 return erase(hash, [](value_type const&) {} );
270 /// Deletes the item from the set
272 The function searches \p hash in the set,
273 call \p f functor with item found, and unlinks it from the set.
274 The \ref disposer specified in \p Traits is called
275 by garbage collector \p GC asynchronously.
277 The \p Func interface is
280 void operator()( value_type& item );
284 If \p hash is not found the function returns \p false.
286 RCU should not be locked. The function locks RCU internally.
288 template <typename Func>
289 bool erase( hash_type const& hash, Func f )
// Caller must NOT hold the RCU lock; the lock is taken internally.
291 check_deadlock_policy::check();
// The always-true predicate means matching is by hash only.
297 p = do_erase( hash, []( value_type const&) -> bool { return true; } );
300 // p is guarded by HP
// NOTE(review): the comment above mentions HP (Hazard Pointers) but this is
// the RCU specialization — likely a leftover from the HP version; confirm.
303 gc::template retire_ptr<disposer>(p);
309 /// Extracts the item with specified \p hash
311 The function searches \p hash in the set,
312 unlinks it from the set, and returns \ref cds::urcu::exempt_ptr "exempt_ptr" pointer to the item found.
313 If the item with key equal to \p key is not found the function returns an empty \p exempt_ptr.
315 RCU \p synchronize method can be called. RCU should NOT be locked.
316 The function does not call the disposer for the item found.
317 The disposer will be implicitly invoked when the returned object is destroyed or when
318 its \p release() member function is called.
321 typedef cds::intrusive::FeldmanHashSet< cds::urcu::gc< cds::urcu::general_buffered<> >, foo, my_traits > set_type;
325 typename set_type::exempt_ptr ep( theSet.extract( 5 ));
330 // Dispose returned item.
335 exempt_ptr extract( hash_type const& hash )
// Caller must NOT hold the RCU lock; the lock is taken internally.
337 check_deadlock_policy::check();
// Unlink by hash (the predicate always accepts) and hand ownership to the
// returned exempt_ptr; its destructor / release() invokes the disposer.
342 p = do_erase( hash, []( value_type const&) -> bool {return true;} );
344 return exempt_ptr( p );
347 /// Finds an item by its \p hash
349 The function searches the item by \p hash and calls the functor \p f for item found.
350 The interface of \p Func functor is:
353 void operator()( value_type& item );
356 where \p item is the item found.
358 The functor may change non-key fields of \p item. Note that the only guarantee
359 is that \p item cannot be disposed while the functor is executing.
360 The functor does not serialize simultaneous access to the set's \p item. If such access is
361 possible you must provide your own synchronization schema on item level to prevent unsafe item modifications.
363 The function returns \p true if \p hash is found, \p false otherwise.
365 The function applies RCU lock internally.
367 template <typename Func>
368 bool find( hash_type const& hash, Func f )
// search() requires the RCU lock; the lock acquisition is internal
// (elided in this excerpt). If an item is found, f is applied to it —
// TODO confirm the call site against the full body.
372 value_type * p = search( hash );
380 /// Checks whether the set contains \p hash
382 The function searches the item by its \p hash
383 and returns \p true if it is found, or \p false otherwise.
385 The function applies RCU lock internally.
387 bool contains( hash_type const& hash )
389 return find( hash, [](value_type&) {} );
392 /// Finds an item by its \p hash and returns the item found
394 The function searches the item by its \p hash
395 and returns the pointer to the item found.
396 If \p hash is not found the function returns \p nullptr.
398 RCU should be locked before the function invocation.
399 Returned pointer is valid only while RCU is locked.
403 typedef cds::intrusive::FeldmanHashSet< your_template_params > my_set;
410 foo * p = theSet.get( 5 );
418 value_type * get( hash_type const& hash )
// Unlike find()/contains(), the CALLER must hold the RCU lock; the
// returned pointer is valid only while that lock is held.
420 assert( gc::is_locked());
421 return search( hash );
424 /// Clears the set (non-atomic)
426 The function unlinks all data nodes from the set.
427 The function is not atomic but is thread-safe.
428 After \p %clear() the set may not be empty because another threads may insert items.
430 For each item the \p disposer is called after unlinking.
434 clear_array( head(), head_size() );
437 /// Checks if the set is empty
439 Emptiness is checked by item counting: if item count is zero then the set is empty.
440 Thus, the correct item counting feature is an important part of the set implementation.
447 /// Returns item count in the set
// Accuracy depends on the item_counter type chosen in Traits.
450 return m_ItemCounter;
453 /// Returns const reference to internal statistics
454 stat const& statistics() const
459 /// Returns the size of head node
460 using base_class::head_size;
462 /// Returns the size of the array node
463 using base_class::array_node_size;
465 /// Collects tree level statistics into \p stat
466 /** @copydetails cds::intrusive::FeldmanHashSet::get_level_statistics
// Collects per-level statistics of the multi-level array into \p stat
// by delegating to the base multilevel_array implementation.
468 void get_level_statistics(std::vector<feldman_hashset::level_statistics>& stat) const
470 base_class::get_level_statistics(stat);
// iterator_base: common engine for all bidirectional/reverse (const and
// non-const) iterators. It tracks the current array node, the slot index
// inside that node, and caches the current value pointer.
477 friend class FeldmanHashSet;
480 array_node * m_pNode; ///< current array node
481 size_t m_idx; ///< current position in m_pNode
482 value_type * m_pValue; ///< current value
483 FeldmanHashSet const* m_set; ///< Hash set
486 iterator_base() CDS_NOEXCEPT
493 iterator_base(iterator_base const& rhs) CDS_NOEXCEPT
494 : m_pNode(rhs.m_pNode)
496 , m_pValue(rhs.m_pValue)
500 iterator_base& operator=(iterator_base const& rhs) CDS_NOEXCEPT
502 m_pNode = rhs.m_pNode;
504 m_pValue = rhs.m_pValue;
// Pre-increment/decrement move to the next/previous occupied slot.
509 iterator_base& operator++()
515 iterator_base& operator--()
// Iterators are equal iff they reference the same slot of the same set;
// note m_pValue intentionally does not participate in the comparison.
521 bool operator ==(iterator_base const& rhs) const CDS_NOEXCEPT
523 return m_pNode == rhs.m_pNode && m_idx == rhs.m_idx && m_set == rhs.m_set;
526 bool operator !=(iterator_base const& rhs) const CDS_NOEXCEPT
528 return !(*this == rhs);
// The trailing bool parameter distinguishes the "end-position" constructor
// (no initial scan) from the scanning constructor below it.
532 iterator_base(FeldmanHashSet const& set, array_node * pNode, size_t idx, bool)
539 iterator_base(FeldmanHashSet const& set, array_node * pNode, size_t idx)
// Raw access to the current item; requires the RCU lock to be held.
548 value_type * pointer() const CDS_NOEXCEPT
550 assert(gc::is_locked());
// forward(): scan rightwards within the current node, descending into
// child array nodes and climbing back to the parent when a node is
// exhausted. Requires the RCU lock.
556 assert( gc::is_locked());
557 assert(m_set != nullptr);
558 assert(m_pNode != nullptr);
560 size_t const arrayNodeSize = m_set->array_node_size();
561 size_t const headSize = m_set->head_size();
562 array_node * pNode = m_pNode;
563 size_t idx = m_idx + 1;
// The head array and interior array nodes may differ in size.
564 size_t nodeSize = m_pNode->pParent ? arrayNodeSize : headSize;
567 if (idx < nodeSize) {
568 node_ptr slot = pNode->nodes[idx].load(memory_model::memory_order_acquire);
569 if (slot.bits() == base_class::flag_array_node ) {
570 // array node, go down the tree
571 assert(slot.ptr() != nullptr);
572 pNode = to_array(slot.ptr());
574 nodeSize = arrayNodeSize;
576 else if (slot.bits() == base_class::flag_array_converting ) {
577 // the slot is converting to array node right now - skip the node
// Plain data slot: cache the value pointer and stop here.
585 m_pValue = slot.ptr();
// Current node exhausted: climb to the parent and continue after the
// slot we descended from.
593 if (pNode->pParent) {
594 idx = pNode->idxParent + 1;
595 pNode = pNode->pParent;
596 nodeSize = pNode->pParent ? arrayNodeSize : headSize;
// Ran past the head array: this is the end() position.
600 assert(pNode == m_set->head());
601 assert(idx == headSize);
// backward(): mirror image of forward(), scanning leftwards;
// endIdx == size_t(0) - 1 marks the before-begin (rend) sentinel.
613 assert(gc::is_locked());
614 assert(m_set != nullptr);
615 assert(m_pNode != nullptr);
617 size_t const arrayNodeSize = m_set->array_node_size();
618 size_t const headSize = m_set->head_size();
619 size_t const endIdx = size_t(0) - 1;
621 array_node * pNode = m_pNode;
622 size_t idx = m_idx - 1;
623 size_t nodeSize = m_pNode->pParent ? arrayNodeSize : headSize;
627 node_ptr slot = pNode->nodes[idx].load(memory_model::memory_order_acquire);
628 if (slot.bits() == base_class::flag_array_node ) {
629 // array node, go down the tree
630 assert(slot.ptr() != nullptr);
631 pNode = to_array(slot.ptr());
632 nodeSize = arrayNodeSize;
635 else if (slot.bits() == base_class::flag_array_converting ) {
636 // the slot is converting to array node right now - skip the node
644 m_pValue = slot.ptr();
652 if (pNode->pParent) {
653 idx = pNode->idxParent - 1;
654 pNode = pNode->pParent;
655 nodeSize = pNode->pParent ? arrayNodeSize : headSize;
// Ran before the head array: this is the rend() position.
659 assert(pNode == m_set->head());
660 assert(idx == endIdx);
// Factory helpers behind begin()/end()/rbegin()/rend(). The index
// size_t(0) - 1 is the before-begin sentinel; the trailing bool selects
// the non-scanning "end position" iterator constructor.
671 template <class Iterator>
672 Iterator init_begin() const
674 return Iterator(*this, head(), size_t(0) - 1);
677 template <class Iterator>
678 Iterator init_end() const
680 return Iterator(*this, head(), head_size(), false);
683 template <class Iterator>
684 Iterator init_rbegin() const
686 return Iterator(*this, head(), head_size());
689 template <class Iterator>
690 Iterator init_rend() const
692 return Iterator(*this, head(), size_t(0) - 1, false);
695 /// Bidirectional iterator class
// Forward bidirectional iterator. A thin facade over iterator_base that
// adds const/non-const value access (selected by IsConst).
696 template <bool IsConst>
697 class bidirectional_iterator : protected iterator_base
699 friend class FeldmanHashSet;
702 static CDS_CONSTEXPR bool const c_bConstantIterator = IsConst;
705 typedef typename std::conditional< IsConst, value_type const*, value_type*>::type value_ptr; ///< Value pointer
706 typedef typename std::conditional< IsConst, value_type const&, value_type&>::type value_ref; ///< Value reference
709 bidirectional_iterator() CDS_NOEXCEPT
712 bidirectional_iterator(bidirectional_iterator const& rhs) CDS_NOEXCEPT
716 bidirectional_iterator& operator=(bidirectional_iterator const& rhs) CDS_NOEXCEPT
718 iterator_base::operator=(rhs);
// Increment/decrement delegate directly to the base traversal engine.
722 bidirectional_iterator& operator++()
724 iterator_base::operator++();
728 bidirectional_iterator& operator--()
730 iterator_base::operator--();
// Dereference requires the RCU lock (enforced by iterator_base::pointer()).
734 value_ptr operator ->() const CDS_NOEXCEPT
736 return iterator_base::pointer();
739 value_ref operator *() const CDS_NOEXCEPT
741 value_ptr p = iterator_base::pointer();
// Comparison is allowed between const and non-const iterators (IsConst2).
746 template <bool IsConst2>
747 bool operator ==(bidirectional_iterator<IsConst2> const& rhs) const CDS_NOEXCEPT
749 return iterator_base::operator==(rhs);
752 template <bool IsConst2>
753 bool operator !=(bidirectional_iterator<IsConst2> const& rhs) const CDS_NOEXCEPT
755 return !(*this == rhs);
// Private constructors used by the set's init_* factory helpers.
759 bidirectional_iterator(FeldmanHashSet& set, array_node * pNode, size_t idx, bool)
760 : iterator_base(set, pNode, idx, false)
763 bidirectional_iterator(FeldmanHashSet& set, array_node * pNode, size_t idx)
764 : iterator_base(set, pNode, idx)
768 /// Reverse bidirectional iterator
// Reverse bidirectional iterator: ++ walks backward and -- walks forward
// over the underlying iterator_base traversal.
769 template <bool IsConst>
770 class reverse_bidirectional_iterator : public iterator_base
772 friend class FeldmanHashSet;
775 typedef typename std::conditional< IsConst, value_type const*, value_type*>::type value_ptr; ///< Value pointer
776 typedef typename std::conditional< IsConst, value_type const&, value_type&>::type value_ref; ///< Value reference
779 reverse_bidirectional_iterator() CDS_NOEXCEPT
783 reverse_bidirectional_iterator(reverse_bidirectional_iterator const& rhs) CDS_NOEXCEPT
787 reverse_bidirectional_iterator& operator=(reverse_bidirectional_iterator const& rhs) CDS_NOEXCEPT
789 iterator_base::operator=(rhs);
// Note the inversion: reverse ++ is base --, reverse -- is base ++.
793 reverse_bidirectional_iterator& operator++()
795 iterator_base::operator--();
799 reverse_bidirectional_iterator& operator--()
801 iterator_base::operator++();
805 value_ptr operator ->() const CDS_NOEXCEPT
807 return iterator_base::pointer();
810 value_ref operator *() const CDS_NOEXCEPT
812 value_ptr p = iterator_base::pointer();
// NOTE(review): unlike the forward iterator, these comparisons are not
// declared CDS_NOEXCEPT (and operator!= is not const) — consider aligning.
817 template <bool IsConst2>
818 bool operator ==(reverse_bidirectional_iterator<IsConst2> const& rhs) const
820 return iterator_base::operator==(rhs);
823 template <bool IsConst2>
824 bool operator !=(reverse_bidirectional_iterator<IsConst2> const& rhs)
826 return !(*this == rhs);
// Private constructors: the scanning variant positions itself on the first
// occupied slot by stepping backward once after construction.
830 reverse_bidirectional_iterator(FeldmanHashSet& set, array_node * pNode, size_t idx, bool)
831 : iterator_base(set, pNode, idx, false)
834 reverse_bidirectional_iterator(FeldmanHashSet& set, array_node * pNode, size_t idx)
835 : iterator_base(set, pNode, idx, false)
837 iterator_base::backward();
843 #ifdef CDS_DOXYGEN_INVOKED
844 typedef implementation_defined iterator; ///< @ref cds_intrusive_FeldmanHashSet_rcu_iterators "bidirectional iterator" type
845 typedef implementation_defined const_iterator; ///< @ref cds_intrusive_FeldmanHashSet_rcu_iterators "bidirectional const iterator" type
846 typedef implementation_defined reverse_iterator; ///< @ref cds_intrusive_FeldmanHashSet_rcu_iterators "bidirectional reverse iterator" type
847 typedef implementation_defined const_reverse_iterator; ///< @ref cds_intrusive_FeldmanHashSet_rcu_iterators "bidirectional reverse const iterator" type
849 typedef bidirectional_iterator<false> iterator;
850 typedef bidirectional_iterator<true> const_iterator;
851 typedef reverse_bidirectional_iterator<false> reverse_iterator;
852 typedef reverse_bidirectional_iterator<true> const_reverse_iterator;
855 ///@name Thread-safe iterators
856 /** @anchor cds_intrusive_FeldmanHashSet_rcu_iterators
857 The set supports thread-safe iterators: you may iterate over the set in multi-threaded environment
858 under explicit RCU lock.
859 RCU lock requirement means that inserting or searching is allowed but you must not erase the items from the set
860 since erasing under RCU lock can lead to a deadlock. However, another thread can call \p erase() safely
861 while your thread is iterating.
863 A typical example is:
868 uint32_t payload; // only for example
870 struct set_traits: cds::intrusive::feldman_hashset::traits
872 struct hash_accessor {
873 uint32_t operator()( foo const& src ) const
880 typedef cds::urcu::gc< cds::urcu::general_buffered<>> rcu;
881 typedef cds::intrusive::FeldmanHashSet< rcu, foo, set_traits > set_type;
887 // iterate over the set
890 typename set_type::rcu_lock l; // scoped RCU lock
893 for ( auto i = s.begin(); i != s.end(); ++i ) {
894 // deal with i. Remember, erasing is prohibited here!
897 } // at this point RCU lock is released
900 Each iterator object supports the common interface:
901 - dereference operators:
903 value_type [const] * operator ->() noexcept
904 value_type [const] & operator *() noexcept
906 - pre-increment and pre-decrement. Post-operators are not supported
907 - equality operators <tt>==</tt> and <tt>!=</tt>.
908 Iterators are equal iff they point to the same cell of the same array node.
909 Note that for two iterators \p it1 and \p it2 the condition <tt> it1 == it2 </tt>
910 does not entail <tt> &(*it1) == &(*it2) </tt>: welcome to concurrent containers
912 @note It is possible that an item can be iterated more than once, for example, if an iterator points to the item
913 in an array node that is being split.
// Iterator factory family. All of these require the caller to hold the
// RCU lock for the whole iteration (see the notes above); the sentinel
// index size_t(0) - 1 denotes the before-begin position.
917 /// Returns an iterator to the beginning of the set
920 return iterator(*this, head(), size_t(0) - 1);
923 /// Returns an const iterator to the beginning of the set
924 const_iterator begin() const
926 return const_iterator(*this, head(), size_t(0) - 1);
929 /// Returns an const iterator to the beginning of the set
// NOTE(review): cbegin()/cend()/crbegin()/crend() are declared non-const
// here — confirm whether they should be const member functions.
930 const_iterator cbegin()
932 return const_iterator(*this, head(), size_t(0) - 1);
935 /// Returns an iterator to the element following the last element of the set. This element acts as a placeholder; attempting to access it results in undefined behavior.
938 return iterator(*this, head(), head_size(), false);
941 /// Returns a const iterator to the element following the last element of the set. This element acts as a placeholder; attempting to access it results in undefined behavior.
942 const_iterator end() const
944 return const_iterator(*this, head(), head_size(), false);
947 /// Returns a const iterator to the element following the last element of the set. This element acts as a placeholder; attempting to access it results in undefined behavior.
948 const_iterator cend()
950 return const_iterator(*this, head(), head_size(), false);
953 /// Returns a reverse iterator to the first element of the reversed set
956 return reverse_iterator(*this, head(), head_size());
959 /// Returns a const reverse iterator to the first element of the reversed set
960 const_reverse_iterator rbegin() const
962 return const_reverse_iterator(*this, head(), head_size());
965 /// Returns a const reverse iterator to the first element of the reversed set
966 const_reverse_iterator crbegin()
968 return const_reverse_iterator(*this, head(), head_size());
971 /// Returns a reverse iterator to the element following the last element of the reversed set
973 It corresponds to the element preceding the first element of the non-reversed container.
974 This element acts as a placeholder, attempting to access it results in undefined behavior.
976 reverse_iterator rend()
978 return reverse_iterator(*this, head(), size_t(0) - 1, false);
981 /// Returns a const reverse iterator to the element following the last element of the reversed set
983 It corresponds to the element preceding the first element of the non-reversed container.
984 This element acts as a placeholder, attempting to access it results in undefined behavior.
986 const_reverse_iterator rend() const
988 return const_reverse_iterator(*this, head(), size_t(0) - 1, false);
991 /// Returns a const reverse iterator to the element following the last element of the reversed set
993 It corresponds to the element preceding the first element of the non-reversed container.
994 This element acts as a placeholder, attempting to access it results in undefined behavior.
996 const_reverse_iterator crend()
998 return const_reverse_iterator(*this, head(), size_t(0) - 1, false);
// do_update: insert-or-replace engine behind update(). Returns
// {success, inserted}. The functor f(new, old) is applied when an existing
// item is replaced; the replaced node is retired after the RCU lock is
// released (see the goto label at the bottom).
1004 template <typename Func>
1005 std::pair<bool, bool> do_update(value_type& val, Func f, bool bInsert = true)
1007 hash_type const& hash = hash_accessor()(val);
1008 traverse_data pos( hash, *this );
1009 hash_comparator cmp;
1013 node_ptr slot = base_class::traverse( pos );
1014 assert(slot.bits() == 0);
// Re-read the slot to detect a race with another thread; on mismatch
// fall through to onSlotChanged()/retry.
1020 if ( pos.pArr->nodes[pos.nSlot].load(memory_model::memory_order_acquire) == slot) {
1022 if ( cmp( hash, hash_accessor()(*slot.ptr())) == 0 ) {
1023 // the item with that hash value already exists
1024 // Replace it with val
// Updating an item with itself is a no-op success.
1025 if ( slot.ptr() == &val ) {
1026 stats().onUpdateExisting();
1027 return std::make_pair(true, false);
// CAS old -> new; the old node must then be disposed (after unlock).
1030 if ( pos.pArr->nodes[pos.nSlot].compare_exchange_strong(slot, node_ptr(&val), memory_model::memory_order_release, atomics::memory_order_relaxed)) {
1031 // slot can be disposed
1032 f( val, slot.ptr());
1034 stats().onUpdateExisting();
1035 goto update_existing_done;
1038 stats().onUpdateRetry();
1042 // the slot must be expanded
1043 base_class::expand_slot( pos, slot );
// bInsert == false and the hash is absent: report failure.
1046 stats().onUpdateFailed();
1047 return std::make_pair(false, false);
1052 // the slot is empty, try to insert data node
1055 if ( pos.pArr->nodes[pos.nSlot].compare_exchange_strong(pNull, node_ptr(&val), memory_model::memory_order_release, atomics::memory_order_relaxed))
1057 // the new data node has been inserted
1060 stats().onUpdateNew();
1061 stats().height( pos.nHeight );
1062 return std::make_pair(true, true);
1066 stats().onUpdateFailed();
1067 return std::make_pair(false, false);
1070 // insert failed - slot has been changed by another thread
1072 stats().onUpdateRetry();
1076 stats().onSlotChanged();
1081 // retire_ptr must be called only outside of RCU lock
1082 update_existing_done:
1084 gc::template retire_ptr<disposer>(pOld);
1085 return std::make_pair(true, false);
// do_erase: unlink engine behind erase()/unlink()/extract(). Requires the
// RCU lock held by the caller (asserted). Returns the unlinked node (the
// caller retires it) — confirm the nullptr-on-failure return against the
// full body, which is elided in this excerpt.
1088 template <typename Predicate>
1089 value_type * do_erase( hash_type const& hash, Predicate pred)
1091 assert(gc::is_locked());
1092 traverse_data pos( hash, *this );
1093 hash_comparator cmp;
1096 node_ptr slot = base_class::traverse( pos );
1097 assert( slot.bits() == 0 );
// Re-read to detect a racing writer; on mismatch fall through to retry.
1099 if ( pos.pArr->nodes[pos.nSlot].load( memory_model::memory_order_acquire ) == slot ) {
// Both the hash and the caller-supplied predicate must match.
1101 if ( cmp( hash, hash_accessor()(*slot.ptr()) ) == 0 && pred( *slot.ptr() ) ) {
1102 // item found - replace it with nullptr
1103 if ( pos.pArr->nodes[pos.nSlot].compare_exchange_strong( slot, node_ptr( nullptr ), memory_model::memory_order_acquire, atomics::memory_order_relaxed ) ) {
1105 stats().onEraseSuccess();
1109 stats().onEraseRetry();
1112 stats().onEraseFailed();
1116 // the slot is empty
1117 stats().onEraseFailed();
1122 stats().onSlotChanged();
// search: lookup engine behind find()/contains()/get(). Requires the RCU
// lock held by the caller (asserted); retries when the slot changes
// underneath us.
1126 value_type * search(hash_type const& hash )
1128 assert( gc::is_locked() );
1129 traverse_data pos( hash, *this );
1130 hash_comparator cmp;
1133 node_ptr slot = base_class::traverse( pos );
1134 assert( slot.bits() == 0 );
1136 if ( pos.pArr->nodes[pos.nSlot].load( memory_model::memory_order_acquire ) != slot ) {
1137 // slot value has been changed - retry
1138 stats().onSlotChanged();
// Non-null slot with matching hash: found.
1140 else if ( slot.ptr() && cmp( hash, hash_accessor()(*slot.ptr()) ) == 0 ) {
1142 stats().onFindSuccess();
1145 stats().onFindFailed();
// clear_array: recursively unlinks every data node under pArrNode.
// Child array nodes are descended into; data slots are CAS-ed to null and
// the nodes retired (disposer runs asynchronously via RCU).
1154 void clear_array(array_node * pArrNode, size_t nSize)
1159 for (atomic_node_ptr * pArr = pArrNode->nodes, *pLast = pArr + nSize; pArr != pLast; ++pArr) {
1161 node_ptr slot = pArr->load(memory_model::memory_order_acquire);
1162 if (slot.bits() == base_class::flag_array_node ) {
1163 // array node, go down the tree
1164 assert(slot.ptr() != nullptr);
1165 clear_array(to_array(slot.ptr()), array_node_size());
1168 else if (slot.bits() == base_class::flag_array_converting ) {
1169 // the slot is converting to array node right now
// Spin until the conversion completes, then descend into the new child.
1170 while ((slot = pArr->load(memory_model::memory_order_acquire)).bits() == base_class::flag_array_converting ) {
1172 stats().onSlotConverting();
1176 assert(slot.ptr() != nullptr);
1177 assert(slot.bits() == base_class::flag_array_node );
1178 clear_array(to_array(slot.ptr()), array_node_size());
// Plain data slot: unlink via CAS and retire the node.
1183 if (pArr->compare_exchange_strong(slot, node_ptr(), memory_model::memory_order_acquire, atomics::memory_order_relaxed)) {
1185 gc::template retire_ptr<disposer>(slot.ptr());
1187 stats().onEraseSuccess();
1198 }} // namespace cds::intrusive
1200 #endif // #ifndef CDSLIB_INTRUSIVE_FELDMAN_HASHSET_RCU_H