3 #ifndef CDSLIB_INTRUSIVE_IMPL_FELDMAN_HASHSET_H
4 #define CDSLIB_INTRUSIVE_IMPL_FELDMAN_HASHSET_H
6 #include <functional> // std::ref
7 #include <iterator> // std::iterator_traits
10 #include <cds/intrusive/details/feldman_hashset_base.h>
11 #include <cds/details/allocator.h>
13 namespace cds { namespace intrusive {
14 /// Intrusive hash set based on multi-level array
15 /** @ingroup cds_intrusive_map
16 @anchor cds_intrusive_FeldmanHashSet_hp
19 - [2013] Steven Feldman, Pierre LaBorde, Damian Dechev "Concurrent Multi-level Arrays:
20 Wait-free Extensible Hash Maps"
22 [From the paper] The hardest problem encountered while developing a parallel hash map is how to perform
23 a global resize, the process of redistributing the elements in a hash map that occurs when adding new
24 buckets. The negative impact of blocking synchronization is multiplied during a global resize, because all
25 threads will be forced to wait on the thread that is performing the involved process of resizing the hash map
26 and redistributing the elements. \p %FeldmanHashSet implementation avoids global resizes through new array
27 allocation. By allowing concurrent expansion this structure is free from the overhead of an explicit resize,
28 which facilitates concurrent operations.
30 The presented design includes dynamic hashing, the use of sub-arrays within the hash map data structure;
31 which, in combination with <b>perfect hashing</b>, means that each element has a unique final, as well as current, position.
32 It is important to note that the perfect hash function required by our hash map is trivial to realize as
33 any hash function that permutes the bits of the key is suitable. This is possible because of our approach
34 to the hash function; we require that it produces hash values that are equal in size to that of the key.
35 We know that if we expand the hash map a fixed number of times there can be no collision as duplicate keys
36 are not provided for in the standard semantics of a hash map.
38 \p %FeldmanHashSet is a multi-level array which has a structure similar to a tree:
39 @image html feldman_hashset.png
40 The multi-level array differs from a tree in that each position on the tree could hold an array of nodes or a single node.
41 A position that holds a single node is a \p dataNode which holds the hash value of a key and the value that is associated
42 with that key; it is a simple struct holding two variables. A \p dataNode in the multi-level array could be marked.
43 A \p markedDataNode refers to a pointer to a \p dataNode that has been bitmarked at the least significant bit (LSB)
44 of the pointer to the node. This signifies that this \p dataNode is contended. An expansion must occur at this node;
45 any thread that sees this \p markedDataNode will try to replace it with an \p arrayNode; which is a position that holds
46 an array of nodes. The pointer to an \p arrayNode is differentiated from that of a pointer to a \p dataNode by a bitmark
47 on the second-least significant bit.
49 \p %FeldmanHashSet multi-level array is similar to a tree in that we keep a pointer to the root, which is a memory array
50 called \p head. The length of the \p head memory array is unique, whereas every other \p arrayNode has a uniform length;
51 a normal \p arrayNode has a fixed power-of-two length equal to the binary logarithm of a variable called \p arrayLength.
52 The maximum depth of the tree, \p maxDepth, is the maximum number of pointers that must be followed to reach any node.
53 We define \p currentDepth as the number of memory arrays that we need to traverse to reach the \p arrayNode on which
54 we need to operate; this is initially one, because of \p head.
56 That approach to the structure of the hash set uses an extensible hashing scheme; <b> the hash value is treated as a bit
57 string</b> and rehashed incrementally.
59 @note Two important things you should keep in mind when you're using \p %FeldmanHashSet:
60 - all keys must be fixed-size. It means that you cannot use \p std::string as a key for \p %FeldmanHashSet.
61 Instead, for the strings you should use well-known hashing algorithms like <a href="https://en.wikipedia.org/wiki/Secure_Hash_Algorithm">SHA1, SHA2</a>,
62 <a href="https://en.wikipedia.org/wiki/MurmurHash">MurmurHash</a>, <a href="https://en.wikipedia.org/wiki/CityHash">CityHash</a>
63 or its successor <a href="https://code.google.com/p/farmhash/">FarmHash</a> and so on, which
64 convert variable-length strings to fixed-length bit-strings; use that hash as a key in \p %FeldmanHashSet.
65 - \p %FeldmanHashSet uses a perfect hashing. It means that if two different keys, for example, of type \p std::string,
66 have identical hashes then you cannot insert both of those keys into the set. \p %FeldmanHashSet does not maintain the key,
67 it maintains its fixed-size hash value.
69 The set supports @ref cds_intrusive_FeldmanHashSet_iterators "bidirectional thread-safe iterators".
72 - \p GC - safe memory reclamation schema. Can be \p gc::HP, \p gc::DHP or one of \ref cds_urcu_type "RCU type"
73 - \p T - a value type to be stored in the set
74 - \p Traits - type traits, the structure based on \p feldman_hashset::traits or result of \p feldman_hashset::make_traits metafunction.
75 \p Traits is the mandatory argument because it has one mandatory type - an @ref feldman_hashset::traits::hash_accessor "accessor"
76 to hash value of \p T. The set algorithm does not calculate that hash value.
78 There are several specializations of \p %FeldmanHashSet for each \p GC. You should include:
79 - <tt><cds/intrusive/feldman_hashset_hp.h></tt> for \p gc::HP garbage collector
80 - <tt><cds/intrusive/feldman_hashset_dhp.h></tt> for \p gc::DHP garbage collector
81 - <tt><cds/intrusive/feldman_hashset_rcu.h></tt> for \ref cds_intrusive_FeldmanHashSet_rcu "RCU type". RCU specialization
82 has a slightly different interface.
// NOTE(review): the source text of this file is elided — original line numbers are
// fused into each line and intermediate lines are missing. Code is kept byte-identical;
// only comments are added.
87 #ifdef CDS_DOXYGEN_INVOKED
88 ,typename Traits = feldman_hashset::traits
93 class FeldmanHashSet: protected feldman_hashset::multilevel_array<T, Traits>
// The multi-level array machinery (traversal, slot expansion, node layout)
// lives in the base class; this class adds GC protection and the set API.
96 typedef feldman_hashset::multilevel_array<T, Traits> base_class;
100 typedef GC gc; ///< Garbage collector
101 typedef T value_type; ///< type of value stored in the set
102 typedef Traits traits; ///< Traits template parameter, see \p feldman_hashset::traits
104 typedef typename traits::hash_accessor hash_accessor; ///< Hash accessor functor
105 typedef typename base_class::hash_type hash_type; ///< Hash type deduced from \p hash_accessor return type
106 typedef typename traits::disposer disposer; ///< data node disposer
107 typedef typename base_class::hash_comparator hash_comparator; ///< hash compare functor based on \p traits::compare and \p traits::less options
109 typedef typename traits::item_counter item_counter; ///< Item counter type
110 typedef typename traits::node_allocator node_allocator; ///< Array node allocator
111 typedef typename traits::memory_model memory_model; ///< Memory model
112 typedef typename traits::back_off back_off; ///< Backoff strategy
113 typedef typename traits::stat stat; ///< Internal statistics type
115 typedef typename gc::template guarded_ptr< value_type > guarded_ptr; ///< Guarded pointer
117 /// Count of hazard pointers required
118 static CDS_CONSTEXPR size_t const c_nHazardPtrCount = 2;
// Internal types re-exported from the multi-level array base
122 typedef typename base_class::node_ptr node_ptr;
123 typedef typename base_class::atomic_node_ptr atomic_node_ptr;
124 typedef typename base_class::array_node array_node;
125 typedef typename base_class::traverse_data traverse_data;
127 using base_class::to_array;
128 using base_class::to_node;
129 using base_class::stats;
130 using base_class::head;
131 using base_class::metrics;
// Common implementation of the set's bidirectional iterators: holds the current
// array node, the slot index within it, a hazard-pointer guard protecting the
// item the iterator points to, and the owning set.
// NOTE(review): source text elided here; code kept byte-identical, comments only.
138 friend class FeldmanHashSet;
141 array_node * m_pNode; ///< current array node
142 size_t m_idx; ///< current position in m_pNode
143 typename gc::Guard m_guard; ///< HP guard
144 FeldmanHashSet const* m_set; ///< Hash set
147 iterator_base() CDS_NOEXCEPT
// Copy construction/assignment copy the position and duplicate the HP guard
// so both iterators independently protect the same item.
153 iterator_base( iterator_base const& rhs ) CDS_NOEXCEPT
154 : m_pNode( rhs.m_pNode )
158 m_guard.copy( rhs.m_guard );
161 iterator_base& operator=(iterator_base const& rhs) CDS_NOEXCEPT
163 m_pNode = rhs.m_pNode;
166 m_guard.copy( rhs.m_guard );
170 iterator_base& operator++()
176 iterator_base& operator--()
// Iterators compare equal iff they reference the same slot of the same set;
// note this does not imply the guarded item pointers are equal.
187 bool operator ==(iterator_base const& rhs) const CDS_NOEXCEPT
189 return m_pNode == rhs.m_pNode && m_idx == rhs.m_idx && m_set == rhs.m_set;
192 bool operator !=(iterator_base const& rhs) const CDS_NOEXCEPT
194 return !( *this == rhs );
198 iterator_base( FeldmanHashSet const& set, array_node * pNode, size_t idx, bool )
204 iterator_base( FeldmanHashSet const& set, array_node * pNode, size_t idx )
// Returns the hazard-pointer-protected item (nullptr if nothing is guarded).
212 value_type * pointer() const CDS_NOEXCEPT
214 return m_guard.template get<value_type>();
// forward(): depth-first walk to the next data node. Descends into flag_array_node
// slots, spins past flag_array_converting slots, protects a candidate data node
// and re-checks the slot to validate the protection.
219 assert( m_set != nullptr );
220 assert( m_pNode != nullptr );
222 size_t const arrayNodeSize = m_set->array_node_size();
223 size_t const headSize = m_set->head_size();
224 array_node * pNode = m_pNode;
225 size_t idx = m_idx + 1;
// only the head array can have a size different from array_node_size()
226 size_t nodeSize = m_pNode->pParent? arrayNodeSize : headSize;
229 if ( idx < nodeSize ) {
230 node_ptr slot = pNode->nodes[idx].load( memory_model::memory_order_acquire );
231 if ( slot.bits() == base_class::flag_array_node ) {
232 // array node, go down the tree
233 assert( slot.ptr() != nullptr );
234 pNode = to_array( slot.ptr());
236 nodeSize = arrayNodeSize;
238 else if ( slot.bits() == base_class::flag_array_converting ) {
239 // the slot is converting to array node right now - skip the node
// protection is valid only if the slot still holds the same value after guarding
245 if ( m_guard.protect( pNode->nodes[idx], [](node_ptr p) -> value_type * { return p.ptr(); }) == slot ) {
// end of current node: pop up to the parent and continue after our slot
256 if ( pNode->pParent ) {
257 idx = pNode->idxParent + 1;
258 pNode = pNode->pParent;
259 nodeSize = pNode->pParent ? arrayNodeSize : headSize;
// walked past the head array - this is the end() position
263 assert( pNode == m_set->head());
264 assert( idx == headSize );
// backward(): mirror image of forward(), walking toward lower indices;
// endIdx == size_t(0) - 1 is the one-before-begin sentinel.
275 assert( m_set != nullptr );
276 assert( m_pNode != nullptr );
278 size_t const arrayNodeSize = m_set->array_node_size();
279 size_t const headSize = m_set->head_size();
280 size_t const endIdx = size_t(0) - 1;
282 array_node * pNode = m_pNode;
283 size_t idx = m_idx - 1;
284 size_t nodeSize = m_pNode->pParent? arrayNodeSize : headSize;
287 if ( idx != endIdx ) {
288 node_ptr slot = pNode->nodes[idx].load( memory_model::memory_order_acquire );
289 if ( slot.bits() == base_class::flag_array_node ) {
290 // array node, go down the tree
291 assert( slot.ptr() != nullptr );
292 pNode = to_array( slot.ptr());
293 nodeSize = arrayNodeSize;
296 else if ( slot.bits() == base_class::flag_array_converting ) {
297 // the slot is converting to array node right now - skip the node
303 if ( m_guard.protect( pNode->nodes[idx], [](node_ptr p) -> value_type * { return p.ptr(); }) == slot ) {
314 if ( pNode->pParent ) {
315 idx = pNode->idxParent - 1;
316 pNode = pNode->pParent;
317 nodeSize = pNode->pParent ? arrayNodeSize : headSize;
321 assert( pNode == m_set->head());
322 assert( idx == endIdx );
// Iterator factory helpers. begin/rend use index size_t(0) - 1 (one-before-first
// sentinel); end/rbegin use head_size() (one-past-last). The trailing bool
// selects the iterator_base constructor that does not advance to a data node.
// NOTE(review): source text elided; code kept byte-identical.
332 template <class Iterator>
333 Iterator init_begin() const
335 return Iterator( *this, head(), size_t(0) - 1 );
338 template <class Iterator>
339 Iterator init_end() const
341 return Iterator( *this, head(), head_size(), false );
344 template <class Iterator>
345 Iterator init_rbegin() const
347 return Iterator( *this, head(), head_size());
350 template <class Iterator>
351 Iterator init_rend() const
353 return Iterator( *this, head(), size_t(0) - 1, false );
356 /// Bidirectional iterator class
// Thin typed wrapper over iterator_base; IsConst selects const/non-const
// pointer and reference types. NOTE(review): source text elided; code kept
// byte-identical.
357 template <bool IsConst>
358 class bidirectional_iterator: protected iterator_base
360 friend class FeldmanHashSet;
363 static CDS_CONSTEXPR bool const c_bConstantIterator = IsConst;
366 typedef typename std::conditional< IsConst, value_type const*, value_type*>::type value_ptr; ///< Value pointer
367 typedef typename std::conditional< IsConst, value_type const&, value_type&>::type value_ref; ///< Value reference
370 bidirectional_iterator() CDS_NOEXCEPT
373 bidirectional_iterator( bidirectional_iterator const& rhs ) CDS_NOEXCEPT
374 : iterator_base( rhs )
377 bidirectional_iterator& operator=(bidirectional_iterator const& rhs) CDS_NOEXCEPT
379 iterator_base::operator=( rhs );
// pre-increment/pre-decrement delegate to iterator_base forward/backward walk
383 bidirectional_iterator& operator++()
385 iterator_base::operator++();
389 bidirectional_iterator& operator--()
391 iterator_base::operator--();
// dereference returns the hazard-pointer-protected item
395 value_ptr operator ->() const CDS_NOEXCEPT
397 return iterator_base::pointer();
400 value_ref operator *() const CDS_NOEXCEPT
402 value_ptr p = iterator_base::pointer();
// release() clears the internal hazard pointer; the iterator stays usable
409 iterator_base::release();
// comparison is allowed between const and non-const iterators
412 template <bool IsConst2>
413 bool operator ==(bidirectional_iterator<IsConst2> const& rhs) const CDS_NOEXCEPT
415 return iterator_base::operator==( rhs );
418 template <bool IsConst2>
419 bool operator !=(bidirectional_iterator<IsConst2> const& rhs) const CDS_NOEXCEPT
421 return !( *this == rhs );
425 bidirectional_iterator( FeldmanHashSet& set, array_node * pNode, size_t idx, bool )
426 : iterator_base( set, pNode, idx, false )
429 bidirectional_iterator( FeldmanHashSet& set, array_node * pNode, size_t idx )
430 : iterator_base( set, pNode, idx )
434 /// Reverse bidirectional iterator
// Same wrapper as bidirectional_iterator but with ++/-- swapped to walk the
// set in reverse. NOTE(review): source text elided; code kept byte-identical.
435 template <bool IsConst>
436 class reverse_bidirectional_iterator : public iterator_base
438 friend class FeldmanHashSet;
441 typedef typename std::conditional< IsConst, value_type const*, value_type*>::type value_ptr; ///< Value pointer
442 typedef typename std::conditional< IsConst, value_type const&, value_type&>::type value_ref; ///< Value reference
445 reverse_bidirectional_iterator() CDS_NOEXCEPT
449 reverse_bidirectional_iterator( reverse_bidirectional_iterator const& rhs ) CDS_NOEXCEPT
450 : iterator_base( rhs )
453 reverse_bidirectional_iterator& operator=( reverse_bidirectional_iterator const& rhs) CDS_NOEXCEPT
455 iterator_base::operator=( rhs );
// reverse iteration: ++ walks backward, -- walks forward
459 reverse_bidirectional_iterator& operator++()
461 iterator_base::operator--();
465 reverse_bidirectional_iterator& operator--()
467 iterator_base::operator++();
471 value_ptr operator ->() const CDS_NOEXCEPT
473 return iterator_base::pointer();
476 value_ref operator *() const CDS_NOEXCEPT
478 value_ptr p = iterator_base::pointer();
485 iterator_base::release();
488 template <bool IsConst2>
489 bool operator ==(reverse_bidirectional_iterator<IsConst2> const& rhs) const
491 return iterator_base::operator==( rhs );
494 template <bool IsConst2>
495 bool operator !=(reverse_bidirectional_iterator<IsConst2> const& rhs)
497 return !( *this == rhs );
501 reverse_bidirectional_iterator( FeldmanHashSet& set, array_node * pNode, size_t idx, bool )
502 : iterator_base( set, pNode, idx, false )
// positioning constructor: start at the given slot without advancing, then
// step backward once to land on the first data node in reverse order
505 reverse_bidirectional_iterator( FeldmanHashSet& set, array_node * pNode, size_t idx )
506 : iterator_base( set, pNode, idx, false )
508 iterator_base::backward();
// Public iterator typedefs (documented versions for Doxygen, concrete ones
// otherwise), the item counter, and the constructor.
// NOTE(review): source text elided; code kept byte-identical.
514 #ifdef CDS_DOXYGEN_INVOKED
515 typedef implementation_defined iterator; ///< @ref cds_intrusive_FeldmanHashSet_iterators "bidirectional iterator" type
516 typedef implementation_defined const_iterator; ///< @ref cds_intrusive_FeldmanHashSet_iterators "bidirectional const iterator" type
517 typedef implementation_defined reverse_iterator; ///< @ref cds_intrusive_FeldmanHashSet_iterators "bidirectional reverse iterator" type
518 typedef implementation_defined const_reverse_iterator; ///< @ref cds_intrusive_FeldmanHashSet_iterators "bidirectional reverse const iterator" type
520 typedef bidirectional_iterator<false> iterator;
521 typedef bidirectional_iterator<true> const_iterator;
522 typedef reverse_bidirectional_iterator<false> reverse_iterator;
523 typedef reverse_bidirectional_iterator<true> const_reverse_iterator;
528 item_counter m_ItemCounter; ///< Item counter
532 /// Creates empty set
534 @param head_bits: 2<sup>head_bits</sup> specifies the size of head array, minimum is 4.
535 @param array_bits: 2<sup>array_bits</sup> specifies the size of array node, minimum is 2.
537 Equation for \p head_bits and \p array_bits:
539 sizeof(hash_type) * 8 == head_bits + N * array_bits
541 where \p N is multi-level array depth.
543 FeldmanHashSet( size_t head_bits = 8, size_t array_bits = 4 )
544 : base_class( head_bits, array_bits )
547 /// Destructs the set and frees all data
555 The function inserts \p val in the set if it does not contain
556 an item with that hash.
558 Returns \p true if \p val is placed into the set, \p false otherwise.
// Simple overload: delegates to the functor overload with a no-op functor.
// NOTE(review): source text elided; code kept byte-identical, comments only.
560 bool insert( value_type& val )
562 return insert( val, [](value_type&) {} );
567 This function is intended for derived non-intrusive containers.
569 The function allows splitting creation of a new item into two parts:
570 - create item with key only
571 - insert new item into the set
572 - if insertion succeeds, calls \p f functor to initialize \p val.
574 The functor signature is:
576 void func( value_type& val );
578 where \p val is the item inserted.
580 The user-defined functor is called only if the insertion succeeds.
582 @warning See \ref cds_intrusive_item_creating "insert item troubleshooting".
584 template <typename Func>
585 bool insert( value_type& val, Func f )
// CAS retry loop: traverse to the target slot, protect it with a hazard
// pointer, then either fail (duplicate hash), expand the slot, or CAS the
// new data node into an empty slot.
587 hash_type const& hash = hash_accessor()(val);
588 traverse_data pos( hash, *this );
590 typename gc::Guard guard;
593 node_ptr slot = base_class::traverse( pos );
// a traversed slot must be a data node or empty, never a marked pointer
594 assert( slot.bits() == 0 );
596 // protect data node by hazard pointer
597 if (guard.protect(pos.pArr->nodes[pos.nSlot], [](node_ptr p) -> value_type * { return p.ptr(); }) != slot) {
598 // slot value has been changed - retry
599 stats().onSlotChanged();
603 if ( cmp( hash, hash_accessor()(*slot.ptr())) == 0 ) {
604 // the item with that hash value already exists
605 stats().onInsertFailed();
609 // the slot must be expanded
610 base_class::expand_slot( pos, slot );
613 // the slot is empty, try to insert data node
615 if ( pos.pArr->nodes[pos.nSlot].compare_exchange_strong(pNull, node_ptr(&val), memory_model::memory_order_release, atomics::memory_order_relaxed))
617 // the new data node has been inserted
620 stats().onInsertSuccess();
621 stats().height(pos.nHeight);
625 // insert failed - slot has been changed by another thread
627 stats().onInsertRetry();
634 Performs inserting or updating the item with hash value equal to \p val.
635 - If hash value is found then existing item is replaced with \p val, old item is disposed
636 with \p Traits::disposer. Note that the disposer is called by \p GC asynchronously.
637 The function returns <tt> std::pair<true, false> </tt>
638 - If hash value is not found and \p bInsert is \p true then \p val is inserted,
639 the function returns <tt> std::pair<true, true> </tt>
640 - If hash value is not found and \p bInsert is \p false then the set is unchanged,
641 the function returns <tt> std::pair<false, false> </tt>
643 Returns <tt> std::pair<bool, bool> </tt> where \p first is \p true if operation is successful
644 (i.e. the item has been inserted or updated),
645 \p second is \p true if new item has been added or \p false if the set contains that hash.
// Delegates to do_update() with a no-op functor.
647 std::pair<bool, bool> update( value_type& val, bool bInsert = true )
649 return do_update(val, [](value_type&, value_type *) {}, bInsert );
652 /// Unlinks the item \p val from the set
654 The function searches the item \p val in the set and unlinks it
655 if it is found and its address is equal to <tt>&val</tt>.
657 The function returns \p true if successful and \p false otherwise.
// Uses do_erase with an identity-of-address predicate; std::ref avoids copying
// the predicate closure. NOTE(review): source text elided; code byte-identical.
659 bool unlink( value_type const& val )
661 typename gc::Guard guard;
662 auto pred = [&val](value_type const& item) -> bool { return &item == &val; };
663 value_type * p = do_erase( hash_accessor()( val ), guard, std::ref( pred ));
667 /// Deletes the item from the set
669 The function searches \p hash in the set,
670 unlinks the item found, and returns \p true.
671 If that item is not found the function returns \p false.
673 The \ref disposer specified in \p Traits is called by garbage collector \p GC asynchronously.
// Delegates to the functor overload with a no-op functor.
675 bool erase( hash_type const& hash )
677 return erase(hash, [](value_type const&) {} );
680 /// Deletes the item from the set
682 The function searches \p hash in the set,
683 calls \p f functor with the item found, and unlinks it from the set.
684 The \ref disposer specified in \p Traits is called
685 by garbage collector \p GC asynchronously.
687 The \p Func interface is
690 void operator()( value_type& item );
694 If \p hash is not found the function returns \p false.
696 template <typename Func>
697 bool erase( hash_type const& hash, Func f )
699 typename gc::Guard guard;
700 value_type * p = do_erase( hash, guard, []( value_type const&) -> bool {return true; } );
702 // p is guarded by HP
710 /// Deletes the item pointed to by iterator \p iter
712 Returns \p true if the operation is successful, \p false otherwise.
714 The function does not invalidate the iterator, it remains valid and can be used for further traversing.
716 bool erase_at( iterator const& iter )
718 return do_erase_at( iter );
721 bool erase_at( reverse_iterator const& iter )
723 return do_erase_at( iter );
727 /// Extracts the item with specified \p hash
729 The function searches \p hash in the set,
730 unlinks it from the set, and returns a guarded pointer to the item extracted.
731 If \p hash is not found the function returns an empty guarded pointer.
733 The \p disposer specified in \p Traits class' template parameter is called automatically
734 by garbage collector \p GC when the returned \ref guarded_ptr object is to be destroyed or released.
735 @note Each \p guarded_ptr object uses the GC's guard that can be a limited resource.
739 typedef cds::intrusive::FeldmanHashSet< your_template_args > my_set;
743 my_set::guarded_ptr gp( theSet.extract( 5 ));
748 // Destructor of gp releases internal HP guard
// NOTE(review): source text elided; code kept byte-identical, comments only.
752 guarded_ptr extract( hash_type const& hash )
756 typename gc::Guard guard;
757 value_type * p = do_erase( hash, guard, []( value_type const&) -> bool {return true;} );
759 // p is guarded by HP
766 /// Finds an item by its \p hash
768 The function searches the item by \p hash and calls the functor \p f for item found.
769 The interface of \p Func functor is:
772 void operator()( value_type& item );
775 where \p item is the item found.
777 The functor may change non-key fields of \p item. Note that the only guarantee is
778 that \p item cannot be disposed while the functor is executing.
779 The functor does not serialize simultaneous access to the set's \p item. If such access is
780 possible you must provide your own synchronization schema on item level to prevent unsafe item modifications.
782 The function returns \p true if \p hash is found, \p false otherwise.
784 template <typename Func>
785 bool find( hash_type const& hash, Func f )
787 typename gc::Guard guard;
788 value_type * p = search( hash, guard );
790 // p is guarded by HP
798 /// Checks whether the set contains \p hash
800 The function searches the item by its \p hash
801 and returns \p true if it is found, or \p false otherwise.
// Delegates to find() with a no-op functor.
803 bool contains( hash_type const& hash )
805 return find( hash, [](value_type&) {} );
808 /// Finds an item by its \p hash and returns the item found
810 The function searches the item by its \p hash
811 and returns the guarded pointer to the item found.
812 If \p hash is not found the function returns an empty \p guarded_ptr.
814 @note Each \p guarded_ptr object uses one GC guard, which can be a limited resource.
818 typedef cds::intrusive::FeldmanHashSet< your_template_params > my_set;
822 my_set::guarded_ptr gp( theSet.get( 5 ));
823 if ( theSet.get( 5 )) {
827 // Destructor of guarded_ptr releases internal HP guard
831 guarded_ptr get( hash_type const& hash )
835 typename gc::Guard guard;
836 gp.reset( search( hash, guard ));
841 /// Clears the set (non-atomic)
843 The function unlinks all data nodes from the set.
844 The function is not atomic but is thread-safe.
845 After \p %clear() the set may not be empty because other threads may insert items.
847 For each item the \p disposer is called after unlinking.
// Delegates to the recursive clear_array() starting at the head array.
// NOTE(review): source text elided; code kept byte-identical.
851 clear_array( head(), head_size());
854 /// Checks if the set is empty
856 Emptiness is checked by item counting: if item count is zero then the set is empty.
857 Thus, the correct item counting feature is an important part of the set implementation.
864 /// Returns item count in the set
867 return m_ItemCounter;
870 /// Returns const reference to internal statistics
871 stat const& statistics() const
876 /// Returns the size of head node
877 using base_class::head_size;
879 /// Returns the size of the array node
880 using base_class::array_node_size;
882 /// Collects tree level statistics into \p stat
883 /** @anchor cds_intrusive_FeldmanHashSet_hp_get_level_statistics
884 The function traverses the set and collects statistics for each level of the tree
885 into \p feldman_hashset::level_statistics struct. The element of \p stat[i]
886 represents statistics for level \p i, level 0 is head array.
887 The function is thread-safe and may be called in multi-threaded environment.
889 Result can be useful for estimating efficiency of hash functor you use.
891 void get_level_statistics(std::vector< feldman_hashset::level_statistics>& stat) const
893 base_class::get_level_statistics( stat );
897 ///@name Thread-safe iterators
898 /** @anchor cds_intrusive_FeldmanHashSet_iterators
899 The set supports thread-safe iterators: you may iterate over the set in multi-threaded environment.
900 It is guaranteed that the iterators will remain valid even if another thread deletes the node the iterator points to:
901 Hazard Pointer embedded into the iterator object protects the node from physical reclamation.
903 @note Since the iterator object contains hazard pointer that is a thread-local resource,
904 the iterator should not be passed to another thread.
906 Each iterator object supports the common interface:
907 - dereference operators:
909 value_type [const] * operator ->() noexcept
910 value_type [const] & operator *() noexcept
912 - pre-increment and pre-decrement. Post-operators are not supported
913 - equality operators <tt>==</tt> and <tt>!=</tt>.
914 Iterators are equal iff they point to the same cell of the same array node.
915 Note that for two iterators \p it1 and \p it2, the condition <tt> it1 == it2 </tt>
916 does not entail <tt> &(*it1) == &(*it2) </tt>: welcome to concurrent containers
917 - helper member function \p release() that clears internal hazard pointer.
918 After \p release() the iterator points to \p nullptr but it still remains valid: further iterating is possible.
920 During iteration you may safely erase any item from the set;
921 @ref erase_at() function call doesn't invalidate any iterator.
922 If some iterator points to the item to be erased, that item is not deleted immediately
923 but only after that iterator will be advanced forward or backward.
925 @note It is possible the item can be iterated more than once, for example, if an iterator points to the item
926 in an array node that is being split.
// Iterator accessors. begin-family positions one before the first slot
// (size_t(0) - 1) and advances; end-family positions at head_size() without
// advancing (trailing false). NOTE(review): source text elided; code byte-identical.
930 /// Returns an iterator to the beginning of the set
933 return iterator( *this, head(), size_t(0) - 1 );
936 /// Returns a const iterator to the beginning of the set
937 const_iterator begin() const
939 return const_iterator( *this, head(), size_t(0) - 1 );
942 /// Returns a const iterator to the beginning of the set
943 const_iterator cbegin()
945 return const_iterator( *this, head(), size_t(0) - 1 );
948 /// Returns an iterator to the element following the last element of the set. This element acts as a placeholder; attempting to access it results in undefined behavior.
951 return iterator( *this, head(), head_size(), false );
954 /// Returns a const iterator to the element following the last element of the set. This element acts as a placeholder; attempting to access it results in undefined behavior.
955 const_iterator end() const
957 return const_iterator( *this, head(), head_size(), false );
960 /// Returns a const iterator to the element following the last element of the set. This element acts as a placeholder; attempting to access it results in undefined behavior.
961 const_iterator cend()
963 return const_iterator( *this, head(), size_t(0) - 1, false );
966 /// Returns a reverse iterator to the first element of the reversed set
967 reverse_iterator rbegin()
969 return reverse_iterator( *this, head(), head_size());
972 /// Returns a const reverse iterator to the first element of the reversed set
973 const_reverse_iterator rbegin() const
975 return const_reverse_iterator( *this, head(), head_size());
978 /// Returns a const reverse iterator to the first element of the reversed set
979 const_reverse_iterator crbegin()
981 return const_reverse_iterator( *this, head(), head_size());
984 /// Returns a reverse iterator to the element following the last element of the reversed set
986 It corresponds to the element preceding the first element of the non-reversed container.
987 This element acts as a placeholder, attempting to access it results in undefined behavior.
989 reverse_iterator rend()
991 return reverse_iterator( *this, head(), size_t(0) - 1, false );
994 /// Returns a const reverse iterator to the element following the last element of the reversed set
996 It corresponds to the element preceding the first element of the non-reversed container.
997 This element acts as a placeholder, attempting to access it results in undefined behavior.
999 const_reverse_iterator rend() const
1001 return const_reverse_iterator( *this, head(), size_t(0) - 1, false );
1004 /// Returns a const reverse iterator to the element following the last element of the reversed set
1006 It corresponds to the element preceding the first element of the non-reversed container.
1007 This element acts as a placeholder, attempting to access it results in undefined behavior.
1009 const_reverse_iterator crend()
1011 return const_reverse_iterator( *this, head(), size_t(0) - 1, false );
// Recursively unlinks every data node reachable from pArrNode.
// Descends into child array nodes, waits out slots that are mid-conversion,
// and CASes each data-node slot to empty, retiring the item via the disposer.
// NOTE(review): source text elided; code kept byte-identical, comments only.
1017 void clear_array( array_node * pArrNode, size_t nSize )
1021 for ( atomic_node_ptr * pArr = pArrNode->nodes, *pLast = pArr + nSize; pArr != pLast; ++pArr ) {
1023 node_ptr slot = pArr->load( memory_model::memory_order_acquire );
1024 if ( slot.bits() == base_class::flag_array_node ) {
1025 // array node, go down the tree
1026 assert( slot.ptr() != nullptr );
1027 clear_array( to_array( slot.ptr()), array_node_size());
1030 else if ( slot.bits() == base_class::flag_array_converting ) {
1031 // the slot is converting to array node right now
// spin until the conversion finishes, then recurse into the new array node
1032 while ( (slot = pArr->load(memory_model::memory_order_acquire)).bits() == base_class::flag_array_converting ) {
1034 stats().onSlotConverting();
1038 assert( slot.ptr() != nullptr );
1039 assert( slot.bits() == base_class::flag_array_node );
1040 clear_array( to_array( slot.ptr()), array_node_size());
// data node: unlink it and hand it to the GC for deferred disposal
1045 if ( pArr->compare_exchange_strong( slot, node_ptr(), memory_model::memory_order_acquire, atomics::memory_order_relaxed )) {
1047 gc::template retire<disposer>( slot.ptr());
1049 stats().onEraseSuccess();
// Looks up \p hash; on success returns the item pointer protected by \p guard,
// otherwise returns nullptr. Retries when the slot changes between the traverse
// and the hazard-pointer protection.
// NOTE(review): source text elided; code kept byte-identical, comments only.
1061 value_type * search( hash_type const& hash, typename gc::Guard& guard )
1063 traverse_data pos( hash, *this );
1064 hash_comparator cmp;
1067 node_ptr slot = base_class::traverse( pos );
1068 assert(slot.bits() == 0);
1070 // protect data node by hazard pointer
1071 if (guard.protect( pos.pArr->nodes[pos.nSlot], [](node_ptr p) -> value_type * { return p.ptr(); }) != slot) {
1072 // slot value has been changed - retry
1073 stats().onSlotChanged();
1075 else if (slot.ptr() && cmp(hash, hash_accessor()(*slot.ptr())) == 0) {
1077 stats().onFindSuccess();
1080 stats().onFindFailed();
// Core erase: finds the slot for \p hash, protects the data node with \p guard,
// checks the hash and the caller-supplied predicate, then CASes the slot to
// empty and retires the item. Returns the (guarded) erased item or nullptr.
// NOTE(review): source text elided; code kept byte-identical, comments only.
1085 template <typename Predicate>
1086 value_type * do_erase( hash_type const& hash, typename gc::Guard& guard, Predicate pred )
1088 traverse_data pos( hash, *this );
1089 hash_comparator cmp;
1091 node_ptr slot = base_class::traverse( pos );
1092 assert(slot.bits() == 0);
1094 // protect data node by hazard pointer
1095 if (guard.protect( pos.pArr->nodes[pos.nSlot], [](node_ptr p) -> value_type * { return p.ptr(); }) != slot) {
1096 // slot value has been changed - retry
1097 stats().onSlotChanged();
1099 else if (slot.ptr()) {
1100 if (cmp(hash, hash_accessor()(*slot.ptr())) == 0 && pred(*slot.ptr())) {
1101 // item found - replace it with nullptr
1102 if ( pos.pArr->nodes[pos.nSlot].compare_exchange_strong(slot, node_ptr(nullptr), memory_model::memory_order_acquire, atomics::memory_order_relaxed)) {
1103 // slot is guarded by HP
1104 gc::template retire<disposer>(slot.ptr());
1106 stats().onEraseSuccess();
// CAS lost to a concurrent writer - retry the whole operation
1110 stats().onEraseRetry();
1113 stats().onEraseFailed();
1117 // the slot is empty
1118 stats().onEraseFailed();
// Erases the item an iterator currently points to: validates that the iterator
// belongs to this set and is in range, then CASes the exact slot from the
// iterator's protected pointer to empty.
// NOTE(review): source text elided; code kept byte-identical, comments only.
1124 bool do_erase_at( iterator_base const& iter )
1126 if ( iter.m_set != this )
1128 if ( iter.m_pNode == head() && iter.m_idx >= head_size())
1130 if ( iter.m_idx >= array_node_size())
1134 node_ptr slot = iter.m_pNode->nodes[iter.m_idx].load( memory_model::memory_order_acquire );
// only erase if the slot still holds exactly the item the iterator guards
1135 if ( slot.bits() == 0 && slot.ptr() == iter.pointer()) {
1136 if ( iter.m_pNode->nodes[iter.m_idx].compare_exchange_strong(slot, node_ptr(nullptr), memory_model::memory_order_acquire, atomics::memory_order_relaxed)) {
1137 // the item is guarded by iterator, so we may retire it safely
1138 gc::template retire<disposer>( slot.ptr());
1140 stats().onEraseSuccess();
// Core insert-or-replace. Returns {succeeded, inserted-new}:
// - existing item with the same hash is CAS-replaced by \p val and retired;
// - empty slot: \p val is CAS-inserted when \p bInsert is true;
// - a data node with a different hash forces slot expansion and a retry.
// Two guards: slot 0 protects the current slot's data node during comparison.
// NOTE(review): source text elided; code kept byte-identical, comments only.
1149 template <typename Func>
1150 std::pair<bool, bool> do_update( value_type& val, Func f, bool bInsert = true )
1152 hash_type const& hash = hash_accessor()( val );
1153 traverse_data pos( hash, *this );
1154 hash_comparator cmp;
1155 typename gc::template GuardArray<2> guards;
1158 node_ptr slot = base_class::traverse( pos );
1159 assert(slot.bits() == 0);
1161 // protect data node by hazard pointer
1162 if (guards.protect(0, pos.pArr->nodes[pos.nSlot], [](node_ptr p) -> value_type * { return p.ptr(); }) != slot) {
1163 // slot value has been changed - retry
1164 stats().onSlotChanged();
1166 else if (slot.ptr()) {
1167 if (cmp(hash, hash_accessor()(*slot.ptr())) == 0) {
1168 // the item with that hash value already exists
1169 // Replace it with val
// self-update: the slot already holds &val, nothing to do
1170 if (slot.ptr() == &val) {
1171 stats().onUpdateExisting();
1172 return std::make_pair(true, false);
1175 if ( pos.pArr->nodes[pos.nSlot].compare_exchange_strong(slot, node_ptr(&val), memory_model::memory_order_release, atomics::memory_order_relaxed)) {
1176 // slot can be disposed
1178 gc::template retire<disposer>(slot.ptr());
1179 stats().onUpdateExisting();
1180 return std::make_pair(true, false);
1183 stats().onUpdateRetry();
1188 // the slot must be expanded
1189 base_class::expand_slot( pos, slot );
1192 stats().onUpdateFailed();
1193 return std::make_pair(false, false);
1197 // the slot is empty, try to insert data node
1200 if ( pos.pArr->nodes[pos.nSlot].compare_exchange_strong(pNull, node_ptr(&val), memory_model::memory_order_release, atomics::memory_order_relaxed))
1202 // the new data node has been inserted
1205 stats().onUpdateNew();
1206 stats().height( pos.nHeight );
1207 return std::make_pair(true, true);
1211 stats().onUpdateFailed();
1212 return std::make_pair(false, false);
1215 // insert failed - slot has been changed by another thread
1217 stats().onUpdateRetry();
1223 }} // namespace cds::intrusive
1225 #endif // #ifndef CDSLIB_INTRUSIVE_IMPL_FELDMAN_HASHSET_H