3 #ifndef CDSLIB_INTRUSIVE_IMPL_FELDMAN_HASHSET_H
4 #define CDSLIB_INTRUSIVE_IMPL_FELDMAN_HASHSET_H
6 #include <functional> // std::ref
7 #include <iterator> // std::iterator_traits
10 #include <cds/intrusive/details/feldman_hashset_base.h>
11 #include <cds/details/allocator.h>
13 namespace cds { namespace intrusive {
14 /// Intrusive hash set based on multi-level array
15 /** @ingroup cds_intrusive_map
16 @anchor cds_intrusive_FeldmanHashSet_hp
19 - [2013] Steven Feldman, Pierre LaBorde, Damian Dechev "Concurrent Multi-level Arrays:
20 Wait-free Extensible Hash Maps"
22 [From the paper] The hardest problem encountered while developing a parallel hash map is how to perform
23 a global resize, the process of redistributing the elements in a hash map that occurs when adding new
24 buckets. The negative impact of blocking synchronization is multiplied during a global resize, because all
25 threads will be forced to wait on the thread that is performing the involved process of resizing the hash map
26 and redistributing the elements. \p %FeldmanHashSet implementation avoids global resizes through new array
27 allocation. By allowing concurrent expansion this structure is free from the overhead of an explicit resize,
28 which facilitates concurrent operations.
30 The presented design includes dynamic hashing, the use of sub-arrays within the hash map data structure;
31 which, in combination with <b>perfect hashing</b>, means that each element has a unique final, as well as current, position.
32 It is important to note that the perfect hash function required by our hash map is trivial to realize as
33 any hash function that permutes the bits of the key is suitable. This is possible because of our approach
34 to the hash function; we require that it produces hash values that are equal in size to that of the key.
35 We know that if we expand the hash map a fixed number of times there can be no collision as duplicate keys
36 are not provided for in the standard semantics of a hash map.
38 \p %FeldmanHashSet is a multi-level array which has a structure similar to a tree:
39 @image html feldman_hashset.png
40 The multi-level array differs from a tree in that each position on the tree could hold an array of nodes or a single node.
41 A position that holds a single node is a \p dataNode which holds the hash value of a key and the value that is associated
42 with that key; it is a simple struct holding two variables. A \p dataNode in the multi-level array could be marked.
43 A \p markedDataNode refers to a pointer to a \p dataNode that has been bitmarked at the least significant bit (LSB)
44 of the pointer to the node. This signifies that this \p dataNode is contended. An expansion must occur at this node;
45 any thread that sees this \p markedDataNode will try to replace it with an \p arrayNode; which is a position that holds
46 an array of nodes. The pointer to an \p arrayNode is differentiated from that of a pointer to a \p dataNode by a bitmark
47 on the second-least significant bit.
49 \p %FeldmanHashSet multi-level array is similar to a tree in that we keep a pointer to the root, which is a memory array
50 called \p head. The length of the \p head memory array is unique, whereas every other \p arrayNode has a uniform length;
51 a normal \p arrayNode has a fixed power-of-two length equal to the binary logarithm of a variable called \p arrayLength.
52 The maximum depth of the tree, \p maxDepth, is the maximum number of pointers that must be followed to reach any node.
53 We define \p currentDepth as the number of memory arrays that we need to traverse to reach the \p arrayNode on which
54 we need to operate; this is initially one, because of \p head.
56 That approach to the structure of the hash set uses an extensible hashing scheme; <b> the hash value is treated as a bit
string</b> and is rehashed incrementally.
59 @note Two important things you should keep in mind when you're using \p %FeldmanHashSet:
60 - all keys must be fixed-size. It means that you cannot use \p std::string as a key for \p %FeldmanHashSet.
61 Instead, for the strings you should use well-known hashing algorithms like <a href="https://en.wikipedia.org/wiki/Secure_Hash_Algorithm">SHA1, SHA2</a>,
62 <a href="https://en.wikipedia.org/wiki/MurmurHash">MurmurHash</a>, <a href="https://en.wikipedia.org/wiki/CityHash">CityHash</a>
63 or its successor <a href="https://code.google.com/p/farmhash/">FarmHash</a> and so on, which
64 converts variable-length strings to fixed-length bit-strings, and use that hash as a key in \p %FeldmanHashSet.
65 - \p %FeldmanHashSet uses a perfect hashing. It means that if two different keys, for example, of type \p std::string,
have identical hashes then you cannot insert both of those keys into the set. \p %FeldmanHashSet does not maintain the key,
67 it maintains its fixed-size hash value.
69 The set supports @ref cds_intrusive_FeldmanHashSet_iterators "bidirectional thread-safe iterators".
72 - \p GC - safe memory reclamation schema. Can be \p gc::HP, \p gc::DHP or one of \ref cds_urcu_type "RCU type"
73 - \p T - a value type to be stored in the set
74 - \p Traits - type traits, the structure based on \p feldman_hashset::traits or result of \p feldman_hashset::make_traits metafunction.
75 \p Traits is the mandatory argument because it has one mandatory type - an @ref feldman_hashset::traits::hash_accessor "accessor"
76 to hash value of \p T. The set algorithm does not calculate that hash value.
78 There are several specializations of \p %FeldmanHashSet for each \p GC. You should include:
79 - <tt><cds/intrusive/feldman_hashset_hp.h></tt> for \p gc::HP garbage collector
80 - <tt><cds/intrusive/feldman_hashset_dhp.h></tt> for \p gc::DHP garbage collector
81 - <tt><cds/intrusive/feldman_hashset_rcu.h></tt> for \ref cds_intrusive_FeldmanHashSet_rcu "RCU type". RCU specialization
82 has a slightly different interface.
#ifdef CDS_DOXYGEN_INVOKED
,typename Traits = feldman_hashset::traits
class FeldmanHashSet: protected feldman_hashset::multilevel_array<T, Traits>
    // The multi-level array machinery (traversal, slot expansion, metrics)
    // lives in the base class; this class adds HP-guarded set semantics.
    typedef feldman_hashset::multilevel_array<T, Traits> base_class;

    typedef GC gc; ///< Garbage collector
    typedef T value_type; ///< type of value stored in the set
    typedef Traits traits; ///< Traits template parameter, see \p feldman_hashset::traits

    typedef typename traits::hash_accessor hash_accessor; ///< Hash accessor functor
    typedef typename base_class::hash_type hash_type; ///< Hash type deduced from \p hash_accessor return type
    typedef typename traits::disposer disposer; ///< data node disposer
    typedef typename base_class::hash_comparator hash_comparator; ///< hash compare functor based on \p traits::compare and \p traits::less options

    typedef typename traits::item_counter item_counter; ///< Item counter type
    typedef typename traits::node_allocator node_allocator; ///< Array node allocator
    typedef typename traits::memory_model memory_model; ///< Memory model
    typedef typename traits::back_off back_off; ///< Backoff strategy
    typedef typename traits::stat stat; ///< Internal statistics type

    typedef typename gc::template guarded_ptr< value_type > guarded_ptr; ///< Guarded pointer

    /// Count of hazard pointers required
    static CDS_CONSTEXPR size_t const c_nHazardPtrCount = 2;

    // Internal types imported from the multi-level array base
    typedef typename base_class::node_ptr node_ptr;
    typedef typename base_class::atomic_node_ptr atomic_node_ptr;
    typedef typename base_class::array_node array_node;
    typedef typename base_class::traverse_data traverse_data;

    // Helpers inherited from base_class
    using base_class::to_array;
    using base_class::to_node;
    using base_class::stats;
    using base_class::head;
    using base_class::metrics;
// Common implementation of the bidirectional thread-safe iterators.
// Holds the current (array node, index) position plus a hazard pointer
// that protects the pointed-to data node from reclamation.
friend class FeldmanHashSet;

array_node * m_pNode; ///< current array node
size_t m_idx; ///< current position in m_pNode
typename gc::Guard m_guard; ///< HP guard protecting the pointed-to data node
FeldmanHashSet const* m_set; ///< Hash set

/// Default ctor: creates an unattached iterator
iterator_base() CDS_NOEXCEPT

/// Copy ctor; duplicates the hazard-pointer protection of \p rhs
iterator_base( iterator_base const& rhs ) CDS_NOEXCEPT
    : m_pNode( rhs.m_pNode )
    m_guard.copy( rhs.m_guard );

/// Copy assignment; copies position and HP guard
iterator_base& operator=(iterator_base const& rhs) CDS_NOEXCEPT
    m_pNode = rhs.m_pNode;
    m_guard.copy( rhs.m_guard );

/// Pre-increment: advances to the next data node
iterator_base& operator++()

/// Pre-decrement: steps back to the previous data node
iterator_base& operator--()

/// Two iterators are equal iff they reference the same slot of the same set
bool operator ==(iterator_base const& rhs) const CDS_NOEXCEPT
    return m_pNode == rhs.m_pNode && m_idx == rhs.m_idx && m_set == rhs.m_set;

bool operator !=(iterator_base const& rhs) const CDS_NOEXCEPT
    return !( *this == rhs );

// Positioning ctor with bool tag: used for end()/rend() placeholders
// (presumably does not advance - TODO confirm, body elided in this view)
iterator_base( FeldmanHashSet const& set, array_node * pNode, size_t idx, bool )

// Positioning ctor without tag: used for begin()-style positions
iterator_base( FeldmanHashSet const& set, array_node * pNode, size_t idx )

/// Returns the protected data node pointer (may be nullptr)
value_type * pointer() const CDS_NOEXCEPT
    return m_guard.template get<value_type>();

// ---- forward traversal (enclosing helper signature elided in this view) ----
assert( m_set != nullptr );
assert( m_pNode != nullptr );

size_t const arrayNodeSize = m_set->array_node_size();
size_t const headSize = m_set->head_size();
array_node * pNode = m_pNode;
size_t idx = m_idx + 1;
// the head array may have a different size than ordinary array nodes
size_t nodeSize = m_pNode->pParent? arrayNodeSize : headSize;

if ( idx < nodeSize ) {
    node_ptr slot = pNode->nodes[idx].load( memory_model::memory_order_acquire );
    if ( slot.bits() == base_class::flag_array_node ) {
        // array node, go down the tree
        assert( slot.ptr() != nullptr );
        pNode = to_array( slot.ptr());
        nodeSize = arrayNodeSize;
    else if ( slot.bits() == base_class::flag_array_converting ) {
        // the slot is converting to array node right now - skip the node
    // protect the found data node; accept only if the slot did not change concurrently
    if ( m_guard.protect( pNode->nodes[idx], [](node_ptr p) -> value_type * { return p.ptr(); }) == slot ) {
// current array exhausted - go up to the parent array node
if ( pNode->pParent ) {
    idx = pNode->idxParent + 1;
    pNode = pNode->pParent;
    nodeSize = pNode->pParent ? arrayNodeSize : headSize;
// traversal can only finish at the end of the head array
assert( pNode == m_set->head());
assert( idx == headSize );

// ---- backward traversal (mirror of the forward walk; signature elided in this view) ----
assert( m_set != nullptr );
assert( m_pNode != nullptr );

size_t const arrayNodeSize = m_set->array_node_size();
size_t const headSize = m_set->head_size();
size_t const endIdx = size_t(0) - 1; // "one before slot 0" sentinel

array_node * pNode = m_pNode;
size_t idx = m_idx - 1;
size_t nodeSize = m_pNode->pParent? arrayNodeSize : headSize;

if ( idx != endIdx ) {
    node_ptr slot = pNode->nodes[idx].load( memory_model::memory_order_acquire );
    if ( slot.bits() == base_class::flag_array_node ) {
        // array node, go down the tree
        assert( slot.ptr() != nullptr );
        pNode = to_array( slot.ptr());
        nodeSize = arrayNodeSize;
    else if ( slot.bits() == base_class::flag_array_converting ) {
        // the slot is converting to array node right now - skip the node
    if ( m_guard.protect( pNode->nodes[idx], [](node_ptr p) -> value_type * { return p.ptr(); }) == slot ) {
if ( pNode->pParent ) {
    idx = pNode->idxParent - 1;
    pNode = pNode->pParent;
    nodeSize = pNode->pParent ? arrayNodeSize : headSize;
assert( pNode == m_set->head());
assert( idx == endIdx );
332 template <class Iterator>
333 Iterator init_begin() const
335 return Iterator( *this, head(), size_t(0) - 1 );
338 template <class Iterator>
339 Iterator init_end() const
341 return Iterator( *this, head(), head_size(), false );
344 template <class Iterator>
345 Iterator init_rbegin() const
347 return Iterator( *this, head(), head_size());
350 template <class Iterator>
351 Iterator init_rend() const
353 return Iterator( *this, head(), size_t(0) - 1, false );
/// Bidirectional iterator class
template <bool IsConst>
class bidirectional_iterator: protected iterator_base
    friend class FeldmanHashSet;

    static CDS_CONSTEXPR bool const c_bConstantIterator = IsConst;

    typedef typename std::conditional< IsConst, value_type const*, value_type*>::type value_ptr; ///< Value pointer
    typedef typename std::conditional< IsConst, value_type const&, value_type&>::type value_ref; ///< Value reference

    /// Default ctor
    bidirectional_iterator() CDS_NOEXCEPT

    bidirectional_iterator( bidirectional_iterator const& rhs ) CDS_NOEXCEPT
        : iterator_base( rhs )

    bidirectional_iterator& operator=(bidirectional_iterator const& rhs) CDS_NOEXCEPT
        iterator_base::operator=( rhs );

    /// Pre-increment
    bidirectional_iterator& operator++()
        iterator_base::operator++();

    /// Pre-decrement
    bidirectional_iterator& operator--()
        iterator_base::operator--();

    value_ptr operator ->() const CDS_NOEXCEPT
        return iterator_base::pointer();

    value_ref operator *() const CDS_NOEXCEPT
        value_ptr p = iterator_base::pointer();

    // release(): clears the internal hazard pointer (see class-level iterator docs)
        iterator_base::release();

    template <bool IsConst2>
    bool operator ==(bidirectional_iterator<IsConst2> const& rhs) const CDS_NOEXCEPT
        return iterator_base::operator==( rhs );

    template <bool IsConst2>
    bool operator !=(bidirectional_iterator<IsConst2> const& rhs) const CDS_NOEXCEPT
        return !( *this == rhs );

    // Positioning ctors used by the set's begin()/end() factory helpers
    bidirectional_iterator( FeldmanHashSet& set, array_node * pNode, size_t idx, bool )
        : iterator_base( set, pNode, idx, false )

    bidirectional_iterator( FeldmanHashSet& set, array_node * pNode, size_t idx )
        : iterator_base( set, pNode, idx )
/// Reverse bidirectional iterator
template <bool IsConst>
class reverse_bidirectional_iterator : public iterator_base
    friend class FeldmanHashSet;

    typedef typename std::conditional< IsConst, value_type const*, value_type*>::type value_ptr; ///< Value pointer
    typedef typename std::conditional< IsConst, value_type const&, value_type&>::type value_ref; ///< Value reference

    reverse_bidirectional_iterator() CDS_NOEXCEPT

    reverse_bidirectional_iterator( reverse_bidirectional_iterator const& rhs ) CDS_NOEXCEPT
        : iterator_base( rhs )

    reverse_bidirectional_iterator& operator=( reverse_bidirectional_iterator const& rhs) CDS_NOEXCEPT
        iterator_base::operator=( rhs );

    /// Pre-increment: a reverse iterator advances by stepping the base iterator backward
    reverse_bidirectional_iterator& operator++()
        iterator_base::operator--();

    /// Pre-decrement: mirrors operator++ by stepping the base iterator forward
    reverse_bidirectional_iterator& operator--()
        iterator_base::operator++();

    value_ptr operator ->() const CDS_NOEXCEPT
        return iterator_base::pointer();

    value_ref operator *() const CDS_NOEXCEPT
        value_ptr p = iterator_base::pointer();

    // release(): clears the internal hazard pointer (see class-level iterator docs)
        iterator_base::release();

    // NOTE(review): unlike bidirectional_iterator, these comparisons are not
    // CDS_NOEXCEPT and operator!= is not const - consider aligning the signatures
    template <bool IsConst2>
    bool operator ==(reverse_bidirectional_iterator<IsConst2> const& rhs) const
        return iterator_base::operator==( rhs );

    template <bool IsConst2>
    bool operator !=(reverse_bidirectional_iterator<IsConst2> const& rhs)
        return !( *this == rhs );

    // Positioning ctors; the tag-less ctor additionally steps backward so the
    // iterator lands on the first data node preceding \p idx
    reverse_bidirectional_iterator( FeldmanHashSet& set, array_node * pNode, size_t idx, bool )
        : iterator_base( set, pNode, idx, false )

    reverse_bidirectional_iterator( FeldmanHashSet& set, array_node * pNode, size_t idx )
        : iterator_base( set, pNode, idx, false )
        iterator_base::backward();
#ifdef CDS_DOXYGEN_INVOKED
typedef implementation_defined iterator; ///< @ref cds_intrusive_FeldmanHashSet_iterators "bidirectional iterator" type
typedef implementation_defined const_iterator; ///< @ref cds_intrusive_FeldmanHashSet_iterators "bidirectional const iterator" type
typedef implementation_defined reverse_iterator; ///< @ref cds_intrusive_FeldmanHashSet_iterators "bidirectional reverse iterator" type
typedef implementation_defined const_reverse_iterator; ///< @ref cds_intrusive_FeldmanHashSet_iterators "bidirectional reverse const iterator" type
typedef bidirectional_iterator<false> iterator;
typedef bidirectional_iterator<true> const_iterator;
typedef reverse_bidirectional_iterator<false> reverse_iterator;
typedef reverse_bidirectional_iterator<true> const_reverse_iterator;

item_counter m_ItemCounter; ///< Item counter

/// Creates empty set
/**
    @param head_bits: 2<sup>head_bits</sup> specifies the size of head array, minimum is 4.
    @param array_bits: 2<sup>array_bits</sup> specifies the size of array node, minimum is 2.

    Equation for \p head_bits and \p array_bits:
        sizeof(hash_type) * 8 == head_bits + N * array_bits
    where \p N is multi-level array depth.
*/
FeldmanHashSet( size_t head_bits = 8, size_t array_bits = 4 )
    : base_class( head_bits, array_bits )
547 /// Destructs the set and frees all data
555 The function inserts \p val in the set if it does not contain
556 an item with that hash.
558 Returns \p true if \p val is placed into the set, \p false otherwise.
560 bool insert( value_type& val )
562 return insert( val, [](value_type&) {} );
/// Inserts new node
/**
    This function is intended for derived non-intrusive containers.

    The function allows to split creating of new item into two part:
    - create item with key only
    - insert new item into the set
    - if inserting is success, calls \p f functor to initialize \p val.

    The functor signature is:
        void func( value_type& val );
    where \p val is the item inserted.

    The user-defined functor is called only if the inserting is success.

    @warning See \ref cds_intrusive_item_creating "insert item troubleshooting".
*/
template <typename Func>
bool insert( value_type& val, Func f )
    hash_type const& hash = hash_accessor()(val);
    traverse_data pos( hash, *this );
    // guard 0 protects the slot content, guard 1 protects \p val itself
    typename gc::template GuardArray<2> guards;
    guards.assign( 1, &val );
    // descend the multi-level array to the slot for \p hash
    node_ptr slot = base_class::traverse( pos );
    assert( slot.bits() == 0 );

    // protect data node by hazard pointer
    if (guards.protect( 0, pos.pArr->nodes[pos.nSlot], [](node_ptr p) -> value_type * { return p.ptr(); }) != slot ) {
        // slot value has been changed - retry
        stats().onSlotChanged();
    if ( cmp( hash, hash_accessor()(*slot.ptr())) == 0 ) {
        // the item with that hash value already exists
        stats().onInsertFailed();
    // hash collision at this level - the slot must be expanded into a new array node
    base_class::expand_slot( pos, slot );
    // the slot is empty, try to insert data node
    if ( pos.pArr->nodes[pos.nSlot].compare_exchange_strong( pNull, node_ptr(&val), memory_model::memory_order_release, atomics::memory_order_relaxed ))
        // the new data node has been inserted
        stats().onInsertSuccess();
        stats().height(pos.nHeight);
    // insert failed - slot has been changed by another thread
    stats().onInsertRetry();
635 Performs inserting or updating the item with hash value equal to \p val.
636 - If hash value is found then existing item is replaced with \p val, old item is disposed
637 with \p Traits::disposer. Note that the disposer is called by \p GC asynchronously.
638 The function returns <tt> std::pair<true, false> </tt>
639 - If hash value is not found and \p bInsert is \p true then \p val is inserted,
640 the function returns <tt> std::pair<true, true> </tt>
641 - If hash value is not found and \p bInsert is \p false then the set is unchanged,
642 the function returns <tt> std::pair<false, false> </tt>
644 Returns <tt> std::pair<bool, bool> </tt> where \p first is \p true if operation is successfull
645 (i.e. the item has been inserted or updated),
646 \p second is \p true if new item has been added or \p false if the set contains that hash.
648 std::pair<bool, bool> update( value_type& val, bool bInsert = true )
650 return do_update(val, [](value_type&, value_type *) {}, bInsert );
/// Unlinks the item \p val from the set
/**
    The function searches the item \p val in the set and unlinks it
    if it is found and its address is equal to <tt>&val</tt>.

    The function returns \p true if success and \p false otherwise.
*/
bool unlink( value_type const& val )
    typename gc::Guard guard;
    // compare by address: only this exact node may be unlinked
    auto pred = [&val](value_type const& item) -> bool { return &item == &val; };
    value_type * p = do_erase( hash_accessor()( val ), guard, std::ref( pred ));

/// Deletes the item from the set
/**
    The function searches \p hash in the set,
    unlinks the item found, and returns \p true.
    If that item is not found the function returns \p false.

    The \ref disposer specified in \p Traits is called by garbage collector \p GC asynchronously.
*/
bool erase( hash_type const& hash )
    return erase(hash, [](value_type const&) {} );

/// Deletes the item from the set
/**
    The function searches \p hash in the set,
    calls \p f functor with the item found, and unlinks it from the set.
    The \ref disposer specified in \p Traits is called
    by garbage collector \p GC asynchronously.

    The \p Func interface is
        void operator()( value_type& item );

    If \p hash is not found the function returns \p false.
*/
template <typename Func>
bool erase( hash_type const& hash, Func f )
    typename gc::Guard guard;
    value_type * p = do_erase( hash, guard, []( value_type const&) -> bool {return true; } );
    // p is guarded by HP

/// Deletes the item pointed by iterator \p iter
/**
    Returns \p true if the operation is successful, \p false otherwise.
    The function does not invalidate the iterator, it remains valid and can be used for further traversing.
*/
bool erase_at( iterator const& iter )
    return do_erase_at( iter );

bool erase_at( reverse_iterator const& iter )
    return do_erase_at( iter );
/// Extracts the item with specified \p hash
/**
    The function searches \p hash in the set,
    unlinks it from the set, and returns a guarded pointer to the item extracted.
    If \p hash is not found the function returns an empty guarded pointer.

    The \p disposer specified in \p Traits class' template parameter is called automatically
    by garbage collector \p GC when returned \ref guarded_ptr object to be destroyed or released.
    @note Each \p guarded_ptr object uses the GC's guard that can be limited resource.

        typedef cds::intrusive::FeldmanHashSet< your_template_args > my_set;
        my_set::guarded_ptr gp( theSet.extract( 5 ));
        // Destructor of gp releases internal HP guard
*/
guarded_ptr extract( hash_type const& hash )
    typename gc::Guard guard;
    // unconditional predicate: any node with that hash may be extracted
    value_type * p = do_erase( hash, guard, []( value_type const&) -> bool {return true;} );
    // p is guarded by HP

/// Finds an item by its \p hash
/**
    The function searches the item by \p hash and calls the functor \p f for the item found.
    The interface of \p Func functor is:
        void operator()( value_type& item );
    where \p item is the item found.

    The functor may change non-key fields of \p item. Note that the only guarantee is
    that \p item cannot be disposed while the functor is executing.
    The functor does not serialize simultaneous access to the set's \p item. If such access is
    possible you must provide your own synchronization schema on item level to prevent unsafe item modifications.

    The function returns \p true if \p hash is found, \p false otherwise.
*/
template <typename Func>
bool find( hash_type const& hash, Func f )
    typename gc::Guard guard;
    value_type * p = search( hash, guard );
    // p is guarded by HP

/// Checks whether the set contains \p hash
/**
    The function searches the item by its \p hash
    and returns \p true if it is found, or \p false otherwise.
*/
bool contains( hash_type const& hash )
    return find( hash, [](value_type&) {} );

/// Finds an item by its \p hash and returns the item found
/**
    The function searches the item by its \p hash
    and returns the guarded pointer to the item found.
    If \p hash is not found the function returns an empty \p guarded_ptr.

    @note Each \p guarded_ptr object uses one GC's guard which can be limited resource.

        typedef cds::intrusive::FeldmanHashSet< your_template_params > my_set;
        my_set::guarded_ptr gp( theSet.get( 5 ));
        if ( theSet.get( 5 )) {
        // Destructor of guarded_ptr releases internal HP guard
*/
guarded_ptr get( hash_type const& hash )
    typename gc::Guard guard;
    gp.reset( search( hash, guard ));

/// Clears the set (non-atomic)
/**
    The function unlinks all data nodes from the set.
    The function is not atomic but is thread-safe.
    After \p %clear() the set may not be empty because other threads may insert items.

    For each item the \p disposer is called after unlinking.
*/
    clear_array( head(), head_size());

/// Checks if the set is empty
/**
    Emptiness is checked by item counting: if item count is zero then the set is empty.
    Thus, the correct item counting feature is an important part of the set implementation.
*/

/// Returns item count in the set
    return m_ItemCounter;

/// Returns const reference to internal statistics
stat const& statistics() const

/// Returns the size of head node
using base_class::head_size;

/// Returns the size of the array node
using base_class::array_node_size;

/// Collects tree level statistics into \p stat
/** @anchor cds_intrusive_FeldmanHashSet_hp_get_level_statistics
    The function traverses the set and collects statistics for each level of the tree
    into \p feldman_hashset::level_statistics struct. The element of \p stat[i]
    represents statistics for level \p i, level 0 is head array.
    The function is thread-safe and may be called in multi-threaded environment.

    Result can be useful for estimating efficiency of hash functor you use.
*/
void get_level_statistics(std::vector< feldman_hashset::level_statistics>& stat) const
    base_class::get_level_statistics( stat );
///@name Thread-safe iterators
/** @anchor cds_intrusive_FeldmanHashSet_iterators
    The set supports thread-safe iterators: you may iterate over the set in multi-threaded environment.
    It is guaranteed that the iterators will remain valid even if another thread deletes the node the iterator points to:
    Hazard Pointer embedded into the iterator object protects the node from physical reclamation.

    @note Since the iterator object contains hazard pointer that is a thread-local resource,
    the iterator should not be passed to another thread.

    Each iterator object supports the common interface:
    - dereference operators:
        value_type [const] * operator ->() noexcept
        value_type [const] & operator *() noexcept
    - pre-increment and pre-decrement. Post-operators are not supported
    - equality operators <tt>==</tt> and <tt>!=</tt>.
      Iterators are equal iff they point to the same cell of the same array node.
      Note that for two iterators \p it1 and \p it2, the condition <tt> it1 == it2 </tt>
      does not entail <tt> &(*it1) == &(*it2) </tt>: welcome to concurrent containers
    - helper member function \p release() that clears internal hazard pointer.
      After \p release() the iterator points to \p nullptr but it still remains valid: further iterating is possible.

    During iteration you may safely erase any item from the set;
    @ref erase_at() function call doesn't invalidate any iterator.
    If some iterator points to the item to be erased, that item is not deleted immediately
    but only after that iterator will be advanced forward or backward.

    @note It is possible the item can be iterated more than once, for example, if an iterator points to the item
    in an array node that is being split.
*/

/// Returns an iterator to the beginning of the set
    return iterator( *this, head(), size_t(0) - 1 );

/// Returns a const iterator to the beginning of the set
const_iterator begin() const
    return const_iterator( *this, head(), size_t(0) - 1 );

/// Returns a const iterator to the beginning of the set
const_iterator cbegin()
    return const_iterator( *this, head(), size_t(0) - 1 );

/// Returns an iterator to the element following the last element of the set. This element acts as a placeholder; attempting to access it results in undefined behavior.
    return iterator( *this, head(), head_size(), false );

/// Returns a const iterator to the element following the last element of the set. This element acts as a placeholder; attempting to access it results in undefined behavior.
const_iterator end() const
    return const_iterator( *this, head(), head_size(), false );

/// Returns a const iterator to the element following the last element of the set. This element acts as a placeholder; attempting to access it results in undefined behavior.
const_iterator cend()
    return const_iterator( *this, head(), head_size(), false );

/// Returns a reverse iterator to the first element of the reversed set
reverse_iterator rbegin()
    return reverse_iterator( *this, head(), head_size());

/// Returns a const reverse iterator to the first element of the reversed set
const_reverse_iterator rbegin() const
    return const_reverse_iterator( *this, head(), head_size());

/// Returns a const reverse iterator to the first element of the reversed set
const_reverse_iterator crbegin()
    return const_reverse_iterator( *this, head(), head_size());

/// Returns a reverse iterator to the element following the last element of the reversed set
/**
    It corresponds to the element preceding the first element of the non-reversed container.
    This element acts as a placeholder, attempting to access it results in undefined behavior.
*/
reverse_iterator rend()
    return reverse_iterator( *this, head(), size_t(0) - 1, false );

/// Returns a const reverse iterator to the element following the last element of the reversed set
/**
    It corresponds to the element preceding the first element of the non-reversed container.
    This element acts as a placeholder, attempting to access it results in undefined behavior.
*/
const_reverse_iterator rend() const
    return const_reverse_iterator( *this, head(), size_t(0) - 1, false );

/// Returns a const reverse iterator to the element following the last element of the reversed set
/**
    It corresponds to the element preceding the first element of the non-reversed container.
    This element acts as a placeholder, attempting to access it results in undefined behavior.
*/
const_reverse_iterator crend()
    return const_reverse_iterator( *this, head(), size_t(0) - 1, false );
// Unlinks and retires every data node reachable from \p pArrNode,
// recursing into child array nodes. Called by clear().
void clear_array( array_node * pArrNode, size_t nSize )
    for ( atomic_node_ptr * pArr = pArrNode->nodes, *pLast = pArr + nSize; pArr != pLast; ++pArr ) {
        node_ptr slot = pArr->load( memory_model::memory_order_acquire );
        if ( slot.bits() == base_class::flag_array_node ) {
            // array node, go down the tree
            assert( slot.ptr() != nullptr );
            clear_array( to_array( slot.ptr()), array_node_size());
        else if ( slot.bits() == base_class::flag_array_converting ) {
            // the slot is converting to array node right now - spin until conversion completes
            while ( (slot = pArr->load(memory_model::memory_order_acquire)).bits() == base_class::flag_array_converting ) {
                stats().onSlotConverting();
            assert( slot.ptr() != nullptr );
            assert( slot.bits() == base_class::flag_array_node );
            clear_array( to_array( slot.ptr()), array_node_size());
        // data node: unlink it via CAS; on success retire it for deferred disposal
        if ( pArr->compare_exchange_strong( slot, node_ptr(), memory_model::memory_order_acquire, atomics::memory_order_relaxed )) {
            gc::template retire<disposer>( slot.ptr());
            stats().onEraseSuccess();
// Looks up \p hash; on success the found data node is protected by \p guard.
// (Exact return paths are elided in this view - presumably returns the node
// pointer on success and nullptr on failure; TODO confirm.)
value_type * search( hash_type const& hash, typename gc::Guard& guard )
    traverse_data pos( hash, *this );
    hash_comparator cmp;
    // descend the multi-level array to the slot for \p hash
    node_ptr slot = base_class::traverse( pos );
    assert(slot.bits() == 0);

    // protect data node by hazard pointer
    if (guard.protect( pos.pArr->nodes[pos.nSlot], [](node_ptr p) -> value_type * { return p.ptr(); }) != slot) {
        // slot value has been changed - retry
        stats().onSlotChanged();
    else if (slot.ptr() && cmp(hash, hash_accessor()(*slot.ptr())) == 0) {
        // item found
        stats().onFindSuccess();
    stats().onFindFailed();
// Unlinks the node whose hash equals \p hash and for which \p pred(node) is true.
// The unlinked node stays protected by \p guard for the caller.
template <typename Predicate>
value_type * do_erase( hash_type const& hash, typename gc::Guard& guard, Predicate pred )
    traverse_data pos( hash, *this );
    hash_comparator cmp;
    node_ptr slot = base_class::traverse( pos );
    assert(slot.bits() == 0);

    // protect data node by hazard pointer
    if (guard.protect( pos.pArr->nodes[pos.nSlot], [](node_ptr p) -> value_type * { return p.ptr(); }) != slot) {
        // slot value has been changed - retry
        stats().onSlotChanged();
    else if (slot.ptr()) {
        if (cmp(hash, hash_accessor()(*slot.ptr())) == 0 && pred(*slot.ptr())) {
            // item found - replace it with nullptr
            if ( pos.pArr->nodes[pos.nSlot].compare_exchange_strong(slot, node_ptr(nullptr), memory_model::memory_order_acquire, atomics::memory_order_relaxed)) {
                // slot is guarded by HP; retire the node for deferred disposal
                gc::template retire<disposer>(slot.ptr());
                stats().onEraseSuccess();
            // CAS failed - another thread changed the slot, retry
            stats().onEraseRetry();
        stats().onEraseFailed();
    // the slot is empty
    stats().onEraseFailed();
// Erases the item an iterator points to. The iterator's own hazard pointer
// keeps the node alive, so no extra guard is needed here.
bool do_erase_at( iterator_base const& iter )
    // reject iterators from another set or placeholder (end/rend) positions
    if ( iter.m_set != this )
    if ( iter.m_pNode == head() && iter.m_idx >= head_size())
    if ( iter.m_idx >= array_node_size())

    node_ptr slot = iter.m_pNode->nodes[iter.m_idx].load( memory_model::memory_order_acquire );
    // erase only if the slot still holds the exact data node the iterator protects
    if ( slot.bits() == 0 && slot.ptr() == iter.pointer()) {
        if ( iter.m_pNode->nodes[iter.m_idx].compare_exchange_strong(slot, node_ptr(nullptr), memory_model::memory_order_acquire, atomics::memory_order_relaxed)) {
            // the item is guarded by iterator, so we may retire it safely
            gc::template retire<disposer>( slot.ptr());
            stats().onEraseSuccess();
// Inserts \p val, or replaces the existing node with an equal hash when present.
// On replacement \p f(val, pOld) is invoked before the old node is retired.
// Returns (succeeded, inserted-new).
template <typename Func>
std::pair<bool, bool> do_update( value_type& val, Func f, bool bInsert = true )
    hash_type const& hash = hash_accessor()( val );
    traverse_data pos( hash, *this );
    hash_comparator cmp;
    // guard 0 protects the slot content, guard 1 protects \p val itself
    typename gc::template GuardArray<2> guards;
    guards.assign( 1, &val );
    node_ptr slot = base_class::traverse( pos );
    assert(slot.bits() == 0);

    // protect data node by hazard pointer
    if ( guards.protect( 0, pos.pArr->nodes[pos.nSlot], [](node_ptr p) -> value_type * { return p.ptr(); }) != slot ) {
        // slot value has been changed - retry
        stats().onSlotChanged();
    else if ( slot.ptr()) {
        if ( cmp( hash, hash_accessor()(*slot.ptr())) == 0 ) {
            // the item with that hash value already exists
            // Replace it with val
            if ( slot.ptr() == &val ) {
                // updating with the same node - nothing to do
                stats().onUpdateExisting();
                return std::make_pair(true, false);
            if ( pos.pArr->nodes[pos.nSlot].compare_exchange_strong(slot, node_ptr(&val), memory_model::memory_order_release, atomics::memory_order_relaxed)) {
                // old slot content can be disposed
                f( val, slot.ptr());
                gc::template retire<disposer>( slot.ptr());
                stats().onUpdateExisting();
                return std::make_pair(true, false);
            // CAS failed - retry
            stats().onUpdateRetry();
        // hash collision at this level - the slot must be expanded
        base_class::expand_slot( pos, slot );
        stats().onUpdateFailed();
        return std::make_pair(false, false);
    // the slot is empty, try to insert data node
    if ( pos.pArr->nodes[pos.nSlot].compare_exchange_strong(pNull, node_ptr(&val), memory_model::memory_order_release, atomics::memory_order_relaxed))
        // the new data node has been inserted
        stats().onUpdateNew();
        stats().height( pos.nHeight );
        return std::make_pair(true, true);
    stats().onUpdateFailed();
    return std::make_pair(false, false);
1225 }} // namespace cds::intrusive
1227 #endif // #ifndef CDSLIB_INTRUSIVE_IMPL_FELDMAN_HASHSET_H