3 #ifndef CDSLIB_INTRUSIVE_MICHAEL_LIST_RCU_H
4 #define CDSLIB_INTRUSIVE_MICHAEL_LIST_RCU_H
6 #include <cds/intrusive/details/michael_list_base.h>
7 #include <cds/urcu/details/check_deadlock.h>
8 #include <cds/details/binary_functor_wrapper.h>
9 #include <cds/details/make_const_type.h>
10 #include <cds/urcu/exempt_ptr.h>
11 #include <cds/urcu/raw_ptr.h>
13 namespace cds { namespace intrusive {
16 namespace michael_list {
18 /// Node specialization for uRCU
19 template <class RCU, typename Tag>
20 struct node< cds::urcu::gc< RCU >, Tag >
22 typedef cds::urcu::gc< RCU > gc; ///< Garbage collector
23 typedef Tag tag; ///< tag
// marked_ptr mask is 3: the two low-order bits of the next pointer are
// used as deletion marks (erase/extract) by unlink_node()/search().
25 typedef cds::details::marked_ptr<node, 3> marked_ptr; ///< marked pointer
26 typedef typename gc::template atomic_marked_ptr<marked_ptr> atomic_marked_ptr; ///< atomic marked pointer specific for GC
28 atomic_marked_ptr m_pNext; ///< pointer to the next node in the container
// Links logically-removed nodes into a per-operation chain so they can be
// batch-retired after the RCU read-side section ends (see dispose_chain()).
29 node * m_pDelChain; ///< Deleted node chain (local for a thread)
31 CDS_CONSTEXPR node() CDS_NOEXCEPT
33 , m_pDelChain( nullptr )
36 } // namespace michael_list
39 /// Michael's lock-free ordered single-linked list (template specialization for \ref cds_urcu_desc "RCU")
40 /** @ingroup cds_intrusive_list
41 \anchor cds_intrusive_MichaelList_rcu
43 Usually, ordered single-linked list is used as a building block for the hash table implementation.
44 The complexity of searching is <tt>O(N)</tt>.
47 - \p RCU - one of \ref cds_urcu_gc "RCU type"
48 - \p T - type to be stored in the list; the type \p T should be based on (or has a member of type)
49 cds::intrusive::michael_list::node
50 - \p Traits - type traits. See \p michael_list::traits for explanation. It is possible to declare option-based
51 list with \p cds::intrusive::michael_list::make_traits metafunction,
52 see \ref cds_intrusive_MichaelList_hp "here" for explanations.
55 Before including <tt><cds/intrusive/michael_list_rcu.h></tt> you should include appropriate RCU header file,
56 see \ref cds_urcu_gc "RCU type" for list of existing RCU class and corresponding header files.
57 For example, for \ref cds_urcu_general_buffered_gc "general-purpose buffered RCU" you should include:
59 #include <cds/urcu/general_buffered.h>
60 #include <cds/intrusive/michael_list_rcu.h>
62 // Now, you can declare Michael's list for type Foo and default traits:
63 typedef cds::intrusive::MichaelList<cds::urcu::gc< cds::urcu::general_buffered<> >, Foo > rcu_michael_list;
66 template < typename RCU, typename T,
67 #ifdef CDS_DOXYGEN_INVOKED
68 class Traits = michael_list::traits
73 class MichaelList<cds::urcu::gc<RCU>, T, Traits>
76 typedef T value_type; ///< type of value stored in the list
77 typedef Traits traits; ///< Traits template parameter
79 typedef typename traits::hook hook; ///< hook type
80 typedef typename hook::node_type node_type; ///< node type
82 # ifdef CDS_DOXYGEN_INVOKED
83 typedef implementation_defined key_comparator ; ///< key comparison functor based on opt::compare and opt::less option setter.
85 typedef typename opt::details::make_comparator< value_type, traits >::type key_comparator;
88 typedef typename traits::disposer disposer; ///< disposer used
89 typedef typename get_node_traits< value_type, node_type, hook>::type node_traits ; ///< node traits
90 typedef typename michael_list::get_link_checker< node_type, traits::link_checker >::type link_checker; ///< link checker
92 typedef cds::urcu::gc<RCU> gc; ///< RCU schema
93 typedef typename traits::back_off back_off; ///< back-off strategy
94 typedef typename traits::item_counter item_counter; ///< Item counting policy used
95 typedef typename traits::memory_model memory_model; ///< Memory ordering. See cds::opt::memory_model option
96 typedef typename traits::rcu_check_deadlock rcu_check_deadlock; ///< Deadlock checking policy
98 typedef typename gc::scoped_lock rcu_lock ; ///< RCU scoped lock
99 static CDS_CONSTEXPR const bool c_bExtractLockExternal = false; ///< Group of \p extract_xxx functions do not require external locking
102 // Rebind traits (split-list support)
103 template <typename... Options>
104 struct rebind_traits {
108 , typename cds::opt::make_options< traits, Options...>::type
114 typedef typename node_type::marked_ptr marked_node_ptr ; ///< Marked node pointer
115 typedef typename node_type::atomic_marked_ptr atomic_node_ptr ; ///< Atomic node pointer
116 typedef atomic_node_ptr auxiliary_head ; ///< Auxiliary head type (for split-list support)
118 atomic_node_ptr m_pHead ; ///< Head pointer
119 item_counter m_ItemCounter ; ///< Item counter
129 typedef cds::urcu::details::check_deadlock_policy< gc, rcu_check_deadlock> check_deadlock_policy;
// Resets the node's next pointer and deleted-chain link so that the
// link_checker accepts the node as unlinked and it can be reused.
131 static void clear_links( node_type * pNode )
133 pNode->m_pNext.store( marked_node_ptr(), memory_model::memory_order_relaxed );
134 pNode->m_pDelChain = nullptr;
// Disposer functor handed to RCU retirement: clears the node's links,
// then (below, in lines not visible here) invokes the user disposer.
137 struct clear_and_dispose {
138 void operator()( value_type * p )
140 assert( p != nullptr );
141 clear_links( node_traits::to_node_ptr(p));
// Retires a single node through the RCU reclamation cycle.
// Must be called OUTSIDE an RCU read-side critical section.
146 static void dispose_node( node_type * pNode )
149 assert( !gc::is_locked() );
151 gc::template retire_ptr<clear_and_dispose>( node_traits::to_value_ptr( *pNode ) );
// Retires a whole chain of removed nodes (linked via m_pDelChain) in one
// batch_retire() call. Must be called outside an RCU lock.
154 static void dispose_chain( node_type * pChain )
157 assert( !gc::is_locked() );
// Generator lambda for batch_retire: each invocation pops the next node
// off the chain; a retired_ptr wrapping nullptr terminates the batch.
159 auto f = [&pChain]() -> cds::urcu::retired_ptr {
160 node_type * p = pChain;
162 pChain = p->m_pDelChain;
163 return cds::urcu::make_retired_ptr<clear_and_dispose>( node_traits::to_value_ptr( p ));
165 return cds::urcu::make_retired_ptr<clear_and_dispose>( static_cast<value_type *>(nullptr));
// std::ref: batch_retire must consume the same stateful generator, not a copy.
167 gc::batch_retire(std::ref(f));
171 /// Position pointer for item search
// Filled in by search(); also owns the chain of nodes that were physically
// unlinked during the search — its destructor batch-disposes them.
173 atomic_node_ptr * pPrev ; ///< Previous node
174 node_type * pCur ; ///< Current node
175 node_type * pNext ; ///< Next node
// Head of the bucket/list this position was built from (used for re-search).
177 atomic_node_ptr& refHead;
178 node_type * pDelChain; ///< Head of deleted node chain
180 position( atomic_node_ptr& head )
182 , pDelChain( nullptr )
// Destructor path: retire everything collected on the deleted-node chain.
187 dispose_chain( pDelChain );
194 using exempt_ptr = cds::urcu::exempt_ptr< gc, value_type, value_type, clear_and_dispose, void >; ///< pointer to extracted node
// Move-only disposer attached to raw_ptr: it takes over a position's
// deleted-node chain so disposal can be deferred until after the caller
// leaves the RCU read-side critical section.
198 struct raw_ptr_disposer
200 node_type * pReclaimedChain;
203 : pReclaimedChain( nullptr )
// Steals the chain from the position (the position must not dispose it too).
206 raw_ptr_disposer( position& pos )
207 : pReclaimedChain( pos.pDelChain )
209 pos.pDelChain = nullptr;
212 raw_ptr_disposer( raw_ptr_disposer&& d )
213 : pReclaimedChain( d.pReclaimedChain )
215 d.pReclaimedChain = nullptr;
// Copying would double-dispose the chain — explicitly forbidden.
218 raw_ptr_disposer( raw_ptr_disposer const& ) = delete;
// Disposal is only legal outside the RCU lock.
227 if ( pReclaimedChain ) {
228 assert( !gc::is_locked());
229 dispose_chain( pReclaimedChain );
230 pReclaimedChain = nullptr;
237 /// Result of \p get(), \p get_with() functions - pointer to the node found
238 typedef cds::urcu::raw_ptr< gc, value_type, raw_ptr_disposer > raw_ptr;
// Links pNode in front of pos.pCur by CAS-ing pos.pPrev.
// Returns false when the CAS loses a race (caller must re-search).
243 bool link_node( node_type * pNode, position& pos )
245 assert( pNode != nullptr );
246 link_checker::is_empty( pNode );
248 marked_node_ptr p( pos.pCur );
// release: publish the node's next pointer before it becomes reachable.
249 pNode->m_pNext.store( p, memory_model::memory_order_release );
250 return pos.pPrev->compare_exchange_strong( p, marked_node_ptr(pNode), memory_model::memory_order_release, atomics::memory_order_relaxed );
// Pushes a physically-unlinked node onto the position's deleted-node chain;
// it will be batch-retired later (position destructor / raw_ptr_disposer).
253 static void link_to_remove_chain( position& pos, node_type * pDel )
255 assert( pDel->m_pDelChain == nullptr );
257 pDel->m_pDelChain = pos.pDelChain;
258 pos.pDelChain = pDel;
// Two-phase removal of pos.pCur:
//  1) CAS a mark (nMask: erase_mask or extract_mask) into the node's next
//     pointer — logical deletion;
//  2) try to swing pos.pPrev past the node — physical removal (fast path).
// On fast-path failure, re-search() is used to help unlink marked nodes.
// Only erase_mask-marked nodes go onto the removal chain here; extract-
// marked nodes are returned to the caller (see extract_at).
261 bool unlink_node( position& pos, erase_node_mask nMask )
263 // Mark the node (logical deletion)
264 marked_node_ptr next(pos.pNext, 0);
266 if ( pos.pCur->m_pNext.compare_exchange_strong( next, next | nMask, memory_model::memory_order_acq_rel, atomics::memory_order_relaxed )) {
268 // Try physical removal - fast path
269 marked_node_ptr cur(pos.pCur);
270 if ( pos.pPrev->compare_exchange_strong(cur, marked_node_ptr(pos.pNext), memory_model::memory_order_acquire, atomics::memory_order_relaxed) ) {
271 if ( nMask == erase_mask )
272 link_to_remove_chain( pos, pos.pCur );
// Slow path: let search() physically unlink the marked node.
276 search( pos.refHead, *node_traits::to_value_ptr( pos.pCur ), pos, key_comparator() );
// Forward iterator over list values; IsConst selects const/non-const access.
// Link loads are relaxed — NOTE(review): iteration appears to assume an
// external RCU lock is held by the caller; confirm against the full header.
286 template <bool IsConst>
289 friend class MichaelList;
290 value_type * m_pNode;
// Advance to the next node; .ptr() strips the deletion-mark bits.
295 node_type * p = node_traits::to_node_ptr( *m_pNode )->m_pNext.load(memory_model::memory_order_relaxed).ptr();
296 m_pNode = p ? node_traits::to_value_ptr( p ) : nullptr;
301 explicit iterator_type( node_type * pNode)
304 m_pNode = node_traits::to_value_ptr( *pNode );
// Construct from a head pointer: start at the first node (or end if empty).
308 explicit iterator_type( atomic_node_ptr const& refNode)
310 node_type * pNode = refNode.load(memory_model::memory_order_relaxed).ptr();
311 m_pNode = pNode ? node_traits::to_value_ptr( *pNode ) : nullptr;
315 typedef typename cds::details::make_const_type<value_type, IsConst>::pointer value_ptr;
316 typedef typename cds::details::make_const_type<value_type, IsConst>::reference value_ref;
322 iterator_type( const iterator_type& src )
323 : m_pNode( src.m_pNode )
326 value_ptr operator ->() const
// Dereferencing the end iterator (m_pNode == nullptr) is a programmer error.
331 value_ref operator *() const
333 assert( m_pNode != nullptr );
338 iterator_type& operator ++()
345 iterator_type operator ++(int)
347 iterator_type i(*this);
352 iterator_type& operator = (const iterator_type& src)
354 m_pNode = src.m_pNode;
// Comparison is mixed-constness friendly (template parameter C).
359 bool operator ==(iterator_type<C> const& i ) const
361 return m_pNode == i.m_pNode;
364 bool operator !=(iterator_type<C> const& i ) const
366 return m_pNode != i.m_pNode;
373 typedef iterator_type<false> iterator;
374 /// Const forward iterator
375 typedef iterator_type<true> const_iterator;
377 /// Returns a forward iterator addressing the first element in a list
379 For empty list \code begin() == end() \endcode
383 return iterator( m_pHead );
386 /// Returns an iterator that addresses the location succeeding the last element in a list
388 Do not use the value returned by <tt>end</tt> function to access any item.
389 Internally, <tt>end</tt> returning value equals to \p nullptr.
391 The returned value can be used only to control reaching the end of the list.
392 For empty list \code begin() == end() \endcode
399 /// Returns a forward const iterator addressing the first element in a list
400 const_iterator begin() const
402 return const_iterator(m_pHead );
404 /// Returns a forward const iterator addressing the first element in a list
405 const_iterator cbegin() const
407 return const_iterator(m_pHead );
410 /// Returns an const iterator that addresses the location succeeding the last element in a list
411 const_iterator end() const
413 return const_iterator();
415 /// Returns an const iterator that addresses the location succeeding the last element in a list
416 const_iterator cend() const
418 return const_iterator();
422 /// Default constructor initializes empty list
426 static_assert( (std::is_same< gc, typename node_type::gc >::value), "GC and node_type::gc must be the same type" );
437 The function inserts \p val in the list if the list does not contain
438 an item with key equal to \p val.
440 The function makes RCU lock internally.
442 Returns \p true if \p val is linked into the list, \p false otherwise.
444 bool insert( value_type& val )
446 return insert_at( m_pHead, val );
451 This function is intended for derived non-intrusive containers.
453 The function allows to split new item creating into two part:
454 - create item with key only
455 - insert new item into the list
456 - if inserting is success, calls \p f functor to initialize value-field of \p val.
458 The functor signature is:
460 void func( value_type& val );
462 where \p val is the item inserted. User-defined functor \p f should guarantee that during changing
463 \p val no any other changes could be made on this list's item by concurrent threads.
464 The user-defined functor is called only if the inserting is success.
466 The function makes RCU lock internally.
468 @warning See \ref cds_intrusive_item_creating "insert item troubleshooting"
470 template <typename Func>
471 bool insert( value_type& val, Func f )
473 return insert_at( m_pHead, val, f );
476 /// Ensures that the \p item exists in the list
478 The operation performs inserting or changing data with lock-free manner.
480 If the item \p val not found in the list, then \p val is inserted into the list.
481 Otherwise, the functor \p func is called with item found.
482 The functor signature is:
485 void operator()( bool bNew, value_type& item, value_type& val );
489 - \p bNew - \p true if the item has been inserted, \p false otherwise
490 - \p item - item of the list
491 - \p val - argument \p val passed into the \p ensure function
492 If new item has been inserted (i.e. \p bNew is \p true) then \p item and \p val arguments
493 refer to the same thing.
495 The functor may change non-key fields of the \p item; however, \p func must guarantee
496 that during changing no any other modifications could be made on this item by concurrent threads.
498 Returns <tt> std::pair<bool, bool> </tt> where \p first is true if operation is successful,
499 \p second is true if new item has been added or \p false if the item with \p key
500 already is in the list.
502 The function makes RCU lock internally.
504 @warning See \ref cds_intrusive_item_creating "insert item troubleshooting"
506 template <typename Func>
507 std::pair<bool, bool> ensure( value_type& val, Func func )
509 return ensure_at( m_pHead, val, func );
512 /// Unlinks the item \p val from the list
514 The function searches the item \p val in the list and unlink it from the list
515 if it is found and it is equal to \p val.
517 Difference between \ref erase and \p unlink functions: \p erase finds <i>a key</i>
518 and deletes the item found. \p unlink finds an item by key and deletes it
519 only if \p val is an item of that list, i.e. the pointer to the item found
520 is equal to <tt> &val </tt>.
522 The function returns \p true if success and \p false otherwise.
524 RCU \p synchronize method can be called.
525 Note that depending on RCU type used the \ref disposer call can be deferred.
527 The function can throw cds::urcu::rcu_deadlock exception if deadlock is encountered and
528 deadlock checking policy is opt::v::rcu_throw_deadlock.
530 bool unlink( value_type& val )
532 return unlink_at( m_pHead, val );
535 /// Deletes the item from the list
536 /** \anchor cds_intrusive_MichaelList_rcu_erase_val
537 The function searches an item with key equal to \p key in the list,
538 unlinks it from the list, and returns \p true.
539 If the item with the key equal to \p key is not found the function return \p false.
541 RCU \p synchronize method can be called.
542 Note that depending on RCU type used the \ref disposer call can be deferred.
544 The function can throw \ref cds_urcu_rcu_deadlock "cds::urcu::rcu_deadlock" exception if a deadlock is detected and
545 the deadlock checking policy is \p opt::v::rcu_throw_deadlock.
547 template <typename Q>
548 bool erase( Q const& key )
550 return erase_at( m_pHead, key, key_comparator() );
553 /// Deletes the item from the list using \p pred predicate for searching
555 The function is an analog of \ref cds_intrusive_MichaelList_rcu_erase_val "erase(Q const&)"
556 but \p pred is used for key comparing.
557 \p Less functor has the interface like \p std::less.
558 \p pred must imply the same element order as the comparator used for building the list.
560 template <typename Q, typename Less>
561 bool erase_with( Q const& key, Less pred )
564 return erase_at( m_pHead, key, cds::opt::details::make_comparator_from_less<Less>() );
567 /// Deletes the item from the list
568 /** \anchor cds_intrusive_MichaelList_rcu_erase_func
569 The function searches an item with key equal to \p key in the list,
570 call \p func functor with item found, unlinks it from the list, and returns \p true.
571 The \p Func interface is
574 void operator()( value_type const& item );
578 If the item with the key equal to \p key is not found the function return \p false.
580 RCU \p synchronize method can be called.
581 Note that depending on RCU type used the \ref disposer call can be deferred.
583 The function can throw \ref cds_urcu_rcu_deadlock "cds::urcu::rcu_deadlock" exception if a deadlock is detected and
584 the deadlock checking policy is \p opt::v::rcu_throw_deadlock.
586 template <typename Q, typename Func>
587 bool erase( Q const& key, Func func )
589 return erase_at( m_pHead, key, key_comparator(), func );
592 /// Deletes the item from the list using \p pred predicate for searching
594 The function is an analog of \ref cds_intrusive_MichaelList_rcu_erase_func "erase(Q const&, Func)"
595 but \p pred is used for key comparing.
596 \p Less functor has the interface like \p std::less.
597 \p pred must imply the same element order as the comparator used for building the list.
599 template <typename Q, typename Less, typename Func>
600 bool erase_with( Q const& key, Less pred, Func func )
603 return erase_at( m_pHead, key, cds::opt::details::make_comparator_from_less<Less>(), func );
606 /// Extracts an item from the list
608 @anchor cds_intrusive_MichaelList_rcu_extract
609 The function searches an item with key equal to \p key in the list,
610 unlinks it from the list, and returns \ref cds::urcu::exempt_ptr "exempt_ptr" pointer to the item found.
611 If \p key is not found the function returns an empty \p exempt_ptr.
613 @note The function does NOT dispose the item found. It just unlinks the item from the list
614 and returns a pointer to item found.
615 You shouldn't lock RCU for current thread before calling this function, and you should manually release
616 \p dest exempt pointer outside the RCU lock before reusing it.
619 #include <cds/urcu/general_buffered.h>
620 #include <cds/intrusive/michael_list_rcu.h>
622 typedef cds::urcu::gc< general_buffered<> > rcu;
623 typedef cds::intrusive::MichaelList< rcu, Foo > rcu_michael_list;
625 rcu_michael_list theList;
628 rcu_michael_list::exempt_ptr p1;
630 // The RCU should NOT be locked when extract() is called!
631 assert( !rcu::is_locked() );
633 // You can call extract() function
634 p1 = theList.extract( 10 );
636 // do something with p1
640 // We may safely release p1 here
641 // release() passes the pointer to RCU reclamation cycle:
642 // it invokes RCU retire_ptr function with the disposer you provided for the list.
646 template <typename Q>
647 exempt_ptr extract( Q const& key )
649 return exempt_ptr( extract_at( m_pHead, key, key_comparator() ));
652 /// Extracts an item from the list using \p pred predicate for searching
654 This function is the analog for \p extract(Q const&)
656 The \p pred is a predicate used for key comparing.
657 \p Less has the interface like \p std::less.
658 \p pred must imply the same element order as \ref key_comparator.
660 template <typename Q, typename Less>
661 exempt_ptr extract_with( Q const& key, Less pred )
664 return exempt_ptr( extract_at( m_pHead, key, cds::opt::details::make_comparator_from_less<Less>() ));
667 /// Find the key \p val
668 /** \anchor cds_intrusive_MichaelList_rcu_find_func
669 The function searches the item with key equal to \p key
670 and calls the functor \p f for item found.
671 The interface of \p Func functor is:
674 void operator()( value_type& item, Q& key );
677 where \p item is the item found, \p key is the <tt>find</tt> function argument.
679 The functor can change non-key fields of \p item.
680 The function \p find does not serialize simultaneous access to the list \p item. If such access is
681 possible you must provide your own synchronization schema to exclude unsafe item modifications.
683 The function makes RCU lock internally.
685 The function returns \p true if \p val is found, \p false otherwise.
687 template <typename Q, typename Func>
688 bool find( Q& key, Func f )
690 return find_at( m_pHead, key, key_comparator(), f );
693 template <typename Q, typename Func>
694 bool find( Q const& key, Func f )
696 return find_at( m_pHead, key, key_comparator(), f );
700 /// Finds \p key using \p pred predicate for searching
702 The function is an analog of \ref cds_intrusive_MichaelList_rcu_find_func "find(Q&, Func)"
703 but \p pred is used for key comparing.
704 \p Less functor has the interface like \p std::less.
705 \p pred must imply the same element order as the comparator used for building the list.
707 template <typename Q, typename Less, typename Func>
708 bool find_with( Q& key, Less pred, Func f )
711 return find_at( m_pHead, key, cds::opt::details::make_comparator_from_less<Less>(), f );
714 template <typename Q, typename Less, typename Func>
715 bool find_with( Q const& key, Less pred, Func f )
718 return find_at( m_pHead, key, cds::opt::details::make_comparator_from_less<Less>(), f );
723 /** \anchor cds_intrusive_MichaelList_rcu_find_val
724 The function searches the item with key equal to \p key
725 and returns \p true if \p val found or \p false otherwise.
727 template <typename Q>
728 bool find( Q const& key )
730 return find_at( m_pHead, key, key_comparator() );
733 /// Finds \p key using \p pred predicate for searching
735 The function is an analog of \ref cds_intrusive_MichaelList_rcu_find_val "find(Q const&)"
736 but \p pred is used for key comparing.
737 \p Less functor has the interface like \p std::less.
738 \p pred must imply the same element order as the comparator used for building the list.
740 template <typename Q, typename Less>
741 bool find_with( Q const& key, Less pred )
744 return find_at( m_pHead, key, cds::opt::details::make_comparator_from_less<Less>() );
747 /// Finds \p key and return the item found
748 /** \anchor cds_intrusive_MichaelList_rcu_get
749 The function searches the item with key equal to \p key and returns the pointer to item found.
750 If \p key is not found it returns empty \p raw_ptr object.
752 Note the compare functor should accept a parameter of type \p Q that can be not the same as \p value_type.
754 RCU should be locked before call of this function.
755 Returned item is valid only while RCU is locked:
757 typedef cds::intrusive::MichaelList< cds::urcu::gc< cds::urcu::general_buffered<> >, foo, my_traits > ord_list;
760 typename ord_list::raw_ptr rp;
763 ord_list::rcu_lock lock;
765 rp = theList.get( 5 );
770 // Unlock RCU by rcu_lock destructor
771 // Node owned by rp can be retired by disposer at any time after RCU has been unlocked
773 // You can manually release rp after RCU-locked section
777 template <typename Q>
778 raw_ptr get( Q const& key )
780 return get_at( m_pHead, key, key_comparator());
783 /// Finds \p key and return the item found
785 The function is an analog of \ref cds_intrusive_MichaelList_rcu_get "get(Q const&)"
786 but \p pred is used for comparing the keys.
788 \p Less functor has the semantics like \p std::less but should take arguments of type \p value_type and \p Q
790 \p pred must imply the same element order as the comparator used for building the list.
792 template <typename Q, typename Less>
793 raw_ptr get_with( Q const& key, Less pred )
796 return get_at( m_pHead, key, cds::opt::details::make_comparator_from_less<Less>());
799 /// Clears the list using default disposer
801 The function clears the list using default (provided by \p Traits class template argument) disposer functor.
803 RCU \p synchronize method can be called.
804 Note that depending on RCU type used the \ref disposer invocation can be deferred.
806 The function can throw \p cds::urcu::rcu_deadlock exception if an deadlock is encountered and
807 deadlock checking policy is \p opt::v::rcu_throw_deadlock.
// Pops nodes from the head one at a time (this appears to be the clear()
// body documented above — surrounding loop lines are not visible here):
// mark the head's next pointer, swing m_pHead past it, then retire it.
812 check_deadlock_policy::check();
814 marked_node_ptr pHead;
818 pHead = m_pHead.load(memory_model::memory_order_acquire);
// Logically delete the head node by setting the mark bit on its next link.
821 marked_node_ptr pNext( pHead->m_pNext.load(memory_model::memory_order_relaxed) );
822 if ( !pHead->m_pNext.compare_exchange_weak( pNext, pNext | 1, memory_model::memory_order_acquire, memory_model::memory_order_relaxed ))
// Physically unlink: advance m_pHead; on contention the CAS retries.
824 if ( !m_pHead.compare_exchange_weak( pHead, marked_node_ptr(pNext.ptr()), memory_model::memory_order_release, memory_model::memory_order_relaxed ))
829 dispose_node( pHead.ptr() );
834 /// Check if the list is empty
837 return m_pHead.load( memory_model::memory_order_relaxed ).all() == nullptr;
840 /// Returns list's item count
842 The value returned depends on item counter provided by \p Traits. For \p atomicity::empty_item_counter,
843 this function always returns 0.
845 @note Even if you use real item counter and it returns 0, this fact does not mean that the list
846 is empty. To check list emptiness use \p empty() method.
850 return m_ItemCounter.value();
855 // split-list support
856 bool insert_aux_node( node_type * pNode )
858 return insert_aux_node( m_pHead, pNode );
861 // split-list support
862 bool insert_aux_node( atomic_node_ptr& refHead, node_type * pNode )
864 assert( pNode != nullptr );
866 // Hack: convert node_type to value_type.
867 // In principle, auxiliary node can be non-reducible to value_type
868 // We assume that comparator can correctly distinguish between aux and regular node.
869 return insert_at( refHead, *node_traits::to_value_ptr( pNode ) );
// Inserts val into the list starting at refHead (delegates to the locked
// helper; the RCU-lock acquisition lines are not visible in this fragment).
872 bool insert_at( atomic_node_ptr& refHead, value_type& val )
874 position pos( refHead );
877 return insert_at_locked( pos, val );
// Insert with functor: on a successful link, f is applied to the new item
// (per the public insert(val, f) contract documented above).
881 template <typename Func>
882 bool insert_at( atomic_node_ptr& refHead, value_type& val, Func f )
884 link_checker::is_empty( node_traits::to_node_ptr( val ) );
885 position pos( refHead );
890 if ( search( refHead, val, pos, key_comparator()))
893 if ( link_node( node_traits::to_node_ptr( val ), pos ) ) {
// Link failed: clear the next field so the link_checker accepts a retry.
900 node_traits::to_node_ptr( val )->m_pNext.store( marked_node_ptr(), memory_model::memory_order_relaxed );
// Iterator-returning insert used by non-intrusive wrappers.
906 iterator insert_at_( atomic_node_ptr& refHead, value_type& val )
909 if ( insert_at_locked( refHead, val ))
910 return iterator( node_traits::to_node_ptr( val ));
914 template <typename Func>
915 std::pair<iterator, bool> ensure_at_( atomic_node_ptr& refHead, value_type& val, Func func )
917 position pos( refHead );
920 return ensure_at_locked( pos, val, func );
// ensure(): converts the iterator result into the (success, inserted) pair
// the public ensure() API returns.
924 template <typename Func>
925 std::pair<bool, bool> ensure_at( atomic_node_ptr& refHead, value_type& val, Func func )
927 position pos( refHead );
930 std::pair<iterator, bool> ret = ensure_at_locked( pos, val, func );
931 return std::make_pair( ret.first != end(), ret.second );
// unlink(): removes the item only if the node found is exactly &val
// (pointer identity), not merely key-equal.
935 bool unlink_at( atomic_node_ptr& refHead, value_type& val )
937 position pos( refHead );
939 check_deadlock_policy::check();
944 if ( !search( refHead, val, pos, key_comparator() ) || node_traits::to_value_ptr( *pos.pCur ) != &val )
946 if ( !unlink_node( pos, erase_mask )) {
// Erases the item key-equal to val using an existing position; f is applied
// to the removed value after a successful unlink. The node is marked with
// erase_mask, so its disposal is handled internally (deleted-node chain).
957 template <typename Q, typename Compare, typename Func>
958 bool erase_at( position& pos, Q const& val, Compare cmp, Func f )
961 check_deadlock_policy::check();
966 if ( !search( pos.refHead, val, pos, cmp ) )
968 if ( !unlink_node( pos, erase_mask )) {
974 f( *node_traits::to_value_ptr( *pos.pCur ) );
// Convenience overload: builds the position from the list head.
980 template <typename Q, typename Compare, typename Func>
981 bool erase_at( atomic_node_ptr& refHead, Q const& val, Compare cmp, Func f )
983 position pos( refHead );
984 return erase_at( pos, val, cmp, f );
// Erase without a user functor (no-op callback).
987 template <typename Q, typename Compare>
988 bool erase_at( atomic_node_ptr& refHead, const Q& val, Compare cmp )
990 position pos( refHead );
991 return erase_at( pos, val, cmp, [](value_type const&){} );
// extract(): unlinks the item and hands it to the caller instead of
// disposing it — hence extract_mask, and RCU must NOT be locked so the
// caller can later release the exempt_ptr safely.
994 template <typename Q, typename Compare >
995 value_type * extract_at( atomic_node_ptr& refHead, Q const& val, Compare cmp )
997 position pos( refHead );
999 assert( !gc::is_locked() ) ; // RCU must not be locked!!!
1004 if ( !search( refHead, val, pos, cmp ) )
1006 if ( !unlink_node( pos, extract_mask )) {
1012 return node_traits::to_value_ptr( pos.pCur );
// find(): applies f(item, val) to the found item. Per the public API docs
// above, the RCU lock is taken internally (lock lines not visible here).
1017 template <typename Q, typename Compare, typename Func>
1018 bool find_at( atomic_node_ptr& refHead, Q& val, Compare cmp, Func f )
1020 position pos( refHead );
1024 if ( search( refHead, val, pos, cmp ) ) {
1025 assert( pos.pCur != nullptr );
1026 f( *node_traits::to_value_ptr( *pos.pCur ), val );
// Presence-only find, implemented via the locked iterator-returning helper.
1033 template <typename Q, typename Compare>
1034 bool find_at( atomic_node_ptr& refHead, Q const& val, Compare cmp )
1036 position pos( refHead );
1039 return find_at_locked( pos, val, cmp ) != cend();
// get(): returns a raw_ptr whose disposer adopts the position's deleted-node
// chain, deferring disposal until after the caller's RCU-locked section.
1043 template <typename Q, typename Compare>
1044 raw_ptr get_at( atomic_node_ptr& refHead, Q const& val, Compare cmp )
1046 // RCU should be locked!
1047 assert(gc::is_locked() );
1049 position pos( refHead );
1051 if ( search( refHead, val, pos, cmp ))
1052 return raw_ptr( node_traits::to_value_ptr( pos.pCur ), raw_ptr_disposer( pos ));
1053 return raw_ptr( raw_ptr_disposer( pos ));
// Michael-list traversal: locates the position for val, helping to
// physically unlink any marked (logically deleted) node met on the way.
// Fills pos.{pPrev,pCur,pNext}; caller must hold the RCU lock.
1060 template <typename Q, typename Compare>
1061 bool search( atomic_node_ptr& refHead, const Q& val, position& pos, Compare cmp )
1063 // RCU lock should be locked!!!
1064 assert( gc::is_locked() );
1066 atomic_node_ptr * pPrev;
1067 marked_node_ptr pNext;
1068 marked_node_ptr pCur;
1074 pCur = pPrev->load(memory_model::memory_order_acquire);
// End of list: record the insertion point; val was not found.
1078 if ( !pCur.ptr() ) {
1081 pos.pNext = nullptr;
1085 pNext = pCur->m_pNext.load(memory_model::memory_order_acquire);
// Validate the snapshot: if a concurrent thread changed either link,
// the traversal restarts (restart lines not visible in this fragment).
1087 if ( pPrev->load(memory_model::memory_order_relaxed) != pCur
1088 || pNext != pCur->m_pNext.load(memory_model::memory_order_relaxed))
// Non-zero mark bits => pCur is logically deleted; help remove it.
1094 if ( pNext.bits() ) {
1095 // pCur is marked as deleted. Try to unlink it from the list
1096 if ( pPrev->compare_exchange_weak( pCur, marked_node_ptr( pNext.ptr() ), memory_model::memory_order_acquire, atomics::memory_order_relaxed ) ) {
// Only erase-marked nodes are chained for internal disposal;
// extract-marked nodes belong to the extracting thread.
1097 if ( pNext.bits() == erase_mask )
1098 link_to_remove_chain( pos, pCur.ptr() );
1104 assert( pCur.ptr() != nullptr );
1105 int nCmp = cmp( *node_traits::to_value_ptr( pCur.ptr() ), val );
// Record the candidate position (mark bits stripped via .ptr()).
1108 pos.pCur = pCur.ptr();
1109 pos.pNext = pNext.ptr();
// Keep walking: previous becomes the current node's next field.
1112 pPrev = &( pCur->m_pNext );
// Inserts val at a freshly searched position; caller holds the RCU lock.
// Fails (lines not visible) when a key-equal item already exists; on a
// lost link_node CAS the node's next field is cleared before retrying.
1120 bool insert_at_locked( position& pos, value_type& val )
1122 // RCU lock should be locked!!!
1123 assert( gc::is_locked() );
1124 link_checker::is_empty( node_traits::to_node_ptr( val ) );
1127 if ( search( pos.refHead, val, pos, key_comparator() ) )
1130 if ( link_node( node_traits::to_node_ptr( val ), pos ) ) {
1136 node_traits::to_node_ptr( val )->m_pNext.store( marked_node_ptr(), memory_model::memory_order_relaxed );
// insert-or-update under the RCU lock: if a key-equal item exists, call
// func(false, existing, val); otherwise link val and call func(true, val, val).
// Returns (iterator to the item, true iff a new item was inserted).
1140 template <typename Func>
1141 std::pair<iterator, bool> ensure_at_locked( position& pos, value_type& val, Func func )
1143 // RCU lock should be locked!!!
1144 assert( gc::is_locked() );
1147 if ( search( pos.refHead, val, pos, key_comparator() ) ) {
1148 assert( key_comparator()( val, *node_traits::to_value_ptr( *pos.pCur ) ) == 0 );
1150 func( false, *node_traits::to_value_ptr( *pos.pCur ), val );
1151 return std::make_pair( iterator( pos.pCur ), false );
1154 link_checker::is_empty( node_traits::to_node_ptr( val ) );
1156 if ( link_node( node_traits::to_node_ptr( val ), pos ) ) {
1158 func( true, val , val );
1159 return std::make_pair( iterator( node_traits::to_node_ptr( val )), true );
1162 // clear the next field
1163 node_traits::to_node_ptr( val )->m_pNext.store( marked_node_ptr(), memory_model::memory_order_relaxed );
// Returns a const_iterator to the found item, or (not visible here) cend().
// Caller holds the RCU lock.
1168 template <typename Q, typename Compare>
1169 const_iterator find_at_locked( position& pos, Q const& val, Compare cmp )
1171 assert( gc::is_locked() );
1173 if ( search( pos.refHead, val, pos, cmp ) ) {
1174 assert( pos.pCur != nullptr );
1175 return const_iterator( pos.pCur );
1182 }} // namespace cds::intrusive
1184 #endif // #ifndef CDSLIB_INTRUSIVE_MICHAEL_LIST_RCU_H