2 This file is a part of libcds - Concurrent Data Structures library
4 (C) Copyright Maxim Khizhinsky (libcds.dev@gmail.com) 2006-2017
6 Source code repo: http://github.com/khizmax/libcds/
7 Download: http://sourceforge.net/projects/libcds/files/
9 Redistribution and use in source and binary forms, with or without
10 modification, are permitted provided that the following conditions are met:
12 * Redistributions of source code must retain the above copyright notice, this
13 list of conditions and the following disclaimer.
15 * Redistributions in binary form must reproduce the above copyright notice,
16 this list of conditions and the following disclaimer in the documentation
17 and/or other materials provided with the distribution.
19 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
20 AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
21 IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
22 DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
23 FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
24 DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
25 SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
26 CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
27 OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
28 OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31 #ifndef CDSLIB_INTRUSIVE_IMPL_ITERABLE_LIST_H
32 #define CDSLIB_INTRUSIVE_IMPL_ITERABLE_LIST_H
34 #include <cds/intrusive/details/iterable_list_base.h>
35 #include <cds/details/make_const_type.h>
37 namespace cds { namespace intrusive {
39 /// Iterable lock-free ordered single-linked list
40 /** @ingroup cds_intrusive_list
41 \anchor cds_intrusive_IterableList_hp
43 This non-blocking list implementation supports thread-safe iterators;
44 searching and removing are lock-free, inserting is non-blocking because it
45 uses a light-weight synchronization based on marked pointers.
47 Unlike \p cds::intrusive::MichaelList the iterable list does not require
48 any hook in \p T to be stored in the list.
50 Usually, ordered single-linked list is used as a building block for the hash table implementation.
51 Iterable list is suitable for almost append-only hash table because the list doesn't delete
52 its internal node when erasing a key; instead, the node is marked as empty so it can be reused in the future.
53 However, plenty of empty nodes degrades performance.
54 Separation of internal nodes and user data implies the need for an allocator for internal node
55 so the iterable list is not fully intrusive. Nevertheless, if you need thread-safe iterator,
56 the iterable list is good choice.
58 The complexity of searching is <tt>O(N)</tt>.
61 - \p GC - Garbage collector used.
62 - \p T - type to be stored in the list.
63 - \p Traits - type traits, default is \p iterable_list::traits. It is possible to declare option-based
64 list with \p cds::intrusive::iterable_list::make_traits metafunction:
65 For example, the following traits-based declaration of \p gc::HP iterable list
67 #include <cds/intrusive/iterable_list_hp.h>
68 // Declare item stored in your list
75 // Declare comparator for the item
77 int operator()( foo const& i1, foo const& i2 ) const
79 return i1.nKey - i2.nKey;
84 struct my_traits: public cds::intrusive::iterable_list::traits
86 typedef my_compare compare;
90 typedef cds::intrusive::IterableList< cds::gc::HP, foo, my_traits > list_type;
92 is equivalent for the following option-based list
94 #include <cds/intrusive/iterable_list_hp.h>
96 // foo struct and my_compare are the same
98 // Declare option-based list
99 typedef cds::intrusive::IterableList< cds::gc::HP, foo,
100 typename cds::intrusive::iterable_list::make_traits<
101 cds::intrusive::opt::compare< my_compare > // item comparator option
107 There are different specializations of this template for each garbage collecting schema.
108 You should select GC you want and include appropriate .h-file:
109 - for \p gc::HP: <tt> <cds/intrusive/iterable_list_hp.h> </tt>
110 - for \p gc::DHP: <tt> <cds/intrusive/iterable_list_dhp.h> </tt>
115 #ifdef CDS_DOXYGEN_INVOKED
116 ,class Traits = iterable_list::traits
122 #ifndef CDS_DOXYGEN_INVOKED
123 : public iterable_list_tag
// NOTE(review): this extract elides interleaved source lines; code is left
// byte-identical and only comments are added.
// --- Public type aliases drawn from the template parameters / Traits ---
127 typedef T value_type; ///< type of value stored in the list
128 typedef Traits traits; ///< Traits template parameter
130 typedef iterable_list::node< value_type > node_type; ///< node type
132 # ifdef CDS_DOXYGEN_INVOKED
133 typedef implementation_defined key_comparator ; ///< key comparison functor based on opt::compare and opt::less option setter.
135 typedef typename opt::details::make_comparator< value_type, traits >::type key_comparator;
138 typedef typename traits::disposer disposer; ///< disposer for \p value_type
140 typedef GC gc; ///< Garbage collector
141 typedef typename traits::back_off back_off; ///< back-off strategy
142 typedef typename traits::item_counter item_counter; ///< Item counting policy used
143 typedef typename traits::memory_model memory_model; ///< Memory ordering. See \p cds::opt::memory_model option
144 typedef typename traits::node_allocator node_allocator; ///< Node allocator
145 typedef typename traits::stat stat; ///< Internal statistics
147 typedef typename gc::template guarded_ptr< value_type > guarded_ptr; ///< Guarded pointer
149 static CDS_CONSTEXPR const size_t c_nHazardPtrCount = 4; ///< Count of hazard pointer required for the algorithm
152 // Rebind traits (split-list support)
153 template <typename... Options>
154 struct rebind_traits {
155 typedef IterableList<
158 , typename cds::opt::make_options< traits, Options...>::type
163 template <typename Stat>
164 using select_stat_wrapper = iterable_list::select_stat_wrapper< Stat >;
// --- Internal (protected) types and data members ---
169 typedef atomics::atomic< node_type* > atomic_node_ptr; ///< Atomic node pointer
170 typedef atomic_node_ptr auxiliary_head; ///< Auxiliary head type (for split-list support)
171 typedef typename node_type::marked_data_ptr marked_data_ptr;
176 item_counter m_ItemCounter; ///< Item counter
177 mutable stat m_Stat; ///< Internal statistics
179 typedef cds::details::Allocator< node_type, node_allocator > cxx_node_allocator;
// `position` records the state of a traversal: the head searched from, the
// prev/cur node pair, and the guarded value found (if any). The struct's
// opening declaration line is elided in this extract.
181 /// Position pointer for item search
183 node_type const* pHead;
184 node_type * pPrev; ///< Previous node
185 node_type * pCur; ///< Current node
187 value_type * pFound; ///< Value of \p pCur->data, valid only if data found
189 typename gc::Guard guard; ///< guard for \p pFound
// `insert_position` extends `position` with the previous node's guarded value,
// needed by inserting_search()/link_data() to detect concurrent changes.
192 struct insert_position: public position
194 value_type * pPrevVal; ///< Value of \p pPrev->data, can be \p nullptr
195 typename gc::Guard prevGuard; ///< guard for \p pPrevVal
// Thread-safe forward iterator (fragment; interleaved source lines are elided
// in this extract — code left byte-identical, comments only).
// The iterator holds a GC guard (m_Guard) on the value of the node it points
// to, so the value cannot be recycled while the iterator is alive.
201 template <bool IsConst>
204 friend class IterableList;
208 typename gc::Guard m_Guard; // data guard
// Advance helper: walk next-links until a node with non-null, successfully
// guarded data is found; the end-of-list node links to itself (loop bound).
212 for ( node_type* p = m_pNode->next.load( memory_model::memory_order_relaxed ); p != m_pNode; p = p->next.load( memory_model::memory_order_relaxed ))
215 if ( m_Guard.protect( p->data, []( marked_data_ptr ptr ) { return ptr.ptr(); }).ptr())
// Construct from a node: guard its data; when the data is null (empty node)
// the iterator presumably advances — elided lines, confirm against the repo.
221 explicit iterator_type( node_type* pNode )
224 if ( !m_Guard.protect( pNode->data, []( marked_data_ptr p ) { return p.ptr(); }).ptr())
// Construct from a node whose value the caller has already found/protected.
228 iterator_type( node_type* pNode, value_type* pVal )
232 assert( pVal != nullptr );
233 m_Guard.assign( pVal );
// Returns the guarded value pointer; stays valid even if the element is
// removed from the list, because the guard keeps it from being recycled.
237 value_type* data() const
239 return m_Guard.template get<value_type>();
243 typedef typename cds::details::make_const_type<value_type, IsConst>::pointer value_ptr;
244 typedef typename cds::details::make_const_type<value_type, IsConst>::reference value_ref;
// Copy: duplicates both the node pointer and the guard (guards are per-copy).
250 iterator_type( iterator_type const& src )
251 : m_pNode( src.m_pNode )
253 m_Guard.copy( src.m_Guard );
256 value_ptr operator ->() const
259 //return m_Guard.template get<value_type>();
262 value_ref operator *() const
264 assert( m_Guard.get_native() != nullptr );
266 //return *m_Guard.template get<value_type>();
// Pre-increment only; this iterator deliberately has no post-increment.
270 iterator_type& operator ++()
276 iterator_type& operator = (iterator_type const& src)
278 m_pNode = src.m_pNode;
279 m_Guard.copy( src.m_Guard );
// Equality is node identity, not value equality (two equal iterators may
// observe different guarded values — see class-level note).
284 bool operator ==(iterator_type<C> const& i ) const
286 return m_pNode == i.m_pNode;
289 bool operator !=(iterator_type<C> const& i ) const
291 return !( *this == i );
297 ///@name Thread-safe forward iterators
301 The forward iterator for iterable list has some features:
302 - it has no post-increment operator
303 - to protect the value, the iterator contains a GC-specific guard.
304 For some GC (like as \p gc::HP), a guard is a limited resource per thread, so an exception (or assertion) "no free guard"
305 may be thrown if the limit of guard count per thread is exceeded.
306 - The iterator cannot be moved across thread boundary since it contains thread-private GC's guard.
307 - Iterator is thread-safe: even if the element the iterator points to is removed, the iterator stays valid because
308 it contains the guard keeping the value from to be recycled.
310 The iterator interface:
314 // Default constructor
318 iterator( iterator const& src );
320 // Dereference operator
321 value_type * operator ->() const;
323 // Dereference operator
324 value_type& operator *() const;
326 // Preincrement operator
327 iterator& operator ++();
329 // Assignment operator
330 iterator& operator = (iterator const& src);
332 // Equality operators
333 bool operator ==(iterator const& i ) const;
334 bool operator !=(iterator const& i ) const;
338 @note For two iterators pointed to the same element the value can be different;
342 assert( &(*it1) == &(*it2));
344 can throw assertion. The point is that the iterator stores the value of element which can be modified later by other thread.
345 The guard inside the iterator prevents recycling that value so the iterator's value remains valid even after changing.
346 Other iterator may observe modified value of the element.
// NOTE(review): elided interleaved lines; code byte-identical, comments only.
// Mutable forward iterator over the list.
348 typedef iterator_type<false> iterator;
349 /// Const forward iterator
351 For iterator's features and requirements see \ref iterator
353 typedef iterator_type<true> const_iterator;
355 /// Returns a forward iterator addressing the first element in a list
357 For empty list \code begin() == end() \endcode
// begin(): iterate from the dummy head; the iterator ctor skips empty nodes.
361 return iterator( &m_Head );
364 /// Returns an iterator that addresses the location succeeding the last element in a list
366 Do not use the value returned by <tt>end</tt> function to access any item.
367 Internally, <tt>end</tt> returning value equals to \p nullptr.
369 The returned value can be used only to control reaching the end of the list.
370 For empty list <tt>begin() == end()</tt>
// end(): the dummy tail node (whose next points to itself) marks end-of-list.
374 return iterator( &m_Tail );
377 /// Returns a forward const iterator addressing the first element in a list
378 const_iterator cbegin() const
// const_cast is safe here: the const_iterator never mutates the node.
380 return const_iterator( const_cast<node_type*>( &m_Head ));
383 /// Returns a forward const iterator addressing the first element in a list
384 const_iterator begin() const
386 return const_iterator( const_cast<node_type*>( &m_Head ));
389 /// Returns an const iterator that addresses the location succeeding the last element in a list
390 const_iterator end() const
392 return const_iterator( const_cast<node_type*>( &m_Tail ));
395 /// Returns an const iterator that addresses the location succeeding the last element in a list
396 const_iterator cend() const
398 return const_iterator( const_cast<node_type*>( &m_Tail ));
403 /// Default constructor initializes empty list
// Constructor taking an external statistics object (used by wrapping
// containers such as split-list).
// NOTE(review): the default template argument is `std::enable_if<...>` without
// `::type`/`_t`, so the condition never actually SFINAEs this overload out —
// `std::enable_if<false>` is still a well-formed type. Worth confirming
// against upstream whether this is intentional documentation-only gating.
410 template <typename Stat, typename = std::enable_if<std::is_same<stat, iterable_list::wrapped_stat<Stat>>::value >>
411 explicit IterableList( Stat& st )
418 /// Destroys the list object
426 The function inserts \p val into the list if the list does not contain
427 an item with key equal to \p val.
429 Returns \p true if \p val has been linked to the list, \p false otherwise.
// NOTE(review): elided interleaved lines; code byte-identical, comments only.
// insert(val): thin wrapper — inserts starting the search from the list head.
431 bool insert( value_type& val )
433 return insert_at( &m_Head, val );
438 This function is intended for derived non-intrusive containers.
440 The function allows to split new item creating into two part:
441 - create item with key only
442 - insert new item into the list
443 - if inserting is success, calls \p f functor to initialize value-field of \p val.
445 The functor signature is:
447 void func( value_type& val );
449 where \p val is the item inserted. User-defined functor \p f should guarantee that during changing
450 \p val no any other changes could be made on this list's item by concurrent threads.
451 The user-defined functor is called only if the inserting is success.
453 @warning See \ref cds_intrusive_item_creating "insert item troubleshooting"
// insert(val, f): two-phase insert; f runs only on successful linking.
455 template <typename Func>
456 bool insert( value_type& val, Func f )
458 return insert_at( &m_Head, val, f );
463 The operation performs inserting or changing data with lock-free manner.
465 If the item \p val is not found in the list, then \p val is inserted
466 iff \p bInsert is \p true.
467 Otherwise, the current element is changed to \p val, the element will be retired later
468 by call \p Traits::disposer.
469 The functor \p func is called after inserting or replacing, its signature is:
471 void func( value_type& val, value_type * old );
474 - \p val - argument \p val passed into the \p %update() function
475 - \p old - old value that will be retired. If new item has been inserted then \p old is \p nullptr.
477 Returns std::pair<bool, bool> where \p first is \p true if operation is successful,
478 \p second is \p true if \p val has been added or \p false if the item with that key
// update(): insert-or-replace with a user callback; delegates to update_at().
481 template <typename Func>
482 std::pair<bool, bool> update( value_type& val, Func func, bool bInsert = true )
484 return update_at( &m_Head, val, func, bInsert );
489 The operation performs inserting or updating data with lock-free manner.
491 If the item \p val is not found in the list, then \p val is inserted
492 iff \p bInsert is \p true.
493 Otherwise, the current element is changed to \p val, the old element will be retired later
494 by call \p Traits::disposer.
496 Returns std::pair<bool, bool> where \p first is \p true if operation is successful,
497 \p second is \p true if \p val has been added or \p false if the item with that key
// upsert(): update() without a callback.
500 std::pair<bool, bool> upsert( value_type& val, bool bInsert = true )
502 return upsert_at( &m_Head, val, bInsert );
505 /// Unlinks the item \p val from the list
507 The function searches the item \p val in the list and unlinks it from the list
508 if it is found and it is equal to \p val.
510 Difference between \p erase() and \p %unlink(): \p %erase() finds <i>a key</i>
511 and deletes the item found. \p %unlink() finds an item by key and deletes it
512 only if \p val is an item of the list, i.e. the pointer to item found
513 is equal to <tt> &val </tt>.
515 \p disposer specified in \p Traits is called for deleted item.
517 The function returns \p true if success and \p false otherwise.
// NOTE(review): elided interleaved lines; code byte-identical, comments only.
// unlink(): removes only if the found item IS &val (pointer identity), unlike
// erase() which removes any item with an equal key.
519 bool unlink( value_type& val )
521 return unlink_at( &m_Head, val );
524 /// Deletes the item from the list
525 /** \anchor cds_intrusive_IterableList_hp_erase_val
526 The function searches an item with key equal to \p key in the list,
527 unlinks it from the list, and returns \p true.
528 If \p key is not found the function return \p false.
530 \p disposer specified in \p Traits is called for deleted item.
532 template <typename Q>
533 bool erase( Q const& key )
535 return erase_at( &m_Head, key, key_comparator());
538 /// Deletes the item from the list using \p pred predicate for searching
540 The function is an analog of \ref cds_intrusive_IterableList_hp_erase_val "erase(Q const&)"
541 but \p pred is used for key comparing.
542 \p Less functor has the interface like \p std::less.
543 \p pred must imply the same element order as the comparator used for building the list.
545 \p disposer specified in \p Traits is called for deleted item.
547 template <typename Q, typename Less>
548 bool erase_with( Q const& key, Less pred )
// pred itself is unused at runtime; only its type builds the comparator.
551 return erase_at( &m_Head, key, cds::opt::details::make_comparator_from_less<Less>());
554 /// Deletes the item from the list
555 /** \anchor cds_intrusive_IterableList_hp_erase_func
556 The function searches an item with key equal to \p key in the list,
557 call \p func functor with item found, unlinks it from the list, and returns \p true.
558 The \p Func interface is
561 void operator()( value_type const& item );
564 If \p key is not found the function return \p false, \p func is not called.
566 \p disposer specified in \p Traits is called for deleted item.
568 template <typename Q, typename Func>
569 bool erase( Q const& key, Func func )
571 return erase_at( &m_Head, key, key_comparator(), func );
574 /// Deletes the item from the list using \p pred predicate for searching
576 The function is an analog of \ref cds_intrusive_IterableList_hp_erase_func "erase(Q const&, Func)"
577 but \p pred is used for key comparing.
578 \p Less functor has the interface like \p std::less.
579 \p pred must imply the same element order as the comparator used for building the list.
581 \p disposer specified in \p Traits is called for deleted item.
583 template <typename Q, typename Less, typename Func>
584 bool erase_with( Q const& key, Less pred, Func f )
587 return erase_at( &m_Head, key, cds::opt::details::make_comparator_from_less<Less>(), f );
590 /// Deletes the item pointed by iterator \p iter
592 Returns \p true if the operation is successful, \p false otherwise.
593 The function can return \p false if the node the iterator points to has already been deleted
596 The function does not invalidate the iterator, it remains valid and can be used for further traversing.
// erase_at(iterator): CAS the node's data pointer from the iterator's value to
// null; on success the node becomes "empty" and the value is retired to the GC.
// The CAS fails (returning false) if another thread already removed/replaced it.
598 bool erase_at( iterator const& iter )
600 assert( iter != end());
602 marked_data_ptr val( iter.data());
603 if ( iter.m_pNode->data.compare_exchange_strong( val, marked_data_ptr(), memory_model::memory_order_acquire, atomics::memory_order_relaxed )) {
605 retire_data( val.ptr());
606 m_Stat.onEraseSuccess();
612 /// Extracts the item from the list with specified \p key
613 /** \anchor cds_intrusive_IterableList_hp_extract
614 The function searches an item with key equal to \p key,
615 unlinks it from the list, and returns it as \p guarded_ptr.
616 If \p key is not found returns an empty guarded pointer.
618 Note the compare functor should accept a parameter of type \p Q that can be not the same as \p value_type.
620 The \ref disposer specified in \p Traits class template parameter is called automatically
621 by garbage collector \p GC when returned \ref guarded_ptr object will be destroyed or released.
622 @note Each \p guarded_ptr object uses the GC's guard that can be limited resource.
626 typedef cds::intrusive::IterableList< cds::gc::HP, foo, my_traits > ord_list;
630 ord_list::guarded_ptr gp( theList.extract( 5 ));
635 // Destructor of gp releases internal HP guard
// NOTE(review): elided interleaved lines; code byte-identical, comments only.
// extract(): unlink the item with equal key and hand ownership to the caller
// as a guarded_ptr; the disposer runs when the guarded_ptr is released.
639 template <typename Q>
640 guarded_ptr extract( Q const& key )
642 return extract_at( &m_Head, key, key_comparator());
645 /// Extracts the item using compare functor \p pred
647 The function is an analog of \ref cds_intrusive_IterableList_hp_extract "extract(Q const&)"
648 but \p pred predicate is used for key comparing.
650 \p Less functor has the semantics like \p std::less but should take arguments of type \ref value_type and \p Q
652 \p pred must imply the same element order as the comparator used for building the list.
654 template <typename Q, typename Less>
655 guarded_ptr extract_with( Q const& key, Less pred )
// pred is used only for its type — a comparator is synthesized from Less.
658 return extract_at( &m_Head, key, cds::opt::details::make_comparator_from_less<Less>());
661 /// Finds \p key in the list
662 /** \anchor cds_intrusive_IterableList_hp_find_func
663 The function searches the item with key equal to \p key and calls the functor \p f for item found.
664 The interface of \p Func functor is:
667 void operator()( value_type& item, Q& key );
670 where \p item is the item found, \p key is the \p %find() function argument.
672 The functor may change non-key fields of \p item. Note that the function is only guarantee
673 that \p item cannot be disposed during functor is executing.
674 The function does not serialize simultaneous access to the \p item. If such access is
675 possible you must provide your own synchronization schema to keep out unsafe item modifications.
677 The function returns \p true if \p val is found, \p false otherwise.
// NOTE(review): elided interleaved lines; code byte-identical, comments only.
// find(key, f): calls f(item, key) for the item found; f may mutate non-key
// fields. Non-const and const key overloads share the same implementation.
679 template <typename Q, typename Func>
680 bool find( Q& key, Func f ) const
682 return find_at( &m_Head, key, key_comparator(), f );
685 template <typename Q, typename Func>
686 bool find( Q const& key, Func f ) const
688 return find_at( &m_Head, key, key_comparator(), f );
692 /// Finds \p key in the list and returns iterator pointed to the item found
694 If \p key is not found the function returns \p end().
696 template <typename Q>
697 iterator find( Q const& key ) const
699 return find_iterator_at( &m_Head, key, key_comparator());
702 /// Finds the \p key using \p pred predicate for searching
704 The function is an analog of \ref cds_intrusive_IterableList_hp_find_func "find(Q&, Func)"
705 but \p pred is used for key comparing.
706 \p Less functor has the interface like \p std::less.
707 \p pred must imply the same element order as the comparator used for building the list.
// find_with(): same as find() but the comparator is synthesized from Less.
709 template <typename Q, typename Less, typename Func>
710 bool find_with( Q& key, Less pred, Func f ) const
713 return find_at( &m_Head, key, cds::opt::details::make_comparator_from_less<Less>(), f );
716 template <typename Q, typename Less, typename Func>
717 bool find_with( Q const& key, Less pred, Func f ) const
720 return find_at( &m_Head, key, cds::opt::details::make_comparator_from_less<Less>(), f );
724 /// Finds \p key in the list using \p pred predicate for searching and returns iterator pointed to the item found
726 The function is an analog of \p find(Q&) but \p pred is used for key comparing.
727 \p Less functor has the interface like \p std::less.
728 \p pred must imply the same element order as the comparator used for building the list.
730 If \p key is not found the function returns \p end().
732 template <typename Q, typename Less>
733 iterator find_with( Q const& key, Less pred ) const
736 return find_iterator_at( &m_Head, key, cds::opt::details::make_comparator_from_less<Less>());
739 /// Checks whether the list contains \p key
741 The function searches the item with key equal to \p key
742 and returns \p true if it is found, and \p false otherwise.
// contains(): presence check only — no functor, no guard handed to caller.
744 template <typename Q>
745 bool contains( Q const& key ) const
747 return find_at( &m_Head, key, key_comparator());
750 /// Checks whether the list contains \p key using \p pred predicate for searching
752 The function is an analog of <tt>contains( key )</tt> but \p pred is used for key comparing.
753 \p Less functor has the interface like \p std::less.
754 \p Less must imply the same element order as the comparator used for building the list.
756 template <typename Q, typename Less>
757 bool contains( Q const& key, Less pred ) const
760 return find_at( &m_Head, key, cds::opt::details::make_comparator_from_less<Less>());
763 /// Finds the \p key and return the item found
764 /** \anchor cds_intrusive_IterableList_hp_get
765 The function searches the item with key equal to \p key
766 and returns it as \p guarded_ptr.
767 If \p key is not found the function returns an empty guarded pointer.
769 The \ref disposer specified in \p Traits class template parameter is called
770 by garbage collector \p GC automatically when returned \ref guarded_ptr object
771 will be destroyed or released.
772 @note Each \p guarded_ptr object uses one GC's guard which can be limited resource.
776 typedef cds::intrusive::IterableList< cds::gc::HP, foo, my_traits > ord_list;
780 ord_list::guarded_ptr gp(theList.get( 5 ));
785 // Destructor of guarded_ptr releases internal HP guard
789 Note the compare functor specified for \p Traits template parameter
790 should accept a parameter of type \p Q that can be not the same as \p value_type.
// NOTE(review): elided interleaved lines; code byte-identical, comments only.
// get(): returns the found item as a guarded_ptr WITHOUT removing it.
792 template <typename Q>
793 guarded_ptr get( Q const& key ) const
795 return get_at( &m_Head, key, key_comparator());
798 /// Finds the \p key and return the item found
800 The function is an analog of \ref cds_intrusive_IterableList_hp_get "get( Q const&)"
801 but \p pred is used for comparing the keys.
803 \p Less functor has the semantics like \p std::less but should take arguments of type \ref value_type and \p Q
805 \p pred must imply the same element order as the comparator used for building the list.
807 template <typename Q, typename Less>
808 guarded_ptr get_with( Q const& key, Less pred ) const
811 return get_at( &m_Head, key, cds::opt::details::make_comparator_from_less<Less>());
// clear(): walks the list unlinking each data item in turn. Thread-safe but
// not atomic: items inserted concurrently may survive the call.
814 /// Clears the list (thread safe, not atomic)
// Loop bound `pos.pCur != pos.pPrev` relies on the tail linking to itself
// (end-of-list mark); pPrev trails pCur by one step — elided lines, confirm.
819 for ( pos.pCur = m_Head.next.load( memory_model::memory_order_relaxed ); pos.pCur != pos.pPrev; pos.pCur = pos.pCur->next.load( memory_model::memory_order_relaxed )) {
821 pos.pFound = pos.guard.protect( pos.pCur->data, []( marked_data_ptr p ) { return p.ptr(); }).ptr();
824 if ( cds_likely( unlink_data( pos ))) {
829 pos.pPrev = pos.pCur;
// NOTE(review): elided interleaved lines; code byte-identical, comments only.
833 /// Checks if the list is empty
835 Emptiness is checked by item counting: if item count is zero then the set is empty.
836 Thus, if you need to use \p %empty() you should provide appropriate (non-empty) \p iterable_list::traits::item_counter
844 /// Returns list's item count
846 The value returned depends on item counter provided by \p iterable_list::traits::item_counter. For \p atomicity::empty_item_counter,
847 this function always returns 0.
// size() body: delegates to the traits-selected item counter.
851 return m_ItemCounter.value();
854 /// Returns const reference to internal statistics
855 stat const& statistics() const
// NOTE(review): elided interleaved lines (retry loop, braces) are not shown;
// code byte-identical, comments only. Each insert_* body is a CAS retry loop:
// search, fail if the key exists, otherwise try to link; on CAS contention
// the loop re-searches (onInsertRetry).
863 // split-list support
864 bool insert_aux_node( node_type * pNode )
866 return insert_aux_node( &m_Head, pNode );
869 // split-list support
// Inserts a pre-built auxiliary node (split-list bucket head). The node must
// carry non-null data so it is ordered by the regular comparator.
870 bool insert_aux_node( node_type* pHead, node_type * pNode )
872 assert( pNode != nullptr );
873 assert( pNode->data.load( memory_model::memory_order_relaxed ) != nullptr );
878 if ( inserting_search( pHead, *pNode->data.load(memory_model::memory_order_relaxed).ptr(), pos, key_comparator())) {
879 m_Stat.onInsertFailed();
883 if ( link_aux_node( pNode, pos, pHead )) {
885 m_Stat.onInsertSuccess();
889 m_Stat.onInsertRetry();
// insert_at(): plain insert — link the value at the position found.
893 bool insert_at( node_type* pHead, value_type& val )
898 if ( inserting_search( pHead, val, pos, key_comparator())) {
899 m_Stat.onInsertFailed();
903 if ( link_data( &val, pos, pHead )) {
905 m_Stat.onInsertSuccess();
909 m_Stat.onInsertRetry();
// insert_at() with functor: val is guarded for the duration so the functor
// (invoked on success — elided lines) sees a protected value.
913 template <typename Func>
914 bool insert_at( node_type* pHead, value_type& val, Func f )
918 typename gc::Guard guard;
919 guard.assign( &val );
922 if ( inserting_search( pHead, val, pos, key_comparator())) {
923 m_Stat.onInsertFailed();
927 if ( link_data( &val, pos, pHead )) {
930 m_Stat.onInsertSuccess();
934 m_Stat.onInsertRetry();
// NOTE(review): elided interleaved lines (retry loop, bInsert==false branch);
// code byte-identical, comments only.
// update_at(): if the key exists, CAS-replace the node's data pointer with
// &val and retire the old value; otherwise (when bInsert) link val as new.
938 template <typename Func>
939 std::pair<bool, bool> update_at( node_type* pHead, value_type& val, Func func, bool bInsert )
// Guard val so it stays protected across the search/CAS window.
943 typename gc::Guard guard;
944 guard.assign( &val );
947 if ( inserting_search( pHead, val, pos, key_comparator())) {
948 // try to replace pCur->data with val
949 assert( pos.pFound != nullptr );
950 assert( key_comparator()(*pos.pFound, val) == 0 );
952 marked_data_ptr pFound( pos.pFound );
// Release on success publishes val's contents to readers of pCur->data.
953 if ( cds_likely( pos.pCur->data.compare_exchange_strong( pFound, marked_data_ptr( &val ),
954 memory_model::memory_order_release, atomics::memory_order_relaxed )))
// Self-update (same pointer) must not retire the value it just stored.
956 if ( pos.pFound != &val ) {
957 retire_data( pos.pFound );
958 func( val, pos.pFound );
960 m_Stat.onUpdateExisting();
961 return std::make_pair( true, false );
966 m_Stat.onUpdateFailed();
967 return std::make_pair( false, false );
// Key not found: link as a new item; func gets nullptr as the old value.
970 if ( link_data( &val, pos, pHead )) {
971 func( val, static_cast<value_type*>( nullptr ));
973 m_Stat.onUpdateNew();
974 return std::make_pair( true, true );
978 m_Stat.onUpdateRetry();
// upsert_at(): update_at with a no-op functor.
982 std::pair<bool, bool> upsert_at( node_type* pHead, value_type& val, bool bInsert )
984 return update_at( pHead, val, []( value_type&, value_type* ) {}, bInsert );
// NOTE(review): elided interleaved lines (loop braces, returns); code
// byte-identical, comments only. All removal paths share the same shape:
// re-search while unlink_data() loses its CAS race (onEraseRetry), give up
// when the key disappears (onEraseFailed).
// unlink_at(): removes only when the found value is exactly &val.
987 bool unlink_at( node_type* pHead, value_type& val )
992 while ( search( pHead, val, pos, key_comparator())) {
993 if ( pos.pFound == &val ) {
994 if ( unlink_data( pos )) {
996 m_Stat.onEraseSuccess();
1005 m_Stat.onEraseRetry();
1008 m_Stat.onEraseFailed();
// erase_at() workhorse: caller-supplied position allows the derived/wrapper
// code to inspect where the erase happened; f is called on the erased item
// (elided lines, presumably between unlink and return — confirm upstream).
1012 template <typename Q, typename Compare, typename Func>
1013 bool erase_at( node_type* pHead, Q const& val, Compare cmp, Func f, position& pos )
1016 while ( search( pHead, val, pos, cmp )) {
1017 if ( unlink_data( pos )) {
1020 m_Stat.onEraseSuccess();
1026 m_Stat.onEraseRetry();
1029 m_Stat.onEraseFailed();
// Convenience overload: local position.
1033 template <typename Q, typename Compare, typename Func>
1034 bool erase_at( node_type* pHead, Q const& val, Compare cmp, Func f )
1037 return erase_at( pHead, val, cmp, f, pos );
// Convenience overload: no functor.
1040 template <typename Q, typename Compare>
1041 bool erase_at( node_type* pHead, Q const& val, Compare cmp )
1044 return erase_at( pHead, val, cmp, [](value_type const&){}, pos );
// extract_at(): like erase, but moves the position's guard into the returned
// guarded_ptr instead of retiring the value immediately.
1047 template <typename Q, typename Compare>
1048 guarded_ptr extract_at( node_type* pHead, Q const& val, Compare cmp )
1052 while ( search( pHead, val, pos, cmp )) {
1053 if ( unlink_data( pos )) {
1055 m_Stat.onEraseSuccess();
1056 assert( pos.pFound != nullptr );
1057 return guarded_ptr( std::move( pos.guard ));
1062 m_Stat.onEraseRetry();
1065 m_Stat.onEraseFailed();
1066 return guarded_ptr();
// NOTE(review): elided interleaved lines; code byte-identical, comments only.
// find_at(): presence check — position (and its guard) is discarded.
1069 template <typename Q, typename Compare>
1070 bool find_at( node_type const* pHead, Q const& val, Compare cmp ) const
1073 if ( search( pHead, val, pos, cmp )) {
1074 m_Stat.onFindSuccess();
1078 m_Stat.onFindFailed();
// find_at() with functor: the item stays guarded (pos.guard) while f runs.
1082 template <typename Q, typename Compare, typename Func>
1083 bool find_at( node_type const* pHead, Q& val, Compare cmp, Func f ) const
1086 if ( search( pHead, val, pos, cmp )) {
1087 assert( pos.pFound != nullptr );
1088 f( *pos.pFound, val );
1089 m_Stat.onFindSuccess();
1093 m_Stat.onFindFailed();
// find_iterator_at(): wraps the found node+value into an iterator; returns an
// end() iterator (tail node) on failure.
1097 template <typename Q, typename Compare>
1098 iterator find_iterator_at( node_type const* pHead, Q const& val, Compare cmp ) const
1101 if ( search( pHead, val, pos, cmp )) {
1102 assert( pos.pCur != nullptr );
1103 assert( pos.pFound != nullptr );
1104 m_Stat.onFindSuccess();
1105 return iterator( pos.pCur, pos.pFound );
1108 m_Stat.onFindFailed();
1109 return iterator( const_cast<node_type*>( &m_Tail ));
// get_at(): transfers the search guard to the returned guarded_ptr.
1112 template <typename Q, typename Compare>
1113 guarded_ptr get_at( node_type const* pHead, Q const& val, Compare cmp ) const
1116 if ( search( pHead, val, pos, cmp )) {
1117 m_Stat.onFindSuccess();
1118 return guarded_ptr( std::move( pos.guard ));
1121 m_Stat.onFindFailed();
1122 return guarded_ptr();
// NOTE(review): large portions of both traversal loops (comparison result
// handling, loop advance) are elided in this extract; code byte-identical,
// comments only. Both routines fill `pos` and return true iff an item with an
// equal key was found.
1130 node_type const* head() const
// search(): ordered traversal. The end-of-list sentinel is a node whose next
// points to itself; acquire on that load synchronizes with init_list's release.
1138 template <typename Q, typename Compare >
1139 bool search( node_type const* pHead, Q const& val, position& pos, Compare cmp ) const
1142 node_type* pPrev = const_cast<node_type*>( pHead );
1145 node_type * pCur = pPrev->next.load( memory_model::memory_order_relaxed );
1147 if ( pCur == pCur->next.load( memory_model::memory_order_acquire )) {
1151 pos.pFound = nullptr;
// Guard the node's data before dereferencing; empty nodes (null data) are
// presumably skipped by elided lines — confirm against upstream.
1155 value_type * pVal = pos.guard.protect( pCur->data,
1156 []( marked_data_ptr p ) -> value_type*
1162 int const nCmp = cmp( *pVal, val );
// inserting_search(): like search() but additionally tracks and guards the
// previous node's value (pPrevVal/prevGuard) so link_data() can detect
// concurrent modification of the predecessor.
1175 template <typename Q, typename Compare >
1176 bool inserting_search( node_type const* pHead, Q const& val, insert_position& pos, Compare cmp ) const
1179 node_type* pPrev = const_cast<node_type*>(pHead);
1180 value_type* pPrevVal = pPrev->data.load( memory_model::memory_order_relaxed ).ptr();
1183 node_type * pCur = pPrev->next.load( memory_model::memory_order_relaxed );
1185 if ( pCur == pCur->next.load( memory_model::memory_order_acquire )) {
1189 pos.pFound = nullptr;
1190 pos.pPrevVal = pPrevVal;
1194 value_type * pVal = pos.guard.protect( pCur->data,
1195 []( marked_data_ptr p ) -> value_type*
1201 int const nCmp = cmp( *pVal, val );
1206 pos.pPrevVal = pPrevVal;
// Hand the current guard over to prevGuard as the traversal advances.
1213 pos.prevGuard.copy( pos.guard );
        // split-list support
        /// Destroys list nodes: frees every node whose payload is null or satisfies \p pred
        /**
            Walks the list (no concurrent access expected), retiring matching payloads
            through the GC and deallocating their nodes, then resets the list to
            empty (head links directly to tail).
            NOTE(review): some loop lines (closing braces, the advance to pNext and,
            presumably, a null check before retire_data) are not visible in this
            chunk — retire_data() asserts a non-null argument while is_regular_node
            admits pVal == nullptr; confirm against the full file.
        */
        template <typename Predicate>
        void destroy( Predicate pred )
            node_type * pNode = m_Head.next.load( memory_model::memory_order_relaxed );
            // end-of-list mark: the tail node's next points to itself
            while ( pNode != pNode->next.load( memory_model::memory_order_relaxed )) {
                value_type * pVal = pNode->data.load( memory_model::memory_order_relaxed ).ptr();
                node_type * pNext = pNode->next.load( memory_model::memory_order_relaxed );
                // "regular" node = empty node, or payload accepted by the predicate
                bool const is_regular_node = !pVal || pred( pVal );
                if ( is_regular_node ) {
                    retire_data( pVal );
                    delete_node( pNode );
            // reset to the empty list: head links directly to tail
            m_Head.next.store( &m_Tail, memory_model::memory_order_relaxed );
            // NOTE(review): the enclosing function's signature is not visible in this
            // chunk; the fragment below (re)initializes the list to the empty state.
            m_Head.next.store( &m_Tail, memory_model::memory_order_relaxed );
            // end-of-list mark: node.next == node
            m_Tail.next.store( &m_Tail, memory_model::memory_order_release );
1247 node_type * alloc_node( value_type * pVal )
1249 m_Stat.onNodeCreated();
1250 return cxx_node_allocator().New( pVal );
1253 void delete_node( node_type * pNode )
1255 m_Stat.onNodeRemoved();
1256 cxx_node_allocator().Delete( pNode );
        /// Retires value \p pVal through the garbage collector; the disposer is invoked later
        static void retire_data( value_type * pVal )
            // payload must exist — empty (data-less) nodes are never retired
            assert( pVal != nullptr );
            gc::template retire<disposer>( pVal );
            // NOTE(review): the enclosing function's signature is not visible in this
            // chunk; the fragment walks the whole list, retiring every payload and
            // freeing every node (end of list = node whose next points to itself).
            node_type * pNode = m_Head.next.load( memory_model::memory_order_relaxed );
            while ( pNode != pNode->next.load( memory_model::memory_order_relaxed )) {
                value_type * pVal = pNode->data.load( memory_model::memory_order_relaxed ).ptr();
                // NOTE(review): retire_data() asserts non-null; a null-check guard is
                // probably present in the omitted lines — confirm against the full file
                retire_data( pVal );
                // read the successor before the node is freed
                node_type * pNext = pNode->next.load( memory_model::memory_order_relaxed );
                delete_node( pNode );
        /// Tries to link value \p pVal into the list between \p pos.pPrev and \p pos.pCur
        /**
            Returns \p true on success — either by storing \p pVal into an empty
            predecessor node (node reuse) or by inserting a newly allocated node;
            \p false means the position has been invalidated and the caller must retry.
            NOTE(review): several lines (closing braces, return statements) are not
            visible in this chunk of the file.
        */
        bool link_data( value_type* pVal, insert_position& pos, node_type* pHead )
            assert( pos.pPrev != nullptr );
            assert( pos.pCur != nullptr );
            // We need pos.pCur data should be unchanged, otherwise ordering violation can be possible
            // if current thread will be preempted and another thread will delete pos.pCur data
            // and then set it to another.
            // To prevent this we mark pos.pCur data as undeletable by setting LSB
            marked_data_ptr valCur( pos.pFound );
            if ( !pos.pCur->data.compare_exchange_strong( valCur, valCur | 1, memory_model::memory_order_acquire, atomics::memory_order_relaxed )) {
                // oops, pos.pCur data has been changed or another thread is setting pos.pPrev data
                m_Stat.onNodeMarkFailed();
            // freeze the predecessor's data the same way (LSB mark)
            marked_data_ptr valPrev( pos.pPrevVal );
            if ( !pos.pPrev->data.compare_exchange_strong( valPrev, valPrev | 1, memory_model::memory_order_acquire, atomics::memory_order_relaxed )) {
                // roll back the mark set on pos.pCur above before retrying
                pos.pCur->data.store( valCur, memory_model::memory_order_relaxed );
                m_Stat.onNodeMarkFailed();
            // checks if link pPrev -> pCur is broken
            if ( pos.pPrev->next.load( memory_model::memory_order_acquire ) != pos.pCur ) {
                // sequence pPrev - pCur is broken
                // roll back both marks before retrying
                pos.pPrev->data.store( valPrev, memory_model::memory_order_relaxed );
                pos.pCur->data.store( valCur, memory_model::memory_order_relaxed );
                m_Stat.onNodeSeqBreak();
            if ( pos.pPrevVal == nullptr ) {
                // Check ABA-problem for prev
                // There is a possibility that the current thread was preempted
                // on entry of this function. Other threads can link data to prev
                // and then remove it. As a result, the order of items may be changed
                if ( find_prev( pHead, *pVal ) != pos.pPrev ) {
                    // roll back both marks before retrying
                    pos.pPrev->data.store( valPrev, memory_model::memory_order_relaxed );
                    pos.pCur->data.store( valCur, memory_model::memory_order_relaxed );
                    m_Stat.onNullPrevABA();
            if ( pos.pPrev != pos.pHead && pos.pPrevVal == nullptr ) {
                // Set pos.pPrev data if it is null
                // (reuse the empty predecessor node instead of allocating a new one)
                bool result = pos.pPrev->data.compare_exchange_strong( valPrev, marked_data_ptr( pVal ),
                    memory_model::memory_order_release, atomics::memory_order_relaxed );
                // Clears data marks
                pos.pCur->data.store( valCur, memory_model::memory_order_relaxed );
                m_Stat.onReuseNode();
            // insert new node between pos.pPrev and pos.pCur
            node_type * pNode = alloc_node( pVal );
            pNode->next.store( pos.pCur, memory_model::memory_order_relaxed );
            bool result = pos.pPrev->next.compare_exchange_strong( pos.pCur, pNode, memory_model::memory_order_release, atomics::memory_order_relaxed );
            // Clears data marks
            pos.pPrev->data.store( valPrev, memory_model::memory_order_relaxed );
            pos.pCur->data.store( valCur, memory_model::memory_order_relaxed );
            m_Stat.onNewNodeCreated();
            // linking failed: free the speculatively allocated node
            delete_node( pNode );
        // split-list support
        /// Tries to link auxiliary (dummy) node \p pNode between \p pos.pPrev and \p pos.pCur
        /**
            Same marking protocol as link_data(), but the node is supplied by the
            caller (a split-list bucket head) instead of being allocated here.
            Returns \p true if \p pNode has been linked; \p false means the caller
            must retry.
            NOTE(review): several lines (closing braces, return statements) are not
            visible in this chunk of the file.
        */
        bool link_aux_node( node_type * pNode, insert_position& pos, node_type* pHead )
            assert( pos.pPrev != nullptr );
            assert( pos.pCur != nullptr );
            // We need pos.pCur data should be unchanged, otherwise ordering violation can be possible
            // if current thread will be preempted and another thread will delete pos.pCur data
            // and then set it to another.
            // To prevent this we mark pos.pCur data as undeletable by setting LSB
            marked_data_ptr valCur( pos.pFound );
            if ( !pos.pCur->data.compare_exchange_strong( valCur, valCur | 1, memory_model::memory_order_acquire, atomics::memory_order_relaxed )) {
                // oops, pos.pCur data has been changed or another thread is setting pos.pPrev data
                m_Stat.onNodeMarkFailed();
            // freeze the predecessor's data the same way (LSB mark)
            marked_data_ptr valPrev( pos.pPrevVal );
            if ( !pos.pPrev->data.compare_exchange_strong( valPrev, valPrev | 1, memory_model::memory_order_acquire, atomics::memory_order_relaxed )) {
                // roll back the mark set on pos.pCur above before retrying
                pos.pCur->data.store( valCur, memory_model::memory_order_relaxed );
                m_Stat.onNodeMarkFailed();
            // checks if link pPrev -> pCur is broken
            if ( pos.pPrev->next.load( memory_model::memory_order_acquire ) != pos.pCur ) {
                // sequence pPrev - pCur is broken
                // roll back both marks before retrying
                pos.pPrev->data.store( valPrev, memory_model::memory_order_relaxed );
                pos.pCur->data.store( valCur, memory_model::memory_order_relaxed );
                m_Stat.onNodeSeqBreak();
            if ( pos.pPrevVal == nullptr ) {
                // Check ABA-problem for prev
                // There is a possibility that the current thread was preempted
                // on entry of this function. Other threads can insert (link) an item to prev
                // and then remove it. As a result, the order of items may be changed
                if ( find_prev( pHead, *pNode->data.load( memory_model::memory_order_relaxed ).ptr()) != pos.pPrev ) {
                    // roll back both marks before retrying
                    pos.pPrev->data.store( valPrev, memory_model::memory_order_relaxed );
                    pos.pCur->data.store( valCur, memory_model::memory_order_relaxed );
                    m_Stat.onNullPrevABA();
            // insert new node between pos.pPrev and pos.pCur
            pNode->next.store( pos.pCur, memory_model::memory_order_relaxed );
            bool result = pos.pPrev->next.compare_exchange_strong( pos.pCur, pNode, memory_model::memory_order_release, atomics::memory_order_relaxed );
            // Clears data marks
            pos.pPrev->data.store( valPrev, memory_model::memory_order_relaxed );
            pos.pCur->data.store( valCur, memory_model::memory_order_relaxed );
        /// Unlinks the value found at \p pos from its node
        /**
            Detaches the payload by CAS-ing pos.pCur->data from pos.pFound to the
            empty marked pointer; on success the value is retired through the GC.
            The CAS fails if the data has been changed concurrently or an inserter
            has set the LSB mark on it.
            NOTE(review): the return statements are not visible in this chunk.
        */
        static bool unlink_data( position& pos )
            assert( pos.pCur != nullptr );
            assert( pos.pFound != nullptr );
            marked_data_ptr val( pos.pFound );
            if ( pos.pCur->data.compare_exchange_strong( val, marked_data_ptr(), memory_model::memory_order_acquire, atomics::memory_order_relaxed )) {
                retire_data( pos.pFound );
        /// Returns the node that would precede \p val in the list order
        /**
            Used by link_data()/link_aux_node() to detect the null-prev ABA problem.
            NOTE(review): the traversal loop header, the declaration of the
            comparator \p cmp and the protect-lambda body are not visible in this
            chunk of the file.
        */
        template <typename Q>
        node_type* find_prev( node_type const* pHead, Q const& val ) const
            node_type* pPrev = const_cast<node_type*>(pHead);
            typename gc::Guard guard;
            node_type * pCur = pPrev->next.load( memory_model::memory_order_relaxed );
            // end-of-list mark: the tail node's next points to itself
            if ( pCur == pCur->next.load( memory_model::memory_order_acquire )) {
            // protect the payload from reclamation before comparing it
            value_type * pVal = guard.protect( pCur->data,
                []( marked_data_ptr p ) -> value_type*
            // first item not less than val terminates the walk: pPrev is its predecessor
            if ( pVal && cmp( *pVal, val ) >= 0 )
1465 }} // namespace cds::intrusive
1467 #endif // #ifndef CDSLIB_INTRUSIVE_IMPL_ITERABLE_LIST_H