issue #76: added cds::atomicity::cache_friendly_item_counter to avoid false sharing
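The counter type named in the subject is added elsewhere in this commit; in this header the related change is the relocation of m_ItemCounter (see the member hunk below). As a rough, hypothetical sketch of the idea only (not the actual cds::atomicity::cache_friendly_item_counter), a cache-friendly item counter claims a whole cache line for itself, so the frequent increments and decrements from the insert/erase paths do not invalidate neighbouring read-mostly fields:

```cpp
#include <atomic>
#include <cstddef>

// Hypothetical illustration only, not the real cds::atomicity::cache_friendly_item_counter.
// Assumes a 64-byte cache line; libcds takes the real value from its own configuration.
static constexpr std::size_t cache_line_size = 64;

struct alignas( cache_line_size ) padded_item_counter
{
    // alignas() on the struct also rounds sizeof() up to a multiple of the alignment,
    // so the counter never shares its cache line with adjacent members.
    std::atomic<std::size_t> m_counter{ 0 };

    std::size_t operator++()     { return m_counter.fetch_add( 1, std::memory_order_relaxed ) + 1; }
    std::size_t operator--()     { return m_counter.fetch_sub( 1, std::memory_order_relaxed ) - 1; }
    operator std::size_t() const { return m_counter.load( std::memory_order_relaxed ); }
};
```

Containers such as this skip list only see the ++/--/read interface used below, so a padded counter can be plugged in through the traits' item_counter option without touching the container code.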
diff --git a/cds/intrusive/skip_list_rcu.h b/cds/intrusive/skip_list_rcu.h
index 0339461933f02c7d53c8989e1877be2b12c2ab27..b153aa809558155d17c1f1d7c51c2a7a3b95475a 100644
--- a/cds/intrusive/skip_list_rcu.h
+++ b/cds/intrusive/skip_list_rcu.h
@@ -1,7 +1,7 @@
 /*
     This file is a part of libcds - Concurrent Data Structures library
 
-    (C) Copyright Maxim Khizhinsky (libcds.dev@gmail.com) 2006-2016
+    (C) Copyright Maxim Khizhinsky (libcds.dev@gmail.com) 2006-2017
 
     Source code repo: http://github.com/khizmax/libcds/
     Download: http://sourceforge.net/projects/libcds/files/
@@ -71,13 +71,14 @@ namespace cds { namespace intrusive {
 
         public:
             /// Constructs a node of height 1 (a bottom-list node)
-            CDS_CONSTEXPR node()
+            node()
                 : m_pNext( nullptr )
                 , m_pDelChain( nullptr )
                 , m_nHeight(1)
                 , m_arrNext( nullptr )
-                , m_nUnlink(1)
-            {}
+            {
+                m_nUnlink.store( 1, atomics::memory_order_release );
+            }
 
             /// Constructs a node of height \p nHeight
             void make_tower( unsigned int nHeight, atomic_marked_ptr * nextTower )
@@ -117,15 +118,7 @@ namespace cds { namespace intrusive {
                 assert( nLevel < height());
                 assert( nLevel == 0 || (nLevel > 0 && m_arrNext != nullptr));
 
-#           ifdef CDS_THREAD_SANITIZER_ENABLED
-                // TSan false positive: m_arrNext is read-only array
-                CDS_TSAN_ANNOTATE_IGNORE_READS_BEGIN;
-                atomic_marked_ptr& r = nLevel ? m_arrNext[ nLevel - 1] : m_pNext;
-                CDS_TSAN_ANNOTATE_IGNORE_READS_END;
-                return r;
-#           else
                 return nLevel ? m_arrNext[ nLevel - 1] : m_pNext;
-#           endif
             }
 
             /// Access to element of next pointer array (const version)
@@ -134,15 +127,7 @@ namespace cds { namespace intrusive {
                 assert( nLevel < height());
                 assert( nLevel == 0 || nLevel > 0 && m_arrNext != nullptr );
 
-#           ifdef CDS_THREAD_SANITIZER_ENABLED
-                // TSan false positive: m_arrNext is read-only array
-                CDS_TSAN_ANNOTATE_IGNORE_READS_BEGIN;
-                atomic_marked_ptr& r = nLevel ? m_arrNext[ nLevel - 1] : m_pNext;
-                CDS_TSAN_ANNOTATE_IGNORE_READS_END;
-                return r;
-#           else
                 return nLevel ? m_arrNext[ nLevel - 1] : m_pNext;
-#           endif
             }
 
             /// Access to element of next pointer array (same as \ref next function)
@@ -647,10 +632,10 @@ namespace cds { namespace intrusive {
     protected:
         skip_list::details::head_node< node_type > m_Head;   ///< head tower (max height)
 
-        item_counter                m_ItemCounter;      ///< item counter
         random_level_generator      m_RandomLevelGen;   ///< random level generator instance
         atomics::atomic<unsigned int>    m_nHeight;     ///< estimated high level
         atomics::atomic<node_type *>     m_pDeferredDelChain ;   ///< Deferred deleted node chain
+        item_counter                m_ItemCounter;      ///< item counter
         mutable stat                m_Stat;             ///< internal statistics
 
     protected:
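This hunk moves m_ItemCounter away from the head tower and the level generator and next to the other frequently written atomics, which fits the false-sharing theme of the commit. A small, self-contained way to sanity-check such a layout on a simplified stand-in (hypothetical names, not the real class) is a static_assert over offsetof:

```cpp
#include <atomic>
#include <cstddef>

static constexpr std::size_t cache_line_size = 64;     // assumption, as in the sketch above

struct alignas( cache_line_size ) padded_item_counter {
    std::atomic<std::size_t> m_counter{ 0 };
};

// Simplified stand-in mirroring the member order in the hunk above (hypothetical).
struct layout_probe
{
    void*                 head;          // read-mostly: head tower
    unsigned              level_gen;     // read-mostly: random level generator state
    std::atomic<unsigned> height;        // mutable: estimated height
    std::atomic<void*>    del_chain;     // mutable: deferred deletion chain
    padded_item_counter   item_counter;  // mutable: isolated on its own cache line
};

// The hot counter must not land on the same cache line as the read-mostly members.
static_assert( offsetof( layout_probe, item_counter ) / cache_line_size !=
               offsetof( layout_probe, head ) / cache_line_size,
               "item counter shares a cache line with read-mostly data" );
```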
@@ -1408,17 +1393,17 @@ namespace cds { namespace intrusive {
 
         void help_remove( int nLevel, node_type* pPred, marked_node_ptr pCur, marked_node_ptr pSucc, position& pos )
         {
-            marked_node_ptr p( pCur.ptr() );
+            marked_node_ptr p( pCur.ptr());
 
             if ( pCur->is_upper_level( nLevel )
                 && pPred->next( nLevel ).compare_exchange_strong( p, marked_node_ptr( pSucc.ptr()),
                     memory_model::memory_order_release, atomics::memory_order_relaxed ))
             {
                 if ( pCur->level_unlinked()) {
-                    if ( !is_extracted( pSucc ) ) {
+                    if ( !is_extracted( pSucc )) {
                         // We cannot free the node at this moment because RCU is locked
                         // Link deleted nodes to a chain to free later
-                        pos.dispose( pCur.ptr() );
+                        pos.dispose( pCur.ptr());
                         m_Stat.onEraseWhileFind();
                     }
                     else
@@ -1430,7 +1415,7 @@ namespace cds { namespace intrusive {
         template <typename Q, typename Compare >
         bool find_position( Q const& val, position& pos, Compare cmp, bool bStopIfFound )
         {
-            assert( gc::is_locked() );
+            assert( gc::is_locked());
 
             node_type * pPred;
             marked_node_ptr pSucc;
@@ -1444,7 +1429,7 @@ namespace cds { namespace intrusive {
 
                 while ( true ) {
                     pCur = pPred->next( nLevel ).load( memory_model::memory_order_acquire );
-                    if ( pCur.bits() ) {
+                    if ( pCur.bits()) {
                         // pCur.bits() means that pPred is logically deleted
                         goto retry;
                     }
@@ -1457,16 +1442,16 @@ namespace cds { namespace intrusive {
                     // pSucc contains deletion mark for pCur
                     pSucc = pCur->next( nLevel ).load( memory_model::memory_order_acquire );
 
-                    if ( pPred->next( nLevel ).load( memory_model::memory_order_acquire ).all() != pCur.ptr() )
+                    if ( pPred->next( nLevel ).load( memory_model::memory_order_acquire ).all() != pCur.ptr())
                         goto retry;
 
-                    if ( pSucc.bits() ) {
+                    if ( pSucc.bits()) {
                         // pCur is marked, i.e. logically deleted.
                         help_remove( nLevel, pPred, pCur, pSucc, pos );
                         goto retry;
                     }
                     else {
-                        nCmp = cmp( *node_traits::to_value_ptr( pCur.ptr() ), val );
+                        nCmp = cmp( *node_traits::to_value_ptr( pCur.ptr()), val );
                         if ( nCmp < 0 )
                             pPred = pCur.ptr();
                         else if ( nCmp == 0 && bStopIfFound )
@@ -1491,7 +1476,7 @@ namespace cds { namespace intrusive {
 
         bool find_min_position( position& pos )
         {
-            assert( gc::is_locked() );
+            assert( gc::is_locked());
 
             node_type * pPred;
             marked_node_ptr pSucc;
@@ -1507,15 +1492,15 @@ namespace cds { namespace intrusive {
                 // head cannot be deleted
                 assert( pCur.bits() == 0 );
 
-                if ( pCur.ptr() ) {
+                if ( pCur.ptr()) {
 
                     // pSucc contains deletion mark for pCur
                     pSucc = pCur->next( nLevel ).load( memory_model::memory_order_acquire );
 
-                    if ( pPred->next( nLevel ).load( memory_model::memory_order_acquire ).all() != pCur.ptr() )
+                    if ( pPred->next( nLevel ).load( memory_model::memory_order_acquire ).all() != pCur.ptr())
                         goto retry;
 
-                    if ( pSucc.bits() ) {
+                    if ( pSucc.bits()) {
                         // pCur is marked, i.e. logically deleted.
                         help_remove( nLevel, pPred, pCur, pSucc, pos );
                         goto retry;
@@ -1526,12 +1511,12 @@ namespace cds { namespace intrusive {
                 pos.pPrev[nLevel] = pPred;
                 pos.pSucc[nLevel] = pCur.ptr();
             }
-            return ( pos.pCur = pCur.ptr() ) != nullptr;
+            return ( pos.pCur = pCur.ptr()) != nullptr;
         }
 
         bool find_max_position( position& pos )
         {
-            assert( gc::is_locked() );
+            assert( gc::is_locked());
 
             node_type * pPred;
             marked_node_ptr pSucc;
@@ -1544,7 +1529,7 @@ namespace cds { namespace intrusive {
 
                 while ( true ) {
                     pCur = pPred->next( nLevel ).load( memory_model::memory_order_acquire );
-                    if ( pCur.bits() ) {
+                    if ( pCur.bits()) {
                         // pCur.bits() means that pPred is logically deleted
                         goto retry;
                     }
@@ -1557,16 +1542,16 @@ namespace cds { namespace intrusive {
                     // pSucc contains deletion mark for pCur
                     pSucc = pCur->next( nLevel ).load( memory_model::memory_order_acquire );
 
-                    if ( pPred->next( nLevel ).load( memory_model::memory_order_acquire ).all() != pCur.ptr() )
+                    if ( pPred->next( nLevel ).load( memory_model::memory_order_acquire ).all() != pCur.ptr())
                         goto retry;
 
-                    if ( pSucc.bits() ) {
+                    if ( pSucc.bits()) {
                         // pCur is marked, i.e. logically deleted.
                         help_remove( nLevel, pPred, pCur, pSucc, pos );
                         goto retry;
                     }
                     else {
-                        if ( !pSucc.ptr() )
+                        if ( !pSucc.ptr())
                             break;
 
                         pPred = pCur.ptr();
@@ -1578,13 +1563,74 @@ namespace cds { namespace intrusive {
                 pos.pSucc[nLevel] = pCur.ptr();
             }
 
-            return ( pos.pCur = pCur.ptr() ) != nullptr;
+            return ( pos.pCur = pCur.ptr()) != nullptr;
+        }
+
+        bool renew_insert_position( value_type& val, node_type * pNode, position& pos )
+        {
+            assert( gc::is_locked());
+
+            node_type * pPred;
+            marked_node_ptr pSucc;
+            marked_node_ptr pCur;
+            key_comparator cmp;
+            int nCmp = 1;
+
+        retry:
+            pPred = m_Head.head();
+
+            for ( int nLevel = static_cast<int>( c_nMaxHeight - 1 ); nLevel >= 0; --nLevel ) {
+
+                while ( true ) {
+                    pCur = pPred->next( nLevel ).load( memory_model::memory_order_acquire );
+                    if ( pCur.bits()) {
+                        // pCur.bits() means that pPred is logically deleted
+                        goto retry;
+                    }
+
+                    if ( pCur.ptr() == nullptr ) {
+                        // end of the list at level nLevel - goto next level
+                        break;
+                    }
+
+                    // pSucc contains deletion mark for pCur
+                    pSucc = pCur->next( nLevel ).load( memory_model::memory_order_acquire );
+
+                    if ( pPred->next( nLevel ).load( memory_model::memory_order_acquire ).all() != pCur.ptr())
+                        goto retry;
+
+                    if ( pSucc.bits()) {
+                        // pCur is marked, i.e. logically deleted.
+                        if ( pCur.ptr() == pNode ) {
+                            // The node is being removed while we are inserting it
+                            return false;
+                        }
+
+                        // try to help deleting pCur
+                        help_remove( nLevel, pPred, pCur, pSucc, pos );
+                        goto retry;
+                    }
+                    else {
+                        nCmp = cmp( *node_traits::to_value_ptr( pCur.ptr()), val );
+                        if ( nCmp < 0 )
+                            pPred = pCur.ptr();
+                        else
+                            break;
+                    }
+                }
+
+                // Next level
+                pos.pPrev[nLevel] = pPred;
+                pos.pSucc[nLevel] = pCur.ptr();
+            }
+
+            return nCmp == 0;
         }
 
         template <typename Func>
         bool insert_at_position( value_type& val, node_type * pNode, position& pos, Func f )
         {
-            assert( gc::is_locked() );
+            assert( gc::is_locked());
 
             unsigned int const nHeight = pNode->height();
             pNode->clear_tower();
@@ -1608,13 +1654,19 @@ namespace cds { namespace intrusive {
                     // Set pNode->next
                     // pNode->next must be null but can have a "logical deleted" flag if another thread is removing pNode right now
                     if ( !pNode->next( nLevel ).compare_exchange_strong( p, pSucc,
-                        memory_model::memory_order_acq_rel, atomics::memory_order_acquire ) )
+                        memory_model::memory_order_acq_rel, atomics::memory_order_acquire ))
                     {
                         // pNode has been marked as removed while we are inserting it
                         // Stop inserting
                         assert( p.bits() != 0 );
-                        if ( pNode->level_unlinked( nHeight - nLevel ))
-                            pos.dispose( pNode );
+
+                        // Here pNode is linked at least at level 0, so level_unlinked() cannot return true
+                        CDS_VERIFY_FALSE( pNode->level_unlinked( nHeight - nLevel ));
+
+                        // pNode is linked up to nLevel - 1
+                        // Help to unlink it via find_position()
+                        find_position( val, pos, key_comparator(), false );
+
                         m_Stat.onLogicDeleteWhileInsert();
                         return true;
                     }
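The CDS_VERIFY_FALSE calls added in this and the next hunk rely on the node's unlink counter: m_nUnlink starts at the node's height, each successfully unlinked level decrements it, and only the call that drives it to zero may hand the node over for disposal. A minimal sketch of that idiom in isolation (hypothetical type; in the real code the count lives in node_type and is exposed through level_unlinked()):

```cpp
#include <atomic>
#include <cassert>

// Hypothetical, simplified illustration of the per-node unlink counter idiom.
struct unlink_counter
{
    std::atomic<unsigned> m_nUnlink;   // number of levels still linked into the list

    explicit unlink_counter( unsigned height )
        : m_nUnlink( height )
    {}

    // Mark \p levels more levels as physically unlinked.
    // Returns true only for the call that removes the last linked level;
    // that caller becomes responsible for disposing of the node.
    bool level_unlinked( unsigned levels = 1 )
    {
        unsigned prev = m_nUnlink.fetch_sub( levels, std::memory_order_acq_rel );
        assert( prev >= levels );
        return prev == levels;
    }
};
```

Under that reading, CDS_VERIFY_FALSE( pNode->level_unlinked( nHeight - nLevel )) both discounts the levels that were never linked and documents the invariant that level 0 is still linked, so this call cannot be the one that releases the node; assuming CDS_VERIFY_FALSE, like CDS_VERIFY, still evaluates its argument in release builds, a plain assert() would not be a drop-in replacement.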
@@ -1630,9 +1682,16 @@ namespace cds { namespace intrusive {
 
                     // Renew insert position
                     m_Stat.onRenewInsertPosition();
-                    if ( !find_position( val, pos, key_comparator(), false ) ) {
+
+                    if ( !renew_insert_position( val, pNode, pos )) {
                         // The node has been deleted while we are inserting it
-                        m_Stat.onNotFoundWhileInsert();
+                        // Update the node's effective height (unlink count) for the concurrent removal
+                        CDS_VERIFY_FALSE( pNode->level_unlinked( nHeight - nLevel ));
+
+                        m_Stat.onRemoveWhileInsert();
+
+                        // help to remove val
+                        find_position( val, pos, key_comparator(), false );
                         return true;
                     }
                 }
@@ -1644,7 +1703,7 @@ namespace cds { namespace intrusive {
         bool try_remove_at( node_type * pDel, position& pos, Func f, bool bExtract )
         {
             assert( pDel != nullptr );
-            assert( gc::is_locked() );
+            assert( gc::is_locked());
 
             marked_node_ptr pSucc;
             back_off bkoff;
@@ -1669,26 +1728,31 @@ namespace cds { namespace intrusive {
                 }
             }
 
-            marked_node_ptr p( pDel->next( 0 ).load( memory_model::memory_order_relaxed ).ptr() );
+            marked_node_ptr p( pDel->next( 0 ).load( memory_model::memory_order_relaxed ).ptr());
             while ( true ) {
                 if ( pDel->next( 0 ).compare_exchange_strong( p, p | nMask, memory_model::memory_order_release, atomics::memory_order_acquire ))
                 {
-                    f( *node_traits::to_value_ptr( pDel ) );
+                    f( *node_traits::to_value_ptr( pDel ));
 
                     // physical deletion
                     // try fast erase
                     p = pDel;
                     for ( int nLevel = static_cast<int>( pDel->height() - 1 ); nLevel >= 0; --nLevel ) {
 
-                        pSucc = pDel->next( nLevel ).load( memory_model::memory_order_relaxed );
-                        if ( pos.pPrev[nLevel]->next( nLevel ).compare_exchange_strong( p, marked_node_ptr( pSucc.ptr() ),
-                            memory_model::memory_order_acq_rel, atomics::memory_order_acquire ) )
+                        pSucc = pDel->next( nLevel ).load( memory_model::memory_order_acquire );
+                        if ( pos.pPrev[nLevel]->next( nLevel ).compare_exchange_strong( p, marked_node_ptr( pSucc.ptr()),
+                            memory_model::memory_order_acq_rel, atomics::memory_order_relaxed ))
                         {
                             pDel->level_unlinked();
                         }
                         else {
                             // Make slow erase
+#       ifdef CDS_DEBUG
+                            if ( find_position( *node_traits::to_value_ptr( pDel ), pos, key_comparator(), false ))
+                                assert( pDel != pos.pCur );
+#       else
                             find_position( *node_traits::to_value_ptr( pDel ), pos, key_comparator(), false );
+#       endif
                             if ( bExtract )
                                 m_Stat.onSlowExtract();
                             else
@@ -1709,7 +1773,7 @@ namespace cds { namespace intrusive {
                         m_Stat.onFastExtract();
                     return true;
                 }
-                else if ( p.bits() ) {
+                else if ( p.bits()) {
                     // Another thread is deleting pDel right now
                     m_Stat.onEraseContention();
                     return false;
@@ -1742,7 +1806,7 @@ namespace cds { namespace intrusive {
                 pCur = pPred->next( nLevel ).load( memory_model::memory_order_acquire );
 
                 while ( pCur != pNull ) {
-                    if ( pCur.bits() ) {
+                    if ( pCur.bits()) {
                         // pPred is being removed
                         if ( ++attempt < 4 ) {
                             bkoff();
@@ -1752,15 +1816,15 @@ namespace cds { namespace intrusive {
                         return find_fastpath_abort;
                     }
 
-                    if ( pCur.ptr() ) {
-                        int nCmp = cmp( *node_traits::to_value_ptr( pCur.ptr() ), val );
+                    if ( pCur.ptr()) {
+                        int nCmp = cmp( *node_traits::to_value_ptr( pCur.ptr()), val );
                         if ( nCmp < 0 ) {
                             pPred = pCur.ptr();
                             pCur = pCur->next( nLevel ).load( memory_model::memory_order_acquire );
                         }
                         else if ( nCmp == 0 ) {
                             // found
-                            f( *node_traits::to_value_ptr( pCur.ptr() ), val );
+                            f( *node_traits::to_value_ptr( pCur.ptr()), val );
                             return find_fastpath_found;
                         }
                         else // pCur > val - go down
@@ -1775,7 +1839,7 @@ namespace cds { namespace intrusive {
         template <typename Q, typename Compare, typename Func>
         bool find_slowpath( Q& val, Compare cmp, Func f, position& pos )
         {
-            if ( find_position( val, pos, cmp, true ) ) {
+            if ( find_position( val, pos, cmp, true )) {
                 assert( cmp( *node_traits::to_value_ptr( pos.pCur ), val ) == 0 );
 
                 f( *node_traits::to_value_ptr( pos.pCur ), val );
@@ -1800,7 +1864,7 @@ namespace cds { namespace intrusive {
             {
                 rcu_lock l;
 
-                switch ( find_fastpath( val, cmp, f ) ) {
+                switch ( find_fastpath( val, cmp, f )) {
                 case find_fastpath_found:
                     m_Stat.onFindFastSuccess();
                     return true;
@@ -1811,7 +1875,7 @@ namespace cds { namespace intrusive {
                     break;
                 }
 
-                if ( find_slowpath( val, cmp, f, pos ) ) {
+                if ( find_slowpath( val, cmp, f, pos )) {
                     m_Stat.onFindSlowSuccess();
                     bRet = true;
                 }
@@ -1834,7 +1898,7 @@ namespace cds { namespace intrusive {
             {
                 rcu_lock rcuLock;
 
-                if ( !find_position( val, pos, cmp, false ) ) {
+                if ( !find_position( val, pos, cmp, false )) {
                     m_Stat.onEraseFailed();
                     bRet = false;
                 }
@@ -1843,7 +1907,7 @@ namespace cds { namespace intrusive {
                     assert( cmp( *node_traits::to_value_ptr( pDel ), val ) == 0 );
 
                     unsigned int nHeight = pDel->height();
-                    if ( try_remove_at( pDel, pos, f, false ) ) {
+                    if ( try_remove_at( pDel, pos, f, false )) {
                         --m_ItemCounter;
                         m_Stat.onRemoveNode( nHeight );
                         m_Stat.onEraseSuccess();
@@ -1863,11 +1927,11 @@ namespace cds { namespace intrusive {
         value_type * do_extract_key( Q const& key, Compare cmp, position& pos )
         {
             // RCU should be locked!!!
-            assert( gc::is_locked() );
+            assert( gc::is_locked());
 
             node_type * pDel;
 
-            if ( !find_position( key, pos, cmp, false ) ) {
+            if ( !find_position( key, pos, cmp, false )) {
                 m_Stat.onExtractFailed();
                 pDel = nullptr;
             }
@@ -1877,7 +1941,7 @@ namespace cds { namespace intrusive {
 
                 unsigned int const nHeight = pDel->height();
 
-                if ( try_remove_at( pDel, pos, []( value_type const& ) {}, true ) ) {
+                if ( try_remove_at( pDel, pos, []( value_type const& ) {}, true )) {
                     --m_ItemCounter;
                     m_Stat.onRemoveNode( nHeight );
                     m_Stat.onExtractSuccess();
@@ -1922,7 +1986,7 @@ namespace cds { namespace intrusive {
 
         value_type * do_extract_min()
         {
-            assert( !gc::is_locked() );
+            assert( !gc::is_locked());
 
             position pos;
             node_type * pDel;
@@ -1930,7 +1994,7 @@ namespace cds { namespace intrusive {
             {
                 rcu_lock l;
 
-                if ( !find_min_position( pos ) ) {
+                if ( !find_min_position( pos )) {
                     m_Stat.onExtractMinFailed();
                     pDel = nullptr;
                 }
@@ -1938,7 +2002,7 @@ namespace cds { namespace intrusive {
                     pDel = pos.pCur;
                     unsigned int const nHeight = pDel->height();
 
-                    if ( try_remove_at( pDel, pos, []( value_type const& ) {}, true ) ) {
+                    if ( try_remove_at( pDel, pos, []( value_type const& ) {}, true )) {
                         --m_ItemCounter;
                         m_Stat.onRemoveNode( nHeight );
                         m_Stat.onExtractMinSuccess();
@@ -1955,7 +2019,7 @@ namespace cds { namespace intrusive {
 
         value_type * do_extract_max()
         {
-            assert( !gc::is_locked() );
+            assert( !gc::is_locked());
 
             position pos;
             node_type * pDel;
@@ -1963,7 +2027,7 @@ namespace cds { namespace intrusive {
             {
                 rcu_lock l;
 
-                if ( !find_max_position( pos ) ) {
+                if ( !find_max_position( pos )) {
                     m_Stat.onExtractMaxFailed();
                     pDel = nullptr;
                 }
@@ -1971,7 +2035,7 @@ namespace cds { namespace intrusive {
                     pDel = pos.pCur;
                     unsigned int const nHeight = pDel->height();
 
-                    if ( try_remove_at( pDel, pos, []( value_type const& ) {}, true ) ) {
+                    if ( try_remove_at( pDel, pos, []( value_type const& ) {}, true )) {
                         --m_ItemCounter;
                         m_Stat.onRemoveNode( nHeight );
                         m_Stat.onExtractMaxSuccess();
@@ -1998,7 +2062,7 @@ namespace cds { namespace intrusive {
             node_type* p = m_Head.head()->next( 0 ).load( atomics::memory_order_relaxed ).ptr();
             while ( p ) {
                 node_type* pNext = p->next( 0 ).load( atomics::memory_order_relaxed ).ptr();
-                dispose_node( node_traits::to_value_ptr( p ) );
+                dispose_node( node_traits::to_value_ptr( p ));
                 p = pNext;
             }
         }