marked_node_ptr cur(pos.pCur);
pNode->m_pNext.store( cur, memory_model::memory_order_release );
- if ( pos.pPrev->compare_exchange_strong( cur, marked_node_ptr(pNode), memory_model::memory_order_release, atomics::memory_order_relaxed ))
+ if ( cds_likely( pos.pPrev->compare_exchange_strong( cur, marked_node_ptr(pNode), memory_model::memory_order_release, atomics::memory_order_relaxed )))
return true;
pNode->m_pNext.store( marked_node_ptr(), memory_model::memory_order_relaxed );
// Mark the node (logical deletion)
marked_node_ptr next(pos.pNext, 0);
- if ( pos.pCur->m_pNext.compare_exchange_strong( next, marked_node_ptr(pos.pNext, 1), memory_model::memory_order_release, atomics::memory_order_relaxed )) {
+ if ( cds_likely( pos.pCur->m_pNext.compare_exchange_strong( next, marked_node_ptr(pos.pNext, 1), memory_model::memory_order_release, atomics::memory_order_relaxed ))) {
// physical deletion may be performed by the search function if it detects that a node is logically deleted (marked)
// The CAS may succeed here, or in another thread that is searching the list
marked_node_ptr cur(pos.pCur);
- if ( pos.pPrev->compare_exchange_strong( cur, marked_node_ptr( pos.pNext ), memory_model::memory_order_acquire, atomics::memory_order_relaxed ))
+ if ( cds_likely( pos.pPrev->compare_exchange_strong( cur, marked_node_ptr( pos.pNext ), memory_model::memory_order_acquire, atomics::memory_order_relaxed )))
retire_node( pos.pCur );
return true;
}
do {
pNext = pCur->m_pNext.load(memory_model::memory_order_relaxed);
g.assign( node_traits::to_value_ptr( pNext.ptr()));
- } while ( pNext != pCur->m_pNext.load(memory_model::memory_order_acquire));
+ } while ( cds_unlikely( pNext != pCur->m_pNext.load(memory_model::memory_order_acquire)));
if ( pNext.ptr())
m_pNode = m_Guard.assign( g.template get<value_type>());
m_pNode = nullptr;
m_Guard.clear();
}
- if ( p == pNode.load(memory_model::memory_order_acquire))
+ if ( cds_likely( p == pNode.load(memory_model::memory_order_acquire)))
break;
}
}
head = m_pHead.load(memory_model::memory_order_relaxed);
if ( head.ptr())
guard.assign( node_traits::to_value_ptr( *head.ptr()));
- if ( m_pHead.load(memory_model::memory_order_acquire) == head ) {
+ if ( cds_likely( m_pHead.load(memory_model::memory_order_acquire) == head )) {
if ( head.ptr() == nullptr )
break;
value_type& val = *node_traits::to_value_ptr( *head.ptr());
node_type * pNode = node_traits::to_node_ptr( val );
while ( true ) {
if ( search( refHead, val, pos, key_comparator())) {
- if ( pos.pCur->m_pNext.load(memory_model::memory_order_acquire).bits()) {
+ if ( cds_unlikely( pos.pCur->m_pNext.load(memory_model::memory_order_acquire).bits())) {
back_off()();
continue; // the node found is marked as deleted
}
{
return node_traits::to_value_ptr( p.ptr());
});
- if ( pPrev->load(memory_model::memory_order_acquire).all() != pCur.ptr()) {
+ if ( cds_unlikely( pPrev->load(memory_model::memory_order_acquire).all() != pCur.ptr())) {
bkoff();
goto try_again;
}
if ( pNext.bits() == 1 ) {
// pCur marked i.e. logically deleted. Help the erase/unlink function to unlink pCur node
marked_node_ptr cur( pCur.ptr());
- if ( pPrev->compare_exchange_strong( cur, marked_node_ptr( pNext.ptr()), memory_model::memory_order_acquire, atomics::memory_order_relaxed )) {
+ if ( cds_unlikely( pPrev->compare_exchange_strong( cur, marked_node_ptr( pNext.ptr()), memory_model::memory_order_acquire, atomics::memory_order_relaxed ))) {
retire_node( pCur.ptr());
}
else {
link_checker::is_empty( pNode );
pNode->m_pNext.store( pos.pCur, memory_model::memory_order_relaxed );
- if ( pos.pPrev->compare_exchange_strong( pos.pCur, pNode, memory_model::memory_order_release, atomics::memory_order_relaxed ))
+ if ( cds_likely( pos.pPrev->compare_exchange_strong( pos.pCur, pNode, memory_model::memory_order_release, atomics::memory_order_relaxed )))
return true;
pNode->m_pNext.store( nullptr, memory_model::memory_order_relaxed );
void clear( Disposer disp )
{
node_type * pHead = m_pHead.load(memory_model::memory_order_relaxed);
- do {} while ( !m_pHead.compare_exchange_weak( pHead, nullptr, memory_model::memory_order_relaxed ) );
+ do {} while ( cds_unlikely( !m_pHead.compare_exchange_weak( pHead, nullptr, memory_model::memory_order_relaxed )));
while ( pHead ) {
node_type * p = pHead->m_pNext.load(memory_model::memory_order_relaxed);
}
pNext = pCur->m_pNext.load(memory_model::memory_order_relaxed);
- if ( pCur->m_pNext.load(memory_model::memory_order_acquire) != pNext ) {
+ if ( cds_unlikely( pCur->m_pNext.load(memory_model::memory_order_acquire) != pNext )) {
bkoff();
goto try_again;
}
- if ( pPrev->load(memory_model::memory_order_acquire) != pCur ) {
+ if ( cds_unlikely( pPrev->load(memory_model::memory_order_acquire) != pCur )) {
bkoff();
goto try_again;
}
marked_node_ptr p( pos.pCur );
pNode->m_pNext.store( p, memory_model::memory_order_release );
- if ( pos.pPrev->compare_exchange_strong( p, marked_node_ptr(pNode), memory_model::memory_order_release, atomics::memory_order_relaxed ))
+ if ( cds_likely( pos.pPrev->compare_exchange_strong( p, marked_node_ptr(pNode), memory_model::memory_order_release, atomics::memory_order_relaxed )))
return true;
pNode->m_pNext.store( marked_node_ptr(), memory_model::memory_order_relaxed );
// Mark the node (logical deletion)
marked_node_ptr next(pos.pNext, 0);
- if ( pos.pCur->m_pNext.compare_exchange_strong( next, next | nMask, memory_model::memory_order_release, atomics::memory_order_relaxed )) {
+ if ( cds_likely( pos.pCur->m_pNext.compare_exchange_strong( next, next | nMask, memory_model::memory_order_release, atomics::memory_order_relaxed ))) {
// Try physical removal - fast path
marked_node_ptr cur(pos.pCur);
- if ( pos.pPrev->compare_exchange_strong(cur, marked_node_ptr(pos.pNext), memory_model::memory_order_acquire, atomics::memory_order_relaxed )) {
+ if ( cds_likely( pos.pPrev->compare_exchange_strong(cur, marked_node_ptr(pos.pNext), memory_model::memory_order_acquire, atomics::memory_order_relaxed ))) {
if ( nMask == erase_mask )
link_to_remove_chain( pos, pos.pCur );
}
RCU \p synchronize method can be called.
Note that depending on RCU type used the \ref disposer invocation can be deferred.
- The function can throw \p cds::urcu::rcu_deadlock exception if an deadlock is encountered and
+ The function can throw \p cds::urcu::rcu_deadlock exception if a deadlock is encountered and
deadlock checking policy is \p opt::v::rcu_throw_deadlock.
*/
void clear()
if ( !pHead.ptr() )
break;
marked_node_ptr pNext( pHead->m_pNext.load(memory_model::memory_order_relaxed) );
- if ( !pHead->m_pNext.compare_exchange_weak( pNext, pNext | 1, memory_model::memory_order_acquire, memory_model::memory_order_relaxed ))
+ if ( cds_unlikely( !pHead->m_pNext.compare_exchange_weak( pNext, pNext | 1, memory_model::memory_order_acquire, memory_model::memory_order_relaxed )))
continue;
- if ( !m_pHead.compare_exchange_weak( pHead, marked_node_ptr(pNext.ptr()), memory_model::memory_order_release, memory_model::memory_order_relaxed ))
+ if ( cds_unlikely( !m_pHead.compare_exchange_weak( pHead, marked_node_ptr(pNext.ptr()), memory_model::memory_order_release, memory_model::memory_order_relaxed )))
continue;
}
pNext = pCur->m_pNext.load(memory_model::memory_order_acquire);
- if ( pPrev->load(memory_model::memory_order_acquire) != pCur
- || pNext != pCur->m_pNext.load(memory_model::memory_order_acquire))
+ if ( cds_unlikely( pPrev->load(memory_model::memory_order_acquire) != pCur
+ || pNext != pCur->m_pNext.load(memory_model::memory_order_acquire )))
{
bkoff();
goto try_again;
if ( pNext.bits() ) {
// pCur is marked as deleted. Try to unlink it from the list
- if ( pPrev->compare_exchange_weak( pCur, marked_node_ptr( pNext.ptr() ), memory_model::memory_order_acquire, atomics::memory_order_relaxed ) ) {
+ if ( cds_likely( pPrev->compare_exchange_weak( pCur, marked_node_ptr( pNext.ptr() ), memory_model::memory_order_acquire, atomics::memory_order_relaxed ))) {
if ( pNext.bits() == erase_mask )
link_to_remove_chain( pos, pCur.ptr() );
}