/*
This file is a part of libcds - Concurrent Data Structures library
- (C) Copyright Maxim Khizhinsky (libcds.dev@gmail.com) 2006-2016
+ (C) Copyright Maxim Khizhinsky (libcds.dev@gmail.com) 2006-2017
Source code repo: http://github.com/khizmax/libcds/
Download: http://sourceforge.net/projects/libcds/files/
-
+
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
/// Hash functor for \p %value_type and all its derivatives you use
typedef typename cds::opt::v::hash_selector< typename traits::hash >::type hash;
+ typedef typename traits::bit_reversal bit_reversal; ///< Bit reversal algorithm, see \p split_list::traits::bit_reversal
typedef typename traits::item_counter item_counter; ///< Item counter type
typedef typename traits::back_off back_off; ///< back-off strategy for spinning
typedef typename traits::memory_model memory_model; ///< Memory ordering. See \p cds::opt::memory_model option
return base_class::unlink_at( h, val );
}
+ // Erases the item pointed to by an IterableList iterator.
+ // This overload participates in overload resolution only when the underlying
+ // ordered_list is an iterable list and \p Iterator is exactly its iterator type.
+ // Forwards to the base class and returns its result (\p true on success).
+ template <typename Iterator>
+ typename std::enable_if<
+ std::is_same< Iterator, typename ordered_list::iterator>::value && is_iterable_list< ordered_list >::value,
+ bool
+ >::type
+ erase_at( Iterator iter )
+ {
+ return base_class::erase_at( iter );
+ }
+
template <typename Q, typename Compare, typename Func>
bool erase_at( aux_node_type * pHead, split_list::details::search_value_type<Q> const& val, Compare cmp, Func f )
{
//@cond
template <bool IsConst>
class iterator_type
- :public split_list::details::iterator_type<node_traits, ordered_list, IsConst>
+ : public split_list::details::iterator_type<node_traits, ordered_list, IsConst>
{
typedef split_list::details::iterator_type<node_traits, ordered_list, IsConst> iterator_base_class;
typedef typename iterator_base_class::list_iterator list_iterator;
+
+ friend class SplitListSet;
+
public:
iterator_type()
: iterator_base_class()
aux_node_type * pHead = get_bucket( nHash );
assert( pHead != nullptr );
- node_traits::to_node_ptr( val )->m_nHash = split_list::regular_hash( nHash );
+ node_traits::to_node_ptr( val )->m_nHash = split_list::regular_hash<bit_reversal>( nHash );
if ( m_List.insert_at( pHead, val )) {
inc_item_count();
aux_node_type * pHead = get_bucket( nHash );
assert( pHead != nullptr );
- node_traits::to_node_ptr( val )->m_nHash = split_list::regular_hash( nHash );
+ node_traits::to_node_ptr( val )->m_nHash = split_list::regular_hash<bit_reversal>( nHash );
if ( m_List.insert_at( pHead, val, f )) {
inc_item_count();
aux_node_type * pHead = get_bucket( nHash );
assert( pHead != nullptr );
- node_traits::to_node_ptr( val )->m_nHash = split_list::regular_hash( nHash );
+ node_traits::to_node_ptr( val )->m_nHash = split_list::regular_hash<bit_reversal>( nHash );
std::pair<bool, bool> bRet = m_List.update_at( pHead, val, func, bAllowInsert );
if ( bRet.first && bRet.second ) {
std::pair<bool, bool> upsert( value_type& val, bool bAllowInsert = true )
#else
template <typename Q>
- typename std::enable_if<
+ typename std::enable_if<
std::is_same< Q, value_type>::value && is_iterable_list< ordered_list >::value,
std::pair<bool, bool>
>::type
aux_node_type * pHead = get_bucket( nHash );
assert( pHead != nullptr );
- node_traits::to_node_ptr( val )->m_nHash = split_list::regular_hash( nHash );
+ node_traits::to_node_ptr( val )->m_nHash = split_list::regular_hash<bit_reversal>( nHash );
std::pair<bool, bool> bRet = m_List.upsert_at( pHead, val, bAllowInsert );
if ( bRet.first && bRet.second ) {
return erase_( key, typename ordered_list_adapter::template make_compare_from_less<Less>(), f );
}
+ /// Deletes the item pointed by iterator \p iter (only for \p IterableList based set)
+ /**
+ Returns \p true if the operation is successful, \p false otherwise.
+ The function can return \p false if the node the iterator points to has already been deleted
+ by other thread.
+
+ The function does not invalidate the iterator, it remains valid and can be used for further traversing.
+
+ @note \p %erase_at() is supported only for \p %SplitListSet based on \p IterableList.
+ */
+#ifdef CDS_DOXYGEN_INVOKED
+ bool erase_at( iterator const& iter )
+#else
+ template <typename Iterator>
+ typename std::enable_if< std::is_same<Iterator, iterator>::value && is_iterable_list< ordered_list >::value, bool >::type
+ erase_at( Iterator const& iter )
+#endif
+ {
+ assert( iter != end() );
+
+ // delegate to the underlying iterable list via the wrapped list iterator;
+ // on successful physical removal, update item count and statistics
+ if ( m_List.erase_at( iter.underlying_iterator())) {
+ --m_ItemCounter;
+ m_Stat.onEraseSuccess();
+ return true;
+ }
+ return false;
+ }
+
/// Extracts the item with specified \p key
/** \anchor cds_intrusive_SplitListSet_hp_extract
The function searches an item with key equal to \p key,
#endif
find( Q& key )
{
- return find_iterator_( key, key_comparator() );
+ return find_iterator_( key, key_comparator());
}
//@cond
template <typename Q>
typename std::enable_if< std::is_same<Q, Q>::value && is_iterable_list< ordered_list >::value, iterator >::type
find( Q const& key )
{
- return find_iterator_( key, key_comparator() );
+ return find_iterator_( key, key_comparator());
}
//@endcond
find_with( Q& key, Less pred )
{
CDS_UNUSED( pred );
- return find_iterator_( key, typename ordered_list_adapter::template make_compare_from_less<Less>() );
+ return find_iterator_( key, typename ordered_list_adapter::template make_compare_from_less<Less>());
}
//@cond
template <typename Q, typename Less>
find_with( Q const& key, Less pred )
{
CDS_UNUSED( pred );
- return find_iterator_( key, typename ordered_list_adapter::template make_compare_from_less<Less>() );
+ return find_iterator_( key, typename ordered_list_adapter::template make_compare_from_less<Less>());
}
//@endcond
{
m_Stat.onHeadNodeAllocated();
aux_node_type* p = m_Buckets.alloc_aux_node();
- if ( p )
+ if ( p ) {
+ CDS_TSAN_ANNOTATE_IGNORE_WRITES_BEGIN;
+ // p->m_nHash is read-only data member
p->m_nHash = nHash;
+ CDS_TSAN_ANNOTATE_IGNORE_WRITES_END;
+# ifdef CDS_DEBUG
+ cds_assert( !p->m_busy.load( atomics::memory_order_acquire ) );
+ p->m_busy.store( true, atomics::memory_order_release );
+# endif
+ }
return p;
}
void free_aux_node( aux_node_type * p )
{
+# ifdef CDS_DEBUG
+ // Debug-only double-free guard: the node must still be marked busy
+ // (set when it was allocated) before it may be released.
+ // NOTE(review): 'cds_assert' does not look like a standard libcds macro —
+ // confirm it is defined; plain assert() is used elsewhere in this file.
+ cds_assert( p->m_busy.load( atomics::memory_order_acquire ) );
+ p->m_busy.store( false, atomics::memory_order_release );
+# endif
+
m_Buckets.free_aux_node( p );
m_Stat.onHeadNodeFreed();
}
if ( pBucket )
return pBucket;
- pBucket = alloc_aux_node( split_list::dummy_hash( nBucket ) );
+ pBucket = alloc_aux_node( split_list::dummy_hash<bit_reversal>( nBucket ));
if ( pBucket ) {
- if ( m_List.insert_aux_node( pParentBucket, pBucket ) ) {
+ if ( m_List.insert_aux_node( pParentBucket, pBucket )) {
m_Buckets.bucket( nBucket, pBucket );
m_Stat.onNewBucket();
return pBucket;
if ( pHead == nullptr )
pHead = init_bucket( nBucket );
- assert( pHead->is_dummy() );
+ assert( pHead->is_dummy());
return pHead;
}
"cds::atomicity::empty_item_counter is not allowed as a item counter");
// Initialize bucket 0
- aux_node_type * pNode = alloc_aux_node( 0 /*split_list::dummy_hash(0)*/ );
+ aux_node_type * pNode = alloc_aux_node( 0 /*split_list::dummy_hash<bit_reversal>(0)*/ );
assert( pNode != nullptr );
// insert_aux_node cannot return false for empty list
- CDS_VERIFY( m_List.insert_aux_node( pNode ) );
+ CDS_VERIFY( m_List.insert_aux_node( pNode ));
m_Buckets.bucket( 0, pNode );
}
size_t sz = m_nBucketCountLog2.load( memory_model::memory_order_relaxed );
const size_t nBucketCount = static_cast<size_t>(1) << sz;
- if ( nBucketCount < m_Buckets.capacity() ) {
+ if ( nBucketCount < m_Buckets.capacity()) {
// we may grow the bucket table
const size_t nLoadFactor = m_Buckets.load_factor();
- if ( nMaxCount < max_item_count( nBucketCount, nLoadFactor ) )
+ if ( nMaxCount < max_item_count( nBucketCount, nLoadFactor ))
return; // someone already have updated m_nBucketCountLog2, so stop here
m_nMaxItemCount.compare_exchange_strong( nMaxCount, max_item_count( nBucketCount << 1, nLoadFactor ),
bool find_( Q& val, Compare cmp, Func f )
{
size_t nHash = hash_value( val );
- split_list::details::search_value_type<Q> sv( val, split_list::regular_hash( nHash ) );
+ split_list::details::search_value_type<Q> sv( val, split_list::regular_hash<bit_reversal>( nHash ));
aux_node_type * pHead = get_bucket( nHash );
assert( pHead != nullptr );
bool find_( Q const& val, Compare cmp )
{
size_t nHash = hash_value( val );
- split_list::details::search_value_type<Q const> sv( val, split_list::regular_hash( nHash ) );
+ split_list::details::search_value_type<Q const> sv( val, split_list::regular_hash<bit_reversal>( nHash ));
aux_node_type * pHead = get_bucket( nHash );
assert( pHead != nullptr );
- return m_Stat.onFind( m_List.find_at( pHead, sv, cmp ) );
+ return m_Stat.onFind( m_List.find_at( pHead, sv, cmp ));
}
template <typename Q, typename Compare>
iterator find_iterator_( Q const& val, Compare cmp )
{
size_t nHash = hash_value( val );
- split_list::details::search_value_type<Q const> sv( val, split_list::regular_hash( nHash ) );
+ split_list::details::search_value_type<Q const> sv( val, split_list::regular_hash<bit_reversal>( nHash ));
aux_node_type * pHead = get_bucket( nHash );
assert( pHead != nullptr );
guarded_ptr get_( Q const& val, Compare cmp )
{
size_t nHash = hash_value( val );
- split_list::details::search_value_type<Q const> sv( val, split_list::regular_hash( nHash ) );
+ split_list::details::search_value_type<Q const> sv( val, split_list::regular_hash<bit_reversal>( nHash ));
aux_node_type * pHead = get_bucket( nHash );
assert( pHead != nullptr );
guarded_ptr gp = m_List.get_at( pHead, sv, cmp );
- m_Stat.onFind( !gp.empty() );
+ m_Stat.onFind( !gp.empty());
return gp;
}
template <typename Q>
guarded_ptr get_( Q const& key )
{
+ // convenience overload: delegate to the comparator-based get_()
+ // using the set's default key comparator
- return get_( key, key_comparator() );
+ return get_( key, key_comparator());
}
template <typename Q, typename Less>
guarded_ptr get_with_( Q const& key, Less )
{
+ // delegate to the comparator-based get_(), adapting the Less
+ // predicate into a comparator via the ordered-list adapter
- return get_( key, typename ordered_list_adapter::template make_compare_from_less<Less>() );
+ return get_( key, typename ordered_list_adapter::template make_compare_from_less<Less>());
}
template <typename Q, typename Compare, typename Func>
bool erase_( Q const& val, Compare cmp, Func f )
{
size_t nHash = hash_value( val );
- split_list::details::search_value_type<Q const> sv( val, split_list::regular_hash( nHash ) );
+ split_list::details::search_value_type<Q const> sv( val, split_list::regular_hash<bit_reversal>( nHash ));
aux_node_type * pHead = get_bucket( nHash );
assert( pHead != nullptr );
- if ( m_List.erase_at( pHead, sv, cmp, f ) ) {
+ if ( m_List.erase_at( pHead, sv, cmp, f )) {
--m_ItemCounter;
m_Stat.onEraseSuccess();
return true;
bool erase_( Q const& val, Compare cmp )
{
size_t nHash = hash_value( val );
- split_list::details::search_value_type<Q const> sv( val, split_list::regular_hash( nHash ) );
+ split_list::details::search_value_type<Q const> sv( val, split_list::regular_hash<bit_reversal>( nHash ));
aux_node_type * pHead = get_bucket( nHash );
assert( pHead != nullptr );
- if ( m_List.erase_at( pHead, sv, cmp ) ) {
+ if ( m_List.erase_at( pHead, sv, cmp )) {
--m_ItemCounter;
m_Stat.onEraseSuccess();
return true;
guarded_ptr extract_( Q const& val, Compare cmp )
{
size_t nHash = hash_value( val );
- split_list::details::search_value_type<Q const> sv( val, split_list::regular_hash( nHash ) );
+ split_list::details::search_value_type<Q const> sv( val, split_list::regular_hash<bit_reversal>( nHash ));
aux_node_type * pHead = get_bucket( nHash );
assert( pHead != nullptr );
template <typename Q>
guarded_ptr extract_( Q const& key )
{
+ // convenience overload: delegate to the comparator-based extract_()
+ // using the set's default key comparator
- return extract_( key, key_comparator() );
+ return extract_( key, key_comparator());
}
template <typename Q, typename Less>
guarded_ptr extract_with_( Q const& key, Less )
{
+ // delegate to the comparator-based extract_(), adapting the Less
+ // predicate into a comparator via the ordered-list adapter
- return extract_( key, typename ordered_list_adapter::template make_compare_from_less<Less>() );
+ return extract_( key, typename ordered_list_adapter::template make_compare_from_less<Less>());
}
//@endcond
atomics::atomic<size_t> m_nBucketCountLog2; ///< log2( current bucket count )
atomics::atomic<size_t> m_nMaxItemCount; ///< number of items container can hold, before we have to resize
- item_counter m_ItemCounter; ///< Item counter
hash m_HashFunctor; ///< Hash functor
+ item_counter m_ItemCounter; ///< Item counter
stat m_Stat; ///< Internal statistics
//@endcond
};