diff --git a/include/boost/lockfree/detail/freelist.hpp b/include/boost/lockfree/detail/freelist.hpp
index 23752d4..7db87db 100644
--- a/include/boost/lockfree/detail/freelist.hpp
+++ b/include/boost/lockfree/detail/freelist.hpp
@@ -183,7 +183,7 @@ class alignas( cacheline_bytes ) freelist_stack : Alloc
     template < bool Bounded >
     T* allocate_impl( void )
     {
-        tagged_node_ptr old_pool = pool_.load( memory_order_consume );
+        tagged_node_ptr old_pool = pool_.load( memory_order_acquire );
 
         for ( ;; ) {
             if ( !old_pool.get_ptr() ) {
@@ -241,7 +241,7 @@ class alignas( cacheline_bytes ) freelist_stack : Alloc
     void deallocate_impl( T* n )
     {
         void* node = n;
-        tagged_node_ptr old_pool = pool_.load( memory_order_consume );
+        tagged_node_ptr old_pool = pool_.load( memory_order_acquire );
         freelist_node* new_pool_ptr = reinterpret_cast< freelist_node* >( node );
 
         for ( ;; ) {
@@ -561,7 +561,7 @@ class fixed_size_freelist : NodeStorage
 private:
     index_t allocate_impl( void )
     {
-        tagged_index old_pool = pool_.load( memory_order_consume );
+        tagged_index old_pool = pool_.load( memory_order_acquire );
 
         for ( ;; ) {
             index_t index = old_pool.get_index();
@@ -580,7 +580,7 @@ class fixed_size_freelist : NodeStorage
 
     index_t allocate_impl_unsafe( void )
     {
-        tagged_index old_pool = pool_.load( memory_order_consume );
+        tagged_index old_pool = pool_.load( memory_order_acquire );
         index_t index = old_pool.get_index();
 
         if ( index == null_handle() )
@@ -607,7 +607,7 @@ class fixed_size_freelist : NodeStorage
     void deallocate_impl( index_t index )
     {
         freelist_node* new_pool_node = reinterpret_cast< freelist_node* >( NodeStorage::nodes() + index );
-        tagged_index old_pool = pool_.load( memory_order_consume );
+        tagged_index old_pool = pool_.load( memory_order_acquire );
 
         for ( ;; ) {
             tagged_index new_pool( index, old_pool.get_tag() );
@@ -621,7 +621,7 @@ class fixed_size_freelist : NodeStorage
     void deallocate_impl_unsafe( index_t index )
     {
         freelist_node* new_pool_node = reinterpret_cast< freelist_node* >( NodeStorage::nodes() + index );
-        tagged_index old_pool = pool_.load( memory_order_consume );
+        tagged_index old_pool = pool_.load( memory_order_acquire );
 
         tagged_index new_pool( index, old_pool.get_tag() );
         new_pool_node->next.set_index( old_pool.get_index() );
diff --git a/include/boost/lockfree/stack.hpp b/include/boost/lockfree/stack.hpp
index 5b353d5..4489e05 100644
--- a/include/boost/lockfree/stack.hpp
+++ b/include/boost/lockfree/stack.hpp
@@ -642,7 +642,7 @@ class stack
     template < typename Functor >
     bool consume_one( Functor&& f )
     {
-        tagged_node_handle old_tos = tos.load( detail::memory_order_consume );
+        tagged_node_handle old_tos = tos.load( detail::memory_order_acquire );
 
         for ( ;; ) {
             node* old_tos_pointer = pool.get_pointer( old_tos );
@@ -689,7 +689,7 @@ class stack
     size_t consume_all_atomic( Functor&& f )
     {
         size_t element_count = 0;
-        tagged_node_handle old_tos = tos.load( detail::memory_order_consume );
+        tagged_node_handle old_tos = tos.load( detail::memory_order_acquire );
 
         for ( ;; ) {
             node* old_tos_pointer = pool.get_pointer( old_tos );
@@ -736,7 +736,7 @@ class stack
     size_t consume_all_atomic_reversed( Functor&& f )
     {
         size_t element_count = 0;
-        tagged_node_handle old_tos = tos.load( detail::memory_order_consume );
+        tagged_node_handle old_tos = tos.load( detail::memory_order_acquire );
 
         for ( ;; ) {
             node* old_tos_pointer = pool.get_pointer( old_tos );