while( !try_acquire_internal_lock() )   // acquire_internal_lock(): spin until acquired
    __TBB_Pause(1);
if( flag )                              // unblock_or_wait_on_internal_lock(flag)
    wait_for_release_of_internal_lock();
else
    release_internal_lock();
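The tiny internal lock toggles a single byte between the RELEASED and ACQUIRED values with a compare-and-swap. A minimal standalone sketch of the same protocol, using std::atomic instead of TBB's as_atomic and memory-semantics tags (the constant values are assumptions):

    #include <atomic>

    // Illustrative stand-in for the scoped_lock's tiny internal lock.
    struct tiny_internal_lock {
        static const unsigned char RELEASED = 0;   // assumed encoding
        static const unsigned char ACQUIRED = 1;   // assumed encoding
        std::atomic<unsigned char> state{RELEASED};

        bool try_acquire() {                 // cf. try_acquire_internal_lock()
            unsigned char expected = RELEASED;
            return state.compare_exchange_strong(expected, ACQUIRED,
                                                 std::memory_order_acquire);
        }
        void release() {                     // cf. release_internal_lock()
            state.store(RELEASED, std::memory_order_release);
        }
        void wait_for_release() const {      // cf. wait_for_release_of_internal_lock()
            while( state.load(std::memory_order_acquire) != RELEASED ) {}
        }
    };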
#if defined(_MSC_VER) && !defined(__INTEL_COMPILER)
    // Workaround for pointer-to-integer cast warnings
    #pragma warning (push)
    #pragma warning (disable: 4311 4312)
#endif

// Atomic operations of tricky_atomic_pointer, performed on pointer-sized words:
template<memory_semantics M>
static T* fetch_and_add( T* volatile * location, word addend ) {
    return reinterpret_cast<T*>( atomic_traits<sizeof(T*),M>::fetch_and_add(location, addend) );
}

template<memory_semantics M>
static T* fetch_and_store( T* volatile * location, T* value ) {
    return reinterpret_cast<T*>( atomic_traits<sizeof(T*),M>::fetch_and_store(location, reinterpret_cast<word>(value)) );
}

template<memory_semantics M>
static T* compare_and_swap( T* volatile * location, T* value, T* comparand ) {
    return reinterpret_cast<T*>( atomic_traits<sizeof(T*),M>::compare_and_swap(location, reinterpret_cast<word>(value),
                                                                               reinterpret_cast<word>(comparand)) );
}
T* operator&( word operand2 ) const { return reinterpret_cast<T*>( reinterpret_cast<word>(ref) & operand2 ); }
T* operator|( word operand2 ) const { return reinterpret_cast<T*>( reinterpret_cast<word>(ref) | operand2 ); }
#if defined(_MSC_VER) && !defined(__INTEL_COMPILER)
    #pragma warning (pop)
#endif
// get_flag(ptr): extract the low-order flag bit of a possibly tagged pointer.
uintptr_t get_flag( queuing_rw_mutex::scoped_lock* ptr ) { return uintptr_t(ptr) & FLAG; }
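Both get_flag and the tricky pointer operators depend on alignment: a scoped_lock is at least two-byte aligned, so the low-order bit of its address is always zero and can carry one bit of bookkeeping. A self-contained illustration of the trick (node, tag, and untag are hypothetical names, not part of this file):

    #include <cassert>
    #include <cstdint>

    struct node { int payload; };                  // stand-in for scoped_lock

    static const uintptr_t FLAG = 0x1;             // mask for the low-order bit

    node*     tag(node* p)      { return reinterpret_cast<node*>( reinterpret_cast<uintptr_t>(p) | FLAG ); }
    node*     untag(node* p)    { return reinterpret_cast<node*>( reinterpret_cast<uintptr_t>(p) & ~FLAG ); }
    uintptr_t get_flag(node* p) { return reinterpret_cast<uintptr_t>(p) & FLAG; }

    int main() {
        node n = { 42 };
        assert( get_flag(&n) == 0 );               // aligned address: the bit is free
        node* marked = tag(&n);
        assert( get_flag(marked) == FLAG );        // the mark travels with the pointer
        assert( untag(marked)->payload == 42 );    // strip the mark before dereferencing
        return 0;
    }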
// scoped_lock::acquire(m, write) begins by checking reentrancy:
__TBB_ASSERT( !my_mutex, "scoped_lock is already holding a mutex" );
// Reader path of scoped_lock::acquire():
bool sync_prepare_done = false;
if( pred ) {
    unsigned short pred_state;
    if( uintptr_t(pred) & FLAG ) {
        // Only possible if pred is an upgrading reader signalling us to wait.
        pred_state = STATE_UPGRADE_WAITING;
        pred = tricky_pointer(pred) & ~FLAG;
    } else {
        pred_state = pred->my_state.compare_and_swap<tbb::acquire>(STATE_READER_UNBLOCKNEXT, STATE_READER);
    }
    ...
    sync_prepare_done = true;   // a sync_prepare notification was issued while blocking
}
if( !sync_prepare_done )
    ITT_NOTIFY(sync_prepare, my_mutex);
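The queue discipline behind acquire() is the MCS idea: an arriving request atomically swaps itself into q_tail, links itself behind the previous tail, and then spins only on its own my_going byte, so waiting generates no cross-thread cache traffic. A compressed sketch of the writer-arrival path, using the names documented on this page and eliding the reader and upgrade states:

    // Sketch of scoped_lock::acquire(m, /*write=*/true); not the full routine.
    my_mutex = &m;
    __TBB_store_relaxed(my_prev,  (scoped_lock*)0);
    __TBB_store_relaxed(my_next,  (scoped_lock*)0);
    __TBB_store_relaxed(my_going, 0);
    my_state = STATE_WRITER;

    scoped_lock* pred = m.q_tail.fetch_and_store<tbb::release>(this);   // enqueue
    if( pred ) {
        __TBB_store_with_release(pred->my_next, this);   // link behind the old tail
        spin_wait_until_eq(my_going, 1);                 // local-only spinning
    }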
// scoped_lock::try_acquire(): never block; fail if anyone is already queued.
__TBB_ASSERT( !my_mutex, "scoped_lock is already holding a mutex" );
if( load<relaxed>(m.q_tail) )
    return false;   // the mutex is busy
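Because try_acquire() refuses whenever q_tail is non-NULL, it never enqueues and never blocks. An illustrative call site (the surrounding function and the protected data are hypothetical):

    #include "tbb/queuing_rw_mutex.h"

    tbb::queuing_rw_mutex mutex;

    void poll_shared_state() {
        tbb::queuing_rw_mutex::scoped_lock lock;          // constructed without acquiring
        if( lock.try_acquire(mutex, /*write=*/false) ) {  // non-blocking reader attempt
            // ... read the protected data ...
            lock.release();
        } else {
            // The mutex was busy; retry later instead of waiting in the queue.
        }
    }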
// scoped_lock::release(), writer path: with no successor visible, try to
// empty the queue by swinging q_tail from this back to NULL.
if( this == my_mutex->q_tail.compare_and_swap<tbb::release>(NULL, this) ) {
    ...   // this was the only item in the queue, which is now empty
}
...
// Successor is waiting for an upgrade: rearrange under the internal lock.
acquire_internal_lock();
...
unblock_or_wait_on_internal_lock(get_flag(tmp));
...
// Reader path: if the predecessor's internal lock cannot be taken, mark
// my_prev with FLAG so the predecessor cannot unlink us concurrently.
tmp = tricky_pointer::compare_and_swap<tbb::release>(&my_prev, pred, tricky_pointer(pred) | FLAG );
if( !(uintptr_t(tmp) & FLAG) ) {
    ...   // wait for the predecessor to update my_prev, then release its lock
}
...
acquire_internal_lock();
...
// Unlink: l_next->my_prev = pred, protected against a concurrently set FLAG.
tmp = tricky_pointer::fetch_and_store<tbb::release>(&(l_next->my_prev), pred);
...
// No predecessor: take our own internal lock, then retire or hand off.
acquire_internal_lock();
...
if( this != my_mutex->q_tail.compare_and_swap<tbb::release>(NULL, this) ) {
    ...   // a successor is mid-enqueue; wait for my_next to be linked
}
...
tmp = tricky_pointer::fetch_and_store<tbb::release>(&(n->my_prev), NULL);
...
unblock_or_wait_on_internal_lock(get_flag(tmp));
// scoped_lock::downgrade_to_reader(): with no successor linked yet, check
// whether this request is still the tail of the queue.
if( this == my_mutex->q_tail.load<full_fence>() ) {
    ...
}
// scoped_lock::upgrade_to_writer(): tag my_next with FLAG so the successor
// cannot unlink itself while the queue is being rearranged.
acquire_internal_lock();
...
n = tricky_pointer::fetch_and_add<tbb::acquire>(&my_next, FLAG);
unsigned short n_state = n->my_state;
...   // a successor blocked by our state is unblocked here
tmp = tricky_pointer::fetch_and_store<tbb::release>(&(n->my_prev), this);
unblock_or_wait_on_internal_lock(get_flag(tmp));
release_internal_lock();
...
// Mark my_prev as in use (the same protocol as in the reader unlock path).
pred = tricky_pointer::fetch_and_add<tbb::acquire>(&my_prev, FLAG);
if( pred ) {
    bool success = pred->try_acquire_internal_lock();
    if( !success ) {
        tmp = tricky_pointer::compare_and_swap<tbb::release>(&my_prev, pred, tricky_pointer(pred)|FLAG );
        if( uintptr_t(tmp) & FLAG ) {
            ...   // the predecessor is mid-update; wait until my_prev changes
        } else {
            ...
            pred->release_internal_lock();
        }
    } else {
        ...
        pred->release_internal_lock();
    }
}
...
// Finally wait for the predecessor to finish using this node's fields.
wait_for_release_of_internal_lock();
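Taken together, the members documented below support the canonical read-mostly pattern: acquire as a reader and upgrade only when a write proves necessary. upgrade_to_writer() returns false when the lock had to be temporarily released to perform the upgrade, so state observed under the read lock must be revalidated. A sketch of that pattern (Table, Key, and their methods are hypothetical stand-ins):

    #include "tbb/queuing_rw_mutex.h"

    struct Key { int id; };
    struct Table {
        bool is_stale(const Key&) const;   // hypothetical
        void refresh(const Key&);          // hypothetical
    };

    tbb::queuing_rw_mutex table_mutex;

    void refresh_if_stale(Table& table, const Key& key) {
        tbb::queuing_rw_mutex::scoped_lock lock(table_mutex, /*write=*/false);
        if( table.is_stale(key) ) {                 // observed under the read lock
            if( !lock.upgrade_to_writer() ) {
                // The lock was released and re-acquired during the upgrade;
                // another thread may have refreshed the entry in the meantime.
                if( !table.is_stale(key) ) return;
            }
            table.refresh(key);                     // now holding the write lock
            lock.downgrade_to_reader();             // let other readers proceed
        }
    }   // the scoped_lock destructor releases the mutex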
 
T * operator|(word operand2) const
 
void unblock_or_wait_on_internal_lock(uintptr_t)
A helper function: waits for the internal lock to be released if the flag argument is set, otherwise releases the lock itself.
 
const unsigned char RELEASED
Value of my_internal_lock when the internal lock is free.
 
static const tricky_pointer::word FLAG
Mask for low order bit of a pointer.
 
void acquire(queuing_rw_mutex &m, bool write=true)
Acquire lock on given mutex.
 
tricky_atomic_pointer(T *&original)
 
static T * fetch_and_store(T *volatile *location, T *value)
 
scoped_lock *__TBB_atomic my_next
The pointer to the next competitor for a mutex.
 
void spin_wait_while_eq(const volatile T &location, U value)
Spin WHILE the value of the variable is equal to a given value.
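The two spin-wait helpers are complementary loops around a backoff object: one spins while the location equals a value, the other until it does. A minimal illustration of the pair using std::atomic (TBB's versions take volatile references and pause with its backoff class):

    #include <atomic>

    template<typename T, typename U>
    void spin_wait_while_eq(const std::atomic<T>& location, U value) {
        while( location.load(std::memory_order_acquire) == value ) {
            // a production version would pause with exponential backoff here
        }
    }

    template<typename T, typename U>
    void spin_wait_until_eq(const std::atomic<T>& location, U value) {
        while( location.load(std::memory_order_acquire) != value ) {
            // likewise
        }
    }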
 
void __TBB_Pause(int32_t)
 
const unsigned char ACQUIRED
Value of my_internal_lock when the internal lock is held.
 
unsigned char my_internal_lock
A tiny internal lock.
 
Queuing reader-writer mutex with local-only spinning.
 
void release_internal_lock()
Release the internal lock.
 
void wait_for_release_of_internal_lock()
Wait for internal lock to be released.
 
void __TBB_store_relaxed(volatile T &location, V value)
 
void spin_wait_until_eq(const volatile T &location, const U value)
Spin UNTIL the value of the variable is equal to a given value.
 
atomic< scoped_lock * > q_tail
The last competitor requesting the lock.
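Because q_tail always names the most recent arrival, the releasing thread can retire from an otherwise empty queue with a single compare-and-swap: if q_tail still points at this node, swing it back to NULL; if the CAS fails, a successor has enqueued but may not have linked itself yet. A sketch of that step as it appears in release():

    if( this == my_mutex->q_tail.compare_and_swap<tbb::release>(NULL, this) ) {
        // We were the only queued request; the mutex is now free.
    } else {
        // A successor swapped itself into q_tail but has not set my_next yet:
        spin_wait_while_eq( my_next, (scoped_lock*)NULL );
    }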
 
void __TBB_store_with_release(volatile T &location, V value)
 
tricky_atomic_pointer< queuing_rw_mutex::scoped_lock > tricky_pointer
 
static T * fetch_and_add(T *volatile *location, word addend)
 
atomic< state_t > my_state
State of the request: reader, writer, active reader, or other service states.
 
atomic_selector< sizeof(T *)>::word word
 
bool upgrade_to_writer()
Upgrade reader to become a writer.
 
unsigned char __TBB_atomic my_going
The local spin-wait variable.
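Local-only spinning means each waiter loops on its own my_going byte, so the spin stays in the waiter's cache line; the previous holder publishes ownership by storing 1 there with release semantics. Sketched with the helpers on this page:

    spin_wait_until_eq(my_going, 1);            // waiter: spins on its own node

    __TBB_store_with_release(n->my_going, 1);   // releaser: hand the mutex to successor n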
 
A view of a T* with additional functionality for twiddling low-order bits.
 
#define __TBB_control_consistency_helper()
 
bool try_acquire(queuing_rw_mutex &m, bool write=true)
Acquire lock on the given mutex if it is free (i.e., non-blocking).
 
bool try_acquire_internal_lock()
Try to acquire the internal lock.
 
#define _T(string_literal)
Standard Windows-style macro to mark up string literals.
 
state_t_flags
Flag bits in a state_t that specify information about a locking request.
 
void release()
Release lock.
 
filter_t< T, U > operator &(const filter_t< T, V > &left, const filter_t< V, U > &right)
 
The scoped locking pattern.
 
Base class for types that should not be copied or assigned.
 
Class that implements exponential backoff.
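The backoff class implements the usual pause-doubling policy: spin with a hardware pause instruction, doubling the iteration count each round, and yield to the OS scheduler once the count passes a threshold. A minimal sketch of that policy (the class name and threshold constant are assumptions, not TBB's exact code):

    #include <thread>
    #if defined(__i386__) || defined(__x86_64__)
    #include <immintrin.h>   // _mm_pause
    #endif

    class exponential_backoff {
        static const int LOOPS_BEFORE_YIELD = 16;   // assumed threshold
        int count;
    public:
        exponential_backoff() : count(1) {}
        void pause() {
            if( count <= LOOPS_BEFORE_YIELD ) {
                for( int i = 0; i < count; ++i )
    #if defined(__i386__) || defined(__x86_64__)
                    _mm_pause();                    // spin hint to the core
    #else
                    { }                             // no-op elsewhere
    #endif
                count *= 2;                         // double the spin next time
            } else {
                std::this_thread::yield();          // stop burning cycles
            }
        }
    };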
 
void __TBB_EXPORTED_METHOD internal_construct()
 
#define ITT_SYNC_CREATE(obj, type, name)
 
bool downgrade_to_reader()
Downgrade writer to become a reader.
 
#define __TBB_ASSERT(predicate, comment)
No-op version of __TBB_ASSERT.
 
#define ITT_NOTIFY(name, obj)
 
T __TBB_load_relaxed(const volatile T &location)
 
atomic< T > & as_atomic(T &t)
 
T __TBB_load_with_acquire(const volatile T &location)
 
void acquire_internal_lock()
Acquire the internal lock.
 
static T * compare_and_swap(T *volatile *location, T *value, T *comparand)
 
tricky_atomic_pointer(T *volatile &original)
 
scoped_lock *__TBB_atomic my_prev
The pointer to the previous competitor for a mutex.
 
uintptr_t get_flag(queuing_rw_mutex::scoped_lock *ptr)
Returns the low-order flag bit of the given pointer.