30     __TBB_ASSERT( !this->mutex, "scoped_lock is already holding a mutex");
    60     __TBB_ASSERT( !this->mutex, "scoped_lock is already holding a mutex");
    87         if( this == mutex->q_tail.compare_and_swap<tbb::release>(NULL, this) ) {
 atomic< scoped_lock * > q_tail
The last competitor requesting the lock.
 
Queuing mutex with local-only spinning.
 
void spin_wait_while_eq(const volatile T &location, U value)
Spin WHILE the value of the variable is equal to a given value.
 
void __TBB_EXPORTED_METHOD acquire(queuing_mutex &m)
Acquire lock on given mutex.
 
void __TBB_store_with_release(volatile T &location, V value)
 
void __TBB_EXPORTED_METHOD release()
Release lock.
 
The scoped locking pattern.
 
void const char const char int ITT_FORMAT __itt_group_sync x void const char ITT_FORMAT __itt_group_sync s void ITT_FORMAT __itt_group_sync p void ITT_FORMAT p sync_releasing
 
#define __TBB_control_consistency_helper()
 
#define _T(string_literal)
Standard Windows style macro to markup the string literals.
 
bool __TBB_EXPORTED_METHOD try_acquire(queuing_mutex &m)
Acquire lock on given mutex if free (i.e. non-blocking).
 
#define ITT_SYNC_CREATE(obj, type, name)
 
#define __TBB_ASSERT(predicate, comment)
No-op version of __TBB_ASSERT.
 
#define ITT_NOTIFY(name, obj)
 
scoped_lock * next
The pointer to the next competitor for a mutex.
 
T __TBB_load_with_acquire(const volatile T &location)
 
void __TBB_EXPORTED_METHOD internal_construct()