atomic< scoped_lock * > q_tail
The last competitor requesting the lock.
 
scoped_lock(queuing_mutex &m)
Acquire lock on given mutex.
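A minimal usage sketch of this acquire-on-construction form; the mutex, counter, and function names below are illustrative, not part of the header.

    #include "tbb/queuing_mutex.h"

    tbb::queuing_mutex counter_mutex;   // illustrative shared mutex
    long counter = 0;                   // illustrative shared data

    void increment_counter() {
        // The constructor acquires counter_mutex; the destructor releases it
        // when `lock` leaves scope, even if an exception is thrown.
        tbb::queuing_mutex::scoped_lock lock(counter_mutex);
        ++counter;
    }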
 
static const bool is_fair_mutex
 
scoped_lock()
Construct lock that has not acquired a mutex.
 
Queuing mutex with local-only spinning.
 
#define __TBB_DEFINE_PROFILING_SET_NAME(sync_object_type)
 
void initialize()
Initialize fields to mean "no lock held".
 
void __TBB_EXPORTED_METHOD acquire(queuing_mutex &m)
Acquire lock on given mutex.
 
queuing_mutex()
Construct unacquired mutex.
 
~scoped_lock()
Release lock (if lock is held).
 
void __TBB_EXPORTED_METHOD release()
Release lock.
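A sketch of the deferred form, assuming the same illustrative names: default-construct the scoped_lock, then pair acquire() with an early release() so the non-critical tail of the function runs outside the lock.

    #include "tbb/queuing_mutex.h"

    tbb::queuing_mutex data_mutex;   // illustrative
    int shared_value = 0;            // illustrative

    int update_and_report() {
        tbb::queuing_mutex::scoped_lock lock;  // holds no mutex yet
        lock.acquire(data_mutex);              // block until the lock is granted
        int snapshot = ++shared_value;
        lock.release();                        // give up the lock before non-critical work
        // ... work that does not touch shared_value ...
        return snapshot;
    }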
 
The scoped locking pattern.
 
uintptr_t going
The local spin-wait variable.
 
static const bool is_recursive_mutex
 
static const bool is_rw_mutex
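The three traits can be checked at compile time: queuing_mutex is fair, but neither recursive nor a reader-writer mutex. A sketch, assuming C++11 static_assert is available:

    #include "tbb/queuing_mutex.h"

    static_assert( tbb::queuing_mutex::is_fair_mutex,       "queuing_mutex is fair" );
    static_assert( !tbb::queuing_mutex::is_recursive_mutex, "but not recursive" );
    static_assert( !tbb::queuing_mutex::is_rw_mutex,        "and not a reader-writer mutex" );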
 
Base class for types that should not be copied or assigned.
 
bool __TBB_EXPORTED_METHOD try_acquire(queuing_mutex &m)
Acquire lock on given mutex if free (i.e. non-blocking).
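A sketch of the non-blocking form; try_acquire returns true only if the lock was obtained, and the names and fallback behaviour here are illustrative.

    #include <cstdio>
    #include "tbb/queuing_mutex.h"

    tbb::queuing_mutex log_mutex;   // illustrative

    bool try_log(const char* msg) {
        tbb::queuing_mutex::scoped_lock lock;
        if (lock.try_acquire(log_mutex)) {
            std::puts(msg);   // stand-in for writing to a shared log
            return true;      // lock released when `lock` is destroyed
        }
        return false;         // lock was busy; caller may retry or skip the message
    }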
 
#define __TBB_EXPORTED_METHOD
 
queuing_mutex * mutex
The pointer to the mutex owned, or NULL if not holding a mutex.
 
void poison_pointer(T *__TBB_atomic &)
 
scoped_lock * next
The pointer to the next competitor for a mutex.
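Taken together, q_tail, next, going, and mutex describe an MCS-style queue: each waiter appends itself at q_tail, spins only on its own going flag (the "local-only spinning" of this mutex), and on release hands the lock to its next competitor. The sketch below illustrates only that protocol; it is not TBB's implementation (which adds backoff, instrumentation, and exception safety) and substitutes std::atomic and hypothetical names for the real types.

    #include <atomic>

    struct sketch_mutex;   // hypothetical stand-ins for queuing_mutex / scoped_lock

    struct sketch_lock {
        sketch_mutex*             mutex = nullptr;   // mutex owned, or nullptr
        std::atomic<sketch_lock*> next{nullptr};     // next competitor in the queue
        std::atomic<unsigned>     going{0};          // local spin-wait variable

        void acquire(sketch_mutex& m);
        void release();
    };

    struct sketch_mutex {
        std::atomic<sketch_lock*> q_tail{nullptr};   // last competitor requesting the lock
    };

    void sketch_lock::acquire(sketch_mutex& m) {
        mutex = &m;
        next.store(nullptr, std::memory_order_relaxed);
        going.store(0, std::memory_order_relaxed);
        // Become the new tail of the queue.
        sketch_lock* pred = m.q_tail.exchange(this, std::memory_order_acq_rel);
        if (pred) {
            // Someone is ahead of us: link in behind them and spin on OUR flag only.
            pred->next.store(this, std::memory_order_release);
            while (going.load(std::memory_order_acquire) == 0) { /* spin locally */ }
        }
    }

    void sketch_lock::release() {
        sketch_lock* succ = next.load(std::memory_order_acquire);
        if (!succ) {
            // No successor linked in yet: if we are still the tail, the queue empties.
            sketch_lock* expected = this;
            if (mutex->q_tail.compare_exchange_strong(expected, nullptr,
                                                      std::memory_order_acq_rel)) {
                mutex = nullptr;
                return;
            }
            // A successor swapped itself into q_tail but has not linked yet; wait.
            while (!(succ = next.load(std::memory_order_acquire))) { /* spin */ }
        }
        succ->going.store(1, std::memory_order_release);   // hand the lock over
        mutex = nullptr;
    }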
 
void __TBB_EXPORTED_METHOD internal_construct()