Intel(R) Threading Building Blocks Doxygen Documentation  version 4.2.3
tbb_machine.h File Reference
#include "tbb_stddef.h"
Include dependency graph for tbb_machine.h: shows which files directly or indirectly include this file.

Go to the source code of this file.

Classes

struct  tbb::internal::machine_load_store< T, S >
 
struct  tbb::internal::machine_load_store_relaxed< T, S >
 
struct  tbb::internal::machine_load_store_seq_cst< T, S >
 
struct  tbb::internal::atomic_selector< S >
 
struct  tbb::internal::atomic_selector< 1 >
 
struct  tbb::internal::atomic_selector< 2 >
 
struct  tbb::internal::atomic_selector< 4 >
 
struct  tbb::internal::atomic_selector< 8 >
 
class  tbb::internal::atomic_backoff
 Class that implements exponential backoff. More...
 
struct  tbb::internal::type_with_alignment< N >
 
struct  tbb::internal::type_with_alignment< 1 >
 
struct  tbb::internal::type_with_alignment< 2 >
 
struct  tbb::internal::type_with_alignment< 4 >
 
struct  tbb::internal::type_with_alignment< 8 >
 
struct  tbb::internal::type_with_alignment< 16 >
 
struct  tbb::internal::type_with_alignment< 32 >
 
struct  tbb::internal::type_with_alignment< 64 >
 
struct  tbb::internal::reverse< T >
 

Namespaces

 tbb
 The graph class.
 
 tbb::internal
 Identifiers declared inside namespace internal should never be used directly by client code.
 

Macros

#define __TBB_MACHINE_DEFINE_STORE8_GENERIC_FENCED(M)
 
#define __TBB_MACHINE_DEFINE_LOAD8_GENERIC_FENCED(M)
 
#define __TBB_ENDIAN_UNSUPPORTED   -1
 
#define __TBB_ENDIAN_LITTLE   0
 
#define __TBB_ENDIAN_BIG   1
 
#define __TBB_ENDIAN_DETECT   2
 
#define __TBB_64BIT_ATOMICS   1
 
#define __TBB_FetchAndAddWrelease(P, V)   __TBB_FetchAndAddW(P,V)
 
#define __TBB_FetchAndIncrementWacquire(P)   __TBB_FetchAndAddW(P,1)
 
#define __TBB_FetchAndDecrementWrelease(P)   __TBB_FetchAndAddW(P,(-1))
 
#define __TBB_ENDIANNESS   __TBB_ENDIAN_DETECT
 
#define __TBB_DefineTypeWithAlignment(PowerOf2)
 
#define __TBB_alignof(T)   alignof(T)
 
#define __TBB_TypeWithAlignmentAtLeastAsStrict(T)   tbb::internal::type_with_alignment<__TBB_alignof(T)>
 
#define __TBB_load_acquire   __TBB_load_with_acquire
 
#define __TBB_store_release   __TBB_store_with_release
 
#define __TBB_UnlockByte(addr)   __TBB_store_with_release((addr),0)
 

Typedefs

typedef unsigned char __TBB_Flag
 
typedef __TBB_atomic __TBB_Flag __TBB_atomic_flag
 

Functions

void __TBB_Pause (int32_t)
 
void tbb::atomic_fence ()
 Sequentially consistent full memory fence. More...
 
template<typename T , typename U >
void tbb::internal::spin_wait_while_eq (const volatile T &location, U value)
 Spin WHILE the value of the variable is equal to a given value. More...
 
template<typename T , typename U >
void tbb::internal::spin_wait_until_eq (const volatile T &location, const U value)
 Spin UNTIL the value of the variable is equal to a given value (a usage sketch follows this list). More...
 
template<typename predicate_type >
void tbb::internal::spin_wait_while (predicate_type condition)
 
template<typename T >
T tbb::internal::__TBB_MaskedCompareAndSwap (volatile T *const ptr, const T value, const T comparand)
 
template<size_t S, typename T >
T tbb::internal::__TBB_CompareAndSwapGeneric (volatile void *ptr, T value, T comparand)
 
template<>
int8_t tbb::internal::__TBB_CompareAndSwapGeneric< 1, int8_t > (volatile void *ptr, int8_t value, int8_t comparand)
 
template<>
int16_t tbb::internal::__TBB_CompareAndSwapGeneric< 2, int16_t > (volatile void *ptr, int16_t value, int16_t comparand)
 
template<>
int32_t tbb::internal::__TBB_CompareAndSwapGeneric< 4, int32_t > (volatile void *ptr, int32_t value, int32_t comparand)
 
template<>
int64_t tbb::internal::__TBB_CompareAndSwapGeneric< 8, int64_t > (volatile void *ptr, int64_t value, int64_t comparand)
 
template<size_t S, typename T >
T tbb::internal::__TBB_FetchAndAddGeneric (volatile void *ptr, T addend)
 
template<size_t S, typename T >
T tbb::internal::__TBB_FetchAndStoreGeneric (volatile void *ptr, T value)
 
template<typename T >
T tbb::internal::__TBB_load_with_acquire (const volatile T &location)
 
template<typename T , typename V >
void tbb::internal::__TBB_store_with_release (volatile T &location, V value)
 
void tbb::internal::__TBB_store_with_release (volatile size_t &location, size_t value)
 Overload that exists solely to avoid /Wp64 warnings. More...
 
template<typename T >
T tbb::internal::__TBB_load_full_fence (const volatile T &location)
 
template<typename T , typename V >
void tbb::internal::__TBB_store_full_fence (volatile T &location, V value)
 
void tbb::internal::__TBB_store_full_fence (volatile size_t &location, size_t value)
 Overload that exists solely to avoid /Wp64 warnings. More...
 
template<typename T >
T tbb::internal::__TBB_load_relaxed (const volatile T &location)
 
template<typename T , typename V >
void tbb::internal::__TBB_store_relaxed (volatile T &location, V value)
 
void tbb::internal::__TBB_store_relaxed (volatile size_t &location, size_t value)
 Overload that exists solely to avoid /Wp64 warnings. More...
 
 tbb::internal::__TBB_DefineTypeWithAlignment (8) __TBB_DefineTypeWithAlignment(16) __TBB_DefineTypeWithAlignment(32) __TBB_DefineTypeWithAlignment(64) typedef __TBB_machine_type_with_alignment_64 __TBB_machine_type_with_strictest_alignment
 
intptr_t __TBB_Log2 (uintptr_t x)
 
void __TBB_AtomicOR (volatile void *operand, uintptr_t addend)
 
void __TBB_AtomicAND (volatile void *operand, uintptr_t addend)
 
bool __TBB_TryLockByte (__TBB_atomic_flag &flag)
 
__TBB_Flag __TBB_LockByte (__TBB_atomic_flag &flag)
 
unsigned char __TBB_ReverseByte (unsigned char src)
 
template<typename T >
T __TBB_ReverseBits (T src)
 

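The spin-wait helpers and atomic_backoff listed above combine into a simple publish/consume wait. Below is a minimal sketch under assumed names (ready, payload, producer and consumer are hypothetical, not part of TBB):

#include "tbb/tbb_machine.h"

static __TBB_atomic int ready = 0;   // hypothetical flag written by a producer thread
static int payload = 0;              // hypothetical data published before the flag

void producer() {
    payload = 42;
    // Release store: 'payload' becomes visible before 'ready' flips to 1.
    tbb::internal::__TBB_store_with_release( ready, 1 );
}

void consumer() {
    // Spins, pausing via atomic_backoff, until 'ready' compares equal to 1.
    tbb::internal::spin_wait_until_eq( ready, 1 );
    // Re-read with acquire semantics before touching the published data.
    if( tbb::internal::__TBB_load_with_acquire( ready ) == 1 ) {
        int observed = payload;
        (void)observed;
    }
}

Because the spin-wait helpers use atomic_backoff internally, the loop backs off exponentially and eventually yields the processor instead of spinning at full speed.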
Macro Definition Documentation

◆ __TBB_64BIT_ATOMICS

#define __TBB_64BIT_ATOMICS   1

Definition at line 283 of file tbb_machine.h.

◆ __TBB_alignof

#define __TBB_alignof (   T)    alignof(T)

Definition at line 763 of file tbb_machine.h.

◆ __TBB_DefineTypeWithAlignment

#define __TBB_DefineTypeWithAlignment (   PowerOf2)
Value:
struct alignas(PowerOf2) __TBB_machine_type_with_alignment_##PowerOf2 { \
uint32_t member[PowerOf2/sizeof(uint32_t)]; \
};

Definition at line 759 of file tbb_machine.h.
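For a concrete power of two, the expansion is easy to read off. This is what __TBB_DefineTypeWithAlignment(8) produces in the alignas-based variant shown above:

#include <cstdint>

// Expansion of __TBB_DefineTypeWithAlignment(8): an 8-byte-aligned POD type
// whose size is also 8 bytes (two uint32_t members).
struct alignas(8) __TBB_machine_type_with_alignment_8 {
    uint32_t member[8 / sizeof(uint32_t)];
};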

◆ __TBB_ENDIAN_BIG

#define __TBB_ENDIAN_BIG   1

Definition at line 184 of file tbb_machine.h.

◆ __TBB_ENDIAN_DETECT

#define __TBB_ENDIAN_DETECT   2

Definition at line 185 of file tbb_machine.h.

◆ __TBB_ENDIAN_LITTLE

#define __TBB_ENDIAN_LITTLE   0

Definition at line 183 of file tbb_machine.h.

◆ __TBB_ENDIAN_UNSUPPORTED

#define __TBB_ENDIAN_UNSUPPORTED   -1

Definition at line 182 of file tbb_machine.h.

◆ __TBB_ENDIANNESS

#define __TBB_ENDIANNESS   __TBB_ENDIAN_DETECT

Definition at line 414 of file tbb_machine.h.

◆ __TBB_FetchAndAddWrelease

#define __TBB_FetchAndAddWrelease (   P, V )    __TBB_FetchAndAddW(P,V)

Definition at line 309 of file tbb_machine.h.

◆ __TBB_FetchAndDecrementWrelease

#define __TBB_FetchAndDecrementWrelease (   P)    __TBB_FetchAndAddW(P,(-1))

Definition at line 311 of file tbb_machine.h.

◆ __TBB_FetchAndIncrementWacquire

#define __TBB_FetchAndIncrementWacquire (   P)    __TBB_FetchAndAddW(P,1)

Definition at line 310 of file tbb_machine.h.

◆ __TBB_load_acquire

#define __TBB_load_acquire   __TBB_load_with_acquire

Definition at line 856 of file tbb_machine.h.

◆ __TBB_MACHINE_DEFINE_LOAD8_GENERIC_FENCED

#define __TBB_MACHINE_DEFINE_LOAD8_GENERIC_FENCED (   M)
Value:
inline int64_t __TBB_machine_generic_load8##M(const volatile void *ptr) { \
    /* Comparand and new value may be anything, they only must be equal, and */ \
    /* the value should have a low probability to be actually found in 'location'.*/ \
    const int64_t anyvalue = 2305843009213693951LL; \
    return __TBB_machine_cmpswp8##M(const_cast<volatile void *>(ptr),anyvalue,anyvalue); \
}

Definition at line 173 of file tbb_machine.h.
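The generated load emulates an atomic, fenced 64-bit read on targets that only provide an 8-byte compare-and-swap: passing the same sentinel as both comparand and new value means memory is never changed, while the CAS still returns the current contents with the fence semantics of __TBB_machine_cmpswp8##M. Roughly, for an illustrative suffix full_fence the macro would generate:

// Sketch of the expansion for M = full_fence (the suffix is an illustrative choice).
inline int64_t __TBB_machine_generic_load8full_fence( const volatile void *ptr ) {
    // Any value works as long as comparand == new value; the sentinel is merely
    // unlikely to match *ptr, so the common path stores nothing.
    const int64_t anyvalue = 2305843009213693951LL;
    // If *ptr != anyvalue: the CAS fails, nothing is written, the current value is returned.
    // If *ptr == anyvalue: the same value is written back, so the contents are unchanged.
    return __TBB_machine_cmpswp8full_fence( const_cast<volatile void *>(ptr), anyvalue, anyvalue );
}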

◆ __TBB_MACHINE_DEFINE_STORE8_GENERIC_FENCED

#define __TBB_MACHINE_DEFINE_STORE8_GENERIC_FENCED (   M)
Value:
inline void __TBB_machine_generic_store8##M(volatile void *ptr, int64_t value) { \
    for(;;) { \
        int64_t result = *(volatile int64_t *)ptr; \
        if( __TBB_machine_cmpswp8##M(ptr,value,result)==result ) break; \
    } \
}

Definition at line 165 of file tbb_machine.h.
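The matching store is a plain compare-and-swap loop: read the current 64-bit contents, try to swap in the new value, and retry until the CAS observes exactly the value that was read. For the same illustrative suffix:

// Sketch of the expansion for M = full_fence (suffix chosen for illustration).
inline void __TBB_machine_generic_store8full_fence( volatile void *ptr, int64_t value ) {
    for(;;) {
        int64_t result = *(volatile int64_t *)ptr;     // current contents
        // Succeeds only if *ptr still holds 'result'; otherwise another thread
        // intervened and the loop retries with the freshly observed value.
        if( __TBB_machine_cmpswp8full_fence( ptr, value, result ) == result ) break;
    }
}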

◆ __TBB_store_release

#define __TBB_store_release   __TBB_store_with_release

Definition at line 857 of file tbb_machine.h.

◆ __TBB_TypeWithAlignmentAtLeastAsStrict

#define __TBB_TypeWithAlignmentAtLeastAsStrict (   T)    tbb::internal::type_with_alignment<__TBB_alignof(T)>

Definition at line 816 of file tbb_machine.h.
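The macro maps a type T to a trivially constructible type whose alignment is at least alignof(T), which makes it handy for carving out suitably aligned raw storage. A minimal sketch with hypothetical names (MyNode, NodeStorage):

#include "tbb/tbb_machine.h"

struct MyNode { void* next; long key; };   // illustrative payload type

// Raw storage for one MyNode: the 'aligner' member forces alignment at least
// as strict as MyNode's, while 'bytes' provides the space itself.
union NodeStorage {
    __TBB_TypeWithAlignmentAtLeastAsStrict(MyNode) aligner;
    char bytes[sizeof(MyNode)];
};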

◆ __TBB_UnlockByte

#define __TBB_UnlockByte (   addr)    __TBB_store_with_release((addr),0)

Definition at line 927 of file tbb_machine.h.

Typedef Documentation

◆ __TBB_atomic_flag

Definition at line 910 of file tbb_machine.h.

◆ __TBB_Flag

typedef unsigned char __TBB_Flag

Definition at line 908 of file tbb_machine.h.

Function Documentation

◆ __TBB_AtomicAND()

void __TBB_AtomicAND ( volatile void * operand, uintptr_t addend )
inline

Definition at line 888 of file tbb_machine.h.

{
    for( tbb::internal::atomic_backoff b;;b.pause() ) {
        uintptr_t tmp = *(volatile uintptr_t *)operand;
        uintptr_t result = __TBB_CompareAndSwapW(operand, tmp&addend, tmp);
        if( result==tmp ) break;
    }
}

References tbb::internal::atomic_backoff::pause().

Referenced by tbb::internal::clear_one_bit(), tbb::spin_rw_mutex_v3::internal_release_writer(), tbb::spin_rw_mutex_v3::scoped_lock::release(), and tbb::spin_rw_mutex_v3::unlock().
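A usage sketch: because the update is a CAS loop with backoff, only the masked bits change even under contention. The shared word and helper below are hypothetical, in the spirit of clear_one_bit():

static uintptr_t flags = 0xFFu;   // hypothetical shared bit-set

// Atomically clear bit 3 of 'flags' without disturbing concurrent updates
// to the other bits.
void clear_bit3() {
    __TBB_AtomicAND( &flags, ~(uintptr_t(1) << 3) );
}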


◆ __TBB_AtomicOR()

void __TBB_AtomicOR ( volatile void * operand, uintptr_t addend )
inline

Definition at line 878 of file tbb_machine.h.

{
    for( tbb::internal::atomic_backoff b;;b.pause() ) {
        uintptr_t tmp = *(volatile uintptr_t *)operand;
        uintptr_t result = __TBB_CompareAndSwapW(operand, tmp|addend, tmp);
        if( result==tmp ) break;
    }
}

References tbb::internal::atomic_backoff::pause().

Referenced by tbb::spin_rw_mutex_v3::internal_acquire_writer(), and tbb::internal::set_one_bit().
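The complementary sketch for setting a bit atomically, again with a hypothetical shared word, in the spirit of set_one_bit():

static uintptr_t state = 0;   // hypothetical shared bit-set

// Atomically set bit 3 of 'state'; concurrent writers of other bits are unaffected.
void set_bit3() {
    __TBB_AtomicOR( &state, uintptr_t(1) << 3 );
}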


◆ __TBB_LockByte()

__TBB_Flag __TBB_LockByte ( __TBB_atomic_flag &  flag)
inline

Definition at line 919 of file tbb_machine.h.

{
    tbb::internal::atomic_backoff backoff;
    while( !__TBB_TryLockByte(flag) ) backoff.pause();
    return 0;
}

References __TBB_TryLockByte(), and tbb::internal::atomic_backoff::pause().

Referenced by tbb::spin_mutex::scoped_lock::acquire(), tbb::spin_mutex::scoped_lock::internal_acquire(), tbb::internal::__TBB_InitOnce::lock(), tbb::spin_mutex::lock(), and tbb::spin_mutex::scoped_lock::scoped_lock().
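Together with __TBB_TryLockByte() and __TBB_UnlockByte this forms the byte lock used by spin_mutex. A minimal sketch with hypothetical names (my_lock, shared_counter):

static __TBB_atomic_flag my_lock = 0;   // hypothetical lock byte
static long shared_counter = 0;         // hypothetical protected data

void bump_counter() {
    __TBB_LockByte( my_lock );   // spin with exponential backoff until the byte is acquired
    ++shared_counter;            // critical section
    // __TBB_UnlockByte(my_lock) expands to exactly this release store of 0:
    tbb::internal::__TBB_store_with_release( my_lock, 0 );
}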


◆ __TBB_Log2()

intptr_t __TBB_Log2 ( uintptr_t  x)
inline

Definition at line 860 of file tbb_machine.h.

{
    if( x==0 ) return -1;
    intptr_t result = 0;

#if !defined(_M_ARM)
    uintptr_t tmp_;
    if( sizeof(x)>4 && (tmp_ = ((uint64_t)x)>>32) ) { x=tmp_; result += 32; }
#endif
    if( uintptr_t tmp = x>>16 ) { x=tmp; result += 16; }
    if( uintptr_t tmp = x>>8 )  { x=tmp; result += 8; }
    if( uintptr_t tmp = x>>4 )  { x=tmp; result += 4; }
    if( uintptr_t tmp = x>>2 )  { x=tmp; result += 2; }

    return (x&2)? result+1: result;
}

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::concurrent_unordered_base(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::get_parent(), tbb::internal::task_stream< num_priority_levels >::initialize(), tbb::interface5::internal::hash_map_base::insert_new_node(), tbb::interface5::concurrent_hash_map< Key, T, HashCompare, Allocator >::internal_equal_range(), tbb::interface5::concurrent_hash_map< Key, T, HashCompare, Allocator >::rehash(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::rehash(), tbb::interface5::concurrent_hash_map< Key, T, HashCompare, Allocator >::rehash_bucket(), tbb::interface5::internal::hash_map_base::segment_index_of(), tbb::internal::concurrent_vector_base_v3::segment_index_of(), and tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::segment_index_of().
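__TBB_Log2 returns the index of the highest set bit, i.e. floor(log2(x)) for x > 0 and -1 for x == 0. A few worked values as a quick check (assuming the header can be included directly):

#include <cassert>
#include <cstdint>
#include "tbb/tbb_machine.h"

void log2_examples() {
    assert( __TBB_Log2( 0 )  == -1 );               // no bit set: special-cased to -1
    assert( __TBB_Log2( 1 )  ==  0 );
    assert( __TBB_Log2( 40 ) ==  5 );               // 40 == 0b101000, highest set bit is bit 5
    assert( __TBB_Log2( uintptr_t(1) << 16 ) == 16 );
}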


◆ __TBB_Pause()

void __TBB_Pause ( int32_t  )
inline

Definition at line 331 of file tbb_machine.h.

{
    __TBB_Yield();
}

References __TBB_Yield.

Referenced by tbb::queuing_rw_mutex::scoped_lock::acquire_internal_lock(), tbb::internal::atomic_backoff::bounded_pause(), tbb::internal::atomic_backoff::pause(), and tbb::internal::prolonged_pause().


◆ __TBB_ReverseBits()

template<typename T >
T __TBB_ReverseBits ( T  src)

Definition at line 967 of file tbb_machine.h.

{
    T dst;
    unsigned char *original = (unsigned char *) &src;
    unsigned char *reversed = (unsigned char *) &dst;

    for( int i = sizeof(T)-1; i >= 0; i-- )
        reversed[i] = __TBB_ReverseByte( original[sizeof(T)-i-1] );

    return dst;
}

References __TBB_ReverseByte().

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::concurrent_unordered_base(), tbb::interface5::internal::concurrent_unordered_base< Traits >::const_range_type::set_midpoint(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::split_order_key_dummy(), and tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::split_order_key_regular().
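Reversing the byte order while bit-reversing each byte amounts to reversing all bits of the value, independent of endianness. A small worked example (again assuming direct inclusion of the header):

#include <cassert>
#include <cstdint>
#include "tbb/tbb_machine.h"

void reverse_bits_examples() {
    // 0x01 -> 0x80 within a single byte.
    assert( __TBB_ReverseBits( uint8_t(0x01) ) == uint8_t(0x80) );
    // 0x00F0 (bits 4..7 set) -> 0x0F00 (bits 8..11 set) for a 16-bit value.
    assert( __TBB_ReverseBits( uint16_t(0x00F0) ) == uint16_t(0x0F00) );
}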


◆ __TBB_ReverseByte()

unsigned char __TBB_ReverseByte ( unsigned char  src)
inline

Definition at line 961 of file tbb_machine.h.

Referenced by __TBB_ReverseBits().


◆ __TBB_TryLockByte()

bool __TBB_TryLockByte ( __TBB_atomic_flag &  flag)
inline

Definition at line 913 of file tbb_machine.h.

{
    return __TBB_machine_cmpswp1(&flag,1,0)==0;
}

References __TBB_machine_cmpswp1().

Referenced by __TBB_LockByte(), tbb::spin_mutex::scoped_lock::internal_try_acquire(), tbb::spin_mutex::scoped_lock::try_acquire(), and tbb::spin_mutex::try_lock().


Copyright © 2005-2020 Intel Corporation. All Rights Reserved.

Intel, Pentium, Intel Xeon, Itanium, Intel XScale and VTune are registered trademarks or trademarks of Intel Corporation or its subsidiaries in the United States and other countries.

* Other names and brands may be claimed as the property of others.