 21  #define __TBB_concurrent_queue_H
 28  #if defined(_MSC_VER) && defined(_Wp64)
 30  #pragma warning (disable: 4267)
 33  #define RECORD_EVENTS 0
118  static const size_t phi = 3;
150  #if _MSC_VER && !defined(__INTEL_COMPILER)
152  #pragma warning( push )
153  #pragma warning( disable: 4146 )
184  else if( tail&0x1 ) {
208  static_cast<concurrent_queue_base_v8&>(base).move_item( *p, index, item );
217  p->mask |= uintptr_t(1)<<index;
237  bool success = false;
240  if( p->mask & uintptr_t(1)<<index ) {
273  cur_page = cur_page->next;
281  cur_page->next = make_copy( base, srcp, 0, last_index, g_index, op_type );
282  cur_page = cur_page->next;
300  new_page->next = NULL;
302  for( ; begin_in_page!=end_in_page; ++begin_in_page, ++g_index )
303  if( new_page->mask & uintptr_t(1)<<begin_in_page ) {
305  base.copy_page_item( *new_page, begin_in_page, *src_page, begin_in_page );
308  static_cast<concurrent_queue_base_v8&>(base).move_page_item( *new_page, begin_in_page, *src_page, begin_in_page );
330  #if _MSC_VER && !defined(__INTEL_COMPILER)
331  #pragma warning( pop )
332  #endif // warning 4146 is back
344  my_capacity = size_t(-1)/(item_sz>1 ? item_sz : 2);
358  for( size_t i=0; i<nq; i++ )
377  bool sync_prepare_done = false;
381  if( !sync_prepare_done ) {
382  ITT_NOTIFY( sync_prepare, &sync_prepare_done );
383  sync_prepare_done = true;
402  if (slept == true) break;
408  ITT_NOTIFY( sync_acquired, &sync_prepare_done );
418  bool sync_prepare_done = false;
426  if( !sync_prepare_done ) {
428  sync_prepare_done = true;
447  if (slept == true) break;
454  } while( !r.choose(k).pop(dst,k,*this) );
484  } while( !r.choose( k ).pop( dst, k, *this ) );
520  __TBB_ASSERT( sizeof(ptrdiff_t)<=sizeof(size_t), NULL );
537  for( size_t i=0; i<nq; ++i ) {
566  "the source concurrent queue should not be concurrently modified." );
605  return (p->mask & uintptr_t(1)<<i)!=0;
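The mask operations in the fragments above (source lines 217, 240 and 605) implement per-page slot bookkeeping: bit i of page::mask records whether slot i holds a constructed item. A minimal, self-contained sketch of that bit-per-slot pattern, with hypothetical names, assuming items_per_page never exceeds the bit width of uintptr_t:

    #include <cstddef>
    #include <cstdint>

    // Hypothetical stand-in for concurrent_queue_base::page: one bit per slot.
    struct page_sketch {
        std::uintptr_t mask = 0;   // bit i set => slot i holds a valid item
    };

    // Mark slot 'index' as holding a constructed item (cf. p->mask |= uintptr_t(1)<<index).
    inline void mark_valid(page_sketch& p, std::size_t index) {
        p.mask |= std::uintptr_t(1) << index;
    }

    // Test whether slot 'index' holds a valid item (cf. p->mask & uintptr_t(1)<<index).
    inline bool is_valid(const page_sketch& p, std::size_t index) {
        return (p.mask & (std::uintptr_t(1) << index)) != 0;
    }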
void internal_insert_item(const void *src, copy_specifics op_type)
Enqueues item at tail of queue using specified operation (copy or move)
atomic< ticket > tail_counter
~micro_queue_pop_finalizer()
argument_integer_type modulo_power_of_two(argument_integer_type arg, divisor_integer_type divisor)
A function to compute arg modulo divisor where divisor is a power of 2.
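One common implementation of this helper, shown as a sketch under the power-of-two precondition rather than as the library's exact definition:

    #include <cassert>

    // Sketch: arg % divisor, when divisor is a power of two, reduces to a bit mask.
    template <typename ArgInt, typename DivInt>
    ArgInt modulo_power_of_two_sketch(ArgInt arg, DivInt divisor) {
        assert((divisor & (divisor - 1)) == 0 && "divisor must be a power of two");
        return arg & (divisor - 1);
    }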
bool __TBB_EXPORTED_METHOD internal_empty() const
Check if the queue is empty.
micro_queue & choose(ticket k)
void __TBB_EXPORTED_METHOD internal_set_capacity(ptrdiff_t capacity, size_t element_size)
Set the queue capacity.
page * make_copy(concurrent_queue_base &base, const page *src_page, size_t begin_in_page, size_t end_in_page, ticket &g_index, concurrent_queue_base::copy_specifics op_type)
const size_t offset_of_last
concurrent_monitor slots_avail
bool __TBB_EXPORTED_METHOD internal_push_if_not_full(const void *src)
Attempt to enqueue item onto queue using copy operation.
concurrent_queue_iterator_rep * my_rep
concurrent_queue over which we are iterating.
micro_queue_pop_finalizer(micro_queue &queue, concurrent_queue_base &b, ticket k, page *p)
void spin_wait_while_eq(const volatile T &location, U value)
Spin WHILE the value of the variable is equal to a given value.
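A simplified sketch of such a spin loop; the real helper pairs the test with the exponential-backoff class documented below, whereas this version merely yields once the wait grows long:

    #include <thread>

    // Sketch: spin while 'location' still equals 'value'; yields after a few
    // iterations instead of using an exponential-backoff pause.
    template <typename T, typename U>
    void spin_wait_while_eq_sketch(const volatile T& location, U value) {
        int spins = 0;
        while (location == value) {
            if (++spins > 16)
                std::this_thread::yield();   // back off once busy-waiting gets long
        }
    }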
A lock that occupies a single byte.
bool internal_insert_if_not_full(const void *src, copy_specifics op_type)
Attempts to enqueue at tail of queue using specified operation (copy or move)
pointer allocate(size_type n, const void *hint=0)
Allocate space for n objects, starting on a cache/sector line.
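A sketch of cache-line-aligned allocation using C++17 aligned operator new; this is not TBB's NFS_Allocate path, and the 128-byte figure stands in for NFS_MaxLineSize purely for illustration:

    #include <cstddef>
    #include <new>

    constexpr std::size_t cache_line_upper_bound = 128;  // assumed stand-in for NFS_MaxLineSize

    // Sketch: allocate raw space for n objects of size 'item_size', starting on a
    // cache-line boundary.
    void* allocate_cache_aligned(std::size_t n, std::size_t item_size) {
        return ::operator new(n * item_size, std::align_val_t(cache_line_upper_bound));
    }

    // Matching release for memory obtained from allocate_cache_aligned.
    void deallocate_cache_aligned(void* p) {
        ::operator delete(p, std::align_val_t(cache_line_upper_bound));
    }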
Exception for user-initiated abort.
static size_t index(ticket k)
Map ticket to an array index.
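Given phi = 3 from the listing above and a power-of-two n_queue, the mapping spreads consecutive tickets across the micro-queues. A sketch of the idea (n_queue taken as 8 here for illustration; the exact expression in the source may differ):

    #include <cstddef>

    typedef std::size_t ticket;

    static const std::size_t n_queue = 8;   // must be a power of two
    static const std::size_t phi = 3;       // approximately n_queue / golden ratio

    // Sketch: map ticket k to a micro-queue slot so that consecutive tickets
    // land in different micro-queues, reducing contention.
    static std::size_t index_sketch(ticket k) {
        return k * phi % n_queue;
    }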
Base class for types that should not be assigned.
ptrdiff_t __TBB_EXPORTED_METHOD internal_size() const
Get size of queue.
virtual void assign_and_destroy_item(void *dst, page &src, size_t index)=0
void pause()
Pause for a while.
atomic< ticket > head_counter
void * my_item
Pointer to current item.
void prepare_wait(thread_context &thr, uintptr_t ctx=0)
prepare wait by inserting 'thr' into the wait queue
bool is_aligned(T *pointer, uintptr_t alignment)
A function to check whether the passed pointer is aligned on a specific boundary.
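A sketch of the usual implementation, assuming alignment is a power of two:

    #include <cstdint>

    // Sketch: a pointer is aligned on 'alignment' (a power of two) iff its low bits are zero.
    template <typename T>
    bool is_aligned_sketch(T* pointer, std::uintptr_t alignment) {
        return (reinterpret_cast<std::uintptr_t>(pointer) & (alignment - 1)) == 0;
    }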
virtual void copy_item(page &dst, size_t index, const void *src)=0
void spin_wait_until_eq(const volatile T &location, const U value)
Spin UNTIL the value of the variable is equal to a given value.
ptrdiff_t my_capacity
Capacity of the queue.
void internal_assign(const concurrent_queue_base_v3 &src, copy_specifics op_type)
Assigns one queue to another using specified operation (copy or move)
size_t item_size
Size of an item.
void __TBB_EXPORTED_METHOD assign(const concurrent_queue_base_v3 &src)
copy internal representation
void __TBB_EXPORTED_METHOD move_content(concurrent_queue_base_v8 &src)
move items
static const size_t phi
Approximately n_queue/golden ratio.
Internal representation of a ConcurrentQueue.
void push(const void *item, ticket k, concurrent_queue_base &base, concurrent_queue_base::copy_specifics op_type)
Type-independent portion of concurrent_queue_iterator.
atomic< ticket > tail_counter
virtual void copy_page_item(page &dst, size_t dindex, const page &src, size_t sindex)=0
void __TBB_EXPORTED_METHOD assign(const concurrent_queue_iterator_base_v3 &i)
Assignment.
void __TBB_EXPORTED_METHOD internal_pop(void *dst)
Dequeue item from head of queue.
concurrent_queue_rep * my_rep
Internal representation.
bool get_item(void *&item, size_t k)
Set item to point to kth element. Return true if at end of queue or item is marked valid; false otherwise.
virtual page * allocate_page()=0
custom allocator
void notify(const P &predicate)
Notify waiting threads of the event that satisfies the given predicate.
concurrent_queue_iterator_base_v3()
Default constructor.
atomic< page * > tail_page
concurrent_queue_base & base
A queue using simple locking.
Represents acquisition of a mutex.
concurrent_queue_base::page page
concurrent_queue_iterator_rep(const concurrent_queue_base &queue, size_t offset_of_last_)
const size_t NFS_MaxLineSize
Compile-time constant that is upper bound on cache line/sector size.
bool __TBB_EXPORTED_METHOD internal_pop_if_present(void *dst)
Attempt to dequeue item from queue.
Base class for types that should not be copied or assigned.
void cancel_wait(thread_context &thr)
Cancel the wait. Removes the thread from the wait queue if not removed yet.
static void * static_invalid_page
static const size_t n_queue
Must be power of 2.
atomic< size_t > n_invalid_entries
virtual void deallocate_page(page *p)=0
custom de-allocator
concurrent_queue_base_v3 concurrent_queue_base
Class that implements exponential backoff.
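A minimal sketch of such exponential backoff; the threshold and the busy-wait body are assumptions, not the library's constants:

    #include <thread>

    // Sketch: spin with a doubling pause count, then fall back to yielding the time slice.
    class backoff_sketch {
        int count = 1;
        static const int spin_limit = 16;   // assumed threshold
    public:
        void pause() {
            if (count <= spin_limit) {
                for (volatile int i = 0; i < count; ++i) {}  // busy-wait; volatile keeps the loop
                count *= 2;                                  // exponential growth while spinning
            } else {
                std::this_thread::yield();                   // give up the time slice after the limit
            }
        }
    };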
void make_invalid(ticket k)
void abort_all()
Abort any sleeping threads at the time of the call.
virtual __TBB_EXPORTED_METHOD ~concurrent_queue_base_v3()
void __TBB_EXPORTED_METHOD internal_abort()
Abort all pending queue operations.
micro_queue array[n_queue]
Meets "allocator" requirements of ISO C++ Standard, Section 20.1.5.
char pad2[NFS_MaxLineSize-((sizeof(atomic< ticket >)+sizeof(concurrent_monitor))&(NFS_MaxLineSize-1))]
__TBB_EXPORTED_METHOD concurrent_queue_base_v3(size_t item_size)
micro_queue & assign(const micro_queue &src, concurrent_queue_base &base, concurrent_queue_base::copy_specifics op_type)
char pad1[NFS_MaxLineSize-((sizeof(atomic< ticket >)+sizeof(concurrent_monitor)+sizeof(atomic< size_t >))&(NFS_MaxLineSize-1))]
concurrent_queue_base::page * array[concurrent_queue_rep::n_queue]
void __TBB_EXPORTED_METHOD internal_throw_exception() const
throw an exception
#define __TBB_ASSERT(predicate, comment)
No-op version of __TBB_ASSERT.
void initialize(const concurrent_queue_base_v3 &queue, size_t offset_of_data)
atomic< page * > head_page
#define ITT_NOTIFY(name, obj)
size_t items_per_page
Always a power of 2.
concurrent_monitor items_avail
void deallocate(pointer p, size_type)
Free block of memory that starts on a cache line.
void abort_push(ticket k, concurrent_queue_base &base)
static const ptrdiff_t infinite_capacity
Value for effective_capacity that denotes unbounded queue.
const concurrent_queue_base & my_queue
void throw_exception(exception_id eid)
Versionless convenience wrapper for throw_exception_v4()
void __TBB_EXPORTED_METHOD advance()
Advance iterator one step towards tail of queue.
atomic< unsigned > abort_counter
void __TBB_EXPORTED_METHOD internal_finish_clear()
free any remaining pages
bool __TBB_EXPORTED_METHOD internal_push_move_if_not_full(const void *src)
Attempt to enqueue item onto queue using move operation.
bool pop(void *dst, ticket k, concurrent_queue_base &base)
concurrent_queue_base::page page
size_t __TBB_EXPORTED_FUNC NFS_GetLineSize()
Cache/sector line size.
void __TBB_EXPORTED_METHOD internal_push_move(const void *src)
Enqueue item at tail of queue using move operation.
bool commit_wait(thread_context &thr)
Commit wait if event count has not changed; otherwise, cancel wait.
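The listing fragments above show the caller side of this protocol: a sync_prepare_done flag set before the first prepare, and a check of whether the thread actually slept. The following condition-variable sketch reproduces the prepare/commit/cancel semantics with a deliberately simplified interface (an epoch token in place of thread_context) and none of the real class's lock-free fast path:

    #include <condition_variable>
    #include <cstdint>
    #include <mutex>

    // Sketch of a prepare/commit/cancel waiting protocol built on a plain
    // condition variable; not the real concurrent_monitor.
    class monitor_sketch {
        std::mutex mtx;
        std::condition_variable cv;
        std::uint64_t epoch = 0;
    public:
        // prepare_wait: record the epoch observed before re-checking the predicate.
        std::uint64_t prepare_wait() {
            std::lock_guard<std::mutex> lock(mtx);
            return epoch;
        }
        // commit_wait: sleep only if no notification arrived since prepare_wait;
        // returns true if the thread actually slept.
        bool commit_wait(std::uint64_t observed_epoch) {
            std::unique_lock<std::mutex> lock(mtx);
            if (epoch != observed_epoch)
                return false;                 // event count changed: treat as a cancelled wait
            cv.wait(lock, [&] { return epoch != observed_epoch; });
            return true;
        }
        // cancel_wait: nothing to undo in this simplified version.
        void cancel_wait(std::uint64_t) {}
        // notify_all: advance the epoch and wake all waiters.
        void notify_all() {
            { std::lock_guard<std::mutex> lock(mtx); ++epoch; }
            cv.notify_all();
        }
    };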
void __TBB_EXPORTED_METHOD internal_push(const void *src)
Enqueue item at tail of queue using copy operation.
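These internal entry points back the public bounded-queue interface. A usage sketch, assuming the classic tbb::concurrent_bounded_queue API (push, try_pop, set_capacity, abort); the comments map each call to the internal method it plausibly reaches:

    #include <iostream>
    #include <tbb/concurrent_queue.h>

    int main() {
        tbb::concurrent_bounded_queue<int> q;
        q.set_capacity(4);          // cf. internal_set_capacity

        for (int i = 0; i < 4; ++i)
            q.push(i);              // blocking enqueue: cf. internal_push / internal_push_move

        int value;
        while (q.try_pop(value))    // non-blocking dequeue: cf. internal_pop_if_present
            std::cout << value << '\n';

        // q.abort() would wake any threads blocked in push()/pop(): cf. internal_abort.
        return 0;
    }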
bool operator()(uintptr_t p) const
atomic< ticket > head_counter
__TBB_EXPORTED_METHOD ~concurrent_queue_iterator_base_v3()
Destructor.