Intel(R) Threading Building Blocks Doxygen Documentation  version 4.2.3
tbb::internal::concurrent_vector_base_v3 Class Reference

Base class of concurrent vector implementation.

#include <concurrent_vector.h>


Classes

class  helper
 
struct  internal_segments_table
Internal structure for compact().
 
struct  segment_allocated
 
struct  segment_allocation_failed
 
struct  segment_not_used
 
class  segment_t
 
class  segment_value_t
 

Protected Types

enum  { default_initial_segments = 1, pointers_per_short_table = 3, pointers_per_long_table = sizeof(segment_index_t) * 8 }
 
typedef size_t segment_index_t
 
typedef size_t size_type
 
typedef void(__TBB_EXPORTED_FUNC *internal_array_op1) (void *begin, size_type n)
 An operation on an n-element array starting at begin.
 
typedef void(__TBB_EXPORTED_FUNC *internal_array_op2) (void *dst, const void *src, size_type n)
 An operation on an n-element destination array and an n-element source array.
 

Protected Member Functions

 concurrent_vector_base_v3 ()
 
__TBB_EXPORTED_METHOD ~concurrent_vector_base_v3 ()
 
void __TBB_EXPORTED_METHOD internal_reserve (size_type n, size_type element_size, size_type max_size)
 
size_type __TBB_EXPORTED_METHOD internal_capacity () const
 
void internal_grow (size_type start, size_type finish, size_type element_size, internal_array_op2 init, const void *src)
 
size_type __TBB_EXPORTED_METHOD internal_grow_by (size_type delta, size_type element_size, internal_array_op2 init, const void *src)
 
void *__TBB_EXPORTED_METHOD internal_push_back (size_type element_size, size_type &index)
 
segment_index_t __TBB_EXPORTED_METHOD internal_clear (internal_array_op1 destroy)
 
void *__TBB_EXPORTED_METHOD internal_compact (size_type element_size, void *table, internal_array_op1 destroy, internal_array_op2 copy)
 
void __TBB_EXPORTED_METHOD internal_copy (const concurrent_vector_base_v3 &src, size_type element_size, internal_array_op2 copy)
 
void __TBB_EXPORTED_METHOD internal_assign (const concurrent_vector_base_v3 &src, size_type element_size, internal_array_op1 destroy, internal_array_op2 assign, internal_array_op2 copy)
 
void __TBB_EXPORTED_METHOD internal_throw_exception (size_type) const
Obsolete.
 
void __TBB_EXPORTED_METHOD internal_swap (concurrent_vector_base_v3 &v)
 
void __TBB_EXPORTED_METHOD internal_resize (size_type n, size_type element_size, size_type max_size, const void *src, internal_array_op1 destroy, internal_array_op2 init)
 
size_type __TBB_EXPORTED_METHOD internal_grow_to_at_least_with_result (size_type new_size, size_type element_size, internal_array_op2 init, const void *src)
 
void __TBB_EXPORTED_METHOD internal_grow_to_at_least (size_type new_size, size_type element_size, internal_array_op2 init, const void *src)
Deprecated entry point for backwards compatibility to TBB 2.1.
 

Static Protected Member Functions

static segment_index_t segment_index_of (size_type index)
 
static segment_index_t segment_base (segment_index_t k)
 
static segment_index_t segment_base_index_of (segment_index_t &index)
 
static size_type segment_size (segment_index_t k)
 
static bool is_first_element_in_segment (size_type element_index)
 

Protected Attributes

void *(* vector_allocator_ptr )(concurrent_vector_base_v3 &, size_t)
 allocator function pointer
 
atomic< size_type > my_first_block
 count of segments in the first block
 
atomic< size_type > my_early_size
 Requested size of vector.
 
atomic< segment_t * > my_segment
 Pointer to the segments table.
 
segment_t my_storage [pointers_per_short_table]
 embedded storage of segment pointers
 

Friends

class helper
 
template<typename Container , typename Value >
class vector_iterator
 
void enforce_segment_allocated (segment_value_t const &s, internal::exception_id exception=eid_bad_last_alloc)
 
void swap (segment_t &, segment_t &) __TBB_NOEXCEPT(true)
 

Detailed Description

Base class of concurrent vector implementation.

Definition at line 83 of file concurrent_vector.h.
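
The base class is deliberately type-agnostic: it owns the segment table, the size counters and the growth/compaction logic, while everything that depends on the element type (construction, copying, destruction) is passed in from the derived concurrent_vector<T> as function pointers matching internal_array_op1 and internal_array_op2, together with the element size. The following is a minimal sketch of that split, not code from the TBB sources; the names raw_vector_base and typed_ops are made up for illustration.

#include <cstddef>
#include <new>

// Sketch of the type-erasure idea: the base works on raw bytes plus an element
// size, and calls back into element-type-specific code through pointers
// matching internal_array_op1/op2.
struct raw_vector_base {
    typedef void (*array_op1)(void* begin, std::size_t n);                 // e.g. destroy n elements
    typedef void (*array_op2)(void* dst, const void* src, std::size_t n);  // e.g. copy/init n elements
};

template<typename T>
struct typed_ops {
    static void destroy(void* begin, std::size_t n) {
        T* p = static_cast<T*>(begin);
        for (std::size_t i = 0; i < n; ++i) p[i].~T();
    }
    static void copy(void* dst, const void* src, std::size_t n) {
        T* d = static_cast<T*>(dst);
        const T* s = static_cast<const T*>(src);
        for (std::size_t i = 0; i < n; ++i) new (d + i) T(s[i]);            // placement-new copies
    }
};

// A derived concurrent_vector<T> hands typed_ops<T>::destroy / ::copy (plus
// sizeof(T)) to the base's internal_* entry points, so the base itself never
// needs to know T. This presumably also keeps the exported entry points in a
// single compiled translation unit (concurrent_vector.cpp).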

Member Typedef Documentation

◆ internal_array_op1

typedef void(__TBB_EXPORTED_FUNC * tbb::internal::concurrent_vector_base_v3::internal_array_op1) (void *begin, size_type n)
protected

An operation on an n-element array starting at begin.

Definition at line 243 of file concurrent_vector.h.

◆ internal_array_op2

typedef void(__TBB_EXPORTED_FUNC * tbb::internal::concurrent_vector_base_v3::internal_array_op2) (void *dst, const void *src, size_type n)
protected

An operation on an n-element destination array and an n-element source array.

Definition at line 246 of file concurrent_vector.h.

◆ segment_index_t

Definition at line 87 of file concurrent_vector.h.

◆ size_type

Definition at line 88 of file concurrent_vector.h.

Member Enumeration Documentation

◆ anonymous enum

anonymous enum
protected
Enumerator
default_initial_segments
pointers_per_short_table
 Number of slots for segment pointers inside the class.
pointers_per_long_table

Definition at line 91 of file concurrent_vector.h.

91  {
92  // Size constants
93  default_initial_segments = 1, // 2 initial items
94  //! Number of slots for segment pointers inside the class
95  pointers_per_short_table = 3, // to fit into 8 words of entire structure
96  pointers_per_long_table = sizeof(segment_index_t) * 8 // one segment per bit
97  };
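In other words, the object embeds room for three segment pointers (my_storage); segments 0, 1 and 2 hold 2, 2 and 4 elements respectively, so up to 8 elements the pointer table stays embedded and no separate long table is needed. Beyond that, a long table with one slot per bit of segment_index_t is allocated. A small illustrative snippet (not TBB code) of the resulting numbers, assuming a 64-bit size_t:

#include <cstdio>
#include <cstddef>

int main() {
    // Illustration only; mirrors the enum above for an LP64 platform.
    const std::size_t short_slots = 3;                         // pointers_per_short_table
    const std::size_t long_slots  = sizeof(std::size_t) * 8;   // pointers_per_long_table: one segment per bit
    const std::size_t short_capacity = 2 + 2 + 4;              // elements covered by segments 0, 1, 2
    std::printf("short table: %zu slots covering %zu elements\n", short_slots, short_capacity);
    std::printf("long table:  %zu slots\n", long_slots);       // 64 on 64-bit platforms
    return 0;
}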

Constructor & Destructor Documentation

◆ concurrent_vector_base_v3()

tbb::internal::concurrent_vector_base_v3::concurrent_vector_base_v3 ( )
inline, protected

Definition at line 194 of file concurrent_vector.h.

194  {
195  //Here the semantic is intentionally relaxed.
196  //The reason this is next:
197  //Object that is in middle of construction (i.e. its constructor is not yet finished)
198  //cannot be used concurrently until the construction is finished.
199  //Thus to flag other threads that construction is finished, some synchronization with
200  //acquire-release semantic should be done by the (external) code that uses the vector.
201  //So, no need to do the synchronization inside the vector.
202 
203  my_early_size.store<relaxed>(0);
204  my_first_block.store<relaxed>(0); // here is not default_initial_segments
205  my_segment.store<relaxed>(my_storage);
206  }

References my_early_size, my_first_block, my_segment, my_storage, and tbb::relaxed.
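
As the comment in the listing says, the constructor intentionally uses relaxed stores: an object still under construction cannot be used concurrently anyway, so it is the code that publishes the vector to other threads that must provide acquire/release ordering. A minimal sketch of such publication, assuming C++11 atomics (this is usage guidance, not part of the class itself):

#include <atomic>
#include <tbb/concurrent_vector.h>

std::atomic<tbb::concurrent_vector<int>*> g_vec(nullptr);

void producer() {
    tbb::concurrent_vector<int>* v = new tbb::concurrent_vector<int>;  // internally uses relaxed stores
    v->push_back(42);
    g_vec.store(v, std::memory_order_release);   // publish: construction happens-before the acquire load
}

void consumer() {
    tbb::concurrent_vector<int>* v = g_vec.load(std::memory_order_acquire);
    if (v) {
        // safe to use *v here: the acquire load synchronizes with the release store above
    }
}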

◆ ~concurrent_vector_base_v3()

tbb::internal::concurrent_vector_base_v3::~concurrent_vector_base_v3 ( )
protected

Definition at line 313 of file concurrent_vector.cpp.

313  {
314  segment_t* s = my_segment;
315  if( s != my_storage ) {
316 #if TBB_USE_ASSERT
317  //to please assert in segment_t destructor
318  std::fill_n(my_storage,size_t(pointers_per_short_table),segment_t());
319 #endif /* TBB_USE_ASSERT */
320 #if TBB_USE_DEBUG
321  for( segment_index_t i = 0; i < pointers_per_long_table; i++)
322  __TBB_ASSERT( my_segment[i].load<relaxed>() != segment_allocated(), "Segment should have been freed. Please recompile with new TBB before using exceptions.");
323 #endif
325  NFS_Free( s );
326  }
327 }

References __TBB_ASSERT, my_segment, my_storage, tbb::internal::NFS_Free(), pointers_per_long_table, pointers_per_short_table, and s.


Member Function Documentation

◆ internal_assign()

void tbb::internal::concurrent_vector_base_v3::internal_assign ( const concurrent_vector_base_v3 & src,
size_type  element_size,
internal_array_op1  destroy,
internal_array_op2  assign,
internal_array_op2  copy 
)
protected

Definition at line 376 of file concurrent_vector.cpp.

376  {
377  size_type n = src.my_early_size;
378  while( my_early_size>n ) { // TODO: improve
379  segment_index_t k = segment_index_of( my_early_size-1 );
380  size_type b=segment_base(k);
381  size_type new_end = b>=n ? b : n;
382  __TBB_ASSERT( my_early_size>new_end, NULL );
383  enforce_segment_allocated(my_segment[k].load<relaxed>()); //if vector was broken before
384  // destructors are supposed to not throw any exceptions
385  destroy( my_segment[k].load<relaxed>().pointer<char>() + element_size*(new_end-b), my_early_size-new_end );
386  my_early_size = new_end;
387  }
388  size_type dst_initialized_size = my_early_size;
389  my_early_size = n;
391  size_type b;
392  for( segment_index_t k=0; (b=segment_base(k))<n; ++k ) {
393  if( (src.my_segment.load<acquire>() == src.my_storage && k >= pointers_per_short_table)
394  || src.my_segment[k].load<relaxed>() != segment_allocated() ) { // if source is damaged
395  my_early_size = b; break; // TODO: it may cause undestructed items
396  }
398  if( my_segment[k].load<relaxed>() == segment_not_used())
399  helper::enable_segment(*this, k, element_size);
400  else
401  enforce_segment_allocated(my_segment[k].load<relaxed>());
402  size_type m = k? segment_size(k) : 2;
403  if( m > n-b ) m = n-b;
404  size_type a = 0;
405  if( dst_initialized_size>b ) {
406  a = dst_initialized_size-b;
407  if( a>m ) a = m;
408  assign( my_segment[k].load<relaxed>().pointer<void>(), src.my_segment[k].load<relaxed>().pointer<void>(), a );
409  m -= a;
410  a *= element_size;
411  }
412  if( m>0 )
413  copy( my_segment[k].load<relaxed>().pointer<char>() + a, src.my_segment[k].load<relaxed>().pointer<char>() + a, m );
414  }
415  __TBB_ASSERT( src.my_early_size==n, "detected use of concurrent_vector::operator= with right side that was concurrently modified" );
416 }

References __TBB_ASSERT, tbb::acquire, tbb::internal::concurrent_vector_base_v3::helper::assign_first_segment_if_necessary(), tbb::internal::concurrent_vector_base_v3::helper::enable_segment(), enforce_segment_allocated, tbb::internal::concurrent_vector_base_v3::helper::extend_table_if_necessary(), my_early_size, my_segment, my_storage, pointers_per_short_table, tbb::relaxed, segment_base(), segment_index_of(), and segment_size().

Referenced by tbb::concurrent_vector< padded_element, padded_allocator_type >::operator=().


◆ internal_capacity()

concurrent_vector_base_v3::size_type tbb::internal::concurrent_vector_base_v3::internal_capacity ( ) const
protected

Definition at line 329 of file concurrent_vector.cpp.

329  {
330  return segment_base( helper::find_segment_end(*this) );
331 }

References tbb::internal::concurrent_vector_base_v3::helper::find_segment_end(), and segment_base().

Referenced by tbb::concurrent_vector< padded_element, padded_allocator_type >::capacity(), internal_grow_to_at_least_with_result(), and tbb::concurrent_vector< padded_element, padded_allocator_type >::size().


◆ internal_clear()

concurrent_vector_base_v3::segment_index_t tbb::internal::concurrent_vector_base_v3::internal_clear ( internal_array_op1  destroy)
protected

Definition at line 501 of file concurrent_vector.cpp.

501  {
502  __TBB_ASSERT( my_segment, NULL );
503  size_type j = my_early_size;
504  my_early_size = 0;
505  helper for_each(my_segment, my_first_block, 0, 0, 0, j); // element_size is safe to be zero if 'start' is zero
506  j = for_each.apply( helper::destroy_body(destroy) );
507  size_type i = helper::find_segment_end(*this);
508  return j < i? i : j+1;
509 }

References __TBB_ASSERT, tbb::internal::concurrent_vector_base_v3::helper::apply(), tbb::internal::concurrent_vector_base_v3::helper::find_segment_end(), my_early_size, my_first_block, and my_segment.

Referenced by tbb::concurrent_vector< padded_element, padded_allocator_type >::clear(), tbb::concurrent_vector< padded_element, padded_allocator_type >::concurrent_vector(), and tbb::concurrent_vector< padded_element, padded_allocator_type >::~concurrent_vector().


◆ internal_compact()

void * tbb::internal::concurrent_vector_base_v3::internal_compact ( size_type  element_size,
void * table,
internal_array_op1  destroy,
internal_array_op2  copy 
)
protected

Definition at line 511 of file concurrent_vector.cpp.

512 {
513  const size_type my_size = my_early_size;
514  const segment_index_t k_end = helper::find_segment_end(*this); // allocated segments
515  const segment_index_t k_stop = my_size? segment_index_of(my_size-1) + 1 : 0; // number of segments to store existing items: 0=>0; 1,2=>1; 3,4=>2; [5-8]=>3;..
516  const segment_index_t first_block = my_first_block; // number of merged segments, getting values from atomics
517 
518  segment_index_t k = first_block;
519  if(k_stop < first_block)
520  k = k_stop;
521  else
522  while (k < k_stop && helper::incompact_predicate(segment_size( k ) * element_size) ) k++;
523  if(k_stop == k_end && k == first_block)
524  return NULL;
525 
526  segment_t *const segment_table = my_segment;
527  internal_segments_table &old = *static_cast<internal_segments_table*>( table );
528  //this call is left here for sake of backward compatibility, and as a placeholder for table initialization
529  std::fill_n(old.table,sizeof(old.table)/sizeof(old.table[0]),segment_t());
530  old.first_block=0;
531 
532  if ( k != first_block && k ) // first segment optimization
533  {
534  // exception can occur here
535  void *seg = helper::allocate_segment(*this, segment_size(k));
536  old.table[0].store<relaxed>(seg);
537  old.first_block = k; // fill info for freeing new segment if exception occurs
538  // copy items to the new segment
539  size_type my_segment_size = segment_size( first_block );
540  for (segment_index_t i = 0, j = 0; i < k && j < my_size; j = my_segment_size) {
541  __TBB_ASSERT( segment_table[i].load<relaxed>() == segment_allocated(), NULL);
542  void *s = static_cast<void*>(
543  static_cast<char*>(seg) + segment_base(i)*element_size );
544  //TODO: refactor to use std::min
545  if(j + my_segment_size >= my_size) my_segment_size = my_size - j;
546  __TBB_TRY { // exception can occur here
547  copy( s, segment_table[i].load<relaxed>().pointer<void>(), my_segment_size );
548  } __TBB_CATCH(...) { // destroy all the already copied items
549  helper for_each(&old.table[0], old.first_block, element_size,
550  0, 0, segment_base(i)+ my_segment_size);
551  for_each.apply( helper::destroy_body(destroy) );
552  __TBB_RETHROW();
553  }
554  my_segment_size = i? segment_size( ++i ) : segment_size( i = first_block );
555  }
556  // commit the changes
557  std::copy(segment_table,segment_table + k,old.table);
558  for (segment_index_t i = 0; i < k; i++) {
559  segment_table[i].store<relaxed>(static_cast<void*>(
560  static_cast<char*>(seg) + segment_base(i)*element_size ));
561  }
562  old.first_block = first_block; my_first_block = k; // now, first_block != my_first_block
563  // destroy original copies
564  my_segment_size = segment_size( first_block ); // old.first_block actually
565  for (segment_index_t i = 0, j = 0; i < k && j < my_size; j = my_segment_size) {
566  if(j + my_segment_size >= my_size) my_segment_size = my_size - j;
567  // destructors are supposed to not throw any exceptions
568  destroy( old.table[i].load<relaxed>().pointer<void>(), my_segment_size );
569  my_segment_size = i? segment_size( ++i ) : segment_size( i = first_block );
570  }
571  }
572  // free unnecessary segments allocated by reserve() call
573  if ( k_stop < k_end ) {
574  old.first_block = first_block;
575  std::copy(segment_table+k_stop, segment_table+k_end, old.table+k_stop );
576  std::fill_n(segment_table+k_stop, (k_end-k_stop), segment_t());
577  if( !k ) my_first_block = 0;
578  }
579  return table;
580 }

References __TBB_ASSERT, __TBB_CATCH, __TBB_RETHROW, __TBB_TRY, tbb::internal::concurrent_vector_base_v3::helper::allocate_segment(), tbb::internal::concurrent_vector_base_v3::helper::apply(), tbb::internal::concurrent_vector_base_v3::helper::find_segment_end(), tbb::internal::concurrent_vector_base_v3::internal_segments_table::first_block, tbb::internal::concurrent_vector_base_v3::helper::incompact_predicate(), tbb::internal::concurrent_vector_base_v3::segment_t::load(), my_early_size, my_first_block, my_segment, tbb::relaxed, s, segment_base(), segment_index_of(), segment_size(), tbb::internal::concurrent_vector_base_v3::segment_t::store(), and tbb::internal::concurrent_vector_base_v3::internal_segments_table::table.
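
internal_compact copies the existing elements into as few, appropriately sized segments as possible and hands the old segments back to the caller (through the internal_segments_table argument) for deallocation; it is the type-erased engine behind shrinking the container. A hedged usage sketch of the public interface that, to my reading, ends up here (shrink_to_fit is not documented on this page, so treat that mapping as an assumption):

#include <tbb/concurrent_vector.h>

int main() {
    tbb::concurrent_vector<int> v;
    for (int i = 0; i < 1000; ++i) v.push_back(i);  // elements end up spread over many segments
    // Sequential (not thread-safe) call that defragments the storage;
    // assumed to be implemented on top of internal_compact.
    v.shrink_to_fit();
    return 0;
}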


◆ internal_copy()

void tbb::internal::concurrent_vector_base_v3::internal_copy ( const concurrent_vector_base_v3 & src,
size_type  element_size,
internal_array_op2  copy 
)
protected

Definition at line 356 of file concurrent_vector.cpp.

356  {
357  size_type n = src.my_early_size;
359  if( n ) {
361  size_type b;
362  for( segment_index_t k=0; (b=segment_base(k))<n; ++k ) {
363  if( (src.my_segment.load<acquire>() == src.my_storage && k >= pointers_per_short_table)
364  || (src.my_segment[k].load<relaxed>() != segment_allocated())) {
365  my_early_size = b; break;
366  }
368  size_type m = helper::enable_segment(*this, k, element_size);
369  if( m > n-b ) m = n-b;
370  my_early_size = b+m;
371  copy( my_segment[k].load<relaxed>().pointer<void>(), src.my_segment[k].load<relaxed>().pointer<void>(), m );
372  }
373  }
374 }

References __TBB_ASSERT, tbb::acquire, tbb::internal::concurrent_vector_base_v3::helper::assign_first_segment_if_necessary(), tbb::internal::concurrent_vector_base_v3::helper::enable_segment(), tbb::internal::concurrent_vector_base_v3::helper::extend_table_if_necessary(), my_early_size, my_segment, my_storage, pointers_per_short_table, tbb::relaxed, segment_base(), and segment_index_of().

Referenced by tbb::concurrent_vector< padded_element, padded_allocator_type >::concurrent_vector().


◆ internal_grow()

void tbb::internal::concurrent_vector_base_v3::internal_grow ( size_type  start,
size_type  finish,
size_type  element_size,
internal_array_op2  init,
const void * src
)
protected

Definition at line 473 of file concurrent_vector.cpp.

473  {
474  __TBB_ASSERT( start<finish, "start must be less than finish" );
475  segment_index_t k_start = segment_index_of(start), k_end = segment_index_of(finish-1);
476  helper::assign_first_segment_if_necessary(*this, k_end);
477  helper::extend_table_if_necessary(*this, k_end, start);
478  helper range(my_segment, my_first_block, element_size, k_start, start, finish);
479  for(; k_end > k_start && k_end >= range.first_block; --k_end ) // allocate segments in reverse order
480  helper::acquire_segment(*this, k_end, element_size, true/*for k_end>k_start*/);
481  for(; k_start <= k_end; ++k_start ) // but allocate first block in straight order
482  helper::acquire_segment(*this, k_start, element_size, segment_base( k_start ) >= start );
483  range.apply( helper::init_body(init, src) );
484 }

References __TBB_ASSERT, tbb::internal::concurrent_vector_base_v3::helper::acquire_segment(), tbb::internal::concurrent_vector_base_v3::helper::apply(), tbb::internal::concurrent_vector_base_v3::helper::assign_first_segment_if_necessary(), tbb::internal::concurrent_vector_base_v3::helper::extend_table_if_necessary(), tbb::internal::concurrent_vector_base_v3::helper::first_block, my_first_block, my_segment, segment_base(), and segment_index_of().

Referenced by internal_grow_by(), and internal_grow_to_at_least_with_result().


◆ internal_grow_by()

concurrent_vector_base_v3::size_type tbb::internal::concurrent_vector_base_v3::internal_grow_by ( size_type  delta,
size_type  element_size,
internal_array_op2  init,
const void * src
)
protected

Definition at line 467 of file concurrent_vector.cpp.

467  {
468  size_type result = my_early_size.fetch_and_add(delta);
469  internal_grow( result, result+delta, element_size, init, src );
470  return result;
471 }

References internal_grow(), and my_early_size.

Referenced by tbb::concurrent_vector< padded_element, padded_allocator_type >::grow_by().
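
The fetch_and_add on my_early_size is what makes grow_by safe to call concurrently: every caller receives a disjoint index range [result, result+delta) and then initializes only that range. A hedged usage sketch, assuming a TBB version such as this 4.x line in which the public grow_by returns an iterator to the first appended element:

#include <tbb/concurrent_vector.h>
#include <tbb/parallel_for.h>

// Each task appends a private block of 100 elements; the blocks never overlap,
// although their order within the vector is unspecified.
void append_blocks(tbb::concurrent_vector<int>& v) {
    tbb::parallel_for(0, 8, [&v](int block) {
        tbb::concurrent_vector<int>::iterator first = v.grow_by(100);  // atomically claims 100 consecutive slots
        for (int i = 0; i < 100; ++i)
            *(first + i) = block;                                      // fill only the claimed range
    });
}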


◆ internal_grow_to_at_least()

void tbb::internal::concurrent_vector_base_v3::internal_grow_to_at_least ( size_type  new_size,
size_type  element_size,
internal_array_op2  init,
const void * src
)
protected

Deprecated entry point for backwards compatibility to TBB 2.1.

Definition at line 430 of file concurrent_vector.cpp.

430  {
431  internal_grow_to_at_least_with_result( new_size, element_size, init, src );
432 }

References internal_grow_to_at_least_with_result(), and new_size.


◆ internal_grow_to_at_least_with_result()

concurrent_vector_base_v3::size_type tbb::internal::concurrent_vector_base_v3::internal_grow_to_at_least_with_result ( size_type  new_size,
size_type  element_size,
internal_array_op2  init,
const void * src
)
protected

Definition at line 434 of file concurrent_vector.cpp.

434  {
435  size_type e = my_early_size;
436  while( e<new_size ) {
437  size_type f = my_early_size.compare_and_swap(new_size,e);
438  if( f==e ) {
439  internal_grow( e, new_size, element_size, init, src );
440  break;
441  }
442  e = f;
443  }
444  // Check/wait for segments allocation completes
445  segment_index_t i, k_old = segment_index_of( new_size-1 );
446  if( k_old >= pointers_per_short_table && my_segment == my_storage ) {
447  spin_wait_while_eq( my_segment, my_storage );
448  }
449  for( i = 0; i <= k_old; ++i ) {
450  segment_t &s = my_segment[i];
451  if(s.load<relaxed>() == segment_not_used()) {
452  ITT_NOTIFY(sync_prepare, &s);
453  atomic_backoff backoff(true);
454  while( my_segment[i].load<acquire>() == segment_not_used() ) // my_segment may change concurrently
455  backoff.pause();
456  ITT_NOTIFY(sync_acquired, &s);
457  }
458  enforce_segment_allocated(my_segment[i].load<relaxed>());
459  }
460 #if TBB_USE_DEBUG
461  size_type capacity = internal_capacity();
462  __TBB_ASSERT( capacity >= new_size, NULL);
463 #endif
464  return e;
465 }

References __TBB_ASSERT, enforce_segment_allocated, internal_capacity(), internal_grow(), ITT_NOTIFY, my_early_size, my_segment, my_storage, new_size, tbb::internal::atomic_backoff::pause(), pointers_per_short_table, tbb::relaxed, s, segment_index_of(), and tbb::internal::spin_wait_while_eq().

Referenced by tbb::concurrent_vector< padded_element, padded_allocator_type >::grow_to_at_least(), and internal_grow_to_at_least().
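
The loop above is the usual "raise an atomic to at least a value" pattern: my_early_size is advanced with compare_and_swap, only the winning thread constructs the elements in [e, new_size), and every caller then waits until the corresponding segments are visibly allocated. The same idea, restated with standard atomics purely for illustration (not TBB code):

#include <atomic>
#include <cstddef>

// Raise 'size' to at least 'new_size'; returns the previous value. The caller
// whose CAS succeeds becomes responsible for constructing [old, new_size).
std::size_t raise_to_at_least(std::atomic<std::size_t>& size, std::size_t new_size) {
    std::size_t old = size.load(std::memory_order_relaxed);
    while (old < new_size && !size.compare_exchange_weak(old, new_size)) {
        // compare_exchange_weak refreshed 'old'; retry until we win or the size is already large enough
    }
    return old;
}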


◆ internal_push_back()

void * tbb::internal::concurrent_vector_base_v3::internal_push_back ( size_type  element_size,
size_type & index
)
protected

Definition at line 418 of file concurrent_vector.cpp.

418  {
419  __TBB_ASSERT( sizeof(my_early_size)==sizeof(uintptr_t), NULL );
420  size_type tmp = my_early_size.fetch_and_increment<acquire>();
421  index = tmp;
422  segment_index_t k_old = segment_index_of( tmp );
423  size_type base = segment_base(k_old);
424  helper::extend_table_if_necessary(*this, k_old, tmp);
425  segment_t& s = helper::acquire_segment(*this, k_old, element_size, base==tmp);
426  size_type j_begin = tmp-base;
427  return (void*)(s.load<relaxed>().pointer<char>() + element_size*j_begin);
428 }

References __TBB_ASSERT, tbb::acquire, tbb::internal::concurrent_vector_base_v3::helper::acquire_segment(), tbb::internal::concurrent_vector_base_v3::helper::extend_table_if_necessary(), my_early_size, tbb::relaxed, s, segment_base(), and segment_index_of().
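
internal_push_back claims exactly one index with an atomic fetch-and-increment and returns the raw, uninitialized slot for that index; the typed layer then constructs the element in place. A sketch of that pattern (not TBB code; the claim_slot call below is a hypothetical stand-in for internal_push_back, which is protected):

#include <cstddef>
#include <new>

// Shows how a typed wrapper would use a type-erased
// "claim an index, get its raw slot" entry point.
template<typename T, typename Base>
T* typed_push_back(Base& base, const T& value) {
    std::size_t index;
    void* slot = base.claim_slot(sizeof(T), index);  // hypothetical stand-in for internal_push_back(element_size, index)
    return new (slot) T(value);                      // construct the element in the claimed slot
}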


◆ internal_reserve()

void tbb::internal::concurrent_vector_base_v3::internal_reserve ( size_type  n,
size_type  element_size,
size_type  max_size 
)
protected

Definition at line 339 of file concurrent_vector.cpp.

339  {
340  if( n>max_size )
341  throw_exception(eid_reservation_length_error);
342  __TBB_ASSERT( n, NULL );
343  helper::assign_first_segment_if_necessary(*this, segment_index_of(n-1));
344  segment_index_t k = helper::find_segment_end(*this);
345 
346  for( ; segment_base(k)<n; ++k ) {
347  helper::extend_table_if_necessary(*this, k, 0);
348  if(my_segment[k].load<relaxed>() != segment_allocated())
349  helper::enable_segment(*this, k, element_size, true ); //in case of failure mark segments as not used
350  }
351 }

References __TBB_ASSERT, tbb::internal::concurrent_vector_base_v3::helper::assign_first_segment_if_necessary(), tbb::internal::eid_reservation_length_error, tbb::internal::concurrent_vector_base_v3::helper::enable_segment(), tbb::internal::concurrent_vector_base_v3::helper::extend_table_if_necessary(), tbb::internal::concurrent_vector_base_v3::helper::find_segment_end(), my_segment, segment_base(), segment_index_of(), and tbb::internal::throw_exception().

Referenced by internal_resize(), and tbb::concurrent_vector< padded_element, padded_allocator_type >::reserve().


◆ internal_resize()

void tbb::internal::concurrent_vector_base_v3::internal_resize ( size_type  n,
size_type  element_size,
size_type  max_size,
const void * src,
internal_array_op1  destroy,
internal_array_op2  init 
)
protected

Definition at line 486 of file concurrent_vector.cpp.

487  {
488  size_type j = my_early_size;
489  if( n > j ) { // construct items
490  internal_reserve(n, element_size, max_size);
491  my_early_size = n;
492  helper for_each(my_segment, my_first_block, element_size, segment_index_of(j), j, n);
493  for_each.apply( helper::safe_init_body(init, src) );
494  } else {
495  my_early_size = n;
496  helper for_each(my_segment, my_first_block, element_size, segment_index_of(n), n, j);
497  for_each.apply( helper::destroy_body(destroy) );
498  }
499 }

References tbb::internal::concurrent_vector_base_v3::helper::apply(), internal_reserve(), my_early_size, my_first_block, my_segment, and segment_index_of().

Referenced by tbb::concurrent_vector< padded_element, padded_allocator_type >::assign(), tbb::concurrent_vector< padded_element, padded_allocator_type >::concurrent_vector(), tbb::concurrent_vector< padded_element, padded_allocator_type >::internal_assign_n(), and tbb::concurrent_vector< padded_element, padded_allocator_type >::resize().


◆ internal_swap()

void tbb::internal::concurrent_vector_base_v3::internal_swap ( concurrent_vector_base_v3 & v)
protected

Definition at line 582 of file concurrent_vector.cpp.

583 {
584  size_type my_sz = my_early_size.load<acquire>();
585  size_type v_sz = v.my_early_size.load<relaxed>();
586  if(!my_sz && !v_sz) return;
587 
588  bool my_was_short = (my_segment.load<relaxed>() == my_storage);
589  bool v_was_short = (v.my_segment.load<relaxed>() == v.my_storage);
590 
591  //In C++11, this would be: swap(my_storage, v.my_storage);
592  for (int i=0; i < pointers_per_short_table; ++i){
593  swap(my_storage[i], v.my_storage[i]);
594  }
595  tbb::internal::swap<relaxed>(my_first_block, v.my_first_block);
596  tbb::internal::swap<relaxed>(my_segment, v.my_segment);
597  if (my_was_short){
598  v.my_segment.store<relaxed>(v.my_storage);
599  }
600  if(v_was_short){
601  my_segment.store<relaxed>(my_storage);
602  }
603 
604  my_early_size.store<relaxed>(v_sz);
605  v.my_early_size.store<release>(my_sz);
606 }

References tbb::acquire, my_early_size, my_first_block, my_segment, my_storage, pointers_per_short_table, tbb::relaxed, tbb::release, and swap.

Referenced by tbb::concurrent_vector< padded_element, padded_allocator_type >::concurrent_vector(), tbb::concurrent_vector< padded_element, padded_allocator_type >::operator=(), and tbb::concurrent_vector< padded_element, padded_allocator_type >::swap().


◆ internal_throw_exception()

void tbb::internal::concurrent_vector_base_v3::internal_throw_exception ( size_type  t) const
protected

Obsolete.

Definition at line 333 of file concurrent_vector.cpp.

333  {
334  exception_id ids[] = { eid_out_of_range, eid_segment_range_error, eid_index_range_error };
335  __TBB_ASSERT(t < sizeof(ids) / sizeof(exception_id), NULL);
336  throw_exception(ids[t]);
337 }

References __TBB_ASSERT, tbb::internal::eid_index_range_error, tbb::internal::eid_out_of_range, tbb::internal::eid_segment_range_error, and tbb::internal::throw_exception().


◆ is_first_element_in_segment()

static bool tbb::internal::concurrent_vector_base_v3::is_first_element_in_segment ( size_type  element_index)
inline, static, protected

Definition at line 233 of file concurrent_vector.h.

233  {
234  //check if element_index is a power of 2 that is at least 2.
235  //The idea is to detect if the iterator crosses a segment boundary,
236  //and 2 is the minimal index for which it's true
237  __TBB_ASSERT(element_index, "there should be no need to call "
238  "is_first_element_in_segment for 0th element" );
239  return is_power_of_two_at_least( element_index, 2 );
240  }

References __TBB_ASSERT, and tbb::internal::is_power_of_two_at_least().

Referenced by tbb::internal::vector_iterator< Container, Value >::operator++(), and tbb::internal::vector_iterator< Container, Value >::operator--().
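
The predicate is true exactly for indices 2, 4, 8, 16, ..., that is, the first index of segments 1, 2, 3, ...; this is how vector_iterator's operator++ and operator-- notice they have stepped across a segment boundary. A standalone restatement of the check (illustration only, not the TBB helper):

#include <cassert>

// An index starts a new segment exactly when it is a power of two
// that is at least 2 (2, 4, 8, 16, ...).
bool starts_new_segment(unsigned long i) {
    return i >= 2 && (i & (i - 1)) == 0;   // power-of-two test
}

int main() {
    assert( starts_new_segment(2) && starts_new_segment(4) && starts_new_segment(8) );
    assert( !starts_new_segment(1) && !starts_new_segment(3) && !starts_new_segment(6) );
    return 0;
}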


◆ segment_base()

static segment_index_t tbb::internal::concurrent_vector_base_v3::segment_base ( segment_index_t  k)
inline, static, protected

◆ segment_base_index_of()

static segment_index_t tbb::internal::concurrent_vector_base_v3::segment_base_index_of ( segment_index_t & index)
inline, static, protected

Definition at line 222 of file concurrent_vector.h.

222  {
223  segment_index_t k = segment_index_of( index );
224  index -= segment_base(k);
225  return k;
226  }

References segment_base(), and segment_index_of().


◆ segment_index_of()

static segment_index_t tbb::internal::concurrent_vector_base_v3::segment_index_of ( size_type  index)
inline, static, protected

Definition at line 214 of file concurrent_vector.h.

214  {
215  return segment_index_t( __TBB_Log2( index|1 ) );
216  }

References __TBB_Log2().

Referenced by tbb::internal::concurrent_vector_base_v3::helper::cleanup(), internal_assign(), internal_compact(), internal_copy(), internal_grow(), internal_grow_to_at_least_with_result(), internal_push_back(), internal_reserve(), internal_resize(), and segment_base_index_of().


◆ segment_size()

static size_type tbb::internal::concurrent_vector_base_v3::segment_size ( segment_index_t  k)
inline, static, protected

Definition at line 228 of file concurrent_vector.h.

228  {
229  return segment_index_t(1)<<k; // fake value for k==0
230  }

Referenced by tbb::internal::concurrent_vector_base_v3::helper::enable_segment(), tbb::internal::concurrent_vector_base_v3::helper::extend_segment_table(), tbb::internal::concurrent_vector_base_v3::helper::first_segment(), internal_assign(), internal_compact(), and tbb::internal::concurrent_vector_base_v3::helper::next_segment().
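
Taken together, segment_index_of, segment_base and segment_size map a flat element index to a (segment, offset) pair: segment k (for k >= 1) starts at index 2^k and holds 2^k elements, while segment 0 holds the first two elements (segment_size(0) == 1 is the "fake value" noted above, and callers substitute 2). The body of segment_base is not shown on this page, so the expression below is an assumption consistent with segment_base_index_of; the worked example is illustration only:

#include <cassert>
#include <cstddef>

// Recompute the index -> (segment, offset) mapping used by the helpers above.
std::size_t index_of_segment(std::size_t i) {          // mirrors segment_index_of
    std::size_t k = 0;
    for (std::size_t v = i | 1; v >>= 1; ) ++k;        // floor(log2(i | 1))
    return k;
}
std::size_t base_of_segment(std::size_t k) {           // assumed: first index stored in segment k
    return (std::size_t(1) << k) & ~std::size_t(1);    // 0, 2, 4, 8, 16, ...
}

int main() {
    // Element 5 lives in segment 2 (which covers indices 4..7) at offset 1.
    assert( index_of_segment(5) == 2 );
    assert( base_of_segment(2) == 4 );
    assert( 5 - base_of_segment(index_of_segment(5)) == 1 );
    return 0;
}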


Friends And Related Function Documentation

◆ enforce_segment_allocated

void enforce_segment_allocated ( segment_value_t const &  s,
internal::exception_id  exception = eid_bad_last_alloc 
)
friend

Definition at line 121 of file concurrent_vector.h.

121  {
122  if(s != segment_allocated()){
123  internal::throw_exception(exception);
124  }
125  }

Referenced by tbb::internal::concurrent_vector_base_v3::helper::acquire_segment(), tbb::internal::concurrent_vector_base_v3::helper::enable_segment(), internal_assign(), internal_grow_to_at_least_with_result(), and tbb::internal::concurrent_vector_base_v3::helper::safe_init_body::operator()().

◆ helper

friend class helper
friend

Definition at line 276 of file concurrent_vector.h.

◆ swap

void swap ( segment_t & lhs, segment_t & rhs )
friend

Definition at line 284 of file concurrent_vector.h.

284  {
285  lhs.swap(rhs);
286  }

Referenced by internal_swap().

◆ vector_iterator

template<typename Container , typename Value >
friend class vector_iterator
friend

Definition at line 280 of file concurrent_vector.h.

Member Data Documentation

◆ my_early_size

atomic< size_type > tbb::internal::concurrent_vector_base_v3::my_early_size
protected

Requested size of vector.

◆ my_first_block

atomic< size_type > tbb::internal::concurrent_vector_base_v3::my_first_block
protected

count of segments in the first block

◆ my_segment

atomic< segment_t * > tbb::internal::concurrent_vector_base_v3::my_segment
protected

Pointer to the segments table.

◆ my_storage

segment_t tbb::internal::concurrent_vector_base_v3::my_storage[pointers_per_short_table]
protected

embedded storage of segment pointers

◆ vector_allocator_ptr

void*(* tbb::internal::concurrent_vector_base_v3::vector_allocator_ptr) (concurrent_vector_base_v3 &, size_t)
protected

The documentation for this class was generated from the following files:

concurrent_vector.h
concurrent_vector.cpp

Copyright © 2005-2020 Intel Corporation. All Rights Reserved.

Intel, Pentium, Intel Xeon, Itanium, Intel XScale and VTune are registered trademarks or trademarks of Intel Corporation or its subsidiaries in the United States and other countries.

* Other names and brands may be claimed as the property of others.