Home ⌂Doc Index ◂Up ▴
Intel(R) Threading Building Blocks Doxygen Documentation  version 4.2.3
tbb::interface5::internal::split_ordered_list< T, Allocator > Class Template Reference

#include <_concurrent_unordered_impl.h>

Inheritance diagram for tbb::interface5::internal::split_ordered_list< T, Allocator >:
Collaboration diagram for tbb::interface5::internal::split_ordered_list< T, Allocator >:

Classes

struct  node
 

Public Types

typedef split_ordered_list< T, Allocator > self_type
 
typedef tbb::internal::allocator_rebind< Allocator, T >::type allocator_type
 
typedef node * nodeptr_t
 
typedef tbb::internal::allocator_traits< allocator_type >::value_type value_type
 
typedef tbb::internal::allocator_traits< allocator_type >::size_type size_type
 
typedef tbb::internal::allocator_traits< allocator_type >::difference_type difference_type
 
typedef tbb::internal::allocator_traits< allocator_type >::pointer pointer
 
typedef tbb::internal::allocator_traits< allocator_type >::const_pointer const_pointer
 
typedef value_type & reference
 
typedef const value_type & const_reference
 
typedef solist_iterator< self_type, const value_type > const_iterator
 
typedef solist_iterator< self_type, value_type > iterator
 
typedef flist_iterator< self_type, const value_type > raw_const_iterator
 
typedef flist_iterator< self_type, value_type > raw_iterator
 

Public Member Functions

nodeptr_t create_node (sokey_t order_key)
 
template<typename Arg >
nodeptr_t create_node (sokey_t order_key, __TBB_FORWARDING_REF(Arg) t, tbb::internal::true_type=tbb::internal::true_type())
 
template<typename Arg >
nodeptr_t create_node (sokey_t, __TBB_FORWARDING_REF(Arg), tbb::internal::false_type)
 
template<typename __TBB_PARAMETER_PACK Args>
nodeptr_t create_node_v (__TBB_FORWARDING_REF(Args) __TBB_PARAMETER_PACK args)
 
 split_ordered_list (allocator_type a=allocator_type())
 
 ~split_ordered_list ()
 
allocator_type get_allocator () const
 
void clear ()
 
iterator begin ()
 
const_iterator begin () const
 
iterator end ()
 
const_iterator end () const
 
const_iterator cbegin () const
 
const_iterator cend () const
 
bool empty () const
 
size_type size () const
 
size_type max_size () const
 
void swap (self_type &other)
 
raw_iterator raw_begin ()
 
raw_const_iterator raw_begin () const
 
raw_iterator raw_end ()
 
raw_const_iterator raw_end () const
 
iterator get_iterator (raw_iterator it)
 
const_iterator get_iterator (raw_const_iterator it) const
 
raw_iterator get_iterator (raw_const_iterator it)
 
iterator first_real_iterator (raw_iterator it)
 
const_iterator first_real_iterator (raw_const_iterator it) const
 
void destroy_node (nodeptr_t pnode)
 
std::pair< iterator, bool > try_insert (raw_iterator it, raw_iterator next, nodeptr_t pnode, size_type *new_count)
 
raw_iterator insert_dummy (raw_iterator it, sokey_t order_key)
 
nodeptr_t erase_node_impl (raw_iterator previous, raw_const_iterator &where)
 
void erase_node (raw_iterator previous, raw_const_iterator &where, tbb::internal::true_type)
 
void erase_node (raw_iterator previous, raw_const_iterator &where, tbb::internal::false_type)
 
void erase_node (raw_iterator previous, raw_const_iterator &where)
 
template<typename AllowDestroy >
iterator erase_node (raw_iterator previous, const_iterator where, AllowDestroy)
 
iterator erase_node (raw_iterator previous, const_iterator &where)
 
void move_all (self_type &source)
 

Static Public Member Functions

static sokey_t get_order_key (const raw_const_iterator &it)
 
static sokey_t get_safe_order_key (const raw_const_iterator &it)
 
static iterator get_iterator (const_iterator it)
 
static nodeptr_t try_insert_atomic (nodeptr_t previous, nodeptr_t new_node, nodeptr_t current_node)
 

Private Member Functions

void check_range (raw_iterator first, raw_iterator last)
 
void check_range ()
 

Private Attributes

tbb::internal::allocator_rebind< allocator_type, node >::type my_node_allocator
 
size_type my_element_count
 
nodeptr_t my_head
 

Friends

template<typename Traits >
class concurrent_unordered_base
 

Detailed Description

template<typename T, typename Allocator>
class tbb::interface5::internal::split_ordered_list< T, Allocator >

Definition at line 61 of file _concurrent_unordered_impl.h.

Member Typedef Documentation

◆ allocator_type

template<typename T, typename Allocator>
typedef tbb::internal::allocator_rebind<Allocator, T>::type tbb::interface5::internal::split_ordered_list< T, Allocator >::allocator_type

Definition at line 208 of file _concurrent_unordered_impl.h.

◆ const_iterator

template<typename T, typename Allocator>
typedef solist_iterator<self_type, const value_type> tbb::interface5::internal::split_ordered_list< T, Allocator >::const_iterator

Definition at line 222 of file _concurrent_unordered_impl.h.

◆ const_pointer

Definition at line 217 of file _concurrent_unordered_impl.h.

◆ const_reference

template<typename T, typename Allocator>
typedef const value_type& tbb::interface5::internal::split_ordered_list< T, Allocator >::const_reference

Definition at line 220 of file _concurrent_unordered_impl.h.

◆ difference_type

Definition at line 215 of file _concurrent_unordered_impl.h.

◆ iterator

template<typename T, typename Allocator>
typedef solist_iterator<self_type, value_type> tbb::interface5::internal::split_ordered_list< T, Allocator >::iterator

Definition at line 223 of file _concurrent_unordered_impl.h.

◆ nodeptr_t

template<typename T, typename Allocator>
typedef node* tbb::interface5::internal::split_ordered_list< T, Allocator >::nodeptr_t

Definition at line 210 of file _concurrent_unordered_impl.h.

◆ pointer

template<typename T, typename Allocator>
typedef tbb::internal::allocator_traits<allocator_type>::pointer tbb::interface5::internal::split_ordered_list< T, Allocator >::pointer

Definition at line 216 of file _concurrent_unordered_impl.h.

◆ raw_const_iterator

template<typename T, typename Allocator>
typedef flist_iterator<self_type, const value_type> tbb::interface5::internal::split_ordered_list< T, Allocator >::raw_const_iterator

Definition at line 224 of file _concurrent_unordered_impl.h.

◆ raw_iterator

template<typename T, typename Allocator>
typedef flist_iterator<self_type, value_type> tbb::interface5::internal::split_ordered_list< T, Allocator >::raw_iterator

Definition at line 225 of file _concurrent_unordered_impl.h.

◆ reference

template<typename T, typename Allocator>
typedef value_type& tbb::interface5::internal::split_ordered_list< T, Allocator >::reference

Definition at line 219 of file _concurrent_unordered_impl.h.

◆ self_type

template<typename T, typename Allocator>
typedef split_ordered_list<T, Allocator> tbb::interface5::internal::split_ordered_list< T, Allocator >::self_type

Definition at line 206 of file _concurrent_unordered_impl.h.

◆ size_type

template<typename T, typename Allocator>
typedef tbb::internal::allocator_traits<allocator_type>::size_type tbb::interface5::internal::split_ordered_list< T, Allocator >::size_type

Definition at line 214 of file _concurrent_unordered_impl.h.

◆ value_type

template<typename T, typename Allocator>
typedef tbb::internal::allocator_traits<allocator_type>::value_type tbb::interface5::internal::split_ordered_list< T, Allocator >::value_type

Definition at line 213 of file _concurrent_unordered_impl.h.

Constructor & Destructor Documentation

◆ split_ordered_list()

template<typename T, typename Allocator>
tbb::interface5::internal::split_ordered_list< T, Allocator >::split_ordered_list ( allocator_type  a = allocator_type())
inline

Definition at line 333 of file _concurrent_unordered_impl.h.

335  {
336  // Immediately allocate a dummy node with order key of 0. This node
337  // will always be the head of the list.
339  }
tbb::internal::allocator_rebind< allocator_type, node >::type my_node_allocator

◆ ~split_ordered_list()

template<typename T, typename Allocator>
tbb::interface5::internal::split_ordered_list< T, Allocator >::~split_ordered_list ( )
inline

Definition at line 341 of file _concurrent_unordered_impl.h.

342  {
343  // Clear the list
344  clear();
345 
346  // Remove the head element which is not cleared by clear()
347  nodeptr_t pnode = my_head;
348  my_head = NULL;
349 
350  __TBB_ASSERT(pnode != NULL && pnode->my_next == NULL, "Invalid head list node");
351 
352  destroy_node(pnode);
353  }
#define __TBB_ASSERT(predicate, comment)
No-op version of __TBB_ASSERT.
Definition: tbb_stddef.h:165

Member Function Documentation

◆ begin() [1/2]

template<typename T, typename Allocator>
iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::begin ( )
inline

◆ begin() [2/2]

template<typename T, typename Allocator>
const_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::begin ( ) const
inline

◆ cbegin()

template<typename T, typename Allocator>
const_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::cbegin ( ) const
inline

Definition at line 398 of file _concurrent_unordered_impl.h.

398  {
399  return (((const self_type *)this)->begin());
400  }

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::cbegin().

Here is the caller graph for this function:

◆ cend()

template<typename T, typename Allocator>
const_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::cend ( ) const
inline

Definition at line 402 of file _concurrent_unordered_impl.h.

402  {
403  return (((const self_type *)this)->end());
404  }

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::cend().

Here is the caller graph for this function:

◆ check_range() [1/2]

template<typename T, typename Allocator>
void tbb::interface5::internal::split_ordered_list< T, Allocator >::check_range ( raw_iterator  first,
raw_iterator  last 
)
inlineprivate

Definition at line 674 of file _concurrent_unordered_impl.h.

675  {
676 #if TBB_USE_ASSERT
677  for (raw_iterator it = first; it != last; ++it)
678  {
679  raw_iterator next = it;
680  ++next;
681 
682  __TBB_ASSERT(next == raw_end() || get_order_key(next) >= get_order_key(it), "!!! List order inconsistency !!!");
683  }
684 #else
686 #endif
687  }
auto last(Container &c) -> decltype(begin(c))
flist_iterator< self_type, value_type > raw_iterator
void suppress_unused_warning(const T1 &)
Utility template function to prevent "unused" warnings by various compilers.
Definition: tbb_stddef.h:398
auto first(Container &c) -> decltype(begin(c))
#define __TBB_ASSERT(predicate, comment)
No-op version of __TBB_ASSERT.
Definition: tbb_stddef.h:165
static sokey_t get_order_key(const raw_const_iterator &it)

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::concurrent_unordered_base().

Here is the caller graph for this function:

◆ check_range() [2/2]

◆ clear()

template<typename T, typename Allocator>
void tbb::interface5::internal::split_ordered_list< T, Allocator >::clear ( )
inline

Definition at line 361 of file _concurrent_unordered_impl.h.

361  {
362  nodeptr_t pnext;
363  nodeptr_t pnode = my_head;
364 
365  __TBB_ASSERT(my_head != NULL, "Invalid head list node");
366  pnext = pnode->my_next;
367  pnode->my_next = NULL;
368  pnode = pnext;
369 
370  while (pnode != NULL)
371  {
372  pnext = pnode->my_next;
373  destroy_node(pnode);
374  pnode = pnext;
375  }
376 
377  my_element_count = 0;
378  }
#define __TBB_ASSERT(predicate, comment)
No-op version of __TBB_ASSERT.
Definition: tbb_stddef.h:165

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::clear(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_copy(), and tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::~split_ordered_list().

Here is the caller graph for this function:

◆ create_node() [1/3]

template<typename T, typename Allocator>
nodeptr_t tbb::interface5::internal::split_ordered_list< T, Allocator >::create_node ( sokey_t  order_key)
inline

◆ create_node() [2/3]

template<typename T, typename Allocator>
template<typename Arg >
nodeptr_t tbb::interface5::internal::split_ordered_list< T, Allocator >::create_node ( sokey_t  order_key,
__TBB_FORWARDING_REF(Arg)  t,
tbb::internal::true_type  = tbb::internal::true_type() 
)
inline

Definition at line 293 of file _concurrent_unordered_impl.h.

294  {
295  nodeptr_t pnode = my_node_allocator.allocate(1);
296 
297  //TODO: use RAII scoped guard instead of explicit catch
298  __TBB_TRY {
299  new(static_cast<void*>(&pnode->my_element)) T(tbb::internal::forward<Arg>(t));
300  pnode->init(order_key);
301  } __TBB_CATCH(...) {
302  my_node_allocator.deallocate(pnode, 1);
303  __TBB_RETHROW();
304  }
305 
306  return (pnode);
307  }
#define __TBB_RETHROW()
Definition: tbb_stddef.h:286
tbb::internal::allocator_rebind< allocator_type, node >::type my_node_allocator
#define __TBB_CATCH(e)
Definition: tbb_stddef.h:284
#define __TBB_TRY
Definition: tbb_stddef.h:283

◆ create_node() [3/3]

template<typename T, typename Allocator>
template<typename Arg >
nodeptr_t tbb::interface5::internal::split_ordered_list< T, Allocator >::create_node ( sokey_t  ,
__TBB_FORWARDING_REF(Arg)  ,
tbb::internal::false_type   
)
inline

Definition at line 311 of file _concurrent_unordered_impl.h.

312  {
313  __TBB_ASSERT(false, "This compile-time helper should never get called");
314  return nodeptr_t();
315  }
#define __TBB_ASSERT(predicate, comment)
No-op version of __TBB_ASSERT.
Definition: tbb_stddef.h:165

◆ create_node_v()

template<typename T, typename Allocator>
template<typename __TBB_PARAMETER_PACK Args>
nodeptr_t tbb::interface5::internal::split_ordered_list< T, Allocator >::create_node_v ( __TBB_FORWARDING_REF(Args) __TBB_PARAMETER_PACK  args)
inline

Definition at line 319 of file _concurrent_unordered_impl.h.

319  {
320  nodeptr_t pnode = my_node_allocator.allocate(1);
321 
322  //TODO: use RAII scoped guard instead of explicit catch
323  __TBB_TRY {
324  new(static_cast<void*>(&pnode->my_element)) T(__TBB_PACK_EXPANSION(tbb::internal::forward<Args>(args)));
325  } __TBB_CATCH(...) {
326  my_node_allocator.deallocate(pnode, 1);
327  __TBB_RETHROW();
328  }
329 
330  return (pnode);
331  }
#define __TBB_RETHROW()
Definition: tbb_stddef.h:286
#define __TBB_PACK_EXPANSION(A)
Definition: tbb_stddef.h:525
tbb::internal::allocator_rebind< allocator_type, node >::type my_node_allocator
#define __TBB_CATCH(e)
Definition: tbb_stddef.h:284
#define __TBB_TRY
Definition: tbb_stddef.h:283

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::emplace().

Here is the caller graph for this function:

◆ destroy_node()

◆ empty()

template<typename T, typename Allocator>
bool tbb::interface5::internal::split_ordered_list< T, Allocator >::empty ( ) const
inline

Definition at line 407 of file _concurrent_unordered_impl.h.

407  {
408  return (my_element_count == 0);
409  }

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::empty().

Here is the caller graph for this function:

◆ end() [1/2]

template<typename T, typename Allocator>
iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::end ( )
inline

◆ end() [2/2]

template<typename T, typename Allocator>
const_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::end ( ) const
inline

Definition at line 394 of file _concurrent_unordered_impl.h.

394  {
395  return (const_iterator(0, this));
396  }
solist_iterator< self_type, const value_type > const_iterator

◆ erase_node() [1/5]

template<typename T, typename Allocator>
void tbb::interface5::internal::split_ordered_list< T, Allocator >::erase_node ( raw_iterator  previous,
raw_const_iterator where,
tbb::internal::true_type   
)
inline

◆ erase_node() [2/5]

template<typename T, typename Allocator>
void tbb::interface5::internal::split_ordered_list< T, Allocator >::erase_node ( raw_iterator  previous,
raw_const_iterator where,
tbb::internal::false_type   
)
inline

Definition at line 614 of file _concurrent_unordered_impl.h.

616  {
617  erase_node_impl(previous, where);
618  }
nodeptr_t erase_node_impl(raw_iterator previous, raw_const_iterator &where)

◆ erase_node() [3/5]

template<typename T, typename Allocator>
void tbb::interface5::internal::split_ordered_list< T, Allocator >::erase_node ( raw_iterator  previous,
raw_const_iterator where 
)
inline

Definition at line 620 of file _concurrent_unordered_impl.h.

620  {
621  erase_node(previous, where, /*allow_destroy*/tbb::internal::true_type());
622  }
void erase_node(raw_iterator previous, raw_const_iterator &where, tbb::internal::true_type)

◆ erase_node() [4/5]

template<typename T, typename Allocator>
template<typename AllowDestroy >
iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::erase_node ( raw_iterator  previous,
const_iterator  where,
AllowDestroy   
)
inline

Definition at line 626 of file _concurrent_unordered_impl.h.

627  {
628  raw_const_iterator it = where;
629  erase_node(previous, it, AllowDestroy());
631 
632  return get_iterator(first_real_iterator(it));
633  }
void erase_node(raw_iterator previous, raw_const_iterator &where, tbb::internal::true_type)
flist_iterator< self_type, const value_type > raw_const_iterator

◆ erase_node() [5/5]

template<typename T, typename Allocator>
iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::erase_node ( raw_iterator  previous,
const_iterator where 
)
inline

Definition at line 635 of file _concurrent_unordered_impl.h.

635  {
636  return erase_node(previous, where, /*allow_destroy*/tbb::internal::true_type());
637  }
void erase_node(raw_iterator previous, raw_const_iterator &where, tbb::internal::true_type)

◆ erase_node_impl()

template<typename T, typename Allocator>
nodeptr_t tbb::interface5::internal::split_ordered_list< T, Allocator >::erase_node_impl ( raw_iterator  previous,
raw_const_iterator where 
)
inline

Definition at line 598 of file _concurrent_unordered_impl.h.

598  {
599  nodeptr_t pnode = (where++).get_node_ptr();
600  nodeptr_t prevnode = previous.get_node_ptr();
601  __TBB_ASSERT(prevnode->my_next == pnode, "Erase must take consecutive iterators");
602  prevnode->my_next = pnode->my_next;
603  return pnode;
604  }
#define __TBB_ASSERT(predicate, comment)
No-op version of __TBB_ASSERT.
Definition: tbb_stddef.h:165

Referenced by tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::erase_node().

Here is the caller graph for this function:

◆ first_real_iterator() [1/2]

template<typename T, typename Allocator>
iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::first_real_iterator ( raw_iterator  it)
inline

Definition at line 489 of file _concurrent_unordered_impl.h.

490  {
491  // Skip all dummy, internal only iterators
492  while (it != raw_end() && it.get_node_ptr()->is_dummy())
493  ++it;
494 
495  return iterator(it.get_node_ptr(), this);
496  }
solist_iterator< self_type, value_type > iterator

Referenced by tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::begin(), tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::erase_node(), tbb::interface5::internal::concurrent_unordered_base< Traits >::const_range_type::set_midpoint(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::unsafe_begin(), and tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::unsafe_end().

Here is the caller graph for this function:

◆ first_real_iterator() [2/2]

template<typename T, typename Allocator>
const_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::first_real_iterator ( raw_const_iterator  it) const
inline

Definition at line 500 of file _concurrent_unordered_impl.h.

501  {
502  // Skip all dummy, internal only iterators
503  while (it != raw_end() && it.get_node_ptr()->is_dummy())
504  ++it;
505 
506  return const_iterator(it.get_node_ptr(), this);
507  }
solist_iterator< self_type, const value_type > const_iterator

◆ get_allocator()

template<typename T, typename Allocator>
allocator_type tbb::interface5::internal::split_ordered_list< T, Allocator >::get_allocator ( ) const
inline

Definition at line 357 of file _concurrent_unordered_impl.h.

357  {
358  return (my_node_allocator);
359  }
tbb::internal::allocator_rebind< allocator_type, node >::type my_node_allocator

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::get_allocator().

Here is the caller graph for this function:

◆ get_iterator() [1/4]

template<typename T, typename Allocator>
iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::get_iterator ( raw_iterator  it)
inline

Definition at line 465 of file _concurrent_unordered_impl.h.

465  {
466  __TBB_ASSERT(it.get_node_ptr() == NULL || !it.get_node_ptr()->is_dummy(), "Invalid user node (dummy)");
467  return iterator(it.get_node_ptr(), this);
468  }
solist_iterator< self_type, value_type > iterator
#define __TBB_ASSERT(predicate, comment)
No-op version of __TBB_ASSERT.
Definition: tbb_stddef.h:165

Referenced by tbb::interface5::internal::concurrent_unordered_base< Traits >::const_range_type::begin(), tbb::interface5::internal::concurrent_unordered_base< Traits >::const_range_type::end(), tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::erase_node(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_equal_range(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_erase(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_extract(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_find(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_insert(), tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::move_all(), and tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::unsafe_erase().

Here is the caller graph for this function:

◆ get_iterator() [2/4]

template<typename T, typename Allocator>
const_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::get_iterator ( raw_const_iterator  it) const
inline

Definition at line 472 of file _concurrent_unordered_impl.h.

472  {
473  __TBB_ASSERT(it.get_node_ptr() == NULL || !it.get_node_ptr()->is_dummy(), "Invalid user node (dummy)");
474  return const_iterator(it.get_node_ptr(), this);
475  }
solist_iterator< self_type, const value_type > const_iterator
#define __TBB_ASSERT(predicate, comment)
No-op version of __TBB_ASSERT.
Definition: tbb_stddef.h:165

◆ get_iterator() [3/4]

template<typename T, typename Allocator>
raw_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::get_iterator ( raw_const_iterator  it)
inline

Definition at line 478 of file _concurrent_unordered_impl.h.

478  {
479  return raw_iterator(it.get_node_ptr());
480  }
flist_iterator< self_type, value_type > raw_iterator

◆ get_iterator() [4/4]

template<typename T, typename Allocator>
static iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::get_iterator ( const_iterator  it)
inlinestatic

Definition at line 483 of file _concurrent_unordered_impl.h.

483  {
484  return iterator(it.my_node_ptr, it.my_list_ptr);
485  }
solist_iterator< self_type, value_type > iterator

◆ get_order_key()

template<typename T, typename Allocator>
static sokey_t tbb::interface5::internal::split_ordered_list< T, Allocator >::get_order_key ( const raw_const_iterator it)
inlinestatic

Definition at line 454 of file _concurrent_unordered_impl.h.

454  {
455  return it.get_node_ptr()->get_order_key();
456  }

Referenced by tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::check_range(), and tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::insert_dummy().

Here is the caller graph for this function:

◆ get_safe_order_key()

template<typename T, typename Allocator>
static sokey_t tbb::interface5::internal::split_ordered_list< T, Allocator >::get_safe_order_key ( const raw_const_iterator it)
inlinestatic

Definition at line 458 of file _concurrent_unordered_impl.h.

458  {
459  if( !it.get_node_ptr() ) return ~sokey_t(0);
460  return it.get_node_ptr()->get_order_key();
461  }

◆ insert_dummy()

template<typename T, typename Allocator>
raw_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::insert_dummy ( raw_iterator  it,
sokey_t  order_key 
)
inline

Definition at line 541 of file _concurrent_unordered_impl.h.

542  {
544  raw_iterator where = it;
545 
546  __TBB_ASSERT(where != last, "Invalid head node");
547 
548  ++where;
549 
550  // Create a dummy element up front, even though it may be discarded (due to concurrent insertion)
551  nodeptr_t dummy_node = create_node(order_key);
552 
553  for (;;)
554  {
555  __TBB_ASSERT(it != last, "Invalid head list node");
556 
557  // If the head iterator is at the end of the list, or past the point where this dummy
558  // node needs to be inserted, then try to insert it.
559  if (where == last || get_order_key(where) > order_key)
560  {
561  __TBB_ASSERT(get_order_key(it) < order_key, "Invalid node order in the list");
562 
563  // Try to insert it in the right place
564  nodeptr_t inserted_node = try_insert_atomic(it.get_node_ptr(), dummy_node, where.get_node_ptr());
565 
566  if (inserted_node == dummy_node)
567  {
568  // Insertion succeeded, check the list for order violations
569  check_range(it, where);
570  return raw_iterator(dummy_node);
571  }
572  else
573  {
574  // Insertion failed: either dummy node was inserted by another thread, or
575  // a real element was inserted at exactly the same place as dummy node.
576  // Proceed with the search from the previous location where order key was
577  // known to be larger (note: this is legal only because there is no safe
578  // concurrent erase operation supported).
579  where = it;
580  ++where;
581  continue;
582  }
583  }
584  else if (get_order_key(where) == order_key)
585  {
586  // Another dummy node with the same value found, discard the new one.
587  destroy_node(dummy_node);
588  return where;
589  }
590 
591  // Move the iterator forward
592  it = where;
593  ++where;
594  }
595 
596  }
auto last(Container &c) -> decltype(begin(c))
flist_iterator< self_type, value_type > raw_iterator
static nodeptr_t try_insert_atomic(nodeptr_t previous, nodeptr_t new_node, nodeptr_t current_node)
#define __TBB_ASSERT(predicate, comment)
No-op version of __TBB_ASSERT.
Definition: tbb_stddef.h:165
static sokey_t get_order_key(const raw_const_iterator &it)

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::init_bucket().

Here is the caller graph for this function:

◆ max_size()

template<typename T, typename Allocator>
size_type tbb::interface5::internal::split_ordered_list< T, Allocator >::max_size ( ) const
inline

Definition at line 417 of file _concurrent_unordered_impl.h.

417  {
418  return my_node_allocator.max_size();
419  }
tbb::internal::allocator_rebind< allocator_type, node >::type my_node_allocator

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::max_size().

Here is the caller graph for this function:

◆ move_all()

template<typename T, typename Allocator>
void tbb::interface5::internal::split_ordered_list< T, Allocator >::move_all ( self_type source)
inline

Definition at line 642 of file _concurrent_unordered_impl.h.

643  {
644  raw_const_iterator first = source.raw_begin();
645  raw_const_iterator last = source.raw_end();
646 
647  if (first == last)
648  return;
649 
650  nodeptr_t previous_node = my_head;
651  raw_const_iterator begin_iterator = first++;
652 
653  // Move all elements one by one, including dummy ones
654  for (raw_const_iterator it = first; it != last;)
655  {
656  nodeptr_t pnode = it.get_node_ptr();
657 
658  nodeptr_t dummy_node = pnode->is_dummy() ? create_node(pnode->get_order_key()) : create_node(pnode->get_order_key(), pnode->my_element);
659  previous_node = try_insert_atomic(previous_node, dummy_node, NULL);
660  __TBB_ASSERT(previous_node != NULL, "Insertion must succeed");
661  raw_const_iterator where = it++;
662  source.erase_node(get_iterator(begin_iterator), where);
663  }
664  check_range();
665  }
auto last(Container &c) -> decltype(begin(c))
static nodeptr_t try_insert_atomic(nodeptr_t previous, nodeptr_t new_node, nodeptr_t current_node)
auto first(Container &c) -> decltype(begin(c))
#define __TBB_ASSERT(predicate, comment)
No-op version of __TBB_ASSERT.
Definition: tbb_stddef.h:165
flist_iterator< self_type, const value_type > raw_const_iterator

◆ raw_begin() [1/2]

◆ raw_begin() [2/2]

template<typename T, typename Allocator>
raw_const_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::raw_begin ( ) const
inline

Definition at line 442 of file _concurrent_unordered_impl.h.

442  {
443  return raw_const_iterator(my_head);
444  }
flist_iterator< self_type, const value_type > raw_const_iterator

◆ raw_end() [1/2]

template<typename T, typename Allocator>
raw_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::raw_end ( )
inline

Definition at line 446 of file _concurrent_unordered_impl.h.

446  {
447  return raw_iterator(0);
448  }
flist_iterator< self_type, value_type > raw_iterator

Referenced by tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::check_range(), tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::first_real_iterator(), tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::insert_dummy(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_equal_range(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_erase(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_extract(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_find(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_insert(), tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::move_all(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::unsafe_bucket_size(), and tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::unsafe_end().

Here is the caller graph for this function:

◆ raw_end() [2/2]

template<typename T, typename Allocator>
raw_const_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::raw_end ( ) const
inline

Definition at line 450 of file _concurrent_unordered_impl.h.

450  {
451  return raw_const_iterator(0);
452  }
flist_iterator< self_type, const value_type > raw_const_iterator

◆ size()

template<typename T, typename Allocator>
size_type tbb::interface5::internal::split_ordered_list< T, Allocator >::size ( ) const
inline

Definition at line 412 of file _concurrent_unordered_impl.h.

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::size().

Here is the caller graph for this function:

◆ swap()

template<typename T, typename Allocator>
void tbb::interface5::internal::split_ordered_list< T, Allocator >::swap ( self_type other)
inline

Definition at line 422 of file _concurrent_unordered_impl.h.

423  {
424  if (this == &other)
425  {
426  // Nothing to do
427  return;
428  }
429 
430  std::swap(my_element_count, other.my_element_count);
431  std::swap(my_head, other.my_head);
432  }
void swap(atomic< T > &lhs, atomic< T > &rhs)
Definition: atomic.h:564

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::swap().

Here is the caller graph for this function:

◆ try_insert()

template<typename T, typename Allocator>
std::pair<iterator, bool> tbb::interface5::internal::split_ordered_list< T, Allocator >::try_insert ( raw_iterator  it,
raw_iterator  next,
nodeptr_t  pnode,
size_type *  new_count 
)
inline

Definition at line 523 of file _concurrent_unordered_impl.h.

524  {
525  nodeptr_t inserted_node = try_insert_atomic(it.get_node_ptr(), pnode, next.get_node_ptr());
526 
527  if (inserted_node == pnode)
528  {
529  // If the insert succeeded, check that the order is correct and increment the element count
530  check_range(it, next);
531  *new_count = tbb::internal::as_atomic(my_element_count).fetch_and_increment();
532  return std::pair<iterator, bool>(iterator(pnode, this), true);
533  }
534  else
535  {
536  return std::pair<iterator, bool>(end(), false);
537  }
538  }
solist_iterator< self_type, value_type > iterator
static nodeptr_t try_insert_atomic(nodeptr_t previous, nodeptr_t new_node, nodeptr_t current_node)
atomic< T > & as_atomic(T &t)
Definition: atomic.h:572

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_insert().

Here is the caller graph for this function:

◆ try_insert_atomic()

template<typename T, typename Allocator>
static nodeptr_t tbb::interface5::internal::split_ordered_list< T, Allocator >::try_insert_atomic ( nodeptr_t  previous,
nodeptr_t  new_node,
nodeptr_t  current_node 
)
inlinestatic

Friends And Related Function Documentation

◆ concurrent_unordered_base

template<typename T, typename Allocator>
template<typename Traits >
friend class concurrent_unordered_base
friend

Definition at line 671 of file _concurrent_unordered_impl.h.

Member Data Documentation

◆ my_element_count

◆ my_head

◆ my_node_allocator


The documentation for this class was generated from the following file:

Copyright © 2005-2020 Intel Corporation. All Rights Reserved.

Intel, Pentium, Intel Xeon, Itanium, Intel XScale and VTune are registered trademarks or trademarks of Intel Corporation or its subsidiaries in the United States and other countries.

* Other names and brands may be claimed as the property of others.