#ifndef __TBB__concurrent_unordered_impl_H
#define __TBB__concurrent_unordered_impl_H

#if !defined(__TBB_concurrent_unordered_map_H) && !defined(__TBB_concurrent_unordered_set_H) && !defined(__TBB_concurrent_hash_map_H)
#error Do not #include this internal file directly; use public TBB headers instead.
#endif

#include "../tbb_stddef.h"
#include __TBB_STD_SWAP_HEADER
#include "../atomic.h"
#include "../tbb_exception.h"
#include "../tbb_allocator.h"

#if __TBB_INITIALIZER_LISTS_PRESENT
    #include <initializer_list>
#endif

#if __TBB_CPP11_RVALUE_REF_PRESENT && !__TBB_IMPLICIT_COPY_DELETION_BROKEN
    #define __TBB_UNORDERED_NODE_HANDLE_PRESENT 1
#endif

#if __TBB_UNORDERED_NODE_HANDLE_PRESENT
#endif // __TBB_UNORDERED_NODE_HANDLE_PRESENT

namespace interface5 {
// forward declarations
template <typename T, typename Allocator>
template <typename Traits>

// flist_iterator: forward iterator over the underlying forward list
template<class Solist, typename Value>
class flist_iterator : public std::iterator<std::forward_iterator_tag, Value>
    // friend declarations
    template <typename T, typename Allocator>
    template <typename Traits>
    template<class M, typename V>
    template<typename M, typename T, typename U>
    template<typename M, typename T, typename U>

// comparison operators for flist_iterator
template<typename Solist, typename T, typename U>
template<typename Solist, typename T, typename U>

// solist_iterator
template<class Solist, typename Value>
    // friend declarations
    template <typename T, typename Allocator>
    template<class M, typename V>
    template <typename Traits>
    template<typename M, typename T, typename U>
    template<typename M, typename T, typename U>

// comparison operators for solist_iterator
template<typename Solist, typename T, typename U>
template<typename Solist, typename T, typename U>

// split_ordered_list<T, Allocator>
template <typename T, typename Allocator>
            // typed access to the element stored in a list node
            return reinterpret_cast<value_type*>(&my_element);

            // atomic_set_next: CAS this node's my_next from current_node to new_node
            if (exchange_node == current_node)
                return new_node;      // CAS succeeded
            return exchange_node;     // CAS failed: hand back the interfering node
        // create_node(order_key): allocate a dummy node that carries no value
            pnode->init(order_key);

        // create_node(order_key, t, true_type): construct the element in place,
        // then stamp the node with its split-order key
        template<typename Arg>
            new(static_cast<void*>(&pnode->my_element)) T(tbb::internal::forward<Arg>(t));
            pnode->init(order_key);

        // create_node(order_key, t, false_type): compile-time helper that must never be reached
        template<typename Arg>
            __TBB_ASSERT(false, "This compile-time helper should never get called");

        // create_node_v(args...): variadic in-place construction of the element
        template<typename __TBB_PARAMETER_PACK Args>
            new(static_cast<void*>(&pnode->my_element)) T(__TBB_PACK_EXPANSION(tbb::internal::forward<Args>(args)));
        // ~split_ordered_list: after clear(), only the permanent head node remains,
        // and it must already be unlinked
        __TBB_ASSERT(pnode != NULL && pnode->my_next == NULL, "Invalid head list node");

        // clear(): detach the chain from the head, then walk it and destroy every node
        pnext = pnode->my_next;
        pnode->my_next = NULL;
        while (pnode != NULL)
            pnext = pnode->my_next;
        // try_insert: link the new node in front of the cached successor
        new_node->my_next = current_node;

        // try_insert(it, next, pnode, new_count): success means the node that ended
        // up linked is exactly the one this thread tried to insert
        if (inserted_node == pnode)
            return std::pair<iterator, bool>(iterator(pnode, this), true);

        // another thread linked a different node first
        return std::pair<iterator, bool>(end(), false);
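The insertion path above hinges on a single compare-and-swap of the predecessor's next pointer (atomic_set_next / try_insert_atomic in the member index below). A minimal stand-alone sketch of the same technique using std::atomic; Node and try_insert_after are illustrative names, not TBB's:

#include <atomic>

struct Node {
    int value;
    std::atomic<Node*> next;
    explicit Node(int v) : value(v), next(NULL) {}
};

// Link new_node after prev, but only if prev->next still equals the successor
// the caller observed. On failure the caller re-reads the list and retries,
// or discovers that an equal element was inserted by another thread.
inline bool try_insert_after(Node* prev, Node* new_node, Node* expected_next) {
    new_node->next.store(expected_next, std::memory_order_relaxed);
    return prev->next.compare_exchange_strong(expected_next, new_node);
}

Because the node is fully initialized before the CAS publishes it, readers never observe a half-constructed element; a failed CAS leaves the list untouched.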
        // insert_dummy: the dummy bucket node is counted only if this thread's CAS linked it
        if (inserted_node == dummy_node)

        // erase_node_impl: unlink the node that follows 'previous'
        nodeptr_t pnode = (where++).get_node_ptr();
        __TBB_ASSERT(prevnode->my_next == pnode, "Erase must take consecutive iterators");
        prevnode->my_next = pnode->my_next;
    // erase_node: AllowDestroy selects whether the unlinked node is freed or handed back
    template<typename AllowDestroy>

    // move_all: every node taken from the source list must re-insert successfully
        __TBB_ASSERT(previous_node != NULL, "Insertion must succeed");

    template <typename Traits>
#if defined(_MSC_VER) && !defined(__INTEL_COMPILER)
#pragma warning(push)
#pragma warning(disable: 4127) // warning C4127: conditional expression is constant
#endif

// concurrent_unordered_base<Traits>: shared implementation of the concurrent
// unordered containers, built on split_ordered_list
template <typename Traits>

#if __TBB_UNORDERED_NODE_HANDLE_PRESENT
#endif // __TBB_UNORDERED_NODE_HANDLE_PRESENT

    using Traits::my_hash_compare;
    using Traits::get_key;
    using Traits::allow_multimapping;

    template<typename OtherTraits>

    typedef std::pair<const_iterator, const_iterator> paircc_t;
        // constructor: a hash table needs at least one bucket
        if( n_of_buckets == 0) ++n_of_buckets;

#if __TBB_CPP11_RVALUE_REF_PRESENT
        // allocator-extended move constructor
        call_internal_clear_on_exit clear_buckets_on_exception(this);

        if (a == right.get_allocator()){
            // equal allocators: the whole list can be taken over directly;
            // otherwise dummy and real nodes are recreated one by one
        if (! right.my_solist.empty()){
            const nodeptr_t pnode = it.get_node_ptr();
            if (pnode->is_dummy()) {
            __TBB_ASSERT(previous_node != NULL, "Insertion of node failed. Concurrent inserts in constructor ?");
        clear_buckets_on_exception.dismiss();
#endif // __TBB_CPP11_RVALUE_REF_PRESENT

#if __TBB_CPP11_RVALUE_REF_PRESENT
        // move assignment: build a moved copy and swap with it
        this->swap(moved_copy);
#endif // __TBB_CPP11_RVALUE_REF_PRESENT

#if __TBB_INITIALIZER_LISTS_PRESENT
        // assignment from an initializer list
        this->insert(il.begin(),il.end());
#endif // __TBB_INITIALIZER_LISTS_PRESENT

#if __TBB_UNORDERED_NODE_HANDLE_PRESENT
    // internal_merge: move nodes from a compatible container into this one
    template<typename SourceType>
        typedef typename SourceType::iterator source_iterator;
        __TBB_STATIC_ASSERT((tbb::internal::is_same_type<node_type,
                            typename SourceType::node_type>::value),
                            "Incompatible containers cannot be merged");

        for(source_iterator it = source.begin(); it != source.end();) {
            source_iterator where = it++;
            if (allow_multimapping || find(get_key(*where)) == end()) {
                std::pair<node_type, raw_iterator> extract_result = source.internal_extract(where);

                // remember the order key so that, if insertion into *this fails,
                // the node can be linked back into the source at its old position
                sokey_t old_order_key = extract_result.first.my_node->get_order_key();
                    extract_result.first.my_node->init(old_order_key);
                    __TBB_ASSERT(extract_result.first.my_node->get_order_key() >= current.get_node_ptr()->get_order_key(),
                                 "Wrong nodes order in source container");
                    __TBB_ASSERT(extract_result.first.my_node->get_order_key() <= next.get_node_ptr()->get_order_key(),
                                 "Wrong nodes order in source container");
                    size_t new_count = 0;
                    bool insert_result =
                        source.my_solist.try_insert(current, next, extract_result.first.my_node, &new_count).second;
                    __TBB_ASSERT_EX(insert_result,
                                                   "Changing source container while merging is unsafe.");
                // insertion into *this succeeded: the handle releases ownership of the node
                extract_result.first.deactivate();
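In the public containers this machinery backs the C++17-style node operations. A hedged usage sketch, assuming the public wrappers merge() and unsafe_extract() forward to internal_merge/internal_extract as in TBB releases that define __TBB_UNORDERED_NODE_HANDLE_PRESENT:

#include "tbb/concurrent_unordered_map.h"
#include <string>

int main() {
    typedef tbb::concurrent_unordered_map<int, std::string> map_t;
    map_t a, b;
    a.emplace(1, "one");
    b.emplace(1, "also one");   // clashes with a key already present in 'a'
    b.emplace(2, "two");

    // Nodes whose keys are absent from 'a' are re-linked into it without copying
    // the elements; clashing nodes (key 1) stay behind in 'b'.
    a.merge(b);

    return (a.size() == 2 && b.size() == 1) ? 0 : 1;
}

Note the assertion in the code above: the source container must not be modified concurrently while a merge is in progress.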
#endif // __TBB_UNORDERED_NODE_HANDLE_PRESENT

                    // set_midpoint: the chosen split position must lie between the range's ends
                    __TBB_ASSERT( begin_key < mid_key, "my_begin_node is after my_midpoint_node" );
                    __TBB_ASSERT( mid_key <= end_key, "my_midpoint_node is after my_end_node" );
#endif // TBB_USE_ASSERT

        // whole-container ranges for parallel algorithms
        return range_type( *this );
        return const_range_type( *this );
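These range types exist so the container can feed TBB's parallel algorithms; set_midpoint() supplies the split position that the asserts above validate. A minimal sketch of the usual pattern, assuming the public container re-exports range() and const_range_type (parallel_sum and table_t are illustrative names):

#include "tbb/concurrent_unordered_map.h"
#include "tbb/parallel_for.h"
#include <atomic>

typedef tbb::concurrent_unordered_map<int, int> table_t;

// Sum the mapped values in parallel; parallel_for recursively splits the
// container range and hands each piece to the lambda.
long long parallel_sum(const table_t& table) {
    std::atomic<long long> total(0);
    tbb::parallel_for(table.range(), [&](const table_t::const_range_type& r) {
        long long local = 0;
        for (table_t::const_range_type::iterator it = r.begin(); it != r.end(); ++it)
            local += it->second;
        total.fetch_add(local, std::memory_order_relaxed);
    });
    return total.load();
}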
#if __TBB_CPP11_RVALUE_REF_PRESENT
#if __TBB_UNORDERED_NODE_HANDLE_PRESENT
            // insert(node_type&&): re-link the node carried by the handle; on success
            // the handle gives up ownership
            std::pair<iterator, bool> insert_result =
                                                      (handled_node->my_element, handled_node);
            if (insert_result.second)
            return insert_result;

        // an empty handle inserts nothing
        return std::pair<iterator, bool>(end(), false);
#endif // __TBB_UNORDERED_NODE_HANDLE_PRESENT

#if __TBB_CPP11_VARIADIC_TEMPLATES_PRESENT && __TBB_CPP11_RVALUE_REF_PRESENT
    template<typename... Args>
    std::pair<iterator, bool> emplace(Args&&... args) {

    // emplace_hint ignores the hint and forwards to emplace
    template<typename... Args>
        return emplace(tbb::internal::forward<Args>(args)...).first;
#endif // __TBB_CPP11_VARIADIC_TEMPLATES_PRESENT && __TBB_CPP11_RVALUE_REF_PRESENT

    // range insert
    template<class Iterator>
        for (Iterator it = first; it != last; ++it)

#if __TBB_INITIALIZER_LISTS_PRESENT
    void insert(std::initializer_list<value_type> il) {
        insert(il.begin(), il.end());
#endif // __TBB_INITIALIZER_LISTS_PRESENT
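All of these insert()/emplace() overloads are safe to call from many threads at once; the returned bool reports whether the calling thread created the element or lost the race to an equal key. A short usage sketch of that contract (map_t and winners are illustrative names):

#include "tbb/concurrent_unordered_map.h"
#include "tbb/parallel_for.h"
#include <atomic>
#include <string>

int main() {
    typedef tbb::concurrent_unordered_map<int, std::string> map_t;
    map_t table;
    std::atomic<int> winners(0);

    tbb::parallel_for(0, 1000, [&](int i) {
        std::pair<map_t::iterator, bool> r = table.emplace(i % 100, "value");
        if (r.second)
            winners.fetch_add(1);   // this thread inserted key i % 100
    });

    // exactly one emplace per distinct key reports success
    return (winners.load() == 100 && table.size() == 100) ? 0 : 1;
}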
#if __TBB_UNORDERED_NODE_HANDLE_PRESENT
#endif // __TBB_UNORDERED_NODE_HANDLE_PRESENT

        // swap
        if (this != &right) {
            std::swap(my_hash_compare, right.my_hash_compare);

        // hash_function() and key_eq() expose the two halves of hash_compare
        return my_hash_compare.my_hash_object;
        return my_hash_compare.my_key_compare_object;

        // const find() forwards to the non-const internal_find
        return const_cast<self_type*>(this)->internal_find(key);

        // count(): for a multimap, the size of the equal range ...
        if(allow_multimapping) {
        // ... otherwise (the else branch) the answer is 0 or 1
            return const_cast<self_type*>(this)->internal_find(key) == end()?0:1;

        // const equal_range() forwards to internal_equal_range
        return const_cast<self_type*>(this)->internal_equal_range(key);
        // non-const bucket iterators reuse the const versions
        return ((const self_type *) this)->unsafe_begin(bucket);
        return ((const self_type *) this)->unsafe_end(bucket);

        // max_load_factor(newmax): reject NaN (self-inequality) and negative values
        if (newmax != newmax || newmax < 0)

        // rehash(buckets): the bucket count only ever grows
        if (current_buckets >= buckets)

                for (size_type index2 = 0; index2 < sz; ++index2)
    // internal_insert: AllowCreate/AllowDestroy select whether a node is built from
    // 'value' or an existing node (pnode) is linked in
    template<typename AllowCreate, typename AllowDestroy, typename ValueType>
            pnode->init(order_key);
                 !my_hash_compare(get_key(*where), *pkey)))
                    pkey = &get_key(pnode->my_element);
                std::pair<iterator, bool> result = my_solist.try_insert(previous, where, pnode, &new_count);
                     !my_hash_compare(get_key(*where), *pkey))

        // internal_find: probe the bucket's list until the key or the next bucket is reached
                if (!my_hash_compare(get_key(*it), key))

#if __TBB_UNORDERED_NODE_HANDLE_PRESENT
        // node extraction: nothing to extract, return an empty handle
        return std::pair<node_type, iterator>(node_type(), end());
#endif // __TBB_UNORDERED_NODE_HANDLE_PRESENT

        // internal_equal_range: extend 'last' past every element with an equal key
                     !my_hash_compare(get_key(*it), key))
                do ++last; while( allow_multimapping && last != end() && !my_hash_compare(get_key(*last), key) );
        // init_bucket: bucket 0 is set up at construction and never reaches lazy initialization
        __TBB_ASSERT( bucket != 0, "The first bucket must always be initialized");

        // set_bucket: lazily allocate the bucket segment, zero it, and publish it with a
        // single compare-and-swap; the loser of the race deallocates its copy
            std::memset(static_cast<void*>(new_segment), 0, sz*sizeof(raw_iterator));
            if (my_buckets[segment].compare_and_swap( new_segment, NULL) != NULL)

#if defined(_MSC_VER) && !defined(__INTEL_COMPILER)
#pragma warning(pop) // warning 4127 is back
#endif

#endif // __TBB__concurrent_unordered_impl_H
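The bucket machinery indexed below (split_order_key_regular, split_order_key_dummy, __TBB_ReverseBits, get_parent, prepare_bucket) follows the split-ordered list scheme of Shalev and Shavit: a key's hash is bit-reversed so that every bucket's elements form one contiguous run of the list, and the lowest bit of the reversed key separates real elements from the dummy nodes that mark bucket boundaries. A minimal sketch of that key computation; reverse_bits here is a portable stand-in for the intrinsic-based __TBB_ReverseBits, and the two key helpers mirror the members listed below without being the TBB implementation:

#include <cstddef>

typedef std::size_t sokey_t;

// Bit-reverse the hash value (portable loop; TBB uses tables/intrinsics).
inline sokey_t reverse_bits(sokey_t x) {
    sokey_t r = 0;
    for (unsigned i = 0; i < sizeof(sokey_t) * 8; ++i) {
        r = (r << 1) | (x & 1);
        x >>= 1;
    }
    return r;
}

// A regular (element) key is the reversed hash with the lowest bit set;
// a dummy (bucket sentinel) key has the lowest bit cleared, so a bucket's
// dummy node always sorts immediately before the elements that hash to it.
inline sokey_t split_order_key_regular(sokey_t hash) { return reverse_bits(hash) | 0x1; }
inline sokey_t split_order_key_dummy(sokey_t hash)   { return reverse_bits(hash) & ~sokey_t(0x1); }

This ordering is what lets the table double its bucket count without rehashing: a new bucket's elements are already adjacent to those of its parent bucket (get_parent clears the bucket index's most significant set bit), so splitting a bucket only requires inserting one new dummy node.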
static size_type segment_base(size_type k)
 
solist_t::nodeptr_t nodeptr_t
 
const_iterator end() const
 
concurrent_unordered_base(size_type n_of_buckets=initial_bucket_number, const hash_compare &hc=hash_compare(), const allocator_type &a=allocator_type())
 
tbb::internal::allocator_traits< allocator_type >::pointer pointer
 
float load_factor() const
 
flist_iterator & operator++()
 
bool_constant< true > true_type
 
iterator insert(const_iterator, const value_type &value)
 
tbb::internal::allocator_traits< allocator_type >::value_type value_type
 
concurrent_unordered_base & operator=(const concurrent_unordered_base &right)
 
Traits::allocator_type allocator_type
 
std::pair< iterator, bool > insert(const value_type &value)
 
void swap(concurrent_unordered_base &right)
 
reference operator *() const
 
tbb::internal::allocator_traits< allocator_type >::difference_type difference_type
 
static size_type const pointers_per_table
 
nodeptr_t atomic_set_next(nodeptr_t new_node, nodeptr_t current_node)
 
concurrent_unordered_base * my_instance
 
pairii_t internal_equal_range(const key_type &key)
 
hash_compare::key_equal key_equal
 
allocator_type::value_type value_type
 
iterator internal_erase(const_iterator it)
 
static iterator get_iterator(const_iterator it)
 
const allocator_type::value_type & const_reference
 
allocator_type::value_type & reference
 
float max_load_factor() const
 
Solist::value_type value_type
 
nodeptr_t get_node_ptr() const
 
tbb::internal::allocator_traits< allocator_type >::size_type size_type
 
iterator first_real_iterator(raw_iterator it)
 
nodeptr_t get_node_ptr() const
 
void internal_swap_buckets(concurrent_unordered_base &right)
 
solist_iterator & operator++()
 
solist_t::const_iterator const_iterator
 
void swap(self_type &other)
 
Solist::value_type value_type
 
solist_iterator< self_type, const value_type > const_iterator
 
iterator insert(const_iterator, value_type &&value)
 
solist_iterator(nodeptr_t pnode, const Solist *plist)
 
Hash_compare hash_compare
 
const_iterator const_local_iterator
 
concurrent_unordered_base(const concurrent_unordered_base &right)
 
raw_iterator get_bucket(size_type bucket) const
 
const_local_iterator unsafe_cend(size_type bucket) const
 
atomic< raw_iterator * > my_buckets[pointers_per_table]
 
void move_all(self_type &source)
 
nodeptr_t create_node(sokey_t, __TBB_FORWARDING_REF(Arg), tbb::internal::false_type)
 
local_iterator unsafe_end(size_type bucket)
 
tbb::internal::allocator_rebind< Allocator, value_type >::type allocator_type
 
void erase_node(raw_iterator previous, raw_const_iterator &where, tbb::internal::true_type)
 
hasher hash_function() const
 
sokey_t get_order_key() const
 
Base class for types that should not be assigned.
 
tbb::internal::allocator_traits< allocator_type >::difference_type difference_type
 
friend bool operator==(const solist_iterator< M, T > &i, const solist_iterator< M, U > &j)
 
const_iterator first_real_iterator(raw_const_iterator it) const
 
#define __TBB_PACK_EXPANSION(A)
 
pointer operator->() const
 
size_type unsafe_bucket_count() const
 
solist_t::iterator iterator
 
tbb::internal::allocator_traits< allocator_type >::const_pointer const_pointer
 
iterator erase_node(raw_iterator previous, const_iterator &where)
 
float my_maximum_bucket_size
 
solist_iterator operator++(int)
 
raw_iterator insert_dummy(raw_iterator it, sokey_t order_key)
 
allocator_type get_allocator() const
 
flist_iterator< Solist, Value > base_type
 
Traits::key_type key_type
 
auto last(Container &c) -> decltype(begin(c))
 
bool_constant< false > false_type
 
void rehash(size_type buckets)
 
~concurrent_unordered_base()
 
void internal_merge(SourceType &source)
 
bool is_initialized(size_type bucket) const
 
allocator_traits< Alloc >::template rebind_alloc< T >::other type
 
std::pair< iterator, bool > try_insert(raw_iterator it, raw_iterator next, nodeptr_t pnode, size_type *new_count)
 
const_iterator find(const key_type &key) const
 
flist_iterator< self_type, value_type > raw_iterator
 
void suppress_unused_warning(const T1 &)
Utility template function to prevent "unused" warnings by various compilers.
 
void init_bucket(size_type bucket)
 
const_iterator get_iterator(raw_const_iterator it) const
 
const_local_iterator unsafe_end(size_type bucket) const
 
size_type unsafe_bucket_size(size_type bucket)
 
local_iterator unsafe_begin(size_type bucket)
 
Solist::reference reference
 
static size_type segment_index_of(size_type index)
 
bool operator!=(const hash_map_iterator< Container, T > &i, const hash_map_iterator< Container, U > &j)
 
const_iterator begin() const
 
tbb::internal::allocator_rebind< Allocator, T >::type allocator_type
 
concurrent_unordered_base::difference_type difference_type
 
solist_iterator< self_type, value_type > iterator
 
allocator_type::size_type size_type
 
const Solist * my_list_ptr
 
tbb::internal::allocator_traits< allocator_type >::const_pointer const_pointer
 
#define __TBB_STATIC_ASSERT(condition, msg)
 
iterator erase_node(raw_iterator previous, const_iterator where, AllowDestroy)
 
static nodeptr_t try_insert_atomic(nodeptr_t previous, nodeptr_t new_node, nodeptr_t current_node)
 
raw_const_iterator my_end_node
 
Solist::nodeptr_t nodeptr_t
 
raw_const_iterator my_midpoint_node
 
flist_iterator(const flist_iterator< Solist, typename Solist::value_type > &other)
 
concurrent_unordered_base::size_type size_type
Type for size of a range.
 
nodeptr_t create_node(sokey_t order_key)
 
size_type max_size() const
 
concurrent_unordered_base::iterator iterator
 
nodeptr_t create_node(sokey_t order_key, __TBB_FORWARDING_REF(Arg) t, tbb::internal::true_type=tbb::internal::true_type())
 
void max_load_factor(float newmax)
 
std::pair< const_iterator, const_iterator > paircc_t
 
static const size_type initial_bucket_load
 
void set_midpoint() const
Set my_midpoint_node to point approximately half way between my_begin_node and my_end_node.
 
static size_type segment_size(size_type k)
 
friend bool operator!=(const solist_iterator< M, T > &i, const solist_iterator< M, U > &j)
 
const_iterator cbegin() const
 
range_type(range_type &r, split)
Split range.
 
iterator insert(const_iterator, node_type &&nh)
 
flist_iterator & operator=(const flist_iterator< Solist, typename Solist::value_type > &other)
 
iterator emplace_hint(const_iterator, Args &&... args)
 
tbb::internal::allocator_rebind< allocator_type, raw_iterator >::type my_allocator
 
pointer operator->() const
 
concurrent_unordered_base(const concurrent_unordered_base &right, const allocator_type &a)
 
split_ordered_list(allocator_type a=allocator_type())
 
concurrent_unordered_base(concurrent_unordered_base &&right)
 
static const size_type initial_bucket_number
 
concurrent_unordered_base::reference reference
 
reference operator *() const
 
const_local_iterator unsafe_begin(size_type bucket) const
 
void adjust_table_size(size_type total_elements, size_type current_size)
 
split_ordered_list< T, Allocator > self_type
 
size_type get_parent(size_type bucket) const
 
const_range_type(const_range_type &r, split)
Split range.
 
void destroy_node(nodeptr_t pnode)
 
bool operator==(const hash_map_iterator< Container, T > &i, const hash_map_iterator< Container, U > &j)
 
sokey_t split_order_key_dummy(sokey_t order_key) const
 
nodeptr_t create_node_v(__TBB_FORWARDING_REF(Args) __TBB_PARAMETER_PACK args)
 
std::pair< const Key, T > value_type
 
bool empty() const
True if range is empty.
 
concurrent_unordered_base & operator=(concurrent_unordered_base &&other)
 
size_type unsafe_max_bucket_count() const
 
std::pair< const_iterator, const_iterator > equal_range(const key_type &key) const
 
const_range_type(const concurrent_unordered_base &a_table)
Init range with container and grainsize specified.
 
Solist::difference_type difference_type
 
void internal_copy(const self_type &right)
 
call_internal_clear_on_exit(concurrent_unordered_base *instance)
 
iterator unsafe_erase(const_iterator first, const_iterator last)
 
const_iterator cbegin() const
 
T __TBB_ReverseBits(T src)
 
solist_iterator & operator=(const solist_iterator< Solist, typename Solist::value_type > &other)
 
tbb::internal::allocator_traits< allocator_type >::pointer pointer
 
#define __TBB_ASSERT_EX(predicate, comment)
"Extended" version is useful to suppress warnings if a variable is only used with an assert
 
std::pair< iterator, bool > insert(node_type &&nh)
 
range_type(const concurrent_unordered_base &a_table)
Init range with container and grainsize specified.
 
size_type unsafe_bucket(const key_type &key) const
 
allocator_type::const_pointer const_pointer
 
Dummy type that distinguishes splitting constructor from copy constructor.
 
iterator find(const key_type &key)
 
solist_t::raw_iterator raw_iterator
 
void set_bucket(size_type bucket, raw_iterator dummy_head)
 
node_type unsafe_extract(const_iterator where)
 
const concurrent_unordered_base & my_table
 
static sokey_t get_safe_order_key(const raw_const_iterator &it)
 
bool is_divisible() const
True if range can be partitioned into two subranges.
 
tbb::internal::allocator_rebind< allocator_type, node >::type my_node_allocator
 
const_local_iterator unsafe_cbegin(size_type bucket) const
 
raw_const_iterator raw_end() const
 
auto first(Container &c) -> decltype(begin(c))
 
iterator unsafe_erase(const_iterator where)
 
concurrent_unordered_base::const_iterator iterator
 
const_iterator cend() const
 
Detects whether two given types are the same.
 
flist_iterator operator++(int)
 
allocator_type::pointer pointer
 
void swap(atomic< T > &lhs, atomic< T > &rhs)
 
allocator_type get_allocator() const
 
static size_type internal_distance(const_iterator first, const_iterator last)
 
hash_compare::hasher hasher
 
std::pair< iterator, iterator > equal_range(const key_type &key)
 
void init(sokey_t order_key)
 
intptr_t __TBB_Log2(uintptr_t x)
 
std::pair< iterator, bool > insert(value_type &&value)
 
hash_compare my_hash_compare
 
solist_iterator(const solist_iterator< Solist, typename Solist::value_type > &other)
 
concurrent_unordered_base(concurrent_unordered_base &&right, const allocator_type &a)
 
const_iterator begin() const
 
iterator internal_find(const key_type &key)
 
const_range_type range() const
 
nodeptr_t erase_node_impl(raw_iterator previous, raw_const_iterator &where)
 
~call_internal_clear_on_exit()
 
raw_iterator get_iterator(raw_const_iterator it)
 
const value_type & const_reference
 
friend bool operator==(const flist_iterator< M, T > &i, const flist_iterator< M, U > &j)
 
solist_t::raw_const_iterator raw_const_iterator
 
std::pair< iterator, iterator > pairii_t
 
size_type unsafe_erase(const key_type &key)
 
size_type max_size() const
 
#define __TBB_PARAMETER_PACK
 
#define __TBB_FORWARDING_REF(A)
 
void erase_node(raw_iterator previous, raw_const_iterator &where)
 
#define __TBB_ASSERT(predicate, comment)
No-op version of __TBB_ASSERT.
 
const_iterator end() const
 
flist_iterator(nodeptr_t pnode)
 
raw_iterator prepare_bucket(sokey_t hash_key)
 
concurrent_unordered_base::value_type value_type
 
const_iterator cend() const
 
Solist::reference reference
 
Solist::nodeptr_t nodeptr_t
 
size_type grainsize() const
The grain size for this range.
 
void check_range(raw_iterator first, raw_iterator last)
 
atomic< T > & as_atomic(T &t)
 
raw_const_iterator raw_begin() const
 
void throw_exception(exception_id eid)
Versionless convenience wrapper for throw_exception_v4()
 
sokey_t split_order_key_regular(sokey_t order_key) const
 
std::pair< node_type, raw_iterator > internal_extract(const_iterator it)
 
Traits::hash_compare hash_compare
 
concurrent_unordered_base< Traits > self_type
 
raw_const_iterator my_begin_node
 
void insert(Iterator first, Iterator last)
 
node_type unsafe_extract(const key_type &key)
 
size_type my_element_count
 
std::pair< iterator, bool > internal_insert(__TBB_FORWARDING_REF(ValueType) value, nodeptr_t pnode=NULL)
 
static sokey_t get_order_key(const raw_const_iterator &it)
 
allocator_type::difference_type difference_type
 
tbb::internal::allocator_traits< allocator_type >::size_type size_type
 
std::pair< iterator, bool > emplace(Args &&... args)
 
flist_iterator< self_type, const value_type > raw_const_iterator
 
friend bool operator!=(const flist_iterator< M, T > &i, const flist_iterator< M, U > &j)
 
Traits::node_type node_type
 
iterator get_iterator(raw_iterator it)
 
split_ordered_list< value_type, typename Traits::allocator_type > solist_t
 
size_type count(const key_type &key) const
 
Traits::value_type value_type
 
atomic< size_type > my_number_of_buckets
 
void erase_node(raw_iterator previous, raw_const_iterator &where, tbb::internal::false_type)
 
void move(tbb_thread &t1, tbb_thread &t2)
 
Solist::difference_type difference_type