#ifndef __TBB__concurrent_unordered_impl_H
#define __TBB__concurrent_unordered_impl_H

#if !defined(__TBB_concurrent_unordered_map_H) && !defined(__TBB_concurrent_unordered_set_H) && !defined(__TBB_concurrent_hash_map_H)
#error Do not #include this internal file directly; use public TBB headers instead.
#endif

#include "../tbb_stddef.h"
// ... (standard library includes)
#include <cstring>   // memset
#include __TBB_STD_SWAP_HEADER

#include "../atomic.h"
#include "../tbb_exception.h"
#include "../tbb_allocator.h"

#if __TBB_INITIALIZER_LISTS_PRESENT
#include <initializer_list>
#endif

#if __TBB_CPP11_RVALUE_REF_PRESENT && !__TBB_IMPLICIT_COPY_DELETION_BROKEN
#define __TBB_UNORDERED_NODE_HANDLE_PRESENT 1
#endif

#if __TBB_UNORDERED_NODE_HANDLE_PRESENT
// ... (node handle support header)
#endif // __TBB_UNORDERED_NODE_HANDLE_PRESENT

namespace interface5 {
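/* Orientation (illustrative sketch, not part of the original header): the
   machinery below backs the public containers tbb::concurrent_unordered_map
   and tbb::concurrent_unordered_set. A minimal usage sketch, assuming the
   standard TBB public headers:

       #include "tbb/concurrent_unordered_map.h"
       #include <string>

       tbb::concurrent_unordered_map<int, std::string> table;
       table.insert(std::make_pair(1, "one"));  // thread-safe
       table.emplace(2, "two");                 // thread-safe
       bool found = table.find(1) != table.end(); // thread-safe
       table.unsafe_erase(1);  // NOT thread-safe, hence the unsafe_ prefix
*/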
// Split-ordered keys are machine words
typedef size_t sokey_t;

// Forward declarations
template <typename T, typename Allocator>
class split_ordered_list;

template <typename Traits>
class concurrent_unordered_base;
// Forward list iterator over every node of the split-ordered list, dummy nodes included
template <class Solist, typename Value>
class flist_iterator : public std::iterator<std::forward_iterator_tag, Value>
{
    template <typename T, typename Allocator>
    friend class split_ordered_list;
    template <typename Traits>
    friend class concurrent_unordered_base;
    template <class M, typename V>
    friend class flist_iterator;
    // ... (construction, dereference, pre/post increment, my_node_ptr member)
};

template <typename M, typename T, typename U>
bool operator==( const flist_iterator<M,T>& i, const flist_iterator<M,U>& j );
template <typename M, typename T, typename U>
bool operator!=( const flist_iterator<M,T>& i, const flist_iterator<M,U>& j );

// Forward declarations for the element-only iterator and its comparisons
template <class Solist, typename Value>
class solist_iterator;

template <typename Solist, typename T, typename U>
bool operator==( const solist_iterator<Solist,T>& i, const solist_iterator<Solist,U>& j );
template <typename Solist, typename T, typename U>
bool operator!=( const solist_iterator<Solist,T>& i, const solist_iterator<Solist,U>& j );
// Iterator over the real elements of the split-ordered list: dummy nodes are skipped
template <class Solist, typename Value>
class solist_iterator : public flist_iterator<Solist, Value>
{
    typedef flist_iterator<Solist, Value> base_type;
    typedef typename Solist::nodeptr_t nodeptr_t;
    typedef typename Solist::reference reference;
    using base_type::get_node_ptr;

    template <typename T, typename Allocator>
    friend class split_ordered_list;
    template <class M, typename V>
    friend class solist_iterator;
    template <typename Traits>
    friend class concurrent_unordered_base;
    template <typename M, typename T, typename U>
    friend bool operator==( const solist_iterator<M,T>& i, const solist_iterator<M,U>& j );
    template <typename M, typename T, typename U>
    friend bool operator!=( const solist_iterator<M,T>& i, const solist_iterator<M,U>& j );

    solist_iterator(nodeptr_t pnode,
        const Solist *plist) : base_type(pnode), my_list_ptr(plist) {}

public:
    solist_iterator() {}
    solist_iterator(const solist_iterator<Solist, typename Solist::value_type>& other)
        : base_type(other), my_list_ptr(other.my_list_ptr) {}

    reference operator*() const {
        return this->base_type::operator*();
    }
    // ... (operator->)

    solist_iterator& operator++() {
        // Advance at least once, then keep going while the node is a dummy
        do ++(*(base_type *)this);
        while (get_node_ptr() != NULL && get_node_ptr()->is_dummy());
        return (*this);
    }
    // ... (post-increment; my_list_ptr member)
};
template <typename Solist, typename T, typename U>
bool operator==( const solist_iterator<Solist,T>& i, const solist_iterator<Solist,U>& j ) {
    return i.my_node_ptr == j.my_node_ptr && i.my_list_ptr == j.my_list_ptr;
}
template <typename Solist, typename T, typename U>
bool operator!=( const solist_iterator<Solist,T>& i, const solist_iterator<Solist,U>& j ) {
    return i.my_node_ptr != j.my_node_ptr || i.my_list_ptr != j.my_list_ptr;
}
// Lock-free, always-sorted forward list keyed by split order
template <typename T, typename Allocator>
class split_ordered_list
{
public:
    typedef split_ordered_list<T, Allocator> self_type;
    // ... (value/size/iterator typedefs; nodeptr_t is a pointer to node)

    // List node: carries the order key and, for non-dummy nodes, the element
    struct node : tbb::internal::no_assign
    {
        void init(sokey_t order_key) {
            my_order_key = order_key;
            my_next = NULL;
        }

        sokey_t get_order_key() const {
            return my_order_key;
        }

        // Return a pointer to the in-place element storage
        value_type* storage() {
            return reinterpret_cast<value_type*>(&my_element);
        }

        // Atomically link new_node after this node, provided my_next still
        // equals current_node; returns the winning successor either way
        nodeptr_t atomic_set_next(nodeptr_t new_node, nodeptr_t current_node) {
            nodeptr_t exchange_node = tbb::internal::as_atomic(my_next).compare_and_swap(new_node, current_node);
            if (exchange_node == current_node)
                return new_node;      // this thread linked new_node
            return exchange_node;     // another thread interfered
        }

        // Dummy (bucket marker) nodes have the low bit of the order key cleared
        bool is_dummy() const {
            return (my_order_key & 0x1) == 0;
        }

        nodeptr_t my_next;       // next node in the list
        sokey_t   my_order_key;  // bit-reversed hash, or bucket key for dummies
        // ... (element storage my_element)
    };

    // Allocate a dummy node that carries only an order key
    nodeptr_t create_node(sokey_t order_key) {
        nodeptr_t pnode = my_node_allocator.allocate(1);
        pnode->init(order_key);
        return (pnode);
    }
    // Allocate a node with an order key and an element copied/moved from t
    template <typename Arg>
    nodeptr_t create_node(sokey_t order_key, __TBB_FORWARDING_REF(Arg) t,
                          tbb::internal::true_type = tbb::internal::true_type()) {
        nodeptr_t pnode = my_node_allocator.allocate(1);

        __TBB_TRY {
            new(static_cast<void*>(&pnode->my_element)) T(tbb::internal::forward<Arg>(t));
            pnode->init(order_key);
        } __TBB_CATCH(...) {
            my_node_allocator.deallocate(pnode, 1);
            __TBB_RETHROW();
        }

        return (pnode);
    }
    // Overload taken when AllowCreate is false_type: must never be reached at run time
    template <typename Arg>
    nodeptr_t create_node(sokey_t, __TBB_FORWARDING_REF(Arg), tbb::internal::false_type) {
        __TBB_ASSERT(false, "This compile-time helper should never get called");
        return nodeptr_t();
    }
    // Allocate a node, constructing the element in place from the argument
    // pack; the order key is initialized later by the caller
    template <typename __TBB_PARAMETER_PACK Args>
    nodeptr_t create_node_v(__TBB_FORWARDING_REF(Args) __TBB_PARAMETER_PACK args) {
        nodeptr_t pnode = my_node_allocator.allocate(1);

        __TBB_TRY {
            new(static_cast<void*>(&pnode->my_element)) T(__TBB_PACK_EXPANSION(tbb::internal::forward<Args>(args)));
        } __TBB_CATCH(...) {
            my_node_allocator.deallocate(pnode, 1);
            __TBB_RETHROW();
        }

        return (pnode);
    }
    split_ordered_list(allocator_type a = allocator_type())
        : my_node_allocator(a), my_element_count(0)
    {
        // Immutable head dummy node with the smallest possible order key
        my_head = create_node(sokey_t(0));
    }

    ~split_ordered_list() {
        // Clear the list, then destroy the remaining head node
        clear();
        nodeptr_t pnode = my_head;
        my_head = NULL;
        __TBB_ASSERT(pnode != NULL && pnode->my_next == NULL, "Invalid head list node");
        destroy_node(pnode);
    }

    allocator_type get_allocator() const {
        return (my_node_allocator);
    }

    void clear() {
        nodeptr_t pnext;
        nodeptr_t pnode = my_head;

        __TBB_ASSERT(my_head != NULL, "Invalid head list node");
        pnext = pnode->my_next;
        pnode->my_next = NULL;
        pnode = pnext;

        while (pnode != NULL) {
            pnext = pnode->my_next;
            destroy_node(pnode);
            pnode = pnext;
        }

        my_element_count = 0;
    }
    // Returns an iterator to the first real element (dummies skipped)
    iterator begin() {
        return first_real_iterator(raw_begin());
    }
    const_iterator begin() const {
        return first_real_iterator(raw_begin());
    }
    iterator end() {
        return (iterator(0, this));
    }
    const_iterator end() const {
        return (const_iterator(0, this));
    }
    const_iterator cbegin() const {
        return (((const self_type *)this)->begin());
    }
    const_iterator cend() const {
        return (((const self_type *)this)->end());
    }

    bool empty() const {
        return (my_element_count == 0);
    }
    size_type size() const {
        return my_element_count;
    }
    size_type max_size() const {
        return my_node_allocator.max_size();
    }

    // Raw iterators traverse every node, dummy nodes included
    raw_iterator raw_begin() {
        return raw_iterator(my_head);
    }
    raw_const_iterator raw_begin() const {
        return raw_const_iterator(my_head);
    }
    raw_iterator raw_end() {
        return raw_iterator(0);
    }
    raw_const_iterator raw_end() const {
        return raw_const_iterator(0);
    }

    static sokey_t get_order_key(const raw_const_iterator& it) {
        return it.get_node_ptr()->get_order_key();
    }
    // Like get_order_key(), but maps the end iterator to the largest possible key
    static sokey_t get_safe_order_key(const raw_const_iterator& it) {
        if( !it.get_node_ptr() ) return ~sokey_t(0);
        return it.get_node_ptr()->get_order_key();
    }
    // ... (get_iterator conversions between raw and element iterators)
    // Skip to the first non-dummy node at or after it
    iterator first_real_iterator(raw_iterator it) {
        while (it != raw_end() && it.get_node_ptr()->is_dummy())
            ++it;
        return iterator(it.get_node_ptr(), this);
    }
    const_iterator first_real_iterator(raw_const_iterator it) const {
        while (it != raw_end() && it.get_node_ptr()->is_dummy())
            ++it;
        return const_iterator(it.get_node_ptr(), this);
    }

    // Destroy the element (unless the node is a dummy), then free the node
    void destroy_node(nodeptr_t pnode) {
        if (!pnode->is_dummy()) my_node_allocator.destroy(pnode);
        my_node_allocator.deallocate(pnode, 1);
    }

    // Try to insert new_node between previous and current_node with a single
    // CAS; returns new_node on success, or whatever now follows previous
    static nodeptr_t try_insert_atomic(nodeptr_t previous, nodeptr_t new_node, nodeptr_t current_node) {
        new_node->my_next = current_node;
        return previous->atomic_set_next(new_node, current_node);
    }

    // Insert a new element between it and next; on success bump the element count
    std::pair<iterator, bool> try_insert(raw_iterator it, raw_iterator next, nodeptr_t pnode, size_type *new_count) {
        nodeptr_t inserted_node = try_insert_atomic(it.get_node_ptr(), pnode, next.get_node_ptr());

        if (inserted_node == pnode) {
            // Insertion succeeded: verify the order and count the new element
            check_range(it, next);
            *new_count = tbb::internal::as_atomic(my_element_count).fetch_and_increment();
            return std::pair<iterator, bool>(iterator(pnode, this), true);
        }
        return std::pair<iterator, bool>(end(), false);
    }
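    /* The CAS discipline above is the core of the lock-free list. The same
       idea as a standalone sketch using C++11 std::atomic (Node and
       push_after are hypothetical names, not part of this header):

           #include <atomic>

           struct Node { std::atomic<Node*> next{nullptr}; int value{0}; };

           // Link nn between prev and cur; succeeds only if prev->next still
           // equals cur, i.e. no other thread slipped a node in between.
           bool push_after(Node* prev, Node* nn, Node* cur) {
               nn->next.store(cur, std::memory_order_relaxed);
               return prev->next.compare_exchange_strong(cur, nn);
           }

       On failure the caller re-reads the successor of prev and retries,
       which is exactly what insert_dummy() below and internal_insert() in
       the hash table do. */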
    // Insert a new dummy (bucket marker) element, starting the search at it
    raw_iterator insert_dummy(raw_iterator it, sokey_t order_key) {
        raw_iterator last = raw_end();
        raw_iterator where = it;
        ++where;

        // Create the dummy up front; it is discarded if another thread wins the race
        nodeptr_t dummy_node = create_node(order_key);

        for (;;) {
            if (where == last || get_order_key(where) > order_key) {
                __TBB_ASSERT(get_order_key(it) < order_key, "Invalid node order in the list");

                nodeptr_t inserted_node = try_insert_atomic(it.get_node_ptr(), dummy_node, where.get_node_ptr());
                if (inserted_node == dummy_node) {
                    check_range(it, where);
                    return raw_iterator(dummy_node);
                }
                // CAS failed: rescan from the node whose key is known to be smaller
                where = it;
                ++where;
            }
            else if (get_order_key(where) == order_key) {
                // Another thread already inserted this dummy: discard ours, reuse theirs
                destroy_node(dummy_node);
                return where;
            }
            else {
                it = where;
                ++where;
            }
        }
    }
    // Unlink the node at where (which must directly follow previous) and return it
    nodeptr_t erase_node_impl(raw_iterator previous, raw_const_iterator& where) {
        nodeptr_t pnode = (where++).get_node_ptr();
        nodeptr_t prevnode = previous.get_node_ptr();
        __TBB_ASSERT(prevnode->my_next == pnode, "Erase must take consecutive iterators");
        prevnode->my_next = pnode->my_next;
        return pnode;
    }

    void erase_node(raw_iterator previous, raw_const_iterator& where,
                    /*allow_destroy*/tbb::internal::true_type) {
        nodeptr_t pnode = erase_node_impl(previous, where);
        destroy_node(pnode);
    }

    void erase_node(raw_iterator previous, raw_const_iterator& where,
                    /*allow_destroy*/tbb::internal::false_type) {
        // Unlink only: the caller takes ownership of the node (node handles)
        erase_node_impl(previous, where);
    }

    void erase_node(raw_iterator previous, raw_const_iterator& where) {
        erase_node(previous, where, /*allow_destroy*/tbb::internal::true_type());
    }

    template <typename AllowDestroy>
    iterator erase_node(raw_iterator previous, const_iterator where, AllowDestroy)
    {
        raw_const_iterator it = where;
        erase_node(previous, it, AllowDestroy());
        return get_iterator(first_real_iterator(it));
    }

    iterator erase_node(raw_iterator previous, const_iterator& where) {
        return erase_node(previous, where, /*allow_destroy*/tbb::internal::true_type());
    }
    // Move all elements from source into this list, erasing them from source
    void move_all(self_type& source) {
        raw_const_iterator first = source.raw_begin();
        raw_const_iterator last = source.raw_end();

        if (first == last)
            return;

        nodeptr_t previous_node = my_head;
        raw_const_iterator begin_iterator = first++;

        // Move elements one by one, dummy nodes included
        for (raw_const_iterator it = first; it != last;)
        {
            nodeptr_t pnode = it.get_node_ptr();
            nodeptr_t dummy_node = pnode->is_dummy() ? create_node(pnode->get_order_key()) : create_node(pnode->get_order_key(), pnode->my_element);
            previous_node = try_insert_atomic(previous_node, dummy_node, NULL);
            __TBB_ASSERT(previous_node != NULL, "Insertion must succeed");
            raw_const_iterator where = it++;
            source.erase_node(get_iterator(begin_iterator), where);
        }
        check_range();
    }
private:
    template <typename Traits>
    friend class concurrent_unordered_base;

    // Check the list for order violations (debug builds only)
    void check_range(raw_iterator first, raw_iterator last) {
#if TBB_USE_ASSERT
        for (raw_iterator it = first; it != last; ++it) {
            raw_iterator next = it;
            ++next;
            __TBB_ASSERT(next == raw_end() || get_order_key(next) >= get_order_key(it),
                         "!!! List order inconsistency !!!");
        }
#else
        tbb::internal::suppress_unused_warning(first, last);
#endif
    }
    void check_range() {
#if TBB_USE_ASSERT
        check_range( raw_begin(), raw_end() );
#endif
    }

    typename tbb::internal::allocator_rebind<allocator_type, node>::type my_node_allocator;
    size_type my_element_count;
    nodeptr_t my_head; // head of the list (permanent dummy node)
};
#if defined(_MSC_VER) && !defined(__INTEL_COMPILER)
#pragma warning(push)
#pragma warning(disable: 4127) // warning C4127: conditional expression is constant
#endif

template <typename Traits>
class concurrent_unordered_base : public Traits
{
protected:
    // Type definitions
    typedef concurrent_unordered_base<Traits> self_type;
    typedef typename Traits::value_type value_type;
    typedef typename Traits::key_type key_type;
    typedef typename Traits::hash_compare hash_compare;
    typedef typename Traits::allocator_type allocator_type;
    typedef typename tbb::internal::allocator_traits<allocator_type>::size_type size_type;
    typedef split_ordered_list<value_type, typename Traits::allocator_type> solist_t;
    typedef typename solist_t::nodeptr_t nodeptr_t;
    typedef typename solist_t::iterator iterator;
    typedef typename solist_t::const_iterator const_iterator;
    typedef typename solist_t::raw_iterator raw_iterator;
    typedef typename solist_t::raw_const_iterator raw_const_iterator;
    // ... (pointer/reference/difference_type, local iterator, hasher/key_equal typedefs)
#if __TBB_UNORDERED_NODE_HANDLE_PRESENT
    typedef typename Traits::node_type node_type;
#endif // __TBB_UNORDERED_NODE_HANDLE_PRESENT
    using Traits::my_hash_compare;
    using Traits::get_key;
    using Traits::allow_multimapping;

    static const size_type initial_bucket_number = 8; // default number of buckets

private:
    template <typename OtherTraits>
    friend class concurrent_unordered_base;

    typedef std::pair<iterator, iterator> pairii_t;
    typedef std::pair<const_iterator, const_iterator> paircc_t;

    // One bucket segment pointer per bit of the machine word
    static size_type const pointers_per_table = sizeof(size_type) * 8;
    static const size_type initial_bucket_load = 4; // maximum elements per bucket by default
public:
    //! Construct an empty table with n_of_buckets buckets (rounded up to a power of two)
    concurrent_unordered_base(size_type n_of_buckets = initial_bucket_number,
        const hash_compare& hc = hash_compare(), const allocator_type& a = allocator_type())
        : Traits(hc), my_solist(a),
          my_allocator(a), my_maximum_bucket_size((float) initial_bucket_load)
    {
        if( n_of_buckets == 0) ++n_of_buckets;
        // Round up to the next power of two
        my_number_of_buckets = size_type(1) << __TBB_Log2((uintptr_t)n_of_buckets*2-1);
        internal_init();
    }
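    /* The bucket count is kept at a power of two so that "hash % buckets"
       stays prefix-stable as the table doubles, which split ordering relies
       on. Worked examples of the rounding expression above:
           n_of_buckets = 5  ->  1 << __TBB_Log2(9)  = 8
           n_of_buckets = 8  ->  1 << __TBB_Log2(15) = 8
           n_of_buckets = 9  ->  1 << __TBB_Log2(17) = 16
       i.e. the requested count is rounded up to the next power of two. */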
    concurrent_unordered_base(const concurrent_unordered_base& right, const allocator_type& a)
        : Traits(right.my_hash_compare), my_solist(a), my_allocator(a)
    {
        internal_init();
        internal_copy(right);
    }

    concurrent_unordered_base(const concurrent_unordered_base& right)
        : Traits(right.my_hash_compare), my_solist(right.get_allocator()), my_allocator(right.get_allocator())
    {
        internal_init();
        internal_copy(right);
    }
#if __TBB_CPP11_RVALUE_REF_PRESENT
    concurrent_unordered_base(concurrent_unordered_base&& right)
        : Traits(right.my_hash_compare), my_solist(right.get_allocator()), my_allocator(right.get_allocator()),
          my_maximum_bucket_size(float(initial_bucket_load))
    {
        my_number_of_buckets = initial_bucket_number;
        internal_init();
        // Steal right's contents wholesale; right is left in a valid empty state
        swap(right);
    }
    concurrent_unordered_base(concurrent_unordered_base&& right, const allocator_type& a)
        : Traits(right.my_hash_compare), my_solist(a), my_allocator(a)
    {
        call_internal_clear_on_exit clear_buckets_on_exception(this);

        internal_init();
        if (a == right.get_allocator()){
            // Equal allocators: cheap wholesale swap
            my_number_of_buckets = initial_bucket_number;
            my_maximum_bucket_size = float(initial_bucket_load);
            this->swap(right);
        }else{
            // Different allocators: move the nodes one by one, dummies included
            my_maximum_bucket_size = right.my_maximum_bucket_size;
            my_number_of_buckets = right.my_number_of_buckets;
            my_solist.my_element_count = right.my_solist.my_element_count;

            if (! right.my_solist.empty()){
                nodeptr_t previous_node = my_solist.my_head;

                // The head node is not copied: it carries no data
                for (raw_const_iterator it = ++(right.my_solist.raw_begin()),
                     last = right.my_solist.raw_end(); it != last; ++it)
                {
                    const nodeptr_t pnode = it.get_node_ptr();
                    nodeptr_t node;
                    if (pnode->is_dummy()) {
                        node = my_solist.create_node(pnode->get_order_key());
                        size_type bucket = __TBB_ReverseBits(pnode->get_order_key()) % my_number_of_buckets;
                        set_bucket(bucket, node);
                    }else{
                        node = my_solist.create_node(pnode->get_order_key(), std::move(pnode->my_element));
                    }

                    previous_node = my_solist.try_insert_atomic(previous_node, node, NULL);
                    __TBB_ASSERT(previous_node != NULL,
                                 "Insertion of node failed. Concurrent inserts in constructor ?");
                }
                my_solist.check_range();
            }
        }

        clear_buckets_on_exception.dismiss();
    }
#endif // __TBB_CPP11_RVALUE_REF_PRESENT

    concurrent_unordered_base& operator=(const concurrent_unordered_base& right) {
        if (this != &right)
            internal_copy(right);
        return (*this);
    }
#if __TBB_CPP11_RVALUE_REF_PRESENT
    concurrent_unordered_base& operator=(concurrent_unordered_base&& other) {
        if (this != &other) {
            typedef typename tbb::internal::allocator_traits<allocator_type>::
                propagate_on_container_move_assignment pocma_t;
            if (pocma_t::value || this->my_allocator == other.my_allocator) {
                // Steal other's state; swap the allocators only if they propagate
                concurrent_unordered_base trash(std::move(*this));
                swap(other);
                if (pocma_t::value) {
                    using std::swap;
                    swap(this->my_solist.my_node_allocator, other.my_solist.my_node_allocator);
                    swap(this->my_allocator, other.my_allocator);
                }
            } else {
                // Incompatible allocators: move element-wise via a temporary
                concurrent_unordered_base moved_copy(std::move(other), this->my_allocator);
                this->swap(moved_copy);
            }
        }
        return *this;
    }
#endif // __TBB_CPP11_RVALUE_REF_PRESENT

#if __TBB_INITIALIZER_LISTS_PRESENT
    //! Assignment from an initializer list
    concurrent_unordered_base& operator=(std::initializer_list<value_type> il) {
        this->clear();
        this->insert(il.begin(), il.end());
        return (*this);
    }
#endif // __TBB_INITIALIZER_LISTS_PRESENT

#if __TBB_UNORDERED_NODE_HANDLE_PRESENT
    // Move elements from a compatible container into this one, node by node
    template <typename SourceType>
    void internal_merge(SourceType& source) {
        typedef typename SourceType::iterator source_iterator;
        __TBB_STATIC_ASSERT((tbb::internal::is_same_type<node_type,
                            typename SourceType::node_type>::value),
                            "Incompatible containers cannot be merged");

        for(source_iterator it = source.begin(); it != source.end();) {
            source_iterator where = it++;
            if (allow_multimapping || find(get_key(*where)) == end()) {
                std::pair<node_type, raw_iterator> extract_result = source.internal_extract(where);

                // If our insert fails, the node must be put back into source
                if (!insert(std::move(extract_result.first)).second) {
                    raw_iterator next = extract_result.second;
                    raw_iterator current = next++;

                    __TBB_ASSERT(extract_result.first.my_node->get_order_key() >= current.get_node_ptr()->get_order_key(),
                                 "Wrong nodes order in source container");
                    __TBB_ASSERT(extract_result.first.my_node->get_order_key() <= next.get_node_ptr()->get_order_key(),
                                 "Wrong nodes order in source container");

                    size_t new_count = 0;
                    bool insert_result =
                        source.my_solist.try_insert(current, next, extract_result.first.my_node, &new_count).second;
                    __TBB_ASSERT_EX(insert_result,
                                    "Changing source container while merging is unsafe.");
                }
                extract_result.first.deactivate();
            }
        }
    }
#endif // __TBB_UNORDERED_NODE_HANDLE_PRESENT

    allocator_type get_allocator() const {
        return my_solist.get_allocator();
    }
    // Size and capacity
    bool empty() const {
        return my_solist.empty();
    }
    size_type size() const {
        return my_solist.size();
    }
    size_type max_size() const {
        return my_solist.max_size();
    }

    // Iterators
    iterator begin() {
        return my_solist.begin();
    }
    const_iterator begin() const {
        return my_solist.begin();
    }
    iterator end() {
        return my_solist.end();
    }
    const_iterator end() const {
        return my_solist.end();
    }
    const_iterator cbegin() const {
        return my_solist.cbegin();
    }
    const_iterator cend() const {
        return my_solist.cend();
    }
    // Parallel traversal support: a splittable range over the table
    class const_range_type : tbb::internal::no_assign {
        const concurrent_unordered_base& my_table;
        raw_const_iterator my_begin_node;
        raw_const_iterator my_end_node;
        mutable raw_const_iterator my_midpoint_node;
    public:
        typedef typename concurrent_unordered_base::size_type size_type;
        typedef typename concurrent_unordered_base::value_type value_type;
        typedef typename concurrent_unordered_base::const_iterator iterator;

        //! True if range is empty.
        bool empty() const { return my_begin_node == my_end_node; }

        //! True if range can be partitioned into two subranges.
        bool is_divisible() const {
            return my_midpoint_node != my_end_node;
        }

        //! Split range.
        const_range_type( const_range_type &r, split ) :
            my_table(r.my_table), my_end_node(r.my_end_node)
        {
            r.my_end_node = my_begin_node = r.my_midpoint_node;
            __TBB_ASSERT( !empty(), "Splitting despite the range is not divisible" );
            __TBB_ASSERT( !r.empty(), "Splitting despite the range is not divisible" );
            set_midpoint();
            r.set_midpoint();
        }

        //! Init range with container and grainsize specified.
        const_range_type( const concurrent_unordered_base &a_table ) :
            my_table(a_table), my_begin_node(a_table.my_solist.begin()),
            my_end_node(a_table.my_solist.end())
        {
            set_midpoint();
        }

        //! The grain size for this range.
        size_type grainsize() const { return 1; }

        //! Set my_midpoint_node to point approximately half way between my_begin_node and my_end_node.
        void set_midpoint() const {
            if( my_begin_node == my_end_node )
                my_midpoint_node = my_end_node;
            else {
                sokey_t begin_key = solist_t::get_safe_order_key(my_begin_node);
                sokey_t end_key = solist_t::get_safe_order_key(my_end_node);
                // ... (search for an initialized bucket dummy roughly halfway
                //      between the two keys; fall back to the end node when
                //      there is none, which makes the range indivisible:)
                my_midpoint_node = my_end_node;
#if TBB_USE_ASSERT
                sokey_t mid_key = solist_t::get_safe_order_key(my_midpoint_node);
                __TBB_ASSERT( begin_key < mid_key, "my_begin_node is after my_midpoint_node" );
                __TBB_ASSERT( mid_key <= end_key, "my_midpoint_node is after my_end_node" );
#endif // TBB_USE_ASSERT
            }
        }
        // ... (begin()/end() accessors)
    };

    class range_type : public const_range_type {
        // ... (non-const counterpart, constructed the same way)
    };

    range_type range() {
        return range_type( *this );
    }
    const_range_type range() const {
        return const_range_type( *this );
    }
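    /* Usage sketch (illustrative): range() integrates the table with TBB's
       range-based algorithms, so it can be traversed in parallel. process()
       below is a hypothetical user function:

           #include "tbb/parallel_for.h"
           typedef tbb::concurrent_unordered_map<int, int> map_t;

           void walk(const map_t& m) {
               tbb::parallel_for(m.range(), [](const map_t::const_range_type& r) {
                   for (map_t::const_iterator it = r.begin(); it != r.end(); ++it)
                       process(*it);
               });
           }

       is_divisible() only reports true while an initialized dummy node lies
       between the range ends, so splitting is O(1) and never copies elements. */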
    //! Insert a copy of value
    std::pair<iterator, bool> insert(const value_type& value) {
        return internal_insert</*AllowCreate=*/tbb::internal::true_type,
                               /*AllowDestroy=*/tbb::internal::true_type>(value);
    }

    //! Insert with a hint (the hint is ignored)
    iterator insert(const_iterator, const value_type& value) {
        return insert(value).first;
    }

#if __TBB_CPP11_RVALUE_REF_PRESENT
    // ... (rvalue overloads: insert(value_type&&) and insert(const_iterator, value_type&&))
#endif

#if __TBB_UNORDERED_NODE_HANDLE_PRESENT
    //! Insert a node extracted from a compatible container
    std::pair<iterator, bool> insert(node_type&& nh) {
        if (!nh.empty()) {
            nodeptr_t handled_node = nh.my_node;
            std::pair<iterator, bool> insert_result =
                internal_insert</*AllowCreate=*/tbb::internal::false_type,
                                /*AllowDestroy=*/tbb::internal::false_type>
                                (handled_node->my_element, handled_node);
            if (insert_result.second)
                nh.deactivate(); // the container owns the node now
            return insert_result;
        }
        return std::pair<iterator, bool>(end(), false);
    }

    iterator insert(const_iterator, node_type&& nh) {
        return insert(std::move(nh)).first;
    }
#endif // __TBB_UNORDERED_NODE_HANDLE_PRESENT

#if __TBB_CPP11_VARIADIC_TEMPLATES_PRESENT && __TBB_CPP11_RVALUE_REF_PRESENT
    //! Construct an element in place
    template <typename... Args>
    std::pair<iterator, bool> emplace(Args&&... args) {
        nodeptr_t pnode = my_solist.create_node_v(tbb::internal::forward<Args>(args)...);
        // ... (hash the constructed element, init its order key, then insert
        //      the pre-built node via internal_insert)
    }

    //! Construct an element in place with a hint (the hint is ignored)
    template <typename... Args>
    iterator emplace_hint(const_iterator, Args&&... args) {
        return emplace(tbb::internal::forward<Args>(args)...).first;
    }
#endif // __TBB_CPP11_VARIADIC_TEMPLATES_PRESENT && __TBB_CPP11_RVALUE_REF_PRESENT

    //! Insert a range of elements
    template <class Iterator>
    void insert(Iterator first, Iterator last) {
        for (Iterator it = first; it != last; ++it)
            insert(*it);
    }

#if __TBB_INITIALIZER_LISTS_PRESENT
    //! Insert an initializer list
    void insert(std::initializer_list<value_type> il) {
        insert(il.begin(), il.end());
    }
#endif

    //! Erase a single element (not concurrency-safe, hence the unsafe_ prefix)
    iterator unsafe_erase(const_iterator where) {
        return internal_erase(where);
    }

    iterator unsafe_erase(const_iterator first, const_iterator last) {
        while (first != last)
            unsafe_erase(first++);
        return my_solist.get_iterator(first);
    }
    size_type unsafe_erase(const key_type& key) {
        pairii_t where = equal_range(key);
        size_type item_count = internal_distance(where.first, where.second);
        unsafe_erase(where.first, where.second);
        return item_count;
    }

#if __TBB_UNORDERED_NODE_HANDLE_PRESENT
    //! Unlink an element and return it as a node handle (not concurrency-safe)
    node_type unsafe_extract(const_iterator where) {
        return internal_extract(where).first;
    }

    node_type unsafe_extract(const key_type& key) {
        pairii_t where = equal_range(key);
        if (where.first == end()) return node_type(); // not found
        return internal_extract(where.first).first;
    }
#endif // __TBB_UNORDERED_NODE_HANDLE_PRESENT

    void swap(concurrent_unordered_base& right) {
        if (this != &right) {
            std::swap(my_hash_compare, right.my_hash_compare);
            my_solist.swap(right.my_solist);
            internal_swap_buckets(right);
            std::swap(my_number_of_buckets, right.my_number_of_buckets);
            std::swap(my_maximum_bucket_size, right.my_maximum_bucket_size);
        }
    }
    // Observers
    hasher hash_function() const {
        return my_hash_compare.my_hash_object;
    }
    key_equal key_eq() const {
        return my_hash_compare.my_key_compare_object;
    }

    void clear() {
        my_solist.clear();   // drop all elements
        internal_clear();    // drop all bucket segments
        // Re-initialize bucket 0 with the permanent head dummy node
        raw_iterator dummy_node = my_solist.raw_begin();
        set_bucket(0, dummy_node);
    }
    // Lookup
    iterator find(const key_type& key) {
        return internal_find(key);
    }
    const_iterator find(const key_type& key) const {
        return const_cast<self_type*>(this)->internal_find(key);
    }

    size_type count(const key_type& key) const {
        if(allow_multimapping) {
            paircc_t answer = equal_range(key);
            size_type item_count = internal_distance(answer.first, answer.second);
            return item_count;
        } else {
            return const_cast<self_type*>(this)->internal_find(key) == end() ? 0 : 1;
        }
    }

    std::pair<iterator, iterator> equal_range(const key_type& key) {
        return internal_equal_range(key);
    }
    std::pair<const_iterator, const_iterator> equal_range(const key_type& key) const {
        return const_cast<self_type*>(this)->internal_equal_range(key);
    }
    // Bucket interface (mainly for debugging)
    size_type unsafe_bucket_count() const {
        return my_number_of_buckets;
    }
    size_type unsafe_max_bucket_count() const {
        return segment_size(pointers_per_table-1);
    }
    size_type unsafe_bucket_size(size_type bucket) {
        size_type item_count = 0;
        if (is_initialized(bucket)) {
            raw_iterator it = get_bucket(bucket);
            ++it;
            // Count elements up to the next dummy node or the end of the list
            for (; it != my_solist.raw_end() && !it.get_node_ptr()->is_dummy(); ++it)
                ++item_count;
        }
        return item_count;
    }
    size_type unsafe_bucket(const key_type& key) const {
        sokey_t order_key = (sokey_t) my_hash_compare(key);
        size_type bucket = order_key % my_number_of_buckets;
        return bucket;
    }
    // The non-const local accessors initialize the bucket on demand; the
    // const ones cannot, so they report an uninitialized bucket as empty
    local_iterator unsafe_begin(size_type bucket) {
        if (!is_initialized(bucket))
            init_bucket(bucket);

        raw_iterator it = get_bucket(bucket);
        return my_solist.first_real_iterator(it);
    }

    const_local_iterator unsafe_begin(size_type bucket) const {
        if (!is_initialized(bucket))
            return end();

        raw_const_iterator it = get_bucket(bucket);
        return my_solist.first_real_iterator(it);
    }

    local_iterator unsafe_end(size_type bucket) {
        if (!is_initialized(bucket))
            init_bucket(bucket);

        raw_iterator it = get_bucket(bucket);

        // The end of the bucket is denoted by the next dummy node
        do ++it;
        while(it != my_solist.raw_end() && !it.get_node_ptr()->is_dummy());

        // Return the first real element past the end of the bucket
        return my_solist.first_real_iterator(it);
    }

    const_local_iterator unsafe_end(size_type bucket) const {
        if (!is_initialized(bucket))
            return end();

        raw_const_iterator it = get_bucket(bucket);

        do ++it;
        while(it != my_solist.raw_end() && !it.get_node_ptr()->is_dummy());

        return my_solist.first_real_iterator(it);
    }

    const_local_iterator unsafe_cbegin(size_type bucket) const {
        return ((const self_type *) this)->unsafe_begin(bucket);
    }
    const_local_iterator unsafe_cend(size_type bucket) const {
        return ((const self_type *) this)->unsafe_end(bucket);
    }
    // Hash policy
    float load_factor() const {
        return (float) size() / (float) unsafe_bucket_count();
    }

    float max_load_factor() const {
        return my_maximum_bucket_size;
    }

    void max_load_factor(float newmax) {
        // Reject NaN (newmax != newmax) and negative values
        if (newmax != newmax || newmax < 0)
            tbb::internal::throw_exception(tbb::internal::eid_invalid_load_factor);
        my_maximum_bucket_size = newmax;
    }
    // Shrinking is a no-op: the split-ordered list is already sorted, so a
    // bucket count increase simply becomes visible as buckets are touched
    void rehash(size_type buckets) {
        size_type current_buckets = my_number_of_buckets;
        if (current_buckets >= buckets)
            return;
        // Round up to the next power of two
        my_number_of_buckets = size_type(1) << __TBB_Log2((uintptr_t)buckets*2-1);
    }

private:
    // Initialize the hash table
    void internal_init() {
        // Zero the array of segment pointers
        memset(my_buckets, 0, sizeof(my_buckets));
        // Initialize bucket 0 with the head dummy node
        raw_iterator dummy_node = my_solist.raw_begin();
        set_bucket(0, dummy_node);
    }
    void internal_clear() {
        // Free all bucket segments
        for (size_type index = 0; index < pointers_per_table; ++index) {
            if (my_buckets[index] != NULL) {
                size_type sz = segment_size(index);
                for (size_type index2 = 0; index2 < sz; ++index2)
                    my_allocator.destroy(&my_buckets[index][index2]);
                my_allocator.deallocate(my_buckets[index], sz);
                my_buckets[index] = 0;
            }
        }
    }
    void internal_copy(const self_type& right) {
        clear();

        my_maximum_bucket_size = right.my_maximum_bucket_size;
        my_number_of_buckets = right.my_number_of_buckets;

        __TBB_TRY {
            insert(right.begin(), right.end());
            my_hash_compare = right.my_hash_compare;
        } __TBB_CATCH(...) {
            // Leave the container empty rather than half-copied
            my_solist.clear();
            __TBB_RETHROW();
        }
    }

    void internal_swap_buckets(concurrent_unordered_base& right) {
        // Swap all bucket segment pointers
        for (size_type index = 0; index < pointers_per_table; ++index)
        {
            raw_iterator * iterator_pointer = my_buckets[index];
            my_buckets[index] = right.my_buckets[index];
            right.my_buckets[index] = iterator_pointer;
        }
    }

    // Hash APIs
    static size_type internal_distance(const_iterator first, const_iterator last) {
        size_type num = 0;
        for (const_iterator it = first; it != last; ++it)
            ++num;
        return num;
    }
    // Insert an element given its value and, optionally, a pre-built node.
    // AllowCreate selects whether a node may be built here; AllowDestroy
    // selects whether a rejected pre-built node is destroyed.
    template <typename AllowCreate, typename AllowDestroy, typename ValueType>
    std::pair<iterator, bool> internal_insert(__TBB_FORWARDING_REF(ValueType) value, nodeptr_t pnode = NULL)
    {
        const key_type *pkey = &get_key(value);
        sokey_t hash_key = (sokey_t) my_hash_compare(*pkey);
        size_type new_count = 0;
        sokey_t order_key = split_order_key_regular(hash_key);
        raw_iterator previous = prepare_bucket(hash_key);
        raw_iterator last = my_solist.raw_end();
        __TBB_ASSERT(previous != last, "Invalid head node");

        // The bucket's dummy node is the starting point of the search
        for (raw_iterator where = previous;;)
        {
            ++where;
            if (where == last || solist_t::get_order_key(where) > order_key ||
                // if multimapped, stop at the first item with an equal key
                (allow_multimapping && solist_t::get_order_key(where) == order_key &&
                 !my_hash_compare(get_key(*where), *pkey)))
            {
                if (!pnode) {
                    pnode = my_solist.create_node(order_key, tbb::internal::forward<ValueType>(value), AllowCreate());
                    // If the value was moved, the known reference to the key may be invalid
                    pkey = &get_key(pnode->my_element);
                } else {
                    // Reuse the pre-built node; only its order key needs setting
                    pnode->init(order_key);
                }

                // Try to insert pnode between previous and where
                std::pair<iterator, bool> result = my_solist.try_insert(previous, where, pnode, &new_count);
                if (result.second) {
                    // Insertion succeeded: grow the table if the load factor demands it
                    adjust_table_size(new_count, my_number_of_buckets);
                    return result;
                }
                // Insertion failed: another thread inserted at the same spot.
                // Rescan from previous, whose order key is known to be smaller.
                where = previous;
            }
            else if (!allow_multimapping && solist_t::get_order_key(where) == order_key &&
                     !my_hash_compare(get_key(*where), *pkey))
            {
                // An equal element already exists: reject the insertion
                if (pnode && AllowDestroy::value)
                    my_solist.destroy_node(pnode);
                return std::pair<iterator, bool>(my_solist.get_iterator(where), false);
            }
            else {
                // Move the iterator forward
                previous = where;
            }
        }
    }
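    /* Order keys make the bucket structure work: split_order_key_regular()
       is the bit-reversed hash with the lowest bit set, while
       split_order_key_dummy() is the bit-reversed bucket index with the
       lowest bit clear. A worked example with 8-bit keys for brevity (the
       real sokey_t is machine-word sized):

           hash(x)                 = 0b00000110   (bucket 6 of 8)
           bit-reversed            = 0b01100000
           regular key for x       = 0b01100001   (low bit set)
           dummy key for bucket 6  = 0b01100000   (low bit clear)

       Every element therefore sorts immediately after its bucket's dummy
       node, and splitting a bucket only inserts a new dummy, it never moves
       existing elements. */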
    // Find an element given the key
    iterator internal_find(const key_type& key)
    {
        sokey_t hash_key = (sokey_t) my_hash_compare(key);
        sokey_t order_key = split_order_key_regular(hash_key);
        raw_iterator last = my_solist.raw_end();

        for (raw_iterator it = prepare_bucket(hash_key); it != last; ++it)
        {
            if (solist_t::get_order_key(it) > order_key)
            {
                // Past the insertion point: the element is not in the table
                return end();
            }
            else if (solist_t::get_order_key(it) == order_key)
            {
                // Matching order keys do not guarantee a match: compare the
                // keys themselves, scanning while the order key is the same
                if (!my_hash_compare(get_key(*it), key))
                    return my_solist.get_iterator(it);
            }
        }

        return end();
    }
    // Erase an element from the list. This is not a concurrency-safe function.
    iterator internal_erase(const_iterator it)
    {
        sokey_t hash_key = (sokey_t) my_hash_compare(get_key(*it));
        raw_iterator previous = prepare_bucket(hash_key);
        raw_iterator last = my_solist.raw_end();

        // Scan the bucket for the node preceding it
        for (raw_iterator where = previous; where != last; previous = where) {
            ++where;
            if (my_solist.get_iterator(where) == it)
                return my_solist.erase_node(previous, it);
        }
        return end();
    }
#if __TBB_UNORDERED_NODE_HANDLE_PRESENT
    // Unlink the node at it without destroying it and return it as a node
    // handle, together with the raw position that followed it
    std::pair<node_type, raw_iterator> internal_extract(const_iterator it) {
        sokey_t hash_key = sokey_t(my_hash_compare(get_key(*it)));
        raw_iterator previous = prepare_bucket(hash_key);
        raw_iterator last = my_solist.raw_end();

        for(raw_iterator where = previous; where != last; previous = where) {
            ++where;
            if (my_solist.get_iterator(where) == it) {
                const_iterator result = it;
                my_solist.erase_node(previous, it, /*allow_destroy*/tbb::internal::false_type());
                return std::pair<node_type, raw_iterator>( node_type(result.get_node_ptr()),
                                                           where);
            }
        }
        return std::pair<node_type, iterator>(node_type(), end());
    }
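    /* Usage sketch (illustrative): this extract/insert pair is what powers
       node handles at the public API level, e.g. moving an element between
       two maps without copying it:

           tbb::concurrent_unordered_map<int, std::string> a, b;
           a.emplace(1, "one");
           auto nh = a.unsafe_extract(1);  // node leaves a, element not copied
           if (!nh.empty())
               b.insert(std::move(nh));    // same node now lives in b
    */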
#endif // __TBB_UNORDERED_NODE_HANDLE_PRESENT

    // Return the [first, last) range of iterators holding the given key.
    // Only a many-to-one mapping can return more than one element.
    pairii_t internal_equal_range(const key_type& key)
    {
        sokey_t hash_key = (sokey_t) my_hash_compare(key);
        sokey_t order_key = split_order_key_regular(hash_key);
        raw_iterator end_it = my_solist.raw_end();

        for (raw_iterator it = prepare_bucket(hash_key); it != end_it; ++it)
        {
            if (solist_t::get_order_key(it) > order_key)
            {
                // There is no element with the given key
                return pairii_t(end(), end());
            }
            else if (solist_t::get_order_key(it) == order_key &&
                     !my_hash_compare(get_key(*it), key))
            {
                iterator first = my_solist.get_iterator(it);
                iterator last = first;
                do ++last;
                while( allow_multimapping && last != end() && !my_hash_compare(get_key(*last), key) );
                return pairii_t(first, last);
            }
        }
        return pairii_t(end(), end());
    }
    // Bucket APIs
    void init_bucket(size_type bucket)
    {
        // Bucket 0 has no parent and is pre-initialized
        __TBB_ASSERT( bucket != 0, "The first bucket must always be initialized");

        size_type parent_bucket = get_parent(bucket);

        // All parent buckets have to be initialized before this one
        if (!is_initialized(parent_bucket))
            init_bucket(parent_bucket);

        raw_iterator parent = get_bucket(parent_bucket);

        // Create this bucket's dummy node, inserted after the parent's dummy
        raw_iterator dummy_node = my_solist.insert_dummy(parent, split_order_key_dummy(bucket));
        set_bucket(bucket, dummy_node);
    }
    void adjust_table_size(size_type total_elements, size_type current_size)
    {
        // Grow the table by a factor of 2 if the load factor was exceeded
        if ( ((float) total_elements / (float) current_size) > my_maximum_bucket_size )
        {
            // Double the size only if it has not changed in the meantime
            my_number_of_buckets.compare_and_swap(2u*current_size, current_size);
        }
    }
    size_type get_parent(size_type bucket) const
    {
        // Unset bucket's most significant turned-on bit
        size_type msb = __TBB_Log2((uintptr_t)bucket);
        return bucket & ~(size_type(1) << msb);
    }

    // Dynamically sized bucket array: segment k holds 2^k buckets (segment 0 holds 2)
    static size_type segment_index_of(size_type index) {
        return size_type( __TBB_Log2( uintptr_t(index|1) ) );
    }

    static size_type segment_base(size_type k) {
        return (size_type(1)<<k & ~size_type(1));
    }

    static size_type segment_size(size_type k) {
        return k? size_type(1)<<k : 2;
    }
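    /* Worked example (illustrative): for bucket 13 (binary 1101),
           get_parent(13)       = 1101 with its top set bit cleared = 0101 = 5
           segment_index_of(13) = floor(log2(13)) = 3
           segment_base(3)      = (1<<3) & ~1 = 8
           segment_size(3)      = 1<<3 = 8
       so bucket 13 lives in slot 13 - 8 = 5 of segment 3, and the recursion
       in init_bucket() climbs 13 -> 5 -> 1 -> 0 until it reaches an
       initialized ancestor. */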
    raw_iterator get_bucket(size_type bucket) const {
        size_type segment = segment_index_of(bucket);
        bucket -= segment_base(segment);
        __TBB_ASSERT( my_buckets[segment], "bucket must be in an allocated segment" );
        return my_buckets[segment][bucket];
    }
    raw_iterator prepare_bucket(sokey_t hash_key) {
        size_type bucket = hash_key % my_number_of_buckets;
        size_type segment = segment_index_of(bucket);
        size_type index = bucket - segment_base(segment);
        // Initialize the bucket on first use
        if (my_buckets[segment] == NULL || my_buckets[segment][index].get_node_ptr() == NULL)
            init_bucket(bucket);
        return my_buckets[segment][index];
    }
    void set_bucket(size_type bucket, raw_iterator dummy_head) {
        size_type segment = segment_index_of(bucket);
        bucket -= segment_base(segment);

        if (my_buckets[segment] == NULL) {
            size_type sz = segment_size(segment);
            raw_iterator * new_segment = my_allocator.allocate(sz);
            std::memset(static_cast<void*>(new_segment), 0, sz*sizeof(raw_iterator));

            // Publish the new segment; if another thread won the race, free ours
            if (my_buckets[segment].compare_and_swap( new_segment, NULL) != NULL)
                my_allocator.deallocate(new_segment, sz);
        }

        my_buckets[segment][bucket] = dummy_head;
    }
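    /* The same lazily-allocated, CAS-published segment idea in miniature
       with C++11 std::atomic (all names hypothetical):

           #include <atomic>

           std::atomic<int*> segment{nullptr};

           int* get_or_alloc(std::size_t sz) {
               int* seg = segment.load();
               if (!seg) {
                   int* fresh = new int[sz]();
                   int* expected = nullptr;
                   if (segment.compare_exchange_strong(expected, fresh))
                       seg = fresh;        // we published the segment
                   else {
                       delete[] fresh;     // another thread won; use theirs
                       seg = expected;
                   }
               }
               return seg;
           }

       Losing threads free their allocation, exactly as set_bucket() does
       with my_allocator.deallocate(). */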
    bool is_initialized(size_type bucket) const {
        size_type segment = segment_index_of(bucket);
        bucket -= segment_base(segment);

        if (my_buckets[segment] == NULL)
            return false;

        raw_iterator it = my_buckets[segment][bucket];
        return (it.get_node_ptr() != NULL);
    }
    // Order keys are bit-reversed hashes: regular nodes set the low bit,
    // dummy (bucket marker) nodes keep it clear
    sokey_t split_order_key_regular(sokey_t order_key) const {
        return __TBB_ReverseBits(order_key) | 0x1;
    }
    sokey_t split_order_key_dummy(sokey_t order_key) const {
        return __TBB_ReverseBits(order_key) & ~sokey_t(0x1);
    }

    // Data members
    solist_t my_solist;  // split-ordered list holding all elements and dummies
    typename tbb::internal::allocator_rebind<allocator_type, raw_iterator>::type my_allocator;
    atomic<size_type> my_number_of_buckets;                 // current bucket count (power of two)
    atomic<raw_iterator*> my_buckets[pointers_per_table];   // lazily allocated bucket segments
    float my_maximum_bucket_size;                           // maximum load factor
};

#if defined(_MSC_VER) && !defined(__INTEL_COMPILER)
#pragma warning(pop) // warning 4127 is back
#endif

} // namespace interface5

#endif // __TBB__concurrent_unordered_impl_H