Intel(R) Threading Building Blocks Doxygen Documentation version 4.2.3
tbb::interface5::internal::split_ordered_list< T, Allocator > Class Template Reference

#include <_concurrent_unordered_impl.h>


Classes

struct  node
 

Public Types

typedef split_ordered_list< T, Allocator > self_type
 
typedef tbb::internal::allocator_rebind< Allocator, T >::type allocator_type
 
typedef node * nodeptr_t
 
typedef tbb::internal::allocator_traits< allocator_type >::value_type value_type
 
typedef tbb::internal::allocator_traits< allocator_type >::size_type size_type
 
typedef tbb::internal::allocator_traits< allocator_type >::difference_type difference_type
 
typedef tbb::internal::allocator_traits< allocator_type >::pointer pointer
 
typedef tbb::internal::allocator_traits< allocator_type >::const_pointer const_pointer
 
typedef value_type & reference
 
typedef const value_type & const_reference
 
typedef solist_iterator< self_type, const value_type > const_iterator
 
typedef solist_iterator< self_type, value_type > iterator
 
typedef flist_iterator< self_type, const value_type > raw_const_iterator
 
typedef flist_iterator< self_type, value_type > raw_iterator
 

Public Member Functions

nodeptr_t create_node (sokey_t order_key)
 
template<typename Arg >
nodeptr_t create_node (sokey_t order_key, __TBB_FORWARDING_REF(Arg) t, tbb::internal::true_type=tbb::internal::true_type())
 
template<typename Arg >
nodeptr_t create_node (sokey_t, __TBB_FORWARDING_REF(Arg), tbb::internal::false_type)
 
template<typename __TBB_PARAMETER_PACK Args>
nodeptr_t create_node_v (__TBB_FORWARDING_REF(Args) __TBB_PARAMETER_PACK args)
 
 split_ordered_list (allocator_type a=allocator_type())
 
 ~split_ordered_list ()
 
allocator_type get_allocator () const
 
void clear ()
 
iterator begin ()
 
const_iterator begin () const
 
iterator end ()
 
const_iterator end () const
 
const_iterator cbegin () const
 
const_iterator cend () const
 
bool empty () const
 
size_type size () const
 
size_type max_size () const
 
void swap (self_type &other)
 
raw_iterator raw_begin ()
 
raw_const_iterator raw_begin () const
 
raw_iterator raw_end ()
 
raw_const_iterator raw_end () const
 
iterator get_iterator (raw_iterator it)
 
const_iterator get_iterator (raw_const_iterator it) const
 
raw_iterator get_iterator (raw_const_iterator it)
 
iterator first_real_iterator (raw_iterator it)
 
const_iterator first_real_iterator (raw_const_iterator it) const
 
void destroy_node (nodeptr_t pnode)
 
std::pair< iterator, bool > try_insert (raw_iterator it, raw_iterator next, nodeptr_t pnode, size_type *new_count)
 
raw_iterator insert_dummy (raw_iterator it, sokey_t order_key)
 
nodeptr_t erase_node_impl (raw_iterator previous, raw_const_iterator &where)
 
void erase_node (raw_iterator previous, raw_const_iterator &where, tbb::internal::true_type)
 
void erase_node (raw_iterator previous, raw_const_iterator &where, tbb::internal::false_type)
 
void erase_node (raw_iterator previous, raw_const_iterator &where)
 
template<typename AllowDestroy >
iterator erase_node (raw_iterator previous, const_iterator where, AllowDestroy)
 
iterator erase_node (raw_iterator previous, const_iterator &where)
 
void move_all (self_type &source)
 

Static Public Member Functions

static sokey_t get_order_key (const raw_const_iterator &it)
 
static sokey_t get_safe_order_key (const raw_const_iterator &it)
 
static iterator get_iterator (const_iterator it)
 
static nodeptr_t try_insert_atomic (nodeptr_t previous, nodeptr_t new_node, nodeptr_t current_node)
 

Private Member Functions

void check_range (raw_iterator first, raw_iterator last)
 
void check_range ()
 

Private Attributes

tbb::internal::allocator_rebind< allocator_type, node >::type my_node_allocator
 
size_type my_element_count
 
nodeptr_t my_head
 

Friends

template<typename Traits >
class concurrent_unordered_base
 

Detailed Description

template<typename T, typename Allocator>
class tbb::interface5::internal::split_ordered_list< T, Allocator >

Definition at line 61 of file _concurrent_unordered_impl.h.
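
The class carries no Doxygen description, but its members make the design recognizable: it is the singly linked "split-ordered list" (in the sense of Shalev and Shavit) that backs the concurrent_unordered_* containers. Every node carries a sokey_t order key, real elements are interleaved with per-bucket dummy nodes (see insert_dummy() and create_node()), a permanent dummy head node with order key 0 is allocated by the constructor, and insertion is performed lock-free via try_insert_atomic() (concurrent erase is not supported, as the comments in insert_dummy() note). The sketch below illustrates, with hypothetical helper names that do not appear in this header, how such order keys are typically formed; TBB's own key-manipulation helpers live in concurrent_unordered_base rather than in this class.

#include <cstddef>

// Conceptual sketch only: how split-ordered order keys are usually formed.
// The names below are illustrative and are not declared in this header.
typedef std::size_t order_key_t;          // plays the role of sokey_t

// Reverse the bits of a hash value so that doubling the bucket count
// never requires moving nodes already in the list.
inline order_key_t reverse_bits(order_key_t x) {
    order_key_t r = 0;
    for (unsigned i = 0; i < sizeof(order_key_t) * 8; ++i) {
        r = (r << 1) | (x & 1);
        x >>= 1;
    }
    return r;
}

// Real elements set the low bit after reversal; dummy (bucket head) nodes
// keep it clear, so a dummy sorts immediately before the elements of its
// bucket and "is this a dummy?" reduces to testing a single bit.
inline order_key_t regular_key(order_key_t hash)  { return reverse_bits(hash) | order_key_t(1); }
inline order_key_t dummy_key(order_key_t bucket)  { return reverse_bits(bucket) & ~order_key_t(1); }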

Member Typedef Documentation

◆ allocator_type

template<typename T, typename Allocator>
typedef tbb::internal::allocator_rebind<Allocator, T>::type tbb::interface5::internal::split_ordered_list< T, Allocator >::allocator_type

Definition at line 197 of file _concurrent_unordered_impl.h.

◆ const_iterator

template<typename T, typename Allocator>
typedef solist_iterator<self_type, const value_type> tbb::interface5::internal::split_ordered_list< T, Allocator >::const_iterator

Definition at line 211 of file _concurrent_unordered_impl.h.

◆ const_pointer

template<typename T, typename Allocator>
typedef tbb::internal::allocator_traits<allocator_type>::const_pointer tbb::interface5::internal::split_ordered_list< T, Allocator >::const_pointer

Definition at line 206 of file _concurrent_unordered_impl.h.

◆ const_reference

template<typename T, typename Allocator>
typedef const value_type& tbb::interface5::internal::split_ordered_list< T, Allocator >::const_reference

Definition at line 209 of file _concurrent_unordered_impl.h.

◆ difference_type

template<typename T, typename Allocator>
typedef tbb::internal::allocator_traits<allocator_type>::difference_type tbb::interface5::internal::split_ordered_list< T, Allocator >::difference_type

Definition at line 204 of file _concurrent_unordered_impl.h.

◆ iterator

template<typename T, typename Allocator>
typedef solist_iterator<self_type, value_type> tbb::interface5::internal::split_ordered_list< T, Allocator >::iterator

Definition at line 212 of file _concurrent_unordered_impl.h.

◆ nodeptr_t

template<typename T, typename Allocator>
typedef node* tbb::interface5::internal::split_ordered_list< T, Allocator >::nodeptr_t

Definition at line 199 of file _concurrent_unordered_impl.h.

◆ pointer

template<typename T, typename Allocator>
typedef tbb::internal::allocator_traits<allocator_type>::pointer tbb::interface5::internal::split_ordered_list< T, Allocator >::pointer

Definition at line 205 of file _concurrent_unordered_impl.h.

◆ raw_const_iterator

template<typename T, typename Allocator>
typedef flist_iterator<self_type, const value_type> tbb::interface5::internal::split_ordered_list< T, Allocator >::raw_const_iterator

Definition at line 213 of file _concurrent_unordered_impl.h.

◆ raw_iterator

template<typename T, typename Allocator>
typedef flist_iterator<self_type, value_type> tbb::interface5::internal::split_ordered_list< T, Allocator >::raw_iterator

Definition at line 214 of file _concurrent_unordered_impl.h.

◆ reference

template<typename T, typename Allocator>
typedef value_type& tbb::interface5::internal::split_ordered_list< T, Allocator >::reference

Definition at line 208 of file _concurrent_unordered_impl.h.

◆ self_type

template<typename T, typename Allocator>
typedef split_ordered_list<T, Allocator> tbb::interface5::internal::split_ordered_list< T, Allocator >::self_type

Definition at line 195 of file _concurrent_unordered_impl.h.

◆ size_type

template<typename T, typename Allocator>
typedef tbb::internal::allocator_traits<allocator_type>::size_type tbb::interface5::internal::split_ordered_list< T, Allocator >::size_type

Definition at line 203 of file _concurrent_unordered_impl.h.

◆ value_type

template<typename T, typename Allocator>
typedef tbb::internal::allocator_traits<allocator_type>::value_type tbb::interface5::internal::split_ordered_list< T, Allocator >::value_type

Definition at line 202 of file _concurrent_unordered_impl.h.

Constructor & Destructor Documentation

◆ split_ordered_list()

template<typename T, typename Allocator>
tbb::interface5::internal::split_ordered_list< T, Allocator >::split_ordered_list ( allocator_type  a = allocator_type())
inline

Definition at line 313 of file _concurrent_unordered_impl.h.

315  {
316  // Immediately allocate a dummy node with order key of 0. This node
317  // will always be the head of the list.
318  my_head = create_node(sokey_t(0));
319  }

◆ ~split_ordered_list()

template<typename T, typename Allocator>
tbb::interface5::internal::split_ordered_list< T, Allocator >::~split_ordered_list ( )
inline

Definition at line 321 of file _concurrent_unordered_impl.h.

322  {
323  // Clear the list
324  clear();
325 
326  // Remove the head element which is not cleared by clear()
327  nodeptr_t pnode = my_head;
328  my_head = NULL;
329 
330  __TBB_ASSERT(pnode != NULL && pnode->my_next == NULL, "Invalid head list node");
331 
332  destroy_node(pnode);
333  }

Member Function Documentation

◆ begin() [1/2]

◆ begin() [2/2]

template<typename T, typename Allocator>
const_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::begin ( ) const
inline

◆ cbegin()

template<typename T, typename Allocator>
const_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::cbegin ( ) const
inline

◆ cend()

template<typename T, typename Allocator>
const_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::cend ( ) const
inline

◆ check_range() [1/2]

template<typename T, typename Allocator>
void tbb::interface5::internal::split_ordered_list< T, Allocator >::check_range ( raw_iterator  first,
raw_iterator  last 
)
inlineprivate

Definition at line 654 of file _concurrent_unordered_impl.h.

655  {
656 #if TBB_USE_ASSERT
657  for (raw_iterator it = first; it != last; ++it)
658  {
659  raw_iterator next = it;
660  ++next;
661 
662  __TBB_ASSERT(next == raw_end() || get_order_key(next) >= get_order_key(it), "!!! List order inconsistency !!!");
663  }
664 #else
665  tbb::internal::suppress_unused_warning(first, last);
666 #endif
667  }

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::concurrent_unordered_base().


◆ check_range() [2/2]

◆ clear()

template<typename T, typename Allocator>
void tbb::interface5::internal::split_ordered_list< T, Allocator >::clear ( )
inline

Definition at line 341 of file _concurrent_unordered_impl.h.

341  {
342  nodeptr_t pnext;
343  nodeptr_t pnode = my_head;
344 
345  __TBB_ASSERT(my_head != NULL, "Invalid head list node");
346  pnext = pnode->my_next;
347  pnode->my_next = NULL;
348  pnode = pnext;
349 
350  while (pnode != NULL)
351  {
352  pnext = pnode->my_next;
353  destroy_node(pnode);
354  pnode = pnext;
355  }
356 
357  my_element_count = 0;
358  }

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::clear(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_copy(), and tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::~split_ordered_list().


◆ create_node() [1/3]

◆ create_node() [2/3]

template<typename T, typename Allocator>
template<typename Arg >
nodeptr_t tbb::interface5::internal::split_ordered_list< T, Allocator >::create_node ( sokey_t  order_key,
__TBB_FORWARDING_REF(Arg)  t,
tbb::internal::true_type  = tbb::internal::true_type() 
)
inline

Definition at line 273 of file _concurrent_unordered_impl.h.

274  {
275  nodeptr_t pnode = my_node_allocator.allocate(1);
276 
277  //TODO: use RAII scoped guard instead of explicit catch
278  __TBB_TRY {
279  new(static_cast<void*>(&pnode->my_element)) T(tbb::internal::forward<Arg>(t));
280  pnode->init(order_key);
281  } __TBB_CATCH(...) {
282  my_node_allocator.deallocate(pnode, 1);
283  __TBB_RETHROW();
284  }
285 
286  return (pnode);
287  }

◆ create_node() [3/3]

template<typename T, typename Allocator>
template<typename Arg >
nodeptr_t tbb::interface5::internal::split_ordered_list< T, Allocator >::create_node ( sokey_t  ,
__TBB_FORWARDING_REF(Arg)  ,
tbb::internal::false_type   
)
inline

Definition at line 291 of file _concurrent_unordered_impl.h.

292  {
293  __TBB_ASSERT(false, "This compile-time helper should never get called");
294  return nodeptr_t();
295  }

◆ create_node_v()

template<typename T, typename Allocator>
template<typename __TBB_PARAMETER_PACK Args>
nodeptr_t tbb::interface5::internal::split_ordered_list< T, Allocator >::create_node_v ( __TBB_FORWARDING_REF(Args) __TBB_PARAMETER_PACK  args)
inline

Definition at line 299 of file _concurrent_unordered_impl.h.

299  {
300  nodeptr_t pnode = my_node_allocator.allocate(1);
301 
302  //TODO: use RAII scoped guard instead of explicit catch
303  __TBB_TRY {
304  new(static_cast<void*>(&pnode->my_element)) T(__TBB_PACK_EXPANSION(tbb::internal::forward<Args>(args)));
305  } __TBB_CATCH(...) {
306  my_node_allocator.deallocate(pnode, 1);
307  __TBB_RETHROW();
308  }
309 
310  return (pnode);
311  }

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::emplace().


◆ destroy_node()

◆ empty()

template<typename T, typename Allocator>
bool tbb::interface5::internal::split_ordered_list< T, Allocator >::empty ( ) const
inline

◆ end() [1/2]

◆ end() [2/2]

template<typename T, typename Allocator>
const_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::end ( ) const
inline

Definition at line 374 of file _concurrent_unordered_impl.h.

374  {
375  return (const_iterator(0, this));
376  }

◆ erase_node() [1/5]

◆ erase_node() [2/5]

template<typename T, typename Allocator>
void tbb::interface5::internal::split_ordered_list< T, Allocator >::erase_node ( raw_iterator  previous,
raw_const_iterator where,
tbb::internal::false_type   
)
inline

Definition at line 594 of file _concurrent_unordered_impl.h.

596  {
597  erase_node_impl(previous, where);
598  }

◆ erase_node() [3/5]

template<typename T, typename Allocator>
void tbb::interface5::internal::split_ordered_list< T, Allocator >::erase_node ( raw_iterator  previous,
raw_const_iterator where 
)
inline

Definition at line 600 of file _concurrent_unordered_impl.h.

600  {
601  erase_node(previous, where, /*allow_destroy*/tbb::internal::true_type());
602  }

◆ erase_node() [4/5]

template<typename T, typename Allocator>
template<typename AllowDestroy >
iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::erase_node ( raw_iterator  previous,
const_iterator  where,
AllowDestroy   
)
inline

Definition at line 606 of file _concurrent_unordered_impl.h.

607  {
608  raw_const_iterator it = where;
609  erase_node(previous, it, AllowDestroy());
610  my_element_count--;
611 
612  return get_iterator(first_real_iterator(it));
613  }

◆ erase_node() [5/5]

template<typename T, typename Allocator>
iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::erase_node ( raw_iterator  previous,
const_iterator where 
)
inline

Definition at line 615 of file _concurrent_unordered_impl.h.

615  {
616  return erase_node(previous, where, /*allow_destroy*/tbb::internal::true_type());
617  }

◆ erase_node_impl()

template<typename T, typename Allocator>
nodeptr_t tbb::interface5::internal::split_ordered_list< T, Allocator >::erase_node_impl ( raw_iterator  previous,
raw_const_iterator where 
)
inline

Definition at line 578 of file _concurrent_unordered_impl.h.

578  {
579  nodeptr_t pnode = (where++).get_node_ptr();
580  nodeptr_t prevnode = previous.get_node_ptr();
581  __TBB_ASSERT(prevnode->my_next == pnode, "Erase must take consecutive iterators");
582  prevnode->my_next = pnode->my_next;
583  return pnode;
584  }

Referenced by tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::erase_node().


◆ first_real_iterator() [1/2]

template<typename T, typename Allocator>
iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::first_real_iterator ( raw_iterator  it)
inline

Definition at line 469 of file _concurrent_unordered_impl.h.

470  {
471  // Skip all dummy, internal only iterators
472  while (it != raw_end() && it.get_node_ptr()->is_dummy())
473  ++it;
474 
475  return iterator(it.get_node_ptr(), this);
476  }

Referenced by tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::begin(), tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::erase_node(), tbb::interface5::internal::concurrent_unordered_base< Traits >::const_range_type::set_midpoint(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::unsafe_begin(), and tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::unsafe_end().


◆ first_real_iterator() [2/2]

template<typename T, typename Allocator>
const_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::first_real_iterator ( raw_const_iterator  it) const
inline

Definition at line 480 of file _concurrent_unordered_impl.h.

481  {
482  // Skip all dummy, internal only iterators
483  while (it != raw_end() && it.get_node_ptr()->is_dummy())
484  ++it;
485 
486  return const_iterator(it.get_node_ptr(), this);
487  }

◆ get_allocator()

template<typename T, typename Allocator>
allocator_type tbb::interface5::internal::split_ordered_list< T, Allocator >::get_allocator ( ) const
inline

◆ get_iterator() [1/4]

template<typename T, typename Allocator>
iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::get_iterator ( raw_iterator  it)
inline

Definition at line 445 of file _concurrent_unordered_impl.h.

445  {
446  __TBB_ASSERT(it.get_node_ptr() == NULL || !it.get_node_ptr()->is_dummy(), "Invalid user node (dummy)");
447  return iterator(it.get_node_ptr(), this);
448  }

Referenced by tbb::interface5::internal::concurrent_unordered_base< Traits >::const_range_type::begin(), tbb::interface5::internal::concurrent_unordered_base< Traits >::const_range_type::end(), tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::erase_node(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_equal_range(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_erase(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_extract(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_find(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_insert(), tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::move_all(), and tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::unsafe_erase().


◆ get_iterator() [2/4]

template<typename T, typename Allocator>
const_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::get_iterator ( raw_const_iterator  it) const
inline

Definition at line 452 of file _concurrent_unordered_impl.h.

452  {
453  __TBB_ASSERT(it.get_node_ptr() == NULL || !it.get_node_ptr()->is_dummy(), "Invalid user node (dummy)");
454  return const_iterator(it.get_node_ptr(), this);
455  }

◆ get_iterator() [3/4]

template<typename T, typename Allocator>
raw_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::get_iterator ( raw_const_iterator  it)
inline

Definition at line 458 of file _concurrent_unordered_impl.h.

458  {
459  return raw_iterator(it.get_node_ptr());
460  }

◆ get_iterator() [4/4]

template<typename T, typename Allocator>
static iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::get_iterator ( const_iterator  it)
inlinestatic

Definition at line 463 of file _concurrent_unordered_impl.h.

463  {
464  return iterator(it.my_node_ptr, it.my_list_ptr);
465  }

◆ get_order_key()

template<typename T, typename Allocator>
static sokey_t tbb::interface5::internal::split_ordered_list< T, Allocator >::get_order_key ( const raw_const_iterator it)
inlinestatic

◆ get_safe_order_key()

template<typename T, typename Allocator>
static sokey_t tbb::interface5::internal::split_ordered_list< T, Allocator >::get_safe_order_key ( const raw_const_iterator it)
inlinestatic

Definition at line 438 of file _concurrent_unordered_impl.h.

438  {
439  if( !it.get_node_ptr() ) return ~sokey_t(0);
440  return it.get_node_ptr()->get_order_key();
441  }

◆ insert_dummy()

template<typename T, typename Allocator>
raw_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::insert_dummy ( raw_iterator  it,
sokey_t  order_key 
)
inline

Definition at line 521 of file _concurrent_unordered_impl.h.

522  {
523  raw_iterator last = raw_end();
524  raw_iterator where = it;
525 
526  __TBB_ASSERT(where != last, "Invalid head node");
527 
528  ++where;
529 
530  // Create a dummy element up front, even though it may be discarded (due to concurrent insertion)
531  nodeptr_t dummy_node = create_node(order_key);
532 
533  for (;;)
534  {
535  __TBB_ASSERT(it != last, "Invalid head list node");
536 
537  // If the head iterator is at the end of the list, or past the point where this dummy
538  // node needs to be inserted, then try to insert it.
539  if (where == last || get_order_key(where) > order_key)
540  {
541  __TBB_ASSERT(get_order_key(it) < order_key, "Invalid node order in the list");
542 
543  // Try to insert it in the right place
544  nodeptr_t inserted_node = try_insert_atomic(it.get_node_ptr(), dummy_node, where.get_node_ptr());
545 
546  if (inserted_node == dummy_node)
547  {
548  // Insertion succeeded, check the list for order violations
549  check_range(it, where);
550  return raw_iterator(dummy_node);
551  }
552  else
553  {
554  // Insertion failed: either dummy node was inserted by another thread, or
555  // a real element was inserted at exactly the same place as dummy node.
556  // Proceed with the search from the previous location where order key was
557  // known to be larger (note: this is legal only because there is no safe
558  // concurrent erase operation supported).
559  where = it;
560  ++where;
561  continue;
562  }
563  }
564  else if (get_order_key(where) == order_key)
565  {
566  // Another dummy node with the same value found, discard the new one.
567  destroy_node(dummy_node);
568  return where;
569  }
570 
571  // Move the iterator forward
572  it = where;
573  ++where;
574  }
575 
576  }

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::init_bucket().

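insert_dummy() is driven by lazy bucket initialization in concurrent_unordered_base::init_bucket() (see the reference above). In the usual split-ordered scheme, a new bucket b is split off from the bucket obtained by clearing b's most significant set bit, and the ordered search that insert_dummy() performs starts at that parent bucket's dummy node. A rough sketch of that computation, using a hypothetical parent_bucket() helper that is not part of this header:

#include <cstddef>

// Hypothetical helper, for illustration only: the parent of bucket b in a
// split-ordered hash table is b with its most significant set bit cleared.
inline std::size_t parent_bucket(std::size_t bucket) {
    std::size_t msb = 1;
    while (msb <= bucket / 2)    // locate the highest set bit of bucket
        msb <<= 1;
    return bucket & ~msb;        // clear it
}
// parent_bucket(1) == 0, parent_bucket(5) == 1, parent_bucket(6) == 2.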

◆ max_size()

template<typename T, typename Allocator>
size_type tbb::interface5::internal::split_ordered_list< T, Allocator >::max_size ( ) const
inline

Definition at line 397 of file _concurrent_unordered_impl.h.

397  {
398  return my_node_allocator.max_size();
399  }

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::max_size().


◆ move_all()

template<typename T, typename Allocator>
void tbb::interface5::internal::split_ordered_list< T, Allocator >::move_all ( self_type source)
inline

Definition at line 622 of file _concurrent_unordered_impl.h.

623  {
624  raw_const_iterator first = source.raw_begin();
625  raw_const_iterator last = source.raw_end();
626 
627  if (first == last)
628  return;
629 
630  nodeptr_t previous_node = my_head;
631  raw_const_iterator begin_iterator = first++;
632 
633  // Move all elements one by one, including dummy ones
634  for (raw_const_iterator it = first; it != last;)
635  {
636  nodeptr_t pnode = it.get_node_ptr();
637 
638  nodeptr_t dummy_node = pnode->is_dummy() ? create_node(pnode->get_order_key()) : create_node(pnode->get_order_key(), pnode->my_element);
639  previous_node = try_insert_atomic(previous_node, dummy_node, NULL);
640  __TBB_ASSERT(previous_node != NULL, "Insertion must succeed");
641  raw_const_iterator where = it++;
642  source.erase_node(get_iterator(begin_iterator), where);
643  }
644  check_range();
645  }

◆ raw_begin() [1/2]

◆ raw_begin() [2/2]

template<typename T, typename Allocator>
raw_const_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::raw_begin ( ) const
inline

Definition at line 422 of file _concurrent_unordered_impl.h.

422  {
423  return raw_const_iterator(my_head);
424  }

◆ raw_end() [1/2]

template<typename T, typename Allocator>
raw_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::raw_end ( )
inline

Definition at line 426 of file _concurrent_unordered_impl.h.

426  {
427  return raw_iterator(0);
428  }

Referenced by tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::check_range(), tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::first_real_iterator(), tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::insert_dummy(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_equal_range(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_erase(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_extract(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_find(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_insert(), tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::move_all(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::unsafe_bucket_size(), and tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::unsafe_end().


◆ raw_end() [2/2]

template<typename T, typename Allocator>
raw_const_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::raw_end ( ) const
inline

Definition at line 430 of file _concurrent_unordered_impl.h.

430  {
431  return raw_const_iterator(0);
432  }

◆ size()

◆ swap()

template<typename T, typename Allocator>
void tbb::interface5::internal::split_ordered_list< T, Allocator >::swap ( self_type other)
inline

Definition at line 402 of file _concurrent_unordered_impl.h.

403  {
404  if (this == &other)
405  {
406  // Nothing to do
407  return;
408  }
409 
410  std::swap(my_element_count, other.my_element_count);
411  std::swap(my_head, other.my_head);
412  }

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::swap().


◆ try_insert()

template<typename T, typename Allocator>
std::pair<iterator, bool> tbb::interface5::internal::split_ordered_list< T, Allocator >::try_insert ( raw_iterator  it,
raw_iterator  next,
nodeptr_t  pnode,
size_type new_count 
)
inline

Definition at line 503 of file _concurrent_unordered_impl.h.

504  {
505  nodeptr_t inserted_node = try_insert_atomic(it.get_node_ptr(), pnode, next.get_node_ptr());
506 
507  if (inserted_node == pnode)
508  {
509  // If the insert succeeded, check that the order is correct and increment the element count
510  check_range(it, next);
511  *new_count = tbb::internal::as_atomic(my_element_count).fetch_and_increment();
512  return std::pair<iterator, bool>(iterator(pnode, this), true);
513  }
514  else
515  {
516  return std::pair<iterator, bool>(end(), false);
517  }
518  }

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_insert().

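try_insert_atomic() (listed next) is the only truly concurrent step in this path: it publishes the new node with a single compare-and-swap on the predecessor's next pointer. A minimal stand-alone sketch of that step, using std::atomic and illustrative names rather than the node type defined in this header (which, roughly speaking, keeps my_next as a plain pointer and performs the swap through TBB's atomic primitives):

#include <atomic>

// Illustrative node type for the sketch only.
struct sketch_node {
    std::atomic<sketch_node*> next{nullptr};
};

// Link new_node between prev and expected_next. Returns new_node on
// success, otherwise the node some other thread installed after prev,
// so the caller can resume its search from there (as try_insert() and
// insert_dummy() do).
inline sketch_node* try_insert_atomic_sketch(sketch_node* prev,
                                             sketch_node* new_node,
                                             sketch_node* expected_next) {
    new_node->next.store(expected_next, std::memory_order_relaxed);
    sketch_node* observed = expected_next;
    if (prev->next.compare_exchange_strong(observed, new_node))
        return new_node;    // insertion succeeded
    return observed;        // lost the race
}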

◆ try_insert_atomic()

Friends And Related Function Documentation

◆ concurrent_unordered_base

template<typename T, typename Allocator>
template<typename Traits >
friend class concurrent_unordered_base
friend

Definition at line 651 of file _concurrent_unordered_impl.h.

Member Data Documentation

◆ my_element_count

◆ my_head

◆ my_node_allocator


The documentation for this class was generated from the following file:
_concurrent_unordered_impl.h

Copyright © 2005-2019 Intel Corporation. All Rights Reserved.

Intel, Pentium, Intel Xeon, Itanium, Intel XScale and VTune are registered trademarks or trademarks of Intel Corporation or its subsidiaries in the United States and other countries.

* Other names and brands may be claimed as the property of others.