Lockless Task Scheduler v1.0a
lockfreenode.h — lock-free node primitives for the lockless task scheduler
// ***********************************************************************
// <copyright file="lockfreenode.h" >
// Copyright (c) viknash. All rights reserved.
// </copyright>
// <summary>Lock-free node and double-word atomic node-pointer types.</summary>
// ***********************************************************************
7 #pragma once
8 
9 #include <atomic>
10 
11 #include "atomics.h"
12 
13 namespace task_scheduler {
14  union address {
15  volatile void *ptr;
16  atomics::type as_atomic;
17  };
18 
19  class dummy
20  {
21 
22  };
23 
24  template < typename T, class TMemInterface > struct lock_free_node;
25 
26  template < typename T, class TMemInterface > struct atomic_lock_free_node : public TMemInterface
27  {
29  union {
30  node_type *volatile node;
31  atomics::type as_atomic;
32  };
33  };
34  //static_assert(sizeof(atomic_lock_free_node< void *, dummy >) == sizeof(address),
35  // "size of atomic_lock_free_node is incorrect.");
36 
37  template < typename T, class TMemInterface > struct class_alignment lock_free_node : public TMemInterface
38  {
40 
41  explicit operator void *() const { return reinterpret_cast<void *>(this); }
42 
43  inline lock_free_node() { next.node = nullptr; }
44  inline const T &load() const { return value; }
45  inline void store(const T &_value) { value = _value; }
46 
47  atomic_node next;
48  T value;
49 
50  static_assert(sizeof(T) <= sizeof(address), "T must be a POD");
51  };
52  //static_assert(sizeof(lock_free_node< void *, dummy >) == sizeof(address) * 2,
53  // "size of atomic_lock_free_node is incorrect.");
54 
55  template < typename T, class TMemInterface > struct class_alignment atomic_lock_free_node_ptr : public TMemInterface
56  {
58 #pragma warning(push)
59 #pragma warning(disable : 4201)
60  union Data {
61  struct
62  {
63  atomic_node points_to;
64  address access;
65  };
66  atomics::type as_atomic[2];
67 
68  Data() {}
69  ~Data() {}
70  };
71 #pragma warning(pop)
72  inline void clear()
73  {
74  data.access.ptr = nullptr;
75  data.points_to.node = nullptr;
76  }
77 
78  inline bool compare_exchange_weak(atomic_lock_free_node_ptr &comperand,
79  atomic_lock_free_node_ptr &value) volatile
80  {
81  return atomics::compare_exchange_weak_128(data.as_atomic, comperand.data.as_atomic,
82  value.data.access.as_atomic, value.data.points_to.as_atomic);
83  }
84 
85  void operator=(const volatile atomic_lock_free_node_ptr &other)
86  {
87  data.access.as_atomic = other.data.access.as_atomic;
88  data.points_to.node = other.data.points_to.node;
89  }
90 
93 
94  Data data;
95  };
96  static_assert(sizeof(atomic_lock_free_node_ptr< bool, dummy >) == sizeof(address) * 2,
97  "size of atomic_lock_free_node_ptr is incorrect.");
98 }
Class stl_allocator.
Definition: allocator.h:16
Definition: lockfreenode.h:26
bool compare_exchange_weak_128(volatile int64_t _data[], int64_t _comperand[], int64_t _value_hi, int64_t _value_lo)
Compares the exchange weak 128.
Definition: atomics.h:108
Definition: lockfreenode.h:14
Definition: lockfreenode.h:24
Definition: lockfreenode.h:19
Definition: lockfreenode.h:55