#include "concurrencpp/platform_defs.h"

#include <atomic>
#include <chrono>
#include <cstdint>
#include <type_traits>

#include <cassert>
namespace concurrencpp::details {

    // Result of a timed atomic wait: "ok" means the observed value changed,
    // "timeout" means the deadline passed while the value was still unchanged.
    enum class atomic_wait_status { ok, timeout };

    /*
        Platform-independent wait/notify front-end. The definitions are selected
        by the preprocessor branch below: a native futex-style implementation on
        most platforms, and a shared wait-table fallback on macOS.
    */
    template<class type>
    void atomic_wait(std::atomic<type>& atom, type old, std::memory_order order) noexcept;

    template<class type>
    atomic_wait_status atomic_wait_for(std::atomic<type>& atom,
                                       type old,
                                       std::chrono::milliseconds ms,
                                       std::memory_order order) noexcept;

    template<class type>
    void atomic_notify_one(std::atomic<type>& atom) noexcept;

    template<class type>
    void atomic_notify_all(std::atomic<type>& atom) noexcept;

    // Compile-time validation that <<type>> is usable with the primitives
    // above: a lock-free, 4-byte integral/enumeration type whose std::atomic
    // wrapper is standard-layout (its address is passed around as void*).
    template<class type>
    void assert_atomic_type_waitable() noexcept {
        static_assert(std::is_integral_v<type> || std::is_enum_v<type>,
                      "atomic_wait/atomic_notify - <<type>> must be integral or enumeration type");
        static_assert(sizeof(type) == sizeof(uint32_t), "atomic_wait/atomic_notify - <<type>> must be 4 bytes.");
        static_assert(std::is_standard_layout_v<std::atomic<type>>,
                      "atomic_wait/atomic_notify - std::atomic<type> is not standard-layout");
        static_assert(std::atomic<type>::is_always_lock_free, "atomic_wait/atomic_notify - std::atomic<type> is not lock free");
    }
}  // namespace concurrencpp::details

42+ #if !defined(CRCPP_MAC_OS)
43+
44+ namespace concurrencpp ::details {
45+ void CRCPP_API atomic_wait_native (void * atom, uint32_t old) noexcept ;
46+ void CRCPP_API atomic_wait_for_native (void * atom, uint32_t old, std::chrono::milliseconds ms) noexcept ;
47+ void CRCPP_API atomic_notify_one_native (void * atom) noexcept ;
48+ void CRCPP_API atomic_notify_all_native (void * atom) noexcept ;
49+
1950 template <class type >
2051 void atomic_wait (std::atomic<type>& atom, type old, std::memory_order order) noexcept {
21- static_assert (std::is_standard_layout_v<std::atomic<type>>, " atomic_wait - std::atom<type> is not standard-layout" );
22- static_assert (sizeof (type) == sizeof (int32_t ), " atomic_wait - <<type>> must be 4 bytes." );
52+ assert_atomic_type_waitable<type>();
2353
2454 while (true ) {
2555 const auto val = atom.load (order);
2656 if (val != old) {
2757 return ;
2858 }
2959
30- #if defined(CRCPP_MAC_OS)
31- atom.wait (old, order);
32- #else
33- atomic_wait_native (&atom, static_cast <int32_t >(old));
34- #endif
60+ atomic_wait_native (&atom, static_cast <uint32_t >(old));
3561 }
3662 }
3763
@@ -40,14 +66,10 @@ namespace concurrencpp::details {
4066 type old,
4167 std::chrono::milliseconds ms,
4268 std::memory_order order) noexcept {
43- static_assert (std::is_standard_layout_v<std::atomic<type>>, " atomic_wait_for - std::atom<type> is not standard-layout" );
44- static_assert (sizeof (type) == sizeof (int32_t ), " atomic_wait_for - <<type>> must be 4 bytes." );
69+ assert_atomic_type_waitable<type>();
4570
4671 const auto deadline = std::chrono::system_clock::now () + ms;
4772
48- #if defined(CRCPP_MAC_OS)
49- size_t polling_cycle = 0 ;
50- #endif
5173 while (true ) {
5274 if (atom.load (order) != old) {
5375 return atomic_wait_status::ok;
@@ -62,45 +84,100 @@ namespace concurrencpp::details {
6284 return atomic_wait_status::timeout;
6385 }
6486
65- #if defined(CRCPP_MAC_OS)
66- if (polling_cycle < 64 ) {
67- std::this_thread::yield ();
68- ++polling_cycle;
69- continue ;
70- }
71-
72- if (polling_cycle < 5'000 ) {
73- std::this_thread::sleep_for (std::chrono::milliseconds (1 ));
74- ++polling_cycle;
75- continue ;
76- }
77-
78- if (polling_cycle < 10'000 ) {
79- std::this_thread::sleep_for (std::chrono::milliseconds (2 ));
80- ++polling_cycle;
81- continue ;
82- }
83-
84- std::this_thread::sleep_for (std::chrono::milliseconds (5 ));
85- ++polling_cycle;
86-
87- #else
8887 const auto time_diff = std::chrono::duration_cast<std::chrono::milliseconds>(deadline - now);
8988 assert (time_diff.count () >= 0 );
90- atomic_wait_for_native (&atom, static_cast <int32_t >(old), time_diff);
91- #endif
89+ atomic_wait_for_native (&atom, static_cast <uint32_t >(old), time_diff);
9290 }
9391 }
9492
93+ template <class type >
94+ void atomic_notify_one (std::atomic<type>& atom) noexcept {
95+ atomic_notify_one_native (&atom);
96+ }
97+
9598 template <class type >
9699 void atomic_notify_all (std::atomic<type>& atom) noexcept {
97- #if defined(CRCPP_MAC_OS)
100+ atomic_notify_all_native (&atom);
101+ }
102+ } // namespace concurrencpp::details
98103
99- atom.notify_all ();
100104#else
101- atomic_notify_all_native (&atom);
102- #endif
105+
106+ #include < memory>
107+
108+ namespace concurrencpp ::details {
109+ class atomic_wait_bucket ;
110+
111+ using atomic_comp_fn = bool (*)(void *, const uint32_t , std::memory_order) noexcept ;
112+
113+ class CRCPP_API atomic_wait_table {
114+
115+ private:
116+ std::unique_ptr<atomic_wait_bucket[]> m_buckets;
117+ const size_t m_size;
118+
119+ static size_t calc_table_size () noexcept ;
120+ size_t index_for (const void * atom) const noexcept ;
121+
122+ public:
123+ atomic_wait_table ();
124+
125+ void wait (void * atom, const uint32_t old, std::memory_order order, atomic_comp_fn comp);
126+ atomic_wait_status wait_for (void * atom,
127+ const uint32_t old,
128+ std::chrono::milliseconds ms,
129+ std::memory_order order,
130+ atomic_comp_fn comp);
131+
132+ void notify_one (const void * atom) noexcept ;
133+ void notify_all (const void * atom) noexcept ;
134+
135+ static atomic_wait_table& instance ();
136+ };
137+
138+ template <class type >
139+ void atomic_wait (std::atomic<type>& atom, type old, std::memory_order order) noexcept {
140+ assert_atomic_type_waitable<type>();
141+
142+ auto comp = [](void * atom_, const uint32_t old_, std::memory_order order_) noexcept -> bool {
143+ auto & original_atom = *static_cast <std::atomic<type>*>(atom_);
144+ const auto original_old = static_cast <type>(old_);
145+
146+ return original_atom.load (order_) == original_old;
147+ };
148+
149+ atomic_wait_table::instance ().wait (&atom, static_cast <uint32_t >(old), order, comp);
150+ }
151+
152+ template <class type >
153+ atomic_wait_status atomic_wait_for (std::atomic<type>& atom,
154+ type old,
155+ std::chrono::milliseconds ms,
156+ std::memory_order order) noexcept {
157+ assert_atomic_type_waitable<type>();
158+
159+ auto comp = [](void * atom_, const uint32_t old_, std::memory_order order_) noexcept -> bool {
160+ auto & original_atom = *static_cast <std::atomic<type>*>(atom_);
161+ const auto original_old = static_cast <type>(old_);
162+
163+ return original_atom.load (order_) == original_old;
164+ };
165+
166+ return atomic_wait_table::instance ().wait_for (&atom, static_cast <uint32_t >(old), ms, order, comp);
103167 }
168+
169+ template <class type >
170+ void atomic_notify_one (std::atomic<type>& atom) noexcept {
171+ atomic_wait_table::instance ().notify_one (&atom);
172+ }
173+
174+ template <class type >
175+ void atomic_notify_all (std::atomic<type>& atom) noexcept {
176+ atomic_wait_table::instance ().notify_all (&atom);
177+ }
178+
104179} // namespace concurrencpp::details
105180
106181#endif
183+ #endif