1#ifndef NEFORCE_CORE_ASYNC_ATOMIC_WAIT_HPP__
2#define NEFORCE_CORE_ASYNC_ATOMIC_WAIT_HPP__
14NEFORCE_BEGIN_NAMESPACE__
// NOTE(review): fragment of a spin policy's call operator; the enclosing
// struct header (presumably default_spin_policy, referenced as the default
// Spin argument below) is missing from this chunk. Returning false appears
// to mean "no custom spin was performed", letting the caller fall back to
// its own pause/yield loop -- TODO confirm against the full source.
35 bool operator()()
const noexcept {
return false; }
// Spin phase executed before blocking: at most 16 iterations, the first 12
// of which take the cheaper "relax" branch (presumably a pause-style hint);
// the remaining iterations and the loop's exit path are elided from this
// chunk, so the exact fallback (yield? Spin policy call?) cannot be
// confirmed here. Pred/Spin are the predicate to poll and the pluggable
// spin policy (default_spin_policy by default).
52template <
typename Pred,
typename Spin = default_spin_policy>
54 constexpr auto atomic_spin_count = 16;
55 constexpr auto atomic_spin_count_relax = 12;
56 for (
auto idx = 0; idx < atomic_spin_count; ++idx) {
60 if (idx < atomic_spin_count_relax) {
// Per-address waiter bookkeeping shared by the atomic wait/notify
// machinery. Each entry tracks how many threads are blocked (the `wait`
// counter) and owns a proxy word (`value`) that can stand in for the
// user's address. Entries live in a fixed static pool and are selected by
// hashing the waited-on address (waiter_for below).
85struct waiter_pool_base {
// presumably a cache-line size used to pad members against false sharing
// -- TODO confirm; the member declarations it aligns are elided here.
86 static constexpr auto align_inner = 64;
91 waiter_pool_base() =
default;
// Registers the calling thread as a waiter: increments `wait` with an
// Interlocked intrinsic on Windows, a seq_cst __atomic builtin elsewhere.
// (The #else/#endif lines are elided from this chunk.)
98 void waiter_enter_wait() noexcept {
99#ifdef NEFORCE_PLATFORM_WINDOWS
100 ::_InterlockedIncrement(&
wait);
102 __atomic_fetch_add(&
wait, 1, __ATOMIC_SEQ_CST);
// Deregisters the calling thread. NOTE(review): the non-Windows path uses
// __ATOMIC_RELEASE, weaker than the seq_cst used on entry -- verify this
// asymmetry is intentional against the full source.
111 void waiter_leave_wait() noexcept {
112#ifdef NEFORCE_PLATFORM_WINDOWS
113 ::_InterlockedDecrement(&
wait);
115 __atomic_fetch_sub(&
wait, 1, __ATOMIC_RELEASE);
// True when at least one thread is registered as waiting (the Windows
// branch body and the `res` declaration are elided from this chunk).
123 bool waiter_waiting() const noexcept {
124#ifdef NEFORCE_PLATFORM_WINDOWS
128 __atomic_load(&
wait, &res, __ATOMIC_SEQ_CST);
// Wakes waiters on addr. When addr is this entry's own proxy `value`,
// the proxy is bumped first so spinners observe a change. The OS-level
// wake call is in the elided lines; the visible guard shows it only runs
// when `bare` is requested or someone is actually waiting.
139 void waiter_notify(
platform_wait_t* addr,
bool all,
const bool bare)
const noexcept {
140 if (addr == &value) {
141#ifdef NEFORCE_PLATFORM_WINDOWS
142 ::_InterlockedIncrement(addr);
144 __atomic_fetch_add(addr, 1, __ATOMIC_SEQ_CST);
148 if (bare || waiter_waiting()) {
// Maps an arbitrary address onto one of pool_size statically-allocated
// entries. The >> 2 discards low alignment bits before the modulo so
// nearby objects spread across entries; distinct addresses may still
// share an entry (harmless: spurious wakeups only).
160 static waiter_pool_base& waiter_for(
const void* addr)
noexcept {
162 static waiter_pool_base waiter[pool_size];
163 const auto key = (
reinterpret_cast<uintptr_t>(addr) >> 2) % pool_size;
// waiter_pool extends the base bookkeeping; its members (presumably the
// actual OS wait, e.g. futex/WaitOnAddress) are elided from this chunk.
174struct waiter_pool : waiter_pool_base {
// The members below appear to belong to waiter_base<T> (see waiter_type /
// waiter_ and the waiter_base constructor further down); the class header
// itself is elided -- TODO confirm against the full source.
205 template <
typename U>
// Trait: whether U can be waited on directly by the platform primitive
// (its right-hand side is elided here).
206 static constexpr bool platform_wait_valid_v =
// Overload pair selected on platform_wait_valid_v: the first presumably
// uses the user's object directly, the second (visible below) snapshots a
// platform_wait_t proxy via an atomic load.
209 template <
typename U, enable_if_t<platform_wait_val
id_v<U>,
int> = 0>
213 template <
typename U, enable_if_t<!platform_wait_val
id_v<U>,
int> = 0>
// Atomic read of *addr into `value`: InterlockedExchangeAdd(addr, 0) is
// the Windows idiom for an atomic load; elsewhere an acquire __atomic_load.
215#ifdef NEFORCE_PLATFORM_WINDOWS
216 value = ::_InterlockedExchangeAdd(addr, 0);
218 __atomic_load(addr, &value, __ATOMIC_ACQUIRE);
223 using waiter_type = T;
// Reference to the pool entry this waiter hashes to (bound in the ctor).
225 waiter_type& waiter_;
231 template <
typename U>
239 template <
typename U>
// Looks up the base-class pool entry for addr and reinterprets it as the
// derived waiter_type. The static_assert pins equal sizes; NOTE(review):
// the cast additionally relies on layout compatibility between
// waiter_pool_base and waiter_type -- verify waiter_type adds no members.
247 static waiter_type& waiter_for(
const void* addr)
noexcept {
248 static_assert(
sizeof(waiter_type) ==
sizeof(waiter_pool_base),
249 "waiter_for should be same size with waiter_pool_base");
250 auto& res = waiter_pool_base::waiter_for(addr);
251 return reinterpret_cast<waiter_type&
>(res);
// Binds waiter_ to the hashed pool entry and addr_ to the address actually
// waited on -- waiter_wait_addr presumably picks either the user's object
// or the pool proxy `value` depending on platform_wait_valid_v; its
// definition is elided, so TODO confirm.
259 template <
typename U>
260 explicit waiter_base(
const U* addr) noexcept :
261 waiter_(waiter_base::waiter_for(addr)),
262 addr_(waiter_base::waiter_wait_addr(addr, &waiter_.value)) {}
// Forwards a notify for this waiter's address; bare defaults to false so
// the wake is skipped when nobody is registered (see waiter_pool_base).
269 void waiter_notify(
bool all,
bool bare =
false) noexcept { waiter_.waiter_notify(addr_, all, bare); }
// Static spin helper for value-based waits (signature partially elided);
// delegates to waiter_do_spin_v_impl with the snapshotted value.
274 template <
typename U,
typename Func,
typename Spin = default_spin_policy>
276 Spin spin = Spin{}) {
278 waiter_base::waiter_do_spin_v_impl(addr, old, value);
// Member convenience overload: same spin, bound to this waiter's addr_.
285 template <
typename U,
typename Func,
typename Spin = default_spin_policy>
286 bool waiter_do_spin_v(
const U& old, Func f,
platform_wait_t& value, Spin spin = Spin{}) {
287 return waiter_base::waiter_do_spin_v(addr_, old, f, value, spin);
// Predicate-based spin (signature partially elided). The Windows branch
// const_casts to volatile LONG* for InterlockedExchangeAdd's signature --
// an atomic load idiom, as above.
293 template <
typename Pred,
typename Spin = default_spin_policy>
295#ifdef NEFORCE_PLATFORM_WINDOWS
296 value = ::_InterlockedExchangeAdd(
const_cast<volatile LONG*
>(addr), 0);
298 __atomic_load(addr, &value, __ATOMIC_ACQUIRE);
// Member convenience overload bound to addr_.
306 template <
typename Pred,
typename Spin = default_spin_policy>
307 bool waiter_do_spin(Pred pred,
platform_wait_t& value, Spin spin = Spin{}) {
308 return waiter_base::waiter_do_spin(addr_, pred, value, spin);
// RAII waiter front-end. EntersWait (true_type/false_type, see the
// enters_wait / bare_wait aliases) statically selects whether this waiter
// registers itself in the pool's wait counter: the enter()/leave() pairs
// below are SFINAE'd so the false_type variant compiles to no-ops.
320template <
typename EntersWait>
321struct waiter : waiter_base<waiter_pool> {
323 using base_type = waiter_base<waiter_pool>;
// enter(): bump the pool's waiter count (only when EntersWait is true).
326 template <
bool Wait = EntersWait::value, enable_if_t<Wait,
int> = 0>
327 NEFORCE_ALWAYS_INLINE
void enter() const noexcept {
328 waiter_.waiter_enter_wait();
// No-op enter() for bare (non-registering) waiters.
330 template <
bool Wait = EntersWait::value, enable_if_t<!Wait,
int> = 0>
331 NEFORCE_ALWAYS_INLINE
void enter() const noexcept {}
// leave(): drop the pool's waiter count (only when EntersWait is true).
333 template <
bool Wait = EntersWait::value, enable_if_t<Wait,
int> = 0>
334 NEFORCE_ALWAYS_INLINE
void leave() const noexcept {
335 waiter_.waiter_leave_wait();
// No-op leave() for bare waiters.
337 template <
bool Wait = EntersWait::value, enable_if_t<!Wait,
int> = 0>
338 NEFORCE_ALWAYS_INLINE
void leave() const noexcept {}
// Constructor body is elided here -- presumably it calls enter(), since
// the destructor below unconditionally pairs with leave(); TODO confirm.
346 template <
typename T>
347 explicit waiter(
const T* addr) noexcept :
// RAII: deregistration happens even on early return/exception.
355 ~waiter() { leave(); }
// Value-based wait: spin first; only if the spin phase says so, fall
// through to the blocking OS-level wait on the snapshotted value.
366 template <
typename T,
typename Func>
367 void waiter_do_wait_v(T old, Func f) {
370 if (base_type::waiter_do_spin_v(old, f, value)) {
373 waiter_.waiter_do_wait(base_type::addr_, value);
// Predicate-based wait: same spin-then-block structure.
382 template <
typename Pred>
383 void waiter_do_wait(Pred pred)
noexcept {
386 if (base_type::waiter_do_spin(pred, value)) {
389 waiter_.waiter_do_wait(base_type::addr_, value);
// enters_wait registers itself in the pool's wait counter (enter/leave are
// real operations); bare_wait skips the bookkeeping entirely -- used on the
// notify path, which never blocks.
395using enters_wait = waiter<true_type>;
398using bare_wait = waiter<false_type>;
// Public value-based wait: blocks the caller until the value read by f at
// addr differs from `old`. The function signature lines are elided from
// this chunk; only the body survives. Uses a registering (enters_wait)
// waiter so notifiers can see someone is blocked.
413template <
typename T,
typename Func>
415 inner::enters_wait waiter(addr);
416 waiter.waiter_do_wait_v(old, f);
// Public predicate-based wait: blocks until pred() returns true.
428template <
typename T,
typename Pred>
430 inner::enters_wait waiter(addr);
431 waiter.waiter_do_wait(pred);
// Public notify: wakes waiter(s) on addr. A bare_wait is used because the
// notifier must not register itself as a waiter.
445 inner::bare_wait waiter(addr);
446 waiter.waiter_notify(all);
453NEFORCE_END_NAMESPACE__
void atomic_wait_address(const T *addr, Pred pred) noexcept
基于谓词的原子等待
bool atomic_spin(Pred &pred, Spin spin=Spin{}) noexcept
原子自旋等待
void atomic_wait_address_v(const T *addr, T old, Func f) noexcept
基于值的原子等待
void atomic_notify_address(const T *addr, const bool all) noexcept
原子通知
NEFORCE_INLINE17 constexpr bool is_scalar_v
is_scalar的便捷变量模板
void NEFORCE_API futex_notify(void *addr, bool all) noexcept
通知等待的线程
void NEFORCE_API futex_wait(void *addr, platform_wait_t value) noexcept
无限期等待FUTEX
int platform_wait_t
平台等待类型别名
NEFORCE_CONSTEXPR14 void * memory_copy(void *NEFORCE_RESTRICT dest, const void *NEFORCE_RESTRICT src, size_t count) noexcept
从源内存复制到目标内存
NEFORCE_PURE_FUNCTION NEFORCE_CONSTEXPR14 int memory_compare(const void *lhs, const void *rhs, size_t count) noexcept
比较两个内存区域的内容
typename enable_if< Test, T >::type enable_if_t
enable_if的便捷别名