30#ifndef _GLIBCXX_ATOMIC_WAIT_H
31#define _GLIBCXX_ATOMIC_WAIT_H 1
33#pragma GCC system_header
36#if defined _GLIBCXX_HAS_GTHREADS || defined _GLIBCXX_HAVE_LINUX_FUTEX
41#ifdef _GLIBCXX_HAVE_LINUX_FUTEX
51#define __cpp_lib_atomic_wait 201907L
53namespace std _GLIBCXX_VISIBILITY(default)
55_GLIBCXX_BEGIN_NAMESPACE_VERSION
// Platform-specific wait primitive type.  On Linux, waiting is done with
// the futex(2) syscall, which operates on a 4-byte-aligned int.
58#ifdef _GLIBCXX_HAVE_LINUX_FUTEX
59#define _GLIBCXX_HAVE_PLATFORM_WAIT 1
60 using __platform_wait_t = int;
61 inline constexpr size_t __platform_wait_alignment = 4;
// Fallback for targets without futex: a lock-free unsigned long when
// ATOMIC_LONG_LOCK_FREE == 2, otherwise unsigned int, using the type's
// natural alignment.  NOTE(review): the #else/#endif lines joining these
// branches are missing from this extract.
67# if ATOMIC_LONG_LOCK_FREE == 2
68 using __platform_wait_t =
unsigned long;
70 using __platform_wait_t =
unsigned int;
72 inline constexpr size_t __platform_wait_alignment
73 = __alignof__(__platform_wait_t);
// True iff an atomic object of type _Tp can be waited on directly with
// the platform primitive (futex): _Tp must be a scalar whose object
// representation has the same size as __platform_wait_t and whose
// alignment satisfies the platform requirement.  Otherwise the generic
// proxy word (__waiter_pool_base::_M_ver) is used instead.
template<typename _Tp>
  inline constexpr bool __platform_wait_uses_type
#ifdef _GLIBCXX_HAVE_PLATFORM_WAIT
    = is_scalar_v<_Tp>
      && ((sizeof(_Tp) == sizeof(__detail::__platform_wait_t))
      // Fix: test the alignment of _Tp itself.  The previous
      // alignof(_Tp*) tested the alignment of a *pointer to* _Tp,
      // which says nothing about whether the object meets
      // __platform_wait_alignment (matches the upstream correction).
      && (alignof(_Tp) >= __detail::__platform_wait_alignment));
#else
    = false;
#endif
// Operation codes passed to syscall(SYS_futex, ...).  The *_private
// variants OR in __private_flag (presumably FUTEX_PRIVATE_FLAG —
// process-private futexes), guarded by the FUTEX_PRIVATE feature macro.
// NOTE(review): the base enumerators (__wait, __wake, __wait_bitset,
// __wake_bitset, __private_flag) are on lines missing from this extract.
89#ifdef _GLIBCXX_HAVE_LINUX_FUTEX
90 enum class __futex_wait_flags :
int
92#ifdef _GLIBCXX_HAVE_LINUX_FUTEX_PRIVATE
101 __wait_private = __wait | __private_flag,
102 __wake_private = __wake | __private_flag,
103 __wait_bitset_private = __wait_bitset | __private_flag,
104 __wake_bitset_private = __wake_bitset | __private_flag,
// All-ones bitset: matches any waiter (FUTEX_BITSET_MATCH_ANY).
105 __bitset_match_any = -1
// Block the calling thread with FUTEX_WAIT_PRIVATE on *__addr while it
// still equals __val.  A syscall failure with EAGAIN means the value
// already differed — not an error; any other failure is reported via
// __throw_system_error(errno).  NOTE(review): several lines (the __val
// argument to the syscall, remaining error handling, braces) are
// missing from this extract — confirm against the full header.
108 template<
typename _Tp>
110 __platform_wait(
const _Tp* __addr, __platform_wait_t __val)
noexcept
112 auto __e = syscall (SYS_futex,
static_cast<const void*
>(__addr),
113 static_cast<int>(__futex_wait_flags::__wait_private),
115 if (!__e || errno == EAGAIN)
118 __throw_system_error(errno);
// Wake threads blocked in __platform_wait on __addr via
// FUTEX_WAKE_PRIVATE: all of them (INT_MAX) when __all is true,
// otherwise exactly one.
121 template<
typename _Tp>
123 __platform_notify(
const _Tp* __addr,
bool __all)
noexcept
125 syscall (SYS_futex,
static_cast<const void*
>(__addr),
126 static_cast<int>(__futex_wait_flags::__wake_private),
127 __all ? INT_MAX : 1);
// Yield the processor between spin iterations.  NOTE(review): the body
// is on lines missing from this extract; presumably it calls
// __gthread_yield() when gthreads/sched_yield are available — confirm.
132 __thread_yield() noexcept
134#if defined _GLIBCXX_HAS_GTHREADS && defined _GLIBCXX_USE_SCHED_YIELD
// Cheap CPU relaxation hint for tight spin loops: the PAUSE instruction
// on i386/x86_64.  NOTE(review): branches for other targets (and the
// fallback) are on lines missing from this extract.
140 __thread_relax() noexcept
142#if defined __i386__ || defined __x86_64__
143 __builtin_ia32_pause();
// Spin tuning: up to 16 spin iterations total, the first 12 using the
// cheap __thread_relax() hint before falling back to __thread_yield().
149 inline constexpr auto __atomic_spin_count_relax = 12;
150 inline constexpr auto __atomic_spin_count = 16;
// Default spin-policy hook used by __atomic_spin after the fixed spin
// rounds.  NOTE(review): the operator() body is on a line missing from
// this extract; it is expected to report "no further spinning" —
// confirm against the full header.
152 struct __default_spin_policy
155 operator()() const noexcept
// Spin briefly re-testing __pred; the first __atomic_spin_count_relax
// iterations use the cheap CPU hint (__thread_relax), the remainder
// yield the processor.  NOTE(review): the lines that test __pred(),
// invoke the __spin policy, and return the result are missing from this
// extract.
159 template<
typename _Pred,
160 typename _Spin = __default_spin_policy>
162 __atomic_spin(_Pred& __pred, _Spin __spin = _Spin{ })
noexcept
164 for (
auto __i = 0; __i < __atomic_spin_count; ++__i)
169 if (__i < __atomic_spin_count_relax)
170 __detail::__thread_relax();
172 __detail::__thread_yield();
// Bytewise equality of two _Tp objects by memcmp of their object
// representations (so _Tp need not be equality-comparable; padding
// bits, if any, participate in the comparison).
185 template<
typename _Tp>
186 bool __atomic_compare(
const _Tp& __a,
const _Tp& __b)
189 return __builtin_memcmp(&__a, &__b,
sizeof(_Tp)) == 0;
// Shared bookkeeping for one bucket of waiting threads.  _M_wait counts
// threads currently registered as waiting; _M_ver is the proxy version
// word waited on when the user's atomic cannot be used directly.  Both
// are alignas(64) to keep them on separate cache lines and avoid false
// sharing.  NOTE(review): several members (_M_mtx, a condition
// variable, closing braces) are on lines missing from this extract.
192 struct __waiter_pool_base
196 static constexpr auto _S_align = 64;
198 alignas(_S_align) __platform_wait_t _M_wait = 0;
200#ifndef _GLIBCXX_HAVE_PLATFORM_WAIT
204 alignas(_S_align) __platform_wait_t _M_ver = 0;
206#ifndef _GLIBCXX_HAVE_PLATFORM_WAIT
209 __waiter_pool_base() =
default;
// Register / unregister this thread in the waiter count (seq_cst
// increment on entry, release decrement on exit).
212 _M_enter_wait() noexcept
213 { __atomic_fetch_add(&_M_wait, 1, __ATOMIC_SEQ_CST); }
216 _M_leave_wait() noexcept
217 { __atomic_fetch_sub(&_M_wait, 1, __ATOMIC_RELEASE); }
// True if any thread is currently registered as waiting.
220 _M_waiting() const noexcept
222 __platform_wait_t __res;
223 __atomic_load(&_M_wait, &__res, __ATOMIC_SEQ_CST);
// Wake waiters on __addr.  When __addr is the proxy word (_M_ver) it is
// bumped first so sleepers observe a changed value.  The wake is
// skipped when nobody is registered, unless __bare forces it.
228 _M_notify(__platform_wait_t* __addr, [[maybe_unused]]
bool __all,
229 bool __bare)
noexcept
231#ifdef _GLIBCXX_HAVE_PLATFORM_WAIT
232 if (__addr == &_M_ver)
234 __atomic_fetch_add(__addr, 1, __ATOMIC_SEQ_CST);
238 if (__bare || _M_waiting())
239 __platform_notify(__addr, __all);
// Non-futex fallback: bump the word under the mutex; the condition
// variable notification is on lines missing from this extract.
242 lock_guard<mutex> __l(_M_mtx);
243 __atomic_fetch_add(__addr, 1, __ATOMIC_RELAXED);
245 if (__bare || _M_waiting())
// Map an arbitrary object address to one of 16 statically allocated
// pools via a simple shift-and-mod hash; unrelated objects may share a
// bucket.
250 static __waiter_pool_base&
251 _S_for(
const void* __addr)
noexcept
253 constexpr uintptr_t __ct = 16;
254 static __waiter_pool_base __w[__ct];
255 auto __key = (uintptr_t(__addr) >> 2) % __ct;
// Waiter bucket whose _M_do_wait blocks until *__addr no longer holds
// __old.
260 struct __waiter_pool : __waiter_pool_base
263 _M_do_wait(
const __platform_wait_t* __addr, __platform_wait_t __old)
noexcept
// Futex path: the kernel compares *__addr with __old and sleeps only if
// they are still equal.
265#ifdef _GLIBCXX_HAVE_PLATFORM_WAIT
266 __platform_wait(__addr, __old);
// Fallback: re-load the value and (on lines missing from this extract)
// wait on the pool's condition variable under _M_mtx while unchanged.
268 __platform_wait_t __val;
269 __atomic_load(__addr, &__val, __ATOMIC_SEQ_CST);
272 lock_guard<mutex> __l(_M_mtx);
273 __atomic_load(__addr, &__val, __ATOMIC_RELAXED);
// Base holding a reference to the per-address pool (_M_w, declared on a
// line missing from this extract) and the actual wait word _M_addr:
// the user's own atomic object when its type can use the platform wait
// directly, otherwise the pool's proxy version counter.
281 template<
typename _Tp>
284 using __waiter_type = _Tp;
287 __platform_wait_t* _M_addr;
// Choose the wait address: the atomic itself (constness cast away; it
// is only handed to the wait primitive, never written through here)
// when __platform_wait_uses_type<_Up>, else the fallback word __b.
// NOTE(review): the "else return __b;" line is missing here.
289 template<
typename _Up>
290 static __platform_wait_t*
291 _S_wait_addr(
const _Up* __a, __platform_wait_t* __b)
293 if constexpr (__platform_wait_uses_type<_Up>)
294 return reinterpret_cast<__platform_wait_t*
>(
const_cast<_Up*
>(__a));
// View the shared __waiter_pool_base bucket as the derived waiter type;
// the static_assert pins the same-size layout assumption.
299 static __waiter_type&
300 _S_for(
const void* __addr)
noexcept
302 static_assert(
sizeof(__waiter_type) ==
sizeof(__waiter_pool_base));
303 auto& res = __waiter_pool_base::_S_for(__addr);
304 return reinterpret_cast<__waiter_type&
>(res);
307 template<
typename _Up>
308 explicit __waiter_base(
const _Up* __addr) noexcept
309 : _M_w(_S_for(__addr))
310 , _M_addr(_S_wait_addr(__addr, &_M_w._M_ver))
// Forward a notification to the pool for this wait word.
314 _M_notify(
bool __all,
bool __bare =
false) noexcept
315 { _M_w._M_notify(_M_addr, __all, __bare); }
// Spin while the observed value still bytewise-equals __old; __val is
// set to the snapshot of the wait word to hand to the blocking wait,
// so a change between spinning and sleeping is not missed.
317 template<
typename _Up,
typename _ValFn,
318 typename _Spin = __default_spin_policy>
320 _S_do_spin_v(__platform_wait_t* __addr,
321 const _Up& __old, _ValFn __vfn,
322 __platform_wait_t& __val,
323 _Spin __spin = _Spin{ })
325 auto const __pred = [=]
326 {
return !__detail::__atomic_compare(__old, __vfn()); };
// Waiting on the atomic itself: the expected bits are __old's object
// representation; otherwise snapshot the proxy counter with acquire.
328 if constexpr (__platform_wait_uses_type<_Up>)
330 __builtin_memcpy(&__val, &__old,
sizeof(__val));
334 __atomic_load(__addr, &__val, __ATOMIC_ACQUIRE);
336 return __atomic_spin(__pred, __spin);
// Member convenience wrapper over _S_do_spin_v using this->_M_addr.
339 template<
typename _Up,
typename _ValFn,
340 typename _Spin = __default_spin_policy>
342 _M_do_spin_v(
const _Up& __old, _ValFn __vfn,
343 __platform_wait_t& __val,
344 _Spin __spin = _Spin{ })
345 {
return _S_do_spin_v(_M_addr, __old, __vfn, __val, __spin); }
// Predicate form: snapshot the wait word, then spin on __pred.
// NOTE(review): the _Pred parameter declaration line is missing from
// this extract.
347 template<
typename _Pred,
348 typename _Spin = __default_spin_policy>
350 _S_do_spin(
const __platform_wait_t* __addr,
352 __platform_wait_t& __val,
353 _Spin __spin = _Spin{ })
355 __atomic_load(__addr, &__val, __ATOMIC_ACQUIRE);
356 return __atomic_spin(__pred, __spin);
// Member convenience wrapper over _S_do_spin using this->_M_addr.
359 template<
typename _Pred,
360 typename _Spin = __default_spin_policy>
362 _M_do_spin(_Pred __pred, __platform_wait_t& __val,
363 _Spin __spin = _Spin{ })
364 {
return _S_do_spin(_M_addr, __pred, __val, __spin); }
// RAII waiter: when _EntersWait::value is true, registers in the pool's
// waiter count on construction and unregisters on destruction (the
// destructor's signature lines are missing from this extract).
367 template<
typename _EntersWait>
368 struct __waiter : __waiter_base<__waiter_pool>
370 using __base_type = __waiter_base<__waiter_pool>;
372 template<
typename _Tp>
373 explicit __waiter(
const _Tp* __addr) noexcept
374 : __base_type(__addr)
376 if constexpr (_EntersWait::value)
377 _M_w._M_enter_wait();
382 if constexpr (_EntersWait::value)
383 _M_w._M_leave_wait();
// Block until the value read via __vfn() no longer bytewise-equals
// __old: spin first, then sleep in _M_do_wait, re-checking after every
// wake-up (the while re-check handles spurious/unrelated wakes).
386 template<
typename _Tp,
typename _ValFn>
388 _M_do_wait_v(_Tp __old, _ValFn __vfn)
392 __platform_wait_t __val;
393 if (__base_type::_M_do_spin_v(__old, __vfn, __val))
395 __base_type::_M_w._M_do_wait(__base_type::_M_addr, __val);
397 while (__detail::__atomic_compare(__old, __vfn()));
// Block until __pred() holds, with the same spin-then-sleep structure
// (the enclosing loop's lines are missing from this extract).
400 template<
typename _Pred>
402 _M_do_wait(_Pred __pred)
noexcept
406 __platform_wait_t __val;
407 if (__base_type::_M_do_spin(__pred, __val))
409 __base_type::_M_w._M_do_wait(__base_type::_M_addr, __val);
// Waiter flavours: __enters_wait maintains the pool's waiter count;
// __bare_wait does not (used by the notify functions and bare waits).
415 using __enters_wait = __waiter<std::true_type>;
416 using __bare_wait = __waiter<std::false_type>;
// Entry point for atomic waits: block until the value re-read by __vfn
// differs (bytewise) from __old; registers in the waiter count for the
// duration via __enters_wait.
419 template<
typename _Tp,
typename _ValFn>
421 __atomic_wait_address_v(
const _Tp* __addr, _Tp __old,
422 _ValFn __vfn)
noexcept
424 __detail::__enters_wait __w(__addr);
425 __w._M_do_wait_v(__old, __vfn);
// Predicate form of the entry point: block until __pred() is satisfied,
// registering in the waiter count for the duration.
428 template<
typename _Tp,
typename _Pred>
430 __atomic_wait_address(
const _Tp* __addr, _Pred __pred)
noexcept
432 __detail::__enters_wait __w(__addr);
433 __w._M_do_wait(__pred);
// "Bare" wait on a raw platform word: no waiter-count bookkeeping.
// With platform wait available it spins then sleeps on the futex
// directly; otherwise it falls back to a __bare_wait proxy.
437 template<
typename _Pred>
439 __atomic_wait_address_bare(
const __detail::__platform_wait_t* __addr,
440 _Pred __pred)
noexcept
442#ifdef _GLIBCXX_HAVE_PLATFORM_WAIT
445 __detail::__platform_wait_t __val;
446 if (__detail::__bare_wait::_S_do_spin(__addr, __pred, __val))
448 __detail::__platform_wait(__addr, __val);
// NOTE(review): the loop re-checking __pred() after each wake is on
// lines missing from this extract.
452 __detail::__bare_wait __w(__addr);
453 __w._M_do_wait(__pred);
// Wake threads waiting on the atomic at __addr (all of them when __all,
// else one).  Because __bare defaults to false in _M_notify, the wake
// is skipped when no waiter is registered in the pool.
457 template<
typename _Tp>
459 __atomic_notify_address(
const _Tp* __addr,
bool __all)
noexcept
461 __detail::__bare_wait __w(__addr);
462 __w._M_notify(__all);
// Unconditional ("bare") wake on a raw platform word: futex wake when
// available, otherwise notify through the pool with __bare = true so
// the waiter-count check is bypassed.  NOTE(review): the bool __all
// parameter's declaration line is missing from this extract.
467 __atomic_notify_address_bare(
const __detail::__platform_wait_t* __addr,
470#ifdef _GLIBCXX_HAVE_PLATFORM_WAIT
471 __detail::__platform_notify(__addr, __all);
473 __detail::__bare_wait __w(__addr);
474 __w._M_notify(__all,
true);
477_GLIBCXX_END_NAMESPACE_VERSION
// The top-level namespace for all ISO C++ entities in this header is std.