30#ifndef _GLIBCXX_ATOMIC_TIMED_WAIT_H
31#define _GLIBCXX_ATOMIC_TIMED_WAIT_H 1
33#pragma GCC system_header
37#if __cpp_lib_atomic_wait
42#ifdef _GLIBCXX_HAVE_LINUX_FUTEX
46namespace std _GLIBCXX_VISIBILITY(default)
48_GLIBCXX_BEGIN_NAMESPACE_VERSION
52 using __wait_clock_t = chrono::steady_clock;
54 template<
typename _Clock,
typename _Dur>
55 __wait_clock_t::time_point
56 __to_wait_clock(
const chrono::time_point<_Clock, _Dur>& __atime)
noexcept
58 const typename _Clock::time_point __c_entry = _Clock::now();
59 const __wait_clock_t::time_point __w_entry = __wait_clock_t::now();
60 const auto __delta = __atime - __c_entry;
61 using __w_dur =
typename __wait_clock_t::duration;
62 return __w_entry + chrono::ceil<__w_dur>(__delta);
65 template<
typename _Dur>
66 __wait_clock_t::time_point
67 __to_wait_clock(
const chrono::time_point<__wait_clock_t,
68 _Dur>& __atime)
noexcept
70 using __w_dur =
typename __wait_clock_t::duration;
71 return chrono::ceil<__w_dur>(__atime);
74#ifdef _GLIBCXX_HAVE_LINUX_FUTEX
75#define _GLIBCXX_HAVE_PLATFORM_TIMED_WAIT
77 template<
typename _Dur>
79 __platform_wait_until_impl(
const __platform_wait_t* __addr,
80 __platform_wait_t __old,
81 const chrono::time_point<__wait_clock_t, _Dur>&
84 auto __s = chrono::time_point_cast<chrono::seconds>(__atime);
85 auto __ns = chrono::duration_cast<chrono::nanoseconds>(__atime - __s);
87 struct timespec __rt =
89 static_cast<std::time_t
>(__s.time_since_epoch().count()),
90 static_cast<long>(__ns.count())
93 auto __e = syscall (SYS_futex, __addr,
94 static_cast<int>(__futex_wait_flags::
95 __wait_bitset_private),
96 __old, &__rt, nullptr,
97 static_cast<int>(__futex_wait_flags::
102 if (errno == ETIMEDOUT)
104 if (errno != EINTR && errno != EAGAIN)
105 __throw_system_error(errno);
111 template<
typename _Clock,
typename _Dur>
113 __platform_wait_until(
const __platform_wait_t* __addr, __platform_wait_t __old,
114 const chrono::time_point<_Clock, _Dur>& __atime)
116 if constexpr (is_same_v<__wait_clock_t, _Clock>)
118 return __platform_wait_until_impl(__addr, __old, __atime);
122 if (!__platform_wait_until_impl(__addr, __old,
123 __to_wait_clock(__atime)))
128 if (_Clock::now() < __atime)
140#ifdef _GLIBCXX_HAS_GTHREADS
143 template<
typename _Clock,
typename _Dur>
145 __cond_wait_until_impl(__condvar& __cv, mutex& __mx,
146 const chrono::time_point<_Clock, _Dur>& __atime)
148 static_assert(std::__is_one_of<_Clock, chrono::steady_clock,
149 chrono::system_clock>::value);
151 auto __s = chrono::time_point_cast<chrono::seconds>(__atime);
152 auto __ns = chrono::duration_cast<chrono::nanoseconds>(__atime - __s);
154 __gthread_time_t __ts =
156 static_cast<std::time_t
>(__s.time_since_epoch().count()),
157 static_cast<long>(__ns.count())
160#ifdef _GLIBCXX_USE_PTHREAD_COND_CLOCKWAIT
161 if constexpr (is_same_v<chrono::steady_clock, _Clock>)
162 __cv.wait_until(__mx, CLOCK_MONOTONIC, __ts);
165 __cv.wait_until(__mx, __ts);
166 return _Clock::now() < __atime;
170 template<
typename _Clock,
typename _Dur>
172 __cond_wait_until(__condvar& __cv, mutex& __mx,
173 const chrono::time_point<_Clock, _Dur>& __atime)
175#ifdef _GLIBCXX_USE_PTHREAD_COND_CLOCKWAIT
176 if constexpr (is_same_v<_Clock, chrono::steady_clock>)
177 return __detail::__cond_wait_until_impl(__cv, __mx, __atime);
180 if constexpr (is_same_v<_Clock, chrono::system_clock>)
181 return __detail::__cond_wait_until_impl(__cv, __mx, __atime);
184 if (__cond_wait_until_impl(__cv, __mx,
185 __to_wait_clock(__atime)))
190 if (_Clock::now() < __atime)
198 struct __timed_waiter_pool : __waiter_pool_base
201 template<
typename _Clock,
typename _Dur>
203 _M_do_wait_until(__platform_wait_t* __addr, __platform_wait_t __old,
204 const chrono::time_point<_Clock, _Dur>& __atime)
206#ifdef _GLIBCXX_HAVE_PLATFORM_TIMED_WAIT
207 return __platform_wait_until(__addr, __old, __atime);
209 __platform_wait_t __val;
210 __atomic_load(__addr, &__val, __ATOMIC_RELAXED);
213 lock_guard<mutex> __l(_M_mtx);
214 return __cond_wait_until(_M_cv, _M_mtx, __atime);
222 struct __timed_backoff_spin_policy
224 __wait_clock_t::time_point _M_deadline;
225 __wait_clock_t::time_point _M_t0;
227 template<
typename _Clock,
typename _Dur>
228 __timed_backoff_spin_policy(chrono::time_point<_Clock, _Dur>
229 __deadline = _Clock::time_point::max(),
230 chrono::time_point<_Clock, _Dur>
231 __t0 = _Clock::now()) noexcept
232 : _M_deadline(__to_wait_clock(__deadline))
233 , _M_t0(__to_wait_clock(__t0))
237 operator()() const noexcept
239 using namespace literals::chrono_literals;
240 auto __now = __wait_clock_t::now();
241 if (_M_deadline <= __now)
246 auto __elapsed = __now - _M_t0;
247 if (__elapsed > 128ms)
251 else if (__elapsed > 64us)
255 else if (__elapsed > 4us)
265 template<
typename _EntersWait>
266 struct __timed_waiter : __waiter_base<__timed_waiter_pool>
268 using __base_type = __waiter_base<__timed_waiter_pool>;
270 template<
typename _Tp>
271 __timed_waiter(
const _Tp* __addr) noexcept
272 : __base_type(__addr)
274 if constexpr (_EntersWait::value)
275 _M_w._M_enter_wait();
280 if constexpr (_EntersWait::value)
281 _M_w._M_leave_wait();
285 template<
typename _Tp,
typename _ValFn,
286 typename _Clock,
typename _Dur>
288 _M_do_wait_until_v(_Tp __old, _ValFn __vfn,
289 const chrono::time_point<_Clock, _Dur>&
292 __platform_wait_t __val;
293 if (_M_do_spin(__old,
std::move(__vfn), __val,
294 __timed_backoff_spin_policy(__atime)))
296 return __base_type::_M_w._M_do_wait_until(__base_type::_M_addr, __val, __atime);
300 template<
typename _Pred,
301 typename _Clock,
typename _Dur>
303 _M_do_wait_until(_Pred __pred, __platform_wait_t __val,
304 const chrono::time_point<_Clock, _Dur>&
307 for (
auto __now = _Clock::now(); __now < __atime;
308 __now = _Clock::now())
310 if (__base_type::_M_w._M_do_wait_until(
311 __base_type::_M_addr, __val, __atime)
315 if (__base_type::_M_do_spin(__pred, __val,
316 __timed_backoff_spin_policy(__atime, __now)))
323 template<
typename _Pred,
324 typename _Clock,
typename _Dur>
326 _M_do_wait_until(_Pred __pred,
327 const chrono::time_point<_Clock, _Dur>&
330 __platform_wait_t __val;
331 if (__base_type::_M_do_spin(__pred, __val,
332 __timed_backoff_spin_policy(__atime)))
334 return _M_do_wait_until(__pred, __val, __atime);
337 template<
typename _Tp,
typename _ValFn,
338 typename _Rep,
typename _Period>
340 _M_do_wait_for_v(_Tp __old, _ValFn __vfn,
341 const chrono::duration<_Rep, _Period>&
344 __platform_wait_t __val;
345 if (_M_do_spin_v(__old,
std::move(__vfn), __val))
348 if (!__rtime.count())
351 auto __reltime = chrono::ceil<__wait_clock_t::duration>(__rtime);
353 return __base_type::_M_w._M_do_wait_until(
354 __base_type::_M_addr,
356 chrono::steady_clock::now() + __reltime);
359 template<
typename _Pred,
360 typename _Rep,
typename _Period>
362 _M_do_wait_for(_Pred __pred,
363 const chrono::duration<_Rep, _Period>& __rtime)
noexcept
365 __platform_wait_t __val;
366 if (__base_type::_M_do_spin(__pred, __val))
369 if (!__rtime.count())
372 auto __reltime = chrono::ceil<__wait_clock_t::duration>(__rtime);
374 return _M_do_wait_until(__pred, __val,
375 chrono::steady_clock::now() + __reltime);
379 using __enters_timed_wait = __timed_waiter<std::true_type>;
380 using __bare_timed_wait = __timed_waiter<std::false_type>;
384 template<
typename _Tp,
typename _ValFn,
385 typename _Clock,
typename _Dur>
387 __atomic_wait_address_until_v(
const _Tp* __addr, _Tp&& __old, _ValFn&& __vfn,
388 const chrono::time_point<_Clock, _Dur>&
391 __detail::__enters_timed_wait __w{__addr};
392 return __w._M_do_wait_until_v(__old, __vfn, __atime);
395 template<
typename _Tp,
typename _Pred,
396 typename _Clock,
typename _Dur>
398 __atomic_wait_address_until(
const _Tp* __addr, _Pred __pred,
399 const chrono::time_point<_Clock, _Dur>&
402 __detail::__enters_timed_wait __w{__addr};
403 return __w._M_do_wait_until(__pred, __atime);
406 template<
typename _Pred,
407 typename _Clock,
typename _Dur>
409 __atomic_wait_address_until_bare(
const __detail::__platform_wait_t* __addr,
411 const chrono::time_point<_Clock, _Dur>&
414 __detail::__bare_timed_wait __w{__addr};
415 return __w._M_do_wait_until(__pred, __atime);
418 template<
typename _Tp,
typename _ValFn,
419 typename _Rep,
typename _Period>
421 __atomic_wait_address_for_v(
const _Tp* __addr, _Tp&& __old, _ValFn&& __vfn,
422 const chrono::duration<_Rep, _Period>& __rtime)
noexcept
424 __detail::__enters_timed_wait __w{__addr};
425 return __w._M_do_wait_for_v(__old, __vfn, __rtime);
428 template<
typename _Tp,
typename _Pred,
429 typename _Rep,
typename _Period>
431 __atomic_wait_address_for(
const _Tp* __addr, _Pred __pred,
432 const chrono::duration<_Rep, _Period>& __rtime)
noexcept
435 __detail::__enters_timed_wait __w{__addr};
436 return __w._M_do_wait_for(__pred, __rtime);
439 template<
typename _Pred,
440 typename _Rep,
typename _Period>
442 __atomic_wait_address_for_bare(
const __detail::__platform_wait_t* __addr,
444 const chrono::duration<_Rep, _Period>& __rtime)
noexcept
446 __detail::__bare_timed_wait __w{__addr};
447 return __w._M_do_wait_for(__pred, __rtime);
449_GLIBCXX_END_NAMESPACE_VERSION
// Cross-reference notes (Doxygen tooltip residue from extraction):
// - std::move: constexpr std::remove_reference<_Tp>::type&& move(_Tp&& __t) noexcept
//   — converts a value to an rvalue.
// - namespace std: the top-level namespace for ISO C++ entities.
// - this_thread::sleep_for: void sleep_for(const chrono::duration<_Rep, _Period>& __rtime)