30#ifndef _GLIBCXX_ATOMIC_TIMED_WAIT_H
31#define _GLIBCXX_ATOMIC_TIMED_WAIT_H 1
33#pragma GCC system_header
37#if __cpp_lib_atomic_wait
43#ifdef _GLIBCXX_HAVE_LINUX_FUTEX
48namespace std _GLIBCXX_VISIBILITY(default)
50_GLIBCXX_BEGIN_NAMESPACE_VERSION
54 using __wait_clock_t = chrono::steady_clock;
56 template<
typename _Clock,
typename _Dur>
57 __wait_clock_t::time_point
58 __to_wait_clock(
const chrono::time_point<_Clock, _Dur>& __atime)
noexcept
60 const typename _Clock::time_point __c_entry = _Clock::now();
61 const __wait_clock_t::time_point __w_entry = __wait_clock_t::now();
62 const auto __delta = __atime - __c_entry;
63 using __w_dur =
typename __wait_clock_t::duration;
64 return __w_entry + chrono::ceil<__w_dur>(__delta);
67 template<
typename _Dur>
68 __wait_clock_t::time_point
69 __to_wait_clock(
const chrono::time_point<__wait_clock_t,
70 _Dur>& __atime)
noexcept
72 using __w_dur =
typename __wait_clock_t::duration;
73 return chrono::ceil<__w_dur>(__atime);
76#ifdef _GLIBCXX_HAVE_LINUX_FUTEX
77#define _GLIBCXX_HAVE_PLATFORM_TIMED_WAIT
79 template<
typename _Dur>
81 __platform_wait_until_impl(
const __platform_wait_t* __addr,
82 __platform_wait_t __old,
83 const chrono::time_point<__wait_clock_t, _Dur>&
86 auto __s = chrono::time_point_cast<chrono::seconds>(__atime);
87 auto __ns = chrono::duration_cast<chrono::nanoseconds>(__atime - __s);
89 struct timespec __rt =
91 static_cast<std::time_t
>(__s.time_since_epoch().count()),
92 static_cast<long>(__ns.count())
95 auto __e = syscall (SYS_futex, __addr,
96 static_cast<int>(__futex_wait_flags::
97 __wait_bitset_private),
98 __old, &__rt, nullptr,
99 static_cast<int>(__futex_wait_flags::
100 __bitset_match_any));
104 if (errno == ETIMEDOUT)
106 if (errno != EINTR && errno != EAGAIN)
107 __throw_system_error(errno);
113 template<
typename _Clock,
typename _Dur>
115 __platform_wait_until(
const __platform_wait_t* __addr, __platform_wait_t __old,
116 const chrono::time_point<_Clock, _Dur>& __atime)
118 if constexpr (is_same_v<__wait_clock_t, _Clock>)
120 return __platform_wait_until_impl(__addr, __old, __atime);
124 if (!__platform_wait_until_impl(__addr, __old,
125 __to_wait_clock(__atime)))
130 if (_Clock::now() < __atime)
142#ifdef _GLIBCXX_HAS_GTHREADS
145 template<
typename _Clock,
typename _Dur>
147 __cond_wait_until_impl(__condvar& __cv, mutex& __mx,
148 const chrono::time_point<_Clock, _Dur>& __atime)
150 static_assert(std::__is_one_of<_Clock, chrono::steady_clock,
151 chrono::system_clock>::value);
153 auto __s = chrono::time_point_cast<chrono::seconds>(__atime);
154 auto __ns = chrono::duration_cast<chrono::nanoseconds>(__atime - __s);
156 __gthread_time_t __ts =
158 static_cast<std::time_t
>(__s.time_since_epoch().count()),
159 static_cast<long>(__ns.count())
162#ifdef _GLIBCXX_USE_PTHREAD_COND_CLOCKWAIT
163 if constexpr (is_same_v<chrono::steady_clock, _Clock>)
164 __cv.wait_until(__mx, CLOCK_MONOTONIC, __ts);
167 __cv.wait_until(__mx, __ts);
168 return _Clock::now() < __atime;
172 template<
typename _Clock,
typename _Dur>
174 __cond_wait_until(__condvar& __cv, mutex& __mx,
175 const chrono::time_point<_Clock, _Dur>& __atime)
177#ifdef _GLIBCXX_USE_PTHREAD_COND_CLOCKWAIT
178 if constexpr (is_same_v<_Clock, chrono::steady_clock>)
179 return __detail::__cond_wait_until_impl(__cv, __mx, __atime);
182 if constexpr (is_same_v<_Clock, chrono::system_clock>)
183 return __detail::__cond_wait_until_impl(__cv, __mx, __atime);
186 if (__cond_wait_until_impl(__cv, __mx,
187 __to_wait_clock(__atime)))
192 if (_Clock::now() < __atime)
200 struct __timed_waiter_pool : __waiter_pool_base
203 template<
typename _Clock,
typename _Dur>
205 _M_do_wait_until(__platform_wait_t* __addr, __platform_wait_t __old,
206 const chrono::time_point<_Clock, _Dur>& __atime)
208#ifdef _GLIBCXX_HAVE_PLATFORM_TIMED_WAIT
209 return __platform_wait_until(__addr, __old, __atime);
211 __platform_wait_t __val;
212 __atomic_load(__addr, &__val, __ATOMIC_RELAXED);
215 lock_guard<mutex> __l(_M_mtx);
216 return __cond_wait_until(_M_cv, _M_mtx, __atime);
224 struct __timed_backoff_spin_policy
226 __wait_clock_t::time_point _M_deadline;
227 __wait_clock_t::time_point _M_t0;
229 template<
typename _Clock,
typename _Dur>
230 __timed_backoff_spin_policy(chrono::time_point<_Clock, _Dur>
231 __deadline = _Clock::time_point::max(),
232 chrono::time_point<_Clock, _Dur>
233 __t0 = _Clock::now()) noexcept
234 : _M_deadline(__to_wait_clock(__deadline))
235 , _M_t0(__to_wait_clock(__t0))
239 operator()() const noexcept
241 using namespace literals::chrono_literals;
242 auto __now = __wait_clock_t::now();
243 if (_M_deadline <= __now)
248 auto __elapsed = __now - _M_t0;
249 if (__elapsed > 128ms)
253 else if (__elapsed > 64us)
257 else if (__elapsed > 4us)
267 template<
typename _EntersWait>
268 struct __timed_waiter : __waiter_base<__timed_waiter_pool>
270 using __base_type = __waiter_base<__timed_waiter_pool>;
272 template<
typename _Tp>
273 __timed_waiter(
const _Tp* __addr) noexcept
274 : __base_type(__addr)
276 if constexpr (_EntersWait::value)
277 _M_w._M_enter_wait();
282 if constexpr (_EntersWait::value)
283 _M_w._M_leave_wait();
287 template<
typename _Tp,
typename _ValFn,
288 typename _Clock,
typename _Dur>
290 _M_do_wait_until_v(_Tp __old, _ValFn __vfn,
291 const chrono::time_point<_Clock, _Dur>&
294 __platform_wait_t __val;
295 if (_M_do_spin(__old,
std::move(__vfn), __val,
296 __timed_backoff_spin_policy(__atime)))
298 return __base_type::_M_w._M_do_wait_until(__base_type::_M_addr, __val, __atime);
302 template<
typename _Pred,
303 typename _Clock,
typename _Dur>
305 _M_do_wait_until(_Pred __pred, __platform_wait_t __val,
306 const chrono::time_point<_Clock, _Dur>&
309 for (
auto __now = _Clock::now(); __now < __atime;
310 __now = _Clock::now())
312 if (__base_type::_M_w._M_do_wait_until(
313 __base_type::_M_addr, __val, __atime)
317 if (__base_type::_M_do_spin(__pred, __val,
318 __timed_backoff_spin_policy(__atime, __now)))
325 template<
typename _Pred,
326 typename _Clock,
typename _Dur>
328 _M_do_wait_until(_Pred __pred,
329 const chrono::time_point<_Clock, _Dur>&
332 __platform_wait_t __val;
333 if (__base_type::_M_do_spin(__pred, __val,
334 __timed_backoff_spin_policy(__atime)))
336 return _M_do_wait_until(__pred, __val, __atime);
339 template<
typename _Tp,
typename _ValFn,
340 typename _Rep,
typename _Period>
342 _M_do_wait_for_v(_Tp __old, _ValFn __vfn,
343 const chrono::duration<_Rep, _Period>&
346 __platform_wait_t __val;
347 if (_M_do_spin_v(__old,
std::move(__vfn), __val))
350 if (!__rtime.count())
353 auto __reltime = chrono::ceil<__wait_clock_t::duration>(__rtime);
355 return __base_type::_M_w._M_do_wait_until(
356 __base_type::_M_addr,
358 chrono::steady_clock::now() + __reltime);
361 template<
typename _Pred,
362 typename _Rep,
typename _Period>
364 _M_do_wait_for(_Pred __pred,
365 const chrono::duration<_Rep, _Period>& __rtime)
noexcept
367 __platform_wait_t __val;
368 if (__base_type::_M_do_spin(__pred, __val))
371 if (!__rtime.count())
374 auto __reltime = chrono::ceil<__wait_clock_t::duration>(__rtime);
376 return _M_do_wait_until(__pred, __val,
377 chrono::steady_clock::now() + __reltime);
381 using __enters_timed_wait = __timed_waiter<std::true_type>;
382 using __bare_timed_wait = __timed_waiter<std::false_type>;
386 template<
typename _Tp,
typename _ValFn,
387 typename _Clock,
typename _Dur>
389 __atomic_wait_address_until_v(
const _Tp* __addr, _Tp&& __old, _ValFn&& __vfn,
390 const chrono::time_point<_Clock, _Dur>&
393 __detail::__enters_timed_wait __w{__addr};
394 return __w._M_do_wait_until_v(__old, __vfn, __atime);
397 template<
typename _Tp,
typename _Pred,
398 typename _Clock,
typename _Dur>
400 __atomic_wait_address_until(
const _Tp* __addr, _Pred __pred,
401 const chrono::time_point<_Clock, _Dur>&
404 __detail::__enters_timed_wait __w{__addr};
405 return __w._M_do_wait_until(__pred, __atime);
408 template<
typename _Pred,
409 typename _Clock,
typename _Dur>
411 __atomic_wait_address_until_bare(
const __detail::__platform_wait_t* __addr,
413 const chrono::time_point<_Clock, _Dur>&
416 __detail::__bare_timed_wait __w{__addr};
417 return __w._M_do_wait_until(__pred, __atime);
420 template<
typename _Tp,
typename _ValFn,
421 typename _Rep,
typename _Period>
423 __atomic_wait_address_for_v(
const _Tp* __addr, _Tp&& __old, _ValFn&& __vfn,
424 const chrono::duration<_Rep, _Period>& __rtime)
noexcept
426 __detail::__enters_timed_wait __w{__addr};
427 return __w._M_do_wait_for_v(__old, __vfn, __rtime);
430 template<
typename _Tp,
typename _Pred,
431 typename _Rep,
typename _Period>
433 __atomic_wait_address_for(
const _Tp* __addr, _Pred __pred,
434 const chrono::duration<_Rep, _Period>& __rtime)
noexcept
437 __detail::__enters_timed_wait __w{__addr};
438 return __w._M_do_wait_for(__pred, __rtime);
441 template<
typename _Pred,
442 typename _Rep,
typename _Period>
444 __atomic_wait_address_for_bare(
const __detail::__platform_wait_t* __addr,
446 const chrono::duration<_Rep, _Period>& __rtime)
noexcept
448 __detail::__bare_timed_wait __w{__addr};
449 return __w._M_do_wait_for(__pred, __rtime);
451_GLIBCXX_END_NAMESPACE_VERSION
// NOTE(review): the following lines are extraction residue (documentation
// tooltips), kept here as comments so they cannot be parsed as code:
// - std::move: constexpr std::remove_reference<_Tp>::type&& move(_Tp&& __t)
//   noexcept — converts a value to an rvalue.
// - std: the ISO C++ entities' top-level namespace is std.
// - this_thread::sleep_for: void sleep_for(const chrono::duration<_Rep,
//   _Period>& __rtime).