#ifndef _GLIBCXX_ATOMIC_FUTEX_H
#define _GLIBCXX_ATOMIC_FUTEX_H 1

#pragma GCC system_header

#include <bits/c++config.h>
#include <atomic>
#include <chrono>
#if ! (defined(_GLIBCXX_HAVE_LINUX_FUTEX) && ATOMIC_INT_LOCK_FREE > 1)
#include <mutex>
#include <condition_variable>
#endif

#ifndef _GLIBCXX_ALWAYS_INLINE
#define _GLIBCXX_ALWAYS_INLINE inline __attribute__((__always_inline__))
#endif

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION
#if defined(_GLIBCXX_HAS_GTHREADS) && defined(_GLIBCXX_USE_C99_STDINT_TR1)
#if defined(_GLIBCXX_HAVE_LINUX_FUTEX) && ATOMIC_INT_LOCK_FREE > 1
  struct __atomic_futex_unsigned_base
  {
    // Returns false iff a timeout occurred.
    bool
    _M_futex_wait_until(unsigned* __addr, unsigned __val, bool __has_timeout,
			chrono::seconds __s, chrono::nanoseconds __ns);

    // This can be executed after the object has been destroyed.
    static void _M_futex_notify_all(unsigned* __addr);
  };
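
  // Illustrative note, not part of this header: the definitions of these two
  // members are provided out of line by the library.  On Linux they are
  // assumed to boil down to the futex system call, roughly:
  //
  //   // sleep while *__addr still equals __val (or until the timeout)
  //   syscall(SYS_futex, __addr, FUTEX_WAIT, __val, __timeout_or_null);
  //   // wake every thread currently waiting on __addr
  //   syscall(SYS_futex, __addr, FUTEX_WAKE, INT_MAX);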
  template <unsigned _Waiter_bit = 0x80000000>
  class __atomic_futex_unsigned : __atomic_futex_unsigned_base
  {
    typedef chrono::system_clock __clock_t;

    // This must be lock-free and at offset 0.
    atomic<unsigned> _M_data;

  public:
    explicit
    __atomic_futex_unsigned(unsigned __data) : _M_data(__data)
    { }

    _GLIBCXX_ALWAYS_INLINE unsigned
    _M_load(memory_order __mo)
    { return _M_data.load(__mo) & ~_Waiter_bit; }
  private:
    // Returns the operand's value if __equal is true, or a different value
    // if __equal is false; on a timeout, returns the current value instead.
    // __assumed is the caller's assumption about the current value.
    unsigned
    _M_load_and_test_until(unsigned __assumed, unsigned __operand,
	bool __equal, memory_order __mo, bool __has_timeout,
	chrono::seconds __s, chrono::nanoseconds __ns)
    {
      for (;;)
	{
	  // Set the waiter bit; relaxed is sufficient because the futex
	  // syscalls synchronize memory.
	  _M_data.fetch_or(_Waiter_bit, memory_order_relaxed);
	  bool __ret = _M_futex_wait_until((unsigned*)(void*)&_M_data,
					   __assumed | _Waiter_bit,
					   __has_timeout, __s, __ns);
	  // Reload the current value after waiting.
	  __assumed = _M_load(__mo);
	  if (!__ret || ((__operand == __assumed) == __equal))
	    return __assumed;
	}
    }
    // As above, but without a timeout.
    unsigned
    _M_load_and_test(unsigned __assumed, unsigned __operand,
	bool __equal, memory_order __mo)
    {
      return _M_load_and_test_until(__assumed, __operand, __equal, __mo,
				    false, {}, {});
    }
    // As above, with an absolute timeout measured on __clock_t.
    template<typename _Dur>
      unsigned
      _M_load_and_test_until_impl(unsigned __assumed, unsigned __operand,
	  bool __equal, memory_order __mo,
	  const chrono::time_point<__clock_t, _Dur>& __atime)
      {
	auto __s = chrono::time_point_cast<chrono::seconds>(__atime);
	auto __ns = chrono::duration_cast<chrono::nanoseconds>(__atime - __s);
	return _M_load_and_test_until(__assumed, __operand, __equal, __mo,
				      true, __s.time_since_epoch(), __ns);
      }
  public:
    _GLIBCXX_ALWAYS_INLINE unsigned
    _M_load_when_not_equal(unsigned __val, memory_order __mo)
    {
      unsigned __i = _M_load(__mo);
      if ((__i & ~_Waiter_bit) != __val)
	return (__i & ~_Waiter_bit);
      return _M_load_and_test(__i, __val, false, __mo);
    }
    _GLIBCXX_ALWAYS_INLINE void
    _M_load_when_equal(unsigned __val, memory_order __mo)
    {
      unsigned __i = _M_load(__mo);
      if ((__i & ~_Waiter_bit) == __val)
	return;
      _M_load_and_test(__i, __val, true, __mo);
    }
    // Returns false iff a timeout occurred.
    template<typename _Rep, typename _Period>
      _GLIBCXX_ALWAYS_INLINE bool
      _M_load_when_equal_for(unsigned __val, memory_order __mo,
	  const chrono::duration<_Rep, _Period>& __rtime)
      {
	return _M_load_when_equal_until(__val, __mo,
					__clock_t::now() + __rtime);
      }
    // Returns false iff a timeout occurred.
    template<typename _Clock, typename _Duration>
      _GLIBCXX_ALWAYS_INLINE bool
      _M_load_when_equal_until(unsigned __val, memory_order __mo,
	  const chrono::time_point<_Clock, _Duration>& __atime)
      {
	// DR 887 - Sync unknown clock to known clock.
	const typename _Clock::time_point __c_entry = _Clock::now();
	const __clock_t::time_point __s_entry = __clock_t::now();
	const auto __delta = __atime - __c_entry;
	const auto __s_atime = __s_entry + __delta;
	return _M_load_when_equal_until(__val, __mo, __s_atime);
      }
    // Returns false iff a timeout occurred.
    template<typename _Duration>
      _GLIBCXX_ALWAYS_INLINE bool
      _M_load_when_equal_until(unsigned __val, memory_order __mo,
	  const chrono::time_point<__clock_t, _Duration>& __atime)
      {
	unsigned __i = _M_load(__mo);
	if ((__i & ~_Waiter_bit) == __val)
	  return true;
	__i = _M_load_and_test_until_impl(__i, __val, true, __mo, __atime);
	return (__i & ~_Waiter_bit) == __val;
      }
    _GLIBCXX_ALWAYS_INLINE void
    _M_store_notify_all(unsigned __val, memory_order __mo)
    {
      unsigned* __futex = (unsigned*)(void*)&_M_data;
      if (_M_data.exchange(__val, __mo) & _Waiter_bit)
	_M_futex_notify_all(__futex);
    }
  };
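
  // Minimal usage sketch (illustrative only; the object and values below are
  // invented, not part of libstdc++): one thread blocks until the word takes
  // an expected value, another publishes that value and wakes all waiters.
  //
  //   __atomic_futex_unsigned<> __state(0);
  //   // waiting thread: returns once the value (ignoring _Waiter_bit) is 1
  //   __state._M_load_when_equal(1, memory_order_acquire);
  //   // notifying thread: stores 1 and, if any waiter set _Waiter_bit,
  //   // issues a futex wake
  //   __state._M_store_notify_all(1, memory_order_release);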
#else // ! (_GLIBCXX_HAVE_LINUX_FUTEX && ATOMIC_INT_LOCK_FREE > 1)

  // Fallback: no futex available, so wait on a mutex + condition_variable.
  template <unsigned _Waiter_bit = 0x80000000>
  class __atomic_futex_unsigned
  {
    typedef chrono::system_clock __clock_t;

    unsigned _M_data;
    mutex _M_mutex;
    condition_variable _M_condvar;

  public:
    explicit
    __atomic_futex_unsigned(unsigned __data) : _M_data(__data)
    { }

    _GLIBCXX_ALWAYS_INLINE unsigned
    _M_load(memory_order __mo)
    {
      unique_lock<mutex> __lock(_M_mutex);
      return _M_data;
    }
    _GLIBCXX_ALWAYS_INLINE unsigned
    _M_load_when_not_equal(unsigned __val, memory_order __mo)
    {
      unique_lock<mutex> __lock(_M_mutex);
      while (_M_data == __val)
	_M_condvar.wait(__lock);
      return _M_data;
    }
    _GLIBCXX_ALWAYS_INLINE void
    _M_load_when_equal(unsigned __val, memory_order __mo)
    {
      unique_lock<mutex> __lock(_M_mutex);
      while (_M_data != __val)
	_M_condvar.wait(__lock);
    }
    template<typename _Rep, typename _Period>
      _GLIBCXX_ALWAYS_INLINE bool
      _M_load_when_equal_for(unsigned __val, memory_order __mo,
	  const chrono::duration<_Rep, _Period>& __rtime)
      {
	unique_lock<mutex> __lock(_M_mutex);
	return _M_condvar.wait_for(__lock, __rtime,
				   [&] { return _M_data == __val; });
      }
    template<typename _Clock, typename _Duration>
      _GLIBCXX_ALWAYS_INLINE bool
      _M_load_when_equal_until(unsigned __val, memory_order __mo,
	  const chrono::time_point<_Clock, _Duration>& __atime)
      {
	unique_lock<mutex> __lock(_M_mutex);
	return _M_condvar.wait_until(__lock, __atime,
				     [&] { return _M_data == __val; });
      }
    _GLIBCXX_ALWAYS_INLINE void
    _M_store_notify_all(unsigned __val, memory_order __mo)
    {
      unique_lock<mutex> __lock(_M_mutex);
      _M_data = __val;
      _M_condvar.notify_all();
    }
  };
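
  // Note (illustrative, not in the original sources): the predicate overloads
  // of wait_for/wait_until used above behave like the loop below, so these
  // methods return false only if the deadline passes while _M_data != __val,
  // matching the futex-based implementation; spurious wakeups are absorbed by
  // the loop itself.
  //
  //   while (_M_data != __val)
  //     if (_M_condvar.wait_until(__lock, __atime) == cv_status::timeout)
  //       return _M_data == __val;
  //   return true;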
#endif // _GLIBCXX_HAVE_LINUX_FUTEX && ATOMIC_INT_LOCK_FREE > 1
#endif // _GLIBCXX_HAS_GTHREADS && _GLIBCXX_USE_C99_STDINT_TR1

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif