1 // <stop_token> -*- C++ -*-
3 // Copyright (C) 2019-2020 Free Software Foundation, Inc.
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 3, or (at your option)
// any later version.
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
16 // Under Section 7 of GPL version 3, you are granted additional
17 // permissions described in the GCC Runtime Library Exception, version
18 // 3.1, as published by the Free Software Foundation.
20 // You should have received a copy of the GNU General Public License and
21 // a copy of the GCC Runtime Library Exception along with this program;
22 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23 // <http://www.gnu.org/licenses/>.
25 /** @file include/stop_token
26 * This is a Standard C++ Library header.
*/
29 #ifndef _GLIBCXX_STOP_TOKEN
30 #define _GLIBCXX_STOP_TOKEN
// C++20-only header: everything below is guarded on a post-C++17 dialect.
32 #if __cplusplus > 201703L
// With the gthreads layer available, advertise jthread support and pull in
// the threading primitives.
// NOTE(review): the jumps in the baked-in numbering (32 -> 36, 39 -> 44)
// show that lines were dropped from this excerpt, so the matching
// "#include <semaphore>" / "#endif" lines of these conditionals are not
// visible here and the directives appear unbalanced.
36 #ifdef _GLIBCXX_HAS_GTHREADS
37 # define __cpp_lib_jthread 201911L
38 # include <bits/gthr.h>
39 # if __has_include(<semaphore>)
44 namespace std _GLIBCXX_VISIBILITY(default)
46 _GLIBCXX_BEGIN_NAMESPACE_VERSION
/// Tag type indicating a stop_source should have no shared-stop-state.
/// (Defect fixed: this listing had stray line numbers baked into the
/// source text, making the declarations ill-formed; they are removed.)
struct nostopstate_t
{
  // Explicitly-defaulted explicit default constructor, so the tag can
  // only be created deliberately (e.g. `nostopstate_t{}`), never by an
  // implicit conversion from `{}` in overload resolution.
  explicit nostopstate_t() = default;
};

/// Tag value passed to stop_source's constructor to request that no
/// shared-stop-state be allocated.
inline constexpr nostopstate_t nostopstate{};
// NOTE(review): fragment of the C++20 `stop_token` class.  The baked-in
// numbering jumps (54, 58, 60, ...) show the class head, access
// specifiers and the return-type lines of the members below were dropped
// from this excerpt; comments describe only what the visible tokens show.
54 /// Allow testing whether a stop request has been made on a `stop_source`.
// All special members are trivial defaults; the token's only data is the
// shared-state handle `_M_state` (declared later in this excerpt).
58 stop_token() noexcept = default;
60 stop_token(const stop_token&) noexcept = default;
61 stop_token(stop_token&&) noexcept = default;
63 ~stop_token() = default;
66 operator=(const stop_token&) noexcept = default;
69 operator=(stop_token&&) noexcept = default;
// True iff a shared state exists and it reports that a stop is possible.
73 stop_possible() const noexcept
75 return static_cast<bool>(_M_state) && _M_state->_M_stop_possible();
// True iff a shared state exists and a stop has been requested on it.
80 stop_requested() const noexcept
82 return static_cast<bool>(_M_state) && _M_state->_M_stop_requested();
// Exchange shared-state handles with __rhs.
86 swap(stop_token& __rhs) noexcept
87 { _M_state.swap(__rhs._M_state); }
// Tokens compare equal when they refer to the same state (or both none).
91 operator==(const stop_token& __a, const stop_token& __b)
92 { return __a._M_state == __b._M_state; }
// Free swap, forwarding to the member swap.
95 swap(stop_token& __lhs, stop_token& __rhs) noexcept
96 { __lhs.swap(__rhs); }
// stop_source and stop_callback construct tokens / read _M_state directly.
99 friend class stop_source;
100 template<typename _Callback>
101 friend class stop_callback;
// NOTE(review): fragment of a spin-yield helper followed by a fallback
// binary_semaphore; several interior lines are missing (numbering jumps).
// On x86 the spin-wait hint instruction is used; otherwise fall back to
// the scheduler when gthreads + sched_yield are available.
106 #if defined __i386__ || defined __x86_64__
107 __builtin_ia32_pause();
108 #elif defined _GLIBCXX_HAS_GTHREADS && defined _GLIBCXX_USE_SCHED_YIELD
// Minimal stand-in used only when the real <semaphore> is unavailable.
113 #ifndef __cpp_lib_semaphore
114 // TODO: replace this with a real implementation of std::binary_semaphore
115 struct binary_semaphore
// Counter is clamped to 0/1 on construction.
117 explicit binary_semaphore(int __d) : _M_counter(__d > 0) { }
// release: publish with release ordering so acquire() synchronizes with it.
119 void release() { _M_counter.fetch_add(1, memory_order::release); }
// acquire (body partially missing here): CAS the counter back to 0 with
// acquire ordering on success; presumably spins/yields between attempts —
// the dropped lines 120-123/127-132 would confirm.
124 while (!_M_counter.compare_exchange_weak(__old, 0,
125 memory_order::acquire,
126 memory_order::relaxed))
133 atomic<int> _M_counter;
// NOTE(review): fragment of `_Stop_cb`, the intrusive doubly-linked node
// representing one registered stop callback (the struct head on the
// dropped lines is not visible here).
// Type-erased callback entry point: a noexcept function taking the node.
139 using __cb_type = void(_Stop_cb*) noexcept;
140 __cb_type* _M_callback;
// Intrusive list links used by the shared state's callback list.
141 _M_prev = nullptr;
142 _Stop_cb* _M_next = nullptr;
// While the callback is being invoked, points at a local flag the
// invoker checks to learn the node was destroyed mid-call (see the
// __destroyed protocol later in this excerpt).
143 bool* _M_destroyed = nullptr;
// Signalled after the callback finishes, so a concurrent ~stop_callback
// on another thread can wait for completion.
144 binary_semaphore _M_done{0};
146 [[__gnu__::__nonnull__]]
148 _Stop_cb(__cb_type* __cb)
// Invoke the stored callback on this node.
152 void _M_run() noexcept { _M_callback(this); }
// NOTE(review): fragment of the shared stop-state.  Bit layout of
// _M_value, per the constants below: bit 0 = stop requested, bit 1 =
// spin-lock bit, bits 2.. = count of stop_source objects (each source
// adds _S_ssrc_counter_inc).
157 using value_type = uint32_t;
158 static constexpr value_type _S_stop_requested_bit = 1;
159 static constexpr value_type _S_locked_bit = 2;
160 static constexpr value_type _S_ssrc_counter_inc = 4;
// Reference count of all owners (sources, tokens, callbacks); starts at 1
// for the creating stop_source.
162 std::atomic<value_type> _M_owners{1};
// Starts with one stop_source counted and no request/lock bits set.
163 std::atomic<value_type> _M_value{_S_ssrc_counter_inc};
// Head of the intrusive list of registered _Stop_cb nodes.
164 _Stop_cb* _M_head = nullptr;
165 #ifdef _GLIBCXX_HAS_GTHREADS
// Identity of the thread that made the stop request; written while the
// list lock is held (see _M_request_stop / _M_remove_callback below).
166 __gthread_t _M_requester;
169 _Stop_state_t() noexcept { }
172 _M_stop_possible() noexcept
174 // true if a stop request has already been made or there are still
175 // stop_source objects that would allow one to be made.
176 return _M_value.load(memory_order::acquire) & ~_S_locked_bit;
// Test the request bit only.
180 _M_stop_requested() noexcept
182 return _M_value.load(memory_order::acquire) & _S_stop_requested_bit;
// Owner refcount inc can be relaxed; only the final release must order.
186 _M_add_owner() noexcept
188 _M_owners.fetch_add(1, memory_order::relaxed);
// Last owner out (fetch_sub returned 1) presumably deletes *this — the
// dropped lines 195-197 would confirm.
192 _M_release_ownership() noexcept
194 if (_M_owners.fetch_sub(1, memory_order::acq_rel) == 1)
// Track the number of live stop_source objects in the high bits.
199 _M_add_ssrc() noexcept
201 _M_value.fetch_add(_S_ssrc_counter_inc, memory_order::relaxed);
205 _M_sub_ssrc() noexcept
207 _M_value.fetch_sub(_S_ssrc_counter_inc, memory_order::release);
// NOTE(review): fragments of _M_lock / _M_unlock / _M_request_stop; many
// interior lines (function heads, loop bodies, early returns) are missing.
// _M_lock fragment: spin until the lock bit is won; the winning CAS in
// _M_try_lock provides the acquire.
214 // Can use relaxed loads to get the current value.
215 // The successful call to _M_try_lock is an acquire operation.
216 auto __old = _M_value.load(memory_order::relaxed);
217 while (!_M_try_lock(__old, memory_order::relaxed))
// _M_unlock fragment: drop the lock bit, publishing list edits.
221 // Precondition: calling thread holds the lock.
225 _M_value.fetch_sub(_S_locked_bit, memory_order::release);
229 _M_request_stop() noexcept
231 // obtain lock and set stop_requested bit
232 auto __old = _M_value.load(memory_order::acquire);
// Early-out if another thread already requested (the dropped line 236
// presumably returns false here — TODO confirm against the full source).
235 if (__old & _S_stop_requested_bit) // stop request already made
// Win the lock and set the request bit atomically, in one CAS loop.
238 while (!_M_try_lock_and_stop(__old));
// Record which thread is running the callbacks, so ~stop_callback can
// tell a self-removal from a cross-thread one (see _M_remove_callback).
240 #ifdef _GLIBCXX_HAS_GTHREADS
241 #ifdef _GLIBCXX_NATIVE_THREAD_ID
242 _M_requester = _GLIBCXX_NATIVE_THREAD_ID;
244 _M_requester = __gthread_self();
// Pop each callback off the head of the list in turn...
251 _Stop_cb* __cb = _M_head;
252 _M_head = _M_head->_M_next;
255 _M_head->_M_prev = nullptr;
// ...then run it with the lock released, using a stack flag that the
// callback's destructor can set if it destroys the node mid-call.
261 // Allow other callbacks to be unregistered while __cb runs.
264 bool __destroyed = false;
265 __cb->_M_destroyed = &__destroyed;
272 __cb->_M_destroyed = nullptr;
273 #ifdef _GLIBCXX_HAS_GTHREADS
274 // synchronize with destructor of stop_callback that owns *__cb
275 __cb->_M_done.release();
279 // Avoid relocking if we already know there are no more callbacks.
// NOTE(review): fragment of _M_register_callback (return statements and
// some braces fall on dropped lines).  Visible logic: run immediately if
// a stop was already requested; refuse registration if no stop_source
// remains; otherwise lock and push the node at the head of the list.
290 [[__gnu__::__nonnull__]]
292 _M_register_callback(_Stop_cb* __cb) noexcept
294 auto __old = _M_value.load(memory_order::acquire);
// Stop already requested: invoke the callback on this thread, now.
297 if (__old & _S_stop_requested_bit) // stop request already made
299 __cb->_M_run(); // run synchronously
// ssrc counter bits all zero => no stop_source can ever request a stop.
303 if (__old < _S_ssrc_counter_inc) // no stop_source owns *this
304 // No need to register callback if no stop request can be made.
305 // Returning false also means the stop_callback does not share
306 // ownership of this state, but that's not observable.
309 while (!_M_try_lock(__old));
// Push-front onto the intrusive list (under the lock).
311 __cb->_M_next = _M_head;
314 _M_head->_M_prev = __cb;
// NOTE(review): fragment of _M_remove_callback; the lock acquisition,
// several unlink branches and returns are on dropped lines.
321 // Called by ~stop_callback just before destroying *__cb.
322 [[__gnu__::__nonnull__]]
324 _M_remove_callback(_Stop_cb* __cb)
// Fast path (under the lock): unlink the node from the intrusive list.
330 _M_head = _M_head->_M_next;
332 _M_head->_M_prev = nullptr;
336 else if (__cb->_M_prev)
338 __cb->_M_prev->_M_next = __cb->_M_next;
340 __cb->_M_next->_M_prev = __cb->_M_prev;
// Slow path: the node is gone from the list, which means the stop
// request is currently running (or has run) this very callback.
347 // Callback is not in the list, so must have been removed by a call to
350 #ifdef _GLIBCXX_HAS_GTHREADS
351 #ifdef _GLIBCXX_NATIVE_THREAD_ID
352 auto __tid = _GLIBCXX_NATIVE_THREAD_ID;
354 auto __tid = __gthread_self();
356 // Despite appearances there is no data race on _M_requester. The only
357 // write to it happens before the callback is removed from the list,
358 // and removing it from the list happens before this read.
359 if (!__gthread_equal(_M_requester, __tid))
// Another thread is running the callback: block until it finishes so the
// caller may safely destroy *__cb.
361 // Synchronize with completion of callback.
362 __cb->_M_done.acquire();
363 // Safe for ~stop_callback to destroy *__cb now.
// Same thread (callback destroys its own stop_callback): tell the
// in-progress invocation, via the stack flag, not to touch *__cb again.
367 if (__cb->_M_destroyed)
368 *__cb->_M_destroyed = true;
// NOTE(review): try-lock helpers for the _M_value spin lock; the function
// heads (return types) and some braces fall on dropped lines.
371 // Try to obtain the lock.
372 // Returns true if the lock is acquired (with memory order acquire).
373 // Otherwise, sets __curval = _M_value.load(__failure) and returns false.
374 // Might fail spuriously, so must be called in a loop.
376 _M_try_lock(value_type& __curval,
377 memory_order __failure = memory_order::acquire) noexcept
379 return _M_do_try_lock(__curval, 0, memory_order::acquire, __failure);
382 // Try to obtain the lock to make a stop request.
383 // Returns true if the lock is acquired and the _S_stop_requested_bit is
384 // set (with memory order acq_rel so that other threads see the request).
385 // Otherwise, sets __curval = _M_value.load(memory_order::acquire) and
387 // Might fail spuriously, so must be called in a loop.
389 _M_try_lock_and_stop(value_type& __curval) noexcept
391 return _M_do_try_lock(__curval, _S_stop_requested_bit,
392 memory_order::acq_rel, memory_order::acquire);
// Common CAS step: back off (reload and report failure) while someone
// else holds the lock bit, otherwise try to install
// __curval | __newbits | _S_locked_bit in one weak compare-exchange.
396 _M_do_try_lock(value_type& __curval, value_type __newbits,
397 memory_order __success, memory_order __failure) noexcept
399 if (__curval & _S_locked_bit)
402 __curval = _M_value.load(__failure);
405 __newbits |= _S_locked_bit;
406 return _M_value.compare_exchange_weak(__curval, __curval | __newbits,
407 __success, __failure);
// NOTE(review): fragment of _Stop_state_ref, an intrusive ref-counted
// handle to _Stop_state_t (smart-pointer-like; guards and some returns
// are on dropped lines).
411 struct _Stop_state_ref
// Default: empty handle (no shared state).
413 _Stop_state_ref() = default;
// Constructing from a stop_source allocates a fresh shared state.
416 _Stop_state_ref(const stop_source&)
417 : _M_ptr(new _Stop_state_t())
// Copy shares the state and bumps the owner refcount (presumably only
// when non-null — the dropped lines 422-423 would hold that guard).
420 _Stop_state_ref(const _Stop_state_ref& __other) noexcept
421 : _M_ptr(__other._M_ptr)
424 _M_ptr->_M_add_owner();
// Move steals the pointer, leaving the source empty.
427 _Stop_state_ref(_Stop_state_ref&& __other) noexcept
428 : _M_ptr(__other._M_ptr)
430 __other._M_ptr = nullptr;
// Copy-assign: add the new owner before releasing the old one, so a
// self-referential chain can never hit a zero refcount in between.
434 operator=(const _Stop_state_ref& __other) noexcept
436 if (auto __ptr = __other._M_ptr; __ptr != _M_ptr)
439 __ptr->_M_add_owner();
441 _M_ptr->_M_release_ownership();
// Move-assign via the move-construct + swap idiom.
448 operator=(_Stop_state_ref&& __other) noexcept
450 _Stop_state_ref(std::move(__other)).swap(*this);
// Destructor drops this handle's ownership (guard line dropped).
457 _M_ptr->_M_release_ownership();
461 swap(_Stop_state_ref& __other) noexcept
462 { std::swap(_M_ptr, __other._M_ptr); }
464 explicit operator bool() const noexcept { return _M_ptr != nullptr; }
466 _Stop_state_t* operator->() const noexcept { return _M_ptr; }
// Comparisons: defaulted == with <=> support, hand-written otherwise.
468 #if __cpp_impl_three_way_comparison >= 201907L
470 operator==(const _Stop_state_ref&, const _Stop_state_ref&) = default;
473 operator==(const _Stop_state_ref& __lhs, const _Stop_state_ref& __rhs)
475 { return __lhs._M_ptr == __rhs._M_ptr; }
478 operator!=(const _Stop_state_ref& __lhs, const _Stop_state_ref& __rhs)
480 { return __lhs._M_ptr != __rhs._M_ptr; }
484 _Stop_state_t* _M_ptr = nullptr;
// stop_token's only data member, plus its private converting constructor
// used by stop_source::get_token.
487 _Stop_state_ref _M_state;
490 stop_token(const _Stop_state_ref& __state) noexcept
// NOTE(review): fragment of the stop_source class (class head, return
// types and several braces fall on dropped lines).
495 /// A type that allows a stop request to be made.
// Default construction allocates a shared state (via the
// _Stop_state_ref(const stop_source&) constructor above).
499 stop_source() : _M_state(*this)
// Tag constructor: deliberately no shared state, so stop_possible()
// will be false.
502 explicit stop_source(std::nostopstate_t) noexcept
// Copy shares the state and counts one more stop_source on it
// (presumably guarded for the empty case on the dropped lines).
505 stop_source(const stop_source& __other) noexcept
506 : _M_state(__other._M_state)
509 _M_state->_M_add_ssrc();
512 stop_source(stop_source&&) noexcept = default;
// Copy-assign: release the old state via a moved-to temporary, then
// share and count the new one.
515 operator=(const stop_source& __other) noexcept
517 if (_M_state != __other._M_state)
519 stop_source __sink(std::move(*this));
520 _M_state = __other._M_state;
522 _M_state->_M_add_ssrc();
528 operator=(stop_source&&) noexcept = default;
// Destructor fragment: drop this source from the ssrc count.
533 _M_state->_M_sub_ssrc();
// A source can request a stop iff it has a shared state.
538 stop_possible() const noexcept
540 return static_cast<bool>(_M_state);
545 stop_requested() const noexcept
547 return static_cast<bool>(_M_state) && _M_state->_M_stop_requested();
// Forward to the shared state (empty-state guard on dropped lines).
551 request_stop() const noexcept
554 return _M_state->_M_request_stop();
// Hand out a token that observes this source's state.
560 get_token() const noexcept
562 return stop_token{_M_state};
566 swap(stop_source& __other) noexcept
568 _M_state.swap(__other._M_state);
573 operator==(const stop_source& __a, const stop_source& __b) noexcept
575 return __a._M_state == __b._M_state;
579 swap(stop_source& __lhs, stop_source& __rhs) noexcept
585 stop_token::_Stop_state_ref _M_state;
// NOTE(review): fragment of the stop_callback class template; access
// specifiers, the destructor head and several braces are on dropped lines.
588 /// A wrapper for callbacks to be run when a stop request is made.
589 template<typename _Callback>
590 class [[nodiscard]] stop_callback
// Contract checks required of the callback type.
592 static_assert(is_nothrow_destructible_v<_Callback>);
593 static_assert(is_invocable_v<_Callback>);
596 using callback_type = _Callback;
// Constructor from an lvalue token: register with the token's shared
// state; only on successful registration does _M_state take shared
// ownership (via the swap).  SFINAE-constrained to constructible _Cb.
598 template<typename _Cb,
599 enable_if_t<is_constructible_v<_Callback, _Cb>, int> = 0>
601 stop_callback(const stop_token& __token, _Cb&& __cb)
602 noexcept(is_nothrow_constructible_v<_Callback, _Cb>)
603 : _M_cb(std::forward<_Cb>(__cb))
605 if (auto __state = __token._M_state)
607 if (__state->_M_register_callback(&_M_cb))
608 _M_state.swap(__state);
// Same, for an rvalue token: binds the state by reference so the swap
// empties the moved-from token instead of a copy.
612 template<typename _Cb,
613 enable_if_t<is_constructible_v<_Callback, _Cb>, int> = 0>
615 stop_callback(stop_token&& __token, _Cb&& __cb)
616 noexcept(is_nothrow_constructible_v<_Callback, _Cb>)
617 : _M_cb(std::forward<_Cb>(__cb))
619 if (auto& __state = __token._M_state)
621 if (__state->_M_register_callback(&_M_cb))
622 _M_state.swap(__state);
// Destructor fragment: deregister (blocking if the callback is running
// on another thread — see _M_remove_callback above).
630 _M_state->_M_remove_callback(&_M_cb);
// Neither copyable nor movable: the state holds a pointer to _M_cb.
634 stop_callback(const stop_callback&) = delete;
635 stop_callback& operator=(const stop_callback&) = delete;
636 stop_callback(stop_callback&&) = delete;
637 stop_callback& operator=(stop_callback&&) = delete;
// Concrete node type: stores the user callback next to the intrusive
// _Stop_cb links, with _S_execute as the type-erased entry point.
640 struct _Cb_impl : stop_token::_Stop_cb
642 template<typename _Cb>
645 : _Stop_cb(&_S_execute),
646 _M_cb(std::forward<_Cb>(__cb))
651 [[__gnu__::__nonnull__]]
653 _S_execute(_Stop_cb* __that) noexcept
// Downcast to recover the stored callback and invoke it, forwarding
// per the callback's value category.
655 _Callback& __cb = static_cast<_Cb_impl*>(__that)->_M_cb;
656 std::forward<_Callback>(__cb)();
661 stop_token::_Stop_state_ref _M_state;
664 template<typename _Callback>
665 stop_callback(stop_token, _Callback) -> stop_callback<_Callback>;
667 _GLIBCXX_END_NAMESPACE_VERSION
669 #endif // __cplusplus > 201703L
670 #endif // _GLIBCXX_STOP_TOKEN