#ifndef _POOL_ALLOCATOR_H
#define _POOL_ALLOCATOR_H 1

#include <bits/c++config.h>
#include <cstdlib>
#include <new>
#include <bits/functexcept.h>
#include <ext/atomicity.h>
#include <ext/concurrence.h>
#include <bits/move.h>
#if __cplusplus >= 201103L
#include <type_traits>
#endif

namespace __gnu_cxx _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   *  @brief  Base class for __pool_alloc.
   *
   *  Small requests are carved out of larger chunks and kept on per-size
   *  free lists; requests larger than _S_max_bytes go straight to new.
   */
  class __pool_alloc_base
  {
  protected:

    enum { _S_align = 8 };
    enum { _S_max_bytes = 128 };
    enum { _S_free_list_size = (size_t)_S_max_bytes / (size_t)_S_align };

    union _Obj
    {
      union _Obj* _M_free_list_link;
      char        _M_client_data[1];    // The client sees this.
    };

    static _Obj* volatile _S_free_list[_S_free_list_size];

    // Chunk allocation state.
    static char*  _S_start_free;
    static char*  _S_end_free;
    static size_t _S_heap_size;
    size_t
    _M_round_up(size_t __bytes)
    { return ((__bytes + (size_t)_S_align - 1) & ~((size_t)_S_align - 1)); }
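    // _M_get_free_list maps a byte count to the free list serving its size
    // class: with _S_align == 8, for example, a 13-byte request is rounded
    // up to 16 by _M_round_up above and recycled through the 16-byte list.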
    _GLIBCXX_CONST _Obj* volatile*
    _M_get_free_list(size_t __bytes) throw ();

    __mutex&
    _M_get_mutex() throw ();

    // Returns an object of size __n, and optionally adds to size __n
    // free list.
    void*
    _M_refill(size_t __n);

    // Allocates a chunk for nobjs of size size.  nobjs may be reduced
    // if it is inconvenient to allocate the requested number.
    char*
    _M_allocate_chunk(size_t __n, int& __nobjs);
  };
  /**
   *  @brief  Allocator using a memory pool with a single lock.
   *  @ingroup allocators
   */
  template<typename _Tp>
    class __pool_alloc : private __pool_alloc_base
    {
    private:
      static _Atomic_word _S_force_new;

    public:
      typedef size_t     size_type;
      typedef ptrdiff_t  difference_type;
      typedef _Tp*       pointer;
      typedef const _Tp* const_pointer;
      typedef _Tp&       reference;
      typedef const _Tp& const_reference;
      typedef _Tp        value_type;

      template<typename _Tp1>
        struct rebind
        { typedef __pool_alloc<_Tp1> other; };

#if __cplusplus >= 201103L
      // _GLIBCXX_RESOLVE_LIB_DEFECTS
      // 2103. propagate_on_container_move_assignment
      typedef std::true_type propagate_on_container_move_assignment;
#endif

      __pool_alloc() _GLIBCXX_USE_NOEXCEPT { }

      __pool_alloc(const __pool_alloc&) _GLIBCXX_USE_NOEXCEPT { }

      template<typename _Tp1>
        __pool_alloc(const __pool_alloc<_Tp1>&) _GLIBCXX_USE_NOEXCEPT { }

      ~__pool_alloc() _GLIBCXX_USE_NOEXCEPT { }

      pointer
      address(reference __x) const _GLIBCXX_NOEXCEPT
      { return std::__addressof(__x); }

      const_pointer
      address(const_reference __x) const _GLIBCXX_NOEXCEPT
      { return std::__addressof(__x); }

      size_type
      max_size() const _GLIBCXX_USE_NOEXCEPT
      { return size_t(-1) / sizeof(_Tp); }

#if __cplusplus >= 201103L
      template<typename _Up, typename... _Args>
        void
        construct(_Up* __p, _Args&&... __args)
        { ::new((void *)__p) _Up(std::forward<_Args>(__args)...); }

      template<typename _Up>
        void
        destroy(_Up* __p) { __p->~_Up(); }
#else
      void
      construct(pointer __p, const _Tp& __val)
      { ::new((void *)__p) _Tp(__val); }

      void
      destroy(pointer __p) { __p->~_Tp(); }
#endif

      pointer
      allocate(size_type __n, const void* = 0);

      void
      deallocate(pointer __p, size_type __n);
    };
  template<typename _Tp>
    inline bool
    operator==(const __pool_alloc<_Tp>&, const __pool_alloc<_Tp>&)
    { return true; }

  template<typename _Tp>
    inline bool
    operator!=(const __pool_alloc<_Tp>&, const __pool_alloc<_Tp>&)
    { return false; }

  template<typename _Tp>
    _Atomic_word
    __pool_alloc<_Tp>::_S_force_new;
  template<typename _Tp>
    _Tp*
    __pool_alloc<_Tp>::allocate(size_type __n, const void*)
    {
      pointer __ret = 0;
      if (__builtin_expect(__n != 0, true))
        {
          if (__n > this->max_size())
            std::__throw_bad_alloc();

          const size_t __bytes = __n * sizeof(_Tp);

#if __cpp_aligned_new
          if (alignof(_Tp) > __STDCPP_DEFAULT_NEW_ALIGNMENT__)
            {
              std::align_val_t __al = std::align_val_t(alignof(_Tp));
              return static_cast<_Tp*>(::operator new(__bytes, __al));
            }
#endif

          // If there is a race through here, assume answer from getenv
          // will resolve in same direction.
          if (_S_force_new == 0)
            {
              if (std::getenv("GLIBCXX_FORCE_NEW"))
                __atomic_add_dispatch(&_S_force_new, 1);
              else
                __atomic_add_dispatch(&_S_force_new, -1);
            }

          if (__bytes > size_t(_S_max_bytes) || _S_force_new > 0)
            __ret = static_cast<_Tp*>(::operator new(__bytes));
          else
            {
              _Obj* volatile* __free_list = _M_get_free_list(__bytes);

              __scoped_lock sentry(_M_get_mutex());
              _Obj* __restrict__ __result = *__free_list;
              if (__builtin_expect(__result == 0, 0))
                __ret = static_cast<_Tp*>(_M_refill(_M_round_up(__bytes)));
              else
                {
                  *__free_list = __result->_M_free_list_link;
                  __ret = reinterpret_cast<_Tp*>(__result);
                }
              if (__ret == 0)
                std::__throw_bad_alloc();
            }
        }
      return __ret;
    }
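  // deallocate() mirrors the small-object path above: blocks of at most
  // _S_max_bytes are pushed back onto the head of their size class's free
  // list under the same mutex; larger blocks, and everything when
  // GLIBCXX_FORCE_NEW is in effect, go back through operator delete.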
  template<typename _Tp>
    void
    __pool_alloc<_Tp>::deallocate(pointer __p, size_type __n)
    {
      if (__builtin_expect(__n != 0 && __p != 0, true))
        {
#if __cpp_aligned_new
          if (alignof(_Tp) > __STDCPP_DEFAULT_NEW_ALIGNMENT__)
            {
              ::operator delete(__p, std::align_val_t(alignof(_Tp)));
              return;
            }
#endif
          const size_t __bytes = __n * sizeof(_Tp);
          if (__bytes > static_cast<size_t>(_S_max_bytes) || _S_force_new > 0)
            ::operator delete(__p);
          else
            {
              _Obj* volatile* __free_list = _M_get_free_list(__bytes);
              _Obj* __q = reinterpret_cast<_Obj*>(__p);

              __scoped_lock sentry(_M_get_mutex());
              __q->_M_free_list_link = *__free_list;
              *__free_list = __q;
            }
        }
    }
_GLIBCXX_END_NAMESPACE_VERSION
} // namespace

#endif
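// Illustrative usage sketch (a separate translation unit, not the header
// itself), assuming GCC's libstdc++: __pool_alloc is a GNU extension that can
// be plugged in as a container allocator. The container type and element
// count below are arbitrary choices for the example.

#include <ext/pool_allocator.h>
#include <list>
#include <iostream>

int main()
{
  // Each std::list node is far smaller than _S_max_bytes (128 bytes), so
  // nodes are carved from pooled chunks and recycled through the per-size
  // free lists rather than hitting operator new on every insertion.
  std::list<int, __gnu_cxx::__pool_alloc<int> > values;
  for (int i = 0; i < 1000; ++i)
    values.push_back(i);
  std::cout << values.size() << '\n';   // prints 1000
  return 0;
}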