#ifndef _POOL_ALLOCATOR_H
#define _POOL_ALLOCATOR_H 1

#include <bits/c++config.h>
#include <cstdlib>
#include <new>
#include <bits/functexcept.h>
#include <ext/atomicity.h>
#include <ext/concurrence.h>
#include <bits/move.h>
#if __cplusplus >= 201103L
#include <type_traits>
#endif

// GNU extensions for public use.
namespace __gnu_cxx _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION
 
    /**
     *  @brief  Base class for __pool_alloc.
     */
    class __pool_alloc_base
    {
    protected:

      typedef std::size_t size_t;

      enum { _S_align = 8 };
      enum { _S_max_bytes = 128 };
      enum { _S_free_list_size = (size_t)_S_max_bytes / (size_t)_S_align };

      union _Obj
      {
        union _Obj* _M_free_list_link;
        char        _M_client_data[1];    // The client sees this.
      };

      static _Obj* volatile         _S_free_list[_S_free_list_size];

      // Chunk allocation state.
      static char*                  _S_start_free;
      static char*                  _S_end_free;
      static size_t                 _S_heap_size;
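      // Note (added for exposition): with _S_align == 8 and _S_max_bytes == 128
      // there are _S_free_list_size == 16 free lists, one per request size
      // 8, 16, ..., 128 bytes; larger requests bypass the pool entirely.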
 
      size_t
      _M_round_up(size_t __bytes)
      { return ((__bytes + (size_t)_S_align - 1) & ~((size_t)_S_align - 1)); }
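      // Example (added for exposition): the mask arithmetic above rounds a
      // request up to the next multiple of _S_align, e.g. 1 -> 8, 9 -> 16,
      // 128 -> 128.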
 
      _GLIBCXX_CONST _Obj* volatile*
      _M_get_free_list(size_t __bytes) throw ();

      __mutex&
      _M_get_mutex() throw ();

      // Returns an object of size __n, and optionally adds to the size-__n
      // free list.
      void*
      _M_refill(size_t __n);

      // Allocates a chunk for __nobjs objects of size __n.  __nobjs may be
      // reduced if it is inconvenient to allocate the requested number.
      char*
      _M_allocate_chunk(size_t __n, int& __nobjs);
    };
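  // Note (added for exposition): the static data members and the out-of-line
  // member functions declared above (_M_get_free_list, _M_get_mutex,
  // _M_refill, _M_allocate_chunk) are defined in the libstdc++ runtime
  // (pool_allocator.cc), not in this header.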
 
  /**
   *  @brief  Allocator using a memory pool with a single lock.
   *  @ingroup allocators
   */
  template<typename _Tp>
    class __pool_alloc : private __pool_alloc_base
    {
 
    private:
      static _Atomic_word           _S_force_new;

    public:
 
      typedef std::size_t     size_type;
      typedef std::ptrdiff_t  difference_type;
      typedef _Tp*       pointer;
      typedef const _Tp* const_pointer;
      typedef _Tp&       reference;
      typedef const _Tp& const_reference;
      typedef _Tp        value_type;
 
      template<typename _Tp1>
        struct rebind
        { typedef __pool_alloc<_Tp1> other; };
 
#if __cplusplus >= 201103L
      // _GLIBCXX_RESOLVE_LIB_DEFECTS
      // 2103. propagate_on_container_move_assignment
      typedef std::true_type propagate_on_container_move_assignment;
#endif
      __pool_alloc() _GLIBCXX_USE_NOEXCEPT { }

      __pool_alloc(const __pool_alloc&) _GLIBCXX_USE_NOEXCEPT { }

      template<typename _Tp1>
        __pool_alloc(const __pool_alloc<_Tp1>&) _GLIBCXX_USE_NOEXCEPT { }

      ~__pool_alloc() _GLIBCXX_USE_NOEXCEPT { }
 
      pointer
      address(reference __x) const _GLIBCXX_NOEXCEPT
      { return std::__addressof(__x); }  // std::__addressof: same as C++11 std::addressof.

      const_pointer
      address(const_reference __x) const _GLIBCXX_NOEXCEPT
      { return std::__addressof(__x); }
 
      size_type
      max_size() const _GLIBCXX_USE_NOEXCEPT
      { return std::size_t(-1) / sizeof(_Tp); }
 
#if __cplusplus >= 201103L
      template<typename _Up, typename... _Args>
        void
        construct(_Up* __p, _Args&&... __args)
        { ::new((void *)__p) _Up(std::forward<_Args>(__args)...); }

      template<typename _Up>
        void
        destroy(_Up* __p) { __p->~_Up(); }
#else
      // _GLIBCXX_RESOLVE_LIB_DEFECTS
      // 402. wrong new expression in [some_] allocator::construct
      void
      construct(pointer __p, const _Tp& __val)
      { ::new((void *)__p) _Tp(__val); }

      void
      destroy(pointer __p) { __p->~_Tp(); }
#endif
 
      _GLIBCXX_NODISCARD pointer
      allocate(size_type __n, const void* = 0);

      void
      deallocate(pointer __p, size_type __n);
    };
 
  template<typename _Tp>
    inline bool
    operator==(const __pool_alloc<_Tp>&, const __pool_alloc<_Tp>&)
    { return true; }
 
#if __cpp_impl_three_way_comparison < 201907L
  template<typename _Tp>
    inline bool
    operator!=(const __pool_alloc<_Tp>&, const __pool_alloc<_Tp>&)
    { return false; }
#endif
 
  template<typename _Tp>
    _Atomic_word
    __pool_alloc<_Tp>::_S_force_new;
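  // Note (added for exposition): _S_force_new is a tri-state flag.  Zero means
  // "environment not checked yet"; allocate() sets it to a positive value if
  // GLIBCXX_FORCE_NEW is present (routing every request through operator new)
  // and to a negative value otherwise.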
 
  template<typename _Tp>
    _GLIBCXX_NODISCARD _Tp*
    __pool_alloc<_Tp>::allocate(size_type __n, const void*)
    {
      pointer __ret = 0;
      if (__builtin_expect(__n != 0, true))
        {
          if (__n > this->max_size())
            std::__throw_bad_alloc();

          const size_t __bytes = __n * sizeof(_Tp);

#if __cpp_aligned_new
          if (alignof(_Tp) > __STDCPP_DEFAULT_NEW_ALIGNMENT__)
            {
              std::align_val_t __al = std::align_val_t(alignof(_Tp));
              return static_cast<_Tp*>(::operator new(__bytes, __al));
            }
#endif

          // If there is a race through here, assume answer from getenv
          // will resolve in same direction.
          if (_S_force_new == 0)
            {
              if (std::getenv("GLIBCXX_FORCE_NEW"))
                __atomic_add_dispatch(&_S_force_new, 1);
              else
                __atomic_add_dispatch(&_S_force_new, -1);
            }

          if (__bytes > size_t(_S_max_bytes) || _S_force_new > 0)
            __ret = static_cast<_Tp*>(::operator new(__bytes));
          else
            {
              _Obj* volatile* __free_list = _M_get_free_list(__bytes);

              __scoped_lock sentry(_M_get_mutex());
              _Obj* __restrict__ __result = *__free_list;
              if (__builtin_expect(__result == 0, 0))
                __ret = static_cast<_Tp*>(_M_refill(_M_round_up(__bytes)));
              else
                {
                  *__free_list = __result->_M_free_list_link;
                  __ret = reinterpret_cast<_Tp*>(__result);
                }
              if (__ret == 0)
                std::__throw_bad_alloc();
            }
        }
      return __ret;
    }
 
  template<typename _Tp>
    void
    __pool_alloc<_Tp>::deallocate(pointer __p, size_type __n)
    {
      if (__builtin_expect(__n != 0 && __p != 0, true))
        {
#if __cpp_aligned_new
          if (alignof(_Tp) > __STDCPP_DEFAULT_NEW_ALIGNMENT__)
            {
              ::operator delete(__p, std::align_val_t(alignof(_Tp)));
              return;
            }
#endif
          const size_t __bytes = __n * sizeof(_Tp);
          if (__bytes > static_cast<size_t>(_S_max_bytes) || _S_force_new > 0)
            ::operator delete(__p);
          else
            {
              _Obj* volatile* __free_list = _M_get_free_list(__bytes);
              _Obj* __q = reinterpret_cast<_Obj*>(__p);

              __scoped_lock sentry(_M_get_mutex());
              __q->_M_free_list_link = *__free_list;
              *__free_list = __q;
            }
        }
    }
 
_GLIBCXX_END_NAMESPACE_VERSION
} // namespace

#endif
 
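A minimal usage sketch (not part of the header above; assumes a GCC/libstdc++
toolchain). __pool_alloc can be plugged into any standard container as its
allocator template argument, and setting the GLIBCXX_FORCE_NEW environment
variable at run time makes every request fall through to plain new/delete.

// usage_sketch.cc -- illustrative only, not part of pool_allocator.h.
#include <ext/pool_allocator.h>
#include <list>

int main()
{
  // Each list node is well under _S_max_bytes (128 bytes), so after the
  // first chunk is carved up every allocation is served from a free list.
  std::list<int, __gnu_cxx::__pool_alloc<int> > nodes;
  for (int i = 0; i < 1000; ++i)
    nodes.push_back(i);

  // Run with GLIBCXX_FORCE_NEW=1 in the environment to bypass the pool and
  // exercise the operator new/delete path instead.
  return 0;
}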