libstdc++
pool_allocator.h
// Allocators -*- C++ -*-

// Copyright (C) 2001-2021 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library. This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
// <http://www.gnu.org/licenses/>.

/*
 * Copyright (c) 1996-1997
 * Silicon Graphics Computer Systems, Inc.
 *
 * Permission to use, copy, modify, distribute and sell this software
 * and its documentation for any purpose is hereby granted without fee,
 * provided that the above copyright notice appear in all copies and
 * that both that copyright notice and this permission notice appear
 * in supporting documentation. Silicon Graphics makes no
 * representations about the suitability of this software for any
 * purpose. It is provided "as is" without express or implied warranty.
 */

/** @file ext/pool_allocator.h
 *  This file is a GNU extension to the Standard C++ Library.
 */

#ifndef _POOL_ALLOCATOR_H
#define _POOL_ALLOCATOR_H 1

#include <bits/c++config.h>
#include <cstdlib>
#include <new>
#include <bits/functexcept.h>
#include <ext/atomicity.h>
#include <ext/concurrence.h>
#include <bits/move.h>
#if __cplusplus >= 201103L
#include <type_traits>
#endif

namespace __gnu_cxx _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION
  /**
   * @brief Base class for __pool_alloc.
   *
   * Uses various allocators to fulfill underlying requests (and makes as
   * few requests as possible when in default high-speed pool mode).
   *
   * Important implementation properties:
   * 0. If globally mandated, then allocate objects from new.
   * 1. If a client requests an object of size > _S_max_bytes, the resulting
   *    object will be obtained directly from new.
   * 2. In all other cases, we allocate an object of size exactly
   *    _S_round_up(requested_size). Thus the client has enough size
   *    information that we can return the object to the proper free list
   *    without permanently losing part of the object.
   */
  class __pool_alloc_base
  {
    typedef std::size_t size_t;
  protected:

    enum { _S_align = 8 };
    enum { _S_max_bytes = 128 };
    enum { _S_free_list_size = (size_t)_S_max_bytes / (size_t)_S_align };

    union _Obj
    {
      union _Obj* _M_free_list_link;
      char        _M_client_data[1];    // The client sees this.
    };

    static _Obj* volatile _S_free_list[_S_free_list_size];

    // Chunk allocation state.
    static char*  _S_start_free;
    static char*  _S_end_free;
    static size_t _S_heap_size;

    size_t
    _M_round_up(size_t __bytes)
    { return ((__bytes + (size_t)_S_align - 1) & ~((size_t)_S_align - 1)); }

    _GLIBCXX_CONST _Obj* volatile*
    _M_get_free_list(size_t __bytes) throw ();

    __mutex&
    _M_get_mutex() throw ();

    // Returns an object of size __n, and optionally adds entries to the
    // free list for objects of size __n.
    void*
    _M_refill(size_t __n);

    // Allocates a chunk for __nobjs objects of size __n. __nobjs may be
    // reduced if it is inconvenient to allocate the requested number.
    char*
    _M_allocate_chunk(size_t __n, int& __nobjs);
  };
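
  // Illustrative sketch (an editorial addition, not part of the original
  // header): given the constants above, _M_round_up() rounds a request up to
  // the next multiple of _S_align (8), and each rounded size maps to one of
  // the _S_free_list_size (16) free lists:
  //
  //   _M_round_up(1)   == 8      // served from the 8-byte free list
  //   _M_round_up(13)  == 16     // served from the 16-byte free list
  //   _M_round_up(128) == 128    // largest pooled size (_S_max_bytes)
  //
  // Requests larger than _S_max_bytes bypass the pool entirely.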

  /**
   * @brief Allocator using a memory pool with a single lock.
   * @ingroup allocators
   */
  template<typename _Tp>
    class __pool_alloc : private __pool_alloc_base
    {
    private:
      static _Atomic_word _S_force_new;

    public:
      typedef std::size_t     size_type;
      typedef std::ptrdiff_t  difference_type;
      typedef _Tp*            pointer;
      typedef const _Tp*      const_pointer;
      typedef _Tp&            reference;
      typedef const _Tp&      const_reference;
      typedef _Tp             value_type;

      template<typename _Tp1>
        struct rebind
        { typedef __pool_alloc<_Tp1> other; };

#if __cplusplus >= 201103L
      // _GLIBCXX_RESOLVE_LIB_DEFECTS
      // 2103. propagate_on_container_move_assignment
      typedef std::true_type propagate_on_container_move_assignment;
#endif

      __pool_alloc() _GLIBCXX_USE_NOEXCEPT { }

      __pool_alloc(const __pool_alloc&) _GLIBCXX_USE_NOEXCEPT { }

      template<typename _Tp1>
        __pool_alloc(const __pool_alloc<_Tp1>&) _GLIBCXX_USE_NOEXCEPT { }

      ~__pool_alloc() _GLIBCXX_USE_NOEXCEPT { }

      pointer
      address(reference __x) const _GLIBCXX_NOEXCEPT
      { return std::__addressof(__x); }

      const_pointer
      address(const_reference __x) const _GLIBCXX_NOEXCEPT
      { return std::__addressof(__x); }

      size_type
      max_size() const _GLIBCXX_USE_NOEXCEPT
      { return std::size_t(-1) / sizeof(_Tp); }

#if __cplusplus >= 201103L
      template<typename _Up, typename... _Args>
        void
        construct(_Up* __p, _Args&&... __args)
        { ::new((void *)__p) _Up(std::forward<_Args>(__args)...); }

      template<typename _Up>
        void
        destroy(_Up* __p) { __p->~_Up(); }
#else
      // _GLIBCXX_RESOLVE_LIB_DEFECTS
      // 402. wrong new expression in [some_] allocator::construct
      void
      construct(pointer __p, const _Tp& __val)
      { ::new((void *)__p) _Tp(__val); }

      void
      destroy(pointer __p) { __p->~_Tp(); }
#endif

      _GLIBCXX_NODISCARD pointer
      allocate(size_type __n, const void* = 0);

      void
      deallocate(pointer __p, size_type __n);
    };
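
  // A minimal usage sketch (an editorial addition, not in the original
  // header). The allocator keeps no per-object state, and all instances of
  // __pool_alloc share the static free lists of __pool_alloc_base, which is
  // why the equality operators below always return true:
  //
  //   #include <vector>
  //   #include <ext/pool_allocator.h>
  //
  //   std::vector<int, __gnu_cxx::__pool_alloc<int> > __v;
  //   __v.push_back(42);  // small blocks (<= 128 bytes) come from the pool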

  template<typename _Tp>
    inline bool
    operator==(const __pool_alloc<_Tp>&, const __pool_alloc<_Tp>&)
    { return true; }

#if __cpp_impl_three_way_comparison < 201907L
  template<typename _Tp>
    inline bool
    operator!=(const __pool_alloc<_Tp>&, const __pool_alloc<_Tp>&)
    { return false; }
#endif

  template<typename _Tp>
    _Atomic_word
    __pool_alloc<_Tp>::_S_force_new;

  template<typename _Tp>
    _GLIBCXX_NODISCARD _Tp*
    __pool_alloc<_Tp>::allocate(size_type __n, const void*)
    {
      using std::size_t;
      pointer __ret = 0;
      if (__builtin_expect(__n != 0, true))
        {
          if (__n > this->max_size())
            std::__throw_bad_alloc();

          const size_t __bytes = __n * sizeof(_Tp);

#if __cpp_aligned_new
          if (alignof(_Tp) > __STDCPP_DEFAULT_NEW_ALIGNMENT__)
            {
              std::align_val_t __al = std::align_val_t(alignof(_Tp));
              return static_cast<_Tp*>(::operator new(__bytes, __al));
            }
#endif

          // If there is a race through here, assume the answer from getenv
          // will resolve in the same direction. Inspired by techniques
          // to efficiently support threading found in basic_string.h.
          if (_S_force_new == 0)
            {
              if (std::getenv("GLIBCXX_FORCE_NEW"))
                __atomic_add_dispatch(&_S_force_new, 1);
              else
                __atomic_add_dispatch(&_S_force_new, -1);
            }

          if (__bytes > size_t(_S_max_bytes) || _S_force_new > 0)
            __ret = static_cast<_Tp*>(::operator new(__bytes));
          else
            {
              _Obj* volatile* __free_list = _M_get_free_list(__bytes);

              __scoped_lock sentry(_M_get_mutex());
              _Obj* __restrict__ __result = *__free_list;
              if (__builtin_expect(__result == 0, 0))
                __ret = static_cast<_Tp*>(_M_refill(_M_round_up(__bytes)));
              else
                {
                  *__free_list = __result->_M_free_list_link;
                  __ret = reinterpret_cast<_Tp*>(__result);
                }
              if (__ret == 0)
                std::__throw_bad_alloc();
            }
        }
      return __ret;
    }
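
  // Note (an editorial addition): the _S_force_new check above means that
  // defining the GLIBCXX_FORCE_NEW environment variable before the first
  // allocation makes every subsequent request go straight to operator
  // new/delete instead of the pool, e.g. (shell, illustrative):
  //
  //   GLIBCXX_FORCE_NEW=1 ./a.out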

  template<typename _Tp>
    void
    __pool_alloc<_Tp>::deallocate(pointer __p, size_type __n)
    {
      using std::size_t;
      if (__builtin_expect(__n != 0 && __p != 0, true))
        {
#if __cpp_aligned_new
          if (alignof(_Tp) > __STDCPP_DEFAULT_NEW_ALIGNMENT__)
            {
              ::operator delete(__p, std::align_val_t(alignof(_Tp)));
              return;
            }
#endif
          const size_t __bytes = __n * sizeof(_Tp);
          if (__bytes > static_cast<size_t>(_S_max_bytes) || _S_force_new > 0)
            ::operator delete(__p);
          else
            {
              _Obj* volatile* __free_list = _M_get_free_list(__bytes);
              _Obj* __q = reinterpret_cast<_Obj*>(__p);

              __scoped_lock sentry(_M_get_mutex());
              __q->_M_free_list_link = *__free_list;
              *__free_list = __q;
            }
        }
    }
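
  // Illustrative sketch of the free-list reuse path (an editorial addition;
  // __a, __p1 and __p2 are hypothetical names):
  //
  //   __gnu_cxx::__pool_alloc<int> __a;
  //   int* __p1 = __a.allocate(1);  // taken from (or refills) the 8-byte list
  //   __a.deallocate(__p1, 1);      // pushed back onto the head of that list
  //   int* __p2 = __a.allocate(1);  // typically reuses the same block (LIFO)
  //
  // Pooled blocks are kept on the free lists for reuse rather than being
  // returned to operator delete.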

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace

#endif