-
Notifications
You must be signed in to change notification settings - Fork 15
Expand file tree
/
Copy pathMemArena.cc
More file actions
296 lines (263 loc) · 8.96 KB
/
MemArena.cc
File metadata and controls
296 lines (263 loc) · 8.96 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
// SPDX-License-Identifier: Apache-2.0
// Copyright Verizon Media 2020
/** @file
MemArena memory allocator. Chunks of memory are allocated, frozen into generations and thawed
away when unused.
*/
#include "swoc/MemArena.h"
#include <algorithm>
namespace swoc { inline namespace SWOC_VERSION_NS {
// Destruction hook: invokes the destructor without freeing storage (std::destroy_at),
// so an arena residing in its own memory (see construct_self_contained) can be torn down.
void (*MemArena::destroyer)(MemArena *) = std::destroy_at<MemArena>;
/// @return @c true if this block can provide @a n bytes at alignment @a align,
/// accounting for the padding needed to align the next allocation point.
inline bool
MemArena::Block::satisfies(size_t n, size_t align) const {
  size_t pad = align_padding(this->data() + allocated, align);
  return n + pad <= this->remaining();
}
/** Construct an arena over caller provided memory.
 *
 * @param static_block Memory the arena uses for its first block - never freed by the arena.
 *
 * @throw std::domain_error if @a static_block cannot hold a @c Block header plus the
 * minimum free space.
 */
MemArena::MemArena(MemSpan<void> static_block) {
  // Smallest viable block: header plus minimum free space, rounded to a 16 byte multiple.
  static constexpr Scalar<16, size_t> MIN_BLOCK_SIZE = round_up(sizeof(Block) + Block::MIN_FREE_SPACE);
  if (static_block.size() < MIN_BLOCK_SIZE) {
    throw std::domain_error("MemArena static block is too small.");
  }
  // Construct the block data in the block and put it on the list. Make a note this is the
  // static block that shouldn't be deleted.
  auto space = static_block.size() - sizeof(Block);
  _static_block = new (static_block.data()) Block(space);
  _active_reserved = space;
  _active.prepend(_static_block);
}
// Need to break these out because the default implementation doesn't clear the
// integral values in @a that.
/** Move construction - take over all blocks and accounting of @a that, leaving it empty. */
MemArena::MemArena(swoc::MemArena::self_type &&that) noexcept
  : _active_allocated(that._active_allocated),
    _active_reserved(that._active_reserved),
    _frozen_allocated(that._frozen_allocated),
    _frozen_reserved(that._frozen_reserved),
    _reserve_hint(that._reserve_hint),
    _frozen(std::move(that._frozen)),
    _active(std::move(that._active)),
    _static_block(that._static_block) {
  // Clear data in @a that to indicate all of the memory has been moved.
  that._active_allocated = that._active_reserved = 0;
  that._frozen_allocated = that._frozen_reserved = 0;
  that._reserve_hint = 0;
  that._static_block = nullptr;
}
/** Create an arena which lives inside its own first memory block.
 *
 * @param n Minimum free space, beyond the space used by the @c MemArena instance itself.
 * @return Pointer to the arena, located in memory the arena owns.
 *
 * A temporary arena is built on the stack, then move-constructed into storage allocated
 * from itself. Tear such an arena down via @c destroyer, not @c delete - the destructor
 * is written to be safe when the instance is inside one of its own blocks.
 */
MemArena *
MemArena::construct_self_contained(size_t n) {
  MemArena tmp{n + sizeof(MemArena)};
  return tmp.make<MemArena>(std::move(tmp));
}
/** Move assignment.
 *
 * @param that Source arena - left empty.
 * @return @a this
 *
 * Memory currently held by @a this is released, then the content of @a that is taken over.
 */
MemArena &
MemArena::operator=(swoc::MemArena::self_type &&that) noexcept {
  this->clear();
  std::swap(_active_allocated, that._active_allocated);
  std::swap(_active_reserved, that._active_reserved);
  std::swap(_frozen_allocated, that._frozen_allocated);
  std::swap(_frozen_reserved, that._frozen_reserved);
  std::swap(_reserve_hint, that._reserve_hint);
  // The static block marker must travel with the block lists - otherwise a static block
  // owned by @a that would later be passed to @c delete by this arena's cleanup paths
  // (destroy_active / destroy_frozen / ~MemArena all compare against _static_block).
  std::swap(_static_block, that._static_block);
  _active = std::move(that._active);
  _frozen = std::move(that._frozen);
  return *this;
}
/** Allocate a new heap block with at least @a n bytes of free space.
 *
 * @param n Minimum free space required.
 * @return The new block; @c _active_reserved is updated to include its free space.
 *
 * The requested size is expanded by the reserve hint and rounded to allocation-friendly
 * units (paragraphs / quarter pages / pages) to reduce allocator waste.
 */
MemArena::Block *
MemArena::make_block(size_t n) {
  // If there's no reservation hint, use the extent. This is transient because the hint is cleared.
  if (_reserve_hint == 0) {
    if (_active_reserved) {
      _reserve_hint = _active_reserved;
    } else if (_frozen_allocated) { // immediately after freezing - use that extent.
      _reserve_hint = _frozen_allocated;
    }
  }
  n = std::max<size_t>(n, _reserve_hint);
  _reserve_hint = 0; // did this, clear for next time.
  // Add in overhead and round up to paragraph units.
  n = Paragraph{round_up(n + ALLOC_HEADER_SIZE + sizeof(Block))};
  // If more than a page or within a quarter page of a full page,
  // round up to page unit size and clip back to account for alloc header.
  if (n >= (Page::SCALE - QuarterPage::SCALE)) {
    n = Page{round_up(n)} - ALLOC_HEADER_SIZE;
  } else if (n >= QuarterPage::SCALE) { // if at least a quarter page, round up to quarter pages.
    n = QuarterPage{round_up(n)};
  }
  // Allocate space for the Block instance and the request memory and construct a Block at the front.
  // In theory this could use ::operator new(n) but this causes a size mismatch during ::operator delete.
  // Easier to use malloc and override @c delete.
  auto free_space = n - sizeof(Block);
  _active_reserved += free_space;
  return new (::malloc(n)) Block(free_space);
}
/** Allocate @a n bytes of storage with alignment @a align.
 *
 * @param n Number of bytes.
 * @param align Alignment requirement.
 * @return Span covering the allocated memory.
 *
 * @c require establishes the invariant that the head active block can satisfy the request.
 */
MemSpan<void>
MemArena::alloc(size_t n, size_t align) {
  this->require(n, align);
  Block *head = _active.head();
  MemSpan<void> span = head->alloc(n, align);
  _active_allocated += n;
  // A block with no free space left is moved to the back of the list.
  if (head->is_full() && head != _active.tail()) {
    _active.erase(head);
    _active.append(head);
  }
  return span;
}
/** Freeze the active memory generation.
 *
 * @param n Stored as the reserve hint for the next block allocation.
 * @return @a this
 *
 * Any previously frozen generation is released, then the active list becomes the frozen list.
 */
MemArena &
MemArena::freeze(size_t n) {
  this->destroy_frozen(); // drop the prior frozen generation, if any.
  _frozen = std::move(_active);
  // Move the accounting over to the frozen side.
  _frozen_allocated = _active_allocated;
  _frozen_reserved = _active_reserved;
  _active_allocated = 0;
  _active_reserved = 0;
  _reserve_hint = n;
  return *this;
}
/** Release the frozen generation.
 *
 * @return @a this
 *
 * All frozen blocks are destroyed. If the static block was among them it is reset and
 * returned to the active list.
 */
MemArena &
MemArena::thaw() {
  // Determine whether the static block is actually in the frozen list *before* the list is
  // destroyed. The previous unconditional prepend corrupted the active (intrusive) list and
  // double counted _active_reserved when thaw() was called without a preceding freeze(),
  // since the static block was still in the active list.
  bool static_was_frozen = false;
  if (_static_block != nullptr) {
    auto sb = _static_block;
    static_was_frozen = std::any_of(_frozen.begin(), _frozen.end(), [sb](Block const &b) { return &b == sb; });
  }
  this->destroy_frozen();
  _frozen_reserved = _frozen_allocated = 0;
  if (static_was_frozen) {
    _static_block->discard(); // reset to empty before reuse.
    _active.prepend(_static_block);
    _active_reserved += _static_block->remaining();
  }
  return *this;
}
bool
MemArena::contains(const void *ptr) const {
auto pred = [ptr](const Block &b) -> bool { return b.contains(ptr); };
return std::any_of(_active.begin(), _active.end(), pred) || std::any_of(_frozen.begin(), _frozen.end(), pred);
}
/** Ensure the head active block has room for @a n bytes at alignment @a align.
 *
 * @param n Required free space.
 * @param align Required alignment.
 * @return @a this
 *
 * A suitable existing block is moved to the head, otherwise a new block is allocated
 * and prepended.
 */
MemArena &
MemArena::require(size_t n, size_t align) {
  Block *usable = nullptr;
  // Walk the list front to back; hitting a full block before a fit is a miss, per the
  // original search logic.
  for (Block &b : _active) {
    if (b.satisfies(n, align)) {
      usable = &b;
      break;
    }
    if (b.is_full()) {
      break;
    }
  }
  if (nullptr == usable) {                  // no block has enough free space.
    _active.prepend(this->make_block(n));   // assuming a new block is sufficiently aligned.
  } else if (usable != _active.head()) {
    // Big enough space found - promote that block to the head of the list.
    _active.erase(usable);
    _active.prepend(usable);
  }
  // Invariant - the head active block has at least @a n bytes of free storage.
  return *this;
}
void
MemArena::destroy_active() {
auto sb = _static_block; // C++20 nonsense - capture of @a this is incompatible with C++17.
_active
.apply([=](Block *b) {
if (b != sb) {
delete b;
}
})
.clear();
}
void
MemArena::destroy_frozen() {
auto sb = _static_block; // C++20 nonsense - capture of @a this is incompatible with C++17.
_frozen
.apply([=](Block *b) {
if (b != sb) {
delete b;
}
})
.clear();
}
/** Release all memory in the arena.
 *
 * @param hint Reserve hint for the next block; if 0 the current total allocation is used.
 * @return @a this
 */
MemArena &
MemArena::clear(size_t hint) {
  // Capture the hint before the counters are reset.
  if (hint != 0) {
    _reserve_hint = hint;
  } else {
    _reserve_hint = _frozen_allocated + _active_allocated;
  }
  _frozen_allocated = _frozen_reserved = 0;
  _active_allocated = _active_reserved = 0;
  this->destroy_frozen();
  this->destroy_active();
  return *this;
}
/** Return @a span to the arena if it was the most recent allocation.
 *
 * @param span Previously allocated memory to give back.
 * @return @a this
 *
 * The rollback only happens when @a span ends exactly at a block's allocation point;
 * otherwise the memory stays allocated until the arena is cleared. Leading empty blocks
 * are skipped; the most recently filled block sits at the list tail (see @c alloc).
 */
MemArena &
MemArena::discard(MemSpan<const void> span) {
  // This is intended to iterate over empty blocks until @a span is found.
  for (auto &block : _active) {
    if (block.contains(span.data())) { // it's in this block, final iteration.
      // Roll back only if @a span is exactly the last allocation in this block.
      if (block.allocated_data_end() == span.data_end()) {
        block.allocated -= span.size();
        _active_allocated -= span.size();
      }
      break;
    } else if (block.allocated > 0) {
      // If the block wasn't empty the only other place
      // @a span could be is in the most recent filled block, which is last in the list.
      // Invariant - the first block does not contain @a span.
      // Therefore, if the last block contains @a span, it is not the first block.
      auto lfb = _active.tail(); // list is not empty, must exist.
      if (lfb->contains(span.data()) && lfb->allocated_data_end() == span.data_end()) {
        lfb->allocated -= span.size();
        _active_allocated -= span.size();
        // The block has usable space again - move it to the front for future allocations.
        if (!lfb->is_full()) {
          _active.erase(lfb);
          _active.prepend(lfb);
        }
      }
      break; // loop always ends after hitting a non-empty block.
    }
  }
  return *this;
}
/** Mark all active memory as unallocated without releasing the blocks.
 *
 * @param hint Optional reserve hint for the next block allocation.
 * @return @a this
 */
MemArena &
MemArena::discard(size_t hint) {
  // Because existing blocks remain, clear the reserve hint so then when a new block is allocated
  // it uses the allocation size then, not what it is now. Now is handled by the existing blocks,
  // unless the caller explicitly provides a hint.
  _reserve_hint = hint; // 0 (the default) simply clears the hint.
  for (Block &b : _active) {
    b.discard();
  }
  _active_allocated = 0;
  return *this;
}
/** Destructor - delete every heap block in both lists, skipping the static block.
 *
 * List heads are copied to locals and the lists cleared up front so the teardown works
 * even when this instance resides in one of its own blocks (see @c construct_self_contained).
 */
MemArena::~MemArena() {
  // Destruct in a way that makes it safe for the instance to be in one of its own memory blocks.
  // This means copying members that will be used during the delete.
  Block *ba = _active.head();
  Block *bf = _frozen.head();
  Block *sb = _static_block;
  _active.clear();
  _frozen.clear();
  // Walk the frozen chain via the node links, deleting all but the static block.
  while (bf) {
    Block *b = bf;
    bf = bf->_link._next;
    if (b != sb) {
      delete b;
    }
  }
  // Same for the active chain.
  while (ba) {
    Block *b = ba;
    ba = ba->_link._next;
    if (b != sb) {
      delete b;
    }
  }
}
#if __has_include(<memory_resource>)
/// PMR interface - serve the allocation from the arena.
void *
MemArena::do_allocate(std::size_t bytes, std::size_t align) {
  auto span = this->alloc(bytes, align);
  return span.data();
}
/// PMR interface - individual deallocation is a no-op; arena memory is released in bulk
/// (via @c clear / @c thaw / destruction).
void
MemArena::do_deallocate(void *, size_t, size_t) {}
/// PMR interface - memory resources are equal only when they are the same instance.
bool
MemArena::do_is_equal(std::pmr::memory_resource const &that) const noexcept {
  return &that == this;
}
#endif
}} // namespace swoc::SWOC_VERSION_NS