Helios Engine 0.1.0
A modular, ECS-based, data-oriented C++23 game engine
 
Loading...
Searching...
No Matches
arena_allocator.hpp
Go to the documentation of this file.
1#pragma once
2
3#include <helios/core_pch.hpp>
4
8
9#include <algorithm>
10#include <atomic>
11#include <concepts>
12#include <cstddef>
13#include <cstdint>
14#include <memory>
15
16namespace helios::memory {
17
18/**
19 * @brief Lock-free, thread-safe arena allocator.
20 * @details Arena allocator that allocates from a pre-allocated buffer using a bump-pointer strategy.
21 * All allocations are performed with lock-free atomic operations on an internal offset.
22 *
23 * Memory is released only when the arena is reset, which is an O(1) operation.
24 * Individual deallocations are not supported.
25 *
26 * This allocator is suitable for use as a backing allocator for higher level systems that require fast,
27 * thread-safe allocation with predictable lifetime.
28 *
29 * All operations that modify the arena state (`Allocate`, `Reset`) use atomic operations.
30 * `Reset` is not safe to call concurrently with `Allocate` and must be externally synchronized when used in that way.
31 *
32 * @note Thread-safe.
33 * @warning `Reset` must not be used concurrently with active allocations.
34 * The caller is responsible for enforcing this invariant.
35 */
36class ArenaAllocator final {
37public:
38 /**
39 * @brief Constructs an arena allocator over an existing buffer.
40 * @details The caller provides a raw buffer and its size.
41 * The buffer must remain valid for the entire lifetime of the allocator.
42 * Alignment guarantees are provided up to `kDefaultAlignment` (or higher alignment if the
43 * buffer itself is appropriately aligned).
44 *
45 * The allocator does not take ownership of the buffer and will not free it.
46 *
47 * @warning Triggers assertion if:
48 * - buffer is nullptr but size is non-zero.
49 * - size is 0.
50 * - buffer is not aligned to `kMinAlignment`.
51 *
52 * @param buffer Pointer to the backing memory buffer.
53 * @param size Size of the buffer in bytes.
54 */
55 ArenaAllocator(void* buffer, size_t size) noexcept;
57 ArenaAllocator(ArenaAllocator&& other) noexcept;
58 ~ArenaAllocator() noexcept = default;
59
60 ArenaAllocator& operator=(const ArenaAllocator&) = delete;
61 ArenaAllocator& operator=(ArenaAllocator&& other) noexcept;
62
63 /**
64 * @brief Allocates a block of memory from the arena.
65 * @details Uses a lock-free bump-pointer with `compare_exchange_weak` to reserve space from the backing buffer.
66 * The returned memory is not initialized.
67 *
68 * @warning Triggers assertion in next cases:
69 * - Alignment is not a power of 2.
70 * - Alignment is less than `kMinAlignment`.
71 *
72 * @param size Number of bytes to allocate.
73 * @param alignment Alignment requirement (must be power of 2).
74 * @return AllocationResult with pointer and size, or `{nullptr, 0}` on failure.
75 */
76 [[nodiscard]] AllocationResult Allocate(size_t size, size_t alignment = kDefaultAlignment) noexcept;
77
78 /**
79 * @brief Allocates memory for a single object of type T.
80 * @details Convenience function that calculates size and alignment from the type.
81 * The returned memory is uninitialized - use placement new to construct the object.
82 * @tparam T Type to allocate memory for
83 * @return Pointer to allocated memory, or nullptr on failure
84 *
85 * @example
86 * @code
87 * ArenaAllocator alloc(buffer, size);
88 * int* ptr = alloc.Allocate<int>();
89 * if (ptr != nullptr) {
90 * new (ptr) int(42);
91 * }
92 * @endcode
93 */
94 template <typename T>
95 [[nodiscard]] T* Allocate() noexcept;
96
97 /**
98 * @brief Allocates memory for an array of objects of type T.
99 * @details Convenience function that calculates size and alignment from the type.
100 * The returned memory is uninitialized - use placement new to construct objects.
101 * @tparam T Type to allocate memory for
102 * @param count Number of objects to allocate space for
103 * @return Pointer to allocated memory, or nullptr on failure
104 *
105 * @example
106 * @code
107 * ArenaAllocator alloc(buffer, size);
108 * int* arr = alloc.Allocate<int>(10);
109 * @endcode
110 */
111 template <typename T>
112 [[nodiscard]] T* Allocate(size_t count) noexcept;
113
114 /**
115 * @brief Allocates and constructs a single object of type T.
116 * @details Convenience function that allocates memory and constructs the object in-place.
117 * @tparam T Type to allocate and construct
118 * @tparam Args Constructor argument types
119 * @param args Arguments to forward to T's constructor
120 * @return Pointer to constructed object, or nullptr on allocation failure
121 *
122 * @example
123 * @code
124 * ArenaAllocator alloc(buffer, size);
125 * auto* vec = alloc.AllocateAndConstruct<MyVec3>(1.0f, 2.0f, 3.0f);
126 * @endcode
127 */
128 template <typename T, typename... Args>
129 requires std::constructible_from<T, Args...>
130 [[nodiscard]] T* AllocateAndConstruct(Args&&... args) noexcept(std::is_nothrow_constructible_v<T, Args...>);
131
132 /**
133 * @brief Allocates and default-constructs an array of objects of type T.
134 * @details Convenience function that allocates memory and default-constructs objects in-place.
135 * @tparam T Type to allocate and construct (must be default constructible)
136 * @param count Number of objects to allocate and construct
137 * @return Pointer to first constructed object, or nullptr on allocation failure
138 *
139 * @example
140 * @code
141 * ArenaAllocator alloc(buffer, size);
142 * auto* arr = alloc.AllocateAndConstructArray<MyType>(10);
143 * @endcode
144 */
145 template <typename T>
146 requires std::default_initializable<T>
147 [[nodiscard]] T* AllocateAndConstructArray(size_t count) noexcept(std::is_nothrow_default_constructible_v<T>);
148
149 /**
150 * @brief Deallocation is a no-op.
151 * @details Arena allocators do not support individual deallocation.
152 * Memory is released only via `Reset`.
153 * This function exists to satisfy generic allocator interfaces.
154 * @param ptr Pointer previously returned by `Allocate` (may be nullptr).
155 */
156 void Deallocate(const void* /*ptr*/) noexcept {}
157
158 /**
159 * @brief Resets the arena, freeing all allocations.
160 * @details Sets the internal offset to zero and clears accounting statistics.
161 * This does not modify the contents of the underlying buffer.
162 * @warning Must not be called concurrently with `Allocate`.
163 * The caller must ensure there are no ongoing or future allocations that expect previous pointers to remain valid.
164 */
165 void Reset() noexcept;
166
167 /**
168 * @brief Checks if the arena is empty.
169 * @return True if no allocations have been made since the last reset.
170 */
171 [[nodiscard]] bool Empty() const noexcept { return offset_.load(std::memory_order_relaxed) == 0; }
172
173 /**
174 * @brief Checks if the arena is full.
175 * @return True if no more allocations can be made without reset.
176 */
177 [[nodiscard]] bool Full() const noexcept { return offset_.load(std::memory_order_relaxed) >= capacity_; }
178
179 /**
180 * @brief Gets current allocator statistics.
181 * @details Statistics are updated in a relaxed manner and are not guaranteed to be
182 * perfectly precise under heavy contention, but are sufficient for profiling and diagnostics.
183 * @return AllocatorStats with current usage information.
184 */
185 [[nodiscard]] AllocatorStats Stats() const noexcept;
186
187 /**
188 * @brief Gets the total capacity of the arena.
189 * @return Capacity in bytes.
190 */
191 [[nodiscard]] size_t Capacity() const noexcept { return capacity_; }
192
193 /**
194 * @brief Gets the current offset (amount of memory used).
195 * @return Current offset in bytes.
196 */
197 [[nodiscard]] size_t CurrentOffset() const noexcept { return offset_.load(std::memory_order_relaxed); }
198
199 /**
200 * @brief Gets the amount of free space remaining.
201 * @return Free space in bytes.
202 */
203 [[nodiscard]] size_t FreeSpace() const noexcept;
204
205 /**
206 * @brief Gets the raw backing buffer pointer.
207 * @return Pointer to the beginning of the backing buffer.
208 */
209 [[nodiscard]] void* Data() noexcept { return buffer_; }
210
211 /**
212 * @brief Gets the raw backing buffer pointer (const).
213 * @return Const pointer to the beginning of the backing buffer.
214 */
215 [[nodiscard]] const void* Data() const noexcept { return buffer_; }
216
217private:
218 void* buffer_ = nullptr; ///< Backing memory buffer (non-owning).
219 size_t capacity_ = 0; ///< Total capacity in bytes.
220 std::atomic<size_t> offset_{0}; ///< Current bump offset.
221 std::atomic<size_t> peak_offset_{0}; ///< Peak offset reached.
222 std::atomic<size_t> allocation_count_{0}; ///< Number of successful allocations.
223 std::atomic<size_t> alignment_waste_{0}; ///< Total bytes wasted due to alignment.
224};
225
226inline ArenaAllocator::ArenaAllocator(void* buffer, size_t size) noexcept : buffer_(buffer), capacity_(size) {
227 HELIOS_ASSERT(size > 0, "Failed to construct ArenaAllocator: size must be greater than 0!");
228 HELIOS_ASSERT(buffer != nullptr, "Failed to construct ArenaAllocator: buffer must not be nullptr!");
230 "Failed to construct ArenaAllocator: buffer must be at least {}-byte aligned!", kMinAlignment);
231
232 offset_.store(0, std::memory_order_release);
233 peak_offset_.store(0, std::memory_order_release);
234 allocation_count_.store(0, std::memory_order_release);
235 alignment_waste_.store(0, std::memory_order_release);
236}
237
239 : buffer_(other.buffer_),
240 capacity_(other.capacity_),
241 offset_(other.offset_.load(std::memory_order_acquire)),
242 peak_offset_(other.peak_offset_.load(std::memory_order_acquire)),
243 allocation_count_(other.allocation_count_.load(std::memory_order_acquire)),
244 alignment_waste_(other.alignment_waste_.load(std::memory_order_acquire)) {
245 other.buffer_ = nullptr;
246 other.capacity_ = 0;
247 other.offset_.store(0, std::memory_order_release);
248 other.peak_offset_.store(0, std::memory_order_release);
249 other.allocation_count_.store(0, std::memory_order_release);
250 other.alignment_waste_.store(0, std::memory_order_release);
251}
252
254 if (this == &other) [[unlikely]] {
255 return *this;
256 }
257
258 buffer_ = other.buffer_;
259 capacity_ = other.capacity_;
260 offset_.store(other.offset_.load(std::memory_order_acquire), std::memory_order_release);
261 peak_offset_.store(other.peak_offset_.load(std::memory_order_acquire), std::memory_order_release);
262 allocation_count_.store(other.allocation_count_.load(std::memory_order_acquire), std::memory_order_release);
263 alignment_waste_.store(other.alignment_waste_.load(std::memory_order_acquire), std::memory_order_release);
264
265 other.buffer_ = nullptr;
266 other.capacity_ = 0;
267 other.offset_.store(0, std::memory_order_release);
268 other.peak_offset_.store(0, std::memory_order_release);
269 other.allocation_count_.store(0, std::memory_order_release);
270 other.alignment_waste_.store(0, std::memory_order_release);
271
272 return *this;
273}
274
275inline AllocationResult ArenaAllocator::Allocate(size_t size, size_t alignment) noexcept {
276 HELIOS_ASSERT(IsPowerOfTwo(alignment), "ArenaAllocator::Allocate failed: alignment must be power of 2, got '{}'!",
277 alignment);
278 HELIOS_ASSERT(alignment >= kMinAlignment,
279 "ArenaAllocator::Allocate failed: alignment must be at least '{}', got '{}'!", kMinAlignment,
280 alignment);
281
282 if (size == 0) [[unlikely]] {
283 return {.ptr = nullptr, .allocated_size = 0};
284 }
285
286 // Lock-free bump-pointer allocation.
287 size_t current_offset = offset_.load(std::memory_order_acquire);
288 size_t aligned_offset = 0;
289 size_t new_offset = 0;
290 size_t padding = 0;
291
292 do {
293 auto* current_ptr = static_cast<uint8_t*>(buffer_) + current_offset;
294 padding = CalculatePadding(current_ptr, alignment);
295 aligned_offset = current_offset + padding;
296
297 if (aligned_offset + size > capacity_) {
298 return {.ptr = nullptr, .allocated_size = 0};
299 }
300
301 new_offset = aligned_offset + size;
302 } while (
303 !offset_.compare_exchange_weak(current_offset, new_offset, std::memory_order_release, std::memory_order_acquire));
304
305 allocation_count_.fetch_add(1, std::memory_order_relaxed);
306 alignment_waste_.fetch_add(padding, std::memory_order_relaxed);
307
308 size_t current_peak = peak_offset_.load(std::memory_order_acquire);
309 while (new_offset > current_peak) {
310 if (peak_offset_.compare_exchange_weak(current_peak, new_offset, std::memory_order_release,
311 std::memory_order_acquire)) {
312 break;
313 }
314 }
315
316 auto* result = static_cast<uint8_t*>(buffer_) + aligned_offset;
317 return {.ptr = result, .allocated_size = size};
318}
319
320inline void ArenaAllocator::Reset() noexcept {
321 offset_.store(0, std::memory_order_release);
322 alignment_waste_.store(0, std::memory_order_release);
323 allocation_count_.store(0, std::memory_order_release);
324 // We intentionally keep peak_offset_ to track high-water mark over lifetime.
325}
326
327inline AllocatorStats ArenaAllocator::Stats() const noexcept {
328 const auto current_offset = offset_.load(std::memory_order_relaxed);
329 const auto peak = peak_offset_.load(std::memory_order_relaxed);
330 const auto alloc_count = allocation_count_.load(std::memory_order_relaxed);
331 const auto waste = alignment_waste_.load(std::memory_order_relaxed);
332
333 // In an arena, we conceptually "free" everything on reset, but we do not track per-block frees.
334 // total_freed is modeled as 0; deallocations count remains 0 because `Deallocate` is a no-op.
335 return {
336 .total_allocated = current_offset,
337 .total_freed = 0,
338 .peak_usage = peak,
339 .allocation_count = alloc_count,
340 .total_allocations = alloc_count,
341 .total_deallocations = 0,
342 .alignment_waste = waste,
343 };
344}
345
346inline size_t ArenaAllocator::FreeSpace() const noexcept {
347 const auto current = offset_.load(std::memory_order_relaxed);
348 return current < capacity_ ? capacity_ - current : 0;
349}
350
351template <typename T>
352inline T* ArenaAllocator::Allocate() noexcept {
353 constexpr size_t size = sizeof(T);
354 constexpr size_t alignment = std::max(alignof(T), kMinAlignment);
355 auto result = Allocate(size, alignment);
356 return static_cast<T*>(result.ptr);
357}
358
359template <typename T>
360inline T* ArenaAllocator::Allocate(size_t count) noexcept {
361 if (count == 0) [[unlikely]] {
362 return nullptr;
363 }
364 constexpr size_t alignment = std::max(alignof(T), kMinAlignment);
365 const size_t size = sizeof(T) * count;
366 auto result = Allocate(size, alignment);
367 return static_cast<T*>(result.ptr);
368}
369
370template <typename T, typename... Args>
371 requires std::constructible_from<T, Args...>
372inline T* ArenaAllocator::AllocateAndConstruct(Args&&... args) noexcept(std::is_nothrow_constructible_v<T, Args...>) {
373 T* ptr = Allocate<T>();
374 if (ptr != nullptr) [[likely]] {
375 std::construct_at(ptr, std::forward<Args>(args)...);
376 }
377 return ptr;
378}
379
380template <typename T>
381 requires std::default_initializable<T>
382inline T* ArenaAllocator::AllocateAndConstructArray(size_t count) noexcept(std::is_nothrow_default_constructible_v<T>) {
383 T* ptr = Allocate<T>(count);
384 if (ptr != nullptr) [[likely]] {
385 for (size_t i = 0; i < count; ++i) {
386 std::construct_at(ptr + i);
387 }
388 }
389 return ptr;
390}
391
392} // namespace helios::memory
#define HELIOS_ASSERT(condition,...)
Assertion macro that aborts execution in debug builds.
Definition assert.hpp:140
Lock-free, thread-safe arena allocator.
void Reset() noexcept
Resets the arena, freeing all allocations.
~ArenaAllocator() noexcept=default
bool Full() const noexcept
Checks if the arena is full.
ArenaAllocator(const ArenaAllocator &)=delete
ArenaAllocator(void *buffer, size_t size) noexcept
Constructs an arena allocator over an existing buffer.
size_t FreeSpace() const noexcept
Gets the amount of free space remaining.
AllocatorStats Stats() const noexcept
Gets current allocator statistics.
bool Empty() const noexcept
Checks if the arena is empty.
ArenaAllocator & operator=(const ArenaAllocator &)=delete
T * AllocateAndConstructArray(size_t count) noexcept(std::is_nothrow_default_constructible_v< T >)
T * AllocateAndConstruct(Args &&... args) noexcept(std::is_nothrow_constructible_v< T, Args... >)
const void * Data() const noexcept
Gets the raw backing buffer pointer (const).
size_t Capacity() const noexcept
Gets the total capacity of the arena.
void * Data() noexcept
Gets the raw backing buffer pointer.
size_t CurrentOffset() const noexcept
Gets the current offset (amount of memory used).
void Deallocate(const void *) noexcept
Deallocation is a no-op.
size_t CalculatePadding(const void *ptr, size_t alignment) noexcept
Calculate padding needed for alignment.
constexpr size_t kDefaultAlignment
Default alignment for allocations (cache line size for most modern CPUs).
constexpr bool IsPowerOfTwo(size_t size) noexcept
Helper function to check if a size is a power of 2.
constexpr size_t kMinAlignment
Minimum alignment for any allocation.
constexpr T * Allocate(Alloc &allocator) noexcept
STL namespace.
Result type for allocation operations.
Statistics for tracking allocator usage.