Helios Engine 0.1.0
A modular ECS based data-oriented C++23 game engine
 
Loading...
Searching...
No Matches
stack_allocator.hpp
Go to the documentation of this file.
1#pragma once
2
3#include <helios/core_pch.hpp>
4
9
10#include <algorithm>
11#include <atomic>
12#include <concepts>
13#include <cstddef>
14#include <cstdint>
15#include <cstring>
16#include <memory>
17
18namespace helios::memory {
19
20/**
21 * @brief Stack/linear allocator with LIFO deallocation support.
22 * @details Allocates memory sequentially using a bump pointer, but unlike FrameAllocator,
23 * supports LIFO (stack-like) deallocations.
24 * Each allocation stores a header with the previous offset, allowing proper unwinding.
25 *
26 * Ideal for hierarchical/scoped allocations where deallocation follows
27 * allocation order (e.g., call stacks, recursive algorithms).
28 *
29 * Each allocation has a small header overhead for tracking.
30 *
31 * Uses lock-free atomic operations for thread-safe allocations.
32 *
33 * @note Thread-safe: Multiple threads can safely call Allocate() concurrently.
34 * @warning Deallocations must follow LIFO order (stack discipline).
35 * @warning Move operations (move constructor and move assignment) are NOT thread-safe
36 * and must be externally synchronized. Do not move an allocator while other threads
37 * are accessing it.
38 */
39class StackAllocator final {
40public:
41 /**
42 * @brief Constructs a stack allocator with specified capacity.
43 * @warning Triggers assertion if capacity is 0.
44 * @param capacity Total size of the memory buffer in bytes
45 */
46 explicit StackAllocator(size_t capacity);
47 StackAllocator(StackAllocator&& other) noexcept;
49 ~StackAllocator() noexcept;
50
51 StackAllocator& operator=(const StackAllocator&) = delete;
52 StackAllocator& operator=(StackAllocator&& other) noexcept;
53
54 /**
55 * @brief Allocates memory with specified size and alignment.
56 * @details Stores allocation header for LIFO deallocation support
57 * @warning Triggers assertion in next cases:
58 * - Alignment is not a power of 2.
59 * - Alignment is less than kMinAlignment.
60 * @param size Number of bytes to allocate
61 * @param alignment Alignment requirement (must be power of 2)
62 * @return AllocationResult with pointer and actual allocated size, or {nullptr, 0} on failure
63 */
64 [[nodiscard]] AllocationResult Allocate(size_t size, size_t alignment = kDefaultAlignment) noexcept;
65
66 /**
67 * @brief Allocates memory for a single object of type T.
68 * @details Convenience function that calculates size and alignment from the type.
69 * The returned memory is uninitialized - use placement new to construct the object.
70 * @tparam T Type to allocate memory for
71 * @return Pointer to allocated memory, or nullptr on failure
72 *
73 * @example
74 * @code
75 * StackAllocator alloc(1024);
76 * int* ptr = alloc.Allocate<int>();
77 * if (ptr != nullptr) {
78 * new (ptr) int(42);
79 * }
80 * @endcode
81 */
82 template <typename T>
83 [[nodiscard]] T* Allocate() noexcept;
84
85 /**
86 * @brief Allocates memory for an array of objects of type T.
87 * @details Convenience function that calculates size and alignment from the type.
88 * The returned memory is uninitialized - use placement new to construct objects.
89 * @tparam T Type to allocate memory for
90 * @param count Number of objects to allocate space for
91 * @return Pointer to allocated memory, or nullptr on failure
92 *
93 * @example
94 * @code
95 * StackAllocator alloc(1024);
96 * int* arr = alloc.Allocate<int>(10);
97 * @endcode
98 */
99 template <typename T>
100 [[nodiscard]] T* Allocate(size_t count) noexcept;
101
102 /**
103 * @brief Allocates and constructs a single object of type T.
104 * @details Convenience function that allocates memory and constructs the object in-place.
105 * @tparam T Type to allocate and construct
106 * @tparam Args Constructor argument types
107 * @param args Arguments to forward to T's constructor
108 * @return Pointer to constructed object, or nullptr on allocation failure
109 *
110 * @example
111 * @code
112 * StackAllocator alloc(1024);
113 * auto* vec = alloc.AllocateAndConstruct<MyVec3>(1.0f, 2.0f, 3.0f);
114 * @endcode
115 */
116 template <typename T, typename... Args>
117 requires std::constructible_from<T, Args...>
118 [[nodiscard]] T* AllocateAndConstruct(Args&&... args) noexcept(std::is_nothrow_constructible_v<T, Args...>);
119
120 /**
121 * @brief Allocates and default-constructs an array of objects of type T.
122 * @details Convenience function that allocates memory and default-constructs objects in-place.
123 * @tparam T Type to allocate and construct (must be default constructible)
124 * @param count Number of objects to allocate and construct
125 * @return Pointer to first constructed object, or nullptr on allocation failure
126 *
127 * @example
128 * @code
129 * StackAllocator alloc(1024);
130 * auto* arr = alloc.AllocateAndConstructArray<MyType>(10);
131 * @endcode
132 */
133 template <typename T>
134 requires std::default_initializable<T>
135 [[nodiscard]] T* AllocateAndConstructArray(size_t count) noexcept(std::is_nothrow_default_constructible_v<T>);
136
137 /**
138 * @brief Deallocates memory in LIFO order.
139 * @warning Triggers assertion in next cases:
140 * - Pointer does not belong to this allocator.
141 * - Deallocation violates LIFO order.
142 * @param ptr Pointer to deallocate (must be the most recent allocation)
143 * @param size Size of allocation (used for validation)
144 */
145 void Deallocate(void* ptr, size_t size) noexcept;
146
147 /**
148 * @brief Resets the allocator, freeing all allocations.
149 * @details Resets the internal offset to 0, effectively freeing all memory.
150 */
151 void Reset() noexcept;
152
153 /**
154 * @brief Rewinds the stack to a previous marker position.
155 * @details Invalidates all allocations made after the marker
156 * @warning Triggers assertion in next cases:
157 * - Marker is ahead of current offset.
158 * - Marker exceeds capacity.
159 * @param marker Previously obtained marker from Marker()
160 */
161 void RewindToMarker(size_t marker) noexcept;
162
163 /**
164 * @brief Checks if the allocator is empty.
165 * @return True if no allocations have been made since last reset
166 */
167 [[nodiscard]] bool Empty() const noexcept { return offset_.load(std::memory_order_relaxed) == 0; }
168
169 /**
170 * @brief Checks if the allocator is full.
171 * @return True if no more allocations can be made without reset
172 */
173 [[nodiscard]] bool Full() const noexcept { return offset_.load(std::memory_order_relaxed) >= capacity_; }
174
175 /**
176 * @brief Checks if a pointer belongs to this allocator.
177 * @param ptr Pointer to check
178 * @return True if pointer is within allocator's memory range
179 */
180 [[nodiscard]] bool Owns(const void* ptr) const noexcept;
181
182 /**
183 * @brief Gets current allocator statistics.
184 * @return AllocatorStats with current usage information
185 */
186 [[nodiscard]] AllocatorStats Stats() const noexcept;
187
188 /**
189 * @brief Gets a marker for the current stack position.
190 * @note Can be used with RewindToMarker for bulk deallocations.
191 * @return Current offset as a marker
192 */
193 [[nodiscard]] size_t Marker() const noexcept { return offset_.load(std::memory_order_relaxed); }
194
195 /**
196 * @brief Gets the total capacity of the allocator.
197 * @return Capacity in bytes
198 */
199 [[nodiscard]] size_t Capacity() const noexcept { return capacity_.load(std::memory_order_relaxed); }
200
201 /**
202 * @brief Gets the current offset (amount of memory used).
203 * @return Current offset in bytes
204 */
205 [[nodiscard]] size_t CurrentOffset() const noexcept { return offset_.load(std::memory_order_relaxed); }
206
207 /**
208 * @brief Gets the amount of free space remaining.
209 * @return Free space in bytes
210 */
211 [[nodiscard]] size_t FreeSpace() const noexcept;
212
213private:
214 /**
215 * @brief Allocation header stored before each allocation.
216 * @details Tracks previous offset for stack unwinding.
217 */
218 struct AllocationHeader {
219 size_t previous_offset = 0; ///< Offset before this allocation
220 size_t padding = 0; ///< Padding used for alignment
221 };
222
223 void* buffer_ = nullptr; ///< Underlying memory buffer
224 std::atomic<size_t> capacity_{0}; ///< Total capacity in bytes
225 std::atomic<size_t> offset_{0}; ///< Current allocation offset
226 std::atomic<size_t> peak_offset_{0}; ///< Peak offset reached
227 std::atomic<size_t> allocation_count_{0}; ///< Number of active allocations
228 std::atomic<size_t> total_allocations_{0}; ///< Total allocations made
229 std::atomic<size_t> total_deallocations_{0}; ///< Total deallocations made
230 std::atomic<size_t> alignment_waste_{0}; ///< Total bytes wasted due to alignment
231};
232
// Acquires the backing buffer up front; capacity is fixed for the allocator's lifetime.
inline StackAllocator::StackAllocator(size_t capacity) : capacity_{capacity} {
  HELIOS_ASSERT(capacity > 0, "Failed to construct StackAllocator: capacity must be greater than 0!");

  // Allocate aligned buffer (kDefaultAlignment so any supported alignment fits).
  buffer_ = AlignedAlloc(kDefaultAlignment, capacity);
  HELIOS_VERIFY(buffer_ != nullptr, "Failed to construct StackAllocator: Allocation of StackAllocator buffer failed!");
}
240
242 if (buffer_ != nullptr) {
243 AlignedFree(buffer_);
244 }
245}
246
248 : buffer_(other.buffer_),
249 capacity_{other.capacity_.load(std::memory_order_relaxed)},
250 offset_(other.offset_.load(std::memory_order_acquire)),
251 peak_offset_(other.peak_offset_.load(std::memory_order_acquire)),
252 allocation_count_(other.allocation_count_.load(std::memory_order_acquire)),
253 total_allocations_(other.total_allocations_.load(std::memory_order_acquire)),
254 total_deallocations_(other.total_deallocations_.load(std::memory_order_acquire)),
255 alignment_waste_(other.alignment_waste_.load(std::memory_order_acquire)) {
256 other.buffer_ = nullptr;
257 other.capacity_.store(0, std::memory_order_relaxed);
258 other.offset_.store(0, std::memory_order_release);
259 other.peak_offset_.store(0, std::memory_order_release);
260 other.allocation_count_.store(0, std::memory_order_release);
261 other.total_allocations_.store(0, std::memory_order_release);
262 other.total_deallocations_.store(0, std::memory_order_release);
263 other.alignment_waste_.store(0, std::memory_order_release);
264}
265
267 if (this == &other) [[unlikely]] {
268 return *this;
269 }
270
271 // Free current buffer
272 if (buffer_ != nullptr) {
273 AlignedFree(buffer_);
274 }
275
276 // Move from other
277 buffer_ = other.buffer_;
278 capacity_.store(other.capacity_.load(std::memory_order_relaxed), std::memory_order_relaxed);
279
280 offset_.store(other.offset_.load(std::memory_order_acquire), std::memory_order_release);
281 peak_offset_.store(other.peak_offset_.load(std::memory_order_acquire), std::memory_order_release);
282 allocation_count_.store(other.allocation_count_.load(std::memory_order_acquire), std::memory_order_release);
283 total_allocations_.store(other.total_allocations_.load(std::memory_order_acquire), std::memory_order_release);
284 total_deallocations_.store(other.total_deallocations_.load(std::memory_order_acquire), std::memory_order_release);
285 alignment_waste_.store(other.alignment_waste_.load(std::memory_order_acquire), std::memory_order_release);
286
287 // Reset other
288 other.buffer_ = nullptr;
289 other.capacity_.store(0, std::memory_order_relaxed);
290 other.offset_.store(0, std::memory_order_release);
291 other.peak_offset_.store(0, std::memory_order_release);
292 other.allocation_count_.store(0, std::memory_order_release);
293 other.total_allocations_.store(0, std::memory_order_release);
294 other.total_deallocations_.store(0, std::memory_order_release);
295 other.alignment_waste_.store(0, std::memory_order_release);
296
297 return *this;
298}
299
// Lock-free bump allocation: a CAS loop reserves [current_offset, new_offset),
// then the header is written into the reserved range.
inline AllocationResult StackAllocator::Allocate(size_t size, size_t alignment) noexcept {
  HELIOS_ASSERT(IsPowerOfTwo(alignment), "Failed to allocate memory: alignment must be power of 2, got '{}'!",
                alignment);
  HELIOS_ASSERT(alignment >= kMinAlignment, "Failed to allocate memory: alignment must be at least '{}', got '{}'!",
                kMinAlignment, alignment);

  // Zero-byte requests are a contractual no-op.
  if (size == 0) [[unlikely]] {
    return {.ptr = nullptr, .allocated_size = 0};
  }

  // Lock-free allocation using compare-and-swap
  size_t current_offset = offset_.load(std::memory_order_acquire);
  size_t new_offset = 0;
  size_t header_padding = 0;
  uint8_t* current_ptr = nullptr;

  do {
    // Calculate space needed for header + alignment.
    // header_padding includes room for the AllocationHeader preceding the data,
    // so the data pointer (current_ptr + header_padding) is `alignment`-aligned.
    current_ptr = static_cast<uint8_t*>(buffer_) + current_offset;
    header_padding = CalculatePaddingWithHeader(current_ptr, alignment, sizeof(AllocationHeader));
    const size_t required_space = header_padding + size;

    // Check if we have enough space
    if (current_offset + required_space > capacity_.load(std::memory_order_relaxed)) {
      return {.ptr = nullptr, .allocated_size = 0};
    }

    new_offset = current_offset + required_space;

    // Try to atomically update offset. On failure, compare_exchange_weak reloads
    // current_offset, so padding is recomputed for the new base on the next pass.
  } while (
    !offset_.compare_exchange_weak(current_offset, new_offset, std::memory_order_release, std::memory_order_acquire));

  // Successfully reserved space - now write the header
  // This is safe because we own the range [current_offset, new_offset)
  auto* header_ptr = current_ptr + header_padding - sizeof(AllocationHeader);
  auto* header = reinterpret_cast<AllocationHeader*>(header_ptr);
  header->previous_offset = current_offset;
  header->padding = header_padding;

  // Update stats (relaxed: counters only, no ordering needed)
  allocation_count_.fetch_add(1, std::memory_order_relaxed);
  total_allocations_.fetch_add(1, std::memory_order_relaxed);
  alignment_waste_.fetch_add(header_padding - sizeof(AllocationHeader), std::memory_order_relaxed);

  // Update peak offset (CAS loop: only advance, never regress)
  size_t current_peak = peak_offset_.load(std::memory_order_acquire);
  while (new_offset > current_peak) {
    if (peak_offset_.compare_exchange_weak(current_peak, new_offset, std::memory_order_release,
                                           std::memory_order_acquire)) {
      break;
    }
  }

  // Calculate aligned data pointer
  // NOTE(review): allocated_size reports the requested `size`, not required_space
  // (padding excluded) — confirm that matches AllocationResult's contract.
  uint8_t* data_ptr = current_ptr + header_padding;
  return {data_ptr, size};
}
358
// LIFO deallocation: reads the header stored immediately before `ptr` and rewinds
// the bump pointer to the offset recorded at allocation time.
inline void StackAllocator::Deallocate(void* ptr, [[maybe_unused]] size_t size) noexcept {
  if (ptr == nullptr) [[unlikely]] {
    return;
  }

  HELIOS_ASSERT(Owns(ptr), "Failed to deallocate memory: ptr does not belong to this allocator!");

  // Get the header stored before the allocation (Allocate() places an
  // AllocationHeader directly below the returned data pointer).
  auto* header = reinterpret_cast<AllocationHeader*>(static_cast<uint8_t*>(ptr) - sizeof(AllocationHeader));

#ifdef HELIOS_DEBUG_MODE
  {
    // Verify this is the most recent allocation (LIFO check)
    const size_t current_offset = offset_.load(std::memory_order_acquire);
    HELIOS_ASSERT(static_cast<uint8_t*>(ptr) + size <= static_cast<uint8_t*>(buffer_) + current_offset,
                  "Failed to deallocate memory: Deallocation violates LIFO order!");
  }
#endif

  // Rewind to previous offset
  // Note: LIFO deallocations are expected to be single-threaded per stack or externally synchronized
  // (a plain store, not a CAS — racing with concurrent Allocate() would lose reservations).
  offset_.store(header->previous_offset, std::memory_order_release);

  // Update stats
  allocation_count_.fetch_sub(1, std::memory_order_relaxed);
  total_deallocations_.fetch_add(1, std::memory_order_relaxed);
}
386
// Bulk-free everything by rewinding the bump pointer to the start.
// All outstanding pointers become invalid.
inline void StackAllocator::Reset() noexcept {
  offset_.store(0, std::memory_order_release);
  allocation_count_.store(0, std::memory_order_release);
  alignment_waste_.store(0, std::memory_order_release);
  // NOTE(review): peak_offset_, total_allocations_ and total_deallocations_ are
  // intentionally left untouched so Stats() keeps lifetime figures — confirm intent.
}
392
// Bulk-free back to a position previously captured via Marker().
// Everything allocated after the marker becomes invalid.
inline void StackAllocator::RewindToMarker(size_t marker) noexcept {
  // current_offset is only read for the debug assertion below.
  [[maybe_unused]] const size_t current_offset = offset_.load(std::memory_order_acquire);
  HELIOS_ASSERT(marker <= current_offset, "Failed to rewind to marker: marker '{}' is ahead of current offset '{}'!",
                marker, current_offset);

  const size_t capacity = capacity_.load(std::memory_order_relaxed);
  HELIOS_ASSERT(marker <= capacity, "Failed to rewind to marker: marker '{}' exceeds capacity '{}'!", marker, capacity);

  offset_.store(marker, std::memory_order_release);

  // Note: allocation_count_ becomes inaccurate after rewind, but that's acceptable
  // since we're doing bulk deallocation
}
406
407inline bool StackAllocator::Owns(const void* ptr) const noexcept {
408 if (ptr == nullptr || buffer_ == nullptr) {
409 return false;
410 }
411
412 const uintptr_t start = reinterpret_cast<uintptr_t>(buffer_);
413 const auto addr = reinterpret_cast<uintptr_t>(ptr);
414 const uintptr_t end = start + capacity_.load(std::memory_order_relaxed);
415
416 return addr >= start && addr < end;
417}
418
// Snapshot of the usage counters. Loads are relaxed, so concurrent mutation may
// yield a slightly inconsistent (but individually valid) set of values.
inline AllocatorStats StackAllocator::Stats() const noexcept {
  const size_t current_offset = offset_.load(std::memory_order_relaxed);
  const size_t peak = peak_offset_.load(std::memory_order_relaxed);
  const size_t alloc_count = allocation_count_.load(std::memory_order_relaxed);
  const size_t total_allocs = total_allocations_.load(std::memory_order_relaxed);
  const size_t total_deallocs = total_deallocations_.load(std::memory_order_relaxed);
  const size_t waste = alignment_waste_.load(std::memory_order_relaxed);

  return {
    .total_allocated = current_offset,
    // NOTE(review): total_freed is hard-coded to 0 here; freed bytes are not
    // tracked by this allocator — confirm that matches AllocatorStats' contract.
    .total_freed = 0,
    .peak_usage = peak,
    .allocation_count = alloc_count,
    .total_allocations = total_allocs,
    .total_deallocations = total_deallocs,
    .alignment_waste = waste,
  };
}
437
438inline size_t StackAllocator::FreeSpace() const noexcept {
439 const size_t current = offset_.load(std::memory_order_relaxed);
440 const size_t capacity = capacity_.load(std::memory_order_relaxed);
441 return current < capacity ? capacity - current : 0;
442}
443
444template <typename T>
445inline T* StackAllocator::Allocate() noexcept {
446 constexpr size_t size = sizeof(T);
447 constexpr size_t alignment = std::max(alignof(T), kMinAlignment);
448 auto result = Allocate(size, alignment);
449 return static_cast<T*>(result.ptr);
450}
451
452template <typename T>
453inline T* StackAllocator::Allocate(size_t count) noexcept {
454 if (count == 0) [[unlikely]] {
455 return nullptr;
456 }
457 constexpr size_t alignment = std::max(alignof(T), kMinAlignment);
458 const size_t size = sizeof(T) * count;
459 auto result = Allocate(size, alignment);
460 return static_cast<T*>(result.ptr);
461}
462
463template <typename T, typename... Args>
464 requires std::constructible_from<T, Args...>
465inline T* StackAllocator::AllocateAndConstruct(Args&&... args) noexcept(std::is_nothrow_constructible_v<T, Args...>) {
466 T* ptr = Allocate<T>();
467 if (ptr != nullptr) [[likely]] {
468 std::construct_at(ptr, std::forward<Args>(args)...);
469 }
470 return ptr;
471}
472
473template <typename T>
474 requires std::default_initializable<T>
475inline T* StackAllocator::AllocateAndConstructArray(size_t count) noexcept(std::is_nothrow_default_constructible_v<T>) {
476 T* ptr = Allocate<T>(count);
477 if (ptr != nullptr) [[likely]] {
478 for (size_t i = 0; i < count; ++i) {
479 std::construct_at(ptr + i);
480 }
481 }
482 return ptr;
483}
484
485} // namespace helios::memory
#define HELIOS_ASSERT(condition,...)
Assertion macro that aborts execution in debug builds.
Definition assert.hpp:140
#define HELIOS_VERIFY(condition,...)
Verify macro that always checks the condition.
Definition assert.hpp:196
Stack/linear allocator with LIFO deallocation support.
StackAllocator(const StackAllocator &)=delete
T * AllocateAndConstruct(Args &&... args) noexcept(std::is_nothrow_constructible_v< T, Args... >)
T * AllocateAndConstructArray(size_t count) noexcept(std::is_nothrow_default_constructible_v< T >)
StackAllocator & operator=(const StackAllocator &)=delete
AllocatorStats Stats() const noexcept
Gets current allocator statistics.
void Reset() noexcept
Resets the allocator, freeing all allocations.
bool Owns(const void *ptr) const noexcept
Checks if a pointer belongs to this allocator.
size_t FreeSpace() const noexcept
Gets the amount of free space remaining.
void Deallocate(void *ptr, size_t size) noexcept
Deallocates memory in LIFO order.
StackAllocator(size_t capacity)
Constructs a stack allocator with specified capacity.
size_t CurrentOffset() const noexcept
Gets the current offset (amount of memory used).
void RewindToMarker(size_t marker) noexcept
Rewinds the stack to a previous marker position.
size_t Marker() const noexcept
Gets a marker for the current stack position.
bool Empty() const noexcept
Checks if the allocator is empty.
size_t Capacity() const noexcept
Gets the total capacity of the allocator.
bool Full() const noexcept
Checks if the allocator is full.
constexpr size_t kDefaultAlignment
Default alignment for allocations (cache line size for most modern CPUs).
void * AlignedAlloc(size_t alignment, size_t size)
Allocate memory with the specified alignment.
Definition common.hpp:30
constexpr bool IsPowerOfTwo(size_t size) noexcept
Helper function to check if a size is a power of 2.
constexpr size_t kMinAlignment
Minimum alignment for any allocation.
void AlignedFree(void *ptr)
Free memory allocated with AlignedAlloc.
Definition common.hpp:53
constexpr T * Allocate(Alloc &allocator) noexcept
size_t CalculatePaddingWithHeader(const void *ptr, size_t alignment, size_t header_size) noexcept
Calculate padding with header for alignment.
STL namespace.
Result type for allocation operations.
Statistics for tracking allocator usage.