Helios Engine 0.1.0
A modular ECS based data-oriented C++23 game engine
 
Loading...
Searching...
No Matches
free_list_allocator.hpp
Go to the documentation of this file.
1#pragma once
2
3#include <helios/core_pch.hpp>
4
9
10#include <algorithm>
11#include <atomic>
12#include <concepts>
13#include <cstddef>
14#include <cstdint>
15#include <cstring>
16#include <memory>
17#include <mutex>
18
19namespace helios::memory {
20
21/**
22 * @brief Free list allocator with best-fit strategy.
23 * @details General-purpose allocator that maintains a free list of availablememory blocks.
24 * Supports arbitrary allocation sizes and orders. Uses a best-fit allocation strategy to minimize fragmentation.
25 *
26 * Each free block contains a header with size and pointer to the next free block.
27 * Allocated blocks also store a header with size information for deallocation.
28 *
29 * Supports arbitrary allocation and deallocation order.
30 * May suffer from fragmentation over time with varied allocation patterns.
31 * Adjacent free blocks are coalesced to reduce fragmentation.
32 *
33 * @note Thread-safe.
34 */
35class FreeListAllocator final {
36public:
37 /**
38 * @brief Constructs a free list allocator with specified capacity.
39 * @warning Triggers assertion if capacity is less than or equal to sizeof(FreeBlockHeader).
40 * @param capacity Total size of the memory buffer in bytes
41 */
42 explicit FreeListAllocator(size_t capacity);
44 FreeListAllocator(FreeListAllocator&& other) noexcept;
45 ~FreeListAllocator() noexcept;
46
47 FreeListAllocator& operator=(const FreeListAllocator&) = delete;
48 FreeListAllocator& operator=(FreeListAllocator&& other) noexcept;
49
50 /**
51 * @brief Allocates memory with specified size and alignment.
52 * @note Uses best-fit strategy to find the smallest suitable free block
53 * @warning Triggers assertion in next cases:
54 * - Alignment is not a power of 2.
55 * - Alignment is less than kMinAlignment.
56 * @param size Number of bytes to allocate
57 * @param alignment Alignment requirement (must be power of 2)
58 * @return AllocationResult with pointer and actual allocated size, or {nullptr, 0} on failure
59 */
60 [[nodiscard]] AllocationResult Allocate(size_t size, size_t alignment = kDefaultAlignment) noexcept;
61
62 /**
63 * @brief Allocates memory for a single object of type T.
64 * @details Convenience function that calculates size and alignment from the type.
65 * The returned memory is uninitialized - use placement new to construct the object.
66 * @tparam T Type to allocate memory for
67 * @return Pointer to allocated memory, or nullptr on failure
68 *
69 * @example
70 * @code
71 * FreeListAllocator alloc(1024);
72 * int* ptr = alloc.Allocate<int>();
73 * if (ptr != nullptr) {
74 * new (ptr) int(42);
75 * }
76 * @endcode
77 */
78 template <typename T>
79 [[nodiscard]] T* Allocate() noexcept;
80
81 /**
82 * @brief Allocates memory for an array of objects of type T.
83 * @details Convenience function that calculates size and alignment from the type.
84 * The returned memory is uninitialized - use placement new to construct objects.
85 * @tparam T Type to allocate memory for
86 * @param count Number of objects to allocate space for
87 * @return Pointer to allocated memory, or nullptr on failure
88 *
89 * @example
90 * @code
91 * FreeListAllocator alloc(1024);
92 * int* arr = alloc.Allocate<int>(10);
93 * @endcode
94 */
95 template <typename T>
96 [[nodiscard]] T* Allocate(size_t count) noexcept;
97
98 /**
99 * @brief Allocates and constructs a single object of type T.
100 * @details Convenience function that allocates memory and constructs the object in-place.
101 * @tparam T Type to allocate and construct
102 * @tparam Args Constructor argument types
103 * @param args Arguments to forward to T's constructor
104 * @return Pointer to constructed object, or nullptr on allocation failure
105 *
106 * @example
107 * @code
108 * FreeListAllocator alloc(1024);
109 * auto* vec = alloc.AllocateAndConstruct<MyVec3>(1.0f, 2.0f, 3.0f);
110 * @endcode
111 */
112 template <typename T, typename... Args>
113 requires std::constructible_from<T, Args...>
114 [[nodiscard]] T* AllocateAndConstruct(Args&&... args) noexcept(std::is_nothrow_constructible_v<T, Args...>);
115
116 /**
117 * @brief Allocates and default-constructs an array of objects of type T.
118 * @details Convenience function that allocates memory and default-constructs objects in-place.
119 * @tparam T Type to allocate and construct (must be default constructible)
120 * @param count Number of objects to allocate and construct
121 * @return Pointer to first constructed object, or nullptr on allocation failure
122 *
123 * @example
124 * @code
125 * FreeListAllocator alloc(1024);
126 * auto* arr = alloc.AllocateAndConstructArray<MyType>(10);
127 * @endcode
128 */
129 template <typename T>
130 requires std::default_initializable<T>
131 [[nodiscard]] T* AllocateAndConstructArray(size_t count) noexcept(std::is_nothrow_default_constructible_v<T>);
132
133 /**
134 * @brief Deallocates memory.
135 * @note Returns the block to the free list and coalesces adjacent free blocks
136 * @warning Triggers assertion if pointer does not belong to this allocator.
137 * @param ptr Pointer to deallocate
138 */
139 void Deallocate(void* ptr) noexcept;
140
141 /**
142 * @brief Resets the allocator, freeing all allocations.
143 * @details Recreates a single large free block encompassing the entire buffer.
144 */
145 void Reset() noexcept;
146
147 /**
148 * @brief Checks if the allocator is empty (all memory free).
149 * @return True if no allocations exist
150 */
151 [[nodiscard]] bool Empty() const noexcept { return allocation_count_.load(std::memory_order_acquire) == 0; }
152
153 /**
154 * @brief Checks if a pointer belongs to this allocator.
155 * @param ptr Pointer to check
156 * @return True if pointer is within allocator's memory range
157 */
158 [[nodiscard]] bool Owns(const void* ptr) const noexcept;
159
160 /**
161 * @brief Gets current allocator statistics.
162 * @return AllocatorStats with current usage information
163 */
164 [[nodiscard]] AllocatorStats Stats() const noexcept;
165
166 /**
167 * @brief Gets the total capacity of the allocator.
168 * @return Capacity in bytes
169 */
170 [[nodiscard]] size_t Capacity() const noexcept { return capacity_; }
171
172 /**
173 * @brief Gets the amount of allocated memory.
174 * @return Allocated bytes
175 */
176 [[nodiscard]] size_t UsedMemory() const noexcept { return used_memory_.load(std::memory_order_acquire); }
177
178 /**
179 * @brief Gets the amount of free memory.
180 * @return Free bytes
181 */
182 [[nodiscard]] size_t FreeMemory() const noexcept { return capacity_ - used_memory_.load(std::memory_order_acquire); }
183
184 /**
185 * @brief Gets the number of free blocks in the free list.
186 * @return Number of free blocks
187 */
188 [[nodiscard]] size_t FreeBlockCount() const noexcept { return free_block_count_.load(std::memory_order_acquire); }
189
190 /**
191 * @brief Gets the number of active allocations.
192 * @return Number of allocations
193 */
194 [[nodiscard]] size_t AllocationCount() const noexcept { return allocation_count_.load(std::memory_order_acquire); }
195
196private:
197 /**
198 * @brief Header for free blocks in the free list.
199 * @details Stores block size and links to next free block.
200 */
201 struct FreeBlockHeader {
202 size_t size = 0; ///< Size of the free block (including header)
203 FreeBlockHeader* next = nullptr; ///< Pointer to next free block
204 };
205
206 /**
207 * @brief Header for allocated blocks.
208 * @details Stores block size for deallocation.
209 */
210 struct AllocationHeader {
211 size_t size = 0; ///< Size of the allocated block (including header)
212 size_t padding = 0; ///< Padding used for alignment
213 };
214
215 /**
216 * @brief Finds the best-fit free block for the requested size.
217 * @param size Requested size (including header and padding)
218 * @param prev_block Output parameter for previous block in list
219 * @return Pointer to best-fit block or nullptr if not found
220 */
221 [[nodiscard]] FreeBlockHeader* FindBestFit(size_t size, FreeBlockHeader** prev_block) noexcept;
222
223 /**
224 * @brief Coalesces adjacent free blocks.
225 * @param block Block to coalesce with neighbors
226 */
227 void CoalesceAdjacent(FreeBlockHeader* block) noexcept;
228
229 /**
230 * @brief Initializes the allocator with a single large free block.
231 */
232 void InitializeFreeList() noexcept;
233
234 void* buffer_ = nullptr; ///< Underlying memory buffer
235 FreeBlockHeader* free_list_head_ = nullptr; ///< Head of free list
236 size_t capacity_ = 0; ///< Total capacity in bytes
237
238 std::atomic<size_t> used_memory_{0}; ///< Currently allocated bytes
239 std::atomic<size_t> peak_usage_{0}; ///< Peak memory usage
240 std::atomic<size_t> free_block_count_{0}; ///< Number of free blocks
241 std::atomic<size_t> allocation_count_{0}; ///< Number of active allocations
242 std::atomic<size_t> total_allocations_{0}; ///< Total allocations made
243 std::atomic<size_t> total_deallocations_{0}; ///< Total deallocations made
244 std::atomic<size_t> alignment_waste_{0}; ///< Total bytes wasted due to alignment
245
246 mutable std::mutex mutex_; ///< Mutex for thread-safety
247};
248
249inline FreeListAllocator::FreeListAllocator(size_t capacity) : capacity_(capacity) {
250 HELIOS_ASSERT(capacity > sizeof(FreeBlockHeader),
251 "Failed to construct FreeListAllocator: capacity must be greater than '{}' bytes!",
252 sizeof(FreeBlockHeader));
253
254 // Allocate aligned buffer
255 buffer_ = AlignedAlloc(kDefaultAlignment, capacity_);
256 HELIOS_VERIFY(buffer_ != nullptr, "Failed to construct FreeListAllocator: Allocation of buffer failed!");
257
258 InitializeFreeList();
259}
260
262 : buffer_(other.buffer_),
263 free_list_head_(other.free_list_head_),
264 capacity_(other.capacity_),
265 used_memory_{other.used_memory_.load(std::memory_order_acquire)},
266 peak_usage_{other.peak_usage_.load(std::memory_order_acquire)},
267 free_block_count_{other.free_block_count_.load(std::memory_order_acquire)},
268 allocation_count_{other.allocation_count_.load(std::memory_order_acquire)},
269 total_allocations_{other.total_allocations_.load(std::memory_order_acquire)},
270 total_deallocations_{other.total_deallocations_.load(std::memory_order_acquire)},
271 alignment_waste_{other.alignment_waste_.load(std::memory_order_acquire)} {
272 other.buffer_ = nullptr;
273 other.free_list_head_ = nullptr;
274 other.capacity_ = 0;
275 other.used_memory_.store(0, std::memory_order_release);
276 other.peak_usage_.store(0, std::memory_order_release);
277 other.free_block_count_.store(0, std::memory_order_release);
278 other.allocation_count_.store(0, std::memory_order_release);
279 other.total_allocations_.store(0, std::memory_order_release);
280 other.total_deallocations_.store(0, std::memory_order_release);
281 other.alignment_waste_.store(0, std::memory_order_release);
282}
283
285 if (buffer_ != nullptr) {
286 AlignedFree(buffer_);
287 }
288}
289
291 if (this == &other) [[unlikely]] {
292 return *this;
293 }
294
295 {
296 const std::scoped_lock lock(mutex_);
297 const std::scoped_lock other_lock(other.mutex_);
298
299 // Free current buffer
300 if (buffer_ != nullptr) {
301 AlignedFree(buffer_);
302 }
303
304 // Move from other
305 buffer_ = other.buffer_;
306 free_list_head_ = other.free_list_head_;
307 capacity_ = other.capacity_;
308
309 // Reset other
310 other.buffer_ = nullptr;
311 other.free_list_head_ = nullptr;
312 other.capacity_ = 0;
313 }
314
315 used_memory_.store(other.used_memory_.load(std::memory_order_acquire), std::memory_order_release);
316 peak_usage_.store(other.peak_usage_.load(std::memory_order_acquire), std::memory_order_release);
317 free_block_count_.store(other.free_block_count_.load(std::memory_order_acquire), std::memory_order_release);
318 allocation_count_.store(other.allocation_count_.load(std::memory_order_acquire), std::memory_order_release);
319 total_allocations_.store(other.total_allocations_.load(std::memory_order_acquire), std::memory_order_release);
320 total_deallocations_.store(other.total_deallocations_.load(std::memory_order_acquire), std::memory_order_release);
321 alignment_waste_.store(other.alignment_waste_.load(std::memory_order_acquire), std::memory_order_release);
322
323 other.used_memory_.store(0, std::memory_order_release);
324 other.peak_usage_.store(0, std::memory_order_release);
325 other.free_block_count_.store(0, std::memory_order_release);
326 other.allocation_count_.store(0, std::memory_order_release);
327 other.total_allocations_.store(0, std::memory_order_release);
328 other.total_deallocations_.store(0, std::memory_order_release);
329 other.alignment_waste_.store(0, std::memory_order_release);
330
331 return *this;
332}
333
334inline AllocationResult FreeListAllocator::Allocate(size_t size, size_t alignment) noexcept {
335 HELIOS_ASSERT(IsPowerOfTwo(alignment), "Failed to allocate memory: alignment must be power of 2, got '{}'!",
336 alignment);
337 HELIOS_ASSERT(alignment >= kMinAlignment, "Failed to allocate memory: alignment must be at least '{}', got '{}'!",
338 kMinAlignment, alignment);
339
340 if (size == 0) {
341 return {.ptr = nullptr, .allocated_size = 0};
342 }
343
344 // Calculate total size needed (with header and alignment padding)
345 const size_t header_size = sizeof(AllocationHeader);
346 const size_t min_block_size = header_size + size;
347
348 size_t total_size = 0;
349 size_t padding = 0;
350
351 AllocationResult result{.ptr = nullptr, .allocated_size = 0};
352
353 {
354 const std::scoped_lock lock(mutex_);
355
356 // Find best-fit block
357 FreeBlockHeader* prev_block = nullptr;
358 FreeBlockHeader* best_block = FindBestFit(min_block_size + alignment, &prev_block);
359
360 if (best_block == nullptr) [[unlikely]] {
361 return result;
362 }
363
364 // Calculate aligned position for user data
365 auto* block_start = reinterpret_cast<uint8_t*>(best_block);
366 uint8_t* header_ptr = block_start;
367 padding = CalculatePaddingWithHeader(header_ptr, alignment, header_size);
368 uint8_t* aligned_data = block_start + padding;
369
370 total_size = padding + size;
371 const size_t remaining_size = best_block->size - total_size;
372
373 // Remove block from free list
374 if (prev_block != nullptr) {
375 prev_block->next = best_block->next;
376 } else {
377 free_list_head_ = best_block->next;
378 }
379 free_block_count_.fetch_sub(1, std::memory_order_relaxed);
380
381 // If there's enough space left, create a new free block
382 constexpr size_t min_free_block_size = sizeof(FreeBlockHeader) + 16;
383 if (remaining_size >= min_free_block_size) {
384 auto* new_free_block = reinterpret_cast<FreeBlockHeader*>(block_start + total_size);
385 new_free_block->size = remaining_size;
386 new_free_block->next = free_list_head_;
387 free_list_head_ = new_free_block;
388 free_block_count_.fetch_add(1, std::memory_order_relaxed);
389 }
390
391 // Write allocation header
392 auto* alloc_header = reinterpret_cast<AllocationHeader*>(aligned_data - header_size);
393 alloc_header->size = total_size;
394 alloc_header->padding = padding;
395
396 result = {.ptr = aligned_data, .allocated_size = size};
397 }
398
399 // Update stats
400 const size_t new_used = used_memory_.fetch_add(total_size, std::memory_order_relaxed) + total_size;
401 size_t current_peak = peak_usage_.load(std::memory_order_acquire);
402 while (new_used > current_peak) {
403 if (peak_usage_.compare_exchange_weak(current_peak, new_used, std::memory_order_release,
404 std::memory_order_acquire)) {
405 break;
406 }
407 }
408
409 allocation_count_.fetch_add(1, std::memory_order_relaxed);
410 total_allocations_.fetch_add(1, std::memory_order_relaxed);
411 alignment_waste_.fetch_add(padding - header_size, std::memory_order_relaxed);
412
413 return result;
414}
415
416inline void FreeListAllocator::Deallocate(void* ptr) noexcept {
417 if (ptr == nullptr) [[unlikely]] {
418 return;
419 }
420
421 HELIOS_ASSERT(Owns(ptr), "Failed to deallocate memory: ptr does not belong to this allocator!");
422
423 // Get allocation header
424 auto* alloc_header = reinterpret_cast<AllocationHeader*>(static_cast<uint8_t*>(ptr) - sizeof(AllocationHeader));
425 const size_t block_size = alloc_header->size;
426
427 // Calculate block start (account for alignment padding)
428 auto* block_start = reinterpret_cast<uint8_t*>(ptr) - alloc_header->padding;
429
430 // Create new free block
431 auto* free_block = reinterpret_cast<FreeBlockHeader*>(block_start);
432 free_block->size = block_size;
433 free_block_count_.fetch_add(1, std::memory_order_relaxed);
434
435 // Update stats
436 used_memory_.fetch_sub(block_size, std::memory_order_relaxed);
437 allocation_count_.fetch_sub(1, std::memory_order_relaxed);
438 total_deallocations_.fetch_add(1, std::memory_order_relaxed);
439
440 {
441 const std::scoped_lock lock(mutex_);
442 free_block->next = free_list_head_;
443 free_list_head_ = free_block;
444
445 // Coalesce adjacent free blocks
446 CoalesceAdjacent(free_block);
447 }
448}
449
450inline void FreeListAllocator::Reset() noexcept {
451 {
452 const std::scoped_lock lock(mutex_);
453 InitializeFreeList();
454 }
455
456 used_memory_.store(0, std::memory_order_release);
457 allocation_count_.store(0, std::memory_order_release);
458 alignment_waste_.store(0, std::memory_order_release);
459 // Don't reset peak stats
460}
461
463 const size_t used = used_memory_.load(std::memory_order_acquire);
464 const size_t peak = peak_usage_.load(std::memory_order_acquire);
465 const size_t alloc_count = allocation_count_.load(std::memory_order_acquire);
466 const size_t total_allocs = total_allocations_.load(std::memory_order_acquire);
467 const size_t total_deallocs = total_deallocations_.load(std::memory_order_acquire);
468 const size_t waste = alignment_waste_.load(std::memory_order_acquire);
469
470 return {
471 .total_allocated = used,
472 .total_freed = capacity_ - used,
473 .peak_usage = peak,
474 .allocation_count = alloc_count,
475 .total_allocations = total_allocs,
476 .total_deallocations = total_deallocs,
477 .alignment_waste = waste,
478 };
479}
480
481inline bool FreeListAllocator::Owns(const void* ptr) const noexcept {
482 if (ptr == nullptr || buffer_ == nullptr) {
483 return false;
484 }
485
486 const auto addr = reinterpret_cast<uintptr_t>(ptr);
487 const auto start = reinterpret_cast<uintptr_t>(buffer_);
488 const auto end = start + capacity_;
489
490 return addr >= start && addr < end;
491}
492
493inline auto FreeListAllocator::FindBestFit(size_t size, FreeBlockHeader** prev_block) noexcept -> FreeBlockHeader* {
494 FreeBlockHeader* best_fit = nullptr;
495 FreeBlockHeader* best_fit_prev = nullptr;
496 size_t smallest_diff = SIZE_MAX;
497
498 FreeBlockHeader* current = free_list_head_;
499 FreeBlockHeader* prev = nullptr;
500
501 while (current != nullptr) {
502 if (current->size >= size) {
503 const size_t diff = current->size - size;
504 if (diff < smallest_diff) {
505 smallest_diff = diff;
506 best_fit = current;
507 best_fit_prev = prev;
508
509 // Perfect fit found
510 if (diff == 0) {
511 break;
512 }
513 }
514 }
515 prev = current;
516 current = current->next;
517 }
518
519 if (prev_block != nullptr) {
520 *prev_block = best_fit_prev;
521 }
522
523 return best_fit;
524}
525
// Merges `block` with physically adjacent free blocks to reduce fragmentation.
// Caller must hold mutex_ (Deallocate does): this walks and mutates the free list.
inline void FreeListAllocator::CoalesceAdjacent(FreeBlockHeader* block) noexcept {
    if (block == nullptr) [[unlikely]] {
        return;
    }

    // Coalesce adjacent blocks in a single pass
    // We'll track if block gets merged into another block
    bool block_merged = false;
    auto* block_start = reinterpret_cast<uint8_t*>(block);
    uint8_t* block_end = block_start + block->size;

    // First pass: try to merge blocks that come after 'block' in memory
    FreeBlockHeader* current = free_list_head_;
    FreeBlockHeader* prev = nullptr;

    while (current != nullptr) {
        if (current == block) {
            // Skip block itself — it must not be merged into itself.
            prev = current;
            current = current->next;
            continue;
        }

        auto* current_start = reinterpret_cast<uint8_t*>(current);

        // Check if current block is immediately after our block
        if (current_start == block_end) {
            // Merge current into block
            block->size += current->size;
            block_end = block_start + block->size;  // grown block may now touch another successor

            // Remove current from free list
            if (prev != nullptr) {
                prev->next = current->next;
            } else {
                free_list_head_ = current->next;
            }
            free_block_count_.fetch_sub(1, std::memory_order_relaxed);

            // Continue from the same prev position
            current = (prev != nullptr) ? prev->next : free_list_head_;
            continue;
        }

        prev = current;
        current = current->next;
    }

    // Second pass: check if our block should be merged into a block that comes before it
    current = free_list_head_;
    prev = nullptr;

    while (current != nullptr) {
        if (current == block) {
            prev = current;
            current = current->next;
            continue;
        }

        auto* current_start = reinterpret_cast<uint8_t*>(current);
        uint8_t* current_end = current_start + current->size;

        // Check if our block is immediately after current block
        if (current_end == block_start) {
            // Merge block into current
            current->size += block->size;

            // Remove block from free list (fresh linear rescan keeps pass-2 bookkeeping simple)
            FreeBlockHeader* scan = free_list_head_;
            FreeBlockHeader* scan_prev = nullptr;
            while (scan != nullptr) {
                if (scan == block) {
                    if (scan_prev != nullptr) {
                        scan_prev->next = scan->next;
                    } else {
                        free_list_head_ = scan->next;
                    }
                    free_block_count_.fetch_sub(1, std::memory_order_relaxed);
                    block_merged = true;
                    break;
                }
                scan_prev = scan;
                scan = scan->next;
            }

            // If we merged block into current, we're done
            if (block_merged) {
                return;
            }
        }

        prev = current;
        current = current->next;
    }
    // NOTE(review): coalescing is single-level — after pass 2 the enlarged
    // predecessor is not re-checked against its own successor. Both passes are
    // O(n) in free-list length, making Deallocate O(n); confirm acceptable.
}
620
621inline void FreeListAllocator::InitializeFreeList() noexcept {
622 // Create a single large free block encompassing the entire buffer
623 free_list_head_ = static_cast<FreeBlockHeader*>(buffer_);
624 free_list_head_->size = capacity_;
625 free_list_head_->next = nullptr;
626 free_block_count_.store(1, std::memory_order_release);
627}
628
629template <typename T>
630inline T* FreeListAllocator::Allocate() noexcept {
631 constexpr size_t size = sizeof(T);
632 constexpr size_t alignment = std::max(alignof(T), kMinAlignment);
633 auto result = Allocate(size, alignment);
634 return static_cast<T*>(result.ptr);
635}
636
637template <typename T>
638inline T* FreeListAllocator::Allocate(size_t count) noexcept {
639 if (count == 0) [[unlikely]] {
640 return nullptr;
641 }
642 constexpr size_t alignment = std::max(alignof(T), kMinAlignment);
643 const size_t size = sizeof(T) * count;
644 auto result = Allocate(size, alignment);
645 return static_cast<T*>(result.ptr);
646}
647
648template <typename T, typename... Args>
649 requires std::constructible_from<T, Args...>
650inline T* FreeListAllocator::AllocateAndConstruct(Args&&... args) noexcept(
651 std::is_nothrow_constructible_v<T, Args...>) {
652 T* ptr = Allocate<T>();
653 if (ptr != nullptr) [[likely]] {
654 std::construct_at(ptr, std::forward<Args>(args)...);
655 }
656 return ptr;
657}
658
659template <typename T>
660 requires std::default_initializable<T>
661inline T* FreeListAllocator::AllocateAndConstructArray(size_t count) noexcept(
662 std::is_nothrow_default_constructible_v<T>) {
663 T* ptr = Allocate<T>(count);
664 if (ptr != nullptr) [[likely]] {
665 for (size_t i = 0; i < count; ++i) {
666 std::construct_at(ptr + i);
667 }
668 }
669 return ptr;
670}
671
672} // namespace helios::memory
#define HELIOS_ASSERT(condition,...)
Assertion macro that aborts execution in debug builds.
Definition assert.hpp:140
#define HELIOS_VERIFY(condition,...)
Verify macro that always checks the condition.
Definition assert.hpp:196
Free list allocator with best-fit strategy.
FreeListAllocator & operator=(const FreeListAllocator &)=delete
size_t UsedMemory() const noexcept
Gets the amount of allocated memory.
size_t FreeMemory() const noexcept
Gets the amount of free memory.
FreeListAllocator(size_t capacity)
Constructs a free list allocator with specified capacity.
FreeListAllocator(const FreeListAllocator &)=delete
AllocatorStats Stats() const noexcept
Gets current allocator statistics.
void Deallocate(void *ptr) noexcept
Deallocates memory.
size_t Capacity() const noexcept
Gets the total capacity of the allocator.
bool Empty() const noexcept
Checks if the allocator is empty (all memory free).
T * AllocateAndConstruct(Args &&... args) noexcept(std::is_nothrow_constructible_v< T, Args... >)
size_t AllocationCount() const noexcept
Gets the number of active allocations.
bool Owns(const void *ptr) const noexcept
Checks if a pointer belongs to this allocator.
void Reset() noexcept
Resets the allocator, freeing all allocations.
size_t FreeBlockCount() const noexcept
Gets the number of free blocks in the free list.
T * AllocateAndConstructArray(size_t count) noexcept(std::is_nothrow_default_constructible_v< T >)
constexpr size_t kDefaultAlignment
Default alignment for allocations (cache line size for most modern CPUs).
void * AlignedAlloc(size_t alignment, size_t size)
Allocate memory with the specified alignment.
Definition common.hpp:30
constexpr bool IsPowerOfTwo(size_t size) noexcept
Helper function to check if a size is a power of 2.
constexpr size_t kMinAlignment
Minimum alignment for any allocation.
void AlignedFree(void *ptr)
Free memory allocated with AlignedAlloc.
Definition common.hpp:53
constexpr T * Allocate(Alloc &allocator) noexcept
size_t CalculatePaddingWithHeader(const void *ptr, size_t alignment, size_t header_size) noexcept
Calculate padding with header for alignment.
STL namespace.
Result type for allocation operations.
void * ptr
Pointer to allocated memory, nullptr on failure.
Statistics for tracking allocator usage.