use crate::log;
use core::alloc::GlobalAlloc;
use core::cell::UnsafeCell;
extern crate alloc;

const MAX: usize = crate::config::STATIC_ALLOCATOR_SIZE_BYTES;

#[global_allocator]
static GLOBAL_STATIC_ALLOCATOR: StaticAllocator<MAX> = StaticAllocator::new();

struct StaticAllocator<const SIZE: usize> {
    // Backing storage for every allocation; wrapped in `UnsafeCell` because
    // callers write through the raw pointers handed out by `alloc`.
    buffer: UnsafeCell<[u8; SIZE]>,
    // Address of the next free byte. Starts at 0 and is lazily initialised
    // on the first allocation, since the buffer's address is not available
    // in a const context.
    bump_addr: spin::Mutex<usize>,
}

// SAFETY: the buffer is only handed out through `alloc`, which serialises
// all bookkeeping behind the spin lock.
unsafe impl<const SIZE: usize> Sync for StaticAllocator<SIZE> {}

impl<const SIZE: usize> StaticAllocator<SIZE> {
    const fn new() -> Self {
        Self {
            buffer: UnsafeCell::new([0; SIZE]),
            bump_addr: spin::Mutex::new(0),
        }
    }
}

unsafe impl<const SIZE: usize> GlobalAlloc for StaticAllocator<SIZE> {
    unsafe fn alloc(&self, layout: core::alloc::Layout) -> *mut u8 {
        // The `GlobalAlloc` contract forbids zero-sized layouts; reject them
        // defensively instead of handing out a dangling pointer.
        if layout.size() == 0 {
            log!("ZERO!");
            return core::ptr::null_mut();
        }

        let mut bumplock = self.bump_addr.lock();

        // Lazy initialisation: the buffer's address cannot be computed in
        // `new()` (a const fn), so the bump pointer starts at 0 and is set
        // to the buffer's base address on the first allocation.
        if *bumplock == 0 {
            *bumplock = self.buffer.get() as *mut u8 as usize;
        }

        // Round the bump pointer up to the next multiple of the requested
        // alignment; `layout.align()` is always a non-zero power of two.
        let first_start = *bumplock;
        let aligned_start = if first_start % layout.align() > 0 {
            first_start + layout.align() - (first_start % layout.align())
        } else {
            first_start
        };
        // Out of memory: the requested chunk would end past the buffer.
        // (`>` rather than `>=`: an allocation ending exactly at the end of
        // the buffer still fits.)
        let buffer_end = self.buffer.get() as *mut u8 as usize + SIZE;
        if aligned_start + layout.size() > buffer_end {
            log!("NO SPACE :(!");
            return core::ptr::null_mut();
        }
        // Advance the bump pointer past the chunk just handed out.
        *bumplock = aligned_start + layout.size();

        aligned_start as *mut u8
    }
    // Bump allocator: memory is never reclaimed, so `dealloc` is a no-op.
    unsafe fn dealloc(&self, _ptr: *mut u8, _layout: core::alloc::Layout) {}
}