diff --git a/Cargo.lock b/Cargo.lock
index fcc3919415..e3e8377683 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -859,6 +859,9 @@ name = "talc"
 version = "2.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "01bb0495aaae3e2a1ae07e91583476607f09275f6572d326a5a13796f05098d0"
+dependencies = [
+ "lock_api",
+]
 
 [[package]]
 name = "tempfile"
diff --git a/Cargo.toml b/Cargo.toml
index b212466a67..9792f82126 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -89,7 +89,7 @@ num = { version = "0.4", default-features = false }
 num-traits = { version = "0.2", default-features = false }
 num-derive = "0.4"
 zerocopy = "0.6"
-talc = { version = "2", default-features = false }
+talc = { version = "2" }
 time = { version = "0.3", default-features = false }
 pci_types = { version = "0.5" }
 
diff --git a/src/lib.rs b/src/lib.rs
index db6b1efe8d..6337eaa15d 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -114,9 +114,21 @@ fn trivial_test() {
     panic!("Test called");
 }
 
+#[cfg(target_os = "none")]
+static mut ARENA: [u8; 0x2000] = [0; 0x2000];
+
 #[cfg(target_os = "none")]
 #[global_allocator]
-static ALLOCATOR: LockedAllocator = LockedAllocator::empty();
+static mut ALLOCATOR: LockedAllocator = LockedAllocator(
+    talc::Talc::new(unsafe {
+        // if we're in a hosted environment, the Rust runtime may allocate before
+        // main() is called, so we need to initialize the arena automatically
+        talc::InitOnOom::new(talc::Span::from_slice(
+            ARENA.as_slice() as *const [u8] as *mut [u8]
+        ))
+    })
+    .lock(),
+);
 
 /// Interface to allocate memory from system heap
 ///
diff --git a/src/mm/allocator.rs b/src/mm/allocator.rs
new file mode 100644
index 0000000000..7c6cf094c4
--- /dev/null
+++ b/src/mm/allocator.rs
@@ -0,0 +1,70 @@
+//! Implementation of the HermitCore Allocator for dynamically allocating heap memory
+//! in the kernel.
+
+use core::alloc::{GlobalAlloc, Layout};
+
+use align_address::Align;
+use hermit_sync::RawInterruptTicketMutex;
+use talc::{InitOnOom, Span, Talck};
+
+use crate::HW_DESTRUCTIVE_INTERFERENCE_SIZE;
+
+pub struct LockedAllocator(pub Talck<RawInterruptTicketMutex, InitOnOom>);
+
+impl LockedAllocator {
+    #[inline]
+    fn align_layout(layout: Layout) -> Layout {
+        let size = layout.size().align_up(HW_DESTRUCTIVE_INTERFERENCE_SIZE);
+        let align = layout.align().max(HW_DESTRUCTIVE_INTERFERENCE_SIZE);
+        Layout::from_size_align(size, align).unwrap()
+    }
+
+    pub unsafe fn init(&self, heap_bottom: *mut u8, heap_size: usize) {
+        let arena = Span::from_base_size(heap_bottom, heap_size);
+        unsafe {
+            self.0.talc().init(arena);
+        }
+    }
+}
+
+/// To avoid false sharing, the global memory allocator align
+/// all requests to a cache line.
+unsafe impl GlobalAlloc for LockedAllocator {
+    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
+        let layout = Self::align_layout(layout);
+        unsafe { self.0.alloc(layout) }
+    }
+
+    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
+        let layout = Self::align_layout(layout);
+        unsafe { self.0.dealloc(ptr, layout) }
+    }
+}
+
+#[cfg(all(test, not(target_os = "none")))]
+mod tests {
+    use core::mem;
+
+    use super::*;
+
+    #[test]
+    fn empty() {
+        let mut arena: [u8; 0x1000] = [0; 0x1000];
+        let allocator: LockedAllocator = LockedAllocator(
+            talc::Talc::new(unsafe {
+                talc::InitOnOom::new(talc::Span::from_slice(
+                    arena.as_slice() as *const [u8] as *mut [u8]
+                ))
+            })
+            .lock(),
+        );
+
+        let layout = Layout::from_size_align(1, 1).unwrap();
+        // we have 4 kbyte memory
+        assert!(unsafe { !allocator.alloc(layout.clone()).is_null() });
+
+        let layout = Layout::from_size_align(0x1000, mem::align_of::<usize>()).unwrap();
+        let addr = unsafe { allocator.alloc(layout) };
+        assert!(addr.is_null());
+    }
+}
diff --git a/src/mm/allocator/bootstrap.rs b/src/mm/allocator/bootstrap.rs
deleted file mode 100644
index 705366b8b0..0000000000
--- a/src/mm/allocator/bootstrap.rs
+++ /dev/null
@@ -1,99 +0,0 @@
-//! A bootstrap allocator based on a statically allocated buffer.
-
-/// A pointer range that can only be compared against.
-mod ptr_range {
-    use core::ops::Range;
-    use core::ptr::NonNull;
-
-    /// A pointer range that can only be compared against.
-    pub struct PtrRange<T> {
-        inner: Range<NonNull<T>>,
-    }
-
-    // SAFETY: We never dereference, but only compare, pointers.
-    unsafe impl<T> Send for PtrRange<T> {}
-    unsafe impl<T> Sync for PtrRange<T> {}
-
-    impl<T> PtrRange<T> {
-        /// Returns `true` if the pointer range contains `ptr`.
-        pub fn contains(&self, ptr: NonNull<T>) -> bool {
-            self.inner.contains(&ptr)
-        }
-    }
-
-    impl<T> From<Range<NonNull<T>>> for PtrRange<T> {
-        fn from(value: Range<NonNull<T>>) -> Self {
-            Self { inner: value }
-        }
-    }
-}
-
-use core::alloc::{AllocError, Allocator, Layout};
-use core::mem::MaybeUninit;
-use core::ops::Range;
-use core::ptr::NonNull;
-
-use hermit_sync::ExclusiveCell;
-
-use self::ptr_range::PtrRange;
-
-/// A bootstrap allocator.
-///
-/// This allocator is generic over the internal allocator and can only be created once.
-/// The bootstrap allocator provides the internal allocator with static memory.
-///
-/// This allocator tracks, which static memory it was using initially.
-/// It can be queried whether a pointer belongs to it.
-pub struct BootstrapAllocator<A> {
-    ptr_range: PtrRange<u8>,
-    allocator: A,
-}
-
-impl<A> Default for BootstrapAllocator<A>
-where
-    A: From<&'static mut [MaybeUninit<u8>]>,
-{
-    fn default() -> Self {
-        let mem = {
-            const SIZE: usize = 4 * 1024;
-            const BYTE: MaybeUninit<u8> = MaybeUninit::uninit();
-            /// The actual memory of the boostrap allocator.
-            static MEM: ExclusiveCell<[MaybeUninit<u8>; SIZE]> = ExclusiveCell::new([BYTE; SIZE]);
-            MEM.take().unwrap()
-        };
-
-        let ptr_range = {
-            let Range { start, end } = mem.as_mut_ptr_range();
-            let start = NonNull::new(start).unwrap().cast::<u8>();
-            let end = NonNull::new(end).unwrap().cast::<u8>();
-            PtrRange::from(start..end)
-        };
-        let allocator = A::from(mem);
-
-        Self {
-            ptr_range,
-            allocator,
-        }
-    }
-}
-
-impl<A> BootstrapAllocator<A> {
-    /// Returns `true` if the pointer belonged to the static memory of this allocator.
-    pub fn manages(&self, ptr: NonNull<u8>) -> bool {
-        self.ptr_range.contains(ptr)
-    }
-}
-
-unsafe impl<A> Allocator for BootstrapAllocator<A>
-where
-    A: Allocator,
-{
-    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
-        self.allocator.allocate(layout)
-    }
-
-    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
-        debug_assert!(self.manages(ptr));
-        unsafe { self.allocator.deallocate(ptr, layout) }
-    }
-}
diff --git a/src/mm/allocator/bump.rs b/src/mm/allocator/bump.rs
deleted file mode 100644
index 6c5e38f9c7..0000000000
--- a/src/mm/allocator/bump.rs
+++ /dev/null
@@ -1,48 +0,0 @@
-//! A bump allocator.
-//!
-//! This is a simple allocator design which can only allocate and not deallocate.
-
-use core::alloc::{AllocError, Allocator, Layout};
-use core::cell::Cell;
-use core::mem::MaybeUninit;
-use core::ptr::NonNull;
-
-/// A simple, `!Sync` implementation of a bump allocator.
-///
-/// This allocator manages the provided memory.
-pub struct BumpAllocator {
-    mem: Cell<&'static mut [MaybeUninit<u8>]>,
-}
-
-unsafe impl Allocator for BumpAllocator {
-    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
-        let ptr: *mut [MaybeUninit<u8>] = self.allocate_slice(layout)?;
-        Ok(NonNull::new(ptr as *mut [u8]).unwrap())
-    }
-
-    unsafe fn deallocate(&self, _ptr: NonNull<u8>, _layout: Layout) {}
-}
-
-impl BumpAllocator {
-    fn allocate_slice(&self, layout: Layout) -> Result<&'static mut [MaybeUninit<u8>], AllocError> {
-        let mem = self.mem.take();
-        let align_offset = mem.as_ptr().align_offset(layout.align());
-        let mid = layout.size() + align_offset;
-        if mid > mem.len() {
-            self.mem.set(mem);
-            Err(AllocError)
-        } else {
-            let (alloc, remaining) = mem.split_at_mut(mid);
-            self.mem.set(remaining);
-            Ok(&mut alloc[align_offset..])
-        }
-    }
-}
-
-impl From<&'static mut [MaybeUninit<u8>]> for BumpAllocator {
-    fn from(mem: &'static mut [MaybeUninit<u8>]) -> Self {
-        Self {
-            mem: Cell::new(mem),
-        }
-    }
-}
diff --git a/src/mm/allocator/mod.rs b/src/mm/allocator/mod.rs
deleted file mode 100644
index a9c3b9f344..0000000000
--- a/src/mm/allocator/mod.rs
+++ /dev/null
@@ -1,142 +0,0 @@
-//! Implementation of the HermitCore Allocator for dynamically allocating heap memory
-//! in the kernel.
-
-mod bootstrap;
-mod bump;
-
-use core::alloc::{AllocError, Allocator, GlobalAlloc, Layout};
-use core::ptr;
-use core::ptr::NonNull;
-
-use align_address::Align;
-use hermit_sync::InterruptTicketMutex;
-use talc::{ErrOnOom, Span, Talc};
-
-use self::bootstrap::BootstrapAllocator;
-use self::bump::BumpAllocator;
-use crate::HW_DESTRUCTIVE_INTERFERENCE_SIZE;
-
-/// The global system allocator for Hermit.
-struct GlobalAllocator {
-    /// The bootstrap allocator, which is available immediately.
-    ///
-    /// It allows allocations before the heap has been initalized.
-    bootstrap_allocator: Option<BootstrapAllocator<BumpAllocator>>,
-
-    /// The heap allocator.
-    ///
-    /// This is not available immediately and must be initialized ([`Self::init`]).
-    heap: Option<Talc<ErrOnOom>>,
-}
-
-impl GlobalAllocator {
-    const fn empty() -> Self {
-        Self {
-            bootstrap_allocator: None,
-            heap: None,
-        }
-    }
-
-    /// Initializes the heap allocator.
-    ///
-    /// # Safety
-    ///
-    /// The memory starting from `heap_bottom` with a size of `heap_size`
-    /// must be valid and ready to be managed and allocated from.
-    unsafe fn init(&mut self, heap_bottom: *mut u8, heap_size: usize) {
-        self.heap = unsafe {
-            Some(Talc::with_arena(
-                ErrOnOom,
-                Span::from_base_size(heap_bottom, heap_size),
-            ))
-        }
-    }
-
-    fn align_layout(layout: Layout) -> Layout {
-        let size = layout.size().align_up(HW_DESTRUCTIVE_INTERFERENCE_SIZE);
-        let align = layout.align().max(HW_DESTRUCTIVE_INTERFERENCE_SIZE);
-        Layout::from_size_align(size, align).unwrap()
-    }
-
-    fn allocate(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocError> {
-        let layout = Self::align_layout(layout);
-        match &mut self.heap {
-            Some(heap) => unsafe { heap.malloc(layout).map_err(|_| AllocError) },
-            None => self
-                .bootstrap_allocator
-                .get_or_insert_with(Default::default)
-                .allocate(layout)
-                // FIXME: Use NonNull::as_mut_ptr once `slice_ptr_get` is stabilized
-                // https://github.com/rust-lang/rust/issues/74265
-                .map(|ptr| NonNull::new(ptr.as_ptr() as *mut u8).unwrap()),
-        }
-    }
-
-    unsafe fn deallocate(&mut self, ptr: NonNull<u8>, layout: Layout) {
-        let layout = Self::align_layout(layout);
-        let bootstrap_allocator = self.bootstrap_allocator.as_ref().unwrap();
-        if bootstrap_allocator.manages(ptr) {
-            unsafe {
-                bootstrap_allocator.deallocate(ptr, layout);
-            }
-        } else {
-            unsafe {
-                self.heap.as_mut().unwrap().free(ptr, layout);
-            }
-        }
-    }
-}
-
-pub struct LockedAllocator(InterruptTicketMutex<GlobalAllocator>);
-
-impl LockedAllocator {
-    /// Creates an empty allocator. All allocate calls will return `None`.
-    pub const fn empty() -> LockedAllocator {
-        LockedAllocator(InterruptTicketMutex::new(GlobalAllocator::empty()))
-    }
-
-    pub unsafe fn init(&self, heap_bottom: *mut u8, heap_size: usize) {
-        unsafe {
-            self.0.lock().init(heap_bottom, heap_size);
-        }
-    }
-}
-
-/// To avoid false sharing, the global memory allocator align
-/// all requests to a cache line.
-unsafe impl GlobalAlloc for LockedAllocator {
-    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
-        self.0
-            .lock()
-            .allocate(layout)
-            .ok()
-            .map_or(ptr::null_mut(), |allocation| allocation.as_ptr())
-    }
-
-    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
-        unsafe {
-            self.0
-                .lock()
-                .deallocate(NonNull::new_unchecked(ptr), layout)
-        }
-    }
-}
-
-#[cfg(all(test, not(target_os = "none")))]
-mod tests {
-    use core::mem;
-
-    use super::*;
-
-    #[test]
-    fn empty() {
-        let mut allocator = GlobalAllocator::empty();
-        let layout = Layout::from_size_align(1, 1).unwrap();
-        // we have 4 kbyte static memory
-        assert!(allocator.allocate(layout.clone()).is_ok());
-
-        let layout = Layout::from_size_align(0x1000, mem::align_of::<usize>());
-        let addr = allocator.allocate(layout.unwrap());
-        assert!(addr.is_err());
-    }
-}
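For reference, the `align_layout` helper carried over into the new `src/mm/allocator.rs` is what backs the "align all requests to a cache line" comment: both the requested size and alignment are padded up to `HW_DESTRUCTIVE_INTERFERENCE_SIZE`, so two independent allocations never land in the same cache line. A minimal, self-contained sketch of that rounding, assuming a 64-byte cache line (an illustrative stand-in for Hermit's per-architecture constant; the in-tree code uses `align_address::Align::align_up` for the same computation):

use core::alloc::Layout;

// Illustrative stand-in for Hermit's per-architecture
// HW_DESTRUCTIVE_INTERFERENCE_SIZE constant (assumed to be 64 bytes here).
const CACHE_LINE: usize = 64;

// Same rounding as LockedAllocator::align_layout above: the requested size is
// rounded up to a multiple of the cache line and the alignment is raised to at
// least one cache line, so neighbouring allocations cannot share a line.
fn align_layout(layout: Layout) -> Layout {
    let size = (layout.size() + CACHE_LINE - 1) & !(CACHE_LINE - 1);
    let align = layout.align().max(CACHE_LINE);
    Layout::from_size_align(size, align).unwrap()
}

fn main() {
    // A 3-byte, 1-aligned request becomes a 64-byte, 64-aligned block.
    let padded = align_layout(Layout::from_size_align(3, 1).unwrap());
    assert_eq!((padded.size(), padded.align()), (64, 64));
}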