diff --git a/lib/libyalloc/.gitignore b/lib/libyalloc/.gitignore new file mode 100644 index 00000000..ea8c4bf7 --- /dev/null +++ b/lib/libyalloc/.gitignore @@ -0,0 +1 @@ +/target diff --git a/lib/libyalloc/Cargo.lock b/lib/libyalloc/Cargo.lock new file mode 100644 index 00000000..0e6d1990 --- /dev/null +++ b/lib/libyalloc/Cargo.lock @@ -0,0 +1,62 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "compiler_builtins" +version = "0.1.105" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3686cc48897ce1950aa70fd595bd2dc9f767a3c4cca4cd17b2cb52a2d37e6eb4" +dependencies = [ + "rustc-std-workspace-core", +] + +[[package]] +name = "libc" +version = "0.2.151" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "302d7ab3130588088d277783b1e2d2e10c9e9e4a16dd9050e6ec93fb3e7048f4" +dependencies = [ + "rustc-std-workspace-core", +] + +[[package]] +name = "libyalloc" +version = "0.1.0" +dependencies = [ + "libc", + "rustc-std-workspace-core", + "yggdrasil-rt", +] + +[[package]] +name = "rustc-std-workspace-alloc" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff66d57013a5686e1917ed6a025d54dd591fcda71a41fe07edf4d16726aefa86" + +[[package]] +name = "rustc-std-workspace-core" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1956f5517128a2b6f23ab2dadf1a976f4f5b27962e7724c2bf3d45e539ec098c" + +[[package]] +name = "yggdrasil-abi" +version = "0.1.0" +source = "git+https://git.alnyan.me/yggdrasil/yggdrasil-abi.git#449da18378f3340bb3aa06f00c1f99c79835e18f" +dependencies = [ + "compiler_builtins", + "rustc-std-workspace-core", +] + +[[package]] +name = "yggdrasil-rt" +version = "0.1.0" +source = "git+https://git.alnyan.me/yggdrasil/yggdrasil-rt.git#54029c01f14902b11b079fa709f87ab4e4951911" +dependencies = [ + "compiler_builtins", + 
"rustc-std-workspace-alloc", + "rustc-std-workspace-core", + "yggdrasil-abi", +] diff --git a/lib/libyalloc/Cargo.toml b/lib/libyalloc/Cargo.toml new file mode 100644 index 00000000..3570285b --- /dev/null +++ b/lib/libyalloc/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "libyalloc" +version = "0.1.0" +edition = "2021" +authors = ["Mark Poliakov "] + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +core = { version = "1.0.0", optional = true, package = "rustc-std-workspace-core" } +compiler_builtins = { version = "0.1", optional = true } + + +[target.'cfg(unix)'.dependencies] +libc = { version = "0.2.140", default-features = false } +[target.'cfg(not(unix))'.dependencies] +yggdrasil-rt = { git = "https://git.alnyan.me/yggdrasil/yggdrasil-rt.git", default-features = false } + +[features] +default = [] +rustc-dep-of-std = [ + "core", + "compiler_builtins", + "compiler_builtins/rustc-dep-of-std", + "libc/rustc-dep-of-std", + "yggdrasil-rt/rustc-dep-of-std" +] diff --git a/lib/libyalloc/src/allocator.rs b/lib/libyalloc/src/allocator.rs new file mode 100644 index 00000000..ff2878a5 --- /dev/null +++ b/lib/libyalloc/src/allocator.rs @@ -0,0 +1,233 @@ +use core::{alloc::Layout, ops::Index, ptr::NonNull}; + +use crate::{ + bucket::Bucket, + util::{self, Assert, IsTrue}, +}; + +struct BucketList +where + [u64; M / 64]: Sized, + Assert<{ M % 64 == 0 }>: IsTrue, +{ + head: Option>>, +} + +pub struct BucketAllocator { + // 1024x64 = 16 pages + buckets_1024: BucketList<1024, 64>, + // 512x64 = 8 pages + buckets_512: BucketList<512, 64>, + // 256x128 = 8 pages + buckets_256: BucketList<256, 128>, + // 128x128 = 4 pages + buckets_128: BucketList<128, 128>, + // 64x128 = 2 pages + buckets_64: BucketList<64, 128>, + // 32x256 = 2 pages + buckets_32: BucketList<32, 256>, +} + +impl BucketList +where + [u64; M / 64]: Sized, + Assert<{ M % 64 == 0 }>: IsTrue, +{ + const fn new() -> Self { + Self { head: None } + }
+ + fn allocate(&mut self) -> Option> { + let mut node = self.head; + while let Some(mut bucket) = node { + let bucket = unsafe { bucket.as_mut() }; + + if let Some(ptr) = bucket.allocate() { + return Some(ptr); + } + + node = bucket.next; + } + + // No usable bucket found + let mut node = Bucket::new()?; + let bucket = unsafe { node.as_mut() }; + bucket.next = self.head; + self.head = Some(node); + + bucket.allocate() + } + + unsafe fn free(&mut self, ptr: NonNull) { + let mut node = self.head; + while let Some(mut bucket) = node { + let bucket = bucket.as_mut(); + + if let (true, _last) = bucket.free(ptr) { + // TODO free the node if last? + return; + } + + node = bucket.next; + } + + panic!("Possible double free detected: pointer {:p} from bucket list {}x{}B, no corresponding bucket found", ptr, N, M); + } +} + +impl Index for BucketList +where + [u64; M / 64]: Sized, + Assert<{ M % 64 == 0 }>: IsTrue, +{ + type Output = Bucket; + + fn index(&self, index: usize) -> &Self::Output { + let mut current = 0; + let mut node = self.head; + while let Some(bucket) = node { + let bucket = unsafe { bucket.as_ref() }; + if current == index { + return bucket; + } + current += 1; + node = bucket.next; + } + panic!( + "BucketList index out of range: contains {} buckets, tried to index {}", + current, index + ); + } +} + +impl BucketAllocator { + pub const fn new() -> Self { + Self { + buckets_1024: BucketList::new(), + buckets_512: BucketList::new(), + buckets_256: BucketList::new(), + buckets_128: BucketList::new(), + buckets_64: BucketList::new(), + buckets_32: BucketList::new(), + } + } + + pub fn allocate(&mut self, layout: Layout) -> Option> { + let aligned = layout.pad_to_align(); + + match aligned.size() { + 0 => todo!(), + ..=32 => self.buckets_32.allocate(), + ..=64 => self.buckets_64.allocate(), + ..=128 => self.buckets_128.allocate(), + ..=256 => self.buckets_256.allocate(), + ..=512 => self.buckets_512.allocate(), + ..=1024 => self.buckets_1024.allocate(), + size 
=> util::map_pages((size + util::PAGE_SIZE - 1) / util::PAGE_SIZE), + } + } + + pub unsafe fn free(&mut self, ptr: NonNull, layout: Layout) { + let aligned = layout.pad_to_align(); + + match aligned.size() { + 0 => todo!(), + ..=32 => self.buckets_32.free(ptr), + ..=64 => self.buckets_64.free(ptr), + ..=128 => self.buckets_128.free(ptr), + ..=256 => self.buckets_256.free(ptr), + ..=512 => self.buckets_512.free(ptr), + ..=1024 => self.buckets_1024.free(ptr), + size => { + assert_eq!(usize::from(ptr.addr()) % util::PAGE_SIZE, 0); + util::unmap_pages(ptr, (size + util::PAGE_SIZE - 1) / util::PAGE_SIZE); + } + } + } +} + +#[cfg(test)] +mod tests { + use core::{alloc::Layout, ptr::NonNull}; + + use super::{BucketAllocator, BucketList}; + + #[test] + fn single_list_allocation() { + let mut list = BucketList::<32, 64>::new(); + let mut vec = vec![]; + + for _ in 0..4 * 64 + 3 { + let ptr = list.allocate().unwrap(); + vec.push(ptr); + } + + for ptr in vec { + unsafe { + list.free(ptr); + } + } + } + + #[test] + fn multi_list_allocation() { + const SIZES: &[usize] = &[1, 3, 7, 15, 16, 24, 33, 65, 126, 255, 500, 1000]; + + let mut allocator = BucketAllocator::new(); + let mut vec = vec![]; + + for _ in 0..65 { + for &size in SIZES { + let layout = Layout::from_size_align(size, 16).unwrap(); + let ptr = allocator.allocate(layout).unwrap(); + assert_eq!(usize::from(ptr.addr()) % 16, 0); + let mut slice = NonNull::slice_from_raw_parts(ptr, size); + unsafe { + slice.as_mut().fill(123); + } + vec.push((ptr, layout)); + } + } + + for (ptr, layout) in vec { + unsafe { + allocator.free(ptr, layout); + } + } + } + + #[test] + #[should_panic] + fn double_free() { + let mut allocator = BucketAllocator::new(); + let layout = Layout::from_size_align(63, 32).unwrap(); + let ptr = allocator.allocate(layout).unwrap(); + + unsafe { + allocator.free(ptr, layout); + allocator.free(ptr, layout); + } + } + + #[test] + fn large_alloc() { + const SIZES: &[usize] = &[2000, 2048, 4000, 4096, 8192]; 
+ + let mut allocator = BucketAllocator::new(); + let mut vec = vec![]; + + for &size in SIZES { + let layout = Layout::from_size_align(size, 32).unwrap(); + let ptr = allocator.allocate(layout).unwrap(); + vec.push((ptr, layout)); + } + + for (ptr, layout) in vec { + assert_eq!(usize::from(ptr.addr()) % 0x1000, 0); + + unsafe { + allocator.free(ptr, layout); + } + } + } +} diff --git a/lib/libyalloc/src/bucket.rs b/lib/libyalloc/src/bucket.rs new file mode 100644 index 00000000..3b0bc1a1 --- /dev/null +++ b/lib/libyalloc/src/bucket.rs @@ -0,0 +1,160 @@ +use core::{ + mem::{size_of, MaybeUninit}, + ptr::NonNull, +}; + +use crate::util::{self, Assert, IsTrue, NonNullExt}; + +pub struct Bucket +where + [u64; M / 64]: Sized, + Assert<{ M % 64 == 0 }>: IsTrue, +{ + pub(crate) data: NonNull, + bitmap: [u64; M / 64], + allocated_count: usize, + pub(crate) next: Option>>, +} + +impl Bucket +where + [u64; M / 64]: Sized, + Assert<{ M % 64 == 0 }>: IsTrue, +{ + pub fn new() -> Option> { + let data_page_count = (M * N + 0xFFF) / 0x1000; + let info_page_count = (size_of::() + 0xFFF) / 0x1000; + + let data = util::map_pages(data_page_count)?; + let info = util::map_pages(info_page_count)?; + + let bucket = unsafe { info.cast::>().as_mut() }; + let bucket = bucket.write(Self { + data, + bitmap: [0; M / 64], + allocated_count: 0, + next: None, + }); + + Some(bucket.into()) + } + + pub fn allocate(&mut self) -> Option> { + for i in 0..self.bitmap.len() { + for j in 0..64 { + if self.bitmap[i] & (1 << j) != 0 { + continue; + } + + self.bitmap[i] |= 1 << j; + self.allocated_count += 1; + + return Some(unsafe { self.data.add_ext((i * 64 + j) * N) }); + } + } + + None + } + + pub fn free(&mut self, ptr: NonNull) -> (bool, bool) { + if ptr.addr() < self.data.addr() { + return (false, false); + } + let offset = (usize::from(ptr.addr()) - usize::from(self.data.addr())) / N; + if offset >= M { + return (false, false); + } + + let index = offset / 64; + let bit = offset % 64; + + if 
self.bitmap[index] & (1 << bit) == 0 { + panic!( + "Possible double free detected: pointer {:p} from bucket {}x{}B, index {}:{}", + ptr, M, N, index, bit + ); + } + self.bitmap[index] &= !(1 << bit); + self.allocated_count -= 1; + + (true, self.allocated_count == 0) + } +} + +#[cfg(test)] +mod tests { + use core::ptr::NonNull; + + use crate::{bucket::Bucket, util::NonNullExt}; + + #[test] + fn bucket_creation() { + let mut bucket = Bucket::<32, 64>::new().unwrap(); + let bucket = unsafe { bucket.as_mut() }; + assert_eq!(bucket.allocated_count, 0); + assert_eq!(bucket.next, None); + } + + #[test] + fn bucket_allocation() { + let mut bucket = Bucket::<32, 64>::new().unwrap(); + let bucket = unsafe { bucket.as_mut() }; + let mut vec = vec![]; + + let mut index = 0; + loop { + if let Some(ptr) = bucket.allocate() { + if index == bucket.bitmap.len() * 64 { + panic!("WTF"); + } + vec.push(ptr); + } else { + break; + } + index += 1; + } + assert_eq!(bucket.allocated_count, index); + assert_eq!(index, bucket.bitmap.len() * 64); + + for (i, item) in vec.into_iter().enumerate() { + assert_eq!(item, unsafe { bucket.data.add_ext(i * 32) }); + let last = i == bucket.bitmap.len() * 64 - 1; + + { + let mut slice = NonNull::slice_from_raw_parts(item, 32); + let slice = unsafe { slice.as_mut() }; + slice.fill(123); + } + + assert_eq!(bucket.free(item), (true, last)); + } + } + + #[test] + fn free_outside_of_bucket() { + let mut bucket0 = Bucket::<32, 64>::new().unwrap(); + let mut bucket1 = Bucket::<64, 64>::new().unwrap(); + let bucket0 = unsafe { bucket0.as_mut() }; + let bucket1 = unsafe { bucket1.as_mut() }; + + let ptr0 = bucket0.allocate().unwrap(); + let ptr1 = bucket1.allocate().unwrap(); + + assert_eq!(bucket1.free(ptr0), (false, false)); + assert_eq!(bucket0.free(ptr1), (false, false)); + + assert_eq!(bucket0.free(ptr0), (true, true)); + assert_eq!(bucket1.free(ptr1), (true, true)); + } + + #[test] + #[should_panic] + fn double_free() { + let mut bucket0 = Bucket::<32, 
64>::new().unwrap(); + let bucket0 = unsafe { bucket0.as_mut() }; + + let ptr = bucket0.allocate().unwrap(); + assert_eq!(bucket0.free(ptr), (true, true)); + bucket0.free(ptr); + } +} diff --git a/lib/libyalloc/src/global.rs b/lib/libyalloc/src/global.rs new file mode 100644 index 00000000..f5bb6bdb --- /dev/null +++ b/lib/libyalloc/src/global.rs @@ -0,0 +1,40 @@ +use core::{ + alloc::{AllocError, Allocator, GlobalAlloc, Layout}, + ptr::{null_mut, NonNull}, +}; + +use crate::{allocator::BucketAllocator, util::Spinlock}; + +pub struct GlobalAllocator; + +unsafe impl GlobalAlloc for GlobalAllocator { + #[inline] + unsafe fn alloc(&self, layout: Layout) -> *mut u8 { + let ptr = GLOBAL_ALLOCATOR.lock().allocate(layout); + + if let Some(ptr) = ptr { + ptr.as_ptr() + } else { + null_mut() + } + } + + #[inline] + unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) { + let ptr = NonNull::new(ptr).expect("Invalid pointer"); + GLOBAL_ALLOCATOR.lock().free(ptr, layout); + } +} + +unsafe impl Allocator for GlobalAllocator { + fn allocate(&self, layout: Layout) -> Result, AllocError> { + let ptr = GLOBAL_ALLOCATOR.lock().allocate(layout).ok_or(AllocError)?; + Ok(NonNull::slice_from_raw_parts(ptr, layout.size())) + } + + unsafe fn deallocate(&self, ptr: NonNull, layout: Layout) { + GLOBAL_ALLOCATOR.lock().free(ptr, layout); + } +} + +static GLOBAL_ALLOCATOR: Spinlock = Spinlock::new(BucketAllocator::new()); diff --git a/lib/libyalloc/src/lib.rs b/lib/libyalloc/src/lib.rs new file mode 100644 index 00000000..7eb56561 --- /dev/null +++ b/lib/libyalloc/src/lib.rs @@ -0,0 +1,20 @@ +#![feature( + generic_const_exprs, + arbitrary_self_types, + strict_provenance, + exposed_provenance, + let_chains, + test, + allocator_api +)] +#![cfg_attr(not(test), no_std)] +#![allow(incomplete_features)] +#![deny(fuzzy_provenance_casts, lossy_provenance_casts)] + +#[cfg(test)] +extern crate test; + +pub mod allocator; +mod bucket; +pub mod global; +mod util; diff --git
a/lib/libyalloc/src/util.rs b/lib/libyalloc/src/util.rs new file mode 100644 index 00000000..3e97c1b3 --- /dev/null +++ b/lib/libyalloc/src/util.rs @@ -0,0 +1,123 @@ +use core::{ + cell::UnsafeCell, + ops::{Deref, DerefMut}, + ptr::NonNull, + sync::atomic::{AtomicBool, Ordering}, +}; + +pub const PAGE_SIZE: usize = 0x1000; + +pub enum Assert {} +pub trait IsTrue {} +impl IsTrue for Assert {} + +pub trait NonNullExt { + unsafe fn add_ext(self, offset: usize) -> Self; +} + +impl NonNullExt for NonNull { + unsafe fn add_ext(self, offset: usize) -> Self { + NonNull::new_unchecked(self.as_ptr().add(offset)) + } +} + +pub struct Spinlock { + state: AtomicBool, + data: UnsafeCell, +} + +pub struct SpinlockGuard<'a, T: ?Sized> { + lock: &'a Spinlock, +} + +pub fn map_pages(count: usize) -> Option> { + #[cfg(unix)] + { + use core::ptr::null_mut; + + let address = unsafe { + libc::mmap( + null_mut(), + count * PAGE_SIZE, + libc::PROT_READ | libc::PROT_WRITE, + libc::MAP_ANONYMOUS | libc::MAP_PRIVATE, + -1, + 0, + ) + }; + + // mmap reports failure as MAP_FAILED (-1), not NULL + if address == libc::MAP_FAILED { + return None; + } + + NonNull::new(address as *mut u8) + } + + #[cfg(not(unix))] + { + use yggdrasil_rt::mem::MappingSource; + + let address = unsafe { + yggdrasil_rt::sys::map_memory(None, count * PAGE_SIZE, &MappingSource::Anonymous) + } + .ok()?; + + NonNull::new(core::ptr::from_exposed_addr_mut(address)) + } +} + +pub fn unmap_pages(address: NonNull, count: usize) { + #[cfg(unix)] + unsafe { + libc::munmap(address.as_ptr() as _, count * PAGE_SIZE); + } + #[cfg(not(unix))] + { + unsafe { + yggdrasil_rt::sys::unmap_memory(address.addr().into(), count * PAGE_SIZE).unwrap(); + } + } +} + +impl Spinlock { + pub const fn new(value: T) -> Self + where + T: Sized, + { + Self { + state: AtomicBool::new(false), + data: UnsafeCell::new(value), + } + } + + pub fn lock<'a>(&'a self) -> SpinlockGuard<'a, T> { + while self + .state + .compare_exchange(false, true, Ordering::Acquire, Ordering::Relaxed) + .is_err() + { + core::hint::spin_loop(); + } + + // Locked + SpinlockGuard { lock:
self } + } +} + +impl<'a, T: ?Sized> Deref for SpinlockGuard<'a, T> { + type Target = T; + + fn deref(&self) -> &Self::Target { + unsafe { &*self.lock.data.get() } + } +} + +impl<'a, T: ?Sized> DerefMut for SpinlockGuard<'a, T> { + fn deref_mut(&mut self) -> &mut Self::Target { + unsafe { &mut *self.lock.data.get() } + } +} + +impl<'a, T: ?Sized> Drop for SpinlockGuard<'a, T> { + fn drop(&mut self) { + self.lock.state.store(false, Ordering::Release); + } +} + +unsafe impl Sync for Spinlock {}