Add 'lib/libyalloc/' from commit 'c8399685ff776a08799c5e4c6eecd6fef1dce7bd'

git-subtree-dir: lib/libyalloc
git-subtree-mainline: 53ad163fb7a4d7d4229e76543f6890a379dbb85d
git-subtree-split: c8399685ff776a08799c5e4c6eecd6fef1dce7bd
This commit is contained in:
Mark Poliakov 2024-03-12 15:53:46 +02:00
commit 2f6f9b9b55
8 changed files with 666 additions and 0 deletions

1
lib/libyalloc/.gitignore vendored Normal file
View File

@ -0,0 +1 @@
/target

62
lib/libyalloc/Cargo.lock generated Normal file
View File

@ -0,0 +1,62 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "compiler_builtins"
version = "0.1.105"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3686cc48897ce1950aa70fd595bd2dc9f767a3c4cca4cd17b2cb52a2d37e6eb4"
dependencies = [
"rustc-std-workspace-core",
]
[[package]]
name = "libc"
version = "0.2.151"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "302d7ab3130588088d277783b1e2d2e10c9e9e4a16dd9050e6ec93fb3e7048f4"
dependencies = [
"rustc-std-workspace-core",
]
[[package]]
name = "libyalloc"
version = "0.1.0"
dependencies = [
"libc",
"rustc-std-workspace-core",
"yggdrasil-rt",
]
[[package]]
name = "rustc-std-workspace-alloc"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ff66d57013a5686e1917ed6a025d54dd591fcda71a41fe07edf4d16726aefa86"
[[package]]
name = "rustc-std-workspace-core"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1956f5517128a2b6f23ab2dadf1a976f4f5b27962e7724c2bf3d45e539ec098c"
[[package]]
name = "yggdrasil-abi"
version = "0.1.0"
source = "git+https://git.alnyan.me/yggdrasil/yggdrasil-abi.git#449da18378f3340bb3aa06f00c1f99c79835e18f"
dependencies = [
"compiler_builtins",
"rustc-std-workspace-core",
]
[[package]]
name = "yggdrasil-rt"
version = "0.1.0"
source = "git+https://git.alnyan.me/yggdrasil/yggdrasil-rt.git#54029c01f14902b11b079fa709f87ab4e4951911"
dependencies = [
"compiler_builtins",
"rustc-std-workspace-alloc",
"rustc-std-workspace-core",
"yggdrasil-abi",
]

27
lib/libyalloc/Cargo.toml Normal file
View File

@ -0,0 +1,27 @@
[package]
name = "libyalloc"
version = "0.1.0"
edition = "2021"
authors = ["Mark Poliakov <mark@alnyan.me>"]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
core = { version = "1.0.0", optional = true, package = "rustc-std-workspace-core" }
compiler_builtins = { version = "0.1", optional = true }
[target.'cfg(unix)'.dependencies]
libc = { version = "0.2.140", default-features = false }
[target.'cfg(not(unix))'.dependencies]
yggdrasil-rt = { git = "https://git.alnyan.me/yggdrasil/yggdrasil-rt.git", default-features = false }
[features]
default = []
rustc-dep-of-std = [
"core",
"compiler_builtins",
"compiler_builtins/rustc-dep-of-std",
"libc/rustc-dep-of-std",
"yggdrasil-rt/rustc-dep-of-std"
]

View File

@ -0,0 +1,233 @@
use core::{alloc::Layout, ops::Index, ptr::NonNull};
use crate::{
bucket::Bucket,
util::{self, Assert, IsTrue},
};
/// Intrusive singly-linked list of fixed-size allocation buckets.
///
/// `N` is the slot size in bytes, `M` the number of slots per bucket.
/// `M` must be a multiple of 64 so the occupancy bitmap is an exact
/// array of `u64` words.
struct BucketList<const N: usize, const M: usize>
where
    [u64; M / 64]: Sized,
    Assert<{ M % 64 == 0 }>: IsTrue,
{
    /// First bucket in the list, or `None` if no bucket has been mapped yet.
    head: Option<NonNull<Bucket<N, M>>>,
}
/// Top-level allocator: routes each request to a size-segregated bucket
/// list, falling back to whole-page mappings for requests above 1024 bytes.
///
/// Fix: `buckets_64` and `buckets_32` were both declared as
/// `BucketList<128, 128>`, contradicting their own size comments and
/// wasting a 128-byte slot on every 32/64-byte allocation. Their const
/// parameters now match the documented slot size and count.
pub struct BucketAllocator {
    // 1024x64 = 16 pages
    buckets_1024: BucketList<1024, 64>,
    // 512x64 = 8 pages
    buckets_512: BucketList<512, 64>,
    // 256x128 = 8 pages
    buckets_256: BucketList<256, 128>,
    // 128x128 = 4 pages
    buckets_128: BucketList<128, 128>,
    // 64x128 = 2 pages
    buckets_64: BucketList<64, 128>,
    // 32x256 = 2 pages
    buckets_32: BucketList<32, 256>,
}
impl<const N: usize, const M: usize> BucketList<N, M>
where
    [u64; M / 64]: Sized,
    Assert<{ M % 64 == 0 }>: IsTrue,
{
    /// Creates an empty list; no memory is mapped until the first allocation.
    const fn new() -> Self {
        Self { head: None }
    }

    /// Returns a free `N`-byte slot, mapping a fresh bucket when every
    /// existing bucket is full. `None` means the underlying page mapping
    /// failed.
    fn allocate(&mut self) -> Option<NonNull<u8>> {
        let mut node = self.head;
        while let Some(mut bucket) = node {
            // SAFETY: list nodes are valid bucket pointers owned by this list.
            let bucket = unsafe { bucket.as_mut() };
            if let Some(ptr) = bucket.allocate() {
                return Some(ptr);
            }
            node = bucket.next;
        }
        // No usable bucket found
        let mut node = Bucket::new()?;
        // SAFETY: `node` was just returned by `Bucket::new` and is uniquely owned.
        let bucket = unsafe { node.as_mut() };
        // Push the fresh bucket at the head so subsequent allocations hit it first.
        bucket.next = self.head;
        self.head = Some(node);
        bucket.allocate()
    }

    /// Releases a slot previously returned by [`Self::allocate`].
    ///
    /// # Safety
    /// `ptr` must have been allocated from this list and not freed since.
    ///
    /// # Panics
    /// Panics when no bucket in the list owns `ptr` — likely a double free
    /// or a pointer from a different size class.
    unsafe fn free(&mut self, ptr: NonNull<u8>) {
        let mut node = self.head;
        while let Some(mut bucket) = node {
            let bucket = bucket.as_mut();
            // `free` returns (owned, empty); only the owning bucket frees the slot.
            if let (true, _last) = bucket.free(ptr) {
                // TODO free the node if last?
                return;
            }
            node = bucket.next;
        }
        panic!("Possible double free detected: pointer {:p} from bucket list {}x{}B, no corresponding bucket found", ptr, N, M);
    }
}
impl<const N: usize, const M: usize> Index<usize> for BucketList<N, M>
where
    [u64; M / 64]: Sized,
    Assert<{ M % 64 == 0 }>: IsTrue,
{
    type Output = Bucket<N, M>;

    /// Walks the list and returns a reference to the `index`-th bucket,
    /// panicking when the list holds fewer than `index + 1` buckets.
    fn index(&self, index: usize) -> &Self::Output {
        let mut visited = 0;
        let mut cursor = self.head;
        while let Some(entry) = cursor {
            // SAFETY: list nodes remain valid bucket pointers while the list lives.
            let entry = unsafe { entry.as_ref() };
            if visited == index {
                return entry;
            }
            visited += 1;
            cursor = entry.next;
        }
        panic!(
            "BucketList index out of range: contains {} buckets, tried to index {}",
            visited, index
        );
    }
}
impl BucketAllocator {
    /// Creates an allocator with every size-class list empty; no pages are
    /// mapped until the first allocation.
    pub const fn new() -> Self {
        Self {
            buckets_1024: BucketList::new(),
            buckets_512: BucketList::new(),
            buckets_256: BucketList::new(),
            buckets_128: BucketList::new(),
            buckets_64: BucketList::new(),
            buckets_32: BucketList::new(),
        }
    }

    /// Allocates memory for `layout`, routing the request to the smallest
    /// size class that fits the alignment-padded size, or to a dedicated
    /// page mapping for sizes above 1024 bytes.
    ///
    /// NOTE(review): alignments larger than the chosen size class (or the
    /// page size, for large requests) are not explicitly handled — confirm
    /// callers never need align > 4096.
    pub fn allocate(&mut self, layout: Layout) -> Option<NonNull<u8>> {
        // Padding the size up to the alignment makes the size class a
        // multiple of the alignment, so slot offsets within a page-aligned
        // bucket stay suitably aligned.
        let aligned = layout.pad_to_align();
        match aligned.size() {
            // Zero-sized allocations are not supported yet.
            0 => todo!(),
            ..=32 => self.buckets_32.allocate(),
            ..=64 => self.buckets_64.allocate(),
            ..=128 => self.buckets_128.allocate(),
            ..=256 => self.buckets_256.allocate(),
            ..=512 => self.buckets_512.allocate(),
            ..=1024 => self.buckets_1024.allocate(),
            // Anything above the largest class gets its own page mapping,
            // rounded up to whole pages.
            size => util::map_pages((size + util::PAGE_SIZE - 1) / util::PAGE_SIZE),
        }
    }

    /// Frees memory previously returned by [`Self::allocate`].
    ///
    /// # Safety
    /// `ptr` must come from this allocator with an identical `layout` and
    /// must not already have been freed.
    pub unsafe fn free(&mut self, ptr: NonNull<u8>, layout: Layout) {
        // Recompute the padded size so the free is routed to the same size
        // class the allocation came from.
        let aligned = layout.pad_to_align();
        match aligned.size() {
            0 => todo!(),
            ..=32 => self.buckets_32.free(ptr),
            ..=64 => self.buckets_64.free(ptr),
            ..=128 => self.buckets_128.free(ptr),
            ..=256 => self.buckets_256.free(ptr),
            ..=512 => self.buckets_512.free(ptr),
            ..=1024 => self.buckets_1024.free(ptr),
            size => {
                // Large allocations were direct page mappings; a pointer that
                // is not page-aligned cannot have come from `map_pages`.
                assert_eq!(usize::from(ptr.addr()) % util::PAGE_SIZE, 0);
                util::unmap_pages(ptr, (size + util::PAGE_SIZE - 1) / util::PAGE_SIZE);
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use core::{alloc::Layout, ptr::NonNull};
    use super::{BucketAllocator, BucketList};

    /// Exercises a single size class past several bucket expansions.
    #[test]
    fn single_list_allocation() {
        let mut list = BucketList::<32, 64>::new();
        let mut vec = vec![];
        // 4 full buckets plus a few extra slots forces new bucket creation.
        for _ in 0..4 * 64 + 3 {
            let ptr = list.allocate().unwrap();
            vec.push(ptr);
        }
        for ptr in vec {
            unsafe {
                list.free(ptr);
            }
        }
    }

    /// Allocates many mixed sizes, checks 16-byte alignment, writes to each
    /// block, then frees everything with the matching layouts.
    #[test]
    fn multi_list_allocation() {
        const SIZES: &[usize] = &[1, 3, 7, 15, 16, 24, 33, 65, 126, 255, 500, 1000];
        let mut allocator = BucketAllocator::new();
        let mut vec = vec![];
        for _ in 0..65 {
            for &size in SIZES {
                let layout = Layout::from_size_align(size, 16).unwrap();
                let ptr = allocator.allocate(layout).unwrap();
                assert_eq!(usize::from(ptr.addr()) % 16, 0);
                let mut slice = NonNull::slice_from_raw_parts(ptr, size);
                unsafe {
                    slice.as_mut().fill(123);
                }
                vec.push((ptr, layout));
            }
        }
        for (ptr, layout) in vec {
            unsafe {
                allocator.free(ptr, layout);
            }
        }
    }

    /// Freeing the same pointer twice must panic.
    #[test]
    #[should_panic]
    fn double_free() {
        let mut allocator = BucketAllocator::new();
        let layout = Layout::from_size_align(63, 32).unwrap();
        let ptr = allocator.allocate(layout).unwrap();
        unsafe {
            allocator.free(ptr, layout);
            allocator.free(ptr, layout);
        }
    }

    /// Requests above the largest bucket class must come from page mappings
    /// and therefore be page-aligned.
    #[test]
    fn large_alloc() {
        const SIZES: &[usize] = &[2000, 2048, 4000, 4096, 8192];
        let mut allocator = BucketAllocator::new();
        let mut vec = vec![];
        for &size in SIZES {
            let layout = Layout::from_size_align(size, 32).unwrap();
            let ptr = allocator.allocate(layout).unwrap();
            vec.push((ptr, layout));
        }
        for (ptr, layout) in vec {
            assert_eq!(usize::from(ptr.addr()) % 0x1000, 0);
            unsafe {
                allocator.free(ptr, layout);
            }
        }
    }
}

160
lib/libyalloc/src/bucket.rs Normal file
View File

@ -0,0 +1,160 @@
use core::{
mem::{size_of, MaybeUninit},
ptr::NonNull,
};
use crate::util::{self, Assert, IsTrue, NonNullExt};
/// A fixed-capacity slab of `M` slots of `N` bytes each, tracked by an
/// occupancy bitmap. The bucket's metadata lives in its own page mapping,
/// separate from the slot storage it describes.
pub struct Bucket<const N: usize, const M: usize>
where
    [u64; M / 64]: Sized,
    Assert<{ M % 64 == 0 }>: IsTrue,
{
    /// Page-aligned base address of the slot storage.
    pub(crate) data: NonNull<u8>,
    /// One bit per slot; a set bit marks the slot as allocated.
    bitmap: [u64; M / 64],
    /// Number of currently allocated slots.
    allocated_count: usize,
    /// Next bucket in the owning `BucketList`.
    pub(crate) next: Option<NonNull<Bucket<N, M>>>,
}
impl<const N: usize, const M: usize> Bucket<N, M>
where
    [u64; M / 64]: Sized,
    Assert<{ M % 64 == 0 }>: IsTrue,
{
    /// Maps storage for `M` slots of `N` bytes plus a separate mapping for
    /// the bucket metadata, returning a pointer to the initialized bucket.
    /// Returns `None` when either mapping fails.
    ///
    /// Fix: the original used `?` on the second `map_pages`, leaking the
    /// already-mapped data pages whenever the metadata mapping failed.
    pub fn new() -> Option<NonNull<Self>> {
        let data_page_count = (M * N + 0xFFF) / 0x1000;
        let info_page_count = (size_of::<Self>() + 0xFFF) / 0x1000;
        let data = util::map_pages(data_page_count)?;
        let info = match util::map_pages(info_page_count) {
            Some(info) => info,
            None => {
                // Don't leak the data pages if the metadata mapping fails.
                util::unmap_pages(data, data_page_count);
                return None;
            }
        };
        // SAFETY: `info` points to freshly mapped, writable memory large
        // enough for `Self` (size was rounded up to whole pages above).
        let bucket = unsafe { info.cast::<MaybeUninit<Self>>().as_mut() };
        let bucket = bucket.write(Self {
            data,
            bitmap: [0; M / 64],
            allocated_count: 0,
            next: None,
        });
        Some(bucket.into())
    }

    /// Claims the first free slot and returns its address, or `None` when
    /// the bucket is full.
    pub fn allocate(&mut self) -> Option<NonNull<u8>> {
        for i in 0..self.bitmap.len() {
            for j in 0..64 {
                if self.bitmap[i] & (1 << j) != 0 {
                    continue;
                }
                self.bitmap[i] |= 1 << j;
                self.allocated_count += 1;
                // SAFETY: slot index is < M, so the offset stays inside the
                // data mapping created in `new`.
                return Some(unsafe { self.data.add_ext((i * 64 + j) * N) });
            }
        }
        None
    }

    /// Releases `ptr` if it belongs to this bucket.
    ///
    /// Returns `(owned, empty)`: `owned` is whether the pointer was inside
    /// this bucket (and thus freed); `empty` is whether the bucket has no
    /// remaining allocations afterwards.
    ///
    /// # Panics
    /// Panics when `ptr` is inside this bucket but its slot is already free
    /// (double free).
    pub fn free(&mut self, ptr: NonNull<u8>) -> (bool, bool) {
        if ptr.addr() < self.data.addr() {
            return (false, false);
        }
        let offset = (usize::from(ptr.addr()) - usize::from(self.data.addr())) / N;
        if offset >= M {
            return (false, false);
        }
        let index = offset / 64;
        let bit = offset % 64;
        if self.bitmap[index] & (1 << bit) == 0 {
            panic!(
                "Possible double free detected: pointer {:p} from bucket {}x{}B, index {}:{}",
                ptr, M, N, index, bit
            );
        }
        self.bitmap[index] &= !(1 << bit);
        self.allocated_count -= 1;
        (true, self.allocated_count == 0)
    }
}
#[cfg(test)]
mod tests {
    use core::ptr::NonNull;
    use crate::{bucket::Bucket, util::NonNullExt};

    /// A fresh bucket starts empty and unlinked.
    #[test]
    fn bucket_creation() {
        let mut bucket = Bucket::<32, 64>::new().unwrap();
        let bucket = unsafe { bucket.as_mut() };
        assert_eq!(bucket.allocated_count, 0);
        assert_eq!(bucket.next, None);
    }

    /// Fills a bucket completely, then frees every slot in order, checking
    /// slot addresses and the `(owned, empty)` result of each free.
    #[test]
    fn bucket_allocation() {
        let mut bucket = Bucket::<32, 64>::new().unwrap();
        let bucket = unsafe { bucket.as_mut() };
        let mut vec = vec![];
        let mut index = 0;
        loop {
            if let Some(ptr) = bucket.allocate() {
                if index == bucket.bitmap.len() * 64 {
                    panic!("WTF");
                }
                vec.push(ptr);
            } else {
                break;
            }
            index += 1;
        }
        assert_eq!(bucket.allocated_count, index);
        assert_eq!(index, bucket.bitmap.len() * 64);
        for (i, item) in vec.into_iter().enumerate() {
            assert_eq!(item, unsafe { bucket.data.add_ext(i * 32) });
            let last = i == bucket.bitmap.len() * 64 - 1;
            {
                let mut slice = NonNull::slice_from_raw_parts(item, 32);
                let slice = unsafe { slice.as_mut() };
                slice.fill(123);
            }
            assert_eq!(bucket.free(item), (true, last));
        }
    }

    /// Freeing a pointer into the wrong bucket must be reported as
    /// not-owned without touching that bucket's state.
    #[test]
    fn free_outside_of_bucket() {
        let mut bucket0 = Bucket::<32, 64>::new().unwrap();
        let mut bucket1 = Bucket::<64, 64>::new().unwrap();
        let bucket0 = unsafe { bucket0.as_mut() };
        let bucket1 = unsafe { bucket1.as_mut() };
        let ptr0 = bucket0.allocate().unwrap();
        let ptr1 = bucket1.allocate().unwrap();
        assert_eq!(bucket1.free(ptr0), (false, false));
        assert_eq!(bucket0.free(ptr1), (false, false));
        assert_eq!(bucket0.free(ptr0), (true, true));
        assert_eq!(bucket1.free(ptr1), (true, true));
    }

    /// The second free of the same slot must trigger the bucket's
    /// double-free panic.
    #[test]
    #[should_panic]
    fn double_free() {
        let mut bucket0 = Bucket::<32, 64>::new().unwrap();
        let bucket0 = unsafe { bucket0.as_mut() };
        let ptr = bucket0.allocate().unwrap();
        // Fix: the original asserted `(false, false)` here. The first free of
        // a fresh allocation actually returns `(true, true)`, so that assert
        // itself panicked and satisfied #[should_panic] without ever reaching
        // the second `free` — the double-free detection was never exercised.
        assert_eq!(bucket0.free(ptr), (true, true));
        bucket0.free(ptr);
    }
}

View File

@ -0,0 +1,40 @@
use core::{
alloc::{AllocError, Allocator, GlobalAlloc, Layout},
ptr::{null_mut, NonNull},
};
use crate::{allocator::BucketAllocator, util::Spinlock};
/// Zero-sized handle exposing the process-wide bucket allocator through
/// the `GlobalAlloc` and `Allocator` traits.
pub struct GlobalAllocator;
unsafe impl GlobalAlloc for GlobalAllocator {
    /// Allocates from the shared allocator, translating a failed allocation
    /// into the null pointer required by the `GlobalAlloc` contract.
    #[inline]
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        match GLOBAL_ALLOCATOR.lock().allocate(layout) {
            Some(ptr) => ptr.as_ptr(),
            None => null_mut(),
        }
    }

    /// Returns a block to the shared allocator; `ptr` must not be null.
    #[inline]
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        let ptr = NonNull::new(ptr).expect("Invalid pointer");
        GLOBAL_ALLOCATOR.lock().free(ptr, layout);
    }
}
unsafe impl Allocator for GlobalAllocator {
    /// Allocates `layout` from the shared allocator and returns a fat
    /// pointer spanning the requested size.
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        let raw = GLOBAL_ALLOCATOR
            .lock()
            .allocate(layout)
            .ok_or(AllocError)?;
        Ok(NonNull::slice_from_raw_parts(raw, layout.size()))
    }

    /// Returns a previously allocated block to the shared allocator.
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        GLOBAL_ALLOCATOR.lock().free(ptr, layout);
    }
}
/// Process-wide allocator state, guarded by a spinlock so the zero-sized
/// `GlobalAllocator` handles can share it.
static GLOBAL_ALLOCATOR: Spinlock<BucketAllocator> = Spinlock::new(BucketAllocator::new());

20
lib/libyalloc/src/lib.rs Normal file
View File

@ -0,0 +1,20 @@
#![feature(
generic_const_exprs,
arbitrary_self_types,
strict_provenance,
exposed_provenance,
let_chains,
test,
allocator_api
)]
#![cfg_attr(not(test), no_std)]
#![allow(incomplete_features)]
#![deny(fuzzy_provenance_casts, lossy_provenance_casts)]
#[cfg(test)]
extern crate test;
pub mod allocator;
mod bucket;
pub mod global;
mod util;

123
lib/libyalloc/src/util.rs Normal file
View File

@ -0,0 +1,123 @@
use core::{
cell::UnsafeCell,
ops::{Deref, DerefMut},
ptr::NonNull,
sync::atomic::{AtomicBool, Ordering},
};
/// Page size assumed for all mappings (4 KiB).
pub const PAGE_SIZE: usize = 0x1000;

/// Compile-time boolean assertion carrier: `Assert<{ EXPR }>` only
/// implements [`IsTrue`] when `EXPR` evaluates to `true`, so an
/// `Assert<{ … }>: IsTrue` bound rejects invalid const parameters.
pub enum Assert<const T: bool> {}
/// Marker trait implemented solely for `Assert<true>`.
pub trait IsTrue {}
impl IsTrue for Assert<true> {}

/// Byte-offset helper for `NonNull` pointers.
pub trait NonNullExt<T> {
    /// Offsets the pointer by `offset` elements.
    ///
    /// # Safety
    /// Same contract as raw-pointer `add`: the result must stay within the
    /// same allocated object and must not wrap.
    unsafe fn add_ext(self, offset: usize) -> Self;
}
impl<T> NonNullExt<T> for NonNull<T> {
    unsafe fn add_ext(self, offset: usize) -> Self {
        // SAFETY: the caller guarantees the offset stays inside the same
        // allocation, so the result of `add` remains non-null.
        NonNull::new_unchecked(self.as_ptr().add(offset))
    }
}
/// Minimal test-and-set spinlock usable in `no_std` context.
pub struct Spinlock<T: ?Sized> {
    /// `true` while some `SpinlockGuard` holds the lock.
    state: AtomicBool,
    data: UnsafeCell<T>,
}

/// RAII guard granting access to the locked data; releases the lock on drop.
pub struct SpinlockGuard<'a, T: ?Sized> {
    lock: &'a Spinlock<T>,
}
/// Maps `count` fresh anonymous read/write pages, returning the base
/// address or `None` when the mapping fails.
pub fn map_pages(count: usize) -> Option<NonNull<u8>> {
    #[cfg(unix)]
    {
        use core::ptr::null_mut;
        let address = unsafe {
            libc::mmap(
                null_mut(),
                count * PAGE_SIZE,
                libc::PROT_READ | libc::PROT_WRITE,
                libc::MAP_ANONYMOUS | libc::MAP_PRIVATE,
                -1,
                0,
            )
        };
        // Fix: mmap reports failure with MAP_FAILED (-1), not a null
        // pointer, so the original `NonNull::new(address)` check would hand
        // out a bogus "valid" pointer on allocation failure.
        if address == libc::MAP_FAILED {
            return None;
        }
        NonNull::new(address as *mut u8)
    }
    #[cfg(not(unix))]
    {
        use yggdrasil_rt::mem::MappingSource;
        let address = unsafe {
            yggdrasil_rt::sys::map_memory(None, count * PAGE_SIZE, &MappingSource::Anonymous)
        }
        .ok()?;
        NonNull::new(core::ptr::from_exposed_addr_mut(address))
    }
}
/// Unmaps `count` pages previously obtained from `map_pages`, starting at
/// `address` (which must be page-aligned).
pub fn unmap_pages(address: NonNull<u8>, count: usize) {
    #[cfg(unix)]
    unsafe {
        // NOTE(review): munmap's return value is ignored, so a failed unmap
        // (e.g. a bad address) goes unnoticed on unix hosts.
        libc::munmap(address.as_ptr() as _, count * PAGE_SIZE);
    }
    #[cfg(not(unix))]
    {
        unsafe {
            yggdrasil_rt::sys::unmap_memory(address.addr().into(), count * PAGE_SIZE).unwrap();
        }
    }
}
impl<T: ?Sized> Spinlock<T> {
    /// Wraps `value` in an unlocked spinlock.
    pub const fn new(value: T) -> Self
    where
        T: Sized,
    {
        Self {
            state: AtomicBool::new(false),
            data: UnsafeCell::new(value),
        }
    }

    /// Busy-waits until the lock is acquired, then returns a guard that
    /// releases it when dropped.
    pub fn lock<'a>(&'a self) -> SpinlockGuard<'a, T> {
        loop {
            let acquired = self
                .state
                .compare_exchange(false, true, Ordering::Acquire, Ordering::Relaxed)
                .is_ok();
            if acquired {
                // Lock is ours; the guard's Drop releases it.
                return SpinlockGuard { lock: self };
            }
            core::hint::spin_loop();
        }
    }
}
impl<'a, T: ?Sized> Deref for SpinlockGuard<'a, T> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        // SAFETY: the guard's existence proves the lock is held, so access
        // to the protected data is exclusive.
        unsafe { &*self.lock.data.get() }
    }
}

impl<'a, T: ?Sized> DerefMut for SpinlockGuard<'a, T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: the guard's existence proves the lock is held, so access
        // to the protected data is exclusive.
        unsafe { &mut *self.lock.data.get() }
    }
}

impl<'a, T: ?Sized> Drop for SpinlockGuard<'a, T> {
    fn drop(&mut self) {
        // Release ordering pairs with the Acquire in `Spinlock::lock`.
        self.lock.state.store(false, Ordering::Release);
    }
}
// SAFETY: the lock serializes all access to `data`. NOTE(review): there is
// no `T: Send` bound here, so a `Spinlock<T>` can hand non-Send data to
// another thread; here it guards `BucketAllocator` (which holds raw
// `NonNull` pointers) — confirm this relaxation is intentional.
unsafe impl<T: ?Sized> Sync for Spinlock<T> {}