Initial commit

This commit is contained in:
Mark Poliakov 2023-12-18 14:32:27 +02:00
commit 98a651e8e4
8 changed files with 390 additions and 0 deletions

1
.gitignore vendored Normal file
View File

@ -0,0 +1 @@
/target

23
Cargo.lock generated Normal file
View File

@ -0,0 +1,23 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "libc"
version = "0.2.151"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "302d7ab3130588088d277783b1e2d2e10c9e9e4a16dd9050e6ec93fb3e7048f4"
[[package]]
name = "libyalloc"
version = "0.1.0"
dependencies = [
"libc",
"rustc-std-workspace-core",
]
[[package]]
name = "rustc-std-workspace-core"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1956f5517128a2b6f23ab2dadf1a976f4f5b27962e7724c2bf3d45e539ec098c"

18
Cargo.toml Normal file
View File

@ -0,0 +1,18 @@
[package]
name = "libyalloc"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
# Renamed rustc-std-workspace-core; pulled in only by the `rustc-dep-of-std`
# feature so this crate can be built as a dependency of std itself.
core = { version = "1.0.0", optional = true, package = "rustc-std-workspace-core" }
# libc is needed only on Unix hosts, where page mapping goes through mmap
# (see src/util.rs).
[target.'cfg(unix)'.dependencies]
libc = "0.2.151"
[features]
default = []
rustc-dep-of-std = [
    "core"
]

68
src/allocator.rs Normal file
View File

@ -0,0 +1,68 @@
use core::{
alloc::Layout,
ptr::{null_mut, NonNull},
};
use crate::bucket::BitmapBucketHeader;
// Data-area size, in bytes, of one bucket per allocation size class
// (the header computed in bucket.rs is added on top). Each bucket is split
// into fixed-size slots, e.g. BUCKET_SIZE_32 / 32 slots of 32 bytes.
// All values are multiples of the 4 KiB page size.
const BUCKET_SIZE_32: usize = 8192;
const BUCKET_SIZE_64: usize = 16384;
const BUCKET_SIZE_128: usize = 32768;
const BUCKET_SIZE_256: usize = 32768;
const BUCKET_SIZE_512: usize = 65536;
const BUCKET_SIZE_1024: usize = 65536;
/// Size-class allocator: one intrusive singly-linked list of bitmap buckets
/// per power-of-two size class (32..=1024 bytes). Lists start empty (null)
/// and buckets are prepended lazily when a class first runs out of slots.
pub struct Allocator {
    // Head pointers of the per-class bucket lists; null means "no bucket yet".
    buckets_32: *mut BitmapBucketHeader<32, { BUCKET_SIZE_32 / 32 }>,
    buckets_64: *mut BitmapBucketHeader<64, { BUCKET_SIZE_64 / 64 }>,
    buckets_128: *mut BitmapBucketHeader<128, { BUCKET_SIZE_128 / 128 }>,
    buckets_256: *mut BitmapBucketHeader<256, { BUCKET_SIZE_256 / 256 }>,
    buckets_512: *mut BitmapBucketHeader<512, { BUCKET_SIZE_512 / 512 }>,
    buckets_1024: *mut BitmapBucketHeader<1024, { BUCKET_SIZE_1024 / 1024 }>,
}
impl Allocator {
    /// Creates an allocator with every size-class list empty. `const fn` so
    /// it can initialize a static.
    pub const fn new() -> Self {
        Self {
            buckets_32: null_mut(),
            buckets_64: null_mut(),
            buckets_128: null_mut(),
            buckets_256: null_mut(),
            buckets_512: null_mut(),
            buckets_1024: null_mut(),
        }
    }

    /// Selects the size class serving `layout`.
    ///
    /// Slots in a class-`M` bucket start at multiples of `M` from a
    /// page-aligned base (header_size in bucket.rs rounds to a multiple of
    /// `M`), so a class-`M` slot is aligned to at least `M`. Taking
    /// `max(size, align)` therefore satisfies both the size and the
    /// alignment of the request, fixing the previously ignored alignment.
    fn size_class(layout: Layout) -> usize {
        layout.size().max(layout.align())
    }

    /// Allocates a block satisfying `layout`, or returns `None` when a fresh
    /// bucket cannot be mapped. Requests with size or alignment above 1024
    /// bytes have no bucket yet (`todo!`).
    pub fn alloc(&mut self, layout: Layout) -> Option<NonNull<u8>> {
        match Self::size_class(layout) {
            c if c > 1024 => todo!(), // No bucket
            c if c > 512 => BitmapBucketHeader::alloc_from_list(&mut self.buckets_1024),
            c if c > 256 => BitmapBucketHeader::alloc_from_list(&mut self.buckets_512),
            c if c > 128 => BitmapBucketHeader::alloc_from_list(&mut self.buckets_256),
            c if c > 64 => BitmapBucketHeader::alloc_from_list(&mut self.buckets_128),
            c if c > 32 => BitmapBucketHeader::alloc_from_list(&mut self.buckets_64),
            _ => BitmapBucketHeader::alloc_from_list(&mut self.buckets_32),
        }
    }

    /// Frees `ptr`, previously returned by [`Self::alloc`] with the same
    /// `layout`. Class selection mirrors `alloc` exactly, so the pointer is
    /// searched in the list it was allocated from; panics (in
    /// `free_from_list`) if it belongs to no bucket of that class.
    pub fn free(&mut self, ptr: NonNull<u8>, layout: Layout) {
        match Self::size_class(layout) {
            c if c > 1024 => todo!(), // No bucket
            c if c > 512 => BitmapBucketHeader::free_from_list(&mut self.buckets_1024, ptr),
            c if c > 256 => BitmapBucketHeader::free_from_list(&mut self.buckets_512, ptr),
            c if c > 128 => BitmapBucketHeader::free_from_list(&mut self.buckets_256, ptr),
            c if c > 64 => BitmapBucketHeader::free_from_list(&mut self.buckets_128, ptr),
            c if c > 32 => BitmapBucketHeader::free_from_list(&mut self.buckets_64, ptr),
            _ => BitmapBucketHeader::free_from_list(&mut self.buckets_32, ptr),
        }
    }
}
#[cfg(test)]
mod tests {
    // Stub: panics via todo!() until real multi-size allocation coverage is
    // written. Running `cargo test` will report this as a failure.
    #[test]
    fn multi_size_alloc() {
        todo!()
    }
}

145
src/bucket.rs Normal file
View File

@ -0,0 +1,145 @@
use core::{
mem::size_of,
ptr::{null_mut, NonNull},
};
use crate::util::{map_pages, Assert, IsTrue, PAGE_SIZE};
/// Word type backing the per-slot occupancy bitmap.
pub type BitmapWord = u64;

/// Number of `BitmapWord`s needed to hold one bit per element
/// (ceiling division by the word width).
pub const fn bitmap_size(element_count: usize) -> usize {
    (element_count + BitmapWord::BITS as usize - 1) / BitmapWord::BITS as usize
}

/// Byte offset of the slot data area inside a bucket: bitmap bytes plus the
/// two pointer-sized header fields (`count`, `next`), rounded up to a
/// multiple of `element_size` so every slot is aligned to the element size.
pub const fn header_size(element_count: usize, element_size: usize) -> usize {
    // Fix: bitmap_size() returns a count of *words*, so it must be scaled to
    // bytes before summing. The previous formula added the word count
    // directly, which could make the computed header smaller than the real
    // header struct (e.g. 256 slots of 32 bytes: 32 computed vs. 48 actual),
    // letting the first slots overlap the `count`/`next` fields.
    let bitmap_bytes = bitmap_size(element_count) * size_of::<BitmapWord>();
    (bitmap_bytes + 2 * size_of::<*mut ()>() + element_size - 1) & !(element_size - 1)
}
/// Total mapping size of one bucket — header plus slot data — rounded up to
/// a whole number of pages.
pub const fn bucket_size(element_count: usize, element_size: usize) -> usize {
    let unaligned = header_size(element_count, element_size) + element_count * element_size;
    // Round up to the next page boundary (PAGE_SIZE is a power of two).
    (unaligned + PAGE_SIZE - 1) & !(PAGE_SIZE - 1)
}
/// Header placed at the start of a bucket's own page-aligned mapping; the
/// slot data area follows at offset `header_size(N, M)`.
///
/// M - element (slot) size in bytes
/// N - element (slot) count
pub struct BitmapBucketHeader<const M: usize, const N: usize>
where
    // The slot data area must cover whole pages.
    Assert<{ (M * N) % PAGE_SIZE == 0 }>: IsTrue,
    [(); bitmap_size(N)]: Sized,
{
    // One bit per slot; a set bit means the slot is allocated.
    bitmap: [BitmapWord; bitmap_size(N)],
    // Number of currently allocated slots in this bucket.
    count: usize,
    // Next bucket of the same size class (intrusive singly-linked list).
    next: *mut Self,
}
impl<const M: usize, const N: usize> BitmapBucketHeader<M, N>
where
    Assert<{ (M * N) % PAGE_SIZE == 0 }>: IsTrue,
    [(); bitmap_size(N)]: Sized,
{
    /// Maps a fresh bucket and writes an initialized header at its base
    /// (all slots free, not linked into any list). Returns `None` when the
    /// underlying page mapping fails.
    ///
    /// NOTE(review): soundness relies on `header_size(N, M)` being at least
    /// `size_of::<Self>()`, otherwise the first slots overlap this header —
    /// worth a const assertion. Verify against header_size's formula.
    pub fn new_bucket() -> Option<*mut Self> {
        let size = bucket_size(N, M);
        let base = map_pages(size / PAGE_SIZE)? as *mut Self;
        unsafe {
            base.write(Self {
                bitmap: [0; bitmap_size(N)],
                count: 0,
                next: null_mut(),
            });
        }
        Some(base)
    }
    /// Claims the first free slot (linear scan of the bitmap) and returns a
    /// pointer into the data area; `None` when the bucket is full.
    /// Uses unstable `arbitrary_self_types` to take `self: *mut Self`.
    pub fn alloc(self: *mut Self) -> Option<NonNull<u8>> {
        let bucket = unsafe { &mut *self };
        for i in 0..N {
            // Word holding slot i's bit, and the bit's mask within it.
            let word = &mut bucket.bitmap[i / BitmapWord::BITS as usize];
            let mask = 1 << (i % BitmapWord::BITS as usize);
            if *word & mask == 0 {
                *word |= mask;
                let offset = M * i;
                bucket.count += 1;
                // Slot i lives at base + header_size + i * M; base came from
                // map_pages, so the result is never null.
                return Some(unsafe {
                    NonNull::new_unchecked((self as usize + header_size(N, M) + offset) as *mut u8)
                });
            }
        }
        None
    }
    /// Releases `ptr` if it falls inside this bucket's data area.
    ///
    /// Returns `(freed_here, now_empty)`: `(false, false)` when the pointer
    /// is outside this bucket (or the bucket holds no allocations at all);
    /// panics when the addressed slot is already free (double free).
    pub fn free(self: *mut Self, ptr: NonNull<u8>) -> (bool, bool) {
        let bucket = unsafe { &mut *self };
        if bucket.count == 0 {
            // Nothing allocated here, so ptr cannot belong to this bucket.
            return (false, false);
        }
        let self_addr = self.addr();
        let ptr_addr = usize::from(ptr.addr());
        // In-range check: offset is measured from the start of the data area
        // and must fall within the N * M slot bytes.
        if let Some(offset) = ptr_addr.checked_sub(self_addr + header_size(N, M))
            && offset < N * M
        {
            let index = offset / M;
            let word = &mut bucket.bitmap[index / BitmapWord::BITS as usize];
            let mask = 1 << (index % BitmapWord::BITS as usize);
            if *word & mask == 0 {
                panic!("TODO: double-free?");
            }
            *word &= !mask;
            bucket.count -= 1;
            (true, bucket.count == 0)
        } else {
            (false, false)
        }
    }
    /// Allocates from the first bucket in `list` with a free slot; if all are
    /// full (or the list is empty), maps a new bucket, prepends it to the
    /// list and allocates from it. `None` only when mapping fails.
    pub(crate) fn alloc_from_list(list: &mut *mut Self) -> Option<NonNull<u8>> {
        let mut bucket = *list;
        while !bucket.is_null() {
            if let Some(ptr) = bucket.alloc() {
                return Some(ptr);
            }
            bucket = unsafe { (*bucket).next };
        }
        // No bucket could allocate the request, make a new one
        let bucket = Self::new_bucket()?;
        unsafe { (*bucket).next = *list };
        *list = bucket;
        bucket.alloc()
    }
    /// Walks `list` and frees `ptr` from the bucket that owns it.
    /// Panics when no bucket in the list contains the pointer.
    pub(crate) fn free_from_list(list: &mut *mut Self, ptr: NonNull<u8>) {
        let mut bucket = *list;
        while !bucket.is_null() {
            let (here, bucket_empty) = bucket.free(ptr);
            if here {
                if bucket_empty {
                    // TODO mark bucket for removal?
                }
                return;
            }
            bucket = unsafe { (*bucket).next };
        }
        panic!("ptr did not belong to any bucket");
    }
}

29
src/global.rs Normal file
View File

@ -0,0 +1,29 @@
use core::{
alloc::{GlobalAlloc, Layout},
ptr::{null_mut, NonNull},
};
use crate::{util::Spinlock, Allocator};
/// Zero-sized `#[global_allocator]` front end; all state lives in the
/// process-wide spinlocked [`Allocator`] below.
pub struct GlobalAllocator;

unsafe impl GlobalAlloc for GlobalAllocator {
    /// Allocates via the shared allocator; a failed allocation is reported
    /// as a null pointer, per the `GlobalAlloc` contract.
    #[inline]
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        match GLOBAL_ALLOCATOR.lock().alloc(layout) {
            Some(block) => block.as_ptr(),
            None => null_mut(),
        }
    }
    /// Returns `ptr` to the shared allocator; panics on a null pointer,
    /// which callers must never pass here.
    #[inline]
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        let block = NonNull::new(ptr).expect("Invalid pointer");
        GLOBAL_ALLOCATOR.lock().free(block, layout);
    }
}

/// Process-wide allocator state, serialized by a spinlock.
static GLOBAL_ALLOCATOR: Spinlock<Allocator> = Spinlock::new(Allocator::new());

16
src/lib.rs Normal file
View File

@ -0,0 +1,16 @@
#![feature(
generic_const_exprs,
arbitrary_self_types,
strict_provenance,
let_chains
)]
#![cfg_attr(not(test), no_std)]
#![allow(incomplete_features)]
mod allocator;
mod bucket;
mod global;
mod util;
pub use allocator::Allocator;
pub use global::GlobalAllocator;

90
src/util.rs Normal file
View File

@ -0,0 +1,90 @@
use core::{
cell::UnsafeCell,
ops::{Deref, DerefMut},
ptr::null_mut,
sync::atomic::{AtomicBool, Ordering},
};
// Page granularity assumed for all mappings (4 KiB).
pub const PAGE_SIZE: usize = 0x1000;
// Compile-time boolean assertion: a `where Assert<{ COND }>: IsTrue` bound
// is only satisfiable when COND is true. The enum is uninhabited — it is
// never constructed, only used at the type level.
pub enum Assert<const T: bool> {}
pub trait IsTrue {}
impl IsTrue for Assert<true> {}
// Minimal busy-wait mutual-exclusion lock; `state == true` means locked.
pub struct Spinlock<T: ?Sized> {
    state: AtomicBool,
    data: UnsafeCell<T>,
}
// Proof of lock ownership; grants access to the data and releases the lock
// when dropped (see the Drop impl below).
pub struct SpinlockGuard<'a, T: ?Sized> {
    lock: &'a Spinlock<T>,
}
/// Maps `count` fresh, zeroed, read-write pages from the OS and returns the
/// base address, or `None` when the mapping fails.
///
/// Only implemented for Unix targets (via `mmap`); on other targets this
/// function body is empty and fails to compile, which is intentional until a
/// backend exists.
pub fn map_pages(count: usize) -> Option<usize> {
    #[cfg(unix)]
    {
        // SAFETY: anonymous private mapping — no file descriptor or offset
        // constraints; the kernel picks the address (hint is null).
        let address = unsafe {
            libc::mmap(
                null_mut(),
                count * PAGE_SIZE,
                libc::PROT_READ | libc::PROT_WRITE,
                libc::MAP_ANONYMOUS | libc::MAP_PRIVATE,
                -1,
                0,
            )
        };
        // Fix: mmap signals failure with MAP_FAILED ((void*)-1), never with a
        // null pointer, so the previous `is_null()` check could not detect an
        // error and would hand back Some(usize::MAX) on failure.
        if address == libc::MAP_FAILED {
            None
        } else {
            Some(address as usize)
        }
    }
}
impl<T: ?Sized> Spinlock<T> {
    /// Creates a new, unlocked spinlock wrapping `value`. `const fn` so it
    /// can initialize statics.
    pub const fn new(value: T) -> Self
    where
        T: Sized,
    {
        Self {
            state: AtomicBool::new(false),
            data: UnsafeCell::new(value),
        }
    }

    /// Busy-waits until the lock is acquired, then returns a guard that
    /// grants access to the data and releases the lock on drop.
    pub fn lock(&self) -> SpinlockGuard<T> {
        loop {
            // Attempt the false -> true transition; Acquire on success
            // synchronizes with the Release store of the previous unlock.
            let acquired = self
                .state
                .compare_exchange(false, true, Ordering::Acquire, Ordering::Relaxed)
                .is_ok();
            if acquired {
                return SpinlockGuard { lock: self };
            }
            core::hint::spin_loop();
        }
    }
}
impl<'a, T: ?Sized> Deref for SpinlockGuard<'a, T> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        // SAFETY: the guard exists only while the lock is held, so no other
        // guard can hand out a conflicting reference to `data`.
        unsafe { &*self.lock.data.get() }
    }
}
impl<'a, T: ?Sized> DerefMut for SpinlockGuard<'a, T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: as above; `&mut self` additionally guarantees this guard
        // itself holds no other outstanding borrow.
        unsafe { &mut *self.lock.data.get() }
    }
}
impl<'a, T: ?Sized> Drop for SpinlockGuard<'a, T> {
    fn drop(&mut self) {
        // Release the lock; Release ordering publishes the guarded writes to
        // the next thread that Acquires the lock.
        self.lock.state.store(false, Ordering::Release);
    }
}
// SAFETY: all access to `data` is serialized through `state`.
// NOTE(review): this impl has no `T: Send` bound, so a Spinlock of a
// non-Send type could be shared across threads and its value accessed from
// another thread (std's Mutex requires `T: Send` for `Sync`). Confirm
// whether a `T: Send` bound (plus a Send impl for Allocator) should be added.
unsafe impl<T: ?Sized> Sync for Spinlock<T> {}