334 lines
9.4 KiB
Rust
334 lines
9.4 KiB
Rust
#![feature(
|
|
strict_provenance,
|
|
maybe_uninit_slice,
|
|
slice_ptr_get,
|
|
step_trait,
|
|
const_trait_impl,
|
|
maybe_uninit_as_bytes
|
|
)]
|
|
#![no_std]
|
|
|
|
extern crate alloc;
|
|
|
|
use core::{
|
|
alloc::Layout,
|
|
fmt,
|
|
mem::{size_of, MaybeUninit},
|
|
ops::{Deref, DerefMut},
|
|
};
|
|
|
|
use address::Virtualize;
|
|
use libk_mm_interface::{
|
|
address::{AsPhysicalAddress, PhysicalAddress},
|
|
table::{MapAttributes, TableAllocator},
|
|
};
|
|
use yggdrasil_abi::error::Error;
|
|
|
|
pub mod address;
|
|
pub mod device;
|
|
pub mod phys;
|
|
pub mod pointer;
|
|
pub mod process;
|
|
|
|
#[cfg(any(target_os = "none", rust_analyzer))]
|
|
pub mod heap;
|
|
|
|
pub use libk_mm_interface::table;
|
|
|
|
/// Page-table allocator backed by the kernel's physical memory manager.
pub struct TableAllocatorImpl;

impl TableAllocator for TableAllocatorImpl {
    /// Allocates a single physical page to hold a translation table.
    fn allocate_page_table() -> Result<PhysicalAddress, Error> {
        phys::alloc_page()
    }

    /// Returns a page-table page to the physical page allocator.
    ///
    /// # Safety
    ///
    /// `address` must have been obtained from [Self::allocate_page_table] and
    /// must no longer be referenced by any live translation table.
    unsafe fn free_page_table(address: PhysicalAddress) {
        phys::free_page(address)
    }
}
|
|
|
|
// TODO find a way to integrate this nicely with Architecture?
/// Size of a level-3 (smallest) page: 4 KiB. Used as the allocation granule
/// throughout this crate (e.g. by [PageBox]).
pub const L3_PAGE_SIZE: usize = 1 << 12;
/// Size of a level-2 ("large") page: 2 MiB.
pub const L2_PAGE_SIZE: usize = 1 << 21;
|
|
|
|
/// Interface for objects that provide physical pages backing byte offsets
/// within themselves — presumably mapped objects such as files or devices
/// (NOTE(review): confirm against implementors elsewhere in the tree).
pub trait PageProvider {
    /// Returns the physical address of the page backing byte `offset` of the object.
    fn get_page(&self, offset: u64) -> Result<PhysicalAddress, Error>;
    /// Releases the page `phys`, previously obtained via [Self::get_page] for
    /// the same `offset`.
    fn release_page(&self, offset: u64, phys: PhysicalAddress) -> Result<(), Error>;
    /// Produces a page to replace `src_phys` at `offset`, given the attributes
    /// the source page was mapped with — e.g. for copy-on-write duplication
    /// (NOTE(review): exact clone semantics depend on the implementor).
    fn clone_page(
        &self,
        offset: u64,
        src_phys: PhysicalAddress,
        src_attrs: MapAttributes,
    ) -> Result<PhysicalAddress, Error>;
}
|
|
|
|
/// Owning, `Box`-like smart pointer whose contents live in dedicated
/// physically-contiguous pages obtained from the [phys] allocator.
///
/// The backing pages are freed (and the value dropped in place) when the
/// [PageBox] is dropped.
pub struct PageBox<T: ?Sized> {
    // Virtualized (direct-mapped) pointer to the stored value
    value: *mut T,
    // Number of L3 pages backing `value`; released individually on drop
    page_count: usize,
}
|
|
|
|
impl<T> PageBox<T> {
|
|
#[inline]
|
|
fn alloc_slice(count: usize, zeroed: bool) -> Result<(PhysicalAddress, usize), Error> {
|
|
// TODO hardcoded page sizes
|
|
let layout = Layout::array::<T>(count).unwrap();
|
|
let page_count = (layout.size() + L3_PAGE_SIZE - 1) / L3_PAGE_SIZE;
|
|
let base = phys::alloc_pages_contiguous(page_count)?;
|
|
if zeroed {
|
|
let ptr = base.virtualize() as *mut u8;
|
|
let slice = unsafe { core::slice::from_raw_parts_mut(ptr, page_count * L3_PAGE_SIZE) };
|
|
slice.fill(0);
|
|
}
|
|
Ok((base, page_count))
|
|
}
|
|
|
|
#[inline]
|
|
fn alloc() -> Result<(PhysicalAddress, usize), Error> {
|
|
let page_count = (size_of::<T>() + L3_PAGE_SIZE - 1) / L3_PAGE_SIZE;
|
|
Ok((phys::alloc_pages_contiguous(page_count)?, page_count))
|
|
}
|
|
|
|
pub fn new(init: T) -> Result<PageBox<T>, Error> {
|
|
let (base, page_count) = Self::alloc()?;
|
|
let value = base.virtualize() as *mut T;
|
|
|
|
unsafe {
|
|
value.write(init);
|
|
}
|
|
|
|
let result = PageBox { value, page_count };
|
|
result.trace_created();
|
|
Ok(result)
|
|
}
|
|
|
|
pub fn new_slice(item: T, count: usize) -> Result<PageBox<[T]>, Error>
|
|
where
|
|
T: Copy,
|
|
{
|
|
let (base, page_count) = Self::alloc_slice(count, false)?;
|
|
let base_virt_ptr = base.virtualize() as *mut T;
|
|
let value = core::ptr::slice_from_raw_parts_mut(base_virt_ptr, count);
|
|
|
|
for i in 0..count {
|
|
unsafe {
|
|
value.get_unchecked_mut(i).write(item);
|
|
}
|
|
}
|
|
|
|
let result = PageBox { value, page_count };
|
|
result.trace_created();
|
|
Ok(result)
|
|
}
|
|
|
|
pub fn new_slice_with<F: Fn(usize) -> T>(f: F, count: usize) -> Result<PageBox<[T]>, Error> {
|
|
let mut value = Self::new_uninit_slice(count)?;
|
|
|
|
for i in 0..count {
|
|
value[i].write(f(i));
|
|
}
|
|
|
|
Ok(unsafe { value.assume_init_slice() })
|
|
}
|
|
|
|
pub fn new_uninit() -> Result<PageBox<MaybeUninit<T>>, Error> {
|
|
let (base, page_count) = PageBox::<MaybeUninit<T>>::alloc()?;
|
|
let value = base.virtualize() as *mut MaybeUninit<T>;
|
|
let result = PageBox { value, page_count };
|
|
result.trace_created();
|
|
Ok(result)
|
|
}
|
|
|
|
pub fn new_uninit_slice(count: usize) -> Result<PageBox<[MaybeUninit<T>]>, Error> {
|
|
let (base, page_count) = PageBox::<MaybeUninit<T>>::alloc_slice(count, false)?;
|
|
let base_virt_ptr = base.virtualize() as *mut MaybeUninit<T>;
|
|
let value = core::ptr::slice_from_raw_parts_mut(base_virt_ptr, count);
|
|
let result = PageBox { value, page_count };
|
|
result.trace_created();
|
|
Ok(result)
|
|
}
|
|
|
|
pub fn new_zeroed_slice(count: usize) -> Result<PageBox<[MaybeUninit<T>]>, Error> {
|
|
let (base, page_count) = PageBox::<MaybeUninit<T>>::alloc_slice(count, true)?;
|
|
let base_virt_ptr = base.virtualize() as *mut MaybeUninit<T>;
|
|
let value = core::ptr::slice_from_raw_parts_mut(base_virt_ptr, count);
|
|
let result = PageBox { value, page_count };
|
|
result.trace_created();
|
|
Ok(result)
|
|
}
|
|
}
|
|
|
|
impl<T: ?Sized> PageBox<T> {
|
|
#[inline]
|
|
pub fn as_ptr(&self) -> *const T {
|
|
self.value as _
|
|
}
|
|
|
|
#[inline]
|
|
fn trace_created(&self) {
|
|
log::trace!(
|
|
"Alloc PageBox<{}> @ {:p}, {}",
|
|
core::any::type_name::<T>(),
|
|
self.value,
|
|
self.page_count
|
|
);
|
|
}
|
|
|
|
#[inline]
|
|
fn trace_dropped(&self) {
|
|
log::trace!(
|
|
"Free PageBox<{}> @ {:p}, {}",
|
|
core::any::type_name::<T>(),
|
|
self.value,
|
|
self.page_count
|
|
);
|
|
}
|
|
}
|
|
|
|
impl<T> PageBox<[T]> {
|
|
pub fn from_iter_exact<I: IntoIterator<Item = T>>(it: I) -> Result<Self, Error>
|
|
where
|
|
I::IntoIter: ExactSizeIterator,
|
|
{
|
|
let it = it.into_iter();
|
|
let mut slice = PageBox::new_uninit_slice(it.len())?;
|
|
for (i, item) in it.enumerate() {
|
|
slice[i].write(item);
|
|
}
|
|
let slice = unsafe { slice.assume_init_slice() };
|
|
Ok(slice)
|
|
}
|
|
}
|
|
|
|
impl<T> PageBox<MaybeUninit<T>> {
|
|
/// Consumes the [PageBox], returning a new one with [MaybeUninit] removed.
|
|
///
|
|
/// # Safety
|
|
///
|
|
/// See [MaybeUninit::assume_init_mut].
|
|
pub unsafe fn assume_init(self) -> PageBox<T> {
|
|
// SAFETY: Memory-safe, as:
|
|
// 1. MaybeUninit<T> is transparent
|
|
// 2. self.value still points to the same memory and is not deallocated
|
|
let page_count = self.page_count;
|
|
let value = MaybeUninit::assume_init_mut(&mut *self.value);
|
|
|
|
// Prevent deallocation of the PageBox with MaybeUninit
|
|
core::mem::forget(self);
|
|
|
|
PageBox { value, page_count }
|
|
}
|
|
}
|
|
|
|
impl<T> PageBox<[MaybeUninit<T>]> {
|
|
/// Consumes the [PageBox], returning a new one with [MaybeUninit] removed.
|
|
///
|
|
/// # Safety
|
|
///
|
|
/// See [MaybeUninit::slice_assume_init_mut].
|
|
pub unsafe fn assume_init_slice(self) -> PageBox<[T]> {
|
|
// SAFETY: Memory-safe, as:
|
|
// 1. MaybeUninit<T> is transparent
|
|
// 2. self.value still points to the same memory and is not deallocated
|
|
let page_count = self.page_count;
|
|
let value = MaybeUninit::slice_assume_init_mut(&mut *self.value);
|
|
|
|
core::mem::forget(self);
|
|
|
|
PageBox { value, page_count }
|
|
}
|
|
|
|
/// Returns a reference to the slice data with [MaybeUninit] removed.
|
|
///
|
|
/// # Safety
|
|
///
|
|
/// See [MaybeUninit::slice_assume_init_ref]
|
|
pub unsafe fn assume_init_slice_ref(&self) -> &[T] {
|
|
MaybeUninit::slice_assume_init_ref(self.deref())
|
|
}
|
|
|
|
/// Returns a mutable reference to the slice data with [MaybeUninit] removed.
|
|
///
|
|
/// # Safety
|
|
///
|
|
/// See [MaybeUninit::slice_assume_init_mut]
|
|
pub unsafe fn assume_init_slice_mut(&mut self) -> &mut [T] {
|
|
MaybeUninit::slice_assume_init_mut(self.deref_mut())
|
|
}
|
|
|
|
/// Fills a slice of MaybeUninit<T> with zeroes.
|
|
///
|
|
/// # Safety
|
|
///
|
|
/// Unsafe: will not drop possibly previously written data. Only meant for [Copy] and other
|
|
/// trivial types.
|
|
pub unsafe fn zero(p: &mut Self) {
|
|
let ptr = p.as_mut_ptr() as *mut u8;
|
|
let slice = core::slice::from_raw_parts_mut(ptr, p.page_count * L3_PAGE_SIZE);
|
|
slice.fill(0);
|
|
}
|
|
}
|
|
|
|
impl<T: ?Sized> AsPhysicalAddress for PageBox<T> {
|
|
#[inline]
|
|
unsafe fn as_physical_address(&self) -> PhysicalAddress {
|
|
PhysicalAddress::from_virtualized(self.value.addr())
|
|
}
|
|
}
|
|
|
|
impl<T: ?Sized> Deref for PageBox<T> {
    type Target = T;

    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        // SAFETY: `value` points to a live allocation owned by this box; the
        // constructors initialize it before the box is handed out
        unsafe { &*self.value }
    }
}
|
|
|
|
impl<T: ?Sized> DerefMut for PageBox<T> {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: `value` points to a live allocation uniquely owned by this
        // box, and `&mut self` guarantees exclusive access
        unsafe { &mut *self.value }
    }
}
|
|
|
|
impl<T: ?Sized> Drop for PageBox<T> {
    // Order matters here: log, run the value's destructor in place, then
    // return the backing pages to the physical allocator.
    fn drop(&mut self) {
        self.trace_dropped();
        unsafe {
            core::ptr::drop_in_place(self.value);
        }
        // SAFETY: Safe, pointer obtained through "virtualize"
        let base = PhysicalAddress::from_virtualized(self.value.addr());
        // NOTE(review): pages were allocated via alloc_pages_contiguous but are
        // freed one at a time — assumes the phys allocator permits per-page
        // release of a contiguous run; confirm against phys::free_page
        for i in 0..self.page_count {
            // SAFETY: Safe, page allocated only by this PageBox
            unsafe {
                phys::free_page(base.add(L3_PAGE_SIZE * i));
            }
        }
    }
}
|
|
|
|
impl<T: ?Sized> fmt::Pointer for PageBox<T> {
|
|
#[inline]
|
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
self.value.fmt(f)
|
|
}
|
|
}
|
|
|
|
impl<T: ?Sized + fmt::Debug> fmt::Debug for PageBox<T> {
|
|
#[inline]
|
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
fmt::Debug::fmt(self.deref(), f)
|
|
}
|
|
}
|
|
|
|
impl<T: ?Sized + fmt::Display> fmt::Display for PageBox<T> {
|
|
#[inline]
|
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
fmt::Display::fmt(self.deref(), f)
|
|
}
|
|
}
|
|
|
|
// SAFETY: PageBox uniquely owns its allocation (pages are allocated in the
// constructors and freed only in Drop), so — like alloc::boxed::Box — sending
// or sharing the box is as safe as sending/sharing the contained T itself.
// NOTE(review): assumes no external aliasing of the virtualized pointer;
// confirm no other code retains pointers into these pages.
unsafe impl<T: ?Sized + Send> Send for PageBox<T> {}
unsafe impl<T: ?Sized + Sync> Sync for PageBox<T> {}
|