496 lines
14 KiB
Rust
496 lines
14 KiB
Rust
#![feature(
|
|
maybe_uninit_slice,
|
|
slice_ptr_get,
|
|
step_trait,
|
|
const_trait_impl,
|
|
maybe_uninit_as_bytes,
|
|
negative_impls
|
|
)]
|
|
#![no_std]
|
|
|
|
extern crate alloc;
|
|
|
|
use core::{
|
|
alloc::Layout,
|
|
fmt,
|
|
marker::PhantomData,
|
|
mem::{size_of, MaybeUninit},
|
|
ops::{Deref, DerefMut},
|
|
slice::SliceIndex,
|
|
};
|
|
|
|
use address::Virtualize;
|
|
use kernel_arch::{mem::PhysicalMemoryAllocator, Architecture, ArchitectureImpl};
|
|
use libk_mm_interface::{
|
|
address::{AsPhysicalAddress, PhysicalAddress},
|
|
table::{MapAttributes, TableAllocator},
|
|
};
|
|
use phys::GlobalPhysicalAllocator;
|
|
use yggdrasil_abi::error::Error;
|
|
|
|
pub mod address;
|
|
pub mod device;
|
|
pub mod phys;
|
|
pub mod pointer;
|
|
pub mod process;
|
|
|
|
#[cfg(any(target_os = "none", rust_analyzer))]
|
|
pub mod heap;
|
|
|
|
pub use libk_mm_interface::table;
|
|
|
|
/// [TableAllocator] implementation backed by the kernel's global physical page allocator
/// (see [phys]).
pub struct TableAllocatorImpl;
|
|
|
|
impl TableAllocator for TableAllocatorImpl {
    /// Allocates a single physical page to hold a translation table.
    fn allocate_page_table() -> Result<PhysicalAddress, Error> {
        phys::alloc_page()
    }

    /// Releases a page previously obtained through [Self::allocate_page_table].
    ///
    /// # Safety
    ///
    /// `address` must have come from [Self::allocate_page_table] and must no longer be
    /// referenced by any live page table.
    unsafe fn free_page_table(address: PhysicalAddress) {
        phys::free_page(address)
    }
}
|
|
|
|
// TODO find a way to integrate this nicely with Architecture?
/// Size of a last-level (L3) translation page: 4 KiB.
pub const L3_PAGE_SIZE: usize = 1 << 12;

/// Size of a second-level (L2) "large" page: 2 MiB on non-x86 targets.
#[cfg(not(target_arch = "x86"))]
pub const L2_PAGE_SIZE: usize = 1 << 21;
/// Size of a second-level (L2) "large" page: 4 MiB on 32-bit x86.
#[cfg(target_arch = "x86")]
pub const L2_PAGE_SIZE: usize = 1 << 22;
|
|
|
|
/// Interface for objects that can hand out physical pages for a mapping, keyed by a byte
/// `offset` within the backing object.
pub trait PageProvider: Send + Sync {
    /// Returns the physical page backing `offset`.
    fn get_page(&self, offset: u64) -> Result<PhysicalAddress, Error>;
    /// Releases the page at `offset` previously obtained through [PageProvider::get_page].
    fn release_page(&self, offset: u64, phys: PhysicalAddress) -> Result<(), Error>;
    /// Produces a page to replace `src_phys` at `offset`, given the attributes it was
    /// mapped with (presumably used for copy-on-write; the exact policy is up to the
    /// implementor).
    fn clone_page(
        &self,
        offset: u64,
        src_phys: PhysicalAddress,
        src_attrs: MapAttributes,
    ) -> Result<PhysicalAddress, Error>;
}
|
|
|
|
/// Owning pointer to a value stored in dedicated physical pages, accessed through the
/// direct (virtualized) mapping. Pages are obtained from the physical allocator `A` and
/// released when the box is dropped.
pub struct PageBox<
    T: ?Sized,
    A: PhysicalMemoryAllocator<Address = PhysicalAddress> = GlobalPhysicalAllocator,
> {
    // Virtualized (direct-mapped) pointer to the stored value
    value: *mut T,
    // Number of L3 pages backing the value; consumed by Drop to free them
    page_count: usize,
    _pd: PhantomData<A>,
}
|
|
|
|
/// Unsized wrapper around a slice living in page-backed memory, allowing
/// physical-address queries on subslices.
pub struct PageSlice<T> {
    // Wrapped slice data; code elsewhere transmutes &[T] <-> &PageSlice<T>
    data: [T],
}
|
|
|
|
impl<T> PageBox<T, GlobalPhysicalAllocator> {
    /// Allocates pages from the global allocator and moves `init` into them.
    pub fn new(init: T) -> Result<PageBox<T>, Error> {
        PageBox::new_in(init)
    }

    /// Allocates a page-backed slice holding `count` copies of `item`.
    pub fn new_slice(item: T, count: usize) -> Result<PageBox<[T]>, Error>
    where
        T: Copy,
    {
        PageBox::new_slice_in(item, count)
    }

    /// Allocates a page-backed slice, initializing element `i` to `f(i)`.
    pub fn new_slice_with<F: Fn(usize) -> T>(f: F, count: usize) -> Result<PageBox<[T]>, Error> {
        PageBox::new_slice_in_with(f, count)
    }

    /// Allocates pages for a single, uninitialized `T`.
    pub fn new_uninit() -> Result<PageBox<MaybeUninit<T>>, Error> {
        PageBox::new_uninit_in()
    }

    /// Allocates pages for `count` uninitialized values.
    pub fn new_uninit_slice(count: usize) -> Result<PageBox<[MaybeUninit<T>]>, Error> {
        PageBox::new_uninit_slice_in(count)
    }

    /// Allocates pages for `count` values and zero-fills the whole backing pages.
    /// The elements remain [MaybeUninit]: an all-zero bit pattern is not necessarily a
    /// valid `T`.
    pub fn new_zeroed_slice(count: usize) -> Result<PageBox<[MaybeUninit<T>]>, Error> {
        let (base, page_count) = PageBox::<MaybeUninit<T>>::alloc_slice(count, true)?;
        let base_virt_ptr = base.virtualize() as *mut MaybeUninit<T>;
        let value = core::ptr::slice_from_raw_parts_mut(base_virt_ptr, count);
        let result = PageBox {
            value,
            page_count,
            _pd: PhantomData,
        };
        result.trace_created();
        Ok(result)
    }

    /// Reconstructs a [PageBox] from a physical address, taking ownership of the pages.
    ///
    /// # Safety
    ///
    /// `address` must point to a valid, live `T` previously laid out by a matching
    /// `PageBox` (e.g. released through [PageBox::into_physical_raw]); no other owner may
    /// remain.
    pub unsafe fn from_physical_raw(address: PhysicalAddress) -> PageBox<T> {
        PageBox::from_physical_raw_in(address)
    }
}
|
|
|
|
impl<T, A: PhysicalMemoryAllocator<Address = PhysicalAddress>> PageBox<T, A> {
    /// Allocates enough contiguous L3 pages to hold `count` elements of `T`, optionally
    /// zero-filling the entire allocation. Returns the physical base and the page count.
    #[inline]
    fn alloc_slice(count: usize, zeroed: bool) -> Result<(PhysicalAddress, usize), Error> {
        // TODO hardcoded page sizes
        // NOTE(review): unwrap() panics if the total array size overflows isize; callers
        // are expected to pass sane counts
        let layout = Layout::array::<T>(count).unwrap();
        let page_count = layout.size().div_ceil(L3_PAGE_SIZE);
        let base = A::allocate_contiguous_pages(page_count)?;
        if zeroed {
            let ptr = base.virtualize() as *mut u8;
            // Zero whole pages, not just layout.size() bytes
            let slice = unsafe { core::slice::from_raw_parts_mut(ptr, page_count * L3_PAGE_SIZE) };
            slice.fill(0);
        }
        Ok((base, page_count))
    }

    /// Allocates enough contiguous L3 pages to hold a single `T`.
    #[inline]
    fn alloc() -> Result<(PhysicalAddress, usize), Error> {
        let page_count = size_of::<T>().div_ceil(L3_PAGE_SIZE);
        let phys = A::allocate_contiguous_pages(page_count)?;
        Ok((phys, page_count))
    }

    /// Allocates pages from allocator `A` and moves `init` into them.
    pub fn new_in(init: T) -> Result<PageBox<T, A>, Error> {
        let (base, page_count) = PageBox::<T, A>::alloc()?;
        let value = base.virtualize() as *mut T;

        // Move init into the fresh pages without reading/dropping the uninitialized
        // destination
        unsafe {
            value.write(init);
        }

        let result = PageBox {
            value,
            page_count,
            _pd: PhantomData,
        };
        result.trace_created();
        Ok(result)
    }

    /// Allocates a page-backed slice of `count` copies of `item` from allocator `A`.
    pub fn new_slice_in(item: T, count: usize) -> Result<PageBox<[T], A>, Error>
    where
        T: Copy,
    {
        let (base, page_count) = PageBox::<T, A>::alloc_slice(count, false)?;
        let base_virt_ptr = base.virtualize() as *mut T;
        let value = core::ptr::slice_from_raw_parts_mut(base_virt_ptr, count);

        for i in 0..count {
            // SAFETY: i < count, and the allocation holds `count` elements
            unsafe {
                value.get_unchecked_mut(i).write(item);
            }
        }

        let result = PageBox {
            value,
            page_count,
            _pd: PhantomData,
        };
        result.trace_created();
        Ok(result)
    }

    /// Allocates a page-backed slice from allocator `A`, initializing element `i` to
    /// `f(i)`.
    pub fn new_slice_in_with<F: Fn(usize) -> T>(
        f: F,
        count: usize,
    ) -> Result<PageBox<[T], A>, Error> {
        let mut value = PageBox::<T, A>::new_uninit_slice_in(count)?;

        for i in 0..count {
            value[i].write(f(i));
        }

        // SAFETY: every element was initialized by the loop above
        Ok(unsafe { value.assume_init_slice() })
    }

    /// Allocates pages from allocator `A` for a single, uninitialized `T`.
    pub fn new_uninit_in() -> Result<PageBox<MaybeUninit<T>, A>, Error> {
        let (base, page_count) = PageBox::<MaybeUninit<T>, A>::alloc()?;
        let value = base.virtualize() as *mut MaybeUninit<T>;
        let result = PageBox {
            value,
            page_count,
            _pd: PhantomData,
        };
        result.trace_created();
        Ok(result)
    }

    /// Allocates pages from allocator `A` for `count` uninitialized values.
    pub fn new_uninit_slice_in(count: usize) -> Result<PageBox<[MaybeUninit<T>], A>, Error> {
        let (base, page_count) = PageBox::<MaybeUninit<T>, A>::alloc_slice(count, false)?;
        let base_virt_ptr = base.virtualize() as *mut MaybeUninit<T>;
        let value = core::ptr::slice_from_raw_parts_mut(base_virt_ptr, count);
        let result = PageBox {
            value,
            page_count,
            _pd: PhantomData,
        };
        result.trace_created();
        Ok(result)
    }

    /// Reconstructs a [PageBox] from a physical address, taking ownership of the pages.
    ///
    /// # Safety
    ///
    /// `address` must point to a valid, live `T` owned by no one else, laid out the way a
    /// matching `PageBox` would lay it out (see [PageBox::into_physical_raw]).
    pub unsafe fn from_physical_raw_in(address: PhysicalAddress) -> PageBox<T, A> {
        let page_count = size_of::<T>().div_ceil(L3_PAGE_SIZE);
        let value = address.virtualize() as *mut T;
        PageBox {
            value,
            page_count,
            _pd: PhantomData,
        }
    }
}
|
|
|
|
impl<T: ?Sized, A: PhysicalMemoryAllocator<Address = PhysicalAddress>> PageBox<T, A> {
    /// Returns the raw (virtualized) pointer to the stored value.
    #[inline]
    pub fn as_ptr(&self) -> *const T {
        self.value as _
    }

    /// Consumes the box and returns the physical address of its storage, without running
    /// the destructor or freeing the pages. Ownership can later be reclaimed with
    /// [PageBox::from_physical_raw].
    pub fn into_physical_raw(self) -> PhysicalAddress {
        let address = unsafe { self.as_physical_address() };
        // Leak on purpose: Drop must not free the pages we are handing out
        core::mem::forget(self);
        address
    }

    /// Logs allocation of this box (pointer and page count) at trace level.
    #[inline]
    fn trace_created(&self) {
        log::trace!(
            "Alloc PageBox<{}> @ {:p}, {}",
            core::any::type_name::<T>(),
            self.value,
            self.page_count
        );
    }

    /// Logs deallocation of this box (pointer and page count) at trace level.
    #[inline]
    fn trace_dropped(&self) {
        log::trace!(
            "Free PageBox<{}> @ {:p}, {}",
            core::any::type_name::<T>(),
            self.value,
            self.page_count
        );
    }
}
|
|
|
|
impl<T> PageBox<T> {
|
|
pub fn from_iter_exact<I: IntoIterator<Item = T>>(it: I) -> Result<PageBox<[T]>, Error>
|
|
where
|
|
I::IntoIter: ExactSizeIterator,
|
|
{
|
|
let it = it.into_iter();
|
|
let mut slice = PageBox::<T>::new_uninit_slice(it.len())?;
|
|
for (i, item) in it.enumerate() {
|
|
slice[i].write(item);
|
|
}
|
|
let slice = unsafe { slice.assume_init_slice() };
|
|
Ok(slice)
|
|
}
|
|
}
|
|
|
|
impl<T, A: PhysicalMemoryAllocator<Address = PhysicalAddress>> PageBox<[T], A> {
    /// Reinterprets the boxed slice as a [PageSlice].
    pub fn as_slice(&self) -> &PageSlice<T> {
        // SAFETY: relies on PageSlice<T> being a bare wrapper around [T] with identical
        // reference layout (single unsized field, no repr attribute — TODO confirm with
        // #[repr(transparent)])
        unsafe { core::mem::transmute(&self[..]) }
    }

    /// Reinterprets the boxed slice as a mutable [PageSlice].
    pub fn as_slice_mut(&mut self) -> &mut PageSlice<T> {
        // SAFETY: same layout argument as in as_slice()
        unsafe { core::mem::transmute(&mut self[..]) }
    }
}
|
|
|
|
impl<T, A: PhysicalMemoryAllocator<Address = PhysicalAddress>> PageBox<MaybeUninit<T>, A> {
    /// Consumes the [PageBox], returning a new one with [MaybeUninit] removed.
    ///
    /// # Safety
    ///
    /// See [MaybeUninit::assume_init_mut].
    pub unsafe fn assume_init(self) -> PageBox<T, A> {
        // SAFETY: Memory-safe, as:
        // 1. MaybeUninit<T> is transparent
        // 2. self.value still points to the same memory and is not deallocated
        let page_count = self.page_count;
        let value = MaybeUninit::assume_init_mut(&mut *self.value);

        // Prevent deallocation of the PageBox with MaybeUninit
        core::mem::forget(self);

        PageBox {
            value,
            page_count,
            _pd: PhantomData,
        }
    }

    /// Returns the value's storage as a mutable [PageSlice] of uninitialized bytes.
    /// The inner `p.as_bytes_mut()` resolves through Deref to
    /// [MaybeUninit::as_bytes_mut] (associated functions without `self` are not found by
    /// method-call syntax, so this does not recurse).
    pub fn as_bytes_mut(p: &mut Self) -> &mut PageSlice<MaybeUninit<u8>> {
        // SAFETY: PageSlice<MaybeUninit<u8>> wraps [MaybeUninit<u8>]; reference layouts
        // are assumed to match
        unsafe { core::mem::transmute(p.as_bytes_mut()) }
    }
}
|
|
|
|
impl<T, A: PhysicalMemoryAllocator<Address = PhysicalAddress>> PageBox<[MaybeUninit<T>], A> {
    /// Consumes the [PageBox], returning a new one with [MaybeUninit] removed.
    ///
    /// # Safety
    ///
    /// See [MaybeUninit::slice_assume_init_mut].
    pub unsafe fn assume_init_slice(self) -> PageBox<[T], A> {
        // SAFETY: Memory-safe, as:
        // 1. MaybeUninit<T> is transparent
        // 2. self.value still points to the same memory and is not deallocated
        let page_count = self.page_count;
        let value = MaybeUninit::slice_assume_init_mut(&mut *self.value);

        // Prevent Drop of the MaybeUninit box; ownership moves to the returned PageBox
        core::mem::forget(self);

        PageBox {
            value,
            page_count,
            _pd: PhantomData,
        }
    }

    /// Returns a reference to the slice data with [MaybeUninit] removed.
    ///
    /// # Safety
    ///
    /// See [MaybeUninit::slice_assume_init_ref]
    pub unsafe fn assume_init_slice_ref(&self) -> &[T] {
        MaybeUninit::slice_assume_init_ref(self.deref())
    }

    /// Returns a mutable reference to the slice data with [MaybeUninit] removed.
    ///
    /// # Safety
    ///
    /// See [MaybeUninit::slice_assume_init_mut]
    pub unsafe fn assume_init_slice_mut(&mut self) -> &mut [T] {
        MaybeUninit::slice_assume_init_mut(self.deref_mut())
    }

    /// Fills a slice of MaybeUninit<T> with zeroes.
    ///
    /// # Safety
    ///
    /// Unsafe: will not drop possibly previously written data. Only meant for [Copy] and other
    /// trivial types.
    pub unsafe fn zero(p: &mut Self) {
        let ptr = p.as_mut_ptr() as *mut u8;
        // Zeroes the whole backing pages, not just the slice's `len * size_of::<T>()`
        // bytes — the allocation is always whole pages
        let slice = core::slice::from_raw_parts_mut(ptr, p.page_count * L3_PAGE_SIZE);
        slice.fill(0);
    }
}
|
|
|
|
impl<T: ?Sized, A: PhysicalMemoryAllocator<Address = PhysicalAddress>> AsPhysicalAddress
    for PageBox<T, A>
{
    #[inline]
    unsafe fn as_physical_address(&self) -> PhysicalAddress {
        // value was produced by virtualize(), so the mapping can be inverted here
        PhysicalAddress::from_virtualized(self.value.addr())
    }
}
|
|
|
|
impl<T: ?Sized, A: PhysicalMemoryAllocator<Address = PhysicalAddress>> Deref for PageBox<T, A> {
    type Target = T;

    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        // SAFETY: value is non-null and points to a live T for the lifetime of the box
        unsafe { &*self.value }
    }
}
|
|
|
|
impl<T: ?Sized, A: PhysicalMemoryAllocator<Address = PhysicalAddress>> DerefMut for PageBox<T, A> {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: exclusive access to self guarantees no aliasing of the stored value
        unsafe { &mut *self.value }
    }
}
|
|
|
|
impl<T: ?Sized, A: PhysicalMemoryAllocator<Address = PhysicalAddress>> Drop for PageBox<T, A> {
    fn drop(&mut self) {
        self.trace_dropped();
        // Run the stored value's destructor before releasing its backing pages
        unsafe {
            core::ptr::drop_in_place(self.value);
        }
        // SAFETY: Safe, pointer obtained through "virtualize"
        let base = PhysicalAddress::from_virtualized(self.value.addr());
        // NOTE(review): pages obtained via allocate_contiguous_pages are returned here
        // one at a time — assumes allocator `A` permits per-page frees of a contiguous
        // range; confirm against the allocator contract
        for i in 0..self.page_count {
            // SAFETY: Safe, page allocated only by this PageBox
            unsafe {
                A::free_page(base.add(L3_PAGE_SIZE * i));
            }
        }
    }
}
|
|
|
|
impl<T: ?Sized, A: PhysicalMemoryAllocator<Address = PhysicalAddress>> fmt::Pointer
|
|
for PageBox<T, A>
|
|
{
|
|
#[inline]
|
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
self.value.fmt(f)
|
|
}
|
|
}
|
|
|
|
impl<T: ?Sized + fmt::Debug, A: PhysicalMemoryAllocator<Address = PhysicalAddress>> fmt::Debug
|
|
for PageBox<T, A>
|
|
{
|
|
#[inline]
|
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
fmt::Debug::fmt(self.deref(), f)
|
|
}
|
|
}
|
|
|
|
impl<T: ?Sized + fmt::Display, A: PhysicalMemoryAllocator<Address = PhysicalAddress>> fmt::Display
|
|
for PageBox<T, A>
|
|
{
|
|
#[inline]
|
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
fmt::Display::fmt(self.deref(), f)
|
|
}
|
|
}
|
|
|
|
// SAFETY: PageBox uniquely owns its allocation; moving it across threads only moves the
// pointer, so it is Send whenever the stored value is Send
unsafe impl<T: ?Sized + Send, A: PhysicalMemoryAllocator<Address = PhysicalAddress>> Send
    for PageBox<T, A>
{
}
|
|
// SAFETY: shared references to PageBox only grant shared access to the stored value, so
// it is Sync whenever the stored value is Sync
unsafe impl<T: ?Sized + Sync, A: PhysicalMemoryAllocator<Address = PhysicalAddress>> Sync
    for PageBox<T, A>
{
}
|
|
|
|
impl<T> PageSlice<T> {
|
|
pub fn subslice_mut<R: SliceIndex<[T], Output = [T]>>(
|
|
&mut self,
|
|
index: R,
|
|
) -> &mut PageSlice<T> {
|
|
unsafe { core::mem::transmute(&mut self.data[index]) }
|
|
}
|
|
|
|
pub fn subslice<R: SliceIndex<[T], Output = [T]>>(&self, index: R) -> &PageSlice<T> {
|
|
unsafe { core::mem::transmute(&self.data[index]) }
|
|
}
|
|
}
|
|
|
|
impl<T> AsPhysicalAddress for PageSlice<T> {
    unsafe fn as_physical_address(&self) -> PhysicalAddress {
        // The slice lives in direct-mapped pages, so its virtual address can be
        // translated back to the physical one
        PhysicalAddress::from_virtualized(self.data.as_ptr().addr())
    }
}
|
|
|
|
impl<T> Deref for PageSlice<T> {
    type Target = [T];

    // Exposes the wrapped slice directly
    fn deref(&self) -> &Self::Target {
        &self.data
    }
}
|
|
|
|
impl<T> DerefMut for PageSlice<T> {
    // Exposes the wrapped slice directly
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.data
    }
}
|
|
|
|
pub fn flush_cache_data<T>(data: *const T) {
|
|
ArchitectureImpl::flush_virtual_range(data.addr()..data.addr() + size_of::<T>());
|
|
}
|
|
|
|
pub fn flush_cache_data_slice<T>(data: *const [T]) {
|
|
ArchitectureImpl::flush_virtual_range(data.addr()..data.addr() + size_of::<T>() * data.len());
|
|
}
|