//! Memory management utilities and types

use core::{alloc::Layout, ffi::c_void, mem::size_of};

use abi::error::Error;

use crate::arch::{Architecture, ArchitectureImpl /*, PlatformImpl*/};

use self::table::AddressSpace;

pub mod device;
pub mod heap;
pub mod phys;
pub mod table;

// Kernel's physical load address
// pub const KERNEL_PHYS_BASE: usize = PlatformImpl::KERNEL_PHYS_BASE;

/// Kernel's virtual memory mapping offset (i.e. the kernel's virtual address is
/// [KERNEL_PHYS_BASE] + [KERNEL_VIRT_OFFSET])
pub const KERNEL_VIRT_OFFSET: usize = ArchitectureImpl::KERNEL_VIRT_OFFSET;

/// Interface for converting between address spaces.
///
/// # Safety
///
/// An incorrect implementation can produce an invalid address.
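///
/// # Example
///
/// A minimal illustrative sketch (the physical address below is made up):
///
/// ```ignore
/// // Translate a physical frame address into the kernel's higher-half mapping
/// // before dereferencing it
/// let phys: usize = 0x8_0000;
/// let virt = unsafe { phys.virtualize() };
/// assert_eq!(virt, phys + KERNEL_VIRT_OFFSET);
/// ```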
pub unsafe trait ConvertAddress {
    /// Converts the address into a virtual one
    ///
    /// # Panics
    ///
    /// Panics if the address is already a virtual one
    ///
    /// # Safety
    ///
    /// An incorrect implementation can produce an invalid address.
    unsafe fn virtualize(self) -> Self;

    /// Converts the address into a physical one
    ///
    /// # Panics
    ///
    /// Panics if the address is already a physical one
    ///
    /// # Safety
    ///
    /// An incorrect implementation can produce an invalid address.
    unsafe fn physicalize(self) -> Self;
}

/// Helper trait to allow cross-address-space access to pointers
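///
/// # Example
///
/// A hypothetical syscall-style sketch; `buf` and `len` come from untrusted
/// userspace and `space` is the calling process's [AddressSpace] (the function
/// name and values are made up for illustration):
///
/// ```ignore
/// unsafe fn read_user_buffer<'a>(
///     space: &AddressSpace,
///     buf: *const u8,
///     len: usize,
/// ) -> Result<&'a [u8], Error> {
///     // Rejects NULL, misaligned, unmapped or kernel-space pointers
///     buf.validate_user_slice(len, space)
/// }
/// ```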
pub trait ForeignPointer: Sized {
    /// Performs a volatile pointer write without dropping the old value.
    ///
    /// # Panics
    ///
    /// The function panics if any of the following conditions is met:
    ///
    /// * The address of the pointer is not mapped in the `space`.
    /// * The pointer is not writable.
    /// * The pointer is misaligned.
    ///
    /// # Safety
    ///
    /// As this function allows direct memory writes, it is inherently unsafe.
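    ///
    /// # Example
    ///
    /// A hypothetical sketch of storing a value into another process's memory
    /// (`child_space` and the address are made up for illustration):
    ///
    /// ```ignore
    /// let status = 0x1000_0000 as *mut u32; // user address mapped in `child_space`
    /// unsafe { status.write_foreign_volatile(&child_space, 0) };
    /// ```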
    unsafe fn write_foreign_volatile(self: *mut Self, space: &AddressSpace, value: Self);

    /// Performs pointer validation for the given address space:
    ///
    /// * Checks if the pointer has proper alignment for the type.
    /// * Checks if the pointer is mapped in the address space.
    /// * Checks if the pointer stays below the userspace memory boundary.
    ///
    /// # Safety
    ///
    /// Even though this function does the necessary checks, it is still a raw pointer to
    /// reference conversion, and thus is unsafe.
    unsafe fn validate_user_ptr<'a>(
        self: *const Self,
        space: &AddressSpace,
    ) -> Result<&'a Self, Error>;

    /// [ForeignPointer::validate_user_ptr], with an extra "writability" check.
    ///
    /// # Safety
    ///
    /// Even though this function does the necessary checks, it is still a raw pointer to
    /// reference conversion, and thus is unsafe.
    unsafe fn validate_user_mut<'a>(
        self: *mut Self,
        space: &AddressSpace,
    ) -> Result<&'a mut Self, Error>;

    /// [ForeignPointer::validate_user_ptr], but for slices
    ///
    /// # Safety
    ///
    /// Even though this function does the necessary checks, it is still a raw pointer to
    /// reference conversion, and thus is unsafe.
    unsafe fn validate_user_slice<'a>(
        self: *const Self,
        len: usize,
        space: &AddressSpace,
    ) -> Result<&'a [Self], Error>;

    /// [ForeignPointer::validate_user_slice], but for mutable slices
    ///
    /// # Safety
    ///
    /// Even though this function does the necessary checks, it is still a raw pointer to
    /// reference conversion, and thus is unsafe.
    unsafe fn validate_user_slice_mut<'a>(
        self: *mut Self,
        len: usize,
        space: &AddressSpace,
    ) -> Result<&'a mut [Self], Error>;
}

unsafe impl ConvertAddress for usize {
    #[inline(always)]
    unsafe fn virtualize(self) -> Self {
        // Sanity check (debug builds only): the address must not already be virtual
        #[cfg(debug_assertions)]
        if self > KERNEL_VIRT_OFFSET {
            todo!();
        }

        self + KERNEL_VIRT_OFFSET
    }

    #[inline(always)]
    unsafe fn physicalize(self) -> Self {
        // Sanity check (debug builds only): the address must already be virtual
        #[cfg(debug_assertions)]
        if self < KERNEL_VIRT_OFFSET {
            todo!();
        }

        self - KERNEL_VIRT_OFFSET
    }
}

unsafe impl<T> ConvertAddress for *mut T {
    #[inline(always)]
    unsafe fn virtualize(self) -> Self {
        (self as usize).virtualize() as Self
    }

    #[inline(always)]
    unsafe fn physicalize(self) -> Self {
        (self as usize).physicalize() as Self
    }
}

unsafe impl<T> ConvertAddress for *const T {
    #[inline(always)]
    unsafe fn virtualize(self) -> Self {
        (self as usize).virtualize() as Self
    }

    #[inline(always)]
    unsafe fn physicalize(self) -> Self {
        (self as usize).physicalize() as Self
    }
}

impl<T> ForeignPointer for T {
    unsafe fn write_foreign_volatile(self: *mut Self, space: &AddressSpace, value: T) {
        // TODO check align
        let addr = self as usize;
        let start_page = addr & !0xFFF;
        let end_page = (addr + size_of::<T>() - 1) & !0xFFF;
        let page_offset = addr & 0xFFF;

        if start_page != end_page {
            todo!("Foreign pointer write crossed a page boundary");
        }

        // Translate the page in the foreign address space into a physical address...
        let phys_page = space
            .translate(start_page)
            .expect("Address is not mapped in the target address space");

        // ...then access it through the kernel's virtual mapping of that physical page
        let virt_ptr = (phys_page + page_offset).virtualize() as *mut T;
        virt_ptr.write_volatile(value);
    }

    unsafe fn validate_user_slice_mut<'a>(
        self: *mut Self,
        len: usize,
        space: &AddressSpace,
    ) -> Result<&'a mut [Self], Error> {
        let base = self as usize;
        let layout = Layout::array::<T>(len).unwrap();

        validate_user_align_size(base, &layout)?;
        validate_user_region(space, base, layout.size(), true)?;

        Ok(core::slice::from_raw_parts_mut(self, len))
    }

    unsafe fn validate_user_slice<'a>(
        self: *const Self,
        len: usize,
        space: &AddressSpace,
    ) -> Result<&'a [Self], Error> {
        let base = self as usize;
        let layout = Layout::array::<T>(len).unwrap();

        validate_user_align_size(base, &layout)?;
        validate_user_region(space, base, layout.size(), false)?;

        Ok(core::slice::from_raw_parts(self, len))
    }

    unsafe fn validate_user_mut<'a>(
        self: *mut Self,
        space: &AddressSpace,
    ) -> Result<&'a mut Self, Error> {
        let addr = self as usize;
        let layout = Layout::new::<T>();

        // Common validation
        validate_user_align_size(addr, &layout)?;

        // Validate that the pages covered by this address are mapped as writable by the process
        // TODO for CoW this may differ
        validate_user_region(space, addr, layout.size(), true)?;

        Ok(&mut *self)
    }

    unsafe fn validate_user_ptr<'a>(
        self: *const Self,
        space: &AddressSpace,
    ) -> Result<&'a Self, Error> {
        let addr = self as usize;
        let layout = Layout::new::<T>();

        // Common validation
        validate_user_align_size(addr, &layout)?;
        validate_user_region(space, addr, layout.size(), false)?;

        Ok(&*self)
    }
}

fn validate_user_align_size(addr: usize, layout: &Layout) -> Result<(), Error> {
    // Explicitly disallow NULL
    if addr == 0 {
        return Err(Error::InvalidArgument);
    }
    // Validate alignment
    if addr % layout.align() != 0 {
        return Err(Error::InvalidArgument);
    }
    // Reject pointers that would reach into kernel address space
    if addr + layout.size() > KERNEL_VIRT_OFFSET {
        todo!();
    }

    Ok(())
}

/// Validates access to the given userspace memory region with the given constraints
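///
/// A sketch of intended use (the base address and length are made up):
///
/// ```ignore
/// // Ensure a 16-byte user buffer at 0x10000 is mapped before accessing it
/// validate_user_region(&space, 0x10000, 16, false)?;
/// ```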
pub fn validate_user_region(
    space: &AddressSpace,
    base: usize,
    len: usize,
    _need_write: bool,
) -> Result<(), Error> {
    if base + len > crate::mem::KERNEL_VIRT_OFFSET {
        panic!("Invalid argument");
    }

    // Round the region outward to page boundaries and require every page to be mapped
    let aligned_start = base & !0xFFF;
    let aligned_end = (base + len + 0xFFF) & !0xFFF;

    for page in (aligned_start..aligned_end).step_by(0x1000) {
        // TODO check writability
        space.translate(page).ok_or(Error::InvalidArgument)?;
    }

    Ok(())
}
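
// C-style memory intrinsics, exported under their conventional symbol names so that
// compiler-generated and external calls to `memcpy`/`memcmp`/`memmove`/`memset`
// resolve to the `compiler_builtins` implementations below.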

#[no_mangle]
unsafe extern "C" fn memcpy(p0: *mut c_void, p1: *const c_void, len: usize) -> *mut c_void {
    compiler_builtins::mem::memcpy(p0 as _, p1 as _, len) as _
}

#[no_mangle]
unsafe extern "C" fn memcmp(p0: *const c_void, p1: *const c_void, len: usize) -> i32 {
    compiler_builtins::mem::memcmp(p0 as _, p1 as _, len)
}

#[no_mangle]
unsafe extern "C" fn memmove(dst: *mut c_void, src: *const c_void, len: usize) -> *mut c_void {
    compiler_builtins::mem::memmove(dst as _, src as _, len) as _
}

#[no_mangle]
unsafe extern "C" fn memset(dst: *mut c_void, val: i32, len: usize) -> *mut c_void {
    compiler_builtins::mem::memset(dst as _, val, len) as _
}