use core::{
    marker::PhantomData,
    ops::{Index, IndexMut, Range},
};

use libk_mm_interface::{
    address::{AsPhysicalAddress, PhysicalAddress},
    pointer::{PhysicalRef, PhysicalRefMut},
    process::PageAttributeUpdate,
    table::{
        page_index, EntryLevel, EntryLevelDrop, NextPageTable, NonTerminalEntryLevel,
        TableAllocator,
    },
};
use yggdrasil_abi::error::Error;

use super::{KernelTableManagerImpl, USER_BOUNDARY};

pub use memtables::riscv64::PageAttributes;

/// L3 - entry is 4KiB
#[derive(Debug, Clone, Copy)]
pub struct L3;
/// L2 - entry is 2MiB
#[derive(Debug, Clone, Copy)]
pub struct L2;
/// L1 - entry is 1GiB
#[derive(Debug, Clone, Copy)]
pub struct L1;

impl EntryLevel for L3 {
    const SHIFT: usize = 12;
}

impl EntryLevel for L2 {
    const SHIFT: usize = 21;
}

impl EntryLevel for L1 {
    const SHIFT: usize = 30;
}
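
// Illustrative sketch, not part of the module's API: under Sv39, a virtual
// address is resolved through three 512-entry tables, each indexed by a
// 9-bit field taken at the SHIFT offsets above. The helper name below is
// hypothetical.
#[allow(dead_code)]
fn sv39_indices(virt: usize) -> (usize, usize, usize) {
    (
        (virt >> L1::SHIFT) & 0x1FF, // bits 38..30: 1GiB granule
        (virt >> L2::SHIFT) & 0x1FF, // bits 29..21: 2MiB granule
        (virt >> L3::SHIFT) & 0x1FF, // bits 20..12: 4KiB page
    )
}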

#[repr(C, align(0x1000))]
pub struct PageTable<L: EntryLevel> {
    entries: [PageEntry<L>; 512],
}

#[derive(Clone, Copy, Debug, PartialEq)]
pub struct PageEntry<L: EntryLevel>(pub u64, PhantomData<L>);

/// Range of entries which may be recursively freed when a table at this
/// level is dropped
pub(super) trait DroppableRange {
    const DROPPABLE_RANGE: Range<usize>;
}

impl DroppableRange for L1 {
    // Only the user part of the address space may be dropped
    const DROPPABLE_RANGE: Range<usize> = 0..page_index::<L1>(USER_BOUNDARY);
}

impl DroppableRange for L2 {
    const DROPPABLE_RANGE: Range<usize> = 0..512;
}

impl NonTerminalEntryLevel for L1 {
    type NextLevel = L2;
}
impl NonTerminalEntryLevel for L2 {
    type NextLevel = L3;
}

impl<L: EntryLevel> PageTable<L> {
    /// Constructs a page table with all entries marked invalid
    pub const fn zeroed() -> Self {
        Self {
            entries: [PageEntry::INVALID; 512],
        }
    }

    /// Allocates a new page table and fills it with invalid entries
    pub fn new_zeroed<'a, TA: TableAllocator>(
    ) -> Result<PhysicalRefMut<'a, PageTable<L>, KernelTableManagerImpl>, Error> {
        let physical = TA::allocate_page_table()?;
        let mut table =
            unsafe { PhysicalRefMut::<'a, Self, KernelTableManagerImpl>::map(physical) };

        for i in 0..512 {
            table[i] = PageEntry::INVALID;
        }

        Ok(table)
    }
}

impl<L: EntryLevel> PageEntry<L> {
    // Attribute bits: the upper 10 + lower 10 bits of the PTE; everything in
    // between is the PPN
    const ATTR_MASK: u64 = 0xFFC00000000003FF;
    pub const INVALID: Self = Self(0, PhantomData);

    /// Constructs a [PageEntry] from its raw representation.
    ///
    /// # Safety
    ///
    /// The caller must ensure `value` is a valid PTE in the format expected
    /// by the hardware.
    pub const unsafe fn from_raw(value: u64) -> Self {
        Self(value, PhantomData)
    }

    /// Returns `true` if the entry has its V (valid) bit set
    pub const fn is_present(&self) -> bool {
        self.0 & PageAttributes::V.bits() != 0
    }

    /// Applies the requested attribute changes, leaving the PPN intact
    pub fn update(&mut self, update: &PageAttributeUpdate) -> Result<(), Error> {
        let mut attrs = self.attributes();
        if let Some(write) = update.user_write {
            attrs.set(PageAttributes::W, write);
        }
        if let Some(dirty) = update.dirty {
            attrs.set(PageAttributes::SW_DIRTY, dirty);
        }
        self.0 &= !Self::ATTR_MASK;
        self.0 |= attrs.bits() & Self::ATTR_MASK;
        Ok(())
    }

    /// Returns `true` if the entry has been marked dirty by software
    pub const fn is_dirty(&self) -> bool {
        self.0 & PageAttributes::SW_DIRTY.bits() != 0
    }

    /// Interprets the raw entry bits as a [PageAttributes] value
    pub fn attributes(self) -> PageAttributes {
        PageAttributes::from_bits_retain(self.0)
    }
}
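
#[cfg(test)]
mod entry_flag_tests {
    use super::*;

    // Minimal sanity sketch of the V-bit logic above; this module is an
    // illustrative addition, not part of the original kernel test suite.
    #[test]
    fn present_bit() {
        let invalid = PageEntry::<L3>::INVALID;
        assert!(!invalid.is_present());

        // SAFETY: a bare V bit is a well-formed (if useless) PTE
        let valid = unsafe { PageEntry::<L3>::from_raw(PageAttributes::V.bits()) };
        assert!(valid.is_present());
    }
}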

impl<L: NonTerminalEntryLevel + DroppableRange> EntryLevelDrop for PageTable<L>
where
    PageTable<L::NextLevel>: EntryLevelDrop,
{
    const FULL_RANGE: Range<usize> = L::DROPPABLE_RANGE;

    unsafe fn drop_range<TA: TableAllocator>(&mut self, range: Range<usize>) {
        for index in range {
            let entry = self[index];

            if let Some(table) = entry.as_table() {
                unsafe {
                    let mut table_ref: PhysicalRefMut<
                        PageTable<L::NextLevel>,
                        KernelTableManagerImpl,
                    > = PhysicalRefMut::map(table);

                    table_ref.drop_all::<TA>();

                    TA::free_page_table(table);
                }
            } else if entry.is_present() {
                // The mappings must have been torn down beforehand, so any
                // remaining present non-table entry is a bug
                panic!(
                    "Expected a table containing only tables, got table[{}] = {:#x?}",
                    index, entry.0
                );
            }

            self[index] = PageEntry::INVALID;
            // dc_cvac((&raw const self[index]).addr());
        }
    }
}

impl EntryLevelDrop for PageTable<L3> {
    const FULL_RANGE: Range<usize> = 0..512;

    // L3 tables only contain terminal entries, so there is nothing to
    // recurse into
    unsafe fn drop_range<TA: TableAllocator>(&mut self, _range: Range<usize>) {}
}

impl<L: NonTerminalEntryLevel + 'static> NextPageTable for PageTable<L> {
    type NextLevel = PageTable<L::NextLevel>;
    type TableRef = PhysicalRef<'static, PageTable<L::NextLevel>, KernelTableManagerImpl>;
    type TableRefMut = PhysicalRefMut<'static, PageTable<L::NextLevel>, KernelTableManagerImpl>;

    fn get(&self, index: usize) -> Option<Self::TableRef> {
        let table = self[index].as_table()?;
        Some(unsafe { PhysicalRef::map(table) })
    }

    fn get_mut(&mut self, index: usize) -> Option<Self::TableRefMut> {
        let table = self[index].as_table()?;
        Some(unsafe { PhysicalRefMut::map(table) })
    }

    fn get_mut_or_alloc<TA: TableAllocator>(
        &mut self,
        index: usize,
    ) -> Result<Self::TableRefMut, Error> {
        if let Some(table) = self[index].as_table() {
            Ok(unsafe { PhysicalRefMut::map(table) })
        } else {
            let table = PageTable::new_zeroed::<TA>()?;
            self[index] = PageEntry::<L>::table(
                unsafe { table.as_physical_address() },
                PageAttributes::empty(),
            );
            // dc_cvac((&raw const self[index]).addr());
            Ok(table)
        }
    }
}
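
// Usage sketch (hypothetical caller, error handling elided): mapping a 4KiB
// page by walking the hierarchy with `get_mut_or_alloc`, using the index
// split shown in `sv39_indices` above.
//
//     let (i1, i2, i3) = sv39_indices(virt);
//     let mut l2 = l1.get_mut_or_alloc::<TA>(i1)?;
//     let mut l3 = l2.get_mut_or_alloc::<TA>(i2)?;
//     l3[i3] = PageEntry::page(phys, attrs);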

impl<L: NonTerminalEntryLevel> PageEntry<L> {
    /// Constructs a leaf ("block") entry mapping a large page at this level
    pub fn block(address: PhysicalAddress, attrs: PageAttributes) -> Self {
        // TODO validate address alignment
        Self(
            (address.into_u64() >> 2)
                | (PageAttributes::R
                    | PageAttributes::A
                    | PageAttributes::D
                    | PageAttributes::V
                    | attrs)
                    .bits(),
            PhantomData,
        )
    }

    /// Constructs a non-leaf entry pointing to a next-level table. R/W/X are
    /// stripped: an entry with all three clear and V set is a table pointer
    pub fn table(address: PhysicalAddress, mut attrs: PageAttributes) -> Self {
        attrs.remove(PageAttributes::R | PageAttributes::W | PageAttributes::X);
        Self(
            (address.into_u64() >> 2) | (PageAttributes::V | attrs).bits(),
            PhantomData,
        )
    }

    /// Returns the physical address of the next-level table, if this entry
    /// is a valid non-leaf (R/W/X clear, V set)
    pub fn as_table(&self) -> Option<PhysicalAddress> {
        (self.0
            & (PageAttributes::R | PageAttributes::W | PageAttributes::X | PageAttributes::V)
                .bits()
            == PageAttributes::V.bits())
        .then_some((self.0 & !Self::ATTR_MASK) << 2)
        .map(PhysicalAddress::from_u64)
    }
}
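
#[cfg(test)]
mod non_terminal_entry_tests {
    use super::*;

    // Sketch of the PPN encoding above: a table entry stores the address
    // shifted right by 2 and decodes back through `as_table`, while a leaf
    // ("block") entry does not. Assumes `PhysicalAddress` implements
    // `PartialEq`/`Debug`; this module is an illustrative addition.
    #[test]
    fn table_vs_block() {
        let phys = PhysicalAddress::from_u64(0x8020_0000);

        let non_leaf = PageEntry::<L1>::table(phys, PageAttributes::empty());
        assert_eq!(non_leaf.as_table(), Some(phys));

        let leaf = PageEntry::<L1>::block(phys, PageAttributes::empty());
        assert_eq!(leaf.as_table(), None);
    }
}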

impl PageEntry<L3> {
    /// Constructs a terminal entry mapping a single 4KiB page
    pub fn page(address: PhysicalAddress, attrs: PageAttributes) -> Self {
        Self(
            (address.into_u64() >> 2)
                | (PageAttributes::R
                    | PageAttributes::A
                    | PageAttributes::D
                    | PageAttributes::V
                    | attrs)
                    .bits(),
            PhantomData,
        )
    }

    /// Returns the physical address mapped by this entry, if it is valid
    pub fn as_page(&self) -> Option<PhysicalAddress> {
        (self.0 & PageAttributes::V.bits() != 0)
            .then_some((self.0 & !Self::ATTR_MASK) << 2)
            .map(PhysicalAddress::from_u64)
    }
}
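
#[cfg(test)]
mod terminal_entry_tests {
    use super::*;

    // Round-trip sketch for the L3 leaf encoding; illustrative addition
    // under the same assumptions as the test module above.
    #[test]
    fn page_round_trip() {
        let phys = PhysicalAddress::from_u64(0x8000_1000);
        let entry = PageEntry::<L3>::page(phys, PageAttributes::W);
        assert!(entry.is_present());
        assert_eq!(entry.as_page(), Some(phys));
    }
}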

impl<L: EntryLevel> Index<usize> for PageTable<L> {
    type Output = PageEntry<L>;

    fn index(&self, index: usize) -> &Self::Output {
        &self.entries[index]
    }
}

impl<L: EntryLevel> IndexMut<usize> for PageTable<L> {
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        &mut self.entries[index]
    }
}