use core::{
marker::PhantomData,
ops::{Index, IndexMut, Range},
};
use bitflags::bitflags;
use libk_mm_interface::{
address::{AsPhysicalAddress, PhysicalAddress},
pointer::{PhysicalRef, PhysicalRefMut},
table::{
EntryLevel, EntryLevelDrop, MapAttributes, NextPageTable, NonTerminalEntryLevel,
TableAllocator,
},
};
use yggdrasil_abi::error::Error;
use crate::KernelTableManagerImpl;
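
// This module implements the two-level i686 (non-PAE) translation scheme: a
// 1024-entry page directory (`L0`) whose entries either map 4MiB blocks directly
// or reference 1024-entry page tables (`L3`) mapping 4KiB pages.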
bitflags! {
/// Describes how each page table entry is mapped
pub struct PageAttributes: u32 {
/// When set, the mapping is considered valid and pointing somewhere
const PRESENT = 1 << 0;
/// For tables, allows writes to further translation levels, for pages/blocks, allows
/// writes to the region covered by the entry
const WRITABLE = 1 << 1;
        /// When set for L0 entries, the mapping specifies a 4MiB page instead of a page table
        /// reference
const BLOCK = 1 << 7;
/// For tables, allows user access to further translation levels, for pages/blocks, allows
/// user access to the region covered by the entry
const USER = 1 << 2;
}
}
// TODO stuff for PAE?
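/// Last translation level: entries map 4KiB pages.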
#[derive(Debug, Clone, Copy)]
pub struct L3;
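/// Topmost translation level: entries either map 4MiB blocks or reference L3 tables.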
#[derive(Debug, Clone, Copy)]
pub struct L0;
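/// A single 32-bit translation table entry at level `L`.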
#[derive(Clone, Copy, Debug)]
pub struct PageEntry<L: EntryLevel>(u32, PhantomData<L>);
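/// A 4KiB-aligned translation table of 1024 entries at level `L`.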
#[derive(Clone, Copy, Debug)]
#[repr(C, align(0x1000))]
pub struct PageTable<L: EntryLevel> {
data: [PageEntry<L>; 1024],
}
impl EntryLevel for L3 {
const SHIFT: usize = 12;
}
impl EntryLevel for L0 {
const SHIFT: usize = 22;
}
impl NonTerminalEntryLevel for L0 {
type NextLevel = L3;
}
impl PageEntry<L3> {
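    /// Constructs an entry mapping a 4KiB page at `address` with the given attributes.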
pub fn page(address: PhysicalAddress, attrs: PageAttributes) -> Self {
Self(
address.try_into_u32().unwrap() | (PageAttributes::PRESENT | attrs).bits(),
PhantomData,
)
}
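    /// Returns the physical address of the mapped page, or `None` if the entry is not
    /// present.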
pub fn as_page(&self) -> Option<PhysicalAddress> {
if self.0 & PageAttributes::PRESENT.bits() != 0 {
Some(PhysicalAddress::from_u32(self.0 & !0xFFF))
} else {
None
}
}
}
impl PageEntry<L0> {
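    /// Constructs an entry mapping a 4MiB block at `address` with the given attributes.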
pub fn block(address: PhysicalAddress, attrs: PageAttributes) -> Self {
Self(
address.try_into_u32().unwrap()
| (PageAttributes::PRESENT | PageAttributes::BLOCK | attrs).bits(),
PhantomData,
)
}
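    /// Constructs an entry referencing a next-level (L3) page table at `address`.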
pub fn table(address: PhysicalAddress, attrs: PageAttributes) -> Self {
Self(
address.try_into_u32().unwrap() | (PageAttributes::PRESENT | attrs).bits(),
PhantomData,
)
}
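    /// Returns the physical address of the referenced L3 table, or `None` if the entry
    /// is not present or maps a block instead.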
pub fn as_table(&self) -> Option<PhysicalAddress> {
if self.0 & PageAttributes::PRESENT.bits() != 0
&& self.0 & PageAttributes::BLOCK.bits() == 0
{
Some(PhysicalAddress::from_u32(self.0 & !0xFFF))
} else {
None
}
}
}
impl<L: EntryLevel> PageEntry<L> {
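    /// An empty entry: the PRESENT bit is clear, so translation through it faults.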
pub const INVALID: Self = Self(0, PhantomData);
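    /// Returns `true` if the PRESENT bit is set.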
pub fn is_present(&self) -> bool {
        self.0 & PageAttributes::PRESENT.bits() != 0
}
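    /// Returns the entry's bits as [PageAttributes]. Address bits are retained alongside
    /// the flag bits, as `from_bits_retain` keeps unknown bits.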
pub fn attributes(&self) -> PageAttributes {
PageAttributes::from_bits_retain(self.0)
}
}
impl<L: EntryLevel> PageTable<L> {
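    /// Constructs a table with all entries set to [PageEntry::INVALID].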
pub const fn zeroed() -> Self {
Self {
data: [PageEntry::INVALID; 1024],
}
}
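    /// Allocates a page for a new table through `TA`, maps it and returns it with all
    /// entries cleared.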
pub fn new_zeroed<'a, TA: TableAllocator>(
) -> Result<PhysicalRefMut<'a, Self, KernelTableManagerImpl>, Error> {
let physical = TA::allocate_page_table()?;
let mut table =
unsafe { PhysicalRefMut::<'a, Self, KernelTableManagerImpl>::map(physical) };
for i in 0..1024 {
table[i] = PageEntry::INVALID;
}
Ok(table)
}
    /// Deallocates the translation table. Lower-level tables referenced by its entries
    /// are not freed here (see [EntryLevelDrop] for recursive teardown).
///
/// # Safety
///
/// The caller must ensure the table is no longer in use and is not referenced anymore.
pub unsafe fn free<TA: TableAllocator>(this: PhysicalRefMut<Self, KernelTableManagerImpl>) {
let physical = this.as_physical_address();
TA::free_page_table(physical);
}
}
impl NextPageTable for PageTable<L0> {
type NextLevel = PageTable<L3>;
type TableRef = PhysicalRef<'static, Self::NextLevel, KernelTableManagerImpl>;
type TableRefMut = PhysicalRefMut<'static, Self::NextLevel, KernelTableManagerImpl>;
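    /// Returns the L3 table referenced by entry `index`, if present.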
fn get(&self, index: usize) -> Option<Self::TableRef> {
self[index]
.as_table()
.map(|addr| unsafe { PhysicalRef::map(addr) })
}
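    /// Returns a mutable reference to the L3 table referenced by entry `index`, if
    /// present.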
fn get_mut(&mut self, index: usize) -> Option<Self::TableRefMut> {
self[index]
.as_table()
.map(|addr| unsafe { PhysicalRefMut::map(addr) })
}
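    /// Returns the L3 table referenced by entry `index`, allocating, zeroing and linking
    /// a new one through `TA` if the entry is empty.
    ///
    /// A rough usage sketch; `TableAllocatorImpl` and `page_phys` are placeholders for a
    /// [TableAllocator] implementation and a page-aligned [PhysicalAddress] supplied by
    /// the caller:
    ///
    /// ```ignore
    /// let mut l0 = PageTable::<L0>::new_zeroed::<TableAllocatorImpl>()?;
    /// let mut l3 = l0.get_mut_or_alloc::<TableAllocatorImpl>(0)?;
    /// l3[0] = PageEntry::page(page_phys, PageAttributes::WRITABLE);
    /// ```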
fn get_mut_or_alloc<TA: TableAllocator>(
&mut self,
index: usize,
) -> Result<Self::TableRefMut, Error> {
let entry = self[index];
if let Some(table) = entry.as_table() {
Ok(unsafe { PhysicalRefMut::map(table) })
} else {
let table = PageTable::new_zeroed::<TA>()?;
self[index] = PageEntry::<L0>::table(
unsafe { table.as_physical_address() },
PageAttributes::WRITABLE | PageAttributes::USER,
);
Ok(table)
}
}
}
impl<L: EntryLevel> Index<usize> for PageTable<L> {
type Output = PageEntry<L>;
fn index(&self, index: usize) -> &Self::Output {
&self.data[index]
}
}
impl<L: EntryLevel> IndexMut<usize> for PageTable<L> {
fn index_mut(&mut self, index: usize) -> &mut Self::Output {
&mut self.data[index]
}
}
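
// L3 is the terminal level: its entries map pages rather than tables, so a range
// drop has nothing to recurse into and `drop_range` below is a no-op.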
impl EntryLevelDrop for PageTable<L3> {
const FULL_RANGE: Range<usize> = 0..1024;
unsafe fn drop_range<TA: TableAllocator>(&mut self, _range: Range<usize>) {}
}
impl EntryLevelDrop for PageTable<L0> {
const FULL_RANGE: Range<usize> = 0..1024;
unsafe fn drop_range<TA: TableAllocator>(&mut self, range: Range<usize>) {
for index in range {
let entry = self[index];
if let Some(table) = entry.as_table() {
let mut table_ref: PhysicalRefMut<PageTable<L3>, KernelTableManagerImpl> =
PhysicalRefMut::map(table);
table_ref.drop_all::<TA>();
TA::free_page_table(table);
} else if entry.is_present() {
                // The range must have been cleared beforehand, so no non-table entries
                // should still be present
panic!(
"Expected a table containing only tables, got table[{}] = {:#x?}",
index, entry.0
);
}
self[index] = PageEntry::INVALID;
}
}
}
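
// Conversions between architecture-independent `MapAttributes` and i686
// `PageAttributes`.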
impl From<MapAttributes> for PageAttributes {
fn from(value: MapAttributes) -> Self {
let mut res = PageAttributes::WRITABLE;
if value.intersects(MapAttributes::USER_READ | MapAttributes::USER_WRITE) {
res |= PageAttributes::USER;
}
res
}
}
impl From<PageAttributes> for MapAttributes {
fn from(value: PageAttributes) -> Self {
let mut res = MapAttributes::empty();
if value.contains(PageAttributes::USER) {
res |= MapAttributes::USER_READ;
if value.contains(PageAttributes::WRITABLE) {
res |= MapAttributes::USER_WRITE;
}
}
// TODO ???
res |= MapAttributes::NON_GLOBAL;
res
}
}