//! yggdrasil/src/mem.rs: UEFI memory map helpers and boot-time page table setup.
use uefi::{
    prelude::BootServices,
    table::boot::{MemoryDescriptor, MemoryMap, MemoryType},
    Error,
};

// x86_64 page table entry flags.
const PTE_PRESENT: u64 = 1 << 0;
const PTE_WRITABLE: u64 = 1 << 1;
// In a page directory entry, bit 7 (PS) selects a 2 MiB block mapping.
const PTE_BLOCK: u64 = 1 << 7;

// Size of the static buffer handed to `BootServices::memory_map`.
const MMAP_BUFFER_SIZE: usize = 32768;
/// 16-byte aligned backing storage for the UEFI memory map.
#[repr(C, align(0x10))]
struct MmapBuffer {
    data: [u8; MMAP_BUFFER_SIZE],
}

/// A single page table: 512 64-bit entries, 4 KiB aligned.
#[derive(Clone, Copy)]
#[repr(C, align(0x1000))]
struct Table {
    data: [u64; 512],
}
static mut MMAP_BUFFER: MmapBuffer = MmapBuffer {
    data: [0; MMAP_BUFFER_SIZE],
};
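// Statically allocated page tables for the kernel mapping. A single PML4 and PDPT are
// enough because the kernel mapping occupies one PML4 slot; each of the PD_COUNT page
// directories holds 512 x 2 MiB entries, so together they cover PD_COUNT GiB of
// physical memory.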
const PD_COUNT: usize = 32;
static mut PML4: Table = Table { data: [0; 512] };
static mut PDPT: Table = Table { data: [0; 512] };
static mut PDS: [Table; PD_COUNT] = [Table { data: [0; 512] }; PD_COUNT];
/// Extension helpers for the UEFI memory map.
pub trait MemoryMapExt {
    /// Returns `true` if the 4 KiB page at physical address `page` lies in a
    /// `CONVENTIONAL` (free) region of the map.
    fn is_usable(&self, page: u64) -> bool;
}

/// Extension helpers for a single memory map entry.
pub trait MemoryDescriptorExt {
    /// Returns `true` for the memory types this loader treats as usable RAM.
    fn is_runtime_usable(&self) -> bool;
}
impl MemoryMapExt for MemoryMap<'_> {
    fn is_usable(&self, page: u64) -> bool {
        assert_eq!(page & 0xFFF, 0, "page address must be 4 KiB aligned");
        for entry in self.entries() {
            let range = entry.phys_start..entry.phys_start + entry.page_count * 0x1000;
            if range.contains(&page) {
                return entry.ty == MemoryType::CONVENTIONAL;
            }
        }
        // Not found in the memory map
        false
    }
}
impl MemoryDescriptorExt for MemoryDescriptor {
    fn is_runtime_usable(&self) -> bool {
        self.ty == MemoryType::LOADER_DATA
            || self.ty == MemoryType::LOADER_CODE
            || self.ty == MemoryType::BOOT_SERVICES_CODE
            || self.ty == MemoryType::BOOT_SERVICES_DATA
            || self.ty == MemoryType::RUNTIME_SERVICES_DATA
            || self.ty == MemoryType::RUNTIME_SERVICES_CODE
            || self.ty == MemoryType::CONVENTIONAL
    }
}
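// Illustrative use of `MemoryDescriptorExt` (an assumption, not part of the original
// module): summing the sizes of all regions the loader considers usable RAM. The
// helper name `runtime_usable_bytes` is hypothetical.
#[allow(dead_code)]
fn runtime_usable_bytes<'a>(entries: impl Iterator<Item = &'a MemoryDescriptor>) -> u64 {
    entries
        .filter(|d| d.is_runtime_usable())
        .map(|d| d.page_count * 0x1000)
        .sum()
}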
/// Retrieves the current UEFI memory map into the static `MMAP_BUFFER`.
pub fn memory_map(bs: &BootServices) -> Result<MemoryMap, Error> {
    bs.memory_map(unsafe { &mut MMAP_BUFFER.data })
}
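// Sketch of how the map might be consumed (hypothetical caller, not in the original
// file): probe 4 KiB frames with `MemoryMapExt::is_usable` until a free one is found.
// `find_free_page` and its scan limit are illustrative assumptions.
#[allow(dead_code)]
fn find_free_page(mmap: &MemoryMap, start: u64) -> Option<u64> {
    debug_assert_eq!(start & 0xFFF, 0);
    // Probe up to 1024 consecutive 4 KiB frames starting at `start`.
    (0..1024u64)
        .map(|i| start + i * 0x1000)
        .find(|&page| mmap.is_usable(page))
}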
// TODO handle other offsets
/// Builds the boot page tables: PML4 slot 0 is inherited from UEFI's live tables so the
/// firmware's mappings keep working, and slot 511 maps the first 32 GiB of physical
/// memory into the upper half with 2 MiB pages for the kernel. Returns the address of
/// the new PML4 (loader memory is identity-mapped under UEFI, so this is also its
/// physical address).
///
/// # Safety
///
/// Only sound in the single-threaded UEFI boot environment: this dereferences the live
/// CR3 value (relying on UEFI's identity mapping) and mutates the static page tables.
pub unsafe fn map_image() -> u64 {
    // Read the active CR3 so the lower mapping, still in use by UEFI, can be reused
    let cr3: usize;
    core::arch::asm!("mov %cr3, {0}", out(reg) cr3, options(att_syntax));
    // The low 12 bits of CR3 are flags, not part of the table address
    let orig_pml4 = core::slice::from_raw_parts((cr3 & !0xFFF) as *const u64, 512);
    // Fill the page directories: entry i maps the 2 MiB block starting at i << 21
    for i in 0..512 * PDS.len() {
        let pd_index = i / 512;
        let pd_offset = i % 512;
        PDS[pd_index].data[pd_offset] = (i << 21) as u64 | PTE_BLOCK | PTE_WRITABLE | PTE_PRESENT;
    }
    // Point each PDPT entry at its page directory
    for (i, pd) in PDS.iter_mut().enumerate() {
        let addr = pd.data.as_mut_ptr() as u64;
        PDPT.data[i] = addr | PTE_WRITABLE | PTE_PRESENT;
    }
    let addr = PDPT.data.as_mut_ptr() as u64;
    // Clone the lower mapping from UEFI's table
    PML4.data[0] = orig_pml4[0];
    // Set up the upper mapping for the kernel
    PML4.data[511] = addr | PTE_WRITABLE | PTE_PRESENT;
    PML4.data.as_mut_ptr() as u64
}
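// Hypothetical follow-up (not part of this module): the returned address is meant to be
// loaded into CR3 once the loader no longer needs the UEFI mappings, e.g. right before
// jumping to the kernel. `activate_tables` is an illustrative name.
#[allow(dead_code)]
unsafe fn activate_tables(pml4: u64) {
    // Writing CR3 switches to the new tables and flushes non-global TLB entries.
    core::arch::asm!("mov {0}, %cr3", in(reg) pml4, options(att_syntax));
}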