osdev4/kernel/src/mem/phys/reserved.rs
2021-08-19 18:33:58 +03:00

80 lines
2.0 KiB
Rust

use crate::mem::{kernel_end_phys, PAGE_SIZE};
use address::PhysicalAddress;
use core::mem::MaybeUninit;
use core::ptr::null_mut;
/// A half-open physical address range `[start, end)` excluded from page
/// allocation. Regions form an intrusive singly-linked list threaded
/// through `next` (head: `RESERVED_REGIONS_HEAD`).
pub struct ReservedRegion {
/// First physical address of the region (page-aligned; enforced by `new`).
pub start: PhysicalAddress,
/// One past the last physical address of the region (page-aligned, exclusive).
pub end: PhysicalAddress,
/// Next region in the global list, or null at the tail.
next: *mut ReservedRegion,
}
/// Iterator over the intrusive `ReservedRegion` list; `ptr` is the cursor,
/// advancing via each region's `next` link until it reaches null.
pub struct ReservedRegionIterator {
ptr: *mut ReservedRegion,
}
impl Iterator for ReservedRegionIterator {
    type Item = &'static mut ReservedRegion;

    /// Returns the region under the cursor and steps the cursor to its
    /// successor; yields `None` once the cursor is null.
    fn next(&mut self) -> Option<Self::Item> {
        // SAFETY(review): assumes `ptr` is null or points to a region with
        // 'static lifetime registered via `reserve` — TODO confirm no
        // region is ever unregistered or moved while an iterator is live.
        let region = unsafe { self.ptr.as_mut() }?;
        self.ptr = region.next;
        Some(region)
    }
}
impl ReservedRegion {
    /// Constructs a detached (not yet registered) region covering the
    /// half-open range `[start, end)`.
    ///
    /// # Panics
    ///
    /// Panics if either bound is not page-aligned.
    pub const fn new(start: PhysicalAddress, end: PhysicalAddress) -> ReservedRegion {
        assert!(start.is_paligned() && end.is_paligned());
        ReservedRegion {
            next: null_mut(),
            start,
            end,
        }
    }
}
// Head of the intrusive reserved-region list; null when the list is empty.
// NOTE(review): `static mut` with no locking — assumes single-threaded
// access during early memory setup; confirm before use from multiple cores.
static mut RESERVED_REGIONS_HEAD: *mut ReservedRegion = null_mut();
// Backing storage for the kernel-image region, initialized by `reserve_kernel`.
static mut RESERVED_REGION_KERNEL: MaybeUninit<ReservedRegion> = MaybeUninit::uninit();
// Backing storage for the page-array region, initialized by `reserve_pages`.
static mut RESERVED_REGION_PAGES: MaybeUninit<ReservedRegion> = MaybeUninit::uninit();
/// Prepends `region` to the global reserved-region list.
///
/// # Safety
///
/// `region` must point to a valid `ReservedRegion` that outlives the list
/// (effectively 'static), and the caller must guarantee exclusive access
/// to the list — there is no synchronization here.
pub unsafe fn reserve(region: *mut ReservedRegion) {
    let old_head = RESERVED_REGIONS_HEAD;
    (*region).next = old_head;
    RESERVED_REGIONS_HEAD = region;
}
/// Reserves physical memory from address 0 up to the end of the kernel
/// image so the allocator never hands out pages the kernel occupies.
///
/// # Safety
///
/// Must be called at most once, before any concurrent list access, since
/// it writes the static backing storage and mutates the global list.
pub(super) unsafe fn reserve_kernel() {
    let region = RESERVED_REGION_KERNEL.write(ReservedRegion::new(
        PhysicalAddress::from(0usize),
        kernel_end_phys(),
    ));
    reserve(region);
}
/// Reserves `count` pages of physical memory starting at `base`
/// (e.g. the storage backing the page tracking array).
///
/// # Safety
///
/// Must be called at most once, before any concurrent list access, since
/// it writes the static backing storage and mutates the global list.
pub(super) unsafe fn reserve_pages(base: PhysicalAddress, count: usize) {
    let end = base + count * PAGE_SIZE;
    let region = RESERVED_REGION_PAGES.write(ReservedRegion::new(base, end));
    reserve(region);
}
/// Returns `true` if `page` falls inside any registered reserved region
/// (ranges are half-open: `start` inclusive, `end` exclusive).
pub fn is_reserved(page: PhysicalAddress) -> bool {
    // SAFETY(review): walks the global list via shared references only;
    // assumes no concurrent mutation of the list — TODO confirm this is
    // only called after single-threaded setup or under external locking.
    let mut cursor = unsafe { RESERVED_REGIONS_HEAD };
    while let Some(region) = unsafe { cursor.as_ref() } {
        if region.start <= page && page < region.end {
            return true;
        }
        cursor = region.next;
    }
    false
}