use super::{
    config::mm::{PHYS_MAP_VIRT, ROOT_PAGE_TABLE_PFN},
    fdt::{FdtExt, FDT},
};
use crate::{arch::riscv64::config::mm::KIMAGE_OFFSET, traits::mm::Memory};
use core::{marker::PhantomData, ptr::NonNull};
use eonix_mm::{
    address::{Addr as _, AddrOps, PAddr, PRange, PhysAccess, VAddr},
    page_table::{
        PageAttribute, PageTable, PageTableLevel, PagingMode, RawAttribute, RawPageTable,
        TableAttribute, PTE,
    },
    paging::{NoAlloc, Page, PageBlock, PFN},
};
use eonix_sync_base::LazyLock;
use fdt::Fdt;
use riscv::{
    asm::{sfence_vma, sfence_vma_all},
    register::satp,
};
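
// The root page table itself is never allocated here: early boot code is assumed
// to have set it up at the fixed frame `ROOT_PAGE_TABLE_PFN`, so `GLOBAL_PAGE_TABLE`
// below is only a lazily-constructed view over that existing page (hence `NoAlloc`).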
pub const PAGE_TABLE_BASE: PFN = PFN::from_val(ROOT_PAGE_TABLE_PFN);

pub static GLOBAL_PAGE_TABLE: LazyLock<PageTable<ArchPagingMode, NoAlloc, ArchPhysAccess>> =
    LazyLock::new(|| unsafe {
        Page::with_raw(PAGE_TABLE_BASE, |root_table_page| {
            PageTable::with_root_table(root_table_page.clone())
        })
    });

pub const PA_V: u64 = 0b1 << 0;
pub const PA_R: u64 = 0b1 << 1;
pub const PA_W: u64 = 0b1 << 2;
pub const PA_X: u64 = 0b1 << 3;
pub const PA_U: u64 = 0b1 << 4;
pub const PA_G: u64 = 0b1 << 5;
pub const PA_A: u64 = 0b1 << 6;
pub const PA_D: u64 = 0b1 << 7;

// Software-defined bits kept in the RSW field (bits 8-9), which the MMU ignores.
pub const PA_COW: u64 = 0b1 << 8;
pub const PA_MMAP: u64 = 0b1 << 9;

#[allow(dead_code)]
pub const PA_SHIFT: u64 = 10;

// Bits 0-9 (V, R, W, X, U, G, A, D, RSW).
#[allow(dead_code)]
pub const PA_FLAGS_MASK: u64 = 0x3FF; // 0b11_1111_1111
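
// For reference, the Sv39/Sv48 leaf PTE layout these flags index into
// (RISC-V privileged spec):
//
//   63..54  reserved / extension bits
//   53..10  PPN (physical page number)
//    9..8   RSW, free for software (`PA_COW` / `PA_MMAP` live here)
//    7..0   D A G U X W R V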

#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct PTE64(pub u64);

#[derive(Clone, Copy)]
pub struct PageAttribute64(u64);

pub struct RawPageTableSv48<'a>(NonNull<PTE64>, PhantomData<&'a ()>);

pub struct PagingModeSv48;

pub struct ArchPhysAccess;

pub struct ArchMemory;

impl PTE for PTE64 {
    type Attr = PageAttribute64;

    fn set(&mut self, pfn: PFN, attr: Self::Attr) {
        self.0 = (usize::from(pfn) << PA_SHIFT) as u64 | attr.0;
    }

    fn get(&self) -> (PFN, Self::Attr) {
        let pfn = PFN::from(self.0 as usize >> PA_SHIFT);
        let attr = PageAttribute64(self.0 & PA_FLAGS_MASK);
        (pfn, attr)
    }
}
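
// A minimal round-trip sketch, assuming `pfn` is some `PFN` obtained from the
// frame allocator (the concrete constructors depend on `eonix_mm`):
//
//     let mut pte = PTE64(0);
//     pte.set(pfn, PageAttribute::PRESENT.into());
//     let (same_pfn, attr) = pte.get(); // same_pfn == pfn, attr has PA_V set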

impl PagingMode for PagingModeSv48 {
    type Entry = PTE64;
    type RawTable<'a> = RawPageTableSv48<'a>;

    const LEVELS: &'static [PageTableLevel] = &[
        PageTableLevel::new(39, 9),
        PageTableLevel::new(30, 9),
        PageTableLevel::new(21, 9),
        PageTableLevel::new(12, 9),
    ];
}

pub type ArchPagingMode = PagingModeSv48;
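
// Four 9-bit levels above a 12-bit page offset match the Sv48 split of a 48-bit
// virtual address: VPN[3] = bits 47..39, VPN[2] = 38..30, VPN[1] = 29..21,
// VPN[0] = 20..12, page offset = 11..0.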

impl<'a> RawPageTable<'a> for RawPageTableSv48<'a> {
    type Entry = PTE64;

    fn index(&self, index: u16) -> &'a Self::Entry {
        unsafe { self.0.add(index as usize).as_ref() }
    }

    fn index_mut(&mut self, index: u16) -> &'a mut Self::Entry {
        unsafe { self.0.add(index as usize).as_mut() }
    }

    unsafe fn from_ptr(ptr: NonNull<PageBlock>) -> Self {
        Self(ptr.cast(), PhantomData)
    }
}
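
// With 9-bit levels, each raw table holds 512 entries, so valid indices are
// 0..512; `index`/`index_mut` do no bounds checking and rely on the generic page
// table walker to stay in range.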

impl RawAttribute for PageAttribute64 {
    fn null() -> Self {
        Self(0)
    }

    fn as_table_attr(self) -> Option<TableAttribute> {
        let mut table_attr = TableAttribute::empty();

        if self.0 & PA_V != 0 {
            table_attr |= TableAttribute::PRESENT;
        }

        // A valid entry with any of R/W/X set is a leaf page, not a table pointer.
        if table_attr.contains(TableAttribute::PRESENT) && self.0 & (PA_R | PA_W | PA_X) != 0 {
            return None;
        }

        if self.0 & PA_G != 0 {
            table_attr |= TableAttribute::GLOBAL;
        }
        if self.0 & PA_U != 0 {
            table_attr |= TableAttribute::USER;
        }
        if self.0 & PA_A != 0 {
            table_attr |= TableAttribute::ACCESSED;
        }

        Some(table_attr)
    }

    fn as_page_attr(self) -> Option<PageAttribute> {
        let mut page_attr = PageAttribute::empty();

        if self.0 & PA_V != 0 {
            page_attr |= PageAttribute::PRESENT;
        }

        // A valid entry with R/W/X all clear points to a next-level table, not a page.
        if page_attr.contains(PageAttribute::PRESENT) && (self.0 & (PA_R | PA_W | PA_X) == 0) {
            return None;
        }

        if self.0 & PA_R != 0 {
            page_attr |= PageAttribute::READ;
        }
        if self.0 & PA_W != 0 {
            page_attr |= PageAttribute::WRITE;
        }
        if self.0 & PA_X != 0 {
            page_attr |= PageAttribute::EXECUTE;
        }
        if self.0 & PA_U != 0 {
            page_attr |= PageAttribute::USER;
        }
        if self.0 & PA_A != 0 {
            page_attr |= PageAttribute::ACCESSED;
        }
        if self.0 & PA_D != 0 {
            page_attr |= PageAttribute::DIRTY;
        }
        if self.0 & PA_G != 0 {
            page_attr |= PageAttribute::GLOBAL;
        }
        if self.0 & PA_COW != 0 {
            page_attr |= PageAttribute::COPY_ON_WRITE;
        }
        if self.0 & PA_MMAP != 0 {
            page_attr |= PageAttribute::MAPPED;
        }

        // `ANONYMOUS` has no hardware or RSW bit of its own, so it cannot be
        // recovered from the raw PTE (see `From<PageAttribute>` below).
        // if self.0 & PA_ANON != 0 {
        //     page_attr |= PageAttribute::ANONYMOUS;
        // }
        Some(page_attr)
    }
}

impl From<PageAttribute> for PageAttribute64 {
    fn from(page_attr: PageAttribute) -> Self {
        let mut raw_attr = 0;

        for attr in page_attr.iter() {
            match attr {
                PageAttribute::PRESENT => raw_attr |= PA_V,
                PageAttribute::READ => raw_attr |= PA_R,
                PageAttribute::WRITE => raw_attr |= PA_W,
                PageAttribute::EXECUTE => raw_attr |= PA_X,
                PageAttribute::USER => raw_attr |= PA_U,
                PageAttribute::ACCESSED => raw_attr |= PA_A,
                PageAttribute::DIRTY => raw_attr |= PA_D,
                PageAttribute::GLOBAL => raw_attr |= PA_G,
                PageAttribute::COPY_ON_WRITE => raw_attr |= PA_COW,
                PageAttribute::MAPPED => raw_attr |= PA_MMAP,
                PageAttribute::ANONYMOUS => {}
                _ => unreachable!("Invalid page attribute"),
            }
        }

        Self(raw_attr)
    }
}

impl From<TableAttribute> for PageAttribute64 {
    fn from(table_attr: TableAttribute) -> Self {
        let mut raw_attr = 0;

        for attr in table_attr.iter() {
            match attr {
                TableAttribute::PRESENT => raw_attr |= PA_V,
                TableAttribute::GLOBAL => raw_attr |= PA_G,
                TableAttribute::USER | TableAttribute::ACCESSED => {}
                _ => unreachable!("Invalid table attribute"),
            }
        }

        Self(raw_attr)
    }
}
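
// A worked example of the encoding above: a present, readable, writable, accessed,
// dirty user page, i.e. PRESENT | READ | WRITE | USER | ACCESSED | DIRTY, maps to
// PA_V | PA_R | PA_W | PA_U | PA_A | PA_D = 0xD7, and `as_page_attr` decodes those
// bits back to the same attribute set.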

impl ArchPhysAccess {
    const PHYS_OFFSET: usize = PHYS_MAP_VIRT;
}

impl PhysAccess for ArchPhysAccess {
    unsafe fn as_ptr<T>(paddr: PAddr) -> NonNull<T> {
        let alignment: usize = align_of::<T>();
        assert!(paddr.addr() % alignment == 0, "Alignment error");

        unsafe {
            // SAFETY: We can assume that `paddr.addr()` never equals `-PHYS_OFFSET`,
            // so the sum below is never null. Otherwise, the kernel would already
            // be broken.
            NonNull::new_unchecked((Self::PHYS_OFFSET + paddr.addr()) as *mut T)
        }
    }

    unsafe fn from_ptr<T>(ptr: NonNull<T>) -> PAddr {
        let addr = ptr.addr().get();
        assert!(addr % align_of::<T>() == 0, "Alignment error");
        assert!(
            addr >= Self::PHYS_OFFSET,
            "Address is not a valid physical address"
        );

        PAddr::from_val(addr - Self::PHYS_OFFSET)
    }
}
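
// Usage sketch (the physical address is hypothetical): with `PHYS_MAP_VIRT` as the
// base of the linear physical-memory map, a frame at physical 0x8020_0000 is
// accessed through the pointer `PHYS_MAP_VIRT + 0x8020_0000`, and `from_ptr`
// simply undoes that offset.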

impl Memory for ArchMemory {
    fn present_ram() -> impl Iterator<Item = PRange> {
        FDT.present_ram()
    }

    fn free_ram() -> impl Iterator<Item = PRange> {
        unsafe extern "C" {
            fn __kernel_start();
            fn __kernel_end();
        }

        // Hand out the slice between the end of the kernel image and the next 2 MiB
        // boundary first, then the present RAM above that boundary (ranges straddling
        // it are clipped so nothing below the boundary is yielded twice).
        let kernel_end = PAddr::from(__kernel_end as usize - KIMAGE_OFFSET);
        let paddr_after_kimage_aligned = kernel_end.ceil_to(0x200000);

        core::iter::once(PRange::new(kernel_end, paddr_after_kimage_aligned)).chain(
            Self::present_ram()
                .filter(move |range| range.end() > paddr_after_kimage_aligned)
                .map(move |range| {
                    if range.start() < paddr_after_kimage_aligned {
                        let (_, right) = range.split_at(paddr_after_kimage_aligned);
                        right
                    } else {
                        range
                    }
                }),
        )
    }
}

pub type DefaultPagingMode = PagingModeSv48;

pub trait PresentRam: Iterator<Item = PRange> {}

pub trait FreeRam: PresentRam {
    fn free_ram(self) -> impl Iterator<Item = PRange>;
}

impl<T> FreeRam for T
where
    T: PresentRam,
{
    fn free_ram(self) -> impl Iterator<Item = PRange> {
        unsafe extern "C" {
            fn __kernel_start();
            fn __kernel_end();
        }

        let kernel_end = PAddr::from(__kernel_end as usize - KIMAGE_OFFSET);
        let paddr_after_kimage_aligned = kernel_end.ceil_to(0x200000);

        core::iter::once(PRange::new(kernel_end, paddr_after_kimage_aligned)).chain(
            self.filter(move |range| range.end() > paddr_after_kimage_aligned)
                .map(move |range| {
                    if range.start() < paddr_after_kimage_aligned {
                        let (_, right) = range.split_at(paddr_after_kimage_aligned);
                        right
                    } else {
                        range
                    }
                }),
        )
    }
}
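
// This blanket `FreeRam` impl mirrors `ArchMemory::free_ram` above: any iterator of
// present-RAM ranges that opts into the `PresentRam` marker trait gets the same
// kernel-image clipping for free. Sketch with a hypothetical iterator type:
//
//     struct FdtRanges(/* ... */);
//     impl Iterator for FdtRanges { type Item = PRange; /* ... */ }
//     impl PresentRam for FdtRanges {}
//     // FdtRanges(/* ... */).free_ram() now skips the kernel image.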

#[inline(always)]
pub fn flush_tlb(vaddr: usize) {
    sfence_vma(0, vaddr);
}

#[inline(always)]
pub fn flush_tlb_all() {
    sfence_vma_all();
}

#[inline(always)]
pub fn get_root_page_table_pfn() -> PFN {
    let satp_val = satp::read();
    let ppn = satp_val.ppn();
    PFN::from(ppn)
}

#[inline(always)]
pub fn set_root_page_table_pfn(pfn: PFN) {
    unsafe { satp::set(satp::Mode::Sv48, 0, usize::from(pfn)) };
    sfence_vma_all();
}
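
// Address-space switches go through `set_root_page_table_pfn`, which programs satp
// with Sv48 mode, ASID 0 and the new root PPN, then flushes the whole TLB. A small
// sketch, assuming `new_root` holds the PFN of a prepared root table:
//
//     let old_root = get_root_page_table_pfn();
//     set_root_page_table_pfn(new_root);
//     // ... run in the new address space, switch back with `old_root` later ...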