//! RISC-V (Sv48) memory management: PTE bit encoding, paging-mode
//! description, linear physical-memory access, RAM discovery, and
//! TLB / `satp` helpers.
  1. use super::{
  2. config::mm::{PHYS_MAP_VIRT, ROOT_PAGE_TABLE_PFN},
  3. fdt::{FdtExt, FDT},
  4. };
  5. use crate::{arch::riscv64::config::mm::KIMAGE_OFFSET, traits::mm::Memory};
  6. use core::{marker::PhantomData, ptr::NonNull};
  7. use eonix_mm::{
  8. address::{Addr as _, AddrOps, PAddr, PRange, PhysAccess, VAddr},
  9. page_table::{
  10. PageAttribute, PageTable, PageTableLevel, PagingMode, RawAttribute, RawPageTable,
  11. TableAttribute, PTE,
  12. },
  13. paging::{NoAlloc, Page, PageBlock, PFN},
  14. };
  15. use eonix_sync_base::LazyLock;
  16. use fdt::Fdt;
  17. use riscv::{
  18. asm::{sfence_vma, sfence_vma_all},
  19. register::satp,
  20. };
/// PFN of the kernel's root page table frame, taken from the arch
/// config (`ROOT_PAGE_TABLE_PFN`).
pub const PAGE_TABLE_BASE: PFN = PFN::from_val(ROOT_PAGE_TABLE_PFN);

/// Lazily-built handle onto the global (kernel) page table.
///
/// Uses `NoAlloc`, so this handle never allocates table frames itself.
// SAFETY: `PAGE_TABLE_BASE` is presumed to name a root table frame set
// up by early boot before this static is first dereferenced — TODO
// confirm against the boot path.
pub static GLOBAL_PAGE_TABLE: LazyLock<PageTable<ArchPagingMode, NoAlloc, ArchPhysAccess>> =
    LazyLock::new(|| unsafe {
        Page::with_raw(PAGE_TABLE_BASE, |root_table_page| {
            PageTable::with_root_table(root_table_page.clone())
        })
    });
  28. pub const PA_V: u64 = 0b1 << 0;
  29. pub const PA_R: u64 = 0b1 << 1;
  30. pub const PA_W: u64 = 0b1 << 2;
  31. pub const PA_X: u64 = 0b1 << 3;
  32. pub const PA_U: u64 = 0b1 << 4;
  33. pub const PA_G: u64 = 0b1 << 5;
  34. pub const PA_A: u64 = 0b1 << 6;
  35. pub const PA_D: u64 = 0b1 << 7;
  36. // in RSW
  37. pub const PA_COW: u64 = 0b1 << 8;
  38. pub const PA_MMAP: u64 = 0b1 << 9;
  39. #[allow(dead_code)]
  40. pub const PA_SHIFT: u64 = 10;
  41. // Bit 0-9 (V, R, W, X, U, G, A, D, RSW)
  42. #[allow(dead_code)]
  43. pub const PA_FLAGS_MASK: u64 = 0x3FF; // 0b11_1111_1111
/// A raw 64-bit RISC-V page-table entry: PPN above bit `PA_SHIFT`,
/// flag bits in the low 10 bits. `repr(transparent)` so it can overlay
/// the in-memory table directly.
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct PTE64(pub u64);

/// The flag-bit portion of a `PTE64` (V/R/W/X/U/G/A/D plus RSW).
#[derive(Clone, Copy)]
pub struct PageAttribute64(u64);

/// Borrowed view of one 512-entry Sv48 page table; the `PhantomData`
/// ties the entry references handed out to lifetime `'a`.
pub struct RawPageTableSv48<'a>(NonNull<PTE64>, PhantomData<&'a ()>);

/// Marker type describing the Sv48 four-level paging scheme.
pub struct PagingModeSv48;

/// Physical-memory accessor via the kernel's linear map.
pub struct ArchPhysAccess;

/// Arch hook for enumerating present and free RAM.
pub struct ArchMemory;
  53. impl PTE for PTE64 {
  54. type Attr = PageAttribute64;
  55. fn set(&mut self, pfn: PFN, attr: Self::Attr) {
  56. self.0 = (usize::from(pfn) << PA_SHIFT) as u64 | attr.0;
  57. }
  58. fn get(&self) -> (PFN, Self::Attr) {
  59. let pfn = PFN::from(self.0 as usize >> PA_SHIFT);
  60. let attr = PageAttribute64(self.0 & PA_FLAGS_MASK);
  61. (pfn, attr)
  62. }
  63. }
impl PagingMode for PagingModeSv48 {
    type Entry = PTE64;
    type RawTable<'a> = RawPageTableSv48<'a>;
    // Sv48: four translation levels, each indexed by 9 bits of the
    // virtual address — VPN[3] at bit 39, VPN[2] at 30, VPN[1] at 21,
    // VPN[0] at 12 — listed root-first.
    const LEVELS: &'static [PageTableLevel] = &[
        PageTableLevel::new(39, 9),
        PageTableLevel::new(30, 9),
        PageTableLevel::new(21, 9),
        PageTableLevel::new(12, 9),
    ];
}

/// The paging mode used by this architecture build (Sv48).
pub type ArchPagingMode = PagingModeSv48;
impl<'a> RawPageTable<'a> for RawPageTableSv48<'a> {
    type Entry = PTE64;

    /// Shared reference to the `index`-th entry of this table.
    // SAFETY: relies on `self.0` pointing at a live table for `'a`.
    // NOTE(review): no bounds check — an Sv48 table has 512 entries;
    // confirm the trait contract guarantees `index < 512`.
    fn index(&self, index: u16) -> &'a Self::Entry {
        unsafe { self.0.add(index as usize).as_ref() }
    }

    /// Mutable reference to the `index`-th entry of this table.
    // SAFETY: same in-bounds/liveness assumptions as `index`; callers
    // must also ensure exclusive access to the entry.
    fn index_mut(&mut self, index: u16) -> &'a mut Self::Entry {
        unsafe { self.0.add(index as usize).as_mut() }
    }

    /// Reinterpret a page-sized block as a table of `PTE64` entries.
    /// Caller must guarantee `ptr` really is a page table valid for `'a`.
    unsafe fn from_ptr(ptr: NonNull<PageBlock>) -> Self {
        Self(ptr.cast(), PhantomData)
    }
}
  87. impl RawAttribute for PageAttribute64 {
  88. fn null() -> Self {
  89. Self(0)
  90. }
  91. fn as_table_attr(self) -> Option<TableAttribute> {
  92. let mut table_attr = TableAttribute::empty();
  93. if self.0 & PA_V != 0 {
  94. table_attr |= TableAttribute::PRESENT;
  95. }
  96. if table_attr.contains(TableAttribute::PRESENT) && self.0 & (PA_R | PA_W | PA_X) != 0 {
  97. return None;
  98. }
  99. if self.0 & PA_G != 0 {
  100. table_attr |= TableAttribute::GLOBAL;
  101. }
  102. if self.0 & PA_U != 0 {
  103. table_attr |= TableAttribute::USER;
  104. }
  105. if self.0 & PA_A != 0 {
  106. table_attr |= TableAttribute::ACCESSED;
  107. }
  108. Some(table_attr)
  109. }
  110. fn as_page_attr(self) -> Option<PageAttribute> {
  111. let mut page_attr = PageAttribute::empty();
  112. if self.0 & PA_V != 0 {
  113. page_attr |= PageAttribute::PRESENT;
  114. }
  115. if page_attr.contains(PageAttribute::PRESENT) && (self.0 & (PA_R | PA_W | PA_X) == 0) {
  116. return None;
  117. }
  118. if self.0 & PA_R != 0 {
  119. page_attr |= PageAttribute::READ;
  120. }
  121. if self.0 & PA_W != 0 {
  122. page_attr |= PageAttribute::WRITE;
  123. }
  124. if self.0 & PA_X != 0 {
  125. page_attr |= PageAttribute::EXECUTE;
  126. }
  127. if self.0 & PA_U != 0 {
  128. page_attr |= PageAttribute::USER;
  129. }
  130. if self.0 & PA_A != 0 {
  131. page_attr |= PageAttribute::ACCESSED;
  132. }
  133. if self.0 & PA_D != 0 {
  134. page_attr |= PageAttribute::DIRTY;
  135. }
  136. if self.0 & PA_G != 0 {
  137. page_attr |= PageAttribute::GLOBAL;
  138. }
  139. if self.0 & PA_COW != 0 {
  140. page_attr |= PageAttribute::COPY_ON_WRITE;
  141. }
  142. if self.0 & PA_MMAP != 0 {
  143. page_attr |= PageAttribute::MAPPED;
  144. }
  145. /*if self.0 & PA_ANON != 0 {
  146. page_attr |= PageAttribute::ANONYMOUS;
  147. }*/
  148. Some(page_attr)
  149. }
  150. }
  151. impl From<PageAttribute> for PageAttribute64 {
  152. fn from(page_attr: PageAttribute) -> Self {
  153. let mut raw_attr = 0;
  154. for attr in page_attr.iter() {
  155. match attr {
  156. PageAttribute::PRESENT => raw_attr |= PA_V,
  157. PageAttribute::READ => raw_attr |= PA_R,
  158. PageAttribute::WRITE => raw_attr |= PA_W,
  159. PageAttribute::EXECUTE => raw_attr |= PA_X,
  160. PageAttribute::USER => raw_attr |= PA_U,
  161. PageAttribute::ACCESSED => raw_attr |= PA_A,
  162. PageAttribute::DIRTY => raw_attr |= PA_D,
  163. PageAttribute::GLOBAL => raw_attr |= PA_G,
  164. PageAttribute::COPY_ON_WRITE => raw_attr |= PA_COW,
  165. PageAttribute::MAPPED => raw_attr |= PA_MMAP,
  166. PageAttribute::ANONYMOUS => {}
  167. _ => unreachable!("Invalid page attribute"),
  168. }
  169. }
  170. Self(raw_attr)
  171. }
  172. }
  173. impl From<TableAttribute> for PageAttribute64 {
  174. fn from(table_attr: TableAttribute) -> Self {
  175. let mut raw_attr = 0;
  176. for attr in table_attr.iter() {
  177. match attr {
  178. TableAttribute::PRESENT => raw_attr |= PA_V,
  179. TableAttribute::GLOBAL => raw_attr |= PA_G,
  180. TableAttribute::USER | TableAttribute::ACCESSED => {}
  181. _ => unreachable!("Invalid table attribute"),
  182. }
  183. }
  184. Self(raw_attr)
  185. }
  186. }
impl ArchPhysAccess {
    /// Base virtual address of the linear physical-memory map: physical
    /// address `p` is accessible at virtual `PHYS_OFFSET + p`.
    const PHYS_OFFSET: usize = PHYS_MAP_VIRT;
}
impl PhysAccess for ArchPhysAccess {
    /// Translate a physical address into a pointer through the linear
    /// map. Panics if `paddr` is not aligned for `T`.
    ///
    /// # Safety
    /// Caller must ensure `paddr` lies in memory covered by the linear
    /// map and that a `T` may validly live there.
    unsafe fn as_ptr<T>(paddr: PAddr) -> NonNull<T> {
        let alignment: usize = align_of::<T>();
        assert!(paddr.addr() % alignment == 0, "Alignment error");
        unsafe {
            // SAFETY: We can assume that we'll never have `self.addr()` equals
            // to `-PHYS_OFFSET`. Otherwise, the kernel might be broken.
            NonNull::new_unchecked((Self::PHYS_OFFSET + paddr.addr()) as *mut T)
        }
    }

    /// Inverse of `as_ptr`: recover the physical address behind a
    /// linear-map pointer. Panics on misalignment or if the pointer is
    /// below the linear-map base.
    ///
    /// # Safety
    /// `ptr` must have been derived from the linear map.
    // NOTE(review): only the lower bound is checked — a pointer above
    // the end of the linear map would still pass; confirm callers
    // never hand in such pointers.
    unsafe fn from_ptr<T>(ptr: NonNull<T>) -> PAddr {
        let addr = ptr.addr().get();
        assert!(addr % align_of::<T>() == 0, "Alignment error");
        assert!(
            addr >= Self::PHYS_OFFSET,
            "Address is not a valid physical address"
        );
        PAddr::from_val(addr - Self::PHYS_OFFSET)
    }
}
impl Memory for ArchMemory {
    /// All RAM ranges reported present by the device tree.
    fn present_ram() -> impl Iterator<Item = PRange> {
        FDT.present_ram()
    }

    /// Present RAM minus the kernel image: first the slack between the
    /// end of the kernel image and the next 2 MiB boundary, then every
    /// present range above that boundary (ranges straddling it are
    /// trimmed to their upper part).
    // NOTE(review): this duplicates the blanket `FreeRam::free_ram`
    // impl below — consider unifying them.
    // NOTE(review): RAM entirely below the kernel image is silently
    // excluded — presumably reserved by firmware/boot; confirm.
    fn free_ram() -> impl Iterator<Item = PRange> {
        unsafe extern "C" {
            // Linker-provided kernel image bounds (virtual addresses).
            fn __kernel_start();
            fn __kernel_end();
        }
        // Convert the virtual end-of-image symbol to a physical address.
        let kernel_end = PAddr::from(__kernel_end as usize - KIMAGE_OFFSET);
        let paddr_after_kimage_aligned = kernel_end.ceil_to(0x200000);
        core::iter::once(PRange::new(kernel_end, paddr_after_kimage_aligned)).chain(
            Self::present_ram()
                .filter(move |range| range.end() > paddr_after_kimage_aligned)
                .map(move |range| {
                    if range.start() < paddr_after_kimage_aligned {
                        // Range straddles the boundary: keep the part above it.
                        let (_, right) = range.split_at(paddr_after_kimage_aligned);
                        right
                    } else {
                        range
                    }
                }),
        )
    }
}
/// Default paging mode for this architecture (Sv48).
pub type DefaultPagingMode = PagingModeSv48;

/// An iterator over all RAM ranges present on the machine.
pub trait PresentRam: Iterator<Item = PRange> {}

/// Derives the free-RAM iterator from a present-RAM iterator by
/// excluding the kernel image (see the blanket impl below).
pub trait FreeRam: PresentRam {
    fn free_ram(self) -> impl Iterator<Item = PRange>;
}
impl<T> FreeRam for T
where
    T: PresentRam,
{
    /// Same carve-out as `ArchMemory::free_ram`: yield the slack from
    /// the end of the kernel image up to the next 2 MiB boundary, then
    /// every present range above that boundary, trimming any range
    /// that straddles it.
    // NOTE(review): duplicated logic with `ArchMemory::free_ram` —
    // consider having one delegate to the other.
    fn free_ram(self) -> impl Iterator<Item = PRange> {
        unsafe extern "C" {
            // Linker-provided kernel image bounds (virtual addresses).
            fn __kernel_start();
            fn __kernel_end();
        }
        // Physical address just past the kernel image.
        let kernel_end = PAddr::from(__kernel_end as usize - KIMAGE_OFFSET);
        let paddr_after_kimage_aligned = kernel_end.ceil_to(0x200000);
        core::iter::once(PRange::new(kernel_end, paddr_after_kimage_aligned)).chain(
            self.filter(move |range| range.end() > paddr_after_kimage_aligned)
                .map(move |range| {
                    if range.start() < paddr_after_kimage_aligned {
                        // Keep only the part above the 2 MiB boundary.
                        let (_, right) = range.split_at(paddr_after_kimage_aligned);
                        right
                    } else {
                        range
                    }
                }),
        )
    }
}
/// Flush the TLB entry for a single virtual address by issuing
/// `sfence.vma` with ASID 0 and the given address.
#[inline(always)]
pub fn flush_tlb(vaddr: usize) {
    sfence_vma(0, vaddr);
}

/// Flush the entire TLB (`sfence.vma` with no address/ASID filter).
#[inline(always)]
pub fn flush_tlb_all() {
    sfence_vma_all();
}
  272. #[inline(always)]
  273. pub fn get_root_page_table_pfn() -> PFN {
  274. let satp_val = satp::read();
  275. let ppn = satp_val.ppn();
  276. PFN::from(ppn)
  277. }
/// Install `pfn` as the root page table by writing `satp` (Sv48 mode,
/// ASID 0), then flush the whole TLB so stale translations are gone.
// SAFETY: writing `satp` switches the address space; `pfn` must point
// at a valid Sv48 root table that maps the currently executing code.
// NOTE(review): consider making this an `unsafe fn` to surface that
// contract to callers.
#[inline(always)]
pub fn set_root_page_table_pfn(pfn: PFN) {
    unsafe { satp::set(satp::Mode::Sv48, 0, usize::from(pfn)) };
    sfence_vma_all();
}