mm.rs

use core::alloc::{AllocError, Allocator, Layout};
use core::cell::RefCell;
use core::ptr::NonNull;

use eonix_mm::address::{AddrOps as _, PRange};
use eonix_mm::page_table::PageTableAlloc;
use eonix_mm::paging::{BasicFolio, FrameAlloc, PAGE_SIZE, PFN};

pub use crate::arch::mm::{
    flush_tlb, flush_tlb_all, get_root_page_table_pfn, set_root_page_table_pfn,
    ArchMemory, ArchPhysAccess, GLOBAL_PAGE_TABLE,
};

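/// A simple boot-stage physical page allocator.
///
/// Free memory is tracked as a small ring buffer of physical ranges; pages
/// are handed out from the front of the range at `head`, and newly reported
/// free ranges are appended at `tail`.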
pub struct BasicPageAlloc {
    ranges: [Option<PRange>; Self::MAX],
    head: usize,
    tail: usize,
}

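/// Internal state of [`ScopedAllocator`]: the borrowed backing buffer and
/// the window of it that is still free, plus the number of allocations that
/// are still live.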
struct ScopedAllocInner<'a> {
    _memory: &'a mut [u8],
    current: NonNull<[u8]>,
    allocated_count: usize,
}

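/// A bump allocator over a caller-provided byte buffer.
///
/// `deallocate` never returns memory to the buffer; it only decrements the
/// live-allocation count so that the [`Drop`] impl can detect leaks.
///
/// Illustrative sketch (not part of the original source; assumes the `alloc`
/// crate's allocator-aware `Vec` is available):
///
/// ```ignore
/// let mut buf = [0u8; 256];
/// let scoped = ScopedAllocator::new(&mut buf);
/// scoped.with_alloc(|a| {
///     let mut v = alloc::vec::Vec::new_in(a);
///     v.push(1u8);
///     // `v` is dropped here, keeping the leak check in `Drop` satisfied.
/// });
/// ```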
pub struct ScopedAllocator<'a> {
    inner: RefCell<ScopedAllocInner<'a>>,
}

impl BasicPageAlloc {
    const MAX: usize = 32;

    pub const fn new() -> Self {
        Self {
            ranges: [None; Self::MAX],
            head: 0,
            tail: 0,
        }
    }

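    /// Take a single page from the range at `head`, advancing `head` once
    /// that range is exhausted.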
    fn alloc_one(&mut self) -> PFN {
        assert_ne!(self.head, self.tail, "No free pages available");

        let mut range = self.ranges[self.head].take().unwrap();
        let pfn = PFN::from(range.start());

        range = PRange::new(range.start() + PAGE_SIZE, range.end());
        if range.len() != 0 {
            self.ranges[self.head] = Some(range);
        } else {
            self.head += 1;
            self.head %= Self::MAX;
        }

        pfn
    }

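    /// Allocate `1 << order` contiguous pages by rebuilding the ring buffer,
    /// carving the block off the end of the first range that is large enough
    /// and re-adding everything else.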
    fn alloc_order(&mut self, order: u32) -> PFN {
        assert!(order <= 4);

        let me = core::mem::replace(self, Self::new());
        let mut found = None;

        for mut range in me.into_iter() {
            if found.is_some() || range.len() < (PAGE_SIZE << order) {
                self.add_range(range);
                continue;
            }

            range = range.shrink(PAGE_SIZE << order);
            found = Some(PFN::from(range.end()));

            if range.len() != 0 {
                self.add_range(range);
            }
        }

        found.expect("No free pages available for the requested order")
    }

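    /// Append a free physical range to the ring buffer, with its start
    /// rounded up and its end rounded down. Panics if the buffer is full.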
    pub fn add_range(&mut self, range: PRange) {
        let tail = self.tail;
        self.tail += 1;
        self.tail %= Self::MAX;

        if self.tail == self.head {
            panic!("Page allocator is full");
        }

        self.ranges[tail] =
            Some(PRange::new(range.start().ceil(), range.end().floor()));
    }

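    /// Allocate `1 << order` contiguous pages. Only orders up to 4 are
    /// supported.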
    pub fn alloc(&mut self, order: u32) -> PFN {
        match order {
            0 => self.alloc_one(),
            ..=4 => self.alloc_order(order),
            _ => panic!("Order {} is too large for BasicPageAlloc", order),
        }
    }

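    /// Consume the allocator and yield its remaining free ranges, starting
    /// at `head` and stopping at the first empty slot.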
    pub fn into_iter(self) -> impl Iterator<Item = PRange> {
        self.ranges
            .into_iter()
            .cycle()
            .skip(self.head)
            .map_while(|x| x)
    }
}

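/// A cloneable handle to a shared [`BasicPageAlloc`], used below to provide
/// the `FrameAlloc` and `PageTableAlloc` trait implementations.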
#[derive(Clone)]
pub struct BasicPageAllocRef<'a>(&'a RefCell<BasicPageAlloc>);

impl<'a> BasicPageAllocRef<'a> {
    pub const fn new(alloc: &'a RefCell<BasicPageAlloc>) -> Self {
        Self(alloc)
    }
}

impl FrameAlloc for BasicPageAllocRef<'_> {
    type Folio = BasicFolio;

    fn alloc_order(&self, order: u32) -> Option<Self::Folio> {
        Some(BasicFolio::new(self.0.borrow_mut().alloc(order), order))
    }
}

impl PageTableAlloc for BasicPageAllocRef<'_> {
    type Folio = BasicFolio;

    fn alloc(&self) -> Self::Folio {
        FrameAlloc::alloc(self).unwrap()
    }

    unsafe fn from_raw(&self, pfn: PFN) -> Self::Folio {
        BasicFolio::new(pfn, 0)
    }
}

impl<'a> ScopedAllocator<'a> {
    pub fn new(memory: &'a mut [u8]) -> Self {
        ScopedAllocator {
            inner: RefCell::new(ScopedAllocInner {
                current: NonNull::new(memory).unwrap(),
                _memory: memory,
                allocated_count: 0,
            }),
        }
    }

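    /// Run `func` with a reference to this allocator whose lifetime is
    /// narrowed to one outlived by both `self` and the borrowed buffer.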
    pub fn with_alloc<'b, 'r, O>(
        &'r self,
        func: impl FnOnce(&'b ScopedAllocator<'a>) -> O,
    ) -> O
    where
        'a: 'b,
        'r: 'b,
    {
        func(self)
    }
}

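// Bump allocation out of the borrowed buffer. `deallocate` does not return
// memory for reuse; it only tracks how many allocations are still live.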
unsafe impl Allocator for &ScopedAllocator<'_> {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        let mut inner = self.inner.borrow_mut();
        let memory = &mut inner.current;

        let addr: NonNull<u8> = memory.cast();
        let offset = addr.align_offset(layout.align());

        if offset + layout.size() > memory.len() {
            return Err(AllocError);
        }

        let allocated = unsafe {
            // SAFETY: `addr + offset` won't overflow.
            NonNull::slice_from_raw_parts(addr.add(offset), layout.size())
        };

        unsafe {
            // SAFETY: `allocated + layout.size()` won't overflow.
            *memory = NonNull::slice_from_raw_parts(
                allocated.cast::<u8>().add(layout.size()),
                memory.len() - offset - layout.size(),
            );
        }

        inner.allocated_count += 1;

        Ok(allocated)
    }

    unsafe fn deallocate(&self, _: NonNull<u8>, _: Layout) {
        self.inner.borrow_mut().allocated_count -= 1;
    }
}

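// Leak check: every allocation made through the scoped allocator must have
// been deallocated before the allocator itself is dropped.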
impl Drop for ScopedAllocator<'_> {
    fn drop(&mut self) {
        let inner = self.inner.borrow();
        if inner.allocated_count > 0 {
            panic!(
                "Memory leak detected: {} allocations not deallocated",
                inner.allocated_count
            );
        }
    }
}