Преглед изворни кода

style: remove `Clone` constraint in `SlabAllocator`.

greatbridf пре 8 месеци
родитељ
комит
400662db4e
2 измењених фајлова са 14 додато и 13 уклоњено
  1. 6 6
      crates/slab_allocator/src/lib.rs
  2. 8 7
      crates/slab_allocator/src/slab_cache.rs

+ 6 - 6
crates/slab_allocator/src/lib.rs

@@ -37,6 +37,7 @@ pub trait SlabRawPage: RawPage {
 
 pub struct SlabAllocator<T, A, const SLAB_CACHE_COUNT: usize> {
     slabs: [Spin<SlabCache<T, A>>; SLAB_CACHE_COUNT],
+    alloc: A,
 }
 
 unsafe impl<T, A, const SLAB_CACHE_COUNT: usize> Send for SlabAllocator<T, A, SLAB_CACHE_COUNT> {}
@@ -45,25 +46,24 @@ unsafe impl<T, A, const SLAB_CACHE_COUNT: usize> Sync for SlabAllocator<T, A, SL
 impl<Raw, Allocator, const SLAB_CACHE_COUNT: usize> SlabAllocator<Raw, Allocator, SLAB_CACHE_COUNT>
 where
     Raw: SlabRawPage,
-    Allocator: PageAlloc<RawPage = Raw> + Clone,
+    Allocator: PageAlloc<RawPage = Raw>,
 {
     pub fn new_in(alloc: Allocator) -> Self {
         Self {
-            slabs: core::array::from_fn(|i| {
-                Spin::new(SlabCache::new_in(1 << (i + 3), alloc.clone()))
-            }),
+            slabs: core::array::from_fn(|i| Spin::new(SlabCache::new_in(1 << (i + 3)))),
+            alloc,
         }
     }
 
     pub fn alloc(&self, mut size: usize) -> *mut u8 {
         size = max(8, size);
         let idx = size.next_power_of_two().trailing_zeros() - 3;
-        self.slabs[idx as usize].lock().alloc()
+        self.slabs[idx as usize].lock().alloc(&self.alloc)
     }
 
     pub fn dealloc(&self, ptr: *mut u8, mut size: usize) {
         size = max(8, size);
         let idx = size.next_power_of_two().trailing_zeros() - 3;
-        self.slabs[idx as usize].lock().dealloc(ptr);
+        self.slabs[idx as usize].lock().dealloc(ptr, &self.alloc);
     }
 }

+ 8 - 7
crates/slab_allocator/src/slab_cache.rs

@@ -8,8 +8,7 @@ pub(crate) struct SlabCache<T, A> {
     partial_list: List,
     full_list: List,
     object_size: u32,
-    alloc: A,
-    _phantom: PhantomData<T>,
+    _phantom: PhantomData<(T, A)>,
 }
 
 trait SlabRawPageExt {
@@ -89,7 +88,7 @@ where
     Raw: SlabRawPage,
     Allocator: PageAlloc<RawPage = Raw>,
 {
-    pub(crate) const fn new_in(object_size: u32, alloc: Allocator) -> Self {
+    pub(crate) const fn new_in(object_size: u32) -> Self {
        // avoid unnecessary branch in alloc and dealloc
         assert!(object_size <= PAGE_SIZE as u32 / 2);
 
@@ -97,13 +96,12 @@ where
             empty_list: List::new(),
             partial_list: List::new(),
             full_list: List::new(),
-            alloc,
             object_size: object_size,
             _phantom: PhantomData,
         }
     }
 
-    pub(crate) fn alloc(&mut self) -> *mut u8 {
+    pub(crate) fn alloc(&mut self, alloc: &Allocator) -> *mut u8 {
         if !self.partial_list.is_empty() {
             let page_ptr = unsafe {
                 Raw::from_link(
@@ -137,7 +135,7 @@ where
             return ptr.as_ptr() as *mut u8;
         }
 
-        let new_page_ptr = self.alloc.alloc().expect("slab_cache get page fail!");
+        let new_page_ptr = alloc.alloc().expect("slab_cache get page fail!");
         let first_free = new_page_ptr.slab_page_init(self.object_size);
         new_page_ptr.slab_init(first_free);
         let ptr = new_page_ptr.alloc_slot().expect("should get slot");
@@ -145,7 +143,7 @@ where
         ptr.as_ptr() as *mut u8
     }
 
-    pub(crate) fn dealloc(&mut self, ptr: *mut u8) {
+    pub(crate) fn dealloc(&mut self, ptr: *mut u8, _alloc: &Allocator) {
         let page_ptr = Raw::in_which(ptr);
 
         if page_ptr.is_full() {
@@ -159,5 +157,8 @@ where
             self.partial_list.remove(unsafe { page_ptr.get_link() });
             self.empty_list.insert(unsafe { page_ptr.get_link() });
         }
+
+        // TODO: Check whether we should place some pages back with `alloc` if the global
+        //       free page count is below the watermark.
     }
 }