use core::{
    marker::PhantomData,
    ops::Deref,
    sync::atomic::{AtomicPtr, Ordering},
};

use crate::{
    prelude::*,
    sync::{lock::Guard, semaphore::RwSemaphoreStrategy},
};

use alloc::sync::Arc;
use lazy_static::lazy_static;

pub struct RCUReadGuard<'data, T: 'data> {
    value: T,
    guard: Guard<'data, (), RwSemaphoreStrategy, false>,
    _phantom: PhantomData<&'data T>,
}

lazy_static! {
    static ref GLOBAL_RCU_SEM: RwSemaphore<()> = RwSemaphore::new(());
}

impl<'data, T: 'data> RCUReadGuard<'data, T> {
    fn lock(value: T) -> Self {
        Self {
            value,
            guard: GLOBAL_RCU_SEM.lock_shared(),
            _phantom: PhantomData,
        }
    }
}

impl<'data, T: 'data> Deref for RCUReadGuard<'data, T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        &self.value
    }
}

/// Wait for all in-flight RCU readers by acquiring the global semaphore
/// exclusively, then dropping the guard immediately.
fn rcu_sync() {
    GLOBAL_RCU_SEM.lock();
}

pub trait RCUNode<T> {
    fn rcu_prev(&self) -> &AtomicPtr<T>;
    fn rcu_next(&self) -> &AtomicPtr<T>;
}

pub struct RCUList<T: RCUNode<T>> {
    head: AtomicPtr<T>,
    reader_lock: RwSemaphore<()>,
    update_lock: Mutex<()>,
}

impl<T: RCUNode<T>> RCUList<T> {
    pub fn new() -> Self {
        Self {
            head: AtomicPtr::new(core::ptr::null_mut()),
            reader_lock: RwSemaphore::new(()),
            update_lock: Mutex::new(()),
        }
    }

    pub fn insert(&self, new_node: Arc<T>) {
        let _lck = self.update_lock.lock();
        let old_head = self.head.load(Ordering::Acquire);

        new_node
            .rcu_prev()
            .store(core::ptr::null_mut(), Ordering::Release);
        new_node.rcu_next().store(old_head, Ordering::Release);
        if let Some(old_head) = unsafe { old_head.as_ref() } {
            old_head
                .rcu_prev()
                .store(Arc::into_raw(new_node.clone()) as *mut _, Ordering::Release);
        }

        // Publish the new node as the list head.
        self.head
            .store(Arc::into_raw(new_node) as *mut _, Ordering::Release);
    }

    pub fn remove(&self, node: &Arc<T>) {
        let _lck = self.update_lock.lock();
        let prev = node.rcu_prev().load(Ordering::Acquire);
        let next = node.rcu_next().load(Ordering::Acquire);

        if let Some(next) = unsafe { next.as_ref() } {
            let me = next.rcu_prev().swap(prev, Ordering::AcqRel);
            debug_assert!(me == Arc::as_ptr(&node) as *mut _);
            unsafe { Arc::from_raw(me) };
        }

        {
            let prev_next =
                unsafe { prev.as_ref().map(|rcu| rcu.rcu_next()) }.unwrap_or(&self.head);

            let me = prev_next.swap(next, Ordering::AcqRel);
            debug_assert!(me == Arc::as_ptr(&node) as *mut _);
            unsafe { Arc::from_raw(me) };
        }

        // Wait for all readers to finish before clearing the node's links.
        let _lck = self.reader_lock.lock();
        node.rcu_prev()
            .store(core::ptr::null_mut(), Ordering::Release);
        node.rcu_next()
            .store(core::ptr::null_mut(), Ordering::Release);
    }

    pub fn replace(&self, old_node: &Arc<T>, new_node: Arc<T>) {
        let _lck = self.update_lock.lock();
        let prev = old_node.rcu_prev().load(Ordering::Acquire);
        let next = old_node.rcu_next().load(Ordering::Acquire);

        new_node.rcu_prev().store(prev, Ordering::Release);
        new_node.rcu_next().store(next, Ordering::Release);

        {
            let prev_next =
                unsafe { prev.as_ref().map(|rcu| rcu.rcu_next()) }.unwrap_or(&self.head);

            let old =
                prev_next.swap(Arc::into_raw(new_node.clone()) as *mut _, Ordering::AcqRel);
            debug_assert!(old == Arc::as_ptr(&old_node) as *mut _);
            unsafe { Arc::from_raw(old) };
        }

        if let Some(next) = unsafe { next.as_ref() } {
            let old = next
                .rcu_prev()
                .swap(Arc::into_raw(new_node.clone()) as *mut _, Ordering::AcqRel);
            debug_assert!(old == Arc::as_ptr(&old_node) as *mut _);
            unsafe { Arc::from_raw(old) };
        }

        // Wait for all readers to finish before clearing the old node's links.
        let _lck = self.reader_lock.lock();
        old_node
            .rcu_prev()
            .store(core::ptr::null_mut(), Ordering::Release);
        old_node
            .rcu_next()
            .store(core::ptr::null_mut(), Ordering::Release);
    }

    pub fn iter(&self) -> RCUIterator<'_, T> {
        let _lck = self.reader_lock.lock_shared();

        RCUIterator {
            // SAFETY: We have a read lock, so the node is still alive.
            cur: self.head.load(Ordering::SeqCst),
            _lock: _lck,
        }
    }
}
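// Illustrative sketch (not part of the original source): a type opts into
// `RCUList` by embedding its own prev/next link words and exposing them
// through `RCUNode`. `MyNode` and its fields are hypothetical names.
//
// struct MyNode {
//     value: u32,
//     prev: AtomicPtr<MyNode>,
//     next: AtomicPtr<MyNode>,
// }
//
// impl RCUNode<MyNode> for MyNode {
//     fn rcu_prev(&self) -> &AtomicPtr<MyNode> {
//         &self.prev
//     }
//
//     fn rcu_next(&self) -> &AtomicPtr<MyNode> {
//         &self.next
//     }
// }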
pub struct RCUIterator<'lt, T: RCUNode<T>> {
    cur: *const T,
    _lock: Guard<'lt, (), RwSemaphoreStrategy, false>,
}

impl<'lt, T: RCUNode<T>> Iterator for RCUIterator<'lt, T> {
    type Item = BorrowedArc<'lt, T>;

    fn next(&mut self) -> Option<Self::Item> {
        match unsafe { self.cur.as_ref() } {
            None => None,
            Some(real) => {
                // SAFETY: We have a read lock, so the node is still alive.
                let ret = self.cur;
                self.cur = real.rcu_next().load(Ordering::SeqCst);
                Some(BorrowedArc::from_raw(ret))
            }
        }
    }
}

pub struct RCUPointer<T>(AtomicPtr<T>);

impl<T> RCUPointer<T> {
    pub fn new_with(value: Arc<T>) -> Self {
        Self(AtomicPtr::new(Arc::into_raw(value) as *mut _))
    }

    pub fn empty() -> Self {
        Self(AtomicPtr::new(core::ptr::null_mut()))
    }

    pub fn load<'lt>(&self) -> Option<RCUReadGuard<'lt, BorrowedArc<'lt, T>>> {
        let ptr = self.0.load(Ordering::Acquire);

        if ptr.is_null() {
            None
        } else {
            Some(RCUReadGuard::lock(BorrowedArc::from_raw(ptr)))
        }
    }

    /// # Safety
    /// Caller must ensure that the pointer is freed after all readers are done.
    pub unsafe fn swap(&self, new: Option<Arc<T>>) -> Option<Arc<T>> {
        let new = new
            .map(|arc| Arc::into_raw(arc) as *mut T)
            .unwrap_or(core::ptr::null_mut());

        let old = self.0.swap(new, Ordering::AcqRel);

        if old.is_null() {
            None
        } else {
            // Wait for in-flight readers before handing ownership of the
            // old value back to the caller.
            rcu_sync();
            Some(unsafe { Arc::from_raw(old) })
        }
    }
}
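// Usage sketch for `RCUPointer` (hypothetical, assuming `BorrowedArc`
// dereferences to `T`): readers pin the current value with `load`; writers
// publish a replacement with `swap`, which runs `rcu_sync` before returning
// ownership of the old value.
//
// let ptr: RCUPointer<u32> = RCUPointer::new_with(Arc::new(42));
//
// // Reader side: the guard keeps the global RCU read lock held for as
// // long as the borrowed value is in use.
// if let Some(value) = ptr.load() {
//     assert_eq!(**value, 42);
// }
//
// // Writer side: publish the new value, then drop the old one. Dropping
// // is safe here because `swap` waits for in-flight readers first.
// let old = unsafe { ptr.swap(Some(Arc::new(43))) };
// drop(old);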