|
|
@@ -1,154 +1,193 @@
|
|
|
-use crate::{LockStrategy, WaitStrategy};
|
|
|
+mod guard;
|
|
|
+mod wait;
|
|
|
+
|
|
|
use core::{
|
|
|
- marker::PhantomData,
|
|
|
+ cell::UnsafeCell,
|
|
|
sync::atomic::{AtomicIsize, Ordering},
|
|
|
};
|
|
|
|
|
|
-pub struct RwLockStrategy<W>(PhantomData<W>)
|
|
|
-where
|
|
|
- W: WaitStrategy;
|
|
|
+pub use guard::{RwLockReadGuard, RwLockWriteGuard};
|
|
|
+pub use wait::Wait;
|
|
|
|
|
|
-pub struct RwLockData<W>
|
|
|
+#[derive(Debug, Default)]
|
|
|
+pub struct RwLock<T, W>
|
|
|
where
|
|
|
- W: WaitStrategy,
|
|
|
+ T: ?Sized,
|
|
|
+ W: Wait,
|
|
|
{
|
|
|
counter: AtomicIsize,
|
|
|
- wait_data: W::Data,
|
|
|
+ wait: W,
|
|
|
+ value: UnsafeCell<T>,
|
|
|
}
|
|
|
|
|
|
-impl<W> RwLockStrategy<W>
|
|
|
+impl<T, W> RwLock<T, W>
|
|
|
where
|
|
|
- W: WaitStrategy,
|
|
|
+ W: Wait,
|
|
|
{
|
|
|
- #[cold]
|
|
|
- fn lock_slow_path(
|
|
|
- data: &<Self as LockStrategy>::StrategyData,
|
|
|
- ) -> <Self as LockStrategy>::GuardContext {
|
|
|
- loop {
|
|
|
- if let Ok(_) =
|
|
|
- data.counter
|
|
|
- .compare_exchange_weak(0, -1, Ordering::Acquire, Ordering::Relaxed)
|
|
|
- {
|
|
|
- return ();
|
|
|
- }
|
|
|
-
|
|
|
- W::write_wait(&data.wait_data, || {
|
|
|
- data.counter.load(Ordering::Relaxed) == 0
|
|
|
- });
|
|
|
- }
|
|
|
- }
|
|
|
-
|
|
|
- #[cold]
|
|
|
- fn lock_shared_slow_path(
|
|
|
- data: &<Self as LockStrategy>::StrategyData,
|
|
|
- ) -> <Self as LockStrategy>::GuardContext {
|
|
|
- loop {
|
|
|
- let mut counter = data.counter.load(Ordering::Relaxed);
|
|
|
- while counter >= 0 {
|
|
|
- match data.counter.compare_exchange_weak(
|
|
|
- counter,
|
|
|
- counter + 1,
|
|
|
- Ordering::Acquire,
|
|
|
- Ordering::Relaxed,
|
|
|
- ) {
|
|
|
- Ok(_) => return (),
|
|
|
- Err(previous) => counter = previous,
|
|
|
- }
|
|
|
- }
|
|
|
-
|
|
|
- W::read_wait(&data.wait_data, || {
|
|
|
- data.counter.load(Ordering::Relaxed) >= 0
|
|
|
- });
|
|
|
+ pub const fn new(value: T, wait: W) -> Self {
|
|
|
+ Self {
|
|
|
+ counter: AtomicIsize::new(0),
|
|
|
+ wait,
|
|
|
+ value: UnsafeCell::new(value),
|
|
|
}
|
|
|
}
|
|
|
}
|
|
|
|
|
|
-unsafe impl<W> LockStrategy for RwLockStrategy<W>
|
|
|
+impl<T, W> RwLock<T, W>
|
|
|
where
|
|
|
- W: WaitStrategy,
|
|
|
+ T: ?Sized,
|
|
|
+ W: Wait,
|
|
|
{
|
|
|
- type StrategyData = RwLockData<W>;
|
|
|
- type GuardContext = ();
|
|
|
-
|
|
|
- fn new_data() -> Self::StrategyData {
|
|
|
- Self::StrategyData {
|
|
|
- counter: AtomicIsize::new(0),
|
|
|
- wait_data: W::new_data(),
|
|
|
+ /// # Safety
|
|
|
+ /// This function is unsafe because the caller MUST ensure that we've got the
|
|
|
+ /// write access before calling this function.
|
|
|
+ unsafe fn write_lock(&self) -> RwLockWriteGuard<'_, T, W> {
|
|
|
+ RwLockWriteGuard {
|
|
|
+ lock: self,
|
|
|
+ // SAFETY: We are holding the write lock, so we can safely access the value.
|
|
|
+ value: unsafe { &mut *self.value.get() },
|
|
|
}
|
|
|
}
|
|
|
|
|
|
- unsafe fn is_locked(data: &Self::StrategyData) -> bool {
|
|
|
- data.counter.load(Ordering::Relaxed) == 1
|
|
|
+ /// # Safety
|
|
|
+ /// This function is unsafe because the caller MUST ensure that we've got the
|
|
|
+ /// read access before calling this function.
|
|
|
+ unsafe fn read_lock(&self) -> RwLockReadGuard<'_, T, W> {
|
|
|
+ RwLockReadGuard {
|
|
|
+ lock: self,
|
|
|
+ // SAFETY: We are holding the read lock, so we can safely access the value.
|
|
|
+ value: unsafe { &*self.value.get() },
|
|
|
+ }
|
|
|
}
|
|
|
|
|
|
- unsafe fn try_lock(data: &Self::StrategyData) -> Option<Self::GuardContext> {
|
|
|
- data.counter
|
|
|
+ pub fn try_write(&self) -> Option<RwLockWriteGuard<'_, T, W>> {
|
|
|
+ self.counter
|
|
|
.compare_exchange(0, -1, Ordering::Acquire, Ordering::Relaxed)
|
|
|
- .map(|_| ())
|
|
|
.ok()
|
|
|
+ .map(|_| unsafe { self.write_lock() })
|
|
|
+ }
|
|
|
+
|
|
|
+ fn try_write_weak(&self) -> Option<RwLockWriteGuard<'_, T, W>> {
|
|
|
+ self.counter
|
|
|
+ .compare_exchange_weak(0, -1, Ordering::Acquire, Ordering::Relaxed)
|
|
|
+ .ok()
|
|
|
+ .map(|_| unsafe { self.write_lock() })
|
|
|
}
|
|
|
|
|
|
- unsafe fn try_lock_shared(data: &Self::StrategyData) -> Option<Self::GuardContext>
|
|
|
- where
|
|
|
- Self: Sized,
|
|
|
- {
|
|
|
- if W::has_write_waiting(&data.wait_data) {
|
|
|
+ pub fn try_read(&self) -> Option<RwLockReadGuard<'_, T, W>> {
|
|
|
+ if self.wait.has_write_waiting() {
|
|
|
return None;
|
|
|
}
|
|
|
|
|
|
- let counter = data.counter.load(Ordering::Relaxed);
|
|
|
- match counter {
|
|
|
- 0.. => data
|
|
|
- .counter
|
|
|
+ let counter = self.counter.load(Ordering::Relaxed);
|
|
|
+ if counter >= 0 {
|
|
|
+ self.counter
|
|
|
.compare_exchange(counter, counter + 1, Ordering::Acquire, Ordering::Relaxed)
|
|
|
.ok()
|
|
|
- .map(|_| ()),
|
|
|
- _ => None,
|
|
|
+ .map(|_| unsafe { self.read_lock() })
|
|
|
+ } else {
|
|
|
+ None
|
|
|
}
|
|
|
}
|
|
|
|
|
|
- unsafe fn do_lock(data: &Self::StrategyData) -> Self::GuardContext {
|
|
|
- if let Some(context) = unsafe { Self::try_lock(data) } {
|
|
|
+ fn try_read_weak(&self) -> Option<RwLockReadGuard<'_, T, W>> {
|
|
|
+ if self.wait.has_write_waiting() {
|
|
|
+ return None;
|
|
|
+ }
|
|
|
+
|
|
|
+ let counter = self.counter.load(Ordering::Relaxed);
|
|
|
+ if counter >= 0 {
|
|
|
+ self.counter
|
|
|
+ .compare_exchange_weak(counter, counter + 1, Ordering::Acquire, Ordering::Relaxed)
|
|
|
+ .ok()
|
|
|
+ .map(|_| unsafe { self.read_lock() })
|
|
|
+ } else {
|
|
|
+ None
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ #[cold]
|
|
|
+ fn write_slow_path(&self) -> RwLockWriteGuard<'_, T, W> {
|
|
|
+ loop {
|
|
|
+ if let Some(guard) = self.try_write_weak() {
|
|
|
+ return guard;
|
|
|
+ }
|
|
|
+
|
|
|
+ self.wait
|
|
|
+ .write_wait(|| self.counter.load(Ordering::Relaxed) == 0);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ #[cold]
|
|
|
+ fn read_slow_path(&self) -> RwLockReadGuard<'_, T, W> {
|
|
|
+ loop {
|
|
|
+ // TODO: can we use `try_read_weak` here?
|
|
|
+ let mut counter = self.counter.load(Ordering::Relaxed);
|
|
|
+ while counter >= 0 {
|
|
|
+ match self.counter.compare_exchange_weak(
|
|
|
+ counter,
|
|
|
+ counter + 1,
|
|
|
+ Ordering::Acquire,
|
|
|
+ Ordering::Relaxed,
|
|
|
+ ) {
|
|
|
+ Ok(_) => return unsafe { self.read_lock() },
|
|
|
+ Err(previous) => counter = previous,
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ self.wait
|
|
|
+ .read_wait(|| self.counter.load(Ordering::Relaxed) >= 0);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ pub fn write(&self) -> RwLockWriteGuard<'_, T, W> {
|
|
|
+ if let Some(guard) = self.try_write() {
|
|
|
// Quick path
|
|
|
- context
|
|
|
+ guard
|
|
|
} else {
|
|
|
- Self::lock_slow_path(data)
|
|
|
+ self.write_slow_path()
|
|
|
}
|
|
|
}
|
|
|
|
|
|
- unsafe fn do_lock_shared(data: &Self::StrategyData) -> Self::GuardContext {
|
|
|
- if let Some(context) = unsafe { Self::try_lock_shared(data) } {
|
|
|
+ pub fn read(&self) -> RwLockReadGuard<'_, T, W> {
|
|
|
+ if let Some(guard) = self.try_read() {
|
|
|
// Quick path
|
|
|
- context
|
|
|
+ guard
|
|
|
} else {
|
|
|
- Self::lock_shared_slow_path(data)
|
|
|
+ self.read_slow_path()
|
|
|
}
|
|
|
}
|
|
|
|
|
|
- unsafe fn do_unlock(data: &Self::StrategyData, _: &mut Self::GuardContext)
|
|
|
- where
|
|
|
- Self: Sized,
|
|
|
- {
|
|
|
- let old = data.counter.fetch_add(1, Ordering::Release);
|
|
|
- assert_eq!(
|
|
|
- old, -1,
|
|
|
- "RwLockStrategy::do_unlock: erroneous counter value: {}",
|
|
|
- old
|
|
|
- );
|
|
|
- W::write_notify(&data.wait_data);
|
|
|
+ pub fn get_mut(&mut self) -> &mut T {
|
|
|
+ // SAFETY: The exclusive access to the lock is guaranteed by the borrow checker.
|
|
|
+ unsafe { &mut *self.value.get() }
|
|
|
}
|
|
|
+}
|
|
|
|
|
|
- unsafe fn do_unlock_shared(data: &Self::StrategyData, _: &mut Self::GuardContext)
|
|
|
- where
|
|
|
- Self: Sized,
|
|
|
- {
|
|
|
- match data.counter.fetch_sub(1, Ordering::Release) {
|
|
|
- 2.. => {}
|
|
|
- 1 => W::read_notify(&data.wait_data),
|
|
|
- val => unreachable!(
|
|
|
- "RwLockStrategy::do_unlock_shared: erroneous counter value: {}",
|
|
|
- val
|
|
|
- ),
|
|
|
- }
|
|
|
+impl<T, W> Clone for RwLock<T, W>
|
|
|
+where
|
|
|
+ T: ?Sized + Clone,
|
|
|
+ W: Wait,
|
|
|
+{
|
|
|
+ fn clone(&self) -> Self {
|
|
|
+ Self::new(self.read().clone(), W::new())
|
|
|
}
|
|
|
}
|
|
|
+
|
|
|
+// SAFETY: The lock owns its value, so sending the lock to another thread sends the
|
|
|
+// value with it; this is sound exactly when the value itself is `Send`.
|
|
|
+unsafe impl<T, W> Send for RwLock<T, W>
|
|
|
+where
|
|
|
+ T: ?Sized + Send,
|
|
|
+ W: Wait,
|
|
|
+{
|
|
|
+}
|
|
|
+
|
|
|
+// SAFETY: `RwLock` hands out shared *and* exclusive references across threads, so it is
|
|
|
+// only sound to implement `Sync` when the protected value is both `Send` and `Sync`.
|
|
|
+unsafe impl<T, W> Sync for RwLock<T, W>
|
|
|
+where
|
|
|
+ T: ?Sized + Send + Sync,
|
|
|
+ W: Wait,
|
|
|
+{
|
|
|
+}
|