summaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--src/sync.rs56
1 file changed, 56 insertions, 0 deletions
diff --git a/src/sync.rs b/src/sync.rs
index 0e0ace2..38cffc8 100644
--- a/src/sync.rs
+++ b/src/sync.rs
@@ -81,3 +81,59 @@ impl<T> interface::Mutex for NullLock<T> {
f(data)
}
}
+
+use core::sync::atomic::{AtomicBool, Ordering};
+
/// A busy-waiting mutual-exclusion lock.
///
/// Ownership of `data` is claimed by atomically flipping `lock` from
/// `false` to `true`; the holder has exclusive access until it stores
/// `false` again. Interior mutability goes through [`UnsafeCell`], the
/// only sanctioned way to produce a `&mut T` from a shared reference.
///
/// The original carried an explicit `where T: Sized` bound; `Sized` is
/// already implied on type parameters, so the bound is dropped.
pub struct SpinLock<T> {
    /// The protected value. Touch only while `lock` reads `true` and
    /// you are the thread that set it.
    data: UnsafeCell<T>,
    /// `false` = free, `true` = held.
    lock: AtomicBool,
}
+
// SAFETY: a SpinLock owns its T outright, so moving the whole lock to
// another thread is sound whenever T itself may be moved across threads.
unsafe impl<T> Send for SpinLock<T> where T: Sized + Send {}
// SAFETY: sharing &SpinLock<T> across threads is sound because every
// access to the inner T is serialized by the atomic `lock` flag — only
// one thread at a time ever holds a reference into `data`. As with the
// standard mutex pattern, T: Send (not T: Sync) is the right requirement.
unsafe impl<T> Sync for SpinLock<T> where T: Sized + Send {}
+
+impl<T> SpinLock<T> {
+ /// # New lock
+ pub const fn new(data: T) -> Self {
+ Self {
+ data: UnsafeCell::new(data),
+ lock: AtomicBool::new(false),
+ }
+ }
+}
+
+impl<T: interface::Initializable> SpinLock<T> {
+ /// # Init
+ pub fn init(&self) {
+ use interface::Mutex;
+ self.lock(|init| {
+ init.init();
+ });
+ }
+}
+
+impl<T> interface::Mutex for SpinLock<T> {
+ /// # Data type
+ type Data = T;
+ /// # Lock
+ fn lock<'a, R>(&'a self, f: impl FnOnce(&'a mut T) -> R) -> R {
+ loop {
+ // Loop until acquired the lock
+ if let Ok(false) =
+ self.lock
+ .compare_exchange(false, true, Ordering::Acquire, Ordering::Relaxed)
+ {
+ break;
+ }
+ }
+ let data = unsafe { &mut *self.data.get() };
+
+ let res = f(data);
+ // Release the lock after finished with the underlying data
+ self.lock.store(false, Ordering::Release);
+ res
+ }
+}