use core::{
    cell::UnsafeCell,
    marker::PhantomData,
    mem,
    ops::{Deref, DerefMut},
    sync::atomic::{AtomicBool, Ordering},
};

use crate::{guard::IrqGuard, Architecture};

pub struct Spinlock<A: Architecture, T> {
    value: UnsafeCell<T>,
    state: AtomicBool,
    _pd: PhantomData<A>,
}

pub struct SpinlockGuard<'a, A: Architecture, T> {
    lock: &'a Spinlock<A, T>,
}

/// Spinlock implementation which disables interrupts to avoid deadlocks when an interrupt handler
/// tries to acquire a lock already taken before the IRQ fired.
pub struct IrqSafeSpinlock<A: Architecture, T> {
    inner: Spinlock<A, T>,
}

/// Token type allowing safe access to the underlying data of the [IrqSafeSpinlock]. Resumes normal
/// IRQ operation (if enabled before acquiring) when the lifetime is over.
pub struct IrqSafeSpinlockGuard<'a, A: Architecture, T> {
    // Must come first to ensure the lock is dropped first and only then IRQs are re-enabled
    inner: SpinlockGuard<'a, A, T>,
    _irq: IrqGuard<A>,
}

// Spinlock impls
impl<A: Architecture, T> Spinlock<A, T> {
    pub const fn new(value: T) -> Self {
        Self {
            value: UnsafeCell::new(value),
            state: AtomicBool::new(false),
            _pd: PhantomData,
        }
    }

    pub fn lock(&self) -> SpinlockGuard<A, T> {
        // Loop until the lock can be acquired
        // if LOCK_HACK.load(Ordering::Acquire) {
        //     return SpinlockInnerGuard { lock: self };
        // }
        while self
            .state
            .compare_exchange(false, true, Ordering::Acquire, Ordering::Relaxed)
            .is_err()
        {
            core::hint::spin_loop();
        }

        SpinlockGuard { lock: self }
    }
}

impl<A: Architecture, T> Deref for SpinlockGuard<'_, A, T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        unsafe { &*self.lock.value.get() }
    }
}

impl<A: Architecture, T> DerefMut for SpinlockGuard<'_, A, T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        unsafe { &mut *self.lock.value.get() }
    }
}

impl<A: Architecture, T> Drop for SpinlockGuard<'_, A, T> {
    fn drop(&mut self) {
        // if !LOCK_HACK.load(Ordering::Acquire) {
        self.lock
            .state
            .compare_exchange(true, false, Ordering::Release, Ordering::Relaxed)
            .unwrap();
        // }
    }
}

unsafe impl<A: Architecture, T> Sync for Spinlock<A, T> {}
unsafe impl<A: Architecture, T> Send for Spinlock<A, T> {}

// IrqSafeSpinlock impls
impl<A: Architecture, T> IrqSafeSpinlock<A, T> {
    /// Wraps the value in a spinlock primitive
    pub const fn new(value: T) -> Self {
        Self {
            inner: Spinlock::new(value),
        }
    }

    /// Replaces the contained value, returning the old one
    #[inline]
    pub fn replace(&self, value: T) -> T {
        let mut lock = self.lock();
        mem::replace(&mut *lock, value)
    }

    /// Attempts to acquire a lock. IRQs will be disabled until the lock is released.
    pub fn lock(&self) -> IrqSafeSpinlockGuard<A, T> {
        // Disable IRQs to avoid IRQ handler trying to acquire the same lock
        let irq_guard = IrqGuard::acquire();

        // Acquire the inner lock
        let inner = self.inner.lock();

        IrqSafeSpinlockGuard {
            inner,
            _irq: irq_guard,
        }
    }

    /// Returns an unsafe mutable reference to the inner value without taking the lock.
    ///
    /// # Safety
    ///
    /// Unsafe: explicitly ignores proper access sharing.
    #[allow(clippy::mut_from_ref)]
    pub unsafe fn grab(&self) -> &mut T {
        unsafe { &mut *self.inner.value.get() }
    }
}

impl<A: Architecture, T: Clone> IrqSafeSpinlock<A, T> {
    /// Returns a clone of the contained value
    pub fn get_cloned(&self) -> T {
        self.lock().clone()
    }
}

impl<A: Architecture, T: Clone> Clone for IrqSafeSpinlock<A, T> {
    fn clone(&self) -> Self {
        let inner = self.lock();
        IrqSafeSpinlock::new(inner.clone())
    }
}

impl<A: Architecture, T> Deref for IrqSafeSpinlockGuard<'_, A, T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        self.inner.deref()
    }
}

impl<A: Architecture, T> DerefMut for IrqSafeSpinlockGuard<'_, A, T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.inner.deref_mut()
    }
}
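// Illustrative usage sketch (not part of the original API): a hypothetical helper,
// generic over any `A: Architecture` supplied by the kernel, showing the RAII
// discipline of the IRQ-safe lock. The function name and the `u64` payload are
// arbitrary examples.
#[allow(dead_code)]
fn _irq_safe_spinlock_example<A: Architecture>(counter: &IrqSafeSpinlock<A, u64>) -> u64 {
    // `lock()` first disables IRQs, then spins until the inner lock is acquired
    let mut guard = counter.lock();
    *guard += 1;
    *guard
    // When `guard` is dropped, the inner lock is released first and only then
    // is the previous IRQ state restored
}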
/// Helper macro to implement "split" locks. This may be needed when a very specific storage
/// layout for the locked type is required.
pub macro split_spinlock(
    $(use $use:path;)*

    $(#[$meta:meta])*
    static $name:ident<$lock:ident: $arch:ty>: $ty:ty = $init:expr;
) {
    pub use $name::$name;

    #[allow(non_snake_case)]
    pub mod $name {
        $(use $use;)*

        use core::cell::UnsafeCell;
        use core::marker::PhantomData;
        use core::sync::atomic::{AtomicBool, Ordering};

        #[repr(transparent)]
        pub struct __Wrapper(UnsafeCell<$ty>);

        $(#[$meta])*
        pub static $name: __Wrapper = __Wrapper(UnsafeCell::new($init));
        static __LOCK: AtomicBool = AtomicBool::new(false);

        pub struct __Guard($crate::guard::IrqGuard<$arch>);

        impl __Wrapper {
            pub fn $lock(&self) -> __Guard {
                let irq = $crate::guard::IrqGuard::acquire();
                while __LOCK
                    .compare_exchange(false, true, Ordering::Acquire, Ordering::Relaxed)
                    .is_err()
                {
                    core::hint::spin_loop();
                }
                __Guard(irq)
            }
        }

        unsafe impl Sync for __Wrapper {}

        impl core::ops::Deref for __Guard {
            type Target = $ty;

            fn deref(&self) -> &Self::Target {
                unsafe { &*$name.0.get() }
            }
        }

        impl core::ops::DerefMut for __Guard {
            fn deref_mut(&mut self) -> &mut Self::Target {
                unsafe { &mut *$name.0.get() }
            }
        }

        impl Drop for __Guard {
            fn drop(&mut self) {
                __LOCK.store(false, Ordering::Release)
            }
        }
    }
}
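// Usage sketch for `split_spinlock`, kept as a comment because it needs a concrete
// `Architecture` implementation from the consuming crate. `ArchImpl` and
// `PAGE_COUNTERS` below are hypothetical names; the method used to take the lock
// is the identifier given in the angle brackets (`lock` here).
//
// split_spinlock! {
//     use crate::ArchImpl;
//
//     static PAGE_COUNTERS<lock: ArchImpl>: [u32; 1024] = [0; 1024];
// }
//
// let mut guard = PAGE_COUNTERS.lock(); // disables IRQs, then spins on __LOCK
// guard[0] += 1;
// // Dropping `guard` clears __LOCK; its IrqGuard field then restores the IRQ state.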