// tracing_mutex/lockapi.rs

//! Wrapper implementations for [`lock_api`].
//!
//! This module does not provide any particular mutex implementation by itself, but rather can be
//! used to add dependency tracking to mutexes that already exist. It implements all of the traits
//! in `lock_api` based on the one it wraps. Crates such as `spin` and `parking_lot` provide base
//! primitives that can be wrapped.
//!
//! Wrapped mutexes are at least one `usize` larger than the types they wrap, and must be aligned
//! to `usize` boundaries. As such, libraries with many mutexes may want to take the additional
//! memory requirements into account.
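//!
//! # Example
//!
//! A minimal sketch of wrapping a third-party primitive. It assumes `parking_lot` and `lock_api`
//! are available as dependencies; any type implementing [`RawMutex`] can be wrapped the same way:
//!
//! ```ignore
//! use tracing_mutex::lockapi::TracingWrapper;
//!
//! // A drop-in mutex type whose lock ordering is tracked at runtime.
//! type TracedMutex<T> = lock_api::Mutex<TracingWrapper<parking_lot::RawMutex>, T>;
//!
//! let mutex = TracedMutex::new(0);
//! *mutex.lock() += 1;
//! ```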
pub use lock_api as raw;
use lock_api::GuardNoSend;
use lock_api::RawMutex;
use lock_api::RawMutexFair;
use lock_api::RawMutexTimed;
use lock_api::RawRwLock;
use lock_api::RawRwLockDowngrade;
use lock_api::RawRwLockFair;
use lock_api::RawRwLockRecursive;
use lock_api::RawRwLockRecursiveTimed;
use lock_api::RawRwLockTimed;
use lock_api::RawRwLockUpgrade;
use lock_api::RawRwLockUpgradeDowngrade;
use lock_api::RawRwLockUpgradeFair;
use lock_api::RawRwLockUpgradeTimed;

use crate::LazyMutexId;
use crate::MutexId;
use crate::util::PrivateTraced;

/// Tracing wrapper for all [`lock_api`] traits.
///
/// This wrapper implements any of the locking traits available, given that the wrapped type
/// implements them. As such, this wrapper can be used both for normal mutexes and rwlocks.
#[derive(Debug, Default)]
pub struct TracingWrapper<T> {
    inner: T,
    // Need to use a lazy mutex ID to initialize statically.
    id: LazyMutexId,
}

impl<T> TracingWrapper<T> {
    /// Mark this lock as held in the dependency graph.
    fn mark_held(&self) {
        self.id.mark_held();
    }

    /// Mark this lock as released in the dependency graph.
    ///
    /// # Safety
    ///
    /// This function should only be called when the lock has been previously acquired by this
    /// thread.
    unsafe fn mark_released(&self) {
        self.id.mark_released();
    }

    /// First mark ourselves as held, then call the locking function.
    fn lock(&self, f: impl FnOnce()) {
        self.mark_held();
        f();
    }

    /// First call the unlocking function, then mark ourselves as released.
    unsafe fn unlock(&self, f: impl FnOnce()) {
        f();
        self.mark_released();
    }

    /// Conditionally lock the mutex.
    ///
    /// First marks the lock as held, then runs the provided locking function. If that function
    /// returns true the lock is kept, otherwise the mutex is immediately marked as released.
    ///
    /// # Returns
    ///
    /// The value returned from the callback.
    fn conditionally_lock(&self, f: impl FnOnce() -> bool) -> bool {
        // Mark as held while we attempt the lock.
        self.mark_held();

        if f() {
            true
        } else {
            // Safety: we just marked the lock as held above.
            unsafe { self.mark_released() }
            false
        }
    }
}

impl<T> PrivateTraced for TracingWrapper<T> {
    fn get_id(&self) -> &MutexId {
        &self.id
    }
}

unsafe impl<T> RawMutex for TracingWrapper<T>
where
    T: RawMutex,
{
    // Known issue with legacy initialisers, allow
    #[allow(clippy::declare_interior_mutable_const)]
    const INIT: Self = Self {
        inner: T::INIT,
        id: LazyMutexId::new(),
    };

    /// Always equal to [`GuardNoSend`], as an implementation detail in the tracking system requires
    /// this behaviour. May change in the future to reflect the actual guard type from the wrapped
    /// primitive.
    type GuardMarker = GuardNoSend;

    fn lock(&self) {
        self.lock(|| self.inner.lock());
    }

    fn try_lock(&self) -> bool {
        self.conditionally_lock(|| self.inner.try_lock())
    }

    unsafe fn unlock(&self) {
        self.unlock(|| self.inner.unlock());
    }

    fn is_locked(&self) -> bool {
        // Can't use the default implementation, as the inner type may have overridden it.
        self.inner.is_locked()
    }
}

unsafe impl<T> RawMutexFair for TracingWrapper<T>
where
    T: RawMutexFair,
{
    unsafe fn unlock_fair(&self) {
        self.unlock(|| self.inner.unlock_fair())
    }

    unsafe fn bump(&self) {
        // Bumping effectively doesn't change which locks are held, so we don't need to manage the
        // lock state.
        self.inner.bump();
    }
}

unsafe impl<T> RawMutexTimed for TracingWrapper<T>
where
    T: RawMutexTimed,
{
    type Duration = T::Duration;

    type Instant = T::Instant;

    fn try_lock_for(&self, timeout: Self::Duration) -> bool {
        self.conditionally_lock(|| self.inner.try_lock_for(timeout))
    }

    fn try_lock_until(&self, timeout: Self::Instant) -> bool {
        self.conditionally_lock(|| self.inner.try_lock_until(timeout))
    }
}

unsafe impl<T> RawRwLock for TracingWrapper<T>
where
    T: RawRwLock,
{
    // Known issue with legacy initialisers, allow
    #[allow(clippy::declare_interior_mutable_const)]
    const INIT: Self = Self {
        inner: T::INIT,
        id: LazyMutexId::new(),
    };

    /// Always equal to [`GuardNoSend`], as an implementation detail in the tracking system requires
    /// this behaviour. May change in the future to reflect the actual guard type from the wrapped
    /// primitive.
    type GuardMarker = GuardNoSend;

    fn lock_shared(&self) {
        self.lock(|| self.inner.lock_shared());
    }

    fn try_lock_shared(&self) -> bool {
        self.conditionally_lock(|| self.inner.try_lock_shared())
    }

    unsafe fn unlock_shared(&self) {
        self.unlock(|| self.inner.unlock_shared());
    }

    fn lock_exclusive(&self) {
        self.lock(|| self.inner.lock_exclusive());
    }

    fn try_lock_exclusive(&self) -> bool {
        self.conditionally_lock(|| self.inner.try_lock_exclusive())
    }

    unsafe fn unlock_exclusive(&self) {
        self.unlock(|| self.inner.unlock_exclusive());
    }

    fn is_locked(&self) -> bool {
        self.inner.is_locked()
    }
}

unsafe impl<T> RawRwLockDowngrade for TracingWrapper<T>
where
    T: RawRwLockDowngrade,
{
    unsafe fn downgrade(&self) {
        // Downgrading keeps the lock held, so no tracking update is required.
        self.inner.downgrade()
    }
}

unsafe impl<T> RawRwLockUpgrade for TracingWrapper<T>
where
    T: RawRwLockUpgrade,
{
    fn lock_upgradable(&self) {
        self.lock(|| self.inner.lock_upgradable());
    }

    fn try_lock_upgradable(&self) -> bool {
        self.conditionally_lock(|| self.inner.try_lock_upgradable())
    }

    unsafe fn unlock_upgradable(&self) {
        self.unlock(|| self.inner.unlock_upgradable());
    }

    unsafe fn upgrade(&self) {
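        // Upgrading changes how the lock is held but not whether it is held, so the dependency
        // tracking does not need to be updated.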
        self.inner.upgrade();
    }

    unsafe fn try_upgrade(&self) -> bool {
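        // As with `upgrade`, the set of held locks is unchanged regardless of the outcome.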
        self.inner.try_upgrade()
    }
}

unsafe impl<T> RawRwLockFair for TracingWrapper<T>
where
    T: RawRwLockFair,
{
    unsafe fn unlock_shared_fair(&self) {
        self.unlock(|| self.inner.unlock_shared_fair());
    }

    unsafe fn unlock_exclusive_fair(&self) {
        self.unlock(|| self.inner.unlock_exclusive_fair());
    }

    unsafe fn bump_shared(&self) {
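        // As with `bump`, bumping does not change which locks are held.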
        self.inner.bump_shared();
    }

    unsafe fn bump_exclusive(&self) {
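        // As with `bump`, bumping does not change which locks are held.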
        self.inner.bump_exclusive();
    }
}

unsafe impl<T> RawRwLockRecursive for TracingWrapper<T>
where
    T: RawRwLockRecursive,
{
    fn lock_shared_recursive(&self) {
        self.lock(|| self.inner.lock_shared_recursive());
    }

    fn try_lock_shared_recursive(&self) -> bool {
        self.conditionally_lock(|| self.inner.try_lock_shared_recursive())
    }
}

unsafe impl<T> RawRwLockRecursiveTimed for TracingWrapper<T>
where
    T: RawRwLockRecursiveTimed,
{
    fn try_lock_shared_recursive_for(&self, timeout: Self::Duration) -> bool {
        self.conditionally_lock(|| self.inner.try_lock_shared_recursive_for(timeout))
    }

    fn try_lock_shared_recursive_until(&self, timeout: Self::Instant) -> bool {
        self.conditionally_lock(|| self.inner.try_lock_shared_recursive_until(timeout))
    }
}

unsafe impl<T> RawRwLockTimed for TracingWrapper<T>
where
    T: RawRwLockTimed,
{
    type Duration = T::Duration;

    type Instant = T::Instant;

    fn try_lock_shared_for(&self, timeout: Self::Duration) -> bool {
        self.conditionally_lock(|| self.inner.try_lock_shared_for(timeout))
    }

    fn try_lock_shared_until(&self, timeout: Self::Instant) -> bool {
        self.conditionally_lock(|| self.inner.try_lock_shared_until(timeout))
    }

    fn try_lock_exclusive_for(&self, timeout: Self::Duration) -> bool {
        self.conditionally_lock(|| self.inner.try_lock_exclusive_for(timeout))
    }

    fn try_lock_exclusive_until(&self, timeout: Self::Instant) -> bool {
        self.conditionally_lock(|| self.inner.try_lock_exclusive_until(timeout))
    }
}

unsafe impl<T> RawRwLockUpgradeDowngrade for TracingWrapper<T>
where
    T: RawRwLockUpgradeDowngrade,
{
    unsafe fn downgrade_upgradable(&self) {
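        // As with `downgrade`, the lock remains held, so no tracking update is required.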
        self.inner.downgrade_upgradable()
    }

    unsafe fn downgrade_to_upgradable(&self) {
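        // As with `downgrade`, the lock remains held, so no tracking update is required.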
        self.inner.downgrade_to_upgradable()
    }
}

unsafe impl<T> RawRwLockUpgradeFair for TracingWrapper<T>
where
    T: RawRwLockUpgradeFair,
{
    unsafe fn unlock_upgradable_fair(&self) {
        self.unlock(|| self.inner.unlock_upgradable_fair())
    }

    unsafe fn bump_upgradable(&self) {
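        // As with `bump`, bumping does not change which locks are held.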
        self.inner.bump_upgradable()
    }
}

unsafe impl<T> RawRwLockUpgradeTimed for TracingWrapper<T>
where
    T: RawRwLockUpgradeTimed,
{
    fn try_lock_upgradable_for(&self, timeout: Self::Duration) -> bool {
        self.conditionally_lock(|| self.inner.try_lock_upgradable_for(timeout))
    }

    fn try_lock_upgradable_until(&self, timeout: Self::Instant) -> bool {
        self.conditionally_lock(|| self.inner.try_lock_upgradable_until(timeout))
    }

    unsafe fn try_upgrade_for(&self, timeout: Self::Duration) -> bool {
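        // As with `upgrade`, the set of held locks is unchanged regardless of the outcome.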
        self.inner.try_upgrade_for(timeout)
    }

    unsafe fn try_upgrade_until(&self, timeout: Self::Instant) -> bool {
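        // As with `upgrade`, the set of held locks is unchanged regardless of the outcome.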
        self.inner.try_upgrade_until(timeout)
    }
}
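
// A minimal smoke test sketch for the wrapper. `RawSpinLock` is a hypothetical, test-only
// primitive defined here so the test is self-contained rather than depending on `spin` or
// `parking_lot`.
#[cfg(test)]
mod tests {
    use std::sync::atomic::{AtomicBool, Ordering};

    use super::*;

    /// Trivial spin-based raw mutex, only suitable for exercising the wrapper in tests.
    struct RawSpinLock(AtomicBool);

    unsafe impl RawMutex for RawSpinLock {
        #[allow(clippy::declare_interior_mutable_const)]
        const INIT: Self = Self(AtomicBool::new(false));

        type GuardMarker = GuardNoSend;

        fn lock(&self) {
            while !self.try_lock() {
                std::hint::spin_loop();
            }
        }

        fn try_lock(&self) -> bool {
            self.0
                .compare_exchange(false, true, Ordering::Acquire, Ordering::Relaxed)
                .is_ok()
        }

        unsafe fn unlock(&self) {
            self.0.store(false, Ordering::Release);
        }
    }

    #[test]
    fn lock_unlock_cycle() {
        let wrapper: TracingWrapper<RawSpinLock> = TracingWrapper::INIT;

        // Use fully qualified calls: the private inherent helpers on `TracingWrapper` would
        // otherwise shadow the `RawMutex` trait methods.
        RawMutex::lock(&wrapper);
        assert!(wrapper.is_locked());
        // The lock is already held, so trying again must fail and roll back the tracking state.
        assert!(!RawMutex::try_lock(&wrapper));

        // Safety: the lock was acquired by this thread above.
        unsafe { RawMutex::unlock(&wrapper) };
        assert!(!wrapper.is_locked());
    }
}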