1use core::hash::{Hash, Hasher};
17use core::ops::Deref;
18use core::panic::Location;
19use core::sync::atomic::{AtomicBool, Ordering};
20
21use derivative::Derivative;
22use netstack3_trace::TraceResourceId;
23
mod caller {
    //! Call-site bookkeeping for reference clones.
    //!
    //! With the `rc-debug-names` feature enabled, every strong reference
    //! records the [`Location`] where it was created so leaked references can
    //! be attributed to a call site. Without the feature, everything here is
    //! zero-sized and compiles to no-ops.

    use core::panic::Location;

    /// Map from clone call site to the number of live references created
    /// there. Zero-sized unless `rc-debug-names` is enabled.
    #[derive(Default)]
    pub(super) struct Callers {
        #[cfg(feature = "rc-debug-names")]
        pub(super) callers: std::sync::Mutex<std::collections::HashMap<Location<'static>, usize>>,
    }

    impl core::fmt::Debug for Callers {
        // Feature disabled: nothing is recorded, and the output says so.
        #[cfg(not(feature = "rc-debug-names"))]
        fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
            write!(f, "(Not Tracked)")
        }
        // Feature enabled: dump each location with its live-clone count.
        #[cfg(feature = "rc-debug-names")]
        fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
            let Self { callers } = self;
            let callers = callers.lock().unwrap();
            write!(f, "[\n")?;
            for (l, c) in callers.iter() {
                write!(f, " {l} => {c},\n")?;
            }
            write!(f, "]")
        }
    }

    impl Callers {
        /// Records `caller` as the creation site of a new reference and
        /// returns the token that must be released when that reference is
        /// dropped.
        pub(super) fn insert(&self, caller: &Location<'static>) -> TrackedCaller {
            #[cfg(not(feature = "rc-debug-names"))]
            {
                let _ = caller;
                TrackedCaller {}
            }
            #[cfg(feature = "rc-debug-names")]
            {
                let Self { callers } = self;
                let mut callers = callers.lock().unwrap();
                let count = callers.entry(caller.clone()).or_insert(0);
                *count += 1;
                TrackedCaller { location: caller.clone() }
            }
        }
    }

    /// Receipt carried by a strong reference naming where it was created.
    /// Zero-sized unless `rc-debug-names` is enabled.
    #[derive(Debug)]
    pub(super) struct TrackedCaller {
        #[cfg(feature = "rc-debug-names")]
        pub(super) location: Location<'static>,
    }

    impl TrackedCaller {
        // Feature disabled: nothing was recorded, so nothing to release.
        #[cfg(not(feature = "rc-debug-names"))]
        pub(super) fn release(&mut self, Callers {}: &Callers) {
            let Self {} = self;
        }

        /// Decrements the live-clone count for this token's location,
        /// removing the entry when it reaches zero. Panics if the map
        /// disagrees with the token (missing or zero-count entry).
        #[cfg(feature = "rc-debug-names")]
        pub(super) fn release(&mut self, Callers { callers }: &Callers) {
            let Self { location } = self;
            let mut callers = callers.lock().unwrap();
            let mut entry = match callers.entry(location.clone()) {
                std::collections::hash_map::Entry::Vacant(_) => {
                    panic!("location {location:?} was not in the callers map")
                }
                std::collections::hash_map::Entry::Occupied(o) => o,
            };

            let sub = entry
                .get()
                .checked_sub(1)
                .unwrap_or_else(|| panic!("zero-count location {location:?} in map"));
            if sub == 0 {
                let _: usize = entry.remove();
            } else {
                *entry.get_mut() = sub;
            }
        }
    }
}
121
122mod debug_id {
123 use core::sync::atomic::{AtomicU64, Ordering};
124 use netstack3_trace::TraceResourceId;
125
126 #[derive(Clone)]
135 pub(super) struct DebugToken(u64);
136
137 impl core::fmt::Debug for DebugToken {
138 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
139 let DebugToken(inner) = self;
140 write!(f, "{}", inner)
141 }
142 }
143
144 impl Default for DebugToken {
145 fn default() -> Self {
146 static NEXT_TOKEN: AtomicU64 = AtomicU64::new(0);
147 DebugToken(NEXT_TOKEN.fetch_add(1, Ordering::Relaxed))
153 }
154 }
155
156 impl DebugToken {
157 pub(super) fn trace_id(&self) -> TraceResourceId<'_> {
158 let Self(inner) = self;
159 TraceResourceId::new(*inner)
160 }
161 }
162
163 pub(super) enum DebugId<T> {
168 WithToken { ptr: *const T, token: DebugToken },
171 WithoutToken { ptr: *const T },
174 }
175
176 impl<T> core::fmt::Debug for DebugId<T> {
177 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
178 match self {
179 DebugId::WithToken { ptr, token } => write!(f, "{:?}:{:?}", token, ptr),
180 DebugId::WithoutToken { ptr } => write!(f, "?:{:?}", ptr),
181 }
182 }
183 }
184}
185
/// The shared state backing [`Primary`], [`Strong`], and [`Weak`].
#[derive(Derivative)]
#[derivative(Debug)]
struct Inner<T> {
    // Set to true when destruction begins; checked (Acquire) before `data`
    // may be dropped or taken, and by `Weak::upgrade`.
    marked_for_destruction: AtomicBool,
    // Debug-only record of the call sites that created live references.
    callers: caller::Callers,
    // The wrapped value; wrapped in `ManuallyDrop` because it is taken out
    // manually during destruction (see `Inner::unwrap` and `Drop`).
    data: core::mem::ManuallyDrop<T>,
    #[derivative(Debug = "ignore")]
    // If installed, receives the wrapped value when this `Inner` drops.
    notifier: crate::Mutex<Option<Box<dyn Notifier<T>>>>,
    // Process-unique token used in debug and trace output.
    debug_token: debug_id::DebugToken,
}
199
impl<T> Inner<T> {
    /// Asserts that this `Inner` has been marked for destruction.
    ///
    /// Called before the wrapped data is dropped or extracted; the Acquire
    /// load pairs with the Release swap in `Primary::mark_for_destruction`.
    fn pre_drop_check(marked_for_destruction: &AtomicBool) {
        assert!(marked_for_destruction.load(Ordering::Acquire), "Must be marked for destruction");
    }

    /// Extracts the wrapped data, dropping all other fields in place and
    /// forgetting `self` so `Drop for Inner` never runs.
    fn unwrap(mut self) -> T {
        let Inner { marked_for_destruction, data, callers: holders, notifier, debug_token } =
            &mut self;

        Inner::<T>::pre_drop_check(marked_for_destruction);

        // SAFETY: each field is dropped in place exactly once here, and
        // `self` is forgotten immediately afterwards, so no field can be
        // dropped a second time.
        let data = unsafe {
            core::ptr::drop_in_place(marked_for_destruction);
            core::ptr::drop_in_place(holders);
            core::ptr::drop_in_place(notifier);
            core::ptr::drop_in_place(debug_token);

            core::mem::ManuallyDrop::take(data)
        };
        // Suppress `Drop for Inner`; every field has already been dropped or
        // moved out above.
        core::mem::forget(self);

        data
    }

    /// Installs `notifier` to receive the wrapped data on destruction.
    ///
    /// Panics if a notifier is already installed.
    fn set_notifier<N: Notifier<T> + 'static>(&self, notifier: N) {
        let Self { notifier: slot, .. } = self;

        let boxed: Box<dyn Notifier<T>> = Box::new(notifier);
        // Scoped so the lock guard is released before the assert.
        let prev_notifier = { slot.lock().replace(boxed) };
        assert!(prev_notifier.is_none(), "can't have a notifier already installed");
    }
}
260
impl<T> Drop for Inner<T> {
    fn drop(&mut self) {
        let Inner { marked_for_destruction, data, callers: _, notifier, debug_token: _ } = self;
        // SAFETY: `data` is taken exactly once here during drop and is not
        // observed through `self` again.
        let data = unsafe { core::mem::ManuallyDrop::take(data) };
        // Destruction must have been flagged before the last reference went
        // away.
        Self::pre_drop_check(marked_for_destruction);
        // Hand the value to an installed notifier, if any; otherwise `data`
        // drops normally at the end of this scope.
        if let Some(mut notifier) = notifier.lock().take() {
            notifier.notify(data);
        }
    }
}
276
/// The owning reference to the shared state.
///
/// The `Arc` is wrapped in `ManuallyDrop` so that `Drop for Primary` can take
/// it out and run destruction-time assertions itself.
#[derive(Debug)]
pub struct Primary<T> {
    inner: core::mem::ManuallyDrop<alloc::sync::Arc<Inner<T>>>,
}
289
290impl<T> Drop for Primary<T> {
291 fn drop(&mut self) {
292 let was_marked = self.mark_for_destruction();
293 let Self { inner } = self;
294 let inner = unsafe { core::mem::ManuallyDrop::take(inner) };
298
299 if !std::thread::panicking() {
305 assert_eq!(was_marked, false, "Must not be marked for destruction yet");
306
307 let Inner { marked_for_destruction: _, callers, data: _, notifier: _, debug_token: _ } =
308 &*inner;
309
310 let refs = alloc::sync::Arc::strong_count(&inner).checked_sub(1).unwrap();
313 assert!(
314 refs == 0,
315 "dropped Primary with {refs} strong refs remaining, \
316 Callers={callers:?}"
317 );
318 }
319 }
320}
321
322impl<T> AsRef<T> for Primary<T> {
323 fn as_ref(&self) -> &T {
324 self.deref()
325 }
326}
327
328impl<T> Deref for Primary<T> {
329 type Target = T;
330
331 fn deref(&self) -> &T {
332 let Self { inner } = self;
333 let Inner { marked_for_destruction: _, data, callers: _, notifier: _, debug_token: _ } =
334 &***inner;
335 data
336 }
337}
338
impl<T> Primary<T> {
    /// Flags the shared state as going away, returning the flag's previous
    /// value. The Release swap pairs with the Acquire loads in
    /// `Inner::pre_drop_check` and `Weak::upgrade`.
    fn mark_for_destruction(&mut self) -> bool {
        let Self { inner } = self;
        inner.marked_for_destruction.swap(true, Ordering::Release)
    }

    /// Creates a new `Primary` owning `data`.
    pub fn new(data: T) -> Primary<T> {
        Primary {
            inner: core::mem::ManuallyDrop::new(alloc::sync::Arc::new(Inner {
                marked_for_destruction: AtomicBool::new(false),
                callers: caller::Callers::default(),
                data: core::mem::ManuallyDrop::new(data),
                notifier: crate::Mutex::new(None),
                debug_token: debug_id::DebugToken::default(),
            })),
        }
    }

    /// Like [`Primary::new`], but `data_fn` is handed a [`Weak`] to the
    /// allocation being constructed, allowing self-referential values.
    pub fn new_cyclic(data_fn: impl FnOnce(Weak<T>) -> T) -> Primary<T> {
        Primary {
            inner: core::mem::ManuallyDrop::new(alloc::sync::Arc::new_cyclic(move |weak| Inner {
                marked_for_destruction: AtomicBool::new(false),
                callers: caller::Callers::default(),
                data: core::mem::ManuallyDrop::new(data_fn(Weak(weak.clone()))),
                notifier: crate::Mutex::new(None),
                debug_token: debug_id::DebugToken::default(),
            })),
        }
    }

    /// Creates a new [`Strong`] reference, recording the caller's location
    /// when `rc-debug-names` is enabled.
    #[cfg_attr(feature = "rc-debug-names", track_caller)]
    pub fn clone_strong(Self { inner }: &Self) -> Strong<T> {
        let Inner { data: _, callers, marked_for_destruction: _, notifier: _, debug_token: _ } =
            &***inner;
        let caller = callers.insert(Location::caller());
        Strong { inner: alloc::sync::Arc::clone(inner), caller }
    }

    /// Creates a [`Weak`] reference to the same allocation.
    pub fn downgrade(Self { inner }: &Self) -> Weak<T> {
        Weak(alloc::sync::Arc::downgrade(inner))
    }

    /// Returns true if `this` and the given [`Strong`] share an allocation.
    pub fn ptr_eq(
        Self { inner: this }: &Self,
        Strong { inner: other, caller: _ }: &Strong<T>,
    ) -> bool {
        alloc::sync::Arc::ptr_eq(this, other)
    }

    /// Returns an opaque, stable debug identifier for this allocation.
    pub fn debug_id(&self) -> impl core::fmt::Debug + '_ {
        let Self { inner } = self;
        debug_id::DebugId::WithToken {
            ptr: alloc::sync::Arc::as_ptr(inner),
            token: inner.debug_token.clone(),
        }
    }

    /// Marks the state for destruction and extracts the inner `Arc`,
    /// forgetting `this` so `Drop for Primary` does not also run.
    fn mark_for_destruction_and_take_inner(mut this: Self) -> alloc::sync::Arc<Inner<T>> {
        // Destruction must begin exactly once.
        assert!(!this.mark_for_destruction());
        let Self { inner } = &mut this;
        // SAFETY: `this` is forgotten immediately below, so the value in the
        // `ManuallyDrop` is neither observed again nor dropped twice.
        let inner = unsafe { core::mem::ManuallyDrop::take(inner) };
        core::mem::forget(this);
        inner
    }

    /// Attempts to take `T` out, failing with the inner `Arc` when strong
    /// references are still outstanding.
    fn try_unwrap(this: Self) -> Result<T, alloc::sync::Arc<Inner<T>>> {
        let inner = Self::mark_for_destruction_and_take_inner(this);
        alloc::sync::Arc::try_unwrap(inner).map(Inner::unwrap)
    }

    /// Takes `T` out of `this`, panicking if strong references remain; the
    /// panic message includes the tracked callers.
    pub fn unwrap(this: Self) -> T {
        Self::try_unwrap(this).unwrap_or_else(|inner| {
            let callers = &inner.callers;
            // Subtract the strong count held by `inner` itself.
            let refs = alloc::sync::Arc::strong_count(&inner).checked_sub(1).unwrap();
            panic!("can't unwrap, still had {refs} strong refs: {callers:?}");
        })
    }

    /// Destroys `this`, arranging for `notifier` to receive `T` once the
    /// last strong reference is gone.
    pub fn unwrap_with_notifier<N: Notifier<T> + 'static>(this: Self, notifier: N) {
        let inner = Self::mark_for_destruction_and_take_inner(this);
        inner.set_notifier(notifier);
        // Dropping our `Arc` may itself be the final release that fires the
        // notifier.
        core::mem::drop(inner);
    }

    /// Takes `T` out if no strong references remain; otherwise installs a
    /// notifier built by `new_notifier` and returns the closure's auxiliary
    /// output.
    pub fn unwrap_or_notify_with<N: Notifier<T> + 'static, O, F: FnOnce() -> (N, O)>(
        this: Self,
        new_notifier: F,
    ) -> Result<T, O> {
        Self::try_unwrap(this).map_err(move |inner| {
            let (notifier, output) = new_notifier();
            inner.set_notifier(notifier);
            output
        })
    }

    /// Creates a [`DebugReferences`] view onto this allocation.
    pub fn debug_references(this: &Self) -> DebugReferences<T> {
        let Self { inner } = this;
        DebugReferences(alloc::sync::Arc::downgrade(&*inner))
    }
}
479
/// A strongly-held reference to the shared state.
///
/// Keeps the allocation alive and carries the call-site token recorded at
/// creation (meaningful only with `rc-debug-names`).
#[derive(Debug, Derivative)]
pub struct Strong<T> {
    inner: alloc::sync::Arc<Inner<T>>,
    caller: caller::TrackedCaller,
}
493
494impl<T> Drop for Strong<T> {
495 fn drop(&mut self) {
496 let Self { inner, caller } = self;
497 let Inner { marked_for_destruction: _, callers, data: _, notifier: _, debug_token: _ } =
498 &**inner;
499 caller.release(callers);
500 }
501}
502
503impl<T> AsRef<T> for Strong<T> {
504 fn as_ref(&self) -> &T {
505 self.deref()
506 }
507}
508
509impl<T> Deref for Strong<T> {
510 type Target = T;
511
512 fn deref(&self) -> &T {
513 let Self { inner, caller: _ } = self;
514 let Inner { marked_for_destruction: _, data, callers: _, notifier: _, debug_token: _ } =
515 inner.deref();
516 data
517 }
518}
519
520impl<T> core::cmp::Eq for Strong<T> {}
521
522impl<T> core::cmp::PartialEq for Strong<T> {
523 fn eq(&self, other: &Self) -> bool {
524 Self::ptr_eq(self, other)
525 }
526}
527
528impl<T> Hash for Strong<T> {
529 fn hash<H: Hasher>(&self, state: &mut H) {
530 let Self { inner, caller: _ } = self;
531 alloc::sync::Arc::as_ptr(inner).hash(state)
532 }
533}
534
impl<T> Clone for Strong<T> {
    // With `rc-debug-names`, attribute the new reference to the caller of
    // `clone`, not to this function body.
    #[cfg_attr(feature = "rc-debug-names", track_caller)]
    fn clone(&self) -> Self {
        let Self { inner, caller: _ } = self;
        let Inner { data: _, marked_for_destruction: _, callers, notifier: _, debug_token: _ } =
            &**inner;
        // Record the new reference's creation site alongside the Arc clone.
        let caller = callers.insert(Location::caller());
        Self { inner: alloc::sync::Arc::clone(inner), caller }
    }
}
545
546impl<T> Strong<T> {
547 pub fn downgrade(Self { inner, caller: _ }: &Self) -> Weak<T> {
549 Weak(alloc::sync::Arc::downgrade(inner))
550 }
551
552 pub fn debug_id(&self) -> impl core::fmt::Debug + '_ {
555 let Self { inner, caller: _ } = self;
556 debug_id::DebugId::WithToken {
557 ptr: alloc::sync::Arc::as_ptr(inner),
558 token: inner.debug_token.clone(),
559 }
560 }
561
562 pub fn trace_id(&self) -> TraceResourceId<'_> {
565 self.inner.debug_token.trace_id()
566 }
567
568 pub fn marked_for_destruction(Self { inner, caller: _ }: &Self) -> bool {
570 let Inner { marked_for_destruction, data: _, callers: _, notifier: _, debug_token: _ } =
571 inner.as_ref();
572 marked_for_destruction.load(Ordering::Acquire)
577 }
578
579 pub fn weak_ptr_eq(Self { inner: this, caller: _ }: &Self, Weak(other): &Weak<T>) -> bool {
581 core::ptr::eq(alloc::sync::Arc::as_ptr(this), other.as_ptr())
582 }
583
584 pub fn ptr_eq(
586 Self { inner: this, caller: _ }: &Self,
587 Self { inner: other, caller: _ }: &Self,
588 ) -> bool {
589 alloc::sync::Arc::ptr_eq(this, other)
590 }
591
592 pub fn ptr_cmp(
594 Self { inner: this, caller: _ }: &Self,
595 Self { inner: other, caller: _ }: &Self,
596 ) -> core::cmp::Ordering {
597 let this = alloc::sync::Arc::as_ptr(this);
598 let other = alloc::sync::Arc::as_ptr(other);
599 this.cmp(&other)
600 }
601
602 pub fn debug_references(this: &Self) -> DebugReferences<T> {
604 let Self { inner, caller: _ } = this;
605 DebugReferences(alloc::sync::Arc::downgrade(inner))
606 }
607}
608
/// A weakly-held reference to the shared state.
///
/// Must be upgraded via [`Weak::upgrade`] before the data can be accessed;
/// upgrading fails once destruction has been marked.
#[derive(Debug)]
pub struct Weak<T>(alloc::sync::Weak<Inner<T>>);
621
622impl<T> core::cmp::Eq for Weak<T> {}
623
624impl<T> core::cmp::PartialEq for Weak<T> {
625 fn eq(&self, other: &Self) -> bool {
626 Self::ptr_eq(self, other)
627 }
628}
629
630impl<T> Hash for Weak<T> {
631 fn hash<H: Hasher>(&self, state: &mut H) {
632 let Self(this) = self;
633 this.as_ptr().hash(state)
634 }
635}
636
637impl<T> Clone for Weak<T> {
638 fn clone(&self) -> Self {
639 let Self(this) = self;
640 Weak(this.clone())
641 }
642}
643
impl<T> Weak<T> {
    /// Returns true if both weak references point at one allocation.
    pub fn ptr_eq(&self, Self(other): &Self) -> bool {
        let Self(this) = self;
        this.ptr_eq(other)
    }

    /// Returns an opaque debug identifier; includes the debug token only
    /// when the allocation can still be upgraded.
    pub fn debug_id(&self) -> impl core::fmt::Debug + '_ {
        match self.upgrade() {
            Some(strong) => {
                let Strong { inner, caller: _ } = &strong;
                debug_id::DebugId::WithToken {
                    ptr: alloc::sync::Arc::as_ptr(&inner),
                    token: inner.debug_token.clone(),
                }
            }
            None => {
                let Self(this) = self;
                debug_id::DebugId::WithoutToken { ptr: this.as_ptr() }
            }
        }
    }

    /// Attempts to upgrade to a [`Strong`] reference.
    ///
    /// Returns `None` if the allocation is gone or already marked for
    /// destruction, so no new strong references can appear once teardown has
    /// begun. Records the caller's location when `rc-debug-names` is on.
    #[cfg_attr(feature = "rc-debug-names", track_caller)]
    pub fn upgrade(&self) -> Option<Strong<T>> {
        let Self(weak) = self;
        let arc = weak.upgrade()?;
        let Inner { marked_for_destruction, data: _, callers, notifier: _, debug_token: _ } =
            arc.deref();

        // Acquire pairs with the Release swap in
        // `Primary::mark_for_destruction`.
        if !marked_for_destruction.load(Ordering::Acquire) {
            let caller = callers.insert(Location::caller());
            Some(Strong { inner: arc, caller })
        } else {
            None
        }
    }

    /// Returns the number of strong references to the allocation.
    pub fn strong_count(&self) -> usize {
        let Self(weak) = self;
        weak.strong_count()
    }

    /// Creates a [`DebugReferences`] view onto this allocation.
    pub fn debug_references(&self) -> DebugReferences<T> {
        let Self(inner) = self;
        DebugReferences(inner.clone())
    }
}
704
705fn debug_refs(
706 refs: Option<(usize, &AtomicBool, &caller::Callers)>,
707 name: &'static str,
708 f: &mut core::fmt::Formatter<'_>,
709) -> core::fmt::Result {
710 let mut f = f.debug_struct(name);
711 match refs {
712 Some((strong_count, marked_for_destruction, callers)) => f
713 .field("strong_count", &strong_count)
714 .field("marked_for_destruction", marked_for_destruction)
715 .field("callers", callers)
716 .finish(),
717 None => {
718 let strong_count = 0_usize;
719 f.field("strong_count", &strong_count).finish_non_exhaustive()
720 }
721 }
722}
723
/// A debug view of the outstanding references to an allocation; holds only a
/// weak reference, so it does not keep the allocation alive.
#[derive(Clone)]
pub struct DebugReferences<T>(alloc::sync::Weak<Inner<T>>);
728
729impl<T> core::fmt::Debug for DebugReferences<T> {
730 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
731 let Self(inner) = self;
732 let inner = inner.upgrade();
733 let refs = inner.as_ref().map(|inner| {
734 (alloc::sync::Arc::strong_count(inner), &inner.marked_for_destruction, &inner.callers)
735 });
736 debug_refs(refs, "DebugReferences", f)
737 }
738}
739
740impl<T: Send + Sync + 'static> DebugReferences<T> {
741 pub fn into_dyn(self) -> DynDebugReferences {
743 let Self(w) = self;
744 DynDebugReferences(w)
745 }
746}
747
/// Like [`DebugReferences`], but with the wrapped data type erased.
#[derive(Clone)]
pub struct DynDebugReferences(alloc::sync::Weak<dyn ExposeRefs>);
751
752impl core::fmt::Debug for DynDebugReferences {
753 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
754 let Self(inner) = self;
755 let inner = inner.upgrade();
756 let refs = inner.as_ref().map(|inner| {
757 let (marked_for_destruction, callers) = inner.refs_info();
758 (alloc::sync::Arc::strong_count(inner), marked_for_destruction, callers)
759 });
760 debug_refs(refs, "DynDebugReferences", f)
761 }
762}
763
/// Type-erased access to the reference-debugging fields of an [`Inner`],
/// enabling [`DynDebugReferences`] to drop the `T` parameter.
trait ExposeRefs: Send + Sync + 'static {
    // Returns the destruction flag and caller bookkeeping.
    fn refs_info(&self) -> (&AtomicBool, &caller::Callers);
}
768
769impl<T: Send + Sync + 'static> ExposeRefs for Inner<T> {
770 fn refs_info(&self) -> (&AtomicBool, &caller::Callers) {
771 (&self.marked_for_destruction, &self.callers)
772 }
773}
774
/// Observer handed the wrapped value when the backing `Inner` is dropped
/// (see `Drop for Inner`).
pub trait Notifier<T>: Send {
    /// Called with the owned `data` when the shared state is destroyed.
    fn notify(&mut self, data: T);
}
784
/// A [`Notifier`] that stores the notified value behind a shared `Arc`, so
/// clones of the notifier can retrieve it later via [`ArcNotifier::take`].
#[derive(Debug, Derivative)]
#[derivative(Clone(bound = ""))]
pub struct ArcNotifier<T>(alloc::sync::Arc<crate::Mutex<Option<T>>>);
792
793impl<T> ArcNotifier<T> {
794 pub fn new() -> Self {
796 Self(alloc::sync::Arc::new(crate::Mutex::new(None)))
797 }
798
799 pub fn take(&self) -> Option<T> {
801 let Self(inner) = self;
802 inner.lock().take()
803 }
804}
805
806impl<T: Send> Notifier<T> for ArcNotifier<T> {
807 fn notify(&mut self, data: T) {
808 let Self(inner) = self;
809 assert!(inner.lock().replace(data).is_none(), "notified twice");
810 }
811}
812
/// A [`Notifier`] adapter that transforms the value with a mapping closure
/// before forwarding it to the wrapped notifier.
pub struct MapNotifier<N, F> {
    inner: N,
    // `Some` until the (at most one) notification consumes the closure.
    map: Option<F>,
}
819
820impl<N, F> MapNotifier<N, F> {
821 pub fn new(notifier: N, map: F) -> Self {
824 Self { inner: notifier, map: Some(map) }
825 }
826}
827
828impl<A, B, N: Notifier<B>, F: FnOnce(A) -> B> Notifier<A> for MapNotifier<N, F>
829where
830 Self: Send,
831{
832 fn notify(&mut self, data: A) {
833 let Self { inner, map } = self;
834 let map = map.take().expect("notified twice");
835 inner.notify(map(data))
836 }
837}
838
/// [`Infallible`] has no values, so this notifier can never actually be
/// invoked; it is useful where a notifier type parameter is required but
/// notification cannot happen.
impl<T> Notifier<T> for core::convert::Infallible {
    fn notify(&mut self, _data: T) {
        // Statically unreachable: there are no `Infallible` values.
        match *self {}
    }
}
845
#[cfg(test)]
mod tests {
    use super::*;

    // Upgrading must fail after the `Primary` is dropped, even though a
    // `Weak` still exists.
    #[test]
    fn zombie_weak() {
        let primary = Primary::new(());
        let weak = {
            let strong = Primary::clone_strong(&primary);
            Strong::downgrade(&strong)
        };
        core::mem::drop(primary);

        assert!(weak.upgrade().is_none());
    }

    // All reference flavors observe writes made through any one of them.
    #[test]
    fn rcs() {
        const INITIAL_VAL: u8 = 1;
        const NEW_VAL: u8 = 2;

        let primary = Primary::new(crate::sync::Mutex::new(INITIAL_VAL));
        let strong = Primary::clone_strong(&primary);
        let weak = Strong::downgrade(&strong);

        *primary.lock().unwrap() = NEW_VAL;
        assert_eq!(*primary.deref().lock().unwrap(), NEW_VAL);
        assert_eq!(*strong.deref().lock().unwrap(), NEW_VAL);
        assert_eq!(*weak.upgrade().unwrap().deref().lock().unwrap(), NEW_VAL);
    }

    #[test]
    fn unwrap_primary_without_strong_held() {
        const VAL: u16 = 6;
        let primary = Primary::new(VAL);
        assert_eq!(Primary::unwrap(primary), VAL);
    }

    #[test]
    #[should_panic(expected = "can't unwrap, still had 1 strong refs")]
    fn unwrap_primary_with_strong_held() {
        let primary = Primary::new(8);
        let _strong: Strong<_> = Primary::clone_strong(&primary);
        let _: u16 = Primary::unwrap(primary);
    }

    #[test]
    #[should_panic(expected = "dropped Primary with 1 strong refs remaining")]
    fn drop_primary_with_strong_held() {
        let primary = Primary::new(9);
        let _strong: Strong<_> = Primary::clone_strong(&primary);
        core::mem::drop(primary);
    }

    // Dropping a `Primary` with outstanding strongs while already panicking
    // must not assert again (a double panic would abort the process).
    #[cfg(not(target_os = "fuchsia"))]
    #[test]
    #[should_panic(expected = "oopsie")]
    fn double_panic_protect() {
        let primary = Primary::new(9);
        let strong = Primary::clone_strong(&primary);
        let _tuple_to_invert_drop_order = (primary, strong);
        panic!("oopsie");
    }

    // Each clone site gets its own entry with count 1, and all entries are
    // released when the strongs drop.
    #[cfg(feature = "rc-debug-names")]
    #[test]
    fn tracked_callers() {
        let primary = Primary::new(10);
        // Baseline location; the clones below must have increasing lines.
        let here = Location::caller();
        let strong1 = Primary::clone_strong(&primary);
        let strong2 = strong1.clone();
        let weak = Strong::downgrade(&strong2);
        let strong3 = weak.upgrade().unwrap();

        let Primary { inner } = &primary;
        let Inner { marked_for_destruction: _, callers, data: _, notifier: _, debug_token: _ } =
            &***inner;

        let strongs = [strong1, strong2, strong3];
        let _: &Location<'_> = strongs.iter().enumerate().fold(here, |prev, (i, cur)| {
            let Strong { inner: _, caller: caller::TrackedCaller { location: cur } } = cur;
            assert_eq!(prev.file(), cur.file(), "{i}");
            assert!(prev.line() < cur.line(), "{prev} < {cur}, {i}");
            {
                let callers = callers.callers.lock().unwrap();
                assert_eq!(callers.get(cur).copied(), Some(1));
            }

            cur
        });

        std::mem::drop(strongs);
        // All tracked call sites must be gone once the strongs drop.
        {
            let callers = callers.callers.lock().unwrap();
            let callers = callers.deref();
            assert!(callers.is_empty(), "{callers:?}");
        }
    }
    // Two clones from the same call site share one map entry with count 2.
    #[cfg(feature = "rc-debug-names")]
    #[test]
    fn same_location_caller_tracking() {
        fn clone_in_fn<T>(p: &Primary<T>) -> Strong<T> {
            Primary::clone_strong(p)
        }

        let primary = Primary::new(10);
        let strong1 = clone_in_fn(&primary);
        let strong2 = clone_in_fn(&primary);
        assert_eq!(strong1.caller.location, strong2.caller.location);

        let Primary { inner } = &primary;
        let Inner { marked_for_destruction: _, callers, data: _, notifier: _, debug_token: _ } =
            &***inner;

        {
            let callers = callers.callers.lock().unwrap();
            assert_eq!(callers.get(&strong1.caller.location).copied(), Some(2));
        }

        std::mem::drop(strong1);
        std::mem::drop(strong2);

        {
            let callers = callers.callers.lock().unwrap();
            let callers = callers.deref();
            assert!(callers.is_empty(), "{callers:?}");
        }
    }

    // The leaked-refs panic message names this file via the tracked caller.
    #[cfg(feature = "rc-debug-names")]
    #[test]
    #[should_panic(expected = "core/sync/src/rc.rs")]
    fn callers_in_panic() {
        let primary = Primary::new(10);
        let _strong = Primary::clone_strong(&primary);
        drop(primary);
    }

    #[test]
    fn unwrap_with_notifier() {
        let primary = Primary::new(10);
        let strong = Primary::clone_strong(&primary);
        let notifier = ArcNotifier::new();
        Primary::unwrap_with_notifier(primary, notifier.clone());
        // A strong reference is still alive: no notification yet.
        assert_eq!(notifier.take(), None);
        core::mem::drop(strong);
        assert_eq!(notifier.take(), Some(10));
    }

    #[test]
    fn unwrap_or_notify_with_immediate() {
        let primary = Primary::new(10);
        let result = Primary::unwrap_or_notify_with::<ArcNotifier<_>, (), _>(primary, || {
            panic!("should not try to create notifier")
        });
        assert_eq!(result, Ok(10));
    }

    #[test]
    fn unwrap_or_notify_with_deferred() {
        let primary = Primary::new(10);
        let strong = Primary::clone_strong(&primary);
        let result = Primary::unwrap_or_notify_with(primary, || {
            let notifier = ArcNotifier::new();
            (notifier.clone(), notifier)
        });
        let notifier = result.unwrap_err();
        assert_eq!(notifier.take(), None);
        core::mem::drop(strong);
        assert_eq!(notifier.take(), Some(10));
    }

    #[test]
    fn map_notifier() {
        let primary = Primary::new(10);
        let notifier = ArcNotifier::new();
        let map_notifier = MapNotifier::new(notifier.clone(), |data| (data, data + 1));
        Primary::unwrap_with_notifier(primary, map_notifier);
        assert_eq!(notifier.take(), Some((10, 11)));
    }

    #[test]
    fn new_cyclic() {
        #[derive(Debug)]
        struct Data {
            value: i32,
            weak: Weak<Data>,
        }

        let primary = Primary::new_cyclic(|weak| Data { value: 2, weak });
        assert_eq!(primary.value, 2);
        let strong = primary.weak.upgrade().unwrap();
        assert_eq!(strong.value, 2);
        assert!(Primary::ptr_eq(&primary, &strong));
    }

    // Debug ids are opaque, so compare their rendered text.
    macro_rules! assert_debug_id_eq {
        ($id1:expr, $id2:expr) => {
            assert_eq!(alloc::format!("{:?}", $id1), alloc::format!("{:?}", $id2))
        };
    }
    macro_rules! assert_debug_id_ne {
        ($id1:expr, $id2:expr) => {
            assert_ne!(alloc::format!("{:?}", $id1), alloc::format!("{:?}", $id2))
        };
    }

    #[test]
    fn debug_ids_are_stable() {
        let primary = Primary::new(1);
        let strong = Primary::clone_strong(&primary);
        let weak_p = Primary::downgrade(&primary);
        let weak_s = Strong::downgrade(&strong);
        let weak_c = weak_p.clone();
        assert_debug_id_eq!(&primary.debug_id(), &strong.debug_id());
        assert_debug_id_eq!(&primary.debug_id(), &weak_p.debug_id());
        assert_debug_id_eq!(&primary.debug_id(), &weak_s.debug_id());
        assert_debug_id_eq!(&primary.debug_id(), &weak_c.debug_id());
    }

    #[test]
    fn debug_ids_are_unique() {
        let primary1 = Primary::new(1);
        let primary2 = Primary::new(1);
        assert_debug_id_ne!(&primary1.debug_id(), &primary2.debug_id());

        // Tokens are never reused, even after an earlier id is dropped.
        let id1 = format!("{:?}", primary1.debug_id());
        std::mem::drop(primary1);
        let primary3 = Primary::new(1);
        assert_ne!(id1, format!("{:?}", primary3.debug_id()));
    }
}