1use core::fmt::Debug;
17use core::hash::{Hash, Hasher};
18use core::ops::Deref;
19use core::panic::Location;
20use core::sync::atomic::{AtomicBool, Ordering};
21
22use derivative::Derivative;
23
24mod caller {
25 use core::fmt::Debug;
31 use core::panic::Location;
32
33 #[derive(Default)]
35 pub(super) struct Callers {
36 #[cfg(feature = "rc-debug-names")]
46 pub(super) callers: std::sync::Mutex<std::collections::HashMap<Location<'static>, usize>>,
47 }
48
49 impl Debug for Callers {
50 #[cfg(not(feature = "rc-debug-names"))]
51 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
52 write!(f, "(Not Tracked)")
53 }
54 #[cfg(feature = "rc-debug-names")]
55 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
56 let Self { callers } = self;
57 let callers = callers.lock().unwrap();
58 write!(f, "[\n")?;
59 for (l, c) in callers.iter() {
60 write!(f, " {l} => {c},\n")?;
61 }
62 write!(f, "]")
63 }
64 }
65
66 impl Callers {
67 pub(super) fn insert(&self, caller: &Location<'static>) -> TrackedCaller {
71 #[cfg(not(feature = "rc-debug-names"))]
72 {
73 let _ = caller;
74 TrackedCaller {}
75 }
76 #[cfg(feature = "rc-debug-names")]
77 {
78 let Self { callers } = self;
79 let mut callers = callers.lock().unwrap();
80 let count = callers.entry(caller.clone()).or_insert(0);
81 *count += 1;
82 TrackedCaller { location: caller.clone() }
83 }
84 }
85 }
86
87 #[derive(Debug)]
88 pub(super) struct TrackedCaller {
89 #[cfg(feature = "rc-debug-names")]
90 pub(super) location: Location<'static>,
91 }
92
93 impl TrackedCaller {
94 #[cfg(not(feature = "rc-debug-names"))]
95 pub(super) fn release(&mut self, Callers {}: &Callers) {
96 let Self {} = self;
97 }
98
99 #[cfg(feature = "rc-debug-names")]
100 pub(super) fn release(&mut self, Callers { callers }: &Callers) {
101 let Self { location } = self;
102 let mut callers = callers.lock().unwrap();
103 let mut entry = match callers.entry(location.clone()) {
104 std::collections::hash_map::Entry::Vacant(_) => {
105 panic!("location {location:?} was not in the callers map")
106 }
107 std::collections::hash_map::Entry::Occupied(o) => o,
108 };
109
110 let sub = entry
111 .get()
112 .checked_sub(1)
113 .unwrap_or_else(|| panic!("zero-count location {location:?} in map"));
114 if sub == 0 {
115 let _: usize = entry.remove();
116 } else {
117 *entry.get_mut() = sub;
118 }
119 }
120 }
121}
122
123mod resource_token {
124 use core::fmt::Debug;
125 use core::sync::atomic::{AtomicU64, Ordering};
126 use std::marker::PhantomData;
127
128 pub struct ResourceToken<'a> {
138 value: u64,
139 _marker: PhantomData<&'a ()>,
140 }
141
142 impl<'a> ResourceToken<'a> {
143 pub fn extend_lifetime(self) -> ResourceToken<'static> {
152 ResourceToken { value: self.value, _marker: PhantomData }
153 }
154
155 pub fn export_value(self) -> u64 {
165 self.value
166 }
167 }
168
169 impl<'a> Debug for ResourceToken<'a> {
170 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
171 write!(f, "{}", self.value)
172 }
173 }
174
175 pub struct ResourceTokenValue(u64);
180
181 impl ResourceTokenValue {
182 pub fn token(&self) -> ResourceToken<'_> {
184 let ResourceTokenValue(value) = self;
185 ResourceToken { value: *value, _marker: PhantomData }
186 }
187 }
188
189 impl core::fmt::Debug for ResourceTokenValue {
190 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
191 let ResourceTokenValue(value) = self;
192 write!(f, "{}", value)
193 }
194 }
195
196 impl Default for ResourceTokenValue {
197 fn default() -> Self {
198 static NEXT_TOKEN: AtomicU64 = AtomicU64::new(0);
199 Self(NEXT_TOKEN.fetch_add(1, Ordering::Relaxed))
205 }
206 }
207}
208
209pub use resource_token::{ResourceToken, ResourceTokenValue};
210
211mod debug_id {
212 use super::ResourceToken;
213 use core::fmt::Debug;
214
215 pub(super) enum DebugId<T> {
220 WithToken { ptr: *const T, token: ResourceToken<'static> },
223 WithoutToken { ptr: *const T },
226 }
227
228 impl<T> Debug for DebugId<T> {
229 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
230 match self {
231 DebugId::WithToken { ptr, token } => write!(f, "{:?}:{:?}", token, ptr),
232 DebugId::WithoutToken { ptr } => write!(f, "?:{:?}", ptr),
233 }
234 }
235 }
236}
237
238#[derive(Derivative)]
239#[derivative(Debug)]
240struct Inner<T> {
241 marked_for_destruction: AtomicBool,
242 callers: caller::Callers,
243 data: core::mem::ManuallyDrop<T>,
244 #[derivative(Debug = "ignore")]
248 notifier: crate::Mutex<Option<Box<dyn Notifier<T>>>>,
249 resource_token: ResourceTokenValue,
250}
251
252impl<T> Inner<T> {
253 fn pre_drop_check(marked_for_destruction: &AtomicBool) {
254 assert!(marked_for_destruction.load(Ordering::Acquire), "Must be marked for destruction");
259 }
260
261 fn unwrap(mut self) -> T {
262 let Inner { marked_for_destruction, data, callers: holders, notifier, resource_token } =
266 &mut self;
267
268 Inner::<T>::pre_drop_check(marked_for_destruction);
274
275 let data = unsafe {
279 core::ptr::drop_in_place(marked_for_destruction);
281 core::ptr::drop_in_place(holders);
282 core::ptr::drop_in_place(notifier);
283 core::ptr::drop_in_place(resource_token);
284
285 core::mem::ManuallyDrop::take(data)
286 };
287 core::mem::forget(self);
291
292 data
293 }
294
295 fn set_notifier<N: Notifier<T> + 'static>(&self, notifier: N) {
299 let Self { notifier: slot, .. } = self;
300
301 let boxed: Box<dyn Notifier<T>> = Box::new(notifier);
307 let prev_notifier = { slot.lock().replace(boxed) };
308 assert!(prev_notifier.is_none(), "can't have a notifier already installed");
310 }
311}
312
313impl<T> Drop for Inner<T> {
314 fn drop(&mut self) {
315 let Inner { marked_for_destruction, data, callers: _, notifier, resource_token: _ } = self;
316 let data = unsafe { core::mem::ManuallyDrop::take(data) };
322 Self::pre_drop_check(marked_for_destruction);
323 if let Some(mut notifier) = notifier.lock().take() {
324 notifier.notify(data);
325 }
326 }
327}
328
329#[derive(Debug)]
338pub struct Primary<T> {
339 inner: core::mem::ManuallyDrop<alloc::sync::Arc<Inner<T>>>,
340}
341
342impl<T> Drop for Primary<T> {
343 fn drop(&mut self) {
344 let was_marked = self.mark_for_destruction();
345 let Self { inner } = self;
346 let inner = unsafe { core::mem::ManuallyDrop::take(inner) };
350
351 if !std::thread::panicking() {
357 assert_eq!(was_marked, false, "Must not be marked for destruction yet");
358
359 let Inner {
360 marked_for_destruction: _,
361 callers,
362 data: _,
363 notifier: _,
364 resource_token: _,
365 } = &*inner;
366
367 let refs = alloc::sync::Arc::strong_count(&inner).checked_sub(1).unwrap();
370 assert!(
371 refs == 0,
372 "dropped Primary with {refs} strong refs remaining, \
373 Callers={callers:?}"
374 );
375 }
376 }
377}
378
379impl<T> AsRef<T> for Primary<T> {
380 fn as_ref(&self) -> &T {
381 self.deref()
382 }
383}
384
385impl<T> Deref for Primary<T> {
386 type Target = T;
387
388 fn deref(&self) -> &T {
389 let Self { inner } = self;
390 let Inner { marked_for_destruction: _, data, callers: _, notifier: _, resource_token: _ } =
391 &***inner;
392 data
393 }
394}
395
396impl<T> Primary<T> {
397 fn mark_for_destruction(&mut self) -> bool {
402 let Self { inner } = self;
403 inner.marked_for_destruction.swap(true, Ordering::Release)
407 }
408
409 pub fn new(data: T) -> Primary<T> {
411 Primary {
412 inner: core::mem::ManuallyDrop::new(alloc::sync::Arc::new(Inner {
413 marked_for_destruction: AtomicBool::new(false),
414 callers: caller::Callers::default(),
415 data: core::mem::ManuallyDrop::new(data),
416 notifier: crate::Mutex::new(None),
417 resource_token: ResourceTokenValue::default(),
418 })),
419 }
420 }
421
422 pub fn new_cyclic(data_fn: impl FnOnce(Weak<T>) -> T) -> Primary<T> {
429 Primary {
430 inner: core::mem::ManuallyDrop::new(alloc::sync::Arc::new_cyclic(move |weak| Inner {
431 marked_for_destruction: AtomicBool::new(false),
432 callers: caller::Callers::default(),
433 data: core::mem::ManuallyDrop::new(data_fn(Weak(weak.clone()))),
434 notifier: crate::Mutex::new(None),
435 resource_token: ResourceTokenValue::default(),
436 })),
437 }
438 }
439
440 #[cfg_attr(feature = "rc-debug-names", track_caller)]
442 pub fn clone_strong(Self { inner }: &Self) -> Strong<T> {
443 let Inner { data: _, callers, marked_for_destruction: _, notifier: _, resource_token: _ } =
444 &***inner;
445 let caller = callers.insert(Location::caller());
446 Strong { inner: alloc::sync::Arc::clone(inner), caller }
447 }
448
449 pub fn downgrade(Self { inner }: &Self) -> Weak<T> {
451 Weak(alloc::sync::Arc::downgrade(inner))
452 }
453
454 pub fn ptr_eq(
456 Self { inner: this }: &Self,
457 Strong { inner: other, caller: _ }: &Strong<T>,
458 ) -> bool {
459 alloc::sync::Arc::ptr_eq(this, other)
460 }
461
462 pub fn debug_id(&self) -> impl Debug + '_ {
465 let Self { inner } = self;
466
467 let token = inner.resource_token.token().extend_lifetime();
470
471 debug_id::DebugId::WithToken { ptr: alloc::sync::Arc::as_ptr(inner), token }
472 }
473
474 fn mark_for_destruction_and_take_inner(mut this: Self) -> alloc::sync::Arc<Inner<T>> {
475 assert!(!this.mark_for_destruction());
477 let Self { inner } = &mut this;
478 let inner = unsafe { core::mem::ManuallyDrop::take(inner) };
481 core::mem::forget(this);
482 inner
483 }
484
485 fn try_unwrap(this: Self) -> Result<T, alloc::sync::Arc<Inner<T>>> {
486 let inner = Self::mark_for_destruction_and_take_inner(this);
487 alloc::sync::Arc::try_unwrap(inner).map(Inner::unwrap)
488 }
489
490 pub fn unwrap(this: Self) -> T {
496 Self::try_unwrap(this).unwrap_or_else(|inner| {
497 let callers = &inner.callers;
498 let refs = alloc::sync::Arc::strong_count(&inner).checked_sub(1).unwrap();
499 panic!("can't unwrap, still had {refs} strong refs: {callers:?}");
500 })
501 }
502
503 pub fn unwrap_with_notifier<N: Notifier<T> + 'static>(this: Self, notifier: N) {
508 let inner = Self::mark_for_destruction_and_take_inner(this);
509 inner.set_notifier(notifier);
510 core::mem::drop(inner);
513 }
514
515 pub fn unwrap_or_notify_with<N: Notifier<T> + 'static, O, F: FnOnce() -> (N, O)>(
522 this: Self,
523 new_notifier: F,
524 ) -> Result<T, O> {
525 Self::try_unwrap(this).map_err(move |inner| {
526 let (notifier, output) = new_notifier();
527 inner.set_notifier(notifier);
528 output
529 })
530 }
531
532 pub fn debug_references(this: &Self) -> DebugReferences<T> {
534 let Self { inner } = this;
535 DebugReferences(alloc::sync::Arc::downgrade(&*inner))
536 }
537}
538
539#[derive(Debug, Derivative)]
548pub struct Strong<T> {
549 inner: alloc::sync::Arc<Inner<T>>,
550 caller: caller::TrackedCaller,
551}
552
553impl<T> Drop for Strong<T> {
554 fn drop(&mut self) {
555 let Self { inner, caller } = self;
556 let Inner { marked_for_destruction: _, callers, data: _, notifier: _, resource_token: _ } =
557 &**inner;
558 caller.release(callers);
559 }
560}
561
562impl<T> AsRef<T> for Strong<T> {
563 fn as_ref(&self) -> &T {
564 self.deref()
565 }
566}
567
568impl<T> Deref for Strong<T> {
569 type Target = T;
570
571 fn deref(&self) -> &T {
572 let Self { inner, caller: _ } = self;
573 let Inner { marked_for_destruction: _, data, callers: _, notifier: _, resource_token: _ } =
574 inner.deref();
575 data
576 }
577}
578
579impl<T> core::cmp::Eq for Strong<T> {}
580
581impl<T> core::cmp::PartialEq for Strong<T> {
582 fn eq(&self, other: &Self) -> bool {
583 Self::ptr_eq(self, other)
584 }
585}
586
587impl<T> Hash for Strong<T> {
588 fn hash<H: Hasher>(&self, state: &mut H) {
589 let Self { inner, caller: _ } = self;
590 alloc::sync::Arc::as_ptr(inner).hash(state)
591 }
592}
593
594impl<T> Clone for Strong<T> {
595 #[cfg_attr(feature = "rc-debug-names", track_caller)]
596 fn clone(&self) -> Self {
597 let Self { inner, caller: _ } = self;
598 let Inner { data: _, marked_for_destruction: _, callers, notifier: _, resource_token: _ } =
599 &**inner;
600 let caller = callers.insert(Location::caller());
601 Self { inner: alloc::sync::Arc::clone(inner), caller }
602 }
603}
604
605impl<T> Strong<T> {
606 pub fn downgrade(Self { inner, caller: _ }: &Self) -> Weak<T> {
608 Weak(alloc::sync::Arc::downgrade(inner))
609 }
610
611 pub fn debug_id(&self) -> impl Debug + '_ {
614 let Self { inner, caller: _ } = self;
615
616 let token = inner.resource_token.token().extend_lifetime();
619
620 debug_id::DebugId::WithToken { ptr: alloc::sync::Arc::as_ptr(inner), token }
621 }
622
623 pub fn resource_token(&self) -> ResourceToken<'_> {
625 self.inner.resource_token.token()
626 }
627
628 pub fn marked_for_destruction(Self { inner, caller: _ }: &Self) -> bool {
630 let Inner { marked_for_destruction, data: _, callers: _, notifier: _, resource_token: _ } =
631 inner.as_ref();
632 marked_for_destruction.load(Ordering::Acquire)
637 }
638
639 pub fn weak_ptr_eq(Self { inner: this, caller: _ }: &Self, Weak(other): &Weak<T>) -> bool {
641 core::ptr::eq(alloc::sync::Arc::as_ptr(this), other.as_ptr())
642 }
643
644 pub fn ptr_eq(
646 Self { inner: this, caller: _ }: &Self,
647 Self { inner: other, caller: _ }: &Self,
648 ) -> bool {
649 alloc::sync::Arc::ptr_eq(this, other)
650 }
651
652 pub fn ptr_cmp(
654 Self { inner: this, caller: _ }: &Self,
655 Self { inner: other, caller: _ }: &Self,
656 ) -> core::cmp::Ordering {
657 let this = alloc::sync::Arc::as_ptr(this);
658 let other = alloc::sync::Arc::as_ptr(other);
659 this.cmp(&other)
660 }
661
662 pub fn debug_references(this: &Self) -> DebugReferences<T> {
664 let Self { inner, caller: _ } = this;
665 DebugReferences(alloc::sync::Arc::downgrade(inner))
666 }
667}
668
669#[derive(Debug)]
680pub struct Weak<T>(alloc::sync::Weak<Inner<T>>);
681
682impl<T> core::cmp::Eq for Weak<T> {}
683
684impl<T> core::cmp::PartialEq for Weak<T> {
685 fn eq(&self, other: &Self) -> bool {
686 Self::ptr_eq(self, other)
687 }
688}
689
690impl<T> Hash for Weak<T> {
691 fn hash<H: Hasher>(&self, state: &mut H) {
692 let Self(this) = self;
693 this.as_ptr().hash(state)
694 }
695}
696
697impl<T> Clone for Weak<T> {
698 fn clone(&self) -> Self {
699 let Self(this) = self;
700 Weak(this.clone())
701 }
702}
703
704impl<T> Weak<T> {
705 pub fn ptr_eq(&self, Self(other): &Self) -> bool {
707 let Self(this) = self;
708 this.ptr_eq(other)
709 }
710
711 pub fn debug_id(&self) -> impl Debug + '_ {
714 match self.upgrade() {
715 Some(strong) => {
716 let Strong { inner, caller: _ } = &strong;
717
718 let token = inner.resource_token.token().extend_lifetime();
721
722 debug_id::DebugId::WithToken { ptr: alloc::sync::Arc::as_ptr(&inner), token }
723 }
724 None => {
725 let Self(this) = self;
726 debug_id::DebugId::WithoutToken { ptr: this.as_ptr() }
728 }
729 }
730 }
731
732 #[cfg_attr(feature = "rc-debug-names", track_caller)]
736 pub fn upgrade(&self) -> Option<Strong<T>> {
737 let Self(weak) = self;
738 let arc = weak.upgrade()?;
739 let Inner { marked_for_destruction, data: _, callers, notifier: _, resource_token: _ } =
740 arc.deref();
741
742 if !marked_for_destruction.load(Ordering::Acquire) {
747 let caller = callers.insert(Location::caller());
748 Some(Strong { inner: arc, caller })
749 } else {
750 None
751 }
752 }
753
754 pub fn strong_count(&self) -> usize {
756 let Self(weak) = self;
757 weak.strong_count()
758 }
759
760 pub fn debug_references(&self) -> DebugReferences<T> {
762 let Self(inner) = self;
763 DebugReferences(inner.clone())
764 }
765}
766
767fn debug_refs(
768 refs: Option<(usize, &AtomicBool, &caller::Callers)>,
769 name: &'static str,
770 f: &mut core::fmt::Formatter<'_>,
771) -> core::fmt::Result {
772 let mut f = f.debug_struct(name);
773 match refs {
774 Some((strong_count, marked_for_destruction, callers)) => f
775 .field("strong_count", &strong_count)
776 .field("marked_for_destruction", marked_for_destruction)
777 .field("callers", callers)
778 .finish(),
779 None => {
780 let strong_count = 0_usize;
781 f.field("strong_count", &strong_count).finish_non_exhaustive()
782 }
783 }
784}
785
786#[derive(Clone)]
789pub struct DebugReferences<T>(alloc::sync::Weak<Inner<T>>);
790
791impl<T> Debug for DebugReferences<T> {
792 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
793 let Self(inner) = self;
794 let inner = inner.upgrade();
795 let refs = inner.as_ref().map(|inner| {
796 (alloc::sync::Arc::strong_count(inner), &inner.marked_for_destruction, &inner.callers)
797 });
798 debug_refs(refs, "DebugReferences", f)
799 }
800}
801
802impl<T: Send + Sync + 'static> DebugReferences<T> {
803 pub fn into_dyn(self) -> DynDebugReferences {
805 let Self(w) = self;
806 DynDebugReferences(w)
807 }
808}
809
810#[derive(Clone)]
812pub struct DynDebugReferences(alloc::sync::Weak<dyn ExposeRefs>);
813
814impl Debug for DynDebugReferences {
815 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
816 let Self(inner) = self;
817 let inner = inner.upgrade();
818 let refs = inner.as_ref().map(|inner| {
819 let (marked_for_destruction, callers) = inner.refs_info();
820 (alloc::sync::Arc::strong_count(inner), marked_for_destruction, callers)
821 });
822 debug_refs(refs, "DynDebugReferences", f)
823 }
824}
825
826trait ExposeRefs: Send + Sync + 'static {
828 fn refs_info(&self) -> (&AtomicBool, &caller::Callers);
829}
830
831impl<T: Send + Sync + 'static> ExposeRefs for Inner<T> {
832 fn refs_info(&self) -> (&AtomicBool, &caller::Callers) {
833 (&self.marked_for_destruction, &self.callers)
834 }
835}
836
837pub trait Notifier<T>: Send {
842 fn notify(&mut self, data: T);
845}
846
847#[derive(Debug, Derivative)]
852#[derivative(Clone(bound = ""))]
853pub struct ArcNotifier<T>(alloc::sync::Arc<crate::Mutex<Option<T>>>);
854
855impl<T> ArcNotifier<T> {
856 pub fn new() -> Self {
858 Self(alloc::sync::Arc::new(crate::Mutex::new(None)))
859 }
860
861 pub fn take(&self) -> Option<T> {
863 let Self(inner) = self;
864 inner.lock().take()
865 }
866}
867
868impl<T: Send> Notifier<T> for ArcNotifier<T> {
869 fn notify(&mut self, data: T) {
870 let Self(inner) = self;
871 assert!(inner.lock().replace(data).is_none(), "notified twice");
872 }
873}
874
875pub struct MapNotifier<N, F> {
878 inner: N,
879 map: Option<F>,
880}
881
882impl<N, F> MapNotifier<N, F> {
883 pub fn new(notifier: N, map: F) -> Self {
886 Self { inner: notifier, map: Some(map) }
887 }
888}
889
890impl<A, B, N: Notifier<B>, F: FnOnce(A) -> B> Notifier<A> for MapNotifier<N, F>
891where
892 Self: Send,
893{
894 fn notify(&mut self, data: A) {
895 let Self { inner, map } = self;
896 let map = map.take().expect("notified twice");
897 inner.notify(map(data))
898 }
899}
900
901impl<T> Notifier<T> for core::convert::Infallible {
903 fn notify(&mut self, _data: T) {
904 match *self {}
905 }
906}
907
908#[cfg(test)]
909mod tests {
910 use super::*;
911
912 #[test]
913 fn zombie_weak() {
914 let primary = Primary::new(());
915 let weak = {
916 let strong = Primary::clone_strong(&primary);
917 Strong::downgrade(&strong)
918 };
919 core::mem::drop(primary);
920
921 assert!(weak.upgrade().is_none());
922 }
923
924 #[test]
925 fn rcs() {
926 const INITIAL_VAL: u8 = 1;
927 const NEW_VAL: u8 = 2;
928
929 let primary = Primary::new(crate::sync::Mutex::new(INITIAL_VAL));
930 let strong = Primary::clone_strong(&primary);
931 let weak = Strong::downgrade(&strong);
932
933 *primary.lock().unwrap() = NEW_VAL;
934 assert_eq!(*primary.deref().lock().unwrap(), NEW_VAL);
935 assert_eq!(*strong.deref().lock().unwrap(), NEW_VAL);
936 assert_eq!(*weak.upgrade().unwrap().deref().lock().unwrap(), NEW_VAL);
937 }
938
939 #[test]
940 fn unwrap_primary_without_strong_held() {
941 const VAL: u16 = 6;
942 let primary = Primary::new(VAL);
943 assert_eq!(Primary::unwrap(primary), VAL);
944 }
945
946 #[test]
947 #[should_panic(expected = "can't unwrap, still had 1 strong refs")]
948 fn unwrap_primary_with_strong_held() {
949 let primary = Primary::new(8);
950 let _strong: Strong<_> = Primary::clone_strong(&primary);
951 let _: u16 = Primary::unwrap(primary);
952 }
953
954 #[test]
955 #[should_panic(expected = "dropped Primary with 1 strong refs remaining")]
956 fn drop_primary_with_strong_held() {
957 let primary = Primary::new(9);
958 let _strong: Strong<_> = Primary::clone_strong(&primary);
959 core::mem::drop(primary);
960 }
961
962 #[cfg(not(target_os = "fuchsia"))]
966 #[test]
967 #[should_panic(expected = "oopsie")]
968 fn double_panic_protect() {
969 let primary = Primary::new(9);
970 let strong = Primary::clone_strong(&primary);
971 let _tuple_to_invert_drop_order = (primary, strong);
974 panic!("oopsie");
975 }
976
977 #[cfg(feature = "rc-debug-names")]
978 #[test]
979 fn tracked_callers() {
980 let primary = Primary::new(10);
981 let here = Location::caller();
984 let strong1 = Primary::clone_strong(&primary);
985 let strong2 = strong1.clone();
986 let weak = Strong::downgrade(&strong2);
987 let strong3 = weak.upgrade().unwrap();
988
989 let Primary { inner } = &primary;
990 let Inner { marked_for_destruction: _, callers, data: _, notifier: _, resource_token: _ } =
991 &***inner;
992
993 let strongs = [strong1, strong2, strong3];
994 let _: &Location<'_> = strongs.iter().enumerate().fold(here, |prev, (i, cur)| {
995 let Strong { inner: _, caller: caller::TrackedCaller { location: cur } } = cur;
996 assert_eq!(prev.file(), cur.file(), "{i}");
997 assert!(prev.line() < cur.line(), "{prev} < {cur}, {i}");
998 {
999 let callers = callers.callers.lock().unwrap();
1000 assert_eq!(callers.get(cur).copied(), Some(1));
1001 }
1002
1003 cur
1004 });
1005
1006 std::mem::drop(strongs);
1008 {
1009 let callers = callers.callers.lock().unwrap();
1010 let callers = callers.deref();
1011 assert!(callers.is_empty(), "{callers:?}");
1012 }
1013 }
1014 #[cfg(feature = "rc-debug-names")]
1015 #[test]
1016 fn same_location_caller_tracking() {
1017 fn clone_in_fn<T>(p: &Primary<T>) -> Strong<T> {
1018 Primary::clone_strong(p)
1019 }
1020
1021 let primary = Primary::new(10);
1022 let strong1 = clone_in_fn(&primary);
1023 let strong2 = clone_in_fn(&primary);
1024 assert_eq!(strong1.caller.location, strong2.caller.location);
1025
1026 let Primary { inner } = &primary;
1027 let Inner { marked_for_destruction: _, callers, data: _, notifier: _, resource_token: _ } =
1028 &***inner;
1029
1030 {
1031 let callers = callers.callers.lock().unwrap();
1032 assert_eq!(callers.get(&strong1.caller.location).copied(), Some(2));
1033 }
1034
1035 std::mem::drop(strong1);
1036 std::mem::drop(strong2);
1037
1038 {
1039 let callers = callers.callers.lock().unwrap();
1040 let callers = callers.deref();
1041 assert!(callers.is_empty(), "{callers:?}");
1042 }
1043 }
1044
1045 #[cfg(feature = "rc-debug-names")]
1046 #[test]
1047 #[should_panic(expected = "core/sync/src/rc.rs")]
1048 fn callers_in_panic() {
1049 let primary = Primary::new(10);
1050 let _strong = Primary::clone_strong(&primary);
1051 drop(primary);
1052 }
1053
1054 #[test]
1055 fn unwrap_with_notifier() {
1056 let primary = Primary::new(10);
1057 let strong = Primary::clone_strong(&primary);
1058 let notifier = ArcNotifier::new();
1059 Primary::unwrap_with_notifier(primary, notifier.clone());
1060 assert_eq!(notifier.take(), None);
1062 core::mem::drop(strong);
1063 assert_eq!(notifier.take(), Some(10));
1064 }
1065
1066 #[test]
1067 fn unwrap_or_notify_with_immediate() {
1068 let primary = Primary::new(10);
1069 let result = Primary::unwrap_or_notify_with::<ArcNotifier<_>, (), _>(primary, || {
1070 panic!("should not try to create notifier")
1071 });
1072 assert_eq!(result, Ok(10));
1073 }
1074
1075 #[test]
1076 fn unwrap_or_notify_with_deferred() {
1077 let primary = Primary::new(10);
1078 let strong = Primary::clone_strong(&primary);
1079 let result = Primary::unwrap_or_notify_with(primary, || {
1080 let notifier = ArcNotifier::new();
1081 (notifier.clone(), notifier)
1082 });
1083 let notifier = result.unwrap_err();
1084 assert_eq!(notifier.take(), None);
1085 core::mem::drop(strong);
1086 assert_eq!(notifier.take(), Some(10));
1087 }
1088
1089 #[test]
1090 fn map_notifier() {
1091 let primary = Primary::new(10);
1092 let notifier = ArcNotifier::new();
1093 let map_notifier = MapNotifier::new(notifier.clone(), |data| (data, data + 1));
1094 Primary::unwrap_with_notifier(primary, map_notifier);
1095 assert_eq!(notifier.take(), Some((10, 11)));
1096 }
1097
1098 #[test]
1099 fn new_cyclic() {
1100 #[derive(Debug)]
1101 struct Data {
1102 value: i32,
1103 weak: Weak<Data>,
1104 }
1105
1106 let primary = Primary::new_cyclic(|weak| Data { value: 2, weak });
1107 assert_eq!(primary.value, 2);
1108 let strong = primary.weak.upgrade().unwrap();
1109 assert_eq!(strong.value, 2);
1110 assert!(Primary::ptr_eq(&primary, &strong));
1111 }
1112
1113 macro_rules! assert_debug_id_eq {
1114 ($id1:expr, $id2:expr) => {
1115 assert_eq!(alloc::format!("{:?}", $id1), alloc::format!("{:?}", $id2))
1116 };
1117 }
1118 macro_rules! assert_debug_id_ne {
1119 ($id1:expr, $id2:expr) => {
1120 assert_ne!(alloc::format!("{:?}", $id1), alloc::format!("{:?}", $id2))
1121 };
1122 }
1123
1124 #[test]
1125 fn debug_ids_are_stable() {
1126 let primary = Primary::new(1);
1128 let strong = Primary::clone_strong(&primary);
1129 let weak_p = Primary::downgrade(&primary);
1130 let weak_s = Strong::downgrade(&strong);
1131 let weak_c = weak_p.clone();
1132 assert_debug_id_eq!(&primary.debug_id(), &strong.debug_id());
1133 assert_debug_id_eq!(&primary.debug_id(), &weak_p.debug_id());
1134 assert_debug_id_eq!(&primary.debug_id(), &weak_s.debug_id());
1135 assert_debug_id_eq!(&primary.debug_id(), &weak_c.debug_id());
1136 }
1137
1138 #[test]
1139 fn debug_ids_are_unique() {
1140 let primary1 = Primary::new(1);
1142 let primary2 = Primary::new(1);
1143 assert_debug_id_ne!(&primary1.debug_id(), &primary2.debug_id());
1144
1145 let id1 = format!("{:?}", primary1.debug_id());
1147 std::mem::drop(primary1);
1148 let primary3 = Primary::new(1);
1149 assert_ne!(id1, format!("{:?}", primary3.debug_id()));
1150 }
1151}