flyweights/raw.rs
// Copyright 2025 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

use std::alloc::{alloc, dealloc, handle_alloc_error};

use core::alloc::Layout;
use core::ptr::{slice_from_raw_parts, NonNull};
use core::sync::atomic::{AtomicUsize, Ordering};

/// The maximum refcount is `isize::MAX` to match the standard library behavior for `Arc`.
const MAX_REFCOUNT: usize = isize::MAX as usize;

// The header for a memory allocation.
#[repr(C)]
struct Header {
    ref_count: AtomicUsize,
    len: usize,
}

/// A ZST used to indicate a pointer which points to the payload portion of a memory allocation.
///
/// A `Payload` is always located immediately after a `Header`, and is the start of the bytes of
/// the slice. Users always point to the `Payload` because the most common operation is
/// `as_slice`; holding the payload pointer directly avoids a pointer offset on every access.
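///
/// An illustrative sketch of the allocation layout (added for clarity, not from the original
/// documentation):
///
/// ```text
/// +---------------------------+---------------------+
/// | Header { ref_count, len } | payload bytes (len) |
/// +---------------------------+---------------------+
/// ^ allocation start          ^ `Payload` pointer returned by `alloc`
/// ```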
#[repr(transparent)]
pub struct Payload {
    // This guarantees `Payload` will always be a ZST with the same alignment as `Header`.
    _align: [Header; 0],
}

impl Payload {
    /// Returns the layout for an allocation holding a `Header` followed by `len` payload bytes.
    #[inline]
    fn layout(len: usize) -> Layout {
        let (layout, byte_offset) = Layout::new::<Header>()
            .extend(Layout::array::<u8>(len).unwrap())
            .expect("attempted to allocate a FlyStr that was too large (~isize::MAX)");

        debug_assert_eq!(byte_offset, size_of::<Header>());
        debug_assert!(layout.align() > 1);
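        // Illustrative note (assumes a typical 64-bit target; not from the original source):
        // `Header` is 16 bytes with 8-byte alignment there, so `Self::layout(5)` describes a
        // 21-byte, 8-byte-aligned allocation whose payload begins at byte offset 16.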

        layout
    }

    /// Returns a pointer to a `Payload` containing a copy of `bytes`.
    pub fn alloc(bytes: &[u8]) -> NonNull<Self> {
        let layout = Self::layout(bytes.len());

        // SAFETY: `layout` always has non-zero size because `Header` has non-zero size.
        let ptr = unsafe { alloc(layout) };
        if ptr.is_null() {
            handle_alloc_error(layout);
        }

        let header = ptr.cast::<Header>();
        // SAFETY: `header` points to memory suitable for a `Header` and is valid for writes.
        unsafe {
            header.write(Header {
                ref_count: AtomicUsize::new(1),
                len: bytes.len(),
            });
        }

        // SAFETY: The size of `Header` fits in an `isize` and `header` points to an allocation
        // of at least that size. Even if the payload is zero-sized, one-past-the-end pointers
        // are valid to create.
        let payload = unsafe { header.add(1).cast::<u8>() };

        // SAFETY: `payload` points to `bytes.len()` bytes, per the constructed layout.
        // `copy_to_nonoverlapping` is a no-op for zero-length slices, so this is always sound.
        // We just have to make sure that the pointers are properly aligned, which they always
        // are for `u8`, whose alignment is 1.
        unsafe {
            bytes.as_ptr().copy_to_nonoverlapping(payload, bytes.len());
        }

        // SAFETY: Allocation succeeded, so `payload` is guaranteed not to be null.
        unsafe { NonNull::new_unchecked(payload.cast::<Payload>()) }
    }

    /// Deallocates the payload.
    ///
    /// # Safety
    ///
    /// `ptr` must point to a `Payload` returned from `Payload::alloc`.
    #[inline]
    pub unsafe fn dealloc(ptr: *mut Self) {
        // SAFETY: The caller guaranteed that `ptr` points to a `Payload` returned from
        // `Payload::alloc`.
        let header = unsafe { Self::header(ptr) };
        // SAFETY: `header` points to a `Header` which is valid for reads.
        let len = unsafe { (*header).len };
        let layout = Self::layout(len);

        // SAFETY: `header` points to a memory allocation with layout `layout`.
        unsafe {
            dealloc(header.cast(), layout);
        }
    }

    /// Returns the current refcount of a `Payload`.
    ///
    /// # Safety
    ///
    /// `ptr` must point to a `Payload` returned from `Payload::alloc`.
    #[cfg(test)]
    #[inline]
    pub unsafe fn refcount(ptr: *mut Self) -> usize {
        // SAFETY: The caller guaranteed that `ptr` points to a `Payload` returned from
        // `Payload::alloc`.
        let header = unsafe { Self::header(ptr) };
        // SAFETY: `header` points to a `Header` which is valid for reads and writes.
        unsafe { (*header).ref_count.load(Ordering::Relaxed) }
    }

    /// Increments the refcount of a `Payload` by one, returning the refcount prior to
    /// incrementing.
    ///
    /// # Safety
    ///
    /// `ptr` must point to a `Payload` returned from `Payload::alloc`.
    #[inline]
    pub unsafe fn inc_ref(ptr: *mut Self) -> usize {
        // SAFETY: The caller guaranteed that `ptr` points to a `Payload` returned from
        // `Payload::alloc`.
        let header = unsafe { Self::header(ptr) };
        // SAFETY: `header` points to a `Header` which is valid for reads and writes. Relaxed
        // ordering is sufficient because headers and payloads are immutable. Any other ordering
        // requirements are enforced externally, e.g. by thread synchronization to send data.
        let prev_count = unsafe { (*header).ref_count.fetch_add(1, Ordering::Relaxed) };
        if prev_count > MAX_REFCOUNT {
            std::process::abort();
        }
        prev_count
    }

    /// Decrements the refcount of a `Payload` by one, returning the refcount prior to
    /// decrementing.
    ///
    /// Decrementing the refcount to zero does not deallocate the payload. Deallocating the
    /// payload must be done manually.
    ///
    /// # Safety
    ///
    /// `ptr` must point to a `Payload` returned from `Payload::alloc`.
    #[inline]
    pub unsafe fn dec_ref(ptr: *mut Self) -> usize {
        // SAFETY: The caller guaranteed that `ptr` points to a `Payload` returned from
        // `Payload::alloc`.
        let header = unsafe { Self::header(ptr) };
        // SAFETY: `header` points to a `Header` which is valid for reads and writes. Relaxed
        // ordering is sufficient here because the contained value is immutable and can't be
        // modified after creation. We also never drop any data here; that isn't required for
        // relaxed ordering to be sufficient, but it provides an extra margin of safety.
        unsafe { (*header).ref_count.fetch_sub(1, Ordering::Relaxed) }
    }

    /// Returns a pointer to the `Header` that precedes a `Payload`.
    ///
    /// # Safety
    ///
    /// `ptr` must point to a `Payload` returned from `Payload::alloc`.
    #[inline]
    unsafe fn header(ptr: *mut Self) -> *mut Header {
        // SAFETY: `Payload` pointers are always preceded by a `Header`.
        unsafe { ptr.cast::<Header>().sub(1) }
    }

    /// Returns the length of the byte slice contained in a `Payload`.
    ///
    /// # Safety
    ///
    /// `ptr` must point to a `Payload` returned from `Payload::alloc`.
    #[inline]
    pub unsafe fn len(ptr: *mut Self) -> usize {
        // SAFETY: The caller guaranteed that `ptr` points to a `Payload` returned from
        // `Payload::alloc`.
        unsafe { (*Self::header(ptr)).len }
    }

    /// Returns a pointer to the byte slice of a `Payload`.
    ///
    /// # Safety
    ///
    /// `ptr` must point to a `Payload` returned from `Payload::alloc`.
    #[inline]
    pub unsafe fn bytes(ptr: *mut Self) -> *const [u8] {
        // SAFETY: The caller guaranteed that `ptr` points to a `Payload` returned from
        // `Payload::alloc`.
        let len = unsafe { Self::len(ptr) };
        slice_from_raw_parts(ptr.cast::<u8>(), len)
    }
}
183}