fdf_channel/arena.rs

// Copyright 2024 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

//! Safe bindings for the driver runtime arena stable ABI

use core::alloc::Layout;
use core::cmp::max;
use core::marker::PhantomData;
use core::mem::MaybeUninit;
use core::ops::{Deref, DerefMut};
use core::ptr::{null_mut, slice_from_raw_parts_mut, NonNull};
use std::sync::{Arc, Weak};

use zx::Status;

use fdf_sys::*;

pub use fdf_sys::fdf_arena_t;

/// Implements a memory arena allocator to be used with the Fuchsia Driver
/// Runtime when sending and receiving from channels.
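///
/// # Example
///
/// A minimal usage sketch (ignored in doc tests, since creating an
/// [`Arena`] requires the driver runtime environment):
///
/// ```ignore
/// let arena = Arena::new();
/// let value = arena.insert(42);
/// assert_eq!(*value, 42);
/// // The allocation is freed when the arena and all boxes that
/// // reference it have been dropped.
/// ```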
#[derive(Debug)]
pub struct Arena(pub(crate) NonNull<fdf_arena_t>);

// SAFETY: The api for `fdf_arena_t` is thread safe
unsafe impl Send for Arena {}
unsafe impl Sync for Arena {}

impl Arena {
    /// Allocates a new arena for use with the driver runtime
    pub fn new() -> Self {
        let mut arena = null_mut();
        // SAFETY: the address we pass to fdf_arena_create is allocated on
        // the stack and appropriately sized.
        // This call cannot fail as the only reason it would fail is due to invalid
        // option flags, and 0 is a valid option.
        Status::ok(unsafe { fdf_arena_create(0, 0, &mut arena) }).expect("Failed to create arena");
        // SAFETY: if fdf_arena_create returned ZX_OK, it will have placed
        // a non-null pointer.
        Arena(unsafe { NonNull::new_unchecked(arena) })
    }

    /// Creates an arena from a raw pointer to the arena object.
    ///
    /// # Safety
    ///
    /// The caller is responsible for ensuring that only one [`Arena`]
    /// is constructed from this pointer, and that it has not previously
    /// been freed.
    pub unsafe fn from_raw(ptr: NonNull<fdf_arena_t>) -> Self {
        Self(ptr)
    }

    /// Returns true if the allocation pointed to was made by this arena
    pub fn contains_ptr<T: ?Sized>(&self, ptr: &T) -> bool {
        // SAFETY: self.0 is valid as constructed, and `fdf_arena_contains` does not access data at the
        // pointer but just compares its pointer value to the buffers in the arena.
        unsafe {
            fdf_arena_contains(self.0.as_ptr(), ptr as *const _ as *const _, size_of_val(ptr))
        }
    }

    /// Returns true if the allocation was made by this arena
    pub fn contains<T: ?Sized>(&self, item: &ArenaBox<'_, T>) -> bool {
        self.contains_ptr(ArenaBox::deref(item))
    }

    /// Allocates the appropriate amount of memory for the given layout and
    /// returns a pointer to `T` at the start of that memory.
    ///
    /// # Safety
    ///
    /// The caller is responsible for making sure that the `Layout` is valid
    /// for storing one or more values of type `T`. This may be a single
    /// object or a slice of them, but the layout's size must be a multiple
    /// of the size of `T`.
    unsafe fn alloc_bytes_for<T>(&self, layout: Layout) -> NonNull<T> {
        // We make sure we allocate at least one byte so we return a unique
        // pointer that is within the arena, which will ensure that subsequent
        // verifications that the memory location is in the arena will pass.
        let bytes = max(layout.size(), 1);
        // SAFETY: Allocating a block of memory in the arena large enough to store
        // the object we're allocating.
        let storage =
            unsafe { NonNull::new_unchecked(fdf_arena_allocate(self.0.as_ptr(), bytes) as *mut T) };
        // TODO(b/352119228): when the arena allocator allows specifying alignment, use that
        // instead of asserting the alignment after the fact.
        assert_eq!(
            storage.align_offset(layout.align()),
            0,
            "Arena returned an improperly aligned pointer: {}",
            core::any::type_name::<T>(),
        );
        storage
    }

    /// Inserts a [`MaybeUninit`] object and returns the [`ArenaBox`] of it.
    pub fn insert_uninit<T: Sized>(&self) -> ArenaBox<'_, MaybeUninit<T>> {
        let layout = Layout::new::<MaybeUninit<T>>();
        // SAFETY: The layout we're passing to `alloc_bytes_for` is for zero or
        // more objects of type `T`, which is the pointer type we get back from
        // it.
        unsafe { ArenaBox::new(self.alloc_bytes_for(layout)) }
    }

    /// Inserts a slice of [`MaybeUninit`] objects of len `len`
    ///
    /// # Panics
    ///
    /// Panics if an array `[T; n]` is too large to be allocated.
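    ///
    /// # Example
    ///
    /// A sketch of initializing the returned slice element by element and
    /// then marking it initialized (ignored in doc tests, since it requires
    /// the driver runtime):
    ///
    /// ```ignore
    /// let arena = Arena::new();
    /// let mut slice = arena.insert_uninit_slice::<u32>(3);
    /// for (i, item) in slice.iter_mut().enumerate() {
    ///     item.write(i as u32);
    /// }
    /// // SAFETY: every element of the slice was just initialized.
    /// let slice = unsafe { slice.assume_init_slice() };
    /// assert_eq!(&*slice, &[0, 1, 2]);
    /// ```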
    pub fn insert_uninit_slice<T: Sized>(&self, len: usize) -> ArenaBox<'_, [MaybeUninit<T>]> {
        let layout = Layout::array::<MaybeUninit<T>>(len).expect("allocation too large");
        // SAFETY: The layout we're passing to `alloc_bytes_for` is for zero or
        // more objects of type `T`, which is the pointer type we get back from
        // it.
        let storage = unsafe { self.alloc_bytes_for(layout) };
        // At this point we have a `*mut T` but we need to return a `[T]`,
        // which is unsized. We need to use [`slice_from_raw_parts_mut`]
        // to construct the unsized pointer from the data and its length.
        let ptr = slice_from_raw_parts_mut(storage.as_ptr(), len);
        // SAFETY: alloc_bytes_for is expected to return a valid pointer.
        unsafe { ArenaBox::new(NonNull::new_unchecked(ptr)) }
    }

    /// Moves `obj` of type `T` into the arena and returns an [`ArenaBox`]
    /// containing the moved value.
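    ///
    /// # Example
    ///
    /// A minimal sketch (ignored in doc tests, since it requires the driver
    /// runtime):
    ///
    /// ```ignore
    /// let arena = Arena::new();
    /// let value = arena.insert(String::from("hello"));
    /// assert_eq!(&*value, "hello");
    /// ```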
    pub fn insert<T: Sized>(&self, obj: T) -> ArenaBox<'_, T> {
        let mut uninit = self.insert_uninit();
        uninit.write(obj);
        // SAFETY: we wrote `obj` to the object
        unsafe { uninit.assume_init() }
    }

    /// Moves a [`Box`]ed slice into the arena and returns an [`ArenaBox`]
    /// containing the moved value.
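    ///
    /// # Example
    ///
    /// A sketch of moving a heap-allocated slice into the arena (ignored in
    /// doc tests, since it requires the driver runtime):
    ///
    /// ```ignore
    /// let arena = Arena::new();
    /// let boxed: Box<[u32]> = Box::new([1, 2, 3]);
    /// let in_arena = arena.insert_boxed_slice(boxed);
    /// assert_eq!(&*in_arena, &[1, 2, 3]);
    /// ```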
    pub fn insert_boxed_slice<T: Sized>(&self, slice: Box<[T]>) -> ArenaBox<'_, [T]> {
        let layout = Layout::for_value(&*slice);
        let len = slice.len();
        // SAFETY: The layout we give `alloc_bytes_for` is for storing 0 or more
        // objects of type `T`, which is the pointer type we get from it.
        let storage = unsafe { self.alloc_bytes_for(layout) };
        let original_storage = Box::into_raw(slice);
        // SAFETY: Moving the object into the arena memory we just allocated by
        // first copying the bytes over and then deallocating the raw memory
        // we took from the box.
        let slice_box = unsafe {
            core::ptr::copy_nonoverlapping(original_storage as *mut T, storage.as_ptr(), len);
            let slice_ptr = slice_from_raw_parts_mut(storage.as_ptr(), len);
            ArenaBox::new(NonNull::new_unchecked(slice_ptr))
        };
        if layout.size() != 0 {
            // SAFETY: Since we have decomposed the Box we have to deallocate it,
            // but only if it's not dangling.
            unsafe {
                std::alloc::dealloc(original_storage as *mut u8, layout);
            }
        }
        slice_box
    }

    /// Copies the slice into the arena and returns an [`ArenaBox`] containing
    /// the copied values.
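    ///
    /// # Example
    ///
    /// A minimal sketch (ignored in doc tests, since it requires the driver
    /// runtime):
    ///
    /// ```ignore
    /// let arena = Arena::new();
    /// let values = arena.insert_slice(&[5, 6, 7, 8]);
    /// assert_eq!(&*values, &[5, 6, 7, 8]);
    /// ```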
    pub fn insert_slice<T: Sized + Clone>(&self, slice: &[T]) -> ArenaBox<'_, [T]> {
        let len = slice.len();
        let mut uninit_slice = self.insert_uninit_slice(len);
        for (from, to) in slice.iter().zip(uninit_slice.iter_mut()) {
            to.write(from.clone());
        }

        // SAFETY: we wrote `from.clone()` to each item of the slice.
        unsafe { uninit_slice.assume_init_slice() }
    }

    /// Inserts a slice of [`Default`]-initialized objects of type `T` to the
    /// arena and returns an [`ArenaBox`] of it.
    ///
    /// # Panics
    ///
    /// Panics if an array `[T; n]` is too large to be allocated.
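    ///
    /// # Example
    ///
    /// A sketch of allocating a zero-initialized byte buffer (ignored in doc
    /// tests, since it requires the driver runtime):
    ///
    /// ```ignore
    /// let arena = Arena::new();
    /// let buffer: ArenaBox<'_, [u8]> = arena.insert_default_slice(16);
    /// assert_eq!(&*buffer, &[0u8; 16]);
    /// ```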
    pub fn insert_default_slice<T: Sized + Default>(&self, len: usize) -> ArenaBox<'_, [T]> {
        let mut uninit_slice = self.insert_uninit_slice(len);
        for i in uninit_slice.iter_mut() {
            i.write(T::default());
        }
        // SAFETY: we wrote `T::default()` to each item of the slice.
        unsafe { uninit_slice.assume_init_slice() }
    }

    /// Returns an ArenaBox for the pointed to object, assuming that it is part
    /// of this arena.
    ///
    /// # Safety
    ///
    /// This does not verify that the pointer came from this arena,
    /// so the caller is responsible for verifying that.
    pub unsafe fn assume_unchecked<T: ?Sized>(&self, ptr: NonNull<T>) -> ArenaBox<'_, T> {
        // SAFETY: Caller is responsible for ensuring this per safety doc section.
        unsafe { ArenaBox::new(ptr) }
    }

    /// Returns an [`ArenaBox`] for the pointed to object, verifying that it
    /// is a part of this arena in the process.
    ///
    /// # Panics
    ///
    /// This function panics if the given pointer is not in this [`Arena`].
    ///
    /// # Safety
    ///
    /// The caller is responsible for ensuring that only one [`ArenaBox`] is constructed
    /// for a given pointer, and that the pointer originated from an `ArenaBox<T>` or
    /// a direct allocation with the arena through [`fdf_arena_allocate`], and is:
    /// - initialized to a value of `T`.
    /// - properly aligned for `T`.
    /// - pointing to the beginning of the object, and not to a subfield of another
    ///   [`ArenaBox`]ed object.
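    ///
    /// # Example
    ///
    /// A sketch of round-tripping an allocation through a raw pointer
    /// (ignored in doc tests, since it requires the driver runtime):
    ///
    /// ```ignore
    /// let arena = Arena::new();
    /// let value = arena.insert(1);
    /// // SAFETY: the pointer is turned back into exactly one `ArenaBox` below.
    /// let ptr = unsafe { ArenaBox::into_ptr(value) };
    /// // SAFETY: `ptr` came from an `ArenaBox` allocated by this arena.
    /// let value = unsafe { arena.assume(ptr) };
    /// assert_eq!(*value, 1);
    /// ```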
    pub unsafe fn assume<T: ?Sized>(&self, ptr: NonNull<T>) -> ArenaBox<'_, T> {
        // SAFETY: caller promises the pointer is initialized and valid
        assert!(
            self.contains_ptr(unsafe { ptr.as_ref() }),
            "Arena can't assume ownership over a pointer not allocated from within it"
        );
        // SAFETY: we verified above that the pointer was allocated from
        // within this arena.
        unsafe { self.assume_unchecked(ptr) }
    }

    /// Moves the given [`ArenaBox`] into an [`ArenaRc`] with an owned
    /// reference to this [`Arena`], allowing for it to be used in `'static`
    /// contexts.
    ///
    /// # Panics
    ///
    /// Panics if the given [`ArenaBox`] is not allocated from this arena.
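    ///
    /// # Example
    ///
    /// A sketch of sharing an arena allocation between owners (ignored in
    /// doc tests, since it requires the driver runtime):
    ///
    /// ```ignore
    /// let arena = Arena::new();
    /// let rc = arena.make_rc(arena.insert(1));
    /// let rc2 = rc.clone();
    /// assert_eq!(*rc, *rc2);
    /// ```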
    pub fn make_rc<T: ?Sized>(&self, data: ArenaBox<'_, T>) -> ArenaRc<T> {
        assert!(self.contains(&data), "Arena doesn't own the ArenaBox");
        // SAFETY: we just checked the box is owned by this arena.
        unsafe { ArenaRc::new_unchecked(self.clone(), data) }
    }

    /// Moves the given [`ArenaBox`] into an [`ArenaStaticBox`] with an owned
    /// reference to this [`Arena`], allowing for it to be used in `'static`
    /// contexts.
    ///
    /// # Panics
    ///
    /// Panics if the given [`ArenaBox`] is not allocated from this arena.
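    ///
    /// # Example
    ///
    /// A sketch of giving an allocation a `'static` lifetime (ignored in doc
    /// tests, since it requires the driver runtime):
    ///
    /// ```ignore
    /// let arena = Arena::new();
    /// let value: ArenaStaticBox<i32> = arena.make_static(arena.insert(1));
    /// // `value` holds a reference to the arena, so it can outlive this scope.
    /// assert_eq!(*value, 1);
    /// ```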
    pub fn make_static<T: ?Sized>(&self, data: ArenaBox<'_, T>) -> ArenaStaticBox<T> {
        assert!(self.contains(&data), "Arena doesn't own the ArenaBox");
        // SAFETY: we just checked the box is owned by this arena.
        unsafe { ArenaStaticBox::new_unchecked(self.clone(), data) }
    }

    /// Creates an [`ArenaBox`]ed slice from an iterator implementing [`ExactSizeIterator`]. Note
    /// that if [`ExactSizeIterator::len`] returns an incorrect value, the returned [`ArenaBox`]
    /// will contain no more elements than the reported length, and may contain fewer.
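    ///
    /// # Example
    ///
    /// A minimal sketch (ignored in doc tests, since it requires the driver
    /// runtime):
    ///
    /// ```ignore
    /// let arena = Arena::new();
    /// let values = arena.insert_from_iter(vec![1, 2, 3, 4]);
    /// assert_eq!(&*values, &[1, 2, 3, 4]);
    /// ```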
    pub fn insert_from_iter<I: IntoIterator>(&self, source: I) -> ArenaBox<'_, [I::Item]>
    where
        I::IntoIter: ExactSizeIterator,
    {
        let iter = source.into_iter();
        let len = iter.len();
        let mut actual_len = 0;
        let mut storage = self.insert_uninit_slice(len);
        for (output, input) in storage.iter_mut().zip(iter) {
            output.write(input);
            actual_len += 1;
        }
        // SAFETY: we wrote to `actual_len` elements of the storage
        unsafe { ArenaBox::assume_init_slice_len(storage, actual_len) }
    }

    /// Tries to create an [`ArenaBox`]ed slice from an iterator implementing [`ExactSizeIterator`].
    /// Note that if [`ExactSizeIterator::len`] returns an incorrect value, the returned
    /// [`ArenaBox`] will contain no more elements than the reported length, and may contain fewer.
    ///
    /// If any item produced by the iterator is an `Err` result, the values collected so far are
    /// dropped and the error is returned.
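    ///
    /// # Example
    ///
    /// A sketch of collecting fallible values (ignored in doc tests, since it
    /// requires the driver runtime):
    ///
    /// ```ignore
    /// let arena = Arena::new();
    /// let ok: Result<_, ()> = arena.try_insert_from_iter(vec![Ok(1), Ok(2)]);
    /// assert_eq!(&*ok.unwrap(), &[1, 2]);
    /// let err: Result<ArenaBox<'_, [i32]>, i32> =
    ///     arena.try_insert_from_iter(vec![Ok(1), Err(-1)]);
    /// assert_eq!(err.err(), Some(-1));
    /// ```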
    pub fn try_insert_from_iter<I, T, E>(&self, source: I) -> Result<ArenaBox<'_, [T]>, E>
    where
        I: IntoIterator<Item = Result<T, E>>,
        I::IntoIter: ExactSizeIterator,
    {
        let iter = source.into_iter();
        let len = iter.len();
        let mut actual_len = 0;
        let mut storage = self.insert_uninit_slice(len);
        for (output, input) in storage.iter_mut().zip(iter) {
            match input {
                Ok(input) => {
                    output.write(input);
                    actual_len += 1;
                }
                Err(e) => {
                    // `assume_init` the slice so far so that drop handlers are properly called on the
                    // items already moved. This will be dropped immediately.
                    // SAFETY: `actual_len` will be the length of moved values into the slice so far.
                    unsafe { ArenaBox::assume_init_slice_len(storage, actual_len) };
                    return Err(e);
                }
            }
        }
        // SAFETY: we wrote to `actual_len` elements of the storage
        Ok(unsafe { ArenaBox::assume_init_slice_len(storage, actual_len) })
    }

    /// Transforms this Arena into an fdf_arena_t without dropping the reference.
    ///
    /// If the caller drops the returned fdf_arena_t, the memory allocated by the
    /// arena will never be freed.
    pub fn into_raw(self) -> NonNull<fdf_arena_t> {
        let res = self.0;
        core::mem::forget(self);
        res
    }
}

impl Clone for Arena {
    fn clone(&self) -> Self {
        // SAFETY: We own this arena reference and so we can add ref it
        unsafe { fdf_arena_add_ref(self.0.as_ptr()) }
        Self(self.0)
    }
}

impl Drop for Arena {
    fn drop(&mut self) {
        // SAFETY: We own this arena reference and so we can drop it.
        unsafe { fdf_arena_drop_ref(self.0.as_ptr()) }
    }
}

/// Holds a reference to data of type `T` in an [`Arena`] with lifetime `'a`,
/// and ensures that the object is properly dropped before the [`Arena`] goes
/// out of scope.
#[derive(Debug)]
pub struct ArenaBox<'a, T: ?Sized>(NonNull<T>, PhantomData<&'a Arena>);

// SAFETY: [`ArenaBox`] impls [`Send`] and [`Sync`] if `T` impls them.
unsafe impl<'a, T: ?Sized> Send for ArenaBox<'a, T> where T: Send {}
unsafe impl<'a, T: ?Sized> Sync for ArenaBox<'a, T> where T: Sync {}

impl<'a, T> ArenaBox<'a, T> {
    /// Moves the inner value of this ArenaBox out to owned storage.
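    ///
    /// # Example
    ///
    /// A minimal sketch (ignored in doc tests, since it requires the driver
    /// runtime):
    ///
    /// ```ignore
    /// let arena = Arena::new();
    /// let boxed = arena.insert(String::from("hello"));
    /// let owned: String = ArenaBox::take(boxed);
    /// assert_eq!(owned, "hello");
    /// ```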
    pub fn take(value: Self) -> T {
        // SAFETY: `Self::into_ptr` will forget `value` and prevent
        // calling its `drop`.
        unsafe { core::ptr::read(Self::into_ptr(value).as_ptr()) }
    }

    /// Moves the inner value of this ArenaBox out into a [`Box`] using the
    /// global allocator. Using this instead of `Box::new(ArenaBox::take(v))`
    /// helps to avoid any additional copies of the storage on its way to the
    /// box.
    ///
    /// Note: if you want to take a slice, you will need to use
    /// [`Self::take_boxed_slice`].
    pub fn take_boxed(value: Self) -> Box<T> {
        // SAFETY: we are allocating space for `T` with the layout of `T`, so
        // this is simple.
        let storage = unsafe { global_alloc(Layout::for_value(&*value)) };
        // SAFETY: storage is sufficiently large to store the value in `value`
        // and we used Layout to make sure that Box will be happy with its
        // layout.
        unsafe {
            core::ptr::write(storage.as_ptr(), Self::take(value));
            Box::from_raw(storage.as_ptr())
        }
    }
}

impl<'a, T> ArenaBox<'a, MaybeUninit<T>> {
    /// Assumes the contents of this [`MaybeUninit`] box are initialized now.
    ///
    /// # Safety
    ///
    /// The caller is responsible for ensuring that the value is initialized
    /// properly. See [`MaybeUninit::assume_init`] for more details on the
    /// safety requirements of this.
    pub unsafe fn assume_init(self) -> ArenaBox<'a, T> {
        // SAFETY: This pointer came from an `ArenaBox` we just leaked,
        // and casting `*MaybeUninit<T>` to `*T` is safe.
        unsafe { ArenaBox::new(ArenaBox::into_ptr(self).cast()) }
    }
}

impl<'a, T> ArenaBox<'a, [MaybeUninit<T>]> {
    /// Assumes the contents of this box of `[MaybeUninit<T>]` are initialized now.
    ///
    /// # Safety
    ///
    /// The caller is responsible for ensuring that the value is initialized
    /// properly. See [`MaybeUninit::assume_init`] for more details on the
    /// safety requirements of this.
    pub unsafe fn assume_init_slice(self) -> ArenaBox<'a, [T]> {
        let len = self.len();
        // SAFETY: We are about to reconstitute this pointer back into
        // a new `ArenaBox` with the same lifetime, and casting
        // `MaybeUninit<T>` to `T` is safe.
        let data: NonNull<T> = unsafe { ArenaBox::into_ptr(self) }.cast();
        let slice_ptr = NonNull::slice_from_raw_parts(data, len);

        // SAFETY: We just got this pointer from an `ArenaBox` we decomposed.
        unsafe { ArenaBox::new(slice_ptr) }
    }

    /// Assumes the contents of this box of `[MaybeUninit<T>]` are initialized now,
    /// up to `len` elements and ignores the rest.
    ///
    /// # Safety
    ///
    /// The caller is responsible for ensuring that the value is initialized
    /// properly. See [`MaybeUninit::assume_init`] for more details on the
    /// safety requirements of this.
    pub unsafe fn assume_init_slice_len(self, len: usize) -> ArenaBox<'a, [T]> {
        // only use up to `len` elements of the slice.
        let len = self.len().min(len);
        // SAFETY: We are about to reconstitute this pointer back into
        // a new `ArenaBox` with the same lifetime, and casting
        // `MaybeUninit<T>` to `T` is safe.
        let data: NonNull<T> = unsafe { ArenaBox::into_ptr(self) }.cast();
        let slice_ptr = NonNull::slice_from_raw_parts(data, len);

        // SAFETY: We just got this pointer from an `ArenaBox` we decomposed.
        unsafe { ArenaBox::new(slice_ptr) }
    }
}

impl<'a, T> ArenaBox<'a, [T]> {
    /// Like [`Self::take_boxed`], this moves the inner value of this ArenaBox
    /// out into a [`Box`] using the global allocator, avoiding additional
    /// copies of the data. Unlike that method, it works on slices of `T`,
    /// which are unsized and so require special handling.
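    ///
    /// # Example
    ///
    /// A sketch of moving a slice out of the arena and onto the global heap
    /// (ignored in doc tests, since it requires the driver runtime):
    ///
    /// ```ignore
    /// let arena = Arena::new();
    /// let slice = arena.insert_slice(&[1, 2, 3]);
    /// let boxed: Box<[i32]> = ArenaBox::take_boxed_slice(slice);
    /// assert_eq!(&*boxed, &[1, 2, 3]);
    /// ```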
    pub fn take_boxed_slice(value: Self) -> Box<[T]> {
        let len = value.len();
        // SAFETY: we are using the layout of the slice value of type `[T]` to
        // allocate a pointer to the first element of the storage for the new
        // slice, which is of type `T`.
        let storage = unsafe { global_alloc(Layout::for_value(&*value)) };
        // SAFETY: storage is sufficiently large to store the slice in `value`
        let slice_ptr = unsafe {
            core::ptr::copy_nonoverlapping(
                Self::into_ptr(value).as_ptr() as *mut T,
                storage.as_ptr(),
                len,
            );
            core::ptr::slice_from_raw_parts_mut(storage.as_ptr(), len)
        };
        // SAFETY: we used Layout to make sure that Box will be happy with the
        // layout of the stored value.
        unsafe { Box::from_raw(slice_ptr) }
    }
}

impl<'a, T: ?Sized> ArenaBox<'a, T> {
    /// Creates a new [`ArenaBox`] from the given non-null pointer to an object of type `T`.
    ///
    /// # Safety
    ///
    /// The caller is responsible for ensuring that the object pointed to came from an [`Arena`]
    /// and that the lifetime of this box is less than the lifetime of that [`Arena`].
    pub unsafe fn new(obj: NonNull<T>) -> ArenaBox<'a, T> {
        Self(obj, PhantomData)
    }

    /// Decomposes this [`ArenaBox`] into its pointer.
    ///
    /// # Safety
    ///
    /// This is unsafe because it loses the lifetime of the [`Arena`] it
    /// came from. The caller must make sure to not let the pointer outlive the
    /// arena. The caller is also responsible for making sure the object is
    /// dropped before the [`Arena`], or it may leak resources.
    pub unsafe fn into_ptr(value: Self) -> NonNull<T> {
        let res = value.0;
        core::mem::forget(value);
        res
    }

    /// Turns this [`ArenaBox`] into one with the given lifetime.
    ///
    /// # Safety
    ///
    /// This is unsafe because it loses the lifetime of the [`Arena`] it
    /// came from. The caller must make sure to not let the
    /// [`ArenaBox`] outlive the [`Arena`] it was created from. The caller
    /// is also responsible for making sure the object is dropped before
    /// the [`Arena`], or it may leak resources.
    pub unsafe fn erase_lifetime(value: Self) -> ArenaBox<'static, T> {
        // SAFETY: the caller promises to ensure this object does not
        // outlive the arena.
        unsafe { ArenaBox::new(ArenaBox::into_ptr(value)) }
    }

    /// Consumes and leaks this [`ArenaBox`], returning a mutable reference
    /// to its contents.
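    ///
    /// # Example
    ///
    /// A sketch (ignored in doc tests, since it requires the driver runtime).
    /// The leaked reference stays valid for as long as the arena does:
    ///
    /// ```ignore
    /// let arena = Arena::new();
    /// let leaked: &mut i32 = ArenaBox::leak(arena.insert(1));
    /// *leaked = 2;
    /// assert_eq!(*leaked, 2);
    /// ```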
    pub fn leak(mut this: Self) -> &'a mut T {
        // SAFETY: we forget `this` immediately below, so the mutable
        // reference we hand out is the only remaining way to access the
        // contents.
        let res = unsafe { this.0.as_mut() };
        core::mem::forget(this);
        res
    }
}

impl<'a> ArenaBox<'a, [MaybeUninit<u8>]> {
    /// Transforms the [`ArenaBox`] into an `ArenaBox<T>`.
    ///
    /// # Safety
    ///
    /// The caller is responsible for ensuring that the contents of this
    /// [`ArenaBox`] originated from a source that properly allocated a `T`
    /// with correct alignment.
    pub unsafe fn cast_unchecked<T>(this: Self) -> ArenaBox<'a, T> {
        let ptr = this.0.cast();
        // Ensure we don't drop the original `ArenaBox`.
        core::mem::forget(this);
        // SAFETY: caller promises this is the correct type
        unsafe { ArenaBox::new(ptr) }
    }
}

impl<'a, T: ?Sized> Drop for ArenaBox<'a, T> {
    fn drop(&mut self) {
        // SAFETY: Since this value is allocated in the arena, and the arena
        // will not drop the value, and ArenaBox can't be cloned, this ArenaBox
        // owns the value and can drop it.
        unsafe { core::ptr::drop_in_place(self.0.as_ptr()) }
    }
}

impl<T: ?Sized> Deref for ArenaBox<'_, T> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        // SAFETY: As these methods are the only way to get a reference to the
        // contents of the ArenaBox, rust will enforce the aliasing rules
        // of the contents of the inner `NonNull` object.
        unsafe { self.0.as_ref() }
    }
}

impl<T: ?Sized> DerefMut for ArenaBox<'_, T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: As these methods are the only way to get a reference to the
        // contents of the ArenaBox, rust will enforce the aliasing rules
        // of the contents of the inner `NonNull` object.
        unsafe { self.0.as_mut() }
    }
}

impl<'a, T: 'a> IntoIterator for ArenaBox<'a, [T]> {
    type IntoIter = IntoIter<T, PhantomData<&'a Arena>>;
    type Item = T;

    fn into_iter(self) -> Self::IntoIter {
        let len = self.len();
        let ptr = self.0.cast();
        // SAFETY: we will never dereference `end`
        let end = unsafe { ptr.add(len) };
        // the IntoIter now owns the data, so we don't want to drop them here.
        core::mem::forget(self);
        IntoIter { ptr, end, _arena: PhantomData }
    }
}

/// The implementation for an [`IntoIterator`] of an [`ArenaBox`] of a slice that manages the
/// memory behind it and ensures that it's cleaned up.
pub struct IntoIter<T, A> {
    ptr: NonNull<T>,
    end: NonNull<T>,
    _arena: A,
}

impl<T, A> Iterator for IntoIter<T, A> {
    type Item = T;

    fn next(&mut self) -> Option<Self::Item> {
        if self.ptr == self.end {
            return None;
        }
        // SAFETY: all items from `ptr` to `end-1` are valid until moved out.
        unsafe {
            let res = self.ptr.read();
            self.ptr = self.ptr.add(1);
            Some(res)
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.len();
        (len, Some(len))
    }
}

impl<T, A> ExactSizeIterator for IntoIter<T, A> {
    fn len(&self) -> usize {
        // SAFETY: end is always >= ptr
        unsafe { self.end.offset_from(self.ptr) as usize }
    }
}

impl<T, A> Drop for IntoIter<T, A> {
    fn drop(&mut self) {
        // go through and read all remaining items to drop them
        while self.ptr != self.end {
            // SAFETY: all items from `ptr` to `end-1` are valid until moved out.
            unsafe {
                drop(self.ptr.read());
                self.ptr = self.ptr.add(1);
            }
        }
    }
}

/// An equivalent to [`ArenaBox`] that holds onto a reference to the
/// arena to allow it to have static lifetime.
#[derive(Debug)]
pub struct ArenaStaticBox<T: ?Sized> {
    data: ArenaBox<'static, T>,
    // Safety Note: it is important that this be last in the struct so that it is
    // guaranteed to be freed after the [`ArenaBox`].
    arena: Arena,
}

// SAFETY: [`ArenaStaticBox`] impls [`Send`] and [`Sync`] if `T` impls them.
unsafe impl<T: ?Sized> Send for ArenaStaticBox<T> where T: Send {}
unsafe impl<T: ?Sized> Sync for ArenaStaticBox<T> where T: Sync {}

impl<T: ?Sized> ArenaStaticBox<T> {
    /// Transforms the given [`ArenaBox`] into an [`ArenaStaticBox`] with an owned
    /// reference to the given [`Arena`], allowing for it to be used in `'static`
    /// contexts.
    ///
    /// # Safety
    ///
    /// The caller must ensure that the given [`ArenaBox`] is owned by this
    /// arena, or it may result in use-after-free.
    pub unsafe fn new_unchecked(arena: Arena, data: ArenaBox<'_, T>) -> ArenaStaticBox<T> {
        // SAFETY: The `ArenaBox` will not outlive the `Arena` as it is owned
        // by the current struct and can't be moved out.
        let data = unsafe { ArenaBox::erase_lifetime(data) };
        Self { data, arena }
    }

    /// Takes ownership over the arena and data backing the given
    /// [`ArenaStaticBox`].
    ///
    /// This returns an [`ArenaBox`] tied to the lifetime of the `&mut Option<Arena>`
    /// given, and places the arena in that space.
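    ///
    /// # Example
    ///
    /// A sketch of recovering the arena and a lifetime-bound box (ignored in
    /// doc tests, since it requires the driver runtime):
    ///
    /// ```ignore
    /// let arena = Arena::new();
    /// let static_box = arena.make_static(arena.insert(1));
    /// let mut slot = None;
    /// let value = ArenaStaticBox::unwrap(static_box, &mut slot);
    /// assert!(slot.is_some());
    /// assert_eq!(*value, 1);
    /// ```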
    pub fn unwrap(this: Self, arena: &mut Option<Arena>) -> ArenaBox<'_, T> {
        let ArenaStaticBox { data, arena: inner_arena } = this;
        arena.replace(inner_arena);
        data
    }

    /// Takes ownership of the arena and data backing the given
    /// [`ArenaStaticBox`] as raw pointers.
    ///
    /// Note that while this is safe, care must be taken to ensure that
    /// the raw pointer to the data is not accessed after the arena pointer has
    /// been released.
    pub fn into_raw(this: Self) -> (NonNull<fdf_arena_t>, NonNull<T>) {
        let res = (this.arena.0, this.data.0);
        // make sure that drop handlers aren't called for the arena
        // or box
        core::mem::forget(this);
        res
    }
}

impl<T: 'static> IntoIterator for ArenaStaticBox<[T]> {
    type IntoIter = IntoIter<T, Arena>;
    type Item = T;

    fn into_iter(self) -> Self::IntoIter {
        let len = self.len();
        let ArenaStaticBox { data, arena } = self;
        let ptr = data.0.cast();
        // SAFETY: we will never dereference `end`
        let end = unsafe { ptr.add(len) };
        // The `IntoIter` now owns the elements, so forget the `ArenaBox` to
        // avoid a double drop of its contents.
        core::mem::forget(data);
        IntoIter { ptr, end, _arena: arena }
    }
}

impl<T: ?Sized> Deref for ArenaStaticBox<T> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        ArenaBox::deref(&self.data)
    }
}

impl<T: ?Sized> DerefMut for ArenaStaticBox<T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        ArenaBox::deref_mut(&mut self.data)
    }
}

/// An equivalent to [`ArenaBox`] that holds onto a reference to the
/// arena to allow it to have static lifetime, and implements [`Clone`]
/// allowing it to be shared. Since it's shared, you can't get a mutable
/// reference to it back without using [`Self::try_unwrap`] to get the
/// inner [`ArenaStaticBox`].
#[derive(Clone, Debug)]
pub struct ArenaRc<T: ?Sized>(Arc<ArenaStaticBox<T>>);

/// A weak reference to an [`ArenaRc`].
#[derive(Clone, Debug)]
pub struct ArenaWeak<T: ?Sized>(Weak<ArenaStaticBox<T>>);

impl<T: ?Sized> ArenaRc<T> {
    /// Transforms the given [`ArenaBox`] into an [`ArenaRc`] with an owned
    /// reference to the given [`Arena`], allowing for it to be used in `'static`
    /// contexts.
    ///
    /// # Safety
    ///
    /// The caller must ensure that the given [`ArenaBox`] is owned by this
    /// arena, or it may result in use-after-free.
    pub unsafe fn new_unchecked(arena: Arena, data: ArenaBox<'_, T>) -> ArenaRc<T> {
        // SAFETY: The `ArenaBox` will not outlive the `Arena` as it is owned
        // by the current struct and can't be moved out.
        let data = unsafe { ArenaBox::erase_lifetime(data) };
        Self(Arc::new(ArenaStaticBox { arena, data }))
    }

    /// Downgrades the given [`ArenaRc`] into an [`ArenaWeak`].
    pub fn downgrade(this: &Self) -> ArenaWeak<T> {
        ArenaWeak(Arc::downgrade(&this.0))
    }

    /// Attempts to take ownership over the arena and data backing the given
    /// [`ArenaRc`] if there is only one strong reference held to it.
    ///
    /// If there is only one strong reference, this returns the inner
    /// [`ArenaStaticBox`]. Otherwise, the original [`ArenaRc`] is returned
    /// as the error.
    pub fn try_unwrap(this: Self) -> Result<ArenaStaticBox<T>, Self> {
        Arc::try_unwrap(this.0).map_err(Self)
    }
}

impl<T: ?Sized> From<ArenaStaticBox<T>> for ArenaRc<T> {
    fn from(value: ArenaStaticBox<T>) -> Self {
        Self(Arc::new(value))
    }
}

impl<T: ?Sized> Deref for ArenaRc<T> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        ArenaBox::deref(&self.0.data)
    }
}

impl<T: ?Sized> ArenaWeak<T> {
    /// Converts this [`ArenaWeak`] into a strong reference [`ArenaRc`] if there are still any
    /// outstanding strong references to it.
    pub fn upgrade(&self) -> Option<ArenaRc<T>> {
        self.0.upgrade().map(ArenaRc)
    }
}

/// Helper for allocating storage on the global heap suitable for the given
/// `layout`. The returned pointer is typed as `T` so that it can also be used
/// to allocate storage for unsized slices of `T`, in which case `layout`
/// should describe an array of `T`.
///
/// This also correctly handles a zero sized layout by returning
/// [`NonNull::dangling`].
///
/// # Safety
///
/// In addition to all the safety requirements of [`std::alloc::alloc`], the
/// caller must ensure that `layout` describes storage for one or more values
/// of type `T`.
unsafe fn global_alloc<T>(layout: Layout) -> NonNull<T> {
    let storage = if layout.size() == 0 {
        NonNull::dangling()
    } else {
        let ptr = unsafe { std::alloc::alloc(layout) };
        if ptr.is_null() {
            std::alloc::handle_alloc_error(layout);
        }
        unsafe { NonNull::new_unchecked(ptr as *mut T) }
    };
    storage
}

#[cfg(test)]
mod tests {
    use std::sync::mpsc;

    use super::*;
    use crate::test_utils::*;

    #[test]
    fn arena_allocations() {
        let arena = Arena::new();
        let _val = arena.insert(());
        let val = arena.insert(1);
        assert_eq!(*val, 1);
        let val = arena.insert(2);
        assert_eq!(*val, 2);
        let val = arena.insert_boxed_slice(Box::new([1, 2, 3, 4]));
        assert_eq!(&*val, &[1, 2, 3, 4]);
        let val: ArenaBox<'_, [()]> = arena.insert_boxed_slice(Box::new([]));
        assert_eq!(&*val, &[]);
        let val = arena.insert_slice(&[5, 6, 7, 8]);
        assert_eq!(&*val, &[5, 6, 7, 8]);
        let val: ArenaBox<'_, [()]> = arena.insert_slice(&[]);
        assert_eq!(&*val, &[]);
        let val: ArenaBox<'_, [u8]> = arena.insert_default_slice(10);
        assert_eq!(&*val, &[0; 10]);
    }

    #[test]
    #[allow(clippy::unit_cmp)]
    fn arena_take() {
        let arena = Arena::new();
        let val = arena.insert(());
        assert_eq!(ArenaBox::take(val), ());
        let val = arena.insert(1);
        assert_eq!(ArenaBox::take(val), 1);
    }

    #[test]
    #[allow(clippy::unit_cmp)]
    fn arena_take_boxed() {
        let arena = Arena::new();
        let val = arena.insert(());
        assert_eq!(*ArenaBox::take_boxed(val), ());
        let val = arena.insert(1);
        assert_eq!(*ArenaBox::take_boxed(val), 1);
    }

    #[test]
    fn arena_take_boxed_slice() {
        let arena = Arena::new();
        let val: ArenaBox<'_, [()]> = arena.insert_slice(&[]);
        assert_eq!(&*ArenaBox::take_boxed_slice(val), &[]);
        let val = arena.insert_slice(&[1, 2, 3, 4]);
        assert_eq!(&*ArenaBox::take_boxed_slice(val), &[1, 2, 3, 4]);
    }

    #[test]
    fn arena_drop() {
        let (tx, rx) = mpsc::channel();
        let arena = Arena::new();
        let val = arena.insert(DropSender::new(1, tx.clone()));
        drop(val);
        assert_eq!(rx.try_recv().unwrap(), 1);

        let val = arena.insert_boxed_slice(Box::new([DropSender::new(2, tx.clone())]));
        drop(val);
        assert_eq!(rx.try_recv().unwrap(), 2);

        let val = arena.insert_slice(&[DropSender::new(3, tx.clone())]);
        drop(val);
        assert_eq!(rx.try_recv().unwrap(), 3);

        rx.try_recv().expect_err("no more drops");
    }

    #[test]
    fn arena_take_drop() {
        let (tx, rx) = mpsc::channel();
        let arena = Arena::new();

        let val = arena.insert(DropSender::new(1, tx.clone()));
        let inner = ArenaBox::take(val);
        rx.try_recv().expect_err("shouldn't have dropped when taken");
        drop(inner);
        assert_eq!(rx.try_recv().unwrap(), 1);

        let val = arena.insert_slice(&[DropSender::new(2, tx.clone())]);
        let inner = ArenaBox::take_boxed_slice(val);
        rx.try_recv().expect_err("shouldn't have dropped when taken");
        drop(inner);
        assert_eq!(rx.try_recv().unwrap(), 2);

        rx.try_recv().expect_err("no more drops");
    }

    #[test]
    fn arena_contains() {
        let arena1 = Arena::new();
        let arena2 = Arena::new();

        let val1 = arena1.insert(1);
        let val2 = arena2.insert(2);

        assert!(arena1.contains(&val1));
        assert!(arena2.contains(&val2));
        assert!(!arena1.contains(&val2));
        assert!(!arena2.contains(&val1));
    }

    #[test]
    fn arena_assume() {
        let arena = Arena::new();

        let val = arena.insert(1);
        let val_leaked = unsafe { ArenaBox::into_ptr(val) };
        let val = unsafe { arena.assume(val_leaked) };

        assert!(arena.contains(&val));
    }

    #[test]
    #[should_panic]
    fn arena_bad_assume() {
        let arena = Arena::new();

        unsafe { arena.assume(NonNull::<()>::dangling()) };
    }

    #[test]
    #[should_panic]
    fn bad_static_box_ownership() {
        let arena1 = Arena::new();
        let arena2 = Arena::new();

        let val = arena1.insert(1);
        arena2.make_static(val);
    }

    #[test]
    #[should_panic]
    fn bad_rc_ownership() {
        let arena1 = Arena::new();
        let arena2 = Arena::new();

        let val = arena1.insert(1);
        arena2.make_rc(val);
    }

    #[test]
    fn box_lifecycle() {
        let arena = Arena::new();

        // create the initial value and modify it
        let mut val = arena.insert(1);
        *val = 2;
        assert_eq!(*val, 2);

        // make it a static box and modify it
        let mut val = arena.make_static(val);
        *val = 3;
        assert_eq!(*val, 3);

        // make it into a refcounted shared pointer and check the value is still the
        // same
        let val = ArenaRc::from(val);
        assert_eq!(*val, 3);

        // clone the refcount and verify that we can't unwrap it back to a static box.
        let val_copied = val.clone();
        assert_eq!(*val_copied, 3);
        let val = ArenaRc::try_unwrap(val).expect_err("Double strong count should fail to unwrap");
        assert_eq!(*val, 3);
        drop(val_copied);

        // now that the cloned rc is gone, unwrap it back to a static box and modify it
        let mut val =
            ArenaRc::try_unwrap(val).expect("strong count should be one so this should unwrap now");
        *val = 4;
        assert_eq!(*val, 4);

        // bring it back to a normal arena box and modify it
        let mut shared_arena = None;
        let mut val = ArenaStaticBox::unwrap(val, &mut shared_arena);
        *val = 5;
        assert_eq!(*val, 5);

        // make it back into an rc but directly rather than from a static box
        let val = arena.make_rc(val);
        assert_eq!(*val, 5);
    }

    #[test]
    fn static_raw_roundtrip() {
        let arena = Arena::new();
        let val = arena.make_static(arena.insert(1));

        // turn it into raw pointers and modify it
        let (arena_ptr, mut data_ptr) = ArenaStaticBox::into_raw(val);
        *unsafe { data_ptr.as_mut() } = 2;
        assert_eq!(*unsafe { data_ptr.as_ref() }, 2);

        // reconstitute it back to an `ArenaBox` and then transform it
        let arena = unsafe { Arena::from_raw(arena_ptr) };
        let val = unsafe { arena.assume(data_ptr) };

        assert_eq!(*val, 2);
    }

    #[test]
    fn arena_into_and_from_iter() {
        let arena = Arena::new();

        // empty slice to vec
        let val: ArenaBox<'_, [()]> = arena.insert_slice(&[]);
        let vec_val = Vec::from_iter(val);
        assert!(vec_val.is_empty());

        // filled slice to vec
        let val = arena.insert_slice(&[1, 2, 3, 4]);
        let vec_val = Vec::from_iter(val);
        assert_eq!(&[1, 2, 3, 4], &*vec_val);

        // filled static slice to vec
        let val = arena.make_static(arena.insert_slice(&[1, 2, 3, 4]));
        let vec_val = Vec::from_iter(val);
        assert_eq!(&[1, 2, 3, 4], &*vec_val);

        // empty vec to arena box
        let val: Vec<()> = vec![];
        let arena_val = arena.insert_from_iter(val.clone());
        assert_eq!(val, &*arena_val);

        // filled vec to arena box
        let val = vec![1, 2, 3, 4];
        let arena_val = arena.insert_from_iter(val);
        assert_eq!(&[1, 2, 3, 4], &*arena_val);
    }

    #[test]
    fn arena_try_from_iter() {
        let arena = Arena::new();

        let val: Vec<Result<_, ()>> = vec![Ok(1), Ok(2), Ok(3), Ok(4)];
        let arena_val = arena.try_insert_from_iter(val).unwrap();
        assert_eq!(&[1, 2, 3, 4], &*arena_val);

        let (tx, rx) = mpsc::channel();
        let val = vec![Ok(DropSender::new(0, tx.clone())), Err(-1), Ok(DropSender::new(1, tx))];
        let Err(-1) = arena.try_insert_from_iter(val) else {
            panic!("early exit from try_insert_from_iter")
        };
        let Ok(0) = rx.try_recv() else {
            panic!("expected drop of leading ok value to have happened")
        };
        let Ok(1) = rx.try_recv() else {
            panic!("expected drop of trailing ok value to have happened")
        };
    }
}