fidl_next_codec/
encoder.rs1use core::marker::PhantomData;
8use core::mem::MaybeUninit;
9use core::slice::from_raw_parts;
10
11use crate::{Chunk, Encode, EncodeError, WireU64, ZeroPadding, CHUNK_SIZE};
12
/// An encoder which can report how many handles it has encoded.
///
/// This is an internal implementation detail; user code should not call the
/// method directly.
pub trait InternalHandleEncoder {
    /// Returns the number of handles added to the encoder so far.
    #[doc(hidden)]
    fn __internal_handle_count(&self) -> usize;
}
22
/// A destination buffer that wire values can be encoded into.
pub trait Encoder: InternalHandleEncoder {
    /// Returns the number of bytes written to the encoder so far.
    fn bytes_written(&self) -> usize;

    /// Appends `len` bytes of zeroes to the end of the encoder.
    ///
    /// NOTE(review): the `Vec<Chunk>` impl rounds `len` up to a whole number
    /// of chunks, so more than `len` zero bytes may be written.
    fn write_zeroes(&mut self, len: usize);

    /// Appends `bytes` to the end of the encoder.
    fn write(&mut self, bytes: &[u8]);

    /// Overwrites previously-written bytes starting at byte offset `pos`.
    ///
    /// `pos + bytes.len()` must not exceed `bytes_written()`.
    fn rewrite(&mut self, pos: usize, bytes: &[u8]);
}
41
impl InternalHandleEncoder for Vec<Chunk> {
    #[inline]
    fn __internal_handle_count(&self) -> usize {
        // A plain chunk buffer cannot carry handles, so the count is always 0.
        0
    }
}
48
impl Encoder for Vec<Chunk> {
    #[inline]
    fn bytes_written(&self) -> usize {
        // Every element of the vec is one fully-written chunk.
        self.len() * CHUNK_SIZE
    }

    #[inline]
    fn write_zeroes(&mut self, len: usize) {
        // Round up to whole chunks; this may write more than `len` zero bytes.
        let count = len.div_ceil(CHUNK_SIZE);
        self.reserve(count);
        // SAFETY: `reserve(count)` guarantees capacity for at least `count`
        // more chunks, so the pointer one-past the initialized elements is
        // within the allocation.
        let ptr = unsafe { self.as_mut_ptr().add(self.len()) };
        // SAFETY: the `count` chunks starting at `ptr` lie inside the
        // reserved (but uninitialized) tail of the allocation, and all-zero
        // bytes are a valid `Chunk` bit pattern.
        unsafe {
            ptr.write_bytes(0, count);
        }
        // SAFETY: the `count` new elements were just zero-initialized above.
        unsafe {
            self.set_len(self.len() + count);
        }
    }

    #[inline]
    fn write(&mut self, bytes: &[u8]) {
        if bytes.is_empty() {
            return;
        }

        // Round up to whole chunks so trailing padding is part of the buffer.
        let count = bytes.len().div_ceil(CHUNK_SIZE);
        self.reserve(count);

        // Zero the final chunk first so that any padding bytes past the end
        // of `bytes` are deterministically zero.
        // SAFETY: `reserve(count)` guarantees capacity for `count` more
        // chunks, so index `len + count - 1` is within the allocation
        // (`count >= 1` because `bytes` is non-empty).
        unsafe {
            self.as_mut_ptr().add(self.len() + count - 1).write(WireU64(0));
        }
        // SAFETY: the pointer one-past the initialized elements is within the
        // reserved allocation.
        let ptr = unsafe { self.as_mut_ptr().add(self.len()).cast::<u8>() };

        // SAFETY: the destination has room for `count * CHUNK_SIZE >=
        // bytes.len()` bytes, and `bytes` cannot overlap the vec's
        // uninitialized tail.
        unsafe {
            ptr.copy_from_nonoverlapping(bytes.as_ptr(), bytes.len());
        }

        // SAFETY: all `count` new chunks are now initialized — the last chunk
        // was zeroed and the rest were fully overwritten by the copy.
        unsafe {
            self.set_len(self.len() + count);
        }
    }

    #[inline]
    fn rewrite(&mut self, pos: usize, bytes: &[u8]) {
        // Only previously-written bytes may be rewritten.
        assert!(pos + bytes.len() <= self.bytes_written());

        // SAFETY: the assert above guarantees `pos..pos + bytes.len()` lies
        // within the initialized portion of the buffer.
        let ptr = unsafe { self.as_mut_ptr().cast::<u8>().add(pos) };
        // SAFETY: `bytes` is an immutable borrow and so cannot overlap the
        // mutably-borrowed destination.
        unsafe {
            ptr.copy_from_nonoverlapping(bytes.as_ptr(), bytes.len());
        }
    }
}
104
/// Extension methods for [`Encoder`].
pub trait EncoderExt {
    /// Pre-allocates zeroed space for `len` values of type `T`, returning a
    /// handle used to fill the slots in afterwards.
    fn preallocate<T>(&mut self, len: usize) -> Preallocated<'_, Self, T>;

    /// Encodes each value yielded by `values` into newly-allocated slots.
    ///
    /// Returns the first encoding error encountered, if any.
    fn encode_next_iter<T: Encode<Self>>(
        &mut self,
        values: impl ExactSizeIterator<Item = T>,
    ) -> Result<(), EncodeError>;

    /// Encodes a single value into a newly-allocated slot.
    fn encode_next<T: Encode<Self>>(&mut self, value: T) -> Result<(), EncodeError>;
}
123
impl<E: Encoder + ?Sized> EncoderExt for E {
    fn preallocate<T>(&mut self, len: usize) -> Preallocated<'_, Self, T> {
        // Remember where the slots begin so they can be rewritten later.
        let pos = self.bytes_written();

        // Reserve zeroed space for all `len` slots up front.
        self.write_zeroes(len * size_of::<T>());

        Preallocated {
            encoder: self,
            pos,
            #[cfg(debug_assertions)]
            remaining: len,
            _phantom: PhantomData,
        }
    }

    fn encode_next_iter<T: Encode<Self>>(
        &mut self,
        values: impl ExactSizeIterator<Item = T>,
    ) -> Result<(), EncodeError> {
        // `ExactSizeIterator::len` sizes the preallocation; the iterator is
        // assumed to yield exactly that many items (debug builds assert this
        // via `write_next`).
        let mut outputs = self.preallocate::<T::Encoded>(values.len());

        // The scratch value is reused across iterations; its padding bytes
        // only need to be zeroed once since `encode` writes the fields.
        let mut out = MaybeUninit::<T::Encoded>::uninit();
        <T::Encoded as ZeroPadding>::zero_padding(&mut out);
        for value in values {
            value.encode(outputs.encoder, &mut out)?;
            // SAFETY: `encode` returned `Ok`, so `out` is fully initialized
            // (and its padding was zeroed above), satisfying both
            // `assume_init_ref` and `write_next`'s contract.
            unsafe {
                outputs.write_next(out.assume_init_ref());
            }
        }

        Ok(())
    }

    fn encode_next<T: Encode<Self>>(&mut self, value: T) -> Result<(), EncodeError> {
        // A single value is just an iterator of length one.
        self.encode_next_iter(core::iter::once(value))
    }
}
162
/// A pre-allocated run of `T`-sized slots in an encoder, filled in one value
/// at a time via [`Preallocated::write_next`].
pub struct Preallocated<'a, E: ?Sized, T> {
    /// The underlying encoder holding the slots.
    pub encoder: &'a mut E,
    // Byte offset in the encoder of the next slot to write.
    pos: usize,
    // Number of slots not yet written; tracked only in debug builds to catch
    // over-writing past the preallocated region.
    #[cfg(debug_assertions)]
    remaining: usize,
    // Marks the slot element type without storing a `T`.
    _phantom: PhantomData<T>,
}
172
173impl<E: Encoder + ?Sized, T> Preallocated<'_, E, T> {
174 pub unsafe fn write_next(&mut self, value: &T) {
180 #[cfg(debug_assertions)]
181 {
182 assert!(self.remaining > 0, "attemped to write more slots than preallocated");
183 self.remaining -= 1;
184 }
185
186 let bytes_ptr = (value as *const T).cast::<u8>();
187 let bytes = unsafe { from_raw_parts(bytes_ptr, size_of::<T>()) };
188 self.encoder.rewrite(self.pos, bytes);
189 self.pos += size_of::<T>();
190 }
191}