1use core::marker::PhantomData;
8use core::mem::MaybeUninit;
9use core::slice::from_raw_parts;
10
11use crate::wire::Uint64;
12use crate::{CHUNK_SIZE, Chunk, Encode, EncodeError, Slot, Wire};
13
/// An encoder which tracks how many handles it has encoded.
///
/// NOTE(review): only the count accessor is visible here; the handle-writing
/// side of the contract presumably lives elsewhere — confirm against the
/// handle-capable encoder implementations.
pub trait InternalHandleEncoder {
    #[doc(hidden)]
    fn __internal_handle_count(&self) -> usize;
}
23
/// A destination that encoded bytes are written into.
///
/// Encoders append data at the end and can also patch previously-written
/// bytes in place via [`Encoder::rewrite`].
pub trait Encoder: InternalHandleEncoder {
    /// Returns the number of bytes written to the encoder so far.
    fn bytes_written(&self) -> usize;

    /// Writes `len` zeroed bytes to the end of the encoder.
    ///
    /// NOTE(review): the `Vec<Chunk>` implementation rounds `len` up to a
    /// whole number of chunks — confirm whether that rounding is part of the
    /// trait contract or an implementation detail.
    fn write_zeroes(&mut self, len: usize);

    /// Copies `bytes` to the end of the encoder.
    fn write(&mut self, bytes: &[u8]);

    /// Overwrites previously-written bytes starting at byte position `pos`.
    ///
    /// The `Vec<Chunk>` implementation panics if `pos + bytes.len()` exceeds
    /// `bytes_written()`; presumably all implementations must reject
    /// out-of-bounds rewrites — confirm.
    fn rewrite(&mut self, pos: usize, bytes: &[u8]);
}
42
43impl InternalHandleEncoder for Vec<Chunk> {
44 #[inline]
45 fn __internal_handle_count(&self) -> usize {
46 0
47 }
48}
49
// Byte-sink encoder backed by a growable vector of fixed-size chunks.
// Writes always advance the buffer by whole chunks; `write` zeroes the
// trailing padding of the final chunk so no uninitialized bytes leak out.
impl Encoder for Vec<Chunk> {
    #[inline]
    fn bytes_written(&self) -> usize {
        // Every chunk counts as fully written, including any zero padding
        // appended by `write`.
        self.len() * CHUNK_SIZE
    }

    #[inline]
    fn write_zeroes(&mut self, len: usize) {
        // Round up to whole chunks; `len == 0` yields `count == 0` and the
        // writes below become no-ops.
        let count = len.div_ceil(CHUNK_SIZE);
        self.reserve(count);
        // SAFETY: `reserve(count)` guarantees capacity for `count` more
        // chunks, so `ptr` points just past the initialized elements and
        // within the allocation.
        let ptr = unsafe { self.as_mut_ptr().add(self.len()) };
        // SAFETY: the `count` chunks starting at `ptr` are within reserved
        // capacity; `write_bytes` counts in units of `Chunk`.
        unsafe {
            ptr.write_bytes(0, count);
        }
        // SAFETY: the `count` new elements were just zero-initialized above,
        // and capacity was reserved for them.
        unsafe {
            self.set_len(self.len() + count);
        }
    }

    #[inline]
    fn write(&mut self, bytes: &[u8]) {
        if bytes.is_empty() {
            return;
        }

        let count = bytes.len().div_ceil(CHUNK_SIZE);
        self.reserve(count);

        // Zero the final chunk first so the padding bytes past `bytes.len()`
        // are initialized before `set_len` exposes them.
        // SAFETY: index `self.len() + count - 1` is within the capacity just
        // reserved; `count >= 1` because `bytes` is non-empty.
        unsafe {
            self.as_mut_ptr().add(self.len() + count - 1).write(Uint64(0));
        }
        // SAFETY: points at the first free byte of the reserved region.
        let ptr = unsafe { self.as_mut_ptr().add(self.len()).cast::<u8>() };

        // SAFETY: the destination has room for `count * CHUNK_SIZE >=
        // bytes.len()` bytes, and a `&[u8]` argument cannot alias the
        // `&mut self` buffer.
        unsafe {
            ptr.copy_from_nonoverlapping(bytes.as_ptr(), bytes.len());
        }

        // SAFETY: all `count` new chunks are now fully initialized (data
        // plus the pre-zeroed tail chunk).
        unsafe {
            self.set_len(self.len() + count);
        }
    }

    #[inline]
    fn rewrite(&mut self, pos: usize, bytes: &[u8]) {
        // Rewrites may only patch bytes that were already written.
        assert!(pos + bytes.len() <= self.bytes_written());

        // SAFETY: `pos` is within the initialized `bytes_written()` prefix,
        // per the assert above.
        let ptr = unsafe { self.as_mut_ptr().cast::<u8>().add(pos) };
        // SAFETY: the assert guarantees `bytes.len()` bytes starting at
        // `pos` are in-bounds; `bytes` cannot alias the `&mut self` buffer.
        unsafe {
            ptr.copy_from_nonoverlapping(bytes.as_ptr(), bytes.len());
        }
    }
}
105
/// Extension methods for [`Encoder`].
pub trait EncoderExt {
    /// Pre-allocates zeroed space for `len` values of type `T`, returning a
    /// [`Preallocated`] region the values can later be written into.
    fn preallocate<T>(&mut self, len: usize) -> Preallocated<'_, Self, T>;

    /// Encodes every value of an iterator into the encoder.
    ///
    /// Available only for wire types with no encoding constraint.
    fn encode_next_iter<W, T>(
        &mut self,
        values: impl ExactSizeIterator<Item = T>,
    ) -> Result<(), EncodeError>
    where
        W: Wire<Constraint = ()>,
        T: Encode<W, Self>;

    /// Encodes every value of an iterator into the encoder, validating each
    /// encoded value against `constraint`.
    fn encode_next_iter_with_constraint<W, T>(
        &mut self,
        values: impl ExactSizeIterator<Item = T>,
        constraint: W::Constraint,
    ) -> Result<(), EncodeError>
    where
        W: Wire,
        T: Encode<W, Self>;

    /// Encodes a single value into the encoder.
    ///
    /// Available only for wire types with no encoding constraint.
    fn encode_next<W, T>(&mut self, value: T) -> Result<(), EncodeError>
    where
        W: Wire<Constraint = ()>,
        T: Encode<W, Self>;

    /// Encodes a single value into the encoder, validating the encoded
    /// value against `constraint`.
    fn encode_next_with_constraint<W: Wire, T: Encode<W, Self>>(
        &mut self,
        value: T,
        constraint: W::Constraint,
    ) -> Result<(), EncodeError>;

    /// Creates a default encoder and encodes `value` into it.
    ///
    /// Available only for wire types with no encoding constraint.
    fn encode<W, T>(value: T) -> Result<Self, EncodeError>
    where
        Self: Default,
        W: Wire<Constraint = ()>,
        T: Encode<W, Self>;

    /// Creates a default encoder and encodes `value` into it, validating
    /// the encoded value against `constraint`.
    fn encode_with_constraint<W, T>(
        value: T,
        constraint: W::Constraint,
    ) -> Result<Self, EncodeError>
    where
        Self: Default,
        W: Wire,
        T: Encode<W, Self>;
}
172
// Blanket implementation: every `Encoder` gets the extension methods. All
// entry points funnel into `encode_next_iter_with_constraint`.
impl<E: Encoder + ?Sized> EncoderExt for E {
    fn preallocate<T>(&mut self, len: usize) -> Preallocated<'_, Self, T> {
        // Remember where the region starts so its slots can be rewritten
        // in place later.
        let pos = self.bytes_written();

        // Reserve zeroed space for all `len` values up front.
        self.write_zeroes(len * size_of::<T>());

        Preallocated {
            encoder: self,
            pos,
            #[cfg(debug_assertions)]
            remaining: len,
            _phantom: PhantomData,
        }
    }

    fn encode_next_iter<W, T>(
        &mut self,
        values: impl ExactSizeIterator<Item = T>,
    ) -> Result<(), EncodeError>
    where
        W: Wire<Constraint = ()>,
        T: Encode<W, Self>,
    {
        // Unconstrained wire types use the unit constraint.
        self.encode_next_iter_with_constraint(values, ())
    }

    fn encode_next_iter_with_constraint<W, T>(
        &mut self,
        values: impl ExactSizeIterator<Item = T>,
        constraint: W::Constraint,
    ) -> Result<(), EncodeError>
    where
        W: Wire,
        T: Encode<W, Self>,
    {
        // Reserve slots for every value before encoding any of them —
        // presumably so anything `encode` appends to the encoder lands
        // after the reserved region. TODO confirm against `Encode` impls.
        let mut outputs = self.preallocate::<W>(values.len());

        // `out` is a scratch slot reused for every element. Padding is
        // zeroed once here; NOTE(review): this assumes `encode` never
        // dirties the padding bytes that `zero_padding` cleared — confirm
        // against `Encode::encode`'s contract.
        let mut out = MaybeUninit::<W>::uninit();
        <W as Wire>::zero_padding(&mut out);
        for value in values {
            // NOTE(review): `constraint` is passed by value repeatedly, so
            // `W::Constraint` must be `Copy` — presumably bounded on `Wire`.
            value.encode(outputs.encoder, &mut out, constraint)?;
            // SAFETY (per `Slot::new_unchecked_from_maybe_uninit`'s
            // contract): `encode` succeeded, so `out` is expected to hold a
            // valid encoded `W` — TODO confirm that contract.
            W::validate(unsafe { Slot::new_unchecked_from_maybe_uninit(&mut out) }, constraint)
                .map_err(EncodeError::Validation)?;
            // SAFETY: `encode` and `validate` both succeeded, so `out` is
            // treated as fully initialized; `write_next` copies its bytes
            // into the next preallocated slot.
            unsafe {
                outputs.write_next(out.assume_init_ref());
            }
        }

        Ok(())
    }

    fn encode_next<W, T>(&mut self, value: T) -> Result<(), EncodeError>
    where
        W: Wire<Constraint = ()>,
        T: Encode<W, Self>,
    {
        self.encode_next_with_constraint(value, ())
    }

    fn encode_next_with_constraint<W, T>(
        &mut self,
        value: T,
        constraint: W::Constraint,
    ) -> Result<(), EncodeError>
    where
        W: Wire,
        T: Encode<W, Self>,
    {
        // A single value is just a one-element iterator.
        self.encode_next_iter_with_constraint(core::iter::once(value), constraint)
    }

    fn encode<W, T>(value: T) -> Result<Self, EncodeError>
    where
        Self: Default,
        W: Wire<Constraint = ()>,
        T: Encode<W, Self>,
    {
        Self::encode_with_constraint(value, ())
    }

    fn encode_with_constraint<W, T>(
        value: T,
        constraint: W::Constraint,
    ) -> Result<Self, EncodeError>
    where
        Self: Default,
        W: Wire,
        T: Encode<W, Self>,
    {
        let mut result = Self::default();
        result.encode_next_with_constraint(value, constraint)?;
        Ok(result)
    }
}
268
/// A zeroed region reserved in an encoder, into which values of type `T`
/// are written sequentially via [`Preallocated::write_next`].
pub struct Preallocated<'a, E: ?Sized, T> {
    /// The underlying encoder holding the reserved region.
    pub encoder: &'a mut E,
    // Byte offset in the encoder where the next value will be written.
    pos: usize,
    // Debug-only count of slots still available to write.
    #[cfg(debug_assertions)]
    remaining: usize,
    // Marks the slot type without storing a `T`.
    _phantom: PhantomData<T>,
}
278
279impl<E: Encoder + ?Sized, T> Preallocated<'_, E, T> {
280 pub unsafe fn write_next(&mut self, value: &T) {
286 #[cfg(debug_assertions)]
287 {
288 assert!(self.remaining > 0, "attemped to write more slots than preallocated");
289 self.remaining -= 1;
290 }
291
292 let bytes_ptr = (value as *const T).cast::<u8>();
293 let bytes = unsafe { from_raw_parts(bytes_ptr, size_of::<T>()) };
294 self.encoder.rewrite(self.pos, bytes);
295 self.pos += size_of::<T>();
296 }
297}