fidl_next_codec/
encoder.rs1use core::marker::PhantomData;
8use core::mem::MaybeUninit;
9use core::slice::{from_mut, from_raw_parts};
10
11use crate::{Chunk, Encode, EncodeError, WireU64, ZeroPadding, CHUNK_SIZE};
12
/// Internal trait for encoders which can count the handles they have taken.
pub trait InternalHandleEncoder {
    /// Returns the number of handles added to the encoder so far.
    ///
    /// Hidden from docs: this is an internal implementation detail, not part
    /// of the public encoding API.
    #[doc(hidden)]
    fn __internal_handle_count(&self) -> usize;
}
22
/// A byte sink that encoded FIDL data is written into.
pub trait Encoder: InternalHandleEncoder {
    /// Returns the number of bytes written to the encoder so far.
    fn bytes_written(&self) -> usize;

    /// Appends `len` zeroed bytes to the encoder.
    ///
    /// NOTE(review): the `Vec<Chunk>` impl below rounds `len` up to a whole
    /// number of chunks — confirm whether all implementors must pad this way.
    fn write_zeroes(&mut self, len: usize);

    /// Appends `bytes` to the end of the encoder.
    fn write(&mut self, bytes: &[u8]);

    /// Overwrites previously-written bytes starting at byte offset `pos`.
    fn rewrite(&mut self, pos: usize, bytes: &[u8]);
}
41
impl InternalHandleEncoder for Vec<Chunk> {
    #[inline]
    fn __internal_handle_count(&self) -> usize {
        // A plain chunk buffer carries no handles, so the count is always zero.
        0
    }
}
48
impl Encoder for Vec<Chunk> {
    #[inline]
    fn bytes_written(&self) -> usize {
        // Each element of the Vec is one chunk, so the total byte count is
        // the element count times the chunk size.
        self.len() * CHUNK_SIZE
    }

    #[inline]
    fn write_zeroes(&mut self, len: usize) {
        // Round up to whole chunks; any partial trailing chunk is fully zeroed.
        let count = len.div_ceil(CHUNK_SIZE);
        self.reserve(count);
        // SAFETY: `reserve` guarantees capacity for at least `count` more
        // chunks, so the pointer one-past the initialized elements is valid
        // for writing `count` chunks.
        let ptr = unsafe { self.as_mut_ptr().add(self.len()) };
        // SAFETY: `ptr` is valid for `count` chunks of writes (see above);
        // `write_bytes` zeroes `count * size_of::<Chunk>()` bytes.
        unsafe {
            ptr.write_bytes(0, count);
        }
        // SAFETY: the `count` new elements were just zero-initialized above,
        // so extending the length keeps every element initialized.
        unsafe {
            self.set_len(self.len() + count);
        }
    }

    #[inline]
    fn write(&mut self, bytes: &[u8]) {
        if bytes.is_empty() {
            return;
        }

        // Number of whole chunks needed to hold `bytes`, rounding up.
        let count = bytes.len().div_ceil(CHUNK_SIZE);
        self.reserve(count);

        // SAFETY: zero the final chunk up front so that padding bytes past
        // the end of `bytes` are zero-initialized. `reserve` guarantees
        // capacity for `count` more chunks, and `count >= 1` because `bytes`
        // is non-empty, so index `len + count - 1` is within the reserved
        // region.
        unsafe {
            self.as_mut_ptr().add(self.len() + count - 1).write(WireU64(0));
        }
        // SAFETY: the reserved region begins immediately after the `len`
        // initialized chunks, so this pointer is valid for writes.
        let ptr = unsafe { self.as_mut_ptr().add(self.len()).cast::<u8>() };

        // SAFETY: `ptr` points to at least `count * CHUNK_SIZE >= bytes.len()`
        // writable bytes, and the borrowed input slice cannot overlap the
        // Vec's exclusively-borrowed spare capacity.
        unsafe {
            ptr.copy_from_nonoverlapping(bytes.as_ptr(), bytes.len());
        }

        // SAFETY: all `count` new chunks are now initialized — the data bytes
        // were copied in and the trailing padding was zeroed above.
        unsafe {
            self.set_len(self.len() + count);
        }
    }

    #[inline]
    fn rewrite(&mut self, pos: usize, bytes: &[u8]) {
        // Panic (even in release builds) if the target range extends past the
        // bytes already written; the unsafe copy below relies on this bound.
        assert!(pos + bytes.len() <= self.bytes_written());

        // SAFETY: the assertion above guarantees `pos..pos + bytes.len()`
        // lies entirely within the initialized portion of the buffer.
        let ptr = unsafe { self.as_mut_ptr().cast::<u8>().add(pos) };
        // SAFETY: `ptr` is valid for `bytes.len()` writes (see above), and a
        // shared `&[u8]` cannot overlap the exclusively-borrowed Vec storage.
        unsafe {
            ptr.copy_from_nonoverlapping(bytes.as_ptr(), bytes.len());
        }
    }
}
104
/// Extension methods for [`Encoder`].
pub trait EncoderExt {
    /// Preallocates zeroed space for `len` values of `T`, returning a
    /// [`Preallocated`] writer over the reserved slots.
    fn preallocate<T>(&mut self, len: usize) -> Preallocated<'_, Self, T>;

    /// Encodes each value of `values` into the encoder.
    ///
    /// Returns an `EncodeError` if any value fails to encode.
    fn encode_next_slice<T: Encode<Self>>(&mut self, values: &mut [T]) -> Result<(), EncodeError>;

    /// Encodes a single value into the encoder.
    ///
    /// Returns an `EncodeError` if the value fails to encode.
    fn encode_next<T: Encode<Self>>(&mut self, value: &mut T) -> Result<(), EncodeError>;
}
120
impl<E: Encoder + ?Sized> EncoderExt for E {
    fn preallocate<T>(&mut self, len: usize) -> Preallocated<'_, Self, T> {
        // Record where the reserved region begins so later `write_next` calls
        // can rewrite the slots in place.
        let pos = self.bytes_written();

        // Reserve (and zero) space for `len` values of `T` up front.
        self.write_zeroes(len * size_of::<T>());

        Preallocated {
            encoder: self,
            pos,
            // Debug-only slot counter used to catch over-writing.
            #[cfg(debug_assertions)]
            remaining: len,
            _phantom: PhantomData,
        }
    }

    fn encode_next_slice<T: Encode<Self>>(&mut self, values: &mut [T]) -> Result<(), EncodeError> {
        // Reserve slots for every encoded value before encoding any of them;
        // encoding an element may append further (out-of-line) data to the
        // encoder after the reserved region.
        let mut outputs = self.preallocate::<T::Encoded>(values.len());

        // A single scratch slot is reused for every element. Its padding is
        // zeroed once here; `encode` presumably overwrites all non-padding
        // bytes on each iteration — TODO(review): confirm that contract.
        let mut out = MaybeUninit::<T::Encoded>::uninit();
        <T::Encoded as ZeroPadding>::zero_padding(&mut out);
        for value in values {
            value.encode(outputs.encoder, &mut out)?;
            // SAFETY: `encode` returned `Ok`, so `out` is assumed to hold an
            // initialized `T::Encoded`; `write_next` then copies it into the
            // next preallocated slot.
            unsafe {
                outputs.write_next(out.assume_init_ref());
            }
        }

        Ok(())
    }

    fn encode_next<T: Encode<Self>>(&mut self, value: &mut T) -> Result<(), EncodeError> {
        // Encoding one value is just encoding a one-element slice.
        self.encode_next_slice(from_mut(value))
    }
}
156
/// A run of preallocated slots in an encoder, written one value at a time.
pub struct Preallocated<'a, E: ?Sized, T> {
    /// The underlying encoder; also usable directly for out-of-line writes.
    pub encoder: &'a mut E,
    // Byte offset in the encoder of the next slot to write.
    pos: usize,
    // Debug-only count of slots not yet written; guards against writing more
    // values than were preallocated.
    #[cfg(debug_assertions)]
    remaining: usize,
    // Records the slot type `T` without storing a value of it.
    _phantom: PhantomData<T>,
}
166
167impl<E: Encoder + ?Sized, T> Preallocated<'_, E, T> {
168 pub unsafe fn write_next(&mut self, value: &T) {
174 #[cfg(debug_assertions)]
175 {
176 assert!(self.remaining > 0, "attemped to write more slots than preallocated");
177 self.remaining -= 1;
178 }
179
180 let bytes_ptr = (value as *const T).cast::<u8>();
181 let bytes = unsafe { from_raw_parts(bytes_ptr, size_of::<T>()) };
182 self.encoder.rewrite(self.pos, bytes);
183 self.pos += size_of::<T>();
184 }
185}