use std::cmp;
use std::convert::Infallible as Never;
use std::fmt::{self, Debug, Formatter};
use std::marker::PhantomData;
use std::ops::{Deref, Range, RangeBounds};

use arrayvec::ArrayVec;
use zerocopy::SplitByteSlice;

use crate::{
    AsFragmentedByteSlice, Buffer, BufferView, BufferViewMut, ContiguousBuffer, EmptyBuf,
    FragmentedBuffer, FragmentedBufferMut, FragmentedBytes, FragmentedBytesMut, GrowBuffer,
    GrowBufferMut, ParsablePacket, ParseBuffer, ParseBufferMut, ReusableBuffer, ShrinkBuffer,
    canonicalize_range, take_back, take_back_mut, take_front, take_front_mut,
};
22
23#[derive(Copy, Clone, Debug)]
29pub enum Either<A, B> {
30 A(A),
31 B(B),
32}
33
34impl<A, B> Either<A, B> {
35 pub fn map_a<AA, F: FnOnce(A) -> AA>(self, f: F) -> Either<AA, B> {
41 match self {
42 Either::A(a) => Either::A(f(a)),
43 Either::B(b) => Either::B(b),
44 }
45 }
46
47 pub fn map_b<BB, F: FnOnce(B) -> BB>(self, f: F) -> Either<A, BB> {
53 match self {
54 Either::A(a) => Either::A(a),
55 Either::B(b) => Either::B(f(b)),
56 }
57 }
58
59 pub fn unwrap_a(self) -> A {
65 match self {
66 Either::A(x) => x,
67 Either::B(_) => panic!("This `Either<A, B>` does not hold the `A` variant"),
68 }
69 }
70
71 pub fn unwrap_b(self) -> B {
77 match self {
78 Either::A(_) => panic!("This `Either<A, B>` does not hold the `B` variant"),
79 Either::B(x) => x,
80 }
81 }
82}
83
84impl<A> Either<A, A> {
85 pub fn into_inner(self) -> A {
88 match self {
89 Either::A(x) => x,
90 Either::B(x) => x,
91 }
92 }
93}
94
95impl<A> Either<A, Never> {
96 #[inline]
98 pub fn into_a(self) -> A {
99 match self {
100 Either::A(a) => a,
101 }
102 }
103}
104
105impl<B> Either<Never, B> {
106 #[inline]
108 pub fn into_b(self) -> B {
109 match self {
110 Either::B(b) => b,
111 }
112 }
113}
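
// A small usage sketch for `Either` (values are arbitrary). In this crate the
// type mostly distinguishes "reused the input buffer" (`A`) from "allocated a
// new `Buf<Vec<u8>>`" (`B`) in `Serializer::serialize_vec`, but it behaves as
// an ordinary two-variant sum type.
#[cfg(test)]
mod either_example {
    use super::*;

    #[test]
    fn map_and_unwrap() {
        let e: Either<u32, &str> = Either::A(1);
        assert_eq!(e.map_a(|a| a + 1).unwrap_a(), 2);

        let e: Either<u8, u8> = Either::B(7);
        assert_eq!(e.into_inner(), 7);
    }
}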
114
115macro_rules! call_method_on_either {
116 ($val:expr, $method:ident, $($args:expr),*) => {
117 match $val {
118 Either::A(a) => a.$method($($args),*),
119 Either::B(b) => b.$method($($args),*),
120 }
121 };
122 ($val:expr, $method:ident) => {
123 call_method_on_either!($val, $method,)
124 };
125}
126
127impl<A, B> FragmentedBuffer for Either<A, B>
134where
135 A: FragmentedBuffer,
136 B: FragmentedBuffer,
137{
138 fn len(&self) -> usize {
139 call_method_on_either!(self, len)
140 }
141
142 fn with_bytes<'a, R, F>(&'a self, f: F) -> R
143 where
144 F: for<'b> FnOnce(FragmentedBytes<'b, 'a>) -> R,
145 {
146 call_method_on_either!(self, with_bytes, f)
147 }
148}
149
150impl<A, B> ContiguousBuffer for Either<A, B>
151where
152 A: ContiguousBuffer,
153 B: ContiguousBuffer,
154{
155}
156
157impl<A, B> ShrinkBuffer for Either<A, B>
158where
159 A: ShrinkBuffer,
160 B: ShrinkBuffer,
161{
162 fn shrink<R: RangeBounds<usize>>(&mut self, range: R) {
163 call_method_on_either!(self, shrink, range)
164 }
165 fn shrink_front(&mut self, n: usize) {
166 call_method_on_either!(self, shrink_front, n)
167 }
168 fn shrink_back(&mut self, n: usize) {
169 call_method_on_either!(self, shrink_back, n)
170 }
171}
172
173impl<A, B> ParseBuffer for Either<A, B>
174where
175 A: ParseBuffer,
176 B: ParseBuffer,
177{
178 fn parse<'a, P: ParsablePacket<&'a [u8], ()>>(&'a mut self) -> Result<P, P::Error> {
179 call_method_on_either!(self, parse)
180 }
181 fn parse_with<'a, ParseArgs, P: ParsablePacket<&'a [u8], ParseArgs>>(
182 &'a mut self,
183 args: ParseArgs,
184 ) -> Result<P, P::Error> {
185 call_method_on_either!(self, parse_with, args)
186 }
187}
188
189impl<A, B> FragmentedBufferMut for Either<A, B>
190where
191 A: FragmentedBufferMut,
192 B: FragmentedBufferMut,
193{
194 fn with_bytes_mut<'a, R, F>(&'a mut self, f: F) -> R
195 where
196 F: for<'b> FnOnce(FragmentedBytesMut<'b, 'a>) -> R,
197 {
198 call_method_on_either!(self, with_bytes_mut, f)
199 }
200}
201
202impl<A, B> ParseBufferMut for Either<A, B>
203where
204 A: ParseBufferMut,
205 B: ParseBufferMut,
206{
207 fn parse_mut<'a, P: ParsablePacket<&'a mut [u8], ()>>(&'a mut self) -> Result<P, P::Error> {
208 call_method_on_either!(self, parse_mut)
209 }
210 fn parse_with_mut<'a, ParseArgs, P: ParsablePacket<&'a mut [u8], ParseArgs>>(
211 &'a mut self,
212 args: ParseArgs,
213 ) -> Result<P, P::Error> {
214 call_method_on_either!(self, parse_with_mut, args)
215 }
216}
217
218impl<A, B> GrowBuffer for Either<A, B>
219where
220 A: GrowBuffer,
221 B: GrowBuffer,
222{
223 #[inline]
224 fn with_parts<'a, O, F>(&'a self, f: F) -> O
225 where
226 F: for<'b> FnOnce(&'a [u8], FragmentedBytes<'b, 'a>, &'a [u8]) -> O,
227 {
228 call_method_on_either!(self, with_parts, f)
229 }
230 fn capacity(&self) -> usize {
231 call_method_on_either!(self, capacity)
232 }
233 fn prefix_len(&self) -> usize {
234 call_method_on_either!(self, prefix_len)
235 }
236 fn suffix_len(&self) -> usize {
237 call_method_on_either!(self, suffix_len)
238 }
239 fn grow_front(&mut self, n: usize) {
240 call_method_on_either!(self, grow_front, n)
241 }
242 fn grow_back(&mut self, n: usize) {
243 call_method_on_either!(self, grow_back, n)
244 }
245 fn reset(&mut self) {
246 call_method_on_either!(self, reset)
247 }
248}
249
250impl<A, B> GrowBufferMut for Either<A, B>
251where
252 A: GrowBufferMut,
253 B: GrowBufferMut,
254{
255 fn with_parts_mut<'a, O, F>(&'a mut self, f: F) -> O
256 where
257 F: for<'b> FnOnce(&'a mut [u8], FragmentedBytesMut<'b, 'a>, &'a mut [u8]) -> O,
258 {
259 call_method_on_either!(self, with_parts_mut, f)
260 }
261
262 fn with_all_contents_mut<'a, O, F>(&'a mut self, f: F) -> O
263 where
264 F: for<'b> FnOnce(FragmentedBytesMut<'b, 'a>) -> O,
265 {
266 call_method_on_either!(self, with_all_contents_mut, f)
267 }
268
269 fn serialize<BB: PacketBuilder>(&mut self, builder: BB) {
270 call_method_on_either!(self, serialize, builder)
271 }
272}
273
274impl<A, B> Buffer for Either<A, B>
275where
276 A: Buffer,
277 B: Buffer,
278{
279 fn parse_with_view<'a, ParseArgs, P: ParsablePacket<&'a [u8], ParseArgs>>(
280 &'a mut self,
281 args: ParseArgs,
282 ) -> Result<(P, &'a [u8]), P::Error> {
283 call_method_on_either!(self, parse_with_view, args)
284 }
285}
286
287impl<A: AsRef<[u8]>, B: AsRef<[u8]>> AsRef<[u8]> for Either<A, B> {
288 fn as_ref(&self) -> &[u8] {
289 call_method_on_either!(self, as_ref)
290 }
291}
292
293impl<A: AsMut<[u8]>, B: AsMut<[u8]>> AsMut<[u8]> for Either<A, B> {
294 fn as_mut(&mut self) -> &mut [u8] {
295 call_method_on_either!(self, as_mut)
296 }
297}
298
299#[derive(Clone, Debug)]
305pub struct Buf<B> {
306 buf: B,
307 body: Range<usize>,
308}
309
310impl<B: AsRef<[u8]>> PartialEq for Buf<B> {
311 fn eq(&self, other: &Self) -> bool {
312 let self_slice = AsRef::<[u8]>::as_ref(self);
313 let other_slice = AsRef::<[u8]>::as_ref(other);
314 PartialEq::eq(self_slice, other_slice)
315 }
316}
317
318impl<B: AsRef<[u8]>> Eq for Buf<B> {}
319
320impl Buf<Vec<u8>> {
321 pub fn into_inner(self) -> Vec<u8> {
323 let Buf { mut buf, body } = self;
324 let len = body.end - body.start;
325 let _ = buf.drain(..body.start);
326 buf.truncate(len);
327 buf
328 }
329}
330
331impl<B: AsRef<[u8]>> Buf<B> {
332 pub fn new<R: RangeBounds<usize>>(buf: B, body: R) -> Buf<B> {
343 let len = buf.as_ref().len();
344 Buf { buf, body: canonicalize_range(len, &body) }
345 }
346
347 pub fn buffer_view(&mut self) -> BufView<'_> {
349 BufView { buf: &self.buf.as_ref()[self.body.clone()], body: &mut self.body }
350 }
351}
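
// A usage sketch for `Buf` (byte values are arbitrary): the body range selects
// a view into the backing storage, and the `ShrinkBuffer`/`GrowBuffer` methods
// move bytes between the body and the prefix/suffix regions.
#[cfg(test)]
mod buf_example {
    use super::*;

    #[test]
    fn body_range_tracks_shrink_and_grow() {
        let mut buf = Buf::new(vec![0u8, 1, 2, 3, 4, 5], 2..4);
        assert_eq!(buf.as_ref(), &[2, 3][..]);
        assert_eq!(buf.prefix_len(), 2);
        assert_eq!(buf.suffix_len(), 2);

        // Shrinking the front moves body bytes into the prefix...
        buf.shrink_front(1);
        assert_eq!(buf.as_ref(), &[3][..]);
        // ...and growing the front reclaims prefix bytes into the body.
        buf.grow_front(2);
        assert_eq!(buf.as_ref(), &[1, 2, 3][..]);
    }
}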
352
353impl<B: AsRef<[u8]> + AsMut<[u8]>> Buf<B> {
354 pub fn buffer_view_mut(&mut self) -> BufViewMut<'_> {
356 BufViewMut { buf: &mut self.buf.as_mut()[self.body.clone()], body: &mut self.body }
357 }
358}
359
360impl<B: AsRef<[u8]>> FragmentedBuffer for Buf<B> {
361 fragmented_buffer_method_impls!();
362}
363impl<B: AsRef<[u8]>> ContiguousBuffer for Buf<B> {}
364impl<B: AsRef<[u8]>> ShrinkBuffer for Buf<B> {
365 fn shrink<R: RangeBounds<usize>>(&mut self, range: R) {
366 let len = self.len();
367 let mut range = canonicalize_range(len, &range);
368 range.start += self.body.start;
369 range.end += self.body.start;
370 self.body = range;
371 }
372
373 fn shrink_front(&mut self, n: usize) {
374 assert!(n <= self.len());
375 self.body.start += n;
376 }
377 fn shrink_back(&mut self, n: usize) {
378 assert!(n <= self.len());
379 self.body.end -= n;
380 }
381}
382impl<B: AsRef<[u8]>> ParseBuffer for Buf<B> {
383 fn parse_with<'a, ParseArgs, P: ParsablePacket<&'a [u8], ParseArgs>>(
384 &'a mut self,
385 args: ParseArgs,
386 ) -> Result<P, P::Error> {
387 P::parse(self.buffer_view(), args)
388 }
389}
390
391impl<B: AsRef<[u8]> + AsMut<[u8]>> FragmentedBufferMut for Buf<B> {
392 fragmented_buffer_mut_method_impls!();
393}
394
395impl<B: AsRef<[u8]> + AsMut<[u8]>> ParseBufferMut for Buf<B> {
396 fn parse_with_mut<'a, ParseArgs, P: ParsablePacket<&'a mut [u8], ParseArgs>>(
397 &'a mut self,
398 args: ParseArgs,
399 ) -> Result<P, P::Error> {
400 P::parse_mut(self.buffer_view_mut(), args)
401 }
402}
403
404impl<B: AsRef<[u8]>> GrowBuffer for Buf<B> {
405 fn with_parts<'a, O, F>(&'a self, f: F) -> O
406 where
407 F: for<'b> FnOnce(&'a [u8], FragmentedBytes<'b, 'a>, &'a [u8]) -> O,
408 {
409 let (prefix, buf) = self.buf.as_ref().split_at(self.body.start);
410 let (body, suffix) = buf.split_at(self.body.end - self.body.start);
411 let mut body = [&body[..]];
412 f(prefix, body.as_fragmented_byte_slice(), suffix)
413 }
414 fn capacity(&self) -> usize {
415 self.buf.as_ref().len()
416 }
417 fn prefix_len(&self) -> usize {
418 self.body.start
419 }
420 fn suffix_len(&self) -> usize {
421 self.buf.as_ref().len() - self.body.end
422 }
423 fn grow_front(&mut self, n: usize) {
424 assert!(n <= self.body.start);
425 self.body.start -= n;
426 }
427 fn grow_back(&mut self, n: usize) {
428 assert!(n <= self.buf.as_ref().len() - self.body.end);
429 self.body.end += n;
430 }
431}
432
433impl<B: AsRef<[u8]> + AsMut<[u8]>> GrowBufferMut for Buf<B> {
434 fn with_parts_mut<'a, O, F>(&'a mut self, f: F) -> O
435 where
436 F: for<'b> FnOnce(&'a mut [u8], FragmentedBytesMut<'b, 'a>, &'a mut [u8]) -> O,
437 {
438 let (prefix, buf) = self.buf.as_mut().split_at_mut(self.body.start);
439 let (body, suffix) = buf.split_at_mut(self.body.end - self.body.start);
440 let mut body = [&mut body[..]];
441 f(prefix, body.as_fragmented_byte_slice(), suffix)
442 }
443
444 fn with_all_contents_mut<'a, O, F>(&'a mut self, f: F) -> O
445 where
446 F: for<'b> FnOnce(FragmentedBytesMut<'b, 'a>) -> O,
447 {
448 let mut all = [self.buf.as_mut()];
449 f(all.as_fragmented_byte_slice())
450 }
451}
452
453impl<B: AsRef<[u8]>> AsRef<[u8]> for Buf<B> {
454 fn as_ref(&self) -> &[u8] {
455 &self.buf.as_ref()[self.body.clone()]
456 }
457}
458
459impl<B: AsMut<[u8]>> AsMut<[u8]> for Buf<B> {
460 fn as_mut(&mut self) -> &mut [u8] {
461 &mut self.buf.as_mut()[self.body.clone()]
462 }
463}
464
465impl<B: AsRef<[u8]>> Buffer for Buf<B> {
466 fn parse_with_view<'a, ParseArgs, P: ParsablePacket<&'a [u8], ParseArgs>>(
467 &'a mut self,
468 args: ParseArgs,
469 ) -> Result<(P, &'a [u8]), P::Error> {
470 let Self { body, ref buf } = self;
471 let body_before = body.clone();
472 let view = BufView { buf: &buf.as_ref()[body.clone()], body };
473 P::parse(view, args).map(|r| (r, &buf.as_ref()[body_before]))
474 }
475}
476
477pub struct BufView<'a> {
482 buf: &'a [u8],
483 body: &'a mut Range<usize>,
484}
485
486impl<'a> BufferView<&'a [u8]> for BufView<'a> {
487 fn take_front(&mut self, n: usize) -> Option<&'a [u8]> {
488 if self.len() < n {
489 return None;
490 }
491 self.body.start += n;
492 Some(take_front(&mut self.buf, n))
493 }
494
495 fn take_back(&mut self, n: usize) -> Option<&'a [u8]> {
496 if self.len() < n {
497 return None;
498 }
499 self.body.end -= n;
500 Some(take_back(&mut self.buf, n))
501 }
502
503 fn into_rest(self) -> &'a [u8] {
504 self.buf
505 }
506}
507
508impl<'a> AsRef<[u8]> for BufView<'a> {
509 fn as_ref(&self) -> &[u8] {
510 self.buf
511 }
512}
513
514pub struct BufViewMut<'a> {
520 buf: &'a mut [u8],
521 body: &'a mut Range<usize>,
522}
523
524impl<'a> BufferView<&'a mut [u8]> for BufViewMut<'a> {
525 fn take_front(&mut self, n: usize) -> Option<&'a mut [u8]> {
526 if self.len() < n {
527 return None;
528 }
529 self.body.start += n;
530 Some(take_front_mut(&mut self.buf, n))
531 }
532
533 fn take_back(&mut self, n: usize) -> Option<&'a mut [u8]> {
534 if self.len() < n {
535 return None;
536 }
537 self.body.end -= n;
538 Some(take_back_mut(&mut self.buf, n))
539 }
540
541 fn into_rest(self) -> &'a mut [u8] {
542 self.buf
543 }
544}
545
546impl<'a> BufferViewMut<&'a mut [u8]> for BufViewMut<'a> {}
547
548impl<'a> AsRef<[u8]> for BufViewMut<'a> {
549 fn as_ref(&self) -> &[u8] {
550 self.buf
551 }
552}
553
554impl<'a> AsMut<[u8]> for BufViewMut<'a> {
555 fn as_mut(&mut self) -> &mut [u8] {
556 self.buf
557 }
558}
559
560#[derive(Copy, Clone, Debug, Eq, PartialEq)]
574pub struct PacketConstraints {
575 header_len: usize,
576 footer_len: usize,
577 min_body_len: usize,
578 max_body_len: usize,
579}
580
581impl PacketConstraints {
582 pub const UNCONSTRAINED: Self =
586 Self { header_len: 0, footer_len: 0, min_body_len: 0, max_body_len: usize::MAX };
587
588 #[inline]
596 pub fn new(
597 header_len: usize,
598 footer_len: usize,
599 min_body_len: usize,
600 max_body_len: usize,
601 ) -> PacketConstraints {
602 PacketConstraints::try_new(header_len, footer_len, min_body_len, max_body_len).expect(
603 "max_body_len < min_body_len or header_len + min_body_len + footer_len overflows usize",
604 )
605 }
606
607 #[inline]
613 pub fn try_new(
614 header_len: usize,
615 footer_len: usize,
616 min_body_len: usize,
617 max_body_len: usize,
618 ) -> Option<PacketConstraints> {
619 let header_min_body_footer_overflows = header_len
621 .checked_add(min_body_len)
622 .and_then(|sum| sum.checked_add(footer_len))
623 .is_none();
624 let max_less_than_min = max_body_len < min_body_len;
626 if max_less_than_min || header_min_body_footer_overflows {
627 return None;
628 }
629 Some(PacketConstraints { header_len, footer_len, min_body_len, max_body_len })
630 }
631
632 #[inline]
636 pub fn with_max_body_len(max_body_len: usize) -> PacketConstraints {
637 PacketConstraints { header_len: 0, footer_len: 0, min_body_len: 0, max_body_len }
642 }
643
644 #[inline]
646 pub fn header_len(&self) -> usize {
647 self.header_len
648 }
649
650 #[inline]
652 pub fn footer_len(&self) -> usize {
653 self.footer_len
654 }
655
656 #[inline]
672 pub fn min_body_len(&self) -> usize {
673 self.min_body_len
674 }
675
676 #[inline]
680 pub fn max_body_len(&self) -> usize {
681 self.max_body_len
682 }
683
684 pub fn try_encapsulate(&self, outer: &Self) -> Option<PacketConstraints> {
694 let inner = self;
695 let header_len = inner.header_len.checked_add(outer.header_len)?;
697 let footer_len = inner.footer_len.checked_add(outer.footer_len)?;
699 let inner_header_footer_len = inner.header_len + inner.footer_len;
702 let min_body_len = cmp::max(
706 outer.min_body_len.saturating_sub(inner_header_footer_len),
707 inner.min_body_len,
708 );
709 let max_body_len =
714 cmp::min(outer.max_body_len.checked_sub(inner_header_footer_len)?, inner.max_body_len);
715 PacketConstraints::try_new(header_len, footer_len, min_body_len, max_body_len)
719 }
720}
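
// A worked sketch of `try_encapsulate` (all lengths are arbitrary): the
// receiver is this layer's own constraints and the argument is the constraints
// imposed from outside, mirroring how `Nested::serialize` combines them.
#[cfg(test)]
mod packet_constraints_example {
    use super::*;

    #[test]
    fn encapsulation_adds_overhead_and_tightens_the_body_budget() {
        // An inner layer with a 4-byte header and a 2-byte footer...
        let inner = PacketConstraints::new(4, 2, 0, usize::MAX);
        // ...nested inside an outer layer that allows at most 100 body bytes.
        let outer = PacketConstraints::with_max_body_len(100);

        let combined = inner.try_encapsulate(&outer).unwrap();
        assert_eq!(combined.header_len(), 4);
        assert_eq!(combined.footer_len(), 2);
        // The inner layer's 6 bytes of overhead come out of the body budget.
        assert_eq!(combined.max_body_len(), 94);
    }
}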
721
722pub struct SerializeTarget<'a> {
725 #[allow(missing_docs)]
726 pub header: &'a mut [u8],
727 #[allow(missing_docs)]
728 pub footer: &'a mut [u8],
729}
730
731pub trait PacketBuilder: Sized {
742 fn constraints(&self) -> PacketConstraints;
744
745 fn serialize(&self, target: &mut SerializeTarget<'_>, body: FragmentedBytesMut<'_, '_>);
774
775 #[inline]
780 fn wrap_body<B>(self, body: B) -> Nested<B, Self> {
781 Nested { inner: body, outer: self }
782 }
783}
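
// A minimal hand-written `PacketBuilder` sketch: `DemoHeader` is a hypothetical
// two-byte big-endian length header used only for illustration (no footer, no
// body-length constraints).
#[cfg(test)]
mod packet_builder_example {
    use super::*;

    #[derive(Debug)]
    struct DemoHeader;

    impl PacketBuilder for DemoHeader {
        fn constraints(&self) -> PacketConstraints {
            PacketConstraints::new(2, 0, 0, usize::MAX)
        }

        fn serialize(&self, target: &mut SerializeTarget<'_>, body: FragmentedBytesMut<'_, '_>) {
            // The caller provides exactly the header/footer space requested by
            // `constraints`, with the body already serialized between them.
            let len = u16::try_from(body.len()).unwrap_or(u16::MAX);
            target.header.copy_from_slice(&len.to_be_bytes());
        }
    }

    #[test]
    fn wraps_a_body_with_its_length() {
        let body: &[u8] = &[1, 2, 3];
        let buf = body.into_serializer().wrap_in(DemoHeader).serialize_vec_outer().unwrap();
        assert_eq!(buf.as_ref(), &[0, 3, 1, 2, 3][..]);
    }
}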
784
785impl<'a, B: PacketBuilder> PacketBuilder for &'a B {
786 #[inline]
787 fn constraints(&self) -> PacketConstraints {
788 B::constraints(self)
789 }
790 #[inline]
791 fn serialize(&self, target: &mut SerializeTarget<'_>, body: FragmentedBytesMut<'_, '_>) {
792 B::serialize(self, target, body)
793 }
794}
795
796impl<'a, B: PacketBuilder> PacketBuilder for &'a mut B {
797 #[inline]
798 fn constraints(&self) -> PacketConstraints {
799 B::constraints(self)
800 }
801 #[inline]
802 fn serialize(&self, target: &mut SerializeTarget<'_>, body: FragmentedBytesMut<'_, '_>) {
803 B::serialize(self, target, body)
804 }
805}
806
807impl PacketBuilder for () {
808 #[inline]
809 fn constraints(&self) -> PacketConstraints {
810 PacketConstraints::UNCONSTRAINED
811 }
812 #[inline]
813 fn serialize(&self, _target: &mut SerializeTarget<'_>, _body: FragmentedBytesMut<'_, '_>) {}
814}
815
816impl PacketBuilder for Never {
817 fn constraints(&self) -> PacketConstraints {
818 match *self {}
819 }
820 fn serialize(&self, _target: &mut SerializeTarget<'_>, _body: FragmentedBytesMut<'_, '_>) {}
821}
822
823#[derive(Copy, Clone, Debug, Eq, PartialEq)]
831pub struct Nested<I, O> {
832 inner: I,
833 outer: O,
834}
835
836impl<I, O> Nested<I, O> {
837 #[inline]
840 pub fn into_inner(self) -> I {
841 self.inner
842 }
843
844 #[inline]
847 pub fn into_outer(self) -> O {
848 self.outer
849 }
850
851 #[inline]
852 pub fn inner(&self) -> &I {
853 &self.inner
854 }
855
856 #[inline]
857 pub fn inner_mut(&mut self) -> &mut I {
858 &mut self.inner
859 }
860
861 #[inline]
862 pub fn outer(&self) -> &O {
863 &self.outer
864 }
865
866 #[inline]
867 pub fn outer_mut(&mut self) -> &mut O {
868 &mut self.outer
869 }
870}
871
872#[derive(Copy, Clone, Debug)]
878#[cfg_attr(test, derive(Eq, PartialEq))]
879pub struct LimitedSizePacketBuilder {
880 pub limit: usize,
882}
883
884impl PacketBuilder for LimitedSizePacketBuilder {
885 fn constraints(&self) -> PacketConstraints {
886 PacketConstraints::with_max_body_len(self.limit)
887 }
888
889 fn serialize(&self, _target: &mut SerializeTarget<'_>, _body: FragmentedBytesMut<'_, '_>) {}
890}
891
892pub trait InnerPacketBuilder {
906 fn bytes_len(&self) -> usize;
908
909 fn serialize(&self, buffer: &mut [u8]);
924
925 #[inline]
932 fn into_serializer(self) -> InnerSerializer<Self, EmptyBuf>
933 where
934 Self: Sized,
935 {
936 self.into_serializer_with(EmptyBuf)
937 }
938
939 fn into_serializer_with<B: ShrinkBuffer>(self, mut buffer: B) -> InnerSerializer<Self, B>
952 where
953 Self: Sized,
954 {
955 buffer.shrink_back_to(0);
956 InnerSerializer { inner: self, buffer }
957 }
958}
959
960impl<'a, I: InnerPacketBuilder> InnerPacketBuilder for &'a I {
961 #[inline]
962 fn bytes_len(&self) -> usize {
963 I::bytes_len(self)
964 }
965 #[inline]
966 fn serialize(&self, buffer: &mut [u8]) {
967 I::serialize(self, buffer)
968 }
969}
970impl<'a, I: InnerPacketBuilder> InnerPacketBuilder for &'a mut I {
971 #[inline]
972 fn bytes_len(&self) -> usize {
973 I::bytes_len(self)
974 }
975 #[inline]
976 fn serialize(&self, buffer: &mut [u8]) {
977 I::serialize(self, buffer)
978 }
979}
980impl<'a> InnerPacketBuilder for &'a [u8] {
981 #[inline]
982 fn bytes_len(&self) -> usize {
983 self.len()
984 }
985 #[inline]
986 fn serialize(&self, buffer: &mut [u8]) {
987 buffer.copy_from_slice(self);
988 }
989}
990impl<'a> InnerPacketBuilder for &'a mut [u8] {
991 #[inline]
992 fn bytes_len(&self) -> usize {
993 self.len()
994 }
995 #[inline]
996 fn serialize(&self, buffer: &mut [u8]) {
997 buffer.copy_from_slice(self);
998 }
999}
impl InnerPacketBuilder for Vec<u8> {
    #[inline]
    fn bytes_len(&self) -> usize {
        self.len()
    }
    #[inline]
    fn serialize(&self, buffer: &mut [u8]) {
        buffer.copy_from_slice(self.as_slice());
    }
}
1010impl<const N: usize> InnerPacketBuilder for ArrayVec<u8, N> {
1011 fn bytes_len(&self) -> usize {
1012 self.as_slice().bytes_len()
1013 }
1014 fn serialize(&self, buffer: &mut [u8]) {
1015 self.as_slice().serialize(buffer);
1016 }
1017}
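
// A sketch of implementing `InnerPacketBuilder` for an application-defined
// message: `Ping` is a hypothetical type used only for illustration.
#[cfg(test)]
mod inner_packet_builder_example {
    use super::*;

    #[derive(Debug)]
    struct Ping {
        seq: u32,
    }

    impl InnerPacketBuilder for Ping {
        fn bytes_len(&self) -> usize {
            4
        }
        fn serialize(&self, buffer: &mut [u8]) {
            // `buffer` is exactly `bytes_len()` bytes long.
            buffer.copy_from_slice(&self.seq.to_be_bytes());
        }
    }

    #[test]
    fn ping_serializes_as_its_sequence_number() {
        let buf = Ping { seq: 0x0102_0304 }.into_serializer().serialize_vec_outer().unwrap();
        assert_eq!(buf.as_ref(), &[1, 2, 3, 4][..]);
    }
}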
1018
1019pub struct ByteSliceInnerPacketBuilder<B>(pub B);
1026
1027impl<B: SplitByteSlice> InnerPacketBuilder for ByteSliceInnerPacketBuilder<B> {
1028 fn bytes_len(&self) -> usize {
1029 self.0.deref().bytes_len()
1030 }
1031 fn serialize(&self, buffer: &mut [u8]) {
1032 self.0.deref().serialize(buffer)
1033 }
1034}
1035
1036impl<B: SplitByteSlice> Debug for ByteSliceInnerPacketBuilder<B> {
1037 fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
1038 write!(f, "ByteSliceInnerPacketBuilder({:?})", self.0.as_ref())
1039 }
1040}
1041
1042#[derive(Copy, Clone, Debug, Eq, PartialEq)]
1049pub enum SerializeError<A> {
1050 Alloc(A),
1052 SizeLimitExceeded,
1054}
1055
1056impl<A> SerializeError<A> {
1057 #[inline]
1059 pub fn is_alloc(&self) -> bool {
1060 match self {
1061 SerializeError::Alloc(_) => true,
1062 SerializeError::SizeLimitExceeded => false,
1063 }
1064 }
1065
1066 #[inline]
1068 pub fn is_size_limit_exceeded(&self) -> bool {
1069 match self {
1070 SerializeError::Alloc(_) => false,
1071 SerializeError::SizeLimitExceeded => true,
1072 }
1073 }
1074
1075 pub fn map_alloc<T, F: FnOnce(A) -> T>(self, f: F) -> SerializeError<T> {
1077 match self {
1078 SerializeError::Alloc(a) => SerializeError::Alloc(f(a)),
1079 SerializeError::SizeLimitExceeded => SerializeError::SizeLimitExceeded,
1080 }
1081 }
1082}
1083
1084impl<A> From<A> for SerializeError<A> {
1085 fn from(a: A) -> SerializeError<A> {
1086 SerializeError::Alloc(a)
1087 }
1088}
1089
1090#[derive(Copy, Clone, Debug, Eq, PartialEq)]
1099pub struct BufferTooShortError;
1100
1101pub trait BufferProvider<Input, Output> {
1118 type Error;
1122
1123 fn alloc_no_reuse(
1133 self,
1134 prefix: usize,
1135 body: usize,
1136 suffix: usize,
1137 ) -> Result<Output, Self::Error>;
1138
1139 fn reuse_or_realloc(
1152 self,
1153 buffer: Input,
1154 prefix: usize,
1155 suffix: usize,
1156 ) -> Result<Output, (Self::Error, Input)>;
1157}
1158
1159pub trait BufferAlloc<Output> {
1180 type Error;
1184
1185 fn alloc(self, len: usize) -> Result<Output, Self::Error>;
1187}
1188
1189impl<O, E, F: FnOnce(usize) -> Result<O, E>> BufferAlloc<O> for F {
1190 type Error = E;
1191
1192 #[inline]
1193 fn alloc(self, len: usize) -> Result<O, E> {
1194 self(len)
1195 }
1196}
1197
1198impl BufferAlloc<Never> for () {
1199 type Error = ();
1200
1201 #[inline]
1202 fn alloc(self, _len: usize) -> Result<Never, ()> {
1203 Err(())
1204 }
1205}
1206
1207pub fn new_buf_vec(len: usize) -> Result<Buf<Vec<u8>>, Never> {
1218 Ok(Buf::new(vec![0; len], ..))
1219}
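
// A sketch of the closure-based `BufferAlloc` impl: `new_buf_vec` and ad-hoc
// closures can both serve as allocators (the 2-byte cutoff is arbitrary).
#[cfg(test)]
mod buffer_alloc_example {
    use super::*;

    #[test]
    fn functions_and_closures_are_allocators() {
        assert_eq!(new_buf_vec.alloc(4).unwrap().len(), 4);

        // An allocator that refuses to hand out more than 2 bytes.
        let stingy =
            |len: usize| if len <= 2 { Ok(Buf::new(vec![0u8; len], ..)) } else { Err(()) };
        assert!(stingy.alloc(4).is_err());
    }
}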
1220
1221pub trait LayoutBufferAlloc<O> {
1224 type Error;
1228
1229 fn layout_alloc(self, prefix: usize, body: usize, suffix: usize) -> Result<O, Self::Error>;
1232}
1233
1234impl<O: ShrinkBuffer, E, F: FnOnce(usize) -> Result<O, E>> LayoutBufferAlloc<O> for F {
1235 type Error = E;
1236
1237 #[inline]
1238 fn layout_alloc(self, prefix: usize, body: usize, suffix: usize) -> Result<O, E> {
1239 let mut b = self(prefix + body + suffix)?;
1240 b.shrink_front(prefix);
1241 b.shrink_back(suffix);
1242 Ok(b)
1243 }
1244}
1245
1246impl LayoutBufferAlloc<Never> for () {
1247 type Error = ();
1248
1249 #[inline]
1250 fn layout_alloc(self, _prefix: usize, _body: usize, _suffix: usize) -> Result<Never, ()> {
1251 Err(())
1252 }
1253}
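
// A sketch of the blanket `LayoutBufferAlloc` impl above: a plain length-based
// allocator such as `new_buf_vec` yields a buffer whose body is already
// positioned between the requested prefix and suffix.
#[cfg(test)]
mod layout_buffer_alloc_example {
    use super::*;

    #[test]
    fn allocates_with_a_prefix_body_suffix_layout() {
        let buf = new_buf_vec.layout_alloc(2, 3, 1).unwrap();
        assert_eq!(buf.prefix_len(), 2);
        assert_eq!(buf.len(), 3);
        assert_eq!(buf.suffix_len(), 1);
    }
}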
1254
1255#[inline]
1277pub fn try_reuse_buffer<B: GrowBufferMut + ShrinkBuffer>(
1278 mut buffer: B,
1279 prefix: usize,
1280 suffix: usize,
1281 max_copy_bytes: usize,
1282) -> Result<B, B> {
1283 let need_prefix = prefix;
1284 let need_suffix = suffix;
1285 let have_prefix = buffer.prefix_len();
1286 let have_body = buffer.len();
1287 let have_suffix = buffer.suffix_len();
1288 let need_capacity = need_prefix + have_body + need_suffix;
1289
1290 if have_prefix >= need_prefix && have_suffix >= need_suffix {
1291 Ok(buffer)
1293 } else if buffer.capacity() >= need_capacity && have_body <= max_copy_bytes {
1294 buffer.reset();
1298
1299 buffer.copy_within(have_prefix..(have_prefix + have_body), need_prefix);
1305 buffer.shrink(need_prefix..(need_prefix + have_body));
1306 debug_assert_eq!(buffer.prefix_len(), need_prefix);
1307 debug_assert!(buffer.suffix_len() >= need_suffix);
1308 debug_assert_eq!(buffer.len(), have_body);
1309 Ok(buffer)
1310 } else {
1311 Err(buffer)
1312 }
1313}
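
// A sketch of `try_reuse_buffer` behavior (all sizes are arbitrary): reuse in
// place when the prefix/suffix already fit, shift the body when only the total
// capacity suffices, and fail otherwise.
#[cfg(test)]
mod try_reuse_buffer_example {
    use super::*;

    #[test]
    fn reuses_in_place_or_moves_the_body() {
        // 2 bytes of prefix, a 2-byte body, and 2 bytes of suffix.
        let buf = Buf::new(vec![0u8, 0, 1, 2, 0, 0], 2..4);

        // No more prefix/suffix needed than is available: reused as-is.
        let buf = try_reuse_buffer(buf, 2, 2, usize::MAX).unwrap();
        assert_eq!(buf.as_ref(), &[1, 2][..]);

        // A 4-byte prefix still fits in the total capacity of 6, so the body
        // is copied toward the back instead of reallocating.
        let buf = try_reuse_buffer(buf, 4, 0, usize::MAX).unwrap();
        assert_eq!(buf.prefix_len(), 4);
        assert_eq!(buf.as_ref(), &[1, 2][..]);

        // More capacity than exists: the buffer is handed back via `Err`.
        assert!(try_reuse_buffer(buf, 8, 8, usize::MAX).is_err());
    }
}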
1314
1315pub struct MaybeReuseBufferProvider<A>(pub A);
1319
1320impl<I: ReusableBuffer, O: ReusableBuffer, A: BufferAlloc<O>> BufferProvider<I, Either<I, O>>
1321 for MaybeReuseBufferProvider<A>
1322{
1323 type Error = A::Error;
1324
1325 fn alloc_no_reuse(
1326 self,
1327 prefix: usize,
1328 body: usize,
1329 suffix: usize,
1330 ) -> Result<Either<I, O>, Self::Error> {
1331 let Self(alloc) = self;
1332 let need_capacity = prefix + body + suffix;
1333 BufferAlloc::alloc(alloc, need_capacity).map(|mut buf| {
1334 buf.shrink(prefix..(prefix + body));
1335 Either::B(buf)
1336 })
1337 }
1338
1339 #[inline]
1348 fn reuse_or_realloc(
1349 self,
1350 buffer: I,
1351 need_prefix: usize,
1352 need_suffix: usize,
1353 ) -> Result<Either<I, O>, (A::Error, I)> {
1354 match try_reuse_buffer(buffer, need_prefix, need_suffix, usize::MAX) {
1359 Ok(buffer) => Ok(Either::A(buffer)),
1360 Err(buffer) => {
1361 let have_body = buffer.len();
1362 let mut buf = match BufferProvider::<I, Either<I, O>>::alloc_no_reuse(
1363 self,
1364 need_prefix,
1365 have_body,
1366 need_suffix,
1367 ) {
1368 Ok(buf) => buf,
1369 Err(err) => return Err((err, buffer)),
1370 };
1371
1372 buf.copy_from(&buffer);
1373 debug_assert_eq!(buf.prefix_len(), need_prefix);
1374 debug_assert!(buf.suffix_len() >= need_suffix);
1375 debug_assert_eq!(buf.len(), have_body);
1376 Ok(buf)
1377 }
1378 }
1379 }
1380}
1381
1382impl<B: ReusableBuffer, A: BufferAlloc<B>> BufferProvider<B, B> for MaybeReuseBufferProvider<A> {
1383 type Error = A::Error;
1384
1385 fn alloc_no_reuse(self, prefix: usize, body: usize, suffix: usize) -> Result<B, Self::Error> {
1386 BufferProvider::<B, Either<B, B>>::alloc_no_reuse(self, prefix, body, suffix)
1387 .map(Either::into_inner)
1388 }
1389
1390 #[inline]
1399 fn reuse_or_realloc(self, buffer: B, prefix: usize, suffix: usize) -> Result<B, (A::Error, B)> {
1400 BufferProvider::<B, Either<B, B>>::reuse_or_realloc(self, buffer, prefix, suffix)
1401 .map(Either::into_inner)
1402 }
1403}
1404
1405pub struct NoReuseBufferProvider<A>(pub A);
1409
1410impl<I: FragmentedBuffer, O: ReusableBuffer, A: BufferAlloc<O>> BufferProvider<I, O>
1411 for NoReuseBufferProvider<A>
1412{
1413 type Error = A::Error;
1414
1415 fn alloc_no_reuse(self, prefix: usize, body: usize, suffix: usize) -> Result<O, A::Error> {
1416 let Self(alloc) = self;
1417 alloc.alloc(prefix + body + suffix).map(|mut b| {
1418 b.shrink(prefix..prefix + body);
1419 b
1420 })
1421 }
1422
1423 fn reuse_or_realloc(self, buffer: I, prefix: usize, suffix: usize) -> Result<O, (A::Error, I)> {
1424 BufferProvider::<I, O>::alloc_no_reuse(self, prefix, buffer.len(), suffix)
1425 .map(|mut b| {
1426 b.copy_from(&buffer);
1427 b
1428 })
1429 .map_err(|e| (e, buffer))
1430 }
1431}
1432
1433pub trait Serializer: Sized {
1434 type Buffer;
1436
1437 fn serialize<B: GrowBufferMut, P: BufferProvider<Self::Buffer, B>>(
1450 self,
1451 outer: PacketConstraints,
1452 provider: P,
1453 ) -> Result<B, (SerializeError<P::Error>, Self)>;
1454
1455 fn serialize_new_buf<B: GrowBufferMut, A: LayoutBufferAlloc<B>>(
1462 &self,
1463 outer: PacketConstraints,
1464 alloc: A,
1465 ) -> Result<B, SerializeError<A::Error>>;
1466
1467 #[inline]
1483 #[allow(clippy::type_complexity)]
1484 fn serialize_vec(
1485 self,
1486 outer: PacketConstraints,
1487 ) -> Result<Either<Self::Buffer, Buf<Vec<u8>>>, (SerializeError<Never>, Self)>
1488 where
1489 Self::Buffer: ReusableBuffer,
1490 {
1491 self.serialize(outer, MaybeReuseBufferProvider(new_buf_vec))
1492 }
1493
1494 #[inline]
1508 fn serialize_no_alloc(
1509 self,
1510 outer: PacketConstraints,
1511 ) -> Result<Self::Buffer, (SerializeError<BufferTooShortError>, Self)>
1512 where
1513 Self::Buffer: ReusableBuffer,
1514 {
1515 self.serialize(outer, MaybeReuseBufferProvider(())).map(Either::into_a).map_err(
1516 |(err, slf)| {
1517 (
1518 match err {
1519 SerializeError::Alloc(()) => BufferTooShortError.into(),
1520 SerializeError::SizeLimitExceeded => SerializeError::SizeLimitExceeded,
1521 },
1522 slf,
1523 )
1524 },
1525 )
1526 }
1527
1528 #[inline]
1537 fn serialize_outer<B: GrowBufferMut, P: BufferProvider<Self::Buffer, B>>(
1538 self,
1539 provider: P,
1540 ) -> Result<B, (SerializeError<P::Error>, Self)> {
1541 self.serialize(PacketConstraints::UNCONSTRAINED, provider)
1542 }
1543
1544 #[inline]
1555 #[allow(clippy::type_complexity)]
1556 fn serialize_vec_outer(
1557 self,
1558 ) -> Result<Either<Self::Buffer, Buf<Vec<u8>>>, (SerializeError<Never>, Self)>
1559 where
1560 Self::Buffer: ReusableBuffer,
1561 {
1562 self.serialize_vec(PacketConstraints::UNCONSTRAINED)
1563 }
1564
1565 #[inline]
1575 fn serialize_no_alloc_outer(
1576 self,
1577 ) -> Result<Self::Buffer, (SerializeError<BufferTooShortError>, Self)>
1578 where
1579 Self::Buffer: ReusableBuffer,
1580 {
1581 self.serialize_no_alloc(PacketConstraints::UNCONSTRAINED)
1582 }
1583
1584 #[inline]
1587 fn serialize_vec_outer_no_reuse(&self) -> Result<Buf<Vec<u8>>, SerializeError<Never>> {
1588 self.serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec)
1589 }
1590
1591 #[inline]
1598 fn wrap_in<B: PacketBuilder>(self, outer: B) -> Nested<Self, B> {
1599 outer.wrap_body(self)
1600 }
1601
1602 #[inline]
1611 fn with_size_limit(self, limit: usize) -> Nested<Self, LimitedSizePacketBuilder> {
1612 self.wrap_in(LimitedSizePacketBuilder { limit })
1613 }
1614}
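
// A sketch of the typical `Serializer` flow (byte values are arbitrary): a body
// is wrapped in an outer `PacketBuilder` (here just a size limit) and
// serialized into a freshly allocated `Buf<Vec<u8>>`.
#[cfg(test)]
mod serializer_example {
    use super::*;

    #[test]
    fn size_limits_are_enforced_at_serialization_time() {
        let body: &[u8] = &[0xAA; 4];

        // Within the limit: serialization succeeds and yields the body bytes.
        let buf = body.into_serializer().with_size_limit(4).serialize_vec_outer().unwrap();
        assert_eq!(buf.as_ref(), body);

        // Over the limit: the serializer is handed back alongside the error.
        let (err, _ser) =
            body.into_serializer().with_size_limit(3).serialize_vec_outer().unwrap_err();
        assert_eq!(err, SerializeError::SizeLimitExceeded);
    }
}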
1615
1616#[derive(Copy, Clone, Debug, Eq, PartialEq)]
1623pub struct InnerSerializer<I, B> {
1624 inner: I,
1625 buffer: B,
1630}
1631
1632impl<I, B> InnerSerializer<I, B> {
1633 pub fn inner(&self) -> &I {
1634 &self.inner
1635 }
1636}
1637
1638struct InnerPacketBuilderWrapper<I>(I);
1644
1645impl<I: InnerPacketBuilder> PacketBuilder for InnerPacketBuilderWrapper<I> {
1646 fn constraints(&self) -> PacketConstraints {
1647 let Self(wrapped) = self;
1648 PacketConstraints::new(wrapped.bytes_len(), 0, 0, usize::MAX)
1649 }
1650
1651 fn serialize(&self, target: &mut SerializeTarget<'_>, _body: FragmentedBytesMut<'_, '_>) {
1652 let Self(wrapped) = self;
1653
1654 debug_assert_eq!(target.header.len(), wrapped.bytes_len());
1658 debug_assert_eq!(target.footer.len(), 0);
1659
1660 InnerPacketBuilder::serialize(wrapped, target.header);
1661 }
1662}
1663
1664impl<I: InnerPacketBuilder, B: GrowBuffer + ShrinkBuffer> Serializer for InnerSerializer<I, B> {
1665 type Buffer = B;
1666
1667 #[inline]
1668 fn serialize<BB: GrowBufferMut, P: BufferProvider<B, BB>>(
1669 self,
1670 outer: PacketConstraints,
1671 provider: P,
1672 ) -> Result<BB, (SerializeError<P::Error>, InnerSerializer<I, B>)> {
1673 debug_assert_eq!(self.buffer.len(), 0);
1674 InnerPacketBuilderWrapper(self.inner)
1675 .wrap_body(self.buffer)
1676 .serialize(outer, provider)
1677 .map_err(|(err, Nested { inner: buffer, outer: pb })| {
1678 (err, InnerSerializer { inner: pb.0, buffer })
1679 })
1680 }
1681
1682 #[inline]
1683 fn serialize_new_buf<BB: GrowBufferMut, A: LayoutBufferAlloc<BB>>(
1684 &self,
1685 outer: PacketConstraints,
1686 alloc: A,
1687 ) -> Result<BB, SerializeError<A::Error>> {
1688 InnerPacketBuilderWrapper(&self.inner).wrap_body(EmptyBuf).serialize_new_buf(outer, alloc)
1689 }
1690}
1691
1692impl<B: GrowBuffer + ShrinkBuffer> Serializer for B {
1693 type Buffer = B;
1694
1695 #[inline]
1696 fn serialize<BB: GrowBufferMut, P: BufferProvider<Self::Buffer, BB>>(
1697 self,
1698 outer: PacketConstraints,
1699 provider: P,
1700 ) -> Result<BB, (SerializeError<P::Error>, Self)> {
1701 TruncatingSerializer::new(self, TruncateDirection::NoTruncating)
1702 .serialize(outer, provider)
1703 .map_err(|(err, ser)| (err, ser.buffer))
1704 }
1705
1706 fn serialize_new_buf<BB: GrowBufferMut, A: LayoutBufferAlloc<BB>>(
1707 &self,
1708 outer: PacketConstraints,
1709 alloc: A,
1710 ) -> Result<BB, SerializeError<A::Error>> {
1711 if self.len() > outer.max_body_len() {
1712 return Err(SerializeError::SizeLimitExceeded);
1713 }
1714
1715 let padding = outer.min_body_len().saturating_sub(self.len());
1716 let tail_size = padding + outer.footer_len();
1717 let mut buffer = alloc.layout_alloc(outer.header_len(), self.len(), tail_size)?;
1718 buffer.copy_from(self);
1719 buffer.grow_back(padding);
1720 Ok(buffer)
1721 }
1722}
1723
1724pub enum EitherSerializer<A, B> {
1728 A(A),
1729 B(B),
1730}
1731
1732impl<A: Serializer, B: Serializer<Buffer = A::Buffer>> Serializer for EitherSerializer<A, B> {
1733 type Buffer = A::Buffer;
1734
1735 fn serialize<TB: GrowBufferMut, P: BufferProvider<Self::Buffer, TB>>(
1736 self,
1737 outer: PacketConstraints,
1738 provider: P,
1739 ) -> Result<TB, (SerializeError<P::Error>, Self)> {
1740 match self {
1741 EitherSerializer::A(s) => {
1742 s.serialize(outer, provider).map_err(|(err, s)| (err, EitherSerializer::A(s)))
1743 }
1744 EitherSerializer::B(s) => {
1745 s.serialize(outer, provider).map_err(|(err, s)| (err, EitherSerializer::B(s)))
1746 }
1747 }
1748 }
1749
1750 fn serialize_new_buf<TB: GrowBufferMut, BA: LayoutBufferAlloc<TB>>(
1751 &self,
1752 outer: PacketConstraints,
1753 alloc: BA,
1754 ) -> Result<TB, SerializeError<BA::Error>> {
1755 match self {
1756 EitherSerializer::A(s) => s.serialize_new_buf(outer, alloc),
1757 EitherSerializer::B(s) => s.serialize_new_buf(outer, alloc),
1758 }
1759 }
1760}
1761
1762#[derive(Copy, Clone, Debug, Eq, PartialEq)]
1765pub enum TruncateDirection {
1766 DiscardFront,
1769 DiscardBack,
1772 NoTruncating,
1774}
1775
1776#[derive(Copy, Clone, Debug, Eq, PartialEq)]
1788pub struct TruncatingSerializer<B> {
1789 buffer: B,
1790 direction: TruncateDirection,
1791}
1792
1793impl<B> TruncatingSerializer<B> {
1794 pub fn new(buffer: B, direction: TruncateDirection) -> TruncatingSerializer<B> {
1796 TruncatingSerializer { buffer, direction }
1797 }
1798
1799 pub fn buffer(&self) -> &B {
1801 &self.buffer
1802 }
1803
1804 pub fn buffer_mut(&mut self) -> &mut B {
1806 &mut self.buffer
1807 }
1808}
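
// A sketch contrasting a plain buffer with a `TruncatingSerializer` (values are
// arbitrary): the former fails when a size limit is exceeded, the latter trims
// the body from the chosen end.
#[cfg(test)]
mod truncating_serializer_example {
    use super::*;

    #[test]
    fn truncates_instead_of_failing() {
        let body = || Buf::new(vec![1u8, 2, 3, 4, 5], ..);

        // A plain buffer refuses to serialize past the limit...
        assert!(body().with_size_limit(3).serialize_vec_outer().is_err());

        // ...while a truncating serializer discards the excess bytes.
        let buf = TruncatingSerializer::new(body(), TruncateDirection::DiscardBack)
            .with_size_limit(3)
            .serialize_vec_outer()
            .unwrap();
        assert_eq!(buf.as_ref(), &[1, 2, 3][..]);
    }
}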
1809
1810impl<B: GrowBuffer + ShrinkBuffer> Serializer for TruncatingSerializer<B> {
1811 type Buffer = B;
1812
1813 fn serialize<BB: GrowBufferMut, P: BufferProvider<B, BB>>(
1814 mut self,
1815 outer: PacketConstraints,
1816 provider: P,
1817 ) -> Result<BB, (SerializeError<P::Error>, Self)> {
1818 let original_len = self.buffer.len();
1819 let excess_bytes = if original_len > outer.max_body_len {
1820 Some(original_len - outer.max_body_len)
1821 } else {
1822 None
1823 };
1824 if let Some(excess_bytes) = excess_bytes {
1825 match self.direction {
1826 TruncateDirection::DiscardFront => self.buffer.shrink_front(excess_bytes),
1827 TruncateDirection::DiscardBack => self.buffer.shrink_back(excess_bytes),
1828 TruncateDirection::NoTruncating => {
1829 return Err((SerializeError::SizeLimitExceeded, self));
1830 }
1831 }
1832 }
1833
1834 let padding = outer.min_body_len().saturating_sub(self.buffer.len());
1835
1836 debug_assert!(self.buffer.len() + padding <= outer.max_body_len());
1840 match provider.reuse_or_realloc(
1841 self.buffer,
1842 outer.header_len(),
1843 padding + outer.footer_len(),
1844 ) {
1845 Ok(buffer) => Ok(buffer),
1846 Err((err, mut buffer)) => {
1847 if let Some(excess_bytes) = excess_bytes {
1851 match self.direction {
1852 TruncateDirection::DiscardFront => buffer.grow_front(excess_bytes),
1853 TruncateDirection::DiscardBack => buffer.grow_back(excess_bytes),
1854 TruncateDirection::NoTruncating => unreachable!(),
1855 }
1856 }
1857
1858 Err((
1859 SerializeError::Alloc(err),
1860 TruncatingSerializer { buffer, direction: self.direction },
1861 ))
1862 }
1863 }
1864 }
1865
1866 fn serialize_new_buf<BB: GrowBufferMut, A: LayoutBufferAlloc<BB>>(
1867 &self,
1868 outer: PacketConstraints,
1869 alloc: A,
1870 ) -> Result<BB, SerializeError<A::Error>> {
1871 let truncated_size = cmp::min(self.buffer.len(), outer.max_body_len());
1872 let discarded_bytes = self.buffer.len() - truncated_size;
1873 let padding = outer.min_body_len().saturating_sub(truncated_size);
1874 let tail_size = padding + outer.footer_len();
1875 let mut buffer = alloc.layout_alloc(outer.header_len(), truncated_size, tail_size)?;
1876 buffer.with_bytes_mut(|mut dst| {
1877 self.buffer.with_bytes(|src| {
1878 let src = match (discarded_bytes > 0, self.direction) {
1879 (false, _) => src,
1880 (true, TruncateDirection::DiscardFront) => src.slice(discarded_bytes..),
1881 (true, TruncateDirection::DiscardBack) => src.slice(..truncated_size),
1882 (true, TruncateDirection::NoTruncating) => {
1883 return Err(SerializeError::SizeLimitExceeded);
1884 }
1885 };
1886 dst.copy_from(&src);
1887 Ok(())
1888 })
1889 })?;
1890 buffer.grow_back_zero(padding);
1891 Ok(buffer)
1892 }
1893}
1894
1895impl<I: Serializer, O: PacketBuilder> Serializer for Nested<I, O> {
1896 type Buffer = I::Buffer;
1897
1898 #[inline]
1899 fn serialize<B: GrowBufferMut, P: BufferProvider<I::Buffer, B>>(
1900 self,
1901 outer: PacketConstraints,
1902 provider: P,
1903 ) -> Result<B, (SerializeError<P::Error>, Self)> {
1904 let Some(outer) = self.outer.constraints().try_encapsulate(&outer) else {
1905 return Err((SerializeError::SizeLimitExceeded, self));
1906 };
1907
1908 match self.inner.serialize(outer, provider) {
1909 Ok(mut buf) => {
1910 buf.serialize(&self.outer);
1911 Ok(buf)
1912 }
1913 Err((err, inner)) => Err((err, self.outer.wrap_body(inner))),
1914 }
1915 }
1916
1917 #[inline]
1918 fn serialize_new_buf<B: GrowBufferMut, A: LayoutBufferAlloc<B>>(
1919 &self,
1920 outer: PacketConstraints,
1921 alloc: A,
1922 ) -> Result<B, SerializeError<A::Error>> {
1923 let Some(outer) = self.outer.constraints().try_encapsulate(&outer) else {
1924 return Err(SerializeError::SizeLimitExceeded);
1925 };
1926
1927 let mut buf = self.inner.serialize_new_buf(outer, alloc)?;
1928 GrowBufferMut::serialize(&mut buf, &self.outer);
1929 Ok(buf)
1930 }
1931}
1932
1933pub trait PartialPacketBuilder: PacketBuilder {
1935 fn partial_serialize(&self, body_len: usize, buffer: &mut [u8]);
1944}
1945
1946impl PartialPacketBuilder for () {
1947 fn partial_serialize(&self, _body_len: usize, _buffer: &mut [u8]) {}
1948}
1949
1950#[derive(Debug, Eq, PartialEq)]
1952pub struct PartialSerializeResult {
1953 pub bytes_written: usize,
1955
1956 pub total_size: usize,
1958}
1959
1960pub trait PartialSerializer {
1965 fn partial_serialize(
1975 &self,
1976 outer: PacketConstraints,
1977 buffer: &mut [u8],
1978 ) -> Result<PartialSerializeResult, SerializeError<Never>>;
1979}
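
// A sketch of `partial_serialize` (sizes are arbitrary): it reports the total
// serialized size and fills in whatever headers fit into the scratch space,
// here using `()` as a no-op outer `PacketBuilder`.
#[cfg(test)]
mod partial_serializer_example {
    use super::*;

    #[test]
    fn reports_total_size_without_serializing() {
        let ser = Buf::new(vec![0u8; 10], ..).wrap_in(());
        let mut scratch = [0u8; 0];
        let result =
            ser.partial_serialize(PacketConstraints::UNCONSTRAINED, &mut scratch).unwrap();
        assert_eq!(result, PartialSerializeResult { bytes_written: 0, total_size: 10 });
    }
}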
1980
1981impl<B: GrowBuffer + ShrinkBuffer> PartialSerializer for B {
1982 fn partial_serialize(
1983 &self,
1984 _outer: PacketConstraints,
1985 _buffer: &mut [u8],
1986 ) -> Result<PartialSerializeResult, SerializeError<Never>> {
1987 Ok(PartialSerializeResult { bytes_written: 0, total_size: self.len() })
1988 }
1989}
1990
1991impl<B: GrowBuffer + ShrinkBuffer> PartialSerializer for TruncatingSerializer<B> {
1992 fn partial_serialize(
1993 &self,
1994 outer: PacketConstraints,
1995 _buffer: &mut [u8],
1996 ) -> Result<PartialSerializeResult, SerializeError<Never>> {
1997 let total_size =
1998 cmp::max(outer.min_body_len(), cmp::min(self.buffer().len(), outer.max_body_len()));
1999 Ok(PartialSerializeResult { bytes_written: 0, total_size })
2000 }
2001}
2002
2003impl<I: InnerPacketBuilder, B: GrowBuffer + ShrinkBuffer> PartialSerializer
2004 for InnerSerializer<I, B>
2005{
2006 fn partial_serialize(
2007 &self,
2008 outer: PacketConstraints,
2009 _buffer: &mut [u8],
2010 ) -> Result<PartialSerializeResult, SerializeError<Never>> {
2011 Ok(PartialSerializeResult {
2012 bytes_written: 0,
2013 total_size: cmp::max(self.inner().bytes_len(), outer.min_body_len()),
2014 })
2015 }
2016}
2017
2018impl<A: Serializer + PartialSerializer, B: Serializer + PartialSerializer> PartialSerializer
2019 for EitherSerializer<A, B>
2020{
2021 fn partial_serialize(
2022 &self,
2023 outer: PacketConstraints,
2024 buffer: &mut [u8],
2025 ) -> Result<PartialSerializeResult, SerializeError<Never>> {
2026 match self {
2027 EitherSerializer::A(s) => s.partial_serialize(outer, buffer),
2028 EitherSerializer::B(s) => s.partial_serialize(outer, buffer),
2029 }
2030 }
2031}
2032
2033impl<I: PartialSerializer, O: PartialPacketBuilder> PartialSerializer for Nested<I, O> {
2034 fn partial_serialize(
2035 &self,
2036 outer: PacketConstraints,
2037 buffer: &mut [u8],
2038 ) -> Result<PartialSerializeResult, SerializeError<Never>> {
2039 let header_constraints = self.outer.constraints();
2040 let Some(constraints) = outer.try_encapsulate(&header_constraints) else {
2041 return Err(SerializeError::SizeLimitExceeded);
2042 };
2043
2044 let header_len = header_constraints.header_len();
2045 let inner_buf = buffer.get_mut(header_len..).unwrap_or(&mut []);
2046 let mut result = self.inner.partial_serialize(constraints, inner_buf)?;
2047 if header_len <= buffer.len() {
2048 self.outer.partial_serialize(result.total_size, &mut buffer[..header_len]);
2049 result.bytes_written += header_len;
2050 }
2051 result.total_size += header_len + header_constraints.footer_len();
2052 Ok(result)
2053 }
2054}
2055
2056mod sealed {
2057 use super::*;
2058
2059 pub trait DynamicSerializerInner {
2064 fn serialize_dyn_alloc(
2074 &self,
2075 outer: PacketConstraints,
2076 alloc: &mut dyn DynamicBufferAlloc,
2077 ) -> Result<(usize, usize), SerializeError<DynAllocError>>;
2078 }
2079
2080 pub trait DynamicBufferAlloc {
2086 fn alloc(
2104 &mut self,
2105 prefix: usize,
2106 body: usize,
2107 suffix: usize,
2108 ) -> Result<Buf<&mut [u8]>, DynAllocError>;
2109 }
2110
2111 pub struct DynAllocError;
2114}
2115
2116use sealed::{DynAllocError, DynamicBufferAlloc, DynamicSerializerInner};
2117
2118fn dyn_serialize_new_buf<B: GrowBufferMut, A: LayoutBufferAlloc<B>>(
2119 serializer: &dyn DynamicSerializerInner,
2120 outer: PacketConstraints,
2121 alloc: A,
2122) -> Result<B, SerializeError<A::Error>> {
2123 enum Adapter<A: LayoutBufferAlloc<B>, B> {
2124 Empty,
2125 Alloc(A),
2126 Buffer(B),
2127 Error(A::Error),
2128 }
2129
2130 impl<A: LayoutBufferAlloc<B>, B: GrowBufferMut> DynamicBufferAlloc for Adapter<A, B> {
2131 fn alloc(
2132 &mut self,
2133 prefix: usize,
2134 body: usize,
2135 suffix: usize,
2136 ) -> Result<Buf<&mut [u8]>, DynAllocError> {
2137 let alloc = match core::mem::replace(self, Self::Empty) {
2138 Self::Alloc(a) => a,
2139 _ => panic!("unexpected alloc state"),
2140 };
2141
2142 let buffer = match alloc.layout_alloc(prefix, body, suffix) {
2143 Ok(b) => b,
2144 Err(e) => {
2145 *self = Self::Error(e);
2146 return Err(DynAllocError);
2147 }
2148 };
2149 *self = Self::Buffer(buffer);
2150 let buffer = match self {
2151 Self::Buffer(b) => b.with_all_contents_mut(|b| match b.try_into_contiguous() {
2152 Ok(b) => b,
2153 Err(_) => todo!(
2154 "https://fxbug.dev/428952155: support dyn serialize fragmented buffers"
2155 ),
2156 }),
2157 _ => unreachable!(),
2159 };
2160 Ok(Buf::new(buffer, prefix..(buffer.len() - suffix)))
2161 }
2162 }
2163
2164 let mut adapter = Adapter::Alloc(alloc);
2165 let (prefix, suffix) = match serializer.serialize_dyn_alloc(outer, &mut adapter) {
2166 Ok(b) => b,
2167 Err(SerializeError::SizeLimitExceeded) => {
2168 return Err(SerializeError::SizeLimitExceeded);
2169 }
2170 Err(SerializeError::Alloc(DynAllocError)) => match adapter {
2171 Adapter::Error(e) => {
2172 return Err(SerializeError::Alloc(e));
2173 }
2174 _ => {
2175 unreachable!();
2176 }
2177 },
2178 };
2179
2180 let mut buffer = match adapter {
2181 Adapter::Buffer(b) => b,
2182 _ => unreachable!("unexpected alloc state"),
2183 };
2184 buffer.grow_front(buffer.prefix_len().checked_sub(prefix).unwrap_or_else(|| {
2185 panic!("failed to grow buffer front; want: {} got: {}", prefix, buffer.prefix_len())
2186 }));
2187 buffer.grow_back(buffer.suffix_len().checked_sub(suffix).unwrap_or_else(|| {
2188 panic!("failed to grow buffer back; want: {} got: {}", suffix, buffer.suffix_len())
2189 }));
2190 Ok(buffer)
2191}
2192
2193#[derive(Copy, Clone)]
2198pub struct DynSerializer<'a>(&'a dyn DynamicSerializerInner);
2199
2200impl<'a> DynSerializer<'a> {
2201 pub fn new<S: Serializer>(s: &'a S) -> Self {
2203 Self::new_dyn(s)
2204 }
2205
2206 pub fn new_dyn(s: &'a dyn DynamicSerializer) -> Self {
2208 Self(s)
2209 }
2210}
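
// A sketch of `DynSerializer`: it erases the concrete serializer type so that
// heterogeneous serializers can share a single non-generic code path while
// producing the same bytes. The body bytes are arbitrary.
#[cfg(test)]
mod dyn_serializer_example {
    use super::*;

    #[test]
    fn erased_serializer_produces_the_same_bytes() {
        let body: &[u8] = &[9, 8, 7];
        let ser = body.into_serializer();

        let buf = DynSerializer::new(&ser)
            .serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec)
            .unwrap();
        assert_eq!(buf.as_ref(), body);
    }
}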
2211
2212impl Serializer for DynSerializer<'_> {
2213 type Buffer = EmptyBuf;
2214
2215 fn serialize<B: GrowBufferMut, P: BufferProvider<Self::Buffer, B>>(
2216 self,
2217 outer: PacketConstraints,
2218 provider: P,
2219 ) -> Result<B, (SerializeError<P::Error>, Self)> {
2220 struct Adapter<S, P>(P, PhantomData<S>);
2221
2222 impl<S, B, P> LayoutBufferAlloc<B> for Adapter<S, P>
2223 where
2224 P: BufferProvider<S, B>,
2225 {
2226 type Error = P::Error;
2227
2228 fn layout_alloc(
2229 self,
2230 prefix: usize,
2231 body: usize,
2232 suffix: usize,
2233 ) -> Result<B, Self::Error> {
2234 let Self(provider, PhantomData) = self;
2235 provider.alloc_no_reuse(prefix, body, suffix)
2236 }
2237 }
2238
2239 let Self(serializer) = self;
2240 match dyn_serialize_new_buf(serializer, outer, Adapter(provider, PhantomData)) {
2241 Ok(b) => Ok(b),
2242 Err(e) => Err((e, self)),
2243 }
2244 }
2245
2246 fn serialize_new_buf<B: GrowBufferMut, A: LayoutBufferAlloc<B>>(
2247 &self,
2248 outer: PacketConstraints,
2249 alloc: A,
2250 ) -> Result<B, SerializeError<A::Error>> {
2251 let Self(serializer) = self;
2252 dyn_serialize_new_buf(*serializer, outer, alloc)
2253 }
2254}
2255
2256impl<O> DynamicSerializerInner for O
2257where
2258 O: Serializer,
2259{
2260 fn serialize_dyn_alloc(
2261 &self,
2262 outer: PacketConstraints,
2263 alloc: &mut dyn DynamicBufferAlloc,
2264 ) -> Result<(usize, usize), SerializeError<DynAllocError>> {
2265 struct Adapter<'a>(&'a mut dyn DynamicBufferAlloc);
2266 impl<'a> LayoutBufferAlloc<Buf<&'a mut [u8]>> for Adapter<'a> {
2267 type Error = DynAllocError;
2268
2269 fn layout_alloc(
2270 self,
2271 prefix: usize,
2272 body: usize,
2273 suffix: usize,
2274 ) -> Result<Buf<&'a mut [u8]>, Self::Error> {
2275 let Self(inner) = self;
2276 inner.alloc(prefix, body, suffix)
2277 }
2278 }
2279 self.serialize_new_buf(outer, Adapter(alloc))
2280 .map(|buffer| (buffer.prefix_len(), buffer.suffix_len()))
2281 }
2282}
2283
2284pub trait DynamicSerializer: DynamicSerializerInner {}
2299impl<O> DynamicSerializer for O where O: DynamicSerializerInner {}
2300
2301#[cfg(test)]
2302mod tests {
2303 use super::*;
2304 use crate::BufferMut;
2305 use std::fmt::Debug;
2306 use test_case::test_case;
2307 use test_util::{assert_geq, assert_leq};
2308
2309 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
2315 struct DummyPacketBuilder {
2316 header_len: usize,
2317 footer_len: usize,
2318 min_body_len: usize,
2319 max_body_len: usize,
2320 header_byte: u8,
2321 footer_byte: u8,
2322 }
2323
2324 impl DummyPacketBuilder {
2325 fn new(
2326 header_len: usize,
2327 footer_len: usize,
2328 min_body_len: usize,
2329 max_body_len: usize,
2330 ) -> DummyPacketBuilder {
2331 DummyPacketBuilder {
2332 header_len,
2333 footer_len,
2334 min_body_len,
2335 max_body_len,
2336 header_byte: 0xFF,
2337 footer_byte: 0xFE,
2338 }
2339 }
2340 }
2341
2342 impl PacketBuilder for DummyPacketBuilder {
2343 fn constraints(&self) -> PacketConstraints {
2344 PacketConstraints::new(
2345 self.header_len,
2346 self.footer_len,
2347 self.min_body_len,
2348 self.max_body_len,
2349 )
2350 }
2351
2352 fn serialize(&self, target: &mut SerializeTarget<'_>, body: FragmentedBytesMut<'_, '_>) {
2353 assert_eq!(target.header.len(), self.header_len);
2354 assert_eq!(target.footer.len(), self.footer_len);
2355 assert!(body.len() >= self.min_body_len);
2356 assert!(body.len() <= self.max_body_len);
2357 target.header.fill(self.header_byte);
2358 target.footer.fill(self.footer_byte);
2359 }
2360 }
2361
2362 impl PartialPacketBuilder for DummyPacketBuilder {
2363 fn partial_serialize(&self, _body_len: usize, buffer: &mut [u8]) {
2364 buffer.fill(self.header_byte)
2365 }
2366 }
2367
2368 impl InnerPacketBuilder for DummyPacketBuilder {
2369 fn bytes_len(&self) -> usize {
2370 self.header_len
2371 }
2372
2373 fn serialize(&self, buffer: &mut [u8]) {
2374 assert_eq!(buffer.len(), self.header_len);
2375 buffer.fill(self.header_byte);
2376 }
2377 }
2378
2379 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
2381 struct SerializerVerifier {
2382 inner_len: Option<usize>,
2385
2386 truncating: bool,
2389 }
2390
2391 impl SerializerVerifier {
2392 fn new<S: Serializer>(serializer: &S, truncating: bool) -> Self {
2393 let inner_len = serializer
2394 .serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec)
2395 .map(|buf| buf.len())
2396 .inspect_err(|err| assert!(err.is_size_limit_exceeded()))
2397 .ok();
2398 Self { inner_len, truncating }
2399 }
2400
2401 fn verify_result<B: GrowBufferMut, A>(
2402 &self,
2403 result: Result<&B, &SerializeError<A>>,
2404 outer: PacketConstraints,
2405 ) {
2406 let should_exceed_size_limit = match self.inner_len {
2407 Some(inner_len) => outer.max_body_len() < inner_len && !self.truncating,
2408 None => true,
2409 };
2410
2411 match result {
2412 Ok(buf) => {
2413 assert_geq!(buf.prefix_len(), outer.header_len());
2414 assert_geq!(buf.suffix_len(), outer.footer_len());
2415 assert_leq!(buf.len(), outer.max_body_len());
2416
2417 let padding = outer.min_body_len().saturating_sub(buf.len());
2422 assert_leq!(padding + outer.footer_len(), buf.suffix_len());
2423
2424 assert!(!should_exceed_size_limit);
2425 }
2426 Err(err) => {
2427 if should_exceed_size_limit {
2430 assert!(err.is_size_limit_exceeded());
2431 } else {
2432 assert!(err.is_alloc());
2433 }
2434 }
2435 }
2436 }
2437 }
2438
2439 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
2448 struct VerifyingSerializer<S> {
2449 ser: S,
2450 verifier: SerializerVerifier,
2451 }
2452
2453 impl<S: Serializer + Debug + Clone + Eq> Serializer for VerifyingSerializer<S>
2454 where
2455 S::Buffer: ReusableBuffer,
2456 {
2457 type Buffer = S::Buffer;
2458
2459 fn serialize<B: GrowBufferMut, P: BufferProvider<Self::Buffer, B>>(
2460 self,
2461 outer: PacketConstraints,
2462 provider: P,
2463 ) -> Result<B, (SerializeError<P::Error>, Self)> {
2464 let Self { ser, verifier } = self;
2465 let orig = ser.clone();
2466
2467 let result = ser.serialize(outer, provider).map_err(|(err, ser)| {
2468 assert_eq!(ser, orig);
2471 (err, Self { ser, verifier })
2472 });
2473
2474 verifier.verify_result(result.as_ref().map_err(|(err, _ser)| err), outer);
2475
2476 result
2477 }
2478
2479 fn serialize_new_buf<B: GrowBufferMut, A: LayoutBufferAlloc<B>>(
2480 &self,
2481 outer: PacketConstraints,
2482 alloc: A,
2483 ) -> Result<B, SerializeError<A::Error>> {
2484 let res = self.ser.serialize_new_buf(outer, alloc);
2485 self.verifier.verify_result(res.as_ref(), outer);
2486 res
2487 }
2488 }
2489
2490 trait SerializerExt: Serializer {
2491 fn into_verifying(self, truncating: bool) -> VerifyingSerializer<Self>
2492 where
2493 Self::Buffer: ReusableBuffer,
2494 {
2495 let verifier = SerializerVerifier::new(&self, truncating);
2496 VerifyingSerializer { ser: self, verifier }
2497 }
2498
2499 fn wrap_in_verifying<B: PacketBuilder>(
2500 self,
2501 outer: B,
2502 truncating: bool,
2503 ) -> VerifyingSerializer<Nested<Self, B>>
2504 where
2505 Self::Buffer: ReusableBuffer,
2506 {
2507 self.wrap_in(outer).into_verifying(truncating)
2508 }
2509
2510 fn with_size_limit_verifying(
2511 self,
2512 limit: usize,
2513 truncating: bool,
2514 ) -> VerifyingSerializer<Nested<Self, LimitedSizePacketBuilder>>
2515 where
2516 Self::Buffer: ReusableBuffer,
2517 {
2518 self.with_size_limit(limit).into_verifying(truncating)
2519 }
2520 }
2521
2522 impl<S: Serializer> SerializerExt for S {}
2523
2524 #[test]
2525 fn test_either_into_inner() {
2526 fn ret_either(a: u32, b: u32, c: bool) -> Either<u32, u32> {
2527 if c { Either::A(a) } else { Either::B(b) }
2528 }
2529
2530 assert_eq!(ret_either(1, 2, true).into_inner(), 1);
2531 assert_eq!(ret_either(1, 2, false).into_inner(), 2);
2532 }
2533
2534 #[test]
2535 fn test_either_unwrap_success() {
2536 assert_eq!(Either::<u16, u32>::A(5).unwrap_a(), 5);
2537 assert_eq!(Either::<u16, u32>::B(10).unwrap_b(), 10);
2538 }
2539
2540 #[test]
2541 #[should_panic]
2542 fn test_either_unwrap_a_panic() {
2543 let _: u16 = Either::<u16, u32>::B(10).unwrap_a();
2544 }
2545
2546 #[test]
2547 #[should_panic]
2548 fn test_either_unwrap_b_panic() {
2549 let _: u32 = Either::<u16, u32>::A(5).unwrap_b();
2550 }
2551
2552 #[test_case(Buf::new((0..100).collect(), ..); "entire buf")]
2553 #[test_case(Buf::new((0..100).collect(), 0..0); "empty range")]
2554 #[test_case(Buf::new((0..100).collect(), ..50); "prefix")]
2555 #[test_case(Buf::new((0..100).collect(), 50..); "suffix")]
2556 #[test_case(Buf::new((0..100).collect(), 25..75); "middle")]
2557 fn test_buf_into_inner(buf: Buf<Vec<u8>>) {
2558 assert_eq!(buf.clone().as_ref(), buf.into_inner());
2559 }
2560
2561 #[test]
2562 fn test_packet_constraints() {
2563 use PacketConstraints as PC;
2564
2565 assert!(PC::try_new(0, 0, 0, 0).is_some());
2569 assert!(PC::try_new(usize::MAX / 2, usize::MAX / 2, 0, 0).is_some());
2571 assert_eq!(PC::try_new(usize::MAX, 1, 0, 0), None);
2573 assert_eq!(PC::try_new(0, 0, 1, 0), None);
2575
2576 let pc = PC::new(10, 10, 0, usize::MAX);
2580 assert_eq!(pc.try_encapsulate(&pc).unwrap(), PC::new(20, 20, 0, usize::MAX - 20));
2581
2582 let pc = PC::new(10, 10, 0, usize::MAX);
2583 assert_eq!(pc.try_encapsulate(&pc).unwrap(), PC::new(20, 20, 0, usize::MAX - 20));
2584
2585 let inner = PC::new(10, 10, 0, usize::MAX);
2596 let outer = PC::new(0, 0, 10, usize::MAX);
2597 assert_eq!(inner.try_encapsulate(&outer).unwrap(), PC::new(10, 10, 0, usize::MAX - 20));
2598
2599 let inner = PC::new(usize::MAX, 0, 0, usize::MAX);
2603 let outer = PC::new(1, 0, 0, usize::MAX);
2604 assert_eq!(inner.try_encapsulate(&outer), None);
2605
2606 let inner = PC::new(0, usize::MAX, 0, usize::MAX);
2610 let outer = PC::new(0, 1, 0, usize::MAX);
2611 assert_eq!(inner.try_encapsulate(&outer), None);
2612
2613 let one_fifth_max = (usize::MAX / 5) + 1;
2620 let inner = PC::new(one_fifth_max, one_fifth_max, one_fifth_max, usize::MAX);
2621 let outer = PC::new(one_fifth_max, one_fifth_max, 0, usize::MAX);
2622 assert_eq!(inner.try_encapsulate(&outer), None);
2623
2624 let inner = PC::new(10, 10, 0, usize::MAX);
2629 let outer = PC::new(0, 0, 0, 10);
2630 assert_eq!(inner.try_encapsulate(&outer), None);
2631
2632 let inner = PC::new(0, 0, 10, usize::MAX);
2638 let outer = PC::new(0, 0, 0, 5);
2639 assert_eq!(inner.try_encapsulate(&outer), None);
2640 }
2641
2642 #[test]
2643 fn test_inner_serializer() {
2644 const INNER: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
2645
2646 fn concat<'a, I: IntoIterator<Item = &'a &'a [u8]>>(slices: I) -> Vec<u8> {
2647 let mut v = Vec::new();
2648 for slc in slices.into_iter() {
2649 v.extend_from_slice(slc);
2650 }
2651 v
2652 }
2653
2654 let buf = INNER.into_serializer().serialize_vec_outer().unwrap();
2656 assert_eq!(buf.as_ref(), INNER);
2657
2658 let buf = INNER
2661 .into_serializer()
2662 .into_verifying(false)
2663 .wrap_in(DummyPacketBuilder::new(0, 0, 20, usize::MAX))
2664 .serialize_vec_outer()
2665 .unwrap();
2666 assert_eq!(buf.as_ref(), concat(&[INNER, vec![0; 10].as_ref()]).as_slice());
2667
2668 let buf = INNER
2672 .into_serializer()
2673 .into_verifying(false)
2674 .wrap_in(DummyPacketBuilder::new(10, 10, 0, usize::MAX))
2675 .serialize_vec_outer()
2676 .unwrap();
2677 assert_eq!(
2678 buf.as_ref(),
2679 concat(&[vec![0xFF; 10].as_ref(), INNER, vec![0xFE; 10].as_ref()]).as_slice()
2680 );
2681
        assert_eq!(
            INNER
                .into_serializer()
                .into_verifying(false)
                .wrap_in(DummyPacketBuilder::new(0, 0, 0, 9))
                .serialize_vec_outer()
                .unwrap_err()
                .0,
            SerializeError::SizeLimitExceeded
        );

        assert_eq!(
            INNER
                .into_serializer_with(Buf::new(vec![0xFF], ..))
                .into_verifying(false)
                .serialize_vec_outer()
                .unwrap()
                .as_ref(),
            INNER
        );
    }

    #[test]
    fn test_buffer_serializer_and_inner_serializer() {
        fn verify_buffer_serializer<B: BufferMut + Debug>(
            buffer: B,
            header_len: usize,
            footer_len: usize,
            min_body_len: usize,
        ) {
            let old_body = buffer.to_flattened_vec();
            let serializer =
                DummyPacketBuilder::new(header_len, footer_len, min_body_len, usize::MAX)
                    .wrap_body(buffer);

            let buffer0 = serializer
                .serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec)
                .unwrap();
            verify(buffer0, &old_body, header_len, footer_len, min_body_len);

            let buffer = serializer.serialize_vec_outer().unwrap();
            verify(buffer, &old_body, header_len, footer_len, min_body_len);
        }

        fn verify_inner_packet_builder_serializer(
            body: &[u8],
            header_len: usize,
            footer_len: usize,
            min_body_len: usize,
        ) {
            let buffer = DummyPacketBuilder::new(header_len, footer_len, min_body_len, usize::MAX)
                .wrap_body(body.into_serializer())
                .serialize_vec_outer()
                .unwrap();
            verify(buffer, body, header_len, footer_len, min_body_len);
        }

        fn verify<B: Buffer>(
            buffer: B,
            body: &[u8],
            header_len: usize,
            footer_len: usize,
            min_body_len: usize,
        ) {
            let flat = buffer.to_flattened_vec();
            let header_bytes = &flat[..header_len];
            let body_bytes = &flat[header_len..header_len + body.len()];
            let padding_len = min_body_len.saturating_sub(body.len());
            let padding_bytes =
                &flat[header_len + body.len()..header_len + body.len() + padding_len];
            let total_body_len = body.len() + padding_len;
            let footer_bytes = &flat[header_len + total_body_len..];
            assert_eq!(
                buffer.len() - total_body_len,
                header_len + footer_len,
                "buffer.len()({}) - total_body_len({}) != header_len({}) + footer_len({})",
                buffer.len(),
                total_body_len,
                header_len,
                footer_len,
            );

            assert!(
                header_bytes.iter().all(|b| *b == 0xFF),
                "header_bytes {:?} are not filled with 0xFF's",
                header_bytes,
            );
            assert_eq!(body_bytes, body);
            assert!(
                padding_bytes.iter().all(|b| *b == 0),
                "padding_bytes {:?} are not filled with 0s",
                padding_bytes,
            );
            assert!(
                footer_bytes.iter().all(|b| *b == 0xFE),
                "footer_bytes {:?} are not filled with 0xFE's",
                footer_bytes,
            );
        }

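        // Exhaustively test combinations of buffer length, body range, prefix length, suffix
        // length, and minimum body length.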
        for buf_len in 0..8 {
            for range_start in 0..buf_len {
                for range_end in range_start..buf_len {
                    for prefix in 0..8 {
                        for suffix in 0..8 {
                            for min_body in 0..8 {
                                let mut vec = vec![0; buf_len];
                                #[allow(clippy::needless_range_loop)]
                                for i in 0..vec.len() {
                                    vec[i] = i as u8;
                                }
                                verify_buffer_serializer(
                                    Buf::new(vec.as_mut_slice(), range_start..range_end),
                                    prefix,
                                    suffix,
                                    min_body,
                                );
                                if range_start == 0 {
                                    verify_inner_packet_builder_serializer(
                                        &vec.as_slice()[range_start..range_end],
                                        prefix,
                                        suffix,
                                        min_body,
                                    );
                                }
                            }
                        }
                    }
                }
            }
        }
    }

    #[test]
    fn test_min_body_len() {
        let body = &[1, 2];

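        // The inner packet (2-byte header + 2-byte body + 2-byte footer) is only 6 bytes, so the
        // outer builder's min_body_len of 8 requires 2 bytes of zero padding before the outer
        // footer.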
        let inner = DummyPacketBuilder::new(2, 2, 0, usize::MAX);
        let outer = DummyPacketBuilder::new(2, 2, 8, usize::MAX);
        let buf = body
            .into_serializer()
            .into_verifying(false)
            .wrap_in_verifying(inner, false)
            .wrap_in_verifying(outer, false)
            .serialize_vec_outer()
            .unwrap();
        assert_eq!(buf.prefix_len(), 0);
        assert_eq!(buf.suffix_len(), 0);
        assert_eq!(
            buf.as_ref(),
            &[
                0xFF, 0xFF, // outer header
                0xFF, 0xFF, // inner header
                1, 2, // body
                0xFE, 0xFE, // inner footer
                0, 0, // padding to satisfy the outer builder's min_body_len
                0xFE, 0xFE, // outer footer
            ]
        );
    }

    #[test]
    fn test_size_limit() {
        fn test<S: Serializer + Clone + Debug + Eq>(ser: S)
        where
            S::Buffer: ReusableBuffer,
        {
            let pb = DummyPacketBuilder::new(1, 1, 0, usize::MAX);

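            // Each serializer passed to `test` produces a 1-byte body. With pb's 1-byte header
            // and 1-byte footer, the total is exactly 3 bytes, so size limits of 3 and 4 are
            // satisfied.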
            assert!(
                ser.clone()
                    .wrap_in_verifying(pb, false)
                    .with_size_limit_verifying(3, false)
                    .serialize_vec_outer()
                    .is_ok()
            );
            assert!(
                ser.clone()
                    .wrap_in_verifying(pb, false)
                    .with_size_limit_verifying(4, false)
                    .serialize_vec_outer()
                    .is_ok()
            );
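            // An inner size limit of 1 is satisfied by the 1-byte body, and the outer limit of 3
            // by the full 3-byte packet.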
            assert!(
                ser.clone()
                    .with_size_limit_verifying(1, false)
                    .wrap_in_verifying(pb, false)
                    .with_size_limit_verifying(3, false)
                    .serialize_vec_outer()
                    .is_ok()
            );
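            // An inner size limit of 0 cannot hold the 1-byte body.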
            assert!(
                ser.clone()
                    .with_size_limit_verifying(0, false)
                    .wrap_in_verifying(pb, false)
                    .serialize_vec_outer()
                    .is_err()
            );
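            // An outer size limit of 1 cannot hold the 3-byte packet.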
            assert!(
                ser.clone()
                    .wrap_in_verifying(pb, false)
                    .with_size_limit_verifying(1, false)
                    .serialize_vec_outer()
                    .is_err()
            );
        }

        test(DummyPacketBuilder::new(1, 0, 0, usize::MAX).into_serializer().into_verifying(false));
        test(Buf::new(vec![0], ..).into_verifying(false));
    }

    #[test]
    fn test_truncating_serializer() {
        fn verify_result<S: Serializer + Debug>(ser: S, expected: &[u8])
        where
            S::Buffer: ReusableBuffer + AsRef<[u8]>,
        {
            let buf = ser.serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec).unwrap();
            assert_eq!(buf.as_ref(), &expected[..]);
            let buf = ser.serialize_vec_outer().unwrap();
            assert_eq!(buf.as_ref(), &expected[..]);
        }

        let body = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
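        // DiscardFront with a size limit of 4 keeps only the last four bytes of the body.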
        let ser =
            TruncatingSerializer::new(Buf::new(body.clone(), ..), TruncateDirection::DiscardFront)
                .into_verifying(true)
                .with_size_limit_verifying(4, true);
        verify_result(ser, &[6, 7, 8, 9]);

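        // DiscardBack with a size limit of 7 keeps only the first seven bytes of the body.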
        let ser =
            TruncatingSerializer::new(Buf::new(body.clone(), ..), TruncateDirection::DiscardBack)
                .into_verifying(true)
                .with_size_limit_verifying(7, true);
        verify_result(ser, &[0, 1, 2, 3, 4, 5, 6]);

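        // With truncation disabled, the 10-byte body cannot satisfy a size limit of 5, so every
        // serialization attempt fails.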
        let ser =
            TruncatingSerializer::new(Buf::new(body.clone(), ..), TruncateDirection::NoTruncating)
                .into_verifying(false)
                .with_size_limit_verifying(5, true);
        assert!(ser.clone().serialize_vec_outer().is_err());
        assert!(ser.serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec).is_err());
        assert!(ser.serialize_vec_outer().is_err());

        fn test_serialization_failure<S: Serializer + Clone + Eq + Debug>(
            ser: S,
            err: SerializeError<BufferTooShortError>,
        ) where
            S::Buffer: ReusableBuffer + Debug,
        {
            let (e, new_ser) = DummyPacketBuilder::new(2, 2, 0, 1)
                .wrap_body(ser.clone())
                .serialize_no_alloc_outer()
                .unwrap_err();
            assert_eq!(err, e);
            assert_eq!(new_ser.into_inner(), ser);
        }

        let body = Buf::new(vec![1, 2], ..);
        test_serialization_failure(
            TruncatingSerializer::new(body.clone(), TruncateDirection::DiscardFront)
                .into_verifying(true),
            SerializeError::Alloc(BufferTooShortError),
        );
        test_serialization_failure(
            TruncatingSerializer::new(body.clone(), TruncateDirection::DiscardBack)
                .into_verifying(true),
            SerializeError::Alloc(BufferTooShortError),
        );
        test_serialization_failure(
            TruncatingSerializer::new(body.clone(), TruncateDirection::NoTruncating)
                .into_verifying(false),
            SerializeError::SizeLimitExceeded,
        );
    }

    #[test]
    fn test_try_reuse_buffer() {
        fn test_expect_success(
            body_range: Range<usize>,
            prefix: usize,
            suffix: usize,
            max_copy_bytes: usize,
        ) {
            let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
            let buffer = Buf::new(&mut bytes[..], body_range);
            let body = buffer.as_ref().to_vec();
            let buffer = try_reuse_buffer(buffer, prefix, suffix, max_copy_bytes).unwrap();
            assert_eq!(buffer.as_ref(), body.as_slice());
            assert!(buffer.prefix_len() >= prefix);
            assert!(buffer.suffix_len() >= suffix);
        }

        fn test_expect_failure(
            body_range: Range<usize>,
            prefix: usize,
            suffix: usize,
            max_copy_bytes: usize,
        ) {
            let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
            let buffer = Buf::new(&mut bytes[..], body_range.clone());
            let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
            let orig = Buf::new(&mut bytes[..], body_range.clone());
            let buffer = try_reuse_buffer(buffer, prefix, suffix, max_copy_bytes).unwrap_err();
            assert_eq!(buffer, orig);
        }

        // The body already spans the whole buffer; no prefix or suffix is required.
        test_expect_success(0..10, 0, 0, 0);
        // One byte is already free on each side, so no copying is needed.
        test_expect_success(1..9, 1, 1, 0);
        // Making room for the prefix/suffix requires shifting the 9-byte body, which is allowed
        // when up to 9 bytes may be copied...
        test_expect_success(0..9, 1, 0, 9);
        test_expect_success(1..10, 0, 1, 9);
        // ...but not when the copy limit is 8 bytes.
        test_expect_failure(0..9, 1, 0, 8);
        test_expect_failure(1..10, 0, 1, 8);
    }

    #[test]
    fn test_maybe_reuse_buffer_provider() {
        fn test_expect(body_range: Range<usize>, prefix: usize, suffix: usize, expect_a: bool) {
            let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
            let buffer = Buf::new(&mut bytes[..], body_range);
            let body = buffer.as_ref().to_vec();
            let buffer = BufferProvider::reuse_or_realloc(
                MaybeReuseBufferProvider(new_buf_vec),
                buffer,
                prefix,
                suffix,
            )
            .unwrap();
            match &buffer {
                Either::A(_) if expect_a => {}
                Either::B(_) if !expect_a => {}
                Either::A(_) => panic!("expected Either::B variant"),
                Either::B(_) => panic!("expected Either::A variant"),
            }
            let bytes: &[u8] = buffer.as_ref();
            assert_eq!(bytes, body.as_slice());
            assert!(buffer.prefix_len() >= prefix);
            assert!(buffer.suffix_len() >= suffix);
        }

        fn test_expect_reuse(body_range: Range<usize>, prefix: usize, suffix: usize) {
            test_expect(body_range, prefix, suffix, true);
        }

        fn test_expect_realloc(body_range: Range<usize>, prefix: usize, suffix: usize) {
            test_expect(body_range, prefix, suffix, false);
        }

        test_expect_reuse(0..10, 0, 0);
        test_expect_reuse(1..9, 1, 1);
        test_expect_reuse(0..9, 1, 0);
        test_expect_reuse(1..10, 0, 1);
        test_expect_realloc(0..9, 1, 1);
        test_expect_realloc(1..10, 1, 1);
    }

    #[test]
    fn test_no_reuse_buffer_provider() {
        #[track_caller]
        fn test_expect(body_range: Range<usize>, prefix: usize, suffix: usize) {
            let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
            let internal_buffer: Buf<&mut [u8]> = Buf::new(&mut bytes[..], body_range);
            let body = internal_buffer.as_ref().to_vec();
            let buffer: Buf<Vec<u8>> = BufferProvider::reuse_or_realloc(
                NoReuseBufferProvider(new_buf_vec),
                internal_buffer,
                prefix,
                suffix,
            )
            .unwrap();
            let bytes: &[u8] = buffer.as_ref();
            assert_eq!(bytes, body.as_slice());
            assert_eq!(buffer.prefix_len(), prefix);
            assert_eq!(buffer.suffix_len(), suffix);
        }
        test_expect(0..10, 0, 0);
        test_expect(1..9, 1, 1);
        test_expect(0..9, 10, 10);
        test_expect(1..10, 15, 15);
    }

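    // A test buffer that keeps the body in a separate inner buffer while headers and footers
    // grow into a surrounding `Vec`, exercising serialization into non-contiguous
    // (scatter/gather) storage.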
    struct ScatterGatherBuf<B> {
        data: Vec<u8>,
        mid: usize,
        range: Range<usize>,
        inner: B,
    }

    impl<B: BufferMut> FragmentedBuffer for ScatterGatherBuf<B> {
        fn len(&self) -> usize {
            self.inner.len() + (self.range.end - self.range.start)
        }

        fn with_bytes<'a, R, F>(&'a self, f: F) -> R
        where
            F: for<'b> FnOnce(FragmentedBytes<'b, 'a>) -> R,
        {
            let (_, rest) = self.data.split_at(self.range.start);
            let (prefix_b, rest) = rest.split_at(self.mid - self.range.start);
            let (suffix_b, _) = rest.split_at(self.range.end - self.mid);
            let mut bytes = [prefix_b, self.inner.as_ref(), suffix_b];
            f(FragmentedBytes::new(&mut bytes[..]))
        }
    }

    impl<B: BufferMut> FragmentedBufferMut for ScatterGatherBuf<B> {
        fn with_bytes_mut<'a, R, F>(&'a mut self, f: F) -> R
        where
            F: for<'b> FnOnce(FragmentedBytesMut<'b, 'a>) -> R,
        {
            let (_, rest) = self.data.split_at_mut(self.range.start);
            let (prefix_b, rest) = rest.split_at_mut(self.mid - self.range.start);
            let (suffix_b, _) = rest.split_at_mut(self.range.end - self.mid);
            let mut bytes = [prefix_b, self.inner.as_mut(), suffix_b];
            f(FragmentedBytesMut::new(&mut bytes[..]))
        }
    }

    impl<B: BufferMut> GrowBuffer for ScatterGatherBuf<B> {
        fn with_parts<'a, O, F>(&'a self, f: F) -> O
        where
            F: for<'b> FnOnce(&'a [u8], FragmentedBytes<'b, 'a>, &'a [u8]) -> O,
        {
            let (prefix, rest) = self.data.split_at(self.range.start);
            let (prefix_b, rest) = rest.split_at(self.mid - self.range.start);
            let (suffix_b, suffix) = rest.split_at(self.range.end - self.mid);
            let mut bytes = [prefix_b, self.inner.as_ref(), suffix_b];
            f(prefix, bytes.as_fragmented_byte_slice(), suffix)
        }
        fn prefix_len(&self) -> usize {
            self.range.start
        }

        fn suffix_len(&self) -> usize {
            self.data.len() - self.range.end
        }

        fn grow_front(&mut self, n: usize) {
            self.range.start -= n;
        }

        fn grow_back(&mut self, n: usize) {
            self.range.end += n;
            assert!(self.range.end <= self.data.len());
        }
    }

    impl<B: BufferMut> GrowBufferMut for ScatterGatherBuf<B> {
        fn with_parts_mut<'a, O, F>(&'a mut self, f: F) -> O
        where
            F: for<'b> FnOnce(&'a mut [u8], FragmentedBytesMut<'b, 'a>, &'a mut [u8]) -> O,
        {
            let (prefix, rest) = self.data.split_at_mut(self.range.start);
            let (prefix_b, rest) = rest.split_at_mut(self.mid - self.range.start);
            let (suffix_b, suffix) = rest.split_at_mut(self.range.end - self.mid);
            let mut bytes = [prefix_b, self.inner.as_mut(), suffix_b];
            f(prefix, bytes.as_fragmented_byte_slice(), suffix)
        }

        fn with_all_contents_mut<'a, O, F>(&'a mut self, _f: F) -> O
        where
            F: for<'b> FnOnce(FragmentedBytesMut<'b, 'a>) -> O,
        {
            unimplemented!()
        }
    }

    struct ScatterGatherProvider;

    impl<B: BufferMut> BufferProvider<B, ScatterGatherBuf<B>> for ScatterGatherProvider {
        type Error = Never;

        fn alloc_no_reuse(
            self,
            _prefix: usize,
            _body: usize,
            _suffix: usize,
        ) -> Result<ScatterGatherBuf<B>, Self::Error> {
            unimplemented!("not used in tests")
        }

        fn reuse_or_realloc(
            self,
            buffer: B,
            prefix: usize,
            suffix: usize,
        ) -> Result<ScatterGatherBuf<B>, (Self::Error, B)> {
            let inner = buffer;
            let data = vec![0; prefix + suffix];
            let range = Range { start: prefix, end: prefix };
            let mid = prefix;
            Ok(ScatterGatherBuf { inner, data, range, mid })
        }
    }

    #[test]
    fn test_scatter_gather_serialize() {
        let buf = Buf::new(vec![10, 20, 30, 40, 50], ..);
        let pb = DummyPacketBuilder::new(3, 2, 0, usize::MAX);
        let ser = pb.wrap_body(buf);
        let result = ser.serialize_outer(ScatterGatherProvider {}).unwrap();
        let flattened = result.to_flattened_vec();
        assert_eq!(&flattened[..], &[0xFF, 0xFF, 0xFF, 10, 20, 30, 40, 50, 0xFE, 0xFE]);
    }

    #[test]
    fn dyn_serialize() {
        let body = Buf::new(vec![10, 20, 30, 40, 50], ..);
        let header1 = DummyPacketBuilder {
            header_len: 5,
            footer_len: 0,
            min_body_len: 0,
            max_body_len: usize::MAX,
            header_byte: 0xAA,
            footer_byte: 0xBB,
        };
        let header2 = DummyPacketBuilder {
            header_len: 3,
            footer_len: 2,
            min_body_len: 0,
            max_body_len: usize::MAX,
            header_byte: 0xCC,
            footer_byte: 0xDD,
        };
        let ser1 = body.clone().wrap_in(header1).wrap_in(header2);
        let ser2 = body.wrap_in(header1);
        let ser2 = DynSerializer::new(&ser2).wrap_in(header2);
        let ser3 = ser1.clone();
        let ser3 = DynSerializer::new(&ser3);
        let ser4 = DynSerializer::new(&ser2);

        fn serialize(s: impl Serializer<Buffer: ReusableBuffer>) -> Vec<u8> {
            s.serialize_vec(PacketConstraints::UNCONSTRAINED)
                .map_err(|(e, _)| e)
                .unwrap()
                .unwrap_b()
                .into_inner()
        }

        fn serialize_new(s: impl Serializer) -> Vec<u8> {
            s.serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec).unwrap().into_inner()
        }

        let expect = serialize(ser1.clone());
        assert_eq!(serialize(ser2), expect);
        assert_eq!(serialize(ser3), expect);
        assert_eq!(serialize(ser4), expect);
        assert_eq!(serialize_new(ser1), expect);
        assert_eq!(serialize_new(ser2), expect);
        assert_eq!(serialize_new(ser3), expect);
        assert_eq!(serialize_new(ser4), expect);
    }
}