use std::cmp;
use std::convert::Infallible as Never;
use std::fmt::{self, Debug, Formatter};
use std::ops::{Deref, Range, RangeBounds};

use arrayvec::ArrayVec;
use zerocopy::SplitByteSlice;

use crate::{
    canonicalize_range, take_back, take_back_mut, take_front, take_front_mut,
    AsFragmentedByteSlice, Buffer, BufferView, BufferViewMut, ContiguousBuffer, EmptyBuf,
    FragmentedBuffer, FragmentedBufferMut, FragmentedBytes, FragmentedBytesMut, GrowBuffer,
    GrowBufferMut, ParsablePacket, ParseBuffer, ParseBufferMut, ReusableBuffer, ShrinkBuffer,
};
21
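/// A value that is one of two types, `A` or `B`.
///
/// `Either` is used where APIs need to return one of two different buffer or
/// serializer types depending on runtime conditions (e.g. whether an existing
/// buffer could be reused or a new one had to be allocated).
///
/// Illustrative example (not compiled as a doctest; assumes this crate's items
/// are in scope):
///
/// ```ignore
/// let e: Either<u8, &str> = Either::A(1);
/// // `map_a` transforms only the `A` variant, leaving `B` untouched.
/// assert_eq!(e.map_a(|x| x + 1).unwrap_a(), 2);
/// ```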
#[derive(Copy, Clone, Debug)]
pub enum Either<A, B> {
    A(A),
    B(B),
}
32
33impl<A, B> Either<A, B> {
34 pub fn map_a<AA, F: FnOnce(A) -> AA>(self, f: F) -> Either<AA, B> {
40 match self {
41 Either::A(a) => Either::A(f(a)),
42 Either::B(b) => Either::B(b),
43 }
44 }
45
46 pub fn map_b<BB, F: FnOnce(B) -> BB>(self, f: F) -> Either<A, BB> {
52 match self {
53 Either::A(a) => Either::A(a),
54 Either::B(b) => Either::B(f(b)),
55 }
56 }
57
58 pub fn unwrap_a(self) -> A {
64 match self {
65 Either::A(x) => x,
66 Either::B(_) => panic!("This `Either<A, B>` does not hold the `A` variant"),
67 }
68 }
69
70 pub fn unwrap_b(self) -> B {
76 match self {
77 Either::A(_) => panic!("This `Either<A, B>` does not hold the `B` variant"),
78 Either::B(x) => x,
79 }
80 }
81}
82
83impl<A> Either<A, A> {
84 pub fn into_inner(self) -> A {
87 match self {
88 Either::A(x) => x,
89 Either::B(x) => x,
90 }
91 }
92}
93
94impl<A> Either<A, Never> {
95 #[inline]
97 pub fn into_a(self) -> A {
98 match self {
99 Either::A(a) => a,
100 }
101 }
102}
103
104impl<B> Either<Never, B> {
105 #[inline]
107 pub fn into_b(self) -> B {
108 match self {
109 Either::B(b) => b,
110 }
111 }
112}
113
114macro_rules! call_method_on_either {
115 ($val:expr, $method:ident, $($args:expr),*) => {
116 match $val {
117 Either::A(a) => a.$method($($args),*),
118 Either::B(b) => b.$method($($args),*),
119 }
120 };
121 ($val:expr, $method:ident) => {
122 call_method_on_either!($val, $method,)
123 };
124}
125
126impl<A, B> FragmentedBuffer for Either<A, B>
133where
134 A: FragmentedBuffer,
135 B: FragmentedBuffer,
136{
137 fn len(&self) -> usize {
138 call_method_on_either!(self, len)
139 }
140
141 fn with_bytes<R, F>(&self, f: F) -> R
142 where
143 F: for<'a, 'b> FnOnce(FragmentedBytes<'a, 'b>) -> R,
144 {
145 call_method_on_either!(self, with_bytes, f)
146 }
147}
148
149impl<A, B> ContiguousBuffer for Either<A, B>
150where
151 A: ContiguousBuffer,
152 B: ContiguousBuffer,
153{
154}
155
156impl<A, B> ShrinkBuffer for Either<A, B>
157where
158 A: ShrinkBuffer,
159 B: ShrinkBuffer,
160{
161 fn shrink<R: RangeBounds<usize>>(&mut self, range: R) {
162 call_method_on_either!(self, shrink, range)
163 }
164 fn shrink_front(&mut self, n: usize) {
165 call_method_on_either!(self, shrink_front, n)
166 }
167 fn shrink_back(&mut self, n: usize) {
168 call_method_on_either!(self, shrink_back, n)
169 }
170}
171
172impl<A, B> ParseBuffer for Either<A, B>
173where
174 A: ParseBuffer,
175 B: ParseBuffer,
176{
177 fn parse<'a, P: ParsablePacket<&'a [u8], ()>>(&'a mut self) -> Result<P, P::Error> {
178 call_method_on_either!(self, parse)
179 }
180 fn parse_with<'a, ParseArgs, P: ParsablePacket<&'a [u8], ParseArgs>>(
181 &'a mut self,
182 args: ParseArgs,
183 ) -> Result<P, P::Error> {
184 call_method_on_either!(self, parse_with, args)
185 }
186}
187
188impl<A, B> FragmentedBufferMut for Either<A, B>
189where
190 A: FragmentedBufferMut,
191 B: FragmentedBufferMut,
192{
193 fn with_bytes_mut<R, F>(&mut self, f: F) -> R
194 where
195 F: for<'a, 'b> FnOnce(FragmentedBytesMut<'a, 'b>) -> R,
196 {
197 call_method_on_either!(self, with_bytes_mut, f)
198 }
199}
200
201impl<A, B> ParseBufferMut for Either<A, B>
202where
203 A: ParseBufferMut,
204 B: ParseBufferMut,
205{
206 fn parse_mut<'a, P: ParsablePacket<&'a mut [u8], ()>>(&'a mut self) -> Result<P, P::Error> {
207 call_method_on_either!(self, parse_mut)
208 }
209 fn parse_with_mut<'a, ParseArgs, P: ParsablePacket<&'a mut [u8], ParseArgs>>(
210 &'a mut self,
211 args: ParseArgs,
212 ) -> Result<P, P::Error> {
213 call_method_on_either!(self, parse_with_mut, args)
214 }
215}
216
217impl<A, B> GrowBuffer for Either<A, B>
218where
219 A: GrowBuffer,
220 B: GrowBuffer,
221{
222 #[inline]
223 fn with_parts<O, F>(&self, f: F) -> O
224 where
225 F: for<'a, 'b> FnOnce(&'a [u8], FragmentedBytes<'a, 'b>, &'a [u8]) -> O,
226 {
227 call_method_on_either!(self, with_parts, f)
228 }
229 fn capacity(&self) -> usize {
230 call_method_on_either!(self, capacity)
231 }
232 fn prefix_len(&self) -> usize {
233 call_method_on_either!(self, prefix_len)
234 }
235 fn suffix_len(&self) -> usize {
236 call_method_on_either!(self, suffix_len)
237 }
238 fn grow_front(&mut self, n: usize) {
239 call_method_on_either!(self, grow_front, n)
240 }
241 fn grow_back(&mut self, n: usize) {
242 call_method_on_either!(self, grow_back, n)
243 }
244 fn reset(&mut self) {
245 call_method_on_either!(self, reset)
246 }
247}
248
249impl<A, B> GrowBufferMut for Either<A, B>
250where
251 A: GrowBufferMut,
252 B: GrowBufferMut,
253{
254 fn with_parts_mut<O, F>(&mut self, f: F) -> O
255 where
256 F: for<'a, 'b> FnOnce(&'a mut [u8], FragmentedBytesMut<'a, 'b>, &'a mut [u8]) -> O,
257 {
258 call_method_on_either!(self, with_parts_mut, f)
259 }
260
261 fn serialize<BB: PacketBuilder>(&mut self, builder: BB) {
262 call_method_on_either!(self, serialize, builder)
263 }
264}
265
266impl<A, B> Buffer for Either<A, B>
267where
268 A: Buffer,
269 B: Buffer,
270{
271 fn parse_with_view<'a, ParseArgs, P: ParsablePacket<&'a [u8], ParseArgs>>(
272 &'a mut self,
273 args: ParseArgs,
274 ) -> Result<(P, &'a [u8]), P::Error> {
275 call_method_on_either!(self, parse_with_view, args)
276 }
277}
278
279impl<A: AsRef<[u8]>, B: AsRef<[u8]>> AsRef<[u8]> for Either<A, B> {
280 fn as_ref(&self) -> &[u8] {
281 call_method_on_either!(self, as_ref)
282 }
283}
284
285impl<A: AsMut<[u8]>, B: AsMut<[u8]>> AsMut<[u8]> for Either<A, B> {
286 fn as_mut(&mut self) -> &mut [u8] {
287 call_method_on_either!(self, as_mut)
288 }
289}
290
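/// A byte buffer backed by a contiguous byte container `B`, with a tracked
/// body range.
///
/// The bytes before the body are the prefix and the bytes after it are the
/// suffix; serialization grows the body into that spare capacity to prepend
/// headers and append footers without reallocating.
///
/// Illustrative example (not compiled as a doctest; assumes the buffer traits
/// are in scope):
///
/// ```ignore
/// let buf = Buf::new(vec![0u8; 10], 2..8);
/// assert_eq!(buf.len(), 6);
/// assert_eq!(buf.prefix_len(), 2);
/// assert_eq!(buf.suffix_len(), 2);
/// ```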
#[derive(Clone, Debug)]
pub struct Buf<B> {
    buf: B,
    body: Range<usize>,
}
301
302impl<B: AsRef<[u8]>> PartialEq for Buf<B> {
303 fn eq(&self, other: &Self) -> bool {
304 let self_slice = AsRef::<[u8]>::as_ref(self);
305 let other_slice = AsRef::<[u8]>::as_ref(other);
306 PartialEq::eq(self_slice, other_slice)
307 }
308}
309
310impl<B: AsRef<[u8]>> Eq for Buf<B> {}
311
312impl Buf<Vec<u8>> {
313 pub fn into_inner(self) -> Vec<u8> {
315 let Buf { mut buf, body } = self;
316 let len = body.end - body.start;
317 let _ = buf.drain(..body.start);
318 buf.truncate(len);
319 buf
320 }
321}
322
323impl<B: AsRef<[u8]>> Buf<B> {
324 pub fn new<R: RangeBounds<usize>>(buf: B, body: R) -> Buf<B> {
335 let len = buf.as_ref().len();
336 Buf { buf, body: canonicalize_range(len, &body) }
337 }
338
339 pub fn buffer_view(&mut self) -> BufView<'_> {
341 BufView { buf: &self.buf.as_ref()[self.body.clone()], body: &mut self.body }
342 }
343}
344
345impl<B: AsRef<[u8]> + AsMut<[u8]>> Buf<B> {
346 pub fn buffer_view_mut(&mut self) -> BufViewMut<'_> {
348 BufViewMut { buf: &mut self.buf.as_mut()[self.body.clone()], body: &mut self.body }
349 }
350}
351
352impl<B: AsRef<[u8]>> FragmentedBuffer for Buf<B> {
353 fragmented_buffer_method_impls!();
354}
355impl<B: AsRef<[u8]>> ContiguousBuffer for Buf<B> {}
356impl<B: AsRef<[u8]>> ShrinkBuffer for Buf<B> {
357 fn shrink<R: RangeBounds<usize>>(&mut self, range: R) {
358 let len = self.len();
359 let mut range = canonicalize_range(len, &range);
360 range.start += self.body.start;
361 range.end += self.body.start;
362 self.body = range;
363 }
364
365 fn shrink_front(&mut self, n: usize) {
366 assert!(n <= self.len());
367 self.body.start += n;
368 }
369 fn shrink_back(&mut self, n: usize) {
370 assert!(n <= self.len());
371 self.body.end -= n;
372 }
373}
374impl<B: AsRef<[u8]>> ParseBuffer for Buf<B> {
375 fn parse_with<'a, ParseArgs, P: ParsablePacket<&'a [u8], ParseArgs>>(
376 &'a mut self,
377 args: ParseArgs,
378 ) -> Result<P, P::Error> {
379 P::parse(self.buffer_view(), args)
380 }
381}
382
383impl<B: AsRef<[u8]> + AsMut<[u8]>> FragmentedBufferMut for Buf<B> {
384 fragmented_buffer_mut_method_impls!();
385}
386
387impl<B: AsRef<[u8]> + AsMut<[u8]>> ParseBufferMut for Buf<B> {
388 fn parse_with_mut<'a, ParseArgs, P: ParsablePacket<&'a mut [u8], ParseArgs>>(
389 &'a mut self,
390 args: ParseArgs,
391 ) -> Result<P, P::Error> {
392 P::parse_mut(self.buffer_view_mut(), args)
393 }
394}
395
396impl<B: AsRef<[u8]>> GrowBuffer for Buf<B> {
397 fn with_parts<O, F>(&self, f: F) -> O
398 where
399 F: for<'a, 'b> FnOnce(&'a [u8], FragmentedBytes<'a, 'b>, &'a [u8]) -> O,
400 {
401 let (prefix, buf) = self.buf.as_ref().split_at(self.body.start);
402 let (body, suffix) = buf.split_at(self.body.end - self.body.start);
403 let mut body = [&body[..]];
404 f(prefix, body.as_fragmented_byte_slice(), suffix)
405 }
406 fn capacity(&self) -> usize {
407 self.buf.as_ref().len()
408 }
409 fn prefix_len(&self) -> usize {
410 self.body.start
411 }
412 fn suffix_len(&self) -> usize {
413 self.buf.as_ref().len() - self.body.end
414 }
415 fn grow_front(&mut self, n: usize) {
416 assert!(n <= self.body.start);
417 self.body.start -= n;
418 }
419 fn grow_back(&mut self, n: usize) {
420 assert!(n <= self.buf.as_ref().len() - self.body.end);
421 self.body.end += n;
422 }
423}
424
425impl<B: AsRef<[u8]> + AsMut<[u8]>> GrowBufferMut for Buf<B> {
426 fn with_parts_mut<O, F>(&mut self, f: F) -> O
427 where
428 F: for<'a, 'b> FnOnce(&'a mut [u8], FragmentedBytesMut<'a, 'b>, &'a mut [u8]) -> O,
429 {
430 let (prefix, buf) = self.buf.as_mut().split_at_mut(self.body.start);
431 let (body, suffix) = buf.split_at_mut(self.body.end - self.body.start);
432 let mut body = [&mut body[..]];
433 f(prefix, body.as_fragmented_byte_slice(), suffix)
434 }
435}
436
437impl<B: AsRef<[u8]>> AsRef<[u8]> for Buf<B> {
438 fn as_ref(&self) -> &[u8] {
439 &self.buf.as_ref()[self.body.clone()]
440 }
441}
442
443impl<B: AsMut<[u8]>> AsMut<[u8]> for Buf<B> {
444 fn as_mut(&mut self) -> &mut [u8] {
445 &mut self.buf.as_mut()[self.body.clone()]
446 }
447}
448
449impl<B: AsRef<[u8]>> Buffer for Buf<B> {
450 fn parse_with_view<'a, ParseArgs, P: ParsablePacket<&'a [u8], ParseArgs>>(
451 &'a mut self,
452 args: ParseArgs,
453 ) -> Result<(P, &'a [u8]), P::Error> {
454 let Self { body, ref buf } = self;
455 let body_before = body.clone();
456 let view = BufView { buf: &buf.as_ref()[body.clone()], body };
457 P::parse(view, args).map(|r| (r, &buf.as_ref()[body_before]))
458 }
459}
460
461pub struct BufView<'a> {
466 buf: &'a [u8],
467 body: &'a mut Range<usize>,
468}
469
470impl<'a> BufferView<&'a [u8]> for BufView<'a> {
471 fn take_front(&mut self, n: usize) -> Option<&'a [u8]> {
472 if self.len() < n {
473 return None;
474 }
475 self.body.start += n;
476 Some(take_front(&mut self.buf, n))
477 }
478
479 fn take_back(&mut self, n: usize) -> Option<&'a [u8]> {
480 if self.len() < n {
481 return None;
482 }
483 self.body.end -= n;
484 Some(take_back(&mut self.buf, n))
485 }
486
487 fn into_rest(self) -> &'a [u8] {
488 self.buf
489 }
490}
491
492impl<'a> AsRef<[u8]> for BufView<'a> {
493 fn as_ref(&self) -> &[u8] {
494 self.buf
495 }
496}
497
498pub struct BufViewMut<'a> {
504 buf: &'a mut [u8],
505 body: &'a mut Range<usize>,
506}
507
508impl<'a> BufferView<&'a mut [u8]> for BufViewMut<'a> {
509 fn take_front(&mut self, n: usize) -> Option<&'a mut [u8]> {
510 if self.len() < n {
511 return None;
512 }
513 self.body.start += n;
514 Some(take_front_mut(&mut self.buf, n))
515 }
516
517 fn take_back(&mut self, n: usize) -> Option<&'a mut [u8]> {
518 if self.len() < n {
519 return None;
520 }
521 self.body.end -= n;
522 Some(take_back_mut(&mut self.buf, n))
523 }
524
525 fn into_rest(self) -> &'a mut [u8] {
526 self.buf
527 }
528}
529
530impl<'a> BufferViewMut<&'a mut [u8]> for BufViewMut<'a> {}
531
532impl<'a> AsRef<[u8]> for BufViewMut<'a> {
533 fn as_ref(&self) -> &[u8] {
534 self.buf
535 }
536}
537
538impl<'a> AsMut<[u8]> for BufViewMut<'a> {
539 fn as_mut(&mut self) -> &mut [u8] {
540 self.buf
541 }
542}
543
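/// The constraints that a packet layer places on its body: the lengths of the
/// header and footer it adds, the minimum body length it requires (shorter
/// bodies are padded), and the maximum body length it supports.
///
/// Illustrative example (not compiled as a doctest; the concrete numbers are
/// made up):
///
/// ```ignore
/// // A hypothetical 8-byte header, no footer, no minimum body, and a
/// // 1492-byte body limit.
/// let constraints = PacketConstraints::new(8, 0, 0, 1492);
/// assert_eq!(constraints.header_len(), 8);
/// assert_eq!(constraints.max_body_len(), 1492);
/// ```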
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct PacketConstraints {
    header_len: usize,
    footer_len: usize,
    min_body_len: usize,
    max_body_len: usize,
}
564
565impl PacketConstraints {
566 pub const UNCONSTRAINED: Self =
570 Self { header_len: 0, footer_len: 0, min_body_len: 0, max_body_len: usize::MAX };
571
572 #[inline]
580 pub fn new(
581 header_len: usize,
582 footer_len: usize,
583 min_body_len: usize,
584 max_body_len: usize,
585 ) -> PacketConstraints {
586 PacketConstraints::try_new(header_len, footer_len, min_body_len, max_body_len).expect(
587 "max_body_len < min_body_len or header_len + min_body_len + footer_len overflows usize",
588 )
589 }
590
591 #[inline]
597 pub fn try_new(
598 header_len: usize,
599 footer_len: usize,
600 min_body_len: usize,
601 max_body_len: usize,
602 ) -> Option<PacketConstraints> {
603 let header_min_body_footer_overflows = header_len
605 .checked_add(min_body_len)
606 .and_then(|sum| sum.checked_add(footer_len))
607 .is_none();
608 let max_less_than_min = max_body_len < min_body_len;
610 if max_less_than_min || header_min_body_footer_overflows {
611 return None;
612 }
613 Some(PacketConstraints { header_len, footer_len, min_body_len, max_body_len })
614 }
615
616 #[inline]
620 pub fn with_max_body_len(max_body_len: usize) -> PacketConstraints {
621 PacketConstraints { header_len: 0, footer_len: 0, min_body_len: 0, max_body_len }
626 }
627
628 #[inline]
630 pub fn header_len(&self) -> usize {
631 self.header_len
632 }
633
634 #[inline]
636 pub fn footer_len(&self) -> usize {
637 self.footer_len
638 }
639
640 #[inline]
656 pub fn min_body_len(&self) -> usize {
657 self.min_body_len
658 }
659
660 #[inline]
664 pub fn max_body_len(&self) -> usize {
665 self.max_body_len
666 }
667
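    /// Computes the combined constraints of encapsulating a packet with
    /// constraints `self` inside a packet with constraints `outer`, returning
    /// `None` if the combination is unsatisfiable (e.g. the header or footer
    /// lengths overflow `usize`, or the outer maximum body length cannot fit
    /// the inner header and footer).
    ///
    /// Illustrative example (not compiled as a doctest; the concrete numbers
    /// are made up):
    ///
    /// ```ignore
    /// let inner = PacketConstraints::new(8, 0, 0, usize::MAX);
    /// let outer = PacketConstraints::new(20, 0, 0, 1500);
    /// let combined = inner.try_encapsulate(&outer).unwrap();
    /// assert_eq!(combined.header_len(), 28);
    /// assert_eq!(combined.max_body_len(), 1492);
    /// ```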
    pub fn try_encapsulate(&self, outer: &Self) -> Option<PacketConstraints> {
        let inner = self;
        let header_len = inner.header_len.checked_add(outer.header_len)?;
        let footer_len = inner.footer_len.checked_add(outer.footer_len)?;
        // Cannot overflow: `header_len + min_body_len + footer_len` is already
        // known not to overflow `usize` for a valid `PacketConstraints`.
        let inner_header_footer_len = inner.header_len + inner.footer_len;
        let min_body_len = cmp::max(
            outer.min_body_len.saturating_sub(inner_header_footer_len),
            inner.min_body_len,
        );
        let max_body_len =
            cmp::min(outer.max_body_len.checked_sub(inner_header_footer_len)?, inner.max_body_len);
        PacketConstraints::try_new(header_len, footer_len, min_body_len, max_body_len)
    }
}
705
706pub struct SerializeTarget<'a> {
709 #[allow(missing_docs)]
710 pub header: &'a mut [u8],
711 #[allow(missing_docs)]
712 pub footer: &'a mut [u8],
713}
714
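/// A builder for the header and footer of a single packet layer.
///
/// A `PacketBuilder` reports its size requirements via
/// [`constraints`](PacketBuilder::constraints) and writes its header and
/// footer into a [`SerializeTarget`] once the body is in place.
///
/// Minimal sketch of an implementation (not compiled as a doctest;
/// `U16LenHeader` is a hypothetical builder that prepends the body length as a
/// big-endian `u16`):
///
/// ```ignore
/// struct U16LenHeader;
///
/// impl PacketBuilder for U16LenHeader {
///     fn constraints(&self) -> PacketConstraints {
///         PacketConstraints::new(2, 0, 0, u16::MAX as usize)
///     }
///
///     fn serialize(&self, target: &mut SerializeTarget<'_>, body: FragmentedBytesMut<'_, '_>) {
///         target.header.copy_from_slice(&(body.len() as u16).to_be_bytes());
///     }
/// }
/// ```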
pub trait PacketBuilder: Sized {
    fn constraints(&self) -> PacketConstraints;

    fn serialize(&self, target: &mut SerializeTarget<'_>, body: FragmentedBytesMut<'_, '_>);

    #[inline]
    fn wrap_body<B>(self, body: B) -> Nested<B, Self> {
        Nested { inner: body, outer: self }
    }
}
768
769impl<'a, B: PacketBuilder> PacketBuilder for &'a B {
770 #[inline]
771 fn constraints(&self) -> PacketConstraints {
772 B::constraints(self)
773 }
774 #[inline]
775 fn serialize(&self, target: &mut SerializeTarget<'_>, body: FragmentedBytesMut<'_, '_>) {
776 B::serialize(self, target, body)
777 }
778}
779
780impl<'a, B: PacketBuilder> PacketBuilder for &'a mut B {
781 #[inline]
782 fn constraints(&self) -> PacketConstraints {
783 B::constraints(self)
784 }
785 #[inline]
786 fn serialize(&self, target: &mut SerializeTarget<'_>, body: FragmentedBytesMut<'_, '_>) {
787 B::serialize(self, target, body)
788 }
789}
790
791impl PacketBuilder for () {
792 #[inline]
793 fn constraints(&self) -> PacketConstraints {
794 PacketConstraints::UNCONSTRAINED
795 }
796 #[inline]
797 fn serialize(&self, _target: &mut SerializeTarget<'_>, _body: FragmentedBytesMut<'_, '_>) {}
798}
799
800impl PacketBuilder for Never {
801 fn constraints(&self) -> PacketConstraints {
802 match *self {}
803 }
804 fn serialize(&self, _target: &mut SerializeTarget<'_>, _body: FragmentedBytesMut<'_, '_>) {}
805}
806
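/// A body of type `I` nested inside an outer layer of type `O`.
///
/// `Nested` is produced by [`PacketBuilder::wrap_body`] and
/// [`Serializer::wrap_in`], and is itself a [`Serializer`] when the inner type
/// is a serializer and the outer type is a packet builder.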
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct Nested<I, O> {
    inner: I,
    outer: O,
}
819
820impl<I, O> Nested<I, O> {
821 pub fn new(inner: I, outer: O) -> Nested<I, O> {
825 Nested { inner, outer }
826 }
827
828 #[inline]
831 pub fn into_inner(self) -> I {
832 self.inner
833 }
834
835 #[inline]
838 pub fn into_outer(self) -> O {
839 self.outer
840 }
841
842 #[inline]
843 pub fn inner(&self) -> &I {
844 &self.inner
845 }
846
847 #[inline]
848 pub fn inner_mut(&mut self) -> &mut I {
849 &mut self.inner
850 }
851
852 #[inline]
853 pub fn outer(&self) -> &O {
854 &self.outer
855 }
856
857 #[inline]
858 pub fn outer_mut(&mut self) -> &mut O {
859 &mut self.outer
860 }
861}
862
863#[derive(Copy, Clone, Debug)]
869#[cfg_attr(test, derive(Eq, PartialEq))]
870pub struct LimitedSizePacketBuilder {
871 pub limit: usize,
873}
874
875impl PacketBuilder for LimitedSizePacketBuilder {
876 fn constraints(&self) -> PacketConstraints {
877 PacketConstraints::with_max_body_len(self.limit)
878 }
879
880 fn serialize(&self, _target: &mut SerializeTarget<'_>, _body: FragmentedBytesMut<'_, '_>) {}
881}
882
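/// A builder for the innermost body of a packet: a payload that knows its own
/// length and how to write itself into a contiguous byte slice.
///
/// Illustrative example (not compiled as a doctest; assumes this crate's items
/// are in scope):
///
/// ```ignore
/// // `&[u8]` implements `InnerPacketBuilder` by copying itself.
/// let buf = (&b"hello"[..]).into_serializer().serialize_vec_outer().unwrap();
/// assert_eq!(buf.as_ref(), &b"hello"[..]);
/// ```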
pub trait InnerPacketBuilder {
    fn bytes_len(&self) -> usize;

    fn serialize(&self, buffer: &mut [u8]);

    #[inline]
    fn into_serializer(self) -> InnerSerializer<Self, EmptyBuf>
    where
        Self: Sized,
    {
        self.into_serializer_with(EmptyBuf)
    }

    fn into_serializer_with<B: ShrinkBuffer>(self, mut buffer: B) -> InnerSerializer<Self, B>
    where
        Self: Sized,
    {
        buffer.shrink_back_to(0);
        InnerSerializer { inner: self, buffer }
    }
}
950
951impl<'a, I: InnerPacketBuilder> InnerPacketBuilder for &'a I {
952 #[inline]
953 fn bytes_len(&self) -> usize {
954 I::bytes_len(self)
955 }
956 #[inline]
957 fn serialize(&self, buffer: &mut [u8]) {
958 I::serialize(self, buffer)
959 }
960}
961impl<'a, I: InnerPacketBuilder> InnerPacketBuilder for &'a mut I {
962 #[inline]
963 fn bytes_len(&self) -> usize {
964 I::bytes_len(self)
965 }
966 #[inline]
967 fn serialize(&self, buffer: &mut [u8]) {
968 I::serialize(self, buffer)
969 }
970}
971impl<'a> InnerPacketBuilder for &'a [u8] {
972 #[inline]
973 fn bytes_len(&self) -> usize {
974 self.len()
975 }
976 #[inline]
977 fn serialize(&self, buffer: &mut [u8]) {
978 buffer.copy_from_slice(self);
979 }
980}
981impl<'a> InnerPacketBuilder for &'a mut [u8] {
982 #[inline]
983 fn bytes_len(&self) -> usize {
984 self.len()
985 }
986 #[inline]
987 fn serialize(&self, buffer: &mut [u8]) {
988 buffer.copy_from_slice(self);
989 }
990}
impl InnerPacketBuilder for Vec<u8> {
    #[inline]
    fn bytes_len(&self) -> usize {
        self.len()
    }
    #[inline]
    fn serialize(&self, buffer: &mut [u8]) {
        buffer.copy_from_slice(self.as_slice());
    }
}
1001impl<const N: usize> InnerPacketBuilder for ArrayVec<u8, N> {
1002 fn bytes_len(&self) -> usize {
1003 self.as_slice().bytes_len()
1004 }
1005 fn serialize(&self, buffer: &mut [u8]) {
1006 self.as_slice().serialize(buffer);
1007 }
1008}
1009
1010pub struct ByteSliceInnerPacketBuilder<B>(pub B);
1017
1018impl<B: SplitByteSlice> InnerPacketBuilder for ByteSliceInnerPacketBuilder<B> {
1019 fn bytes_len(&self) -> usize {
1020 self.0.deref().bytes_len()
1021 }
1022 fn serialize(&self, buffer: &mut [u8]) {
1023 self.0.deref().serialize(buffer)
1024 }
1025}
1026
1027impl<B: SplitByteSlice> Debug for ByteSliceInnerPacketBuilder<B> {
1028 fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
1029 write!(f, "ByteSliceInnerPacketBuilder({:?})", self.0.as_ref())
1030 }
1031}
1032
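/// An error encountered while serializing.
///
/// `Alloc` wraps an error returned by the buffer allocator or provider, while
/// `SizeLimitExceeded` indicates that the body did not fit within the
/// applicable maximum body length and could not be truncated.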
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum SerializeError<A> {
    Alloc(A),
    SizeLimitExceeded,
}
1046
1047impl<A> SerializeError<A> {
1048 #[inline]
1050 pub fn is_alloc(&self) -> bool {
1051 match self {
1052 SerializeError::Alloc(_) => true,
1053 SerializeError::SizeLimitExceeded => false,
1054 }
1055 }
1056
1057 #[inline]
1059 pub fn is_size_limit_exceeded(&self) -> bool {
1060 match self {
1061 SerializeError::Alloc(_) => false,
1062 SerializeError::SizeLimitExceeded => true,
1063 }
1064 }
1065
1066 pub fn map_alloc<T, F: FnOnce(A) -> T>(self, f: F) -> SerializeError<T> {
1068 match self {
1069 SerializeError::Alloc(a) => SerializeError::Alloc(f(a)),
1070 SerializeError::SizeLimitExceeded => SerializeError::SizeLimitExceeded,
1071 }
1072 }
1073}
1074
1075impl<A> From<A> for SerializeError<A> {
1076 fn from(a: A) -> SerializeError<A> {
1077 SerializeError::Alloc(a)
1078 }
1079}
1080
1081#[derive(Copy, Clone, Debug, Eq, PartialEq)]
1090pub struct BufferTooShortError;
1091
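/// An object capable of providing the output buffer for a serialization
/// operation, either by reusing the input buffer or by allocating a new one
/// with the requested prefix and suffix capacity.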
pub trait BufferProvider<Input, Output> {
    type Error;

    fn alloc_no_reuse(
        self,
        prefix: usize,
        body: usize,
        suffix: usize,
    ) -> Result<Output, Self::Error>;

    fn reuse_or_realloc(
        self,
        buffer: Input,
        prefix: usize,
        suffix: usize,
    ) -> Result<Output, (Self::Error, Input)>;
}
1149
pub trait BufferAlloc<Output> {
    type Error;

    fn alloc(self, len: usize) -> Result<Output, Self::Error>;
}
1179
1180impl<O, E, F: FnOnce(usize) -> Result<O, E>> BufferAlloc<O> for F {
1181 type Error = E;
1182
1183 #[inline]
1184 fn alloc(self, len: usize) -> Result<O, E> {
1185 self(len)
1186 }
1187}
1188
1189impl BufferAlloc<Never> for () {
1190 type Error = ();
1191
1192 #[inline]
1193 fn alloc(self, _len: usize) -> Result<Never, ()> {
1194 Err(())
1195 }
1196}
1197
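/// Allocates a new zeroed `Buf<Vec<u8>>` of the given length, usable as a
/// [`BufferAlloc`] via the blanket implementation of [`BufferAlloc`] for
/// `FnOnce(usize) -> Result<O, E>`.
///
/// Illustrative example (not compiled as a doctest; assumes the buffer traits
/// are in scope):
///
/// ```ignore
/// let buf = new_buf_vec(4).unwrap();
/// assert_eq!(buf.len(), 4);
/// assert_eq!(buf.as_ref(), &[0, 0, 0, 0][..]);
/// ```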
pub fn new_buf_vec(len: usize) -> Result<Buf<Vec<u8>>, Never> {
    Ok(Buf::new(vec![0; len], ..))
}
1211
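/// Attempts to reuse `buffer` for a serialization that needs `prefix` bytes
/// before the body and `suffix` bytes after it.
///
/// The buffer is returned unchanged if it already has enough prefix and suffix
/// space. Otherwise, if its total capacity is sufficient and the body is at
/// most `max_copy_bytes` long, the body is moved into place within the
/// existing allocation. If neither applies, the original buffer is returned as
/// the error.
///
/// Illustrative example (not compiled as a doctest; assumes the buffer traits
/// are in scope):
///
/// ```ignore
/// let mut bytes = [0u8; 16];
/// // The body occupies the middle; 4 bytes of prefix and suffix are free.
/// let buffer = Buf::new(&mut bytes[..], 4..12);
/// // Enough existing prefix/suffix space: the buffer is reused as-is.
/// assert!(try_reuse_buffer(buffer, 4, 4, 0).is_ok());
/// ```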
#[inline]
pub fn try_reuse_buffer<B: GrowBufferMut + ShrinkBuffer>(
    mut buffer: B,
    prefix: usize,
    suffix: usize,
    max_copy_bytes: usize,
) -> Result<B, B> {
    let need_prefix = prefix;
    let need_suffix = suffix;
    let have_prefix = buffer.prefix_len();
    let have_body = buffer.len();
    let have_suffix = buffer.suffix_len();
    let need_capacity = need_prefix + have_body + need_suffix;

    if have_prefix >= need_prefix && have_suffix >= need_suffix {
        // The buffer already has enough prefix and suffix space; use it as-is.
        Ok(buffer)
    } else if buffer.capacity() >= need_capacity && have_body <= max_copy_bytes {
        // The total capacity is sufficient, so move the body into place.
        buffer.reset();

        buffer.copy_within(have_prefix..(have_prefix + have_body), need_prefix);
        buffer.shrink(need_prefix..(need_prefix + have_body));
        debug_assert_eq!(buffer.prefix_len(), need_prefix);
        debug_assert!(buffer.suffix_len() >= need_suffix);
        debug_assert_eq!(buffer.len(), have_body);
        Ok(buffer)
    } else {
        Err(buffer)
    }
}
1271
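/// A [`BufferProvider`] that reuses the input buffer when it can satisfy the
/// requested prefix and suffix (via [`try_reuse_buffer`]), and otherwise
/// allocates a new buffer with its inner [`BufferAlloc`] and copies the body
/// into it.
///
/// Illustrative example (not compiled as a doctest; assumes this crate's items
/// are in scope):
///
/// ```ignore
/// let buffer = Buf::new(vec![0u8; 8], 2..6);
/// // 2 bytes of prefix and suffix are already available, so the original
/// // buffer is reused rather than reallocated.
/// let out = BufferProvider::reuse_or_realloc(
///     MaybeReuseBufferProvider(new_buf_vec),
///     buffer,
///     2,
///     2,
/// )
/// .unwrap();
/// assert!(matches!(out, Either::A(_)));
/// ```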
pub struct MaybeReuseBufferProvider<A>(pub A);
1276
1277impl<I: ReusableBuffer, O: ReusableBuffer, A: BufferAlloc<O>> BufferProvider<I, Either<I, O>>
1278 for MaybeReuseBufferProvider<A>
1279{
1280 type Error = A::Error;
1281
1282 fn alloc_no_reuse(
1283 self,
1284 prefix: usize,
1285 body: usize,
1286 suffix: usize,
1287 ) -> Result<Either<I, O>, Self::Error> {
1288 let Self(alloc) = self;
1289 let need_capacity = prefix + body + suffix;
1290 BufferAlloc::alloc(alloc, need_capacity).map(|mut buf| {
1291 buf.shrink(prefix..(prefix + body));
1292 Either::B(buf)
1293 })
1294 }
1295
1296 #[inline]
1305 fn reuse_or_realloc(
1306 self,
1307 buffer: I,
1308 need_prefix: usize,
1309 need_suffix: usize,
1310 ) -> Result<Either<I, O>, (A::Error, I)> {
1311 match try_reuse_buffer(buffer, need_prefix, need_suffix, usize::MAX) {
1316 Ok(buffer) => Ok(Either::A(buffer)),
1317 Err(buffer) => {
1318 let have_body = buffer.len();
1319 let mut buf = match BufferProvider::<I, Either<I, O>>::alloc_no_reuse(
1320 self,
1321 need_prefix,
1322 have_body,
1323 need_suffix,
1324 ) {
1325 Ok(buf) => buf,
1326 Err(err) => return Err((err, buffer)),
1327 };
1328
1329 buf.copy_from(&buffer);
1330 debug_assert_eq!(buf.prefix_len(), need_prefix);
1331 debug_assert!(buf.suffix_len() >= need_suffix);
1332 debug_assert_eq!(buf.len(), have_body);
1333 Ok(buf)
1334 }
1335 }
1336 }
1337}
1338
1339impl<B: ReusableBuffer, A: BufferAlloc<B>> BufferProvider<B, B> for MaybeReuseBufferProvider<A> {
1340 type Error = A::Error;
1341
1342 fn alloc_no_reuse(self, prefix: usize, body: usize, suffix: usize) -> Result<B, Self::Error> {
1343 BufferProvider::<B, Either<B, B>>::alloc_no_reuse(self, prefix, body, suffix)
1344 .map(Either::into_inner)
1345 }
1346
1347 #[inline]
1356 fn reuse_or_realloc(self, buffer: B, prefix: usize, suffix: usize) -> Result<B, (A::Error, B)> {
1357 BufferProvider::<B, Either<B, B>>::reuse_or_realloc(self, buffer, prefix, suffix)
1358 .map(Either::into_inner)
1359 }
1360}
1361
1362pub struct NoReuseBufferProvider<A>(pub A);
1366
1367impl<I: FragmentedBuffer, O: ReusableBuffer, A: BufferAlloc<O>> BufferProvider<I, O>
1368 for NoReuseBufferProvider<A>
1369{
1370 type Error = A::Error;
1371
1372 fn alloc_no_reuse(self, prefix: usize, body: usize, suffix: usize) -> Result<O, A::Error> {
1373 let Self(alloc) = self;
1374 alloc.alloc(prefix + body + suffix).map(|mut b| {
1375 b.shrink(prefix..prefix + body);
1376 b
1377 })
1378 }
1379
1380 fn reuse_or_realloc(self, buffer: I, prefix: usize, suffix: usize) -> Result<O, (A::Error, I)> {
1381 BufferProvider::<I, O>::alloc_no_reuse(self, prefix, buffer.len(), suffix)
1382 .map(|mut b| {
1383 b.copy_from(&buffer);
1384 b
1385 })
1386 .map_err(|e| (e, buffer))
1387 }
1388}
1389
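/// An object that can be serialized into a buffer, possibly wrapped in
/// additional layers of headers and footers.
///
/// Implementations in this module include raw buffers, [`InnerSerializer`],
/// [`TruncatingSerializer`], [`EitherSerializer`], and [`Nested`] combinations
/// of a serializer with a [`PacketBuilder`]. Serialization consumes the
/// serializer and produces a buffer containing the fully-encapsulated packet.
///
/// Illustrative example (not compiled as a doctest; assumes this crate's items
/// are in scope):
///
/// ```ignore
/// let ser = Buf::new(vec![1u8, 2, 3], ..).with_size_limit(1500);
/// let buf = ser.serialize_vec_outer().unwrap();
/// assert_eq!(buf.as_ref(), &[1, 2, 3][..]);
/// ```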
pub trait Serializer: Sized {
    type Buffer;
1393
1394 fn serialize<B: GrowBufferMut, P: BufferProvider<Self::Buffer, B>>(
1407 self,
1408 outer: PacketConstraints,
1409 provider: P,
1410 ) -> Result<B, (SerializeError<P::Error>, Self)>;
1411
1412 fn serialize_new_buf<B: ReusableBuffer, A: BufferAlloc<B>>(
1419 &self,
1420 outer: PacketConstraints,
1421 alloc: A,
1422 ) -> Result<B, SerializeError<A::Error>>;
1423
1424 #[inline]
1440 #[allow(clippy::type_complexity)]
1441 fn serialize_vec(
1442 self,
1443 outer: PacketConstraints,
1444 ) -> Result<Either<Self::Buffer, Buf<Vec<u8>>>, (SerializeError<Never>, Self)>
1445 where
1446 Self::Buffer: ReusableBuffer,
1447 {
1448 self.serialize(outer, MaybeReuseBufferProvider(new_buf_vec))
1449 }
1450
1451 #[inline]
1465 fn serialize_no_alloc(
1466 self,
1467 outer: PacketConstraints,
1468 ) -> Result<Self::Buffer, (SerializeError<BufferTooShortError>, Self)>
1469 where
1470 Self::Buffer: ReusableBuffer,
1471 {
1472 self.serialize(outer, MaybeReuseBufferProvider(())).map(Either::into_a).map_err(
1473 |(err, slf)| {
1474 (
1475 match err {
1476 SerializeError::Alloc(()) => BufferTooShortError.into(),
1477 SerializeError::SizeLimitExceeded => SerializeError::SizeLimitExceeded,
1478 },
1479 slf,
1480 )
1481 },
1482 )
1483 }
1484
1485 #[inline]
1494 fn serialize_outer<B: GrowBufferMut, P: BufferProvider<Self::Buffer, B>>(
1495 self,
1496 provider: P,
1497 ) -> Result<B, (SerializeError<P::Error>, Self)> {
1498 self.serialize(PacketConstraints::UNCONSTRAINED, provider)
1499 }
1500
1501 #[inline]
1512 #[allow(clippy::type_complexity)]
1513 fn serialize_vec_outer(
1514 self,
1515 ) -> Result<Either<Self::Buffer, Buf<Vec<u8>>>, (SerializeError<Never>, Self)>
1516 where
1517 Self::Buffer: ReusableBuffer,
1518 {
1519 self.serialize_vec(PacketConstraints::UNCONSTRAINED)
1520 }
1521
1522 #[inline]
1532 fn serialize_no_alloc_outer(
1533 self,
1534 ) -> Result<Self::Buffer, (SerializeError<BufferTooShortError>, Self)>
1535 where
1536 Self::Buffer: ReusableBuffer,
1537 {
1538 self.serialize_no_alloc(PacketConstraints::UNCONSTRAINED)
1539 }
1540
1541 #[inline]
1548 fn wrap_in<B: PacketBuilder>(self, outer: B) -> Nested<Self, B> {
1549 outer.wrap_body(self)
1550 }
1551
1552 #[inline]
1555 fn encapsulate<B>(self, outer: B) -> Nested<Self, B> {
1556 Nested { inner: self, outer }
1557 }
1558
1559 #[inline]
1568 fn with_size_limit(self, limit: usize) -> Nested<Self, LimitedSizePacketBuilder> {
1569 self.wrap_in(LimitedSizePacketBuilder { limit })
1570 }
1571}
1572
1573#[derive(Copy, Clone, Debug, Eq, PartialEq)]
1580pub struct InnerSerializer<I, B> {
1581 inner: I,
1582 buffer: B,
1587}
1588
1589impl<I, B> InnerSerializer<I, B> {
1590 pub fn inner(&self) -> &I {
1591 &self.inner
1592 }
1593}
1594
1595struct InnerPacketBuilderWrapper<I>(I);
1601
1602impl<I: InnerPacketBuilder> PacketBuilder for InnerPacketBuilderWrapper<I> {
1603 fn constraints(&self) -> PacketConstraints {
1604 let Self(wrapped) = self;
1605 PacketConstraints::new(wrapped.bytes_len(), 0, 0, usize::MAX)
1606 }
1607
1608 fn serialize(&self, target: &mut SerializeTarget<'_>, _body: FragmentedBytesMut<'_, '_>) {
1609 let Self(wrapped) = self;
1610
1611 debug_assert_eq!(target.header.len(), wrapped.bytes_len());
1615 debug_assert_eq!(target.footer.len(), 0);
1616
1617 InnerPacketBuilder::serialize(wrapped, target.header);
1618 }
1619}
1620
1621impl<I: InnerPacketBuilder, B: GrowBuffer + ShrinkBuffer> Serializer for InnerSerializer<I, B> {
1622 type Buffer = B;
1623
1624 #[inline]
1625 fn serialize<BB: GrowBufferMut, P: BufferProvider<B, BB>>(
1626 self,
1627 outer: PacketConstraints,
1628 provider: P,
1629 ) -> Result<BB, (SerializeError<P::Error>, InnerSerializer<I, B>)> {
1630 debug_assert_eq!(self.buffer.len(), 0);
1631 InnerPacketBuilderWrapper(self.inner)
1632 .wrap_body(self.buffer)
1633 .serialize(outer, provider)
1634 .map_err(|(err, Nested { inner: buffer, outer: pb })| {
1635 (err, InnerSerializer { inner: pb.0, buffer })
1636 })
1637 }
1638
1639 #[inline]
1640 fn serialize_new_buf<BB: ReusableBuffer, A: BufferAlloc<BB>>(
1641 &self,
1642 outer: PacketConstraints,
1643 alloc: A,
1644 ) -> Result<BB, SerializeError<A::Error>> {
1645 InnerPacketBuilderWrapper(&self.inner).wrap_body(EmptyBuf).serialize_new_buf(outer, alloc)
1646 }
1647}
1648
1649impl<B: GrowBuffer + ShrinkBuffer> Serializer for B {
1650 type Buffer = B;
1651
1652 #[inline]
1653 fn serialize<BB: GrowBufferMut, P: BufferProvider<Self::Buffer, BB>>(
1654 self,
1655 outer: PacketConstraints,
1656 provider: P,
1657 ) -> Result<BB, (SerializeError<P::Error>, Self)> {
1658 TruncatingSerializer::new(self, TruncateDirection::NoTruncating)
1659 .serialize(outer, provider)
1660 .map_err(|(err, ser)| (err, ser.buffer))
1661 }
1662
1663 fn serialize_new_buf<BB: ReusableBuffer, A: BufferAlloc<BB>>(
1664 &self,
1665 outer: PacketConstraints,
1666 alloc: A,
1667 ) -> Result<BB, SerializeError<A::Error>> {
1668 if self.len() > outer.max_body_len() {
1669 return Err(SerializeError::SizeLimitExceeded);
1670 }
1671
1672 let padding = outer.min_body_len().saturating_sub(self.len());
1673 let tail_size = padding + outer.footer_len();
1674 let buffer_size = outer.header_len() + self.len() + tail_size;
1675 let mut buffer = alloc.alloc(buffer_size)?;
1676 buffer.shrink_front(outer.header_len());
1677 buffer.shrink_back(tail_size);
1678 buffer.copy_from(self);
1679 buffer.grow_back(padding);
1680 Ok(buffer)
1681 }
1682}
1683
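/// The union of two serializer types that produce the same buffer type.
///
/// `EitherSerializer` makes it possible for a single code path to operate on
/// one of two different serializer types, delegating to whichever variant is
/// present.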
pub enum EitherSerializer<A, B> {
    A(A),
    B(B),
}
1691
1692impl<A: Serializer, B: Serializer<Buffer = A::Buffer>> Serializer for EitherSerializer<A, B> {
1693 type Buffer = A::Buffer;
1694
1695 fn serialize<TB: GrowBufferMut, P: BufferProvider<Self::Buffer, TB>>(
1696 self,
1697 outer: PacketConstraints,
1698 provider: P,
1699 ) -> Result<TB, (SerializeError<P::Error>, Self)> {
1700 match self {
1701 EitherSerializer::A(s) => {
1702 s.serialize(outer, provider).map_err(|(err, s)| (err, EitherSerializer::A(s)))
1703 }
1704 EitherSerializer::B(s) => {
1705 s.serialize(outer, provider).map_err(|(err, s)| (err, EitherSerializer::B(s)))
1706 }
1707 }
1708 }
1709
1710 fn serialize_new_buf<TB: ReusableBuffer, BA: BufferAlloc<TB>>(
1711 &self,
1712 outer: PacketConstraints,
1713 alloc: BA,
1714 ) -> Result<TB, SerializeError<BA::Error>> {
1715 match self {
1716 EitherSerializer::A(s) => s.serialize_new_buf(outer, alloc),
1717 EitherSerializer::B(s) => s.serialize_new_buf(outer, alloc),
1718 }
1719 }
1720}
1721
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum TruncateDirection {
    DiscardFront,
    DiscardBack,
    NoTruncating,
}
1735
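/// A serializer that truncates its body to fit a maximum body length rather
/// than failing with [`SerializeError::SizeLimitExceeded`], discarding bytes
/// from the front or the back as configured by [`TruncateDirection`] (with
/// `NoTruncating` it behaves like a plain buffer and fails instead).
///
/// Illustrative example (not compiled as a doctest; assumes this crate's items
/// are in scope):
///
/// ```ignore
/// let body = Buf::new(vec![0u8, 1, 2, 3, 4, 5], ..);
/// let ser = TruncatingSerializer::new(body, TruncateDirection::DiscardBack)
///     .with_size_limit(4);
/// let buf = ser.serialize_vec_outer().unwrap();
/// assert_eq!(buf.as_ref(), &[0, 1, 2, 3][..]);
/// ```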
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct TruncatingSerializer<B> {
    buffer: B,
    direction: TruncateDirection,
}
1752
1753impl<B> TruncatingSerializer<B> {
1754 pub fn new(buffer: B, direction: TruncateDirection) -> TruncatingSerializer<B> {
1756 TruncatingSerializer { buffer, direction }
1757 }
1758
1759 pub fn buffer(&self) -> &B {
1761 &self.buffer
1762 }
1763
1764 pub fn buffer_mut(&mut self) -> &mut B {
1766 &mut self.buffer
1767 }
1768}
1769
1770impl<B: GrowBuffer + ShrinkBuffer> Serializer for TruncatingSerializer<B> {
1771 type Buffer = B;
1772
1773 fn serialize<BB: GrowBufferMut, P: BufferProvider<B, BB>>(
1774 mut self,
1775 outer: PacketConstraints,
1776 provider: P,
1777 ) -> Result<BB, (SerializeError<P::Error>, Self)> {
1778 let original_len = self.buffer.len();
1779 let excess_bytes = if original_len > outer.max_body_len {
1780 Some(original_len - outer.max_body_len)
1781 } else {
1782 None
1783 };
1784 if let Some(excess_bytes) = excess_bytes {
1785 match self.direction {
1786 TruncateDirection::DiscardFront => self.buffer.shrink_front(excess_bytes),
1787 TruncateDirection::DiscardBack => self.buffer.shrink_back(excess_bytes),
1788 TruncateDirection::NoTruncating => {
1789 return Err((SerializeError::SizeLimitExceeded, self))
1790 }
1791 }
1792 }
1793
1794 let padding = outer.min_body_len().saturating_sub(self.buffer.len());
1795
1796 debug_assert!(self.buffer.len() + padding <= outer.max_body_len());
1800 match provider.reuse_or_realloc(
1801 self.buffer,
1802 outer.header_len(),
1803 padding + outer.footer_len(),
1804 ) {
1805 Ok(buffer) => Ok(buffer),
1806 Err((err, mut buffer)) => {
1807 if let Some(excess_bytes) = excess_bytes {
1811 match self.direction {
1812 TruncateDirection::DiscardFront => buffer.grow_front(excess_bytes),
1813 TruncateDirection::DiscardBack => buffer.grow_back(excess_bytes),
1814 TruncateDirection::NoTruncating => unreachable!(),
1815 }
1816 }
1817
1818 Err((
1819 SerializeError::Alloc(err),
1820 TruncatingSerializer { buffer, direction: self.direction },
1821 ))
1822 }
1823 }
1824 }
1825
1826 fn serialize_new_buf<BB: ReusableBuffer, A: BufferAlloc<BB>>(
1827 &self,
1828 outer: PacketConstraints,
1829 alloc: A,
1830 ) -> Result<BB, SerializeError<A::Error>> {
1831 let truncated_size = cmp::min(self.buffer.len(), outer.max_body_len());
1832 let discarded_bytes = self.buffer.len() - truncated_size;
1833 let padding = outer.min_body_len().saturating_sub(truncated_size);
1834 let tail_size = padding + outer.footer_len();
1835 let buffer_size = outer.header_len() + truncated_size + tail_size;
1836 let mut buffer = alloc.alloc(buffer_size)?;
1837 buffer.shrink_front(outer.header_len());
1838 buffer.shrink_back(tail_size);
1839 buffer.with_bytes_mut(|mut dst| {
1840 self.buffer.with_bytes(|src| {
1841 let src = match (discarded_bytes > 0, self.direction) {
1842 (false, _) => src,
1843 (true, TruncateDirection::DiscardFront) => src.slice(discarded_bytes..),
1844 (true, TruncateDirection::DiscardBack) => src.slice(..truncated_size),
1845 (true, TruncateDirection::NoTruncating) => {
1846 return Err(SerializeError::SizeLimitExceeded)
1847 }
1848 };
1849 dst.copy_from(&src);
1850 Ok(())
1851 })
1852 })?;
1853 buffer.grow_back_zero(padding);
1854 Ok(buffer)
1855 }
1856}
1857
1858impl<I: Serializer, O: PacketBuilder> Serializer for Nested<I, O> {
1859 type Buffer = I::Buffer;
1860
1861 #[inline]
1862 fn serialize<B: GrowBufferMut, P: BufferProvider<I::Buffer, B>>(
1863 self,
1864 outer: PacketConstraints,
1865 provider: P,
1866 ) -> Result<B, (SerializeError<P::Error>, Self)> {
1867 let Some(outer) = self.outer.constraints().try_encapsulate(&outer) else {
1868 return Err((SerializeError::SizeLimitExceeded, self));
1869 };
1870
1871 match self.inner.serialize(outer, provider) {
1872 Ok(mut buf) => {
1873 buf.serialize(&self.outer);
1874 Ok(buf)
1875 }
1876 Err((err, inner)) => Err((err, self.outer.wrap_body(inner))),
1877 }
1878 }
1879
1880 #[inline]
1881 fn serialize_new_buf<B: ReusableBuffer, A: BufferAlloc<B>>(
1882 &self,
1883 outer: PacketConstraints,
1884 alloc: A,
1885 ) -> Result<B, SerializeError<A::Error>> {
1886 let Some(outer) = self.outer.constraints().try_encapsulate(&outer) else {
1887 return Err(SerializeError::SizeLimitExceeded);
1888 };
1889
1890 let mut buf = self.inner.serialize_new_buf(outer, alloc)?;
1891 GrowBufferMut::serialize(&mut buf, &self.outer);
1892 Ok(buf)
1893 }
1894}
1895
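/// Support for partial serialization: computing the size a packet would occupy
/// on the wire and writing as much of its leading headers as fit into a
/// caller-provided prefix buffer, without materializing the body.
///
/// [`PartialPacketBuilder`] extends [`PacketBuilder`] with the ability to
/// write a header given only the body length, and [`PartialSerializer`]
/// (below) is the corresponding serializer-side trait returning a
/// [`PartialSerializeResult`].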
pub trait PartialPacketBuilder: PacketBuilder {
    fn partial_serialize(&self, body_len: usize, buffer: &mut [u8]);
}

impl PartialPacketBuilder for () {
    fn partial_serialize(&self, _body_len: usize, _buffer: &mut [u8]) {}
}

#[derive(Debug, Eq, PartialEq)]
pub struct PartialSerializeResult {
    /// The number of bytes written into the provided buffer.
    pub bytes_written: usize,

    /// The total size the packet would occupy if fully serialized.
    pub total_size: usize,
}

pub trait PartialSerializer: Sized {
    fn partial_serialize(
        &self,
        outer: PacketConstraints,
        buffer: &mut [u8],
    ) -> Result<PartialSerializeResult, SerializeError<Never>>;
}
1943
1944impl<B: GrowBuffer + ShrinkBuffer> PartialSerializer for B {
1945 fn partial_serialize(
1946 &self,
1947 _outer: PacketConstraints,
1948 _buffer: &mut [u8],
1949 ) -> Result<PartialSerializeResult, SerializeError<Never>> {
1950 Ok(PartialSerializeResult { bytes_written: 0, total_size: self.len() })
1951 }
1952}
1953
1954impl<B: GrowBuffer + ShrinkBuffer> PartialSerializer for TruncatingSerializer<B> {
1955 fn partial_serialize(
1956 &self,
1957 outer: PacketConstraints,
1958 _buffer: &mut [u8],
1959 ) -> Result<PartialSerializeResult, SerializeError<Never>> {
1960 let total_size =
1961 cmp::max(outer.min_body_len(), cmp::min(self.buffer().len(), outer.max_body_len()));
1962 Ok(PartialSerializeResult { bytes_written: 0, total_size })
1963 }
1964}
1965
1966impl<I: InnerPacketBuilder, B: GrowBuffer + ShrinkBuffer> PartialSerializer
1967 for InnerSerializer<I, B>
1968{
1969 fn partial_serialize(
1970 &self,
1971 outer: PacketConstraints,
1972 _buffer: &mut [u8],
1973 ) -> Result<PartialSerializeResult, SerializeError<Never>> {
1974 Ok(PartialSerializeResult {
1975 bytes_written: 0,
1976 total_size: cmp::max(self.inner().bytes_len(), outer.min_body_len()),
1977 })
1978 }
1979}
1980
1981impl<A: Serializer + PartialSerializer, B: Serializer + PartialSerializer> PartialSerializer
1982 for EitherSerializer<A, B>
1983{
1984 fn partial_serialize(
1985 &self,
1986 outer: PacketConstraints,
1987 buffer: &mut [u8],
1988 ) -> Result<PartialSerializeResult, SerializeError<Never>> {
1989 match self {
1990 EitherSerializer::A(s) => s.partial_serialize(outer, buffer),
1991 EitherSerializer::B(s) => s.partial_serialize(outer, buffer),
1992 }
1993 }
1994}
1995
1996impl<I: PartialSerializer, O: PartialPacketBuilder> PartialSerializer for Nested<I, O> {
1997 fn partial_serialize(
1998 &self,
1999 outer: PacketConstraints,
2000 buffer: &mut [u8],
2001 ) -> Result<PartialSerializeResult, SerializeError<Never>> {
2002 let header_constraints = self.outer.constraints();
2003 let Some(constraints) = outer.try_encapsulate(&header_constraints) else {
2004 return Err(SerializeError::SizeLimitExceeded);
2005 };
2006
2007 let header_len = header_constraints.header_len();
2008 let inner_buf = buffer.get_mut(header_len..).unwrap_or(&mut []);
2009 let mut result = self.inner.partial_serialize(constraints, inner_buf)?;
2010 if header_len <= buffer.len() {
2011 self.outer.partial_serialize(result.total_size, &mut buffer[..header_len]);
2012 result.bytes_written += header_len;
2013 }
2014 result.total_size += header_len + header_constraints.footer_len();
2015 Ok(result)
2016 }
2017}
2018
2019#[cfg(test)]
2020mod tests {
2021 use super::*;
2022 use crate::BufferMut;
2023 use std::fmt::Debug;
2024 use test_case::test_case;
2025 use test_util::{assert_geq, assert_leq};
2026
2027 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
2033 struct DummyPacketBuilder {
2034 header_len: usize,
2035 footer_len: usize,
2036 min_body_len: usize,
2037 max_body_len: usize,
2038 }
2039
2040 impl DummyPacketBuilder {
2041 fn new(
2042 header_len: usize,
2043 footer_len: usize,
2044 min_body_len: usize,
2045 max_body_len: usize,
2046 ) -> DummyPacketBuilder {
2047 DummyPacketBuilder { header_len, footer_len, min_body_len, max_body_len }
2048 }
2049 }
2050
2051 fn fill(bytes: &mut [u8], byte: u8) {
2052 for b in bytes {
2053 *b = byte;
2054 }
2055 }
2056
2057 impl PacketBuilder for DummyPacketBuilder {
2058 fn constraints(&self) -> PacketConstraints {
2059 PacketConstraints::new(
2060 self.header_len,
2061 self.footer_len,
2062 self.min_body_len,
2063 self.max_body_len,
2064 )
2065 }
2066
2067 fn serialize(&self, target: &mut SerializeTarget<'_>, body: FragmentedBytesMut<'_, '_>) {
2068 assert_eq!(target.header.len(), self.header_len);
2069 assert_eq!(target.footer.len(), self.footer_len);
2070 assert!(body.len() >= self.min_body_len);
2071 assert!(body.len() <= self.max_body_len);
2072 fill(target.header, 0xFF);
2073 fill(target.footer, 0xFE);
2074 }
2075 }
2076
2077 impl InnerPacketBuilder for DummyPacketBuilder {
2078 fn bytes_len(&self) -> usize {
2079 self.header_len
2080 }
2081
2082 fn serialize(&self, buffer: &mut [u8]) {
2083 assert_eq!(buffer.len(), self.header_len);
2084 fill(buffer, 0xFF);
2085 }
2086 }
2087
2088 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
2090 struct SerializerVerifier {
2091 inner_len: Option<usize>,
2094
2095 truncating: bool,
2098 }
2099
2100 impl SerializerVerifier {
2101 fn new<S: Serializer>(serializer: &S, truncating: bool) -> Self {
2102 let inner_len = serializer
2103 .serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec)
2104 .map(|buf| buf.len())
2105 .inspect_err(|err| assert!(err.is_size_limit_exceeded()))
2106 .ok();
2107 Self { inner_len, truncating }
2108 }
2109
2110 fn verify_result<B: GrowBufferMut, A>(
2111 &self,
2112 result: Result<&B, &SerializeError<A>>,
2113 outer: PacketConstraints,
2114 ) {
2115 let should_exceed_size_limit = match self.inner_len {
2116 Some(inner_len) => outer.max_body_len() < inner_len && !self.truncating,
2117 None => true,
2118 };
2119
2120 match result {
2121 Ok(buf) => {
2122 assert_geq!(buf.prefix_len(), outer.header_len());
2123 assert_geq!(buf.suffix_len(), outer.footer_len());
2124 assert_leq!(buf.len(), outer.max_body_len());
2125
2126 let padding = outer.min_body_len().saturating_sub(buf.len());
2131 assert_leq!(padding + outer.footer_len(), buf.suffix_len());
2132
2133 assert!(!should_exceed_size_limit);
2134 }
2135 Err(err) => {
2136 if should_exceed_size_limit {
2139 assert!(err.is_size_limit_exceeded());
2140 } else {
2141 assert!(err.is_alloc());
2142 }
2143 }
2144 }
2145 }
2146 }
2147
2148 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
2157 struct VerifyingSerializer<S> {
2158 ser: S,
2159 verifier: SerializerVerifier,
2160 }
2161
2162 impl<S: Serializer + Debug + Clone + Eq> Serializer for VerifyingSerializer<S>
2163 where
2164 S::Buffer: ReusableBuffer,
2165 {
2166 type Buffer = S::Buffer;
2167
2168 fn serialize<B: GrowBufferMut, P: BufferProvider<Self::Buffer, B>>(
2169 self,
2170 outer: PacketConstraints,
2171 provider: P,
2172 ) -> Result<B, (SerializeError<P::Error>, Self)> {
2173 let Self { ser, verifier } = self;
2174 let orig = ser.clone();
2175
2176 let result = ser.serialize(outer, provider).map_err(|(err, ser)| {
2177 assert_eq!(ser, orig);
2180 (err, Self { ser, verifier })
2181 });
2182
2183 verifier.verify_result(result.as_ref().map_err(|(err, _ser)| err), outer);
2184
2185 result
2186 }
2187
2188 fn serialize_new_buf<B: ReusableBuffer, A: BufferAlloc<B>>(
2189 &self,
2190 outer: PacketConstraints,
2191 alloc: A,
2192 ) -> Result<B, SerializeError<A::Error>> {
2193 let res = self.ser.serialize_new_buf(outer, alloc);
2194 self.verifier.verify_result(res.as_ref(), outer);
2195 res
2196 }
2197 }
2198
2199 trait SerializerExt: Serializer {
2200 fn into_verifying(self, truncating: bool) -> VerifyingSerializer<Self>
2201 where
2202 Self::Buffer: ReusableBuffer,
2203 {
2204 let verifier = SerializerVerifier::new(&self, truncating);
2205 VerifyingSerializer { ser: self, verifier }
2206 }
2207
2208 fn wrap_in_verifying<B: PacketBuilder>(
2209 self,
2210 outer: B,
2211 truncating: bool,
2212 ) -> VerifyingSerializer<Nested<Self, B>>
2213 where
2214 Self::Buffer: ReusableBuffer,
2215 {
2216 self.wrap_in(outer).into_verifying(truncating)
2217 }
2218
2219 fn with_size_limit_verifying(
2220 self,
2221 limit: usize,
2222 truncating: bool,
2223 ) -> VerifyingSerializer<Nested<Self, LimitedSizePacketBuilder>>
2224 where
2225 Self::Buffer: ReusableBuffer,
2226 {
2227 self.with_size_limit(limit).into_verifying(truncating)
2228 }
2229 }
2230
2231 impl<S: Serializer> SerializerExt for S {}
2232
2233 #[test]
2234 fn test_either_into_inner() {
2235 fn ret_either(a: u32, b: u32, c: bool) -> Either<u32, u32> {
2236 if c {
2237 Either::A(a)
2238 } else {
2239 Either::B(b)
2240 }
2241 }
2242
2243 assert_eq!(ret_either(1, 2, true).into_inner(), 1);
2244 assert_eq!(ret_either(1, 2, false).into_inner(), 2);
2245 }
2246
2247 #[test]
2248 fn test_either_unwrap_success() {
2249 assert_eq!(Either::<u16, u32>::A(5).unwrap_a(), 5);
2250 assert_eq!(Either::<u16, u32>::B(10).unwrap_b(), 10);
2251 }
2252
2253 #[test]
2254 #[should_panic]
2255 fn test_either_unwrap_a_panic() {
2256 let _: u16 = Either::<u16, u32>::B(10).unwrap_a();
2257 }
2258
2259 #[test]
2260 #[should_panic]
2261 fn test_either_unwrap_b_panic() {
2262 let _: u32 = Either::<u16, u32>::A(5).unwrap_b();
2263 }
2264
2265 #[test_case(Buf::new((0..100).collect(), ..); "entire buf")]
2266 #[test_case(Buf::new((0..100).collect(), 0..0); "empty range")]
2267 #[test_case(Buf::new((0..100).collect(), ..50); "prefix")]
2268 #[test_case(Buf::new((0..100).collect(), 50..); "suffix")]
2269 #[test_case(Buf::new((0..100).collect(), 25..75); "middle")]
2270 fn test_buf_into_inner(buf: Buf<Vec<u8>>) {
2271 assert_eq!(buf.clone().as_ref(), buf.into_inner());
2272 }
2273
2274 #[test]
2275 fn test_packet_constraints() {
2276 use PacketConstraints as PC;
2277
2278 assert!(PC::try_new(0, 0, 0, 0).is_some());
2282 assert!(PC::try_new(usize::MAX / 2, usize::MAX / 2, 0, 0).is_some());
2284 assert_eq!(PC::try_new(usize::MAX, 1, 0, 0), None);
2286 assert_eq!(PC::try_new(0, 0, 1, 0), None);
2288
2289 let pc = PC::new(10, 10, 0, usize::MAX);
2293 assert_eq!(pc.try_encapsulate(&pc).unwrap(), PC::new(20, 20, 0, usize::MAX - 20));
2297
2298 let inner = PC::new(10, 10, 0, usize::MAX);
2309 let outer = PC::new(0, 0, 10, usize::MAX);
2310 assert_eq!(inner.try_encapsulate(&outer).unwrap(), PC::new(10, 10, 0, usize::MAX - 20));
2311
2312 let inner = PC::new(usize::MAX, 0, 0, usize::MAX);
2316 let outer = PC::new(1, 0, 0, usize::MAX);
2317 assert_eq!(inner.try_encapsulate(&outer), None);
2318
2319 let inner = PC::new(0, usize::MAX, 0, usize::MAX);
2323 let outer = PC::new(0, 1, 0, usize::MAX);
2324 assert_eq!(inner.try_encapsulate(&outer), None);
2325
2326 let one_fifth_max = (usize::MAX / 5) + 1;
2333 let inner = PC::new(one_fifth_max, one_fifth_max, one_fifth_max, usize::MAX);
2334 let outer = PC::new(one_fifth_max, one_fifth_max, 0, usize::MAX);
2335 assert_eq!(inner.try_encapsulate(&outer), None);
2336
2337 let inner = PC::new(10, 10, 0, usize::MAX);
2342 let outer = PC::new(0, 0, 0, 10);
2343 assert_eq!(inner.try_encapsulate(&outer), None);
2344
2345 let inner = PC::new(0, 0, 10, usize::MAX);
2351 let outer = PC::new(0, 0, 0, 5);
2352 assert_eq!(inner.try_encapsulate(&outer), None);
2353 }
2354
2355 #[test]
2356 fn test_inner_serializer() {
2357 const INNER: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
2358
2359 fn concat<'a, I: IntoIterator<Item = &'a &'a [u8]>>(slices: I) -> Vec<u8> {
2360 let mut v = Vec::new();
2361 for slc in slices.into_iter() {
2362 v.extend_from_slice(slc);
2363 }
2364 v
2365 }
2366
2367 let buf = INNER.into_serializer().serialize_vec_outer().unwrap();
2369 assert_eq!(buf.as_ref(), INNER);
2370
2371 let buf = INNER
2374 .into_serializer()
2375 .into_verifying(false)
2376 .wrap_in(DummyPacketBuilder::new(0, 0, 20, usize::MAX))
2377 .serialize_vec_outer()
2378 .unwrap();
2379 assert_eq!(buf.as_ref(), concat(&[INNER, vec![0; 10].as_ref()]).as_slice());
2380
2381 let buf = INNER
2385 .into_serializer()
2386 .into_verifying(false)
2387 .wrap_in(DummyPacketBuilder::new(10, 10, 0, usize::MAX))
2388 .serialize_vec_outer()
2389 .unwrap();
2390 assert_eq!(
2391 buf.as_ref(),
2392 concat(&[vec![0xFF; 10].as_ref(), INNER, vec![0xFE; 10].as_ref()]).as_slice()
2393 );
2394
2395 assert_eq!(
2397 INNER
2398 .into_serializer()
2399 .into_verifying(false)
2400 .wrap_in(DummyPacketBuilder::new(0, 0, 0, 9))
2401 .serialize_vec_outer()
2402 .unwrap_err()
2403 .0,
2404 SerializeError::SizeLimitExceeded
2405 );
2406
2407 assert_eq!(
2411 INNER
2412 .into_serializer_with(Buf::new(vec![0xFF], ..))
2413 .into_verifying(false)
2414 .serialize_vec_outer()
2415 .unwrap()
2416 .as_ref(),
2417 INNER
2418 );
2419 }
2420
2421 #[test]
2422 fn test_buffer_serializer_and_inner_serializer() {
2423 fn verify_buffer_serializer<B: BufferMut + Debug>(
2424 buffer: B,
2425 header_len: usize,
2426 footer_len: usize,
2427 min_body_len: usize,
2428 ) {
2429 let old_body = buffer.to_flattened_vec();
2430 let serializer =
2431 DummyPacketBuilder::new(header_len, footer_len, min_body_len, usize::MAX)
2432 .wrap_body(buffer);
2433
2434 let buffer0 = serializer
2435 .serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec)
2436 .unwrap();
2437 verify(buffer0, &old_body, header_len, footer_len, min_body_len);
2438
2439 let buffer = serializer.serialize_vec_outer().unwrap();
2440 verify(buffer, &old_body, header_len, footer_len, min_body_len);
2441 }
2442
2443 fn verify_inner_packet_builder_serializer(
2444 body: &[u8],
2445 header_len: usize,
2446 footer_len: usize,
2447 min_body_len: usize,
2448 ) {
2449 let buffer = DummyPacketBuilder::new(header_len, footer_len, min_body_len, usize::MAX)
2450 .wrap_body(body.into_serializer())
2451 .serialize_vec_outer()
2452 .unwrap();
2453 verify(buffer, body, header_len, footer_len, min_body_len);
2454 }
2455
2456 fn verify<B: Buffer>(
2457 buffer: B,
2458 body: &[u8],
2459 header_len: usize,
2460 footer_len: usize,
2461 min_body_len: usize,
2462 ) {
2463 let flat = buffer.to_flattened_vec();
2464 let header_bytes = &flat[..header_len];
2465 let body_bytes = &flat[header_len..header_len + body.len()];
2466 let padding_len = min_body_len.saturating_sub(body.len());
2467 let padding_bytes =
2468 &flat[header_len + body.len()..header_len + body.len() + padding_len];
2469 let total_body_len = body.len() + padding_len;
2470 let footer_bytes = &flat[header_len + total_body_len..];
            assert_eq!(
                buffer.len() - total_body_len,
                header_len + footer_len,
                "buffer.len()({}) - total_body_len({}) != header_len({}) + footer_len({})",
                buffer.len(),
                total_body_len,
                header_len,
                footer_len,
            );
2480
2481 assert!(
2483 header_bytes.iter().all(|b| *b == 0xFF),
2484 "header_bytes {:?} are not filled with 0xFF's",
2485 header_bytes,
2486 );
2487 assert_eq!(body_bytes, body);
2488 assert!(
2490 padding_bytes.iter().all(|b| *b == 0),
2491 "padding_bytes {:?} are not filled with 0s",
2492 padding_bytes,
2493 );
2494 assert!(
2496 footer_bytes.iter().all(|b| *b == 0xFE),
2497 "footer_bytes {:?} are not filled with 0xFE's",
2498 footer_bytes,
2499 );
2500 }
2501
2502 for buf_len in 0..8 {
2505 for range_start in 0..buf_len {
2506 for range_end in range_start..buf_len {
2507 for prefix in 0..8 {
2508 for suffix in 0..8 {
2509 for min_body in 0..8 {
2510 let mut vec = vec![0; buf_len];
2511 #[allow(clippy::needless_range_loop)]
2516 for i in 0..vec.len() {
2517 vec[i] = i as u8;
2518 }
2519 verify_buffer_serializer(
2520 Buf::new(vec.as_mut_slice(), range_start..range_end),
2521 prefix,
2522 suffix,
2523 min_body,
2524 );
2525 if range_start == 0 {
2526 verify_inner_packet_builder_serializer(
2535 &vec.as_slice()[range_start..range_end],
2536 prefix,
2537 suffix,
2538 min_body,
2539 );
2540 }
2541 }
2542 }
2543 }
2544 }
2545 }
2546 }
2547 }
2548
2549 #[test]
2550 fn test_min_body_len() {
2551 let body = &[1, 2];
2556
2557 let inner = DummyPacketBuilder::new(2, 2, 0, usize::MAX);
2560 let outer = DummyPacketBuilder::new(2, 2, 8, usize::MAX);
2562 let buf = body
2563 .into_serializer()
2564 .into_verifying(false)
2565 .wrap_in_verifying(inner, false)
2566 .wrap_in_verifying(outer, false)
2567 .serialize_vec_outer()
2568 .unwrap();
2569 assert_eq!(buf.prefix_len(), 0);
2570 assert_eq!(buf.suffix_len(), 0);
        assert_eq!(
            buf.as_ref(),
            &[
                0xFF, 0xFF, // outer header
                0xFF, 0xFF, // inner header
                1, 2, // body
                0xFE, 0xFE, // inner footer
                0, 0, // padding to the outer minimum body length
                0xFE, 0xFE, // outer footer
            ]
        );
2582 }
2583
2584 #[test]
2585 fn test_size_limit() {
2586 fn test<S: Serializer + Clone + Debug + Eq>(ser: S)
2588 where
2589 S::Buffer: ReusableBuffer,
2590 {
2591 let pb = DummyPacketBuilder::new(1, 1, 0, usize::MAX);
2597
2598 assert!(ser
2603 .clone()
2604 .wrap_in_verifying(pb, false)
2605 .with_size_limit_verifying(3, false)
2606 .serialize_vec_outer()
2607 .is_ok());
2608 assert!(ser
2610 .clone()
2611 .wrap_in_verifying(pb, false)
2612 .with_size_limit_verifying(4, false)
2613 .serialize_vec_outer()
2614 .is_ok());
2615 assert!(ser
2619 .clone()
2620 .with_size_limit_verifying(1, false)
2621 .wrap_in_verifying(pb, false)
2622 .with_size_limit_verifying(3, false)
2623 .serialize_vec_outer()
2624 .is_ok());
2625 assert!(ser
2628 .clone()
2629 .with_size_limit_verifying(0, false)
2630 .wrap_in_verifying(pb, false)
2631 .serialize_vec_outer()
2632 .is_err());
2633 assert!(ser
2639 .clone()
2640 .wrap_in_verifying(pb, false)
2641 .with_size_limit_verifying(1, false)
2642 .serialize_vec_outer()
2643 .is_err());
2644 }
2645
2646 test(DummyPacketBuilder::new(1, 0, 0, usize::MAX).into_serializer().into_verifying(false));
2648 test(Buf::new(vec![0], ..).into_verifying(false));
2649 }
2650
2651 #[test]
2652 fn test_truncating_serializer() {
2653 fn verify_result<S: Serializer + Debug>(ser: S, expected: &[u8])
2654 where
2655 S::Buffer: ReusableBuffer + AsRef<[u8]>,
2656 {
2657 let buf = ser.serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec).unwrap();
2658 assert_eq!(buf.as_ref(), &expected[..]);
2659 let buf = ser.serialize_vec_outer().unwrap();
2660 assert_eq!(buf.as_ref(), &expected[..]);
2661 }
2662
2663 let body = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
2665 let ser =
2666 TruncatingSerializer::new(Buf::new(body.clone(), ..), TruncateDirection::DiscardFront)
2667 .into_verifying(true)
2668 .with_size_limit_verifying(4, true);
2669 verify_result(ser, &[6, 7, 8, 9]);
2670
2671 let ser =
2673 TruncatingSerializer::new(Buf::new(body.clone(), ..), TruncateDirection::DiscardBack)
2674 .into_verifying(true)
2675 .with_size_limit_verifying(7, true);
2676 verify_result(ser, &[0, 1, 2, 3, 4, 5, 6]);
2677
2678 let ser =
2680 TruncatingSerializer::new(Buf::new(body.clone(), ..), TruncateDirection::NoTruncating)
2681 .into_verifying(false)
2682 .with_size_limit_verifying(5, true);
2683 assert!(ser.clone().serialize_vec_outer().is_err());
2684 assert!(ser.serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec).is_err());
2685 assert!(ser.serialize_vec_outer().is_err());
2686
2687 fn test_serialization_failure<S: Serializer + Clone + Eq + Debug>(
2691 ser: S,
2692 err: SerializeError<BufferTooShortError>,
2693 ) where
2694 S::Buffer: ReusableBuffer + Debug,
2695 {
2696 let (e, new_ser) = DummyPacketBuilder::new(2, 2, 0, 1)
2705 .wrap_body(ser.clone())
2706 .serialize_no_alloc_outer()
2707 .unwrap_err();
2708 assert_eq!(err, e);
2709 assert_eq!(new_ser.into_inner(), ser);
2710 }
2711
2712 let body = Buf::new(vec![1, 2], ..);
2713 test_serialization_failure(
2714 TruncatingSerializer::new(body.clone(), TruncateDirection::DiscardFront)
2715 .into_verifying(true),
2716 SerializeError::Alloc(BufferTooShortError),
2717 );
        test_serialization_failure(
            TruncatingSerializer::new(body.clone(), TruncateDirection::DiscardBack)
                .into_verifying(true),
            SerializeError::Alloc(BufferTooShortError),
        );
2723 test_serialization_failure(
2724 TruncatingSerializer::new(body.clone(), TruncateDirection::NoTruncating)
2725 .into_verifying(false),
2726 SerializeError::SizeLimitExceeded,
2727 );
2728 }
2729
    #[test]
    fn test_try_reuse_buffer() {
        fn test_expect_success(
            body_range: Range<usize>,
            prefix: usize,
            suffix: usize,
            max_copy_bytes: usize,
        ) {
            let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
            let buffer = Buf::new(&mut bytes[..], body_range);
            let body = buffer.as_ref().to_vec();
            let buffer = try_reuse_buffer(buffer, prefix, suffix, max_copy_bytes).unwrap();
            assert_eq!(buffer.as_ref(), body.as_slice());
            assert!(buffer.prefix_len() >= prefix);
            assert!(buffer.suffix_len() >= suffix);
        }

        fn test_expect_failure(
            body_range: Range<usize>,
            prefix: usize,
            suffix: usize,
            max_copy_bytes: usize,
        ) {
            let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
            let buffer = Buf::new(&mut bytes[..], body_range.clone());
            let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
            let orig = Buf::new(&mut bytes[..], body_range.clone());
            let buffer = try_reuse_buffer(buffer, prefix, suffix, max_copy_bytes).unwrap_err();
            assert_eq!(buffer, orig);
        }

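        // Reuse succeeds when the requested prefix/suffix already fit, or when
        // shifting the body into place copies at most `max_copy_bytes` bytes.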
        test_expect_success(0..10, 0, 0, 0);
        test_expect_success(1..9, 1, 1, 0);
        test_expect_success(0..9, 1, 0, 9);
        test_expect_success(1..10, 0, 1, 9);
        test_expect_failure(0..9, 1, 0, 8);
        test_expect_failure(1..10, 0, 1, 8);
    }

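    // Checks that `MaybeReuseBufferProvider` reuses the incoming buffer
    // (`Either::A`) when it can satisfy the prefix/suffix request and
    // reallocates (`Either::B`) otherwise.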
    #[test]
    fn test_maybe_reuse_buffer_provider() {
        fn test_expect(body_range: Range<usize>, prefix: usize, suffix: usize, expect_a: bool) {
            let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
            let buffer = Buf::new(&mut bytes[..], body_range);
            let body = buffer.as_ref().to_vec();
            let buffer = BufferProvider::reuse_or_realloc(
                MaybeReuseBufferProvider(new_buf_vec),
                buffer,
                prefix,
                suffix,
            )
            .unwrap();
            match &buffer {
                Either::A(_) if expect_a => {}
                Either::B(_) if !expect_a => {}
                Either::A(_) => panic!("expected Either::B variant"),
                Either::B(_) => panic!("expected Either::A variant"),
            }
            let bytes: &[u8] = buffer.as_ref();
            assert_eq!(bytes, body.as_slice());
            assert!(buffer.prefix_len() >= prefix);
            assert!(buffer.suffix_len() >= suffix);
        }

        fn test_expect_reuse(body_range: Range<usize>, prefix: usize, suffix: usize) {
            test_expect(body_range, prefix, suffix, true);
        }

        fn test_expect_realloc(body_range: Range<usize>, prefix: usize, suffix: usize) {
            test_expect(body_range, prefix, suffix, false);
        }

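        // Reallocation is expected only when the existing capacity cannot
        // provide both the requested prefix and suffix.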
        test_expect_reuse(0..10, 0, 0);
        test_expect_reuse(1..9, 1, 1);
        test_expect_reuse(0..9, 1, 0);
        test_expect_reuse(1..10, 0, 1);
        test_expect_realloc(0..9, 1, 1);
        test_expect_realloc(1..10, 1, 1);
    }

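    // Checks that `NoReuseBufferProvider` always copies the body into a newly
    // allocated buffer with exactly the requested prefix and suffix.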
    #[test]
    fn test_no_reuse_buffer_provider() {
        #[track_caller]
        fn test_expect(body_range: Range<usize>, prefix: usize, suffix: usize) {
            let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
            let internal_buffer: Buf<&mut [u8]> = Buf::new(&mut bytes[..], body_range);
            let body = internal_buffer.as_ref().to_vec();
            let buffer: Buf<Vec<u8>> = BufferProvider::reuse_or_realloc(
                NoReuseBufferProvider(new_buf_vec),
                internal_buffer,
                prefix,
                suffix,
            )
            .unwrap();
            let bytes: &[u8] = buffer.as_ref();
            assert_eq!(bytes, body.as_slice());
            assert_eq!(buffer.prefix_len(), prefix);
            assert_eq!(buffer.suffix_len(), suffix);
        }
        test_expect(0..10, 0, 0);
        test_expect(1..9, 1, 1);
        test_expect(0..9, 10, 10);
        test_expect(1..10, 15, 15);
    }

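    /// A test-only buffer whose contents are split across three fragments: the
    /// bytes of `data` before and after `mid` (within `range`) and a separate
    /// `inner` buffer sandwiched between them.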
    struct ScatterGatherBuf<B> {
        data: Vec<u8>,
        mid: usize,
        range: Range<usize>,
        inner: B,
    }

    impl<B: BufferMut> FragmentedBuffer for ScatterGatherBuf<B> {
        fn len(&self) -> usize {
            self.inner.len() + (self.range.end - self.range.start)
        }

        fn with_bytes<R, F>(&self, f: F) -> R
        where
            F: for<'a, 'b> FnOnce(FragmentedBytes<'a, 'b>) -> R,
        {
            let (_, rest) = self.data.split_at(self.range.start);
            let (prefix_b, rest) = rest.split_at(self.mid - self.range.start);
            let (suffix_b, _) = rest.split_at(self.range.end - self.mid);
            let mut bytes = [prefix_b, self.inner.as_ref(), suffix_b];
            f(FragmentedBytes::new(&mut bytes[..]))
        }
    }

    impl<B: BufferMut> FragmentedBufferMut for ScatterGatherBuf<B> {
        fn with_bytes_mut<R, F>(&mut self, f: F) -> R
        where
            F: for<'a, 'b> FnOnce(FragmentedBytesMut<'a, 'b>) -> R,
        {
            let (_, rest) = self.data.split_at_mut(self.range.start);
            let (prefix_b, rest) = rest.split_at_mut(self.mid - self.range.start);
            let (suffix_b, _) = rest.split_at_mut(self.range.end - self.mid);
            let mut bytes = [prefix_b, self.inner.as_mut(), suffix_b];
            f(FragmentedBytesMut::new(&mut bytes[..]))
        }
    }

    impl<B: BufferMut> GrowBuffer for ScatterGatherBuf<B> {
        fn with_parts<O, F>(&self, f: F) -> O
        where
            F: for<'a, 'b> FnOnce(&'a [u8], FragmentedBytes<'a, 'b>, &'a [u8]) -> O,
        {
            let (prefix, rest) = self.data.split_at(self.range.start);
            let (prefix_b, rest) = rest.split_at(self.mid - self.range.start);
            let (suffix_b, suffix) = rest.split_at(self.range.end - self.mid);
            let mut bytes = [prefix_b, self.inner.as_ref(), suffix_b];
            f(prefix, bytes.as_fragmented_byte_slice(), suffix)
        }

        fn prefix_len(&self) -> usize {
            self.range.start
        }

        fn suffix_len(&self) -> usize {
            self.data.len() - self.range.end
        }

        fn grow_front(&mut self, n: usize) {
            self.range.start -= n;
        }

        fn grow_back(&mut self, n: usize) {
            self.range.end += n;
            assert!(self.range.end <= self.data.len());
        }
    }

    impl<B: BufferMut> GrowBufferMut for ScatterGatherBuf<B> {
        fn with_parts_mut<O, F>(&mut self, f: F) -> O
        where
            F: for<'a, 'b> FnOnce(&'a mut [u8], FragmentedBytesMut<'a, 'b>, &'a mut [u8]) -> O,
        {
            let (prefix, rest) = self.data.split_at_mut(self.range.start);
            let (prefix_b, rest) = rest.split_at_mut(self.mid - self.range.start);
            let (suffix_b, suffix) = rest.split_at_mut(self.range.end - self.mid);
            let mut bytes = [prefix_b, self.inner.as_mut(), suffix_b];
            f(prefix, bytes.as_fragmented_byte_slice(), suffix)
        }
    }

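    /// A `BufferProvider` that never reuses the incoming buffer's storage
    /// directly: it wraps the buffer in a `ScatterGatherBuf` whose prefix and
    /// suffix live in freshly allocated bytes.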
    struct ScatterGatherProvider;

    impl<B: BufferMut> BufferProvider<B, ScatterGatherBuf<B>> for ScatterGatherProvider {
        type Error = Never;

        fn alloc_no_reuse(
            self,
            _prefix: usize,
            _body: usize,
            _suffix: usize,
        ) -> Result<ScatterGatherBuf<B>, Self::Error> {
            unimplemented!("not used in tests")
        }

        fn reuse_or_realloc(
            self,
            buffer: B,
            prefix: usize,
            suffix: usize,
        ) -> Result<ScatterGatherBuf<B>, (Self::Error, B)> {
            let inner = buffer;
            let data = vec![0; prefix + suffix];
            let range = Range { start: prefix, end: prefix };
            let mid = prefix;
            Ok(ScatterGatherBuf { inner, data, range, mid })
        }
    }

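    // Serializes through `ScatterGatherProvider` and checks that flattening
    // the fragmented result yields the builder's header bytes, the untouched
    // body, and its footer bytes (0xFF and 0xFE fill values, per the expected
    // output below).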
    #[test]
    fn test_scatter_gather_serialize() {
        let buf = Buf::new(vec![10, 20, 30, 40, 50], ..);
        let pb = DummyPacketBuilder::new(3, 2, 0, usize::MAX);
        let ser = pb.wrap_body(buf);
        let result = ser.serialize_outer(ScatterGatherProvider {}).unwrap();
        let flattened = result.to_flattened_vec();
        assert_eq!(&flattened[..], &[0xFF, 0xFF, 0xFF, 10, 20, 30, 40, 50, 0xFE, 0xFE]);
    }
}