use std::cmp;
use std::convert::Infallible as Never;
use std::fmt::{self, Debug, Formatter};
use std::ops::{Deref, Range, RangeBounds};

use arrayvec::ArrayVec;
use zerocopy::SplitByteSlice;

use crate::{
    canonicalize_range, take_back, take_back_mut, take_front, take_front_mut,
    AsFragmentedByteSlice, Buffer, BufferView, BufferViewMut, ContiguousBuffer, EmptyBuf,
    FragmentedBuffer, FragmentedBufferMut, FragmentedBytes, FragmentedBytesMut, GrowBuffer,
    GrowBufferMut, ParsablePacket, ParseBuffer, ParseBufferMut, ReusableBuffer, ShrinkBuffer,
};
21
22#[derive(Copy, Clone, Debug)]
28pub enum Either<A, B> {
29 A(A),
30 B(B),
31}
32
33impl<A, B> Either<A, B> {
34 pub fn map_a<AA, F: FnOnce(A) -> AA>(self, f: F) -> Either<AA, B> {
40 match self {
41 Either::A(a) => Either::A(f(a)),
42 Either::B(b) => Either::B(b),
43 }
44 }
45
46 pub fn map_b<BB, F: FnOnce(B) -> BB>(self, f: F) -> Either<A, BB> {
52 match self {
53 Either::A(a) => Either::A(a),
54 Either::B(b) => Either::B(f(b)),
55 }
56 }
57
58 pub fn unwrap_a(self) -> A {
64 match self {
65 Either::A(x) => x,
66 Either::B(_) => panic!("This `Either<A, B>` does not hold the `A` variant"),
67 }
68 }
69
70 pub fn unwrap_b(self) -> B {
76 match self {
77 Either::A(_) => panic!("This `Either<A, B>` does not hold the `B` variant"),
78 Either::B(x) => x,
79 }
80 }
81}
82
83impl<A> Either<A, A> {
84 pub fn into_inner(self) -> A {
87 match self {
88 Either::A(x) => x,
89 Either::B(x) => x,
90 }
91 }
92}
93
94impl<A> Either<A, Never> {
95 #[inline]
97 pub fn into_a(self) -> A {
98 match self {
99 Either::A(a) => a,
100 }
101 }
102}
103
104impl<B> Either<Never, B> {
105 #[inline]
107 pub fn into_b(self) -> B {
108 match self {
109 Either::B(b) => b,
110 }
111 }
112}
113
114macro_rules! call_method_on_either {
115 ($val:expr, $method:ident, $($args:expr),*) => {
116 match $val {
117 Either::A(a) => a.$method($($args),*),
118 Either::B(b) => b.$method($($args),*),
119 }
120 };
121 ($val:expr, $method:ident) => {
122 call_method_on_either!($val, $method,)
123 };
124}
125
126impl<A, B> FragmentedBuffer for Either<A, B>
133where
134 A: FragmentedBuffer,
135 B: FragmentedBuffer,
136{
137 fn len(&self) -> usize {
138 call_method_on_either!(self, len)
139 }
140
141 fn with_bytes<R, F>(&self, f: F) -> R
142 where
143 F: for<'a, 'b> FnOnce(FragmentedBytes<'a, 'b>) -> R,
144 {
145 call_method_on_either!(self, with_bytes, f)
146 }
147}
148
149impl<A, B> ContiguousBuffer for Either<A, B>
150where
151 A: ContiguousBuffer,
152 B: ContiguousBuffer,
153{
154}
155
156impl<A, B> ShrinkBuffer for Either<A, B>
157where
158 A: ShrinkBuffer,
159 B: ShrinkBuffer,
160{
161 fn shrink<R: RangeBounds<usize>>(&mut self, range: R) {
162 call_method_on_either!(self, shrink, range)
163 }
164 fn shrink_front(&mut self, n: usize) {
165 call_method_on_either!(self, shrink_front, n)
166 }
167 fn shrink_back(&mut self, n: usize) {
168 call_method_on_either!(self, shrink_back, n)
169 }
170}
171
172impl<A, B> ParseBuffer for Either<A, B>
173where
174 A: ParseBuffer,
175 B: ParseBuffer,
176{
177 fn parse<'a, P: ParsablePacket<&'a [u8], ()>>(&'a mut self) -> Result<P, P::Error> {
178 call_method_on_either!(self, parse)
179 }
180 fn parse_with<'a, ParseArgs, P: ParsablePacket<&'a [u8], ParseArgs>>(
181 &'a mut self,
182 args: ParseArgs,
183 ) -> Result<P, P::Error> {
184 call_method_on_either!(self, parse_with, args)
185 }
186}
187
188impl<A, B> FragmentedBufferMut for Either<A, B>
189where
190 A: FragmentedBufferMut,
191 B: FragmentedBufferMut,
192{
193 fn with_bytes_mut<R, F>(&mut self, f: F) -> R
194 where
195 F: for<'a, 'b> FnOnce(FragmentedBytesMut<'a, 'b>) -> R,
196 {
197 call_method_on_either!(self, with_bytes_mut, f)
198 }
199}
200
201impl<A, B> ParseBufferMut for Either<A, B>
202where
203 A: ParseBufferMut,
204 B: ParseBufferMut,
205{
206 fn parse_mut<'a, P: ParsablePacket<&'a mut [u8], ()>>(&'a mut self) -> Result<P, P::Error> {
207 call_method_on_either!(self, parse_mut)
208 }
209 fn parse_with_mut<'a, ParseArgs, P: ParsablePacket<&'a mut [u8], ParseArgs>>(
210 &'a mut self,
211 args: ParseArgs,
212 ) -> Result<P, P::Error> {
213 call_method_on_either!(self, parse_with_mut, args)
214 }
215}
216
217impl<A, B> GrowBuffer for Either<A, B>
218where
219 A: GrowBuffer,
220 B: GrowBuffer,
221{
222 #[inline]
223 fn with_parts<O, F>(&self, f: F) -> O
224 where
225 F: for<'a, 'b> FnOnce(&'a [u8], FragmentedBytes<'a, 'b>, &'a [u8]) -> O,
226 {
227 call_method_on_either!(self, with_parts, f)
228 }
229 fn capacity(&self) -> usize {
230 call_method_on_either!(self, capacity)
231 }
232 fn prefix_len(&self) -> usize {
233 call_method_on_either!(self, prefix_len)
234 }
235 fn suffix_len(&self) -> usize {
236 call_method_on_either!(self, suffix_len)
237 }
238 fn grow_front(&mut self, n: usize) {
239 call_method_on_either!(self, grow_front, n)
240 }
241 fn grow_back(&mut self, n: usize) {
242 call_method_on_either!(self, grow_back, n)
243 }
244 fn reset(&mut self) {
245 call_method_on_either!(self, reset)
246 }
247}
248
249impl<A, B> GrowBufferMut for Either<A, B>
250where
251 A: GrowBufferMut,
252 B: GrowBufferMut,
253{
254 fn with_parts_mut<O, F>(&mut self, f: F) -> O
255 where
256 F: for<'a, 'b> FnOnce(&'a mut [u8], FragmentedBytesMut<'a, 'b>, &'a mut [u8]) -> O,
257 {
258 call_method_on_either!(self, with_parts_mut, f)
259 }
260
261 fn serialize<BB: PacketBuilder>(&mut self, builder: BB) {
262 call_method_on_either!(self, serialize, builder)
263 }
264}
265
266impl<A, B> Buffer for Either<A, B>
267where
268 A: Buffer,
269 B: Buffer,
270{
271 fn parse_with_view<'a, ParseArgs, P: ParsablePacket<&'a [u8], ParseArgs>>(
272 &'a mut self,
273 args: ParseArgs,
274 ) -> Result<(P, &'a [u8]), P::Error> {
275 call_method_on_either!(self, parse_with_view, args)
276 }
277}
278
279impl<A: AsRef<[u8]>, B: AsRef<[u8]>> AsRef<[u8]> for Either<A, B> {
280 fn as_ref(&self) -> &[u8] {
281 call_method_on_either!(self, as_ref)
282 }
283}
284
285impl<A: AsMut<[u8]>, B: AsMut<[u8]>> AsMut<[u8]> for Either<A, B> {
286 fn as_mut(&mut self) -> &mut [u8] {
287 call_method_on_either!(self, as_mut)
288 }
289}
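
// Added illustration (not from the original source): because the impls above
// forward every buffer trait to whichever variant is live, an `Either` of two
// different buffer types can be used through the common traits without
// matching on the variant. The module and test names here are ours.
#[cfg(test)]
mod either_buffer_example {
    use super::*;

    #[test]
    fn either_forwards_buffer_traits() {
        let owned: Either<Buf<Vec<u8>>, Buf<&[u8]>> = Either::A(Buf::new(vec![1, 2, 3], ..));
        let bytes = [4u8, 5];
        let borrowed: Either<Buf<Vec<u8>>, Buf<&[u8]>> = Either::B(Buf::new(&bytes[..], ..));

        // Both variants go through the same forwarded `FragmentedBuffer` and
        // `AsRef<[u8]>` impls.
        assert_eq!(owned.len(), 3);
        assert_eq!(borrowed.len(), 2);
        assert_eq!(owned.as_ref(), &[1, 2, 3][..]);
        assert_eq!(borrowed.as_ref(), &[4, 5][..]);
    }
}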
290
291#[derive(Clone, Debug)]
297pub struct Buf<B> {
298 buf: B,
299 body: Range<usize>,
300}
301
302impl<B: AsRef<[u8]>> PartialEq for Buf<B> {
303 fn eq(&self, other: &Self) -> bool {
304 let self_slice = AsRef::<[u8]>::as_ref(self);
305 let other_slice = AsRef::<[u8]>::as_ref(other);
306 PartialEq::eq(self_slice, other_slice)
307 }
308}
309
310impl<B: AsRef<[u8]>> Eq for Buf<B> {}
311
312impl Buf<Vec<u8>> {
313 pub fn into_inner(self) -> Vec<u8> {
315 let Buf { mut buf, body } = self;
316 let len = body.end - body.start;
317 let _ = buf.drain(..body.start);
318 buf.truncate(len);
319 buf
320 }
321}
322
323impl<B: AsRef<[u8]>> Buf<B> {
324 pub fn new<R: RangeBounds<usize>>(buf: B, body: R) -> Buf<B> {
335 let len = buf.as_ref().len();
336 Buf { buf, body: canonicalize_range(len, &body) }
337 }
338
339 pub fn buffer_view(&mut self) -> BufView<'_> {
341 BufView { buf: &self.buf.as_ref()[self.body.clone()], body: &mut self.body }
342 }
343}
344
345impl<B: AsRef<[u8]> + AsMut<[u8]>> Buf<B> {
346 pub fn buffer_view_mut(&mut self) -> BufViewMut<'_> {
348 BufViewMut { buf: &mut self.buf.as_mut()[self.body.clone()], body: &mut self.body }
349 }
350}
351
352impl<B: AsRef<[u8]>> FragmentedBuffer for Buf<B> {
353 fragmented_buffer_method_impls!();
354}
355impl<B: AsRef<[u8]>> ContiguousBuffer for Buf<B> {}
356impl<B: AsRef<[u8]>> ShrinkBuffer for Buf<B> {
357 fn shrink<R: RangeBounds<usize>>(&mut self, range: R) {
358 let len = self.len();
359 let mut range = canonicalize_range(len, &range);
360 range.start += self.body.start;
361 range.end += self.body.start;
362 self.body = range;
363 }
364
365 fn shrink_front(&mut self, n: usize) {
366 assert!(n <= self.len());
367 self.body.start += n;
368 }
369 fn shrink_back(&mut self, n: usize) {
370 assert!(n <= self.len());
371 self.body.end -= n;
372 }
373}
374impl<B: AsRef<[u8]>> ParseBuffer for Buf<B> {
375 fn parse_with<'a, ParseArgs, P: ParsablePacket<&'a [u8], ParseArgs>>(
376 &'a mut self,
377 args: ParseArgs,
378 ) -> Result<P, P::Error> {
379 P::parse(self.buffer_view(), args)
380 }
381}
382
383impl<B: AsRef<[u8]> + AsMut<[u8]>> FragmentedBufferMut for Buf<B> {
384 fragmented_buffer_mut_method_impls!();
385}
386
387impl<B: AsRef<[u8]> + AsMut<[u8]>> ParseBufferMut for Buf<B> {
388 fn parse_with_mut<'a, ParseArgs, P: ParsablePacket<&'a mut [u8], ParseArgs>>(
389 &'a mut self,
390 args: ParseArgs,
391 ) -> Result<P, P::Error> {
392 P::parse_mut(self.buffer_view_mut(), args)
393 }
394}
395
396impl<B: AsRef<[u8]>> GrowBuffer for Buf<B> {
397 fn with_parts<O, F>(&self, f: F) -> O
398 where
399 F: for<'a, 'b> FnOnce(&'a [u8], FragmentedBytes<'a, 'b>, &'a [u8]) -> O,
400 {
401 let (prefix, buf) = self.buf.as_ref().split_at(self.body.start);
402 let (body, suffix) = buf.split_at(self.body.end - self.body.start);
403 let mut body = [&body[..]];
404 f(prefix, body.as_fragmented_byte_slice(), suffix)
405 }
406 fn capacity(&self) -> usize {
407 self.buf.as_ref().len()
408 }
409 fn prefix_len(&self) -> usize {
410 self.body.start
411 }
412 fn suffix_len(&self) -> usize {
413 self.buf.as_ref().len() - self.body.end
414 }
415 fn grow_front(&mut self, n: usize) {
416 assert!(n <= self.body.start);
417 self.body.start -= n;
418 }
419 fn grow_back(&mut self, n: usize) {
420 assert!(n <= self.buf.as_ref().len() - self.body.end);
421 self.body.end += n;
422 }
423}
424
425impl<B: AsRef<[u8]> + AsMut<[u8]>> GrowBufferMut for Buf<B> {
426 fn with_parts_mut<O, F>(&mut self, f: F) -> O
427 where
428 F: for<'a, 'b> FnOnce(&'a mut [u8], FragmentedBytesMut<'a, 'b>, &'a mut [u8]) -> O,
429 {
430 let (prefix, buf) = self.buf.as_mut().split_at_mut(self.body.start);
431 let (body, suffix) = buf.split_at_mut(self.body.end - self.body.start);
432 let mut body = [&mut body[..]];
433 f(prefix, body.as_fragmented_byte_slice(), suffix)
434 }
435}
436
437impl<B: AsRef<[u8]>> AsRef<[u8]> for Buf<B> {
438 fn as_ref(&self) -> &[u8] {
439 &self.buf.as_ref()[self.body.clone()]
440 }
441}
442
443impl<B: AsMut<[u8]>> AsMut<[u8]> for Buf<B> {
444 fn as_mut(&mut self) -> &mut [u8] {
445 &mut self.buf.as_mut()[self.body.clone()]
446 }
447}
448
449impl<B: AsRef<[u8]>> Buffer for Buf<B> {
450 fn parse_with_view<'a, ParseArgs, P: ParsablePacket<&'a [u8], ParseArgs>>(
451 &'a mut self,
452 args: ParseArgs,
453 ) -> Result<(P, &'a [u8]), P::Error> {
454 let Self { body, ref buf } = self;
455 let body_before = body.clone();
456 let view = BufView { buf: &buf.as_ref()[body.clone()], body };
457 P::parse(view, args).map(|r| (r, &buf.as_ref()[body_before]))
458 }
459}
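
// Added example (not part of the original file): `Buf` tracks a body range
// inside a flat byte container, so shrinking moves the body edges inward and
// growing reclaims bytes from the prefix and suffix.
#[cfg(test)]
mod buf_example {
    use super::*;

    #[test]
    fn body_range_tracks_prefix_and_suffix() {
        // Ten bytes of storage with the body initially covering 2..8.
        let mut buf = Buf::new((0u8..10).collect::<Vec<_>>(), 2..8);
        assert_eq!(buf.as_ref(), &[2, 3, 4, 5, 6, 7][..]);
        assert_eq!(buf.prefix_len(), 2);
        assert_eq!(buf.suffix_len(), 2);

        // Shrinking the front moves the body start forward...
        buf.shrink_front(1);
        assert_eq!(buf.as_ref(), &[3, 4, 5, 6, 7][..]);
        // ...and growing the front reclaims bytes from the prefix.
        buf.grow_front(3);
        assert_eq!(buf.as_ref(), &[0, 1, 2, 3, 4, 5, 6, 7][..]);
    }
}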
460
461pub struct BufView<'a> {
466 buf: &'a [u8],
467 body: &'a mut Range<usize>,
468}
469
470impl<'a> BufferView<&'a [u8]> for BufView<'a> {
471 fn take_front(&mut self, n: usize) -> Option<&'a [u8]> {
472 if self.len() < n {
473 return None;
474 }
475 self.body.start += n;
476 Some(take_front(&mut self.buf, n))
477 }
478
479 fn take_back(&mut self, n: usize) -> Option<&'a [u8]> {
480 if self.len() < n {
481 return None;
482 }
483 self.body.end -= n;
484 Some(take_back(&mut self.buf, n))
485 }
486
487 fn into_rest(self) -> &'a [u8] {
488 self.buf
489 }
490}
491
492impl<'a> AsRef<[u8]> for BufView<'a> {
493 fn as_ref(&self) -> &[u8] {
494 self.buf
495 }
496}
497
498pub struct BufViewMut<'a> {
504 buf: &'a mut [u8],
505 body: &'a mut Range<usize>,
506}
507
508impl<'a> BufferView<&'a mut [u8]> for BufViewMut<'a> {
509 fn take_front(&mut self, n: usize) -> Option<&'a mut [u8]> {
510 if self.len() < n {
511 return None;
512 }
513 self.body.start += n;
514 Some(take_front_mut(&mut self.buf, n))
515 }
516
517 fn take_back(&mut self, n: usize) -> Option<&'a mut [u8]> {
518 if self.len() < n {
519 return None;
520 }
521 self.body.end -= n;
522 Some(take_back_mut(&mut self.buf, n))
523 }
524
525 fn into_rest(self) -> &'a mut [u8] {
526 self.buf
527 }
528}
529
530impl<'a> BufferViewMut<&'a mut [u8]> for BufViewMut<'a> {}
531
532impl<'a> AsRef<[u8]> for BufViewMut<'a> {
533 fn as_ref(&self) -> &[u8] {
534 self.buf
535 }
536}
537
538impl<'a> AsMut<[u8]> for BufViewMut<'a> {
539 fn as_mut(&mut self) -> &mut [u8] {
540 self.buf
541 }
542}
543
544#[derive(Copy, Clone, Debug, Eq, PartialEq)]
558pub struct PacketConstraints {
559 header_len: usize,
560 footer_len: usize,
561 min_body_len: usize,
562 max_body_len: usize,
563}
564
565impl PacketConstraints {
566 pub const UNCONSTRAINED: Self =
570 Self { header_len: 0, footer_len: 0, min_body_len: 0, max_body_len: usize::MAX };
571
572 #[inline]
580 pub fn new(
581 header_len: usize,
582 footer_len: usize,
583 min_body_len: usize,
584 max_body_len: usize,
585 ) -> PacketConstraints {
586 PacketConstraints::try_new(header_len, footer_len, min_body_len, max_body_len).expect(
587 "max_body_len < min_body_len or header_len + min_body_len + footer_len overflows usize",
588 )
589 }
590
591 #[inline]
597 pub fn try_new(
598 header_len: usize,
599 footer_len: usize,
600 min_body_len: usize,
601 max_body_len: usize,
602 ) -> Option<PacketConstraints> {
603 let header_min_body_footer_overflows = header_len
605 .checked_add(min_body_len)
606 .and_then(|sum| sum.checked_add(footer_len))
607 .is_none();
608 let max_less_than_min = max_body_len < min_body_len;
610 if max_less_than_min || header_min_body_footer_overflows {
611 return None;
612 }
613 Some(PacketConstraints { header_len, footer_len, min_body_len, max_body_len })
614 }
615
616 #[inline]
620 pub fn with_max_body_len(max_body_len: usize) -> PacketConstraints {
621 PacketConstraints { header_len: 0, footer_len: 0, min_body_len: 0, max_body_len }
626 }
627
628 #[inline]
630 pub fn header_len(&self) -> usize {
631 self.header_len
632 }
633
634 #[inline]
636 pub fn footer_len(&self) -> usize {
637 self.footer_len
638 }
639
640 #[inline]
656 pub fn min_body_len(&self) -> usize {
657 self.min_body_len
658 }
659
660 #[inline]
664 pub fn max_body_len(&self) -> usize {
665 self.max_body_len
666 }
667
668 pub fn try_encapsulate(&self, outer: &Self) -> Option<PacketConstraints> {
678 let inner = self;
679 let header_len = inner.header_len.checked_add(outer.header_len)?;
681 let footer_len = inner.footer_len.checked_add(outer.footer_len)?;
        // This can't overflow: `try_new` guarantees that
        // `header_len + min_body_len + footer_len` fits in a `usize`.
        let inner_header_footer_len = inner.header_len + inner.footer_len;
686 let min_body_len = cmp::max(
690 outer.min_body_len.saturating_sub(inner_header_footer_len),
691 inner.min_body_len,
692 );
693 let max_body_len =
698 cmp::min(outer.max_body_len.checked_sub(inner_header_footer_len)?, inner.max_body_len);
699 PacketConstraints::try_new(header_len, footer_len, min_body_len, max_body_len)
703 }
704}
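
// Worked example (added for illustration; the numbers are ours): encapsulating
// constraints adds the header and footer lengths, while the min/max body
// lengths combine so that both layers' requirements are met.
#[cfg(test)]
mod packet_constraints_example {
    use super::*;

    #[test]
    fn encapsulation_combines_layers() {
        // An inner layer with an 8-byte header and a 4-byte footer...
        let inner = PacketConstraints::new(8, 4, 0, 1500);
        // ...inside an outer layer that demands a body of at least 46 bytes.
        let outer = PacketConstraints::new(14, 0, 46, 1500);
        let combined = inner.try_encapsulate(&outer).unwrap();
        assert_eq!(combined.header_len(), 22); // 8 + 14
        assert_eq!(combined.footer_len(), 4); // 4 + 0
        // The outer minimum of 46 is partly satisfied by the inner header and
        // footer (12 bytes), so the remaining body must supply 34.
        assert_eq!(combined.min_body_len(), 34);
        // The inner header and footer also eat into the outer maximum.
        assert_eq!(combined.max_body_len(), 1488); // min(1500 - 12, 1500)
    }
}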
705
706pub struct SerializeTarget<'a> {
709 #[allow(missing_docs)]
710 pub header: &'a mut [u8],
711 #[allow(missing_docs)]
712 pub footer: &'a mut [u8],
713}
714
715pub trait PacketBuilder: Sized {
726 fn constraints(&self) -> PacketConstraints;
728
729 fn serialize(&self, target: &mut SerializeTarget<'_>, body: FragmentedBytesMut<'_, '_>);
758
759 #[inline]
764 fn wrap_body<B>(self, body: B) -> Nested<B, Self> {
765 Nested { inner: body, outer: self }
766 }
767}
768
769impl<'a, B: PacketBuilder> PacketBuilder for &'a B {
770 #[inline]
771 fn constraints(&self) -> PacketConstraints {
772 B::constraints(self)
773 }
774 #[inline]
775 fn serialize(&self, target: &mut SerializeTarget<'_>, body: FragmentedBytesMut<'_, '_>) {
776 B::serialize(self, target, body)
777 }
778}
779
780impl<'a, B: PacketBuilder> PacketBuilder for &'a mut B {
781 #[inline]
782 fn constraints(&self) -> PacketConstraints {
783 B::constraints(self)
784 }
785 #[inline]
786 fn serialize(&self, target: &mut SerializeTarget<'_>, body: FragmentedBytesMut<'_, '_>) {
787 B::serialize(self, target, body)
788 }
789}
790
791impl PacketBuilder for () {
792 #[inline]
793 fn constraints(&self) -> PacketConstraints {
794 PacketConstraints::UNCONSTRAINED
795 }
796 #[inline]
797 fn serialize(&self, _target: &mut SerializeTarget<'_>, _body: FragmentedBytesMut<'_, '_>) {}
798}
799
800impl PacketBuilder for Never {
801 fn constraints(&self) -> PacketConstraints {
802 match *self {}
803 }
804 fn serialize(&self, _target: &mut SerializeTarget<'_>, _body: FragmentedBytesMut<'_, '_>) {}
805}
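
// Added sketch (the protocol here is hypothetical, not part of this crate): a
// `PacketBuilder` for a made-up two-byte length-prefix header, showing how
// `constraints` advertises the space the layer needs and how `serialize`
// fills in the header slot around an already-serialized body.
#[cfg(test)]
mod packet_builder_example {
    use super::*;

    #[derive(Debug)]
    struct LengthHeaderBuilder;

    impl PacketBuilder for LengthHeaderBuilder {
        fn constraints(&self) -> PacketConstraints {
            // Two-byte header, no footer, and a body short enough for its
            // length to fit in a `u16`.
            PacketConstraints::new(2, 0, 0, u16::MAX as usize)
        }

        fn serialize(&self, target: &mut SerializeTarget<'_>, body: FragmentedBytesMut<'_, '_>) {
            let len = body.len() as u16;
            target.header.copy_from_slice(&len.to_be_bytes());
        }
    }

    #[test]
    fn length_header_is_prepended() {
        let buf = Buf::new(vec![0xAA; 5], ..)
            .wrap_in(LengthHeaderBuilder)
            .serialize_vec_outer()
            .unwrap();
        assert_eq!(buf.as_ref(), &[0, 5, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA][..]);
    }
}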
806
807#[derive(Copy, Clone, Debug, Eq, PartialEq)]
815pub struct Nested<I, O> {
816 inner: I,
817 outer: O,
818}
819
820impl<I, O> Nested<I, O> {
821 #[inline]
824 pub fn into_inner(self) -> I {
825 self.inner
826 }
827
828 #[inline]
831 pub fn into_outer(self) -> O {
832 self.outer
833 }
834
835 #[inline]
836 pub fn inner(&self) -> &I {
837 &self.inner
838 }
839
840 #[inline]
841 pub fn inner_mut(&mut self) -> &mut I {
842 &mut self.inner
843 }
844
845 #[inline]
846 pub fn outer(&self) -> &O {
847 &self.outer
848 }
849
850 #[inline]
851 pub fn outer_mut(&mut self) -> &mut O {
852 &mut self.outer
853 }
854}
855
856#[derive(Copy, Clone, Debug)]
862#[cfg_attr(test, derive(Eq, PartialEq))]
863pub struct LimitedSizePacketBuilder {
864 pub limit: usize,
866}
867
868impl PacketBuilder for LimitedSizePacketBuilder {
869 fn constraints(&self) -> PacketConstraints {
870 PacketConstraints::with_max_body_len(self.limit)
871 }
872
873 fn serialize(&self, _target: &mut SerializeTarget<'_>, _body: FragmentedBytesMut<'_, '_>) {}
874}
875
876pub trait InnerPacketBuilder {
890 fn bytes_len(&self) -> usize;
892
893 fn serialize(&self, buffer: &mut [u8]);
908
909 #[inline]
916 fn into_serializer(self) -> InnerSerializer<Self, EmptyBuf>
917 where
918 Self: Sized,
919 {
920 self.into_serializer_with(EmptyBuf)
921 }
922
923 fn into_serializer_with<B: ShrinkBuffer>(self, mut buffer: B) -> InnerSerializer<Self, B>
936 where
937 Self: Sized,
938 {
939 buffer.shrink_back_to(0);
940 InnerSerializer { inner: self, buffer }
941 }
942}
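
// Added sketch (the `Magic` type is hypothetical): implementing
// `InnerPacketBuilder` for a fixed-size payload and turning it into a
// `Serializer` with `into_serializer`.
#[cfg(test)]
mod inner_packet_builder_example {
    use super::*;

    // A made-up four-byte payload used only for this example.
    #[derive(Debug)]
    struct Magic(u32);

    impl InnerPacketBuilder for Magic {
        fn bytes_len(&self) -> usize {
            4
        }
        fn serialize(&self, buffer: &mut [u8]) {
            buffer.copy_from_slice(&self.0.to_be_bytes());
        }
    }

    #[test]
    fn magic_serializes_as_big_endian() {
        let buf = Magic(0xDEADBEEF).into_serializer().serialize_vec_outer().unwrap();
        assert_eq!(buf.as_ref(), &[0xDE, 0xAD, 0xBE, 0xEF][..]);
    }
}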
943
944impl<'a, I: InnerPacketBuilder> InnerPacketBuilder for &'a I {
945 #[inline]
946 fn bytes_len(&self) -> usize {
947 I::bytes_len(self)
948 }
949 #[inline]
950 fn serialize(&self, buffer: &mut [u8]) {
951 I::serialize(self, buffer)
952 }
953}
954impl<'a, I: InnerPacketBuilder> InnerPacketBuilder for &'a mut I {
955 #[inline]
956 fn bytes_len(&self) -> usize {
957 I::bytes_len(self)
958 }
959 #[inline]
960 fn serialize(&self, buffer: &mut [u8]) {
961 I::serialize(self, buffer)
962 }
963}
964impl<'a> InnerPacketBuilder for &'a [u8] {
965 #[inline]
966 fn bytes_len(&self) -> usize {
967 self.len()
968 }
969 #[inline]
970 fn serialize(&self, buffer: &mut [u8]) {
971 buffer.copy_from_slice(self);
972 }
973}
974impl<'a> InnerPacketBuilder for &'a mut [u8] {
975 #[inline]
976 fn bytes_len(&self) -> usize {
977 self.len()
978 }
979 #[inline]
980 fn serialize(&self, buffer: &mut [u8]) {
981 buffer.copy_from_slice(self);
982 }
983}
impl InnerPacketBuilder for Vec<u8> {
985 #[inline]
986 fn bytes_len(&self) -> usize {
987 self.len()
988 }
989 #[inline]
990 fn serialize(&self, buffer: &mut [u8]) {
991 buffer.copy_from_slice(self.as_slice());
992 }
993}
994impl<const N: usize> InnerPacketBuilder for ArrayVec<u8, N> {
995 fn bytes_len(&self) -> usize {
996 self.as_slice().bytes_len()
997 }
998 fn serialize(&self, buffer: &mut [u8]) {
999 self.as_slice().serialize(buffer);
1000 }
1001}
1002
1003pub struct ByteSliceInnerPacketBuilder<B>(pub B);
1010
1011impl<B: SplitByteSlice> InnerPacketBuilder for ByteSliceInnerPacketBuilder<B> {
1012 fn bytes_len(&self) -> usize {
1013 self.0.deref().bytes_len()
1014 }
1015 fn serialize(&self, buffer: &mut [u8]) {
1016 self.0.deref().serialize(buffer)
1017 }
1018}
1019
1020impl<B: SplitByteSlice> Debug for ByteSliceInnerPacketBuilder<B> {
1021 fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
1022 write!(f, "ByteSliceInnerPacketBuilder({:?})", self.0.as_ref())
1023 }
1024}
1025
1026#[derive(Copy, Clone, Debug, Eq, PartialEq)]
1033pub enum SerializeError<A> {
1034 Alloc(A),
1036 SizeLimitExceeded,
1038}
1039
1040impl<A> SerializeError<A> {
1041 #[inline]
1043 pub fn is_alloc(&self) -> bool {
1044 match self {
1045 SerializeError::Alloc(_) => true,
1046 SerializeError::SizeLimitExceeded => false,
1047 }
1048 }
1049
1050 #[inline]
1052 pub fn is_size_limit_exceeded(&self) -> bool {
1053 match self {
1054 SerializeError::Alloc(_) => false,
1055 SerializeError::SizeLimitExceeded => true,
1056 }
1057 }
1058
1059 pub fn map_alloc<T, F: FnOnce(A) -> T>(self, f: F) -> SerializeError<T> {
1061 match self {
1062 SerializeError::Alloc(a) => SerializeError::Alloc(f(a)),
1063 SerializeError::SizeLimitExceeded => SerializeError::SizeLimitExceeded,
1064 }
1065 }
1066}
1067
1068impl<A> From<A> for SerializeError<A> {
1069 fn from(a: A) -> SerializeError<A> {
1070 SerializeError::Alloc(a)
1071 }
1072}
1073
1074#[derive(Copy, Clone, Debug, Eq, PartialEq)]
1083pub struct BufferTooShortError;
1084
1085pub trait BufferProvider<Input, Output> {
1102 type Error;
1106
1107 fn alloc_no_reuse(
1117 self,
1118 prefix: usize,
1119 body: usize,
1120 suffix: usize,
1121 ) -> Result<Output, Self::Error>;
1122
1123 fn reuse_or_realloc(
1136 self,
1137 buffer: Input,
1138 prefix: usize,
1139 suffix: usize,
1140 ) -> Result<Output, (Self::Error, Input)>;
1141}
1142
1143pub trait BufferAlloc<Output> {
1164 type Error;
1168
1169 fn alloc(self, len: usize) -> Result<Output, Self::Error>;
1171}
1172
1173impl<O, E, F: FnOnce(usize) -> Result<O, E>> BufferAlloc<O> for F {
1174 type Error = E;
1175
1176 #[inline]
1177 fn alloc(self, len: usize) -> Result<O, E> {
1178 self(len)
1179 }
1180}
1181
1182impl BufferAlloc<Never> for () {
1183 type Error = ();
1184
1185 #[inline]
1186 fn alloc(self, _len: usize) -> Result<Never, ()> {
1187 Err(())
1188 }
1189}
1190
1191pub fn new_buf_vec(len: usize) -> Result<Buf<Vec<u8>>, Never> {
1202 Ok(Buf::new(vec![0; len], ..))
1203}
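
// Added example: any `FnOnce(usize) -> Result<O, E>` is a `BufferAlloc`, so
// `new_buf_vec` above can be passed wherever an allocator is expected, and an
// ad-hoc closure (ours, for illustration) can refuse oversized allocations.
#[cfg(test)]
mod buffer_alloc_example {
    use super::*;

    #[test]
    fn closures_are_allocators() {
        // `new_buf_vec` is a function item and therefore an allocator.
        let buf = BufferAlloc::alloc(new_buf_vec, 4).unwrap();
        assert_eq!(buf.len(), 4);

        // A closure can cap how much it is willing to allocate.
        let capped = |len: usize| -> Result<Buf<Vec<u8>>, ()> {
            if len <= 8 {
                Ok(Buf::new(vec![0; len], ..))
            } else {
                Err(())
            }
        };
        assert!(BufferAlloc::alloc(capped, 16).is_err());
    }
}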
1204
1205#[inline]
1227pub fn try_reuse_buffer<B: GrowBufferMut + ShrinkBuffer>(
1228 mut buffer: B,
1229 prefix: usize,
1230 suffix: usize,
1231 max_copy_bytes: usize,
1232) -> Result<B, B> {
1233 let need_prefix = prefix;
1234 let need_suffix = suffix;
1235 let have_prefix = buffer.prefix_len();
1236 let have_body = buffer.len();
1237 let have_suffix = buffer.suffix_len();
1238 let need_capacity = need_prefix + have_body + need_suffix;
1239
1240 if have_prefix >= need_prefix && have_suffix >= need_suffix {
1241 Ok(buffer)
1243 } else if buffer.capacity() >= need_capacity && have_body <= max_copy_bytes {
1244 buffer.reset();
1248
1249 buffer.copy_within(have_prefix..(have_prefix + have_body), need_prefix);
1255 buffer.shrink(need_prefix..(need_prefix + have_body));
1256 debug_assert_eq!(buffer.prefix_len(), need_prefix);
1257 debug_assert!(buffer.suffix_len() >= need_suffix);
1258 debug_assert_eq!(buffer.len(), have_body);
1259 Ok(buffer)
1260 } else {
1261 Err(buffer)
1262 }
1263}
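
// Worked example (added): a buffer whose body sits flush against the front has
// no spare prefix, but if its total capacity is large enough the body can be
// shifted right to make room, at the cost of copying at most `max_copy_bytes`.
#[cfg(test)]
mod try_reuse_buffer_example {
    use super::*;

    #[test]
    fn shifts_body_when_capacity_allows() {
        // Eight bytes of capacity with the body occupying 0..4: no prefix,
        // four bytes of suffix.
        let buffer = Buf::new(vec![1, 2, 3, 4, 0, 0, 0, 0], 0..4);
        // Asking for a two-byte prefix forces the body to move right by two.
        let reused = try_reuse_buffer(buffer, 2, 2, usize::MAX).unwrap();
        assert_eq!(reused.as_ref(), &[1, 2, 3, 4][..]);
        assert_eq!(reused.prefix_len(), 2);
        assert_eq!(reused.suffix_len(), 2);
    }
}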
1264
1265pub struct MaybeReuseBufferProvider<A>(pub A);
1269
1270impl<I: ReusableBuffer, O: ReusableBuffer, A: BufferAlloc<O>> BufferProvider<I, Either<I, O>>
1271 for MaybeReuseBufferProvider<A>
1272{
1273 type Error = A::Error;
1274
1275 fn alloc_no_reuse(
1276 self,
1277 prefix: usize,
1278 body: usize,
1279 suffix: usize,
1280 ) -> Result<Either<I, O>, Self::Error> {
1281 let Self(alloc) = self;
1282 let need_capacity = prefix + body + suffix;
1283 BufferAlloc::alloc(alloc, need_capacity).map(|mut buf| {
1284 buf.shrink(prefix..(prefix + body));
1285 Either::B(buf)
1286 })
1287 }
1288
1289 #[inline]
1298 fn reuse_or_realloc(
1299 self,
1300 buffer: I,
1301 need_prefix: usize,
1302 need_suffix: usize,
1303 ) -> Result<Either<I, O>, (A::Error, I)> {
1304 match try_reuse_buffer(buffer, need_prefix, need_suffix, usize::MAX) {
1309 Ok(buffer) => Ok(Either::A(buffer)),
1310 Err(buffer) => {
1311 let have_body = buffer.len();
1312 let mut buf = match BufferProvider::<I, Either<I, O>>::alloc_no_reuse(
1313 self,
1314 need_prefix,
1315 have_body,
1316 need_suffix,
1317 ) {
1318 Ok(buf) => buf,
1319 Err(err) => return Err((err, buffer)),
1320 };
1321
1322 buf.copy_from(&buffer);
1323 debug_assert_eq!(buf.prefix_len(), need_prefix);
1324 debug_assert!(buf.suffix_len() >= need_suffix);
1325 debug_assert_eq!(buf.len(), have_body);
1326 Ok(buf)
1327 }
1328 }
1329 }
1330}
1331
1332impl<B: ReusableBuffer, A: BufferAlloc<B>> BufferProvider<B, B> for MaybeReuseBufferProvider<A> {
1333 type Error = A::Error;
1334
1335 fn alloc_no_reuse(self, prefix: usize, body: usize, suffix: usize) -> Result<B, Self::Error> {
1336 BufferProvider::<B, Either<B, B>>::alloc_no_reuse(self, prefix, body, suffix)
1337 .map(Either::into_inner)
1338 }
1339
1340 #[inline]
1349 fn reuse_or_realloc(self, buffer: B, prefix: usize, suffix: usize) -> Result<B, (A::Error, B)> {
1350 BufferProvider::<B, Either<B, B>>::reuse_or_realloc(self, buffer, prefix, suffix)
1351 .map(Either::into_inner)
1352 }
1353}
1354
1355pub struct NoReuseBufferProvider<A>(pub A);
1359
1360impl<I: FragmentedBuffer, O: ReusableBuffer, A: BufferAlloc<O>> BufferProvider<I, O>
1361 for NoReuseBufferProvider<A>
1362{
1363 type Error = A::Error;
1364
1365 fn alloc_no_reuse(self, prefix: usize, body: usize, suffix: usize) -> Result<O, A::Error> {
1366 let Self(alloc) = self;
1367 alloc.alloc(prefix + body + suffix).map(|mut b| {
1368 b.shrink(prefix..prefix + body);
1369 b
1370 })
1371 }
1372
1373 fn reuse_or_realloc(self, buffer: I, prefix: usize, suffix: usize) -> Result<O, (A::Error, I)> {
1374 BufferProvider::<I, O>::alloc_no_reuse(self, prefix, buffer.len(), suffix)
1375 .map(|mut b| {
1376 b.copy_from(&buffer);
1377 b
1378 })
1379 .map_err(|e| (e, buffer))
1380 }
1381}
1382
1383pub trait Serializer: Sized {
1384 type Buffer;
1386
1387 fn serialize<B: GrowBufferMut, P: BufferProvider<Self::Buffer, B>>(
1400 self,
1401 outer: PacketConstraints,
1402 provider: P,
1403 ) -> Result<B, (SerializeError<P::Error>, Self)>;
1404
1405 fn serialize_new_buf<B: ReusableBuffer, A: BufferAlloc<B>>(
1412 &self,
1413 outer: PacketConstraints,
1414 alloc: A,
1415 ) -> Result<B, SerializeError<A::Error>>;
1416
1417 #[inline]
1433 #[allow(clippy::type_complexity)]
1434 fn serialize_vec(
1435 self,
1436 outer: PacketConstraints,
1437 ) -> Result<Either<Self::Buffer, Buf<Vec<u8>>>, (SerializeError<Never>, Self)>
1438 where
1439 Self::Buffer: ReusableBuffer,
1440 {
1441 self.serialize(outer, MaybeReuseBufferProvider(new_buf_vec))
1442 }
1443
1444 #[inline]
1458 fn serialize_no_alloc(
1459 self,
1460 outer: PacketConstraints,
1461 ) -> Result<Self::Buffer, (SerializeError<BufferTooShortError>, Self)>
1462 where
1463 Self::Buffer: ReusableBuffer,
1464 {
1465 self.serialize(outer, MaybeReuseBufferProvider(())).map(Either::into_a).map_err(
1466 |(err, slf)| {
1467 (
1468 match err {
1469 SerializeError::Alloc(()) => BufferTooShortError.into(),
1470 SerializeError::SizeLimitExceeded => SerializeError::SizeLimitExceeded,
1471 },
1472 slf,
1473 )
1474 },
1475 )
1476 }
1477
1478 #[inline]
1487 fn serialize_outer<B: GrowBufferMut, P: BufferProvider<Self::Buffer, B>>(
1488 self,
1489 provider: P,
1490 ) -> Result<B, (SerializeError<P::Error>, Self)> {
1491 self.serialize(PacketConstraints::UNCONSTRAINED, provider)
1492 }
1493
1494 #[inline]
1505 #[allow(clippy::type_complexity)]
1506 fn serialize_vec_outer(
1507 self,
1508 ) -> Result<Either<Self::Buffer, Buf<Vec<u8>>>, (SerializeError<Never>, Self)>
1509 where
1510 Self::Buffer: ReusableBuffer,
1511 {
1512 self.serialize_vec(PacketConstraints::UNCONSTRAINED)
1513 }
1514
1515 #[inline]
1525 fn serialize_no_alloc_outer(
1526 self,
1527 ) -> Result<Self::Buffer, (SerializeError<BufferTooShortError>, Self)>
1528 where
1529 Self::Buffer: ReusableBuffer,
1530 {
1531 self.serialize_no_alloc(PacketConstraints::UNCONSTRAINED)
1532 }
1533
1534 #[inline]
1541 fn wrap_in<B: PacketBuilder>(self, outer: B) -> Nested<Self, B> {
1542 outer.wrap_body(self)
1543 }
1544
1545 #[inline]
1554 fn with_size_limit(self, limit: usize) -> Nested<Self, LimitedSizePacketBuilder> {
1555 self.wrap_in(LimitedSizePacketBuilder { limit })
1556 }
1557}
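
// End-to-end sketch (added for illustration): a body buffer is wrapped with a
// size limit and serialized into a new `Vec`, showing the typical
// `with_size_limit` / `serialize_vec_outer` flow and the error path that hands
// the serializer back unchanged.
#[cfg(test)]
mod serializer_example {
    use super::*;

    #[test]
    fn wrap_and_serialize() {
        let body = Buf::new(vec![1, 2, 3], ..);
        // Within the limit, serialization succeeds and yields the body bytes.
        let buf = body.clone().with_size_limit(4).serialize_vec_outer().unwrap();
        assert_eq!(buf.as_ref(), &[1, 2, 3][..]);
        // Exceeding the limit is reported as a size-limit error and the
        // serializer is returned so the caller can retry or inspect it.
        let (err, ser) = body.with_size_limit(2).serialize_vec_outer().unwrap_err();
        assert_eq!(err, SerializeError::SizeLimitExceeded);
        assert_eq!(ser.into_inner(), Buf::new(vec![1, 2, 3], ..));
    }
}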
1558
1559#[derive(Copy, Clone, Debug, Eq, PartialEq)]
1566pub struct InnerSerializer<I, B> {
1567 inner: I,
1568 buffer: B,
1573}
1574
1575impl<I, B> InnerSerializer<I, B> {
1576 pub fn inner(&self) -> &I {
1577 &self.inner
1578 }
1579}
1580
1581struct InnerPacketBuilderWrapper<I>(I);
1587
1588impl<I: InnerPacketBuilder> PacketBuilder for InnerPacketBuilderWrapper<I> {
1589 fn constraints(&self) -> PacketConstraints {
1590 let Self(wrapped) = self;
1591 PacketConstraints::new(wrapped.bytes_len(), 0, 0, usize::MAX)
1592 }
1593
1594 fn serialize(&self, target: &mut SerializeTarget<'_>, _body: FragmentedBytesMut<'_, '_>) {
1595 let Self(wrapped) = self;
1596
1597 debug_assert_eq!(target.header.len(), wrapped.bytes_len());
1601 debug_assert_eq!(target.footer.len(), 0);
1602
1603 InnerPacketBuilder::serialize(wrapped, target.header);
1604 }
1605}
1606
1607impl<I: InnerPacketBuilder, B: GrowBuffer + ShrinkBuffer> Serializer for InnerSerializer<I, B> {
1608 type Buffer = B;
1609
1610 #[inline]
1611 fn serialize<BB: GrowBufferMut, P: BufferProvider<B, BB>>(
1612 self,
1613 outer: PacketConstraints,
1614 provider: P,
1615 ) -> Result<BB, (SerializeError<P::Error>, InnerSerializer<I, B>)> {
1616 debug_assert_eq!(self.buffer.len(), 0);
1617 InnerPacketBuilderWrapper(self.inner)
1618 .wrap_body(self.buffer)
1619 .serialize(outer, provider)
1620 .map_err(|(err, Nested { inner: buffer, outer: pb })| {
1621 (err, InnerSerializer { inner: pb.0, buffer })
1622 })
1623 }
1624
1625 #[inline]
1626 fn serialize_new_buf<BB: ReusableBuffer, A: BufferAlloc<BB>>(
1627 &self,
1628 outer: PacketConstraints,
1629 alloc: A,
1630 ) -> Result<BB, SerializeError<A::Error>> {
1631 InnerPacketBuilderWrapper(&self.inner).wrap_body(EmptyBuf).serialize_new_buf(outer, alloc)
1632 }
1633}
1634
1635impl<B: GrowBuffer + ShrinkBuffer> Serializer for B {
1636 type Buffer = B;
1637
1638 #[inline]
1639 fn serialize<BB: GrowBufferMut, P: BufferProvider<Self::Buffer, BB>>(
1640 self,
1641 outer: PacketConstraints,
1642 provider: P,
1643 ) -> Result<BB, (SerializeError<P::Error>, Self)> {
1644 TruncatingSerializer::new(self, TruncateDirection::NoTruncating)
1645 .serialize(outer, provider)
1646 .map_err(|(err, ser)| (err, ser.buffer))
1647 }
1648
1649 fn serialize_new_buf<BB: ReusableBuffer, A: BufferAlloc<BB>>(
1650 &self,
1651 outer: PacketConstraints,
1652 alloc: A,
1653 ) -> Result<BB, SerializeError<A::Error>> {
1654 if self.len() > outer.max_body_len() {
1655 return Err(SerializeError::SizeLimitExceeded);
1656 }
1657
1658 let padding = outer.min_body_len().saturating_sub(self.len());
1659 let tail_size = padding + outer.footer_len();
1660 let buffer_size = outer.header_len() + self.len() + tail_size;
1661 let mut buffer = alloc.alloc(buffer_size)?;
1662 buffer.shrink_front(outer.header_len());
1663 buffer.shrink_back(tail_size);
1664 buffer.copy_from(self);
1665 buffer.grow_back(padding);
1666 Ok(buffer)
1667 }
1668}
1669
1670pub enum EitherSerializer<A, B> {
1674 A(A),
1675 B(B),
1676}
1677
1678impl<A: Serializer, B: Serializer<Buffer = A::Buffer>> Serializer for EitherSerializer<A, B> {
1679 type Buffer = A::Buffer;
1680
1681 fn serialize<TB: GrowBufferMut, P: BufferProvider<Self::Buffer, TB>>(
1682 self,
1683 outer: PacketConstraints,
1684 provider: P,
1685 ) -> Result<TB, (SerializeError<P::Error>, Self)> {
1686 match self {
1687 EitherSerializer::A(s) => {
1688 s.serialize(outer, provider).map_err(|(err, s)| (err, EitherSerializer::A(s)))
1689 }
1690 EitherSerializer::B(s) => {
1691 s.serialize(outer, provider).map_err(|(err, s)| (err, EitherSerializer::B(s)))
1692 }
1693 }
1694 }
1695
1696 fn serialize_new_buf<TB: ReusableBuffer, BA: BufferAlloc<TB>>(
1697 &self,
1698 outer: PacketConstraints,
1699 alloc: BA,
1700 ) -> Result<TB, SerializeError<BA::Error>> {
1701 match self {
1702 EitherSerializer::A(s) => s.serialize_new_buf(outer, alloc),
1703 EitherSerializer::B(s) => s.serialize_new_buf(outer, alloc),
1704 }
1705 }
1706}
1707
1708#[derive(Copy, Clone, Debug, Eq, PartialEq)]
1711pub enum TruncateDirection {
1712 DiscardFront,
1715 DiscardBack,
1718 NoTruncating,
1720}
1721
1722#[derive(Copy, Clone, Debug, Eq, PartialEq)]
1734pub struct TruncatingSerializer<B> {
1735 buffer: B,
1736 direction: TruncateDirection,
1737}
1738
1739impl<B> TruncatingSerializer<B> {
1740 pub fn new(buffer: B, direction: TruncateDirection) -> TruncatingSerializer<B> {
1742 TruncatingSerializer { buffer, direction }
1743 }
1744
1745 pub fn buffer(&self) -> &B {
1747 &self.buffer
1748 }
1749
1750 pub fn buffer_mut(&mut self) -> &mut B {
1752 &mut self.buffer
1753 }
1754}
1755
1756impl<B: GrowBuffer + ShrinkBuffer> Serializer for TruncatingSerializer<B> {
1757 type Buffer = B;
1758
1759 fn serialize<BB: GrowBufferMut, P: BufferProvider<B, BB>>(
1760 mut self,
1761 outer: PacketConstraints,
1762 provider: P,
1763 ) -> Result<BB, (SerializeError<P::Error>, Self)> {
1764 let original_len = self.buffer.len();
1765 let excess_bytes = if original_len > outer.max_body_len {
1766 Some(original_len - outer.max_body_len)
1767 } else {
1768 None
1769 };
1770 if let Some(excess_bytes) = excess_bytes {
1771 match self.direction {
1772 TruncateDirection::DiscardFront => self.buffer.shrink_front(excess_bytes),
1773 TruncateDirection::DiscardBack => self.buffer.shrink_back(excess_bytes),
1774 TruncateDirection::NoTruncating => {
1775 return Err((SerializeError::SizeLimitExceeded, self))
1776 }
1777 }
1778 }
1779
1780 let padding = outer.min_body_len().saturating_sub(self.buffer.len());
1781
1782 debug_assert!(self.buffer.len() + padding <= outer.max_body_len());
1786 match provider.reuse_or_realloc(
1787 self.buffer,
1788 outer.header_len(),
1789 padding + outer.footer_len(),
1790 ) {
1791 Ok(buffer) => Ok(buffer),
1792 Err((err, mut buffer)) => {
1793 if let Some(excess_bytes) = excess_bytes {
1797 match self.direction {
1798 TruncateDirection::DiscardFront => buffer.grow_front(excess_bytes),
1799 TruncateDirection::DiscardBack => buffer.grow_back(excess_bytes),
1800 TruncateDirection::NoTruncating => unreachable!(),
1801 }
1802 }
1803
1804 Err((
1805 SerializeError::Alloc(err),
1806 TruncatingSerializer { buffer, direction: self.direction },
1807 ))
1808 }
1809 }
1810 }
1811
1812 fn serialize_new_buf<BB: ReusableBuffer, A: BufferAlloc<BB>>(
1813 &self,
1814 outer: PacketConstraints,
1815 alloc: A,
1816 ) -> Result<BB, SerializeError<A::Error>> {
1817 let truncated_size = cmp::min(self.buffer.len(), outer.max_body_len());
1818 let discarded_bytes = self.buffer.len() - truncated_size;
1819 let padding = outer.min_body_len().saturating_sub(truncated_size);
1820 let tail_size = padding + outer.footer_len();
1821 let buffer_size = outer.header_len() + truncated_size + tail_size;
1822 let mut buffer = alloc.alloc(buffer_size)?;
1823 buffer.shrink_front(outer.header_len());
1824 buffer.shrink_back(tail_size);
1825 buffer.with_bytes_mut(|mut dst| {
1826 self.buffer.with_bytes(|src| {
1827 let src = match (discarded_bytes > 0, self.direction) {
1828 (false, _) => src,
1829 (true, TruncateDirection::DiscardFront) => src.slice(discarded_bytes..),
1830 (true, TruncateDirection::DiscardBack) => src.slice(..truncated_size),
1831 (true, TruncateDirection::NoTruncating) => {
1832 return Err(SerializeError::SizeLimitExceeded)
1833 }
1834 };
1835 dst.copy_from(&src);
1836 Ok(())
1837 })
1838 })?;
1839 buffer.grow_back_zero(padding);
1840 Ok(buffer)
1841 }
1842}
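
// Focused example (added; it overlaps with `test_truncating_serializer` below
// but isolates one case): with `DiscardFront`, hitting a size limit drops
// bytes from the front of the body instead of failing.
#[cfg(test)]
mod truncating_serializer_example {
    use super::*;

    #[test]
    fn discard_front_truncates() {
        let ser = TruncatingSerializer::new(
            Buf::new(vec![1, 2, 3, 4, 5], ..),
            TruncateDirection::DiscardFront,
        );
        let buf = ser.with_size_limit(3).serialize_vec_outer().unwrap();
        assert_eq!(buf.as_ref(), &[3, 4, 5][..]);
    }
}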
1843
1844impl<I: Serializer, O: PacketBuilder> Serializer for Nested<I, O> {
1845 type Buffer = I::Buffer;
1846
1847 #[inline]
1848 fn serialize<B: GrowBufferMut, P: BufferProvider<I::Buffer, B>>(
1849 self,
1850 outer: PacketConstraints,
1851 provider: P,
1852 ) -> Result<B, (SerializeError<P::Error>, Self)> {
1853 let Some(outer) = self.outer.constraints().try_encapsulate(&outer) else {
1854 return Err((SerializeError::SizeLimitExceeded, self));
1855 };
1856
1857 match self.inner.serialize(outer, provider) {
1858 Ok(mut buf) => {
1859 buf.serialize(&self.outer);
1860 Ok(buf)
1861 }
1862 Err((err, inner)) => Err((err, self.outer.wrap_body(inner))),
1863 }
1864 }
1865
1866 #[inline]
1867 fn serialize_new_buf<B: ReusableBuffer, A: BufferAlloc<B>>(
1868 &self,
1869 outer: PacketConstraints,
1870 alloc: A,
1871 ) -> Result<B, SerializeError<A::Error>> {
1872 let Some(outer) = self.outer.constraints().try_encapsulate(&outer) else {
1873 return Err(SerializeError::SizeLimitExceeded);
1874 };
1875
1876 let mut buf = self.inner.serialize_new_buf(outer, alloc)?;
1877 GrowBufferMut::serialize(&mut buf, &self.outer);
1878 Ok(buf)
1879 }
1880}
1881
1882pub trait PartialPacketBuilder: PacketBuilder {
1884 fn partial_serialize(&self, body_len: usize, buffer: &mut [u8]);
1893}
1894
1895impl PartialPacketBuilder for () {
1896 fn partial_serialize(&self, _body_len: usize, _buffer: &mut [u8]) {}
1897}
1898
1899#[derive(Debug, Eq, PartialEq)]
1901pub struct PartialSerializeResult {
1902 pub bytes_written: usize,
1904
1905 pub total_size: usize,
1907}
1908
1909pub trait PartialSerializer: Sized {
1914 fn partial_serialize(
1924 &self,
1925 outer: PacketConstraints,
1926 buffer: &mut [u8],
1927 ) -> Result<PartialSerializeResult, SerializeError<Never>>;
1928}
1929
1930impl<B: GrowBuffer + ShrinkBuffer> PartialSerializer for B {
1931 fn partial_serialize(
1932 &self,
1933 _outer: PacketConstraints,
1934 _buffer: &mut [u8],
1935 ) -> Result<PartialSerializeResult, SerializeError<Never>> {
1936 Ok(PartialSerializeResult { bytes_written: 0, total_size: self.len() })
1937 }
1938}
1939
1940impl<B: GrowBuffer + ShrinkBuffer> PartialSerializer for TruncatingSerializer<B> {
1941 fn partial_serialize(
1942 &self,
1943 outer: PacketConstraints,
1944 _buffer: &mut [u8],
1945 ) -> Result<PartialSerializeResult, SerializeError<Never>> {
1946 let total_size =
1947 cmp::max(outer.min_body_len(), cmp::min(self.buffer().len(), outer.max_body_len()));
1948 Ok(PartialSerializeResult { bytes_written: 0, total_size })
1949 }
1950}
1951
1952impl<I: InnerPacketBuilder, B: GrowBuffer + ShrinkBuffer> PartialSerializer
1953 for InnerSerializer<I, B>
1954{
1955 fn partial_serialize(
1956 &self,
1957 outer: PacketConstraints,
1958 _buffer: &mut [u8],
1959 ) -> Result<PartialSerializeResult, SerializeError<Never>> {
1960 Ok(PartialSerializeResult {
1961 bytes_written: 0,
1962 total_size: cmp::max(self.inner().bytes_len(), outer.min_body_len()),
1963 })
1964 }
1965}
1966
1967impl<A: Serializer + PartialSerializer, B: Serializer + PartialSerializer> PartialSerializer
1968 for EitherSerializer<A, B>
1969{
1970 fn partial_serialize(
1971 &self,
1972 outer: PacketConstraints,
1973 buffer: &mut [u8],
1974 ) -> Result<PartialSerializeResult, SerializeError<Never>> {
1975 match self {
1976 EitherSerializer::A(s) => s.partial_serialize(outer, buffer),
1977 EitherSerializer::B(s) => s.partial_serialize(outer, buffer),
1978 }
1979 }
1980}
1981
1982impl<I: PartialSerializer, O: PartialPacketBuilder> PartialSerializer for Nested<I, O> {
1983 fn partial_serialize(
1984 &self,
1985 outer: PacketConstraints,
1986 buffer: &mut [u8],
1987 ) -> Result<PartialSerializeResult, SerializeError<Never>> {
1988 let header_constraints = self.outer.constraints();
        let Some(constraints) = header_constraints.try_encapsulate(&outer) else {
1990 return Err(SerializeError::SizeLimitExceeded);
1991 };
1992
1993 let header_len = header_constraints.header_len();
1994 let inner_buf = buffer.get_mut(header_len..).unwrap_or(&mut []);
1995 let mut result = self.inner.partial_serialize(constraints, inner_buf)?;
1996 if header_len <= buffer.len() {
1997 self.outer.partial_serialize(result.total_size, &mut buffer[..header_len]);
1998 result.bytes_written += header_len;
1999 }
2000 result.total_size += header_len + header_constraints.footer_len();
2001 Ok(result)
2002 }
2003}
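
// Added sketch (the builder here is hypothetical): partial serialization
// writes only the outer headers into a prefix of `buffer` and reports how
// large the fully serialized packet would have been.
#[cfg(test)]
mod partial_serializer_example {
    use super::*;

    struct TwoByteHeader;

    impl PacketBuilder for TwoByteHeader {
        fn constraints(&self) -> PacketConstraints {
            PacketConstraints::new(2, 0, 0, usize::MAX)
        }
        fn serialize(&self, target: &mut SerializeTarget<'_>, _body: FragmentedBytesMut<'_, '_>) {
            target.header.copy_from_slice(&[0xAB, 0xCD]);
        }
    }

    impl PartialPacketBuilder for TwoByteHeader {
        fn partial_serialize(&self, _body_len: usize, buffer: &mut [u8]) {
            buffer.copy_from_slice(&[0xAB, 0xCD]);
        }
    }

    #[test]
    fn header_written_and_total_size_reported() {
        let ser = Buf::new(vec![0u8; 10], ..).wrap_in(TwoByteHeader);
        let mut scratch = [0u8; 2];
        let result = ser
            .partial_serialize(PacketConstraints::UNCONSTRAINED, &mut scratch[..])
            .unwrap();
        // Only the header fit into `scratch`, but the reported total size
        // still accounts for the 10-byte body.
        assert_eq!(result, PartialSerializeResult { bytes_written: 2, total_size: 12 });
        assert_eq!(scratch, [0xAB, 0xCD]);
    }
}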
2004
2005#[cfg(test)]
2006mod tests {
2007 use super::*;
2008 use crate::BufferMut;
2009 use std::fmt::Debug;
2010 use test_case::test_case;
2011 use test_util::{assert_geq, assert_leq};
2012
2013 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
2019 struct DummyPacketBuilder {
2020 header_len: usize,
2021 footer_len: usize,
2022 min_body_len: usize,
2023 max_body_len: usize,
2024 }
2025
2026 impl DummyPacketBuilder {
2027 fn new(
2028 header_len: usize,
2029 footer_len: usize,
2030 min_body_len: usize,
2031 max_body_len: usize,
2032 ) -> DummyPacketBuilder {
2033 DummyPacketBuilder { header_len, footer_len, min_body_len, max_body_len }
2034 }
2035 }
2036
2037 fn fill(bytes: &mut [u8], byte: u8) {
2038 for b in bytes {
2039 *b = byte;
2040 }
2041 }
2042
2043 impl PacketBuilder for DummyPacketBuilder {
2044 fn constraints(&self) -> PacketConstraints {
2045 PacketConstraints::new(
2046 self.header_len,
2047 self.footer_len,
2048 self.min_body_len,
2049 self.max_body_len,
2050 )
2051 }
2052
2053 fn serialize(&self, target: &mut SerializeTarget<'_>, body: FragmentedBytesMut<'_, '_>) {
2054 assert_eq!(target.header.len(), self.header_len);
2055 assert_eq!(target.footer.len(), self.footer_len);
2056 assert!(body.len() >= self.min_body_len);
2057 assert!(body.len() <= self.max_body_len);
2058 fill(target.header, 0xFF);
2059 fill(target.footer, 0xFE);
2060 }
2061 }
2062
2063 impl InnerPacketBuilder for DummyPacketBuilder {
2064 fn bytes_len(&self) -> usize {
2065 self.header_len
2066 }
2067
2068 fn serialize(&self, buffer: &mut [u8]) {
2069 assert_eq!(buffer.len(), self.header_len);
2070 fill(buffer, 0xFF);
2071 }
2072 }
2073
2074 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
2076 struct SerializerVerifier {
2077 inner_len: Option<usize>,
2080
2081 truncating: bool,
2084 }
2085
2086 impl SerializerVerifier {
2087 fn new<S: Serializer>(serializer: &S, truncating: bool) -> Self {
2088 let inner_len = serializer
2089 .serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec)
2090 .map(|buf| buf.len())
2091 .inspect_err(|err| assert!(err.is_size_limit_exceeded()))
2092 .ok();
2093 Self { inner_len, truncating }
2094 }
2095
2096 fn verify_result<B: GrowBufferMut, A>(
2097 &self,
2098 result: Result<&B, &SerializeError<A>>,
2099 outer: PacketConstraints,
2100 ) {
2101 let should_exceed_size_limit = match self.inner_len {
2102 Some(inner_len) => outer.max_body_len() < inner_len && !self.truncating,
2103 None => true,
2104 };
2105
2106 match result {
2107 Ok(buf) => {
2108 assert_geq!(buf.prefix_len(), outer.header_len());
2109 assert_geq!(buf.suffix_len(), outer.footer_len());
2110 assert_leq!(buf.len(), outer.max_body_len());
2111
2112 let padding = outer.min_body_len().saturating_sub(buf.len());
2117 assert_leq!(padding + outer.footer_len(), buf.suffix_len());
2118
2119 assert!(!should_exceed_size_limit);
2120 }
2121 Err(err) => {
2122 if should_exceed_size_limit {
2125 assert!(err.is_size_limit_exceeded());
2126 } else {
2127 assert!(err.is_alloc());
2128 }
2129 }
2130 }
2131 }
2132 }
2133
2134 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
2143 struct VerifyingSerializer<S> {
2144 ser: S,
2145 verifier: SerializerVerifier,
2146 }
2147
2148 impl<S: Serializer + Debug + Clone + Eq> Serializer for VerifyingSerializer<S>
2149 where
2150 S::Buffer: ReusableBuffer,
2151 {
2152 type Buffer = S::Buffer;
2153
2154 fn serialize<B: GrowBufferMut, P: BufferProvider<Self::Buffer, B>>(
2155 self,
2156 outer: PacketConstraints,
2157 provider: P,
2158 ) -> Result<B, (SerializeError<P::Error>, Self)> {
2159 let Self { ser, verifier } = self;
2160 let orig = ser.clone();
2161
2162 let result = ser.serialize(outer, provider).map_err(|(err, ser)| {
2163 assert_eq!(ser, orig);
2166 (err, Self { ser, verifier })
2167 });
2168
2169 verifier.verify_result(result.as_ref().map_err(|(err, _ser)| err), outer);
2170
2171 result
2172 }
2173
2174 fn serialize_new_buf<B: ReusableBuffer, A: BufferAlloc<B>>(
2175 &self,
2176 outer: PacketConstraints,
2177 alloc: A,
2178 ) -> Result<B, SerializeError<A::Error>> {
2179 let res = self.ser.serialize_new_buf(outer, alloc);
2180 self.verifier.verify_result(res.as_ref(), outer);
2181 res
2182 }
2183 }
2184
2185 trait SerializerExt: Serializer {
2186 fn into_verifying(self, truncating: bool) -> VerifyingSerializer<Self>
2187 where
2188 Self::Buffer: ReusableBuffer,
2189 {
2190 let verifier = SerializerVerifier::new(&self, truncating);
2191 VerifyingSerializer { ser: self, verifier }
2192 }
2193
2194 fn wrap_in_verifying<B: PacketBuilder>(
2195 self,
2196 outer: B,
2197 truncating: bool,
2198 ) -> VerifyingSerializer<Nested<Self, B>>
2199 where
2200 Self::Buffer: ReusableBuffer,
2201 {
2202 self.wrap_in(outer).into_verifying(truncating)
2203 }
2204
2205 fn with_size_limit_verifying(
2206 self,
2207 limit: usize,
2208 truncating: bool,
2209 ) -> VerifyingSerializer<Nested<Self, LimitedSizePacketBuilder>>
2210 where
2211 Self::Buffer: ReusableBuffer,
2212 {
2213 self.with_size_limit(limit).into_verifying(truncating)
2214 }
2215 }
2216
2217 impl<S: Serializer> SerializerExt for S {}
2218
2219 #[test]
2220 fn test_either_into_inner() {
2221 fn ret_either(a: u32, b: u32, c: bool) -> Either<u32, u32> {
2222 if c {
2223 Either::A(a)
2224 } else {
2225 Either::B(b)
2226 }
2227 }
2228
2229 assert_eq!(ret_either(1, 2, true).into_inner(), 1);
2230 assert_eq!(ret_either(1, 2, false).into_inner(), 2);
2231 }
2232
2233 #[test]
2234 fn test_either_unwrap_success() {
2235 assert_eq!(Either::<u16, u32>::A(5).unwrap_a(), 5);
2236 assert_eq!(Either::<u16, u32>::B(10).unwrap_b(), 10);
2237 }
2238
2239 #[test]
2240 #[should_panic]
2241 fn test_either_unwrap_a_panic() {
2242 let _: u16 = Either::<u16, u32>::B(10).unwrap_a();
2243 }
2244
2245 #[test]
2246 #[should_panic]
2247 fn test_either_unwrap_b_panic() {
2248 let _: u32 = Either::<u16, u32>::A(5).unwrap_b();
2249 }
2250
2251 #[test_case(Buf::new((0..100).collect(), ..); "entire buf")]
2252 #[test_case(Buf::new((0..100).collect(), 0..0); "empty range")]
2253 #[test_case(Buf::new((0..100).collect(), ..50); "prefix")]
2254 #[test_case(Buf::new((0..100).collect(), 50..); "suffix")]
2255 #[test_case(Buf::new((0..100).collect(), 25..75); "middle")]
2256 fn test_buf_into_inner(buf: Buf<Vec<u8>>) {
2257 assert_eq!(buf.clone().as_ref(), buf.into_inner());
2258 }
2259
2260 #[test]
2261 fn test_packet_constraints() {
2262 use PacketConstraints as PC;
2263
2264 assert!(PC::try_new(0, 0, 0, 0).is_some());
2268 assert!(PC::try_new(usize::MAX / 2, usize::MAX / 2, 0, 0).is_some());
2270 assert_eq!(PC::try_new(usize::MAX, 1, 0, 0), None);
2272 assert_eq!(PC::try_new(0, 0, 1, 0), None);
2274
2275 let pc = PC::new(10, 10, 0, usize::MAX);
2279 assert_eq!(pc.try_encapsulate(&pc).unwrap(), PC::new(20, 20, 0, usize::MAX - 20));
2280
2281 let pc = PC::new(10, 10, 0, usize::MAX);
2282 assert_eq!(pc.try_encapsulate(&pc).unwrap(), PC::new(20, 20, 0, usize::MAX - 20));
2283
2284 let inner = PC::new(10, 10, 0, usize::MAX);
2295 let outer = PC::new(0, 0, 10, usize::MAX);
2296 assert_eq!(inner.try_encapsulate(&outer).unwrap(), PC::new(10, 10, 0, usize::MAX - 20));
2297
2298 let inner = PC::new(usize::MAX, 0, 0, usize::MAX);
2302 let outer = PC::new(1, 0, 0, usize::MAX);
2303 assert_eq!(inner.try_encapsulate(&outer), None);
2304
2305 let inner = PC::new(0, usize::MAX, 0, usize::MAX);
2309 let outer = PC::new(0, 1, 0, usize::MAX);
2310 assert_eq!(inner.try_encapsulate(&outer), None);
2311
2312 let one_fifth_max = (usize::MAX / 5) + 1;
2319 let inner = PC::new(one_fifth_max, one_fifth_max, one_fifth_max, usize::MAX);
2320 let outer = PC::new(one_fifth_max, one_fifth_max, 0, usize::MAX);
2321 assert_eq!(inner.try_encapsulate(&outer), None);
2322
2323 let inner = PC::new(10, 10, 0, usize::MAX);
2328 let outer = PC::new(0, 0, 0, 10);
2329 assert_eq!(inner.try_encapsulate(&outer), None);
2330
2331 let inner = PC::new(0, 0, 10, usize::MAX);
2337 let outer = PC::new(0, 0, 0, 5);
2338 assert_eq!(inner.try_encapsulate(&outer), None);
2339 }
2340
2341 #[test]
2342 fn test_inner_serializer() {
2343 const INNER: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
2344
2345 fn concat<'a, I: IntoIterator<Item = &'a &'a [u8]>>(slices: I) -> Vec<u8> {
2346 let mut v = Vec::new();
2347 for slc in slices.into_iter() {
2348 v.extend_from_slice(slc);
2349 }
2350 v
2351 }
2352
2353 let buf = INNER.into_serializer().serialize_vec_outer().unwrap();
2355 assert_eq!(buf.as_ref(), INNER);
2356
2357 let buf = INNER
2360 .into_serializer()
2361 .into_verifying(false)
2362 .wrap_in(DummyPacketBuilder::new(0, 0, 20, usize::MAX))
2363 .serialize_vec_outer()
2364 .unwrap();
2365 assert_eq!(buf.as_ref(), concat(&[INNER, vec![0; 10].as_ref()]).as_slice());
2366
2367 let buf = INNER
2371 .into_serializer()
2372 .into_verifying(false)
2373 .wrap_in(DummyPacketBuilder::new(10, 10, 0, usize::MAX))
2374 .serialize_vec_outer()
2375 .unwrap();
2376 assert_eq!(
2377 buf.as_ref(),
2378 concat(&[vec![0xFF; 10].as_ref(), INNER, vec![0xFE; 10].as_ref()]).as_slice()
2379 );
2380
2381 assert_eq!(
2383 INNER
2384 .into_serializer()
2385 .into_verifying(false)
2386 .wrap_in(DummyPacketBuilder::new(0, 0, 0, 9))
2387 .serialize_vec_outer()
2388 .unwrap_err()
2389 .0,
2390 SerializeError::SizeLimitExceeded
2391 );
2392
2393 assert_eq!(
2397 INNER
2398 .into_serializer_with(Buf::new(vec![0xFF], ..))
2399 .into_verifying(false)
2400 .serialize_vec_outer()
2401 .unwrap()
2402 .as_ref(),
2403 INNER
2404 );
2405 }
2406
2407 #[test]
2408 fn test_buffer_serializer_and_inner_serializer() {
2409 fn verify_buffer_serializer<B: BufferMut + Debug>(
2410 buffer: B,
2411 header_len: usize,
2412 footer_len: usize,
2413 min_body_len: usize,
2414 ) {
2415 let old_body = buffer.to_flattened_vec();
2416 let serializer =
2417 DummyPacketBuilder::new(header_len, footer_len, min_body_len, usize::MAX)
2418 .wrap_body(buffer);
2419
2420 let buffer0 = serializer
2421 .serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec)
2422 .unwrap();
2423 verify(buffer0, &old_body, header_len, footer_len, min_body_len);
2424
2425 let buffer = serializer.serialize_vec_outer().unwrap();
2426 verify(buffer, &old_body, header_len, footer_len, min_body_len);
2427 }
2428
2429 fn verify_inner_packet_builder_serializer(
2430 body: &[u8],
2431 header_len: usize,
2432 footer_len: usize,
2433 min_body_len: usize,
2434 ) {
2435 let buffer = DummyPacketBuilder::new(header_len, footer_len, min_body_len, usize::MAX)
2436 .wrap_body(body.into_serializer())
2437 .serialize_vec_outer()
2438 .unwrap();
2439 verify(buffer, body, header_len, footer_len, min_body_len);
2440 }
2441
2442 fn verify<B: Buffer>(
2443 buffer: B,
2444 body: &[u8],
2445 header_len: usize,
2446 footer_len: usize,
2447 min_body_len: usize,
2448 ) {
2449 let flat = buffer.to_flattened_vec();
2450 let header_bytes = &flat[..header_len];
2451 let body_bytes = &flat[header_len..header_len + body.len()];
2452 let padding_len = min_body_len.saturating_sub(body.len());
2453 let padding_bytes =
2454 &flat[header_len + body.len()..header_len + body.len() + padding_len];
2455 let total_body_len = body.len() + padding_len;
2456 let footer_bytes = &flat[header_len + total_body_len..];
            assert_eq!(
                buffer.len() - total_body_len,
                header_len + footer_len,
                "buffer.len()({}) - total_body_len({}) != header_len({}) + footer_len({})",
                buffer.len(),
                total_body_len,
                header_len,
                footer_len,
            );
2466
2467 assert!(
2469 header_bytes.iter().all(|b| *b == 0xFF),
2470 "header_bytes {:?} are not filled with 0xFF's",
2471 header_bytes,
2472 );
2473 assert_eq!(body_bytes, body);
2474 assert!(
2476 padding_bytes.iter().all(|b| *b == 0),
2477 "padding_bytes {:?} are not filled with 0s",
2478 padding_bytes,
2479 );
2480 assert!(
2482 footer_bytes.iter().all(|b| *b == 0xFE),
2483 "footer_bytes {:?} are not filled with 0xFE's",
2484 footer_bytes,
2485 );
2486 }
2487
2488 for buf_len in 0..8 {
2491 for range_start in 0..buf_len {
2492 for range_end in range_start..buf_len {
2493 for prefix in 0..8 {
2494 for suffix in 0..8 {
2495 for min_body in 0..8 {
2496 let mut vec = vec![0; buf_len];
2497 #[allow(clippy::needless_range_loop)]
2502 for i in 0..vec.len() {
2503 vec[i] = i as u8;
2504 }
2505 verify_buffer_serializer(
2506 Buf::new(vec.as_mut_slice(), range_start..range_end),
2507 prefix,
2508 suffix,
2509 min_body,
2510 );
2511 if range_start == 0 {
2512 verify_inner_packet_builder_serializer(
2521 &vec.as_slice()[range_start..range_end],
2522 prefix,
2523 suffix,
2524 min_body,
2525 );
2526 }
2527 }
2528 }
2529 }
2530 }
2531 }
2532 }
2533 }
2534
2535 #[test]
2536 fn test_min_body_len() {
2537 let body = &[1, 2];
2542
2543 let inner = DummyPacketBuilder::new(2, 2, 0, usize::MAX);
2546 let outer = DummyPacketBuilder::new(2, 2, 8, usize::MAX);
2548 let buf = body
2549 .into_serializer()
2550 .into_verifying(false)
2551 .wrap_in_verifying(inner, false)
2552 .wrap_in_verifying(outer, false)
2553 .serialize_vec_outer()
2554 .unwrap();
2555 assert_eq!(buf.prefix_len(), 0);
2556 assert_eq!(buf.suffix_len(), 0);
        assert_eq!(
            buf.as_ref(),
            &[
                0xFF, 0xFF, // outer header
                0xFF, 0xFF, // inner header
                1, 2, // body
                0xFE, 0xFE, // inner footer
                0, 0, // padding to meet the outer minimum body length of 8
                0xFE, 0xFE, // outer footer
            ]
        );
2568 }
2569
2570 #[test]
2571 fn test_size_limit() {
2572 fn test<S: Serializer + Clone + Debug + Eq>(ser: S)
2574 where
2575 S::Buffer: ReusableBuffer,
2576 {
2577 let pb = DummyPacketBuilder::new(1, 1, 0, usize::MAX);
2583
2584 assert!(ser
2589 .clone()
2590 .wrap_in_verifying(pb, false)
2591 .with_size_limit_verifying(3, false)
2592 .serialize_vec_outer()
2593 .is_ok());
2594 assert!(ser
2596 .clone()
2597 .wrap_in_verifying(pb, false)
2598 .with_size_limit_verifying(4, false)
2599 .serialize_vec_outer()
2600 .is_ok());
2601 assert!(ser
2605 .clone()
2606 .with_size_limit_verifying(1, false)
2607 .wrap_in_verifying(pb, false)
2608 .with_size_limit_verifying(3, false)
2609 .serialize_vec_outer()
2610 .is_ok());
2611 assert!(ser
2614 .clone()
2615 .with_size_limit_verifying(0, false)
2616 .wrap_in_verifying(pb, false)
2617 .serialize_vec_outer()
2618 .is_err());
2619 assert!(ser
2625 .clone()
2626 .wrap_in_verifying(pb, false)
2627 .with_size_limit_verifying(1, false)
2628 .serialize_vec_outer()
2629 .is_err());
2630 }
2631
2632 test(DummyPacketBuilder::new(1, 0, 0, usize::MAX).into_serializer().into_verifying(false));
2634 test(Buf::new(vec![0], ..).into_verifying(false));
2635 }
2636
2637 #[test]
2638 fn test_truncating_serializer() {
2639 fn verify_result<S: Serializer + Debug>(ser: S, expected: &[u8])
2640 where
2641 S::Buffer: ReusableBuffer + AsRef<[u8]>,
2642 {
2643 let buf = ser.serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec).unwrap();
2644 assert_eq!(buf.as_ref(), &expected[..]);
2645 let buf = ser.serialize_vec_outer().unwrap();
2646 assert_eq!(buf.as_ref(), &expected[..]);
2647 }
2648
2649 let body = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
2651 let ser =
2652 TruncatingSerializer::new(Buf::new(body.clone(), ..), TruncateDirection::DiscardFront)
2653 .into_verifying(true)
2654 .with_size_limit_verifying(4, true);
2655 verify_result(ser, &[6, 7, 8, 9]);
2656
2657 let ser =
2659 TruncatingSerializer::new(Buf::new(body.clone(), ..), TruncateDirection::DiscardBack)
2660 .into_verifying(true)
2661 .with_size_limit_verifying(7, true);
2662 verify_result(ser, &[0, 1, 2, 3, 4, 5, 6]);
2663
2664 let ser =
2666 TruncatingSerializer::new(Buf::new(body.clone(), ..), TruncateDirection::NoTruncating)
2667 .into_verifying(false)
2668 .with_size_limit_verifying(5, true);
2669 assert!(ser.clone().serialize_vec_outer().is_err());
2670 assert!(ser.serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec).is_err());
2671 assert!(ser.serialize_vec_outer().is_err());
2672
2673 fn test_serialization_failure<S: Serializer + Clone + Eq + Debug>(
2677 ser: S,
2678 err: SerializeError<BufferTooShortError>,
2679 ) where
2680 S::Buffer: ReusableBuffer + Debug,
2681 {
2682 let (e, new_ser) = DummyPacketBuilder::new(2, 2, 0, 1)
2691 .wrap_body(ser.clone())
2692 .serialize_no_alloc_outer()
2693 .unwrap_err();
2694 assert_eq!(err, e);
2695 assert_eq!(new_ser.into_inner(), ser);
2696 }
2697
2698 let body = Buf::new(vec![1, 2], ..);
2699 test_serialization_failure(
2700 TruncatingSerializer::new(body.clone(), TruncateDirection::DiscardFront)
2701 .into_verifying(true),
2702 SerializeError::Alloc(BufferTooShortError),
2703 );
2704 test_serialization_failure(
2705 TruncatingSerializer::new(body.clone(), TruncateDirection::DiscardFront)
2706 .into_verifying(true),
2707 SerializeError::Alloc(BufferTooShortError),
2708 );
2709 test_serialization_failure(
2710 TruncatingSerializer::new(body.clone(), TruncateDirection::NoTruncating)
2711 .into_verifying(false),
2712 SerializeError::SizeLimitExceeded,
2713 );
2714 }
2715
2716 #[test]
2717 fn test_try_reuse_buffer() {
2718 fn test_expect_success(
2719 body_range: Range<usize>,
2720 prefix: usize,
2721 suffix: usize,
2722 max_copy_bytes: usize,
2723 ) {
2724 let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
2725 let buffer = Buf::new(&mut bytes[..], body_range);
2726 let body = buffer.as_ref().to_vec();
2727 let buffer = try_reuse_buffer(buffer, prefix, suffix, max_copy_bytes).unwrap();
2728 assert_eq!(buffer.as_ref(), body.as_slice());
2729 assert!(buffer.prefix_len() >= prefix);
2730 assert!(buffer.suffix_len() >= suffix);
2731 }
2732
2733 fn test_expect_failure(
2734 body_range: Range<usize>,
2735 prefix: usize,
2736 suffix: usize,
2737 max_copy_bytes: usize,
2738 ) {
2739 let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
2740 let buffer = Buf::new(&mut bytes[..], body_range.clone());
2741 let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
2742 let orig = Buf::new(&mut bytes[..], body_range.clone());
2743 let buffer = try_reuse_buffer(buffer, prefix, suffix, max_copy_bytes).unwrap_err();
2744 assert_eq!(buffer, orig);
2745 }
2746
2747 test_expect_success(0..10, 0, 0, 0);
2749 test_expect_success(1..9, 1, 1, 0);
2751 test_expect_success(0..9, 1, 0, 9);
2754 test_expect_success(1..10, 0, 1, 9);
2755 test_expect_failure(0..9, 1, 0, 8);
2757 test_expect_failure(1..10, 0, 1, 8);
2758 }
2759
2760 #[test]
2761 fn test_maybe_reuse_buffer_provider() {
2762 fn test_expect(body_range: Range<usize>, prefix: usize, suffix: usize, expect_a: bool) {
2763 let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
2764 let buffer = Buf::new(&mut bytes[..], body_range);
2765 let body = buffer.as_ref().to_vec();
2766 let buffer = BufferProvider::reuse_or_realloc(
2767 MaybeReuseBufferProvider(new_buf_vec),
2768 buffer,
2769 prefix,
2770 suffix,
2771 )
2772 .unwrap();
2773 match &buffer {
2774 Either::A(_) if expect_a => {}
2775 Either::B(_) if !expect_a => {}
                Either::A(_) => panic!("expected Either::B variant"),
                Either::B(_) => panic!("expected Either::A variant"),
2778 }
2779 let bytes: &[u8] = buffer.as_ref();
2780 assert_eq!(bytes, body.as_slice());
2781 assert!(buffer.prefix_len() >= prefix);
2782 assert!(buffer.suffix_len() >= suffix);
2783 }
2784
2785 fn test_expect_reuse(body_range: Range<usize>, prefix: usize, suffix: usize) {
2787 test_expect(body_range, prefix, suffix, true);
2788 }
2789
2790 fn test_expect_realloc(body_range: Range<usize>, prefix: usize, suffix: usize) {
2792 test_expect(body_range, prefix, suffix, false);
2793 }
2794
2795 test_expect_reuse(0..10, 0, 0);
2797 test_expect_reuse(1..9, 1, 1);
2799 test_expect_reuse(0..9, 1, 0);
2802 test_expect_reuse(1..10, 0, 1);
2803 test_expect_realloc(0..9, 1, 1);
2805 test_expect_realloc(1..10, 1, 1);
2806 }
2807
2808 #[test]
2809 fn test_no_reuse_buffer_provider() {
2810 #[track_caller]
2811 fn test_expect(body_range: Range<usize>, prefix: usize, suffix: usize) {
2812 let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
2813 let internal_buffer: Buf<&mut [u8]> = Buf::new(&mut bytes[..], body_range);
2815 let body = internal_buffer.as_ref().to_vec();
2816 let buffer: Buf<Vec<u8>> = BufferProvider::reuse_or_realloc(
2819 NoReuseBufferProvider(new_buf_vec),
2820 internal_buffer,
2821 prefix,
2822 suffix,
2823 )
2824 .unwrap();
2825 let bytes: &[u8] = buffer.as_ref();
2826 assert_eq!(bytes, body.as_slice());
2827 assert_eq!(buffer.prefix_len(), prefix);
2828 assert_eq!(buffer.suffix_len(), suffix);
2829 }
2830 test_expect(0..10, 0, 0);
2832 test_expect(1..9, 1, 1);
2834 test_expect(0..9, 10, 10);
2836 test_expect(1..10, 15, 15);
2837 }
2838
2839 struct ScatterGatherBuf<B> {
2863 data: Vec<u8>,
2864 mid: usize,
2865 range: Range<usize>,
2866 inner: B,
2867 }
2868
2869 impl<B: BufferMut> FragmentedBuffer for ScatterGatherBuf<B> {
2870 fn len(&self) -> usize {
2871 self.inner.len() + (self.range.end - self.range.start)
2872 }
2873
2874 fn with_bytes<R, F>(&self, f: F) -> R
2875 where
2876 F: for<'a, 'b> FnOnce(FragmentedBytes<'a, 'b>) -> R,
2877 {
2878 let (_, rest) = self.data.split_at(self.range.start);
2879 let (prefix_b, rest) = rest.split_at(self.mid - self.range.start);
2880 let (suffix_b, _) = rest.split_at(self.range.end - self.mid);
2881 let mut bytes = [prefix_b, self.inner.as_ref(), suffix_b];
2882 f(FragmentedBytes::new(&mut bytes[..]))
2883 }
2884 }
2885
2886 impl<B: BufferMut> FragmentedBufferMut for ScatterGatherBuf<B> {
2887 fn with_bytes_mut<R, F>(&mut self, f: F) -> R
2888 where
2889 F: for<'a, 'b> FnOnce(FragmentedBytesMut<'a, 'b>) -> R,
2890 {
2891 let (_, rest) = self.data.split_at_mut(self.range.start);
2892 let (prefix_b, rest) = rest.split_at_mut(self.mid - self.range.start);
2893 let (suffix_b, _) = rest.split_at_mut(self.range.end - self.mid);
2894 let mut bytes = [prefix_b, self.inner.as_mut(), suffix_b];
2895 f(FragmentedBytesMut::new(&mut bytes[..]))
2896 }
2897 }
2898
2899 impl<B: BufferMut> GrowBuffer for ScatterGatherBuf<B> {
2900 fn with_parts<O, F>(&self, f: F) -> O
2901 where
2902 F: for<'a, 'b> FnOnce(&'a [u8], FragmentedBytes<'a, 'b>, &'a [u8]) -> O,
2903 {
2904 let (prefix, rest) = self.data.split_at(self.range.start);
2905 let (prefix_b, rest) = rest.split_at(self.mid - self.range.start);
2906 let (suffix_b, suffix) = rest.split_at(self.range.end - self.mid);
2907 let mut bytes = [prefix_b, self.inner.as_ref(), suffix_b];
2908 f(prefix, bytes.as_fragmented_byte_slice(), suffix)
2909 }
2910 fn prefix_len(&self) -> usize {
2911 self.range.start
2912 }
2913
2914 fn suffix_len(&self) -> usize {
2915 self.data.len() - self.range.end
2916 }
2917
2918 fn grow_front(&mut self, n: usize) {
2919 self.range.start -= n;
2920 }
2921
2922 fn grow_back(&mut self, n: usize) {
2923 self.range.end += n;
2924 assert!(self.range.end <= self.data.len());
2925 }
2926 }
2927
2928 impl<B: BufferMut> GrowBufferMut for ScatterGatherBuf<B> {
2929 fn with_parts_mut<O, F>(&mut self, f: F) -> O
2930 where
2931 F: for<'a, 'b> FnOnce(&'a mut [u8], FragmentedBytesMut<'a, 'b>, &'a mut [u8]) -> O,
2932 {
2933 let (prefix, rest) = self.data.split_at_mut(self.range.start);
2934 let (prefix_b, rest) = rest.split_at_mut(self.mid - self.range.start);
2935 let (suffix_b, suffix) = rest.split_at_mut(self.range.end - self.mid);
2936 let mut bytes = [prefix_b, self.inner.as_mut(), suffix_b];
2937 f(prefix, bytes.as_fragmented_byte_slice(), suffix)
2938 }
2939 }
2940
2941 struct ScatterGatherProvider;
2942
2943 impl<B: BufferMut> BufferProvider<B, ScatterGatherBuf<B>> for ScatterGatherProvider {
2944 type Error = Never;
2945
2946 fn alloc_no_reuse(
2947 self,
2948 _prefix: usize,
2949 _body: usize,
2950 _suffix: usize,
2951 ) -> Result<ScatterGatherBuf<B>, Self::Error> {
2952 unimplemented!("not used in tests")
2953 }
2954
2955 fn reuse_or_realloc(
2956 self,
2957 buffer: B,
2958 prefix: usize,
2959 suffix: usize,
2960 ) -> Result<ScatterGatherBuf<B>, (Self::Error, B)> {
2961 let inner = buffer;
2962 let data = vec![0; prefix + suffix];
2963 let range = Range { start: prefix, end: prefix };
2964 let mid = prefix;
2965 Ok(ScatterGatherBuf { inner, data, range, mid })
2966 }
2967 }
2968
2969 #[test]
2970 fn test_scatter_gather_serialize() {
2971 let buf = Buf::new(vec![10, 20, 30, 40, 50], ..);
2974 let pb = DummyPacketBuilder::new(3, 2, 0, usize::MAX);
2975 let ser = pb.wrap_body(buf);
2976 let result = ser.serialize_outer(ScatterGatherProvider {}).unwrap();
2977 let flattened = result.to_flattened_vec();
2978 assert_eq!(&flattened[..], &[0xFF, 0xFF, 0xFF, 10, 20, 30, 40, 50, 0xFE, 0xFE]);
2979 }
2980}