1use std::cmp;
8use std::convert::Infallible as Never;
9use std::fmt::{self, Debug, Formatter};
10use std::ops::{Range, RangeBounds};
11
12use arrayvec::ArrayVec;
13use zerocopy::SplitByteSlice;
14
15use crate::{
16 canonicalize_range, take_back, take_back_mut, take_front, take_front_mut,
17 AsFragmentedByteSlice, Buffer, BufferView, BufferViewMut, ContiguousBuffer, EmptyBuf,
18 FragmentedBuffer, FragmentedBufferMut, FragmentedBytes, FragmentedBytesMut, GrowBuffer,
19 GrowBufferMut, ParsablePacket, ParseBuffer, ParseBufferMut, ReusableBuffer, ShrinkBuffer,
20};
21
/// A value of one of two possible types, `A` or `B`.
///
/// The buffer traits in this crate are implemented for `Either<A, B>` by
/// delegating each call to whichever variant is present, so an `Either` of
/// two buffer types can itself be used as a buffer.
#[derive(Copy, Clone, Debug)]
pub enum Either<A, B> {
    A(A),
    B(B),
}
32
33impl<A, B> Either<A, B> {
34 pub fn map_a<AA, F: FnOnce(A) -> AA>(self, f: F) -> Either<AA, B> {
40 match self {
41 Either::A(a) => Either::A(f(a)),
42 Either::B(b) => Either::B(b),
43 }
44 }
45
46 pub fn map_b<BB, F: FnOnce(B) -> BB>(self, f: F) -> Either<A, BB> {
52 match self {
53 Either::A(a) => Either::A(a),
54 Either::B(b) => Either::B(f(b)),
55 }
56 }
57
58 pub fn unwrap_a(self) -> A {
64 match self {
65 Either::A(x) => x,
66 Either::B(_) => panic!("This `Either<A, B>` does not hold the `A` variant"),
67 }
68 }
69
70 pub fn unwrap_b(self) -> B {
76 match self {
77 Either::A(_) => panic!("This `Either<A, B>` does not hold the `B` variant"),
78 Either::B(x) => x,
79 }
80 }
81}
82
83impl<A> Either<A, A> {
84 pub fn into_inner(self) -> A {
87 match self {
88 Either::A(x) => x,
89 Either::B(x) => x,
90 }
91 }
92}
93
impl<A> Either<A, Never> {
    /// Extracts the `A` value.
    ///
    /// Infallible: `Never` (`Infallible`) is uninhabited, so only the `A`
    /// variant can exist and the match needs no `B` arm.
    #[inline]
    pub fn into_a(self) -> A {
        match self {
            Either::A(a) => a,
        }
    }
}

impl<B> Either<Never, B> {
    /// Extracts the `B` value.
    ///
    /// Infallible: `Never` is uninhabited, so only the `B` variant can exist.
    #[inline]
    pub fn into_b(self) -> B {
        match self {
            Either::B(b) => b,
        }
    }
}

// Delegates a method call to whichever variant of an `Either` is present.
// The second rule handles zero-argument calls by forwarding to the first.
macro_rules! call_method_on_either {
    ($val:expr, $method:ident, $($args:expr),*) => {
        match $val {
            Either::A(a) => a.$method($($args),*),
            Either::B(b) => b.$method($($args),*),
        }
    };
    ($val:expr, $method:ident) => {
        call_method_on_either!($val, $method,)
    };
}
125
// All of the buffer trait implementations below simply delegate to the
// active variant via `call_method_on_either!`.

impl<A, B> FragmentedBuffer for Either<A, B>
where
    A: FragmentedBuffer,
    B: FragmentedBuffer,
{
    fn len(&self) -> usize {
        call_method_on_either!(self, len)
    }

    fn with_bytes<R, F>(&self, f: F) -> R
    where
        F: for<'a, 'b> FnOnce(FragmentedBytes<'a, 'b>) -> R,
    {
        call_method_on_either!(self, with_bytes, f)
    }
}

// Marker trait: an `Either` of two contiguous buffers is itself contiguous.
impl<A, B> ContiguousBuffer for Either<A, B>
where
    A: ContiguousBuffer,
    B: ContiguousBuffer,
{
}

impl<A, B> ShrinkBuffer for Either<A, B>
where
    A: ShrinkBuffer,
    B: ShrinkBuffer,
{
    fn shrink<R: RangeBounds<usize>>(&mut self, range: R) {
        call_method_on_either!(self, shrink, range)
    }
    fn shrink_front(&mut self, n: usize) {
        call_method_on_either!(self, shrink_front, n)
    }
    fn shrink_back(&mut self, n: usize) {
        call_method_on_either!(self, shrink_back, n)
    }
}

impl<A, B> ParseBuffer for Either<A, B>
where
    A: ParseBuffer,
    B: ParseBuffer,
{
    fn parse<'a, P: ParsablePacket<&'a [u8], ()>>(&'a mut self) -> Result<P, P::Error> {
        call_method_on_either!(self, parse)
    }
    fn parse_with<'a, ParseArgs, P: ParsablePacket<&'a [u8], ParseArgs>>(
        &'a mut self,
        args: ParseArgs,
    ) -> Result<P, P::Error> {
        call_method_on_either!(self, parse_with, args)
    }
}

impl<A, B> FragmentedBufferMut for Either<A, B>
where
    A: FragmentedBufferMut,
    B: FragmentedBufferMut,
{
    fn with_bytes_mut<R, F>(&mut self, f: F) -> R
    where
        F: for<'a, 'b> FnOnce(FragmentedBytesMut<'a, 'b>) -> R,
    {
        call_method_on_either!(self, with_bytes_mut, f)
    }
}

impl<A, B> ParseBufferMut for Either<A, B>
where
    A: ParseBufferMut,
    B: ParseBufferMut,
{
    fn parse_mut<'a, P: ParsablePacket<&'a mut [u8], ()>>(&'a mut self) -> Result<P, P::Error> {
        call_method_on_either!(self, parse_mut)
    }
    fn parse_with_mut<'a, ParseArgs, P: ParsablePacket<&'a mut [u8], ParseArgs>>(
        &'a mut self,
        args: ParseArgs,
    ) -> Result<P, P::Error> {
        call_method_on_either!(self, parse_with_mut, args)
    }
}

impl<A, B> GrowBuffer for Either<A, B>
where
    A: GrowBuffer,
    B: GrowBuffer,
{
    #[inline]
    fn with_parts<O, F>(&self, f: F) -> O
    where
        F: for<'a, 'b> FnOnce(&'a [u8], FragmentedBytes<'a, 'b>, &'a [u8]) -> O,
    {
        call_method_on_either!(self, with_parts, f)
    }
    fn capacity(&self) -> usize {
        call_method_on_either!(self, capacity)
    }
    fn prefix_len(&self) -> usize {
        call_method_on_either!(self, prefix_len)
    }
    fn suffix_len(&self) -> usize {
        call_method_on_either!(self, suffix_len)
    }
    fn grow_front(&mut self, n: usize) {
        call_method_on_either!(self, grow_front, n)
    }
    fn grow_back(&mut self, n: usize) {
        call_method_on_either!(self, grow_back, n)
    }
    fn reset(&mut self) {
        call_method_on_either!(self, reset)
    }
}

impl<A, B> GrowBufferMut for Either<A, B>
where
    A: GrowBufferMut,
    B: GrowBufferMut,
{
    fn with_parts_mut<O, F>(&mut self, f: F) -> O
    where
        F: for<'a, 'b> FnOnce(&'a mut [u8], FragmentedBytesMut<'a, 'b>, &'a mut [u8]) -> O,
    {
        call_method_on_either!(self, with_parts_mut, f)
    }

    fn serialize<BB: PacketBuilder>(&mut self, builder: BB) {
        call_method_on_either!(self, serialize, builder)
    }
}

impl<A, B> Buffer for Either<A, B>
where
    A: Buffer,
    B: Buffer,
{
    fn parse_with_view<'a, ParseArgs, P: ParsablePacket<&'a [u8], ParseArgs>>(
        &'a mut self,
        args: ParseArgs,
    ) -> Result<(P, &'a [u8]), P::Error> {
        call_method_on_either!(self, parse_with_view, args)
    }
}

impl<A: AsRef<[u8]>, B: AsRef<[u8]>> AsRef<[u8]> for Either<A, B> {
    fn as_ref(&self) -> &[u8] {
        call_method_on_either!(self, as_ref)
    }
}

impl<A: AsMut<[u8]>, B: AsMut<[u8]>> AsMut<[u8]> for Either<A, B> {
    fn as_mut(&mut self) -> &mut [u8] {
        call_method_on_either!(self, as_mut)
    }
}
290
/// A buffer backed by a contiguous byte container, `B`.
///
/// `body` is the sub-range of `buf` currently considered the body: bytes
/// before `body.start` form the prefix and bytes at and after `body.end`
/// form the suffix (see the `GrowBuffer` impl below).
#[derive(Clone, Debug)]
pub struct Buf<B> {
    // The backing storage.
    buf: B,
    // The current body range within `buf`.
    body: Range<usize>,
}
301
302impl<B: AsRef<[u8]>> PartialEq for Buf<B> {
303 fn eq(&self, other: &Self) -> bool {
304 let self_slice = AsRef::<[u8]>::as_ref(self);
305 let other_slice = AsRef::<[u8]>::as_ref(other);
306 PartialEq::eq(self_slice, other_slice)
307 }
308}
309
310impl<B: AsRef<[u8]>> Eq for Buf<B> {}
311
312impl Buf<Vec<u8>> {
313 pub fn into_inner(self) -> Vec<u8> {
315 let Buf { mut buf, body } = self;
316 let len = body.end - body.start;
317 let _ = buf.drain(..body.start);
318 buf.truncate(len);
319 buf
320 }
321}
322
impl<B: AsRef<[u8]>> Buf<B> {
    /// Constructs a `Buf` from backing storage and a body range.
    ///
    /// `body` is canonicalized against the storage length via
    /// `canonicalize_range`; unbounded ends resolve to the storage bounds.
    pub fn new<R: RangeBounds<usize>>(buf: B, body: R) -> Buf<B> {
        let len = buf.as_ref().len();
        Buf { buf, body: canonicalize_range(len, &body) }
    }

    /// Returns a `BufView` of the current body that also holds a mutable
    /// reference to `self.body`, so consuming bytes through the view updates
    /// this `Buf`'s body range.
    pub fn buffer_view(&mut self) -> BufView<'_> {
        BufView { buf: &self.buf.as_ref()[self.body.clone()], body: &mut self.body }
    }
}

impl<B: AsRef<[u8]> + AsMut<[u8]>> Buf<B> {
    /// Like `buffer_view`, but the returned view provides mutable access to
    /// the body bytes.
    pub fn buffer_view_mut(&mut self) -> BufViewMut<'_> {
        BufViewMut { buf: &mut self.buf.as_mut()[self.body.clone()], body: &mut self.body }
    }
}
351
// `Buf` is contiguous, so the fragmented-buffer methods are provided by the
// crate's shared macro impls.
impl<B: AsRef<[u8]>> FragmentedBuffer for Buf<B> {
    fragmented_buffer_method_impls!();
}
impl<B: AsRef<[u8]>> ContiguousBuffer for Buf<B> {}
impl<B: AsRef<[u8]>> ShrinkBuffer for Buf<B> {
    fn shrink<R: RangeBounds<usize>>(&mut self, range: R) {
        let len = self.len();
        // `range` is relative to the current body, so canonicalize against
        // the body length and then translate into storage coordinates.
        let mut range = canonicalize_range(len, &range);
        range.start += self.body.start;
        range.end += self.body.start;
        self.body = range;
    }

    fn shrink_front(&mut self, n: usize) {
        // Panics if `n` exceeds the body length.
        assert!(n <= self.len());
        self.body.start += n;
    }
    fn shrink_back(&mut self, n: usize) {
        // Panics if `n` exceeds the body length.
        assert!(n <= self.len());
        self.body.end -= n;
    }
}
impl<B: AsRef<[u8]>> ParseBuffer for Buf<B> {
    fn parse_with<'a, ParseArgs, P: ParsablePacket<&'a [u8], ParseArgs>>(
        &'a mut self,
        args: ParseArgs,
    ) -> Result<P, P::Error> {
        // The view shares `self.body`, so bytes consumed during parsing
        // shrink this buffer's body.
        P::parse(self.buffer_view(), args)
    }
}

impl<B: AsRef<[u8]> + AsMut<[u8]>> FragmentedBufferMut for Buf<B> {
    fragmented_buffer_mut_method_impls!();
}

impl<B: AsRef<[u8]> + AsMut<[u8]>> ParseBufferMut for Buf<B> {
    fn parse_with_mut<'a, ParseArgs, P: ParsablePacket<&'a mut [u8], ParseArgs>>(
        &'a mut self,
        args: ParseArgs,
    ) -> Result<P, P::Error> {
        P::parse_mut(self.buffer_view_mut(), args)
    }
}
395
impl<B: AsRef<[u8]>> GrowBuffer for Buf<B> {
    fn with_parts<O, F>(&self, f: F) -> O
    where
        F: for<'a, 'b> FnOnce(&'a [u8], FragmentedBytes<'a, 'b>, &'a [u8]) -> O,
    {
        // Split the storage into prefix / body / suffix around `self.body`.
        let (prefix, buf) = self.buf.as_ref().split_at(self.body.start);
        let (body, suffix) = buf.split_at(self.body.end - self.body.start);
        let mut body = [&body[..]];
        f(prefix, body.as_fragmented_byte_slice(), suffix)
    }
    fn capacity(&self) -> usize {
        self.buf.as_ref().len()
    }
    fn prefix_len(&self) -> usize {
        self.body.start
    }
    fn suffix_len(&self) -> usize {
        self.buf.as_ref().len() - self.body.end
    }
    fn grow_front(&mut self, n: usize) {
        // Panics if there are fewer than `n` prefix bytes available.
        assert!(n <= self.body.start);
        self.body.start -= n;
    }
    fn grow_back(&mut self, n: usize) {
        // Panics if there are fewer than `n` suffix bytes available.
        assert!(n <= self.buf.as_ref().len() - self.body.end);
        self.body.end += n;
    }
}

impl<B: AsRef<[u8]> + AsMut<[u8]>> GrowBufferMut for Buf<B> {
    fn with_parts_mut<O, F>(&mut self, f: F) -> O
    where
        F: for<'a, 'b> FnOnce(&'a mut [u8], FragmentedBytesMut<'a, 'b>, &'a mut [u8]) -> O,
    {
        let (prefix, buf) = self.buf.as_mut().split_at_mut(self.body.start);
        let (body, suffix) = buf.split_at_mut(self.body.end - self.body.start);
        let mut body = [&mut body[..]];
        f(prefix, body.as_fragmented_byte_slice(), suffix)
    }
}
436
437impl<B: AsRef<[u8]>> AsRef<[u8]> for Buf<B> {
438 fn as_ref(&self) -> &[u8] {
439 &self.buf.as_ref()[self.body.clone()]
440 }
441}
442
443impl<B: AsMut<[u8]>> AsMut<[u8]> for Buf<B> {
444 fn as_mut(&mut self) -> &mut [u8] {
445 &mut self.buf.as_mut()[self.body.clone()]
446 }
447}
448
impl<B: AsRef<[u8]>> Buffer for Buf<B> {
    fn parse_with_view<'a, ParseArgs, P: ParsablePacket<&'a [u8], ParseArgs>>(
        &'a mut self,
        args: ParseArgs,
    ) -> Result<(P, &'a [u8]), P::Error> {
        let Self { body, ref buf } = self;
        // Remember the pre-parse body range so the caller gets a view of the
        // bytes as they were before parsing consumed any of them.
        let body_before = body.clone();
        let view = BufView { buf: &buf.as_ref()[body.clone()], body };
        P::parse(view, args).map(|r| (r, &buf.as_ref()[body_before]))
    }
}
460
/// An immutable view into a `Buf`'s body.
///
/// Holds a mutable reference to the parent `Buf`'s body range so that bytes
/// consumed through the view shrink the parent's body accordingly.
pub struct BufView<'a> {
    // The (remaining) body bytes.
    buf: &'a [u8],
    // The parent `Buf`'s body range, kept in sync as bytes are taken.
    body: &'a mut Range<usize>,
}

impl<'a> BufferView<&'a [u8]> for BufView<'a> {
    fn take_front(&mut self, n: usize) -> Option<&'a [u8]> {
        if self.len() < n {
            return None;
        }
        // Advance the parent's body start to mirror the bytes removed here.
        self.body.start += n;
        Some(take_front(&mut self.buf, n))
    }

    fn take_back(&mut self, n: usize) -> Option<&'a [u8]> {
        if self.len() < n {
            return None;
        }
        self.body.end -= n;
        Some(take_back(&mut self.buf, n))
    }

    fn into_rest(self) -> &'a [u8] {
        self.buf
    }
}

impl<'a> AsRef<[u8]> for BufView<'a> {
    fn as_ref(&self) -> &[u8] {
        self.buf
    }
}
497
/// A mutable view into a `Buf`'s body; the mutable counterpart of `BufView`.
pub struct BufViewMut<'a> {
    // The (remaining) body bytes, mutably borrowed.
    buf: &'a mut [u8],
    // The parent `Buf`'s body range, kept in sync as bytes are taken.
    body: &'a mut Range<usize>,
}

impl<'a> BufferView<&'a mut [u8]> for BufViewMut<'a> {
    fn take_front(&mut self, n: usize) -> Option<&'a mut [u8]> {
        if self.len() < n {
            return None;
        }
        // Advance the parent's body start to mirror the bytes removed here.
        self.body.start += n;
        Some(take_front_mut(&mut self.buf, n))
    }

    fn take_back(&mut self, n: usize) -> Option<&'a mut [u8]> {
        if self.len() < n {
            return None;
        }
        self.body.end -= n;
        Some(take_back_mut(&mut self.buf, n))
    }

    fn into_rest(self) -> &'a mut [u8] {
        self.buf
    }
}

impl<'a> BufferViewMut<&'a mut [u8]> for BufViewMut<'a> {}

impl<'a> AsRef<[u8]> for BufViewMut<'a> {
    fn as_ref(&self) -> &[u8] {
        self.buf
    }
}

impl<'a> AsMut<[u8]> for BufViewMut<'a> {
    fn as_mut(&mut self) -> &mut [u8] {
        self.buf
    }
}
543
/// The constraints a packet layer imposes on serialization.
///
/// Invariants (enforced by `try_new`): `min_body_len <= max_body_len`, and
/// `header_len + min_body_len + footer_len` does not overflow `usize`.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct PacketConstraints {
    header_len: usize,
    footer_len: usize,
    min_body_len: usize,
    max_body_len: usize,
}
564
impl PacketConstraints {
    /// Constraints that impose nothing: no header or footer, no minimum body
    /// length, and the maximum possible body length.
    pub const UNCONSTRAINED: Self =
        Self { header_len: 0, footer_len: 0, min_body_len: 0, max_body_len: usize::MAX };

    /// Constructs a new `PacketConstraints`.
    ///
    /// # Panics
    ///
    /// Panics under the same conditions for which `try_new` returns `None`.
    #[inline]
    pub fn new(
        header_len: usize,
        footer_len: usize,
        min_body_len: usize,
        max_body_len: usize,
    ) -> PacketConstraints {
        PacketConstraints::try_new(header_len, footer_len, min_body_len, max_body_len).expect(
            "max_body_len < min_body_len or header_len + min_body_len + footer_len overflows usize",
        )
    }

    /// Constructs a new `PacketConstraints`, returning `None` if
    /// `max_body_len < min_body_len` or if
    /// `header_len + min_body_len + footer_len` overflows `usize`.
    #[inline]
    pub fn try_new(
        header_len: usize,
        footer_len: usize,
        min_body_len: usize,
        max_body_len: usize,
    ) -> Option<PacketConstraints> {
        let header_min_body_footer_overflows = header_len
            .checked_add(min_body_len)
            .and_then(|sum| sum.checked_add(footer_len))
            .is_none();
        let max_less_than_min = max_body_len < min_body_len;
        if max_less_than_min || header_min_body_footer_overflows {
            return None;
        }
        Some(PacketConstraints { header_len, footer_len, min_body_len, max_body_len })
    }

    /// Constructs constraints that only bound the maximum body length.
    #[inline]
    pub fn with_max_body_len(max_body_len: usize) -> PacketConstraints {
        // Always valid: 0 <= max_body_len and 0 + 0 + 0 cannot overflow.
        PacketConstraints { header_len: 0, footer_len: 0, min_body_len: 0, max_body_len }
    }

    /// The number of header bytes this layer requires.
    #[inline]
    pub fn header_len(&self) -> usize {
        self.header_len
    }

    /// The number of footer bytes this layer requires.
    #[inline]
    pub fn footer_len(&self) -> usize {
        self.footer_len
    }

    /// The minimum body length this layer requires.
    #[inline]
    pub fn min_body_len(&self) -> usize {
        self.min_body_len
    }

    /// The maximum body length this layer allows.
    #[inline]
    pub fn max_body_len(&self) -> usize {
        self.max_body_len
    }

    /// Combines these constraints (`self`, the inner layer) with an outer
    /// layer's constraints, returning `None` if the combination is
    /// unsatisfiable or overflows.
    pub fn try_encapsulate(&self, outer: &Self) -> Option<PacketConstraints> {
        let inner = self;
        let header_len = inner.header_len.checked_add(outer.header_len)?;
        let footer_len = inner.footer_len.checked_add(outer.footer_len)?;
        // Cannot overflow: `try_new` guarantees that
        // header_len + min_body_len + footer_len fits in a usize, and this
        // sum is no greater than that.
        let inner_header_footer_len = inner.header_len + inner.footer_len;
        // The combined minimum body must satisfy both the inner minimum and
        // the outer minimum (the inner header/footer count towards the
        // outer body).
        let min_body_len = cmp::max(
            outer.min_body_len.saturating_sub(inner_header_footer_len),
            inner.min_body_len,
        );
        // The combined maximum body is limited by both layers; `None` if the
        // inner header/footer alone exceed the outer maximum.
        let max_body_len =
            cmp::min(outer.max_body_len.checked_sub(inner_header_footer_len)?, inner.max_body_len);
        PacketConstraints::try_new(header_len, footer_len, min_body_len, max_body_len)
    }
}
705
/// The header and footer byte ranges a `PacketBuilder` writes into during
/// serialization.
pub struct SerializeTarget<'a> {
    #[allow(missing_docs)]
    pub header: &'a mut [u8],
    #[allow(missing_docs)]
    pub footer: &'a mut [u8],
}

/// A builder capable of serializing one packet layer's header and footer.
pub trait PacketBuilder {
    /// The constraints this layer imposes on serialization.
    fn constraints(&self) -> PacketConstraints;

    /// Serializes the header and footer into `target`, given the
    /// already-serialized `body`.
    fn serialize(&self, target: &mut SerializeTarget<'_>, body: FragmentedBytesMut<'_, '_>);
}
765
// Blanket impls so builders can be passed by (mutable) reference.
impl<'a, B: PacketBuilder> PacketBuilder for &'a B {
    #[inline]
    fn constraints(&self) -> PacketConstraints {
        B::constraints(self)
    }
    #[inline]
    fn serialize(&self, target: &mut SerializeTarget<'_>, body: FragmentedBytesMut<'_, '_>) {
        B::serialize(self, target, body)
    }
}

impl<'a, B: PacketBuilder> PacketBuilder for &'a mut B {
    #[inline]
    fn constraints(&self) -> PacketConstraints {
        B::constraints(self)
    }
    #[inline]
    fn serialize(&self, target: &mut SerializeTarget<'_>, body: FragmentedBytesMut<'_, '_>) {
        B::serialize(self, target, body)
    }
}

// The unit builder imposes no constraints and writes nothing.
impl PacketBuilder for () {
    #[inline]
    fn constraints(&self) -> PacketConstraints {
        PacketConstraints::UNCONSTRAINED
    }
    #[inline]
    fn serialize(&self, _target: &mut SerializeTarget<'_>, _body: FragmentedBytesMut<'_, '_>) {}
}

// `Never` is uninhabited, so these methods can never actually be called;
// `match *self {}` makes `constraints` trivially total.
impl PacketBuilder for Never {
    fn constraints(&self) -> PacketConstraints {
        match *self {}
    }
    fn serialize(&self, _target: &mut SerializeTarget<'_>, _body: FragmentedBytesMut<'_, '_>) {}
}
803
/// An inner value encapsulated inside an outer one — typically a serializer
/// or buffer (`inner`) wrapped by a `PacketBuilder` (`outer`); produced by
/// `Serializer::encapsulate`.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct Nested<I, O> {
    inner: I,
    outer: O,
}

impl<I, O> Nested<I, O> {
    /// Consumes this `Nested`, returning the inner value.
    #[inline]
    pub fn into_inner(self) -> I {
        self.inner
    }

    /// Consumes this `Nested`, returning the outer value.
    #[inline]
    pub fn into_outer(self) -> O {
        self.outer
    }

    /// A reference to the inner value.
    #[inline]
    pub fn inner(&self) -> &I {
        &self.inner
    }

    /// A mutable reference to the inner value.
    #[inline]
    pub fn inner_mut(&mut self) -> &mut I {
        &mut self.inner
    }

    /// A reference to the outer value.
    #[inline]
    pub fn outer(&self) -> &O {
        &self.outer
    }

    /// A mutable reference to the outer value.
    #[inline]
    pub fn outer_mut(&mut self) -> &mut O {
        &mut self.outer
    }
}
853
/// A `PacketBuilder` that adds no header or footer but limits the maximum
/// body length; used by `Serializer::with_size_limit`.
#[derive(Copy, Clone, Debug)]
#[cfg_attr(test, derive(Eq, PartialEq))]
pub struct LimitedSizePacketBuilder {
    /// The maximum body length to allow.
    pub limit: usize,
}

impl PacketBuilder for LimitedSizePacketBuilder {
    fn constraints(&self) -> PacketConstraints {
        PacketConstraints::with_max_body_len(self.limit)
    }

    // Nothing to write: this builder only constrains, it has no bytes.
    fn serialize(&self, _target: &mut SerializeTarget<'_>, _body: FragmentedBytesMut<'_, '_>) {}
}
873
/// A builder for the innermost layer of a packet — a payload with no header,
/// footer, or body of its own.
pub trait InnerPacketBuilder {
    /// The number of bytes this payload consumes.
    fn bytes_len(&self) -> usize;

    /// Serializes the payload into `buffer`, which has length `bytes_len()`.
    fn serialize(&self, buffer: &mut [u8]);

    /// Converts this builder into a `Serializer` backed by an `EmptyBuf`.
    #[inline]
    fn into_serializer(self) -> InnerSerializer<Self, EmptyBuf>
    where
        Self: Sized,
    {
        self.into_serializer_with(EmptyBuf)
    }

    /// Converts this builder into a `Serializer`, using `buffer` as the
    /// backing buffer; the buffer's body is first shrunk to zero length.
    fn into_serializer_with<B: ShrinkBuffer>(self, mut buffer: B) -> InnerSerializer<Self, B>
    where
        Self: Sized,
    {
        buffer.shrink_back_to(0);
        InnerSerializer { inner: self, buffer }
    }
}
941
// Blanket impls so inner packet builders can be passed by reference.
impl<'a, I: InnerPacketBuilder> InnerPacketBuilder for &'a I {
    #[inline]
    fn bytes_len(&self) -> usize {
        I::bytes_len(self)
    }
    #[inline]
    fn serialize(&self, buffer: &mut [u8]) {
        I::serialize(self, buffer)
    }
}
impl<'a, I: InnerPacketBuilder> InnerPacketBuilder for &'a mut I {
    #[inline]
    fn bytes_len(&self) -> usize {
        I::bytes_len(self)
    }
    #[inline]
    fn serialize(&self, buffer: &mut [u8]) {
        I::serialize(self, buffer)
    }
}
// Byte slices serialize as themselves; `copy_from_slice` panics if the
// provided buffer's length differs from the slice's.
impl<'a> InnerPacketBuilder for &'a [u8] {
    #[inline]
    fn bytes_len(&self) -> usize {
        self.len()
    }
    #[inline]
    fn serialize(&self, buffer: &mut [u8]) {
        buffer.copy_from_slice(self);
    }
}
impl<'a> InnerPacketBuilder for &'a mut [u8] {
    #[inline]
    fn bytes_len(&self) -> usize {
        self.len()
    }
    #[inline]
    fn serialize(&self, buffer: &mut [u8]) {
        buffer.copy_from_slice(self);
    }
}
982impl<'a> InnerPacketBuilder for Vec<u8> {
983 #[inline]
984 fn bytes_len(&self) -> usize {
985 self.len()
986 }
987 #[inline]
988 fn serialize(&self, buffer: &mut [u8]) {
989 buffer.copy_from_slice(self.as_slice());
990 }
991}
// `ArrayVec<u8, N>` delegates to the `&[u8]` impl via `as_slice`.
impl<const N: usize> InnerPacketBuilder for ArrayVec<u8, N> {
    fn bytes_len(&self) -> usize {
        self.as_slice().bytes_len()
    }
    fn serialize(&self, buffer: &mut [u8]) {
        self.as_slice().serialize(buffer);
    }
}
1000
/// An `InnerPacketBuilder` wrapper for any `SplitByteSlice` type, delegating
/// to the `&[u8]` impl via deref.
pub struct ByteSliceInnerPacketBuilder<B>(pub B);

impl<B: SplitByteSlice> InnerPacketBuilder for ByteSliceInnerPacketBuilder<B> {
    // NOTE(review): `.deref()` is called as a method, which requires the
    // `Deref` trait to be in scope — presumably imported elsewhere in this
    // file's use block; confirm.
    fn bytes_len(&self) -> usize {
        self.0.deref().bytes_len()
    }
    fn serialize(&self, buffer: &mut [u8]) {
        self.0.deref().serialize(buffer)
    }
}

impl<B: SplitByteSlice> Debug for ByteSliceInnerPacketBuilder<B> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        write!(f, "ByteSliceInnerPacketBuilder({:?})", self.0.as_ref())
    }
}
1023
/// An error encountered while serializing.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum SerializeError<A> {
    /// A buffer could not be allocated; carries the allocator's error.
    Alloc(A),
    /// The body exceeded the layer's maximum body length.
    SizeLimitExceeded,
}
1037
1038impl<A> SerializeError<A> {
1039 #[inline]
1041 pub fn is_alloc(&self) -> bool {
1042 match self {
1043 SerializeError::Alloc(_) => true,
1044 SerializeError::SizeLimitExceeded => false,
1045 }
1046 }
1047
1048 #[inline]
1050 pub fn is_size_limit_exceeded(&self) -> bool {
1051 match self {
1052 SerializeError::Alloc(_) => false,
1053 SerializeError::SizeLimitExceeded => true,
1054 }
1055 }
1056
1057 pub fn map_alloc<T, F: FnOnce(A) -> T>(self, f: F) -> SerializeError<T> {
1059 match self {
1060 SerializeError::Alloc(a) => SerializeError::Alloc(f(a)),
1061 SerializeError::SizeLimitExceeded => SerializeError::SizeLimitExceeded,
1062 }
1063 }
1064}
1065
// Any allocator error converts into the `Alloc` variant.
impl<A> From<A> for SerializeError<A> {
    fn from(a: A) -> SerializeError<A> {
        SerializeError::Alloc(a)
    }
}

/// The error returned when a fixed buffer is too short to serialize into
/// and no reallocation is possible.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct BufferTooShortError;
1082
/// A provider of output buffers for serialization, either by reusing an
/// existing `Input` buffer or by allocating a new `Output` buffer.
pub trait BufferProvider<Input, Output> {
    /// The error returned when a buffer cannot be produced.
    type Error;

    /// Allocates a new buffer with the given prefix, body, and suffix
    /// lengths, without attempting to reuse an existing buffer.
    fn alloc_no_reuse(
        self,
        prefix: usize,
        body: usize,
        suffix: usize,
    ) -> Result<Output, Self::Error>;

    /// Produces an output buffer with at least `prefix` prefix bytes and
    /// `suffix` suffix bytes, preserving `buffer`'s body — either by reusing
    /// `buffer` or by allocating anew. On failure, `buffer` is returned
    /// alongside the error.
    fn reuse_or_realloc(
        self,
        buffer: Input,
        prefix: usize,
        suffix: usize,
    ) -> Result<Output, (Self::Error, Input)>;
}

/// An allocator of `Output` buffers of a requested total length.
pub trait BufferAlloc<Output> {
    /// The error returned when allocation fails.
    type Error;

    /// Allocates a buffer of length `len`.
    fn alloc(self, len: usize) -> Result<Output, Self::Error>;
}

// Any one-shot closure from a length to a buffer is an allocator.
impl<O, E, F: FnOnce(usize) -> Result<O, E>> BufferAlloc<O> for F {
    type Error = E;

    #[inline]
    fn alloc(self, len: usize) -> Result<O, E> {
        self(len)
    }
}

// The unit allocator always fails; used to forbid allocation (see
// `Serializer::serialize_no_alloc`).
impl BufferAlloc<Never> for () {
    type Error = ();

    #[inline]
    fn alloc(self, _len: usize) -> Result<Never, ()> {
        Err(())
    }
}
1188
/// Allocates a zero-initialized `Buf<Vec<u8>>` of length `len` whose body is
/// the whole buffer; infallible (the error type is `Never`).
pub fn new_buf_vec(len: usize) -> Result<Buf<Vec<u8>>, Never> {
    Ok(Buf::new(vec![0; len], ..))
}
1202
/// Attempts to reuse `buffer` so that it has at least `prefix` prefix bytes
/// and `suffix` suffix bytes while preserving its body.
///
/// Succeeds immediately if the existing prefix and suffix are already large
/// enough. Otherwise, if total capacity suffices and the body is at most
/// `max_copy_bytes` long, the body is moved within the buffer to make room.
/// Returns `Err(buffer)` (unmodified) if neither works.
#[inline]
pub fn try_reuse_buffer<B: GrowBufferMut + ShrinkBuffer>(
    mut buffer: B,
    prefix: usize,
    suffix: usize,
    max_copy_bytes: usize,
) -> Result<B, B> {
    let need_prefix = prefix;
    let need_suffix = suffix;
    let have_prefix = buffer.prefix_len();
    let have_body = buffer.len();
    let have_suffix = buffer.suffix_len();
    let need_capacity = need_prefix + have_body + need_suffix;

    if have_prefix >= need_prefix && have_suffix >= need_suffix {
        // Already enough room on both ends; no work needed.
        Ok(buffer)
    } else if buffer.capacity() >= need_capacity && have_body <= max_copy_bytes {
        // Expose the whole capacity so the body can be repositioned.
        buffer.reset();

        // Move the body so that exactly `need_prefix` bytes precede it, then
        // shrink the buffer back down to just the body.
        buffer.copy_within(have_prefix..(have_prefix + have_body), need_prefix);
        buffer.shrink(need_prefix..(need_prefix + have_body));
        debug_assert_eq!(buffer.prefix_len(), need_prefix);
        debug_assert!(buffer.suffix_len() >= need_suffix);
        debug_assert_eq!(buffer.len(), have_body);
        Ok(buffer)
    } else {
        Err(buffer)
    }
}
1262
/// A `BufferProvider` that reuses the input buffer when possible and falls
/// back to allocating with `A` otherwise.
pub struct MaybeReuseBufferProvider<A>(pub A);

impl<I: ReusableBuffer, O: ReusableBuffer, A: BufferAlloc<O>> BufferProvider<I, Either<I, O>>
    for MaybeReuseBufferProvider<A>
{
    type Error = A::Error;

    fn alloc_no_reuse(
        self,
        prefix: usize,
        body: usize,
        suffix: usize,
    ) -> Result<Either<I, O>, Self::Error> {
        let Self(alloc) = self;
        let need_capacity = prefix + body + suffix;
        BufferAlloc::alloc(alloc, need_capacity).map(|mut buf| {
            // Position the body range; prefix and suffix flank it.
            buf.shrink(prefix..(prefix + body));
            Either::B(buf)
        })
    }

    #[inline]
    fn reuse_or_realloc(
        self,
        buffer: I,
        need_prefix: usize,
        need_suffix: usize,
    ) -> Result<Either<I, O>, (A::Error, I)> {
        // `usize::MAX` as the copy limit: always willing to move the body
        // within the existing buffer rather than allocate.
        match try_reuse_buffer(buffer, need_prefix, need_suffix, usize::MAX) {
            Ok(buffer) => Ok(Either::A(buffer)),
            Err(buffer) => {
                // Reuse failed; allocate a fresh buffer and copy the body in.
                let have_body = buffer.len();
                let mut buf = match BufferProvider::<I, Either<I, O>>::alloc_no_reuse(
                    self,
                    need_prefix,
                    have_body,
                    need_suffix,
                ) {
                    Ok(buf) => buf,
                    Err(err) => return Err((err, buffer)),
                };

                buf.copy_from(&buffer);
                debug_assert_eq!(buf.prefix_len(), need_prefix);
                debug_assert!(buf.suffix_len() >= need_suffix);
                debug_assert_eq!(buf.len(), have_body);
                Ok(buf)
            }
        }
    }
}

// When input and output buffer types coincide, collapse the `Either`.
impl<B: ReusableBuffer, A: BufferAlloc<B>> BufferProvider<B, B> for MaybeReuseBufferProvider<A> {
    type Error = A::Error;

    fn alloc_no_reuse(self, prefix: usize, body: usize, suffix: usize) -> Result<B, Self::Error> {
        BufferProvider::<B, Either<B, B>>::alloc_no_reuse(self, prefix, body, suffix)
            .map(Either::into_inner)
    }

    #[inline]
    fn reuse_or_realloc(self, buffer: B, prefix: usize, suffix: usize) -> Result<B, (A::Error, B)> {
        BufferProvider::<B, Either<B, B>>::reuse_or_realloc(self, buffer, prefix, suffix)
            .map(Either::into_inner)
    }
}
1352
/// A `BufferProvider` that always allocates a fresh buffer with `A`, never
/// reusing the input buffer.
pub struct NoReuseBufferProvider<A>(pub A);

impl<I: FragmentedBuffer, O: ReusableBuffer, A: BufferAlloc<O>> BufferProvider<I, O>
    for NoReuseBufferProvider<A>
{
    type Error = A::Error;

    fn alloc_no_reuse(self, prefix: usize, body: usize, suffix: usize) -> Result<O, A::Error> {
        let Self(alloc) = self;
        alloc.alloc(prefix + body + suffix).map(|mut b| {
            // Position the body range; prefix and suffix flank it.
            b.shrink(prefix..prefix + body);
            b
        })
    }

    fn reuse_or_realloc(self, buffer: I, prefix: usize, suffix: usize) -> Result<O, (A::Error, I)> {
        // Always allocate and copy the body; hand `buffer` back on failure.
        BufferProvider::<I, O>::alloc_no_reuse(self, prefix, buffer.len(), suffix)
            .map(|mut b| {
                b.copy_from(&buffer);
                b
            })
            .map_err(|e| (e, buffer))
    }
}
1380
/// A type that can serialize itself into a buffer, subject to the
/// constraints of the layers that encapsulate it.
pub trait Serializer: Sized {
    /// The type of buffer this serializer may already be holding.
    type Buffer;

    /// Serializes under `outer`'s constraints, obtaining the output buffer
    /// from `provider`. On failure, `self` is returned with the error.
    fn serialize<B: GrowBufferMut, P: BufferProvider<Self::Buffer, B>>(
        self,
        outer: PacketConstraints,
        provider: P,
    ) -> Result<B, (SerializeError<P::Error>, Self)>;

    /// Serializes into a freshly allocated buffer, leaving `self` intact.
    fn serialize_new_buf<B: ReusableBuffer, A: BufferAlloc<B>>(
        &self,
        outer: PacketConstraints,
        alloc: A,
    ) -> Result<B, SerializeError<A::Error>>;

    /// Like `serialize`, but allocates `Buf<Vec<u8>>`s (via `new_buf_vec`)
    /// when the existing buffer cannot be reused.
    #[inline]
    #[allow(clippy::type_complexity)]
    fn serialize_vec(
        self,
        outer: PacketConstraints,
    ) -> Result<Either<Self::Buffer, Buf<Vec<u8>>>, (SerializeError<Never>, Self)>
    where
        Self::Buffer: ReusableBuffer,
    {
        self.serialize(outer, MaybeReuseBufferProvider(new_buf_vec))
    }

    /// Like `serialize`, but only reuses the existing buffer — allocation is
    /// forbidden, and failure to reuse maps to `BufferTooShortError`.
    #[inline]
    fn serialize_no_alloc(
        self,
        outer: PacketConstraints,
    ) -> Result<Self::Buffer, (SerializeError<BufferTooShortError>, Self)>
    where
        Self::Buffer: ReusableBuffer,
    {
        // The `()` allocator always fails, so the output is always the
        // reused input buffer (the `A` variant of the `Either`).
        self.serialize(outer, MaybeReuseBufferProvider(())).map(Either::into_a).map_err(
            |(err, slf)| {
                (
                    match err {
                        SerializeError::Alloc(()) => BufferTooShortError.into(),
                        SerializeError::SizeLimitExceeded => SerializeError::SizeLimitExceeded,
                    },
                    slf,
                )
            },
        )
    }

    /// `serialize` with no outer constraints.
    #[inline]
    fn serialize_outer<B: GrowBufferMut, P: BufferProvider<Self::Buffer, B>>(
        self,
        provider: P,
    ) -> Result<B, (SerializeError<P::Error>, Self)> {
        self.serialize(PacketConstraints::UNCONSTRAINED, provider)
    }

    /// `serialize_vec` with no outer constraints.
    #[inline]
    #[allow(clippy::type_complexity)]
    fn serialize_vec_outer(
        self,
    ) -> Result<Either<Self::Buffer, Buf<Vec<u8>>>, (SerializeError<Never>, Self)>
    where
        Self::Buffer: ReusableBuffer,
    {
        self.serialize_vec(PacketConstraints::UNCONSTRAINED)
    }

    /// `serialize_no_alloc` with no outer constraints.
    #[inline]
    fn serialize_no_alloc_outer(
        self,
    ) -> Result<Self::Buffer, (SerializeError<BufferTooShortError>, Self)>
    where
        Self::Buffer: ReusableBuffer,
    {
        self.serialize_no_alloc(PacketConstraints::UNCONSTRAINED)
    }

    /// Wraps this serializer in an outer layer, producing a `Nested`.
    #[inline]
    fn encapsulate<B>(self, outer: B) -> Nested<Self, B> {
        Nested { inner: self, outer }
    }

    /// Wraps this serializer in a layer that limits the total size to
    /// `limit` bytes.
    #[inline]
    fn with_size_limit(self, limit: usize) -> Nested<Self, LimitedSizePacketBuilder> {
        self.encapsulate(LimitedSizePacketBuilder { limit })
    }
}
1560
/// A `Serializer` for an `InnerPacketBuilder` paired with a backing buffer;
/// constructed by `InnerPacketBuilder::into_serializer[_with]`.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct InnerSerializer<I, B> {
    // The innermost payload builder.
    inner: I,
    // The backing buffer; its body is empty (see `into_serializer_with`).
    buffer: B,
}

impl<I, B> InnerSerializer<I, B> {
    /// A reference to the wrapped `InnerPacketBuilder`.
    pub fn inner(&self) -> &I {
        &self.inner
    }
}
1582
/// Adapts an `InnerPacketBuilder` into a `PacketBuilder` whose "header" is
/// the entire payload, so it can be serialized via the `Nested` machinery.
struct InnerPacketBuilderWrapper<I>(I);

impl<I: InnerPacketBuilder> PacketBuilder for InnerPacketBuilderWrapper<I> {
    fn constraints(&self) -> PacketConstraints {
        let Self(wrapped) = self;
        // The payload occupies the header; no footer, no body constraints.
        PacketConstraints::new(wrapped.bytes_len(), 0, 0, usize::MAX)
    }

    fn serialize(&self, target: &mut SerializeTarget<'_>, _body: FragmentedBytesMut<'_, '_>) {
        let Self(wrapped) = self;

        // The serialization machinery sizes the target from `constraints`,
        // so the header is exactly the payload length and the footer empty.
        debug_assert_eq!(target.header.len(), wrapped.bytes_len());
        debug_assert_eq!(target.footer.len(), 0);

        InnerPacketBuilder::serialize(wrapped, target.header);
    }
}
1608
impl<I: InnerPacketBuilder, B: GrowBuffer + ShrinkBuffer> Serializer for InnerSerializer<I, B> {
    type Buffer = B;

    #[inline]
    fn serialize<BB: GrowBufferMut, P: BufferProvider<B, BB>>(
        self,
        outer: PacketConstraints,
        provider: P,
    ) -> Result<BB, (SerializeError<P::Error>, InnerSerializer<I, B>)> {
        // Serialize by wrapping the inner builder as a `PacketBuilder` and
        // encapsulating the (empty-bodied) backing buffer with it.
        let pb = InnerPacketBuilderWrapper(self.inner);
        debug_assert_eq!(self.buffer.len(), 0);
        // On failure, unwrap the `Nested` to reconstitute `self`.
        self.buffer.encapsulate(pb).serialize(outer, provider).map_err(
            |(err, Nested { inner: buffer, outer: pb })| {
                (err, InnerSerializer { inner: pb.0, buffer })
            },
        )
    }

    #[inline]
    fn serialize_new_buf<BB: ReusableBuffer, A: BufferAlloc<BB>>(
        &self,
        outer: PacketConstraints,
        alloc: A,
    ) -> Result<BB, SerializeError<A::Error>> {
        // A fresh buffer is allocated, so the stored buffer isn't needed;
        // serialize the wrapped builder over an `EmptyBuf`.
        let pb = InnerPacketBuilderWrapper(&self.inner);
        EmptyBuf.encapsulate(pb).serialize_new_buf(outer, alloc)
    }
}
1637
// Any buffer is itself a serializer whose body is the buffer's contents.
impl<B: GrowBuffer + ShrinkBuffer> Serializer for B {
    type Buffer = B;

    #[inline]
    fn serialize<BB: GrowBufferMut, P: BufferProvider<Self::Buffer, BB>>(
        self,
        outer: PacketConstraints,
        provider: P,
    ) -> Result<BB, (SerializeError<P::Error>, Self)> {
        // Delegate to the truncating serializer with truncation disabled, so
        // an oversized body is an error rather than silently trimmed.
        TruncatingSerializer::new(self, TruncateDirection::NoTruncating)
            .serialize(outer, provider)
            .map_err(|(err, ser)| (err, ser.buffer))
    }

    fn serialize_new_buf<BB: ReusableBuffer, A: BufferAlloc<BB>>(
        &self,
        outer: PacketConstraints,
        alloc: A,
    ) -> Result<BB, SerializeError<A::Error>> {
        if self.len() > outer.max_body_len() {
            return Err(SerializeError::SizeLimitExceeded);
        }

        // Pad up to the outer minimum body length if necessary; padding goes
        // between the body and the footer.
        let padding = outer.min_body_len().saturating_sub(self.len());
        let tail_size = padding + outer.footer_len();
        let buffer_size = outer.header_len() + self.len() + tail_size;
        let mut buffer = alloc.alloc(buffer_size)?;
        // Shrink to the body region, copy the body in, then grow to include
        // the padding in the final body.
        buffer.shrink_front(outer.header_len());
        buffer.shrink_back(tail_size);
        buffer.copy_from(self);
        buffer.grow_back(padding);
        Ok(buffer)
    }
}
1672
/// A serializer that is one of two serializer types sharing the same buffer
/// type; delegates to whichever variant is present.
pub enum EitherSerializer<A, B> {
    A(A),
    B(B),
}

impl<A: Serializer, B: Serializer<Buffer = A::Buffer>> Serializer for EitherSerializer<A, B> {
    type Buffer = A::Buffer;

    fn serialize<TB: GrowBufferMut, P: BufferProvider<Self::Buffer, TB>>(
        self,
        outer: PacketConstraints,
        provider: P,
    ) -> Result<TB, (SerializeError<P::Error>, Self)> {
        // On failure, re-wrap the returned serializer in the same variant.
        match self {
            EitherSerializer::A(s) => {
                s.serialize(outer, provider).map_err(|(err, s)| (err, EitherSerializer::A(s)))
            }
            EitherSerializer::B(s) => {
                s.serialize(outer, provider).map_err(|(err, s)| (err, EitherSerializer::B(s)))
            }
        }
    }

    fn serialize_new_buf<TB: ReusableBuffer, BA: BufferAlloc<TB>>(
        &self,
        outer: PacketConstraints,
        alloc: BA,
    ) -> Result<TB, SerializeError<BA::Error>> {
        match self {
            EitherSerializer::A(s) => s.serialize_new_buf(outer, alloc),
            EitherSerializer::B(s) => s.serialize_new_buf(outer, alloc),
        }
    }
}
1710
/// Which end of the body to discard from when the body exceeds a size limit.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum TruncateDirection {
    /// Discard excess bytes from the front of the body.
    DiscardFront,
    /// Discard excess bytes from the back of the body.
    DiscardBack,
    /// Do not truncate; an oversized body is a serialization error.
    NoTruncating,
}

/// A buffer-backed serializer that may truncate its body to satisfy an
/// outer size limit, according to a `TruncateDirection`.
#[derive(Copy, Clone, Debug)]
#[cfg_attr(test, derive(Eq, PartialEq))]
pub struct TruncatingSerializer<B> {
    buffer: B,
    direction: TruncateDirection,
}

impl<B> TruncatingSerializer<B> {
    /// Constructs a new `TruncatingSerializer`.
    pub fn new(buffer: B, direction: TruncateDirection) -> TruncatingSerializer<B> {
        TruncatingSerializer { buffer, direction }
    }
}
1749
impl<B: GrowBuffer + ShrinkBuffer> Serializer for TruncatingSerializer<B> {
    type Buffer = B;

    fn serialize<BB: GrowBufferMut, P: BufferProvider<B, BB>>(
        mut self,
        outer: PacketConstraints,
        provider: P,
    ) -> Result<BB, (SerializeError<P::Error>, Self)> {
        let original_len = self.buffer.len();
        // How many bytes the body exceeds the limit by, if any.
        let excess_bytes = if original_len > outer.max_body_len {
            Some(original_len - outer.max_body_len)
        } else {
            None
        };
        // Truncate in place from the configured end; `NoTruncating` turns an
        // over-long body into a hard failure instead.
        if let Some(excess_bytes) = excess_bytes {
            match self.direction {
                TruncateDirection::DiscardFront => self.buffer.shrink_front(excess_bytes),
                TruncateDirection::DiscardBack => self.buffer.shrink_back(excess_bytes),
                TruncateDirection::NoTruncating => {
                    return Err((SerializeError::SizeLimitExceeded, self))
                }
            }
        }

        // Padding needed to bring the (possibly truncated) body up to the
        // minimum body length; it lives in the suffix with the footer.
        let padding = outer.min_body_len().saturating_sub(self.buffer.len());

        debug_assert!(self.buffer.len() + padding <= outer.max_body_len());
        match provider.reuse_or_realloc(
            self.buffer,
            outer.header_len(),
            padding + outer.footer_len(),
        ) {
            Ok(buffer) => Ok(buffer),
            Err((err, mut buffer)) => {
                // Allocation failed: undo the truncation performed above so
                // the serializer handed back to the caller is unchanged.
                if let Some(excess_bytes) = excess_bytes {
                    match self.direction {
                        TruncateDirection::DiscardFront => buffer.grow_front(excess_bytes),
                        TruncateDirection::DiscardBack => buffer.grow_back(excess_bytes),
                        // Had truncation been disallowed, we'd have returned
                        // before attempting the allocation.
                        TruncateDirection::NoTruncating => unreachable!(),
                    }
                }

                Err((
                    SerializeError::Alloc(err),
                    TruncatingSerializer { buffer, direction: self.direction },
                ))
            }
        }
    }

    fn serialize_new_buf<BB: ReusableBuffer, A: BufferAlloc<BB>>(
        &self,
        outer: PacketConstraints,
        alloc: A,
    ) -> Result<BB, SerializeError<A::Error>> {
        // Since `self` is borrowed here, truncation cannot mutate the stored
        // buffer; instead the copy below reads only the surviving sub-slice.
        let truncated_size = cmp::min(self.buffer.len(), outer.max_body_len());
        let discarded_bytes = self.buffer.len() - truncated_size;
        let padding = outer.min_body_len().saturating_sub(truncated_size);
        let tail_size = padding + outer.footer_len();
        let buffer_size = outer.header_len() + truncated_size + tail_size;
        let mut buffer = alloc.alloc(buffer_size)?;
        buffer.shrink_front(outer.header_len());
        buffer.shrink_back(tail_size);
        buffer.with_bytes_mut(|mut dst| {
            self.buffer.with_bytes(|src| {
                // Select which part of the body survives truncation.
                let src = match (discarded_bytes > 0, self.direction) {
                    (false, _) => src,
                    (true, TruncateDirection::DiscardFront) => src.slice(discarded_bytes..),
                    (true, TruncateDirection::DiscardBack) => src.slice(..truncated_size),
                    (true, TruncateDirection::NoTruncating) => {
                        return Err(SerializeError::SizeLimitExceeded)
                    }
                };
                dst.copy_from(&src);
                Ok(())
            })
        })?;
        // Padding is explicitly zeroed.
        buffer.grow_back_zero(padding);
        Ok(buffer)
    }
}
1837
impl<I: Serializer, O: PacketBuilder> Serializer for Nested<I, O> {
    type Buffer = I::Buffer;

    #[inline]
    fn serialize<B: GrowBufferMut, P: BufferProvider<I::Buffer, B>>(
        self,
        outer: PacketConstraints,
        provider: P,
    ) -> Result<B, (SerializeError<P::Error>, Self)> {
        // Fold this layer's constraints into `outer`; `None` means the
        // combined constraints are unsatisfiable.
        let Some(outer) = self.outer.constraints().try_encapsulate(&outer) else {
            return Err((SerializeError::SizeLimitExceeded, self));
        };

        match self.inner.serialize(outer, provider) {
            Ok(mut buf) => {
                // The inner body is in place; write this layer's header and
                // footer around it.
                buf.serialize(&self.outer);
                Ok(buf)
            }
            // On failure, reassemble the original `Nested` for the caller.
            Err((err, inner)) => Err((err, inner.encapsulate(self.outer))),
        }
    }

    #[inline]
    fn serialize_new_buf<B: ReusableBuffer, A: BufferAlloc<B>>(
        &self,
        outer: PacketConstraints,
        alloc: A,
    ) -> Result<B, SerializeError<A::Error>> {
        let Some(outer) = self.outer.constraints().try_encapsulate(&outer) else {
            return Err(SerializeError::SizeLimitExceeded);
        };

        let mut buf = self.inner.serialize_new_buf(outer, alloc)?;
        // Disambiguate from `Serializer::serialize`.
        GrowBufferMut::serialize(&mut buf, &self.outer);
        Ok(buf)
    }
}
1875
1876#[cfg(test)]
1877mod tests {
1878 use super::*;
1879 use crate::BufferMut;
1880 use std::fmt::Debug;
1881 use test_case::test_case;
1882 use test_util::{assert_geq, assert_leq};
1883
    /// A test double implementing both `PacketBuilder` and
    /// `InnerPacketBuilder` with fully configurable constraints; it fills
    /// headers with 0xFF and footers with 0xFE.
    #[derive(Copy, Clone, Debug, Eq, PartialEq)]
    struct DummyPacketBuilder {
        header_len: usize,
        footer_len: usize,
        min_body_len: usize,
        max_body_len: usize,
    }
1896
1897 impl DummyPacketBuilder {
1898 fn new(
1899 header_len: usize,
1900 footer_len: usize,
1901 min_body_len: usize,
1902 max_body_len: usize,
1903 ) -> DummyPacketBuilder {
1904 DummyPacketBuilder { header_len, footer_len, min_body_len, max_body_len }
1905 }
1906 }
1907
1908 fn fill(bytes: &mut [u8], byte: u8) {
1909 for b in bytes {
1910 *b = byte;
1911 }
1912 }
1913
    impl PacketBuilder for DummyPacketBuilder {
        fn constraints(&self) -> PacketConstraints {
            PacketConstraints::new(
                self.header_len,
                self.footer_len,
                self.min_body_len,
                self.max_body_len,
            )
        }

        // Fills the header with 0xFF and the footer with 0xFE, asserting that
        // the target regions and the body satisfy this builder's constraints.
        fn serialize(&self, target: &mut SerializeTarget<'_>, body: FragmentedBytesMut<'_, '_>) {
            assert_eq!(target.header.len(), self.header_len);
            assert_eq!(target.footer.len(), self.footer_len);
            assert!(body.len() >= self.min_body_len);
            assert!(body.len() <= self.max_body_len);
            fill(target.header, 0xFF);
            fill(target.footer, 0xFE);
        }
    }
1933
    impl InnerPacketBuilder for DummyPacketBuilder {
        // When used as an inner packet builder, `header_len` doubles as the
        // length of the serialized bytes.
        fn bytes_len(&self) -> usize {
            self.header_len
        }

        fn serialize(&self, buffer: &mut [u8]) {
            assert_eq!(buffer.len(), self.header_len);
            fill(buffer, 0xFF);
        }
    }
1944
    /// Records the expected properties of a serializer so that serialization
    /// results can be validated against them.
    #[derive(Copy, Clone, Debug, Eq, PartialEq)]
    struct SerializerVerifier {
        // Length of the serializer's body when serialized without
        // constraints, or `None` if unconstrained serialization itself fails.
        inner_len: Option<usize>,

        // Whether the serializer is allowed to truncate its body to fit.
        truncating: bool,
    }
1956
    impl SerializerVerifier {
        // Captures the serializer's unconstrained serialized length (if it
        // can serialize at all) for later comparison.
        fn new<S: Serializer>(serializer: &S, truncating: bool) -> Self {
            let inner_len = serializer
                .serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec)
                .map(|buf| buf.len())
                .inspect_err(|err| assert!(err.is_size_limit_exceeded()))
                .ok();
            Self { inner_len, truncating }
        }

        // Asserts that a serialization result is consistent with `outer`'s
        // constraints and with whether truncation is allowed.
        fn verify_result<B: GrowBufferMut, A>(
            &self,
            result: Result<&B, &SerializeError<A>>,
            outer: PacketConstraints,
        ) {
            // Serialization must fail when the body exceeds the limit and
            // truncation is not permitted, or when the inner serializer could
            // not serialize at all.
            let should_exceed_size_limit = match self.inner_len {
                Some(inner_len) => outer.max_body_len() < inner_len && !self.truncating,
                None => true,
            };

            match result {
                Ok(buf) => {
                    assert_geq!(buf.prefix_len(), outer.header_len());
                    assert_geq!(buf.suffix_len(), outer.footer_len());
                    assert_leq!(buf.len(), outer.max_body_len());

                    // There must be room after the body for the padding
                    // needed to reach the minimum body length plus the footer.
                    let padding = outer.min_body_len().saturating_sub(buf.len());
                    assert_leq!(padding + outer.footer_len(), buf.suffix_len());

                    assert!(!should_exceed_size_limit);
                }
                Err(err) => {
                    if should_exceed_size_limit {
                        assert!(err.is_size_limit_exceeded());
                    } else {
                        assert!(err.is_alloc());
                    }
                }
            }
        }
    }
2004
    /// A serializer wrapper that checks invariants on every serialization
    /// result produced by the wrapped serializer.
    #[derive(Copy, Clone, Debug, Eq, PartialEq)]
    struct VerifyingSerializer<S> {
        ser: S,
        verifier: SerializerVerifier,
    }
2018
    impl<S: Serializer + Debug + Clone + Eq> Serializer for VerifyingSerializer<S>
    where
        S::Buffer: ReusableBuffer,
    {
        type Buffer = S::Buffer;

        fn serialize<B: GrowBufferMut, P: BufferProvider<Self::Buffer, B>>(
            self,
            outer: PacketConstraints,
            provider: P,
        ) -> Result<B, (SerializeError<P::Error>, Self)> {
            let Self { ser, verifier } = self;
            let orig = ser.clone();

            let result = ser.serialize(outer, provider).map_err(|(err, ser)| {
                // On failure, the serializer must be returned to the caller
                // unmodified.
                assert_eq!(ser, orig);
                (err, Self { ser, verifier })
            });

            verifier.verify_result(result.as_ref().map_err(|(err, _ser)| err), outer);

            result
        }

        fn serialize_new_buf<B: ReusableBuffer, A: BufferAlloc<B>>(
            &self,
            outer: PacketConstraints,
            alloc: A,
        ) -> Result<B, SerializeError<A::Error>> {
            let res = self.ser.serialize_new_buf(outer, alloc);
            self.verifier.verify_result(res.as_ref(), outer);
            res
        }
    }
2055
    /// Convenience combinators that wrap serializers in
    /// [`VerifyingSerializer`]s.
    trait SerializerExt: Serializer {
        fn into_verifying(self, truncating: bool) -> VerifyingSerializer<Self>
        where
            Self::Buffer: ReusableBuffer,
        {
            let verifier = SerializerVerifier::new(&self, truncating);
            VerifyingSerializer { ser: self, verifier }
        }

        // Like `Serializer::encapsulate`, but the result is verifying.
        fn encapsulate_verifying<B: PacketBuilder>(
            self,
            outer: B,
            truncating: bool,
        ) -> VerifyingSerializer<Nested<Self, B>>
        where
            Self::Buffer: ReusableBuffer,
        {
            self.encapsulate(outer).into_verifying(truncating)
        }

        // Like `Serializer::with_size_limit`, but the result is verifying.
        fn with_size_limit_verifying(
            self,
            limit: usize,
            truncating: bool,
        ) -> VerifyingSerializer<Nested<Self, LimitedSizePacketBuilder>>
        where
            Self::Buffer: ReusableBuffer,
        {
            self.with_size_limit(limit).into_verifying(truncating)
        }
    }

    impl<S: Serializer> SerializerExt for S {}
2089
2090 #[test]
2091 fn test_either_into_inner() {
2092 fn ret_either(a: u32, b: u32, c: bool) -> Either<u32, u32> {
2093 if c {
2094 Either::A(a)
2095 } else {
2096 Either::B(b)
2097 }
2098 }
2099
2100 assert_eq!(ret_either(1, 2, true).into_inner(), 1);
2101 assert_eq!(ret_either(1, 2, false).into_inner(), 2);
2102 }
2103
    // `unwrap_a`/`unwrap_b` return the payload when the matching variant is
    // held.
    #[test]
    fn test_either_unwrap_success() {
        assert_eq!(Either::<u16, u32>::A(5).unwrap_a(), 5);
        assert_eq!(Either::<u16, u32>::B(10).unwrap_b(), 10);
    }
2109
    // `unwrap_a` panics when the value holds the `B` variant.
    #[test]
    #[should_panic]
    fn test_either_unwrap_a_panic() {
        let _: u16 = Either::<u16, u32>::B(10).unwrap_a();
    }
2115
    // `unwrap_b` panics when the value holds the `A` variant.
    #[test]
    #[should_panic]
    fn test_either_unwrap_b_panic() {
        let _: u32 = Either::<u16, u32>::A(5).unwrap_b();
    }
2121
    // `into_inner` yields the same bytes the buffer exposes via `as_ref`,
    // for a variety of body ranges within the backing storage.
    #[test_case(Buf::new((0..100).collect(), ..); "entire buf")]
    #[test_case(Buf::new((0..100).collect(), 0..0); "empty range")]
    #[test_case(Buf::new((0..100).collect(), ..50); "prefix")]
    #[test_case(Buf::new((0..100).collect(), 50..); "suffix")]
    #[test_case(Buf::new((0..100).collect(), 25..75); "middle")]
    fn test_buf_into_inner(buf: Buf<Vec<u8>>) {
        assert_eq!(buf.clone().as_ref(), buf.into_inner());
    }
2130
    #[test]
    fn test_packet_constraints() {
        use PacketConstraints as PC;

        // Construction succeeds for valid combinations...
        assert!(PC::try_new(0, 0, 0, 0).is_some());
        assert!(PC::try_new(usize::MAX / 2, usize::MAX / 2, 0, 0).is_some());
        // ...and fails when header + footer overflows `usize` or when
        // min_body_len exceeds max_body_len.
        assert_eq!(PC::try_new(usize::MAX, 1, 0, 0), None);
        assert_eq!(PC::try_new(0, 0, 1, 0), None);

        // Encapsulation sums header/footer lengths and deducts them from the
        // available body space.
        let pc = PC::new(10, 10, 0, usize::MAX);
        assert_eq!(pc.try_encapsulate(&pc).unwrap(), PC::new(20, 20, 0, usize::MAX - 20));

        // NOTE(review): this repeats the previous case verbatim — possibly a
        // copy/paste that was meant to exercise a different combination;
        // confirm against the original intent.
        let pc = PC::new(10, 10, 0, usize::MAX);
        assert_eq!(pc.try_encapsulate(&pc).unwrap(), PC::new(20, 20, 0, usize::MAX - 20));

        // The outer's 10-byte minimum body is satisfied by the inner's
        // header + footer, so the combined minimum body is 0.
        let inner = PC::new(10, 10, 0, usize::MAX);
        let outer = PC::new(0, 0, 10, usize::MAX);
        assert_eq!(inner.try_encapsulate(&outer).unwrap(), PC::new(10, 10, 0, usize::MAX - 20));

        // Combined header length overflows `usize`.
        let inner = PC::new(usize::MAX, 0, 0, usize::MAX);
        let outer = PC::new(1, 0, 0, usize::MAX);
        assert_eq!(inner.try_encapsulate(&outer), None);

        // Combined footer length overflows `usize`.
        let inner = PC::new(0, usize::MAX, 0, usize::MAX);
        let outer = PC::new(0, 1, 0, usize::MAX);
        assert_eq!(inner.try_encapsulate(&outer), None);

        // Five quantities each just over MAX/5 cannot be combined without
        // overflowing `usize`.
        let one_fifth_max = (usize::MAX / 5) + 1;
        let inner = PC::new(one_fifth_max, one_fifth_max, one_fifth_max, usize::MAX);
        let outer = PC::new(one_fifth_max, one_fifth_max, 0, usize::MAX);
        assert_eq!(inner.try_encapsulate(&outer), None);

        // The inner header + footer don't fit in the outer's max body.
        let inner = PC::new(10, 10, 0, usize::MAX);
        let outer = PC::new(0, 0, 0, 10);
        assert_eq!(inner.try_encapsulate(&outer), None);

        // The inner minimum body exceeds the outer's max body.
        let inner = PC::new(0, 0, 10, usize::MAX);
        let outer = PC::new(0, 0, 0, 5);
        assert_eq!(inner.try_encapsulate(&outer), None);
    }
2211
    #[test]
    fn test_inner_serializer() {
        const INNER: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];

        fn concat<'a, I: IntoIterator<Item = &'a &'a [u8]>>(slices: I) -> Vec<u8> {
            let mut v = Vec::new();
            for slc in slices.into_iter() {
                v.extend_from_slice(slc);
            }
            v
        }

        // With no outer constraints, the inner bytes come through unchanged.
        let buf = INNER.into_serializer().serialize_vec_outer().unwrap();
        assert_eq!(buf.as_ref(), INNER);

        // A min_body_len of 20 pads the 10-byte body with 10 zero bytes.
        let buf = INNER
            .into_serializer()
            .into_verifying(false)
            .encapsulate(DummyPacketBuilder::new(0, 0, 20, usize::MAX))
            .serialize_vec_outer()
            .unwrap();
        assert_eq!(buf.as_ref(), concat(&[INNER, vec![0; 10].as_ref()]).as_slice());

        // Header (0xFF) and footer (0xFE) are written around the body.
        let buf = INNER
            .into_serializer()
            .into_verifying(false)
            .encapsulate(DummyPacketBuilder::new(10, 10, 0, usize::MAX))
            .serialize_vec_outer()
            .unwrap();
        assert_eq!(
            buf.as_ref(),
            concat(&[vec![0xFF; 10].as_ref(), INNER, vec![0xFE; 10].as_ref()]).as_slice()
        );

        // A max_body_len of 9 cannot hold the 10-byte body.
        assert_eq!(
            INNER
                .into_serializer()
                .into_verifying(false)
                .encapsulate(DummyPacketBuilder::new(0, 0, 0, 9))
                .serialize_vec_outer()
                .unwrap_err()
                .0,
            SerializeError::SizeLimitExceeded
        );

        // A provided 1-byte buffer is too small for the 10-byte body, but
        // serialization still succeeds and produces the right bytes.
        assert_eq!(
            INNER
                .into_serializer_with(Buf::new(vec![0xFF], ..))
                .into_verifying(false)
                .serialize_vec_outer()
                .unwrap()
                .as_ref(),
            INNER
        );
    }
2277
2278 #[test]
2279 fn test_buffer_serializer_and_inner_serializer() {
2280 fn verify_buffer_serializer<B: BufferMut + Debug>(
2281 buffer: B,
2282 header_len: usize,
2283 footer_len: usize,
2284 min_body_len: usize,
2285 ) {
2286 let old_body = buffer.to_flattened_vec();
2287 let serializer = buffer.encapsulate(DummyPacketBuilder::new(
2288 header_len,
2289 footer_len,
2290 min_body_len,
2291 usize::MAX,
2292 ));
2293
2294 let buffer0 = serializer
2295 .serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec)
2296 .unwrap();
2297 verify(buffer0, &old_body, header_len, footer_len, min_body_len);
2298
2299 let buffer = serializer.serialize_vec_outer().unwrap();
2300 verify(buffer, &old_body, header_len, footer_len, min_body_len);
2301 }
2302
2303 fn verify_inner_packet_builder_serializer(
2304 body: &[u8],
2305 header_len: usize,
2306 footer_len: usize,
2307 min_body_len: usize,
2308 ) {
2309 let buffer = body
2310 .into_serializer()
2311 .encapsulate(DummyPacketBuilder::new(
2312 header_len,
2313 footer_len,
2314 min_body_len,
2315 usize::MAX,
2316 ))
2317 .serialize_vec_outer()
2318 .unwrap();
2319 verify(buffer, body, header_len, footer_len, min_body_len);
2320 }
2321
2322 fn verify<B: Buffer>(
2323 buffer: B,
2324 body: &[u8],
2325 header_len: usize,
2326 footer_len: usize,
2327 min_body_len: usize,
2328 ) {
2329 let flat = buffer.to_flattened_vec();
2330 let header_bytes = &flat[..header_len];
2331 let body_bytes = &flat[header_len..header_len + body.len()];
2332 let padding_len = min_body_len.saturating_sub(body.len());
2333 let padding_bytes =
2334 &flat[header_len + body.len()..header_len + body.len() + padding_len];
2335 let total_body_len = body.len() + padding_len;
2336 let footer_bytes = &flat[header_len + total_body_len..];
2337 assert_eq!(
2338 buffer.len() - total_body_len,
2339 header_len + footer_len,
2340 "buffer.len()({}) - total_body_len({}) != header_len({}) + footer_len({})",
2341 buffer.len(),
2342 header_len,
2343 footer_len,
2344 min_body_len,
2345 );
2346
2347 assert!(
2349 header_bytes.iter().all(|b| *b == 0xFF),
2350 "header_bytes {:?} are not filled with 0xFF's",
2351 header_bytes,
2352 );
2353 assert_eq!(body_bytes, body);
2354 assert!(
2356 padding_bytes.iter().all(|b| *b == 0),
2357 "padding_bytes {:?} are not filled with 0s",
2358 padding_bytes,
2359 );
2360 assert!(
2362 footer_bytes.iter().all(|b| *b == 0xFE),
2363 "footer_bytes {:?} are not filled with 0xFE's",
2364 footer_bytes,
2365 );
2366 }
2367
2368 for buf_len in 0..8 {
2371 for range_start in 0..buf_len {
2372 for range_end in range_start..buf_len {
2373 for prefix in 0..8 {
2374 for suffix in 0..8 {
2375 for min_body in 0..8 {
2376 let mut vec = vec![0; buf_len];
2377 #[allow(clippy::needless_range_loop)]
2382 for i in 0..vec.len() {
2383 vec[i] = i as u8;
2384 }
2385 verify_buffer_serializer(
2386 Buf::new(vec.as_mut_slice(), range_start..range_end),
2387 prefix,
2388 suffix,
2389 min_body,
2390 );
2391 if range_start == 0 {
2392 verify_inner_packet_builder_serializer(
2401 &vec.as_slice()[range_start..range_end],
2402 prefix,
2403 suffix,
2404 min_body,
2405 );
2406 }
2407 }
2408 }
2409 }
2410 }
2411 }
2412 }
2413 }
2414
    // An outer layer's minimum body length is satisfied with zero padding
    // placed between the inner packet and the outer footer.
    #[test]
    fn test_min_body_len() {
        let body = &[1, 2];

        let inner = DummyPacketBuilder::new(2, 2, 0, usize::MAX);
        // The outer's body (inner header + body + inner footer = 6 bytes)
        // falls short of its 8-byte minimum, so 2 bytes of padding are added.
        let outer = DummyPacketBuilder::new(2, 2, 8, usize::MAX);
        let buf = body
            .into_serializer()
            .into_verifying(false)
            .encapsulate_verifying(inner, false)
            .encapsulate_verifying(outer, false)
            .serialize_vec_outer()
            .unwrap();
        assert_eq!(buf.prefix_len(), 0);
        assert_eq!(buf.suffix_len(), 0);
        // Layout: outer header, inner header, body, inner footer, padding,
        // outer footer.
        assert_eq!(
            buf.as_ref(),
            &[
                0xFF, 0xFF, 0xFF, 0xFF, 1, 2, 0xFE, 0xFE, 0, 0, 0xFE, 0xFE ]
        );
    }
2449
    // Size limits apply to everything serialized beneath them, and compose
    // when nested.
    #[test]
    fn test_size_limit() {
        // `ser` must serialize to exactly 1 byte of body.
        fn test<S: Serializer + Clone + Debug + Eq>(ser: S)
        where
            S::Buffer: ReusableBuffer,
        {
            // Adds a 1-byte header and a 1-byte footer: 3 bytes total.
            let pb = DummyPacketBuilder::new(1, 1, 0, usize::MAX);

            // 3 bytes fit within limits of 3 and 4.
            assert!(ser
                .clone()
                .encapsulate_verifying(pb, false)
                .with_size_limit_verifying(3, false)
                .serialize_vec_outer()
                .is_ok());
            assert!(ser
                .clone()
                .encapsulate_verifying(pb, false)
                .with_size_limit_verifying(4, false)
                .serialize_vec_outer()
                .is_ok());
            // An inner limit of 1 constrains only the 1-byte body, so this
            // still fits.
            assert!(ser
                .clone()
                .with_size_limit_verifying(1, false)
                .encapsulate_verifying(pb, false)
                .with_size_limit_verifying(3, false)
                .serialize_vec_outer()
                .is_ok());
            // An inner limit of 0 rejects the 1-byte body.
            assert!(ser
                .clone()
                .with_size_limit_verifying(0, false)
                .encapsulate_verifying(pb, false)
                .serialize_vec_outer()
                .is_err());
            // An outer limit of 1 cannot hold body + header + footer.
            assert!(ser
                .clone()
                .encapsulate_verifying(pb, false)
                .with_size_limit_verifying(1, false)
                .serialize_vec_outer()
                .is_err());
        }

        // Run with both an InnerPacketBuilder-backed and a buffer-backed
        // serializer, each producing a 1-byte body.
        test(DummyPacketBuilder::new(1, 0, 0, usize::MAX).into_serializer().into_verifying(false));
        test(Buf::new(vec![0], ..).into_verifying(false));
    }
2516
2517 #[test]
2518 fn test_truncating_serializer() {
2519 fn verify_result<S: Serializer + Debug>(ser: S, expected: &[u8])
2520 where
2521 S::Buffer: ReusableBuffer + AsRef<[u8]>,
2522 {
2523 let buf = ser.serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec).unwrap();
2524 assert_eq!(buf.as_ref(), &expected[..]);
2525 let buf = ser.serialize_vec_outer().unwrap();
2526 assert_eq!(buf.as_ref(), &expected[..]);
2527 }
2528
2529 let body = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
2531 let ser =
2532 TruncatingSerializer::new(Buf::new(body.clone(), ..), TruncateDirection::DiscardFront)
2533 .into_verifying(true)
2534 .with_size_limit_verifying(4, true);
2535 verify_result(ser, &[6, 7, 8, 9]);
2536
2537 let ser =
2539 TruncatingSerializer::new(Buf::new(body.clone(), ..), TruncateDirection::DiscardBack)
2540 .into_verifying(true)
2541 .with_size_limit_verifying(7, true);
2542 verify_result(ser, &[0, 1, 2, 3, 4, 5, 6]);
2543
2544 let ser =
2546 TruncatingSerializer::new(Buf::new(body.clone(), ..), TruncateDirection::NoTruncating)
2547 .into_verifying(false)
2548 .with_size_limit_verifying(5, true);
2549 assert!(ser.clone().serialize_vec_outer().is_err());
2550 assert!(ser.serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec).is_err());
2551 assert!(ser.serialize_vec_outer().is_err());
2552
2553 fn test_serialization_failure<S: Serializer + Clone + Eq + Debug>(
2557 ser: S,
2558 err: SerializeError<BufferTooShortError>,
2559 ) where
2560 S::Buffer: ReusableBuffer + Debug,
2561 {
2562 let (e, new_ser) = ser
2571 .clone()
2572 .encapsulate(DummyPacketBuilder::new(2, 2, 0, 1))
2573 .serialize_no_alloc_outer()
2574 .unwrap_err();
2575 assert_eq!(err, e);
2576 assert_eq!(new_ser.into_inner(), ser);
2577 }
2578
2579 let body = Buf::new(vec![1, 2], ..);
2580 test_serialization_failure(
2581 TruncatingSerializer::new(body.clone(), TruncateDirection::DiscardFront)
2582 .into_verifying(true),
2583 SerializeError::Alloc(BufferTooShortError),
2584 );
2585 test_serialization_failure(
2586 TruncatingSerializer::new(body.clone(), TruncateDirection::DiscardFront)
2587 .into_verifying(true),
2588 SerializeError::Alloc(BufferTooShortError),
2589 );
2590 test_serialization_failure(
2591 TruncatingSerializer::new(body.clone(), TruncateDirection::NoTruncating)
2592 .into_verifying(false),
2593 SerializeError::SizeLimitExceeded,
2594 );
2595 }
2596
    // `try_reuse_buffer` succeeds when the buffer already has the requested
    // prefix/suffix, or when shifting the body costs at most `max_copy_bytes`
    // copied bytes; otherwise it returns the original buffer unmodified.
    #[test]
    fn test_try_reuse_buffer() {
        fn test_expect_success(
            body_range: Range<usize>,
            prefix: usize,
            suffix: usize,
            max_copy_bytes: usize,
        ) {
            let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
            let buffer = Buf::new(&mut bytes[..], body_range);
            let body = buffer.as_ref().to_vec();
            let buffer = try_reuse_buffer(buffer, prefix, suffix, max_copy_bytes).unwrap();
            assert_eq!(buffer.as_ref(), body.as_slice());
            assert!(buffer.prefix_len() >= prefix);
            assert!(buffer.suffix_len() >= suffix);
        }

        fn test_expect_failure(
            body_range: Range<usize>,
            prefix: usize,
            suffix: usize,
            max_copy_bytes: usize,
        ) {
            let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
            let buffer = Buf::new(&mut bytes[..], body_range.clone());
            let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
            let orig = Buf::new(&mut bytes[..], body_range.clone());
            // On failure the original buffer comes back unmodified.
            let buffer = try_reuse_buffer(buffer, prefix, suffix, max_copy_bytes).unwrap_err();
            assert_eq!(buffer, orig);
        }

        // Enough prefix/suffix already; no copying needed.
        test_expect_success(0..10, 0, 0, 0);
        test_expect_success(1..9, 1, 1, 0);
        // Reuse requires moving the 9-byte body, within the copy budget...
        test_expect_success(0..9, 1, 0, 9);
        test_expect_success(1..10, 0, 1, 9);
        // ...and fails when the budget is one byte short.
        test_expect_failure(0..9, 1, 0, 8);
        test_expect_failure(1..10, 0, 1, 8);
    }
2640
2641 #[test]
2642 fn test_maybe_reuse_buffer_provider() {
2643 fn test_expect(body_range: Range<usize>, prefix: usize, suffix: usize, expect_a: bool) {
2644 let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
2645 let buffer = Buf::new(&mut bytes[..], body_range);
2646 let body = buffer.as_ref().to_vec();
2647 let buffer = BufferProvider::reuse_or_realloc(
2648 MaybeReuseBufferProvider(new_buf_vec),
2649 buffer,
2650 prefix,
2651 suffix,
2652 )
2653 .unwrap();
2654 match &buffer {
2655 Either::A(_) if expect_a => {}
2656 Either::B(_) if !expect_a => {}
2657 Either::A(_) => panic!("expected Eitehr::B variant"),
2658 Either::B(_) => panic!("expected Eitehr::A variant"),
2659 }
2660 let bytes: &[u8] = buffer.as_ref();
2661 assert_eq!(bytes, body.as_slice());
2662 assert!(buffer.prefix_len() >= prefix);
2663 assert!(buffer.suffix_len() >= suffix);
2664 }
2665
2666 fn test_expect_reuse(body_range: Range<usize>, prefix: usize, suffix: usize) {
2668 test_expect(body_range, prefix, suffix, true);
2669 }
2670
2671 fn test_expect_realloc(body_range: Range<usize>, prefix: usize, suffix: usize) {
2673 test_expect(body_range, prefix, suffix, false);
2674 }
2675
2676 test_expect_reuse(0..10, 0, 0);
2678 test_expect_reuse(1..9, 1, 1);
2680 test_expect_reuse(0..9, 1, 0);
2683 test_expect_reuse(1..10, 0, 1);
2684 test_expect_realloc(0..9, 1, 1);
2686 test_expect_realloc(1..10, 1, 1);
2687 }
2688
    // `NoReuseBufferProvider` always allocates a fresh buffer with exactly
    // the requested prefix and suffix.
    #[test]
    fn test_no_reuse_buffer_provider() {
        #[track_caller]
        fn test_expect(body_range: Range<usize>, prefix: usize, suffix: usize) {
            let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
            let internal_buffer: Buf<&mut [u8]> = Buf::new(&mut bytes[..], body_range);
            let body = internal_buffer.as_ref().to_vec();
            let buffer: Buf<Vec<u8>> = BufferProvider::reuse_or_realloc(
                NoReuseBufferProvider(new_buf_vec),
                internal_buffer,
                prefix,
                suffix,
            )
            .unwrap();
            let bytes: &[u8] = buffer.as_ref();
            assert_eq!(bytes, body.as_slice());
            // Exact equality: the new buffer has precisely the space asked
            // for, never more.
            assert_eq!(buffer.prefix_len(), prefix);
            assert_eq!(buffer.suffix_len(), suffix);
        }
        test_expect(0..10, 0, 0);
        test_expect(1..9, 1, 1);
        test_expect(0..9, 10, 10);
        test_expect(1..10, 15, 15);
    }
2719
    /// A test buffer whose body is split across three fragments:
    /// `data[range.start..mid]`, all of `inner`, and `data[mid..range.end]`.
    struct ScatterGatherBuf<B> {
        data: Vec<u8>,
        // Split point within `range`: bytes before `mid` precede `inner`,
        // bytes from `mid` to `range.end` follow it.
        mid: usize,
        range: Range<usize>,
        inner: B,
    }
2749
    impl<B: BufferMut> FragmentedBuffer for ScatterGatherBuf<B> {
        fn len(&self) -> usize {
            self.inner.len() + (self.range.end - self.range.start)
        }

        fn with_bytes<R, F>(&self, f: F) -> R
        where
            F: for<'a, 'b> FnOnce(FragmentedBytes<'a, 'b>) -> R,
        {
            // Fragments: data[range.start..mid], inner, data[mid..range.end].
            let (_, rest) = self.data.split_at(self.range.start);
            let (prefix_b, rest) = rest.split_at(self.mid - self.range.start);
            let (suffix_b, _) = rest.split_at(self.range.end - self.mid);
            let mut bytes = [prefix_b, self.inner.as_ref(), suffix_b];
            f(FragmentedBytes::new(&mut bytes[..]))
        }
    }
2766
    impl<B: BufferMut> FragmentedBufferMut for ScatterGatherBuf<B> {
        fn with_bytes_mut<R, F>(&mut self, f: F) -> R
        where
            F: for<'a, 'b> FnOnce(FragmentedBytesMut<'a, 'b>) -> R,
        {
            // Mutable mirror of `with_bytes`: same three fragments.
            let (_, rest) = self.data.split_at_mut(self.range.start);
            let (prefix_b, rest) = rest.split_at_mut(self.mid - self.range.start);
            let (suffix_b, _) = rest.split_at_mut(self.range.end - self.mid);
            let mut bytes = [prefix_b, self.inner.as_mut(), suffix_b];
            f(FragmentedBytesMut::new(&mut bytes[..]))
        }
    }
2779
    impl<B: BufferMut> GrowBuffer for ScatterGatherBuf<B> {
        fn with_parts<O, F>(&self, f: F) -> O
        where
            F: for<'a, 'b> FnOnce(&'a [u8], FragmentedBytes<'a, 'b>, &'a [u8]) -> O,
        {
            // Prefix and suffix come from `data` outside `range`; the body is
            // the usual three fragments.
            let (prefix, rest) = self.data.split_at(self.range.start);
            let (prefix_b, rest) = rest.split_at(self.mid - self.range.start);
            let (suffix_b, suffix) = rest.split_at(self.range.end - self.mid);
            let mut bytes = [prefix_b, self.inner.as_ref(), suffix_b];
            f(prefix, bytes.as_fragmented_byte_slice(), suffix)
        }
        fn prefix_len(&self) -> usize {
            self.range.start
        }

        fn suffix_len(&self) -> usize {
            self.data.len() - self.range.end
        }

        // Growing moves `range`'s endpoints outward into `data`.
        fn grow_front(&mut self, n: usize) {
            self.range.start -= n;
        }

        fn grow_back(&mut self, n: usize) {
            self.range.end += n;
            assert!(self.range.end <= self.data.len());
        }
    }
2808
    impl<B: BufferMut> GrowBufferMut for ScatterGatherBuf<B> {
        fn with_parts_mut<O, F>(&mut self, f: F) -> O
        where
            F: for<'a, 'b> FnOnce(&'a mut [u8], FragmentedBytesMut<'a, 'b>, &'a mut [u8]) -> O,
        {
            // Mutable mirror of `with_parts`.
            let (prefix, rest) = self.data.split_at_mut(self.range.start);
            let (prefix_b, rest) = rest.split_at_mut(self.mid - self.range.start);
            let (suffix_b, suffix) = rest.split_at_mut(self.range.end - self.mid);
            let mut bytes = [prefix_b, self.inner.as_mut(), suffix_b];
            f(prefix, bytes.as_fragmented_byte_slice(), suffix)
        }
    }
2821
    /// A `BufferProvider` that wraps the reused buffer as the middle fragment
    /// of a `ScatterGatherBuf`, with fresh prefix/suffix storage around it.
    struct ScatterGatherProvider;

    impl<B: BufferMut> BufferProvider<B, ScatterGatherBuf<B>> for ScatterGatherProvider {
        type Error = Never;

        fn alloc_no_reuse(
            self,
            _prefix: usize,
            _body: usize,
            _suffix: usize,
        ) -> Result<ScatterGatherBuf<B>, Self::Error> {
            unimplemented!("not used in tests")
        }

        fn reuse_or_realloc(
            self,
            buffer: B,
            prefix: usize,
            suffix: usize,
        ) -> Result<ScatterGatherBuf<B>, (Self::Error, B)> {
            let inner = buffer;
            // `data` holds only the prefix and suffix bytes; the body range
            // starts empty at the split point and grows around `inner`.
            let data = vec![0; prefix + suffix];
            let range = Range { start: prefix, end: prefix };
            let mid = prefix;
            Ok(ScatterGatherBuf { inner, data, range, mid })
        }
    }
2849
    // End-to-end check that serialization works into a scatter-gather target:
    // the flattened result is header (0xFF), body, footer (0xFE).
    #[test]
    fn test_scatter_gather_serialize() {
        let buf = Buf::new(vec![10, 20, 30, 40, 50], ..);
        let pb = DummyPacketBuilder::new(3, 2, 0, usize::MAX);
        let ser = buf.encapsulate(pb);
        let result = ser.serialize_outer(ScatterGatherProvider {}).unwrap();
        let flattened = result.to_flattened_vec();
        assert_eq!(&flattened[..], &[0xFF, 0xFF, 0xFF, 10, 20, 30, 40, 50, 0xFE, 0xFE]);
    }
2861}