1use std::cmp;
8use std::convert::Infallible as Never;
9use std::fmt::{self, Debug, Formatter};
10use std::ops::{Range, RangeBounds};
11
12use arrayvec::ArrayVec;
13use zerocopy::SplitByteSlice;
14
15use crate::{
16 canonicalize_range, take_back, take_back_mut, take_front, take_front_mut,
17 AsFragmentedByteSlice, Buffer, BufferView, BufferViewMut, ContiguousBuffer, EmptyBuf,
18 FragmentedBuffer, FragmentedBufferMut, FragmentedBytes, FragmentedBytesMut, GrowBuffer,
19 GrowBufferMut, ParsablePacket, ParseBuffer, ParseBufferMut, ReusableBuffer, ShrinkBuffer,
20};
21
#[derive(Copy, Clone, Debug)]
/// A value that is one of two types, `A` or `B`.
///
/// Used throughout this module to choose between two buffer implementations
/// at runtime while still implementing the buffer traits for the combined
/// type (see the trait impls for `Either` below).
pub enum Either<A, B> {
    A(A),
    B(B),
}
32
33impl<A, B> Either<A, B> {
34 pub fn map_a<AA, F: FnOnce(A) -> AA>(self, f: F) -> Either<AA, B> {
40 match self {
41 Either::A(a) => Either::A(f(a)),
42 Either::B(b) => Either::B(b),
43 }
44 }
45
46 pub fn map_b<BB, F: FnOnce(B) -> BB>(self, f: F) -> Either<A, BB> {
52 match self {
53 Either::A(a) => Either::A(a),
54 Either::B(b) => Either::B(f(b)),
55 }
56 }
57
58 pub fn unwrap_a(self) -> A {
64 match self {
65 Either::A(x) => x,
66 Either::B(_) => panic!("This `Either<A, B>` does not hold the `A` variant"),
67 }
68 }
69
70 pub fn unwrap_b(self) -> B {
76 match self {
77 Either::A(_) => panic!("This `Either<A, B>` does not hold the `B` variant"),
78 Either::B(x) => x,
79 }
80 }
81}
82
83impl<A> Either<A, A> {
84 pub fn into_inner(self) -> A {
87 match self {
88 Either::A(x) => x,
89 Either::B(x) => x,
90 }
91 }
92}
93
94impl<A> Either<A, Never> {
95 #[inline]
97 pub fn into_a(self) -> A {
98 match self {
99 Either::A(a) => a,
100 }
101 }
102}
103
104impl<B> Either<Never, B> {
105 #[inline]
107 pub fn into_b(self) -> B {
108 match self {
109 Either::B(b) => b,
110 }
111 }
112}
113
/// Forwards a method call to whichever variant of an `Either` `$val` holds.
///
/// Both variants' types must provide the named method with a compatible
/// signature. The second rule is the zero-argument form, which delegates to
/// the first with an empty argument list.
macro_rules! call_method_on_either {
    ($val:expr, $method:ident, $($args:expr),*) => {
        match $val {
            Either::A(a) => a.$method($($args),*),
            Either::B(b) => b.$method($($args),*),
        }
    };
    ($val:expr, $method:ident) => {
        call_method_on_either!($val, $method,)
    };
}
125
// `Either` implements the buffer traits by delegating every method to the
// variant it currently holds.
impl<A, B> FragmentedBuffer for Either<A, B>
where
    A: FragmentedBuffer,
    B: FragmentedBuffer,
{
    fn len(&self) -> usize {
        call_method_on_either!(self, len)
    }

    fn with_bytes<R, F>(&self, f: F) -> R
    where
        F: for<'a, 'b> FnOnce(FragmentedBytes<'a, 'b>) -> R,
    {
        call_method_on_either!(self, with_bytes, f)
    }
}
148
// Marker trait: an `Either` of two contiguous buffers is itself contiguous.
impl<A, B> ContiguousBuffer for Either<A, B>
where
    A: ContiguousBuffer,
    B: ContiguousBuffer,
{
}
155
// Delegates all shrinking operations to the held variant.
impl<A, B> ShrinkBuffer for Either<A, B>
where
    A: ShrinkBuffer,
    B: ShrinkBuffer,
{
    fn shrink<R: RangeBounds<usize>>(&mut self, range: R) {
        call_method_on_either!(self, shrink, range)
    }
    fn shrink_front(&mut self, n: usize) {
        call_method_on_either!(self, shrink_front, n)
    }
    fn shrink_back(&mut self, n: usize) {
        call_method_on_either!(self, shrink_back, n)
    }
}
171
// Delegates parsing to the held variant.
impl<A, B> ParseBuffer for Either<A, B>
where
    A: ParseBuffer,
    B: ParseBuffer,
{
    fn parse<'a, P: ParsablePacket<&'a [u8], ()>>(&'a mut self) -> Result<P, P::Error> {
        call_method_on_either!(self, parse)
    }
    fn parse_with<'a, ParseArgs, P: ParsablePacket<&'a [u8], ParseArgs>>(
        &'a mut self,
        args: ParseArgs,
    ) -> Result<P, P::Error> {
        call_method_on_either!(self, parse_with, args)
    }
}
187
// Delegates mutable byte access to the held variant.
impl<A, B> FragmentedBufferMut for Either<A, B>
where
    A: FragmentedBufferMut,
    B: FragmentedBufferMut,
{
    fn with_bytes_mut<R, F>(&mut self, f: F) -> R
    where
        F: for<'a, 'b> FnOnce(FragmentedBytesMut<'a, 'b>) -> R,
    {
        call_method_on_either!(self, with_bytes_mut, f)
    }
}
200
// Delegates mutable parsing to the held variant.
impl<A, B> ParseBufferMut for Either<A, B>
where
    A: ParseBufferMut,
    B: ParseBufferMut,
{
    fn parse_mut<'a, P: ParsablePacket<&'a mut [u8], ()>>(&'a mut self) -> Result<P, P::Error> {
        call_method_on_either!(self, parse_mut)
    }
    fn parse_with_mut<'a, ParseArgs, P: ParsablePacket<&'a mut [u8], ParseArgs>>(
        &'a mut self,
        args: ParseArgs,
    ) -> Result<P, P::Error> {
        call_method_on_either!(self, parse_with_mut, args)
    }
}
216
// Delegates prefix/suffix management and growth to the held variant.
impl<A, B> GrowBuffer for Either<A, B>
where
    A: GrowBuffer,
    B: GrowBuffer,
{
    #[inline]
    fn with_parts<O, F>(&self, f: F) -> O
    where
        F: for<'a, 'b> FnOnce(&'a [u8], FragmentedBytes<'a, 'b>, &'a [u8]) -> O,
    {
        call_method_on_either!(self, with_parts, f)
    }
    fn capacity(&self) -> usize {
        call_method_on_either!(self, capacity)
    }
    fn prefix_len(&self) -> usize {
        call_method_on_either!(self, prefix_len)
    }
    fn suffix_len(&self) -> usize {
        call_method_on_either!(self, suffix_len)
    }
    fn grow_front(&mut self, n: usize) {
        call_method_on_either!(self, grow_front, n)
    }
    fn grow_back(&mut self, n: usize) {
        call_method_on_either!(self, grow_back, n)
    }
    fn reset(&mut self) {
        call_method_on_either!(self, reset)
    }
}
248
// Delegates mutable part access and serialization to the held variant.
impl<A, B> GrowBufferMut for Either<A, B>
where
    A: GrowBufferMut,
    B: GrowBufferMut,
{
    fn with_parts_mut<O, F>(&mut self, f: F) -> O
    where
        F: for<'a, 'b> FnOnce(&'a mut [u8], FragmentedBytesMut<'a, 'b>, &'a mut [u8]) -> O,
    {
        call_method_on_either!(self, with_parts_mut, f)
    }

    fn serialize<BB: PacketBuilder>(&mut self, builder: BB) {
        call_method_on_either!(self, serialize, builder)
    }
}
265
// Delegates combined parse-and-view to the held variant.
impl<A, B> Buffer for Either<A, B>
where
    A: Buffer,
    B: Buffer,
{
    fn parse_with_view<'a, ParseArgs, P: ParsablePacket<&'a [u8], ParseArgs>>(
        &'a mut self,
        args: ParseArgs,
    ) -> Result<(P, &'a [u8]), P::Error> {
        call_method_on_either!(self, parse_with_view, args)
    }
}
278
// Borrows the held variant's bytes.
impl<A: AsRef<[u8]>, B: AsRef<[u8]>> AsRef<[u8]> for Either<A, B> {
    fn as_ref(&self) -> &[u8] {
        call_method_on_either!(self, as_ref)
    }
}
284
// Mutably borrows the held variant's bytes.
impl<A: AsMut<[u8]>, B: AsMut<[u8]>> AsMut<[u8]> for Either<A, B> {
    fn as_mut(&mut self) -> &mut [u8] {
        call_method_on_either!(self, as_mut)
    }
}
290
#[derive(Clone, Debug)]
/// A byte buffer backed by a contiguous byte container `B`.
///
/// `body` is the sub-range of `buf` currently considered the packet body;
/// the bytes before `body.start` are the prefix and those after `body.end`
/// are the suffix (see `GrowBuffer`/`ShrinkBuffer`).
pub struct Buf<B> {
    buf: B,
    body: Range<usize>,
}
301
302impl<B: AsRef<[u8]>> PartialEq for Buf<B> {
303 fn eq(&self, other: &Self) -> bool {
304 let self_slice = AsRef::<[u8]>::as_ref(self);
305 let other_slice = AsRef::<[u8]>::as_ref(other);
306 PartialEq::eq(self_slice, other_slice)
307 }
308}
309
// Body-slice equality is a total equivalence relation, so `Eq` holds.
impl<B: AsRef<[u8]>> Eq for Buf<B> {}
311
312impl Buf<Vec<u8>> {
313 pub fn into_inner(self) -> Vec<u8> {
315 let Buf { mut buf, body } = self;
316 let len = body.end - body.start;
317 let _ = buf.drain(..body.start);
318 buf.truncate(len);
319 buf
320 }
321}
322
impl<B: AsRef<[u8]>> Buf<B> {
    /// Constructs a `Buf` from `buf`, with `body` selecting which sub-range
    /// of `buf` is the body.
    ///
    /// The range is canonicalized against `buf`'s length by
    /// `canonicalize_range` (which handles open/closed bounds).
    pub fn new<R: RangeBounds<usize>>(buf: B, body: R) -> Buf<B> {
        let len = buf.as_ref().len();
        Buf { buf, body: canonicalize_range(len, &body) }
    }

    /// Returns a `BufView` over this buffer's body; consuming bytes from the
    /// view updates `self.body` accordingly.
    pub fn buffer_view(&mut self) -> BufView<'_> {
        BufView { buf: &self.buf.as_ref()[self.body.clone()], body: &mut self.body }
    }
}
344
impl<B: AsRef<[u8]> + AsMut<[u8]>> Buf<B> {
    /// Returns a mutable `BufViewMut` over this buffer's body; consuming
    /// bytes from the view updates `self.body` accordingly.
    pub fn buffer_view_mut(&mut self) -> BufViewMut<'_> {
        BufViewMut { buf: &mut self.buf.as_mut()[self.body.clone()], body: &mut self.body }
    }
}
351
// Contiguous buffers get their fragmented-buffer methods from this shared
// macro, which implements them in terms of `AsRef<[u8]>`.
impl<B: AsRef<[u8]>> FragmentedBuffer for Buf<B> {
    fragmented_buffer_method_impls!();
}
// `Buf` stores its body in a single slice, so it is contiguous by construction.
impl<B: AsRef<[u8]>> ContiguousBuffer for Buf<B> {}
impl<B: AsRef<[u8]>> ShrinkBuffer for Buf<B> {
    fn shrink<R: RangeBounds<usize>>(&mut self, range: R) {
        // `range` is relative to the current body, so shift it by the body's
        // start to translate it into an absolute range within `buf`.
        let len = self.len();
        let mut range = canonicalize_range(len, &range);
        range.start += self.body.start;
        range.end += self.body.start;
        self.body = range;
    }

    // Both shrink_front and shrink_back panic if `n` exceeds the body length.
    fn shrink_front(&mut self, n: usize) {
        assert!(n <= self.len());
        self.body.start += n;
    }
    fn shrink_back(&mut self, n: usize) {
        assert!(n <= self.len());
        self.body.end -= n;
    }
}
impl<B: AsRef<[u8]>> ParseBuffer for Buf<B> {
    // Parsing operates through a `BufView`, so bytes consumed by the parser
    // shrink this buffer's body.
    fn parse_with<'a, ParseArgs, P: ParsablePacket<&'a [u8], ParseArgs>>(
        &'a mut self,
        args: ParseArgs,
    ) -> Result<P, P::Error> {
        P::parse(self.buffer_view(), args)
    }
}
382
// Mutable fragmented-buffer methods, implemented by the shared macro in
// terms of `AsMut<[u8]>`.
impl<B: AsRef<[u8]> + AsMut<[u8]>> FragmentedBufferMut for Buf<B> {
    fragmented_buffer_mut_method_impls!();
}
386
impl<B: AsRef<[u8]> + AsMut<[u8]>> ParseBufferMut for Buf<B> {
    // Mutable parsing mirrors `parse_with` but hands the parser a mutable view.
    fn parse_with_mut<'a, ParseArgs, P: ParsablePacket<&'a mut [u8], ParseArgs>>(
        &'a mut self,
        args: ParseArgs,
    ) -> Result<P, P::Error> {
        P::parse_mut(self.buffer_view_mut(), args)
    }
}
395
impl<B: AsRef<[u8]>> GrowBuffer for Buf<B> {
    fn with_parts<O, F>(&self, f: F) -> O
    where
        F: for<'a, 'b> FnOnce(&'a [u8], FragmentedBytes<'a, 'b>, &'a [u8]) -> O,
    {
        // Split the backing storage into prefix / body / suffix and present
        // the single-slice body as a one-fragment FragmentedBytes.
        let (prefix, buf) = self.buf.as_ref().split_at(self.body.start);
        let (body, suffix) = buf.split_at(self.body.end - self.body.start);
        let mut body = [&body[..]];
        f(prefix, body.as_fragmented_byte_slice(), suffix)
    }
    fn capacity(&self) -> usize {
        self.buf.as_ref().len()
    }
    fn prefix_len(&self) -> usize {
        self.body.start
    }
    fn suffix_len(&self) -> usize {
        self.buf.as_ref().len() - self.body.end
    }
    // grow_front/grow_back panic if `n` exceeds the available prefix/suffix.
    fn grow_front(&mut self, n: usize) {
        assert!(n <= self.body.start);
        self.body.start -= n;
    }
    fn grow_back(&mut self, n: usize) {
        assert!(n <= self.buf.as_ref().len() - self.body.end);
        self.body.end += n;
    }
}
424
impl<B: AsRef<[u8]> + AsMut<[u8]>> GrowBufferMut for Buf<B> {
    fn with_parts_mut<O, F>(&mut self, f: F) -> O
    where
        F: for<'a, 'b> FnOnce(&'a mut [u8], FragmentedBytesMut<'a, 'b>, &'a mut [u8]) -> O,
    {
        // Mutable analogue of `with_parts`: prefix / body / suffix split of
        // the backing storage, with the body as a one-fragment slice.
        let (prefix, buf) = self.buf.as_mut().split_at_mut(self.body.start);
        let (body, suffix) = buf.split_at_mut(self.body.end - self.body.start);
        let mut body = [&mut body[..]];
        f(prefix, body.as_fragmented_byte_slice(), suffix)
    }
}
436
437impl<B: AsRef<[u8]>> AsRef<[u8]> for Buf<B> {
438 fn as_ref(&self) -> &[u8] {
439 &self.buf.as_ref()[self.body.clone()]
440 }
441}
442
443impl<B: AsMut<[u8]>> AsMut<[u8]> for Buf<B> {
444 fn as_mut(&mut self) -> &mut [u8] {
445 &mut self.buf.as_mut()[self.body.clone()]
446 }
447}
448
impl<B: AsRef<[u8]>> Buffer for Buf<B> {
    fn parse_with_view<'a, ParseArgs, P: ParsablePacket<&'a [u8], ParseArgs>>(
        &'a mut self,
        args: ParseArgs,
    ) -> Result<(P, &'a [u8]), P::Error> {
        // Remember the body range before parsing so the caller also gets a
        // view of the bytes the parser consumed (the parse shrinks `body`).
        let Self { body, ref buf } = self;
        let body_before = body.clone();
        let view = BufView { buf: &buf.as_ref()[body.clone()], body };
        P::parse(view, args).map(|r| (r, &buf.as_ref()[body_before]))
    }
}
460
/// An immutable `BufferView` into a `Buf`.
///
/// `buf` is the remaining body slice; `body` points back at the owning
/// `Buf`'s body range so consuming bytes here shrinks the `Buf` too.
pub struct BufView<'a> {
    buf: &'a [u8],
    body: &'a mut Range<usize>,
}
469
impl<'a> BufferView<&'a [u8]> for BufView<'a> {
    // Takes `n` bytes from the front, returning `None` if fewer remain.
    // The owning `Buf`'s body start is advanced to match.
    fn take_front(&mut self, n: usize) -> Option<&'a [u8]> {
        if self.len() < n {
            return None;
        }
        self.body.start += n;
        Some(take_front(&mut self.buf, n))
    }

    // Takes `n` bytes from the back, returning `None` if fewer remain.
    // The owning `Buf`'s body end is retracted to match.
    fn take_back(&mut self, n: usize) -> Option<&'a [u8]> {
        if self.len() < n {
            return None;
        }
        self.body.end -= n;
        Some(take_back(&mut self.buf, n))
    }

    fn into_rest(self) -> &'a [u8] {
        self.buf
    }
}
491
// Exposes the not-yet-consumed bytes of the view.
impl<'a> AsRef<[u8]> for BufView<'a> {
    fn as_ref(&self) -> &[u8] {
        self.buf
    }
}
497
/// A mutable `BufferView` into a `Buf`.
///
/// Mutable counterpart of `BufView`: `buf` is the remaining body slice and
/// `body` keeps the owning `Buf`'s body range in sync.
pub struct BufViewMut<'a> {
    buf: &'a mut [u8],
    body: &'a mut Range<usize>,
}
507
impl<'a> BufferView<&'a mut [u8]> for BufViewMut<'a> {
    // Takes `n` bytes from the front, returning `None` if fewer remain.
    // The owning `Buf`'s body start is advanced to match.
    fn take_front(&mut self, n: usize) -> Option<&'a mut [u8]> {
        if self.len() < n {
            return None;
        }
        self.body.start += n;
        Some(take_front_mut(&mut self.buf, n))
    }

    // Takes `n` bytes from the back, returning `None` if fewer remain.
    // The owning `Buf`'s body end is retracted to match.
    fn take_back(&mut self, n: usize) -> Option<&'a mut [u8]> {
        if self.len() < n {
            return None;
        }
        self.body.end -= n;
        Some(take_back_mut(&mut self.buf, n))
    }

    fn into_rest(self) -> &'a mut [u8] {
        self.buf
    }
}
529
// `BufferViewMut`'s methods are all provided; only the marker impl is needed.
impl<'a> BufferViewMut<&'a mut [u8]> for BufViewMut<'a> {}
531
// Exposes the not-yet-consumed bytes of the view.
impl<'a> AsRef<[u8]> for BufViewMut<'a> {
    fn as_ref(&self) -> &[u8] {
        self.buf
    }
}
537
// Mutably exposes the not-yet-consumed bytes of the view.
impl<'a> AsMut<[u8]> for BufViewMut<'a> {
    fn as_mut(&mut self) -> &mut [u8] {
        self.buf
    }
}
543
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
/// The constraints a packet layer imposes on its body.
///
/// Invariants (upheld by all constructors in this module):
/// - `min_body_len <= max_body_len`
/// - `header_len + min_body_len + footer_len` does not overflow `usize`
pub struct PacketConstraints {
    header_len: usize,
    footer_len: usize,
    min_body_len: usize,
    max_body_len: usize,
}
564
impl PacketConstraints {
    /// Constraints which do not constrain at all: no header/footer, no body
    /// minimum, and the maximum possible body maximum.
    pub const UNCONSTRAINED: Self =
        Self { header_len: 0, footer_len: 0, min_body_len: 0, max_body_len: usize::MAX };

    /// Constructs new `PacketConstraints`.
    ///
    /// # Panics
    ///
    /// Panics if the invariants would be violated; see `try_new`.
    #[inline]
    pub fn new(
        header_len: usize,
        footer_len: usize,
        min_body_len: usize,
        max_body_len: usize,
    ) -> PacketConstraints {
        PacketConstraints::try_new(header_len, footer_len, min_body_len, max_body_len).expect(
            "max_body_len < min_body_len or header_len + min_body_len + footer_len overflows usize",
        )
    }

    /// Constructs new `PacketConstraints`, returning `None` if
    /// `max_body_len < min_body_len` or if
    /// `header_len + min_body_len + footer_len` overflows `usize`.
    #[inline]
    pub fn try_new(
        header_len: usize,
        footer_len: usize,
        min_body_len: usize,
        max_body_len: usize,
    ) -> Option<PacketConstraints> {
        let header_min_body_footer_overflows = header_len
            .checked_add(min_body_len)
            .and_then(|sum| sum.checked_add(footer_len))
            .is_none();
        let max_less_than_min = max_body_len < min_body_len;
        if max_less_than_min || header_min_body_footer_overflows {
            return None;
        }
        Some(PacketConstraints { header_len, footer_len, min_body_len, max_body_len })
    }

    /// Constructs constraints with only a maximum body length; all other
    /// fields are zero, so the invariants trivially hold.
    #[inline]
    pub fn with_max_body_len(max_body_len: usize) -> PacketConstraints {
        PacketConstraints { header_len: 0, footer_len: 0, min_body_len: 0, max_body_len }
    }

    /// The number of bytes this layer's header consumes.
    #[inline]
    pub fn header_len(&self) -> usize {
        self.header_len
    }

    /// The number of bytes this layer's footer consumes.
    #[inline]
    pub fn footer_len(&self) -> usize {
        self.footer_len
    }

    /// The minimum body length required by this layer (bodies shorter than
    /// this must be padded).
    #[inline]
    pub fn min_body_len(&self) -> usize {
        self.min_body_len
    }

    /// The maximum body length allowed by this layer.
    #[inline]
    pub fn max_body_len(&self) -> usize {
        self.max_body_len
    }

    /// Combines `self` (the inner layer) with `outer`, producing the
    /// constraints of the nested packet, or `None` if the combination is
    /// unsatisfiable or overflows.
    pub fn try_encapsulate(&self, outer: &Self) -> Option<PacketConstraints> {
        let inner = self;
        let header_len = inner.header_len.checked_add(outer.header_len)?;
        let footer_len = inner.footer_len.checked_add(outer.footer_len)?;
        // This unchecked add relies on the type's invariant: since
        // header_len + min_body_len + footer_len doesn't overflow, neither
        // does header_len + footer_len alone.
        let inner_header_footer_len = inner.header_len + inner.footer_len;
        // The outer minimum is partially satisfied by the inner header and
        // footer; whatever remains must come from the inner body.
        let min_body_len = cmp::max(
            outer.min_body_len.saturating_sub(inner_header_footer_len),
            inner.min_body_len,
        );
        // If the inner header and footer alone exceed the outer maximum,
        // checked_sub fails and no body length can satisfy both layers.
        let max_body_len =
            cmp::min(outer.max_body_len.checked_sub(inner_header_footer_len)?, inner.max_body_len);
        PacketConstraints::try_new(header_len, footer_len, min_body_len, max_body_len)
    }
}
705
/// The target buffers into which a `PacketBuilder` writes its header and
/// footer during serialization.
pub struct SerializeTarget<'a> {
    #[allow(missing_docs)]
    pub header: &'a mut [u8],
    #[allow(missing_docs)]
    pub footer: &'a mut [u8],
}
714
/// A builder capable of serializing one packet layer's header and footer
/// around an existing body.
pub trait PacketBuilder {
    /// The constraints (header/footer lengths, body bounds) of this layer.
    fn constraints(&self) -> PacketConstraints;

    /// Serializes this layer's header and footer into `target`; `body` is
    /// the already-serialized payload, provided for e.g. checksumming.
    fn serialize(&self, target: &mut SerializeTarget<'_>, body: FragmentedBytesMut<'_, '_>);
}
765
// A shared reference to a builder is itself a builder.
impl<'a, B: PacketBuilder> PacketBuilder for &'a B {
    #[inline]
    fn constraints(&self) -> PacketConstraints {
        B::constraints(self)
    }
    #[inline]
    fn serialize(&self, target: &mut SerializeTarget<'_>, body: FragmentedBytesMut<'_, '_>) {
        B::serialize(self, target, body)
    }
}
776
// A mutable reference to a builder is itself a builder.
impl<'a, B: PacketBuilder> PacketBuilder for &'a mut B {
    #[inline]
    fn constraints(&self) -> PacketConstraints {
        B::constraints(self)
    }
    #[inline]
    fn serialize(&self, target: &mut SerializeTarget<'_>, body: FragmentedBytesMut<'_, '_>) {
        B::serialize(self, target, body)
    }
}
787
// `()` is the no-op builder: no header, no footer, no body constraints.
impl PacketBuilder for () {
    #[inline]
    fn constraints(&self) -> PacketConstraints {
        PacketConstraints::UNCONSTRAINED
    }
    #[inline]
    fn serialize(&self, _target: &mut SerializeTarget<'_>, _body: FragmentedBytesMut<'_, '_>) {}
}
796
// `Never` is uninhabited, so these methods can never actually be called;
// `match *self {}` makes that explicit.
impl PacketBuilder for Never {
    fn constraints(&self) -> PacketConstraints {
        match *self {}
    }
    fn serialize(&self, _target: &mut SerializeTarget<'_>, _body: FragmentedBytesMut<'_, '_>) {}
}
803
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
/// An `inner` value encapsulated in an `outer` layer (e.g. a serializer
/// wrapped in a `PacketBuilder`); produced by `Serializer::encapsulate`.
pub struct Nested<I, O> {
    inner: I,
    outer: O,
}
817
818impl<I, O> Nested<I, O> {
819 #[inline]
822 pub fn into_inner(self) -> I {
823 self.inner
824 }
825
826 #[inline]
829 pub fn into_outer(self) -> O {
830 self.outer
831 }
832
833 #[inline]
834 pub fn inner(&self) -> &I {
835 &self.inner
836 }
837
838 #[inline]
839 pub fn inner_mut(&mut self) -> &mut I {
840 &mut self.inner
841 }
842
843 #[inline]
844 pub fn outer(&self) -> &O {
845 &self.outer
846 }
847
848 #[inline]
849 pub fn outer_mut(&mut self) -> &mut O {
850 &mut self.outer
851 }
852}
853
#[derive(Copy, Clone, Debug)]
#[cfg_attr(test, derive(Eq, PartialEq))]
/// A `PacketBuilder` that only enforces a maximum body size; used to
/// implement `Serializer::with_size_limit`.
pub struct LimitedSizePacketBuilder {
    // The maximum body length, in bytes.
    pub limit: usize,
}
865
impl PacketBuilder for LimitedSizePacketBuilder {
    fn constraints(&self) -> PacketConstraints {
        PacketConstraints::with_max_body_len(self.limit)
    }

    // No header or footer to write.
    fn serialize(&self, _target: &mut SerializeTarget<'_>, _body: FragmentedBytesMut<'_, '_>) {}
}
873
/// A builder for the innermost layer of a packet: a payload that is written
/// into a buffer whole, with no header/footer of its own.
pub trait InnerPacketBuilder {
    /// The number of bytes this payload consumes when serialized.
    fn bytes_len(&self) -> usize;

    /// Serializes the payload into `buffer`, which is exactly `bytes_len()`
    /// bytes long.
    fn serialize(&self, buffer: &mut [u8]);

    /// Converts this builder into a `Serializer` backed by an empty buffer.
    #[inline]
    fn into_serializer(self) -> InnerSerializer<Self, EmptyBuf>
    where
        Self: Sized,
    {
        self.into_serializer_with(EmptyBuf)
    }

    /// Converts this builder into a `Serializer`, reusing `buffer`'s
    /// storage. The buffer's body is first shrunk to zero length so its
    /// existing contents are treated as capacity, not data.
    fn into_serializer_with<B: ShrinkBuffer>(self, mut buffer: B) -> InnerSerializer<Self, B>
    where
        Self: Sized,
    {
        buffer.shrink_back_to(0);
        InnerSerializer { inner: self, buffer }
    }
}
941
// A shared reference to an inner builder is itself an inner builder.
impl<'a, I: InnerPacketBuilder> InnerPacketBuilder for &'a I {
    #[inline]
    fn bytes_len(&self) -> usize {
        I::bytes_len(self)
    }
    #[inline]
    fn serialize(&self, buffer: &mut [u8]) {
        I::serialize(self, buffer)
    }
}
// A mutable reference to an inner builder is itself an inner builder.
impl<'a, I: InnerPacketBuilder> InnerPacketBuilder for &'a mut I {
    #[inline]
    fn bytes_len(&self) -> usize {
        I::bytes_len(self)
    }
    #[inline]
    fn serialize(&self, buffer: &mut [u8]) {
        I::serialize(self, buffer)
    }
}
// A byte slice serializes by copying itself into the target buffer.
impl<'a> InnerPacketBuilder for &'a [u8] {
    #[inline]
    fn bytes_len(&self) -> usize {
        self.len()
    }
    #[inline]
    fn serialize(&self, buffer: &mut [u8]) {
        // `buffer` is guaranteed to be exactly `bytes_len()` bytes.
        buffer.copy_from_slice(self);
    }
}
// A mutable byte slice serializes by copying itself into the target buffer.
impl<'a> InnerPacketBuilder for &'a mut [u8] {
    #[inline]
    fn bytes_len(&self) -> usize {
        self.len()
    }
    #[inline]
    fn serialize(&self, buffer: &mut [u8]) {
        // `buffer` is guaranteed to be exactly `bytes_len()` bytes.
        buffer.copy_from_slice(self);
    }
}
982impl<'a> InnerPacketBuilder for Vec<u8> {
983 #[inline]
984 fn bytes_len(&self) -> usize {
985 self.len()
986 }
987 #[inline]
988 fn serialize(&self, buffer: &mut [u8]) {
989 buffer.copy_from_slice(self.as_slice());
990 }
991}
// An `ArrayVec<u8, N>` delegates to the slice implementation.
impl<const N: usize> InnerPacketBuilder for ArrayVec<u8, N> {
    fn bytes_len(&self) -> usize {
        self.as_slice().bytes_len()
    }
    fn serialize(&self, buffer: &mut [u8]) {
        self.as_slice().serialize(buffer);
    }
}
1000
/// An `InnerPacketBuilder` wrapper for any `zerocopy::SplitByteSlice`.
pub struct ByteSliceInnerPacketBuilder<B>(pub B);
1008
// Delegates to the `&[u8]` implementation via the wrapped slice's deref.
impl<B: SplitByteSlice> InnerPacketBuilder for ByteSliceInnerPacketBuilder<B> {
    fn bytes_len(&self) -> usize {
        self.0.deref().bytes_len()
    }
    fn serialize(&self, buffer: &mut [u8]) {
        self.0.deref().serialize(buffer)
    }
}
1017
// Manual Debug impl: prints the wrapped bytes rather than requiring
// `B: Debug`.
impl<B: SplitByteSlice> Debug for ByteSliceInnerPacketBuilder<B> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        write!(f, "ByteSliceInnerPacketBuilder({:?})", self.0.as_ref())
    }
}
1023
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
/// An error encountered while serializing.
pub enum SerializeError<A> {
    // A buffer could not be allocated (or reused); carries the allocator's
    // error value.
    Alloc(A),
    // The body exceeded the applicable size limit and truncation was not
    // permitted.
    SizeLimitExceeded,
}
1037
1038impl<A> SerializeError<A> {
1039 #[inline]
1041 pub fn is_alloc(&self) -> bool {
1042 match self {
1043 SerializeError::Alloc(_) => true,
1044 SerializeError::SizeLimitExceeded => false,
1045 }
1046 }
1047
1048 #[inline]
1050 pub fn is_size_limit_exceeded(&self) -> bool {
1051 match self {
1052 SerializeError::Alloc(_) => false,
1053 SerializeError::SizeLimitExceeded => true,
1054 }
1055 }
1056
1057 pub fn map_alloc<T, F: FnOnce(A) -> T>(self, f: F) -> SerializeError<T> {
1059 match self {
1060 SerializeError::Alloc(a) => SerializeError::Alloc(f(a)),
1061 SerializeError::SizeLimitExceeded => SerializeError::SizeLimitExceeded,
1062 }
1063 }
1064}
1065
// Any allocation error converts directly into `SerializeError::Alloc`.
impl<A> From<A> for SerializeError<A> {
    fn from(a: A) -> SerializeError<A> {
        SerializeError::Alloc(a)
    }
}
1071
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
/// Error returned when an existing buffer is too short to serialize into and
/// no allocation is permitted (see `Serializer::serialize_no_alloc`).
pub struct BufferTooShortError;
1082
/// A provider of output buffers for serialization, given an `Input` buffer
/// that may or may not be reusable.
pub trait BufferProvider<Input, Output> {
    /// The error returned when a buffer cannot be produced.
    type Error;

    /// Allocates a fresh output buffer with the given prefix, body, and
    /// suffix byte counts, ignoring any existing input buffer.
    fn alloc_no_reuse(
        self,
        prefix: usize,
        body: usize,
        suffix: usize,
    ) -> Result<Output, Self::Error>;

    /// Produces an output buffer holding `buffer`'s body with at least
    /// `prefix`/`suffix` bytes around it — by reusing `buffer` if possible,
    /// otherwise reallocating. On failure, `buffer` is returned unharmed.
    fn reuse_or_realloc(
        self,
        buffer: Input,
        prefix: usize,
        suffix: usize,
    ) -> Result<Output, (Self::Error, Input)>;
}
1140
/// An allocator of output buffers of a requested length.
pub trait BufferAlloc<Output> {
    /// The error returned when allocation fails.
    type Error;

    /// Allocates a buffer of length `len`.
    fn alloc(self, len: usize) -> Result<Output, Self::Error>;
}
1170
// Any `FnOnce(usize) -> Result<O, E>` closure is an allocator (e.g.
// `new_buf_vec` below).
impl<O, E, F: FnOnce(usize) -> Result<O, E>> BufferAlloc<O> for F {
    type Error = E;

    #[inline]
    fn alloc(self, len: usize) -> Result<O, E> {
        self(len)
    }
}
1179
// `()` is the "never allocate" allocator: it always fails, and its output
// type is uninhabited. Used to implement `serialize_no_alloc`.
impl BufferAlloc<Never> for () {
    type Error = ();

    #[inline]
    fn alloc(self, _len: usize) -> Result<Never, ()> {
        Err(())
    }
}
1188
/// Allocates a zeroed, `Vec`-backed `Buf` of length `len` whose body spans
/// the whole buffer. This allocation is infallible (the error type is
/// uninhabited), making it usable as a `BufferAlloc` closure.
pub fn new_buf_vec(len: usize) -> Result<Buf<Vec<u8>>, Never> {
    Ok(Buf::new(vec![0; len], ..))
}
1202
#[inline]
/// Attempts to reuse `buffer` so that its body is surrounded by at least
/// `prefix` prefix bytes and `suffix` suffix bytes.
///
/// Succeeds immediately if the existing prefix/suffix are already large
/// enough; otherwise, if total capacity suffices and the body is at most
/// `max_copy_bytes` long, the body is moved within the buffer. Returns
/// `Err(buffer)` (unmodified) if neither is possible.
pub fn try_reuse_buffer<B: GrowBufferMut + ShrinkBuffer>(
    mut buffer: B,
    prefix: usize,
    suffix: usize,
    max_copy_bytes: usize,
) -> Result<B, B> {
    let need_prefix = prefix;
    let need_suffix = suffix;
    let have_prefix = buffer.prefix_len();
    let have_body = buffer.len();
    let have_suffix = buffer.suffix_len();
    let need_capacity = need_prefix + have_body + need_suffix;

    if have_prefix >= need_prefix && have_suffix >= need_suffix {
        // Already enough room on both sides; nothing to move.
        Ok(buffer)
    } else if buffer.capacity() >= need_capacity && have_body <= max_copy_bytes {
        // Expand the body to the full capacity so indices below are
        // absolute, then move the body into position and re-shrink.
        buffer.reset();

        buffer.copy_within(have_prefix..(have_prefix + have_body), need_prefix);
        buffer.shrink(need_prefix..(need_prefix + have_body));
        debug_assert_eq!(buffer.prefix_len(), need_prefix);
        debug_assert!(buffer.suffix_len() >= need_suffix);
        debug_assert_eq!(buffer.len(), have_body);
        Ok(buffer)
    } else {
        Err(buffer)
    }
}
1262
/// A `BufferProvider` that reuses the input buffer when it can, and falls
/// back to allocating with `A` when it cannot.
pub struct MaybeReuseBufferProvider<A>(pub A);
1267
impl<I: ReusableBuffer, O: ReusableBuffer, A: BufferAlloc<O>> BufferProvider<I, Either<I, O>>
    for MaybeReuseBufferProvider<A>
{
    type Error = A::Error;

    // Always allocates fresh via the wrapped allocator, so the result is
    // always the `B` variant.
    fn alloc_no_reuse(
        self,
        prefix: usize,
        body: usize,
        suffix: usize,
    ) -> Result<Either<I, O>, Self::Error> {
        let Self(alloc) = self;
        let need_capacity = prefix + body + suffix;
        BufferAlloc::alloc(alloc, need_capacity).map(|mut buf| {
            // Position the body range so the requested prefix/suffix remain.
            buf.shrink(prefix..(prefix + body));
            Either::B(buf)
        })
    }

    // First tries to reuse `buffer` in place (moving the body within it if
    // needed); only if that fails does it allocate a new buffer and copy
    // the body across. On allocation failure the original buffer is
    // returned so the caller can recover it.
    #[inline]
    fn reuse_or_realloc(
        self,
        buffer: I,
        need_prefix: usize,
        need_suffix: usize,
    ) -> Result<Either<I, O>, (A::Error, I)> {
        match try_reuse_buffer(buffer, need_prefix, need_suffix, usize::MAX) {
            Ok(buffer) => Ok(Either::A(buffer)),
            Err(buffer) => {
                let have_body = buffer.len();
                let mut buf = match BufferProvider::<I, Either<I, O>>::alloc_no_reuse(
                    self,
                    need_prefix,
                    have_body,
                    need_suffix,
                ) {
                    Ok(buf) => buf,
                    Err(err) => return Err((err, buffer)),
                };

                buf.copy_from(&buffer);
                debug_assert_eq!(buf.prefix_len(), need_prefix);
                debug_assert!(buf.suffix_len() >= need_suffix);
                debug_assert_eq!(buf.len(), have_body);
                Ok(buf)
            }
        }
    }
}
1329
// When input and output buffer types coincide, the `Either<B, B>` of the
// impl above collapses into a plain `B` via `Either::into_inner`.
impl<B: ReusableBuffer, A: BufferAlloc<B>> BufferProvider<B, B> for MaybeReuseBufferProvider<A> {
    type Error = A::Error;

    fn alloc_no_reuse(self, prefix: usize, body: usize, suffix: usize) -> Result<B, Self::Error> {
        BufferProvider::<B, Either<B, B>>::alloc_no_reuse(self, prefix, body, suffix)
            .map(Either::into_inner)
    }

    #[inline]
    fn reuse_or_realloc(self, buffer: B, prefix: usize, suffix: usize) -> Result<B, (A::Error, B)> {
        BufferProvider::<B, Either<B, B>>::reuse_or_realloc(self, buffer, prefix, suffix)
            .map(Either::into_inner)
    }
}
1352
/// A `BufferProvider` that always allocates a fresh buffer with `A`, never
/// reusing the input buffer's storage.
pub struct NoReuseBufferProvider<A>(pub A);
1357
impl<I: FragmentedBuffer, O: ReusableBuffer, A: BufferAlloc<O>> BufferProvider<I, O>
    for NoReuseBufferProvider<A>
{
    type Error = A::Error;

    fn alloc_no_reuse(self, prefix: usize, body: usize, suffix: usize) -> Result<O, A::Error> {
        let Self(alloc) = self;
        alloc.alloc(prefix + body + suffix).map(|mut b| {
            // Position the body range so the requested prefix/suffix remain.
            b.shrink(prefix..prefix + body);
            b
        })
    }

    // Always allocates and copies the body over; the input buffer is only
    // read, and is handed back alongside any allocation error.
    fn reuse_or_realloc(self, buffer: I, prefix: usize, suffix: usize) -> Result<O, (A::Error, I)> {
        BufferProvider::<I, O>::alloc_no_reuse(self, prefix, buffer.len(), suffix)
            .map(|mut b| {
                b.copy_from(&buffer);
                b
            })
            .map_err(|e| (e, buffer))
    }
}
1380
/// A type which can be serialized, producing a buffer containing its bytes
/// framed by whatever outer constraints are imposed on it.
pub trait Serializer: Sized {
    /// The buffer type this serializer may carry and offer for reuse.
    type Buffer;

    /// Serializes, obtaining an output buffer from `provider` (which may
    /// reuse `Self::Buffer`). On failure, returns `self` so the caller can
    /// retry or recover the input.
    fn serialize<B: GrowBufferMut, P: BufferProvider<Self::Buffer, B>>(
        self,
        outer: PacketConstraints,
        provider: P,
    ) -> Result<B, (SerializeError<P::Error>, Self)>;

    /// Serializes into a freshly allocated buffer, leaving `self` intact.
    fn serialize_new_buf<B: ReusableBuffer, A: BufferAlloc<B>>(
        &self,
        outer: PacketConstraints,
        alloc: A,
    ) -> Result<B, SerializeError<A::Error>>;

    /// Serializes, reusing the internal buffer when possible and falling
    /// back to a `Vec`-backed buffer otherwise. Allocation is infallible.
    #[inline]
    #[allow(clippy::type_complexity)]
    fn serialize_vec(
        self,
        outer: PacketConstraints,
    ) -> Result<Either<Self::Buffer, Buf<Vec<u8>>>, (SerializeError<Never>, Self)>
    where
        Self::Buffer: ReusableBuffer,
    {
        self.serialize(outer, MaybeReuseBufferProvider(new_buf_vec))
    }

    /// Serializes only if the internal buffer can be reused; a needed
    /// allocation becomes a `BufferTooShortError`.
    #[inline]
    fn serialize_no_alloc(
        self,
        outer: PacketConstraints,
    ) -> Result<Self::Buffer, (SerializeError<BufferTooShortError>, Self)>
    where
        Self::Buffer: ReusableBuffer,
    {
        // `()` is the always-failing allocator, so any fallback allocation
        // surfaces as `Alloc(())`, which we translate here.
        self.serialize(outer, MaybeReuseBufferProvider(())).map(Either::into_a).map_err(
            |(err, slf)| {
                (
                    match err {
                        SerializeError::Alloc(()) => BufferTooShortError.into(),
                        SerializeError::SizeLimitExceeded => SerializeError::SizeLimitExceeded,
                    },
                    slf,
                )
            },
        )
    }

    /// Like `serialize`, with no outer constraints (outermost layer).
    #[inline]
    fn serialize_outer<B: GrowBufferMut, P: BufferProvider<Self::Buffer, B>>(
        self,
        provider: P,
    ) -> Result<B, (SerializeError<P::Error>, Self)> {
        self.serialize(PacketConstraints::UNCONSTRAINED, provider)
    }

    /// Like `serialize_vec`, with no outer constraints (outermost layer).
    #[inline]
    #[allow(clippy::type_complexity)]
    fn serialize_vec_outer(
        self,
    ) -> Result<Either<Self::Buffer, Buf<Vec<u8>>>, (SerializeError<Never>, Self)>
    where
        Self::Buffer: ReusableBuffer,
    {
        self.serialize_vec(PacketConstraints::UNCONSTRAINED)
    }

    /// Like `serialize_no_alloc`, with no outer constraints (outermost
    /// layer).
    #[inline]
    fn serialize_no_alloc_outer(
        self,
    ) -> Result<Self::Buffer, (SerializeError<BufferTooShortError>, Self)>
    where
        Self::Buffer: ReusableBuffer,
    {
        self.serialize_no_alloc(PacketConstraints::UNCONSTRAINED)
    }

    /// Wraps this serializer in an outer layer described by `outer`.
    #[inline]
    fn encapsulate<B>(self, outer: B) -> Nested<Self, B> {
        Nested { inner: self, outer }
    }

    /// Imposes a maximum body size on this serializer's output.
    #[inline]
    fn with_size_limit(self, limit: usize) -> Nested<Self, LimitedSizePacketBuilder> {
        self.encapsulate(LimitedSizePacketBuilder { limit })
    }
}
1560
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
/// A `Serializer` for an `InnerPacketBuilder`, optionally carrying a buffer
/// whose storage may be reused during serialization.
///
/// Constructed via `InnerPacketBuilder::into_serializer[_with]`; `buffer`'s
/// body is always empty (its capacity is what gets reused).
pub struct InnerSerializer<I, B> {
    inner: I,
    buffer: B,
}
1576
impl<I, B> InnerSerializer<I, B> {
    /// Borrows the wrapped `InnerPacketBuilder`.
    pub fn inner(&self) -> &I {
        &self.inner
    }
}
1582
// Adapts an `InnerPacketBuilder` into a `PacketBuilder` whose "header" is
// the entire payload, so it can ride the normal serialization path.
struct InnerPacketBuilderWrapper<I>(I);
1589
impl<I: InnerPacketBuilder> PacketBuilder for InnerPacketBuilderWrapper<I> {
    fn constraints(&self) -> PacketConstraints {
        let Self(wrapped) = self;
        // The payload is modeled as a header of `bytes_len()` bytes; no
        // footer, no body requirements.
        PacketConstraints::new(wrapped.bytes_len(), 0, 0, usize::MAX)
    }

    fn serialize(&self, target: &mut SerializeTarget<'_>, _body: FragmentedBytesMut<'_, '_>) {
        let Self(wrapped) = self;

        // The serialization machinery sizes the target from `constraints()`.
        debug_assert_eq!(target.header.len(), wrapped.bytes_len());
        debug_assert_eq!(target.footer.len(), 0);

        InnerPacketBuilder::serialize(wrapped, target.header);
    }
}
1608
impl<I: InnerPacketBuilder, B: GrowBuffer + ShrinkBuffer> Serializer for InnerSerializer<I, B> {
    type Buffer = B;

    // Serializes by encapsulating the (empty-bodied) carried buffer in the
    // wrapped inner builder, so the payload is written as that layer's
    // "header". On error, the serializer is reassembled and returned.
    #[inline]
    fn serialize<BB: GrowBufferMut, P: BufferProvider<B, BB>>(
        self,
        outer: PacketConstraints,
        provider: P,
    ) -> Result<BB, (SerializeError<P::Error>, InnerSerializer<I, B>)> {
        let pb = InnerPacketBuilderWrapper(self.inner);
        // Invariant from `into_serializer_with`: the carried buffer's body
        // is empty.
        debug_assert_eq!(self.buffer.len(), 0);
        self.buffer.encapsulate(pb).serialize(outer, provider).map_err(
            |(err, Nested { inner: buffer, outer: pb })| {
                (err, InnerSerializer { inner: pb.0, buffer })
            },
        )
    }

    // Same strategy, but always into a fresh buffer: an `EmptyBuf` stands in
    // for the body, so the carried buffer is untouched.
    #[inline]
    fn serialize_new_buf<BB: ReusableBuffer, A: BufferAlloc<BB>>(
        &self,
        outer: PacketConstraints,
        alloc: A,
    ) -> Result<BB, SerializeError<A::Error>> {
        let pb = InnerPacketBuilderWrapper(&self.inner);
        EmptyBuf.encapsulate(pb).serialize_new_buf(outer, alloc)
    }
}
1637
// Any buffer is a serializer whose body is the buffer's current contents.
impl<B: GrowBuffer + ShrinkBuffer> Serializer for B {
    type Buffer = B;

    // Delegates to `TruncatingSerializer` configured to refuse truncation,
    // so oversize bodies yield `SizeLimitExceeded`.
    #[inline]
    fn serialize<BB: GrowBufferMut, P: BufferProvider<Self::Buffer, BB>>(
        self,
        outer: PacketConstraints,
        provider: P,
    ) -> Result<BB, (SerializeError<P::Error>, Self)> {
        TruncatingSerializer::new(self, TruncateDirection::NoTruncating)
            .serialize(outer, provider)
            .map_err(|(err, ser)| (err, ser.buffer))
    }

    fn serialize_new_buf<BB: ReusableBuffer, A: BufferAlloc<BB>>(
        &self,
        outer: PacketConstraints,
        alloc: A,
    ) -> Result<BB, SerializeError<A::Error>> {
        if self.len() > outer.max_body_len() {
            return Err(SerializeError::SizeLimitExceeded);
        }

        // Allocate room for header + body + padding + footer, then position
        // the body range and copy the bytes in. Growing back over the
        // padding includes it in the body, as the outer minimum requires.
        let padding = outer.min_body_len().saturating_sub(self.len());
        let tail_size = padding + outer.footer_len();
        let buffer_size = outer.header_len() + self.len() + tail_size;
        let mut buffer = alloc.alloc(buffer_size)?;
        buffer.shrink_front(outer.header_len());
        buffer.shrink_back(tail_size);
        buffer.copy_from(self);
        buffer.grow_back(padding);
        Ok(buffer)
    }
}
1672
/// One of two serializer types sharing the same `Buffer` type; allows a
/// caller to pick between serializers at runtime.
pub enum EitherSerializer<A, B> {
    A(A),
    B(B),
}
1680
// Delegates to whichever serializer is held, re-wrapping it on error so the
// caller can retry.
impl<A: Serializer, B: Serializer<Buffer = A::Buffer>> Serializer for EitherSerializer<A, B> {
    type Buffer = A::Buffer;

    fn serialize<TB: GrowBufferMut, P: BufferProvider<Self::Buffer, TB>>(
        self,
        outer: PacketConstraints,
        provider: P,
    ) -> Result<TB, (SerializeError<P::Error>, Self)> {
        match self {
            EitherSerializer::A(s) => {
                s.serialize(outer, provider).map_err(|(err, s)| (err, EitherSerializer::A(s)))
            }
            EitherSerializer::B(s) => {
                s.serialize(outer, provider).map_err(|(err, s)| (err, EitherSerializer::B(s)))
            }
        }
    }

    fn serialize_new_buf<TB: ReusableBuffer, BA: BufferAlloc<TB>>(
        &self,
        outer: PacketConstraints,
        alloc: BA,
    ) -> Result<TB, SerializeError<BA::Error>> {
        match self {
            EitherSerializer::A(s) => s.serialize_new_buf(outer, alloc),
            EitherSerializer::B(s) => s.serialize_new_buf(outer, alloc),
        }
    }
}
1710
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
/// Which end of a too-long body a `TruncatingSerializer` may discard.
pub enum TruncateDirection {
    // Drop excess bytes from the front of the body.
    DiscardFront,
    // Drop excess bytes from the back of the body.
    DiscardBack,
    // Never truncate; oversize bodies fail with `SizeLimitExceeded`.
    NoTruncating,
}
1724
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
/// A serializer for a buffer that may truncate its body (from the chosen
/// direction) to satisfy an outer maximum-body-length constraint, instead
/// of failing as the plain buffer `Serializer` impl does.
pub struct TruncatingSerializer<B> {
    buffer: B,
    direction: TruncateDirection,
}
1741
1742impl<B> TruncatingSerializer<B> {
1743 pub fn new(buffer: B, direction: TruncateDirection) -> TruncatingSerializer<B> {
1745 TruncatingSerializer { buffer, direction }
1746 }
1747
1748 pub fn buffer(&self) -> &B {
1750 &self.buffer
1751 }
1752
1753 pub fn buffer_mut(&mut self) -> &mut B {
1755 &mut self.buffer
1756 }
1757}
1758
impl<B: GrowBuffer + ShrinkBuffer> Serializer for TruncatingSerializer<B> {
    type Buffer = B;

    fn serialize<BB: GrowBufferMut, P: BufferProvider<B, BB>>(
        mut self,
        outer: PacketConstraints,
        provider: P,
    ) -> Result<BB, (SerializeError<P::Error>, Self)> {
        // Number of body bytes over the outer limit, if any.
        let original_len = self.buffer.len();
        let excess_bytes = if original_len > outer.max_body_len {
            Some(original_len - outer.max_body_len)
        } else {
            None
        };
        // Truncate in the configured direction; `NoTruncating` turns excess
        // bytes into a size-limit error instead, returning `self` unmodified.
        if let Some(excess_bytes) = excess_bytes {
            match self.direction {
                TruncateDirection::DiscardFront => self.buffer.shrink_front(excess_bytes),
                TruncateDirection::DiscardBack => self.buffer.shrink_back(excess_bytes),
                TruncateDirection::NoTruncating => {
                    return Err((SerializeError::SizeLimitExceeded, self))
                }
            }
        }

        // Zero-padding needed to reach the outer minimum body length.
        let padding = outer.min_body_len().saturating_sub(self.buffer.len());

        debug_assert!(self.buffer.len() + padding <= outer.max_body_len());
        match provider.reuse_or_realloc(
            self.buffer,
            outer.header_len(),
            padding + outer.footer_len(),
        ) {
            Ok(buffer) => Ok(buffer),
            Err((err, mut buffer)) => {
                // Allocation failed: undo the truncation so the serializer
                // handed back to the caller is equivalent to the original.
                // Growing re-exposes the previously-shrunk bytes; GrowBuffer
                // does not guarantee their contents are preserved, though —
                // NOTE(review): presumably acceptable for callers; confirm.
                if let Some(excess_bytes) = excess_bytes {
                    match self.direction {
                        TruncateDirection::DiscardFront => buffer.grow_front(excess_bytes),
                        TruncateDirection::DiscardBack => buffer.grow_back(excess_bytes),
                        // Unreachable: `NoTruncating` returned early above.
                        TruncateDirection::NoTruncating => unreachable!(),
                    }
                }

                Err((
                    SerializeError::Alloc(err),
                    TruncatingSerializer { buffer, direction: self.direction },
                ))
            }
        }
    }

    fn serialize_new_buf<BB: ReusableBuffer, A: BufferAlloc<BB>>(
        &self,
        outer: PacketConstraints,
        alloc: A,
    ) -> Result<BB, SerializeError<A::Error>> {
        // Size the new buffer for the (possibly truncated) body plus the
        // outer header, padding, and footer.
        let truncated_size = cmp::min(self.buffer.len(), outer.max_body_len());
        let discarded_bytes = self.buffer.len() - truncated_size;
        let padding = outer.min_body_len().saturating_sub(truncated_size);
        let tail_size = padding + outer.footer_len();
        let buffer_size = outer.header_len() + truncated_size + tail_size;
        let mut buffer = alloc.alloc(buffer_size)?;
        // Shrink to just the body region; header and footer space stays
        // reserved as prefix/suffix.
        buffer.shrink_front(outer.header_len());
        buffer.shrink_back(tail_size);
        buffer.with_bytes_mut(|mut dst| {
            self.buffer.with_bytes(|src| {
                // Select the surviving portion of the source body.
                let src = match (discarded_bytes > 0, self.direction) {
                    (false, _) => src,
                    (true, TruncateDirection::DiscardFront) => src.slice(discarded_bytes..),
                    (true, TruncateDirection::DiscardBack) => src.slice(..truncated_size),
                    (true, TruncateDirection::NoTruncating) => {
                        return Err(SerializeError::SizeLimitExceeded)
                    }
                };
                dst.copy_from(&src);
                Ok(())
            })
        })?;
        // Extend the body with zeroed padding to satisfy `min_body_len`.
        buffer.grow_back_zero(padding);
        Ok(buffer)
    }
}
1846
1847impl<I: Serializer, O: PacketBuilder> Serializer for Nested<I, O> {
1848 type Buffer = I::Buffer;
1849
1850 #[inline]
1851 fn serialize<B: GrowBufferMut, P: BufferProvider<I::Buffer, B>>(
1852 self,
1853 outer: PacketConstraints,
1854 provider: P,
1855 ) -> Result<B, (SerializeError<P::Error>, Self)> {
1856 let Some(outer) = self.outer.constraints().try_encapsulate(&outer) else {
1857 return Err((SerializeError::SizeLimitExceeded, self));
1858 };
1859
1860 match self.inner.serialize(outer, provider) {
1861 Ok(mut buf) => {
1862 buf.serialize(&self.outer);
1863 Ok(buf)
1864 }
1865 Err((err, inner)) => Err((err, inner.encapsulate(self.outer))),
1866 }
1867 }
1868
1869 #[inline]
1870 fn serialize_new_buf<B: ReusableBuffer, A: BufferAlloc<B>>(
1871 &self,
1872 outer: PacketConstraints,
1873 alloc: A,
1874 ) -> Result<B, SerializeError<A::Error>> {
1875 let Some(outer) = self.outer.constraints().try_encapsulate(&outer) else {
1876 return Err(SerializeError::SizeLimitExceeded);
1877 };
1878
1879 let mut buf = self.inner.serialize_new_buf(outer, alloc)?;
1880 GrowBufferMut::serialize(&mut buf, &self.outer);
1881 Ok(buf)
1882 }
1883}
1884
1885#[cfg(test)]
1886mod tests {
1887 use super::*;
1888 use crate::BufferMut;
1889 use std::fmt::Debug;
1890 use test_case::test_case;
1891 use test_util::{assert_geq, assert_leq};
1892
1893 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
1899 struct DummyPacketBuilder {
1900 header_len: usize,
1901 footer_len: usize,
1902 min_body_len: usize,
1903 max_body_len: usize,
1904 }
1905
1906 impl DummyPacketBuilder {
1907 fn new(
1908 header_len: usize,
1909 footer_len: usize,
1910 min_body_len: usize,
1911 max_body_len: usize,
1912 ) -> DummyPacketBuilder {
1913 DummyPacketBuilder { header_len, footer_len, min_body_len, max_body_len }
1914 }
1915 }
1916
1917 fn fill(bytes: &mut [u8], byte: u8) {
1918 for b in bytes {
1919 *b = byte;
1920 }
1921 }
1922
    impl PacketBuilder for DummyPacketBuilder {
        fn constraints(&self) -> PacketConstraints {
            PacketConstraints::new(
                self.header_len,
                self.footer_len,
                self.min_body_len,
                self.max_body_len,
            )
        }

        // Fills the header with 0xFF and the footer with 0xFE so tests can
        // recognize each region in the serialized output. Asserts that the
        // target regions and body honor this builder's constraints.
        fn serialize(&self, target: &mut SerializeTarget<'_>, body: FragmentedBytesMut<'_, '_>) {
            assert_eq!(target.header.len(), self.header_len);
            assert_eq!(target.footer.len(), self.footer_len);
            assert!(body.len() >= self.min_body_len);
            assert!(body.len() <= self.max_body_len);
            fill(target.header, 0xFF);
            fill(target.footer, 0xFE);
        }
    }
1942
    // As an inner packet builder, a `DummyPacketBuilder` produces
    // `header_len` bytes of 0xFF; the other fields are unused in this role.
    impl InnerPacketBuilder for DummyPacketBuilder {
        fn bytes_len(&self) -> usize {
            self.header_len
        }

        fn serialize(&self, buffer: &mut [u8]) {
            assert_eq!(buffer.len(), self.header_len);
            fill(buffer, 0xFF);
        }
    }
1953
    /// Precomputed expectations used to cross-check serialization results.
    #[derive(Copy, Clone, Debug, Eq, PartialEq)]
    struct SerializerVerifier {
        // Body length produced by unconstrained serialization, or `None` if
        // even unconstrained serialization fails with a size-limit error.
        inner_len: Option<usize>,

        // Whether the serializer under test truncates over-long bodies
        // rather than failing.
        truncating: bool,
    }
1965
    impl SerializerVerifier {
        // Captures the serializer's unconstrained body length up front so
        // later results can be checked against it.
        fn new<S: Serializer>(serializer: &S, truncating: bool) -> Self {
            let inner_len = serializer
                .serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec)
                .map(|buf| buf.len())
                // Unconstrained serialization may only fail with a
                // size-limit error (e.g. from an inner size limit).
                .inspect_err(|err| assert!(err.is_size_limit_exceeded()))
                .ok();
            Self { inner_len, truncating }
        }

        // Checks a serialization outcome against `outer` and the
        // expectations captured at construction.
        fn verify_result<B: GrowBufferMut, A>(
            &self,
            result: Result<&B, &SerializeError<A>>,
            outer: PacketConstraints,
        ) {
            // A size-limit failure is expected when the unconstrained body
            // doesn't fit and the serializer does not truncate, or when even
            // unconstrained serialization failed.
            let should_exceed_size_limit = match self.inner_len {
                Some(inner_len) => outer.max_body_len() < inner_len && !self.truncating,
                None => true,
            };

            match result {
                Ok(buf) => {
                    // Success must leave room for the outer header/footer
                    // and respect the body-length bounds.
                    assert_geq!(buf.prefix_len(), outer.header_len());
                    assert_geq!(buf.suffix_len(), outer.footer_len());
                    assert_leq!(buf.len(), outer.max_body_len());

                    let padding = outer.min_body_len().saturating_sub(buf.len());
                    assert_leq!(padding + outer.footer_len(), buf.suffix_len());

                    assert!(!should_exceed_size_limit);
                }
                Err(err) => {
                    if should_exceed_size_limit {
                        assert!(err.is_size_limit_exceeded());
                    } else {
                        // Any other failure must be an allocation error.
                        assert!(err.is_alloc());
                    }
                }
            }
        }
    }
2013
    /// Wraps a serializer and checks every serialization result against the
    /// expectations captured by its `SerializerVerifier`.
    #[derive(Copy, Clone, Debug, Eq, PartialEq)]
    struct VerifyingSerializer<S> {
        ser: S,
        verifier: SerializerVerifier,
    }
2027
    impl<S: Serializer + Debug + Clone + Eq> Serializer for VerifyingSerializer<S>
    where
        S::Buffer: ReusableBuffer,
    {
        type Buffer = S::Buffer;

        fn serialize<B: GrowBufferMut, P: BufferProvider<Self::Buffer, B>>(
            self,
            outer: PacketConstraints,
            provider: P,
        ) -> Result<B, (SerializeError<P::Error>, Self)> {
            let Self { ser, verifier } = self;
            // Keep a copy so we can check that, on failure, the serializer
            // returned with the error equals the original.
            let orig = ser.clone();

            let result = ser.serialize(outer, provider).map_err(|(err, ser)| {
                assert_eq!(ser, orig);
                (err, Self { ser, verifier })
            });

            // Cross-check the outcome against the captured expectations.
            verifier.verify_result(result.as_ref().map_err(|(err, _ser)| err), outer);

            result
        }

        fn serialize_new_buf<B: ReusableBuffer, A: BufferAlloc<B>>(
            &self,
            outer: PacketConstraints,
            alloc: A,
        ) -> Result<B, SerializeError<A::Error>> {
            let res = self.ser.serialize_new_buf(outer, alloc);
            self.verifier.verify_result(res.as_ref(), outer);
            res
        }
    }
2064
    /// Test-only helpers for wrapping serializers in `VerifyingSerializer`s.
    trait SerializerExt: Serializer {
        /// Wraps `self` so every serialization result is verified.
        /// `truncating` declares whether `self` truncates over-long bodies.
        fn into_verifying(self, truncating: bool) -> VerifyingSerializer<Self>
        where
            Self::Buffer: ReusableBuffer,
        {
            let verifier = SerializerVerifier::new(&self, truncating);
            VerifyingSerializer { ser: self, verifier }
        }

        /// `encapsulate` followed by `into_verifying`.
        fn encapsulate_verifying<B: PacketBuilder>(
            self,
            outer: B,
            truncating: bool,
        ) -> VerifyingSerializer<Nested<Self, B>>
        where
            Self::Buffer: ReusableBuffer,
        {
            self.encapsulate(outer).into_verifying(truncating)
        }

        /// `with_size_limit` followed by `into_verifying`.
        fn with_size_limit_verifying(
            self,
            limit: usize,
            truncating: bool,
        ) -> VerifyingSerializer<Nested<Self, LimitedSizePacketBuilder>>
        where
            Self::Buffer: ReusableBuffer,
        {
            self.with_size_limit(limit).into_verifying(truncating)
        }
    }

    impl<S: Serializer> SerializerExt for S {}
2098
2099 #[test]
2100 fn test_either_into_inner() {
2101 fn ret_either(a: u32, b: u32, c: bool) -> Either<u32, u32> {
2102 if c {
2103 Either::A(a)
2104 } else {
2105 Either::B(b)
2106 }
2107 }
2108
2109 assert_eq!(ret_either(1, 2, true).into_inner(), 1);
2110 assert_eq!(ret_either(1, 2, false).into_inner(), 2);
2111 }
2112
2113 #[test]
2114 fn test_either_unwrap_success() {
2115 assert_eq!(Either::<u16, u32>::A(5).unwrap_a(), 5);
2116 assert_eq!(Either::<u16, u32>::B(10).unwrap_b(), 10);
2117 }
2118
2119 #[test]
2120 #[should_panic]
2121 fn test_either_unwrap_a_panic() {
2122 let _: u16 = Either::<u16, u32>::B(10).unwrap_a();
2123 }
2124
2125 #[test]
2126 #[should_panic]
2127 fn test_either_unwrap_b_panic() {
2128 let _: u32 = Either::<u16, u32>::A(5).unwrap_b();
2129 }
2130
2131 #[test_case(Buf::new((0..100).collect(), ..); "entire buf")]
2132 #[test_case(Buf::new((0..100).collect(), 0..0); "empty range")]
2133 #[test_case(Buf::new((0..100).collect(), ..50); "prefix")]
2134 #[test_case(Buf::new((0..100).collect(), 50..); "suffix")]
2135 #[test_case(Buf::new((0..100).collect(), 25..75); "middle")]
2136 fn test_buf_into_inner(buf: Buf<Vec<u8>>) {
2137 assert_eq!(buf.clone().as_ref(), buf.into_inner());
2138 }
2139
    #[test]
    fn test_packet_constraints() {
        use PacketConstraints as PC;

        // Construction: zero everything is fine; header + footer may sum to
        // usize::MAX but must not overflow; min_body_len > max_body_len is
        // rejected.
        assert!(PC::try_new(0, 0, 0, 0).is_some());
        assert!(PC::try_new(usize::MAX / 2, usize::MAX / 2, 0, 0).is_some());
        assert_eq!(PC::try_new(usize::MAX, 1, 0, 0), None);
        assert_eq!(PC::try_new(0, 0, 1, 0), None);

        // Encapsulation sums header/footer lengths and shrinks max_body_len.
        let pc = PC::new(10, 10, 0, usize::MAX);
        assert_eq!(pc.try_encapsulate(&pc).unwrap(), PC::new(20, 20, 0, usize::MAX - 20));

        // NOTE(review): this repeats the previous case verbatim — it may
        // have been meant to exercise a different combination; confirm.
        let pc = PC::new(10, 10, 0, usize::MAX);
        assert_eq!(pc.try_encapsulate(&pc).unwrap(), PC::new(20, 20, 0, usize::MAX - 20));

        // The outer min_body_len (10) is satisfied by the inner
        // header + footer (20), so it doesn't propagate.
        let inner = PC::new(10, 10, 0, usize::MAX);
        let outer = PC::new(0, 0, 10, usize::MAX);
        assert_eq!(inner.try_encapsulate(&outer).unwrap(), PC::new(10, 10, 0, usize::MAX - 20));

        // Header length addition overflows.
        let inner = PC::new(usize::MAX, 0, 0, usize::MAX);
        let outer = PC::new(1, 0, 0, usize::MAX);
        assert_eq!(inner.try_encapsulate(&outer), None);

        // Footer length addition overflows.
        let inner = PC::new(0, usize::MAX, 0, usize::MAX);
        let outer = PC::new(0, 1, 0, usize::MAX);
        assert_eq!(inner.try_encapsulate(&outer), None);

        // Combined header/footer/min-body arithmetic overflows.
        let one_fifth_max = (usize::MAX / 5) + 1;
        let inner = PC::new(one_fifth_max, one_fifth_max, one_fifth_max, usize::MAX);
        let outer = PC::new(one_fifth_max, one_fifth_max, 0, usize::MAX);
        assert_eq!(inner.try_encapsulate(&outer), None);

        // The inner header + footer (20) exceed the outer max_body_len (10).
        let inner = PC::new(10, 10, 0, usize::MAX);
        let outer = PC::new(0, 0, 0, 10);
        assert_eq!(inner.try_encapsulate(&outer), None);

        // The inner min_body_len (10) exceeds the outer max_body_len (5).
        let inner = PC::new(0, 0, 10, usize::MAX);
        let outer = PC::new(0, 0, 0, 5);
        assert_eq!(inner.try_encapsulate(&outer), None);
    }
2220
    #[test]
    fn test_inner_serializer() {
        const INNER: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];

        // Concatenates byte slices into one owned vector.
        fn concat<'a, I: IntoIterator<Item = &'a &'a [u8]>>(slices: I) -> Vec<u8> {
            let mut v = Vec::new();
            for slc in slices.into_iter() {
                v.extend_from_slice(slc);
            }
            v
        }

        // Unconstrained serialization yields the body as-is.
        let buf = INNER.into_serializer().serialize_vec_outer().unwrap();
        assert_eq!(buf.as_ref(), INNER);

        // min_body_len = 20 adds 10 bytes of zero padding after the body.
        let buf = INNER
            .into_serializer()
            .into_verifying(false)
            .encapsulate(DummyPacketBuilder::new(0, 0, 20, usize::MAX))
            .serialize_vec_outer()
            .unwrap();
        assert_eq!(buf.as_ref(), concat(&[INNER, vec![0; 10].as_ref()]).as_slice());

        // A 10-byte header (0xFF) and footer (0xFE) bracket the body.
        let buf = INNER
            .into_serializer()
            .into_verifying(false)
            .encapsulate(DummyPacketBuilder::new(10, 10, 0, usize::MAX))
            .serialize_vec_outer()
            .unwrap();
        assert_eq!(
            buf.as_ref(),
            concat(&[vec![0xFF; 10].as_ref(), INNER, vec![0xFE; 10].as_ref()]).as_slice()
        );

        // max_body_len = 9 is one byte too small for the 10-byte body.
        assert_eq!(
            INNER
                .into_serializer()
                .into_verifying(false)
                .encapsulate(DummyPacketBuilder::new(0, 0, 0, 9))
                .serialize_vec_outer()
                .unwrap_err()
                .0,
            SerializeError::SizeLimitExceeded
        );

        // A too-small provided buffer is replaced; the result still matches.
        assert_eq!(
            INNER
                .into_serializer_with(Buf::new(vec![0xFF], ..))
                .into_verifying(false)
                .serialize_vec_outer()
                .unwrap()
                .as_ref(),
            INNER
        );
    }
2286
2287 #[test]
2288 fn test_buffer_serializer_and_inner_serializer() {
2289 fn verify_buffer_serializer<B: BufferMut + Debug>(
2290 buffer: B,
2291 header_len: usize,
2292 footer_len: usize,
2293 min_body_len: usize,
2294 ) {
2295 let old_body = buffer.to_flattened_vec();
2296 let serializer = buffer.encapsulate(DummyPacketBuilder::new(
2297 header_len,
2298 footer_len,
2299 min_body_len,
2300 usize::MAX,
2301 ));
2302
2303 let buffer0 = serializer
2304 .serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec)
2305 .unwrap();
2306 verify(buffer0, &old_body, header_len, footer_len, min_body_len);
2307
2308 let buffer = serializer.serialize_vec_outer().unwrap();
2309 verify(buffer, &old_body, header_len, footer_len, min_body_len);
2310 }
2311
2312 fn verify_inner_packet_builder_serializer(
2313 body: &[u8],
2314 header_len: usize,
2315 footer_len: usize,
2316 min_body_len: usize,
2317 ) {
2318 let buffer = body
2319 .into_serializer()
2320 .encapsulate(DummyPacketBuilder::new(
2321 header_len,
2322 footer_len,
2323 min_body_len,
2324 usize::MAX,
2325 ))
2326 .serialize_vec_outer()
2327 .unwrap();
2328 verify(buffer, body, header_len, footer_len, min_body_len);
2329 }
2330
2331 fn verify<B: Buffer>(
2332 buffer: B,
2333 body: &[u8],
2334 header_len: usize,
2335 footer_len: usize,
2336 min_body_len: usize,
2337 ) {
2338 let flat = buffer.to_flattened_vec();
2339 let header_bytes = &flat[..header_len];
2340 let body_bytes = &flat[header_len..header_len + body.len()];
2341 let padding_len = min_body_len.saturating_sub(body.len());
2342 let padding_bytes =
2343 &flat[header_len + body.len()..header_len + body.len() + padding_len];
2344 let total_body_len = body.len() + padding_len;
2345 let footer_bytes = &flat[header_len + total_body_len..];
2346 assert_eq!(
2347 buffer.len() - total_body_len,
2348 header_len + footer_len,
2349 "buffer.len()({}) - total_body_len({}) != header_len({}) + footer_len({})",
2350 buffer.len(),
2351 header_len,
2352 footer_len,
2353 min_body_len,
2354 );
2355
2356 assert!(
2358 header_bytes.iter().all(|b| *b == 0xFF),
2359 "header_bytes {:?} are not filled with 0xFF's",
2360 header_bytes,
2361 );
2362 assert_eq!(body_bytes, body);
2363 assert!(
2365 padding_bytes.iter().all(|b| *b == 0),
2366 "padding_bytes {:?} are not filled with 0s",
2367 padding_bytes,
2368 );
2369 assert!(
2371 footer_bytes.iter().all(|b| *b == 0xFE),
2372 "footer_bytes {:?} are not filled with 0xFE's",
2373 footer_bytes,
2374 );
2375 }
2376
2377 for buf_len in 0..8 {
2380 for range_start in 0..buf_len {
2381 for range_end in range_start..buf_len {
2382 for prefix in 0..8 {
2383 for suffix in 0..8 {
2384 for min_body in 0..8 {
2385 let mut vec = vec![0; buf_len];
2386 #[allow(clippy::needless_range_loop)]
2391 for i in 0..vec.len() {
2392 vec[i] = i as u8;
2393 }
2394 verify_buffer_serializer(
2395 Buf::new(vec.as_mut_slice(), range_start..range_end),
2396 prefix,
2397 suffix,
2398 min_body,
2399 );
2400 if range_start == 0 {
2401 verify_inner_packet_builder_serializer(
2410 &vec.as_slice()[range_start..range_end],
2411 prefix,
2412 suffix,
2413 min_body,
2414 );
2415 }
2416 }
2417 }
2418 }
2419 }
2420 }
2421 }
2422 }
2423
    #[test]
    fn test_min_body_len() {
        let body = &[1, 2];

        // The outer layer requires an 8-byte body; the inner layer
        // contributes header(2) + body(2) + footer(2) = 6 bytes, so 2 bytes
        // of zero padding are inserted before the outer footer.
        let inner = DummyPacketBuilder::new(2, 2, 0, usize::MAX);
        let outer = DummyPacketBuilder::new(2, 2, 8, usize::MAX);
        let buf = body
            .into_serializer()
            .into_verifying(false)
            .encapsulate_verifying(inner, false)
            .encapsulate_verifying(outer, false)
            .serialize_vec_outer()
            .unwrap();
        assert_eq!(buf.prefix_len(), 0);
        assert_eq!(buf.suffix_len(), 0);
        // Layout: outer+inner headers (0xFF), body, inner footer (0xFE),
        // padding (0), outer footer (0xFE).
        assert_eq!(
            buf.as_ref(),
            &[
                0xFF, 0xFF, 0xFF, 0xFF, 1, 2, 0xFE, 0xFE, 0, 0, 0xFE, 0xFE ]
        );
    }
2458
    #[test]
    fn test_size_limit() {
        // `ser` serializes to exactly 1 byte; `pb` adds a 1-byte header and
        // a 1-byte footer around it (total 3 bytes).
        fn test<S: Serializer + Clone + Debug + Eq>(ser: S)
        where
            S::Buffer: ReusableBuffer,
        {
            let pb = DummyPacketBuilder::new(1, 1, 0, usize::MAX);

            // Limit of exactly 3 bytes: fits.
            assert!(ser
                .clone()
                .encapsulate_verifying(pb, false)
                .with_size_limit_verifying(3, false)
                .serialize_vec_outer()
                .is_ok());
            // Limit with one byte to spare: fits.
            assert!(ser
                .clone()
                .encapsulate_verifying(pb, false)
                .with_size_limit_verifying(4, false)
                .serialize_vec_outer()
                .is_ok());
            // An inner limit of 1 on the 1-byte body, plus an outer limit of
            // 3 on the full packet: still fits.
            assert!(ser
                .clone()
                .with_size_limit_verifying(1, false)
                .encapsulate_verifying(pb, false)
                .with_size_limit_verifying(3, false)
                .serialize_vec_outer()
                .is_ok());
            // Inner limit of 0 can't hold the 1-byte body: fails.
            assert!(ser
                .clone()
                .with_size_limit_verifying(0, false)
                .encapsulate_verifying(pb, false)
                .serialize_vec_outer()
                .is_err());
            // Outer limit of 1 can't hold header + body + footer: fails.
            assert!(ser
                .clone()
                .encapsulate_verifying(pb, false)
                .with_size_limit_verifying(1, false)
                .serialize_vec_outer()
                .is_err());
        }

        // Exercise both an inner-packet-builder serializer and a buffer
        // serializer, each producing a 1-byte body.
        test(DummyPacketBuilder::new(1, 0, 0, usize::MAX).into_serializer().into_verifying(false));
        test(Buf::new(vec![0], ..).into_verifying(false));
    }
2525
2526 #[test]
2527 fn test_truncating_serializer() {
2528 fn verify_result<S: Serializer + Debug>(ser: S, expected: &[u8])
2529 where
2530 S::Buffer: ReusableBuffer + AsRef<[u8]>,
2531 {
2532 let buf = ser.serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec).unwrap();
2533 assert_eq!(buf.as_ref(), &expected[..]);
2534 let buf = ser.serialize_vec_outer().unwrap();
2535 assert_eq!(buf.as_ref(), &expected[..]);
2536 }
2537
2538 let body = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
2540 let ser =
2541 TruncatingSerializer::new(Buf::new(body.clone(), ..), TruncateDirection::DiscardFront)
2542 .into_verifying(true)
2543 .with_size_limit_verifying(4, true);
2544 verify_result(ser, &[6, 7, 8, 9]);
2545
2546 let ser =
2548 TruncatingSerializer::new(Buf::new(body.clone(), ..), TruncateDirection::DiscardBack)
2549 .into_verifying(true)
2550 .with_size_limit_verifying(7, true);
2551 verify_result(ser, &[0, 1, 2, 3, 4, 5, 6]);
2552
2553 let ser =
2555 TruncatingSerializer::new(Buf::new(body.clone(), ..), TruncateDirection::NoTruncating)
2556 .into_verifying(false)
2557 .with_size_limit_verifying(5, true);
2558 assert!(ser.clone().serialize_vec_outer().is_err());
2559 assert!(ser.serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec).is_err());
2560 assert!(ser.serialize_vec_outer().is_err());
2561
2562 fn test_serialization_failure<S: Serializer + Clone + Eq + Debug>(
2566 ser: S,
2567 err: SerializeError<BufferTooShortError>,
2568 ) where
2569 S::Buffer: ReusableBuffer + Debug,
2570 {
2571 let (e, new_ser) = ser
2580 .clone()
2581 .encapsulate(DummyPacketBuilder::new(2, 2, 0, 1))
2582 .serialize_no_alloc_outer()
2583 .unwrap_err();
2584 assert_eq!(err, e);
2585 assert_eq!(new_ser.into_inner(), ser);
2586 }
2587
2588 let body = Buf::new(vec![1, 2], ..);
2589 test_serialization_failure(
2590 TruncatingSerializer::new(body.clone(), TruncateDirection::DiscardFront)
2591 .into_verifying(true),
2592 SerializeError::Alloc(BufferTooShortError),
2593 );
2594 test_serialization_failure(
2595 TruncatingSerializer::new(body.clone(), TruncateDirection::DiscardFront)
2596 .into_verifying(true),
2597 SerializeError::Alloc(BufferTooShortError),
2598 );
2599 test_serialization_failure(
2600 TruncatingSerializer::new(body.clone(), TruncateDirection::NoTruncating)
2601 .into_verifying(false),
2602 SerializeError::SizeLimitExceeded,
2603 );
2604 }
2605
    #[test]
    fn test_try_reuse_buffer() {
        // Expects `try_reuse_buffer` to succeed and preserve the body while
        // providing at least the requested prefix/suffix capacity.
        fn test_expect_success(
            body_range: Range<usize>,
            prefix: usize,
            suffix: usize,
            max_copy_bytes: usize,
        ) {
            let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
            let buffer = Buf::new(&mut bytes[..], body_range);
            let body = buffer.as_ref().to_vec();
            let buffer = try_reuse_buffer(buffer, prefix, suffix, max_copy_bytes).unwrap();
            assert_eq!(buffer.as_ref(), body.as_slice());
            assert!(buffer.prefix_len() >= prefix);
            assert!(buffer.suffix_len() >= suffix);
        }

        // Expects `try_reuse_buffer` to fail and hand back an unmodified
        // buffer.
        fn test_expect_failure(
            body_range: Range<usize>,
            prefix: usize,
            suffix: usize,
            max_copy_bytes: usize,
        ) {
            let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
            let buffer = Buf::new(&mut bytes[..], body_range.clone());
            // An identical second buffer to compare the returned one against.
            let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
            let orig = Buf::new(&mut bytes[..], body_range.clone());
            let buffer = try_reuse_buffer(buffer, prefix, suffix, max_copy_bytes).unwrap_err();
            assert_eq!(buffer, orig);
        }

        // Capacity already in the right place: no copying needed.
        test_expect_success(0..10, 0, 0, 0);
        test_expect_success(1..9, 1, 1, 0);
        // Capacity exists but the body must shift; allowed when the body
        // (9 bytes) is within `max_copy_bytes`.
        test_expect_success(0..9, 1, 0, 9);
        test_expect_success(1..10, 0, 1, 9);
        // Same shifts, but the 9-byte body exceeds `max_copy_bytes` = 8.
        test_expect_failure(0..9, 1, 0, 8);
        test_expect_failure(1..10, 0, 1, 8);
    }
2649
2650 #[test]
2651 fn test_maybe_reuse_buffer_provider() {
2652 fn test_expect(body_range: Range<usize>, prefix: usize, suffix: usize, expect_a: bool) {
2653 let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
2654 let buffer = Buf::new(&mut bytes[..], body_range);
2655 let body = buffer.as_ref().to_vec();
2656 let buffer = BufferProvider::reuse_or_realloc(
2657 MaybeReuseBufferProvider(new_buf_vec),
2658 buffer,
2659 prefix,
2660 suffix,
2661 )
2662 .unwrap();
2663 match &buffer {
2664 Either::A(_) if expect_a => {}
2665 Either::B(_) if !expect_a => {}
2666 Either::A(_) => panic!("expected Eitehr::B variant"),
2667 Either::B(_) => panic!("expected Eitehr::A variant"),
2668 }
2669 let bytes: &[u8] = buffer.as_ref();
2670 assert_eq!(bytes, body.as_slice());
2671 assert!(buffer.prefix_len() >= prefix);
2672 assert!(buffer.suffix_len() >= suffix);
2673 }
2674
2675 fn test_expect_reuse(body_range: Range<usize>, prefix: usize, suffix: usize) {
2677 test_expect(body_range, prefix, suffix, true);
2678 }
2679
2680 fn test_expect_realloc(body_range: Range<usize>, prefix: usize, suffix: usize) {
2682 test_expect(body_range, prefix, suffix, false);
2683 }
2684
2685 test_expect_reuse(0..10, 0, 0);
2687 test_expect_reuse(1..9, 1, 1);
2689 test_expect_reuse(0..9, 1, 0);
2692 test_expect_reuse(1..10, 0, 1);
2693 test_expect_realloc(0..9, 1, 1);
2695 test_expect_realloc(1..10, 1, 1);
2696 }
2697
    #[test]
    fn test_no_reuse_buffer_provider() {
        // `NoReuseBufferProvider` always allocates a fresh buffer, so the
        // prefix/suffix of the result are exactly as requested (never more).
        #[track_caller]
        fn test_expect(body_range: Range<usize>, prefix: usize, suffix: usize) {
            let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
            let internal_buffer: Buf<&mut [u8]> = Buf::new(&mut bytes[..], body_range);
            let body = internal_buffer.as_ref().to_vec();
            let buffer: Buf<Vec<u8>> = BufferProvider::reuse_or_realloc(
                NoReuseBufferProvider(new_buf_vec),
                internal_buffer,
                prefix,
                suffix,
            )
            .unwrap();
            let bytes: &[u8] = buffer.as_ref();
            assert_eq!(bytes, body.as_slice());
            // Exact equality (not >=): the new allocation is sized precisely.
            assert_eq!(buffer.prefix_len(), prefix);
            assert_eq!(buffer.suffix_len(), suffix);
        }
        test_expect(0..10, 0, 0);
        test_expect(1..9, 1, 1);
        test_expect(0..9, 10, 10);
        test_expect(1..10, 15, 15);
    }
2728
    // A test buffer whose body is three fragments: the bytes of `data`
    // between `range.start` and `mid`, then all of `inner`, then the bytes
    // of `data` between `mid` and `range.end`. `data` outside `range` acts
    // as prefix/suffix capacity.
    struct ScatterGatherBuf<B> {
        data: Vec<u8>,
        mid: usize,
        range: Range<usize>,
        inner: B,
    }
2758
    impl<B: BufferMut> FragmentedBuffer for ScatterGatherBuf<B> {
        fn len(&self) -> usize {
            // The inner fragment plus the live region of `data`.
            self.inner.len() + (self.range.end - self.range.start)
        }

        fn with_bytes<R, F>(&self, f: F) -> R
        where
            F: for<'a, 'b> FnOnce(FragmentedBytes<'a, 'b>) -> R,
        {
            // Split `data` into the pre-`mid` and post-`mid` live regions
            // and present [before, inner, after] as one fragmented slice.
            let (_, rest) = self.data.split_at(self.range.start);
            let (prefix_b, rest) = rest.split_at(self.mid - self.range.start);
            let (suffix_b, _) = rest.split_at(self.range.end - self.mid);
            let mut bytes = [prefix_b, self.inner.as_ref(), suffix_b];
            f(FragmentedBytes::new(&mut bytes[..]))
        }
    }
2775
    impl<B: BufferMut> FragmentedBufferMut for ScatterGatherBuf<B> {
        fn with_bytes_mut<R, F>(&mut self, f: F) -> R
        where
            F: for<'a, 'b> FnOnce(FragmentedBytesMut<'a, 'b>) -> R,
        {
            // Mutable twin of `with_bytes`: [before-mid, inner, after-mid].
            let (_, rest) = self.data.split_at_mut(self.range.start);
            let (prefix_b, rest) = rest.split_at_mut(self.mid - self.range.start);
            let (suffix_b, _) = rest.split_at_mut(self.range.end - self.mid);
            let mut bytes = [prefix_b, self.inner.as_mut(), suffix_b];
            f(FragmentedBytesMut::new(&mut bytes[..]))
        }
    }
2788
    impl<B: BufferMut> GrowBuffer for ScatterGatherBuf<B> {
        fn with_parts<O, F>(&self, f: F) -> O
        where
            F: for<'a, 'b> FnOnce(&'a [u8], FragmentedBytes<'a, 'b>, &'a [u8]) -> O,
        {
            // prefix = `data` before the live range; suffix = `data` after
            // it; the body is [pre-mid, inner, post-mid].
            let (prefix, rest) = self.data.split_at(self.range.start);
            let (prefix_b, rest) = rest.split_at(self.mid - self.range.start);
            let (suffix_b, suffix) = rest.split_at(self.range.end - self.mid);
            let mut bytes = [prefix_b, self.inner.as_ref(), suffix_b];
            f(prefix, bytes.as_fragmented_byte_slice(), suffix)
        }
        fn prefix_len(&self) -> usize {
            self.range.start
        }

        fn suffix_len(&self) -> usize {
            self.data.len() - self.range.end
        }

        // Growing moves the live-range bounds outward into the prefix/suffix
        // capacity of `data`. `grow_front` relies on `n <= range.start`
        // (underflow panics in debug builds).
        fn grow_front(&mut self, n: usize) {
            self.range.start -= n;
        }

        fn grow_back(&mut self, n: usize) {
            self.range.end += n;
            assert!(self.range.end <= self.data.len());
        }
    }
2817
    impl<B: BufferMut> GrowBufferMut for ScatterGatherBuf<B> {
        fn with_parts_mut<O, F>(&mut self, f: F) -> O
        where
            F: for<'a, 'b> FnOnce(&'a mut [u8], FragmentedBytesMut<'a, 'b>, &'a mut [u8]) -> O,
        {
            // Mutable twin of `with_parts`.
            let (prefix, rest) = self.data.split_at_mut(self.range.start);
            let (prefix_b, rest) = rest.split_at_mut(self.mid - self.range.start);
            let (suffix_b, suffix) = rest.split_at_mut(self.range.end - self.mid);
            let mut bytes = [prefix_b, self.inner.as_mut(), suffix_b];
            f(prefix, bytes.as_fragmented_byte_slice(), suffix)
        }
    }
2830
    // A `BufferProvider` that wraps the incoming buffer as the middle
    // fragment of a new `ScatterGatherBuf`.
    struct ScatterGatherProvider;

    impl<B: BufferMut> BufferProvider<B, ScatterGatherBuf<B>> for ScatterGatherProvider {
        type Error = Never;

        fn alloc_no_reuse(
            self,
            _prefix: usize,
            _body: usize,
            _suffix: usize,
        ) -> Result<ScatterGatherBuf<B>, Self::Error> {
            unimplemented!("not used in tests")
        }

        fn reuse_or_realloc(
            self,
            buffer: B,
            prefix: usize,
            suffix: usize,
        ) -> Result<ScatterGatherBuf<B>, (Self::Error, B)> {
            // The reused buffer becomes the inner fragment; `data` supplies
            // `prefix` bytes before it and `suffix` bytes after it. The
            // empty live range (start == end == prefix) means the `data`
            // portions of the body start empty and are claimed via
            // `grow_front`/`grow_back`.
            let inner = buffer;
            let data = vec![0; prefix + suffix];
            let range = Range { start: prefix, end: prefix };
            let mid = prefix;
            Ok(ScatterGatherBuf { inner, data, range, mid })
        }
    }
2858
2859 #[test]
2860 fn test_scatter_gather_serialize() {
2861 let buf = Buf::new(vec![10, 20, 30, 40, 50], ..);
2864 let pb = DummyPacketBuilder::new(3, 2, 0, usize::MAX);
2865 let ser = buf.encapsulate(pb);
2866 let result = ser.serialize_outer(ScatterGatherProvider {}).unwrap();
2867 let flattened = result.to_flattened_vec();
2868 assert_eq!(&flattened[..], &[0xFF, 0xFF, 0xFF, 10, 20, 30, 40, 50, 0xFE, 0xFE]);
2869 }
2870}