use crate::format::{CHUNK_HEADER_SIZE, SPARSE_HEADER_SIZE};
use crate::{Chunk, SparseHeader, BLK_SIZE, NO_SOURCE};
use anyhow::{ensure, Context, Result};
use std::io::{Cursor, Read, Seek, SeekFrom, Write};
use std::ops::Range;

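/// A single run of data in the sparse image. Each source becomes one or more chunks in the
/// built image, depending on its size.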
pub enum DataSource {
    /// Raw data from an in-memory buffer; its length must be a multiple of the block size.
    Buffer(Box<[u8]>),
    /// Raw data read from `reader`; `size` bytes are consumed, a multiple of the block size.
    Reader { reader: Box<dyn Read>, size: u64 },
    /// A "don't care" range of the given length in bytes (a multiple of the block size).
    Skip(u64),
    /// The `u32` value repeated `count` times; `count * 4` must be a multiple of the block size.
    Fill(u32, u64),
    /// Raw data read from a VMO: `size` bytes starting at `offset` (`size` block-aligned).
    #[cfg(target_os = "fuchsia")]
    Vmo { vmo: zx::Vmo, size: u64, offset: u64 },
}

impl DataSource {
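    /// Returns the number of bytes of image data this source expands to.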
    fn data_size(&self) -> u64 {
        match &self {
            DataSource::Buffer(buf) => buf.len() as u64,
            DataSource::Reader { reader: _, size } => *size,
            DataSource::Skip(size) => *size,
            DataSource::Fill(_, count) => *count * std::mem::size_of::<u32>() as u64,
            #[cfg(target_os = "fuchsia")]
            DataSource::Vmo { vmo: _, size, offset: _ } => *size,
        }
    }
}

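/// Builds sparse image files from a set of [`DataSource`]s.
///
/// A minimal usage sketch (illustrative only; `raw_blocks` is assumed to be a block-aligned
/// `Box<[u8]>`):
///
/// ```ignore
/// let mut image = std::fs::File::create("image.sparse")?;
/// SparseImageBuilder::new()
///     .set_block_size(4096)
///     .add_source(DataSource::Buffer(raw_blocks))
///     .add_source(DataSource::Skip(8192))
///     .build(&mut image)?;
/// ```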
pub struct SparseImageBuilder {
    /// The block size of the image, in bytes.
    block_size: u32,
    /// The data sources to write, in output order.
    sources: Vec<DataSource>,

    /// Upper bound on the amount of data a single chunk may hold, in bytes. Sources larger
    /// than this are split across multiple chunks; tests lower it to force that splitting.
    max_chunk_size: u32,
}
impl SparseImageBuilder {
    pub fn new() -> Self {
        Self { block_size: BLK_SIZE, sources: vec![], max_chunk_size: u32::MAX - BLK_SIZE + 1 }
    }

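    /// Sets the block size of the image, which must be at least `CHUNK_HEADER_SIZE` bytes.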
    pub fn set_block_size(mut self, block_size: u32) -> Self {
        assert!(
            block_size >= CHUNK_HEADER_SIZE,
            "The block size must be at least {}",
            CHUNK_HEADER_SIZE
        );
        self.max_chunk_size = u32::MAX - block_size + 1;
        self.block_size = block_size;
        self
    }

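    /// Adds a data source to the image. Sources are written out in the order they are added.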
    pub fn add_source(mut self, source: DataSource) -> Self {
        self.sources.push(source);
        self
    }

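    /// Returns the size, in bytes, of the sparse image that `build` will produce.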
    pub fn built_size(&self) -> u64 {
        let mut built_size = SPARSE_HEADER_SIZE as u64;
        for source in &self.sources {
            for size in ChunkedRange::new(0..source.data_size(), self.max_chunk_size) {
                let size = size as u64;
                // The chunk's start offset and fill value don't affect its serialized
                // length, so placeholders are used here.
                let start = 0;
                let chunk = match &source {
                    DataSource::Buffer(..) => Chunk::Raw { start, size },
                    DataSource::Reader { .. } => Chunk::Raw { start, size },
                    DataSource::Skip(..) => Chunk::DontCare { start, size },
                    DataSource::Fill(..) => Chunk::Fill { start, size, value: 0 },
                    #[cfg(target_os = "fuchsia")]
                    DataSource::Vmo { .. } => Chunk::Raw { start, size },
                };
                built_size += chunk.chunk_data_len() as u64;
            }
        }
        built_size
    }

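    /// Builds the sparse image, writing it to `output`. The writer must be seekable because
    /// the header is written last, once the chunk and block counts are known.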
    pub fn build<W: Write + Seek>(self, output: &mut W) -> Result<()> {
        output.seek(SeekFrom::Start(SPARSE_HEADER_SIZE as u64))?;
        // The space skipped above holds the header, which is filled in after all chunks
        // have been written and the counts are known.
        let mut chunk_writer = ChunkWriter::new(self.block_size, output);
        for source in self.sources {
            match source {
                DataSource::Buffer(buf) => {
                    ensure!(
                        buf.len() % self.block_size as usize == 0,
                        "Invalid buffer length {}",
                        buf.len()
                    );
                    for slice in buf.chunks(self.max_chunk_size as usize) {
                        chunk_writer
                            .write_raw_chunk(slice.len().try_into().unwrap(), Cursor::new(slice))?;
                    }
                }
                DataSource::Reader { mut reader, size } => {
                    ensure!(size % self.block_size as u64 == 0, "Invalid Reader length {}", size);
                    for size in ChunkedRange::new(0..size, self.max_chunk_size) {
                        chunk_writer.write_raw_chunk(size, (&mut reader).take(size as u64))?;
                    }
                }
                DataSource::Skip(size) => {
                    ensure!(size % self.block_size as u64 == 0, "Invalid Skip length {}", size);
                    for size in ChunkedRange::new(0..size, self.max_chunk_size) {
                        chunk_writer.write_dont_care_chunk(size)?;
                    }
                }
                DataSource::Fill(value, count) => {
                    let size = count * std::mem::size_of::<u32>() as u64;
                    ensure!(size % self.block_size as u64 == 0, "Invalid Fill length {}", size);
                    for size in ChunkedRange::new(0..size, self.max_chunk_size) {
                        chunk_writer.write_fill_chunk(size, value)?;
                    }
                }
                #[cfg(target_os = "fuchsia")]
                DataSource::Vmo { vmo, size, mut offset } => {
                    ensure!(size % self.block_size as u64 == 0, "Invalid Vmo size {}", size);
                    let mut buffer =
                        vec![0; std::cmp::min(size as usize, self.max_chunk_size as usize)];
                    for size in ChunkedRange::new(0..size, self.max_chunk_size) {
                        let buffer = &mut buffer[0..size as usize];
                        vmo.read(buffer, offset).unwrap();
                        chunk_writer.write_raw_chunk(size, Cursor::new(buffer))?;
                        offset += size as u64;
                    }
                }
            };
        }

        // Now that every chunk has been written, go back and fill in the header.
        let ChunkWriter { num_blocks, num_chunks, .. } = chunk_writer;
        output.seek(SeekFrom::Start(0))?;
        let header = SparseHeader::new(self.block_size, num_blocks, num_chunks);
        bincode::serialize_into(&mut *output, &header)?;

        output.flush()?;
        Ok(())
    }

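    /// Like [`Self::build`], but builds the image into a freshly created VMO and returns it.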
    #[cfg(target_os = "fuchsia")]
    pub fn build_vmo(self) -> Result<zx::Vmo> {
        let vmo = zx::Vmo::create(self.built_size())?;
        let mut stream = zx::Stream::create(zx::StreamOptions::MODE_WRITE, &vmo, 0)?;
        self.build(&mut stream)?;
        Ok(vmo)
    }
}

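/// Writes chunks sequentially to `writer`, tracking the running chunk and block counts that the
/// sparse header needs.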
struct ChunkWriter<'a, W> {
    block_size: u32,
    current_offset: u64,
    num_chunks: u32,
    num_blocks: u32,
    writer: &'a mut W,
}

impl<'a, W: Write> ChunkWriter<'a, W> {
    fn new(block_size: u32, writer: &'a mut W) -> Self {
        Self { block_size, current_offset: 0, num_chunks: 0, num_blocks: 0, writer }
    }

    fn write_chunk_impl<R: Read>(&mut self, chunk: Chunk, source: Option<&mut R>) -> Result<()> {
        chunk.write(source, &mut self.writer, self.block_size)?;
        self.num_blocks = self
            .num_blocks
            .checked_add(chunk.output_blocks(self.block_size))
            .context("Sparse image would contain too many blocks")?;
        self.num_chunks += 1;
        // `current_offset` tracks the position in the expanded (unsparsed) image and becomes
        // the `start` of the next chunk.
        self.current_offset += chunk.output_size() as u64;
        Ok(())
    }

    fn write_raw_chunk<R: Read>(&mut self, size: u32, mut source: R) -> Result<()> {
        self.write_chunk_impl(
            Chunk::Raw { start: self.current_offset, size: size.into() },
            Some(&mut source),
        )
    }

    fn write_dont_care_chunk(&mut self, size: u32) -> Result<()> {
        self.write_chunk_impl(
            Chunk::DontCare { start: self.current_offset, size: size.into() },
            NO_SOURCE,
        )
    }

    fn write_fill_chunk(&mut self, size: u32, value: u32) -> Result<()> {
        self.write_chunk_impl(
            Chunk::Fill { start: self.current_offset, size: size.into(), value },
            NO_SOURCE,
        )
    }
}

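/// Iterator which splits a byte range into consecutive sub-ranges of at most `max_chunk_size`
/// bytes, yielding the length of each sub-range.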
struct ChunkedRange {
    range: Range<u64>,
    max_chunk_size: u32,
}

impl ChunkedRange {
    fn new(range: Range<u64>, max_chunk_size: u32) -> Self {
        Self { range, max_chunk_size }
    }
}

impl Iterator for ChunkedRange {
    type Item = u32;

    fn next(&mut self) -> Option<Self::Item> {
        let size = self.range.end - self.range.start;
        if size == 0 {
            None
        } else if size >= self.max_chunk_size as u64 {
            self.range.start += self.max_chunk_size as u64;
            Some(self.max_chunk_size)
        } else {
            self.range.start = self.range.end;
            Some(size as u32)
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::format::CHUNK_HEADER_SIZE;
    use crate::reader::SparseReader;
    #[cfg(target_os = "fuchsia")]
    use zx::HandleBased as _;

    #[test]
    fn test_chunked_range() {
        assert_eq!(&ChunkedRange::new(0..0, 32).collect::<Vec<_>>(), &[]);
        assert_eq!(&ChunkedRange::new(0..10, 32).collect::<Vec<_>>(), &[10]);
        assert_eq!(&ChunkedRange::new(100..101, 32).collect::<Vec<_>>(), &[1]);
        assert_eq!(&ChunkedRange::new(0..100, 32).collect::<Vec<_>>(), &[32, 32, 32, 4]);
        assert_eq!(&ChunkedRange::new(10..100, 32).collect::<Vec<_>>(), &[32, 32, 26]);
        assert_eq!(
            &ChunkedRange::new((u32::MAX as u64)..(u32::MAX as u64 + 80), 32).collect::<Vec<_>>(),
            &[32, 32, 16]
        );
        assert_eq!(
            &ChunkedRange::new((u64::MAX - 50)..u64::MAX, 32).collect::<Vec<_>>(),
            &[32, 18]
        );
    }

    #[test]
    fn test_build_with_buffer() {
        let mut builder = SparseImageBuilder::new();
        builder.max_chunk_size = BLK_SIZE;
        let mut buf = Vec::with_capacity((BLK_SIZE * 2) as usize);
        let part1 = vec![0xABu8; BLK_SIZE as usize];
        let part2 = vec![0xCDu8; BLK_SIZE as usize];
        buf.extend_from_slice(&part1);
        buf.extend_from_slice(&part2);
        let mut output = vec![];
        let builder = builder.add_source(DataSource::Buffer(buf.into_boxed_slice()));
        let expected_size = builder.built_size() as usize;
        builder.build(&mut Cursor::new(&mut output)).unwrap();
        assert_eq!(output.len(), expected_size);

        let reader = SparseReader::new(Cursor::new(&output)).unwrap();
        assert_eq!(
            reader.chunks(),
            &[
                (
                    Chunk::Raw { start: 0, size: BLK_SIZE.into() },
                    Some((SPARSE_HEADER_SIZE + CHUNK_HEADER_SIZE) as u64)
                ),
                (
                    Chunk::Raw { start: BLK_SIZE as u64, size: BLK_SIZE.into() },
                    Some((SPARSE_HEADER_SIZE + CHUNK_HEADER_SIZE * 2 + BLK_SIZE) as u64)
                )
            ]
        );
        assert_eq!(
            &output[(SPARSE_HEADER_SIZE + CHUNK_HEADER_SIZE) as usize
                ..(SPARSE_HEADER_SIZE + CHUNK_HEADER_SIZE + BLK_SIZE) as usize],
            &part1
        );
        assert_eq!(
            &output[(SPARSE_HEADER_SIZE + CHUNK_HEADER_SIZE * 2 + BLK_SIZE) as usize
                ..(SPARSE_HEADER_SIZE + CHUNK_HEADER_SIZE * 2 + BLK_SIZE * 2) as usize],
            &part2
        );
    }

    #[test]
    fn test_build_with_reader() {
        let part1 = vec![0xABu8; BLK_SIZE as usize];
        let part2 = vec![0xCDu8; BLK_SIZE as usize];
        let mut buf = Vec::with_capacity(BLK_SIZE as usize * 2);
        buf.extend_from_slice(&part1);
        buf.extend_from_slice(&part2);

        let mut builder = SparseImageBuilder::new();
        builder.max_chunk_size = BLK_SIZE;
        let mut output = vec![];

        let reader1 = Cursor::new(buf.clone());
        let mut reader2 = Cursor::new(buf);
        reader2.seek(SeekFrom::Start(BLK_SIZE as u64)).unwrap();

        let builder = builder
            .add_source(DataSource::Reader {
                reader: Box::new(reader1),
                size: (BLK_SIZE * 2) as u64,
            })
            .add_source(DataSource::Reader { reader: Box::new(reader2), size: BLK_SIZE as u64 });
        let expected_size = builder.built_size() as usize;
        builder.build(&mut Cursor::new(&mut output)).unwrap();
        assert_eq!(output.len(), expected_size);

        let reader = SparseReader::new(Cursor::new(&output)).unwrap();
        assert_eq!(
            reader.chunks(),
            &[
                (
                    Chunk::Raw { start: 0, size: BLK_SIZE.into() },
                    Some((SPARSE_HEADER_SIZE + CHUNK_HEADER_SIZE) as u64)
                ),
                (
                    Chunk::Raw { start: BLK_SIZE as u64, size: BLK_SIZE.into() },
                    Some((SPARSE_HEADER_SIZE + CHUNK_HEADER_SIZE * 2 + BLK_SIZE) as u64)
                ),
                (
                    Chunk::Raw { start: (BLK_SIZE * 2) as u64, size: BLK_SIZE.into() },
                    Some((SPARSE_HEADER_SIZE + CHUNK_HEADER_SIZE * 3 + BLK_SIZE * 2) as u64)
                ),
            ]
        );
        assert_eq!(
            &output[(SPARSE_HEADER_SIZE + CHUNK_HEADER_SIZE) as usize
                ..(SPARSE_HEADER_SIZE + CHUNK_HEADER_SIZE + BLK_SIZE) as usize],
            &part1
        );
        assert_eq!(
            &output[(SPARSE_HEADER_SIZE + CHUNK_HEADER_SIZE * 2 + BLK_SIZE) as usize
                ..(SPARSE_HEADER_SIZE + CHUNK_HEADER_SIZE * 2 + BLK_SIZE * 2) as usize],
            &part2
        );
        assert_eq!(
            &output[(SPARSE_HEADER_SIZE + CHUNK_HEADER_SIZE * 3 + BLK_SIZE * 2) as usize
                ..(SPARSE_HEADER_SIZE + CHUNK_HEADER_SIZE * 3 + BLK_SIZE * 3) as usize],
            &part2
        );
    }

    #[test]
    fn test_build_with_skip() {
        let mut builder = SparseImageBuilder::new();
        builder.max_chunk_size = BLK_SIZE;
        let mut output = vec![];
        let builder = builder.add_source(DataSource::Skip((BLK_SIZE * 2) as u64));
        let expected_size = builder.built_size() as usize;
        builder.build(&mut Cursor::new(&mut output)).unwrap();
        assert_eq!(output.len(), expected_size);

        let reader = SparseReader::new(Cursor::new(&output)).unwrap();
        assert_eq!(
            reader.chunks(),
            &[
                (Chunk::DontCare { start: 0, size: BLK_SIZE.into() }, None),
                (Chunk::DontCare { start: BLK_SIZE as u64, size: BLK_SIZE.into() }, None)
            ]
        );
    }

    #[test]
    fn test_build_with_fill() {
        let mut builder = SparseImageBuilder::new();
        builder.max_chunk_size = BLK_SIZE;
        let mut output = vec![];
        let builder = builder.add_source(DataSource::Fill(0xAB, (BLK_SIZE / 2) as u64));
        let expected_size = builder.built_size() as usize;
        builder.build(&mut Cursor::new(&mut output)).unwrap();
        assert_eq!(output.len(), expected_size);

        let reader = SparseReader::new(Cursor::new(&output)).unwrap();
        assert_eq!(
            reader.chunks(),
            &[
                (Chunk::Fill { start: 0, size: BLK_SIZE.into(), value: 0xAB }, None),
                (Chunk::Fill { start: BLK_SIZE as u64, size: BLK_SIZE.into(), value: 0xAB }, None)
            ]
        );
    }

    #[test]
    fn test_overflow_block_count() {
        struct Sink;

        impl Write for Sink {
            fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
                Ok(buf.len())
            }

            fn flush(&mut self) -> std::io::Result<()> {
                Ok(())
            }
        }

        impl Seek for Sink {
            fn seek(&mut self, _pos: SeekFrom) -> std::io::Result<u64> {
                Ok(0)
            }
        }

        let result = SparseImageBuilder::new()
            .set_block_size(16)
            .add_source(DataSource::Skip(u64::MAX - 15))
            .build(&mut Sink);
        assert!(result.is_err());
    }

    #[cfg(target_os = "fuchsia")]
    #[test]
    fn test_build_with_vmo() {
        let mut builder = SparseImageBuilder::new();
        builder.max_chunk_size = BLK_SIZE;
        let size = (BLK_SIZE * 2) as u64;
        let vmo = zx::Vmo::create(size).unwrap();
        const PART_1: [u8; BLK_SIZE as usize] = [0xABu8; BLK_SIZE as usize];
        const PART_2: [u8; BLK_SIZE as usize] = [0xCBu8; BLK_SIZE as usize];
        vmo.write(&PART_1, 0).unwrap();
        vmo.write(&PART_2, BLK_SIZE as u64).unwrap();
        let mut output = vec![];
        let builder = builder
            .add_source(DataSource::Vmo {
                vmo: vmo.duplicate_handle(zx::Rights::SAME_RIGHTS).unwrap(),
                size: BLK_SIZE as u64,
                offset: 0,
            })
            .add_source(DataSource::Vmo { vmo, size: BLK_SIZE as u64, offset: BLK_SIZE as u64 });
        let expected_size = builder.built_size() as usize;
        builder.build(&mut Cursor::new(&mut output)).unwrap();
        assert_eq!(output.len(), expected_size);

        let reader = SparseReader::new(Cursor::new(&output)).unwrap();
        assert_eq!(
            reader.chunks(),
            &[
                (
                    Chunk::Raw { start: 0, size: BLK_SIZE.into() },
                    Some((SPARSE_HEADER_SIZE + CHUNK_HEADER_SIZE) as u64)
                ),
                (
                    Chunk::Raw { start: BLK_SIZE as u64, size: BLK_SIZE.into() },
                    Some((SPARSE_HEADER_SIZE + CHUNK_HEADER_SIZE * 2 + BLK_SIZE) as u64)
                )
            ]
        );
        assert_eq!(
            output[(SPARSE_HEADER_SIZE + CHUNK_HEADER_SIZE) as usize
                ..(SPARSE_HEADER_SIZE + CHUNK_HEADER_SIZE + BLK_SIZE) as usize],
            PART_1
        );
        assert_eq!(
            output[(SPARSE_HEADER_SIZE + CHUNK_HEADER_SIZE * 2 + BLK_SIZE) as usize
                ..(SPARSE_HEADER_SIZE + CHUNK_HEADER_SIZE * 2 + BLK_SIZE * 2) as usize],
            PART_2
        );
    }
}