// heapdump_vmo/memory_mapped_vmo.rs

use std::mem::{align_of, size_of};
/// A VMO mapped into the current process' root VMAR, with typed, bounds-checked
/// read (and optionally write) access to its contents.
///
/// The mapping is created by `new_readonly`/`new_readwrite` and released on drop.
pub struct MemoryMappedVmo {
    // Base address of the mapping in the root VMAR.
    map_addr: usize,
    // Content size of the mapped VMO, in bytes (as reported by `get_content_size`).
    vmo_size: usize,
    // Whether the mapping was created with PERM_WRITE (i.e. via `new_readwrite`).
    writable: bool,
}
13
14impl MemoryMappedVmo {
15 pub fn new_readonly(vmo: &zx::Vmo) -> Result<MemoryMappedVmo, zx::Status> {
19 Self::new_impl(vmo, false)
20 }
21
22 pub fn new_readwrite(vmo: &zx::Vmo) -> Result<MemoryMappedVmo, zx::Status> {
24 Self::new_impl(vmo, true)
25 }
26
27 fn new_impl(vmo: &zx::Vmo, writable: bool) -> Result<MemoryMappedVmo, zx::Status> {
28 let vmo_size = vmo.get_content_size()? as usize;
29
30 let mut flags = zx::VmarFlags::PERM_READ
31 | zx::VmarFlags::ALLOW_FAULTS
32 | zx::VmarFlags::REQUIRE_NON_RESIZABLE;
33 if writable {
34 flags |= zx::VmarFlags::PERM_WRITE;
35 }
36
37 let map_addr = fuchsia_runtime::vmar_root_self().map(0, &vmo, 0, vmo_size, flags)?;
38 Ok(MemoryMappedVmo { map_addr, vmo_size, writable })
39 }
40
41 pub fn vmo_size(&self) -> usize {
44 self.vmo_size
45 }
46
47 fn validate_and_get_ptr<T>(
51 &self,
52 byte_offset: usize,
53 num_elements: usize,
54 ) -> Result<*const T, crate::Error> {
55 if byte_offset % align_of::<T>() == 0 {
56 if let Some(num_bytes) = size_of::<T>().checked_mul(num_elements) {
57 if let Some(end) = byte_offset.checked_add(num_bytes) {
58 if end <= self.vmo_size {
59 return Ok((self.map_addr + byte_offset) as *const T);
60 }
61 }
62 }
63 }
64
65 Err(crate::Error::InvalidInput)
66 }
67
68 fn validate_and_get_mut_ptr<T>(
70 &mut self,
71 byte_offset: usize,
72 num_elements: usize,
73 ) -> Result<*mut T, crate::Error> {
74 if !self.writable {
75 panic!("MemoryMappedVmo is not writable");
76 }
77
78 Ok(self.validate_and_get_ptr::<T>(byte_offset, num_elements)? as *mut T)
79 }
80
81 pub fn get_slice<'a, T: MemoryMappable>(
85 &'a self,
86 byte_offset: usize,
87 num_elements: usize,
88 ) -> Result<&'a [T], crate::Error> {
89 let ptr = self.validate_and_get_ptr(byte_offset, num_elements)?;
90 unsafe { Ok(std::slice::from_raw_parts(ptr, num_elements)) }
91 }
92
93 pub fn get_object<'a, T: MemoryMappable>(
97 &'a self,
98 byte_offset: usize,
99 ) -> Result<&'a T, crate::Error> {
100 let ptr = self.validate_and_get_ptr(byte_offset, 1)?;
101 unsafe { Ok(&*ptr) }
102 }
103
104 pub fn get_slice_mut<'a, T: MemoryMappable>(
108 &'a mut self,
109 byte_offset: usize,
110 num_elements: usize,
111 ) -> Result<&'a mut [T], crate::Error> {
112 let ptr = self.validate_and_get_mut_ptr(byte_offset, num_elements)?;
113 unsafe { Ok(std::slice::from_raw_parts_mut(ptr, num_elements)) }
114 }
115
116 pub fn get_object_mut<'a, T: MemoryMappable>(
120 &mut self,
121 byte_offset: usize,
122 ) -> Result<&'a mut T, crate::Error> {
123 let ptr = self.validate_and_get_mut_ptr(byte_offset, 1)?;
124 unsafe { Ok(&mut *ptr) }
125 }
126}
127
impl Drop for MemoryMappedVmo {
    fn drop(&mut self) {
        // Release the mapping created in `new_impl`.
        //
        // SAFETY: `map_addr`/`vmo_size` describe exactly the range we mapped, and the
        // lifetimes of all references handed out by the accessors are tied to borrows
        // of `self`, so no live reference into the mapping can exist at drop time.
        unsafe {
            fuchsia_runtime::vmar_root_self()
                .unmap(self.map_addr, self.vmo_size)
                .expect("failed to unmap MemoryMappedVmo");
        }
    }
}
138
/// Marker trait for types whose values can be safely read from (and written to) VMO
/// memory that this process does not control.
///
/// # Safety
/// Implementors must guarantee that every possible bit pattern is a valid value of the
/// type and that the type contains no padding bytes and no pointers/references: the
/// accessors reinterpret raw mapped bytes as `&T`/`&mut T` (see
/// `MemoryMappedVmo::get_slice`), so any "plain old data" invariant violation would be
/// instant undefined behavior.
pub unsafe trait MemoryMappable {}

// SAFETY: fixed-width integers accept any bit pattern and contain no padding or
// pointers.
unsafe impl MemoryMappable for u8 {}
unsafe impl MemoryMappable for u16 {}
unsafe impl MemoryMappable for u32 {}
unsafe impl MemoryMappable for u64 {}
unsafe impl MemoryMappable for i8 {}
unsafe impl MemoryMappable for i16 {}
unsafe impl MemoryMappable for i32 {}
unsafe impl MemoryMappable for i64 {}

// SAFETY: slices and arrays of mappable elements are densely packed (no inter-element
// padding in Rust) and inherit the elements' any-bit-pattern validity.
unsafe impl<T: MemoryMappable> MemoryMappable for [T] {}
unsafe impl<T: MemoryMappable, const N: usize> MemoryMappable for [T; N] {}
158
#[cfg(test)]
mod tests {
    use super::*;
    use assert_matches::assert_matches;

    const TEST_DATA: [u64; 4] = [11, 22, 33, 44];
    const TEST_DATA_SIZE: usize = size_of::<u64>() * TEST_DATA.len();

    #[test]
    fn test_vmo_size() {
        let vmo = zx::Vmo::create(TEST_DATA_SIZE as u64).unwrap();
        let mapped = MemoryMappedVmo::new_readwrite(&vmo).unwrap();

        assert_eq!(mapped.vmo_size(), TEST_DATA_SIZE);
    }

    #[test]
    fn test_write_objects_read_slice() {
        let vmo = zx::Vmo::create(TEST_DATA_SIZE as u64).unwrap();

        // Write each element individually through a writable mapping.
        {
            let mut writer = MemoryMappedVmo::new_readwrite(&vmo).unwrap();
            for (index, value) in TEST_DATA.iter().enumerate() {
                *writer.get_object_mut(size_of::<u64>() * index).unwrap() = *value;
            }
        }

        // Read everything back in one shot through a read-only mapping.
        {
            let reader = MemoryMappedVmo::new_readonly(&vmo).unwrap();
            assert_eq!(*reader.get_slice::<u64>(0, 4).unwrap(), TEST_DATA);
        }
    }

    #[test]
    fn test_write_slice_read_objects() {
        let vmo = zx::Vmo::create(TEST_DATA_SIZE as u64).unwrap();

        // Write all the data in one shot through a writable mapping.
        {
            let mut writer = MemoryMappedVmo::new_readwrite(&vmo).unwrap();
            writer.get_slice_mut(0, 4).unwrap().copy_from_slice(&TEST_DATA);
        }

        // Read each element back individually through a read-only mapping.
        {
            let reader = MemoryMappedVmo::new_readonly(&vmo).unwrap();
            for (index, expected_val) in TEST_DATA.iter().enumerate() {
                let actual_val: &u64 = reader.get_object(size_of::<u64>() * index).unwrap();
                assert_eq!(*actual_val, *expected_val, "value mismatch at i={}", index);
            }
        }
    }

    #[test]
    fn test_write_slice_read_subslices() {
        const COUNT: usize = 4;
        let vmo = zx::Vmo::create((size_of::<u64>() * COUNT) as u64).unwrap();

        let mut mapped = MemoryMappedVmo::new_readwrite(&vmo).unwrap();
        mapped.get_slice_mut::<u64>(0, COUNT).unwrap().copy_from_slice(&[11, 22, 33, 44]);

        // Every subslice starting at the second element, in increasing lengths.
        const SECOND_ELEM_BYTE_OFFSET: usize = size_of::<u64>();
        const TAIL: [u64; 3] = [22, 33, 44];
        for len in 0..=TAIL.len() {
            assert_eq!(
                *mapped.get_slice::<u64>(SECOND_ELEM_BYTE_OFFSET, len).unwrap(),
                TAIL[..len]
            );
        }
    }

    #[test]
    fn test_uninitialized_is_zero() {
        const COUNT: usize = 4;
        let vmo = zx::Vmo::create((size_of::<u64>() * COUNT) as u64).unwrap();
        let mapped = MemoryMappedVmo::new_readonly(&vmo).unwrap();

        assert_eq!(*mapped.get_slice::<u64>(0, COUNT).unwrap(), [0; COUNT]);
    }

    #[test]
    fn test_range_errors() {
        const COUNT: usize = 4;
        let vmo = zx::Vmo::create((size_of::<u64>() * COUNT) as u64).unwrap();
        let mapped = MemoryMappedVmo::new_readonly(&vmo).unwrap();

        // Misaligned offsets must be rejected.
        const MISALIGNED_OFFSET: usize = size_of::<u64>() - 1;
        assert_matches!(
            mapped.get_object::<u64>(MISALIGNED_OFFSET),
            Err(crate::Error::InvalidInput)
        );

        // Ranges extending past the end of the VMO must be rejected.
        const SECOND_ELEM_BYTE_OFFSET: usize = size_of::<u64>();
        assert_matches!(
            mapped.get_slice::<u64>(SECOND_ELEM_BYTE_OFFSET, COUNT),
            Err(crate::Error::InvalidInput)
        );
    }

    #[test]
    #[should_panic(expected = "MemoryMappedVmo is not writable")]
    fn test_cannot_get_mutable_slice_from_readonly_vmo() {
        let vmo = zx::Vmo::create(TEST_DATA_SIZE as u64).unwrap();
        let mut mapped = MemoryMappedVmo::new_readonly(&vmo).unwrap();

        let _ = mapped.get_slice_mut::<u64>(0, 1);
    }

    #[test]
    #[should_panic(expected = "MemoryMappedVmo is not writable")]
    fn test_cannot_get_mutable_object_from_readonly_vmo() {
        let vmo = zx::Vmo::create(TEST_DATA_SIZE as u64).unwrap();
        let mut mapped = MemoryMappedVmo::new_readonly(&vmo).unwrap();

        let _ = mapped.get_object_mut::<u64>(0);
    }
}