cros_async/mem.rs

// Copyright 2020 The ChromiumOS Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

use base::VolatileSlice;
use remain::sorted;
use thiserror::Error as ThisError;

#[sorted]
#[derive(ThisError, Debug)]
pub enum Error {
    /// Invalid offset or length given for an iovec in backing memory.
    #[error("Invalid offset/len for getting a slice from {0} with len {1}.")]
    InvalidOffset(u64, usize),
}
pub type Result<T> = std::result::Result<T, Error>;

/// Used to index subslices of backing memory. Like an iovec, but relative to the start of the
/// backing memory instead of an absolute pointer.
/// The backing memory referenced by the region can be an array, an mmapped file, or guest memory.
/// The offset is a u64 to allow file or guest offsets >4GB when run on a 32-bit host.
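///
/// # Example
///
/// An illustrative sketch (assumes this type is importable as `cros_async::mem::MemRegion`):
///
/// ```
/// use cros_async::mem::MemRegion;
///
/// // Bytes 8..16 of whatever backing memory this region is paired with.
/// let region = MemRegion { offset: 8, len: 8 };
/// assert_eq!(region.offset + region.len as u64, 16);
/// ```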
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct MemRegion {
    pub offset: u64,
    pub len: usize,
}

/// Iterator over an ordered list of [`MemRegion`].
///
/// In addition to the usual iterator operations, `MemRegionIter` provides extra functionality that
/// allows subslicing individual memory regions without mutating the underlying list:
/// - [`skip_bytes()`](Self::skip_bytes): Advance the iterator some number of bytes, potentially
///   starting iteration in the middle of a `MemRegion`.
/// - [`take_bytes()`](Self::take_bytes): Truncate the iterator at some number of bytes, potentially
///   ending iteration in the middle of a `MemRegion`.
///
/// The order of subslicing operations matters: limiting length followed by skipping bytes is not
/// the same as skipping bytes followed by limiting length.
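///
/// # Example
///
/// An illustrative sketch of the order dependence (assumes these items are importable as
/// `cros_async::mem::{MemRegion, MemRegionIter}`):
///
/// ```
/// use cros_async::mem::{MemRegion, MemRegionIter};
///
/// let regions = [MemRegion { offset: 0, len: 4 }];
///
/// // Skip one byte, then keep at most two of what remains: bytes 1..3 of the region.
/// let mut skip_then_take = MemRegionIter::new(&regions).skip_bytes(1).take_bytes(2);
/// assert_eq!(skip_then_take.next(), Some(MemRegion { offset: 1, len: 2 }));
/// assert_eq!(skip_then_take.next(), None);
///
/// // Keep at most two bytes, then skip one of those: only byte 1 remains.
/// let mut take_then_skip = MemRegionIter::new(&regions).take_bytes(2).skip_bytes(1);
/// assert_eq!(take_then_skip.next(), Some(MemRegion { offset: 1, len: 1 }));
/// assert_eq!(take_then_skip.next(), None);
/// ```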
#[derive(Clone)]
pub struct MemRegionIter<'a> {
    regions: &'a [MemRegion],
    skip_bytes: usize,
    remaining_bytes: usize,
}

impl<'a> MemRegionIter<'a> {
    /// Create a new `MemRegionIter` over a slice of `MemRegion`.
    ///
    /// By default, the `MemRegionIter` will iterate over each `MemRegion` in the list in its
    /// entirety. Call [`skip_bytes()`](Self::skip_bytes) and/or
    /// [`take_bytes()`](Self::take_bytes) to limit iteration to a sub-slice of the specified
    /// `regions` list.
    pub fn new(regions: &'a [MemRegion]) -> Self {
        MemRegionIter {
            regions,
            skip_bytes: 0,
            remaining_bytes: usize::MAX,
        }
    }

    /// Advance the iterator by `offset` bytes.
    ///
    /// This may place the iterator in the middle of a [`MemRegion`]; in this case, the offset and
    /// length of the next [`MemRegion`] returned by [`next()`](Self::next) will be adjusted to
    /// account for the offset.
    ///
    /// Skipping more than the remaining length of an iterator is not an error; if `offset` is
    /// greater than or equal to the total number of remaining bytes, future calls to
    /// [`next()`](Self::next) will simply return `None`.
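    ///
    /// # Example
    ///
    /// An illustrative sketch (assumes the `cros_async::mem` import path):
    ///
    /// ```
    /// use cros_async::mem::{MemRegion, MemRegionIter};
    ///
    /// let regions = [MemRegion { offset: 0, len: 4 }];
    ///
    /// // A partial skip trims the front of the first region.
    /// assert_eq!(
    ///     MemRegionIter::new(&regions).skip_bytes(1).next(),
    ///     Some(MemRegion { offset: 1, len: 3 })
    /// );
    ///
    /// // Skipping past the end is not an error; the iterator just yields nothing.
    /// assert_eq!(MemRegionIter::new(&regions).skip_bytes(5).next(), None);
    /// ```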
    pub fn skip_bytes(self, offset: usize) -> Self {
        MemRegionIter {
            regions: self.regions,
            skip_bytes: self.skip_bytes.saturating_add(offset),
            remaining_bytes: self.remaining_bytes.saturating_sub(offset),
        }
    }

    /// Truncate the length of the iterator to at most `max` bytes.
    ///
    /// This may cause the final [`MemRegion`] returned by [`next()`](Self::next) to be adjusted so
    /// that its length does not cause the total number of bytes to exceed the requested `max`.
    ///
    /// If fewer than `max` bytes remain in the iterator, this function has no effect.
    ///
    /// Only truncation is supported; an iterator cannot be extended, even if it was truncated by a
    /// previous call to `take_bytes()`.
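    ///
    /// # Example
    ///
    /// An illustrative sketch (assumes the `cros_async::mem` import path):
    ///
    /// ```
    /// use cros_async::mem::{MemRegion, MemRegionIter};
    ///
    /// let regions = [MemRegion { offset: 0, len: 4 }];
    ///
    /// // Truncate iteration to the first two bytes of the region.
    /// assert_eq!(
    ///     MemRegionIter::new(&regions).take_bytes(2).next(),
    ///     Some(MemRegion { offset: 0, len: 2 })
    /// );
    ///
    /// // Asking for more bytes than remain has no effect.
    /// assert_eq!(
    ///     MemRegionIter::new(&regions).take_bytes(5).next(),
    ///     Some(MemRegion { offset: 0, len: 4 })
    /// );
    /// ```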
    pub fn take_bytes(self, max: usize) -> Self {
        MemRegionIter {
            regions: self.regions,
            skip_bytes: self.skip_bytes,
            remaining_bytes: self.remaining_bytes.min(max),
        }
    }
}

impl Iterator for MemRegionIter<'_> {
    type Item = MemRegion;

    fn next(&mut self) -> Option<Self::Item> {
        if self.remaining_bytes == 0 {
            return None;
        }

        while let Some((first, remaining)) = self.regions.split_first() {
            // This call to `next()` will consume `first`; future calls will start with `remaining`.
            self.regions = remaining;

            // If skip_bytes encompasses this entire region, skip to the next region.
            // This also skips zero-length regions, which should not be returned by the iterator.
            if self.skip_bytes >= first.len {
                self.skip_bytes -= first.len;
                continue;
            }

            // Adjust the current region and reset `self.skip_bytes` to 0 to fully consume it.
            let mut region = MemRegion {
                offset: first.offset + self.skip_bytes as u64,
                len: first.len - self.skip_bytes,
            };
            self.skip_bytes = 0;

            // If this region is at least as large as `remaining_bytes`, truncate the region and set
            // `regions` to an empty slice to terminate iteration in future calls to `next()`.
            if region.len >= self.remaining_bytes {
                region.len = self.remaining_bytes;
                self.remaining_bytes = 0;
                self.regions = &[];
            } else {
                // Consume and return the full region.
                self.remaining_bytes -= region.len;
            }

            // This should never return a zero-length region (should be handled by the
            // `remaining_bytes == 0` early return and zero-length region skipping above).
            debug_assert_ne!(region.len, 0);
            return Some(region);
        }

        None
    }
}

/// Trait for memory that can yield iovecs pointing into the backing memory.
/// # Safety
/// Must be OK to modify the backing memory without owning a mutable reference. For example,
/// this is safe for GuestMemory and VolatileSlices in crosvm as those types guarantee they are
/// dealt with as volatile.
pub unsafe trait BackingMemory {
    /// Returns a `VolatileSlice` pointing into the backing memory. This is most commonly unsafe.
    /// To implement this safely the implementor must guarantee that the backing memory can be
    /// modified out of band without affecting safety guarantees.
    fn get_volatile_slice(&self, mem_range: MemRegion) -> Result<VolatileSlice>;
}

/// Wrapper to be used for passing a `Vec` in as backing memory for asynchronous operations. The
/// wrapper owns a `Vec` according to the borrow checker. It is loaning this vec out to the kernel
/// (or other modifiers) through the `BackingMemory` trait. This allows multiple modifiers of the
/// array in the `Vec` while this struct is alive. The data in the `Vec` is loaned to the kernel,
/// not the data structure itself, so the length, capacity, and pointer to memory cannot be
/// modified. To ensure that those operations can be done safely, no access is allowed to the
/// `Vec`'s memory starting at the time that `VecIoWrapper` is constructed until the time it is
/// turned back into a `Vec` using `Into::into`. The returned `Vec` is guaranteed to be valid as
/// any combination of bits in a `Vec` of `u8` is valid.
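///
/// # Example
///
/// A minimal usage sketch (assumes these items are importable as `cros_async::mem`):
///
/// ```
/// use cros_async::mem::{BackingMemory, MemRegion, VecIoWrapper};
///
/// let wrapper = VecIoWrapper::from(vec![0u8; 16]);
/// // In-bounds ranges yield `VolatileSlice`s that async I/O can read or write through.
/// assert!(wrapper.get_volatile_slice(MemRegion { offset: 4, len: 8 }).is_ok());
/// // Out-of-bounds ranges are rejected.
/// assert!(wrapper.get_volatile_slice(MemRegion { offset: 12, len: 8 }).is_err());
/// // Once no slices are outstanding, the buffer converts back into a plain `Vec<u8>`.
/// let buf: Vec<u8> = wrapper.into();
/// assert_eq!(buf.len(), 16);
/// ```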
pub struct VecIoWrapper {
    inner: Box<[u8]>,
}

impl From<Vec<u8>> for VecIoWrapper {
    fn from(vec: Vec<u8>) -> Self {
        VecIoWrapper { inner: vec.into() }
    }
}

impl From<VecIoWrapper> for Vec<u8> {
    fn from(v: VecIoWrapper) -> Vec<u8> {
        v.inner.into()
    }
}

impl VecIoWrapper {
    /// Get the length of the Vec that is wrapped.
    #[cfg_attr(windows, allow(dead_code))]
    pub fn len(&self) -> usize {
        self.inner.len()
    }

    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    // Check that the offsets are all valid in the backing vec.
    fn check_addrs(&self, mem_range: &MemRegion) -> Result<()> {
        let end = mem_range
            .offset
            .checked_add(mem_range.len as u64)
            .ok_or(Error::InvalidOffset(mem_range.offset, mem_range.len))?;
        if end > self.inner.len() as u64 {
            return Err(Error::InvalidOffset(mem_range.offset, mem_range.len));
        }
        Ok(())
    }
}

// SAFETY:
// Safe to implement BackingMemory as the vec is only accessible inside the wrapper and these iovecs
// are the only thing allowed to modify it.  Nothing else can get a reference to the vec until all
// iovecs are dropped because they borrow Self.  Nothing can borrow the owned inner vec until self
// is consumed by `into`, which can't happen if there are outstanding mut borrows.
unsafe impl BackingMemory for VecIoWrapper {
    fn get_volatile_slice(&self, mem_range: MemRegion) -> Result<VolatileSlice<'_>> {
        self.check_addrs(&mem_range)?;
        // SAFETY:
        // Safe because the mem_range range is valid in the backing memory as checked above.
        unsafe {
            Ok(VolatileSlice::from_raw_parts(
                self.inner.as_ptr().add(mem_range.offset as usize) as *mut _,
                mem_range.len,
            ))
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn mem_region_iter_empty() {
        let mut iter = MemRegionIter::new(&[]);
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_one() {
        let mut iter = MemRegionIter::new(&[MemRegion { offset: 0, len: 4 }]);
        assert_eq!(iter.next(), Some(MemRegion { offset: 0, len: 4 }));
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_one_len_usize_max() {
        let mut iter = MemRegionIter::new(&[MemRegion {
            offset: 0,
            len: usize::MAX,
        }]);
        assert_eq!(
            iter.next(),
            Some(MemRegion {
                offset: 0,
                len: usize::MAX
            })
        );
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_one_len_zero() {
        let mut iter = MemRegionIter::new(&[MemRegion { offset: 0, len: 0 }]);
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_one_skip_partial() {
        let mut iter = MemRegionIter::new(&[MemRegion { offset: 0, len: 4 }]).skip_bytes(1);
        assert_eq!(iter.next(), Some(MemRegion { offset: 1, len: 3 }));
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_one_skip_full() {
        let mut iter = MemRegionIter::new(&[MemRegion { offset: 0, len: 4 }]).skip_bytes(4);
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_one_skip_excess() {
        let mut iter = MemRegionIter::new(&[MemRegion { offset: 0, len: 4 }]).skip_bytes(5);
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_one_take_zero() {
        let mut iter = MemRegionIter::new(&[MemRegion { offset: 0, len: 4 }]).take_bytes(0);
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_one_take_partial() {
        let mut iter = MemRegionIter::new(&[MemRegion { offset: 0, len: 4 }]).take_bytes(1);
        assert_eq!(iter.next(), Some(MemRegion { offset: 0, len: 1 }));
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_one_take_full() {
        let mut iter = MemRegionIter::new(&[MemRegion { offset: 0, len: 4 }]).take_bytes(4);
        assert_eq!(iter.next(), Some(MemRegion { offset: 0, len: 4 }));
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_one_take_excess() {
        let mut iter = MemRegionIter::new(&[MemRegion { offset: 0, len: 4 }]).take_bytes(5);
        assert_eq!(iter.next(), Some(MemRegion { offset: 0, len: 4 }));
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_one_take_skip() {
        let mut iter = MemRegionIter::new(&[MemRegion { offset: 0, len: 4 }])
            .take_bytes(2)
            .skip_bytes(1);
        assert_eq!(iter.next(), Some(MemRegion { offset: 1, len: 1 }));
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_one_skip_take() {
        let mut iter = MemRegionIter::new(&[MemRegion { offset: 0, len: 4 }])
            .skip_bytes(1)
            .take_bytes(2);
        assert_eq!(iter.next(), Some(MemRegion { offset: 1, len: 2 }));
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_two() {
        let mut iter = MemRegionIter::new(&[
            MemRegion { offset: 0, len: 4 },
            MemRegion { offset: 8, len: 2 },
        ]);
        assert_eq!(iter.next(), Some(MemRegion { offset: 0, len: 4 }));
        assert_eq!(iter.next(), Some(MemRegion { offset: 8, len: 2 }));
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_two_skip_partial() {
        let mut iter = MemRegionIter::new(&[
            MemRegion { offset: 0, len: 4 },
            MemRegion { offset: 8, len: 2 },
        ])
        .skip_bytes(1);
        assert_eq!(iter.next(), Some(MemRegion { offset: 1, len: 3 }));
        assert_eq!(iter.next(), Some(MemRegion { offset: 8, len: 2 }));
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_two_skip_full() {
        let mut iter = MemRegionIter::new(&[
            MemRegion { offset: 0, len: 4 },
            MemRegion { offset: 8, len: 2 },
        ])
        .skip_bytes(4);
        assert_eq!(iter.next(), Some(MemRegion { offset: 8, len: 2 }));
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_two_skip_excess() {
        let mut iter = MemRegionIter::new(&[
            MemRegion { offset: 0, len: 4 },
            MemRegion { offset: 8, len: 2 },
        ])
        .skip_bytes(5);
        assert_eq!(iter.next(), Some(MemRegion { offset: 9, len: 1 }));
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_two_skip_multi() {
        let mut iter = MemRegionIter::new(&[
            MemRegion { offset: 0, len: 4 },
            MemRegion { offset: 8, len: 2 },
        ])
        .skip_bytes(6);
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_two_take_partial() {
        let mut iter = MemRegionIter::new(&[
            MemRegion { offset: 0, len: 4 },
            MemRegion { offset: 8, len: 2 },
        ])
        .take_bytes(1);
        assert_eq!(iter.next(), Some(MemRegion { offset: 0, len: 1 }));
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_two_take_partial2() {
        let mut iter = MemRegionIter::new(&[
            MemRegion { offset: 0, len: 4 },
            MemRegion { offset: 8, len: 2 },
        ])
        .take_bytes(5);
        assert_eq!(iter.next(), Some(MemRegion { offset: 0, len: 4 }));
        assert_eq!(iter.next(), Some(MemRegion { offset: 8, len: 1 }));
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_two_take_full() {
        let mut iter = MemRegionIter::new(&[
            MemRegion { offset: 0, len: 4 },
            MemRegion { offset: 8, len: 2 },
        ])
        .take_bytes(6);
        assert_eq!(iter.next(), Some(MemRegion { offset: 0, len: 4 }));
        assert_eq!(iter.next(), Some(MemRegion { offset: 8, len: 2 }));
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_two_take_excess() {
        let mut iter = MemRegionIter::new(&[
            MemRegion { offset: 0, len: 4 },
            MemRegion { offset: 8, len: 2 },
        ])
        .take_bytes(7);
        assert_eq!(iter.next(), Some(MemRegion { offset: 0, len: 4 }));
        assert_eq!(iter.next(), Some(MemRegion { offset: 8, len: 2 }));
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_embedded_zero_len() {
        let mut iter = MemRegionIter::new(&[
            MemRegion { offset: 0, len: 4 },
            MemRegion { offset: 8, len: 2 },
            MemRegion { offset: 9, len: 0 },
            MemRegion { offset: 16, len: 5 },
            MemRegion { offset: 6, len: 0 },
            MemRegion { offset: 24, len: 9 },
        ])
        .skip_bytes(2)
        .take_bytes(12);
        assert_eq!(iter.next(), Some(MemRegion { offset: 2, len: 2 }));
        assert_eq!(iter.next(), Some(MemRegion { offset: 8, len: 2 }));
        assert_eq!(iter.next(), Some(MemRegion { offset: 16, len: 5 }));
        assert_eq!(iter.next(), Some(MemRegion { offset: 24, len: 3 }));
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_skip_multi() {
        let mut iter = MemRegionIter::new(&[
            MemRegion { offset: 0, len: 4 },
            MemRegion { offset: 8, len: 2 },
            MemRegion { offset: 16, len: 5 },
            MemRegion { offset: 24, len: 9 },
        ])
        .skip_bytes(7);
        assert_eq!(iter.next(), Some(MemRegion { offset: 17, len: 4 }));
        assert_eq!(iter.next(), Some(MemRegion { offset: 24, len: 9 }));
        assert_eq!(iter.next(), None);
    }
}