// Copyright 2020 The ChromiumOS Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use base::VolatileSlice;
use remain::sorted;
use thiserror::Error as ThisError;
#[sorted]
#[derive(ThisError, Debug)]
/// Errors returned when indexing into [`BackingMemory`].
pub enum Error {
    /// Invalid offset or length given for an iovec in backing memory.
    #[error("Invalid offset/len for getting a slice from {0} with len {1}.")]
    InvalidOffset(u64, usize),
}

/// Result alias for fallible backing-memory operations in this module.
pub type Result<T> = std::result::Result<T, Error>;
/// Used to index subslices of backing memory. Like an iovec, but relative to the start of the
/// backing memory instead of an absolute pointer.
/// The backing memory referenced by the region can be an array, an mmapped file, or guest memory.
/// The offset is a u64 to allow having file or guest offsets >4GB when run on a 32bit host.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct MemRegion {
    /// Byte offset of the region from the start of the backing memory.
    pub offset: u64,
    /// Length of the region in bytes.
    pub len: usize,
}
/// Iterator over an ordered list of [`MemRegion`].
///
/// In addition to the usual iterator operations, `MemRegionIter` provides extra functionality that
/// allows subslicing individual memory regions without mutating the underlying list:
/// - [`skip_bytes()`](Self::skip_bytes): Advance the iterator some number of bytes, potentially
///   starting iteration in the middle of a `MemRegion`.
/// - [`take_bytes()`](Self::take_bytes): Truncate the iterator at some number of bytes, potentially
///   ending iteration in the middle of a `MemRegion`.
///
/// The order of subslicing operations matters - limiting length followed by skipping bytes is not
/// the same as skipping bytes followed by limiting length.
#[derive(Clone)]
pub struct MemRegionIter<'a> {
    /// Regions not yet consumed by `next()`.
    regions: &'a [MemRegion],
    /// Bytes still to be skipped before the next region is yielded.
    skip_bytes: usize,
    /// Upper bound on the total bytes this iterator may still yield.
    remaining_bytes: usize,
}
impl<'a> MemRegionIter<'a> {
/// Create a new `MemRegion` iterator over a slice of `MemRegion`.
///
/// By default, the `MemRegionIter` will iterate over each `MemRegion` in the list in its
/// entirety. Call [`skip_bytes()`](Self::skip_bytes) and/or
/// [`take_bytes()`](Self::take_bytes) to limit iteration to a sub-slice of the specified
/// `regions` list.
pub fn new(regions: &'a [MemRegion]) -> Self {
MemRegionIter {
regions,
skip_bytes: 0,
remaining_bytes: usize::MAX,
}
}
/// Advance the iterator by `offset` bytes.
///
/// This may place the iterator in the middle of a [`MemRegion`]; in this case, the offset and
/// length of the next [`MemRegion`] returned by [`next()`](Self::next) will be adjusted to
/// account for the offset.
///
/// Skipping more than the remaining length of an iterator is not an error; if `offset` is
/// greater than or equal to the total number of remaining bytes, future calls to
/// [`next()`](Self::next) will simply return `None`.
pub fn skip_bytes(self, offset: usize) -> Self {
MemRegionIter {
regions: self.regions,
skip_bytes: self.skip_bytes.saturating_add(offset),
remaining_bytes: self.remaining_bytes.saturating_sub(offset),
}
}
/// Truncate the length of the iterator to `max` bytes at most.
///
/// This may cause the final [`MemRegion`] returned by [`next()`](Self::next) to be adjusted so
/// that its length does not cause the total number of bytes to exceed the requested `max`.
///
/// If less than `max` bytes remain in the iterator already, this function will have no effect.
///
/// Only truncation is supported; an iterator cannot be extended, even if it was truncated by a
/// previous call to `take_bytes()`.
pub fn take_bytes(self, max: usize) -> Self {
MemRegionIter {
regions: self.regions,
skip_bytes: self.skip_bytes,
remaining_bytes: self.remaining_bytes.min(max),
}
}
}
impl Iterator for MemRegionIter<'_> {
    type Item = MemRegion;

    /// Yield the next (possibly subsliced) non-empty region, honoring the pending
    /// `skip_bytes` prefix and the `remaining_bytes` cap.
    fn next(&mut self) -> Option<Self::Item> {
        // Byte budget exhausted: nothing more to yield.
        if self.remaining_bytes == 0 {
            return None;
        }
        loop {
            // `?` ends iteration once the region list is empty.
            let (head, tail) = self.regions.split_first()?;
            // `head` is consumed by this call; subsequent calls resume at `tail`.
            self.regions = tail;
            // A skip that covers all of `head` (including `head.len == 0`) moves on
            // to the next region; zero-length regions are never yielded.
            if self.skip_bytes >= head.len {
                self.skip_bytes -= head.len;
                continue;
            }
            // Partial skip: shift the region start and shrink its length, then
            // clear the skip so later regions are yielded whole.
            let offset = head.offset + self.skip_bytes as u64;
            let mut len = head.len - self.skip_bytes;
            self.skip_bytes = 0;
            if len >= self.remaining_bytes {
                // This region reaches the byte cap: clamp it and drop the rest of
                // the list so future `next()` calls return `None`.
                len = self.remaining_bytes;
                self.remaining_bytes = 0;
                self.regions = &[];
            } else {
                // Region fits entirely under the cap; charge it against the budget.
                self.remaining_bytes -= len;
            }
            // Invariant: the early return above and the zero-length skip ensure we
            // never yield an empty region.
            debug_assert_ne!(len, 0);
            return Some(MemRegion { offset, len });
        }
    }
}
/// Trait for memory that can yield iovecs into the backing memory.
/// # Safety
/// Must be OK to modify the backing memory without owning a mutable reference. For example,
/// this is safe for GuestMemory and VolatileSlices in crosvm as those types guarantee they are
/// dealt with as volatile.
pub unsafe trait BackingMemory {
    /// Returns a VolatileSlice pointing into the backing memory at `mem_range`. This is most
    /// commonly unsafe. To implement this safely the implementor must guarantee that the backing
    /// memory can be modified out of band without affecting safety guarantees.
    fn get_volatile_slice(&self, mem_range: MemRegion) -> Result<VolatileSlice>;
}
/// Wrapper to be used for passing a Vec in as backing memory for asynchronous operations. The
/// wrapper owns a Vec according to the borrow checker. It is loaning this vec out to the kernel (or
/// other modifiers) through the `BackingMemory` trait. This allows multiple modifiers of the array
/// in the `Vec` while this struct is alive. The data in the Vec is loaned to the kernel, not the
/// data structure itself; the length, capacity, and pointer to memory cannot be modified.
/// To ensure that those operations can be done safely, no access is allowed to the `Vec`'s memory
/// starting at the time that `VecIoWrapper` is constructed until the time it is turned back into a
/// `Vec` using `to_inner`. The returned `Vec` is guaranteed to be valid as any combination of bits
/// in a `Vec` of `u8` is valid.
pub struct VecIoWrapper {
    // Boxed slice rather than Vec: fixes length and capacity while the memory is loaned out.
    inner: Box<[u8]>,
}
impl From<Vec<u8>> for VecIoWrapper {
fn from(vec: Vec<u8>) -> Self {
VecIoWrapper { inner: vec.into() }
}
}
impl From<VecIoWrapper> for Vec<u8> {
    /// Recover the owned bytes, ending the loan of the backing memory.
    fn from(v: VecIoWrapper) -> Vec<u8> {
        v.inner.into_vec()
    }
}
impl VecIoWrapper {
/// Get the length of the Vec that is wrapped.
#[cfg_attr(windows, allow(dead_code))]
pub fn len(&self) -> usize {
self.inner.len()
}
pub fn is_empty(&self) -> bool {
self.len() == 0
}
// Check that the offsets are all valid in the backing vec.
fn check_addrs(&self, mem_range: &MemRegion) -> Result<()> {
let end = mem_range
.offset
.checked_add(mem_range.len as u64)
.ok_or(Error::InvalidOffset(mem_range.offset, mem_range.len))?;
if end > self.inner.len() as u64 {
return Err(Error::InvalidOffset(mem_range.offset, mem_range.len));
}
Ok(())
}
}
// SAFETY:
// Safe to implement BackingMemory as the vec is only accessible inside the wrapper and these iovecs
// are the only thing allowed to modify it. Nothing else can get a reference to the vec until all
// iovecs are dropped because they borrow Self. Nothing can borrow the owned inner vec until self
// is consumed by `into`, which can't happen if there are outstanding mut borrows.
unsafe impl BackingMemory for VecIoWrapper {
    /// Returns a `VolatileSlice` covering `mem_range` within the wrapped buffer, after
    /// bounds-checking the range against the buffer length.
    fn get_volatile_slice(&self, mem_range: MemRegion) -> Result<VolatileSlice<'_>> {
        self.check_addrs(&mem_range)?;
        // SAFETY:
        // Safe because the mem_range range is valid in the backing memory as checked above.
        unsafe {
            Ok(VolatileSlice::from_raw_parts(
                // Cast to *mut: callers may write through the slice; see the unsafe impl
                // justification above for why aliasing is sound here.
                self.inner.as_ptr().add(mem_range.offset as usize) as *mut _,
                mem_range.len,
            ))
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // --- Iteration over empty and single-region lists ---

    #[test]
    fn mem_region_iter_empty() {
        let mut iter = MemRegionIter::new(&[]);
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_one() {
        let mut iter = MemRegionIter::new(&[MemRegion { offset: 0, len: 4 }]);
        assert_eq!(iter.next(), Some(MemRegion { offset: 0, len: 4 }));
        assert_eq!(iter.next(), None);
    }

    // A region of the maximum possible length is yielded whole.
    #[test]
    fn mem_region_iter_one_len_usize_max() {
        let mut iter = MemRegionIter::new(&[MemRegion {
            offset: 0,
            len: usize::MAX,
        }]);
        assert_eq!(
            iter.next(),
            Some(MemRegion {
                offset: 0,
                len: usize::MAX
            })
        );
        assert_eq!(iter.next(), None);
    }

    // Zero-length regions are skipped, never yielded.
    #[test]
    fn mem_region_iter_one_len_zero() {
        let mut iter = MemRegionIter::new(&[MemRegion { offset: 0, len: 0 }]);
        assert_eq!(iter.next(), None);
    }

    // --- skip_bytes() on a single region ---

    #[test]
    fn mem_region_iter_one_skip_partial() {
        let mut iter = MemRegionIter::new(&[MemRegion { offset: 0, len: 4 }]).skip_bytes(1);
        assert_eq!(iter.next(), Some(MemRegion { offset: 1, len: 3 }));
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_one_skip_full() {
        let mut iter = MemRegionIter::new(&[MemRegion { offset: 0, len: 4 }]).skip_bytes(4);
        assert_eq!(iter.next(), None);
    }

    // Skipping past the end is not an error; the iterator is just empty.
    #[test]
    fn mem_region_iter_one_skip_excess() {
        let mut iter = MemRegionIter::new(&[MemRegion { offset: 0, len: 4 }]).skip_bytes(5);
        assert_eq!(iter.next(), None);
    }

    // --- take_bytes() on a single region ---

    #[test]
    fn mem_region_iter_one_take_zero() {
        let mut iter = MemRegionIter::new(&[MemRegion { offset: 0, len: 4 }]).take_bytes(0);
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_one_take_partial() {
        let mut iter = MemRegionIter::new(&[MemRegion { offset: 0, len: 4 }]).take_bytes(1);
        assert_eq!(iter.next(), Some(MemRegion { offset: 0, len: 1 }));
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_one_take_full() {
        let mut iter = MemRegionIter::new(&[MemRegion { offset: 0, len: 4 }]).take_bytes(4);
        assert_eq!(iter.next(), Some(MemRegion { offset: 0, len: 4 }));
        assert_eq!(iter.next(), None);
    }

    // Taking more than remains has no effect.
    #[test]
    fn mem_region_iter_one_take_excess() {
        let mut iter = MemRegionIter::new(&[MemRegion { offset: 0, len: 4 }]).take_bytes(5);
        assert_eq!(iter.next(), Some(MemRegion { offset: 0, len: 4 }));
        assert_eq!(iter.next(), None);
    }

    // --- Order of take/skip matters: these two produce different lengths ---

    #[test]
    fn mem_region_iter_one_take_skip() {
        let mut iter = MemRegionIter::new(&[MemRegion { offset: 0, len: 4 }])
            .take_bytes(2)
            .skip_bytes(1);
        assert_eq!(iter.next(), Some(MemRegion { offset: 1, len: 1 }));
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_one_skip_take() {
        let mut iter = MemRegionIter::new(&[MemRegion { offset: 0, len: 4 }])
            .skip_bytes(1)
            .take_bytes(2);
        assert_eq!(iter.next(), Some(MemRegion { offset: 1, len: 2 }));
        assert_eq!(iter.next(), None);
    }

    // --- Multi-region lists ---

    #[test]
    fn mem_region_iter_two() {
        let mut iter = MemRegionIter::new(&[
            MemRegion { offset: 0, len: 4 },
            MemRegion { offset: 8, len: 2 },
        ]);
        assert_eq!(iter.next(), Some(MemRegion { offset: 0, len: 4 }));
        assert_eq!(iter.next(), Some(MemRegion { offset: 8, len: 2 }));
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_two_skip_partial() {
        let mut iter = MemRegionIter::new(&[
            MemRegion { offset: 0, len: 4 },
            MemRegion { offset: 8, len: 2 },
        ])
        .skip_bytes(1);
        assert_eq!(iter.next(), Some(MemRegion { offset: 1, len: 3 }));
        assert_eq!(iter.next(), Some(MemRegion { offset: 8, len: 2 }));
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_two_skip_full() {
        let mut iter = MemRegionIter::new(&[
            MemRegion { offset: 0, len: 4 },
            MemRegion { offset: 8, len: 2 },
        ])
        .skip_bytes(4);
        assert_eq!(iter.next(), Some(MemRegion { offset: 8, len: 2 }));
        assert_eq!(iter.next(), None);
    }

    // A skip larger than the first region carries into the second.
    #[test]
    fn mem_region_iter_two_skip_excess() {
        let mut iter = MemRegionIter::new(&[
            MemRegion { offset: 0, len: 4 },
            MemRegion { offset: 8, len: 2 },
        ])
        .skip_bytes(5);
        assert_eq!(iter.next(), Some(MemRegion { offset: 9, len: 1 }));
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_two_skip_multi() {
        let mut iter = MemRegionIter::new(&[
            MemRegion { offset: 0, len: 4 },
            MemRegion { offset: 8, len: 2 },
        ])
        .skip_bytes(6);
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_two_take_partial() {
        let mut iter = MemRegionIter::new(&[
            MemRegion { offset: 0, len: 4 },
            MemRegion { offset: 8, len: 2 },
        ])
        .take_bytes(1);
        assert_eq!(iter.next(), Some(MemRegion { offset: 0, len: 1 }));
        assert_eq!(iter.next(), None);
    }

    // A limit landing inside the second region truncates it.
    #[test]
    fn mem_region_iter_two_take_partial2() {
        let mut iter = MemRegionIter::new(&[
            MemRegion { offset: 0, len: 4 },
            MemRegion { offset: 8, len: 2 },
        ])
        .take_bytes(5);
        assert_eq!(iter.next(), Some(MemRegion { offset: 0, len: 4 }));
        assert_eq!(iter.next(), Some(MemRegion { offset: 8, len: 1 }));
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_two_take_full() {
        let mut iter = MemRegionIter::new(&[
            MemRegion { offset: 0, len: 4 },
            MemRegion { offset: 8, len: 2 },
        ])
        .take_bytes(6);
        assert_eq!(iter.next(), Some(MemRegion { offset: 0, len: 4 }));
        assert_eq!(iter.next(), Some(MemRegion { offset: 8, len: 2 }));
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn mem_region_iter_two_take_excess() {
        let mut iter = MemRegionIter::new(&[
            MemRegion { offset: 0, len: 4 },
            MemRegion { offset: 8, len: 2 },
        ])
        .take_bytes(7);
        assert_eq!(iter.next(), Some(MemRegion { offset: 0, len: 4 }));
        assert_eq!(iter.next(), Some(MemRegion { offset: 8, len: 2 }));
        assert_eq!(iter.next(), None);
    }

    // Zero-length regions embedded mid-list are skipped while skip/take still apply.
    #[test]
    fn mem_region_iter_embedded_zero_len() {
        let mut iter = MemRegionIter::new(&[
            MemRegion { offset: 0, len: 4 },
            MemRegion { offset: 8, len: 2 },
            MemRegion { offset: 9, len: 0 },
            MemRegion { offset: 16, len: 5 },
            MemRegion { offset: 6, len: 0 },
            MemRegion { offset: 24, len: 9 },
        ])
        .skip_bytes(2)
        .take_bytes(12);
        assert_eq!(iter.next(), Some(MemRegion { offset: 2, len: 2 }));
        assert_eq!(iter.next(), Some(MemRegion { offset: 8, len: 2 }));
        assert_eq!(iter.next(), Some(MemRegion { offset: 16, len: 5 }));
        assert_eq!(iter.next(), Some(MemRegion { offset: 24, len: 3 }));
        assert_eq!(iter.next(), None);
    }

    // A skip spanning several whole regions lands partway into a later one.
    #[test]
    fn mem_region_iter_skip_multi() {
        let mut iter = MemRegionIter::new(&[
            MemRegion { offset: 0, len: 4 },
            MemRegion { offset: 8, len: 2 },
            MemRegion { offset: 16, len: 5 },
            MemRegion { offset: 24, len: 9 },
        ])
        .skip_bytes(7);
        assert_eq!(iter.next(), Some(MemRegion { offset: 17, len: 4 }));
        assert_eq!(iter.next(), Some(MemRegion { offset: 24, len: 9 }));
        assert_eq!(iter.next(), None);
    }
}