//! Resource management and resolution for the virtio-video device.

use std::convert::TryInto;
use std::fmt;
use base::linux::MemoryMappingBuilderUnix;
use base::AsRawDescriptor;
use base::FromRawDescriptor;
use base::IntoRawDescriptor;
use base::MemoryMappingArena;
use base::MemoryMappingBuilder;
use base::MmapError;
use base::SafeDescriptor;
use thiserror::Error as ThisError;
use vm_memory::GuestAddress;
use vm_memory::GuestMemory;
use vm_memory::GuestMemoryError;
use zerocopy::FromBytes;
use zerocopy::Immutable;
use zerocopy::IntoBytes;
use zerocopy::KnownLayout;
use crate::virtio::resource_bridge;
use crate::virtio::resource_bridge::ResourceBridgeError;
use crate::virtio::resource_bridge::ResourceInfo;
use crate::virtio::resource_bridge::ResourceRequest;
use crate::virtio::video::format::Format;
use crate::virtio::video::format::FramePlane;
use crate::virtio::video::params::Params;
use crate::virtio::video::protocol::virtio_video_mem_entry;
use crate::virtio::video::protocol::virtio_video_object_entry;
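/// Type of the backing memory of a resource, i.e. how the resource entries of a queue must be
/// resolved.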
#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)]
pub enum ResourceType {
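/// The resource is backed by pages of guest memory.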
GuestPages,
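/// The resource is backed by a virtio object, resolved through the resource bridge.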
#[default]
VirtioObject,
}
#[repr(C)]
#[derive(Clone, Copy, FromBytes, Immutable, IntoBytes, KnownLayout)]
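/// A resource entry as received from the guest. It is kept as raw bytes because its actual
/// layout depends on the queue's `ResourceType` and is only known at resolution time.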
pub struct UnresolvedResourceEntry([u8; 16]);
impl fmt::Debug for UnresolvedResourceEntry {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "unresolved {:?}", self.0)
}
}
impl UnresolvedResourceEntry {
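/// Interprets this entry as a `virtio_video_object_entry`.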
pub fn object(&self) -> virtio_video_object_entry {
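// Cannot fail: both the entry and `virtio_video_object_entry` are exactly 16 bytes, and any
// byte pattern is a valid `virtio_video_object_entry`.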
virtio_video_object_entry::read_from_bytes(&self.0).unwrap()
}
}
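/// Trait for types that can be used as the backing memory of a video buffer.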
pub trait BufferHandle: Sized {
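/// Tries to create a new handle referring to the same backing memory; the buffer contents are
/// not duplicated.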
fn try_clone(&self) -> Result<Self, base::Error>;
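/// Returns a linear mapping of `size` bytes of the backing memory, starting at `offset`.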
fn get_mapping(&self, offset: usize, size: usize) -> Result<MemoryMappingArena, MmapError>;
}
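/// A contiguous area within the guest memory region backing a `GuestMemHandle`.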
#[derive(Clone)]
pub struct GuestMemArea {
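/// Offset within the memory region descriptor at which the area starts.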
pub offset: u64,
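/// Length of the area in bytes.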
pub length: usize,
}
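/// Handle to a resource backed by pages of guest memory.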
pub struct GuestMemHandle {
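/// Descriptor of the shared memory region backing the buffer.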
pub desc: SafeDescriptor,
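/// Areas of the region making up the buffer, in buffer order (effectively a scatter-gather
/// list).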
pub mem_areas: Vec<GuestMemArea>,
}
impl BufferHandle for GuestMemHandle {
fn try_clone(&self) -> Result<Self, base::Error> {
Ok(Self {
desc: self.desc.try_clone()?,
mem_areas: self.mem_areas.clone(),
})
}
fn get_mapping(&self, offset: usize, size: usize) -> Result<MemoryMappingArena, MmapError> {
let mut arena = MemoryMappingArena::new(size)?;
let mut mapped_size = 0;
let mut area_iter = self.mem_areas.iter();
let mut area_offset = offset;
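// Stitch the areas together until `size` bytes of the buffer have been mapped.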
while mapped_size < size {
let area = match area_iter.next() {
Some(area) => area,
None => {
return Err(MmapError::InvalidRange(
offset,
size,
self.mem_areas.iter().map(|a| a.length).sum(),
));
}
};
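// This area lies entirely before the requested offset; skip it and reduce the remaining offset.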
if area_offset > area.length {
area_offset -= area.length;
} else {
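// Add the relevant part of this area to the arena.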
let mapping_length = std::cmp::min(area.length - area_offset, size - mapped_size);
arena.add_fd_offset(mapped_size, mapping_length, &self.desc, area.offset)?;
mapped_size += mapping_length;
area_offset = 0;
}
}
Ok(arena)
}
}
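/// Handle to a resource backed by a virtio object, i.e. a buffer exported by another device and
/// referred to by UUID.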
pub struct VirtioObjectHandle {
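/// Descriptor of the imported buffer.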
pub desc: SafeDescriptor,
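/// Layout modifier of the buffer, as reported by its exporter.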
pub modifier: u64,
}
impl BufferHandle for VirtioObjectHandle {
fn try_clone(&self) -> Result<Self, base::Error> {
Ok(Self {
desc: self.desc.try_clone()?,
modifier: self.modifier,
})
}
fn get_mapping(&self, offset: usize, size: usize) -> Result<MemoryMappingArena, MmapError> {
MemoryMappingBuilder::new(size)
.from_descriptor(&self.desc)
.offset(offset as u64)
.build()
.map(MemoryMappingArena::from)
}
}
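/// Handle to the backing memory of a guest resource: either guest memory pages or a virtio
/// object.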
pub enum GuestResourceHandle {
GuestPages(GuestMemHandle),
VirtioObject(VirtioObjectHandle),
}
impl BufferHandle for GuestResourceHandle {
fn try_clone(&self) -> Result<Self, base::Error> {
Ok(match self {
Self::GuestPages(handle) => Self::GuestPages(handle.try_clone()?),
Self::VirtioObject(handle) => Self::VirtioObject(handle.try_clone()?),
})
}
fn get_mapping(&self, offset: usize, size: usize) -> Result<MemoryMappingArena, MmapError> {
match self {
GuestResourceHandle::GuestPages(handle) => handle.get_mapping(offset, size),
GuestResourceHandle::VirtioObject(handle) => handle.get_mapping(offset, size),
}
}
}
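/// A fully resolved guest resource: its backing memory plus the frame layout information needed
/// to use it.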
pub struct GuestResource {
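/// Handle to the backing memory of the resource.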
pub handle: GuestResourceHandle,
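/// Layout of the individual color planes within the buffer.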
pub planes: Vec<FramePlane>,
pub width: u32,
pub height: u32,
pub format: Format,
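/// Whether the buffer can be mapped and accessed by the guest CPU, in which case the host must
/// complete all its operations on the buffer before returning it to the guest.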
pub guest_cpu_mappable: bool,
}
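/// Error that can occur when resolving guest memory entries into a `GuestResource`.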
#[derive(Debug, ThisError)]
pub enum GuestMemResourceCreationError {
#[error("Provided slice of entries is empty")]
NoEntriesProvided,
#[error("cannot get shm region: {0}")]
CantGetShmRegion(GuestMemoryError),
#[error("cannot get shm offset: {0}")]
CantGetShmOffset(GuestMemoryError),
#[error("error while cloning shm region descriptor: {0}")]
DescriptorCloneError(base::Error),
#[error("guest memory with multiple shm objects not supported")]
MultipleShmObjects,
}
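/// Error that can occur when resolving a virtio object entry into a `GuestResource`.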
#[derive(Debug, ThisError)]
pub enum ObjectResourceCreationError {
#[error("uuid {0:08} is larger than 32 bits")]
UuidNot32Bits(u128),
#[error("resource returned by bridge is not a buffer")]
NotABuffer,
#[error("resource bridge failure: {0}")]
ResourceBridgeFailure(ResourceBridgeError),
}
impl GuestResource {
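/// Resolves a set of guest memory entries into a `GuestResource` backed by guest pages.
///
/// Each address in `mem_entries` is translated through `mem` into an offset within a single
/// shared memory region; entries spanning more than one region are rejected. Frame width,
/// height and format are taken from `params`.
///
/// Panics if `params.format` is `None`.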
pub fn from_virtio_guest_mem_entry(
mem_entries: &[virtio_video_mem_entry],
mem: &GuestMemory,
params: &Params,
) -> Result<GuestResource, GuestMemResourceCreationError> {
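// Get the memory region descriptor from the first entry; all entries must belong to the same
// region.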
let region_desc = match mem_entries.first() {
None => return Err(GuestMemResourceCreationError::NoEntriesProvided),
Some(entry) => {
let addr: u64 = entry.addr.into();
let guest_region = mem
.shm_region(GuestAddress(addr))
.map_err(GuestMemResourceCreationError::CantGetShmRegion)?;
base::clone_descriptor(guest_region)
.map_err(GuestMemResourceCreationError::DescriptorCloneError)?
}
};
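// Convert each entry's guest address into an (offset, length) area within that region.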
let mem_areas = mem_entries
.iter()
.map(|entry| {
let addr: u64 = entry.addr.into();
let length: u32 = entry.length.into();
let (backing_obj, region_offset) = mem
.offset_from_base(GuestAddress(addr))
.map_err(GuestMemResourceCreationError::CantGetShmOffset)?;
if region_desc.as_raw_descriptor() != backing_obj.as_raw_descriptor() {
return Err(GuestMemResourceCreationError::MultipleShmObjects);
}
Ok(GuestMemArea {
offset: region_offset,
length: length as usize,
})
})
.collect::<Result<_, _>>()?;
let handle = GuestResourceHandle::GuestPages(GuestMemHandle {
desc: region_desc,
mem_areas,
});
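// Lay out the planes contiguously, in the order given by the format parameters.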
let mut buffer_offset = 0;
let planes = params
.plane_formats
.iter()
.map(|p| {
let plane_offset = buffer_offset;
buffer_offset += p.plane_size;
FramePlane {
offset: plane_offset as usize,
stride: p.stride as usize,
size: p.plane_size as usize,
}
})
.collect();
Ok(GuestResource {
handle,
planes,
width: params.frame_width,
height: params.frame_height,
format: params.format.unwrap(),
guest_cpu_mappable: true,
})
}
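/// Resolves a virtio object entry into a `GuestResource` backed by the buffer it refers to.
///
/// The UUID in `object` is resolved through `res_bridge` into an exported buffer. Frame width,
/// height and format are taken from `params`, while plane offsets and strides come from the
/// resolved buffer information.
///
/// Panics if `params.format` is `None`.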
pub fn from_virtio_object_entry(
object: virtio_video_object_entry,
res_bridge: &base::Tube,
params: &Params,
) -> Result<GuestResource, ObjectResourceCreationError> {
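// The resource bridge identifies buffers by a 32-bit id, so the object UUID must fit into a u32.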
let uuid = u128::from_be_bytes(object.uuid);
let handle = TryInto::<u32>::try_into(uuid)
.map_err(|_| ObjectResourceCreationError::UuidNot32Bits(uuid))?;
let buffer_info = match resource_bridge::get_resource_info(
res_bridge,
ResourceRequest::GetBuffer { id: handle },
) {
Ok(ResourceInfo::Buffer(buffer_info)) => buffer_info,
Ok(_) => return Err(ObjectResourceCreationError::NotABuffer),
Err(e) => return Err(ObjectResourceCreationError::ResourceBridgeFailure(e)),
};
let handle = GuestResourceHandle::VirtioObject(VirtioObjectHandle {
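// SAFETY: `buffer_info.handle` is a valid descriptor, and `into_raw_descriptor()` transfers
// its ownership to the new `SafeDescriptor`.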
desc: unsafe {
SafeDescriptor::from_raw_descriptor(buffer_info.handle.into_raw_descriptor())
},
modifier: buffer_info.modifier,
});
let planes = params
.plane_formats
.iter()
.zip(&buffer_info.planes)
.map(|(param, buffer)| FramePlane {
offset: buffer.offset as usize,
stride: buffer.stride as usize,
size: param.plane_size as usize,
})
.collect();
Ok(GuestResource {
handle,
planes,
width: params.frame_width,
height: params.frame_height,
format: params.format.unwrap(),
guest_cpu_mappable: buffer_info.guest_cpu_mappable,
})
}
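/// Tries to clone this resource, creating a new reference to the same backing memory.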
#[cfg(feature = "video-encoder")]
pub fn try_clone(&self) -> Result<Self, base::Error> {
Ok(Self {
handle: self.handle.try_clone()?,
planes: self.planes.clone(),
width: self.width,
height: self.height,
format: self.format,
guest_cpu_mappable: self.guest_cpu_mappable,
})
}
}
#[cfg(test)]
mod tests {
use base::MappedRegion;
use base::SharedMemory;
use super::*;
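/// Fills a shared memory region with consecutive `u32` values scattered across its pages in
/// `page_order`, then checks that mapping it through a `GuestMemHandle` restores linear order.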
fn check_guest_mem_handle(page_order: &[usize]) {
const PAGE_SIZE: usize = 0x1000;
const U32_SIZE: usize = std::mem::size_of::<u32>();
const ENTRIES_PER_PAGE: usize = PAGE_SIZE / U32_SIZE;
let mut data = vec![0u8; PAGE_SIZE * page_order.len()];
for (page_index, page) in page_order.iter().enumerate() {
let page_slice = &mut data[(page * PAGE_SIZE)..((page + 1) * PAGE_SIZE)];
for (index, chunk) in page_slice.chunks_exact_mut(4).enumerate() {
let sized_chunk: &mut [u8; 4] = chunk.try_into().unwrap();
*sized_chunk = (((page_index * ENTRIES_PER_PAGE) + index) as u32).to_ne_bytes();
}
}
let mem = SharedMemory::new("data-dest", data.len() as u64).unwrap();
let mapping = MemoryMappingBuilder::new(mem.size() as usize)
.from_shared_memory(&mem)
.build()
.unwrap();
assert_eq!(mapping.write_slice(&data, 0).unwrap(), data.len());
let mem_handle = GuestResourceHandle::GuestPages(GuestMemHandle {
desc: base::clone_descriptor(&mem).unwrap(),
mem_areas: page_order
.iter()
.map(|&page| GuestMemArea {
offset: page as u64 * PAGE_SIZE as u64,
length: PAGE_SIZE,
})
.collect(),
});
let mapping = mem_handle.get_mapping(0, mem.size() as usize).unwrap();
let mut data = vec![0u8; PAGE_SIZE * page_order.len()];
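// SAFETY: `mapping` is `data.len()` bytes long and does not overlap with `data`.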
unsafe { std::ptr::copy_nonoverlapping(mapping.as_ptr(), data.as_mut_ptr(), data.len()) };
for (index, chunk) in data.chunks_exact(U32_SIZE).enumerate() {
let sized_chunk: &[u8; 4] = chunk.try_into().unwrap();
assert_eq!(u32::from_ne_bytes(*sized_chunk), index as u32);
}
}
#[test]
fn test_single_guest_mem_handle() {
check_guest_mem_handle(&[0])
}
#[test]
fn test_linear_guest_mem_handle() {
check_guest_mem_handle(&[0, 1, 2, 3])
}
#[test]
fn test_sparse_guest_mem_handle() {
check_guest_mem_handle(&[1, 7, 6, 3, 5, 0, 4, 2])
}
}