1#![deny(missing_docs)]
6
7use std::ops::Range;
8use std::ptr::copy_nonoverlapping;
9
10use base::error;
11use base::linux::MemoryMappingUnix;
12use base::MemoryMapping;
13use base::MemoryMappingBuilder;
14use base::MmapError;
15use base::SharedMemory;
16use base::VolatileMemory;
17use base::VolatileMemoryError;
18use base::VolatileSlice;
19use thiserror::Error as ThisError;
20
21use crate::pagesize::pages_to_bytes;
22use crate::present_list::PresentList;
23
/// Result type shorthand for staging-memory operations in this module.
pub type Result<T> = std::result::Result<T, Error>;
25
26#[derive(ThisError, Debug)]
27pub enum Error {
28 #[error("failed to mmap operation: {0}")]
29 Mmap(MmapError),
30 #[error("failed to volatile memory operation: {0}")]
31 VolatileMemory(VolatileMemoryError),
32 #[error("index is out of range")]
33 OutOfRange,
34}
35
36impl From<MmapError> for Error {
37 fn from(e: MmapError) -> Self {
38 Self::Mmap(e)
39 }
40}
41
42impl From<VolatileMemoryError> for Error {
43 fn from(e: VolatileMemoryError) -> Self {
44 Self::VolatileMemory(e)
45 }
46}
47
/// A deferred page copy into the staging area.
///
/// Created by [`StagingMemory::copy`]; the actual memcpy happens later when
/// [`CopyOp::execute`] is called. Both pointers are raw, so the creator is
/// responsible for keeping the source and destination regions valid for
/// `size` bytes until the op executes (see the `# Safety` contract on
/// [`StagingMemory::copy`]).
pub struct CopyOp {
    // Source of the copy; validity for `size` bytes is the caller's contract.
    src_addr: *const u8,
    // Destination inside the staging mmap, obtained from a bounds-checked
    // VolatileSlice.
    dst_addr: *mut u8,
    // Number of bytes to copy (a whole number of pages).
    size: usize,
}
54
// SAFETY: CopyOp holds only raw pointers and a length; nothing in it is tied
// to the creating thread. Executing the copy on another thread is sound as
// long as the creator's validity contract (stated on StagingMemory::copy)
// holds, which does not depend on which thread performs the copy.
unsafe impl Send for CopyOp {}
61
impl CopyOp {
    /// Executes the deferred copy, consuming the op.
    pub fn execute(self) {
        // SAFETY: dst_addr and size were obtained from a bounds-checked
        // VolatileSlice of the staging mmap in StagingMemory::copy; validity
        // of src_addr for `size` bytes (and non-overlap with the staging
        // mapping) is the contract of that unsafe fn.
        unsafe {
            copy_nonoverlapping(self.src_addr, self.dst_addr, self.size);
        }
    }
}
76
/// Page-granular staging area backed by a mapping of a [`SharedMemory`]
/// region.
///
/// Pages are copied in via [`StagingMemory::copy`], read back via
/// [`StagingMemory::page_content`], and released via
/// [`StagingMemory::clear_range`].
pub struct StagingMemory {
    // The backing mapping; all offsets/lengths into it are page-aligned via
    // pages_to_bytes().
    mmap: MemoryMapping,
    // Tracks which pages of `mmap` currently hold copied-in data.
    present_list: PresentList,
}
101
impl StagingMemory {
    /// Creates a [`StagingMemory`] of `num_of_pages` pages, mapped from
    /// `shmem` starting at `offset_bytes`.
    ///
    /// # Errors
    ///
    /// Returns [`Error::Mmap`] if building the mapping fails.
    pub fn new(shmem: &SharedMemory, offset_bytes: u64, num_of_pages: usize) -> Result<Self> {
        let mmap = MemoryMappingBuilder::new(pages_to_bytes(num_of_pages))
            .from_shared_memory(shmem)
            .offset(offset_bytes)
            .build()?;
        Ok(Self {
            mmap,
            present_list: PresentList::new(num_of_pages),
        })
    }

    /// Prepares a deferred copy of `pages` pages from `src_addr` into the
    /// staging area at page index `idx`, and marks that range as present.
    ///
    /// The returned [`CopyOp`] performs the actual memcpy when executed.
    ///
    /// # Safety
    ///
    /// The caller must ensure that `src_addr` is valid for reads of
    /// `pages_to_bytes(pages)` bytes, does not overlap the destination range
    /// of the staging mapping, and stays valid until the returned [`CopyOp`]
    /// is executed.
    ///
    /// # Errors
    ///
    /// Returns [`Error::OutOfRange`] (via `get_slice`) if `idx..idx + pages`
    /// does not fit in the staging area.
    #[deny(unsafe_op_in_unsafe_fn)]
    pub unsafe fn copy(&mut self, src_addr: *const u8, idx: usize, pages: usize) -> Result<CopyOp> {
        let idx_range = idx..idx + pages;
        // Bounds-check the destination range and get its address/size.
        let dst_slice = self.get_slice(idx_range.clone())?;

        let copy_op = CopyOp {
            src_addr,
            dst_addr: dst_slice.as_mut_ptr(),
            size: dst_slice.size(),
        };
        // Marking happens before the copy executes; get_slice() above already
        // proved the range is in bounds.
        if !self.present_list.mark_as_present(idx_range) {
            unreachable!("idx_range is already validated by get_slice().");
        }
        Ok(copy_op)
    }

    /// Returns the content of page `idx`, or `None` if the page has no data.
    ///
    /// # Errors
    ///
    /// Returns [`Error::OutOfRange`] if `idx` is outside the staging area.
    pub fn page_content(&self, idx: usize) -> Result<Option<VolatileSlice>> {
        match self.present_list.get(idx) {
            Some(is_present) => {
                if *is_present {
                    Ok(Some(self.get_slice(idx..idx + 1)?))
                } else {
                    Ok(None)
                }
            }
            None => Err(Error::OutOfRange),
        }
    }

    /// Clears the pages in `idx_range`: marks them as absent and releases the
    /// backing memory of that range.
    ///
    /// # Errors
    ///
    /// Returns [`Error::OutOfRange`] if `idx_range` is outside the staging
    /// area, or [`Error::Mmap`] if releasing the range fails.
    pub fn clear_range(&mut self, idx_range: Range<usize>) -> Result<()> {
        if !self.present_list.clear_range(idx_range.clone()) {
            return Err(Error::OutOfRange);
        }
        // Free the physical memory backing the cleared pages.
        self.mmap.remove_range(
            pages_to_bytes(idx_range.start),
            pages_to_bytes(idx_range.end - idx_range.start),
        )?;
        Ok(())
    }

    /// Returns the first contiguous range of present pages, truncated to at
    /// most `max_pages`, or `None` if no page is present.
    pub fn first_data_range(&mut self, max_pages: usize) -> Option<Range<usize>> {
        self.present_list.first_data_range(max_pages)
    }

    /// Returns a [`VolatileSlice`] covering the pages in `idx_range`,
    /// regardless of whether they are marked present.
    ///
    /// # Errors
    ///
    /// Returns [`Error::OutOfRange`] if `idx_range` is outside the staging
    /// area.
    pub fn get_slice(&self, idx_range: Range<usize>) -> Result<VolatileSlice> {
        match self.mmap.get_slice(
            pages_to_bytes(idx_range.start),
            pages_to_bytes(idx_range.end - idx_range.start),
        ) {
            Ok(slice) => Ok(slice),
            // Normalize out-of-bounds into this module's OutOfRange error.
            Err(VolatileMemoryError::OutOfBounds { .. }) => Err(Error::OutOfRange),
            Err(e) => Err(e.into()),
        }
    }

    /// Returns the total number of pages currently marked present.
    pub fn present_pages(&self) -> usize {
        self.present_list.all_present_pages()
    }
}
220
#[cfg(test)]
mod tests {
    use base::pagesize;
    use base::MappedRegion;

    use super::*;

    #[test]
    fn new_success() {
        let shmem = SharedMemory::new("test staging memory", 200 * pagesize() as u64).unwrap();
        assert!(StagingMemory::new(&shmem, 0, 200).is_ok());
    }

    // Creates an anonymous mapping of `pages` pages filled with `value`.
    fn create_mmap(value: u8, pages: usize) -> MemoryMapping {
        let size = pages_to_bytes(pages);
        let mmap = MemoryMappingBuilder::new(size).build().unwrap();
        for i in 0..size {
            mmap.write_obj(value, i).unwrap();
        }
        mmap
    }

    #[test]
    fn copy_marks_as_present() {
        let shmem = SharedMemory::new("test staging memory", 200 * pagesize() as u64).unwrap();
        let mmap = create_mmap(1, 4);
        let mut staging_memory = StagingMemory::new(&shmem, 0, 200).unwrap();

        let src_addr = mmap.as_ptr();
        #[allow(clippy::undocumented_unsafe_blocks)]
        // SAFETY (test): src_addr points into `mmap`, which is large enough
        // for the biggest copy (4 pages) and outlives the CopyOps.
        unsafe {
            staging_memory.copy(src_addr, 1, 4).unwrap();
            // Zero-page copy: should succeed and mark nothing.
            staging_memory.copy(src_addr, 10, 0).unwrap();
            staging_memory.copy(src_addr, 12, 1).unwrap();
        }

        // Only pages 1..5 and 12 should be present.
        assert!(staging_memory.page_content(0).unwrap().is_none());
        for i in 1..5 {
            assert!(staging_memory.page_content(i).unwrap().is_some());
        }
        for i in 5..12 {
            assert!(staging_memory.page_content(i).unwrap().is_none());
        }
        assert!(staging_memory.page_content(12).unwrap().is_some());
        for i in 13..200 {
            assert!(staging_memory.page_content(i).unwrap().is_none());
        }
    }

    #[test]
    fn page_content_default_is_none() {
        let shmem = SharedMemory::new("test staging memory", 200 * pagesize() as u64).unwrap();
        let staging_memory = StagingMemory::new(&shmem, 0, 200).unwrap();

        assert!(staging_memory.page_content(0).unwrap().is_none());
    }

    #[test]
    fn page_content_returns_content() {
        let shmem = SharedMemory::new("test staging memory", 200 * pagesize() as u64).unwrap();
        let mmap = create_mmap(1, 1);
        let mut staging_memory = StagingMemory::new(&shmem, 0, 200).unwrap();

        #[allow(clippy::undocumented_unsafe_blocks)]
        // SAFETY (test): the source mapping is one page long and outlives the
        // executed CopyOp.
        unsafe {
            staging_memory.copy(mmap.as_ptr(), 0, 1).unwrap().execute();
        }

        let page = staging_memory.page_content(0).unwrap().unwrap();
        #[allow(clippy::undocumented_unsafe_blocks)]
        // SAFETY (test): `page` is a valid slice into the staging mmap, which
        // lives for the rest of the test.
        let result = unsafe { std::slice::from_raw_parts(page.as_ptr(), page.size()) };
        assert_eq!(result, &vec![1; pagesize()]);
    }

    #[test]
    fn page_content_out_of_range() {
        let shmem = SharedMemory::new("test staging memory", 200 * pagesize() as u64).unwrap();
        let staging_memory = StagingMemory::new(&shmem, 0, 200).unwrap();

        // 199 is the last valid index; 200 is one past the end.
        assert!(staging_memory.page_content(199).is_ok());
        match staging_memory.page_content(200) {
            Err(Error::OutOfRange) => {}
            _ => unreachable!("not out of range"),
        }
    }

    #[test]
    fn clear_range() {
        let shmem = SharedMemory::new("test staging memory", 200 * pagesize() as u64).unwrap();
        let mmap = create_mmap(1, 5);
        let mut staging_memory = StagingMemory::new(&shmem, 0, 200).unwrap();

        #[allow(clippy::undocumented_unsafe_blocks)]
        // SAFETY (test): the source mapping is 5 pages long and outlives the
        // CopyOp.
        unsafe {
            staging_memory.copy(mmap.as_ptr(), 0, 5).unwrap();
        }
        staging_memory.clear_range(1..3).unwrap();

        // Cleared pages become absent; surrounding pages stay present.
        assert!(staging_memory.page_content(0).unwrap().is_some());
        assert!(staging_memory.page_content(1).unwrap().is_none());
        assert!(staging_memory.page_content(2).unwrap().is_none());
        assert!(staging_memory.page_content(3).unwrap().is_some());
        assert!(staging_memory.page_content(4).unwrap().is_some());
    }

    #[test]
    fn clear_range_out_of_range() {
        let shmem = SharedMemory::new("test staging memory", 200 * pagesize() as u64).unwrap();
        let mut staging_memory = StagingMemory::new(&shmem, 0, 200).unwrap();

        assert!(staging_memory.clear_range(199..200).is_ok());
        match staging_memory.clear_range(199..201) {
            Err(Error::OutOfRange) => {}
            _ => unreachable!("not out of range"),
        };
    }

    #[test]
    fn first_data_range() {
        let shmem = SharedMemory::new("test staging memory", 200 * pagesize() as u64).unwrap();
        let mmap = create_mmap(1, 2);
        let mut staging_memory = StagingMemory::new(&shmem, 0, 200).unwrap();

        let src_addr = mmap.as_ptr();
        #[allow(clippy::undocumented_unsafe_blocks)]
        // SAFETY (test): src_addr points into a 2-page mapping, enough for
        // both copies, and outlives the CopyOps.
        unsafe {
            staging_memory.copy(src_addr, 1, 2).unwrap();
            staging_memory.copy(src_addr, 3, 1).unwrap();
        }

        // Adjacent copies (1..3 and 3..4) merge into one contiguous range.
        assert_eq!(staging_memory.first_data_range(200).unwrap(), 1..4);
        // max_pages truncates the returned range.
        assert_eq!(staging_memory.first_data_range(2).unwrap(), 1..3);
        staging_memory.clear_range(1..3).unwrap();
        assert_eq!(staging_memory.first_data_range(2).unwrap(), 3..4);
        staging_memory.clear_range(3..4).unwrap();
        assert!(staging_memory.first_data_range(2).is_none());
    }

    #[test]
    fn get_slice() {
        let shmem = SharedMemory::new("test staging memory", 200 * pagesize() as u64).unwrap();
        let mmap1 = create_mmap(1, 1);
        let mmap2 = create_mmap(2, 1);
        let mut staging_memory = StagingMemory::new(&shmem, 0, 200).unwrap();

        let src_addr1 = mmap1.as_ptr();
        let src_addr2 = mmap2.as_ptr();
        #[allow(clippy::undocumented_unsafe_blocks)]
        // SAFETY (test): each source mapping is one page long and outlives
        // the executed CopyOps.
        unsafe {
            staging_memory.copy(src_addr1, 1, 1).unwrap().execute();
            staging_memory.copy(src_addr2, 2, 1).unwrap().execute();
        }

        // The slice spans both pages: page 1 filled with 1s, page 2 with 2s.
        let slice = staging_memory.get_slice(1..3).unwrap();
        assert_eq!(slice.size(), 2 * pagesize());
        for i in 0..pagesize() {
            let mut byte = [0u8; 1];
            slice.get_slice(i, 1).unwrap().copy_to(&mut byte);
            assert_eq!(byte[0], 1);
        }
        for i in pagesize()..2 * pagesize() {
            let mut byte = [0u8; 1];
            slice.get_slice(i, 1).unwrap().copy_to(&mut byte);
            assert_eq!(byte[0], 2);
        }
    }

    #[test]
    fn get_slice_out_of_range() {
        let shmem = SharedMemory::new("test staging memory", 200 * pagesize() as u64).unwrap();
        let staging_memory = StagingMemory::new(&shmem, 0, 200).unwrap();

        match staging_memory.get_slice(200..201) {
            Err(Error::OutOfRange) => {}
            other => {
                unreachable!("unexpected result {:?}", other);
            }
        }
    }

    #[test]
    fn present_pages() {
        let shmem = SharedMemory::new("test staging memory", 200 * pagesize() as u64).unwrap();
        let mmap = create_mmap(1, 5);
        let mut staging_memory = StagingMemory::new(&shmem, 0, 200).unwrap();

        let src_addr = mmap.as_ptr();
        #[allow(clippy::undocumented_unsafe_blocks)]
        // SAFETY (test): src_addr points into a 5-page mapping, enough for
        // the 4-page copy, and outlives the CopyOps.
        unsafe {
            staging_memory.copy(src_addr, 1, 4).unwrap();
            staging_memory.copy(src_addr, 12, 1).unwrap();
        }

        // 4 pages + 1 page marked present.
        assert_eq!(staging_memory.present_pages(), 5);
    }
}