use std::any::Any;
use std::collections::BTreeMap;
use std::sync::atomic::AtomicU32;
use std::sync::atomic::Ordering;

use anyhow::anyhow;
use anyhow::bail;
use anyhow::Context;
use anyhow::Result;
use base::warn;
use base::AsRawDescriptors;
use base::Event;
use base::Protection;
use base::RawDescriptor;
use cros_async::EventAsync;
use cros_async::Executor;
use resources::AddressRange;
use serde::Deserialize;
use serde::Serialize;
use vm_memory::GuestAddress;

/// A contiguous region of guest physical memory with a single protection.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct MemRegion {
    pub gpa: GuestAddress,
    pub len: u64,
    pub prot: Protection,
}

/// A single IOVA -> GPA mapping tracked by a [`MemoryMapper`].
#[derive(Debug)]
pub struct MappingInfo {
    pub iova: u64,
    pub gpa: GuestAddress,
    pub size: u64,
    pub prot: Protection,
}

impl MappingInfo {
    #[allow(dead_code)]
    fn new(iova: u64, gpa: GuestAddress, size: u64, prot: Protection) -> Result<Self> {
        if size == 0 {
            bail!("can't create 0 sized region");
        }
        iova.checked_add(size).context("iova overflow")?;
        gpa.checked_add(size).context("gpa overflow")?;
        Ok(Self {
            iova,
            gpa,
            size,
            prot,
        })
    }
}

struct ExportState {
    // Exported ranges that have not yet been released. Ranges may overlap.
    exported: Vec<AddressRange>,

    // Signaled to tell the exporting device that a fault has occurred on an
    // exported range (i.e. the range was unmapped before being released).
    fault_event: Event,

    // Signaled by `release` when the last outstanding export is released
    // while a fault is pending.
    fault_resolved_event_internal: Event,
    // Async clone of the resolve event, handed out once per fault by `on_fault`.
    fault_resolved_event_external: Option<EventAsync>,
}

impl ExportState {
    fn new(ex: &Executor) -> Result<(Self, Event)> {
        let fault_event = Event::new().context("failed to create fault_event")?;
        let fault_resolved_event = Event::new().context("failed to create resolve event")?;

        Ok((
            Self {
                exported: Vec::new(),
                fault_event: fault_event
                    .try_clone()
                    .context("failed to clone fault event")?,
                fault_resolved_event_internal: fault_resolved_event
                    .try_clone()
                    .context("failed to clone resolve event")?,
                fault_resolved_event_external: Some(
                    EventAsync::new(fault_resolved_event, ex)
                        .context("failed to create async resolve event")?,
                ),
            },
            fault_event,
        ))
    }

    // On the first fault of a session, signals the fault event and returns the
    // resolve event for the faulting operation to await; returns None if a
    // fault is already pending.
    fn on_fault(&mut self) -> Option<EventAsync> {
        let ret = self.fault_resolved_event_external.take();
        if ret.is_some() {
            self.fault_event.signal().expect("failed to signal fault");
        }
        ret
    }

    fn can_export(&self) -> bool {
        self.fault_resolved_event_external.is_some()
    }
}
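
// The export/fault handshake, end to end (an illustrative sketch; the caller
// code below is hypothetical and elides error handling):
//
//     // Exporting device: start a session and export a range.
//     let fault_event = mapper.start_export_session(&ex)?;
//     let regions = mapper.export(iova, size)?;
//
//     // IOMMU worker: an unmap overlapping an exported range faults.
//     if let RemoveMapResult::Success(Some(resolved)) = mapper.remove_map(iova, size)? {
//         resolved.next_val().await?; // wait for the device to release its exports
//     }
//
//     // Exporting device: observes fault_event and releases the range, which
//     // signals the resolve event once no exports remain.
//     mapper.release(iova, size)?;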

pub struct BasicMemoryMapper {
    maps: BTreeMap<u64, MappingInfo>, // key = MappingInfo.iova
    mask: u64,
    id: u32,
    export_state: Option<ExportState>,
}

pub enum RemoveMapResult {
    /// The removal succeeded. If the event is `Some`, the caller must wait for
    /// it to be signaled before completing the unmap operation.
    Success(Option<EventAsync>),
    /// The range to remove partially overlapped a mapping; nothing was removed.
    OverlapFailure,
}

#[derive(PartialEq, Eq, Debug)]
pub enum AddMapResult {
    Ok,
    OverlapFailure,
}

/// Manages the mapping from IO virtual address (IOVA) space to guest physical
/// address (GPA) space.
pub trait MemoryMapper: Send {
    /// Creates a new mapping. If the new mapping overlaps an existing one,
    /// returns `Ok(AddMapResult::OverlapFailure)` and changes nothing.
    fn add_map(&mut self, new_map: MappingInfo) -> Result<AddMapResult>;

    /// Removes all mappings within the specified range. If the range partially
    /// overlaps a mapping, returns `Ok(RemoveMapResult::OverlapFailure)` and
    /// removes nothing. A successful result may carry an event that must be
    /// waited on before the unmap is considered complete.
    fn remove_map(&mut self, iova_start: u64, size: u64) -> Result<RemoveMapResult>;

    /// Returns the IOVA mask for this mapper.
    fn get_mask(&self) -> Result<u64>;

    /// Whether or not endpoints can be safely detached from this mapper.
    fn supports_detach(&self) -> bool;

    /// Resets the mapper's domain back into its initial state. Only necessary
    /// if `supports_detach` returns true. Returns an event to wait on if the
    /// reset invalidated any exported regions.
    fn reset_domain(&mut self) -> Option<EventAsync> {
        None
    }

    /// Gets an identifier for the MemoryMapper instance. Must be unique among
    /// instances of the same trait implementation.
    fn id(&self) -> u32;

    /// Starts an export session. Returns the fault event, which is signaled
    /// when an exported range is invalidated before being released.
    fn start_export_session(&mut self, _ex: &Executor) -> Result<Event> {
        bail!("not supported");
    }

    /// Maps `hva..hva+size` at `iova`, bypassing the IOVA-to-GPA translation.
    ///
    /// # Safety
    ///
    /// The caller must ensure that `hva..hva+size` is a valid host memory
    /// region for the lifetime of the mapping.
    unsafe fn vfio_dma_map(
        &mut self,
        _iova: u64,
        _hva: u64,
        _size: u64,
        _prot: Protection,
    ) -> Result<AddMapResult> {
        bail!("not supported");
    }

    /// Translates `iova..iova+size` into guest physical regions. Multiple
    /// `MemRegion`s are returned when the GPA range is discontiguous or the
    /// protections differ.
    fn export(&mut self, _iova: u64, _size: u64) -> Result<Vec<MemRegion>> {
        bail!("not supported");
    }

    /// Releases a range previously returned by `export`. The range must
    /// exactly match a previously exported range.
    fn release(&mut self, _iova: u64, _size: u64) -> Result<()> {
        bail!("not supported");
    }
}

/// A [`MemoryMapper`] that can also be downcast (via [`Any`]) and enumerated
/// for raw descriptors.
pub trait MemoryMapperTrait: MemoryMapper + AsRawDescriptors + Any {}
impl<T: MemoryMapper + AsRawDescriptors + Any> MemoryMapperTrait for T {}
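
// Example of using the combined trait as an owned trait object (a minimal
// sketch with hypothetical caller code):
//
//     let mapper: Box<dyn MemoryMapperTrait> = Box::new(BasicMemoryMapper::new(u64::MAX));
//     assert_eq!(mapper.get_mask().unwrap(), u64::MAX);
//     let _descriptors = mapper.as_raw_descriptors();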

impl BasicMemoryMapper {
    pub fn new(mask: u64) -> BasicMemoryMapper {
        static NEXT_ID: AtomicU32 = AtomicU32::new(0);
        BasicMemoryMapper {
            maps: BTreeMap::new(),
            mask,
            id: NEXT_ID.fetch_add(1, Ordering::Relaxed),
            export_state: None,
        }
    }

    #[cfg(test)]
    pub fn len(&self) -> usize {
        self.maps.len()
    }

    #[cfg(test)]
    pub fn is_empty(&self) -> bool {
        self.maps.is_empty()
    }
}
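
// End-to-end usage sketch for `BasicMemoryMapper` (illustrative only; assumes
// an existing `Executor` named `ex` and elides error handling):
//
//     let mut mapper = BasicMemoryMapper::new(u64::MAX);
//     let _fault_event = mapper.start_export_session(&ex)?;
//     mapper.add_map(MappingInfo::new(
//         0x1000,                   // iova
//         GuestAddress(0x8000),     // gpa
//         0x1000,                   // size
//         Protection::read_write(),
//     )?)?;
//     // Translate a sub-range of the mapping into guest-physical regions.
//     let regions = mapper.export(0x1000, 0x10)?;
//     assert_eq!(regions[0].gpa, GuestAddress(0x8000));
//     // Release must name the exported range exactly.
//     mapper.release(0x1000, 0x10)?;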

impl MemoryMapper for BasicMemoryMapper {
    fn add_map(&mut self, new_map: MappingInfo) -> Result<AddMapResult> {
        if new_map.size == 0 {
            bail!("can't map 0 sized region");
        }
        let new_iova_end = new_map
            .iova
            .checked_add(new_map.size)
            .context("iova overflow")?;
        new_map
            .gpa
            .checked_add(new_map.size)
            .context("gpa overflow")?;
        // Since existing maps don't overlap, only the last map starting before
        // new_iova_end can overlap the new map.
        let mut iter = self.maps.range(..new_iova_end);
        if let Some((_, map)) = iter.next_back() {
            if map.iova + map.size > new_map.iova {
                return Ok(AddMapResult::OverlapFailure);
            }
        }
        self.maps.insert(new_map.iova, new_map);
        Ok(AddMapResult::Ok)
    }

    fn remove_map(&mut self, iova_start: u64, size: u64) -> Result<RemoveMapResult> {
        if size == 0 {
            bail!("can't unmap 0 sized region");
        }
        let iova_end = iova_start.checked_add(size).context("iova overflow")?;

        let mut to_be_removed = Vec::new();
        // Walk the maps overlapping the range from highest iova down; every
        // overlapping map must be fully contained in [iova_start, iova_end).
        for (key, map) in self.maps.range(..iova_end).rev() {
            let map_iova_end = map.iova + map.size;
            if map_iova_end <= iova_start {
                // Out of range.
                break;
            }
            if iova_start <= map.iova && map_iova_end <= iova_end {
                to_be_removed.push(*key);
            } else {
                return Ok(RemoveMapResult::OverlapFailure);
            }
        }
        for key in to_be_removed {
            self.maps.remove(&key).expect("map should contain key");
        }
        if let Some(export_state) = self.export_state.as_mut() {
            let removed = AddressRange::from_start_and_size(iova_start, size).unwrap();
            for export in &export_state.exported {
                if export.overlaps(removed) {
                    return Ok(RemoveMapResult::Success(export_state.on_fault()));
                }
            }
        }
        Ok(RemoveMapResult::Success(None))
    }

    fn get_mask(&self) -> Result<u64> {
        Ok(self.mask)
    }

    fn supports_detach(&self) -> bool {
        true
    }

    fn reset_domain(&mut self) -> Option<EventAsync> {
        self.maps.clear();
        if let Some(export_state) = self.export_state.as_mut() {
            if !export_state.exported.is_empty() {
                return export_state.on_fault();
            }
        }
        None
    }

    fn id(&self) -> u32 {
        self.id
    }

    fn start_export_session(&mut self, ex: &Executor) -> Result<Event> {
        if let Some(export_state) = self.export_state.as_ref() {
            if !export_state.exported.is_empty() {
                bail!("previous export session still active");
            }
        }

        let (export_state, fault_event) = ExportState::new(ex)?;
        self.export_state = Some(export_state);
        Ok(fault_event)
    }

    fn export(&mut self, iova: u64, size: u64) -> Result<Vec<MemRegion>> {
        let export_state = self.export_state.as_mut().context("no export state")?;
        if !export_state.can_export() {
            bail!("broken export state");
        }
        if size == 0 {
            bail!("can't translate 0 sized region");
        }

        // Chunks with contiguous iovas and gpas, and identical protections, are
        // merged into a single MemRegion.
        let iova_end = iova.checked_add(size).context("iova overflow")?;
        let mut iter = self.maps.range(..iova_end);
        let mut last_iova = iova_end;
        let mut regions: Vec<MemRegion> = Vec::new();
        // Walk the overlapping maps backwards from the end of the range.
        while let Some((_, map)) = iter.next_back() {
            if last_iova > map.iova + map.size {
                // A gap in the iova range; the translation fails.
                break;
            }
            let mut new_region = true;

            // Length of the chunk of this map that lies inside the requested range.
            let region_len = last_iova - std::cmp::max::<u64>(map.iova, iova);
            // Merge with the previously processed region if the gpas are
            // contiguous and the protections match.
            if let Some(last) = regions.last_mut() {
                if map.gpa.unchecked_add(map.size) == last.gpa && map.prot == last.prot {
                    last.gpa = map.gpa;
                    last.len += region_len;
                    new_region = false;
                }
            }
            if new_region {
                regions.push(MemRegion {
                    gpa: map.gpa,
                    len: region_len,
                    prot: map.prot,
                });
            }
            if iova >= map.iova {
                // This map covers the start of the requested range: restore
                // forward order and offset the first region's gpa to iova.
                regions.reverse();
                regions[0].gpa = map
                    .gpa
                    .checked_add(iova - map.iova)
                    .context("gpa overflow")?;

                export_state
                    .exported
                    .push(AddressRange::from_start_and_end(iova, iova_end - 1));

                return Ok(regions);
            }
            last_iova = map.iova;
        }

        Err(anyhow!("invalid iova {:x} {:x}", iova, size))
    }
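
    // Worked example of the merge logic above (hypothetical values): with maps
    // [0, 4) -> gpa 1000 and [4, 8) -> gpa 1004 (same protections),
    // export(2, 5) returns a single MemRegion { gpa: 1002, len: 5 }, because
    // the backward walk merges the gpa-contiguous chunks and then offsets the
    // first region's gpa by (iova - map.iova) = 2.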

    fn release(&mut self, iova: u64, size: u64) -> Result<()> {
        let to_remove = AddressRange::from_start_and_size(iova, size).context("iova overflow")?;
        let state = self.export_state.as_mut().context("no export state")?;

        match state.exported.iter().position(|r| r == &to_remove) {
            Some(idx) => {
                state.exported.swap_remove(idx);
            }
            None => {
                warn!("tried to release unknown range: {:?}", to_remove);
                return Ok(());
            }
        }

        // If a fault is pending (the external resolve event has been handed
        // out) and this was the last outstanding export, resolve the fault.
        if state.exported.is_empty() && state.fault_resolved_event_external.is_none() {
            state
                .fault_resolved_event_internal
                .signal()
                .expect("failed to resolve fault");
        }

        Ok(())
    }
}

impl AsRawDescriptors for BasicMemoryMapper {
    fn as_raw_descriptors(&self) -> Vec<RawDescriptor> {
        Vec::new()
    }
}

#[cfg(test)]
mod tests {
    use std::fmt::Debug;

    use super::*;

    fn assert_overlap_failure(val: RemoveMapResult) {
        match val {
            RemoveMapResult::OverlapFailure => (),
            _ => unreachable!(),
        }
    }

    #[test]
    fn test_mapping_info() {
        // Overflowing and zero-sized regions are rejected.
        MappingInfo::new(u64::MAX - 1, GuestAddress(1), 2, Protection::read()).unwrap_err();
        MappingInfo::new(1, GuestAddress(u64::MAX - 1), 2, Protection::read()).unwrap_err();
        MappingInfo::new(u64::MAX, GuestAddress(1), 2, Protection::read()).unwrap_err();
        MappingInfo::new(1, GuestAddress(u64::MAX), 2, Protection::read()).unwrap_err();
        MappingInfo::new(5, GuestAddress(5), u64::MAX, Protection::read()).unwrap_err();
        MappingInfo::new(1, GuestAddress(5), 0, Protection::read()).unwrap_err();
    }

    #[test]
    fn test_map_overlap() {
        let mut mapper = BasicMemoryMapper::new(u64::MAX);
        mapper
            .add_map(
                MappingInfo::new(10, GuestAddress(1000), 10, Protection::read_write()).unwrap(),
            )
            .unwrap();
        assert_eq!(
            mapper
                .add_map(
                    MappingInfo::new(14, GuestAddress(1000), 1, Protection::read_write()).unwrap()
                )
                .unwrap(),
            AddMapResult::OverlapFailure
        );
        assert_eq!(
            mapper
                .add_map(
                    MappingInfo::new(0, GuestAddress(1000), 12, Protection::read_write()).unwrap()
                )
                .unwrap(),
            AddMapResult::OverlapFailure
        );
        assert_eq!(
            mapper
                .add_map(
                    MappingInfo::new(16, GuestAddress(1000), 6, Protection::read_write()).unwrap()
                )
                .unwrap(),
            AddMapResult::OverlapFailure
        );
        assert_eq!(
            mapper
                .add_map(
                    MappingInfo::new(5, GuestAddress(1000), 20, Protection::read_write()).unwrap()
                )
                .unwrap(),
            AddMapResult::OverlapFailure
        );
    }

    #[test]
    fn test_map_unmap() {
        let ex = Executor::new().expect("Failed to create an executor");
        // Unmapping an empty mapper is a no-op.
        {
            let mut mapper = BasicMemoryMapper::new(u64::MAX);
            mapper.remove_map(0, 4).unwrap();
        }
        // A single mapping, removed in one exact-sized unmap.
        {
            let mut mapper = BasicMemoryMapper::new(u64::MAX);
            let _ = mapper.start_export_session(&ex);
            mapper
                .add_map(
                    MappingInfo::new(0, GuestAddress(1000), 9, Protection::read_write()).unwrap(),
                )
                .unwrap();
            assert_eq!(
                mapper.export(0, 1).unwrap()[0],
                MemRegion {
                    gpa: GuestAddress(1000),
                    len: 1,
                    prot: Protection::read_write()
                }
            );
            assert_eq!(
                mapper.export(8, 1).unwrap()[0],
                MemRegion {
                    gpa: GuestAddress(1008),
                    len: 1,
                    prot: Protection::read_write()
                }
            );
            mapper.export(9, 1).unwrap_err();
            mapper.remove_map(0, 9).unwrap();
            mapper.export(0, 1).unwrap_err();
        }
        // Two mappings removed by a single unmap spanning both.
        {
            let mut mapper = BasicMemoryMapper::new(u64::MAX);
            let _ = mapper.start_export_session(&ex);
            mapper
                .add_map(
                    MappingInfo::new(0, GuestAddress(1000), 4, Protection::read_write()).unwrap(),
                )
                .unwrap();
            mapper
                .add_map(
                    MappingInfo::new(5, GuestAddress(50), 4, Protection::read_write()).unwrap(),
                )
                .unwrap();
            assert_eq!(
                mapper.export(0, 1).unwrap()[0],
                MemRegion {
                    gpa: GuestAddress(1000),
                    len: 1,
                    prot: Protection::read_write()
                }
            );
            assert_eq!(
                mapper.export(6, 1).unwrap()[0],
                MemRegion {
                    gpa: GuestAddress(51),
                    len: 1,
                    prot: Protection::read_write()
                }
            );
            mapper.remove_map(0, 9).unwrap();
            mapper.export(0, 1).unwrap_err();
            mapper.export(6, 1).unwrap_err();
        }
        // A partial unmap fails and leaves the mapping usable.
        {
            let mut mapper = BasicMemoryMapper::new(u64::MAX);
            let _ = mapper.start_export_session(&ex);
            mapper
                .add_map(
                    MappingInfo::new(0, GuestAddress(1000), 9, Protection::read_write()).unwrap(),
                )
                .unwrap();
            assert_overlap_failure(mapper.remove_map(0, 4).unwrap());
            assert_eq!(
                mapper.export(5, 1).unwrap()[0],
                MemRegion {
                    gpa: GuestAddress(1005),
                    len: 1,
                    prot: Protection::read_write()
                }
            );
        }
        // Removing the first of two mappings; the resulting fault on the
        // exported range blocks further exports.
        {
            let mut mapper = BasicMemoryMapper::new(u64::MAX);
            let _ = mapper.start_export_session(&ex);
            mapper
                .add_map(
                    MappingInfo::new(0, GuestAddress(1000), 4, Protection::read_write()).unwrap(),
                )
                .unwrap();
            mapper
                .add_map(
                    MappingInfo::new(5, GuestAddress(50), 4, Protection::read_write()).unwrap(),
                )
                .unwrap();
            assert_eq!(
                mapper.export(0, 1).unwrap()[0],
                MemRegion {
                    gpa: GuestAddress(1000),
                    len: 1,
                    prot: Protection::read_write()
                }
            );
            assert_eq!(
                mapper.export(5, 1).unwrap()[0],
                MemRegion {
                    gpa: GuestAddress(50),
                    len: 1,
                    prot: Protection::read_write()
                }
            );
            mapper.remove_map(0, 4).unwrap();
            mapper.export(0, 1).unwrap_err();
            mapper.export(4, 1).unwrap_err();
            mapper.export(5, 1).unwrap_err();
        }
        // An unmap range larger than the only mapping still removes it.
        {
            let mut mapper = BasicMemoryMapper::new(u64::MAX);
            let _ = mapper.start_export_session(&ex);
            mapper
                .add_map(
                    MappingInfo::new(0, GuestAddress(1000), 4, Protection::read_write()).unwrap(),
                )
                .unwrap();
            assert_eq!(
                mapper.export(0, 1).unwrap()[0],
                MemRegion {
                    gpa: GuestAddress(1000),
                    len: 1,
                    prot: Protection::read_write()
                }
            );
            mapper.export(9, 1).unwrap_err();
            mapper.remove_map(0, 9).unwrap();
            mapper.export(0, 1).unwrap_err();
            mapper.export(9, 1).unwrap_err();
        }
        // Mappings with different protections, removed by one spanning unmap.
        {
            let mut mapper = BasicMemoryMapper::new(u64::MAX);
            let _ = mapper.start_export_session(&ex);
            mapper
                .add_map(MappingInfo::new(0, GuestAddress(1000), 4, Protection::read()).unwrap())
                .unwrap();
            mapper
                .add_map(
                    MappingInfo::new(10, GuestAddress(50), 4, Protection::read_write()).unwrap(),
                )
                .unwrap();
            assert_eq!(
                mapper.export(0, 1).unwrap()[0],
                MemRegion {
                    gpa: GuestAddress(1000),
                    len: 1,
                    prot: Protection::read()
                }
            );
            assert_eq!(
                mapper.export(3, 1).unwrap()[0],
                MemRegion {
                    gpa: GuestAddress(1003),
                    len: 1,
                    prot: Protection::read()
                }
            );
            mapper.export(4, 1).unwrap_err();
            assert_eq!(
                mapper.export(10, 1).unwrap()[0],
                MemRegion {
                    gpa: GuestAddress(50),
                    len: 1,
                    prot: Protection::read_write()
                }
            );
            assert_eq!(
                mapper.export(13, 1).unwrap()[0],
                MemRegion {
                    gpa: GuestAddress(53),
                    len: 1,
                    prot: Protection::read_write()
                }
            );
            mapper.remove_map(0, 14).unwrap();
            mapper.export(0, 1).unwrap_err();
            mapper.export(3, 1).unwrap_err();
            mapper.export(4, 1).unwrap_err();
            mapper.export(10, 1).unwrap_err();
            mapper.export(13, 1).unwrap_err();
        }
    }

    #[test]
    fn test_remove_map() {
        let mut mapper = BasicMemoryMapper::new(u64::MAX);
        mapper
            .add_map(MappingInfo::new(1, GuestAddress(1000), 4, Protection::read()).unwrap())
            .unwrap();
        mapper
            .add_map(MappingInfo::new(5, GuestAddress(50), 4, Protection::read_write()).unwrap())
            .unwrap();
        mapper
            .add_map(MappingInfo::new(9, GuestAddress(50), 4, Protection::read_write()).unwrap())
            .unwrap();
        assert_eq!(mapper.len(), 3);
        assert_overlap_failure(mapper.remove_map(0, 6).unwrap());
        assert_eq!(mapper.len(), 3);
        assert_overlap_failure(mapper.remove_map(1, 5).unwrap());
        assert_eq!(mapper.len(), 3);
        assert_overlap_failure(mapper.remove_map(1, 9).unwrap());
        assert_eq!(mapper.len(), 3);
        assert_overlap_failure(mapper.remove_map(6, 4).unwrap());
        assert_eq!(mapper.len(), 3);
        assert_overlap_failure(mapper.remove_map(6, 14).unwrap());
        assert_eq!(mapper.len(), 3);
        mapper.remove_map(5, 4).unwrap();
        assert_eq!(mapper.len(), 2);
        assert_overlap_failure(mapper.remove_map(1, 9).unwrap());
        assert_eq!(mapper.len(), 2);
        mapper.remove_map(0, 15).unwrap();
        assert_eq!(mapper.len(), 0);
    }

    fn assert_vec_eq<T: std::cmp::PartialEq + Debug>(a: Vec<T>, b: Vec<T>) {
        assert_eq!(a.len(), b.len());
        for (x, y) in a.into_iter().zip(b.into_iter()) {
            assert_eq!(x, y);
        }
    }

    #[test]
    fn test_translate_len() {
        let mut mapper = BasicMemoryMapper::new(u64::MAX);
        let ex = Executor::new().expect("Failed to create an executor");
        let _ = mapper.start_export_session(&ex);
        // Map [1, 5) -> [1000, 1004).
        mapper
            .add_map(MappingInfo::new(1, GuestAddress(1000), 4, Protection::read()).unwrap())
            .unwrap();
        // A zero-length export fails.
        mapper.export(1, 0).unwrap_err();
        assert_eq!(
            mapper.export(1, 1).unwrap()[0],
            MemRegion {
                gpa: GuestAddress(1000),
                len: 1,
                prot: Protection::read()
            }
        );
        assert_eq!(
            mapper.export(1, 2).unwrap()[0],
            MemRegion {
                gpa: GuestAddress(1000),
                len: 2,
                prot: Protection::read()
            }
        );
        assert_eq!(
            mapper.export(1, 3).unwrap()[0],
            MemRegion {
                gpa: GuestAddress(1000),
                len: 3,
                prot: Protection::read()
            }
        );
        assert_eq!(
            mapper.export(2, 1).unwrap()[0],
            MemRegion {
                gpa: GuestAddress(1001),
                len: 1,
                prot: Protection::read()
            }
        );
        assert_eq!(
            mapper.export(2, 2).unwrap()[0],
            MemRegion {
                gpa: GuestAddress(1001),
                len: 2,
                prot: Protection::read()
            }
        );
        mapper.export(1, 5).unwrap_err();
        // Extend the mapping: [5, 9) -> [1004, 1008), gpa-contiguous with the
        // first map.
        mapper
            .add_map(MappingInfo::new(5, GuestAddress(1004), 4, Protection::read()).unwrap())
            .unwrap();
        // Exports spanning both maps merge into a single region.
        assert_eq!(
            mapper.export(2, 5).unwrap()[0],
            MemRegion {
                gpa: GuestAddress(1001),
                len: 5,
                prot: Protection::read()
            }
        );
        assert_eq!(
            mapper.export(2, 6).unwrap()[0],
            MemRegion {
                gpa: GuestAddress(1001),
                len: 6,
                prot: Protection::read()
            }
        );
        assert_eq!(
            mapper.export(2, 7).unwrap()[0],
            MemRegion {
                gpa: GuestAddress(1001),
                len: 7,
                prot: Protection::read()
            }
        );
        mapper.export(2, 8).unwrap_err();
        mapper.export(3, 10).unwrap_err();
        // Add [11, 17) -> [1010, 1016), leaving a hole at [9, 11).
        mapper
            .add_map(MappingInfo::new(11, GuestAddress(1010), 6, Protection::read()).unwrap())
            .unwrap();
        // The hole still breaks the translation.
        mapper.export(3, 10).unwrap_err();
        // Fill the hole: [9, 11) -> [1008, 1010).
        mapper
            .add_map(MappingInfo::new(9, GuestAddress(1008), 2, Protection::read()).unwrap())
            .unwrap();
        // [1, 17) now translates to the contiguous [1000, 1016).
        assert_eq!(
            mapper.export(3, 10).unwrap()[0],
            MemRegion {
                gpa: GuestAddress(1002),
                len: 10,
                prot: Protection::read()
            }
        );
        assert_eq!(
            mapper.export(1, 16).unwrap()[0],
            MemRegion {
                gpa: GuestAddress(1000),
                len: 16,
                prot: Protection::read()
            }
        );
        mapper.export(1, 17).unwrap_err();
        mapper.export(0, 16).unwrap_err();
        // Add [0, 1) -> [5, 6), which is not gpa-contiguous with the rest.
        mapper
            .add_map(MappingInfo::new(0, GuestAddress(5), 1, Protection::read()).unwrap())
            .unwrap();
        assert_eq!(
            mapper.export(0, 1).unwrap()[0],
            MemRegion {
                gpa: GuestAddress(5),
                len: 1,
                prot: Protection::read()
            }
        );
        // Discontiguous gpas produce multiple regions.
        assert_vec_eq(
            mapper.export(0, 2).unwrap(),
            vec![
                MemRegion {
                    gpa: GuestAddress(5),
                    len: 1,
                    prot: Protection::read(),
                },
                MemRegion {
                    gpa: GuestAddress(1000),
                    len: 1,
                    prot: Protection::read(),
                },
            ],
        );
        assert_vec_eq(
            mapper.export(0, 16).unwrap(),
            vec![
                MemRegion {
                    gpa: GuestAddress(5),
                    len: 1,
                    prot: Protection::read(),
                },
                MemRegion {
                    gpa: GuestAddress(1000),
                    len: 15,
                    prot: Protection::read(),
                },
            ],
        );
        // Add [17, 19) -> [1016, 1018) with different protections.
        mapper
            .add_map(MappingInfo::new(17, GuestAddress(1016), 2, Protection::read_write()).unwrap())
            .unwrap();
        // A protection change also splits regions.
        assert_vec_eq(
            mapper.export(1, 17).unwrap(),
            vec![
                MemRegion {
                    gpa: GuestAddress(1000),
                    len: 16,
                    prot: Protection::read(),
                },
                MemRegion {
                    gpa: GuestAddress(1016),
                    len: 1,
                    prot: Protection::read_write(),
                },
            ],
        );
        assert_vec_eq(
            mapper.export(2, 16).unwrap(),
            vec![
                MemRegion {
                    gpa: GuestAddress(1001),
                    len: 15,
                    prot: Protection::read(),
                },
                MemRegion {
                    gpa: GuestAddress(1016),
                    len: 1,
                    prot: Protection::read_write(),
                },
            ],
        );
        assert_vec_eq(
            mapper.export(2, 17).unwrap(),
            vec![
                MemRegion {
                    gpa: GuestAddress(1001),
                    len: 15,
                    prot: Protection::read(),
                },
                MemRegion {
                    gpa: GuestAddress(1016),
                    len: 2,
                    prot: Protection::read_write(),
                },
            ],
        );
        mapper.export(2, 500).unwrap_err();
        mapper.export(500, 5).unwrap_err();
    }
}