1use std::cell::RefCell;
8use std::collections::BTreeSet;
9use std::fs::File;
10
11use anyhow::anyhow;
12use anyhow::bail;
13use anyhow::Context;
14use anyhow::Result;
15use base::MappedRegion;
16use base::MemoryMapping;
17use zerocopy::FromBytes;
18use zerocopy::Immutable;
19use zerocopy::IntoBytes;
20use zerocopy::KnownLayout;
21
/// A byte range `[start, start + len)` tracked by `RegionManager`.
///
/// The derived ordering is lexicographic on `(start, len)`, so `BTreeSet`
/// range queries keyed on a `Region { start, len: 0 }` probe find neighbors
/// by their starting offset.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
struct Region {
    // Offset of the first byte of the region.
    start: usize,
    // Length of the region in bytes.
    len: usize,
}
27
/// Tracks which byte ranges are allocated, keeping the set of regions
/// disjoint and coalesced (adjacent regions are merged on allocation).
#[derive(Default)]
struct RegionManager(BTreeSet<Region>);
31
impl RegionManager {
    /// Marks `[start, start + len)` as allocated.
    ///
    /// Fails if the range overlaps an already-allocated region. A region that
    /// ends exactly at `start`, or starts exactly at `start + len`, is merged
    /// with the new range so the set stays maximally coalesced.
    fn allocate(&mut self, start: usize, len: usize) -> Result<()> {
        // The existing region with the largest start strictly below the new
        // range's end. Only this region can overlap the new range from the
        // left (anything before it ends even earlier... unless it overlaps
        // this one, which the invariant forbids).
        let left = self
            .0
            .range(
                ..Region {
                    start: start + len,
                    len: 0,
                },
            )
            .next_back()
            .copied();

        // Reject overlap with the left neighbor, or merge with it when it
        // ends exactly where the new range begins.
        let new = match left {
            None => Region { start, len },
            Some(r) => {
                if start < r.start + r.len {
                    bail!(
                        "range overlaps: existing: {:?}, new: {:?}",
                        left,
                        Region { start, len }
                    );
                }

                if start == r.start + r.len {
                    // Adjacent on the left: extend the neighbor instead of
                    // inserting a second, touching region.
                    let new = Region {
                        start: r.start,
                        len: r.len + len,
                    };
                    self.0.remove(&r);
                    new
                } else {
                    Region { start, len }
                }
            }
        };

        // First region starting at or after the (possibly merged) range's
        // end. Any region starting earlier would have been `left` above, so
        // only this one can touch the new range on the right.
        let right = self
            .0
            .range(
                Region {
                    start: new.start + new.len,
                    len: 0,
                }..,
            )
            .next()
            .copied();
        match right {
            Some(r) if r.start == new.start + new.len => {
                // Adjacent on the right: merge before inserting.
                let merged = Region {
                    start: new.start,
                    len: new.len + r.len,
                };
                self.0.remove(&r);
                self.0.insert(merged);
            }
            Some(_) | None => {
                self.0.insert(new);
            }
        }

        Ok(())
    }

    /// Returns the allocated regions in ascending order (test helper).
    #[cfg(test)]
    fn to_vec(&self) -> Vec<&Region> {
        self.0.iter().collect()
    }
}
113
/// Exercises `RegionManager::allocate`: overlap rejection, left/right
/// coalescing, and gap filling that joins neighbors on both sides.
#[test]
fn test_region_manager() {
    let mut manager = RegionManager::default();

    // A single region.
    manager.allocate(0, 5).unwrap();
    assert_eq!(manager.to_vec(), vec![&Region { start: 0, len: 5 }]);

    // Two back-to-back allocations coalesce into one region.
    manager.allocate(10, 5).unwrap();
    manager.allocate(15, 5).unwrap();
    assert_eq!(
        manager.to_vec(),
        vec![&Region { start: 0, len: 5 }, &Region { start: 10, len: 10 }]
    );

    // Overlapping requests are rejected.
    manager.allocate(3, 5).unwrap_err();
    manager.allocate(8, 5).unwrap_err();

    // A disjoint region stays separate.
    manager.allocate(25, 5).unwrap();
    assert_eq!(
        manager.to_vec(),
        vec![
            &Region { start: 0, len: 5 },
            &Region { start: 10, len: 10 },
            &Region { start: 25, len: 5 }
        ]
    );

    // Filling the gaps merges with neighbors on both sides.
    manager.allocate(5, 5).unwrap();
    assert_eq!(
        manager.to_vec(),
        vec![&Region { start: 0, len: 20 }, &Region { start: 25, len: 5 }]
    );
    manager.allocate(20, 5).unwrap();
    assert_eq!(manager.to_vec(), vec![&Region { start: 0, len: 30 }]);
}
147
/// Index of a fixed-size block within the arena's memory region; byte offsets
/// are computed as `block_id * block_size` (see `Arena::allocate_slice`).
#[derive(Debug, Clone, Copy, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct BlockId(u32);
152
153impl From<u32> for BlockId {
154 fn from(value: u32) -> Self {
155 BlockId(value)
156 }
157}
158
159impl From<BlockId> for u32 {
160 fn from(value: BlockId) -> Self {
161 value.0
162 }
163}
164
165impl BlockId {
166 pub fn as_bytes(&self) -> &[u8] {
167 self.0.as_bytes()
168 }
169}
170
/// A file-backed mapping to be applied to the arena's memory region.
///
/// Records are collected by `Arena::reserve_for_mmap` and handed back to the
/// caller via `Arena::into_mapping_info`; the actual mmap is performed by the
/// consumer, not here.
pub struct FileMappingInfo {
    /// Offset in the arena's memory region where the mapping starts.
    pub mem_offset: usize,
    /// The file that backs the mapping.
    pub file: File,
    /// Length of the mapping in bytes.
    pub length: usize,
    /// Offset in `file` where the mapped contents start.
    pub file_offset: usize,
}
182
/// Block-granular arena allocator over a borrowed `MemoryMapping`.
///
/// Hands out non-overlapping sub-slices of the mapping addressed by
/// `(BlockId, offset)`, and records ranges to be backed by file mappings.
pub struct Arena<'a> {
    mem: &'a mut MemoryMapping,
    // Size in bytes of one block; scales `BlockId` into a byte offset.
    block_size: usize,
    // Ranges already handed out. `RefCell` lets `&self` methods allocate.
    regions: RefCell<RegionManager>,

    // File mappings registered via `reserve_for_mmap`, retrieved by
    // `into_mapping_info`.
    mappings: RefCell<Vec<FileMappingInfo>>,
}
197
198impl<'a> Arena<'a> {
199 pub fn new(block_size: usize, mem: &'a mut MemoryMapping) -> Result<Self> {
201 Ok(Self {
202 mem,
203 block_size,
204 regions: Default::default(),
205 mappings: Default::default(),
206 })
207 }
208
209 fn reserve(&self, mem_offset: usize, len: usize) -> Result<()> {
211 let mem_end = mem_offset.checked_add(len).context("mem_end overflow")?;
212
213 if mem_end > self.mem.size() {
214 bail!(
215 "out of memory region: {mem_offset} + {len} > {}",
216 self.mem.size()
217 );
218 }
219
220 self.regions.borrow_mut().allocate(mem_offset, len)?;
221
222 Ok(())
223 }
224
225 pub fn reserve_for_mmap(
229 &self,
230 mem_offset: usize,
231 length: usize,
232 file: File,
233 file_offset: usize,
234 ) -> Result<()> {
235 self.reserve(mem_offset, length)?;
236 self.mappings.borrow_mut().push(FileMappingInfo {
237 mem_offset,
238 length,
239 file: file.try_clone()?,
240 file_offset,
241 });
242
243 Ok(())
244 }
245
246 pub fn allocate_slice(
249 &self,
250 block: BlockId,
251 block_offset: usize,
252 len: usize,
253 ) -> Result<&'a mut [u8]> {
254 let offset = u32::from(block) as usize * self.block_size + block_offset;
255 self.reserve(offset, len)?;
256
257 let new_addr = (self.mem.as_ptr() as usize)
258 .checked_add(offset)
259 .context("address overflow")?;
260
261 let slice = unsafe { std::slice::from_raw_parts_mut(new_addr as *mut u8, len) };
263 Ok(slice)
264 }
265
266 pub fn allocate<T: FromBytes + IntoBytes + KnownLayout>(
268 &self,
269 block: BlockId,
270 block_offset: usize,
271 ) -> Result<&'a mut T> {
272 let slice = self.allocate_slice(block, block_offset, std::mem::size_of::<T>())?;
273 T::mut_from_bytes(slice).map_err(|_| anyhow!("failed to interpret"))
274 }
275
276 pub fn write_to_mem<T: IntoBytes + Immutable>(
277 &self,
278 block_id: BlockId,
279 block_offset: usize,
280 value: &T,
281 ) -> Result<()> {
282 let slice = self.allocate_slice(block_id, block_offset, std::mem::size_of::<T>())?;
283 slice.copy_from_slice(value.as_bytes());
284 Ok(())
285 }
286
287 pub fn into_mapping_info(self) -> Vec<FileMappingInfo> {
289 self.mappings.take()
290 }
291}