// devices/usb/xhci/ring_buffer.rs
use std::fmt;
6use std::fmt::Display;
7use std::mem::size_of;
8
9use base::debug;
10use base::error;
11use remain::sorted;
12use thiserror::Error;
13use vm_memory::GuestAddress;
14use vm_memory::GuestMemory;
15use vm_memory::GuestMemoryError;
16
17use super::xhci_abi::AddressedTrb;
18use super::xhci_abi::Error as TrbError;
19use super::xhci_abi::LinkTrb;
20use super::xhci_abi::TransferDescriptor;
21use super::xhci_abi::Trb;
22use super::xhci_abi::TrbCast;
23use super::xhci_abi::TrbType;
24
/// Errors that can occur while consuming TRBs from a ring buffer.
#[sorted]
#[derive(Error, Debug)]
pub enum Error {
    #[error("bad dequeue pointer: {0}")]
    BadDequeuePointer(GuestAddress),
    #[error("cannot cast trb: {0}")]
    CastTrb(TrbError),
    #[error("cannot read guest memory: {0}")]
    ReadGuestMemory(GuestMemoryError),
    #[error("cannot get trb chain bit: {0}")]
    TrbChain(TrbError),
}

// Module-local shorthand for results using the ring buffer `Error` type.
type Result<T> = std::result::Result<T, Error>;
39
/// A consumer view of an xHCI ring of TRBs living in guest memory.
///
/// Tracks two dequeue pointer / cycle state pairs: the software pair advances
/// as TRBs are dequeued, while the `hw_` pair only advances as TRBs are marked
/// completed (see `complete` / `synchronize_with_hardware`).
pub struct RingBuffer {
    // Label used in trace output and the `Display` impl.
    name: String,
    // Guest memory the TRBs are read from.
    mem: GuestMemory,
    // Software dequeue pointer: address of the next TRB to read.
    dequeue_pointer: GuestAddress,
    // Dequeue pointer reflecting only completed TRBs; updated by `complete`.
    hw_dequeue_pointer: GuestAddress,
    // Software consumer cycle state; toggled when following a link TRB whose
    // toggle-cycle bit is set.
    consumer_cycle_state: bool,
    // Cycle state paired with `hw_dequeue_pointer`; updated by `complete`.
    hw_consumer_cycle_state: bool,
}
57
58impl Display for RingBuffer {
59 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
60 write!(f, "RingBuffer `{}`", self.name)
61 }
62}
63
impl RingBuffer {
    /// Creates a new ring buffer.
    ///
    /// * `name` - Label used in trace/log output and the `Display` impl.
    /// * `mem` - Guest memory from which TRBs are read.
    pub fn new(name: String, mem: GuestMemory) -> Self {
        RingBuffer {
            name,
            mem,
            dequeue_pointer: GuestAddress(0),
            hw_dequeue_pointer: GuestAddress(0),
            consumer_cycle_state: false,
            hw_consumer_cycle_state: false,
        }
    }

    /// Dequeues the next transfer descriptor: the chain of TRBs starting at
    /// the dequeue pointer, up to and including the first TRB whose chain bit
    /// is cleared. Link TRBs are followed (and not included in the result).
    ///
    /// Returns `Ok(None)` when no complete descriptor is available yet —
    /// either the TRB at the dequeue pointer has a mismatched cycle bit, or
    /// the run of valid TRBs ends while the chain bit is still set.
    ///
    /// # Errors
    ///
    /// Fails if guest memory cannot be read, a TRB cannot be cast or its
    /// chain bit read, or advancing the dequeue pointer would overflow.
    pub fn dequeue_transfer_descriptor(&mut self) -> Result<Option<TransferDescriptor>> {
        let mut trbs = Vec::new();
        while let Some(addressed_trb) = self.get_current_trb()? {
            if let Ok(TrbType::Link) = addressed_trb.trb.get_trb_type() {
                let link_trb = addressed_trb
                    .trb
                    .cast::<LinkTrb>()
                    .map_err(Error::CastTrb)?;
                // Jump to the segment the link TRB points at. `!=` acts as a
                // boolean XOR: the cycle state flips only when the link TRB's
                // toggle-cycle bit is set.
                self.dequeue_pointer = GuestAddress(link_trb.get_ring_segment_pointer());
                self.consumer_cycle_state =
                    self.consumer_cycle_state != link_trb.get_toggle_cycle();
                continue;
            }

            // Advance past the TRB just read; a wrapping address means the
            // pointer is corrupt.
            self.dequeue_pointer = match self.dequeue_pointer.checked_add(size_of::<Trb>() as u64) {
                Some(addr) => addr,
                None => {
                    return Err(Error::BadDequeuePointer(self.dequeue_pointer));
                }
            };

            xhci_trace!(
                "{}: adding trb {} to td {}",
                self.name.as_str(),
                addressed_trb.gpa,
                addressed_trb.trb
            );
            trbs.push(addressed_trb);
            // A cleared chain bit marks the final TRB of the descriptor.
            if !addressed_trb.trb.get_chain_bit().map_err(Error::TrbChain)? {
                debug!("xhci: trb chain is false returning");
                break;
            }
        }
        // If the last gathered TRB still has its chain bit set (or nothing
        // was gathered), the descriptor is incomplete: report None.
        match trbs.last() {
            Some(t) => {
                if t.trb.get_chain_bit().map_err(Error::TrbChain)? {
                    return Ok(None);
                }
            }
            None => return Ok(None),
        }
        Ok(TransferDescriptor::new(trbs))
    }

    /// Returns the current software dequeue pointer.
    pub fn get_dequeue_pointer(&self) -> GuestAddress {
        self.dequeue_pointer
    }

    /// Sets both the software and hardware dequeue pointers to `addr`.
    pub fn set_dequeue_pointer(&mut self, addr: GuestAddress) {
        xhci_trace!("{}: set dequeue pointer {:x}", self.name.as_str(), addr.0);

        self.dequeue_pointer = addr;
        self.hw_dequeue_pointer = addr;
    }

    /// Returns the current software consumer cycle state.
    pub fn get_consumer_cycle_state(&self) -> bool {
        self.consumer_cycle_state
    }

    /// Sets both the software and hardware consumer cycle states.
    pub fn set_consumer_cycle_state(&mut self, state: bool) {
        xhci_trace!("{}: set consumer cycle state {}", self.name.as_str(), state);
        self.consumer_cycle_state = state;
        self.hw_consumer_cycle_state = state;
    }

    // Reads the TRB at the software dequeue pointer. Returns `None` when its
    // cycle bit does not match the consumer cycle state, i.e. the producer
    // has not yet published a TRB at this slot.
    fn get_current_trb(&self) -> Result<Option<AddressedTrb>> {
        let trb: Trb = self
            .mem
            .read_obj_from_addr(self.dequeue_pointer)
            .map_err(Error::ReadGuestMemory)?;
        xhci_trace!("{}: trb read from memory {:?}", self.name.as_str(), trb);
        if trb.get_cycle() != self.consumer_cycle_state {
            debug!(
                "xhci: cycle bit does not match, self cycle {}",
                self.consumer_cycle_state
            );
            Ok(None)
        } else {
            Ok(Some(AddressedTrb {
                trb,
                gpa: self.dequeue_pointer.0,
            }))
        }
    }

    /// Records that `trb` has been completed: advances the hardware dequeue
    /// pointer past it and captures its cycle bit as the hardware cycle
    /// state.
    pub fn complete(&mut self, trb: &AddressedTrb) {
        self.hw_dequeue_pointer = match GuestAddress(trb.gpa).checked_add(size_of::<Trb>() as u64) {
            Some(addr) => addr,
            None => {
                // Leave the hardware pointer unchanged rather than wrapping.
                error!("xhci: gpa of completed TRB is corrupted");
                self.hw_dequeue_pointer
            }
        };
        self.hw_consumer_cycle_state = trb.trb.get_cycle();
    }

    /// Rewinds the software dequeue pointer and cycle state to the hardware
    /// (completed) values, discarding progress past uncompleted TRBs.
    pub fn synchronize_with_hardware(&mut self) {
        self.dequeue_pointer = self.hw_dequeue_pointer;
        self.consumer_cycle_state = self.hw_consumer_cycle_state;
    }
}
195
#[cfg(test)]
mod test {
    use base::pagesize;

    use super::*;
    use crate::usb::xhci::xhci_abi::*;

    // Dequeues two transfer descriptors from a ring spanning three segments
    // (0x100, 0x200, 0x300) joined by link TRBs.
    #[test]
    fn ring_test_dequeue() {
        let trb_size = size_of::<Trb>() as u64;
        let gm = GuestMemory::new(&[(GuestAddress(0), pagesize() as u64)]).unwrap();
        let mut transfer_ring = RingBuffer::new(String::new(), gm.clone());

        // Segment 1 at 0x100: TRBs 1 and 2, both chained.
        let mut trb = NormalTrb::new();
        trb.set_trb_type(TrbType::Normal);
        trb.set_data_buffer_pointer(1);
        trb.set_chain(true);
        gm.write_obj_at_addr(trb, GuestAddress(0x100)).unwrap();

        trb.set_data_buffer_pointer(2);
        gm.write_obj_at_addr(trb, GuestAddress(0x100 + trb_size))
            .unwrap();

        // Link from segment 1 to segment 2.
        let mut ltrb = LinkTrb::new();
        ltrb.set_trb_type(TrbType::Link);
        ltrb.set_ring_segment_pointer(0x200);
        gm.write_obj_at_addr(ltrb, GuestAddress(0x100 + 2 * trb_size))
            .unwrap();

        // Segment 2 at 0x200: TRB 3 chained, TRB 4 ends the first descriptor.
        trb.set_data_buffer_pointer(3);
        gm.write_obj_at_addr(trb, GuestAddress(0x200)).unwrap();

        trb.set_data_buffer_pointer(4);
        trb.set_chain(false);
        gm.write_obj_at_addr(trb, GuestAddress(0x200 + trb_size))
            .unwrap();

        // Link from segment 2 to segment 3.
        ltrb.set_ring_segment_pointer(0x300);
        gm.write_obj_at_addr(ltrb, GuestAddress(0x200 + 2 * trb_size))
            .unwrap();

        // Segment 3 at 0x300: TRB 5 chained, TRB 6 ends the second
        // descriptor.
        trb.set_data_buffer_pointer(5);
        trb.set_chain(true);
        gm.write_obj_at_addr(trb, GuestAddress(0x300)).unwrap();

        trb.set_data_buffer_pointer(6);
        trb.set_chain(false);
        gm.write_obj_at_addr(trb, GuestAddress(0x300 + trb_size))
            .unwrap();

        // Link from segment 3 back to segment 1, closing the ring.
        ltrb.set_ring_segment_pointer(0x100);
        gm.write_obj_at_addr(ltrb, GuestAddress(0x300 + 2 * trb_size))
            .unwrap();

        transfer_ring.set_dequeue_pointer(GuestAddress(0x100));
        transfer_ring.set_consumer_cycle_state(false);

        // First descriptor: TRBs 1-4, spanning the segment 1 -> 2 link.
        let descriptor = transfer_ring
            .dequeue_transfer_descriptor()
            .unwrap()
            .unwrap();
        assert_eq!(descriptor.len(), 4);
        assert_eq!(descriptor[0].trb.get_parameter(), 1);
        assert_eq!(descriptor[1].trb.get_parameter(), 2);
        assert_eq!(descriptor[2].trb.get_parameter(), 3);
        assert_eq!(descriptor[3].trb.get_parameter(), 4);

        // Second descriptor: TRBs 5-6 in segment 3.
        let descriptor = transfer_ring
            .dequeue_transfer_descriptor()
            .unwrap()
            .unwrap();
        assert_eq!(descriptor.len(), 2);
        assert_eq!(descriptor[0].trb.get_parameter(), 5);
        assert_eq!(descriptor[1].trb.get_parameter(), 6);
    }

    // A chain that runs into a TRB with a mismatched cycle bit (the link TRB
    // toggles the cycle state, but TRB 3 keeps the old cycle) must yield
    // None: the descriptor is incomplete.
    #[test]
    fn transfer_ring_test_dequeue_failure() {
        let trb_size = size_of::<Trb>() as u64;
        let gm = GuestMemory::new(&[(GuestAddress(0), pagesize() as u64)]).unwrap();
        let mut transfer_ring = RingBuffer::new(String::new(), gm.clone());

        let mut trb = NormalTrb::new();
        trb.set_trb_type(TrbType::Normal);
        trb.set_data_buffer_pointer(1);
        trb.set_chain(true);
        gm.write_obj_at_addr(trb, GuestAddress(0x100)).unwrap();

        trb.set_data_buffer_pointer(2);
        gm.write_obj_at_addr(trb, GuestAddress(0x100 + trb_size))
            .unwrap();

        let mut ltrb = LinkTrb::new();
        ltrb.set_trb_type(TrbType::Link);
        ltrb.set_ring_segment_pointer(0x200);
        ltrb.set_toggle_cycle(true);
        gm.write_obj_at_addr(ltrb, GuestAddress(0x100 + 2 * trb_size))
            .unwrap();

        trb.set_data_buffer_pointer(3);
        gm.write_obj_at_addr(trb, GuestAddress(0x200)).unwrap();

        transfer_ring.set_dequeue_pointer(GuestAddress(0x100));
        transfer_ring.set_consumer_cycle_state(false);

        let descriptor = transfer_ring.dequeue_transfer_descriptor().unwrap();
        assert!(descriptor.is_none());
    }

    // Exercises cycle-state toggling with a one-TRB segment that links back
    // to itself with the toggle-cycle bit set.
    #[test]
    fn ring_test_toggle_cycle() {
        let trb_size = size_of::<Trb>() as u64;
        let gm = GuestMemory::new(&[(GuestAddress(0), pagesize() as u64)]).unwrap();
        let mut transfer_ring = RingBuffer::new(String::new(), gm.clone());

        let mut trb = NormalTrb::new();
        trb.set_trb_type(TrbType::Normal);
        trb.set_data_buffer_pointer(1);
        trb.set_chain(false);
        trb.set_cycle(false);
        gm.write_obj_at_addr(trb, GuestAddress(0x100)).unwrap();

        let mut ltrb = LinkTrb::new();
        ltrb.set_trb_type(TrbType::Link);
        ltrb.set_ring_segment_pointer(0x100);
        ltrb.set_toggle_cycle(true);
        ltrb.set_cycle(false);
        gm.write_obj_at_addr(ltrb, GuestAddress(0x100 + trb_size))
            .unwrap();

        transfer_ring.set_dequeue_pointer(GuestAddress(0x100));
        transfer_ring.set_consumer_cycle_state(false);

        // First pass: TRB 1 dequeues with cycle state still false (the link
        // TRB has not been reached yet).
        let descriptor = transfer_ring
            .dequeue_transfer_descriptor()
            .unwrap()
            .unwrap();
        assert_eq!(descriptor.len(), 1);
        assert_eq!(descriptor[0].trb.get_parameter(), 1);

        assert_eq!(transfer_ring.consumer_cycle_state, false);

        // Overwrite slot 0x100 with a TRB carrying the toggled cycle bit;
        // following the link flips the consumer cycle state to true.
        let mut trb = NormalTrb::new();
        trb.set_trb_type(TrbType::Normal);
        trb.set_data_buffer_pointer(2);
        trb.set_cycle(true);
        gm.write_obj_at_addr(trb, GuestAddress(0x100)).unwrap();

        let descriptor = transfer_ring
            .dequeue_transfer_descriptor()
            .unwrap()
            .unwrap();
        assert_eq!(descriptor.len(), 1);
        assert_eq!(descriptor[0].trb.get_parameter(), 2);

        assert_eq!(transfer_ring.consumer_cycle_state, true);

        // Third pass: update the link TRB's cycle bit to match, and toggle
        // the cycle state back to false.
        let mut ltrb = LinkTrb::new();
        ltrb.set_trb_type(TrbType::Link);
        ltrb.set_ring_segment_pointer(0x100);
        ltrb.set_toggle_cycle(true);
        ltrb.set_cycle(true);
        gm.write_obj_at_addr(ltrb, GuestAddress(0x100 + trb_size))
            .unwrap();

        let mut trb = NormalTrb::new();
        trb.set_trb_type(TrbType::Normal);
        trb.set_data_buffer_pointer(3);
        trb.set_cycle(false);
        gm.write_obj_at_addr(trb, GuestAddress(0x100)).unwrap();

        let descriptor = transfer_ring
            .dequeue_transfer_descriptor()
            .unwrap()
            .unwrap();
        assert_eq!(descriptor.len(), 1);
        assert_eq!(descriptor[0].trb.get_parameter(), 3);

        assert_eq!(transfer_ring.consumer_cycle_state, false);
    }
}