// devices/usb/xhci/ring_buffer.rs

use std::fmt;
use std::fmt::Display;
use std::mem::size_of;

use base::debug;
use remain::sorted;
use thiserror::Error;
use vm_memory::GuestAddress;
use vm_memory::GuestMemory;
use vm_memory::GuestMemoryError;

use super::xhci_abi::AddressedTrb;
use super::xhci_abi::Error as TrbError;
use super::xhci_abi::LinkTrb;
use super::xhci_abi::TransferDescriptor;
use super::xhci_abi::Trb;
use super::xhci_abi::TrbCast;
use super::xhci_abi::TrbType;

24#[sorted]
25#[derive(Error, Debug)]
26pub enum Error {
27 #[error("bad dequeue pointer: {0}")]
28 BadDequeuePointer(GuestAddress),
29 #[error("cannot cast trb: {0}")]
30 CastTrb(TrbError),
31 #[error("cannot read guest memory: {0}")]
32 ReadGuestMemory(GuestMemoryError),
33 #[error("cannot get trb chain bit: {0}")]
34 TrbChain(TrbError),
35}
36
37type Result<T> = std::result::Result<T, Error>;
38
39pub struct RingBuffer {
44 name: String,
45 mem: GuestMemory,
46 dequeue_pointer: GuestAddress,
47 consumer_cycle_state: bool,
50}
51
52impl Display for RingBuffer {
53 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
54 write!(f, "RingBuffer `{}`", self.name)
55 }
56}
57
58impl RingBuffer {
60 pub fn new(name: String, mem: GuestMemory) -> Self {
62 RingBuffer {
63 name,
64 mem,
65 dequeue_pointer: GuestAddress(0),
66 consumer_cycle_state: false,
67 }
68 }
69
70 pub fn dequeue_transfer_descriptor(&mut self) -> Result<Option<TransferDescriptor>> {
72 let mut td: TransferDescriptor = TransferDescriptor::new();
73 while let Some(addressed_trb) = self.get_current_trb()? {
74 if let Ok(TrbType::Link) = addressed_trb.trb.get_trb_type() {
75 let link_trb = addressed_trb
76 .trb
77 .cast::<LinkTrb>()
78 .map_err(Error::CastTrb)?;
79 self.dequeue_pointer = GuestAddress(link_trb.get_ring_segment_pointer());
80 self.consumer_cycle_state =
81 self.consumer_cycle_state != link_trb.get_toggle_cycle();
82 continue;
83 }
84
85 self.dequeue_pointer = match self.dequeue_pointer.checked_add(size_of::<Trb>() as u64) {
86 Some(addr) => addr,
87 None => {
88 return Err(Error::BadDequeuePointer(self.dequeue_pointer));
89 }
90 };
91
92 xhci_trace!(
93 "{}: adding trb to td {}",
94 self.name.as_str(),
95 addressed_trb.trb
96 );
97 td.push(addressed_trb);
98 if !addressed_trb.trb.get_chain_bit().map_err(Error::TrbChain)? {
99 debug!("xhci: trb chain is false returning");
100 break;
101 }
102 }
103 match td.last() {
106 Some(t) => {
107 if t.trb.get_chain_bit().map_err(Error::TrbChain)? {
108 return Ok(None);
109 }
110 }
111 None => return Ok(None),
112 }
113 Ok(Some(td))
114 }
115
116 pub fn get_dequeue_pointer(&self) -> GuestAddress {
118 self.dequeue_pointer
119 }
120
121 pub fn set_dequeue_pointer(&mut self, addr: GuestAddress) {
123 xhci_trace!("{}: set dequeue pointer {:x}", self.name.as_str(), addr.0);
124
125 self.dequeue_pointer = addr;
126 }
127
128 pub fn get_consumer_cycle_state(&self) -> bool {
130 self.consumer_cycle_state
131 }
132
133 pub fn set_consumer_cycle_state(&mut self, state: bool) {
135 xhci_trace!("{}: set consumer cycle state {}", self.name.as_str(), state);
136 self.consumer_cycle_state = state;
137 }
138
139 fn get_current_trb(&self) -> Result<Option<AddressedTrb>> {
141 let trb: Trb = self
142 .mem
143 .read_obj_from_addr(self.dequeue_pointer)
144 .map_err(Error::ReadGuestMemory)?;
145 xhci_trace!("{}: trb read from memory {:?}", self.name.as_str(), trb);
146 if trb.get_cycle() != self.consumer_cycle_state {
149 debug!(
150 "xhci: cycle bit does not match, self cycle {}",
151 self.consumer_cycle_state
152 );
153 Ok(None)
154 } else {
155 Ok(Some(AddressedTrb {
156 trb,
157 gpa: self.dequeue_pointer.0,
158 }))
159 }
160 }
161}
162
163#[cfg(test)]
164mod test {
165 use base::pagesize;
166
167 use super::*;
168 use crate::usb::xhci::xhci_abi::*;
169
170 #[test]
171 fn ring_test_dequeue() {
172 let trb_size = size_of::<Trb>() as u64;
173 let gm = GuestMemory::new(&[(GuestAddress(0), pagesize() as u64)]).unwrap();
174 let mut transfer_ring = RingBuffer::new(String::new(), gm.clone());
175
176 let mut trb = NormalTrb::new();
182 trb.set_trb_type(TrbType::Normal);
183 trb.set_data_buffer_pointer(1);
184 trb.set_chain(true);
185 gm.write_obj_at_addr(trb, GuestAddress(0x100)).unwrap();
186
187 trb.set_data_buffer_pointer(2);
188 gm.write_obj_at_addr(trb, GuestAddress(0x100 + trb_size))
189 .unwrap();
190
191 let mut ltrb = LinkTrb::new();
192 ltrb.set_trb_type(TrbType::Link);
193 ltrb.set_ring_segment_pointer(0x200);
194 gm.write_obj_at_addr(ltrb, GuestAddress(0x100 + 2 * trb_size))
195 .unwrap();
196
197 trb.set_data_buffer_pointer(3);
198 gm.write_obj_at_addr(trb, GuestAddress(0x200)).unwrap();
199
200 trb.set_data_buffer_pointer(4);
202 trb.set_chain(false);
203 gm.write_obj_at_addr(trb, GuestAddress(0x200 + 1 * trb_size))
204 .unwrap();
205
206 ltrb.set_ring_segment_pointer(0x300);
207 gm.write_obj_at_addr(ltrb, GuestAddress(0x200 + 2 * trb_size))
208 .unwrap();
209
210 trb.set_data_buffer_pointer(5);
211 trb.set_chain(true);
212 gm.write_obj_at_addr(trb, GuestAddress(0x300)).unwrap();
213
214 trb.set_data_buffer_pointer(6);
216 trb.set_chain(false);
217 gm.write_obj_at_addr(trb, GuestAddress(0x300 + 1 * trb_size))
218 .unwrap();
219
220 ltrb.set_ring_segment_pointer(0x100);
221 gm.write_obj_at_addr(ltrb, GuestAddress(0x300 + 2 * trb_size))
222 .unwrap();
223
224 transfer_ring.set_dequeue_pointer(GuestAddress(0x100));
225 transfer_ring.set_consumer_cycle_state(false);
226
227 let descriptor = transfer_ring
229 .dequeue_transfer_descriptor()
230 .unwrap()
231 .unwrap();
232 assert_eq!(descriptor.len(), 4);
233 assert_eq!(descriptor[0].trb.get_parameter(), 1);
234 assert_eq!(descriptor[1].trb.get_parameter(), 2);
235 assert_eq!(descriptor[2].trb.get_parameter(), 3);
236 assert_eq!(descriptor[3].trb.get_parameter(), 4);
237
238 let descriptor = transfer_ring
240 .dequeue_transfer_descriptor()
241 .unwrap()
242 .unwrap();
243 assert_eq!(descriptor.len(), 2);
244 assert_eq!(descriptor[0].trb.get_parameter(), 5);
245 assert_eq!(descriptor[1].trb.get_parameter(), 6);
246 }
247
248 #[test]
249 fn transfer_ring_test_dequeue_failure() {
250 let trb_size = size_of::<Trb>() as u64;
251 let gm = GuestMemory::new(&[(GuestAddress(0), pagesize() as u64)]).unwrap();
252 let mut transfer_ring = RingBuffer::new(String::new(), gm.clone());
253
254 let mut trb = NormalTrb::new();
255 trb.set_trb_type(TrbType::Normal);
256 trb.set_data_buffer_pointer(1);
257 trb.set_chain(true);
258 gm.write_obj_at_addr(trb, GuestAddress(0x100)).unwrap();
259
260 trb.set_data_buffer_pointer(2);
261 gm.write_obj_at_addr(trb, GuestAddress(0x100 + trb_size))
262 .unwrap();
263
264 let mut ltrb = LinkTrb::new();
265 ltrb.set_trb_type(TrbType::Link);
266 ltrb.set_ring_segment_pointer(0x200);
267 ltrb.set_toggle_cycle(true);
268 gm.write_obj_at_addr(ltrb, GuestAddress(0x100 + 2 * trb_size))
269 .unwrap();
270
271 trb.set_data_buffer_pointer(3);
272 gm.write_obj_at_addr(trb, GuestAddress(0x200)).unwrap();
273
274 transfer_ring.set_dequeue_pointer(GuestAddress(0x100));
275 transfer_ring.set_consumer_cycle_state(false);
276
277 let descriptor = transfer_ring.dequeue_transfer_descriptor().unwrap();
279 assert_eq!(descriptor.is_none(), true);
280 }
281
282 #[test]
283 fn ring_test_toggle_cycle() {
284 let trb_size = size_of::<Trb>() as u64;
285 let gm = GuestMemory::new(&[(GuestAddress(0), pagesize() as u64)]).unwrap();
286 let mut transfer_ring = RingBuffer::new(String::new(), gm.clone());
287
288 let mut trb = NormalTrb::new();
289 trb.set_trb_type(TrbType::Normal);
290 trb.set_data_buffer_pointer(1);
291 trb.set_chain(false);
292 trb.set_cycle(false);
293 gm.write_obj_at_addr(trb, GuestAddress(0x100)).unwrap();
294
295 let mut ltrb = LinkTrb::new();
296 ltrb.set_trb_type(TrbType::Link);
297 ltrb.set_ring_segment_pointer(0x100);
298 ltrb.set_toggle_cycle(true);
299 ltrb.set_cycle(false);
300 gm.write_obj_at_addr(ltrb, GuestAddress(0x100 + trb_size))
301 .unwrap();
302
303 transfer_ring.set_dequeue_pointer(GuestAddress(0x100));
305 transfer_ring.set_consumer_cycle_state(false);
306
307 let descriptor = transfer_ring
309 .dequeue_transfer_descriptor()
310 .unwrap()
311 .unwrap();
312 assert_eq!(descriptor.len(), 1);
313 assert_eq!(descriptor[0].trb.get_parameter(), 1);
314
315 assert_eq!(transfer_ring.consumer_cycle_state, false);
317
318 let mut trb = NormalTrb::new();
321 trb.set_trb_type(TrbType::Normal);
322 trb.set_data_buffer_pointer(2);
323 trb.set_cycle(true); gm.write_obj_at_addr(trb, GuestAddress(0x100)).unwrap();
325
326 let descriptor = transfer_ring
328 .dequeue_transfer_descriptor()
329 .unwrap()
330 .unwrap();
331 assert_eq!(descriptor.len(), 1);
332 assert_eq!(descriptor[0].trb.get_parameter(), 2);
333
334 assert_eq!(transfer_ring.consumer_cycle_state, true);
335
336 let mut ltrb = LinkTrb::new();
338 ltrb.set_trb_type(TrbType::Link);
339 ltrb.set_ring_segment_pointer(0x100);
340 ltrb.set_toggle_cycle(true);
341 ltrb.set_cycle(true); gm.write_obj_at_addr(ltrb, GuestAddress(0x100 + trb_size))
343 .unwrap();
344
345 let mut trb = NormalTrb::new();
348 trb.set_trb_type(TrbType::Normal);
349 trb.set_data_buffer_pointer(3);
350 trb.set_cycle(false); gm.write_obj_at_addr(trb, GuestAddress(0x100)).unwrap();
352
353 let descriptor = transfer_ring
355 .dequeue_transfer_descriptor()
356 .unwrap()
357 .unwrap();
358 assert_eq!(descriptor.len(), 1);
359 assert_eq!(descriptor[0].trb.get_parameter(), 3);
360
361 assert_eq!(transfer_ring.consumer_cycle_state, false);
362 }
363}