#![deny(missing_docs)]
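//! VAAPI-backed decoder backend for the virtio-video device, implemented on top of the
//! stateless decoders provided by the `cros_codecs` crate.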

use std::collections::btree_map::Entry;
use std::collections::BTreeMap;
use std::collections::VecDeque;
use std::os::fd::FromRawFd;
use std::os::fd::OwnedFd;
use std::rc::Rc;

use anyhow::anyhow;
use anyhow::Result;
use base::IntoRawDescriptor;
use base::MappedRegion;
use base::MemoryMappingArena;
use cros_codecs::decoder::stateless::h264::H264;
use cros_codecs::decoder::stateless::h265::H265;
use cros_codecs::decoder::stateless::vp8::Vp8;
use cros_codecs::decoder::stateless::vp9::Vp9;
use cros_codecs::decoder::stateless::DecodeError;
use cros_codecs::decoder::stateless::StatelessVideoDecoder;
use cros_codecs::decoder::DecodedHandle;
use cros_codecs::libva;
use cros_codecs::libva::Display;
use cros_codecs::multiple_desc_type;
use cros_codecs::utils::DmabufFrame;
use cros_codecs::DecodedFormat;
use cros_codecs::FrameLayout;
use cros_codecs::PlaneLayout;

use crate::virtio::video::decoder::Capability;
use crate::virtio::video::decoder::DecoderBackend;
use crate::virtio::video::decoder::DecoderEvent;
use crate::virtio::video::decoder::DecoderSession;
use crate::virtio::video::error::VideoError;
use crate::virtio::video::error::VideoResult;
use crate::virtio::video::format::Format;
use crate::virtio::video::format::FormatDesc;
use crate::virtio::video::format::FormatRange;
use crate::virtio::video::format::FrameFormat;
use crate::virtio::video::format::Level;
use crate::virtio::video::format::Profile;
use crate::virtio::video::format::Rect;
use crate::virtio::video::resource::BufferHandle;
use crate::virtio::video::resource::GuestMemHandle;
use crate::virtio::video::resource::GuestResource;
use crate::virtio::video::resource::GuestResourceHandle;
use crate::virtio::video::utils::EventQueue;

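/// Wrapper over a [`GuestMemHandle`] that can be used as a VA-API surface memory descriptor.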
struct GuestMemDescriptor(GuestMemHandle);

impl libva::SurfaceMemoryDescriptor for GuestMemDescriptor {
    fn add_attrs(
        &mut self,
        attrs: &mut Vec<libva::VASurfaceAttrib>,
    ) -> Option<Box<dyn std::any::Any>> {
        // The decoded data is copied back into guest memory after decoding, so the surface itself
        // does not need any extra attribute.
        ().add_attrs(attrs)
    }
}

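// Build a single descriptor type over the memory backings we support, so one codec instance can
// decode into either guest memory or DMA-buf objects.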
multiple_desc_type! {
    enum BufferDescriptor {
        GuestMem(GuestMemDescriptor),
        Dmabuf(DmabufFrame),
    }
}

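/// A [`BufferDescriptor`] tagged with the picture buffer ID assigned by the client, so decoded
/// frames can be matched back to their output buffer.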
struct BufferDescWithPicId {
    desc: BufferDescriptor,
    picture_buffer_id: i32,
}

impl libva::SurfaceMemoryDescriptor for BufferDescWithPicId {
    fn add_attrs(
        &mut self,
        attrs: &mut Vec<libva::VASurfaceAttrib>,
    ) -> Option<Box<dyn std::any::Any>> {
        self.desc.add_attrs(attrs)
    }
}

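/// An input buffer submitted by the client that has not been fully processed by the codec yet.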
struct PendingJob {
    resource_id: u32,
    timestamp: u64,
    resource: GuestResourceHandle,
    /// Offset of the input data within `resource`.
    offset: usize,
    /// Total number of bytes of input data in `resource`.
    bytes_used: usize,
    /// Number of bytes that still need to be submitted to the codec.
    remaining: usize,
}

impl TryFrom<DecodedFormat> for Format {
    type Error = anyhow::Error;

    fn try_from(value: DecodedFormat) -> Result<Self, Self::Error> {
        match value {
            DecodedFormat::NV12 => Ok(Format::NV12),
            _ => Err(anyhow!("Unsupported format")),
        }
    }
}

impl TryFrom<Format> for DecodedFormat {
    type Error = anyhow::Error;

    fn try_from(value: Format) -> Result<Self, Self::Error> {
        match value {
            Format::NV12 => Ok(DecodedFormat::NV12),
            _ => Err(anyhow!("Unsupported format")),
        }
    }
}

impl TryFrom<libva::VAProfile::Type> for Profile {
    type Error = anyhow::Error;

    fn try_from(value: libva::VAProfile::Type) -> Result<Self, Self::Error> {
        match value {
            libva::VAProfile::VAProfileH264Baseline => Ok(Self::H264Baseline),
            libva::VAProfile::VAProfileH264Main => Ok(Self::H264Main),
            libva::VAProfile::VAProfileH264High => Ok(Self::H264High),
            libva::VAProfile::VAProfileH264StereoHigh => Ok(Self::H264StereoHigh),
            libva::VAProfile::VAProfileH264MultiviewHigh => Ok(Self::H264MultiviewHigh),
            libva::VAProfile::VAProfileHEVCMain => Ok(Self::HevcMain),
            libva::VAProfile::VAProfileHEVCMain10 => Ok(Self::HevcMain10),
            libva::VAProfile::VAProfileVP8Version0_3 => Ok(Self::VP8Profile0),
            libva::VAProfile::VAProfileVP9Profile0 => Ok(Self::VP9Profile0),
            libva::VAProfile::VAProfileVP9Profile1 => Ok(Self::VP9Profile1),
            libva::VAProfile::VAProfileVP9Profile2 => Ok(Self::VP9Profile2),
            libva::VAProfile::VAProfileVP9Profile3 => Ok(Self::VP9Profile3),
            _ => Err(anyhow!(
                "Conversion failed for unexpected VAProfile: {}",
                value
            )),
        }
    }
}

/// State of the queue of output buffers that receive decoded frames.
enum OutputQueueState {
    /// Waiting for the client to provide the output buffer parameters.
    AwaitingBufferCount,
    /// Output buffers are set up and the codec can decode into them.
    Decoding,
    /// A dynamic resolution change is in progress: buffers of the old resolution can still be
    /// returned, but decoding is paused until buffers for the new resolution are provided.
    Drc,
}

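/// A virtio-video decoder backend based on VA-API, using the stateless decoders from
/// `cros_codecs`.
///
/// Minimal usage sketch (illustrative only: it requires VA-API capable hardware and the
/// surrounding virtio-video plumbing, so it is not compiled as a doctest):
///
/// ```ignore
/// let mut backend = VaapiDecoder::new()?;
/// let caps = backend.get_capabilities();
/// let mut session = backend.new_session(Format::H264)?;
/// ```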
pub struct VaapiDecoder {
    /// The capabilities of the decoder.
    caps: Capability,
}

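/// The VA-API capabilities for the coded (input) side of a supported profile.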
struct CodedCap {
    profile: libva::VAProfile::Type,
    max_width: u32,
    max_height: u32,
}

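/// The VA-API capabilities for the raw (decoded output) side of a supported profile.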
struct RawCap {
    fourcc: u32,
    min_width: u32,
    min_height: u32,
    max_width: u32,
    max_height: u32,
}

impl VaapiDecoder {
    /// Query the maximum coded resolution supported by `display` for `profile`.
    fn get_coded_cap(
        display: &libva::Display,
        profile: libva::VAProfile::Type,
    ) -> Result<CodedCap> {
        let mut attrs = vec![
            libva::VAConfigAttrib {
                type_: libva::VAConfigAttribType::VAConfigAttribMaxPictureWidth,
                value: 0,
            },
            libva::VAConfigAttrib {
                type_: libva::VAConfigAttribType::VAConfigAttribMaxPictureHeight,
                value: 0,
            },
        ];

        display.get_config_attributes(profile, libva::VAEntrypoint::VAEntrypointVLD, &mut attrs)?;

        let mut max_width = 1u32;
        let mut max_height = 1u32;

        for attr in &attrs {
            if attr.value == libva::constants::VA_ATTRIB_NOT_SUPPORTED {
                continue;
            }

            match attr.type_ {
                libva::VAConfigAttribType::VAConfigAttribMaxPictureWidth => max_width = attr.value,
                libva::VAConfigAttribType::VAConfigAttribMaxPictureHeight => {
                    max_height = attr.value
                }

                _ => panic!("Unexpected VAConfigAttribType {}", attr.type_),
            }
        }

        Ok(CodedCap {
            profile,
            max_width,
            max_height,
        })
    }

    /// Query the raw (decoded output) capabilities corresponding to `coded_cap`.
    fn get_raw_caps(display: Rc<libva::Display>, coded_cap: &CodedCap) -> Result<Vec<RawCap>> {
        let mut raw_caps = Vec::new();

        let mut config = display.create_config(
            vec![],
            coded_cap.profile,
            libva::VAEntrypoint::VAEntrypointVLD,
        )?;

        let fourccs = config.query_surface_attributes_by_type(
            libva::VASurfaceAttribType::VASurfaceAttribPixelFormat,
        )?;

        for fourcc in fourccs {
            let fourcc = match fourcc {
                libva::GenericValue::Integer(i) => i as u32,
                other => panic!("Unexpected VAGenericValue {other:?}"),
            };

            let min_width = config.query_surface_attributes_by_type(
                libva::VASurfaceAttribType::VASurfaceAttribMinWidth,
            )?;

            let min_width = match min_width.first() {
                Some(libva::GenericValue::Integer(i)) => *i as u32,
                Some(other) => panic!("Unexpected VAGenericValue {other:?}"),
                None => 1,
            };

            let min_height = config.query_surface_attributes_by_type(
                libva::VASurfaceAttribType::VASurfaceAttribMinHeight,
            )?;
            let min_height = match min_height.first() {
                Some(libva::GenericValue::Integer(i)) => *i as u32,
                Some(other) => panic!("Unexpected VAGenericValue {other:?}"),
                None => 1,
            };

            let max_width = config.query_surface_attributes_by_type(
                libva::VASurfaceAttribType::VASurfaceAttribMaxWidth,
            )?;
            let max_width = match max_width.first() {
                Some(libva::GenericValue::Integer(i)) => *i as u32,
                Some(other) => panic!("Unexpected VAGenericValue {other:?}"),
                None => coded_cap.max_width,
            };

            let max_height = config.query_surface_attributes_by_type(
                libva::VASurfaceAttribType::VASurfaceAttribMaxHeight,
            )?;
            let max_height = match max_height.first() {
                Some(libva::GenericValue::Integer(i)) => *i as u32,
                Some(other) => panic!("Unexpected VAGenericValue {other:?}"),
                None => coded_cap.max_height,
            };

            raw_caps.push(RawCap {
                fourcc,
                min_width,
                min_height,
                max_width,
                max_height,
            });
        }

        Ok(raw_caps)
    }

    /// Creates a new `VaapiDecoder` backend, querying the VA-API driver for its capabilities.
    pub fn new() -> Result<Self> {
        let display = libva::Display::open().ok_or_else(|| anyhow!("failed to open VA display"))?;

        let va_profiles = display.query_config_profiles()?;

        let mut in_fmts = Vec::new();
        let mut out_fmts = Vec::new();
        let mut profiles_map: BTreeMap<Format, Vec<Profile>> = Default::default();

        // Levels are not queried from the driver, so an empty map is advertised.
        let levels: BTreeMap<Format, Vec<Level>> = Default::default();

        for va_profile in va_profiles {
            let mut profiles = Vec::new();

            // Only consider profiles that can be decoded through the slice-level (VLD)
            // entrypoint.
            let entrypoints = display.query_config_entrypoints(va_profile)?;
            if !entrypoints.contains(&libva::VAEntrypoint::VAEntrypointVLD) {
                continue;
            }

            // Skip VA profiles that have no virtio-video equivalent.
            let profile = match Profile::try_from(va_profile) {
                Ok(p) => p,
                Err(_) => continue,
            };

            // The single VP8 VA profile covers VP8 profiles 0 through 3.
            if va_profile == libva::VAProfile::VAProfileVP8Version0_3 {
                profiles.push(Profile::VP8Profile0);
                profiles.push(Profile::VP8Profile1);
                profiles.push(Profile::VP8Profile2);
                profiles.push(Profile::VP8Profile3);
            } else {
                profiles.push(profile);
            }

            let coded_cap = VaapiDecoder::get_coded_cap(display.as_ref(), va_profile)?;
            let raw_caps = VaapiDecoder::get_raw_caps(Rc::clone(&display), &coded_cap)?;

            let coded_frame_fmt = FrameFormat {
                width: FormatRange {
                    min: 1,
                    max: coded_cap.max_width,
                    step: 1,
                },

                height: FormatRange {
                    min: 1,
                    max: coded_cap.max_height,
                    step: 1,
                },

                bitrates: Default::default(),
            };

            let coded_format = profile.to_format();
            match profiles_map.entry(coded_format) {
                Entry::Vacant(e) => {
                    e.insert(profiles);
                }
                Entry::Occupied(mut ps) => {
                    ps.get_mut().push(profile);
                }
            }

            let mut n_out = 0;
            for raw_cap in raw_caps {
                // Only NV12 output is currently supported.
                if raw_cap.fourcc != libva::constants::VA_FOURCC_NV12 {
                    continue;
                }

                let raw_frame_fmt = FrameFormat {
                    width: FormatRange {
                        min: raw_cap.min_width,
                        max: raw_cap.max_width,
                        step: 1,
                    },

                    height: FormatRange {
                        min: raw_cap.min_height,
                        max: raw_cap.max_height,
                        step: 1,
                    },

                    bitrates: Default::default(),
                };

                out_fmts.push(FormatDesc {
                    mask: 0,
                    format: Format::NV12,
                    frame_formats: vec![raw_frame_fmt],
                    plane_align: 1,
                });

                n_out += 1;
            }

            // Bitmask of the `n_out` output formats just appended to `out_fmts` that this coded
            // format can decode into.
            let mask = !(u64::MAX << n_out) << (out_fmts.len() - n_out);

            if mask != 0 {
                in_fmts.push(FormatDesc {
                    mask,
                    format: coded_format,
                    frame_formats: vec![coded_frame_fmt],
                    plane_align: 1,
                });
            }
        }

        Ok(Self {
            caps: Capability::new(in_fmts, out_fmts, profiles_map, levels),
        })
    }
}

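/// A frame resolution in pixels.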
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
pub struct Resolution {
    width: u32,
    height: u32,
}

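/// Helper trait for types from which a [`BufferHandle`] can be borrowed in order to map their
/// backing memory.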
trait AsBufferHandle {
    type BufferHandle: BufferHandle;
    fn as_buffer_handle(&self) -> &Self::BufferHandle;
}

impl AsBufferHandle for GuestResource {
    type BufferHandle = GuestResourceHandle;

    fn as_buffer_handle(&self) -> &Self::BufferHandle {
        &self.handle
    }
}

impl AsBufferHandle for GuestMemHandle {
    type BufferHandle = Self;

    fn as_buffer_handle(&self) -> &Self::BufferHandle {
        self
    }
}

impl AsBufferHandle for GuestResourceHandle {
    type BufferHandle = Self;

    fn as_buffer_handle(&self) -> &Self::BufferHandle {
        self
    }
}

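/// A memory mapping of a buffer's backing memory, kept together with a borrow of the resource so
/// the mapping cannot outlive it.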
struct BufferMapping<'a, T: AsBufferHandle> {
    #[allow(dead_code)]
    resource: &'a T,
    mapping: MemoryMappingArena,
}

impl<'a, T: AsBufferHandle> BufferMapping<'a, T> {
    /// Maps `size` bytes of `resource` starting at `offset`.
    pub fn new(resource: &'a T, offset: usize, size: usize) -> Result<Self> {
        let mapping = resource.as_buffer_handle().get_mapping(offset, size)?;

        Ok(Self { resource, mapping })
    }
}

impl<T: AsBufferHandle> AsRef<[u8]> for BufferMapping<'_, T> {
    fn as_ref(&self) -> &[u8] {
        let mapping = &self.mapping;
        // SAFETY: the mapping is valid for its whole size and cannot be unmapped while `self` is
        // alive, so a slice over it remains valid for the returned lifetime.
        unsafe { std::slice::from_raw_parts(mapping.as_ptr(), mapping.size()) }
    }
}

impl<T: AsBufferHandle> AsMut<[u8]> for BufferMapping<'_, T> {
    fn as_mut(&mut self) -> &mut [u8] {
        let mapping = &self.mapping;
        // SAFETY: the mapping is valid for its whole size and cannot be unmapped while `self` is
        // alive, and `&mut self` guarantees exclusive access.
        unsafe { std::slice::from_raw_parts_mut(mapping.as_ptr(), mapping.size()) }
    }
}

/// An output frame the session is currently holding on to: either a decoded picture that has been
/// sent to the client, or a free frame taken out of the frame pool when the output queue was
/// cleared. Entries are dropped when the client calls `reuse_output_buffer`.
#[allow(dead_code)]
enum BorrowedFrame {
    Decoded(Box<dyn DecodedHandle<Descriptor = BufferDescWithPicId>>),
    Held(Box<dyn AsRef<BufferDescWithPicId>>),
}

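/// A decoder session for the VA-API backend, created through [`DecoderBackend::new_session`].
///
/// Illustrative sketch of how a caller may drive a session (assumes `input_handle`, `resource_id`,
/// `timestamp` and `input_len` are provided by the virtio-video device; not compiled as a
/// doctest):
///
/// ```ignore
/// session.decode(resource_id, timestamp, input_handle, 0, input_len)?;
/// match session.read_event()? {
///     DecoderEvent::NotifyEndOfBitstreamBuffer(_) => { /* the input buffer can be recycled */ }
///     DecoderEvent::PictureReady { picture_buffer_id, .. } => {
///         session.reuse_output_buffer(picture_buffer_id)?;
///     }
///     _ => {}
/// }
/// ```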
pub struct VaapiDecoderSession {
    /// The implementation of the codec-specific decoding logic.
    codec: Box<dyn StatelessVideoDecoder<BufferDescWithPicId>>,
    /// The state of the output queue.
    output_queue_state: OutputQueueState,
    /// Frames currently held by us, indexed by picture buffer ID.
    held_frames: BTreeMap<i32, BorrowedFrame>,
    /// Queue of input buffers waiting to be submitted to the codec.
    submit_queue: VecDeque<PendingJob>,
    /// Queue of decoder events waiting to be read by the client.
    event_queue: EventQueue<DecoderEvent>,
    /// Whether the decoder is currently flushing.
    flushing: bool,
}

impl VaapiDecoderSession {
    /// Copy `decoded_frame` into its output buffer if needed and queue a `PictureReady` event
    /// for it.
    fn output_picture(
        decoded_frame: &dyn DecodedHandle<Descriptor = BufferDescWithPicId>,
        event_queue: &mut EventQueue<DecoderEvent>,
    ) -> Result<()> {
        let timestamp = decoded_frame.timestamp();

        let buffer_desc = decoded_frame.resource();
        let picture_buffer_id = buffer_desc.picture_buffer_id;

        // Frames decoded into guest memory must be synced before their content can be read back.
        if let BufferDescriptor::GuestMem(_) = &buffer_desc.desc {
            drop(buffer_desc);
            decoded_frame.sync()?;
        }

        // Copy the decoded data into the guest buffer if the output is backed by guest memory.
        if let BufferDescriptor::GuestMem(handle) = &decoded_frame.resource().desc {
            let picture = decoded_frame.dyn_picture();
            let mut backend_handle = picture.dyn_mappable_handle()?;
            let buffer_size = backend_handle.image_size();

            let mut output_map = BufferMapping::new(&handle.0, 0, buffer_size)?;
            let output_bytes = output_map.as_mut();

            backend_handle.read(output_bytes)?;
        }

        event_queue
            .queue_event(DecoderEvent::PictureReady {
                picture_buffer_id,
                timestamp,
            })
            .map_err(|e| {
                VideoError::BackendFailure(anyhow!("Can't queue the PictureReady event {}", e))
            })?;

        Ok(())
    }

    /// Queue a `FlushCompleted` event if all pending input has been processed.
    fn try_emit_flush_completed(&mut self) -> Result<()> {
        if self.submit_queue.is_empty() {
            self.flushing = false;

            let event_queue = &mut self.event_queue;

            event_queue
                .queue_event(DecoderEvent::FlushCompleted(Ok(())))
                .map_err(|e| anyhow!("Can't queue the FlushCompleted event {}", e))
        } else {
            Ok(())
        }
    }

    /// Submit pending input buffers to the codec until it asks us to stop or runs out of output
    /// buffers.
    fn drain_submit_queue(&mut self) -> VideoResult<()> {
        while let Some(job) = self.submit_queue.front_mut() {
            let bitstream_map = BufferMapping::new(&job.resource, job.offset, job.bytes_used)
                .map_err(VideoError::BackendFailure)?;

            let slice_start = job.bytes_used - job.remaining;
            match self
                .codec
                .decode(job.timestamp, &bitstream_map.as_ref()[slice_start..])
            {
                Ok(processed) => {
                    job.remaining = job.remaining.saturating_sub(processed);
                    // The input buffer has been fully processed and can be returned to the guest.
                    if job.remaining == 0 {
                        self.event_queue
                            .queue_event(DecoderEvent::NotifyEndOfBitstreamBuffer(job.resource_id))
                            .map_err(|e| {
                                VideoError::BackendFailure(anyhow!(
                                    "Can't queue the NotifyEndOfBitstream event {}",
                                    e
                                ))
                            })?;
                        self.submit_queue.pop_front();
                    }
                }
                Err(DecodeError::CheckEvents) => {
                    // The codec wants us to process its pending events before accepting more
                    // input.
                    self.process_decoder_events()?;
                    break;
                }
                // The codec is out of output buffers; retry once the client returns some.
                Err(DecodeError::NotEnoughOutputBuffers(_)) => break,
                Err(e) => {
                    self.event_queue
                        .queue_event(DecoderEvent::NotifyError(VideoError::BackendFailure(
                            anyhow!("Decoding buffer {} failed", job.resource_id),
                        )))
                        .map_err(|e| {
                            VideoError::BackendFailure(anyhow!(
                                "Can't queue the NotifyError event {}",
                                e
                            ))
                        })?;
                    return Err(VideoError::BackendFailure(e.into()));
                }
            }
        }

        Ok(())
    }

    /// Turn the codec's pending events into client events.
    fn process_decoder_events(&mut self) -> VideoResult<()> {
        while let Some(event) = self.codec.next_event() {
            match event {
                cros_codecs::decoder::DecoderEvent::FrameReady(frame) => {
                    Self::output_picture(frame.as_ref(), &mut self.event_queue)
                        .map_err(VideoError::BackendFailure)?;
                    let picture_id = frame.resource().picture_buffer_id;
                    self.held_frames
                        .insert(picture_id, BorrowedFrame::Decoded(frame));
                }
                cros_codecs::decoder::DecoderEvent::FormatChanged(mut format) => {
                    let coded_resolution = format.stream_info().coded_resolution;
                    let display_resolution = format.stream_info().display_resolution;

                    // Ask the client to provide output buffers for the new format.
                    self.event_queue
                        .queue_event(DecoderEvent::ProvidePictureBuffers {
                            min_num_buffers: format.stream_info().min_num_frames as u32,
                            width: coded_resolution.width as i32,
                            height: coded_resolution.height as i32,
                            visible_rect: Rect {
                                left: 0,
                                top: 0,
                                right: display_resolution.width as i32,
                                bottom: display_resolution.height as i32,
                            },
                        })
                        .map_err(|e| VideoError::BackendFailure(e.into()))?;

                    // Buffers of the old format cannot be used anymore.
                    format.frame_pool().clear();

                    self.output_queue_state = match &self.output_queue_state {
                        // Initial resolution change: keep waiting for the buffer count.
                        OutputQueueState::AwaitingBufferCount => {
                            OutputQueueState::AwaitingBufferCount
                        }
                        OutputQueueState::Decoding => OutputQueueState::Drc,
                        OutputQueueState::Drc => {
                            return Err(VideoError::BackendFailure(anyhow!(
                                "Invalid state during DRC."
                            )))
                        }
                    };
                }
            }
        }

        Ok(())
    }

    /// Process codec events, then submit as much pending input as possible.
    fn try_make_progress(&mut self) -> VideoResult<()> {
        self.process_decoder_events()?;
        self.drain_submit_queue()?;

        Ok(())
    }
}

impl DecoderSession for VaapiDecoderSession {
    fn set_output_parameters(&mut self, _: usize, _: Format) -> VideoResult<()> {
        let output_queue_state = &mut self.output_queue_state;

        match output_queue_state {
            OutputQueueState::AwaitingBufferCount | OutputQueueState::Drc => {
                *output_queue_state = OutputQueueState::Decoding;

                Ok(())
            }
            OutputQueueState::Decoding => {
                *output_queue_state = OutputQueueState::Decoding;

                Ok(())
            }
        }
    }

    fn decode(
        &mut self,
        resource_id: u32,
        timestamp: u64,
        resource: GuestResourceHandle,
        offset: u32,
        bytes_used: u32,
    ) -> VideoResult<()> {
        let job = PendingJob {
            resource_id,
            timestamp,
            resource,
            offset: offset as usize,
            bytes_used: bytes_used as usize,
            remaining: bytes_used as usize,
        };

        self.submit_queue.push_back(job);
        self.try_make_progress()?;

        Ok(())
    }

    fn flush(&mut self) -> VideoResult<()> {
        self.flushing = true;

        self.try_make_progress()?;

        // The flush will be completed once the remaining pending input has been decoded.
        if !self.submit_queue.is_empty() {
            return Ok(());
        }

        self.codec
            .flush()
            .map_err(|e| VideoError::BackendFailure(e.into()))?;
        self.process_decoder_events()?;

        self.try_emit_flush_completed()
            .map_err(VideoError::BackendFailure)
    }

    fn reset(&mut self) -> VideoResult<()> {
        // Drop all the pending input.
        self.submit_queue.clear();

        self.codec
            .flush()
            .map_err(|e| VideoError::BackendFailure(e.into()))?;

        self.process_decoder_events()?;

        self.clear_output_buffers()?;

        self.event_queue
            .queue_event(DecoderEvent::ResetCompleted(Ok(())))
            .map_err(|e| {
                VideoError::BackendFailure(anyhow!("Can't queue the ResetCompleted event {}", e))
            })?;

        Ok(())
    }

    fn clear_output_buffers(&mut self) -> VideoResult<()> {
        // Cancel any ongoing flush.
        self.flushing = false;

        // Drop all the pending `PictureReady` and `FlushCompleted` events.
        self.event_queue.retain(|event| {
            !matches!(
                event,
                DecoderEvent::PictureReady { .. } | DecoderEvent::FlushCompleted(_)
            )
        });

        // Take the free frames out of the pool and hold them until the client returns their
        // buffers.
        let frame_pool = self.codec.frame_pool();
        while let Some(frame) = frame_pool.take_free_frame() {
            let picture_id = (*frame).as_ref().picture_buffer_id;
            self.held_frames
                .insert(picture_id, BorrowedFrame::Held(frame));
        }

        Ok(())
    }

    fn event_pipe(&self) -> &dyn base::AsRawDescriptor {
        &self.event_queue
    }

    fn use_output_buffer(
        &mut self,
        picture_buffer_id: i32,
        resource: GuestResource,
    ) -> VideoResult<()> {
        let output_queue_state = &mut self.output_queue_state;
        // Ignore buffers of the old resolution while a resolution change is in progress.
        if let OutputQueueState::Drc = output_queue_state {
            return Ok(());
        }

        let desc = match resource.handle {
            GuestResourceHandle::GuestPages(handle) => {
                BufferDescriptor::GuestMem(GuestMemDescriptor(handle))
            }
            GuestResourceHandle::VirtioObject(handle) => {
                // SAFETY: `into_raw_descriptor` transfers ownership of a valid descriptor to us.
                let fd = unsafe { OwnedFd::from_raw_fd(handle.desc.into_raw_descriptor()) };
                let modifier = handle.modifier;

                let frame = DmabufFrame {
                    fds: vec![fd],
                    layout: FrameLayout {
                        format: (cros_codecs::Fourcc::from(b"NV12"), modifier),
                        size: cros_codecs::Resolution::from((resource.width, resource.height)),
                        planes: resource
                            .planes
                            .iter()
                            .map(|p| PlaneLayout {
                                buffer_index: 0,
                                offset: p.offset,
                                stride: p.stride,
                            })
                            .collect(),
                    },
                };

                BufferDescriptor::Dmabuf(frame)
            }
        };

        let desc_with_pic_id = BufferDescWithPicId {
            desc,
            picture_buffer_id,
        };

        self.codec
            .frame_pool()
            .add_frames(vec![desc_with_pic_id])
            .map_err(VideoError::BackendFailure)?;

        self.try_make_progress()
    }

    fn reuse_output_buffer(&mut self, picture_buffer_id: i32) -> VideoResult<()> {
        let output_queue_state = &mut self.output_queue_state;
        // Ignore buffers of the old resolution while a resolution change is in progress.
        if let OutputQueueState::Drc = output_queue_state {
            return Ok(());
        }

        // Release our hold on this picture buffer.
        self.held_frames.remove(&picture_buffer_id);

        self.try_make_progress()?;

        // Retry a pending flush now that a buffer has been returned.
        if self.flushing {
            self.flush()?;
        }
        Ok(())
    }

    fn read_event(&mut self) -> VideoResult<DecoderEvent> {
        self.event_queue
            .dequeue_event()
            .map_err(|e| VideoError::BackendFailure(anyhow!("Can't read event {}", e)))
    }
}

impl DecoderBackend for VaapiDecoder {
    type Session = VaapiDecoderSession;

    fn get_capabilities(&self) -> Capability {
        self.caps.clone()
    }

    fn new_session(&mut self, format: Format) -> VideoResult<Self::Session> {
        let display = Display::open()
            .ok_or_else(|| VideoError::BackendFailure(anyhow!("failed to open VA display")))?;

        let codec: Box<dyn StatelessVideoDecoder<BufferDescWithPicId>> = match format {
            Format::VP8 => Box::new(
                cros_codecs::decoder::stateless::StatelessDecoder::<Vp8, _>::new_vaapi(
                    display,
                    cros_codecs::decoder::BlockingMode::NonBlocking,
                ),
            ),
            Format::VP9 => Box::new(
                cros_codecs::decoder::stateless::StatelessDecoder::<Vp9, _>::new_vaapi(
                    display,
                    cros_codecs::decoder::BlockingMode::NonBlocking,
                ),
            ),
            Format::H264 => Box::new(
                cros_codecs::decoder::stateless::StatelessDecoder::<H264, _>::new_vaapi(
                    display,
                    cros_codecs::decoder::BlockingMode::NonBlocking,
                ),
            ),
            Format::Hevc => Box::new(
                cros_codecs::decoder::stateless::StatelessDecoder::<H265, _>::new_vaapi(
                    display,
                    cros_codecs::decoder::BlockingMode::NonBlocking,
                ),
            ),
            _ => return Err(VideoError::InvalidFormat),
        };

        Ok(VaapiDecoderSession {
            codec,
            output_queue_state: OutputQueueState::AwaitingBufferCount,
            held_frames: Default::default(),
            submit_queue: Default::default(),
            event_queue: EventQueue::new().map_err(|e| VideoError::BackendFailure(anyhow!(e)))?,
            flushing: Default::default(),
        })
    }
}

#[cfg(test)]
mod tests {
    use super::super::tests::*;
    use super::*;

    #[test]
    // Ignored by default since it requires libva-compatible hardware.
    #[ignore]
    fn test_get_capabilities() {
        let decoder = VaapiDecoder::new().unwrap();
        let caps = decoder.get_capabilities();
        assert!(!caps.input_formats().is_empty());
        assert!(!caps.output_formats().is_empty());
    }

    #[test]
    // Ignored by default since it requires libva-compatible hardware.
    #[ignore]
    fn test_decode_h264_guestmem_to_guestmem() {
        decode_h264_generic(
            &mut VaapiDecoder::new().unwrap(),
            build_guest_mem_handle,
            build_guest_mem_handle,
        );
    }
}