// devices/virtio/video/encoder/backend/ffmpeg.rs

use std::collections::BTreeMap;
use std::collections::VecDeque;
use std::os::raw::c_int;
use std::ptr;
use std::sync::Arc;
use std::sync::Weak;

use anyhow::anyhow;
use anyhow::Context;
use base::error;
use base::AsRawDescriptor;
use base::MappedRegion;
use base::MemoryMappingArena;
use ffmpeg::avcodec::AvBufferSource;
use ffmpeg::avcodec::AvCodec;
use ffmpeg::avcodec::AvCodecContext;
use ffmpeg::avcodec::AvCodecIterator;
use ffmpeg::avcodec::AvFrame;
use ffmpeg::avcodec::AvPacket;
use ffmpeg::avcodec::Dimensions;
use ffmpeg::avcodec::TryReceiveResult;
use ffmpeg::max_buffer_alignment;
use ffmpeg::AVPictureType_AV_PICTURE_TYPE_I;
use ffmpeg::AVRational;
use ffmpeg::AV_PKT_FLAG_KEY;

use crate::virtio::video::encoder::backend::Encoder;
use crate::virtio::video::encoder::backend::EncoderSession;
use crate::virtio::video::encoder::EncoderCapabilities;
use crate::virtio::video::encoder::EncoderEvent;
use crate::virtio::video::encoder::InputBufferId;
use crate::virtio::video::encoder::OutputBufferId;
use crate::virtio::video::encoder::SessionConfig;
use crate::virtio::video::error::VideoError;
use crate::virtio::video::error::VideoResult;
use crate::virtio::video::ffmpeg::TryAsAvFrameExt;
use crate::virtio::video::format::Bitrate;
use crate::virtio::video::format::Format;
use crate::virtio::video::format::FormatDesc;
use crate::virtio::video::format::FormatRange;
use crate::virtio::video::format::FrameFormat;
use crate::virtio::video::format::Profile;
use crate::virtio::video::resource::BufferHandle;
use crate::virtio::video::resource::GuestResource;
use crate::virtio::video::resource::GuestResourceHandle;
use crate::virtio::video::utils::EventQueue;
use crate::virtio::video::utils::SyncEventQueue;

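/// Input buffer passed to the ffmpeg codec. It owns the mapping of the guest input resource and
/// notifies the session's event queue that the buffer has been processed when it is dropped, i.e.
/// when the codec no longer needs the data.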
struct InputBuffer {
    /// Mapping of the input frame data in guest memory.
    mapping: MemoryMappingArena,
    /// ID of the input buffer, reported in the `ProcessedInputBuffer` event.
    buffer_id: InputBufferId,
    /// Weak reference to the session's event queue. The event is only sent if the queue is still
    /// alive by the time this buffer is dropped.
    event_queue: Weak<SyncEventQueue<EncoderEvent>>,
}

impl Drop for InputBuffer {
    fn drop(&mut self) {
        match self.event_queue.upgrade() {
            None => (),
            // If the event queue is still alive, notify it that this input buffer can be reused.
            Some(event_queue) => event_queue
                .queue_event(EncoderEvent::ProcessedInputBuffer { id: self.buffer_id })
                .unwrap_or_else(|e| {
                    error!("cannot send end of input buffer notification: {:#}", e)
                }),
        }
    }
}

impl AvBufferSource for InputBuffer {
    fn as_ptr(&self) -> *const u8 {
        self.mapping.as_ptr()
    }

    fn len(&self) -> usize {
        self.mapping.size()
    }

    fn is_empty(&self) -> bool {
        self.len() == 0
    }
}

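/// A job for the encoder to perform: either encode a frame, or flush the pending encoded data.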
enum CodecJob {
    Frame(AvFrame),
    Flush,
}

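/// Encoder session backed by an ffmpeg codec context. It pulls jobs from `codec_jobs` and writes
/// the resulting encoded packets into the buffers queued in `output_queue`.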
pub struct FfmpegEncoderSession {
    /// Queue of events waiting to be read by the client.
    event_queue: Arc<SyncEventQueue<EncoderEvent>>,

    /// FIFO of jobs submitted by the client and not yet sent to the codec.
    codec_jobs: VecDeque<CodecJob>,
    /// FIFO of output buffers waiting to receive encoded packets.
    output_queue: VecDeque<(OutputBufferId, MemoryMappingArena)>,
    /// Whether a flush request has been submitted to the codec and is still being processed. No
    /// new input is sent to the codec while this is true.
    is_flushing: bool,

    /// The ffmpeg context performing the actual encoding.
    context: AvCodecContext,

    /// ID to assign to the next input buffer.
    next_input_buffer_id: InputBufferId,
    /// ID to assign to the next output buffer.
    next_output_buffer_id: OutputBufferId,
}

impl FfmpegEncoderSession {
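    /// Try to send the next pending input job to the codec.
    ///
    /// Returns `Ok(true)` if a job was submitted, `Ok(false)` if the codec cannot take more input
    /// at the moment or there is no job pending.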
    fn try_send_input_job(&mut self) -> VideoResult<bool> {
        // Stop sending new input while a flush is in progress.
        if self.is_flushing {
            return Ok(false);
        }

        match self.codec_jobs.front() {
            Some(CodecJob::Frame(b)) => {
                let result = self
                    .context
                    .try_send_frame(b)
                    .context("while sending frame")
                    .map_err(VideoError::BackendFailure);
                if let Ok(false) = result {
                    // The codec cannot accept the frame right now. Keep it in the queue so we can
                    // retry once some output has been retrieved.
                } else {
                    self.codec_jobs.pop_front().unwrap();
                }
                result
            }
            Some(CodecJob::Flush) => {
                self.codec_jobs.pop_front().unwrap();

                // The codec is actually asked to drain in `try_receive_packet`, once it stops
                // returning packets for the input submitted so far.
                self.is_flushing = true;
                Ok(true)
            }
            None => Ok(false),
        }
    }

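    /// Try to retrieve one encoded packet from the codec and write it into the next queued output
    /// buffer. Also completes an in-progress flush once the codec is fully drained.
    ///
    /// Returns `Ok(true)` if an output buffer has been filled, `Ok(false)` if the codec has no
    /// packet ready or no output buffer is available to receive it.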
    fn try_receive_packet(&mut self) -> VideoResult<bool> {
        let (buffer_id, out_buf) = match self.output_queue.front_mut() {
            Some(p) => p,
            None => return Ok(false),
        };

        let mut packet = AvPacket::empty();

        match self
            .context
            .try_receive_packet(&mut packet)
            .context("while receiving packet")
        {
            Ok(TryReceiveResult::TryAgain) => {
                if !self.is_flushing {
                    return Ok(false);
                }

                // The codec needs to be drained before the flush can complete. If the drain
                // request fails, signal a failed flush to the client and propagate the error.
                if let Err(err) = self.context.flush_encoder() {
                    self.is_flushing = false;
                    self.event_queue
                        .queue_event(EncoderEvent::FlushResponse { flush_done: false })
                        .context("while flushing")
                        .map_err(VideoError::BackendFailure)?;
                    return Err(err)
                        .context("while flushing")
                        .map_err(VideoError::BackendFailure);
                }
                self.try_receive_packet()
            }
            Ok(TryReceiveResult::FlushCompleted) => {
                // The codec is fully drained: report a successful flush and reset the codec so it
                // can accept input again.
                self.is_flushing = false;
                self.event_queue
                    .queue_event(EncoderEvent::FlushResponse { flush_done: true })
                    .map_err(Into::into)
                    .map_err(VideoError::BackendFailure)?;
                self.context.reset();
                Ok(false)
            }
            Ok(TryReceiveResult::Received) => {
                let packet_size = packet.as_ref().size as usize;
                if packet_size > out_buf.size() {
                    return Err(VideoError::BackendFailure(anyhow!(
                        "encoded packet does not fit in output buffer"
                    )));
                }
                // SAFETY: the packet is valid for `packet_size` bytes, we just checked that the
                // output mapping is large enough to hold them, and the two regions do not overlap.
                unsafe {
                    ptr::copy_nonoverlapping(packet.as_ref().data, out_buf.as_ptr(), packet_size);
                }
                self.event_queue
                    .queue_event(EncoderEvent::ProcessedOutputBuffer {
                        id: *buffer_id,
                        bytesused: packet.as_ref().size as _,
                        keyframe: (packet.as_ref().flags as u32 & AV_PKT_FLAG_KEY) != 0,
                        timestamp: packet.as_ref().dts as _,
                    })
                    .map_err(Into::into)
                    .map_err(VideoError::BackendFailure)?;
                self.output_queue.pop_front();
                Ok(true)
            }
            Err(e) => Err(VideoError::BackendFailure(e)),
        }
    }

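    /// Run the encoding pipeline: alternate between submitting input jobs and retrieving encoded
    /// packets until no more progress can be made.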
    fn try_encode(&mut self) -> VideoResult<()> {
        loop {
            let mut progress = false;
            progress |= self.try_send_input_job()?;
            progress |= self.try_receive_packet()?;
            // Stop once neither the input nor the output side can make progress.
            if !progress {
                break;
            }
        }
        Ok(())
    }
}

impl EncoderSession for FfmpegEncoderSession {
    fn encode(
        &mut self,
        resource: GuestResource,
        timestamp: u64,
        force_keyframe: bool,
    ) -> VideoResult<InputBufferId> {
        let buffer_id = self.next_input_buffer_id;
        self.next_input_buffer_id = buffer_id.wrapping_add(1);

        let mut frame: AvFrame = resource
            .try_as_av_frame(|mapping| InputBuffer {
                mapping,
                buffer_id,
                event_queue: Arc::downgrade(&self.event_queue),
            })
            .context("while creating input AvFrame")
            .map_err(VideoError::BackendFailure)?;

        if force_keyframe {
            frame.set_pict_type(AVPictureType_AV_PICTURE_TYPE_I);
        }
        frame.set_pts(timestamp as i64);
        self.codec_jobs.push_back(CodecJob::Frame(frame));
        self.try_encode()?;

        Ok(buffer_id)
    }

    fn use_output_buffer(
        &mut self,
        resource: GuestResourceHandle,
        offset: u32,
        size: u32,
    ) -> VideoResult<OutputBufferId> {
        let buffer_id = self.next_output_buffer_id;
        self.next_output_buffer_id = buffer_id.wrapping_add(1);

        let mapping = resource
            .get_mapping(offset as usize, size as usize)
            .context("while mapping output buffer")
            .map_err(VideoError::BackendFailure)?;

        self.output_queue.push_back((buffer_id, mapping));
        self.try_encode()?;
        Ok(buffer_id)
    }

    fn flush(&mut self) -> VideoResult<()> {
        if self.is_flushing {
            return Err(VideoError::BackendFailure(anyhow!(
                "flush is already in progress"
            )));
        }
        self.codec_jobs.push_back(CodecJob::Flush);
        self.try_encode()?;
        Ok(())
    }

    fn request_encoding_params_change(
        &mut self,
        bitrate: Bitrate,
        framerate: u32,
    ) -> VideoResult<()> {
        match bitrate {
            Bitrate::Cbr { target } => {
                self.context.set_bit_rate(target as u64);
            }
            Bitrate::Vbr { target, peak } => {
                self.context.set_bit_rate(target as u64);
                self.context.set_max_bit_rate(peak as u64);
            }
        }
        // The frame rate is passed to the codec as the inverse of the time base.
        self.context.set_time_base(AVRational {
            num: 1,
            den: framerate as c_int,
        });
        Ok(())
    }

    fn event_pipe(&self) -> &dyn AsRawDescriptor {
        self.event_queue.as_ref()
    }

    fn read_event(&mut self) -> VideoResult<EncoderEvent> {
        self.event_queue
            .dequeue_event()
            .context("while reading encoder event")
            .map_err(VideoError::BackendFailure)
    }
}

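/// Encoder backend that uses ffmpeg's software encoders.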
pub struct FfmpegEncoder {
    /// Usable ffmpeg encoders, indexed by the coded format they produce.
    codecs: BTreeMap<Format, AvCodec>,
}

impl FfmpegEncoder {
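    /// Create a new ffmpeg encoder backend by enumerating the available ffmpeg codecs and keeping
    /// the encoders we can use.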
    pub fn new() -> Self {
        let codecs = AvCodecIterator::new()
            .filter_map(|codec| {
                if !codec.is_encoder() {
                    return None;
                }

                let codec_name = codec.name();

                // Only keep the encoders we explicitly support, mapped to the coded format they
                // produce.
                let format = match codec_name {
                    "libx264" => Format::H264,
                    "libvpx" => Format::VP8,
                    "libvpx-vp9" => Format::VP9,
                    "libx265" => Format::Hevc,
                    _ => return None,
                };

                Some((format, codec))
            })
            .collect();

        Self { codecs }
    }
}

impl Encoder for FfmpegEncoder {
    type Session = FfmpegEncoderSession;

    fn query_capabilities(&self) -> VideoResult<EncoderCapabilities> {
        let codecs = &self.codecs;
        let mut format_idx = BTreeMap::new();
        let mut input_format_descs = vec![];
        let output_format_descs = codecs
            .iter()
            .enumerate()
            .map(|(i, (&format, codec))| {
                let mut in_formats = 0;
                for in_format in codec.pixel_format_iter() {
                    if let Ok(in_format) = Format::try_from(in_format) {
                        // Create the input format descriptor the first time we see this pixel
                        // format, and remember its index so the masks below can refer to it.
                        let idx = format_idx.entry(in_format).or_insert_with(|| {
                            let idx = input_format_descs.len();
                            input_format_descs.push(FormatDesc {
                                mask: 0,
                                format: in_format,
                                frame_formats: vec![FrameFormat {
                                    // Arbitrary resolution range; the actual codec limits are not
                                    // queried.
                                    width: FormatRange {
                                        min: 64,
                                        max: 16384,
                                        step: 1,
                                    },
                                    height: FormatRange {
                                        min: 64,
                                        max: 16384,
                                        step: 1,
                                    },
                                    bitrates: Default::default(),
                                }],
                                plane_align: max_buffer_alignment() as u32,
                            });
                            idx
                        });
                        // Cross-reference input and output formats through their bitmasks.
                        input_format_descs[*idx].mask |= 1 << i;
                        in_formats |= 1 << *idx;
                    }
                }
                FormatDesc {
                    mask: in_formats,
                    format,
                    frame_formats: vec![FrameFormat {
                        // Arbitrary resolution range; the actual codec limits are not queried.
                        width: FormatRange {
                            min: 64,
                            max: 16384,
                            step: 1,
                        },
                        height: FormatRange {
                            min: 64,
                            max: 16384,
                            step: 1,
                        },
                        bitrates: Default::default(),
                    }],
                    plane_align: max_buffer_alignment() as u32,
                }
            })
            .collect();
        // Only a single profile is advertised per coded format.
        let coded_format_profiles = codecs
            .iter()
            .map(|(&format, _codec)| {
                (
                    format,
                    match format {
                        Format::H264 => vec![Profile::H264Baseline],
                        Format::Hevc => vec![Profile::HevcMain],
                        Format::VP8 => vec![Profile::VP8Profile0],
                        Format::VP9 => vec![Profile::VP9Profile0],
                        _ => vec![],
                    },
                )
            })
            .collect();
        let caps = EncoderCapabilities {
            input_format_descs,
            output_format_descs,
            coded_format_profiles,
        };

        Ok(caps)
    }

    fn start_session(&mut self, config: SessionConfig) -> VideoResult<Self::Session> {
        let dst_format = config
            .dst_params
            .format
            .ok_or(VideoError::InvalidOperation)?;
        let codec = self
            .codecs
            .get(&dst_format)
            .ok_or(VideoError::InvalidFormat)?;
        let pix_fmt = config
            .src_params
            .format
            .ok_or(VideoError::InvalidOperation)?
            .try_into()
            .map_err(|_| VideoError::InvalidFormat)?;
        let context = codec
            .build_encoder()
            .and_then(|mut b| {
                b.set_pix_fmt(pix_fmt);
                b.set_dimensions(Dimensions {
                    width: config.src_params.frame_width,
                    height: config.src_params.frame_height,
                });
                b.set_time_base(AVRational {
                    num: 1,
                    den: config.frame_rate as _,
                });
                b.build()
            })
            .context("while creating new session")
            .map_err(VideoError::BackendFailure)?;
        let session = FfmpegEncoderSession {
            event_queue: Arc::new(
                EventQueue::new()
                    .context("while creating encoder session")
                    .map_err(VideoError::BackendFailure)?
                    .into(),
            ),
            codec_jobs: Default::default(),
            output_queue: Default::default(),
            is_flushing: false,
            context,
            next_input_buffer_id: 0,
            next_output_buffer_id: 0,
        };
        session
            .event_queue
            .queue_event(EncoderEvent::RequireInputBuffers {
                input_count: 4,
                input_frame_height: config.src_params.frame_height,
                input_frame_width: config.src_params.frame_width,
                output_buffer_size: 16 * 1024 * 1024,
            })
            .context("while sending buffer request")
            .map_err(VideoError::BackendFailure)?;
        Ok(session)
    }

    fn stop_session(&mut self, _session: Self::Session) -> VideoResult<()> {
        Ok(())
    }
}