xref: /aosp_15_r20/external/virtio-media/device/src/devices/video_decoder.rs (revision 1b4853f54772485c5dd4001ae33a7a958bcc97a1)
1 // Copyright 2024 The ChromiumOS Authors
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 use std::ops::Deref;
6 use std::os::fd::BorrowedFd;
7 
8 use v4l2r::bindings;
9 use v4l2r::ioctl::BufferCapabilities;
10 use v4l2r::ioctl::BufferField;
11 use v4l2r::ioctl::BufferFlags;
12 use v4l2r::ioctl::DecoderCmd;
13 use v4l2r::ioctl::EventType;
14 use v4l2r::ioctl::SelectionTarget;
15 use v4l2r::ioctl::SelectionType;
16 use v4l2r::ioctl::SrcChanges;
17 use v4l2r::ioctl::V4l2Buffer;
18 use v4l2r::ioctl::V4l2MplaneFormat;
19 use v4l2r::ioctl::V4l2PlanesWithBacking;
20 use v4l2r::ioctl::V4l2PlanesWithBackingMut;
21 use v4l2r::memory::MemoryType;
22 use v4l2r::Colorspace;
23 use v4l2r::Quantization;
24 use v4l2r::QueueClass;
25 use v4l2r::QueueDirection;
26 use v4l2r::QueueType;
27 use v4l2r::XferFunc;
28 use v4l2r::YCbCrEncoding;
29 
30 use crate::ioctl::virtio_media_dispatch_ioctl;
31 use crate::ioctl::IoctlResult;
32 use crate::ioctl::VirtioMediaIoctlHandler;
33 use crate::mmap::MmapMappingManager;
34 use crate::DequeueBufferEvent;
35 use crate::SessionEvent;
36 use crate::SgEntry;
37 use crate::V4l2Event;
38 use crate::V4l2Ioctl;
39 use crate::VirtioMediaDevice;
40 use crate::VirtioMediaDeviceSession;
41 use crate::VirtioMediaEventQueue;
42 use crate::VirtioMediaHostMemoryMapper;
43 use crate::VIRTIO_MEDIA_MMAP_FLAG_RW;
44 
/// Backing MMAP memory for `VirtioVideoMediaDecoderBuffer`.
pub trait VideoDecoderBufferBacking {
    /// Creates the backing storage for buffer `index` of `queue`, with one entry in `sizes`
    /// per plane giving the minimum size of that plane.
    fn new(queue: QueueType, index: u32, sizes: &[usize]) -> IoctlResult<Self>
    where
        Self: Sized;

    /// Returns a file descriptor backing plane `plane_idx`, or `None` if the storage for that
    /// plane cannot be exported as a file descriptor.
    fn fd_for_plane(&self, plane_idx: usize) -> Option<BorrowedFd>;
}
53 
/// A single decoder buffer: the V4L2 representation paired with its backend storage.
pub struct VideoDecoderBuffer<S: VideoDecoderBufferBacking> {
    /// V4L2 state of the buffer (index, flags, timestamp, planes, ...).
    v4l2_buffer: V4l2Buffer,

    /// Backend-specific storage.
    pub backing: S,
}
60 
61 impl<S: VideoDecoderBufferBacking> VideoDecoderBuffer<S> {
new( queue: QueueType, index: u32, sizes: &[usize], mmap_offset: u32, ) -> IoctlResult<Self>62     fn new(
63         queue: QueueType,
64         index: u32,
65         sizes: &[usize],
66         // TODO: need as many offsets as there are planes.
67         mmap_offset: u32,
68     ) -> IoctlResult<Self> {
69         let backing = S::new(queue, index, sizes)?;
70 
71         let mut v4l2_buffer = V4l2Buffer::new(queue, index, MemoryType::Mmap);
72         if let V4l2PlanesWithBackingMut::Mmap(mut planes) =
73             v4l2_buffer.planes_with_backing_iter_mut()
74         {
75             // SAFETY: every buffer has at least one plane.
76             let mut plane = planes.next().unwrap();
77             plane.set_mem_offset(mmap_offset);
78             *plane.length = sizes[0] as u32;
79         } else {
80             // SAFETY: we have just set the buffer type to MMAP. Reaching this point means a bug in
81             // the code.
82             panic!()
83         }
84 
85         v4l2_buffer.set_flags(BufferFlags::TIMESTAMP_MONOTONIC);
86         v4l2_buffer.set_field(BufferField::None);
87 
88         Ok(Self {
89             v4l2_buffer,
90             backing,
91         })
92     }
93 
index(&self) -> u3294     pub fn index(&self) -> u32 {
95         self.v4l2_buffer.index()
96     }
97 
timestamp(&self) -> bindings::timeval98     pub fn timestamp(&self) -> bindings::timeval {
99         self.v4l2_buffer.timestamp()
100     }
101 }
102 
/// Events reported by the [`VideoDecoderBackendSession::next_event`] method.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum VideoDecoderBackendEvent {
    /// Sent whenever the format of the stream has changed. The new format can be read using
    /// [`VideoDecoderBackendSession::current_format`].
    StreamFormatChanged,
    /// Sent whenever an `OUTPUT` buffer is done processing and can be reused.
    InputBufferDone(u32),
    /// Sent whenever a decoded frame is ready on the `CAPTURE` queue.
    FrameCompleted {
        /// Index of the `CAPTURE` buffer containing the decoded frame.
        buffer_id: u32,
        /// Timestamp to report for the frame.
        timestamp: bindings::timeval,
        /// Number of bytes used in each plane of the buffer.
        bytes_used: Vec<u32>,
        /// Whether this is the last frame of the stream (e.g. completion of a drain).
        is_last: bool,
    },
}
119 
/// Description of the current stream parameters, as parsed from the input.
#[derive(Clone)]
pub struct StreamParams {
    /// Minimum number of output buffers necessary to decode the stream.
    pub min_output_buffers: u32,
    /// Coded size of the stream, as `(width, height)`.
    pub coded_size: (u32, u32),
    /// Visible rectangle containing the part of the frame to display.
    pub visible_rect: v4l2r::Rect,
}
130 
/// Trait for a video decoding session.
pub trait VideoDecoderBackendSession {
    /// Type of the backing storage used for buffers of this session.
    type BufferStorage: VideoDecoderBufferBacking;

    /// Decode the encoded stream in `input`, of length `bytes_used`, which corresponds to
    /// OUTPUT buffer `index`.
    ///
    /// `timestamp` is the timestamp of the frame, to be reported in any frame produced from this
    /// call.
    fn decode(
        &mut self,
        input: &Self::BufferStorage,
        index: u32,
        timestamp: bindings::timeval,
        bytes_used: u32,
    ) -> IoctlResult<()>;

    /// Use `backing` as the backing storage for output buffer `index`.
    fn use_as_output(&mut self, index: u32, backing: &mut Self::BufferStorage) -> IoctlResult<()>;

    /// Start draining the decoder pipeline for all buffers still in it.
    ///
    /// The backend will report a frame with the `V4L2_BUF_FLAG_LAST` once the drain
    /// process is completed.
    fn drain(&mut self) -> IoctlResult<()>;

    /// Remove any output buffer that has been previously added using
    /// [`Self::use_as_output`].
    fn clear_output_buffers(&mut self) -> IoctlResult<()>;

    /// Returns the next pending event if there is one, or `None` if there aren't any.
    fn next_event(&mut self) -> Option<VideoDecoderBackendEvent>;

    /// Returns the current format set for the given `direction`, in a form suitable as a reply to
    /// `VIDIOC_G_FMT`.
    fn current_format(&self, direction: QueueDirection) -> V4l2MplaneFormat;

    /// Returns the stream parameters as read from the input.
    fn stream_params(&self) -> StreamParams;

    /// Called whenever the decoder device has allocated buffers for a given queue.
    ///
    /// This can be useful for some backends that need to know how many buffers they will work
    /// with. The default implementation does nothing, which should be suitable for backends that
    /// don't care.
    fn buffers_allocated(&mut self, _direction: QueueDirection, _num_buffers: u32) {}

    /// Returns a file descriptor that signals `POLLIN` whenever an event is pending and can be
    /// read using [`Self::next_event`], or `None` if the backend does not support this.
    fn poll_fd(&self) -> Option<BorrowedFd> {
        None
    }

    /// Optional hook called whenever the streaming state of a queue changes. Some backends may
    /// need this information to operate properly.
    fn streaming_state(&mut self, _direction: QueueDirection, _streaming: bool) {}

    /// Optional hook called by the decoder to signal it has processed a pausing event
    /// sent by the backend.
    ///
    /// Pausing events are currently limited to [`VideoDecoderBackendEvent::StreamFormatChanged`].
    /// Whenever the resolution changes, the backend must stop processing until the decoder has
    /// adapted its conditions for decoding to resume (e.g. CAPTURE buffers of the proper size and
    /// format have been allocated).
    fn resume(&mut self) {}
}
196 
/// Streaming state of a session, derived from the STREAMON/STREAMOFF status of both queues and
/// the decoder commands received.
#[derive(Debug)]
enum VideoDecoderStreamingState {
    /// Initial state, and state after a `STOP` command or a successful drain. Contains the
    /// state of both streaming queues.
    Stopped {
        /// Whether the `OUTPUT` (bitstream input) queue is streaming.
        input_streaming: bool,
        /// Whether the `CAPTURE` (decoded frames) queue is streaming.
        output_streaming: bool,
    },
    /// State when both queues are streaming.
    Running,
    /// State when a `PAUSE` command has been received. Both queues are streaming in this state.
    Paused,
}
211 
212 impl Default for VideoDecoderStreamingState {
default() -> Self213     fn default() -> Self {
214         Self::Stopped {
215             input_streaming: false,
216             output_streaming: false,
217         }
218     }
219 }
220 
221 impl VideoDecoderStreamingState {
input_streamon(&mut self)222     fn input_streamon(&mut self) {
223         match self {
224             Self::Stopped {
225                 ref mut input_streaming,
226                 output_streaming,
227             } if !(*input_streaming) => {
228                 *input_streaming = true;
229                 // If we switch to a state where both queues are streaming, then the device is
230                 // running.
231                 if *output_streaming {
232                     *self = Self::Running;
233                 }
234             }
235             Self::Stopped { .. } | Self::Running | Self::Paused => (),
236         }
237     }
238 
input_streamoff(&mut self)239     fn input_streamoff(&mut self) {
240         match self {
241             Self::Stopped {
242                 ref mut input_streaming,
243                 ..
244             } => *input_streaming = false,
245             Self::Running | Self::Paused => {
246                 *self = Self::Stopped {
247                     input_streaming: false,
248                     output_streaming: true,
249                 }
250             }
251         }
252     }
253 
output_streamon(&mut self)254     fn output_streamon(&mut self) {
255         match self {
256             Self::Stopped {
257                 input_streaming,
258                 ref mut output_streaming,
259             } if !(*output_streaming) => {
260                 *output_streaming = true;
261                 // If we switch to a state where both queues are streaming, then the device is
262                 // running.
263                 if *input_streaming {
264                     *self = Self::Running;
265                 }
266             }
267             Self::Stopped { .. } | Self::Running | Self::Paused => (),
268         }
269     }
270 
output_streamoff(&mut self)271     fn output_streamoff(&mut self) {
272         match self {
273             Self::Stopped {
274                 ref mut output_streaming,
275                 ..
276             } => *output_streaming = false,
277             Self::Running | Self::Paused => {
278                 *self = Self::Stopped {
279                     input_streaming: true,
280                     output_streaming: false,
281                 }
282             }
283         }
284     }
285 
is_output_streaming(&mut self) -> bool286     fn is_output_streaming(&mut self) -> bool {
287         matches!(
288             self,
289             Self::Running
290                 | Self::Stopped {
291                     output_streaming: true,
292                     ..
293                 }
294         )
295     }
296 }
297 
/// Management of the crop rectangle.
///
/// There are two ways this parameter can be set:
///
/// * Manually by the client, by calling `VIDIOC_S_SELECTION` with `V4L2_SEL_TGT_COMPOSE`. This has
///   an effect only before the first resolution change event is emitted, and is the only way to
///   properly set the crop rectangle for codecs/hardware that don't support DRC detection.
///
/// * From the information contained in the stream, signaled via a
///   [`VideoDecoderBackendEvent::StreamFormatChanged`] event. Once this event has been emitted, the
///   crop rectangle is fixed and determined by the stream.
enum CropRectangle {
    /// Crop rectangle has not been determined from the stream yet and can be set by the client.
    Settable(v4l2r::Rect),
    /// Crop rectangle has been determined from the stream and cannot be modified.
    FromStream(v4l2r::Rect),
}
315 
316 impl Deref for CropRectangle {
317     type Target = v4l2r::Rect;
318 
deref(&self) -> &Self::Target319     fn deref(&self) -> &Self::Target {
320         match self {
321             CropRectangle::Settable(r) => r,
322             CropRectangle::FromStream(r) => r,
323         }
324     }
325 }
326 
/// Struct containing validated colorspace information for a format.
#[derive(Debug, Clone, Copy)]
struct V4l2FormatColorspace {
    // Colorspace of the format (`v4l2_pix_format_mplane::colorspace`).
    colorspace: Colorspace,
    // Transfer function (`v4l2_pix_format_mplane::xfer_func`).
    xfer_func: XferFunc,
    // Y'CbCr encoding (`v4l2_pix_format_mplane` anonymous union).
    ycbcr_enc: YCbCrEncoding,
    // Quantization range (`v4l2_pix_format_mplane::quantization`).
    quantization: Quantization,
}
335 
impl Default for V4l2FormatColorspace {
    /// Default colorspace information: Rec.709 colorspace and encoding, no transfer function,
    /// limited quantization range.
    fn default() -> Self {
        Self {
            colorspace: Colorspace::Rec709,
            xfer_func: XferFunc::None,
            ycbcr_enc: YCbCrEncoding::E709,
            quantization: Quantization::LimRange,
        }
    }
}
346 
impl V4l2FormatColorspace {
    /// Apply the colorspace information of this object to `pix_mp`.
    fn apply(self, pix_mp: &mut bindings::v4l2_pix_format_mplane) {
        pix_mp.colorspace = self.colorspace as u32;
        // `ycbcr_enc` lives in a bindgen-generated anonymous union (shared with `hsv_enc`), so
        // it must be written by constructing the union value.
        pix_mp.__bindgen_anon_1 = bindings::v4l2_pix_format_mplane__bindgen_ty_1 {
            ycbcr_enc: self.ycbcr_enc as u8,
        };
        pix_mp.quantization = self.quantization as u8;
        pix_mp.xfer_func = self.xfer_func as u8;
    }
}
358 
/// State of a single decoder session, tying a backend session to the V4L2 decoder model.
pub struct VideoDecoderSession<S: VideoDecoderBackendSession> {
    /// Unique identifier of this session, used when sending events for it.
    id: u32,

    /// Streaming state of the session's queues.
    state: VideoDecoderStreamingState,

    /// `OUTPUT` (bitstream) buffers, indexed by V4L2 buffer index.
    input_buffers: Vec<VideoDecoderBuffer<S::BufferStorage>>,
    /// `CAPTURE` (decoded frames) buffers, indexed by V4L2 buffer index.
    output_buffers: Vec<VideoDecoderBuffer<S::BufferStorage>>,
    /// Indices of CAPTURE buffers that are queued but not sent to the backend yet because the
    /// decoder is not running.
    pending_output_buffers: Vec<u32>,

    /// Counter used to set the `sequence` field of dequeued CAPTURE buffers.
    sequence_cpt: u32,

    /// Whether the input source change event has been subscribed to by the driver. If `true` then
    /// the device will emit resolution change events.
    src_change_subscribed: bool,
    /// Whether the EOS event has been subscribed to by the driver. If `true` then the device will
    /// emit EOS events.
    eos_subscribed: bool,

    /// Current crop rectangle, either set by the client or fixed by the stream.
    crop_rectangle: CropRectangle,

    /// Current colorspace information of the format.
    colorspace: V4l2FormatColorspace,

    /// Adapter-specific data.
    backend_session: S,
}
387 
impl<S: VideoDecoderBackendSession> VirtioMediaDeviceSession for VideoDecoderSession<S> {
    /// Delegates to the backend session's poll FD, if it provides one.
    fn poll_fd(&self) -> Option<BorrowedFd> {
        self.backend_session.poll_fd()
    }
}
393 
impl<S: VideoDecoderBackendSession> VideoDecoderSession<S> {
    /// Returns the current format for `direction`.
    ///
    /// This is essentially like calling the backend's corresponding
    /// [`VideoDecoderBackendSession::current_format`] method, but also applies the colorspace
    /// information potentially set by the user.
    fn current_format(&self, direction: QueueDirection) -> V4l2MplaneFormat {
        let format = self.backend_session.current_format(direction);

        // Copy the raw format so the session's colorspace information can be patched in.
        let mut pix_mp =
            *<V4l2MplaneFormat as AsRef<bindings::v4l2_pix_format_mplane>>::as_ref(&format);

        self.colorspace.apply(&mut pix_mp);

        V4l2MplaneFormat::from((direction, pix_mp))
    }

    /// Checks whether `cmd` is valid in the current session state, returning the command to
    /// execute on success or the errno to report otherwise.
    fn try_decoder_cmd(&self, cmd: DecoderCmd) -> IoctlResult<DecoderCmd> {
        match cmd {
            // STOP and START are accepted in any state.
            DecoderCmd::Stop { .. } => Ok(DecoderCmd::stop()),
            DecoderCmd::Start { .. } => Ok(DecoderCmd::start()),
            DecoderCmd::Pause { .. } => {
                match &self.state {
                    // The V4L2 documentation says this should return `EPERM`, but v4l2-compliance
                    // requires `EINVAL`...
                    VideoDecoderStreamingState::Stopped { .. } => Err(libc::EINVAL),
                    VideoDecoderStreamingState::Running | VideoDecoderStreamingState::Paused => {
                        Ok(DecoderCmd::pause())
                    }
                }
            }
            DecoderCmd::Resume => {
                match &self.state {
                    // The V4L2 documentation says this should return `EPERM`, but v4l2-compliance
                    // requires `EINVAL`...
                    VideoDecoderStreamingState::Stopped { .. } => Err(libc::EINVAL),
                    VideoDecoderStreamingState::Paused | VideoDecoderStreamingState::Running => {
                        Ok(DecoderCmd::resume())
                    }
                }
            }
        }
    }

    /// Send all the output buffers that are pending to the backend, if the decoder is running.
    ///
    /// In the adapter backend, if we receive buffers this means both queues are streaming - IOW we
    /// can queue them as soon as the condition is good.
    ///
    /// In the decoder device, we need to keep them until both queues are streaming. Same applies
    /// to input buffers BTW.
    fn try_send_pending_output_buffers(&mut self) {
        if !self.state.is_output_streaming() {
            return;
        }

        for i in self.pending_output_buffers.drain(..) {
            // NOTE(review): assumes every pending index refers to an allocated output buffer and
            // that the backend accepts it; either failing panics the device. TODO confirm these
            // invariants are enforced at queue time.
            let buffer = self.output_buffers.get_mut(i as usize).unwrap();
            self.backend_session
                .use_as_output(buffer.index(), &mut buffer.backing)
                .unwrap();
        }
    }
}
458 
/// Trait for actual implementations of video decoding, to be used with [`VideoDecoder`].
///
/// [`VideoDecoder`] takes care of (mostly) abstracting V4L2 away ; implementors of this trait are
/// the ones that provide the actual video decoding service.
pub trait VideoDecoderBackend: Sized {
    /// Type of the per-session state created by this backend.
    type Session: VideoDecoderBackendSession;

    /// Create a new session with the provided `id`.
    fn new_session(&mut self, id: u32) -> IoctlResult<Self::Session>;
    /// Close and destroy `session`.
    fn close_session(&mut self, session: Self::Session);

    /// Returns the format at `index` for the given queue `direction`, or None if `index` is out of
    /// bounds.
    fn enum_formats(
        &self,
        session: &VideoDecoderSession<Self::Session>,
        direction: QueueDirection,
        index: u32,
    ) -> Option<bindings::v4l2_fmtdesc>;
    /// Returns the supported frame sizes for `pixel_format`, or None if the format is not
    /// supported.
    fn frame_sizes(&self, pixel_format: u32) -> Option<bindings::v4l2_frmsize_stepwise>;

    /// Adjust `format` to make it applicable to the queue with the given `direction` for the current `session`.
    ///
    /// This method doesn't fail, implementations must return the closest acceptable format that
    /// can be applied unchanged with [`Self::apply_format`].
    fn adjust_format(
        &self,
        session: &Self::Session,
        direction: QueueDirection,
        format: V4l2MplaneFormat,
    ) -> V4l2MplaneFormat;

    /// Applies `format` to the queue of the given `direction`. The format is adjusted if needed.
    fn apply_format(
        &self,
        session: &mut Self::Session,
        direction: QueueDirection,
        format: &V4l2MplaneFormat,
    );
}
502 
/// The video decoder device, dispatching V4L2 requests to a [`VideoDecoderBackend`].
pub struct VideoDecoder<
    D: VideoDecoderBackend,
    Q: VirtioMediaEventQueue,
    HM: VirtioMediaHostMemoryMapper,
> {
    /// Provider of the actual decoding service.
    backend: D,
    /// Queue used to send V4L2 events (dequeued buffers, session events) to the driver.
    event_queue: Q,
    /// Manager for MMAP buffer offsets and guest mappings.
    host_mapper: MmapMappingManager<HM>,
}
512 
impl<B, Q, HM> VideoDecoder<B, Q, HM>
where
    B: VideoDecoderBackend,
    Q: VirtioMediaEventQueue,
    HM: VirtioMediaHostMemoryMapper,
{
    /// Creates a new decoder device using `backend` for decoding, sending events through
    /// `event_queue`, and managing host mappings with `host_mapper`.
    pub fn new(backend: B, event_queue: Q, host_mapper: HM) -> Self {
        Self {
            backend,
            event_queue,
            host_mapper: MmapMappingManager::from(host_mapper),
        }
    }

    /// Validate `format` for `queue` and return the adjusted format.
    fn try_format(
        &self,
        session: &VideoDecoderSession<B::Session>,
        queue: QueueType,
        format: bindings::v4l2_format,
    ) -> IoctlResult<V4l2MplaneFormat> {
        // This device only supports multiplanar queues.
        if queue.class() != QueueClass::VideoMplane {
            return Err(libc::EINVAL);
        }

        // SAFETY: safe because we have just confirmed the queue type is mplane.
        let pix_mp = unsafe { format.fmt.pix_mp };

        // Process the colorspace now so we can restore it after applying the backend adjustment.
        // Colorspace information is only taken from the driver on the OUTPUT queue; invalid
        // values fall back to the session's current ones.
        let colorspace = if queue.direction() == QueueDirection::Output {
            V4l2FormatColorspace {
                colorspace: Colorspace::n(pix_mp.colorspace)
                    .unwrap_or(session.colorspace.colorspace),
                xfer_func: XferFunc::n(pix_mp.xfer_func as u32)
                    .unwrap_or(session.colorspace.xfer_func),
                // SAFETY: presumably sound because every field of the bindgen-generated union is
                // a plain integer for which any bit pattern is valid — TODO confirm and finalize
                // this safety comment.
                ycbcr_enc: YCbCrEncoding::n(unsafe { pix_mp.__bindgen_anon_1.ycbcr_enc as u32 })
                    .unwrap_or(session.colorspace.ycbcr_enc),
                quantization: Quantization::n(pix_mp.quantization as u32)
                    .unwrap_or(session.colorspace.quantization),
            }
        } else {
            session.colorspace
        };

        let format = V4l2MplaneFormat::from((queue.direction(), pix_mp));

        // Let the backend adjust the format to something it supports...
        let format =
            self.backend
                .adjust_format(&session.backend_session, queue.direction(), format);

        let mut pix_mp =
            *<V4l2MplaneFormat as AsRef<bindings::v4l2_pix_format_mplane>>::as_ref(&format);

        // ...then re-apply the validated colorspace information on top of it.
        colorspace.apply(&mut pix_mp);

        Ok(V4l2MplaneFormat::from((queue.direction(), pix_mp)))
    }
}
572 
impl<B, Q, HM, Reader, Writer> VirtioMediaDevice<Reader, Writer> for VideoDecoder<B, Q, HM>
where
    B: VideoDecoderBackend,
    Q: VirtioMediaEventQueue,
    HM: VirtioMediaHostMemoryMapper,
    Reader: std::io::Read,
    Writer: std::io::Write,
{
    type Session = <Self as VirtioMediaIoctlHandler>::Session;

    /// Creates a new session with default state, backed by a new backend session.
    fn new_session(&mut self, session_id: u32) -> Result<Self::Session, i32> {
        let backend_session = self.backend.new_session(session_id)?;

        Ok(VideoDecoderSession {
            id: session_id,
            backend_session,
            state: Default::default(),
            input_buffers: Default::default(),
            output_buffers: Default::default(),
            pending_output_buffers: Default::default(),
            sequence_cpt: 0,
            src_change_subscribed: false,
            eos_subscribed: false,
            // Settable by the client until the first resolution change event fixes it.
            crop_rectangle: CropRectangle::Settable(v4l2r::Rect::new(0, 0, 0, 0)),
            colorspace: Default::default(),
        })
    }

    /// Tears down `session`, releasing its MMAP registrations.
    fn close_session(&mut self, session: Self::Session) {
        // Unregister all MMAP buffers.
        for buffer in session
            .input_buffers
            .iter()
            .chain(session.output_buffers.iter())
        {
            if let V4l2PlanesWithBacking::Mmap(planes) =
                buffer.v4l2_buffer.planes_with_backing_iter()
            {
                for plane in planes {
                    self.host_mapper.unregister_buffer(plane.mem_offset());
                }
            }
        }
    }

    /// Dispatches `ioctl` to the matching `VirtioMediaIoctlHandler` method of this device.
    fn do_ioctl(
        &mut self,
        session: &mut Self::Session,
        ioctl: V4l2Ioctl,
        reader: &mut Reader,
        writer: &mut Writer,
    ) -> std::io::Result<()> {
        virtio_media_dispatch_ioctl(self, session, ioctl, reader, writer)
    }

    /// Maps the MMAP plane registered at `offset` into the guest, honoring the RW flag.
    ///
    /// Returns the guest address and size of the mapping.
    fn do_mmap(
        &mut self,
        session: &mut Self::Session,
        flags: u32,
        offset: u32,
    ) -> Result<(u64, u64), i32> {
        // Search for a MMAP plane with the right offset.
        // TODO: O(n), not critical but not great either.
        let (buffer, plane_idx) = session
            .input_buffers
            .iter()
            .chain(session.output_buffers.iter())
            .filter_map(|b| {
                if let V4l2PlanesWithBacking::Mmap(planes) =
                    b.v4l2_buffer.planes_with_backing_iter()
                {
                    // Pair each plane (with its index) with its owning buffer.
                    Some(std::iter::repeat(b).zip(planes.enumerate()))
                } else {
                    None
                }
            })
            .flatten()
            .find(|(_, (_, p))| p.mem_offset() == offset)
            .map(|(b, (i, _))| (b, i))
            .ok_or(libc::EINVAL)?;
        let rw = (flags & VIRTIO_MEDIA_MMAP_FLAG_RW) != 0;

        // NOTE(review): assumes a plane found by offset always has an exportable FD; a backend
        // returning `None` here panics — TODO confirm.
        let fd = buffer.backing.fd_for_plane(plane_idx).unwrap();

        self.host_mapper
            .create_mapping(offset, fd, rw)
            .map_err(|e| {
                log::error!(
                    "failed to map MMAP buffer at offset 0x{:x}: {:#}",
                    offset,
                    e
                );
                libc::EINVAL
            })
    }

    /// Removes the guest mapping previously created at `guest_addr`.
    fn do_munmap(&mut self, guest_addr: u64) -> Result<(), i32> {
        self.host_mapper
            .remove_mapping(guest_addr)
            .map(|_| ())
            .map_err(|_| libc::EINVAL)
    }

    /// Processes the next pending backend event for `session`, forwarding the corresponding
    /// V4L2 event(s) to the driver.
    fn process_events(&mut self, session: &mut Self::Session) -> Result<(), i32> {
        let has_event = if let Some(event) = session.backend_session.next_event() {
            match event {
                // The backend is done with an OUTPUT buffer: dequeue it back to the driver.
                VideoDecoderBackendEvent::InputBufferDone(id) => {
                    let Some(buffer) = session.input_buffers.get_mut(id as usize) else {
                        log::error!("no matching OUTPUT buffer with id {} to process event", id);
                        return Ok(());
                    };

                    buffer.v4l2_buffer.clear_flags(BufferFlags::QUEUED);

                    self.event_queue
                        .send_event(V4l2Event::DequeueBuffer(DequeueBufferEvent::new(
                            session.id,
                            buffer.v4l2_buffer.clone(),
                        )));
                }
                // Resolution (or other stream parameter) change detected in the stream.
                VideoDecoderBackendEvent::StreamFormatChanged => {
                    let stream_params = session.backend_session.stream_params();

                    // The crop rectangle is now determined by the stream and cannot be changed.
                    session.crop_rectangle = CropRectangle::FromStream(stream_params.visible_rect);

                    // Only notify the driver if it subscribed to source change events.
                    if session.src_change_subscribed {
                        self.event_queue
                            .send_event(V4l2Event::Event(SessionEvent::new(
                                session.id,
                                bindings::v4l2_event {
                                    type_: bindings::V4L2_EVENT_SOURCE_CHANGE,
                                    u: bindings::v4l2_event__bindgen_ty_1 {
                                        src_change: bindings::v4l2_event_src_change {
                                            changes: SrcChanges::RESOLUTION.bits(),
                                        },
                                    },
                                    // TODO: fill pending, sequence, and timestamp.
                                    ..Default::default()
                                },
                            )))
                    }
                }
                // A decoded frame is ready: fill in the CAPTURE buffer state and dequeue it.
                VideoDecoderBackendEvent::FrameCompleted {
                    buffer_id,
                    timestamp,
                    bytes_used,
                    is_last,
                } => {
                    let Some(buffer) = session.output_buffers.get_mut(buffer_id as usize) else {
                        log::error!(
                            "no matching CAPTURE buffer with id {} to process event",
                            buffer_id
                        );
                        return Ok(());
                    };

                    buffer.v4l2_buffer.clear_flags(BufferFlags::QUEUED);
                    buffer.v4l2_buffer.set_flags(BufferFlags::TIMESTAMP_COPY);
                    if is_last {
                        // Mark the end of the stream as required by the V4L2 decoder API.
                        buffer.v4l2_buffer.set_flags(BufferFlags::LAST);
                    }
                    buffer.v4l2_buffer.set_sequence(session.sequence_cpt);
                    session.sequence_cpt += 1;
                    buffer.v4l2_buffer.set_timestamp(timestamp);
                    // NOTE(review): only the first plane's bytesused is filled in; multi-planar
                    // formats with more than one plane would lose information here — TODO confirm.
                    let first_plane = buffer.v4l2_buffer.get_first_plane_mut();
                    *first_plane.bytesused = bytes_used.first().copied().unwrap_or(0);
                    self.event_queue
                        .send_event(V4l2Event::DequeueBuffer(DequeueBufferEvent::new(
                            session.id,
                            buffer.v4l2_buffer.clone(),
                        )));

                    // Also signal EOS if the driver subscribed to it.
                    if is_last && session.eos_subscribed {
                        self.event_queue
                            .send_event(V4l2Event::Event(SessionEvent::new(
                                session.id,
                                bindings::v4l2_event {
                                    type_: bindings::V4L2_EVENT_EOS,
                                    ..Default::default()
                                },
                            )))
                    }
                }
            }
            true
        } else {
            false
        };

        if !has_event {
            log::warn!("process_events called but no event was pending");
        }

        Ok(())
    }
}
770 
771 impl<B, Q, HM> VirtioMediaIoctlHandler for VideoDecoder<B, Q, HM>
772 where
773     B: VideoDecoderBackend,
774     Q: VirtioMediaEventQueue,
775     HM: VirtioMediaHostMemoryMapper,
776 {
777     type Session = VideoDecoderSession<B::Session>;
778 
enum_fmt( &mut self, session: &Self::Session, queue: QueueType, index: u32, ) -> IoctlResult<bindings::v4l2_fmtdesc>779     fn enum_fmt(
780         &mut self,
781         session: &Self::Session,
782         queue: QueueType,
783         index: u32,
784     ) -> IoctlResult<bindings::v4l2_fmtdesc> {
785         match queue {
786             QueueType::VideoOutputMplane | QueueType::VideoCaptureMplane => {
787                 self.backend.enum_formats(session, queue.direction(), index)
788             }
789             _ => None,
790         }
791         .ok_or(libc::EINVAL)
792     }
793 
enum_framesizes( &mut self, _session: &Self::Session, index: u32, pixel_format: u32, ) -> IoctlResult<bindings::v4l2_frmsizeenum>794     fn enum_framesizes(
795         &mut self,
796         _session: &Self::Session,
797         index: u32,
798         pixel_format: u32,
799     ) -> IoctlResult<bindings::v4l2_frmsizeenum> {
800         // We only support step-wise frame sizes.
801         if index != 0 {
802             return Err(libc::EINVAL);
803         }
804 
805         Ok(bindings::v4l2_frmsizeenum {
806             index: 0,
807             pixel_format,
808             type_: bindings::v4l2_frmsizetypes_V4L2_FRMSIZE_TYPE_STEPWISE,
809             __bindgen_anon_1: bindings::v4l2_frmsizeenum__bindgen_ty_1 {
810                 stepwise: self.backend.frame_sizes(pixel_format).ok_or(libc::EINVAL)?,
811             },
812             ..Default::default()
813         })
814     }
815 
g_fmt( &mut self, session: &Self::Session, queue: QueueType, ) -> IoctlResult<bindings::v4l2_format>816     fn g_fmt(
817         &mut self,
818         session: &Self::Session,
819         queue: QueueType,
820     ) -> IoctlResult<bindings::v4l2_format> {
821         if !matches!(
822             queue,
823             QueueType::VideoOutputMplane | QueueType::VideoCaptureMplane,
824         ) {
825             return Err(libc::EINVAL);
826         }
827 
828         let format = session.current_format(queue.direction());
829         let v4l2_format: &bindings::v4l2_format = format.as_ref();
830         Ok(*v4l2_format)
831     }
832 
try_fmt( &mut self, session: &Self::Session, queue: QueueType, format: bindings::v4l2_format, ) -> IoctlResult<bindings::v4l2_format>833     fn try_fmt(
834         &mut self,
835         session: &Self::Session,
836         queue: QueueType,
837         format: bindings::v4l2_format,
838     ) -> IoctlResult<bindings::v4l2_format> {
839         let format = self.try_format(session, queue, format)?;
840 
841         let v4l2_format: &bindings::v4l2_format = format.as_ref();
842         Ok(*v4l2_format)
843     }
844 
s_fmt( &mut self, session: &mut Self::Session, queue: QueueType, format: bindings::v4l2_format, ) -> IoctlResult<bindings::v4l2_format>845     fn s_fmt(
846         &mut self,
847         session: &mut Self::Session,
848         queue: QueueType,
849         format: bindings::v4l2_format,
850     ) -> IoctlResult<bindings::v4l2_format> {
851         let format = self.try_format(session, queue, format)?;
852 
853         self.backend
854             .apply_format(&mut session.backend_session, queue.direction(), &format);
855 
856         //  Setting the colorspace information on the `OUTPUT` queue sets it for both queues.
857         if queue.direction() == QueueDirection::Output {
858             session.colorspace.colorspace = format.colorspace();
859             session.colorspace.xfer_func = format.xfer_func();
860             session.colorspace.ycbcr_enc = format.ycbcr_enc();
861             session.colorspace.quantization = format.quantization();
862         }
863 
864         // If the crop rectangle is still settable, adjust it to the size of the new format.
865         if let CropRectangle::Settable(rect) = &mut session.crop_rectangle {
866             let (width, height) = format.size();
867             *rect = v4l2r::Rect::new(0, 0, width, height);
868         }
869 
870         let v4l2_format: &bindings::v4l2_format = format.as_ref();
871         Ok(*v4l2_format)
872     }
873 
reqbufs( &mut self, session: &mut Self::Session, queue: QueueType, memory: MemoryType, count: u32, ) -> IoctlResult<bindings::v4l2_requestbuffers>874     fn reqbufs(
875         &mut self,
876         session: &mut Self::Session,
877         queue: QueueType,
878         memory: MemoryType,
879         count: u32,
880     ) -> IoctlResult<bindings::v4l2_requestbuffers> {
881         if memory != MemoryType::Mmap {
882             return Err(libc::EINVAL);
883         }
884         // TODO: fail if streaming?
885 
886         let (buffers, count) = match queue {
887             QueueType::VideoOutputMplane => (&mut session.input_buffers, count),
888             QueueType::VideoCaptureMplane => (
889                 &mut session.output_buffers,
890                 // TODO: no no, we need to reallocate all the buffers if the queue parameters have
891                 // changed... especially if the new format won't fit into the old buffers!
892                 // count.max(session.backend_session.stream_params().min_output_buffers),
893                 count,
894             ),
895             _ => return Err(libc::EINVAL),
896         };
897 
898         if (count as usize) < buffers.len() {
899             for buffer in &buffers[count as usize..] {
900                 if let V4l2PlanesWithBacking::Mmap(planes) =
901                     buffer.v4l2_buffer.planes_with_backing_iter()
902                 {
903                     for plane in planes {
904                         self.host_mapper.unregister_buffer(plane.mem_offset());
905                     }
906                 }
907             }
908             buffers.truncate(count as usize);
909         } else {
910             let sizeimage = session
911                 .backend_session
912                 .current_format(queue.direction())
913                 .planes()
914                 .first()
915                 .ok_or(libc::EINVAL)?
916                 .sizeimage;
917             let new_buffers = (buffers.len()..count as usize)
918                 .map(|i| {
919                     let mmap_offset = self
920                         .host_mapper
921                         .register_buffer(None, sizeimage)
922                         .map_err(|_| libc::EINVAL)?;
923 
924                     VideoDecoderBuffer::new(
925                         queue,
926                         i as u32,
927                         // TODO: only single-planar formats supported.
928                         &[sizeimage as usize],
929                         mmap_offset,
930                     )
931                     .map_err(|e| {
932                         // TODO: no, we need to unregister all the buffers and restore the
933                         // previous state?
934                         self.host_mapper.unregister_buffer(mmap_offset);
935                         e
936                     })
937                 })
938                 .collect::<IoctlResult<Vec<_>>>()?;
939             buffers.extend(new_buffers);
940         }
941 
942         session
943             .backend_session
944             .buffers_allocated(queue.direction(), count);
945 
946         Ok(bindings::v4l2_requestbuffers {
947             count,
948             type_: queue as u32,
949             memory: memory as u32,
950             capabilities: (BufferCapabilities::SUPPORTS_MMAP
951                 | BufferCapabilities::SUPPORTS_ORPHANED_BUFS)
952                 .bits(),
953             flags: 0,
954             reserved: Default::default(),
955         })
956     }
957 
querybuf( &mut self, session: &Self::Session, queue: QueueType, index: u32, ) -> IoctlResult<V4l2Buffer>958     fn querybuf(
959         &mut self,
960         session: &Self::Session,
961         queue: QueueType,
962         index: u32,
963     ) -> IoctlResult<V4l2Buffer> {
964         let buffers = match queue {
965             QueueType::VideoOutputMplane => &session.input_buffers,
966             QueueType::VideoCaptureMplane => &session.output_buffers,
967             _ => return Err(libc::EINVAL),
968         };
969         let buffer = buffers.get(index as usize).ok_or(libc::EINVAL)?;
970 
971         Ok(buffer.v4l2_buffer.clone())
972     }
973 
subscribe_event( &mut self, session: &mut Self::Session, event: v4l2r::ioctl::EventType, _flags: v4l2r::ioctl::SubscribeEventFlags, ) -> IoctlResult<()>974     fn subscribe_event(
975         &mut self,
976         session: &mut Self::Session,
977         event: v4l2r::ioctl::EventType,
978         _flags: v4l2r::ioctl::SubscribeEventFlags,
979     ) -> IoctlResult<()> {
980         match event {
981             EventType::SourceChange(0) => {
982                 session.src_change_subscribed = true;
983                 Ok(())
984             }
985             EventType::Eos => {
986                 session.eos_subscribed = true;
987                 Ok(())
988             }
989             _ => Err(libc::EINVAL),
990         }
991     }
992 
993     // TODO: parse the event and use an enum value to signal ALL or single event?
unsubscribe_event( &mut self, session: &mut Self::Session, event: bindings::v4l2_event_subscription, ) -> IoctlResult<()>994     fn unsubscribe_event(
995         &mut self,
996         session: &mut Self::Session,
997         event: bindings::v4l2_event_subscription,
998     ) -> IoctlResult<()> {
999         let mut valid = false;
1000 
1001         if event.type_ == 0 || matches!(EventType::try_from(&event), Ok(EventType::SourceChange(0)))
1002         {
1003             session.src_change_subscribed = false;
1004             valid = true;
1005         }
1006         if event.type_ == 0 || matches!(EventType::try_from(&event), Ok(EventType::Eos)) {
1007             session.eos_subscribed = false;
1008             valid = true;
1009         }
1010 
1011         if valid {
1012             Ok(())
1013         } else {
1014             Err(libc::EINVAL)
1015         }
1016     }
1017 
streamon(&mut self, session: &mut Self::Session, queue: QueueType) -> IoctlResult<()>1018     fn streamon(&mut self, session: &mut Self::Session, queue: QueueType) -> IoctlResult<()> {
1019         let buffers = match queue {
1020             QueueType::VideoOutputMplane => &session.input_buffers,
1021             QueueType::VideoCaptureMplane => &session.output_buffers,
1022             _ => return Err(libc::EINVAL),
1023         };
1024 
1025         let already_running = matches!(session.state, VideoDecoderStreamingState::Running);
1026 
1027         // Cannot stream if no buffers allocated.
1028         if buffers.is_empty() {
1029             return Err(libc::EINVAL);
1030         }
1031 
1032         match queue.direction() {
1033             QueueDirection::Output => session.state.input_streamon(),
1034             QueueDirection::Capture => session.state.output_streamon(),
1035         }
1036 
1037         session
1038             .backend_session
1039             .streaming_state(queue.direction(), true);
1040 
1041         if !already_running && matches!(session.state, VideoDecoderStreamingState::Running) {
1042             // TODO: start queueing pending buffers?
1043         }
1044 
1045         session.try_send_pending_output_buffers();
1046 
1047         Ok(())
1048     }
1049 
streamoff(&mut self, session: &mut Self::Session, queue: QueueType) -> IoctlResult<()>1050     fn streamoff(&mut self, session: &mut Self::Session, queue: QueueType) -> IoctlResult<()> {
1051         let buffers = match queue.direction() {
1052             QueueDirection::Output => {
1053                 // TODO: something to do on the backend?
1054                 session.state.input_streamoff();
1055 
1056                 &mut session.input_buffers
1057             }
1058             QueueDirection::Capture => {
1059                 session.backend_session.clear_output_buffers()?;
1060                 session.state.output_streamoff();
1061                 session.pending_output_buffers.clear();
1062 
1063                 &mut session.output_buffers
1064             }
1065         };
1066 
1067         for buffer in buffers {
1068             buffer.v4l2_buffer.clear_flags(BufferFlags::QUEUED);
1069         }
1070 
1071         session
1072             .backend_session
1073             .streaming_state(queue.direction(), false);
1074 
1075         Ok(())
1076     }
1077 
g_selection( &mut self, session: &Self::Session, sel_type: SelectionType, sel_target: SelectionTarget, ) -> IoctlResult<bindings::v4l2_rect>1078     fn g_selection(
1079         &mut self,
1080         session: &Self::Session,
1081         sel_type: SelectionType,
1082         sel_target: SelectionTarget,
1083     ) -> IoctlResult<bindings::v4l2_rect> {
1084         match (sel_type, sel_target) {
1085             // Coded resolution of the stream.
1086             (SelectionType::Capture, SelectionTarget::CropBounds) => {
1087                 let coded_size = session.backend_session.stream_params().coded_size;
1088                 Ok(v4l2r::Rect::new(0, 0, coded_size.0, coded_size.1).into())
1089             }
1090             // Visible area of CAPTURE buffers.
1091             (
1092                 SelectionType::Capture,
1093                 SelectionTarget::Crop
1094                 | SelectionTarget::CropDefault
1095                 | SelectionTarget::ComposeDefault
1096                 | SelectionTarget::ComposeBounds
1097                 | SelectionTarget::Compose,
1098             ) => {
1099                 //Ok(session.backend_session.stream_params().visible_rect.into())
1100                 Ok((*session.crop_rectangle).into())
1101             }
1102             _ => Err(libc::EINVAL),
1103         }
1104     }
1105 
s_selection( &mut self, session: &mut Self::Session, sel_type: SelectionType, sel_target: SelectionTarget, mut sel_rect: bindings::v4l2_rect, _sel_flags: v4l2r::ioctl::SelectionFlags, ) -> IoctlResult<bindings::v4l2_rect>1106     fn s_selection(
1107         &mut self,
1108         session: &mut Self::Session,
1109         sel_type: SelectionType,
1110         sel_target: SelectionTarget,
1111         mut sel_rect: bindings::v4l2_rect,
1112         _sel_flags: v4l2r::ioctl::SelectionFlags,
1113     ) -> IoctlResult<bindings::v4l2_rect> {
1114         if !matches!(
1115             (sel_type, sel_target),
1116             (SelectionType::Capture, SelectionTarget::Compose)
1117         ) {
1118             return Err(libc::EINVAL);
1119         }
1120 
1121         // If the crop rectangle is still settable, allow its modification within the bounds of the
1122         // coded resolution.
1123         if let CropRectangle::Settable(rect) = &mut session.crop_rectangle {
1124             let coded_size = session
1125                 .backend_session
1126                 .current_format(QueueDirection::Capture)
1127                 .size();
1128             sel_rect.left = std::cmp::max(0, sel_rect.left);
1129             sel_rect.top = std::cmp::max(0, sel_rect.top);
1130             sel_rect.width = std::cmp::min(coded_size.0, sel_rect.width - sel_rect.left as u32);
1131             sel_rect.height = std::cmp::min(coded_size.0, sel_rect.height - sel_rect.top as u32);
1132 
1133             *rect = sel_rect.into();
1134         }
1135 
1136         self.g_selection(session, sel_type, sel_target)
1137     }
1138 
qbuf( &mut self, session: &mut Self::Session, buffer: V4l2Buffer, _guest_regions: Vec<Vec<SgEntry>>, ) -> IoctlResult<V4l2Buffer>1139     fn qbuf(
1140         &mut self,
1141         session: &mut Self::Session,
1142         buffer: V4l2Buffer,
1143         _guest_regions: Vec<Vec<SgEntry>>,
1144     ) -> IoctlResult<V4l2Buffer> {
1145         let buffers = match buffer.queue() {
1146             QueueType::VideoOutputMplane => &mut session.input_buffers,
1147             QueueType::VideoCaptureMplane => &mut session.output_buffers,
1148             _ => return Err(libc::EINVAL),
1149         };
1150         let host_buffer = buffers
1151             .get_mut(buffer.index() as usize)
1152             .ok_or(libc::EINVAL)?;
1153 
1154         // Check that the buffer's memory type corresponds to the one requested during allocation.
1155         if buffer.memory() != host_buffer.v4l2_buffer.memory() {
1156             return Err(libc::EINVAL);
1157         }
1158 
1159         match buffer.queue().direction() {
1160             QueueDirection::Output => {
1161                 // Update buffer state
1162                 let v4l2_buffer = &mut host_buffer.v4l2_buffer;
1163                 v4l2_buffer.set_field(BufferField::None);
1164                 v4l2_buffer.set_timestamp(buffer.timestamp());
1165                 let first_plane = buffer.get_first_plane();
1166                 *v4l2_buffer.get_first_plane_mut().bytesused = *first_plane.bytesused;
1167                 let host_first_plane = v4l2_buffer.get_first_plane_mut();
1168                 *host_first_plane.length = *first_plane.length;
1169                 *host_first_plane.bytesused = *first_plane.bytesused;
1170                 if let Some(data_offset) = host_first_plane.data_offset {
1171                     *data_offset = first_plane.data_offset.copied().unwrap_or(0);
1172                 }
1173 
1174                 let bytes_used = {
1175                     let first_plane = host_buffer.v4l2_buffer.get_first_plane();
1176                     // V4L2's spec mentions that if `bytes_used == 0` then the whole buffer is considered to be
1177                     // used.
1178                     if *first_plane.bytesused == 0 {
1179                         *first_plane.length
1180                     } else {
1181                         *first_plane.bytesused
1182                     }
1183                 };
1184 
1185                 session.backend_session.decode(
1186                     &host_buffer.backing,
1187                     host_buffer.index(),
1188                     host_buffer.timestamp(),
1189                     bytes_used,
1190                 )?;
1191 
1192                 host_buffer.v4l2_buffer.add_flags(BufferFlags::QUEUED);
1193 
1194                 Ok(host_buffer.v4l2_buffer.clone())
1195             }
1196             QueueDirection::Capture => {
1197                 // Update buffer state
1198                 let v4l2_buffer = &mut host_buffer.v4l2_buffer;
1199                 v4l2_buffer.add_flags(BufferFlags::QUEUED);
1200                 v4l2_buffer.clear_flags(BufferFlags::LAST);
1201                 let host_first_plane = v4l2_buffer.get_first_plane_mut();
1202                 let first_plane = buffer.get_first_plane();
1203                 *host_first_plane.length = *first_plane.length;
1204                 *host_first_plane.bytesused = *first_plane.bytesused;
1205                 if let Some(data_offset) = host_first_plane.data_offset {
1206                     *data_offset = first_plane.data_offset.copied().unwrap_or(0);
1207                 }
1208 
1209                 let res = v4l2_buffer.clone();
1210 
1211                 session.pending_output_buffers.push(buffer.index());
1212                 session.try_send_pending_output_buffers();
1213 
1214                 Ok(res)
1215             }
1216         }
1217     }
1218 
try_decoder_cmd( &mut self, session: &Self::Session, cmd: bindings::v4l2_decoder_cmd, ) -> IoctlResult<bindings::v4l2_decoder_cmd>1219     fn try_decoder_cmd(
1220         &mut self,
1221         session: &Self::Session,
1222         cmd: bindings::v4l2_decoder_cmd,
1223     ) -> IoctlResult<bindings::v4l2_decoder_cmd> {
1224         let cmd = DecoderCmd::try_from(cmd).map_err(|_| libc::EINVAL)?;
1225         session.try_decoder_cmd(cmd).map(Into::into)
1226     }
1227 
decoder_cmd( &mut self, session: &mut Self::Session, cmd: bindings::v4l2_decoder_cmd, ) -> IoctlResult<bindings::v4l2_decoder_cmd>1228     fn decoder_cmd(
1229         &mut self,
1230         session: &mut Self::Session,
1231         cmd: bindings::v4l2_decoder_cmd,
1232     ) -> IoctlResult<bindings::v4l2_decoder_cmd> {
1233         let cmd = DecoderCmd::try_from(cmd).map_err(|_| libc::EINVAL)?;
1234         let cmd = session.try_decoder_cmd(cmd)?;
1235 
1236         // The command is valid, apply it.
1237         match cmd {
1238             DecoderCmd::Stop { .. } => {
1239                 // Switch to stopped state if we aren't already there.
1240                 if !matches!(session.state, VideoDecoderStreamingState::Stopped { .. }) {
1241                     session.state = VideoDecoderStreamingState::Stopped {
1242                         input_streaming: true,
1243                         output_streaming: true,
1244                     };
1245 
1246                     // Start the `DRAIN` sequence.
1247                     session.backend_session.drain()?;
1248                 }
1249             }
1250             DecoderCmd::Start { .. } => {
1251                 // Restart the decoder if we were in the stopped state with both queues streaming.
1252                 if let VideoDecoderStreamingState::Stopped {
1253                     input_streaming,
1254                     output_streaming,
1255                 } = &session.state
1256                 {
1257                     if *input_streaming && *output_streaming {
1258                         session.state = VideoDecoderStreamingState::Running;
1259                         session
1260                             .backend_session
1261                             .streaming_state(QueueDirection::Capture, true);
1262                     }
1263                     session.try_send_pending_output_buffers();
1264                 }
1265             }
1266             DecoderCmd::Pause { .. } => {
1267                 if matches!(session.state, VideoDecoderStreamingState::Running) {
1268                     session.state = VideoDecoderStreamingState::Paused;
1269                 }
1270             }
1271             DecoderCmd::Resume => {
1272                 if matches!(session.state, VideoDecoderStreamingState::Paused) {
1273                     session.state = VideoDecoderStreamingState::Running;
1274                 }
1275             }
1276         }
1277 
1278         Ok(cmd.into())
1279     }
1280 }
1281