xref: /aosp_15_r20/external/crosvm/devices/src/virtio/video/encoder/mod.rs (revision bb4ee6a4ae7042d18b07a98463b9c8b875e44b39)
1 // Copyright 2020 The ChromiumOS Authors
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 //! Implementation of the `EncoderDevice` struct, which is responsible for translation between the
6 //! virtio protocols and LibVDA APIs.
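//!
//! `EncoderDevice` dispatches incoming virtio commands to an implementation of the `Encoder`
//! backend trait and keeps per-stream `Stream` state (attached resources, pending commands and
//! EOS bookkeeping).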
7 
8 pub mod backend;
9 
10 use std::collections::BTreeMap;
11 use std::collections::BTreeSet;
12 
13 use backend::*;
14 use base::debug;
15 use base::error;
16 use base::info;
17 use base::warn;
18 use base::Tube;
19 use base::WaitContext;
20 use vm_memory::GuestMemory;
21 
22 use crate::virtio::video::async_cmd_desc_map::AsyncCmdDescMap;
23 use crate::virtio::video::command::QueueType;
24 use crate::virtio::video::command::VideoCmd;
25 use crate::virtio::video::control::*;
26 use crate::virtio::video::device::AsyncCmdResponse;
27 use crate::virtio::video::device::AsyncCmdTag;
28 use crate::virtio::video::device::Device;
29 use crate::virtio::video::device::Token;
30 use crate::virtio::video::device::VideoCmdResponseType;
31 use crate::virtio::video::device::VideoEvtResponseType;
32 use crate::virtio::video::error::VideoError;
33 use crate::virtio::video::error::VideoResult;
34 use crate::virtio::video::event::EvtType;
35 use crate::virtio::video::event::VideoEvt;
36 use crate::virtio::video::format::find_closest_resolution;
37 use crate::virtio::video::format::Bitrate;
38 use crate::virtio::video::format::BitrateMode;
39 use crate::virtio::video::format::Format;
40 use crate::virtio::video::format::FormatDesc;
41 use crate::virtio::video::format::Level;
42 use crate::virtio::video::format::PlaneFormat;
43 use crate::virtio::video::format::Profile;
44 use crate::virtio::video::params::Params;
45 use crate::virtio::video::protocol;
46 use crate::virtio::video::resource::*;
47 use crate::virtio::video::response::CmdResponse;
48 use crate::virtio::video::EosBufferManager;
49 
50 pub type InputBufferId = u32;
51 pub type OutputBufferId = u32;
52 
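/// Queueing state recorded when an input resource is submitted for encoding. `in_queue` is
/// cleared by a queue clear so that late completion events for this resource can be ignored.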
53 #[derive(Debug)]
54 struct QueuedInputResourceParams {
55     timestamp: u64,
56     in_queue: bool,
57 }
58 
59 struct InputResource {
60     resource: GuestResource,
61     queue_params: Option<QueuedInputResourceParams>,
62 }
63 
64 #[derive(Debug)]
65 struct QueuedOutputResourceParams {
66     in_queue: bool,
67 }
68 
69 struct OutputResource {
70     resource: GuestResource,
71     offset: u32,
72     queue_params: Option<QueuedOutputResourceParams>,
73 }
74 
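/// Commands whose responses must be deferred until a later encoder event: the `Get*Params`
/// variants are answered once a `RequireInputBuffers` event arrives, while `Drain` and the
/// queue-clear variants are answered when a flush completes.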
75 #[derive(Debug, PartialEq, Eq, Hash, Ord, PartialOrd)]
76 enum PendingCommand {
77     // TODO(b/193202566): remove this is_ext parameter throughout the code along with
78     // support for the old GET_PARAMS and SET_PARAMS commands.
79     GetSrcParams { is_ext: bool },
80     GetDstParams { is_ext: bool },
81     Drain,
82     SrcQueueClear,
83     DstQueueClear,
84 }
85 
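/// Per-stream encoder state: the negotiated source/destination parameters, encoding controls
/// (bitrate, profile, H.264 level), the active encoder session, the resources attached to each
/// queue, and bookkeeping for deferred commands and EOS handling.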
86 struct Stream<T: EncoderSession> {
87     id: u32,
88     src_params: Params,
89     dst_params: Params,
90     dst_bitrate: Bitrate,
91     dst_profile: Profile,
92     dst_h264_level: Option<Level>,
93     force_keyframe: bool,
94 
95     encoder_session: Option<T>,
96     received_input_buffers_event: bool,
97 
98     src_resources: BTreeMap<u32, InputResource>,
99     encoder_input_buffer_ids: BTreeMap<InputBufferId, u32>,
100 
101     dst_resources: BTreeMap<u32, OutputResource>,
102     encoder_output_buffer_ids: BTreeMap<OutputBufferId, u32>,
103 
104     pending_commands: BTreeSet<PendingCommand>,
105     eos_manager: EosBufferManager,
106 }
107 
108 impl<T: EncoderSession> Stream<T> {
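    /// Creates a new stream using encoder defaults: NV12 input at 640x480 and 30 FPS, a VBR
    /// bitrate, and the backend's default profile for `desired_format`.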
109     fn new<E: Encoder<Session = T>>(
110         id: u32,
111         src_resource_type: ResourceType,
112         dst_resource_type: ResourceType,
113         desired_format: Format,
114         encoder: &EncoderDevice<E>,
115     ) -> VideoResult<Self> {
116         const MIN_BUFFERS: u32 = 1;
117         const MAX_BUFFERS: u32 = 342;
118         const DEFAULT_WIDTH: u32 = 640;
119         const DEFAULT_HEIGHT: u32 = 480;
120         const DEFAULT_BITRATE_TARGET: u32 = 6000;
121         const DEFAULT_BITRATE_PEAK: u32 = DEFAULT_BITRATE_TARGET * 2;
122         const DEFAULT_BITRATE: Bitrate = Bitrate::Vbr {
123             target: DEFAULT_BITRATE_TARGET,
124             peak: DEFAULT_BITRATE_PEAK,
125         };
126         const DEFAULT_BUFFER_SIZE: u32 = 2097152; // 2MB; chosen empirically for 1080p video
127         const DEFAULT_FPS: u32 = 30;
128 
129         let mut src_params = Params {
130             frame_rate: DEFAULT_FPS,
131             min_buffers: MIN_BUFFERS,
132             max_buffers: MAX_BUFFERS,
133             resource_type: src_resource_type,
134             ..Default::default()
135         };
136 
137         let cros_capabilities = &encoder.cros_capabilities;
138 
139         cros_capabilities
140             .populate_src_params(
141                 &mut src_params,
142                 Format::NV12,
143                 DEFAULT_WIDTH,
144                 DEFAULT_HEIGHT,
145                 0,
146             )
147             .map_err(|_| VideoError::InvalidArgument)?;
148 
149         let mut dst_params = Params {
150             resource_type: dst_resource_type,
151             frame_rate: DEFAULT_FPS,
152             frame_width: DEFAULT_WIDTH,
153             frame_height: DEFAULT_HEIGHT,
154             ..Default::default()
155         };
156 
157         // In order to support requesting encoder params change, we must know the default frame
158         // rate, because VEA's request_encoding_params_change requires both framerate and
159         // bitrate to be specified.
160         cros_capabilities
161             .populate_dst_params(&mut dst_params, desired_format, DEFAULT_BUFFER_SIZE)
162             .map_err(|_| VideoError::InvalidArgument)?;
163         // `format` is an Option since for the decoder, it is not populated until decoding has
164         // started. For the encoder, the format should always be populated.
165         let dest_format = dst_params.format.ok_or(VideoError::InvalidArgument)?;
166 
167         let dst_profile = cros_capabilities
168             .get_default_profile(&dest_format)
169             .ok_or(VideoError::InvalidArgument)?;
170 
171         let dst_h264_level = if dest_format == Format::H264 {
172             Some(Level::H264_1_0)
173         } else {
174             None
175         };
176 
177         Ok(Self {
178             id,
179             src_params,
180             dst_params,
181             dst_bitrate: DEFAULT_BITRATE,
182             dst_profile,
183             dst_h264_level,
184             force_keyframe: false,
185             encoder_session: None,
186             received_input_buffers_event: false,
187             src_resources: Default::default(),
188             encoder_input_buffer_ids: Default::default(),
189             dst_resources: Default::default(),
190             encoder_output_buffer_ids: Default::default(),
191             pending_commands: Default::default(),
192             eos_manager: EosBufferManager::new(id),
193         })
194     }
195 
196     fn has_encode_session(&self) -> bool {
197         self.encoder_session.is_some()
198     }
199 
200     fn set_encode_session<U: Encoder<Session = T>>(
201         &mut self,
202         encoder: &mut U,
203         wait_ctx: &WaitContext<Token>,
204     ) -> VideoResult<()> {
205         if self.encoder_session.is_some() {
206             error!(
207                 "stream {}: tried to add encode session when one already exists.",
208                 self.id
209             );
210             return Err(VideoError::InvalidOperation);
211         }
212 
213         let new_session = encoder
214             .start_session(SessionConfig {
215                 src_params: self.src_params.clone(),
216                 dst_params: self.dst_params.clone(),
217                 dst_profile: self.dst_profile,
218                 dst_bitrate: self.dst_bitrate,
219                 dst_h264_level: self.dst_h264_level,
220                 frame_rate: self.dst_params.frame_rate,
221             })
222             .map_err(|_| VideoError::InvalidOperation)?;
223 
224         let event_pipe = new_session.event_pipe();
225 
226         wait_ctx
227             .add(event_pipe, Token::Event { id: self.id })
228             .map_err(|e| {
229                 error!(
230                     "stream {}: failed to add FD to poll context: {}",
231                     self.id, e
232                 );
233                 VideoError::InvalidOperation
234             })?;
235         self.encoder_session.replace(new_session);
236         self.received_input_buffers_event = false;
237         Ok(())
238     }
239 
240     fn clear_encode_session(&mut self, wait_ctx: &WaitContext<Token>) -> VideoResult<()> {
241         if let Some(session) = self.encoder_session.take() {
242             let event_pipe = session.event_pipe();
243             wait_ctx.delete(event_pipe).map_err(|e| {
244                 error!(
245                     "stream {}: failed to remove fd from poll context: {}",
246                     self.id, e
247                 );
248                 VideoError::InvalidOperation
249             })?;
250         }
251         Ok(())
252     }
253 
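    /// Handles the session's `RequireInputBuffers` event: records the negotiated buffer count,
    /// frame dimensions and output buffer size, and completes any `GetParams` commands that were
    /// waiting for this information.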
254     fn require_input_buffers(
255         &mut self,
256         input_count: u32,
257         input_frame_width: u32,
258         input_frame_height: u32,
259         output_buffer_size: u32,
260     ) -> Option<Vec<VideoEvtResponseType>> {
261         // TODO(alexlau): Does this always arrive after start_session,
262         // but before the first encode call?
263         // TODO(alexlau): set plane info from input_frame_width and input_frame_height
264         self.src_params.min_buffers = input_count;
265         self.src_params.max_buffers = 32;
266         self.src_params.frame_width = input_frame_width;
267         self.src_params.frame_height = input_frame_height;
268         self.dst_params.plane_formats[0].plane_size = output_buffer_size;
269         self.received_input_buffers_event = true;
270 
271         let mut responses = vec![];
272 
273         // Respond to any GetParams commands that were waiting.
274         let pending_get_src_params = if self
275             .pending_commands
276             .remove(&PendingCommand::GetSrcParams { is_ext: false })
277         {
278             Some(false)
279         } else if self
280             .pending_commands
281             .remove(&PendingCommand::GetSrcParams { is_ext: true })
282         {
283             Some(true)
284         } else {
285             None
286         };
287         if let Some(is_ext) = pending_get_src_params {
288             responses.push(VideoEvtResponseType::AsyncCmd(
289                 AsyncCmdResponse::from_response(
290                     AsyncCmdTag::GetParams {
291                         stream_id: self.id,
292                         queue_type: QueueType::Input,
293                     },
294                     CmdResponse::GetParams {
295                         queue_type: QueueType::Input,
296                         params: self.src_params.clone(),
297                         is_ext,
298                     },
299                 ),
300             ));
301         }
302         let pending_get_dst_params = if self
303             .pending_commands
304             .remove(&PendingCommand::GetDstParams { is_ext: false })
305         {
306             Some(false)
307         } else if self
308             .pending_commands
309             .remove(&PendingCommand::GetDstParams { is_ext: true })
310         {
311             Some(true)
312         } else {
313             None
314         };
315         if let Some(is_ext) = pending_get_dst_params {
316             responses.push(VideoEvtResponseType::AsyncCmd(
317                 AsyncCmdResponse::from_response(
318                     AsyncCmdTag::GetParams {
319                         stream_id: self.id,
320                         queue_type: QueueType::Output,
321                     },
322                     CmdResponse::GetParams {
323                         queue_type: QueueType::Output,
324                         params: self.dst_params.clone(),
325                         is_ext,
326                     },
327                 ),
328             ));
329         }
330 
331         if !responses.is_empty() {
332             Some(responses)
333         } else {
334             None
335         }
336     }
337 
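    /// Handles an "input buffer processed" event by completing the corresponding `ResourceQueue`
    /// command, unless the resource was removed or dequeued by a queue clear in the meantime.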
338     fn processed_input_buffer(
339         &mut self,
340         input_buffer_id: InputBufferId,
341     ) -> Option<Vec<VideoEvtResponseType>> {
342         let resource_id = *match self.encoder_input_buffer_ids.get(&input_buffer_id) {
343             Some(id) => id,
344             None => {
345                 warn!("Received processed input buffer event for input buffer id {}, but missing resource, ResourceDestroyAll?", input_buffer_id);
346                 return None;
347             }
348         };
349 
350         let resource = match self.src_resources.get_mut(&resource_id) {
351             Some(r) => r,
352             None => {
353                 error!(
354                     "Received processed input buffer event but missing resource with id {}",
355                     resource_id
356                 );
357                 return None;
358             }
359         };
360 
361         let queue_params = match resource.queue_params.take() {
362             Some(p) => p,
363             None => {
364                 error!(
365                     "Received processed input buffer event but resource with id {} was not queued.",
366                     resource_id
367                 );
368                 return None;
369             }
370         };
371 
372         if !queue_params.in_queue {
373             // A QueueClear command occurred after this buffer was queued.
374             return None;
375         }
376 
377         let tag = AsyncCmdTag::Queue {
378             stream_id: self.id,
379             queue_type: QueueType::Input,
380             resource_id,
381         };
382 
383         let resp = CmdResponse::ResourceQueue {
384             timestamp: queue_params.timestamp,
385             flags: 0,
386             size: 0,
387         };
388 
389         Some(vec![VideoEvtResponseType::AsyncCmd(
390             AsyncCmdResponse::from_response(tag, resp),
391         )])
392     }
393 
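    /// Handles an "output buffer ready" event by completing the corresponding `ResourceQueue`
    /// command with the produced payload size and, for keyframes, the IFRAME flag.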
394     fn processed_output_buffer(
395         &mut self,
396         output_buffer_id: OutputBufferId,
397         bytesused: u32,
398         keyframe: bool,
399         timestamp: u64,
400     ) -> Option<Vec<VideoEvtResponseType>> {
401         let resource_id = *match self.encoder_output_buffer_ids.get(&output_buffer_id) {
402             Some(id) => id,
403             None => {
404                 warn!("Received processed output buffer event for output buffer id {}, but missing resource, ResourceDestroyAll?", output_buffer_id);
405                 return None;
406             }
407         };
408 
409         let resource = match self.dst_resources.get_mut(&resource_id) {
410             Some(r) => r,
411             None => {
412                 error!(
413                     "Received processed output buffer event but missing resource with id {}",
414                     resource_id
415                 );
416                 return None;
417             }
418         };
419 
420         let queue_params = match resource.queue_params.take() {
421             Some(p) => p,
422             None => {
423                 error!("Received processed output buffer event but resource with id {} was not queued.", resource_id);
424                 return None;
425             }
426         };
427 
428         if !queue_params.in_queue {
429             // A QueueClear command occurred after this buffer was queued.
430             return None;
431         }
432 
433         let tag = AsyncCmdTag::Queue {
434             stream_id: self.id,
435             queue_type: QueueType::Output,
436             resource_id,
437         };
438 
439         let resp = CmdResponse::ResourceQueue {
440             timestamp,
441             // At the moment, a buffer is reserved by the `eos_manager`, and
442             // the EOS flag is populated and returned after a flush() command.
443             // TODO(b/149725148): Populate flags once libvda supports it.
444             flags: if keyframe {
445                 protocol::VIRTIO_VIDEO_BUFFER_FLAG_IFRAME
446             } else {
447                 0
448             },
449             size: bytesused,
450         };
451 
452         Some(vec![VideoEvtResponseType::AsyncCmd(
453             AsyncCmdResponse::from_response(tag, resp),
454         )])
455     }
456 
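    /// Handles the completion of a flush: responds to any pending `Drain` and queue-clear
    /// commands (with an error if the flush failed), then tries to return the reserved EOS
    /// buffer.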
457     fn flush_response(&mut self, flush_done: bool) -> Option<Vec<VideoEvtResponseType>> {
458         let command_response = if flush_done {
459             CmdResponse::NoData
460         } else {
461             error!("Flush could not be completed for stream {}", self.id);
462             VideoError::InvalidOperation.into()
463         };
464 
465         let mut async_responses = vec![];
466 
467         // First gather the responses for all completed commands.
468         if self.pending_commands.remove(&PendingCommand::Drain) {
469             async_responses.push(VideoEvtResponseType::AsyncCmd(
470                 AsyncCmdResponse::from_response(
471                     AsyncCmdTag::Drain { stream_id: self.id },
472                     command_response.clone(),
473                 ),
474             ));
475         }
476 
477         if self.pending_commands.remove(&PendingCommand::SrcQueueClear) {
478             async_responses.push(VideoEvtResponseType::AsyncCmd(
479                 AsyncCmdResponse::from_response(
480                     AsyncCmdTag::Clear {
481                         stream_id: self.id,
482                         queue_type: QueueType::Input,
483                     },
484                     command_response.clone(),
485                 ),
486             ));
487         }
488 
489         if self.pending_commands.remove(&PendingCommand::DstQueueClear) {
490             async_responses.push(VideoEvtResponseType::AsyncCmd(
491                 AsyncCmdResponse::from_response(
492                     AsyncCmdTag::Clear {
493                         stream_id: self.id,
494                         queue_type: QueueType::Output,
495                     },
496                     command_response,
497                 ),
498             ));
499         }
500 
501         // Then add the EOS buffer to the responses if it is available.
502         self.eos_manager.try_complete_eos(async_responses)
503     }
504 
505     #[allow(clippy::unnecessary_wraps)]
506     fn notify_error(&self, error: VideoError) -> Option<Vec<VideoEvtResponseType>> {
507         error!(
508             "Received encoder error event for stream {}: {}",
509             self.id, error
510         );
511         Some(vec![VideoEvtResponseType::Event(VideoEvt {
512             typ: EvtType::Error,
513             stream_id: self.id,
514         })])
515     }
516 }
517 
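/// Virtio video encoder device. Holds the backend encoder and its capabilities, the per-stream
/// state, the resource bridge used to resolve virtio object resources, and the guest memory used
/// for guest-page resources.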
518 pub struct EncoderDevice<T: Encoder> {
519     cros_capabilities: EncoderCapabilities,
520     encoder: T,
521     streams: BTreeMap<u32, Stream<T::Session>>,
522     resource_bridge: Tube,
523     mem: GuestMemory,
524 }
525 
526 impl<T: Encoder> EncoderDevice<T> {
527     /// Build a new encoder using the provided `backend`.
528     pub fn new(backend: T, resource_bridge: Tube, mem: GuestMemory) -> VideoResult<Self> {
529         Ok(Self {
530             cros_capabilities: backend.query_capabilities()?,
531             encoder: backend,
532             streams: Default::default(),
533             resource_bridge,
534             mem,
535         })
536     }
537 
538     #[allow(clippy::unnecessary_wraps)]
539     fn query_capabilities(&self, queue_type: QueueType) -> VideoResult<VideoCmdResponseType> {
540         let descs = match queue_type {
541             QueueType::Input => self.cros_capabilities.input_format_descs.clone(),
542             QueueType::Output => self.cros_capabilities.output_format_descs.clone(),
543         };
544         Ok(VideoCmdResponseType::Sync(CmdResponse::QueryCapability(
545             descs,
546         )))
547     }
548 
549     fn stream_create(
550         &mut self,
551         stream_id: u32,
552         desired_format: Format,
553         src_resource_type: ResourceType,
554         dst_resource_type: ResourceType,
555     ) -> VideoResult<VideoCmdResponseType> {
556         if self.streams.contains_key(&stream_id) {
557             return Err(VideoError::InvalidStreamId(stream_id));
558         }
559         let new_stream = Stream::new(
560             stream_id,
561             src_resource_type,
562             dst_resource_type,
563             desired_format,
564             self,
565         )?;
566 
567         self.streams.insert(stream_id, new_stream);
568         Ok(VideoCmdResponseType::Sync(CmdResponse::NoData))
569     }
570 
571     fn stream_destroy(&mut self, stream_id: u32) -> VideoResult<VideoCmdResponseType> {
572         let mut stream = self
573             .streams
574             .remove(&stream_id)
575             .ok_or(VideoError::InvalidStreamId(stream_id))?;
576         // TODO(alexlau): Handle resources that have been queued.
577         if let Some(session) = stream.encoder_session.take() {
578             if let Err(e) = self.encoder.stop_session(session) {
579                 error!("Failed to stop encode session {}: {}", stream_id, e);
580             }
581         }
582         Ok(VideoCmdResponseType::Sync(CmdResponse::NoData))
583     }
584 
585     fn stream_drain(&mut self, stream_id: u32) -> VideoResult<VideoCmdResponseType> {
586         let stream = self
587             .streams
588             .get_mut(&stream_id)
589             .ok_or(VideoError::InvalidStreamId(stream_id))?;
590         match stream.encoder_session {
591             Some(ref mut session) => {
592                 if stream.pending_commands.contains(&PendingCommand::Drain) {
593                     error!("A pending Drain command already exists.");
594                     return Err(VideoError::InvalidOperation);
595                 }
596                 stream.pending_commands.insert(PendingCommand::Drain);
597 
598                 if !stream
599                     .pending_commands
600                     .contains(&PendingCommand::SrcQueueClear)
601                     && !stream
602                         .pending_commands
603                         .contains(&PendingCommand::DstQueueClear)
604                 {
605                     // If a source or dest QueueClear is underway, a flush has
606                     // already been sent.
607                     if let Err(e) = session.flush() {
608                         error!("Flush failed for stream id {}: {}", stream_id, e);
609                     }
610                 }
611                 Ok(VideoCmdResponseType::Async(AsyncCmdTag::Drain {
612                     stream_id,
613                 }))
614             }
615             None => {
616                 // Return an OK response since nothing has been queued yet.
617                 Ok(VideoCmdResponseType::Sync(CmdResponse::NoData))
618             }
619         }
620     }
621 
622     fn resource_create(
623         &mut self,
624         wait_ctx: &WaitContext<Token>,
625         stream_id: u32,
626         queue_type: QueueType,
627         resource_id: u32,
628         plane_offsets: Vec<u32>,
629         plane_entries: Vec<Vec<UnresolvedResourceEntry>>,
630     ) -> VideoResult<VideoCmdResponseType> {
631         let stream = self
632             .streams
633             .get_mut(&stream_id)
634             .ok_or(VideoError::InvalidStreamId(stream_id))?;
635 
636         if !stream.has_encode_session() {
637             // If there was no previous S_FMT (set_params) call, no encode session has been
638             // created yet, so create one now upon the first QBUF.
639             stream.set_encode_session(&mut self.encoder, wait_ctx)?;
640         }
641 
642         let num_planes = plane_offsets.len();
643 
644         // We only support single-buffer resources for now.
645         let entries = if plane_entries.len() != 1 {
646             return Err(VideoError::InvalidArgument);
647         } else {
648             // unwrap() is safe because we just tested that `plane_entries` had exactly one element.
649             plane_entries.first().unwrap()
650         };
651 
652         match queue_type {
653             QueueType::Input => {
654                 // We currently only support single-buffer formats, but some clients may mistake
655                 // color planes for memory planes and submit several planes to us. This doesn't
656                 // matter as we will only consider the first one.
657                 if num_planes < 1 {
658                     return Err(VideoError::InvalidParameter);
659                 }
660 
661                 if stream.src_resources.contains_key(&resource_id) {
662                     debug!("Replacing source resource with id {}", resource_id);
663                 }
664 
665                 let resource = match stream.src_params.resource_type {
666                     ResourceType::VirtioObject => {
667                         // Virtio object resources only have one entry.
668                         if entries.len() != 1 {
669                             return Err(VideoError::InvalidArgument);
670                         }
671                         GuestResource::from_virtio_object_entry(
672                             // SAFETY:
673                             // Safe because we confirmed the correct type for the resource.
674                             // unwrap() is also safe here because we just tested above that
675                             // `entries` had exactly one element.
676                             unsafe { entries.first().unwrap().object },
677                             &self.resource_bridge,
678                             &stream.src_params,
679                         )
680                         .map_err(|_| VideoError::InvalidArgument)?
681                     }
682                     ResourceType::GuestPages => GuestResource::from_virtio_guest_mem_entry(
683                         // SAFETY:
684                         // Safe because we confirmed the correct type for the resource.
685                         unsafe {
686                             std::slice::from_raw_parts(
687                                 entries.as_ptr() as *const protocol::virtio_video_mem_entry,
688                                 entries.len(),
689                             )
690                         },
691                         &self.mem,
692                         &stream.src_params,
693                     )
694                     .map_err(|_| VideoError::InvalidArgument)?,
695                 };
696 
697                 stream.src_resources.insert(
698                     resource_id,
699                     InputResource {
700                         resource,
701                         queue_params: None,
702                     },
703                 );
704             }
705             QueueType::Output => {
706                 // Bitstream buffers always have only one plane.
707                 if num_planes != 1 {
708                     return Err(VideoError::InvalidParameter);
709                 }
710 
711                 if stream.dst_resources.contains_key(&resource_id) {
712                     debug!("Replacing dest resource with id {}", resource_id);
713                 }
714 
715                 let resource = match stream.dst_params.resource_type {
716                     ResourceType::VirtioObject => {
717                         // Virtio object resources only have one entry.
718                         if entries.len() != 1 {
719                             return Err(VideoError::InvalidArgument);
720                         }
721                         GuestResource::from_virtio_object_entry(
722                             // SAFETY:
723                             // Safe because we confirmed the correct type for the resource.
724                             // unwrap() is also safe here because we just tested above that
725                             // `entries` had exactly one element.
726                             unsafe { entries.first().unwrap().object },
727                             &self.resource_bridge,
728                             &stream.dst_params,
729                         )
730                         .map_err(|_| VideoError::InvalidArgument)?
731                     }
732                     ResourceType::GuestPages => GuestResource::from_virtio_guest_mem_entry(
733                         // SAFETY:
734                         // Safe because we confirmed the correct type for the resource.
735                         unsafe {
736                             std::slice::from_raw_parts(
737                                 entries.as_ptr() as *const protocol::virtio_video_mem_entry,
738                                 entries.len(),
739                             )
740                         },
741                         &self.mem,
742                         &stream.dst_params,
743                     )
744                     .map_err(|_| VideoError::InvalidArgument)?,
745                 };
746 
747                 let offset = plane_offsets[0];
748                 stream.dst_resources.insert(
749                     resource_id,
750                     OutputResource {
751                         resource,
752                         offset,
753                         queue_params: None,
754                     },
755                 );
756             }
757         }
758 
759         Ok(VideoCmdResponseType::Sync(CmdResponse::NoData))
760     }
761 
762     fn resource_queue(
763         &mut self,
764         stream_id: u32,
765         queue_type: QueueType,
766         resource_id: u32,
767         timestamp: u64,
768         data_sizes: Vec<u32>,
769     ) -> VideoResult<VideoCmdResponseType> {
770         let stream = self
771             .streams
772             .get_mut(&stream_id)
773             .ok_or(VideoError::InvalidStreamId(stream_id))?;
774 
775         let encoder_session = match stream.encoder_session {
776             Some(ref mut e) => e,
777             None => {
778                 // The encoder session is created on the first ResourceCreate,
779                 // so it should exist here.
780                 error!("Encoder session did not exist at resource_queue.");
781                 return Err(VideoError::InvalidOperation);
782             }
783         };
784 
785         match queue_type {
786             QueueType::Input => {
787                 // We currently only support single-buffer formats, but some clients may mistake
788                 // color planes for memory planes and submit several planes to us. This doesn't
789                 // matter as we will only consider the first one.
790                 if data_sizes.is_empty() {
791                     return Err(VideoError::InvalidParameter);
792                 }
793 
794                 let src_resource = stream.src_resources.get_mut(&resource_id).ok_or(
795                     VideoError::InvalidResourceId {
796                         stream_id,
797                         resource_id,
798                     },
799                 )?;
800 
801                 let force_keyframe = std::mem::replace(&mut stream.force_keyframe, false);
802 
803                 match encoder_session.encode(
804                     src_resource
805                         .resource
806                         .try_clone()
807                         .map_err(|_| VideoError::InvalidArgument)?,
808                     timestamp,
809                     force_keyframe,
810                 ) {
811                     Ok(input_buffer_id) => {
812                         if let Some(last_resource_id) = stream
813                             .encoder_input_buffer_ids
814                             .insert(input_buffer_id, resource_id)
815                         {
816                             error!(
817                                 "encoder input id {} was already mapped to resource id {}",
818                                 input_buffer_id, last_resource_id
819                             );
820                             return Err(VideoError::InvalidOperation);
821                         }
822                         let queue_params = QueuedInputResourceParams {
823                             timestamp,
824                             in_queue: true,
825                         };
826                         if let Some(last_queue_params) =
827                             src_resource.queue_params.replace(queue_params)
828                         {
829                             if last_queue_params.in_queue {
830                                 error!(
831                                     "resource {} was already queued ({:?})",
832                                     resource_id, last_queue_params
833                                 );
834                                 return Err(VideoError::InvalidOperation);
835                             }
836                         }
837                     }
838                     Err(e) => {
839                         // TODO(alexlau): Return the actual error
840                         error!("encode failed: {}", e);
841                         return Err(VideoError::InvalidOperation);
842                     }
843                 }
844                 Ok(VideoCmdResponseType::Async(AsyncCmdTag::Queue {
845                     stream_id,
846                     queue_type: QueueType::Input,
847                     resource_id,
848                 }))
849             }
850             QueueType::Output => {
851                 // Bitstream buffers always have only one plane.
852                 if data_sizes.len() != 1 {
853                     return Err(VideoError::InvalidParameter);
854                 }
855 
856                 let dst_resource = stream.dst_resources.get_mut(&resource_id).ok_or(
857                     VideoError::InvalidResourceId {
858                         stream_id,
859                         resource_id,
860                     },
861                 )?;
862 
863                 // data_sizes is always 0 for output buffers. We should fetch them from the
864                 // negotiated parameters, although right now the VirtioObject backend uses the
865                 // buffer's metadata instead.
866                 let buffer_size = dst_resource.resource.planes[0].size as u32;
867 
868                 // Stores an output buffer to notify EOS.
869                 // This is necessary because libvda is unable to indicate EOS along with returned
870                 // buffers. For now, when a `Flush()` completes, this saved resource
871                 // will be returned as a zero-sized buffer with the EOS flag.
872                 if stream.eos_manager.try_reserve_eos_buffer(resource_id) {
873                     return Ok(VideoCmdResponseType::Async(AsyncCmdTag::Queue {
874                         stream_id,
875                         queue_type: QueueType::Output,
876                         resource_id,
877                     }));
878                 }
879 
880                 match encoder_session.use_output_buffer(
881                     dst_resource
882                         .resource
883                         .handle
884                         .try_clone()
885                         .map_err(|_| VideoError::InvalidParameter)?,
886                     dst_resource.offset,
887                     buffer_size,
888                 ) {
889                     Ok(output_buffer_id) => {
890                         if let Some(last_resource_id) = stream
891                             .encoder_output_buffer_ids
892                             .insert(output_buffer_id, resource_id)
893                         {
894                             error!(
895                                 "encoder output id {} was already mapped to resource id {}",
896                                 output_buffer_id, last_resource_id
897                             );
898                         }
899                         let queue_params = QueuedOutputResourceParams { in_queue: true };
900                         if let Some(last_queue_params) =
901                             dst_resource.queue_params.replace(queue_params)
902                         {
903                             if last_queue_params.in_queue {
904                                 error!(
905                                     "resource {} was already queued ({:?})",
906                                     resource_id, last_queue_params
907                                 );
908                             }
909                         }
910                     }
911                     Err(e) => {
912                         error!("use_output_buffer failed: {}", e);
913                         return Err(VideoError::InvalidOperation);
914                     }
915                 }
916                 Ok(VideoCmdResponseType::Async(AsyncCmdTag::Queue {
917                     stream_id,
918                     queue_type: QueueType::Output,
919                     resource_id,
920                 }))
921             }
922         }
923     }
924 
925     fn resource_destroy_all(&mut self, stream_id: u32) -> VideoResult<VideoCmdResponseType> {
926         let stream = self
927             .streams
928             .get_mut(&stream_id)
929             .ok_or(VideoError::InvalidStreamId(stream_id))?;
930         stream.src_resources.clear();
931         stream.encoder_input_buffer_ids.clear();
932         stream.dst_resources.clear();
933         stream.encoder_output_buffer_ids.clear();
934         stream.eos_manager.reset();
935         Ok(VideoCmdResponseType::Sync(CmdResponse::NoData))
936     }
937 
938     fn queue_clear(
939         &mut self,
940         stream_id: u32,
941         queue_type: QueueType,
942     ) -> VideoResult<VideoCmdResponseType> {
943         // Unfortunately, there is no way to clear the queue with VEA.
944         // VDA has Reset() which also isn't done on a per-queue basis,
945         // but VEA has no such API.
946         // Doing a Flush() here and waiting for the flush response is also
947         // not an option, because the virtio-video driver expects a prompt
948         // response (search for "timed out waiting for queue clear" in
949         // virtio_video_enc.c).
950         // So for now, we do a Flush(), but also mark each currently
951         // queued resource as no longer `in_queue`, and skip them when they
952         // are returned.
953         // TODO(b/153406792): Support per-queue clearing.
954         let stream = self
955             .streams
956             .get_mut(&stream_id)
957             .ok_or(VideoError::InvalidStreamId(stream_id))?;
958 
959         match queue_type {
960             QueueType::Input => {
961                 for src_resource in stream.src_resources.values_mut() {
962                     if let Some(ref mut queue_params) = src_resource.queue_params {
963                         queue_params.in_queue = false;
964                     }
965                 }
966             }
967             QueueType::Output => {
968                 for dst_resource in stream.dst_resources.values_mut() {
969                     if let Some(ref mut queue_params) = dst_resource.queue_params {
970                         queue_params.in_queue = false;
971                     }
972                 }
973                 stream.eos_manager.reset();
974             }
975         }
976         Ok(VideoCmdResponseType::Sync(CmdResponse::NoData))
977     }
978 
979     fn get_params(
980         &mut self,
981         stream_id: u32,
982         queue_type: QueueType,
983         is_ext: bool,
984     ) -> VideoResult<VideoCmdResponseType> {
985         let stream = self
986             .streams
987             .get_mut(&stream_id)
988             .ok_or(VideoError::InvalidStreamId(stream_id))?;
989 
990         if stream.encoder_session.is_some() && !stream.received_input_buffers_event {
991             // If we haven't yet received a RequireInputBuffers
992             // event, we need to wait for that before replying so that
993             // the G_FMT response has the correct data.
994             let pending_command = match queue_type {
995                 QueueType::Input => PendingCommand::GetSrcParams { is_ext },
996                 QueueType::Output => PendingCommand::GetDstParams { is_ext },
997             };
998 
999             if !stream.pending_commands.insert(pending_command) {
1000                 // There is already a G_FMT call waiting.
1001                 error!("Pending get params call already exists.");
1002                 return Err(VideoError::InvalidOperation);
1003             }
1004 
1005             Ok(VideoCmdResponseType::Async(AsyncCmdTag::GetParams {
1006                 stream_id,
1007                 queue_type,
1008             }))
1009         } else {
1010             let params = match queue_type {
1011                 QueueType::Input => stream.src_params.clone(),
1012                 QueueType::Output => stream.dst_params.clone(),
1013             };
1014             Ok(VideoCmdResponseType::Sync(CmdResponse::GetParams {
1015                 queue_type,
1016                 params,
1017                 is_ext,
1018             }))
1019         }
1020     }
1021 
1022     fn set_params(
1023         &mut self,
1024         wait_ctx: &WaitContext<Token>,
1025         stream_id: u32,
1026         queue_type: QueueType,
1027         format: Option<Format>,
1028         frame_width: u32,
1029         frame_height: u32,
1030         frame_rate: u32,
1031         plane_formats: Vec<PlaneFormat>,
1032         resource_type: Option<ResourceType>,
1033     ) -> VideoResult<VideoCmdResponseType> {
1034         let stream = self
1035             .streams
1036             .get_mut(&stream_id)
1037             .ok_or(VideoError::InvalidStreamId(stream_id))?;
1038 
1039         let mut create_session = stream.encoder_session.is_none();
1040         // TODO(ishitatsuyuki): We should additionally check that no resources are *attached* while
1041         //                      params are being set.
1042         let src_resources_queued = !stream.src_resources.is_empty();
1043         let dst_resources_queued = !stream.dst_resources.is_empty();
1044 
1045         // Dynamic framerate changes are allowed. The framerate can be set on either the input or
1046         // output queue. Changing the framerate can influence the selected H.264 level, as the
1047         // level might be adjusted to conform to the minimum requirements for the selected bitrate
1048         // and framerate. As dynamic level changes are not supported we will just recreate the
1049         // encoder session as long as no resources have been queued yet. If an encoder session is
1050         // active we will request a dynamic framerate change instead, and it's up to the encoder
1051         // backend to return an error on invalid requests.
1052         if stream.dst_params.frame_rate != frame_rate {
1053             stream.src_params.frame_rate = frame_rate;
1054             stream.dst_params.frame_rate = frame_rate;
1055             if let Some(ref mut encoder_session) = stream.encoder_session {
1056                 if !(src_resources_queued || dst_resources_queued) {
1057                     create_session = true;
1058                 } else if let Err(e) = encoder_session.request_encoding_params_change(
1059                     stream.dst_bitrate,
1060                     stream.dst_params.frame_rate,
1061                 ) {
1062                     error!("failed to dynamically request framerate change: {}", e);
1063                     return Err(VideoError::InvalidOperation);
1064                 }
1065             }
1066         }
1067 
1068         match queue_type {
1069             QueueType::Input => {
1070                 if stream.src_params.frame_width != frame_width
1071                     || stream.src_params.frame_height != frame_height
1072                     || stream.src_params.format != format
1073                     || stream.src_params.plane_formats != plane_formats
1074                     || resource_type
1075                         .map(|resource_type| stream.src_params.resource_type != resource_type)
1076                         .unwrap_or(false)
1077                 {
1078                     if src_resources_queued {
1079                         // Buffers have already been queued and encoding has already started.
1080                         return Err(VideoError::InvalidOperation);
1081                     }
1082 
1083                     let desired_format =
1084                         format.or(stream.src_params.format).unwrap_or(Format::NV12);
1085                     self.cros_capabilities.populate_src_params(
1086                         &mut stream.src_params,
1087                         desired_format,
1088                         frame_width,
1089                         frame_height,
1090                         plane_formats.first().map(|fmt| fmt.stride).unwrap_or(0),
1091                     )?;
1092 
1093                     stream.dst_params.frame_width = frame_width;
1094                     stream.dst_params.frame_height = frame_height;
1095 
1096                     if let Some(resource_type) = resource_type {
1097                         stream.src_params.resource_type = resource_type;
1098                     }
1099 
1100                     create_session = true
1101                 }
1102             }
1103             QueueType::Output => {
1104                 if stream.dst_params.format != format
1105                     || stream.dst_params.plane_formats != plane_formats
1106                     || resource_type
1107                         .map(|resource_type| stream.dst_params.resource_type != resource_type)
1108                         .unwrap_or(false)
1109                 {
1110                     if dst_resources_queued {
1111                         // Buffers have already been queued and encoding has already started.
1112                         return Err(VideoError::InvalidOperation);
1113                     }
1114 
1115                     let desired_format =
1116                         format.or(stream.dst_params.format).unwrap_or(Format::H264);
1117 
1118                     // The output (bitstream) format always has exactly one plane.
1119                     if plane_formats.len() != 1 {
1120                         return Err(VideoError::InvalidArgument);
1121                     }
1122 
1123                     self.cros_capabilities.populate_dst_params(
1124                         &mut stream.dst_params,
1125                         desired_format,
1126                         plane_formats[0].plane_size,
1127                     )?;
1128 
1129                     // Format is always populated for encoder.
1130                     let new_format = stream
1131                         .dst_params
1132                         .format
1133                         .ok_or(VideoError::InvalidArgument)?;
1134 
1135                     // If the selected profile no longer corresponds to the selected coded format,
1136                     // reset it.
1137                     stream.dst_profile = self
1138                         .cros_capabilities
1139                         .get_default_profile(&new_format)
1140                         .ok_or(VideoError::InvalidArgument)?;
1141 
1142                     if new_format == Format::H264 {
1143                         stream.dst_h264_level = Some(Level::H264_1_0);
1144                     } else {
1145                         stream.dst_h264_level = None;
1146                     }
1147 
1148                     if let Some(resource_type) = resource_type {
1149                         stream.dst_params.resource_type = resource_type;
1150                     }
1151 
1152                     create_session = true;
1153                 }
1154             }
1155         }
1156 
1157         if create_session {
1158             // An encoder session has to be created immediately upon a SetParams
1159             // (S_FMT) call, because we need to receive the RequireInputBuffers
1160             // callback which has output buffer size info, in order to populate
1161             // dst_params to have the correct size on subsequent GetParams (G_FMT) calls.
1162             if stream.encoder_session.is_some() {
1163                 stream.clear_encode_session(wait_ctx)?;
1164                 if !stream.received_input_buffers_event {
1165                     // This could happen if two SetParams calls are occurring at the same time.
1166                     // For example, the user calls SetParams for the input queue on one thread,
1167                     // and a new encode session is created. Then on another thread, SetParams
1168                     // is called for the output queue before the first SetParams call has returned.
1169                     // At this point, there is a new EncodeSession being created that has not
1170                     // yet received a RequireInputBuffers event.
1171                     // Even if we clear the encoder session and recreate it, this case
1172                     // is handled because stream.pending_commands will still contain
1173                     // the waiting GetParams responses, which will then receive fresh data once
1174                     // the new session's RequireInputBuffers event happens.
1175                     warn!(
1176                         "New encoder session being created while waiting for RequireInputBuffers."
1177                     )
1178                 }
1179             }
1180             stream.set_encode_session(&mut self.encoder, wait_ctx)?;
1181         }
1182         Ok(VideoCmdResponseType::Sync(CmdResponse::NoData))
1183     }
1184 
1185     fn query_control(&self, query_ctrl_type: QueryCtrlType) -> VideoResult<VideoCmdResponseType> {
1186         let query_ctrl_response = match query_ctrl_type {
1187             QueryCtrlType::Profile(format) => match self.cros_capabilities.get_profiles(&format) {
1188                 Some(profiles) => QueryCtrlResponse::Profile(profiles.clone()),
1189                 None => {
1190                     return Err(VideoError::UnsupportedControl(CtrlType::Profile));
1191                 }
1192             },
1193             QueryCtrlType::Level(format) => {
1194                 match format {
1195                     Format::H264 => QueryCtrlResponse::Level(vec![
1196                         Level::H264_1_0,
1197                         Level::H264_1_1,
1198                         Level::H264_1_2,
1199                         Level::H264_1_3,
1200                         Level::H264_2_0,
1201                         Level::H264_2_1,
1202                         Level::H264_2_2,
1203                         Level::H264_3_0,
1204                         Level::H264_3_1,
1205                         Level::H264_3_2,
1206                         Level::H264_4_0,
1207                         Level::H264_4_1,
1208                         Level::H264_4_2,
1209                         Level::H264_5_0,
1210                         Level::H264_5_1,
1211                     ]),
1212                     _ => {
1213                         // Levels are only supported for H264.
1214                         return Err(VideoError::UnsupportedControl(CtrlType::Level));
1215                     }
1216                 }
1217             }
1218         };
1219 
1220         Ok(VideoCmdResponseType::Sync(CmdResponse::QueryControl(
1221             query_ctrl_response,
1222         )))
1223     }
1224 
1225     fn get_control(
1226         &self,
1227         stream_id: u32,
1228         ctrl_type: CtrlType,
1229     ) -> VideoResult<VideoCmdResponseType> {
1230         let stream = self
1231             .streams
1232             .get(&stream_id)
1233             .ok_or(VideoError::InvalidStreamId(stream_id))?;
1234         let ctrl_val = match ctrl_type {
1235             CtrlType::BitrateMode => CtrlVal::BitrateMode(stream.dst_bitrate.mode()),
1236             CtrlType::Bitrate => CtrlVal::Bitrate(stream.dst_bitrate.target()),
1237             CtrlType::BitratePeak => CtrlVal::BitratePeak(match stream.dst_bitrate {
1238                 Bitrate::Vbr { peak, .. } => peak,
1239                 // For CBR there is no peak, so return the target (which is technically correct).
1240                 Bitrate::Cbr { target } => target,
1241             }),
1242             CtrlType::Profile => CtrlVal::Profile(stream.dst_profile),
1243             CtrlType::Level => {
1244                 let format = stream
1245                     .dst_params
1246                     .format
1247                     .ok_or(VideoError::InvalidArgument)?;
1248                 match format {
1249                     Format::H264 => CtrlVal::Level(stream.dst_h264_level.ok_or_else(|| {
1250                         error!("H264 level not set");
1251                         VideoError::InvalidArgument
1252                     })?),
1253                     _ => {
1254                         return Err(VideoError::UnsupportedControl(ctrl_type));
1255                     }
1256                 }
1257             }
1258             // Button controls should not be queried.
1259             CtrlType::ForceKeyframe => return Err(VideoError::UnsupportedControl(ctrl_type)),
1260             // Prepending SPS and PPS to IDR is always enabled in the libvda backend.
1261             // TODO (b/161495502): account for other backends
1262             CtrlType::PrependSpsPpsToIdr => CtrlVal::PrependSpsPpsToIdr(true),
1263         };
1264         Ok(VideoCmdResponseType::Sync(CmdResponse::GetControl(
1265             ctrl_val,
1266         )))
1267     }
1268 
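    // Applies a stream control. Target and peak bitrate updates are forwarded to a live
    // encoder session through request_encoding_params_change(), while bitrate mode,
    // profile and level changes are rejected once resources have been queued and instead
    // mark the encoder session for recreation with the new settings.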
1269     fn set_control(
1270         &mut self,
1271         wait_ctx: &WaitContext<Token>,
1272         stream_id: u32,
1273         ctrl_val: CtrlVal,
1274     ) -> VideoResult<VideoCmdResponseType> {
1275         let stream = self
1276             .streams
1277             .get_mut(&stream_id)
1278             .ok_or(VideoError::InvalidStreamId(stream_id))?;
1279         let mut recreate_session = false;
1280         let resources_queued = !stream.src_resources.is_empty() || !stream.dst_resources.is_empty();
1281 
1282         match ctrl_val {
1283             CtrlVal::BitrateMode(bitrate_mode) => {
1284                 if stream.dst_bitrate.mode() != bitrate_mode {
1285                     if resources_queued {
1286                         error!("set control called for bitrate mode but already encoding.");
1287                         return Err(VideoError::InvalidOperation);
1288                     }
1289                     stream.dst_bitrate = match bitrate_mode {
1290                         BitrateMode::Cbr => Bitrate::Cbr {
1291                             target: stream.dst_bitrate.target(),
1292                         },
1293                         BitrateMode::Vbr => Bitrate::Vbr {
1294                             target: stream.dst_bitrate.target(),
1295                             peak: stream.dst_bitrate.target(),
1296                         },
1297                     };
1298                     recreate_session = true;
1299                 }
1300             }
1301             CtrlVal::Bitrate(bitrate) => {
1302                 if stream.dst_bitrate.target() != bitrate {
1303                     let mut new_bitrate = stream.dst_bitrate;
1304                     match &mut new_bitrate {
1305                         Bitrate::Cbr { target } | Bitrate::Vbr { target, .. } => *target = bitrate,
1306                     }
1307                     if let Some(ref mut encoder_session) = stream.encoder_session {
1308                         if let Err(e) = encoder_session.request_encoding_params_change(
1309                             new_bitrate,
1310                             stream.dst_params.frame_rate,
1311                         ) {
1312                             error!("failed to dynamically request target bitrate change: {}", e);
1313                             return Err(VideoError::InvalidOperation);
1314                         }
1315                     }
1316                     stream.dst_bitrate = new_bitrate;
1317                 }
1318             }
1319             CtrlVal::BitratePeak(bitrate) => {
1320                 match stream.dst_bitrate {
1321                     Bitrate::Vbr { peak, .. } => {
1322                         if peak != bitrate {
1323                             let new_bitrate = Bitrate::Vbr {
1324                                 target: stream.dst_bitrate.target(),
1325                                 peak: bitrate,
1326                             };
1327                             if let Some(ref mut encoder_session) = stream.encoder_session {
1328                                 if let Err(e) = encoder_session.request_encoding_params_change(
1329                                     new_bitrate,
1330                                     stream.dst_params.frame_rate,
1331                                 ) {
1332                                     error!(
1333                                         "failed to dynamically request peak bitrate change: {}",
1334                                         e
1335                                     );
1336                                     return Err(VideoError::InvalidOperation);
1337                                 }
1338                             }
1339                             stream.dst_bitrate = new_bitrate;
1340                         }
1341                     }
1342                     // Setting the peak bitrate while in constant bitrate mode is not
1343                     // an error; the value is simply ignored.
1344                     Bitrate::Cbr { .. } => {}
1345                 }
1346             }
1347             CtrlVal::Profile(profile) => {
1348                 if stream.dst_profile != profile {
1349                     if resources_queued {
1350                         error!("set control called for profile but already encoding.");
1351                         return Err(VideoError::InvalidOperation);
1352                     }
1353                     let format = stream
1354                         .dst_params
1355                         .format
1356                         .ok_or(VideoError::InvalidArgument)?;
1357                     if format != profile.to_format() {
1358                         error!(
1359                             "specified profile does not correspond to the selected format ({})",
1360                             format
1361                         );
1362                         return Err(VideoError::InvalidOperation);
1363                     }
1364                     stream.dst_profile = profile;
1365                     recreate_session = true;
1366                 }
1367             }
1368             CtrlVal::Level(level) => {
1369                 if stream.dst_h264_level != Some(level) {
1370                     if resources_queued {
1371                         error!("set control called for level but already encoding.");
1372                         return Err(VideoError::InvalidOperation);
1373                     }
1374                     let format = stream
1375                         .dst_params
1376                         .format
1377                         .ok_or(VideoError::InvalidArgument)?;
1378                     if format != Format::H264 {
1379                         error!(
1380                             "set control called for level but format is not H264 ({})",
1381                             format
1382                         );
1383                         return Err(VideoError::InvalidOperation);
1384                     }
1385                     stream.dst_h264_level = Some(level);
1386                     recreate_session = true;
1387                 }
1388             }
1389             CtrlVal::ForceKeyframe => {
1390                 stream.force_keyframe = true;
1391             }
1392             CtrlVal::PrependSpsPpsToIdr(prepend_sps_pps_to_idr) => {
1393                 // Prepending SPS and PPS to IDR is always enabled in the libvda backend,
1394                 // so attempting to disable it will always fail.
1395                 // TODO (b/161495502): account for other backends
1396                 if !prepend_sps_pps_to_idr {
1397                     return Err(VideoError::InvalidOperation);
1398                 }
1399             }
1400         }
1401 
1402         // We can safely recreate the encoder session if no resources were queued yet.
1403         if recreate_session && stream.encoder_session.is_some() {
1404             stream.clear_encode_session(wait_ctx)?;
1405             stream.set_encode_session(&mut self.encoder, wait_ctx)?;
1406         }
1407 
1408         Ok(VideoCmdResponseType::Sync(CmdResponse::SetControl))
1409     }
1410 }
1411 
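// Entry points called by the virtio-video worker: `process_cmd` dispatches guest commands
// to the per-stream handlers above, and `process_event` forwards encoder backend events.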
1412 impl<T: Encoder> Device for EncoderDevice<T> {
1413     fn process_cmd(
1414         &mut self,
1415         req: VideoCmd,
1416         wait_ctx: &WaitContext<Token>,
1417     ) -> (
1418         VideoCmdResponseType,
1419         Option<(u32, Vec<VideoEvtResponseType>)>,
1420     ) {
1421         let mut event_ret = None;
1422         let cmd_response = match req {
1423             VideoCmd::QueryCapability { queue_type } => self.query_capabilities(queue_type),
1424             VideoCmd::StreamCreate {
1425                 stream_id,
1426                 coded_format: desired_format,
1427                 input_resource_type,
1428                 output_resource_type,
1429             } => self.stream_create(
1430                 stream_id,
1431                 desired_format,
1432                 input_resource_type,
1433                 output_resource_type,
1434             ),
1435             VideoCmd::StreamDestroy { stream_id } => self.stream_destroy(stream_id),
1436             VideoCmd::StreamDrain { stream_id } => self.stream_drain(stream_id),
1437             VideoCmd::ResourceCreate {
1438                 stream_id,
1439                 queue_type,
1440                 resource_id,
1441                 plane_offsets,
1442                 plane_entries,
1443             } => self.resource_create(
1444                 wait_ctx,
1445                 stream_id,
1446                 queue_type,
1447                 resource_id,
1448                 plane_offsets,
1449                 plane_entries,
1450             ),
1451             VideoCmd::ResourceQueue {
1452                 stream_id,
1453                 queue_type,
1454                 resource_id,
1455                 timestamp,
1456                 data_sizes,
1457             } => {
1458                 let resp =
1459                     self.resource_queue(stream_id, queue_type, resource_id, timestamp, data_sizes);
1460 
1461                 if resp.is_ok() && queue_type == QueueType::Output {
1462                     if let Some(stream) = self.streams.get_mut(&stream_id) {
1463                         // If we have a flush pending, add the response for dequeueing the EOS
1464                         // buffer.
1465                         if stream.eos_manager.client_awaits_eos {
1466                             info!(
1467                                 "stream {}: using queued buffer as EOS for pending flush",
1468                                 stream_id
1469                             );
1470                             event_ret = match stream.eos_manager.try_complete_eos(vec![]) {
1471                                 Some(eos_resps) => Some((stream_id, eos_resps)),
1472                                 None => {
1473                                     error!("stream {}: try_complete_eos() should have returned a valid response. This is a bug.", stream_id);
1474                                     Some((
1475                                         stream_id,
1476                                         vec![VideoEvtResponseType::Event(VideoEvt {
1477                                             typ: EvtType::Error,
1478                                             stream_id,
1479                                         })],
1480                                     ))
1481                                 }
1482                             };
1483                         }
1484                     } else {
1485                         error!(
1486                             "stream {}: the stream ID should be valid here. This is a bug.",
1487                             stream_id
1488                         );
1489                         event_ret = Some((
1490                             stream_id,
1491                             vec![VideoEvtResponseType::Event(VideoEvt {
1492                                 typ: EvtType::Error,
1493                                 stream_id,
1494                             })],
1495                         ));
1496                     }
1497                 }
1498 
1499                 resp
1500             }
1501             VideoCmd::ResourceDestroyAll { stream_id, .. } => self.resource_destroy_all(stream_id),
1502             VideoCmd::QueueClear {
1503                 stream_id,
1504                 queue_type,
1505             } => self.queue_clear(stream_id, queue_type),
1506             VideoCmd::GetParams {
1507                 stream_id,
1508                 queue_type,
1509                 is_ext,
1510             } => self.get_params(stream_id, queue_type, is_ext),
1511             VideoCmd::SetParams {
1512                 stream_id,
1513                 queue_type,
1514                 params:
1515                     Params {
1516                         format,
1517                         frame_width,
1518                         frame_height,
1519                         frame_rate,
1520                         plane_formats,
1521                         resource_type,
1522                         ..
1523                     },
1524                 is_ext,
1525             } => self.set_params(
1526                 wait_ctx,
1527                 stream_id,
1528                 queue_type,
1529                 format,
1530                 frame_width,
1531                 frame_height,
1532                 frame_rate,
1533                 plane_formats,
1534                 if is_ext { Some(resource_type) } else { None },
1535             ),
1536             VideoCmd::QueryControl { query_ctrl_type } => self.query_control(query_ctrl_type),
1537             VideoCmd::GetControl {
1538                 stream_id,
1539                 ctrl_type,
1540             } => self.get_control(stream_id, ctrl_type),
1541             VideoCmd::SetControl {
1542                 stream_id,
1543                 ctrl_val,
1544             } => self.set_control(wait_ctx, stream_id, ctrl_val),
1545         };
1546         let cmd_ret = match cmd_response {
1547             Ok(r) => r,
1548             Err(e) => {
1549                 error!("returning error response: {}", &e);
1550                 VideoCmdResponseType::Sync(e.into())
1551             }
1552         };
1553         (cmd_ret, event_ret)
1554     }
1555 
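    // Reads a single event from the stream's encoder session and translates it into the
    // virtio-video responses (if any) to be sent back to the guest.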
1556     fn process_event(
1557         &mut self,
1558         _desc_map: &mut AsyncCmdDescMap,
1559         stream_id: u32,
1560         _wait_ctx: &WaitContext<Token>,
1561     ) -> Option<Vec<VideoEvtResponseType>> {
1562         let stream = match self.streams.get_mut(&stream_id) {
1563             Some(s) => s,
1564             None => {
1565                 // TODO: remove fd from poll context?
1566                 error!("Received event for missing stream id {}", stream_id);
1567                 return None;
1568             }
1569         };
1570 
1571         let encoder_session = match stream.encoder_session {
1572             Some(ref mut s) => s,
1573             None => {
1574                 error!(
1575                     "Received event for missing encoder session of stream id {}",
1576                     stream_id
1577                 );
1578                 return None;
1579             }
1580         };
1581 
1582         let event = match encoder_session.read_event() {
1583             Ok(e) => e,
1584             Err(e) => {
1585                 error!("Failed to read event for stream id {}: {}", stream_id, e);
1586                 return None;
1587             }
1588         };
1589 
1590         match event {
1591             EncoderEvent::RequireInputBuffers {
1592                 input_count,
1593                 input_frame_width,
1594                 input_frame_height,
1595                 output_buffer_size,
1596             } => stream.require_input_buffers(
1597                 input_count,
1598                 input_frame_width,
1599                 input_frame_height,
1600                 output_buffer_size,
1601             ),
1602             EncoderEvent::ProcessedInputBuffer {
1603                 id: input_buffer_id,
1604             } => stream.processed_input_buffer(input_buffer_id),
1605             EncoderEvent::ProcessedOutputBuffer {
1606                 id: output_buffer_id,
1607                 bytesused,
1608                 keyframe,
1609                 timestamp,
1610             } => stream.processed_output_buffer(output_buffer_id, bytesused, keyframe, timestamp),
1611             EncoderEvent::FlushResponse { flush_done } => stream.flush_response(flush_done),
1612             EncoderEvent::NotifyError { error } => stream.notify_error(error),
1613         }
1614     }
1615 }
1616 
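/// Events reported by an encoder backend session; `process_event()` reads these and maps
/// them onto the stream handlers above.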
1617 #[derive(Debug)]
1618 pub enum EncoderEvent {
1619     RequireInputBuffers {
1620         input_count: u32,
1621         input_frame_width: u32,
1622         input_frame_height: u32,
1623         output_buffer_size: u32,
1624     },
1625     ProcessedInputBuffer {
1626         id: InputBufferId,
1627     },
1628     ProcessedOutputBuffer {
1629         id: OutputBufferId,
1630         bytesused: u32,
1631         keyframe: bool,
1632         timestamp: u64,
1633     },
1634     FlushResponse {
1635         flush_done: bool,
1636     },
1637     #[allow(dead_code)]
1638     NotifyError {
1639         error: VideoError,
1640     },
1641 }
1642 
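/// Encoding parameters collected from the virtio stream state and handed to the backend
/// when a new encoder session is created.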
1643 #[derive(Debug)]
1644 pub struct SessionConfig {
1645     pub src_params: Params,
1646     pub dst_params: Params,
1647     pub dst_profile: Profile,
1648     pub dst_bitrate: Bitrate,
1649     pub dst_h264_level: Option<Level>,
1650     pub frame_rate: u32,
1651 }
1652 
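/// Input/output formats and per-coded-format profiles supported by an encoder backend,
/// used to answer capability queries and to pick valid stream parameters.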
1653 #[derive(Clone)]
1654 pub struct EncoderCapabilities {
1655     pub input_format_descs: Vec<FormatDesc>,
1656     pub output_format_descs: Vec<FormatDesc>,
1657     pub coded_format_profiles: BTreeMap<Format, Vec<Profile>>,
1658 }
1659 
1660 impl EncoderCapabilities {
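    // Fills `src_params` with the closest configuration the backend supports. As a purely
    // illustrative sketch (the format and sizes below are assumptions, not taken from this
    // file): requesting Format::NV12 at 1920x1080 with a stride of 0 would snap the
    // resolution to the nearest advertised frame size, substitute the resulting width for
    // the stride, and derive the per-plane layout from it.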
1661     pub fn populate_src_params(
1662         &self,
1663         src_params: &mut Params,
1664         desired_format: Format,
1665         desired_width: u32,
1666         desired_height: u32,
1667         mut stride: u32,
1668     ) -> VideoResult<()> {
1669         let format_desc = self
1670             .input_format_descs
1671             .iter()
1672             .find(|&format_desc| format_desc.format == desired_format)
1673             .unwrap_or(
1674                 self.input_format_descs
1675                     .first()
1676                     .ok_or(VideoError::InvalidFormat)?,
1677             );
1678 
1679         let (allowed_width, allowed_height) =
1680             find_closest_resolution(&format_desc.frame_formats, desired_width, desired_height);
1681 
1682         if stride == 0 {
1683             stride = allowed_width;
1684         }
1685 
1686         let plane_formats =
1687             PlaneFormat::get_plane_layout(format_desc.format, stride, allowed_height)
1688                 .ok_or(VideoError::InvalidFormat)?;
1689 
1690         src_params.frame_width = allowed_width;
1691         src_params.frame_height = allowed_height;
1692         src_params.format = Some(format_desc.format);
1693         src_params.plane_formats = plane_formats;
1694         Ok(())
1695     }
1696 
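    // Fills `dst_params` with the desired coded format (falling back to the first
    // supported output format) and a single plane of `buffer_size` bytes; the backend may
    // later adjust this size via the RequireInputBuffers event.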
1697     pub fn populate_dst_params(
1698         &self,
1699         dst_params: &mut Params,
1700         desired_format: Format,
1701         buffer_size: u32,
1702     ) -> VideoResult<()> {
1703         // TODO(alexlau): Should the first be the default?
1704         let format_desc = self
1705             .output_format_descs
1706             .iter()
1707             .find(move |&format_desc| format_desc.format == desired_format)
1708             .unwrap_or(
1709                 self.output_format_descs
1710                     .first()
1711                     .ok_or(VideoError::InvalidFormat)?,
1712             );
1713         dst_params.format = Some(format_desc.format);
1714 
1715         // The requested output buffer size might be adjusted by the encoder to match hardware
1716         // requirements in RequireInputBuffers.
1717         dst_params.plane_formats = vec![PlaneFormat {
1718             plane_size: buffer_size,
1719             stride: 0,
1720         }];
1721         Ok(())
1722     }
1723 
1724     pub fn get_profiles(&self, coded_format: &Format) -> Option<&Vec<Profile>> {
1725         self.coded_format_profiles.get(coded_format)
1726     }
1727 
1728     pub fn get_default_profile(&self, coded_format: &Format) -> Option<Profile> {
1729         let profiles = self.get_profiles(coded_format)?;
1730         match profiles.first() {
1731             None => {
1732                 error!("Format {} is supported but has no available profiles.", coded_format);
1733                 None
1734             }
1735             Some(profile) => Some(*profile),
1736         }
1737     }
1738 }
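
// A minimal test sketch (not part of the original file) exercising the capability helpers
// above with an empty, hypothetical capability set.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn empty_capabilities_report_no_profiles() {
        // With no coded formats registered, both profile lookups should return None.
        let caps = EncoderCapabilities {
            input_format_descs: Vec::new(),
            output_format_descs: Vec::new(),
            coded_format_profiles: BTreeMap::new(),
        };
        assert!(caps.get_profiles(&Format::H264).is_none());
        assert!(caps.get_default_profile(&Format::H264).is_none());
    }
}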
1739