xref: /aosp_15_r20/frameworks/av/services/camera/virtualcamera/VirtualCameraSession.cc (revision ec779b8e0859a360c3d303172224686826e6e0e1)
/*
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// #define LOG_NDEBUG 0
#define LOG_TAG "VirtualCameraSession"
#include "VirtualCameraSession.h"

#include <algorithm>
#include <atomic>
#include <chrono>
#include <cmath>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <map>
#include <memory>
#include <mutex>
#include <numeric>
#include <optional>
#include <tuple>
#include <unordered_set>
#include <utility>
#include <vector>

#include "CameraMetadata.h"
#include "EGL/egl.h"
#include "VirtualCameraDevice.h"
#include "VirtualCameraRenderThread.h"
#include "VirtualCameraStream.h"
#include "aidl/android/companion/virtualcamera/SupportedStreamConfiguration.h"
#include "aidl/android/hardware/camera/common/Status.h"
#include "aidl/android/hardware/camera/device/BufferCache.h"
#include "aidl/android/hardware/camera/device/BufferStatus.h"
#include "aidl/android/hardware/camera/device/CameraMetadata.h"
#include "aidl/android/hardware/camera/device/CaptureRequest.h"
#include "aidl/android/hardware/camera/device/HalStream.h"
#include "aidl/android/hardware/camera/device/NotifyMsg.h"
#include "aidl/android/hardware/camera/device/RequestTemplate.h"
#include "aidl/android/hardware/camera/device/ShutterMsg.h"
#include "aidl/android/hardware/camera/device/Stream.h"
#include "aidl/android/hardware/camera/device/StreamBuffer.h"
#include "aidl/android/hardware/camera/device/StreamConfiguration.h"
#include "aidl/android/hardware/camera/device/StreamRotation.h"
#include "aidl/android/hardware/graphics/common/BufferUsage.h"
#include "aidl/android/hardware/graphics/common/PixelFormat.h"
#include "android/hardware_buffer.h"
#include "android/native_window_aidl.h"
#include "fmq/AidlMessageQueue.h"
#include "system/camera_metadata.h"
#include "ui/GraphicBuffer.h"
#include "util/EglDisplayContext.h"
#include "util/EglFramebuffer.h"
#include "util/EglProgram.h"
#include "util/JpegUtil.h"
#include "util/MetadataUtil.h"
#include "util/Util.h"

namespace android {
namespace companion {
namespace virtualcamera {

using ::aidl::android::companion::virtualcamera::IVirtualCameraCallback;
using ::aidl::android::companion::virtualcamera::SupportedStreamConfiguration;
using ::aidl::android::hardware::camera::common::Status;
using ::aidl::android::hardware::camera::device::BufferCache;
using ::aidl::android::hardware::camera::device::CameraMetadata;
using ::aidl::android::hardware::camera::device::CameraOfflineSessionInfo;
using ::aidl::android::hardware::camera::device::CaptureRequest;
using ::aidl::android::hardware::camera::device::HalStream;
using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
using ::aidl::android::hardware::camera::device::ICameraOfflineSession;
using ::aidl::android::hardware::camera::device::RequestTemplate;
using ::aidl::android::hardware::camera::device::Stream;
using ::aidl::android::hardware::camera::device::StreamBuffer;
using ::aidl::android::hardware::camera::device::StreamConfiguration;
using ::aidl::android::hardware::common::fmq::MQDescriptor;
using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
using ::aidl::android::hardware::graphics::common::BufferUsage;
using ::aidl::android::hardware::graphics::common::PixelFormat;
using ::android::base::unique_fd;

namespace {

using metadata_ptr =
    std::unique_ptr<camera_metadata_t, void (*)(camera_metadata_t*)>;

using namespace std::chrono_literals;

// Size of the request/result metadata fast message queue.
// Setting this to 0 always disables the FMQ.
constexpr size_t kMetadataMsgQueueSize = 0;

// Maximum number of buffers to use per single stream.
constexpr size_t kMaxStreamBuffers = 2;

// Thumbnail size (0,0) corresponds to disabling the thumbnail.
const Resolution kDefaultJpegThumbnailSize(0, 0);

camera_metadata_enum_android_control_capture_intent_t requestTemplateToIntent(
    const RequestTemplate type) {
  switch (type) {
    case RequestTemplate::PREVIEW:
      return ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
    case RequestTemplate::STILL_CAPTURE:
      return ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
    case RequestTemplate::VIDEO_RECORD:
      return ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
    case RequestTemplate::VIDEO_SNAPSHOT:
      return ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
    default:
      // Return PREVIEW by default
      return ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
  }
}

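// Returns the highest maxFps value advertised across all supported input
// configurations.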
int getMaxFps(const std::vector<SupportedStreamConfiguration>& configs) {
  return std::transform_reduce(
      configs.begin(), configs.end(), 0,
      [](const int a, const int b) { return std::max(a, b); },
      [](const SupportedStreamConfiguration& config) { return config.maxFps; });
}

CameraMetadata createDefaultRequestSettings(
    const RequestTemplate type,
    const std::vector<SupportedStreamConfiguration>& inputConfigs) {
  int maxFps = getMaxFps(inputConfigs);
  auto metadata =
      MetadataBuilder()
          .setAberrationCorrectionMode(
              ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
          .setControlCaptureIntent(requestTemplateToIntent(type))
          .setControlMode(ANDROID_CONTROL_MODE_AUTO)
          .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
          .setControlAeExposureCompensation(0)
          .setControlAeTargetFpsRange(FpsRange{maxFps, maxFps})
          .setControlAeAntibandingMode(ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO)
          .setControlAePrecaptureTrigger(
              ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
          .setControlAfTrigger(ANDROID_CONTROL_AF_TRIGGER_IDLE)
          .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
          .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
          .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
          .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
          .setFlashMode(ANDROID_FLASH_MODE_OFF)
          .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
          .setJpegQuality(VirtualCameraDevice::kDefaultJpegQuality)
          .setJpegThumbnailQuality(VirtualCameraDevice::kDefaultJpegQuality)
          .setJpegThumbnailSize(0, 0)
          .setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
          .build();
  if (metadata == nullptr) {
    ALOGE("%s: Failed to construct metadata for default request type %s",
          __func__, toString(type).c_str());
    return CameraMetadata();
  } else {
    ALOGV("%s: Successfully created metadata for request type %s", __func__,
          toString(type).c_str());
  }
  return *metadata;
}

HalStream getHalStream(const Stream& stream) {
  HalStream halStream;
  halStream.id = stream.id;
  halStream.physicalCameraId = stream.physicalCameraId;
  halStream.maxBuffers = kMaxStreamBuffers;

  if (stream.format == PixelFormat::IMPLEMENTATION_DEFINED) {
    // If the format is implementation defined, we need to override it
    // with the actual format.
    // TODO(b/301023410) Override with the format based on the
    // camera configuration, once we support more formats.
    halStream.overrideFormat = PixelFormat::YCBCR_420_888;
  } else {
    halStream.overrideFormat = stream.format;
  }
  halStream.overrideDataSpace = stream.dataSpace;

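  // Augment the requested usage with CAMERA_OUTPUT and GPU_RENDER_TARGET so
  // the allocated buffers can be rendered into by the render thread and still
  // be consumed as camera output.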
  halStream.producerUsage = static_cast<BufferUsage>(
      static_cast<int64_t>(stream.usage) |
      static_cast<int64_t>(BufferUsage::CAMERA_OUTPUT) |
      static_cast<int64_t>(BufferUsage::GPU_RENDER_TARGET));

  halStream.supportOffline = false;
  return halStream;
}

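// Returns the requested stream with the largest pixel count; this serves as
// the reference resolution when picking an input configuration.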
Stream getHighestResolutionStream(const std::vector<Stream>& streams) {
  return *(std::max_element(streams.begin(), streams.end(),
                            [](const Stream& a, const Stream& b) {
                              return a.width * a.height < b.width * b.height;
                            }));
}

Resolution resolutionFromStream(const Stream& stream) {
  return Resolution(stream.width, stream.height);
}

Resolution resolutionFromInputConfig(
    const SupportedStreamConfiguration& inputConfig) {
  return Resolution(inputConfig.width, inputConfig.height);
}

std::optional<Resolution> resolutionFromSurface(const sp<Surface> surface) {
  Resolution res{0, 0};
  if (surface == nullptr) {
    ALOGE("%s: Cannot get resolution from null surface", __func__);
    return std::nullopt;
  }

  int status = surface->query(NATIVE_WINDOW_WIDTH, &res.width);
  if (status != NO_ERROR) {
    ALOGE("%s: Failed to get width from surface", __func__);
    return std::nullopt;
  }

  status = surface->query(NATIVE_WINDOW_HEIGHT, &res.height);
  if (status != NO_ERROR) {
    ALOGE("%s: Failed to get height from surface", __func__);
    return std::nullopt;
  }
  return res;
}

std::optional<SupportedStreamConfiguration> pickInputConfigurationForStreams(
    const std::vector<Stream>& requestedStreams,
    const std::vector<SupportedStreamConfiguration>& supportedInputConfigs) {
  Stream maxResolutionStream = getHighestResolutionStream(requestedStreams);
  Resolution maxResolution = resolutionFromStream(maxResolutionStream);

  // Find the best fitting input configuration to satisfy all requested
  // streams: same or higher resolution than the largest requested stream,
  // with the lowest pixel count difference and the same aspect ratio.
  auto isBetterInputConfig = [maxResolution](
                                 const SupportedStreamConfiguration& configA,
                                 const SupportedStreamConfiguration& configB) {
    int maxResPixelCount = maxResolution.width * maxResolution.height;
    int pixelCountDiffA =
        std::abs((configA.width * configA.height) - maxResPixelCount);
    int pixelCountDiffB =
        std::abs((configB.width * configB.height) - maxResPixelCount);

    return pixelCountDiffA < pixelCountDiffB;
  };

  std::optional<SupportedStreamConfiguration> bestConfig;
  for (const SupportedStreamConfiguration& inputConfig : supportedInputConfigs) {
    Resolution inputConfigResolution = resolutionFromInputConfig(inputConfig);
    if (inputConfigResolution < maxResolution ||
        !isApproximatellySameAspectRatio(inputConfigResolution, maxResolution)) {
      // We don't want to upscale from a lower resolution or use a different
      // aspect ratio, so skip this configuration.
      continue;
    }

    if (!bestConfig.has_value() ||
        isBetterInputConfig(inputConfig, bestConfig.value())) {
      bestConfig = inputConfig;
    }
  }

  return bestConfig;
}

RequestSettings createSettingsFromMetadata(const CameraMetadata& metadata) {
  return RequestSettings{
      .jpegQuality = getJpegQuality(metadata).value_or(
          VirtualCameraDevice::kDefaultJpegQuality),
      .jpegOrientation = getJpegOrientation(metadata),
      .thumbnailResolution =
          getJpegThumbnailSize(metadata).value_or(Resolution(0, 0)),
      .thumbnailJpegQuality = getJpegThumbnailQuality(metadata).value_or(
          VirtualCameraDevice::kDefaultJpegQuality),
      .fpsRange = getFpsRange(metadata),
      .captureIntent = getCaptureIntent(metadata).value_or(
          ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW),
      .gpsCoordinates = getGpsCoordinates(metadata),
      .aePrecaptureTrigger = getPrecaptureTrigger(metadata)};
}

}  // namespace

VirtualCameraSession::VirtualCameraSession(
    std::shared_ptr<VirtualCameraDevice> cameraDevice,
    std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback,
    std::shared_ptr<IVirtualCameraCallback> virtualCameraClientCallback)
    : mCameraDevice(cameraDevice),
      mCameraDeviceCallback(cameraDeviceCallback),
      mVirtualCameraClientCallback(virtualCameraClientCallback) {
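  // Create the request/result metadata fast message queues up front; with
  // kMetadataMsgQueueSize set to 0 the FMQ is effectively disabled.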
  mRequestMetadataQueue = std::make_unique<RequestMetadataQueue>(
      kMetadataMsgQueueSize, false /* non blocking */);
  if (!mRequestMetadataQueue->isValid()) {
    ALOGE("%s: invalid request fmq", __func__);
  }

  mResultMetadataQueue = std::make_shared<ResultMetadataQueue>(
      kMetadataMsgQueueSize, false /* non blocking */);
  if (!mResultMetadataQueue->isValid()) {
    ALOGE("%s: invalid result fmq", __func__);
  }
}

ndk::ScopedAStatus VirtualCameraSession::close() {
  ALOGV("%s", __func__);
  {
    std::lock_guard<std::mutex> lock(mLock);

    if (mVirtualCameraClientCallback != nullptr) {
      mVirtualCameraClientCallback->onStreamClosed(mCurrentInputStreamId);
    }

    if (mRenderThread != nullptr) {
      mRenderThread->stop();
      mRenderThread = nullptr;
    }
  }

  mSessionContext.closeAllStreams();
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::configureStreams(
    const StreamConfiguration& in_requestedConfiguration,
    std::vector<HalStream>* _aidl_return) {
  ALOGV("%s: requestedConfiguration: %s", __func__,
        in_requestedConfiguration.toString().c_str());

  if (_aidl_return == nullptr) {
    return cameraStatus(Status::ILLEGAL_ARGUMENT);
  }

  std::shared_ptr<VirtualCameraDevice> virtualCamera = mCameraDevice.lock();
  if (virtualCamera == nullptr) {
    ALOGW("%s: configure called on already unregistered camera", __func__);
    return cameraStatus(Status::CAMERA_DISCONNECTED);
  }

  mSessionContext.removeStreamsNotInStreamConfiguration(
      in_requestedConfiguration);

  auto& streams = in_requestedConfiguration.streams;
  auto& halStreams = *_aidl_return;
  halStreams.clear();
  halStreams.resize(in_requestedConfiguration.streams.size());

  if (!virtualCamera->isStreamCombinationSupported(in_requestedConfiguration)) {
    ALOGE("%s: Requested stream configuration is not supported", __func__);
    return cameraStatus(Status::ILLEGAL_ARGUMENT);
  }

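  // Pick the input configuration that best matches the requested output
  // streams and set up (or reuse) the render thread that will serve them.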
  sp<Surface> inputSurface = nullptr;
  int inputStreamId = -1;
  std::optional<SupportedStreamConfiguration> inputConfig;
  {
    std::lock_guard<std::mutex> lock(mLock);
    for (int i = 0; i < in_requestedConfiguration.streams.size(); ++i) {
      halStreams[i] = getHalStream(streams[i]);
      if (mSessionContext.initializeStream(streams[i])) {
        ALOGV("Configured new stream: %s", streams[i].toString().c_str());
      }
    }

    inputConfig = pickInputConfigurationForStreams(
        streams, virtualCamera->getInputConfigs());
    if (!inputConfig.has_value()) {
      ALOGE(
          "%s: Failed to pick any input configuration for stream configuration "
          "request: %s",
          __func__, in_requestedConfiguration.toString().c_str());
      return cameraStatus(Status::ILLEGAL_ARGUMENT);
    }

    if (mRenderThread != nullptr) {
      // If there's already a render thread, this is not the first
      // configuration call. If the surface has the same resolution and pixel
      // format as the picked config, we don't need to do anything; the current
      // render thread can serve the new set of streams. However, if it
      // differs, we need to discard the current surface and reinitialize the
      // render thread.

      std::optional<Resolution> currentInputResolution =
          resolutionFromSurface(mRenderThread->getInputSurface());
      if (currentInputResolution.has_value() &&
          *currentInputResolution == resolutionFromInputConfig(*inputConfig)) {
        ALOGI(
            "%s: Newly configured set of streams matches existing client "
            "surface (%dx%d)",
            __func__, currentInputResolution->width,
            currentInputResolution->height);
        return ndk::ScopedAStatus::ok();
      }

      if (mVirtualCameraClientCallback != nullptr) {
        mVirtualCameraClientCallback->onStreamClosed(mCurrentInputStreamId);
      }

      ALOGV(
          "%s: Newly requested output streams are not suitable for "
          "pre-existing surface (%dx%d), creating new surface (%dx%d)",
          __func__, currentInputResolution->width,
          currentInputResolution->height, inputConfig->width,
          inputConfig->height);

      mRenderThread->flush();
      mRenderThread->stop();
    }

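    // (Re)create the render thread for the picked input resolution and
    // allocate a fresh input stream id for this configuration.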
    mRenderThread = std::make_unique<VirtualCameraRenderThread>(
        mSessionContext, resolutionFromInputConfig(*inputConfig),
        virtualCamera->getMaxInputResolution(), mCameraDeviceCallback);
    mRenderThread->start();
    inputSurface = mRenderThread->getInputSurface();
    inputStreamId = mCurrentInputStreamId =
        virtualCamera->allocateInputStreamId();
  }

  if (mVirtualCameraClientCallback != nullptr && inputSurface != nullptr) {
    // TODO(b/301023410) Pass streamId based on client input stream id once
    // support for multiple input streams is implemented. For now we always
    // create a single texture.
    mVirtualCameraClientCallback->onStreamConfigured(
        inputStreamId, aidl::android::view::Surface(inputSurface.get()),
        inputConfig->width, inputConfig->height, inputConfig->pixelFormat);
  }

  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::constructDefaultRequestSettings(
    RequestTemplate in_type, CameraMetadata* _aidl_return) {
  ALOGV("%s: type %d", __func__, static_cast<int32_t>(in_type));

  std::shared_ptr<VirtualCameraDevice> camera = mCameraDevice.lock();
  if (camera == nullptr) {
    ALOGW(
        "%s: constructDefaultRequestSettings called on already unregistered "
        "camera",
        __func__);
    return cameraStatus(Status::CAMERA_DISCONNECTED);
  }

  switch (in_type) {
    case RequestTemplate::PREVIEW:
    case RequestTemplate::STILL_CAPTURE:
    case RequestTemplate::VIDEO_RECORD:
    case RequestTemplate::VIDEO_SNAPSHOT: {
      *_aidl_return =
          createDefaultRequestSettings(in_type, camera->getInputConfigs());
      return ndk::ScopedAStatus::ok();
    }
    case RequestTemplate::MANUAL:
    case RequestTemplate::ZERO_SHUTTER_LAG:
      // The MANUAL and ZERO_SHUTTER_LAG templates are not supported.
      return ndk::ScopedAStatus::fromServiceSpecificError(
          static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
    default:
      ALOGE("%s: unknown request template type %d", __FUNCTION__,
            static_cast<int>(in_type));
      return ndk::ScopedAStatus::fromServiceSpecificError(
          static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
  }
}

ndk::ScopedAStatus VirtualCameraSession::flush() {
  ALOGV("%s", __func__);
  std::lock_guard<std::mutex> lock(mLock);
  if (mRenderThread != nullptr) {
    mRenderThread->flush();
  }
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::getCaptureRequestMetadataQueue(
    MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) {
  ALOGV("%s", __func__);
  *_aidl_return = mRequestMetadataQueue->dupeDesc();
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::getCaptureResultMetadataQueue(
    MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) {
  ALOGV("%s", __func__);
  *_aidl_return = mResultMetadataQueue->dupeDesc();
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::isReconfigurationRequired(
    const CameraMetadata& in_oldSessionParams,
    const CameraMetadata& in_newSessionParams, bool* _aidl_return) {
  ALOGV("%s: oldSessionParams: %s newSessionParams: %s", __func__,
        in_oldSessionParams.toString().c_str(),
        in_newSessionParams.toString().c_str());

  if (_aidl_return == nullptr) {
    return ndk::ScopedAStatus::fromServiceSpecificError(
        static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
  }

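  // Conservatively report that reconfiguration is always required.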
  *_aidl_return = true;
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::processCaptureRequest(
    const std::vector<CaptureRequest>& in_requests,
    const std::vector<BufferCache>& in_cachesToRemove, int32_t* _aidl_return) {
  ALOGV("%s", __func__);

  if (!in_cachesToRemove.empty()) {
    mSessionContext.removeBufferCaches(in_cachesToRemove);
  }

  for (const auto& captureRequest : in_requests) {
    auto status = processCaptureRequest(captureRequest);
    if (!status.isOk()) {
      return status;
    }
  }
  *_aidl_return = in_requests.size();
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::signalStreamFlush(
    const std::vector<int32_t>& in_streamIds, int32_t in_streamConfigCounter) {
  ALOGV("%s", __func__);

  (void)in_streamIds;
  (void)in_streamConfigCounter;
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::switchToOffline(
    const std::vector<int32_t>& in_streamsToKeep,
    CameraOfflineSessionInfo* out_offlineSessionInfo,
    std::shared_ptr<ICameraOfflineSession>* _aidl_return) {
  ALOGV("%s", __func__);

  (void)in_streamsToKeep;
  (void)out_offlineSessionInfo;

  if (_aidl_return == nullptr) {
    return ndk::ScopedAStatus::fromServiceSpecificError(
        static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
  }

  *_aidl_return = nullptr;
  return cameraStatus(Status::OPERATION_NOT_SUPPORTED);
}

ndk::ScopedAStatus VirtualCameraSession::repeatingRequestEnd(
    int32_t in_frameNumber, const std::vector<int32_t>& in_streamIds) {
  ALOGV("%s", __func__);
  (void)in_frameNumber;
  (void)in_streamIds;
  return ndk::ScopedAStatus::ok();
}

std::set<int> VirtualCameraSession::getStreamIds() const {
  return mSessionContext.getStreamIds();
}

ndk::ScopedAStatus VirtualCameraSession::processCaptureRequest(
    const CaptureRequest& request) {
  ALOGV("%s: request: %s", __func__, request.toString().c_str());

  std::shared_ptr<ICameraDeviceCallback> cameraCallback = nullptr;
  RequestSettings requestSettings;
  int currentInputStreamId;
  {
    std::lock_guard<std::mutex> lock(mLock);

    // If the metadata is empty, the last received metadata applies; if it's
    // non-empty, update it.
    if (!request.settings.metadata.empty()) {
      mCurrentRequestMetadata = request.settings;
    }

    // We don't have any metadata for this request - this means none was
    // received in the first request, which is an error state.
    if (mCurrentRequestMetadata.metadata.empty()) {
      return cameraStatus(Status::ILLEGAL_ARGUMENT);
    }

    requestSettings = createSettingsFromMetadata(mCurrentRequestMetadata);

    cameraCallback = mCameraDeviceCallback;
    currentInputStreamId = mCurrentInputStreamId;
  }

  if (cameraCallback == nullptr) {
    ALOGE(
        "%s: processCaptureRequest called, but there's no camera callback "
        "configured",
        __func__);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  if (!mSessionContext.importBuffersFromCaptureRequest(request)) {
    ALOGE("Failed to import buffers from capture request.");
    return cameraStatus(Status::INTERNAL_ERROR);
  }

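  // Pair each output buffer with its acquire fence so the render thread can
  // wait for the producer before writing into the buffer.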
  std::vector<CaptureRequestBuffer> taskBuffers;
  taskBuffers.reserve(request.outputBuffers.size());
  for (const StreamBuffer& streamBuffer : request.outputBuffers) {
    taskBuffers.emplace_back(streamBuffer.streamId, streamBuffer.bufferId,
                             importFence(streamBuffer.acquireFence));
  }

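  // Hand the capture off to the render thread; rendering and result delivery
  // happen asynchronously on that thread.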
  {
    std::lock_guard<std::mutex> lock(mLock);
    if (mRenderThread == nullptr) {
      ALOGE(
          "%s: processCaptureRequest (frameNumber %d) called before configure "
          "(render thread not initialized)",
          __func__, request.frameNumber);
      return cameraStatus(Status::INTERNAL_ERROR);
    }
    mRenderThread->enqueueTask(std::make_unique<ProcessCaptureRequestTask>(
        request.frameNumber, taskBuffers, requestSettings));
  }

  if (mVirtualCameraClientCallback != nullptr) {
    auto status = mVirtualCameraClientCallback->onProcessCaptureRequest(
        currentInputStreamId, request.frameNumber);
    if (!status.isOk()) {
      ALOGE(
          "Failed to invoke onProcessCaptureRequest client callback for frame "
          "%d",
          request.frameNumber);
    }
  }

  return ndk::ScopedAStatus::ok();
}

}  // namespace virtualcamera
}  // namespace companion
}  // namespace android