xref: /aosp_15_r20/hardware/interfaces/camera/provider/aidl/vts/camera_aidl_test.cpp (revision 4d7e907c777eeecc4c5bd7cf640a754fac206ff7)
1 /*
2  * Copyright (C) 2022 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #include "camera_aidl_test.h"
18 
19 #include <inttypes.h>
20 
21 #include <CameraParameters.h>
22 #include <HandleImporter.h>
23 #include <aidl/android/hardware/camera/device/ICameraDevice.h>
24 #include <aidl/android/hardware/camera/metadata/CameraMetadataTag.h>
25 #include <aidl/android/hardware/camera/metadata/RequestAvailableColorSpaceProfilesMap.h>
26 #include <aidl/android/hardware/camera/metadata/RequestAvailableDynamicRangeProfilesMap.h>
27 #include <aidl/android/hardware/camera/metadata/SensorInfoColorFilterArrangement.h>
28 #include <aidl/android/hardware/camera/metadata/SensorPixelMode.h>
29 #include <aidl/android/hardware/camera/provider/BnCameraProviderCallback.h>
30 #include <aidlcommonsupport/NativeHandle.h>
31 #include <android/binder_manager.h>
32 #include <android/binder_process.h>
33 #include <com_android_internal_camera_flags.h>
34 #include <device_cb.h>
35 #include <empty_device_cb.h>
36 #include <grallocusage/GrallocUsageConversion.h>
37 #include <hardware/gralloc1.h>
38 #include <simple_device_cb.h>
39 #include <ui/Fence.h>
40 #include <ui/GraphicBufferAllocator.h>
41 #include <regex>
42 #include <typeinfo>
43 #include "utils/Errors.h"
44 #include <nativebase/nativebase.h>
45 
46 using ::aidl::android::hardware::camera::common::CameraDeviceStatus;
47 using ::aidl::android::hardware::camera::common::TorchModeStatus;
48 using ::aidl::android::hardware::camera::device::CameraMetadata;
49 using ::aidl::android::hardware::camera::device::ICameraDevice;
50 using ::aidl::android::hardware::camera::metadata::CameraMetadataTag;
51 using ::aidl::android::hardware::camera::metadata::SensorInfoColorFilterArrangement;
52 using ::aidl::android::hardware::camera::metadata::SensorPixelMode;
53 using ::aidl::android::hardware::camera::provider::BnCameraProviderCallback;
54 using ::aidl::android::hardware::camera::provider::ConcurrentCameraIdCombination;
55 using ::aidl::android::hardware::camera::provider::ICameraProvider;
56 using ::aidl::android::hardware::common::NativeHandle;
57 using ::android::hardware::camera::common::V1_0::helper::Size;
58 using ::ndk::ScopedAStatus;
59 using ::ndk::SpAIBinder;
60 
61 namespace {
62 namespace flags = com::android::internal::camera::flags;
63 
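// Splits a provider instance name of the form <service_name>/<type>/<id> into its type string and numeric id.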
64 bool parseProviderName(const std::string& serviceDescriptor, std::string* type /*out*/,
65                        uint32_t* id /*out*/) {
66     if (!type || !id) {
67         ADD_FAILURE();
68         return false;
69     }
70 
71     // expected format: <service_name>/<type>/<id>
72     std::string::size_type slashIdx1 = serviceDescriptor.find('/');
73     if (slashIdx1 == std::string::npos || slashIdx1 == serviceDescriptor.size() - 1) {
74         ADD_FAILURE() << "Provider name does not have / separator between name, type, and id";
75         return false;
76     }
77 
78     std::string::size_type slashIdx2 = serviceDescriptor.find('/', slashIdx1 + 1);
79     if (slashIdx2 == std::string::npos || slashIdx2 == serviceDescriptor.size() - 1) {
80         ADD_FAILURE() << "Provider name does not have / separator between type and id";
81         return false;
82     }
83 
84     std::string typeVal = serviceDescriptor.substr(slashIdx1 + 1, slashIdx2 - slashIdx1 - 1);
85 
86     char* endPtr;
87     errno = 0;
88     int64_t idVal = strtol(serviceDescriptor.c_str() + slashIdx2 + 1, &endPtr, 10);
89     if (errno != 0) {
90         ADD_FAILURE() << "cannot parse provider id as an integer: " << serviceDescriptor.c_str()
91                       << ": " << strerror(errno) << " (" << errno << ")";
92         return false;
93     }
94     if (endPtr != serviceDescriptor.c_str() + serviceDescriptor.size()) {
95         ADD_FAILURE() << "provider id has unexpected length " << serviceDescriptor.c_str();
96         return false;
97     }
98     if (idVal < 0) {
99         ADD_FAILURE() << "id is negative: " << serviceDescriptor.c_str() << idVal;
100         return false;
101     }
102 
103     *type = typeVal;
104     *id = static_cast<uint32_t>(idVal);
105 
106     return true;
107 }
108 
109 namespace flags = com::android::internal::camera::flags;
110 
111 const std::vector<int64_t> kMandatoryUseCases = {
112         ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
113         ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW,
114         ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE,
115         ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD,
116         ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL,
117         ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL};
118 }  // namespace
119 
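// Per-test setup: connect to the camera provider instance named by the test parameter and report the NORMAL device state.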
120 void CameraAidlTest::SetUp() {
121     std::string serviceDescriptor = GetParam();
122     ALOGI("get service with name: %s", serviceDescriptor.c_str());
123 
124     bool success = ABinderProcess_setThreadPoolMaxThreadCount(5);
125     ALOGI("ABinderProcess_setThreadPoolMaxThreadCount returns %s", success ? "true" : "false");
126     ASSERT_TRUE(success);
127     ABinderProcess_startThreadPool();
128 
129     SpAIBinder cameraProviderBinder =
130             SpAIBinder(AServiceManager_waitForService(serviceDescriptor.c_str()));
131     ASSERT_NE(cameraProviderBinder.get(), nullptr);
132 
133     std::shared_ptr<ICameraProvider> cameraProvider =
134             ICameraProvider::fromBinder(cameraProviderBinder);
135     ASSERT_NE(cameraProvider.get(), nullptr);
136     mProvider = cameraProvider;
137     uint32_t id;
138     ASSERT_TRUE(parseProviderName(serviceDescriptor, &mProviderType, &id));
139 
140     notifyDeviceState(ICameraProvider::DEVICE_STATE_NORMAL);
141 }
142 
143 void CameraAidlTest::TearDown() {
144     if (mSession != nullptr) {
145         ndk::ScopedAStatus ret = mSession->close();
146         ASSERT_TRUE(ret.isOk());
147     }
148 }
149 
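// Waits (up to 300ms per buffer) on each output buffer's release fence and advances the stored timestamp to the fence signal time when that is later.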
150 void CameraAidlTest::waitForReleaseFence(
151         std::vector<InFlightRequest::StreamBufferAndTimestamp>& resultOutputBuffers) {
152     for (auto& bufferAndTimestamp : resultOutputBuffers) {
153         // wait for the fence timestamp and store it along with the buffer
154         android::sp<android::Fence> releaseFence = nullptr;
155         const native_handle_t* releaseFenceHandle = bufferAndTimestamp.buffer.releaseFence;
156         if (releaseFenceHandle != nullptr && releaseFenceHandle->numFds == 1 &&
157             releaseFenceHandle->data[0] >= 0) {
158             releaseFence = new android::Fence(dup(releaseFenceHandle->data[0]));
159         }
160         if (releaseFence && releaseFence->isValid()) {
161             releaseFence->wait(/*ms*/ 300);
162             nsecs_t releaseTime = releaseFence->getSignalTime();
163             if (bufferAndTimestamp.timeStamp < releaseTime)
164                 bufferAndTimestamp.timeStamp = releaseTime;
165         }
166     }
167 }
168 
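// Returns the provider's camera id list plus any external cameras reported through cameraDeviceStatusChange, filtered to secure-only or non-secure devices depending on addSecureOnly.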
169 std::vector<std::string> CameraAidlTest::getCameraDeviceNames(
170         std::shared_ptr<ICameraProvider>& provider, bool addSecureOnly) {
171     std::vector<std::string> cameraDeviceNames;
172 
173     ScopedAStatus ret = provider->getCameraIdList(&cameraDeviceNames);
174     if (!ret.isOk()) {
175         ADD_FAILURE() << "Could not get camera id list";
176     }
177 
178     // External camera devices are reported through cameraDeviceStatusChange
179     struct ProviderCb : public BnCameraProviderCallback {
180         ScopedAStatus cameraDeviceStatusChange(const std::string& devName,
181                                                CameraDeviceStatus newStatus) override {
182             ALOGI("camera device status callback name %s, status %d", devName.c_str(),
183                   (int)newStatus);
184             if (newStatus == CameraDeviceStatus::PRESENT) {
185                 externalCameraDeviceNames.push_back(devName);
186             }
187             return ScopedAStatus::ok();
188         }
189 
190         ScopedAStatus torchModeStatusChange(const std::string&, TorchModeStatus) override {
191             return ScopedAStatus::ok();
192         }
193 
194         ScopedAStatus physicalCameraDeviceStatusChange(
195                 const std::string&, const std::string&,
196                 ::aidl::android::hardware::camera::common::CameraDeviceStatus) override {
197             return ScopedAStatus::ok();
198         }
199 
200         std::vector<std::string> externalCameraDeviceNames;
201     };
202     std::shared_ptr<ProviderCb> cb = ndk::SharedRefBase::make<ProviderCb>();
203     auto status = mProvider->setCallback(cb);
204 
205     for (const auto& devName : cb->externalCameraDeviceNames) {
206         if (cameraDeviceNames.end() ==
207             std::find(cameraDeviceNames.begin(), cameraDeviceNames.end(), devName)) {
208             cameraDeviceNames.push_back(devName);
209         }
210     }
211 
212     std::vector<std::string> retList;
213     for (auto& cameraDeviceName : cameraDeviceNames) {
214         bool isSecureOnlyCamera = isSecureOnly(mProvider, cameraDeviceName);
215         if (addSecureOnly) {
216             if (isSecureOnlyCamera) {
217                 retList.emplace_back(cameraDeviceName);
218             }
219         } else if (!isSecureOnlyCamera) {
220             retList.emplace_back(cameraDeviceName);
221         }
222     }
223     return retList;
224 }
225 
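// A device is secure-only when its static metadata classifies it as SystemCameraKind::HIDDEN_SECURE_CAMERA.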
226 bool CameraAidlTest::isSecureOnly(const std::shared_ptr<ICameraProvider>& provider,
227                                   const std::string& name) {
228     std::shared_ptr<ICameraDevice> cameraDevice = nullptr;
229     ScopedAStatus retInterface = provider->getCameraDeviceInterface(name, &cameraDevice);
230     if (!retInterface.isOk()) {
231         ADD_FAILURE() << "Failed to get camera device interface for " << name;
232     }
233 
234     CameraMetadata cameraCharacteristics;
235     ScopedAStatus retChars = cameraDevice->getCameraCharacteristics(&cameraCharacteristics);
236     if (!retChars.isOk()) {
237         ADD_FAILURE() << "Failed to get camera characteristics for device " << name;
238     }
239 
240     camera_metadata_t* chars =
241             reinterpret_cast<camera_metadata_t*>(cameraCharacteristics.metadata.data());
242 
243     SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
244     Status retCameraKind = getSystemCameraKind(chars, &systemCameraKind);
245     if (retCameraKind != Status::OK) {
246         ADD_FAILURE() << "Failed to get camera kind for " << name;
247     }
248 
249     return systemCameraKind == SystemCameraKind::HIDDEN_SECURE_CAMERA;
250 }
251 
252 std::map<std::string, std::string> CameraAidlTest::getCameraDeviceIdToNameMap(
253         std::shared_ptr<ICameraProvider> provider) {
254     std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(provider);
255 
256     std::map<std::string, std::string> idToNameMap;
257     for (auto& name : cameraDeviceNames) {
258         std::string version, cameraId;
259         if (!matchDeviceName(name, mProviderType, &version, &cameraId)) {
260             ADD_FAILURE();
261         }
262         idToNameMap.insert(std::make_pair(std::string(cameraId), name));
263     }
264     return idToNameMap;
265 }
266 
267 void CameraAidlTest::verifyMonochromeCameraResult(
268         const ::android::hardware::camera::common::V1_0::helper::CameraMetadata& metadata) {
269     camera_metadata_ro_entry entry;
270 
271     // Check tags that are not applicable for monochrome camera
272     ASSERT_FALSE(metadata.exists(ANDROID_SENSOR_GREEN_SPLIT));
273     ASSERT_FALSE(metadata.exists(ANDROID_SENSOR_NEUTRAL_COLOR_POINT));
274     ASSERT_FALSE(metadata.exists(ANDROID_COLOR_CORRECTION_MODE));
275     ASSERT_FALSE(metadata.exists(ANDROID_COLOR_CORRECTION_TRANSFORM));
276     ASSERT_FALSE(metadata.exists(ANDROID_COLOR_CORRECTION_GAINS));
277 
278     // Check dynamicBlackLevel
279     entry = metadata.find(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
280     if (entry.count > 0) {
281         ASSERT_EQ(entry.count, 4);
282         for (size_t i = 1; i < entry.count; i++) {
283             ASSERT_FLOAT_EQ(entry.data.f[i], entry.data.f[0]);
284         }
285     }
286 
287     // Check noiseProfile
288     entry = metadata.find(ANDROID_SENSOR_NOISE_PROFILE);
289     if (entry.count > 0) {
290         ASSERT_EQ(entry.count, 2);
291     }
292 
293     // Check lensShadingMap
294     entry = metadata.find(ANDROID_STATISTICS_LENS_SHADING_MAP);
295     if (entry.count > 0) {
296         ASSERT_EQ(entry.count % 4, 0);
297         for (size_t i = 0; i < entry.count / 4; i++) {
298             ASSERT_FLOAT_EQ(entry.data.f[i * 4 + 1], entry.data.f[i * 4]);
299             ASSERT_FLOAT_EQ(entry.data.f[i * 4 + 2], entry.data.f[i * 4]);
300             ASSERT_FLOAT_EQ(entry.data.f[i * 4 + 3], entry.data.f[i * 4]);
301         }
302     }
303 
304     // Check tonemapCurve
305     camera_metadata_ro_entry curveRed = metadata.find(ANDROID_TONEMAP_CURVE_RED);
306     camera_metadata_ro_entry curveGreen = metadata.find(ANDROID_TONEMAP_CURVE_GREEN);
307     camera_metadata_ro_entry curveBlue = metadata.find(ANDROID_TONEMAP_CURVE_BLUE);
308     if (curveRed.count > 0 && curveGreen.count > 0 && curveBlue.count > 0) {
309         ASSERT_EQ(curveRed.count, curveGreen.count);
310         ASSERT_EQ(curveRed.count, curveBlue.count);
311         for (size_t i = 0; i < curveRed.count; i++) {
312             ASSERT_FLOAT_EQ(curveGreen.data.f[i], curveRed.data.f[i]);
313             ASSERT_FLOAT_EQ(curveBlue.data.f[i], curveRed.data.f[i]);
314         }
315     }
316 }
317 
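// The stream use case capability must be advertised exactly when all mandatory use cases are listed; whenever use cases are listed, DEFAULT must be among them and every value must fall in the defined or vendor ranges.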
318 void CameraAidlTest::verifyStreamUseCaseCharacteristics(const camera_metadata_t* metadata) {
319     camera_metadata_ro_entry entry;
320     bool hasStreamUseCaseCap = supportsStreamUseCaseCap(metadata);
321 
322     bool supportMandatoryUseCases = false;
323     int retcode = find_camera_metadata_ro_entry(metadata, ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES,
324                                                 &entry);
325     if ((0 == retcode) && (entry.count > 0)) {
326         supportMandatoryUseCases = true;
327         for (size_t i = 0; i < kMandatoryUseCases.size(); i++) {
328             if (std::find(entry.data.i64, entry.data.i64 + entry.count, kMandatoryUseCases[i]) ==
329                 entry.data.i64 + entry.count) {
330                 supportMandatoryUseCases = false;
331                 break;
332             }
333         }
334         bool supportDefaultUseCase = false;
335         for (size_t i = 0; i < entry.count; i++) {
336             if (entry.data.i64[i] == ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
337                 supportDefaultUseCase = true;
338             }
339             ASSERT_TRUE(entry.data.i64[i] <= ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW
340                         || entry.data.i64[i] >=
341                                 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VENDOR_START);
342         }
343         ASSERT_TRUE(supportDefaultUseCase);
344     }
345 
346     ASSERT_EQ(hasStreamUseCaseCap, supportMandatoryUseCases);
347 }
348 
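// If ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES is advertised (and includes OFF), the matching request, result, and characteristics keys must all be advertised as well.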
349 void CameraAidlTest::verifySettingsOverrideCharacteristics(const camera_metadata_t* metadata) {
350     camera_metadata_ro_entry entry;
351     int retcode = find_camera_metadata_ro_entry(metadata,
352             ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES, &entry);
353     bool supportSettingsOverride = false;
354     if (0 == retcode) {
355         supportSettingsOverride = true;
356         bool hasOff = false;
357         for (size_t i = 0; i < entry.count; i++) {
358             if (entry.data.u8[i] == ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF) {
359                 hasOff = true;
360             }
361         }
362         ASSERT_TRUE(hasOff);
363     }
364 
365     // Check availableRequestKeys
366     retcode = find_camera_metadata_ro_entry(metadata,
367             ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, &entry);
368     bool hasSettingsOverrideRequestKey = false;
369     if ((0 == retcode) && (entry.count > 0)) {
370         hasSettingsOverrideRequestKey =
371                 std::find(entry.data.i32, entry.data.i32 + entry.count,
372                         ANDROID_CONTROL_SETTINGS_OVERRIDE) != entry.data.i32 + entry.count;
373     } else {
374         ADD_FAILURE() << "Get camera availableRequestKeys failed!";
375     }
376 
377     // Check availableResultKeys
378     retcode = find_camera_metadata_ro_entry(metadata,
379             ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, &entry);
380     bool hasSettingsOverrideResultKey = false;
381     bool hasOverridingFrameNumberKey = false;
382     if ((0 == retcode) && (entry.count > 0)) {
383         hasSettingsOverrideResultKey =
384                 std::find(entry.data.i32, entry.data.i32 + entry.count,
385                         ANDROID_CONTROL_SETTINGS_OVERRIDE) != entry.data.i32 + entry.count;
386         hasOverridingFrameNumberKey =
387                 std::find(entry.data.i32, entry.data.i32 + entry.count,
388                         ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER)
389                         != entry.data.i32 + entry.count;
390     } else {
391         ADD_FAILURE() << "Get camera availableResultKeys failed!";
392     }
393 
394     // Check availableCharacteristicKeys
395     retcode = find_camera_metadata_ro_entry(metadata,
396             ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, &entry);
397     bool hasSettingsOverrideCharacteristicsKey = false;
398     if ((0 == retcode) && (entry.count > 0)) {
399         hasSettingsOverrideCharacteristicsKey = std::find(entry.data.i32,
400                 entry.data.i32 + entry.count, ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES)
401                         != entry.data.i32 + entry.count;
402     } else {
403         ADD_FAILURE() << "Get camera availableCharacteristicsKeys failed!";
404     }
405 
406     ASSERT_EQ(supportSettingsOverride, hasSettingsOverrideRequestKey);
407     ASSERT_EQ(supportSettingsOverride, hasSettingsOverrideResultKey);
408     ASSERT_EQ(supportSettingsOverride, hasOverridingFrameNumberKey);
409     ASSERT_EQ(supportSettingsOverride, hasSettingsOverrideCharacteristicsKey);
410 }
411 
412 Status CameraAidlTest::isMonochromeCamera(const camera_metadata_t* staticMeta) {
413     Status ret = Status::OPERATION_NOT_SUPPORTED;
414     if (nullptr == staticMeta) {
415         return Status::ILLEGAL_ARGUMENT;
416     }
417 
418     camera_metadata_ro_entry entry;
419     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
420                                            &entry);
421 
422     if (0 != rc) {
423         return Status::ILLEGAL_ARGUMENT;
424     }
425 
426     for (size_t i = 0; i < entry.count; i++) {
427         if (ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME == entry.data.u8[i]) {
428             ret = Status::OK;
429             break;
430         }
431     }
432 
433     return ret;
434 }
435 
436 Status CameraAidlTest::isLogicalMultiCamera(const camera_metadata_t* staticMeta) {
437     Status ret = Status::OPERATION_NOT_SUPPORTED;
438     if (nullptr == staticMeta) {
439         return Status::ILLEGAL_ARGUMENT;
440     }
441 
442     camera_metadata_ro_entry entry;
443     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
444                                            &entry);
445     if (0 != rc) {
446         return Status::ILLEGAL_ARGUMENT;
447     }
448 
449     for (size_t i = 0; i < entry.count; i++) {
450         if (ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA == entry.data.u8[i]) {
451             ret = Status::OK;
452             break;
453         }
454     }
455 
456     return ret;
457 }
458 
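// Readout timestamp support is assumed when ANDROID_SENSOR_READOUT_TIMESTAMP is absent; it is unsupported only when the entry explicitly reports NOT_SUPPORTED.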
459 bool CameraAidlTest::isReadoutTimestampSupported(const camera_metadata_t* staticMeta) {
460     camera_metadata_ro_entry readoutTimestampEntry;
461     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_SENSOR_READOUT_TIMESTAMP,
462                                            &readoutTimestampEntry);
463     if (rc != 0) {
464         ALOGI("%s: Failed to find ANDROID_SENSOR_READOUT_TIMESTAMP", __FUNCTION__);
465         return true;
466     }
467     if (readoutTimestampEntry.count == 1 && !readoutTimestampEntry.data.u8[0]) {
468         ALOGI("%s: readout timestamp not supported", __FUNCTION__);
469         return false;
470     }
471     ALOGI("%s: readout timestamp supported", __FUNCTION__);
472     return true;
473 }
474 
475 void CameraAidlTest::verifyLogicalCameraResult(const camera_metadata_t* staticMetadata,
476                                                const std::vector<uint8_t>& resultMetadata) {
477     camera_metadata_t* metadata = (camera_metadata_t*)resultMetadata.data();
478 
479     std::unordered_set<std::string> physicalIds;
480     Status rc = getPhysicalCameraIds(staticMetadata, &physicalIds);
481     ASSERT_TRUE(Status::OK == rc);
482     ASSERT_TRUE(physicalIds.size() > 1);
483 
484     camera_metadata_ro_entry entry;
485     // Check mainPhysicalId
486     find_camera_metadata_ro_entry(metadata, ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID,
487                                   &entry);
488     if (entry.count > 0) {
489         std::string mainPhysicalId(reinterpret_cast<const char*>(entry.data.u8));
490         ASSERT_NE(physicalIds.find(mainPhysicalId), physicalIds.end());
491     } else {
492         ADD_FAILURE() << "Get LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID failed!";
493     }
494 
495     if (flags::concert_mode()) {
496         auto ret = find_camera_metadata_ro_entry(
497                 metadata, ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_SENSOR_CROP_REGION, &entry);
498         if ((ret == android::OK) && (entry.count > 0)) {
499             ASSERT_TRUE(entry.count == 4);
500             ASSERT_GE(entry.data.i32[0], 0);  // Left (x) must be non-negative
501             ASSERT_GE(entry.data.i32[1], 0);  // Top (y) must be non-negative
502             ASSERT_GT(entry.data.i32[2], 0);  // Width must be positive
503             ASSERT_GT(entry.data.i32[3], 0);  // Height must be positive
504         }
505     }
506 }
507 
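// Lens intrinsic samples are reported as groups of five floats, one group per timestamp, and the timestamps must be non-decreasing (checked only when the concert_mode flag is enabled).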
508 void CameraAidlTest::verifyLensIntrinsicsResult(const std::vector<uint8_t>& resultMetadata) {
509     if (flags::concert_mode()) {
510         camera_metadata_t* metadata = (camera_metadata_t*)resultMetadata.data();
511 
512         camera_metadata_ro_entry timestampsEntry, intrinsicsEntry;
513         auto tsRet = find_camera_metadata_ro_entry(
514                 metadata, ANDROID_STATISTICS_LENS_INTRINSIC_TIMESTAMPS, &timestampsEntry);
515         auto inRet = find_camera_metadata_ro_entry(
516                 metadata, ANDROID_STATISTICS_LENS_INTRINSIC_SAMPLES, &intrinsicsEntry);
517         ASSERT_EQ(tsRet, inRet);
518         ASSERT_TRUE((intrinsicsEntry.count % 5) == 0);
519         ASSERT_EQ(timestampsEntry.count, intrinsicsEntry.count / 5);
520         if (timestampsEntry.count > 0) {
521             for (size_t i = 0; i < timestampsEntry.count - 1; i++) {
522                 ASSERT_GE(timestampsEntry.data.i64[i + 1], timestampsEntry.data.i64[i]);
523             }
524         }
525     }
526 }
527 
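// Parses the NUL-separated ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS blob into a set of physical camera id strings.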
528 Status CameraAidlTest::getPhysicalCameraIds(const camera_metadata_t* staticMeta,
529                                             std::unordered_set<std::string>* physicalIds) {
530     if ((nullptr == staticMeta) || (nullptr == physicalIds)) {
531         return Status::ILLEGAL_ARGUMENT;
532     }
533 
534     camera_metadata_ro_entry entry;
535     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS,
536                                            &entry);
537     if (0 != rc) {
538         return Status::ILLEGAL_ARGUMENT;
539     }
540 
541     const uint8_t* ids = entry.data.u8;
542     size_t start = 0;
543     for (size_t i = 0; i < entry.count; i++) {
544         if (ids[i] == '\0') {
545             if (start != i) {
546                 std::string currentId(reinterpret_cast<const char*>(ids + start));
547                 physicalIds->emplace(currentId);
548             }
549             start = i + 1;
550         }
551     }
552 
553     return Status::OK;
554 }
555 
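// Classifies a camera as hidden-secure, system-only, or public based on its advertised request capabilities.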
556 Status CameraAidlTest::getSystemCameraKind(const camera_metadata_t* staticMeta,
557                                            SystemCameraKind* systemCameraKind) {
558     if (nullptr == staticMeta || nullptr == systemCameraKind) {
559         return Status::ILLEGAL_ARGUMENT;
560     }
561 
562     camera_metadata_ro_entry entry{};
563     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
564                                            &entry);
565     if (0 != rc) {
566         return Status::ILLEGAL_ARGUMENT;
567     }
568 
569     if (entry.count == 1 &&
570         entry.data.u8[0] == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_SECURE_IMAGE_DATA) {
571         *systemCameraKind = SystemCameraKind::HIDDEN_SECURE_CAMERA;
572         return Status::OK;
573     }
574 
575     // Go through the capabilities and check if it has
576     // ANDROID_REQUEST_AVAILABLE_CAPABILITIES_SYSTEM_CAMERA
577     for (size_t i = 0; i < entry.count; ++i) {
578         uint8_t capability = entry.data.u8[i];
579         if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_SYSTEM_CAMERA) {
580             *systemCameraKind = SystemCameraKind::SYSTEM_ONLY_CAMERA;
581             return Status::OK;
582         }
583     }
584     *systemCameraKind = SystemCameraKind::PUBLIC;
585     return Status::OK;
586 }
587 
588 void CameraAidlTest::notifyDeviceState(int64_t state) {
589     if (mProvider == nullptr) {
590         return;
591     }
592     mProvider->notifyDeviceStateChange(state);
593 }
594 
595 void CameraAidlTest::allocateGraphicBuffer(uint32_t width, uint32_t height, uint64_t usage,
596                                            PixelFormat format, buffer_handle_t* buffer_handle) {
597     ASSERT_NE(buffer_handle, nullptr);
598 
599     uint32_t stride;
600 
601     android::status_t err = android::GraphicBufferAllocator::get().allocateRawHandle(
602             width, height, static_cast<int32_t>(format), 1u /*layerCount*/, usage, buffer_handle,
603             &stride, "VtsHalCameraProviderV2");
604     ASSERT_EQ(err, android::NO_ERROR);
605 }
606 
607 bool CameraAidlTest::matchDeviceName(const std::string& deviceName, const std::string& providerType,
608                                      std::string* deviceVersion, std::string* cameraId) {
609     // expected format: device@<major>.<minor>/<type>/<id>
610     std::stringstream pattern;
611     pattern << "device@([0-9]+\\.[0-9]+)/" << providerType << "/(.+)";
612     std::regex e(pattern.str());
613 
614     std::smatch sm;
615     if (std::regex_match(deviceName, sm, e)) {
616         if (deviceVersion != nullptr) {
617             *deviceVersion = sm[1];
618         }
619         if (cameraId != nullptr) {
620             *cameraId = sm[2];
621         }
622         return true;
623     }
624     return false;
625 }
626 
627 void CameraAidlTest::verifyCameraCharacteristics(const CameraMetadata& chars) {
628     const camera_metadata_t* metadata =
629             reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());
630 
631     size_t expectedSize = chars.metadata.size();
632     int result = validate_camera_metadata_structure(metadata, &expectedSize);
633     ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
634     size_t entryCount = get_camera_metadata_entry_count(metadata);
635     // TODO: we can do better than 0 here. Need to check how many required
636     // characteristics keys we've defined.
637     ASSERT_GT(entryCount, 0u);
638 
639     camera_metadata_ro_entry entry;
640     int retcode =
641             find_camera_metadata_ro_entry(metadata, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, &entry);
642     if ((0 == retcode) && (entry.count > 0)) {
643         uint8_t hardwareLevel = entry.data.u8[0];
644         ASSERT_TRUE(hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED ||
645                     hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL ||
646                     hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3 ||
647                     hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL);
648     } else {
649         ADD_FAILURE() << "Get camera hardware level failed!";
650     }
651 
652     entry.count = 0;
653     retcode = find_camera_metadata_ro_entry(
654             metadata, ANDROID_REQUEST_CHARACTERISTIC_KEYS_NEEDING_PERMISSION, &entry);
655     if ((0 == retcode) || (entry.count > 0)) {
656         ADD_FAILURE() << "ANDROID_REQUEST_CHARACTERISTIC_KEYS_NEEDING_PERMISSION "
657                       << " per API contract should never be set by Hal!";
658     }
659     retcode = find_camera_metadata_ro_entry(
660             metadata, ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS, &entry);
661     if ((0 == retcode) || (entry.count > 0)) {
662         ADD_FAILURE() << "ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS"
663                       << " per API contract should never be set by Hal!";
664     }
665     retcode = find_camera_metadata_ro_entry(
666             metadata, ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS, &entry);
667     if ((0 == retcode) || (entry.count > 0)) {
668         ADD_FAILURE() << "ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS"
669                       << " per API contract should never be set by Hal!";
670     }
671     retcode = find_camera_metadata_ro_entry(
672             metadata, ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS, &entry);
673     if ((0 == retcode) || (entry.count > 0)) {
674         ADD_FAILURE() << "ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS"
675                       << " per API contract should never be set by Hal!";
676     }
677 
678     retcode = find_camera_metadata_ro_entry(
679             metadata, ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, &entry);
680     if (0 == retcode || entry.count > 0) {
681         ADD_FAILURE() << "ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS "
682                       << " per API contract should never be set by Hal!";
683     }
684 
685     retcode = find_camera_metadata_ro_entry(
686             metadata, ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS, &entry);
687     if (0 == retcode || entry.count > 0) {
688         ADD_FAILURE() << "ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS "
689                       << " per API contract should never be set by Hal!";
690     }
691 
692     retcode = find_camera_metadata_ro_entry(metadata, ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS,
693                                             &entry);
694     if (0 == retcode || entry.count > 0) {
695         ADD_FAILURE() << "ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS "
696                       << " per API contract should never be set by Hal!";
697     }
698 
699     retcode = find_camera_metadata_ro_entry(metadata, ANDROID_HEIC_INFO_SUPPORTED, &entry);
700     if (0 == retcode && entry.count > 0) {
701         retcode = find_camera_metadata_ro_entry(
702                 metadata, ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT, &entry);
703         if (0 == retcode && entry.count > 0) {
704             uint8_t maxJpegAppSegmentsCount = entry.data.u8[0];
705             ASSERT_TRUE(maxJpegAppSegmentsCount >= 1 && maxJpegAppSegmentsCount <= 16);
706         } else {
707             ADD_FAILURE() << "Get Heic maxJpegAppSegmentsCount failed!";
708         }
709     }
710 
711     retcode = find_camera_metadata_ro_entry(metadata, ANDROID_LENS_POSE_REFERENCE, &entry);
712     if (0 == retcode && entry.count > 0) {
713         uint8_t poseReference = entry.data.u8[0];
714         ASSERT_TRUE(poseReference <= ANDROID_LENS_POSE_REFERENCE_AUTOMOTIVE &&
715                 poseReference >= ANDROID_LENS_POSE_REFERENCE_PRIMARY_CAMERA);
716     }
717 
718     retcode =
719             find_camera_metadata_ro_entry(metadata, ANDROID_INFO_DEVICE_STATE_ORIENTATIONS, &entry);
720     if (0 == retcode && entry.count > 0) {
721         ASSERT_TRUE((entry.count % 2) == 0);
722         uint64_t maxPublicState = ((uint64_t)ICameraProvider::DEVICE_STATE_FOLDED) << 1;
723         uint64_t vendorStateStart = 1UL << 31;  // Reserved for vendor specific states
724         uint64_t stateMask = vendorStateStart - 1;   // bits below the vendor-specific range
725         stateMask &= ~(maxPublicState - 1);          // minus the defined public state bits
726         for (int i = 0; i < entry.count; i += 2) {
727             ASSERT_TRUE((entry.data.i64[i] & stateMask) == 0);
728             ASSERT_TRUE((entry.data.i64[i + 1] % 90) == 0);
729         }
730     }
731 
732     verifyExtendedSceneModeCharacteristics(metadata);
733     verifyZoomCharacteristics(metadata);
734     verifyStreamUseCaseCharacteristics(metadata);
735     verifySettingsOverrideCharacteristics(metadata);
736 }
737 
738 void CameraAidlTest::verifyExtendedSceneModeCharacteristics(const camera_metadata_t* metadata) {
739     camera_metadata_ro_entry entry;
740     int retcode = 0;
741 
742     retcode = find_camera_metadata_ro_entry(metadata, ANDROID_CONTROL_AVAILABLE_MODES, &entry);
743     if ((0 == retcode) && (entry.count > 0)) {
744         for (auto i = 0; i < entry.count; i++) {
745             ASSERT_TRUE(entry.data.u8[i] >= ANDROID_CONTROL_MODE_OFF &&
746                         entry.data.u8[i] <= ANDROID_CONTROL_MODE_USE_EXTENDED_SCENE_MODE);
747         }
748     } else {
749         ADD_FAILURE() << "Get camera controlAvailableModes failed!";
750     }
751 
752     // Check key availability in capabilities, request and result.
753 
754     retcode =
755             find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, &entry);
756     bool hasExtendedSceneModeRequestKey = false;
757     if ((0 == retcode) && (entry.count > 0)) {
758         hasExtendedSceneModeRequestKey =
759                 std::find(entry.data.i32, entry.data.i32 + entry.count,
760                           ANDROID_CONTROL_EXTENDED_SCENE_MODE) != entry.data.i32 + entry.count;
761     } else {
762         ADD_FAILURE() << "Get camera availableRequestKeys failed!";
763     }
764 
765     retcode =
766             find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, &entry);
767     bool hasExtendedSceneModeResultKey = false;
768     if ((0 == retcode) && (entry.count > 0)) {
769         hasExtendedSceneModeResultKey =
770                 std::find(entry.data.i32, entry.data.i32 + entry.count,
771                           ANDROID_CONTROL_EXTENDED_SCENE_MODE) != entry.data.i32 + entry.count;
772     } else {
773         ADD_FAILURE() << "Get camera availableResultKeys failed!";
774     }
775 
776     retcode = find_camera_metadata_ro_entry(metadata,
777                                             ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, &entry);
778     bool hasExtendedSceneModeMaxSizesKey = false;
779     bool hasExtendedSceneModeZoomRatioRangesKey = false;
780     if ((0 == retcode) && (entry.count > 0)) {
781         hasExtendedSceneModeMaxSizesKey =
782                 std::find(entry.data.i32, entry.data.i32 + entry.count,
783                           ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES) !=
784                 entry.data.i32 + entry.count;
785         hasExtendedSceneModeZoomRatioRangesKey =
786                 std::find(entry.data.i32, entry.data.i32 + entry.count,
787                           ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES) !=
788                 entry.data.i32 + entry.count;
789     } else {
790         ADD_FAILURE() << "Get camera availableCharacteristicsKeys failed!";
791     }
792 
793     camera_metadata_ro_entry maxSizesEntry;
794     retcode = find_camera_metadata_ro_entry(
795             metadata, ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES, &maxSizesEntry);
796     bool hasExtendedSceneModeMaxSizes = (0 == retcode && maxSizesEntry.count > 0);
797 
798     camera_metadata_ro_entry zoomRatioRangesEntry;
799     retcode = find_camera_metadata_ro_entry(
800             metadata, ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES,
801             &zoomRatioRangesEntry);
802     bool hasExtendedSceneModeZoomRatioRanges = (0 == retcode && zoomRatioRangesEntry.count > 0);
803 
804     // Extended scene mode keys must all be available, or all be unavailable.
805     bool noExtendedSceneMode =
806             !hasExtendedSceneModeRequestKey && !hasExtendedSceneModeResultKey &&
807             !hasExtendedSceneModeMaxSizesKey && !hasExtendedSceneModeZoomRatioRangesKey &&
808             !hasExtendedSceneModeMaxSizes && !hasExtendedSceneModeZoomRatioRanges;
809     if (noExtendedSceneMode) {
810         return;
811     }
812     bool hasExtendedSceneMode = hasExtendedSceneModeRequestKey && hasExtendedSceneModeResultKey &&
813                                 hasExtendedSceneModeMaxSizesKey &&
814                                 hasExtendedSceneModeZoomRatioRangesKey &&
815                                 hasExtendedSceneModeMaxSizes && hasExtendedSceneModeZoomRatioRanges;
816     ASSERT_TRUE(hasExtendedSceneMode);
817 
818     // Must have DISABLED, and must have one of BOKEH_STILL_CAPTURE, BOKEH_CONTINUOUS, or a VENDOR
819     // mode.
820     ASSERT_TRUE((maxSizesEntry.count == 6 && zoomRatioRangesEntry.count == 2) ||
821                 (maxSizesEntry.count == 9 && zoomRatioRangesEntry.count == 4));
822     bool hasDisabledMode = false;
823     bool hasBokehStillCaptureMode = false;
824     bool hasBokehContinuousMode = false;
825     bool hasVendorMode = false;
826     std::vector<AvailableStream> outputStreams;
827     ASSERT_EQ(Status::OK, getAvailableOutputStreams(metadata, outputStreams));
828     for (int i = 0, j = 0; i < maxSizesEntry.count && j < zoomRatioRangesEntry.count; i += 3) {
829         int32_t mode = maxSizesEntry.data.i32[i];
830         int32_t maxWidth = maxSizesEntry.data.i32[i + 1];
831         int32_t maxHeight = maxSizesEntry.data.i32[i + 2];
832         switch (mode) {
833             case ANDROID_CONTROL_EXTENDED_SCENE_MODE_DISABLED:
834                 hasDisabledMode = true;
835                 ASSERT_TRUE(maxWidth == 0 && maxHeight == 0);
836                 break;
837             case ANDROID_CONTROL_EXTENDED_SCENE_MODE_BOKEH_STILL_CAPTURE:
838                 hasBokehStillCaptureMode = true;
839                 j += 2;
840                 break;
841             case ANDROID_CONTROL_EXTENDED_SCENE_MODE_BOKEH_CONTINUOUS:
842                 hasBokehContinuousMode = true;
843                 j += 2;
844                 break;
845             default:
846                 if (mode < ANDROID_CONTROL_EXTENDED_SCENE_MODE_VENDOR_START) {
847                     ADD_FAILURE() << "Invalid extended scene mode advertised: " << mode;
848                 } else {
849                     hasVendorMode = true;
850                     j += 2;
851                 }
852                 break;
853         }
854 
855         if (mode != ANDROID_CONTROL_EXTENDED_SCENE_MODE_DISABLED) {
856             // Make sure size is supported.
857             bool sizeSupported = false;
858             for (const auto& stream : outputStreams) {
859                 if ((stream.format == static_cast<int32_t>(PixelFormat::YCBCR_420_888) ||
860                      stream.format == static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)) &&
861                     stream.width == maxWidth && stream.height == maxHeight) {
862                     sizeSupported = true;
863                     break;
864                 }
865             }
866             ASSERT_TRUE(sizeSupported);
867 
868             // Make sure zoom range is valid
869             float minZoomRatio = zoomRatioRangesEntry.data.f[0];
870             float maxZoomRatio = zoomRatioRangesEntry.data.f[1];
871             ASSERT_GT(minZoomRatio, 0.0f);
872             ASSERT_LE(minZoomRatio, maxZoomRatio);
873         }
874     }
875     ASSERT_TRUE(hasDisabledMode);
876     ASSERT_TRUE(hasBokehStillCaptureMode || hasBokehContinuousMode || hasVendorMode);
877 }
878 
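// Cross-checks the CONSTRAINED_HIGH_SPEED_VIDEO capability against the advertised high speed video
// configurations (including the MAXIMUM_RESOLUTION variant for ultra high resolution sensors) and
// validates each (width, height, fps_min, fps_max, batch_size_max) tuple.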
879 void CameraAidlTest::verifyHighSpeedRecordingCharacteristics(const std::string& cameraName,
880                                                              const CameraMetadata& chars) {
881     const camera_metadata_t* metadata =
882             reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());
883 
884     // Check capabilities
885     bool hasHighSpeedRecordingCapability = false;
886     bool hasUltraHighResolutionCapability = false;
887     camera_metadata_ro_entry entry;
888     int rc =
889             find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_CAPABILITIES, &entry);
890     if ((0 == rc) && (entry.count > 0)) {
891         hasHighSpeedRecordingCapability =
892                 std::find(entry.data.u8, entry.data.u8 + entry.count,
893                           ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO) !=
894                 entry.data.u8 + entry.count;
895 
896         hasUltraHighResolutionCapability =
897                 std::find(entry.data.u8, entry.data.u8 + entry.count,
898                           ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR) !=
899                 entry.data.u8 + entry.count;
900     }
901 
902     // Check high speed video configurations
903     camera_metadata_ro_entry highSpeedEntry;
904     rc = find_camera_metadata_ro_entry(
905             metadata, ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS, &highSpeedEntry);
906     bool hasHighSpeedEntry = (0 == rc && highSpeedEntry.count > 0);
907 
908     camera_metadata_ro_entry highSpeedMaxResEntry;
909     rc = find_camera_metadata_ro_entry(
910             metadata, ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS_MAXIMUM_RESOLUTION,
911             &highSpeedMaxResEntry);
912     bool hasHighSpeedMaxResEntry = (0 == rc && highSpeedMaxResEntry.count > 0);
913 
914     // High speed recording configuration entry must be available based on capabilities
915     bool noHighSpeedRecording =
916             !hasHighSpeedRecordingCapability && !hasHighSpeedEntry && !hasHighSpeedMaxResEntry;
917     if (noHighSpeedRecording) {
918         return;
919     }
920     bool hasHighSpeedRecording = hasHighSpeedRecordingCapability && hasHighSpeedEntry &&
921                                  ((hasHighSpeedMaxResEntry && hasUltraHighResolutionCapability) ||
922                                   !hasHighSpeedMaxResEntry);
923     ASSERT_TRUE(hasHighSpeedRecording);
924 
925     std::string version, cameraId;
926     ASSERT_TRUE(matchDeviceName(cameraName, mProviderType, &version, &cameraId));
927     bool needBatchSizeCheck = (version != CAMERA_DEVICE_API_VERSION_1);
928 
929     // Check each entry item
930     ASSERT_TRUE(highSpeedEntry.count > 0 && highSpeedEntry.count % 5 == 0);
931     for (auto i = 4; i < highSpeedEntry.count; i += 5) {
932         int32_t fps_min = highSpeedEntry.data.i32[i - 2];
933         int32_t fps_max = highSpeedEntry.data.i32[i - 1];
934         int32_t batch_size_max = highSpeedEntry.data.i32[i];
935         int32_t allowedMaxBatchSize = fps_max / 30;
936 
937         ASSERT_GE(fps_max, 120);
938         ASSERT_TRUE(fps_min % 30 == 0 && fps_max % 30 == 0);
939         if (needBatchSizeCheck) {
940             ASSERT_LE(batch_size_max, 32);
941             ASSERT_TRUE(allowedMaxBatchSize % batch_size_max == 0);
942         }
943     }
944 
945     if (hasHighSpeedMaxResEntry) {
946         ASSERT_TRUE(highSpeedMaxResEntry.count > 0 && highSpeedMaxResEntry.count % 5 == 0);
947         for (auto i = 4; i < highSpeedMaxResEntry.count; i += 5) {
948             int32_t fps_min = highSpeedMaxResEntry.data.i32[i - 2];
949             int32_t fps_max = highSpeedMaxResEntry.data.i32[i - 1];
950             int32_t batch_size_max = highSpeedMaxResEntry.data.i32[i];
951             int32_t allowedMaxBatchSize = fps_max / 30;
952 
953             ASSERT_GE(fps_max, 120);
954             ASSERT_TRUE(fps_min % 30 == 0 && fps_max % 30 == 0);
955             if (needBatchSizeCheck) {
956                 ASSERT_LE(batch_size_max, 32);
957                 ASSERT_TRUE(allowedMaxBatchSize % batch_size_max == 0);
958             }
959         }
960     }
961 }
962 
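// Collects OUTPUT stream configurations from the scaler (and depth) static metadata, optionally
// restricted by a format/size threshold and optionally using the MAXIMUM_RESOLUTION tags.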
963 Status CameraAidlTest::getAvailableOutputStreams(const camera_metadata_t* staticMeta,
964                                                  std::vector<AvailableStream>& outputStreams,
965                                                  const AvailableStream* threshold,
966                                                  bool maxResolution) {
967     if (nullptr == staticMeta) {
968         return Status::ILLEGAL_ARGUMENT;
969     }
970     int scalerTag = maxResolution
971                             ? ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
972                             : ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
973     int depthTag = maxResolution
974                            ? ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
975                            : ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS;
976 
977     camera_metadata_ro_entry scalerEntry;
978     camera_metadata_ro_entry depthEntry;
979     int foundScaler = find_camera_metadata_ro_entry(staticMeta, scalerTag, &scalerEntry);
980     int foundDepth = find_camera_metadata_ro_entry(staticMeta, depthTag, &depthEntry);
981     if ((0 != foundScaler || (0 != (scalerEntry.count % 4))) &&
982         (0 != foundDepth || (0 != (depthEntry.count % 4)))) {
983         return Status::ILLEGAL_ARGUMENT;
984     }
985 
986     if (foundScaler == 0 && (0 == (scalerEntry.count % 4))) {
987         fillOutputStreams(&scalerEntry, outputStreams, threshold,
988                           ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
989     }
990 
991     if (foundDepth == 0 && (0 == (depthEntry.count % 4))) {
992         AvailableStream depthPreviewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
993                                                  static_cast<int32_t>(PixelFormat::Y16)};
994         const AvailableStream* depthThreshold =
995                 isDepthOnly(staticMeta) ? &depthPreviewThreshold : threshold;
996         fillOutputStreams(&depthEntry, outputStreams, depthThreshold,
997                           ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT);
998     }
999 
1000     return Status::OK;
1001 }
1002 
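// Stream configuration entries are flattened as (format, width, height, direction) tuples; collect
// those marked OUTPUT, keeping only sizes within the threshold when one is given.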
1003 void CameraAidlTest::fillOutputStreams(camera_metadata_ro_entry_t* entry,
1004                                        std::vector<AvailableStream>& outputStreams,
1005                                        const AvailableStream* threshold,
1006                                        const int32_t availableConfigOutputTag) {
1007     for (size_t i = 0; i < entry->count; i += 4) {
1008         if (availableConfigOutputTag == entry->data.i32[i + 3]) {
1009             if (nullptr == threshold) {
1010                 AvailableStream s = {entry->data.i32[i + 1], entry->data.i32[i + 2],
1011                                      entry->data.i32[i]};
1012                 outputStreams.push_back(s);
1013             } else {
1014                 if ((threshold->format == entry->data.i32[i]) &&
1015                     (threshold->width >= entry->data.i32[i + 1]) &&
1016                     (threshold->height >= entry->data.i32[i + 2])) {
1017                     AvailableStream s = {entry->data.i32[i + 1], entry->data.i32[i + 2],
1018                                          threshold->format};
1019                     outputStreams.push_back(s);
1020                 }
1021             }
1022         }
1023     }
1024 }
1025 
1026 void CameraAidlTest::verifyZoomCharacteristics(const camera_metadata_t* metadata) {
1027     camera_metadata_ro_entry entry;
1028     int retcode = 0;
1029 
1030     // Check key availability in capabilities, request and result.
1031     retcode = find_camera_metadata_ro_entry(metadata, ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
1032                                             &entry);
1033     float maxDigitalZoom = 1.0;
1034     if ((0 == retcode) && (entry.count == 1)) {
1035         maxDigitalZoom = entry.data.f[0];
1036     } else {
1037         ADD_FAILURE() << "Get camera scalerAvailableMaxDigitalZoom failed!";
1038     }
1039 
1040     retcode =
1041             find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, &entry);
1042     bool hasZoomRequestKey = false;
1043     if ((0 == retcode) && (entry.count > 0)) {
1044         hasZoomRequestKey = std::find(entry.data.i32, entry.data.i32 + entry.count,
1045                                       ANDROID_CONTROL_ZOOM_RATIO) != entry.data.i32 + entry.count;
1046     } else {
1047         ADD_FAILURE() << "Get camera availableRequestKeys failed!";
1048     }
1049 
1050     retcode =
1051             find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, &entry);
1052     bool hasZoomResultKey = false;
1053     if ((0 == retcode) && (entry.count > 0)) {
1054         hasZoomResultKey = std::find(entry.data.i32, entry.data.i32 + entry.count,
1055                                      ANDROID_CONTROL_ZOOM_RATIO) != entry.data.i32 + entry.count;
1056     } else {
1057         ADD_FAILURE() << "Get camera availableResultKeys failed!";
1058     }
1059 
1060     retcode = find_camera_metadata_ro_entry(metadata,
1061                                             ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, &entry);
1062     bool hasZoomCharacteristicsKey = false;
1063     if ((0 == retcode) && (entry.count > 0)) {
1064         hasZoomCharacteristicsKey =
1065                 std::find(entry.data.i32, entry.data.i32 + entry.count,
1066                           ANDROID_CONTROL_ZOOM_RATIO_RANGE) != entry.data.i32 + entry.count;
1067     } else {
1068         ADD_FAILURE() << "Get camera availableCharacteristicsKeys failed!";
1069     }
1070 
1071     retcode = find_camera_metadata_ro_entry(metadata, ANDROID_CONTROL_ZOOM_RATIO_RANGE, &entry);
1072     bool hasZoomRatioRange = (0 == retcode && entry.count == 2);
1073 
1074     // Zoom keys must all be available, or all be unavailable.
1075     bool noZoomRatio = !hasZoomRequestKey && !hasZoomResultKey && !hasZoomCharacteristicsKey &&
1076                        !hasZoomRatioRange;
1077     if (noZoomRatio) {
1078         return;
1079     }
1080     bool hasZoomRatio =
1081             hasZoomRequestKey && hasZoomResultKey && hasZoomCharacteristicsKey && hasZoomRatioRange;
1082     ASSERT_TRUE(hasZoomRatio);
1083 
1084     float minZoomRatio = entry.data.f[0];
1085     float maxZoomRatio = entry.data.f[1];
1086     constexpr float FLOATING_POINT_THRESHOLD = 0.00001f;
1087     if (maxDigitalZoom > maxZoomRatio + FLOATING_POINT_THRESHOLD) {
1088         ADD_FAILURE() << "Maximum digital zoom " << maxDigitalZoom
1089                       << " is larger than maximum zoom ratio " << maxZoomRatio << " + threshold "
1090                       << FLOATING_POINT_THRESHOLD << "!";
1091     }
1092     if (minZoomRatio > maxZoomRatio) {
1093         ADD_FAILURE() << "Maximum zoom ratio is less than minimum zoom ratio!";
1094     }
1095     if (minZoomRatio > 1.0f) {
1096         ADD_FAILURE() << "Minimum zoom ratio is more than 1.0!";
1097     }
1098     if (maxZoomRatio < 1.0f) {
1099         ADD_FAILURE() << "Maximum zoom ratio is less than 1.0!";
1100     }
1101 
1102     // Make sure CROPPING_TYPE is CENTER_ONLY
1103     retcode = find_camera_metadata_ro_entry(metadata, ANDROID_SCALER_CROPPING_TYPE, &entry);
1104     if ((0 == retcode) && (entry.count == 1)) {
1105         int8_t croppingType = entry.data.u8[0];
1106         ASSERT_EQ(croppingType, ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY);
1107     } else {
1108         ADD_FAILURE() << "Get camera scalerCroppingType failed!";
1109     }
1110 }
1111 
1112 void CameraAidlTest::verifyMonochromeCharacteristics(const CameraMetadata& chars) {
1113     const camera_metadata_t* metadata = (camera_metadata_t*)chars.metadata.data();
1114     Status rc = isMonochromeCamera(metadata);
1115     if (Status::OPERATION_NOT_SUPPORTED == rc) {
1116         return;
1117     }
1118     ASSERT_EQ(Status::OK, rc);
1119 
1120     camera_metadata_ro_entry entry;
1121     // Check capabilities
1122     int retcode =
1123             find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_CAPABILITIES, &entry);
1124     if ((0 == retcode) && (entry.count > 0)) {
1125         ASSERT_EQ(std::find(entry.data.u8, entry.data.u8 + entry.count,
1126                             ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING),
1127                   entry.data.u8 + entry.count);
1128     }
1129 
1130     // Check Cfa
1131     retcode = find_camera_metadata_ro_entry(metadata, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
1132                                             &entry);
1133     if ((0 == retcode) && (entry.count == 1)) {
1134         ASSERT_TRUE(entry.data.i32[0] ==
1135                             static_cast<int32_t>(
1136                                     SensorInfoColorFilterArrangement::
1137                                             ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO) ||
1138                     entry.data.i32[0] ==
1139                             static_cast<int32_t>(
1140                                     SensorInfoColorFilterArrangement::
1141                                             ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR));
1142     }
1143 
1144     // Check availableRequestKeys
1145     retcode =
1146             find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, &entry);
1147     if ((0 == retcode) && (entry.count > 0)) {
1148         for (size_t i = 0; i < entry.count; i++) {
1149             ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_MODE);
1150             ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_TRANSFORM);
1151             ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_GAINS);
1152         }
1153     } else {
1154         ADD_FAILURE() << "Get camera availableRequestKeys failed!";
1155     }
1156 
1157     // Check availableResultKeys
1158     retcode =
1159             find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, &entry);
1160     if ((0 == retcode) && (entry.count > 0)) {
1161         for (size_t i = 0; i < entry.count; i++) {
1162             ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_GREEN_SPLIT);
1163             ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_NEUTRAL_COLOR_POINT);
1164             ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_MODE);
1165             ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_TRANSFORM);
1166             ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_GAINS);
1167         }
1168     } else {
1169         ADD_FAILURE() << "Get camera availableResultKeys failed!";
1170     }
1171 
1172     // Check availableCharacteristicKeys
1173     retcode = find_camera_metadata_ro_entry(metadata,
1174                                             ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, &entry);
1175     if ((0 == retcode) && (entry.count > 0)) {
1176         for (size_t i = 0; i < entry.count; i++) {
1177             ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_REFERENCE_ILLUMINANT1);
1178             ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_REFERENCE_ILLUMINANT2);
1179             ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_CALIBRATION_TRANSFORM1);
1180             ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_CALIBRATION_TRANSFORM2);
1181             ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_COLOR_TRANSFORM1);
1182             ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_COLOR_TRANSFORM2);
1183             ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_FORWARD_MATRIX1);
1184             ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_FORWARD_MATRIX2);
1185         }
1186     } else {
1187         ADD_FAILURE() << "Get camera availableCharacteristicKeys failed!";
1188     }
1189 
1190     // Check blackLevelPattern
1191     retcode = find_camera_metadata_ro_entry(metadata, ANDROID_SENSOR_BLACK_LEVEL_PATTERN, &entry);
1192     if ((0 == retcode) && (entry.count > 0)) {
1193         ASSERT_EQ(entry.count, 4);
1194         for (size_t i = 1; i < entry.count; i++) {
1195             ASSERT_EQ(entry.data.i32[i], entry.data.i32[0]);
1196         }
1197     }
1198 }
1199 
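// Verify the manual flash strength control tags: the single/torch max and
// default level entries must be either all present or all absent, may only be
// present when torch is supported, and when strength control is advertised
// (singleMaxLevel > 1) the default levels must be positive and no larger than
// the corresponding max levels.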
1200 void CameraAidlTest::verifyManualFlashStrengthControlCharacteristics(
1201         const camera_metadata_t* staticMeta) {
1202     camera_metadata_ro_entry singleMaxEntry;
1203     camera_metadata_ro_entry singleDefEntry;
1204     camera_metadata_ro_entry torchMaxEntry;
1205     camera_metadata_ro_entry torchDefEntry;
1206     bool torch_supported = false;
1207     int32_t singleMaxLevel = 0;
1208     int32_t singleDefLevel = 0;
1209     int32_t torchMaxLevel = 0;
1210     int32_t torchDefLevel = 0;
1211 
1212     // determine whether the device supports torch or not
1213     torch_supported = isTorchSupported(staticMeta);
1214 
1215     int singleMaxRetCode = find_camera_metadata_ro_entry(staticMeta,
1216             ANDROID_FLASH_SINGLE_STRENGTH_MAX_LEVEL, &singleMaxEntry);
1217     int singleDefRetCode = find_camera_metadata_ro_entry(staticMeta,
1218             ANDROID_FLASH_SINGLE_STRENGTH_DEFAULT_LEVEL, &singleDefEntry);
1219     int torchMaxRetCode = find_camera_metadata_ro_entry(staticMeta,
1220             ANDROID_FLASH_TORCH_STRENGTH_MAX_LEVEL, &torchMaxEntry);
1221     int torchDefRetCode = find_camera_metadata_ro_entry(staticMeta,
1222             ANDROID_FLASH_TORCH_STRENGTH_DEFAULT_LEVEL, &torchDefEntry);
1223     if (torch_supported) {
1224         int expectedEntryCount;
1225         if (singleMaxRetCode == 0 && singleDefRetCode == 0 && torchMaxRetCode == 0 &&
1226                 torchDefRetCode == 0) {
1227             singleMaxLevel = *singleMaxEntry.data.i32;
1228             singleDefLevel = *singleDefEntry.data.i32;
1229             torchMaxLevel = *torchMaxEntry.data.i32;
1230             torchDefLevel = *torchDefEntry.data.i32;
1231             expectedEntryCount = 1;
1232         } else {
1233             expectedEntryCount = 0;
1234         }
1235         ASSERT_EQ(singleMaxEntry.count, expectedEntryCount);
1236         ASSERT_EQ(singleDefEntry.count, expectedEntryCount);
1237         ASSERT_EQ(torchMaxEntry.count, expectedEntryCount);
1238         ASSERT_EQ(torchDefEntry.count, expectedEntryCount);
1239         // If the device supports this feature (singleMaxLevel > 1), the default levels must be greater than 0
1240         if (singleMaxLevel > 1) {
1241             ASSERT_GT(torchMaxLevel, 1);
1242             ASSERT_GT(torchDefLevel, 0);
1243             ASSERT_GT(singleDefLevel, 0);
1244             ASSERT_TRUE(torchDefLevel <= torchMaxLevel); // default levels should be <= max levels
1245             ASSERT_TRUE(singleDefLevel <= singleMaxLevel);
1246         }
1247     } else {
1248         ASSERT_TRUE(singleMaxRetCode != 0);
1249         ASSERT_TRUE(singleDefRetCode != 0);
1250         ASSERT_TRUE(torchMaxRetCode != 0);
1251         ASSERT_TRUE(torchDefRetCode != 0);
1252     }
1253 }
1254 
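// Verify the recommended stream configuration tags. Each configuration entry
// spans five int32 values with the stream type at offset 3 and the use case
// bitfield at offset 4; the type must be INPUT or OUTPUT and no reserved use
// case bits may be set. The matching characteristic keys must be advertised,
// and a recommended input/output format map additionally requires ZSL support.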
1255 void CameraAidlTest::verifyRecommendedConfigs(const CameraMetadata& chars) {
1256     size_t CONFIG_ENTRY_SIZE = 5;
1257     size_t CONFIG_ENTRY_TYPE_OFFSET = 3;
1258     size_t CONFIG_ENTRY_BITFIELD_OFFSET = 4;
1259     uint32_t maxPublicUsecase =
1260             ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_PUBLIC_END_3_8;
1261     uint32_t vendorUsecaseStart =
1262             ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_VENDOR_START;
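    // Use case values between the last public use case and the start of the
    // vendor range are reserved; recommended configurations must not set any
    // of the corresponding bits.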
1263     uint32_t usecaseMask = (1 << vendorUsecaseStart) - 1;
1264     usecaseMask &= ~((1 << maxPublicUsecase) - 1);
1265 
1266     const camera_metadata_t* metadata =
1267             reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());
1268 
1269     camera_metadata_ro_entry recommendedConfigsEntry, recommendedDepthConfigsEntry, ioMapEntry;
1270     recommendedConfigsEntry.count = recommendedDepthConfigsEntry.count = ioMapEntry.count = 0;
1271     int retCode = find_camera_metadata_ro_entry(
1272             metadata, ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS,
1273             &recommendedConfigsEntry);
1274     int depthRetCode = find_camera_metadata_ro_entry(
1275             metadata, ANDROID_DEPTH_AVAILABLE_RECOMMENDED_DEPTH_STREAM_CONFIGURATIONS,
1276             &recommendedDepthConfigsEntry);
1277     int ioRetCode = find_camera_metadata_ro_entry(
1278             metadata, ANDROID_SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP, &ioMapEntry);
1279     if ((0 != retCode) && (0 != depthRetCode)) {
1280         // In case both regular and depth recommended configurations are absent,
1281         // I/O should be absent as well.
1282         ASSERT_NE(ioRetCode, 0);
1283         return;
1284     }
1285 
1286     camera_metadata_ro_entry availableKeysEntry;
1287     retCode = find_camera_metadata_ro_entry(
1288             metadata, ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, &availableKeysEntry);
1289     ASSERT_TRUE((0 == retCode) && (availableKeysEntry.count > 0));
1290     std::vector<int32_t> availableKeys;
1291     availableKeys.reserve(availableKeysEntry.count);
1292     availableKeys.insert(availableKeys.end(), availableKeysEntry.data.i32,
1293                          availableKeysEntry.data.i32 + availableKeysEntry.count);
1294 
1295     if (recommendedConfigsEntry.count > 0) {
1296         ASSERT_NE(std::find(availableKeys.begin(), availableKeys.end(),
1297                             ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS),
1298                   availableKeys.end());
1299         ASSERT_EQ((recommendedConfigsEntry.count % CONFIG_ENTRY_SIZE), 0);
1300         for (size_t i = 0; i < recommendedConfigsEntry.count; i += CONFIG_ENTRY_SIZE) {
1301             int32_t entryType = recommendedConfigsEntry.data.i32[i + CONFIG_ENTRY_TYPE_OFFSET];
1302             uint32_t bitfield = recommendedConfigsEntry.data.i32[i + CONFIG_ENTRY_BITFIELD_OFFSET];
1303             ASSERT_TRUE((entryType == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) ||
1304                         (entryType == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT));
1305             ASSERT_TRUE((bitfield & usecaseMask) == 0);
1306         }
1307     }
1308 
1309     if (recommendedDepthConfigsEntry.count > 0) {
1310         ASSERT_NE(std::find(availableKeys.begin(), availableKeys.end(),
1311                             ANDROID_DEPTH_AVAILABLE_RECOMMENDED_DEPTH_STREAM_CONFIGURATIONS),
1312                   availableKeys.end());
1313         ASSERT_EQ((recommendedDepthConfigsEntry.count % CONFIG_ENTRY_SIZE), 0);
1314         for (size_t i = 0; i < recommendedDepthConfigsEntry.count; i += CONFIG_ENTRY_SIZE) {
1315             int32_t entryType = recommendedDepthConfigsEntry.data.i32[i + CONFIG_ENTRY_TYPE_OFFSET];
1316             uint32_t bitfield =
1317                     recommendedDepthConfigsEntry.data.i32[i + CONFIG_ENTRY_BITFIELD_OFFSET];
1318             ASSERT_TRUE((entryType == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) ||
1319                         (entryType == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT));
1320             ASSERT_TRUE((bitfield & usecaseMask) == 0);
1321         }
1322 
1323         if (recommendedConfigsEntry.count == 0) {
1324             // In case regular recommended configurations are absent but suggested depth
1325             // configurations are present, I/O should be absent.
1326             ASSERT_NE(ioRetCode, 0);
1327         }
1328     }
1329 
1330     if ((ioRetCode == 0) && (ioMapEntry.count > 0)) {
1331         ASSERT_NE(std::find(availableKeys.begin(), availableKeys.end(),
1332                             ANDROID_SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP),
1333                   availableKeys.end());
1334         ASSERT_EQ(isZSLModeAvailable(metadata), Status::OK);
1335     }
1336 }
1337 
1338 // Check whether ZSL is available using the static camera
1339 // characteristics.
1340 Status CameraAidlTest::isZSLModeAvailable(const camera_metadata_t* staticMeta) {
1341     if (Status::OK == isZSLModeAvailable(staticMeta, PRIV_REPROCESS)) {
1342         return Status::OK;
1343     } else {
1344         return isZSLModeAvailable(staticMeta, YUV_REPROCESS);
1345     }
1346 }
1347 
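// Check for the reprocessing capability (private or YUV, depending on
// reprocType) in the static capabilities list.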
1348 Status CameraAidlTest::isZSLModeAvailable(const camera_metadata_t* staticMeta,
1349                                           ReprocessType reprocType) {
1350     Status ret = Status::OPERATION_NOT_SUPPORTED;
1351     if (nullptr == staticMeta) {
1352         return Status::ILLEGAL_ARGUMENT;
1353     }
1354 
1355     camera_metadata_ro_entry entry;
1356     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
1357                                            &entry);
1358     if (0 != rc) {
1359         return Status::ILLEGAL_ARGUMENT;
1360     }
1361 
1362     for (size_t i = 0; i < entry.count; i++) {
1363         if ((reprocType == PRIV_REPROCESS &&
1364              ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING == entry.data.u8[i]) ||
1365             (reprocType == YUV_REPROCESS &&
1366              ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING == entry.data.u8[i])) {
1367             ret = Status::OK;
1368             break;
1369         }
1370     }
1371 
1372     return ret;
1373 }
1374 
1375 // Verify logical or ultra high resolution camera static metadata
1376 void CameraAidlTest::verifyLogicalOrUltraHighResCameraMetadata(
1377         const std::string& cameraName, const std::shared_ptr<ICameraDevice>& device,
1378         const CameraMetadata& chars, const std::vector<std::string>& deviceNames) {
1379     const camera_metadata_t* metadata =
1380             reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());
1381     ASSERT_NE(nullptr, metadata);
1382     SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
1383     Status retStatus = getSystemCameraKind(metadata, &systemCameraKind);
1384     ASSERT_EQ(retStatus, Status::OK);
1385     Status rc = isLogicalMultiCamera(metadata);
1386     ASSERT_TRUE(Status::OK == rc || Status::OPERATION_NOT_SUPPORTED == rc);
1387     bool isMultiCamera = (Status::OK == rc);
1388     bool isUltraHighResCamera = isUltraHighResolution(metadata);
1389     if (!isMultiCamera && !isUltraHighResCamera) {
1390         return;
1391     }
1392 
1393     camera_metadata_ro_entry entry;
1394     int retcode = find_camera_metadata_ro_entry(metadata, ANDROID_CONTROL_ZOOM_RATIO_RANGE, &entry);
1395     bool hasZoomRatioRange = (0 == retcode && entry.count == 2);
1396     retcode = find_camera_metadata_ro_entry(
1397             metadata, ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry);
1398     bool hasHalBufferManager =
1399             (0 == retcode && 1 == entry.count &&
1400              entry.data.i32[0] == ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5);
1401     bool sessionHalBufferManager =
1402             (0 == retcode && 1 == entry.count &&
1403              entry.data.i32[0] ==
1404                      ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_SESSION_CONFIGURABLE);
1405     retcode = find_camera_metadata_ro_entry(
1406             metadata, ANDROID_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED, &entry);
1407     bool multiResolutionStreamSupported =
1408             (0 == retcode && 1 == entry.count &&
1409              entry.data.u8[0] == ANDROID_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED_TRUE);
1410     if (multiResolutionStreamSupported) {
1411         ASSERT_TRUE(hasHalBufferManager || sessionHalBufferManager);
1412     }
1413 
1414     std::string version, cameraId;
1415     ASSERT_TRUE(matchDeviceName(cameraName, mProviderType, &version, &cameraId));
1416     std::unordered_set<std::string> physicalIds;
1417     rc = getPhysicalCameraIds(metadata, &physicalIds);
1418     ASSERT_TRUE(isUltraHighResCamera || Status::OK == rc);
1419     for (const auto& physicalId : physicalIds) {
1420         ASSERT_NE(physicalId, cameraId);
1421     }
1422     if (physicalIds.size() == 0) {
1423         ASSERT_TRUE(isUltraHighResCamera && !isMultiCamera);
1424         physicalIds.insert(cameraId);
1425     }
1426 
1427     std::unordered_set<int32_t> physicalRequestKeyIDs;
1428     rc = getSupportedKeys(const_cast<camera_metadata_t*>(metadata),
1429                           ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS,
1430                           &physicalRequestKeyIDs);
1431     ASSERT_TRUE(Status::OK == rc);
1432     bool hasTestPatternPhysicalRequestKey =
1433             physicalRequestKeyIDs.find(ANDROID_SENSOR_TEST_PATTERN_MODE) !=
1434             physicalRequestKeyIDs.end();
1435     std::unordered_set<int32_t> privacyTestPatternModes;
1436     getPrivacyTestPatternModes(metadata, &privacyTestPatternModes);
1437 
1438     // Map from image format to number of multi-resolution sizes for that format
1439     std::unordered_map<int32_t, size_t> multiResOutputFormatCounterMap;
1440     std::unordered_map<int32_t, size_t> multiResInputFormatCounterMap;
1441     for (const auto& physicalId : physicalIds) {
1442         bool isPublicId = false;
1443         std::string fullPublicId;
1444         SystemCameraKind physSystemCameraKind = SystemCameraKind::PUBLIC;
1445         for (auto& deviceName : deviceNames) {
1446             std::string publicVersion, publicId;
1447             ASSERT_TRUE(matchDeviceName(deviceName, mProviderType, &publicVersion, &publicId));
1448             if (physicalId == publicId) {
1449                 isPublicId = true;
1450                 fullPublicId = deviceName;
1451                 break;
1452             }
1453         }
1454 
1455         camera_metadata_ro_entry physicalMultiResStreamConfigs;
1456         camera_metadata_ro_entry physicalStreamConfigs;
1457         camera_metadata_ro_entry physicalMaxResolutionStreamConfigs;
1458         CameraMetadata physChars;
1459         bool isUltraHighRes = false;
1460         std::unordered_set<int32_t> subCameraPrivacyTestPatterns;
1461         if (isPublicId) {
1462             std::shared_ptr<ICameraDevice> subDevice;
1463             ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(fullPublicId, &subDevice);
1464             ASSERT_TRUE(ret.isOk());
1465             ASSERT_NE(subDevice, nullptr);
1466 
1467             ret = subDevice->getCameraCharacteristics(&physChars);
1468             ASSERT_TRUE(ret.isOk());
1469 
1470             const camera_metadata_t* staticMetadata =
1471                     reinterpret_cast<const camera_metadata_t*>(physChars.metadata.data());
1472             retStatus = getSystemCameraKind(staticMetadata, &physSystemCameraKind);
1473             ASSERT_EQ(retStatus, Status::OK);
1474 
1475             // Make sure that the system camera kind of a non-hidden
1476             // physical camera is the same as that of the logical camera
1477             // associated with it.
1478             ASSERT_EQ(physSystemCameraKind, systemCameraKind);
1479             retcode = find_camera_metadata_ro_entry(staticMetadata,
1480                                                     ANDROID_CONTROL_ZOOM_RATIO_RANGE, &entry);
1481             bool subCameraHasZoomRatioRange = (0 == retcode && entry.count == 2);
1482             ASSERT_EQ(hasZoomRatioRange, subCameraHasZoomRatioRange);
1483 
1484             getMultiResolutionStreamConfigurations(
1485                     &physicalMultiResStreamConfigs, &physicalStreamConfigs,
1486                     &physicalMaxResolutionStreamConfigs, staticMetadata);
1487             isUltraHighRes = isUltraHighResolution(staticMetadata);
1488 
1489             getPrivacyTestPatternModes(staticMetadata, &subCameraPrivacyTestPatterns);
1490         } else {
1491             // Check camera characteristics for hidden camera id
1492             ndk::ScopedAStatus ret =
1493                     device->getPhysicalCameraCharacteristics(physicalId, &physChars);
1494             ASSERT_TRUE(ret.isOk());
1495             verifyCameraCharacteristics(physChars);
1496             verifyMonochromeCharacteristics(physChars);
1497 
1498             auto staticMetadata = (const camera_metadata_t*)physChars.metadata.data();
1499             retcode = find_camera_metadata_ro_entry(staticMetadata,
1500                                                     ANDROID_CONTROL_ZOOM_RATIO_RANGE, &entry);
1501             bool subCameraHasZoomRatioRange = (0 == retcode && entry.count == 2);
1502             ASSERT_EQ(hasZoomRatioRange, subCameraHasZoomRatioRange);
1503 
1504             getMultiResolutionStreamConfigurations(
1505                     &physicalMultiResStreamConfigs, &physicalStreamConfigs,
1506                     &physicalMaxResolutionStreamConfigs, staticMetadata);
1507             isUltraHighRes = isUltraHighResolution(staticMetadata);
1508             getPrivacyTestPatternModes(staticMetadata, &subCameraPrivacyTestPatterns);
1509 
1510             // Check that calling getCameraDeviceInterface() on a hidden camera id
1511             // returns ILLEGAL_ARGUMENT.
1512             std::stringstream s;
1513             s << "device@" << version << "/" << mProviderType << "/" << physicalId;
1514             std::string fullPhysicalId(s.str());
1515             std::shared_ptr<ICameraDevice> subDevice;
1516             ret = mProvider->getCameraDeviceInterface(fullPhysicalId, &subDevice);
1517             ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
1518                         ret.getServiceSpecificError());
1519             ASSERT_EQ(subDevice, nullptr);
1520         }
1521 
1522         if (hasTestPatternPhysicalRequestKey) {
1523             ASSERT_TRUE(privacyTestPatternModes == subCameraPrivacyTestPatterns);
1524         }
1525 
1526         if (physicalMultiResStreamConfigs.count > 0) {
1527             ASSERT_EQ(physicalMultiResStreamConfigs.count % 4, 0);
1528 
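            // Multi-resolution stream configuration entries are groups of four
            // int32 values: format, width, height and input/output direction.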
1529             // Each supported size must be the max size for that format.
1530             for (size_t i = 0; i < physicalMultiResStreamConfigs.count / 4; i++) {
1531                 int32_t multiResFormat = physicalMultiResStreamConfigs.data.i32[i * 4];
1532                 int32_t multiResWidth = physicalMultiResStreamConfigs.data.i32[i * 4 + 1];
1533                 int32_t multiResHeight = physicalMultiResStreamConfigs.data.i32[i * 4 + 2];
1534                 int32_t multiResInput = physicalMultiResStreamConfigs.data.i32[i * 4 + 3];
1535 
1536                 // Check if the resolution is the max resolution in stream
1537                 // configuration map
1538                 bool supported = false;
1539                 bool isMaxSize = true;
1540                 for (size_t j = 0; j < physicalStreamConfigs.count / 4; j++) {
1541                     int32_t format = physicalStreamConfigs.data.i32[j * 4];
1542                     int32_t width = physicalStreamConfigs.data.i32[j * 4 + 1];
1543                     int32_t height = physicalStreamConfigs.data.i32[j * 4 + 2];
1544                     int32_t input = physicalStreamConfigs.data.i32[j * 4 + 3];
1545                     if (format == multiResFormat && input == multiResInput) {
1546                         if (width == multiResWidth && height == multiResHeight) {
1547                             supported = true;
1548                         } else if (width * height > multiResWidth * multiResHeight) {
1549                             isMaxSize = false;
1550                         }
1551                     }
1552                 }
1553                 // Check if the resolution is the max resolution in max
1554                 // resolution stream configuration map
1555                 bool supportedUltraHighRes = false;
1556                 bool isUltraHighResMaxSize = true;
1557                 for (size_t j = 0; j < physicalMaxResolutionStreamConfigs.count / 4; j++) {
1558                     int32_t format = physicalMaxResolutionStreamConfigs.data.i32[j * 4];
1559                     int32_t width = physicalMaxResolutionStreamConfigs.data.i32[j * 4 + 1];
1560                     int32_t height = physicalMaxResolutionStreamConfigs.data.i32[j * 4 + 2];
1561                     int32_t input = physicalMaxResolutionStreamConfigs.data.i32[j * 4 + 3];
1562                     if (format == multiResFormat && input == multiResInput) {
1563                         if (width == multiResWidth && height == multiResHeight) {
1564                             supportedUltraHighRes = true;
1565                         } else if (width * height > multiResWidth * multiResHeight) {
1566                             isUltraHighResMaxSize = false;
1567                         }
1568                     }
1569                 }
1570 
1571                 if (isUltraHighRes) {
1572                     // For ultra high resolution camera, the configuration must
1573                     // be the maximum size in stream configuration map, or max
1574                     // resolution stream configuration map
1575                     ASSERT_TRUE((supported && isMaxSize) ||
1576                                 (supportedUltraHighRes && isUltraHighResMaxSize));
1577                 } else {
1578                     // The configuration must be the maximum size in stream
1579                     // configuration map
1580                     ASSERT_TRUE(supported && isMaxSize);
1581                     ASSERT_FALSE(supportedUltraHighRes);
1582                 }
1583 
1584                 // Increment the counter for the configuration's format.
1585                 auto& formatCounterMap = multiResInput ? multiResInputFormatCounterMap
1586                                                        : multiResOutputFormatCounterMap;
1587                 if (formatCounterMap.count(multiResFormat) == 0) {
1588                     formatCounterMap[multiResFormat] = 1;
1589                 } else {
1590                     formatCounterMap[multiResFormat]++;
1591                 }
1592             }
1593 
1594             // There must be no duplicates
1595             for (size_t i = 0; i < physicalMultiResStreamConfigs.count / 4 - 1; i++) {
1596                 for (size_t j = i + 1; j < physicalMultiResStreamConfigs.count / 4; j++) {
1597                     // Input/output doesn't match
1598                     if (physicalMultiResStreamConfigs.data.i32[i * 4 + 3] !=
1599                         physicalMultiResStreamConfigs.data.i32[j * 4 + 3]) {
1600                         continue;
1601                     }
1602                     // Format doesn't match
1603                     if (physicalMultiResStreamConfigs.data.i32[i * 4] !=
1604                         physicalMultiResStreamConfigs.data.i32[j * 4]) {
1605                         continue;
1606                     }
1607                     // Width doesn't match
1608                     if (physicalMultiResStreamConfigs.data.i32[i * 4 + 1] !=
1609                         physicalMultiResStreamConfigs.data.i32[j * 4 + 1]) {
1610                         continue;
1611                     }
1612                     // Height doesn't match
1613                     if (physicalMultiResStreamConfigs.data.i32[i * 4 + 2] !=
1614                         physicalMultiResStreamConfigs.data.i32[j * 4 + 2]) {
1615                         continue;
1616                     }
1617                     // input/output, format, width, and height all match
1618                     ADD_FAILURE();
1619                 }
1620             }
1621         }
1622     }
1623 
1624     // If multi-resolution streaming is supported, there must be at least one
1625     // format with more than one resolution.
1626     if (multiResolutionStreamSupported) {
1627         size_t numMultiResFormats = 0;
1628         for (const auto& [format, sizeCount] : multiResOutputFormatCounterMap) {
1629             if (sizeCount >= 2) {
1630                 numMultiResFormats++;
1631             }
1632         }
1633         for (const auto& [format, sizeCount] : multiResInputFormatCounterMap) {
1634             if (sizeCount >= 2) {
1635                 numMultiResFormats++;
1636 
1637                 // If multi-resolution reprocessing is supported, the logical
1638                 // camera or ultra-high resolution sensor camera must support
1639                 // the corresponding reprocessing capability.
1640                 if (format == static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)) {
1641                     ASSERT_EQ(isZSLModeAvailable(metadata, PRIV_REPROCESS), Status::OK);
1642                 } else if (format == static_cast<int32_t>(PixelFormat::YCBCR_420_888)) {
1643                     ASSERT_EQ(isZSLModeAvailable(metadata, YUV_REPROCESS), Status::OK);
1644                 }
1645             }
1646         }
1647         ASSERT_GT(numMultiResFormats, 0);
1648     }
1649 
1650     // Make sure ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID is available in
1651     // result keys.
1652     if (isMultiCamera) {
1653         retcode = find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
1654                                                 &entry);
1655         if ((0 == retcode) && (entry.count > 0)) {
1656             ASSERT_NE(std::find(entry.data.i32, entry.data.i32 + entry.count,
1657                                 static_cast<int32_t>(
1658                                         CameraMetadataTag::
1659                                                 ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID)),
1660                       entry.data.i32 + entry.count);
1661         } else {
1662             ADD_FAILURE() << "Get camera availableResultKeys failed!";
1663         }
1664     }
1665 }
1666 
1667 bool CameraAidlTest::isUltraHighResolution(const camera_metadata_t* staticMeta) {
1668     camera_metadata_ro_entry scalerEntry;
1669     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
1670                                            &scalerEntry);
1671     if (rc == 0) {
1672         for (uint32_t i = 0; i < scalerEntry.count; i++) {
1673             if (scalerEntry.data.u8[i] ==
1674                 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR) {
1675                 return true;
1676             }
1677         }
1678     }
1679     return false;
1680 }
1681 
1682 Status CameraAidlTest::getSupportedKeys(camera_metadata_t* staticMeta, uint32_t tagId,
1683                                         std::unordered_set<int32_t>* requestIDs) {
1684     if ((nullptr == staticMeta) || (nullptr == requestIDs)) {
1685         return Status::ILLEGAL_ARGUMENT;
1686     }
1687 
1688     camera_metadata_ro_entry entry;
1689     int rc = find_camera_metadata_ro_entry(staticMeta, tagId, &entry);
1690     if ((0 != rc) || (entry.count == 0)) {
1691         return Status::OK;
1692     }
1693 
1694     requestIDs->insert(entry.data.i32, entry.data.i32 + entry.count);
1695 
1696     return Status::OK;
1697 }
1698 
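// Collect the test pattern modes usable for sensor privacy (SOLID_COLOR and
// BLACK) from the available test pattern modes.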
1699 void CameraAidlTest::getPrivacyTestPatternModes(
1700         const camera_metadata_t* staticMetadata,
1701         std::unordered_set<int32_t>* privacyTestPatternModes) {
1702     ASSERT_NE(staticMetadata, nullptr);
1703     ASSERT_NE(privacyTestPatternModes, nullptr);
1704 
1705     camera_metadata_ro_entry entry;
1706     int retcode = find_camera_metadata_ro_entry(
1707             staticMetadata, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, &entry);
1708     ASSERT_TRUE(0 == retcode);
1709 
1710     for (size_t i = 0; i < entry.count; i++) {
1711         if (entry.data.i32[i] == ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR ||
1712             entry.data.i32[i] == ANDROID_SENSOR_TEST_PATTERN_MODE_BLACK) {
1713             privacyTestPatternModes->insert(entry.data.i32[i]);
1714         }
1715     }
1716 }
1717 
1718 void CameraAidlTest::getMultiResolutionStreamConfigurations(
1719         camera_metadata_ro_entry* multiResStreamConfigs, camera_metadata_ro_entry* streamConfigs,
1720         camera_metadata_ro_entry* maxResolutionStreamConfigs,
1721         const camera_metadata_t* staticMetadata) {
1722     ASSERT_NE(multiResStreamConfigs, nullptr);
1723     ASSERT_NE(streamConfigs, nullptr);
1724     ASSERT_NE(maxResolutionStreamConfigs, nullptr);
1725     ASSERT_NE(staticMetadata, nullptr);
1726 
1727     int retcode = find_camera_metadata_ro_entry(
1728             staticMetadata, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, streamConfigs);
1729     ASSERT_TRUE(0 == retcode);
1730     retcode = find_camera_metadata_ro_entry(
1731             staticMetadata, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
1732             maxResolutionStreamConfigs);
1733     ASSERT_TRUE(-ENOENT == retcode || 0 == retcode);
1734     retcode = find_camera_metadata_ro_entry(
1735             staticMetadata, ANDROID_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS,
1736             multiResStreamConfigs);
1737     ASSERT_TRUE(-ENOENT == retcode || 0 == retcode);
1738 }
1739 
1740 bool CameraAidlTest::isTorchSupported(const camera_metadata_t* staticMeta) {
1741     camera_metadata_ro_entry torchEntry;
1742     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_FLASH_INFO_AVAILABLE, &torchEntry);
1743     if (rc != 0) {
1744         ALOGI("isTorchSupported: Failed to find entry for ANDROID_FLASH_INFO_AVAILABLE");
1745         return false;
1746     }
1747     if (torchEntry.count == 1 && !torchEntry.data.u8[0]) {
1748         ALOGI("isTorchSupported: Torch not supported");
1749         return false;
1750     }
1751     ALOGI("isTorchSupported: Torch supported");
1752     return true;
1753 }
1754 
1755 bool CameraAidlTest::isTorchStrengthControlSupported(const camera_metadata_t* staticMeta) {
1756     int32_t maxLevel = 0;
1757     camera_metadata_ro_entry maxEntry;
1758     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL,
1759                                            &maxEntry);
1760     if (rc != 0) {
1761         ALOGI("isTorchStrengthControlSupported: Failed to find entry for "
1762               "ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL");
1763         return false;
1764     }
1765 
1766     maxLevel = *maxEntry.data.i32;
1767     if (maxLevel > 1) {
1768         ALOGI("isTorchStrengthControlSupported: Torch strength control supported.");
1769         return true;
1770     }
1771     ALOGI("isTorchStrengthControlSupported: Torch strength control not supported.");
1772     return false;
1773 }
1774 
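// Verify a constructed default request template: it must contain at least one
// entry, and when present the default zoom ratio must be 1.0 and the settings
// override must be OFF.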
1775 void CameraAidlTest::verifyRequestTemplate(const camera_metadata_t* metadata,
1776                                            RequestTemplate requestTemplate) {
1777     ASSERT_NE(nullptr, metadata);
1778     size_t entryCount = get_camera_metadata_entry_count(metadata);
1779     ALOGI("template %u metadata entry count is %zu", (int32_t)requestTemplate, entryCount);
1780     // TODO: we can do better than 0 here. Need to check how many required
1781     // request keys we've defined for each template
1782     ASSERT_GT(entryCount, 0u);
1783 
1784     // Check zoomRatio
1785     camera_metadata_ro_entry zoomRatioEntry;
1786     int foundZoomRatio =
1787             find_camera_metadata_ro_entry(metadata, ANDROID_CONTROL_ZOOM_RATIO, &zoomRatioEntry);
1788     if (foundZoomRatio == 0) {
1789         ASSERT_EQ(zoomRatioEntry.count, 1);
1790         ASSERT_EQ(zoomRatioEntry.data.f[0], 1.0f);
1791     }
1792 
1793     // Check settings override
1794     camera_metadata_ro_entry settingsOverrideEntry;
1795     int foundSettingsOverride = find_camera_metadata_ro_entry(metadata,
1796            ANDROID_CONTROL_SETTINGS_OVERRIDE, &settingsOverrideEntry);
1797     if (foundSettingsOverride == 0) {
1798         ASSERT_EQ(settingsOverrideEntry.count, 1);
1799         ASSERT_EQ(settingsOverrideEntry.data.u8[0], ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF);
1800     }
1801 }
1802 
1803 void CameraAidlTest::openEmptyDeviceSession(const std::string& name,
1804                                             const std::shared_ptr<ICameraProvider>& provider,
1805                                             std::shared_ptr<ICameraDeviceSession>* session,
1806                                             CameraMetadata* staticMeta,
1807                                             std::shared_ptr<ICameraDevice>* device) {
1808     ASSERT_NE(nullptr, session);
1809     ASSERT_NE(nullptr, staticMeta);
1810     ASSERT_NE(nullptr, device);
1811 
1812     ALOGI("configureStreams: Testing camera device %s", name.c_str());
1813     ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, device);
1814     ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
1815           ret.getServiceSpecificError());
1816     ASSERT_TRUE(ret.isOk());
1817     ASSERT_NE(*device, nullptr);
1818 
1819     std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
1820     ret = (*device)->open(cb, session);
1821     ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
1822           ret.getServiceSpecificError());
1823     ASSERT_TRUE(ret.isOk());
1824     ASSERT_NE(*session, nullptr);
1825 
1826     ret = (*device)->getCameraCharacteristics(staticMeta);
1827     ASSERT_TRUE(ret.isOk());
1828 }
1829 
1830 void CameraAidlTest::openEmptyInjectionSession(const std::string& name,
1831                                                const std::shared_ptr<ICameraProvider>& provider,
1832                                                std::shared_ptr<ICameraInjectionSession>* session,
1833                                                CameraMetadata* metadata,
1834                                                std::shared_ptr<ICameraDevice>* device) {
1835     ASSERT_NE(nullptr, session);
1836     ASSERT_NE(nullptr, metadata);
1837     ASSERT_NE(nullptr, device);
1838 
1839     ALOGI("openEmptyInjectionSession: Testing camera device %s", name.c_str());
1840     ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, device);
1841     ALOGI("openEmptyInjectionSession: getCameraDeviceInterface returns status:%d:%d",
1842           ret.getExceptionCode(), ret.getServiceSpecificError());
1843     ASSERT_TRUE(ret.isOk());
1844     ASSERT_NE(*device, nullptr);
1845 
1846     std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
1847     ret = (*device)->openInjectionSession(cb, session);
1848     ALOGI("device::openInjectionSession returns status:%d:%d", ret.getExceptionCode(),
1849           ret.getServiceSpecificError());
1850 
1851     if (static_cast<Status>(ret.getServiceSpecificError()) == Status::OPERATION_NOT_SUPPORTED &&
1852         *session == nullptr) {
1853         return;  // Injection session not supported. Caller will receive nullptr in *session.
1854     }
1855 
1856     ASSERT_TRUE(ret.isOk());
1857     ASSERT_NE(*session, nullptr);
1858 
1859     ret = (*device)->getCameraCharacteristics(metadata);
1860     ASSERT_TRUE(ret.isOk());
1861 }
1862 
1863 Status CameraAidlTest::getJpegBufferSize(camera_metadata_t* staticMeta, int32_t* outBufSize) {
1864     if (nullptr == staticMeta || nullptr == outBufSize) {
1865         return Status::ILLEGAL_ARGUMENT;
1866     }
1867 
1868     camera_metadata_ro_entry entry;
1869     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_JPEG_MAX_SIZE, &entry);
1870     if ((0 != rc) || (1 != entry.count)) {
1871         return Status::ILLEGAL_ARGUMENT;
1872     }
1873 
1874     *outBufSize = entry.data.i32[0];
1875     return Status::OK;
1876 }
1877 
1878 Dataspace CameraAidlTest::getDataspace(PixelFormat format) {
1879     switch (format) {
1880         case PixelFormat::BLOB:
1881             return Dataspace::JFIF;
1882         case PixelFormat::Y16:
1883             return Dataspace::DEPTH;
1884         case PixelFormat::RAW16:
1885         case PixelFormat::RAW_OPAQUE:
1886         case PixelFormat::RAW10:
1887         case PixelFormat::RAW12:
1888             return Dataspace::ARBITRARY;
1889         default:
1890             return Dataspace::UNKNOWN;
1891     }
1892 }
1893 
1894 void CameraAidlTest::createStreamConfiguration(std::vector<Stream>& streams,
1895                                                StreamConfigurationMode configMode,
1896                                                StreamConfiguration* config,
1897                                                int32_t jpegBufferSize) {
1898     ASSERT_NE(nullptr, config);
1899 
1900     for (auto& stream : streams) {
1901         stream.bufferSize =
1902                 (stream.format == PixelFormat::BLOB && stream.dataSpace == Dataspace::JFIF)
1903                         ? jpegBufferSize
1904                         : 0;
1905     }
1906 
1907     // The caller is responsible for filling in a non-zero config->streamConfigCounter after this returns
1908     config->streams = streams;
1909     config->operationMode = configMode;
1910     config->multiResolutionInputImage = false;
1911 }
1912 
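// Check that isStreamCombinationSupported (and, on interface version 3 or
// newer, isStreamCombinationWithSettingsSupported) reports the expected
// support status for the given stream configuration.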
1913 void CameraAidlTest::verifyStreamCombination(const std::shared_ptr<ICameraDevice>& device,
1914                                              const StreamConfiguration& config,
1915                                              bool expectedStatus) {
1916     if (device != nullptr) {
1917         bool streamCombinationSupported;
1918         ScopedAStatus ret =
1919                 device->isStreamCombinationSupported(config, &streamCombinationSupported);
1920         ASSERT_TRUE(ret.isOk());
1921         ASSERT_EQ(expectedStatus, streamCombinationSupported);
1922 
1923         int32_t interfaceVersion;
1924         ret = device->getInterfaceVersion(&interfaceVersion);
1925         ASSERT_TRUE(ret.isOk());
1926         bool supportFeatureCombinationQuery =
1927                 (interfaceVersion >= CAMERA_DEVICE_API_MINOR_VERSION_3);
1928         if (supportFeatureCombinationQuery) {
1929             ret = device->isStreamCombinationWithSettingsSupported(config,
1930                                                                    &streamCombinationSupported);
1931             ASSERT_TRUE(ret.isOk());
1932             ASSERT_EQ(expectedStatus, streamCombinationSupported);
1933         }
1934     }
1935 }
1936 
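// Session characteristics may only contain SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
// CONTROL_ZOOM_RATIO_RANGE and vendor tags, and the zoom values they report
// must be consistent with the camera characteristics.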
1937 void CameraAidlTest::verifySessionCharacteristics(const CameraMetadata& session_chars,
1938                                                   const CameraMetadata& camera_chars) {
1939     const camera_metadata_t* session_metadata =
1940             reinterpret_cast<const camera_metadata_t*>(session_chars.metadata.data());
1941 
1942     const camera_metadata_t* camera_metadata =
1943             reinterpret_cast<const camera_metadata_t*>(camera_chars.metadata.data());
1944 
1945     size_t expectedSize = session_chars.metadata.size();
1946     int result = validate_camera_metadata_structure(session_metadata, &expectedSize);
1947     ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
1948     size_t entryCount = get_camera_metadata_entry_count(session_metadata);
1949     // There should be at least 1 characteristic present:
1950     // SCALER_MAX_DIGITAL_ZOOM must always be available.
1951     // ZOOM_RATIO_RANGE must be available if ZOOM_RATIO is supported.
1952     ASSERT_TRUE(entryCount >= 1);
1953 
1954     camera_metadata_ro_entry entry;
1955     int retcode = 0;
1956     float maxDigitalZoom = 1.0;
1957 
1958     for (size_t i = 0; i < entryCount; i++) {
1959         retcode = get_camera_metadata_ro_entry(session_metadata, i, &entry);
1960         ASSERT_TRUE(retcode == 0);
1961 
1962         std::set<uint32_t> allowed_tags = {ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
1963                                            ANDROID_CONTROL_ZOOM_RATIO_RANGE};
1964 
1965         if (contains(allowed_tags, entry.tag)) {
1966             continue;
1967         }
1968 
1969         // Other than the ones above, no tags should be allowed apart from vendor tags.
1970         ASSERT_TRUE(entry.tag >= VENDOR_SECTION_START);
1971     }
1972 
1973     retcode = find_camera_metadata_ro_entry(session_metadata,
1974                                             ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, &entry);
1975     if ((0 == retcode) && (entry.count == 1)) {
1976         maxDigitalZoom = entry.data.f[0];
1977     } else {
1978         ADD_FAILURE() << "Get camera scalerAvailableMaxDigitalZoom failed!";
1979     }
1980 
1981     retcode = find_camera_metadata_ro_entry(camera_metadata, ANDROID_CONTROL_ZOOM_RATIO_RANGE,
1982                                             &entry);
1983     bool hasZoomRatioRange = (0 == retcode && entry.count == 2);
1984     if (!hasZoomRatioRange) {
1985         ALOGI("Skipping the rest of the test as ZOOM_RATIO_RANGE is not in camera characteristics");
1986         return;
1987     }
1988 
1989     // Session characteristics must contain zoom_ratio_range if camera characteristics has it.
1990     retcode = find_camera_metadata_ro_entry(session_metadata, ANDROID_CONTROL_ZOOM_RATIO_RANGE,
1991                                             &entry);
1992     ASSERT_TRUE(0 == retcode && entry.count == 2);
1993 
1994     float minZoomRatio = entry.data.f[0];
1995     float maxZoomRatio = entry.data.f[1];
1996     constexpr float FLOATING_POINT_THRESHOLD = 0.00001f;
1997     if (std::abs(maxDigitalZoom - maxZoomRatio) > FLOATING_POINT_THRESHOLD) {
1998         ADD_FAILURE() << "Difference between maximum digital zoom " << maxDigitalZoom
1999                       << " and maximum zoom ratio " << maxZoomRatio
2000                       << " is greater than the threshold " << FLOATING_POINT_THRESHOLD << "!";
2001     }
2002     if (minZoomRatio > maxZoomRatio) {
2003         ADD_FAILURE() << "Maximum zoom ratio is less than minimum zoom ratio!";
2004     }
2005     if (minZoomRatio > 1.0f) {
2006         ADD_FAILURE() << "Minimum zoom ratio is more than 1.0!";
2007     }
2008     if (maxZoomRatio < 1.0f) {
2009         ADD_FAILURE() << "Maximum zoom ratio is less than 1.0!";
2010     }
2011 }
2012 
2013 std::vector<ConcurrentCameraIdCombination> CameraAidlTest::getConcurrentDeviceCombinations(
2014         std::shared_ptr<ICameraProvider>& provider) {
2015     std::vector<ConcurrentCameraIdCombination> combinations;
2016     ndk::ScopedAStatus ret = provider->getConcurrentCameraIds(&combinations);
2017     if (!ret.isOk()) {
2018         ADD_FAILURE();
2019     }
2020 
2021     return combinations;
2022 }
2023 
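// Build the mandatory concurrent stream list: a single Y16 stream (capped at
// 640x480) for depth-only cameras, otherwise a YUV_420_888 stream capped at
// 1280x720 and a BLOB (JPEG) stream capped at 1920x1440, each clamped to the
// largest size the device advertises for that format.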
2024 Status CameraAidlTest::getMandatoryConcurrentStreams(const camera_metadata_t* staticMeta,
2025                                                      std::vector<AvailableStream>* outputStreams) {
2026     if (nullptr == staticMeta || nullptr == outputStreams) {
2027         return Status::ILLEGAL_ARGUMENT;
2028     }
2029 
2030     if (isDepthOnly(staticMeta)) {
2031         Size y16MaxSize(640, 480);
2032         Size maxAvailableY16Size;
2033         getMaxOutputSizeForFormat(staticMeta, PixelFormat::Y16, &maxAvailableY16Size);
2034         Size y16ChosenSize = getMinSize(y16MaxSize, maxAvailableY16Size);
2035         AvailableStream y16Stream = {.width = y16ChosenSize.width,
2036                                      .height = y16ChosenSize.height,
2037                                      .format = static_cast<int32_t>(PixelFormat::Y16)};
2038         outputStreams->push_back(y16Stream);
2039         return Status::OK;
2040     }
2041 
2042     Size yuvMaxSize(1280, 720);
2043     Size jpegMaxSize(1920, 1440);
2044     Size maxAvailableYuvSize;
2045     Size maxAvailableJpegSize;
2046     getMaxOutputSizeForFormat(staticMeta, PixelFormat::YCBCR_420_888, &maxAvailableYuvSize);
2047     getMaxOutputSizeForFormat(staticMeta, PixelFormat::BLOB, &maxAvailableJpegSize);
2048     Size yuvChosenSize = getMinSize(yuvMaxSize, maxAvailableYuvSize);
2049     Size jpegChosenSize = getMinSize(jpegMaxSize, maxAvailableJpegSize);
2050 
2051     AvailableStream yuvStream = {.width = yuvChosenSize.width,
2052                                  .height = yuvChosenSize.height,
2053                                  .format = static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
2054 
2055     AvailableStream jpegStream = {.width = jpegChosenSize.width,
2056                                   .height = jpegChosenSize.height,
2057                                   .format = static_cast<int32_t>(PixelFormat::BLOB)};
2058     outputStreams->push_back(yuvStream);
2059     outputStreams->push_back(jpegStream);
2060 
2061     return Status::OK;
2062 }
2063 
2064 bool CameraAidlTest::isDepthOnly(const camera_metadata_t* staticMeta) {
2065     camera_metadata_ro_entry scalerEntry;
2066     camera_metadata_ro_entry depthEntry;
2067 
2068     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
2069                                            &scalerEntry);
2070     if (rc == 0) {
2071         for (uint32_t i = 0; i < scalerEntry.count; i++) {
2072             if (scalerEntry.data.u8[i] ==
2073                 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE) {
2074                 return false;
2075             }
2076         }
2077     }
2078 
2079     for (uint32_t i = 0; (0 == rc) && (i < scalerEntry.count); i++) {
2080         if (scalerEntry.data.u8[i] == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT) {
2081             rc = find_camera_metadata_ro_entry(
2082                     staticMeta, ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, &depthEntry);
2083             size_t idx = 0;
2084             if (rc == 0 && depthEntry.count > 0 && depthEntry.data.i32[idx] == static_cast<int32_t>(PixelFormat::Y16)) {
2085                 // only Depth16 format is supported now
2086                 return true;
2087             }
2088             break;
2089         }
2090     }
2091 
2092     return false;
2093 }
2094 
2095 Status CameraAidlTest::getMaxOutputSizeForFormat(const camera_metadata_t* staticMeta,
2096                                                  PixelFormat format, Size* size,
2097                                                  bool maxResolution) {
2098     std::vector<AvailableStream> outputStreams;
2099     if (size == nullptr ||
2100         getAvailableOutputStreams(staticMeta, outputStreams,
2101                                   /*threshold*/ nullptr, maxResolution) != Status::OK) {
2102         return Status::ILLEGAL_ARGUMENT;
2103     }
2104     Size maxSize;
2105     bool found = false;
2106     for (auto& outputStream : outputStreams) {
2107         if (static_cast<int32_t>(format) == outputStream.format &&
2108             (outputStream.width * outputStream.height > maxSize.width * maxSize.height)) {
2109             maxSize.width = outputStream.width;
2110             maxSize.height = outputStream.height;
2111             found = true;
2112         }
2113     }
2114     if (!found) {
2115         ALOGE("%s: chosen format %d not found", __FUNCTION__, static_cast<int32_t>(format));
2116         return Status::ILLEGAL_ARGUMENT;
2117     }
2118     *size = maxSize;
2119     return Status::OK;
2120 }
2121 
2122 Size CameraAidlTest::getMinSize(Size a, Size b) {
2123     if (a.width * a.height < b.width * b.height) {
2124         return a;
2125     }
2126     return b;
2127 }
2128 
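// Parse ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP, which is encoded as
// a flat list of (input format, output count N, N output formats) groups, into
// individual input/output format pairs.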
2129 Status CameraAidlTest::getZSLInputOutputMap(camera_metadata_t* staticMeta,
2130                                             std::vector<AvailableZSLInputOutput>& inputOutputMap) {
2131     if (nullptr == staticMeta) {
2132         return Status::ILLEGAL_ARGUMENT;
2133     }
2134 
2135     camera_metadata_ro_entry entry;
2136     int rc = find_camera_metadata_ro_entry(
2137             staticMeta, ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP, &entry);
2138     if ((0 != rc) || (0 >= entry.count)) {
2139         return Status::ILLEGAL_ARGUMENT;
2140     }
2141 
2142     const int32_t* contents = &entry.data.i32[0];
2143     for (size_t i = 0; i < entry.count;) {
2144         int32_t inputFormat = contents[i++];
2145         int32_t length = contents[i++];
2146         for (int32_t j = 0; j < length; j++) {
2147             int32_t outputFormat = contents[i + j];
2148             AvailableZSLInputOutput zslEntry = {inputFormat, outputFormat};
2149             inputOutputMap.push_back(zslEntry);
2150         }
2151         i += length;
2152     }
2153 
2154     return Status::OK;
2155 }
2156 
2157 Status CameraAidlTest::findLargestSize(const std::vector<AvailableStream>& streamSizes,
2158                                        int32_t format, AvailableStream& result) {
2159     result = {0, 0, 0};
2160     for (auto& iter : streamSizes) {
2161         if (format == iter.format) {
2162             if ((result.width * result.height) < (iter.width * iter.height)) {
2163                 result = iter;
2164             }
2165         }
2166     }
2167 
2168     return (result.format == format) ? Status::OK : Status::ILLEGAL_ARGUMENT;
2169 }
2170 
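// Construct the default request settings for the given template and copy only
// the entries whose keys appear in availableKeys into filteredSettings.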
2171 void CameraAidlTest::constructFilteredSettings(
2172         const std::shared_ptr<ICameraDeviceSession>& session,
2173         const std::unordered_set<int32_t>& availableKeys, RequestTemplate reqTemplate,
2174         android::hardware::camera::common::V1_0::helper::CameraMetadata* defaultSettings,
2175         android::hardware::camera::common::V1_0::helper::CameraMetadata* filteredSettings) {
2176     ASSERT_NE(defaultSettings, nullptr);
2177     ASSERT_NE(filteredSettings, nullptr);
2178 
2179     CameraMetadata req;
2180     auto ret = session->constructDefaultRequestSettings(reqTemplate, &req);
2181     ASSERT_TRUE(ret.isOk());
2182 
2183     const camera_metadata_t* metadata =
2184             clone_camera_metadata(reinterpret_cast<const camera_metadata_t*>(req.metadata.data()));
2185     size_t expectedSize = req.metadata.size();
2186     int result = validate_camera_metadata_structure(metadata, &expectedSize);
2187     ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
2188 
2189     size_t entryCount = get_camera_metadata_entry_count(metadata);
2190     ASSERT_GT(entryCount, 0u);
2191     *defaultSettings = metadata;
2192 
2193     const android::hardware::camera::common::V1_0::helper::CameraMetadata& constSettings =
2194             *defaultSettings;
2195     for (const auto& keyIt : availableKeys) {
2196         camera_metadata_ro_entry entry = constSettings.find(keyIt);
2197         if (entry.count > 0) {
2198             filteredSettings->update(entry);
2199         }
2200     }
2201 }
2202 
2203 void CameraAidlTest::verifySessionReconfigurationQuery(
2204         const std::shared_ptr<ICameraDeviceSession>& session, camera_metadata* oldSessionParams,
2205         camera_metadata* newSessionParams) {
2206     ASSERT_NE(nullptr, session);
2207     ASSERT_NE(nullptr, oldSessionParams);
2208     ASSERT_NE(nullptr, newSessionParams);
2209 
2210     std::vector<uint8_t> oldParams =
2211             std::vector(reinterpret_cast<uint8_t*>(oldSessionParams),
2212                         reinterpret_cast<uint8_t*>(oldSessionParams) +
2213                                 get_camera_metadata_size(oldSessionParams));
2214     CameraMetadata oldMetadata = {oldParams};
2215 
2216     std::vector<uint8_t> newParams =
2217             std::vector(reinterpret_cast<uint8_t*>(newSessionParams),
2218                         reinterpret_cast<uint8_t*>(newSessionParams) +
2219                                 get_camera_metadata_size(newSessionParams));
2220     CameraMetadata newMetadata = {newParams};
2221 
2222     bool reconfigReq;
2223     ndk::ScopedAStatus ret =
2224             session->isReconfigurationRequired(oldMetadata, newMetadata, &reconfigReq);
2225     ASSERT_TRUE(ret.isOk() || static_cast<Status>(ret.getServiceSpecificError()) ==
2226                                       Status::OPERATION_NOT_SUPPORTED);
2227 }
2228 
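// Returns OK when ANDROID_REQUEST_AVAILABLE_CAPABILITIES lists
// CONSTRAINED_HIGH_SPEED_VIDEO, OPERATION_NOT_SUPPORTED when it does not,
// and ILLEGAL_ARGUMENT when the static metadata is missing or malformed.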
2229 Status CameraAidlTest::isConstrainedModeAvailable(camera_metadata_t* staticMeta) {
2230     Status ret = Status::OPERATION_NOT_SUPPORTED;
2231     if (nullptr == staticMeta) {
2232         return Status::ILLEGAL_ARGUMENT;
2233     }
2234 
2235     camera_metadata_ro_entry entry;
2236     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
2237                                            &entry);
2238     if (0 != rc) {
2239         return Status::ILLEGAL_ARGUMENT;
2240     }
2241 
2242     for (size_t i = 0; i < entry.count; i++) {
2243         if (ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO ==
2244             entry.data.u8[i]) {
2245             ret = Status::OK;
2246             break;
2247         }
2248     }
2249 
2250     return ret;
2251 }
2252 
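// Each high speed video configuration entry is a 5-tuple
// (width, height, fpsMin, fpsMax, batchSizeMax); pick the largest advertised
// resolution for the constrained high speed stream.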
2253 Status CameraAidlTest::pickConstrainedModeSize(camera_metadata_t* staticMeta,
2254                                                AvailableStream& hfrStream) {
2255     if (nullptr == staticMeta) {
2256         return Status::ILLEGAL_ARGUMENT;
2257     }
2258 
2259     camera_metadata_ro_entry entry;
2260     int rc = find_camera_metadata_ro_entry(
2261             staticMeta, ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS, &entry);
2262     if (0 != rc) {
2263         return Status::OPERATION_NOT_SUPPORTED;
2264     } else if (0 != (entry.count % 5)) {
2265         return Status::ILLEGAL_ARGUMENT;
2266     }
2267 
2268     hfrStream = {0, 0, static_cast<uint32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2269     for (size_t i = 0; i < entry.count; i += 5) {
2270         int32_t w = entry.data.i32[i];
2271         int32_t h = entry.data.i32[i + 1];
2272         if ((hfrStream.width * hfrStream.height) < (w * h)) {
2273             hfrStream.width = w;
2274             hfrStream.height = h;
2275         }
2276     }
2277 
2278     return Status::OK;
2279 }
2280 
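// For every camera (optionally secure-only), configures a single preview-sized
// stream, submits one capture request with default settings, then a second request
// that reuses the cached buffer (null handle) and empty settings, and verifies the
// results and buffer returns for both requests.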
2281 void CameraAidlTest::processCaptureRequestInternal(uint64_t bufferUsage,
2282                                                    RequestTemplate reqTemplate,
2283                                                    bool useSecureOnlyCameras) {
2284     std::vector<std::string> cameraDeviceNames =
2285             getCameraDeviceNames(mProvider, useSecureOnlyCameras);
2286     AvailableStream streamThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2287                                        static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2288     int64_t bufferId = 1;
2289     int32_t frameNumber = 1;
2290     CameraMetadata settings;
2291     for (const auto& name : cameraDeviceNames) {
2292         Stream testStream;
2293         std::vector<HalStream> halStreams;
2294         std::shared_ptr<ICameraDeviceSession> session;
2295         std::shared_ptr<DeviceCb> cb;
2296         bool supportsPartialResults = false;
2297         bool useHalBufManager = false;
2298         int32_t partialResultCount = 0;
2299         configureSingleStream(name, mProvider, &streamThreshold, bufferUsage, reqTemplate,
2300                               &session /*out*/, &testStream /*out*/, &halStreams /*out*/,
2301                               &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2302                               &useHalBufManager /*out*/, &cb /*out*/);
2303 
2304         ASSERT_NE(session, nullptr);
2305         ASSERT_NE(cb, nullptr);
2306         ASSERT_FALSE(halStreams.empty());
2307 
2308         std::shared_ptr<ResultMetadataQueue> resultQueue;
2309         ::aidl::android::hardware::common::fmq::MQDescriptor<
2310                 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2311                 descriptor;
2312         ndk::ScopedAStatus ret = session->getCaptureResultMetadataQueue(&descriptor);
2313         ASSERT_TRUE(ret.isOk());
2314 
2315         resultQueue = std::make_shared<ResultMetadataQueue>(descriptor);
2316         if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2317         ALOGE("%s: HAL returned an empty result metadata fmq,"
2318               " not using it",
2319               __func__);
2320             resultQueue = nullptr;
2321             // Don't use the queue onwards.
2322         }
2323 
2324         std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
2325                 1, false, supportsPartialResults, partialResultCount, resultQueue);
2326 
2327         CameraMetadata req;
2328         ret = session->constructDefaultRequestSettings(reqTemplate, &req);
2329         ASSERT_TRUE(ret.isOk());
2330         settings = req;
2331 
2332         overrideRotateAndCrop(&settings);
2333 
2334         std::vector<CaptureRequest> requests(1);
2335         CaptureRequest& request = requests[0];
2336         request.frameNumber = frameNumber;
2337         request.fmqSettingsSize = 0;
2338         request.settings = settings;
2339 
2340         std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2341         outputBuffers.resize(1);
2342         StreamBuffer& outputBuffer = outputBuffers[0];
2343         if (useHalBufManager) {
2344             outputBuffer = {halStreams[0].id,
2345                             /*bufferId*/ 0,   NativeHandle(), BufferStatus::OK,
2346                             NativeHandle(),   NativeHandle()};
2347         } else {
2348             buffer_handle_t handle;
2349             allocateGraphicBuffer(
2350                     testStream.width, testStream.height,
2351                     /* We don't look at halStreamConfig.streams[0].consumerUsage
2352                      * since that is 0 for output streams
2353                      */
2354                     ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(
2355                             static_cast<uint64_t>(halStreams[0].producerUsage), bufferUsage)),
2356                     halStreams[0].overrideFormat, &handle);
2357 
2358             outputBuffer = {halStreams[0].id, bufferId,       ::android::makeToAidl(handle),
2359                             BufferStatus::OK, NativeHandle(), NativeHandle()};
2360         }
2361         request.inputBuffer = {-1,
2362                                0,
2363                                NativeHandle(),
2364                                BufferStatus::ERROR,
2365                                NativeHandle(),
2366                                NativeHandle()};  // Empty Input Buffer
2367 
2368         {
2369             std::unique_lock<std::mutex> l(mLock);
2370             mInflightMap.clear();
2371             mInflightMap.insert(std::make_pair(frameNumber, inflightReq));
2372         }
2373 
2374         int32_t numRequestProcessed = 0;
2375         std::vector<BufferCache> cachesToRemove;
2376         ret = session->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2377         ALOGI("processCaptureRequestInternal: processCaptureRequest returns status: %d:%d",
2378               ret.getExceptionCode(), ret.getServiceSpecificError());
2379 
2380         ASSERT_TRUE(ret.isOk());
2381         ASSERT_EQ(numRequestProcessed, 1u);
2382 
2383         {
2384             std::unique_lock<std::mutex> l(mLock);
2385             while (!inflightReq->errorCodeValid &&
2386                    ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2387                 auto timeout = std::chrono::system_clock::now() +
2388                                std::chrono::seconds(kStreamBufferTimeoutSec);
2389                 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2390             }
2391 
2392             ASSERT_FALSE(inflightReq->errorCodeValid);
2393             ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2394             ASSERT_EQ(testStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId);
2395 
2396             // shutterReadoutTimestamp, if supported, must
2397             // be >= shutterTimestamp + exposureTime,
2398             // and < shutterTimestamp + exposureTime + rollingShutterSkew / 2.
2399             ASSERT_FALSE(inflightReq->collectedResult.isEmpty());
2400 
2401             if (mSupportReadoutTimestamp &&
2402                 inflightReq->collectedResult.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
2403                 camera_metadata_entry_t exposureTimeResult =
2404                         inflightReq->collectedResult.find(ANDROID_SENSOR_EXPOSURE_TIME);
2405                 nsecs_t exposureToReadout =
2406                         inflightReq->shutterReadoutTimestamp - inflightReq->shutterTimestamp;
2407                 ASSERT_GE(exposureToReadout, exposureTimeResult.data.i64[0]);
2408                 if (inflightReq->collectedResult.exists(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW)) {
2409                     camera_metadata_entry_t rollingShutterSkew =
2410                             inflightReq->collectedResult.find(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW);
2411                     ASSERT_LT(exposureToReadout,
2412                               exposureTimeResult.data.i64[0] + rollingShutterSkew.data.i64[0] / 2);
2413                 }
2414             }
2415 
2416             request.frameNumber++;
2417             // Empty settings should be supported after the first call
2418             // for repeating requests.
2419             request.settings.metadata.clear();
2420             // The buffer has been registered to HAL by bufferId, so per
2421             // API contract we should send a null handle for this buffer
2422             request.outputBuffers[0].buffer = NativeHandle();
2423             mInflightMap.clear();
2424             inflightReq = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2425                                                             partialResultCount, resultQueue);
2426             mInflightMap.insert(std::make_pair(request.frameNumber, inflightReq));
2427         }
2428 
2429         ret = session->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2430         ALOGI("processCaptureRequestInternal: processCaptureRequest returns status: %d:%d",
2431               ret.getExceptionCode(), ret.getServiceSpecificError());
2432         ASSERT_TRUE(ret.isOk());
2433         ASSERT_EQ(numRequestProcessed, 1u);
2434 
2435         {
2436             std::unique_lock<std::mutex> l(mLock);
2437             while (!inflightReq->errorCodeValid &&
2438                    ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2439                 auto timeout = std::chrono::system_clock::now() +
2440                                std::chrono::seconds(kStreamBufferTimeoutSec);
2441                 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2442             }
2443 
2444             ASSERT_FALSE(inflightReq->errorCodeValid);
2445             ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2446             ASSERT_EQ(testStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId);
2447         }
2448 
2449         if (useHalBufManager) {
2450             verifyBuffersReturned(session, testStream.id, cb);
2451         }
2452 
2453         ret = session->close();
2454         ASSERT_TRUE(ret.isOk());
2455     }
2456 }
2457 
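// For each camera that advertises stream use cases, configures a single output
// stream with every tested use case (the mandatory ones plus one intentionally
// invalid value) and checks that isStreamCombinationSupported and configureStreams
// agree on whether the use case is supported.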
2458 void CameraAidlTest::configureStreamUseCaseInternal(const AvailableStream &threshold) {
2459     std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2460 
2461     for (const auto& name : cameraDeviceNames) {
2462         CameraMetadata meta;
2463         std::shared_ptr<ICameraDevice> cameraDevice;
2464 
2465         openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2466                                &cameraDevice /*out*/);
2467 
2468         camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
2469         // Check if the camera supports only depth or doesn't support the stream use case capability
2470         if (isDepthOnly(staticMeta) || !supportsStreamUseCaseCap(staticMeta) ||
2471             (threshold.format == static_cast<int32_t>(PixelFormat::RAW16) &&
2472              !supportsCroppedRawUseCase(staticMeta))) {
2473             ndk::ScopedAStatus ret = mSession->close();
2474             mSession = nullptr;
2475             ASSERT_TRUE(ret.isOk());
2476             continue;
2477         }
2478 
2479         std::vector<AvailableStream> outputPreviewStreams;
2480 
2481         ASSERT_EQ(Status::OK,
2482                   getAvailableOutputStreams(staticMeta, outputPreviewStreams, &threshold));
2483         ASSERT_NE(0u, outputPreviewStreams.size());
2484 
2485         // Combine valid and invalid stream use cases
2486         std::vector<int64_t> testedUseCases;
2487         testedUseCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW + 1);
2488 
2489         std::vector<int64_t> supportedUseCases;
2490         if (threshold.format == static_cast<int32_t>(PixelFormat::RAW16)) {
2491             // If the format is RAW16, only the CROPPED_RAW and DEFAULT use cases
2492             // are treated as supported; all others are unsupported for this format.
2493             testedUseCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW);
2494             supportedUseCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW);
2495             supportedUseCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
2496         } else {
2497             camera_metadata_ro_entry entry;
2498             testedUseCases.insert(testedUseCases.end(), kMandatoryUseCases.begin(),
2499                                   kMandatoryUseCases.end());
2500             auto retcode = find_camera_metadata_ro_entry(
2501                     staticMeta, ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES, &entry);
2502             if ((0 == retcode) && (entry.count > 0)) {
2503                 supportedUseCases.insert(supportedUseCases.end(), entry.data.i64,
2504                                          entry.data.i64 + entry.count);
2505             } else {
2506                 supportedUseCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
2507             }
2508         }
2509 
2510         std::vector<Stream> streams(1);
2511         streams[0] = {
2512                 0,
2513                 StreamType::OUTPUT,
2514                 outputPreviewStreams[0].width,
2515                 outputPreviewStreams[0].height,
2516                 static_cast<PixelFormat>(outputPreviewStreams[0].format),
2517                 static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
2518                         GRALLOC1_CONSUMER_USAGE_CPU_READ),
2519                 Dataspace::UNKNOWN,
2520                 StreamRotation::ROTATION_0,
2521                 std::string(),
2522                 0,
2523                 -1,
2524                 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2525                 RequestAvailableDynamicRangeProfilesMap::
2526                         ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
2527                 ScalerAvailableStreamUseCases::ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
2528                 static_cast<int>(
2529                         RequestAvailableColorSpaceProfilesMap::
2530                                 ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)};
2531 
2532         int32_t streamConfigCounter = 0;
2533         CameraMetadata req;
2534         StreamConfiguration config;
2535         RequestTemplate reqTemplate = RequestTemplate::STILL_CAPTURE;
2536         ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &req);
2537         ASSERT_TRUE(ret.isOk());
2538         config.sessionParams = req;
2539 
2540         for (int64_t useCase : testedUseCases) {
2541             bool useCaseSupported = std::find(supportedUseCases.begin(), supportedUseCases.end(),
2542                                               useCase) != supportedUseCases.end();
2543 
2544             streams[0].useCase = static_cast<
2545                     aidl::android::hardware::camera::metadata::ScalerAvailableStreamUseCases>(
2546                     useCase);
2547             config.streams = streams;
2548             config.operationMode = StreamConfigurationMode::NORMAL_MODE;
2549             config.streamConfigCounter = streamConfigCounter;
2550             config.multiResolutionInputImage = false;
2551 
2552             bool combSupported;
2553             ret = cameraDevice->isStreamCombinationSupported(config, &combSupported);
2554             if (static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED) ==
2555                 ret.getServiceSpecificError()) {
2556                 continue;
2557             }
2558 
2559             ASSERT_TRUE(ret.isOk());
2560             ASSERT_EQ(combSupported, useCaseSupported);
2561 
2562             std::vector<HalStream> halStreams;
2563             ret = mSession->configureStreams(config, &halStreams);
2564             ALOGI("configureStreams returns status: %d", ret.getServiceSpecificError());
2565             if (useCaseSupported) {
2566                 ASSERT_TRUE(ret.isOk());
2567                 ASSERT_EQ(1u, halStreams.size());
2568             } else {
2569                 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
2570                           ret.getServiceSpecificError());
2571             }
2572         }
2573         ret = mSession->close();
2574         mSession = nullptr;
2575         ASSERT_TRUE(ret.isOk());
2576     }
2577 
2578 }
2579 
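// Uses configureStreamsV2 when session-level buffer management is requested and the
// session interface version is at least 3, otherwise falls back to configureStreams;
// also records the ids of all streams whose buffers are HAL managed.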
2580 ndk::ScopedAStatus CameraAidlTest::configureStreams(std::shared_ptr<ICameraDeviceSession>& session,
2581                                                     const StreamConfiguration& config,
2582                                                     BufferManagerType bufferManagerType,
2583                                                     std::set<int32_t>* halBufManagedStreamIds,
2584                                                     std::vector<HalStream>* halStreams) {
2585     auto ret = ndk::ScopedAStatus::ok();
2586     ConfigureStreamsRet aidl_return;
2587     int32_t interfaceVersion = -1;
2588     ret = session->getInterfaceVersion(&interfaceVersion);
2589     if (!ret.isOk()) {
2590         return ret;
2591     }
2592 
2593     if (bufferManagerType == BufferManagerType::SESSION && interfaceVersion >= 3) {
2594         ret = session->configureStreamsV2(config, &aidl_return);
2595     } else {
2596         ret = session->configureStreams(config, halStreams);
2597     }
2598     if (!ret.isOk()) {
2599         return ret;
2600     }
2601     if (bufferManagerType == BufferManagerType::SESSION) {
2602         *halStreams = std::move(aidl_return.halStreams);
2603     }
2604     for (const auto& halStream : *halStreams) {
2605         if ((bufferManagerType == BufferManagerType::SESSION && halStream.enableHalBufferManager) ||
2606             bufferManagerType == BufferManagerType::HAL) {
2607             halBufManagedStreamIds->insert(halStream.id);
2608         }
2609     }
2610     return ndk::ScopedAStatus::ok();
2611 }
2612 
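// Opens the camera, picks one output stream that satisfies previewThreshold, and
// configures it with the requested buffer usage and request template, reporting
// partial result support and whether HAL buffer management is active.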
2613 void CameraAidlTest::configureSingleStream(
2614         const std::string& name, const std::shared_ptr<ICameraProvider>& provider,
2615         const AvailableStream* previewThreshold, uint64_t bufferUsage, RequestTemplate reqTemplate,
2616         std::shared_ptr<ICameraDeviceSession>* session, Stream* previewStream,
2617         std::vector<HalStream>* halStreams, bool* supportsPartialResults,
2618         int32_t* partialResultCount, bool* useHalBufManager, std::shared_ptr<DeviceCb>* cb,
2619         uint32_t streamConfigCounter) {
2620     ASSERT_NE(nullptr, session);
2621     ASSERT_NE(nullptr, previewStream);
2622     ASSERT_NE(nullptr, halStreams);
2623     ASSERT_NE(nullptr, supportsPartialResults);
2624     ASSERT_NE(nullptr, partialResultCount);
2625     ASSERT_NE(nullptr, useHalBufManager);
2626     ASSERT_NE(nullptr, cb);
2627 
2628     std::vector<AvailableStream> outputPreviewStreams;
2629     std::shared_ptr<ICameraDevice> device;
2630     ALOGI("configureStreams: Testing camera device %s", name.c_str());
2631 
2632     ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, &device);
2633     ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
2634           ret.getServiceSpecificError());
2635     ASSERT_TRUE(ret.isOk());
2636     ASSERT_NE(device, nullptr);
2637 
2638     camera_metadata_t* staticMeta;
2639     CameraMetadata chars;
2640     ret = device->getCameraCharacteristics(&chars);
2641     ASSERT_TRUE(ret.isOk());
2642     staticMeta = clone_camera_metadata(
2643             reinterpret_cast<const camera_metadata_t*>(chars.metadata.data()));
2644     ASSERT_NE(nullptr, staticMeta);
2645 
2646     size_t expectedSize = chars.metadata.size();
2647     ALOGE("validate_camera_metadata_structure: %d",
2648           validate_camera_metadata_structure(staticMeta, &expectedSize));
2649 
2650     camera_metadata_ro_entry entry;
2651     auto status =
2652             find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &entry);
2653     if ((0 == status) && (entry.count > 0)) {
2654         *partialResultCount = entry.data.i32[0];
2655         *supportsPartialResults = (*partialResultCount > 1);
2656     }
2657 
2658     *cb = ndk::SharedRefBase::make<DeviceCb>(this, staticMeta);
2659 
2660     ret = device->open(*cb, session);
2661     ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
2662           ret.getServiceSpecificError());
2663     ASSERT_TRUE(ret.isOk());
2664     ASSERT_NE(*session, nullptr);
2665 
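    // Determine which buffer management scheme the HAL advertises: HAL-level
    // management (HIDL device 3.5 style), session-configurable management, or the
    // default framework-managed buffers.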
2666     BufferManagerType bufferManagerType = BufferManagerType::FRAMEWORK;
2667     status = find_camera_metadata_ro_entry(
2668             staticMeta, ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry);
2669     if ((0 == status) && (entry.count == 1)) {
2670         if (entry.data.u8[0] == ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5) {
2671             bufferManagerType = BufferManagerType::HAL;
2672         } else if (entry.data.u8[0] ==
2673                    ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_SESSION_CONFIGURABLE) {
2674             bufferManagerType = BufferManagerType::SESSION;
2675         }
2676     }
2677 
2678     outputPreviewStreams.clear();
2679     auto rc = getAvailableOutputStreams(staticMeta, outputPreviewStreams, previewThreshold);
2680 
2681     int32_t jpegBufferSize = 0;
2682     ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
2683     ASSERT_NE(0u, jpegBufferSize);
2684 
2685     ASSERT_EQ(Status::OK, rc);
2686     ASSERT_FALSE(outputPreviewStreams.empty());
2687 
2688     Dataspace dataspace = Dataspace::UNKNOWN;
2689     switch (static_cast<PixelFormat>(outputPreviewStreams[0].format)) {
2690         case PixelFormat::Y16:
2691             dataspace = Dataspace::DEPTH;
2692             break;
2693         default:
2694             dataspace = Dataspace::UNKNOWN;
2695     }
2696 
2697     std::vector<Stream> streams(1);
2698     streams[0] = {0,
2699                   StreamType::OUTPUT,
2700                   outputPreviewStreams[0].width,
2701                   outputPreviewStreams[0].height,
2702                   static_cast<PixelFormat>(outputPreviewStreams[0].format),
2703                   static_cast<aidl::android::hardware::graphics::common::BufferUsage>(bufferUsage),
2704                   dataspace,
2705                   StreamRotation::ROTATION_0,
2706                   "",
2707                   0,
2708                   /*groupId*/ -1,
2709                   {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2710                   RequestAvailableDynamicRangeProfilesMap::
2711                           ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
2712                   ScalerAvailableStreamUseCases::ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
2713                   static_cast<int>(
2714                           RequestAvailableColorSpaceProfilesMap::
2715                                   ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)};
2716 
2717     StreamConfiguration config;
2718     config.streams = streams;
2719     createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2720                               jpegBufferSize);
2721     if (*session != nullptr) {
2722         CameraMetadata sessionParams;
2723         ret = (*session)->constructDefaultRequestSettings(reqTemplate, &sessionParams);
2724         ASSERT_TRUE(ret.isOk());
2725         config.sessionParams = sessionParams;
2726         config.streamConfigCounter = (int32_t)streamConfigCounter;
2727 
2728         bool supported = false;
2729         ret = device->isStreamCombinationSupported(config, &supported);
2730         ASSERT_TRUE(ret.isOk());
2731         ASSERT_EQ(supported, true);
2732 
2733         std::vector<HalStream> halConfigs;
2734         std::set<int32_t> halBufManagedStreamIds;
2735         ret = configureStreams(*session, config, bufferManagerType, &halBufManagedStreamIds,
2736                                &halConfigs);
2737         ALOGI("configureStreams returns status: %d:%d", ret.getExceptionCode(),
2738               ret.getServiceSpecificError());
2739         ASSERT_TRUE(ret.isOk());
2740         ASSERT_EQ(1u, halConfigs.size());
2741         halStreams->clear();
2742         halStreams->push_back(halConfigs[0]);
2743         *useHalBufManager = halBufManagedStreamIds.size() != 0;
2744         if (*useHalBufManager) {
2745             std::vector<Stream> ss(1);
2746             std::vector<HalStream> hs(1);
2747             ss[0] = config.streams[0];
2748             hs[0] = halConfigs[0];
2749             (*cb)->setCurrentStreamConfig(ss, hs);
2750         }
2751     }
2752     *previewStream = config.streams[0];
2753     ASSERT_TRUE(ret.isOk());
2754 }
2755 
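// If the request leaves ANDROID_SCALER_ROTATE_AND_CROP set to AUTO, override it to
// NONE before the request is submitted.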
2756 void CameraAidlTest::overrideRotateAndCrop(CameraMetadata* settings) {
2757     if (settings == nullptr) {
2758         return;
2759     }
2760 
2761     ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta =
2762             clone_camera_metadata(reinterpret_cast<camera_metadata_t*>(settings->metadata.data()));
2763     auto entry = requestMeta.find(ANDROID_SCALER_ROTATE_AND_CROP);
2764     if ((entry.count > 0) && (entry.data.u8[0] == ANDROID_SCALER_ROTATE_AND_CROP_AUTO)) {
2765         uint8_t disableRotateAndCrop = ANDROID_SCALER_ROTATE_AND_CROP_NONE;
2766         requestMeta.update(ANDROID_SCALER_ROTATE_AND_CROP, &disableRotateAndCrop, 1);
2767         settings->metadata.clear();
2768         camera_metadata_t* metaBuffer = requestMeta.release();
2769         uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2770         settings->metadata =
2771                 std::vector(rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2772     }
2773 }
2774 
2775 void CameraAidlTest::verifyBuffersReturned(const std::shared_ptr<ICameraDeviceSession>& session,
2776                                            int32_t streamId, const std::shared_ptr<DeviceCb>& cb,
2777                                            uint32_t streamConfigCounter) {
2778     ASSERT_NE(nullptr, session);
2779 
2780     std::vector<int32_t> streamIds(1);
2781     streamIds[0] = streamId;
2782     session->signalStreamFlush(streamIds, /*streamConfigCounter*/ streamConfigCounter);
2783     cb->waitForBuffersReturned();
2784 }
2785 
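// Runs one preview capture per camera that supports preview stabilization. With
// previewStabilizationOn == false the measured lag (buffer ready timestamp minus
// notify timestamp) is stored in cameraDeviceToTimeLag; with it on, the lag must
// stay within one frame duration of the stored baseline.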
2786 void CameraAidlTest::processPreviewStabilizationCaptureRequestInternal(
2787         bool previewStabilizationOn,
2788         // Used as output when preview stabilization is off, as input when it's on.
2789         std::unordered_map<std::string, nsecs_t>& cameraDeviceToTimeLag) {
2790     std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2791     AvailableStream streamThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2792                                        static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2793     int64_t bufferId = 1;
2794     int32_t frameNumber = 1;
2795     std::vector<uint8_t> settings;
2796 
2797     for (const auto& name : cameraDeviceNames) {
2798         if (!supportsPreviewStabilization(name, mProvider)) {
2799             ALOGI(" %s Camera device %s doesn't support preview stabilization, skipping", __func__,
2800                   name.c_str());
2801             continue;
2802         }
2803 
2804         Stream testStream;
2805         std::vector<HalStream> halStreams;
2806         std::shared_ptr<ICameraDeviceSession> session;
2807         std::shared_ptr<DeviceCb> cb;
2808         bool supportsPartialResults = false;
2809         bool useHalBufManager = false;
2810         int32_t partialResultCount = 0;
2811         configureSingleStream(name, mProvider, &streamThreshold, GRALLOC1_CONSUMER_USAGE_HWCOMPOSER,
2812                               RequestTemplate::PREVIEW, &session /*out*/, &testStream /*out*/,
2813                               &halStreams /*out*/, &supportsPartialResults /*out*/,
2814                               &partialResultCount /*out*/, &useHalBufManager /*out*/, &cb /*out*/);
2815 
2816         ::aidl::android::hardware::common::fmq::MQDescriptor<
2817                 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2818                 descriptor;
2819         ndk::ScopedAStatus resultQueueRet = session->getCaptureResultMetadataQueue(&descriptor);
2820         ASSERT_TRUE(resultQueueRet.isOk());
2821 
2822         std::shared_ptr<ResultMetadataQueue> resultQueue =
2823                 std::make_shared<ResultMetadataQueue>(descriptor);
2824         if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2825             ALOGE("%s: HAL returned an empty result metadata fmq,"
2826                   " not using it",
2827                   __func__);
2828             resultQueue = nullptr;
2829             // Don't use the queue onwards.
2830         }
2831 
2832         std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
2833                 1, false, supportsPartialResults, partialResultCount, resultQueue);
2834 
2835         CameraMetadata defaultMetadata;
2836         android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
2837         ndk::ScopedAStatus ret = session->constructDefaultRequestSettings(RequestTemplate::PREVIEW,
2838                                                                           &defaultMetadata);
2839         ASSERT_TRUE(ret.isOk());
2840 
2841         const camera_metadata_t* metadata =
2842                 reinterpret_cast<const camera_metadata_t*>(defaultMetadata.metadata.data());
2843         defaultSettings = metadata;
2844         android::status_t metadataRet = ::android::OK;
2845         uint8_t videoStabilizationMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
2846         if (previewStabilizationOn) {
2847             videoStabilizationMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION;
2848             metadataRet = defaultSettings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
2849                                                  &videoStabilizationMode, 1);
2850         } else {
2851             metadataRet = defaultSettings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
2852                                                  &videoStabilizationMode, 1);
2853         }
2854         ASSERT_EQ(metadataRet, ::android::OK);
2855 
2856         camera_metadata_t* releasedMetadata = defaultSettings.release();
2857         uint8_t* rawMetadata = reinterpret_cast<uint8_t*>(releasedMetadata);
2858 
2859         buffer_handle_t buffer_handle;
2860 
2861         std::vector<CaptureRequest> requests(1);
2862         CaptureRequest& request = requests[0];
2863         request.frameNumber = frameNumber;
2864         request.fmqSettingsSize = 0;
2865         request.settings.metadata =
2866                 std::vector(rawMetadata, rawMetadata + get_camera_metadata_size(releasedMetadata));
2867         overrideRotateAndCrop(&request.settings);
2868         request.outputBuffers = std::vector<StreamBuffer>(1);
2869         StreamBuffer& outputBuffer = request.outputBuffers[0];
2870 
2871         if (useHalBufManager) {
2872             outputBuffer = {halStreams[0].id,
2873                             /*bufferId*/ 0,   NativeHandle(), BufferStatus::OK,
2874                             NativeHandle(),   NativeHandle()};
2875         } else {
2876             allocateGraphicBuffer(testStream.width, testStream.height,
2877                                   /* We don't look at halStreamConfig.streams[0].consumerUsage
2878                                    * since that is 0 for output streams
2879                                    */
2880                                   ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(
2881                                           static_cast<uint64_t>(halStreams[0].producerUsage),
2882                                           GRALLOC1_CONSUMER_USAGE_HWCOMPOSER)),
2883                                   halStreams[0].overrideFormat, &buffer_handle);
2884             outputBuffer = {halStreams[0].id, bufferId,       ::android::makeToAidl(buffer_handle),
2885                             BufferStatus::OK, NativeHandle(), NativeHandle()};
2886         }
2887         request.inputBuffer = {
2888                 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2889 
2890         {
2891             std::unique_lock<std::mutex> l(mLock);
2892             mInflightMap.clear();
2893             mInflightMap.insert(std::make_pair(frameNumber, inflightReq));
2894         }
2895 
2896         int32_t numRequestProcessed = 0;
2897         std::vector<BufferCache> cachesToRemove;
2898         ret = session->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2899         ASSERT_TRUE(ret.isOk());
2900         ASSERT_EQ(numRequestProcessed, 1u);
2901 
2902         {
2903             std::unique_lock<std::mutex> l(mLock);
2904             while (!inflightReq->errorCodeValid &&
2905                    ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2906                 auto timeout = std::chrono::system_clock::now() +
2907                                std::chrono::seconds(kStreamBufferTimeoutSec);
2908                 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2909             }
2910             waitForReleaseFence(inflightReq->resultOutputBuffers);
2911 
2912             ASSERT_FALSE(inflightReq->errorCodeValid);
2913             ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2914             ASSERT_EQ(testStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId);
2915             nsecs_t captureTimestamp = mSupportReadoutTimestamp
2916                                                ? inflightReq->shutterReadoutTimestamp
2917                                                : inflightReq->shutterTimestamp;
2918 
2919             if (previewStabilizationOn) {
2920                 // Compute the time lag between the buffer ready timestamp and
2921                 // the notify timestamp:
2922                 // timeLag = buffer ready timestamp - notify timestamp.
2923                 // timeLag(previewStabilization) must be <=
2924                 //        timeLag(stabilization off) + 1 frame duration.
2925                 auto it = cameraDeviceToTimeLag.find(name);
2926                 camera_metadata_entry e;
2927                 e = inflightReq->collectedResult.find(ANDROID_SENSOR_FRAME_DURATION);
2928                 ASSERT_TRUE(e.count > 0);
2929                 nsecs_t frameDuration = e.data.i64[0];
2930                 ASSERT_TRUE(it != cameraDeviceToTimeLag.end());
2931 
2932                 nsecs_t previewStabOnLagTime =
2933                         inflightReq->resultOutputBuffers[0].timeStamp - captureTimestamp;
2934                 ASSERT_TRUE(previewStabOnLagTime <= (it->second + frameDuration));
2935             } else {
2936                 // Record the baseline time lag (buffer ready timestamp - notify timestamp).
2937                 cameraDeviceToTimeLag[std::string(name)] =
2938                         inflightReq->resultOutputBuffers[0].timeStamp - captureTimestamp;
2939             }
2940         }
2941 
2942         if (useHalBufManager) {
2943             verifyBuffersReturned(session, testStream.id, cb);
2944         }
2945 
2946         ret = session->close();
2947         ASSERT_TRUE(ret.isOk());
2948     }
2949 }
2950 
2951 bool CameraAidlTest::supportsPreviewStabilization(
2952         const std::string& name, const std::shared_ptr<ICameraProvider>& provider) {
2953     std::shared_ptr<ICameraDevice> device;
2954     ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, &device);
2955     ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
2956           ret.getServiceSpecificError());
2957     if (!ret.isOk() || device == nullptr) {
2958         ADD_FAILURE() << "Failed to get camera device interface for " << name;
2959     }
2960 
2961     CameraMetadata metadata;
2962     ret = device->getCameraCharacteristics(&metadata);
2963     camera_metadata_t* staticMeta = clone_camera_metadata(
2964             reinterpret_cast<const camera_metadata_t*>(metadata.metadata.data()));
2965     if (!(ret.isOk())) {
2966         ADD_FAILURE() << "Failed to get camera characteristics for " << name;
2967     }
2968     // Go through the characteristics and check whether the available video
2969     // stabilization modes include preview stabilization.
2970     camera_metadata_ro_entry entry;
2971 
2972     int retcode = find_camera_metadata_ro_entry(
2973             staticMeta, ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, &entry);
2974     if ((0 == retcode) && (entry.count > 0)) {
2975         for (auto i = 0; i < entry.count; i++) {
2976             if (entry.data.u8[i] ==
2977                 ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION) {
2978                 return true;
2979             }
2980         }
2981     }
2982     return false;
2983 }
2984 
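// Configures one preview stream per physical camera id. When allowUnsupport is set
// and the combination is not supported, the session is closed and returned as null
// instead of failing the test.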
2985 void CameraAidlTest::configurePreviewStreams(
2986         const std::string& name, const std::shared_ptr<ICameraProvider>& provider,
2987         const AvailableStream* previewThreshold, const std::unordered_set<std::string>& physicalIds,
2988         std::shared_ptr<ICameraDeviceSession>* session, Stream* previewStream,
2989         std::vector<HalStream>* halStreams, bool* supportsPartialResults,
2990         int32_t* partialResultCount, std::set<int32_t>* halBufManagedStreamIds,
2991         std::shared_ptr<DeviceCb>* cb, int32_t streamConfigCounter, bool allowUnsupport) {
2992     ASSERT_NE(nullptr, session);
2993     ASSERT_NE(nullptr, halStreams);
2994     ASSERT_NE(nullptr, previewStream);
2995     ASSERT_NE(nullptr, supportsPartialResults);
2996     ASSERT_NE(nullptr, partialResultCount);
2997     ASSERT_NE(nullptr, halBufManagedStreamIds);
2998     ASSERT_NE(nullptr, cb);
2999 
3000     ASSERT_FALSE(physicalIds.empty());
3001 
3002     std::vector<AvailableStream> outputPreviewStreams;
3003     std::shared_ptr<ICameraDevice> device;
3004     ALOGI("configureStreams: Testing camera device %s", name.c_str());
3005 
3006     ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, &device);
3007     ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
3008           ret.getServiceSpecificError());
3009     ASSERT_TRUE(ret.isOk());
3010     ASSERT_NE(device, nullptr);
3011 
3012     CameraMetadata meta;
3013     ret = device->getCameraCharacteristics(&meta);
3014     ASSERT_TRUE(ret.isOk());
3015     camera_metadata_t* staticMeta =
3016             clone_camera_metadata(reinterpret_cast<const camera_metadata_t*>(meta.metadata.data()));
3017     ASSERT_NE(nullptr, staticMeta);
3018 
3019     camera_metadata_ro_entry entry;
3020     auto status =
3021             find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &entry);
3022     if ((0 == status) && (entry.count > 0)) {
3023         *partialResultCount = entry.data.i32[0];
3024         *supportsPartialResults = (*partialResultCount > 1);
3025     }
3026 
3027     *cb = ndk::SharedRefBase::make<DeviceCb>(this, staticMeta);
3028     ret = device->open(*cb, session);
3029     ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
3030           ret.getServiceSpecificError());
3031     ASSERT_TRUE(ret.isOk());
3032     ASSERT_NE(*session, nullptr);
3033 
3034     BufferManagerType bufferManagerType = BufferManagerType::FRAMEWORK;
3035     status = find_camera_metadata_ro_entry(
3036             staticMeta, ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry);
3037     if ((0 == status) && (entry.count == 1)) {
3038         if (entry.data.u8[0] == ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5) {
3039             bufferManagerType = BufferManagerType::HAL;
3040         } else if (entry.data.u8[0] ==
3041                    ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_SESSION_CONFIGURABLE) {
3042             bufferManagerType = BufferManagerType::SESSION;
3043         }
3044     }
3045 
3046     outputPreviewStreams.clear();
3047     Status rc = getAvailableOutputStreams(staticMeta, outputPreviewStreams, previewThreshold);
3048 
3049     ASSERT_EQ(Status::OK, rc);
3050     ASSERT_FALSE(outputPreviewStreams.empty());
3051 
3052     std::vector<Stream> streams(physicalIds.size());
3053     int32_t streamId = 0;
3054     for (auto const& physicalId : physicalIds) {
3055         streams[streamId] = {
3056                 streamId,
3057                 StreamType::OUTPUT,
3058                 outputPreviewStreams[0].width,
3059                 outputPreviewStreams[0].height,
3060                 static_cast<PixelFormat>(outputPreviewStreams[0].format),
3061                 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
3062                         GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
3063                 Dataspace::UNKNOWN,
3064                 StreamRotation::ROTATION_0,
3065                 physicalId,
3066                 0,
3067                 -1,
3068                 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
3069                 RequestAvailableDynamicRangeProfilesMap::
3070                         ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
3071                 ScalerAvailableStreamUseCases::ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
3072                 static_cast<int>(
3073                         RequestAvailableColorSpaceProfilesMap::
3074                                 ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)};
3075         streamId++;
3076     }
3077 
3078     StreamConfiguration config = {streams, StreamConfigurationMode::NORMAL_MODE, CameraMetadata()};
3079 
3080     RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
3081     ret = (*session)->constructDefaultRequestSettings(reqTemplate, &config.sessionParams);
3082     ASSERT_TRUE(ret.isOk());
3083 
3084     bool supported = false;
3085     ret = device->isStreamCombinationSupported(config, &supported);
3086     ASSERT_TRUE(ret.isOk());
3087     if (allowUnsupport && !supported) {
3088         // Stream combination not supported; return a null session.
3089         ret = (*session)->close();
3090         ASSERT_TRUE(ret.isOk());
3091         *session = nullptr;
3092         return;
3093     }
3094     ASSERT_TRUE(supported) << "Stream combination must be supported.";
3095 
3096     config.streamConfigCounter = streamConfigCounter;
3097     std::vector<HalStream> halConfigs;
3098     ret = configureStreams(*session, config, bufferManagerType, halBufManagedStreamIds,
3099                            &halConfigs);
3100 
3101     ASSERT_TRUE(ret.isOk());
3102     ASSERT_EQ(physicalIds.size(), halConfigs.size());
3103     *halStreams = halConfigs;
3104     if (halBufManagedStreamIds->size() != 0) {
3105         // Only include the streams that are HAL buffer managed
3106         std::vector<Stream> ss;
3107         std::vector<HalStream> hs;
3108         for (size_t i = 0; i < physicalIds.size(); i++) {
3109             if (contains(*halBufManagedStreamIds, halConfigs[i].id)) {
3110                 ss.emplace_back(streams[i]);
3111                 hs.emplace_back(halConfigs[i]);
3112             }
3113         }
3114         (*cb)->setCurrentStreamConfig(ss, hs);
3115     }
3116     *previewStream = streams[0];
3117     ASSERT_TRUE(ret.isOk());
3118 }
3119 
3120 void CameraAidlTest::verifyBuffersReturned(const std::shared_ptr<ICameraDeviceSession>& session,
3121                                            const std::vector<int32_t>& streamIds,
3122                                            const std::shared_ptr<DeviceCb>& cb,
3123                                            uint32_t streamConfigCounter) {
3124     ndk::ScopedAStatus ret =
3125             session->signalStreamFlush(streamIds, /*streamConfigCounter*/ streamConfigCounter);
3126     ASSERT_TRUE(ret.isOk());
3127     cb->waitForBuffersReturned();
3128 }
3129 
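// Opens the named camera and configures a single output stream of the given format,
// optionally using the maximum-resolution sensor pixel mode and the requested
// dynamic range and color space profiles.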
3130 void CameraAidlTest::configureStreams(
3131         const std::string& name, const std::shared_ptr<ICameraProvider>& provider,
3132         PixelFormat format, std::shared_ptr<ICameraDeviceSession>* session, Stream* previewStream,
3133         std::vector<HalStream>* halStreams, bool* supportsPartialResults,
3134         int32_t* partialResultCount, std::set<int32_t>* halBufManagedStreamIds,
3135         std::shared_ptr<DeviceCb>* outCb, uint32_t streamConfigCounter, bool maxResolution,
3136         RequestAvailableDynamicRangeProfilesMap dynamicRangeProf,
3137         RequestAvailableColorSpaceProfilesMap colorSpaceProf) {
3138     ASSERT_NE(nullptr, session);
3139     ASSERT_NE(nullptr, halStreams);
3140     ASSERT_NE(nullptr, previewStream);
3141     ASSERT_NE(nullptr, supportsPartialResults);
3142     ASSERT_NE(nullptr, partialResultCount);
3143     ASSERT_NE(nullptr, halBufManagedStreamIds);
3144     ASSERT_NE(nullptr, outCb);
3145 
3146     ALOGI("configureStreams: Testing camera device %s", name.c_str());
3147 
3148     std::vector<AvailableStream> outputStreams;
3149     std::shared_ptr<ICameraDevice> device;
3150 
3151     ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, &device);
3152     ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
3153           ret.getServiceSpecificError());
3154     ASSERT_TRUE(ret.isOk());
3155     ASSERT_NE(device, nullptr);
3156 
3157     CameraMetadata metadata;
3158     camera_metadata_t* staticMeta;
3159     ret = device->getCameraCharacteristics(&metadata);
3160     ASSERT_TRUE(ret.isOk());
3161     staticMeta = clone_camera_metadata(
3162             reinterpret_cast<const camera_metadata_t*>(metadata.metadata.data()));
3163     ASSERT_NE(staticMeta, nullptr);
3164 
3165     camera_metadata_ro_entry entry;
3166     auto status =
3167             find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &entry);
3168     if ((0 == status) && (entry.count > 0)) {
3169         *partialResultCount = entry.data.i32[0];
3170         *supportsPartialResults = (*partialResultCount > 1);
3171     }
3172 
3173     *outCb = ndk::SharedRefBase::make<DeviceCb>(this, staticMeta);
3174     ret = device->open(*outCb, session);
3175     ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
3176           ret.getServiceSpecificError());
3177     ASSERT_TRUE(ret.isOk());
3178     ASSERT_NE(*session, nullptr);
3179 
3180     BufferManagerType bufferManagerType = BufferManagerType::FRAMEWORK;
3181     status = find_camera_metadata_ro_entry(
3182             staticMeta, ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry);
3183     if ((0 == status) && (entry.count == 1)) {
3184         if (entry.data.u8[0] == ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5) {
3185             bufferManagerType = BufferManagerType::HAL;
3186         } else if (entry.data.u8[0] ==
3187                    ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_SESSION_CONFIGURABLE) {
3188             bufferManagerType = BufferManagerType::SESSION;
3189         }
3190     }
3191 
3192     outputStreams.clear();
3193     Size maxSize;
3194     if (maxResolution) {
3195         auto rc = getMaxOutputSizeForFormat(staticMeta, format, &maxSize, maxResolution);
3196         ASSERT_EQ(Status::OK, rc);
3197     } else {
3198         AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
3199             static_cast<int32_t>(format)};
3200         auto rc = getAvailableOutputStreams(staticMeta, outputStreams, &previewThreshold);
3201 
3202         ASSERT_EQ(Status::OK, rc);
3203         ASSERT_FALSE(outputStreams.empty());
3204         maxSize.width = outputStreams[0].width;
3205         maxSize.height = outputStreams[0].height;
3206     }
3207 
3208 
3209     std::vector<Stream> streams(1);
3210     streams[0] = {0,
3211                   StreamType::OUTPUT,
3212                   maxSize.width,
3213                   maxSize.height,
3214                   format,
3215                   previewStream->usage,
3216                   previewStream->dataSpace,
3217                   StreamRotation::ROTATION_0,
3218                   "",
3219                   0,
3220                   -1,
3221                   {maxResolution ? SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION
3222                                  : SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
3223                   dynamicRangeProf,
3224                   ScalerAvailableStreamUseCases::ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
3225                   static_cast<int>(colorSpaceProf)};
3226 
3227     StreamConfiguration config;
3228     config.streams = streams;
3229     config.operationMode = StreamConfigurationMode::NORMAL_MODE;
3230     config.streamConfigCounter = streamConfigCounter;
3231     config.multiResolutionInputImage = false;
3232     CameraMetadata req;
3233     RequestTemplate reqTemplate = RequestTemplate::STILL_CAPTURE;
3234     ret = (*session)->constructDefaultRequestSettings(reqTemplate, &req);
3235     ASSERT_TRUE(ret.isOk());
3236     config.sessionParams = req;
3237 
3238     bool supported = false;
3239     ret = device->isStreamCombinationSupported(config, &supported);
3240     ASSERT_TRUE(ret.isOk());
3241     ASSERT_EQ(supported, true);
3242 
3243     ret = configureStreams(*session, config, bufferManagerType, halBufManagedStreamIds, halStreams);
3244 
3245     ASSERT_TRUE(ret.isOk());
3246 
3247     if (halBufManagedStreamIds->size() != 0) {
3248         std::vector<Stream> ss(1);
3249         std::vector<HalStream> hs(1);
3250         ss[0] = streams[0];
3251         hs[0] = (*halStreams)[0];
3252         (*outCb)->setCurrentStreamConfig(ss, hs);
3253     }
3254 
3255     *previewStream = streams[0];
3256     ASSERT_TRUE(ret.isOk());
3257 }
3258 
3259 bool CameraAidlTest::is10BitDynamicRangeCapable(const camera_metadata_t* staticMeta) {
3260     camera_metadata_ro_entry scalerEntry;
3261     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
3262                                            &scalerEntry);
3263     if (rc == 0) {
3264         for (uint32_t i = 0; i < scalerEntry.count; i++) {
3265             if (scalerEntry.data.u8[i] ==
3266                 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT) {
3267                 return true;
3268             }
3269         }
3270     }
3271     return false;
3272 }
3273 
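// Dynamic range profile map entries come in groups of three with the profile id
// first; collect every non-STANDARD profile and require HLG10 to be present
// whenever any 10-bit profile is advertised.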
3274 void CameraAidlTest::get10BitDynamicRangeProfiles(
3275         const camera_metadata_t* staticMeta,
3276         std::vector<RequestAvailableDynamicRangeProfilesMap>* profiles) {
3277     ASSERT_NE(nullptr, staticMeta);
3278     ASSERT_NE(nullptr, profiles);
3279     camera_metadata_ro_entry entry;
3280     std::unordered_set<int64_t> entries;
3281     int rc = find_camera_metadata_ro_entry(
3282             staticMeta, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP, &entry);
3283     ASSERT_EQ(rc, 0);
3284     ASSERT_TRUE(entry.count > 0);
3285     ASSERT_EQ(entry.count % 3, 0);
3286 
3287     for (uint32_t i = 0; i < entry.count; i += 3) {
3288         ASSERT_NE(entry.data.i64[i], ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);
3289         ASSERT_EQ(entries.find(entry.data.i64[i]), entries.end());
3290         entries.insert(static_cast<int64_t>(entry.data.i64[i]));
3291         profiles->emplace_back(
3292                 static_cast<RequestAvailableDynamicRangeProfilesMap>(entry.data.i64[i]));
3293     }
3294 
3295     if (!entries.empty()) {
3296         ASSERT_NE(entries.find(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10),
3297                   entries.end());
3298     }
3299 }
3300 
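// Verifies the static HDR metadata attached to each output buffer: none for HLG10,
// SMPTE 2086 for HDR10, SMPTE 2094-40 for HDR10+, and SMPTE 2094-10 for the
// Dolby Vision profiles.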
3301 void CameraAidlTest::verify10BitMetadata(
3302         HandleImporter& importer, const InFlightRequest& request,
3303         aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap
3304                 profile) {
3305     for (auto b : request.resultOutputBuffers) {
3306         importer.importBuffer(b.buffer.buffer);
3307         bool smpte2086Present = importer.isSmpte2086Present(b.buffer.buffer);
3308         bool smpte2094_10Present = importer.isSmpte2094_10Present(b.buffer.buffer);
3309         bool smpte2094_40Present = importer.isSmpte2094_40Present(b.buffer.buffer);
3310 
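             // Expected HDR metadata per profile: HLG10 carries no static or dynamic metadata
             // blobs, HDR10 carries SMPTE 2086 static metadata, HDR10+ carries SMPTE 2094-40
             // dynamic metadata, and the Dolby Vision profiles carry SMPTE 2094-10 metadata.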
3311         switch (static_cast<int64_t>(profile)) {
3312             case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10:
3313                 ASSERT_FALSE(smpte2086Present);
3314                 ASSERT_FALSE(smpte2094_10Present);
3315                 ASSERT_FALSE(smpte2094_40Present);
3316                 break;
3317             case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
3318                 ASSERT_TRUE(smpte2086Present);
3319                 ASSERT_FALSE(smpte2094_10Present);
3320                 ASSERT_FALSE(smpte2094_40Present);
3321                 break;
3322             case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
3323                 ASSERT_FALSE(smpte2094_10Present);
3324                 ASSERT_TRUE(smpte2094_40Present);
3325                 break;
3326             case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF:
3327             case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO:
3328             case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM:
3329             case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO:
3330             case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF:
3331             case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF_PO:
3332             case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM:
3333             case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO:
3334                 ASSERT_FALSE(smpte2086Present);
3335                 ASSERT_TRUE(smpte2094_10Present);
3336                 ASSERT_FALSE(smpte2094_40Present);
3337                 break;
3338             default:
3339                 ALOGE("%s: Unexpected 10-bit dynamic range profile: %" PRId64, __FUNCTION__,
3340                       static_cast<int64_t>(profile));
3341                 ADD_FAILURE();
3342         }
3343         importer.freeBuffer(b.buffer.buffer);
3344     }
3345 }
3346 
reportsColorSpaces(const camera_metadata_t * staticMeta)3347 bool CameraAidlTest::reportsColorSpaces(const camera_metadata_t* staticMeta) {
3348     camera_metadata_ro_entry capabilityEntry;
3349     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
3350                                            &capabilityEntry);
3351     if (rc == 0) {
3352         for (uint32_t i = 0; i < capabilityEntry.count; i++) {
3353             if (capabilityEntry.data.u8[i] ==
3354                 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES) {
3355                 return true;
3356             }
3357         }
3358     }
3359     return false;
3360 }
3361 
getColorSpaceProfiles(const camera_metadata_t * staticMeta,std::vector<RequestAvailableColorSpaceProfilesMap> * profiles)3362 void CameraAidlTest::getColorSpaceProfiles(
3363         const camera_metadata_t* staticMeta,
3364         std::vector<RequestAvailableColorSpaceProfilesMap>* profiles) {
3365     ASSERT_NE(nullptr, staticMeta);
3366     ASSERT_NE(nullptr, profiles);
3367     camera_metadata_ro_entry entry;
3368     int rc = find_camera_metadata_ro_entry(
3369             staticMeta, ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP, &entry);
3370     ASSERT_EQ(rc, 0);
3371     ASSERT_TRUE(entry.count > 0);
3372     ASSERT_EQ(entry.count % 3, 0);
3373 
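         // The color space profiles map also stores triples: (color space, image format, bitmask
         // of supported dynamic range profiles). Only distinct color spaces are collected here.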
3374     for (uint32_t i = 0; i < entry.count; i += 3) {
3375         ASSERT_NE(entry.data.i64[i],
3376                 ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
3377         if (std::find(profiles->begin(), profiles->end(),
3378                 static_cast<RequestAvailableColorSpaceProfilesMap>(entry.data.i64[i]))
3379                 == profiles->end()) {
3380             profiles->emplace_back(
3381                     static_cast<RequestAvailableColorSpaceProfilesMap>(entry.data.i64[i]));
3382         }
3383     }
3384 }
3385 
isColorSpaceCompatibleWithDynamicRangeAndPixelFormat(const camera_metadata_t * staticMeta,RequestAvailableColorSpaceProfilesMap colorSpace,RequestAvailableDynamicRangeProfilesMap dynamicRangeProfile,aidl::android::hardware::graphics::common::PixelFormat pixelFormat)3386 bool CameraAidlTest::isColorSpaceCompatibleWithDynamicRangeAndPixelFormat(
3387         const camera_metadata_t* staticMeta,
3388         RequestAvailableColorSpaceProfilesMap colorSpace,
3389         RequestAvailableDynamicRangeProfilesMap dynamicRangeProfile,
3390         aidl::android::hardware::graphics::common::PixelFormat pixelFormat) {
3391     camera_metadata_ro_entry entry;
3392     int rc = find_camera_metadata_ro_entry(
3393             staticMeta, ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP, &entry);
3394 
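         // A combination is compatible when an entry matches the requested color space and public
         // image format and its dynamic range bitmask includes the requested profile.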
3395     if (rc == 0) {
3396         for (uint32_t i = 0; i < entry.count; i += 3) {
3397             RequestAvailableColorSpaceProfilesMap entryColorSpace =
3398                     static_cast<RequestAvailableColorSpaceProfilesMap>(entry.data.i64[i]);
3399             int64_t dynamicRangeProfileI64 = static_cast<int64_t>(dynamicRangeProfile);
3400             int32_t entryImageFormat = static_cast<int32_t>(entry.data.i64[i + 1]);
3401             int32_t expectedImageFormat = halFormatToPublicFormat(pixelFormat);
3402             if (entryColorSpace == colorSpace
3403                     && (entry.data.i64[i + 2] & dynamicRangeProfileI64) != 0
3404                     && entryImageFormat == expectedImageFormat) {
3405                 return true;
3406             }
3407         }
3408     }
3409 
3410     return false;
3411 }
3412 
getColorSpaceProfileString(RequestAvailableColorSpaceProfilesMap colorSpace)3413 const char* CameraAidlTest::getColorSpaceProfileString(
3414         RequestAvailableColorSpaceProfilesMap colorSpace) {
3415     auto colorSpaceCast = static_cast<int>(colorSpace);
3416     switch (colorSpaceCast) {
3417         case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED:
3418             return "UNSPECIFIED";
3419         case ColorSpaceNamed::SRGB:
3420             return "SRGB";
3421         case ColorSpaceNamed::LINEAR_SRGB:
3422             return "LINEAR_SRGB";
3423         case ColorSpaceNamed::EXTENDED_SRGB:
3424             return "EXTENDED_SRGB";
3425         case ColorSpaceNamed::LINEAR_EXTENDED_SRGB:
3426             return "LINEAR_EXTENDED_SRGB";
3427         case ColorSpaceNamed::BT709:
3428             return "BT709";
3429         case ColorSpaceNamed::BT2020:
3430             return "BT2020";
3431         case ColorSpaceNamed::DCI_P3:
3432             return "DCI_P3";
3433         case ColorSpaceNamed::DISPLAY_P3:
3434             return "DISPLAY_P3";
3435         case ColorSpaceNamed::NTSC_1953:
3436             return "NTSC_1953";
3437         case ColorSpaceNamed::SMPTE_C:
3438             return "SMPTE_C";
3439         case ColorSpaceNamed::ADOBE_RGB:
3440             return "ADOBE_RGB";
3441         case ColorSpaceNamed::PRO_PHOTO_RGB:
3442             return "PRO_PHOTO_RGB";
3443         case ColorSpaceNamed::ACES:
3444             return "ACES";
3445         case ColorSpaceNamed::ACESCG:
3446             return "ACESCG";
3447         case ColorSpaceNamed::CIE_XYZ:
3448             return "CIE_XYZ";
3449         case ColorSpaceNamed::CIE_LAB:
3450             return "CIE_LAB";
3451         case ColorSpaceNamed::BT2020_HLG:
3452             return "BT2020_HLG";
3453         case ColorSpaceNamed::BT2020_PQ:
3454             return "BT2020_PQ";
3455         default:
3456             return "INVALID";
3457     }
3458 
3459     return "INVALID";
3460 }
3461 
getDynamicRangeProfileString(RequestAvailableDynamicRangeProfilesMap dynamicRangeProfile)3462 const char* CameraAidlTest::getDynamicRangeProfileString(
3463         RequestAvailableDynamicRangeProfilesMap dynamicRangeProfile) {
3464     auto dynamicRangeProfileCast =
3465             static_cast<camera_metadata_enum_android_request_available_dynamic_range_profiles_map>
3466             (dynamicRangeProfile);
3467     switch (dynamicRangeProfileCast) {
3468         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD:
3469             return "STANDARD";
3470         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10:
3471             return "HLG10";
3472         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
3473             return "HDR10";
3474         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
3475             return "HDR10_PLUS";
3476         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF:
3477             return "DOLBY_VISION_10B_HDR_REF";
3478         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO:
3479             return "DOLBY_VISION_10B_HDR_REF_PO";
3480         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM:
3481             return "DOLBY_VISION_10B_HDR_OEM";
3482         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO:
3483             return "DOLBY_VISION_10B_HDR_OEM_PO";
3484         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF:
3485             return "DOLBY_VISION_8B_HDR_REF";
3486         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF_PO:
3487             return "DOLBY_VISION_8B_HDR_REF_PO";
3488         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM:
3489             return "DOLBY_VISION_8B_HDR_OEM";
3490         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO:
3491             return "DOLBY_VISION_8B_HDR_OEM_PO";
3492         default:
3493             return "INVALID";
3494     }
3495 
3496     return "INVALID";
3497 }
3498 
halFormatToPublicFormat(aidl::android::hardware::graphics::common::PixelFormat pixelFormat)3499 int32_t CameraAidlTest::halFormatToPublicFormat(
3500         aidl::android::hardware::graphics::common::PixelFormat pixelFormat) {
3501     // This is an incomplete mapping from HAL pixel format to public image format; it assumes
3502     // the dataspaces chosen by getDataspace().
3503     switch (pixelFormat) {
3504     case PixelFormat::BLOB:
3505         return 0x100; // ImageFormat.JPEG
3506     case PixelFormat::Y16:
3507         return 0x44363159; // ImageFormat.DEPTH16
3508     default:
3509         return static_cast<int32_t>(pixelFormat);
3510     }
3511 }
3512 
supportZoomSettingsOverride(const camera_metadata_t * staticMeta)3513 bool CameraAidlTest::supportZoomSettingsOverride(const camera_metadata_t* staticMeta) {
3514     camera_metadata_ro_entry availableOverridesEntry;
3515     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES,
3516                                            &availableOverridesEntry);
3517     if (rc == 0) {
3518         for (size_t i = 0; i < availableOverridesEntry.count; i++) {
3519             if (availableOverridesEntry.data.i32[i] == ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM) {
3520                 return true;
3521             }
3522         }
3523     }
3524     return false;
3525 }
3526 
supportsCroppedRawUseCase(const camera_metadata_t * staticMeta)3527 bool CameraAidlTest::supportsCroppedRawUseCase(const camera_metadata_t *staticMeta) {
3528     camera_metadata_ro_entry availableStreamUseCasesEntry;
3529     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES,
3530                                            &availableStreamUseCasesEntry);
3531     if (rc == 0) {
3532         for (size_t i = 0; i < availableStreamUseCasesEntry.count; i++) {
3533             if (availableStreamUseCasesEntry.data.i64[i] ==
3534                     ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW) {
3535                 return true;
3536             }
3537         }
3538     }
3539     return false;
3540 }
3541 
supportsStreamUseCaseCap(const camera_metadata_t * staticMeta)3542 bool CameraAidlTest::supportsStreamUseCaseCap(const camera_metadata_t* staticMeta) {
3543     camera_metadata_ro_entry entry;
3544     int retcode = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
3545                                                 &entry);
3546     bool hasStreamUseCaseCap = false;
3547     if ((0 == retcode) && (entry.count > 0)) {
3548         if (std::find(entry.data.u8, entry.data.u8 + entry.count,
3549                       ANDROID_REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE) !=
3550             entry.data.u8 + entry.count) {
3551             hasStreamUseCaseCap = true;
3552         }
3553     }
3554     return hasStreamUseCaseCap;
3555 }
3556 
isPerFrameControl(const camera_metadata_t * staticMeta)3557 bool CameraAidlTest::isPerFrameControl(const camera_metadata_t* staticMeta) {
3558     camera_metadata_ro_entry syncLatencyEntry;
3559     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_SYNC_MAX_LATENCY,
3560                                            &syncLatencyEntry);
3561     if (rc != 0 || syncLatencyEntry.count == 0) {
3562         return false;
3563     }
3564     return syncLatencyEntry.data.i32[0] == ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
3565 }
3566 
configurePreviewStream(const std::string & name,const std::shared_ptr<ICameraProvider> & provider,const AvailableStream * previewThreshold,std::shared_ptr<ICameraDeviceSession> * session,Stream * previewStream,std::vector<HalStream> * halStreams,bool * supportsPartialResults,int32_t * partialResultCount,bool * useHalBufManager,std::shared_ptr<DeviceCb> * cb,uint32_t streamConfigCounter)3567 void CameraAidlTest::configurePreviewStream(
3568         const std::string& name, const std::shared_ptr<ICameraProvider>& provider,
3569         const AvailableStream* previewThreshold, std::shared_ptr<ICameraDeviceSession>* session,
3570         Stream* previewStream, std::vector<HalStream>* halStreams, bool* supportsPartialResults,
3571         int32_t* partialResultCount, bool* useHalBufManager, std::shared_ptr<DeviceCb>* cb,
3572         uint32_t streamConfigCounter) {
3573     configureSingleStream(name, provider, previewThreshold, GRALLOC1_CONSUMER_USAGE_HWCOMPOSER,
3574                           RequestTemplate::PREVIEW, session, previewStream, halStreams,
3575                           supportsPartialResults, partialResultCount, useHalBufManager, cb,
3576                           streamConfigCounter);
3577 }
3578 
isOfflineSessionSupported(const camera_metadata_t * staticMeta)3579 Status CameraAidlTest::isOfflineSessionSupported(const camera_metadata_t* staticMeta) {
3580     Status ret = Status::OPERATION_NOT_SUPPORTED;
3581     if (nullptr == staticMeta) {
3582         return Status::ILLEGAL_ARGUMENT;
3583     }
3584 
3585     camera_metadata_ro_entry entry;
3586     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
3587                                            &entry);
3588     if (0 != rc) {
3589         return Status::ILLEGAL_ARGUMENT;
3590     }
3591 
3592     for (size_t i = 0; i < entry.count; i++) {
3593         if (ANDROID_REQUEST_AVAILABLE_CAPABILITIES_OFFLINE_PROCESSING == entry.data.u8[i]) {
3594             ret = Status::OK;
3595             break;
3596         }
3597     }
3598 
3599     return ret;
3600 }
3601 
configureOfflineStillStream(const std::string & name,const std::shared_ptr<ICameraProvider> & provider,const AvailableStream * threshold,std::shared_ptr<ICameraDeviceSession> * session,Stream * stream,std::vector<HalStream> * halStreams,bool * supportsPartialResults,int32_t * partialResultCount,std::shared_ptr<DeviceCb> * outCb,int32_t * jpegBufferSize,std::set<int32_t> * halBufManagedStreamIds)3602 void CameraAidlTest::configureOfflineStillStream(
3603         const std::string& name, const std::shared_ptr<ICameraProvider>& provider,
3604         const AvailableStream* threshold, std::shared_ptr<ICameraDeviceSession>* session,
3605         Stream* stream, std::vector<HalStream>* halStreams, bool* supportsPartialResults,
3606         int32_t* partialResultCount, std::shared_ptr<DeviceCb>* outCb, int32_t* jpegBufferSize,
3607         std::set<int32_t>* halBufManagedStreamIds) {
3608     ASSERT_NE(nullptr, session);
3609     ASSERT_NE(nullptr, halStreams);
3610     ASSERT_NE(nullptr, stream);
3611     ASSERT_NE(nullptr, supportsPartialResults);
3612     ASSERT_NE(nullptr, partialResultCount);
3613     ASSERT_NE(nullptr, outCb);
3614     ASSERT_NE(nullptr, jpegBufferSize);
3615     ASSERT_NE(nullptr, halBufManagedStreamIds);
3616 
3617     std::vector<AvailableStream> outputStreams;
3618     std::shared_ptr<ICameraDevice> cameraDevice;
3619     ALOGI("configureStreams: Testing camera device %s", name.c_str());
3620 
3621     ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, &cameraDevice);
3622     ASSERT_TRUE(ret.isOk());
3623     ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
3624           ret.getServiceSpecificError());
3625     ASSERT_NE(cameraDevice, nullptr);
3626 
3627     CameraMetadata metadata;
3628     ret = cameraDevice->getCameraCharacteristics(&metadata);
3629     ASSERT_TRUE(ret.isOk());
3630     camera_metadata_t* staticMeta = clone_camera_metadata(
3631             reinterpret_cast<const camera_metadata_t*>(metadata.metadata.data()));
3632     ASSERT_NE(nullptr, staticMeta);
3633 
3634     camera_metadata_ro_entry entry;
3635     auto status =
3636             find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &entry);
3637     if ((0 == status) && (entry.count > 0)) {
3638         *partialResultCount = entry.data.i32[0];
3639         *supportsPartialResults = (*partialResultCount > 1);
3640     }
3641 
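         // Default to framework-managed buffers. HIDL_DEVICE_3_5 selects HAL-side buffer
         // management; SESSION_CONFIGURABLE (assumed from the enum name) means the behavior is
         // chosen per stream configuration.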
3642     BufferManagerType bufferManagerType = BufferManagerType::FRAMEWORK;
3643     status = find_camera_metadata_ro_entry(
3644             staticMeta, ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry);
3645     if ((0 == status) && (entry.count == 1)) {
3646         if (entry.data.u8[0] == ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5) {
3647             bufferManagerType = BufferManagerType::HAL;
3648         } else if (entry.data.u8[0] ==
3649                    ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_SESSION_CONFIGURABLE) {
3650             bufferManagerType = BufferManagerType::SESSION;
3651         }
3652     }
3653 
3654     auto st = getJpegBufferSize(staticMeta, jpegBufferSize);
3655     ASSERT_EQ(st, Status::OK);
3656 
3657     *outCb = ndk::SharedRefBase::make<DeviceCb>(this, staticMeta);
3658     ret = cameraDevice->open(*outCb, session);
3659     ASSERT_TRUE(ret.isOk());
3660     ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
3661           ret.getServiceSpecificError());
3662     ASSERT_NE(*session, nullptr);
3663 
3664     outputStreams.clear();
3665     auto rc = getAvailableOutputStreams(staticMeta, outputStreams, threshold);
3666     ASSERT_EQ(Status::OK, rc);
3667     ASSERT_FALSE(outputStreams.empty());
3668 
3669     size_t idx = 0;
3670     int currLargest = outputStreams[0].width * outputStreams[0].height;
3671     for (size_t i = 0; i < outputStreams.size(); i++) {
3672         int area = outputStreams[i].width * outputStreams[i].height;
3673         if (area > currLargest) {
3674             idx = i;
3675             currLargest = area;
3676         }
3677     }
3678 
3679     Dataspace dataspace = getDataspace(static_cast<PixelFormat>(outputStreams[idx].format));
3680 
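         // Configure a single CPU-readable output stream at the largest supported size, using
         // standard dynamic range and an unspecified color space.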
3681     std::vector<Stream> streams(/*size*/ 1);
3682     streams[0] = {/*id*/ 0,
3683                   StreamType::OUTPUT,
3684                   outputStreams[idx].width,
3685                   outputStreams[idx].height,
3686                   static_cast<PixelFormat>(outputStreams[idx].format),
3687                   static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
3688                           GRALLOC1_CONSUMER_USAGE_CPU_READ),
3689                   dataspace,
3690                   StreamRotation::ROTATION_0,
3691                   /*physicalId*/ std::string(),
3692                   *jpegBufferSize,
3693                   /*groupId*/ 0,
3694                   {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
3695                   RequestAvailableDynamicRangeProfilesMap::
3696                           ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
3697                   ScalerAvailableStreamUseCases::ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
3698                   static_cast<int>(
3699                           RequestAvailableColorSpaceProfilesMap::
3700                                   ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)};
3701 
3702     StreamConfiguration config = {streams, StreamConfigurationMode::NORMAL_MODE, CameraMetadata()};
3703 
3704     ret = configureStreams(*session, config, bufferManagerType, halBufManagedStreamIds, halStreams);
3705 
3706     ASSERT_TRUE(ret.isOk());
3707 
3708     if (halBufManagedStreamIds->size() != 0) {
3709         (*outCb)->setCurrentStreamConfig(streams, *halStreams);
3710     }
3711 
3712     *stream = streams[0];
3713 }
3714 
updateInflightResultQueue(const std::shared_ptr<ResultMetadataQueue> & resultQueue)3715 void CameraAidlTest::updateInflightResultQueue(
3716         const std::shared_ptr<ResultMetadataQueue>& resultQueue) {
3717     std::unique_lock<std::mutex> l(mLock);
3718     for (auto& it : mInflightMap) {
3719         it.second->resultQueue = resultQueue;
3720     }
3721 }
3722 
processColorSpaceRequest(RequestAvailableColorSpaceProfilesMap colorSpace,RequestAvailableDynamicRangeProfilesMap dynamicRangeProfile)3723 void CameraAidlTest::processColorSpaceRequest(
3724         RequestAvailableColorSpaceProfilesMap colorSpace,
3725         RequestAvailableDynamicRangeProfilesMap dynamicRangeProfile) {
3726     std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
3727     CameraMetadata settings;
3728 
3729     for (const auto& name : cameraDeviceNames) {
3730         std::string version, deviceId;
3731         ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
3732         CameraMetadata meta;
3733         std::shared_ptr<ICameraDevice> device;
3734         openEmptyDeviceSession(name, mProvider, &mSession, &meta, &device);
3735         camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
3736 
3737         // Device does not report color spaces, skip.
3738         if (!reportsColorSpaces(staticMeta)) {
3739             ndk::ScopedAStatus ret = mSession->close();
3740             mSession = nullptr;
3741             ASSERT_TRUE(ret.isOk());
3742             ALOGV("Camera %s does not report color spaces", name.c_str());
3743             continue;
3744         }
3745         std::vector<RequestAvailableColorSpaceProfilesMap> profileList;
3746         getColorSpaceProfiles(staticMeta, &profileList);
3747         ASSERT_FALSE(profileList.empty());
3748 
3749         // Device does not support color space / dynamic range profile, skip
3750         if (std::find(profileList.begin(), profileList.end(), colorSpace)
3751                 == profileList.end() || !isColorSpaceCompatibleWithDynamicRangeAndPixelFormat(
3752                         staticMeta, colorSpace, dynamicRangeProfile,
3753                         PixelFormat::IMPLEMENTATION_DEFINED)) {
3754             ndk::ScopedAStatus ret = mSession->close();
3755             mSession = nullptr;
3756             ASSERT_TRUE(ret.isOk());
3757             ALOGV("Camera %s does not support color space %s with dynamic range profile %s and "
3758                   "pixel format %d", name.c_str(), getColorSpaceProfileString(colorSpace),
3759                   getDynamicRangeProfileString(dynamicRangeProfile),
3760                   PixelFormat::IMPLEMENTATION_DEFINED);
3761             continue;
3762         }
3763 
3764         ALOGV("Camera %s supports color space %s with dynamic range profile %s and pixel format %d",
3765                 name.c_str(), getColorSpaceProfileString(colorSpace),
3766                 getDynamicRangeProfileString(dynamicRangeProfile),
3767                 PixelFormat::IMPLEMENTATION_DEFINED);
3768 
3769         // If an HDR dynamic range profile is reported in the color space profile list,
3770         // the device must also have the dynamic range profiles map capability and contain
3771         // the dynamic range profile in the map.
3772         if (dynamicRangeProfile != static_cast<RequestAvailableDynamicRangeProfilesMap>(
3773                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD)) {
3774             ASSERT_TRUE(is10BitDynamicRangeCapable(staticMeta));
3775 
3776             std::vector<RequestAvailableDynamicRangeProfilesMap> dynamicRangeProfiles;
3777             get10BitDynamicRangeProfiles(staticMeta, &dynamicRangeProfiles);
3778             ASSERT_FALSE(dynamicRangeProfiles.empty());
3779             ASSERT_FALSE(std::find(dynamicRangeProfiles.begin(), dynamicRangeProfiles.end(),
3780                     dynamicRangeProfile) == dynamicRangeProfiles.end());
3781         }
3782 
3783         CameraMetadata req;
3784         android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
3785         ndk::ScopedAStatus ret =
3786                 mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &req);
3787         ASSERT_TRUE(ret.isOk());
3788 
3789         const camera_metadata_t* metadata =
3790                 reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
3791         size_t expectedSize = req.metadata.size();
3792         int result = validate_camera_metadata_structure(metadata, &expectedSize);
3793         ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
3794 
3795         size_t entryCount = get_camera_metadata_entry_count(metadata);
3796         ASSERT_GT(entryCount, 0u);
3797         defaultSettings = metadata;
3798 
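             // Serialize the validated default settings back into the AIDL metadata blob and
             // apply the test's rotate-and-crop override before closing this initial session.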
3799         const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
3800         uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
3801         settings.metadata = std::vector(
3802                 rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
3803         overrideRotateAndCrop(&settings);
3804 
3805         ret = mSession->close();
3806         mSession = nullptr;
3807         ASSERT_TRUE(ret.isOk());
3808 
3809         std::vector<HalStream> halStreams;
3810         bool supportsPartialResults = false;
3811         std::set<int32_t> halBufManagedStreamIds;
3812         int32_t partialResultCount = 0;
3813         Stream previewStream;
3814         std::shared_ptr<DeviceCb> cb;
3815 
3816         previewStream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
3817                 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
3818         configureStreams(name, mProvider, PixelFormat::IMPLEMENTATION_DEFINED, &mSession,
3819                          &previewStream, &halStreams, &supportsPartialResults, &partialResultCount,
3820                          &halBufManagedStreamIds, &cb, 0,
3821                          /*maxResolution*/ false, dynamicRangeProfile, colorSpace);
3822         ASSERT_NE(mSession, nullptr);
3823 
3824         ::aidl::android::hardware::common::fmq::MQDescriptor<
3825                 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
3826                 descriptor;
3827         auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
3828         ASSERT_TRUE(resultQueueRet.isOk());
3829 
3830         std::shared_ptr<ResultMetadataQueue> resultQueue =
3831                 std::make_shared<ResultMetadataQueue>(descriptor);
3832         if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
3833             ALOGE("%s: HAL returned an empty result metadata fmq, not using it", __func__);
3834             resultQueue = nullptr;
3835             // Don't use the queue onwards.
3836         }
3837 
3838         mInflightMap.clear();
3839         // Submit enough requests to fill the HAL's in-flight queue.
3840         std::vector<CaptureRequest> requests(halStreams[0].maxBuffers);
3841 
3842         for (int32_t requestId = 0; requestId < requests.size(); requestId++) {
3843             std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
3844                     static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
3845                     partialResultCount, std::unordered_set<std::string>(), resultQueue);
3846 
3847             CaptureRequest& request = requests[requestId];
3848             std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
3849             outputBuffers.resize(halStreams.size());
3850 
3851             size_t k = 0;
3852             inflightReq->mOutstandingBufferIds.resize(halStreams.size());
3853             std::vector<buffer_handle_t> graphicBuffers;
3854             graphicBuffers.reserve(halStreams.size());
3855 
3856             auto bufferId = requestId + 1;  // Buffer id value 0 is not valid
3857             for (const auto& halStream : halStreams) {
3858                 buffer_handle_t buffer_handle;
3859                 if (contains(halBufManagedStreamIds, halStream.id)) {
3860                     outputBuffers[k] = {halStream.id,   0,
3861                                         NativeHandle(), BufferStatus::OK,
3862                                         NativeHandle(), NativeHandle()};
3863                 } else {
3864                     auto usage = ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(
3865                             static_cast<uint64_t>(halStream.producerUsage),
3866                             static_cast<uint64_t>(halStream.consumerUsage)));
3867                     allocateGraphicBuffer(previewStream.width, previewStream.height, usage,
3868                                             halStream.overrideFormat, &buffer_handle);
3869 
3870                     inflightReq->mOutstandingBufferIds[halStream.id][bufferId] = buffer_handle;
3871                     graphicBuffers.push_back(buffer_handle);
3872                     outputBuffers[k] = {
3873                             halStream.id,     bufferId,       android::makeToAidl(buffer_handle),
3874                             BufferStatus::OK, NativeHandle(), NativeHandle()};
3875                 }
3876                 k++;
3877             }
3878 
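                 // Mark the request as having no reprocessing input; these tests denote an
                 // absent input buffer with stream id -1 and empty handles.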
3879             request.inputBuffer = {
3880                     -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
3881             request.frameNumber = bufferId;
3882             request.fmqSettingsSize = 0;
3883             request.settings = settings;
3884             request.inputWidth = 0;
3885             request.inputHeight = 0;
3886 
3887             {
3888                 std::unique_lock<std::mutex> l(mLock);
3889                 mInflightMap[bufferId] = inflightReq;
3890             }
3891         }
3892 
3893         int32_t numRequestProcessed = 0;
3894         std::vector<BufferCache> cachesToRemove;
3895         ndk::ScopedAStatus returnStatus =
3896             mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
3897         ASSERT_TRUE(returnStatus.isOk());
3898         ASSERT_EQ(numRequestProcessed, requests.size());
3899 
3900         returnStatus = mSession->repeatingRequestEnd(requests.size() - 1,
3901                 std::vector<int32_t> {halStreams[0].id});
3902         ASSERT_TRUE(returnStatus.isOk());
3903 
3904         // We are keeping frame numbers and buffer ids consistent. Buffer id value of 0
3905         // is used to indicate a buffer that is not present/available so buffer ids as well
3906         // as frame numbers begin with 1.
3907         for (int32_t frameNumber = 1; frameNumber <= requests.size(); frameNumber++) {
3908             std::unique_lock<std::mutex> l(mLock);
3909             const auto& inflightReq = mInflightMap[frameNumber];
3910             while (!inflightReq->errorCodeValid &&
3911                     ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
3912                 auto timeout = std::chrono::system_clock::now() +
3913                                 std::chrono::seconds(kStreamBufferTimeoutSec);
3914                 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
3915             }
3916 
3917             ASSERT_FALSE(inflightReq->errorCodeValid);
3918             ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
3919 
3920             if (dynamicRangeProfile != static_cast<RequestAvailableDynamicRangeProfilesMap>(
3921                     ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD)) {
3922                 verify10BitMetadata(mHandleImporter, *inflightReq, dynamicRangeProfile);
3923             }
3924         }
3925 
3926         if (halBufManagedStreamIds.size() != 0) {
3927             std::vector<int32_t> streamIds;
3928             for (size_t i = 0; i < halStreams.size(); i++) {
3929                 if (contains(halBufManagedStreamIds, halStreams[i].id)) {
3930                     streamIds.emplace_back(halStreams[i].id);
3931                 }
3932             }
3933             mSession->signalStreamFlush(streamIds, /*streamConfigCounter*/ 0);
3934             cb->waitForBuffersReturned();
3935         }
3936 
3937         ret = mSession->close();
3938         mSession = nullptr;
3939         ASSERT_TRUE(ret.isOk());
3940     }
3941 }
3942 
processZoomSettingsOverrideRequests(int32_t frameCount,const bool * overrideSequence,const bool * expectedResults)3943 void CameraAidlTest::processZoomSettingsOverrideRequests(
3944         int32_t frameCount, const bool *overrideSequence, const bool *expectedResults) {
3945     std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
3946     AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
3947                                         static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
3948     int64_t bufferId = 1;
3949     int32_t frameNumber = 1;
3950     CameraMetadata settings;
3951     ndk::ScopedAStatus ret;
3952     for (const auto& name : cameraDeviceNames) {
3953         CameraMetadata meta;
3954         std::shared_ptr<ICameraDevice> device;
3955         openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
3956                                &device /*out*/);
3957         camera_metadata_t* staticMeta =
3958                 clone_camera_metadata(reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
3959 
3960         ret = mSession->close();
3961         mSession = nullptr;
3962         ASSERT_TRUE(ret.isOk());
3963 
3964         // Device does not support zoom settings override
3965         if (!supportZoomSettingsOverride(staticMeta)) {
3966             continue;
3967         }
3968 
3969         if (!isPerFrameControl(staticMeta)) {
3970             continue;
3971         }
3972 
3973         bool supportsPartialResults = false;
3974         bool useHalBufManager = false;
3975         int32_t partialResultCount = 0;
3976         Stream previewStream;
3977         std::vector<HalStream> halStreams;
3978         std::shared_ptr<DeviceCb> cb;
3979         configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
3980                                &previewStream /*out*/, &halStreams /*out*/,
3981                                &supportsPartialResults /*out*/, &partialResultCount /*out*/,
3982                                &useHalBufManager /*out*/, &cb /*out*/);
3983         ASSERT_NE(mSession, nullptr);
3984 
3985         ::aidl::android::hardware::common::fmq::MQDescriptor<
3986                 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
3987                 descriptor;
3988         auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
3989         ASSERT_TRUE(resultQueueRet.isOk());
3990 
3991         std::shared_ptr<ResultMetadataQueue> resultQueue =
3992                 std::make_shared<ResultMetadataQueue>(descriptor);
3993         if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
3994             ALOGE("%s: HAL returned an empty result metadata fmq, not using it", __func__);
3995             resultQueue = nullptr;
3996             // Don't use the queue onwards.
3997         }
3998 
3999         ret = mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &settings);
4000         ASSERT_TRUE(ret.isOk());
4001 
4002         mInflightMap.clear();
4003         ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
4004         std::vector<CaptureRequest> requests(frameCount);
4005         std::vector<buffer_handle_t> buffers(frameCount);
4006         std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(frameCount);
4007         std::vector<CameraMetadata> requestSettings(frameCount);
4008 
4009         for (int32_t i = 0; i < frameCount; i++) {
4010             std::unique_lock<std::mutex> l(mLock);
4011             CaptureRequest& request = requests[i];
4012             std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
4013             outputBuffers.resize(1);
4014             StreamBuffer& outputBuffer = outputBuffers[0];
4015 
4016             if (useHalBufManager) {
4017                 outputBuffer = {halStreams[0].id, 0,
4018                                 NativeHandle(),   BufferStatus::OK,
4019                                 NativeHandle(),   NativeHandle()};
4020             } else {
4021                 allocateGraphicBuffer(previewStream.width, previewStream.height,
4022                                       ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(
4023                                               static_cast<uint64_t>(halStreams[0].producerUsage),
4024                                               static_cast<uint64_t>(halStreams[0].consumerUsage))),
4025                                       halStreams[0].overrideFormat, &buffers[i]);
4026                 outputBuffer = {halStreams[0].id, bufferId + i,   ::android::makeToAidl(buffers[i]),
4027                                 BufferStatus::OK, NativeHandle(), NativeHandle()};
4028             }
4029 
4030             // Set appropriate settings override tag
4031             requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
4032             int32_t settingsOverride = overrideSequence[i] ?
4033                     ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM : ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF;
4034             ASSERT_EQ(::android::OK, requestMeta.update(ANDROID_CONTROL_SETTINGS_OVERRIDE,
4035                     &settingsOverride, 1));
4036             camera_metadata_t* metaBuffer = requestMeta.release();
4037             uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
4038             requestSettings[i].metadata = std::vector(
4039                     rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
4040             overrideRotateAndCrop(&(requestSettings[i]));
4041             request.frameNumber = frameNumber + i;
4042             request.fmqSettingsSize = 0;
4043             request.settings = requestSettings[i];
4044             request.inputBuffer = {
4045                     -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
4046 
4047             inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
4048                                                                 partialResultCount, resultQueue);
4049             mInflightMap[frameNumber + i] = inflightReqs[i];
4050         }
4051 
4052         int32_t numRequestProcessed = 0;
4053         std::vector<BufferCache> cachesToRemove;
4054 
4055         ndk::ScopedAStatus returnStatus =
4056                 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
4057         ASSERT_TRUE(returnStatus.isOk());
4058         ASSERT_EQ(numRequestProcessed, frameCount);
4059 
4060         for (size_t i = 0; i < frameCount; i++) {
4061             std::unique_lock<std::mutex> l(mLock);
4062             while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
4063                                                         (!inflightReqs[i]->haveResultMetadata))) {
4064                 auto timeout = std::chrono::system_clock::now() +
4065                                std::chrono::seconds(kStreamBufferTimeoutSec);
4066                 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
4067             }
4068 
4069             ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
4070             ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
4071             ASSERT_EQ(previewStream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
4072             ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
4073             ASSERT_TRUE(inflightReqs[i]->collectedResult.exists(ANDROID_CONTROL_SETTINGS_OVERRIDE));
4074             camera_metadata_entry_t overrideResult =
4075                     inflightReqs[i]->collectedResult.find(ANDROID_CONTROL_SETTINGS_OVERRIDE);
4076             ASSERT_EQ(overrideResult.data.i32[0] == ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM,
4077                     expectedResults[i]);
4078             ASSERT_TRUE(inflightReqs[i]->collectedResult.exists(
4079                     ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER));
4080             camera_metadata_entry_t frameNumberEntry = inflightReqs[i]->collectedResult.find(
4081                     ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER);
4082             ALOGV("%s: i %zu, expectedResults[i] %d, overrideResult is %d, frameNumber %d",
4083                   __FUNCTION__, i, expectedResults[i], overrideResult.data.i32[0],
4084                   frameNumberEntry.data.i32[0]);
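                 // When the zoom override took effect, its settings must have come from a later
                 // request, so the overriding frame number is expected to exceed this request's;
                 // otherwise it must equal this request's own frame number.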
4085             if (expectedResults[i]) {
4086                 ASSERT_GT(frameNumberEntry.data.i32[0], inflightReqs[i]->frameNumber);
4087             } else {
4088                 ASSERT_EQ(frameNumberEntry.data.i32[0], frameNumber + i);
4089             }
4090         }
4091 
4092         ret = mSession->close();
4093         mSession = nullptr;
4094         ASSERT_TRUE(ret.isOk());
4095     }
4096 }
4097 
getSupportedSizes(const camera_metadata_t * ch,uint32_t tag,int32_t format,std::vector<std::tuple<size_t,size_t>> * sizes)4098 void CameraAidlTest::getSupportedSizes(const camera_metadata_t* ch, uint32_t tag, int32_t format,
4099                                        std::vector<std::tuple<size_t, size_t>>* sizes /*out*/) {
4100     if (sizes == nullptr) {
4101         return;
4102     }
4103 
4104     camera_metadata_ro_entry entry;
4105     int retcode = find_camera_metadata_ro_entry(ch, tag, &entry);
4106     if ((0 == retcode) && (entry.count > 0)) {
4107         // Scaler entry contains 4 elements (format, width, height, type)
4108         for (size_t i = 0; i < entry.count; i += 4) {
4109             if ((entry.data.i32[i] == format) &&
4110                 (entry.data.i32[i + 3] == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT)) {
4111                 sizes->push_back(std::make_tuple(entry.data.i32[i + 1], entry.data.i32[i + 2]));
4112             }
4113         }
4114     }
4115 }
4116 
getSupportedDurations(const camera_metadata_t * ch,uint32_t tag,int32_t format,const std::vector<std::tuple<size_t,size_t>> & sizes,std::vector<int64_t> * durations)4117 void CameraAidlTest::getSupportedDurations(const camera_metadata_t* ch, uint32_t tag,
4118                                            int32_t format,
4119                                            const std::vector<std::tuple<size_t, size_t>>& sizes,
4120                                            std::vector<int64_t>* durations /*out*/) {
4121     if (durations == nullptr) {
4122         return;
4123     }
4124 
4125     camera_metadata_ro_entry entry;
4126     int retcode = find_camera_metadata_ro_entry(ch, tag, &entry);
4127     if ((0 == retcode) && (entry.count > 0)) {
4128         // Duration entry contains 4 elements (format, width, height, duration)
4129         for (const auto& size : sizes) {
4130             int64_t width = std::get<0>(size);
4131             int64_t height = std::get<1>(size);
4132             for (size_t i = 0; i < entry.count; i += 4) {
4133                 if ((entry.data.i64[i] == format) && (entry.data.i64[i + 1] == width) &&
4134                     (entry.data.i64[i + 2] == height)) {
4135                     durations->push_back(entry.data.i64[i + 3]);
4136                     break;
4137                 }
4138             }
4139         }
4140     }
4141 }
4142 
validateDefaultRequestMetadata(RequestTemplate reqTemplate,const CameraMetadata & rawMetadata)4143 void CameraAidlTest::validateDefaultRequestMetadata(RequestTemplate reqTemplate,
4144                                                     const CameraMetadata& rawMetadata) {
4145     const camera_metadata_t* metadata = (camera_metadata_t*)rawMetadata.metadata.data();
4146     size_t expectedSize = rawMetadata.metadata.size();
4147     int result = validate_camera_metadata_structure(metadata, &expectedSize);
4148     ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
4149     verifyRequestTemplate(metadata, reqTemplate);
4150 }
4151