/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <aidl/Vintf.h>
#include <aidl/android/hardware/camera/common/VendorTagSection.h>
#include <aidl/android/hardware/camera/device/ICameraDevice.h>
#include <aidlcommonsupport/NativeHandle.h>
#include <camera_aidl_test.h>
#include <cutils/properties.h>
#include <device_cb.h>
#include <empty_device_cb.h>
#include <grallocusage/GrallocUsageConversion.h>
#include <gtest/gtest.h>
#include <hardware/gralloc.h>
#include <hardware/gralloc1.h>
#include <hidl/GtestPrinter.h>
#include <hidl/HidlSupport.h>
#include <torch_provider_cb.h>
#include <com_android_internal_camera_flags.h>
#include <list>
#include <nativebase/nativebase.h>

using ::aidl::android::hardware::camera::common::CameraDeviceStatus;
using ::aidl::android::hardware::camera::common::CameraResourceCost;
using ::aidl::android::hardware::camera::common::TorchModeStatus;
using ::aidl::android::hardware::camera::common::VendorTagSection;
using ::aidl::android::hardware::camera::device::ICameraDevice;
using ::aidl::android::hardware::camera::metadata::RequestAvailableColorSpaceProfilesMap;
using ::aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
using ::aidl::android::hardware::camera::metadata::SensorPixelMode;
using ::aidl::android::hardware::camera::provider::CameraIdAndStreamCombination;
using ::aidl::android::hardware::camera::provider::BnCameraProviderCallback;

using ::ndk::ScopedAStatus;

namespace {
const int32_t kBurstFrameCount = 10;
const uint32_t kMaxStillWidth = 2048;
const uint32_t kMaxStillHeight = 1536;

const int64_t kEmptyFlushTimeoutMSec = 200;
namespace flags = com::android::internal::camera::flags;

const static std::vector<int64_t> kMandatoryUseCases = {
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL};
}  // namespace

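// Test if ICameraProvider::getCameraIdList returns Status::OK and log the
// camera ids advertised by the provider.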
TEST_P(CameraAidlTest, getCameraIdList) {
    std::vector<std::string> idList;
    ScopedAStatus ret = mProvider->getCameraIdList(&idList);
    ASSERT_TRUE(ret.isOk());

    for (size_t i = 0; i < idList.size(); i++) {
        ALOGI("Camera Id[%zu] is %s", i, idList[i].c_str());
    }
}

// Test if ICameraProvider::getVendorTags returns Status::OK
TEST_P(CameraAidlTest, getVendorTags) {
    std::vector<VendorTagSection> vendorTags;
    ScopedAStatus ret = mProvider->getVendorTags(&vendorTags);

    ASSERT_TRUE(ret.isOk());
    for (size_t i = 0; i < vendorTags.size(); i++) {
        ALOGI("Vendor tag section %zu name %s", i, vendorTags[i].sectionName.c_str());
        for (auto& tag : vendorTags[i].tags) {
            ALOGI("Vendor tag id %u name %s type %d", tag.tagId, tag.tagName.c_str(),
                  (int)tag.tagType);
        }
    }
}

// Test if ICameraProvider::setCallback returns Status::OK
TEST_P(CameraAidlTest, setCallback) {
    struct ProviderCb : public BnCameraProviderCallback {
        ScopedAStatus cameraDeviceStatusChange(const std::string& cameraDeviceName,
                                               CameraDeviceStatus newStatus) override {
            ALOGI("camera device status callback name %s, status %d", cameraDeviceName.c_str(),
                  (int)newStatus);
            return ScopedAStatus::ok();
        }
        ScopedAStatus torchModeStatusChange(const std::string& cameraDeviceName,
                                            TorchModeStatus newStatus) override {
            ALOGI("Torch mode status callback name %s, status %d", cameraDeviceName.c_str(),
                  (int)newStatus);
            return ScopedAStatus::ok();
        }
        ScopedAStatus physicalCameraDeviceStatusChange(const std::string& cameraDeviceName,
                                                       const std::string& physicalCameraDeviceName,
                                                       CameraDeviceStatus newStatus) override {
            ALOGI("physical camera device status callback name %s, physical camera name %s,"
                  " status %d",
                  cameraDeviceName.c_str(), physicalCameraDeviceName.c_str(), (int)newStatus);
            return ScopedAStatus::ok();
        }
    };

    std::shared_ptr<ProviderCb> cb = ndk::SharedRefBase::make<ProviderCb>();
    ScopedAStatus ret = mProvider->setCallback(cb);
    ASSERT_TRUE(ret.isOk());
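    // A null callback is invalid; the provider must reject it with
    // ILLEGAL_ARGUMENT or a NULL_POINTER binder exception.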
    ret = mProvider->setCallback(nullptr);
    ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) == ret.getServiceSpecificError() ||
                EX_NULL_POINTER == ret.getExceptionCode());
}

// Test if ICameraProvider::getCameraDeviceInterface returns Status::OK and non-null device
TEST_P(CameraAidlTest, getCameraDeviceInterface) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> cameraDevice;
        ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &cameraDevice);
        ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(cameraDevice, nullptr);
    }
}

// Verify that the device resource cost can be retrieved and the values are
// correct.
TEST_P(CameraAidlTest, getResourceCost) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& deviceName : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> cameraDevice;
        ScopedAStatus ret = mProvider->getCameraDeviceInterface(deviceName, &cameraDevice);
        ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(cameraDevice, nullptr);

        CameraResourceCost resourceCost;
        ret = cameraDevice->getResourceCost(&resourceCost);
        ALOGI("getResourceCost returns: %d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());

        ALOGI(" Resource cost is %d", resourceCost.resourceCost);
        ASSERT_LE(resourceCost.resourceCost, 100u);

        for (const auto& name : resourceCost.conflictingDevices) {
            ALOGI(" Conflicting device: %s", name.c_str());
        }
    }
}

// Validate the integrity of manual flash strength control metadata
TEST_P(CameraAidlTest, validateManualFlashStrengthControlKeys) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    for (const auto& name : cameraDeviceNames) {
        ALOGI("validateManualFlashStrengthControlKeys: Testing camera device %s", name.c_str());
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;
        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);
        ndk::ScopedAStatus ret = cameraDevice->getCameraCharacteristics(&meta);
        ASSERT_TRUE(ret.isOk());
        const camera_metadata_t* staticMeta =
                reinterpret_cast<const camera_metadata_t*>(meta.metadata.data());
        verifyManualFlashStrengthControlCharacteristics(staticMeta);
        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

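// Verify that for logical multi-cameras, every hidden physical camera id is
// only associated with logical cameras that share the same system camera kind.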
TEST_P(CameraAidlTest, systemCameraTest) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::map<std::string, std::vector<SystemCameraKind>> hiddenPhysicalIdToLogicalMap;
    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("systemCameraTest: Testing camera device %s", name.c_str());
        ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        CameraMetadata cameraCharacteristics;
        ret = device->getCameraCharacteristics(&cameraCharacteristics);
        ASSERT_TRUE(ret.isOk());

        const camera_metadata_t* staticMeta =
                reinterpret_cast<const camera_metadata_t*>(cameraCharacteristics.metadata.data());
        Status rc = isLogicalMultiCamera(staticMeta);
        if (rc == Status::OPERATION_NOT_SUPPORTED) {
            return;
        }

        ASSERT_EQ(rc, Status::OK);
        std::unordered_set<std::string> physicalIds;
        ASSERT_EQ(getPhysicalCameraIds(staticMeta, &physicalIds), Status::OK);
        SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
        Status retStatus = getSystemCameraKind(staticMeta, &systemCameraKind);
        ASSERT_EQ(retStatus, Status::OK);

        for (auto physicalId : physicalIds) {
            bool isPublicId = false;
            for (auto& deviceName : cameraDeviceNames) {
                std::string publicVersion, publicId;
                ASSERT_TRUE(matchDeviceName(deviceName, mProviderType, &publicVersion, &publicId));
                if (physicalId == publicId) {
                    isPublicId = true;
                    break;
                }
            }

            // For hidden physical cameras, collect their associated logical cameras
            // and store the system camera kind.
            if (!isPublicId) {
                auto it = hiddenPhysicalIdToLogicalMap.find(physicalId);
                if (it == hiddenPhysicalIdToLogicalMap.end()) {
                    hiddenPhysicalIdToLogicalMap.insert(std::make_pair(
                            physicalId, std::vector<SystemCameraKind>({systemCameraKind})));
                } else {
                    it->second.push_back(systemCameraKind);
                }
            }
        }
    }

    // Check that the system camera kind of the logical cameras associated with
    // each hidden physical camera is the same.
    for (const auto& it : hiddenPhysicalIdToLogicalMap) {
        SystemCameraKind neededSystemCameraKind = it.second.front();
        for (auto foundSystemCamera : it.second) {
            ASSERT_EQ(neededSystemCameraKind, foundSystemCamera);
        }
    }
}

// Verify that the static camera characteristics can be retrieved
// successfully.
TEST_P(CameraAidlTest, getCameraCharacteristics) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("getCameraCharacteristics: Testing camera device %s", name.c_str());
        ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        CameraMetadata chars;
        ret = device->getCameraCharacteristics(&chars);
        ASSERT_TRUE(ret.isOk());
        verifyCameraCharacteristics(chars);
        verifyMonochromeCharacteristics(chars);
        verifyRecommendedConfigs(chars);
        verifyHighSpeedRecordingCharacteristics(name, chars);
        verifyLogicalOrUltraHighResCameraMetadata(name, device, chars, cameraDeviceNames);

        ASSERT_TRUE(ret.isOk());

        // getPhysicalCameraCharacteristics will fail for publicly
        // advertised camera IDs.
        std::string version, cameraId;
        ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &cameraId));
        CameraMetadata devChars;
        ret = device->getPhysicalCameraCharacteristics(cameraId, &devChars);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
        ASSERT_EQ(0, devChars.metadata.size());
    }
}

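// Verify that session characteristics can be queried for a basic output
// stream configuration on devices that implement interface version 3 or newer.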
TEST_P(CameraAidlTest, getSessionCharacteristics) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("getSessionCharacteristics: Testing camera device %s", name.c_str());
        ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        int32_t interfaceVersion = -1;
        ret = device->getInterfaceVersion(&interfaceVersion);
        ASSERT_TRUE(ret.isOk());
        bool supportSessionCharacteristics =
                (interfaceVersion >= CAMERA_DEVICE_API_MINOR_VERSION_3);
        if (!supportSessionCharacteristics) {
            continue;
        }

        CameraMetadata meta;
        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/, &device /*out*/);

        std::vector<AvailableStream> outputStreams;
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        outputStreams.clear();
        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
        ASSERT_NE(0u, outputStreams.size());

        AvailableStream sampleStream = outputStreams[0];

        int32_t streamId = 0;
        Stream stream = {streamId,
                         StreamType::OUTPUT,
                         sampleStream.width,
                         sampleStream.height,
                         static_cast<PixelFormat>(sampleStream.format),
                         static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                         Dataspace::UNKNOWN,
                         StreamRotation::ROTATION_0,
                         std::string(),
                         /*bufferSize*/ 0,
                         /*groupId*/ -1,
                         {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                         RequestAvailableDynamicRangeProfilesMap::
                                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

        std::vector<Stream> streams = {stream};
        StreamConfiguration config;
        createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config);

        CameraMetadata camera_chars;
        ret = device->getCameraCharacteristics(&camera_chars);
        ASSERT_TRUE(ret.isOk());

        CameraMetadata session_chars;
        ret = device->getSessionCharacteristics(config, &session_chars);
        ASSERT_TRUE(ret.isOk());
        verifySessionCharacteristics(session_chars, camera_chars);

        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Verify that the torch strength level can be set and retrieved successfully.
TEST_P(CameraAidlTest, turnOnTorchWithStrengthLevel) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
    ndk::ScopedAStatus ret = mProvider->setCallback(cb);
    ASSERT_TRUE(ret.isOk());

    for (const auto& name : cameraDeviceNames) {
        int32_t defaultLevel;
        std::shared_ptr<ICameraDevice> device;
        ALOGI("%s: Testing camera device %s", __FUNCTION__, name.c_str());

        ret = mProvider->getCameraDeviceInterface(name, &device);
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        CameraMetadata chars;
        ret = device->getCameraCharacteristics(&chars);
        ASSERT_TRUE(ret.isOk());

        const camera_metadata_t* staticMeta =
                reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());
        bool torchStrengthControlSupported = isTorchStrengthControlSupported(staticMeta);
        camera_metadata_ro_entry entry;
        int rc = find_camera_metadata_ro_entry(staticMeta,
                                               ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL, &entry);
        if (torchStrengthControlSupported) {
            ASSERT_EQ(rc, 0);
            ASSERT_GT(entry.count, 0);
            defaultLevel = *entry.data.i32;
            ALOGI("Default level is:%d", defaultLevel);
        }

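        // Request a torch strength level of 2. Devices without torch strength
        // control must fail with OPERATION_NOT_SUPPORTED; otherwise the torch
        // must turn on and report the requested level.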
        mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
        ret = device->turnOnTorchWithStrengthLevel(2);
        ALOGI("turnOnTorchWithStrengthLevel returns status: %d", ret.getServiceSpecificError());
        // OPERATION_NOT_SUPPORTED check
        if (!torchStrengthControlSupported) {
            ALOGI("Torch strength control not supported.");
            ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
                      ret.getServiceSpecificError());
        } else {
            {
                ASSERT_TRUE(ret.isOk());
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
                mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
            }
            ALOGI("getTorchStrengthLevel: Testing");
            int32_t strengthLevel;
            ret = device->getTorchStrengthLevel(&strengthLevel);
            ASSERT_TRUE(ret.isOk());
            ALOGI("Torch strength level is : %d", strengthLevel);
            ASSERT_EQ(strengthLevel, 2);

            // Turn OFF the torch and verify torch strength level is reset to default level.
            ALOGI("Testing torch strength level reset after turning the torch OFF.");
            ret = device->setTorchMode(false);
            ASSERT_TRUE(ret.isOk());
            {
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
            }

            ret = device->getTorchStrengthLevel(&strengthLevel);
            ASSERT_TRUE(ret.isOk());
            ALOGI("Torch strength level after turning OFF torch is : %d", strengthLevel);
            ASSERT_EQ(strengthLevel, defaultLevel);
        }
    }
}

// In case the torch is supported, verify that it can be enabled.
// Check for corresponding torch callbacks as well.
TEST_P(CameraAidlTest, setTorchMode) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
    ndk::ScopedAStatus ret = mProvider->setCallback(cb);
    ALOGI("setCallback returns status: %d", ret.getServiceSpecificError());
    ASSERT_TRUE(ret.isOk());
    ASSERT_NE(cb, nullptr);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("setTorchMode: Testing camera device %s", name.c_str());
        ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        CameraMetadata metadata;
        ret = device->getCameraCharacteristics(&metadata);
        ALOGI("getCameraCharacteristics returns status:%d", ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        camera_metadata_t* staticMeta =
                reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
        bool torchSupported = isTorchSupported(staticMeta);

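        // Enable the torch. Devices without a flash unit must reject the call
        // with OPERATION_NOT_SUPPORTED; otherwise wait for the AVAILABLE_ON
        // callback before turning the torch back off.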
        mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
        ret = device->setTorchMode(true);
        ALOGI("setTorchMode returns status: %d", ret.getServiceSpecificError());
        if (!torchSupported) {
            ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
                      ret.getServiceSpecificError());
        } else {
            ASSERT_TRUE(ret.isOk());
            {
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
                mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
            }

            ret = device->setTorchMode(false);
            ASSERT_TRUE(ret.isOk());
            {
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
            }
        }
    }
}

// Check dump functionality.
TEST_P(CameraAidlTest, dump) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("dump: Testing camera device %s", name.c_str());

        ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        int raw_handle = open(kDumpOutput, O_RDWR);
        ASSERT_GE(raw_handle, 0);

        auto retStatus = device->dump(raw_handle, nullptr, 0);
        ASSERT_EQ(retStatus, ::android::OK);
        close(raw_handle);
    }
}

// Open, dump, then close
TEST_P(CameraAidlTest, openClose) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("openClose: Testing camera device %s", name.c_str());
        ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();

        ret = device->open(cb, &mSession);
        ASSERT_TRUE(ret.isOk());
        ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_NE(mSession, nullptr);
        int raw_handle = open(kDumpOutput, O_RDWR);
        ASSERT_GE(raw_handle, 0);

        auto retStatus = device->dump(raw_handle, nullptr, 0);
        ASSERT_EQ(retStatus, ::android::OK);
        close(raw_handle);

        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
        // TODO: test all session API calls return INTERNAL_ERROR after close
        // TODO: keep a wp copy here and verify session cannot be promoted out of this scope
    }
}

// Check whether all common default request settings can be successfully
// constructed.
TEST_P(CameraAidlTest, constructDefaultRequestSettings) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("constructDefaultRequestSettings: Testing camera device %s", name.c_str());
        ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        int32_t interfaceVersion;
        ret = device->getInterfaceVersion(&interfaceVersion);
        ASSERT_TRUE(ret.isOk());
        bool supportFeatureCombinationQuery =
                (interfaceVersion >= CAMERA_DEVICE_API_MINOR_VERSION_3);

        std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
        ret = device->open(cb, &mSession);
        ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(mSession, nullptr);

        for (int32_t t = (int32_t)RequestTemplate::PREVIEW; t <= (int32_t)RequestTemplate::MANUAL;
             t++) {
            RequestTemplate reqTemplate = (RequestTemplate)t;
            CameraMetadata rawMetadata;
            ret = mSession->constructDefaultRequestSettings(reqTemplate, &rawMetadata);
            ALOGI("constructDefaultRequestSettings returns status:%d:%d", ret.getExceptionCode(),
                  ret.getServiceSpecificError());

            if (reqTemplate == RequestTemplate::ZERO_SHUTTER_LAG ||
                reqTemplate == RequestTemplate::MANUAL) {
                // optional templates
                ASSERT_TRUE(ret.isOk() || static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
                                                  ret.getServiceSpecificError());
            } else {
                ASSERT_TRUE(ret.isOk());
            }

            if (ret.isOk()) {
                validateDefaultRequestMetadata(reqTemplate, rawMetadata);
            } else {
                ASSERT_EQ(0u, rawMetadata.metadata.size());
            }

            if (supportFeatureCombinationQuery) {
                CameraMetadata rawMetadata2;
                ndk::ScopedAStatus ret2 =
                        device->constructDefaultRequestSettings(reqTemplate, &rawMetadata2);

                ASSERT_EQ(ret.isOk(), ret2.isOk());
                ASSERT_EQ(ret.getStatus(), ret2.getStatus());

                ASSERT_EQ(rawMetadata.metadata.size(), rawMetadata2.metadata.size());
                if (ret2.isOk()) {
                    validateDefaultRequestMetadata(reqTemplate, rawMetadata2);
                }
            }
        }
        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Verify that all supported stream formats and sizes can be configured
// successfully.
TEST_P(CameraAidlTest, configureStreamsAvailableOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputStreams;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> device;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/, &device /*out*/);

        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        outputStreams.clear();
        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
        ASSERT_NE(0u, outputStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

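        // Configure each supported output size/format as a single stream and
        // verify that the returned HalStream matches the requested stream id.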
        int32_t streamId = 0;
        int32_t streamConfigCounter = 0;
        for (auto& it : outputStreams) {
            Stream stream;
            Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
            stream.id = streamId;
            stream.streamType = StreamType::OUTPUT;
            stream.width = it.width;
            stream.height = it.height;
            stream.format = static_cast<PixelFormat>(it.format);
            stream.dataSpace = dataspace;
            stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                    GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
            stream.rotation = StreamRotation::ROTATION_0;
            stream.dynamicRangeProfile = RequestAvailableDynamicRangeProfilesMap::
                    ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
            stream.useCase = ScalerAvailableStreamUseCases::
                    ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
            stream.colorSpace = static_cast<int>(
                    RequestAvailableColorSpaceProfilesMap::
                            ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);

            std::vector<Stream> streams = {stream};
            StreamConfiguration config;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);

            verifyStreamCombination(device, config, /*expectedStatus*/ true);

            config.streamConfigCounter = streamConfigCounter++;
            std::vector<HalStream> halConfigs;
            ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
            ASSERT_TRUE(ret.isOk());
            ASSERT_EQ(halConfigs.size(), 1);
            ASSERT_EQ(halConfigs[0].id, streamId);

            streamId++;
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Verify that mandatory concurrent streams and outputs are supported.
TEST_P(CameraAidlTest, configureConcurrentStreamsAvailableOutputs) {
    struct CameraTestInfo {
        CameraMetadata staticMeta;
        std::shared_ptr<ICameraDeviceSession> session;
        std::shared_ptr<ICameraDevice> cameraDevice;
        StreamConfiguration config;
    };

    std::map<std::string, std::string> idToNameMap = getCameraDeviceIdToNameMap(mProvider);
    std::vector<ConcurrentCameraIdCombination> concurrentDeviceCombinations =
            getConcurrentDeviceCombinations(mProvider);
    std::vector<AvailableStream> outputStreams;
    for (const auto& cameraDeviceIds : concurrentDeviceCombinations) {
        std::vector<CameraIdAndStreamCombination> cameraIdsAndStreamCombinations;
        std::vector<CameraTestInfo> cameraTestInfos;
        for (const auto& id : cameraDeviceIds.combination) {
            CameraTestInfo cti;
            auto it = idToNameMap.find(id);
            ASSERT_TRUE(idToNameMap.end() != it);
            std::string name = it->second;

            openEmptyDeviceSession(name, mProvider, &cti.session /*out*/, &cti.staticMeta /*out*/,
                                   &cti.cameraDevice /*out*/);

            outputStreams.clear();
            camera_metadata_t* staticMeta =
                    reinterpret_cast<camera_metadata_t*>(cti.staticMeta.metadata.data());
            ASSERT_EQ(Status::OK, getMandatoryConcurrentStreams(staticMeta, &outputStreams));
            ASSERT_NE(0u, outputStreams.size());

            int32_t jpegBufferSize = 0;
            ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
            ASSERT_NE(0u, jpegBufferSize);

            int32_t streamId = 0;
            std::vector<Stream> streams(outputStreams.size());
            size_t j = 0;
            for (const auto& s : outputStreams) {
                Stream stream;
                Dataspace dataspace = getDataspace(static_cast<PixelFormat>(s.format));
                stream.id = streamId++;
                stream.streamType = StreamType::OUTPUT;
                stream.width = s.width;
                stream.height = s.height;
                stream.format = static_cast<PixelFormat>(s.format);
                stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                        GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
                stream.dataSpace = dataspace;
                stream.rotation = StreamRotation::ROTATION_0;
                stream.sensorPixelModesUsed = {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT};
                stream.dynamicRangeProfile = RequestAvailableDynamicRangeProfilesMap::
                        ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
                streams[j] = stream;
                j++;
            }

            // Add the created stream configs to cameraIdsAndStreamCombinations
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &cti.config,
                                      jpegBufferSize);

            cti.config.streamConfigCounter = outputStreams.size();
            CameraIdAndStreamCombination cameraIdAndStreamCombination;
            cameraIdAndStreamCombination.cameraId = id;
            cameraIdAndStreamCombination.streamConfiguration = cti.config;
            cameraIdsAndStreamCombinations.push_back(cameraIdAndStreamCombination);
            cameraTestInfos.push_back(cti);
        }
        // Now verify that concurrent streams are supported
        bool combinationSupported;
        ndk::ScopedAStatus ret = mProvider->isConcurrentStreamCombinationSupported(
                cameraIdsAndStreamCombinations, &combinationSupported);
        ASSERT_TRUE(ret.isOk());
        ASSERT_EQ(combinationSupported, true);

        // Test that the streams can actually be configured
        for (auto& cti : cameraTestInfos) {
            if (cti.session != nullptr) {
                verifyStreamCombination(cti.cameraDevice, cti.config, /*expectedStatus*/ true);
            }

            if (cti.session != nullptr) {
                std::vector<HalStream> streamConfigs;
                ret = cti.session->configureStreams(cti.config, &streamConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(cti.config.streams.size(), streamConfigs.size());
            }
        }

        for (auto& cti : cameraTestInfos) {
            ret = cti.session->close();
            ASSERT_TRUE(ret.isOk());
        }
    }
}

// Check for correct handling of invalid/incorrect configuration parameters.
TEST_P(CameraAidlTest, configureStreamsInvalidOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputStreams;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        outputStreams.clear();

        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
        ASSERT_NE(0u, outputStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        Stream stream = {streamId++,
                         StreamType::OUTPUT,
                         static_cast<uint32_t>(0),
                         static_cast<uint32_t>(0),
                         static_cast<PixelFormat>(outputStreams[0].format),
                         static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                         Dataspace::UNKNOWN,
                         StreamRotation::ROTATION_0,
                         std::string(),
                         jpegBufferSize,
                         -1,
                         {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                         RequestAvailableDynamicRangeProfilesMap::
                                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        int32_t streamConfigCounter = 0;
        std::vector<Stream> streams = {stream};
        StreamConfiguration config;
        createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                  jpegBufferSize);

        verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ false);

        config.streamConfigCounter = streamConfigCounter++;
        std::vector<HalStream> halConfigs;
        ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
                            ret.getServiceSpecificError() ||
                    static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());

        stream = {streamId++,
                  StreamType::OUTPUT,
                  /*width*/ INT32_MAX,
                  /*height*/ INT32_MAX,
                  static_cast<PixelFormat>(outputStreams[0].format),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  jpegBufferSize,
                  -1,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                  jpegBufferSize);

        config.streamConfigCounter = streamConfigCounter++;
        halConfigs.clear();
        ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());

        for (auto& it : outputStreams) {
            stream = {streamId++,
                      StreamType::OUTPUT,
                      it.width,
                      it.height,
                      static_cast<PixelFormat>(UINT32_MAX),
                      static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                              GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                      Dataspace::UNKNOWN,
                      StreamRotation::ROTATION_0,
                      std::string(),
                      jpegBufferSize,
                      -1,
                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                      RequestAvailableDynamicRangeProfilesMap::
                              ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

            streams[0] = stream;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);
            config.streamConfigCounter = streamConfigCounter++;
            halConfigs.clear();
            ret = mSession->configureStreams(config, &halConfigs);
            ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
                      ret.getServiceSpecificError());

            stream = {streamId++,
                      StreamType::OUTPUT,
                      it.width,
                      it.height,
                      static_cast<PixelFormat>(it.format),
                      static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                              GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                      Dataspace::UNKNOWN,
                      static_cast<StreamRotation>(UINT32_MAX),
                      std::string(),
                      jpegBufferSize,
                      -1,
                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                      RequestAvailableDynamicRangeProfilesMap::
                              ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

            streams[0] = stream;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);

            config.streamConfigCounter = streamConfigCounter++;
            halConfigs.clear();
            ret = mSession->configureStreams(config, &halConfigs);
            ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
                      ret.getServiceSpecificError());
        }

        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Check whether all supported ZSL output stream combinations can be
// configured successfully.
TEST_P(CameraAidlTest, configureStreamsZSLInputOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> inputStreams;
    std::vector<AvailableZSLInputOutput> inputOutputMap;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        Status rc = isZSLModeAvailable(staticMeta);
        if (Status::OPERATION_NOT_SUPPORTED == rc) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }
        ASSERT_EQ(Status::OK, rc);

        inputStreams.clear();
        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, inputStreams));
        ASSERT_NE(0u, inputStreams.size());

        inputOutputMap.clear();
        ASSERT_EQ(Status::OK, getZSLInputOutputMap(staticMeta, inputOutputMap));
        ASSERT_NE(0u, inputOutputMap.size());

        bool supportMonoY8 = false;
        if (Status::OK == isMonochromeCamera(staticMeta)) {
            for (auto& it : inputStreams) {
                if (it.format == static_cast<uint32_t>(PixelFormat::Y8)) {
                    supportMonoY8 = true;
                    break;
                }
            }
        }

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

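        // For every advertised ZSL input/output format pair, configure the
        // input stream together with a ZSL output stream and a processed
        // output stream, and track which reprocessing paths are exercised.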
        int32_t streamId = 0;
        bool hasPrivToY8 = false, hasY8ToY8 = false, hasY8ToBlob = false;
        uint32_t streamConfigCounter = 0;
        for (auto& inputIter : inputOutputMap) {
            AvailableStream input;
            ASSERT_EQ(Status::OK, findLargestSize(inputStreams, inputIter.inputFormat, input));
            ASSERT_NE(0u, inputStreams.size());

            if (inputIter.inputFormat ==
                        static_cast<uint32_t>(PixelFormat::IMPLEMENTATION_DEFINED) &&
                inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
                hasPrivToY8 = true;
            } else if (inputIter.inputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
                if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::BLOB)) {
                    hasY8ToBlob = true;
                } else if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
                    hasY8ToY8 = true;
                }
            }
            AvailableStream outputThreshold = {INT32_MAX, INT32_MAX, inputIter.outputFormat};
            std::vector<AvailableStream> outputStreams;
            ASSERT_EQ(Status::OK,
                      getAvailableOutputStreams(staticMeta, outputStreams, &outputThreshold));
            for (auto& outputIter : outputStreams) {
                Dataspace outputDataSpace =
                        getDataspace(static_cast<PixelFormat>(outputIter.format));
                Stream zslStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        input.width,
                        input.height,
                        static_cast<PixelFormat>(input.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC_USAGE_HW_CAMERA_ZSL),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                Stream inputStream = {
                        streamId++,
                        StreamType::INPUT,
                        input.width,
                        input.height,
                        static_cast<PixelFormat>(input.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(0),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                Stream outputStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        outputIter.width,
                        outputIter.height,
                        static_cast<PixelFormat>(outputIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                        outputDataSpace,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

                std::vector<Stream> streams = {inputStream, zslStream, outputStream};

                StreamConfiguration config;
                createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                          jpegBufferSize);

                verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true);

                config.streamConfigCounter = streamConfigCounter++;
                std::vector<HalStream> halConfigs;
                ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(3u, halConfigs.size());
            }
        }

        if (supportMonoY8) {
            if (Status::OK == isZSLModeAvailable(staticMeta, PRIV_REPROCESS)) {
                ASSERT_TRUE(hasPrivToY8);
            }
            if (Status::OK == isZSLModeAvailable(staticMeta, YUV_REPROCESS)) {
                ASSERT_TRUE(hasY8ToY8);
                ASSERT_TRUE(hasY8ToBlob);
            }
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Check whether session parameters are supported. If HAL support for them
// exists, then try to configure a preview stream using them.
TEST_P(CameraAidlTest, configureStreamsWithSessionParameters) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputPreviewStreams;
    AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                        static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;

        std::shared_ptr<ICameraDevice> unusedCameraDevice;
        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &unusedCameraDevice /*out*/);
        camera_metadata_t* staticMetaBuffer =
                reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        std::unordered_set<int32_t> availableSessionKeys;
        auto rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
                                   &availableSessionKeys);
        ASSERT_TRUE(Status::OK == rc);
        if (availableSessionKeys.empty()) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
        android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
                modifiedSessionParams;
        constructFilteredSettings(mSession, availableSessionKeys, RequestTemplate::PREVIEW,
                                  &previewRequestSettings, &sessionParams);
        if (sessionParams.isEmpty()) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        outputPreviewStreams.clear();

        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
                                                        &previewThreshold));
        ASSERT_NE(0u, outputPreviewStreams.size());

        Stream previewStream = {
                0,
                StreamType::OUTPUT,
                outputPreviewStreams[0].width,
                outputPreviewStreams[0].height,
                static_cast<PixelFormat>(outputPreviewStreams[0].format),
                static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                        GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                Dataspace::UNKNOWN,
                StreamRotation::ROTATION_0,
                std::string(),
                /*bufferSize*/ 0,
                /*groupId*/ -1,
                {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                RequestAvailableDynamicRangeProfilesMap::
                        ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

        std::vector<Stream> streams = {previewStream};
        StreamConfiguration config;

        config.streams = streams;
        config.operationMode = StreamConfigurationMode::NORMAL_MODE;
        modifiedSessionParams = sessionParams;
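        // Serialize the constructed session parameters into the raw metadata
        // buffer carried by StreamConfiguration::sessionParams.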
        auto sessionParamsBuffer = sessionParams.release();
        std::vector<uint8_t> rawSessionParam =
                std::vector(reinterpret_cast<uint8_t*>(sessionParamsBuffer),
                            reinterpret_cast<uint8_t*>(sessionParamsBuffer) +
                                    get_camera_metadata_size(sessionParamsBuffer));

        config.sessionParams.metadata = rawSessionParam;
        config.streamConfigCounter = 0;
        config.streams = {previewStream};
        config.streamConfigCounter = 0;
        config.multiResolutionInputImage = false;

        bool newSessionParamsAvailable = false;
        for (const auto& it : availableSessionKeys) {
            if (modifiedSessionParams.exists(it)) {
                modifiedSessionParams.erase(it);
                newSessionParamsAvailable = true;
                break;
            }
        }
        if (newSessionParamsAvailable) {
            auto modifiedSessionParamsBuffer = modifiedSessionParams.release();
            verifySessionReconfigurationQuery(mSession, sessionParamsBuffer,
                                              modifiedSessionParamsBuffer);
            modifiedSessionParams.acquire(modifiedSessionParamsBuffer);
        }

        std::vector<HalStream> halConfigs;
        ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_TRUE(ret.isOk());
        ASSERT_EQ(1u, halConfigs.size());

        sessionParams.acquire(sessionParamsBuffer);
        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Verify that all supported preview + still capture stream combinations
// can be configured successfully.
TEST_P(CameraAidlTest, configureStreamsPreviewStillOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputBlobStreams;
    std::vector<AvailableStream> outputPreviewStreams;
    AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                        static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
    AvailableStream blobThreshold = {INT32_MAX, INT32_MAX, static_cast<int32_t>(PixelFormat::BLOB)};

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;

        std::shared_ptr<ICameraDevice> cameraDevice;
        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);

        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        // Check if the camera supports depth only
        if (isDepthOnly(staticMeta)) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        outputBlobStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
        ASSERT_NE(0u, outputBlobStreams.size());

        outputPreviewStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputPreviewStreams, &previewThreshold));
        ASSERT_NE(0u, outputPreviewStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        uint32_t streamConfigCounter = 0;

        for (auto& blobIter : outputBlobStreams) {
            for (auto& previewIter : outputPreviewStreams) {
                Stream previewStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        previewIter.width,
                        previewIter.height,
                        static_cast<PixelFormat>(previewIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        /*bufferSize*/ 0,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                Stream blobStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        blobIter.width,
                        blobIter.height,
                        static_cast<PixelFormat>(blobIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_CPU_READ),
                        Dataspace::JFIF,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        /*bufferSize*/ 0,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                std::vector<Stream> streams = {previewStream, blobStream};
                StreamConfiguration config;

                createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                          jpegBufferSize);
                config.streamConfigCounter = streamConfigCounter++;
                verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true);

                std::vector<HalStream> halConfigs;
                ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(2u, halConfigs.size());
            }
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// In case constrained mode is supported, test whether it can be
// configured. Additionally check for common invalid inputs when
// using this mode.
TEST_P(CameraAidlTest, configureStreamsConstrainedOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        Status rc = isConstrainedModeAvailable(staticMeta);
        if (Status::OPERATION_NOT_SUPPORTED == rc) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }
        ASSERT_EQ(Status::OK, rc);

        AvailableStream hfrStream;
        rc = pickConstrainedModeSize(staticMeta, hfrStream);
        ASSERT_EQ(Status::OK, rc);

        int32_t streamId = 0;
        uint32_t streamConfigCounter = 0;
        Stream stream = {streamId,
                         StreamType::OUTPUT,
                         hfrStream.width,
                         hfrStream.height,
                         static_cast<PixelFormat>(hfrStream.format),
                         static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                         Dataspace::UNKNOWN,
                         StreamRotation::ROTATION_0,
                         std::string(),
                         /*bufferSize*/ 0,
                         /*groupId*/ -1,
                         {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                         RequestAvailableDynamicRangeProfilesMap::
                                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        std::vector<Stream> streams = {stream};
        StreamConfiguration config;
        createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
                                  &config);

        verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true);

        config.streamConfigCounter = streamConfigCounter++;
        std::vector<HalStream> halConfigs;
        ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_TRUE(ret.isOk());
        ASSERT_EQ(1u, halConfigs.size());
        ASSERT_EQ(halConfigs[0].id, streamId);

        stream = {streamId++,
                  StreamType::OUTPUT,
                  static_cast<uint32_t>(0),
                  static_cast<uint32_t>(0),
                  static_cast<PixelFormat>(hfrStream.format),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  /*bufferSize*/ 0,
                  /*groupId*/ -1,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
                                  &config);

        config.streamConfigCounter = streamConfigCounter++;
        std::vector<HalStream> halConfig;
        ret = mSession->configureStreams(config, &halConfig);
        ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
                            ret.getServiceSpecificError() ||
                    static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());

        stream = {streamId++,
                  StreamType::OUTPUT,
                  INT32_MAX,
                  INT32_MAX,
                  static_cast<PixelFormat>(hfrStream.format),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  /*bufferSize*/ 0,
                  /*groupId*/ -1,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
                                  &config);

        config.streamConfigCounter = streamConfigCounter++;
        halConfigs.clear();
        ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());

        stream = {streamId++,
                  StreamType::OUTPUT,
                  hfrStream.width,
                  hfrStream.height,
                  static_cast<PixelFormat>(UINT32_MAX),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  /*bufferSize*/ 0,
                  /*groupId*/ -1,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
                                  &config);

        config.streamConfigCounter = streamConfigCounter++;
        halConfigs.clear();
        ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());

        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Verify that all supported video + snapshot stream combinations can
// be configured successfully.
TEST_P(CameraAidlTest, configureStreamsVideoStillOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputBlobStreams;
    std::vector<AvailableStream> outputVideoStreams;
    AvailableStream videoThreshold = {kMaxVideoWidth, kMaxVideoHeight,
                                      static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
    AvailableStream blobThreshold = {kMaxVideoWidth, kMaxVideoHeight,
                                     static_cast<int32_t>(PixelFormat::BLOB)};

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);

        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

1442 // Check whether the camera supports depth output only
1443 if (isDepthOnly(staticMeta)) {
1444 ndk::ScopedAStatus ret = mSession->close();
1445 mSession = nullptr;
1446 ASSERT_TRUE(ret.isOk());
1447 continue;
1448 }
1449
1450 outputBlobStreams.clear();
1451 ASSERT_EQ(Status::OK,
1452 getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
1453 ASSERT_NE(0u, outputBlobStreams.size());
1454
1455 outputVideoStreams.clear();
1456 ASSERT_EQ(Status::OK,
1457 getAvailableOutputStreams(staticMeta, outputVideoStreams, &videoThreshold));
1458 ASSERT_NE(0u, outputVideoStreams.size());
1459
1460 int32_t jpegBufferSize = 0;
1461 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
1462 ASSERT_NE(0u, jpegBufferSize);
1463
1464 int32_t streamId = 0;
1465 uint32_t streamConfigCounter = 0;
1466 for (auto& blobIter : outputBlobStreams) {
1467 for (auto& videoIter : outputVideoStreams) {
1468 Stream videoStream = {
1469 streamId++,
1470 StreamType::OUTPUT,
1471 videoIter.width,
1472 videoIter.height,
1473 static_cast<PixelFormat>(videoIter.format),
1474 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1475 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1476 Dataspace::UNKNOWN,
1477 StreamRotation::ROTATION_0,
1478 std::string(),
1479 jpegBufferSize,
1480 /*groupId*/ -1,
1481 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1482 RequestAvailableDynamicRangeProfilesMap::
1483 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1484 Stream blobStream = {
1485 streamId++,
1486 StreamType::OUTPUT,
1487 blobIter.width,
1488 blobIter.height,
1489 static_cast<PixelFormat>(blobIter.format),
1490 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1491 GRALLOC1_CONSUMER_USAGE_CPU_READ),
1492 Dataspace::JFIF,
1493 StreamRotation::ROTATION_0,
1494 std::string(),
1495 jpegBufferSize,
1496 /*groupId*/ -1,
1497 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1498 RequestAvailableDynamicRangeProfilesMap::
1499 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1500 std::vector<Stream> streams = {videoStream, blobStream};
1501 StreamConfiguration config;
1502
1503 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
1504 jpegBufferSize);
1505 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true);
1506
1507 config.streamConfigCounter = streamConfigCounter++;
1508 std::vector<HalStream> halConfigs;
1509 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1510 ASSERT_TRUE(ret.isOk());
1511 ASSERT_EQ(2u, halConfigs.size());
1512 }
1513 }
1514
1515 ndk::ScopedAStatus ret = mSession->close();
1516 mSession = nullptr;
1517 ASSERT_TRUE(ret.isOk());
1518 }
1519 }
1520
1521 // Generate and verify a camera capture request
1522 TEST_P(CameraAidlTest, processCaptureRequestPreview) {
1523 // TODO(b/220897574): Failing with BUFFER_ERROR
1524 processCaptureRequestInternal(GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, RequestTemplate::PREVIEW,
1525 false /*secureOnlyCameras*/);
1526 }
1527
1528 // Generate and verify a secure camera capture request
1529 TEST_P(CameraAidlTest, processSecureCaptureRequest) {
1530 processCaptureRequestInternal(GRALLOC1_PRODUCER_USAGE_PROTECTED, RequestTemplate::STILL_CAPTURE,
1531 true /*secureOnlyCameras*/);
1532 }
1533
1534 TEST_P(CameraAidlTest, processCaptureRequestPreviewStabilization) {
1535 std::unordered_map<std::string, nsecs_t> cameraDeviceToTimeLag;
1536 processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ false,
1537 cameraDeviceToTimeLag);
1538 processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ true,
1539 cameraDeviceToTimeLag);
1540 }
1541
1542 // Generate and verify a multi-camera capture request
1543 TEST_P(CameraAidlTest, processMultiCaptureRequestPreview) {
1544 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1545 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
1546 static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
1547 int64_t bufferId = 1;
1548 uint32_t frameNumber = 1;
1549 std::vector<uint8_t> settings;
1550 std::vector<uint8_t> emptySettings;
1551 std::string invalidPhysicalId = "-1";
1552
1553 for (const auto& name : cameraDeviceNames) {
1554 std::string version, deviceId;
1555 ALOGI("processMultiCaptureRequestPreview: Test device %s", name.c_str());
1556 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1557 CameraMetadata metadata;
1558
1559 std::shared_ptr<ICameraDevice> unusedDevice;
1560 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &metadata /*out*/,
1561 &unusedDevice /*out*/);
1562
1563 camera_metadata_t* staticMeta =
1564 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
1565 Status rc = isLogicalMultiCamera(staticMeta);
1566 if (Status::OPERATION_NOT_SUPPORTED == rc) {
1567 ndk::ScopedAStatus ret = mSession->close();
1568 mSession = nullptr;
1569 ASSERT_TRUE(ret.isOk());
1570 continue;
1571 }
1572 ASSERT_EQ(Status::OK, rc);
1573
1574 std::unordered_set<std::string> physicalIds;
1575 rc = getPhysicalCameraIds(staticMeta, &physicalIds);
1576 ASSERT_TRUE(Status::OK == rc);
1577 ASSERT_TRUE(physicalIds.size() > 1);
1578
1579 std::unordered_set<int32_t> physicalRequestKeyIDs;
1580 rc = getSupportedKeys(staticMeta, ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS,
1581 &physicalRequestKeyIDs);
1582 ASSERT_TRUE(Status::OK == rc);
1583 if (physicalRequestKeyIDs.empty()) {
1584 ndk::ScopedAStatus ret = mSession->close();
1585 mSession = nullptr;
1586 ASSERT_TRUE(ret.isOk());
1587 // The logical camera doesn't support any individual physical requests.
1588 continue;
1589 }
1590
1591 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultPreviewSettings;
1592 android::hardware::camera::common::V1_0::helper::CameraMetadata filteredSettings;
1593 constructFilteredSettings(mSession, physicalRequestKeyIDs, RequestTemplate::PREVIEW,
1594 &defaultPreviewSettings, &filteredSettings);
1595 if (filteredSettings.isEmpty()) {
1596 // No physical device settings in default request.
1597 ndk::ScopedAStatus ret = mSession->close();
1598 mSession = nullptr;
1599 ASSERT_TRUE(ret.isOk());
1600 continue;
1601 }
1602
1603 const camera_metadata_t* settingsBuffer = defaultPreviewSettings.getAndLock();
1604 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1605 settings.assign(rawSettingsBuffer,
1606 rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1607 CameraMetadata settingsMetadata = {settings};
1608 overrideRotateAndCrop(&settingsMetadata);
1609
1610 ndk::ScopedAStatus ret = mSession->close();
1611 mSession = nullptr;
1612 ASSERT_TRUE(ret.isOk());
1613
1614 // Leave only 2 physical devices in the id set.
1615 auto it = physicalIds.begin();
1616 std::string physicalDeviceId = *it;
1617 it++;
1618 physicalIds.erase(++it, physicalIds.end());
1619 ASSERT_EQ(physicalIds.size(), 2u);
1620
1621 std::vector<HalStream> halStreams;
1622 bool supportsPartialResults = false;
1623 std::set<int32_t> halBufManagedStreamIds;
1624 int32_t partialResultCount = 0;
1625 Stream previewStream;
1626 std::shared_ptr<DeviceCb> cb;
1627
1628 configurePreviewStreams(
1629 name, mProvider, &previewThreshold, physicalIds, &mSession, &previewStream,
1630 &halStreams /*out*/, &supportsPartialResults /*out*/, &partialResultCount /*out*/,
1631 &halBufManagedStreamIds /*out*/, &cb /*out*/, 0 /*streamConfigCounter*/, true);
1632 if (mSession == nullptr) {
1633 // Stream combination not supported by the HAL; skip this device.
1634 continue;
1635 }
1636
1637 ::aidl::android::hardware::common::fmq::MQDescriptor<
1638 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1639 descriptor;
1640 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1641 ASSERT_TRUE(resultQueueRet.isOk());
1642 std::shared_ptr<ResultMetadataQueue> resultQueue =
1643 std::make_shared<ResultMetadataQueue>(descriptor);
1644 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1645 ALOGE("%s: HAL returned an empty result metadata fmq, not using it", __func__);
1646 resultQueue = nullptr;
1647 // Don't use the queue onwards.
1648 }
1649
1650 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1651 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1652 partialResultCount, physicalIds, resultQueue);
1653
1654 std::vector<CaptureRequest> requests(1);
1655 CaptureRequest& request = requests[0];
1656 request.frameNumber = frameNumber;
1657 request.fmqSettingsSize = 0;
1658 request.settings = settingsMetadata;
1659
1660 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1661
1662 std::vector<buffer_handle_t> graphicBuffers;
1663 graphicBuffers.reserve(halStreams.size());
1664 outputBuffers.resize(halStreams.size());
1665 size_t k = 0;
1666 for (const auto& halStream : halStreams) {
1667 buffer_handle_t buffer_handle;
1668 bool useHalBufManagerForStream =
1669 halBufManagedStreamIds.find(halStream.id) != halBufManagedStreamIds.end();
1670 if (useHalBufManagerForStream) {
1671 outputBuffers[k] = {halStream.id, /*bufferId*/ 0, NativeHandle(),
1672 BufferStatus::OK, NativeHandle(), NativeHandle()};
1673 } else {
1674 allocateGraphicBuffer(previewStream.width, previewStream.height,
1675 ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(
1676 static_cast<uint64_t>(halStream.producerUsage),
1677 static_cast<uint64_t>(halStream.consumerUsage))),
1678 halStream.overrideFormat, &buffer_handle);
1679 graphicBuffers.push_back(buffer_handle);
1680 outputBuffers[k] = {
1681 halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
1682 BufferStatus::OK, NativeHandle(), NativeHandle()};
1683 bufferId++;
1684 }
1685 k++;
1686 }
1687
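// Build settings for a single physical sub-camera from the filtered physical request keys.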
1688 std::vector<PhysicalCameraSetting> camSettings(1);
1689 const camera_metadata_t* filteredSettingsBuffer = filteredSettings.getAndLock();
1690 uint8_t* rawFilteredSettingsBuffer = (uint8_t*)filteredSettingsBuffer;
1691 camSettings[0].settings = {std::vector(
1692 rawFilteredSettingsBuffer,
1693 rawFilteredSettingsBuffer + get_camera_metadata_size(filteredSettingsBuffer))};
1694 overrideRotateAndCrop(&camSettings[0].settings);
1695 camSettings[0].fmqSettingsSize = 0;
1696 camSettings[0].physicalCameraId = physicalDeviceId;
1697
1698 request.inputBuffer = {
1699 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
1700 request.physicalCameraSettings = camSettings;
1701
1702 {
1703 std::unique_lock<std::mutex> l(mLock);
1704 mInflightMap.clear();
1705 mInflightMap[frameNumber] = inflightReq;
1706 }
1707
1708 int32_t numRequestProcessed = 0;
1709 std::vector<BufferCache> cachesToRemove;
1710 ndk::ScopedAStatus returnStatus =
1711 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1712 ASSERT_TRUE(returnStatus.isOk());
1713 ASSERT_EQ(numRequestProcessed, 1u);
1714
1715 {
1716 std::unique_lock<std::mutex> l(mLock);
1717 while (!inflightReq->errorCodeValid &&
1718 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1719 auto timeout = std::chrono::system_clock::now() +
1720 std::chrono::seconds(kStreamBufferTimeoutSec);
1721 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1722 }
1723
1724 ASSERT_FALSE(inflightReq->errorCodeValid);
1725 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1726
1727 request.frameNumber++;
1728 // Empty settings should be supported after the first call
1729 // for repeating requests.
1730 request.settings.metadata.clear();
1731 request.physicalCameraSettings[0].settings.metadata.clear();
1732 // The buffer has been registered with the HAL by bufferId, so per the
1733 // API contract we should send a null handle for this buffer.
1734 request.outputBuffers[0].buffer = NativeHandle();
1735 mInflightMap.clear();
1736 inflightReq = std::make_shared<InFlightRequest>(
1737 static_cast<ssize_t>(physicalIds.size()), false, supportsPartialResults,
1738 partialResultCount, physicalIds, resultQueue);
1739 mInflightMap[request.frameNumber] = inflightReq;
1740 }
1741
1742 returnStatus =
1743 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1744 ASSERT_TRUE(returnStatus.isOk());
1745 ASSERT_EQ(numRequestProcessed, 1u);
1746
1747 {
1748 std::unique_lock<std::mutex> l(mLock);
1749 while (!inflightReq->errorCodeValid &&
1750 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1751 auto timeout = std::chrono::system_clock::now() +
1752 std::chrono::seconds(kStreamBufferTimeoutSec);
1753 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1754 }
1755
1756 ASSERT_FALSE(inflightReq->errorCodeValid);
1757 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1758 }
1759
1760 // A capture request with an invalid physical camera id must fail
1761 frameNumber++;
1762 camSettings[0].physicalCameraId = invalidPhysicalId;
1763 camSettings[0].settings.metadata = settings;
1764
1765 request.physicalCameraSettings = camSettings; // Invalid camera settings
1766 returnStatus =
1767 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1768 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
1769 returnStatus.getServiceSpecificError());
1770
1771 defaultPreviewSettings.unlock(settingsBuffer);
1772 filteredSettings.unlock(filteredSettingsBuffer);
1773
1774 if (halBufManagedStreamIds.size() != 0) {
1775 std::vector<int32_t> streamIds;
1776 for (size_t i = 0; i < halStreams.size(); i++) {
1777 int32_t streamId = halStreams[i].id;
1778 if (halBufManagedStreamIds.find(streamId) != halBufManagedStreamIds.end()) {
1779 streamIds.emplace_back(streamId);
1780 }
1781 }
1782 verifyBuffersReturned(mSession, streamIds, cb);
1783 }
1784
1785 ret = mSession->close();
1786 mSession = nullptr;
1787 ASSERT_TRUE(ret.isOk());
1788 }
1789 }
1790
1791 // Generate and verify an ultra high resolution capture request
1792 TEST_P(CameraAidlTest, processUltraHighResolutionRequest) {
1793 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1794 int64_t bufferId = 1;
1795 int32_t frameNumber = 1;
1796 CameraMetadata settings;
1797
1798 for (const auto& name : cameraDeviceNames) {
1799 std::string version, deviceId;
1800 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1801 CameraMetadata meta;
1802
1803 std::shared_ptr<ICameraDevice> unusedDevice;
1804 openEmptyDeviceSession(name, mProvider, &mSession, &meta, &unusedDevice);
1805 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1806 if (!isUltraHighResolution(staticMeta)) {
1807 ndk::ScopedAStatus ret = mSession->close();
1808 mSession = nullptr;
1809 ASSERT_TRUE(ret.isOk());
1810 continue;
1811 }
1812 CameraMetadata req;
1813 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
1814 ndk::ScopedAStatus ret =
1815 mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE, &req);
1816 ASSERT_TRUE(ret.isOk());
1817
1818 const camera_metadata_t* metadata =
1819 reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
1820 size_t expectedSize = req.metadata.size();
1821 int result = validate_camera_metadata_structure(metadata, &expectedSize);
1822 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
1823
1824 size_t entryCount = get_camera_metadata_entry_count(metadata);
1825 ASSERT_GT(entryCount, 0u);
1826 defaultSettings = metadata;
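// Request the maximum-resolution sensor pixel mode for this still capture.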
1827 uint8_t sensorPixelMode =
1828 static_cast<uint8_t>(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
1829 ASSERT_EQ(::android::OK,
1830 defaultSettings.update(ANDROID_SENSOR_PIXEL_MODE, &sensorPixelMode, 1));
1831
1832 const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
1833 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1834 settings.metadata = std::vector(
1835 rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1836 overrideRotateAndCrop(&settings);
1837
1838 ret = mSession->close();
1839 mSession = nullptr;
1840 ASSERT_TRUE(ret.isOk());
1841
1842 std::vector<HalStream> halStreams;
1843 bool supportsPartialResults = false;
1844 std::set<int32_t> halBufManagedStreamIds;
1845 int32_t partialResultCount = 0;
1846 Stream previewStream;
1847 std::shared_ptr<DeviceCb> cb;
1848
1849 std::list<PixelFormat> pixelFormats = {PixelFormat::YCBCR_420_888, PixelFormat::RAW16};
1850 for (PixelFormat format : pixelFormats) {
1851 previewStream.usage =
1852 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1853 GRALLOC1_CONSUMER_USAGE_CPU_READ);
1854 previewStream.dataSpace = Dataspace::UNKNOWN;
1855 configureStreams(name, mProvider, format, &mSession, &previewStream, &halStreams,
1856 &supportsPartialResults, &partialResultCount, &halBufManagedStreamIds,
1857 &cb, 0, /*maxResolution*/ true);
1858 ASSERT_NE(mSession, nullptr);
1859
1860 ::aidl::android::hardware::common::fmq::MQDescriptor<
1861 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1862 descriptor;
1863 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1864 ASSERT_TRUE(resultQueueRet.isOk());
1865
1866 std::shared_ptr<ResultMetadataQueue> resultQueue =
1867 std::make_shared<ResultMetadataQueue>(descriptor);
1868 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1869 ALOGE("%s: HAL returned an empty result metadata fmq, not using it", __func__);
1870 resultQueue = nullptr;
1871 // Don't use the queue onwards.
1872 }
1873
1874 std::vector<buffer_handle_t> graphicBuffers;
1875 graphicBuffers.reserve(halStreams.size());
1876 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1877 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1878 partialResultCount, std::unordered_set<std::string>(), resultQueue);
1879
1880 std::vector<CaptureRequest> requests(1);
1881 CaptureRequest& request = requests[0];
1882 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1883 outputBuffers.resize(halStreams.size());
1884
1885 size_t k = 0;
1886 for (const auto& halStream : halStreams) {
1887 buffer_handle_t buffer_handle;
1888 bool halBufManagerUsed =
1889 halBufManagedStreamIds.find(halStream.id) != halBufManagedStreamIds.end();
1890 if (halBufManagerUsed) {
1891 outputBuffers[k] = {halStream.id, 0,
1892 NativeHandle(), BufferStatus::OK,
1893 NativeHandle(), NativeHandle()};
1894 } else {
1895 allocateGraphicBuffer(previewStream.width, previewStream.height,
1896 ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(
1897 static_cast<uint64_t>(halStream.producerUsage),
1898 static_cast<uint64_t>(halStream.consumerUsage))),
1899 halStream.overrideFormat, &buffer_handle);
1900 graphicBuffers.push_back(buffer_handle);
1901 outputBuffers[k] = {
1902 halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
1903 BufferStatus::OK, NativeHandle(), NativeHandle()};
1904 bufferId++;
1905 }
1906 k++;
1907 }
1908
1909 request.inputBuffer = {
1910 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
1911 request.frameNumber = frameNumber;
1912 request.fmqSettingsSize = 0;
1913 request.settings = settings;
1914 request.inputWidth = 0;
1915 request.inputHeight = 0;
1916
1917 {
1918 std::unique_lock<std::mutex> l(mLock);
1919 mInflightMap.clear();
1920 mInflightMap[frameNumber] = inflightReq;
1921 }
1922
1923 int32_t numRequestProcessed = 0;
1924 std::vector<BufferCache> cachesToRemove;
1925 ndk::ScopedAStatus returnStatus =
1926 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1927 ASSERT_TRUE(returnStatus.isOk());
1928 ASSERT_EQ(numRequestProcessed, 1u);
1929
1930 {
1931 std::unique_lock<std::mutex> l(mLock);
1932 while (!inflightReq->errorCodeValid &&
1933 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1934 auto timeout = std::chrono::system_clock::now() +
1935 std::chrono::seconds(kStreamBufferTimeoutSec);
1936 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1937 }
1938
1939 ASSERT_FALSE(inflightReq->errorCodeValid);
1940 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1941 }
1942 if (halBufManagedStreamIds.size()) {
1943 std::vector<int32_t> streamIds;
1944 for (size_t i = 0; i < halStreams.size(); i++) {
1945 if (contains(halBufManagedStreamIds, halStreams[i].id)) {
1946 streamIds.emplace_back(halStreams[i].id);
1947 }
1948 }
1949 verifyBuffersReturned(mSession, streamIds, cb);
1950 }
1951
1952 ret = mSession->close();
1953 mSession = nullptr;
1954 ASSERT_TRUE(ret.isOk());
1955 }
1956 }
1957 }
1958
1959 // Generate and verify a 10-bit dynamic range request
1960 TEST_P(CameraAidlTest, process10BitDynamicRangeRequest) {
1961 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1962 CameraMetadata settings;
1963
1964 for (const auto& name : cameraDeviceNames) {
1965 std::string version, deviceId;
1966 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1967 CameraMetadata meta;
1968 std::shared_ptr<ICameraDevice> device;
1969 openEmptyDeviceSession(name, mProvider, &mSession, &meta, &device);
1970 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1971 if (!is10BitDynamicRangeCapable(staticMeta)) {
1972 ndk::ScopedAStatus ret = mSession->close();
1973 mSession = nullptr;
1974 ASSERT_TRUE(ret.isOk());
1975 continue;
1976 }
1977 std::vector<RequestAvailableDynamicRangeProfilesMap> profileList;
1978 get10BitDynamicRangeProfiles(staticMeta, &profileList);
1979 ASSERT_FALSE(profileList.empty());
1980
1981 CameraMetadata req;
1982 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
1983 ndk::ScopedAStatus ret =
1984 mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &req);
1985 ASSERT_TRUE(ret.isOk());
1986
1987 const camera_metadata_t* metadata =
1988 reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
1989 size_t expectedSize = req.metadata.size();
1990 int result = validate_camera_metadata_structure(metadata, &expectedSize);
1991 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
1992
1993 size_t entryCount = get_camera_metadata_entry_count(metadata);
1994 ASSERT_GT(entryCount, 0u);
1995 defaultSettings = metadata;
1996
1997 const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
1998 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1999 settings.metadata = std::vector(
2000 rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
2001 overrideRotateAndCrop(&settings);
2002
2003 ret = mSession->close();
2004 mSession = nullptr;
2005 ASSERT_TRUE(ret.isOk());
2006
2007 std::vector<HalStream> halStreams;
2008 bool supportsPartialResults = false;
2009 std::set<int32_t> halBufManagedStreamIds;
2010 int32_t partialResultCount = 0;
2011 Stream previewStream;
2012 std::shared_ptr<DeviceCb> cb;
2013 for (const auto& profile : profileList) {
2014 previewStream.usage =
2015 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2016 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
2017 previewStream.dataSpace = getDataspace(PixelFormat::IMPLEMENTATION_DEFINED);
2018 configureStreams(name, mProvider, PixelFormat::IMPLEMENTATION_DEFINED, &mSession,
2019 &previewStream, &halStreams, &supportsPartialResults,
2020 &partialResultCount, &halBufManagedStreamIds, &cb, 0,
2021 /*maxResolution*/ false, profile);
2022 ASSERT_NE(mSession, nullptr);
2023
2024 ::aidl::android::hardware::common::fmq::MQDescriptor<
2025 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2026 descriptor;
2027 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2028 ASSERT_TRUE(resultQueueRet.isOk());
2029
2030 std::shared_ptr<ResultMetadataQueue> resultQueue =
2031 std::make_shared<ResultMetadataQueue>(descriptor);
2032 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2033 ALOGE("%s: HAL returned an empty result metadata fmq, not using it", __func__);
2034 resultQueue = nullptr;
2035 // Don't use the queue onwards.
2036 }
2037
2038 mInflightMap.clear();
2039 // Submit enough requests to fill the HAL's in-flight queue
2040 std::vector<CaptureRequest> requests(halStreams[0].maxBuffers);
2041
2042 for (int32_t requestId = 0; requestId < requests.size(); requestId++) {
2043 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
2044 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
2045 partialResultCount, std::unordered_set<std::string>(), resultQueue);
2046
2047 CaptureRequest& request = requests[requestId];
2048 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2049 outputBuffers.resize(halStreams.size());
2050
2051 size_t k = 0;
2052 inflightReq->mOutstandingBufferIds.resize(halStreams.size());
2053 std::vector<buffer_handle_t> graphicBuffers;
2054 graphicBuffers.reserve(halStreams.size());
2055
2056 auto bufferId = requestId + 1; // Buffer id value 0 is not valid
2057 for (const auto& halStream : halStreams) {
2058 buffer_handle_t buffer_handle;
2059 if (contains(halBufManagedStreamIds, halStream.id)) {
2060 outputBuffers[k] = {halStream.id, 0,
2061 NativeHandle(), BufferStatus::OK,
2062 NativeHandle(), NativeHandle()};
2063 } else {
2064 auto usage = ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(
2065 static_cast<uint64_t>(halStream.producerUsage),
2066 static_cast<uint64_t>(halStream.consumerUsage)));
2067 allocateGraphicBuffer(previewStream.width, previewStream.height, usage,
2068 halStream.overrideFormat, &buffer_handle);
2069
2070 inflightReq->mOutstandingBufferIds[halStream.id][bufferId] = buffer_handle;
2071 graphicBuffers.push_back(buffer_handle);
2072 outputBuffers[k] = {halStream.id, bufferId,
2073 android::makeToAidl(buffer_handle), BufferStatus::OK, NativeHandle(),
2074 NativeHandle()};
2075 }
2076 k++;
2077 }
2078
2079 request.inputBuffer = {
2080 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2081 request.frameNumber = bufferId;
2082 request.fmqSettingsSize = 0;
2083 request.settings = settings;
2084 request.inputWidth = 0;
2085 request.inputHeight = 0;
2086
2087 {
2088 std::unique_lock<std::mutex> l(mLock);
2089 mInflightMap[bufferId] = inflightReq;
2090 }
2091
2092 }
2093
2094 int32_t numRequestProcessed = 0;
2095 std::vector<BufferCache> cachesToRemove;
2096 ndk::ScopedAStatus returnStatus =
2097 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2098 ASSERT_TRUE(returnStatus.isOk());
2099 ASSERT_EQ(numRequestProcessed, requests.size());
2100
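// Signal to the HAL that the repeating burst on this stream has ended.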
2101 returnStatus = mSession->repeatingRequestEnd(requests.size() - 1,
2102 std::vector<int32_t> {halStreams[0].id});
2103 ASSERT_TRUE(returnStatus.isOk());
2104
2105 // Frame numbers and buffer ids are kept consistent. A buffer id of 0 indicates
2106 // a buffer that is not present/available, so both buffer ids and frame numbers
2107 // begin with 1.
2108 for (int32_t frameNumber = 1; frameNumber <= requests.size(); frameNumber++) {
2109 const auto& inflightReq = mInflightMap[frameNumber];
2110 std::unique_lock<std::mutex> l(mLock);
2111 while (!inflightReq->errorCodeValid &&
2112 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2113 auto timeout = std::chrono::system_clock::now() +
2114 std::chrono::seconds(kStreamBufferTimeoutSec);
2115 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2116 }
2117
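// Wait for any release fences before inspecting the output buffers.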
2118 waitForReleaseFence(inflightReq->resultOutputBuffers);
2119
2120 ASSERT_FALSE(inflightReq->errorCodeValid);
2121 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2122 verify10BitMetadata(mHandleImporter, *inflightReq, profile);
2123 }
2124
2125 if (halBufManagedStreamIds.size() != 0) {
2126 std::vector<int32_t> streamIds;
2127 for (size_t i = 0; i < halStreams.size(); i++) {
2128 if (contains(halBufManagedStreamIds, halStreams[i].id)) {
2129 streamIds.emplace_back(halStreams[i].id);
2130 }
2131 }
2132 mSession->signalStreamFlush(streamIds, /*streamConfigCounter*/ 0);
2133 cb->waitForBuffersReturned();
2134 }
2135
2136 ret = mSession->close();
2137 mSession = nullptr;
2138 ASSERT_TRUE(ret.isOk());
2139 }
2140 }
2141 }
2142
2143 TEST_P(CameraAidlTest, process8BitColorSpaceRequests) {
2144 static int profiles[] = {ColorSpaceNamed::DISPLAY_P3, ColorSpaceNamed::SRGB};
2145
2146 for (int32_t i = 0; i < sizeof(profiles) / sizeof(profiles[0]); i++) {
2147 processColorSpaceRequest(static_cast<RequestAvailableColorSpaceProfilesMap>(profiles[i]),
2148 static_cast<RequestAvailableDynamicRangeProfilesMap>(
2149 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD));
2150 }
2151 }
2152
2153 TEST_P(CameraAidlTest, process10BitColorSpaceRequests) {
2154 static const camera_metadata_enum_android_request_available_dynamic_range_profiles_map
2155 dynamicRangeProfiles[] = {
2156 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10,
2157 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10,
2158 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS,
2159 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF,
2160 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO,
2161 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM,
2162 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO,
2163 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF,
2164 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF_PO,
2165 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM,
2166 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO
2167 };
2168
2169 // Process all dynamic range profiles with BT2020_HLG
2170 for (int32_t i = 0; i < sizeof(dynamicRangeProfiles) / sizeof(dynamicRangeProfiles[0]); i++) {
2171 processColorSpaceRequest(
2172 static_cast<RequestAvailableColorSpaceProfilesMap>(ColorSpaceNamed::BT2020_HLG),
2173 static_cast<RequestAvailableDynamicRangeProfilesMap>(dynamicRangeProfiles[i]));
2174 }
2175 }
2176
2177 TEST_P(CameraAidlTest, processZoomSettingsOverrideRequests) {
2178 const int32_t kFrameCount = 5;
2179 const int32_t kTestCases = 2;
2180 const bool kOverrideSequence[kTestCases][kFrameCount] = {// ZOOM, ZOOM, ZOOM, ZOOM, ZOOM;
2181 {true, true, true, true, true},
2182 // OFF, ZOOM, ZOOM, ZOOM, OFF;
2183 {false, true, true, true, false}};
2184 const bool kExpectedOverrideResults[kTestCases][kFrameCount] = {
2185 // All results should be overridden except the last one. The last result's
2186 // zoom doesn't have speed-up.
2187 {true, true, true, true, false},
2188 // Because we require at least 1 frame speed-up, request #1, #2 and #3
2189 // will be overridden.
2190 {true, true, true, false, false}};
2191
2192 for (int i = 0; i < kTestCases; i++) {
2193 processZoomSettingsOverrideRequests(kFrameCount, kOverrideSequence[i],
2194 kExpectedOverrideResults[i]);
2195 }
2196 }
2197
2198 // Generate and verify a burst containing alternating sensor sensitivity values
2199 TEST_P(CameraAidlTest, processCaptureRequestBurstISO) {
2200 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2201 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2202 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2203 int64_t bufferId = 1;
2204 int32_t frameNumber = 1;
2205 float isoTol = .03f;
2206 CameraMetadata settings;
2207
2208 for (const auto& name : cameraDeviceNames) {
2209 CameraMetadata meta;
2210 settings.metadata.clear();
2211 std::shared_ptr<ICameraDevice> unusedDevice;
2212 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2213 &unusedDevice /*out*/);
2214 camera_metadata_t* staticMetaBuffer =
2215 clone_camera_metadata(reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
2216 ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
2217 staticMetaBuffer);
2218
2219 camera_metadata_entry_t hwLevel = staticMeta.find(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL);
2220 ASSERT_TRUE(0 < hwLevel.count);
2221 if (ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED == hwLevel.data.u8[0] ||
2222 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL == hwLevel.data.u8[0]) {
2223 // Limited/External devices can skip this test
2224 ndk::ScopedAStatus ret = mSession->close();
2225 mSession = nullptr;
2226 ASSERT_TRUE(ret.isOk());
2227 continue;
2228 }
2229
2230 camera_metadata_entry_t isoRange = staticMeta.find(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE);
2231 ASSERT_EQ(isoRange.count, 2u);
2232
2233 ndk::ScopedAStatus ret = mSession->close();
2234 mSession = nullptr;
2235 ASSERT_TRUE(ret.isOk());
2236
2237 bool supportsPartialResults = false;
2238 bool useHalBufManager = false;
2239 int32_t partialResultCount = 0;
2240 Stream previewStream;
2241 std::vector<HalStream> halStreams;
2242 std::shared_ptr<DeviceCb> cb;
2243 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2244 &previewStream /*out*/, &halStreams /*out*/,
2245 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2246 &useHalBufManager /*out*/, &cb /*out*/);
2247
2248 ::aidl::android::hardware::common::fmq::MQDescriptor<
2249 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2250 descriptor;
2251 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2252 std::shared_ptr<ResultMetadataQueue> resultQueue =
2253 std::make_shared<ResultMetadataQueue>(descriptor);
2254 ASSERT_TRUE(resultQueueRet.isOk());
2255 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2256 ALOGE("%s: HAL returned an empty result metadata fmq, not using it", __func__);
2257 resultQueue = nullptr;
2258 // Don't use the queue onwards.
2259 }
2260
2261 ret = mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &settings);
2262 ASSERT_TRUE(ret.isOk());
2263
2264 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
2265 std::vector<CaptureRequest> requests(kBurstFrameCount);
2266 std::vector<buffer_handle_t> buffers(kBurstFrameCount);
2267 std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
2268 std::vector<int32_t> isoValues(kBurstFrameCount);
2269 std::vector<CameraMetadata> requestSettings(kBurstFrameCount);
2270
2271 for (int32_t i = 0; i < kBurstFrameCount; i++) {
2272 std::unique_lock<std::mutex> l(mLock);
2273 CaptureRequest& request = requests[i];
2274 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2275 outputBuffers.resize(1);
2276 StreamBuffer& outputBuffer = outputBuffers[0];
2277
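// Alternate between the minimum and maximum supported sensitivity values.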
2278 isoValues[i] = ((i % 2) == 0) ? isoRange.data.i32[0] : isoRange.data.i32[1];
2279 if (useHalBufManager) {
2280 outputBuffer = {halStreams[0].id, 0,
2281 NativeHandle(), BufferStatus::OK,
2282 NativeHandle(), NativeHandle()};
2283 } else {
2284 allocateGraphicBuffer(previewStream.width, previewStream.height,
2285 ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(
2286 static_cast<uint64_t>(halStreams[0].producerUsage),
2287 static_cast<uint64_t>(halStreams[0].consumerUsage))),
2288 halStreams[0].overrideFormat, &buffers[i]);
2289 outputBuffer = {halStreams[0].id, bufferId + i, ::android::makeToAidl(buffers[i]),
2290 BufferStatus::OK, NativeHandle(), NativeHandle()};
2291 }
2292
2293 requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
2294
2295 // Disable all 3A routines
2296 uint8_t mode = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
2297 ASSERT_EQ(::android::OK, requestMeta.update(ANDROID_CONTROL_MODE, &mode, 1));
2298 ASSERT_EQ(::android::OK,
2299 requestMeta.update(ANDROID_SENSOR_SENSITIVITY, &isoValues[i], 1));
2300 camera_metadata_t* metaBuffer = requestMeta.release();
2301 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2302 requestSettings[i].metadata = std::vector(
2303 rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2304 overrideRotateAndCrop(&(requestSettings[i]));
2305
2306 request.frameNumber = frameNumber + i;
2307 request.fmqSettingsSize = 0;
2308 request.settings = requestSettings[i];
2309 request.inputBuffer = {
2310 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2311
2312 inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2313 partialResultCount, resultQueue);
2314 mInflightMap[frameNumber + i] = inflightReqs[i];
2315 }
2316
2317 int32_t numRequestProcessed = 0;
2318 std::vector<BufferCache> cachesToRemove;
2319
2320 ndk::ScopedAStatus returnStatus =
2321 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2322 ASSERT_TRUE(returnStatus.isOk());
2323 ASSERT_EQ(numRequestProcessed, kBurstFrameCount);
2324
2325 for (size_t i = 0; i < kBurstFrameCount; i++) {
2326 std::unique_lock<std::mutex> l(mLock);
2327 while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
2328 (!inflightReqs[i]->haveResultMetadata))) {
2329 auto timeout = std::chrono::system_clock::now() +
2330 std::chrono::seconds(kStreamBufferTimeoutSec);
2331 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2332 }
2333
2334 ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
2335 ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
2336 ASSERT_EQ(previewStream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
2337 ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
2338 ASSERT_TRUE(inflightReqs[i]->collectedResult.exists(ANDROID_SENSOR_SENSITIVITY));
2339 camera_metadata_entry_t isoResult =
2340 inflightReqs[i]->collectedResult.find(ANDROID_SENSOR_SENSITIVITY);
2341 ASSERT_TRUE(std::abs(isoResult.data.i32[0] - isoValues[i]) <=
2342 std::round(isoValues[i] * isoTol));
2343 }
2344
2345 if (useHalBufManager) {
2346 verifyBuffersReturned(mSession, previewStream.id, cb);
2347 }
2348 ret = mSession->close();
2349 mSession = nullptr;
2350 ASSERT_TRUE(ret.isOk());
2351 }
2352 }
2353
2354 // Test whether an incorrect capture request with missing settings will
2355 // be reported correctly.
2356 TEST_P(CameraAidlTest, processCaptureRequestInvalidSinglePreview) {
2357 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2358 std::vector<AvailableStream> outputPreviewStreams;
2359 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2360 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2361 int64_t bufferId = 1;
2362 int32_t frameNumber = 1;
2363 CameraMetadata settings;
2364
2365 for (const auto& name : cameraDeviceNames) {
2366 Stream previewStream;
2367 std::vector<HalStream> halStreams;
2368 std::shared_ptr<DeviceCb> cb;
2369 bool supportsPartialResults = false;
2370 bool useHalBufManager = false;
2371 int32_t partialResultCount = 0;
2372 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2373 &previewStream /*out*/, &halStreams /*out*/,
2374 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2375 &useHalBufManager /*out*/, &cb /*out*/);
2376 ASSERT_NE(mSession, nullptr);
2377 ASSERT_FALSE(halStreams.empty());
2378
2379 buffer_handle_t buffer_handle = nullptr;
2380
2381 if (useHalBufManager) {
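// With HAL buffer management the HAL supplies buffers, so send buffer id 0 and no handle.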
2382 bufferId = 0;
2383 } else {
2384 allocateGraphicBuffer(previewStream.width, previewStream.height,
2385 ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(
2386 static_cast<uint64_t>(halStreams[0].producerUsage),
2387 static_cast<uint64_t>(halStreams[0].consumerUsage))),
2388 halStreams[0].overrideFormat, &buffer_handle);
2389 }
2390
2391 std::vector<CaptureRequest> requests(1);
2392 CaptureRequest& request = requests[0];
2393 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2394 outputBuffers.resize(1);
2395 StreamBuffer& outputBuffer = outputBuffers[0];
2396
2397 outputBuffer = {
2398 halStreams[0].id,
2399 bufferId,
2400 buffer_handle == nullptr ? NativeHandle() : ::android::makeToAidl(buffer_handle),
2401 BufferStatus::OK,
2402 NativeHandle(),
2403 NativeHandle()};
2404
2405 request.inputBuffer = {
2406 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2407 request.frameNumber = frameNumber;
2408 request.fmqSettingsSize = 0;
2409 request.settings = settings;
2410
2411 // Settings were not correctly initialized, so this request must fail
2412 int32_t numRequestProcessed = 0;
2413 std::vector<BufferCache> cachesToRemove;
2414 ndk::ScopedAStatus ret =
2415 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2416 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2417 ASSERT_EQ(numRequestProcessed, 0u);
2418
2419 ret = mSession->close();
2420 mSession = nullptr;
2421 ASSERT_TRUE(ret.isOk());
2422 }
2423 }
2424
2425 // Verify camera offline session behavior
2426 TEST_P(CameraAidlTest, switchToOffline) {
2427 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2428 AvailableStream threshold = {kMaxStillWidth, kMaxStillHeight,
2429 static_cast<int32_t>(PixelFormat::BLOB)};
2430 int64_t bufferId = 1;
2431 int32_t frameNumber = 1;
2432 CameraMetadata settings;
2433
2434 for (const auto& name : cameraDeviceNames) {
2435 CameraMetadata meta;
2436 {
2437 std::shared_ptr<ICameraDevice> unusedDevice;
2438 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2439 &unusedDevice);
2440 camera_metadata_t* staticMetaBuffer = clone_camera_metadata(
2441 reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
2442 ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
2443 staticMetaBuffer);
2444
2445 if (isOfflineSessionSupported(staticMetaBuffer) != Status::OK) {
2446 ndk::ScopedAStatus ret = mSession->close();
2447 mSession = nullptr;
2448 ASSERT_TRUE(ret.isOk());
2449 continue;
2450 }
2451 ndk::ScopedAStatus ret = mSession->close();
2452 mSession = nullptr;
2453 ASSERT_TRUE(ret.isOk());
2454 }
2455
2456 bool supportsPartialResults = false;
2457 int32_t partialResultCount = 0;
2458 Stream stream;
2459 std::vector<HalStream> halStreams;
2460 std::shared_ptr<DeviceCb> cb;
2461 int32_t jpegBufferSize;
2462 std::set<int32_t> halBufManagedStreamIds;
2463 configureOfflineStillStream(name, mProvider, &threshold, &mSession /*out*/, &stream /*out*/,
2464 &halStreams /*out*/, &supportsPartialResults /*out*/,
2465 &partialResultCount /*out*/, &cb /*out*/,
2466 &jpegBufferSize /*out*/, &halBufManagedStreamIds /*out*/);
2467
2468 auto ret = mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE,
2469 &settings);
2470 ASSERT_TRUE(ret.isOk());
2471
2472 ::aidl::android::hardware::common::fmq::MQDescriptor<
2473 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2474 descriptor;
2475
2476 ndk::ScopedAStatus resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2477 ASSERT_TRUE(resultQueueRet.isOk());
2478 std::shared_ptr<ResultMetadataQueue> resultQueue =
2479 std::make_shared<ResultMetadataQueue>(descriptor);
2480 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2481 ALOGE("%s: HAL returned an empty result metadata fmq, not using it", __func__);
2482 resultQueue = nullptr;
2483 // Don't use the queue onwards.
2484 }
2485
2486 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
2487
2488 std::vector<buffer_handle_t> buffers(kBurstFrameCount);
2489 std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
2490 std::vector<CameraMetadata> requestSettings(kBurstFrameCount);
2491
2492 std::vector<CaptureRequest> requests(kBurstFrameCount);
2493
2494 HalStream halStream = halStreams[0];
2495 for (uint32_t i = 0; i < kBurstFrameCount; i++) {
2496 CaptureRequest& request = requests[i];
2497 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2498 outputBuffers.resize(1);
2499 StreamBuffer& outputBuffer = outputBuffers[0];
2500
2501 std::unique_lock<std::mutex> l(mLock);
2502 if (contains(halBufManagedStreamIds, halStream.id)) {
2503 outputBuffer = {halStream.id, 0, NativeHandle(), BufferStatus::OK, NativeHandle(),
2504 NativeHandle()};
2505 } else {
2506 // jpeg buffer (w,h) = (blobLen, 1)
2507 allocateGraphicBuffer(jpegBufferSize, /*height*/ 1,
2508 ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(
2509 static_cast<uint64_t>(halStream.producerUsage),
2510 static_cast<uint64_t>(halStream.consumerUsage))),
2511 halStream.overrideFormat, &buffers[i]);
2512 outputBuffer = {halStream.id, bufferId + i, ::android::makeToAidl(buffers[i]),
2513 BufferStatus::OK, NativeHandle(), NativeHandle()};
2514 }
2515
2516 requestMeta.clear();
2517 requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
2518
2519 camera_metadata_t* metaBuffer = requestMeta.release();
2520 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2521 requestSettings[i].metadata = std::vector(
2522 rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2523 overrideRotateAndCrop(&requestSettings[i]);
2524
2525 request.frameNumber = frameNumber + i;
2526 request.fmqSettingsSize = 0;
2527 request.settings = requestSettings[i];
2528 request.inputBuffer = {/*streamId*/ -1,
2529 /*bufferId*/ 0, NativeHandle(),
2530 BufferStatus::ERROR, NativeHandle(),
2531 NativeHandle()};
2532
2533 inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2534 partialResultCount, resultQueue);
2535 mInflightMap[frameNumber + i] = inflightReqs[i];
2536 }
2537
2538 int32_t numRequestProcessed = 0;
2539 std::vector<BufferCache> cachesToRemove;
2540
2541 ndk::ScopedAStatus returnStatus =
2542 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2543 ASSERT_TRUE(returnStatus.isOk());
2544 ASSERT_EQ(numRequestProcessed, kBurstFrameCount);
2545
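// Attempt to move the configured still stream into an offline session while the burst is in flight.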
2546 std::vector<int32_t> offlineStreamIds = {halStream.id};
2547 CameraOfflineSessionInfo offlineSessionInfo;
2548 std::shared_ptr<ICameraOfflineSession> offlineSession;
2549 returnStatus =
2550 mSession->switchToOffline(offlineStreamIds, &offlineSessionInfo, &offlineSession);
2551
2552 if (!halStreams[0].supportOffline) {
2553 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
2554 returnStatus.getServiceSpecificError());
2555 ret = mSession->close();
2556 mSession = nullptr;
2557 ASSERT_TRUE(ret.isOk());
2558 continue;
2559 }
2560
2561 ASSERT_TRUE(returnStatus.isOk());
2562 // The HAL might be unable to find any requests that qualify for offline mode.
2563 if (offlineSession == nullptr) {
2564 ret = mSession->close();
2565 mSession = nullptr;
2566 ASSERT_TRUE(ret.isOk());
2567 continue;
2568 }
2569
2570 ASSERT_EQ(offlineSessionInfo.offlineStreams.size(), 1u);
2571 ASSERT_EQ(offlineSessionInfo.offlineStreams[0].id, halStream.id);
2572 ASSERT_NE(offlineSessionInfo.offlineRequests.size(), 0u);
2573
2574 // Close the device session to make sure the offline session does not rely on it
2575 ret = mSession->close();
2576 mSession = nullptr;
2577 ASSERT_TRUE(ret.isOk());
2578
2579 ::aidl::android::hardware::common::fmq::MQDescriptor<
2580 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2581 offlineResultDescriptor;
2582
2583 auto offlineResultQueueRet =
2584 offlineSession->getCaptureResultMetadataQueue(&offlineResultDescriptor);
2585 std::shared_ptr<ResultMetadataQueue> offlineResultQueue =
2586 std::make_shared<ResultMetadataQueue>(offlineResultDescriptor);
2587 if (!offlineResultQueue->isValid() || offlineResultQueue->availableToWrite() <= 0) {
2588 ALOGE("%s: offline session returned an empty result metadata fmq, not using it", __func__);
2589 offlineResultQueue = nullptr;
2590 // Don't use the queue onwards.
2591 }
2592 ASSERT_TRUE(offlineResultQueueRet.isOk());
2593
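// Point the in-flight requests at the offline session's result metadata queue.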
2594 updateInflightResultQueue(offlineResultQueue);
2595
2596 ret = offlineSession->setCallback(cb);
2597 ASSERT_TRUE(ret.isOk());
2598
2599 for (size_t i = 0; i < kBurstFrameCount; i++) {
2600 std::unique_lock<std::mutex> l(mLock);
2601 while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
2602 (!inflightReqs[i]->haveResultMetadata))) {
2603 auto timeout = std::chrono::system_clock::now() +
2604 std::chrono::seconds(kStreamBufferTimeoutSec);
2605 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2606 }
2607
2608 ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
2609 ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
2610 ASSERT_EQ(stream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
2611 ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
2612 }
2613
2614 ret = offlineSession->close();
2615 ASSERT_TRUE(ret.isOk());
2616 }
2617 }
2618
2619 // Check whether an invalid capture request with missing output buffers
2620 // will be reported correctly.
2621 TEST_P(CameraAidlTest, processCaptureRequestInvalidBuffer) {
2622 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2623 std::vector<AvailableStream> outputBlobStreams;
2624 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2625 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2626 int32_t frameNumber = 1;
2627 CameraMetadata settings;
2628
2629 for (const auto& name : cameraDeviceNames) {
2630 Stream previewStream;
2631 std::vector<HalStream> halStreams;
2632 std::shared_ptr<DeviceCb> cb;
2633 bool supportsPartialResults = false;
2634 bool useHalBufManager = false;
2635 int32_t partialResultCount = 0;
2636 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2637 &previewStream /*out*/, &halStreams /*out*/,
2638 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2639 &useHalBufManager /*out*/, &cb /*out*/);
2640
2641 RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
2642 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
2643 ASSERT_TRUE(ret.isOk());
2644 overrideRotateAndCrop(&settings);
2645
2646 std::vector<CaptureRequest> requests(1);
2647 CaptureRequest& request = requests[0];
2648 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2649 outputBuffers.resize(1);
2650 // Empty output buffer
2651 outputBuffers[0] = {
2652 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2653
2654 request.inputBuffer = {
2655 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2656 request.frameNumber = frameNumber;
2657 request.fmqSettingsSize = 0;
2658 request.settings = settings;
2659
2660 // Output buffers are missing, so this request must fail
2661 int32_t numRequestProcessed = 0;
2662 std::vector<BufferCache> cachesToRemove;
2663 ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2664 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2665 ASSERT_EQ(numRequestProcessed, 0u);
2666
2667 ret = mSession->close();
2668 mSession = nullptr;
2669 ASSERT_TRUE(ret.isOk());
2670 }
2671 }
2672
2673 // Generate, trigger and flush a preview request
2674 TEST_P(CameraAidlTest, flushPreviewRequest) {
2675 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2676 std::vector<AvailableStream> outputPreviewStreams;
2677 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2678 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2679 int64_t bufferId = 1;
2680 int32_t frameNumber = 1;
2681 CameraMetadata settings;
2682
2683 for (const auto& name : cameraDeviceNames) {
2684 Stream previewStream;
2685 std::vector<HalStream> halStreams;
2686 std::shared_ptr<DeviceCb> cb;
2687 bool supportsPartialResults = false;
2688 bool useHalBufManager = false;
2689 int32_t partialResultCount = 0;
2690
2691 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2692 &previewStream /*out*/, &halStreams /*out*/,
2693 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2694 &useHalBufManager /*out*/, &cb /*out*/);
2695
2696 ASSERT_NE(mSession, nullptr);
2697 ASSERT_NE(cb, nullptr);
2698 ASSERT_FALSE(halStreams.empty());
2699
2700 ::aidl::android::hardware::common::fmq::MQDescriptor<
2701 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2702 descriptor;
2703
2704 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2705 std::shared_ptr<ResultMetadataQueue> resultQueue =
2706 std::make_shared<ResultMetadataQueue>(descriptor);
2707 ASSERT_TRUE(resultQueueRet.isOk());
2708 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2709 ALOGE("%s: HAL returned an empty result metadata fmq, not using it", __func__);
2710 resultQueue = nullptr;
2711 // Don't use the queue onwards.
2712 }
2713
2714 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
2715 1, false, supportsPartialResults, partialResultCount, resultQueue);
2716 RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
2717
2718 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
2719 ASSERT_TRUE(ret.isOk());
2720 overrideRotateAndCrop(&settings);
2721
2722 buffer_handle_t buffer_handle;
2723 std::vector<CaptureRequest> requests(1);
2724 CaptureRequest& request = requests[0];
2725 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2726 outputBuffers.resize(1);
2727 StreamBuffer& outputBuffer = outputBuffers[0];
2728 if (useHalBufManager) {
2729 bufferId = 0;
2730 outputBuffer = {halStreams[0].id, bufferId, NativeHandle(),
2731 BufferStatus::OK, NativeHandle(), NativeHandle()};
2732 } else {
2733 allocateGraphicBuffer(previewStream.width, previewStream.height,
2734 ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(
2735 static_cast<uint64_t>(halStreams[0].producerUsage),
2736 static_cast<uint64_t>(halStreams[0].consumerUsage))),
2737 halStreams[0].overrideFormat, &buffer_handle);
2738 outputBuffer = {halStreams[0].id, bufferId, ::android::makeToAidl(buffer_handle),
2739 BufferStatus::OK, NativeHandle(), NativeHandle()};
2740 }
2741
2742 request.frameNumber = frameNumber;
2743 request.fmqSettingsSize = 0;
2744 request.settings = settings;
2745 request.inputBuffer = {
2746 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2747
2748 {
2749 std::unique_lock<std::mutex> l(mLock);
2750 mInflightMap.clear();
2751 mInflightMap[frameNumber] = inflightReq;
2752 }
2753
2754 int32_t numRequestProcessed = 0;
2755 std::vector<BufferCache> cachesToRemove;
2756 ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2757 ASSERT_TRUE(ret.isOk());
2758 ASSERT_EQ(numRequestProcessed, 1u);
2759
2760 // Flush before waiting for request to complete.
2761 ndk::ScopedAStatus returnStatus = mSession->flush();
2762 ASSERT_TRUE(returnStatus.isOk());
2763
2764 {
2765 std::unique_lock<std::mutex> l(mLock);
2766 while (!inflightReq->errorCodeValid &&
2767 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2768 auto timeout = std::chrono::system_clock::now() +
2769 std::chrono::seconds(kStreamBufferTimeoutSec);
2770 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2771 }
2772
2773 if (!inflightReq->errorCodeValid) {
2774 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2775 ASSERT_EQ(previewStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId);
2776 } else {
2777 switch (inflightReq->errorCode) {
2778 case ErrorCode::ERROR_REQUEST:
2779 case ErrorCode::ERROR_RESULT:
2780 case ErrorCode::ERROR_BUFFER:
2781 // Expected
2782 break;
2783 case ErrorCode::ERROR_DEVICE:
2784 default:
2785 FAIL() << "Unexpected error:"
2786 << static_cast<uint32_t>(inflightReq->errorCode);
2787 }
2788 }
2789 }
2790
2791 if (useHalBufManager) {
2792 verifyBuffersReturned(mSession, previewStream.id, cb);
2793 }
2794
2795 ret = mSession->close();
2796 mSession = nullptr;
2797 ASSERT_TRUE(ret.isOk());
2798 }
2799 }
2800
2801 // Verify that camera flushes correctly without any pending requests.
2802 TEST_P(CameraAidlTest, flushEmpty) {
2803 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2804 std::vector<AvailableStream> outputPreviewStreams;
2805 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2806 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2807
2808 for (const auto& name : cameraDeviceNames) {
2809 Stream previewStream;
2810 std::vector<HalStream> halStreams;
2811 std::shared_ptr<DeviceCb> cb;
2812 bool supportsPartialResults = false;
2813 bool useHalBufManager = false;
2814
2815 int32_t partialResultCount = 0;
2816 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2817 &previewStream /*out*/, &halStreams /*out*/,
2818 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2819 &useHalBufManager /*out*/, &cb /*out*/);
2820
2821 ndk::ScopedAStatus returnStatus = mSession->flush();
2822 ASSERT_TRUE(returnStatus.isOk());
2823
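        // No requests are in flight, so no result callbacks are expected; the wait must time out.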
2824 {
2825 std::unique_lock<std::mutex> l(mLock);
2826 auto timeout = std::chrono::system_clock::now() +
2827 std::chrono::milliseconds(kEmptyFlushTimeoutMSec);
2828 ASSERT_EQ(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2829 }
2830
2831 ndk::ScopedAStatus ret = mSession->close();
2832 mSession = nullptr;
2833 ASSERT_TRUE(ret.isOk());
2834 }
2835 }
2836
2837 // Verify that the camera provider handles device state change notifications.
2838 TEST_P(CameraAidlTest, providerDeviceStateNotification) {
2839 notifyDeviceState(ICameraProvider::DEVICE_STATE_BACK_COVERED);
2840 notifyDeviceState(ICameraProvider::DEVICE_STATE_NORMAL);
2841 }
2842
2843 // Verify that all supported stream formats and sizes can be configured
2844 // successfully for the injection camera.
2845 TEST_P(CameraAidlTest, configureInjectionStreamsAvailableOutputs) {
2846 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2847 std::vector<AvailableStream> outputStreams;
2848
2849 for (const auto& name : cameraDeviceNames) {
2850 CameraMetadata metadata;
2851
2852 std::shared_ptr<ICameraInjectionSession> injectionSession;
2853 std::shared_ptr<ICameraDevice> unusedDevice;
2854 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2855 &unusedDevice /*out*/);
2856 if (injectionSession == nullptr) {
2857 continue;
2858 }
2859
2860 camera_metadata_t* staticMetaBuffer =
2861 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2862 CameraMetadata chars;
2863 chars.metadata = metadata.metadata;
2864
2865 outputStreams.clear();
2866 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
2867 ASSERT_NE(0u, outputStreams.size());
2868
2869 int32_t jpegBufferSize = 0;
2870 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
2871 ASSERT_NE(0u, jpegBufferSize);
2872
2873 int32_t streamId = 0;
2874 int32_t streamConfigCounter = 0;
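        // Configure each advertised output size and format as a single injection stream;
        // every configuration must succeed.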
2875 for (auto& it : outputStreams) {
2876 Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
2877 Stream stream = {streamId,
2878 StreamType::OUTPUT,
2879 it.width,
2880 it.height,
2881 static_cast<PixelFormat>(it.format),
2882 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2883 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2884 dataspace,
2885 StreamRotation::ROTATION_0,
2886 std::string(),
2887 jpegBufferSize,
2888 0,
2889 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2890 RequestAvailableDynamicRangeProfilesMap::
2891 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2892
2893 std::vector<Stream> streams = {stream};
2894 StreamConfiguration config;
2895 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2896 jpegBufferSize);
2897
2898 config.streamConfigCounter = streamConfigCounter++;
2899 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
2900 ASSERT_TRUE(s.isOk());
2901 streamId++;
2902 }
2903
2904 std::shared_ptr<ICameraDeviceSession> session;
2905 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2906 ASSERT_TRUE(ret.isOk());
2907 ASSERT_NE(session, nullptr);
2908 ret = session->close();
2909 ASSERT_TRUE(ret.isOk());
2910 }
2911 }
2912
2913 // Check for correct handling of invalid configuration parameters for the injection camera.
2914 TEST_P(CameraAidlTest, configureInjectionStreamsInvalidOutputs) {
2915 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2916 std::vector<AvailableStream> outputStreams;
2917
2918 for (const auto& name : cameraDeviceNames) {
2919 CameraMetadata metadata;
2920 std::shared_ptr<ICameraInjectionSession> injectionSession;
2921 std::shared_ptr<ICameraDevice> unusedDevice;
2922 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2923 &unusedDevice);
2924 if (injectionSession == nullptr) {
2925 continue;
2926 }
2927
2928 camera_metadata_t* staticMetaBuffer =
2929 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2930 std::shared_ptr<ICameraDeviceSession> session;
2931 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2932 ASSERT_TRUE(ret.isOk());
2933 ASSERT_NE(session, nullptr);
2934
2935 CameraMetadata chars;
2936 chars.metadata = metadata.metadata;
2937
2938 outputStreams.clear();
2939 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
2940 ASSERT_NE(0u, outputStreams.size());
2941
2942 int32_t jpegBufferSize = 0;
2943 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
2944 ASSERT_NE(0u, jpegBufferSize);
2945
2946 int32_t streamId = 0;
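        // A stream with zero width and height is invalid and must fail configuration.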
2947 Stream stream = {streamId++,
2948 StreamType::OUTPUT,
2949 0,
2950 0,
2951 static_cast<PixelFormat>(outputStreams[0].format),
2952 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2953 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2954 Dataspace::UNKNOWN,
2955 StreamRotation::ROTATION_0,
2956 std::string(),
2957 jpegBufferSize,
2958 0,
2959 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2960 RequestAvailableDynamicRangeProfilesMap::
2961 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2962
2963 int32_t streamConfigCounter = 0;
2964 std::vector<Stream> streams = {stream};
2965 StreamConfiguration config;
2966 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2967 jpegBufferSize);
2968
2969 config.streamConfigCounter = streamConfigCounter++;
2970 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
2971 ASSERT_TRUE(
2972 (static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) == s.getServiceSpecificError()) ||
2973 (static_cast<int32_t>(Status::INTERNAL_ERROR) == s.getServiceSpecificError()));
2974
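        // An out-of-range resolution must be rejected with ILLEGAL_ARGUMENT.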
2975 stream = {streamId++,
2976 StreamType::OUTPUT,
2977 INT32_MAX,
2978 INT32_MAX,
2979 static_cast<PixelFormat>(outputStreams[0].format),
2980 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2981 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2982 Dataspace::UNKNOWN,
2983 StreamRotation::ROTATION_0,
2984 std::string(),
2985 jpegBufferSize,
2986 0,
2987 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2988 RequestAvailableDynamicRangeProfilesMap::
2989 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2990
2991 streams[0] = stream;
2992 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2993 jpegBufferSize);
2994 config.streamConfigCounter = streamConfigCounter++;
2995 s = injectionSession->configureInjectionStreams(config, chars);
2996 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
2997
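        // For every supported output size, an invalid pixel format and an invalid
        // rotation must both be rejected.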
2998 for (auto& it : outputStreams) {
2999 stream = {streamId++,
3000 StreamType::OUTPUT,
3001 it.width,
3002 it.height,
3003 static_cast<PixelFormat>(INT32_MAX),
3004 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
3005 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
3006 Dataspace::UNKNOWN,
3007 StreamRotation::ROTATION_0,
3008 std::string(),
3009 jpegBufferSize,
3010 0,
3011 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
3012 RequestAvailableDynamicRangeProfilesMap::
3013 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
3014 streams[0] = stream;
3015 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
3016 jpegBufferSize);
3017 config.streamConfigCounter = streamConfigCounter++;
3018 s = injectionSession->configureInjectionStreams(config, chars);
3019 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
3020
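            // Same size and format, but with an out-of-range rotation.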
3021 stream = {streamId++,
3022 StreamType::OUTPUT,
3023 it.width,
3024 it.height,
3025 static_cast<PixelFormat>(it.format),
3026 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
3027 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
3028 Dataspace::UNKNOWN,
3029 static_cast<StreamRotation>(INT32_MAX),
3030 std::string(),
3031 jpegBufferSize,
3032 0,
3033 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
3034 RequestAvailableDynamicRangeProfilesMap::
3035 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
3036 streams[0] = stream;
3037 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
3038 jpegBufferSize);
3039 config.streamConfigCounter = streamConfigCounter++;
3040 s = injectionSession->configureInjectionStreams(config, chars);
3041 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
3042 }
3043
3044 ret = session->close();
3045 ASSERT_TRUE(ret.isOk());
3046 }
3047 }
3048
3049 // Check whether session parameters are supported for the injection camera. If the HAL
3050 // supports them, try to configure a preview stream using them.
3051 TEST_P(CameraAidlTest, configureInjectionStreamsWithSessionParameters) {
3052 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
3053 std::vector<AvailableStream> outputPreviewStreams;
3054 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
3055 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
3056
3057 for (const auto& name : cameraDeviceNames) {
3058 CameraMetadata metadata;
3059 std::shared_ptr<ICameraInjectionSession> injectionSession;
3060 std::shared_ptr<ICameraDevice> unusedDevice;
3061 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
3062 &unusedDevice /*out*/);
3063 if (injectionSession == nullptr) {
3064 continue;
3065 }
3066
3067 std::shared_ptr<ICameraDeviceSession> session;
3068 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
3069 ASSERT_TRUE(ret.isOk());
3070 ASSERT_NE(session, nullptr);
3071
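        // Clone the static metadata so it can be parsed here and released with
        // free_camera_metadata() at the end of the loop iteration.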
3072 camera_metadata_t* staticMetaBuffer = clone_camera_metadata(
3073         reinterpret_cast<camera_metadata_t*>(metadata.metadata.data()));
3074 CameraMetadata chars;
3075 chars.metadata = metadata.metadata;
3076
3077 std::unordered_set<int32_t> availableSessionKeys;
3078 Status rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
3079 &availableSessionKeys);
3080 ASSERT_EQ(Status::OK, rc);
3081 if (availableSessionKeys.empty()) {
3082 ret = session->close();
3083 ASSERT_TRUE(ret.isOk());
3084 continue;
3085 }
3086
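        // Construct default preview settings and filter out the subset that are valid
        // session parameters for this device.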
3087 android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
3088 android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
3089 modifiedSessionParams;
3090 constructFilteredSettings(session, availableSessionKeys, RequestTemplate::PREVIEW,
3091 &previewRequestSettings, &sessionParams);
3092 if (sessionParams.isEmpty()) {
3093 ret = session->close();
3094 ASSERT_TRUE(ret.isOk());
3095 continue;
3096 }
3097
3098 outputPreviewStreams.clear();
3099
3100 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
3101 &previewThreshold));
3102 ASSERT_NE(0u, outputPreviewStreams.size());
3103
3104 Stream previewStream = {
3105 0,
3106 StreamType::OUTPUT,
3107 outputPreviewStreams[0].width,
3108 outputPreviewStreams[0].height,
3109 static_cast<PixelFormat>(outputPreviewStreams[0].format),
3110 static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
3111 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
3112 Dataspace::UNKNOWN,
3113 StreamRotation::ROTATION_0,
3114 std::string(),
3115 0,
3116 -1,
3117 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
3118 RequestAvailableDynamicRangeProfilesMap::
3119 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
3120 std::vector<Stream> streams = {previewStream};
3121 StreamConfiguration config;
3122 config.streams = streams;
3123 config.operationMode = StreamConfigurationMode::NORMAL_MODE;
3124
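        // Serialize the filtered session parameters into the stream configuration.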
3125 modifiedSessionParams = sessionParams;
3126 camera_metadata_t* sessionParamsBuffer = sessionParams.release();
3127 uint8_t* rawSessionParamsBuffer = reinterpret_cast<uint8_t*>(sessionParamsBuffer);
3128 config.sessionParams.metadata =
3129 std::vector(rawSessionParamsBuffer,
3130 rawSessionParamsBuffer + get_camera_metadata_size(sessionParamsBuffer));
3131
3132 config.streamConfigCounter = 0;
3134 config.multiResolutionInputImage = false;
3135
3136 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
3137 ASSERT_TRUE(s.isOk());
3138
3139 sessionParams.acquire(sessionParamsBuffer);
3140 free_camera_metadata(staticMetaBuffer);
3141 ret = session->close();
3142 ASSERT_TRUE(ret.isOk());
3143 }
3144 }
3145
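// Verify stream use case configuration for RAW16 streams, covering the cropped RAW use case.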
3146 TEST_P(CameraAidlTest, configureStreamsUseCasesCroppedRaw) {
3147 AvailableStream rawStreamThreshold =
3148 {INT_MAX, INT_MAX, static_cast<int32_t>(PixelFormat::RAW16)};
3149 configureStreamUseCaseInternal(rawStreamThreshold);
3150 }
3151
3152 // Verify that valid stream use cases can be configured successfully, and invalid use cases
3153 // fail stream configuration.
3154 TEST_P(CameraAidlTest, configureStreamsUseCases) {
3155 AvailableStream previewStreamThreshold =
3156 {kMaxPreviewWidth, kMaxPreviewHeight, static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
3157 configureStreamUseCaseInternal(previewStreamThreshold);
3158 }
3159
3160 // Validate the integrity of stream configuration metadata
3161 TEST_P(CameraAidlTest, validateStreamConfigurations) {
3162 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
3163 std::vector<AvailableStream> outputStreams;
3164
3165 const int32_t scalerSizesTag = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
3166 const int32_t scalerMinFrameDurationsTag = ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS;
3167 const int32_t scalerStallDurationsTag = ANDROID_SCALER_AVAILABLE_STALL_DURATIONS;
3168
3169 for (const auto& name : cameraDeviceNames) {
3170 CameraMetadata meta;
3171 std::shared_ptr<ICameraDevice> cameraDevice;
3172
3173 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
3174 &cameraDevice /*out*/);
3175 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
3176
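        // On 10-bit dynamic range capable devices, the advertised P010 and BLOB stream
        // sizes and their min frame/stall durations must be consistent.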
3177 if (is10BitDynamicRangeCapable(staticMeta)) {
3178 std::vector<std::tuple<size_t, size_t>> supportedP010Sizes, supportedBlobSizes;
3179
3180 getSupportedSizes(staticMeta, scalerSizesTag, HAL_PIXEL_FORMAT_BLOB,
3181 &supportedBlobSizes);
3182 getSupportedSizes(staticMeta, scalerSizesTag, HAL_PIXEL_FORMAT_YCBCR_P010,
3183 &supportedP010Sizes);
3184 ASSERT_FALSE(supportedP010Sizes.empty());
3185
3186 std::vector<int64_t> blobMinDurations, blobStallDurations;
3187 getSupportedDurations(staticMeta, scalerMinFrameDurationsTag, HAL_PIXEL_FORMAT_BLOB,
3188 supportedP010Sizes, &blobMinDurations);
3189 getSupportedDurations(staticMeta, scalerStallDurationsTag, HAL_PIXEL_FORMAT_BLOB,
3190 supportedP010Sizes, &blobStallDurations);
3191 ASSERT_FALSE(blobStallDurations.empty());
3192 ASSERT_FALSE(blobMinDurations.empty());
3193 ASSERT_EQ(supportedP010Sizes.size(), blobMinDurations.size());
3194 ASSERT_EQ(blobMinDurations.size(), blobStallDurations.size());
3195 }
3196
3197 // TODO (b/280887191): Validate other aspects of stream configuration metadata...
3198
3199 ndk::ScopedAStatus ret = mSession->close();
3200 mSession = nullptr;
3201 ASSERT_TRUE(ret.isOk());
3202 }
3203 }
3204
3205 GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(CameraAidlTest);
3206 INSTANTIATE_TEST_SUITE_P(
3207 PerInstance, CameraAidlTest,
3208 testing::ValuesIn(android::getAidlHalInstanceNames(ICameraProvider::descriptor)),
3209 android::hardware::PrintInstanceNameToString);
3210