1 /*
2 * Copyright (C) 2022 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "device_cb.h"
18
19 #include <aidl/android/hardware/graphics/common/PixelFormat.h>
20 #include <aidlcommonsupport/NativeHandle.h>
21 #include <grallocusage/GrallocUsageConversion.h>
22 #include <cinttypes>
23 #include <nativebase/nativebase.h>
24
25 using ::aidl::android::hardware::camera::device::BufferStatus;
26 using ::aidl::android::hardware::camera::device::ErrorMsg;
27 using ::aidl::android::hardware::camera::device::StreamBufferRequestError;
28 using ::aidl::android::hardware::camera::device::StreamBuffersVal;
29 using ::aidl::android::hardware::graphics::common::PixelFormat;
30
// Maximum time to wait in waitForBuffersReturned() for the HAL to hand back
// all outstanding stream buffers before the test reports a timeout failure.
const int64_t kBufferReturnTimeoutSec = 1;
32
DeviceCb(CameraAidlTest * parent,camera_metadata_t * staticMeta)33 DeviceCb::DeviceCb(CameraAidlTest* parent, camera_metadata_t* staticMeta) : mParent(parent) {
34 mStaticMetadata = staticMeta;
35 parent->mSupportReadoutTimestamp = CameraAidlTest::isReadoutTimestampSupported(staticMeta);
36 }
37
notify(const std::vector<NotifyMsg> & msgs)38 ScopedAStatus DeviceCb::notify(const std::vector<NotifyMsg>& msgs) {
39 std::vector<nsecs_t> readoutTimestamps;
40
41 size_t count = msgs.size();
42 readoutTimestamps.resize(count);
43
44 for (size_t i = 0; i < count; i++) {
45 const NotifyMsg& msg = msgs[i];
46 switch (msg.getTag()) {
47 case NotifyMsg::Tag::error:
48 readoutTimestamps[i] = 0;
49 break;
50 case NotifyMsg::Tag::shutter:
51 const auto& shutter = msg.get<NotifyMsg::Tag::shutter>();
52 readoutTimestamps[i] = shutter.readoutTimestamp;
53 break;
54 }
55 }
56
57 return notifyHelper(msgs, readoutTimestamps);
58 }
59
processCaptureResult(const std::vector<CaptureResult> & results)60 ScopedAStatus DeviceCb::processCaptureResult(const std::vector<CaptureResult>& results) {
61 if (nullptr == mParent) {
62 return ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
63 }
64
65 bool notify = false;
66 std::unique_lock<std::mutex> l(mParent->mLock);
67 for (const auto& result : results) {
68 notify = processCaptureResultLocked(result, result.physicalCameraMetadata);
69 }
70
71 l.unlock();
72 if (notify) {
73 mParent->mResultCondition.notify_one();
74 }
75
76 return ndk::ScopedAStatus::ok();
77 }
78
requestStreamBuffers(const std::vector<BufferRequest> & bufReqs,std::vector<StreamBufferRet> * buffers,BufferRequestStatus * _aidl_return)79 ScopedAStatus DeviceCb::requestStreamBuffers(const std::vector<BufferRequest>& bufReqs,
80 std::vector<StreamBufferRet>* buffers,
81 BufferRequestStatus* _aidl_return) {
82 std::vector<StreamBufferRet>& bufRets = *buffers;
83 std::unique_lock<std::mutex> l(mLock);
84
85 if (!mUseHalBufManager) {
86 ALOGE("%s: Camera does not support HAL buffer management", __FUNCTION__);
87 ADD_FAILURE();
88 *_aidl_return = BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS;
89 return ScopedAStatus::ok();
90 }
91
92 if (bufReqs.size() > mStreams.size()) {
93 ALOGE("%s: illegal buffer request: too many requests!", __FUNCTION__);
94 ADD_FAILURE();
95 *_aidl_return = BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS;
96 return ndk::ScopedAStatus::ok();
97 }
98
99 std::vector<size_t> indexes(bufReqs.size());
100 for (size_t i = 0; i < bufReqs.size(); i++) {
101 bool found = false;
102 for (size_t idx = 0; idx < mStreams.size(); idx++) {
103 if (bufReqs[i].streamId == mStreams[idx].id) {
104 found = true;
105 indexes[i] = idx;
106 break;
107 }
108 }
109 if (!found) {
110 ALOGE("%s: illegal buffer request: unknown streamId %d!", __FUNCTION__,
111 bufReqs[i].streamId);
112 ADD_FAILURE();
113 *_aidl_return = BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS;
114 return ScopedAStatus::ok();
115 }
116 }
117
118 bool allStreamOk = true;
119 bool atLeastOneStreamOk = false;
120 bufRets.resize(bufReqs.size());
121
122 for (size_t i = 0; i < bufReqs.size(); i++) {
123 size_t idx = indexes[i];
124 const auto& stream = mStreams[idx];
125 const auto& halStream = mHalStreams[idx];
126 const BufferRequest& bufReq = bufReqs[i];
127
128 if (mOutstandingBufferIds[idx].size() + bufReq.numBuffersRequested > halStream.maxBuffers) {
129 bufRets[i].streamId = stream.id;
130 bufRets[i].val.set<StreamBuffersVal::Tag::error>(
131 StreamBufferRequestError::MAX_BUFFER_EXCEEDED);
132 allStreamOk = false;
133 continue;
134 }
135
136 std::vector<StreamBuffer> tmpRetBuffers(bufReq.numBuffersRequested);
137 for (size_t j = 0; j < bufReq.numBuffersRequested; j++) {
138 buffer_handle_t handle;
139 uint32_t w = stream.width;
140 uint32_t h = stream.height;
141 if (stream.format == PixelFormat::BLOB) {
142 w = stream.bufferSize;
143 h = 1;
144 }
145
146 CameraAidlTest::allocateGraphicBuffer(
147 w, h,
148 ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(static_cast<uint64_t>(halStream.producerUsage),
149 static_cast<uint64_t>(halStream.consumerUsage))),
150 halStream.overrideFormat, &handle);
151
152 StreamBuffer streamBuffer = StreamBuffer();
153 StreamBuffer& sb = tmpRetBuffers[j];
154 sb = {
155 stream.id, mNextBufferId, ::android::dupToAidl(handle),
156 BufferStatus::OK, NativeHandle(), NativeHandle(),
157 };
158
159 mOutstandingBufferIds[idx][mNextBufferId++] = handle;
160 }
161 atLeastOneStreamOk = true;
162 bufRets[i].streamId = stream.id;
163 bufRets[i].val.set<StreamBuffersVal::Tag::buffers>(std::move(tmpRetBuffers));
164 }
165
166 if (allStreamOk) {
167 *_aidl_return = BufferRequestStatus::OK;
168 } else if (atLeastOneStreamOk) {
169 *_aidl_return = BufferRequestStatus::FAILED_PARTIAL;
170 } else {
171 *_aidl_return = BufferRequestStatus::FAILED_UNKNOWN;
172 }
173
174 if (!hasOutstandingBuffersLocked()) {
175 l.unlock();
176 mFlushedCondition.notify_one();
177 }
178
179 return ndk::ScopedAStatus::ok();
180 }
181
returnStreamBuffers(const std::vector<StreamBuffer> & buffers)182 ScopedAStatus DeviceCb::returnStreamBuffers(const std::vector<StreamBuffer>& buffers) {
183 if (!mUseHalBufManager) {
184 ALOGE("%s: Camera does not support HAL buffer management", __FUNCTION__);
185 ADD_FAILURE();
186 }
187
188 std::unique_lock<std::mutex> l(mLock);
189 for (const auto& buf : buffers) {
190 if (buf.bufferId == 0) {
191 // Don't return buffers of bufId 0 (empty buffer)
192 continue;
193 }
194 bool found = false;
195 for (size_t idx = 0; idx < mOutstandingBufferIds.size(); idx++) {
196 if (mStreams[idx].id == buf.streamId &&
197 mOutstandingBufferIds[idx].count(buf.bufferId) == 1) {
198 mOutstandingBufferIds[idx].erase(buf.bufferId);
199 // TODO: check do we need to close/delete native handle or assume we have enough
200 // memory to run till the test finish? since we do not capture much requests (and
201 // most of time one buffer is sufficient)
202 found = true;
203 break;
204 }
205 }
206 if (found) {
207 continue;
208 }
209 ALOGE("%s: unknown buffer ID %" PRIu64, __FUNCTION__, buf.bufferId);
210 ADD_FAILURE();
211 }
212 if (!hasOutstandingBuffersLocked()) {
213 l.unlock();
214 mFlushedCondition.notify_one();
215 }
216
217 return ndk::ScopedAStatus::ok();
218 }
219
setCurrentStreamConfig(const std::vector<Stream> & streams,const std::vector<HalStream> & halStreams)220 void DeviceCb::setCurrentStreamConfig(const std::vector<Stream>& streams,
221 const std::vector<HalStream>& halStreams) {
222 ASSERT_EQ(streams.size(), halStreams.size());
223 ASSERT_NE(streams.size(), 0);
224 for (size_t i = 0; i < streams.size(); i++) {
225 ASSERT_EQ(streams[i].id, halStreams[i].id);
226 }
227 std::lock_guard<std::mutex> l(mLock);
228 mUseHalBufManager = true;
229 mStreams = streams;
230 mHalStreams = halStreams;
231 mOutstandingBufferIds.clear();
232 for (size_t i = 0; i < streams.size(); i++) {
233 mOutstandingBufferIds.emplace_back();
234 }
235 }
236
waitForBuffersReturned()237 void DeviceCb::waitForBuffersReturned() {
238 std::unique_lock<std::mutex> lk(mLock);
239 if (hasOutstandingBuffersLocked()) {
240 auto timeout = std::chrono::seconds(kBufferReturnTimeoutSec);
241 auto st = mFlushedCondition.wait_for(lk, timeout);
242 ASSERT_NE(std::cv_status::timeout, st);
243 }
244 }
245
// Validates a single CaptureResult against its in-flight request and records
// the delivered metadata and buffers. Caller must hold mParent->mLock.
// Returns true when the waiter should be notified (the shutter for this frame
// has already arrived); any validation failure is reported via ADD_FAILURE()
// and returns the current notify value.
// NOTE(review): physicalCameraMetadata is taken by value (copied per call);
// the signature is declared in the header, so it is left unchanged here.
bool DeviceCb::processCaptureResultLocked(
        const CaptureResult& results, std::vector<PhysicalCameraMetadata> physicalCameraMetadata) {
    bool notify = false;
    uint32_t frameNumber = results.frameNumber;

    // A result must carry at least one of: inline metadata, FMQ metadata,
    // output buffers, or an input buffer.
    if ((results.result.metadata.empty()) && (results.outputBuffers.empty()) &&
        (results.inputBuffer.buffer.fds.empty()) && (results.fmqResultSize == 0)) {
        ALOGE("%s: No result data provided by HAL for frame %d result count: %d", __func__,
              frameNumber, (int)results.fmqResultSize);
        ADD_FAILURE();
        return notify;
    }

    // The frame number must match a request the test issued.
    auto requestEntry = mParent->mInflightMap.find(frameNumber);
    if (requestEntry == mParent->mInflightMap.end()) {
        ALOGE("%s: Unexpected frame number! received: %u", __func__, frameNumber);
        ADD_FAILURE();
        return notify;
    }

    bool isPartialResult = false;
    bool hasInputBufferInRequest = false;
    auto& request = requestEntry->second;

    CameraMetadata resultMetadata;
    size_t resultSize = 0;
    if (results.fmqResultSize > 0) {
        // Metadata was delivered via the fast message queue; drain it.
        resultMetadata.metadata.resize(results.fmqResultSize);
        if (request->resultQueue == nullptr) {
            ADD_FAILURE();
            return notify;
        }

        if (!request->resultQueue->read(reinterpret_cast<int8_t*>(resultMetadata.metadata.data()),
                                        results.fmqResultSize)) {
            ALOGE("%s: Frame %d: Cannot read camera metadata from fmq,"
                  "size = %" PRIu64,
                  __func__, frameNumber, results.fmqResultSize);
            ADD_FAILURE();
            return notify;
        }

        // Physical device results are only expected in the last/final
        // partial result notification.
        bool expectPhysicalResults = !(request->usePartialResult &&
                                       (results.partialResult < request->numPartialResults));
        if (expectPhysicalResults &&
            (physicalCameraMetadata.size() != request->expectedPhysicalResults.size())) {
            ALOGE("%s: Frame %d: Returned physical metadata count %zu "
                  "must be equal to expected count %zu",
                  __func__, frameNumber, physicalCameraMetadata.size(),
                  request->expectedPhysicalResults.size());
            ADD_FAILURE();
            return notify;
        }
        // Drain the per-physical-camera metadata from the FMQ as well; the
        // bytes are read into local buffers but only readability is checked.
        std::vector<std::vector<uint8_t>> physResultMetadata;
        physResultMetadata.resize(physicalCameraMetadata.size());
        for (size_t i = 0; i < physicalCameraMetadata.size(); i++) {
            physResultMetadata[i].resize(physicalCameraMetadata[i].fmqMetadataSize);
            if (!request->resultQueue->read(reinterpret_cast<int8_t*>(physResultMetadata[i].data()),
                                            physicalCameraMetadata[i].fmqMetadataSize)) {
                ALOGE("%s: Frame %d: Cannot read physical camera metadata from fmq,"
                      "size = %" PRIu64,
                      __func__, frameNumber, physicalCameraMetadata[i].fmqMetadataSize);
                ADD_FAILURE();
                return notify;
            }
        }
        resultSize = resultMetadata.metadata.size();
    } else if (!results.result.metadata.empty()) {
        // Metadata delivered inline in the result structure.
        resultMetadata = results.result;
        resultSize = resultMetadata.metadata.size();
    }

    // Without partial-result support, a metadata-carrying result must have
    // partialResult == 1.
    if (!request->usePartialResult && (resultSize > 0) && (results.partialResult != 1)) {
        ALOGE("%s: Result is malformed for frame %d: partial_result %u "
              "must be 1 if partial result is not supported",
              __func__, frameNumber, results.partialResult);
        ADD_FAILURE();
        return notify;
    }

    if (results.partialResult != 0) {
        request->partialResultCount = results.partialResult;
    }

    // Check if this result carries only partial metadata
    if (request->usePartialResult && (resultSize > 0)) {
        // partialResult must stay within [1, numPartialResults].
        if ((results.partialResult > request->numPartialResults) || (results.partialResult < 1)) {
            ALOGE("%s: Result is malformed for frame %d: partial_result %u"
                  " must be in the range of [1, %d] when metadata is "
                  "included in the result",
                  __func__, frameNumber, results.partialResult, request->numPartialResults);
            ADD_FAILURE();
            return notify;
        }

        // Verify no duplicate tags between partial results
        const camera_metadata_t* partialMetadata =
                reinterpret_cast<const camera_metadata_t*>(resultMetadata.metadata.data());
        const camera_metadata_t* collectedMetadata = request->collectedResult.getAndLock();
        camera_metadata_ro_entry_t searchEntry, foundEntry;
        for (size_t i = 0; i < get_camera_metadata_entry_count(partialMetadata); i++) {
            if (0 != get_camera_metadata_ro_entry(partialMetadata, i, &searchEntry)) {
                ADD_FAILURE();
                request->collectedResult.unlock(collectedMetadata);
                return notify;
            }
            // A tag already present in the collected result means the HAL
            // reported it twice across partials — a failure.
            if (-ENOENT !=
                find_camera_metadata_ro_entry(collectedMetadata, searchEntry.tag, &foundEntry)) {
                ADD_FAILURE();
                request->collectedResult.unlock(collectedMetadata);
                return notify;
            }
        }
        request->collectedResult.unlock(collectedMetadata);
        request->collectedResult.append(partialMetadata);

        // Final partial carries partialResult == numPartialResults.
        isPartialResult = (results.partialResult < request->numPartialResults);
    } else if (resultSize > 0) {
        request->collectedResult.append(
                reinterpret_cast<const camera_metadata_t*>(resultMetadata.metadata.data()));
        isPartialResult = false;
    }

    hasInputBufferInRequest = request->hasInputBuffer;

    // Did we get the (final) result metadata for this capture?
    if ((resultSize > 0) && !isPartialResult) {
        // Final metadata may arrive only once per frame.
        if (request->haveResultMetadata) {
            ALOGE("%s: Called multiple times with metadata for frame %d", __func__, frameNumber);
            ADD_FAILURE();
            return notify;
        }
        request->haveResultMetadata = true;
        request->collectedResult.sort();

        // Verify final result metadata
        camera_metadata_t* staticMetadataBuffer = mStaticMetadata;
        bool isMonochrome = Status::OK == CameraAidlTest::isMonochromeCamera(staticMetadataBuffer);
        if (isMonochrome) {
            CameraAidlTest::verifyMonochromeCameraResult(request->collectedResult);
        }

        // Verify logical camera result metadata
        bool isLogicalCamera =
                Status::OK == CameraAidlTest::isLogicalMultiCamera(staticMetadataBuffer);
        camera_metadata_t* collectedMetadata =
                const_cast<camera_metadata_t*>(request->collectedResult.getAndLock());
        uint8_t* rawMetadata = reinterpret_cast<uint8_t*>(collectedMetadata);
        std::vector metadata =
                std::vector(rawMetadata, rawMetadata + get_camera_metadata_size(collectedMetadata));
        if (isLogicalCamera) {
            CameraAidlTest::verifyLogicalCameraResult(staticMetadataBuffer, metadata);
        }
        CameraAidlTest::verifyLensIntrinsicsResult(metadata);
        request->collectedResult.unlock(collectedMetadata);
    }

    // Count returned buffers; a populated input buffer counts only when the
    // request actually sent one.
    uint32_t numBuffersReturned = results.outputBuffers.size();
    auto& inputBuffer = results.inputBuffer.buffer;
    if (!inputBuffer.fds.empty() && !inputBuffer.ints.empty()) {
        if (hasInputBufferInRequest) {
            numBuffersReturned += 1;
        } else {
            ALOGW("%s: Input buffer should be NULL if there is no input"
                  " buffer sent in the request",
                  __func__);
        }
    }
    request->numBuffersLeft -= numBuffersReturned;
    if (request->numBuffersLeft < 0) {
        ALOGE("%s: Too many buffers returned for frame %d", __func__, frameNumber);
        ADD_FAILURE();
        return notify;
    }

    for (const auto& buffer : results.outputBuffers) {
        CameraAidlTest::InFlightRequest::StreamBufferAndTimestamp streamBufferAndTimestamp;
        // NOTE(review): this copies the outstanding-buffer container on every
        // iteration, and operator[] below may insert into that local copy
        // (discarded at end of iteration). Left untouched to preserve exact
        // behavior — confirm intent upstream before changing to a reference.
        auto outstandingBuffers = mUseHalBufManager ? mOutstandingBufferIds :
                                                      request->mOutstandingBufferIds;
        // Without HAL buffer management, buffers are keyed by frame number.
        auto bufferId = mUseHalBufManager ? buffer.bufferId : results.frameNumber;
        auto outputBuffer = outstandingBuffers.empty() ? ::android::makeFromAidl(buffer.buffer) :
                                                         outstandingBuffers[buffer.streamId][bufferId];
        streamBufferAndTimestamp.buffer = {buffer.streamId,
                                           bufferId,
                                           outputBuffer,
                                           buffer.status,
                                           ::android::dupFromAidl(buffer.acquireFence),
                                           ::android::dupFromAidl(buffer.releaseFence)};
        streamBufferAndTimestamp.timeStamp = systemTime();
        request->resultOutputBuffers.push_back(streamBufferAndTimestamp);
    }
    // If shutter event is received notify the pending threads.
    if (request->shutterTimestamp != 0) {
        notify = true;
    }

    if (mUseHalBufManager) {
        returnStreamBuffers(results.outputBuffers);
    }
    return notify;
}
449
// Processes notify() messages under mParent->mLock: records error state or
// shutter timestamps on the matching in-flight requests, then wakes the
// result-waiting thread unconditionally.
// @param msgs               notification messages from the HAL
// @param readoutTimestamps  readout timestamp per message, index-aligned with
//                           msgs (0 for error messages)
ScopedAStatus DeviceCb::notifyHelper(const std::vector<NotifyMsg>& msgs,
                                     const std::vector<nsecs_t>& readoutTimestamps) {
    std::lock_guard<std::mutex> l(mParent->mLock);

    for (size_t i = 0; i < msgs.size(); i++) {
        const NotifyMsg& msg = msgs[i];
        NotifyMsg::Tag msgTag = msgs[i].getTag();
        switch (msgTag) {
            case NotifyMsg::Tag::error:
                // ERROR_DEVICE is always a test failure; any other error is
                // matched to its in-flight request.
                if (ErrorCode::ERROR_DEVICE == msg.get<NotifyMsg::Tag::error>().errorCode) {
                    ALOGE("%s: Camera reported serious device error", __func__);
                    ADD_FAILURE();
                } else {
                    auto itr = mParent->mInflightMap.find(
                            msg.get<NotifyMsg::Tag::error>().frameNumber);
                    if (itr == mParent->mInflightMap.end()) {
                        ALOGE("%s: Unexpected error frame number! received: %u", __func__,
                              msg.get<NotifyMsg::Tag::error>().frameNumber);
                        ADD_FAILURE();
                        break;
                    }

                    auto r = itr->second;
                    // ERROR_RESULT with a valid stream id is a physical-camera
                    // result error: it must arrive before the final capture
                    // result and name an expected physical camera.
                    if (ErrorCode::ERROR_RESULT == msg.get<NotifyMsg::Tag::error>().errorCode &&
                        msg.get<NotifyMsg::Tag::error>().errorStreamId != -1) {
                        if (r->haveResultMetadata) {
                            ALOGE("%s: Camera must report physical camera result error before "
                                  "the final capture result!",
                                  __func__);
                            ADD_FAILURE();
                        } else {
                            // Locate the stream the error names and remove its
                            // physical camera from the expected-results set.
                            for (auto& mStream : mStreams) {
                                if (mStream.id == msg.get<NotifyMsg::Tag::error>().errorStreamId) {
                                    std::string physicalCameraId = mStream.physicalCameraId;
                                    bool idExpected =
                                            r->expectedPhysicalResults.find(physicalCameraId) !=
                                            r->expectedPhysicalResults.end();
                                    if (!idExpected) {
                                        ALOGE("%s: ERROR_RESULT's error stream's physicalCameraId "
                                              "%s must be expected",
                                              __func__, physicalCameraId.c_str());
                                        ADD_FAILURE();
                                    } else {
                                        r->expectedPhysicalResults.erase(physicalCameraId);
                                    }
                                    break;
                                }
                            }
                        }
                    } else {
                        // Any other per-frame error: record it on the request
                        // so the waiting thread can observe it.
                        r->errorCodeValid = true;
                        r->errorCode = msg.get<NotifyMsg::Tag::error>().errorCode;
                        r->errorStreamId = msg.get<NotifyMsg::Tag::error>().errorStreamId;
                    }
                }
                break;
            case NotifyMsg::Tag::shutter:
                // Record start-of-exposure and readout timestamps on the
                // matching in-flight request.
                auto itr =
                        mParent->mInflightMap.find(msg.get<NotifyMsg::Tag::shutter>().frameNumber);
                if (itr == mParent->mInflightMap.end()) {
                    ALOGE("%s: Unexpected shutter frame number! received: %u", __func__,
                          msg.get<NotifyMsg::Tag::shutter>().frameNumber);
                    ADD_FAILURE();
                    break;
                }
                auto& r = itr->second;
                r->shutterTimestamp = msg.get<NotifyMsg::Tag::shutter>().timestamp;
                r->shutterReadoutTimestamp = readoutTimestamps[i];
                break;
        }
    }

    mParent->mResultCondition.notify_one();
    return ScopedAStatus::ok();
}
525
hasOutstandingBuffersLocked()526 bool DeviceCb::hasOutstandingBuffersLocked() {
527 if (!mUseHalBufManager) {
528 return false;
529 }
530 for (const auto& outstandingBuffers : mOutstandingBufferIds) {
531 if (!outstandingBuffers.empty()) {
532 return true;
533 }
534 }
535 return false;
536 }
537