/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera3-JpegRCompositeStream"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <aidl/android/hardware/camera/device/CameraBlob.h>
#include <aidl/android/hardware/camera/device/CameraBlobId.h>

#include "common/CameraProviderManager.h"
#include "utils/SessionConfigurationUtils.h"

#include <com_android_graphics_libgui_flags.h>
#include <gui/Surface.h>
#include <hardware/gralloc.h>
#include <system/graphics-base-v1.0.h>
#include <system/graphics-base-v1.1.h>
#include <ultrahdr/jpegr.h>
#include <utils/ExifUtils.h>
#include <utils/Log.h>
#include <utils/Trace.h>

#include "JpegRCompositeStream.h"

namespace android {
namespace camera3 {

using aidl::android::hardware::camera::device::CameraBlob;
using aidl::android::hardware::camera::device::CameraBlobId;

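// Cache the static JPEG size limits up front so output buffer sizing does not
// need to re-query the camera characteristics for every capture request.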
JpegRCompositeStream::JpegRCompositeStream(sp<CameraDeviceBase> device,
        wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
        CompositeStream(device, cb),
        mBlobStreamId(-1),
        mBlobSurfaceId(-1),
        mP010StreamId(-1),
        mP010SurfaceId(-1),
        mBlobWidth(0),
        mBlobHeight(0),
        mP010BufferAcquired(false),
        mBlobBufferAcquired(false),
        mOutputColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
        mOutputStreamUseCase(0),
        mFirstRequestLatency(-1),
        mStreamSurfaceListener(new StreamSurfaceListener()),
        mMaxJpegBufferSize(-1),
        mUHRMaxJpegBufferSize(-1),
        mStaticInfo(device->info()) {
    auto entry = mStaticInfo.find(ANDROID_JPEG_MAX_SIZE);
    if (entry.count > 0) {
        mMaxJpegBufferSize = entry.data.i32[0];
    } else {
        ALOGW("%s: Maximum jpeg size absent from camera characteristics", __FUNCTION__);
    }

    mUHRMaxJpegSize =
            SessionConfigurationUtils::getMaxJpegResolution(mStaticInfo,
                    /*ultraHighResolution*/true);
    mDefaultMaxJpegSize =
            SessionConfigurationUtils::getMaxJpegResolution(mStaticInfo,
                    /*isUltraHighResolution*/false);

    mUHRMaxJpegBufferSize =
            SessionConfigurationUtils::getUHRMaxJpegBufferSize(mUHRMaxJpegSize, mDefaultMaxJpegSize,
                    mMaxJpegBufferSize);
}

JpegRCompositeStream::~JpegRCompositeStream() {
    mBlobConsumer.clear();
    mBlobSurface.clear();
    mBlobStreamId = -1;
    mBlobSurfaceId = -1;
    mP010Consumer.clear();
    mP010Surface.clear();
    mP010Consumer = nullptr;
    mP010Surface = nullptr;
}

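// Collect all newly arrived buffers, capture results, and error notifications
// into 'mPendingInputFrames', keyed by sensor timestamp, so matching JPEG and
// P010 inputs can later be combined into a single JPEG/R frame.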
void JpegRCompositeStream::compilePendingInputLocked() {
    CpuConsumer::LockedBuffer imgBuffer;

    while (mSupportInternalJpeg && !mInputJpegBuffers.empty() && !mBlobBufferAcquired) {
        auto it = mInputJpegBuffers.begin();
        auto res = mBlobConsumer->lockNextBuffer(&imgBuffer);
        if (res == NOT_ENOUGH_DATA) {
            // Can not lock any more buffers.
            break;
        } else if (res != OK) {
            ALOGE("%s: Error locking blob image buffer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            mPendingInputFrames[*it].error = true;
            mInputJpegBuffers.erase(it);
            continue;
        }

        if (*it != imgBuffer.timestamp) {
            ALOGW("%s: Expecting jpeg buffer with time stamp: %" PRId64 " received buffer with "
                    "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
        }

        if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
                (mPendingInputFrames[imgBuffer.timestamp].error)) {
            mBlobConsumer->unlockBuffer(imgBuffer);
        } else {
            mPendingInputFrames[imgBuffer.timestamp].jpegBuffer = imgBuffer;
            mBlobBufferAcquired = true;
        }
        mInputJpegBuffers.erase(it);
    }

    while (!mInputP010Buffers.empty() && !mP010BufferAcquired) {
        auto it = mInputP010Buffers.begin();
        auto res = mP010Consumer->lockNextBuffer(&imgBuffer);
        if (res == NOT_ENOUGH_DATA) {
            // Can not lock any more buffers.
            break;
        } else if (res != OK) {
            ALOGE("%s: Error receiving P010 image buffer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            mPendingInputFrames[*it].error = true;
            mInputP010Buffers.erase(it);
            continue;
        }

        if (*it != imgBuffer.timestamp) {
            ALOGW("%s: Expecting P010 buffer with time stamp: %" PRId64 " received buffer with "
                    "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
        }

        if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
                (mPendingInputFrames[imgBuffer.timestamp].error)) {
            mP010Consumer->unlockBuffer(imgBuffer);
        } else {
            mPendingInputFrames[imgBuffer.timestamp].p010Buffer = imgBuffer;
            mP010BufferAcquired = true;
        }
        mInputP010Buffers.erase(it);
    }

    while (!mCaptureResults.empty()) {
        auto it = mCaptureResults.begin();
        // Negative timestamp indicates that something went wrong during the capture result
        // collection process.
        if (it->first >= 0) {
            auto frameNumber = std::get<0>(it->second);
            mPendingInputFrames[it->first].frameNumber = frameNumber;
            mPendingInputFrames[it->first].result = std::get<1>(it->second);
            mSessionStatsBuilder.incResultCounter(false /*dropped*/);
        }
        mCaptureResults.erase(it);
    }

    while (!mFrameNumberMap.empty()) {
        auto it = mFrameNumberMap.begin();
        auto frameNumber = it->first;
        mPendingInputFrames[it->second].frameNumber = frameNumber;
        auto requestTimeIt = mRequestTimeMap.find(frameNumber);
        if (requestTimeIt != mRequestTimeMap.end()) {
            mPendingInputFrames[it->second].requestTimeNs = requestTimeIt->second;
            mRequestTimeMap.erase(requestTimeIt);
        }
        mFrameNumberMap.erase(it);
    }

    auto it = mErrorFrameNumbers.begin();
    while (it != mErrorFrameNumbers.end()) {
        bool frameFound = false;
        for (auto &inputFrame : mPendingInputFrames) {
            if (inputFrame.second.frameNumber == *it) {
                inputFrame.second.error = true;
                frameFound = true;
                break;
            }
        }

        if (frameFound) {
            mSessionStatsBuilder.incCounter(mP010StreamId, true /*dropped*/,
                    0 /*captureLatencyMs*/);
            it = mErrorFrameNumbers.erase(it);
        } else {
            ALOGW("%s: Not able to find failing input with frame number: %" PRId64, __FUNCTION__,
                    *it);
            it++;
        }
    }
}

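// Find the timestamp of the oldest pending frame that has all required inputs
// (P010 buffer, request timing info, and, when supported, the internal JPEG
// buffer) and is therefore ready for encoding.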
bool JpegRCompositeStream::getNextReadyInputLocked(int64_t *currentTs /*inout*/) {
    if (currentTs == nullptr) {
        return false;
    }

    bool newInputAvailable = false;
    for (const auto& it : mPendingInputFrames) {
        if ((!it.second.error) && (it.second.p010Buffer.data != nullptr) &&
                (it.second.requestTimeNs != -1) &&
                ((it.second.jpegBuffer.data != nullptr) || !mSupportInternalJpeg) &&
                (it.first < *currentTs)) {
            *currentTs = it.first;
            newInputAvailable = true;
        }
    }

    return newInputAvailable;
}

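// Return the frame number of the oldest pending frame that failed and has not
// yet been reported to the client, or -1 if there is no such frame.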
int64_t JpegRCompositeStream::getNextFailingInputLocked(int64_t *currentTs /*inout*/) {
    int64_t ret = -1;
    if (currentTs == nullptr) {
        return ret;
    }

    for (const auto& it : mPendingInputFrames) {
        if (it.second.error && !it.second.errorNotified && (it.first < *currentTs)) {
            *currentTs = it.first;
            ret = it.second.frameNumber;
        }
    }

    return ret;
}

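// Encode the P010 input (and, when supported, the internally captured JPEG)
// into a single JPEG/R buffer, append the camera blob header at the end of
// the buffer, and queue the result to the client-facing output surface.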
status_t JpegRCompositeStream::processInputFrame(nsecs_t ts, const InputFrame &inputFrame) {
    status_t res;
    sp<ANativeWindow> outputANW = mOutputSurface;
    ANativeWindowBuffer *anb;
    int fenceFd;
    void *dstBuffer;

    size_t maxJpegRBufferSize = 0;
    if (mMaxJpegBufferSize > 0) {
        // If this is an ultra high resolution sensor and the input frame size
        // is larger than the default resolution jpeg, use the UHR max buffer size.
        if (mUHRMaxJpegSize.width != 0 &&
                inputFrame.jpegBuffer.width * inputFrame.jpegBuffer.height >
                mDefaultMaxJpegSize.width * mDefaultMaxJpegSize.height) {
            maxJpegRBufferSize = mUHRMaxJpegBufferSize;
        } else {
            maxJpegRBufferSize = mMaxJpegBufferSize;
        }
    } else {
        maxJpegRBufferSize = inputFrame.p010Buffer.width * inputFrame.p010Buffer.height;
    }

    uint8_t jpegQuality = 100;
    auto entry = inputFrame.result.find(ANDROID_JPEG_QUALITY);
    if (entry.count > 0) {
        jpegQuality = entry.data.u8[0];
    }

    if ((res = native_window_set_buffers_dimensions(mOutputSurface.get(), maxJpegRBufferSize, 1))
            != OK) {
        ALOGE("%s: Unable to configure stream buffer dimensions"
                " %zux%u for stream %d", __FUNCTION__, maxJpegRBufferSize, 1U, mP010StreamId);
        return res;
    }

    res = outputANW->dequeueBuffer(mOutputSurface.get(), &anb, &fenceFd);
    if (res != OK) {
        ALOGE("%s: Error retrieving output buffer: %s (%d)", __FUNCTION__, strerror(-res),
                res);
        return res;
    }

    sp<GraphicBuffer> gb = GraphicBuffer::from(anb);
    GraphicBufferLocker gbLocker(gb);
    res = gbLocker.lockAsync(&dstBuffer, fenceFd);
    if (res != OK) {
        ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
        return res;
    }

    if ((gb->getWidth() < maxJpegRBufferSize) || (gb->getHeight() != 1)) {
        ALOGE("%s: Blob buffer size mismatch, expected %zux%u received %dx%d", __FUNCTION__,
                maxJpegRBufferSize, 1, gb->getWidth(), gb->getHeight());
        outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
        return BAD_VALUE;
    }

    size_t actualJpegRSize = 0;
    ultrahdr::jpegr_uncompressed_struct p010;
    ultrahdr::jpegr_compressed_struct jpegR;
    ultrahdr::JpegR jpegREncoder;

    p010.height = inputFrame.p010Buffer.height;
    p010.width = inputFrame.p010Buffer.width;
    p010.colorGamut = ultrahdr::ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_BT2100;
    p010.data = inputFrame.p010Buffer.data;
    p010.chroma_data = inputFrame.p010Buffer.dataCb;
    // Strides are expected to be in pixels not bytes
    p010.luma_stride = inputFrame.p010Buffer.stride / 2;
    p010.chroma_stride = inputFrame.p010Buffer.chromaStride / 2;

    jpegR.data = dstBuffer;
    jpegR.maxLength = maxJpegRBufferSize;

    ultrahdr::ultrahdr_transfer_function transferFunction;
    switch (mP010DynamicRange) {
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
            transferFunction = ultrahdr::ultrahdr_transfer_function::ULTRAHDR_TF_PQ;
            break;
        default:
            transferFunction = ultrahdr::ultrahdr_transfer_function::ULTRAHDR_TF_HLG;
    }

    if (mSupportInternalJpeg) {
        ultrahdr::jpegr_compressed_struct jpeg;

        jpeg.data = inputFrame.jpegBuffer.data;
        jpeg.length = android::camera2::JpegProcessor::findJpegSize(inputFrame.jpegBuffer.data,
                inputFrame.jpegBuffer.width);
        if (jpeg.length == 0) {
            ALOGW("%s: Failed to find input jpeg size, default to using entire buffer!",
                    __FUNCTION__);
            jpeg.length = inputFrame.jpegBuffer.width;
        }

        if (mOutputColorSpace == ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3) {
            jpeg.colorGamut = ultrahdr::ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_P3;
        } else {
            jpeg.colorGamut = ultrahdr::ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_BT709;
        }

        res = jpegREncoder.encodeJPEGR(&p010, &jpeg, transferFunction, &jpegR);
    } else {
        const uint8_t* exifBuffer = nullptr;
        size_t exifBufferSize = 0;
        std::unique_ptr<ExifUtils> utils(ExifUtils::create());
        utils->initializeEmpty();
        utils->setFromMetadata(inputFrame.result, mStaticInfo, inputFrame.p010Buffer.width,
                inputFrame.p010Buffer.height);
        if (utils->generateApp1()) {
            exifBuffer = utils->getApp1Buffer();
            exifBufferSize = utils->getApp1Length();
        } else {
            ALOGE("%s: Unable to generate App1 buffer", __FUNCTION__);
        }

        ultrahdr::jpegr_exif_struct exif;
        exif.data = reinterpret_cast<void*>(const_cast<uint8_t*>(exifBuffer));
        exif.length = exifBufferSize;

        res = jpegREncoder.encodeJPEGR(&p010, transferFunction, &jpegR, jpegQuality, &exif);
    }

    if (res != OK) {
        ALOGE("%s: Error trying to encode JPEG/R: %s (%d)", __FUNCTION__, strerror(-res), res);
        return res;
    }

    actualJpegRSize = jpegR.length;

    size_t finalJpegRSize = actualJpegRSize + sizeof(CameraBlob);
    if (finalJpegRSize > maxJpegRBufferSize) {
        ALOGE("%s: Final jpeg buffer not large enough for the jpeg blob header", __FUNCTION__);
        outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
        return NO_MEMORY;
    }

    res = native_window_set_buffers_timestamp(mOutputSurface.get(), ts);
    if (res != OK) {
        ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)", __FUNCTION__,
                getStreamId(), strerror(-res), res);
        return res;
    }

    ALOGV("%s: Final jpeg size: %zu", __func__, finalJpegRSize);
    uint8_t* header = static_cast<uint8_t *> (dstBuffer) +
            (gb->getWidth() - sizeof(CameraBlob));
    CameraBlob blobHeader = {
        .blobId = CameraBlobId::JPEG,
        .blobSizeBytes = static_cast<int32_t>(actualJpegRSize)
    };
    memcpy(header, &blobHeader, sizeof(CameraBlob));

    if (inputFrame.requestTimeNs != -1) {
        auto captureLatency = ns2ms(systemTime() - inputFrame.requestTimeNs);
        mSessionStatsBuilder.incCounter(mP010StreamId, false /*dropped*/, captureLatency);
        if (mFirstRequestLatency == -1) {
            mFirstRequestLatency = captureLatency;
        }
    }
    outputANW->queueBuffer(mOutputSurface.get(), anb, /*fence*/ -1);

    return res;
}

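// Unlock any acquired input buffers for the given frame and, if the frame
// failed, notify the client exactly once about the dropped buffer.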
void JpegRCompositeStream::releaseInputFrameLocked(InputFrame *inputFrame /*out*/) {
    if (inputFrame == nullptr) {
        return;
    }

    if (inputFrame->p010Buffer.data != nullptr) {
        mP010Consumer->unlockBuffer(inputFrame->p010Buffer);
        inputFrame->p010Buffer.data = nullptr;
        mP010BufferAcquired = false;
    }

    if (inputFrame->jpegBuffer.data != nullptr) {
        mBlobConsumer->unlockBuffer(inputFrame->jpegBuffer);
        inputFrame->jpegBuffer.data = nullptr;
        mBlobBufferAcquired = false;
    }

    if ((inputFrame->error || mErrorState) && !inputFrame->errorNotified) {
        //TODO: Figure out correct requestId
        notifyError(inputFrame->frameNumber, -1 /*requestId*/);
        inputFrame->errorNotified = true;
        mSessionStatsBuilder.incCounter(mP010StreamId, true /*dropped*/, 0 /*captureLatencyMs*/);
    }
}

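// Release and erase all pending frames with timestamps up to and including
// 'currentTs'.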
void JpegRCompositeStream::releaseInputFramesLocked(int64_t currentTs) {
    auto it = mPendingInputFrames.begin();
    while (it != mPendingInputFrames.end()) {
        if (it->first <= currentTs) {
            releaseInputFrameLocked(&it->second);
            it = mPendingInputFrames.erase(it);
        } else {
            it++;
        }
    }
}

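// Main processing loop: collect pending inputs, wait until a frame has all of
// its inputs, encode it to JPEG/R, and release the consumed buffers.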
bool JpegRCompositeStream::threadLoop() {
    int64_t currentTs = INT64_MAX;
    bool newInputAvailable = false;

    {
        Mutex::Autolock l(mMutex);

        if (mErrorState) {
            // In case we landed in error state, return any pending buffers and
            // halt all further processing.
            compilePendingInputLocked();
            releaseInputFramesLocked(currentTs);
            return false;
        }

        while (!newInputAvailable) {
            compilePendingInputLocked();
            newInputAvailable = getNextReadyInputLocked(&currentTs);
            if (!newInputAvailable) {
                auto failingFrameNumber = getNextFailingInputLocked(&currentTs);
                if (failingFrameNumber >= 0) {
                    // We cannot erase 'mPendingInputFrames[currentTs]' at this point because it is
                    // possible for two internal stream buffers to fail. In such scenario the
                    // composite stream should notify the client about a stream buffer error only
                    // once and this information is kept within 'errorNotified'.
                    // Any present failed input frames will be removed on a subsequent call to
                    // 'releaseInputFramesLocked()'.
                    releaseInputFrameLocked(&mPendingInputFrames[currentTs]);
                    currentTs = INT64_MAX;
                }

                auto ret = mInputReadyCondition.waitRelative(mMutex, kWaitDuration);
                if (ret == TIMED_OUT) {
                    return true;
                } else if (ret != OK) {
                    ALOGE("%s: Timed wait on condition failed: %s (%d)", __FUNCTION__,
                            strerror(-ret), ret);
                    return false;
                }
            }
        }
    }

    auto res = processInputFrame(currentTs, mPendingInputFrames[currentTs]);
    Mutex::Autolock l(mMutex);
    if (res != OK) {
        ALOGE("%s: Failed processing frame with timestamp: %" PRId64 ": %s (%d)", __FUNCTION__,
                currentTs, strerror(-res), res);
        mPendingInputFrames[currentTs].error = true;
    }

    releaseInputFramesLocked(currentTs);

    return true;
}

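// A surface backs a JPEG/R composite stream when it is configured with the
// BLOB pixel format and the JPEG/R dataspace.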
bool JpegRCompositeStream::isJpegRCompositeStream(const sp<Surface> &surface) {
    if (CameraProviderManager::kFrameworkJpegRDisabled) {
        return false;
    }
    ANativeWindow *anw = surface.get();
    status_t err;
    int format;
    if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
        ALOGE("%s: Failed to query Surface format: %s (%d)", __FUNCTION__, strerror(-err),
                err);
        return false;
    }

    int dataspace;
    if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) {
        ALOGE("%s: Failed to query Surface dataspace: %s (%d)", __FUNCTION__, strerror(-err),
                err);
        return false;
    }

    if ((format == HAL_PIXEL_FORMAT_BLOB) && (dataspace == static_cast<int>(kJpegRDataSpace))) {
        return true;
    }

    return false;
}

bool JpegRCompositeStream::isJpegRCompositeStreamInfo(const OutputStreamInfo& streamInfo) {
    if ((streamInfo.format == HAL_PIXEL_FORMAT_BLOB) &&
            (streamInfo.dataSpace == static_cast<int>(kJpegRDataSpace))) {
        return true;
    }

    return false;
}

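// Map the requested dynamic range profile to the dynamic range and dataspace
// used when configuring the internal P010 stream.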
void JpegRCompositeStream::deriveDynamicRangeAndDataspace(int64_t dynamicProfile,
        int64_t* /*out*/dynamicRange, int64_t* /*out*/dataSpace) {
    if ((dynamicRange == nullptr) || (dataSpace == nullptr)) {
        return;
    }

    switch (dynamicProfile) {
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
            *dynamicRange = dynamicProfile;
            *dataSpace = HAL_DATASPACE_BT2020_ITU_PQ;
            break;
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF:
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO:
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM:
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO:
            *dynamicRange = dynamicProfile;
            *dataSpace = HAL_DATASPACE_BT2020_ITU_HLG;
            break;
        default:
            *dynamicRange = kP010DefaultDynamicRange;
            *dataSpace = kP010DefaultDataSpace;
    }
}

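// Create the internal P010 stream and, when concurrent SDR and HDR capture is
// supported, an internal JPEG blob stream. This object registers itself as a
// composite stream listener for both internal streams.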
status_t JpegRCompositeStream::createInternalStreams(const std::vector<SurfaceHolder>& consumers,
        bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
        camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed,
        std::vector<int> *surfaceIds,
        int /*streamSetId*/, bool /*isShared*/, int32_t colorSpace,
        int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) {
    sp<CameraDeviceBase> device = mDevice.promote();
    if (!device.get()) {
        ALOGE("%s: Invalid camera device!", __FUNCTION__);
        return NO_INIT;
    }

    deriveDynamicRangeAndDataspace(dynamicProfile, &mP010DynamicRange, &mP010DataSpace);
    mSupportInternalJpeg = CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(
            mStaticInfo, mP010DynamicRange,
            ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);

#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
    mP010Consumer = new CpuConsumer(/*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
    mP010Consumer->setFrameAvailableListener(this);
    mP010Consumer->setName(String8("Camera3-P010CompositeStream"));
    mP010Surface = mP010Consumer->getSurface();
#else
    sp<IGraphicBufferProducer> producer;
    sp<IGraphicBufferConsumer> consumer;
    BufferQueue::createBufferQueue(&producer, &consumer);
    mP010Consumer = new CpuConsumer(consumer, /*maxLockedBuffers*/1, /*controlledByApp*/ true);
    mP010Consumer->setFrameAvailableListener(this);
    mP010Consumer->setName(String8("Camera3-P010CompositeStream"));
    mP010Surface = new Surface(producer);
#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)

    auto ret = device->createStream(mP010Surface, width, height, kP010PixelFormat,
            static_cast<android_dataspace>(mP010DataSpace), rotation,
            id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
            camera3::CAMERA3_STREAM_SET_ID_INVALID, false /*isShared*/, false /*isMultiResolution*/,
            GRALLOC_USAGE_SW_READ_OFTEN, mP010DynamicRange, streamUseCase,
            OutputConfiguration::TIMESTAMP_BASE_DEFAULT, OutputConfiguration::MIRROR_MODE_AUTO,
            ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED, useReadoutTimestamp);
    if (ret == OK) {
        mP010StreamId = *id;
        mP010SurfaceId = (*surfaceIds)[0];
        mOutputSurface = consumers[0].mSurface;
    } else {
        return ret;
    }

    if (mSupportInternalJpeg) {
#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
        mBlobConsumer = new CpuConsumer(/*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
        mBlobConsumer->setFrameAvailableListener(this);
        mBlobConsumer->setName(String8("Camera3-JpegRCompositeStream"));
        mBlobSurface = mBlobConsumer->getSurface();
#else
        BufferQueue::createBufferQueue(&producer, &consumer);
        mBlobConsumer = new CpuConsumer(consumer, /*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
        mBlobConsumer->setFrameAvailableListener(this);
        mBlobConsumer->setName(String8("Camera3-JpegRCompositeStream"));
        mBlobSurface = new Surface(producer);
#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
        std::vector<int> blobSurfaceId;
        ret = device->createStream(mBlobSurface, width, height, format,
                kJpegDataSpace, rotation, &mBlobStreamId, physicalCameraId, sensorPixelModesUsed,
                &blobSurfaceId,
                /*streamSetId*/ camera3::CAMERA3_STREAM_SET_ID_INVALID,
                /*isShared*/ false,
                /*isMultiResolution*/ false,
                /*consumerUsage*/ GRALLOC_USAGE_SW_READ_OFTEN,
                /*dynamicProfile*/ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
                streamUseCase,
                /*timestampBase*/ OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
                /*mirrorMode*/ OutputConfiguration::MIRROR_MODE_AUTO,
                /*colorSpace*/ colorSpace, useReadoutTimestamp);
        if (ret == OK) {
            mBlobSurfaceId = blobSurfaceId[0];
        } else {
            return ret;
        }

        ret = registerCompositeStreamListener(mBlobStreamId);
        if (ret != OK) {
            ALOGE("%s: Failed to register jpeg stream listener!", __FUNCTION__);
            return ret;
        }
    }

    ret = registerCompositeStreamListener(getStreamId());
    if (ret != OK) {
        ALOGE("%s: Failed to register P010 stream listener!", __FUNCTION__);
        return ret;
    }

    mOutputColorSpace = colorSpace;
    mOutputStreamUseCase = streamUseCase;
    mBlobWidth = width;
    mBlobHeight = height;

    return ret;
}

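// Connect to the client-facing output surface, configure it for BLOB output,
// size its buffer pool, and start the processing thread.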
status_t JpegRCompositeStream::configureStream() {
    if (isRunning()) {
        // Processing thread is already running, nothing more to do.
        return NO_ERROR;
    }

    if (mOutputSurface.get() == nullptr) {
        ALOGE("%s: No valid output surface set!", __FUNCTION__);
        return NO_INIT;
    }

    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mStreamSurfaceListener);
    if (res != OK) {
        ALOGE("%s: Unable to connect to native window for stream %d",
                __FUNCTION__, mP010StreamId);
        return res;
    }

    if ((res = native_window_set_buffers_format(mOutputSurface.get(), HAL_PIXEL_FORMAT_BLOB))
            != OK) {
        ALOGE("%s: Unable to configure stream buffer format for stream %d", __FUNCTION__,
                mP010StreamId);
        return res;
    }

    if ((res = native_window_set_usage(mOutputSurface.get(),
            GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN)) != OK) {
        ALOGE("%s: Unable to configure stream buffer usage for stream %d", __FUNCTION__,
                mP010StreamId);
        return res;
    }

    int maxProducerBuffers;
    ANativeWindow *anw = mP010Surface.get();
    if ((res = anw->query(anw, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxProducerBuffers)) != OK) {
        ALOGE("%s: Unable to query consumer undequeued"
                " buffer count for stream %d", __FUNCTION__, mP010StreamId);
        return res;
    }

    ANativeWindow *anwConsumer = mOutputSurface.get();
    int maxConsumerBuffers;
    if ((res = anwConsumer->query(anwConsumer, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            &maxConsumerBuffers)) != OK) {
        ALOGE("%s: Unable to query consumer undequeued"
                " buffer count for stream %d", __FUNCTION__, mP010StreamId);
        return res;
    }

    if ((res = native_window_set_buffer_count(
            anwConsumer, maxProducerBuffers + maxConsumerBuffers)) != OK) {
        ALOGE("%s: Unable to set buffer count for stream %d", __FUNCTION__, mP010StreamId);
        return res;
    }

    mSessionStatsBuilder.addStream(mP010StreamId);

    run("JpegRCompositeStreamProc");

    return NO_ERROR;
}

status_t JpegRCompositeStream::deleteInternalStreams() {
    // The 'CameraDeviceClient' parent will delete the P010 stream
    requestExit();

    auto ret = join();
    if (ret != OK) {
        ALOGE("%s: Failed to join with the main processing thread: %s (%d)", __FUNCTION__,
                strerror(-ret), ret);
    }

    if (mBlobStreamId >= 0) {
        // Camera devices may not be valid after switching to offline mode.
        // In this case, all offline streams including internal composite streams
        // are managed and released by the offline session.
        sp<CameraDeviceBase> device = mDevice.promote();
        if (device.get() != nullptr) {
            ret = device->deleteStream(mBlobStreamId);
        }

        mBlobStreamId = -1;
    }

    if (mOutputSurface != nullptr) {
        mOutputSurface->disconnect(NATIVE_WINDOW_API_CAMERA);
        mOutputSurface.clear();
    }

    return ret;
}

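// Record the timestamp of each newly available internal buffer and wake up
// the processing thread. Buffers are only queued while not in an error state.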
void JpegRCompositeStream::onFrameAvailable(const BufferItem& item) {
    if (item.mDataSpace == kJpegDataSpace) {
        ALOGV("%s: Jpeg buffer with ts: %" PRId64 " ms. arrived!",
                __func__, ns2ms(item.mTimestamp));

        Mutex::Autolock l(mMutex);
        if (!mErrorState) {
            mInputJpegBuffers.push_back(item.mTimestamp);
            mInputReadyCondition.signal();
        }
    } else if (item.mDataSpace == static_cast<android_dataspace_t>(mP010DataSpace)) {
        ALOGV("%s: P010 buffer with ts: %" PRId64 " ms. arrived!", __func__,
                ns2ms(item.mTimestamp));

        Mutex::Autolock l(mMutex);
        if (!mErrorState) {
            mInputP010Buffers.push_back(item.mTimestamp);
            mInputReadyCondition.signal();
        }
    } else {
        ALOGE("%s: Unexpected data space: 0x%x", __FUNCTION__, item.mDataSpace);
    }
}

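// Add the internal stream and surface ids as output targets of a capture
// request so both internal streams receive buffers for each request.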
status_t JpegRCompositeStream::insertGbp(SurfaceMap* /*out*/outSurfaceMap,
        Vector<int32_t> * /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) {
    if (outputStreamIds == nullptr) {
        return BAD_VALUE;
    }

    if (outSurfaceMap->find(mP010StreamId) == outSurfaceMap->end()) {
        outputStreamIds->push_back(mP010StreamId);
    }
    (*outSurfaceMap)[mP010StreamId].push_back(mP010SurfaceId);

    if (mSupportInternalJpeg) {
        if (outSurfaceMap->find(mBlobStreamId) == outSurfaceMap->end()) {
            outputStreamIds->push_back(mBlobStreamId);
        }
        (*outSurfaceMap)[mBlobStreamId].push_back(mBlobSurfaceId);
    }

    if (currentStreamId != nullptr) {
        *currentStreamId = mP010StreamId;
    }

    return NO_ERROR;
}

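// Report the ids of all internal streams that make up this composite stream.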
status_t JpegRCompositeStream::insertCompositeStreamIds(
        std::vector<int32_t>* compositeStreamIds /*out*/) {
    if (compositeStreamIds == nullptr) {
        return BAD_VALUE;
    }

    compositeStreamIds->push_back(mP010StreamId);
    if (mSupportInternalJpeg) {
        compositeStreamIds->push_back(mBlobStreamId);
    }

    return OK;
}

void JpegRCompositeStream::onResultError(const CaptureResultExtras& resultExtras) {
    // Processing can continue even in case of result errors.
    // At the moment Jpeg/R composite stream processing relies mainly on static camera
    // characteristics data. The actual result data can be used for the jpeg quality but
    // in case it is absent we can default to maximum.
    eraseResult(resultExtras.frameNumber);
    mSessionStatsBuilder.incResultCounter(true /*dropped*/);
}

bool JpegRCompositeStream::onStreamBufferError(const CaptureResultExtras& resultExtras) {
    bool ret = false;
    // Buffer errors concerning internal composite streams should not be directly visible to
    // camera clients. They must only receive a single buffer error with the public composite
    // stream id.
    if ((resultExtras.errorStreamId == mP010StreamId) ||
            (resultExtras.errorStreamId == mBlobStreamId)) {
        flagAnErrorFrameNumber(resultExtras.frameNumber);
        ret = true;
    }

    return ret;
}

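// Describe the internal streams that will back a JPEG/R composite output with
// the given stream parameters.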
status_t JpegRCompositeStream::getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
        const CameraMetadata& staticInfo,
        std::vector<OutputStreamInfo>* compositeOutput /*out*/) {
    if (compositeOutput == nullptr) {
        return BAD_VALUE;
    }

    int64_t dynamicRange, dataSpace;
    deriveDynamicRangeAndDataspace(streamInfo.dynamicRangeProfile, &dynamicRange, &dataSpace);

    compositeOutput->clear();
    compositeOutput->push_back({});
    (*compositeOutput)[0].width = streamInfo.width;
    (*compositeOutput)[0].height = streamInfo.height;
    (*compositeOutput)[0].format = kP010PixelFormat;
    (*compositeOutput)[0].dataSpace = static_cast<android_dataspace_t>(dataSpace);
    (*compositeOutput)[0].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
    (*compositeOutput)[0].dynamicRangeProfile = dynamicRange;
    (*compositeOutput)[0].colorSpace =
            ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;

    if (CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(
            staticInfo, dynamicRange,
            ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD)) {
        compositeOutput->push_back({});
        (*compositeOutput)[1].width = streamInfo.width;
        (*compositeOutput)[1].height = streamInfo.height;
        (*compositeOutput)[1].format = HAL_PIXEL_FORMAT_BLOB;
        (*compositeOutput)[1].dataSpace = kJpegDataSpace;
        (*compositeOutput)[1].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
        (*compositeOutput)[1].dynamicRangeProfile =
                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
        (*compositeOutput)[1].colorSpace = streamInfo.colorSpace;
    }

    return NO_ERROR;
}

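// Populate the session statistics for the composite stream. Statistics are
// only available once at least one capture request has been processed.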
void JpegRCompositeStream::getStreamStats(hardware::CameraStreamStats* streamStats) {
    if ((streamStats == nullptr) || (mFirstRequestLatency == -1)) {
        return;
    }

    bool deviceError;
    std::map<int, StreamStats> stats;
    std::pair<int32_t, int32_t> mostRequestedFps;
    mSessionStatsBuilder.buildAndReset(&streamStats->mRequestCount, &streamStats->mErrorCount,
            &deviceError, &mostRequestedFps, &stats);
    if (stats.find(mP010StreamId) != stats.end()) {
        streamStats->mWidth = mBlobWidth;
        streamStats->mHeight = mBlobHeight;
        streamStats->mFormat = HAL_PIXEL_FORMAT_BLOB;
        streamStats->mDataSpace = static_cast<int>(kJpegRDataSpace);
        streamStats->mDynamicRangeProfile = mP010DynamicRange;
        streamStats->mColorSpace = mOutputColorSpace;
        streamStats->mStreamUseCase = mOutputStreamUseCase;
        streamStats->mStartLatencyMs = mFirstRequestLatency;
        streamStats->mHistogramType = hardware::CameraStreamStats::HISTOGRAM_TYPE_CAPTURE_LATENCY;
        streamStats->mHistogramBins.assign(stats[mP010StreamId].mCaptureLatencyBins.begin(),
                stats[mP010StreamId].mCaptureLatencyBins.end());
        streamStats->mHistogramCounts.assign(stats[mP010StreamId].mCaptureLatencyHistogram.begin(),
                stats[mP010StreamId].mCaptureLatencyHistogram.end());
    }
}

}; // namespace camera3
}; // namespace android