1 /*
2 * Copyright (C) 2019 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #define LOG_TAG "Camera3-HeicCompositeStream"
18 #define ATRACE_TAG ATRACE_TAG_CAMERA
19 #define ALIGN(x, mask) ( ((x) + (mask) - 1) & ~((mask) - 1) )
20 //#define LOG_NDEBUG 0
21
22 #include <linux/memfd.h>
23 #include <pthread.h>
24 #include <sys/syscall.h>
25
26 #include <aidl/android/hardware/camera/device/CameraBlob.h>
27 #include <aidl/android/hardware/camera/device/CameraBlobId.h>
28 #include <camera/StringUtils.h>
29 #include <com_android_graphics_libgui_flags.h>
30 #include <com_android_internal_camera_flags.h>
31 #include <gui/Surface.h>
32 #include <libyuv.h>
33 #include <utils/Log.h>
34 #include <utils/Trace.h>
35 #include <ultrahdr/jpegr.h>
36 #include <ultrahdr/ultrahdrcommon.h>
37
38 #include <media/MediaCodecBuffer.h>
39 #include <media/stagefright/MediaCodecConstants.h>
40 #include <media/stagefright/MetaData.h>
41 #include <media/stagefright/foundation/ABuffer.h>
42 #include <media/stagefright/foundation/MediaDefs.h>
43 #include <mediadrm/ICrypto.h>
44 #include <memory>
45
46 #include "HeicCompositeStream.h"
47 #include "HeicEncoderInfoManager.h"
48 #include "common/CameraDeviceBase.h"
49 #include "system/camera_metadata.h"
50 #include "utils/ExifUtils.h"
51 #include "utils/SessionConfigurationUtils.h"
52 #include "utils/Utils.h"
53
54 using aidl::android::hardware::camera::device::CameraBlob;
55 using aidl::android::hardware::camera::device::CameraBlobId;
56
57 namespace flags = com::android::internal::camera::flags;
58
59 namespace android {
60 namespace camera3 {
61
// Composite stream that encodes camera output into a HEIC (or UltraHDR HEIC)
// blob by combining a main-image HEVC/HEIC codec stream with an optional
// JPEG APP-segment stream. All counters/ids start in their "not configured"
// state; the real streams are created later in createInternalStreams().
HeicCompositeStream::HeicCompositeStream(sp<CameraDeviceBase> device,
        wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
        CompositeStream(device, cb),
        mUseHeic(false),
        mNumOutputTiles(1),
        mNumGainmapOutputTiles(1),
        mOutputWidth(0),
        mOutputHeight(0),
        mGainmapOutputWidth(0),
        mGainmapOutputHeight(0),
        mMaxHeicBufferSize(0),
        mGridWidth(HeicEncoderInfoManager::kGridWidth),
        mGridHeight(HeicEncoderInfoManager::kGridHeight),
        mGainmapGridWidth(HeicEncoderInfoManager::kGridWidth),
        mGainmapGridHeight(HeicEncoderInfoManager::kGridHeight),
        mGridRows(1),
        mGridCols(1),
        mGainmapGridRows(1),
        mGainmapGridCols(1),
        mUseGrid(false),
        mGainmapUseGrid(false),
        mAppSegmentStreamId(-1),
        mAppSegmentSurfaceId(-1),
        mMainImageStreamId(-1),
        mMainImageSurfaceId(-1),
        mYuvBufferAcquired(false),
        mStreamSurfaceListener(new StreamSurfaceListener()),
        mDequeuedOutputBufferCnt(0),
        mCodecOutputCounter(0),
        mCodecGainmapOutputCounter(0),
        mQuality(-1),
        mGridTimestampUs(0),
        mStatusId(StatusTracker::NO_STATUS_ID) {
    // The APP-segment stream is only created when the HAL explicitly reports
    // support for the HEIC stream combination in its static metadata.
    mStaticInfo = device->info();
    camera_metadata_entry halHeicSupport = mStaticInfo.find(ANDROID_HEIC_INFO_SUPPORTED);
    if (halHeicSupport.count == 1 &&
            halHeicSupport.data.u8[0] == ANDROID_HEIC_INFO_SUPPORTED_TRUE) {
        // The camera device supports the HEIC stream combination,
        // use the standard stream combintion.
        mAppSegmentSupported = true;
    }
}
104
// Destructor: releases codec resources and clears all stream bookkeeping.
// Safe to run even if deleteInternalStreams() was already called.
HeicCompositeStream::~HeicCompositeStream() {
    // Call deinitCodec in case stream hasn't been deleted yet to avoid any
    // memory/resource leak.
    deinitCodec();

    // Drop any buffered input timestamps / pending codec output.
    mInputAppSegmentBuffers.clear();
    mCodecOutputBuffers.clear();
    mGainmapCodecOutputBuffers.clear();

    // Reset APP-segment stream state to "not configured".
    mAppSegmentStreamId = -1;
    mAppSegmentSurfaceId = -1;
    mAppSegmentConsumer.clear();
    mAppSegmentSurface.clear();

    // Reset main image stream state to "not configured".
    mMainImageStreamId = -1;
    mMainImageSurfaceId = -1;
    mMainImageConsumer.clear();
    mMainImageSurface.clear();
}
124
isHeicCompositeStreamInfo(const OutputStreamInfo & streamInfo)125 bool HeicCompositeStream::isHeicCompositeStreamInfo(const OutputStreamInfo& streamInfo) {
126 return ((streamInfo.dataSpace == static_cast<android_dataspace_t>(HAL_DATASPACE_HEIF) ||
127 (streamInfo.dataSpace == static_cast<android_dataspace_t>(kUltraHDRDataSpace))) &&
128 (streamInfo.format == HAL_PIXEL_FORMAT_BLOB));
129 }
130
isHeicCompositeStream(const sp<Surface> & surface)131 bool HeicCompositeStream::isHeicCompositeStream(const sp<Surface> &surface) {
132 ANativeWindow *anw = surface.get();
133 status_t err;
134 int format;
135 if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
136 std::string msg = fmt::sprintf("Failed to query Surface format: %s (%d)", strerror(-err),
137 err);
138 ALOGE("%s: %s", __FUNCTION__, msg.c_str());
139 return false;
140 }
141
142 int dataspace;
143 if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) {
144 std::string msg = fmt::sprintf("Failed to query Surface dataspace: %s (%d)", strerror(-err),
145 err);
146 ALOGE("%s: %s", __FUNCTION__, msg.c_str());
147 return false;
148 }
149
150 return ((format == HAL_PIXEL_FORMAT_BLOB) && ((dataspace == HAL_DATASPACE_HEIF) ||
151 (dataspace == static_cast<int>(kUltraHDRDataSpace))));
152 }
153
// Creates the internal camera streams that feed the HEIC encoding pipeline:
//  - an optional JPEG APP-segment BLOB stream (only when the HAL advertises
//    HEIC support),
//  - the main image stream feeding the HEVC/HEIC codec, either directly via
//    the codec's input surface or through a CpuConsumer when framework
//    tiling or HDR gainmap processing is needed.
// On success, the main image stream/surface ids are returned through
// 'id'/'surfaceIds' and this object is registered as a stream listener.
status_t HeicCompositeStream::createInternalStreams(const std::vector<SurfaceHolder>& consumers,
        bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
        camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed,
        std::vector<int> *surfaceIds,
        int /*streamSetId*/, bool /*isShared*/, int32_t colorSpace,
        int64_t /*dynamicProfile*/, int64_t /*streamUseCase*/, bool useReadoutTimestamp) {

    sp<CameraDeviceBase> device = mDevice.promote();
    if (!device.get()) {
        ALOGE("%s: Invalid camera device!", __FUNCTION__);
        return NO_INIT;
    }

    // An UltraHDR consumer dataspace (plus the feature flag) enables the HDR
    // gainmap pipeline and switches the internal stream to BT2020 HLG.
    ANativeWindow* anw = consumers[0].mSurface.get();
    int dataspace;
    status_t res;
    if ((res = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) {
        ALOGE("%s: Failed to query Surface dataspace: %s (%d)", __FUNCTION__, strerror(-res),
                res);
        return res;
    }
    if ((dataspace == static_cast<int>(kUltraHDRDataSpace)) && flags::camera_heif_gainmap()) {
        mHDRGainmapEnabled = true;
        mInternalDataSpace = static_cast<android_dataspace_t>(HAL_DATASPACE_BT2020_HLG);
    }

    res = initializeCodec(width, height, device);
    if (res != OK) {
        ALOGE("%s: Failed to initialize HEIC/HEVC codec: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return NO_INIT;
    }

    // Set up the CPU-readable APP-segment consumer/surface pair. The two
    // preprocessor branches differ only in how the BufferQueue is owned.
#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
    if (mAppSegmentSupported) {
        mAppSegmentConsumer = new CpuConsumer(kMaxAcquiredAppSegment);
        mAppSegmentConsumer->setFrameAvailableListener(this);
        mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
        mAppSegmentSurface = mAppSegmentConsumer->getSurface();
    }
    sp<IGraphicBufferProducer> producer = mAppSegmentSurface.get() != nullptr ?
        mAppSegmentSurface->getIGraphicBufferProducer() : nullptr;
#else
    sp<IGraphicBufferProducer> producer;
    sp<IGraphicBufferConsumer> consumer;
    if (mAppSegmentSupported) {
        BufferQueue::createBufferQueue(&producer, &consumer);
        mAppSegmentConsumer = new CpuConsumer(consumer, kMaxAcquiredAppSegment);
        mAppSegmentConsumer->setFrameAvailableListener(this);
        mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
        mAppSegmentSurface = new Surface(producer);
    }
#endif // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)

    // Create the HAL-facing APP-segment BLOB stream (width = max blob size,
    // height = 1, JPEG_APP_SEGMENTS dataspace).
    if (mAppSegmentSupported) {
        std::vector<int> sourceSurfaceId;
        res = device->createStream(mAppSegmentSurface, mAppSegmentMaxSize, 1, format,
                kAppSegmentDataSpace, rotation, &mAppSegmentStreamId, physicalCameraId,
                sensorPixelModesUsed, &sourceSurfaceId, camera3::CAMERA3_STREAM_SET_ID_INVALID,
                /*isShared*/false, /*isMultiResolution*/false,
                /*consumerUsage*/0, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
                ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
                OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
                OutputConfiguration::MIRROR_MODE_AUTO,
                colorSpace,
                useReadoutTimestamp);
        if (res == OK) {
            mAppSegmentSurfaceId = sourceSurfaceId[0];
        } else {
            ALOGE("%s: Failed to create JPEG App segment stream: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Main image path: when no framework tiling/gainmap is needed the codec
    // consumes camera buffers directly through its own input surface;
    // otherwise a CpuConsumer is used so the framework can copy tiles in.
    if (!mUseGrid && !mHDRGainmapEnabled) {
        res = mCodec->createInputSurface(&producer);
        if (res != OK) {
            ALOGE("%s: Failed to create input surface for Heic codec: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
    } else {
#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
        mMainImageConsumer = new CpuConsumer(1);
        producer = mMainImageConsumer->getSurface()->getIGraphicBufferProducer();
#else
        BufferQueue::createBufferQueue(&producer, &consumer);
        mMainImageConsumer = new CpuConsumer(consumer, 1);
#endif // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
        mMainImageConsumer->setFrameAvailableListener(this);
        mMainImageConsumer->setName(String8("Camera3-HeicComposite-HevcInputYUVStream"));
    }
    mMainImageSurface = new Surface(producer);

    res = mCodec->start();
    if (res != OK) {
        ALOGE("%s: Failed to start codec: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    if (mHDRGainmapEnabled) {
        res = mGainmapCodec->start();
        if (res != OK) {
            ALOGE("%s: Failed to start gainmap codec: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    //Use YUV_420 format if framework tiling is needed.
    int srcStreamFmt = mHDRGainmapEnabled ?
        static_cast<android_pixel_format_t>(HAL_PIXEL_FORMAT_YCBCR_P010) : mUseGrid ?
        HAL_PIXEL_FORMAT_YCbCr_420_888 : HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
    res = device->createStream(mMainImageSurface, width, height, srcStreamFmt, mInternalDataSpace,
            rotation, id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
            camera3::CAMERA3_STREAM_SET_ID_INVALID, /*isShared*/false, /*isMultiResolution*/false,
            /*consumerUsage*/0, mHDRGainmapEnabled ?
            ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10 :
            ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
            ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
            OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
            OutputConfiguration::MIRROR_MODE_AUTO,
            colorSpace,
            useReadoutTimestamp);
    if (res == OK) {
        mMainImageSurfaceId = (*surfaceIds)[0];
        mMainImageStreamId = *id;
    } else {
        ALOGE("%s: Failed to create main image stream: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    // Listen for buffer-release/result events on both internal streams so
    // frame numbers can be matched to codec output later.
    mOutputSurface = consumers[0].mSurface;
    res = registerCompositeStreamListener(mMainImageStreamId);
    if (res != OK) {
        ALOGE("%s: Failed to register HAL main image stream: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    if (mAppSegmentSupported) {
        res = registerCompositeStreamListener(mAppSegmentStreamId);
        if (res != OK) {
            ALOGE("%s: Failed to register HAL app segment stream: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Pick the row-copy implementation (e.g. width-dependent fast path) used
    // when copying YUV tiles into codec input buffers.
    initCopyRowFunction(width);
    return res;
}
310
// Tears down the composite stream: stops the processing thread, releases the
// codecs, deletes the internal APP-segment stream, and disconnects the client
// output surface. Returns the status of the most recent failing operation
// (join() or deleteStream()), OK otherwise.
status_t HeicCompositeStream::deleteInternalStreams() {
    requestExit();
    auto res = join();
    if (res != OK) {
        ALOGE("%s: Failed to join with the main processing thread: %s (%d)", __FUNCTION__,
                strerror(-res), res);
    }

    deinitCodec();

    if (mAppSegmentStreamId >= 0) {
        // Camera devices may not be valid after switching to offline mode.
        // In this case, all offline streams including internal composite streams
        // are managed and released by the offline session.
        sp<CameraDeviceBase> device = mDevice.promote();
        if (device.get() != nullptr) {
            res = device->deleteStream(mAppSegmentStreamId);
        }

        mAppSegmentStreamId = -1;
    }

    if (mOutputSurface != nullptr) {
        mOutputSurface->disconnect(NATIVE_WINDOW_API_CAMERA);
        mOutputSurface.clear();
    }

    // Deregister from the status tracker so this stream no longer blocks
    // device idle-state transitions.
    sp<StatusTracker> statusTracker = mStatusTracker.promote();
    if (statusTracker != nullptr && mStatusId != StatusTracker::NO_STATUS_ID) {
        statusTracker->removeComponent(mStatusId);
        mStatusId = StatusTracker::NO_STATUS_ID;
    }

    // Any frames still pending at this point were never completed; warn and
    // drop them.
    if (mPendingInputFrames.size() > 0) {
        ALOGW("%s: mPendingInputFrames has %zu stale entries",
                __FUNCTION__, mPendingInputFrames.size());
        mPendingInputFrames.clear();
    }

    return res;
}
352
onBufferReleased(const BufferInfo & bufferInfo)353 void HeicCompositeStream::onBufferReleased(const BufferInfo& bufferInfo) {
354 Mutex::Autolock l(mMutex);
355
356 if (bufferInfo.mError) return;
357
358 if (bufferInfo.mStreamId == mMainImageStreamId) {
359 mMainImageFrameNumbers.push(bufferInfo.mFrameNumber);
360 mCodecOutputBufferFrameNumbers.push(bufferInfo.mFrameNumber);
361 ALOGV("%s: [%" PRId64 "]: Adding main image frame number (%zu frame numbers in total)",
362 __FUNCTION__, bufferInfo.mFrameNumber, mMainImageFrameNumbers.size());
363 if (mHDRGainmapEnabled) {
364 mCodecGainmapOutputBufferFrameNumbers.push(bufferInfo.mFrameNumber);
365 }
366 } else if (bufferInfo.mStreamId == mAppSegmentStreamId) {
367 mAppSegmentFrameNumbers.push(bufferInfo.mFrameNumber);
368 ALOGV("%s: [%" PRId64 "]: Adding app segment frame number (%zu frame numbers in total)",
369 __FUNCTION__, bufferInfo.mFrameNumber, mAppSegmentFrameNumbers.size());
370 }
371 }
372
373 // We need to get the settings early to handle the case where the codec output
374 // arrives earlier than result metadata.
onBufferRequestForFrameNumber(uint64_t frameNumber,int streamId,const CameraMetadata & settings)375 void HeicCompositeStream::onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId,
376 const CameraMetadata& settings) {
377 ATRACE_ASYNC_BEGIN("HEIC capture", frameNumber);
378
379 Mutex::Autolock l(mMutex);
380 if (mErrorState || (streamId != getStreamId())) {
381 return;
382 }
383
384 mPendingCaptureResults.emplace(frameNumber, CameraMetadata());
385
386 camera_metadata_ro_entry entry;
387
388 int32_t orientation = 0;
389 entry = settings.find(ANDROID_JPEG_ORIENTATION);
390 if (entry.count == 1) {
391 orientation = entry.data.i32[0];
392 }
393
394 int32_t quality = kDefaultJpegQuality;
395 entry = settings.find(ANDROID_JPEG_QUALITY);
396 if (entry.count == 1) {
397 quality = entry.data.i32[0];
398 }
399
400 mSettingsByFrameNumber[frameNumber] = {orientation, quality};
401 }
402
onFrameAvailable(const BufferItem & item)403 void HeicCompositeStream::onFrameAvailable(const BufferItem& item) {
404 if (item.mDataSpace == static_cast<android_dataspace>(kAppSegmentDataSpace)) {
405 ALOGV("%s: JPEG APP segments buffer with ts: %" PRIu64 " ms. arrived!",
406 __func__, ns2ms(item.mTimestamp));
407
408 Mutex::Autolock l(mMutex);
409 if (!mErrorState) {
410 mInputAppSegmentBuffers.push_back(item.mTimestamp);
411 mInputReadyCondition.signal();
412 }
413 } else if (item.mDataSpace == mInternalDataSpace) {
414 ALOGV("%s: YUV_420 buffer with ts: %" PRIu64 " ms. arrived!",
415 __func__, ns2ms(item.mTimestamp));
416
417 Mutex::Autolock l(mMutex);
418 if (!mUseGrid && !mHDRGainmapEnabled) {
419 ALOGE("%s: YUV_420 internal stream is only supported for HEVC tiling",
420 __FUNCTION__);
421 return;
422 }
423 if (!mErrorState) {
424 mInputYuvBuffers.push_back(item.mTimestamp);
425 mInputReadyCondition.signal();
426 }
427 } else {
428 ALOGE("%s: Unexpected data space: 0x%x", __FUNCTION__, item.mDataSpace);
429 }
430 }
431
getCompositeStreamInfo(const OutputStreamInfo & streamInfo,const CameraMetadata & ch,std::vector<OutputStreamInfo> * compositeOutput)432 status_t HeicCompositeStream::getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
433 const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/) {
434 bool gainmapEnabled = false;
435 if (compositeOutput == nullptr) {
436 return BAD_VALUE;
437 }
438
439 compositeOutput->clear();
440
441 bool useGrid, useHeic;
442 bool isSizeSupported = isSizeSupportedByHeifEncoder(
443 streamInfo.width, streamInfo.height, &useHeic, &useGrid, nullptr);
444 if (!isSizeSupported) {
445 // Size is not supported by either encoder.
446 return OK;
447 }
448
449 if (streamInfo.dataSpace == static_cast<android_dataspace_t>(kUltraHDRDataSpace)) {
450 gainmapEnabled = true;
451 }
452
453 compositeOutput->clear();
454 compositeOutput->push_back({});
455
456 // YUV/IMPLEMENTATION_DEFINED stream info
457 (*compositeOutput)[0].width = streamInfo.width;
458 (*compositeOutput)[0].height = streamInfo.height;
459 (*compositeOutput)[0].format = gainmapEnabled ?
460 static_cast<android_pixel_format_t>(HAL_PIXEL_FORMAT_YCBCR_P010) : useGrid ?
461 HAL_PIXEL_FORMAT_YCbCr_420_888 : HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
462 (*compositeOutput)[0].dataSpace = gainmapEnabled ?
463 static_cast<android_dataspace_t>(HAL_DATASPACE_BT2020_HLG) : kHeifDataSpace;
464 (*compositeOutput)[0].consumerUsage = useHeic ? GRALLOC_USAGE_HW_IMAGE_ENCODER :
465 useGrid ? GRALLOC_USAGE_SW_READ_OFTEN : GRALLOC_USAGE_HW_VIDEO_ENCODER;
466
467
468 camera_metadata_ro_entry halHeicSupport = ch.find(ANDROID_HEIC_INFO_SUPPORTED);
469 if (halHeicSupport.count == 1 &&
470 halHeicSupport.data.u8[0] == ANDROID_HEIC_INFO_SUPPORTED_TRUE) {
471
472 compositeOutput->push_back({});
473 // JPEG APPS segments Blob stream info
474 (*compositeOutput)[1].width = calcAppSegmentMaxSize(ch);
475 (*compositeOutput)[1].height = 1;
476 (*compositeOutput)[1].format = HAL_PIXEL_FORMAT_BLOB;
477 (*compositeOutput)[1].dataSpace = kAppSegmentDataSpace;
478 (*compositeOutput)[1].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
479 }
480
481 return NO_ERROR;
482 }
483
// Queries the HEIF encoder info manager for whether width x height can be
// encoded, and reports which encoder path to use (*useHeic vs. HEVC with
// optional grid in *useGrid), the estimated stall duration, and the HEVC
// codec name.
// NOTE(review): 'heicManager' is a function-local static, so 'allowSWCodec'
// only influences the very first call in the process; subsequent calls with
// a different value reuse the already-constructed instance. Confirm this
// matches the intended singleton behavior.
bool HeicCompositeStream::isSizeSupportedByHeifEncoder(int32_t width, int32_t height,
        bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName, bool allowSWCodec) {
    static HeicEncoderInfoManager& heicManager = HeicEncoderInfoManager::getInstance(allowSWCodec);
    return heicManager.isSizeSupported(width, height, useHeic, useGrid, stall, hevcName);
}
489
isInMemoryTempFileSupported()490 bool HeicCompositeStream::isInMemoryTempFileSupported() {
491 int memfd = syscall(__NR_memfd_create, "HEIF-try-memfd", MFD_CLOEXEC);
492 if (memfd == -1) {
493 if (errno != ENOSYS) {
494 ALOGE("%s: Failed to create tmpfs file. errno %d", __FUNCTION__, errno);
495 }
496 return false;
497 }
498 close(memfd);
499 return true;
500 }
501
onHeicOutputFrameAvailable(const CodecOutputBufferInfo & outputBufferInfo,bool isGainmap)502 void HeicCompositeStream::onHeicOutputFrameAvailable(
503 const CodecOutputBufferInfo& outputBufferInfo, bool isGainmap) {
504 Mutex::Autolock l(mMutex);
505
506 ALOGV("%s: index %d, offset %d, size %d, time %" PRId64 ", flags 0x%x",
507 __FUNCTION__, outputBufferInfo.index, outputBufferInfo.offset,
508 outputBufferInfo.size, outputBufferInfo.timeUs, outputBufferInfo.flags);
509
510 if (!mErrorState) {
511 if ((outputBufferInfo.size > 0) &&
512 ((outputBufferInfo.flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0)) {
513 isGainmap ? mGainmapCodecOutputBuffers.push_back(outputBufferInfo) :
514 mCodecOutputBuffers.push_back(outputBufferInfo);
515 mInputReadyCondition.signal();
516 } else {
517 ALOGV("%s: Releasing output buffer: size %d flags: 0x%x ", __FUNCTION__,
518 outputBufferInfo.size, outputBufferInfo.flags);
519 isGainmap ? mGainmapCodec->releaseOutputBuffer(outputBufferInfo.index) :
520 mCodec->releaseOutputBuffer(outputBufferInfo.index);
521 }
522 } else {
523 isGainmap ? mGainmapCodec->releaseOutputBuffer(outputBufferInfo.index) :
524 mCodec->releaseOutputBuffer(outputBufferInfo.index);
525 }
526 }
527
onHeicInputFrameAvailable(int32_t index,bool isGainmap)528 void HeicCompositeStream::onHeicInputFrameAvailable(int32_t index, bool isGainmap) {
529 Mutex::Autolock l(mMutex);
530
531 if (!mUseGrid && !mHDRGainmapEnabled) {
532 ALOGE("%s: Codec YUV input mode must only be used for Hevc tiling mode", __FUNCTION__);
533 return;
534 }
535
536 isGainmap ? mGainmapCodecInputBuffers.push_back(index) : mCodecInputBuffers.push_back(index);
537 mInputReadyCondition.signal();
538 }
539
onHeicGainmapFormatChanged(sp<AMessage> & newFormat)540 void HeicCompositeStream::onHeicGainmapFormatChanged(sp<AMessage>& newFormat) {
541 if (newFormat == nullptr) {
542 ALOGE("%s: newFormat must not be null!", __FUNCTION__);
543 return;
544 }
545
546 Mutex::Autolock l(mMutex);
547
548 AString mime;
549 AString mimeHeic(MIMETYPE_IMAGE_ANDROID_HEIC);
550 newFormat->findString(KEY_MIME, &mime);
551 if (mime != mimeHeic) {
552 // For HEVC codec, below keys need to be filled out or overwritten so that the
553 // muxer can handle them as HEIC output image.
554 newFormat->setString(KEY_MIME, mimeHeic);
555 newFormat->setInt32(KEY_WIDTH, mGainmapOutputWidth);
556 newFormat->setInt32(KEY_HEIGHT, mGainmapOutputHeight);
557 }
558
559 if (mGainmapUseGrid) {
560 int32_t gridRows, gridCols, tileWidth, tileHeight;
561 if (newFormat->findInt32(KEY_GRID_ROWS, &gridRows) &&
562 newFormat->findInt32(KEY_GRID_COLUMNS, &gridCols) &&
563 newFormat->findInt32(KEY_TILE_WIDTH, &tileWidth) &&
564 newFormat->findInt32(KEY_TILE_HEIGHT, &tileHeight)) {
565 mGainmapGridWidth = tileWidth;
566 mGainmapGridHeight = tileHeight;
567 mGainmapGridRows = gridRows;
568 mGainmapGridCols = gridCols;
569 } else {
570 newFormat->setInt32(KEY_TILE_WIDTH, mGainmapGridWidth);
571 newFormat->setInt32(KEY_TILE_HEIGHT, mGainmapGridHeight);
572 newFormat->setInt32(KEY_GRID_ROWS, mGainmapGridRows);
573 newFormat->setInt32(KEY_GRID_COLUMNS, mGainmapGridCols);
574 }
575 int32_t left, top, right, bottom;
576 if (newFormat->findRect("crop", &left, &top, &right, &bottom)) {
577 newFormat->setRect("crop", 0, 0, mGainmapOutputWidth - 1, mGainmapOutputHeight - 1);
578 }
579 }
580 newFormat->setInt32(KEY_IS_DEFAULT, 1 /*isPrimary*/);
581
582 int32_t gridRows, gridCols;
583 if (newFormat->findInt32(KEY_GRID_ROWS, &gridRows) &&
584 newFormat->findInt32(KEY_GRID_COLUMNS, &gridCols)) {
585 mNumGainmapOutputTiles = gridRows * gridCols;
586 } else {
587 mNumGainmapOutputTiles = 1;
588 }
589
590 mGainmapFormat = newFormat;
591
592 ALOGV("%s: mNumOutputTiles is %zu", __FUNCTION__, mNumOutputTiles);
593 mInputReadyCondition.signal();
594 }
595
596
// Handles the main codec's output-format-changed notification. For the
// gainmap codec the work is delegated to onHeicGainmapFormatChanged().
// Otherwise: rewrites the format so the muxer treats it as a HEIC image
// (MIME/width/height), reconciles grid/tile keys with the framework's grid
// configuration, and caches the resulting tile count and format.
void HeicCompositeStream::onHeicFormatChanged(sp<AMessage>& newFormat, bool isGainmap) {
    if (newFormat == nullptr) {
        ALOGE("%s: newFormat must not be null!", __FUNCTION__);
        return;
    }

    if (isGainmap) {
        return onHeicGainmapFormatChanged(newFormat);
    }
    Mutex::Autolock l(mMutex);

    AString mime;
    AString mimeHeic(MIMETYPE_IMAGE_ANDROID_HEIC);
    newFormat->findString(KEY_MIME, &mime);
    if (mime != mimeHeic) {
        // For HEVC codec, below keys need to be filled out or overwritten so that the
        // muxer can handle them as HEIC output image.
        newFormat->setString(KEY_MIME, mimeHeic);
        newFormat->setInt32(KEY_WIDTH, mOutputWidth);
        newFormat->setInt32(KEY_HEIGHT, mOutputHeight);
    }

    if (mUseGrid || mUseHeic) {
        int32_t gridRows, gridCols, tileWidth, tileHeight;
        // Prefer the codec-reported grid geometry; fall back to the values
        // the framework configured when the codec doesn't report any.
        if (newFormat->findInt32(KEY_GRID_ROWS, &gridRows) &&
                newFormat->findInt32(KEY_GRID_COLUMNS, &gridCols) &&
                newFormat->findInt32(KEY_TILE_WIDTH, &tileWidth) &&
                newFormat->findInt32(KEY_TILE_HEIGHT, &tileHeight)) {
            mGridWidth = tileWidth;
            mGridHeight = tileHeight;
            mGridRows = gridRows;
            mGridCols = gridCols;
        } else {
            newFormat->setInt32(KEY_TILE_WIDTH, mGridWidth);
            newFormat->setInt32(KEY_TILE_HEIGHT, mGridHeight);
            newFormat->setInt32(KEY_GRID_ROWS, mGridRows);
            newFormat->setInt32(KEY_GRID_COLUMNS, mGridCols);
        }
        int32_t left, top, right, bottom;
        // Normalize any codec-provided crop to the full output dimensions.
        if (newFormat->findRect("crop", &left, &top, &right, &bottom)) {
            newFormat->setRect("crop", 0, 0, mOutputWidth - 1, mOutputHeight - 1);
        }
    }
    newFormat->setInt32(KEY_IS_DEFAULT, 1 /*isPrimary*/);

    // Cache the total tile count the muxer should expect per frame.
    int32_t gridRows, gridCols;
    if (newFormat->findInt32(KEY_GRID_ROWS, &gridRows) &&
            newFormat->findInt32(KEY_GRID_COLUMNS, &gridCols)) {
        mNumOutputTiles = gridRows * gridCols;
    } else {
        mNumOutputTiles = 1;
    }

    mFormat = newFormat;

    ALOGV("%s: mNumOutputTiles is %zu", __FUNCTION__, mNumOutputTiles);
    mInputReadyCondition.signal();
}
655
// Codec error callback: latches the error flag so the processing thread and
// other callbacks stop queueing work and release buffers immediately.
void HeicCompositeStream::onHeicCodecError() {
    Mutex::Autolock l(mMutex);
    mErrorState = true;
}
660
// Connects and configures the client-facing output surface (BLOB format,
// buffer count, max-blob dimensions), registers with the status tracker, and
// starts the processing thread. Idempotent: returns immediately if the
// thread is already running.
status_t HeicCompositeStream::configureStream() {
    if (isRunning()) {
        // Processing thread is already running, nothing more to do.
        return NO_ERROR;
    }

    if (mOutputSurface.get() == nullptr) {
        ALOGE("%s: No valid output surface set!", __FUNCTION__);
        return NO_INIT;
    }

    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mStreamSurfaceListener);
    if (res != OK) {
        ALOGE("%s: Unable to connect to native window for stream %d",
                __FUNCTION__, mMainImageStreamId);
        return res;
    }

    // The muxed HEIC file is delivered as a single BLOB buffer.
    if ((res = native_window_set_buffers_format(mOutputSurface.get(), HAL_PIXEL_FORMAT_BLOB))
            != OK) {
        ALOGE("%s: Unable to configure stream buffer format for stream %d", __FUNCTION__,
                mMainImageStreamId);
        return res;
    }

    ANativeWindow *anwConsumer = mOutputSurface.get();
    int maxConsumerBuffers;
    if ((res = anwConsumer->query(anwConsumer, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
                    &maxConsumerBuffers)) != OK) {
        ALOGE("%s: Unable to query consumer undequeued"
                " buffer count for stream %d", __FUNCTION__, mMainImageStreamId);
        return res;
    }

    // Cannot use SourceSurface buffer count since it could be codec's 512*512 tile
    // buffer count.
    if ((res = native_window_set_buffer_count(
                    anwConsumer, kMaxOutputSurfaceProducerCount + maxConsumerBuffers)) != OK) {
        ALOGE("%s: Unable to set buffer count for stream %d", __FUNCTION__, mMainImageStreamId);
        return res;
    }

    // BLOB buffers are one-dimensional: width = max encoded size, height = 1.
    if ((res = native_window_set_buffers_dimensions(anwConsumer, mMaxHeicBufferSize, 1)) != OK) {
        ALOGE("%s: Unable to set buffer dimension %zu x 1 for stream %d: %s (%d)",
                __FUNCTION__, mMaxHeicBufferSize, mMainImageStreamId, strerror(-res), res);
        return res;
    }

    // Register with the status tracker so in-flight HEIC work keeps the
    // device from being considered idle.
    sp<camera3::StatusTracker> statusTracker = mStatusTracker.promote();
    if (statusTracker != nullptr) {
        std::string name = std::string("HeicStream ") + std::to_string(getStreamId());
        mStatusId = statusTracker->addComponent(name);
    }

    run("HeicCompositeStreamProc");

    return NO_ERROR;
}
719
insertGbp(SurfaceMap * outSurfaceMap,Vector<int32_t> * outputStreamIds,int32_t * currentStreamId)720 status_t HeicCompositeStream::insertGbp(SurfaceMap* /*out*/outSurfaceMap,
721 Vector<int32_t>* /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) {
722 if (mAppSegmentSupported) {
723 if (outSurfaceMap->find(mAppSegmentStreamId) == outSurfaceMap->end()) {
724 outputStreamIds->push_back(mAppSegmentStreamId);
725 }
726 (*outSurfaceMap)[mAppSegmentStreamId].push_back(mAppSegmentSurfaceId);
727 }
728
729 if (outSurfaceMap->find(mMainImageStreamId) == outSurfaceMap->end()) {
730 outputStreamIds->push_back(mMainImageStreamId);
731 }
732 (*outSurfaceMap)[mMainImageStreamId].push_back(mMainImageSurfaceId);
733
734 if (currentStreamId != nullptr) {
735 *currentStreamId = mMainImageStreamId;
736 }
737
738 return NO_ERROR;
739 }
740
insertCompositeStreamIds(std::vector<int32_t> * compositeStreamIds)741 status_t HeicCompositeStream::insertCompositeStreamIds(
742 std::vector<int32_t>* compositeStreamIds /*out*/) {
743 if (compositeStreamIds == nullptr) {
744 return BAD_VALUE;
745 }
746
747 if (mAppSegmentSupported) {
748 compositeStreamIds->push_back(mAppSegmentStreamId);
749 }
750 compositeStreamIds->push_back(mMainImageStreamId);
751
752 return OK;
753 }
754
onShutter(const CaptureResultExtras & resultExtras,nsecs_t timestamp)755 void HeicCompositeStream::onShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) {
756 Mutex::Autolock l(mMutex);
757 if (mErrorState) {
758 return;
759 }
760
761 if (mSettingsByFrameNumber.find(resultExtras.frameNumber) != mSettingsByFrameNumber.end()) {
762 ALOGV("%s: [%" PRId64 "]: timestamp %" PRId64 ", requestId %d", __FUNCTION__,
763 resultExtras.frameNumber, timestamp, resultExtras.requestId);
764 mSettingsByFrameNumber[resultExtras.frameNumber].shutterNotified = true;
765 mSettingsByFrameNumber[resultExtras.frameNumber].timestamp = timestamp;
766 mSettingsByFrameNumber[resultExtras.frameNumber].requestId = resultExtras.requestId;
767 mInputReadyCondition.signal();
768 }
769 }
770
compilePendingInputLocked()771 void HeicCompositeStream::compilePendingInputLocked() {
772 auto i = mSettingsByFrameNumber.begin();
773 while (i != mSettingsByFrameNumber.end()) {
774 if (i->second.shutterNotified) {
775 mPendingInputFrames[i->first].orientation = i->second.orientation;
776 mPendingInputFrames[i->first].quality = i->second.quality;
777 mPendingInputFrames[i->first].timestamp = i->second.timestamp;
778 mPendingInputFrames[i->first].requestId = i->second.requestId;
779 ALOGV("%s: [%" PRId64 "]: timestamp is %" PRId64, __FUNCTION__,
780 i->first, i->second.timestamp);
781 i = mSettingsByFrameNumber.erase(i);
782
783 // Set encoder quality if no inflight encoding
784 if (mPendingInputFrames.size() == 1) {
785 sp<StatusTracker> statusTracker = mStatusTracker.promote();
786 if (statusTracker != nullptr) {
787 statusTracker->markComponentActive(mStatusId);
788 ALOGV("%s: Mark component as active", __FUNCTION__);
789 }
790
791 int32_t newQuality = mPendingInputFrames.begin()->second.quality;
792 updateCodecQualityLocked(newQuality);
793 }
794 } else {
795 i++;
796 }
797 }
798
799 while (!mInputAppSegmentBuffers.empty() && mAppSegmentFrameNumbers.size() > 0) {
800 CpuConsumer::LockedBuffer imgBuffer;
801 auto it = mInputAppSegmentBuffers.begin();
802 auto res = mAppSegmentConsumer->lockNextBuffer(&imgBuffer);
803 if (res == NOT_ENOUGH_DATA) {
804 // Can not lock any more buffers.
805 break;
806 } else if ((res != OK) || (*it != imgBuffer.timestamp)) {
807 if (res != OK) {
808 ALOGE("%s: Error locking JPEG_APP_SEGMENTS image buffer: %s (%d)", __FUNCTION__,
809 strerror(-res), res);
810 } else {
811 ALOGE("%s: Expecting JPEG_APP_SEGMENTS buffer with time stamp: %" PRId64
812 " received buffer with time stamp: %" PRId64, __FUNCTION__,
813 *it, imgBuffer.timestamp);
814 mAppSegmentConsumer->unlockBuffer(imgBuffer);
815 }
816 mPendingInputFrames[*it].error = true;
817 mInputAppSegmentBuffers.erase(it);
818 continue;
819 }
820
821 if (mPendingInputFrames.find(mAppSegmentFrameNumbers.front()) == mPendingInputFrames.end()) {
822 ALOGE("%s: mPendingInputFrames doesn't contain frameNumber %" PRId64, __FUNCTION__,
823 mAppSegmentFrameNumbers.front());
824 mInputAppSegmentBuffers.erase(it);
825 mAppSegmentFrameNumbers.pop();
826 continue;
827 }
828
829 int64_t frameNumber = mAppSegmentFrameNumbers.front();
830 // If mPendingInputFrames doesn't contain the expected frame number, the captured
831 // input app segment frame must have been dropped via a buffer error. Simply
832 // return the buffer to the buffer queue.
833 if ((mPendingInputFrames.find(frameNumber) == mPendingInputFrames.end()) ||
834 (mPendingInputFrames[frameNumber].error)) {
835 mAppSegmentConsumer->unlockBuffer(imgBuffer);
836 } else {
837 mPendingInputFrames[frameNumber].appSegmentBuffer = imgBuffer;
838 }
839 mInputAppSegmentBuffers.erase(it);
840 mAppSegmentFrameNumbers.pop();
841 }
842
843 while (!mInputYuvBuffers.empty() && !mYuvBufferAcquired && mMainImageFrameNumbers.size() > 0) {
844 CpuConsumer::LockedBuffer imgBuffer;
845 auto it = mInputYuvBuffers.begin();
846 auto res = mMainImageConsumer->lockNextBuffer(&imgBuffer);
847 if (res == NOT_ENOUGH_DATA) {
848 // Can not lock any more buffers.
849 break;
850 } else if (res != OK) {
851 ALOGE("%s: Error locking YUV_888 image buffer: %s (%d)", __FUNCTION__,
852 strerror(-res), res);
853 mPendingInputFrames[*it].error = true;
854 mInputYuvBuffers.erase(it);
855 continue;
856 } else if (*it != imgBuffer.timestamp) {
857 ALOGW("%s: Expecting YUV_888 buffer with time stamp: %" PRId64 " received buffer with "
858 "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
859 mPendingInputFrames[*it].error = true;
860 mInputYuvBuffers.erase(it);
861 continue;
862 }
863
864 if (mPendingInputFrames.find(mMainImageFrameNumbers.front()) == mPendingInputFrames.end()) {
865 ALOGE("%s: mPendingInputFrames doesn't contain frameNumber %" PRId64, __FUNCTION__,
866 mMainImageFrameNumbers.front());
867 mInputYuvBuffers.erase(it);
868 mMainImageFrameNumbers.pop();
869 continue;
870 }
871
872 int64_t frameNumber = mMainImageFrameNumbers.front();
873 // If mPendingInputFrames doesn't contain the expected frame number, the captured
874 // input main image must have been dropped via a buffer error. Simply
875 // return the buffer to the buffer queue.
876 if ((mPendingInputFrames.find(frameNumber) == mPendingInputFrames.end()) ||
877 (mPendingInputFrames[frameNumber].error)) {
878 mMainImageConsumer->unlockBuffer(imgBuffer);
879 } else {
880 mPendingInputFrames[frameNumber].yuvBuffer = imgBuffer;
881 mYuvBufferAcquired = true;
882 }
883 mInputYuvBuffers.erase(it);
884 mMainImageFrameNumbers.pop();
885 }
886
887 while (!mCodecOutputBuffers.empty()) {
888 auto it = mCodecOutputBuffers.begin();
889 // Assume encoder input to output is FIFO, use a queue to look up
890 // frameNumber when handling codec outputs.
891 int64_t bufferFrameNumber = -1;
892 if (mCodecOutputBufferFrameNumbers.empty()) {
893 ALOGV("%s: Failed to find buffer frameNumber for codec output buffer!", __FUNCTION__);
894 break;
895 } else {
896 // Direct mapping between camera frame number and codec timestamp (in us).
897 bufferFrameNumber = mCodecOutputBufferFrameNumbers.front();
898 mCodecOutputCounter++;
899 if (mCodecOutputCounter == mNumOutputTiles) {
900 mCodecOutputBufferFrameNumbers.pop();
901 mCodecOutputCounter = 0;
902 }
903
904 mPendingInputFrames[bufferFrameNumber].codecOutputBuffers.push_back(*it);
905 ALOGV("%s: [%" PRId64 "]: Pushing codecOutputBuffers (frameNumber %" PRId64 ")",
906 __FUNCTION__, bufferFrameNumber, it->timeUs);
907 }
908 mCodecOutputBuffers.erase(it);
909 }
910
911 while (!mGainmapCodecOutputBuffers.empty()) {
912 auto it = mGainmapCodecOutputBuffers.begin();
913 // Assume encoder input to output is FIFO, use a queue to look up
914 // frameNumber when handling codec outputs.
915 int64_t bufferFrameNumber = -1;
916 if (mCodecGainmapOutputBufferFrameNumbers.empty()) {
917 ALOGV("%s: Failed to find buffer frameNumber for gainmap codec output buffer!",
918 __FUNCTION__);
919 break;
920 } else {
921 // Direct mapping between camera frame number and codec timestamp (in us).
922 bufferFrameNumber = mCodecGainmapOutputBufferFrameNumbers.front();
923 mCodecGainmapOutputCounter++;
924 if (mCodecGainmapOutputCounter == mNumGainmapOutputTiles) {
925 mCodecGainmapOutputBufferFrameNumbers.pop();
926 mCodecGainmapOutputCounter = 0;
927 }
928
929 mPendingInputFrames[bufferFrameNumber].gainmapCodecOutputBuffers.push_back(*it);
930 ALOGV("%s: [%" PRId64 "]: Pushing gainmap codecOutputBuffers (frameNumber %" PRId64 ")",
931 __FUNCTION__, bufferFrameNumber, it->timeUs);
932 }
933 mGainmapCodecOutputBuffers.erase(it);
934 }
935
936 while (!mCaptureResults.empty()) {
937 auto it = mCaptureResults.begin();
938 // Negative frame number indicates that something went wrong during the capture result
939 // collection process.
940 int64_t frameNumber = std::get<0>(it->second);
941 if (it->first >= 0 &&
942 mPendingInputFrames.find(frameNumber) != mPendingInputFrames.end()) {
943 if (mPendingInputFrames[frameNumber].timestamp == it->first) {
944 mPendingInputFrames[frameNumber].result =
945 std::make_unique<CameraMetadata>(std::get<1>(it->second));
946 if (!mAppSegmentSupported) {
947 mPendingInputFrames[frameNumber].exifError = true;
948 }
949 } else {
950 ALOGE("%s: Capture result frameNumber/timestamp mapping changed between "
951 "shutter and capture result! before: %" PRId64 ", after: %" PRId64,
952 __FUNCTION__, mPendingInputFrames[frameNumber].timestamp,
953 it->first);
954 }
955 }
956 mCaptureResults.erase(it);
957 }
958
959 // mErrorFrameNumbers stores frame number of dropped buffers.
960 auto it = mErrorFrameNumbers.begin();
961 while (it != mErrorFrameNumbers.end()) {
962 if (mPendingInputFrames.find(*it) != mPendingInputFrames.end()) {
963 mPendingInputFrames[*it].error = true;
964 } else {
965 //Error callback is guaranteed to arrive after shutter notify, which
966 //results in mPendingInputFrames being populated.
967 ALOGW("%s: Not able to find failing input with frame number: %" PRId64, __FUNCTION__,
968 *it);
969 }
970 it = mErrorFrameNumbers.erase(it);
971 }
972
973 // mExifErrorFrameNumbers stores the frame number of dropped APP_SEGMENT buffers
974 it = mExifErrorFrameNumbers.begin();
975 while (it != mExifErrorFrameNumbers.end()) {
976 if (mPendingInputFrames.find(*it) != mPendingInputFrames.end()) {
977 mPendingInputFrames[*it].exifError = true;
978 }
979 it = mExifErrorFrameNumbers.erase(it);
980 }
981
982 // Distribute codec input buffers to be filled out from YUV output
983 for (auto it = mPendingInputFrames.begin();
984 it != mPendingInputFrames.end() && mCodecInputBuffers.size() > 0; it++) {
985 InputFrame& inputFrame(it->second);
986 if (inputFrame.codecInputCounter < mGridRows * mGridCols) {
987 // Available input tiles that are required for the current input
988 // image.
989 size_t newInputTiles = std::min(mCodecInputBuffers.size(),
990 mGridRows * mGridCols - inputFrame.codecInputCounter);
991 for (size_t i = 0; i < newInputTiles; i++) {
992 CodecInputBufferInfo inputInfo =
993 { mCodecInputBuffers[0], mGridTimestampUs++, inputFrame.codecInputCounter };
994 inputFrame.codecInputBuffers.push_back(inputInfo);
995
996 mCodecInputBuffers.erase(mCodecInputBuffers.begin());
997 inputFrame.codecInputCounter++;
998 }
999 break;
1000 }
1001 }
1002
1003 // Distribute codec input buffers to be filled out from YUV output
1004 for (auto it = mPendingInputFrames.begin();
1005 it != mPendingInputFrames.end() && mGainmapCodecInputBuffers.size() > 0; it++) {
1006 InputFrame& inputFrame(it->second);
1007 if (inputFrame.gainmapCodecInputCounter < mGainmapGridRows * mGainmapGridCols) {
1008 // Available input tiles that are required for the current input
1009 // image.
1010 size_t newInputTiles = std::min(mGainmapCodecInputBuffers.size(),
1011 mGainmapGridRows * mGainmapGridCols - inputFrame.gainmapCodecInputCounter);
1012 for (size_t i = 0; i < newInputTiles; i++) {
1013 CodecInputBufferInfo inputInfo = { mGainmapCodecInputBuffers[0],
1014 mGridTimestampUs++, inputFrame.gainmapCodecInputCounter };
1015 inputFrame.gainmapCodecInputBuffers.push_back(inputInfo);
1016
1017 mGainmapCodecInputBuffers.erase(mGainmapCodecInputBuffers.begin());
1018 inputFrame.gainmapCodecInputCounter++;
1019 }
1020 break;
1021 }
1022 }
1023 }
1024
getNextReadyInputLocked(int64_t * frameNumber)1025 bool HeicCompositeStream::getNextReadyInputLocked(int64_t *frameNumber /*out*/) {
1026 if (frameNumber == nullptr) {
1027 return false;
1028 }
1029
1030 bool newInputAvailable = false;
1031 for (auto& it : mPendingInputFrames) {
1032 // New input is considered to be available only if:
1033 // 1. input buffers are ready, or
1034 // 2. App segment and muxer is created, or
1035 // 3. A codec output tile is ready, and an output buffer is available.
1036 // This makes sure that muxer gets created only when an output tile is
1037 // generated, because right now we only handle 1 HEIC output buffer at a
1038 // time (max dequeued buffer count is 1).
1039 bool appSegmentReady =
1040 (it.second.appSegmentBuffer.data != nullptr || it.second.exifError) &&
1041 !it.second.appSegmentWritten && it.second.result != nullptr &&
1042 it.second.muxer != nullptr;
1043 bool codecOutputReady = !it.second.codecOutputBuffers.empty() ||
1044 !it.second.gainmapCodecOutputBuffers.empty();
1045 bool codecInputReady = (it.second.yuvBuffer.data != nullptr) &&
1046 (!it.second.codecInputBuffers.empty());
1047 bool hasOutputBuffer = it.second.muxer != nullptr ||
1048 (mDequeuedOutputBufferCnt < kMaxOutputSurfaceProducerCount);
1049 if ((!it.second.error) &&
1050 (appSegmentReady || (codecOutputReady && hasOutputBuffer) || codecInputReady)) {
1051 *frameNumber = it.first;
1052 if (it.second.format == nullptr && mFormat != nullptr) {
1053 it.second.format = mFormat->dup();
1054 }
1055 if (it.second.gainmapFormat == nullptr && mGainmapFormat != nullptr){
1056 it.second.gainmapFormat = mGainmapFormat->dup();
1057 it.second.gainmapFormat->setInt32("gainmap", 1);
1058 }
1059 newInputAvailable = true;
1060 break;
1061 }
1062 }
1063
1064 return newInputAvailable;
1065 }
1066
getNextFailingInputLocked()1067 int64_t HeicCompositeStream::getNextFailingInputLocked() {
1068 int64_t res = -1;
1069
1070 for (const auto& it : mPendingInputFrames) {
1071 if (it.second.error) {
1072 res = it.first;
1073 break;
1074 }
1075 }
1076
1077 return res;
1078 }
1079
// Advances a single pending capture through the HEIC composition pipeline.
// Depending on which of its inputs have arrived this call may, in order:
// tonemap the HDR input and generate a gainmap, feed YUV tiles to the
// codec(s), create and start the muxer, and write gainmap metadata, JPEG APP
// segments, and encoded tiles into the output container. Returning OK does
// not mean the frame is complete — only that no fatal error occurred.
status_t HeicCompositeStream::processInputFrame(int64_t frameNumber,
        InputFrame &inputFrame) {
    ATRACE_CALL();
    status_t res = OK;

    // APP segments can only be written once: the capture result must be
    // present and the muxer must already be started.
    bool appSegmentReady =
            (inputFrame.appSegmentBuffer.data != nullptr || inputFrame.exifError) &&
            !inputFrame.appSegmentWritten && inputFrame.result != nullptr &&
            inputFrame.muxer != nullptr;
    bool codecOutputReady = inputFrame.codecOutputBuffers.size() > 0 ||
            inputFrame.gainmapCodecOutputBuffers.size() > 0;
    bool codecInputReady = inputFrame.yuvBuffer.data != nullptr &&
            !inputFrame.codecInputBuffers.empty();
    bool gainmapCodecInputReady = inputFrame.gainmapImage.get() != nullptr &&
            !inputFrame.gainmapCodecInputBuffers.empty();
    // Muxer existence implies an output buffer was already dequeued for this
    // frame; otherwise one must still be available from the surface.
    bool hasOutputBuffer = inputFrame.muxer != nullptr ||
            (mDequeuedOutputBufferCnt < kMaxOutputSurfaceProducerCount);
    bool hasGainmapMetadata = !inputFrame.isoGainmapMetadata.empty();

    ALOGV("%s: [%" PRId64 "]: appSegmentReady %d, codecOutputReady %d, codecInputReady %d,"
            " dequeuedOutputBuffer %d, timestamp %" PRId64, __FUNCTION__, frameNumber,
            appSegmentReady, codecOutputReady, codecInputReady, mDequeuedOutputBufferCnt,
            inputFrame.timestamp);

    // Handle inputs for Hevc tiling
    if (codecInputReady) {
        // HDR path: tonemap an SDR base image and derive the gainmap before
        // the first tile of this frame is encoded.
        if (mHDRGainmapEnabled && (inputFrame.baseBuffer.get() == nullptr)) {
            auto res = generateBaseImageAndGainmap(inputFrame);
            if (res != OK) {
                ALOGE("%s: Error generating SDR base image and HDR gainmap: %s (%d)", __FUNCTION__,
                        strerror(-res), res);
                return res;
            }
        }

        res = processCodecInputFrame(inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process codec input frame: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    if (gainmapCodecInputReady) {
        res = processCodecGainmapInputFrame(inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process gainmap codec input frame: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Nothing left to do for this frame until more outputs (or the app
    // segment prerequisites) arrive.
    if (!(codecOutputReady && hasOutputBuffer) && !appSegmentReady) {
        return OK;
    }

    // Initialize and start muxer if not yet done so. In this case,
    // codecOutputReady must be true. Otherwise, appSegmentReady is guaranteed
    // to be false, and the function must have returned early.
    if (inputFrame.muxer == nullptr) {
        res = startMuxerForInputFrame(frameNumber, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to create and start muxer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Write the HDR gainmap metadata (ISO 21496-1 payload, prefixed with a
    // 'tmap' marker) as muxer sample data on the main image track.
    if (hasGainmapMetadata) {
        uint8_t kGainmapMetaMarker[] = {'t', 'm', 'a', 'p', '\0', '\0'};
        sp<ABuffer> aBuffer =
                new ABuffer(inputFrame.isoGainmapMetadata.size() + sizeof(kGainmapMetaMarker));
        memcpy(aBuffer->data(), kGainmapMetaMarker, sizeof(kGainmapMetaMarker));
        memcpy(aBuffer->data() + sizeof(kGainmapMetaMarker), inputFrame.isoGainmapMetadata.data(),
               inputFrame.isoGainmapMetadata.size());

        aBuffer->meta()->setInt32(KEY_COLOR_FORMAT, kCodecColorFormat);
        aBuffer->meta()->setInt32("color-primaries", kCodecColorPrimaries);
        aBuffer->meta()->setInt32("color-transfer", kCodecColorTransfer);
        aBuffer->meta()->setInt32("color-matrix", kCodecColorMatrix);
        aBuffer->meta()->setInt32("color-range", kCodecColorRange);
        auto res = inputFrame.muxer->writeSampleData(aBuffer, inputFrame.trackIndex,
                                                     inputFrame.timestamp,
                                                     MediaCodec::BUFFER_FLAG_MUXER_DATA);
        if (res != OK) {
            ALOGE("%s: Failed to write HDR gainmap metadata to muxer: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
        // Clear so the metadata is only written once per frame.
        inputFrame.isoGainmapMetadata.clear();
    }

    // Write JPEG APP segments data to the muxer.
    if (appSegmentReady) {
        res = processAppSegment(frameNumber, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process JPEG APP segments: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Write media codec bitstream buffers to muxer.
    while (!inputFrame.codecOutputBuffers.empty()) {
        res = processOneCodecOutputFrame(frameNumber, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process codec output frame: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Write media codec gainmap bitstream buffers to muxer.
    while (!inputFrame.gainmapCodecOutputBuffers.empty()) {
        res = processOneCodecGainmapOutputFrame(frameNumber, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process codec gainmap output frame: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Frame is complete once all tiles (main image and gainmap) are muxed and
    // the app segment has been written.
    if ((inputFrame.pendingOutputTiles == 0) && (inputFrame.gainmapPendingOutputTiles == 0)) {
        if (inputFrame.appSegmentWritten) {
            res = processCompletedInputFrame(frameNumber, inputFrame);
            if (res != OK) {
                ALOGE("%s: Failed to process completed input frame: %s (%d)", __FUNCTION__,
                        strerror(-res), res);
                return res;
            }
        }
    }

    return res;
}
1216
// Prepares muxing for one frame: dequeues an output buffer from the output
// surface, creates an in-memory (memfd) file for the HEIF container, adds the
// main image track (and optional gainmap track) to a new MediaMuxer, and
// starts it. Returns OK on success; NO_INIT or the underlying error code on
// failure.
status_t HeicCompositeStream::startMuxerForInputFrame(int64_t frameNumber, InputFrame &inputFrame) {
    sp<ANativeWindow> outputANW = mOutputSurface;

    auto res = outputANW->dequeueBuffer(mOutputSurface.get(), &inputFrame.anb, &inputFrame.fenceFd);
    if (res != OK) {
        ALOGE("%s: Error retrieving output buffer: %s (%d)", __FUNCTION__, strerror(-res),
                res);
        return res;
    }
    // Count the dequeued buffer so getNextReadyInputLocked() can throttle new
    // muxer creation against kMaxOutputSurfaceProducerCount.
    mDequeuedOutputBufferCnt++;

    // Combine current thread id, stream id and frame number to uniquely identify image.
    std::ostringstream tempOutputFile;
    tempOutputFile << "HEIF-" << pthread_self() << "-"
            << getStreamId() << "-" << frameNumber;
    // memfd_create gives an anonymous, memory-backed fd: the muxer writes the
    // HEIF container here, with no on-disk file to clean up.
    inputFrame.fileFd = syscall(__NR_memfd_create, tempOutputFile.str().c_str(), MFD_CLOEXEC);
    if (inputFrame.fileFd < 0) {
        ALOGE("%s: Failed to create file %s. Error no is %d", __FUNCTION__,
                tempOutputFile.str().c_str(), errno);
        return NO_INIT;
    }
    inputFrame.muxer = MediaMuxer::create(inputFrame.fileFd, MediaMuxer::OUTPUT_FORMAT_HEIF);
    if (inputFrame.muxer == nullptr) {
        ALOGE("%s: Failed to create MediaMuxer for file fd %d",
                __FUNCTION__, inputFrame.fileFd);
        return NO_INIT;
    }

    res = inputFrame.muxer->setOrientationHint(inputFrame.orientation);
    if (res != OK) {
        ALOGE("%s: Failed to setOrientationHint: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    // Main image track; its format was attached in getNextReadyInputLocked().
    ssize_t trackId = inputFrame.muxer->addTrack(inputFrame.format);
    if (trackId < 0) {
        ALOGE("%s: Failed to addTrack to the muxer: %zd", __FUNCTION__, trackId);
        return NO_INIT;
    }

    inputFrame.trackIndex = trackId;
    inputFrame.pendingOutputTiles = mNumOutputTiles;

    // Optional gainmap track, present only for HDR gainmap captures.
    if (inputFrame.gainmapFormat.get() != nullptr) {
        trackId = inputFrame.muxer->addTrack(inputFrame.gainmapFormat);
        if (trackId < 0) {
            ALOGE("%s: Failed to addTrack to the muxer: %zd", __FUNCTION__, trackId);
            return NO_INIT;
        }
        inputFrame.gainmapTrackIndex = trackId;
        inputFrame.gainmapPendingOutputTiles = mNumGainmapOutputTiles;
    }

    res = inputFrame.muxer->start();
    if (res != OK) {
        ALOGE("%s: Failed to start MediaMuxer: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    ALOGV("%s: [%" PRId64 "]: Muxer started for inputFrame", __FUNCTION__,
            frameNumber);
    return OK;
}
1282
processAppSegment(int64_t frameNumber,InputFrame & inputFrame)1283 status_t HeicCompositeStream::processAppSegment(int64_t frameNumber, InputFrame &inputFrame) {
1284 size_t app1Size = 0;
1285 size_t appSegmentSize = 0;
1286 if (!inputFrame.exifError) {
1287 appSegmentSize = findAppSegmentsSize(inputFrame.appSegmentBuffer.data,
1288 inputFrame.appSegmentBuffer.width * inputFrame.appSegmentBuffer.height,
1289 &app1Size);
1290 if (appSegmentSize == 0) {
1291 ALOGE("%s: Failed to find JPEG APP segment size", __FUNCTION__);
1292 return NO_INIT;
1293 }
1294 }
1295
1296 std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
1297 auto exifRes = inputFrame.exifError ?
1298 exifUtils->initializeEmpty() :
1299 exifUtils->initialize(inputFrame.appSegmentBuffer.data, app1Size);
1300 if (!exifRes) {
1301 ALOGE("%s: Failed to initialize ExifUtils object!", __FUNCTION__);
1302 return BAD_VALUE;
1303 }
1304 exifRes = exifUtils->setFromMetadata(*inputFrame.result, mStaticInfo,
1305 mOutputWidth, mOutputHeight);
1306 if (!exifRes) {
1307 ALOGE("%s: Failed to set Exif tags using metadata and main image sizes", __FUNCTION__);
1308 return BAD_VALUE;
1309 }
1310 exifRes = exifUtils->setOrientation(inputFrame.orientation);
1311 if (!exifRes) {
1312 ALOGE("%s: ExifUtils failed to set orientation", __FUNCTION__);
1313 return BAD_VALUE;
1314 }
1315 exifRes = exifUtils->generateApp1();
1316 if (!exifRes) {
1317 ALOGE("%s: ExifUtils failed to generate APP1 segment", __FUNCTION__);
1318 return BAD_VALUE;
1319 }
1320
1321 unsigned int newApp1Length = exifUtils->getApp1Length();
1322 const uint8_t *newApp1Segment = exifUtils->getApp1Buffer();
1323
1324 //Assemble the APP1 marker buffer required by MediaCodec
1325 uint8_t kExifApp1Marker[] = {'E', 'x', 'i', 'f', 0xFF, 0xE1, 0x00, 0x00};
1326 kExifApp1Marker[6] = static_cast<uint8_t>(newApp1Length >> 8);
1327 kExifApp1Marker[7] = static_cast<uint8_t>(newApp1Length & 0xFF);
1328 size_t appSegmentBufferSize = sizeof(kExifApp1Marker) +
1329 appSegmentSize - app1Size + newApp1Length;
1330 uint8_t* appSegmentBuffer = new uint8_t[appSegmentBufferSize];
1331 memcpy(appSegmentBuffer, kExifApp1Marker, sizeof(kExifApp1Marker));
1332 memcpy(appSegmentBuffer + sizeof(kExifApp1Marker), newApp1Segment, newApp1Length);
1333 if (appSegmentSize - app1Size > 0) {
1334 memcpy(appSegmentBuffer + sizeof(kExifApp1Marker) + newApp1Length,
1335 inputFrame.appSegmentBuffer.data + app1Size, appSegmentSize - app1Size);
1336 }
1337
1338 sp<ABuffer> aBuffer = new ABuffer(appSegmentBuffer, appSegmentBufferSize);
1339 auto res = inputFrame.muxer->writeSampleData(aBuffer, inputFrame.trackIndex,
1340 inputFrame.timestamp, MediaCodec::BUFFER_FLAG_MUXER_DATA);
1341 delete[] appSegmentBuffer;
1342
1343 if (res != OK) {
1344 ALOGE("%s: Failed to write JPEG APP segments to muxer: %s (%d)",
1345 __FUNCTION__, strerror(-res), res);
1346 return res;
1347 }
1348
1349 ALOGV("%s: [%" PRId64 "]: appSegmentSize is %zu, width %d, height %d, app1Size %zu",
1350 __FUNCTION__, frameNumber, appSegmentSize, inputFrame.appSegmentBuffer.width,
1351 inputFrame.appSegmentBuffer.height, app1Size);
1352
1353 inputFrame.appSegmentWritten = true;
1354 // Release the buffer now so any pending input app segments can be processed
1355 if (!inputFrame.exifError) {
1356 mAppSegmentConsumer->unlockBuffer(inputFrame.appSegmentBuffer);
1357 inputFrame.appSegmentBuffer.data = nullptr;
1358 inputFrame.exifError = false;
1359 }
1360
1361 return OK;
1362 }
1363
// Tonemaps the HDR YUV input into an SDR base image and derives an HDR
// gainmap (plus ISO 21496-1 metadata) from the pair using libultrahdr.
// On success, inputFrame.baseImage and inputFrame.gainmapImage are
// LockedBuffer views aliasing libultrahdr-owned planes, ready for tiling.
status_t HeicCompositeStream::generateBaseImageAndGainmap(InputFrame &inputFrame) {
    ultrahdr::JpegR jpegR(nullptr /*gles ctx*/, kGainmapScale);
    // NOTE(review): the transfer argument mixes kUltraHdrInputTransfer in
    // with the other kUltraHdrOutput* constants, and the final constructor
    // argument 8 is commented "stride" — confirm both against the
    // uhdr_raw_image_ext_t constructor signature.
    inputFrame.baseBuffer = std::make_unique<ultrahdr::uhdr_raw_image_ext_t>(
            kUltraHdrOutputFmt, kUltraHdrOutputGamut, kUltraHdrInputTransfer, kUltraHdrOutputRange,
            inputFrame.yuvBuffer.width, inputFrame.yuvBuffer.height, 8/*stride*/);

    // Describe the captured HDR buffer (semi-planar Y + interleaved UV) as a
    // libultrahdr raw image.
    uhdr_raw_image_t hdr_intent;
    hdr_intent.fmt = kUltraHdrInputFmt;
    hdr_intent.cg = kUltraHdrInputGamut;
    hdr_intent.ct = kUltraHdrInputTransfer;
    hdr_intent.range = kUltraHdrInputRange;
    hdr_intent.w = inputFrame.yuvBuffer.width;
    hdr_intent.h = inputFrame.yuvBuffer.height;
    hdr_intent.planes[UHDR_PLANE_Y] = inputFrame.yuvBuffer.data;
    hdr_intent.planes[UHDR_PLANE_UV] = inputFrame.yuvBuffer.dataCb;
    hdr_intent.planes[UHDR_PLANE_V] = nullptr;
    //libUltraHDR expects the stride in pixels
    // (byte stride divided by 2 — presumably 2 bytes per sample, e.g. P010;
    // TODO confirm against the input format constant)
    hdr_intent.stride[UHDR_PLANE_Y] = inputFrame.yuvBuffer.stride / 2;
    hdr_intent.stride[UHDR_PLANE_UV] = inputFrame.yuvBuffer.chromaStride / 2;
    hdr_intent.stride[UHDR_PLANE_V] = 0;
    auto res = jpegR.toneMap(&hdr_intent, inputFrame.baseBuffer.get());
    if (res.error_code == UHDR_CODEC_OK) {
        ALOGV("%s: Base image tonemapped successfully", __FUNCTION__);
    } else {
        ALOGE("%s: Failed during HDR to SDR tonemap: %d", __FUNCTION__, res.error_code);
        return BAD_VALUE;
    }

    // Wrap the tonemapped planes in a LockedBuffer so the tiling code can
    // treat the base image like a regular captured YUV buffer. Start from the
    // original buffer to inherit width/height/flex layout fields.
    inputFrame.baseImage = std::make_unique<CpuConsumer::LockedBuffer>();
    *inputFrame.baseImage = inputFrame.yuvBuffer;
    inputFrame.baseImage->data = reinterpret_cast<uint8_t*>(
            inputFrame.baseBuffer->planes[UHDR_PLANE_Y]);
    inputFrame.baseImage->dataCb = reinterpret_cast<uint8_t*>(
            inputFrame.baseBuffer->planes[UHDR_PLANE_U]);
    inputFrame.baseImage->dataCr = reinterpret_cast<uint8_t*>(
            inputFrame.baseBuffer->planes[UHDR_PLANE_V]);
    inputFrame.baseImage->chromaStep = 1;
    inputFrame.baseImage->stride = inputFrame.baseBuffer->stride[UHDR_PLANE_Y];
    inputFrame.baseImage->chromaStride = inputFrame.baseBuffer->stride[UHDR_PLANE_UV];
    inputFrame.baseImage->dataSpace = HAL_DATASPACE_V0_JFIF;

    // Generate the gainmap from the SDR base image and the HDR intent.
    ultrahdr::uhdr_gainmap_metadata_ext_t metadata;
    res = jpegR.generateGainMap(inputFrame.baseBuffer.get(), &hdr_intent, &metadata,
            inputFrame.gainmap, false /*sdr_is_601*/, true /*use_luminance*/);
    if (res.error_code == UHDR_CODEC_OK) {
        ALOGV("%s: HDR gainmap generated successfully!", __FUNCTION__);
    } else {
        ALOGE("%s: Failed HDR gainmap: %d", __FUNCTION__, res.error_code);
        return BAD_VALUE;
    }
    // We can only generate a single channel gainmap at the moment. However only
    // multi channel HEVC encoding (like YUV420) is required. Set the extra U/V
    // planes to 128 to avoid encoding any actual color data.
    inputFrame.gainmapChroma = std::make_unique<uint8_t[]>(
            inputFrame.gainmap->w * inputFrame.gainmap->h / 2);
    memset(inputFrame.gainmapChroma.get(), 128, inputFrame.gainmap->w * inputFrame.gainmap->h / 2);

    // Convert the float gainmap metadata to its fractional representation and
    // then serialize it to the ISO format expected by the HEIF container.
    ultrahdr::uhdr_gainmap_metadata_frac iso_secondary_metadata;
    res = ultrahdr::uhdr_gainmap_metadata_frac::gainmapMetadataFloatToFraction(
                &metadata, &iso_secondary_metadata);
    if (res.error_code == UHDR_CODEC_OK) {
        ALOGV("%s: HDR gainmap converted to fractions successfully!", __FUNCTION__);
    } else {
        ALOGE("%s: Failed to convert HDR gainmap to fractions: %d", __FUNCTION__,
                res.error_code);
        return BAD_VALUE;
    }

    res = ultrahdr::uhdr_gainmap_metadata_frac::encodeGainmapMetadata(&iso_secondary_metadata,
            inputFrame.isoGainmapMetadata);
    if (res.error_code == UHDR_CODEC_OK) {
        ALOGV("%s: HDR gainmap encoded to ISO format successfully!", __FUNCTION__);
    } else {
        ALOGE("%s: Failed to encode HDR gainmap to ISO format: %d", __FUNCTION__,
                res.error_code);
        return BAD_VALUE;
    }
    // 6.6.2.4.2 of ISO/IEC 23008-12:2024 expects the ISO 21496-1 gainmap to be
    // preceded by an u8 version equal to 0
    inputFrame.isoGainmapMetadata.insert(inputFrame.isoGainmapMetadata.begin(), 0);

    // Wrap the gainmap plane (Y) plus the synthetic neutral chroma plane in a
    // LockedBuffer for the gainmap tiling path. dataCb/dataCr interleave over
    // the same 128-filled buffer (chromaStep = 2).
    inputFrame.gainmapImage = std::make_unique<CpuConsumer::LockedBuffer>();
    *inputFrame.gainmapImage = inputFrame.yuvBuffer;
    inputFrame.gainmapImage->data = reinterpret_cast<uint8_t*>(
            inputFrame.gainmap->planes[UHDR_PLANE_Y]);
    inputFrame.gainmapImage->dataCb = inputFrame.gainmapChroma.get();
    inputFrame.gainmapImage->dataCr = inputFrame.gainmapChroma.get() + 1;
    inputFrame.gainmapImage->chromaStep = 2;
    inputFrame.gainmapImage->stride = inputFrame.gainmap->stride[UHDR_PLANE_Y];
    inputFrame.gainmapImage->chromaStride = inputFrame.gainmap->w;
    inputFrame.gainmapImage->dataSpace = HAL_DATASPACE_V0_JFIF;

    return OK;
}
1458
processCodecInputFrame(InputFrame & inputFrame)1459 status_t HeicCompositeStream::processCodecInputFrame(InputFrame &inputFrame) {
1460 for (auto& inputBuffer : inputFrame.codecInputBuffers) {
1461 sp<MediaCodecBuffer> buffer;
1462 auto res = mCodec->getInputBuffer(inputBuffer.index, &buffer);
1463 if (res != OK) {
1464 ALOGE("%s: Error getting codec input buffer: %s (%d)", __FUNCTION__,
1465 strerror(-res), res);
1466 return res;
1467 }
1468
1469 // Copy one tile from source to destination.
1470 size_t tileX = inputBuffer.tileIndex % mGridCols;
1471 size_t tileY = inputBuffer.tileIndex / mGridCols;
1472 size_t top = mGridHeight * tileY;
1473 size_t left = mGridWidth * tileX;
1474 size_t width = (tileX == static_cast<size_t>(mGridCols) - 1) ?
1475 mOutputWidth - tileX * mGridWidth : mGridWidth;
1476 size_t height = (tileY == static_cast<size_t>(mGridRows) - 1) ?
1477 mOutputHeight - tileY * mGridHeight : mGridHeight;
1478 ALOGV("%s: inputBuffer tileIndex [%zu, %zu], top %zu, left %zu, width %zu, height %zu,"
1479 " timeUs %" PRId64, __FUNCTION__, tileX, tileY, top, left, width, height,
1480 inputBuffer.timeUs);
1481
1482 auto yuvInput = (inputFrame.baseImage.get() != nullptr) ?
1483 *inputFrame.baseImage.get() : inputFrame.yuvBuffer;
1484 res = copyOneYuvTile(buffer, yuvInput, top, left, width, height);
1485 if (res != OK) {
1486 ALOGE("%s: Failed to copy YUV tile %s (%d)", __FUNCTION__,
1487 strerror(-res), res);
1488 return res;
1489 }
1490
1491 res = mCodec->queueInputBuffer(inputBuffer.index, 0, buffer->capacity(),
1492 inputBuffer.timeUs, 0, nullptr /*errorDetailMsg*/);
1493 if (res != OK) {
1494 ALOGE("%s: Failed to queueInputBuffer to Codec: %s (%d)",
1495 __FUNCTION__, strerror(-res), res);
1496 return res;
1497 }
1498 }
1499
1500 inputFrame.codecInputBuffers.clear();
1501 return OK;
1502 }
1503
processCodecGainmapInputFrame(InputFrame & inputFrame)1504 status_t HeicCompositeStream::processCodecGainmapInputFrame(InputFrame &inputFrame) {
1505 for (auto& inputBuffer : inputFrame.gainmapCodecInputBuffers) {
1506 sp<MediaCodecBuffer> buffer;
1507 auto res = mGainmapCodec->getInputBuffer(inputBuffer.index, &buffer);
1508 if (res != OK) {
1509 ALOGE("%s: Error getting codec input buffer: %s (%d)", __FUNCTION__,
1510 strerror(-res), res);
1511 return res;
1512 }
1513
1514 // Copy one tile from source to destination.
1515 size_t tileX = inputBuffer.tileIndex % mGainmapGridCols;
1516 size_t tileY = inputBuffer.tileIndex / mGainmapGridCols;
1517 size_t top = mGainmapGridHeight * tileY;
1518 size_t left = mGainmapGridWidth * tileX;
1519 size_t width = (tileX == static_cast<size_t>(mGainmapGridCols) - 1) ?
1520 mGainmapOutputWidth - tileX * mGainmapGridWidth : mGainmapGridWidth;
1521 size_t height = (tileY == static_cast<size_t>(mGainmapGridRows) - 1) ?
1522 mGainmapOutputHeight - tileY * mGainmapGridHeight : mGainmapGridHeight;
1523 ALOGV("%s: gainmap inputBuffer tileIndex [%zu, %zu], top %zu, left %zu, width %zu, "
1524 "height %zu, timeUs %" PRId64, __FUNCTION__, tileX, tileY, top, left, width, height,
1525 inputBuffer.timeUs);
1526
1527 auto yuvInput = *inputFrame.gainmapImage;
1528 res = copyOneYuvTile(buffer, yuvInput, top, left, width, height);
1529 if (res != OK) {
1530 ALOGE("%s: Failed to copy YUV tile %s (%d)", __FUNCTION__,
1531 strerror(-res), res);
1532 return res;
1533 }
1534
1535 res = mGainmapCodec->queueInputBuffer(inputBuffer.index, 0, buffer->capacity(),
1536 inputBuffer.timeUs, 0, nullptr /*errorDetailMsg*/);
1537 if (res != OK) {
1538 ALOGE("%s: Failed to queueInputBuffer to Codec: %s (%d)",
1539 __FUNCTION__, strerror(-res), res);
1540 return res;
1541 }
1542 }
1543
1544 inputFrame.gainmapCodecInputBuffers.clear();
1545 return OK;
1546 }
1547
processOneCodecOutputFrame(int64_t frameNumber,InputFrame & inputFrame)1548 status_t HeicCompositeStream::processOneCodecOutputFrame(int64_t frameNumber,
1549 InputFrame &inputFrame) {
1550 auto it = inputFrame.codecOutputBuffers.begin();
1551 sp<MediaCodecBuffer> buffer;
1552 status_t res = mCodec->getOutputBuffer(it->index, &buffer);
1553 if (res != OK) {
1554 ALOGE("%s: Error getting Heic codec output buffer at index %d: %s (%d)",
1555 __FUNCTION__, it->index, strerror(-res), res);
1556 return res;
1557 }
1558 if (buffer == nullptr) {
1559 ALOGE("%s: Invalid Heic codec output buffer at index %d",
1560 __FUNCTION__, it->index);
1561 return BAD_VALUE;
1562 }
1563
1564 sp<ABuffer> aBuffer = new ABuffer(buffer->data(), buffer->size());
1565 if (mHDRGainmapEnabled) {
1566 aBuffer->meta()->setInt32(KEY_COLOR_FORMAT, kCodecColorFormat);
1567 aBuffer->meta()->setInt32("color-primaries", kCodecColorPrimaries);
1568 aBuffer->meta()->setInt32("color-transfer", kCodecColorTransfer);
1569 aBuffer->meta()->setInt32("color-matrix", kCodecColorMatrix);
1570 aBuffer->meta()->setInt32("color-range", kCodecColorRange);
1571 }
1572 res = inputFrame.muxer->writeSampleData(
1573 aBuffer, inputFrame.trackIndex, inputFrame.timestamp, 0 /*flags*/);
1574 if (res != OK) {
1575 ALOGE("%s: Failed to write buffer index %d to muxer: %s (%d)",
1576 __FUNCTION__, it->index, strerror(-res), res);
1577 return res;
1578 }
1579
1580 mCodec->releaseOutputBuffer(it->index);
1581 if (inputFrame.pendingOutputTiles == 0) {
1582 ALOGW("%s: Codec generated more tiles than expected!", __FUNCTION__);
1583 } else {
1584 inputFrame.pendingOutputTiles--;
1585 }
1586
1587 inputFrame.codecOutputBuffers.erase(inputFrame.codecOutputBuffers.begin());
1588
1589 ALOGV("%s: [%" PRId64 "]: Output buffer index %d",
1590 __FUNCTION__, frameNumber, it->index);
1591 return OK;
1592 }
1593
processOneCodecGainmapOutputFrame(int64_t frameNumber,InputFrame & inputFrame)1594 status_t HeicCompositeStream::processOneCodecGainmapOutputFrame(int64_t frameNumber,
1595 InputFrame &inputFrame) {
1596 auto it = inputFrame.gainmapCodecOutputBuffers.begin();
1597 sp<MediaCodecBuffer> buffer;
1598 status_t res = mGainmapCodec->getOutputBuffer(it->index, &buffer);
1599 if (res != OK) {
1600 ALOGE("%s: Error getting Heic gainmap codec output buffer at index %d: %s (%d)",
1601 __FUNCTION__, it->index, strerror(-res), res);
1602 return res;
1603 }
1604 if (buffer == nullptr) {
1605 ALOGE("%s: Invalid Heic gainmap codec output buffer at index %d",
1606 __FUNCTION__, it->index);
1607 return BAD_VALUE;
1608 }
1609
1610 uint8_t kGainmapMarker[] = {'g', 'm', 'a', 'p', '\0', '\0'};
1611 sp<ABuffer> aBuffer = new ABuffer(buffer->size() + sizeof(kGainmapMarker));
1612 memcpy(aBuffer->data(), kGainmapMarker, sizeof(kGainmapMarker));
1613 memcpy(aBuffer->data() + sizeof(kGainmapMarker), buffer->data(), buffer->size());
1614 aBuffer->meta()->setInt32(KEY_COLOR_FORMAT, kCodecGainmapColorFormat);
1615 aBuffer->meta()->setInt32("color-primaries", kCodecGainmapColorPrimaries);
1616 aBuffer->meta()->setInt32("color-transfer", kCodecGainmapColorTransfer);
1617 aBuffer->meta()->setInt32("color-matrix", kCodecGainmapColorMatrix);
1618 aBuffer->meta()->setInt32("color-range", kCodecGainmapColorRange);
1619 res = inputFrame.muxer->writeSampleData(aBuffer, inputFrame.gainmapTrackIndex,
1620 inputFrame.timestamp,
1621 MediaCodec::BUFFER_FLAG_MUXER_DATA);
1622 if (res != OK) {
1623 ALOGE("%s: Failed to write buffer index %d to muxer: %s (%d)",
1624 __FUNCTION__, it->index, strerror(-res), res);
1625 return res;
1626 }
1627
1628 mGainmapCodec->releaseOutputBuffer(it->index);
1629 if (inputFrame.gainmapPendingOutputTiles == 0) {
1630 ALOGW("%s: Codec generated more gainmap tiles than expected!", __FUNCTION__);
1631 } else {
1632 inputFrame.gainmapPendingOutputTiles--;
1633 }
1634
1635 inputFrame.gainmapCodecOutputBuffers.erase(inputFrame.gainmapCodecOutputBuffers.begin());
1636
1637 ALOGV("%s: [%" PRId64 "]: Gainmap output buffer index %d",
1638 __FUNCTION__, frameNumber, it->index);
1639 return OK;
1640 }
1641
/**
 * Finalize a fully-encoded capture: stop the muxer, copy the muxed HEIC file
 * from the backing fd into the locked output graphic buffer, append the
 * CameraBlob transport header at the end of the buffer, and queue the buffer
 * to the client-facing output surface.
 *
 * @param frameNumber capture frame number, used for logging/tracing only.
 * @param inputFrame  in-flight capture state; on success its anb and fileFd
 *                    are consumed (set to nullptr / -1).
 * @return OK on success; lock/size/queue error code otherwise.
 */
status_t HeicCompositeStream::processCompletedInputFrame(int64_t frameNumber,
        InputFrame &inputFrame) {
    sp<ANativeWindow> outputANW = mOutputSurface;
    // Stopping the muxer finalizes the container in inputFrame.fileFd.
    inputFrame.muxer->stop();

    // Copy the content of the file to memory.
    sp<GraphicBuffer> gb = GraphicBuffer::from(inputFrame.anb);
    void* dstBuffer;
    // GraphicBufferLocker unlocks the buffer when it goes out of scope (RAII),
    // including on the early-error returns below.
    GraphicBufferLocker gbLocker(gb);
    auto res = gbLocker.lockAsync(&dstBuffer, inputFrame.fenceFd);
    if (res != OK) {
        ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    // Seek to the end to learn the muxed file size. NOTE(review): an lseek
    // failure (-1) is not checked explicitly, but the size_t cast below makes
    // it fail the bounds check and return BAD_VALUE — confirm this is intended.
    off_t fSize = lseek(inputFrame.fileFd, 0, SEEK_END);
    if (static_cast<size_t>(fSize) > mMaxHeicBufferSize - sizeof(CameraBlob)) {
        ALOGE("%s: Error: MediaMuxer output size %ld is larger than buffer sizer %zu",
                __FUNCTION__, fSize, mMaxHeicBufferSize - sizeof(CameraBlob));
        return BAD_VALUE;
    }

    // Rewind and copy the whole file into the locked buffer.
    lseek(inputFrame.fileFd, 0, SEEK_SET);
    ssize_t bytesRead = read(inputFrame.fileFd, dstBuffer, fSize);
    if (bytesRead < fSize) {
        ALOGE("%s: Only %zd of %ld bytes read", __FUNCTION__, bytesRead, fSize);
        return BAD_VALUE;
    }

    close(inputFrame.fileFd);
    inputFrame.fileFd = -1;

    // Fill in HEIC header
    // Must be in sync with CAMERA3_HEIC_BLOB_ID in android_media_Utils.cpp
    uint8_t *header = static_cast<uint8_t*>(dstBuffer) + mMaxHeicBufferSize - sizeof(CameraBlob);
    CameraBlob blobHeader = {
        // 0x00FE is CAMERA3_HEIC_BLOB_ID (see comment above).
        .blobId = static_cast<CameraBlobId>(0x00FE),
        .blobSizeBytes = static_cast<int32_t>(fSize)
    };
    memcpy(header, &blobHeader, sizeof(CameraBlob));

    // Stamp the buffer with the capture timestamp before queueing.
    res = native_window_set_buffers_timestamp(mOutputSurface.get(), inputFrame.timestamp);
    if (res != OK) {
        ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
                __FUNCTION__, getStreamId(), strerror(-res), res);
        return res;
    }

    res = outputANW->queueBuffer(mOutputSurface.get(), inputFrame.anb, /*fence*/ -1);
    if (res != OK) {
        ALOGE("%s: Failed to queueBuffer to Heic stream: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }
    // Ownership of the buffer has transferred to the surface.
    inputFrame.anb = nullptr;
    mDequeuedOutputBufferCnt--;

    ALOGV("%s: [%" PRId64 "]", __FUNCTION__, frameNumber);
    ATRACE_ASYNC_END("HEIC capture", frameNumber);
    return OK;
}
1704
1705
releaseInputFrameLocked(int64_t frameNumber,InputFrame * inputFrame)1706 void HeicCompositeStream::releaseInputFrameLocked(int64_t frameNumber,
1707 InputFrame *inputFrame /*out*/) {
1708 if (inputFrame == nullptr) {
1709 return;
1710 }
1711
1712 if (inputFrame->appSegmentBuffer.data != nullptr) {
1713 mAppSegmentConsumer->unlockBuffer(inputFrame->appSegmentBuffer);
1714 inputFrame->appSegmentBuffer.data = nullptr;
1715 }
1716
1717 while (!inputFrame->codecOutputBuffers.empty()) {
1718 auto it = inputFrame->codecOutputBuffers.begin();
1719 ALOGV("%s: releaseOutputBuffer index %d", __FUNCTION__, it->index);
1720 mCodec->releaseOutputBuffer(it->index);
1721 inputFrame->codecOutputBuffers.erase(it);
1722 }
1723
1724 while (!inputFrame->gainmapCodecOutputBuffers.empty()) {
1725 auto it = inputFrame->gainmapCodecOutputBuffers.begin();
1726 ALOGV("%s: release gainmap output buffer index %d", __FUNCTION__, it->index);
1727 mGainmapCodec->releaseOutputBuffer(it->index);
1728 inputFrame->gainmapCodecOutputBuffers.erase(it);
1729 }
1730
1731 if (inputFrame->yuvBuffer.data != nullptr) {
1732 mMainImageConsumer->unlockBuffer(inputFrame->yuvBuffer);
1733 inputFrame->yuvBuffer.data = nullptr;
1734 mYuvBufferAcquired = false;
1735 }
1736
1737 while (!inputFrame->codecInputBuffers.empty()) {
1738 auto it = inputFrame->codecInputBuffers.begin();
1739 inputFrame->codecInputBuffers.erase(it);
1740 }
1741
1742 while (!inputFrame->gainmapCodecInputBuffers.empty()) {
1743 auto it = inputFrame->gainmapCodecInputBuffers.begin();
1744 inputFrame->gainmapCodecInputBuffers.erase(it);
1745 }
1746
1747 if (inputFrame->error || mErrorState) {
1748 ALOGV("%s: notifyError called for frameNumber %" PRId64, __FUNCTION__, frameNumber);
1749 notifyError(frameNumber, inputFrame->requestId);
1750 }
1751
1752 if (inputFrame->fileFd >= 0) {
1753 close(inputFrame->fileFd);
1754 inputFrame->fileFd = -1;
1755 }
1756
1757 if (inputFrame->anb != nullptr) {
1758 sp<ANativeWindow> outputANW = mOutputSurface;
1759 outputANW->cancelBuffer(mOutputSurface.get(), inputFrame->anb, /*fence*/ -1);
1760 inputFrame->anb = nullptr;
1761
1762 mDequeuedOutputBufferCnt--;
1763 }
1764 }
1765
releaseInputFramesLocked()1766 void HeicCompositeStream::releaseInputFramesLocked() {
1767 auto it = mPendingInputFrames.begin();
1768 bool inputFrameDone = false;
1769 while (it != mPendingInputFrames.end()) {
1770 auto& inputFrame = it->second;
1771 if (inputFrame.error ||
1772 (inputFrame.appSegmentWritten && inputFrame.pendingOutputTiles == 0 &&
1773 inputFrame.gainmapPendingOutputTiles == 0)) {
1774 releaseInputFrameLocked(it->first, &inputFrame);
1775 it = mPendingInputFrames.erase(it);
1776 inputFrameDone = true;
1777 } else {
1778 it++;
1779 }
1780 }
1781
1782 // Update codec quality based on first upcoming input frame.
1783 // Note that when encoding is in surface mode, currently there is no
1784 // way for camera service to synchronize quality setting on a per-frame
1785 // basis: we don't get notification when codec is ready to consume a new
1786 // input frame. So we update codec quality on a best-effort basis.
1787 if (inputFrameDone) {
1788 auto firstPendingFrame = mPendingInputFrames.begin();
1789 if (firstPendingFrame != mPendingInputFrames.end()) {
1790 updateCodecQualityLocked(firstPendingFrame->second.quality);
1791 } else {
1792 if (mSettingsByFrameNumber.size() == 0) {
1793 markTrackerIdle();
1794 }
1795 }
1796 }
1797 }
1798
/**
 * Create and configure the HEVC encoder used for the HDR gainmap image,
 * together with its callback looper/handler. No-op when HDR gainmap output
 * is disabled. On success, records the gainmap grid geometry and grows
 * mMaxHeicBufferSize to also cover the gainmap payload.
 *
 * Must be called after initializeCodec() (uses mOutputWidth/Height and
 * mCodecLooper).
 *
 * @return OK on success; BAD_VALUE/NO_INIT/NO_MEMORY or codec error otherwise.
 */
status_t HeicCompositeStream::initializeGainmapCodec() {
    ALOGV("%s", __FUNCTION__);

    if (!mHDRGainmapEnabled) {
        return OK;
    }
    // The gainmap is encoded at a fraction of the main image resolution.
    uint32_t width = mOutputWidth / kGainmapScale;
    uint32_t height = mOutputHeight / kGainmapScale;
    bool useGrid = false;
    // useHeic is an out-parameter required by the query; the gainmap codec is
    // always created by HEVC component name below regardless of its value.
    bool useHeic = false;
    AString hevcName;
    bool isSizeSupported = isSizeSupportedByHeifEncoder(width, height,
            &useHeic, &useGrid, nullptr, &hevcName);
    if (!isSizeSupported) {
        ALOGE("%s: Encoder doesn't support size %u x %u!",
                __FUNCTION__, width, height);
        return BAD_VALUE;
    }

    // Create HEVC codec.
    mGainmapCodec = MediaCodec::CreateByComponentName(mCodecLooper, hevcName);
    if (mGainmapCodec == nullptr) {
        ALOGE("%s: Failed to create gainmap codec", __FUNCTION__);
        return NO_INIT;
    }

    // Create Looper and handler for Codec callback.
    mGainmapCodecCallbackHandler = new CodecCallbackHandler(this, true /*isGainmap*/);
    if (mGainmapCodecCallbackHandler == nullptr) {
        ALOGE("%s: Failed to create gainmap codec callback handler", __FUNCTION__);
        return NO_MEMORY;
    }
    mGainmapCallbackLooper = new ALooper;
    mGainmapCallbackLooper->setName("Camera3-HeicComposite-MediaCodecGainmapCallbackLooper");
    auto res = mGainmapCallbackLooper->start(
            false,   // runOnCallingThread
            false,    // canCallJava
            PRIORITY_AUDIO);
    if (res != OK) {
        ALOGE("%s: Failed to start gainmap media callback looper: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return NO_INIT;
    }
    mGainmapCallbackLooper->registerHandler(mGainmapCodecCallbackHandler);

    // Switch the codec to asynchronous (callback) mode.
    mGainmapAsyncNotify = new AMessage(kWhatCallbackNotify, mGainmapCodecCallbackHandler);
    res = mGainmapCodec->setCallback(mGainmapAsyncNotify);
    if (res != OK) {
        ALOGE("%s: Failed to set MediaCodec callback: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    // Create output format and configure the Codec.
    sp<AMessage> outputFormat = new AMessage();
    outputFormat->setString(KEY_MIME, MIMETYPE_VIDEO_HEVC);
    outputFormat->setInt32(KEY_BITRATE_MODE, BITRATE_MODE_CQ);
    outputFormat->setInt32(KEY_QUALITY, kDefaultJpegQuality);
    // Ask codec to skip timestamp check and encode all frames.
    outputFormat->setInt64(KEY_MAX_PTS_GAP_TO_ENCODER, kNoFrameDropMaxPtsGap);

    // When tiling is needed, each tile is fed as a separate codec frame and
    // the grid geometry is derived from the fixed tile size (round up).
    int32_t gridWidth, gridHeight, gridRows, gridCols;
    if (useGrid){
        gridWidth = HeicEncoderInfoManager::kGridWidth;
        gridHeight = HeicEncoderInfoManager::kGridHeight;
        gridRows = (height + gridHeight - 1)/gridHeight;
        gridCols = (width + gridWidth - 1)/gridWidth;
    } else {
        gridWidth = width;
        gridHeight = height;
        gridRows = 1;
        gridCols = 1;
    }

    outputFormat->setInt32(KEY_WIDTH, !useGrid ? width : gridWidth);
    outputFormat->setInt32(KEY_HEIGHT, !useGrid ? height : gridHeight);
    outputFormat->setInt32(KEY_I_FRAME_INTERVAL, 0);
    outputFormat->setInt32(KEY_COLOR_FORMAT, COLOR_FormatYUV420Flexible);
    outputFormat->setInt32(KEY_FRAME_RATE, useGrid ? gridRows * gridCols : kNoGridOpRate);
    // This only serves as a hint to encoder when encoding is not real-time.
    outputFormat->setInt32(KEY_OPERATING_RATE, useGrid ? kGridOpRate : kNoGridOpRate);

    res = mGainmapCodec->configure(outputFormat, nullptr /*nativeWindow*/,
            nullptr /*crypto*/, CONFIGURE_FLAG_ENCODE);
    if (res != OK) {
        ALOGE("%s: Failed to configure codec: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    // Persist the chosen geometry for the encoding path.
    mGainmapGridWidth = gridWidth;
    mGainmapGridHeight = gridHeight;
    mGainmapGridRows = gridRows;
    mGainmapGridCols = gridCols;
    mGainmapUseGrid = useGrid;
    mGainmapOutputWidth = width;
    mGainmapOutputHeight = height;
    // Reserve additional worst-case YUV420 (3/2 bytes per pixel) space for the
    // gainmap in the output blob, aligned up to the tile grid.
    mMaxHeicBufferSize +=
        ALIGN(mGainmapOutputWidth, HeicEncoderInfoManager::kGridWidth) *
        ALIGN(mGainmapOutputHeight, HeicEncoderInfoManager::kGridHeight) * 3 / 2;

    return OK;
}
1902
initializeCodec(uint32_t width,uint32_t height,const sp<CameraDeviceBase> & cameraDevice)1903 status_t HeicCompositeStream::initializeCodec(uint32_t width, uint32_t height,
1904 const sp<CameraDeviceBase>& cameraDevice) {
1905 ALOGV("%s", __FUNCTION__);
1906
1907 bool useGrid = false;
1908 AString hevcName;
1909 bool isSizeSupported = isSizeSupportedByHeifEncoder(width, height,
1910 &mUseHeic, &useGrid, nullptr, &hevcName);
1911 if (!isSizeSupported) {
1912 ALOGE("%s: Encoder doesnt' support size %u x %u!",
1913 __FUNCTION__, width, height);
1914 return BAD_VALUE;
1915 }
1916 if (mHDRGainmapEnabled) {
1917 // HDR Gainmap tonemapping and generation can only be done in SW
1918 // using P010 as input. HEIC codecs expect private/impl.defined
1919 // which is opaque.
1920 mUseHeic = false;
1921 }
1922
1923 // Create Looper for MediaCodec.
1924 auto desiredMime = mUseHeic ? MIMETYPE_IMAGE_ANDROID_HEIC : MIMETYPE_VIDEO_HEVC;
1925 mCodecLooper = new ALooper;
1926 mCodecLooper->setName("Camera3-HeicComposite-MediaCodecLooper");
1927 status_t res = mCodecLooper->start(
1928 false, // runOnCallingThread
1929 false, // canCallJava
1930 PRIORITY_AUDIO);
1931 if (res != OK) {
1932 ALOGE("%s: Failed to start codec looper: %s (%d)",
1933 __FUNCTION__, strerror(-res), res);
1934 return NO_INIT;
1935 }
1936
1937 // Create HEIC/HEVC codec.
1938 if (mUseHeic) {
1939 mCodec = MediaCodec::CreateByType(mCodecLooper, desiredMime, true /*encoder*/);
1940 } else {
1941 mCodec = MediaCodec::CreateByComponentName(mCodecLooper, hevcName);
1942 }
1943 if (mCodec == nullptr) {
1944 ALOGE("%s: Failed to create codec for %s", __FUNCTION__, desiredMime);
1945 return NO_INIT;
1946 }
1947
1948 // Create Looper and handler for Codec callback.
1949 mCodecCallbackHandler = new CodecCallbackHandler(this);
1950 if (mCodecCallbackHandler == nullptr) {
1951 ALOGE("%s: Failed to create codec callback handler", __FUNCTION__);
1952 return NO_MEMORY;
1953 }
1954 mCallbackLooper = new ALooper;
1955 mCallbackLooper->setName("Camera3-HeicComposite-MediaCodecCallbackLooper");
1956 res = mCallbackLooper->start(
1957 false, // runOnCallingThread
1958 false, // canCallJava
1959 PRIORITY_AUDIO);
1960 if (res != OK) {
1961 ALOGE("%s: Failed to start media callback looper: %s (%d)",
1962 __FUNCTION__, strerror(-res), res);
1963 return NO_INIT;
1964 }
1965 mCallbackLooper->registerHandler(mCodecCallbackHandler);
1966
1967 mAsyncNotify = new AMessage(kWhatCallbackNotify, mCodecCallbackHandler);
1968 res = mCodec->setCallback(mAsyncNotify);
1969 if (res != OK) {
1970 ALOGE("%s: Failed to set MediaCodec callback: %s (%d)", __FUNCTION__,
1971 strerror(-res), res);
1972 return res;
1973 }
1974
1975 // Create output format and configure the Codec.
1976 sp<AMessage> outputFormat = new AMessage();
1977 outputFormat->setString(KEY_MIME, desiredMime);
1978 outputFormat->setInt32(KEY_BITRATE_MODE, BITRATE_MODE_CQ);
1979 outputFormat->setInt32(KEY_QUALITY, kDefaultJpegQuality);
1980 // Ask codec to skip timestamp check and encode all frames.
1981 outputFormat->setInt64(KEY_MAX_PTS_GAP_TO_ENCODER, kNoFrameDropMaxPtsGap);
1982
1983 int32_t gridWidth, gridHeight, gridRows, gridCols;
1984 if (useGrid || mUseHeic) {
1985 gridWidth = HeicEncoderInfoManager::kGridWidth;
1986 gridHeight = HeicEncoderInfoManager::kGridHeight;
1987 gridRows = (height + gridHeight - 1)/gridHeight;
1988 gridCols = (width + gridWidth - 1)/gridWidth;
1989
1990 if (mUseHeic) {
1991 outputFormat->setInt32(KEY_TILE_WIDTH, gridWidth);
1992 outputFormat->setInt32(KEY_TILE_HEIGHT, gridHeight);
1993 outputFormat->setInt32(KEY_GRID_COLUMNS, gridCols);
1994 outputFormat->setInt32(KEY_GRID_ROWS, gridRows);
1995 }
1996
1997 } else {
1998 gridWidth = width;
1999 gridHeight = height;
2000 gridRows = 1;
2001 gridCols = 1;
2002 }
2003
2004 outputFormat->setInt32(KEY_WIDTH, !useGrid ? width : gridWidth);
2005 outputFormat->setInt32(KEY_HEIGHT, !useGrid ? height : gridHeight);
2006 outputFormat->setInt32(KEY_I_FRAME_INTERVAL, 0);
2007 outputFormat->setInt32(KEY_COLOR_FORMAT,
2008 useGrid || mHDRGainmapEnabled ? COLOR_FormatYUV420Flexible : COLOR_FormatSurface);
2009 outputFormat->setInt32(KEY_FRAME_RATE, useGrid ? gridRows * gridCols : kNoGridOpRate);
2010 // This only serves as a hint to encoder when encoding is not real-time.
2011 outputFormat->setInt32(KEY_OPERATING_RATE, useGrid ? kGridOpRate : kNoGridOpRate);
2012
2013 res = mCodec->configure(outputFormat, nullptr /*nativeWindow*/,
2014 nullptr /*crypto*/, CONFIGURE_FLAG_ENCODE);
2015 if (res != OK) {
2016 ALOGE("%s: Failed to configure codec: %s (%d)", __FUNCTION__,
2017 strerror(-res), res);
2018 return res;
2019 }
2020
2021 mGridWidth = gridWidth;
2022 mGridHeight = gridHeight;
2023 mGridRows = gridRows;
2024 mGridCols = gridCols;
2025 mUseGrid = useGrid;
2026 mOutputWidth = width;
2027 mOutputHeight = height;
2028 mAppSegmentMaxSize = calcAppSegmentMaxSize(cameraDevice->info());
2029 mMaxHeicBufferSize =
2030 ALIGN(mOutputWidth, HeicEncoderInfoManager::kGridWidth) *
2031 ALIGN(mOutputHeight, HeicEncoderInfoManager::kGridHeight) * 3 / 2 + mAppSegmentMaxSize;
2032
2033 return initializeGainmapCodec();
2034 }
2035
deinitGainmapCodec()2036 void HeicCompositeStream::deinitGainmapCodec() {
2037 ALOGV("%s", __FUNCTION__);
2038 if (mGainmapCodec != nullptr) {
2039 mGainmapCodec->stop();
2040 mGainmapCodec->release();
2041 mGainmapCodec.clear();
2042 }
2043
2044 if (mGainmapCallbackLooper != nullptr) {
2045 mGainmapCallbackLooper->stop();
2046 mGainmapCallbackLooper.clear();
2047 }
2048
2049 mGainmapAsyncNotify.clear();
2050 mGainmapFormat.clear();
2051 }
2052
deinitCodec()2053 void HeicCompositeStream::deinitCodec() {
2054 ALOGV("%s", __FUNCTION__);
2055 if (mCodec != nullptr) {
2056 mCodec->stop();
2057 mCodec->release();
2058 mCodec.clear();
2059 }
2060
2061 deinitGainmapCodec();
2062
2063 if (mCodecLooper != nullptr) {
2064 mCodecLooper->stop();
2065 mCodecLooper.clear();
2066 }
2067
2068 if (mCallbackLooper != nullptr) {
2069 mCallbackLooper->stop();
2070 mCallbackLooper.clear();
2071 }
2072
2073 mAsyncNotify.clear();
2074 mFormat.clear();
2075 }
2076
2077 // Return the size of the complete list of app segment, 0 indicates failure
// Return the size of the complete list of app segment, 0 indicates failure
/**
 * Validate and measure the JPEG APP segments stored in appSegmentBuffer.
 *
 * Layout expected: a sequence of APPn segments (APP1 first, mandatory) at the
 * start of the buffer, and a CameraBlob transport header at the very end of
 * the buffer giving the total segment size.
 *
 * @param appSegmentBuffer buffer holding the segments plus trailing blob header.
 * @param maxSize          total size of appSegmentBuffer in bytes.
 * @param app1SegmentSize  out: size of the APP1 segment including its 2-byte
 *                         marker.
 * @return total size of all APP segments, or 0 on any validation failure.
 */
size_t HeicCompositeStream::findAppSegmentsSize(const uint8_t* appSegmentBuffer,
        size_t maxSize, size_t *app1SegmentSize) {
    if (appSegmentBuffer == nullptr || app1SegmentSize == nullptr) {
        ALOGE("%s: Invalid input appSegmentBuffer %p, app1SegmentSize %p",
                __FUNCTION__, appSegmentBuffer, app1SegmentSize);
        return 0;
    }

    size_t expectedSize = 0;
    // First check for EXIF transport header at the end of the buffer
    const uint8_t *header = appSegmentBuffer + (maxSize - sizeof(CameraBlob));
    const CameraBlob *blob = (const CameraBlob*)(header);
    if (blob->blobId != CameraBlobId::JPEG_APP_SEGMENTS) {
        ALOGE("%s: Invalid EXIF blobId %d", __FUNCTION__, eToI(blob->blobId));
        return 0;
    }

    // The blob header tells us how many segment bytes to expect; it must fit
    // in front of the trailing header itself.
    expectedSize = blob->blobSizeBytes;
    if (expectedSize == 0 || expectedSize > maxSize - sizeof(CameraBlob)) {
        ALOGE("%s: Invalid blobSize %zu.", __FUNCTION__, expectedSize);
        return 0;
    }

    uint32_t totalSize = 0;

    // Verify APP1 marker (mandatory)
    uint8_t app1Marker[] = {0xFF, 0xE1};
    if (memcmp(appSegmentBuffer, app1Marker, sizeof(app1Marker))) {
        ALOGE("%s: Invalid APP1 marker: %x, %x", __FUNCTION__,
                appSegmentBuffer[0], appSegmentBuffer[1]);
        return 0;
    }
    totalSize += sizeof(app1Marker);

    // Per JPEG framing, the 2-byte length field includes itself, so adding
    // app1Size advances past both the length field and the payload.
    uint16_t app1Size = (static_cast<uint16_t>(appSegmentBuffer[totalSize]) << 8) +
            appSegmentBuffer[totalSize+1];
    totalSize += app1Size;

    ALOGV("%s: Expected APP segments size %zu, APP1 segment size %u",
            __FUNCTION__, expectedSize, app1Size);
    // Walk any remaining APPn segments (markers 0xFFE2..0xFFEF) until the
    // expected total is reached.
    while (totalSize < expectedSize) {
        if (appSegmentBuffer[totalSize] != 0xFF ||
                appSegmentBuffer[totalSize+1] <= 0xE1 ||
                appSegmentBuffer[totalSize+1] > 0xEF) {
            // Invalid APPn marker
            ALOGE("%s: Invalid APPn marker: %x, %x", __FUNCTION__,
                    appSegmentBuffer[totalSize], appSegmentBuffer[totalSize+1]);
            return 0;
        }
        totalSize += 2;

        uint16_t appnSize = (static_cast<uint16_t>(appSegmentBuffer[totalSize]) << 8) +
                appSegmentBuffer[totalSize+1];
        totalSize += appnSize;
    }

    // The walk must land exactly on the advertised size; overshoot means a
    // corrupt segment list.
    if (totalSize != expectedSize) {
        ALOGE("%s: Invalid JPEG APP segments: totalSize %u vs expected size %zu",
                __FUNCTION__, totalSize, expectedSize);
        return 0;
    }

    *app1SegmentSize = app1Size + sizeof(app1Marker);
    return expectedSize;
}
2143
/**
 * Copy one tile (top/left/width/height rectangle) of a locked camera YUV420
 * buffer into a codec input buffer, adapting between the camera buffer's
 * chroma layout and the codec's MediaImage2-described layout.
 *
 * Three chroma paths are used, fastest first:
 *  1. both sides semiplanar with matching Cb/Cr order -> row copies;
 *  2. both sides planar -> per-plane row copies;
 *  3. anything else -> per-pixel conversion.
 *
 * @param codecBuffer codec input buffer carrying "image-data" (MediaImage2)
 *                    layout metadata.
 * @param yuvBuffer   locked camera buffer (8-bit YUV420).
 * @param top,left    tile origin in the source image; assumed even so that
 *                    chroma subsampling divides cleanly.
 * @param width,height tile dimensions.
 * @return OK on success; BAD_VALUE if the codec layout metadata is missing
 *         or not 8-bit 3-plane YUV.
 */
status_t HeicCompositeStream::copyOneYuvTile(sp<MediaCodecBuffer>& codecBuffer,
        const CpuConsumer::LockedBuffer& yuvBuffer,
        size_t top, size_t left, size_t width, size_t height) {
    ATRACE_CALL();

    // Get stride information for codecBuffer
    sp<ABuffer> imageData;
    if (!codecBuffer->meta()->findBuffer("image-data", &imageData)) {
        ALOGE("%s: Codec input buffer is not for image data!", __FUNCTION__);
        return BAD_VALUE;
    }
    if (imageData->size() != sizeof(MediaImage2)) {
        ALOGE("%s: Invalid codec input image size %zu, expected %zu",
                __FUNCTION__, imageData->size(), sizeof(MediaImage2));
        return BAD_VALUE;
    }
    MediaImage2* imageInfo = reinterpret_cast<MediaImage2*>(imageData->data());
    // Only 8-bit 3-plane YUV codec layouts are supported by the copy paths
    // below.
    if (imageInfo->mType != MediaImage2::MEDIA_IMAGE_TYPE_YUV ||
            imageInfo->mBitDepth != 8 ||
            imageInfo->mBitDepthAllocated != 8 ||
            imageInfo->mNumPlanes != 3) {
        ALOGE("%s: Invalid codec input image info: mType %d, mBitDepth %d, "
                "mBitDepthAllocated %d, mNumPlanes %d!", __FUNCTION__,
                imageInfo->mType, imageInfo->mBitDepth,
                imageInfo->mBitDepthAllocated, imageInfo->mNumPlanes);
        return BAD_VALUE;
    }

    ALOGV("%s: yuvBuffer chromaStep %d, chromaStride %d",
            __FUNCTION__, yuvBuffer.chromaStep, yuvBuffer.chromaStride);
    ALOGV("%s: U offset %u, V offset %u, U rowInc %d, V rowInc %d, U colInc %d, V colInc %d",
            __FUNCTION__, imageInfo->mPlane[MediaImage2::U].mOffset,
            imageInfo->mPlane[MediaImage2::V].mOffset,
            imageInfo->mPlane[MediaImage2::U].mRowInc,
            imageInfo->mPlane[MediaImage2::V].mRowInc,
            imageInfo->mPlane[MediaImage2::U].mColInc,
            imageInfo->mPlane[MediaImage2::V].mColInc);

    // Y
    // Straight row-by-row copy using the CPU-optimized row copier.
    for (auto row = top; row < top+height; row++) {
        uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::Y].mOffset +
                imageInfo->mPlane[MediaImage2::Y].mRowInc * (row - top);
        mFnCopyRow(yuvBuffer.data+row*yuvBuffer.stride+left, dst, width);
    }

    // U is Cb, V is Cr
    // Classify the codec's chroma layout from the MediaImage2 plane geometry.
    bool codecUPlaneFirst = imageInfo->mPlane[MediaImage2::V].mOffset >
            imageInfo->mPlane[MediaImage2::U].mOffset;
    uint32_t codecUvOffsetDiff = codecUPlaneFirst ?
            imageInfo->mPlane[MediaImage2::V].mOffset - imageInfo->mPlane[MediaImage2::U].mOffset :
            imageInfo->mPlane[MediaImage2::U].mOffset - imageInfo->mPlane[MediaImage2::V].mOffset;
    // Semiplanar: interleaved CbCr/CrCb (offsets differ by 1, colInc 2).
    bool isCodecUvSemiplannar = (codecUvOffsetDiff == 1) &&
            (imageInfo->mPlane[MediaImage2::U].mRowInc ==
            imageInfo->mPlane[MediaImage2::V].mRowInc) &&
            (imageInfo->mPlane[MediaImage2::U].mColInc == 2) &&
            (imageInfo->mPlane[MediaImage2::V].mColInc == 2);
    // Planar: each chroma plane is contiguous (offsets at least half a plane
    // apart, colInc 1).
    bool isCodecUvPlannar =
            ((codecUPlaneFirst && codecUvOffsetDiff >=
                    imageInfo->mPlane[MediaImage2::U].mRowInc * imageInfo->mHeight/2) ||
            ((!codecUPlaneFirst && codecUvOffsetDiff >=
                    imageInfo->mPlane[MediaImage2::V].mRowInc * imageInfo->mHeight/2))) &&
            imageInfo->mPlane[MediaImage2::U].mColInc == 1 &&
            imageInfo->mPlane[MediaImage2::V].mColInc == 1;
    bool cameraUPlaneFirst = yuvBuffer.dataCr > yuvBuffer.dataCb;

    if (isCodecUvSemiplannar && yuvBuffer.chromaStep == 2 &&
            (codecUPlaneFirst == cameraUPlaneFirst)) {
        // UV semiplannar
        // The chrome plane could be either Cb first, or Cr first. Take the
        // smaller address.
        uint8_t *src = std::min(yuvBuffer.dataCb, yuvBuffer.dataCr);
        MediaImage2::PlaneIndex dstPlane = codecUPlaneFirst ? MediaImage2::U : MediaImage2::V;
        for (auto row = top/2; row < (top+height)/2; row++) {
            uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[dstPlane].mOffset +
                    imageInfo->mPlane[dstPlane].mRowInc * (row - top/2);
            mFnCopyRow(src+row*yuvBuffer.chromaStride+left, dst, width);
        }
    } else if (isCodecUvPlannar && yuvBuffer.chromaStep == 1) {
        // U plane
        for (auto row = top/2; row < (top+height)/2; row++) {
            uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::U].mOffset +
                    imageInfo->mPlane[MediaImage2::U].mRowInc * (row - top/2);
            mFnCopyRow(yuvBuffer.dataCb+row*yuvBuffer.chromaStride+left/2, dst, width/2);
        }

        // V plane
        for (auto row = top/2; row < (top+height)/2; row++) {
            uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::V].mOffset +
                    imageInfo->mPlane[MediaImage2::V].mRowInc * (row - top/2);
            mFnCopyRow(yuvBuffer.dataCr+row*yuvBuffer.chromaStride+left/2, dst, width/2);
        }
    } else {
        // Convert between semiplannar and plannar, or when UV orders are
        // different.
        // Slow path: per-pixel addressing via the plane increments.
        uint8_t *dst = codecBuffer->data();
        for (auto row = top/2; row < (top+height)/2; row++) {
            for (auto col = left/2; col < (left+width)/2; col++) {
                // U/Cb
                int32_t dstIndex = imageInfo->mPlane[MediaImage2::U].mOffset +
                        imageInfo->mPlane[MediaImage2::U].mRowInc * (row - top/2) +
                        imageInfo->mPlane[MediaImage2::U].mColInc * (col - left/2);
                int32_t srcIndex = row * yuvBuffer.chromaStride + yuvBuffer.chromaStep * col;
                dst[dstIndex] = yuvBuffer.dataCb[srcIndex];

                // V/Cr
                dstIndex = imageInfo->mPlane[MediaImage2::V].mOffset +
                        imageInfo->mPlane[MediaImage2::V].mRowInc * (row - top/2) +
                        imageInfo->mPlane[MediaImage2::V].mColInc * (col - left/2);
                srcIndex = row * yuvBuffer.chromaStride + yuvBuffer.chromaStep * col;
                dst[dstIndex] = yuvBuffer.dataCr[srcIndex];
            }
        }
    }
    return OK;
}
2259
/**
 * Select the fastest available libyuv row-copy routine for this CPU and
 * store it in mFnCopyRow. Checks run in ascending preference order, so a
 * later successful check overrides an earlier one; the portable C version
 * is the fallback. The aligned variants are chosen when the row width meets
 * the routine's alignment requirement.
 *
 * @param width row width in bytes used only for alignment selection; unused
 *              when no SIMD path is compiled in (hence [[maybe_unused]]).
 */
void HeicCompositeStream::initCopyRowFunction([[maybe_unused]] int32_t width)
{
    using namespace libyuv;

    mFnCopyRow = CopyRow_C;
#if defined(HAS_COPYROW_SSE2)
    if (TestCpuFlag(kCpuHasSSE2)) {
        mFnCopyRow = IS_ALIGNED(width, 32) ? CopyRow_SSE2 : CopyRow_Any_SSE2;
    }
#endif
#if defined(HAS_COPYROW_AVX)
    if (TestCpuFlag(kCpuHasAVX)) {
        mFnCopyRow = IS_ALIGNED(width, 64) ? CopyRow_AVX : CopyRow_Any_AVX;
    }
#endif
#if defined(HAS_COPYROW_ERMS)
    // Enhanced REP MOVSB: preferred over SSE2/AVX when present.
    if (TestCpuFlag(kCpuHasERMS)) {
        mFnCopyRow = CopyRow_ERMS;
    }
#endif
#if defined(HAS_COPYROW_NEON)
    if (TestCpuFlag(kCpuHasNEON)) {
        mFnCopyRow = IS_ALIGNED(width, 32) ? CopyRow_NEON : CopyRow_Any_NEON;
    }
#endif
#if defined(HAS_COPYROW_MIPS)
    if (TestCpuFlag(kCpuHasMIPS)) {
        mFnCopyRow = CopyRow_MIPS;
    }
#endif
}
2291
calcAppSegmentMaxSize(const CameraMetadata & info)2292 size_t HeicCompositeStream::calcAppSegmentMaxSize(const CameraMetadata& info) {
2293 camera_metadata_ro_entry_t entry = info.find(ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT);
2294 size_t maxAppsSegment = 1;
2295 if (entry.count > 0) {
2296 maxAppsSegment = entry.data.u8[0] < 1 ? 1 :
2297 entry.data.u8[0] > 16 ? 16 : entry.data.u8[0];
2298 }
2299 return maxAppsSegment * (2 + 0xFFFF) + sizeof(CameraBlob);
2300 }
2301
updateCodecQualityLocked(int32_t quality)2302 void HeicCompositeStream::updateCodecQualityLocked(int32_t quality) {
2303 if (quality != mQuality) {
2304 sp<AMessage> qualityParams = new AMessage;
2305 qualityParams->setInt32(PARAMETER_KEY_VIDEO_BITRATE, quality);
2306 status_t res = mCodec->setParameters(qualityParams);
2307 if (res != OK) {
2308 ALOGE("%s: Failed to set codec quality: %s (%d)",
2309 __FUNCTION__, strerror(-res), res);
2310 } else {
2311 mQuality = quality;
2312 }
2313 }
2314 }
2315
/**
 * Main processing loop body (runs on the stream's worker thread).
 *
 * With mMutex held: compile newly arrived inputs, pick the next ready frame,
 * or — if none is ready — release a failing frame or block on
 * mInputReadyCondition. The heavy encode work (processInputFrame) then runs
 * WITHOUT the lock so callbacks and new inputs are not stalled.
 *
 * @return true to be called again; false to exit the thread (error state or
 *         condition-wait failure).
 */
bool HeicCompositeStream::threadLoop() {
    int64_t frameNumber = -1;
    bool newInputAvailable = false;

    {
        Mutex::Autolock l(mMutex);
        if (mErrorState) {
            // In case we landed in error state, return any pending buffers and
            // halt all further processing.
            compilePendingInputLocked();
            releaseInputFramesLocked();
            return false;
        }


        while (!newInputAvailable) {
            compilePendingInputLocked();
            newInputAvailable = getNextReadyInputLocked(&frameNumber);

            if (!newInputAvailable) {
                auto failingFrameNumber = getNextFailingInputLocked();
                if (failingFrameNumber >= 0) {
                    releaseInputFrameLocked(failingFrameNumber,
                            &mPendingInputFrames[failingFrameNumber]);

                    // It's okay to remove the entry from mPendingInputFrames
                    // because:
                    // 1. Only one internal stream (main input) is critical in
                    // backing the output stream.
                    // 2. If captureResult/appSegment arrives after the entry is
                    // removed, they are simply skipped.
                    mPendingInputFrames.erase(failingFrameNumber);
                    if (mPendingInputFrames.size() == 0) {
                        if (mSettingsByFrameNumber.size() == 0) {
                            markTrackerIdle();
                        }
                    }
                    // Return true so the loop re-enters promptly after
                    // handling one failing frame.
                    return true;
                }

                auto ret = mInputReadyCondition.waitRelative(mMutex, kWaitDuration);
                if (ret == TIMED_OUT) {
                    // Timeout is normal: loop again to poll for new input.
                    return true;
                } else if (ret != OK) {
                    ALOGE("%s: Timed wait on condition failed: %s (%d)", __FUNCTION__,
                            strerror(-ret), ret);
                    return false;
                }
            }
        }
    }

    // Encode outside the lock; only re-acquire it to record the outcome.
    auto res = processInputFrame(frameNumber, mPendingInputFrames[frameNumber]);
    Mutex::Autolock l(mMutex);
    if (res != OK) {
        ALOGE("%s: Failed processing frame with timestamp: %" PRIu64 ", frameNumber: %"
                PRId64 ": %s (%d)", __FUNCTION__, mPendingInputFrames[frameNumber].timestamp,
                frameNumber, strerror(-res), res);
        // Mark the frame failed; releaseInputFramesLocked() below will clean
        // it up and notify the client.
        mPendingInputFrames[frameNumber].error = true;
    }

    releaseInputFramesLocked();

    return true;
}
2381
flagAnExifErrorFrameNumber(int64_t frameNumber)2382 void HeicCompositeStream::flagAnExifErrorFrameNumber(int64_t frameNumber) {
2383 Mutex::Autolock l(mMutex);
2384 mExifErrorFrameNumbers.emplace(frameNumber);
2385 mInputReadyCondition.signal();
2386 }
2387
onStreamBufferError(const CaptureResultExtras & resultExtras)2388 bool HeicCompositeStream::onStreamBufferError(const CaptureResultExtras& resultExtras) {
2389 bool res = false;
2390 int64_t frameNumber = resultExtras.frameNumber;
2391
2392 // Buffer errors concerning internal composite streams should not be directly visible to
2393 // camera clients. They must only receive a single buffer error with the public composite
2394 // stream id.
2395 if (resultExtras.errorStreamId == mAppSegmentStreamId) {
2396 ALOGV("%s: APP_SEGMENT frameNumber: %" PRId64, __FUNCTION__, frameNumber);
2397 flagAnExifErrorFrameNumber(frameNumber);
2398 res = true;
2399 } else if (resultExtras.errorStreamId == mMainImageStreamId) {
2400 ALOGV("%s: YUV frameNumber: %" PRId64, __FUNCTION__, frameNumber);
2401 flagAnErrorFrameNumber(frameNumber);
2402 res = true;
2403 }
2404
2405 return res;
2406 }
2407
onResultError(const CaptureResultExtras & resultExtras)2408 void HeicCompositeStream::onResultError(const CaptureResultExtras& resultExtras) {
2409 // For result error, since the APPS_SEGMENT buffer already contains EXIF,
2410 // simply skip using the capture result metadata to override EXIF.
2411 Mutex::Autolock l(mMutex);
2412
2413 int64_t timestamp = -1;
2414 for (const auto& fn : mSettingsByFrameNumber) {
2415 if (fn.first == resultExtras.frameNumber) {
2416 timestamp = fn.second.timestamp;
2417 break;
2418 }
2419 }
2420 if (timestamp == -1) {
2421 for (const auto& inputFrame : mPendingInputFrames) {
2422 if (inputFrame.first == resultExtras.frameNumber) {
2423 timestamp = inputFrame.second.timestamp;
2424 break;
2425 }
2426 }
2427 }
2428
2429 if (timestamp == -1) {
2430 ALOGE("%s: Failed to find shutter timestamp for result error!", __FUNCTION__);
2431 return;
2432 }
2433
2434 mCaptureResults.emplace(timestamp, std::make_tuple(resultExtras.frameNumber, CameraMetadata()));
2435 ALOGV("%s: timestamp %" PRId64 ", frameNumber %" PRId64, __FUNCTION__,
2436 timestamp, resultExtras.frameNumber);
2437 mInputReadyCondition.signal();
2438 }
2439
onRequestError(const CaptureResultExtras & resultExtras)2440 void HeicCompositeStream::onRequestError(const CaptureResultExtras& resultExtras) {
2441 auto frameNumber = resultExtras.frameNumber;
2442 ALOGV("%s: frameNumber: %" PRId64, __FUNCTION__, frameNumber);
2443 Mutex::Autolock l(mMutex);
2444 auto numRequests = mSettingsByFrameNumber.erase(frameNumber);
2445 if (numRequests == 0) {
2446 // Pending request has been populated into mPendingInputFrames
2447 mErrorFrameNumbers.emplace(frameNumber);
2448 mInputReadyCondition.signal();
2449 } else {
2450 // REQUEST_ERROR was received without onShutter.
2451 }
2452 }
2453
markTrackerIdle()2454 void HeicCompositeStream::markTrackerIdle() {
2455 sp<StatusTracker> statusTracker = mStatusTracker.promote();
2456 if (statusTracker != nullptr) {
2457 statusTracker->markComponentIdle(mStatusId, Fence::NO_FENCE);
2458 ALOGV("%s: Mark component as idle", __FUNCTION__);
2459 }
2460 }
2461
// Dispatches asynchronous MediaCodec callback messages (delivered as AMessage
// notifications) to the owning HeicCompositeStream. Each callback type is
// unpacked from the message fields; missing mandatory fields are logged and
// the message is dropped. mIsGainmap distinguishes the gainmap codec instance
// from the main-image codec instance.
void HeicCompositeStream::CodecCallbackHandler::onMessageReceived(const sp<AMessage> &msg) {
    // The parent stream is held weakly; bail out if it has been destroyed.
    sp<HeicCompositeStream> parent = mParent.promote();
    if (parent == nullptr) return;

    switch (msg->what()) {
        case kWhatCallbackNotify: {
            int32_t cbID;
            if (!msg->findInt32("callbackID", &cbID)) {
                ALOGE("kWhatCallbackNotify: callbackID is expected.");
                break;
            }

            ALOGV("kWhatCallbackNotify: cbID = %d", cbID);

            switch (cbID) {
                // Codec has a free input buffer at |index|.
                case MediaCodec::CB_INPUT_AVAILABLE: {
                    int32_t index;
                    if (!msg->findInt32("index", &index)) {
                        ALOGE("CB_INPUT_AVAILABLE: index is expected.");
                        break;
                    }
                    parent->onHeicInputFrameAvailable(index, mIsGainmap);
                    break;
                }

                // Codec produced an output buffer; forward its metadata.
                case MediaCodec::CB_OUTPUT_AVAILABLE: {
                    int32_t index;
                    size_t offset;
                    size_t size;
                    int64_t timeUs;
                    int32_t flags;

                    if (!msg->findInt32("index", &index)) {
                        ALOGE("CB_OUTPUT_AVAILABLE: index is expected.");
                        break;
                    }
                    if (!msg->findSize("offset", &offset)) {
                        ALOGE("CB_OUTPUT_AVAILABLE: offset is expected.");
                        break;
                    }
                    if (!msg->findSize("size", &size)) {
                        ALOGE("CB_OUTPUT_AVAILABLE: size is expected.");
                        break;
                    }
                    if (!msg->findInt64("timeUs", &timeUs)) {
                        ALOGE("CB_OUTPUT_AVAILABLE: timeUs is expected.");
                        break;
                    }
                    if (!msg->findInt32("flags", &flags)) {
                        ALOGE("CB_OUTPUT_AVAILABLE: flags is expected.");
                        break;
                    }

                    // Pack the fields into the stream's buffer-info struct.
                    // offset/size are narrowed from size_t to int32_t here.
                    CodecOutputBufferInfo bufferInfo = {
                        index,
                        (int32_t)offset,
                        (int32_t)size,
                        timeUs,
                        (uint32_t)flags};

                    parent->onHeicOutputFrameAvailable(bufferInfo, mIsGainmap);
                    break;
                }

                case MediaCodec::CB_OUTPUT_FORMAT_CHANGED: {
                    sp<AMessage> format;
                    if (!msg->findMessage("format", &format)) {
                        ALOGE("CB_OUTPUT_FORMAT_CHANGED: format is expected.");
                        break;
                    }
                    // Here format is MediaCodec's internal copy of output format.
                    // Make a copy since onHeicFormatChanged() might modify it.
                    sp<AMessage> formatCopy;
                    if (format != nullptr) {
                        formatCopy = format->dup();
                    }
                    parent->onHeicFormatChanged(formatCopy, mIsGainmap);
                    break;
                }

                // Fatal codec error; "detail" is optional, so its absence is
                // not treated as a malformed message.
                case MediaCodec::CB_ERROR: {
                    status_t err;
                    int32_t actionCode;
                    AString detail;
                    if (!msg->findInt32("err", &err)) {
                        ALOGE("CB_ERROR: err is expected.");
                        break;
                    }
                    if (!msg->findInt32("action", &actionCode)) {
                        ALOGE("CB_ERROR: action is expected.");
                        break;
                    }
                    msg->findString("detail", &detail);
                    ALOGE("Codec reported error(0x%x), actionCode(%d), detail(%s)",
                            err, actionCode, detail.c_str());

                    parent->onHeicCodecError();
                    break;
                }

                default: {
                    ALOGE("kWhatCallbackNotify: callbackID(%d) is unexpected.", cbID);
                    break;
                }
            }
            break;
        }

        default:
            ALOGE("shouldn't be here");
            break;
    }
}
2575
2576 }; // namespace camera3
2577 }; // namespace android
2578