xref: /aosp_15_r20/frameworks/av/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp (revision ec779b8e0859a360c3d303172224686826e6e0e1)
1 /*
2  * Copyright (C) 2013-2018 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #define LOG_TAG "Camera2-ZslProcessor"
18 #define ATRACE_TAG ATRACE_TAG_CAMERA
19 //#define LOG_NDEBUG 0
20 //#define LOG_NNDEBUG 0
21 
22 #ifdef LOG_NNDEBUG
23 #define ALOGVV(...) ALOGV(__VA_ARGS__)
24 #else
25 #define ALOGVV(...) if (0) ALOGV(__VA_ARGS__)
26 #endif
27 
28 #include <inttypes.h>
29 
30 #include <camera/StringUtils.h>
31 #include <com_android_graphics_libgui_flags.h>
32 #include <gui/Surface.h>
33 #include <utils/Log.h>
34 #include <utils/Trace.h>
35 
36 #include "common/CameraDeviceBase.h"
37 #include "api1/Camera2Client.h"
38 #include "api1/client2/CaptureSequencer.h"
39 #include "api1/client2/ZslProcessor.h"
40 #include "device3/Camera3Device.h"
41 
42 typedef android::RingBufferConsumer::PinnedBufferItem PinnedBufferItem;
43 
44 namespace android {
45 namespace camera2 {
46 
47 using android::camera3::CAMERA_STREAM_ROTATION_0;
48 using android::camera3::CAMERA_TEMPLATE_STILL_CAPTURE;
49 
50 namespace {
51 struct TimestampFinder : public RingBufferConsumer::RingBufferComparator {
52     typedef RingBufferConsumer::BufferInfo BufferInfo;
53 
54     enum {
55         SELECT_I1 = -1,
56         SELECT_I2 = 1,
57         SELECT_NEITHER = 0,
58     };
59 
TimestampFinderandroid::camera2::__anon13674e3a0111::TimestampFinder60     explicit TimestampFinder(nsecs_t timestamp) : mTimestamp(timestamp) {}
~TimestampFinderandroid::camera2::__anon13674e3a0111::TimestampFinder61     ~TimestampFinder() {}
62 
63     template <typename T>
swapandroid::camera2::__anon13674e3a0111::TimestampFinder64     static void swap(T& a, T& b) {
65         T tmp = a;
66         a = b;
67         b = tmp;
68     }
69 
70     /**
71      * Try to find the best candidate for a ZSL buffer.
72      * Match priority from best to worst:
73      *  1) Timestamps match.
74      *  2) Timestamp is closest to the needle (and lower).
75      *  3) Timestamp is closest to the needle (and higher).
76      *
77      */
compareandroid::camera2::__anon13674e3a0111::TimestampFinder78     virtual int compare(const BufferInfo *i1,
79                         const BufferInfo *i2) const {
80         // Try to select non-null object first.
81         if (i1 == NULL) {
82             return SELECT_I2;
83         } else if (i2 == NULL) {
84             return SELECT_I1;
85         }
86 
87         // Best result: timestamp is identical
88         if (i1->mTimestamp == mTimestamp) {
89             return SELECT_I1;
90         } else if (i2->mTimestamp == mTimestamp) {
91             return SELECT_I2;
92         }
93 
94         const BufferInfo* infoPtrs[2] = {
95             i1,
96             i2
97         };
98         int infoSelectors[2] = {
99             SELECT_I1,
100             SELECT_I2
101         };
102 
103         // Order i1,i2 so that always i1.timestamp < i2.timestamp
104         if (i1->mTimestamp > i2->mTimestamp) {
105             swap(infoPtrs[0], infoPtrs[1]);
106             swap(infoSelectors[0], infoSelectors[1]);
107         }
108 
109         // Second best: closest (lower) timestamp
110         if (infoPtrs[1]->mTimestamp < mTimestamp) {
111             return infoSelectors[1];
112         } else if (infoPtrs[0]->mTimestamp < mTimestamp) {
113             return infoSelectors[0];
114         }
115 
116         // Worst: closest (higher) timestamp
117         return infoSelectors[0];
118 
119         /**
120          * The above cases should cover all the possibilities,
121          * and we get an 'empty' result only if the ring buffer
122          * was empty itself
123          */
124     }
125 
126     const nsecs_t mTimestamp;
127 }; // struct TimestampFinder
128 } // namespace anonymous
129 
/**
 * Construct the ZSL processor for the given client.
 *
 * Sizes the metadata frame list from the device's reported
 * android.request.pipelineMaxDepth (falling back to a default when the tag
 * is absent), and makes the buffer queue one entry deeper than the frame
 * list because buffers can arrive before their matching metadata.  Also
 * records whether the device has a focuser (minimum focus distance != 0),
 * which decides whether AF state is checked when picking ZSL candidates.
 */
ZslProcessor::ZslProcessor(
    sp<Camera2Client> client,
    wp<CaptureSequencer> sequencer):
        Thread(false),
        mLatestClearedBufferTimestamp(0),
        mState(RUNNING),
        mClient(client),
        mSequencer(sequencer),
        mId(client->getCameraId()),
        mZslStreamId(NO_STREAM),
        mInputStreamId(NO_STREAM),
        mFrameListHead(0),
        mHasFocuser(false),
        mInputBuffer(nullptr),
        mProducer(nullptr),
#if WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
        mInputSurface(nullptr),
#else
        mInputProducer(nullptr),
        mInputProducerSlot(-1),
#endif
        mBuffersToDetach(0) {
    // Initialize buffer queue and frame list based on pipeline max depth.
    size_t pipelineMaxDepth = kDefaultMaxPipelineDepth;
    if (client != 0) {
        sp<Camera3Device> device =
        static_cast<Camera3Device*>(client->getCameraDevice().get());
        if (device != 0) {
            camera_metadata_ro_entry_t entry =
                device->info().find(ANDROID_REQUEST_PIPELINE_MAX_DEPTH);
            if (entry.count == 1) {
                pipelineMaxDepth = entry.data.u8[0];
            } else {
                ALOGW("%s: Unable to find the android.request.pipelineMaxDepth,"
                        " use default pipeline max depth %d", __FUNCTION__,
                        kDefaultMaxPipelineDepth);
            }

            // A non-zero minimum focus distance indicates the device can
            // actually focus; fixed-focus devices report 0.
            entry = device->info().find(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE);
            if (entry.count > 0 && entry.data.f[0] != 0.) {
                mHasFocuser = true;
            }
        }
    }

    ALOGV("%s: Initialize buffer queue and frame list depth based on max pipeline depth (%zu)",
          __FUNCTION__, pipelineMaxDepth);
    // Need to keep buffer queue longer than metadata queue because sometimes buffer arrives
    // earlier than metadata which causes the buffer corresponding to oldest metadata being
    // removed.
    mFrameListDepth = pipelineMaxDepth;
    mBufferQueueDepth = mFrameListDepth + 1;

    mZslQueue.insertAt(0, mBufferQueueDepth);
    mFrameList.resize(mFrameListDepth);
    // Register back with the capture sequencer so it can trigger ZSL pushes.
    sp<CaptureSequencer> captureSequencer = mSequencer.promote();
    if (captureSequencer != 0) captureSequencer->setZslProcessor(this);
}
188 
ZslProcessor::~ZslProcessor() {
    ALOGV("%s: Exit", __FUNCTION__);
    // Tear down the ZSL/input streams and release the input surface/producer.
    deleteStream();
}
193 
onResultAvailable(const CaptureResult & result)194 void ZslProcessor::onResultAvailable(const CaptureResult &result) {
195     ATRACE_CALL();
196     ALOGV("%s:", __FUNCTION__);
197     Mutex::Autolock l(mInputMutex);
198     camera_metadata_ro_entry_t entry;
199     entry = result.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
200     nsecs_t timestamp = entry.data.i64[0];
201     if (entry.count == 0) {
202         ALOGE("%s: metadata doesn't have timestamp, skip this result", __FUNCTION__);
203         return;
204     }
205 
206     entry = result.mMetadata.find(ANDROID_REQUEST_FRAME_COUNT);
207     if (entry.count == 0) {
208         ALOGE("%s: metadata doesn't have frame number, skip this result", __FUNCTION__);
209         return;
210     }
211     int32_t frameNumber = entry.data.i32[0];
212 
213     ALOGVV("Got preview metadata for frame %d with timestamp %" PRId64, frameNumber, timestamp);
214 
215     if (mState != RUNNING) return;
216 
217     // Corresponding buffer has been cleared. No need to push into mFrameList
218     if (timestamp <= mLatestClearedBufferTimestamp) return;
219 
220     mFrameList[mFrameListHead] = result.mMetadata;
221     mFrameListHead = (mFrameListHead + 1) % mFrameListDepth;
222 }
223 
/**
 * Create (if needed) the two streams used for ZSL:
 *  - a device input stream used to submit reprocess buffers, and
 *  - a device output stream backed by a RingBufferConsumer that the HAL
 *    fills with the most recent ZSL candidate buffers.
 * Also registers this processor as a listener for preview result metadata.
 *
 * @param params current camera parameters (provides the ZSL resolution)
 * @return OK on success, INVALID_OPERATION if client/device are gone, or
 *         the stream-creation error otherwise.
 */
status_t ZslProcessor::updateStream(const Parameters &params) {
    ATRACE_CALL();
    ALOGV("%s: Configuring ZSL streams", __FUNCTION__);
    status_t res;

    Mutex::Autolock l(mInputMutex);

    sp<Camera2Client> client = mClient.promote();
    if (client == 0) {
        ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
        return INVALID_OPERATION;
    }
    sp<Camera3Device> device =
        static_cast<Camera3Device*>(client->getCameraDevice().get());
    if (device == 0) {
        ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
        return INVALID_OPERATION;
    }

    // Input stream: where pinned ZSL buffers are re-queued for reprocess.
    if (mInputStreamId == NO_STREAM) {
        res = device->createInputStream(params.fastInfo.usedZslSize.width,
            params.fastInfo.usedZslSize.height, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
            /*isMultiResolution*/false, &mInputStreamId);
        if (res != OK) {
            ALOGE("%s: Camera %d: Can't create input stream: "
                    "%s (%d)", __FUNCTION__, client->getCameraId(),
                    strerror(-res), res);
            return res;
        }
    }

    if (mZslStreamId == NO_STREAM) {
        // Create stream for HAL production
        // TODO: Sort out better way to select resolution for ZSL
#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
        // Consumer owns its buffer queue; obtain the producer-side Surface
        // directly from it.
        mProducer = new RingBufferConsumer(GRALLOC_USAGE_HW_CAMERA_ZSL, mBufferQueueDepth);
        mProducer->setName("Camera2-ZslRingBufferConsumer");
        sp<Surface> outSurface = mProducer->getSurface();
#else
        // Legacy path: create an explicit BufferQueue and wrap the producer
        // end in a Surface for the device output stream.
        sp<IGraphicBufferProducer> producer;
        sp<IGraphicBufferConsumer> consumer;
        BufferQueue::createBufferQueue(&producer, &consumer);
        mProducer = new RingBufferConsumer(consumer, GRALLOC_USAGE_HW_CAMERA_ZSL,
            mBufferQueueDepth);
        mProducer->setName("Camera2-ZslRingBufferConsumer");
        sp<Surface> outSurface = new Surface(producer);
#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)

        res = device->createStream(outSurface, params.fastInfo.usedZslSize.width,
            params.fastInfo.usedZslSize.height, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
            HAL_DATASPACE_UNKNOWN, CAMERA_STREAM_ROTATION_0, &mZslStreamId,
            std::string(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
        if (res != OK) {
            ALOGE("%s: Camera %d: Can't create ZSL stream: "
                    "%s (%d)", __FUNCTION__, client->getCameraId(),
                    strerror(-res), res);
            return res;
        }
    }

    // Receive preview result metadata (full results only, no partials).
    client->registerFrameListener(Camera2Client::kPreviewRequestIdStart,
            Camera2Client::kPreviewRequestIdEnd,
            this,
            /*sendPartials*/false);

    return OK;
}
291 
deleteStream()292 status_t ZslProcessor::deleteStream() {
293     ATRACE_CALL();
294     status_t res;
295     sp<Camera3Device> device = nullptr;
296     sp<Camera2Client> client = nullptr;
297 
298     Mutex::Autolock l(mInputMutex);
299 
300     if ((mZslStreamId != NO_STREAM) || (mInputStreamId != NO_STREAM)) {
301         client = mClient.promote();
302         if (client == 0) {
303             ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
304             return INVALID_OPERATION;
305         }
306 
307         device =
308             reinterpret_cast<Camera3Device*>(client->getCameraDevice().get());
309         if (device == 0) {
310             ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
311             return INVALID_OPERATION;
312         }
313     }
314 
315     if (mZslStreamId != NO_STREAM) {
316         res = device->deleteStream(mZslStreamId);
317         if (res != OK) {
318             ALOGE("%s: Camera %d: Cannot delete ZSL output stream %d: "
319                     "%s (%d)", __FUNCTION__, client->getCameraId(),
320                     mZslStreamId, strerror(-res), res);
321             return res;
322         }
323 
324         mZslStreamId = NO_STREAM;
325     }
326     if (mInputStreamId != NO_STREAM) {
327         res = device->deleteStream(mInputStreamId);
328         if (res != OK) {
329             ALOGE("%s: Camera %d: Cannot delete input stream %d: "
330                     "%s (%d)", __FUNCTION__, client->getCameraId(),
331                     mInputStreamId, strerror(-res), res);
332             return res;
333         }
334 
335         mInputStreamId = NO_STREAM;
336     }
337 
338 #if WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
339     if (nullptr != mInputSurface.get()) {
340         // The surface destructor calls disconnect
341         mInputSurface.clear();
342     }
343 #else
344     if (nullptr != mInputProducer.get()) {
345         mInputProducer->disconnect(NATIVE_WINDOW_API_CPU);
346         mInputProducer.clear();
347     }
348 #endif
349 
350     return OK;
351 }
352 
getStreamId() const353 int ZslProcessor::getStreamId() const {
354     Mutex::Autolock l(mInputMutex);
355     return mZslStreamId;
356 }
357 
updateRequestWithDefaultStillRequest(CameraMetadata & request) const358 status_t ZslProcessor::updateRequestWithDefaultStillRequest(CameraMetadata &request) const {
359     sp<Camera2Client> client = mClient.promote();
360     if (client == 0) {
361         ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
362         return INVALID_OPERATION;
363     }
364     sp<Camera3Device> device =
365         static_cast<Camera3Device*>(client->getCameraDevice().get());
366     if (device == 0) {
367         ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
368         return INVALID_OPERATION;
369     }
370 
371     CameraMetadata stillTemplate;
372     device->createDefaultRequest(CAMERA_TEMPLATE_STILL_CAPTURE, &stillTemplate);
373 
374     // Find some of the post-processing tags, and assign the value from template to the request.
375     // Only check the aberration mode and noise reduction mode for now, as they are very important
376     // for image quality.
377     uint32_t postProcessingTags[] = {
378             ANDROID_NOISE_REDUCTION_MODE,
379             ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
380             ANDROID_COLOR_CORRECTION_MODE,
381             ANDROID_TONEMAP_MODE,
382             ANDROID_SHADING_MODE,
383             ANDROID_HOT_PIXEL_MODE,
384             ANDROID_EDGE_MODE
385     };
386 
387     camera_metadata_entry_t entry;
388     for (size_t i = 0; i < sizeof(postProcessingTags) / sizeof(uint32_t); i++) {
389         entry = stillTemplate.find(postProcessingTags[i]);
390         if (entry.count > 0) {
391             request.update(postProcessingTags[i], entry.data.u8, 1);
392         }
393     }
394 
395     return OK;
396 }
397 
notifyInputReleased()398 void ZslProcessor::notifyInputReleased() {
399     Mutex::Autolock l(mInputMutex);
400 
401     mBuffersToDetach++;
402     mBuffersToDetachSignal.signal();
403 }
404 
/**
 * Detach the input buffer that the HAL just released from the input
 * producer/surface, verify it is the buffer we queued, and unlock the
 * processor so new ZSL requests can be accepted.  Must be called with
 * mInputMutex held (from threadLoop).
 */
void ZslProcessor::doNotifyInputReleasedLocked() {
    assert(nullptr != mInputBuffer.get());
#if WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
    assert(nullptr != mInputSurface.get());
#else
    assert(nullptr != mInputProducer.get());
#endif

    sp<GraphicBuffer> gb;
    sp<Fence> fence;
    // Pull the released buffer back out of the producer/surface.
#if WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
    auto rc = mInputSurface->detachNextBuffer(&gb, &fence);
#else
    auto rc = mInputProducer->detachNextBuffer(&gb, &fence);
#endif
    if (NO_ERROR != rc) {
        ALOGE("%s: Failed to detach buffer from input producer: %d",
            __FUNCTION__, rc);
        return;
    }

    // Sanity check: the detached buffer must be the one we attached in
    // enqueueInputBufferByTimestamp (compare native handles).
    BufferItem &item = mInputBuffer->getBufferItem();
    sp<GraphicBuffer> inputBuffer = item.mGraphicBuffer;
    if (gb->handle != inputBuffer->handle) {
        ALOGE("%s: Input mismatch, expected buffer %p received %p", __FUNCTION__,
            inputBuffer->handle, gb->handle);
        return;
    }

    // Unpin the ring-buffer entry and drop cached metadata to free memory.
    mInputBuffer.clear();
    ALOGV("%s: Memory optimization, clearing ZSL queue",
          __FUNCTION__);
    clearZslResultQueueLocked();

    // Required so we accept more ZSL requests
    mState = RUNNING;
}
442 
onBufferReleased()443 void ZslProcessor::InputProducerListener::onBufferReleased() {
444     sp<ZslProcessor> parent = mParent.promote();
445     if (nullptr != parent.get()) {
446         parent->notifyInputReleased();
447     }
448 }
449 
/**
 * Submit a ZSL reprocess capture for the given request id.
 *
 * Picks the best candidate frame (by timestamp and 3A state) from the
 * cached metadata, pins and enqueues the matching ZSL buffer into the
 * device input stream, rewrites the cached metadata into a reprocess
 * request, stops the preview stream, and issues the capture.  On success
 * the processor transitions to LOCKED until the input buffer is released.
 *
 * @param requestId id to tag the reprocess request with
 * @return OK on success; NOT_ENOUGH_DATA when no suitable candidate or
 *         buffer exists (caller should fall back to a full capture);
 *         other errors as encountered.
 */
status_t ZslProcessor::pushToReprocess(int32_t requestId) {
    ALOGV("%s: Send in reprocess request with id %d",
            __FUNCTION__, requestId);
    Mutex::Autolock l(mInputMutex);
    status_t res;
    sp<Camera2Client> client = mClient.promote();

    if (client == 0) {
        ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
        return INVALID_OPERATION;
    }

    IF_ALOGV() {
        dumpZslQueue(-1);
    }

    size_t metadataIdx;
    nsecs_t candidateTimestamp = getCandidateTimestampLocked(&metadataIdx);

    if (candidateTimestamp == -1) {
        ALOGV("%s: Could not find good candidate for ZSL reprocessing",
              __FUNCTION__);
        return NOT_ENOUGH_DATA;
    } else {
        ALOGV("%s: Found good ZSL candidate idx: %u",
            __FUNCTION__, (unsigned int) metadataIdx);
    }

    // Lazily connect to the device's input stream the first time a
    // reprocess is issued.  Note: each preprocessor branch opens the `if`
    // that the shared code below closes.
#if WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
    if (nullptr == mInputSurface.get()) {
        res = client->getCameraDevice()->getInputSurface(
            &mInputSurface);
#else
    if (nullptr == mInputProducer.get()) {
        res = client->getCameraDevice()->getInputBufferProducer(
            &mInputProducer);
#endif
        if (res != OK) {
            ALOGE("%s: Camera %d: Unable to retrieve input producer: "
                    "%s (%d)", __FUNCTION__, client->getCameraId(),
                    strerror(-res), res);
            return res;
        }

        // The listener notifies us when the HAL releases the input buffer
        // so it can be detached again (see threadLoop).
#if WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
        res = mInputSurface->connect(NATIVE_WINDOW_API_CPU, new InputProducerListener(this),
            false);
#else
        IGraphicBufferProducer::QueueBufferOutput output;
        res = mInputProducer->connect(new InputProducerListener(this),
            NATIVE_WINDOW_API_CPU, false, &output);
#endif
        if (res != OK) {
            ALOGE("%s: Camera %d: Unable to connect to input producer: "
                    "%s (%d)", __FUNCTION__, client->getCameraId(),
                    strerror(-res), res);
            return res;
        }
    }

    // Pin the candidate buffer and queue it into the input stream.
    res = enqueueInputBufferByTimestamp(candidateTimestamp,
        /*actualTimestamp*/NULL);
    if (res == NO_BUFFER_AVAILABLE) {
        ALOGV("%s: No ZSL buffers yet", __FUNCTION__);
        return NOT_ENOUGH_DATA;
    } else if (res != OK) {
        ALOGE("%s: Unable to push buffer for reprocessing: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    {
        CameraMetadata request = mFrameList[metadataIdx];

        // Verify that the frame is reasonable for reprocessing

        camera_metadata_entry_t entry;
        entry = request.find(ANDROID_CONTROL_AE_STATE);
        if (entry.count == 0) {
            ALOGE("%s: ZSL queue frame has no AE state field!",
                    __FUNCTION__);
            return BAD_VALUE;
        }
        if (entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_CONVERGED &&
                entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_LOCKED) {
            ALOGV("%s: ZSL queue frame AE state is %d, need full capture",
                    __FUNCTION__, entry.data.u8[0]);
            return NOT_ENOUGH_DATA;
        }

        // Rewrite the cached result metadata into a reprocess request:
        // mark it as a reprocess type...
        uint8_t requestType = ANDROID_REQUEST_TYPE_REPROCESS;
        res = request.update(ANDROID_REQUEST_TYPE,
                &requestType, 1);
        if (res != OK) {
            ALOGE("%s: Unable to update request type",
                  __FUNCTION__);
            return INVALID_OPERATION;
        }

        // ...source it from the ZSL input stream...
        int32_t inputStreams[1] =
                { mInputStreamId };
        res = request.update(ANDROID_REQUEST_INPUT_STREAMS,
                inputStreams, 1);
        if (res != OK) {
            ALOGE("%s: Unable to update request input streams",
                  __FUNCTION__);
            return INVALID_OPERATION;
        }

        // ...mark it as a still capture...
        uint8_t captureIntent =
                static_cast<uint8_t>(ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE);
        res = request.update(ANDROID_CONTROL_CAPTURE_INTENT,
                &captureIntent, 1);
        if (res != OK ) {
            ALOGE("%s: Unable to update request capture intent",
                  __FUNCTION__);
            return INVALID_OPERATION;
        }

        // TODO: Shouldn't we also update the latest preview frame?
        // ...target the JPEG capture stream...
        int32_t outputStreams[1] =
                { client->getCaptureStreamId() };
        res = request.update(ANDROID_REQUEST_OUTPUT_STREAMS,
                outputStreams, 1);
        if (res != OK) {
            ALOGE("%s: Unable to update request output streams",
                  __FUNCTION__);
            return INVALID_OPERATION;
        }

        // ...and tag it with the caller-supplied request id.
        res = request.update(ANDROID_REQUEST_ID,
                &requestId, 1);
        if (res != OK ) {
            ALOGE("%s: Unable to update frame to a reprocess request",
                  __FUNCTION__);
            return INVALID_OPERATION;
        }

        res = client->stopStream();
        if (res != OK) {
            ALOGE("%s: Camera %d: Unable to stop preview for ZSL capture: "
                "%s (%d)",
                __FUNCTION__, client->getCameraId(), strerror(-res), res);
            return INVALID_OPERATION;
        }

        // Update JPEG settings
        {
            SharedParameters::Lock l(client->getParameters());
            res = l.mParameters.updateRequestJpeg(&request);
            if (res != OK) {
                ALOGE("%s: Camera %d: Unable to update JPEG entries of ZSL "
                        "capture request: %s (%d)", __FUNCTION__,
                        client->getCameraId(),
                        strerror(-res), res);
                return res;
            }
        }

        // Update post-processing settings
        res = updateRequestWithDefaultStillRequest(request);
        if (res != OK) {
            ALOGW("%s: Unable to update post-processing tags, the reprocessed image quality "
                    "may be compromised", __FUNCTION__);
        }

        mLatestCapturedRequest = request;
        res = client->getCameraDevice()->capture(request);
        if (res != OK ) {
            ALOGE("%s: Unable to send ZSL reprocess request to capture: %s"
                  " (%d)", __FUNCTION__, strerror(-res), res);
            return res;
        }

        // Freeze the queue until doNotifyInputReleasedLocked unlocks us.
        mState = LOCKED;
    }

    return OK;
}
629 
/**
 * Pin the ZSL ring-buffer entry that best matches the requested timestamp
 * and queue it into the device input stream for reprocessing.
 *
 * @param timestamp       the target sensor timestamp to search for
 * @param actualTimestamp optional out-param receiving the timestamp of the
 *                        buffer actually selected (may differ from the
 *                        requested one)
 * @return OK on success, NO_BUFFER_AVAILABLE when the ring buffer has no
 *         buffers, or the producer/surface error otherwise.
 */
status_t ZslProcessor::enqueueInputBufferByTimestamp(
        nsecs_t timestamp,
        nsecs_t* actualTimestamp) {

    TimestampFinder timestampFinder = TimestampFinder(timestamp);

    // Pin the best candidate so the ring buffer can't recycle it while the
    // HAL consumes it.
    mInputBuffer = mProducer->pinSelectedBuffer(timestampFinder,
        /*waitForFence*/false);

    if (nullptr == mInputBuffer.get()) {
        ALOGE("%s: No ZSL buffers were available yet", __FUNCTION__);
        return NO_BUFFER_AVAILABLE;
    }

    nsecs_t actual = mInputBuffer->getBufferItem().mTimestamp;

    if (actual != timestamp) {
        // TODO: This is problematic, the metadata queue timestamp should
        //       usually have a corresponding ZSL buffer with the same timestamp.
        //       If this is not the case, then it is possible that we will use
        //       a ZSL buffer from a different request, which can result in
        //       side effects during the reprocess pass.
        ALOGW("%s: ZSL buffer candidate search didn't find an exact match --"
              " requested timestamp = %" PRId64 ", actual timestamp = %" PRId64,
              __FUNCTION__, timestamp, actual);
    }

    if (nullptr != actualTimestamp) {
        *actualTimestamp = actual;
    }

    // Attach the pinned buffer to the input producer/surface, then queue it
    // with the original item's timestamp/crop/transform metadata.
    BufferItem &item = mInputBuffer->getBufferItem();
#if WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
    auto rc = mInputSurface->attachBuffer(item.mGraphicBuffer->getNativeBuffer());
#else
    auto rc = mInputProducer->attachBuffer(&mInputProducerSlot,
        item.mGraphicBuffer);
#endif
    if (OK != rc) {
        ALOGE("%s: Failed to attach input ZSL buffer to producer: %d",
            __FUNCTION__, rc);
        return rc;
    }

#if WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
    mInputSurface->setBuffersTimestamp(item.mTimestamp);
    mInputSurface->setBuffersDataSpace(static_cast<ui::Dataspace>(item.mDataSpace));
    mInputSurface->setCrop(&item.mCrop);
    mInputSurface->setScalingMode(item.mScalingMode);
    mInputSurface->setBuffersTransform(item.mTransform);
    rc = mInputSurface->queueBuffer(item.mGraphicBuffer, item.mFence);
#else
    IGraphicBufferProducer::QueueBufferOutput output;
    IGraphicBufferProducer::QueueBufferInput input(item.mTimestamp,
            item.mIsAutoTimestamp, item.mDataSpace, item.mCrop,
            item.mScalingMode, item.mTransform, item.mFence);
    rc = mInputProducer->queueBuffer(mInputProducerSlot, input, &output);
#endif
    if (OK != rc) {
        ALOGE("%s: Failed to queue ZSL buffer to producer: %d",
            __FUNCTION__, rc);
        return rc;
    }

    return rc;
}
696 
697 status_t ZslProcessor::clearInputRingBufferLocked(nsecs_t* latestTimestamp) {
698 
699     if (nullptr != latestTimestamp) {
700         *latestTimestamp = mProducer->getLatestTimestamp();
701     }
702     mInputBuffer.clear();
703 
704     return mProducer->clear();
705 }
706 
707 status_t ZslProcessor::clearZslQueue() {
708     Mutex::Autolock l(mInputMutex);
709     // If in middle of capture, can't clear out queue
710     if (mState == LOCKED) return OK;
711 
712     return clearZslQueueLocked();
713 }
714 
715 status_t ZslProcessor::clearZslQueueLocked() {
716     if (NO_STREAM != mZslStreamId) {
717         // clear result metadata list first.
718         clearZslResultQueueLocked();
719         return clearInputRingBufferLocked(&mLatestClearedBufferTimestamp);
720     }
721     return OK;
722 }
723 
724 void ZslProcessor::clearZslResultQueueLocked() {
725     mFrameList.clear();
726     mFrameListHead = 0;
727     mFrameList.resize(mFrameListDepth);
728 }
729 
730 void ZslProcessor::dump(int fd, const Vector<String16>& /*args*/) const {
731     Mutex::Autolock l(mInputMutex);
732     if (!mLatestCapturedRequest.isEmpty()) {
733         std::string result = "    Latest ZSL capture request:\n";
734         write(fd, result.c_str(), result.size());
735         mLatestCapturedRequest.dump(fd, 2, 6);
736     } else {
737         std::string result = "    Latest ZSL capture request: none yet\n";
738         write(fd, result.c_str(), result.size());
739     }
740     dumpZslQueue(fd);
741 }
742 
743 bool ZslProcessor::threadLoop() {
744     Mutex::Autolock l(mInputMutex);
745 
746     if (mBuffersToDetach == 0) {
747         status_t res = mBuffersToDetachSignal.waitRelative(mInputMutex, kWaitDuration);
748         if (res == TIMED_OUT) return true;
749     }
750     while (mBuffersToDetach > 0) {
751         doNotifyInputReleasedLocked();
752         mBuffersToDetach--;
753     }
754 
755     return true;
756 }
757 
758 void ZslProcessor::dumpZslQueue(int fd) const {
759     std::string header = "ZSL queue contents:";
760     std::string indent = "    ";
761     ALOGV("%s", header.c_str());
762     if (fd != -1) {
763         header = indent + header + "\n";
764         write(fd, header.c_str(), header.size());
765     }
766     for (size_t i = 0; i < mZslQueue.size(); i++) {
767         const ZslPair &queueEntry = mZslQueue[i];
768         nsecs_t bufferTimestamp = queueEntry.buffer.mTimestamp;
769         camera_metadata_ro_entry_t entry;
770         nsecs_t frameTimestamp = 0;
771         int frameAeState = -1;
772         if (!queueEntry.frame.isEmpty()) {
773             entry = queueEntry.frame.find(ANDROID_SENSOR_TIMESTAMP);
774             if (entry.count > 0) frameTimestamp = entry.data.i64[0];
775             entry = queueEntry.frame.find(ANDROID_CONTROL_AE_STATE);
776             if (entry.count > 0) frameAeState = entry.data.u8[0];
777         }
778         std::string result =
779                 fmt::sprintf("   %zu: b: %" PRId64 "\tf: %" PRId64 ", AE state: %d", i,
780                         bufferTimestamp, frameTimestamp, frameAeState);
781         ALOGV("%s", result.c_str());
782         if (fd != -1) {
783             result = indent + result + "\n";
784             write(fd, result.c_str(), result.size());
785         }
786 
787     }
788 }
789 
790 bool ZslProcessor::isFixedFocusMode(uint8_t afMode) const {
791     switch (afMode) {
792         case ANDROID_CONTROL_AF_MODE_AUTO:
793         case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
794         case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
795         case ANDROID_CONTROL_AF_MODE_MACRO:
796             return false;
797             break;
798         case ANDROID_CONTROL_AF_MODE_OFF:
799         case ANDROID_CONTROL_AF_MODE_EDOF:
800             return true;
801         default:
802             ALOGE("%s: unknown focus mode %d", __FUNCTION__, afMode);
803             return false;
804     }
805 }
806 
/**
 * Find the best ZSL candidate frame in the cached metadata list.
 *
 * Chooses the smallest timestamp whose 3A state qualifies for direct
 * reprocessing: AE must be converged or locked, and — when the device has
 * a focuser in a non-fixed focus mode — AF must be focused or locked.
 * Must be called with mInputMutex held.
 *
 * @param metadataIdx optional out-param receiving the mFrameList index of
 *                    the chosen frame
 * @return the candidate's sensor timestamp, or -1 when no frame qualifies
 */
nsecs_t ZslProcessor::getCandidateTimestampLocked(size_t* metadataIdx) const {
    /**
     * Find the smallest timestamp we know about so far
     * - ensure that aeState is either converged or locked
     */

    size_t idx = 0;
    nsecs_t minTimestamp = -1;

    size_t emptyCount = mFrameList.size();

    for (size_t j = 0; j < mFrameList.size(); j++) {
        const CameraMetadata &frame = mFrameList[j];
        if (!frame.isEmpty()) {

            emptyCount--;

            camera_metadata_ro_entry_t entry;
            entry = frame.find(ANDROID_SENSOR_TIMESTAMP);
            if (entry.count == 0) {
                ALOGE("%s: Can't find timestamp in frame!",
                        __FUNCTION__);
                continue;
            }
            nsecs_t frameTimestamp = entry.data.i64[0];
            // Only consider frames older than the current best candidate
            // (or any frame when no candidate has been found yet).
            if (minTimestamp > frameTimestamp || minTimestamp == -1) {

                entry = frame.find(ANDROID_CONTROL_AE_STATE);

                if (entry.count == 0) {
                    /**
                     * This is most likely a HAL bug. The aeState field is
                     * mandatory, so it should always be in a metadata packet.
                     */
                    ALOGW("%s: ZSL queue frame has no AE state field!",
                            __FUNCTION__);
                    continue;
                }
                if (entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_CONVERGED &&
                        entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_LOCKED) {
                    ALOGVV("%s: ZSL queue frame AE state is %d, need "
                           "full capture",  __FUNCTION__, entry.data.u8[0]);
                    continue;
                }

                entry = frame.find(ANDROID_CONTROL_AF_MODE);
                if (entry.count == 0) {
                    ALOGW("%s: ZSL queue frame has no AF mode field!",
                            __FUNCTION__);
                    continue;
                }
                // Check AF state if device has focuser and focus mode isn't fixed
                if (mHasFocuser) {
                    uint8_t afMode = entry.data.u8[0];
                    if (!isFixedFocusMode(afMode)) {
                        // Make sure the candidate frame has good focus.
                        entry = frame.find(ANDROID_CONTROL_AF_STATE);
                        if (entry.count == 0) {
                            ALOGW("%s: ZSL queue frame has no AF state field!",
                                    __FUNCTION__);
                            continue;
                        }
                        uint8_t afState = entry.data.u8[0];
                        if (afState != ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED &&
                                afState != ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED &&
                                afState != ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) {
                            ALOGVV("%s: ZSL queue frame AF state is %d is not good for capture,"
                                    " skip it", __FUNCTION__, afState);
                            continue;
                        }
                    }
                }

                // This frame passed all 3A checks; adopt it as the current
                // best candidate.
                minTimestamp = frameTimestamp;
                idx = j;
            }

            ALOGVV("%s: Saw timestamp %" PRId64, __FUNCTION__, frameTimestamp);
        }
    }

    if (emptyCount == mFrameList.size()) {
        /**
         * This could be mildly bad and means our ZSL was triggered before
         * there were any frames yet received by the camera framework.
         *
         * This is a fairly corner case which can happen under:
         * + a user presses the shutter button real fast when the camera starts
         *     (startPreview followed immediately by takePicture).
         * + burst capture case (hitting shutter button as fast possible)
         *
         * If this happens in steady case (preview running for a while, call
         *     a single takePicture) then this might be a fwk bug.
         */
        ALOGW("%s: ZSL queue has no metadata frames", __FUNCTION__);
    }

    ALOGV("%s: Candidate timestamp %" PRId64 " (idx %zu), empty frames: %zu",
          __FUNCTION__, minTimestamp, idx, emptyCount);

    if (metadataIdx) {
        *metadataIdx = idx;
    }

    return minTimestamp;
}
913 
914 }; // namespace camera2
915 }; // namespace android
916