xref: /aosp_15_r20/frameworks/av/media/libstagefright/CameraSourceTimeLapse.cpp (revision ec779b8e0859a360c3d303172224686826e6e0e1)
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <inttypes.h>

//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSourceTimeLapse"

#include <media/hardware/HardwareAPI.h>
#include <binder/IPCThreadState.h>
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/CameraSourceTimeLapse.h>
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <gui/Flags.h>
#include <utils/String8.h>
#include <utils/Vector.h>

namespace android {

// static
CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera(
        const sp<hardware::ICamera> &camera,
        const sp<ICameraRecordingProxy> &proxy,
        int32_t cameraId,
        const String16& clientName,
        uid_t clientUid,
        pid_t clientPid,
        Size videoSize,
        int32_t videoFrameRate,
#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
        const sp<Surface>& surface,
#else
        const sp<IGraphicBufferProducer>& surface,
#endif
        int64_t timeBetweenFrameCaptureUs) {

    CameraSourceTimeLapse *source = new
            CameraSourceTimeLapse(camera, proxy, cameraId,
                clientName, clientUid, clientPid,
                videoSize, videoFrameRate, surface,
                timeBetweenFrameCaptureUs);

    if (source != NULL) {
        if (source->initCheck() != OK) {
            delete source;
            return NULL;
        }
    }
    return source;
}

CameraSourceTimeLapse::CameraSourceTimeLapse(
        const sp<hardware::ICamera>& camera,
        const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId,
        const String16& clientName,
        uid_t clientUid,
        pid_t clientPid,
        Size videoSize,
        int32_t videoFrameRate,
#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
        const sp<Surface>& surface,
#else
        const sp<IGraphicBufferProducer>& surface,
#endif
        int64_t timeBetweenFrameCaptureUs)
      : CameraSource(camera, proxy, cameraId, clientName, clientUid, clientPid,
                videoSize, videoFrameRate, surface),
      mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
      mLastTimeLapseFrameRealTimestampUs(0),
      mSkipCurrentFrame(false) {

    mTimeBetweenFrameCaptureUs = timeBetweenFrameCaptureUs;
    ALOGD("starting time lapse mode: %" PRId64 " us",
        mTimeBetweenFrameCaptureUs);

    mVideoWidth = videoSize.width;
    mVideoHeight = videoSize.height;

    if (OK == mInitCheck && !trySettingVideoSize(videoSize.width, videoSize.height)) {
        releaseCamera();
        mInitCheck = NO_INIT;
    }

    // Initialize quick stop variables.
    mQuickStop = false;
    mForceRead = false;
    mLastReadBufferCopy = NULL;
    mStopWaitingForIdleCamera = false;
}

CameraSourceTimeLapse::~CameraSourceTimeLapse() {
    if (mLastReadBufferCopy) {
        mLastReadBufferCopy->release();
        mLastReadBufferCopy = NULL;
    }
}

void CameraSourceTimeLapse::startQuickReadReturns() {
    ALOGV("startQuickReadReturns");
    Mutex::Autolock autoLock(mQuickStopLock);

    // Enable quick stop mode.
    mQuickStop = true;

    // Force dataCallbackTimestamp() coming from the video camera to
    // not skip the next frame, as we want read() to get a frame
    // right away.
    mForceRead = true;
}

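// Checks whether the requested width x height is among the camera's supported
// video sizes (or preview sizes, when no video sizes are advertised) and, if
// so, applies it to the camera parameters.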
bool CameraSourceTimeLapse::trySettingVideoSize(
        int32_t width, int32_t height) {

    ALOGV("trySettingVideoSize");
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    String8 s = mCamera->getParameters();

    CameraParameters params(s);
    Vector<Size> supportedSizes;
    params.getSupportedVideoSizes(supportedSizes);
    bool videoOutputSupported = false;
    if (supportedSizes.size() == 0) {
        params.getSupportedPreviewSizes(supportedSizes);
    } else {
        videoOutputSupported = true;
    }

    bool videoSizeSupported = false;
    for (size_t i = 0; i < supportedSizes.size(); ++i) {
        int32_t pictureWidth = supportedSizes[i].width;
        int32_t pictureHeight = supportedSizes[i].height;

        if ((pictureWidth == width) && (pictureHeight == height)) {
            videoSizeSupported = true;
        }
    }

    bool isSuccessful = false;
    if (videoSizeSupported) {
        ALOGV("Video size (%d, %d) is supported", width, height);
        if (videoOutputSupported) {
            params.setVideoSize(width, height);
        } else {
            params.setPreviewSize(width, height);
        }
        if (mCamera->setParameters(params.flatten()) == OK) {
            isSuccessful = true;
        } else {
            ALOGE("Failed to set preview size to %dx%d", width, height);
            isSuccessful = false;
        }
    }

    IPCThreadState::self()->restoreCallingIdentity(token);
    return isSuccessful;
}

void CameraSourceTimeLapse::signalBufferReturned(MediaBufferBase* buffer) {
    ALOGV("signalBufferReturned");
    Mutex::Autolock autoLock(mQuickStopLock);
    if (mQuickStop && (buffer == mLastReadBufferCopy)) {
        buffer->setObserver(NULL);
        buffer->release();
        mLastReadBufferCopy = NULL;
        mForceRead = true;
    } else {
        return CameraSource::signalBufferReturned(buffer);
    }
}

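// Makes a deep copy of |sourceBuffer| into *|newBuffer| and stamps the copy
// with |frameTime| as its kKeyTime metadata.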
void createMediaBufferCopy(
        const MediaBufferBase& sourceBuffer,
        int64_t frameTime,
        MediaBufferBase **newBuffer) {

    ALOGV("createMediaBufferCopy");
    size_t sourceSize = sourceBuffer.size();
    void* sourcePointer = sourceBuffer.data();

    (*newBuffer) = new MediaBuffer(sourceSize);
    memcpy((*newBuffer)->data(), sourcePointer, sourceSize);

    (*newBuffer)->meta_data().setInt64(kKeyTime, frameTime);
}

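// Saves a copy of |sourceBuffer| (preserving its timestamp) as
// mLastReadBufferCopy and registers this object as the copy's observer, so
// that read() can keep returning the last frame after a quick stop.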
void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBufferBase& sourceBuffer) {
    ALOGV("fillLastReadBufferCopy");
    int64_t frameTime;
    CHECK(sourceBuffer.meta_data().findInt64(kKeyTime, &frameTime));
    createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy);
    mLastReadBufferCopy->add_ref();
    mLastReadBufferCopy->setObserver(this);
}

status_t CameraSourceTimeLapse::read(
        MediaBufferBase **buffer, const ReadOptions *options) {
    ALOGV("read");
    if (mLastReadBufferCopy == NULL) {
        mLastReadStatus = CameraSource::read(buffer, options);

        // mQuickStop may have turned to true while read was blocked.
        // Make a copy of the buffer in that case.
        Mutex::Autolock autoLock(mQuickStopLock);
        if (mQuickStop && *buffer) {
            fillLastReadBufferCopy(**buffer);
        }
        return mLastReadStatus;
    } else {
        (*buffer) = mLastReadBufferCopy;
        (*buffer)->add_ref();
        return mLastReadStatus;
    }
}

bool CameraSourceTimeLapse::skipCurrentFrame(int64_t /* timestampUs */) {
    ALOGV("skipCurrentFrame");
    if (mSkipCurrentFrame) {
        mSkipCurrentFrame = false;
        return true;
    } else {
        return false;
    }
}

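// Decides whether the frame with real-time stamp *timestampUs should be
// skipped. If the frame is kept, *timestampUs is rewritten to one video-frame
// duration past the last encoded frame so that playback runs at the target
// frame rate.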
bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
    ALOGV("skipFrameAndModifyTimeStamp");
    if (mLastTimeLapseFrameRealTimestampUs == 0) {
        // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
        // to current time (timestampUs) and save frame data.
        ALOGV("dataCallbackTimestamp timelapse: initial frame");

        mLastTimeLapseFrameRealTimestampUs = *timestampUs;
        return false;
    }

    {
        Mutex::Autolock autoLock(mQuickStopLock);

        // mForceRead may be set to true by startQuickReadReturns(). In that
        // case don't skip this frame.
        if (mForceRead) {
            ALOGV("dataCallbackTimestamp timelapse: forced read");
            mForceRead = false;
            *timestampUs =
                mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;

            // Really make sure that this video recording frame will not be dropped.
            if (*timestampUs < mStartTimeUs) {
                ALOGI("set timestampUs to start time stamp %" PRId64 " us", mStartTimeUs);
                *timestampUs = mStartTimeUs;
            }
            return false;
        }
    }

    // Workaround to bypass the first 2 input frames for skipping.
    // The first 2 output frames from the encoder are: decoder specific info and
    // the compressed video frame data for the first input video frame.
    if (mNumFramesEncoded >= 1 && *timestampUs <
        (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenFrameCaptureUs)) {
        // Skip all frames from last encoded frame until
        // sufficient time (mTimeBetweenFrameCaptureUs) has passed.
        // Tell the camera to release its recording frame and return.
        ALOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");
        return true;
    } else {
        // Desired frame has arrived after mTimeBetweenFrameCaptureUs time:
        // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
        // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
        // of the last encoded frame's time stamp.
        ALOGV("dataCallbackTimestamp timelapse: got timelapse frame");

        mLastTimeLapseFrameRealTimestampUs = *timestampUs;
        *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
        return false;
    }
    return false;
}

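// Converts the buffer timestamp to microseconds, records whether the frame
// should be skipped (re-stamping it if it is kept), then forwards the frame to
// CameraSource::processBufferQueueFrame().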
void CameraSourceTimeLapse::processBufferQueueFrame(BufferItem& buffer) {
    ALOGV("processBufferQueueFrame");
    int64_t timestampUs = buffer.mTimestamp / 1000;
    mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
    buffer.mTimestamp = timestampUs * 1000;
    CameraSource::processBufferQueueFrame(buffer);
}

}  // namespace android