xref: /aosp_15_r20/frameworks/av/media/libstagefright/CameraSource.cpp (revision ec779b8e0859a360c3d303172224686826e6e0e1)
1 /*
2  * Copyright (C) 2009 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
#include <inttypes.h>
#include <stdlib.h>

//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSource"
#include <utils/Log.h>

#include <OMX_Component.h>
#include <binder/IPCThreadState.h>
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <media/hardware/HardwareAPI.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <camera/StringUtils.h>
#include <com_android_graphics_libgui_flags.h>
#include <gui/Surface.h>
#include <gui/Flags.h>
#include <utils/String8.h>
#include <cutils/properties.h>
41 
42 #if LOG_NDEBUG
43 #define UNUSED_UNLESS_VERBOSE(x) (void)(x)
44 #else
45 #define UNUSED_UNLESS_VERBOSE(x)
46 #endif
47 
48 namespace android {
49 
50 static const int64_t CAMERA_SOURCE_TIMEOUT_NS = 3000000000LL;
51 
getColorFormat(const char * colorFormat)52 static int32_t getColorFormat(const char* colorFormat) {
53     if (!colorFormat) {
54         ALOGE("Invalid color format");
55         return -1;
56     }
57 
58     if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
59        return OMX_COLOR_FormatYUV420Planar;
60     }
61 
62     if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
63        return OMX_COLOR_FormatYUV422SemiPlanar;
64     }
65 
66     if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) {
67         return OMX_COLOR_FormatYUV420SemiPlanar;
68     }
69 
70     if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
71         return OMX_COLOR_FormatYCbYCr;
72     }
73 
74     if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) {
75        return OMX_COLOR_Format16bitRGB565;
76     }
77 
78     if (!strcmp(colorFormat, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar")) {
79        return OMX_TI_COLOR_FormatYUV420PackedSemiPlanar;
80     }
81 
82     if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_ANDROID_OPAQUE)) {
83         return OMX_COLOR_FormatAndroidOpaque;
84     }
85 
86     ALOGE("Uknown color format (%s), please add it to "
87          "CameraSource::getColorFormat", colorFormat);
88 
89     CHECK(!"Unknown color format");
90     return -1;
91 }
92 
93 // static
CreateFromCamera(const sp<hardware::ICamera> & camera,const sp<ICameraRecordingProxy> & proxy,int32_t cameraId,const String16 & clientName,uid_t clientUid,pid_t clientPid,Size videoSize,int32_t frameRate,const sp<SurfaceType> & surface)94 CameraSource *CameraSource::CreateFromCamera(
95     const sp<hardware::ICamera>& camera,
96     const sp<ICameraRecordingProxy>& proxy,
97     int32_t cameraId,
98     const String16& clientName,
99     uid_t clientUid,
100     pid_t clientPid,
101     Size videoSize,
102     int32_t frameRate,
103     const sp<SurfaceType>& surface) {
104 
105     CameraSource *source = new CameraSource(camera, proxy, cameraId,
106             clientName, clientUid, clientPid, videoSize, frameRate, surface);
107     return source;
108 }
109 
// Construct a CameraSource and eagerly initialize it against the given
// (or newly connected) camera. Callers learn the outcome via initCheck().
CameraSource::CameraSource(
    const sp<hardware::ICamera>& camera,
    const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId,
    const String16& clientName,
    uid_t clientUid,
    pid_t clientPid,
    Size videoSize,
    int32_t frameRate,
    const sp<SurfaceType>& surface)
    : mCameraFlags(0),
      mNumInputBuffers(0),
      mVideoFrameRate(-1),
      mCamera(0),
      mSurface(surface),
      mNumFramesReceived(0),
      mLastFrameTimestampUs(0),
      mStarted(false),
      mEos(false),
      mNumFramesEncoded(0),
      mTimeBetweenFrameCaptureUs(0),
      mFirstFrameTimeUs(0),
      mStopSystemTimeUs(-1),
      mNumFramesDropped(0),
      mNumGlitches(0),
      mGlitchDurationThresholdUs(200000),  // 200 ms default glitch threshold
      mCollectStats(false) {
    // -1 marks the size as "not configured yet"; init() fills in the
    // dimensions actually negotiated with the camera.
    mVideoSize.width  = -1;
    mVideoSize.height = -1;

    mInitCheck = init(camera, proxy, cameraId,
                    clientName, clientUid, clientPid,
                    videoSize, frameRate);
    // On failure release the camera immediately so its lock is not held.
    if (mInitCheck != OK) releaseCamera();
}
145 
initCheck() const146 status_t CameraSource::initCheck() const {
147     return mInitCheck;
148 }
149 
// Obtain and lock a camera for recording.
// If 'camera' is NULL ("cold" camera), connect to the camera identified by
// cameraId on behalf of (clientName, clientUid, clientPid). Otherwise wrap
// the application-supplied ICamera and keep its recording proxy — the
// "hot" camera case, where the app retains ownership.
// @return OK on success, -EBUSY if a camera object could not be obtained.
status_t CameraSource::isCameraAvailable(
    const sp<hardware::ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId, const std::string& clientName, uid_t clientUid, pid_t clientPid) {

    if (camera == 0) {
        // Cold camera: connect ourselves using the client's attribution so
        // permission checks are made against the recording app.
        AttributionSourceState clientAttribution;
        clientAttribution.pid = clientPid;
        clientAttribution.uid = clientUid;
        clientAttribution.deviceId = kDefaultDeviceId;
        clientAttribution.packageName = clientName;
        clientAttribution.token = sp<BBinder>::make();

        mCamera = Camera::connect(cameraId, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
                /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
                /*forceSlowJpegMode*/false, clientAttribution);
        if (mCamera == 0) return -EBUSY;
        mCameraFlags &= ~FLAGS_HOT_CAMERA;
    } else {
        // We get the proxy from Camera, not ICamera. We need to get the proxy
        // to the remote Camera owned by the application. Here mCamera is a
        // local Camera object created by us. We cannot use the proxy from
        // mCamera here.
        mCamera = Camera::create(camera);
        if (mCamera == 0) return -EBUSY;
        mCameraRecordingProxy = proxy;
        mCameraFlags |= FLAGS_HOT_CAMERA;
        mDeathNotifier = new DeathNotifier();
        // isBinderAlive needs linkToDeath to work.
        IInterface::asBinder(mCameraRecordingProxy)->linkToDeath(mDeathNotifier);
    }

    // Take the camera's lock so other clients cannot reconfigure it while
    // we record.
    mCamera->lock();

    return OK;
}
185 
186 
187 /*
188  * Check to see whether the requested video width and height is one
189  * of the supported sizes.
190  * @param width the video frame width in pixels
191  * @param height the video frame height in pixels
 * @param supportedSizes the vector of sizes that we check against
193  * @return true if the dimension (width and height) is supported.
194  */
isVideoSizeSupported(int32_t width,int32_t height,const Vector<Size> & supportedSizes)195 static bool isVideoSizeSupported(
196     int32_t width, int32_t height,
197     const Vector<Size>& supportedSizes) {
198 
199     ALOGV("isVideoSizeSupported");
200     for (size_t i = 0; i < supportedSizes.size(); ++i) {
201         if (width  == supportedSizes[i].width &&
202             height == supportedSizes[i].height) {
203             return true;
204         }
205     }
206     return false;
207 }
208 
209 /*
210  * If the preview and video output is separate, we only set the
211  * the video size, and applications should set the preview size
212  * to some proper value, and the recording framework will not
213  * change the preview size; otherwise, if the video and preview
214  * output is the same, we need to set the preview to be the same
215  * as the requested video size.
216  *
217  */
218 /*
219  * Query the camera to retrieve the supported video frame sizes
220  * and also to see whether CameraParameters::setVideoSize()
221  * is supported or not.
222  * @param params CameraParameters to retrieve the information
 * @param isSetVideoSizeSupported returns whether method
224  *      CameraParameters::setVideoSize() is supported or not.
225  * @param sizes returns the vector of Size objects for the
226  *      supported video frame sizes advertised by the camera.
227  */
getSupportedVideoSizes(const CameraParameters & params,bool * isSetVideoSizeSupported,Vector<Size> & sizes)228 static void getSupportedVideoSizes(
229     const CameraParameters& params,
230     bool *isSetVideoSizeSupported,
231     Vector<Size>& sizes) {
232 
233     *isSetVideoSizeSupported = true;
234     params.getSupportedVideoSizes(sizes);
235     if (sizes.size() == 0) {
236         ALOGD("Camera does not support setVideoSize()");
237         params.getSupportedPreviewSizes(sizes);
238         *isSetVideoSizeSupported = false;
239     }
240 }
241 
242 /*
243  * Check whether the camera has the supported color format
244  * @param params CameraParameters to retrieve the information
245  * @return OK if no error.
246  */
isCameraColorFormatSupported(const CameraParameters & params)247 status_t CameraSource::isCameraColorFormatSupported(
248         const CameraParameters& params) {
249     mColorFormat = getColorFormat(params.get(
250             CameraParameters::KEY_VIDEO_FRAME_FORMAT));
251     if (mColorFormat == -1) {
252         return BAD_VALUE;
253     }
254     return OK;
255 }
256 
257 /*
258  * Configure the camera to use the requested video size
259  * (width and height) and/or frame rate. If both width and
260  * height are -1, configuration on the video size is skipped.
261  * if frameRate is -1, configuration on the frame rate
262  * is skipped. Skipping the configuration allows one to
263  * use the current camera setting without the need to
264  * actually know the specific values (see Create() method).
265  *
266  * @param params the CameraParameters to be configured
267  * @param width the target video frame width in pixels
268  * @param height the target video frame height in pixels
269  * @param frameRate the target frame rate in frames per second.
270  * @return OK if no error.
271  */
configureCamera(CameraParameters * params,int32_t width,int32_t height,int32_t frameRate)272 status_t CameraSource::configureCamera(
273         CameraParameters* params,
274         int32_t width, int32_t height,
275         int32_t frameRate) {
276     ALOGV("configureCamera");
277     Vector<Size> sizes;
278     bool isSetVideoSizeSupportedByCamera = true;
279     getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
280     bool isCameraParamChanged = false;
281     if (width != -1 && height != -1) {
282         if (!isVideoSizeSupported(width, height, sizes)) {
283             ALOGE("Video dimension (%dx%d) is unsupported", width, height);
284             return BAD_VALUE;
285         }
286         if (isSetVideoSizeSupportedByCamera) {
287             params->setVideoSize(width, height);
288         } else {
289             params->setPreviewSize(width, height);
290         }
291         isCameraParamChanged = true;
292     } else if ((width == -1 && height != -1) ||
293                (width != -1 && height == -1)) {
294         // If one and only one of the width and height is -1
295         // we reject such a request.
296         ALOGE("Requested video size (%dx%d) is not supported", width, height);
297         return BAD_VALUE;
298     } else {  // width == -1 && height == -1
299         // Do not configure the camera.
300         // Use the current width and height value setting from the camera.
301     }
302 
303     if (frameRate != -1) {
304         CHECK(frameRate > 0 && frameRate <= 120);
305         const char* supportedFrameRates =
306                 params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
307         CHECK(supportedFrameRates != NULL);
308         ALOGV("Supported frame rates: %s", supportedFrameRates);
309         char buf[4];
310         snprintf(buf, 4, "%d", frameRate);
311         if (strstr(supportedFrameRates, buf) == NULL) {
312             ALOGE("Requested frame rate (%d) is not supported: %s",
313                 frameRate, supportedFrameRates);
314             return BAD_VALUE;
315         }
316 
317         // The frame rate is supported, set the camera to the requested value.
318         params->setPreviewFrameRate(frameRate);
319         isCameraParamChanged = true;
320     } else {  // frameRate == -1
321         // Do not configure the camera.
322         // Use the current frame rate value setting from the camera
323     }
324 
325     if (isCameraParamChanged) {
326         // Either frame rate or frame size needs to be changed.
327         String8 s = params->flatten();
328         if (OK != mCamera->setParameters(s)) {
329             ALOGE("Could not change settings."
330                  " Someone else is using camera %p?", mCamera.get());
331             return -EBUSY;
332         }
333     }
334     return OK;
335 }
336 
337 /*
338  * Check whether the requested video frame size
339  * has been successfully configured or not. If both width and height
340  * are -1, check on the current width and height value setting
341  * is performed.
342  *
343  * @param params CameraParameters to retrieve the information
344  * @param the target video frame width in pixels to check against
345  * @param the target video frame height in pixels to check against
346  * @return OK if no error
347  */
checkVideoSize(const CameraParameters & params,int32_t width,int32_t height)348 status_t CameraSource::checkVideoSize(
349         const CameraParameters& params,
350         int32_t width, int32_t height) {
351 
352     ALOGV("checkVideoSize");
353     // The actual video size is the same as the preview size
354     // if the camera hal does not support separate video and
355     // preview output. In this case, we retrieve the video
356     // size from preview.
357     int32_t frameWidthActual = -1;
358     int32_t frameHeightActual = -1;
359     Vector<Size> sizes;
360     params.getSupportedVideoSizes(sizes);
361     if (sizes.size() == 0) {
362         // video size is the same as preview size
363         params.getPreviewSize(&frameWidthActual, &frameHeightActual);
364     } else {
365         // video size may not be the same as preview
366         params.getVideoSize(&frameWidthActual, &frameHeightActual);
367     }
368     if (frameWidthActual < 0 || frameHeightActual < 0) {
369         ALOGE("Failed to retrieve video frame size (%dx%d)",
370                 frameWidthActual, frameHeightActual);
371         return UNKNOWN_ERROR;
372     }
373 
374     // Check the actual video frame size against the target/requested
375     // video frame size.
376     if (width != -1 && height != -1) {
377         if (frameWidthActual != width || frameHeightActual != height) {
378             ALOGE("Failed to set video frame size to %dx%d. "
379                     "The actual video size is %dx%d ", width, height,
380                     frameWidthActual, frameHeightActual);
381             return UNKNOWN_ERROR;
382         }
383     }
384 
385     // Good now.
386     mVideoSize.width = frameWidthActual;
387     mVideoSize.height = frameHeightActual;
388     return OK;
389 }
390 
391 /*
392  * Check the requested frame rate has been successfully configured or not.
393  * If the target frameRate is -1, check on the current frame rate value
394  * setting is performed.
395  *
396  * @param params CameraParameters to retrieve the information
397  * @param the target video frame rate to check against
398  * @return OK if no error.
399  */
checkFrameRate(const CameraParameters & params,int32_t frameRate)400 status_t CameraSource::checkFrameRate(
401         const CameraParameters& params,
402         int32_t frameRate) {
403 
404     ALOGV("checkFrameRate");
405     int32_t frameRateActual = params.getPreviewFrameRate();
406     if (frameRateActual < 0) {
407         ALOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
408         return UNKNOWN_ERROR;
409     }
410 
411     // Check the actual video frame rate against the target/requested
412     // video frame rate.
413     if (frameRate != -1 && (frameRateActual - frameRate) != 0) {
414         ALOGE("Failed to set preview frame rate to %d fps. The actual "
415                 "frame rate is %d", frameRate, frameRateActual);
416         return UNKNOWN_ERROR;
417     }
418 
419     // Good now.
420     mVideoFrameRate = frameRateActual;
421     return OK;
422 }
423 
424 /*
425  * Initialize the CameraSource to so that it becomes
426  * ready for providing the video input streams as requested.
427  * @param camera the camera object used for the video source
428  * @param cameraId if camera == 0, use camera with this id
429  *      as the video source
430  * @param videoSize the target video frame size. If both
431  *      width and height in videoSize is -1, use the current
 *      width and height settings by the camera
433  * @param frameRate the target frame rate in frames per second.
434  *      if it is -1, use the current camera frame rate setting.
435  * @param storeMetaDataInVideoBuffers request to store meta
436  *      data or real YUV data in video buffers. Request to
437  *      store meta data in video buffers may not be honored
438  *      if the source does not support this feature.
439  *
440  * @return OK if no error.
441  */
init(const sp<hardware::ICamera> & camera,const sp<ICameraRecordingProxy> & proxy,int32_t cameraId,const String16 & clientName,uid_t clientUid,pid_t clientPid,Size videoSize,int32_t frameRate)442 status_t CameraSource::init(
443         const sp<hardware::ICamera>& camera,
444         const sp<ICameraRecordingProxy>& proxy,
445         int32_t cameraId,
446         const String16& clientName,
447         uid_t clientUid,
448         pid_t clientPid,
449         Size videoSize,
450         int32_t frameRate) {
451 
452     ALOGV("init");
453     status_t err = OK;
454     int64_t token = IPCThreadState::self()->clearCallingIdentity();
455     err = initWithCameraAccess(camera, proxy, cameraId, clientName, clientUid, clientPid,
456                                videoSize, frameRate);
457     IPCThreadState::self()->restoreCallingIdentity(token);
458     return err;
459 }
460 
createVideoBufferMemoryHeap(size_t size,uint32_t bufferCount)461 void CameraSource::createVideoBufferMemoryHeap(size_t size, uint32_t bufferCount) {
462     mMemoryHeapBase = new MemoryHeapBase(size * bufferCount, 0,
463             "StageFright-CameraSource-BufferHeap");
464     for (uint32_t i = 0; i < bufferCount; i++) {
465         mMemoryBases.push_back(new MemoryBase(mMemoryHeapBase, i * size, size));
466     }
467 }
468 
// Set up the buffer queue through which the camera delivers recording
// frames (VIDEO_BUFFER_MODE_BUFFER_QUEUE): a BufferItemConsumer on our
// side, a producer handed to the camera, a metadata heap sized for
// VideoNativeMetadata, and a listener thread that forwards frames.
// @param width,height  default buffer dimensions
// @param format        HAL pixel format for the buffers
// @param dataSpace     default dataspace applied to the buffers
// @param bufferCount   encoder-side buffer count; kConsumerBufferCount
//                      extra buffers are added for the consumer side
// @return OK on success, ALREADY_EXISTS if the queue was already created,
//         or the error from the consumer/camera call that failed.
status_t CameraSource::initBufferQueue(uint32_t width, uint32_t height,
        uint32_t format, android_dataspace dataSpace, uint32_t bufferCount) {
    ALOGV("initBufferQueue");

    if (mVideoBufferConsumer != nullptr || mVideoBufferProducer != nullptr) {
        ALOGE("%s: Buffer queue already exists", __FUNCTION__);
        return ALREADY_EXISTS;
    }
#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
    // Create a buffer queue.
    sp<IGraphicBufferProducer> producer;
    sp<IGraphicBufferConsumer> consumer;
    BufferQueue::createBufferQueue(&producer, &consumer);
#endif // !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)


    // CPU-readable buffers by default; implementation-defined formats go
    // straight to the hardware video encoder instead.
    uint32_t usage = GRALLOC_USAGE_SW_READ_OFTEN;
    if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
        usage = GRALLOC_USAGE_HW_VIDEO_ENCODER;
    }

    // Reserve extra buffers for the consumer side of the queue.
    bufferCount += kConsumerBufferCount;

    // The consumer/producer wiring differs by libgui flag configuration:
    // with WB_CONSUMER_BASE_OWNS_BQ the consumer owns its own queue and
    // hands out the producer; otherwise we create the queue explicitly.
#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
    mVideoBufferConsumer = new BufferItemConsumer(usage, bufferCount);
    mVideoBufferConsumer->setName(String8::format("StageFright-CameraSource"));

#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
    mVideoBufferProducer = mVideoBufferConsumer->getSurface();
#else
    mVideoBufferProducer = mVideoBufferConsumer->getSurface()->getIGraphicBufferProducer();
#endif  // WB_LIBCAMERASERVICE_WITH_DEPENDENCIES

#else
    mVideoBufferConsumer = new BufferItemConsumer(consumer, usage, bufferCount);
    mVideoBufferConsumer->setName(String8::format("StageFright-CameraSource"));

#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
    mVideoBufferProducer = new Surface(producer);
#else
    mVideoBufferProducer = producer;
#endif  // WB_LIBCAMERASERVICE_WITH_DEPENDENCIES

#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)

    status_t res = mVideoBufferConsumer->setDefaultBufferSize(width, height);
    if (res != OK) {
        ALOGE("%s: Could not set buffer dimensions %dx%d: %s (%d)", __FUNCTION__, width, height,
                strerror(-res), res);
        return res;
    }

    res = mVideoBufferConsumer->setDefaultBufferFormat(format);
    if (res != OK) {
        ALOGE("%s: Could not set buffer format %d: %s (%d)", __FUNCTION__, format,
                strerror(-res), res);
        return res;
    }

    res = mVideoBufferConsumer->setDefaultBufferDataSpace(dataSpace);
    if (res != OK) {
        ALOGE("%s: Could not set data space %d: %s (%d)", __FUNCTION__, dataSpace,
                strerror(-res), res);
        return res;
    }

    // Hand the producer end to the camera as its video output target.
    res = mCamera->setVideoTarget(mVideoBufferProducer);
    if (res != OK) {
        ALOGE("%s: Failed to set video target: %s (%d)", __FUNCTION__, strerror(-res), res);
        return res;
    }

    // Create memory heap to store buffers as VideoNativeMetadata.
    createVideoBufferMemoryHeap(sizeof(VideoNativeMetadata), bufferCount);

    // Listener thread pulls BufferItems off the consumer and hands the
    // frames to this CameraSource.
    mBufferQueueListener = new BufferQueueListener(mVideoBufferConsumer, this);
    res = mBufferQueueListener->run("CameraSource-BufferQueueListener");
    if (res != OK) {
        ALOGE("%s: Could not run buffer queue listener thread: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    return OK;
}
554 
// Body of init(), executed with the binder identity already cleared.
// Acquires the camera, validates/configures the color format, video size
// and frame rate, attaches the preview surface, switches the camera to
// buffer-queue video delivery, and fills in mMeta. Any failure is
// returned immediately; the caller (constructor) releases the camera.
status_t CameraSource::initWithCameraAccess(
        const sp<hardware::ICamera>& camera,
        const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId,
        const String16& clientName,
        uid_t clientUid,
        pid_t clientPid,
        Size videoSize,
        int32_t frameRate) {
    ALOGV("initWithCameraAccess");
    status_t err = OK;

    // Connect to (or wrap) the camera and take its lock.
    if ((err = isCameraAvailable(camera, proxy, cameraId,
            toStdString(clientName), clientUid, clientPid)) != OK) {
        ALOGE("Camera connection could not be established.");
        return err;
    }
    CameraParameters params(mCamera->getParameters());
    if ((err = isCameraColorFormatSupported(params)) != OK) {
        return err;
    }

    // Set the camera to use the requested video frame size
    // and/or frame rate.
    if ((err = configureCamera(&params,
                    videoSize.width, videoSize.height,
                    frameRate))) {
        return err;
    }

    // Re-read the parameters and verify the settings actually took effect;
    // this also populates mVideoSize and mVideoFrameRate.
    CameraParameters newCameraParams(mCamera->getParameters());
    if ((err = checkVideoSize(newCameraParams,
                videoSize.width, videoSize.height)) != OK) {
        return err;
    }
    if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
        return err;
    }

    // Set the preview display. Skip this if mSurface is null because
    // applications may already set a surface to the camera.
    if (mSurface != NULL) {
        // Surface may be set incorrectly or could already be used even if we just
        // passed the lock/unlock check earlier by calling mCamera->setParameters().
        if ((err = mCamera->setPreviewTarget(mSurface)) != OK) {
            return err;
        }
    }

    // Use buffer queue to receive video buffers from camera
    err = mCamera->setVideoBufferMode(hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE);
    if (err != OK) {
        ALOGE("%s: Setting video buffer mode to VIDEO_BUFFER_MODE_BUFFER_QUEUE failed: "
                "%s (err=%d)", __FUNCTION__, strerror(-err), err);
        return err;
    }

    // Widen the glitch threshold to at least one frame interval so normal
    // frame pacing is not reported as a glitch.
    int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
    if (glitchDurationUs > mGlitchDurationThresholdUs) {
        mGlitchDurationThresholdUs = glitchDurationUs;
    }

    // XXX: query camera for the stride and slice height
    // when the capability becomes available.
    mMeta = new MetaData;
    mMeta->setCString(kKeyMIMEType,  MEDIA_MIMETYPE_VIDEO_RAW);
    mMeta->setInt32(kKeyColorFormat, mColorFormat);
    mMeta->setInt32(kKeyWidth,       mVideoSize.width);
    mMeta->setInt32(kKeyHeight,      mVideoSize.height);
    mMeta->setInt32(kKeyStride,      mVideoSize.width);
    mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
    mMeta->setInt32(kKeyFrameRate,   mVideoFrameRate);
    return OK;
}
630 
~CameraSource()631 CameraSource::~CameraSource() {
632     if (mStarted) {
633         reset();
634     } else if (mInitCheck == OK) {
635         // Camera is initialized but because start() is never called,
636         // the lock on Camera is never released(). This makes sure
637         // Camera's lock is released in this case.
638         releaseCamera();
639     }
640 }
641 
// Create the video buffer queue and start the camera's recording stream.
// For a hot (application-owned) camera, recording is started through the
// ICameraRecordingProxy; otherwise our own camera object is used.
// @return OK on success, otherwise the buffer-queue or recording error.
status_t CameraSource::startCameraRecording() {
    ALOGV("startCameraRecording");
    // Reset the identity to the current thread because media server owns the
    // camera and recording is started by the applications. The applications
    // will connect to the camera in ICameraRecordingProxy::startRecording.
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    status_t err;

    // Initialize buffer queue.
    err = initBufferQueue(mVideoSize.width, mVideoSize.height, mEncoderFormat,
            (android_dataspace_t)mEncoderDataSpace,
            mNumInputBuffers > 0 ? mNumInputBuffers : 1);
    if (err != OK) {
        ALOGE("%s: Failed to initialize buffer queue: %s (err=%d)", __FUNCTION__,
                strerror(-err), err);
        return err;
    }

    // Start data flow
    err = OK;
    if (mCameraFlags & FLAGS_HOT_CAMERA) {
        // Hand the camera back to the app before asking it to record.
        mCamera->unlock();
        mCamera.clear();
        if ((err = mCameraRecordingProxy->startRecording()) != OK) {
            ALOGE("Failed to start recording, received error: %s (%d)",
                    strerror(-err), err);
        }
    } else {
        mCamera->startRecording();
        // startRecording() is void; verify it actually took effect.
        if (!mCamera->recordingEnabled()) {
            err = -EINVAL;
            ALOGE("Failed to start recording");
        }
    }
    IPCThreadState::self()->restoreCallingIdentity(token);
    return err;
}
679 
// Begin streaming frames from the camera.
// @param meta optional start parameters: kKeyTime (start time in us),
//        kKeyNumBuffers (encoder input buffer count, must be > 0),
//        kKeyPixelFormat (encoder input pixel format), kKeyColorSpace
//        (encoder dataspace, also applied to outgoing buffers).
// @return OK on success; mInitCheck if initialization failed earlier, or
//         the error from startCameraRecording().
status_t CameraSource::start(MetaData *meta) {
    ALOGV("start");
    // Calling start() twice is a programming error.
    CHECK(!mStarted);
    if (mInitCheck != OK) {
        ALOGE("CameraSource is not initialized yet");
        return mInitCheck;
    }

    // Frame statistics are logged on reset() when this property is set.
    if (property_get_bool("media.stagefright.record-stats", false)) {
        mCollectStats = true;
    }

    // Defaults, possibly overridden by 'meta' below.
    mStartTimeUs = 0;
    mNumInputBuffers = 0;
    mEncoderFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
    mEncoderDataSpace = mBufferDataSpace = HAL_DATASPACE_V0_BT709;

    if (meta) {
        int64_t startTimeUs;
        if (meta->findInt64(kKeyTime, &startTimeUs)) {
            mStartTimeUs = startTimeUs;
        }

        int32_t nBuffers;
        if (meta->findInt32(kKeyNumBuffers, &nBuffers)) {
            CHECK_GT(nBuffers, 0);
            mNumInputBuffers = nBuffers;
        }

        // apply encoder color format if specified
        if (meta->findInt32(kKeyPixelFormat, &mEncoderFormat)) {
            ALOGI("Using encoder format: %#x", mEncoderFormat);
        }
        if (meta->findInt32(kKeyColorSpace, &mEncoderDataSpace)) {
            ALOGI("Using encoder data space: %#x", mEncoderDataSpace);
            mBufferDataSpace = mEncoderDataSpace;
        }
    }

    status_t err;
    if ((err = startCameraRecording()) == OK) {
        mStarted = true;
    }

    return err;
}
726 
stopCameraRecording()727 void CameraSource::stopCameraRecording() {
728     ALOGV("stopCameraRecording");
729     if (mCameraFlags & FLAGS_HOT_CAMERA) {
730         if (mCameraRecordingProxy != 0) {
731             mCameraRecordingProxy->stopRecording();
732         }
733     } else {
734         if (mCamera != 0) {
735             mCamera->stopRecording();
736         }
737     }
738 }
739 
// Release all camera resources: stop preview and disconnect if we own the
// camera (cold case), drop the lock, and unlink/clear the recording proxy.
// Binder calls are made outside mLock with a local reference so the lock
// is never held across IPC.
void CameraSource::releaseCamera() {
    ALOGV("releaseCamera");
    sp<Camera> camera;
    bool coldCamera = false;
    {
        Mutex::Autolock autoLock(mLock);
        // get a local ref and clear ref to mCamera now
        camera = mCamera;
        mCamera.clear();
        coldCamera = (mCameraFlags & FLAGS_HOT_CAMERA) == 0;
    }

    if (camera != 0) {
        // Clear the binder identity so the camera service sees us, not the
        // original caller, for these teardown calls.
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        if (coldCamera) {
            ALOGV("Camera was cold when we started, stopping preview");
            camera->stopPreview();
            camera->disconnect();
        }
        camera->unlock();
        IPCThreadState::self()->restoreCallingIdentity(token);
    }

    {
        Mutex::Autolock autoLock(mLock);
        if (mCameraRecordingProxy != 0) {
            // Undo the linkToDeath from isCameraAvailable().
            IInterface::asBinder(mCameraRecordingProxy)->unlinkToDeath(mDeathNotifier);
            mCameraRecordingProxy.clear();
        }
        mCameraFlags = 0;
    }
}
772 
// Stop recording and tear down all resources: flush queued frames, wait
// for frames still held by the encoder, stop the camera stream, shut down
// the buffer-queue listener thread, and release the camera.
// @return OK always.
status_t CameraSource::reset() {
    ALOGD("reset: E");

    {
        Mutex::Autolock autoLock(mLock);
        mStarted = false;
        mEos = false;
        mStopSystemTimeUs = -1;
        // Wake any reader blocked waiting for a frame.
        mFrameAvailableCondition.signal();

        // Clear the binder identity for the camera IPC below, but only if
        // we still hold a camera reference.
        int64_t token;
        bool isTokenValid = false;
        if (mCamera != 0) {
            token = IPCThreadState::self()->clearCallingIdentity();
            isTokenValid = true;
        }
        releaseQueuedFrames();
        // Wait (with timeout) for the encoder to return every outstanding
        // frame; signalled via mFrameCompleteCondition.
        while (!mFramesBeingEncoded.empty()) {
            if (NO_ERROR !=
                mFrameCompleteCondition.waitRelative(mLock,
                        mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
                ALOGW("Timed out waiting for outstanding frames being encoded: %zu",
                    mFramesBeingEncoded.size());
            }
        }
        stopCameraRecording();
        if (isTokenValid) {
            IPCThreadState::self()->restoreCallingIdentity(token);
        }

        if (mCollectStats) {
            ALOGI("Frames received/encoded/dropped: %d/%d/%d in %" PRId64 " us",
                    mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
                    mLastFrameTimestampUs - mFirstFrameTimeUs);
        }

        if (mNumGlitches > 0) {
            ALOGW("%d long delays between neighboring video frames", mNumGlitches);
        }

        // Every received frame must be accounted for by now.
        CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    }

    // Shut down the listener thread outside mLock to avoid deadlocking
    // with its frame callbacks.
    if (mBufferQueueListener != nullptr) {
        mBufferQueueListener->requestExit();
        mBufferQueueListener->join();
        mBufferQueueListener.clear();
    }

    mVideoBufferConsumer.clear();
    mVideoBufferProducer.clear();
    releaseCamera();

    ALOGD("reset: X");
    return OK;
}
829 
releaseRecordingFrame(const sp<IMemory> & frame)830 void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
831     ALOGV("releaseRecordingFrame");
832 
833     // Return the buffer to buffer queue in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode.
834     ssize_t offset;
835     size_t size;
836     sp<IMemoryHeap> heap = frame->getMemory(&offset, &size);
837     if (heap->getHeapID() != mMemoryHeapBase->getHeapID()) {
838         ALOGE("%s: Mismatched heap ID, ignoring release (got %x, expected %x)", __FUNCTION__,
839                 heap->getHeapID(), mMemoryHeapBase->getHeapID());
840         return;
841     }
842 
843     VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
844         (uint8_t*)heap->getBase() + offset);
845 
846     // Find the corresponding buffer item for the native window buffer.
847     ssize_t index = mReceivedBufferItemMap.indexOfKey(payload->pBuffer);
848     if (index == NAME_NOT_FOUND) {
849         ALOGE("%s: Couldn't find buffer item for %p", __FUNCTION__, payload->pBuffer);
850         return;
851     }
852 
853     BufferItem buffer = mReceivedBufferItemMap.valueAt(index);
854     mReceivedBufferItemMap.removeItemsAt(index);
855     mVideoBufferConsumer->releaseBuffer(buffer);
856     mMemoryBases.push_back(frame);
857     mMemoryBaseAvailableCond.signal();
858 }
859 
releaseQueuedFrames()860 void CameraSource::releaseQueuedFrames() {
861     List<sp<IMemory> >::iterator it;
862     while (!mFramesReceived.empty()) {
863         it = mFramesReceived.begin();
864         releaseRecordingFrame(*it);
865         mFramesReceived.erase(it);
866         ++mNumFramesDropped;
867     }
868 }
869 
// Returns the output format metadata for this source (populated elsewhere
// in this file; only handed out here).
sp<MetaData> CameraSource::getFormat() {
    return mMeta;
}
873 
// Returns a single recording frame back to the buffer queue / free pool.
// Thin forwarding wrapper around releaseRecordingFrame().
void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    releaseRecordingFrame(frame);
}
877 
signalBufferReturned(MediaBufferBase * buffer)878 void CameraSource::signalBufferReturned(MediaBufferBase *buffer) {
879     ALOGV("signalBufferReturned: %p", buffer->data());
880     Mutex::Autolock autoLock(mLock);
881     for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
882          it != mFramesBeingEncoded.end(); ++it) {
883         if ((*it)->unsecurePointer() ==  buffer->data()) {
884             releaseOneRecordingFrame((*it));
885             mFramesBeingEncoded.erase(it);
886             ++mNumFramesEncoded;
887             buffer->setObserver(0);
888             buffer->release();
889             mFrameCompleteCondition.signal();
890             return;
891         }
892     }
893     CHECK(!"signalBufferReturned: bogus buffer");
894 }
895 
// Blocks until a recorded frame is available (or recording stops / times
// out) and returns it wrapped in a MediaBuffer carrying its timestamp
// (kKeyTime) and, when changed, the color space (kKeyColorSpace).
// Returns OK with *buffer == NULL if the source was stopped,
// ERROR_END_OF_STREAM on end of stream or a dead recording proxy, and
// ERROR_UNSUPPORTED for seek requests (a live camera cannot seek).
status_t CameraSource::read(
        MediaBufferBase **buffer, const ReadOptions *options) {
    ALOGV("read");

    *buffer = NULL;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
        return ERROR_UNSUPPORTED;
    }

    sp<IMemory> frame;
    int64_t frameTime;

    {
        Mutex::Autolock autoLock(mLock);
        // Wait for a frame, periodically checking that the remote camera
        // recording proxy is still alive so we don't hang forever.
        while (mStarted && !mEos && mFramesReceived.empty()) {
            if (NO_ERROR !=
                mFrameAvailableCondition.waitRelative(mLock,
                    mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
                if (mCameraRecordingProxy != 0 &&
                    !IInterface::asBinder(mCameraRecordingProxy)->isBinderAlive()) {
                    ALOGW("camera recording proxy is gone");
                    return ERROR_END_OF_STREAM;
                }
                ALOGW("Timed out waiting for incoming camera video frames: %" PRId64 " us",
                    mLastFrameTimestampUs);
            }
        }
        if (!mStarted) {
            return OK;
        }
        if (mFramesReceived.empty()) {
            return ERROR_END_OF_STREAM;
        }
        // Pop the oldest frame and its timestamp; track the frame as
        // in-flight until the encoder returns it via signalBufferReturned().
        frame = *mFramesReceived.begin();
        mFramesReceived.erase(mFramesReceived.begin());

        frameTime = *mFrameTimes.begin();
        mFrameTimes.erase(mFrameTimes.begin());
        mFramesBeingEncoded.push_back(frame);
        // TODO: Using unsecurePointer() has some associated security pitfalls
        //       (see declaration for details).
        //       Either document why it is safe in this case or address the
        //       issue (e.g. by copying).
        *buffer = new MediaBuffer(frame->unsecurePointer(), frame->size());
        (*buffer)->setObserver(this);
        (*buffer)->add_ref();
        (*buffer)->meta_data().setInt64(kKeyTime, frameTime);
        // Propagate a data-space change to the encoder exactly once.
        if (mBufferDataSpace != mEncoderDataSpace) {
            ALOGD("Data space updated to %x", mBufferDataSpace);
            (*buffer)->meta_data().setInt32(kKeyColorSpace, mBufferDataSpace);
            mEncoderDataSpace = mBufferDataSpace;
        }
    }
    return OK;
}
954 
setStopTimeUs(int64_t stopTimeUs)955 status_t CameraSource::setStopTimeUs(int64_t stopTimeUs) {
956     Mutex::Autolock autoLock(mLock);
957     ALOGV("Set stoptime: %lld us", (long long)stopTimeUs);
958 
959     if (stopTimeUs < -1) {
960         ALOGE("Invalid stop time %lld us", (long long)stopTimeUs);
961         return BAD_VALUE;
962     } else if (stopTimeUs == -1) {
963         ALOGI("reset stopTime to be -1");
964     }
965 
966     mStopSystemTimeUs = stopTimeUs;
967     return OK;
968 }
969 
// Decides (caller must hold mLock) whether the frame captured at
// timestampUs should be dropped rather than queued for encoding. Also
// maintains frame statistics: last/first frame timestamps, the glitch
// count, and the start-to-first-frame initial delay.
bool CameraSource::shouldSkipFrameLocked(int64_t timestampUs) {
    // Drop if we are not recording, or the frame predates start().
    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
        ALOGV("Drop frame at %lld/%lld us", (long long)timestampUs, (long long)mStartTimeUs);
        return true;
    }

    // Past the requested stop time: mark end-of-stream, wake any reader
    // blocked in read(), and drop this frame.
    if (mStopSystemTimeUs != -1 && timestampUs >= mStopSystemTimeUs) {
        ALOGV("Drop Camera frame at %lld  stop time: %lld us",
                (long long)timestampUs, (long long)mStopSystemTimeUs);
        mEos = true;
        mFrameAvailableCondition.signal();
        return true;
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        return true;
    }

    if (mNumFramesReceived > 0) {
        // Timestamps must be strictly increasing.
        if (timestampUs <= mLastFrameTimestampUs) {
            ALOGW("Dropping frame with backward timestamp %lld (last %lld)",
                    (long long)timestampUs, (long long)mLastFrameTimestampUs);
            return true;
        }
        // Count unusually large inter-frame gaps as glitches.
        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
            ++mNumGlitches;
        }
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                return true;
            }
            // mStartTimeUs is repurposed here to hold the initial delay
            // between start() and the first accepted frame.
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }

    return false;
}
1017 
BufferQueueListener(const sp<BufferItemConsumer> & consumer,const sp<CameraSource> & cameraSource)1018 CameraSource::BufferQueueListener::BufferQueueListener(const sp<BufferItemConsumer>& consumer,
1019         const sp<CameraSource>& cameraSource) {
1020     mConsumer = consumer;
1021     mConsumer->setFrameAvailableListener(this);
1022     mCameraSource = cameraSource;
1023 }
1024 
onFrameAvailable(const BufferItem &)1025 void CameraSource::BufferQueueListener::onFrameAvailable(const BufferItem& /*item*/) {
1026     ALOGV("%s: onFrameAvailable", __FUNCTION__);
1027 
1028     Mutex::Autolock l(mLock);
1029 
1030     if (!mFrameAvailable) {
1031         mFrameAvailable = true;
1032         mFrameAvailableSignal.signal();
1033     }
1034 }
1035 
threadLoop()1036 bool CameraSource::BufferQueueListener::threadLoop() {
1037     if (mConsumer == nullptr || mCameraSource == nullptr) {
1038         return false;
1039     }
1040 
1041     {
1042         Mutex::Autolock l(mLock);
1043         while (!mFrameAvailable) {
1044             if (mFrameAvailableSignal.waitRelative(mLock, kFrameAvailableTimeout) == TIMED_OUT) {
1045                 return true;
1046             }
1047         }
1048         mFrameAvailable = false;
1049     }
1050 
1051     BufferItem buffer;
1052     while (mConsumer->acquireBuffer(&buffer, 0) == OK) {
1053         mCameraSource->processBufferQueueFrame(buffer);
1054     }
1055 
1056     return true;
1057 }
1058 
// Called from BufferQueueListener::threadLoop() for each buffer acquired
// from the buffer queue. Wraps the graphic buffer in a VideoNativeMetadata
// payload stored in one of the preallocated IMemory slots and queues it
// for read(). Drops the frame if shouldSkipFrameLocked() says so or if no
// memory slot frees up in time.
void CameraSource::processBufferQueueFrame(BufferItem& buffer) {
    Mutex::Autolock autoLock(mLock);

    // BufferItem timestamps are in ns; this class works in us.
    int64_t timestampUs = buffer.mTimestamp / 1000;
    if (shouldSkipFrameLocked(timestampUs)) {
        mVideoBufferConsumer->releaseBuffer(buffer);
        return;
    }

    // Wait for a free memory slot; give up (and drop the frame) on timeout.
    while (mMemoryBases.empty()) {
        if (mMemoryBaseAvailableCond.waitRelative(mLock, kMemoryBaseAvailableTimeoutNs) ==
                TIMED_OUT) {
            ALOGW("Waiting on an available memory base timed out. Dropping a recording frame.");
            mVideoBufferConsumer->releaseBuffer(buffer);
            return;
        }
    }

    ++mNumFramesReceived;

    // Find a available memory slot to store the buffer as VideoNativeMetadata.
    sp<IMemory> data = *mMemoryBases.begin();
    mMemoryBases.erase(mMemoryBases.begin());
    mBufferDataSpace = buffer.mDataSpace;

    // Fill the slot with a metadata payload pointing at the native buffer;
    // -1 fence means the buffer is ready to use as-is.
    ssize_t offset;
    size_t size;
    sp<IMemoryHeap> heap = data->getMemory(&offset, &size);
    VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
        (uint8_t*)heap->getBase() + offset);
    memset(payload, 0, sizeof(VideoNativeMetadata));
    payload->eType = kMetadataBufferTypeANWBuffer;
    payload->pBuffer = buffer.mGraphicBuffer->getNativeBuffer();
    payload->nFenceFd = -1;

    // Add the mapping so we can find the corresponding buffer item to release to the buffer queue
    // when the encoder returns the native window buffer.
    mReceivedBufferItemMap.add(payload->pBuffer, buffer);

    mFramesReceived.push_back(data);
    // Map the capture timestamp onto the recording timeline.
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
        mStartTimeUs, timeUs);
    // Wake any reader blocked in read().
    mFrameAvailableCondition.signal();
}
1105 
// Tells the consumer of this source that its buffers carry ANativeWindow
// buffer metadata (VideoNativeMetadata), not raw pixel data.
MetadataBufferType CameraSource::metaDataStoredInVideoBuffers() const {
    ALOGV("metaDataStoredInVideoBuffers");

    return kMetadataBufferTypeANWBuffer;
}
1111 
// Death notification for the remote camera recording proxy. Only logs:
// read() independently detects the dead binder via isBinderAlive() and
// returns ERROR_END_OF_STREAM.
void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who __unused) {
    ALOGI("Camera recording proxy died");
}
1115 
1116 }  // namespace android
1117