/*
 * Copyright (C) 2013-2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ANDROID_SERVERS_CAMERA3_OUTPUT_STREAM_H
#define ANDROID_SERVERS_CAMERA3_OUTPUT_STREAM_H

#include <mutex>
#include <optional>
#include <utils/RefBase.h>
#include <gui/IProducerListener.h>
#include <gui/Surface.h>
#include <gui/DisplayEventReceiver.h>

#include "utils/IPCTransport.h"
#include "utils/LatencyHistogram.h"
#include "Camera3Stream.h"
#include "Camera3IOStreamBase.h"
#include "Camera3OutputStreamInterface.h"
#include "Camera3BufferManager.h"
#include "PreviewFrameSpacer.h"

namespace android {

namespace camera3 {

class Camera3BufferManager;

/**
 * Stream info structure that holds the necessary stream info for buffer manager to use for
 * buffer allocation and management.
 */
struct StreamInfo {
    int streamId;
    int streamSetId;
    uint32_t width;
    uint32_t height;
    uint32_t format;
    android_dataspace dataSpace;
    uint64_t combinedUsage;
    size_t totalBufferCount;
    bool isConfigured;
    bool isMultiRes;
    explicit StreamInfo(int id = CAMERA3_STREAM_ID_INVALID,
            int setId = CAMERA3_STREAM_SET_ID_INVALID,
            uint32_t w = 0,
            uint32_t h = 0,
            uint32_t fmt = 0,
            android_dataspace ds = HAL_DATASPACE_UNKNOWN,
            uint64_t usage = 0,
            size_t bufferCount = 0,
            bool configured = false,
            bool multiRes = false) :
                streamId(id),
                streamSetId(setId),
                width(w),
                height(h),
                format(fmt),
                dataSpace(ds),
                combinedUsage(usage),
                totalBufferCount(bufferCount),
                isConfigured(configured),
                isMultiRes(multiRes) {}
};
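
// Illustrative sketch (not part of the API): how a StreamInfo for a hypothetical
// configured 1080p preview stream in stream set 0 might be populated. The format,
// usage, and buffer-count values below are assumptions for illustration only; in
// practice Camera3OutputStream fills these fields in from its stream configuration
// when registering with Camera3BufferManager.
//
//     StreamInfo previewInfo(/*id*/ 0, /*setId*/ 0, /*w*/ 1920, /*h*/ 1080,
//             HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, HAL_DATASPACE_UNKNOWN,
//             GRALLOC_USAGE_HW_TEXTURE, /*bufferCount*/ 4,
//             /*configured*/ true, /*multiRes*/ false);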

/**
 * A class for managing a single stream of output data from the camera device.
 */
class Camera3OutputStream :
        public Camera3IOStreamBase,
        public Camera3OutputStreamInterface {
  public:
    /**
     * Set up a stream for formats that have 2 dimensions, such as RAW and YUV.
     * A valid stream set id needs to be set to support buffer sharing between multiple
     * streams.
     */
    Camera3OutputStream(int id, sp<Surface> consumer,
            uint32_t width, uint32_t height, int format,
            android_dataspace dataSpace, camera_stream_rotation_t rotation,
            nsecs_t timestampOffset, const std::string& physicalCameraId,
            const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
            int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
            int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
            int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
            bool deviceTimeBaseIsRealtime = false,
            int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
            int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
            bool useReadoutTimestamp = false);
    /**
     * Set up a stream for formats that have a variable buffer size for the same
     * dimensions, such as compressed JPEG.
     * A valid stream set id needs to be set to support buffer sharing between multiple
     * streams.
     */
    Camera3OutputStream(int id, sp<Surface> consumer,
            uint32_t width, uint32_t height, size_t maxSize, int format,
            android_dataspace dataSpace, camera_stream_rotation_t rotation,
            nsecs_t timestampOffset, const std::string& physicalCameraId,
            const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
            int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
            int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
            int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
            bool deviceTimeBaseIsRealtime = false,
            int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
            int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
            bool useReadoutTimestamp = false);
    /**
     * Set up a stream with a deferred consumer for formats that have 2 dimensions, such as
     * RAW and YUV. The consumer must be set before using this stream for output. A valid
     * stream set id needs to be set to support buffer sharing between multiple streams.
     */
    Camera3OutputStream(int id, uint32_t width, uint32_t height, int format,
            uint64_t consumerUsage, android_dataspace dataSpace,
            camera_stream_rotation_t rotation, nsecs_t timestampOffset,
            const std::string& physicalCameraId,
            const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
            int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
            int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
            int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
            bool deviceTimeBaseIsRealtime = false,
            int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
            int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
            bool useReadoutTimestamp = false);

    virtual ~Camera3OutputStream();
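
    // Illustrative sketch (hypothetical variables): an output stream for a
    // SurfaceView-backed preview could be created roughly as follows, relying on the
    // default arguments above for the stream set id, dynamic range profile, stream use
    // case, and other optional parameters. previewSurface, rotation,
    // sensorPixelModesUsed, and transport are placeholders for values taken from the
    // client's output configuration; this is a sketch, not the exact call site.
    //
    //     sp<Camera3OutputStream> stream = new Camera3OutputStream(
    //             /*id*/ 0, previewSurface, /*width*/ 1920, /*height*/ 1080,
    //             HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, HAL_DATASPACE_UNKNOWN,
    //             rotation, /*timestampOffset*/ 0, /*physicalCameraId*/ std::string(),
    //             sensorPixelModesUsed, transport);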

    /**
     * Camera3Stream interface
     */

    virtual void dump(int fd, const Vector<String16> &args);

    /**
     * Set the transform on the output stream; one of the
     * HAL_TRANSFORM_* / NATIVE_WINDOW_TRANSFORM_* constants.
     */
    virtual status_t setTransform(int transform, bool mayChangeMirror, int surfaceId = 0);

    /**
     * Return whether this output stream is for video encoding.
     */
    bool isVideoStream();
    /**
     * Return whether this output stream is consumed by the hardware composer.
     */
    bool isConsumedByHWComposer();

    /**
     * Return whether this output stream is consumed by a hardware texture.
     */
    bool isConsumedByHWTexture();

    /**
     * Return whether this output stream is consumed by the CPU.
     */
    bool isConsumedByCPU();

    /**
     * Return whether the consumer configuration of this stream is deferred.
     */
    virtual bool isConsumerConfigurationDeferred(size_t surface_id) const;

    /**
     * Set the consumer surfaces to the output stream.
     */
    virtual status_t setConsumers(const std::vector<SurfaceHolder>& consumers);

    class BufferProducerListener : public SurfaceListener {
        public:
            BufferProducerListener(wp<Camera3OutputStream> parent, bool needsReleaseNotify)
                    : mParent(parent), mNeedsReleaseNotify(needsReleaseNotify) {}

            /**
             * Implementation of IProducerListener, used to notify this stream that the consumer
             * has returned a buffer and it is ready to return to Camera3BufferManager for reuse.
             */
            virtual void onBufferReleased();
            virtual bool needsReleaseNotify() { return mNeedsReleaseNotify; }
            virtual void onBuffersDiscarded(const std::vector<sp<GraphicBuffer>>& buffers);
            virtual void onBufferDetached(int /*slot*/) override {};

        private:
            wp<Camera3OutputStream> mParent;
            bool mNeedsReleaseNotify;
    };

    virtual status_t detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd);

    /**
     * Notify that the buffer is being released to the buffer queue instead of
     * being queued to the consumer.
     */
    virtual status_t notifyBufferReleased(ANativeWindowBuffer *anwBuffer);

    /**
     * Drop buffers if dropping is true. If dropping is false, do not drop buffers.
     */
    virtual status_t dropBuffers(bool dropping) override;

    /**
     * Query the physical camera id for the output stream.
     */
    virtual const std::string& getPhysicalCameraId() const override;

    /**
     * Set the graphic buffer manager to get/return the stream buffers.
     *
     * It is only legal to call this method when the stream is in the STATE_CONSTRUCTED state.
     */
    status_t setBufferManager(sp<Camera3BufferManager> bufferManager);

    /**
     * Query the output surface id.
     */
    virtual ssize_t getSurfaceId(const sp<Surface> &/*surface*/) { return 0; }

    virtual status_t getUniqueSurfaceIds(const std::vector<size_t>&,
            /*out*/std::vector<size_t>*) { return INVALID_OPERATION; };

    /**
     * Update the stream output surfaces.
     */
    virtual status_t updateStream(const std::vector<SurfaceHolder> &outputSurfaces,
            const std::vector<OutputStreamInfo> &outputInfo,
            const std::vector<size_t> &removedSurfaceIds,
            KeyedVector<sp<Surface>, size_t> *outputMap/*out*/);

    /**
     * Set the batch size for buffer operations. The output stream will request
     * buffers from the buffer queue on a batch basis. Currently only video streams
     * are allowed to set the batch size, and batching is not supported if the
     * stream is managed by the buffer manager (Surface group in the Java API).
     * Changing the batch size on the fly while there are already batched buffers
     * in the stream is not supported either.
     * If the batch size is larger than the max dequeue count set by the camera HAL,
     * the batch size will be capped to the max dequeue count.
     */
    virtual status_t setBatchSize(size_t batchSize = 1) override;
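
    // Illustrative sketch (hypothetical stream and value): a high-frame-rate video
    // stream could opt into batched buffer operations as shown below. The error
    // handling is an assumption about how a caller might react, not the actual call
    // site in the camera service.
    //
    //     status_t res = videoStream->setBatchSize(/*batchSize*/ 4);
    //     if (res != OK) {
    //         // Batching was rejected (e.g. not a video stream, or the stream is
    //         // managed by the buffer manager); continue with per-buffer requests.
    //     }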

    /**
     * Notify the stream of a change in min frame duration or in variable/fixed
     * frame rate.
     */
    virtual void onMinDurationChanged(nsecs_t duration, bool fixedFps) override;

    /**
     * Modify the stream use case.
     */
    virtual void setStreamUseCase(int64_t streamUseCase) override;

    /**
     * Apply the ZSL related consumer usage quirk.
     */
    static void applyZSLUsageQuirk(int format, uint64_t *consumerUsage /*inout*/);
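
    // Illustrative call pattern (hypothetical values): the quirk adjusts a consumer
    // usage mask in place based on the stream format. The format and usage values
    // below are assumptions for illustration only, not taken from a real stream
    // configuration.
    //
    //     uint64_t usage = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_COMPOSER;
    //     Camera3OutputStream::applyZSLUsageQuirk(
    //             HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, &usage /*inout*/);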

    void setImageDumpMask(int mask) { mImageDumpMask = mask; }
    bool shouldLogError(status_t res);
    void onCachedBufferQueued();

  protected:
    Camera3OutputStream(int id, camera_stream_type_t type,
            uint32_t width, uint32_t height, int format,
            android_dataspace dataSpace, camera_stream_rotation_t rotation,
            const std::string& physicalCameraId,
            const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
            uint64_t consumerUsage = 0, nsecs_t timestampOffset = 0,
            int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
            int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
            int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
            bool deviceTimeBaseIsRealtime = false,
            int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
            int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
            bool useReadoutTimestamp = false);

    /**
     * Note that we release the lock briefly in this function.
     */
    virtual status_t returnBufferCheckedLocked(
            const camera_stream_buffer &buffer,
            nsecs_t timestamp,
            nsecs_t readoutTimestamp,
            bool output,
            int32_t transform,
            const std::vector<size_t>& surface_ids,
            /*out*/
            sp<Fence> *releaseFenceOut);

    virtual status_t disconnectLocked();
    status_t fixUpHidlJpegBlobHeader(ANativeWindowBuffer* anwBuffer, int fence);

    status_t getEndpointUsageForSurface(uint64_t *usage, const sp<Surface>& surface);
    status_t configureConsumerQueueLocked(bool allowPreviewRespace);

    // Consumer as the output of camera HAL
    sp<Surface> mConsumer;

    uint64_t getPresetConsumerUsage() const { return mConsumerUsage; }

    static const nsecs_t kDequeueBufferTimeout = 1000000000; // 1 sec

    status_t getBufferLockedCommon(ANativeWindowBuffer** anb, int* fenceFd);

  private:

    int mTransform;

    bool mTraceFirstBuffer;

    /**
     * GraphicBuffer manager this stream is registered to. Used to replace the buffer
     * allocation/deallocation role of BufferQueue.
     */
    sp<Camera3BufferManager> mBufferManager;

    /**
     * Buffer producer listener, used to handle notification when a buffer is released
     * from the consumer side, or a set of buffers is discarded by the consumer.
     */
    sp<BufferProducerListener> mBufferProducerListener;

    /**
     * Flag indicating if the buffer manager is used to allocate the stream buffers.
     */
    bool mUseBufferManager;

    /**
     * Offset used to override camera HAL produced timestamps.
     *
     * The offset is first initialized to bootTime - monotonicTime in the
     * constructor, and may later be updated based on the client's timestampBase
     * setting.
     */
    nsecs_t mTimestampOffset;
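
    // For illustration: with mTimestampOffset initialized to bootTime - monotonicTime,
    // a HAL timestamp captured in the monotonic clock domain would be shifted into the
    // boot time domain roughly as shown below before the buffer is handed to the
    // consumer. This is a sketch of the intent described above, not the exact code
    // path; the final adjustment also depends on the client's timestampBase setting.
    //
    //     nsecs_t consumerTimestamp = halTimestamp + mTimestampOffset;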

    /**
     * If the camera readout time is used rather than the start-of-exposure time.
     */
    bool mUseReadoutTime;

    /**
     * Consumer end point usage flag set by the constructor for the deferred
     * consumer case.
     */
    uint64_t mConsumerUsage;

    /**
     * Consumer end point usage flag retrieved from the buffer queue.
     */
    std::optional<uint64_t> mConsumerUsageCachedValue;

    // Whether to drop valid buffers.
    bool mDropBuffers;

    // The batch size for buffer operations
    std::atomic_size_t mBatchSize = 1;

    // Protects the batch states below; must be acquired after mLock
    std::mutex mBatchLock;
    // Prefetched buffers (ready to be handed to the client)
    std::vector<Surface::BatchBuffer> mBatchedBuffers;
    // ---- End of mBatchLock protected scope ----

    int mMirrorMode;

    /**
     * Internal Camera3Stream interface
     */
    virtual status_t getBufferLocked(camera_stream_buffer *buffer,
            const std::vector<size_t>& surface_ids);

    virtual status_t returnBufferLocked(
            const camera_stream_buffer &buffer,
            nsecs_t timestamp, nsecs_t readoutTimestamp,
            int32_t transform, const std::vector<size_t>& surface_ids);

    virtual status_t queueBufferToConsumer(sp<ANativeWindow>& consumer,
            ANativeWindowBuffer* buffer, int anwReleaseFence,
            const std::vector<size_t>& surface_ids);

    virtual status_t configureQueueLocked();

    virtual status_t getEndpointUsage(uint64_t *usage);

    /**
     * Private methods
     */
    void onBuffersRemovedLocked(const std::vector<sp<GraphicBuffer>>&);
    status_t detachBufferLocked(sp<GraphicBuffer>* buffer, int* fenceFd);
    // Call this after each dequeueBuffer/attachBuffer/detachNextBuffer call to get an update
    // on removed buffers. Set notifyBufferManager to false when the call is initiated by the
    // buffer manager so the buffer manager doesn't need to be notified.
    void checkRemovedBuffersLocked(bool notifyBufferManager = true);

    // Check the return status of IGBP calls and set the abandoned state accordingly
    void checkRetAndSetAbandonedLocked(status_t res);

    // If the status indicates an abandoned stream, only log when the state hasn't been
    // updated to STATE_ABANDONED
    static bool shouldLogError(status_t res, StreamState state);

    // Dump images to disk before returning them to the consumer
    void dumpImageToDisk(nsecs_t timestamp, ANativeWindowBuffer* anwBuffer, int fence);

    void returnPrefetchedBuffersLocked();

    static const int32_t kDequeueLatencyBinSize = 5; // in ms
    CameraLatencyHistogram mDequeueBufferLatency;
    IPCTransport mIPCTransport = IPCTransport::INVALID;

    int mImageDumpMask = 0;

    // Re-space frames by overriding the timestamp to align with the display Vsync.
    // Default is on for SurfaceView bound streams.
    bool mFixedFps = false;
    nsecs_t mMinExpectedDuration = 0;
    bool mSyncToDisplay = false;
    DisplayEventReceiver mDisplayEventReceiver;
    nsecs_t mLastCaptureTime = 0;
    nsecs_t mLastPresentTime = 0;
    nsecs_t mCaptureToPresentOffset = 0;
    static constexpr size_t kDisplaySyncExtraBuffer = 2;
    static constexpr nsecs_t kSpacingResetIntervalNs = 50000000LL; // 50 milliseconds
    static constexpr nsecs_t kTimelineThresholdNs = 1000000LL; // 1 millisecond
    static constexpr float kMaxIntervalRatioDeviation = 0.05f;
    static constexpr int kMaxTimelines = 2;
    nsecs_t syncTimestampToDisplayLocked(nsecs_t t, sp<Fence> releaseFence);

    // In case a fence is being used
    sp<Fence> mReferenceFrameFence;
    nsecs_t mReferenceCaptureTime = 0;
    nsecs_t mReferenceArrivalTime = 0;
    nsecs_t mFenceSignalOffset = 0;
    VsyncEventData mRefVsyncData;

    // Re-space frames by delaying queueBuffer so that frame delivery has
    // the same cadence as capture. Default is on for SurfaceTexture bound
    // streams.
    sp<PreviewFrameSpacer> mPreviewFrameSpacer;
}; // class Camera3OutputStream

} // namespace camera3

} // namespace android

#endif