/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "EvsV4lCamera.h"

#include "bufferCopy.h"

#include <aidl/android/hardware/graphics/common/HardwareBufferDescription.h>
#include <aidlcommonsupport/NativeHandle.h>
#include <android-base/logging.h>
#include <android-base/unique_fd.h>
#include <android/hardware_buffer.h>
#include <ui/GraphicBufferAllocator.h>
#include <ui/GraphicBufferMapper.h>
#include <utils/SystemClock.h>

#include <sys/stat.h>
#include <sys/types.h>

namespace {

using ::aidl::android::hardware::graphics::common::BufferUsage;
using ::aidl::android::hardware::graphics::common::HardwareBufferDescription;
using ::android::base::Error;
using ::android::base::Result;
using ::ndk::ScopedAStatus;

// Default camera output image resolution
constexpr std::array<int32_t, 2> kDefaultResolution = {640, 480};

// Arbitrary limit on number of graphics buffers allowed to be allocated
// Safeguards against unreasonable resource consumption and provides a testable limit
constexpr unsigned kMaxBuffersInFlight = 100;

}  // namespace

namespace aidl::android::hardware::automotive::evs::implementation {

EvsV4lCamera::EvsV4lCamera(const char* deviceName,
                           std::unique_ptr<ConfigManager::CameraInfo>& camInfo) :
      mFramesAllowed(0), mFramesInUse(0), mCameraInfo(camInfo) {
    LOG(DEBUG) << "EvsV4lCamera instantiated";

    mDescription.id = deviceName;
    if (camInfo) {
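        // Append the raw camera_metadata blob to our descriptor so that clients
        // can query this camera's static characteristics.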
        uint8_t* ptr = reinterpret_cast<uint8_t*>(camInfo->characteristics);
        const size_t len = get_camera_metadata_size(camInfo->characteristics);
        mDescription.metadata.insert(mDescription.metadata.end(), ptr, ptr + len);
    }

    // Default output buffer format.
    mFormat = HAL_PIXEL_FORMAT_RGBA_8888;

    // How we expect to use the gralloc buffers we'll exchange with our client
    mUsage = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_READ_RARELY | GRALLOC_USAGE_SW_WRITE_OFTEN;
}

EvsV4lCamera::~EvsV4lCamera() {
    LOG(DEBUG) << "EvsV4lCamera being destroyed";
    shutdown();
}

// This gets called if another caller "steals" ownership of the camera
void EvsV4lCamera::shutdown() {
    LOG(DEBUG) << "EvsV4lCamera shutdown";

    // Make sure our output stream is cleaned up
    // (It really should be already)
    stopVideoStream();

    // Note: Since stopVideoStream is blocking, no other threads can now be running

    // Close our video capture device
    mVideo.close();

    // Drop all the graphics buffers we've been using
    if (mBuffers.size() > 0) {
        ::android::GraphicBufferAllocator& alloc(::android::GraphicBufferAllocator::get());
        for (auto&& rec : mBuffers) {
            if (rec.inUse) {
                LOG(WARNING) << "Releasing buffer despite remote ownership";
            }
            alloc.free(rec.handle);
            rec.handle = nullptr;
        }
        mBuffers.clear();
    }
}

// Methods from ::aidl::android::hardware::automotive::evs::IEvsCamera follow.
ScopedAStatus EvsV4lCamera::getCameraInfo(CameraDesc* _aidl_return) {
    LOG(DEBUG) << __FUNCTION__;

    // Send back our self description
    *_aidl_return = mDescription;
    return ScopedAStatus::ok();
}

ScopedAStatus EvsV4lCamera::setMaxFramesInFlight(int32_t bufferCount) {
    LOG(DEBUG) << __FUNCTION__;
    std::lock_guard<std::mutex> lock(mAccessLock);

    // If we've been displaced by another owner of the camera, then we can't do anything else
    if (!mVideo.isOpen()) {
        LOG(WARNING) << "Ignoring setMaxFramesInFlight call when camera has been lost.";
        return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::OWNERSHIP_LOST));
    }

    // We cannot function without at least one video buffer to send data
    if (bufferCount < 1) {
        LOG(ERROR) << "Ignoring setMaxFramesInFlight with less than one buffer requested";
        return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::INVALID_ARG));
    }

    // Update our internal state
    if (setAvailableFrames_Locked(bufferCount)) {
        return ScopedAStatus::ok();
    } else {
        return ScopedAStatus::fromServiceSpecificError(
                static_cast<int>(EvsResult::BUFFER_NOT_AVAILABLE));
    }
}

ScopedAStatus EvsV4lCamera::startVideoStream(const std::shared_ptr<IEvsCameraStream>& client) {
    LOG(DEBUG) << __FUNCTION__;
    std::lock_guard<std::mutex> lock(mAccessLock);

    // If we've been displaced by another owner of the camera, then we can't do anything else
    if (!mVideo.isOpen()) {
        LOG(WARNING) << "Ignoring startVideoStream call when camera has been lost.";
        return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::OWNERSHIP_LOST));
    }

    if (mStream) {
        LOG(ERROR) << "Ignoring startVideoStream call when a stream is already running.";
        return ScopedAStatus::fromServiceSpecificError(
                static_cast<int>(EvsResult::STREAM_ALREADY_RUNNING));
    }

    // If the client never indicated otherwise, configure ourselves for a single streaming buffer
    if (mFramesAllowed < 1) {
        if (!setAvailableFrames_Locked(1)) {
            LOG(ERROR) << "Failed to start stream because we couldn't get a graphics buffer";
            return ScopedAStatus::fromServiceSpecificError(
                    static_cast<int>(EvsResult::BUFFER_NOT_AVAILABLE));
        }
    }

    // Choose which image transfer function we need
    // Map from V4L2 to Android graphic buffer format
    const auto videoSrcFormat = mVideo.getV4LFormat();
    LOG(INFO) << "Configuring to accept "
              << std::string(reinterpret_cast<const char*>(&videoSrcFormat), 4)
              << " camera data and convert to " << std::hex << mFormat;

    switch (mFormat) {
        case HAL_PIXEL_FORMAT_YCRCB_420_SP:
            switch (videoSrcFormat) {
                case V4L2_PIX_FMT_NV21:
                    mFillBufferFromVideo = fillNV21FromNV21;
                    break;
                case V4L2_PIX_FMT_YUYV:
                    mFillBufferFromVideo = fillNV21FromYUYV;
                    break;
                default:
                    LOG(ERROR) << "Unhandled camera output format: "
                               << std::string(reinterpret_cast<const char*>(&videoSrcFormat), 4)
                               << " (0x" << std::hex << videoSrcFormat << ")";
            }
            break;
        case HAL_PIXEL_FORMAT_RGBA_8888:
            switch (videoSrcFormat) {
                case V4L2_PIX_FMT_YUYV:
                    mFillBufferFromVideo = fillRGBAFromYUYV;
                    break;
                case V4L2_PIX_FMT_BGRX32:
                    mFillBufferFromVideo = fillRGBAFromBGRA;
                    break;
                default:
                    LOG(ERROR) << "Unhandled camera source format "
                               << std::string(reinterpret_cast<const char*>(&videoSrcFormat), 4);
            }
            break;
        case HAL_PIXEL_FORMAT_YCBCR_422_I:
            switch (videoSrcFormat) {
                case V4L2_PIX_FMT_YUYV:
                    mFillBufferFromVideo = fillYUYVFromYUYV;
                    break;
                case V4L2_PIX_FMT_UYVY:
                    mFillBufferFromVideo = fillYUYVFromUYVY;
                    break;
                default:
                    LOG(ERROR) << "Unhandled camera source format "
                               << std::string(reinterpret_cast<const char*>(&videoSrcFormat), 4);
            }
            break;
        default:
            LOG(ERROR) << "Unhandled camera format 0x" << std::hex << mFormat;
    }

    // Record the user's callback for use when we have a frame ready
    mStream = client;

    // Set up the video stream with a callback to our member function forwardFrame()
    if (!mVideo.startStream([this](VideoCapture*, imageBuffer* tgt, void* data) {
            this->forwardFrame(tgt, data);
        })) {
        // No need to hold onto this if we failed to start
        mStream = nullptr;
        LOG(ERROR) << "Underlying camera start stream failed";
        return ScopedAStatus::fromServiceSpecificError(
                static_cast<int>(EvsResult::UNDERLYING_SERVICE_ERROR));
    }

    return ScopedAStatus::ok();
}

ScopedAStatus EvsV4lCamera::stopVideoStream() {
    LOG(DEBUG) << __FUNCTION__;

    // Tell the capture device to stop (and block until it does)
    mVideo.stopStream();
    if (mStream) {
        std::unique_lock<std::mutex> lock(mAccessLock);

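        // Send a STREAM_STOPPED event so the client knows no more frames will arrive.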
        EvsEventDesc event;
        event.aType = EvsEventType::STREAM_STOPPED;
        auto result = mStream->notify(event);
        if (!result.isOk()) {
            LOG(WARNING) << "Error delivering end of stream event";
        }

        // Drop our reference to the client's stream receiver
        mStream = nullptr;
    }

    return ScopedAStatus::ok();
}

ScopedAStatus EvsV4lCamera::getPhysicalCameraInfo([[maybe_unused]] const std::string& id,
                                                  CameraDesc* _aidl_return) {
    LOG(DEBUG) << __FUNCTION__;

    // This method works exactly the same as getCameraInfo_1_1() in the EVS HW module.
    *_aidl_return = mDescription;
    return ScopedAStatus::ok();
}

ScopedAStatus EvsV4lCamera::doneWithFrame(const std::vector<BufferDesc>& buffers) {
    LOG(DEBUG) << __FUNCTION__;

    for (const auto& buffer : buffers) {
        doneWithFrame_impl(buffer);
    }

    return ScopedAStatus::ok();
}

ScopedAStatus EvsV4lCamera::pauseVideoStream() {
    return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::NOT_SUPPORTED));
}

ScopedAStatus EvsV4lCamera::resumeVideoStream() {
    return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::NOT_SUPPORTED));
}

ScopedAStatus EvsV4lCamera::setPrimaryClient() {
    /* Because the EVS HW module reference implementation expects a single client at
     * a time, this always returns a success code.
     */
    return ScopedAStatus::ok();
}

ScopedAStatus EvsV4lCamera::forcePrimaryClient(const std::shared_ptr<IEvsDisplay>&) {
    /* Because the EVS HW module reference implementation expects a single client at
     * a time, this always returns a success code.
     */
    return ScopedAStatus::ok();
}

ScopedAStatus EvsV4lCamera::unsetPrimaryClient() {
    /* Because the EVS HW module reference implementation expects a single client at
     * a time, there is no chance that this is called by a secondary client, so it
     * always returns a success code.
     */
    return ScopedAStatus::ok();
}

ScopedAStatus EvsV4lCamera::getParameterList(std::vector<CameraParam>* _aidl_return) {
    if (mCameraInfo) {
        _aidl_return->resize(mCameraInfo->controls.size());
        auto idx = 0;
        for (auto& [name, range] : mCameraInfo->controls) {
            (*_aidl_return)[idx++] = name;
        }
    }

    return ScopedAStatus::ok();
}

ScopedAStatus EvsV4lCamera::getIntParameterRange(CameraParam id, ParameterRange* _aidl_return) {
    if (!mCameraInfo) {
        return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::NOT_SUPPORTED));
    }

    auto it = mCameraInfo->controls.find(id);
    if (it == mCameraInfo->controls.end()) {
        return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::NOT_SUPPORTED));
    }

    _aidl_return->min = std::get<0>(it->second);
    _aidl_return->max = std::get<1>(it->second);
    _aidl_return->step = std::get<2>(it->second);

    return ScopedAStatus::ok();
}

ScopedAStatus EvsV4lCamera::setIntParameter(CameraParam id, int32_t value,
                                            std::vector<int32_t>* effectiveValue) {
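    // V4L2_CID_BASE is only a placeholder; convertToV4l2CID() replaces it with
    // the V4L2 control id that corresponds to the requested CameraParam.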
    uint32_t v4l2cid = V4L2_CID_BASE;
    if (!convertToV4l2CID(id, v4l2cid)) {
        return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::INVALID_ARG));
    } else {
        v4l2_control control = {v4l2cid, value};
        if (mVideo.setParameter(control) < 0 || mVideo.getParameter(control) < 0) {
            return ScopedAStatus::fromServiceSpecificError(
                    static_cast<int>(EvsResult::UNDERLYING_SERVICE_ERROR));
        }

        (*effectiveValue)[0] = control.value;
    }

    return ScopedAStatus::ok();
}

ScopedAStatus EvsV4lCamera::getIntParameter(CameraParam id, std::vector<int32_t>* value) {
    uint32_t v4l2cid = V4L2_CID_BASE;
    if (!convertToV4l2CID(id, v4l2cid)) {
        return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::INVALID_ARG));
    } else {
        v4l2_control control = {v4l2cid, 0};
        if (mVideo.getParameter(control) < 0) {
            return ScopedAStatus::fromServiceSpecificError(
                    static_cast<int>(EvsResult::INVALID_ARG));
        }

        // Report a result
        (*value)[0] = control.value;
    }

    return ScopedAStatus::ok();
}

ScopedAStatus EvsV4lCamera::setExtendedInfo(int32_t opaqueIdentifier,
                                            const std::vector<uint8_t>& opaqueValue) {
    mExtInfo.insert_or_assign(opaqueIdentifier, opaqueValue);
    return ScopedAStatus::ok();
}

ScopedAStatus EvsV4lCamera::getExtendedInfo(int32_t opaqueIdentifier,
                                            std::vector<uint8_t>* opaqueValue) {
    const auto it = mExtInfo.find(opaqueIdentifier);
    if (it == mExtInfo.end()) {
        return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::INVALID_ARG));
    } else {
        *opaqueValue = it->second;
    }

    return ScopedAStatus::ok();
}

ScopedAStatus EvsV4lCamera::importExternalBuffers(const std::vector<BufferDesc>& buffers,
                                                  int32_t* _aidl_return) {
    LOG(DEBUG) << __FUNCTION__;

    // If we've been displaced by another owner of the camera, then we can't do anything else
    if (!mVideo.isOpen()) {
        LOG(WARNING) << "Ignoring a request to add external buffers when the camera has been lost.";
        *_aidl_return = 0;
        return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::OWNERSHIP_LOST));
    }

    size_t numBuffersToAdd = buffers.size();
    if (numBuffersToAdd < 1) {
        LOG(DEBUG) << "No buffers to add.";
        *_aidl_return = 0;
        return ScopedAStatus::ok();
    }

    {
        std::lock_guard<std::mutex> lock(mAccessLock);
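        // Cap the request so that the total number of buffers never exceeds
        // kMaxBuffersInFlight.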
        if (numBuffersToAdd > (kMaxBuffersInFlight - mFramesAllowed)) {
            numBuffersToAdd = kMaxBuffersInFlight - mFramesAllowed;
            LOG(WARNING) << "Exceeded the limit on the number of buffers; only "
                         << numBuffersToAdd << " buffers will be added.";
        }

        ::android::GraphicBufferMapper& mapper = ::android::GraphicBufferMapper::get();
        const auto before = mFramesAllowed;
        for (size_t i = 0; i < numBuffersToAdd; ++i) {
            // TODO: reject if external buffer is configured differently.
            auto& b = buffers[i];
            const HardwareBufferDescription& description = b.buffer.description;

            // Import a buffer to add
            buffer_handle_t memHandle = nullptr;
            const auto result =
                    mapper.importBuffer(::android::dupFromAidl(b.buffer.handle),
                                        description.width, description.height, 1,
                                        static_cast<::android::PixelFormat>(description.format),
                                        static_cast<uint64_t>(description.usage),
                                        description.stride, &memHandle);
            if (result != ::android::NO_ERROR || memHandle == nullptr) {
                LOG(WARNING) << "Failed to import a buffer " << b.bufferId;
                continue;
            }

            auto stored = false;
            for (auto&& rec : mBuffers) {
                if (rec.handle == nullptr) {
                    // Use this existing entry
                    rec.handle = memHandle;
                    rec.inUse = false;

                    stored = true;
                    break;
                }
            }

            if (!stored) {
                // Add a BufferRecord wrapping this handle to our set of available buffers
                mBuffers.push_back(BufferRecord(memHandle));
            }

            ++mFramesAllowed;
        }

        *_aidl_return = mFramesAllowed - before;
        return ScopedAStatus::ok();
    }
}

EvsResult EvsV4lCamera::doneWithFrame_impl(const BufferDesc& bufferDesc) {
    if (!mVideo.isOpen()) {
        LOG(WARNING) << "Ignoring doneWithFrame call when camera has been lost.";
        return EvsResult::OK;
    }

    if (static_cast<uint32_t>(bufferDesc.bufferId) >= mBuffers.size()) {
        LOG(WARNING) << "Ignoring doneWithFrame called with invalid id " << bufferDesc.bufferId
                     << " (max is " << mBuffers.size() - 1 << ")";
        return EvsResult::OK;
    }

    // Mark this buffer as available
    {
        std::lock_guard<std::mutex> lock(mAccessLock);
        mBuffers[bufferDesc.bufferId].inUse = false;
        --mFramesInUse;

        // If this frame's index is high in the array, try to move it down
        // to improve locality after mFramesAllowed has been reduced.
        if (static_cast<uint32_t>(bufferDesc.bufferId) >= mFramesAllowed) {
            // Find an empty slot lower in the array (which should always exist in this case)
            bool found = false;
            for (auto&& rec : mBuffers) {
                if (!rec.handle) {
                    rec.handle = mBuffers[bufferDesc.bufferId].handle;
                    mBuffers[bufferDesc.bufferId].handle = nullptr;
                    found = true;
                    break;
                }
            }

            if (!found) {
                LOG(WARNING) << "No empty slot!";
            }
        }
    }

    return EvsResult::OK;
}

EvsResult EvsV4lCamera::doneWithFrame_impl(uint32_t bufferId, buffer_handle_t handle) {
    std::lock_guard<std::mutex> lock(mAccessLock);

    // If we've been displaced by another owner of the camera, then we can't do anything else
    if (!mVideo.isOpen()) {
        LOG(WARNING) << "Ignoring doneWithFrame call when camera has been lost.";
        return EvsResult::OK;
    }

    if (handle == nullptr) {
        LOG(ERROR) << "Ignoring doneWithFrame called with null handle";
    } else if (bufferId >= mBuffers.size()) {
        LOG(ERROR) << "Ignoring doneWithFrame called with invalid bufferId " << bufferId
                   << " (max is " << mBuffers.size() - 1 << ")";
    } else if (!mBuffers[bufferId].inUse) {
        LOG(ERROR) << "Ignoring doneWithFrame called on frame " << bufferId
                   << " which is already free";
    } else {
        // Mark the frame as available
        mBuffers[bufferId].inUse = false;
        --mFramesInUse;

        // If this frame's index is high in the array, try to move it down
        // to improve locality after mFramesAllowed has been reduced.
        if (bufferId >= mFramesAllowed) {
            // Find an empty slot lower in the array (which should always exist in this case)
            bool found = false;
            for (auto&& rec : mBuffers) {
                if (!rec.handle) {
                    rec.handle = mBuffers[bufferId].handle;
                    mBuffers[bufferId].handle = nullptr;
                    found = true;
                    break;
                }
            }

            if (!found) {
                LOG(WARNING) << "No empty slot!";
            }
        }
    }

    return EvsResult::OK;
}

bool EvsV4lCamera::setAvailableFrames_Locked(unsigned bufferCount) {
    if (bufferCount < 1) {
        LOG(ERROR) << "Ignoring request to set buffer count to zero";
        return false;
    }
    if (bufferCount > kMaxBuffersInFlight) {
        LOG(ERROR) << "Rejecting buffer request in excess of internal limit";
        return false;
    }

    // Is an increase required?
    if (mFramesAllowed < bufferCount) {
        // An increase is required
        auto needed = bufferCount - mFramesAllowed;
        LOG(INFO) << "Allocating " << needed << " buffers for camera frames";

        auto added = increaseAvailableFrames_Locked(needed);
        if (added != needed) {
            // If we didn't add all the frames we needed, then roll back to the previous state
            LOG(ERROR) << "Rolling back to previous frame queue size";
            decreaseAvailableFrames_Locked(added);
            return false;
        }
    } else if (mFramesAllowed > bufferCount) {
        // A decrease is required
        auto framesToRelease = mFramesAllowed - bufferCount;
        LOG(INFO) << "Returning " << framesToRelease << " camera frame buffers";

        auto released = decreaseAvailableFrames_Locked(framesToRelease);
        if (released != framesToRelease) {
            // This shouldn't happen with a properly behaving client because the client
            // should only make this call after returning sufficient outstanding buffers
            // to allow a clean resize.
            LOG(ERROR) << "Buffer queue shrink failed -- too many buffers currently in use?";
        }
    }

    return true;
}

unsigned EvsV4lCamera::increaseAvailableFrames_Locked(unsigned numToAdd) {
    // Acquire the graphics buffer allocator
    ::android::GraphicBufferAllocator& alloc(::android::GraphicBufferAllocator::get());

    unsigned added = 0;
    while (added < numToAdd) {
        unsigned pixelsPerLine = 0;
        buffer_handle_t memHandle = nullptr;
        auto result = alloc.allocate(mVideo.getWidth(), mVideo.getHeight(), mFormat, 1, mUsage,
                                     &memHandle, &pixelsPerLine, 0, "EvsV4lCamera");
        if (result != ::android::NO_ERROR) {
            LOG(ERROR) << "Error " << result << " allocating " << mVideo.getWidth() << " x "
                       << mVideo.getHeight() << " graphics buffer";
            break;
        }
        if (memHandle == nullptr) {
            LOG(ERROR) << "We didn't get a buffer handle back from the allocator";
            break;
        }
        if (mStride > 0) {
            if (mStride != pixelsPerLine) {
                LOG(ERROR) << "We did not expect to get buffers with different strides!";
            }
        } else {
            // Gralloc defines stride in terms of pixels per line
            mStride = pixelsPerLine;
        }

        // Find a place to store the new buffer
        auto stored = false;
        for (auto&& rec : mBuffers) {
            if (rec.handle == nullptr) {
                // Use this existing entry
                rec.handle = memHandle;
                rec.inUse = false;
                stored = true;
                break;
            }
        }
        if (!stored) {
            // Add a BufferRecord wrapping this handle to our set of available buffers
            mBuffers.push_back(BufferRecord(memHandle));
        }

        ++mFramesAllowed;
        ++added;
    }

    return added;
}

unsigned EvsV4lCamera::decreaseAvailableFrames_Locked(unsigned numToRemove) {
    // Acquire the graphics buffer allocator
    ::android::GraphicBufferAllocator& alloc(::android::GraphicBufferAllocator::get());

    unsigned removed = 0;
    for (auto&& rec : mBuffers) {
        // Is this record not in use, but holding a buffer that we can free?
        if ((rec.inUse == false) && (rec.handle != nullptr)) {
            // Release buffer and update the record so we can recognize it as "empty"
            alloc.free(rec.handle);
            rec.handle = nullptr;

            --mFramesAllowed;
            ++removed;

            if (removed == numToRemove) {
                break;
            }
        }
    }

    return removed;
}

// This is the async callback from the video camera that tells us a frame is ready
void EvsV4lCamera::forwardFrame(imageBuffer* pV4lBuff, void* pData) {
    LOG(DEBUG) << __FUNCTION__;
    bool readyForFrame = false;
    unsigned idx = 0;

    // Lock scope for updating shared state
    {
        std::lock_guard<std::mutex> lock(mAccessLock);

        // Are we allowed to issue another buffer?
        if (mFramesInUse >= mFramesAllowed) {
            // Can't do anything right now -- skip this frame
            LOG(WARNING) << "Skipped a frame because too many are in flight";
        } else {
            // Identify an available buffer to fill
            for (idx = 0; idx < mBuffers.size(); idx++) {
                if (!mBuffers[idx].inUse && mBuffers[idx].handle != nullptr) {
                    // Found an available record, so stop looking
                    break;
                }
            }
            if (idx >= mBuffers.size()) {
                // This shouldn't happen since we already checked mFramesInUse vs mFramesAllowed
                LOG(ERROR) << "Failed to find an available buffer slot";
            } else {
                // We're going to make the frame busy
                mBuffers[idx].inUse = true;
                mFramesInUse++;
                readyForFrame = true;
            }
        }
    }

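    // If frame dumping was enabled via startDumpFrames(), write this capture to disk
    // as a small header (width, height, stride, format) followed by the raw payload.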
    if (mDumpFrame) {
        // Construct a target filename with the device identifier
        std::string filename = std::string(mDescription.id);
        std::replace(filename.begin(), filename.end(), '/', '_');
        filename = mDumpPath + filename + "_" + std::to_string(mFrameCounter) + ".bin";

        ::android::base::unique_fd fd(
                open(filename.data(), O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR | S_IRGRP));
        LOG(DEBUG) << filename << ", " << fd;
        if (fd == -1) {
            PLOG(ERROR) << "Failed to open a file, " << filename;
        } else {
            auto width = mVideo.getWidth();
            auto height = mVideo.getHeight();
            auto len = write(fd.get(), &width, sizeof(width));
            len += write(fd.get(), &height, sizeof(height));
            len += write(fd.get(), &mStride, sizeof(mStride));
            len += write(fd.get(), &mFormat, sizeof(mFormat));
            len += write(fd.get(), pData, pV4lBuff->length);
            LOG(INFO) << len << " bytes were written to " << filename;
        }
    }

    if (!readyForFrame) {
        // We need to return the video buffer so it can capture a new frame
        mVideo.markFrameConsumed(pV4lBuff->index);
    } else {
        using AidlPixelFormat = ::aidl::android::hardware::graphics::common::PixelFormat;

        // Assemble the buffer description we'll transmit below
        buffer_handle_t memHandle = mBuffers[idx].handle;
        BufferDesc bufferDesc = {
                .buffer =
                        {
                                .description =
                                        {
                                                .width = static_cast<int32_t>(mVideo.getWidth()),
                                                .height = static_cast<int32_t>(mVideo.getHeight()),
                                                .layers = 1,
                                                .format = static_cast<AidlPixelFormat>(mFormat),
                                                .usage = static_cast<BufferUsage>(mUsage),
                                                .stride = static_cast<int32_t>(mStride),
                                        },
                                .handle = ::android::dupToAidl(memHandle),
                        },
                .bufferId = static_cast<int32_t>(idx),
                .deviceId = mDescription.id,
                .timestamp = static_cast<int64_t>(::android::elapsedRealtimeNano() * 1e+3),
        };

        // Lock our output buffer for writing
        // TODO(b/145459970): Sometimes, physical camera device maps a buffer
        // into the address that is about to be unmapped by another device; this
        // causes SEGV_MAPPER.
        void* targetPixels = nullptr;
        ::android::GraphicBufferMapper& mapper = ::android::GraphicBufferMapper::get();
        auto result =
                mapper.lock(memHandle, GRALLOC_USAGE_SW_WRITE_OFTEN | GRALLOC_USAGE_SW_READ_NEVER,
                            ::android::Rect(bufferDesc.buffer.description.width,
                                            bufferDesc.buffer.description.height),
                            (void**)&targetPixels);

        // If we failed to lock the pixel buffer, we're about to crash, but log it first
        if (!targetPixels) {
            // TODO(b/145457727): When EvsHidlTest::CameraToDisplayRoundTrip
            // test case was repeatedly executed, EVS occasionally fails to map
            // a buffer.
            LOG(ERROR) << "Camera failed to gain access to image buffer for writing -"
                       << " status: " << ::android::statusToString(result);
        }

        // Transfer the video image into the output buffer, making any needed
        // format conversion along the way
        mFillBufferFromVideo(bufferDesc, (uint8_t*)targetPixels, pData, mVideo.getStride());

        // Unlock the output buffer
        mapper.unlock(memHandle);

        // Give the video frame back to the underlying device for reuse
        // Note that we do this before making the client callback to give the
        // underlying camera more time to capture the next frame
        mVideo.markFrameConsumed(pV4lBuff->index);

        // Issue the (asynchronous) callback to the client -- can't be holding
        // the lock
        auto flag = false;
        if (mStream) {
            std::vector<BufferDesc> frames;
            frames.push_back(std::move(bufferDesc));
            flag = mStream->deliverFrame(frames).isOk();
        }

        if (flag) {
            LOG(DEBUG) << "Delivered " << memHandle << " as id " << bufferDesc.bufferId;
        } else {
            // This can happen if the client dies and is likely unrecoverable.
            // To avoid consuming resources generating failing calls, we stop sending
            // frames. Note, however, that the stream remains in the "STREAMING" state
            // until cleaned up on the main thread.
            LOG(ERROR) << "Frame delivery call failed in the transport layer.";

            // Since we didn't actually deliver it, mark the frame as available
            std::lock_guard<std::mutex> lock(mAccessLock);
            mBuffers[idx].inUse = false;
            --mFramesInUse;
        }
    }

    // Increase the frame counter
    ++mFrameCounter;
}

bool EvsV4lCamera::convertToV4l2CID(CameraParam id, uint32_t& v4l2cid) {
    switch (id) {
        case CameraParam::BRIGHTNESS:
            v4l2cid = V4L2_CID_BRIGHTNESS;
            break;
        case CameraParam::CONTRAST:
            v4l2cid = V4L2_CID_CONTRAST;
            break;
        case CameraParam::AUTO_WHITE_BALANCE:
            v4l2cid = V4L2_CID_AUTO_WHITE_BALANCE;
            break;
        case CameraParam::WHITE_BALANCE_TEMPERATURE:
            v4l2cid = V4L2_CID_WHITE_BALANCE_TEMPERATURE;
            break;
        case CameraParam::SHARPNESS:
            v4l2cid = V4L2_CID_SHARPNESS;
            break;
        case CameraParam::AUTO_EXPOSURE:
            v4l2cid = V4L2_CID_EXPOSURE_AUTO;
            break;
        case CameraParam::ABSOLUTE_EXPOSURE:
            v4l2cid = V4L2_CID_EXPOSURE_ABSOLUTE;
            break;
        case CameraParam::AUTO_FOCUS:
            v4l2cid = V4L2_CID_FOCUS_AUTO;
            break;
        case CameraParam::ABSOLUTE_FOCUS:
            v4l2cid = V4L2_CID_FOCUS_ABSOLUTE;
            break;
        case CameraParam::ABSOLUTE_ZOOM:
            v4l2cid = V4L2_CID_ZOOM_ABSOLUTE;
            break;
        default:
            LOG(ERROR) << "Camera parameter " << static_cast<unsigned>(id) << " is unknown.";
            return false;
    }

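    // Report support only if the underlying video device actually exposes this control.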
    return mCameraControls.find(v4l2cid) != mCameraControls.end();
}

std::shared_ptr<EvsV4lCamera> EvsV4lCamera::Create(const char* deviceName) {
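    // No ConfigManager record is available for this device, so fall through to the
    // full Create() with a null camera info; the device will be opened with the
    // default resolution and format.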
    std::unique_ptr<ConfigManager::CameraInfo> nullCamInfo = nullptr;
    return Create(deviceName, nullCamInfo);
}

std::shared_ptr<EvsV4lCamera> EvsV4lCamera::Create(
        const char* deviceName, std::unique_ptr<ConfigManager::CameraInfo>& camInfo,
        const Stream* requestedStreamCfg) {
    LOG(INFO) << "Create " << deviceName;
    std::shared_ptr<EvsV4lCamera> evsCamera =
            ndk::SharedRefBase::make<EvsV4lCamera>(deviceName, camInfo);
    if (!evsCamera) {
        return nullptr;
    }

    // Initialize the video device
    bool success = false;
    if (camInfo != nullptr && requestedStreamCfg != nullptr) {
        LOG(INFO) << "Requested stream configuration:";
        LOG(INFO) << "  width = " << requestedStreamCfg->width;
        LOG(INFO) << "  height = " << requestedStreamCfg->height;
        LOG(INFO) << "  format = " << static_cast<int>(requestedStreamCfg->format);
        // Validate the given stream configuration. If there is no exact match,
        // this will try to find the best match based on:
        // 1) same output format
        // 2) the largest resolution that is smaller than the given configuration.
        int32_t streamId = -1, area = INT_MIN;
        for (auto& [id, cfg] : camInfo->streamConfigurations) {
            if (cfg.format == requestedStreamCfg->format) {
                if (cfg.width == requestedStreamCfg->width &&
                    cfg.height == requestedStreamCfg->height) {
                    // Found an exact match.
                    streamId = id;
                    break;
                } else if (cfg.width < requestedStreamCfg->width &&
                           cfg.height < requestedStreamCfg->height &&
                           cfg.width * cfg.height > area) {
                    streamId = id;
                    area = cfg.width * cfg.height;
                }
            }
        }

        if (streamId >= 0) {
            LOG(INFO) << "Selected video stream configuration:";
            LOG(INFO) << "  width = " << camInfo->streamConfigurations[streamId].width;
            LOG(INFO) << "  height = " << camInfo->streamConfigurations[streamId].height;
            LOG(INFO) << "  format = "
                      << static_cast<int>(camInfo->streamConfigurations[streamId].format);
            success = evsCamera->mVideo.open(deviceName,
                                             camInfo->streamConfigurations[streamId].width,
                                             camInfo->streamConfigurations[streamId].height);
            // Safe to statically cast
            // ::aidl::android::hardware::graphics::common::PixelFormat type to
            // android_pixel_format_t
            evsCamera->mFormat =
                    static_cast<uint32_t>(camInfo->streamConfigurations[streamId].format);
        }
    }

    if (!success) {
        // Create a camera object with the default resolution and format,
        // HAL_PIXEL_FORMAT_RGBA_8888.
        LOG(INFO) << "Open a video with default parameters";
        success = evsCamera->mVideo.open(deviceName, kDefaultResolution[0], kDefaultResolution[1]);
        if (!success) {
            LOG(ERROR) << "Failed to open a video stream";
            return nullptr;
        }
    }

    // List available camera parameters
    evsCamera->mCameraControls = evsCamera->mVideo.enumerateCameraControls();

    // Please note that the buffer usage flag does not come from a given stream
    // configuration.
    evsCamera->mUsage =
            GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_READ_RARELY | GRALLOC_USAGE_SW_WRITE_OFTEN;

    return evsCamera;
}

Result<void> EvsV4lCamera::startDumpFrames(const std::string& path) {
    struct stat info;
    if (stat(path.data(), &info) != 0) {
        return Error(::android::BAD_VALUE) << "Cannot access " << path;
    } else if (!(info.st_mode & S_IFDIR)) {
        return Error(::android::BAD_VALUE) << path << " is not a directory";
    }

    mDumpPath = path;
    mDumpFrame = true;

    return {};
}

Result<void> EvsV4lCamera::stopDumpFrames() {
    if (!mDumpFrame) {
        return Error(::android::INVALID_OPERATION) << "Device is not dumping frames";
    }

    mDumpFrame = false;
    return {};
}

}  // namespace aidl::android::hardware::automotive::evs::implementation