1 /*
2 * Copyright (C) 2023 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #define FAILURE_DEBUG_PREFIX "FakeRotatingCamera"
18
19 #include <log/log.h>
20 #include <android-base/properties.h>
21 #include <system/camera_metadata.h>
22 #include <ui/GraphicBuffer.h>
23 #include <ui/GraphicBufferAllocator.h>
24 #include <ui/GraphicBufferMapper.h>
25
26 #include <qemu_pipe_bp.h>
27
28 #define GL_GLEXT_PROTOTYPES
29 #define EGL_EGLEXT_PROTOTYPES
30 #include <EGL/egl.h>
31 #include <EGL/eglext.h>
32 #include <GLES2/gl2.h>
33 #include <GLES2/gl2ext.h>
34 #undef EGL_EGLEXT_PROTOTYPES
35 #undef GL_GLEXT_PROTOTYPES
36
37 #include "acircles_pattern_512_512.h"
38 #include "converters.h"
39 #include "debug.h"
40 #include "FakeRotatingCamera.h"
41 #include "jpeg.h"
42 #include "metadata_utils.h"
43 #include "utils.h"
44 #include "yuv.h"
45
46 namespace android {
47 namespace hardware {
48 namespace camera {
49 namespace provider {
50 namespace implementation {
51 namespace hw {
52
53 using base::unique_fd;
54
55 namespace {
constexpr char kClass[] = "FakeRotatingCamera";  // prefix used in log messages

// Frame rate bounds; the advertised ranges are built from these in
// getTargetFpsRanges().
constexpr int kMinFPS = 2;
constexpr int kMedFPS = 15;
constexpr int kMaxFPS = 30;
constexpr int64_t kOneSecondNs = 1000000000;

// Reported lens focal length (presumably millimeters -- TODO confirm
// against the static metadata producer).
constexpr float kDefaultFocalLength = 2.8;

// Per-frame durations derived from the FPS bounds above.
constexpr int64_t kMinFrameDurationNs = kOneSecondNs / kMaxFPS;
constexpr int64_t kMaxFrameDurationNs = kOneSecondNs / kMinFPS;
constexpr int64_t kDefaultFrameDurationNs = kOneSecondNs / kMedFPS;

// Sensor exposure time reported in capture results, and its bounds.
constexpr int64_t kDefaultSensorExposureTimeNs = kOneSecondNs / 100;
constexpr int64_t kMinSensorExposureTimeNs = kDefaultSensorExposureTimeNs / 100;
constexpr int64_t kMaxSensorExposureTimeNs = kDefaultSensorExposureTimeNs * 10;

// JPEG quality used when the request does not specify one.
constexpr int32_t kDefaultJpegQuality = 85;
74
// Combines two BufferUsage bitmasks into a single mask.
constexpr BufferUsage usageOr(const BufferUsage lhs, const BufferUsage rhs) {
    const uint64_t bits = static_cast<uint64_t>(lhs) | static_cast<uint64_t>(rhs);
    return static_cast<BufferUsage>(bits);
}
78
// Returns true when the two BufferUsage bitmasks share at least one bit.
constexpr bool usageTest(const BufferUsage lhs, const BufferUsage rhs) {
    const uint64_t common = static_cast<uint64_t>(lhs) & static_cast<uint64_t>(rhs);
    return common != 0;
}
82
// Packs normalized [0, 1] RGB components into a 16-bit R5G6B5 pixel
// (red in the top 5 bits, green in the middle 6, blue in the low 5).
constexpr uint16_t toR5G6B5(float r, float g, float b) {
    const uint16_t red = uint16_t(r * 31);
    const uint16_t green = uint16_t(g * 63);
    const uint16_t blue = uint16_t(b * 31);
    return uint16_t((red << 11) | (green << 5) | blue);
}
86
// Packs 8-bit RGBA components into a 32-bit little-endian R8G8B8A8 pixel
// (red in the lowest byte, alpha in the highest).
constexpr uint32_t toR8G8B8A8(uint8_t r, uint8_t g, uint8_t b, uint8_t a) {
    uint32_t packed = a;
    packed = (packed << 8) | b;
    packed = (packed << 8) | g;
    packed = (packed << 8) | r;
    return packed;
}
90
// Converts an angle from degrees to radians.
constexpr double degrees2rad(const double deg) {
    return deg * M_PI / 180.0;
}
94
95 // This texture is useful to debug camera orientation and image aspect ratio
// This texture is useful to debug camera orientation and image aspect ratio:
// an 8x8 R5G6B5 bitmap spelling a letter-like glyph in red on a gray
// background. Uses NEAREST filtering so the texels stay sharp when scaled.
// Must be called with a current EGL context.
abc3d::AutoTexture loadTestPatternTextureA() {
    constexpr uint16_t B = toR5G6B5(.4, .4, .4);  // background gray
    constexpr uint16_t R = toR5G6B5( 1, .1, .1);  // foreground red

    static const uint16_t texels[] = {
        B, R, R, R, R, R, B, B,
        R, B, B, B, B, B, R, B,
        B, B, B, B, B, B, R, B,
        B, R, R, R, R, R, B, B,
        R, B, B, B, B, B, R, B,
        R, B, B, B, B, B, R, B,
        R, B, B, B, B, B, R, B,
        B, R, R, R, R, R, B, R,
    };

    abc3d::AutoTexture tex(GL_TEXTURE_2D, GL_RGB, 8, 8,
                           GL_RGB, GL_UNSIGNED_SHORT_5_6_5, texels);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);

    return tex;
}
118
119 // This texture is useful to debug camera dataspace
// This texture is useful to debug camera dataspace: an 8x8 RGBA bitmap whose
// rows are brightness ramps of red, yellow, green, cyan, blue, magenta,
// orange and gray, making channel swaps and transfer-function errors visible.
// Must be called with a current EGL context.
abc3d::AutoTexture loadTestPatternTextureColors() {
    static const uint32_t texels[] = {
        toR8G8B8A8(32, 0, 0, 255), toR8G8B8A8(64, 0, 0, 255), toR8G8B8A8(96, 0, 0, 255), toR8G8B8A8(128, 0, 0, 255),
        toR8G8B8A8(160, 0, 0, 255), toR8G8B8A8(192, 0, 0, 255), toR8G8B8A8(224, 0, 0, 255), toR8G8B8A8(255, 0, 0, 255),

        toR8G8B8A8(32, 32, 0, 255), toR8G8B8A8(64, 64, 0, 255), toR8G8B8A8(96, 96, 0, 255), toR8G8B8A8(128, 128, 0, 255),
        toR8G8B8A8(160, 160, 0, 255), toR8G8B8A8(192, 192, 0, 255), toR8G8B8A8(224, 224, 0, 255), toR8G8B8A8(255, 255, 0, 255),

        toR8G8B8A8(0, 32, 0, 255), toR8G8B8A8(0, 64, 0, 255), toR8G8B8A8(0, 96, 0, 255), toR8G8B8A8(0, 128, 0, 255),
        toR8G8B8A8(0, 160, 0, 255), toR8G8B8A8(0, 192, 0, 255), toR8G8B8A8(0, 224, 0, 255), toR8G8B8A8(0, 255, 0, 255),

        toR8G8B8A8(0, 32, 32, 255), toR8G8B8A8(0, 64, 64, 255), toR8G8B8A8(0, 96, 96, 255), toR8G8B8A8(0, 128, 128, 255),
        toR8G8B8A8(0, 160, 160, 255), toR8G8B8A8(0, 192, 192, 255), toR8G8B8A8(0, 224, 224, 255), toR8G8B8A8(0, 255, 255, 255),

        toR8G8B8A8(0, 0, 32, 255), toR8G8B8A8(0, 0, 64, 255), toR8G8B8A8(0, 0, 96, 255), toR8G8B8A8(0, 0, 128, 255),
        toR8G8B8A8(0, 0, 160, 255), toR8G8B8A8(0, 0, 192, 255), toR8G8B8A8(0, 0, 224, 255), toR8G8B8A8(0, 0, 255, 255),

        toR8G8B8A8(32, 0, 32, 255), toR8G8B8A8(64, 0, 64, 255), toR8G8B8A8(96, 0, 96, 255), toR8G8B8A8(128, 0, 128, 255),
        toR8G8B8A8(160, 0, 160, 255), toR8G8B8A8(192, 0, 192, 255), toR8G8B8A8(224, 0, 224, 255), toR8G8B8A8(255, 0, 255, 255),

        // NOTE(review): the 4th texel of this (otherwise orange) row is pure
        // white -- looks like an intentional orientation marker, but confirm.
        toR8G8B8A8(32, 128, 0, 255), toR8G8B8A8(64, 128, 0, 255), toR8G8B8A8(96, 128, 0, 255), toR8G8B8A8(255, 255, 255, 255),
        toR8G8B8A8(160, 128, 0, 255), toR8G8B8A8(192, 128, 0, 255), toR8G8B8A8(224, 128, 0, 255), toR8G8B8A8(255, 128, 0, 255),

        toR8G8B8A8(0, 0, 0, 255), toR8G8B8A8(32, 32, 32, 255), toR8G8B8A8(64, 64, 64, 255), toR8G8B8A8(96, 96, 96, 255),
        toR8G8B8A8(128, 128, 128, 255), toR8G8B8A8(160, 160, 160, 255), toR8G8B8A8(192, 192, 192, 255), toR8G8B8A8(224, 224, 224, 255),
    };

    abc3d::AutoTexture tex(GL_TEXTURE_2D, GL_RGBA, 8, 8,
                           GL_RGBA, GL_UNSIGNED_BYTE, texels);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);

    return tex;
}
154
155 // This texture is used to pass CtsVerifier
// This texture is used to pass CtsVerifier: decodes the run-length encoded
// "acircles" pattern (acircles_pattern_512_512.h) into a square R5G6B5
// texture with LINEAR filtering. Must be called with a current EGL context.
abc3d::AutoTexture loadTestPatternTextureAcircles() {
    // Colors the RLE stream can reference; index 4 (white) is the implicit
    // color of runs whose leading byte has bit 0 clear.
    constexpr uint16_t kPalette[] = {
        toR5G6B5(0, 0, 0),
        toR5G6B5(.25, .25, .25),
        toR5G6B5(.5, .5, .5),
        toR5G6B5(1, 1, 0),
        toR5G6B5(1, 1, 1),
    };

    std::vector<uint16_t> texels;
    texels.reserve(kAcirclesPatternWidth * kAcirclesPatternWidth);

    // RLE decoding (format inferred from this loop -- confirm against the
    // generator of acircles_pattern_512_512.h). Each run begins with byte x:
    //  * bit 0 set: colored run; count = (x >> 3) + 1,
    //    color = kPalette[(x >> 1) & 3].
    //  * bit 0 clear: white run (kPalette[4]);
    //      - bit 1 set: long run, count = ((next_byte << 6) | (x >> 2)) + 1,
    //        consuming one extra byte;
    //      - bit 1 clear: short run, count = (x >> 2) + 1.
    auto i = std::begin(kAcirclesPatternRLE);
    const auto end = std::end(kAcirclesPatternRLE);
    while (i < end) {
        const unsigned x = *i;
        ++i;
        unsigned n;
        uint16_t color;
        if (x & 1) {
            n = (x >> 3) + 1;
            color = kPalette[(x >> 1) & 3];
        } else {
            if (x & 2) {
                n = ((unsigned(*i) << 6) | (x >> 2)) + 1;
                ++i;
            } else {
                n = (x >> 2) + 1;
            }
            color = kPalette[4];
        }
        texels.insert(texels.end(), n, color);
    }

    abc3d::AutoTexture tex(GL_TEXTURE_2D, GL_RGB,
                           kAcirclesPatternWidth, kAcirclesPatternWidth,
                           GL_RGB, GL_UNSIGNED_SHORT_5_6_5, texels.data());
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

    return tex;
}
198
loadTestPatternTexture()199 abc3d::AutoTexture loadTestPatternTexture() {
200 std::string valueStr =
201 base::GetProperty("vendor.qemu.FakeRotatingCamera.scene", "");
202 if (valueStr.empty()) {
203 valueStr =
204 base::GetProperty("ro.boot.qemu.FakeRotatingCamera.scene", "");
205 }
206
207 if (strcmp(valueStr.c_str(), "a") == 0) {
208 return loadTestPatternTextureA();
209 } else if (strcmp(valueStr.c_str(), "colors") == 0) {
210 return loadTestPatternTextureColors();
211 } else {
212 return loadTestPatternTextureAcircles();
213 }
214 }
215
// Compresses a raw NV21 image into the BLOB gralloc buffer as JPEG.
// `metadata` supplies per-request JPEG settings (quality, thumbnail, etc.)
// to HwCamera::compressJpeg. Returns true on success.
bool compressNV21IntoJpeg(const Rect<uint16_t> imageSize,
                          const uint8_t* nv21data,
                          const CameraMetadata& metadata,
                          const native_handle_t* jpegBuffer,
                          const size_t jpegBufferSize) {
    // NV21init builds the android_ycbcr plane descriptor over the existing
    // pixel data; the const_cast is needed because android_ycbcr holds
    // non-const pointers -- presumably the encoder never writes through
    // them (TODO confirm).
    const android_ycbcr imageYcbcr = yuv::NV21init(imageSize.width, imageSize.height,
                                                   const_cast<uint8_t*>(nv21data));

    return HwCamera::compressJpeg(imageSize, imageYcbcr, metadata,
                                  jpegBuffer, jpegBufferSize);
}
227
228 } // namespace
229
// Constructs the fake camera; `isBackFacing` only affects the facing
// reported via isBackFacing(). The AFStateMachine arguments (200, 1, 2)
// presumably set focusing time and focused/unfocused lens positions --
// confirm against AFStateMachine's constructor.
FakeRotatingCamera::FakeRotatingCamera(const bool isBackFacing)
    : mIsBackFacing(isBackFacing)
    , mAFStateMachine(200, 1, 2) {}
233
// Releases everything: cached streams, GL objects, the EGL context and the
// QEMU sensor pipe (closeImpl(true)).
FakeRotatingCamera::~FakeRotatingCamera() {
    closeImpl(true);
}
237
// Maps a framework-requested stream configuration to the parameters this HAL
// actually uses: {concrete pixel format, producer usage with HAL-required
// bits OR-ed in, dataspace, max number of in-flight buffers}. A negative
// fourth element (via FAILURE) signals an unsupported combination.
std::tuple<PixelFormat, BufferUsage, Dataspace, int32_t>
FakeRotatingCamera::overrideStreamParams(const PixelFormat format,
                                         const BufferUsage usage,
                                         const Dataspace dataspace) const {
    // RGBA streams are rendered to directly on the GPU; YUV and BLOB streams
    // are written by the CPU (RGBA->NV21 conversion / JPEG encoding).
    constexpr BufferUsage kRgbaExtraUsage = usageOr(BufferUsage::CAMERA_OUTPUT,
                                                    BufferUsage::GPU_RENDER_TARGET);
    constexpr BufferUsage kYuvExtraUsage = usageOr(BufferUsage::CAMERA_OUTPUT,
                                                   BufferUsage::CPU_WRITE_OFTEN);
    constexpr BufferUsage kBlobExtraUsage = usageOr(BufferUsage::CAMERA_OUTPUT,
                                                    BufferUsage::CPU_WRITE_OFTEN);

    switch (format) {
    case PixelFormat::YCBCR_420_888:
        // Video encoders hold on to more buffers, hence the deeper queue (8).
        return {PixelFormat::YCBCR_420_888, usageOr(usage, kYuvExtraUsage),
                Dataspace::JFIF, (usageTest(usage, BufferUsage::VIDEO_ENCODER) ? 8 : 4)};

    case PixelFormat::IMPLEMENTATION_DEFINED:
        // Resolve the opaque format: YUV for encoders, RGBA otherwise.
        if (usageTest(usage, BufferUsage::VIDEO_ENCODER)) {
            return {PixelFormat::YCBCR_420_888, usageOr(usage, kYuvExtraUsage),
                    Dataspace::JFIF, 8};
        } else {
            return {PixelFormat::RGBA_8888, usageOr(usage, kRgbaExtraUsage),
                    Dataspace::UNKNOWN, 4};
        }

    case PixelFormat::RGBA_8888:
        return {PixelFormat::RGBA_8888, usageOr(usage, kRgbaExtraUsage),
                Dataspace::UNKNOWN, (usageTest(usage, BufferUsage::VIDEO_ENCODER) ? 8 : 4)};

    case PixelFormat::BLOB:
        // Only JPEG blobs are supported.
        switch (dataspace) {
        case Dataspace::JFIF:
            return {PixelFormat::BLOB, usageOr(usage, kBlobExtraUsage),
                    Dataspace::JFIF, 4};  // JPEG
        default:
            return {format, usage, dataspace, FAILURE(kErrorBadDataspace)};
        }

    default:
        return {format, usage, dataspace, FAILURE(kErrorBadFormat)};
    }
}
280
// Configures a capture session: resets per-session state, applies the session
// parameters, opens the QEMU sensor pipe (once), initializes OpenGL (once)
// and populates mStreamInfoCache from the paired Stream/HalStream arrays.
// For every non-RGBA stream an intermediate RGBA_8888 gralloc buffer is
// allocated, used as the GL render target before CPU-side conversion.
// Returns false (via FAILURE) on any setup error.
bool FakeRotatingCamera::configure(const CameraMetadata& sessionParams,
                                   size_t nStreams,
                                   const Stream* streams,
                                   const HalStream* halStreams) {
    // Drop per-session state only; keep GL/EGL and the QEMU pipe alive.
    closeImpl(false);

    applyMetadata(sessionParams);

    if (!mQemuChannel.ok()) {
        static const char kPipeName[] = "FakeRotatingCameraSensor";
        mQemuChannel.reset(qemu_pipe_open_ns(NULL, kPipeName, O_RDWR));
        if (!mQemuChannel.ok()) {
            ALOGE("%s:%s:%d qemu_pipe_open_ns failed for '%s'",
                  kClass, __func__, __LINE__, kPipeName);
            return FAILURE(false);
        }
    }

    const abc3d::EglCurrentContext currentContext = initOpenGL();
    if (!currentContext.ok()) {
        return FAILURE(false);
    }

    // closeImpl(false) above must have emptied the cache.
    LOG_ALWAYS_FATAL_IF(!mStreamInfoCache.empty());
    for (; nStreams > 0; --nStreams, ++streams, ++halStreams) {
        const int32_t id = streams->id;
        // streams[i] and halStreams[i] are expected to describe the same stream.
        LOG_ALWAYS_FATAL_IF(halStreams->id != id);
        StreamInfo& si = mStreamInfoCache[id];
        si.usage = halStreams->producerUsage;
        si.size.width = streams->width;
        si.size.height = streams->height;
        si.pixelFormat = halStreams->overrideFormat;
        si.blobBufferSize = streams->bufferSize;

        if (si.pixelFormat != PixelFormat::RGBA_8888) {
            // YUV/BLOB outputs are rendered into this RGBA scratch buffer
            // first, then converted on the CPU (hence CPU_READ_OFTEN).
            const native_handle_t* buffer;
            GraphicBufferAllocator& gba = GraphicBufferAllocator::get();

            if (gba.allocate(si.size.width, si.size.height,
                             static_cast<int>(PixelFormat::RGBA_8888), 1,
                             static_cast<uint64_t>(usageOr(BufferUsage::GPU_RENDER_TARGET,
                                                           usageOr(BufferUsage::CPU_READ_OFTEN,
                                                                   BufferUsage::CAMERA_OUTPUT))),
                             &buffer, &si.stride, kClass) == NO_ERROR) {
                si.rgbaBuffer.reset(buffer);
            } else {
                mStreamInfoCache.clear();
                return FAILURE(false);
            }
        }
    }

    return true;
}
335
// Fully shuts the camera down: stream cache, GL objects, EGL context and
// the QEMU sensor pipe.
void FakeRotatingCamera::close() {
    closeImpl(true);
}
339
initOpenGL()340 abc3d::EglCurrentContext FakeRotatingCamera::initOpenGL() {
341 if (mGlProgram.ok()) {
342 return mEglContext.getCurrentContext();
343 }
344
345 abc3d::EglContext context;
346 abc3d::EglCurrentContext currentContext = context.init();
347 if (!currentContext.ok()) {
348 return abc3d::EglCurrentContext();
349 }
350
351 abc3d::AutoTexture testPatternTexture = loadTestPatternTexture();
352 if (!testPatternTexture.ok()) {
353 return abc3d::EglCurrentContext();
354 }
355
356 const char kVertexShaderStr[] = R"CODE(
357 attribute vec4 a_position;
358 attribute vec2 a_texCoord;
359 uniform mat4 u_pvmMatrix;
360 varying vec2 v_texCoord;
361 void main() {
362 gl_Position = u_pvmMatrix * a_position;
363 v_texCoord = a_texCoord;
364 }
365 )CODE";
366 abc3d::AutoShader vertexShader;
367 if (!vertexShader.compile(GL_VERTEX_SHADER, kVertexShaderStr)) {
368 return abc3d::EglCurrentContext();
369 }
370
371 const char kFragmentShaderStr[] = R"CODE(
372 precision mediump float;
373 varying vec2 v_texCoord;
374 uniform sampler2D u_texture;
375 void main() {
376 gl_FragColor = texture2D(u_texture, v_texCoord);
377 }
378 )CODE";
379 abc3d::AutoShader fragmentShader;
380 if (!fragmentShader.compile(GL_FRAGMENT_SHADER, kFragmentShaderStr)) {
381 return abc3d::EglCurrentContext();
382 }
383
384 abc3d::AutoProgram program;
385 if (!program.link(vertexShader.get(), fragmentShader.get())) {
386 return abc3d::EglCurrentContext();
387 }
388
389 const GLint programAttrPositionLoc = program.getAttribLocation("a_position");
390 if (programAttrPositionLoc < 0) {
391 return abc3d::EglCurrentContext();
392 }
393 const GLint programAttrTexCoordLoc = program.getAttribLocation("a_texCoord");
394 if (programAttrTexCoordLoc < 0) {
395 return abc3d::EglCurrentContext();
396 }
397 const GLint programUniformTextureLoc = program.getUniformLocation("u_texture");
398 if (programUniformTextureLoc < 0) {
399 return abc3d::EglCurrentContext();
400 }
401 const GLint programUniformPvmMatrixLoc = program.getUniformLocation("u_pvmMatrix");
402 if (programUniformPvmMatrixLoc < 0) {
403 return abc3d::EglCurrentContext();
404 }
405
406 mEglContext = std::move(context);
407 mGlTestPatternTexture = std::move(testPatternTexture);
408 mGlProgramAttrPositionLoc = programAttrPositionLoc;
409 mGlProgramAttrTexCoordLoc = programAttrTexCoordLoc;
410 mGlProgramUniformTextureLoc = programUniformTextureLoc;
411 mGlProgramUniformPvmMatrixLoc = programUniformPvmMatrixLoc;
412 mGlProgram = std::move(program);
413
414 return std::move(currentContext);
415 }
416
// Tears down per-session state; when `everything` is true also destroys the
// long-lived GL objects, the EGL context and the QEMU sensor channel.
void FakeRotatingCamera::closeImpl(const bool everything) {
    {
        // GL object destruction requires a current EGL context. Having cached
        // streams without being able to make the context current is a fatal
        // invariant violation.
        const abc3d::EglCurrentContext currentContext = mEglContext.getCurrentContext();
        LOG_ALWAYS_FATAL_IF(!mStreamInfoCache.empty() && !currentContext.ok());
        mStreamInfoCache.clear();

        if (everything) {
            // Must happen inside this scope, while the context is current.
            mGlProgram.clear();
            mGlTestPatternTexture.clear();
        }
    }

    if (everything) {
        // The context itself is released only after the GL objects above.
        mEglContext.clear();
        mQemuChannel.reset();
    }
}
434
// Handles one capture request: updates/reuses the result metadata, reads the
// rotation sensor from the QEMU pipe, renders each requested buffer and
// returns {frame duration ns, exposure time ns, result metadata, completed
// buffers, delayed (JPEG) buffers}. On failure every buffer is completed
// with an error and the first element is negative.
std::tuple<int64_t, int64_t, CameraMetadata,
           std::vector<StreamBuffer>, std::vector<DelayedStreamBuffer>>
FakeRotatingCamera::processCaptureRequest(CameraMetadata metadataUpdate,
                                          Span<CachedStreamBuffer*> csbs) {
    // An empty update means "same settings as before": only advance the
    // AF state; otherwise apply the new request settings.
    CameraMetadata resultMetadata = metadataUpdate.metadata.empty() ?
        updateCaptureResultMetadata() :
        applyMetadata(std::move(metadataUpdate));

    const size_t csbsSize = csbs.size();
    std::vector<StreamBuffer> outputBuffers;
    std::vector<DelayedStreamBuffer> delayedOutputBuffers;
    outputBuffers.reserve(csbsSize);

    const abc3d::EglCurrentContext currentContext = mEglContext.getCurrentContext();
    if (!currentContext.ok()) {
        goto fail;
    }

    RenderParams renderParams;
    {
        SensorValues sensorValues;
        if (readSensors(&sensorValues)) {
            static_assert(sizeof(renderParams.cameraParams.rotXYZ3) ==
                          sizeof(sensorValues.rotation));

            memcpy(renderParams.cameraParams.rotXYZ3, sensorValues.rotation,
                   sizeof(sensorValues.rotation));
        } else {
            goto fail;
        }

        constexpr double kR = 5.0;

        // Place the camera on a sphere of radius kR around the scene origin,
        // driven by the first two rotation components (presumably radians
        // from the emulated device orientation -- TODO confirm the sensor
        // pipe's units).
        float* pos3 = renderParams.cameraParams.pos3;
        pos3[0] = -kR * sin(sensorValues.rotation[0]) * sin(sensorValues.rotation[1]);
        pos3[1] = -kR * sin(sensorValues.rotation[0]) * cos(sensorValues.rotation[1]);
        pos3[2] = kR * cos(sensorValues.rotation[0]);
    }

    for (size_t i = 0; i < csbsSize; ++i) {
        CachedStreamBuffer* csb = csbs[i];
        LOG_ALWAYS_FATAL_IF(!csb);  // otherwise mNumBuffersInFlight will be hard

        // Attach the cached StreamInfo to the buffer on first use.
        const StreamInfo* si = csb->getStreamInfo<StreamInfo>();
        if (!si) {
            const auto sii = mStreamInfoCache.find(csb->getStreamId());
            if (sii == mStreamInfoCache.end()) {
                ALOGE("%s:%s:%d could not find stream=%d in the cache",
                      kClass, __func__, __LINE__, csb->getStreamId());
            } else {
                si = &sii->second;
                csb->setStreamInfo(si);
            }
        }

        if (si) {
            captureFrame(*si, renderParams, csb, &outputBuffers, &delayedOutputBuffers);
        } else {
            // Unknown stream: complete the buffer with an error.
            outputBuffers.push_back(csb->finish(false));
        }
    }

    return make_tuple(mFrameDurationNs, kDefaultSensorExposureTimeNs,
                      std::move(resultMetadata), std::move(outputBuffers),
                      std::move(delayedOutputBuffers));

fail:
    // Complete every buffer with an error so the in-flight accounting stays
    // consistent.
    for (size_t i = 0; i < csbsSize; ++i) {
        CachedStreamBuffer* csb = csbs[i];
        LOG_ALWAYS_FATAL_IF(!csb);  // otherwise mNumBuffersInFlight will be hard
        outputBuffers.push_back(csb->finish(false));
    }

    return make_tuple(FAILURE(-1), 0,
                      std::move(resultMetadata), std::move(outputBuffers),
                      std::move(delayedOutputBuffers));
}
512
captureFrame(const StreamInfo & si,const RenderParams & renderParams,CachedStreamBuffer * csb,std::vector<StreamBuffer> * outputBuffers,std::vector<DelayedStreamBuffer> * delayedOutputBuffers) const513 void FakeRotatingCamera::captureFrame(const StreamInfo& si,
514 const RenderParams& renderParams,
515 CachedStreamBuffer* csb,
516 std::vector<StreamBuffer>* outputBuffers,
517 std::vector<DelayedStreamBuffer>* delayedOutputBuffers) const {
518 switch (si.pixelFormat) {
519 case PixelFormat::RGBA_8888:
520 outputBuffers->push_back(csb->finish(captureFrameRGBA(si, renderParams, csb)));
521 break;
522
523 case PixelFormat::YCBCR_420_888:
524 outputBuffers->push_back(csb->finish(captureFrameYUV(si, renderParams, csb)));
525 break;
526
527 case PixelFormat::BLOB:
528 delayedOutputBuffers->push_back(captureFrameJpeg(si, renderParams, csb));
529 break;
530
531 default:
532 ALOGE("%s:%s:%d: unexpected pixelFormat=%" PRIx32,
533 kClass, __func__, __LINE__, static_cast<uint32_t>(si.pixelFormat));
534 outputBuffers->push_back(csb->finish(false));
535 break;
536 }
537 }
538
captureFrameRGBA(const StreamInfo & si,const RenderParams & renderParams,CachedStreamBuffer * csb) const539 bool FakeRotatingCamera::captureFrameRGBA(const StreamInfo& si,
540 const RenderParams& renderParams,
541 CachedStreamBuffer* csb) const {
542 if (!csb->waitAcquireFence(mFrameDurationNs / 2000000)) {
543 return FAILURE(false);
544 }
545
546 return renderIntoRGBA(si, renderParams, csb->getBuffer());
547 }
548
// Renders the scene into the stream's intermediate RGBA buffer, then converts
// it on the CPU into the client's YCbCr buffer. Returns true on success.
bool FakeRotatingCamera::captureFrameYUV(const StreamInfo& si,
                                         const RenderParams& renderParams,
                                         CachedStreamBuffer* csb) const {
    // configure() allocates rgbaBuffer for every non-RGBA stream.
    LOG_ALWAYS_FATAL_IF(!si.rgbaBuffer);
    if (!renderIntoRGBA(si, renderParams, si.rgbaBuffer.get())) {
        return false;
    }

    // Wait (half a frame duration, in ms) until the client buffer is writable.
    if (!csb->waitAcquireFence(mFrameDurationNs / 2000000)) {
        return false;
    }

    // Lock order: source (RGBA) first, destination (YCbCr) second; unlocks
    // happen in reverse on every path below.
    void* rgba = nullptr;
    if (GraphicBufferMapper::get().lock(
            si.rgbaBuffer.get(), static_cast<uint32_t>(BufferUsage::CPU_READ_OFTEN),
            {si.size.width, si.size.height}, &rgba) != NO_ERROR) {
        return FAILURE(false);
    }

    android_ycbcr ycbcr;
    if (GraphicBufferMapper::get().lockYCbCr(
            csb->getBuffer(), static_cast<uint32_t>(BufferUsage::CPU_WRITE_OFTEN),
            {si.size.width, si.size.height}, &ycbcr) != NO_ERROR) {
        // Must not leak the RGBA lock on the failure path.
        LOG_ALWAYS_FATAL_IF(GraphicBufferMapper::get().unlock(si.rgbaBuffer.get()) != NO_ERROR);
        return FAILURE(false);
    }

    const bool converted = conv::rgba2yuv(si.size.width, si.size.height,
                                          static_cast<const uint32_t*>(rgba),
                                          ycbcr);

    LOG_ALWAYS_FATAL_IF(GraphicBufferMapper::get().unlock(csb->getBuffer()) != NO_ERROR);
    LOG_ALWAYS_FATAL_IF(GraphicBufferMapper::get().unlock(si.rgbaBuffer.get()) != NO_ERROR);

    return converted;
}
585
// Captures the frame as NV21 now, but defers the (slow) JPEG compression:
// returns a closure that, when invoked later with `ok`, compresses into the
// BLOB buffer and completes `csb`. All inputs the closure needs are captured
// by value/move because it outlives this call.
DelayedStreamBuffer FakeRotatingCamera::captureFrameJpeg(const StreamInfo& si,
                                                         const RenderParams& renderParams,
                                                         CachedStreamBuffer* csb) const {
    // Empty on render/convert failure; checked inside the closure.
    std::vector<uint8_t> nv21data = captureFrameForCompressing(si, renderParams);

    // Snapshot everything needed later; mCaptureResultMetadata may change
    // before the closure runs.
    const Rect<uint16_t> imageSize = si.size;
    const uint32_t jpegBufferSize = si.blobBufferSize;
    const int64_t frameDurationNs = mFrameDurationNs;
    CameraMetadata metadata = mCaptureResultMetadata;

    return [csb, imageSize, nv21data = std::move(nv21data), metadata = std::move(metadata),
            jpegBufferSize, frameDurationNs](const bool ok) -> StreamBuffer {
        StreamBuffer sb;
        // Compress only if the caller says ok, the capture succeeded and the
        // buffer becomes writable within one frame duration (ms).
        if (ok && !nv21data.empty() && csb->waitAcquireFence(frameDurationNs / 1000000)) {
            sb = csb->finish(compressNV21IntoJpeg(imageSize, nv21data.data(), metadata,
                                                  csb->getBuffer(), jpegBufferSize));
        } else {
            sb = csb->finish(false);
        }

        return sb;
    };
}
609
// Renders the scene into the stream's intermediate RGBA buffer and converts
// it to an NV21 byte vector suitable for JPEG compression. Returns an empty
// vector on any failure.
std::vector<uint8_t>
FakeRotatingCamera::captureFrameForCompressing(const StreamInfo& si,
                                               const RenderParams& renderParams) const {
    if (!renderIntoRGBA(si, renderParams, si.rgbaBuffer.get())) {
        return {};
    }

    void* rgba = nullptr;
    if (GraphicBufferMapper::get().lock(
            si.rgbaBuffer.get(), static_cast<uint32_t>(BufferUsage::CPU_READ_OFTEN),
            {si.size.width, si.size.height}, &rgba) != NO_ERROR) {
        return {};
    }

    // NV21init points the android_ycbcr planes into nv21data's storage.
    std::vector<uint8_t> nv21data(yuv::NV21size(si.size.width, si.size.height));
    const android_ycbcr ycbcr = yuv::NV21init(si.size.width, si.size.height,
                                              nv21data.data());

    const bool converted = conv::rgba2yuv(si.size.width, si.size.height,
                                          static_cast<const uint32_t*>(rgba),
                                          ycbcr);

    LOG_ALWAYS_FATAL_IF(GraphicBufferMapper::get().unlock(si.rgbaBuffer.get()) != NO_ERROR);

    if (converted) {
        return nv21data;
    } else {
        return {};
    }
}
640
// Builds the projection*view matrix for the current camera pose and renders
// the scene at the given image size. `isHardwareBuffer` selects the Y-flip
// workaround (AHardwareBuffer rendering is Y-inverted). glFinish() makes the
// pixels available to the CPU/consumer before returning.
bool FakeRotatingCamera::drawScene(const Rect<uint16_t> imageSize,
                                   const RenderParams& renderParams,
                                   const bool isHardwareBuffer) const {
    float pvMatrix44[16];
    {
        float projectionMatrix44[16];
        float viewMatrix44[16];

        // This matrix takes into account specific behaviors below:
        // * The Y axis, when rendering into an AHardwareBuffer, goes down
        //   while it goes up everywhere else (e.g. when rendering to
        //   `EGLSurface`).
        // * We set `sensorOrientation=90` because a lot of places in Android
        //   and 3Ps assume this and don't work properly with
        //   `sensorOrientation=0`.
        const float workaroundMatrix44[16] = {
            0, (isHardwareBuffer ? -1.0f : 1.0f), 0, 0,
           -1,                                 0, 0, 0,
            0,                                 0, 1, 0,
            0,                                 0, 0, 1,
        };

        {
            constexpr double kNear = 1.0;
            constexpr double kFar = 10.0;

            // We use `height` to calculate `right` because the image is
            // rotated 90 degrees (sensorOrientation=90).
            const double right = kNear * (.5 * getSensorSize().height / getSensorDPI() / getDefaultFocalLength());
            const double top = right / imageSize.width * imageSize.height;
            // pvMatrix44 temporarily holds the raw frustum projection.
            abc3d::frustum(pvMatrix44, -right, right, -top, top,
                           kNear, kFar);
        }

        abc3d::mulM44(projectionMatrix44, pvMatrix44, workaroundMatrix44);

        {
            const auto& cam = renderParams.cameraParams;
            abc3d::lookAtXyzRot(viewMatrix44, cam.pos3, cam.rotXYZ3);
        }

        abc3d::mulM44(pvMatrix44, projectionMatrix44, viewMatrix44);
    }

    glViewport(0, 0, imageSize.width, imageSize.height);
    const bool result = drawSceneImpl(pvMatrix44);
    glFinish();
    return result;
}
688
// Draws the scene: a single textured 2x2 quad lying in the XZ plane at the
// origin, viewed through `pvMatrix44`, over a dark green clear color.
// Always returns true.
bool FakeRotatingCamera::drawSceneImpl(const float pvMatrix44[]) const {
    // Quad center and half-size.
    constexpr float kX = 0;
    constexpr float kY = 0;
    constexpr float kZ = 0;
    constexpr float kS = 1;

    // Interleaved layout: 3 position floats + 2 texcoord floats per vertex
    // (stride 5 * sizeof(GLfloat) below).
    const GLfloat vVertices[] = {
       -kS + kX, kY, kZ - kS,  // Position 0
        0, 0,                  // TexCoord 0
       -kS + kX, kY, kZ + kS,  // Position 1
        0, 1,                  // TexCoord 1
        kS + kX, kY, kZ + kS,  // Position 2
        1, 1,                  // TexCoord 2
        kS + kX, kY, kZ - kS,  // Position 3
        1, 0                   // TexCoord 3
    };

    // Two triangles forming the quad.
    static const GLushort indices[] = { 0, 1, 2, 0, 2, 3 };

    glClearColor(0.2, 0.3, 0.2, 1.0);
    glClear(GL_COLOR_BUFFER_BIT);

    glUseProgram(mGlProgram.get());
    glVertexAttribPointer(mGlProgramAttrPositionLoc, 3, GL_FLOAT, GL_FALSE,
                          5 * sizeof(GLfloat), &vVertices[0]);
    glEnableVertexAttribArray(mGlProgramAttrPositionLoc);
    glVertexAttribPointer(mGlProgramAttrTexCoordLoc, 2, GL_FLOAT, GL_FALSE,
                          5 * sizeof(GLfloat), &vVertices[3]);
    glEnableVertexAttribArray(mGlProgramAttrTexCoordLoc);
    // NOTE(review): GLES 2.0 requires transpose == GL_FALSE for
    // glUniformMatrix4fv; `true` here presumably relies on the emulator's
    // host GL accepting row-major upload -- confirm.
    glUniformMatrix4fv(mGlProgramUniformPvmMatrixLoc, 1, true, pvMatrix44);

    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, mGlTestPatternTexture.get());
    glUniform1i(mGlProgramUniformTextureLoc, 0);

    glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, indices);

    return true;
}
728
// Renders the scene into an arbitrary RGBA gralloc buffer by wrapping it in
// a GraphicBuffer, importing it as an EGLImage, binding that to a texture
// and attaching the texture to a framebuffer object. Returns false if the
// buffer cannot be imported.
bool FakeRotatingCamera::renderIntoRGBA(const StreamInfo& si,
                                        const RenderParams& renderParams,
                                        const native_handle_t* rgbaBuffer) const {
    // WRAP_HANDLE: the GraphicBuffer borrows the handle, it does not take
    // ownership.
    const auto gb = sp<GraphicBuffer>::make(
        rgbaBuffer, GraphicBuffer::WRAP_HANDLE, si.size.width,
        si.size.height, static_cast<int>(si.pixelFormat), 1,
        static_cast<uint64_t>(si.usage), si.stride);

    const EGLClientBuffer clientBuf =
        eglGetNativeClientBufferANDROID(gb->toAHardwareBuffer());
    if (!clientBuf) {
        return FAILURE(false);
    }

    const abc3d::AutoImageKHR eglImage(mEglContext.getDisplay(), clientBuf);
    if (!eglImage.ok()) {
        return false;
    }

    // Texture backed by the client buffer; rendering to the FBO below writes
    // straight into the gralloc buffer.
    abc3d::AutoTexture fboTex(GL_TEXTURE_2D);
    glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, eglImage.get());

    abc3d::AutoFrameBuffer fbo;
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0,
                           GL_TEXTURE_2D, fboTex.get(), 0);

    // drawing into EGLClientBuffer is Y-flipped on Android
    return drawScene(si.size, renderParams, true);
}
758
// Reads the emulated rotation sensor over the QEMU pipe. Wire protocol:
// write a 4-byte length followed by the "get" command (including its NUL
// terminator), then read a 4-byte payload length followed by the payload,
// which must be exactly sizeof(SensorValues). Returns false on any I/O or
// size mismatch.
bool FakeRotatingCamera::readSensors(SensorValues* vals) {
    static const char kReadCommand[] = "get";

    uint32_t len = sizeof(kReadCommand);  // 4: "get" plus the NUL
    if (qemu_pipe_write_fully(mQemuChannel.get(), &len, sizeof(len))) {
        return FAILURE(false);
    }
    if (qemu_pipe_write_fully(mQemuChannel.get(), &kReadCommand[0], sizeof(kReadCommand))) {
        return FAILURE(false);
    }
    if (qemu_pipe_read_fully(mQemuChannel.get(), &len, sizeof(len))) {
        return FAILURE(false);
    }
    if (len != sizeof(*vals)) {
        return FAILURE(false);
    }
    if (qemu_pipe_read_fully(mQemuChannel.get(), vals, len)) {
        return FAILURE(false);
    }

    return true;
}
781
// Applies new request settings: caches the frame duration, advances the AF
// state machine from the request's AF mode/trigger, fills in the
// result-only tags and stores the serialized result in
// mCaptureResultMetadata. Returns the result to report for THIS request;
// note that the stored copy has ANDROID_CONTROL_AF_TRIGGER reset to IDLE so
// a trigger is not replayed on subsequent no-update requests.
CameraMetadata FakeRotatingCamera::applyMetadata(const CameraMetadata& metadata) {
    const camera_metadata_t* const raw =
        reinterpret_cast<const camera_metadata_t*>(metadata.metadata.data());

    mFrameDurationNs = getFrameDuration(raw, kDefaultFrameDurationNs,
                                        kMinFrameDurationNs, kMaxFrameDurationNs);

    // find_camera_metadata_ro_entry returns non-zero on failure, so a missing
    // tag falls back to the first (default) value.
    camera_metadata_ro_entry_t entry;
    const camera_metadata_enum_android_control_af_mode_t afMode =
        find_camera_metadata_ro_entry(raw, ANDROID_CONTROL_AF_MODE, &entry) ?
            ANDROID_CONTROL_AF_MODE_OFF :
            static_cast<camera_metadata_enum_android_control_af_mode_t>(entry.data.u8[0]);

    const camera_metadata_enum_android_control_af_trigger_t afTrigger =
        find_camera_metadata_ro_entry(raw, ANDROID_CONTROL_AF_TRIGGER, &entry) ?
            ANDROID_CONTROL_AF_TRIGGER_IDLE :
            static_cast<camera_metadata_enum_android_control_af_trigger_t>(entry.data.u8[0]);

    // af = {AF state, focus distance}.
    const auto af = mAFStateMachine(afMode, afTrigger);

    CameraMetadataMap m = parseCameraMetadataMap(metadata);

    // Result-only tags this fake camera reports as fixed/derived values.
    m[ANDROID_CONTROL_AE_STATE] = uint8_t(ANDROID_CONTROL_AE_STATE_CONVERGED);
    m[ANDROID_CONTROL_AF_STATE] = uint8_t(af.first);
    m[ANDROID_CONTROL_AWB_STATE] = uint8_t(ANDROID_CONTROL_AWB_STATE_CONVERGED);
    m[ANDROID_FLASH_STATE] = uint8_t(ANDROID_FLASH_STATE_UNAVAILABLE);
    m[ANDROID_LENS_APERTURE] = getDefaultAperture();
    m[ANDROID_LENS_FOCUS_DISTANCE] = af.second;
    m[ANDROID_LENS_STATE] = uint8_t(getAfLensState(af.first));
    m[ANDROID_REQUEST_PIPELINE_DEPTH] = uint8_t(4);
    m[ANDROID_SENSOR_FRAME_DURATION] = mFrameDurationNs;
    m[ANDROID_SENSOR_EXPOSURE_TIME] = kDefaultSensorExposureTimeNs;
    m[ANDROID_SENSOR_SENSITIVITY] = getDefaultSensorSensitivity();
    m[ANDROID_SENSOR_TIMESTAMP] = int64_t(0);  // filled in by the caller
    m[ANDROID_SENSOR_ROLLING_SHUTTER_SKEW] = kMinSensorExposureTimeNs;
    m[ANDROID_STATISTICS_SCENE_FLICKER] = uint8_t(ANDROID_STATISTICS_SCENE_FLICKER_NONE);

    std::optional<CameraMetadata> maybeSerialized =
        serializeCameraMetadataMap(m);

    if (maybeSerialized) {
        mCaptureResultMetadata = std::move(maybeSerialized.value());
    }

    {   // reset ANDROID_CONTROL_AF_TRIGGER to IDLE
        // This mutates the STORED metadata in place; the copy taken below
        // (before the update) preserves the original trigger for the value
        // returned to the caller.
        camera_metadata_t* const raw =
            reinterpret_cast<camera_metadata_t*>(mCaptureResultMetadata.metadata.data());

        camera_metadata_ro_entry_t entry;
        const auto newTriggerValue = ANDROID_CONTROL_AF_TRIGGER_IDLE;

        if (find_camera_metadata_ro_entry(raw, ANDROID_CONTROL_AF_TRIGGER, &entry)) {
            // Tag absent: nothing to reset.
            return mCaptureResultMetadata;
        } else if (entry.data.i32[0] == newTriggerValue) {
            // NOTE(review): AF_TRIGGER is a u8 tag; reading data.i32 (and
            // updating with an enum-sized value below) looks suspicious --
            // confirm against camera_metadata tag types.
            return mCaptureResultMetadata;
        } else {
            CameraMetadata result = mCaptureResultMetadata;

            if (update_camera_metadata_entry(raw, entry.index, &newTriggerValue, 1, nullptr)) {
                ALOGW("%s:%s:%d: update_camera_metadata_entry(ANDROID_CONTROL_AF_TRIGGER) "
                      "failed", kClass, __func__, __LINE__);
            }

            return result;
        }
    }
}
849
// Refreshes the stored capture result for a request with no settings change:
// advances the AF state machine and patches only ANDROID_CONTROL_AF_STATE
// and ANDROID_LENS_FOCUS_DISTANCE in place (in-place updates are possible
// because the value sizes do not change). Returns a compacted copy.
CameraMetadata FakeRotatingCamera::updateCaptureResultMetadata() {
    camera_metadata_t* const raw =
        reinterpret_cast<camera_metadata_t*>(mCaptureResultMetadata.metadata.data());

    // af = {AF state, focus distance}.
    const auto af = mAFStateMachine();

    camera_metadata_ro_entry_t entry;

    // Both helpers return non-zero on failure; failures are logged but not
    // fatal -- the previous values simply remain in the result.
    if (find_camera_metadata_ro_entry(raw, ANDROID_CONTROL_AF_STATE, &entry)) {
        ALOGW("%s:%s:%d: find_camera_metadata_ro_entry(ANDROID_CONTROL_AF_STATE) failed",
              kClass, __func__, __LINE__);
    } else if (update_camera_metadata_entry(raw, entry.index, &af.first, 1, nullptr)) {
        ALOGW("%s:%s:%d: update_camera_metadata_entry(ANDROID_CONTROL_AF_STATE) failed",
              kClass, __func__, __LINE__);
    }

    if (find_camera_metadata_ro_entry(raw, ANDROID_LENS_FOCUS_DISTANCE, &entry)) {
        ALOGW("%s:%s:%d: find_camera_metadata_ro_entry(ANDROID_LENS_FOCUS_DISTANCE) failed",
              kClass, __func__, __LINE__);
    } else if (update_camera_metadata_entry(raw, entry.index, &af.second, 1, nullptr)) {
        ALOGW("%s:%s:%d: update_camera_metadata_entry(ANDROID_LENS_FOCUS_DISTANCE) failed",
              kClass, __func__, __LINE__);
    }

    return metadataCompact(mCaptureResultMetadata);
}
876
877 ////////////////////////////////////////////////////////////////////////////////
878
getTargetFpsRanges() const879 Span<const std::pair<int32_t, int32_t>> FakeRotatingCamera::getTargetFpsRanges() const {
880 // ordered to satisfy testPreviewFpsRangeByCamera
881 static const std::pair<int32_t, int32_t> targetFpsRanges[] = {
882 {kMinFPS, kMedFPS},
883 {kMedFPS, kMedFPS},
884 {kMinFPS, kMaxFPS},
885 {kMaxFPS, kMaxFPS},
886 };
887
888 return targetFpsRanges;
889 }
890
getAvailableThumbnailSizes() const891 Span<const Rect<uint16_t>> FakeRotatingCamera::getAvailableThumbnailSizes() const {
892 static const Rect<uint16_t> availableThumbnailSizes[] = {
893 {0, 0},
894 {11 * 4, 9 * 4},
895 {16 * 4, 9 * 4},
896 {4 * 16, 3 * 16},
897 };
898
899 return availableThumbnailSizes;
900 }
901
// Reports whether this camera instance identifies as back-facing
// (simple accessor over the stored flag).
bool FakeRotatingCamera::isBackFacing() const {
    return mIsBackFacing;
}
905
getAvailableFocalLength() const906 Span<const float> FakeRotatingCamera::getAvailableFocalLength() const {
907 static const float availableFocalLengths[] = {
908 kDefaultFocalLength
909 };
910
911 return availableFocalLengths;
912 }
913
getMaxNumOutputStreams() const914 std::tuple<int32_t, int32_t, int32_t> FakeRotatingCamera::getMaxNumOutputStreams() const {
915 return {
916 0, // raw
917 2, // processed
918 1, // jpeg
919 };
920 }
921
getSupportedPixelFormats() const922 Span<const PixelFormat> FakeRotatingCamera::getSupportedPixelFormats() const {
923 static const PixelFormat supportedPixelFormats[] = {
924 PixelFormat::IMPLEMENTATION_DEFINED,
925 PixelFormat::YCBCR_420_888,
926 PixelFormat::RGBA_8888,
927 PixelFormat::BLOB,
928 };
929
930 return {supportedPixelFormats};
931 }
932
// Shortest supported frame duration in nanoseconds
// (kOneSecondNs / kMaxFPS, i.e. the duration at the highest frame rate).
int64_t FakeRotatingCamera::getMinFrameDurationNs() const {
    return kMinFrameDurationNs;
}
936
getSensorSize() const937 Rect<uint16_t> FakeRotatingCamera::getSensorSize() const {
938 return {1920, 1080};
939 }
940
// Color filter arrangement advertised for the fake sensor: plain RGB
// (not a Bayer mosaic), as this camera produces rendered frames.
uint8_t FakeRotatingCamera::getSensorColorFilterArrangement() const {
    return ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGB;
}
944
// Returns the {min, max} sensor exposure time range in nanoseconds.
std::pair<int64_t, int64_t> FakeRotatingCamera::getSensorExposureTimeRange() const {
    return {kMinSensorExposureTimeNs, kMaxSensorExposureTimeNs};
}
948
// Longest supported frame duration in nanoseconds
// (kOneSecondNs / kMinFPS, i.e. the duration at the lowest frame rate).
int64_t FakeRotatingCamera::getSensorMaxFrameDuration() const {
    return kMaxFrameDurationNs;
}
952
getSupportedResolutions() const953 Span<const Rect<uint16_t>> FakeRotatingCamera::getSupportedResolutions() const {
954 static const Rect<uint16_t> supportedResolutions[] = {
955 {176, 144},
956 {320, 240},
957 {640, 480},
958 {1024, 576},
959 {1280, 720},
960 {1600, 900},
961 {1920, 1080},
962 };
963
964 return supportedResolutions;
965 }
966
getDefaultTargetFpsRange(const RequestTemplate tpl) const967 std::pair<int32_t, int32_t> FakeRotatingCamera::getDefaultTargetFpsRange(const RequestTemplate tpl) const {
968 switch (tpl) {
969 case RequestTemplate::PREVIEW:
970 case RequestTemplate::VIDEO_RECORD:
971 case RequestTemplate::VIDEO_SNAPSHOT:
972 return {kMaxFPS, kMaxFPS};
973
974 default:
975 return {kMinFPS, kMaxFPS};
976 }
977 }
978
// Default sensor exposure time in nanoseconds used when a request does
// not specify one.
int64_t FakeRotatingCamera::getDefaultSensorExpTime() const {
    return kDefaultSensorExposureTimeNs;
}
982
// Default sensor frame duration in nanoseconds.
// NOTE(review): this returns kMinFrameDurationNs (the kMaxFPS duration)
// even though a separate kDefaultFrameDurationNs (kMedFPS) constant is
// defined at the top of the file — confirm which value is intended.
int64_t FakeRotatingCamera::getDefaultSensorFrameDuration() const {
    return kMinFrameDurationNs;
}
986
// Default lens focal length; the same single value is advertised by
// getAvailableFocalLength().
float FakeRotatingCamera::getDefaultFocalLength() const {
    return kDefaultFocalLength;
}
990
991 } // namespace hw
992 } // namespace implementation
993 } // namespace provider
994 } // namespace camera
995 } // namespace hardware
996 } // namespace android
997