1 /*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 //#define LOG_NDEBUG 0
18 //#define LOG_NNDEBUG 0
19 #include "system/graphics-base-v1.1.h"
20 #define LOG_TAG "EmulatedSensor"
21 #define ATRACE_TAG ATRACE_TAG_CAMERA
22
23 #ifdef LOG_NNDEBUG
24 #define ALOGVV(...) ALOGV(__VA_ARGS__)
25 #else
26 #define ALOGVV(...) ((void)0)
27 #endif
28
29 #include <android/hardware/graphics/common/1.2/types.h>
30 #include <cutils/properties.h>
31 #include <inttypes.h>
32 #include <libyuv.h>
33 #include <memory.h>
34 #include <system/camera_metadata.h>
35 #include <utils/Log.h>
36 #include <utils/Trace.h>
37
38 #include <cmath>
39 #include <cstdlib>
40
41 #include "EmulatedSensor.h"
42 #include "utils/ExifUtils.h"
43 #include "utils/HWLUtils.h"
44
45 namespace android {
46
47 using android::google_camera_hal::ErrorCode;
48 using google_camera_hal::HalCameraMetadata;
49 using google_camera_hal::MessageType;
50 using google_camera_hal::NotifyMessage;
51
52 using android::hardware::graphics::common::V1_2::Dataspace;
53
54 // Copied from ColorSpace.java (see Named)
55 enum ColorSpaceNamed {
56 SRGB,
57 LINEAR_SRGB,
58 EXTENDED_SRGB,
59 LINEAR_EXTENDED_SRGB,
60 BT709,
61 BT2020,
62 DCI_P3,
63 DISPLAY_P3,
64 NTSC_1953,
65 SMPTE_C,
66 ADOBE_RGB,
67 PRO_PHOTO_RGB,
68 ACES,
69 ACESCG,
70 CIE_XYZ,
71 CIE_LAB
72 };
73
74 const uint32_t EmulatedSensor::kRegularSceneHandshake = 1; // Scene handshake divider
75 const uint32_t EmulatedSensor::kReducedSceneHandshake = 2; // Scene handshake divider
76
77 // 1 us - 30 sec
78 const nsecs_t EmulatedSensor::kSupportedExposureTimeRange[2] = {1000LL,
79 30000000000LL};
80
81 // ~1/30 s - 30 sec
82 const nsecs_t EmulatedSensor::kSupportedFrameDurationRange[2] = {33331760LL,
83 30000000000LL};
84
85 const int32_t EmulatedSensor::kSupportedSensitivityRange[2] = {100, 1600};
86 const int32_t EmulatedSensor::kDefaultSensitivity = 100; // ISO
87 const nsecs_t EmulatedSensor::kDefaultExposureTime = ms2ns(15);
88 const nsecs_t EmulatedSensor::kDefaultFrameDuration = ms2ns(33);
89 // Deadline within which we should return the results as soon as possible to
90 // avoid skewing the frame cycle due to external delays.
91 const nsecs_t EmulatedSensor::kReturnResultThreshod = 3 * kDefaultFrameDuration;
92
93 // Sensor defaults
94 const uint8_t EmulatedSensor::kSupportedColorFilterArrangement =
95 ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;
96 const uint32_t EmulatedSensor::kDefaultMaxRawValue = 4000;
97 const uint32_t EmulatedSensor::kDefaultBlackLevelPattern[4] = {1000, 1000, 1000,
98 1000};
99
100 const nsecs_t EmulatedSensor::kMinVerticalBlank = 10000L;
101
102 // Sensor sensitivity
103 const float EmulatedSensor::kSaturationVoltage = 0.520f;
104 const uint32_t EmulatedSensor::kSaturationElectrons = 2000;
105 const float EmulatedSensor::kVoltsPerLuxSecond = 0.100f;
106
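// Electrons generated per lux-second of exposure, derived from the saturation
// voltage, saturation electron count and volts-per-lux-second values above.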
107 const float EmulatedSensor::kElectronsPerLuxSecond =
108 EmulatedSensor::kSaturationElectrons / EmulatedSensor::kSaturationVoltage *
109 EmulatedSensor::kVoltsPerLuxSecond;
110
111 const float EmulatedSensor::kReadNoiseStddevBeforeGain = 1.177; // in electrons
112 const float EmulatedSensor::kReadNoiseStddevAfterGain =
113 2.100; // in digital counts
114 const float EmulatedSensor::kReadNoiseVarBeforeGain =
115 EmulatedSensor::kReadNoiseStddevBeforeGain *
116 EmulatedSensor::kReadNoiseStddevBeforeGain;
117 const float EmulatedSensor::kReadNoiseVarAfterGain =
118 EmulatedSensor::kReadNoiseStddevAfterGain *
119 EmulatedSensor::kReadNoiseStddevAfterGain;
120
121 const uint32_t EmulatedSensor::kMaxRAWStreams = 1;
122 const uint32_t EmulatedSensor::kMaxProcessedStreams = 3;
123 const uint32_t EmulatedSensor::kMaxStallingStreams = 2;
124 const uint32_t EmulatedSensor::kMaxInputStreams = 1;
125
126 const uint32_t EmulatedSensor::kMaxLensShadingMapSize[2]{64, 64};
127 const int32_t EmulatedSensor::kFixedBitPrecision = 64;  // 2^6, i.e. 6 fractional bits
128 // In fixed-point math, saturation point of sensor after gain
129 const int32_t EmulatedSensor::kSaturationPoint = kFixedBitPrecision * 255;
130 const camera_metadata_rational EmulatedSensor::kNeutralColorPoint[3] = {
131 {255, 1}, {255, 1}, {255, 1}};
132 const float EmulatedSensor::kGreenSplit = 1.f; // No divergence
133 // Reduce memory usage by allowing only one buffer in sensor, one in jpeg
134 // compressor and one pending request to avoid stalls.
135 const uint8_t EmulatedSensor::kPipelineDepth = 3;
136
137 const camera_metadata_rational EmulatedSensor::kDefaultColorTransform[9] = {
138 {1, 1}, {0, 1}, {0, 1}, {0, 1}, {1, 1}, {0, 1}, {0, 1}, {0, 1}, {1, 1}};
139 const float EmulatedSensor::kDefaultColorCorrectionGains[4] = {1.0f, 1.0f, 1.0f,
140 1.0f};
141
142 const float EmulatedSensor::kDefaultToneMapCurveRed[4] = {.0f, .0f, 1.f, 1.f};
143 const float EmulatedSensor::kDefaultToneMapCurveGreen[4] = {.0f, .0f, 1.f, 1.f};
144 const float EmulatedSensor::kDefaultToneMapCurveBlue[4] = {.0f, .0f, 1.f, 1.f};
145
146 // All XY matrix coefficients sourced from
147 // https://developer.android.com/reference/kotlin/android/graphics/ColorSpace.Named
148 // and XYZ coefficients calculated using the method found in
149 // ColorSpace.Rgb.computeXyzMatrix
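// Default member values below correspond to sRGB (identical to kSrgbXyzMatrix).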
150 struct XyzMatrix {
151 float xR = 3.2406f;
152 float yR = -1.5372f;
153 float zR = -0.4986f;
154 float xG = -0.9689f;
155 float yG = 1.8758f;
156 float zG = 0.0415f;
157 float xB = 0.0557f;
158 float yB = -0.2040f;
159 float zB = 1.0570f;
160 };
161
162 static const XyzMatrix kSrgbXyzMatrix = {3.2406f, -1.5372f, -0.4986f,
163 -0.9689f, 1.8758f, 0.0415f,
164 0.0557f, -0.2040f, 1.0570f};
165
166 static const XyzMatrix kDisplayP3Matrix = {2.4931f, -0.9316f, -0.4023f,
167 -0.8291f, 1.7627f, 0.0234f,
168 0.0361f, -0.0761f, 0.9570f};
169
170 static const XyzMatrix kBt709Matrix = {3.2410f, -1.5374f, -0.4986f,
171 -0.9692f, 1.8760f, 0.0416f,
172 0.0556f, -0.2040f, 1.0570f};
173
174 static const XyzMatrix kBt2020Matrix = {1.7167f, -0.3556f, -0.2534f,
175 -0.6666f, 1.6164f, 0.0158f,
176 0.0177f, -0.0428f, 0.9421f};
177
178 /** A few utility functions for math, normal distributions */
179
180 // Take advantage of IEEE floating-point format to calculate an approximate
181 // square root. Accurate to within +-3.6%
182 float sqrtf_approx(float r) {
183 // Modifier is based on IEEE floating-point representation; the
184 // manipulations boil down to finding approximate log2, dividing by two, and
185 // then inverting the log2. A bias is added to make the relative error
186 // symmetric about the real answer.
187 const int32_t modifier = 0x1FBB4000;
188
189 int32_t r_i = *(int32_t*)(&r);
190 r_i = (r_i >> 1) + modifier;
191
192 return *(float*)(&r_i);
193 }
194
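// The constructor precomputes the sRGB, SMPTE 170M and HLG gamma lookup
// tables once, so tone mapping can later be applied with simple table lookups.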
195 EmulatedSensor::EmulatedSensor() : Thread(false), got_vsync_(false) {
196 gamma_table_sRGB_.resize(kSaturationPoint + 1);
197 gamma_table_smpte170m_.resize(kSaturationPoint + 1);
198 gamma_table_hlg_.resize(kSaturationPoint + 1);
199 for (int32_t i = 0; i <= kSaturationPoint; i++) {
200 gamma_table_sRGB_[i] = ApplysRGBGamma(i, kSaturationPoint);
201 gamma_table_smpte170m_[i] = ApplySMPTE170MGamma(i, kSaturationPoint);
202 gamma_table_hlg_[i] = ApplyHLGGamma(i, kSaturationPoint);
203 }
204 }
205
206 EmulatedSensor::~EmulatedSensor() {
207 ShutDown();
208 }
209
210 bool EmulatedSensor::AreCharacteristicsSupported(
211 const SensorCharacteristics& characteristics) {
212 if ((characteristics.width == 0) || (characteristics.height == 0)) {
213 ALOGE("%s: Invalid sensor size %zux%zu", __FUNCTION__,
214 characteristics.width, characteristics.height);
215 return false;
216 }
217
218 if ((characteristics.full_res_width == 0) ||
219 (characteristics.full_res_height == 0)) {
220 ALOGE("%s: Invalid sensor full res size %zux%zu", __FUNCTION__,
221 characteristics.full_res_width, characteristics.full_res_height);
222 return false;
223 }
224
225 if (characteristics.is_10bit_dynamic_range_capable) {
226 // We support only HLG10 at the moment
227 const auto& hlg10_entry = characteristics.dynamic_range_profiles.find(
228 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10);
229 if ((characteristics.dynamic_range_profiles.size() != 1) ||
230 (hlg10_entry == characteristics.dynamic_range_profiles.end())) {
231 ALOGE("%s: Only support for HLG10 is available!", __FUNCTION__);
232 return false;
233 }
234 }
235
236 if ((characteristics.exposure_time_range[0] >=
237 characteristics.exposure_time_range[1]) ||
238 ((characteristics.exposure_time_range[0] < kSupportedExposureTimeRange[0]) ||
239 (characteristics.exposure_time_range[1] >
240 kSupportedExposureTimeRange[1]))) {
241 ALOGE("%s: Unsupported exposure range", __FUNCTION__);
242 return false;
243 }
244
245 if ((characteristics.frame_duration_range[0] >=
246 characteristics.frame_duration_range[1]) ||
247 ((characteristics.frame_duration_range[0] <
248 kSupportedFrameDurationRange[0]) ||
249 (characteristics.frame_duration_range[1] >
250 kSupportedFrameDurationRange[1]))) {
251 ALOGE("%s: Unsupported frame duration range", __FUNCTION__);
252 return false;
253 }
254
255 if ((characteristics.sensitivity_range[0] >=
256 characteristics.sensitivity_range[1]) ||
257 ((characteristics.sensitivity_range[0] < kSupportedSensitivityRange[0]) ||
258 (characteristics.sensitivity_range[1] > kSupportedSensitivityRange[1])) ||
259 (!((kDefaultSensitivity >= characteristics.sensitivity_range[0]) &&
260 (kDefaultSensitivity <= characteristics.sensitivity_range[1])))) {
261 ALOGE("%s: Unsupported sensitivity range", __FUNCTION__);
262 return false;
263 }
264
265 if (characteristics.color_arangement != kSupportedColorFilterArrangement) {
266 ALOGE("%s: Unsupported color arrangement!", __FUNCTION__);
267 return false;
268 }
269
270 for (const auto& blackLevel : characteristics.black_level_pattern) {
271 if (blackLevel >= characteristics.max_raw_value) {
272 ALOGE("%s: Black level matches or exceeds max RAW value!", __FUNCTION__);
273 return false;
274 }
275 }
276
277 if ((characteristics.frame_duration_range[0] / characteristics.height) == 0) {
278 ALOGE("%s: Zero row readout time!", __FUNCTION__);
279 return false;
280 }
281
282 if (characteristics.max_raw_streams > kMaxRAWStreams) {
283 ALOGE("%s: RAW streams maximum %u exceeds supported maximum %u",
284 __FUNCTION__, characteristics.max_raw_streams, kMaxRAWStreams);
285 return false;
286 }
287
288 if (characteristics.max_processed_streams > kMaxProcessedStreams) {
289 ALOGE("%s: Processed streams maximum %u exceeds supported maximum %u",
290 __FUNCTION__, characteristics.max_processed_streams,
291 kMaxProcessedStreams);
292 return false;
293 }
294
295 if (characteristics.max_stalling_streams > kMaxStallingStreams) {
296 ALOGE("%s: Stalling streams maximum %u exceeds supported maximum %u",
297 __FUNCTION__, characteristics.max_stalling_streams,
298 kMaxStallingStreams);
299 return false;
300 }
301
302 if (characteristics.max_input_streams > kMaxInputStreams) {
303 ALOGE("%s: Input streams maximum %u exceeds supported maximum %u",
304 __FUNCTION__, characteristics.max_input_streams, kMaxInputStreams);
305 return false;
306 }
307
308 if ((characteristics.lens_shading_map_size[0] > kMaxLensShadingMapSize[0]) ||
309 (characteristics.lens_shading_map_size[1] > kMaxLensShadingMapSize[1])) {
310 ALOGE("%s: Lens shading map [%dx%d] exceeds supprorted maximum [%dx%d]",
311 __FUNCTION__, characteristics.lens_shading_map_size[0],
312 characteristics.lens_shading_map_size[1], kMaxLensShadingMapSize[0],
313 kMaxLensShadingMapSize[1]);
314 return false;
315 }
316
317 if (characteristics.max_pipeline_depth < kPipelineDepth) {
318 ALOGE("%s: Pipeline depth %d smaller than supprorted minimum %d",
319 __FUNCTION__, characteristics.max_pipeline_depth, kPipelineDepth);
320 return false;
321 }
322
323 return true;
324 }
325
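// Splits a stream combination into streams intended for the default sensor
// pixel mode, streams intended for maximum resolution mode, and input
// (reprocessing) streams. A stream may land in both output groups when it is
// intended for both modes.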
326 static void SplitStreamCombination(
327 const StreamConfiguration& original_config,
328 StreamConfiguration* default_mode_config,
329 StreamConfiguration* max_resolution_mode_config,
330 StreamConfiguration* input_stream_config) {
331 // Go through the streams
332 if (default_mode_config == nullptr || max_resolution_mode_config == nullptr ||
333 input_stream_config == nullptr) {
334 ALOGE("%s: Input stream / output stream configs are nullptr", __FUNCTION__);
335 return;
336 }
337 for (const auto& stream : original_config.streams) {
338 if (stream.stream_type == google_camera_hal::StreamType::kInput) {
339 input_stream_config->streams.push_back(stream);
340 continue;
341 }
342 if (stream.intended_for_default_resolution_mode) {
343 default_mode_config->streams.push_back(stream);
344 }
345 if (stream.intended_for_max_resolution_mode) {
346 max_resolution_mode_config->streams.push_back(stream);
347 }
348 }
349 }
350
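// Validates a stream combination against both the default and the maximum
// resolution configuration maps. Input streams are accepted if either map
// supports them.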
IsStreamCombinationSupported(uint32_t logical_id,const StreamConfiguration & config,StreamConfigurationMap & default_config_map,StreamConfigurationMap & max_resolution_config_map,const PhysicalStreamConfigurationMap & physical_map,const PhysicalStreamConfigurationMap & physical_map_max_resolution,const LogicalCharacteristics & sensor_chars)351 bool EmulatedSensor::IsStreamCombinationSupported(
352 uint32_t logical_id, const StreamConfiguration& config,
353 StreamConfigurationMap& default_config_map,
354 StreamConfigurationMap& max_resolution_config_map,
355 const PhysicalStreamConfigurationMap& physical_map,
356 const PhysicalStreamConfigurationMap& physical_map_max_resolution,
357 const LogicalCharacteristics& sensor_chars) {
358 StreamConfiguration default_mode_config, max_resolution_mode_config,
359 input_stream_config;
360 SplitStreamCombination(config, &default_mode_config,
361 &max_resolution_mode_config, &input_stream_config);
362
363 return IsStreamCombinationSupported(logical_id, default_mode_config,
364 default_config_map, physical_map,
365 sensor_chars) &&
366 IsStreamCombinationSupported(
367 logical_id, max_resolution_mode_config, max_resolution_config_map,
368 physical_map_max_resolution, sensor_chars, /*is_max_res*/ true) &&
369
370 (IsStreamCombinationSupported(logical_id, input_stream_config,
371 default_config_map, physical_map,
372 sensor_chars) ||
373 IsStreamCombinationSupported(
374 logical_id, input_stream_config, max_resolution_config_map,
375 physical_map_max_resolution, sensor_chars, /*is_max_res*/ true));
376 }
377
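// Validates a single-mode stream combination: checks rotation, format, data
// space, size and use case for every stream, and enforces the per-camera
// limits on RAW, processed, stalling and input stream counts.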
IsStreamCombinationSupported(uint32_t logical_id,const StreamConfiguration & config,StreamConfigurationMap & config_map,const PhysicalStreamConfigurationMap & physical_map,const LogicalCharacteristics & sensor_chars,bool is_max_res)378 bool EmulatedSensor::IsStreamCombinationSupported(
379 uint32_t logical_id, const StreamConfiguration& config,
380 StreamConfigurationMap& config_map,
381 const PhysicalStreamConfigurationMap& physical_map,
382 const LogicalCharacteristics& sensor_chars, bool is_max_res) {
383 uint32_t input_stream_count = 0;
384 // Map from physical camera id to number of streams for that physical camera
385 std::map<uint32_t, uint32_t> raw_stream_count;
386 std::map<uint32_t, uint32_t> processed_stream_count;
387 std::map<uint32_t, uint32_t> stalling_stream_count;
388
389 // Only allow the stream configurations specified in
390 // dynamicSizeStreamConfigurations.
391 for (const auto& stream : config.streams) {
392 bool is_dynamic_output =
393 (stream.is_physical_camera_stream && stream.group_id != -1);
394 if (stream.rotation != google_camera_hal::StreamRotation::kRotation0) {
395 ALOGE("%s: Stream rotation: 0x%x not supported!", __FUNCTION__,
396 stream.rotation);
397 return false;
398 }
399
400 if (stream.stream_type == google_camera_hal::StreamType::kInput) {
401 if (sensor_chars.at(logical_id).max_input_streams == 0) {
402 ALOGE("%s: Input streams are not supported on this device!",
403 __FUNCTION__);
404 return false;
405 }
406
407 auto const& supported_outputs =
408 config_map.GetValidOutputFormatsForInput(stream.format);
409 if (supported_outputs.empty()) {
410 ALOGE("%s: Input stream with format: 0x%x no supported on this device!",
411 __FUNCTION__, stream.format);
412 return false;
413 }
414
415 input_stream_count++;
416 } else {
417 if (stream.is_physical_camera_stream &&
418 physical_map.find(stream.physical_camera_id) == physical_map.end()) {
419 ALOGE("%s: Invalid physical camera id %d", __FUNCTION__,
420 stream.physical_camera_id);
421 return false;
422 }
423
424 if (is_dynamic_output) {
425 auto dynamic_physical_output_formats =
426 physical_map.at(stream.physical_camera_id)
427 ->GetDynamicPhysicalStreamOutputFormats();
428 if (dynamic_physical_output_formats.find(stream.format) ==
429 dynamic_physical_output_formats.end()) {
430 ALOGE("%s: Unsupported physical stream format %d", __FUNCTION__,
431 stream.format);
432 return false;
433 }
434 }
435
436 if (stream.dynamic_profile !=
437 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) {
438 const SensorCharacteristics& sensor_char =
439 stream.is_physical_camera_stream
440 ? sensor_chars.at(stream.physical_camera_id)
441 : sensor_chars.at(logical_id);
442 if (!sensor_char.is_10bit_dynamic_range_capable) {
443 ALOGE("%s: 10-bit dynamic range output not supported on this device!",
444 __FUNCTION__);
445 return false;
446 }
447
448 if ((stream.format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) &&
449 (static_cast<android_pixel_format_v1_1_t>(stream.format) !=
450 HAL_PIXEL_FORMAT_YCBCR_P010)) {
451 ALOGE(
452 "%s: 10-bit dynamic range profile 0x%x not supported on a non "
453 "10-bit output stream"
454 " pixel format 0x%x",
455 __FUNCTION__, stream.dynamic_profile, stream.format);
456 return false;
457 }
458
459 if ((static_cast<android_pixel_format_v1_1_t>(stream.format) ==
460 HAL_PIXEL_FORMAT_YCBCR_P010) &&
461 ((stream.data_space !=
462 static_cast<android_dataspace_t>(Dataspace::BT2020_ITU_HLG)) &&
463 (stream.data_space !=
464 static_cast<android_dataspace_t>(Dataspace::BT2020_HLG)) &&
465 (stream.data_space !=
466 static_cast<android_dataspace_t>(Dataspace::UNKNOWN)))) {
467 ALOGE(
468 "%s: Unsupported stream data space 0x%x for 10-bit YUV "
469 "output",
470 __FUNCTION__, stream.data_space);
471 return false;
472 }
473 }
474
475 switch (stream.format) {
476 case HAL_PIXEL_FORMAT_BLOB:
477 if ((stream.data_space != HAL_DATASPACE_V0_JFIF) &&
478 (stream.data_space !=
479 static_cast<android_dataspace_t>(
480 aidl::android::hardware::graphics::common::Dataspace::JPEG_R)) &&
481 (stream.data_space != HAL_DATASPACE_UNKNOWN)) {
482 ALOGE("%s: Unsupported Blob dataspace 0x%x", __FUNCTION__,
483 stream.data_space);
484 return false;
485 }
486 if (stream.is_physical_camera_stream) {
487 stalling_stream_count[stream.physical_camera_id]++;
488 } else {
489 for (const auto& p : physical_map) {
490 stalling_stream_count[p.first]++;
491 }
492 }
493 break;
494 case HAL_PIXEL_FORMAT_RAW16: {
495 const SensorCharacteristics& sensor_char =
496 stream.is_physical_camera_stream
497 ? sensor_chars.at(stream.physical_camera_id)
498 : sensor_chars.at(logical_id);
499 auto sensor_height =
500 is_max_res ? sensor_char.full_res_height : sensor_char.height;
501 auto sensor_width =
502 is_max_res ? sensor_char.full_res_width : sensor_char.width;
503 if (stream.height != sensor_height || stream.width != sensor_width) {
504 ALOGE(
505 "%s, RAW16 buffer height %d and width %d must match sensor "
506 "height: %zu"
507 " and width: %zu",
508 __FUNCTION__, stream.height, stream.width, sensor_height,
509 sensor_width);
510 return false;
511 }
512 if (stream.is_physical_camera_stream) {
513 raw_stream_count[stream.physical_camera_id]++;
514 } else {
515 for (const auto& p : physical_map) {
516 raw_stream_count[p.first]++;
517 }
518 }
519 } break;
520 default:
521 if (stream.is_physical_camera_stream) {
522 processed_stream_count[stream.physical_camera_id]++;
523 } else {
524 for (const auto& p : physical_map) {
525 processed_stream_count[p.first]++;
526 }
527 }
528 }
529
530 auto output_sizes =
531 is_dynamic_output
532 ? physical_map.at(stream.physical_camera_id)
533 ->GetDynamicPhysicalStreamOutputSizes(stream.format)
534 : stream.is_physical_camera_stream
535 ? physical_map.at(stream.physical_camera_id)
536 ->GetOutputSizes(stream.format, stream.data_space)
537 : config_map.GetOutputSizes(stream.format, stream.data_space);
538
539 auto stream_size = std::make_pair(stream.width, stream.height);
540 if (output_sizes.find(stream_size) == output_sizes.end()) {
541 ALOGE("%s: Stream with size %dx%d and format 0x%x is not supported!",
542 __FUNCTION__, stream.width, stream.height, stream.format);
543 return false;
544 }
545 }
546
547 if (!sensor_chars.at(logical_id).support_stream_use_case) {
548 if (stream.use_case != ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
549 ALOGE("%s: Camera device doesn't support non-default stream use case!",
550 __FUNCTION__);
551 return false;
552 }
553 } else if (stream.use_case >
554 sensor_chars.at(logical_id).end_valid_stream_use_case) {
555 ALOGE("%s: Stream with use case %d is not supported!", __FUNCTION__,
556 stream.use_case);
557 return false;
558 } else if (stream.use_case !=
559 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
560 if (stream.use_case ==
561 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE) {
562 if (stream.format != HAL_PIXEL_FORMAT_YCBCR_420_888 &&
563 stream.format != HAL_PIXEL_FORMAT_BLOB) {
564 ALOGE("%s: Stream with use case %d isn't compatible with format %d",
565 __FUNCTION__, stream.use_case, stream.format);
566 return false;
567 }
568 } else if ((stream.format == HAL_PIXEL_FORMAT_RAW16) ^
569 (stream.use_case ==
570 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW)) {
571 // Either both stream use case == CROPPED_RAW and format == RAW16, or
572 // stream use case != CROPPED_RAW and format != RAW16 for the
573 // combination to be valid.
574 ALOGE(
575 "%s: Stream with use case CROPPED_RAW isn't compatible with non "
576 "RAW_SENSOR formats",
577 __FUNCTION__);
578 return false;
579
580 } else if (stream.format != HAL_PIXEL_FORMAT_YCBCR_420_888 &&
581 stream.format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
582 stream.format != HAL_PIXEL_FORMAT_RAW16) {
583 ALOGE("%s: Stream with use case %d isn't compatible with format %d",
584 __FUNCTION__, stream.use_case, stream.format);
585 return false;
586 }
587 }
588 }
589
590 for (const auto& raw_count : raw_stream_count) {
591 unsigned int max_raw_streams =
592 sensor_chars.at(raw_count.first).max_raw_streams +
593 (is_max_res
594 ? 1
595 : 0); // The extra raw stream is allowed for remosaic reprocessing.
596 if (raw_count.second > max_raw_streams) {
597 ALOGE("%s: RAW streams maximum %u exceeds supported maximum %u",
598 __FUNCTION__, raw_count.second, max_raw_streams);
599 return false;
600 }
601 }
602
603 for (const auto& stalling_count : stalling_stream_count) {
604 if (stalling_count.second >
605 sensor_chars.at(stalling_count.first).max_stalling_streams) {
606 ALOGE("%s: Stalling streams maximum %u exceeds supported maximum %u",
607 __FUNCTION__, stalling_count.second,
608 sensor_chars.at(stalling_count.first).max_stalling_streams);
609 return false;
610 }
611 }
612
613 for (const auto& processed_count : processed_stream_count) {
614 if (processed_count.second >
615 sensor_chars.at(processed_count.first).max_processed_streams) {
616 ALOGE("%s: Processed streams maximum %u exceeds supported maximum %u",
617 __FUNCTION__, processed_count.second,
618 sensor_chars.at(processed_count.first).max_processed_streams);
619 return false;
620 }
621 }
622
623 if (input_stream_count > sensor_chars.at(logical_id).max_input_streams) {
624 ALOGE("%s: Input stream maximum %u exceeds supported maximum %u",
625 __FUNCTION__, input_stream_count,
626 sensor_chars.at(logical_id).max_input_streams);
627 return false;
628 }
629
630 // TODO: Check session parameters. For now assuming all combinations
631 // are supported.
632
633 return true;
634 }
635
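// Validates the supplied per-camera characteristics, initializes the emulated
// scene and JPEG compressor, and starts the sensor capture thread.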
636 status_t EmulatedSensor::StartUp(
637 uint32_t logical_camera_id,
638 std::unique_ptr<LogicalCharacteristics> logical_chars) {
639 if (isRunning()) {
640 return OK;
641 }
642
643 if (logical_chars.get() == nullptr) {
644 return BAD_VALUE;
645 }
646
647 chars_ = std::move(logical_chars);
648 auto device_chars = chars_->find(logical_camera_id);
649 if (device_chars == chars_->end()) {
650 ALOGE(
651 "%s: Logical camera id: %u absent from logical camera characteristics!",
652 __FUNCTION__, logical_camera_id);
653 return BAD_VALUE;
654 }
655
656 for (const auto& it : *chars_) {
657 if (!AreCharacteristicsSupported(it.second)) {
658 ALOGE("%s: Sensor characteristics for camera id: %u not supported!",
659 __FUNCTION__, it.first);
660 return BAD_VALUE;
661 }
662 }
663
664 logical_camera_id_ = logical_camera_id;
665 scene_ = std::make_unique<EmulatedScene>(
666 device_chars->second.full_res_width, device_chars->second.full_res_height,
667 kElectronsPerLuxSecond, device_chars->second.orientation,
668 device_chars->second.is_front_facing);
669 jpeg_compressor_ = std::make_unique<JpegCompressor>();
670
671 auto res = run(LOG_TAG, ANDROID_PRIORITY_URGENT_DISPLAY);
672 if (res != OK) {
673 ALOGE("Unable to start up sensor capture thread: %d", res);
674 }
675
676 return res;
677 }
678
679 status_t EmulatedSensor::ShutDown() {
680 int res;
681 res = requestExitAndWait();
682 if (res != OK) {
683 ALOGE("Unable to shut down sensor capture thread: %d", res);
684 }
685 return res;
686 }
687
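// Stores the settings, result containers and buffers for the next frame under
// the control lock; the capture thread picks them up on its next iteration.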
688 void EmulatedSensor::SetCurrentRequest(
689 std::unique_ptr<LogicalCameraSettings> logical_settings,
690 std::unique_ptr<HwlPipelineResult> result,
691 std::unique_ptr<HwlPipelineResult> partial_result,
692 std::unique_ptr<Buffers> input_buffers,
693 std::unique_ptr<Buffers> output_buffers) {
694 Mutex::Autolock lock(control_mutex_);
695 current_settings_ = std::move(logical_settings);
696 current_result_ = std::move(result);
697 current_input_buffers_ = std::move(input_buffers);
698 current_output_buffers_ = std::move(output_buffers);
699 partial_result_ = std::move(partial_result);
700 }
701
702 bool EmulatedSensor::WaitForVSyncLocked(nsecs_t reltime) {
703 got_vsync_ = false;
704 while (!got_vsync_) {
705 auto res = vsync_.waitRelative(control_mutex_, reltime);
706 if (res != OK && res != TIMED_OUT) {
707 ALOGE("%s: Error waiting for VSync signal: %d", __FUNCTION__, res);
708 return false;
709 }
710 }
711
712 return got_vsync_;
713 }
714
715 bool EmulatedSensor::WaitForVSync(nsecs_t reltime) {
716 Mutex::Autolock lock(control_mutex_);
717
718 return WaitForVSyncLocked(reltime);
719 }
720
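// Waits for the in-flight frame to complete, recreates the JPEG compressor to
// abort any pending compression jobs, and releases all queued input/output
// buffers, marking the output buffers as failed.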
721 status_t EmulatedSensor::Flush() {
722 Mutex::Autolock lock(control_mutex_);
723 auto ret = WaitForVSyncLocked(kSupportedFrameDurationRange[1]);
724
725 // First recreate the jpeg compressor. This will abort any ongoing processing
726 // and flush any pending jobs.
727 jpeg_compressor_ = std::make_unique<JpegCompressor>();
728
729 // Then return any pending frames here
730 if ((current_input_buffers_.get() != nullptr) &&
731 (!current_input_buffers_->empty())) {
732 current_input_buffers_->clear();
733 }
734 if ((current_output_buffers_.get() != nullptr) &&
735 (!current_output_buffers_->empty())) {
736 for (const auto& buffer : *current_output_buffers_) {
737 buffer->stream_buffer.status = BufferStatus::kError;
738 }
739
740 if ((current_result_.get() != nullptr) &&
741 (current_result_->result_metadata.get() != nullptr)) {
742 if (current_output_buffers_->at(0)->callback.notify != nullptr) {
743 NotifyMessage msg{
744 .type = MessageType::kError,
745 .message.error = {
746 .frame_number = current_output_buffers_->at(0)->frame_number,
747 .error_stream_id = -1,
748 .error_code = ErrorCode::kErrorResult,
749 }};
750
751 current_output_buffers_->at(0)->callback.notify(
752 current_result_->pipeline_id, msg);
753 }
754 }
755
756 current_output_buffers_->clear();
757 }
758
759 return ret ? OK : TIMED_OUT;
760 }
761
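// Returns the current time using the clock that matches the configured sensor
// timestamp source: boot time for REALTIME, monotonic otherwise.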
762 nsecs_t EmulatedSensor::getSystemTimeWithSource(uint32_t timestamp_source) {
763 if (timestamp_source == ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME) {
764 return systemTime(SYSTEM_TIME_BOOTTIME);
765 }
766 return systemTime(SYSTEM_TIME_MONOTONIC);
767 }
768
769 bool EmulatedSensor::threadLoop() {
770 ATRACE_CALL();
771 /**
772 * Sensor capture operation main loop.
773 *
774 */
775
776 /**
777 * Stage 1: Read in latest control parameters
778 */
779 std::unique_ptr<Buffers> next_buffers;
780 std::unique_ptr<Buffers> next_input_buffer;
781 std::unique_ptr<HwlPipelineResult> next_result;
782 std::unique_ptr<HwlPipelineResult> partial_result;
783 std::unique_ptr<LogicalCameraSettings> settings;
784 HwlPipelineCallback callback = {
785 .process_pipeline_result = nullptr,
786 .process_pipeline_batch_result = nullptr,
787 .notify = nullptr,
788 };
789 {
790 Mutex::Autolock lock(control_mutex_);
791 std::swap(settings, current_settings_);
792 std::swap(next_buffers, current_output_buffers_);
793 std::swap(next_input_buffer, current_input_buffers_);
794 std::swap(next_result, current_result_);
795 std::swap(partial_result, partial_result_);
796
797 // Signal VSync for start of readout
798 ALOGVV("Sensor VSync");
799 got_vsync_ = true;
800 vsync_.signal();
801 }
802
803 auto frame_duration = EmulatedSensor::kSupportedFrameDurationRange[0];
804 auto exposure_time = EmulatedSensor::kSupportedExposureTimeRange[0];
805 uint32_t timestamp_source = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
806 // Frame duration must always be the same among all physical devices
807 if ((settings.get() != nullptr) && (!settings->empty())) {
808 frame_duration = settings->begin()->second.frame_duration;
809 exposure_time = settings->begin()->second.exposure_time;
810 timestamp_source = settings->begin()->second.timestamp_source;
811 }
812
813 nsecs_t start_real_time = getSystemTimeWithSource(timestamp_source);
814 // Stagefright cares about system time for timestamps, so base simulated
815 // time on that.
816 nsecs_t frame_end_real_time = start_real_time + frame_duration;
817
818 /**
819 * Stage 2: Capture new image
820 */
821 next_capture_time_ = frame_end_real_time;
822 next_readout_time_ = frame_end_real_time + exposure_time;
823
824 sensor_binning_factor_info_.clear();
825
826 bool reprocess_request = false;
827 if ((next_input_buffer.get() != nullptr) && (!next_input_buffer->empty())) {
828 if (next_input_buffer->size() > 1) {
829 ALOGW("%s: Reprocess supports only single input!", __FUNCTION__);
830 }
831
832 camera_metadata_ro_entry_t entry;
833 auto ret =
834 next_result->result_metadata->Get(ANDROID_SENSOR_TIMESTAMP, &entry);
835 if ((ret == OK) && (entry.count == 1)) {
836 next_capture_time_ = entry.data.i64[0];
837 } else {
838 ALOGW("%s: Reprocess timestamp absent!", __FUNCTION__);
839 }
840
841 ret = next_result->result_metadata->Get(ANDROID_SENSOR_EXPOSURE_TIME,
842 &entry);
843 if ((ret == OK) && (entry.count == 1)) {
844 next_readout_time_ = next_capture_time_ + entry.data.i64[0];
845 } else {
846 next_readout_time_ = next_capture_time_;
847 }
848
849 reprocess_request = true;
850 }
851
852 if ((next_buffers != nullptr) && (settings != nullptr)) {
853 callback = next_buffers->at(0)->callback;
854 if (callback.notify != nullptr) {
855 NotifyMessage msg{
856 .type = MessageType::kShutter,
857 .message.shutter = {
858 .frame_number = next_buffers->at(0)->frame_number,
859 .timestamp_ns = static_cast<uint64_t>(next_capture_time_),
860 .readout_timestamp_ns =
861 static_cast<uint64_t>(next_readout_time_)}};
862 callback.notify(next_result->pipeline_id, msg);
863 }
864 auto b = next_buffers->begin();
865 while (b != next_buffers->end()) {
866 auto device_settings = settings->find((*b)->camera_id);
867 if (device_settings == settings->end()) {
868 ALOGE("%s: Sensor settings absent for device: %d", __func__,
869 (*b)->camera_id);
870 b = next_buffers->erase(b);
871 continue;
872 }
873
874 auto device_chars = chars_->find((*b)->camera_id);
875 if (device_chars == chars_->end()) {
876 ALOGE("%s: Sensor characteristics absent for device: %d", __func__,
877 (*b)->camera_id);
878 b = next_buffers->erase(b);
879 continue;
880 }
881
882 sensor_binning_factor_info_[(*b)->camera_id].quad_bayer_sensor =
883 device_chars->second.quad_bayer_sensor;
884
885 ALOGVV("Starting next capture: Exposure: %" PRIu64 " ms, gain: %d",
886 ns2ms(device_settings->second.exposure_time),
887 device_settings->second.gain);
888
889 scene_->Initialize(device_chars->second.full_res_width,
890 device_chars->second.full_res_height,
891 kElectronsPerLuxSecond);
892 scene_->SetExposureDuration((float)device_settings->second.exposure_time /
893 1e9);
894 scene_->SetColorFilterXYZ(device_chars->second.color_filter.rX,
895 device_chars->second.color_filter.rY,
896 device_chars->second.color_filter.rZ,
897 device_chars->second.color_filter.grX,
898 device_chars->second.color_filter.grY,
899 device_chars->second.color_filter.grZ,
900 device_chars->second.color_filter.gbX,
901 device_chars->second.color_filter.gbY,
902 device_chars->second.color_filter.gbZ,
903 device_chars->second.color_filter.bX,
904 device_chars->second.color_filter.bY,
905 device_chars->second.color_filter.bZ);
906 scene_->SetTestPattern(device_settings->second.test_pattern_mode ==
907 ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR);
908 scene_->SetTestPatternData(device_settings->second.test_pattern_data);
909 scene_->SetScreenRotation(device_settings->second.screen_rotation);
910
911 uint32_t handshake_divider =
912 (device_settings->second.video_stab ==
913 ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON) ||
914 (device_settings->second.video_stab ==
915 ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION)
916 ? kReducedSceneHandshake
917 : kRegularSceneHandshake;
918 scene_->CalculateScene(next_capture_time_, handshake_divider);
919
920 (*b)->stream_buffer.status = BufferStatus::kOk;
921 bool max_res_mode = device_settings->second.sensor_pixel_mode;
922 sensor_binning_factor_info_[(*b)->camera_id].max_res_request =
923 max_res_mode;
924 switch ((*b)->format) {
925 case PixelFormat::RAW16:
926 sensor_binning_factor_info_[(*b)->camera_id].has_raw_stream = true;
927 if (!sensor_binning_factor_info_[(*b)->camera_id]
928 .has_cropped_raw_stream &&
929 (*b)->use_case ==
930 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW) {
931 sensor_binning_factor_info_[(*b)->camera_id].has_cropped_raw_stream =
932 true;
933 }
934 break;
935 default:
936 sensor_binning_factor_info_[(*b)->camera_id].has_non_raw_stream = true;
937 }
938
939 // TODO: remove hack. Implement RAW -> YUV / JPEG reprocessing http://b/192382904
940 bool treat_as_reprocess =
941 (device_chars->second.quad_bayer_sensor && reprocess_request &&
942 (*next_input_buffer->begin())->format == PixelFormat::RAW16)
943 ? false
944 : reprocess_request;
945 ProcessType process_type = treat_as_reprocess ? REPROCESS
946 : (device_settings->second.edge_mode ==
947 ANDROID_EDGE_MODE_HIGH_QUALITY)
948 ? HIGH_QUALITY
949 : REGULAR;
950
951 if ((*b)->color_space !=
952 ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED) {
953 CalculateRgbRgbMatrix((*b)->color_space, device_chars->second);
954 }
955
956 switch ((*b)->format) {
957 case PixelFormat::RAW16:
958 if (!reprocess_request) {
959 uint64_t min_full_res_raw_size =
960 2 * device_chars->second.full_res_width *
961 device_chars->second.full_res_height;
962 uint64_t min_default_raw_size =
963 2 * device_chars->second.width * device_chars->second.height;
964 bool default_mode_for_qb =
965 device_chars->second.quad_bayer_sensor && !max_res_mode;
966 size_t buffer_size = (*b)->plane.img.buffer_size;
967 if (default_mode_for_qb) {
968 if (buffer_size < min_default_raw_size) {
969 ALOGE(
970 "%s: Output buffer size too small for RAW capture in "
971 "default "
972 "mode, "
973 "expected %" PRIu64 ", got %zu, for camera id %d",
974 __FUNCTION__, min_default_raw_size, buffer_size,
975 (*b)->camera_id);
976 (*b)->stream_buffer.status = BufferStatus::kError;
977 break;
978 }
979 } else if (buffer_size < min_full_res_raw_size) {
980 ALOGE(
981 "%s: Output buffer size too small for RAW capture in max res "
982 "mode, "
983 "expected %" PRIu64 ", got %zu, for camera id %d",
984 __FUNCTION__, min_full_res_raw_size, buffer_size,
985 (*b)->camera_id);
986 (*b)->stream_buffer.status = BufferStatus::kError;
987 break;
988 }
989 if (default_mode_for_qb) {
990 if (device_settings->second.zoom_ratio > 2.0f &&
991 ((*b)->use_case ==
992 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW)) {
993 sensor_binning_factor_info_[(*b)->camera_id]
994 .raw_in_sensor_zoom_applied = true;
995 CaptureRawInSensorZoom(
996 (*b)->plane.img.img, (*b)->plane.img.stride_in_bytes,
997 device_settings->second.gain, device_chars->second);
998
999 } else {
1000 CaptureRawBinned(
1001 (*b)->plane.img.img, (*b)->plane.img.stride_in_bytes,
1002 device_settings->second.gain, device_chars->second);
1003 }
1004 } else {
1005 CaptureRawFullRes(
1006 (*b)->plane.img.img, (*b)->plane.img.stride_in_bytes,
1007 device_settings->second.gain, device_chars->second);
1008 }
1009 } else {
1010 if (!device_chars->second.quad_bayer_sensor) {
1011 ALOGE(
1012 "%s: Reprocess requests with output format %x no supported!",
1013 __FUNCTION__, (*b)->format);
1014 (*b)->stream_buffer.status = BufferStatus::kError;
1015 break;
1016 }
1017 // Remosaic the RAW input buffer
1018 if ((*next_input_buffer->begin())->width != (*b)->width ||
1019 (*next_input_buffer->begin())->height != (*b)->height) {
1020 ALOGE(
1021 "%s: RAW16 input dimensions %dx%d don't match output buffer "
1022 "dimensions %dx%d",
1023 __FUNCTION__, (*next_input_buffer->begin())->width,
1024 (*next_input_buffer->begin())->height, (*b)->width,
1025 (*b)->height);
1026 (*b)->stream_buffer.status = BufferStatus::kError;
1027 break;
1028 }
1029 ALOGV("%s remosaic Raw16 Image", __FUNCTION__);
1030 RemosaicRAW16Image(
1031 (uint16_t*)(*next_input_buffer->begin())->plane.img.img,
1032 (uint16_t*)(*b)->plane.img.img, (*b)->plane.img.stride_in_bytes,
1033 device_chars->second);
1034 }
1035 break;
1036 case PixelFormat::RGB_888:
1037 if (!reprocess_request) {
1038 CaptureRGB((*b)->plane.img.img, (*b)->width, (*b)->height,
1039 (*b)->plane.img.stride_in_bytes, RGBLayout::RGB,
1040 device_settings->second.gain, (*b)->color_space,
1041 device_chars->second);
1042 } else {
1043 ALOGE("%s: Reprocess requests with output format %x no supported!",
1044 __FUNCTION__, (*b)->format);
1045 (*b)->stream_buffer.status = BufferStatus::kError;
1046 }
1047 break;
1048 case PixelFormat::RGBA_8888:
1049 if (!reprocess_request) {
1050 CaptureRGB((*b)->plane.img.img, (*b)->width, (*b)->height,
1051 (*b)->plane.img.stride_in_bytes, RGBLayout::RGBA,
1052 device_settings->second.gain, (*b)->color_space,
1053 device_chars->second);
1054 } else {
1055 ALOGE("%s: Reprocess requests with output format %x no supported!",
1056 __FUNCTION__, (*b)->format);
1057 (*b)->stream_buffer.status = BufferStatus::kError;
1058 }
1059 break;
1060 case PixelFormat::BLOB:
1061 if ((*b)->dataSpace == HAL_DATASPACE_V0_JFIF) {
1062 YUV420Frame yuv_input{
1063 .width = treat_as_reprocess
1064 ? (*next_input_buffer->begin())->width
1065 : 0,
1066 .height = treat_as_reprocess
1067 ? (*next_input_buffer->begin())->height
1068 : 0,
1069 .planes = treat_as_reprocess
1070 ? (*next_input_buffer->begin())->plane.img_y_crcb
1071 : YCbCrPlanes{}};
1072 auto jpeg_input = std::make_unique<JpegYUV420Input>();
1073 jpeg_input->width = (*b)->width;
1074 jpeg_input->height = (*b)->height;
1075 jpeg_input->color_space = (*b)->color_space;
1076 auto img =
1077 new uint8_t[(jpeg_input->width * jpeg_input->height * 3) / 2];
1078 jpeg_input->yuv_planes = {
1079 .img_y = img,
1080 .img_cb = img + jpeg_input->width * jpeg_input->height,
1081 .img_cr = img + (jpeg_input->width * jpeg_input->height * 5) / 4,
1082 .y_stride = jpeg_input->width,
1083 .cbcr_stride = jpeg_input->width / 2,
1084 .cbcr_step = 1};
1085 jpeg_input->buffer_owner = true;
1086 YUV420Frame yuv_output{.width = jpeg_input->width,
1087 .height = jpeg_input->height,
1088 .planes = jpeg_input->yuv_planes};
1089
1090 bool rotate = device_settings->second.rotate_and_crop ==
1091 ANDROID_SCALER_ROTATE_AND_CROP_90;
1092 auto ret = ProcessYUV420(yuv_input, yuv_output,
1093 device_settings->second.gain, process_type,
1094 device_settings->second.zoom_ratio, rotate,
1095 (*b)->color_space, device_chars->second);
1096 if (ret != 0) {
1097 (*b)->stream_buffer.status = BufferStatus::kError;
1098 break;
1099 }
1100
1101 auto jpeg_job = std::make_unique<JpegYUV420Job>();
1102 jpeg_job->exif_utils = std::unique_ptr<ExifUtils>(
1103 ExifUtils::Create(device_chars->second));
1104 jpeg_job->input = std::move(jpeg_input);
1105 // If jpeg compression is successful, then the jpeg compressor
1106 // must set the corresponding status.
1107 (*b)->stream_buffer.status = BufferStatus::kError;
1108 std::swap(jpeg_job->output, *b);
1109 jpeg_job->result_metadata =
1110 HalCameraMetadata::Clone(next_result->result_metadata.get());
1111
1112 Mutex::Autolock lock(control_mutex_);
1113 jpeg_compressor_->QueueYUV420(std::move(jpeg_job));
1114 } else if ((*b)->dataSpace == static_cast<android_dataspace_t>(
1115 aidl::android::hardware::graphics::
1116 common::Dataspace::JPEG_R)) {
1117 if (!reprocess_request) {
1118 YUV420Frame yuv_input{};
1119 auto jpeg_input = std::make_unique<JpegYUV420Input>();
1120 jpeg_input->width = (*b)->width;
1121 jpeg_input->height = (*b)->height;
1122 jpeg_input->color_space = (*b)->color_space;
1123 auto img = new uint8_t[(*b)->width * (*b)->height * 3];
1124 jpeg_input->yuv_planes = {
1125 .img_y = img,
1126 .img_cb = img + (*b)->width * (*b)->height * 2,
1127 .img_cr = img + (*b)->width * (*b)->height * 2 + 2,
1128 .y_stride = (*b)->width * 2,
1129 .cbcr_stride = (*b)->width * 2,
1130 .cbcr_step = 2,
1131 .bytesPerPixel = 2};
1132 jpeg_input->buffer_owner = true;
1133 YUV420Frame yuv_output{.width = jpeg_input->width,
1134 .height = jpeg_input->height,
1135 .planes = jpeg_input->yuv_planes};
1136
1137 bool rotate = device_settings->second.rotate_and_crop ==
1138 ANDROID_SCALER_ROTATE_AND_CROP_90;
1139 auto ret = ProcessYUV420(
1140 yuv_input, yuv_output, device_settings->second.gain,
1141 process_type, device_settings->second.zoom_ratio, rotate,
1142 (*b)->color_space, device_chars->second);
1143 if (ret != 0) {
1144 (*b)->stream_buffer.status = BufferStatus::kError;
1145 break;
1146 }
1147
1148 auto jpeg_job = std::make_unique<JpegYUV420Job>();
1149 jpeg_job->exif_utils = std::unique_ptr<ExifUtils>(
1150 ExifUtils::Create(device_chars->second));
1151 jpeg_job->input = std::move(jpeg_input);
1152 // If jpeg compression is successful, then the jpeg compressor
1153 // must set the corresponding status.
1154 (*b)->stream_buffer.status = BufferStatus::kError;
1155 std::swap(jpeg_job->output, *b);
1156 jpeg_job->result_metadata =
1157 HalCameraMetadata::Clone(next_result->result_metadata.get());
1158
1159 Mutex::Autolock lock(control_mutex_);
1160 jpeg_compressor_->QueueYUV420(std::move(jpeg_job));
1161 } else {
1162 ALOGE(
1163 "%s: Reprocess requests with output format JPEG_R are not "
1164 "supported!",
1165 __FUNCTION__);
1166 (*b)->stream_buffer.status = BufferStatus::kError;
1167 }
1168 } else {
1169 ALOGE("%s: Format %x with dataspace %x is TODO", __FUNCTION__,
1170 (*b)->format, (*b)->dataSpace);
1171 (*b)->stream_buffer.status = BufferStatus::kError;
1172 }
1173 break;
1174 case PixelFormat::YCRCB_420_SP:
1175 case PixelFormat::YCBCR_420_888: {
1176 YUV420Frame yuv_input{
1177 .width =
1178 treat_as_reprocess ? (*next_input_buffer->begin())->width : 0,
1179 .height =
1180 treat_as_reprocess ? (*next_input_buffer->begin())->height : 0,
1181 .planes = treat_as_reprocess
1182 ? (*next_input_buffer->begin())->plane.img_y_crcb
1183 : YCbCrPlanes{}};
1184 YUV420Frame yuv_output{.width = (*b)->width,
1185 .height = (*b)->height,
1186 .planes = (*b)->plane.img_y_crcb};
1187 bool rotate = device_settings->second.rotate_and_crop ==
1188 ANDROID_SCALER_ROTATE_AND_CROP_90;
1189 auto ret =
1190 ProcessYUV420(yuv_input, yuv_output, device_settings->second.gain,
1191 process_type, device_settings->second.zoom_ratio,
1192 rotate, (*b)->color_space, device_chars->second);
1193 if (ret != 0) {
1194 (*b)->stream_buffer.status = BufferStatus::kError;
1195 }
1196 } break;
1197 case PixelFormat::Y16:
1198 if (!reprocess_request) {
1199 if ((*b)->dataSpace == HAL_DATASPACE_DEPTH) {
1200 CaptureDepth((*b)->plane.img.img, device_settings->second.gain,
1201 (*b)->width, (*b)->height,
1202 (*b)->plane.img.stride_in_bytes,
1203 device_chars->second);
1204 } else {
1205 ALOGE("%s: Format %x with dataspace %x is TODO", __FUNCTION__,
1206 (*b)->format, (*b)->dataSpace);
1207 (*b)->stream_buffer.status = BufferStatus::kError;
1208 }
1209 } else {
1210 ALOGE("%s: Reprocess requests with output format %x no supported!",
1211 __FUNCTION__, (*b)->format);
1212 (*b)->stream_buffer.status = BufferStatus::kError;
1213 }
1214 break;
1215 case PixelFormat::YCBCR_P010:
1216 if (!reprocess_request) {
1217 bool rotate = device_settings->second.rotate_and_crop ==
1218 ANDROID_SCALER_ROTATE_AND_CROP_90;
1219 YUV420Frame yuv_input{};
1220 YUV420Frame yuv_output{.width = (*b)->width,
1221 .height = (*b)->height,
1222 .planes = (*b)->plane.img_y_crcb};
1223 ProcessYUV420(yuv_input, yuv_output, device_settings->second.gain,
1224 process_type, device_settings->second.zoom_ratio,
1225 rotate, (*b)->color_space, device_chars->second);
1226 } else {
1227 ALOGE(
1228 "%s: Reprocess requests with output format %x no supported!",
1229 __FUNCTION__, (*b)->format);
1230 (*b)->stream_buffer.status = BufferStatus::kError;
1231 }
1232 break;
1233 default:
1234 ALOGE("%s: Unknown format %x, no output", __FUNCTION__, (*b)->format);
1235 (*b)->stream_buffer.status = BufferStatus::kError;
1236 break;
1237 }
1238
1239 b = next_buffers->erase(b);
1240 }
1241 }
1242
1243 if (reprocess_request) {
1244 auto input_buffer = next_input_buffer->begin();
1245 while (input_buffer != next_input_buffer->end()) {
1246 (*input_buffer++)->stream_buffer.status = BufferStatus::kOk;
1247 }
1248 next_input_buffer->clear();
1249 }
1250
1251 nsecs_t work_done_real_time = getSystemTimeWithSource(timestamp_source);
1252 // Returning the results at this point is not entirely correct from timing
1253 // perspective. Under ideal conditions where 'ReturnResults' completes
1254 // in less than 'time_accuracy' we need to return the results after the
1255 // frame cycle expires. However under real conditions various system
1256 // components like SurfaceFlinger, Encoder, LMK etc. could be consuming most
1257 // of the resources and the duration of "ReturnResults" can get comparable to
1258 // 'kDefaultFrameDuration'. This will skew the frame cycle and can result in
1259 // potential frame drops. To avoid this scenario when we are running under
1260 // tight deadlines (less than 'kReturnResultThreshod') try to return the
1261 // results immediately. In all other cases with more relaxed deadlines
1262 // the occasional bump during 'ReturnResults' should not have any
1263 // noticeable effect.
1264 if ((work_done_real_time + kReturnResultThreshod) > frame_end_real_time) {
1265 ReturnResults(callback, std::move(settings), std::move(next_result),
1266 reprocess_request, std::move(partial_result));
1267 }
1268
1269 work_done_real_time = getSystemTimeWithSource(timestamp_source);
1270 ALOGVV("Sensor vertical blanking interval");
1271 const nsecs_t time_accuracy = 2e6; // 2 ms of imprecision is ok
1272 if (work_done_real_time < frame_end_real_time - time_accuracy) {
1273 timespec t;
1274 t.tv_sec = (frame_end_real_time - work_done_real_time) / 1000000000L;
1275 t.tv_nsec = (frame_end_real_time - work_done_real_time) % 1000000000L;
1276
1277 int ret;
1278 do {
1279 ret = nanosleep(&t, &t);
1280 } while (ret != 0);
1281 }
1282
1283 ReturnResults(callback, std::move(settings), std::move(next_result),
1284 reprocess_request, std::move(partial_result));
1285
1286 return true;
1287 };
1288
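// Populates the dynamic result metadata (sensor timestamp, binning factor,
// lens shading map, noise profile, etc.) for the logical camera and any
// physical sub-cameras, then forwards the partial and final results through
// the pipeline callback.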
1289 void EmulatedSensor::ReturnResults(
1290 HwlPipelineCallback callback,
1291 std::unique_ptr<LogicalCameraSettings> settings,
1292 std::unique_ptr<HwlPipelineResult> result, bool reprocess_request,
1293 std::unique_ptr<HwlPipelineResult> partial_result) {
1294 if ((callback.process_pipeline_result != nullptr) &&
1295 (result.get() != nullptr) && (result->result_metadata.get() != nullptr)) {
1296 auto logical_settings = settings->find(logical_camera_id_);
1297 if (logical_settings == settings->end()) {
1298 ALOGE("%s: Logical camera id: %u not found in settings!", __FUNCTION__,
1299 logical_camera_id_);
1300 return;
1301 }
1302 auto device_chars = chars_->find(logical_camera_id_);
1303 if (device_chars == chars_->end()) {
1304 ALOGE("%s: Sensor characteristics absent for device: %d", __func__,
1305 logical_camera_id_);
1306 return;
1307 }
1308 result->result_metadata->Set(ANDROID_SENSOR_TIMESTAMP, &next_capture_time_,
1309 1);
1310
1311 camera_metadata_ro_entry_t lensEntry;
1312 auto lensRet = result->result_metadata->Get(
1313 ANDROID_STATISTICS_LENS_INTRINSIC_SAMPLES, &lensEntry);
1314 if ((lensRet == OK) && (lensEntry.count > 0)) {
1315 result->result_metadata->Set(ANDROID_STATISTICS_LENS_INTRINSIC_TIMESTAMPS,
1316 &next_capture_time_, 1);
1317 }
1318
1319 uint8_t raw_binned_factor_used = false;
1320 if (sensor_binning_factor_info_.find(logical_camera_id_) !=
1321 sensor_binning_factor_info_.end()) {
1322 auto& info = sensor_binning_factor_info_[logical_camera_id_];
1323 // Logical stream was included in the request
1324 if (!reprocess_request && info.quad_bayer_sensor && info.max_res_request &&
1325 info.has_raw_stream && !info.has_non_raw_stream) {
1326 raw_binned_factor_used = true;
1327 }
1328 result->result_metadata->Set(ANDROID_SENSOR_RAW_BINNING_FACTOR_USED,
1329 &raw_binned_factor_used, 1);
1330 if (info.has_cropped_raw_stream) {
1331 if (info.raw_in_sensor_zoom_applied) {
1332 result->result_metadata->Set(
1333 ANDROID_SCALER_RAW_CROP_REGION,
1334 device_chars->second.raw_crop_region_zoomed, 4);
1335
1336 } else {
1337 result->result_metadata->Set(
1338 ANDROID_SCALER_RAW_CROP_REGION,
1339 device_chars->second.raw_crop_region_unzoomed, 4);
1340 }
1341 }
1342 }
1343 if (logical_settings->second.lens_shading_map_mode ==
1344 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON) {
1345 if ((device_chars->second.lens_shading_map_size[0] > 0) &&
1346 (device_chars->second.lens_shading_map_size[1] > 0)) {
1347 // Perfect lens, no actual shading needed.
1348 std::vector<float> lens_shading_map(
1349 device_chars->second.lens_shading_map_size[0] *
1350 device_chars->second.lens_shading_map_size[1] * 4,
1351 1.f);
1352
1353 result->result_metadata->Set(ANDROID_STATISTICS_LENS_SHADING_MAP,
1354 lens_shading_map.data(),
1355 lens_shading_map.size());
1356 }
1357 }
1358 if (logical_settings->second.report_video_stab) {
1359 result->result_metadata->Set(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
1360 &logical_settings->second.video_stab, 1);
1361 }
1362 if (logical_settings->second.report_edge_mode) {
1363 result->result_metadata->Set(ANDROID_EDGE_MODE,
1364 &logical_settings->second.edge_mode, 1);
1365 }
1366 if (logical_settings->second.report_neutral_color_point) {
1367 result->result_metadata->Set(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
1368 kNeutralColorPoint,
1369 ARRAY_SIZE(kNeutralColorPoint));
1370 }
1371 if (logical_settings->second.report_green_split) {
1372 result->result_metadata->Set(ANDROID_SENSOR_GREEN_SPLIT, &kGreenSplit, 1);
1373 }
1374 if (logical_settings->second.report_noise_profile) {
1375 CalculateAndAppendNoiseProfile(
1376 logical_settings->second.gain,
1377 GetBaseGainFactor(device_chars->second.max_raw_value),
1378 result->result_metadata.get());
1379 }
1380 if (logical_settings->second.report_rotate_and_crop) {
1381 result->result_metadata->Set(ANDROID_SCALER_ROTATE_AND_CROP,
1382 &logical_settings->second.rotate_and_crop, 1);
1383 }
1384
1385 if (!result->physical_camera_results.empty()) {
1386 for (auto& it : result->physical_camera_results) {
1387 auto physical_settings = settings->find(it.first);
1388 if (physical_settings == settings->end()) {
1389 ALOGE("%s: Physical settings for camera id: %u are absent!",
1390 __FUNCTION__, it.first);
1391 continue;
1392 }
1393 uint8_t raw_binned_factor_used = false;
1394 if (sensor_binning_factor_info_.find(it.first) !=
1395 sensor_binning_factor_info_.end()) {
1396 auto& info = sensor_binning_factor_info_[it.first];
1397 // physical stream was included in the request
1398 if (!reprocess_request && info.quad_bayer_sensor &&
1399 info.max_res_request && info.has_raw_stream &&
1400 !info.has_non_raw_stream) {
1401 raw_binned_factor_used = true;
1402 }
1403 it.second->Set(ANDROID_SENSOR_RAW_BINNING_FACTOR_USED,
1404 &raw_binned_factor_used, 1);
1405 }
1406 // Sensor timestamp for all physical devices must be the same.
1407 it.second->Set(ANDROID_SENSOR_TIMESTAMP, &next_capture_time_, 1);
1408 if (physical_settings->second.report_neutral_color_point) {
1409 it.second->Set(ANDROID_SENSOR_NEUTRAL_COLOR_POINT, kNeutralColorPoint,
1410 ARRAY_SIZE(kNeutralColorPoint));
1411 }
1412 if (physical_settings->second.report_green_split) {
1413 it.second->Set(ANDROID_SENSOR_GREEN_SPLIT, &kGreenSplit, 1);
1414 }
1415 if (physical_settings->second.report_noise_profile) {
1416 auto device_chars = chars_->find(it.first);
1417 if (device_chars == chars_->end()) {
1418 ALOGE("%s: Sensor characteristics absent for device: %d", __func__,
1419 it.first);
continue;
1420 }
1421 CalculateAndAppendNoiseProfile(
1422 physical_settings->second.gain,
1423 GetBaseGainFactor(device_chars->second.max_raw_value),
1424 it.second.get());
1425 }
1426 }
1427 }
1428
1429 // Partial result count for partial result is set to a value
1430 // only when partial results are supported
1431 if (partial_result->partial_result != 0) {
1432 callback.process_pipeline_result(std::move(partial_result));
1433 }
1434 callback.process_pipeline_result(std::move(result));
1435 }
1436 }
1437
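// Appends ANDROID_SENSOR_NOISE_PROFILE to the result: the scale term is the
// squared total gain, and the offset term combines the pre-gain read noise
// variance (amplified by the gain) with the post-gain read noise variance.
// The same pair is reported for all four CFA channels.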
1438 void EmulatedSensor::CalculateAndAppendNoiseProfile(
1439 float gain /*in ISO*/, float base_gain_factor,
1440 HalCameraMetadata* result /*out*/) {
1441 if (result != nullptr) {
1442 float total_gain = gain / 100.0 * base_gain_factor;
1443 float noise_var_gain = total_gain * total_gain;
1444 float read_noise_var =
1445 kReadNoiseVarBeforeGain * noise_var_gain + kReadNoiseVarAfterGain;
1446 // Noise profile is the same across all 4 CFA channels
1447 double noise_profile[2 * 4] = {
1448 noise_var_gain, read_noise_var, noise_var_gain, read_noise_var,
1449 noise_var_gain, read_noise_var, noise_var_gain, read_noise_var};
1450 result->Set(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
1451 ARRAY_SIZE(noise_profile));
1452 }
1453 }
1454
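// Maps a pixel coordinate to its color channel on a quad Bayer CFA: the
// pattern repeats every 4x4 pixels, and each 2x2 quadrant shares one color
// (R and Gr on the top rows, Gb and B on the bottom rows).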
1455 EmulatedScene::ColorChannels EmulatedSensor::GetQuadBayerColor(uint32_t x,
1456 uint32_t y) {
1457 // Row within larger set of quad bayer filter
1458 uint32_t row_mod = y % 4;
1459 // Column within larger set of quad bayer filter
1460 uint32_t col_mod = x % 4;
1461
1462 // Row is within the top quadrants of a quad bayer sensor
1463 if (row_mod < 2) {
1464 if (col_mod < 2) {
1465 return EmulatedScene::ColorChannels::R;
1466 }
1467 return EmulatedScene::ColorChannels::Gr;
1468 } else {
1469 if (col_mod < 2) {
1470 return EmulatedScene::ColorChannels::Gb;
1471 }
1472 return EmulatedScene::ColorChannels::B;
1473 }
1474 }
1475
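// Remosaics one 4x4 quad Bayer block: the block is copied out of the input
// image and written back to the output image using a fixed index permutation
// that produces a standard Bayer arrangement.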
1476 void EmulatedSensor::RemosaicQuadBayerBlock(uint16_t* img_in, uint16_t* img_out,
1477 int xstart, int ystart,
1478 int row_stride_in_bytes) {
1479 uint32_t quad_block_copy_idx_map[16] = {0, 2, 1, 3, 8, 10, 6, 11,
1480 4, 9, 5, 7, 12, 14, 13, 15};
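  // The 4x4 quad Bayer block is copied out in row-major order below; the map
  // is then read column-major (index row + 4 * j), so output pixel (row, j) of
  // the standard Bayer block is taken from
  // quad_block_copy[quad_block_copy_idx_map[row + 4 * j]].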
1481 uint16_t quad_block_copy[16];
1482 uint32_t i = 0;
1483 for (uint32_t row = 0; row < 4; row++) {
1484 uint16_t* quad_bayer_row =
1485 img_in + (ystart + row) * (row_stride_in_bytes / 2) + xstart;
1486 for (uint32_t j = 0; j < 4; j++, i++) {
1487 quad_block_copy[i] = quad_bayer_row[j];
1488 }
1489 }
1490
1491 for (uint32_t row = 0; row < 4; row++) {
1492 uint16_t* regular_bayer_row =
1493 img_out + (ystart + row) * (row_stride_in_bytes / 2) + xstart;
1494 for (uint32_t j = 0; j < 4; j++, i++) {
1495 uint32_t idx = quad_block_copy_idx_map[row + 4 * j];
1496 regular_bayer_row[j] = quad_block_copy[idx];
1497 }
1498 }
1499 }
1500
1501 status_t EmulatedSensor::RemosaicRAW16Image(uint16_t* img_in, uint16_t* img_out,
1502 size_t row_stride_in_bytes,
1503 const SensorCharacteristics& chars) {
1504   if (chars.full_res_width % 4 != 0 || chars.full_res_height % 4 != 0) {
1505 ALOGE(
1506 "%s RAW16 Image with quad CFA, height %zu and width %zu, not multiples "
1507 "of 4",
1508 __FUNCTION__, chars.full_res_height, chars.full_res_width);
1509 return BAD_VALUE;
1510 }
1511 for (uint32_t i = 0; i < chars.full_res_width; i += 4) {
1512 for (uint32_t j = 0; j < chars.full_res_height; j += 4) {
1513 RemosaicQuadBayerBlock(img_in, img_out, i, j, row_stride_in_bytes);
1514 }
1515 }
1516 return OK;
1517 }
1518
1519 void EmulatedSensor::CaptureRawBinned(uint8_t* img, size_t row_stride_in_bytes,
1520 uint32_t gain,
1521 const SensorCharacteristics& chars) {
1522 CaptureRaw(img, row_stride_in_bytes, gain, chars, /*in_sensor_zoom*/ false,
1523 /*binned*/ true);
1524 return;
1525 }
1526
1527 void EmulatedSensor::CaptureRawInSensorZoom(uint8_t* img,
1528 size_t row_stride_in_bytes,
1529 uint32_t gain,
1530 const SensorCharacteristics& chars) {
1531 CaptureRaw(img, row_stride_in_bytes, gain, chars, /*in_sensor_zoom*/ true,
1532 /*binned*/ false);
1533 return;
1534 }
1535
1536 void EmulatedSensor::CaptureRawFullRes(uint8_t* img, size_t row_stride_in_bytes,
1537 uint32_t gain,
1538 const SensorCharacteristics& chars) {
1539 CaptureRaw(img, row_stride_in_bytes, gain, chars, /*inSensorZoom*/ false,
1540 /*binned*/ false);
1541 return;
1542 }
1543
1544 void EmulatedSensor::CaptureRaw(uint8_t* img, size_t row_stride_in_bytes,
1545 uint32_t gain,
1546 const SensorCharacteristics& chars,
1547 bool in_sensor_zoom, bool binned) {
1548 ATRACE_CALL();
1549 if (in_sensor_zoom && binned) {
1550 ALOGE("%s: Can't perform in-sensor zoom in binned mode", __FUNCTION__);
1551 return;
1552 }
1553 float total_gain = gain / 100.0 * GetBaseGainFactor(chars.max_raw_value);
1554 float noise_var_gain = total_gain * total_gain;
1555 float read_noise_var =
1556 kReadNoiseVarBeforeGain * noise_var_gain + kReadNoiseVarAfterGain;
1557
1558 scene_->SetReadoutPixel(0, 0);
1559 // RGGB
1560 int bayer_select[4] = {EmulatedScene::R, EmulatedScene::Gr, EmulatedScene::Gb,
1561 EmulatedScene::B};
1562 const float raw_zoom_ratio = in_sensor_zoom ? 2.0f : 1.0f;
1563 unsigned int image_width =
1564 in_sensor_zoom || binned ? chars.width : chars.full_res_width;
1565 unsigned int image_height =
1566 in_sensor_zoom || binned ? chars.height : chars.full_res_height;
1567 const float norm_left_top = 0.5f - 0.5f / raw_zoom_ratio;
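  // norm_left_top is the normalized offset of the centered crop window: with
  // 2x in-sensor zoom only the central half of the full-resolution array is
  // sampled, otherwise the window covers the whole array.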
1568 for (unsigned int out_y = 0; out_y < image_height; out_y++) {
1569 int* bayer_row = bayer_select + (out_y & 0x1) * 2;
1570 uint16_t* px = (uint16_t*)img + out_y * (row_stride_in_bytes / 2);
1571
1572 float norm_y = out_y / (image_height * raw_zoom_ratio);
1573 int y = static_cast<int>(chars.full_res_height * (norm_left_top + norm_y));
1574 y = std::min(std::max(y, 0), (int)chars.full_res_height - 1);
1575
1576 for (unsigned int out_x = 0; out_x < image_width; out_x++) {
1577 int color_idx = chars.quad_bayer_sensor && !(in_sensor_zoom || binned)
1578 ? GetQuadBayerColor(out_x, out_y)
1579 : bayer_row[out_x & 0x1];
1580 float norm_x = out_x / (image_width * raw_zoom_ratio);
1581 int x = static_cast<int>(chars.full_res_width * (norm_left_top + norm_x));
1582 x = std::min(std::max(x, 0), (int)chars.full_res_width - 1);
1583
1584 uint32_t electron_count;
1585 scene_->SetReadoutPixel(x, y);
1586 electron_count = scene_->GetPixelElectrons()[color_idx];
1587
1588 // TODO: Better pixel saturation curve?
1589 electron_count = (electron_count < kSaturationElectrons)
1590 ? electron_count
1591 : kSaturationElectrons;
1592
1593 // TODO: Better A/D saturation curve?
1594 uint16_t raw_count = electron_count * total_gain;
1595 raw_count =
1596 (raw_count < chars.max_raw_value) ? raw_count : chars.max_raw_value;
1597
1598 // Calculate noise value
1599 // TODO: Use more-correct Gaussian instead of uniform noise
1600 float photon_noise_var = electron_count * noise_var_gain;
1601 float noise_stddev = sqrtf_approx(read_noise_var + photon_noise_var);
1602 // Scaled to roughly match gaussian/uniform noise stddev
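      // The uniform sample spans [-1.25, 1.25), giving a standard deviation of
      // 2.5 / sqrt(12) ~= 0.72, roughly comparable to unit-variance Gaussian
      // noise.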
1603 float noise_sample = rand_r(&rand_seed_) * (2.5 / (1.0 + RAND_MAX)) - 1.25;
1604
1605 raw_count += chars.black_level_pattern[color_idx];
1606 raw_count += noise_stddev * noise_sample;
1607
1608 *px++ = raw_count;
1609 }
1610 // TODO: Handle this better
1611 // simulatedTime += mRowReadoutTime;
1612 }
1613 ALOGVV("Raw sensor image captured");
1614 }
1615
1616 void EmulatedSensor::CaptureRGB(uint8_t* img, uint32_t width, uint32_t height,
1617 uint32_t stride, RGBLayout layout,
1618 uint32_t gain, int32_t color_space,
1619 const SensorCharacteristics& chars) {
1620 ATRACE_CALL();
1621 float total_gain = gain / 100.0 * GetBaseGainFactor(chars.max_raw_value);
1622 // In fixed-point math, calculate total scaling from electrons to 8bpp
1623 int scale64x = 64 * total_gain * 255 / chars.max_raw_value;
1624 uint32_t inc_h = ceil((float)chars.full_res_width / width);
1625 uint32_t inc_v = ceil((float)chars.full_res_height / height);
1626
1627 for (unsigned int y = 0, outy = 0; y < chars.full_res_height;
1628 y += inc_v, outy++) {
1629 scene_->SetReadoutPixel(0, y);
1630 uint8_t* px = img + outy * stride;
1631 for (unsigned int x = 0; x < chars.full_res_width; x += inc_h) {
1632 uint32_t r_count, g_count, b_count;
1633 // TODO: Perfect demosaicing is a cheat
1634 const uint32_t* pixel = scene_->GetPixelElectrons();
1635 r_count = pixel[EmulatedScene::R] * scale64x;
1636 g_count = pixel[EmulatedScene::Gr] * scale64x;
1637 b_count = pixel[EmulatedScene::B] * scale64x;
1638
1639 if (color_space !=
1640 ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED) {
1641 RgbToRgb(&r_count, &g_count, &b_count);
1642 }
1643
1644 uint8_t r = r_count < 255 * 64 ? r_count / 64 : 255;
1645 uint8_t g = g_count < 255 * 64 ? g_count / 64 : 255;
1646 uint8_t b = b_count < 255 * 64 ? b_count / 64 : 255;
1647 switch (layout) {
1648 case RGB:
1649 *px++ = r;
1650 *px++ = g;
1651 *px++ = b;
1652 break;
1653 case RGBA:
1654 *px++ = r;
1655 *px++ = g;
1656 *px++ = b;
1657 *px++ = 255;
1658 break;
1659 case ARGB:
1660 *px++ = 255;
1661 *px++ = r;
1662 *px++ = g;
1663 *px++ = b;
1664 break;
1665 default:
1666 ALOGE("%s: RGB layout: %d not supported", __FUNCTION__, layout);
1667 return;
1668 }
1669 for (unsigned int j = 1; j < inc_h; j++) scene_->GetPixelElectrons();
1670 }
1671 }
1672 ALOGVV("RGB sensor image captured");
1673 }
1674
1675 void EmulatedSensor::CaptureYUV420(YCbCrPlanes yuv_layout, uint32_t width,
1676 uint32_t height, uint32_t gain,
1677 float zoom_ratio, bool rotate,
1678 int32_t color_space,
1679 const SensorCharacteristics& chars) {
1680 ATRACE_CALL();
1681 float total_gain = gain / 100.0 * GetBaseGainFactor(chars.max_raw_value);
1682 // Using fixed-point math with 6 bits of fractional precision.
1683 // In fixed-point math, calculate total scaling from electrons to 8bpp
1684 const int scale64x =
1685 kFixedBitPrecision * total_gain * 255 / chars.max_raw_value;
1686 // Fixed-point coefficients for RGB-YUV transform
1687 // Based on JFIF RGB->YUV transform.
1688 // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
1689 const int rgb_to_y[] = {19, 37, 7};
1690 const int rgb_to_cb[] = {-10, -21, 32, 524288};
1691 const int rgb_to_cr[] = {32, -26, -5, 524288};
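  // These are the JFIF RGB->YCbCr weights scaled by 64 (e.g. 0.299 * 64 ~= 19
  // for the Y/R term); 524288 = 128 << 12 is the +128 chroma offset applied
  // after both 64x scalings (one from scale64x, one from the coefficients).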
1692 // Scale back to 8bpp non-fixed-point
1693 const int scale_out = 64;
1694 const int scale_out_sq = scale_out * scale_out; // after multiplies
1695
1696   // Aspect ratio of the output frame, used to size the rotated crop window
1697 const float aspect_ratio = static_cast<float>(width) / height;
1698
1699 // precalculate normalized coordinates and dimensions
1700 const float norm_left_top = 0.5f - 0.5f / zoom_ratio;
1701 const float norm_rot_top = norm_left_top;
1702 const float norm_width = 1 / zoom_ratio;
1703 const float norm_rot_width = norm_width / aspect_ratio;
1704 const float norm_rot_height = norm_width;
1705 const float norm_rot_left =
1706 norm_left_top + (norm_width + norm_rot_width) * 0.5f;
1707
1708 for (unsigned int out_y = 0; out_y < height; out_y++) {
1709 uint8_t* px_y = yuv_layout.img_y + out_y * yuv_layout.y_stride;
1710 uint8_t* px_cb = yuv_layout.img_cb + (out_y / 2) * yuv_layout.cbcr_stride;
1711 uint8_t* px_cr = yuv_layout.img_cr + (out_y / 2) * yuv_layout.cbcr_stride;
1712
1713 for (unsigned int out_x = 0; out_x < width; out_x++) {
1714 int x, y;
1715 float norm_x = out_x / (width * zoom_ratio);
1716 float norm_y = out_y / (height * zoom_ratio);
1717 if (rotate) {
1718 x = static_cast<int>(chars.full_res_width *
1719 (norm_rot_left - norm_y * norm_rot_width));
1720 y = static_cast<int>(chars.full_res_height *
1721 (norm_rot_top + norm_x * norm_rot_height));
1722 } else {
1723 x = static_cast<int>(chars.full_res_width * (norm_left_top + norm_x));
1724 y = static_cast<int>(chars.full_res_height * (norm_left_top + norm_y));
1725 }
1726 x = std::min(std::max(x, 0), (int)chars.full_res_width - 1);
1727 y = std::min(std::max(y, 0), (int)chars.full_res_height - 1);
1728 scene_->SetReadoutPixel(x, y);
1729
1730 uint32_t r_count, g_count, b_count;
1731 // TODO: Perfect demosaicing is a cheat
1732 const uint32_t* pixel = rotate ? scene_->GetPixelElectronsColumn()
1733 : scene_->GetPixelElectrons();
1734 r_count = pixel[EmulatedScene::R] * scale64x;
1735 g_count = pixel[EmulatedScene::Gr] * scale64x;
1736 b_count = pixel[EmulatedScene::B] * scale64x;
1737
1738 if (color_space !=
1739 ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED) {
1740 RgbToRgb(&r_count, &g_count, &b_count);
1741 }
1742
1743 r_count = r_count < kSaturationPoint ? r_count : kSaturationPoint;
1744 g_count = g_count < kSaturationPoint ? g_count : kSaturationPoint;
1745 b_count = b_count < kSaturationPoint ? b_count : kSaturationPoint;
1746
1747 // Gamma correction
1748 r_count = GammaTable(r_count, color_space);
1749 g_count = GammaTable(g_count, color_space);
1750 b_count = GammaTable(b_count, color_space);
1751
1752 uint8_t y8 = (rgb_to_y[0] * r_count + rgb_to_y[1] * g_count +
1753 rgb_to_y[2] * b_count) /
1754 scale_out_sq;
1755 if (yuv_layout.bytesPerPixel == 1) {
1756 *px_y = y8;
1757 } else if (yuv_layout.bytesPerPixel == 2) {
1758 *(reinterpret_cast<uint16_t*>(px_y)) = htole16(y8 << 8);
1759 } else {
1760 ALOGE("%s: Unsupported bytes per pixel value: %zu", __func__,
1761 yuv_layout.bytesPerPixel);
1762 return;
1763 }
1764 px_y += yuv_layout.bytesPerPixel;
1765
1766 if (out_y % 2 == 0 && out_x % 2 == 0) {
1767 uint8_t cb8 = (rgb_to_cb[0] * r_count + rgb_to_cb[1] * g_count +
1768 rgb_to_cb[2] * b_count + rgb_to_cb[3]) /
1769 scale_out_sq;
1770 uint8_t cr8 = (rgb_to_cr[0] * r_count + rgb_to_cr[1] * g_count +
1771 rgb_to_cr[2] * b_count + rgb_to_cr[3]) /
1772 scale_out_sq;
1773 if (yuv_layout.bytesPerPixel == 1) {
1774 *px_cb = cb8;
1775 *px_cr = cr8;
1776 } else if (yuv_layout.bytesPerPixel == 2) {
1777 *(reinterpret_cast<uint16_t*>(px_cb)) = htole16(cb8 << 8);
1778 *(reinterpret_cast<uint16_t*>(px_cr)) = htole16(cr8 << 8);
1779 } else {
1780 ALOGE("%s: Unsupported bytes per pixel value: %zu", __func__,
1781 yuv_layout.bytesPerPixel);
1782 return;
1783 }
1784 px_cr += yuv_layout.cbcr_step;
1785 px_cb += yuv_layout.cbcr_step;
1786 }
1787 }
1788 }
1789 ALOGVV("YUV420 sensor image captured");
1790 }
1791
1792 void EmulatedSensor::CaptureDepth(uint8_t* img, uint32_t gain, uint32_t width,
1793 uint32_t height, uint32_t stride,
1794 const SensorCharacteristics& chars) {
1795 ATRACE_CALL();
1796 float total_gain = gain / 100.0 * GetBaseGainFactor(chars.max_raw_value);
1797 // In fixed-point math, calculate scaling factor to 13bpp millimeters
1798 int scale64x = 64 * total_gain * 8191 / chars.max_raw_value;
1799 uint32_t inc_h = ceil((float)chars.full_res_width / width);
1800 uint32_t inc_v = ceil((float)chars.full_res_height / height);
1801
1802 for (unsigned int y = 0, out_y = 0; y < chars.full_res_height;
1803 y += inc_v, out_y++) {
1804 scene_->SetReadoutPixel(0, y);
1805 uint16_t* px = (uint16_t*)(img + (out_y * stride));
1806 for (unsigned int x = 0; x < chars.full_res_width; x += inc_h) {
1807 uint32_t depth_count;
1808 // TODO: Make up real depth scene instead of using green channel
1809 // as depth
1810 const uint32_t* pixel = scene_->GetPixelElectrons();
1811 depth_count = pixel[EmulatedScene::Gr] * scale64x;
1812
1813 *px++ = depth_count < 8191 * 64 ? depth_count / 64 : 0;
1814 for (unsigned int j = 1; j < inc_h; j++) scene_->GetPixelElectrons();
1815 }
1816 // TODO: Handle this better
1817 // simulatedTime += mRowReadoutTime;
1818 }
1819 ALOGVV("Depth sensor image captured");
1820 }
1821
1822 status_t EmulatedSensor::ProcessYUV420(const YUV420Frame& input,
1823 const YUV420Frame& output, uint32_t gain,
1824 ProcessType process_type,
1825 float zoom_ratio, bool rotate_and_crop,
1826 int32_t color_space,
1827 const SensorCharacteristics& chars) {
1828 ATRACE_CALL();
1829 size_t input_width, input_height;
1830 YCbCrPlanes input_planes, output_planes;
1831 std::vector<uint8_t> temp_yuv, temp_output_uv, temp_input_uv;
1832
1833   // Downgrade HIGH_QUALITY to REGULAR on the emulator when the property
1834   // ro.boot.qemu.camera_hq_edge_processing is set to false.
1835 if (process_type == HIGH_QUALITY &&
1836 !property_get_bool("ro.boot.qemu.camera_hq_edge_processing", true)) {
1837 process_type = REGULAR;
1838 }
1839
1840 size_t bytes_per_pixel = output.planes.bytesPerPixel;
1841 switch (process_type) {
1842 case HIGH_QUALITY:
1843 CaptureYUV420(output.planes, output.width, output.height, gain,
1844 zoom_ratio, rotate_and_crop, color_space, chars);
1845 return OK;
1846 case REPROCESS:
1847 input_width = input.width;
1848 input_height = input.height;
1849 input_planes = input.planes;
1850
1851 // libyuv only supports planar YUV420 during scaling.
1852 // Split the input U/V plane in separate planes if needed.
1853 if (input_planes.cbcr_step == 2) {
1854 temp_input_uv.resize(input_width * input_height / 2);
1855 auto temp_uv_buffer = temp_input_uv.data();
1856 input_planes.img_cb = temp_uv_buffer;
1857 input_planes.img_cr = temp_uv_buffer + (input_width * input_height) / 4;
1858 input_planes.cbcr_stride = input_width / 2;
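        // The plane with the lower address leads the interleaved buffer:
        // Cb-first corresponds to NV12-style ordering, Cr-first to NV21-style.
        // SplitUVPlane de-interleaves from whichever plane comes first.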
1859 if (input.planes.img_cb < input.planes.img_cr) {
1860 libyuv::SplitUVPlane(input.planes.img_cb, input.planes.cbcr_stride,
1861 input_planes.img_cb, input_planes.cbcr_stride,
1862 input_planes.img_cr, input_planes.cbcr_stride,
1863 input_width / 2, input_height / 2);
1864 } else {
1865 libyuv::SplitUVPlane(input.planes.img_cr, input.planes.cbcr_stride,
1866 input_planes.img_cr, input_planes.cbcr_stride,
1867 input_planes.img_cb, input_planes.cbcr_stride,
1868 input_width / 2, input_height / 2);
1869 }
1870 }
1871 break;
1872 case REGULAR:
1873 default:
1874 // Generate the smallest possible frame with the expected AR and
1875 // then scale using libyuv.
1876 float aspect_ratio = static_cast<float>(output.width) / output.height;
1877 zoom_ratio = std::max(1.f, zoom_ratio);
1878 input_width = EmulatedScene::kSceneWidth * aspect_ratio;
1879 input_height = EmulatedScene::kSceneHeight;
1880       temp_yuv.resize((input_width * input_height * 3 * bytes_per_pixel) / 2);
1881 auto temp_yuv_buffer = temp_yuv.data();
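      // Planar I420 layout inside the temp buffer: a full-size Y plane,
      // followed by quarter-size Cb and Cr planes (hence the W * H and
      // 5 / 4 * W * H byte offsets below).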
1882 input_planes = {
1883 .img_y = temp_yuv_buffer,
1884 .img_cb =
1885 temp_yuv_buffer + input_width * input_height * bytes_per_pixel,
1886 .img_cr = temp_yuv_buffer +
1887 (input_width * input_height * bytes_per_pixel * 5) / 4,
1888 .y_stride = static_cast<uint32_t>(input_width * bytes_per_pixel),
1889 .cbcr_stride =
1890 static_cast<uint32_t>(input_width * bytes_per_pixel) / 2,
1891 .cbcr_step = 1,
1892 .bytesPerPixel = bytes_per_pixel};
1893 CaptureYUV420(input_planes, input_width, input_height, gain, zoom_ratio,
1894 rotate_and_crop, color_space, chars);
1895 }
1896
1897 output_planes = output.planes;
1898 // libyuv only supports planar YUV420 during scaling.
1899 // Treat the output UV space as planar first and then
1900 // interleave in the second step.
1901 if (output_planes.cbcr_step == 2) {
1902 temp_output_uv.resize(output.width * output.height * bytes_per_pixel / 2);
1903 auto temp_uv_buffer = temp_output_uv.data();
1904 output_planes.img_cb = temp_uv_buffer;
1905 output_planes.img_cr =
1906 temp_uv_buffer + output.width * output.height * bytes_per_pixel / 4;
1907 output_planes.cbcr_stride = output.width * bytes_per_pixel / 2;
1908 }
1909
1910   // NOTE: libyuv expects plane strides in pixels (elements), not bytes.
1911 int ret = 0;
1912 if (bytes_per_pixel == 2) {
1913 ret = I420Scale_16((const uint16_t*)input_planes.img_y,
1914 input_planes.y_stride / bytes_per_pixel,
1915 (const uint16_t*)input_planes.img_cb,
1916 input_planes.cbcr_stride / bytes_per_pixel,
1917 (const uint16_t*)input_planes.img_cr,
1918 input_planes.cbcr_stride / bytes_per_pixel, input_width,
1919 input_height, (uint16_t*)output_planes.img_y,
1920 output_planes.y_stride / bytes_per_pixel,
1921 (uint16_t*)output_planes.img_cb,
1922 output_planes.cbcr_stride / bytes_per_pixel,
1923 (uint16_t*)output_planes.img_cr,
1924 output_planes.cbcr_stride / bytes_per_pixel,
1925 output.width, output.height, libyuv::kFilterNone);
1926 } else {
1927 ret = I420Scale(input_planes.img_y, input_planes.y_stride,
1928 input_planes.img_cb, input_planes.cbcr_stride,
1929 input_planes.img_cr, input_planes.cbcr_stride, input_width,
1930 input_height, output_planes.img_y, output_planes.y_stride,
1931 output_planes.img_cb, output_planes.cbcr_stride,
1932 output_planes.img_cr, output_planes.cbcr_stride,
1933 output.width, output.height, libyuv::kFilterNone);
1934 }
1935 if (ret != 0) {
1936 ALOGE("%s: Failed during YUV scaling: %d", __FUNCTION__, ret);
1937 return ret;
1938 }
1939
1940 // Merge U/V Planes for the interleaved case
1941 if (output_planes.cbcr_step == 2) {
1942 if (output.planes.img_cb < output.planes.img_cr) {
1943 if (bytes_per_pixel == 2) {
1944 libyuv::MergeUVPlane_16((const uint16_t*)output_planes.img_cb,
1945 output_planes.cbcr_stride / bytes_per_pixel,
1946 (const uint16_t*)output_planes.img_cr,
1947 output_planes.cbcr_stride / bytes_per_pixel,
1948 (uint16_t*)output.planes.img_cb,
1949 output.planes.cbcr_stride / bytes_per_pixel,
1950 output.width / 2, output.height / 2,
1951 /*depth*/ 16);
1952 } else {
1953 libyuv::MergeUVPlane(output_planes.img_cb, output_planes.cbcr_stride,
1954 output_planes.img_cr, output_planes.cbcr_stride,
1955 output.planes.img_cb, output.planes.cbcr_stride,
1956 output.width / 2, output.height / 2);
1957 }
1958 } else {
1959 if (bytes_per_pixel == 2) {
1960 libyuv::MergeUVPlane_16((const uint16_t*)output_planes.img_cr,
1961 output_planes.cbcr_stride / bytes_per_pixel,
1962 (const uint16_t*)output_planes.img_cb,
1963 output_planes.cbcr_stride / bytes_per_pixel,
1964 (uint16_t*)output.planes.img_cr,
1965 output.planes.cbcr_stride / bytes_per_pixel,
1966 output.width / 2, output.height / 2,
1967 /*depth*/ 16);
1968 } else {
1969 libyuv::MergeUVPlane(output_planes.img_cr, output_planes.cbcr_stride,
1970 output_planes.img_cb, output_planes.cbcr_stride,
1971 output.planes.img_cr, output.planes.cbcr_stride,
1972 output.width / 2, output.height / 2);
1973 }
1974 }
1975 }
1976
1977 return ret;
1978 }
1979
1980 int32_t EmulatedSensor::ApplysRGBGamma(int32_t value, int32_t saturation) {
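  // sRGB OETF: E' = 12.92 * E for E <= 0.0031308, otherwise
  // E' = 1.055 * E^(1/2.4) - 0.055 (0.4166667f ~= 1/2.4).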
1981 float n_value = (static_cast<float>(value) / saturation);
1982 n_value = (n_value <= 0.0031308f)
1983 ? n_value * 12.92f
1984 : 1.055f * pow(n_value, 0.4166667f) - 0.055f;
1985 return n_value * saturation;
1986 }
1987
1988 int32_t EmulatedSensor::ApplySMPTE170MGamma(int32_t value, int32_t saturation) {
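  // SMPTE 170M / BT.709 OETF: E' = 4.5 * E for E <= 0.018, otherwise
  // E' = 1.099 * E^0.45 - 0.099.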
1989 float n_value = (static_cast<float>(value) / saturation);
1990 n_value = (n_value <= 0.018f) ? n_value * 4.5f
1991 : 1.099f * pow(n_value, 0.45f) - 0.099f;
1992 return n_value * saturation;
1993 }
1994
1995 int32_t EmulatedSensor::ApplyST2084Gamma(int32_t value, int32_t saturation) {
1996 float n_value = (static_cast<float>(value) / saturation);
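  // SMPTE ST 2084 (PQ) OETF constants: c2 = 2413/128, c3 = 2392/128,
  // c1 = c3 - c2 + 1 = 3424/4096, m2 (m below) = 2523/32 and
  // m1 (n below) = 2610/16384.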
1997 float c2 = 32.f * 2413.f / 4096.f;
1998 float c3 = 32.f * 2392.f / 4096.f;
1999 float c1 = c3 - c2 + 1.f;
2000 float m = 128.f * 2523.f / 4096.f;
2001 float n = 0.25f * 2610.f / 4096.f;
2002 n_value = pow((c1 + c2 * pow(n_value, n)) / (1 + c3 * pow(n_value, n)), m);
2003 return n_value * saturation;
2004 }
2005
2006 int32_t EmulatedSensor::ApplyHLGGamma(int32_t value, int32_t saturation) {
2007 float n_value = (static_cast<float>(value) / saturation);
2008 // The full HLG gamma curve has additional parameters for n_value > 1, but n_value
2009 // in the emulated camera is always <= 1 due to lack of HDR display features.
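  // With scene light normalized so that the log segment starts at 1 (12x the
  // BT.2100 normalization), the HLG OETF is 0.5 * sqrt(E) for E <= 1 and a
  // logarithmic curve above; only the square-root branch is needed here.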
2010 n_value = 0.5f * pow(n_value, 0.5f);
2011 return n_value * saturation;
2012 }
2013
2014 int32_t EmulatedSensor::GammaTable(int32_t value, int32_t color_space) {
2015 switch (color_space) {
2016 case ColorSpaceNamed::BT709:
2017 return gamma_table_smpte170m_[value];
2018 case ColorSpaceNamed::BT2020:
2019 return gamma_table_hlg_[value]; // Assume HLG
2020 case ColorSpaceNamed::DISPLAY_P3:
2021 case ColorSpaceNamed::SRGB:
2022 default:
2023 return gamma_table_sRGB_[value];
2024 }
2025
2026 return 0;
2027 }
2028
2029 void EmulatedSensor::RgbToRgb(uint32_t* r_count, uint32_t* g_count,
2030 uint32_t* b_count) {
2031 uint32_t r = *r_count;
2032 uint32_t g = *g_count;
2033 uint32_t b = *b_count;
2034 *r_count = (uint32_t)std::max(
2035 r * rgb_rgb_matrix_.rR + g * rgb_rgb_matrix_.gR + b * rgb_rgb_matrix_.bR,
2036 0.0f);
2037 *g_count = (uint32_t)std::max(
2038 r * rgb_rgb_matrix_.rG + g * rgb_rgb_matrix_.gG + b * rgb_rgb_matrix_.bG,
2039 0.0f);
2040 *b_count = (uint32_t)std::max(
2041 r * rgb_rgb_matrix_.rB + g * rgb_rgb_matrix_.gB + b * rgb_rgb_matrix_.bB,
2042 0.0f);
2043 }
2044
2045 void EmulatedSensor::CalculateRgbRgbMatrix(int32_t color_space,
2046 const SensorCharacteristics& chars) {
2047 const XyzMatrix* xyzMatrix;
2048 switch (color_space) {
2049 case ColorSpaceNamed::DISPLAY_P3:
2050 xyzMatrix = &kDisplayP3Matrix;
2051 break;
2052 case ColorSpaceNamed::BT709:
2053 xyzMatrix = &kBt709Matrix;
2054 break;
2055 case ColorSpaceNamed::BT2020:
2056 xyzMatrix = &kBt2020Matrix;
2057 break;
2058 case ColorSpaceNamed::SRGB:
2059 default:
2060 xyzMatrix = &kSrgbXyzMatrix;
2061 break;
2062 }
2063
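  // Compose XYZ -> target RGB (xyzMatrix) with the sensor's forward matrix
  // (sensor RGB -> CIE XYZ) so that a single 3x3 multiply in RgbToRgb() maps
  // sensor RGB straight into the requested color space.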
2064 rgb_rgb_matrix_.rR = xyzMatrix->xR * chars.forward_matrix.rX +
2065 xyzMatrix->yR * chars.forward_matrix.rY +
2066 xyzMatrix->zR * chars.forward_matrix.rZ;
2067 rgb_rgb_matrix_.gR = xyzMatrix->xR * chars.forward_matrix.gX +
2068 xyzMatrix->yR * chars.forward_matrix.gY +
2069 xyzMatrix->zR * chars.forward_matrix.gZ;
2070 rgb_rgb_matrix_.bR = xyzMatrix->xR * chars.forward_matrix.bX +
2071 xyzMatrix->yR * chars.forward_matrix.bY +
2072 xyzMatrix->zR * chars.forward_matrix.bZ;
2073 rgb_rgb_matrix_.rG = xyzMatrix->xG * chars.forward_matrix.rX +
2074 xyzMatrix->yG * chars.forward_matrix.rY +
2075 xyzMatrix->zG * chars.forward_matrix.rZ;
2076 rgb_rgb_matrix_.gG = xyzMatrix->xG * chars.forward_matrix.gX +
2077 xyzMatrix->yG * chars.forward_matrix.gY +
2078 xyzMatrix->zG * chars.forward_matrix.gZ;
2079 rgb_rgb_matrix_.bG = xyzMatrix->xG * chars.forward_matrix.bX +
2080 xyzMatrix->yG * chars.forward_matrix.bY +
2081 xyzMatrix->zG * chars.forward_matrix.bZ;
2082 rgb_rgb_matrix_.rB = xyzMatrix->xB * chars.forward_matrix.rX +
2083 xyzMatrix->yB * chars.forward_matrix.rY +
2084 xyzMatrix->zB * chars.forward_matrix.rZ;
2085 rgb_rgb_matrix_.gB = xyzMatrix->xB * chars.forward_matrix.gX +
2086 xyzMatrix->yB * chars.forward_matrix.gY +
2087 xyzMatrix->zB * chars.forward_matrix.gZ;
2088 rgb_rgb_matrix_.bB = xyzMatrix->xB * chars.forward_matrix.bX +
2089 xyzMatrix->yB * chars.forward_matrix.bY +
2090 xyzMatrix->zB * chars.forward_matrix.bZ;
2091 }
2092
2093 } // namespace android
2094