1 /*
2 * Copyright (C) 2019 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #define LOG_TAG "EmulatedRequestProcessor"
18 #define ATRACE_TAG ATRACE_TAG_CAMERA
19
20 #include "EmulatedRequestProcessor.h"
21
22 #include <HandleImporter.h>
23 #include <hardware/gralloc.h>
24 #include <log/log.h>
25 #include <sync/sync.h>
26 #include <utils/Timers.h>
27 #include <utils/Trace.h>
28
29 #include <memory>
30
31 #include "GrallocSensorBuffer.h"
32
33 namespace android {
34
35 using ::android::frameworks::sensorservice::V1_0::ISensorManager;
36 using ::android::frameworks::sensorservice::V1_0::Result;
37 using android::hardware::camera::common::V1_0::helper::HandleImporter;
38 using ::android::hardware::sensors::V1_0::SensorInfo;
39 using ::android::hardware::sensors::V1_0::SensorType;
40 using google_camera_hal::ErrorCode;
41 using google_camera_hal::HwlPipelineResult;
42 using google_camera_hal::MessageType;
43 using google_camera_hal::NotifyMessage;
44
EmulatedRequestProcessor(uint32_t camera_id,sp<EmulatedSensor> sensor,const HwlSessionCallback & session_callback)45 EmulatedRequestProcessor::EmulatedRequestProcessor(
46 uint32_t camera_id, sp<EmulatedSensor> sensor,
47 const HwlSessionCallback& session_callback)
48 : camera_id_(camera_id),
49 sensor_(sensor),
50 session_callback_(session_callback),
51 request_state_(std::make_unique<EmulatedLogicalRequestState>(camera_id)) {
52 ATRACE_CALL();
53 request_thread_ = std::thread([this] { this->RequestProcessorLoop(); });
54 importer_ = std::make_shared<HandleImporter>();
55 }
56
// Stops the request thread, shuts down the sensor and tears down the
// accelerometer event queue if one was created.
EmulatedRequestProcessor::~EmulatedRequestProcessor() {
  ATRACE_CALL();
  // Signal the processing loop to exit and wait for it to finish before any
  // members are destroyed. NOTE(review): processor_done_ is written here
  // without holding process_mutex_ — assumes it is declared atomic in the
  // header; confirm.
  processor_done_ = true;
  request_thread_.join();

  auto ret = sensor_->ShutDown();
  if (ret != OK) {
    ALOGE("%s: Failed during sensor shutdown %s (%d)", __FUNCTION__,
          strerror(-ret), ret);
  }

  // Disable the accelerometer and drop the HIDL event-queue reference.
  if (sensor_event_queue_.get() != nullptr) {
    sensor_event_queue_->disableSensor(sensor_handle_);
    sensor_event_queue_.clear();
    sensor_event_queue_ = nullptr;
  }
}
74
// Queues one capture request per pipeline for asynchronous processing by the
// request thread.
//
// |frame_number| is the framework-assigned frame id. |requests| holds one
// entry per pipeline capturing this frame; each entry's pipeline_id indexes
// into |pipelines|. |dynamic_stream_id_map| and |use_default_physical_camera|
// are forwarded to the request state to resolve dynamic streams.
//
// Returns OK on success, BAD_VALUE for an invalid pipeline id or failed
// dynamic-stream update, TIMED_OUT when no pending-request slot frees up
// within one maximum frame duration, or NO_MEMORY when no output buffers
// could be created.
status_t EmulatedRequestProcessor::ProcessPipelineRequests(
    uint32_t frame_number, std::vector<HwlPipelineRequest>& requests,
    const std::vector<EmulatedPipeline>& pipelines,
    const DynamicStreamIdMapType& dynamic_stream_id_map,
    bool use_default_physical_camera) {
  ATRACE_CALL();
  status_t res = OK;

  std::unique_lock<std::mutex> lock(process_mutex_);

  for (auto& request : requests) {
    if (request.pipeline_id >= pipelines.size()) {
      ALOGE("%s: Pipeline request with invalid pipeline id: %u", __FUNCTION__,
            request.pipeline_id);
      return BAD_VALUE;
    }

    // Throttle the caller: block while the queue is deeper than the sensor
    // pipeline depth, waiting at most one maximum frame duration for the
    // request thread to drain a slot.
    while (pending_requests_.size() > EmulatedSensor::kPipelineDepth) {
      auto result = request_condition_.wait_for(
          lock, std::chrono::nanoseconds(
                    EmulatedSensor::kSupportedFrameDurationRange[1]));
      if (result == std::cv_status::timeout) {
        ALOGE("%s: Timed out waiting for a pending request slot", __FUNCTION__);
        return TIMED_OUT;
      }
    }

    // Resolve dynamic (group) stream ids to concrete streams for this request.
    res = request_state_->UpdateRequestForDynamicStreams(
        &request, pipelines, dynamic_stream_id_map, use_default_physical_camera);
    if (res != OK) {
      ALOGE("%s: Failed to update request for dynamic streams: %s(%d)",
            __FUNCTION__, strerror(-res), res);
      return res;
    }

    auto output_buffers = CreateSensorBuffers(
        frame_number, request.output_buffers,
        pipelines[request.pipeline_id].streams, request.pipeline_id,
        pipelines[request.pipeline_id].cb, /*override_width*/ 0,
        /*override_height*/ 0);
    if (output_buffers == nullptr) {
      return NO_MEMORY;
    }

    // Input buffers are optional (reprocessing only); a null result here is
    // not an error.
    auto input_buffers = CreateSensorBuffers(
        frame_number, request.input_buffers,
        pipelines[request.pipeline_id].streams, request.pipeline_id,
        pipelines[request.pipeline_id].cb, request.input_width,
        request.input_height);

    // Check if there are any settings that need to be overridden.
    camera_metadata_ro_entry_t entry;
    if (request.settings.get() != nullptr) {
      auto ret = request.settings.get()->Get(ANDROID_CONTROL_SETTINGS_OVERRIDE,
                                             &entry);
      if ((ret == OK) && (entry.count == 1)) {
        std::unique_ptr<HalCameraMetadata> override_setting =
            HalCameraMetadata::Clone(request.settings.get());
        override_settings_.push({.settings = std::move(override_setting),
                                 .frame_number = frame_number});
      }
    } else {
      // Null settings mark a repeating request: enqueue a placeholder so the
      // request thread re-applies the last cached override settings.
      override_settings_.push(
          {.settings = nullptr, .frame_number = frame_number});
    }
    pending_requests_.push(
        {.frame_number = frame_number,
         .pipeline_id = request.pipeline_id,
         .callback = pipelines[request.pipeline_id].cb,
         .settings = HalCameraMetadata::Clone(request.settings.get()),
         .input_buffers = std::move(input_buffers),
         .output_buffers = std::move(output_buffers)});
  }

  return OK;
}
151
CreateSensorBuffers(uint32_t frame_number,const std::vector<StreamBuffer> & buffers,const std::unordered_map<uint32_t,EmulatedStream> & streams,uint32_t pipeline_id,HwlPipelineCallback cb,int32_t override_width,int32_t override_height)152 std::unique_ptr<Buffers> EmulatedRequestProcessor::CreateSensorBuffers(
153 uint32_t frame_number, const std::vector<StreamBuffer>& buffers,
154 const std::unordered_map<uint32_t, EmulatedStream>& streams,
155 uint32_t pipeline_id, HwlPipelineCallback cb, int32_t override_width,
156 int32_t override_height) {
157 if (buffers.empty()) {
158 return nullptr;
159 }
160
161 std::vector<StreamBuffer> requested_buffers;
162 for (auto& buffer : buffers) {
163 if (buffer.buffer != nullptr) {
164 requested_buffers.push_back(buffer);
165 continue;
166 }
167
168 if (session_callback_.request_stream_buffers != nullptr) {
169 std::vector<StreamBuffer> one_requested_buffer;
170 status_t res = session_callback_.request_stream_buffers(
171 buffer.stream_id, 1, &one_requested_buffer, frame_number);
172 if (res != OK) {
173 ALOGE("%s: request_stream_buffers failed: %s(%d)", __FUNCTION__,
174 strerror(-res), res);
175 continue;
176 }
177 if (one_requested_buffer.size() != 1 ||
178 one_requested_buffer[0].buffer == nullptr) {
179 ALOGE("%s: request_stream_buffers failed to return a valid buffer",
180 __FUNCTION__);
181 continue;
182 }
183 requested_buffers.push_back(one_requested_buffer[0]);
184 }
185 }
186
187 if (requested_buffers.size() < buffers.size()) {
188 ALOGE(
189 "%s: Failed to acquire all sensor buffers: %zu acquired, %zu requested",
190 __FUNCTION__, requested_buffers.size(), buffers.size());
191 // This only happens for HAL buffer manager use case.
192 if (session_callback_.return_stream_buffers != nullptr) {
193 session_callback_.return_stream_buffers(requested_buffers);
194 }
195 requested_buffers.clear();
196 }
197
198 auto sensor_buffers = std::make_unique<Buffers>();
199 sensor_buffers->reserve(requested_buffers.size());
200 for (auto& buffer : requested_buffers) {
201 auto sensor_buffer = CreateSensorBuffer(
202 frame_number, streams.at(buffer.stream_id), pipeline_id, cb, buffer,
203 override_width, override_height);
204 if (sensor_buffer.get() != nullptr) {
205 sensor_buffers->push_back(std::move(sensor_buffer));
206 }
207 }
208
209 return sensor_buffers;
210 }
211
NotifyFailedRequest(const PendingRequest & request)212 void EmulatedRequestProcessor::NotifyFailedRequest(const PendingRequest& request) {
213 if (request.output_buffers != nullptr) {
214 // Mark all output buffers for this request in order not to send
215 // ERROR_BUFFER for them.
216 for (auto& output_buffer : *(request.output_buffers)) {
217 output_buffer->is_failed_request = true;
218 }
219 }
220
221 NotifyMessage msg = {
222 .type = MessageType::kError,
223 .message.error = {.frame_number = request.frame_number,
224 .error_stream_id = -1,
225 .error_code = ErrorCode::kErrorRequest}};
226 request.callback.notify(request.pipeline_id, msg);
227 }
228
Flush()229 status_t EmulatedRequestProcessor::Flush() {
230 std::lock_guard<std::mutex> lock(process_mutex_);
231 // First flush in-flight requests
232 auto ret = sensor_->Flush();
233
234 // Then the rest of the pending requests
235 while (!pending_requests_.empty()) {
236 const auto& request = pending_requests_.front();
237 NotifyFailedRequest(request);
238 pending_requests_.pop();
239 }
240
241 return ret;
242 }
243
GetBufferSizeAndStride(const EmulatedStream & stream,buffer_handle_t buffer,uint32_t * size,uint32_t * stride)244 status_t EmulatedRequestProcessor::GetBufferSizeAndStride(
245 const EmulatedStream& stream, buffer_handle_t buffer,
246 uint32_t* size /*out*/, uint32_t* stride /*out*/) {
247 if (size == nullptr) {
248 return BAD_VALUE;
249 }
250
251 switch (stream.override_format) {
252 case HAL_PIXEL_FORMAT_RGB_888:
253 *stride = stream.width * 3;
254 *size = (*stride) * stream.height;
255 break;
256 case HAL_PIXEL_FORMAT_RGBA_8888:
257 *stride = stream.width * 4;
258 *size = (*stride) * stream.height;
259 break;
260 case HAL_PIXEL_FORMAT_Y16:
261 if (stream.override_data_space == HAL_DATASPACE_DEPTH) {
262 *stride = AlignTo(AlignTo(stream.width, 2) * 2, 16);
263 *size = (*stride) * AlignTo(stream.height, 2);
264 } else {
265 return BAD_VALUE;
266 }
267 break;
268 case HAL_PIXEL_FORMAT_BLOB:
269 if (stream.override_data_space == HAL_DATASPACE_V0_JFIF ||
270 stream.override_data_space ==
271 static_cast<android_dataspace_t>(
272 aidl::android::hardware::graphics::common::Dataspace::JPEG_R)) {
273 *size = stream.buffer_size;
274 *stride = *size;
275 } else {
276 return BAD_VALUE;
277 }
278 break;
279 case HAL_PIXEL_FORMAT_RAW16:
280 if (importer_->getMonoPlanarStrideBytes(buffer, stride) != NO_ERROR) {
281 *stride = stream.width * 2;
282 }
283 *size = (*stride) * stream.height;
284 break;
285 default:
286 return BAD_VALUE;
287 }
288
289 return OK;
290 }
291
// Maps |buffer| into CPU-accessible memory and records the resulting plane
// layout in |sensor_buffer|.
//
// YUV_420_888 and P010 buffers are locked through lockYCbCr() and must expose
// a semi-planar (interleaved chroma) layout; all other formats use a flat
// single-plane mapping whose size/stride come from GetBufferSizeAndStride().
// Returns OK on success and BAD_VALUE for null output, lock failures or
// unsupported formats/layouts.
status_t EmulatedRequestProcessor::LockSensorBuffer(
    const EmulatedStream& stream, buffer_handle_t buffer, int32_t width,
    int32_t height, SensorBuffer* sensor_buffer /*out*/) {
  if (sensor_buffer == nullptr) {
    return BAD_VALUE;
  }

  auto usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
  bool isYUV_420_888 = stream.override_format == HAL_PIXEL_FORMAT_YCBCR_420_888;
  bool isP010 = static_cast<android_pixel_format_v1_1_t>(
                    stream.override_format) == HAL_PIXEL_FORMAT_YCBCR_P010;
  if ((isYUV_420_888) || (isP010)) {
    android::Rect map_rect = {0, 0, width, height};
    auto yuv_layout = importer_->lockYCbCr(buffer, usage, map_rect);
    if ((yuv_layout.y != nullptr) && (yuv_layout.cb != nullptr) &&
        (yuv_layout.cr != nullptr)) {
      sensor_buffer->plane.img_y_crcb.img_y =
          static_cast<uint8_t*>(yuv_layout.y);
      sensor_buffer->plane.img_y_crcb.img_cb =
          static_cast<uint8_t*>(yuv_layout.cb);
      sensor_buffer->plane.img_y_crcb.img_cr =
          static_cast<uint8_t*>(yuv_layout.cr);
      sensor_buffer->plane.img_y_crcb.y_stride = yuv_layout.ystride;
      sensor_buffer->plane.img_y_crcb.cbcr_stride = yuv_layout.cstride;
      sensor_buffer->plane.img_y_crcb.cbcr_step = yuv_layout.chroma_step;
      // With a chroma step of 2 the Cb/Cr planes must interleave (NV12/NV21,
      // i.e. adjacent pointers); reject any other layout for YUV_420_888.
      if (isYUV_420_888 && (yuv_layout.chroma_step == 2) &&
          std::abs(sensor_buffer->plane.img_y_crcb.img_cb -
                   sensor_buffer->plane.img_y_crcb.img_cr) != 1) {
        ALOGE(
            "%s: Unsupported YUV layout, chroma step: %zu U/V plane delta: %u",
            __FUNCTION__, yuv_layout.chroma_step,
            static_cast<unsigned>(
                std::abs(sensor_buffer->plane.img_y_crcb.img_cb -
                         sensor_buffer->plane.img_y_crcb.img_cr)));
        return BAD_VALUE;
      }
      sensor_buffer->plane.img_y_crcb.bytesPerPixel = isP010 ? 2 : 1;
    } else {
      ALOGE("%s: Failed to lock output buffer for stream id %d !", __FUNCTION__,
            stream.id);
      return BAD_VALUE;
    }
  } else {
    uint32_t buffer_size = 0, stride = 0;
    auto ret = GetBufferSizeAndStride(stream, buffer, &buffer_size, &stride);
    if (ret != OK) {
      ALOGE("%s: Unsupported pixel format: 0x%x", __FUNCTION__,
            stream.override_format);
      return BAD_VALUE;
    }
    // BLOB buffers are locked by size; everything else by pixel region.
    if (stream.override_format == HAL_PIXEL_FORMAT_BLOB) {
      sensor_buffer->plane.img.img =
          static_cast<uint8_t*>(importer_->lock(buffer, usage, buffer_size));
    } else {
      android::Rect region{0, 0, width, height};
      sensor_buffer->plane.img.img =
          static_cast<uint8_t*>(importer_->lock(buffer, usage, region));
    }
    if (sensor_buffer->plane.img.img == nullptr) {
      ALOGE("%s: Failed to lock output buffer!", __FUNCTION__);
      return BAD_VALUE;
    }
    sensor_buffer->plane.img.stride_in_bytes = stride;
    sensor_buffer->plane.img.buffer_size = buffer_size;
  }

  return OK;
}
360
// Builds a single GrallocSensorBuffer around |stream_buffer|.
//
// The buffer inherits geometry from the stream unless positive override
// dimensions are supplied (used for reprocessing inputs). The backing gralloc
// buffer is locked and the acquire fence imported. Returns nullptr when
// locking or fence import fails; in that case the buffer is flagged as a
// failed request before release so its destructor can report it accordingly.
std::unique_ptr<SensorBuffer> EmulatedRequestProcessor::CreateSensorBuffer(
    uint32_t frame_number, const EmulatedStream& emulated_stream,
    uint32_t pipeline_id, HwlPipelineCallback callback,
    StreamBuffer stream_buffer, int32_t override_width,
    int32_t override_height) {
  auto buffer = std::make_unique<GrallocSensorBuffer>(importer_);

  auto stream = emulated_stream;
  // Make sure input stream formats are correctly mapped here
  if (stream.is_input) {
    stream.override_format = EmulatedSensor::OverrideFormat(
        stream.override_format,
        ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);
  }
  if (override_width > 0 && override_height > 0) {
    buffer->width = override_width;
    buffer->height = override_height;
  } else {
    buffer->width = stream.width;
    buffer->height = stream.height;
  }
  buffer->format = static_cast<PixelFormat>(stream.override_format);
  buffer->dataSpace = stream.override_data_space;
  buffer->color_space = stream.color_space;
  buffer->use_case = stream.use_case;
  buffer->stream_buffer = stream_buffer;
  buffer->pipeline_id = pipeline_id;
  buffer->callback = callback;
  buffer->frame_number = frame_number;
  // Physical streams report results under their own camera id.
  buffer->camera_id = emulated_stream.is_physical_camera_stream
                          ? emulated_stream.physical_camera_id
                          : camera_id_;
  buffer->is_input = stream.is_input;
  // In case buffer processing is successful, flip this flag accordingly
  buffer->stream_buffer.status = BufferStatus::kError;

  if (buffer->stream_buffer.buffer != nullptr) {
    auto ret = LockSensorBuffer(stream, buffer->stream_buffer.buffer,
                                buffer->width, buffer->height, buffer.get());
    if (ret != OK) {
      // Releasing the buffer with is_failed_request set lets the destructor
      // handle the error reporting for this stream buffer.
      buffer->is_failed_request = true;
      buffer = nullptr;
    }
  }

  if ((buffer.get() != nullptr) && (stream_buffer.acquire_fence != nullptr)) {
    auto fence_status = importer_->importFence(stream_buffer.acquire_fence,
                                               buffer->acquire_fence_fd);
    if (!fence_status) {
      ALOGE("%s: Failed importing acquire fence!", __FUNCTION__);
      buffer->is_failed_request = true;
      buffer = nullptr;
    }
  }

  return buffer;
}
418
AcquireBuffers(Buffers * buffers)419 std::unique_ptr<Buffers> EmulatedRequestProcessor::AcquireBuffers(
420 Buffers* buffers) {
421 if ((buffers == nullptr) || (buffers->empty())) {
422 return nullptr;
423 }
424
425 auto acquired_buffers = std::make_unique<Buffers>();
426 acquired_buffers->reserve(buffers->size());
427 auto output_buffer = buffers->begin();
428 while (output_buffer != buffers->end()) {
429 status_t ret = OK;
430 if ((*output_buffer)->acquire_fence_fd >= 0) {
431 ret = sync_wait((*output_buffer)->acquire_fence_fd,
432 ns2ms(EmulatedSensor::kSupportedFrameDurationRange[1]));
433 if (ret != OK) {
434 ALOGE("%s: Fence sync failed: %s, (%d)", __FUNCTION__, strerror(-ret),
435 ret);
436 }
437 }
438
439 if (ret == OK) {
440 acquired_buffers->push_back(std::move(*output_buffer));
441 }
442
443 output_buffer = buffers->erase(output_buffer);
444 }
445
446 return acquired_buffers;
447 }
448
// Main body of the request thread: once per sensor vsync, pops one pending
// request, resolves its settings (including any queued zoom overrides) and
// hands the acquired buffers to the sensor, or fails the frame with an error
// notification when settings/buffers cannot be prepared.
void EmulatedRequestProcessor::RequestProcessorLoop() {
  ATRACE_CALL();

  // Local flag despite the trailing underscore; terminates the loop when the
  // sensor stops reporting vsync.
  bool vsync_status_ = true;
  while (!processor_done_ && vsync_status_) {
    {
      std::lock_guard<std::mutex> lock(process_mutex_);
      if (!pending_requests_.empty()) {
        status_t ret;
        const auto& request = pending_requests_.front();
        auto frame_number = request.frame_number;
        auto notify_callback = request.callback;
        auto pipeline_id = request.pipeline_id;

        // Wait on acquire fences; buffers whose fences fail are dropped.
        auto output_buffers = AcquireBuffers(request.output_buffers.get());
        auto input_buffers = AcquireBuffers(request.input_buffers.get());
        if ((output_buffers != nullptr) && !output_buffers->empty()) {
          std::unique_ptr<EmulatedSensor::LogicalCameraSettings> logical_settings =
              std::make_unique<EmulatedSensor::LogicalCameraSettings>();

          // Collect the physical camera ids targeted by this request.
          std::unique_ptr<std::set<uint32_t>> physical_camera_output_ids =
              std::make_unique<std::set<uint32_t>>();
          for (const auto& it : *output_buffers) {
            if (it->camera_id != camera_id_) {
              physical_camera_output_ids->emplace(it->camera_id);
            }
          }

          // Repeating requests usually include valid settings only during the
          // initial call. Afterwards an invalid settings pointer means that
          // there are no changes in the parameters and Hal should re-use the
          // last valid values.
          // TODO: Add support for individual physical camera requests.
          if (request.settings.get() != nullptr) {
            auto override_frame_number =
                ApplyOverrideSettings(frame_number, request.settings);
            ret = request_state_->InitializeLogicalSettings(
                HalCameraMetadata::Clone(request.settings.get()),
                std::move(physical_camera_output_ids), override_frame_number,
                logical_settings.get());
            last_settings_ = HalCameraMetadata::Clone(request.settings.get());
          } else {
            auto override_frame_number =
                ApplyOverrideSettings(frame_number, last_settings_);
            ret = request_state_->InitializeLogicalSettings(
                HalCameraMetadata::Clone(last_settings_.get()),
                std::move(physical_camera_output_ids), override_frame_number,
                logical_settings.get());
          }

          if (ret == OK) {
            auto partial_result = request_state_->InitializeLogicalResult(
                pipeline_id, frame_number,
                /*partial result*/ true);
            auto result = request_state_->InitializeLogicalResult(
                pipeline_id, frame_number,
                /*partial result*/ false);
            // The screen rotation will be the same for all logical and physical devices
            uint32_t screen_rotation = screen_rotation_;
            for (auto it = logical_settings->begin();
                 it != logical_settings->end(); it++) {
              it->second.screen_rotation = screen_rotation;
            }

            // Hand everything to the sensor; it completes the capture and
            // delivers the results asynchronously.
            sensor_->SetCurrentRequest(
                std::move(logical_settings), std::move(result),
                std::move(partial_result), std::move(input_buffers),
                std::move(output_buffers));
          } else {
            // Settings initialization failed; fail the whole result.
            NotifyMessage msg{.type = MessageType::kError,
                              .message.error = {
                                  .frame_number = frame_number,
                                  .error_stream_id = -1,
                                  .error_code = ErrorCode::kErrorResult,
                              }};

            notify_callback.notify(pipeline_id, msg);
          }
        } else {
          // No further processing is needed, just fail the result which will
          // complete this request.
          NotifyMessage msg{.type = MessageType::kError,
                            .message.error = {
                                .frame_number = frame_number,
                                .error_stream_id = -1,
                                .error_code = ErrorCode::kErrorResult,
                            }};

          notify_callback.notify(pipeline_id, msg);
        }

        // Free the queue slot and wake any producer blocked in
        // ProcessPipelineRequests().
        pending_requests_.pop();
        request_condition_.notify_one();
      }
    }

    // Pace the loop on the sensor vsync; a false return ends the thread.
    vsync_status_ =
        sensor_->WaitForVSync(EmulatedSensor::kSupportedFrameDurationRange[1]);
  }
}
549
Initialize(std::unique_ptr<EmulatedCameraDeviceInfo> device_info,PhysicalDeviceMapPtr physical_devices)550 status_t EmulatedRequestProcessor::Initialize(
551 std::unique_ptr<EmulatedCameraDeviceInfo> device_info,
552 PhysicalDeviceMapPtr physical_devices) {
553 std::lock_guard<std::mutex> lock(process_mutex_);
554 return request_state_->Initialize(std::move(device_info),
555 std::move(physical_devices));
556 }
557
SetSessionCallback(const HwlSessionCallback & hwl_session_callback)558 void EmulatedRequestProcessor::SetSessionCallback(
559 const HwlSessionCallback& hwl_session_callback) {
560 std::lock_guard<std::mutex> lock(process_mutex_);
561 session_callback_ = hwl_session_callback;
562 }
563
GetDefaultRequest(RequestTemplate type,std::unique_ptr<HalCameraMetadata> * default_settings)564 status_t EmulatedRequestProcessor::GetDefaultRequest(
565 RequestTemplate type, std::unique_ptr<HalCameraMetadata>* default_settings) {
566 std::lock_guard<std::mutex> lock(process_mutex_);
567 return request_state_->GetDefaultRequest(type, default_settings);
568 }
569
ApplyOverrideSettings(uint32_t frame_number,const std::unique_ptr<HalCameraMetadata> & request_settings)570 uint32_t EmulatedRequestProcessor::ApplyOverrideSettings(
571 uint32_t frame_number,
572 const std::unique_ptr<HalCameraMetadata>& request_settings) {
573 while (!override_settings_.empty() && request_settings.get() != nullptr) {
574 auto override_frame_number = override_settings_.front().frame_number;
575 bool repeatingOverride = (override_settings_.front().settings == nullptr);
576 const auto& override_setting = repeatingOverride
577 ? last_override_settings_
578 : override_settings_.front().settings;
579
580 camera_metadata_ro_entry_t entry;
581 status_t ret =
582 override_setting->Get(ANDROID_CONTROL_SETTINGS_OVERRIDE, &entry);
583 bool overriding = false;
584 if ((ret == OK) && (entry.count == 1) &&
585 (entry.data.i32[0] == ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM)) {
586 ApplyOverrideZoom(override_setting, request_settings,
587 ANDROID_CONTROL_SETTINGS_OVERRIDE);
588 ApplyOverrideZoom(override_setting, request_settings,
589 ANDROID_CONTROL_ZOOM_RATIO);
590 ApplyOverrideZoom(override_setting, request_settings,
591 ANDROID_SCALER_CROP_REGION);
592 ApplyOverrideZoom(override_setting, request_settings,
593 ANDROID_CONTROL_AE_REGIONS);
594 ApplyOverrideZoom(override_setting, request_settings,
595 ANDROID_CONTROL_AWB_REGIONS);
596 ApplyOverrideZoom(override_setting, request_settings,
597 ANDROID_CONTROL_AF_REGIONS);
598 overriding = true;
599 }
600 if (!repeatingOverride) {
601 last_override_settings_ = HalCameraMetadata::Clone(override_setting.get());
602 }
603
604 override_settings_.pop();
605 // If there are multiple queued override settings, skip until the speed-up
606 // is at least 2 frames.
607 if (override_frame_number - frame_number >= kZoomSpeedup) {
608 // If the request's settings override isn't ON, do not return
609 // override_frame_number. Return 0 to indicate there is no
610 // override happening.
611 return overriding ? override_frame_number : 0;
612 }
613 }
614 return 0;
615 }
616
ApplyOverrideZoom(const std::unique_ptr<HalCameraMetadata> & override_setting,const std::unique_ptr<HalCameraMetadata> & request_settings,camera_metadata_tag tag)617 void EmulatedRequestProcessor::ApplyOverrideZoom(
618 const std::unique_ptr<HalCameraMetadata>& override_setting,
619 const std::unique_ptr<HalCameraMetadata>& request_settings,
620 camera_metadata_tag tag) {
621 status_t ret;
622 camera_metadata_ro_entry_t entry;
623 ret = override_setting->Get(tag, &entry);
624 if (ret == OK) {
625 if (entry.type == TYPE_INT32) {
626 request_settings->Set(tag, entry.data.i32, entry.count);
627 } else if (entry.type == TYPE_FLOAT) {
628 request_settings->Set(tag, entry.data.f, entry.count);
629 } else {
630 ALOGE("%s: Unsupported override key %d", __FUNCTION__, tag);
631 }
632 } else {
633 auto missing_tag = get_camera_metadata_tag_name(tag);
634 ALOGE("%s: %s needs to be specified for overriding zoom", __func__,
635 missing_tag);
636 }
637 }
638
onEvent(const Event & e)639 Return<void> EmulatedRequestProcessor::SensorHandler::onEvent(const Event& e) {
640 auto processor = processor_.lock();
641 if (processor.get() == nullptr) {
642 return Void();
643 }
644
645 if (e.sensorType == SensorType::ACCELEROMETER) {
646 // Heuristic approach for deducing the screen
647 // rotation depending on the reported
648 // accelerometer readings. We switch
649 // the screen rotation when one of the
650 // x/y axis gets close enough to the earth
651 // acceleration.
652 const uint32_t earth_accel = 9; // Switch threshold [m/s^2]
653 uint32_t x_accel = e.u.vec3.x;
654 uint32_t y_accel = e.u.vec3.y;
655 uint32_t z_accel = abs(e.u.vec3.z);
656 if (z_accel == earth_accel) {
657 return Void();
658 }
659
660 if (x_accel == earth_accel) {
661 processor->screen_rotation_ = 270;
662 } else if (x_accel == -earth_accel) {
663 processor->screen_rotation_ = 90;
664 } else if (y_accel == -earth_accel) {
665 processor->screen_rotation_ = 180;
666 } else {
667 processor->screen_rotation_ = 0;
668 }
669 } else {
670 ALOGE("%s: unexpected event received type: %d", __func__, e.sensorType);
671 }
672 return Void();
673 }
674
InitializeSensorQueue(std::weak_ptr<EmulatedRequestProcessor> processor)675 void EmulatedRequestProcessor::InitializeSensorQueue(
676 std::weak_ptr<EmulatedRequestProcessor> processor) {
677 if (sensor_event_queue_.get() != nullptr) {
678 return;
679 }
680
681 sp<ISensorManager> manager = ISensorManager::getService();
682 if (manager == nullptr) {
683 ALOGE("%s: Cannot get ISensorManager", __func__);
684 } else {
685 bool sensor_found = false;
686 manager->getSensorList([&](const auto& list, auto result) {
687 if (result != Result::OK) {
688 ALOGE("%s: Failed to retrieve sensor list!", __func__);
689 } else {
690 for (const SensorInfo& it : list) {
691 if (it.type == SensorType::ACCELEROMETER) {
692 sensor_found = true;
693 sensor_handle_ = it.sensorHandle;
694 }
695 }
696 }
697 });
698 if (sensor_found) {
699 manager->createEventQueue(
700 new SensorHandler(processor), [&](const auto& q, auto result) {
701 if (result != Result::OK) {
702 ALOGE("%s: Cannot create event queue", __func__);
703 return;
704 }
705 sensor_event_queue_ = q;
706 });
707
708 if (sensor_event_queue_.get() != nullptr) {
709 auto res = sensor_event_queue_->enableSensor(
710 sensor_handle_,
711 ns2us(EmulatedSensor::kSupportedFrameDurationRange[0]),
712 0 /*maxBatchReportLatencyUs*/);
713 if (res.isOk()) {
714 } else {
715 ALOGE("%s: Failed to enable sensor", __func__);
716 }
717 } else {
718 ALOGE("%s: Failed to create event queue", __func__);
719 }
720 }
721 }
722 }
723
724 } // namespace android
725