xref: /aosp_15_r20/frameworks/av/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp (revision ec779b8e0859a360c3d303172224686826e6e0e1)
1 /*
2  * Copyright (C) 2020 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #include <cutils/properties.h>
18 
19 #include "SessionConfigurationUtils.h"
20 #include <android/data_space.h>
21 #include <camera/StringUtils.h>
22 #include <gui/Flags.h>  // remove with WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
23 #include <ui/PublicFormat.h>
24 #include "../CameraService.h"
25 #include "../api2/DepthCompositeStream.h"
26 #include "../api2/HeicCompositeStream.h"
27 #include "SessionConfigurationUtils.h"
28 #include "aidl/android/hardware/graphics/common/Dataspace.h"
29 #include "api2/JpegRCompositeStream.h"
30 #include "binder/Status.h"
31 #include "common/CameraDeviceBase.h"
32 #include "common/HalConversionsTemplated.h"
33 #include "device3/Camera3OutputStream.h"
34 #include "device3/ZoomRatioMapper.h"
35 #include "device3/aidl/AidlCamera3Device.h"
36 #include "device3/hidl/HidlCamera3Device.h"
37 #include "system/graphics-base-v1.1.h"
38 
39 using android::camera3::OutputStreamInfo;
40 using android::camera3::OutputStreamInfo;
41 using android::hardware::camera2::ICameraDeviceUser;
42 using aidl::android::hardware::camera::device::RequestTemplate;
43 
44 namespace android {
45 namespace camera3 {
46 namespace flags = com::android::internal::camera::flags;
47 
getStreamConfigurations(const CameraMetadata & staticInfo,int configuration,std::unordered_map<int,std::vector<StreamConfiguration>> * scm)48 void StreamConfiguration::getStreamConfigurations(
49         const CameraMetadata &staticInfo, int configuration,
50         std::unordered_map<int, std::vector<StreamConfiguration>> *scm) {
51     if (scm == nullptr) {
52         ALOGE("%s: StreamConfigurationMap nullptr", __FUNCTION__);
53         return;
54     }
55     const int STREAM_FORMAT_OFFSET = 0;
56     const int STREAM_WIDTH_OFFSET = 1;
57     const int STREAM_HEIGHT_OFFSET = 2;
58     const int STREAM_IS_INPUT_OFFSET = 3;
59 
60     camera_metadata_ro_entry availableStreamConfigs = staticInfo.find(configuration);
61     for (size_t i = 0; i < availableStreamConfigs.count; i += 4) {
62         int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
63         int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
64         int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
65         int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
66         StreamConfiguration sc = {format, width, height, isInput};
67         (*scm)[format].push_back(sc);
68     }
69 }
70 
getStreamConfigurations(const CameraMetadata & staticInfo,bool maxRes,std::unordered_map<int,std::vector<StreamConfiguration>> * scm)71 void StreamConfiguration::getStreamConfigurations(
72         const CameraMetadata &staticInfo, bool maxRes,
73         std::unordered_map<int, std::vector<StreamConfiguration>> *scm) {
74     int32_t scalerKey =
75             SessionConfigurationUtils::getAppropriateModeTag(
76                     ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxRes);
77 
78     int32_t depthKey =
79             SessionConfigurationUtils::getAppropriateModeTag(
80                     ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, maxRes);
81 
82     int32_t dynamicDepthKey =
83             SessionConfigurationUtils::getAppropriateModeTag(
84                     ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS, maxRes);
85 
86     int32_t heicKey =
87             SessionConfigurationUtils::getAppropriateModeTag(
88                     ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxRes);
89 
90     getStreamConfigurations(staticInfo, scalerKey, scm);
91     getStreamConfigurations(staticInfo, depthKey, scm);
92     getStreamConfigurations(staticInfo, dynamicDepthKey, scm);
93     getStreamConfigurations(staticInfo, heicKey, scm);
94 }
95 
96 namespace SessionConfigurationUtils {
97 
// Media performance class declared by the device vendor via the
// "ro.odm.build.media_performance_class" property; 0 when unset.
int32_t PERF_CLASS_LEVEL =
        property_get_int32("ro.odm.build.media_performance_class", 0);

// True when the declared performance class is at least SDK_VERSION_S
// (presumably Android S / API 31 — confirm against the header definition).
bool IS_PERF_CLASS = (PERF_CLASS_LEVEL >= SDK_VERSION_S);
102 
getMaxJpegResolution(const CameraMetadata & metadata,bool ultraHighResolution)103 camera3::Size getMaxJpegResolution(const CameraMetadata &metadata,
104         bool ultraHighResolution) {
105     int32_t maxJpegWidth = 0, maxJpegHeight = 0;
106     const int STREAM_CONFIGURATION_SIZE = 4;
107     const int STREAM_FORMAT_OFFSET = 0;
108     const int STREAM_WIDTH_OFFSET = 1;
109     const int STREAM_HEIGHT_OFFSET = 2;
110     const int STREAM_IS_INPUT_OFFSET = 3;
111 
112     int32_t scalerSizesTag = ultraHighResolution ?
113             ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION :
114                     ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
115     camera_metadata_ro_entry_t availableStreamConfigs =
116             metadata.find(scalerSizesTag);
117     if (availableStreamConfigs.count == 0 ||
118             availableStreamConfigs.count % STREAM_CONFIGURATION_SIZE != 0) {
119         return camera3::Size(0, 0);
120     }
121 
122     // Get max jpeg size (area-wise).
123     for (size_t i= 0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) {
124         int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
125         int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
126         int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
127         int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
128         if (isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
129                 && format == HAL_PIXEL_FORMAT_BLOB &&
130                 (width * height > maxJpegWidth * maxJpegHeight)) {
131             maxJpegWidth = width;
132             maxJpegHeight = height;
133         }
134     }
135 
136     return camera3::Size(maxJpegWidth, maxJpegHeight);
137 }
138 
getUHRMaxJpegBufferSize(camera3::Size uhrMaxJpegSize,camera3::Size defaultMaxJpegSize,size_t defaultMaxJpegBufferSize)139 size_t getUHRMaxJpegBufferSize(camera3::Size uhrMaxJpegSize,
140         camera3::Size defaultMaxJpegSize, size_t defaultMaxJpegBufferSize) {
141     return ((float)(uhrMaxJpegSize.width * uhrMaxJpegSize.height)) /
142             (defaultMaxJpegSize.width * defaultMaxJpegSize.height) * defaultMaxJpegBufferSize;
143 }
144 
145 StreamConfigurationPair
getStreamConfigurationPair(const CameraMetadata & staticInfo)146 getStreamConfigurationPair(const CameraMetadata &staticInfo) {
147     camera3::StreamConfigurationPair streamConfigurationPair;
148     camera3::StreamConfiguration::getStreamConfigurations(staticInfo, false,
149             &streamConfigurationPair.mDefaultStreamConfigurationMap);
150     camera3::StreamConfiguration::getStreamConfigurations(staticInfo, true,
151             &streamConfigurationPair.mMaximumResolutionStreamConfigurationMap);
152     return streamConfigurationPair;
153 }
154 
/**
 * Returns the squared euclidean distance between (x0, y0) and (x1, y1).
 *
 * The coordinates are widened to 64 bits before subtracting so the
 * intermediate difference cannot overflow a signed 32-bit integer (which
 * would be undefined behavior for extreme inputs).
 */
int64_t euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1) {
    int64_t d0 = static_cast<int64_t>(x0) - x1;
    int64_t d1 = static_cast<int64_t>(y0) - y1;
    return d0 * d0 + d1 * d1;
}
160 
/**
 * Rounds the requested (width, height) to the nearest size listed for the
 * given format in the camera's static metadata.
 *
 * The metadata entry to search is chosen from the dataspace: Jpeg/R, HEIC
 * UltraHDR, depth, and HEIF each have dedicated configuration tags; all other
 * dataspaces use the scaler configurations. When maxResolution is true the
 * maximum-resolution variants of the tags are used.
 *
 * On success writes the chosen size to *outWidth / *outHeight (either may be
 * null) and returns true. Returns false when no configuration for the format
 * is listed.
 */
bool roundBufferDimensionNearest(int32_t width, int32_t height,
        int32_t format, android_dataspace dataSpace,
        const CameraMetadata& info, bool maxResolution, /*out*/int32_t* outWidth,
        /*out*/int32_t* outHeight) {
    const int32_t depthSizesTag =
            getAppropriateModeTag(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
                    maxResolution);
    const int32_t scalerSizesTag =
            getAppropriateModeTag(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxResolution);
    const int32_t heicSizesTag =
            getAppropriateModeTag(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxResolution);
    const int32_t jpegRSizesTag = getAppropriateModeTag(
            ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS, maxResolution);
    const int32_t heicUltraHDRSizesTag = getAppropriateModeTag(
            ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS, maxResolution);

    bool isJpegRDataSpace = (dataSpace == static_cast<android_dataspace_t>(
                ::aidl::android::hardware::graphics::common::Dataspace::JPEG_R));
    bool isHeicUltraHDRDataSpace = (dataSpace == static_cast<android_dataspace_t>(
                ::aidl::android::hardware::graphics::common::Dataspace::HEIF_ULTRAHDR));
    // Pick the configuration list matching the dataspace; scaler is the
    // catch-all default.
    camera_metadata_ro_entry streamConfigs =
            (isJpegRDataSpace) ? info.find(jpegRSizesTag) :
            (isHeicUltraHDRDataSpace) ? info.find(heicUltraHDRSizesTag) :
            (dataSpace == HAL_DATASPACE_DEPTH) ? info.find(depthSizesTag) :
            (dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) ?
            info.find(heicSizesTag) :
            info.find(scalerSizesTag);

    // -1 means "no candidate found yet".
    int32_t bestWidth = -1;
    int32_t bestHeight = -1;

    // Iterate through listed stream configurations and find the one with the smallest euclidean
    // distance from the given dimensions for the given format.
    // Entries are quadruples of (format, width, height, isInput).
    for (size_t i = 0; i < streamConfigs.count; i += 4) {
        int32_t fmt = streamConfigs.data.i32[i];
        int32_t w = streamConfigs.data.i32[i + 1];
        int32_t h = streamConfigs.data.i32[i + 2];

        // Ignore input/output type for now
        if (fmt == format) {
            if (w == width && h == height) {
                // Exact match always wins; stop searching.
                bestWidth = width;
                bestHeight = height;
                break;
            } else if (w <= ROUNDING_WIDTH_CAP && (bestWidth == -1 ||
                    SessionConfigurationUtils::euclidDistSquare(w, h, width, height) <
                    SessionConfigurationUtils::euclidDistSquare(bestWidth, bestHeight, width,
                            height))) {
                // Candidates wider than ROUNDING_WIDTH_CAP are never used for
                // rounding; among the rest keep the closest (strictly smaller
                // distance, so the first of equally-distant entries wins).
                bestWidth = w;
                bestHeight = h;
            }
        }
    }

    if (bestWidth == -1) {
        // Return false if no configurations for this format were listed
        ALOGE("%s: No configurations for format %d width %d, height %d, maxResolution ? %s",
                __FUNCTION__, format, width, height, maxResolution ? "true" : "false");
        return false;
    }

    // Set the outputs to the closet width/height
    if (outWidth != NULL) {
        *outWidth = bestWidth;
    }
    if (outHeight != NULL) {
        *outHeight = bestHeight;
    }

    // Return true if at least one configuration for this format was listed
    return true;
}
233 
234 //check if format is 10-bit compatible
is10bitCompatibleFormat(int32_t format,android_dataspace_t dataSpace)235 bool is10bitCompatibleFormat(int32_t format, android_dataspace_t dataSpace) {
236     switch(format) {
237         case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
238         case HAL_PIXEL_FORMAT_YCBCR_P010:
239             return true;
240         case HAL_PIXEL_FORMAT_BLOB:
241             if (dataSpace == static_cast<android_dataspace_t>(
242                         ::aidl::android::hardware::graphics::common::Dataspace::JPEG_R)) {
243                 return true;
244             } else if (dataSpace == static_cast<android_dataspace_t>(
245                         ::aidl::android::hardware::graphics::common::Dataspace::HEIF_ULTRAHDR)) {
246                 return true;
247             }
248 
249             return false;
250         default:
251             return false;
252     }
253 }
254 
isDynamicRangeProfileSupported(int64_t dynamicRangeProfile,const CameraMetadata & staticInfo)255 bool isDynamicRangeProfileSupported(int64_t dynamicRangeProfile, const CameraMetadata& staticInfo) {
256     if (dynamicRangeProfile == ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) {
257         // Supported by default
258         return true;
259     }
260 
261     camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
262     bool is10bitDynamicRangeSupported = false;
263     for (size_t i = 0; i < entry.count; ++i) {
264         uint8_t capability = entry.data.u8[i];
265         if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT) {
266             is10bitDynamicRangeSupported = true;
267             break;
268         }
269     }
270 
271     if (!is10bitDynamicRangeSupported) {
272         return false;
273     }
274 
275     switch (dynamicRangeProfile) {
276         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
277         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
278         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10:
279         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM:
280         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO:
281         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF:
282         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO:
283         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM:
284         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO:
285         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF:
286         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF_PO:
287             entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP);
288             for (size_t i = 0; i < entry.count; i += 3) {
289                 if (dynamicRangeProfile == entry.data.i64[i]) {
290                     return true;
291                 }
292             }
293 
294             return false;
295         default:
296             return false;
297     }
298 
299     return false;
300 }
301 
302 //check if format is 10-bit compatible
is10bitDynamicRangeProfile(int64_t dynamicRangeProfile)303 bool is10bitDynamicRangeProfile(int64_t dynamicRangeProfile) {
304     switch (dynamicRangeProfile) {
305         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
306         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
307         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10:
308         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM:
309         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO:
310         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF:
311         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO:
312             return true;
313         default:
314             return false;
315     }
316 }
317 
deviceReportsColorSpaces(const CameraMetadata & staticInfo)318 bool deviceReportsColorSpaces(const CameraMetadata& staticInfo) {
319     camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
320     for (size_t i = 0; i < entry.count; ++i) {
321         uint8_t capability = entry.data.u8[i];
322         if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES) {
323             return true;
324         }
325     }
326 
327     return false;
328 }
329 
/**
 * Returns whether the (color space, format, dynamic range profile)
 * combination appears in the device's color space profiles map.
 *
 * The HAL format + dataspace pair is first translated to the public image
 * format value used by the map. Depth-related formats have no public color
 * space semantics and are rejected outright.
 */
bool isColorSpaceSupported(int32_t colorSpace, int32_t format, android_dataspace dataSpace,
        int64_t dynamicRangeProfile, const CameraMetadata& staticInfo) {
    int64_t colorSpace64 = colorSpace;
    int64_t format64 = format;

    // Translate HAL format + data space to public format
    if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace == HAL_DATASPACE_V0_JFIF) {
        format64 = 0x100; // JPEG
    } else if (format == HAL_PIXEL_FORMAT_BLOB
            && dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) {
        format64 = 0x48454946; // HEIC
    } else if (format == HAL_PIXEL_FORMAT_BLOB
            && dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_DYNAMIC_DEPTH)) {
        format64 = 0x69656963; // DEPTH_JPEG
    } else if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace == HAL_DATASPACE_DEPTH) {
        return false; // DEPTH_POINT_CLOUD, not applicable
    } else if (format == HAL_PIXEL_FORMAT_Y16 && dataSpace == HAL_DATASPACE_DEPTH) {
        return false; // DEPTH16, not applicable
    } else if (format == HAL_PIXEL_FORMAT_RAW16 && dataSpace == HAL_DATASPACE_DEPTH) {
        return false; // RAW_DEPTH, not applicable
    } else if (format == HAL_PIXEL_FORMAT_RAW10 && dataSpace == HAL_DATASPACE_DEPTH) {
        return false; // RAW_DEPTH10, not applicable
    } else if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace ==
            static_cast<android_dataspace>(
                ::aidl::android::hardware::graphics::common::Dataspace::JPEG_R)) {
        format64 = static_cast<int64_t>(PublicFormat::JPEG_R);
    } else if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace ==
            static_cast<android_dataspace>(
                ::aidl::android::hardware::graphics::common::Dataspace::HEIF_ULTRAHDR)) {
        format64 = static_cast<int64_t>(PublicFormat::HEIC_ULTRAHDR);
    }

    // The map is read in strides of 3: (color space, image format,
    // dynamic range profile bitmap).
    camera_metadata_ro_entry_t entry =
            staticInfo.find(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP);
    for (size_t i = 0; i < entry.count; i += 3) {
        bool isFormatCompatible = (format64 == entry.data.i64[i + 1]);
        // The third element is treated as a bitmask of compatible profiles.
        bool isDynamicProfileCompatible =
                (dynamicRangeProfile & entry.data.i64[i + 2]) != 0;

        if (colorSpace64 == entry.data.i64[i]
                && isFormatCompatible
                && isDynamicProfileCompatible) {
            return true;
        }
    }

    ALOGE("Color space %d, image format %" PRId64 ", and dynamic range 0x%" PRIx64
            " combination not found", colorSpace, format64, dynamicRangeProfile);
    return false;
}
380 
isPublicFormat(int32_t format)381 bool isPublicFormat(int32_t format)
382 {
383     switch(format) {
384         case HAL_PIXEL_FORMAT_RGBA_8888:
385         case HAL_PIXEL_FORMAT_RGBX_8888:
386         case HAL_PIXEL_FORMAT_RGB_888:
387         case HAL_PIXEL_FORMAT_RGB_565:
388         case HAL_PIXEL_FORMAT_BGRA_8888:
389         case HAL_PIXEL_FORMAT_YV12:
390         case HAL_PIXEL_FORMAT_Y8:
391         case HAL_PIXEL_FORMAT_Y16:
392         case HAL_PIXEL_FORMAT_RAW16:
393         case HAL_PIXEL_FORMAT_RAW10:
394         case HAL_PIXEL_FORMAT_RAW12:
395         case HAL_PIXEL_FORMAT_RAW_OPAQUE:
396         case HAL_PIXEL_FORMAT_BLOB:
397         case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
398         case HAL_PIXEL_FORMAT_YCbCr_420_888:
399         case HAL_PIXEL_FORMAT_YCbCr_422_SP:
400         case HAL_PIXEL_FORMAT_YCrCb_420_SP:
401         case HAL_PIXEL_FORMAT_YCbCr_422_I:
402             return true;
403         default:
404             return false;
405     }
406 }
407 
dataSpaceFromColorSpace(android_dataspace * dataSpace,int32_t colorSpace)408 bool dataSpaceFromColorSpace(android_dataspace *dataSpace, int32_t colorSpace) {
409     switch (colorSpace) {
410         case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB:
411             *dataSpace = HAL_DATASPACE_V0_SRGB;
412             return true;
413         case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3:
414             *dataSpace = HAL_DATASPACE_DISPLAY_P3;
415             return true;
416         case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT2020_HLG:
417             *(reinterpret_cast<int32_t*>(dataSpace)) = HAL_DATASPACE_BT2020_HLG;
418             return true;
419         default:
420             ALOGE("%s: Unsupported color space %d", __FUNCTION__, colorSpace);
421             return false;
422     }
423 }
424 
isStreamUseCaseSupported(int64_t streamUseCase,const CameraMetadata & deviceInfo)425 bool isStreamUseCaseSupported(int64_t streamUseCase,
426         const CameraMetadata &deviceInfo) {
427     camera_metadata_ro_entry_t availableStreamUseCases =
428             deviceInfo.find(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES);
429 
430     if (availableStreamUseCases.count == 0 &&
431             streamUseCase == ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
432         return true;
433     }
434     // Allow vendor stream use case unconditionally.
435     if (streamUseCase >= ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VENDOR_START) {
436         return true;
437     }
438 
439     for (size_t i = 0; i < availableStreamUseCases.count; i++) {
440         if (availableStreamUseCases.data.i64[i] == streamUseCase) {
441             return true;
442         }
443     }
444     return false;
445 }
446 
createSurfaceFromGbp(OutputStreamInfo & streamInfo,bool isStreamInfoValid,sp<Surface> & surface,const sp<IGraphicBufferProducer> & gbp,const std::string & logicalCameraId,const CameraMetadata & physicalCameraMetadata,const std::vector<int32_t> & sensorPixelModesUsed,int64_t dynamicRangeProfile,int64_t streamUseCase,int timestampBase,int mirrorMode,int32_t colorSpace,bool respectSurfaceSize)447 binder::Status createSurfaceFromGbp(
448         OutputStreamInfo& streamInfo, bool isStreamInfoValid,
449         sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
450         const std::string &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
451         const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
452         int64_t streamUseCase, int timestampBase, int mirrorMode,
453         int32_t colorSpace, bool respectSurfaceSize) {
454     // bufferProducer must be non-null
455     if (gbp == nullptr) {
456         std::string msg = fmt::sprintf("Camera %s: Surface is NULL", logicalCameraId.c_str());
457         ALOGW("%s: %s", __FUNCTION__, msg.c_str());
458         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
459     }
460     // HACK b/10949105
461     // Query consumer usage bits to set async operation mode for
462     // GLConsumer using controlledByApp parameter.
463     bool useAsync = false;
464     uint64_t consumerUsage = 0;
465     status_t err;
466     if ((err = gbp->getConsumerUsage(&consumerUsage)) != OK) {
467         std::string msg = fmt::sprintf("Camera %s: Failed to query Surface consumer usage: %s (%d)",
468                 logicalCameraId.c_str(), strerror(-err), err);
469         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
470         return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
471     }
472     if (consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) {
473         ALOGW("%s: Camera %s with consumer usage flag: %" PRIu64 ": Forcing asynchronous mode for"
474                 "stream", __FUNCTION__, logicalCameraId.c_str(), consumerUsage);
475         useAsync = true;
476     }
477 
478     uint64_t disallowedFlags = GraphicBuffer::USAGE_HW_VIDEO_ENCODER |
479                               GRALLOC_USAGE_RENDERSCRIPT;
480     uint64_t allowedFlags = GraphicBuffer::USAGE_SW_READ_MASK |
481                            GraphicBuffer::USAGE_HW_TEXTURE |
482                            GraphicBuffer::USAGE_HW_COMPOSER;
483     bool flexibleConsumer = (consumerUsage & disallowedFlags) == 0 &&
484             (consumerUsage & allowedFlags) != 0;
485 
486     surface = new Surface(gbp, useAsync);
487     ANativeWindow *anw = surface.get();
488 
489     int width, height, format;
490     android_dataspace dataSpace;
491     if ((err = anw->query(anw, NATIVE_WINDOW_WIDTH, &width)) != OK) {
492         std::string msg = fmt::sprintf("Camera %s: Failed to query Surface width: %s (%d)",
493                  logicalCameraId.c_str(), strerror(-err), err);
494         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
495         return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
496     }
497     if ((err = anw->query(anw, NATIVE_WINDOW_HEIGHT, &height)) != OK) {
498         std::string msg = fmt::sprintf("Camera %s: Failed to query Surface height: %s (%d)",
499                 logicalCameraId.c_str(), strerror(-err), err);
500         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
501         return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
502     }
503     if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
504         std::string msg = fmt::sprintf("Camera %s: Failed to query Surface format: %s (%d)",
505                 logicalCameraId.c_str(), strerror(-err), err);
506         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
507         return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
508     }
509     if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE,
510             reinterpret_cast<int*>(&dataSpace))) != OK) {
511         std::string msg = fmt::sprintf("Camera %s: Failed to query Surface dataspace: %s (%d)",
512                 logicalCameraId.c_str(), strerror(-err), err);
513         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
514         return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
515     }
516 
517     if (colorSpace != ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED &&
518             format != HAL_PIXEL_FORMAT_BLOB) {
519         if (!dataSpaceFromColorSpace(&dataSpace, colorSpace)) {
520             std::string msg = fmt::sprintf("Camera %s: color space %d not supported, failed to "
521                     "convert to data space", logicalCameraId.c_str(), colorSpace);
522             ALOGE("%s: %s", __FUNCTION__, msg.c_str());
523             return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
524         }
525     }
526 
527     // FIXME: remove this override since the default format should be
528     //       IMPLEMENTATION_DEFINED. b/9487482 & b/35317944
529     if ((format >= HAL_PIXEL_FORMAT_RGBA_8888 && format <= HAL_PIXEL_FORMAT_BGRA_8888) &&
530             ((consumerUsage & GRALLOC_USAGE_HW_MASK) &&
531              ((consumerUsage & GRALLOC_USAGE_SW_READ_MASK) == 0))) {
532         ALOGW("%s: Camera %s: Overriding format %#x to IMPLEMENTATION_DEFINED",
533                 __FUNCTION__, logicalCameraId.c_str(), format);
534         format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
535     }
536     std::unordered_set<int32_t> overriddenSensorPixelModes;
537     if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed, format, width, height,
538             physicalCameraMetadata, &overriddenSensorPixelModes) != OK) {
539         std::string msg = fmt::sprintf("Camera %s: sensor pixel modes for stream with "
540                 "format %#x are not valid",logicalCameraId.c_str(), format);
541         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
542         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
543     }
544     bool foundInMaxRes = false;
545     if (overriddenSensorPixelModes.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
546             overriddenSensorPixelModes.end()) {
547         // we can use the default stream configuration map
548         foundInMaxRes = true;
549     }
550     // Round dimensions to the nearest dimensions available for this format.
551     // Only do the rounding if the client doesn't ask to respect the surface
552     // size.
553     if (flexibleConsumer && isPublicFormat(format) && !respectSurfaceSize &&
554             !SessionConfigurationUtils::roundBufferDimensionNearest(width, height,
555             format, dataSpace, physicalCameraMetadata, foundInMaxRes, /*out*/&width,
556             /*out*/&height)) {
557         std::string msg = fmt::sprintf("Camera %s: No supported stream configurations with "
558                 "format %#x defined, failed to create output stream",
559                 logicalCameraId.c_str(), format);
560         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
561         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
562     }
563     if (!SessionConfigurationUtils::isDynamicRangeProfileSupported(dynamicRangeProfile,
564                 physicalCameraMetadata)) {
565         std::string msg = fmt::sprintf("Camera %s: Dynamic range profile 0x%" PRIx64
566                 " not supported,failed to create output stream", logicalCameraId.c_str(),
567                 dynamicRangeProfile);
568         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
569         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
570     }
571     if (SessionConfigurationUtils::is10bitDynamicRangeProfile(dynamicRangeProfile) &&
572             !SessionConfigurationUtils::is10bitCompatibleFormat(format, dataSpace)) {
573         std::string msg = fmt::sprintf("Camera %s: No 10-bit supported stream configurations with "
574                 "format %#x defined and profile %" PRIx64 ", failed to create output stream",
575                 logicalCameraId.c_str(), format, dynamicRangeProfile);
576         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
577         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
578     }
579     if (colorSpace != ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED &&
580             SessionConfigurationUtils::deviceReportsColorSpaces(physicalCameraMetadata) &&
581             !SessionConfigurationUtils::isColorSpaceSupported(colorSpace, format, dataSpace,
582                     dynamicRangeProfile, physicalCameraMetadata)) {
583         std::string msg = fmt::sprintf("Camera %s: Color space %d not supported, failed to "
584                 "create output stream (pixel format %d dynamic range profile %" PRId64 ")",
585                 logicalCameraId.c_str(), colorSpace, format, dynamicRangeProfile);
586         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
587         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
588     }
589     if (!SessionConfigurationUtils::isStreamUseCaseSupported(streamUseCase,
590             physicalCameraMetadata)) {
591         std::string msg = fmt::sprintf("Camera %s: stream use case %" PRId64 " not supported,"
592                 " failed to create output stream", logicalCameraId.c_str(), streamUseCase);
593         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
594         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
595     }
596     if (timestampBase < OutputConfiguration::TIMESTAMP_BASE_DEFAULT ||
597             timestampBase > OutputConfiguration::TIMESTAMP_BASE_MAX) {
598         std::string msg = fmt::sprintf("Camera %s: invalid timestamp base %d",
599                 logicalCameraId.c_str(), timestampBase);
600         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
601         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
602     }
603     if (mirrorMode < OutputConfiguration::MIRROR_MODE_AUTO ||
604             mirrorMode > OutputConfiguration::MIRROR_MODE_V) {
605         std::string msg = fmt::sprintf("Camera %s: invalid mirroring mode %d",
606                 logicalCameraId.c_str(), mirrorMode);
607         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
608         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
609     }
610 
611     if (!isStreamInfoValid) {
612         streamInfo.width = width;
613         streamInfo.height = height;
614         streamInfo.format = format;
615         streamInfo.dataSpace = dataSpace;
616         streamInfo.consumerUsage = consumerUsage;
617         streamInfo.sensorPixelModesUsed = overriddenSensorPixelModes;
618         streamInfo.dynamicRangeProfile = dynamicRangeProfile;
619         streamInfo.streamUseCase = streamUseCase;
620         streamInfo.timestampBase = timestampBase;
621         streamInfo.colorSpace = colorSpace;
622         return binder::Status::ok();
623     }
624     if (width != streamInfo.width) {
625         std::string msg = fmt::sprintf("Camera %s:Surface width doesn't match: %d vs %d",
626                 logicalCameraId.c_str(), width, streamInfo.width);
627         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
628         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
629     }
630     if (height != streamInfo.height) {
631         std::string msg = fmt::sprintf("Camera %s:Surface height doesn't match: %d vs %d",
632                  logicalCameraId.c_str(), height, streamInfo.height);
633         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
634         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
635     }
636     if (format != streamInfo.format) {
637         std::string msg = fmt::sprintf("Camera %s:Surface format doesn't match: %d vs %d",
638                  logicalCameraId.c_str(), format, streamInfo.format);
639         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
640         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
641     }
642     if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
643         if (dataSpace != streamInfo.dataSpace) {
644             std::string msg = fmt::sprintf("Camera %s:Surface dataSpace doesn't match: %d vs %d",
645                     logicalCameraId.c_str(), static_cast<int>(dataSpace), static_cast<int>(streamInfo.dataSpace));
646             ALOGE("%s: %s", __FUNCTION__, msg.c_str());
647             return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
648         }
649         //At the native side, there isn't a way to check whether 2 surfaces come from the same
650         //surface class type. Use usage flag to approximate the comparison.
651         if (consumerUsage != streamInfo.consumerUsage) {
652             std::string msg = fmt::sprintf(
653                     "Camera %s:Surface usage flag doesn't match %" PRIu64 " vs %" PRIu64 "",
654                     logicalCameraId.c_str(), consumerUsage, streamInfo.consumerUsage);
655             ALOGE("%s: %s", __FUNCTION__, msg.c_str());
656             return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
657         }
658     }
659     return binder::Status::ok();
660 }
661 
mapStreamInfo(const OutputStreamInfo & streamInfo,camera3::camera_stream_rotation_t rotation,const std::string & physicalId,int32_t groupId,aidl::android::hardware::camera::device::Stream * stream)662 void mapStreamInfo(const OutputStreamInfo &streamInfo,
663             camera3::camera_stream_rotation_t rotation, const std::string &physicalId,
664             int32_t groupId, aidl::android::hardware::camera::device::Stream *stream /*out*/) {
665     if (stream == nullptr) {
666         return;
667     }
668 
669     stream->streamType = aidl::android::hardware::camera::device::StreamType::OUTPUT;
670     stream->width = streamInfo.width;
671     stream->height = streamInfo.height;
672     stream->format = AidlCamera3Device::mapToAidlPixelFormat(streamInfo.format);
673     auto u = streamInfo.consumerUsage;
674     camera3::Camera3OutputStream::applyZSLUsageQuirk(streamInfo.format, &u);
675     stream->usage = AidlCamera3Device::mapToAidlConsumerUsage(u);
676     stream->dataSpace = AidlCamera3Device::mapToAidlDataspace(streamInfo.dataSpace);
677     stream->colorSpace = streamInfo.colorSpace;
678     stream->rotation = AidlCamera3Device::mapToAidlStreamRotation(rotation);
679     stream->id = -1; // Invalid stream id
680     stream->physicalCameraId = physicalId;
681     stream->bufferSize = 0;
682     stream->groupId = groupId;
683     stream->sensorPixelModesUsed.resize(streamInfo.sensorPixelModesUsed.size());
684     size_t idx = 0;
685     using SensorPixelMode = aidl::android::hardware::camera::metadata::SensorPixelMode;
686     for (auto mode : streamInfo.sensorPixelModesUsed) {
687         stream->sensorPixelModesUsed[idx++] =
688                 static_cast<SensorPixelMode>(mode);
689     }
690     using DynamicRangeProfile =
691             aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
692     stream->dynamicRangeProfile = static_cast<DynamicRangeProfile>(streamInfo.dynamicRangeProfile);
693     using StreamUseCases =
694             aidl::android::hardware::camera::metadata::ScalerAvailableStreamUseCases;
695     stream->useCase = static_cast<StreamUseCases>(streamInfo.streamUseCase);
696 }
697 
mapStream(const OutputStreamInfo & streamInfo,bool isCompositeJpegRDisabled,const CameraMetadata & deviceInfo,camera_stream_rotation_t rotation,size_t * streamIdx,const std::string & physicalId,int32_t groupId,const std::string & logicalCameraId,aidl::android::hardware::camera::device::StreamConfiguration & streamConfiguration,bool * earlyExit)698 binder::Status mapStream(const OutputStreamInfo& streamInfo, bool isCompositeJpegRDisabled,
699         const CameraMetadata& deviceInfo, camera_stream_rotation_t rotation,
700         size_t* streamIdx/*out*/, const std::string &physicalId, int32_t groupId,
701         const std::string& logicalCameraId,
702         aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration /*out*/,
703         bool *earlyExit /*out*/) {
704     bool isDepthCompositeStream =
705             camera3::DepthCompositeStream::isDepthCompositeStreamInfo(streamInfo);
706     bool isHeicCompositeStream =
707             camera3::HeicCompositeStream::isHeicCompositeStreamInfo(streamInfo);
708     bool isJpegRCompositeStream =
709             camera3::JpegRCompositeStream::isJpegRCompositeStreamInfo(streamInfo) &&
710             !isCompositeJpegRDisabled;
711     if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
712         // We need to take in to account that composite streams can have
713         // additional internal camera streams.
714         std::vector<OutputStreamInfo> compositeStreams;
715         status_t ret;
716         if (isDepthCompositeStream) {
717           // TODO: Take care of composite streams.
718             ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
719                     deviceInfo, &compositeStreams);
720         } else if (isHeicCompositeStream) {
721             ret = camera3::HeicCompositeStream::getCompositeStreamInfo(streamInfo,
722                 deviceInfo, &compositeStreams);
723         } else {
724             ret = camera3::JpegRCompositeStream::getCompositeStreamInfo(streamInfo,
725                 deviceInfo, &compositeStreams);
726         }
727 
728         if (ret != OK) {
729             std::string msg = fmt::sprintf(
730                     "Camera %s: Failed adding composite streams: %s (%d)",
731                     logicalCameraId.c_str(), strerror(-ret), ret);
732             ALOGE("%s: %s", __FUNCTION__, msg.c_str());
733             return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
734         }
735 
736         if (compositeStreams.size() == 0) {
737             // No internal streams means composite stream not
738             // supported.
739             *earlyExit = true;
740             return binder::Status::ok();
741         } else if (compositeStreams.size() > 1) {
742             size_t streamCount = streamConfiguration.streams.size() + compositeStreams.size() - 1;
743             streamConfiguration.streams.resize(streamCount);
744         }
745 
746         for (const auto& compositeStream : compositeStreams) {
747             mapStreamInfo(compositeStream, rotation,
748                     physicalId, groupId,
749                     &streamConfiguration.streams[(*streamIdx)++]);
750         }
751     } else {
752         mapStreamInfo(streamInfo, rotation,
753                 physicalId, groupId, &streamConfiguration.streams[(*streamIdx)++]);
754     }
755 
756     return binder::Status::ok();
757 }
758 
/**
 * Translates a client SessionConfiguration into the AIDL StreamConfiguration
 * that is sent to the camera HAL.
 *
 * Validates the operating mode, maps an optional reprocessing input stream,
 * maps every output configuration (deferred and composite streams included),
 * and, when checkSessionParams is set, filters and attaches the session
 * parameters. On unsupported-but-not-erroneous combinations *earlyExit is set
 * to true with an OK status so the caller can bail out gracefully.
 */
binder::Status
convertToHALStreamCombination(
        const SessionConfiguration& sessionConfiguration,
        const std::string &logicalCameraId, const CameraMetadata &deviceInfo,
        bool isCompositeJpegRDisabled,
        metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
        aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration,
        bool overrideForPerfClass, metadata_vendor_id_t vendorTagId,
        bool checkSessionParams, const std::vector<int32_t>& additionalKeys,
        bool *earlyExit) {
    using SensorPixelMode = aidl::android::hardware::camera::metadata::SensorPixelMode;
    auto operatingMode = sessionConfiguration.getOperatingMode();
    // Reject negative or unsupported (e.g. constrained high speed on an
    // incapable device) operating modes up front.
    binder::Status res = checkOperatingMode(operatingMode, deviceInfo,
            logicalCameraId);
    if (!res.isOk()) {
        return res;
    }

    if (earlyExit == nullptr) {
        std::string msg("earlyExit nullptr");
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
    }
    *earlyExit = false;
    auto ret = AidlCamera3Device::mapToAidlStreamConfigurationMode(
            static_cast<camera_stream_configuration_mode_t> (operatingMode),
            /*out*/ &streamConfiguration.operationMode);
    if (ret != OK) {
        std::string msg = fmt::sprintf(
            "Camera %s: Failed mapping operating mode %d requested: %s (%d)",
            logicalCameraId.c_str(), operatingMode, strerror(-ret), ret);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                msg.c_str());
    }

    // A reprocessing input stream is present only when all three of its
    // dimensions/format are positive.
    bool isInputValid = (sessionConfiguration.getInputWidth() > 0) &&
            (sessionConfiguration.getInputHeight() > 0) &&
            (sessionConfiguration.getInputFormat() > 0);
    auto outputConfigs = sessionConfiguration.getOutputConfigurations();
    size_t streamCount = outputConfigs.size();
    streamCount = isInputValid ? streamCount + 1 : streamCount;
    streamConfiguration.streams.resize(streamCount);
    size_t streamIdx = 0;
    if (isInputValid) {
        // Input streams always use the default sensor pixel mode.
        std::vector<SensorPixelMode> defaultSensorPixelModes;
        defaultSensorPixelModes.resize(1);
        defaultSensorPixelModes[0] =
                static_cast<SensorPixelMode>(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
        aidl::android::hardware::camera::device::Stream stream;
        stream.id = 0;
        stream.streamType =  aidl::android::hardware::camera::device::StreamType::INPUT;
        stream.width = static_cast<uint32_t> (sessionConfiguration.getInputWidth());
        stream.height =  static_cast<uint32_t> (sessionConfiguration.getInputHeight());
        stream.format =
                AidlCamera3Device::AidlCamera3Device::mapToAidlPixelFormat(
                        sessionConfiguration.getInputFormat());
        stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(0);
        stream.dataSpace =
              static_cast<aidl::android::hardware::graphics::common::Dataspace>(
                      HAL_DATASPACE_UNKNOWN);
        stream.rotation = aidl::android::hardware::camera::device::StreamRotation::ROTATION_0;
        stream.bufferSize = 0;
        stream.groupId = -1;
        stream.sensorPixelModesUsed = defaultSensorPixelModes;
        using DynamicRangeProfile =
            aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
        stream.dynamicRangeProfile =
            DynamicRangeProfile::ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
        streamConfiguration.streams[streamIdx++] = stream;
        streamConfiguration.multiResolutionInputImage =
                sessionConfiguration.inputIsMultiResolution();
    }

    for (const auto &it : outputConfigs) {
        const std::vector<ParcelableSurfaceType>& surfaces = it.getSurfaces();
        bool deferredConsumer = it.isDeferred();
        bool isConfigurationComplete = it.isComplete();
        const std::string &physicalCameraId = it.getPhysicalCameraId();

        int64_t dynamicRangeProfile = it.getDynamicRangeProfile();
        int32_t colorSpace = it.getColorSpace();
        std::vector<int32_t> sensorPixelModesUsed = it.getSensorPixelModesUsed();
        const CameraMetadata &physicalDeviceInfo = getMetadata(physicalCameraId,
                overrideForPerfClass);
        // Per-physical-camera metadata takes precedence when a physical id is set.
        const CameraMetadata &metadataChosen =
                physicalCameraId.size() > 0 ? physicalDeviceInfo : deviceInfo;

        size_t numSurfaces = surfaces.size();
        bool isStreamInfoValid = false;
        int32_t groupId = it.isMultiResolution() ? it.getSurfaceSetID() : -1;
        OutputStreamInfo streamInfo;

        res = checkSurfaceType(numSurfaces, deferredConsumer, it.getSurfaceType(),
                               isConfigurationComplete);
        if (!res.isOk()) {
            return res;
        }
        res = checkPhysicalCameraId(physicalCameraIds, physicalCameraId,
                logicalCameraId);
        if (!res.isOk()) {
            return res;
        }

        int64_t streamUseCase = it.getStreamUseCase();
        int timestampBase = it.getTimestampBase();
        // If the configuration is a deferred consumer, or a not yet completed
        // configuration with no buffer producers attached.
        if (deferredConsumer || (!isConfigurationComplete && numSurfaces == 0)) {
            streamInfo.width = it.getWidth();
            streamInfo.height = it.getHeight();
            auto surfaceType = it.getSurfaceType();
            // Fill in usage/format/dataspace from the declared surface type
            // since there is no actual surface to query yet.
            switch (surfaceType) {
                case OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE:
                    streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE;
                    streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
                    streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
                    break;
                case OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW:
                    streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE
                            | GraphicBuffer::USAGE_HW_COMPOSER;
                    streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
                    streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
                    break;
                case OutputConfiguration::SURFACE_TYPE_MEDIA_RECORDER:
                case OutputConfiguration::SURFACE_TYPE_MEDIA_CODEC:
                    streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_VIDEO_ENCODER;
                    streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
                    streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
                    break;
                case OutputConfiguration::SURFACE_TYPE_IMAGE_READER:
                    streamInfo.consumerUsage = it.getUsage();
                    streamInfo.format = it.getFormat();
                    streamInfo.dataSpace = (android_dataspace)it.getDataspace();
                    break;
                default:
                    return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                                        "Invalid surface type.");
            }
            streamInfo.dynamicRangeProfile = it.getDynamicRangeProfile();
            if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed,
                    streamInfo.format, streamInfo.width,
                    streamInfo.height, metadataChosen,
                    &streamInfo.sensorPixelModesUsed) != OK) {
                        ALOGE("%s: Deferred surface sensor pixel modes not valid",
                                __FUNCTION__);
                        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                                "Deferred surface sensor pixel modes not valid");
            }
            streamInfo.streamUseCase = streamUseCase;
            auto status = mapStream(streamInfo, isCompositeJpegRDisabled, deviceInfo,
                    camera3::CAMERA_STREAM_ROTATION_0, &streamIdx, physicalCameraId, groupId,
                    logicalCameraId, streamConfiguration, earlyExit);
            if (*earlyExit || !status.isOk()) {
                return status;
            }

            isStreamInfoValid = true;

            if (numSurfaces == 0) {
                continue;
            }
        }

        for (auto& surface_type : surfaces) {
            sp<Surface> surface;
            int mirrorMode = it.getMirrorMode(surface_type);
            // Validates the surface against streamInfo; fills streamInfo when
            // it is not yet valid.
            res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface,
                                       surface_type
#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
                                       .graphicBufferProducer
#endif
                                       , logicalCameraId,
                                       metadataChosen, sensorPixelModesUsed, dynamicRangeProfile,
                                       streamUseCase, timestampBase, mirrorMode, colorSpace,
                                       /*respectSurfaceSize*/ true);

            if (!res.isOk()) return res;

            if (!isStreamInfoValid) {
                // Only the first surface of a shared group maps to a HAL stream.
                auto status = mapStream(streamInfo, isCompositeJpegRDisabled, deviceInfo,
                                        static_cast<camera_stream_rotation_t>(it.getRotation()),
                                        &streamIdx, physicalCameraId, groupId, logicalCameraId,
                                        streamConfiguration, earlyExit);
                if (*earlyExit || !status.isOk()) {
                    return status;
                }
                isStreamInfoValid = true;
            }
        }
    }

    if (checkSessionParams) {
        const CameraMetadata &deviceInfo = getMetadata(logicalCameraId,
                /*overrideForPerfClass*/false);
        CameraMetadata filteredParams;

        // Keep only session keys the device declares (plus additionalKeys).
        filterParameters(sessionConfiguration.getSessionParameters(), deviceInfo,
                additionalKeys, vendorTagId, filteredParams);

        // Serialize the filtered metadata buffer into the AIDL payload.
        camera_metadata_t* metadata = const_cast<camera_metadata_t*>(filteredParams.getAndLock());
        uint8_t *metadataP = reinterpret_cast<uint8_t*>(metadata);
        streamConfiguration.sessionParams.metadata.assign(metadataP,
                metadataP + get_camera_metadata_size(metadata));
    }

    return binder::Status::ok();
}
967 
checkPhysicalCameraId(const std::vector<std::string> & physicalCameraIds,const std::string & physicalCameraId,const std::string & logicalCameraId)968 binder::Status checkPhysicalCameraId(
969         const std::vector<std::string> &physicalCameraIds, const std::string &physicalCameraId,
970         const std::string &logicalCameraId) {
971     if (physicalCameraId.size() == 0) {
972         return binder::Status::ok();
973     }
974     if (std::find(physicalCameraIds.begin(), physicalCameraIds.end(),
975         physicalCameraId) == physicalCameraIds.end()) {
976         std::string msg = fmt::sprintf("Camera %s: Camera doesn't support physicalCameraId %s.",
977                 logicalCameraId.c_str(), physicalCameraId.c_str());
978         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
979         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
980     }
981     return binder::Status::ok();
982 }
983 
checkSurfaceType(size_t numBufferProducers,bool deferredConsumer,int surfaceType,bool isConfigurationComplete)984 binder::Status checkSurfaceType(size_t numBufferProducers,
985         bool deferredConsumer, int surfaceType, bool isConfigurationComplete)  {
986     if (numBufferProducers > MAX_SURFACES_PER_STREAM) {
987         ALOGE("%s: GraphicBufferProducer count %zu for stream exceeds limit of %d",
988                 __FUNCTION__, numBufferProducers, MAX_SURFACES_PER_STREAM);
989         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Surface count is too high");
990     } else if ((numBufferProducers == 0) && (!deferredConsumer) && isConfigurationComplete) {
991         ALOGE("%s: Number of consumers cannot be smaller than 1", __FUNCTION__);
992         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "No valid consumers.");
993     }
994 
995     if (deferredConsumer) {
996         bool validSurfaceType = (
997                 (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) ||
998                 (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE));
999         if (!validSurfaceType) {
1000             std::string msg = fmt::sprintf("Deferred target surface has invalid "
1001                     "surfaceType = %d.", surfaceType);
1002             ALOGE("%s: %s", __FUNCTION__, msg.c_str());
1003             return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
1004         }
1005     } else if (!isConfigurationComplete && numBufferProducers == 0) {
1006         bool validSurfaceType = (
1007                 (surfaceType == OutputConfiguration::SURFACE_TYPE_MEDIA_RECORDER) ||
1008                 (surfaceType == OutputConfiguration::SURFACE_TYPE_MEDIA_CODEC) ||
1009                 (surfaceType == OutputConfiguration::SURFACE_TYPE_IMAGE_READER));
1010         if (!validSurfaceType) {
1011             std::string msg = fmt::sprintf("OutputConfiguration target surface has invalid "
1012                     "surfaceType = %d.", surfaceType);
1013             ALOGE("%s: %s", __FUNCTION__, msg.c_str());
1014             return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
1015         }
1016     }
1017 
1018     return binder::Status::ok();
1019 }
1020 
checkOperatingMode(int operatingMode,const CameraMetadata & staticInfo,const std::string & cameraId)1021 binder::Status checkOperatingMode(int operatingMode,
1022         const CameraMetadata &staticInfo, const std::string &cameraId) {
1023     if (operatingMode < 0) {
1024         std::string msg = fmt::sprintf(
1025             "Camera %s: Invalid operating mode %d requested", cameraId.c_str(), operatingMode);
1026         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
1027         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
1028                 msg.c_str());
1029     }
1030 
1031     bool isConstrainedHighSpeed = (operatingMode == ICameraDeviceUser::CONSTRAINED_HIGH_SPEED_MODE);
1032     if (isConstrainedHighSpeed) {
1033         camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
1034         bool isConstrainedHighSpeedSupported = false;
1035         for(size_t i = 0; i < entry.count; ++i) {
1036             uint8_t capability = entry.data.u8[i];
1037             if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO) {
1038                 isConstrainedHighSpeedSupported = true;
1039                 break;
1040             }
1041         }
1042         if (!isConstrainedHighSpeedSupported) {
1043             std::string msg = fmt::sprintf(
1044                 "Camera %s: Try to create a constrained high speed configuration on a device"
1045                 " that doesn't support it.", cameraId.c_str());
1046             ALOGE("%s: %s", __FUNCTION__, msg.c_str());
1047             return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
1048                     msg.c_str());
1049         }
1050     }
1051 
1052     return binder::Status::ok();
1053 }
1054 
// Returns true when an output (non-input) stream configuration with the given
// format and exact dimensions exists in the supplied format -> configurations map.
static bool inStreamConfigurationMap(int format, int width, int height,
        const std::unordered_map<int, std::vector<camera3::StreamConfiguration>> &sm) {
    auto entry = sm.find(format);
    if (entry == sm.end()) {
        return false;
    }
    for (const auto &config : entry->second) {
        bool sizeMatches = (config.width == width) && (config.height == height);
        if (sizeMatches && config.isInput == 0) {
            return true;
        }
    }
    return false;
}
1068 
// Converts the client-provided sensor pixel mode list into a set, dropping
// any duplicate entries.
static std::unordered_set<int32_t> convertToSet(const std::vector<int32_t> &sensorPixelModesUsed) {
    std::unordered_set<int32_t> modes;
    modes.insert(sensorPixelModesUsed.begin(), sensorPixelModesUsed.end());
    return modes;
}
1072 
checkAndOverrideSensorPixelModesUsed(const std::vector<int32_t> & sensorPixelModesUsed,int format,int width,int height,const CameraMetadata & staticInfo,std::unordered_set<int32_t> * overriddenSensorPixelModesUsed)1073 status_t checkAndOverrideSensorPixelModesUsed(
1074         const std::vector<int32_t> &sensorPixelModesUsed, int format, int width, int height,
1075         const CameraMetadata &staticInfo,
1076         std::unordered_set<int32_t> *overriddenSensorPixelModesUsed) {
1077 
1078     const std::unordered_set<int32_t> &sensorPixelModesUsedSet =
1079             convertToSet(sensorPixelModesUsed);
1080     if (!supportsUltraHighResolutionCapture(staticInfo)) {
1081         if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
1082                 sensorPixelModesUsedSet.end()) {
1083             // invalid value for non ultra high res sensors
1084             ALOGE("%s ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION used on a device which doesn't "
1085                     "support ultra high resolution capture", __FUNCTION__);
1086             return BAD_VALUE;
1087         }
1088         overriddenSensorPixelModesUsed->clear();
1089         overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
1090         return OK;
1091     }
1092 
1093     StreamConfigurationPair streamConfigurationPair = getStreamConfigurationPair(staticInfo);
1094 
1095     bool isInDefaultStreamConfigurationMap =
1096             inStreamConfigurationMap(format, width, height,
1097                     streamConfigurationPair.mDefaultStreamConfigurationMap);
1098 
1099     bool isInMaximumResolutionStreamConfigurationMap =
1100             inStreamConfigurationMap(format, width, height,
1101                     streamConfigurationPair.mMaximumResolutionStreamConfigurationMap);
1102 
1103     // Case 1: The client has not changed the sensor mode defaults. In this case, we check if the
1104     // size + format of the OutputConfiguration is found exclusively in 1.
1105     // If yes, add that sensorPixelMode to overriddenSensorPixelModes.
1106     // If no, add 'DEFAULT' and MAXIMUM_RESOLUTION to overriddenSensorPixelModes.
1107     // This maintains backwards compatibility and also tells the framework the stream
1108     // might be used in either sensor pixel mode.
1109     if (sensorPixelModesUsedSet.size() == 0) {
1110         // Ambiguous case, override to include both cases.
1111         if (isInDefaultStreamConfigurationMap && isInMaximumResolutionStreamConfigurationMap) {
1112             overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
1113             overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
1114             return OK;
1115         }
1116         if (isInMaximumResolutionStreamConfigurationMap) {
1117             overriddenSensorPixelModesUsed->insert(
1118                     ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
1119         } else {
1120             overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
1121         }
1122         return OK;
1123     }
1124 
1125     // Case2: The app has set sensorPixelModesUsed, we need to verify that they
1126     // are valid / err out.
1127     if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_DEFAULT) !=
1128             sensorPixelModesUsedSet.end() && !isInDefaultStreamConfigurationMap) {
1129         ALOGE("%s: ANDROID_SENSOR_PIXEL_MODE_DEFAULT set by client, but stream f: %d size %d x %d"
1130                 " isn't present in default stream configuration map", __FUNCTION__, format, width,
1131                 height);
1132         return BAD_VALUE;
1133     }
1134 
1135    if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
1136             sensorPixelModesUsedSet.end() && !isInMaximumResolutionStreamConfigurationMap) {
1137         ALOGE("%s: ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION set by client, but stream f: "
1138                 "%d size %d x %d isn't present in default stream configuration map", __FUNCTION__,
1139                 format, width, height);
1140         return BAD_VALUE;
1141     }
1142     *overriddenSensorPixelModesUsed = sensorPixelModesUsedSet;
1143     return OK;
1144 }
1145 
targetPerfClassPrimaryCamera(const std::set<std::string> & perfClassPrimaryCameraIds,const std::string & cameraId,int targetSdkVersion)1146 bool targetPerfClassPrimaryCamera(
1147         const std::set<std::string>& perfClassPrimaryCameraIds, const std::string& cameraId,
1148         int targetSdkVersion) {
1149     bool isPerfClassPrimaryCamera =
1150             perfClassPrimaryCameraIds.find(cameraId) != perfClassPrimaryCameraIds.end();
1151     return targetSdkVersion >= SDK_VERSION_S && isPerfClassPrimaryCamera;
1152 }
1153 
mapRequestTemplateFromClient(const std::string & cameraId,int templateId,camera_request_template_t * tempId)1154 binder::Status mapRequestTemplateFromClient(const std::string& cameraId, int templateId,
1155         camera_request_template_t* tempId /*out*/) {
1156     binder::Status ret = binder::Status::ok();
1157 
1158     if (tempId == nullptr) {
1159         ret = STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
1160                 "Camera %s: Invalid template argument", cameraId.c_str());
1161         return ret;
1162     }
1163     switch(templateId) {
1164         case ICameraDeviceUser::TEMPLATE_PREVIEW:
1165             *tempId = camera_request_template_t::CAMERA_TEMPLATE_PREVIEW;
1166             break;
1167         case ICameraDeviceUser::TEMPLATE_RECORD:
1168             *tempId = camera_request_template_t::CAMERA_TEMPLATE_VIDEO_RECORD;
1169             break;
1170         case ICameraDeviceUser::TEMPLATE_STILL_CAPTURE:
1171             *tempId = camera_request_template_t::CAMERA_TEMPLATE_STILL_CAPTURE;
1172             break;
1173         case ICameraDeviceUser::TEMPLATE_VIDEO_SNAPSHOT:
1174             *tempId = camera_request_template_t::CAMERA_TEMPLATE_VIDEO_SNAPSHOT;
1175             break;
1176         case ICameraDeviceUser::TEMPLATE_ZERO_SHUTTER_LAG:
1177             *tempId = camera_request_template_t::CAMERA_TEMPLATE_ZERO_SHUTTER_LAG;
1178             break;
1179         case ICameraDeviceUser::TEMPLATE_MANUAL:
1180             *tempId = camera_request_template_t::CAMERA_TEMPLATE_MANUAL;
1181             break;
1182         default:
1183             ret = STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
1184                     "Camera %s: Template ID %d is invalid or not supported",
1185                     cameraId.c_str(), templateId);
1186             return ret;
1187     }
1188 
1189     return ret;
1190 }
1191 
mapRequestTemplateToAidl(camera_request_template_t templateId,RequestTemplate * id)1192 status_t mapRequestTemplateToAidl(camera_request_template_t templateId,
1193         RequestTemplate* id /*out*/) {
1194     switch (templateId) {
1195         case CAMERA_TEMPLATE_PREVIEW:
1196             *id = RequestTemplate::PREVIEW;
1197             break;
1198         case CAMERA_TEMPLATE_STILL_CAPTURE:
1199             *id = RequestTemplate::STILL_CAPTURE;
1200             break;
1201         case CAMERA_TEMPLATE_VIDEO_RECORD:
1202             *id = RequestTemplate::VIDEO_RECORD;
1203             break;
1204         case CAMERA_TEMPLATE_VIDEO_SNAPSHOT:
1205             *id = RequestTemplate::VIDEO_SNAPSHOT;
1206             break;
1207         case CAMERA_TEMPLATE_ZERO_SHUTTER_LAG:
1208             *id = RequestTemplate::ZERO_SHUTTER_LAG;
1209             break;
1210         case CAMERA_TEMPLATE_MANUAL:
1211             *id = RequestTemplate::MANUAL;
1212             break;
1213         default:
1214             // Unknown template ID, or this HAL is too old to support it
1215             return BAD_VALUE;
1216     }
1217     return OK;
1218 }
1219 
filterParameters(const CameraMetadata & src,const CameraMetadata & deviceInfo,const std::vector<int32_t> & additionalTags,metadata_vendor_id_t vendorTagId,CameraMetadata & dst)1220 void filterParameters(const CameraMetadata& src, const CameraMetadata& deviceInfo,
1221         const std::vector<int32_t>& additionalTags, metadata_vendor_id_t vendorTagId,
1222         CameraMetadata& dst) {
1223     const CameraMetadata params(src);
1224     camera_metadata_ro_entry_t availableSessionKeys = deviceInfo.find(
1225             ANDROID_REQUEST_AVAILABLE_SESSION_KEYS);
1226     CameraMetadata filteredParams(availableSessionKeys.count);
1227     camera_metadata_t *meta = const_cast<camera_metadata_t *>(
1228             filteredParams.getAndLock());
1229     set_camera_metadata_vendor_id(meta, vendorTagId);
1230     filteredParams.unlock(meta);
1231 
1232     std::unordered_set<int32_t> filteredTags(availableSessionKeys.data.i32,
1233             availableSessionKeys.data.i32 + availableSessionKeys.count);
1234     filteredTags.insert(additionalTags.begin(), additionalTags.end());
1235     for (int32_t tag : filteredTags) {
1236         camera_metadata_ro_entry entry = params.find(tag);
1237         if (entry.count > 0) {
1238             filteredParams.update(entry);
1239         }
1240     }
1241     dst = std::move(filteredParams);
1242 }
1243 
overrideDefaultRequestKeys(CameraMetadata * request)1244 status_t overrideDefaultRequestKeys(CameraMetadata *request) {
1245     // Override the template request with ZoomRatioMapper
1246     status_t res = ZoomRatioMapper::initZoomRatioInTemplate(request);
1247     if (res != OK) {
1248         ALOGE("Failed to update zoom ratio: %s (%d)", strerror(-res), res);
1249         return res;
1250     }
1251 
1252     // Fill in JPEG_QUALITY if not available
1253     if (!request->exists(ANDROID_JPEG_QUALITY)) {
1254         static const uint8_t kDefaultJpegQuality = 95;
1255         request->update(ANDROID_JPEG_QUALITY, &kDefaultJpegQuality, 1);
1256     }
1257 
1258     // Fill in AUTOFRAMING if not available
1259     if (!request->exists(ANDROID_CONTROL_AUTOFRAMING)) {
1260         static const uint8_t kDefaultAutoframingMode = ANDROID_CONTROL_AUTOFRAMING_OFF;
1261         request->update(ANDROID_CONTROL_AUTOFRAMING, &kDefaultAutoframingMode, 1);
1262     }
1263 
1264     return OK;
1265 }
1266 
1267 } // namespace SessionConfigurationUtils
1268 } // namespace camera3
1269 } // namespace android
1270