1 /* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2 *
3 * Redistribution and use in source and binary forms, with or without
4 * modification, are permitted provided that the following conditions are
5 * met:
6 *     * Redistributions of source code must retain the above copyright
7 *       notice, this list of conditions and the following disclaimer.
8 *     * Redistributions in binary form must reproduce the above
9 *       copyright notice, this list of conditions and the following
10 *       disclaimer in the documentation and/or other materials provided
11 *       with the distribution.
12 *     * Neither the name of The Linux Foundation nor the names of its
13 *       contributors may be used to endorse or promote products derived
14 *       from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19 * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 *
28 */
29 
30 #define LOG_TAG "QCamera3HWI"
31 //#define LOG_NDEBUG 0
32 
33 #define __STDC_LIMIT_MACROS
34 
35 // To remove
36 #include <cutils/properties.h>
37 
38 // System dependencies
39 #include <dlfcn.h>
40 #include <fcntl.h>
41 #include <stdio.h>
42 #include <stdlib.h>
43 #include "utils/Timers.h"
44 #include "sys/ioctl.h"
45 #include <time.h>
46 #include <sync/sync.h>
47 #include "gralloc_priv.h"
48 #include <map>
49 #include <unordered_map>
50 
51 // Display dependencies
52 #include "qdMetaData.h"
53 
54 // Camera dependencies
55 #include "android/QCamera3External.h"
56 #include "util/QCameraFlash.h"
57 #include "QCamera3HWI.h"
58 #include "QCamera3VendorTags.h"
59 #include "QCameraTrace.h"
60 
61 // XML parsing
62 #include "tinyxml2.h"
63 
64 #include "HdrPlusClientUtils.h"
65 
66 extern "C" {
67 #include "mm_camera_dbg.h"
68 }
69 #include "cam_cond.h"
70 
71 using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
72 using namespace android;
73 
74 namespace qcamera {
75 
76 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
77 
78 #define EMPTY_PIPELINE_DELAY 2
79 // mm_camera has 2 partial results: 3A, and final result.
80 // HDR+ requests have 3 partial results: postview, next request ready, and final result.
81 #define PARTIAL_RESULT_COUNT 3
82 #define FRAME_SKIP_DELAY     0
83 
84 #define MAX_VALUE_8BIT ((1<<8)-1)
85 #define MAX_VALUE_10BIT ((1<<10)-1)
86 #define MAX_VALUE_12BIT ((1<<12)-1)
87 
88 #define VIDEO_4K_WIDTH  3840
89 #define VIDEO_4K_HEIGHT 2160
90 
91 #define MAX_EIS_WIDTH 3840
92 #define MAX_EIS_HEIGHT 2160
93 
94 #define MAX_RAW_STREAMS        1
95 #define MAX_STALLING_STREAMS   1
96 #define MAX_PROCESSED_STREAMS  3
97 /* Batch mode is enabled only if FPS set is equal to or greater than this */
98 #define MIN_FPS_FOR_BATCH_MODE (120)
99 #define PREVIEW_FPS_FOR_HFR    (30)
100 #define DEFAULT_VIDEO_FPS      (30.0)
101 #define TEMPLATE_MAX_PREVIEW_FPS (30.0)
102 #define MAX_HFR_BATCH_SIZE     (8)
103 #define REGIONS_TUPLE_COUNT    5
104 // Set a threshold for detection of missing buffers //seconds
105 #define MISSING_REQUEST_BUF_TIMEOUT 10
106 #define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
107 #define FLUSH_TIMEOUT 3
108 #define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
109 
110 #define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
111                                               CAM_QCOM_FEATURE_CROP |\
112                                               CAM_QCOM_FEATURE_ROTATION |\
113                                               CAM_QCOM_FEATURE_SHARPNESS |\
114                                               CAM_QCOM_FEATURE_SCALE |\
115                                               CAM_QCOM_FEATURE_CAC |\
116                                               CAM_QCOM_FEATURE_CDS )
117 /* Per configuration size for static metadata length*/
118 #define PER_CONFIGURATION_SIZE_3 (3)
119 
120 #define TIMEOUT_NEVER -1
121 
122 /* Face rect indices */
123 #define FACE_LEFT              0
124 #define FACE_TOP               1
125 #define FACE_RIGHT             2
126 #define FACE_BOTTOM            3
127 #define FACE_WEIGHT            4
128 
129 /* Face landmarks indices */
130 #define LEFT_EYE_X             0
131 #define LEFT_EYE_Y             1
132 #define RIGHT_EYE_X            2
133 #define RIGHT_EYE_Y            3
134 #define MOUTH_X                4
135 #define MOUTH_Y                5
136 #define TOTAL_LANDMARK_INDICES 6
137 
138 // Max preferred zoom
139 #define MAX_PREFERRED_ZOOM_RATIO 7.0
140 
141 // Whether to check for the GPU stride padding, or use the default
142 //#define CHECK_GPU_PIXEL_ALIGNMENT
143 
// Per-camera capability table, indexed by camera id (filled elsewhere; read
// here, e.g. by the constructor via gCamCapability[cameraId]).
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Per-camera static metadata handed to the framework.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
// HAL3 logging verbosity level; volatile so updates are seen across threads.
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
int32_t gActiveEaselClient = 0; // The number of active cameras on Easel.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

std::mutex gHdrPlusClientLock; // Protect above Easel related variables.
165 
166 
// Property-string to CAM_CDS_MODE_* mapping.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
// Vendor video-HDR enum <-> backend video-HDR mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};

// Vendor binning-correction enum <-> backend binning-correction mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF,  CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,   CAM_BINNING_CORRECTION_MODE_ON }
};

// Vendor IR enum <-> backend IR mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

// android.control.effectMode <-> backend effect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// android.control.awbMode <-> backend white-balance mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// android.control.sceneMode <-> backend scene mode. Note STEADYPHOTO maps to
// the backend's ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

// android.control.afMode <-> backend focus mode. AF_MODE_OFF appears twice
// (OFF and FIXED) so HAL->Android traversal resolves both to OFF.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
255 
// android.colorCorrection.aberrationMode <-> backend CAC mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

// android.control.aeAntibandingMode <-> backend antibanding mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// android.control.aeMode <-> implied backend flash mode (AE modes that do not
// involve flash map to CAM_FLASH_MODE_OFF).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_EXTERNAL_FLASH, CAM_FLASH_MODE_OFF }
};

// android.flash.mode <-> backend flash mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// android.statistics.faceDetectMode <-> backend face-detect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE  },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// android.lens.info.focusDistanceCalibration <-> backend calibration level.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};

// android.lens.state <-> backend AF lens state.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};

// Supported JPEG thumbnail sizes as flat (width, height) pairs; the leading
// (0, 0) entry means "no thumbnail".
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

// android.sensor.testPatternMode <-> backend test pattern.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,      CAM_TEST_PATTERN_CUSTOM1},
};
340 
/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

// Requested FPS <-> backend high-frame-rate mode.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

// Vendor instant-AEC enum <-> backend AEC convergence type.
const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

// Vendor exposure-metering enum <-> backend auto-exposure mode.
const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

// Vendor ISO enum <-> backend ISO mode.
const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};

// camera3_device_ops vtable handed to the framework; deprecated/unsupported
// entry points (register_stream_buffers, get_metadata_vendor_tag_ops) are NULL.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
423 
424 typedef std::tuple<int32_t, int32_t, int32_t, int32_t> config_entry;
425 
operator ==(const config_entry & lhs,const config_entry & rhs)426 bool operator == (const config_entry & lhs, const config_entry & rhs) {
427     return (std::get<0> (lhs) == std::get<0> (rhs)) &&
428         (std::get<1> (lhs) == std::get<1> (rhs)) &&
429         (std::get<2> (lhs) == std::get<2> (rhs)) &&
430         (std::get<3> (lhs) == std::get<3> (rhs));
431 }
432 
433 struct ConfigEntryHash {
operator ()qcamera::ConfigEntryHash434     std::size_t operator() (config_entry const& entry) const {
435         size_t result = 1;
436         size_t hashValue = 31;
437         result = hashValue*result + std::hash<int> {} (std::get<0>(entry));
438         result = hashValue*result + std::hash<int> {} (std::get<1>(entry));
439         result = hashValue*result + std::hash<int> {} (std::get<2>(entry));
440         result = hashValue*result + std::hash<int> {} (std::get<3>(entry));
441         return result;
442     }
443 };
444 
// Per-camera backend session ids; initialised to a recognisable sentinel
// (0xDEADBEEF) so entries are distinguishable before a real session id is
// assigned (read e.g. during dual-cam unlink in the destructor).
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
447 
logEaselEvent(const char * tag,const char * event)448 static inline void logEaselEvent(const char *tag, const char *event) {
449     if (CC_UNLIKELY(gEaselProfilingEnabled)) {
450         struct timespec ts = {};
451         static int64_t kMsPerSec = 1000;
452         static int64_t kNsPerMs = 1000000;
453         status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
454         if (res != OK) {
455             ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
456         } else {
457             int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
458             ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
459         }
460     }
461 }
462 
463 /*===========================================================================
464  * FUNCTION   : QCamera3HardwareInterface
465  *
466  * DESCRIPTION: constructor of QCamera3HardwareInterface
467  *
468  * PARAMETERS :
469  *   @cameraId  : camera ID
470  *
471  * RETURN     : none
472  *==========================================================================*/
QCamera3HardwareInterface(uint32_t cameraId,const camera_module_callbacks_t * callbacks)473 QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
474         const camera_module_callbacks_t *callbacks)
475     : mCameraId(cameraId),
476       mCameraHandle(NULL),
477       mCameraInitialized(false),
478       mCallbackOps(NULL),
479       mMetadataChannel(NULL),
480       mPictureChannel(NULL),
481       mRawChannel(NULL),
482       mSupportChannel(NULL),
483       mAnalysisChannel(NULL),
484       mRawDumpChannel(NULL),
485       mHdrPlusRawSrcChannel(NULL),
486       mDummyBatchChannel(NULL),
487       mDepthChannel(NULL),
488       mDepthCloudMode(CAM_PD_DATA_SKIP),
489       mPerfLockMgr(),
490       mChannelHandle(0),
491       mFirstConfiguration(true),
492       mFlush(false),
493       mFlushPerf(false),
494       mParamHeap(NULL),
495       mParameters(NULL),
496       mPrevParameters(NULL),
497       m_ISTypeVideo(IS_TYPE_NONE),
498       m_bIsVideo(false),
499       m_bIs4KVideo(false),
500       m_bEisSupportedSize(false),
501       m_bEisEnable(false),
502       m_bEis3PropertyEnabled(false),
503       m_bAVTimerEnabled(false),
504       m_MobicatMask(0),
505       mShutterDispatcher(this),
506       mOutputBufferDispatcher(this),
507       mMinProcessedFrameDuration(0),
508       mMinJpegFrameDuration(0),
509       mMinRawFrameDuration(0),
510       mExpectedFrameDuration(0),
511       mExpectedInflightDuration(0),
512       mMetaFrameCount(0U),
513       mUpdateDebugLevel(false),
514       mCallbacks(callbacks),
515       mCaptureIntent(0),
516       mCacMode(0),
517       /* DevCamDebug metadata internal m control*/
518       mDevCamDebugMetaEnable(0),
519       /* DevCamDebug metadata end */
520       mBatchSize(0),
521       mToBeQueuedVidBufs(0),
522       mHFRVideoFps(DEFAULT_VIDEO_FPS),
523       mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
524       mStreamConfig(false),
525       mCommon(),
526       mFirstFrameNumberInBatch(0),
527       mNeedSensorRestart(false),
528       mPreviewStarted(false),
529       mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
530       mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
531       mPDSupported(false),
532       mPDIndex(0),
533       mInstantAEC(false),
534       mResetInstantAEC(false),
535       mInstantAECSettledFrameNumber(0),
536       mAecSkipDisplayFrameBound(0),
537       mInstantAecFrameIdxCount(0),
538       mLastRequestedLensShadingMapMode(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF),
539       mLastRequestedFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF),
540       mLastRequestedOisDataMode(ANDROID_STATISTICS_OIS_DATA_MODE_OFF),
541       mCurrFeatureState(0),
542       mLdafCalibExist(false),
543       mLastCustIntentFrmNum(-1),
544       mFirstMetadataCallback(true),
545       mState(CLOSED),
546       mIsDeviceLinked(false),
547       mIsMainCamera(true),
548       mLinkedCameraId(0),
549       m_pDualCamCmdHeap(NULL),
550       m_pDualCamCmdPtr(NULL),
551       mHdrPlusModeEnabled(false),
552       mZslEnabled(false),
553       mEaselMipiStarted(false),
554       mIsApInputUsedForHdrPlus(false),
555       mFirstPreviewIntentSeen(false),
556       m_bSensorHDREnabled(false),
557       mAfTrigger(),
558       mSceneDistance(-1),
559       mLastFocusDistance(0.0)
560 {
561     getLogLevel();
562     mCommon.init(gCamCapability[cameraId]);
563     mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
564 #ifndef USE_HAL_3_3
565     mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_5;
566 #else
567     mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
568 #endif
569     mCameraDevice.common.close = close_camera_device;
570     mCameraDevice.ops = &mCameraOps;
571     mCameraDevice.priv = this;
572     gCamCapability[cameraId]->version = CAM_HAL_V3;
573     // TODO: hardcode for now until mctl add support for min_num_pp_bufs
574     //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
575     gCamCapability[cameraId]->min_num_pp_bufs = 3;
576 
577     PTHREAD_COND_INIT(&mBuffersCond);
578 
579     PTHREAD_COND_INIT(&mRequestCond);
580     mPendingLiveRequest = 0;
581     mCurrentRequestId = -1;
582     pthread_mutex_init(&mMutex, NULL);
583 
584     for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
585         mDefaultMetadata[i] = NULL;
586 
587     // Getting system props of different kinds
588     char prop[PROPERTY_VALUE_MAX];
589     memset(prop, 0, sizeof(prop));
590     property_get("persist.camera.raw.dump", prop, "0");
591     mEnableRawDump = atoi(prop);
592     property_get("persist.camera.hal3.force.hdr", prop, "0");
593     mForceHdrSnapshot = atoi(prop);
594 
595     if (mEnableRawDump)
596         LOGD("Raw dump from Camera HAL enabled");
597 
598     memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
599     memset(mLdafCalib, 0, sizeof(mLdafCalib));
600 
601     memset(mEaselFwVersion, 0, sizeof(mEaselFwVersion));
602     mEaselFwUpdated = false;
603 
604     memset(prop, 0, sizeof(prop));
605     property_get("persist.camera.tnr.preview", prop, "0");
606     m_bTnrPreview = (uint8_t)atoi(prop);
607 
608     memset(prop, 0, sizeof(prop));
609     property_get("persist.camera.swtnr.preview", prop, "1");
610     m_bSwTnrPreview = (uint8_t)atoi(prop);
611 
612     memset(prop, 0, sizeof(prop));
613     property_get("persist.camera.tnr.video", prop, "1");
614     m_bTnrVideo = (uint8_t)atoi(prop);
615 
616     memset(prop, 0, sizeof(prop));
617     property_get("persist.camera.avtimer.debug", prop, "0");
618     m_debug_avtimer = (uint8_t)atoi(prop);
619     LOGI("AV timer enabled: %d", m_debug_avtimer);
620 
621     memset(prop, 0, sizeof(prop));
622     property_get("persist.camera.cacmode.disable", prop, "0");
623     m_cacModeDisabled = (uint8_t)atoi(prop);
624 
625     m_bForceInfinityAf = property_get_bool("persist.camera.af.infinity", 0);
626     m_MobicatMask = (uint8_t)property_get_int32("persist.camera.mobicat", 0);
627 
628     //Load and read GPU library.
629     lib_surface_utils = NULL;
630     LINK_get_surface_pixel_alignment = NULL;
631     mSurfaceStridePadding = CAM_PAD_TO_64;
632 #ifdef CHECK_GPU_PIXEL_ALIGNMENT
633     lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
634     if (lib_surface_utils) {
635         *(void **)&LINK_get_surface_pixel_alignment =
636                 dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
637          if (LINK_get_surface_pixel_alignment) {
638              mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
639          }
640          dlclose(lib_surface_utils);
641     }
642 #endif
643     mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
644     mPDSupported = (0 <= mPDIndex) ? true : false;
645 
646     m60HzZone = is60HzZone();
647 }
648 
649 /*===========================================================================
650  * FUNCTION   : ~QCamera3HardwareInterface
651  *
652  * DESCRIPTION: destructor of QCamera3HardwareInterface
653  *
654  * PARAMETERS : none
655  *
656  * RETURN     : none
657  *==========================================================================*/
~QCamera3HardwareInterface()658 QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Clean up Easel error future first to avoid Easel error happens during destructor.
    cleanupEaselErrorFuture();

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // Close HDR+ client first before destroying HAL.
    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        finishHdrPlusClientOpeningLocked(l);
        closeHdrPlusClientLocked();
    }

    // unlink of dualcam during close camera: send a bundle-info command with
    // sync turned off so the related sensor session is released.
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        // gCamLock guards the global sessionId[] table read below.
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            // Best effort: an unlink failure must not block camera close.
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        stopChannelLocked(/*stop_immediately*/false);
    }

    // All channels are stopped now; it is safe to delete them and free the
    // per-stream bookkeeping entries.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    // These are aliases into mStreamInfo channels deleted above, so only the
    // pointers are cleared here (no delete).
    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure: an empty stream-info batch tells the
            //backend that no streams remain configured.
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    // closeCamera() releases the camera handle, flash, and Easel references.
    if (mState != CLOSED)
        closeCamera();

    // Drop any in-flight request bookkeeping and free per-request allocations.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    // Release the close-camera perf lock acquired at the top of the destructor.
    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}
828 
829 /*===========================================================================
830  * FUNCTION   : erasePendingRequest
831  *
832  * DESCRIPTION: function to erase a desired pending request after freeing any
833  *              allocated memory
834  *
835  * PARAMETERS :
836  *   @i       : iterator pointing to pending request to be erased
837  *
838  * RETURN     : iterator pointing to the next request
839  *==========================================================================*/
840 QCamera3HardwareInterface::pendingRequestIterator
erasePendingRequest(pendingRequestIterator i)841         QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
842 {
843     if (i->input_buffer != NULL) {
844         free(i->input_buffer);
845         i->input_buffer = NULL;
846     }
847     if (i->settings != NULL)
848         free_camera_metadata((camera_metadata_t*)i->settings);
849 
850     mExpectedInflightDuration -= i->expectedFrameDuration;
851     if (mExpectedInflightDuration < 0) {
852         LOGE("Negative expected in-flight duration!");
853         mExpectedInflightDuration = 0;
854     }
855 
856     return mPendingRequestsList.erase(i);
857 }
858 
859 /*===========================================================================
860  * FUNCTION   : camEvtHandle
861  *
862  * DESCRIPTION: Function registered to mm-camera-interface to handle events
863  *
864  * PARAMETERS :
865  *   @camera_handle : interface layer camera handle
866  *   @evt           : ptr to event
867  *   @user_data     : user data ptr
868  *
869  * RETURN     : none
870  *==========================================================================*/
camEvtHandle(uint32_t,mm_camera_event_t * evt,void * user_data)871 void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
872                                           mm_camera_event_t *evt,
873                                           void *user_data)
874 {
875     QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
876     if (obj && evt) {
877         switch(evt->server_event_type) {
878             case CAM_EVENT_TYPE_DAEMON_DIED:
879                 pthread_mutex_lock(&obj->mMutex);
880                 obj->mState = ERROR;
881                 pthread_mutex_unlock(&obj->mMutex);
882                 LOGE("Fatal, camera daemon died");
883                 break;
884 
885             case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
886                 LOGD("HAL got request pull from Daemon");
887                 pthread_mutex_lock(&obj->mMutex);
888                 obj->mWokenUpByDaemon = true;
889                 obj->unblockRequestIfNecessary();
890                 pthread_mutex_unlock(&obj->mMutex);
891                 break;
892 
893             default:
894                 LOGW("Warning: Unhandled event %d",
895                         evt->server_event_type);
896                 break;
897         }
898     } else {
899         LOGE("NULL user_data/evt");
900     }
901 }
902 
903 /*===========================================================================
904  * FUNCTION   : openCamera
905  *
906  * DESCRIPTION: open camera
907  *
908  * PARAMETERS :
909  *   @hw_device  : double ptr for camera device struct
910  *
911  * RETURN     : int32_t type of status
912  *              NO_ERROR  -- success
913  *              none-zero failure code
914  *==========================================================================*/
openCamera(struct hw_device_t ** hw_device)915 int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    // Only a fully closed device may be opened.
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
             mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            // Resume Easel only when this is the first active client.
            if (gActiveEaselClient == 0) {
                rc = gEaselManagerClient->resume(this);
                if (rc != 0) {
                    ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                    // NOTE(review): this early return leaves PERF_LOCK_OPEN_CAMERA
                    // held and *hw_device unset -- confirm the perf lock is
                    // released elsewhere on this failure path.
                    return rc;
                }
                mEaselFwUpdated = false;
            }
            gActiveEaselClient++;

            mQCamera3HdrPlusListenerThread = new QCamera3HdrPlusListenerThread(this);
            rc = mQCamera3HdrPlusListenerThread->run("QCamera3HdrPlusListenerThread");
            if (rc != OK) {
                ALOGE("%s: Starting HDR+ client listener thread failed: %s (%d)", __FUNCTION__,
                        strerror(-rc), rc);
                // NOTE(review): gActiveEaselClient was already incremented and is
                // not rolled back on this path -- verify against closeCamera()'s
                // reference counting.
                return rc;
            }
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            std::unique_lock<std::mutex> l(gHdrPlusClientLock);
            if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
                // Last active client suspends Easel before dropping the count.
                if (gActiveEaselClient == 1) {
                    status_t suspendErr = gEaselManagerClient->suspend();
                    if (suspendErr != 0) {
                        ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                                strerror(-suspendErr), suspendErr);
                    }
                }
                gActiveEaselClient--;
            }

            // Tear down the listener thread started above.
            if (mQCamera3HdrPlusListenerThread != nullptr) {
                mQCamera3HdrPlusListenerThread->requestExit();
                mQCamera3HdrPlusListenerThread->join();
                mQCamera3HdrPlusListenerThread = nullptr;
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
             mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}
994 
995 /*===========================================================================
996  * FUNCTION   : openCamera
997  *
998  * DESCRIPTION: open camera
999  *
1000  * PARAMETERS : none
1001  *
1002  * RETURN     : int32_t type of status
1003  *              NO_ERROR  -- success
1004  *              none-zero failure code
1005  *==========================================================================*/
openCamera()1006 int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    // The flash unit is shared with the torch HAL; reserve it for capture use.
    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    // Route daemon events (daemon died, request pull) to camEvtHandle.
    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    // Scratch buffer for 3A debug data embedded into JPEG EXIF.
    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        // Only the first concurrent session toggles the launch status.
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
        &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get sessiion id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            // NOTE(review): rc is set to FAILED_TRANSACTION but NO_MEMORY is
            // returned -- the mismatch looks unintentional; confirm intended
            // error code with callers.
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}
1106 
1107 /*===========================================================================
1108  * FUNCTION   : closeCamera
1109  *
1110  * DESCRIPTION: close camera
1111  *
1112  * PARAMETERS : none
1113  *
1114  * RETURN     : int32_t type of status
1115  *              NO_ERROR  -- success
1116  *              none-zero failure code
1117  *==========================================================================*/
closeCamera()1118 int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
             mCameraId);

    // unmap memory for related cam sync buffer (allocated in openCamera())
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        // Only the last concurrent session toggles the launch status off.
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    // Free the 3A debug EXIF scratch buffer allocated in openCamera().
    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
         mCameraId, rc);

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        // NOTE(review): the open path checks gEaselManagerClient directly while
        // this path checks EaselManagerClientOpened -- presumably the latter
        // tracks a successful open; confirm the two stay in sync.
        if (EaselManagerClientOpened) {
            // Last active client suspends Easel before dropping the count.
            if (gActiveEaselClient == 1) {
                rc = gEaselManagerClient->suspend();
                if (rc != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                }
            }
            gActiveEaselClient--;
        }

        if (mQCamera3HdrPlusListenerThread != nullptr) {
            mQCamera3HdrPlusListenerThread->requestExit();
            mQCamera3HdrPlusListenerThread->join();
            mQCamera3HdrPlusListenerThread = nullptr;
        }
    }

    return rc;
}
1190 
1191 /*===========================================================================
1192  * FUNCTION   : initialize
1193  *
1194  * DESCRIPTION: Initialize frameworks callback functions
1195  *
1196  * PARAMETERS :
1197  *   @callback_ops : callback function to frameworks
1198  *
1199  * RETURN     :
1200  *
1201  *==========================================================================*/
initialize(const struct camera3_callback_ops * callback_ops)1202 int QCamera3HardwareInterface::initialize(
1203         const struct camera3_callback_ops *callback_ops)
1204 {
1205     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
1206     int rc;
1207 
1208     LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
1209     pthread_mutex_lock(&mMutex);
1210 
1211     // Validate current state
1212     switch (mState) {
1213         case OPENED:
1214             /* valid state */
1215             break;
1216         default:
1217             LOGE("Invalid state %d", mState);
1218             rc = -ENODEV;
1219             goto err1;
1220     }
1221 
1222     rc = initParameters();
1223     if (rc < 0) {
1224         LOGE("initParamters failed %d", rc);
1225         goto err1;
1226     }
1227     mCallbackOps = callback_ops;
1228 
1229     mChannelHandle = mCameraHandle->ops->add_channel(
1230             mCameraHandle->camera_handle, NULL, NULL, this);
1231     if (mChannelHandle == 0) {
1232         LOGE("add_channel failed");
1233         rc = -ENOMEM;
1234         pthread_mutex_unlock(&mMutex);
1235         return rc;
1236     }
1237 
1238     pthread_mutex_unlock(&mMutex);
1239     mCameraInitialized = true;
1240     mState = INITIALIZED;
1241     LOGI("X");
1242     return 0;
1243 
1244 err1:
1245     pthread_mutex_unlock(&mMutex);
1246     return rc;
1247 }
1248 
1249 /*===========================================================================
1250  * FUNCTION   : validateStreamDimensions
1251  *
1252  * DESCRIPTION: Check if the configuration requested are those advertised
1253  *
1254  * PARAMETERS :
1255  *   @cameraId : cameraId
1256  *   @stream_list : streams to be configured
1257  *
1258  * RETURN     :
1259  *
1260  *==========================================================================*/
validateStreamDimensions(uint32_t cameraId,camera3_stream_configuration_t * streamList)1261 int QCamera3HardwareInterface::validateStreamDimensions(uint32_t cameraId,
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    // PDAF depth stream dimensions, if the sensor advertises a PD stat index.
    auto pDIndex = getPDStatIndex(gCamCapability[cameraId]);
    bool pDSupported = (0 <= pDIndex) ? true : false;
    if (pDSupported) {
        depthWidth = gCamCapability[cameraId]->raw_meta_dim[pDIndex].width;
        depthHeight = gCamCapability[cameraId]->raw_meta_dim[pDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find input stream if it exists*
    */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        // 90/270-degree rotations swap the effective width and height before
        // comparing against the capability tables.
        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            // RAW16 depth streams must exactly match the PD stat dimensions.
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    pDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[cameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[cameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    pDSupported) {
                //As per spec. depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[cameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[cameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_Y8:
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            // ZSL/input/bidirectional streams may be full active array size.
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                                gCamCapability[cameraId]->active_array_size.width) &&
                                ((int32_t)rotatedHeight ==
                                gCamCapability[cameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always is full active array size
                 * but it is not clear from the spec if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment
                 */
            }
            count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                            gCamCapability[cameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[cameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[cameraId]->active_array_size.width,
                    gCamCapability[cameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
1400 
1401 /*===========================================================================
1402  * FUNCTION   : validateUsageFlags
1403  *
1404  * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1405  *
1406  * PARAMETERS :
1407  *   @cameraId : camera id.
1408  *   @stream_list : streams to be configured
1409  *
1410  * RETURN     :
1411  *   NO_ERROR if the usage flags are supported
1412  *   error code if usage flags are not supported
1413  *
1414  *==========================================================================*/
validateUsageFlags(uint32_t cameraId,const camera3_stream_configuration_t * streamList)1415 int QCamera3HardwareInterface::validateUsageFlags(uint32_t cameraId,
1416         const camera3_stream_configuration_t* streamList)
1417 {
1418     for (size_t j = 0; j < streamList->num_streams; j++) {
1419         const camera3_stream_t *newStream = streamList->streams[j];
1420 
1421         if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1422             (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1423              newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1424             continue;
1425         }
1426 
1427         // Here we only care whether it's EIS3 or not
1428         char is_type_value[PROPERTY_VALUE_MAX];
1429         property_get("persist.camera.is_type", is_type_value, "4");
1430         cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1431         if (gCamCapability[cameraId]->position == CAM_POSITION_FRONT ||
1432                 streamList->operation_mode ==
1433                 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1434             isType = IS_TYPE_NONE;
1435 
1436         bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1437         bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1438         bool isZSL = IS_USAGE_ZSL(newStream->usage);
1439         bool forcePreviewUBWC = true;
1440         if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1441             forcePreviewUBWC = false;
1442         }
1443         cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
1444                 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
1445         cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
1446                 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
1447         cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
1448                 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
1449 
1450         // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1451         // So color spaces will always match.
1452 
1453         // Check whether underlying formats of shared streams match.
1454         if (isVideo && isPreview && videoFormat != previewFormat) {
1455             LOGE("Combined video and preview usage flag is not supported");
1456             return -EINVAL;
1457         }
1458         if (isPreview && isZSL && previewFormat != zslFormat) {
1459             LOGE("Combined preview and zsl usage flag is not supported");
1460             return -EINVAL;
1461         }
1462         if (isVideo && isZSL && videoFormat != zslFormat) {
1463             LOGE("Combined video and zsl usage flag is not supported");
1464             return -EINVAL;
1465         }
1466     }
1467     return NO_ERROR;
1468 }
1469 
1470 /*===========================================================================
1471  * FUNCTION   : validateUsageFlagsForEis
1472  *
1473  * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1474  *
1475  * PARAMETERS :
1476  *   @bEisEnable : Flag indicated that EIS is enabled.
1477  *   @bEisSupportedSize : Flag indicating that there is a preview/video stream
1478  *                        within the EIS supported size.
1479  *   @stream_list : streams to be configured
1480  *
1481  * RETURN     :
1482  *   NO_ERROR if the usage flags are supported
1483  *   error code if usage flags are not supported
1484  *
1485  *==========================================================================*/
validateUsageFlagsForEis(bool bEisEnable,bool bEisSupportedSize,const camera3_stream_configuration_t * streamList)1486 int QCamera3HardwareInterface::validateUsageFlagsForEis(bool bEisEnable, bool bEisSupportedSize,
1487         const camera3_stream_configuration_t* streamList)
1488 {
1489     for (size_t j = 0; j < streamList->num_streams; j++) {
1490         const camera3_stream_t *newStream = streamList->streams[j];
1491 
1492         bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1493         bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1494 
1495         // Because EIS is "hard-coded" for certain use case, and current
1496        // implementation doesn't support shared preview and video on the same
1497         // stream, return failure if EIS is forced on.
1498         if (isPreview && isVideo && bEisEnable && bEisSupportedSize) {
1499             LOGE("Combined video and preview usage flag is not supported due to EIS");
1500             return -EINVAL;
1501         }
1502     }
1503     return NO_ERROR;
1504 }
1505 
1506 /*==============================================================================
1507  * FUNCTION   : isSupportChannelNeeded
1508  *
1509  * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1510  *
1511  * PARAMETERS :
1512  *   @stream_list : streams to be configured
1513  *   @stream_config_info : the config info for streams to be configured
1514  *
 * RETURN     : Boolean true/false decision
1516  *
1517  *==========================================================================*/
isSupportChannelNeeded(camera3_stream_configuration_t * streamList,cam_stream_size_info_t stream_config_info)1518 bool QCamera3HardwareInterface::isSupportChannelNeeded(
1519         camera3_stream_configuration_t *streamList,
1520         cam_stream_size_info_t stream_config_info)
1521 {
1522     uint32_t i;
1523     bool pprocRequested = false;
1524     /* Check for conditions where PProc pipeline does not have any streams*/
1525     for (i = 0; i < stream_config_info.num_streams; i++) {
1526         if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1527                 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1528             pprocRequested = true;
1529             break;
1530         }
1531     }
1532 
1533     if (pprocRequested == false )
1534         return true;
1535 
1536     /* Dummy stream needed if only raw or jpeg streams present */
1537     for (i = 0; i < streamList->num_streams; i++) {
1538         switch(streamList->streams[i]->format) {
1539             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1540             case HAL_PIXEL_FORMAT_RAW10:
1541             case HAL_PIXEL_FORMAT_RAW16:
1542             case HAL_PIXEL_FORMAT_BLOB:
1543                 break;
1544             default:
1545                 return false;
1546         }
1547     }
1548     return true;
1549 }
1550 
1551 /*==============================================================================
 * FUNCTION   : getSensorModeInfo
 *
 * DESCRIPTION: Get sensor mode information based on current stream configuration
 *
 * PARAMETERS :
 *   @sensorModeInfo : sensor mode information (output)
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
1562  *
1563  *==========================================================================*/
int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
{
    int32_t rc = NO_ERROR;

    // The backend selects the sensor mode from the largest dimension it must
    // serve, so take the per-axis maximum over all configured streams (the
    // max width and max height may come from different streams).
    cam_dimension_t max_dim = {0, 0};
    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
    }

    // NOTE: mParameters is used as a scratch batch here; unlike
    // getCurrentSensorModeInfo() its previous contents are NOT restored.
    clear_metadata_buffer(mParameters);

    // Push the max dimension to the backend first, so the subsequent sensor
    // mode query reflects the mode chosen for this stream configuration.
    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
            max_dim);
    if (rc != NO_ERROR) {
        LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    // Now query the sensor mode that was selected for the dimension above.
    clear_metadata_buffer(mParameters);
    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);

    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
        return rc;
    }

    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
    LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
            "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
            sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
            sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
            sensorModeInfo.num_raw_bits);

    return rc;
}
1610 
1611 /*==============================================================================
1612  * FUNCTION   : getCurrentSensorModeInfo
1613  *
1614  * DESCRIPTION: Get sensor mode information that is currently selected.
1615  *
1616  * PARAMETERS :
1617  *   @sensorModeInfo : sensor mode information (output)
1618  *
1619  * RETURN     : int32_t type of status
1620  *              NO_ERROR  -- success
 *              non-zero failure code
1622  *
1623  *==========================================================================*/
getCurrentSensorModeInfo(cam_sensor_mode_info_t & sensorModeInfo)1624 int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1625 {
1626     int32_t rc = NO_ERROR;
1627 
1628     metadata_buffer_t *cachedParameters = (metadata_buffer_t *) malloc(sizeof(metadata_buffer_t));
1629     if (nullptr == cachedParameters) {
1630         return NO_MEMORY;
1631     }
1632 
1633     memcpy(cachedParameters, mParameters, sizeof(metadata_buffer_t));
1634 
1635     clear_metadata_buffer(mParameters);
1636     ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1637 
1638     rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1639             mParameters);
1640     if (rc != NO_ERROR) {
1641         LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
1642         free(cachedParameters);
1643         return rc;
1644     }
1645 
1646     READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1647     LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1648             "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1649             sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1650             sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1651             sensorModeInfo.num_raw_bits);
1652 
1653     memcpy(mParameters, cachedParameters, sizeof(metadata_buffer_t));
1654     free(cachedParameters);
1655 
1656     return rc;
1657 }
1658 
1659 /*==============================================================================
1660  * FUNCTION   : addToPPFeatureMask
1661  *
1662  * DESCRIPTION: add additional features to pp feature mask based on
1663  *              stream type and usecase
1664  *
1665  * PARAMETERS :
1666  *   @stream_format : stream type for feature mask
1667  *   @stream_idx : stream idx within postprocess_mask list to change
1668  *
1669  * RETURN     : NULL
1670  *
1671  *==========================================================================*/
void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
        uint32_t stream_idx)
{
    char feature_mask_value[PROPERTY_VALUE_MAX];
    cam_feature_mask_t feature_mask;
    int args_converted;
    int property_len;

    /* Get feature mask from property */
#ifdef _LE_CAMERA_
    // On LE targets, SW TNR is the default feature mask when the property is
    // unset.
    char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
    snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, swtnr_feature_mask_value);
#else
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, "0");
#endif
    // The property may be given in hex ("0x...") or decimal; the "%ll"
    // formats assume cam_feature_mask_t is 64-bit — TODO confirm against
    // its typedef.
    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
            (feature_mask_value[1] == 'x')) {
        args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
    } else {
        args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
    }
    if (1 != args_converted) {
        // Unparseable property: leave the stream's postprocess mask untouched.
        feature_mask = 0;
        LOGE("Wrong feature mask %s", feature_mask_value);
        return;
    }

    switch (stream_format) {
    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
        /* Add LLVD to pp feature mask only if video hint is enabled */
        // SW TNR takes precedence over LLVD (SeeMore) when both bits are set
        // in the property mask; both require an active video use case.
        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QTI_FEATURE_SW_TNR;
            LOGH("Added SW TNR to pp feature mask");
        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QCOM_FEATURE_LLVD;
            LOGH("Added LLVD SeeMore to pp feature mask");
        }
        // Staggered video HDR is gated only on camera capability, not on the
        // property mask or the video hint.
        if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
            mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
        }
        if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                CAM_QTI_FEATURE_BINNING_CORRECTION)) {
            mStreamConfigInfo.postprocess_mask[stream_idx] |=
                    CAM_QTI_FEATURE_BINNING_CORRECTION;
        }
        break;
    }
    default:
        // Other stream formats get no extra postprocessing features.
        break;
    }
    LOGD("PP feature mask %llx",
            mStreamConfigInfo.postprocess_mask[stream_idx]);
}
1731 
1732 /*==============================================================================
1733  * FUNCTION   : updateFpsInPreviewBuffer
1734  *
1735  * DESCRIPTION: update FPS information in preview buffer.
1736  *
1737  * PARAMETERS :
1738  *   @metadata    : pointer to metadata buffer
1739  *   @frame_number: frame_number to look for in pending buffer list
1740  *
1741  * RETURN     : None
1742  *
1743  *==========================================================================*/
updateFpsInPreviewBuffer(metadata_buffer_t * metadata,uint32_t frame_number)1744 void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1745         uint32_t frame_number)
1746 {
1747     // Mark all pending buffers for this particular request
1748     // with corresponding framerate information
1749     for (List<PendingBuffersInRequest>::iterator req =
1750             mPendingBuffersMap.mPendingBuffersInRequest.begin();
1751             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1752         for(List<PendingBufferInfo>::iterator j =
1753                 req->mPendingBufferList.begin();
1754                 j != req->mPendingBufferList.end(); j++) {
1755             QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1756             if ((req->frame_number == frame_number) &&
1757                 (channel->getStreamTypeMask() &
1758                 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1759                 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1760                     CAM_INTF_PARM_FPS_RANGE, metadata) {
1761                     typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1762                     struct private_handle_t *priv_handle =
1763                         (struct private_handle_t *)(*(j->buffer));
1764                     setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1765                 }
1766             }
1767         }
1768     }
1769 }
1770 
1771 /*==============================================================================
1772  * FUNCTION   : updateTimeStampInPendingBuffers
1773  *
1774  * DESCRIPTION: update timestamp in display metadata for all pending buffers
1775  *              of a frame number
1776  *
1777  * PARAMETERS :
1778  *   @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1779  *   @timestamp   : timestamp to be set
1780  *
1781  * RETURN     : None
1782  *
1783  *==========================================================================*/
updateTimeStampInPendingBuffers(uint32_t frameNumber,nsecs_t timestamp)1784 void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1785         uint32_t frameNumber, nsecs_t timestamp)
1786 {
1787     for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1788             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1789         // WAR: save the av_timestamp to the next frame
1790         if(req->frame_number == frameNumber + 1) {
1791             req->av_timestamp = timestamp;
1792         }
1793 
1794         if (req->frame_number != frameNumber)
1795             continue;
1796 
1797         for (auto k = req->mPendingBufferList.begin();
1798                 k != req->mPendingBufferList.end(); k++ ) {
1799             // WAR: update timestamp when it's not VT usecase
1800             QCamera3Channel *channel = (QCamera3Channel *)k->stream->priv;
1801             if (!((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
1802                 m_bAVTimerEnabled)) {
1803                     struct private_handle_t *priv_handle =
1804                         (struct private_handle_t *) (*(k->buffer));
1805                     setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1806             }
1807         }
1808     }
1809     return;
1810 }
1811 
1812 /*===========================================================================
1813  * FUNCTION   : configureStreams
1814  *
1815  * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1816  *              and output streams.
1817  *
1818  * PARAMETERS :
1819  *   @stream_list : streams to be configured
1820  *
1821  * RETURN     :
1822  *
1823  *==========================================================================*/
configureStreams(camera3_stream_configuration_t * streamList)1824 int QCamera3HardwareInterface::configureStreams(
1825         camera3_stream_configuration_t *streamList)
1826 {
1827     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
1828     int rc = 0;
1829 
1830     // Acquire perfLock before configure streams
1831     mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
1832     rc = configureStreamsPerfLocked(streamList);
1833     mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
1834 
1835     return rc;
1836 }
1837 
1838 /*===========================================================================
1839  * FUNCTION   : validateStreamCombination
1840  *
1841  * DESCRIPTION: Validate a given stream combination.
1842  *
1843  * PARAMETERS :
1844  *   @cameraId : camera Id.
1845  *   @stream_list : stream combination to be validated.
1846  *   @status : validation status.
1847  *
1848  * RETURN     : int32_t type of status
1849  *              NO_ERROR  -- success
1850  *              none-zero failure code
1851  *==========================================================================*/
validateStreamCombination(uint32_t cameraId,camera3_stream_configuration_t * streamList,StreamValidateStatus * status)1852 int32_t QCamera3HardwareInterface::validateStreamCombination(uint32_t cameraId,
1853         camera3_stream_configuration_t *streamList /*in*/, StreamValidateStatus *status /*out*/)
1854 {
1855     bool isJpeg = false;
1856     bool bJpegExceeds4K = false;
1857     bool bJpegOnEncoder = false;
1858     uint32_t width_ratio;
1859     uint32_t height_ratio;
1860     size_t rawStreamCnt = 0;
1861     size_t stallStreamCnt = 0;
1862     size_t processedStreamCnt = 0;
1863     size_t pdStatCount = 0;
1864     size_t numYuv888OnEncoder = 0;
1865     cam_dimension_t jpegSize = {0, 0};
1866     camera3_stream_t *zslStream = nullptr;
1867     uint32_t maxEisWidth = 0;
1868     uint32_t maxEisHeight = 0;
1869 
1870     if (status == nullptr) {
1871         LOGE("NULL stream status");
1872         return BAD_VALUE;
1873     }
1874 
1875     // Sanity check stream_list
1876     if (streamList == NULL) {
1877         LOGE("NULL stream configuration");
1878         return BAD_VALUE;
1879     }
1880     if (streamList->streams == NULL) {
1881         LOGE("NULL stream list");
1882         return BAD_VALUE;
1883     }
1884 
1885     if (streamList->num_streams < 1) {
1886         LOGE("Bad number of streams requested: %d",
1887                 streamList->num_streams);
1888         return BAD_VALUE;
1889     }
1890 
1891     if (streamList->num_streams >= MAX_NUM_STREAMS) {
1892         LOGE("Maximum number of streams %d exceeded: %d",
1893                 MAX_NUM_STREAMS, streamList->num_streams);
1894         return BAD_VALUE;
1895     }
1896 
1897     auto rc = validateUsageFlags(cameraId, streamList);
1898     if (rc != NO_ERROR) {
1899         return rc;
1900     }
1901 
1902     rc = validateStreamDimensions(cameraId, streamList);
1903     if (rc == NO_ERROR) {
1904         rc = validateStreamRotations(streamList);
1905     }
1906     if (rc != NO_ERROR) {
1907         LOGE("Invalid stream configuration requested!");
1908         return rc;
1909     }
1910 
1911     size_t count = IS_TYPE_MAX;
1912     count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
1913     for (size_t i = 0; i < count; i++) {
1914         if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
1915             (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1916             status->bEisSupported = true;
1917             break;
1918         }
1919     }
1920 
1921     if (status->bEisSupported) {
1922         maxEisWidth = MAX_EIS_WIDTH;
1923         maxEisHeight = MAX_EIS_HEIGHT;
1924     }
1925 
1926     status->maxViewfinderSize = gCamCapability[cameraId]->max_viewfinder_size;
1927     status->largeYuv888Size = {0, 0};
1928     /* stream configurations */
1929     for (size_t i = 0; i < streamList->num_streams; i++) {
1930         camera3_stream_t *newStream = streamList->streams[i];
1931         LOGI("stream[%d] type = %d, format = %d, width = %d, "
1932                 "height = %d, rotation = %d, usage = 0x%x",
1933                  i, newStream->stream_type, newStream->format,
1934                 newStream->width, newStream->height, newStream->rotation,
1935                 newStream->usage);
1936         if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1937             status->isZsl = true;
1938             status->inputStream = newStream;
1939         }
1940 
1941         if (IS_USAGE_ZSL(newStream->usage)) {
1942             if (zslStream != nullptr) {
1943                 LOGE("Multiple input/reprocess streams requested!");
1944                 return BAD_VALUE;
1945             }
1946             zslStream = newStream;
1947         }
1948 
1949         if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1950                 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
1951             isJpeg = true;
1952             jpegSize.width = newStream->width;
1953             jpegSize.height = newStream->height;
1954             if (newStream->width > VIDEO_4K_WIDTH ||
1955                     newStream->height > VIDEO_4K_HEIGHT)
1956                 bJpegExceeds4K = true;
1957         }
1958 
1959         if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1960                 (IS_USAGE_PREVIEW(newStream->usage) || IS_USAGE_VIDEO(newStream->usage))) {
1961             if (IS_USAGE_VIDEO(newStream->usage)) {
1962                 status->bIsVideo = true;
1963                 // In HAL3 we can have multiple different video streams.
1964                 // The variables video width and height are used below as
1965                 // dimensions of the biggest of them
1966                 if (status->videoWidth < newStream->width ||
1967                         status->videoHeight < newStream->height) {
1968                     status->videoWidth = newStream->width;
1969                     status->videoHeight = newStream->height;
1970                 }
1971                 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1972                         (VIDEO_4K_HEIGHT <= newStream->height)) {
1973                     status->bIs4KVideo = true;
1974                 }
1975             }
1976             status->bEisSupportedSize &= (newStream->width <= maxEisWidth) &&
1977                                   (newStream->height <= maxEisHeight);
1978         }
1979         if (newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1980             switch (newStream->format) {
1981             case HAL_PIXEL_FORMAT_BLOB:
1982                 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1983                     status->depthPresent = true;
1984                     break;
1985                 }
1986                 stallStreamCnt++;
1987                 if (isOnEncoder(status->maxViewfinderSize, newStream->width,
1988                         newStream->height)) {
1989                     status->numStreamsOnEncoder++;
1990                     bJpegOnEncoder = true;
1991                 }
1992                 width_ratio = CEIL_DIVISION(gCamCapability[cameraId]->active_array_size.width,
1993                         newStream->width);
1994                 height_ratio = CEIL_DIVISION(gCamCapability[cameraId]->active_array_size.height,
1995                         newStream->height);;
1996                 FATAL_IF(gCamCapability[cameraId]->max_downscale_factor == 0,
1997                         "FATAL: max_downscale_factor cannot be zero and so assert");
1998                 if ( (width_ratio > gCamCapability[cameraId]->max_downscale_factor) ||
1999                     (height_ratio > gCamCapability[cameraId]->max_downscale_factor)) {
2000                     LOGH("Setting small jpeg size flag to true");
2001                     status->bSmallJpegSize = true;
2002                 }
2003                 break;
2004             case HAL_PIXEL_FORMAT_RAW10:
2005             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2006             case HAL_PIXEL_FORMAT_RAW16:
2007                 rawStreamCnt++;
2008                 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2009                         (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2010                     pdStatCount++;
2011                 }
2012                 break;
2013             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2014                 processedStreamCnt++;
2015                 if (isOnEncoder(status->maxViewfinderSize, newStream->width,
2016                         newStream->height)) {
2017                     if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2018                             !IS_USAGE_ZSL(newStream->usage)) {
2019                         status->commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2020                     }
2021                     status->numStreamsOnEncoder++;
2022                 }
2023                 break;
2024             case HAL_PIXEL_FORMAT_YCbCr_420_888:
2025             case HAL_PIXEL_FORMAT_Y8:
2026                 processedStreamCnt++;
2027                 if (isOnEncoder(status->maxViewfinderSize, newStream->width,
2028                         newStream->height)) {
2029                     // If Yuv888/Y8 size is not greater than 4K, set feature mask
2030                     // to SUPERSET so that it support concurrent request on
2031                     // YUV and JPEG.
2032                     if (newStream->width <= VIDEO_4K_WIDTH &&
2033                             newStream->height <= VIDEO_4K_HEIGHT) {
2034                         status->commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2035                     }
2036                     if (newStream->format == HAL_PIXEL_FORMAT_Y8) {
2037                         status->bY80OnEncoder = true;
2038                     }
2039                     status->numStreamsOnEncoder++;
2040                     numYuv888OnEncoder++;
2041                     status->largeYuv888Size.width = newStream->width;
2042                     status->largeYuv888Size.height = newStream->height;
2043                 }
2044                 break;
2045             default:
2046                 LOGE("not a supported format 0x%x", newStream->format);
2047                 return BAD_VALUE;
2048             }
2049         }
2050     }
2051 
2052     if (validateUsageFlagsForEis(status->bEisSupported, status->bEisSupportedSize, streamList) !=
2053             NO_ERROR) {
2054         return BAD_VALUE;
2055     }
2056 
2057     /* Check if num_streams is sane */
2058     if (stallStreamCnt > MAX_STALLING_STREAMS ||
2059             rawStreamCnt > MAX_RAW_STREAMS ||
2060             processedStreamCnt > MAX_PROCESSED_STREAMS) {
2061         LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
2062                  stallStreamCnt, rawStreamCnt, processedStreamCnt);
2063         return BAD_VALUE;
2064     }
2065     /* Check whether we have zsl stream or 4k video case */
2066     if (status->isZsl && status->bIs4KVideo) {
2067         LOGE("Currently invalid configuration ZSL & 4K Video!");
2068         return BAD_VALUE;
2069     }
2070     /* Check if stream sizes are sane */
2071     if (status->numStreamsOnEncoder > 2) {
2072         LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2073         return BAD_VALUE;
2074     } else if (1 < status->numStreamsOnEncoder){
2075         status->bUseCommonFeatureMask = true;
2076         LOGH("Multiple streams above max viewfinder size, common mask needed");
2077     }
2078 
2079     /* Check if BLOB size is greater than 4k in 4k recording case */
2080     if (status->bIs4KVideo && bJpegExceeds4K) {
2081         LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2082         return BAD_VALUE;
2083     }
2084 
2085     if ((streamList->operation_mode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2086             status->depthPresent) {
2087         LOGE("HAL doesn't support depth streams in HFR mode!");
2088         return BAD_VALUE;
2089     }
2090 
2091     // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2092     // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2093     // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2094     // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2095     // configurations:
2096     //    {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2097     //    {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2098     //    (These two configurations will not have CAC2 enabled even in HQ modes.)
2099     if (!status->isZsl && bJpegOnEncoder && bJpegExceeds4K && status->bUseCommonFeatureMask) {
2100         ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2101                 __func__);
2102         return BAD_VALUE;
2103     }
2104 
2105     // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2106     // the YUV stream's size is greater or equal to the JPEG size, set common
2107     // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2108     if (numYuv888OnEncoder && isOnEncoder(status->maxViewfinderSize,
2109             jpegSize.width, jpegSize.height) &&
2110             status->largeYuv888Size.width > jpegSize.width &&
2111             status->largeYuv888Size.height > jpegSize.height) {
2112         status->bYuv888OverrideJpeg = true;
2113     } else if (!isJpeg && status->numStreamsOnEncoder > 1) {
2114         status->commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2115     }
2116 
2117     LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2118             status->maxViewfinderSize.width, status->maxViewfinderSize.height, status->isZsl,
2119             status->bUseCommonFeatureMask, status->commonFeatureMask);
2120     LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2121             status->numStreamsOnEncoder, processedStreamCnt, stallStreamCnt,
2122             status->bSmallJpegSize);
2123 
2124     if (1 < pdStatCount) {
2125         LOGE("HAL doesn't support multiple PD streams");
2126         return BAD_VALUE;
2127     }
2128 
2129     if ((streamList->operation_mode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2130             (1 == pdStatCount)) {
2131         LOGE("HAL doesn't support PD streams in HFR mode!");
2132         return -EINVAL;
2133     }
2134 
2135     return NO_ERROR;
2136 }
2137 
2138 /*===========================================================================
2139  * FUNCTION   : configureStreamsPerfLocked
2140  *
2141  * DESCRIPTION: configureStreams while perfLock is held.
2142  *
2143  * PARAMETERS :
2144  *   @stream_list : streams to be configured
2145  *
2146  * RETURN     : int32_t type of status
2147  *              NO_ERROR  -- success
2148  *              none-zero failure code
2149  *==========================================================================*/
configureStreamsPerfLocked(camera3_stream_configuration_t * streamList)2150 int QCamera3HardwareInterface::configureStreamsPerfLocked(
2151         camera3_stream_configuration_t *streamList)
2152 {
2153     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
2154 
2155     StreamValidateStatus streamStatus;
2156     auto rc = validateStreamCombination(mCameraId, streamList, &streamStatus);
2157     if (NO_ERROR != rc) {
2158         return rc;
2159     }
2160 
2161     mOpMode = streamList->operation_mode;
2162     LOGD("mOpMode: %d", mOpMode);
2163 
2164     // Disable HDR+ if it's enabled;
2165     {
2166         std::unique_lock<std::mutex> l(gHdrPlusClientLock);
2167         finishHdrPlusClientOpeningLocked(l);
2168         disableHdrPlusModeLocked();
2169     }
2170 
2171     /* first invalidate all the steams in the mStreamList
2172      * if they appear again, they will be validated */
2173     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2174             it != mStreamInfo.end(); it++) {
2175         QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
2176         if (channel) {
2177           channel->stop();
2178         }
2179         (*it)->status = INVALID;
2180     }
2181 
2182     if (mRawDumpChannel) {
2183         mRawDumpChannel->stop();
2184         delete mRawDumpChannel;
2185         mRawDumpChannel = NULL;
2186     }
2187 
2188     if (mHdrPlusRawSrcChannel) {
2189         mHdrPlusRawSrcChannel->stop();
2190         delete mHdrPlusRawSrcChannel;
2191         mHdrPlusRawSrcChannel = NULL;
2192     }
2193 
2194     if (mSupportChannel)
2195         mSupportChannel->stop();
2196 
2197     if (mAnalysisChannel) {
2198         mAnalysisChannel->stop();
2199     }
2200     if (mMetadataChannel) {
2201         /* If content of mStreamInfo is not 0, there is metadata stream */
2202         mMetadataChannel->stop();
2203     }
2204     if (mChannelHandle) {
2205         stopChannelLocked(/*stop_immediately*/false);
2206     }
2207 
2208     pthread_mutex_lock(&mMutex);
2209 
2210     mPictureChannel = NULL;
2211 
2212     // Check state
2213     switch (mState) {
2214         case INITIALIZED:
2215         case CONFIGURED:
2216         case STARTED:
2217             /* valid state */
2218             break;
2219         default:
2220             LOGE("Invalid state %d", mState);
2221             pthread_mutex_unlock(&mMutex);
2222             return -ENODEV;
2223     }
2224 
2225     /* Check whether we have video stream */
2226     m_bIs4KVideo = streamStatus.bIs4KVideo;
2227     m_bIsVideo = streamStatus.bIsVideo;
2228     m_bEisSupported = streamStatus.bEisSupported;
2229     m_bEisSupportedSize = streamStatus.bEisSupportedSize;
2230     m_bTnrEnabled = false;
2231     m_bVideoHdrEnabled = false;
2232     cam_dimension_t previewSize = {0, 0};
2233 
2234     cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
2235 
2236     /*EIS configuration*/
2237     uint8_t eis_prop_set;
2238 
2239     // Initialize all instant AEC related variables
2240     mInstantAEC = false;
2241     mResetInstantAEC = false;
2242     mInstantAECSettledFrameNumber = 0;
2243     mAecSkipDisplayFrameBound = 0;
2244     mInstantAecFrameIdxCount = 0;
2245     mCurrFeatureState = 0;
2246     mStreamConfig = true;
2247 
2248     m_bAVTimerEnabled = false;
2249 
2250     memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
2251 
2252     /* EIS setprop control */
2253     char eis_prop[PROPERTY_VALUE_MAX];
2254     memset(eis_prop, 0, sizeof(eis_prop));
2255     property_get("persist.camera.eis.enable", eis_prop, "1");
2256     eis_prop_set = (uint8_t)atoi(eis_prop);
2257 
2258     m_bEisEnable = eis_prop_set && m_bEisSupported &&
2259             (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2260             (gCamCapability[mCameraId]->position == CAM_POSITION_BACK ||
2261              gCamCapability[mCameraId]->position == CAM_POSITION_BACK_AUX);
2262 
2263     LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
2264             m_bEisEnable, eis_prop_set, m_bEisSupported);
2265 
2266     uint8_t forceEnableTnr = 0;
2267     char tnr_prop[PROPERTY_VALUE_MAX];
2268     memset(tnr_prop, 0, sizeof(tnr_prop));
2269     property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2270     forceEnableTnr = (uint8_t)atoi(tnr_prop);
2271 
2272     /* Logic to enable/disable TNR based on specific config size/etc.*/
2273     if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
2274             (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2275         m_bTnrEnabled = true;
2276     else if (forceEnableTnr)
2277         m_bTnrEnabled = true;
2278 
2279     char videoHdrProp[PROPERTY_VALUE_MAX];
2280     memset(videoHdrProp, 0, sizeof(videoHdrProp));
2281     property_get("persist.camera.hdr.video", videoHdrProp, "0");
2282     uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2283 
2284     if (hdr_mode_prop == 1 && m_bIsVideo &&
2285             mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2286         m_bVideoHdrEnabled = true;
2287     else
2288         m_bVideoHdrEnabled = false;
2289 
2290     camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2291     for (size_t i = 0; i < streamList->num_streams; i++) {
2292         camera3_stream_t *newStream = streamList->streams[i];
2293         LOGH("newStream type = %d, stream format = %d "
2294                 "stream size : %d x %d, stream rotation = %d",
2295                  newStream->stream_type, newStream->format,
2296                 newStream->width, newStream->height, newStream->rotation);
2297         //if the stream is in the mStreamList validate it
2298         bool stream_exists = false;
2299         for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2300                 it != mStreamInfo.end(); it++) {
2301             if ((*it)->stream == newStream) {
2302                 QCamera3ProcessingChannel *channel =
2303                     (QCamera3ProcessingChannel*)(*it)->stream->priv;
2304                 stream_exists = true;
2305                 if (channel)
2306                     delete channel;
2307                 (*it)->status = VALID;
2308                 (*it)->stream->priv = NULL;
2309                 (*it)->channel = NULL;
2310             }
2311         }
2312         if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2313             //new stream
2314             stream_info_t* stream_info;
2315             stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2316             if (!stream_info) {
2317                LOGE("Could not allocate stream info");
2318                rc = -ENOMEM;
2319                pthread_mutex_unlock(&mMutex);
2320                return rc;
2321             }
2322             stream_info->stream = newStream;
2323             stream_info->status = VALID;
2324             stream_info->channel = NULL;
2325             stream_info->id = i; // ID will be re-assigned in cleanAndSortStreamInfo().
2326             mStreamInfo.push_back(stream_info);
2327         }
2328         /* Covers Opaque ZSL and API1 F/W ZSL */
2329         if (IS_USAGE_ZSL(newStream->usage)
2330                 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2331             if (zslStream != NULL) {
2332                 LOGE("Multiple input/reprocess streams requested!");
2333                 pthread_mutex_unlock(&mMutex);
2334                 return BAD_VALUE;
2335             }
2336             zslStream = newStream;
2337         }
2338         /* Covers YUV reprocess */
2339         if (streamStatus.inputStream != NULL) {
2340             if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2341                     && ((newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2342                          && streamStatus.inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888)
2343                         || (newStream->format == HAL_PIXEL_FORMAT_Y8
2344                          && streamStatus.inputStream->format == HAL_PIXEL_FORMAT_Y8))
2345                     && streamStatus.inputStream->width == newStream->width
2346                     && streamStatus.inputStream->height == newStream->height) {
2347                 if (zslStream != NULL) {
2348                     /* This scenario indicates multiple YUV streams with same size
2349                      * as input stream have been requested, since zsl stream handle
2350                      * is solely use for the purpose of overriding the size of streams
2351                      * which share h/w streams we will just make a guess here as to
2352                      * which of the stream is a ZSL stream, this will be refactored
2353                      * once we make generic logic for streams sharing encoder output
2354                      */
2355                     LOGH("Warning, Multiple ip/reprocess streams requested!");
2356                 }
2357                 zslStream = newStream;
2358             }
2359         }
2360     }
2361 
2362     /* If a zsl stream is set, we know that we have configured at least one input or
2363        bidirectional stream */
2364     if (NULL != zslStream) {
2365         mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2366         mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2367         mInputStreamInfo.format = zslStream->format;
2368         mInputStreamInfo.usage = zslStream->usage;
2369         LOGD("Input stream configured! %d x %d, format %d, usage %d",
2370                  mInputStreamInfo.dim.width,
2371                 mInputStreamInfo.dim.height,
2372                 mInputStreamInfo.format, mInputStreamInfo.usage);
2373     }
2374 
2375     cleanAndSortStreamInfo();
2376     if (mMetadataChannel) {
2377         delete mMetadataChannel;
2378         mMetadataChannel = NULL;
2379     }
2380     if (mSupportChannel) {
2381         delete mSupportChannel;
2382         mSupportChannel = NULL;
2383     }
2384 
2385     if (mAnalysisChannel) {
2386         delete mAnalysisChannel;
2387         mAnalysisChannel = NULL;
2388     }
2389 
2390     if (mDummyBatchChannel) {
2391         delete mDummyBatchChannel;
2392         mDummyBatchChannel = NULL;
2393     }
2394 
2395     if (mDepthChannel) {
2396         mDepthChannel = NULL;
2397     }
2398     mDepthCloudMode = CAM_PD_DATA_SKIP;
2399 
2400     mShutterDispatcher.clear();
2401     mOutputBufferDispatcher.clear();
2402 
2403     char is_type_value[PROPERTY_VALUE_MAX];
2404     property_get("persist.camera.is_type", is_type_value, "4");
2405     m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2406 
2407     char property_value[PROPERTY_VALUE_MAX];
2408     property_get("persist.camera.gzoom.at", property_value, "0");
2409     int goog_zoom_at = atoi(property_value);
2410     bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2411         gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2412     bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2413         gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2414 
2415     property_get("persist.camera.gzoom.4k", property_value, "0");
2416     bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2417 
2418     //Create metadata channel and initialize it
2419     cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2420     setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2421             gCamCapability[mCameraId]->color_arrangement);
2422     mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2423                     mChannelHandle, mCameraHandle->ops, captureResultCb,
2424                     setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
2425     if (mMetadataChannel == NULL) {
2426         LOGE("failed to allocate metadata channel");
2427         rc = -ENOMEM;
2428         pthread_mutex_unlock(&mMutex);
2429         return rc;
2430     }
2431     mMetadataChannel->enableDepthData(streamStatus.depthPresent);
2432     rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2433     if (rc < 0) {
2434         LOGE("metadata channel initialization failed");
2435         delete mMetadataChannel;
2436         mMetadataChannel = NULL;
2437         pthread_mutex_unlock(&mMutex);
2438         return rc;
2439     }
2440 
2441     cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
2442     bool isRawStreamRequested = false;
2443     bool onlyRaw = true;
2444     // Keep track of preview/video streams indices.
2445     // There could be more than one preview streams, but only one video stream.
2446     int32_t video_stream_idx = -1;
2447     int32_t preview_stream_idx[streamList->num_streams];
2448     size_t preview_stream_cnt = 0;
2449     bool previewTnr[streamList->num_streams];
2450     memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2451     bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2452     // Loop through once to determine preview TNR conditions before creating channels.
2453     for (size_t i = 0; i < streamList->num_streams; i++) {
2454         camera3_stream_t *newStream = streamList->streams[i];
2455         uint32_t stream_usage = newStream->usage;
2456         if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2457                 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2458             if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2459                 video_stream_idx = (int32_t)i;
2460             else
2461                 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2462         }
2463     }
2464     // By default, preview stream TNR is disabled.
2465     // Enable TNR to the preview stream if all conditions below are satisfied:
2466     //  1. preview resolution == video resolution.
2467     //  2. video stream TNR is enabled.
2468     //  3. EIS2.0 OR is front camera (which wouldn't use EIS3 even if it's set)
2469     for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2470         camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2471         camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2472         if (m_bTnrEnabled && m_bTnrVideo &&
2473                 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2474                 video_stream->width == preview_stream->width &&
2475                 video_stream->height == preview_stream->height) {
2476             previewTnr[preview_stream_idx[i]] = true;
2477         }
2478     }
2479 
2480     memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2481     /* Allocate channel objects for the requested streams */
2482     for (size_t i = 0; i < streamList->num_streams; i++) {
2483 
2484         camera3_stream_t *newStream = streamList->streams[i];
2485         uint32_t stream_usage = newStream->usage;
2486         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2487         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2488         struct camera_info *p_info = NULL;
2489         pthread_mutex_lock(&gCamLock);
2490         p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2491         pthread_mutex_unlock(&gCamLock);
2492         if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2493                 || IS_USAGE_ZSL(newStream->usage)) &&
2494             newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
2495             onlyRaw = false; // There is non-raw stream - bypass flag if set
2496             mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2497             if (isOnEncoder(streamStatus.maxViewfinderSize, newStream->width, newStream->height)) {
2498                 if (streamStatus.bUseCommonFeatureMask)
2499                     zsl_ppmask = streamStatus.commonFeatureMask;
2500                 else
2501                     zsl_ppmask = CAM_QCOM_FEATURE_NONE;
2502             } else {
2503                 if (streamStatus.numStreamsOnEncoder > 0)
2504                     zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2505                 else
2506                     zsl_ppmask = CAM_QCOM_FEATURE_NONE;
2507             }
2508             mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
2509         } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
2510             onlyRaw = false; // There is non-raw stream - bypass flag if set
2511                 LOGH("Input stream configured, reprocess config");
2512         } else {
2513             //for non zsl streams find out the format
2514             switch (newStream->format) {
2515             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2516             {
2517                 onlyRaw = false; // There is non-raw stream - bypass flag if set
2518                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2519                         CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2520                 /* add additional features to pp feature mask */
2521                 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2522                         mStreamConfigInfo.num_streams);
2523 
2524                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2525                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2526                                 CAM_STREAM_TYPE_VIDEO;
2527                     if (m_bTnrEnabled && m_bTnrVideo) {
2528                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2529                             CAM_QCOM_FEATURE_CPP_TNR;
2530                         //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2531                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2532                                 ~CAM_QCOM_FEATURE_CDS;
2533                     }
2534                     if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2535                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2536                             CAM_QTI_FEATURE_PPEISCORE;
2537                     }
2538                     if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2539                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2540                             CAM_QCOM_FEATURE_GOOG_ZOOM;
2541                     }
2542                 } else {
2543                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2544                             CAM_STREAM_TYPE_PREVIEW;
2545                     if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
2546                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2547                                 CAM_QCOM_FEATURE_CPP_TNR;
2548                         //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2549                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2550                                 ~CAM_QCOM_FEATURE_CDS;
2551                     }
2552                     if(!m_bSwTnrPreview) {
2553                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2554                                 ~CAM_QTI_FEATURE_SW_TNR;
2555                     }
2556                     if (is_goog_zoom_preview_enabled) {
2557                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2558                             CAM_QCOM_FEATURE_GOOG_ZOOM;
2559                     }
2560                     padding_info.width_padding = mSurfaceStridePadding;
2561                     padding_info.height_padding = CAM_PAD_TO_2;
2562                     previewSize.width = (int32_t)newStream->width;
2563                     previewSize.height = (int32_t)newStream->height;
2564                 }
2565                 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2566                         (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2567                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2568                             newStream->height;
2569                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2570                             newStream->width;
2571                 }
2572             }
2573             break;
2574             case HAL_PIXEL_FORMAT_YCbCr_420_888:
2575             case HAL_PIXEL_FORMAT_Y8:
2576                 onlyRaw = false; // There is non-raw stream - bypass flag if set
2577                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2578                 if (isOnEncoder(streamStatus.maxViewfinderSize, newStream->width,
2579                             newStream->height)) {
2580                     if (streamStatus.bUseCommonFeatureMask)
2581                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2582                                 streamStatus.commonFeatureMask;
2583                     else
2584                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2585                                 CAM_QCOM_FEATURE_NONE;
2586                 } else {
2587                     mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2588                             CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2589                 }
2590             break;
2591             case HAL_PIXEL_FORMAT_BLOB:
2592                 onlyRaw = false; // There is non-raw stream - bypass flag if set
2593                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2594                 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2595                 if ((m_bIs4KVideo && !streamStatus.isZsl) ||
2596                         (streamStatus.bSmallJpegSize && !streamStatus.isZsl)) {
2597                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2598                              CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2599                      /* Remove rotation if it is not supported
2600                         for 4K LiveVideo snapshot case (online processing) */
2601                      if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2602                                 CAM_QCOM_FEATURE_ROTATION)) {
2603                          mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2604                                  &= ~CAM_QCOM_FEATURE_ROTATION;
2605                      }
2606                 } else {
2607                     if (streamStatus.bUseCommonFeatureMask &&
2608                             isOnEncoder(streamStatus.maxViewfinderSize, newStream->width,
2609                             newStream->height)) {
2610                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2611                                 streamStatus.commonFeatureMask;
2612                     } else {
2613                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2614                     }
2615                 }
2616                 if (streamStatus.isZsl) {
2617                     if (zslStream) {
2618                         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2619                                 (int32_t)zslStream->width;
2620                         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2621                                 (int32_t)zslStream->height;
2622                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2623                                 zsl_ppmask;
2624                     } else {
2625                         LOGE("Error, No ZSL stream identified");
2626                         pthread_mutex_unlock(&mMutex);
2627                         return -EINVAL;
2628                     }
2629                 } else if (m_bIs4KVideo) {
2630                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2631                             (int32_t) streamStatus.videoWidth;
2632                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2633                             (int32_t) streamStatus.videoHeight;
2634                 } else if (streamStatus.bYuv888OverrideJpeg) {
2635                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2636                             (int32_t) streamStatus.largeYuv888Size.width;
2637                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2638                             (int32_t) streamStatus.largeYuv888Size.height;
2639                 }
2640                 break;
2641             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2642             case HAL_PIXEL_FORMAT_RAW16:
2643             case HAL_PIXEL_FORMAT_RAW10:
2644                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2645                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2646                 isRawStreamRequested = true;
2647                 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2648                         (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2649                     mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2650                             gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2651                     mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2652                             gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2653                     mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2654                             gCamCapability[mCameraId]->dt[mPDIndex];
2655                     mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2656                             gCamCapability[mCameraId]->vc[mPDIndex];
2657                 }
2658                 break;
2659             default:
2660                 onlyRaw = false; // There is non-raw stream - bypass flag if set
2661                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2662                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2663                 break;
2664             }
2665         }
2666 
2667         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2668                 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2669                 gCamCapability[mCameraId]->color_arrangement);
2670 
2671         if (newStream->priv == NULL) {
2672             //New stream, construct channel
2673             switch (newStream->stream_type) {
2674             case CAMERA3_STREAM_INPUT:
2675                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2676                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2677                 break;
2678             case CAMERA3_STREAM_BIDIRECTIONAL:
2679                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2680                     GRALLOC_USAGE_HW_CAMERA_WRITE;
2681                 break;
2682             case CAMERA3_STREAM_OUTPUT:
2683                 /* For video encoding stream, set read/write rarely
2684                  * flag so that they may be set to un-cached */
2685                 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2686                     newStream->usage |=
2687                          (GRALLOC_USAGE_SW_READ_RARELY |
2688                          GRALLOC_USAGE_SW_WRITE_RARELY |
2689                          GRALLOC_USAGE_HW_CAMERA_WRITE);
2690                 else if (IS_USAGE_ZSL(newStream->usage))
2691                 {
2692                     LOGD("ZSL usage flag skipping");
2693                 }
2694                 else if (newStream == zslStream
2695                         || (newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
2696                             newStream->format == HAL_PIXEL_FORMAT_Y8)) {
2697                     newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2698                 } else
2699                     newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2700                 break;
2701             default:
2702                 LOGE("Invalid stream_type %d", newStream->stream_type);
2703                 break;
2704             }
2705 
2706             bool forcePreviewUBWC = true;
2707             if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2708                     newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2709                 QCamera3ProcessingChannel *channel = NULL;
2710                 switch (newStream->format) {
2711                 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2712                     if ((newStream->usage &
2713                             private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2714                             (streamList->operation_mode ==
2715                             CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2716                     ) {
2717                         channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2718                                 mChannelHandle, mCameraHandle->ops, captureResultCb,
2719                                 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
2720                                 this,
2721                                 newStream,
2722                                 (cam_stream_type_t)
2723                                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2724                                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2725                                 mMetadataChannel,
2726                                 0); //heap buffers are not required for HFR video channel
2727                         if (channel == NULL) {
2728                             LOGE("allocation of channel failed");
2729                             pthread_mutex_unlock(&mMutex);
2730                             return -ENOMEM;
2731                         }
2732                         //channel->getNumBuffers() will return 0 here so use
2733                         //MAX_INFLIGH_HFR_REQUESTS
2734                         newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2735                         newStream->priv = channel;
2736                         LOGI("num video buffers in HFR mode: %d",
2737                                  MAX_INFLIGHT_HFR_REQUESTS);
2738                     } else {
2739                         /* Copy stream contents in HFR preview only case to create
2740                          * dummy batch channel so that sensor streaming is in
2741                          * HFR mode */
2742                         if (!m_bIsVideo && (streamList->operation_mode ==
2743                                 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2744                             mDummyBatchStream = *newStream;
2745                             mDummyBatchStream.usage = GRALLOC_USAGE_HW_VIDEO_ENCODER;
2746                         }
2747                         int bufferCount = MAX_INFLIGHT_REQUESTS;
2748                         if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2749                                 CAM_STREAM_TYPE_VIDEO) {
2750                             if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2751                                 // WAR: 4K video can only run <=30fps, reduce the buffer count.
2752                                 bufferCount = m_bIs4KVideo ?
2753                                     MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2754                             }
2755 
2756                         }
2757                         channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2758                                 mChannelHandle, mCameraHandle->ops, captureResultCb,
2759                                 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
2760                                 this,
2761                                 newStream,
2762                                 (cam_stream_type_t)
2763                                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2764                                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2765                                 mMetadataChannel,
2766                                 bufferCount);
2767                         if (channel == NULL) {
2768                             LOGE("allocation of channel failed");
2769                             pthread_mutex_unlock(&mMutex);
2770                             return -ENOMEM;
2771                         }
2772                         /* disable UBWC for preview, though supported,
2773                          * to take advantage of CPP duplication */
2774                         if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
2775                                 (previewSize.width == (int32_t) streamStatus.videoWidth)&&
2776                                 (previewSize.height == (int32_t) streamStatus.videoHeight)){
2777                             forcePreviewUBWC = false;
2778                         }
2779                         channel->setUBWCEnabled(forcePreviewUBWC);
2780                          /* When goog_zoom is linked to the preview or video stream,
2781                           * disable ubwc to the linked stream */
2782                         if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2783                                 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2784                             channel->setUBWCEnabled(false);
2785                         }
2786                         newStream->max_buffers = channel->getNumBuffers();
2787                         newStream->priv = channel;
2788                     }
2789                     break;
2790                 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2791                 case HAL_PIXEL_FORMAT_Y8: {
2792                     channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2793                             mChannelHandle,
2794                             mCameraHandle->ops, captureResultCb,
2795                             setBufferErrorStatus, &padding_info,
2796                             this,
2797                             newStream,
2798                             (cam_stream_type_t)
2799                                     mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2800                             mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2801                             mMetadataChannel);
2802                     if (channel == NULL) {
2803                         LOGE("allocation of YUV channel failed");
2804                         pthread_mutex_unlock(&mMutex);
2805                         return -ENOMEM;
2806                     }
2807                     newStream->max_buffers = channel->getNumBuffers();
2808                     newStream->priv = channel;
2809                     break;
2810                 }
2811                 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2812                 case HAL_PIXEL_FORMAT_RAW16:
2813                 case HAL_PIXEL_FORMAT_RAW10: {
2814                     bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2815                             (HAL_DATASPACE_DEPTH != newStream->data_space))
2816                             ? true : false;
2817                     mRawChannel = new QCamera3RawChannel(
2818                             mCameraHandle->camera_handle, mChannelHandle,
2819                             mCameraHandle->ops, captureResultCb,
2820                             setBufferErrorStatus, &padding_info,
2821                             this, newStream,
2822                             mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2823                             mMetadataChannel, isRAW16);
2824                     if (mRawChannel == NULL) {
2825                         LOGE("allocation of raw channel failed");
2826                         pthread_mutex_unlock(&mMutex);
2827                         return -ENOMEM;
2828                     }
2829                     newStream->max_buffers = mRawChannel->getNumBuffers();
2830                     newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2831                     break;
2832                 }
2833                 case HAL_PIXEL_FORMAT_BLOB:
2834                     if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2835                         mDepthChannel = new QCamera3DepthChannel(
2836                                 mCameraHandle->camera_handle, mChannelHandle,
2837                                 mCameraHandle->ops, NULL, NULL, &padding_info,
2838                                 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2839                                 mMetadataChannel);
2840                         if (NULL == mDepthChannel) {
2841                             LOGE("Allocation of depth channel failed");
2842                             pthread_mutex_unlock(&mMutex);
2843                             return NO_MEMORY;
2844                         }
2845                         newStream->priv = mDepthChannel;
2846                         newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2847                     } else {
2848                         // Max live snapshot inflight buffer is 1. This is to mitigate
2849                         // frame drop issues for video snapshot. The more buffers being
2850                         // allocated, the more frame drops there are.
2851                         mPictureChannel = new QCamera3PicChannel(
2852                                 mCameraHandle->camera_handle, mChannelHandle,
2853                                 mCameraHandle->ops, captureResultCb,
2854                                 setBufferErrorStatus, &padding_info, this, newStream,
2855                                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2856                                 m_bIs4KVideo, streamStatus.isZsl, streamStatus.bY80OnEncoder,
2857                                 mMetadataChannel, (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2858                         if (mPictureChannel == NULL) {
2859                             LOGE("allocation of channel failed");
2860                             pthread_mutex_unlock(&mMutex);
2861                             return -ENOMEM;
2862                         }
2863                         newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2864                         newStream->max_buffers = mPictureChannel->getNumBuffers();
2865                         mPictureChannel->overrideYuvSize(
2866                                 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2867                                 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2868                     }
2869                     break;
2870 
2871                 default:
2872                     LOGE("not a supported format 0x%x", newStream->format);
2873                     pthread_mutex_unlock(&mMutex);
2874                     return -EINVAL;
2875                 }
2876             } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2877                 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2878             } else {
2879                 LOGE("Error, Unknown stream type");
2880                 pthread_mutex_unlock(&mMutex);
2881                 return -EINVAL;
2882             }
2883 
2884             QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2885             if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2886                 // Here we only care whether it's EIS3 or not
2887                 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2888                 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2889                         mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2890                     isType = IS_TYPE_NONE;
2891                 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
2892                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2893                         newStream->width, newStream->height, forcePreviewUBWC, isType);
2894                 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2895                     newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2896                 }
2897             }
2898 
2899             for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2900                     it != mStreamInfo.end(); it++) {
2901                 if ((*it)->stream == newStream) {
2902                     (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2903                     break;
2904                 }
2905             }
2906         } else {
2907             // Channel already exists for this stream
2908             // Do nothing for now
2909         }
2910         padding_info = gCamCapability[mCameraId]->padding_info;
2911 
2912         /* Do not add entries for input&depth stream in metastream info
2913          * since there is no real stream associated with it
2914          */
2915         if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
2916                 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2917                         (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
2918             mStreamConfigInfo.num_streams++;
2919         }
2920     }
2921 
2922     // Let buffer dispatcher know the configured streams.
2923     mOutputBufferDispatcher.configureStreams(streamList);
2924 
2925     if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2926         onlyRaw = false;
2927     }
2928 
2929     // Create analysis stream all the time, even when h/w support is not available
2930     if (!onlyRaw) {
2931         cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2932         cam_analysis_info_t analysisInfo;
2933         int32_t ret = NO_ERROR;
2934         ret = mCommon.getAnalysisInfo(
2935                 FALSE,
2936                 analysisFeatureMask,
2937                 &analysisInfo);
2938         if (ret == NO_ERROR) {
2939             cam_color_filter_arrangement_t analysis_color_arrangement =
2940                     (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2941                     CAM_FILTER_ARRANGEMENT_Y :
2942                     gCamCapability[mCameraId]->color_arrangement);
2943             setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2944                                                analysis_color_arrangement);
2945             cam_dimension_t analysisDim;
2946             analysisDim = mCommon.getMatchingDimension(previewSize,
2947                     analysisInfo.analysis_recommended_res);
2948 
2949             mAnalysisChannel = new QCamera3SupportChannel(
2950                     mCameraHandle->camera_handle,
2951                     mChannelHandle,
2952                     mCameraHandle->ops,
2953                     &analysisInfo.analysis_padding_info,
2954                     analysisFeatureMask,
2955                     CAM_STREAM_TYPE_ANALYSIS,
2956                     &analysisDim,
2957                     (analysisInfo.analysis_format
2958                     == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2959                     : CAM_FORMAT_YUV_420_NV21),
2960                     analysisInfo.hw_analysis_supported,
2961                     gCamCapability[mCameraId]->color_arrangement,
2962                     this,
2963                     0); // force buffer count to 0
2964         } else {
2965             LOGW("getAnalysisInfo failed, ret = %d", ret);
2966         }
2967         if (!mAnalysisChannel) {
2968             LOGW("Analysis channel cannot be created");
2969         }
2970     }
2971 
2972     //RAW DUMP channel
2973     if (mEnableRawDump && isRawStreamRequested == false){
2974         cam_dimension_t rawDumpSize;
2975         rawDumpSize = getMaxRawSize(mCameraId);
2976         cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2977         setPAAFSupport(rawDumpFeatureMask,
2978                 CAM_STREAM_TYPE_RAW,
2979                 gCamCapability[mCameraId]->color_arrangement);
2980         mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2981                                   mChannelHandle,
2982                                   mCameraHandle->ops,
2983                                   rawDumpSize,
2984                                   &padding_info,
2985                                   this, rawDumpFeatureMask);
2986         if (!mRawDumpChannel) {
2987             LOGE("Raw Dump channel cannot be created");
2988             pthread_mutex_unlock(&mMutex);
2989             return -ENOMEM;
2990         }
2991     }
2992 
2993     if (mAnalysisChannel) {
2994         cam_analysis_info_t analysisInfo;
2995         memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2996         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2997                 CAM_STREAM_TYPE_ANALYSIS;
2998         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2999                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3000         rc = mCommon.getAnalysisInfo(FALSE,
3001                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3002                 &analysisInfo);
3003         if (rc != NO_ERROR) {
3004             LOGE("getAnalysisInfo failed, ret = %d", rc);
3005             pthread_mutex_unlock(&mMutex);
3006             return rc;
3007         }
3008         cam_color_filter_arrangement_t analysis_color_arrangement =
3009                 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
3010                 CAM_FILTER_ARRANGEMENT_Y :
3011                 gCamCapability[mCameraId]->color_arrangement);
3012         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3013                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3014                 analysis_color_arrangement);
3015 
3016         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
3017                 mCommon.getMatchingDimension(previewSize,
3018                 analysisInfo.analysis_recommended_res);
3019         mStreamConfigInfo.num_streams++;
3020     }
3021 
3022     if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
3023         cam_analysis_info_t supportInfo;
3024         memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
3025         cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3026         setPAAFSupport(callbackFeatureMask,
3027                 CAM_STREAM_TYPE_CALLBACK,
3028                 gCamCapability[mCameraId]->color_arrangement);
3029         int32_t ret = NO_ERROR;
3030         ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
3031         if (ret != NO_ERROR) {
3032             /* Ignore the error for Mono camera
3033              * because the PAAF bit mask is only set
3034              * for CAM_STREAM_TYPE_ANALYSIS stream type
3035              */
3036             if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
3037                 LOGW("getAnalysisInfo failed, ret = %d", ret);
3038             }
3039         }
3040         mSupportChannel = new QCamera3SupportChannel(
3041                 mCameraHandle->camera_handle,
3042                 mChannelHandle,
3043                 mCameraHandle->ops,
3044                 &gCamCapability[mCameraId]->padding_info,
3045                 callbackFeatureMask,
3046                 CAM_STREAM_TYPE_CALLBACK,
3047                 &QCamera3SupportChannel::kDim,
3048                 CAM_FORMAT_YUV_420_NV21,
3049                 supportInfo.hw_analysis_supported,
3050                 gCamCapability[mCameraId]->color_arrangement,
3051                 this, 0);
3052         if (!mSupportChannel) {
3053             LOGE("dummy channel cannot be created");
3054             pthread_mutex_unlock(&mMutex);
3055             return -ENOMEM;
3056         }
3057     }
3058 
3059     if (mSupportChannel) {
3060         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
3061                 QCamera3SupportChannel::kDim;
3062         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3063                 CAM_STREAM_TYPE_CALLBACK;
3064         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3065                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3066         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3067                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3068                 gCamCapability[mCameraId]->color_arrangement);
3069         mStreamConfigInfo.num_streams++;
3070     }
3071 
3072     if (mRawDumpChannel) {
3073         cam_dimension_t rawSize;
3074         rawSize = getMaxRawSize(mCameraId);
3075         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
3076                 rawSize;
3077         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3078                 CAM_STREAM_TYPE_RAW;
3079         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3080                 CAM_QCOM_FEATURE_NONE;
3081         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3082                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3083                 gCamCapability[mCameraId]->color_arrangement);
3084         mStreamConfigInfo.num_streams++;
3085     }
3086 
3087     if (mHdrPlusRawSrcChannel) {
3088         cam_dimension_t rawSize;
3089         rawSize = getMaxRawSize(mCameraId);
3090         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
3091         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
3092         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
3093         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3094                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3095                 gCamCapability[mCameraId]->color_arrangement);
3096         mStreamConfigInfo.num_streams++;
3097     }
3098 
3099     /* In HFR mode, if video stream is not added, create a dummy channel so that
3100      * ISP can create a batch mode even for preview only case. This channel is
3101      * never 'start'ed (no stream-on), it is only 'initialized'  */
3102     if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
3103             !m_bIsVideo) {
3104         cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3105         setPAAFSupport(dummyFeatureMask,
3106                 CAM_STREAM_TYPE_VIDEO,
3107                 gCamCapability[mCameraId]->color_arrangement);
3108         mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
3109                 mChannelHandle,
3110                 mCameraHandle->ops, captureResultCb,
3111                 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
3112                 this,
3113                 &mDummyBatchStream,
3114                 CAM_STREAM_TYPE_VIDEO,
3115                 dummyFeatureMask,
3116                 mMetadataChannel);
3117         if (NULL == mDummyBatchChannel) {
3118             LOGE("creation of mDummyBatchChannel failed."
3119                     "Preview will use non-hfr sensor mode ");
3120         }
3121     }
3122     if (mDummyBatchChannel) {
3123         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
3124                 mDummyBatchStream.width;
3125         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
3126                 mDummyBatchStream.height;
3127         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3128                 CAM_STREAM_TYPE_VIDEO;
3129         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3130                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3131         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3132                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3133                 gCamCapability[mCameraId]->color_arrangement);
3134         mStreamConfigInfo.num_streams++;
3135     }
3136 
3137     mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3138     mStreamConfigInfo.buffer_info.max_buffers =
3139             m_bIs4KVideo ? 0 :
3140             m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
3141 
3142     /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3143     for (pendingRequestIterator i = mPendingRequestsList.begin();
3144             i != mPendingRequestsList.end();) {
3145         i = erasePendingRequest(i);
3146     }
3147     mPendingFrameDropList.clear();
3148     // Initialize/Reset the pending buffers list
3149     for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3150         req.mPendingBufferList.clear();
3151     }
3152     mPendingBuffersMap.mPendingBuffersInRequest.clear();
3153     mExpectedInflightDuration = 0;
3154     mExpectedFrameDuration = 0;
3155 
3156     mCurJpegMeta.clear();
3157     //Get min frame duration for this streams configuration
3158     deriveMinFrameDuration();
3159 
3160     mFirstPreviewIntentSeen = false;
3161 
3162     // Update state
3163     mState = CONFIGURED;
3164 
3165     mFirstMetadataCallback = true;
3166 
3167     memset(&mLastEISCropInfo, 0, sizeof(mLastEISCropInfo));
3168 
3169     if (streamList->session_parameters != nullptr) {
3170         CameraMetadata meta;
3171         meta = streamList->session_parameters;
3172 
3173         // send an unconfigure to the backend so that the isp
3174         // resources are deallocated
3175         if (!mFirstConfiguration) {
3176             cam_stream_size_info_t stream_config_info;
3177             int32_t hal_version = CAM_HAL_V3;
3178             memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
3179             stream_config_info.buffer_info.min_buffers =
3180                     MIN_INFLIGHT_REQUESTS;
3181             stream_config_info.buffer_info.max_buffers =
3182                     m_bIs4KVideo ? 0 :
3183                     m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
3184             clear_metadata_buffer(mParameters);
3185             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3186                     CAM_INTF_PARM_HAL_VERSION, hal_version);
3187             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3188                     CAM_INTF_META_STREAM_INFO, stream_config_info);
3189             rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3190                     mParameters);
3191             if (rc < 0) {
3192                 LOGE("set_parms for unconfigure failed");
3193                 pthread_mutex_unlock(&mMutex);
3194                 return rc;
3195             }
3196 
3197         }
3198         /* get eis information for stream configuration */
3199         cam_is_type_t isTypePreview, is_type=IS_TYPE_NONE;
3200         char is_type_value[PROPERTY_VALUE_MAX];
3201         property_get("persist.camera.is_type", is_type_value, "4");
3202         m_ISTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
3203         // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
3204         property_get("persist.camera.is_type_preview", is_type_value, "4");
3205         isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
3206         LOGD("isTypeVideo: %d isTypePreview: %d", m_ISTypeVideo, isTypePreview);
3207 
3208         int32_t hal_version = CAM_HAL_V3;
3209         clear_metadata_buffer(mParameters);
3210         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
3211         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, mCaptureIntent);
3212 
3213         if (mFirstConfiguration) {
3214             // configure instant AEC
3215             // Instant AEC is a session based parameter and it is needed only
3216             // once per complete session after open camera.
3217             // i.e. This is set only once for the first capture request, after open camera.
3218             setInstantAEC(meta);
3219         }
3220 
3221         bool setEis = isEISEnabled(meta);
3222         int32_t vsMode;
3223         vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
3224         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
3225             rc = BAD_VALUE;
3226         }
3227         LOGD("setEis %d", setEis);
3228         bool eis3Supported = false;
3229         size_t count = IS_TYPE_MAX;
3230         count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
3231         for (size_t i = 0; i < count; i++) {
3232             if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
3233                 eis3Supported = true;
3234                 break;
3235             }
3236         }
3237 
3238         //IS type will be 0 unless EIS is supported. If EIS is supported
3239         //it could either be 4 or 5 depending on the stream and video size
3240         for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
3241             if (setEis) {
3242                 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
3243                     is_type = isTypePreview;
3244                 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
3245                     if ( (m_ISTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
3246                         LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
3247                         is_type = IS_TYPE_EIS_2_0;
3248                     } else {
3249                         is_type = m_ISTypeVideo;
3250                     }
3251                 } else {
3252                     is_type = IS_TYPE_NONE;
3253                 }
3254                  mStreamConfigInfo.is_type[i] = is_type;
3255             } else {
3256                  mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
3257             }
3258         }
3259 
3260         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3261                 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
3262 
3263         char prop[PROPERTY_VALUE_MAX];
3264         //Disable tintless only if the property is set to 0
3265         memset(prop, 0, sizeof(prop));
3266         property_get("persist.camera.tintless.enable", prop, "1");
3267         int32_t tintless_value = atoi(prop);
3268 
3269         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3270                 CAM_INTF_PARM_TINTLESS, tintless_value);
3271 
3272         //Disable CDS for HFR mode or if DIS/EIS is on.
3273         //CDS is a session parameter in the backend/ISP, so need to be set/reset
3274         //after every configure_stream
3275         if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
3276                 (m_bIsVideo)) {
3277             int32_t cds = CAM_CDS_MODE_OFF;
3278             if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3279                     CAM_INTF_PARM_CDS_MODE, cds))
3280                 LOGE("Failed to disable CDS for HFR mode");
3281 
3282         }
3283 
3284         if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
3285             uint8_t* use_av_timer = NULL;
3286 
3287             if (m_debug_avtimer){
3288                 LOGI(" Enabling AV timer through setprop");
3289                 use_av_timer = &m_debug_avtimer;
3290                 m_bAVTimerEnabled = true;
3291             }
3292             else{
3293                 use_av_timer =
3294                     meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
3295                 if (use_av_timer) {
3296                     m_bAVTimerEnabled = true;
3297                     LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
3298                 }
3299             }
3300 
3301             if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
3302                 rc = BAD_VALUE;
3303             }
3304         }
3305 
3306         setMobicat();
3307 
3308         /* Set fps and hfr mode while sending meta stream info so that sensor
3309          * can configure appropriate streaming mode */
3310         mHFRVideoFps = DEFAULT_VIDEO_FPS;
3311         mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
3312         mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
3313         if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3314             rc = setHalFpsRange(meta, mParameters);
3315             if (rc == NO_ERROR) {
3316                 int32_t max_fps =
3317                     (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
3318                 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
3319                     mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
3320                 }
3321                 /* For HFR, more buffers are dequeued upfront to improve the performance */
3322                 if (mBatchSize) {
3323                     mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
3324                     mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
3325                 }
3326             }
3327             else {
3328                 LOGE("setHalFpsRange failed");
3329             }
3330         }
3331         memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
3332 
3333         if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
3334             cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
3335                     meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
3336             rc = setVideoHdrMode(mParameters, vhdr);
3337             if (rc != NO_ERROR) {
3338                 LOGE("setVideoHDR is failed");
3339             }
3340         }
3341 
3342         if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
3343             uint8_t sensorModeFullFov =
3344                     meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
3345             LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
3346             if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
3347                     sensorModeFullFov)) {
3348                 rc = BAD_VALUE;
3349             }
3350         }
3351         //TODO: validate the arguments, HSV scenemode should have only the
3352         //advertised fps ranges
3353 
3354         /*set the capture intent, hal version, tintless, stream info,
3355          *and DIS enable parameter to the backend*/
3356         LOGD("set_parms META_STREAM_INFO " );
3357         for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
3358             LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
3359                     ", Format:%d is_type: %d",
3360                     mStreamConfigInfo.type[i],
3361                     mStreamConfigInfo.stream_sizes[i].width,
3362                     mStreamConfigInfo.stream_sizes[i].height,
3363                     mStreamConfigInfo.postprocess_mask[i],
3364                     mStreamConfigInfo.format[i],
3365                     mStreamConfigInfo.is_type[i]);
3366         }
3367 
3368         rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3369                     mParameters);
3370         if (rc < 0) {
3371             LOGE("set_parms failed for hal version, stream info");
3372         }
3373 
3374     }
3375 
3376     pthread_mutex_unlock(&mMutex);
3377 
3378     return rc;
3379 }
3380 
3381 /*===========================================================================
3382  * FUNCTION   : isEISEnabled
3383  *
3384  * DESCRIPTION: Decide whether EIS should get enabled or not.
3385  *
3386  * PARAMETERS :
3387  *   @meta : request from framework to process
3388  *
3389  * RETURN     : true/false Whether EIS should be enabled
3390  *
3391  *==========================================================================*/
isEISEnabled(const CameraMetadata & meta)3392 bool QCamera3HardwareInterface::isEISEnabled(const CameraMetadata& meta) {
3393     uint8_t fwkVideoStabMode = 0;
3394     if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
3395         fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
3396     }
3397 
3398     // If EIS setprop is enabled then only turn it on for video/preview
3399     return  m_bEisEnable && (m_bIsVideo || fwkVideoStabMode) && m_bEisSupportedSize &&
3400         (m_ISTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
3401 }
3402 
3403 /*===========================================================================
3404  * FUNCTION   : validateCaptureRequest
3405  *
3406  * DESCRIPTION: validate a capture request from camera service
3407  *
3408  * PARAMETERS :
3409  *   @request : request from framework to process
3410  *
3411  * RETURN     :
3412  *
3413  *==========================================================================*/
validateCaptureRequest(camera3_capture_request_t * request,List<InternalRequest> & internallyRequestedStreams)3414 int QCamera3HardwareInterface::validateCaptureRequest(
3415                     camera3_capture_request_t *request,
3416                     List<InternalRequest> &internallyRequestedStreams)
3417 {
3418     ssize_t idx = 0;
3419     const camera3_stream_buffer_t *b;
3420     CameraMetadata meta;
3421 
3422     /* Sanity check the request */
3423     if (request == NULL) {
3424         LOGE("NULL capture request");
3425         return BAD_VALUE;
3426     }
3427 
3428     if ((request->settings == NULL) && (mState == CONFIGURED)) {
3429         /*settings cannot be null for the first request*/
3430         return BAD_VALUE;
3431     }
3432 
3433     uint32_t frameNumber = request->frame_number;
3434     if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3435             && (internallyRequestedStreams.size() == 0)) {
3436         LOGE("Request %d: No output buffers provided!",
3437                 __FUNCTION__, frameNumber);
3438         return BAD_VALUE;
3439     }
3440     if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3441         LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
3442                  request->num_output_buffers, MAX_NUM_STREAMS);
3443         return BAD_VALUE;
3444     }
3445     if (request->input_buffer != NULL) {
3446         b = request->input_buffer;
3447         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3448             LOGE("Request %d: Buffer %ld: Status not OK!",
3449                      frameNumber, (long)idx);
3450             return BAD_VALUE;
3451         }
3452         if (b->release_fence != -1) {
3453             LOGE("Request %d: Buffer %ld: Has a release fence!",
3454                      frameNumber, (long)idx);
3455             return BAD_VALUE;
3456         }
3457         if (b->buffer == NULL) {
3458             LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3459                      frameNumber, (long)idx);
3460             return BAD_VALUE;
3461         }
3462     }
3463 
3464     // Validate all buffers
3465     b = request->output_buffers;
3466     if (b == NULL) {
3467        return BAD_VALUE;
3468     }
3469     while (idx < (ssize_t)request->num_output_buffers) {
3470         QCamera3ProcessingChannel *channel =
3471                 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3472         if (channel == NULL) {
3473             LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3474                      frameNumber, (long)idx);
3475             return BAD_VALUE;
3476         }
3477         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3478             LOGE("Request %d: Buffer %ld: Status not OK!",
3479                      frameNumber, (long)idx);
3480             return BAD_VALUE;
3481         }
3482         if (b->release_fence != -1) {
3483             LOGE("Request %d: Buffer %ld: Has a release fence!",
3484                      frameNumber, (long)idx);
3485             return BAD_VALUE;
3486         }
3487         if (b->buffer == NULL) {
3488             LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3489                      frameNumber, (long)idx);
3490             return BAD_VALUE;
3491         }
3492         if (*(b->buffer) == NULL) {
3493             LOGE("Request %d: Buffer %ld: NULL private handle!",
3494                      frameNumber, (long)idx);
3495             return BAD_VALUE;
3496         }
3497         idx++;
3498         b = request->output_buffers + idx;
3499     }
3500     return NO_ERROR;
3501 }
3502 
3503 /*===========================================================================
3504  * FUNCTION   : deriveMinFrameDuration
3505  *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3507  *              on currently configured streams.
3508  *
3509  * PARAMETERS : NONE
3510  *
3511  * RETURN     : NONE
3512  *
3513  *==========================================================================*/
deriveMinFrameDuration()3514 void QCamera3HardwareInterface::deriveMinFrameDuration()
3515 {
3516     int32_t maxJpegDim, maxProcessedDim, maxRawDim;
3517     bool hasRaw = false;
3518 
3519     mMinRawFrameDuration = 0;
3520     mMinJpegFrameDuration = 0;
3521     mMinProcessedFrameDuration = 0;
3522 
3523     maxJpegDim = 0;
3524     maxProcessedDim = 0;
3525     maxRawDim = 0;
3526 
3527     // Figure out maximum jpeg, processed, and raw dimensions
3528     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3529         it != mStreamInfo.end(); it++) {
3530 
3531         // Input stream doesn't have valid stream_type
3532         if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3533             continue;
3534 
3535         int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3536         if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3537             if (dimension > maxJpegDim)
3538                 maxJpegDim = dimension;
3539         } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3540                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3541                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3542             hasRaw = true;
3543             if (dimension > maxRawDim)
3544                 maxRawDim = dimension;
3545         } else {
3546             if (dimension > maxProcessedDim)
3547                 maxProcessedDim = dimension;
3548         }
3549     }
3550 
3551     size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3552             MAX_SIZES_CNT);
3553 
3554     //Assume all jpeg dimensions are in processed dimensions.
3555     if (maxJpegDim > maxProcessedDim)
3556         maxProcessedDim = maxJpegDim;
3557     //Find the smallest raw dimension that is greater or equal to jpeg dimension
3558     if (hasRaw && maxProcessedDim > maxRawDim) {
3559         maxRawDim = INT32_MAX;
3560 
3561         for (size_t i = 0; i < count; i++) {
3562             int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3563                     gCamCapability[mCameraId]->raw_dim[i].height;
3564             if (dimension >= maxProcessedDim && dimension < maxRawDim)
3565                 maxRawDim = dimension;
3566         }
3567     }
3568 
3569     //Find minimum durations for processed, jpeg, and raw
3570     for (size_t i = 0; i < count; i++) {
3571         if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3572                 gCamCapability[mCameraId]->raw_dim[i].height) {
3573             mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3574             break;
3575         }
3576     }
3577     count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3578     for (size_t i = 0; i < count; i++) {
3579         if (maxProcessedDim ==
3580                 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3581                 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3582             mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3583             mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3584             break;
3585         }
3586     }
3587 }
3588 
3589 /*===========================================================================
3590  * FUNCTION   : getMinFrameDuration
3591  *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame
 *              durations and current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN     : min frame duration for a particular request
3598  *
3599  *==========================================================================*/
getMinFrameDuration(const camera3_capture_request_t * request)3600 int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3601 {
3602     bool hasJpegStream = false;
3603     bool hasRawStream = false;
3604     for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3605         const camera3_stream_t *stream = request->output_buffers[i].stream;
3606         if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3607             hasJpegStream = true;
3608         else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3609                 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3610                 stream->format == HAL_PIXEL_FORMAT_RAW16)
3611             hasRawStream = true;
3612     }
3613 
3614     if (!hasJpegStream)
3615         return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3616     else
3617         return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3618 }
3619 
3620 /*===========================================================================
3621  * FUNCTION   : handleBuffersDuringFlushLock
3622  *
3623  * DESCRIPTION: Account for buffers returned from back-end during flush
3624  *              This function is executed while mMutex is held by the caller.
3625  *
3626  * PARAMETERS :
3627  *   @buffer: image buffer for the callback
3628  *
3629  * RETURN     :
3630  *==========================================================================*/
handleBuffersDuringFlushLock(camera3_stream_buffer_t * buffer)3631 void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3632 {
3633     bool buffer_found = false;
3634     for (List<PendingBuffersInRequest>::iterator req =
3635             mPendingBuffersMap.mPendingBuffersInRequest.begin();
3636             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3637         for (List<PendingBufferInfo>::iterator i =
3638                 req->mPendingBufferList.begin();
3639                 i != req->mPendingBufferList.end(); i++) {
3640             if (i->buffer == buffer->buffer) {
3641                 mPendingBuffersMap.numPendingBufsAtFlush--;
3642                 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3643                     buffer->buffer, req->frame_number,
3644                     mPendingBuffersMap.numPendingBufsAtFlush);
3645                 buffer_found = true;
3646                 break;
3647             }
3648         }
3649         if (buffer_found) {
3650             break;
3651         }
3652     }
3653     if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3654         //signal the flush()
3655         LOGD("All buffers returned to HAL. Continue flush");
3656         pthread_cond_signal(&mBuffersCond);
3657     }
3658 }
3659 
3660 /*===========================================================================
3661  * FUNCTION   : handleBatchMetadata
3662  *
3663  * DESCRIPTION: Handles metadata buffer callback in batch mode
3664  *
3665  * PARAMETERS : @metadata_buf: metadata buffer
3666  *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3667  *                 the meta buf in this method
3668  *
3669  * RETURN     :
3670  *
3671  *==========================================================================*/
void QCamera3HardwareInterface::handleBatchMetadata(
        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);

    if (NULL == metadata_buf) {
        LOGE("metadata_buf is NULL");
        return;
    }
    /* In batch mode, the metadata will contain the frame number and timestamp
     * of the last frame in the batch. Eg: a batch containing buffers from
     * request 5,6,7 and 8 will have frame number and timestamp corresponding
     * to 8. multiple process_capture_requests => 1 set_param =>
     * 1 handleBatchMetadata => multiple process_capture_results */
    metadata_buffer_t *metadata =
            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
    uint32_t frame_number = 0, urgent_frame_number = 0;
    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
    bool invalid_metadata = false;
    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
    size_t loopCount = 1;
    bool is_metabuf_queued = false;

    // Pointers into the backend metadata blob; any of these can be NULL when
    // the backend did not populate the corresponding entry.
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time =
            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
            (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        // Still fall through: handleMetadataWithLock is invoked below even
        // for invalid metadata (see loop comment), so only mark the state.
        invalid_metadata = true;
    } else {
        frame_number_valid = *p_frame_number_valid;
        last_frame_number = *p_frame_number;
        last_frame_capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        last_urgent_frame_number = *p_urgent_frame_number;
    }

    /* In batchmode, when no video buffers are requested, set_parms are sent
     * for every capture_request. The difference between consecutive urgent
     * frame numbers and frame numbers should be used to interpolate the
     * corresponding frame numbers and time stamps */
    pthread_mutex_lock(&mMutex);
    if (urgent_frame_number_valid) {
        // Map the batch's last urgent frame number back to the first request
        // of the batch; an unknown number is unrecoverable.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
        if(idx < 0) {
            LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
                last_urgent_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
        urgentFrameNumDiff = last_urgent_frame_number + 1 -
                first_urgent_frame_number;

        LOGD("urgent_frm: valid: %d frm_num: %d - %d",
                 urgent_frame_number_valid,
                first_urgent_frame_number, last_urgent_frame_number);
    }

    if (frame_number_valid) {
        // Same mapping for the regular frame number; this one also retires
        // the batch entry from the pending map.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
        if(idx < 0) {
            LOGE("Invalid frame number received: %d. Irrecoverable error",
                last_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_frame_number = mPendingBatchMap.valueAt(idx);
        frameNumDiff = last_frame_number + 1 -
                first_frame_number;
        mPendingBatchMap.removeItem(last_frame_number);

        LOGD("frm: valid: %d frm_num: %d - %d",
                 frame_number_valid,
                first_frame_number, last_frame_number);

    }
    pthread_mutex_unlock(&mMutex);

    if (urgent_frame_number_valid || frame_number_valid) {
        // One handleMetadataWithLock call per interpolated frame; diffs
        // larger than the HFR batch size indicate a backend error.
        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
                     urgentFrameNumDiff, last_urgent_frame_number);
        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("frameNumDiff: %d frameNum: %d",
                     frameNumDiff, last_frame_number);
    }

    for (size_t i = 0; i < loopCount; i++) {
        /* handleMetadataWithLock is called even for invalid_metadata for
         * pipeline depth calculation */
        if (!invalid_metadata) {
            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (urgent_frame_number_valid) {
                if (i < urgentFrameNumDiff) {
                    urgent_frame_number =
                            first_urgent_frame_number + i;
                    LOGD("inferred urgent frame_number: %d",
                             urgent_frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
                }
            }

            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (frame_number_valid) {
                if (i < frameNumDiff) {
                    frame_number = first_frame_number + i;
                    LOGD("inferred frame_number: %d", frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER, frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
                }
            }

            if (last_frame_capture_time) {
                // Infer the per-frame timestamp by spacing frames evenly at
                // the HFR video frame rate, ending at the batch timestamp.
                first_frame_capture_time = last_frame_capture_time -
                        (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
                capture_time =
                        first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
                LOGD("batch capture_time: %lld, capture_time: %lld",
                         last_frame_capture_time, capture_time);
            }
        }
        pthread_mutex_lock(&mMutex);
        handleMetadataWithLock(metadata_buf,
                false /* free_and_bufdone_meta_buf */,
                (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
                (i == frameNumDiff-1), /* last metadata in the batch metadata */
                &is_metabuf_queued /* if metabuf isqueued or not */);
        pthread_mutex_unlock(&mMutex);
    }

    /* BufDone metadata buffer */
    // Only release here if the callee did not queue the buffer elsewhere.
    if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
        metadata_buf = NULL;
    }
}
3840 
notifyError(uint32_t frameNumber,camera3_error_msg_code_t errorCode)3841 void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3842         camera3_error_msg_code_t errorCode)
3843 {
3844     camera3_notify_msg_t notify_msg;
3845     memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3846     notify_msg.type = CAMERA3_MSG_ERROR;
3847     notify_msg.message.error.error_code = errorCode;
3848     notify_msg.message.error.error_stream = NULL;
3849     notify_msg.message.error.frame_number = frameNumber;
3850     orchestrateNotify(&notify_msg);
3851 
3852     return;
3853 }
3854 
3855 /*===========================================================================
3856  * FUNCTION   : sendPartialMetadataWithLock
3857  *
3858  * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3859  *
3860  * PARAMETERS : @metadata: metadata buffer
3861  *              @requestIter: The iterator for the pending capture request for
 *              which the partial result is being sent
3863  *              @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3864  *                  last urgent metadata in a batch. Always true for non-batch mode
3865  *              @isJumpstartMetadata: Whether this is a partial metadata for
3866  *              jumpstart, i.e. even though it doesn't map to a valid partial
3867  *              frame number, its metadata entries should be kept.
3868  *
3869  * RETURN     :
3870  *
3871  *==========================================================================*/
3872 
void QCamera3HardwareInterface::sendPartialMetadataWithLock(
        metadata_buffer_t *metadata,
        const pendingRequestIterator requestIter,
        bool lastUrgentMetadataInBatch,
        bool isJumpstartMetadata)
{
    camera3_capture_result_t result;
    memset(&result, 0, sizeof(camera3_capture_result_t));

    // Each partial callback bumps the per-request counter, which is reported
    // to the framework via result.partial_result below.
    requestIter->partial_result_cnt++;

    // Extract 3A metadata
    result.result = translateCbUrgentMetadataToResultMetadata(
            metadata, lastUrgentMetadataInBatch, requestIter->frame_number,
            isJumpstartMetadata);
    // Populate metadata result
    result.frame_number = requestIter->frame_number;
    result.num_output_buffers = 0;
    result.output_buffers = NULL;
    result.partial_result = requestIter->partial_result_cnt;

    {
        // Notify the HDR+ client (if active) while holding its lock; the
        // final flag tells it whether this completes the partial results.
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
            // Notify HDR+ client about the partial metadata.
            gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
            result.partial_result == PARTIAL_RESULT_COUNT);
        }
    }

    orchestrateResult(&result);
    LOGD("urgent frame_number = %u", result.frame_number);
    // result.result was allocated by the translate call above and is owned
    // here; release it once the result has been dispatched.
    free_camera_metadata((camera_metadata_t *)result.result);
}
3907 
3908 /*===========================================================================
3909  * FUNCTION   : handleMetadataWithLock
3910  *
3911  * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3912  *
3913  * PARAMETERS : @metadata_buf: metadata buffer
3914  *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3915  *                 the meta buf in this method
3916  *              @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3917  *                  last urgent metadata in a batch. Always true for non-batch mode
3918  *              @lastMetadataInBatch: Boolean to indicate whether this is the
3919  *                  last metadata in a batch. Always true for non-batch mode
3920  *              @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3921  *                  buffer is enqueued or not.
3922  *
3923  * RETURN     :
3924  *
3925  *==========================================================================*/
handleMetadataWithLock(mm_camera_super_buf_t * metadata_buf,bool free_and_bufdone_meta_buf,bool lastUrgentMetadataInBatch,bool lastMetadataInBatch,bool * p_is_metabuf_queued)3926 void QCamera3HardwareInterface::handleMetadataWithLock(
3927     mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
3928     bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3929     bool *p_is_metabuf_queued)
3930 {
3931     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
3932     if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3933         //during flush do not send metadata from this thread
3934         LOGD("not sending metadata during flush or when mState is error");
3935         if (free_and_bufdone_meta_buf) {
3936             mMetadataChannel->bufDone(metadata_buf);
3937             free(metadata_buf);
3938         }
3939         return;
3940     }
3941 
3942     //not in flush
3943     metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3944     int32_t frame_number_valid, urgent_frame_number_valid;
3945     uint32_t frame_number, urgent_frame_number;
3946     int64_t capture_time, capture_time_av;
3947     nsecs_t currentSysTime;
3948 
3949     int32_t *p_frame_number_valid =
3950             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3951     uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3952     int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3953     int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
3954     int32_t *p_urgent_frame_number_valid =
3955             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3956     uint32_t *p_urgent_frame_number =
3957             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3958     IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3959             metadata) {
3960         LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3961                  *p_frame_number_valid, *p_frame_number);
3962     }
3963 
3964     camera_metadata_t *resultMetadata = nullptr;
3965 
3966     if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3967             (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3968         LOGE("Invalid metadata");
3969         if (free_and_bufdone_meta_buf) {
3970             mMetadataChannel->bufDone(metadata_buf);
3971             free(metadata_buf);
3972         }
3973         goto done_metadata;
3974     }
3975     frame_number_valid =        *p_frame_number_valid;
3976     frame_number =              *p_frame_number;
3977     capture_time =              *p_capture_time;
3978     capture_time_av =           *p_capture_time_av;
3979     urgent_frame_number_valid = *p_urgent_frame_number_valid;
3980     urgent_frame_number =       *p_urgent_frame_number;
3981     currentSysTime =            systemTime(CLOCK_MONOTONIC);
3982 
3983     if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3984         const int tries = 3;
3985         nsecs_t bestGap, measured;
3986         for (int i = 0; i < tries; ++i) {
3987             const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3988             const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3989             const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3990             const nsecs_t gap = tmono2 - tmono;
3991             if (i == 0 || gap < bestGap) {
3992                 bestGap = gap;
3993                 measured = tbase - ((tmono + tmono2) >> 1);
3994             }
3995         }
3996         capture_time -= measured;
3997     }
3998 
3999     // Detect if buffers from any requests are overdue
4000     for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
4001         int64_t timeout;
4002         {
4003             Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
4004             // If there is a pending HDR+ request, the following requests may be blocked until the
4005             // HDR+ request is done. So allow a longer timeout.
4006             timeout = (mHdrPlusPendingRequests.size() > 0) ?
4007                     MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
4008             timeout = s2ns(timeout);
4009             if (timeout < mExpectedInflightDuration) {
4010                 timeout = mExpectedInflightDuration;
4011             }
4012         }
4013 
4014         if ((currentSysTime - req.timestamp) > timeout) {
4015             for (auto &missed : req.mPendingBufferList) {
4016                 assert(missed.stream->priv);
4017                 if (missed.stream->priv) {
4018                     QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
4019                     assert(ch->mStreams[0]);
4020                     if (ch->mStreams[0]) {
4021                         LOGE("Cancel missing frame = %d, buffer = %p,"
4022                             "stream type = %d, stream format = %d",
4023                             req.frame_number, missed.buffer,
4024                             ch->mStreams[0]->getMyType(), missed.stream->format);
4025                         ch->timeoutFrame(req.frame_number);
4026                     }
4027                 }
4028             }
4029         }
4030     }
4031     //For the very first metadata callback, regardless whether it contains valid
4032     //frame number, send the partial metadata for the jumpstarting requests.
4033     //Note that this has to be done even if the metadata doesn't contain valid
4034     //urgent frame number, because in the case only 1 request is ever submitted
4035     //to HAL, there won't be subsequent valid urgent frame number.
4036     if (mFirstMetadataCallback) {
4037         for (pendingRequestIterator i =
4038                 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
4039             if (i->bUseFirstPartial) {
4040                 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
4041                         true /*isJumpstartMetadata*/);
4042             }
4043         }
4044         mFirstMetadataCallback = false;
4045     }
4046 
4047     //Partial result on process_capture_result for timestamp
4048     if (urgent_frame_number_valid) {
4049         LOGD("valid urgent frame_number = %u", urgent_frame_number);
4050 
4051         //Recieved an urgent Frame Number, handle it
4052         //using partial results
4053         for (pendingRequestIterator i =
4054                 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
4055             LOGD("Iterator Frame = %d urgent frame = %d",
4056                  i->frame_number, urgent_frame_number);
4057 
4058             if ((!i->input_buffer) && (!i->hdrplus) && (i->frame_number < urgent_frame_number) &&
4059                     (i->partial_result_cnt == 0)) {
4060                 LOGE("Error: HAL missed urgent metadata for frame number %d",
4061                          i->frame_number);
4062                 i->partialResultDropped = true;
4063                 i->partial_result_cnt++;
4064             }
4065 
4066             if (i->frame_number == urgent_frame_number &&
4067                      i->partial_result_cnt == 0) {
4068                 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
4069                         false /*isJumpstartMetadata*/);
4070                 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
4071                     // Instant AEC settled for this frame.
4072                     LOGH("instant AEC settled for frame number %d", urgent_frame_number);
4073                     mInstantAECSettledFrameNumber = urgent_frame_number;
4074                 }
4075                 break;
4076             }
4077         }
4078     }
4079 
4080     if (!frame_number_valid) {
4081         LOGD("Not a valid normal frame number, used as SOF only");
4082         if (free_and_bufdone_meta_buf) {
4083             mMetadataChannel->bufDone(metadata_buf);
4084             free(metadata_buf);
4085         }
4086         goto done_metadata;
4087     }
4088     LOGH("valid frame_number = %u, capture_time = %lld",
4089             frame_number, capture_time);
4090 
4091     handleDepthDataLocked(metadata->depth_data, frame_number,
4092             metadata->is_depth_data_valid);
4093 
4094     // Check whether any stream buffer corresponding to this is dropped or not
4095     // If dropped, then send the ERROR_BUFFER for the corresponding stream
    // OR check if instant AEC is enabled, then need to drop frames until AEC is settled.
4097     for (auto & pendingRequest : mPendingRequestsList) {
4098         if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
4099                     mInstantAECSettledFrameNumber)) {
4100             camera3_notify_msg_t notify_msg = {};
4101             for (auto & buffer : pendingRequest.buffers) {
4102                 bool dropFrame = false;
4103                 QCamera3ProcessingChannel *channel =
4104                         (QCamera3ProcessingChannel *)buffer.stream->priv;
4105                 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4106                 if (p_cam_frame_drop) {
4107                     for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
4108                         if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
4109                             // Got the stream ID for drop frame.
4110                             dropFrame = true;
4111                             break;
4112                         }
4113                     }
4114                 } else {
4115                     // This is instant AEC case.
                    // For instant AEC drop the stream until AEC is settled.
4117                     dropFrame = true;
4118                 }
4119 
4120                 if (dropFrame) {
4121                     // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
4122                     if (p_cam_frame_drop) {
4123                         // Treat msg as error for system buffer drops
4124                         LOGE("Start of reporting error frame#=%u, streamID=%u",
4125                                  pendingRequest.frame_number, streamID);
4126                     } else {
4127                         // For instant AEC, inform frame drop and frame number
4128                         LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
4129                                 "AEC settled frame number = %u",
4130                                 pendingRequest.frame_number, streamID,
4131                                 mInstantAECSettledFrameNumber);
4132                     }
4133                     notify_msg.type = CAMERA3_MSG_ERROR;
4134                     notify_msg.message.error.frame_number = pendingRequest.frame_number;
4135                     notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
4136                     notify_msg.message.error.error_stream = buffer.stream;
4137                     orchestrateNotify(&notify_msg);
4138                     if (p_cam_frame_drop) {
4139                         // Treat msg as error for system buffer drops
4140                         LOGE("End of reporting error frame#=%u, streamID=%u",
4141                                 pendingRequest.frame_number, streamID);
4142                     } else {
4143                         // For instant AEC, inform frame drop and frame number
4144                         LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
4145                                 "AEC settled frame number = %u",
4146                                 pendingRequest.frame_number, streamID,
4147                                 mInstantAECSettledFrameNumber);
4148                     }
4149                     PendingFrameDropInfo PendingFrameDrop;
4150                     PendingFrameDrop.frame_number = pendingRequest.frame_number;
4151                     PendingFrameDrop.stream_ID = streamID;
4152                     // Add the Frame drop info to mPendingFrameDropList
4153                     mPendingFrameDropList.push_back(PendingFrameDrop);
4154                 }
4155             }
4156         }
4157     }
4158 
4159     for (auto & pendingRequest : mPendingRequestsList) {
4160         // Find the pending request with the frame number.
4161         if (pendingRequest.frame_number < frame_number) {
4162             // Workaround for case where shutter is missing due to dropped
4163             // metadata
4164             if (!pendingRequest.hdrplus && (pendingRequest.input_buffer == nullptr)) {
4165                 mShutterDispatcher.markShutterReady(pendingRequest.frame_number, capture_time);
4166             }
4167         } else if (pendingRequest.frame_number == frame_number) {
4168             // Update the sensor timestamp.
4169             pendingRequest.timestamp = capture_time;
4170 
4171 
4172             /* Set the timestamp in display metadata so that clients aware of
4173                private_handle such as VT can use this un-modified timestamps.
4174                Camera framework is unaware of this timestamp and cannot change this */
4175             updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
4176 
4177             // Find channel requiring metadata, meaning internal offline postprocess
4178             // is needed.
4179             //TODO: for now, we don't support two streams requiring metadata at the same time.
4180             // (because we are not making copies, and metadata buffer is not reference counted.
4181             bool internalPproc = false;
4182             for (pendingBufferIterator iter = pendingRequest.buffers.begin();
4183                     iter != pendingRequest.buffers.end(); iter++) {
4184                 if (iter->need_metadata) {
4185                     internalPproc = true;
4186                     QCamera3ProcessingChannel *channel =
4187                             (QCamera3ProcessingChannel *)iter->stream->priv;
4188 
4189                     if (iter->need_crop) {
4190                         QCamera3Stream *stream = channel->getStreamByIndex(0);
4191 
4192                         // Map the EIS crop to respective stream crop and append it.
4193                         IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA,
4194                                 metadata) {
4195                             for (int j = 0; j < crop_data->num_of_streams; j++) {
4196                                 if ((stream != nullptr) &&
4197                                         (stream->getMyServerID() ==
4198                                          crop_data->crop_info[j].stream_id)) {
4199 
4200                                     cam_dimension_t streamDim;
4201                                     if (stream->getFrameDimension(streamDim) != NO_ERROR) {
4202                                         LOGE("%s: Failed obtaining stream dimensions!", __func__);
4203                                         continue;
4204                                     }
4205 
4206                                     mStreamCropMapper.update(
4207                                             gCamCapability[mCameraId]->active_array_size.width,
4208                                             gCamCapability[mCameraId]->active_array_size.height,
4209                                             streamDim.width, streamDim.height);
4210 
4211                                     cam_eis_crop_info_t eisCrop = iter->crop_info;
4212                                     mStreamCropMapper.toSensor(eisCrop.delta_x, eisCrop.delta_y,
4213                                             eisCrop.delta_width, eisCrop.delta_height);
4214 
4215                                     int32_t crop[4] = {
4216                                         crop_data->crop_info[j].crop.left   + eisCrop.delta_x,
4217                                         crop_data->crop_info[j].crop.top    + eisCrop.delta_y,
4218                                         crop_data->crop_info[j].crop.width  - eisCrop.delta_width,
4219                                         crop_data->crop_info[j].crop.height - eisCrop.delta_height
4220                                     };
4221 
4222                                     if (isCropValid(crop[0], crop[1], crop[2], crop[3],
4223                                                 streamDim.width, streamDim.height)) {
4224                                         crop_data->crop_info[j].crop.left   = crop[0];
4225                                         crop_data->crop_info[j].crop.top    = crop[1];
4226                                         crop_data->crop_info[j].crop.width  = crop[2];
4227                                         crop_data->crop_info[j].crop.height = crop[3];
4228                                     } else {
4229                                         LOGE("Invalid EIS compensated crop region");
4230                                     }
4231 
4232                                     break;
4233                                 }
4234                             }
4235                         }
4236                     }
4237 
4238                     channel->queueReprocMetadata(metadata_buf);
4239                     if(p_is_metabuf_queued != NULL) {
4240                         *p_is_metabuf_queued = true;
4241                     }
4242                     iter->need_metadata = false;
4243                     break;
4244                 }
4245             }
4246             for (auto itr = pendingRequest.internalRequestList.begin();
4247                   itr != pendingRequest.internalRequestList.end(); itr++) {
4248                 if (itr->need_metadata) {
4249                     internalPproc = true;
4250                     QCamera3ProcessingChannel *channel =
4251                             (QCamera3ProcessingChannel *)itr->stream->priv;
4252                     channel->queueReprocMetadata(metadata_buf);
4253                     break;
4254                 }
4255             }
4256 
4257             saveExifParams(metadata);
4258 
4259             bool *enableZsl = nullptr;
4260             if (gExposeEnableZslKey) {
4261                 enableZsl = &pendingRequest.enableZsl;
4262             }
4263 
4264             resultMetadata = translateFromHalMetadata(metadata,
4265                     pendingRequest, internalPproc,
4266                     lastMetadataInBatch, enableZsl);
4267 
4268             updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
4269 
4270             if (pendingRequest.blob_request) {
4271                 //Dump tuning metadata if enabled and available
4272                 char prop[PROPERTY_VALUE_MAX];
4273                 memset(prop, 0, sizeof(prop));
4274                 property_get("persist.camera.dumpmetadata", prop, "0");
4275                 int32_t enabled = atoi(prop);
4276                 if (enabled && metadata->is_tuning_params_valid) {
4277                     dumpMetadataToFile(metadata->tuning_params,
4278                            mMetaFrameCount,
4279                            enabled,
4280                            "Snapshot",
4281                            frame_number);
4282                 }
4283             }
4284 
4285             if (!internalPproc) {
4286                 LOGD("couldn't find need_metadata for this metadata");
4287                 // Return metadata buffer
4288                 if (free_and_bufdone_meta_buf) {
4289                     mMetadataChannel->bufDone(metadata_buf);
4290                     free(metadata_buf);
4291                 }
4292             }
4293 
4294             break;
4295         }
4296     }
4297 
4298     mShutterDispatcher.markShutterReady(frame_number, capture_time);
4299 
4300     // Try to send out capture result metadata.
4301     handlePendingResultMetadataWithLock(frame_number,  resultMetadata);
4302     return;
4303 
4304 done_metadata:
4305     for (pendingRequestIterator i = mPendingRequestsList.begin();
4306             i != mPendingRequestsList.end() ;i++) {
4307         i->pipeline_depth++;
4308     }
4309     LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
4310     unblockRequestIfNecessary();
4311 }
4312 
4313 /*===========================================================================
4314  * FUNCTION   : handleDepthDataWithLock
4315  *
4316  * DESCRIPTION: Handles incoming depth data
4317  *
4318  * PARAMETERS : @depthData  : Depth data
4319  *              @frameNumber: Frame number of the incoming depth data
4320  *              @valid      : Valid flag for the incoming data
4321  *
4322  * RETURN     :
4323  *
4324  *==========================================================================*/
void QCamera3HardwareInterface::handleDepthDataLocked(
        const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
    // Frame number of the oldest mapped depth buffer; filled in by
    // getOldestFrame() below before first use.
    uint32_t currentFrameNumber;
    buffer_handle_t *depthBuffer;

    // Nothing to do when no depth stream is configured.
    if (nullptr == mDepthChannel) {
        return;
    }

    // Template for the result buffer; .buffer and .status are updated
    // per iteration before dispatching.
    camera3_stream_buffer_t resultBuffer =
        {.acquire_fence = -1,
         .release_fence = -1,
         .status = CAMERA3_BUFFER_STATUS_OK,
         .buffer = nullptr,
         .stream = mDepthChannel->getStream()};
    // Drain queued depth buffers in order, up to and including the one
    // matching 'frameNumber'. Buffers older than 'frameNumber' are returned
    // with an error (their depth data never arrived); newer ones are left
    // queued for a later callback.
    do {
        depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
        if (nullptr == depthBuffer) {
            // No more mapped depth buffers pending.
            break;
        }

        resultBuffer.buffer = depthBuffer;
        if (currentFrameNumber == frameNumber) {
            if (valid) {
                // Copy the incoming depth data into the buffer; flag an
                // error status if population fails.
                int32_t rc = mDepthChannel->populateDepthData(depthData,
                        frameNumber);
                if (NO_ERROR != rc) {
                    resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
                } else {
                    resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
                }
            } else {
                // Incoming depth data was flagged invalid by the backend.
                resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
            }
        } else if (currentFrameNumber > frameNumber) {
            // Oldest pending buffer is newer than this callback's frame;
            // keep it queued.
            break;
        } else {
            // An older depth buffer is still pending, so its data is
            // missing. Notify a buffer error and return it to the framework.
            camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
                    {{currentFrameNumber, mDepthChannel->getStream(),
                            CAMERA3_MSG_ERROR_BUFFER}}};
            orchestrateNotify(&notify_msg);

            LOGE("Depth buffer for frame number: %d is missing "
                    "returning back!", currentFrameNumber);
            resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
        }
        mDepthChannel->unmapBuffer(currentFrameNumber);
        mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
    } while (currentFrameNumber < frameNumber);
}
4375 
4376 /*===========================================================================
4377  * FUNCTION   : notifyErrorFoPendingDepthData
4378  *
4379  * DESCRIPTION: Returns error for any pending depth buffers
4380  *
4381  * PARAMETERS : depthCh - depth channel that needs to get flushed
4382  *
4383  * RETURN     :
4384  *
4385  *==========================================================================*/
notifyErrorFoPendingDepthData(QCamera3DepthChannel * depthCh)4386 void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
4387         QCamera3DepthChannel *depthCh) {
4388     uint32_t currentFrameNumber;
4389     buffer_handle_t *depthBuffer;
4390 
4391     if (nullptr == depthCh) {
4392         return;
4393     }
4394 
4395     camera3_notify_msg_t notify_msg =
4396         {.type = CAMERA3_MSG_ERROR,
4397                 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
4398     camera3_stream_buffer_t resultBuffer =
4399         {.acquire_fence = -1,
4400          .release_fence = -1,
4401          .buffer = nullptr,
4402          .stream = depthCh->getStream(),
4403          .status = CAMERA3_BUFFER_STATUS_ERROR};
4404 
4405     while (nullptr !=
4406             (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
4407         depthCh->unmapBuffer(currentFrameNumber);
4408 
4409         notify_msg.message.error.frame_number = currentFrameNumber;
4410         orchestrateNotify(&notify_msg);
4411 
4412         mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
4413     };
4414 }
4415 
4416 /*===========================================================================
4417  * FUNCTION   : hdrPlusPerfLock
4418  *
4419  * DESCRIPTION: perf lock for HDR+ using custom intent
4420  *
4421  * PARAMETERS : @metadata_buf: Metadata super_buf pointer
4422  *
4423  * RETURN     : None
4424  *
4425  *==========================================================================*/
hdrPlusPerfLock(mm_camera_super_buf_t * metadata_buf)4426 void QCamera3HardwareInterface::hdrPlusPerfLock(
4427         mm_camera_super_buf_t *metadata_buf)
4428 {
4429     if (NULL == metadata_buf) {
4430         LOGE("metadata_buf is NULL");
4431         return;
4432     }
4433     metadata_buffer_t *metadata =
4434             (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4435     int32_t *p_frame_number_valid =
4436             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4437     uint32_t *p_frame_number =
4438             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4439 
4440     if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4441         LOGE("%s: Invalid metadata", __func__);
4442         return;
4443     }
4444 
4445     //acquire perf lock for 2 secs after the last HDR frame is captured
4446     constexpr uint32_t HDR_PLUS_PERF_TIME_OUT = 2000;
4447     if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4448         if ((p_frame_number != NULL) &&
4449                 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
4450             mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
4451         }
4452     }
4453 }
4454 
4455 /*===========================================================================
4456  * FUNCTION   : handleInputBufferWithLock
4457  *
4458  * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4459  *
4460  * PARAMETERS : @frame_number: frame number of the input buffer
4461  *
4462  * RETURN     :
4463  *
4464  *==========================================================================*/
handleInputBufferWithLock(uint32_t frame_number)4465 void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4466 {
4467     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
4468     pendingRequestIterator i = mPendingRequestsList.begin();
4469     while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4470         i++;
4471     }
4472     if (i != mPendingRequestsList.end() && i->input_buffer) {
4473         //found the right request
4474         CameraMetadata settings;
4475         nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4476         if(i->settings) {
4477             settings = i->settings;
4478             if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4479                 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
4480             } else {
4481                 LOGE("No timestamp in input settings! Using current one.");
4482             }
4483         } else {
4484             LOGE("Input settings missing!");
4485         }
4486 
4487         mShutterDispatcher.markShutterReady(frame_number, capture_time);
4488         LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4489                     i->frame_number, capture_time);
4490 
4491         camera3_capture_result result;
4492         memset(&result, 0, sizeof(camera3_capture_result));
4493         result.frame_number = frame_number;
4494         result.result = i->settings;
4495         result.input_buffer = i->input_buffer;
4496         result.partial_result = PARTIAL_RESULT_COUNT;
4497 
4498         orchestrateResult(&result);
4499         LOGD("Input request metadata and input buffer frame_number = %u",
4500                         i->frame_number);
4501         i = erasePendingRequest(i);
4502 
4503         // Dispatch result metadata that may be just unblocked by this reprocess result.
4504         dispatchResultMetadataWithLock(frame_number, REPROCESS, false/*isHdrPlus*/);
4505     } else {
4506         LOGE("Could not find input request for frame number %d", frame_number);
4507     }
4508 }
4509 
4510 /*===========================================================================
4511  * FUNCTION   : handleBufferWithLock
4512  *
4513  * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4514  *
4515  * PARAMETERS : @buffer: image buffer for the callback
4516  *              @frame_number: frame number of the image buffer
4517  *
4518  * RETURN     :
4519  *
4520  *==========================================================================*/
handleBufferWithLock(camera3_stream_buffer_t * buffer,uint32_t frame_number)4521 void QCamera3HardwareInterface::handleBufferWithLock(
4522     camera3_stream_buffer_t *buffer, uint32_t frame_number)
4523 {
4524     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
4525 
4526     if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4527         mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4528     }
4529 
4530     /* Nothing to be done during error state */
4531     if ((ERROR == mState) || (DEINIT == mState)) {
4532         return;
4533     }
4534     if (mFlushPerf) {
4535         handleBuffersDuringFlushLock(buffer);
4536         return;
4537     }
4538     //not in flush
4539     // If the frame number doesn't exist in the pending request list,
4540     // directly send the buffer to the frameworks, and update pending buffers map
4541     // Otherwise, book-keep the buffer.
4542     pendingRequestIterator i = mPendingRequestsList.begin();
4543     while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4544         i++;
4545     }
4546 
4547     if (i != mPendingRequestsList.end()) {
4548         if (i->input_buffer) {
4549             // For a reprocessing request, try to send out result metadata.
4550             handlePendingResultMetadataWithLock(frame_number, nullptr);
4551         }
4552     }
4553 
4554     // Check if this frame was dropped.
4555     for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4556             m != mPendingFrameDropList.end(); m++) {
4557         QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4558         uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4559         if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4560             buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4561             LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4562                      frame_number, streamID);
4563             m = mPendingFrameDropList.erase(m);
4564             break;
4565         }
4566     }
4567 
4568     // WAR for encoder avtimer timestamp issue
4569     QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4570     if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
4571         m_bAVTimerEnabled) {
4572         for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
4573             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
4574             if (req->frame_number != frame_number)
4575                 continue;
4576             if(req->av_timestamp == 0) {
4577                 buffer->status |= CAMERA3_BUFFER_STATUS_ERROR;
4578             }
4579             else {
4580                 struct private_handle_t *priv_handle =
4581                     (struct private_handle_t *) (*(buffer->buffer));
4582                 setMetaData(priv_handle, SET_VT_TIMESTAMP, &(req->av_timestamp));
4583             }
4584         }
4585     }
4586 
4587     buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4588     LOGH("result frame_number = %d, buffer = %p",
4589              frame_number, buffer->buffer);
4590 
4591     mPendingBuffersMap.removeBuf(buffer->buffer);
4592     mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4593 
4594     if (mPreviewStarted == false) {
4595         QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4596         if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
4597             logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4598 
4599             mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4600             mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4601             mPreviewStarted = true;
4602 
4603             // Set power hint for preview
4604             mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4605         }
4606     }
4607 }
4608 
removeUnrequestedMetadata(pendingRequestIterator requestIter,camera_metadata_t * resultMetadata)4609 void QCamera3HardwareInterface::removeUnrequestedMetadata(pendingRequestIterator requestIter,
4610         camera_metadata_t *resultMetadata) {
4611     CameraMetadata metadata;
4612     metadata.acquire(resultMetadata);
4613 
4614     // Remove len shading map if it's not requested.
4615     if (requestIter->requestedLensShadingMapMode == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF &&
4616             metadata.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE) &&
4617             metadata.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0] !=
4618             ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF) {
4619         metadata.erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
4620         metadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
4621             &requestIter->requestedLensShadingMapMode, 1);
4622     }
4623 
4624     // Remove face information if it's not requested.
4625     if (requestIter->requestedFaceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_OFF &&
4626             metadata.exists(ANDROID_STATISTICS_FACE_DETECT_MODE) &&
4627             metadata.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0] !=
4628             ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
4629         metadata.erase(ANDROID_STATISTICS_FACE_RECTANGLES);
4630         metadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE,
4631                 &requestIter->requestedFaceDetectMode, 1);
4632     }
4633 
4634     requestIter->resultMetadata = metadata.release();
4635 }
4636 
void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
        camera_metadata_t *resultMetadata)
{
    // Find the pending request for this result metadata.
    auto requestIter = mPendingRequestsList.begin();
    while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
        requestIter++;
    }

    if (requestIter == mPendingRequestsList.end()) {
        ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
        return;
    }

    // Update the result metadata
    requestIter->resultMetadata = resultMetadata;

    // Check what type of request this is.
    // REPROCESS: has an input buffer; ZSL: still-ZSL request; otherwise NORMAL.
    RequestType requestType = (requestIter->input_buffer != nullptr) ?  REPROCESS :
            (isStillZsl(*requestIter) ? ZSL : NORMAL);
    if (requestIter->hdrplus) {
        // HDR+ request doesn't have partial results.
        requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
    } else if (requestType == REPROCESS) {
        // Reprocessing request result is the same as settings.
        requestIter->resultMetadata = requestIter->settings;
        // Reprocessing request doesn't have partial results.
        requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
    } else {
        // Live (non-HDR+, non-reprocess) request. If no urgent/partial result
        // was ever delivered, flag it as dropped so an error result is
        // reported downstream instead of a silent gap.
        if ((requestIter->partial_result_cnt == 0) && !requestIter->partialResultDropped) {
            LOGE("Urgent metadata for frame number: %d didn't arrive!", frameNumber);
            requestIter->partialResultDropped = true;
        }
        // Final metadata has arrived: mark all partial results complete and
        // account for the completed live request.
        requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
        mPendingLiveRequest--;

        {
            // Scope the HDR+ client lock to just this notification.
            std::unique_lock<std::mutex> l(gHdrPlusClientLock);
            // For a live request, send the metadata to HDR+ client.
            if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
                gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
                    requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
            }
        }
    }

    // Reprocess results echo the settings verbatim, so only non-reprocess
    // results are trimmed of metadata the app did not request.
    if (requestType != REPROCESS) {
        removeUnrequestedMetadata(requestIter, resultMetadata);
    }

    // Try to send out this (and any newly unblocked) result metadata in order.
    dispatchResultMetadataWithLock(frameNumber, requestType, requestIter->hdrplus);
}
4689 
// Dispatch, in frame-number order, all pending result metadata of the given
// request type that are ready, and notify ERROR_RESULT for earlier live
// requests whose metadata never arrived. Caller is expected to hold mMutex
// (per the "WithLock" naming convention used in this file -- confirm).
void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
        RequestType requestType, bool isHdrPlus) {
    // The pending requests are ordered by increasing frame numbers. The result metadata are ready
    // to be sent if all previous pending requests are ready to be sent.
    bool readyToSend = true;

    // Iterate through the pending requests to send out result metadata that are ready. Also if
    // this result metadata belongs to a live request, notify errors for previous live requests
    // that don't have result metadata yet.
    // Note: a live request is either a NORMAL request, or a ZSL non-hdrplus request.
    bool isLiveRequest = requestType != REPROCESS && !isHdrPlus;
    auto iter = mPendingRequestsList.begin();
    while (iter != mPendingRequestsList.end()) {
        // Classify this pending entry the same way the caller classified the
        // triggering request; only entries of the same type are handled here.
        bool thisIsStillZsl = isStillZsl(*iter);
        RequestType thisRequestType = (iter->input_buffer != nullptr) ? REPROCESS :
                (thisIsStillZsl ? ZSL : NORMAL);
        if (thisRequestType != requestType) {
            iter++;
            continue;
        }
        // Check if current pending request is ready. If it's not ready, the following pending
        // requests are also not ready.
        readyToSend &= iter->resultMetadata != nullptr;

        bool thisLiveRequest = !iter->hdrplus && iter->input_buffer == nullptr;
        bool errorResult = false;

        camera3_capture_result_t result = {};
        result.frame_number = iter->frame_number;
        result.result = iter->resultMetadata;
        result.partial_result = iter->partial_result_cnt;

        // If this pending buffer has result metadata, we may be able to send it out.
        if (iter->resultMetadata != nullptr) {
            if (!readyToSend) {
                // If any of the previous pending request is not ready, this pending request is
                // also not ready to send in order to keep shutter callbacks and result metadata
                // in order.
                iter++;
                continue;
            }
            // Notify ERROR_RESULT if partial result was dropped.
            errorResult = iter->partialResultDropped;
        } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
            // If the result metadata belongs to a live request, notify errors for previous pending
            // live requests.
            mPendingLiveRequest--;

            LOGE("Error: HAL missed metadata for frame number %d", iter->frame_number);
            errorResult = true;
        } else {
            // Not ready and not an earlier missed live request: leave the
            // entry pending and move on.
            iter++;
            continue;
        }

        if (errorResult) {
            // Check for any buffers that might be stuck in the post-process input queue
            // awaiting metadata and queue an empty meta buffer. The invalid data should
            // fail the offline post-process pass and return any buffers that otherwise
            // will become lost.
            for (auto it = iter->buffers.begin(); it != iter->buffers.end(); it++) {
                if (it->need_metadata) {
                    QCamera3ProcessingChannel *channel =
                        reinterpret_cast<QCamera3ProcessingChannel *> (it->stream->priv);
                    if (channel != nullptr) {
                        LOGE("Dropped result: %d Unblocking any pending pp buffers!",
                                iter->frame_number);
                        channel->queueReprocMetadata(nullptr);
                    }
                    it->need_metadata = false;
                    // Stop after unblocking the first waiting buffer.
                    break;
                }
            }

            notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
        } else {
            // Metadata-only result; output buffers are delivered separately.
            result.output_buffers = nullptr;
            result.num_output_buffers = 0;
            orchestrateResult(&result);
        }
        // For reprocessing, result metadata is the same as settings so do not free it here to
        // avoid double free.
        if (result.result != iter->settings) {
            free_camera_metadata((camera_metadata_t *)result.result);
        }
        iter->resultMetadata = nullptr;
        // erasePendingRequest returns the next valid iterator, keeping the
        // loop safe while erasing.
        iter = erasePendingRequest(iter);
    }

    if (isLiveRequest) {
        for (auto &iter : mPendingRequestsList) {
            // Increment pipeline depth for the following pending requests.
            if (iter.frame_number > frameNumber) {
                iter.pipeline_depth++;
            }
        }
    }

    // A slot may have been freed above; wake processCaptureRequest if waiting.
    unblockRequestIfNecessary();
}
4790 
4791 /*===========================================================================
4792  * FUNCTION   : unblockRequestIfNecessary
4793  *
4794  * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4795  *              that mMutex is held when this function is called.
4796  *
4797  * PARAMETERS :
4798  *
4799  * RETURN     :
4800  *
4801  *==========================================================================*/
unblockRequestIfNecessary()4802 void QCamera3HardwareInterface::unblockRequestIfNecessary()
4803 {
4804    // Unblock process_capture_request
4805    pthread_cond_signal(&mRequestCond);
4806 }
4807 
4808 /*===========================================================================
4809  * FUNCTION   : isHdrSnapshotRequest
4810  *
4811  * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4812  *
4813  * PARAMETERS : camera3 request structure
4814  *
4815  * RETURN     : boolean decision variable
4816  *
4817  *==========================================================================*/
isHdrSnapshotRequest(camera3_capture_request * request)4818 bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4819 {
4820     if (request == NULL) {
4821         LOGE("Invalid request handle");
4822         assert(0);
4823         return false;
4824     }
4825 
4826     if (!mForceHdrSnapshot) {
4827         CameraMetadata frame_settings;
4828         frame_settings = request->settings;
4829 
4830         if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4831             uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4832             if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4833                 return false;
4834             }
4835         } else {
4836             return false;
4837         }
4838 
4839         if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4840             uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4841             if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4842                 return false;
4843             }
4844         } else {
4845             return false;
4846         }
4847     }
4848 
4849     for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4850         if (request->output_buffers[i].stream->format
4851                 == HAL_PIXEL_FORMAT_BLOB) {
4852             return true;
4853         }
4854     }
4855 
4856     return false;
4857 }
4858 /*===========================================================================
4859  * FUNCTION   : orchestrateRequest
4860  *
4861  * DESCRIPTION: Orchestrates a capture request from camera service
4862  *
4863  * PARAMETERS :
4864  *   @request : request from framework to process
4865  *
4866  * RETURN     : Error status codes
4867  *
4868  *==========================================================================*/
int32_t QCamera3HardwareInterface::orchestrateRequest(
        camera3_capture_request_t *request)
{

    // Snapshot the framework-visible fields so they can be restored after the
    // request is (possibly) expanded into several internal requests.
    uint32_t originalFrameNumber = request->frame_number;
    uint32_t originalOutputCount = request->num_output_buffers;
    const camera_metadata_t *original_settings = request->settings;
    List<InternalRequest> internallyRequestedStreams;
    List<InternalRequest> emptyInternalList;

    if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
        // Expand the single framework HDR snapshot request into a sequence of
        // internal requests at different AE exposure compensation steps.
        // NOTE(review): return codes of the intermediate
        // processCaptureRequest() calls below are ignored -- confirm this
        // best-effort behavior is intentional.
        LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
        uint32_t internalFrameNumber;
        CameraMetadata modified_meta;


        /* Add Blob channel to list of internally requested streams */
        for (uint32_t i = 0; i < request->num_output_buffers; i++) {
            if (request->output_buffers[i].stream->format
                    == HAL_PIXEL_FORMAT_BLOB) {
                InternalRequest streamRequested;
                streamRequested.meteringOnly = 1;
                streamRequested.need_metadata = 0;
                streamRequested.stream = request->output_buffers[i].stream;
                internallyRequestedStreams.push_back(streamRequested);
            }
        }
        request->num_output_buffers = 0;
        auto itr =  internallyRequestedStreams.begin();

        /* Modify setting to set compensation */
        modified_meta = request->settings;
        int32_t expCompensation = GB_HDR_HALF_STEP_EV;
        uint8_t aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        camera_metadata_t *modified_settings = modified_meta.release();
        request->settings = modified_settings;

        /* Capture Settling & -2x frame */
        // Metering-only internal frame; its result is dropped because the
        // generated number maps to EMPTY_FRAMEWORK_FRAME_NUMBER.
        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // Framework-visible capture: restore the output buffers and map this
        // internal frame number back to the original framework one.
        request->num_output_buffers = originalOutputCount;
        _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, emptyInternalList);
        request->num_output_buffers = 0;

        // Build the 0 EV settings from the previous modified settings buffer.
        modified_meta = modified_settings;
        expCompensation = 0;
        aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        modified_settings = modified_meta.release();
        request->settings = modified_settings;

        /* Capture Settling & 0X frame */

        // First a metering-only settling frame at 0 EV...
        itr =  internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            LOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 0;
            itr->meteringOnly = 1;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // ...then the 0 EV capture frame with metadata requested.
        itr =  internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 1;
            itr->meteringOnly = 0;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        /* Capture 2X frame*/
        // Raise compensation for the +2x exposure bracket.
        modified_meta = modified_settings;
        expCompensation = GB_HDR_2X_STEP_EV;
        aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        modified_settings = modified_meta.release();
        request->settings = modified_settings;

        // Settling (metering-only) frame at +2x...
        itr =  internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 0;
            itr->meteringOnly = 1;
        }
        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // ...then the +2x capture frame with metadata requested.
        itr =  internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 1;
            itr->meteringOnly = 0;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);


        /* Capture 2X on original streaming config*/
        internallyRequestedStreams.clear();

        /* Restore original settings pointer */
        // NOTE(review): the last modified_settings buffer released above is
        // not freed here -- verify its ownership (presumably taken over by
        // the CameraMetadata re-assignment or processCaptureRequest).
        request->settings = original_settings;
    } else {
        // Pass-through: allocate an internal frame number mapped to the
        // framework's and forward the request unchanged.
        uint32_t internalFrameNumber;
        _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
        request->frame_number = internalFrameNumber;
        return processCaptureRequest(request, internallyRequestedStreams);
    }

    return NO_ERROR;
}
5004 
5005 /*===========================================================================
5006  * FUNCTION   : orchestrateResult
5007  *
5008  * DESCRIPTION: Orchestrates a capture result to camera service
5009  *
5010  * PARAMETERS :
 *   @result : capture result to translate and send to camera service
5012  *
5013  * RETURN     :
5014  *
5015  *==========================================================================*/
orchestrateResult(camera3_capture_result_t * result)5016 void QCamera3HardwareInterface::orchestrateResult(
5017                     camera3_capture_result_t *result)
5018 {
5019     uint32_t frameworkFrameNumber;
5020     int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
5021             frameworkFrameNumber);
5022     if (rc != NO_ERROR) {
5023         LOGE("Cannot find translated frameworkFrameNumber");
5024         assert(0);
5025     } else {
5026         if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
5027             LOGD("Internal Request drop the result");
5028         } else {
5029             if (result->result != NULL) {
5030                 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
5031                 camera_metadata_entry_t entry;
5032                 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
5033                 if (ret == OK) {
5034                     int64_t sync_frame_number = frameworkFrameNumber;
5035                     ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
5036                     if (ret != OK)
5037                         LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
5038                 }
5039             }
5040             result->frame_number = frameworkFrameNumber;
5041             LOGH("process_capture_result frame_number %d, result %p, partial %d", result->frame_number, result->result, result->partial_result);
5042             mCallbackOps->process_capture_result(mCallbackOps, result);
5043         }
5044     }
5045 }
5046 
5047 /*===========================================================================
5048  * FUNCTION   : orchestrateNotify
5049  *
5050  * DESCRIPTION: Orchestrates a notify to camera service
5051  *
5052  * PARAMETERS :
 *   @notify_msg : notify message to translate and send to camera service
5054  *
5055  * RETURN     :
5056  *
5057  *==========================================================================*/
orchestrateNotify(camera3_notify_msg_t * notify_msg)5058 void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
5059 {
5060     uint32_t frameworkFrameNumber;
5061     uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
5062     int32_t rc = NO_ERROR;
5063 
5064     rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
5065                                                           frameworkFrameNumber);
5066 
5067     if (rc != NO_ERROR) {
5068         if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
5069             LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
5070             frameworkFrameNumber = 0;
5071         } else {
5072             LOGE("Cannot find translated frameworkFrameNumber");
5073             assert(0);
5074             return;
5075         }
5076     }
5077 
5078     if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
5079         LOGD("Internal Request drop the notifyCb");
5080     } else {
5081         notify_msg->message.shutter.frame_number = frameworkFrameNumber;
5082         mCallbackOps->notify(mCallbackOps, notify_msg);
5083     }
5084 }
5085 
5086 /*===========================================================================
5087  * FUNCTION   : FrameNumberRegistry
5088  *
5089  * DESCRIPTION: Constructor
5090  *
5091  * PARAMETERS :
5092  *
5093  * RETURN     :
5094  *
5095  *==========================================================================*/
FrameNumberRegistry()5096 FrameNumberRegistry::FrameNumberRegistry()
5097 {
5098     _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
5099 }
5100 
5101 /*===========================================================================
5102  * FUNCTION   : ~FrameNumberRegistry
5103  *
5104  * DESCRIPTION: Destructor
5105  *
5106  * PARAMETERS :
5107  *
5108  * RETURN     :
5109  *
5110  *==========================================================================*/
~FrameNumberRegistry()5111 FrameNumberRegistry::~FrameNumberRegistry()
5112 {
5113 }
5114 
5115 /*===========================================================================
5116  * FUNCTION   : PurgeOldEntriesLocked
5117  *
 * DESCRIPTION: Maintenance function to trigger LRU cleanup mechanism
5119  *
5120  * PARAMETERS :
5121  *
5122  * RETURN     : NONE
5123  *
5124  *==========================================================================*/
purgeOldEntriesLocked()5125 void FrameNumberRegistry::purgeOldEntriesLocked()
5126 {
5127     while (_register.begin() != _register.end()) {
5128         auto itr = _register.begin();
5129         if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
5130             _register.erase(itr);
5131         } else {
5132             return;
5133         }
5134     }
5135 }
5136 
5137 /*===========================================================================
5138  * FUNCTION   : allocStoreInternalFrameNumber
5139  *
5140  * DESCRIPTION: Method to note down a framework request and associate a new
5141  *              internal request number against it
5142  *
5143  * PARAMETERS :
5144  *   @fFrameNumber: Identifier given by framework
5145  *   @internalFN  : Output parameter which will have the newly generated internal
5146  *                  entry
5147  *
5148  * RETURN     : Error code
5149  *
5150  *==========================================================================*/
allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,uint32_t & internalFrameNumber)5151 int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
5152                                                             uint32_t &internalFrameNumber)
5153 {
5154     Mutex::Autolock lock(mRegistryLock);
5155     internalFrameNumber = _nextFreeInternalNumber++;
5156     LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
5157     _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
5158     purgeOldEntriesLocked();
5159     return NO_ERROR;
5160 }
5161 
5162 /*===========================================================================
5163  * FUNCTION   : generateStoreInternalFrameNumber
5164  *
 * DESCRIPTION: Method to allocate a new internal request number independent
 *              of any association with framework requests
5167  *
5168  * PARAMETERS :
 *   @internalFrame#: Output parameter which will have the newly generated internal frame number
5170  *
5171  *
5172  * RETURN     : Error code
5173  *
5174  *==========================================================================*/
generateStoreInternalFrameNumber(uint32_t & internalFrameNumber)5175 int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
5176 {
5177     Mutex::Autolock lock(mRegistryLock);
5178     internalFrameNumber = _nextFreeInternalNumber++;
5179     LOGD("Generated internal framenumber:%d", internalFrameNumber);
5180     _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
5181     purgeOldEntriesLocked();
5182     return NO_ERROR;
5183 }
5184 
5185 /*===========================================================================
5186  * FUNCTION   : getFrameworkFrameNumber
5187  *
5188  * DESCRIPTION: Method to query the framework framenumber given an internal #
5189  *
5190  * PARAMETERS :
5191  *   @internalFrame#: Internal reference
5192  *   @frameworkframenumber: Output parameter holding framework frame entry
5193  *
5194  * RETURN     : Error code
5195  *
5196  *==========================================================================*/
getFrameworkFrameNumber(uint32_t internalFrameNumber,uint32_t & frameworkFrameNumber)5197 int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
5198                                                      uint32_t &frameworkFrameNumber)
5199 {
5200     Mutex::Autolock lock(mRegistryLock);
5201     auto itr = _register.find(internalFrameNumber);
5202     if (itr == _register.end()) {
5203         LOGE("Cannot find internal#: %d", internalFrameNumber);
5204         return -ENOENT;
5205     }
5206 
5207     frameworkFrameNumber = itr->second;
5208     purgeOldEntriesLocked();
5209     return NO_ERROR;
5210 }
5211 
fillPbStreamConfig(pbcamera::StreamConfiguration * config,uint32_t pbStreamId,QCamera3Channel * channel,uint32_t streamIndex)5212 status_t QCamera3HardwareInterface::fillPbStreamConfig(
5213         pbcamera::StreamConfiguration *config, uint32_t pbStreamId, QCamera3Channel *channel,
5214         uint32_t streamIndex) {
5215     if (config == nullptr) {
5216         LOGE("%s: config is null", __FUNCTION__);
5217         return BAD_VALUE;
5218     }
5219 
5220     if (channel == nullptr) {
5221         LOGE("%s: channel is null", __FUNCTION__);
5222         return BAD_VALUE;
5223     }
5224 
5225     QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
5226     if (stream == nullptr) {
5227         LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
5228         return NAME_NOT_FOUND;
5229     }
5230 
5231     const cam_stream_info_t* streamInfo = stream->getStreamInfo();
5232     if (streamInfo == nullptr) {
5233         LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
5234         return NAME_NOT_FOUND;
5235     }
5236 
5237     config->id = pbStreamId;
5238     config->image.width = streamInfo->dim.width;
5239     config->image.height = streamInfo->dim.height;
5240     config->image.padding = 0;
5241 
5242     int bytesPerPixel = 0;
5243 
5244     switch (streamInfo->fmt) {
5245         case CAM_FORMAT_YUV_420_NV21:
5246             config->image.format = HAL_PIXEL_FORMAT_YCrCb_420_SP;
5247             bytesPerPixel = 1;
5248             break;
5249         case CAM_FORMAT_YUV_420_NV12:
5250         case CAM_FORMAT_YUV_420_NV12_VENUS:
5251             config->image.format = HAL_PIXEL_FORMAT_YCbCr_420_SP;
5252             bytesPerPixel = 1;
5253             break;
5254         default:
5255             ALOGE("%s: Stream format %d not supported.", __FUNCTION__, streamInfo->fmt);
5256             return BAD_VALUE;
5257     }
5258 
5259     uint32_t totalPlaneSize = 0;
5260 
5261     // Fill plane information.
5262     for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
5263         pbcamera::PlaneConfiguration plane;
5264         plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride * bytesPerPixel;
5265         plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
5266         config->image.planes.push_back(plane);
5267 
5268         totalPlaneSize += (plane.stride * plane.scanline);
5269     }
5270 
5271     config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
5272     return OK;
5273 }
5274 
5275 /*===========================================================================
5276  * FUNCTION   : processCaptureRequest
5277  *
5278  * DESCRIPTION: process a capture request from camera service
5279  *
5280  * PARAMETERS :
5281  *   @request : request from framework to process
5282  *
5283  * RETURN     :
5284  *
5285  *==========================================================================*/
processCaptureRequest(camera3_capture_request_t * request,List<InternalRequest> & internallyRequestedStreams)5286 int QCamera3HardwareInterface::processCaptureRequest(
5287                     camera3_capture_request_t *request,
5288                     List<InternalRequest> &internallyRequestedStreams)
5289 {
5290     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
5291     int rc = NO_ERROR;
5292     int32_t request_id;
5293     CameraMetadata meta;
5294     bool isVidBufRequested = false;
5295     camera3_stream_buffer_t *pInputBuffer = NULL;
5296 
5297     // If Easel is thermal throttled and there is no pending HDR+ request,
5298     // close HDR+ client.
5299     {
5300         std::unique_lock<std::mutex> l(gHdrPlusClientLock);
5301         if (gHdrPlusClient != nullptr && mEaselThermalThrottled) {
5302             Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5303             if (mHdrPlusPendingRequests.empty()) {
5304                 closeHdrPlusClientLocked();
5305             }
5306         }
5307     }
5308 
5309     pthread_mutex_lock(&mMutex);
5310 
5311     // Validate current state
5312     switch (mState) {
5313         case CONFIGURED:
5314         case STARTED:
5315             /* valid state */
5316             break;
5317 
5318         case ERROR:
5319             pthread_mutex_unlock(&mMutex);
5320             handleCameraDeviceError();
5321             return -ENODEV;
5322 
5323         default:
5324             LOGE("Invalid state %d", mState);
5325             pthread_mutex_unlock(&mMutex);
5326             return -ENODEV;
5327     }
5328 
5329     rc = validateCaptureRequest(request, internallyRequestedStreams);
5330     if (rc != NO_ERROR) {
5331         LOGE("incoming request is not valid");
5332         pthread_mutex_unlock(&mMutex);
5333         return rc;
5334     }
5335 
5336     meta = request->settings;
5337 
5338     if (mState == CONFIGURED) {
5339         logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
5340 
5341         // For HFR first capture request, send capture intent, and
5342         // stream on all streams
5343         if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) && mBatchSize) {
5344             int32_t hal_version = CAM_HAL_V3;
5345             uint8_t captureIntent = meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5346             clear_metadata_buffer(mParameters);
5347             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
5348             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
5349             rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
5350             if (rc < 0) {
5351                 LOGE("set_parms for for capture intent failed");
5352                 pthread_mutex_unlock(&mMutex);
5353                 return rc;
5354             }
5355         }
5356 
5357         uint8_t nrMode = 0;
5358         if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
5359             nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
5360         }
5361 
5362         cam_is_type_t is_type = IS_TYPE_NONE;
5363         bool setEis = isEISEnabled(meta);
5364         cam_sensor_mode_info_t sensorModeInfo = {};
5365         rc = getSensorModeInfo(sensorModeInfo);
5366         if (rc != NO_ERROR) {
5367             LOGE("Failed to get sensor output size");
5368             pthread_mutex_unlock(&mMutex);
5369             goto error_exit;
5370         }
5371 
5372         mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5373                 gCamCapability[mCameraId]->active_array_size.height,
5374                 sensorModeInfo.active_array_size.width,
5375                 sensorModeInfo.active_array_size.height);
5376 
5377         /* Set batchmode before initializing channel. Since registerBuffer
5378          * internally initializes some of the channels, better set batchmode
5379          * even before first register buffer */
5380         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5381             it != mStreamInfo.end(); it++) {
5382             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5383             if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5384                     && mBatchSize) {
5385                 rc = channel->setBatchSize(mBatchSize);
5386                 //Disable per frame map unmap for HFR/batchmode case
5387                 rc |= channel->setPerFrameMapUnmap(false);
5388                 if (NO_ERROR != rc) {
5389                     LOGE("Channel init failed %d", rc);
5390                     pthread_mutex_unlock(&mMutex);
5391                     goto error_exit;
5392                 }
5393             }
5394         }
5395 
5396         //First initialize all streams
5397         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5398             it != mStreamInfo.end(); it++) {
5399             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5400 
5401             /* Initial value of NR mode is needed before stream on */
5402             channel->setNRMode(nrMode);
5403             if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5404                ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
5405                setEis) {
5406                 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5407                     if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5408                         is_type = mStreamConfigInfo.is_type[i];
5409                         break;
5410                     }
5411                 }
5412                 rc = channel->initialize(is_type);
5413             } else {
5414                 rc = channel->initialize(IS_TYPE_NONE);
5415             }
5416             if (NO_ERROR != rc) {
5417                 LOGE("Channel initialization failed %d", rc);
5418                 pthread_mutex_unlock(&mMutex);
5419                 goto error_exit;
5420             }
5421         }
5422 
5423         if (mRawDumpChannel) {
5424             rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5425             if (rc != NO_ERROR) {
5426                 LOGE("Error: Raw Dump Channel init failed");
5427                 pthread_mutex_unlock(&mMutex);
5428                 goto error_exit;
5429             }
5430         }
5431         if (mHdrPlusRawSrcChannel) {
5432             rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5433             if (rc != NO_ERROR) {
5434                 LOGE("Error: HDR+ RAW Source Channel init failed");
5435                 pthread_mutex_unlock(&mMutex);
5436                 goto error_exit;
5437             }
5438         }
5439         if (mSupportChannel) {
5440             rc = mSupportChannel->initialize(IS_TYPE_NONE);
5441             if (rc < 0) {
5442                 LOGE("Support channel initialization failed");
5443                 pthread_mutex_unlock(&mMutex);
5444                 goto error_exit;
5445             }
5446         }
5447         if (mAnalysisChannel) {
5448             rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5449             if (rc < 0) {
5450                 LOGE("Analysis channel initialization failed");
5451                 pthread_mutex_unlock(&mMutex);
5452                 goto error_exit;
5453             }
5454         }
5455         if (mDummyBatchChannel) {
5456             rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5457             if (rc < 0) {
5458                 LOGE("mDummyBatchChannel setBatchSize failed");
5459                 pthread_mutex_unlock(&mMutex);
5460                 goto error_exit;
5461             }
5462             rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
5463             if (rc < 0) {
5464                 LOGE("mDummyBatchChannel initialization failed");
5465                 pthread_mutex_unlock(&mMutex);
5466                 goto error_exit;
5467             }
5468         }
5469 
5470         // Set bundle info
5471         rc = setBundleInfo();
5472         if (rc < 0) {
5473             LOGE("setBundleInfo failed %d", rc);
5474             pthread_mutex_unlock(&mMutex);
5475             goto error_exit;
5476         }
5477 
5478         //update settings from app here
5479         if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5480             mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5481             LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5482         }
5483         if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5484             mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5485             LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5486         }
5487         if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5488             mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5489             LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5490 
5491             if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5492                 (mLinkedCameraId != mCameraId) ) {
5493                 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5494                     mLinkedCameraId, mCameraId);
5495                 pthread_mutex_unlock(&mMutex);
5496                 goto error_exit;
5497             }
5498         }
5499 
5500         // add bundle related cameras
5501         LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5502         if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5503             cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5504                     &m_pDualCamCmdPtr->bundle_info;
5505             m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
5506             if (mIsDeviceLinked)
5507                 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5508             else
5509                 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5510 
5511             pthread_mutex_lock(&gCamLock);
5512 
5513             if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5514                 LOGE("Dualcam: Invalid Session Id ");
5515                 pthread_mutex_unlock(&gCamLock);
5516                 pthread_mutex_unlock(&mMutex);
5517                 goto error_exit;
5518             }
5519 
5520             if (mIsMainCamera == 1) {
5521                 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5522                 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
5523                 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
5524                 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
5525                 // related session id should be session id of linked session
5526                 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5527             } else {
5528                 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5529                 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
5530                 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
5531                 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
5532                 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5533             }
5534             m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
5535             pthread_mutex_unlock(&gCamLock);
5536 
5537             rc = mCameraHandle->ops->set_dual_cam_cmd(
5538                     mCameraHandle->camera_handle);
5539             if (rc < 0) {
5540                 LOGE("Dualcam: link failed");
5541                 pthread_mutex_unlock(&mMutex);
5542                 goto error_exit;
5543             }
5544         }
5545         goto no_error;
5546 error_exit:
5547         mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
5548         return rc;
5549 no_error:
5550         mWokenUpByDaemon = false;
5551         mPendingLiveRequest = 0;
5552         mFirstConfiguration = false;
5553     }
5554 
5555     uint32_t frameNumber = request->frame_number;
5556     cam_stream_ID_t streamsArray;
5557 
5558     if (mFlushPerf) {
5559         //we cannot accept any requests during flush
5560         LOGE("process_capture_request cannot proceed during flush");
5561         pthread_mutex_unlock(&mMutex);
5562         return NO_ERROR; //should return an error
5563     }
5564 
5565     if (meta.exists(ANDROID_REQUEST_ID)) {
5566         request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5567         mCurrentRequestId = request_id;
5568         LOGD("Received request with id: %d", request_id);
5569     } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5570         LOGE("Unable to find request id field, \
5571                 & no previous id available");
5572         pthread_mutex_unlock(&mMutex);
5573         return NAME_NOT_FOUND;
5574     } else {
5575         LOGD("Re-using old request id");
5576         request_id = mCurrentRequestId;
5577     }
5578 
5579     LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5580                                     request->num_output_buffers,
5581                                     request->input_buffer,
5582                                     frameNumber);
5583     // Acquire all request buffers first
5584     streamsArray.num_streams = 0;
5585     int blob_request = 0;
5586     bool depthRequestPresent = false;
5587     uint32_t snapshotStreamId = 0;
5588     for (size_t i = 0; i < request->num_output_buffers; i++) {
5589         const camera3_stream_buffer_t& output = request->output_buffers[i];
5590         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5591 
5592         if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5593                 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
5594             //FIXME??:Call function to store local copy of jpeg data for encode params.
5595             blob_request = 1;
5596             snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5597         }
5598 
5599         if (output.acquire_fence != -1) {
5600            rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5601            close(output.acquire_fence);
5602            if (rc != OK) {
5603               LOGE("sync wait failed %d", rc);
5604               pthread_mutex_unlock(&mMutex);
5605               return rc;
5606            }
5607         }
5608 
5609         if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5610                 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
5611             depthRequestPresent = true;
5612             continue;
5613         }
5614 
5615         streamsArray.stream_request[streamsArray.num_streams++].streamID =
5616             channel->getStreamID(channel->getStreamTypeMask());
5617 
5618         if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5619             isVidBufRequested = true;
5620         }
5621     }
5622 
    //FIXME: Add checks to ensure no dups in validateCaptureRequest
5624     for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5625           itr++) {
5626         QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5627         streamsArray.stream_request[streamsArray.num_streams++].streamID =
5628             channel->getStreamID(channel->getStreamTypeMask());
5629 
5630         if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5631             isVidBufRequested = true;
5632         }
5633     }
5634 
5635     if (blob_request) {
5636         ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
5637         mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
5638     }
5639     if (blob_request && mRawDumpChannel) {
5640         LOGD("Trigger Raw based on blob request if Raw dump is enabled");
5641         streamsArray.stream_request[streamsArray.num_streams].streamID =
5642             mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
5643         streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5644     }
5645 
5646     {
5647         Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5648         // Request a RAW buffer if
5649         //  1. mHdrPlusRawSrcChannel is valid.
5650         //  2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5651         //  3. There is no pending HDR+ request.
5652         if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5653                 mHdrPlusPendingRequests.size() == 0) {
5654             streamsArray.stream_request[streamsArray.num_streams].streamID =
5655                 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5656             streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5657         }
5658     }
5659 
5660     //extract capture intent
5661     if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5662         mCaptureIntent =
5663                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5664     }
5665 
5666     if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5667         mCacMode =
5668                 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5669     }
5670 
5671     uint8_t requestedLensShadingMapMode;
5672     // Get the shading map mode.
5673     if (meta.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
5674         mLastRequestedLensShadingMapMode = requestedLensShadingMapMode =
5675                 meta.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
5676     } else {
5677         requestedLensShadingMapMode = mLastRequestedLensShadingMapMode;
5678     }
5679 
5680     if (meta.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
5681         mLastRequestedFaceDetectMode =
5682                 meta.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
5683     }
5684 
5685     if (meta.exists(ANDROID_STATISTICS_OIS_DATA_MODE)) {
5686         mLastRequestedOisDataMode =
5687                 meta.find(ANDROID_STATISTICS_OIS_DATA_MODE).data.u8[0];
5688     }
5689 
5690     bool hdrPlusRequest = false;
5691     HdrPlusPendingRequest pendingHdrPlusRequest = {};
5692 
5693     {
5694         std::unique_lock<std::mutex> l(gHdrPlusClientLock);
5695         // If this request has a still capture intent, try to submit an HDR+ request.
5696         if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5697                 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5698             hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5699         }
5700     }
5701 
5702     if (hdrPlusRequest) {
5703         // For a HDR+ request, just set the frame parameters.
5704         rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5705         if (rc < 0) {
5706             LOGE("fail to set frame parameters");
5707             pthread_mutex_unlock(&mMutex);
5708             return rc;
5709         }
5710     } else if(request->input_buffer == NULL) {
5711         /* Parse the settings:
5712          * - For every request in NORMAL MODE
5713          * - For every request in HFR mode during preview only case
5714          * - For first request of every batch in HFR mode during video
5715          * recording. In batchmode the same settings except frame number is
5716          * repeated in each request of the batch.
5717          */
5718         if (!mBatchSize ||
5719            (mBatchSize && !isVidBufRequested) ||
5720            (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
5721             rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5722             if (rc < 0) {
5723                 LOGE("fail to set frame parameters");
5724                 pthread_mutex_unlock(&mMutex);
5725                 return rc;
5726             }
5727 
5728             {
5729                 // If HDR+ mode is enabled, override the following modes so the necessary metadata
5730                 // will be included in the result metadata sent to Easel HDR+.
5731                 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
5732                 if (mHdrPlusModeEnabled) {
5733                     ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
5734                         ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON);
5735                     ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STATS_FACEDETECT_MODE,
5736                         ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
5737                 }
5738             }
5739         }
5740         /* For batchMode HFR, setFrameParameters is not called for every
5741          * request. But only frame number of the latest request is parsed.
5742          * Keep track of first and last frame numbers in a batch so that
5743          * metadata for the frame numbers of batch can be duplicated in
         * handleBatchMetadata */
5745         if (mBatchSize) {
5746             if (!mToBeQueuedVidBufs) {
5747                 //start of the batch
5748                 mFirstFrameNumberInBatch = request->frame_number;
5749             }
5750             if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5751                 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5752                 LOGE("Failed to set the frame number in the parameters");
5753                 pthread_mutex_unlock(&mMutex);
5754                 return BAD_VALUE;
5755             }
5756         }
5757         if (mNeedSensorRestart) {
5758             /* Unlock the mutex as restartSensor waits on the channels to be
5759              * stopped, which in turn calls stream callback functions -
5760              * handleBufferWithLock and handleMetadataWithLock */
5761             pthread_mutex_unlock(&mMutex);
5762             rc = dynamicUpdateMetaStreamInfo();
5763             if (rc != NO_ERROR) {
5764                 LOGE("Restarting the sensor failed");
5765                 return BAD_VALUE;
5766             }
5767             mNeedSensorRestart = false;
5768             pthread_mutex_lock(&mMutex);
5769         }
5770         if(mResetInstantAEC) {
5771             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5772                     CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5773             mResetInstantAEC = false;
5774         }
5775     } else {
5776         if (request->input_buffer->acquire_fence != -1) {
5777            rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5778            close(request->input_buffer->acquire_fence);
5779            if (rc != OK) {
5780               LOGE("input buffer sync wait failed %d", rc);
5781               pthread_mutex_unlock(&mMutex);
5782               return rc;
5783            }
5784         }
5785     }
5786 
5787     if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5788         mLastCustIntentFrmNum = frameNumber;
5789     }
5790     /* Update pending request list and pending buffers map */
5791     PendingRequestInfo pendingRequest = {};
5792     pendingRequestIterator latestRequest;
5793     pendingRequest.frame_number = frameNumber;
5794     pendingRequest.num_buffers = depthRequestPresent ?
5795             (request->num_output_buffers - 1 ) : request->num_output_buffers;
5796     pendingRequest.request_id = request_id;
5797     pendingRequest.blob_request = blob_request;
5798     pendingRequest.timestamp = 0;
5799     pendingRequest.requestedLensShadingMapMode = requestedLensShadingMapMode;
5800     pendingRequest.requestedFaceDetectMode = mLastRequestedFaceDetectMode;
5801     pendingRequest.requestedOisDataMode = mLastRequestedOisDataMode;
5802     if (request->input_buffer) {
5803         pendingRequest.input_buffer =
5804                 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5805         *(pendingRequest.input_buffer) = *(request->input_buffer);
5806         pInputBuffer = pendingRequest.input_buffer;
5807     } else {
5808        pendingRequest.input_buffer = NULL;
5809        pInputBuffer = NULL;
5810     }
5811     pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
5812 
5813     pendingRequest.pipeline_depth = 0;
5814     pendingRequest.partial_result_cnt = 0;
5815     extractJpegMetadata(mCurJpegMeta, request);
5816     pendingRequest.jpegMetadata = mCurJpegMeta;
5817     pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5818     pendingRequest.capture_intent = mCaptureIntent;
5819     if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5820         pendingRequest.hybrid_ae_enable =
5821                 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5822     }
5823 
5824     if (meta.exists(NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE)) {
5825         pendingRequest.motion_detection_enable =
5826                 meta.find(NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE).data.u8[0];
5827     }
5828 
5829     /* DevCamDebug metadata processCaptureRequest */
5830     if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5831         mDevCamDebugMetaEnable =
5832                 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5833     }
5834     pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5835     /* DevCamDebug metadata end */
5836 
5837     //extract CAC info
5838     if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5839         mCacMode =
5840                 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5841     }
5842     pendingRequest.fwkCacMode = mCacMode;
5843     pendingRequest.hdrplus = hdrPlusRequest;
5844     // We need to account for several dropped frames initially on sensor side.
5845     pendingRequest.expectedFrameDuration = (mState == CONFIGURED) ? (4 * mExpectedFrameDuration) :
5846         mExpectedFrameDuration;
5847     mExpectedInflightDuration += pendingRequest.expectedFrameDuration;
5848 
5849     // extract enableZsl info
5850     if (gExposeEnableZslKey) {
5851         if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5852             pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5853             mZslEnabled = pendingRequest.enableZsl;
5854         } else {
5855             pendingRequest.enableZsl = mZslEnabled;
5856         }
5857     }
5858 
5859     PendingBuffersInRequest bufsForCurRequest;
5860     bufsForCurRequest.frame_number = frameNumber;
5861     // Mark current timestamp for the new request
5862     bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
5863     bufsForCurRequest.av_timestamp = 0;
5864 
5865     if (hdrPlusRequest) {
5866         // Save settings for this request.
5867         pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5868         memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5869 
5870         // Add to pending HDR+ request queue.
5871         Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5872         mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5873 
5874         ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5875     }
5876 
5877     for (size_t i = 0; i < request->num_output_buffers; i++) {
5878         if ((request->output_buffers[i].stream->data_space ==
5879                 HAL_DATASPACE_DEPTH) &&
5880                 (HAL_PIXEL_FORMAT_BLOB ==
5881                         request->output_buffers[i].stream->format)) {
5882             continue;
5883         }
5884         RequestedBufferInfo requestedBuf;
5885         memset(&requestedBuf, 0, sizeof(requestedBuf));
5886         requestedBuf.stream = request->output_buffers[i].stream;
5887         requestedBuf.buffer = NULL;
5888         pendingRequest.buffers.push_back(requestedBuf);
5889 
5890         // Add to buffer handle the pending buffers list
5891         PendingBufferInfo bufferInfo;
5892         bufferInfo.buffer = request->output_buffers[i].buffer;
5893         bufferInfo.stream = request->output_buffers[i].stream;
5894         bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5895         QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5896         LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5897             frameNumber, bufferInfo.buffer,
5898             channel->getStreamTypeMask(), bufferInfo.stream->format);
5899     }
5900     // Add this request packet into mPendingBuffersMap
5901     mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5902     LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5903         mPendingBuffersMap.get_num_overall_buffers());
5904 
5905     latestRequest = mPendingRequestsList.insert(
5906             mPendingRequestsList.end(), pendingRequest);
5907 
5908     // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5909     // for the frame number.
5910     mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr,
5911             isStillZsl(pendingRequest));
5912     for (size_t i = 0; i < request->num_output_buffers; i++) {
5913         mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5914     }
5915 
5916     if(mFlush) {
5917         LOGI("mFlush is true");
5918         pthread_mutex_unlock(&mMutex);
5919         return NO_ERROR;
5920     }
5921 
5922     // If this is not an HDR+ request, send the request to metadata and each output buffer's
5923     // channel.
5924     if (!hdrPlusRequest) {
5925         int indexUsed;
5926         // Notify metadata channel we receive a request
5927         mMetadataChannel->request(NULL, frameNumber, indexUsed);
5928 
5929         if(request->input_buffer != NULL){
5930             LOGD("Input request, frame_number %d", frameNumber);
5931             rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5932             if (NO_ERROR != rc) {
5933                 LOGE("fail to set reproc parameters");
5934                 pthread_mutex_unlock(&mMutex);
5935                 return rc;
5936             }
5937         }
5938 
5939         // Call request on other streams
5940         uint32_t streams_need_metadata = 0;
5941         pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5942         for (size_t i = 0; i < request->num_output_buffers; i++) {
5943             const camera3_stream_buffer_t& output = request->output_buffers[i];
5944             QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5945 
5946             if (channel == NULL) {
5947                 LOGW("invalid channel pointer for stream");
5948                 continue;
5949             }
5950 
5951             if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5952                 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5953                           output.buffer, request->input_buffer, frameNumber);
5954                 if(request->input_buffer != NULL){
5955                     rc = channel->request(output.buffer, frameNumber,
5956                             pInputBuffer, &mReprocMeta, indexUsed, false, false);
5957                     if (rc < 0) {
5958                         LOGE("Fail to request on picture channel");
5959                         pthread_mutex_unlock(&mMutex);
5960                         return rc;
5961                     }
5962                 } else {
5963                     if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5964                         assert(NULL != mDepthChannel);
5965                         assert(mDepthChannel == output.stream->priv);
5966 
5967                         rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5968                         if (rc < 0) {
5969                             LOGE("Fail to map on depth buffer");
5970                             pthread_mutex_unlock(&mMutex);
5971                             return rc;
5972                         }
5973                         continue;
5974                     } else {
5975                         LOGD("snapshot request with buffer %p, frame_number %d",
5976                                  output.buffer, frameNumber);
5977                         if (!request->settings) {
5978                             rc = channel->request(output.buffer, frameNumber,
5979                                     NULL, mPrevParameters, indexUsed);
5980                         } else {
5981                             rc = channel->request(output.buffer, frameNumber,
5982                                     NULL, mParameters, indexUsed);
5983                         }
5984                         if (rc < 0) {
5985                             LOGE("Fail to request on picture channel");
5986                             pthread_mutex_unlock(&mMutex);
5987                             return rc;
5988                         }
5989 
5990                         uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5991                         uint32_t j = 0;
5992                         for (j = 0; j < streamsArray.num_streams; j++) {
5993                             if (streamsArray.stream_request[j].streamID == streamId) {
5994                                 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5995                                     streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5996                                 else
5997                                     streamsArray.stream_request[j].buf_index = indexUsed;
5998                                 break;
5999                             }
6000                         }
6001                         if (j == streamsArray.num_streams) {
6002                             LOGE("Did not find matching stream to update index");
6003                             assert(0);
6004                         }
6005 
6006                         pendingBufferIter->need_metadata = true;
6007 
6008                         if (isEISCropInSnapshotNeeded(meta)) {
6009                             pendingBufferIter->need_crop = true;
6010                             pendingBufferIter->crop_info = mLastEISCropInfo;
6011                         }
6012 
6013                         streams_need_metadata++;
6014                     }
6015                 }
6016             } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
6017                     output.stream->format == HAL_PIXEL_FORMAT_Y8) {
6018                 bool needMetadata = false;
6019                 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
6020                 rc = yuvChannel->request(output.buffer, frameNumber,
6021                         pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
6022                         needMetadata, indexUsed, false, false);
6023                 if (rc < 0) {
6024                     LOGE("Fail to request on YUV channel");
6025                     pthread_mutex_unlock(&mMutex);
6026                     return rc;
6027                 }
6028 
6029                 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
6030                 uint32_t j = 0;
6031                 for (j = 0; j < streamsArray.num_streams; j++) {
6032                     if (streamsArray.stream_request[j].streamID == streamId) {
6033                         if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
6034                             streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
6035                         else
6036                             streamsArray.stream_request[j].buf_index = indexUsed;
6037                         break;
6038                     }
6039                 }
6040                 if (j == streamsArray.num_streams) {
6041                     LOGE("Did not find matching stream to update index");
6042                     assert(0);
6043                 }
6044 
6045                 pendingBufferIter->need_metadata = needMetadata;
6046                 if (needMetadata)
6047                     streams_need_metadata += 1;
6048                 LOGD("calling YUV channel request, need_metadata is %d",
6049                          needMetadata);
6050             } else {
6051                 LOGD("request with buffer %p, frame_number %d",
6052                       output.buffer, frameNumber);
6053 
6054                 rc = channel->request(output.buffer, frameNumber, indexUsed);
6055 
6056                 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
6057                 uint32_t j = 0;
6058                 for (j = 0; j < streamsArray.num_streams; j++) {
6059                     if (streamsArray.stream_request[j].streamID == streamId) {
6060                         if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
6061                             streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
6062                         else
6063                             streamsArray.stream_request[j].buf_index = indexUsed;
6064                         break;
6065                     }
6066                 }
6067                 if (j == streamsArray.num_streams) {
6068                     LOGE("Did not find matching stream to update index");
6069                     assert(0);
6070                 }
6071 
6072                 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
6073                         && mBatchSize) {
6074                     mToBeQueuedVidBufs++;
6075                     if (mToBeQueuedVidBufs == mBatchSize) {
6076                         channel->queueBatchBuf();
6077                     }
6078                 }
6079                 if (rc < 0) {
6080                     LOGE("request failed");
6081                     pthread_mutex_unlock(&mMutex);
6082                     return rc;
6083                 }
6084             }
6085             pendingBufferIter++;
6086         }
6087 
6088         for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
6089               itr++) {
6090             QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
6091 
6092             if (channel == NULL) {
6093                 LOGE("invalid channel pointer for stream");
6094                 assert(0);
6095                 pthread_mutex_unlock(&mMutex);
6096                 return BAD_VALUE;
6097             }
6098 
6099             InternalRequest requestedStream;
6100             requestedStream = (*itr);
6101 
6102 
6103             if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
6104                 LOGD("snapshot request internally input buffer %p, frame_number %d",
6105                           request->input_buffer, frameNumber);
6106                 if(request->input_buffer != NULL){
6107                     rc = channel->request(NULL, frameNumber,
6108                             pInputBuffer, &mReprocMeta, indexUsed, true,
6109                             requestedStream.meteringOnly);
6110                     if (rc < 0) {
6111                         LOGE("Fail to request on picture channel");
6112                         pthread_mutex_unlock(&mMutex);
6113                         return rc;
6114                     }
6115                 } else {
6116                     LOGD("snapshot request with frame_number %d", frameNumber);
6117                     if (!request->settings) {
6118                         rc = channel->request(NULL, frameNumber,
6119                                 NULL, mPrevParameters, indexUsed, true,
6120                                 requestedStream.meteringOnly);
6121                     } else {
6122                         rc = channel->request(NULL, frameNumber,
6123                                 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
6124                     }
6125                     if (rc < 0) {
6126                         LOGE("Fail to request on picture channel");
6127                         pthread_mutex_unlock(&mMutex);
6128                         return rc;
6129                     }
6130 
6131                     if ((*itr).meteringOnly != 1) {
6132                         requestedStream.need_metadata = 1;
6133                         streams_need_metadata++;
6134                     }
6135                 }
6136 
6137                 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
6138                 uint32_t j = 0;
6139                 for (j = 0; j < streamsArray.num_streams; j++) {
6140                     if (streamsArray.stream_request[j].streamID == streamId) {
6141                       if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
6142                           streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
6143                       else
6144                           streamsArray.stream_request[j].buf_index = indexUsed;
6145                         break;
6146                     }
6147                 }
6148                 if (j == streamsArray.num_streams) {
6149                     LOGE("Did not find matching stream to update index");
6150                     assert(0);
6151                 }
6152 
6153             } else {
6154                 LOGE("Internal requests not supported on this stream type");
6155                 assert(0);
6156                 pthread_mutex_unlock(&mMutex);
6157                 return INVALID_OPERATION;
6158             }
6159             latestRequest->internalRequestList.push_back(requestedStream);
6160         }
6161 
6162         //If 2 streams have need_metadata set to true, fail the request, unless
6163         //we copy/reference count the metadata buffer
6164         if (streams_need_metadata > 1) {
6165             LOGE("not supporting request in which two streams requires"
6166                     " 2 HAL metadata for reprocessing");
6167             pthread_mutex_unlock(&mMutex);
6168             return -EINVAL;
6169         }
6170 
6171         cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
6172                 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
6173         if (depthRequestPresent && mDepthChannel) {
6174             if (request->settings) {
6175                 camera_metadata_ro_entry entry;
6176                 if (find_camera_metadata_ro_entry(request->settings,
6177                         NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
6178                     if (entry.data.u8[0]) {
6179                         pdafEnable = CAM_PD_DATA_ENABLED;
6180                     } else {
6181                         pdafEnable = CAM_PD_DATA_SKIP;
6182                     }
6183                     mDepthCloudMode = pdafEnable;
6184                 } else {
6185                     pdafEnable = mDepthCloudMode;
6186                 }
6187             } else {
6188                 pdafEnable = mDepthCloudMode;
6189             }
6190         }
6191 
6192         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
6193                 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
6194             LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
6195             pthread_mutex_unlock(&mMutex);
6196             return BAD_VALUE;
6197         }
6198 
6199         if (request->input_buffer == NULL) {
6200             /* Set the parameters to backend:
6201              * - For every request in NORMAL MODE
6202              * - For every request in HFR mode during preview only case
6203              * - Once every batch in HFR mode during video recording
6204              */
6205             if (!mBatchSize ||
6206                (mBatchSize && !isVidBufRequested) ||
6207                (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
6208                 LOGD("set_parms  batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
6209                          mBatchSize, isVidBufRequested,
6210                         mToBeQueuedVidBufs);
6211 
6212                 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
6213                     for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
6214                         uint32_t m = 0;
6215                         for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
6216                             if (streamsArray.stream_request[k].streamID ==
6217                                     mBatchedStreamsArray.stream_request[m].streamID)
6218                                 break;
6219                             }
6220                             if (m == mBatchedStreamsArray.num_streams) {
6221                                 mBatchedStreamsArray.stream_request\
6222                                     [mBatchedStreamsArray.num_streams].streamID =
6223                                     streamsArray.stream_request[k].streamID;
6224                                 mBatchedStreamsArray.stream_request\
6225                                     [mBatchedStreamsArray.num_streams].buf_index =
6226                                     streamsArray.stream_request[k].buf_index;
6227                                 mBatchedStreamsArray.num_streams =
6228                                     mBatchedStreamsArray.num_streams + 1;
6229                             }
6230                     }
6231                     streamsArray = mBatchedStreamsArray;
6232                 }
6233                 /* Update stream id of all the requested buffers */
6234                 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
6235                         streamsArray)) {
6236                     LOGE("Failed to set stream type mask in the parameters");
6237                     pthread_mutex_unlock(&mMutex);
6238                     return BAD_VALUE;
6239                 }
6240 
6241                 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
6242                         mParameters);
6243                 if (rc < 0) {
6244                     LOGE("set_parms failed");
6245                 }
6246                 /* reset to zero coz, the batch is queued */
6247                 mToBeQueuedVidBufs = 0;
6248                 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
6249                 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
6250             } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
6251                 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
6252                     uint32_t m = 0;
6253                     for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
6254                         if (streamsArray.stream_request[k].streamID ==
6255                                 mBatchedStreamsArray.stream_request[m].streamID)
6256                             break;
6257                     }
6258                     if (m == mBatchedStreamsArray.num_streams) {
6259                         mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
6260                                 streamID = streamsArray.stream_request[k].streamID;
6261                         mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
6262                                 buf_index = streamsArray.stream_request[k].buf_index;
6263                         mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
6264                     }
6265                 }
6266             }
6267             mPendingLiveRequest++;
6268 
6269             // Start all streams after the first setting is sent, so that the
6270             // setting can be applied sooner: (0 + apply_delay)th frame.
6271             if (mState == CONFIGURED && mChannelHandle) {
6272                 //Then start them.
6273                 LOGH("Start META Channel");
6274                 rc = mMetadataChannel->start();
6275                 if (rc < 0) {
6276                     LOGE("META channel start failed");
6277                     pthread_mutex_unlock(&mMutex);
6278                     return rc;
6279                 }
6280 
6281                 if (mAnalysisChannel) {
6282                     rc = mAnalysisChannel->start();
6283                     if (rc < 0) {
6284                         LOGE("Analysis channel start failed");
6285                         mMetadataChannel->stop();
6286                         pthread_mutex_unlock(&mMutex);
6287                         return rc;
6288                     }
6289                 }
6290 
6291                 if (mSupportChannel) {
6292                     rc = mSupportChannel->start();
6293                     if (rc < 0) {
6294                         LOGE("Support channel start failed");
6295                         mMetadataChannel->stop();
6296                         /* Although support and analysis are mutually exclusive today
6297                            adding it in anycase for future proofing */
6298                         if (mAnalysisChannel) {
6299                             mAnalysisChannel->stop();
6300                         }
6301                         pthread_mutex_unlock(&mMutex);
6302                         return rc;
6303                     }
6304                 }
6305                 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6306                         it != mStreamInfo.end(); it++) {
6307                     QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
6308                     LOGH("Start Processing Channel mask=%d",
6309                             channel->getStreamTypeMask());
6310                     rc = channel->start();
6311                     if (rc < 0) {
6312                         LOGE("channel start failed");
6313                         pthread_mutex_unlock(&mMutex);
6314                         return rc;
6315                     }
6316                 }
6317 
6318                 if (mRawDumpChannel) {
6319                     LOGD("Starting raw dump stream");
6320                     rc = mRawDumpChannel->start();
6321                     if (rc != NO_ERROR) {
6322                         LOGE("Error Starting Raw Dump Channel");
6323                         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6324                               it != mStreamInfo.end(); it++) {
6325                             QCamera3Channel *channel =
6326                                 (QCamera3Channel *)(*it)->stream->priv;
6327                             LOGH("Stopping Processing Channel mask=%d",
6328                                 channel->getStreamTypeMask());
6329                             channel->stop();
6330                         }
6331                         if (mSupportChannel)
6332                             mSupportChannel->stop();
6333                         if (mAnalysisChannel) {
6334                             mAnalysisChannel->stop();
6335                         }
6336                         mMetadataChannel->stop();
6337                         pthread_mutex_unlock(&mMutex);
6338                         return rc;
6339                     }
6340                 }
6341 
6342                 // Configure modules for stream on.
6343                 rc = startChannelLocked();
6344                 if (rc != NO_ERROR) {
6345                     LOGE("startChannelLocked failed %d", rc);
6346                     pthread_mutex_unlock(&mMutex);
6347                     return rc;
6348                 }
6349             }
6350         }
6351     }
6352 
6353     // Enable HDR+ mode for the first PREVIEW_INTENT request that doesn't disable HDR+.
6354     {
6355         std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6356         if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
6357                 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6358                 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6359                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
6360                 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW &&
6361                 meta.exists(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS) &&
6362                 meta.find(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS).data.i32[0] == 0) {
6363 
6364             if (isSessionHdrPlusModeCompatible()) {
6365                 rc = enableHdrPlusModeLocked();
6366                 if (rc != OK) {
6367                     LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6368                     pthread_mutex_unlock(&mMutex);
6369                     return rc;
6370                 }
6371             }
6372 
6373             mFirstPreviewIntentSeen = true;
6374         }
6375     }
6376 
6377     LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6378 
6379     mState = STARTED;
6380     // Added a timed condition wait
6381     struct timespec ts;
6382     uint8_t isValidTimeout = 1;
6383     rc = clock_gettime(CLOCK_MONOTONIC, &ts);
6384     if (rc < 0) {
6385       isValidTimeout = 0;
6386       LOGE("Error reading the real time clock!!");
6387     }
6388     else {
6389       // Make timeout as 5 sec for request to be honored
6390       int64_t timeout = 5;
6391       {
6392           Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6393           // If there is a pending HDR+ request, the following requests may be blocked until the
6394           // HDR+ request is done. So allow a longer timeout.
6395           if (mHdrPlusPendingRequests.size() > 0) {
6396               timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6397           }
6398       }
6399       ts.tv_sec += timeout;
6400     }
6401     //Block on conditional variable
6402     while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
6403             (mState != ERROR) && (mState != DEINIT)) {
6404         if (!isValidTimeout) {
6405             LOGD("Blocking on conditional wait");
6406             pthread_cond_wait(&mRequestCond, &mMutex);
6407         }
6408         else {
6409             LOGD("Blocking on timed conditional wait");
6410             rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6411             if (rc == ETIMEDOUT) {
6412                 rc = -ENODEV;
6413                 LOGE("Unblocked on timeout!!!!");
6414                 break;
6415             }
6416         }
6417         LOGD("Unblocked");
6418         if (mWokenUpByDaemon) {
6419             mWokenUpByDaemon = false;
6420             if (mPendingLiveRequest < mMaxInFlightRequests)
6421                 break;
6422         }
6423     }
6424     pthread_mutex_unlock(&mMutex);
6425 
6426     return rc;
6427 }
6428 
startChannelLocked()6429 int32_t QCamera3HardwareInterface::startChannelLocked()
6430 {
6431     // Configure modules for stream on.
6432     int32_t rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
6433             mChannelHandle, /*start_sensor_streaming*/false);
6434     if (rc != NO_ERROR) {
6435         LOGE("start_channel failed %d", rc);
6436         return rc;
6437     }
6438 
6439     {
6440         // Configure Easel for stream on.
6441         std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6442         if (EaselManagerClientOpened) {
6443             // Now that sensor mode should have been selected, get the selected sensor mode
6444             // info.
6445             memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
6446             rc = getCurrentSensorModeInfo(mSensorModeInfo);
6447             if (rc != NO_ERROR) {
6448                 ALOGE("%s: Get current sensor mode failed, bail out: %s (%d).", __FUNCTION__,
6449                         strerror(-rc), rc);
6450                 return rc;
6451             }
6452             logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
6453             rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
6454                     /*enableCapture*/true);
6455             if (rc != OK) {
6456                 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
6457                         mCameraId, mSensorModeInfo.op_pixel_clk);
6458                 return rc;
6459             }
6460             logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
6461             mEaselMipiStarted = true;
6462         }
6463     }
6464 
6465     // Start sensor streaming.
6466     rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
6467             mChannelHandle);
6468     if (rc != NO_ERROR) {
6469         LOGE("start_sensor_stream_on failed %d", rc);
6470         return rc;
6471     }
6472 
6473     return 0;
6474 }
6475 
stopChannelLocked(bool stopChannelImmediately)6476 void QCamera3HardwareInterface::stopChannelLocked(bool stopChannelImmediately)
6477 {
6478     mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6479             mChannelHandle, stopChannelImmediately);
6480 
6481     {
6482         std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6483         if (EaselManagerClientOpened && mEaselMipiStarted) {
6484             int32_t rc = gEaselManagerClient->stopMipi(mCameraId);
6485             if (rc != 0) {
6486                 ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
6487             }
6488             mEaselMipiStarted = false;
6489         }
6490     }
6491 }
6492 
/*===========================================================================
 * FUNCTION   : dump
 *
 * DESCRIPTION: Dumps HAL state (pending requests, pending buffers and the
 *              pending frame-drop list) to the given file descriptor, as
 *              invoked via dumpsys media.camera.
 *
 * PARAMETERS :
 *   @fd      : file descriptor to write the dump output to
 *
 * RETURN     : None
 *==========================================================================*/
dump(int fd)6503 void QCamera3HardwareInterface::dump(int fd)
6504 {
6505     pthread_mutex_lock(&mMutex);
6506     dprintf(fd, "\n Camera HAL3 information Begin \n");
6507 
6508     dprintf(fd, "\nNumber of pending requests: %zu \n",
6509         mPendingRequestsList.size());
6510     dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6511     dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
6512     dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6513     for(pendingRequestIterator i = mPendingRequestsList.begin();
6514             i != mPendingRequestsList.end(); i++) {
6515         dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6516         i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6517         i->input_buffer);
6518     }
6519     dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6520                 mPendingBuffersMap.get_num_overall_buffers());
6521     dprintf(fd, "-------+------------------\n");
6522     dprintf(fd, " Frame | Stream type mask \n");
6523     dprintf(fd, "-------+------------------\n");
6524     for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6525         for(auto &j : req.mPendingBufferList) {
6526             QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6527             dprintf(fd, " %5d | %11d \n",
6528                     req.frame_number, channel->getStreamTypeMask());
6529         }
6530     }
6531     dprintf(fd, "-------+------------------\n");
6532 
6533     dprintf(fd, "\nPending frame drop list: %zu\n",
6534         mPendingFrameDropList.size());
6535     dprintf(fd, "-------+-----------\n");
6536     dprintf(fd, " Frame | Stream ID \n");
6537     dprintf(fd, "-------+-----------\n");
6538     for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6539         i != mPendingFrameDropList.end(); i++) {
6540         dprintf(fd, " %5d | %9d \n",
6541             i->frame_number, i->stream_ID);
6542     }
6543     dprintf(fd, "-------+-----------\n");
6544 
6545     dprintf(fd, "\n Camera HAL3 information End \n");
6546 
6547     /* use dumpsys media.camera as trigger to send update debug level event */
6548     mUpdateDebugLevel = true;
6549     pthread_mutex_unlock(&mMutex);
6550     return;
6551 }
6552 
6553 /*===========================================================================
6554  * FUNCTION   : flush
6555  *
6556  * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6557  *              conditionally restarts channels
6558  *
6559  * PARAMETERS :
6560  *  @ restartChannels: re-start all channels
6561  *  @ stopChannelImmediately: stop the channel immediately. This should be used
6562  *                            when device encountered an error and MIPI may has
6563  *                            been stopped.
6564  *
6565  * RETURN     :
6566  *          0 on success
6567  *          Error code on failure
6568  *==========================================================================*/
flush(bool restartChannels,bool stopChannelImmediately)6569 int QCamera3HardwareInterface::flush(bool restartChannels, bool stopChannelImmediately)
6570 {
6571     KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
6572     int32_t rc = NO_ERROR;
6573 
6574     LOGD("Unblocking Process Capture Request");
6575     pthread_mutex_lock(&mMutex);
6576     mFlush = true;
6577     pthread_mutex_unlock(&mMutex);
6578 
6579     // Disable HDR+ if it's enabled;
6580     {
6581         std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6582         finishHdrPlusClientOpeningLocked(l);
6583         disableHdrPlusModeLocked();
6584     }
6585 
6586     rc = stopAllChannels();
6587     // unlink of dualcam
6588     if (mIsDeviceLinked) {
6589         cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6590                 &m_pDualCamCmdPtr->bundle_info;
6591         m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
6592         m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6593         pthread_mutex_lock(&gCamLock);
6594 
6595         if (mIsMainCamera == 1) {
6596             m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6597             m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
6598             m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
6599             // related session id should be session id of linked session
6600             m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6601         } else {
6602             m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6603             m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
6604             m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
6605             m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6606         }
6607         m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
6608         pthread_mutex_unlock(&gCamLock);
6609 
6610         rc = mCameraHandle->ops->set_dual_cam_cmd(
6611                 mCameraHandle->camera_handle);
6612         if (rc < 0) {
6613             LOGE("Dualcam: Unlink failed, but still proceed to close");
6614         }
6615     }
6616 
6617     if (rc < 0) {
6618         LOGE("stopAllChannels failed");
6619         return rc;
6620     }
6621     if (mChannelHandle) {
6622         stopChannelLocked(stopChannelImmediately);
6623     }
6624 
6625     // Reset bundle info
6626     rc = setBundleInfo();
6627     if (rc < 0) {
6628         LOGE("setBundleInfo failed %d", rc);
6629         return rc;
6630     }
6631 
6632     // Mutex Lock
6633     pthread_mutex_lock(&mMutex);
6634 
6635     // Unblock process_capture_request
6636     mPendingLiveRequest = 0;
6637     pthread_cond_signal(&mRequestCond);
6638 
6639     rc = notifyErrorForPendingRequests();
6640     if (rc < 0) {
6641         LOGE("notifyErrorForPendingRequests failed");
6642         pthread_mutex_unlock(&mMutex);
6643         return rc;
6644     }
6645 
6646     mFlush = false;
6647 
6648     // Start the Streams/Channels
6649     if (restartChannels) {
6650         rc = startAllChannels();
6651         if (rc < 0) {
6652             LOGE("startAllChannels failed");
6653             pthread_mutex_unlock(&mMutex);
6654             return rc;
6655         }
6656         if (mChannelHandle) {
6657             // Configure modules for stream on.
6658             rc = startChannelLocked();
6659             if (rc < 0) {
6660                 LOGE("startChannelLocked failed");
6661                 pthread_mutex_unlock(&mMutex);
6662                 return rc;
6663             }
6664         }
6665         mFirstPreviewIntentSeen = false;
6666     }
6667     pthread_mutex_unlock(&mMutex);
6668 
6669     return 0;
6670 }
6671 
/*===========================================================================
 * FUNCTION   : flushPerf
 *
 * DESCRIPTION: This is the performance optimization version of flush that does
 *              not use stream off, rather flushes the system
 *
 * PARAMETERS : None
 *
 * RETURN     : 0 : success
 *              -EINVAL: input is malformed (device is not valid)
 *              -ENODEV: if the device has encountered a serious error
 *==========================================================================*/
int QCamera3HardwareInterface::flushPerf()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
    int32_t rc = 0;
    struct timespec timeout;
    bool timed_wait = false;

    pthread_mutex_lock(&mMutex);
    mFlushPerf = true;
    // Snapshot the number of outstanding buffers; the wait loop below blocks
    // until the backend has returned all of them.
    mPendingBuffersMap.numPendingBufsAtFlush =
        mPendingBuffersMap.get_num_overall_buffers();
    LOGD("Calling flush. Wait for %d buffers to return",
        mPendingBuffersMap.numPendingBufsAtFlush);

    /* send the flush event to the backend */
    rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
    if (rc < 0) {
        LOGE("Error in flush: IOCTL failure");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    // Nothing outstanding: the flush is already complete.
    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
        LOGD("No pending buffers in HAL, return flush");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    /* wait on a signal that buffers were received */
    // NOTE(review): a monotonic absolute deadline is computed here, which
    // presumes mBuffersCond was initialized with a CLOCK_MONOTONIC condattr --
    // confirm at the condvar init site. If the clock read fails we fall back
    // to an untimed (potentially unbounded) wait.
    rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
    if (rc < 0) {
        LOGE("Error reading the real time clock, cannot use timed wait");
    } else {
        timeout.tv_sec += FLUSH_TIMEOUT;
        timed_wait = true;
    }

    //Block on conditional variable
    // mMutex is released while waiting; the buffer-return path decrements
    // numPendingBufsAtFlush and signals mBuffersCond.
    while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
        LOGD("Waiting on mBuffersCond");
        if (!timed_wait) {
            rc = pthread_cond_wait(&mBuffersCond, &mMutex);
            if (rc != 0) {
                 LOGE("pthread_cond_wait failed due to rc = %s",
                        strerror(rc));
                 break;
            }
        } else {
            // pthread_cond_timedwait returns ETIMEDOUT (non-zero) once the
            // absolute deadline passes; treated as a fatal device error below.
            rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
            if (rc != 0) {
                LOGE("pthread_cond_timedwait failed due to rc = %s",
                            strerror(rc));
                break;
            }
        }
    }
    // Wait failed or timed out before all buffers came back.
    if (rc != 0) {
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    LOGD("Received buffers, now safe to return them");

    //make sure the channels handle flush
    //currently only required for the picture channel to release snapshot resources
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel) {
            rc = channel->flush();
            if (rc) {
               LOGE("Flushing the channels failed with error %d", rc);
               // even though the channel flush failed we need to continue and
               // return the buffers we have to the framework, however the return
               // value will be an error
               rc = -ENODEV;
            }
        }
    }

    /* notify the frameworks and send errored results */
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    //unblock process_capture_request
    mPendingLiveRequest = 0;
    unblockRequestIfNecessary();

    mFlushPerf = false;
    pthread_mutex_unlock(&mMutex);
    LOGD ("Flush Operation complete. rc = %d", rc);
    return rc;
}
6785 
6786 /*===========================================================================
6787  * FUNCTION   : handleCameraDeviceError
6788  *
6789  * DESCRIPTION: This function calls internal flush and notifies the error to
6790  *              framework and updates the state variable.
6791  *
6792  * PARAMETERS :
6793  *   @stopChannelImmediately : stop channels immediately without waiting for
6794  *                             frame boundary.
6795  *
6796  * RETURN     : NO_ERROR on Success
6797  *              Error code on failure
6798  *==========================================================================*/
int32_t QCamera3HardwareInterface::handleCameraDeviceError(bool stopChannelImmediately)
{
    int32_t rc = NO_ERROR;

    {
        // mFlushLock is held across the entire check -> flush -> DEINIT
        // sequence so this recovery path cannot interleave with another
        // flush in progress.
        Mutex::Autolock lock(mFlushLock);
        pthread_mutex_lock(&mMutex);
        if (mState != ERROR) {
            //if mState != ERROR, nothing to be done
            pthread_mutex_unlock(&mMutex);
            return NO_ERROR;
        }
        pthread_mutex_unlock(&mMutex);
        // NOTE(review): mMutex is dropped before calling flush(), so there is
        // a window where mState is not protected by mMutex here -- presumably
        // flush() takes mMutex itself and mFlushLock prevents harmful
        // interleaving; confirm against the flush() implementation.

        // Internal flush (no channel restart); on failure we still proceed
        // to mark the device DEINIT and notify the framework below.
        rc = flush(false /* restart channels */, stopChannelImmediately);
        if (NO_ERROR != rc) {
            LOGE("internal flush to handle mState = ERROR failed");
        }

        pthread_mutex_lock(&mMutex);
        mState = DEINIT;
        pthread_mutex_unlock(&mMutex);
    }

    // Tell the framework the whole device failed: CAMERA3_MSG_ERROR_DEVICE
    // with no associated stream and frame_number 0.
    camera3_notify_msg_t notify_msg;
    memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
    notify_msg.type = CAMERA3_MSG_ERROR;
    notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
    notify_msg.message.error.error_stream = NULL;
    notify_msg.message.error.frame_number = 0;
    orchestrateNotify(&notify_msg);

    return rc;
}
6833 
6834 /*===========================================================================
6835  * FUNCTION   : captureResultCb
6836  *
6837  * DESCRIPTION: Callback handler for all capture result
6838  *              (streams, as well as metadata)
6839  *
6840  * PARAMETERS :
6841  *   @metadata : metadata information
6842  *   @buffer   : actual gralloc buffer to be returned to frameworks.
6843  *               NULL if metadata.
6844  *
6845  * RETURN     : NONE
6846  *==========================================================================*/
captureResultCb(mm_camera_super_buf_t * metadata_buf,camera3_stream_buffer_t * buffer,uint32_t frame_number,bool isInputBuffer)6847 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6848                 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6849 {
6850     if (metadata_buf) {
6851         pthread_mutex_lock(&mMutex);
6852         uint8_t batchSize = mBatchSize;
6853         pthread_mutex_unlock(&mMutex);
6854         if (batchSize) {
6855             handleBatchMetadata(metadata_buf,
6856                     true /* free_and_bufdone_meta_buf */);
6857         } else { /* mBatchSize = 0 */
6858             hdrPlusPerfLock(metadata_buf);
6859             pthread_mutex_lock(&mMutex);
6860             handleMetadataWithLock(metadata_buf,
6861                     true /* free_and_bufdone_meta_buf */,
6862                     true /* last urgent frame of batch metadata */,
6863                     true /* last frame of batch metadata */,
6864                     NULL);
6865             pthread_mutex_unlock(&mMutex);
6866         }
6867     } else if (isInputBuffer) {
6868         pthread_mutex_lock(&mMutex);
6869         handleInputBufferWithLock(frame_number);
6870         pthread_mutex_unlock(&mMutex);
6871     } else {
6872         pthread_mutex_lock(&mMutex);
6873         handleBufferWithLock(buffer, frame_number);
6874         pthread_mutex_unlock(&mMutex);
6875     }
6876     return;
6877 }
6878 
6879 /*===========================================================================
6880  * FUNCTION   : getReprocessibleOutputStreamId
6881  *
6882  * DESCRIPTION: Get source output stream id for the input reprocess stream
6883  *              based on size and format, which would be the largest
6884  *              output stream if an input stream exists.
6885  *
6886  * PARAMETERS :
6887  *   @id      : return the stream id if found
6888  *
6889  * RETURN     : int32_t type of status
6890  *              NO_ERROR  -- success
6891  *              none-zero failure code
6892  *==========================================================================*/
getReprocessibleOutputStreamId(uint32_t & id)6893 int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6894 {
6895     /* check if any output or bidirectional stream with the same size and format
6896        and return that stream */
6897     if ((mInputStreamInfo.dim.width > 0) &&
6898             (mInputStreamInfo.dim.height > 0)) {
6899         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6900                 it != mStreamInfo.end(); it++) {
6901 
6902             camera3_stream_t *stream = (*it)->stream;
6903             if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6904                     (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6905                     (stream->format == mInputStreamInfo.format)) {
6906                 // Usage flag for an input stream and the source output stream
6907                 // may be different.
6908                 LOGD("Found reprocessible output stream! %p", *it);
6909                 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6910                          stream->usage, mInputStreamInfo.usage);
6911 
6912                 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6913                 if (channel != NULL && channel->mStreams[0]) {
6914                     id = channel->mStreams[0]->getMyServerID();
6915                     return NO_ERROR;
6916                 }
6917             }
6918         }
6919     } else {
6920         LOGD("No input stream, so no reprocessible output stream");
6921     }
6922     return NAME_NOT_FOUND;
6923 }
6924 
6925 /*===========================================================================
6926  * FUNCTION   : lookupFwkName
6927  *
6928  * DESCRIPTION: In case the enum is not same in fwk and backend
6929  *              make sure the parameter is correctly propogated
6930  *
6931  * PARAMETERS  :
6932  *   @arr      : map between the two enums
6933  *   @len      : len of the map
6934  *   @hal_name : name of the hal_parm to map
6935  *
6936  * RETURN     : int type of status
6937  *              fwk_name  -- success
6938  *              none-zero failure code
6939  *==========================================================================*/
lookupFwkName(const mapType * arr,size_t len,halType hal_name)6940 template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6941         size_t len, halType hal_name)
6942 {
6943 
6944     for (size_t i = 0; i < len; i++) {
6945         if (arr[i].hal_name == hal_name) {
6946             return arr[i].fwk_name;
6947         }
6948     }
6949 
6950     /* Not able to find matching framework type is not necessarily
6951      * an error case. This happens when mm-camera supports more attributes
6952      * than the frameworks do */
6953     LOGH("Cannot find matching framework type");
6954     return NAME_NOT_FOUND;
6955 }
6956 
6957 /*===========================================================================
6958  * FUNCTION   : lookupHalName
6959  *
6960  * DESCRIPTION: In case the enum is not same in fwk and backend
6961  *              make sure the parameter is correctly propogated
6962  *
6963  * PARAMETERS  :
6964  *   @arr      : map between the two enums
6965  *   @len      : len of the map
6966  *   @fwk_name : name of the hal_parm to map
6967  *
6968  * RETURN     : int32_t type of status
6969  *              hal_name  -- success
6970  *              none-zero failure code
6971  *==========================================================================*/
lookupHalName(const mapType * arr,size_t len,fwkType fwk_name)6972 template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6973         size_t len, fwkType fwk_name)
6974 {
6975     for (size_t i = 0; i < len; i++) {
6976         if (arr[i].fwk_name == fwk_name) {
6977             return arr[i].hal_name;
6978         }
6979     }
6980 
6981     LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6982     return NAME_NOT_FOUND;
6983 }
6984 
6985 /*===========================================================================
6986  * FUNCTION   : lookupProp
6987  *
6988  * DESCRIPTION: lookup a value by its name
6989  *
6990  * PARAMETERS :
6991  *   @arr     : map between the two enums
6992  *   @len     : size of the map
6993  *   @name    : name to be looked up
6994  *
6995  * RETURN     : Value if found
6996  *              CAM_CDS_MODE_MAX if not found
6997  *==========================================================================*/
lookupProp(const mapType * arr,size_t len,const char * name)6998 template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6999         size_t len, const char *name)
7000 {
7001     if (name) {
7002         for (size_t i = 0; i < len; i++) {
7003             if (!strcmp(arr[i].desc, name)) {
7004                 return arr[i].val;
7005             }
7006         }
7007     }
7008     return CAM_CDS_MODE_MAX;
7009 }
7010 
7011 /*===========================================================================
7012  *
7013  * DESCRIPTION:
7014  *
7015  * PARAMETERS :
7016  *   @metadata : metadata information from callback
7017  *   @pendingRequest: pending request for this metadata
7018  *   @pprocDone: whether internal offline postprocsesing is done
7019  *   @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
7020  *                         in a batch. Always true for non-batch mode.
7021  *
7022  * RETURN     : camera_metadata_t*
7023  *              metadata in a format specified by fwk
7024  *==========================================================================*/
7025 camera_metadata_t*
translateFromHalMetadata(metadata_buffer_t * metadata,const PendingRequestInfo & pendingRequest,bool pprocDone,bool lastMetadataInBatch,const bool * enableZsl)7026 QCamera3HardwareInterface::translateFromHalMetadata(
7027                                  metadata_buffer_t *metadata,
7028                                  const PendingRequestInfo& pendingRequest,
7029                                  bool pprocDone,
7030                                  bool lastMetadataInBatch,
7031                                  const bool *enableZsl)
7032 {
7033     CameraMetadata camMetadata;
7034     camera_metadata_t *resultMetadata;
7035 
7036     if (!lastMetadataInBatch) {
7037         /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
7038          * Timestamp is needed because it's used for shutter notify calculation.
7039          * */
7040         camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
7041         resultMetadata = camMetadata.release();
7042         return resultMetadata;
7043     }
7044 
7045     if (pendingRequest.jpegMetadata.entryCount())
7046         camMetadata.append(pendingRequest.jpegMetadata);
7047 
7048     camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
7049     camMetadata.update(ANDROID_REQUEST_ID, &pendingRequest.request_id, 1);
7050     camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
7051     camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
7052     camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
7053     camMetadata.update(NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE, &pendingRequest.motion_detection_enable, 1);
7054     if (mBatchSize == 0) {
7055         // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
7056         camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
7057     }
7058 
7059     // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
7060     // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
7061     if (mBatchSize == 0 && pendingRequest.DevCamDebug_meta_enable != 0) {
7062         // DevCamDebug metadata translateFromHalMetadata AF
7063         IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
7064                 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
7065             int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
7066             camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
7067         }
7068         IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
7069                 CAM_INTF_META_AF_TOF_CONFIDENCE, metadata) {
7070             int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
7071             camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
7072         }
7073         IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
7074                 CAM_INTF_META_AF_TOF_DISTANCE, metadata) {
7075             int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
7076             camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
7077         }
7078         IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
7079                 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
7080             int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
7081             camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
7082         }
7083         IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
7084                 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
7085             int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
7086             camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
7087         }
7088         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
7089                 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
7090             int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
7091                 *DevCamDebug_af_monitor_pdaf_target_pos;
7092             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
7093                 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
7094         }
7095         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
7096                 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
7097             int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
7098                 *DevCamDebug_af_monitor_pdaf_confidence;
7099             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
7100                 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
7101         }
7102         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
7103                 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
7104             int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
7105             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
7106                 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
7107         }
7108         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
7109                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
7110             int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
7111                 *DevCamDebug_af_monitor_tof_target_pos;
7112             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
7113                 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
7114         }
7115         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
7116                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
7117             int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
7118                 *DevCamDebug_af_monitor_tof_confidence;
7119             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
7120                 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
7121         }
7122         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
7123                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
7124             int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
7125             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
7126                 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
7127         }
7128         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
7129                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
7130             int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
7131             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
7132                 &fwk_DevCamDebug_af_monitor_type_select, 1);
7133         }
7134         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
7135                 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
7136             int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
7137             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
7138                 &fwk_DevCamDebug_af_monitor_refocus, 1);
7139         }
7140         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
7141                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
7142             int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
7143             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
7144                 &fwk_DevCamDebug_af_monitor_target_pos, 1);
7145         }
7146         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
7147                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
7148             int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
7149                 *DevCamDebug_af_search_pdaf_target_pos;
7150             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
7151                 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
7152         }
7153         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
7154                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
7155             int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
7156             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
7157                 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
7158         }
7159         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
7160                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
7161             int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
7162             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
7163                 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
7164         }
7165         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
7166                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
7167             int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
7168             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
7169                 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
7170         }
7171         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
7172                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
7173             int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
7174             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
7175                 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
7176         }
7177         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
7178                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
7179             int32_t fwk_DevCamDebug_af_search_tof_target_pos =
7180                 *DevCamDebug_af_search_tof_target_pos;
7181             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
7182                 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
7183         }
7184         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
7185                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
7186             int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
7187             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
7188                 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
7189         }
7190         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
7191                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
7192             int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
7193             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
7194                 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
7195         }
7196         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
7197                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
7198             int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
7199             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
7200                 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
7201         }
7202         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
7203                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
7204             int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
7205             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
7206                 &fwk_DevCamDebug_af_search_tof_confidence, 1);
7207         }
7208         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
7209                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
7210             int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
7211             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
7212                 &fwk_DevCamDebug_af_search_type_select, 1);
7213         }
7214         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
7215                 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
7216             int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
7217             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
7218                 &fwk_DevCamDebug_af_search_next_pos, 1);
7219         }
7220         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
7221                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
7222             int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
7223             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
7224                 &fwk_DevCamDebug_af_search_target_pos, 1);
7225         }
7226         // DevCamDebug metadata translateFromHalMetadata AEC
7227         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
7228                 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
7229             int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
7230             camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
7231     }
7232         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
7233                 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
7234             int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
7235             camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
7236         }
7237         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
7238                 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
7239             int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
7240             camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
7241         }
7242         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
7243                 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
7244             int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
7245             camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
7246         }
7247         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
7248                 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
7249             int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
7250             camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
7251         }
7252         IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
7253                 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
7254             float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
7255             camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
7256         }
7257         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
7258                 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
7259             int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
7260             camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
7261         }
7262         IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
7263                 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
7264             float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
7265             camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
7266         }
7267         // DevCamDebug metadata translateFromHalMetadata zzHDR
7268         IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
7269                 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
7270             float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
7271             camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
7272         }
7273         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
7274                 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
7275             int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
7276             camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
7277         }
7278         IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
7279                 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
7280             float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
7281             camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
7282         }
7283         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
7284                 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
7285             int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
7286             camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
7287         }
7288         IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
7289                 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
7290             float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
7291                 *DevCamDebug_aec_hdr_sensitivity_ratio;
7292             camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
7293                                &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
7294         }
7295         IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
7296                 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
7297             float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
7298             camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
7299                                &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
7300         }
7301         // DevCamDebug metadata translateFromHalMetadata ADRC
7302         IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
7303                 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
7304             float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
7305             camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
7306                                &fwk_DevCamDebug_aec_total_drc_gain, 1);
7307         }
7308         IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
7309                 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
7310             float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
7311             camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
7312                                &fwk_DevCamDebug_aec_color_drc_gain, 1);
7313         }
7314         IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
7315                 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
7316             float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
7317             camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
7318         }
7319         IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
7320                 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
7321             float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
7322             camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
7323         }
7324         IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
7325                 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
7326             float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
7327             camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
7328         }
7329         IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
7330                 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
7331             float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
7332             camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
7333         }
7334         // DevCamDebug metadata translateFromHalMetadata AEC MOTION
7335         IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
7336                 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
7337             float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
7338             camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
7339                                &fwk_DevCamDebug_aec_camera_motion_dx, 1);
7340         }
7341         IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
7342                 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
            float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
            camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
                               &fwk_DevCamDebug_aec_camera_motion_dy, 1);
        }
        IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
                CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
            float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
            camMetadata.update(DEVCAMDEBUG_AEC_SUBJECT_MOTION,
                               &fwk_DevCamDebug_aec_subject_motion, 1);
        }
        // DevCamDebug metadata translateFromHalMetadata AWB
        // Each entry below copies one AWB debug value from the HAL metadata
        // into the framework result metadata when the HAL provided it:
        // per-channel white-balance gains (R/G/B), the correlated color
        // temperature (CCT) and the AWB decision code.
        IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
                CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
            float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
            camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
        }
        IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
                CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
            float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
            camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
        }
        IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
                CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
            float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
            camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
                CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
            int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
            camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
                CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
            int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
            camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
        }
    }
    // atrace_end(ATRACE_TAG_ALWAYS);

    // Frame number: widened from the HAL's uint32_t to the int64_t type the
    // framework expects for ANDROID_SYNC_FRAME_NUMBER.
    IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
        int64_t fwk_frame_number = *frame_number;
        camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
    }

    // AE target FPS range: the HAL reports floats; the framework tag takes a
    // pair of int32, so the min/max are truncated (not rounded) here.
    IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
        int32_t fps_range[2];
        fps_range[0] = (int32_t)float_range->min_fps;
        fps_range[1] = (int32_t)float_range->max_fps;
        camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
                                      fps_range, 2);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
             fps_range[0], fps_range[1]);
    }

    // Exposure compensation passes through unchanged (already int32_t).
    IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
    }
    // Scene mode: map the HAL best-shot enum to the framework scene-mode enum
    // via SCENE_MODES_MAP; unknown values (NAME_NOT_FOUND) are silently dropped.
    IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
        int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
                METADATA_MAP_SIZE(SCENE_MODES_MAP),
                *sceneMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkSceneMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
                     fwkSceneMode);
        }
    }

    // AE / AWB lock flags: HAL uint32_t narrowed to the framework's uint8_t.
    IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
        uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
        camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
    }

    IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
        uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
        camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
    }

    IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
        uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
        camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
    }

    // Edge mode: only the mode field of the HAL edge-application struct is
    // reported to the framework.
    IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
            CAM_INTF_META_EDGE_MODE, metadata) {
        camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
    }

    IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
        uint8_t fwk_flashPower = (uint8_t) *flashPower;
        camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
    }

    IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
        camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
    }

    // Flash state: negative HAL values are treated as invalid and skipped.
    // Devices without a flash unit always report STATE_UNAVAILABLE.
    IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
        if (0 <= *flashState) {
            uint8_t fwk_flashState = (uint8_t) *flashState;
            if (!gCamCapability[mCameraId]->flash_available) {
                fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
            }
            camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
        }
    }

    // Flash mode: mapped through FLASH_MODES_MAP; unknown values are dropped.
    IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
        int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwk_flashMode = (uint8_t)val;
            camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
        }
    }
7459 
    IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
        uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
        camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
    }

    // Lens characteristics: aperture, ND filter density and focal length are
    // passed through as-is (float).
    IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
        camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
    }

    IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
        camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
    }

    IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
        camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
    }

    IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
        uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
        camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
    }

    IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
        uint8_t fwk_videoStab = (uint8_t) *videoStab;
        LOGD("fwk_videoStab = %d", fwk_videoStab);
        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
    } else {
        // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
        // and so hardcoding the Video Stab result to OFF mode.
        uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
        LOGD("EIS result default to OFF mode");
    }

    IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
        uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
        camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
    }

    IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
        camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
    }
7502 
    // Dynamic black level: reorder the HAL's per-channel black level into the
    // framework's CFA (RGGB) channel order before reporting it.
    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
        CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
        float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];

        adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
              gCamCapability[mCameraId]->color_arrangement);

        LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
          blackLevelAppliedPattern->cam_black_level[0],
          blackLevelAppliedPattern->cam_black_level[1],
          blackLevelAppliedPattern->cam_black_level[2],
          blackLevelAppliedPattern->cam_black_level[3]);
        camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
                BLACK_LEVEL_PATTERN_CNT);

#ifndef USE_HAL_3_3
        // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
        // Need convert the internal 14 bit depth to sensor 10 bit sensor raw
        // depth space.
        // Dividing by 16 (= 2^4) scales the 14-bit values down to 10-bit range.
        fwk_blackLevelInd[0] /= 16.0;
        fwk_blackLevelInd[1] /= 16.0;
        fwk_blackLevelInd[2] /= 16.0;
        fwk_blackLevelInd[3] /= 16.0;
        camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
                BLACK_LEVEL_PATTERN_CNT);
#endif
    }

#ifndef USE_HAL_3_3
    // Fixed whitelevel is used by ISP/Sensor
    camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
            &gCamCapability[mCameraId]->white_level, 1);
#endif

    // Cache the latest EIS crop info and convert its deltas from the sensor
    // output coordinate system to the active-array coordinate system.
    IF_META_AVAILABLE(cam_eis_crop_info_t, eisCropInfo,
            CAM_INTF_META_EIS_CROP_INFO, metadata) {
        mLastEISCropInfo = *eisCropInfo;

        mCropRegionMapper.toActiveArray(mLastEISCropInfo.delta_x, mLastEISCropInfo.delta_y,
                mLastEISCropInfo.delta_width, mLastEISCropInfo.delta_height);
    }
7544 
    IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
            CAM_INTF_META_SCALER_CROP_REGION, metadata) {
        int32_t scalerCropRegion[4];
        scalerCropRegion[0] = hScalerCropRegion->left;
        scalerCropRegion[1] = hScalerCropRegion->top;
        scalerCropRegion[2] = hScalerCropRegion->width;
        scalerCropRegion[3] = hScalerCropRegion->height;

        // Adjust crop region from sensor output coordinate system to active
        // array coordinate system.
        mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
                scalerCropRegion[2], scalerCropRegion[3]);

        camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
    }

    IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
        LOGD("sensorExpTime = %lld", *sensorExpTime);
        camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
    }

    IF_META_AVAILABLE(float, expTimeBoost, CAM_INTF_META_EXP_TIME_BOOST, metadata) {
        LOGD("expTimeBoost = %f", *expTimeBoost);
        camMetadata.update(NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST, expTimeBoost, 1);
    }

    IF_META_AVAILABLE(int64_t, sensorFameDuration,
            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
        LOGD("sensorFameDuration = %lld", *sensorFameDuration);
        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
    }

    IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
            CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
        LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
        camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
                sensorRollingShutterSkew, 1);
    }

    // Sensor sensitivity, plus the derived noise profile: the model yields one
    // (S, O) pair which is replicated for every color channel, interleaved as
    // [S0, O0, S1, O1, ...] in the reported array.
    IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
        LOGD("sensorSensitivity = %d", *sensorSensitivity);
        camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);

        //calculate the noise profile based on sensitivity
        double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
        double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
        double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
        for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
            noise_profile[i]   = noise_profile_S;
            noise_profile[i+1] = noise_profile_O;
        }
        LOGD("noise model entry (S, O) is (%f, %f)",
                noise_profile_S, noise_profile_O);
        camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
                (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
    }

#ifndef USE_HAL_3_3
    // Post-RAW sensitivity boost: start from the ISP gain (default 100) and,
    // when available, scale it by the post-stats sensitivity factor.
    int32_t fwk_ispSensitivity = 100;
    IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
        fwk_ispSensitivity = (int32_t) *ispSensitivity;
    }
    IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
        fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
    }
    camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
#endif
7612 
    IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
        uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
        camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
    }

    // Face detection: translate the HAL face data into the framework tags.
    // Scores and rectangles are always reported when FD is on; landmarks and
    // face IDs are added only in FULL mode; QTI-specific blink/smile/gaze
    // stats are reported whenever the HAL supplies them.
    IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
        int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
                *faceDetectMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwk_faceDetectMode = (uint8_t)val;
            camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);

            if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
                IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
                        CAM_INTF_META_FACE_DETECTION, metadata) {
                    // Clamp the face count to MAX_ROI, the capacity of the
                    // fixed-size arrays below.
                    uint8_t numFaces = MIN(
                            faceDetectionInfo->num_faces_detected, MAX_ROI);
                    int32_t faceIds[MAX_ROI];
                    uint8_t faceScores[MAX_ROI];
                    int32_t faceRectangles[MAX_ROI * 4];
                    int32_t faceLandmarks[MAX_ROI * 6];
                    // j indexes faceRectangles (4 ints/face),
                    // k indexes faceLandmarks (6 ints/face).
                    size_t j = 0, k = 0;

                    for (size_t i = 0; i < numFaces; i++) {
                        faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
                        // Adjust crop region from sensor output coordinate system to active
                        // array coordinate system.
                        cam_rect_t rect = faceDetectionInfo->faces[i].face_boundary;
                        mCropRegionMapper.toActiveArray(rect.left, rect.top,
                                rect.width, rect.height);

                        convertToRegions(rect, faceRectangles+j, -1);

                        LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
                                "bottom-right (%d, %d)",
                                faceDetectionInfo->frame_id, i,
                                faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
                                faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);

                        j+= 4;
                    }
                    // No faces: zero the buffers so the framework receives
                    // well-defined (empty) data rather than stack garbage.
                    if (numFaces <= 0) {
                        memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
                        memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
                        memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
                        memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
                    }

                    camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
                            numFaces);
                    camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
                            faceRectangles, numFaces * 4U);
                    if (fwk_faceDetectMode ==
                            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
                        IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
                                CAM_INTF_META_FACE_LANDMARK, metadata) {

                            for (size_t i = 0; i < numFaces; i++) {
                                cam_face_landmarks_info_t face_landmarks = landmarks->face_landmarks[i];
                                // Map the co-ordinate sensor output coordinate system to active
                                // array coordinate system.
                                mCropRegionMapper.toActiveArray(
                                        face_landmarks.left_eye_center.x,
                                        face_landmarks.left_eye_center.y);
                                mCropRegionMapper.toActiveArray(
                                        face_landmarks.right_eye_center.x,
                                        face_landmarks.right_eye_center.y);
                                mCropRegionMapper.toActiveArray(
                                        face_landmarks.mouth_center.x,
                                        face_landmarks.mouth_center.y);

                                convertLandmarks(face_landmarks, faceLandmarks+k);

                                LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
                                        "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
                                        faceDetectionInfo->frame_id, i,
                                        faceLandmarks[k + LEFT_EYE_X],
                                        faceLandmarks[k + LEFT_EYE_Y],
                                        faceLandmarks[k + RIGHT_EYE_X],
                                        faceLandmarks[k + RIGHT_EYE_Y],
                                        faceLandmarks[k + MOUTH_X],
                                        faceLandmarks[k + MOUTH_Y]);

                                k+= TOTAL_LANDMARK_INDICES;
                            }
                        } else {
                            // No landmark metadata: fill invalid markers so
                            // the array length still matches numFaces.
                            for (size_t i = 0; i < numFaces; i++) {
                                setInvalidLandmarks(faceLandmarks+k);
                                k+= TOTAL_LANDMARK_INDICES;
                            }
                        }

                        for (size_t i = 0; i < numFaces; i++) {
                            faceIds[i] = faceDetectionInfo->faces[i].face_id;

                            LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
                                    faceDetectionInfo->frame_id, i, faceIds[i]);
                        }

                        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
                        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
                                faceLandmarks, numFaces * 6U);
                    }
                    // QTI vendor tags: blink detection per face (detected flag
                    // plus left/right-eye blink degree).
                    IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
                            CAM_INTF_META_FACE_BLINK, metadata) {
                        uint8_t detected[MAX_ROI];
                        uint8_t degree[MAX_ROI * 2];
                        for (size_t i = 0; i < numFaces; i++) {
                            detected[i] = blinks->blink[i].blink_detected;
                            degree[2 * i] = blinks->blink[i].left_blink;
                            degree[2 * i + 1] = blinks->blink[i].right_blink;

                            LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
                                    "blink_detected=%d, leye_blink=%d, reye_blink=%d",
                                    faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
                                    degree[2 * i + 1]);
                        }
                        camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
                                detected, numFaces);
                        camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
                                degree, numFaces * 2);
                    }
                    // Smile degree and confidence per face.
                    IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
                            CAM_INTF_META_FACE_SMILE, metadata) {
                        uint8_t degree[MAX_ROI];
                        uint8_t confidence[MAX_ROI];
                        for (size_t i = 0; i < numFaces; i++) {
                            degree[i] = smiles->smile[i].smile_degree;
                            confidence[i] = smiles->smile[i].smile_confidence;

                            LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
                                    "smile_degree=%d, smile_score=%d",
                                    faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
                        }
                        camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
                                degree, numFaces);
                        camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
                                confidence, numFaces);
                    }
                    // Gaze angle, 3-axis direction and 2-axis degree per face.
                    IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
                            CAM_INTF_META_FACE_GAZE, metadata) {
                        int8_t angle[MAX_ROI];
                        int32_t direction[MAX_ROI * 3];
                        int8_t degree[MAX_ROI * 2];
                        for (size_t i = 0; i < numFaces; i++) {
                            angle[i] = gazes->gaze[i].gaze_angle;
                            direction[3 * i] = gazes->gaze[i].updown_dir;
                            direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
                            direction[3 * i + 2] = gazes->gaze[i].roll_dir;
                            degree[2 * i] = gazes->gaze[i].left_right_gaze;
                            degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;

                            LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
                                    "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
                                    "left_right_gaze=%d, top_bottom_gaze=%d",
                                    faceDetectionInfo->frame_id, i, angle[i],
                                    direction[3 * i], direction[3 * i + 1],
                                    direction[3 * i + 2],
                                    degree[2 * i], degree[2 * i + 1]);
                        }
                        camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
                                (uint8_t *)angle, numFaces);
                        camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
                                direction, numFaces * 3);
                        camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
                                (uint8_t *)degree, numFaces * 2);
                    }
                }
            }
        }
    }
7784 
7785     IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7786         uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
7787         int32_t histogramBins = 0;
7788         camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
7789         camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
7790 
7791         IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7792             histogramBins = *histBins;
7793             camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7794         }
7795 
7796         if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
7797             IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7798                 // process histogram statistics info
7799                 int32_t* histogramData = NULL;
7800 
7801                 switch (stats_data->type) {
7802                 case CAM_HISTOGRAM_TYPE_BAYER:
7803                     switch (stats_data->bayer_stats.data_type) {
7804                         case CAM_STATS_CHANNEL_GR:
7805                           histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7806                           break;
7807                         case CAM_STATS_CHANNEL_GB:
7808                           histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7809                           break;
7810                         case CAM_STATS_CHANNEL_B:
7811                           histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7812                           break;
7813                         case CAM_STATS_CHANNEL_Y:
7814                         case CAM_STATS_CHANNEL_ALL:
7815                         case CAM_STATS_CHANNEL_R:
7816                         default:
7817                           histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7818                           break;
7819                     }
7820                     break;
7821                 case CAM_HISTOGRAM_TYPE_YUV:
7822                     histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
7823                     break;
7824                 }
7825 
7826                 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
7827             }
7828         }
7829     }
7830 
    IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
            CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
        uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
    }

    // Sharpness map: reported at the full fixed map size (3 values per cell).
    IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
            CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
                CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
    }

    // Lens shading map: dimensions come from the static capability, clamped
    // to the HAL's maximum; 4 gain values (one per CFA channel) per cell.
    IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
            CAM_INTF_META_LENS_SHADING_MAP, metadata) {
        size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
                CAM_MAX_SHADING_MAP_HEIGHT);
        size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
                CAM_MAX_SHADING_MAP_WIDTH);
        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
                lensShadingMap->lens_shading, 4U * map_width * map_height);
    }

    IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
        uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
        camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
    }

    // Tonemap curves: the point count is clamped to the HAL maximum, then each
    // channel curve is reported as (in, out) pairs — hence count * 2 floats.
    IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
        //Populate CAM_INTF_META_TONEMAP_CURVES
        /* ch0 = G, ch 1 = B, ch 2 = R*/
        if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
                     tonemap->tonemap_points_cnt,
                    CAM_MAX_TONEMAP_CURVE_SIZE);
            tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
        }

        camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
                        &tonemap->curves[0].tonemap_points[0][0],
                        tonemap->tonemap_points_cnt * 2);

        camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
                        &tonemap->curves[1].tonemap_points[0][0],
                        tonemap->tonemap_points_cnt * 2);

        camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
                        &tonemap->curves[2].tonemap_points[0][0],
                        tonemap->tonemap_points_cnt * 2);
    }

    IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
            CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
        camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
                CC_GAIN_MAX);
    }

    IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
            CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
        camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
                (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
                CC_MATRIX_COLS * CC_MATRIX_ROWS);
    }

    // Profile tone curve: same clamp-then-report pattern as the RGB tonemap
    // curves above, for the single sensor profile curve.
    IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
            CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
        if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
                     toneCurve->tonemap_points_cnt,
                    CAM_MAX_TONEMAP_CURVE_SIZE);
            toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
        }
        camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
                (float*)toneCurve->curve.tonemap_points,
                toneCurve->tonemap_points_cnt * 2);
    }
7906 
    IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
            CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
                predColorCorrectionGains->gains, 4);
    }

    IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
            CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
                (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
                CC_MATRIX_ROWS * CC_MATRIX_COLS);
    }

    // GR/GB white-balance split from OTP data.
    IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
        camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
    }

    IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
        uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
        camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
    }

    IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
        uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
        camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
    }

    // Effect mode: mapped via EFFECT_MODES_MAP; unknown values are dropped.
    IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
                *effectMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwk_effectMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
        }
    }

    // Test pattern: report the mapped mode, then the 4 per-channel values in
    // R, Gr/Gb, B order. The green channels swap position depending on the
    // sensor's CFA arrangement so the output is always framework (RGGB-style)
    // order.
    IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
            CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
        int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
                METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
        if (NAME_NOT_FOUND != fwk_testPatternMode) {
            camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
        }
        int32_t fwk_testPatternData[4];
        fwk_testPatternData[0] = testPatternData->r;
        fwk_testPatternData[3] = testPatternData->b;
        switch (gCamCapability[mCameraId]->color_arrangement) {
        case CAM_FILTER_ARRANGEMENT_RGGB:
        case CAM_FILTER_ARRANGEMENT_GRBG:
            fwk_testPatternData[1] = testPatternData->gr;
            fwk_testPatternData[2] = testPatternData->gb;
            break;
        case CAM_FILTER_ARRANGEMENT_GBRG:
        case CAM_FILTER_ARRANGEMENT_BGGR:
            fwk_testPatternData[2] = testPatternData->gr;
            fwk_testPatternData[1] = testPatternData->gb;
            break;
        default:
            // NOTE(review): elements 1 and 2 are left uninitialized on this
            // path before being reported below — confirm upstream that only
            // the four handled arrangements can occur.
            LOGE("color arrangement %d is not supported",
                gCamCapability[mCameraId]->color_arrangement);
            break;
        }
        camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
    }
7971 
7972     IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7973         camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7974     }
7975 
7976     IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7977         String8 str((const char *)gps_methods);
7978         camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7979     }
7980 
7981     IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7982         camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7983     }
7984 
7985     IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7986         camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7987     }
7988 
7989     IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7990         uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7991         camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7992     }
7993 
7994     IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7995         uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7996         camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7997     }
7998 
7999     IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
8000         int32_t fwk_thumb_size[2];
8001         fwk_thumb_size[0] = thumb_size->width;
8002         fwk_thumb_size[1] = thumb_size->height;
8003         camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
8004     }
8005 
8006     // Skip reprocess metadata if there is no input stream.
8007     if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
8008         IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
8009             camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
8010                     privateData,
8011                     MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
8012         }
8013     }
8014 
8015     IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
8016         camMetadata.update(QCAMERA3_EXPOSURE_METER,
8017                 meteringMode, 1);
8018     }
8019 
8020     IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
8021             CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
8022         LOGD("hdr_scene_data: %d %f\n",
8023                 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
8024         uint8_t isHdr = hdr_scene_data->is_hdr_scene;
8025         float isHdrConfidence = hdr_scene_data->hdr_confidence;
8026         camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
8027                            &isHdr, 1);
8028         camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
8029                            &isHdrConfidence, 1);
8030     }
8031 
8032 
8033 
8034     if (metadata->is_tuning_params_valid) {
8035         uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
8036         uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
8037         metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
8038 
8039 
8040         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
8041                 sizeof(uint32_t));
8042         data += sizeof(uint32_t);
8043 
8044         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
8045                 sizeof(uint32_t));
8046         LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8047         data += sizeof(uint32_t);
8048 
8049         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
8050                 sizeof(uint32_t));
8051         LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8052         data += sizeof(uint32_t);
8053 
8054         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
8055                 sizeof(uint32_t));
8056         LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8057         data += sizeof(uint32_t);
8058 
8059         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
8060                 sizeof(uint32_t));
8061         LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8062         data += sizeof(uint32_t);
8063 
8064         metadata->tuning_params.tuning_mod3_data_size = 0;
8065         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
8066                 sizeof(uint32_t));
8067         LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8068         data += sizeof(uint32_t);
8069 
8070         size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
8071                 TUNING_SENSOR_DATA_MAX);
8072         memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
8073                 count);
8074         data += count;
8075 
8076         count = MIN(metadata->tuning_params.tuning_vfe_data_size,
8077                 TUNING_VFE_DATA_MAX);
8078         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
8079                 count);
8080         data += count;
8081 
8082         count = MIN(metadata->tuning_params.tuning_cpp_data_size,
8083                 TUNING_CPP_DATA_MAX);
8084         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
8085                 count);
8086         data += count;
8087 
8088         count = MIN(metadata->tuning_params.tuning_cac_data_size,
8089                 TUNING_CAC_DATA_MAX);
8090         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
8091                 count);
8092         data += count;
8093 
8094         camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
8095                 (int32_t *)(void *)tuning_meta_data_blob,
8096                 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
8097     }
8098 
8099     IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
8100             CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
8101         camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
8102                 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
8103                 NEUTRAL_COL_POINTS);
8104     }
8105 
8106     IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
8107         uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
8108         camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
8109     }
8110 
8111     IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
8112         int32_t aeRegions[REGIONS_TUPLE_COUNT];
8113         // Adjust crop region from sensor output coordinate system to active
8114         // array coordinate system.
8115         cam_rect_t hAeRect = hAeRegions->rect;
8116         mCropRegionMapper.toActiveArray(hAeRect.left, hAeRect.top,
8117                 hAeRect.width, hAeRect.height);
8118 
8119         convertToRegions(hAeRect, aeRegions, hAeRegions->weight);
8120         camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
8121                 REGIONS_TUPLE_COUNT);
8122         LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8123                  aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
8124                 hAeRect.left, hAeRect.top, hAeRect.width,
8125                 hAeRect.height);
8126     }
8127 
8128     if (!pendingRequest.focusStateSent) {
8129         if (pendingRequest.focusStateValid) {
8130             camMetadata.update(ANDROID_CONTROL_AF_STATE, &pendingRequest.focusState, 1);
8131             LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", pendingRequest.focusState);
8132         } else {
8133             IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
8134                 uint8_t fwk_afState = (uint8_t) *afState;
8135                 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
8136                 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
8137             }
8138         }
8139     }
8140 
8141     IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
8142         camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
8143         mLastFocusDistance = *focusDistance;
8144     } else {
8145         LOGE("Missing LENS_FOCUS_DISTANCE metadata. Use last known distance of %f",
8146                 mLastFocusDistance);
8147         camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , &mLastFocusDistance, 1);
8148     }
8149 
8150     IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
8151         camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
8152     }
8153 
8154     IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
8155         uint8_t fwk_lensState = *lensState;
8156         camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
8157     }
8158 
8159     IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
8160         uint32_t ab_mode = *hal_ab_mode;
8161         if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
8162                 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
8163               ab_mode = CAM_ANTIBANDING_MODE_AUTO;
8164         }
8165         int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
8166                 ab_mode);
8167         if (NAME_NOT_FOUND != val) {
8168             uint8_t fwk_ab_mode = (uint8_t)val;
8169             camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
8170         }
8171     }
8172 
8173     IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
8174         int val = lookupFwkName(SCENE_MODES_MAP,
8175                 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
8176         if (NAME_NOT_FOUND != val) {
8177             uint8_t fwkBestshotMode = (uint8_t)val;
8178             camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
8179             LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
8180         } else {
8181             LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
8182         }
8183     }
8184 
8185     IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
8186          uint8_t fwk_mode = (uint8_t) *mode;
8187          camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
8188     }
8189 
8190     /* Constant metadata values to be update*/
8191 
8192     uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
8193     camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
8194 
8195     int32_t hotPixelMap[2];
8196     camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
8197 
8198     // CDS
8199     IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
8200         camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
8201     }
8202 
8203     IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
8204         int32_t fwk_hdr;
8205         int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
8206         if(*vhdr == CAM_SENSOR_HDR_OFF) {
8207             fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
8208         } else {
8209             fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
8210         }
8211 
8212         if(fwk_hdr != curr_hdr_state) {
8213            LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
8214            if(fwk_hdr)
8215               mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
8216            else
8217               mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
8218         }
8219         camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
8220     }
8221 
8222     //binning correction
8223     IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
8224             CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
8225         int32_t fwk_bin_mode = (int32_t) *bin_correction;
8226         camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
8227     }
8228 
8229     IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
8230         int32_t fwk_ir = (int32_t) *ir;
8231         int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
8232         int8_t is_ir_on = 0;
8233 
8234         (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
8235         if(is_ir_on != curr_ir_state) {
8236            LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
8237            if(is_ir_on)
8238               mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
8239            else
8240               mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
8241         }
8242         camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
8243     }
8244 
8245     // AEC SPEED
8246     IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
8247         camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
8248     }
8249 
8250     // AWB SPEED
8251     IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
8252         camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
8253     }
8254 
8255     // TNR
8256     IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
8257         uint8_t tnr_enable       = tnr->denoise_enable;
8258         int32_t tnr_process_type = (int32_t)tnr->process_plates;
8259         int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
8260         int8_t is_tnr_on = 0;
8261 
8262         (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
8263         if(is_tnr_on != curr_tnr_state) {
8264            LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
8265            if(is_tnr_on)
8266               mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
8267            else
8268               mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
8269         }
8270 
8271         camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
8272         camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
8273     }
8274 
8275     // Reprocess crop data
8276     IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
8277         uint8_t cnt = crop_data->num_of_streams;
8278         if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
8279             // mm-qcamera-daemon only posts crop_data for streams
8280             // not linked to pproc. So no valid crop metadata is not
8281             // necessarily an error case.
8282             LOGD("No valid crop metadata entries");
8283         } else {
8284             uint32_t reproc_stream_id;
8285             if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8286                 LOGD("No reprocessible stream found, ignore crop data");
8287             } else {
8288                 int rc = NO_ERROR;
8289                 Vector<int32_t> roi_map;
8290                 int32_t *crop = new int32_t[cnt*4];
8291                 if (NULL == crop) {
8292                    rc = NO_MEMORY;
8293                 }
8294                 if (NO_ERROR == rc) {
8295                     int32_t streams_found = 0;
8296                     for (size_t i = 0; i < cnt; i++) {
8297                         if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
8298                             if (pprocDone) {
8299                                 // HAL already does internal reprocessing,
8300                                 // either via reprocessing before JPEG encoding,
8301                                 // or offline postprocessing for pproc bypass case.
8302                                 crop[0] = 0;
8303                                 crop[1] = 0;
8304                                 crop[2] = mInputStreamInfo.dim.width;
8305                                 crop[3] = mInputStreamInfo.dim.height;
8306                             } else {
8307                                 crop[0] = crop_data->crop_info[i].crop.left;
8308                                 crop[1] = crop_data->crop_info[i].crop.top;
8309                                 crop[2] = crop_data->crop_info[i].crop.width;
8310                                 crop[3] = crop_data->crop_info[i].crop.height;
8311                             }
8312                             roi_map.add(crop_data->crop_info[i].roi_map.left);
8313                             roi_map.add(crop_data->crop_info[i].roi_map.top);
8314                             roi_map.add(crop_data->crop_info[i].roi_map.width);
8315                             roi_map.add(crop_data->crop_info[i].roi_map.height);
8316                             streams_found++;
8317                             LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
8318                                     crop[0], crop[1], crop[2], crop[3]);
8319                             LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
8320                                     crop_data->crop_info[i].roi_map.left,
8321                                     crop_data->crop_info[i].roi_map.top,
8322                                     crop_data->crop_info[i].roi_map.width,
8323                                     crop_data->crop_info[i].roi_map.height);
8324                             break;
8325 
8326                        }
8327                     }
8328                     camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
8329                             &streams_found, 1);
8330                     camMetadata.update(QCAMERA3_CROP_REPROCESS,
8331                             crop, (size_t)(streams_found * 4));
8332                     if (roi_map.array()) {
8333                         camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
8334                                 roi_map.array(), roi_map.size());
8335                     }
8336                }
8337                if (crop) {
8338                    delete [] crop;
8339                }
8340             }
8341         }
8342     }
8343 
8344     if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
8345         // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
8346         // so hardcoding the CAC result to OFF mode.
8347         uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8348         camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
8349     } else {
8350         IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
8351             int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
8352                     *cacMode);
8353             if (NAME_NOT_FOUND != val) {
8354                 uint8_t resultCacMode = (uint8_t)val;
8355                 // check whether CAC result from CB is equal to Framework set CAC mode
8356                 // If not equal then set the CAC mode came in corresponding request
8357                 if (pendingRequest.fwkCacMode != resultCacMode) {
8358                     resultCacMode = pendingRequest.fwkCacMode;
8359                 }
8360                 //Check if CAC is disabled by property
8361                 if (m_cacModeDisabled) {
8362                     resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8363                 }
8364 
8365                 LOGD("fwk_cacMode=%d resultCacMode=%d", pendingRequest.fwkCacMode, resultCacMode);
8366                 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
8367             } else {
8368                 LOGE("Invalid CAC camera parameter: %d", *cacMode);
8369             }
8370         }
8371     }
8372 
8373     // Post blob of cam_cds_data through vendor tag.
8374     IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
8375         uint8_t cnt = cdsInfo->num_of_streams;
8376         cam_cds_data_t cdsDataOverride;
8377         memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
8378         cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
8379         cdsDataOverride.num_of_streams = 1;
8380         if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
8381             uint32_t reproc_stream_id;
8382             if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8383                 LOGD("No reprocessible stream found, ignore cds data");
8384             } else {
8385                 for (size_t i = 0; i < cnt; i++) {
8386                     if (cdsInfo->cds_info[i].stream_id ==
8387                             reproc_stream_id) {
8388                         cdsDataOverride.cds_info[0].cds_enable =
8389                                 cdsInfo->cds_info[i].cds_enable;
8390                         break;
8391                     }
8392                 }
8393             }
8394         } else {
8395             LOGD("Invalid stream count %d in CDS_DATA", cnt);
8396         }
8397         camMetadata.update(QCAMERA3_CDS_INFO,
8398                 (uint8_t *)&cdsDataOverride,
8399                 sizeof(cam_cds_data_t));
8400     }
8401 
8402     // Ldaf calibration data
8403     if (!mLdafCalibExist) {
8404         IF_META_AVAILABLE(uint32_t, ldafCalib,
8405                 CAM_INTF_META_LDAF_EXIF, metadata) {
8406             mLdafCalibExist = true;
8407             mLdafCalib[0] = ldafCalib[0];
8408             mLdafCalib[1] = ldafCalib[1];
8409             LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
8410                     ldafCalib[0], ldafCalib[1]);
8411         }
8412     }
8413 
8414     // EXIF debug data through vendor tag
8415     /*
8416      * Mobicat Mask can assume 3 values:
8417      * 1 refers to Mobicat data,
8418      * 2 refers to Stats Debug and Exif Debug Data
8419      * 3 refers to Mobicat and Stats Debug Data
8420      * We want to make sure that we are sending Exif debug data
8421      * only when Mobicat Mask is 2.
8422      */
8423     if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
8424         camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
8425                 (uint8_t *)(void *)mExifParams.debug_params,
8426                 sizeof(mm_jpeg_debug_exif_params_t));
8427     }
8428 
8429     // Reprocess and DDM debug data through vendor tag
8430     cam_reprocess_info_t repro_info;
8431     memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
8432     IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
8433             CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
8434         memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
8435     }
8436     IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
8437             CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
8438         memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
8439     }
8440     IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
8441             CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
8442         memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
8443     }
8444     IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
8445             CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
8446         memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
8447     }
8448     IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
8449             CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
8450         memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
8451     }
8452     IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
8453         memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
8454     }
8455     IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8456             CAM_INTF_PARM_ROTATION, metadata) {
8457         memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
8458     }
8459     IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8460         memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8461     }
8462     IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8463         memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8464     }
8465     camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8466         (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
8467 
8468     // INSTANT AEC MODE
8469     IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8470             CAM_INTF_PARM_INSTANT_AEC, metadata) {
8471         camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8472     }
8473 
8474     // AF scene change
8475     IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8476         camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8477         camMetadata.update(ANDROID_CONTROL_AF_SCENE_CHANGE, afSceneChange, 1);
8478     } else {
8479         uint8_t noSceneChange = 0;
8480         camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, &noSceneChange, 1);
8481         camMetadata.update(ANDROID_CONTROL_AF_SCENE_CHANGE, &noSceneChange, 1);
8482         LOGE("Missing AF_SCENE_CHANGE metadata!");
8483     }
8484 
8485     // Enable ZSL
8486     if (enableZsl != nullptr) {
8487         uint8_t value = *enableZsl ?
8488                 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8489         camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8490     }
8491 
8492     camMetadata.update(ANDROID_STATISTICS_OIS_DATA_MODE, &pendingRequest.requestedOisDataMode, 1);
8493 
8494     // OIS Data
8495     IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8496         camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8497             &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8498         camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8499             frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8500         camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
8501             frame_ois_data->ois_sample_shift_pixel_x, frame_ois_data->num_ois_sample);
8502         camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y,
8503             frame_ois_data->ois_sample_shift_pixel_y, frame_ois_data->num_ois_sample);
8504 
8505         if (pendingRequest.requestedOisDataMode == ANDROID_STATISTICS_OIS_DATA_MODE_ON) {
8506             int64_t timeDiff = pendingRequest.timestamp -
8507                     frame_ois_data->frame_sof_timestamp_boottime;
8508 
8509             std::vector<int64_t> oisTimestamps;
8510 
8511             for (int32_t i = 0; i < frame_ois_data->num_ois_sample; i++) {
8512                 oisTimestamps.push_back(
8513                         frame_ois_data->ois_sample_timestamp_boottime[i] + timeDiff);
8514             }
8515 
8516             camMetadata.update(ANDROID_STATISTICS_OIS_TIMESTAMPS,
8517                     oisTimestamps.data(), frame_ois_data->num_ois_sample);
8518             camMetadata.update(ANDROID_STATISTICS_OIS_X_SHIFTS,
8519                     frame_ois_data->ois_sample_shift_pixel_x, frame_ois_data->num_ois_sample);
8520             camMetadata.update(ANDROID_STATISTICS_OIS_Y_SHIFTS,
8521                     frame_ois_data->ois_sample_shift_pixel_y, frame_ois_data->num_ois_sample);
8522         } else {
8523             // If OIS data mode is OFF, add NULL for OIS keys.
8524             camMetadata.update(ANDROID_STATISTICS_OIS_TIMESTAMPS,
8525                     frame_ois_data->ois_sample_timestamp_boottime, 0);
8526             camMetadata.update(ANDROID_STATISTICS_OIS_X_SHIFTS,
8527                     frame_ois_data->ois_sample_shift_pixel_x, 0);
8528             camMetadata.update(ANDROID_STATISTICS_OIS_Y_SHIFTS,
8529                     frame_ois_data->ois_sample_shift_pixel_y, 0);
8530         }
8531     }
8532 
8533     // DevCamDebug metadata translateFromHalMetadata AEC MOTION
8534     IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
8535             CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
8536         float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
8537         camMetadata.update(NEXUS_EXPERIMENTAL_2017_CAMERA_MOTION_X,
8538                            &fwk_DevCamDebug_aec_camera_motion_dx, 1);
8539     }
8540     IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
8541             CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
8542         float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
8543         camMetadata.update(NEXUS_EXPERIMENTAL_2017_CAMERA_MOTION_Y,
8544                            &fwk_DevCamDebug_aec_camera_motion_dy, 1);
8545     }
8546     IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
8547             CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
8548         float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
8549         camMetadata.update(NEXUS_EXPERIMENTAL_2017_SUBJECT_MOTION,
8550                            &fwk_DevCamDebug_aec_subject_motion, 1);
8551     }
8552 
8553     // Camera lens calibration dynamic fields, for back camera. Same values as for static metadata.
8554     if (mCameraId == 0) {
8555         const camera_metadata_t *staticInfo = gStaticMetadata[mCameraId];
8556         camera_metadata_ro_entry_t rotation, translation, intrinsics, distortion, reference;
8557         int res;
8558         bool fail = false;
8559         res = find_camera_metadata_ro_entry(staticInfo, ANDROID_LENS_POSE_ROTATION,
8560                 &rotation);
8561         if (res != 0) {
8562             fail = true;
8563         }
8564         res = find_camera_metadata_ro_entry(staticInfo, ANDROID_LENS_POSE_TRANSLATION,
8565                 &translation);
8566         if (res != 0) {
8567             fail = true;
8568         }
8569         res = find_camera_metadata_ro_entry(staticInfo, ANDROID_LENS_INTRINSIC_CALIBRATION,
8570                 &intrinsics);
8571         if (res != 0) {
8572             fail = true;
8573         }
8574         res = find_camera_metadata_ro_entry(staticInfo, ANDROID_LENS_DISTORTION,
8575                 &distortion);
8576         if (res != 0) {
8577             fail = true;
8578         }
8579         res = find_camera_metadata_ro_entry(staticInfo, ANDROID_LENS_POSE_REFERENCE,
8580                 &reference);
8581         if (res != 0) {
8582             fail = true;
8583         }
8584 
8585         if (!fail) {
8586             camMetadata.update(ANDROID_LENS_POSE_ROTATION,
8587                     rotation.data.f, rotation.count);
8588             camMetadata.update(ANDROID_LENS_POSE_TRANSLATION,
8589                     translation.data.f, translation.count);
8590             camMetadata.update(ANDROID_LENS_INTRINSIC_CALIBRATION,
8591                     intrinsics.data.f, intrinsics.count);
8592             camMetadata.update(ANDROID_LENS_DISTORTION,
8593                     distortion.data.f, distortion.count);
8594             camMetadata.update(ANDROID_LENS_POSE_REFERENCE,
8595                     reference.data.u8, reference.count);
8596         }
8597     }
8598 
8599     resultMetadata = camMetadata.release();
8600     return resultMetadata;
8601 }
8602 
8603 /*===========================================================================
8604  * FUNCTION   : saveExifParams
8605  *
 * DESCRIPTION: Caches the per-module EXIF debug parameters (AE, AWB, AF,
 *              ASD, stats, BE stats, bhist, 3A tuning) from a metadata
 *              callback into mExifParams, for later retrieval via
 *              get3AExifParams().
 *
8608  * PARAMETERS :
8609  *   @metadata : metadata information from callback
8610  *
8611  * RETURN     : none
8612  *
8613  *==========================================================================*/
saveExifParams(metadata_buffer_t * metadata)8614 void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8615 {
8616     IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8617             CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8618         if (mExifParams.debug_params) {
8619             mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8620             mExifParams.debug_params->ae_debug_params_valid = TRUE;
8621         }
8622     }
8623     IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8624             CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8625         if (mExifParams.debug_params) {
8626             mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8627             mExifParams.debug_params->awb_debug_params_valid = TRUE;
8628         }
8629     }
8630     IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8631             CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8632         if (mExifParams.debug_params) {
8633             mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8634             mExifParams.debug_params->af_debug_params_valid = TRUE;
8635         }
8636     }
8637     IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8638             CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8639         if (mExifParams.debug_params) {
8640             mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8641             mExifParams.debug_params->asd_debug_params_valid = TRUE;
8642         }
8643     }
8644     IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8645             CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8646         if (mExifParams.debug_params) {
8647             mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8648             mExifParams.debug_params->stats_debug_params_valid = TRUE;
8649         }
8650     }
8651     IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8652             CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8653         if (mExifParams.debug_params) {
8654             mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8655             mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8656         }
8657     }
8658     IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8659             CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8660         if (mExifParams.debug_params) {
8661             mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8662             mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8663         }
8664     }
8665     IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8666             CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8667         if (mExifParams.debug_params) {
8668             mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8669             mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8670         }
8671     }
8672 }
8673 
8674 /*===========================================================================
8675  * FUNCTION   : get3AExifParams
8676  *
 * DESCRIPTION: Returns the cached 3A EXIF parameters populated by
 *              saveExifParams().
8678  *
8679  * PARAMETERS : none
8680  *
8681  *
8682  * RETURN     : mm_jpeg_exif_params_t
8683  *
8684  *==========================================================================*/
get3AExifParams()8685 mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8686 {
8687     return mExifParams;
8688 }
8689 
8690 /*===========================================================================
8691  * FUNCTION   : translateCbUrgentMetadataToResultMetadata
8692  *
 * DESCRIPTION: Translates urgent (partial-result) metadata from the HAL
 *              callback into framework result metadata: 3A states,
 *              triggers, AF regions and AE mode deduction.
8694  *
8695  * PARAMETERS :
8696  *   @metadata : metadata information from callback
8697  *   @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8698  *                               urgent metadata in a batch. Always true for
8699  *                               non-batch mode.
8700  *   @frame_number :             frame number for this urgent metadata
8701  *   @isJumpstartMetadata: Whether this is a partial metadata for jumpstart,
8702  *                         i.e. even though it doesn't map to a valid partial
8703  *                         frame number, its metadata entries should be kept.
8704  * RETURN     : camera_metadata_t*
8705  *              metadata in a format specified by fwk
8706  *==========================================================================*/
8707 camera_metadata_t*
translateCbUrgentMetadataToResultMetadata(metadata_buffer_t * metadata,bool lastUrgentMetadataInBatch,uint32_t frame_number,bool isJumpstartMetadata)8708 QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
8709                                 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch,
8710                                  uint32_t frame_number, bool isJumpstartMetadata)
8711 {
8712     CameraMetadata camMetadata;
8713     camera_metadata_t *resultMetadata;
8714 
8715     if (!lastUrgentMetadataInBatch && !isJumpstartMetadata) {
8716         /* In batch mode, use empty metadata if this is not the last in batch
8717          */
8718         resultMetadata = allocate_camera_metadata(0, 0);
8719         return resultMetadata;
8720     }
8721 
8722     IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8723         uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8724         camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8725         LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8726     }
8727 
8728     IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8729         camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8730                 &aecTrigger->trigger, 1);
8731         camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8732                 &aecTrigger->trigger_id, 1);
8733         LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8734                  aecTrigger->trigger);
8735         LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8736                 aecTrigger->trigger_id);
8737     }
8738 
8739     IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8740         uint8_t fwk_ae_state = (uint8_t) *ae_state;
8741         camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8742         LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8743     }
8744 
8745     IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
8746         int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
8747         if (NAME_NOT_FOUND != val) {
8748             uint8_t fwkAfMode = (uint8_t)val;
8749             camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
8750             LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
8751         } else {
8752             LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
8753                     val);
8754         }
8755     }
8756 
8757     IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8758         LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8759             af_trigger->trigger);
8760         LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8761             af_trigger->trigger_id);
8762 
8763         IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
8764             mAfTrigger = *af_trigger;
8765             uint32_t fwk_AfState = (uint32_t) *afState;
8766 
8767             // If this is the result for a new trigger, check if there is new early
8768             // af state. If there is, use the last af state for all results
8769             // preceding current partial frame number.
8770             for (auto & pendingRequest : mPendingRequestsList) {
8771                 if (pendingRequest.frame_number < frame_number) {
8772                     pendingRequest.focusStateValid = true;
8773                     pendingRequest.focusState = fwk_AfState;
8774                 } else if (pendingRequest.frame_number == frame_number) {
8775                     IF_META_AVAILABLE(uint32_t, earlyAfState, CAM_INTF_META_EARLY_AF_STATE, metadata) {
8776                         // Check if early AF state for trigger exists. If yes, send AF state as
8777                         // partial result for better latency.
8778                         uint8_t fwkEarlyAfState = (uint8_t) *earlyAfState;
8779                         pendingRequest.focusStateSent = true;
8780                         camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwkEarlyAfState, 1);
8781                         LOGD("urgent Metadata(%d) : ANDROID_CONTROL_AF_STATE %u",
8782                                  frame_number, fwkEarlyAfState);
8783                     }
8784                 }
8785             }
8786         }
8787     }
8788     camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8789         &mAfTrigger.trigger, 1);
8790     camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &mAfTrigger.trigger_id, 1);
8791 
8792     IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8793         /*af regions*/
8794         cam_rect_t hAfRect = hAfRegions->rect;
8795         int32_t afRegions[REGIONS_TUPLE_COUNT];
8796         // Adjust crop region from sensor output coordinate system to active
8797         // array coordinate system.
8798         mCropRegionMapper.toActiveArray(hAfRect.left, hAfRect.top,
8799                 hAfRect.width, hAfRect.height);
8800 
8801         convertToRegions(hAfRect, afRegions, hAfRegions->weight);
8802         camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8803                 REGIONS_TUPLE_COUNT);
8804         LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8805                  afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8806                 hAfRect.left, hAfRect.top, hAfRect.width,
8807                 hAfRect.height);
8808     }
8809 
8810     // AF region confidence
8811     IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8812         camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8813     }
8814 
8815     IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8816         int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8817                 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8818         if (NAME_NOT_FOUND != val) {
8819             uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8820             camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8821             LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8822         } else {
8823             LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8824         }
8825     }
8826 
8827     uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8828     uint32_t aeMode = CAM_AE_MODE_MAX;
8829     int32_t flashMode = CAM_FLASH_MODE_MAX;
8830     int32_t redeye = -1;
8831     IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8832         aeMode = *pAeMode;
8833     }
8834     IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8835         flashMode = *pFlashMode;
8836     }
8837     IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8838         redeye = *pRedeye;
8839     }
8840 
8841     if (1 == redeye) {
8842         fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8843         camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8844     } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8845         int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8846                 flashMode);
8847         if (NAME_NOT_FOUND != val) {
8848             fwk_aeMode = (uint8_t)val;
8849             camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8850         } else {
8851             LOGE("Unsupported flash mode %d", flashMode);
8852         }
8853     } else if (aeMode == CAM_AE_MODE_ON) {
8854         fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8855         camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8856     } else if (aeMode == CAM_AE_MODE_OFF) {
8857         fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8858         camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8859     } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8860         fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_EXTERNAL_FLASH;
8861         camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8862     } else {
8863         LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8864               "flashMode:%d, aeMode:%u!!!",
8865                  redeye, flashMode, aeMode);
8866     }
8867     if (mInstantAEC) {
8868         // Increment frame Idx count untill a bound reached for instant AEC.
8869         mInstantAecFrameIdxCount++;
8870         IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8871                 CAM_INTF_META_AEC_INFO, metadata) {
8872             LOGH("ae_params->settled = %d",ae_params->settled);
8873             // If AEC settled, or if number of frames reached bound value,
8874             // should reset instant AEC.
8875             if (ae_params->settled ||
8876                     (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8877                 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8878                 mInstantAEC = false;
8879                 mResetInstantAEC = true;
8880                 mInstantAecFrameIdxCount = 0;
8881             }
8882         }
8883     }
8884 
8885     IF_META_AVAILABLE(int32_t, af_tof_confidence,
8886             CAM_INTF_META_AF_TOF_CONFIDENCE, metadata) {
8887         IF_META_AVAILABLE(int32_t, af_tof_distance,
8888                 CAM_INTF_META_AF_TOF_DISTANCE, metadata) {
8889             int32_t fwk_af_tof_confidence = *af_tof_confidence;
8890             int32_t fwk_af_tof_distance = *af_tof_distance;
8891             if (fwk_af_tof_confidence == 1) {
8892                 mSceneDistance = fwk_af_tof_distance;
8893             } else {
8894                 mSceneDistance = -1;
8895             }
8896             LOGD("tof_distance %d, tof_confidence %d, mSceneDistance %d",
8897                      fwk_af_tof_distance, fwk_af_tof_confidence, mSceneDistance);
8898         }
8899     }
8900     camMetadata.update(NEXUS_EXPERIMENTAL_2017_SCENE_DISTANCE, &mSceneDistance, 1);
8901 
8902     resultMetadata = camMetadata.release();
8903     return resultMetadata;
8904 }
8905 
8906 /*===========================================================================
8907  * FUNCTION   : dumpMetadataToFile
8908  *
8909  * DESCRIPTION: Dumps tuning metadata to file system
8910  *
8911  * PARAMETERS :
8912  *   @meta           : tuning metadata
8913  *   @dumpFrameCount : current dump frame count
8914  *   @enabled        : Enable mask
8915  *
8916  *==========================================================================*/
dumpMetadataToFile(tuning_params_t & meta,uint32_t & dumpFrameCount,bool enabled,const char * type,uint32_t frameNumber)8917 void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8918                                                    uint32_t &dumpFrameCount,
8919                                                    bool enabled,
8920                                                    const char *type,
8921                                                    uint32_t frameNumber)
8922 {
8923     //Some sanity checks
8924     if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8925         LOGE("Tuning sensor data size bigger than expected %d: %d",
8926               meta.tuning_sensor_data_size,
8927               TUNING_SENSOR_DATA_MAX);
8928         return;
8929     }
8930 
8931     if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8932         LOGE("Tuning VFE data size bigger than expected %d: %d",
8933               meta.tuning_vfe_data_size,
8934               TUNING_VFE_DATA_MAX);
8935         return;
8936     }
8937 
8938     if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8939         LOGE("Tuning CPP data size bigger than expected %d: %d",
8940               meta.tuning_cpp_data_size,
8941               TUNING_CPP_DATA_MAX);
8942         return;
8943     }
8944 
8945     if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8946         LOGE("Tuning CAC data size bigger than expected %d: %d",
8947               meta.tuning_cac_data_size,
8948               TUNING_CAC_DATA_MAX);
8949         return;
8950     }
8951     //
8952 
8953     if(enabled){
8954         char timeBuf[FILENAME_MAX];
8955         char buf[FILENAME_MAX];
8956         memset(buf, 0, sizeof(buf));
8957         memset(timeBuf, 0, sizeof(timeBuf));
8958         time_t current_time;
8959         struct tm * timeinfo;
8960         time (&current_time);
8961         timeinfo = localtime (&current_time);
8962         if (timeinfo != NULL) {
8963             strftime (timeBuf, sizeof(timeBuf),
8964                     QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8965         }
8966         String8 filePath(timeBuf);
8967         snprintf(buf,
8968                 sizeof(buf),
8969                 "%dm_%s_%d.bin",
8970                 dumpFrameCount,
8971                 type,
8972                 frameNumber);
8973         filePath.append(buf);
8974         int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8975         if (file_fd >= 0) {
8976             ssize_t written_len = 0;
8977             meta.tuning_data_version = TUNING_DATA_VERSION;
8978             void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8979             written_len += write(file_fd, data, sizeof(uint32_t));
8980             data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8981             LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8982             written_len += write(file_fd, data, sizeof(uint32_t));
8983             data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8984             LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8985             written_len += write(file_fd, data, sizeof(uint32_t));
8986             data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8987             LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8988             written_len += write(file_fd, data, sizeof(uint32_t));
8989             data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8990             LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8991             written_len += write(file_fd, data, sizeof(uint32_t));
8992             meta.tuning_mod3_data_size = 0;
8993             data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8994             LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8995             written_len += write(file_fd, data, sizeof(uint32_t));
8996             size_t total_size = meta.tuning_sensor_data_size;
8997             data = (void *)((uint8_t *)&meta.data);
8998             written_len += write(file_fd, data, total_size);
8999             total_size = meta.tuning_vfe_data_size;
9000             data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
9001             written_len += write(file_fd, data, total_size);
9002             total_size = meta.tuning_cpp_data_size;
9003             data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
9004             written_len += write(file_fd, data, total_size);
9005             total_size = meta.tuning_cac_data_size;
9006             data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
9007             written_len += write(file_fd, data, total_size);
9008             close(file_fd);
9009         }else {
9010             LOGE("fail to open file for metadata dumping");
9011         }
9012     }
9013 }
9014 
9015 /*===========================================================================
9016  * FUNCTION   : cleanAndSortStreamInfo
9017  *
9018  * DESCRIPTION: helper method to clean up invalid streams in stream_info,
9019  *              and sort them such that raw stream is at the end of the list
9020  *              This is a workaround for camera daemon constraint.
9021  *
9022  * PARAMETERS : None
9023  *
9024  *==========================================================================*/
cleanAndSortStreamInfo()9025 void QCamera3HardwareInterface::cleanAndSortStreamInfo()
9026 {
9027     List<stream_info_t *> newStreamInfo;
9028 
9029     /*clean up invalid streams*/
9030     for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
9031             it != mStreamInfo.end();) {
9032         if(((*it)->status) == INVALID){
9033             QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
9034             delete channel;
9035             free(*it);
9036             it = mStreamInfo.erase(it);
9037         } else {
9038             it++;
9039         }
9040     }
9041 
9042     // Move preview/video/callback/snapshot streams into newList
9043     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9044             it != mStreamInfo.end();) {
9045         if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
9046                 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
9047                 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
9048             newStreamInfo.push_back(*it);
9049             it = mStreamInfo.erase(it);
9050         } else
9051             it++;
9052     }
9053     // Move raw streams into newList
9054     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9055             it != mStreamInfo.end();) {
9056         newStreamInfo.push_back(*it);
9057         it = mStreamInfo.erase(it);
9058     }
9059 
9060     mStreamInfo = newStreamInfo;
9061 
9062     // Make sure that stream IDs are unique.
9063     uint32_t id = 0;
9064     for (auto streamInfo : mStreamInfo) {
9065         streamInfo->id = id++;
9066     }
9067 
9068 }
9069 
9070 /*===========================================================================
9071  * FUNCTION   : extractJpegMetadata
9072  *
9073  * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
9074  *              JPEG metadata is cached in HAL, and return as part of capture
9075  *              result when metadata is returned from camera daemon.
9076  *
9077  * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
9078  *              @request:      capture request
9079  *
9080  *==========================================================================*/
extractJpegMetadata(CameraMetadata & jpegMetadata,const camera3_capture_request_t * request)9081 void QCamera3HardwareInterface::extractJpegMetadata(
9082         CameraMetadata& jpegMetadata,
9083         const camera3_capture_request_t *request)
9084 {
9085     CameraMetadata frame_settings;
9086     frame_settings = request->settings;
9087 
9088     if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
9089         jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
9090                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
9091                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
9092 
9093     if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
9094         jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
9095                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
9096                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
9097 
9098     if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
9099         jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
9100                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
9101                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
9102 
9103     if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
9104         jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
9105                 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
9106                 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
9107 
9108     if (frame_settings.exists(ANDROID_JPEG_QUALITY))
9109         jpegMetadata.update(ANDROID_JPEG_QUALITY,
9110                 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
9111                 frame_settings.find(ANDROID_JPEG_QUALITY).count);
9112 
9113     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
9114         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
9115                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
9116                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
9117 
9118     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
9119         int32_t thumbnail_size[2];
9120         thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
9121         thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
9122         if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
9123             int32_t orientation =
9124                   frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
9125             if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
9126                //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
9127                int32_t temp;
9128                temp = thumbnail_size[0];
9129                thumbnail_size[0] = thumbnail_size[1];
9130                thumbnail_size[1] = temp;
9131             }
9132          }
9133          jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
9134                 thumbnail_size,
9135                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
9136     }
9137 
9138 }
9139 
9140 /*===========================================================================
9141  * FUNCTION   : convertToRegions
9142  *
9143  * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
9144  *
9145  * PARAMETERS :
9146  *   @rect   : cam_rect_t struct to convert
9147  *   @region : int32_t destination array
9148  *   @weight : if we are converting from cam_area_t, weight is valid
9149  *             else weight = -1
9150  *
9151  *==========================================================================*/
convertToRegions(cam_rect_t rect,int32_t * region,int weight)9152 void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
9153         int32_t *region, int weight)
9154 {
9155     region[FACE_LEFT] = rect.left;
9156     region[FACE_TOP] = rect.top;
9157     region[FACE_RIGHT] = rect.left + rect.width;
9158     region[FACE_BOTTOM] = rect.top + rect.height;
9159     if (weight > -1) {
9160         region[FACE_WEIGHT] = weight;
9161     }
9162 }
9163 
9164 /*===========================================================================
9165  * FUNCTION   : convertFromRegions
9166  *
 * DESCRIPTION: helper method to convert a 5-element metadata region array
 *              (xMin, yMin, xMax, yMax, weight) into a cam_area_t
 *
 * PARAMETERS :
 *   @roi            : cam_area_t destination struct
 *   @frame_settings : capture settings containing the region entry
 *   @tag            : metadata tag identifying the region entry to read
9174  *
9175  *==========================================================================*/
convertFromRegions(cam_area_t & roi,const CameraMetadata & frame_settings,uint32_t tag)9176 void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
9177         const CameraMetadata &frame_settings, uint32_t tag)
9178 {
9179     int32_t x_min = frame_settings.find(tag).data.i32[0];
9180     int32_t y_min = frame_settings.find(tag).data.i32[1];
9181     int32_t x_max = frame_settings.find(tag).data.i32[2];
9182     int32_t y_max = frame_settings.find(tag).data.i32[3];
9183     roi.weight = frame_settings.find(tag).data.i32[4];
9184     roi.rect.left = x_min;
9185     roi.rect.top = y_min;
9186     roi.rect.width = x_max - x_min;
9187     roi.rect.height = y_max - y_min;
9188 }
9189 
9190 /*===========================================================================
9191  * FUNCTION   : resetIfNeededROI
9192  *
9193  * DESCRIPTION: helper method to reset the roi if it is greater than scaler
9194  *              crop region
9195  *
9196  * PARAMETERS :
9197  *   @roi       : cam_area_t struct to resize
9198  *   @scalerCropRegion : cam_crop_region_t region to compare against
9199  *
9200  *
9201  *==========================================================================*/
resetIfNeededROI(cam_area_t * roi,const cam_crop_region_t * scalerCropRegion)9202 bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
9203                                                  const cam_crop_region_t* scalerCropRegion)
9204 {
9205     int32_t roi_x_max = roi->rect.width + roi->rect.left;
9206     int32_t roi_y_max = roi->rect.height + roi->rect.top;
9207     int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
9208     int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
9209 
9210     /* According to spec weight = 0 is used to indicate roi needs to be disabled
9211      * without having this check the calculations below to validate if the roi
9212      * is inside scalar crop region will fail resulting in the roi not being
9213      * reset causing algorithm to continue to use stale roi window
9214      */
9215     if (roi->weight == 0) {
9216         return true;
9217     }
9218 
9219     if ((roi_x_max < scalerCropRegion->left) ||
9220         // right edge of roi window is left of scalar crop's left edge
9221         (roi_y_max < scalerCropRegion->top)  ||
9222         // bottom edge of roi window is above scalar crop's top edge
9223         (roi->rect.left > crop_x_max) ||
9224         // left edge of roi window is beyond(right) of scalar crop's right edge
9225         (roi->rect.top > crop_y_max)){
9226         // top edge of roi windo is above scalar crop's top edge
9227         return false;
9228     }
9229     if (roi->rect.left < scalerCropRegion->left) {
9230         roi->rect.left = scalerCropRegion->left;
9231     }
9232     if (roi->rect.top < scalerCropRegion->top) {
9233         roi->rect.top = scalerCropRegion->top;
9234     }
9235     if (roi_x_max > crop_x_max) {
9236         roi_x_max = crop_x_max;
9237     }
9238     if (roi_y_max > crop_y_max) {
9239         roi_y_max = crop_y_max;
9240     }
9241     roi->rect.width = roi_x_max - roi->rect.left;
9242     roi->rect.height = roi_y_max - roi->rect.top;
9243     return true;
9244 }
9245 
9246 /*===========================================================================
9247  * FUNCTION   : convertLandmarks
9248  *
9249  * DESCRIPTION: helper method to extract the landmarks from face detection info
9250  *
9251  * PARAMETERS :
9252  *   @landmark_data : input landmark data to be converted
9253  *   @landmarks : int32_t destination array
9254  *
9255  *
9256  *==========================================================================*/
convertLandmarks(cam_face_landmarks_info_t landmark_data,int32_t * landmarks)9257 void QCamera3HardwareInterface::convertLandmarks(
9258         cam_face_landmarks_info_t landmark_data,
9259         int32_t *landmarks)
9260 {
9261     if (landmark_data.is_left_eye_valid) {
9262         landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
9263         landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
9264     } else {
9265         landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
9266         landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
9267     }
9268 
9269     if (landmark_data.is_right_eye_valid) {
9270         landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
9271         landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
9272     } else {
9273         landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
9274         landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
9275     }
9276 
9277     if (landmark_data.is_mouth_valid) {
9278         landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
9279         landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
9280     } else {
9281         landmarks[MOUTH_X] = FACE_INVALID_POINT;
9282         landmarks[MOUTH_Y] = FACE_INVALID_POINT;
9283     }
9284 }
9285 
9286 /*===========================================================================
9287  * FUNCTION   : setInvalidLandmarks
9288  *
9289  * DESCRIPTION: helper method to set invalid landmarks
9290  *
9291  * PARAMETERS :
9292  *   @landmarks : int32_t destination array
9293  *
9294  *
9295  *==========================================================================*/
setInvalidLandmarks(int32_t * landmarks)9296 void QCamera3HardwareInterface::setInvalidLandmarks(
9297         int32_t *landmarks)
9298 {
9299     landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
9300     landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
9301     landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
9302     landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
9303     landmarks[MOUTH_X] = FACE_INVALID_POINT;
9304     landmarks[MOUTH_Y] = FACE_INVALID_POINT;
9305 }
9306 
9307 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
9308 
9309 /*===========================================================================
9310  * FUNCTION   : getCapabilities
9311  *
9312  * DESCRIPTION: query camera capability from back-end
9313  *
9314  * PARAMETERS :
9315  *   @ops  : mm-interface ops structure
9316  *   @cam_handle  : camera handle for which we need capability
9317  *
9318  * RETURN     : ptr type of capability structure
9319  *              capability for success
9320  *              NULL for failure
9321  *==========================================================================*/
cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
        uint32_t cam_handle)
{
    int rc = NO_ERROR;
    QCamera3HeapMemory *capabilityHeap = NULL;
    // Heap-allocated copy handed back to the caller; caller owns it and must
    // free() it (the shared heap buffer below is torn down before returning).
    cam_capability_t *cap_ptr = NULL;

    if (ops == NULL) {
        LOGE("Invalid arguments");
        return NULL;
    }

    // Single-buffer heap that the backend fills with capability data once the
    // buffer is mapped and query_capability() is issued.
    capabilityHeap = new QCamera3HeapMemory(1);
    if (capabilityHeap == NULL) {
        LOGE("creation of capabilityHeap failed");
        return NULL;
    }

    /* Allocate memory for capability buffer */
    rc = capabilityHeap->allocate(sizeof(cam_capability_t));
    if(rc != OK) {
        LOGE("No memory for cappability");
        goto allocate_failed;
    }

    /* Map memory for capability buffer */
    // Zero the buffer first so any field the backend doesn't write is 0.
    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));

    rc = ops->map_buf(cam_handle,
            CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
            sizeof(cam_capability_t), capabilityHeap->getPtr(0));
    if(rc < 0) {
        LOGE("failed to map capability buffer");
        rc = FAILED_TRANSACTION;
        goto map_failed;
    }

    /* Query Capability */
    // Backend writes its capability data into the mapped buffer.
    rc = ops->query_capability(cam_handle);
    if(rc < 0) {
        LOGE("failed to query capability");
        rc = FAILED_TRANSACTION;
        goto query_failed;
    }

    // Snapshot the queried data into a plain malloc'd struct so the mapped
    // heap buffer can be released before returning.
    cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
    if (cap_ptr == NULL) {
        LOGE("out of memory");
        rc = NO_MEMORY;
        goto query_failed;
    }

    memset(cap_ptr, 0, sizeof(cam_capability_t));
    memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));

    // Clear analysis padding offsets for every analysis stream type;
    // presumably the backend-reported offsets are not meaningful to the HAL
    // here — TODO confirm against the analysis stream setup code.
    int index;
    for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
        cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
        p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
        p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
    }

// Unwind in reverse order of setup; labels fall through so each earlier
// failure point performs exactly the cleanup that its setup completed.
query_failed:
    ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
map_failed:
    capabilityHeap->deallocate();
allocate_failed:
    delete capabilityHeap;

    if (rc != NO_ERROR) {
        return NULL;
    } else {
        return cap_ptr;
    }
}
9397 
9398 /*===========================================================================
9399  * FUNCTION   : initCapabilities
9400  *
9401  * DESCRIPTION: initialize camera capabilities in static data struct
9402  *
9403  * PARAMETERS :
9404  *   @cameraId  : camera Id
9405  *
9406  * RETURN     : int32_t type of status
9407  *              NO_ERROR  -- success
9408  *              none-zero failure code
9409  *==========================================================================*/
initCapabilities(uint32_t cameraId)9410 int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
9411 {
9412     int rc = 0;
9413     mm_camera_vtbl_t *cameraHandle = NULL;
9414     uint32_t handle = 0;
9415 
9416     rc = camera_open((uint8_t)cameraId, &cameraHandle);
9417     if (rc) {
9418         LOGE("camera_open failed. rc = %d", rc);
9419         goto open_failed;
9420     }
9421     if (!cameraHandle) {
9422         LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
9423         goto open_failed;
9424     }
9425 
9426     handle = get_main_camera_handle(cameraHandle->camera_handle);
9427     gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
9428     if (gCamCapability[cameraId] == NULL) {
9429         rc = FAILED_TRANSACTION;
9430         goto failed_op;
9431     }
9432 
9433     gCamCapability[cameraId]->camera_index = cameraId;
9434     if (is_dual_camera_by_idx(cameraId)) {
9435         handle = get_aux_camera_handle(cameraHandle->camera_handle);
9436         gCamCapability[cameraId]->aux_cam_cap =
9437                 getCapabilities(cameraHandle->ops, handle);
9438         if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
9439             rc = FAILED_TRANSACTION;
9440             free(gCamCapability[cameraId]);
9441             goto failed_op;
9442         }
9443 
9444         // Copy the main camera capability to main_cam_cap struct
9445         gCamCapability[cameraId]->main_cam_cap =
9446                         (cam_capability_t *)malloc(sizeof(cam_capability_t));
9447         if (gCamCapability[cameraId]->main_cam_cap == NULL) {
9448             LOGE("out of memory");
9449             rc = NO_MEMORY;
9450             goto failed_op;
9451         }
9452         memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
9453                 sizeof(cam_capability_t));
9454     }
9455 failed_op:
9456     cameraHandle->ops->close_camera(cameraHandle->camera_handle);
9457     cameraHandle = NULL;
9458 open_failed:
9459     return rc;
9460 }
9461 
9462 /*==========================================================================
 * FUNCTION   : get3AVersion
9464  *
9465  * DESCRIPTION: get the Q3A S/W version
9466  *
9467  * PARAMETERS :
9468  *  @sw_version: Reference of Q3A structure which will hold version info upon
9469  *               return
9470  *
9471  * RETURN     : None
9472  *
9473  *==========================================================================*/
get3AVersion(cam_q3a_version_t & sw_version)9474 void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
9475 {
9476     if(gCamCapability[mCameraId])
9477         sw_version = gCamCapability[mCameraId]->q3a_version;
9478     else
9479         LOGE("Capability structure NULL!");
9480 }
9481 
9482 
9483 /*===========================================================================
9484  * FUNCTION   : initParameters
9485  *
9486  * DESCRIPTION: initialize camera parameters
9487  *
9488  * PARAMETERS :
9489  *
9490  * RETURN     : int32_t type of status
9491  *              NO_ERROR  -- success
9492  *              none-zero failure code
9493  *==========================================================================*/
initParameters()9494 int QCamera3HardwareInterface::initParameters()
9495 {
9496     int rc = 0;
9497 
9498     //Allocate Set Param Buffer
9499     mParamHeap = new QCamera3HeapMemory(1);
9500     rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
9501     if(rc != OK) {
9502         rc = NO_MEMORY;
9503         LOGE("Failed to allocate SETPARM Heap memory");
9504         delete mParamHeap;
9505         mParamHeap = NULL;
9506         return rc;
9507     }
9508 
9509     //Map memory for parameters buffer
9510     rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
9511             CAM_MAPPING_BUF_TYPE_PARM_BUF,
9512             mParamHeap->getFd(0),
9513             sizeof(metadata_buffer_t),
9514             (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
9515     if(rc < 0) {
9516         LOGE("failed to map SETPARM buffer");
9517         rc = FAILED_TRANSACTION;
9518         mParamHeap->deallocate();
9519         delete mParamHeap;
9520         mParamHeap = NULL;
9521         return rc;
9522     }
9523 
9524     mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
9525 
9526     mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
9527     return rc;
9528 }
9529 
9530 /*===========================================================================
9531  * FUNCTION   : deinitParameters
9532  *
9533  * DESCRIPTION: de-initialize camera parameters
9534  *
9535  * PARAMETERS :
9536  *
9537  * RETURN     : NONE
9538  *==========================================================================*/
deinitParameters()9539 void QCamera3HardwareInterface::deinitParameters()
9540 {
9541     mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
9542             CAM_MAPPING_BUF_TYPE_PARM_BUF);
9543 
9544     mParamHeap->deallocate();
9545     delete mParamHeap;
9546     mParamHeap = NULL;
9547 
9548     mParameters = NULL;
9549 
9550     free(mPrevParameters);
9551     mPrevParameters = NULL;
9552 }
9553 
9554 /*===========================================================================
9555  * FUNCTION   : calcMaxJpegSize
9556  *
9557  * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
9558  *
9559  * PARAMETERS :
9560  *
9561  * RETURN     : max_jpeg_size
9562  *==========================================================================*/
calcMaxJpegSize(uint32_t camera_id)9563 size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
9564 {
9565     size_t max_jpeg_size = 0;
9566     size_t temp_width, temp_height;
9567     size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
9568             MAX_SIZES_CNT);
9569     for (size_t i = 0; i < count; i++) {
9570         temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
9571         temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
9572         if (temp_width * temp_height > max_jpeg_size ) {
9573             max_jpeg_size = temp_width * temp_height;
9574         }
9575     }
9576     max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
9577     return max_jpeg_size;
9578 }
9579 
9580 /*===========================================================================
9581  * FUNCTION   : getMaxRawSize
9582  *
9583  * DESCRIPTION: Fetches maximum raw size supported by the cameraId
9584  *
9585  * PARAMETERS :
9586  *
9587  * RETURN     : Largest supported Raw Dimension
9588  *==========================================================================*/
getMaxRawSize(uint32_t camera_id)9589 cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
9590 {
9591     int max_width = 0;
9592     cam_dimension_t maxRawSize;
9593 
9594     memset(&maxRawSize, 0, sizeof(cam_dimension_t));
9595     for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
9596         if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
9597             max_width = gCamCapability[camera_id]->raw_dim[i].width;
9598             maxRawSize = gCamCapability[camera_id]->raw_dim[i];
9599         }
9600     }
9601     return maxRawSize;
9602 }
9603 
9604 
9605 /*===========================================================================
9606  * FUNCTION   : calcMaxJpegDim
9607  *
9608  * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9609  *
9610  * PARAMETERS :
9611  *
9612  * RETURN     : max_jpeg_dim
9613  *==========================================================================*/
calcMaxJpegDim()9614 cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9615 {
9616     cam_dimension_t max_jpeg_dim;
9617     cam_dimension_t curr_jpeg_dim;
9618     max_jpeg_dim.width = 0;
9619     max_jpeg_dim.height = 0;
9620     curr_jpeg_dim.width = 0;
9621     curr_jpeg_dim.height = 0;
9622     for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9623         curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9624         curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9625         if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9626             max_jpeg_dim.width * max_jpeg_dim.height ) {
9627             max_jpeg_dim.width = curr_jpeg_dim.width;
9628             max_jpeg_dim.height = curr_jpeg_dim.height;
9629         }
9630     }
9631     return max_jpeg_dim;
9632 }
9633 
9634 /*===========================================================================
9635  * FUNCTION   : addStreamConfig
9636  *
9637  * DESCRIPTION: adds the stream configuration to the array
9638  *
9639  * PARAMETERS :
9640  * @available_stream_configs : pointer to stream configuration array
9641  * @scalar_format            : scalar format
9642  * @dim                      : configuration dimension
9643  * @config_type              : input or output configuration type
9644  *
9645  * RETURN     : NONE
9646  *==========================================================================*/
addStreamConfig(Vector<int32_t> & available_stream_configs,int32_t scalar_format,const cam_dimension_t & dim,int32_t config_type)9647 void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9648         int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9649 {
9650     available_stream_configs.add(scalar_format);
9651     available_stream_configs.add(dim.width);
9652     available_stream_configs.add(dim.height);
9653     available_stream_configs.add(config_type);
9654 }
9655 
9656 /*===========================================================================
 * FUNCTION   : supportBurstCapture
9658  *
9659  * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9660  *
9661  * PARAMETERS :
9662  *   @cameraId  : camera Id
9663  *
9664  * RETURN     : true if camera supports BURST_CAPTURE
9665  *              false otherwise
9666  *==========================================================================*/
supportBurstCapture(uint32_t cameraId)9667 bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9668 {
9669     const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9670     const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9671     const int32_t highResWidth = 3264;
9672     const int32_t highResHeight = 2448;
9673 
9674     if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9675         // Maximum resolution images cannot be captured at >= 10fps
9676         // -> not supporting BURST_CAPTURE
9677         return false;
9678     }
9679 
9680     if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9681         // Maximum resolution images can be captured at >= 20fps
9682         // --> supporting BURST_CAPTURE
9683         return true;
9684     }
9685 
9686     // Find the smallest highRes resolution, or largest resolution if there is none
9687     size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9688             MAX_SIZES_CNT);
9689     size_t highRes = 0;
9690     while ((highRes + 1 < totalCnt) &&
9691             (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9692             gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9693             highResWidth * highResHeight)) {
9694         highRes++;
9695     }
9696     if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9697         return true;
9698     } else {
9699         return false;
9700     }
9701 }
9702 
9703 /*===========================================================================
9704  * FUNCTION   : getPDStatIndex
9705  *
9706  * DESCRIPTION: Return the meta raw phase detection statistics index if present
9707  *
9708  * PARAMETERS :
9709  *   @caps    : camera capabilities
9710  *
9711  * RETURN     : int32_t type
9712  *              non-negative - on success
9713  *              -1 - on failure
9714  *==========================================================================*/
getPDStatIndex(cam_capability_t * caps)9715 int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9716     if (nullptr == caps) {
9717         return -1;
9718     }
9719 
9720     uint32_t metaRawCount = caps->meta_raw_channel_count;
9721     int32_t ret = -1;
9722     for (size_t i = 0; i < metaRawCount; i++) {
9723         if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9724             ret = i;
9725             break;
9726         }
9727     }
9728 
9729     return ret;
9730 }
9731 
9732 /*===========================================================================
9733  * FUNCTION   : initStaticMetadata
9734  *
9735  * DESCRIPTION: initialize the static metadata
9736  *
9737  * PARAMETERS :
9738  *   @cameraId  : camera Id
9739  *
9740  * RETURN     : int32_t type of status
9741  *              0  -- success
9742  *              non-zero failure code
9743  *==========================================================================*/
initStaticMetadata(uint32_t cameraId)9744 int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9745 {
9746     int rc = 0;
9747     CameraMetadata staticInfo;
9748     size_t count = 0;
9749     bool limitedDevice = false;
9750     char prop[PROPERTY_VALUE_MAX];
9751     bool supportBurst = false;
9752     Vector<int32_t> available_characteristics_keys;
9753 
9754     supportBurst = supportBurstCapture(cameraId);
9755 
9756     /* If sensor is YUV sensor (no raw support) or if per-frame control is not
9757      * guaranteed or if min fps of max resolution is less than 20 fps, its
9758      * advertised as limited device*/
9759     limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9760             (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9761             (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9762             !supportBurst;
9763 
9764     uint8_t supportedHwLvl = limitedDevice ?
9765             ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
9766 #ifndef USE_HAL_3_3
9767             // LEVEL_3 - This device will support level 3.
9768             ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9769 #else
9770             ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
9771 #endif
9772 
9773     staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9774             &supportedHwLvl, 1);
9775 
9776     bool facingBack = false;
9777     if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9778             (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9779         facingBack = true;
9780     }
9781     /*HAL 3 only*/
9782     staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9783                     &gCamCapability[cameraId]->min_focus_distance, 1);
9784 
9785     staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9786                     &gCamCapability[cameraId]->hyper_focal_distance, 1);
9787 
9788     /*should be using focal lengths but sensor doesn't provide that info now*/
9789     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9790                       &gCamCapability[cameraId]->focal_length,
9791                       1);
9792 
9793     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9794             gCamCapability[cameraId]->apertures,
9795             MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9796 
9797     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9798             gCamCapability[cameraId]->filter_densities,
9799             MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9800 
9801 
9802     uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9803     size_t mode_count =
9804         MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9805     for (size_t i = 0; i < mode_count; i++) {
9806       available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9807     }
9808     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
9809             available_opt_stab_modes, mode_count);
9810 
9811     int32_t lens_shading_map_size[] = {
9812             MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9813             MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9814     staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9815                       lens_shading_map_size,
9816                       sizeof(lens_shading_map_size)/sizeof(int32_t));
9817 
9818     // Lens calibration for MOTION_TRACKING, back camera only
9819     if (cameraId == 0) {
9820 
9821         float poseRotation[4] = {1.0f, 0.f, 0.f, 0.f}; // quaternion rotation
9822         float poseTranslation[3] = {0.0f, 0.f, 0.f}; // xyz translation, meters
9823         uint8_t poseReference = ANDROID_LENS_POSE_REFERENCE_GYROSCOPE;
9824         // TODO: b/70565622 - these should have better identity values as a fallback
9825         float cameraIntrinsics[5] = {100.f, 100.f, 0.f, 1000, 1000}; // fx,fy,sx,cx,cy
9826         float radialDistortion[5] = {0.f, 0.f, 0.f, 0.f, 0.f}; // identity
9827 
9828         bool success = readSensorCalibration(
9829                 gCamCapability[cameraId]->active_array_size.width,
9830                 poseRotation, poseTranslation, cameraIntrinsics, radialDistortion);
9831         if (!success) {
9832             ALOGE("Using identity lens calibration values");
9833         }
9834         staticInfo.update(ANDROID_LENS_POSE_ROTATION,
9835                 poseRotation, sizeof(poseRotation)/sizeof(float));
9836         staticInfo.update(ANDROID_LENS_POSE_TRANSLATION,
9837                 poseTranslation, sizeof(poseTranslation)/sizeof(float));
9838         staticInfo.update(ANDROID_LENS_INTRINSIC_CALIBRATION,
9839                 cameraIntrinsics, sizeof(cameraIntrinsics)/sizeof(float));
9840         staticInfo.update(ANDROID_LENS_DISTORTION,
9841                 radialDistortion, sizeof(radialDistortion)/sizeof(float));
9842         staticInfo.update(ANDROID_LENS_POSE_REFERENCE,
9843                 &poseReference, sizeof(poseReference));
9844     }
9845 
9846     staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9847             gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9848 
9849     staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9850             gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9851 
9852     staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9853             &gCamCapability[cameraId]->max_frame_duration, 1);
9854 
9855     camera_metadata_rational baseGainFactor = {
9856             gCamCapability[cameraId]->base_gain_factor.numerator,
9857             gCamCapability[cameraId]->base_gain_factor.denominator};
9858     staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9859                       &baseGainFactor, 1);
9860 
9861     staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9862                      (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9863 
9864     int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9865             gCamCapability[cameraId]->pixel_array_size.height};
9866     staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9867                       pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9868 
9869     int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9870             gCamCapability[cameraId]->active_array_size.top,
9871             gCamCapability[cameraId]->active_array_size.width,
9872             gCamCapability[cameraId]->active_array_size.height};
9873     staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9874             active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9875 
9876     staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9877             &gCamCapability[cameraId]->white_level, 1);
9878 
9879     int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9880     adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9881             gCamCapability[cameraId]->color_arrangement);
9882     staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
9883             adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
9884 
9885 #ifndef USE_HAL_3_3
9886     bool hasBlackRegions = false;
9887     if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9888         LOGW("black_region_count: %d is bounded to %d",
9889             gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9890         gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9891     }
9892     if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9893         int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9894         for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9895             opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9896         }
9897         staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9898                 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9899         hasBlackRegions = true;
9900     }
9901 #endif
9902     staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9903             &gCamCapability[cameraId]->flash_charge_duration, 1);
9904 
9905     staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9906             &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9907 
9908     uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9909             ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9910             ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
9911     staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9912             &timestampSource, 1);
9913 
9914     //update histogram vendor data
9915     staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
9916             &gCamCapability[cameraId]->histogram_size, 1);
9917 
9918     staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
9919             &gCamCapability[cameraId]->max_histogram_count, 1);
9920 
9921     //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9922     //so that app can request fewer number of bins than the maximum supported.
9923     std::vector<int32_t> histBins;
9924     int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9925     histBins.push_back(maxHistBins);
9926     while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9927            (maxHistBins & 0x1) == 0) {
9928         histBins.push_back(maxHistBins >> 1);
9929         maxHistBins >>= 1;
9930     }
9931     staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9932             histBins.data(), histBins.size());
9933     if (!histBins.empty()) {
9934         available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS);
9935     }
9936 
9937     int32_t sharpness_map_size[] = {
9938             gCamCapability[cameraId]->sharpness_map_size.width,
9939             gCamCapability[cameraId]->sharpness_map_size.height};
9940 
9941     staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9942             sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9943 
9944     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9945             &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9946 
9947     int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9948     if (0 <= indexPD) {
9949         // Advertise PD stats data as part of the Depth capabilities
9950         int32_t depthWidth =
9951                 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
        // Depth (PDAF) static metadata: dimensions come from the raw meta
        // plane at indexPD; stride assumes 2 bytes per pixel.
        int32_t depthHeight =
                gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
        int32_t depthStride =
                gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
        // NOTE(review): sample-count formula packs (w * h * 2) bytes into
        // 16-byte units — confirm against the actual PDAF buffer layout.
        int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
        assert(0 < depthSamplesCount);
        staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
                &depthSamplesCount, 1);

        // Entries are (format, width, height, direction): one RAW16 depth
        // stream plus one BLOB point-cloud stream sized by the sample count.
        int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
                depthHeight,
                ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
                HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
                ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
        staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
                depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));

        // (format, width, height, duration_ns): both depth streams advertise
        // a 33333333 ns (30 fps) minimum frame duration.
        int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
                depthHeight, 33333333,
                HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
        staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
                depthMinDuration,
                sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));

        // Zero stall duration for both depth streams.
        int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
                depthHeight, 0,
                HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
        staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
                depthStallDuration,
                sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));

        // Depth is not exclusive, i.e. it may be combined with color streams.
        uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
        staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);

        //RAW16 depth format doesn't require inefficient memory copy, recommend
        //only this depth format. The format itself is not public so it won't be
        //possible to advertise in the RAW use case. Use snapshot for now.
        // Entry layout: (width, height, format, direction, usecase bitmap).
        int32_t recommendedDepthConfigs[] = {depthWidth, depthHeight, HAL_PIXEL_FORMAT_RAW16,
            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
            1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_SNAPSHOT};
        staticInfo.update(ANDROID_DEPTH_AVAILABLE_RECOMMENDED_DEPTH_STREAM_CONFIGURATIONS,
                recommendedDepthConfigs,
                sizeof(recommendedDepthConfigs) / sizeof(recommendedDepthConfigs[0]));
        available_characteristics_keys.add(
                ANDROID_DEPTH_AVAILABLE_RECOMMENDED_DEPTH_STREAM_CONFIGURATIONS);

        // Vendor tag: PD data plane dimensions (width, height, stride).
        int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
        staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
                pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
        available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS);

        // EEPROM PDAF calibration data, published as raw byte arrays.
        staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_RIGHT_GAINS,
                reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.right_gain_map),
                sizeof(gCamCapability[cameraId]->pdaf_cal.right_gain_map));
        available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_RIGHT_GAINS);

        staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_LEFT_GAINS,
                reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.left_gain_map),
                sizeof(gCamCapability[cameraId]->pdaf_cal.left_gain_map));
        available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_LEFT_GAINS);

        staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_CONV_COEFF,
                reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.conversion_coeff),
                sizeof(gCamCapability[cameraId]->pdaf_cal.conversion_coeff));
        available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_CONV_COEFF);
    }
10018 
10019 
    // EEPROM white-balance calibration: per-light R/G and B/G ratios plus a
    // single GR/GB ratio.
    staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_NUM_LIGHTS,
        &(gCamCapability[cameraId]->wb_cal.num_lights), 1);
    available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_NUM_LIGHTS);

    const int32_t num_lights = gCamCapability[cameraId]->wb_cal.num_lights;
    staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_R_OVER_G_RATIOS,
        gCamCapability[cameraId]->wb_cal.r_over_g, num_lights);
    available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_R_OVER_G_RATIOS);

    staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_B_OVER_G_RATIOS,
        gCamCapability[cameraId]->wb_cal.b_over_g, num_lights);
    available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_B_OVER_G_RATIOS);

    staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_GR_OVER_GB_RATIO,
        &(gCamCapability[cameraId]->wb_cal.gr_over_gb), 1);
    available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_GR_OVER_GB_RATIO);

    // Pixel formats advertised to the framework; this list also drives the
    // stream-configuration and frame-duration loops later in this function.
    int32_t scalar_formats[] = {
            ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
            ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
            ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
            ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
            HAL_PIXEL_FORMAT_RAW10,
            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
            HAL_PIXEL_FORMAT_Y8};
    size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
            scalar_formats_count);
10048 
    // Flatten the backend picture-size table into (w, h) pairs; count is
    // clamped to MAX_SIZES_CNT so the fixed-size array cannot overflow.
    int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
            count, MAX_SIZES_CNT, available_processed_sizes);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
            available_processed_sizes, count * 2);

    // Same flattening for the supported RAW dimensions.
    int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
    count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
    makeTable(gCamCapability[cameraId]->raw_dim,
            count, MAX_SIZES_CNT, available_raw_sizes);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
            available_raw_sizes, count * 2);

    // AE target FPS ranges as (min, max) pairs from the backend table.
    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
    count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
            count, MAX_SIZES_CNT, available_fps_ranges);
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
            available_fps_ranges, count * 2);

    // Exposure-compensation step as a rational, straight from capability.
    camera_metadata_rational exposureCompensationStep = {
            gCamCapability[cameraId]->exp_compensation_step.numerator,
            gCamCapability[cameraId]->exp_compensation_step.denominator};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
                      &exposureCompensationStep, 1);
10075 
    // Video stabilization: OFF is always advertised; ON is added only for the
    // back camera when the backend supports EIS 2.0/3.0 and the feature is
    // not disabled via the persist property (enabled by default).
    Vector<uint8_t> availableVstabModes;
    availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
    char eis_prop[PROPERTY_VALUE_MAX];
    bool eisSupported = false;
    memset(eis_prop, 0, sizeof(eis_prop));
    property_get("persist.camera.eis.enable", eis_prop, "1");
    uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
    count = IS_TYPE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
    for (size_t i = 0; i < count; i++) {
        if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
            (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
            eisSupported = true;
            break;
        }
    }
    if (facingBack && eis_prop_set && eisSupported) {
        availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
                      availableVstabModes.array(), availableVstabModes.size());
10097 
10098     /*HAL 1 and HAL 3 common*/
10099     uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
10100     uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
10101     uint32_t minZoomStep = 100; //as per HAL1/API1 spec
10102     // Cap the max zoom to the max preferred value
10103     float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
10104     staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10105             &maxZoom, 1);
10106 
    // This HAL only supports center-only cropping.
    uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
    staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);

    // Max metering regions: 1 AE, 0 AWB, 1 AF. AF regions are dropped when
    // the backend reports exactly one focus mode (fixed focus).
    int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
    if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
        max3aRegions[2] = 0; /* AF not supported */
    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
            max3aRegions, 3);
10115 
    /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.facedetect", prop, "1");
    uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
    LOGD("Support face detection mode: %d",
             supportedFaceDetectMode);

    int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
    /* support mode should be OFF if max number of face is 0 */
    if (maxFaces <= 0) {
        supportedFaceDetectMode = 0;
    }
    // OFF is always available; SIMPLE/FULL are added per the property value
    // mapping documented above. Any other value forces OFF-only and zero
    // reported faces.
    Vector<uint8_t> availableFaceDetectModes;
    availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
    if (supportedFaceDetectMode == 1) {
        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
    } else if (supportedFaceDetectMode == 2) {
        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
    } else if (supportedFaceDetectMode == 3) {
        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
    } else {
        maxFaces = 0;
    }
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
            availableFaceDetectModes.array(),
            availableFaceDetectModes.size());
    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
            (int32_t *)&maxFaces, 1);
    // Vendor tag: whether blink/smile/gaze classification is available.
    uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
    staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
            &face_bsgc, 1);
10148 
    // AE exposure-compensation range [min, max] in steps of the rational
    // step size published earlier in this function.
    int32_t exposureCompensationRange[] = {
            gCamCapability[cameraId]->exposure_compensation_min,
            gCamCapability[cameraId]->exposure_compensation_max};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
            exposureCompensationRange,
            sizeof(exposureCompensationRange)/sizeof(int32_t));

    uint8_t lensFacing = (facingBack) ?
            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);

    // available_thumbnail_sizes is a file-level table of (w, h) pairs.
    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
                      available_thumbnail_sizes,
                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
10163 
    /*all sizes will be clubbed into this tag*/
    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
    /*android.scaler.availableStreamConfigurations*/
    // available_stream_configs: flat (format, w, h, direction) tuples for the
    // public tag. stream_configs mirrors the same entries as config_entry
    // tuples so membership can be checked cheaply. suggested_configs maps a
    // config_entry to a bitmap of recommended-usecase flags, filled below.
    Vector<int32_t> available_stream_configs;
    std::vector<config_entry> stream_configs;
    std::unordered_map<config_entry, int32_t, ConfigEntryHash> suggested_configs;
    int32_t suggested_proc_formats[] = {
        ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
    size_t suggested_formats_count = sizeof(suggested_proc_formats) /
        sizeof(suggested_proc_formats[0]);
    cam_dimension_t active_array_dim;
    active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
    active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;

    /*advertise list of input dimensions supported based on below property.
    By default all sizes upto 5MP will be advertised.
    Note that the setprop resolution format should be WxH.
    e.g: adb shell setprop persist.camera.input.minsize 1280x720
    To list all supported sizes, setprop needs to be set with "0x0" */
    cam_dimension_t minInputSize = {2592,1944}; //5MP
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.input.minsize", prop, "2592x1944");
    if (strlen(prop) > 0) {
        // Parse "WxH"; a missing or malformed component leaves the 5MP
        // default for that dimension (atoi on junk yields 0, which widens
        // the advertised input set — intended per the "0x0" note above).
        char *saveptr = NULL;
        char *token = strtok_r(prop, "x", &saveptr);
        if (token != NULL) {
            minInputSize.width = atoi(token);
        }
        token = strtok_r(NULL, "x", &saveptr);
        if (token != NULL) {
            minInputSize.height = atoi(token);
        }
    }
10198 
    // Recommended-usecase bitmaps, OR-ed into suggested_configs per entry.
    int32_t raw_usecase =
            1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_RAW;
    int32_t zsl_snapshot_usecase =
            (1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_SNAPSHOT) |
            (1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_ZSL);
    int32_t zsl_usecase =
            1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_ZSL;
    /* Add input/output stream configurations for each scalar formats*/
    for (size_t j = 0; j < scalar_formats_count; j++) {
        switch (scalar_formats[j]) {
        // RAW formats use the raw dimension table; RAW10 and opaque RAW are
        // additionally recommended for the RAW usecase.
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        gCamCapability[cameraId]->raw_dim[i],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                config_entry entry(gCamCapability[cameraId]->raw_dim[i].width,
                        gCamCapability[cameraId]->raw_dim[i].height, scalar_formats[j],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                stream_configs.push_back(entry);
                if ((scalar_formats[j] == HAL_PIXEL_FORMAT_RAW10) ||
                        (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE)) {
                    suggested_configs[entry] |= raw_usecase;
                }
            }
            break;
        // JPEG (BLOB) uses the picture-size table; recommended for both
        // snapshot and ZSL usecases.
        case HAL_PIXEL_FORMAT_BLOB:
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        gCamCapability[cameraId]->picture_sizes_tbl[i],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                stream_configs.push_back(config_entry(
                            gCamCapability[cameraId]->picture_sizes_tbl[i].width,
                            gCamCapability[cameraId]->picture_sizes_tbl[i].height,
                            scalar_formats[j],
                            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT));
                config_entry entry(gCamCapability[cameraId]->picture_sizes_tbl[i].width,
                        gCamCapability[cameraId]->picture_sizes_tbl[i].height, scalar_formats[j],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                suggested_configs[entry] |= zsl_snapshot_usecase;
            }
            break;
        // Processed/YUV formats use the picture-size table as outputs and,
        // for the largest size (i == 0) at or above minInputSize, also as
        // reprocess inputs recommended for ZSL.
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        case HAL_PIXEL_FORMAT_Y8:
        default:
            cam_dimension_t largest_picture_size;
            memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        gCamCapability[cameraId]->picture_sizes_tbl[i],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                config_entry entry(gCamCapability[cameraId]->picture_sizes_tbl[i].width,
                        gCamCapability[cameraId]->picture_sizes_tbl[i].height,
                        scalar_formats[j],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                stream_configs.push_back(entry);
                suggested_configs[entry] |= zsl_snapshot_usecase;
                /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
                // NOTE(review): i == 0 assumes the picture-size table is
                // sorted largest-first — confirm against the backend contract.
                if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
                        scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
                        scalar_formats[j] == HAL_PIXEL_FORMAT_Y8) && i == 0) {
                     if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
                            >= minInputSize.width) || (gCamCapability[cameraId]->
                            picture_sizes_tbl[i].height >= minInputSize.height)) {
                         addStreamConfig(available_stream_configs, scalar_formats[j],
                                 gCamCapability[cameraId]->picture_sizes_tbl[i],
                                 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
                         config_entry entry(
                                 gCamCapability[cameraId]->picture_sizes_tbl[i].width,
                                 gCamCapability[cameraId]->picture_sizes_tbl[i].height,
                                 scalar_formats[j],
                                 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
                         suggested_configs[entry] |= zsl_usecase;
                     }
                }
            }

            break;
        }
    }

    staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
                      available_stream_configs.array(), available_stream_configs.size());
10287 
    // Tag entries already collected in stream_configs with additional
    // recommended usecases: PREVIEW for preview-table sizes, RECORD for
    // video-table sizes, VIDEO_SNAPSHOT for livesnapshot JPEG sizes. Only
    // entries actually advertised above are tagged (the find() guard).
    int32_t preview_usecase =
            1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_PREVIEW;
    for (size_t i = 0; i < gCamCapability[cameraId]->preview_sizes_tbl_cnt; i++) {
        for (size_t j = 0; j < suggested_formats_count; j++) {
            config_entry entry(gCamCapability[cameraId]->preview_sizes_tbl[i].width,
                    gCamCapability[cameraId]->preview_sizes_tbl[i].height,
                    suggested_proc_formats[j],
                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
            if (std::find(stream_configs.begin(), stream_configs.end(), entry) !=
                    stream_configs.end()) {
                suggested_configs[entry] |= preview_usecase;
            }
        }
    }

    int32_t record_usecase =
            1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_RECORD;
    for (size_t i = 0; i < gCamCapability[cameraId]->video_sizes_tbl_cnt; i++) {
        for (size_t j = 0; j < suggested_formats_count; j++) {
            config_entry entry(gCamCapability[cameraId]->video_sizes_tbl[i].width,
                    gCamCapability[cameraId]->video_sizes_tbl[i].height,
                    suggested_proc_formats[j],
                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
            if (std::find(stream_configs.begin(), stream_configs.end(), entry) !=
                    stream_configs.end()) {
                suggested_configs[entry] |= record_usecase;
            }
        }
    }

    int32_t video_snapshot_usecase =
            1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_VIDEO_SNAPSHOT;
    for (size_t i = 0; i < gCamCapability[cameraId]->livesnapshot_sizes_tbl_cnt; i++) {
        config_entry entry(gCamCapability[cameraId]->livesnapshot_sizes_tbl[i].width,
                gCamCapability[cameraId]->livesnapshot_sizes_tbl[i].height,
                HAL_PIXEL_FORMAT_BLOB,
                ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
        if (std::find(stream_configs.begin(), stream_configs.end(), entry) !=
                stream_configs.end()) {
            suggested_configs[entry] |= video_snapshot_usecase;
        }
    }

    // Serialize the map into flat 5-tuples:
    // (width, height, format, direction, usecase bitmap).
    std::vector<int32_t> suggested_array;
    suggested_array.reserve(suggested_configs.size() * 5);
    for (const auto &it : suggested_configs) {
        suggested_array.push_back(std::get<0>(it.first));
        suggested_array.push_back(std::get<1>(it.first));
        suggested_array.push_back(std::get<2>(it.first));
        suggested_array.push_back(std::get<3>(it.first));
        suggested_array.push_back(it.second);
    }

    staticInfo.update(ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS,
            suggested_array.data(), suggested_array.size());
10343 
    /* android.scaler.availableMinFrameDurations */
    // Flat (format, width, height, duration_ns) tuples. RAW formats pair the
    // raw dimension table with raw_min_duration; every other format pairs the
    // picture-size table with picture_min_duration.
    Vector<int64_t> available_min_durations;
    for (size_t j = 0; j < scalar_formats_count; j++) {
        switch (scalar_formats[j]) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
                available_min_durations.add(scalar_formats[j]);
                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
                available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
            }
            break;
        default:
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
                available_min_durations.add(scalar_formats[j]);
                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
                available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
            }
            break;
        }
    }
    staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
                      available_min_durations.array(), available_min_durations.size());
10372 
    // High-speed (HFR) video configurations. Map each backend HFR mode to a
    // nominal fps, then emit two (w, h, fps_min, fps_max, batch_size) tuples
    // per supported dimension (see the comment inside the loop).
    Vector<int32_t> available_hfr_configs;
    for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
        int32_t fps = 0;
        switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
        case CAM_HFR_MODE_60FPS:
            fps = 60;
            break;
        case CAM_HFR_MODE_90FPS:
            fps = 90;
            break;
        case CAM_HFR_MODE_120FPS:
            fps = 120;
            break;
        case CAM_HFR_MODE_150FPS:
            fps = 150;
            break;
        case CAM_HFR_MODE_180FPS:
            fps = 180;
            break;
        case CAM_HFR_MODE_210FPS:
            fps = 210;
            break;
        case CAM_HFR_MODE_240FPS:
            fps = 240;
            break;
        case CAM_HFR_MODE_480FPS:
            fps = 480;
            break;
        case CAM_HFR_MODE_OFF:
        case CAM_HFR_MODE_MAX:
        default:
            // fps stays 0 and the mode is skipped by the check below.
            break;
        }

        /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
        if (fps >= MIN_FPS_FOR_BATCH_MODE) {
            /* For each HFR frame rate, need to advertise one variable fps range
             * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
             * and [120, 120]. While camcorder preview alone is running [30, 120] is
             * set by the app. When video recording is started, [120, 120] is
             * set. This way sensor configuration does not change when recording
             * is started */

            /* (width, height, fps_min, fps_max, batch_size_max) */
            for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
                j < MAX_SIZES_CNT; j++) {
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
                available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
                available_hfr_configs.add(fps);
                available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);

                /* (width, height, fps_min, fps_max, batch_size_max) */
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
                available_hfr_configs.add(fps);
                available_hfr_configs.add(fps);
                available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
            }
       }
    }
    //Advertise HFR capability only if the property is set
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.hal3hfr.enable", prop, "1");
    uint8_t hfrEnable = (uint8_t)atoi(prop);

    // NOTE(review): array() is used as an "is non-empty" check here —
    // size() > 0 would state the intent directly; confirm array() is null
    // for an empty Vector in this utils implementation.
    if(hfrEnable && available_hfr_configs.array()) {
        staticInfo.update(
                ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
                available_hfr_configs.array(), available_hfr_configs.size());
    }
10448 
    // Largest possible JPEG blob for this camera, computed by a helper.
    int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
                      &max_jpeg_size, 1);

    // Map backend effect enums to framework values, skipping unmapped ones.
    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
    size_t size = 0;
    count = CAM_EFFECT_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
                gCamCapability[cameraId]->supported_effects[i]);
        if (NAME_NOT_FOUND != val) {
            avail_effects[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
                      avail_effects,
                      size);
10468 
10469     uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
10470     uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
10471     size_t supported_scene_modes_cnt = 0;
10472     count = CAM_SCENE_MODE_MAX;
10473     count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
10474     for (size_t i = 0; i < count; i++) {
10475         if (gCamCapability[cameraId]->supported_scene_modes[i] !=
10476                 CAM_SCENE_MODE_OFF) {
10477             int val = lookupFwkName(SCENE_MODES_MAP,
10478                     METADATA_MAP_SIZE(SCENE_MODES_MAP),
10479                     gCamCapability[cameraId]->supported_scene_modes[i]);
10480 
10481             if (NAME_NOT_FOUND != val) {
10482                 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
10483                 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
10484                 supported_scene_modes_cnt++;
10485             }
10486         }
10487     }
10488     staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10489                       avail_scene_modes,
10490                       supported_scene_modes_cnt);
10491 
10492     uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX  * 3];
10493     makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
10494                       supported_scene_modes_cnt,
10495                       CAM_SCENE_MODE_MAX,
10496                       scene_mode_overrides,
10497                       supported_indexes,
10498                       cameraId);
10499 
10500     if (supported_scene_modes_cnt == 0) {
10501         supported_scene_modes_cnt = 1;
10502         avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
10503     }
10504 
10505     staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
10506             scene_mode_overrides, supported_scene_modes_cnt * 3);
10507 
    // Fixed list of top-level 3A control modes.
    uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
                                         ANDROID_CONTROL_MODE_AUTO,
                                         ANDROID_CONTROL_MODE_USE_SCENE_MODE};
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
            available_control_modes,
            3);

    // Map backend antibanding enums to framework values, skipping unmapped.
    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
    size = 0;
    count = CAM_ANTIBANDING_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
                gCamCapability[cameraId]->supported_antibandings[i]);
        if (NAME_NOT_FOUND != val) {
            avail_antibanding_modes[size] = (uint8_t)val;
            size++;
        }

    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
                      avail_antibanding_modes,
                      size);
10531 
    // Chromatic aberration correction modes.
    uint8_t avail_abberation_modes[] = {
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
    count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
    count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
    if (0 == count) {
        //  If no aberration correction modes are available for a device, this advertise OFF mode
        size = 1;
    } else {
        // If count is not zero then atleast one among the FAST or HIGH quality is supported
        // So, advertize all 3 modes if atleast any one mode is supported as per the
        // new M requirement
        size = 3;
    }
    staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
            avail_abberation_modes,
            size);
10550 
    // Map backend focus modes to framework AF modes, skipping unmapped ones.
    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
    size = 0;
    count = CAM_FOCUS_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                gCamCapability[cameraId]->supported_focus_modes[i]);
        if (NAME_NOT_FOUND != val) {
            avail_af_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
                      avail_af_modes,
                      size);

    // Map backend white-balance modes to framework AWB modes.
    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
    size = 0;
    count = CAM_WB_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                gCamCapability[cameraId]->supported_white_balances[i]);
        if (NAME_NOT_FOUND != val) {
            avail_awb_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
                      avail_awb_modes,
                      size);
10583 
    // Copy the backend's supported flash firing levels verbatim (no framework
    // translation table exists for these values).
    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
    count = CAM_FLASH_FIRING_LEVEL_MAX;
    count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
            count);
    for (size_t i = 0; i < count; i++) {
        available_flash_levels[i] =
                gCamCapability[cameraId]->supported_firing_levels[i];
    }
    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
            available_flash_levels, count);

    // flashAvailable is reused below when deciding which AE modes to expose.
    uint8_t flashAvailable;
    if (gCamCapability[cameraId]->flash_available)
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
    else
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
            &flashAvailable, 1);
10602 
    // Build the AE mode list: backend modes first (with the external-flash
    // value remapped to the framework enum), then flash-dependent modes.
    Vector<uint8_t> avail_ae_modes;
    count = CAM_AE_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
        if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
            aeMode = ANDROID_CONTROL_AE_MODE_ON_EXTERNAL_FLASH;
        }
        avail_ae_modes.add(aeMode);
    }
    // Auto/always flash AE modes only make sense when a flash unit exists.
    if (flashAvailable) {
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
                      avail_ae_modes.array(),
                      avail_ae_modes.size());
10620 
    // Overall sensitivity (ISO) range: {min, max} from capability data.
    int32_t sensitivity_range[2];
    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
                      sensitivity_range,
                      sizeof(sensitivity_range) / sizeof(int32_t));

    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
                      &gCamCapability[cameraId]->max_analog_sensitivity,
                      1);

    // The sensor mount angle doubles as the advertised sensor orientation.
    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
                      &sensor_orientation,
                      1);

    // NOTE(review): the framework documents this tag's order as
    // {raw, processed, stalling}; here stalling is listed first — confirm the
    // MAX_* ordering is intentional for this HAL.
    int32_t max_output_streams[] = {
            MAX_STALLING_STREAMS,
            MAX_PROCESSED_STREAMS,
            MAX_RAW_STREAMS};
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
            max_output_streams,
            sizeof(max_output_streams)/sizeof(max_output_streams[0]));

    // No app-controllable LEDs: publish the tag with a count of zero.
    uint8_t avail_leds = 0;
    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
                      &avail_leds, 0);

    // Publish focus-distance calibration only if it maps to a framework enum.
    uint8_t focus_dist_calibrated;
    int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
            gCamCapability[cameraId]->focus_dist_calibrated);
    if (NAME_NOT_FOUND != val) {
        focus_dist_calibrated = (uint8_t)val;
        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
                     &focus_dist_calibrated, 1);
    }
10657 
    // Translate backend test-pattern modes to framework values, dropping any
    // without a framework mapping.
    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
    size = 0;
    count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
            MAX_TEST_PATTERN_CNT);
    for (size_t i = 0; i < count; i++) {
        int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
                gCamCapability[cameraId]->supported_test_pattern_modes[i]);
        if (NAME_NOT_FOUND != testpatternMode) {
            avail_testpattern_modes[size] = testpatternMode;
            size++;
        }
    }
    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
                      avail_testpattern_modes,
                      size);

    // Worst-case request pipeline depth = in-flight requests plus the fixed
    // empty-pipeline and frame-skip delays.
    uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
                      &max_pipeline_depth,
                      1);

    int32_t partial_result_count = PARTIAL_RESULT_COUNT;
    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
                      &partial_result_count,
                       1);

    // Maximum extra frames a reprocess capture may stall the pipeline.
    int32_t max_stall_duration = MAX_REPROCESS_STALL;
    staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
10686 
    // Advertised device capabilities. The base set is unconditional; the rest
    // depend on burst support, HFR configs, sensor type, and camera id.
    Vector<uint8_t> available_capabilities;
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
    if (supportBurst) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
    }
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
    // Constrained high-speed video requires HFR enabled AND at least one
    // published HFR configuration.
    if (hfrEnable && available_hfr_configs.array()) {
        available_capabilities.add(
                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
    }

    // RAW capability applies to any non-YUV sensor.
    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
    }
    // Only back camera supports MOTION_TRACKING
    if (cameraId == 0) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MOTION_TRACKING);
    }

    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
            available_capabilities.array(),
            available_capabilities.size());

    //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
    //Assumption is that all bayer cameras support MANUAL_SENSOR.
    uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
            &aeLockAvailable, 1);

    //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
    //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
    uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
            &awbLockAvailable, 1);

    // A single reprocess input stream is supported.
    int32_t max_input_streams = 1;
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
                      &max_input_streams,
                      1);
10734 
    /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
    // Three input formats are declared: IMPLEMENTATION_DEFINED -> {BLOB,
    // YCbCr_420_888, Y8}; YCbCr_420_888 -> {BLOB, YCbCr_420_888};
    // Y8 -> {BLOB, Y8}.
    int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 3,
            HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
            HAL_PIXEL_FORMAT_Y8,
            HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
            HAL_PIXEL_FORMAT_YCbCr_420_888,
            HAL_PIXEL_FORMAT_Y8, 2,
            HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_Y8};
    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
                      io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));

    // The recommended map mirrors the full map exactly.
    staticInfo.update(ANDROID_SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP,
                      io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));

    int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
                      &max_latency,
                      1);

#ifndef USE_HAL_3_3
    // Post-RAW sensitivity boost range (ISP digital gain), HAL 3.4+ only.
    int32_t isp_sensitivity_range[2];
    isp_sensitivity_range[0] =
        gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
    isp_sensitivity_range[1] =
        gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
                      isp_sensitivity_range,
                      sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
#endif
10764 
    // The following mode lists are fixed (not derived from capability data).
    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
                                           ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
            available_hot_pixel_modes,
            sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));

    uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
                                         ANDROID_SHADING_MODE_FAST,
                                         ANDROID_SHADING_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
                      available_shading_modes,
                      3);

    uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
                                                  ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
                      available_lens_shading_map_modes,
                      2);

    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
                                      ANDROID_EDGE_MODE_FAST,
                                      ANDROID_EDGE_MODE_HIGH_QUALITY,
                                      ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
            available_edge_modes,
            sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));

    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
                                           ANDROID_NOISE_REDUCTION_MODE_FAST,
                                           ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
                                           ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
                                           ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
            available_noise_red_modes,
            sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));

    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
                                         ANDROID_TONEMAP_MODE_FAST,
                                         ANDROID_TONEMAP_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
            available_tonemap_modes,
            sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));

    // Hot-pixel map output is not supported; only OFF is advertised.
    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
            available_hot_pixel_map_modes,
            sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10812 
    // Reference illuminants are published only when they translate to a
    // framework enum value.
    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
            gCamCapability[cameraId]->reference_illuminant1);
    if (NAME_NOT_FOUND != val) {
        uint8_t fwkReferenceIlluminant = (uint8_t)val;
        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
    }

    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
            gCamCapability[cameraId]->reference_illuminant2);
    if (NAME_NOT_FOUND != val) {
        uint8_t fwkReferenceIlluminant = (uint8_t)val;
        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
    }

    // Color matrices are stored in capability structs whose layout matches
    // camera_metadata_rational_t, hence the direct casts below.
    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix1,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix2,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform1,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform2,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform1,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform2,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10850 
#ifndef USE_HAL_3_3

    // Keys that may only be changed at session (stream-configuration) time,
    // HAL 3.4+ only.
    int32_t session_keys[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
        ANDROID_CONTROL_AE_TARGET_FPS_RANGE, QCAMERA3_INSTANT_AEC_MODE, QCAMERA3_USE_AV_TIMER,
        QCAMERA3_VIDEO_HDR_MODE, TANGO_MODE_DATA_SENSOR_FULLFOV};
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_SESSION_KEYS, session_keys,
            sizeof(session_keys) / sizeof(session_keys[0]));

#endif
10860 
    // Base set of keys an application may set in capture requests; conditional
    // keys are appended below before publishing.
    int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
       ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
       ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
       ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
       ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
       ANDROID_JPEG_GPS_COORDINATES,
       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
       ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
       ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
       ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
#ifndef USE_HAL_3_3
       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
#endif
       ANDROID_STATISTICS_FACE_DETECT_MODE,
       ANDROID_STATISTICS_SHARPNESS_MAP_MODE, ANDROID_STATISTICS_OIS_DATA_MODE,
       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
       ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
       QCAMERA3_PRIVATEDATA_REPROCESS, QCAMERA3_CDS_MODE, QCAMERA3_CDS_INFO,
       QCAMERA3_CROP_COUNT_REPROCESS, QCAMERA3_CROP_REPROCESS,
       QCAMERA3_CROP_ROI_MAP_REPROCESS, QCAMERA3_TEMPORAL_DENOISE_ENABLE,
       QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, QCAMERA3_USE_ISO_EXP_PRIORITY,
       QCAMERA3_SELECT_PRIORITY, QCAMERA3_USE_SATURATION,
       QCAMERA3_EXPOSURE_METER, QCAMERA3_USE_AV_TIMER,
       QCAMERA3_DUALCAM_LINK_ENABLE, QCAMERA3_DUALCAM_LINK_IS_MAIN,
       QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID,
       QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
       QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
       QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
       QCAMERA3_JPEG_ENCODE_CROP_ENABLE, QCAMERA3_JPEG_ENCODE_CROP_RECT,
       QCAMERA3_JPEG_ENCODE_CROP_ROI, QCAMERA3_VIDEO_HDR_MODE,
       QCAMERA3_IR_MODE, QCAMERA3_AEC_CONVERGENCE_SPEED,
       QCAMERA3_AWB_CONVERGENCE_SPEED, QCAMERA3_INSTANT_AEC_MODE,
       QCAMERA3_SHARPNESS_STRENGTH, QCAMERA3_HISTOGRAM_MODE,
       QCAMERA3_BINNING_CORRECTION_MODE,
       /* DevCamDebug metadata request_keys_basic */
       DEVCAMDEBUG_META_ENABLE,
       /* DevCamDebug metadata end */
       NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
       NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
       TANGO_MODE_DATA_SENSOR_FULLFOV,
       NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
       NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
       NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE,
       NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE,
       };

    size_t request_keys_cnt =
            sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
    Vector<int32_t> available_request_keys;
    available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
    // AF regions are only advertised when more than one focus mode exists.
    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
        available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
    }

    // ZSL-related keys are exposed only when gExposeEnableZslKey is set.
    if (gExposeEnableZslKey) {
        available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
        available_request_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW);
        available_request_keys.add(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE);
        available_request_keys.add(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS);
    }

    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
            available_request_keys.array(), available_request_keys.size());
10939 
    // Base set of keys that may appear in capture results; conditional keys
    // (AF regions, RAW-only, face-detect, HAL version, ZSL) are appended below.
    int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
       ANDROID_CONTROL_AF_SCENE_CHANGE,
       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
       ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
       ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
       ANDROID_STATISTICS_FACE_DETECT_MODE,
       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
       ANDROID_STATISTICS_FACE_SCORES, ANDROID_STATISTICS_OIS_DATA_MODE,
       ANDROID_STATISTICS_OIS_TIMESTAMPS, ANDROID_STATISTICS_OIS_X_SHIFTS,
       ANDROID_STATISTICS_OIS_Y_SHIFTS,
#ifndef USE_HAL_3_3
       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
#endif
       NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
       NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
       QCAMERA3_PRIVATEDATA_REPROCESS, QCAMERA3_CDS_MODE, QCAMERA3_CDS_INFO,
       QCAMERA3_CROP_COUNT_REPROCESS, QCAMERA3_CROP_REPROCESS,
       QCAMERA3_CROP_ROI_MAP_REPROCESS, QCAMERA3_TUNING_META_DATA_BLOB,
       QCAMERA3_TEMPORAL_DENOISE_ENABLE, QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE,
       QCAMERA3_EXPOSURE_METER, QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN,
       QCAMERA3_DUALCAM_LINK_ENABLE, QCAMERA3_DUALCAM_LINK_IS_MAIN,
       QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID,
       QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
       QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
       QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB, QCAMERA3_VIDEO_HDR_MODE,
       QCAMERA3_IR_MODE, QCAMERA3_AEC_CONVERGENCE_SPEED,
       QCAMERA3_AWB_CONVERGENCE_SPEED, QCAMERA3_INSTANT_AEC_MODE,
       QCAMERA3_HISTOGRAM_MODE, QCAMERA3_BINNING_CORRECTION_MODE,
       QCAMERA3_STATS_IS_HDR_SCENE, QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
       QCAMERA3_STATS_BLINK_DETECTED, QCAMERA3_STATS_BLINK_DEGREE,
       QCAMERA3_STATS_SMILE_DEGREE, QCAMERA3_STATS_SMILE_CONFIDENCE,
       QCAMERA3_STATS_GAZE_ANGLE, QCAMERA3_STATS_GAZE_DIRECTION,
       QCAMERA3_STATS_GAZE_DEGREE,
       // DevCamDebug metadata result_keys_basic
       DEVCAMDEBUG_META_ENABLE,
       // DevCamDebug metadata result_keys AF
       DEVCAMDEBUG_AF_LENS_POSITION,
       DEVCAMDEBUG_AF_TOF_CONFIDENCE,
       DEVCAMDEBUG_AF_TOF_DISTANCE,
       DEVCAMDEBUG_AF_LUMA,
       DEVCAMDEBUG_AF_HAF_STATE,
       DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
       DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
       DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
       DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
       DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
       DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
       DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
       DEVCAMDEBUG_AF_MONITOR_REFOCUS,
       DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
       DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
       DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
       DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
       DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
       DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
       DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
       DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
       DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
       // DevCamDebug metadata result_keys AEC
       DEVCAMDEBUG_AEC_TARGET_LUMA,
       DEVCAMDEBUG_AEC_COMP_LUMA,
       DEVCAMDEBUG_AEC_AVG_LUMA,
       DEVCAMDEBUG_AEC_CUR_LUMA,
       DEVCAMDEBUG_AEC_LINECOUNT,
       DEVCAMDEBUG_AEC_REAL_GAIN,
       DEVCAMDEBUG_AEC_EXP_INDEX,
       DEVCAMDEBUG_AEC_LUX_IDX,
       // DevCamDebug metadata result_keys zzHDR
       DEVCAMDEBUG_AEC_L_REAL_GAIN,
       DEVCAMDEBUG_AEC_L_LINECOUNT,
       DEVCAMDEBUG_AEC_S_REAL_GAIN,
       DEVCAMDEBUG_AEC_S_LINECOUNT,
       DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
       DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
       // DevCamDebug metadata result_keys ADRC
       DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
       DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
       DEVCAMDEBUG_AEC_GTM_RATIO,
       DEVCAMDEBUG_AEC_LTM_RATIO,
       DEVCAMDEBUG_AEC_LA_RATIO,
       DEVCAMDEBUG_AEC_GAMMA_RATIO,
       // DevCamDebug metadata result_keys AEC MOTION
       DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
       DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
       DEVCAMDEBUG_AEC_SUBJECT_MOTION,
       // DevCamDebug metadata result_keys AWB
       DEVCAMDEBUG_AWB_R_GAIN,
       DEVCAMDEBUG_AWB_G_GAIN,
       DEVCAMDEBUG_AWB_B_GAIN,
       DEVCAMDEBUG_AWB_CCT,
       DEVCAMDEBUG_AWB_DECISION,
       /* DevCamDebug metadata end */
       NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
       NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
       NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
       NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
       NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
       NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST,
       NEXUS_EXPERIMENTAL_2017_SCENE_DISTANCE,
       NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
       NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
       NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
       NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
       NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
       NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
       NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y,
       NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE,
       NEXUS_EXPERIMENTAL_2017_CAMERA_MOTION_X,
       NEXUS_EXPERIMENTAL_2017_CAMERA_MOTION_Y,
       NEXUS_EXPERIMENTAL_2017_SUBJECT_MOTION
       };

    size_t result_keys_cnt =
            sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);

    Vector<int32_t> available_result_keys;
    available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
    // AF regions are only reported when more than one focus mode exists.
    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
        available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
    }
    // RAW (bayer) sensors additionally report noise profile and green split.
    if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
        available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
        available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
    }
    // Face-detect result keys depend on the supported detect mode: mode 1
    // yields rectangles/scores, modes 2-3 add ids/landmarks instead.
    if (supportedFaceDetectMode == 1) {
        available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
        available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
    } else if ((supportedFaceDetectMode == 2) ||
            (supportedFaceDetectMode == 3)) {
        available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
        available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
    }
#ifndef USE_HAL_3_3
    // Dynamic black/white level results are HAL 3.4+ only.
    {
        available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
        available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
    }
#endif

    // ZSL-related result keys mirror the request-key gating above.
    if (gExposeEnableZslKey) {
        available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
        available_result_keys.add(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY);
        available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG);
        available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA);
    }

    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
            available_result_keys.array(), available_result_keys.size());
11108 
    // Static (characteristics) keys this HAL publishes. Initializer continues
    // beyond this point in the file.
    int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
       ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
       ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
       ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
       ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
       ANDROID_SCALER_CROPPING_TYPE,
       ANDROID_SYNC_MAX_LATENCY,
       ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
       ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
       ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
       ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
       ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
       ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
       ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
       ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
       ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
       ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
       ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
       ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
       ANDROID_LENS_FACING,
       ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
       ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
       ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
       ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
       ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
       /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
       ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
       ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
       ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
       ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
       ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
       ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
       ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
       ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
       ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
       ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
       ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
       ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
       ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
       ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
       ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
       ANDROID_EDGE_AVAILABLE_EDGE_MODES,
       ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
       ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
       ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
       ANDROID_TONEMAP_MAX_CURVE_POINTS,
       ANDROID_CONTROL_AVAILABLE_MODES,
       ANDROID_CONTROL_AE_LOCK_AVAILABLE,
       ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
       ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
       ANDROID_SHADING_AVAILABLE_MODES,
       ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
#ifndef USE_HAL_3_3
       ANDROID_SENSOR_OPAQUE_RAW_SIZE,
       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
#endif
       ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS,
       ANDROID_SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP,
11169        QCAMERA3_OPAQUE_RAW_FORMAT, QCAMERA3_EXP_TIME_RANGE,
11170        QCAMERA3_SATURATION_RANGE, QCAMERA3_SENSOR_IS_MONO_ONLY,
11171        QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
11172        QCAMERA3_SHARPNESS_RANGE,
11173        QCAMERA3_HISTOGRAM_BUCKETS, QCAMERA3_HISTOGRAM_MAX_COUNT,
11174        QCAMERA3_STATS_BSGC_AVAILABLE
11175        };
11176 
11177     available_characteristics_keys.appendArray(characteristics_keys_basic,
11178             sizeof(characteristics_keys_basic)/sizeof(int32_t));
11179 #ifndef USE_HAL_3_3
11180     if (hasBlackRegions) {
11181         available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
11182     }
11183 #endif
11184 
11185     if (cameraId == 0) {
11186         int32_t lensCalibrationKeys[] = {
11187             ANDROID_LENS_POSE_ROTATION,
11188             ANDROID_LENS_POSE_TRANSLATION,
11189             ANDROID_LENS_POSE_REFERENCE,
11190             ANDROID_LENS_INTRINSIC_CALIBRATION,
11191             ANDROID_LENS_DISTORTION,
11192         };
11193         available_characteristics_keys.appendArray(lensCalibrationKeys,
11194                 sizeof(lensCalibrationKeys) / sizeof(lensCalibrationKeys[0]));
11195     }
11196 
11197     if (0 <= indexPD) {
11198         int32_t depthKeys[] = {
11199                 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
11200                 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
11201                 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
11202                 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
11203                 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
11204         };
11205         available_characteristics_keys.appendArray(depthKeys,
11206                 sizeof(depthKeys) / sizeof(depthKeys[0]));
11207     }
11208 
11209     /*available stall durations depend on the hw + sw and will be different for different devices */
11210     /*have to add for raw after implementation*/
11211     int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
11212     size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
11213 
11214     Vector<int64_t> available_stall_durations;
11215     for (uint32_t j = 0; j < stall_formats_count; j++) {
11216         if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
11217             for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
11218                     gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
11219                 available_stall_durations.add(stall_formats[j]);
11220                 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
11221                 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
11222                 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
11223           }
11224         } else {
11225             for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
11226                     gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
11227                 available_stall_durations.add(stall_formats[j]);
11228                 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
11229                 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
11230                 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
11231             }
11232         }
11233     }
11234     staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
11235                       available_stall_durations.array(),
11236                       available_stall_durations.size());
11237 
11238     //QCAMERA3_OPAQUE_RAW
11239     uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
11240     cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
11241     switch (gCamCapability[cameraId]->opaque_raw_fmt) {
11242     case LEGACY_RAW:
11243         if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
11244             fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
11245         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
11246             fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
11247         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
11248             fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
11249         raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
11250         break;
11251     case MIPI_RAW:
11252         if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
11253             fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
11254         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
11255             fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
11256         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
11257             fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
11258         raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
11259         break;
11260     default:
11261         LOGE("unknown opaque_raw_format %d",
11262                 gCamCapability[cameraId]->opaque_raw_fmt);
11263         break;
11264     }
11265     staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
11266 
11267     Vector<int32_t> strides;
11268     for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
11269             gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
11270         cam_stream_buf_plane_info_t buf_planes;
11271         strides.add(gCamCapability[cameraId]->raw_dim[i].width);
11272         strides.add(gCamCapability[cameraId]->raw_dim[i].height);
11273         cam_stream_info_t info = {.fmt = fmt};
11274         mm_stream_calc_offset_raw(&info, &gCamCapability[cameraId]->raw_dim[i],
11275             &gCamCapability[cameraId]->padding_info, &buf_planes);
11276         strides.add(buf_planes.plane_info.mp[0].stride);
11277     }
11278 
11279     if (!strides.isEmpty()) {
11280         staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
11281                 strides.size());
11282         available_characteristics_keys.add(QCAMERA3_OPAQUE_RAW_STRIDES);
11283     }
11284 
11285     //TBD: remove the following line once backend advertises zzHDR in feature mask
11286     gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
11287     //Video HDR default
11288     if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
11289             (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
11290             CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
11291         int32_t vhdr_mode[] = {
11292                 QCAMERA3_VIDEO_HDR_MODE_OFF,
11293                 QCAMERA3_VIDEO_HDR_MODE_ON};
11294 
11295         size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
11296         staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
11297                     vhdr_mode, vhdr_mode_count);
11298         available_characteristics_keys.add(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES);
11299     }
11300 
11301     staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
11302             (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
11303             sizeof(gCamCapability[cameraId]->related_cam_calibration));
11304 
11305     uint8_t isMonoOnly =
11306             (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
11307     staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
11308             &isMonoOnly, 1);
11309 
11310 #ifndef USE_HAL_3_3
11311     Vector<int32_t> opaque_size;
11312     for (size_t j = 0; j < scalar_formats_count; j++) {
11313         if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
11314             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
11315                     gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
11316                 cam_stream_buf_plane_info_t buf_planes;
11317                 cam_stream_info_t info = {.fmt = fmt};
11318                 rc = mm_stream_calc_offset_raw(&info, &gCamCapability[cameraId]->raw_dim[i],
11319                          &gCamCapability[cameraId]->padding_info, &buf_planes);
11320 
11321                 if (rc == 0) {
11322                     opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
11323                     opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
11324                     opaque_size.add(buf_planes.plane_info.frame_len);
11325                 }else {
11326                     LOGE("raw frame calculation failed!");
11327                 }
11328             }
11329         }
11330     }
11331 
11332     if ((opaque_size.size() > 0) &&
11333             (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
11334         staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
11335     else
11336         LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
11337 #endif
11338 
11339     if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
11340         int32_t avail_ir_modes[CAM_IR_MODE_MAX];
11341         size = 0;
11342         count = CAM_IR_MODE_MAX;
11343         count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
11344         for (size_t i = 0; i < count; i++) {
11345             int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
11346                     gCamCapability[cameraId]->supported_ir_modes[i]);
11347             if (NAME_NOT_FOUND != val) {
11348                 avail_ir_modes[size] = (int32_t)val;
11349                 size++;
11350             }
11351         }
11352         staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
11353                 avail_ir_modes, size);
11354         available_characteristics_keys.add(QCAMERA3_IR_AVAILABLE_MODES);
11355     }
11356 
11357     if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
11358         uint8_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
11359         size = 0;
11360         count = CAM_AEC_CONVERGENCE_MAX;
11361         count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
11362         for (size_t i = 0; i < count; i++) {
11363             int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
11364                     gCamCapability[cameraId]->supported_instant_aec_modes[i]);
11365             if (NAME_NOT_FOUND != val) {
11366                 available_instant_aec_modes[size] = (uint8_t)val;
11367                 size++;
11368             }
11369         }
11370         staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
11371                 available_instant_aec_modes, size);
11372         available_characteristics_keys.add(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES);
11373     }
11374 
11375     int32_t sharpness_range[] = {
11376             gCamCapability[cameraId]->sharpness_ctrl.min_value,
11377             gCamCapability[cameraId]->sharpness_ctrl.max_value};
11378     staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
11379 
11380     if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
11381         int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
11382         size = 0;
11383         count = CAM_BINNING_CORRECTION_MODE_MAX;
11384         count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
11385         for (size_t i = 0; i < count; i++) {
11386             int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
11387                     METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
11388                     gCamCapability[cameraId]->supported_binning_modes[i]);
11389             if (NAME_NOT_FOUND != val) {
11390                 avail_binning_modes[size] = (int32_t)val;
11391                 size++;
11392             }
11393         }
11394         staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
11395                 avail_binning_modes, size);
11396         available_characteristics_keys.add(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES);
11397     }
11398 
11399     if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
11400         int32_t available_aec_modes[CAM_AEC_MODE_MAX];
11401         size = 0;
11402         count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
11403         for (size_t i = 0; i < count; i++) {
11404             int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
11405                     gCamCapability[cameraId]->supported_aec_modes[i]);
11406             if (NAME_NOT_FOUND != val)
11407                 available_aec_modes[size++] = val;
11408         }
11409         staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
11410                 available_aec_modes, size);
11411         available_characteristics_keys.add(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES);
11412     }
11413 
11414     if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
11415         int32_t available_iso_modes[CAM_ISO_MODE_MAX];
11416         size = 0;
11417         count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
11418         for (size_t i = 0; i < count; i++) {
11419             int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
11420                     gCamCapability[cameraId]->supported_iso_modes[i]);
11421             if (NAME_NOT_FOUND != val)
11422                 available_iso_modes[size++] = val;
11423         }
11424         staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
11425                 available_iso_modes, size);
11426         available_characteristics_keys.add(QCAMERA3_ISO_AVAILABLE_MODES);
11427     }
11428 
11429     int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
11430     for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
11431         available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
11432     staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
11433             available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
11434 
11435     int32_t available_saturation_range[4];
11436     available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
11437     available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
11438     available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
11439     available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
11440     staticInfo.update(QCAMERA3_SATURATION_RANGE,
11441             available_saturation_range, 4);
11442 
11443     uint8_t is_hdr_values[2];
11444     is_hdr_values[0] = 0;
11445     is_hdr_values[1] = 1;
11446     staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
11447             is_hdr_values, 2);
11448 
11449     float is_hdr_confidence_range[2];
11450     is_hdr_confidence_range[0] = 0.0;
11451     is_hdr_confidence_range[1] = 1.0;
11452     staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
11453             is_hdr_confidence_range, 2);
11454 
11455     size_t eepromLength = strnlen(
11456             reinterpret_cast<const char *>(
11457                     gCamCapability[cameraId]->eeprom_version_info),
11458             sizeof(gCamCapability[cameraId]->eeprom_version_info));
11459     if (0 < eepromLength) {
11460         char easelInfo[] = ",E:N";
11461         char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
11462         if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
11463             eepromLength += sizeof(easelInfo);
11464             strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
11465                     gEaselManagerClient->isEaselPresentOnDevice()) ? ",E-Y" : ",E:N"),
11466                     MAX_EEPROM_VERSION_INFO_LEN);
11467         }
11468         staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
11469                 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
11470         available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO);
11471 
11472         staticInfo.update(ANDROID_INFO_VERSION,
11473                 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
11474         available_characteristics_keys.add(ANDROID_INFO_VERSION);
11475     }
11476 
11477     staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
11478                       available_characteristics_keys.array(),
11479                       available_characteristics_keys.size());
11480 
11481     std::vector<uint8_t> availableOisModes;
11482     availableOisModes.push_back(ANDROID_STATISTICS_OIS_DATA_MODE_OFF);
11483     if (cameraId == 0) {
11484         availableOisModes.push_back(ANDROID_STATISTICS_OIS_DATA_MODE_ON);
11485     }
11486 
11487     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_OIS_DATA_MODES,
11488                       availableOisModes.data(),
11489                       availableOisModes.size());
11490 
11491     gStaticMetadata[cameraId] = staticInfo.release();
11492     return rc;
11493 }
11494 
11495 /*===========================================================================
11496  * FUNCTION   : makeTable
11497  *
11498  * DESCRIPTION: make a table of sizes
11499  *
11500  * PARAMETERS :
11501  *
11502  *
11503  *==========================================================================*/
makeTable(cam_dimension_t * dimTable,size_t size,size_t max_size,int32_t * sizeTable)11504 void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
11505         size_t max_size, int32_t *sizeTable)
11506 {
11507     size_t j = 0;
11508     if (size > max_size) {
11509        size = max_size;
11510     }
11511     for (size_t i = 0; i < size; i++) {
11512         sizeTable[j] = dimTable[i].width;
11513         sizeTable[j+1] = dimTable[i].height;
11514         j+=2;
11515     }
11516 }
11517 
11518 /*===========================================================================
11519  * FUNCTION   : makeFPSTable
11520  *
11521  * DESCRIPTION: make a table of fps ranges
11522  *
11523  * PARAMETERS :
11524  *
11525  *==========================================================================*/
makeFPSTable(cam_fps_range_t * fpsTable,size_t size,size_t max_size,int32_t * fpsRangesTable)11526 void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
11527         size_t max_size, int32_t *fpsRangesTable)
11528 {
11529     size_t j = 0;
11530     if (size > max_size) {
11531        size = max_size;
11532     }
11533     for (size_t i = 0; i < size; i++) {
11534         fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
11535         fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
11536         j+=2;
11537     }
11538 }
11539 
11540 /*===========================================================================
11541  * FUNCTION   : makeOverridesList
11542  *
11543  * DESCRIPTION: make a list of scene mode overrides
11544  *
11545  * PARAMETERS :
11546  *
11547  *
11548  *==========================================================================*/
makeOverridesList(cam_scene_mode_overrides_t * overridesTable,size_t size,size_t max_size,uint8_t * overridesList,uint8_t * supported_indexes,uint32_t camera_id)11549 void QCamera3HardwareInterface::makeOverridesList(
11550         cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
11551         uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
11552 {
11553     /*daemon will give a list of overrides for all scene modes.
11554       However we should send the fwk only the overrides for the scene modes
11555       supported by the framework*/
11556     size_t j = 0;
11557     if (size > max_size) {
11558        size = max_size;
11559     }
11560     size_t focus_count = CAM_FOCUS_MODE_MAX;
11561     focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
11562             focus_count);
11563     for (size_t i = 0; i < size; i++) {
11564         bool supt = false;
11565         size_t index = supported_indexes[i];
11566         overridesList[j] = gCamCapability[camera_id]->flash_available ?
11567                 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
11568         int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
11569                 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11570                 overridesTable[index].awb_mode);
11571         if (NAME_NOT_FOUND != val) {
11572             overridesList[j+1] = (uint8_t)val;
11573         }
11574         uint8_t focus_override = overridesTable[index].af_mode;
11575         for (size_t k = 0; k < focus_count; k++) {
11576            if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
11577               supt = true;
11578               break;
11579            }
11580         }
11581         if (supt) {
11582             val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11583                     focus_override);
11584             if (NAME_NOT_FOUND != val) {
11585                 overridesList[j+2] = (uint8_t)val;
11586             }
11587         } else {
11588            overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
11589         }
11590         j+=3;
11591     }
11592 }
11593 
11594 /*===========================================================================
11595  * FUNCTION   : filterJpegSizes
11596  *
11597  * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
11598  *              could be downscaled to
11599  *
11600  * PARAMETERS :
11601  *
11602  * RETURN     : length of jpegSizes array
11603  *==========================================================================*/
11604 
filterJpegSizes(int32_t * jpegSizes,int32_t * processedSizes,size_t processedSizesCnt,size_t maxCount,cam_rect_t active_array_size,uint8_t downscale_factor)11605 size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
11606         size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
11607         uint8_t downscale_factor)
11608 {
11609     if (0 == downscale_factor) {
11610         downscale_factor = 1;
11611     }
11612 
11613     int32_t min_width = active_array_size.width / downscale_factor;
11614     int32_t min_height = active_array_size.height / downscale_factor;
11615     size_t jpegSizesCnt = 0;
11616     if (processedSizesCnt > maxCount) {
11617         processedSizesCnt = maxCount;
11618     }
11619     for (size_t i = 0; i < processedSizesCnt; i+=2) {
11620         if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
11621             jpegSizes[jpegSizesCnt] = processedSizes[i];
11622             jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
11623             jpegSizesCnt += 2;
11624         }
11625     }
11626     return jpegSizesCnt;
11627 }
11628 
11629 /*===========================================================================
11630  * FUNCTION   : computeNoiseModelEntryS
11631  *
11632  * DESCRIPTION: function to map a given sensitivity to the S noise
11633  *              model parameters in the DNG noise model.
11634  *
11635  * PARAMETERS : sens : the sensor sensitivity
11636  *
11637  ** RETURN    : S (sensor amplification) noise
11638  *
11639  *==========================================================================*/
computeNoiseModelEntryS(int32_t sens)11640 double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
11641     double s = gCamCapability[mCameraId]->gradient_S * sens +
11642             gCamCapability[mCameraId]->offset_S;
11643     return ((s < 0.0) ? 0.0 : s);
11644 }
11645 
11646 /*===========================================================================
11647  * FUNCTION   : computeNoiseModelEntryO
11648  *
11649  * DESCRIPTION: function to map a given sensitivity to the O noise
11650  *              model parameters in the DNG noise model.
11651  *
11652  * PARAMETERS : sens : the sensor sensitivity
11653  *
11654  ** RETURN    : O (sensor readout) noise
11655  *
11656  *==========================================================================*/
computeNoiseModelEntryO(int32_t sens)11657 double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
11658     int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
11659     double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
11660             1.0 : (1.0 * sens / max_analog_sens);
11661     double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
11662             gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
11663     return ((o < 0.0) ? 0.0 : o);
11664 }
11665 
11666 /*===========================================================================
11667  * FUNCTION   : getSensorSensitivity
11668  *
11669  * DESCRIPTION: convert iso_mode to an integer value
11670  *
11671  * PARAMETERS : iso_mode : the iso_mode supported by sensor
11672  *
11673  ** RETURN    : sensitivity supported by sensor
11674  *
11675  *==========================================================================*/
getSensorSensitivity(int32_t iso_mode)11676 int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
11677 {
11678     int32_t sensitivity;
11679 
11680     switch (iso_mode) {
11681     case CAM_ISO_MODE_100:
11682         sensitivity = 100;
11683         break;
11684     case CAM_ISO_MODE_200:
11685         sensitivity = 200;
11686         break;
11687     case CAM_ISO_MODE_400:
11688         sensitivity = 400;
11689         break;
11690     case CAM_ISO_MODE_800:
11691         sensitivity = 800;
11692         break;
11693     case CAM_ISO_MODE_1600:
11694         sensitivity = 1600;
11695         break;
11696     default:
11697         sensitivity = -1;
11698         break;
11699     }
11700     return sensitivity;
11701 }
11702 
initHdrPlusClientLocked()11703 int QCamera3HardwareInterface::initHdrPlusClientLocked() {
11704     if (gEaselManagerClient == nullptr) {
11705         gEaselManagerClient = EaselManagerClient::create();
11706         if (gEaselManagerClient == nullptr) {
11707             ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
11708             return -ENODEV;
11709         }
11710     }
11711 
11712     if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
11713         // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
11714         //  to connect to Easel.
11715         bool doNotpowerOnEasel =
11716                 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
11717 
11718         if (doNotpowerOnEasel) {
11719             ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
11720             return OK;
11721         }
11722 
11723         // If Easel is present, power on Easel and suspend it immediately.
11724         status_t res = gEaselManagerClient->open();
11725         if (res != OK) {
11726             ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res),
11727                     res);
11728             return res;
11729         }
11730 
11731         EaselManagerClientOpened = true;
11732 
11733         res = gEaselManagerClient->suspend();
11734         if (res != OK) {
11735             ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
11736         }
11737 
11738         gEaselBypassOnly = property_get_bool("persist.camera.hdrplus.disable", false);
11739         gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
11740 
11741         // Expose enableZsl key only when HDR+ mode is enabled.
11742         gExposeEnableZslKey = !gEaselBypassOnly;
11743     }
11744 
11745     return OK;
11746 }
11747 
11748 /*===========================================================================
11749  * FUNCTION   : isStreamCombinationSupported
11750  *
11751  * DESCRIPTION: query camera support for specific stream combination
11752  *
11753  * PARAMETERS :
11754  *   @cameraId  : camera Id
11755  *   @comb      : stream combination
11756  *
11757  * RETURN     : int type of status
11758  *              NO_ERROR  -- in case combination is supported
11759  *              none-zero failure code
11760  *==========================================================================*/
isStreamCombinationSupported(uint32_t cameraId,const camera_stream_combination_t * comb)11761 int QCamera3HardwareInterface::isStreamCombinationSupported(uint32_t cameraId,
11762         const camera_stream_combination_t *comb)
11763 {
11764     int rc = BAD_VALUE;
11765     pthread_mutex_lock(&gCamLock);
11766 
11767     if (NULL == gCamCapability[cameraId]) {
11768         rc = initCapabilities(cameraId);
11769         if (rc < 0) {
11770             pthread_mutex_unlock(&gCamLock);
11771             return rc;
11772         }
11773     }
11774 
11775     camera3_stream_configuration_t streamList = {comb->num_streams, /*streams*/ nullptr,
11776             comb->operation_mode, /*session_parameters*/ nullptr};
11777     streamList.streams = new camera3_stream_t * [comb->num_streams];
11778     camera3_stream_t *streamBuffer = new camera3_stream_t[comb->num_streams];
11779     for (size_t i = 0; i < comb->num_streams; i++) {
11780         streamBuffer[i] = {comb->streams[i].stream_type, comb->streams[i].width,
11781             comb->streams[i].height, comb->streams[i].format, comb->streams[i].usage,
11782             /*max_buffers*/ 0, /*priv*/ nullptr, comb->streams[i].data_space,
11783             comb->streams[i].rotation, comb->streams[i].physical_camera_id, /*reserved*/ {nullptr}};
11784         streamList.streams[i] = &streamBuffer[i];
11785     }
11786 
11787     StreamValidateStatus validateStatus;
11788     rc = validateStreamCombination(cameraId, &streamList, &validateStatus);
11789 
11790     delete [] streamBuffer;
11791     delete [] streamList.streams;
11792     pthread_mutex_unlock(&gCamLock);
11793 
11794     return rc;
11795 }
11796 
11797 /*===========================================================================
11798  * FUNCTION   : getCamInfo
11799  *
11800  * DESCRIPTION: query camera capabilities
11801  *
11802  * PARAMETERS :
11803  *   @cameraId  : camera Id
11804  *   @info      : camera info struct to be filled in with camera capabilities
11805  *
11806  * RETURN     : int type of status
11807  *              NO_ERROR  -- success
11808  *              none-zero failure code
11809  *==========================================================================*/
getCamInfo(uint32_t cameraId,struct camera_info * info)11810 int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
11811         struct camera_info *info)
11812 {
11813     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
11814     int rc = 0;
11815 
11816     pthread_mutex_lock(&gCamLock);
11817 
11818     {
11819         std::unique_lock<std::mutex> l(gHdrPlusClientLock);
11820         rc = initHdrPlusClientLocked();
11821         if (rc != OK) {
11822             ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
11823             pthread_mutex_unlock(&gCamLock);
11824             return rc;
11825         }
11826     }
11827 
11828     if (NULL == gCamCapability[cameraId]) {
11829         rc = initCapabilities(cameraId);
11830         if (rc < 0) {
11831             pthread_mutex_unlock(&gCamLock);
11832             return rc;
11833         }
11834     }
11835 
11836     if (NULL == gStaticMetadata[cameraId]) {
11837         rc = initStaticMetadata(cameraId);
11838         if (rc < 0) {
11839             pthread_mutex_unlock(&gCamLock);
11840             return rc;
11841         }
11842     }
11843 
11844     switch(gCamCapability[cameraId]->position) {
11845     case CAM_POSITION_BACK:
11846     case CAM_POSITION_BACK_AUX:
11847         info->facing = CAMERA_FACING_BACK;
11848         break;
11849 
11850     case CAM_POSITION_FRONT:
11851     case CAM_POSITION_FRONT_AUX:
11852         info->facing = CAMERA_FACING_FRONT;
11853         break;
11854 
11855     default:
11856         LOGE("Unknown position type %d for camera id:%d",
11857                 gCamCapability[cameraId]->position, cameraId);
11858         rc = -1;
11859         break;
11860     }
11861 
11862 
11863     info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
11864 #ifndef USE_HAL_3_3
11865     info->device_version = CAMERA_DEVICE_API_VERSION_3_5;
11866 #else
11867     info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
11868 #endif
11869     info->static_camera_characteristics = gStaticMetadata[cameraId];
11870 
11871     //For now assume both cameras can operate independently.
11872     info->conflicting_devices = NULL;
11873     info->conflicting_devices_length = 0;
11874 
11875     //resource cost is 100 * MIN(1.0, m/M),
11876     //where m is throughput requirement with maximum stream configuration
11877     //and M is CPP maximum throughput.
11878     float max_fps = 0.0;
11879     for (uint32_t i = 0;
11880             i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
11881         if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
11882             max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
11883     }
11884     float ratio = 1.0 * MAX_PROCESSED_STREAMS *
11885             gCamCapability[cameraId]->active_array_size.width *
11886             gCamCapability[cameraId]->active_array_size.height * max_fps /
11887             gCamCapability[cameraId]->max_pixel_bandwidth;
11888     info->resource_cost = 100 * MIN(1.0, ratio);
11889     LOGI("camera %d resource cost is %d", cameraId,
11890             info->resource_cost);
11891 
11892     pthread_mutex_unlock(&gCamLock);
11893     return rc;
11894 }
11895 
/*===========================================================================
 * FUNCTION   : translateCapabilityToMetadata
 *
 * DESCRIPTION: translate the capability into camera_metadata_t
 *
 * PARAMETERS : type of the request
 *
 *
 * RETURN     : success: camera_metadata_t*
 *              failure: NULL
 *
 *==========================================================================*/
camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
{
    // Default request templates are built once per template type and cached
    // in mDefaultMetadata[]; subsequent calls return the cached pointer.
    if (mDefaultMetadata[type] != NULL) {
        return mDefaultMetadata[type];
    }
    //first time we are handling this request
    //fill up the metadata structure using the wrapper class
    CameraMetadata settings;
    //translate from cam_capability_t to camera_metadata_tag_t
    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
    int32_t defaultRequestID = 0;
    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);

    /* OIS disable (debug override via system property) */
    char ois_prop[PROPERTY_VALUE_MAX];
    memset(ois_prop, 0, sizeof(ois_prop));
    property_get("persist.camera.ois.disable", ois_prop, "0");
    uint8_t ois_disable = (uint8_t)atoi(ois_prop);

    /* Force video to use OIS (defaults to on) */
    char videoOisProp[PROPERTY_VALUE_MAX];
    memset(videoOisProp, 0, sizeof(videoOisProp));
    property_get("persist.camera.ois.video", videoOisProp, "1");
    uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);

    // Hybrid AE enable/disable
    char hybrid_ae_prop[PROPERTY_VALUE_MAX];
    memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
    property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
    uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);

    // Per-template tuning knobs; assigned by the switch on 'type' below.
    uint8_t controlIntent = 0;
    uint8_t focusMode;
    uint8_t vsMode;
    uint8_t optStabMode;
    uint8_t cacMode;
    uint8_t edge_mode;
    uint8_t noise_red_mode;
    uint8_t shading_mode;
    uint8_t hot_pixel_mode;
    uint8_t tonemap_mode;
    bool highQualityModeEntryAvailable = FALSE;
    bool fastModeEntryAvailable = FALSE;
    uint8_t histogramEnable = false;
    vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
    optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
    uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
    uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
    uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;

    // Select defaults per camera3 request template.
    switch (type) {
      case CAMERA3_TEMPLATE_PREVIEW:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        edge_mode = ANDROID_EDGE_MODE_FAST;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
        shading_mode = ANDROID_SHADING_MODE_FAST;
        hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
        break;
      case CAMERA3_TEMPLATE_STILL_CAPTURE:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
        edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
        shading_mode = ANDROID_SHADING_MODE_HIGH_QUALITY;
        hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY;
        tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
        // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
        for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
            if (gCamCapability[mCameraId]->aberration_modes[i] ==
                    CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
                highQualityModeEntryAvailable = TRUE;
            } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
                    CAM_COLOR_CORRECTION_ABERRATION_FAST) {
                fastModeEntryAvailable = TRUE;
            }
        }
        if (highQualityModeEntryAvailable) {
            cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
        } else if (fastModeEntryAvailable) {
            cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        }
        // Raw sensors additionally report the lens shading map for stills.
        if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
            shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
        }
        enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
        break;
      case CAMERA3_TEMPLATE_VIDEO_RECORD:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        edge_mode = ANDROID_EDGE_MODE_FAST;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
        shading_mode = ANDROID_SHADING_MODE_FAST;
        hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
        if (forceVideoOis)
            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
        break;
      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        edge_mode = ANDROID_EDGE_MODE_FAST;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
        shading_mode = ANDROID_SHADING_MODE_FAST;
        hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
        if (forceVideoOis)
            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
        break;
      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
        shading_mode = ANDROID_SHADING_MODE_FAST;
        hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
        break;
      case CAMERA3_TEMPLATE_MANUAL:
        edge_mode = ANDROID_EDGE_MODE_FAST;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
        shading_mode = ANDROID_SHADING_MODE_FAST;
        hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
        break;
      default:
        edge_mode = ANDROID_EDGE_MODE_FAST;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
        shading_mode = ANDROID_SHADING_MODE_FAST;
        hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
        break;
    }
    // Set CAC to OFF if underlying device doesn't support
    if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
    }
    settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
    // A single supported focus mode means fixed focus; force AF off.
    if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
    }
    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
    settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
    settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);

    // If the device supports exactly one OIS mode, that mode wins over the
    // template default; the persist.camera.ois.disable property forces OFF.
    if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
    else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
            || ois_disable)
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);

    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
            &gCamCapability[mCameraId]->exposure_compensation_default, 1);

    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);

    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);

    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);

    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);

    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);

    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);

    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);

    /*flash*/
    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);

    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
    settings.update(ANDROID_FLASH_FIRING_POWER,
            &flashFiringLevel, 1);

    /* lens */
    float default_aperture = gCamCapability[mCameraId]->apertures[0];
    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);

    if (gCamCapability[mCameraId]->filter_densities_count) {
        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
                        gCamCapability[mCameraId]->filter_densities_count);
    }

    float default_focal_length = gCamCapability[mCameraId]->focal_length;
    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);

    static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
    settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);

    static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
    settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);

    /* face detection (default to OFF) */
    static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);

    static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
    settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);

    static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
    settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);

    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
    settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);


    static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);

    /* Exposure time(Update the Min Exposure Time)*/
    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);

    /* frame duration */
    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);

    /* sensitivity */
    static const int32_t default_sensitivity = 100;
    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
#ifndef USE_HAL_3_3
    static const int32_t default_isp_sensitivity =
            gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
    settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
#endif

    /*edge mode*/
    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);

    /*noise reduction mode*/
    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);

    /*shading mode*/
    settings.update(ANDROID_SHADING_MODE, &shading_mode, 1);

    /*hot pixel mode*/
    settings.update(ANDROID_HOT_PIXEL_MODE, &hot_pixel_mode, 1);

    /*color correction mode*/
    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);

    /*transform matrix mode*/
    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);

    // Default crop region covers the full active pixel array (no zoom).
    int32_t scaler_crop_region[4];
    scaler_crop_region[0] = 0;
    scaler_crop_region[1] = 0;
    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);

    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);

    /*focus distance*/
    float focus_distance = 0.0;
    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);

    /*target fps range: use maximum range for picture, and maximum fixed range for video*/
    /* Restrict template max_fps to 30 */
    float max_range = 0.0;
    float max_fixed_fps = 0.0;
    int32_t fps_range[2] = {0, 0};
    for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
            i++) {
        // Skip high-speed ranges beyond the template preview cap.
        if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
                TEMPLATE_MAX_PREVIEW_FPS) {
            continue;
        }
        float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
            gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
        if (type == CAMERA3_TEMPLATE_PREVIEW ||
                type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
                type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
            // Picture-style templates: pick the widest available range.
            if (range > max_range) {
                fps_range[0] =
                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
                fps_range[1] =
                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
                max_range = range;
            }
        } else {
            // Video-style templates: pick the highest fixed (min==max) range.
            if (range < 0.01 && max_fixed_fps <
                    gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
                fps_range[0] =
                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
                fps_range[1] =
                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
                max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
            }
        }
    }
    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);

    /*precapture trigger*/
    uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);

    /*af trigger*/
    uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
    settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);

    /* ae & af regions: default to the whole active array, weight 0 */
    int32_t active_region[] = {
            gCamCapability[mCameraId]->active_array_size.left,
            gCamCapability[mCameraId]->active_array_size.top,
            gCamCapability[mCameraId]->active_array_size.left +
                    gCamCapability[mCameraId]->active_array_size.width,
            gCamCapability[mCameraId]->active_array_size.top +
                    gCamCapability[mCameraId]->active_array_size.height,
            0};
    settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
            sizeof(active_region) / sizeof(active_region[0]));
    settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
            sizeof(active_region) / sizeof(active_region[0]));

    /* black level lock */
    uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);

    //special defaults for manual template
    // NOTE: these intentionally overwrite tags set earlier for this type.
    if (type == CAMERA3_TEMPLATE_MANUAL) {
        static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
        settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);

        static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
        settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);

        static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
        settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);

        static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
        settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);

        static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
        settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);

        static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
        settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
    }


    /* TNR
     * We'll use this location to determine which modes TNR will be set.
     * We will enable TNR to be on if either of the Preview/Video stream requires TNR
     * This is not to be confused with linking on a per stream basis that decision
     * is still on per-session basis and will be handled as part of config stream
     */
    uint8_t tnr_enable = 0;

    if (m_bTnrPreview || m_bTnrVideo) {

        switch (type) {
            case CAMERA3_TEMPLATE_VIDEO_RECORD:
                    tnr_enable = 1;
                    break;

            default:
                    tnr_enable = 0;
                    break;
        }

        int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
        settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
        settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);

        LOGD("TNR:%d with process plate %d for template:%d",
                             tnr_enable, tnr_process_type, type);
    }

    //Update Link tags to default
    uint8_t sync_type = CAM_TYPE_STANDALONE;
    settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);

    uint8_t is_main = 1;
    settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);

    uint8_t related_camera_id = mCameraId;
    settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);

    /* CDS default */
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.CDS", prop, "Auto");
    cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
    cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
    // Fall back to AUTO when the property held an unrecognized value.
    if (CAM_CDS_MODE_MAX == cds_mode) {
        cds_mode = CAM_CDS_MODE_AUTO;
    }

    /* Disabling CDS in templates which have TNR enabled*/
    if (tnr_enable)
        cds_mode = CAM_CDS_MODE_OFF;

    int32_t mode = cds_mode;
    settings.update(QCAMERA3_CDS_MODE, &mode, 1);

    /* Manual Convergence AEC Speed is disabled by default*/
    float default_aec_speed = 0;
    settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);

    /* Manual Convergence AWB Speed is disabled by default*/
    float default_awb_speed = 0;
    settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);

    // Set instant AEC to normal convergence by default
    uint8_t instant_aec_mode = (uint8_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
    settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);

    // OIS data reporting is only enabled on camera 0 here; presumably the
    // rear module is the only one with OIS hardware — TODO confirm.
    uint8_t oisDataMode = ANDROID_STATISTICS_OIS_DATA_MODE_OFF;
    if (mCameraId == 0) {
        oisDataMode = ANDROID_STATISTICS_OIS_DATA_MODE_ON;
    }
    settings.update(ANDROID_STATISTICS_OIS_DATA_MODE, &oisDataMode, 1);

    if (gExposeEnableZslKey) {
        settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
        int32_t postview = 0;
        settings.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW, &postview, 1);
        int32_t continuousZslCapture = 0;
        settings.update(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE, &continuousZslCapture, 1);
        // Disable HDR+ for templates other than CAMERA3_TEMPLATE_STILL_CAPTURE and
        // CAMERA3_TEMPLATE_PREVIEW.
        int32_t disableHdrplus = (type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
                                  type == CAMERA3_TEMPLATE_PREVIEW) ? 0 : 1;
        settings.update(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS, &disableHdrplus, 1);

        // Set hybrid_ae tag in PREVIEW and STILL_CAPTURE templates to 1 so that
        // hybrid ae is enabled for 3rd party app HDR+.
        if (type == CAMERA3_TEMPLATE_PREVIEW ||
                type == CAMERA3_TEMPLATE_STILL_CAPTURE) {
            hybrid_ae = 1;
        }
    }
    /* hybrid ae */
    settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);

    int32_t fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
    settings.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);

    // Cache the built template; release() transfers ownership of the raw
    // camera_metadata_t out of the CameraMetadata wrapper.
    mDefaultMetadata[type] = settings.release();

    return mDefaultMetadata[type];
}
12389 
12390 /*===========================================================================
12391  * FUNCTION   : getExpectedFrameDuration
12392  *
12393  * DESCRIPTION: Extract the maximum frame duration from either exposure or frame
12394  *              duration
12395  *
12396  * PARAMETERS :
12397  *   @request   : request settings
12398  *   @frameDuration : The maximum frame duration in nanoseconds
12399  *
12400  * RETURN     : None
12401  *==========================================================================*/
getExpectedFrameDuration(const camera_metadata_t * request,nsecs_t * frameDuration)12402 void QCamera3HardwareInterface::getExpectedFrameDuration(
12403         const camera_metadata_t *request, nsecs_t *frameDuration /*out*/) {
12404     if (nullptr == frameDuration) {
12405         return;
12406     }
12407 
12408     camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
12409     find_camera_metadata_ro_entry(request,
12410             ANDROID_SENSOR_EXPOSURE_TIME,
12411             &e);
12412     if (e.count > 0) {
12413         *frameDuration = e.data.i64[0];
12414     }
12415     find_camera_metadata_ro_entry(request,
12416             ANDROID_SENSOR_FRAME_DURATION,
12417             &e);
12418     if (e.count > 0) {
12419         *frameDuration = std::max(e.data.i64[0], *frameDuration);
12420     }
12421 }
12422 
12423 /*===========================================================================
12424  * FUNCTION   : calculateMaxExpectedDuration
12425  *
12426  * DESCRIPTION: Calculate the expected frame duration in nanoseconds given the
12427  *              current camera settings.
12428  *
12429  * PARAMETERS :
12430  *   @request   : request settings
12431  *
12432  * RETURN     : Expected frame duration in nanoseconds.
12433  *==========================================================================*/
calculateMaxExpectedDuration(const camera_metadata_t * request)12434 nsecs_t QCamera3HardwareInterface::calculateMaxExpectedDuration(
12435         const camera_metadata_t *request) {
12436     nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
12437     camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
12438     find_camera_metadata_ro_entry(request, ANDROID_CONTROL_MODE, &e);
12439     if (e.count == 0) {
12440         return maxExpectedDuration;
12441     }
12442 
12443     if (e.data.u8[0] == ANDROID_CONTROL_MODE_OFF) {
12444         getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
12445     }
12446 
12447     if (e.data.u8[0] != ANDROID_CONTROL_MODE_AUTO) {
12448         return maxExpectedDuration;
12449     }
12450 
12451     find_camera_metadata_ro_entry(request, ANDROID_CONTROL_AE_MODE, &e);
12452     if (e.count == 0) {
12453         return maxExpectedDuration;
12454     }
12455 
12456     switch (e.data.u8[0]) {
12457         case ANDROID_CONTROL_AE_MODE_OFF:
12458             getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
12459             break;
12460         default:
12461             find_camera_metadata_ro_entry(request,
12462                     ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
12463                     &e);
12464             if (e.count > 1) {
12465                 maxExpectedDuration = 1e9 / e.data.u8[0];
12466             }
12467             break;
12468     }
12469 
12470     return maxExpectedDuration;
12471 }
12472 
12473 /*===========================================================================
12474  * FUNCTION   : setFrameParameters
12475  *
12476  * DESCRIPTION: set parameters per frame as requested in the metadata from
12477  *              framework
12478  *
12479  * PARAMETERS :
12480  *   @request   : request that needs to be serviced
12481  *   @streamsArray : Stream ID of all the requested streams
12482  *   @blob_request: Whether this request is a blob request or not
12483  *
12484  * RETURN     : success: NO_ERROR
12485  *              failure:
12486  *==========================================================================*/
setFrameParameters(camera3_capture_request_t * request,cam_stream_ID_t streamsArray,int blob_request,uint32_t snapshotStreamId)12487 int QCamera3HardwareInterface::setFrameParameters(
12488                     camera3_capture_request_t *request,
12489                     cam_stream_ID_t streamsArray,
12490                     int blob_request,
12491                     uint32_t snapshotStreamId)
12492 {
12493     /*translate from camera_metadata_t type to parm_type_t*/
12494     int rc = 0;
12495     int32_t hal_version = CAM_HAL_V3;
12496 
12497     clear_metadata_buffer(mParameters);
12498     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
12499         LOGE("Failed to set hal version in the parameters");
12500         return BAD_VALUE;
12501     }
12502 
12503     /*we need to update the frame number in the parameters*/
12504     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
12505             request->frame_number)) {
12506         LOGE("Failed to set the frame number in the parameters");
12507         return BAD_VALUE;
12508     }
12509 
12510     /* Update stream id of all the requested buffers */
12511     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
12512         LOGE("Failed to set stream type mask in the parameters");
12513         return BAD_VALUE;
12514     }
12515 
12516     if (mUpdateDebugLevel) {
12517         uint32_t dummyDebugLevel = 0;
12518         /* The value of dummyDebugLevel is irrelavent. On
12519          * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
12520         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
12521                 dummyDebugLevel)) {
12522             LOGE("Failed to set UPDATE_DEBUG_LEVEL");
12523             return BAD_VALUE;
12524         }
12525         mUpdateDebugLevel = false;
12526     }
12527 
12528     if(request->settings != NULL){
12529         mExpectedFrameDuration = calculateMaxExpectedDuration(request->settings);
12530         rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
12531         if (blob_request)
12532             memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
12533     }
12534 
12535     return rc;
12536 }
12537 
12538 /*===========================================================================
12539  * FUNCTION   : setReprocParameters
12540  *
12541  * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
12542  *              return it.
12543  *
12544  * PARAMETERS :
12545  *   @request   : request that needs to be serviced
12546  *
12547  * RETURN     : success: NO_ERROR
12548  *              failure:
12549  *==========================================================================*/
setReprocParameters(camera3_capture_request_t * request,metadata_buffer_t * reprocParam,uint32_t snapshotStreamId)12550 int32_t QCamera3HardwareInterface::setReprocParameters(
12551         camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
12552         uint32_t snapshotStreamId)
12553 {
12554     /*translate from camera_metadata_t type to parm_type_t*/
12555     int rc = 0;
12556 
12557     if (NULL == request->settings){
12558         LOGE("Reprocess settings cannot be NULL");
12559         return BAD_VALUE;
12560     }
12561 
12562     if (NULL == reprocParam) {
12563         LOGE("Invalid reprocessing metadata buffer");
12564         return BAD_VALUE;
12565     }
12566     clear_metadata_buffer(reprocParam);
12567 
12568     /*we need to update the frame number in the parameters*/
12569     if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
12570             request->frame_number)) {
12571         LOGE("Failed to set the frame number in the parameters");
12572         return BAD_VALUE;
12573     }
12574 
12575     rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
12576     if (rc < 0) {
12577         LOGE("Failed to translate reproc request");
12578         return rc;
12579     }
12580 
12581     CameraMetadata frame_settings;
12582     frame_settings = request->settings;
12583     if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
12584             frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
12585         int32_t *crop_count =
12586                 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
12587         int32_t *crop_data =
12588                 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
12589         int32_t *roi_map =
12590                 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
12591         if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
12592             cam_crop_data_t crop_meta;
12593             memset(&crop_meta, 0, sizeof(cam_crop_data_t));
12594             crop_meta.num_of_streams = 1;
12595             crop_meta.crop_info[0].crop.left   = crop_data[0];
12596             crop_meta.crop_info[0].crop.top    = crop_data[1];
12597             crop_meta.crop_info[0].crop.width  = crop_data[2];
12598             crop_meta.crop_info[0].crop.height = crop_data[3];
12599 
12600             crop_meta.crop_info[0].roi_map.left =
12601                     roi_map[0];
12602             crop_meta.crop_info[0].roi_map.top =
12603                     roi_map[1];
12604             crop_meta.crop_info[0].roi_map.width =
12605                     roi_map[2];
12606             crop_meta.crop_info[0].roi_map.height =
12607                     roi_map[3];
12608 
12609             if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
12610                 rc = BAD_VALUE;
12611             }
12612             LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
12613                     request->input_buffer->stream,
12614                     crop_meta.crop_info[0].crop.left,
12615                     crop_meta.crop_info[0].crop.top,
12616                     crop_meta.crop_info[0].crop.width,
12617                     crop_meta.crop_info[0].crop.height);
12618             LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
12619                     request->input_buffer->stream,
12620                     crop_meta.crop_info[0].roi_map.left,
12621                     crop_meta.crop_info[0].roi_map.top,
12622                     crop_meta.crop_info[0].roi_map.width,
12623                     crop_meta.crop_info[0].roi_map.height);
12624             } else {
12625                 LOGE("Invalid reprocess crop count %d!", *crop_count);
12626             }
12627     } else {
12628         LOGE("No crop data from matching output stream");
12629     }
12630 
12631     /* These settings are not needed for regular requests so handle them specially for
12632        reprocess requests; information needed for EXIF tags */
12633     if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12634         int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12635                     (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12636         if (NAME_NOT_FOUND != val) {
12637             uint32_t flashMode = (uint32_t)val;
12638             if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
12639                 rc = BAD_VALUE;
12640             }
12641         } else {
12642             LOGE("Could not map fwk flash mode %d to correct hal flash mode",
12643                     frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12644         }
12645     } else {
12646         LOGH("No flash mode in reprocess settings");
12647     }
12648 
12649     if (frame_settings.exists(ANDROID_FLASH_STATE)) {
12650         int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
12651         if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
12652             rc = BAD_VALUE;
12653         }
12654     } else {
12655         LOGH("No flash state in reprocess settings");
12656     }
12657 
12658     if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
12659         uint8_t *reprocessFlags =
12660             frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
12661         if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
12662                 *reprocessFlags)) {
12663                 rc = BAD_VALUE;
12664         }
12665     }
12666 
12667     // Add exif debug data to internal metadata
12668     if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
12669         mm_jpeg_debug_exif_params_t *debug_params =
12670                 (mm_jpeg_debug_exif_params_t *)frame_settings.find
12671                 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
12672         // AE
12673         if (debug_params->ae_debug_params_valid == TRUE) {
12674             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
12675                     debug_params->ae_debug_params);
12676         }
12677         // AWB
12678         if (debug_params->awb_debug_params_valid == TRUE) {
12679             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
12680                 debug_params->awb_debug_params);
12681         }
12682         // AF
12683        if (debug_params->af_debug_params_valid == TRUE) {
12684             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
12685                    debug_params->af_debug_params);
12686         }
12687         // ASD
12688         if (debug_params->asd_debug_params_valid == TRUE) {
12689             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
12690                     debug_params->asd_debug_params);
12691         }
12692         // Stats
12693         if (debug_params->stats_debug_params_valid == TRUE) {
12694             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
12695                     debug_params->stats_debug_params);
12696        }
12697         // BE Stats
12698         if (debug_params->bestats_debug_params_valid == TRUE) {
12699             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
12700                     debug_params->bestats_debug_params);
12701         }
12702         // BHIST
12703         if (debug_params->bhist_debug_params_valid == TRUE) {
12704             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
12705                     debug_params->bhist_debug_params);
12706        }
12707         // 3A Tuning
12708         if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
12709             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
12710                     debug_params->q3a_tuning_debug_params);
12711         }
12712     }
12713 
12714     // Add metadata which reprocess needs
12715     if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
12716         cam_reprocess_info_t *repro_info =
12717                 (cam_reprocess_info_t *)frame_settings.find
12718                 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
12719         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
12720                 repro_info->sensor_crop_info);
12721         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
12722                 repro_info->camif_crop_info);
12723         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
12724                 repro_info->isp_crop_info);
12725         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
12726                 repro_info->cpp_crop_info);
12727         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
12728                 repro_info->af_focal_length_ratio);
12729         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
12730                 repro_info->pipeline_flip);
12731         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
12732                 repro_info->af_roi);
12733         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
12734                 repro_info->dyn_mask);
12735         /* If there is ANDROID_JPEG_ORIENTATION in frame setting,
12736            CAM_INTF_PARM_ROTATION metadata then has been added in
12737            translateToHalMetadata. HAL need to keep this new rotation
12738            metadata. Otherwise, the old rotation info saved in the vendor tag
12739            would be used */
12740         IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
12741                 CAM_INTF_PARM_ROTATION, reprocParam) {
12742             LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
12743         } else {
12744             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
12745                     repro_info->rotation_info);
12746         }
12747     }
12748 
12749     /* Add additional JPEG cropping information. App add QCAMERA3_JPEG_ENCODE_CROP_RECT
12750        to ask for cropping and use ROI for downscale/upscale during HW JPEG encoding.
12751        roi.width and roi.height would be the final JPEG size.
12752        For now, HAL only checks this for reprocess request */
12753     if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
12754             frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
12755         uint8_t *enable =
12756             frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
12757         if (*enable == TRUE) {
12758             int32_t *crop_data =
12759                     frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
12760             cam_stream_crop_info_t crop_meta;
12761             memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
12762             crop_meta.stream_id = 0;
12763             crop_meta.crop.left   = crop_data[0];
12764             crop_meta.crop.top    = crop_data[1];
12765             crop_meta.crop.width  = crop_data[2];
12766             crop_meta.crop.height = crop_data[3];
12767             // The JPEG crop roi should match cpp output size
12768             IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
12769                     CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
12770                 crop_meta.roi_map.left = 0;
12771                 crop_meta.roi_map.top = 0;
12772                 crop_meta.roi_map.width = cpp_crop->crop.width;
12773                 crop_meta.roi_map.height = cpp_crop->crop.height;
12774             }
12775             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
12776                     crop_meta);
12777             LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
12778                     crop_meta.crop.left, crop_meta.crop.top,
12779                     crop_meta.crop.width, crop_meta.crop.height, mCameraId);
12780             LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
12781                     crop_meta.roi_map.left, crop_meta.roi_map.top,
12782                     crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
12783 
12784             // Add JPEG scale information
12785             cam_dimension_t scale_dim;
12786             memset(&scale_dim, 0, sizeof(cam_dimension_t));
12787             if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
12788                 int32_t *roi =
12789                     frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
12790                 scale_dim.width = roi[2];
12791                 scale_dim.height = roi[3];
12792                 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
12793                     scale_dim);
12794                 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
12795                     scale_dim.width, scale_dim.height, mCameraId);
12796             }
12797         }
12798     }
12799 
12800     return rc;
12801 }
12802 
12803 /*===========================================================================
12804  * FUNCTION   : saveRequestSettings
12805  *
12806  * DESCRIPTION: Add any settings that might have changed to the request settings
12807  *              and save the settings to be applied on the frame
12808  *
12809  * PARAMETERS :
12810  *   @jpegMetadata : the extracted and/or modified jpeg metadata
12811  *   @request      : request with initial settings
12812  *
12813  * RETURN     :
12814  * camera_metadata_t* : pointer to the saved request settings
12815  *==========================================================================*/
saveRequestSettings(const CameraMetadata & jpegMetadata,camera3_capture_request_t * request)12816 camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
12817         const CameraMetadata &jpegMetadata,
12818         camera3_capture_request_t *request)
12819 {
12820     camera_metadata_t *resultMetadata;
12821     CameraMetadata camMetadata;
12822     camMetadata = request->settings;
12823 
12824     if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12825         int32_t thumbnail_size[2];
12826         thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12827         thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12828         camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
12829                 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
12830     }
12831 
12832     if (request->input_buffer != NULL) {
12833         uint8_t reprocessFlags = 1;
12834         camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
12835                 (uint8_t*)&reprocessFlags,
12836                 sizeof(reprocessFlags));
12837     }
12838 
12839     resultMetadata = camMetadata.release();
12840     return resultMetadata;
12841 }
12842 
12843 /*===========================================================================
12844  * FUNCTION   : setHalFpsRange
12845  *
12846  * DESCRIPTION: set FPS range parameter
12847  *
12848  *
12849  * PARAMETERS :
12850  *   @settings    : Metadata from framework
12851  *   @hal_metadata: Metadata buffer
12852  *
12853  *
12854  * RETURN     : success: NO_ERROR
12855  *              failure:
12856  *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    // NOTE(review): assumes the caller has verified that
    // ANDROID_CONTROL_AE_TARGET_FPS_RANGE exists in 'settings' — the finds
    // below are unchecked. TODO confirm at call sites.
    cam_fps_range_t fps_range;
    fps_range.min_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    // By default video fps tracks the AE target range; the HFR branch below
    // may override the minimums.
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    LOGD("aeTargetFpsRange fps: [%f %f]",
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record            |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    // Reset batching; it is only re-enabled below when HFR batch mode applies.
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // Constrained HFR: pin min fps (and video min) to the max so the
        // sensor runs at a fixed high rate (see table above).
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        // Map the requested max fps onto a discrete HFR mode.
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                // Compare against the *previous* HFR fps before overwriting it.
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // Batch enough frames per request to keep preview at
                // PREVIEW_FPS_FOR_HFR, capped at the HW limit.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
             }
            LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);

         }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode  */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    // Finally publish the (possibly HFR-adjusted) fps range itself.
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
12950 
12951 /*===========================================================================
12952  * FUNCTION   : translateToHalMetadata
12953  *
12954  * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
12955  *
12956  *
12957  * PARAMETERS :
12958  *   @request  : request sent from framework
12959  *   @hal_metadata: Hal specific metadata buffer
12960  *   @snapshotStreamId: Snapshot stream ID.
12961  *
12962  * RETURN     : success: NO_ERROR
12963  *              failure:
12964  *==========================================================================*/
translateToHalMetadata(const camera3_capture_request_t * request,metadata_buffer_t * hal_metadata,uint32_t snapshotStreamId)12965 int QCamera3HardwareInterface::translateToHalMetadata
12966                                   (const camera3_capture_request_t *request,
12967                                    metadata_buffer_t *hal_metadata,
12968                                    uint32_t snapshotStreamId) {
12969     if (request == nullptr || hal_metadata == nullptr) {
12970         return BAD_VALUE;
12971     }
12972 
12973     int64_t minFrameDuration = getMinFrameDuration(request);
12974 
12975     return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
12976             minFrameDuration);
12977 }
12978 
translateFwkMetadataToHalMetadata(const camera_metadata_t * frameworkMetadata,metadata_buffer_t * hal_metadata,uint32_t snapshotStreamId,int64_t minFrameDuration)12979 int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
12980         const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
12981         uint32_t snapshotStreamId, int64_t minFrameDuration) {
12982 
12983     int rc = 0;
12984     CameraMetadata frame_settings;
12985     frame_settings = frameworkMetadata;
12986 
12987     /* Do not change the order of the following list unless you know what you are
12988      * doing.
12989      * The order is laid out in such a way that parameters in the front of the table
12990      * may be used to override the parameters later in the table. Examples are:
12991      * 1. META_MODE should precede AEC/AWB/AF MODE
12992      * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
12993      * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
12994      * 4. Any mode should precede it's corresponding settings
12995      */
12996     if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
12997         uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
12998         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
12999             rc = BAD_VALUE;
13000         }
13001         rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
13002         if (rc != NO_ERROR) {
13003             LOGE("extractSceneMode failed");
13004         }
13005     }
13006 
13007     if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
13008         uint8_t fwk_aeMode =
13009             frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
13010         uint8_t aeMode;
13011         int32_t redeye;
13012 
13013         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
13014             aeMode = CAM_AE_MODE_OFF;
13015         } else if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_EXTERNAL_FLASH) {
13016             aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
13017         } else {
13018             aeMode = CAM_AE_MODE_ON;
13019         }
13020         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
13021             redeye = 1;
13022         } else {
13023             redeye = 0;
13024         }
13025 
13026         int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
13027                 fwk_aeMode);
13028         if (NAME_NOT_FOUND != val) {
13029             int32_t flashMode = (int32_t)val;
13030             ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
13031         }
13032 
13033         ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
13034         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
13035             rc = BAD_VALUE;
13036         }
13037     }
13038 
13039     if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
13040         uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
13041         int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
13042                 fwk_whiteLevel);
13043         if (NAME_NOT_FOUND != val) {
13044             uint8_t whiteLevel = (uint8_t)val;
13045             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
13046                 rc = BAD_VALUE;
13047             }
13048         }
13049     }
13050 
13051     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
13052         uint8_t fwk_cacMode =
                frame_settings.find(
                        ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
        // Map the framework CAC (chromatic aberration correction) mode to
        // its HAL counterpart via the static lookup table.
        int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
                fwk_cacMode);
        if (NAME_NOT_FOUND != val) {
            cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
            bool entryAvailable = FALSE;
            // Check whether Frameworks set CAC mode is supported in device or not
            for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
                if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
                    entryAvailable = TRUE;
                    break;
                }
            }
            LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
            // If entry not found then set the device supported mode instead of frameworks mode i.e,
            // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
            // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
            if (entryAvailable == FALSE) {
                if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
                    // Device supports no CAC mode at all: force OFF.
                    cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
                } else {
                    if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
                        // High is not supported and so set the FAST as spec say's underlying
                        // device implementation can be the same for both modes.
                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
                    } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
                        // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
                        // in order to avoid the fps drop due to high quality
                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
                    } else {
                        // Any other unsupported value also degrades to OFF.
                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
                    }
                }
            }
            LOGD("Final cacMode is %d", cacMode);
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
                rc = BAD_VALUE;
            }
        } else {
            LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
        }
    }

    // AF mode: honor the framework-requested mode unless infinity focus is
    // forced (m_bForceInfinityAf), in which case CAM_FOCUS_MODE_INFINITY is
    // set unconditionally and the request's AF mode is ignored.
    uint8_t fwk_focusMode = 0;
    if (m_bForceInfinityAf == 0) {
        if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
            fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
            int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                    fwk_focusMode);
            if (NAME_NOT_FOUND != val) {
                uint8_t focusMode = (uint8_t)val;
                LOGD("set focus mode %d", focusMode);
                if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                         CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
                    rc = BAD_VALUE;
                }
            }
        } else {
            LOGE("Fatal: Missing ANDROID_CONTROL_AF_MODE");
        }
    } else {
        uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
        LOGE("Focus forced to infinity %d", focusMode);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
            rc = BAD_VALUE;
        }
    }

    // Manual focus distance is only forwarded when AF is OFF.
    // NOTE(review): when m_bForceInfinityAf is set, fwk_focusMode stays 0,
    // which compares equal to ANDROID_CONTROL_AF_MODE_OFF, so a request
    // carrying a focus distance would still reach here — confirm intended.
    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
            fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
        float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
                focalDistance)) {
            rc = BAD_VALUE;
        }
    }
13130 
    // Antibanding: AUTO is specialized to AUTO_50HZ / AUTO_60HZ based on the
    // detected power-line zone flag (m60HzZone) before reaching the HAL.
    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
        uint8_t fwk_antibandingMode =
                frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
        int val = lookupHalName(ANTIBANDING_MODES_MAP,
                METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
        if (NAME_NOT_FOUND != val) {
            uint32_t hal_antibandingMode = (uint32_t)val;
            if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
                if (m60HzZone) {
                    hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
                } else {
                    hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
                }
            }
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
                    hal_antibandingMode)) {
                rc = BAD_VALUE;
            }
        }
    }

    // AE exposure compensation, clamped to the device-reported min/max.
    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
        int32_t expCompensation = frame_settings.find(
                ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
        LOGD("Setting compensation:%d", expCompensation);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
                expCompensation)) {
            rc = BAD_VALUE;
        }
    }

    // AE lock: byte value forwarded unchanged.
    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
            rc = BAD_VALUE;
        }
    }
    // Target FPS range is translated by a dedicated helper.
    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
        rc = setHalFpsRange(frame_settings, hal_metadata);
        if (rc != NO_ERROR) {
            LOGE("setHalFpsRange failed");
        }
    }

    // AWB lock: byte value forwarded unchanged.
    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
        uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
            rc = BAD_VALUE;
        }
    }

    // Color effect mode, mapped through EFFECT_MODES_MAP; unknown values
    // are silently dropped (NAME_NOT_FOUND).
    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
        uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
        int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
                fwk_effectMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t effectMode = (uint8_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
                rc = BAD_VALUE;
            }
        }
    }

    // Color correction mode (byte), forwarded unchanged.
    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
        uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
                colorCorrectMode)) {
            rc = BAD_VALUE;
        }
    }

    // Color correction gains: copy CC_GAIN_MAX floats from the request.
    // NOTE(review): assumes the entry carries at least CC_GAIN_MAX values —
    // the entry count is not checked here.
    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
        cam_color_correct_gains_t colorCorrectGains;
        for (size_t i = 0; i < CC_GAIN_MAX; i++) {
            colorCorrectGains.gains[i] =
                    frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
                colorCorrectGains)) {
            rc = BAD_VALUE;
        }
    }

    // Color correction transform: unpack the flat rational array row-major
    // into the CC_MATRIX_ROWS x CC_MATRIX_COLS HAL matrix.
    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
        cam_color_correct_matrix_t colorCorrectTransform;
        cam_rational_type_t transform_elem;
        size_t num = 0;
        for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
           for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
              transform_elem.numerator =
                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
              transform_elem.denominator =
                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
              num++;
           }
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
                colorCorrectTransform)) {
            rc = BAD_VALUE;
        }
    }
13237 
    // AE precapture trigger: defaults to IDLE / id -1; the batch entry is
    // only written when both the trigger and its id are in the request.
    cam_trigger_t aecTrigger;
    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
    aecTrigger.trigger_id = -1;
    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
        aecTrigger.trigger =
            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
        aecTrigger.trigger_id =
            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
                aecTrigger)) {
            rc = BAD_VALUE;
        }
        LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
                aecTrigger.trigger, aecTrigger.trigger_id);
    }

    /*af_trigger must come with a trigger id*/
    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
        cam_trigger_t af_trigger;
        af_trigger.trigger =
            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
        af_trigger.trigger_id =
            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
            rc = BAD_VALUE;
        }
        LOGD("AfTrigger: %d AfTriggerID: %d",
                af_trigger.trigger, af_trigger.trigger_id);
    }

    // Demosaic mode: read as a byte from the request, widened to int32 for
    // the HAL batch entry.
    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
        int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
            rc = BAD_VALUE;
        }
    }
    // Edge/sharpening: OFF forces sharpness 0; otherwise start from the
    // device default and let the vendor sharpness-strength tag override it
    // when it falls within the supported range.
    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
        cam_edge_application_t edge_application;
        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];

        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
            edge_application.sharpness = 0;
        } else {
            edge_application.sharpness =
                    gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
            if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
                int32_t sharpness =
                        frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
                if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
                    sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
                    LOGD("Setting edge mode sharpness %d", sharpness);
                    edge_application.sharpness = sharpness;
                }
            }
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
            rc = BAD_VALUE;
        }
    }
13299 
    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
        // Forward the raw framework flash mode to the HAL first.
        uint32_t flashMode = (uint32_t)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_MODE, flashMode)) {
            rc = BAD_VALUE;
        }

        // android.flash.mode only drives the LED when AE is NOT in one of
        // the flash-controlling modes; in those modes AE owns the flash.
        int32_t respectFlashMode = 1;
        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
            uint8_t fwk_aeMode =
                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
            if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
                    fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
                    fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
                respectFlashMode = 0;
                LOGH("AE Mode controls flash, ignore android.flash.mode");
            }
        }
        if (respectFlashMode) {
            // Map the framework flash mode to a HAL LED mode.
            int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
            LOGH("flash mode after mapping %d", val);
            // To check: CAM_INTF_META_FLASH_MODE usage
            if (NAME_NOT_FOUND != val) {
                uint8_t ledMode = (uint8_t)val;
                if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, ledMode)) {
                    rc = BAD_VALUE;
                }
            }
        }
    }
13330 
13331     if (frame_settings.exists(ANDROID_FLASH_STATE)) {
13332         int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.i32[0];
13333         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_STATE, flashState)) {
13334             rc = BAD_VALUE;
13335         }
13336     }
13337 
    // Flash firing power (byte passthrough).
    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
        uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
            rc = BAD_VALUE;
        }
    }

    // Flash firing time (int64 passthrough).
    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
        int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
                flashFiringTime)) {
            rc = BAD_VALUE;
        }
    }

    // Hot pixel correction mode.
    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
        uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
                hotPixelMode)) {
            rc = BAD_VALUE;
        }
    }

    // Lens aperture.
    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
        float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
                lensAperture)) {
            rc = BAD_VALUE;
        }
    }

    // Lens filter density.
    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
        float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
                filterDensity)) {
            rc = BAD_VALUE;
        }
    }

    // Lens focal length.
    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
        float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
                focalLength)) {
            rc = BAD_VALUE;
        }
    }

    // Optical image stabilization mode.
    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
        uint8_t optStabMode =
                frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
                optStabMode)) {
            rc = BAD_VALUE;
        }
    }

    // Video (electronic) stabilization mode.
    // NOTE(review): unlike the neighboring settings, this writes into
    // mParameters rather than the hal_metadata batch — confirm this is
    // intentional (e.g. so it never applies to reprocess metadata).
    if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
        uint8_t videoStabMode =
                frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
        LOGD("videoStabMode from APP = %d", videoStabMode);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
                videoStabMode)) {
            rc = BAD_VALUE;
        }
    }


    // Noise reduction mode.
    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
        uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
                noiseRedMode)) {
            rc = BAD_VALUE;
        }
    }

    // Effective exposure factor for reprocess requests.
    if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
        float reprocessEffectiveExposureFactor =
            frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
                reprocessEffectiveExposureFactor)) {
            rc = BAD_VALUE;
        }
    }
13421 
    // Scaler crop region: translate from active-array coordinates to
    // sensor-output coordinates, then remember it (scalerCropSet) so the
    // AE/AF ROIs further below can be validated against the crop.
    cam_crop_region_t scalerCropRegion;
    bool scalerCropSet = false;
    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
        scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
        scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
        scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
        scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];

        // Map coordinate system from active array to sensor output.
        mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
                scalerCropRegion.width, scalerCropRegion.height);

        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
                scalerCropRegion)) {
            rc = BAD_VALUE;
        }
        scalerCropSet = true;
    }

    // Manual sensor exposure time, forwarded unchanged.
    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
        int64_t sensorExpTime =
                frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
        LOGD("setting sensorExpTime %lld", sensorExpTime);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
                sensorExpTime)) {
            rc = BAD_VALUE;
        }
    }

    // Sensor frame duration, clamped into
    // [minFrameDuration, device max_frame_duration].
    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
        int64_t sensorFrameDuration =
                frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
        LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
                sensorFrameDuration)) {
            rc = BAD_VALUE;
        }
    }

    // Sensor sensitivity, clamped to the device-reported range.
    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
        int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
        if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
        if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
        LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
                sensorSensitivity)) {
            rc = BAD_VALUE;
        }
    }

#ifndef USE_HAL_3_3
    // Post-RAW sensitivity boost (not available on HAL 3.3 builds), clamped
    // to the ISP sensitivity range reported by the device.
    if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
        int32_t ispSensitivity =
            frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
        if (ispSensitivity <
            gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
                ispSensitivity =
                    gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
                LOGD("clamp ispSensitivity to %d", ispSensitivity);
        }
        if (ispSensitivity >
            gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
                ispSensitivity =
                    gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
                LOGD("clamp ispSensitivity to %d", ispSensitivity);
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
                ispSensitivity)) {
            rc = BAD_VALUE;
        }
    }
#endif
13499 
    // Lens shading correction mode.
    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
        uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
            rc = BAD_VALUE;
        }
    }

    // Face detect mode, mapped through FACEDETECT_MODES_MAP.
    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
        uint8_t fwk_facedetectMode =
                frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];

        int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
                fwk_facedetectMode);

        if (NAME_NOT_FOUND != val) {
            uint8_t facedetectMode = (uint8_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
                    facedetectMode)) {
                rc = BAD_VALUE;
            }
        }
    }

    // Vendor histogram statistics mode.
    if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
        uint8_t histogramMode =
                frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
                histogramMode)) {
            rc = BAD_VALUE;
        }
    }

    // Sharpness map statistics mode.
    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
        uint8_t sharpnessMapMode =
                frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
                sharpnessMapMode)) {
            rc = BAD_VALUE;
        }
    }

    // Tonemap mode.
    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
        uint8_t tonemapMode =
                frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
            rc = BAD_VALUE;
        }
    }
    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
    /*All tonemap channels will have the same number of points*/
    // Each channel is a flat array of (in, out) float pairs; the point
    // count (pairs) is taken from the green channel and truncated to
    // CAM_MAX_TONEMAP_CURVE_SIZE.
    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
        cam_rgb_tonemap_curves tonemapCurves;
        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
        if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
                     tonemapCurves.tonemap_points_cnt,
                    CAM_MAX_TONEMAP_CURVE_SIZE);
            tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
        }

        /* ch0 = G*/
        size_t point = 0;
        cam_tonemap_curve_t tonemapCurveGreen;
        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
            for (size_t j = 0; j < 2; j++) {
               tonemapCurveGreen.tonemap_points[i][j] =
                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
               point++;
            }
        }
        tonemapCurves.curves[0] = tonemapCurveGreen;

        /* ch 1 = B */
        point = 0;
        cam_tonemap_curve_t tonemapCurveBlue;
        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
            for (size_t j = 0; j < 2; j++) {
               tonemapCurveBlue.tonemap_points[i][j] =
                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
               point++;
            }
        }
        tonemapCurves.curves[1] = tonemapCurveBlue;

        /* ch 2 = R */
        point = 0;
        cam_tonemap_curve_t tonemapCurveRed;
        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
            for (size_t j = 0; j < 2; j++) {
               tonemapCurveRed.tonemap_points[i][j] =
                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
               point++;
            }
        }
        tonemapCurves.curves[2] = tonemapCurveRed;

        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
                tonemapCurves)) {
            rc = BAD_VALUE;
        }
    }
13603 
    // Capture intent, forwarded unchanged.
    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
        uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
                captureIntent)) {
            rc = BAD_VALUE;
        }
    }

    // Black level lock.
    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
        uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
                blackLevelLock)) {
            rc = BAD_VALUE;
        }
    }

    // Lens shading map reporting mode.
    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
        uint8_t lensShadingMapMode =
                frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
                lensShadingMapMode)) {
            rc = BAD_VALUE;
        }
    }

    // AE regions: convert to a HAL ROI, map from active-array to sensor
    // coordinates, then (when a crop was set above) run it through
    // resetIfNeededROI — the entry is only written when that returns true.
    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
        cam_area_t roi;
        bool reset = true;
        convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);

        // Map coordinate system from active array to sensor output.
        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
                roi.rect.height);

        if (scalerCropSet) {
            reset = resetIfNeededROI(&roi, &scalerCropRegion);
        }
        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
            rc = BAD_VALUE;
        }
    }

    // AF regions: same treatment as the AE regions above.
    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
        cam_area_t roi;
        bool reset = true;
        convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);

        // Map coordinate system from active array to sensor output.
        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
                roi.rect.height);

        if (scalerCropSet) {
            reset = resetIfNeededROI(&roi, &scalerCropRegion);
        }
        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
            rc = BAD_VALUE;
        }
    }

    // CDS for non-HFR non-video mode
    // Vendor CDS mode, range-checked before forwarding.
    if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
            !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
        int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
        if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
            LOGE("Invalid CDS mode %d!", *fwk_cds);
        } else {
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                    CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
                rc = BAD_VALUE;
            }
        }
    }

    // Video HDR: the vendor tag and the m_bVideoHdrEnabled override both
    // feed setVideoHdrMode(); curr_hdr_state only drives the profiling log.
    cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
    if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
        vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
    }
    if (m_bVideoHdrEnabled)
        vhdr = CAM_VIDEO_HDR_MODE_ON;

    int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);

    if(vhdr != curr_hdr_state)
        LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);

    // NOTE(review): this assignment replaces rc unconditionally, so a
    // BAD_VALUE recorded by any earlier setting is discarded here when
    // setVideoHdrMode() succeeds — confirm this is intended.
    rc = setVideoHdrMode(mParameters, vhdr);
    if (rc != NO_ERROR) {
        LOGE("setVideoHDR is failed");
    }
13694 
    //IR
    // Vendor IR mode: range-check, log on/off transitions against the
    // current feature state, then forward.
    if(frame_settings.exists(QCAMERA3_IR_MODE)) {
        cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
                frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
        uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
        uint8_t isIRon = 0;

        (fwk_ir >0) ? (isIRon = 1) : (isIRon = 0) ;
        if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
            LOGE("Invalid IR mode %d!", fwk_ir);
        } else {
            if(isIRon != curr_ir_state )
               LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);

            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                    CAM_INTF_META_IR_MODE, fwk_ir)) {
                rc = BAD_VALUE;
            }
        }
    }

    //Binning Correction Mode
    // Vendor binning-correction mode, range-checked before forwarding.
    if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
        cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
                frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
        if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
                || (0 > fwk_binning_correction)) {
            LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
        } else {
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                    CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
                rc = BAD_VALUE;
            }
        }
    }

    // Vendor AEC convergence speed; negative values are rejected.
    if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
        float aec_speed;
        aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
        LOGD("AEC Speed :%f", aec_speed);
        if ( aec_speed < 0 ) {
            LOGE("Invalid AEC mode %f!", aec_speed);
        } else {
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
                    aec_speed)) {
                rc = BAD_VALUE;
            }
        }
    }

    // Vendor AWB convergence speed; negative values are rejected.
    if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
        float awb_speed;
        awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
        LOGD("AWB Speed :%f", awb_speed);
        if ( awb_speed < 0 ) {
            LOGE("Invalid AWB mode %f!", awb_speed);
        } else {
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
                    awb_speed)) {
                rc = BAD_VALUE;
            }
        }
    }

    // TNR
    // Temporal noise reduction: requires both the enable flag and the
    // process-plate type; logs on/off transitions against current state.
    // NOTE(review): written to mParameters, not the hal_metadata batch,
    // unlike most settings in this function — confirm intended.
    if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
        frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
        uint8_t b_TnrRequested = 0;
        uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
        cam_denoise_param_t tnr;
        tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
        tnr.process_plates =
            (cam_denoise_process_type_t)frame_settings.find(
            QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
        b_TnrRequested = tnr.denoise_enable;

        if(b_TnrRequested != curr_tnr_state)
           LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);

        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
            rc = BAD_VALUE;
        }
    }

    // Vendor exposure metering (AEC algorithm) selection.
    if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
        int32_t* exposure_metering_mode =
                frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
                *exposure_metering_mode)) {
            rc = BAD_VALUE;
        }
    }

    // Sensor test pattern mode (translation continues past this excerpt).
    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
        int32_t fwk_testPatternMode =
                frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
13791         int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
13792                 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
13793 
13794         if (NAME_NOT_FOUND != testPatternMode) {
13795             cam_test_pattern_data_t testPatternData;
13796             memset(&testPatternData, 0, sizeof(testPatternData));
13797             testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
13798             if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
13799                     frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
13800                 int32_t *fwk_testPatternData =
13801                         frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
13802                 testPatternData.r = fwk_testPatternData[0];
13803                 testPatternData.b = fwk_testPatternData[3];
13804                 switch (gCamCapability[mCameraId]->color_arrangement) {
13805                     case CAM_FILTER_ARRANGEMENT_RGGB:
13806                     case CAM_FILTER_ARRANGEMENT_GRBG:
13807                         testPatternData.gr = fwk_testPatternData[1];
13808                         testPatternData.gb = fwk_testPatternData[2];
13809                         break;
13810                     case CAM_FILTER_ARRANGEMENT_GBRG:
13811                     case CAM_FILTER_ARRANGEMENT_BGGR:
13812                         testPatternData.gr = fwk_testPatternData[2];
13813                         testPatternData.gb = fwk_testPatternData[1];
13814                         break;
13815                     default:
13816                         LOGE("color arrangement %d is not supported",
13817                                 gCamCapability[mCameraId]->color_arrangement);
13818                         break;
13819                 }
13820             }
13821             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
13822                     testPatternData)) {
13823                 rc = BAD_VALUE;
13824             }
13825         } else {
13826             LOGE("Invalid framework sensor test pattern mode %d",
13827                     fwk_testPatternMode);
13828         }
13829     }
13830 
13831     if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
13832         size_t count = 0;
13833         camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
13834         ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
13835                 gps_coords.data.d, gps_coords.count, count);
13836         if (gps_coords.count != count) {
13837             rc = BAD_VALUE;
13838         }
13839     }
13840 
13841     if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
13842         char gps_methods[GPS_PROCESSING_METHOD_SIZE];
13843         size_t count = 0;
13844         const char *gps_methods_src = (const char *)
13845                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
13846         memset(gps_methods, '\0', sizeof(gps_methods));
13847         strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
13848         ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
13849                 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
13850         if (GPS_PROCESSING_METHOD_SIZE != count) {
13851             rc = BAD_VALUE;
13852         }
13853     }
13854 
13855     if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
13856         int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
13857         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
13858                 gps_timestamp)) {
13859             rc = BAD_VALUE;
13860         }
13861     }
13862 
13863     if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
13864         int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
13865         cam_rotation_info_t rotation_info;
13866         if (orientation == 0) {
13867            rotation_info.rotation = ROTATE_0;
13868         } else if (orientation == 90) {
13869            rotation_info.rotation = ROTATE_90;
13870         } else if (orientation == 180) {
13871            rotation_info.rotation = ROTATE_180;
13872         } else if (orientation == 270) {
13873            rotation_info.rotation = ROTATE_270;
13874         }
13875         rotation_info.device_rotation = ROTATE_0;
13876         rotation_info.streamId = snapshotStreamId;
13877         ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
13878         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
13879             rc = BAD_VALUE;
13880         }
13881     }
13882 
13883     if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
13884         uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
13885         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
13886             rc = BAD_VALUE;
13887         }
13888     }
13889 
13890     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
13891         uint32_t thumb_quality = (uint32_t)
13892                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
13893         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
13894                 thumb_quality)) {
13895             rc = BAD_VALUE;
13896         }
13897     }
13898 
13899     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
13900         cam_dimension_t dim;
13901         dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
13902         dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
13903         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
13904             rc = BAD_VALUE;
13905         }
13906     }
13907 
13908     // Internal metadata
13909     if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
13910         size_t count = 0;
13911         camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
13912         ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
13913                 privatedata.data.i32, privatedata.count, count);
13914         if (privatedata.count != count) {
13915             rc = BAD_VALUE;
13916         }
13917     }
13918 
13919     // ISO/Exposure Priority
13920     if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
13921         frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
13922         cam_priority_mode_t mode =
13923                 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
13924         if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
13925             cam_intf_parm_manual_3a_t use_iso_exp_pty;
13926             use_iso_exp_pty.previewOnly = FALSE;
13927             uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
13928             use_iso_exp_pty.value = *ptr;
13929 
13930             if(CAM_ISO_PRIORITY == mode) {
13931                 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
13932                         use_iso_exp_pty)) {
13933                     rc = BAD_VALUE;
13934                 }
13935             }
13936             else {
13937                 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
13938                         use_iso_exp_pty)) {
13939                     rc = BAD_VALUE;
13940                 }
13941             }
13942 
13943             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
13944                     rc = BAD_VALUE;
13945             }
13946         }
13947     } else {
13948         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
13949             rc = BAD_VALUE;
13950         }
13951     }
13952 
13953     // Saturation
13954     if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
13955         int32_t* use_saturation =
13956                 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
13957         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
13958             rc = BAD_VALUE;
13959         }
13960     }
13961 
13962     // EV step
13963     if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
13964             gCamCapability[mCameraId]->exp_compensation_step)) {
13965         rc = BAD_VALUE;
13966     }
13967 
13968     // CDS info
13969     if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
13970         cam_cds_data_t *cdsData = (cam_cds_data_t *)
13971                 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
13972 
13973         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13974                 CAM_INTF_META_CDS_DATA, *cdsData)) {
13975             rc = BAD_VALUE;
13976         }
13977     }
13978 
13979     // Hybrid AE
13980     if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
13981         uint8_t *hybrid_ae = (uint8_t *)
13982                 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
13983         // Motion tracking intent isn't compatible with hybrid ae.
13984         if (mCaptureIntent == CAM_INTENT_MOTION_TRACKING) {
13985             *hybrid_ae = 0;
13986         }
13987         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
13988             rc = BAD_VALUE;
13989         }
13990     }
13991 
13992     // Motion Detection
13993     if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE)) {
13994         uint8_t *motion_detection = (uint8_t *)
13995                 frame_settings.find(NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE).data.u8;
13996         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MOTION_DETECTION_ENABLE, *motion_detection)) {
13997             rc = BAD_VALUE;
13998         }
13999     }
14000 
14001     // Histogram
14002     if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
14003         uint8_t histogramMode =
14004                 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
14005         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
14006                 histogramMode)) {
14007             rc = BAD_VALUE;
14008         }
14009     }
14010 
14011     if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
14012         int32_t histogramBins =
14013                  frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
14014         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
14015                 histogramBins)) {
14016             rc = BAD_VALUE;
14017         }
14018     }
14019 
14020     // Tracking AF
14021     if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
14022         uint8_t trackingAfTrigger =
14023                 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
14024         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
14025                 trackingAfTrigger)) {
14026             rc = BAD_VALUE;
14027         }
14028     }
14029 
14030     // Makernote
14031     camera_metadata_entry entry = frame_settings.find(NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE);
14032     if (entry.count != 0) {
14033         if (entry.count <= MAX_MAKERNOTE_LENGTH) {
14034             cam_makernote_t makernote;
14035             makernote.length = entry.count;
14036             memcpy(makernote.data, entry.data.u8, makernote.length);
14037             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MAKERNOTE, makernote)) {
14038                 rc = BAD_VALUE;
14039             }
14040         } else {
14041             ALOGE("%s: Makernote length %u is larger than %d", __FUNCTION__, entry.count,
14042                     MAX_MAKERNOTE_LENGTH);
14043             rc = BAD_VALUE;
14044         }
14045     }
14046 
14047     return rc;
14048 }
14049 
14050 /*===========================================================================
14051  * FUNCTION   : captureResultCb
14052  *
14053  * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
14054  *
14055  * PARAMETERS :
14056  *   @frame  : frame information from mm-camera-interface
14057  *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
14058  *   @userdata: userdata
14059  *
14060  * RETURN     : NONE
14061  *==========================================================================*/
captureResultCb(mm_camera_super_buf_t * metadata,camera3_stream_buffer_t * buffer,uint32_t frame_number,bool isInputBuffer,void * userdata)14062 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
14063                 camera3_stream_buffer_t *buffer,
14064                 uint32_t frame_number, bool isInputBuffer, void *userdata)
14065 {
14066     QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
14067     if (hw == NULL) {
14068         LOGE("Invalid hw %p", hw);
14069         return;
14070     }
14071 
14072     hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
14073     return;
14074 }
14075 
14076 /*===========================================================================
14077  * FUNCTION   : setBufferErrorStatus
14078  *
14079  * DESCRIPTION: Callback handler for channels to report any buffer errors
14080  *
14081  * PARAMETERS :
14082  *   @ch     : Channel on which buffer error is reported from
14083  *   @frame_number  : frame number on which buffer error is reported on
14084  *   @buffer_status : buffer error status
14085  *   @userdata: userdata
14086  *
14087  * RETURN     : NONE
14088  *==========================================================================*/
setBufferErrorStatus(QCamera3Channel * ch,uint32_t frame_number,camera3_buffer_status_t err,void * userdata)14089 void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
14090         uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
14091 {
14092     QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
14093     if (hw == NULL) {
14094         LOGE("Invalid hw %p", hw);
14095         return;
14096     }
14097 
14098     hw->setBufferErrorStatus(ch, frame_number, err);
14099     return;
14100 }
14101 
setBufferErrorStatus(QCamera3Channel * ch,uint32_t frameNumber,camera3_buffer_status_t err)14102 void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
14103         uint32_t frameNumber, camera3_buffer_status_t err)
14104 {
14105     LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
14106     pthread_mutex_lock(&mMutex);
14107 
14108     for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
14109         if (req.frame_number != frameNumber)
14110             continue;
14111         for (auto& k : req.mPendingBufferList) {
14112             if(k.stream->priv == ch) {
14113                 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
14114             }
14115         }
14116     }
14117 
14118     pthread_mutex_unlock(&mMutex);
14119     return;
14120 }
14121 /*===========================================================================
14122  * FUNCTION   : initialize
14123  *
14124  * DESCRIPTION: Pass framework callback pointers to HAL
14125  *
14126  * PARAMETERS :
14127  *
14128  *
14129  * RETURN     : Success : 0
14130  *              Failure: -ENODEV
14131  *==========================================================================*/
14132 
initialize(const struct camera3_device * device,const camera3_callback_ops_t * callback_ops)14133 int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
14134                                   const camera3_callback_ops_t *callback_ops)
14135 {
14136     LOGD("E");
14137     QCamera3HardwareInterface *hw =
14138         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
14139     if (!hw) {
14140         LOGE("NULL camera device");
14141         return -ENODEV;
14142     }
14143 
14144     int rc = hw->initialize(callback_ops);
14145     LOGD("X");
14146     return rc;
14147 }
14148 
14149 /*===========================================================================
14150  * FUNCTION   : configure_streams
14151  *
14152  * DESCRIPTION:
14153  *
14154  * PARAMETERS :
14155  *
14156  *
14157  * RETURN     : Success: 0
14158  *              Failure: -EINVAL (if stream configuration is invalid)
14159  *                       -ENODEV (fatal error)
14160  *==========================================================================*/
14161 
configure_streams(const struct camera3_device * device,camera3_stream_configuration_t * stream_list)14162 int QCamera3HardwareInterface::configure_streams(
14163         const struct camera3_device *device,
14164         camera3_stream_configuration_t *stream_list)
14165 {
14166     LOGD("E");
14167     QCamera3HardwareInterface *hw =
14168         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
14169     if (!hw) {
14170         LOGE("NULL camera device");
14171         return -ENODEV;
14172     }
14173     int rc = hw->configureStreams(stream_list);
14174     LOGD("X");
14175     return rc;
14176 }
14177 
14178 /*===========================================================================
14179  * FUNCTION   : construct_default_request_settings
14180  *
14181  * DESCRIPTION: Configure a settings buffer to meet the required use case
14182  *
14183  * PARAMETERS :
14184  *
14185  *
14186  * RETURN     : Success: Return valid metadata
14187  *              Failure: Return NULL
14188  *==========================================================================*/
14189 const camera_metadata_t* QCamera3HardwareInterface::
construct_default_request_settings(const struct camera3_device * device,int type)14190     construct_default_request_settings(const struct camera3_device *device,
14191                                         int type)
14192 {
14193 
14194     LOGD("E");
14195     camera_metadata_t* fwk_metadata = NULL;
14196     QCamera3HardwareInterface *hw =
14197         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
14198     if (!hw) {
14199         LOGE("NULL camera device");
14200         return NULL;
14201     }
14202 
14203     fwk_metadata = hw->translateCapabilityToMetadata(type);
14204 
14205     LOGD("X");
14206     return fwk_metadata;
14207 }
14208 
14209 /*===========================================================================
14210  * FUNCTION   : process_capture_request
14211  *
14212  * DESCRIPTION:
14213  *
14214  * PARAMETERS :
14215  *
14216  *
14217  * RETURN     :
14218  *==========================================================================*/
process_capture_request(const struct camera3_device * device,camera3_capture_request_t * request)14219 int QCamera3HardwareInterface::process_capture_request(
14220                     const struct camera3_device *device,
14221                     camera3_capture_request_t *request)
14222 {
14223     LOGD("E");
14224     CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
14225     QCamera3HardwareInterface *hw =
14226         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
14227     if (!hw) {
14228         LOGE("NULL camera device");
14229         return -EINVAL;
14230     }
14231 
14232     int rc = hw->orchestrateRequest(request);
14233     LOGD("X");
14234     return rc;
14235 }
14236 
14237 /*===========================================================================
14238  * FUNCTION   : dump
14239  *
14240  * DESCRIPTION:
14241  *
14242  * PARAMETERS :
14243  *
14244  *
14245  * RETURN     :
14246  *==========================================================================*/
14247 
dump(const struct camera3_device * device,int fd)14248 void QCamera3HardwareInterface::dump(
14249                 const struct camera3_device *device, int fd)
14250 {
14251     /* Log level property is read when "adb shell dumpsys media.camera" is
14252        called so that the log level can be controlled without restarting
14253        the media server */
14254     getLogLevel();
14255 
14256     LOGD("E");
14257     QCamera3HardwareInterface *hw =
14258         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
14259     if (!hw) {
14260         LOGE("NULL camera device");
14261         return;
14262     }
14263 
14264     hw->dump(fd);
14265     LOGD("X");
14266     return;
14267 }
14268 
14269 /*===========================================================================
14270  * FUNCTION   : flush
14271  *
14272  * DESCRIPTION:
14273  *
14274  * PARAMETERS :
14275  *
14276  *
14277  * RETURN     :
14278  *==========================================================================*/
14279 
flush(const struct camera3_device * device)14280 int QCamera3HardwareInterface::flush(
14281                 const struct camera3_device *device)
14282 {
14283     int rc;
14284     LOGD("E");
14285     QCamera3HardwareInterface *hw =
14286         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
14287     if (!hw) {
14288         LOGE("NULL camera device");
14289         return -EINVAL;
14290     }
14291 
14292     pthread_mutex_lock(&hw->mMutex);
14293     // Validate current state
14294     switch (hw->mState) {
14295         case STARTED:
14296             /* valid state */
14297             break;
14298 
14299         case ERROR:
14300             pthread_mutex_unlock(&hw->mMutex);
14301             hw->handleCameraDeviceError();
14302             return -ENODEV;
14303 
14304         default:
14305             LOGI("Flush returned during state %d", hw->mState);
14306             pthread_mutex_unlock(&hw->mMutex);
14307             return 0;
14308     }
14309     pthread_mutex_unlock(&hw->mMutex);
14310 
14311     rc = hw->flush(true /* restart channels */ );
14312     LOGD("X");
14313     return rc;
14314 }
14315 
14316 /*===========================================================================
14317  * FUNCTION   : close_camera_device
14318  *
14319  * DESCRIPTION:
14320  *
14321  * PARAMETERS :
14322  *
14323  *
14324  * RETURN     :
14325  *==========================================================================*/
close_camera_device(struct hw_device_t * device)14326 int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
14327 {
14328     int ret = NO_ERROR;
14329     QCamera3HardwareInterface *hw =
14330         reinterpret_cast<QCamera3HardwareInterface *>(
14331             reinterpret_cast<camera3_device_t *>(device)->priv);
14332     if (!hw) {
14333         LOGE("NULL camera device");
14334         return BAD_VALUE;
14335     }
14336 
14337     LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
14338     delete hw;
14339     LOGI("[KPI Perf]: X");
14340     CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
14341     return ret;
14342 }
14343 
14344 /*===========================================================================
14345  * FUNCTION   : getWaveletDenoiseProcessPlate
14346  *
14347  * DESCRIPTION: query wavelet denoise process plate
14348  *
14349  * PARAMETERS : None
14350  *
 * RETURN     : WNR process plate value
14352  *==========================================================================*/
getWaveletDenoiseProcessPlate()14353 cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
14354 {
14355     char prop[PROPERTY_VALUE_MAX];
14356     memset(prop, 0, sizeof(prop));
14357     property_get("persist.denoise.process.plates", prop, "0");
14358     int processPlate = atoi(prop);
14359     switch(processPlate) {
14360     case 0:
14361         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
14362     case 1:
14363         return CAM_WAVELET_DENOISE_CBCR_ONLY;
14364     case 2:
14365         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
14366     case 3:
14367         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
14368     default:
14369         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
14370     }
14371 }
14372 
14373 
14374 /*===========================================================================
14375  * FUNCTION   : getTemporalDenoiseProcessPlate
14376  *
14377  * DESCRIPTION: query temporal denoise process plate
14378  *
14379  * PARAMETERS : None
14380  *
 * RETURN     : TNR process plate value
14382  *==========================================================================*/
getTemporalDenoiseProcessPlate()14383 cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
14384 {
14385     char prop[PROPERTY_VALUE_MAX];
14386     memset(prop, 0, sizeof(prop));
14387     property_get("persist.tnr.process.plates", prop, "0");
14388     int processPlate = atoi(prop);
14389     switch(processPlate) {
14390     case 0:
14391         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
14392     case 1:
14393         return CAM_WAVELET_DENOISE_CBCR_ONLY;
14394     case 2:
14395         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
14396     case 3:
14397         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
14398     default:
14399         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
14400     }
14401 }
14402 
14403 
14404 /*===========================================================================
14405  * FUNCTION   : extractSceneMode
14406  *
14407  * DESCRIPTION: Extract scene mode from frameworks set metadata
14408  *
14409  * PARAMETERS :
14410  *      @frame_settings: CameraMetadata reference
 *      @metaMode: ANDROID_CONTROL_MODE
14412  *      @hal_metadata: hal metadata structure
14413  *
 * RETURN     : NO_ERROR on success, BAD_VALUE on failure
14415  *==========================================================================*/
extractSceneMode(const CameraMetadata & frame_settings,uint8_t metaMode,metadata_buffer_t * hal_metadata)14416 int32_t QCamera3HardwareInterface::extractSceneMode(
14417         const CameraMetadata &frame_settings, uint8_t metaMode,
14418         metadata_buffer_t *hal_metadata)
14419 {
14420     int32_t rc = NO_ERROR;
14421     uint8_t sceneMode = CAM_SCENE_MODE_OFF;
14422 
14423     if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
14424         LOGD("Ignoring control mode OFF_KEEP_STATE");
14425         return NO_ERROR;
14426     }
14427 
14428     if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
14429         camera_metadata_ro_entry entry =
14430                 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
14431         if (0 == entry.count)
14432             return rc;
14433 
14434         uint8_t fwk_sceneMode = entry.data.u8[0];
14435 
14436         int val = lookupHalName(SCENE_MODES_MAP,
14437                 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
14438                 fwk_sceneMode);
14439         if (NAME_NOT_FOUND != val) {
14440             sceneMode = (uint8_t)val;
14441             LOGD("sceneMode: %d", sceneMode);
14442         }
14443     }
14444 
14445     if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
14446         rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
14447     }
14448 
14449     if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
14450         if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
14451             cam_hdr_param_t hdr_params;
14452             hdr_params.hdr_enable = 1;
14453             hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
14454             hdr_params.hdr_need_1x = false;
14455             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
14456                     CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
14457                 rc = BAD_VALUE;
14458             }
14459         }
14460 
14461         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
14462                 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
14463             rc = BAD_VALUE;
14464         }
14465     }
14466 
14467     if (mForceHdrSnapshot) {
14468         cam_hdr_param_t hdr_params;
14469         hdr_params.hdr_enable = 1;
14470         hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
14471         hdr_params.hdr_need_1x = false;
14472         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
14473                 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
14474             rc = BAD_VALUE;
14475         }
14476     }
14477 
14478     return rc;
14479 }
14480 
14481 /*===========================================================================
14482  * FUNCTION   : setVideoHdrMode
14483  *
14484  * DESCRIPTION: Set Video HDR mode from frameworks set metadata
14485  *
14486  * PARAMETERS :
14487  *      @hal_metadata: hal metadata structure
14488  *      @metaMode: QCAMERA3_VIDEO_HDR_MODE
14489  *
 * RETURN     : NO_ERROR on success, BAD_VALUE for an invalid mode
14491  *==========================================================================*/
setVideoHdrMode(metadata_buffer_t * hal_metadata,cam_video_hdr_mode_t vhdr)14492 int32_t QCamera3HardwareInterface::setVideoHdrMode(
14493         metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
14494 {
14495     if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
14496         return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
14497     }
14498 
14499     LOGE("Invalid Video HDR mode %d!", vhdr);
14500     return BAD_VALUE;
14501 }
14502 
14503 /*===========================================================================
14504  * FUNCTION   : setSensorHDR
14505  *
14506  * DESCRIPTION: Enable/disable sensor HDR.
14507  *
14508  * PARAMETERS :
14509  *      @hal_metadata: hal metadata structure
14510  *      @enable: boolean whether to enable/disable sensor HDR
14511  *
14512  * RETURN     : None
14513  *==========================================================================*/
setSensorHDR(metadata_buffer_t * hal_metadata,bool enable,bool isVideoHdrEnable)14514 int32_t QCamera3HardwareInterface::setSensorHDR(
14515         metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
14516 {
14517     int32_t rc = NO_ERROR;
14518     cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
14519 
14520     if (enable) {
14521         char sensor_hdr_prop[PROPERTY_VALUE_MAX];
14522         memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
14523         #ifdef _LE_CAMERA_
14524         //Default to staggered HDR for IOT
14525         property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
14526         #else
14527         property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
14528         #endif
14529         sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
14530     }
14531 
14532     bool isSupported = false;
14533     switch (sensor_hdr) {
14534         case CAM_SENSOR_HDR_IN_SENSOR:
14535             if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
14536                     CAM_QCOM_FEATURE_SENSOR_HDR) {
14537                 isSupported = true;
14538                 LOGD("Setting HDR mode In Sensor");
14539             }
14540             break;
14541         case CAM_SENSOR_HDR_ZIGZAG:
14542             if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
14543                     CAM_QCOM_FEATURE_ZIGZAG_HDR) {
14544                 isSupported = true;
14545                 LOGD("Setting HDR mode Zigzag");
14546             }
14547             break;
14548         case CAM_SENSOR_HDR_STAGGERED:
14549             if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
14550                     CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
14551                 isSupported = true;
14552                 LOGD("Setting HDR mode Staggered");
14553             }
14554             break;
14555         case CAM_SENSOR_HDR_OFF:
14556             isSupported = true;
14557             LOGD("Turning off sensor HDR");
14558             break;
14559         default:
14560             LOGE("HDR mode %d not supported", sensor_hdr);
14561             rc = BAD_VALUE;
14562             break;
14563     }
14564 
14565     if(isSupported) {
14566         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
14567                 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
14568             rc = BAD_VALUE;
14569         } else {
14570             if(!isVideoHdrEnable)
14571                 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
14572         }
14573     }
14574     return rc;
14575 }
14576 
14577 /*===========================================================================
14578  * FUNCTION   : needRotationReprocess
14579  *
14580  * DESCRIPTION: if rotation needs to be done by reprocess in pp
14581  *
14582  * PARAMETERS : none
14583  *
14584  * RETURN     : true: needed
14585  *              false: no need
14586  *==========================================================================*/
needRotationReprocess()14587 bool QCamera3HardwareInterface::needRotationReprocess()
14588 {
14589     if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
14590         // current rotation is not zero, and pp has the capability to process rotation
14591         LOGH("need do reprocess for rotation");
14592         return true;
14593     }
14594 
14595     return false;
14596 }
14597 
14598 /*===========================================================================
14599  * FUNCTION   : needReprocess
14600  *
14601  * DESCRIPTION: if reprocess in needed
14602  *
14603  * PARAMETERS : none
14604  *
14605  * RETURN     : true: needed
14606  *              false: no need
14607  *==========================================================================*/
needReprocess(cam_feature_mask_t postprocess_mask)14608 bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
14609 {
14610     if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
14611         // TODO: add for ZSL HDR later
14612         // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
14613         if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
14614             LOGH("need do reprocess for ZSL WNR or min PP reprocess");
14615             return true;
14616         } else {
14617             LOGH("already post processed frame");
14618             return false;
14619         }
14620     }
14621     return needRotationReprocess();
14622 }
14623 
14624 /*===========================================================================
14625  * FUNCTION   : needJpegExifRotation
14626  *
14627  * DESCRIPTION: if rotation from jpeg is needed
14628  *
14629  * PARAMETERS : none
14630  *
14631  * RETURN     : true: needed
14632  *              false: no need
14633  *==========================================================================*/
needJpegExifRotation()14634 bool QCamera3HardwareInterface::needJpegExifRotation()
14635 {
14636     /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
14637     if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
14638        LOGD("Need use Jpeg EXIF Rotation");
14639        return true;
14640     }
14641     return false;
14642 }
14643 
14644 /*===========================================================================
14645  * FUNCTION   : addOfflineReprocChannel
14646  *
14647  * DESCRIPTION: add a reprocess channel that will do reprocess on frames
14648  *              coming from input channel
14649  *
14650  * PARAMETERS :
14651  *   @config  : reprocess configuration
14652  *   @inputChHandle : pointer to the input (source) channel
14653  *
14654  *
14655  * RETURN     : Ptr to the newly created channel obj. NULL if failed.
14656  *==========================================================================*/
QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
        const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
{
    int32_t rc = NO_ERROR;
    QCamera3ReprocessChannel *pChannel = NULL;

    // Create the channel with an empty feature mask; the actual PP feature
    // config is assembled below and applied via addReprocStreamsFromSource.
    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
            mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
            config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
    if (NULL == pChannel) {
        LOGE("no mem for reprocess channel");
        return NULL;
    }

    rc = pChannel->initialize(IS_TYPE_NONE);
    if (rc != NO_ERROR) {
        LOGE("init reprocess channel failed, ret = %d", rc);
        delete pChannel;
        return NULL;
    }

    // pp feature config
    cam_pp_feature_config_t pp_config;
    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));

    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
    if (gCamCapability[mCameraId]->qcom_supported_feature_mask
            & CAM_QCOM_FEATURE_DSDN) {
        //Use CPP CDS incase h/w supports it.
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
        pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
    }
    // Drop rotation from the mask when the PP block cannot rotate.
    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
    }

    // Carry HDR bracketing parameters over from the reprocess config.
    if (config.hdr_param.hdr_enable) {
        pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
        pp_config.hdr_param = config.hdr_param;
    }

    // Forced HDR snapshot overrides with multiframe HDR and no 1x frame.
    if (mForceHdrSnapshot) {
        pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
        pp_config.hdr_param.hdr_enable = 1;
        pp_config.hdr_param.hdr_need_1x = 0;
        pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
    }

    rc = pChannel->addReprocStreamsFromSource(pp_config,
            config,
            IS_TYPE_NONE,
            mMetadataChannel);

    if (rc != NO_ERROR) {
        delete pChannel;
        return NULL;
    }
    return pChannel;
}
14716 
14717 /*===========================================================================
14718  * FUNCTION   : getMobicatMask
14719  *
14720  * DESCRIPTION: returns mobicat mask
14721  *
14722  * PARAMETERS : none
14723  *
14724  * RETURN     : mobicat mask
14725  *
14726  *==========================================================================*/
// Simple accessor for the cached mobicat mask (m_MobicatMask).
uint8_t QCamera3HardwareInterface::getMobicatMask()
{
    return m_MobicatMask;
}
14731 
14732 /*===========================================================================
14733  * FUNCTION   : setMobicat
14734  *
14735  * DESCRIPTION: set Mobicat on/off.
14736  *
14737  * PARAMETERS :
14738  *   @params  : none
14739  *
14740  * RETURN     : int32_t type of status
14741  *              NO_ERROR  -- success
14742  *              none-zero failure code
14743  *==========================================================================*/
int32_t QCamera3HardwareInterface::setMobicat()
{
    int32_t ret = NO_ERROR;

    if (m_MobicatMask) {
        // Request a chromatix reload for all modules, on both the VFE and
        // the PP command interfaces, via the parameter batch.
        tune_cmd_t tune_cmd;
        tune_cmd.type = SET_RELOAD_CHROMATIX;
        tune_cmd.module = MODULE_ALL;
        tune_cmd.value = TRUE;
        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
                CAM_INTF_PARM_SET_VFE_COMMAND,
                tune_cmd);

        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
                CAM_INTF_PARM_SET_PP_COMMAND,
                tune_cmd);
    }

    // NOTE(review): return values of ADD_SET_PARAM_ENTRY_TO_BATCH are
    // ignored here, so this always returns NO_ERROR.
    return ret;
}
14764 
14765 /*===========================================================================
14766 * FUNCTION   : getLogLevel
14767 *
14768 * DESCRIPTION: Reads the log level property into a variable
14769 *
14770 * PARAMETERS :
14771 *   None
14772 *
14773 * RETURN     :
14774 *   None
14775 *==========================================================================*/
getLogLevel()14776 void QCamera3HardwareInterface::getLogLevel()
14777 {
14778     char prop[PROPERTY_VALUE_MAX];
14779     uint32_t globalLogLevel = 0;
14780 
14781     property_get("persist.camera.hal.debug", prop, "0");
14782     int val = atoi(prop);
14783     if (0 <= val) {
14784         gCamHal3LogLevel = (uint32_t)val;
14785     }
14786 
14787     property_get("persist.camera.kpi.debug", prop, "0");
14788     gKpiDebugLevel = atoi(prop);
14789 
14790     property_get("persist.camera.global.debug", prop, "0");
14791     val = atoi(prop);
14792     if (0 <= val) {
14793         globalLogLevel = (uint32_t)val;
14794     }
14795 
14796     /* Highest log level among hal.logs and global.logs is selected */
14797     if (gCamHal3LogLevel < globalLogLevel)
14798         gCamHal3LogLevel = globalLogLevel;
14799 
14800     return;
14801 }
14802 
14803 /*===========================================================================
14804  * FUNCTION   : validateStreamRotations
14805  *
14806  * DESCRIPTION: Check if the rotations requested are supported
14807  *
14808  * PARAMETERS :
14809  *   @stream_list : streams to be configured
14810  *
14811  * RETURN     : NO_ERROR on success
14812  *              -EINVAL on failure
14813  *
14814  *==========================================================================*/
validateStreamRotations(camera3_stream_configuration_t * streamList)14815 int QCamera3HardwareInterface::validateStreamRotations(
14816         camera3_stream_configuration_t *streamList)
14817 {
14818     int rc = NO_ERROR;
14819 
14820     /*
14821     * Loop through all streams requested in configuration
14822     * Check if unsupported rotations have been requested on any of them
14823     */
14824     for (size_t j = 0; j < streamList->num_streams; j++){
14825         camera3_stream_t *newStream = streamList->streams[j];
14826 
14827         switch(newStream->rotation) {
14828             case CAMERA3_STREAM_ROTATION_0:
14829             case CAMERA3_STREAM_ROTATION_90:
14830             case CAMERA3_STREAM_ROTATION_180:
14831             case CAMERA3_STREAM_ROTATION_270:
14832                 //Expected values
14833                 break;
14834             default:
14835                 ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
14836                         "type:%d and stream format:%d", __func__,
14837                         newStream->rotation, newStream->stream_type,
14838                         newStream->format);
14839                 return -EINVAL;
14840         }
14841 
14842         bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
14843         bool isImplDef = (newStream->format ==
14844                 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
14845         bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
14846                 isImplDef);
14847 
14848         if (isRotated && (!isImplDef || isZsl)) {
14849             LOGE("Error: Unsupported rotation of %d requested for stream"
14850                     "type:%d and stream format:%d",
14851                     newStream->rotation, newStream->stream_type,
14852                     newStream->format);
14853             rc = -EINVAL;
14854             break;
14855         }
14856     }
14857 
14858     return rc;
14859 }
14860 
14861 /*===========================================================================
14862 * FUNCTION   : getFlashInfo
14863 *
14864 * DESCRIPTION: Retrieve information about whether the device has a flash.
14865 *
14866 * PARAMETERS :
14867 *   @cameraId  : Camera id to query
14868 *   @hasFlash  : Boolean indicating whether there is a flash device
14869 *                associated with given camera
14870 *   @flashNode : If a flash device exists, this will be its device node.
14871 *
14872 * RETURN     :
14873 *   None
14874 *==========================================================================*/
getFlashInfo(const int cameraId,bool & hasFlash,char (& flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])14875 void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
14876         bool& hasFlash,
14877         char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
14878 {
14879     cam_capability_t* camCapability = gCamCapability[cameraId];
14880     if (NULL == camCapability) {
14881         hasFlash = false;
14882         flashNode[0] = '\0';
14883     } else {
14884         hasFlash = camCapability->flash_available;
14885         strlcpy(flashNode,
14886                 (char*)camCapability->flash_dev_name,
14887                 QCAMERA_MAX_FILEPATH_LENGTH);
14888     }
14889 }
14890 
14891 /*===========================================================================
14892 * FUNCTION   : getEepromVersionInfo
14893 *
14894 * DESCRIPTION: Retrieve version info of the sensor EEPROM data
14895 *
14896 * PARAMETERS : None
14897 *
14898 * RETURN     : string describing EEPROM version
14899 *              "\0" if no such info available
14900 *==========================================================================*/
// Returns a pointer to the EEPROM version info string held in the camera
// capability table for this camera (see function header comment above).
const char *QCamera3HardwareInterface::getEepromVersionInfo()
{
    return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
}
14905 
14906 /*===========================================================================
14907 * FUNCTION   : getLdafCalib
14908 *
14909 * DESCRIPTION: Retrieve Laser AF calibration data
14910 *
14911 * PARAMETERS : None
14912 *
14913 * RETURN     : Two uint32_t describing laser AF calibration data
14914 *              NULL if none is available.
14915 *==========================================================================*/
getLdafCalib()14916 const uint32_t *QCamera3HardwareInterface::getLdafCalib()
14917 {
14918     if (mLdafCalibExist) {
14919         return &mLdafCalib[0];
14920     } else {
14921         return NULL;
14922     }
14923 }
14924 
14925 /*===========================================================================
14926 * FUNCTION   : getEaselFwVersion
14927 *
14928 * DESCRIPTION: Retrieve Easel firmware version
14929 *
14930 * PARAMETERS : None
14931 *
14932 * RETURN     : string describing Firmware version
14933 *              "\0" if version is not up to date
14934 *==========================================================================*/
getEaselFwVersion()14935 const char *QCamera3HardwareInterface::getEaselFwVersion()
14936 {
14937     if (mEaselFwUpdated) {
14938         return (const char *)&mEaselFwVersion[0];
14939     } else {
14940         return NULL;
14941     }
14942 }
14943 
14944 /*===========================================================================
14945  * FUNCTION   : dynamicUpdateMetaStreamInfo
14946  *
14947  * DESCRIPTION: This function:
14948  *             (1) stops all the channels
14949  *             (2) returns error on pending requests and buffers
14950  *             (3) sends metastream_info in setparams
14951  *             (4) starts all channels
14952  *             This is useful when sensor has to be restarted to apply any
14953  *             settings such as frame rate from a different sensor mode
14954  *
14955  * PARAMETERS : None
14956  *
14957  * RETURN     : NO_ERROR on success
14958  *              Error codes on failure
14959  *
14960  *==========================================================================*/
int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
    int rc = NO_ERROR;

    LOGD("E");

    // Step 1: stream-off every channel before changing sensor settings.
    rc = stopAllChannels();
    if (rc < 0) {
        LOGE("stopAllChannels failed");
        return rc;
    }

    // Step 2: flush outstanding requests/buffers back to the framework
    // with error status, since they cannot complete across the restart.
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        return rc;
    }

    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
                "Format:%d",
                mStreamConfigInfo.type[i],
                mStreamConfigInfo.stream_sizes[i].width,
                mStreamConfigInfo.stream_sizes[i].height,
                mStreamConfigInfo.postprocess_mask[i],
                mStreamConfigInfo.format[i]);
    }

    /* Send meta stream info once again so that ISP can start */
    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc < 0) {
        // Deliberately not fatal: channels are still restarted below.
        LOGE("set Metastreaminfo failed. Sensor mode does not change");
    }

    // Step 4: stream everything back on.
    rc = startAllChannels();
    if (rc < 0) {
        LOGE("startAllChannels failed");
        return rc;
    }

    LOGD("X");
    return rc;
}
15008 
15009 /*===========================================================================
15010  * FUNCTION   : stopAllChannels
15011  *
15012  * DESCRIPTION: This function stops (equivalent to stream-off) all channels
15013  *
15014  * PARAMETERS : None
15015  *
15016  * RETURN     : NO_ERROR on success
15017  *              Error codes on failure
15018  *
15019  *==========================================================================*/
int32_t QCamera3HardwareInterface::stopAllChannels()
{
    int32_t rc = NO_ERROR;

    LOGD("Stopping all channels");
    // Stop the Streams/Channels
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
        if (channel) {
            channel->stop();
        }
        // Mark the stream invalid regardless of whether a channel existed.
        (*it)->status = INVALID;
    }

    // Stop the internal (non-framework) channels, if present.
    if (mSupportChannel) {
        mSupportChannel->stop();
    }
    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }
    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }
    if (mMetadataChannel) {
        /* If content of mStreamInfo is not 0, there is metadata stream */
        mMetadataChannel->stop();
    }

    LOGD("All channels stopped");
    return rc;
}
15055 
15056 /*===========================================================================
15057  * FUNCTION   : startAllChannels
15058  *
15059  * DESCRIPTION: This function starts (equivalent to stream-on) all channels
15060  *
15061  * PARAMETERS : None
15062  *
15063  * RETURN     : NO_ERROR on success
15064  *              Error codes on failure
15065  *
15066  *==========================================================================*/
int32_t QCamera3HardwareInterface::startAllChannels()
{
    int32_t rc = NO_ERROR;

    LOGD("Start all channels ");
    // Start the Streams/Channels
    // Metadata goes first so results are available as streams come up.
    if (mMetadataChannel) {
        /* If content of mStreamInfo is not 0, there is metadata stream */
        rc = mMetadataChannel->start();
        if (rc < 0) {
            LOGE("META channel start failed");
            return rc;
        }
    }
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
        if (channel) {
            rc = channel->start();
            if (rc < 0) {
                LOGE("channel start failed");
                return rc;
            }
        }
    }
    // NOTE(review): the analysis channel's start() result is ignored,
    // unlike the other channels below.
    if (mAnalysisChannel) {
        mAnalysisChannel->start();
    }
    if (mSupportChannel) {
        rc = mSupportChannel->start();
        if (rc < 0) {
            LOGE("Support channel start failed");
            return rc;
        }
    }
    if (mRawDumpChannel) {
        rc = mRawDumpChannel->start();
        if (rc < 0) {
            LOGE("RAW dump channel start failed");
            return rc;
        }
    }
    if (mHdrPlusRawSrcChannel) {
        rc = mHdrPlusRawSrcChannel->start();
        if (rc < 0) {
            LOGE("HDR+ RAW channel start failed");
            return rc;
        }
    }

    LOGD("All channels started");
    return rc;
}
15120 
15121 /*===========================================================================
15122  * FUNCTION   : notifyErrorForPendingRequests
15123  *
15124  * DESCRIPTION: This function sends error for all the pending requests/buffers
15125  *
15126  * PARAMETERS : None
15127  *
15128  * RETURN     : Error codes
15129  *              NO_ERROR on success
15130  *
15131  *==========================================================================*/
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    notifyErrorFoPendingDepthData(mDepthChannel);

    auto pendingRequest = mPendingRequestsList.begin();
    auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();

    // Iterate through pending requests (for which result metadata isn't sent yet) and pending
    // buffers (for which buffers aren't sent yet).
    // The two lists are walked in a merge-style loop keyed on frame_number;
    // which list is ahead decides the kind of error sent for that frame.
    while (pendingRequest != mPendingRequestsList.end() ||
           pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        if (pendingRequest == mPendingRequestsList.end() ||
                ((pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) &&
                 (pendingBuffer->frame_number < pendingRequest->frame_number))) {
            // If metadata for this frame was sent, notify about a buffer error and returns buffers
            // with error.
            for (auto &info : pendingBuffer->mPendingBufferList) {
                // Send a buffer error for this frame number.
                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
                notify_msg.message.error.error_stream = info.stream;
                notify_msg.message.error.frame_number = pendingBuffer->frame_number;
                orchestrateNotify(&notify_msg);

                // Return the buffer itself with ERROR status.
                camera3_stream_buffer_t buffer = {};
                buffer.acquire_fence = -1;
                buffer.release_fence = -1;
                buffer.buffer = info.buffer;
                buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
                buffer.stream = info.stream;
                mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
            }

            pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
        } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
                   ((pendingRequest != mPendingRequestsList.end()) &&
                   (pendingBuffer->frame_number > pendingRequest->frame_number))) {
            // If the buffers for this frame were sent already, notify about a result error.
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
            notify_msg.message.error.error_stream = nullptr;
            notify_msg.message.error.frame_number = pendingRequest->frame_number;
            orchestrateNotify(&notify_msg);

            // A reprocess request still owes the framework its input buffer.
            if (pendingRequest->input_buffer != nullptr) {
                camera3_capture_result result = {};
                result.frame_number = pendingRequest->frame_number;
                result.result = nullptr;
                result.input_buffer = pendingRequest->input_buffer;
                orchestrateResult(&result);
            }

            mShutterDispatcher.clear(pendingRequest->frame_number);
            pendingRequest = mPendingRequestsList.erase(pendingRequest);
        } else {
            // If both buffers and result metadata weren't sent yet, notify about a request error
            // and return buffers with error.
            for (auto &info : pendingBuffer->mPendingBufferList) {
                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
                notify_msg.message.error.error_stream = info.stream;
                notify_msg.message.error.frame_number = pendingBuffer->frame_number;
                orchestrateNotify(&notify_msg);

                camera3_stream_buffer_t buffer = {};
                buffer.acquire_fence = -1;
                buffer.release_fence = -1;
                buffer.buffer = info.buffer;
                buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
                buffer.stream = info.stream;
                mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
            }

            if (pendingRequest->input_buffer != nullptr) {
                camera3_capture_result result = {};
                result.frame_number = pendingRequest->frame_number;
                result.result = nullptr;
                result.input_buffer = pendingRequest->input_buffer;
                orchestrateResult(&result);
            }

            // Frame present in both lists: advance both iterators.
            mShutterDispatcher.clear(pendingRequest->frame_number);
            pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
            pendingRequest = mPendingRequestsList.erase(pendingRequest);
        }
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();
    mShutterDispatcher.clear();
    mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mExpectedFrameDuration = 0;
    mExpectedInflightDuration = 0;
    LOGH("Cleared all the pending buffers ");

    return NO_ERROR;
}
15236 
isOnEncoder(const cam_dimension_t max_viewfinder_size,uint32_t width,uint32_t height)15237 bool QCamera3HardwareInterface::isOnEncoder(
15238         const cam_dimension_t max_viewfinder_size,
15239         uint32_t width, uint32_t height)
15240 {
15241     return ((width > (uint32_t)max_viewfinder_size.width) ||
15242             (height > (uint32_t)max_viewfinder_size.height) ||
15243             (width > (uint32_t)VIDEO_4K_WIDTH) ||
15244             (height > (uint32_t)VIDEO_4K_HEIGHT));
15245 }
15246 
15247 /*===========================================================================
15248  * FUNCTION   : setBundleInfo
15249  *
15250  * DESCRIPTION: Set bundle info for all streams that are bundle.
15251  *
15252  * PARAMETERS : None
15253  *
15254  * RETURN     : NO_ERROR on success
15255  *              Error codes on failure
15256  *==========================================================================*/
int32_t QCamera3HardwareInterface::setBundleInfo()
{
    int32_t rc = NO_ERROR;

    // Bundling only applies when a channel handle exists.
    if (mChannelHandle) {
        // Query the bundle configuration once, then push it to every channel.
        cam_bundle_config_t bundleInfo;
        memset(&bundleInfo, 0, sizeof(bundleInfo));
        rc = mCameraHandle->ops->get_bundle_info(
                mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
        if (rc != NO_ERROR) {
            LOGE("get_bundle_info failed");
            return rc;
        }
        if (mAnalysisChannel) {
            mAnalysisChannel->setBundleInfo(bundleInfo);
        }
        if (mSupportChannel) {
            mSupportChannel->setBundleInfo(bundleInfo);
        }
        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
                it != mStreamInfo.end(); it++) {
            // NOTE(review): channel is not NULL-checked here, unlike in
            // stopAllChannels/startAllChannels.
            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
            channel->setBundleInfo(bundleInfo);
        }
        if (mRawDumpChannel) {
            mRawDumpChannel->setBundleInfo(bundleInfo);
        }
        if (mHdrPlusRawSrcChannel) {
            mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
        }
    }

    return rc;
}
15291 
15292 /*===========================================================================
15293  * FUNCTION   : setInstantAEC
15294  *
15295  * DESCRIPTION: Set Instant AEC related params.
15296  *
15297  * PARAMETERS :
15298  *      @meta: CameraMetadata reference
15299  *
15300  * RETURN     : NO_ERROR on success
15301  *              Error codes on failure
15302  *==========================================================================*/
setInstantAEC(const CameraMetadata & meta)15303 int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
15304 {
15305     int32_t rc = NO_ERROR;
15306     uint8_t val = 0;
15307     char prop[PROPERTY_VALUE_MAX];
15308 
15309     // First try to configure instant AEC from framework metadata
15310     if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
15311         val = meta.find(QCAMERA3_INSTANT_AEC_MODE).data.u8[0];
15312         LOGE("Instant AEC mode set: %d", val);
15313     }
15314 
15315     // If framework did not set this value, try to read from set prop.
15316     if (val == 0) {
15317         memset(prop, 0, sizeof(prop));
15318         property_get("persist.camera.instant.aec", prop, "0");
15319         val = (uint8_t)atoi(prop);
15320     }
15321 
15322     if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
15323            ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
15324         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
15325         mInstantAEC = val;
15326         mInstantAECSettledFrameNumber = 0;
15327         mInstantAecFrameIdxCount = 0;
15328         LOGH("instantAEC value set %d",val);
15329         if (mInstantAEC) {
15330             memset(prop, 0, sizeof(prop));
15331             property_get("persist.camera.ae.instant.bound", prop, "10");
15332             int32_t aec_frame_skip_cnt = atoi(prop);
15333             if (aec_frame_skip_cnt >= 0) {
15334                 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
15335             } else {
15336                 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
15337                 rc = BAD_VALUE;
15338             }
15339         }
15340     } else {
15341         LOGE("Bad instant aec value set %d", val);
15342         rc = BAD_VALUE;
15343     }
15344     return rc;
15345 }
15346 
15347 /*===========================================================================
15348  * FUNCTION   : get_num_overall_buffers
15349  *
15350  * DESCRIPTION: Estimate number of pending buffers across all requests.
15351  *
15352  * PARAMETERS : None
15353  *
15354  * RETURN     : Number of overall pending buffers
15355  *
15356  *==========================================================================*/
get_num_overall_buffers()15357 uint32_t PendingBuffersMap::get_num_overall_buffers()
15358 {
15359     uint32_t sum_buffers = 0;
15360     for (auto &req : mPendingBuffersInRequest) {
15361         sum_buffers += req.mPendingBufferList.size();
15362     }
15363     return sum_buffers;
15364 }
15365 
15366 /*===========================================================================
15367  * FUNCTION   : removeBuf
15368  *
15369  * DESCRIPTION: Remove a matching buffer from tracker.
15370  *
15371  * PARAMETERS : @buffer: image buffer for the callback
15372  *
15373  * RETURN     : None
15374  *
15375  *==========================================================================*/
void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
{
    bool buffer_found = false;
    // Scan every pending request for the first entry tracking this handle.
    for (auto req = mPendingBuffersInRequest.begin();
            req != mPendingBuffersInRequest.end(); req++) {
        for (auto k = req->mPendingBufferList.begin();
                k != req->mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer) {
                LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
                        req->frame_number, buffer);
                k = req->mPendingBufferList.erase(k);
                if (req->mPendingBufferList.empty()) {
                    // Remove this request from Map
                    // (erase() returns the next request; safe here because the
                    // break below exits before req is incremented again).
                    req = mPendingBuffersInRequest.erase(req);
                }
                buffer_found = true;
                break;
            }
        }
        // Only the first matching buffer is removed; stop searching.
        if (buffer_found) {
            break;
        }
    }
    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
            get_num_overall_buffers());
}
15402 
15403 /*===========================================================================
15404  * FUNCTION   : getBufErrStatus
15405  *
15406  * DESCRIPTION: get buffer error status
15407  *
15408  * PARAMETERS : @buffer: buffer handle
15409  *
15410  * RETURN     : Error status
15411  *
15412  *==========================================================================*/
getBufErrStatus(buffer_handle_t * buffer)15413 int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
15414 {
15415     for (auto& req : mPendingBuffersInRequest) {
15416         for (auto& k : req.mPendingBufferList) {
15417             if (k.buffer == buffer)
15418                 return k.bufStatus;
15419         }
15420     }
15421     return CAMERA3_BUFFER_STATUS_OK;
15422 }
15423 
15424 /*===========================================================================
15425  * FUNCTION   : setPAAFSupport
15426  *
15427  * DESCRIPTION: Set the preview-assisted auto focus support bit in
15428  *              feature mask according to stream type and filter
15429  *              arrangement
15430  *
15431  * PARAMETERS : @feature_mask: current feature mask, which may be modified
15432  *              @stream_type: stream type
15433  *              @filter_arrangement: filter arrangement
15434  *
15435  * RETURN     : None
15436  *==========================================================================*/
setPAAFSupport(cam_feature_mask_t & feature_mask,cam_stream_type_t stream_type,cam_color_filter_arrangement_t filter_arrangement)15437 void QCamera3HardwareInterface::setPAAFSupport(
15438         cam_feature_mask_t& feature_mask,
15439         cam_stream_type_t stream_type,
15440         cam_color_filter_arrangement_t filter_arrangement)
15441 {
15442     switch (filter_arrangement) {
15443     case CAM_FILTER_ARRANGEMENT_RGGB:
15444     case CAM_FILTER_ARRANGEMENT_GRBG:
15445     case CAM_FILTER_ARRANGEMENT_GBRG:
15446     case CAM_FILTER_ARRANGEMENT_BGGR:
15447         if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
15448                 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
15449                 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
15450             if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
15451                 feature_mask |= CAM_QCOM_FEATURE_PAAF;
15452         }
15453         break;
15454     case CAM_FILTER_ARRANGEMENT_Y:
15455         if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
15456             feature_mask |= CAM_QCOM_FEATURE_PAAF;
15457         }
15458         break;
15459     default:
15460         break;
15461     }
15462     LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
15463             feature_mask, stream_type, filter_arrangement);
15464 
15465 
15466 }
15467 
15468 /*===========================================================================
15469 * FUNCTION   : getSensorMountAngle
15470 *
15471 * DESCRIPTION: Retrieve sensor mount angle
15472 *
15473 * PARAMETERS : None
15474 *
15475 * RETURN     : sensor mount angle in uint32_t
15476 *==========================================================================*/
getSensorMountAngle()15477 uint32_t QCamera3HardwareInterface::getSensorMountAngle()
15478 {
15479     return gCamCapability[mCameraId]->sensor_mount_angle;
15480 }
15481 
15482 /*===========================================================================
15483 * FUNCTION   : getRelatedCalibrationData
15484 *
15485 * DESCRIPTION: Retrieve related system calibration data
15486 *
15487 * PARAMETERS : None
15488 *
15489 * RETURN     : Pointer of related system calibration data
15490 *==========================================================================*/
getRelatedCalibrationData()15491 const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
15492 {
15493     return (const cam_related_system_calibration_data_t *)
15494             &(gCamCapability[mCameraId]->related_cam_calibration);
15495 }
15496 
15497 /*===========================================================================
15498  * FUNCTION   : is60HzZone
15499  *
15500  * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
15501  *
15502  * PARAMETERS : None
15503  *
15504  * RETURN     : True if in 60Hz zone, False otherwise
15505  *==========================================================================*/
is60HzZone()15506 bool QCamera3HardwareInterface::is60HzZone()
15507 {
15508     time_t t = time(NULL);
15509     struct tm lt;
15510 
15511     struct tm* r = localtime_r(&t, &lt);
15512 
15513     if (r == NULL || lt.tm_gmtoff <=  -2*60*60 || lt.tm_gmtoff >= 8*60*60)
15514         return true;
15515     else
15516         return false;
15517 }
15518 
15519 /*===========================================================================
15520  * FUNCTION   : adjustBlackLevelForCFA
15521  *
15522  * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
15523  *              of bayer CFA (Color Filter Array).
15524  *
15525  * PARAMETERS : @input: black level pattern in the order of RGGB
15526  *              @output: black level pattern in the order of CFA
15527  *              @color_arrangement: CFA color arrangement
15528  *
15529  * RETURN     : None
15530  *==========================================================================*/
15531 template<typename T>
adjustBlackLevelForCFA(T input[BLACK_LEVEL_PATTERN_CNT],T output[BLACK_LEVEL_PATTERN_CNT],cam_color_filter_arrangement_t color_arrangement)15532 void QCamera3HardwareInterface::adjustBlackLevelForCFA(
15533         T input[BLACK_LEVEL_PATTERN_CNT],
15534         T output[BLACK_LEVEL_PATTERN_CNT],
15535         cam_color_filter_arrangement_t color_arrangement)
15536 {
15537     switch (color_arrangement) {
15538     case CAM_FILTER_ARRANGEMENT_GRBG:
15539         output[0] = input[1];
15540         output[1] = input[0];
15541         output[2] = input[3];
15542         output[3] = input[2];
15543         break;
15544     case CAM_FILTER_ARRANGEMENT_GBRG:
15545         output[0] = input[2];
15546         output[1] = input[3];
15547         output[2] = input[0];
15548         output[3] = input[1];
15549         break;
15550     case CAM_FILTER_ARRANGEMENT_BGGR:
15551         output[0] = input[3];
15552         output[1] = input[2];
15553         output[2] = input[1];
15554         output[3] = input[0];
15555         break;
15556     case CAM_FILTER_ARRANGEMENT_RGGB:
15557         output[0] = input[0];
15558         output[1] = input[1];
15559         output[2] = input[2];
15560         output[3] = input[3];
15561         break;
15562     default:
15563         LOGE("Invalid color arrangement to derive dynamic blacklevel");
15564         break;
15565     }
15566 }
15567 
/*===========================================================================
 * FUNCTION   : updateHdrPlusResultMetadata
 *
 * DESCRIPTION: Mirror JPEG- and capture-intent-related entries from the HDR+
 *              request settings into the result metadata. For each tag, the
 *              value from settings is copied into the result if present,
 *              otherwise the tag is erased from the result.
 *
 * PARAMETERS :
 *   @resultMetadata: result metadata updated in place
 *   @settings: metadata buffer holding the original request settings
 *
 * RETURN     : None
 *==========================================================================*/
void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
    CameraMetadata &resultMetadata,
    std::shared_ptr<metadata_buffer_t> settings)
{
    if (settings == nullptr) {
        ALOGE("%s: settings is nullptr.", __FUNCTION__);
        return;
    }

    // GPS coordinates: lat/long/altitude triple.
    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
        resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
    } else {
        resultMetadata.erase(ANDROID_JPEG_GPS_COORDINATES);
    }

    // GPS processing method is stored as a raw byte string in settings.
    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
        String8 str((const char *)gps_methods);
        resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
    } else {
        resultMetadata.erase(ANDROID_JPEG_GPS_PROCESSING_METHOD);
    }

    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
        resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
    } else {
        resultMetadata.erase(ANDROID_JPEG_GPS_TIMESTAMP);
    }

    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
        resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
    } else {
        resultMetadata.erase(ANDROID_JPEG_ORIENTATION);
    }

    // Quality values are stored as uint32_t in the HAL but the framework
    // tags are uint8_t, hence the narrowing casts below.
    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
        uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
        resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
    } else {
        resultMetadata.erase(ANDROID_JPEG_QUALITY);
    }

    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
        uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
        resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
    } else {
        resultMetadata.erase(ANDROID_JPEG_THUMBNAIL_QUALITY);
    }

    // Thumbnail size: cam_dimension_t converted to the framework's
    // {width, height} int32 pair.
    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
        int32_t fwk_thumb_size[2];
        fwk_thumb_size[0] = thumb_size->width;
        fwk_thumb_size[1] = thumb_size->height;
        resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
    } else {
        resultMetadata.erase(ANDROID_JPEG_THUMBNAIL_SIZE);
    }

    IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
        uint8_t fwk_intent = intent[0];
        resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
    } else {
        resultMetadata.erase(ANDROID_CONTROL_CAPTURE_INTENT);
    }
}
15632 
isRequestHdrPlusCompatible(const camera3_capture_request_t & request,const CameraMetadata & metadata)15633 bool QCamera3HardwareInterface::isRequestHdrPlusCompatible(
15634         const camera3_capture_request_t &request, const CameraMetadata &metadata) {
15635     if (metadata.exists(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS) &&
15636             metadata.find(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS).data.i32[0] == 1) {
15637         ALOGV("%s: NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS is 1", __FUNCTION__);
15638         return false;
15639     }
15640 
15641     if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
15642          metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
15643             ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
15644         ALOGV("%s: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
15645                 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
15646         return false;
15647     }
15648 
15649     if (!metadata.exists(ANDROID_EDGE_MODE) ||
15650             metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
15651         ALOGV("%s: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
15652         return false;
15653     }
15654 
15655     if (!metadata.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE) ||
15656             metadata.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0] !=
15657                     ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
15658         ALOGV("%s: ANDROID_COLOR_CORRECTION_ABERRATION_MODE is not HQ.", __FUNCTION__);
15659         return false;
15660     }
15661 
15662     if (!metadata.exists(ANDROID_CONTROL_AE_MODE) ||
15663             (metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] != ANDROID_CONTROL_AE_MODE_ON &&
15664              metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] !=
15665                     ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH)) {
15666         ALOGV("%s: ANDROID_CONTROL_AE_MODE is not ON or ON_AUTO_FLASH.", __FUNCTION__);
15667         return false;
15668     }
15669 
15670     if (!metadata.exists(ANDROID_CONTROL_AWB_MODE) ||
15671             metadata.find(ANDROID_CONTROL_AWB_MODE).data.u8[0] != ANDROID_CONTROL_AWB_MODE_AUTO) {
15672         ALOGV("%s: ANDROID_CONTROL_AWB_MODE is not AUTO.", __FUNCTION__);
15673         return false;
15674     }
15675 
15676     if (!metadata.exists(ANDROID_CONTROL_EFFECT_MODE) ||
15677             metadata.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0] !=
15678                     ANDROID_CONTROL_EFFECT_MODE_OFF) {
15679         ALOGV("%s: ANDROID_CONTROL_EFFECT_MODE_OFF is not OFF.", __FUNCTION__);
15680         return false;
15681     }
15682 
15683     if (!metadata.exists(ANDROID_CONTROL_MODE) ||
15684             (metadata.find(ANDROID_CONTROL_MODE).data.u8[0] != ANDROID_CONTROL_MODE_AUTO &&
15685              metadata.find(ANDROID_CONTROL_MODE).data.u8[0] !=
15686                     ANDROID_CONTROL_MODE_USE_SCENE_MODE)) {
15687         ALOGV("%s: ANDROID_CONTROL_MODE is not AUTO or USE_SCENE_MODE.", __FUNCTION__);
15688         return false;
15689     }
15690 
15691     // TODO (b/32585046): support non-ZSL.
15692     if (!metadata.exists(ANDROID_CONTROL_ENABLE_ZSL) ||
15693          metadata.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0] != ANDROID_CONTROL_ENABLE_ZSL_TRUE) {
15694         ALOGV("%s: ANDROID_CONTROL_ENABLE_ZSL is not true.", __FUNCTION__);
15695         return false;
15696     }
15697 
15698     // TODO (b/32586081): support flash.
15699     if (!metadata.exists(ANDROID_FLASH_MODE) ||
15700          metadata.find(ANDROID_FLASH_MODE).data.u8[0] != ANDROID_FLASH_MODE_OFF) {
15701         ALOGV("%s: ANDROID_FLASH_MODE is not OFF.", __FUNCTION__);
15702         return false;
15703     }
15704 
15705     if (!metadata.exists(ANDROID_TONEMAP_MODE) ||
15706          metadata.find(ANDROID_TONEMAP_MODE).data.u8[0] != ANDROID_TONEMAP_MODE_HIGH_QUALITY) {
15707         ALOGV("%s: ANDROID_TONEMAP_MODE is not HQ.", __FUNCTION__);
15708         return false;
15709     }
15710 
15711     switch (request.output_buffers[0].stream->format) {
15712         case HAL_PIXEL_FORMAT_BLOB:
15713         case HAL_PIXEL_FORMAT_YCbCr_420_888:
15714         case HAL_PIXEL_FORMAT_Y8:
15715         case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
15716             break;
15717         default:
15718             ALOGV("%s: Not an HDR+ request: Only Jpeg and YUV output is supported.", __FUNCTION__);
15719             for (uint32_t i = 0; i < request.num_output_buffers; i++) {
15720                 ALOGV("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
15721                         request.output_buffers[0].stream->width,
15722                         request.output_buffers[0].stream->height,
15723                         request.output_buffers[0].stream->format);
15724             }
15725             return false;
15726     }
15727 
15728     return true;
15729 }
15730 
abortPendingHdrplusRequest(HdrPlusPendingRequest * hdrPlusRequest)15731 void QCamera3HardwareInterface::abortPendingHdrplusRequest(HdrPlusPendingRequest *hdrPlusRequest) {
15732     if (hdrPlusRequest == nullptr) return;
15733 
15734     for (auto & outputBufferIter : hdrPlusRequest->outputBuffers) {
15735         // Find the stream for this buffer.
15736         for (auto streamInfo : mStreamInfo) {
15737             if (streamInfo->id == outputBufferIter.first) {
15738                 if (streamInfo->channel == mPictureChannel) {
15739                     // For picture channel, this buffer is internally allocated so return this
15740                     // buffer to picture channel.
15741                     mPictureChannel->returnYuvBuffer(outputBufferIter.second.get());
15742                 } else {
15743                     // Unregister this buffer for other channels.
15744                     streamInfo->channel->unregisterBuffer(outputBufferIter.second.get());
15745                 }
15746                 break;
15747             }
15748         }
15749     }
15750 
15751     hdrPlusRequest->outputBuffers.clear();
15752     hdrPlusRequest->frameworkOutputBuffers.clear();
15753 }
15754 
15755 /*===========================================================================
15756  * FUNCTION   : isEISCropInSnapshotNeeded
15757  *
15758  * DESCRIPTION: In case EIS is active, check whether additional crop is needed
15759  *              to avoid FOV jumps in snapshot streams.
15760  *
15761  * PARAMETERS : @metadata: Current request settings.
15762  *
15763  * RETURN     : True in case EIS crop is needed, False otherwise.
15764  *==========================================================================*/
isEISCropInSnapshotNeeded(const CameraMetadata & metadata) const15765 bool QCamera3HardwareInterface::isEISCropInSnapshotNeeded(const CameraMetadata &metadata) const
15766 {
15767     if (metadata.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
15768         uint8_t vstabMode =
15769             metadata.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
15770         if (vstabMode == ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON) {
15771             if ((mLastEISCropInfo.delta_x != 0) || (mLastEISCropInfo.delta_y != 0) ||
15772                     (mLastEISCropInfo.delta_width != 0) || (mLastEISCropInfo.delta_height != 0)) {
15773                 return true;
15774             }
15775         }
15776     }
15777 
15778     return false;
15779 }
15780 
15781 /*===========================================================================
15782  * FUNCTION   : isCropValid
15783  *
15784  * DESCRIPTION: Crop sanity checks.
15785  *
15786  * PARAMETERS : @startX: Horizontal crop offset.
15787  *              @startY: Vertical crop offset.
15788  *              @width: Crop width.
15789  *              @height: Crop height.
15790  *              @maxWidth: Horizontal maximum size.
15791  *              @maxHeight: Vertical maximum size.
15792  *
15793  * RETURN     : True in case crop is valid, False otherwise.
15794  *==========================================================================*/
isCropValid(int32_t startX,int32_t startY,int32_t width,int32_t height,int32_t maxWidth,int32_t maxHeight) const15795 bool QCamera3HardwareInterface::isCropValid(int32_t startX, int32_t startY, int32_t width,
15796         int32_t height, int32_t maxWidth, int32_t maxHeight) const
15797 {
15798     if ((startX < 0) || (startY < 0) || (startX >= maxWidth) || (startY >= maxHeight)) {
15799         LOGE("Crop offset is invalid: %dx%d", startX, startY);
15800         return false;
15801     }
15802 
15803     if ((width < 0) || (height < 0) || (width >= maxWidth) || (height >= maxHeight)) {
15804         LOGE("Crop dimensions are invalid: %dx%d", width, height);
15805         return false;
15806     }
15807 
15808     if (((startX + width) > maxWidth)  || ((startY + height) > maxHeight)) {
15809         LOGE("Crop is out of bounds: %dx%d max %dx%d", startX + width, startY + height, maxWidth,
15810                 maxHeight);
15811         return false;
15812     }
15813 
15814     return true;
15815 }
15816 
/*===========================================================================
 * FUNCTION   : trySubmittingHdrPlusRequestLocked
 *
 * DESCRIPTION: If the request is HDR+ compatible, collect buffers for each
 *              requested output stream, build an HDR+ capture request, and
 *              submit it to the HDR+ client (with an EIS-compensated crop
 *              region when needed). On any failure the pending request's
 *              buffers are released via abortPendingHdrplusRequest().
 *
 * PARAMETERS :
 *   @hdrPlusRequest: pending request tracker filled in with the buffers used
 *   @request: framework capture request
 *   @metadata: unpacked request settings
 *
 * RETURN     : True if an HDR+ request was submitted, False otherwise
 *==========================================================================*/
bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
        HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
        const CameraMetadata &metadata)
{
    if (hdrPlusRequest == nullptr) return false;
    if (!isRequestHdrPlusCompatible(request, metadata)) return false;

    status_t res = OK;
    pbcamera::CaptureRequest pbRequest;
    pbRequest.id = request.frame_number;
    // Iterate through all requested output buffers and add them to an HDR+ request.
    for (uint32_t i = 0; i < request.num_output_buffers; i++) {
        // Find the index of the stream in mStreamInfo.
        uint32_t pbStreamId = 0;
        bool found = false;
        for (auto streamInfo : mStreamInfo) {
            if (streamInfo->stream == request.output_buffers[i].stream) {
                pbStreamId = streamInfo->id;
                found = true;
                break;
            }
        }

        if (!found) {
            ALOGE("%s: requested stream was not configured.", __FUNCTION__);
            // Release any buffers already collected for earlier outputs.
            abortPendingHdrplusRequest(hdrPlusRequest);
            return false;
        }
        auto outBuffer = std::make_shared<mm_camera_buf_def_t>();
        switch (request.output_buffers[i].stream->format) {
            case HAL_PIXEL_FORMAT_BLOB:
            {
                // For jpeg output, get a YUV buffer from pic channel.
                QCamera3PicChannel *picChannel =
                        (QCamera3PicChannel*)request.output_buffers[i].stream->priv;
                res = picChannel->getYuvBufferForRequest(outBuffer.get(), request.frame_number);
                if (res != OK) {
                    ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
                            __FUNCTION__, strerror(-res), res);
                    abortPendingHdrplusRequest(hdrPlusRequest);
                    return false;
                }
                break;
            }
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
            case HAL_PIXEL_FORMAT_Y8:
            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
            {
                // For YUV output, register the buffer and get the buffer def from the channel.
                QCamera3ProcessingChannel *channel =
                        (QCamera3ProcessingChannel*)request.output_buffers[i].stream->priv;
                res = channel->registerBufferAndGetBufDef(request.output_buffers[i].buffer,
                        outBuffer.get());
                if (res != OK) {
                    ALOGE("%s: Getting the buffer def failed: %s (%d)", __FUNCTION__,
                            strerror(-res), res);
                    abortPendingHdrplusRequest(hdrPlusRequest);
                    return false;
                }
                break;
            }
            default:
                // isRequestHdrPlusCompatible() should have filtered these out.
                abortPendingHdrplusRequest(hdrPlusRequest);
                return false;
        }

        // Describe the buffer for the HDR+ client: use the ion fd when
        // available, otherwise fall back to the mapped data pointer.
        pbcamera::StreamBuffer buffer;
        buffer.streamId = pbStreamId;
        buffer.dmaBufFd = outBuffer->fd;
        buffer.data = outBuffer->fd == -1 ? outBuffer->buffer : nullptr;
        buffer.dataSize = outBuffer->frame_len;

        pbRequest.outputBuffers.push_back(buffer);

        // Track both the HAL-side buffer and the framework buffer so they can
        // be returned when the HDR+ result arrives (or on abort).
        hdrPlusRequest->outputBuffers.emplace(pbStreamId, outBuffer);
        hdrPlusRequest->frameworkOutputBuffers.emplace(pbStreamId, request.output_buffers[i]);
    }

    if (isEISCropInSnapshotNeeded(metadata)) {
        // Shift/shrink the scaler crop region by the last EIS crop deltas so
        // the snapshot FOV matches the stabilized preview.
        int32_t scalerRegion[4] = {0, 0, gCamCapability[mCameraId]->active_array_size.width,
            gCamCapability[mCameraId]->active_array_size.height};
        if (metadata.exists(ANDROID_SCALER_CROP_REGION)) {
            auto currentScalerRegion = metadata.find(ANDROID_SCALER_CROP_REGION).data.i32;
            scalerRegion[0] = currentScalerRegion[0] + mLastEISCropInfo.delta_x;
            scalerRegion[1] = currentScalerRegion[1] + mLastEISCropInfo.delta_y;
            scalerRegion[2] = currentScalerRegion[2] - mLastEISCropInfo.delta_width;
            scalerRegion[3] = currentScalerRegion[3] - mLastEISCropInfo.delta_height;
        } else {
            scalerRegion[0] += mLastEISCropInfo.delta_x;
            scalerRegion[1] += mLastEISCropInfo.delta_y;
            scalerRegion[2] -= mLastEISCropInfo.delta_width;
            scalerRegion[3] -= mLastEISCropInfo.delta_height;
        }

        // Capture requests should not be modified.
        CameraMetadata updatedMetadata(metadata);
        if (isCropValid(scalerRegion[0], scalerRegion[1], scalerRegion[2], scalerRegion[3],
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height)) {
            updatedMetadata.update(ANDROID_SCALER_CROP_REGION, scalerRegion, 4);
        } else {
            // Fall through and submit with the unmodified crop region.
            LOGE("Invalid EIS compensated crop region");
        }

        res = gHdrPlusClient->submitCaptureRequest(&pbRequest, updatedMetadata);
    } else {
        res = gHdrPlusClient->submitCaptureRequest(&pbRequest, metadata);
    }

    if (res != OK) {
        ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
                strerror(-res), res);
        abortPendingHdrplusRequest(hdrPlusRequest);
        return false;
    }

    return true;
}
15935 
openHdrPlusClientAsyncLocked()15936 status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
15937 {
15938     if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
15939         return OK;
15940     }
15941 
15942     status_t res = gEaselManagerClient->openHdrPlusClientAsync(mQCamera3HdrPlusListenerThread.get());
15943     if (res != OK) {
15944         ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
15945                 strerror(-res), res);
15946         return res;
15947     }
15948     gHdrPlusClientOpening = true;
15949 
15950     return OK;
15951 }
15952 
enableHdrPlusModeLocked()15953 status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
15954 {
15955     status_t res;
15956 
15957     if (mHdrPlusModeEnabled) {
15958         return OK;
15959     }
15960 
15961     // Check if gHdrPlusClient is opened or being opened.
15962     if (gHdrPlusClient == nullptr) {
15963         if (gHdrPlusClientOpening) {
15964             // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
15965             return OK;
15966         }
15967 
15968         res = openHdrPlusClientAsyncLocked();
15969         if (res != OK) {
15970             ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
15971                     strerror(-res), res);
15972             return res;
15973         }
15974 
15975         // When opening HDR+ client completes, HDR+ mode will be enabled.
15976         return OK;
15977 
15978     }
15979 
15980     // Configure stream for HDR+.
15981     res = configureHdrPlusStreamsLocked();
15982     if (res != OK) {
15983         LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
15984         return res;
15985     }
15986 
15987     // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
15988     res = gHdrPlusClient->setZslHdrPlusMode(true);
15989     if (res != OK) {
15990         LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
15991         return res;
15992     }
15993 
15994     mHdrPlusModeEnabled = true;
15995     ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
15996 
15997     return OK;
15998 }
15999 
finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> & lock)16000 void QCamera3HardwareInterface::finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> &lock)
16001 {
16002     if (gHdrPlusClientOpening) {
16003         gHdrPlusClientOpenCond.wait(lock, [&] { return !gHdrPlusClientOpening; });
16004     }
16005 }
16006 
disableHdrPlusModeLocked()16007 void QCamera3HardwareInterface::disableHdrPlusModeLocked()
16008 {
16009     // Disable HDR+ mode.
16010     if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
16011         status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
16012         if (res != OK) {
16013             ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
16014         }
16015 
16016         // Close HDR+ client so Easel can enter low power mode.
16017         gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
16018         gHdrPlusClient = nullptr;
16019     }
16020 
16021     mHdrPlusModeEnabled = false;
16022     ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
16023 }
16024 
isSessionHdrPlusModeCompatible()16025 bool QCamera3HardwareInterface::isSessionHdrPlusModeCompatible()
16026 {
16027     // Check that at least one YUV or one JPEG output is configured.
16028     // TODO: Support RAW (b/36690506)
16029     for (auto streamInfo : mStreamInfo) {
16030         if (streamInfo != nullptr && streamInfo->stream != nullptr) {
16031             if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
16032                     (streamInfo->stream->format == HAL_PIXEL_FORMAT_BLOB ||
16033                      streamInfo->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
16034                      streamInfo->stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)) {
16035                 return true;
16036             }
16037         }
16038     }
16039 
16040     return false;
16041 }
16042 
configureHdrPlusStreamsLocked()16043 status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
16044 {
16045     pbcamera::InputConfiguration inputConfig;
16046     std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
16047     status_t res = OK;
16048 
16049     // Sensor MIPI will send data to Easel.
16050     inputConfig.isSensorInput = true;
16051     inputConfig.sensorMode.cameraId = mCameraId;
16052     inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
16053     inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
16054     inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
16055     inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
16056     inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
16057     inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
16058     inputConfig.sensorMode.timestampCropOffsetNs = mSensorModeInfo.timestamp_crop_offset;
16059 
16060     if (mSensorModeInfo.num_raw_bits != 10) {
16061         ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
16062                 mSensorModeInfo.num_raw_bits);
16063         return BAD_VALUE;
16064     }
16065 
16066     inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
16067 
16068     // Iterate through configured output streams in HAL and configure those streams in HDR+
16069     // service.
16070     for (auto streamInfo : mStreamInfo) {
16071         pbcamera::StreamConfiguration outputConfig;
16072         if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT) {
16073             switch (streamInfo->stream->format) {
16074                 case HAL_PIXEL_FORMAT_BLOB:
16075                 case HAL_PIXEL_FORMAT_YCbCr_420_888:
16076                 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
16077                     res = fillPbStreamConfig(&outputConfig, streamInfo->id,
16078                             streamInfo->channel, /*stream index*/0);
16079                     if (res != OK) {
16080                         LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
16081                             __FUNCTION__, strerror(-res), res);
16082 
16083                         return res;
16084                     }
16085 
16086                     outputStreamConfigs.push_back(outputConfig);
16087                     break;
16088                 default:
16089                     // TODO: handle RAW16 outputs if mRawChannel was created. (b/36690506)
16090                     break;
16091             }
16092         }
16093     }
16094 
16095     res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
16096     if (res != OK) {
16097         LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
16098             strerror(-res), res);
16099         return res;
16100     }
16101 
16102     return OK;
16103 }
16104 
handleEaselFatalError()16105 void QCamera3HardwareInterface::handleEaselFatalError()
16106 {
16107     {
16108         std::unique_lock<std::mutex> l(gHdrPlusClientLock);
16109         if (gHdrPlusClient != nullptr) {
16110             gHdrPlusClient->nofityEaselFatalError();
16111         }
16112     }
16113 
16114     pthread_mutex_lock(&mMutex);
16115     mState = ERROR;
16116     pthread_mutex_unlock(&mMutex);
16117 
16118     handleCameraDeviceError(/*stopChannelImmediately*/true);
16119 }
16120 
cleanupEaselErrorFuture()16121 void QCamera3HardwareInterface::cleanupEaselErrorFuture()
16122 {
16123     {
16124         std::lock_guard<std::mutex> lock(mEaselErrorFutureLock);
16125         if (!mEaselErrorFuture.valid()) {
16126             // If there is no Easel error, construct a dummy future to wait for.
16127             mEaselErrorFuture = std::async([]() { return; });
16128         }
16129     }
16130 
16131     mEaselErrorFuture.wait();
16132 }
16133 
handleEaselFatalErrorAsync()16134 void QCamera3HardwareInterface::handleEaselFatalErrorAsync()
16135 {
16136     std::lock_guard<std::mutex> lock(mEaselErrorFutureLock);
16137 
16138     if (mEaselErrorFuture.valid()) {
16139         // The error future has been invoked.
16140         return;
16141     }
16142 
16143     // Launch a future to handle the fatal error.
16144     mEaselErrorFuture = std::async(std::launch::async,
16145             &QCamera3HardwareInterface::handleEaselFatalError, this);
16146 }
16147 
onEaselFatalError(std::string errMsg)16148 void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
16149 {
16150     ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
16151     handleEaselFatalErrorAsync();
16152 }
16153 
closeHdrPlusClientLocked()16154 void QCamera3HardwareInterface::closeHdrPlusClientLocked()
16155 {
16156     if (gHdrPlusClient != nullptr) {
16157         // Disable HDR+ mode.
16158         disableHdrPlusModeLocked();
16159         // Disconnect Easel if it's connected.
16160         gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
16161         gHdrPlusClient = nullptr;
16162         ALOGD("HDR+ client closed.");
16163     }
16164 }
16165 
onThermalThrottle()16166 void QCamera3HardwareInterface::onThermalThrottle() {
16167     ALOGW("%s: Thermal throttling. Will close HDR+ client.", __FUNCTION__);
16168     // HDR+ will be disabled when HAL receives the next request and there is no
16169     // pending HDR+ request.
16170     mEaselThermalThrottled = true;
16171 }
16172 
onOpened(std::unique_ptr<HdrPlusClient> client)16173 void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
16174 {
16175     int rc = NO_ERROR;
16176 
16177     if (client == nullptr) {
16178         ALOGE("%s: Opened client is null.", __FUNCTION__);
16179         return;
16180     }
16181 
16182     logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
16183     ALOGI("%s: HDR+ client opened.", __FUNCTION__);
16184 
16185     std::unique_lock<std::mutex> l(gHdrPlusClientLock);
16186     if (!gHdrPlusClientOpening) {
16187         ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
16188         return;
16189     }
16190 
16191     gHdrPlusClient = std::move(client);
16192     gHdrPlusClientOpening = false;
16193     gHdrPlusClientOpenCond.notify_one();
16194 
16195     // Set static metadata.
16196     status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
16197     if (res != OK) {
16198         LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
16199             __FUNCTION__, strerror(-res), res);
16200         gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
16201         gHdrPlusClient = nullptr;
16202         return;
16203     }
16204 
16205     // Enable HDR+ mode.
16206     res = enableHdrPlusModeLocked();
16207     if (res != OK) {
16208         LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
16209     }
16210 
16211     // Get Easel firmware version
16212     if (EaselManagerClientOpened) {
16213         rc = gEaselManagerClient->getFwVersion(mEaselFwVersion);
16214         if (rc != OK) {
16215             ALOGD("%s: Failed to query Easel firmware version", __FUNCTION__);
16216         } else {
16217             mEaselFwUpdated = true;
16218         }
16219     }
16220 }
16221 
onOpenFailed(status_t err)16222 void QCamera3HardwareInterface::onOpenFailed(status_t err)
16223 {
16224     ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
16225     std::unique_lock<std::mutex> l(gHdrPlusClientLock);
16226     gHdrPlusClientOpening = false;
16227     gHdrPlusClientOpenCond.notify_one();
16228 }
16229 
onFatalError()16230 void QCamera3HardwareInterface::onFatalError()
16231 {
16232     ALOGE("%s: HDR+ client encountered a fatal error.", __FUNCTION__);
16233     handleEaselFatalErrorAsync();
16234 }
16235 
onShutter(uint32_t requestId,int64_t apSensorTimestampNs)16236 void QCamera3HardwareInterface::onShutter(uint32_t requestId, int64_t apSensorTimestampNs)
16237 {
16238     ALOGV("%s: %d: Received a shutter for HDR+ request %d timestamp %" PRId64, __FUNCTION__,
16239             __LINE__, requestId, apSensorTimestampNs);
16240 
16241     mShutterDispatcher.markShutterReady(requestId, apSensorTimestampNs);
16242 }
16243 
onNextCaptureReady(uint32_t requestId)16244 void QCamera3HardwareInterface::onNextCaptureReady(uint32_t requestId)
16245 {
16246     pthread_mutex_lock(&mMutex);
16247 
16248     // Find the pending request for this result metadata.
16249     auto requestIter = mPendingRequestsList.begin();
16250     while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
16251         requestIter++;
16252     }
16253 
16254     if (requestIter == mPendingRequestsList.end()) {
16255         ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
16256         pthread_mutex_unlock(&mMutex);
16257         return;
16258     }
16259 
16260     requestIter->partial_result_cnt++;
16261 
16262     CameraMetadata metadata;
16263     uint8_t ready = true;
16264     metadata.update(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY, &ready, 1);
16265 
16266     // Send it to framework.
16267     camera3_capture_result_t result = {};
16268 
16269     result.result = metadata.getAndLock();
16270     // Populate metadata result
16271     result.frame_number = requestId;
16272     result.num_output_buffers = 0;
16273     result.output_buffers = NULL;
16274     result.partial_result = requestIter->partial_result_cnt;
16275 
16276     orchestrateResult(&result);
16277     metadata.unlock(result.result);
16278 
16279     pthread_mutex_unlock(&mMutex);
16280 }
16281 
onPostview(uint32_t requestId,std::unique_ptr<std::vector<uint8_t>> postview,uint32_t width,uint32_t height,uint32_t stride,int32_t format)16282 void QCamera3HardwareInterface::onPostview(uint32_t requestId,
16283         std::unique_ptr<std::vector<uint8_t>> postview, uint32_t width, uint32_t height,
16284         uint32_t stride, int32_t format)
16285 {
16286     if (property_get_bool("persist.camera.hdrplus.dump_postview", false)) {
16287         ALOGI("%s: %d: Received a postview %dx%d for HDR+ request %d", __FUNCTION__,
16288                 __LINE__, width, height, requestId);
16289         char buf[FILENAME_MAX] = {};
16290         snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"postview_%d_%dx%d.ppm",
16291                 requestId, width, height);
16292 
16293         pbcamera::StreamConfiguration config = {};
16294         config.image.width = width;
16295         config.image.height = height;
16296         config.image.format = format;
16297 
16298         pbcamera::PlaneConfiguration plane = {};
16299         plane.stride = stride;
16300         plane.scanline = height;
16301 
16302         config.image.planes.push_back(plane);
16303 
16304         pbcamera::StreamBuffer buffer = {};
16305         buffer.streamId = 0;
16306         buffer.dmaBufFd = -1;
16307         buffer.data = postview->data();
16308         buffer.dataSize = postview->size();
16309 
16310         hdrplus_client_utils::writePpm(buf, config, buffer);
16311     }
16312 
16313     pthread_mutex_lock(&mMutex);
16314 
16315     // Find the pending request for this result metadata.
16316     auto requestIter = mPendingRequestsList.begin();
16317     while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
16318         requestIter++;
16319     }
16320 
16321     if (requestIter == mPendingRequestsList.end()) {
16322         ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
16323         pthread_mutex_unlock(&mMutex);
16324         return;
16325     }
16326 
16327     requestIter->partial_result_cnt++;
16328 
16329     CameraMetadata metadata;
16330     int32_t config[3] = {static_cast<int32_t>(width), static_cast<int32_t>(height),
16331             static_cast<int32_t>(stride)};
16332     metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG, config, 3);
16333     metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA, postview->data(), postview->size());
16334 
16335     // Send it to framework.
16336     camera3_capture_result_t result = {};
16337 
16338     result.result = metadata.getAndLock();
16339     // Populate metadata result
16340     result.frame_number = requestId;
16341     result.num_output_buffers = 0;
16342     result.output_buffers = NULL;
16343     result.partial_result = requestIter->partial_result_cnt;
16344 
16345     orchestrateResult(&result);
16346     metadata.unlock(result.result);
16347 
16348     pthread_mutex_unlock(&mMutex);
16349 }
16350 
onCaptureResult(pbcamera::CaptureResult * result,const camera_metadata_t & resultMetadata)16351 void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
16352         const camera_metadata_t &resultMetadata)
16353 {
16354     if (result == nullptr) {
16355         ALOGE("%s: result is nullptr.", __FUNCTION__);
16356         return;
16357     }
16358 
16359     // Find the pending HDR+ request.
16360     HdrPlusPendingRequest pendingRequest;
16361     {
16362         Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
16363         auto req = mHdrPlusPendingRequests.find(result->requestId);
16364         pendingRequest = req->second;
16365     }
16366 
16367     // Update the result metadata with the settings of the HDR+ still capture request because
16368     // the result metadata belongs to a ZSL buffer.
16369     CameraMetadata metadata;
16370     metadata = &resultMetadata;
16371     updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
16372     camera_metadata_t* updatedResultMetadata = metadata.release();
16373 
16374     uint32_t halSnapshotStreamId = 0;
16375     if (mPictureChannel != nullptr) {
16376         halSnapshotStreamId = mPictureChannel->getStreamID(mPictureChannel->getStreamTypeMask());
16377     }
16378 
16379     auto halMetadata = std::make_shared<metadata_buffer_t>();
16380     clear_metadata_buffer(halMetadata.get());
16381 
16382     // Convert updated result metadata to HAL metadata.
16383     status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
16384             halSnapshotStreamId, /*minFrameDuration*/0);
16385     if (res != 0) {
16386         ALOGE("%s: Translating metadata failed: %s (%d)", __FUNCTION__, strerror(-res), res);
16387     }
16388 
16389     for (auto &outputBuffer : result->outputBuffers) {
16390         uint32_t streamId = outputBuffer.streamId;
16391 
16392         // Find the framework output buffer in the pending request.
16393         auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
16394         if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
16395             ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
16396                     streamId);
16397             continue;
16398         }
16399 
16400         camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;
16401 
16402         // Find the channel for the output buffer.
16403         QCamera3ProcessingChannel *channel =
16404                 (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;
16405 
16406         // Find the output buffer def.
16407         auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
16408         if (outputBufferIter == pendingRequest.outputBuffers.end()) {
16409             ALOGE("%s: Cannot find output buffer", __FUNCTION__);
16410             continue;
16411         }
16412 
16413         std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;
16414 
16415         // Check whether to dump the buffer.
16416         if (frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
16417                 frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
16418             // If the stream format is YUV or jpeg, check if dumping HDR+ YUV output is enabled.
16419             char prop[PROPERTY_VALUE_MAX];
16420             property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
16421             bool dumpYuvOutput = atoi(prop);
16422 
16423             if (dumpYuvOutput) {
16424                 // Dump yuv buffer to a ppm file.
16425                 pbcamera::StreamConfiguration outputConfig;
16426                 status_t rc = fillPbStreamConfig(&outputConfig, streamId,
16427                         channel, /*stream index*/0);
16428                 if (rc == OK) {
16429                     char buf[FILENAME_MAX] = {};
16430                     snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
16431                             result->requestId, streamId,
16432                             outputConfig.image.width, outputConfig.image.height);
16433 
16434                     hdrplus_client_utils::writePpm(buf, outputConfig, outputBuffer);
16435                 } else {
16436                     LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: "
16437                             "%s (%d).", __FUNCTION__, strerror(-rc), rc);
16438                 }
16439             }
16440         }
16441 
16442         if (channel == mPictureChannel) {
16443             android_errorWriteLog(0x534e4554, "150004253");
16444             // Keep a copy of outputBufferDef until the final JPEG buffer is
16445             // ready because the JPEG callback uses the mm_camera_buf_def_t
16446             // struct. The metaBufDef is stored in a shared_ptr to make sure
16447             // it's freed.
16448             std::shared_ptr<mm_camera_buf_def_t> metaBufDef =
16449                     std::make_shared<mm_camera_buf_def_t>();
16450             {
16451                 pthread_mutex_lock(&mMutex);
16452                 for (auto& pendingBuffers : mPendingBuffersMap.mPendingBuffersInRequest) {
16453                     if (pendingBuffers.frame_number == result->requestId) {
16454                         pendingBuffers.mHdrplusInputBuf = outputBufferDef;
16455                         pendingBuffers.mHdrplusInputMetaBuf = metaBufDef;
16456                         break;
16457                     }
16458                 }
16459                 pthread_mutex_unlock(&mMutex);
16460             }
16461 
16462             // Return the buffer to pic channel for encoding.
16463             mPictureChannel->returnYuvBufferAndEncode(outputBufferDef.get(),
16464                     frameworkOutputBuffer->buffer, result->requestId,
16465                     halMetadata, metaBufDef.get());
16466         } else {
16467             // Return the buffer to camera framework.
16468             pthread_mutex_lock(&mMutex);
16469             handleBufferWithLock(frameworkOutputBuffer, result->requestId);
16470             channel->unregisterBuffer(outputBufferDef.get());
16471             pthread_mutex_unlock(&mMutex);
16472         }
16473     }
16474 
16475     // Send HDR+ metadata to framework.
16476     {
16477         pthread_mutex_lock(&mMutex);
16478 
16479         // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
16480         handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
16481         pthread_mutex_unlock(&mMutex);
16482     }
16483 
16484     // Remove the HDR+ pending request.
16485     {
16486         Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
16487         auto req = mHdrPlusPendingRequests.find(result->requestId);
16488         mHdrPlusPendingRequests.erase(req);
16489     }
16490 }
16491 
/**
 * HDR+ client callback invoked when a capture request failed.
 *
 * Returns the request's output buffers to their channels, removes the
 * request from the HDR+ pending-request map, and then reports a request
 * error plus error-status buffers to the framework for every buffer still
 * pending for the failed frame number.
 *
 * @param failedResult the failed HDR+ capture result; must not be null.
 */
void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
{
    if (failedResult == nullptr) {
        ALOGE("%s: Got an empty failed result.", __FUNCTION__);
        return;
    }

    ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);

    // Find the pending HDR+ request.
    HdrPlusPendingRequest pendingRequest;
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
        if (req == mHdrPlusPendingRequests.end()) {
            ALOGE("%s: Couldn't find pending request %d", __FUNCTION__, failedResult->requestId);
            return;
        }
        pendingRequest = req->second;
    }

    // Give every output buffer of the failed result back to its channel.
    for (auto &outputBuffer : failedResult->outputBuffers) {
        uint32_t streamId = outputBuffer.streamId;

        // Find the channel
        // Find the framework output buffer in the pending request.
        auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
        if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
            ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
                    streamId);
            continue;
        }

        camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;

        // Find the channel for the output buffer.
        QCamera3ProcessingChannel *channel =
                (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;

        // Find the output buffer def.
        auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
        if (outputBufferIter == pendingRequest.outputBuffers.end()) {
            ALOGE("%s: Cannot find output buffer", __FUNCTION__);
            continue;
        }

        std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;

        if (channel == mPictureChannel) {
            // Return the buffer to pic channel.
            mPictureChannel->returnYuvBuffer(outputBufferDef.get());
        } else {
            channel->unregisterBuffer(outputBufferDef.get());
        }
    }

    // Remove the HDR+ pending request.
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
        mHdrPlusPendingRequests.erase(req);
    }

    pthread_mutex_lock(&mMutex);

    // Find the pending buffers.
    auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
    while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        if (pendingBuffers->frame_number == failedResult->requestId) {
            break;
        }
        pendingBuffers++;
    }

    // Send out request errors for the pending buffers.
    if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        std::vector<camera3_stream_buffer_t> streamBuffers;
        for (auto &buffer : pendingBuffers->mPendingBufferList) {
            // Prepare a stream buffer.
            camera3_stream_buffer_t streamBuffer = {};
            streamBuffer.stream = buffer.stream;
            streamBuffer.buffer = buffer.buffer;
            streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
            streamBuffer.acquire_fence = -1;
            streamBuffer.release_fence = -1;

            // Send out request error event.
            camera3_notify_msg_t notify_msg = {};
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.frame_number = pendingBuffers->frame_number;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
            notify_msg.message.error.error_stream = buffer.stream;

            orchestrateNotify(&notify_msg);
            mOutputBufferDispatcher.markBufferReady(pendingBuffers->frame_number, streamBuffer);
        }

        // No shutter will ever arrive for this frame; drop any tracking.
        mShutterDispatcher.clear(pendingBuffers->frame_number);



        // Remove pending buffers.
        mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
    }

    // Remove pending request.
    auto halRequest = mPendingRequestsList.begin();
    while (halRequest != mPendingRequestsList.end()) {
        if (halRequest->frame_number == failedResult->requestId) {
            mPendingRequestsList.erase(halRequest);
            break;
        }
        halRequest++;
    }

    pthread_mutex_unlock(&mMutex);
}
16609 
/**
 * Read depth-sensor calibration data from the persisted calibration XML.
 *
 * Parses /persist/sensors/calibration/calibration.xml (tinyxml2), picks the
 * camera model with index 0 and type "calibu_fu_fv_u0_v0_k1_k2_k3", scales
 * its intrinsics from the calibrated image width to the given active array
 * width, and reads the extrinsic rotation/translation with frame_B_id 0.
 *
 * @param activeArrayWidth active pixel array width used to scale intrinsics.
 * @param poseRotation out: quaternion rotation (4 floats).
 * @param poseTranslation out: translation vector (3 floats).
 * @param cameraIntrinsics out: {f_x, f_y, c_x, c_y, s}; s is always 0.
 * @param radialDistortion out: {k_1, k_2, k_3, 0, 0}.
 * @return true on success; false if the file is missing, malformed, or the
 *         expected elements/attributes are absent.
 */
bool QCamera3HardwareInterface::readSensorCalibration(
        int activeArrayWidth,
        float poseRotation[4], float poseTranslation[3],
        float cameraIntrinsics[5], float radialDistortion[5]) {

    const char* calibrationPath = "/persist/sensors/calibration/calibration.xml";

    using namespace tinyxml2;

    XMLDocument calibrationXml;
    XMLError err = calibrationXml.LoadFile(calibrationPath);
    if (err != XML_SUCCESS) {
        ALOGE("Unable to load calibration file '%s'. Error: %s",
                calibrationPath, XMLDocument::ErrorIDToName(err));
        return false;
    }
    XMLElement *rig = calibrationXml.FirstChildElement("rig");
    if (rig == nullptr) {
        ALOGE("No 'rig' in calibration file");
        return false;
    }
    // Walk the <camera> siblings looking for the model with index 0.
    XMLElement *cam = rig->FirstChildElement("camera");
    XMLElement *camModel = nullptr;
    while (cam != nullptr) {
        camModel = cam->FirstChildElement("camera_model");
        if (camModel == nullptr) {
            ALOGE("No 'camera_model' in calibration file");
            return false;
        }
        int modelIndex = camModel->IntAttribute("index", -1);
        // Model index "0" has the calibration we need
        if (modelIndex == 0) {
            break;
        }
        cam = cam->NextSiblingElement("camera");
    }
    if (cam == nullptr) {
        ALOGE("No 'camera' in calibration file");
        return false;
    }
    // Only the calibu fu/fv/u0/v0/k1/k2/k3 model is understood below.
    const char *modelType = camModel->Attribute("type");
    if (modelType == nullptr || strcmp(modelType,"calibu_fu_fv_u0_v0_k1_k2_k3")) {
        ALOGE("Camera model is unknown type %s",
                modelType ? modelType : "NULL");
        return false;
    }
    // Calibrated image dimensions; width is needed to scale the intrinsics.
    XMLElement *modelWidth = camModel->FirstChildElement("width");
    if (modelWidth == nullptr || modelWidth->GetText() == nullptr) {
        ALOGE("No camera model width in calibration file");
        return false;
    }
    int width = atoi(modelWidth->GetText());
    XMLElement *modelHeight = camModel->FirstChildElement("height");
    if (modelHeight == nullptr || modelHeight->GetText() == nullptr) {
        ALOGE("No camera model height in calibration file");
        return false;
    }
    int height = atoi(modelHeight->GetText());
    if (width <= 0 || height <= 0) {
        ALOGE("Bad model width or height in calibration file: %d x %d", width, height);
        return false;
    }
    ALOGI("Width: %d, Height: %d", width, height);

    XMLElement *modelParams = camModel->FirstChildElement("params");
    if (modelParams == nullptr) {
        ALOGE("No camera model params in calibration file");
        return false;
    }
    const char* paramText = modelParams->GetText();
    if (paramText == nullptr) {
        ALOGE("No parameters in params element in calibration file");
        return false;
    }
    ALOGI("Parameters: %s", paramText);

    // Parameter string is of the form "[ float; float; float ...]"
    // with 7 values: fu, fv, u0, v0, k1, k2, k3.
    float params[7];
    bool success = parseStringArray(paramText, params, 7);
    if (!success) {
        ALOGE("Malformed camera parameter string in calibration file");
        return false;
    }

    // Find the extrinsic calibration whose frame_B_id is 0.
    XMLElement *extCalib = rig->FirstChildElement("extrinsic_calibration");
    while (extCalib != nullptr) {
        int id = extCalib->IntAttribute("frame_B_id", -1);
        if (id == 0) {
            break;
        }
        extCalib = extCalib->NextSiblingElement("extrinsic_calibration");
    }
    if (extCalib == nullptr) {
        ALOGE("No 'extrinsic_calibration' in calibration file");
        return false;
    }

    XMLElement *q = extCalib->FirstChildElement("A_q_B");
    if (q == nullptr || q->GetText() == nullptr) {
        ALOGE("No extrinsic quarternion in calibration file");
        return false;
    }
    float rotation[4];
    success = parseStringArray(q->GetText(), rotation, 4);
    if (!success) {
        ALOGE("Malformed extrinsic quarternion string in calibration file");
        return false;
    }

    XMLElement *p = extCalib->FirstChildElement("A_p_B");
    if (p == nullptr || p->GetText() == nullptr) {
        ALOGE("No extrinsic translation in calibration file");
        return false;
    }
    float position[3];
    success = parseStringArray(p->GetText(), position, 3);
    if (!success) {
        ALOGE("Malformed extrinsic position string in calibration file");
        return false;
    }

    // Map from width x height to active array
    float scaleFactor = static_cast<float>(activeArrayWidth) / width;

    cameraIntrinsics[0] = params[0] * scaleFactor; // fu -> f_x
    cameraIntrinsics[1] = params[1] * scaleFactor; // fv -> f_y
    cameraIntrinsics[2] = params[2] * scaleFactor; // u0 -> c_x
    cameraIntrinsics[3] = params[3] * scaleFactor; // v0 -> c_y
    cameraIntrinsics[4] = 0; // s = 0

    radialDistortion[0] = params[4]; // k1 -> k_1
    radialDistortion[1] = params[5]; // k2 -> k_2
    radialDistortion[2] = params[6]; // k3 -> k_3
    radialDistortion[3] = 0; // k_4 = 0
    radialDistortion[4] = 0; // k_5 = 0

    for (int i = 0; i < 4; i++) {
        poseRotation[i] = rotation[i];
    }
    for (int i = 0; i < 3; i++) {
        poseTranslation[i] = position[i];
    }

    ALOGI("Intrinsics: %f, %f, %f, %f, %f", cameraIntrinsics[0],
            cameraIntrinsics[1], cameraIntrinsics[2],
            cameraIntrinsics[3], cameraIntrinsics[4]);
    ALOGI("Distortion: %f, %f, %f, %f, %f",
            radialDistortion[0], radialDistortion[1], radialDistortion[2], radialDistortion[3],
            radialDistortion[4]);
    ALOGI("Pose rotation: %f, %f, %f, %f",
            poseRotation[0], poseRotation[1], poseRotation[2], poseRotation[3]);
    ALOGI("Pose translation: %f, %f, %f",
            poseTranslation[0], poseTranslation[1], poseTranslation[2]);

    return true;
}
16766 
parseStringArray(const char * str,float * dest,int count)16767 bool QCamera3HardwareInterface::parseStringArray(const char *str, float *dest, int count) {
16768     size_t idx = 0;
16769     size_t len = strlen(str);
16770     for (; idx < len; idx++) {
16771         if (str[idx] == '[') break;
16772     }
16773     const char *startParam = str + idx + 1;
16774     if (startParam >= str + len) {
16775         ALOGE("Malformed array: %s", str);
16776         return false;
16777     }
16778     char *endParam = nullptr;
16779     for (int i = 0; i < count; i++) {
16780         dest[i] = strtod(startParam, &endParam);
16781         if (startParam == endParam) {
16782             ALOGE("Malformed array, index %d: %s", i, str);
16783             return false;
16784         }
16785         startParam = endParam + 1;
16786         if (startParam >= str + len) {
16787             ALOGE("Malformed array, index %d: %s", i, str);
16788             return false;
16789         }
16790     }
16791     return true;
16792 }
16793 
ShutterDispatcher(QCamera3HardwareInterface * parent)16794 ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
16795         mParent(parent) {}
16796 
expectShutter(uint32_t frameNumber,bool isReprocess,bool isZsl)16797 void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess, bool isZsl)
16798 {
16799     std::lock_guard<std::mutex> lock(mLock);
16800 
16801     if (isReprocess) {
16802         mReprocessShutters.emplace(frameNumber, Shutter());
16803     } else if (isZsl) {
16804         mZslShutters.emplace(frameNumber, Shutter());
16805     } else {
16806         mShutters.emplace(frameNumber, Shutter());
16807     }
16808 }
16809 
markShutterReady(uint32_t frameNumber,uint64_t timestamp)16810 void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
16811 {
16812     std::lock_guard<std::mutex> lock(mLock);
16813 
16814     std::map<uint32_t, Shutter> *shutters = nullptr;
16815 
16816     // Find the shutter entry.
16817     auto shutter = mShutters.find(frameNumber);
16818     if (shutter != mShutters.end()) {
16819         shutters = &mShutters;
16820     } else {
16821         shutter = mReprocessShutters.find(frameNumber);
16822         if (shutter != mReprocessShutters.end()) {
16823             shutters = &mReprocessShutters;
16824         } else {
16825             shutter = mZslShutters.find(frameNumber);
16826             if (shutter != mZslShutters.end()) {
16827                 shutters = &mZslShutters;
16828             } else {
16829                 // Shutter was already sent.
16830                 return;
16831             }
16832         }
16833     }
16834 
16835     if (shutter->second.ready) {
16836         // If shutter is already ready, don't update timestamp again.
16837         return;
16838     }
16839 
16840     // Make this frame's shutter ready.
16841     shutter->second.ready = true;
16842     shutter->second.timestamp = timestamp;
16843 
16844     // Iterate throught the shutters and send out shuters until the one that's not ready yet.
16845     shutter = shutters->begin();
16846     while (shutter != shutters->end()) {
16847         if (!shutter->second.ready) {
16848             // If this shutter is not ready, the following shutters can't be sent.
16849             break;
16850         }
16851 
16852         camera3_notify_msg_t msg = {};
16853         msg.type = CAMERA3_MSG_SHUTTER;
16854         msg.message.shutter.frame_number = shutter->first;
16855         msg.message.shutter.timestamp = shutter->second.timestamp;
16856         mParent->orchestrateNotify(&msg);
16857 
16858         shutter = shutters->erase(shutter);
16859     }
16860 }
16861 
clear(uint32_t frameNumber)16862 void ShutterDispatcher::clear(uint32_t frameNumber)
16863 {
16864     std::lock_guard<std::mutex> lock(mLock);
16865     mShutters.erase(frameNumber);
16866     mReprocessShutters.erase(frameNumber);
16867     mZslShutters.erase(frameNumber);
16868 }
16869 
clear()16870 void ShutterDispatcher::clear()
16871 {
16872     std::lock_guard<std::mutex> lock(mLock);
16873 
16874     // Log errors for stale shutters.
16875     for (auto &shutter : mShutters) {
16876         ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
16877             __FUNCTION__, shutter.first, shutter.second.ready,
16878             shutter.second.timestamp);
16879     }
16880 
16881     // Log errors for stale reprocess shutters.
16882     for (auto &shutter : mReprocessShutters) {
16883         ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
16884             __FUNCTION__, shutter.first, shutter.second.ready,
16885             shutter.second.timestamp);
16886     }
16887 
16888     // Log errors for stale ZSL shutters.
16889     for (auto &shutter : mZslShutters) {
16890         ALOGE("%s: stale zsl shutter: frame number %u, ready %d, timestamp %" PRId64,
16891             __FUNCTION__, shutter.first, shutter.second.ready,
16892             shutter.second.timestamp);
16893     }
16894 
16895     mShutters.clear();
16896     mReprocessShutters.clear();
16897     mZslShutters.clear();
16898 }
16899 
OutputBufferDispatcher(QCamera3HardwareInterface * parent)16900 OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
16901         mParent(parent) {}
16902 
configureStreams(camera3_stream_configuration_t * streamList)16903 status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
16904 {
16905     std::lock_guard<std::mutex> lock(mLock);
16906     mStreamBuffers.clear();
16907     if (!streamList) {
16908         ALOGE("%s: streamList is nullptr.", __FUNCTION__);
16909         return -EINVAL;
16910     }
16911 
16912     // Create a "frame-number -> buffer" map for each stream.
16913     for (uint32_t i = 0; i < streamList->num_streams; i++) {
16914         mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
16915     }
16916 
16917     return OK;
16918 }
16919 
expectBuffer(uint32_t frameNumber,camera3_stream_t * stream)16920 status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
16921 {
16922     std::lock_guard<std::mutex> lock(mLock);
16923 
16924     // Find the "frame-number -> buffer" map for the stream.
16925     auto buffers = mStreamBuffers.find(stream);
16926     if (buffers == mStreamBuffers.end()) {
16927         ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
16928         return -EINVAL;
16929     }
16930 
16931     // Create an unready buffer for this frame number.
16932     buffers->second.emplace(frameNumber, Buffer());
16933     return OK;
16934 }
16935 
markBufferReady(uint32_t frameNumber,const camera3_stream_buffer_t & buffer)16936 void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
16937         const camera3_stream_buffer_t &buffer)
16938 {
16939     std::lock_guard<std::mutex> lock(mLock);
16940 
16941     // Find the frame number -> buffer map for the stream.
16942     auto buffers = mStreamBuffers.find(buffer.stream);
16943     if (buffers == mStreamBuffers.end()) {
16944         ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
16945         return;
16946     }
16947 
16948     // Find the unready buffer this frame number and mark it ready.
16949     auto pendingBuffer = buffers->second.find(frameNumber);
16950     if (pendingBuffer == buffers->second.end()) {
16951         ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
16952         return;
16953     }
16954 
16955     pendingBuffer->second.ready = true;
16956     pendingBuffer->second.buffer = buffer;
16957 
16958     // Iterate through the buffers and send out buffers until the one that's not ready yet.
16959     pendingBuffer = buffers->second.begin();
16960     while (pendingBuffer != buffers->second.end()) {
16961         if (!pendingBuffer->second.ready) {
16962             // If this buffer is not ready, the following buffers can't be sent.
16963             break;
16964         }
16965 
16966         camera3_capture_result_t result = {};
16967         result.frame_number = pendingBuffer->first;
16968         result.num_output_buffers = 1;
16969         result.output_buffers = &pendingBuffer->second.buffer;
16970 
16971         // Send out result with buffer errors.
16972         mParent->orchestrateResult(&result);
16973 
16974         pendingBuffer = buffers->second.erase(pendingBuffer);
16975     }
16976 }
16977 
clear(bool clearConfiguredStreams)16978 void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
16979 {
16980     std::lock_guard<std::mutex> lock(mLock);
16981 
16982     // Log errors for stale buffers.
16983     for (auto &buffers : mStreamBuffers) {
16984         for (auto &buffer : buffers.second) {
16985             ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
16986                 __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
16987         }
16988         buffers.second.clear();
16989     }
16990 
16991     if (clearConfiguredStreams) {
16992         mStreamBuffers.clear();
16993     }
16994 }
16995 
16996 }; //end namespace qcamera
16997