/*
 * Copyright (C) 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "ExtCamDev@3.4"
//#define LOG_NDEBUG 0
#include <log/log.h>

#include <algorithm>
#include <array>
#include <linux/videodev2.h>
#include "android-base/macros.h"
#include "CameraMetadata.h"
#include "../../3.2/default/include/convert.h"
#include "ExternalCameraDevice_3_4.h"

namespace android {
namespace hardware {
namespace camera {
namespace device {
namespace V3_4 {
namespace implementation {

namespace {
// Only support MJPEG for now, as it seems to be the format that supports higher fps.
// Other formats to consider in the future:
// * V4L2_PIX_FMT_YVU420 (== YV12)
// * V4L2_PIX_FMT_YVYU (YVYU: can be converted to YV12 or other YUV420_888 formats)
const std::array<uint32_t, /*size*/ 2> kSupportedFourCCs{
    {V4L2_PIX_FMT_MJPEG, V4L2_PIX_FMT_Z16}};  // double braces required in C++11

constexpr int MAX_RETRY = 5;                 // Allow retrying v4l2 open failures a few times.
constexpr int OPEN_RETRY_SLEEP_US = 100000;  // 100ms * MAX_RETRY = 0.5 seconds

}  // anonymous namespace

ExternalCameraDevice::ExternalCameraDevice(
        const std::string& cameraId, const ExternalCameraConfig& cfg) :
        mCameraId(cameraId),
        mCfg(cfg) {}

ExternalCameraDevice::~ExternalCameraDevice() {}

bool ExternalCameraDevice::isInitFailed() {
    Mutex::Autolock _l(mLock);
    return isInitFailedLocked();
}

bool ExternalCameraDevice::isInitFailedLocked() {
    if (!mInitialized) {
        status_t ret = initCameraCharacteristics();
        if (ret != OK) {
            ALOGE("%s: init camera characteristics failed: error %d", __FUNCTION__, ret);
            mInitFailed = true;
        }
        mInitialized = true;
    }
    return mInitFailed;
}

Return<void> ExternalCameraDevice::getResourceCost(
        ICameraDevice::getResourceCost_cb _hidl_cb) {
    CameraResourceCost resCost;
    resCost.resourceCost = 100;
    _hidl_cb(Status::OK, resCost);
    return Void();
}

Return<void> ExternalCameraDevice::getCameraCharacteristics(
        ICameraDevice::getCameraCharacteristics_cb _hidl_cb) {
    Mutex::Autolock _l(mLock);
    V3_2::CameraMetadata hidlChars;

    if (isInitFailedLocked()) {
        _hidl_cb(Status::INTERNAL_ERROR, hidlChars);
        return Void();
    }

    const camera_metadata_t* rawMetadata = mCameraCharacteristics.getAndLock();
    V3_2::implementation::convertToHidl(rawMetadata, &hidlChars);
    _hidl_cb(Status::OK, hidlChars);
    mCameraCharacteristics.unlock(rawMetadata);
    return Void();
}

Return<Status> ExternalCameraDevice::setTorchMode(TorchMode) {
    return Status::OPERATION_NOT_SUPPORTED;
}

Return<void> ExternalCameraDevice::open(
        const sp<ICameraDeviceCallback>& callback, ICameraDevice::open_cb _hidl_cb) {
    Status status = Status::OK;
    sp<ExternalCameraDeviceSession> session = nullptr;

    if (callback == nullptr) {
        ALOGE("%s: cannot open camera %s. callback is null!",
                __FUNCTION__, mCameraId.c_str());
        _hidl_cb(Status::ILLEGAL_ARGUMENT, nullptr);
        return Void();
    }

    if (isInitFailed()) {
        ALOGE("%s: cannot open camera %s. camera init failed!",
                __FUNCTION__, mCameraId.c_str());
        _hidl_cb(Status::INTERNAL_ERROR, nullptr);
        return Void();
    }

    mLock.lock();

    ALOGV("%s: Initializing device for camera %s", __FUNCTION__, mCameraId.c_str());
    session = mSession.promote();
    if (session != nullptr && !session->isClosed()) {
        ALOGE("%s: cannot open an already opened camera!", __FUNCTION__);
        mLock.unlock();
        _hidl_cb(Status::CAMERA_IN_USE, nullptr);
        return Void();
    }

    unique_fd fd(::open(mCameraId.c_str(), O_RDWR));
    if (fd.get() < 0) {
        int numAttempt = 0;
        do {
            ALOGW("%s: v4l2 device %s open failed, wait 100ms and try again",
                    __FUNCTION__, mCameraId.c_str());
            usleep(OPEN_RETRY_SLEEP_US);  // sleep and try again
            fd.reset(::open(mCameraId.c_str(), O_RDWR));
            numAttempt++;
        } while (fd.get() < 0 && numAttempt <= MAX_RETRY);

        if (fd.get() < 0) {
            ALOGE("%s: v4l2 device open %s failed: %s",
                    __FUNCTION__, mCameraId.c_str(), strerror(errno));
            mLock.unlock();
            _hidl_cb(Status::INTERNAL_ERROR, nullptr);
            return Void();
        }
    }

    session = createSession(
            callback, mCfg, mSupportedFormats, mCroppingType,
            mCameraCharacteristics, mCameraId, std::move(fd));
    if (session == nullptr) {
        ALOGE("%s: camera device session allocation failed", __FUNCTION__);
        mLock.unlock();
        _hidl_cb(Status::INTERNAL_ERROR, nullptr);
        return Void();
    }
    if (session->isInitFailed()) {
        ALOGE("%s: camera device session init failed", __FUNCTION__);
        session = nullptr;
        mLock.unlock();
        _hidl_cb(Status::INTERNAL_ERROR, nullptr);
        return Void();
    }
    mSession = session;

    mLock.unlock();

    _hidl_cb(status, session->getInterface());
    return Void();
}

Return<void> ExternalCameraDevice::dumpState(const ::android::hardware::hidl_handle& handle) {
    Mutex::Autolock _l(mLock);
    if (handle.getNativeHandle() == nullptr) {
        ALOGE("%s: handle must not be null", __FUNCTION__);
        return Void();
    }
    if (handle->numFds != 1 || handle->numInts != 0) {
        ALOGE("%s: handle must contain 1 FD and 0 integers! Got %d FDs and %d ints",
                __FUNCTION__, handle->numFds, handle->numInts);
        return Void();
    }
    int fd = handle->data[0];
    if (mSession == nullptr) {
        dprintf(fd, "No active camera device session instance\n");
        return Void();
    }
    auto session = mSession.promote();
    if (session == nullptr) {
        dprintf(fd, "No active camera device session instance\n");
        return Void();
    }
    // Call into active session to dump states
    session->dumpState(handle);
    return Void();
}

status_t ExternalCameraDevice::initCameraCharacteristics() {
    if (mCameraCharacteristics.isEmpty()) {
        // init camera characteristics
        unique_fd fd(::open(mCameraId.c_str(), O_RDWR));
        if (fd.get() < 0) {
            ALOGE("%s: v4l2 device open %s failed", __FUNCTION__, mCameraId.c_str());
            return DEAD_OBJECT;
        }

        status_t ret;
        ret = initDefaultCharsKeys(&mCameraCharacteristics);
        if (ret != OK) {
            ALOGE("%s: init default characteristics key failed: error %d", __FUNCTION__, ret);
            mCameraCharacteristics.clear();
            return ret;
        }

        ret = initCameraControlsCharsKeys(fd.get(), &mCameraCharacteristics);
        if (ret != OK) {
            ALOGE("%s: init camera control characteristics key failed: error %d", __FUNCTION__, ret);
            mCameraCharacteristics.clear();
            return ret;
        }

        ret = initOutputCharsKeys(fd.get(), &mCameraCharacteristics);
        if (ret != OK) {
            ALOGE("%s: init output characteristics key failed: error %d", __FUNCTION__, ret);
            mCameraCharacteristics.clear();
            return ret;
        }

        ret = initAvailableCapabilities(&mCameraCharacteristics);
        if (ret != OK) {
            ALOGE("%s: init available capabilities key failed: error %d", __FUNCTION__, ret);
            mCameraCharacteristics.clear();
            return ret;
        }
    }
    return OK;
}

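// Helper macros for the characteristics initialization below: ARRAY_SIZE computes the
// element count of a C array, and UPDATE writes a single metadata tag, logging and
// returning -EINVAL from the enclosing function if CameraMetadata::update() fails.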
#define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0]))
#define UPDATE(tag, data, size)                        \
    do {                                               \
        if (metadata->update((tag), (data), (size))) { \
            ALOGE("Update " #tag " failed!");          \
            return -EINVAL;                            \
        }                                              \
    } while (0)

status_t ExternalCameraDevice::initAvailableCapabilities(
        ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {

    if (mSupportedFormats.empty()) {
        ALOGE("%s: Supported formats list is empty", __FUNCTION__);
        return UNKNOWN_ERROR;
    }

    bool hasDepth = false;
    bool hasColor = false;
    for (const auto& fmt : mSupportedFormats) {
        switch (fmt.fourcc) {
            case V4L2_PIX_FMT_Z16: hasDepth = true; break;
            case V4L2_PIX_FMT_MJPEG: hasColor = true; break;
            default: ALOGW("%s: Unsupported format found", __FUNCTION__);
        }
    }

    std::vector<uint8_t> availableCapabilities;
    if (hasDepth) {
        availableCapabilities.push_back(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT);
    }
    if (hasColor) {
        availableCapabilities.push_back(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
    }
    if (!availableCapabilities.empty()) {
        UPDATE(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, availableCapabilities.data(),
               availableCapabilities.size());
    }

    return OK;
}

status_t ExternalCameraDevice::initDefaultCharsKeys(
        ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
    const uint8_t hardware_level = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL;
    UPDATE(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, &hardware_level, 1);

    // android.colorCorrection
    const uint8_t availableAberrationModes[] = {
        ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF};
    UPDATE(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
           availableAberrationModes, ARRAY_SIZE(availableAberrationModes));

    // android.control
    const uint8_t antibandingMode =
        ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
    UPDATE(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
           &antibandingMode, 1);

    const int32_t controlMaxRegions[] = {/*AE*/ 0, /*AWB*/ 0, /*AF*/ 0};
    UPDATE(ANDROID_CONTROL_MAX_REGIONS, controlMaxRegions,
           ARRAY_SIZE(controlMaxRegions));

    const uint8_t videoStabilizationMode =
        ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
    UPDATE(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
           &videoStabilizationMode, 1);

    const uint8_t awbAvailableMode = ANDROID_CONTROL_AWB_MODE_AUTO;
    UPDATE(ANDROID_CONTROL_AWB_AVAILABLE_MODES, &awbAvailableMode, 1);

    const uint8_t aeAvailableMode = ANDROID_CONTROL_AE_MODE_ON;
    UPDATE(ANDROID_CONTROL_AE_AVAILABLE_MODES, &aeAvailableMode, 1);

    const uint8_t availableEffect = ANDROID_CONTROL_EFFECT_MODE_OFF;
    UPDATE(ANDROID_CONTROL_AVAILABLE_EFFECTS, &availableEffect, 1);

    const uint8_t controlAvailableModes[] = {ANDROID_CONTROL_MODE_OFF,
                                             ANDROID_CONTROL_MODE_AUTO};
    UPDATE(ANDROID_CONTROL_AVAILABLE_MODES, controlAvailableModes,
           ARRAY_SIZE(controlAvailableModes));

    // android.edge
    const uint8_t edgeMode = ANDROID_EDGE_MODE_OFF;
    UPDATE(ANDROID_EDGE_AVAILABLE_EDGE_MODES, &edgeMode, 1);

    // android.flash
    const uint8_t flashInfo = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
    UPDATE(ANDROID_FLASH_INFO_AVAILABLE, &flashInfo, 1);

    // android.hotPixel
    const uint8_t hotPixelMode = ANDROID_HOT_PIXEL_MODE_OFF;
    UPDATE(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES, &hotPixelMode, 1);

    // android.jpeg
    const int32_t jpegAvailableThumbnailSizes[] = {0, 0,
                                                   176, 144,
                                                   240, 144,
                                                   256, 144,
                                                   240, 160,
                                                   256, 154,
                                                   240, 180};
    UPDATE(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, jpegAvailableThumbnailSizes,
           ARRAY_SIZE(jpegAvailableThumbnailSizes));

    const int32_t jpegMaxSize = mCfg.maxJpegBufSize;
    UPDATE(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);

    // android.lens
    const uint8_t focusDistanceCalibration =
        ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED;
    UPDATE(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION, &focusDistanceCalibration, 1);

    const uint8_t opticalStabilizationMode =
        ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
    UPDATE(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
           &opticalStabilizationMode, 1);

    const uint8_t facing = ANDROID_LENS_FACING_EXTERNAL;
    UPDATE(ANDROID_LENS_FACING, &facing, 1);

    // android.noiseReduction
    const uint8_t noiseReductionMode = ANDROID_NOISE_REDUCTION_MODE_OFF;
    UPDATE(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
           &noiseReductionMode, 1);
    UPDATE(ANDROID_NOISE_REDUCTION_MODE, &noiseReductionMode, 1);

    const int32_t partialResultCount = 1;
    UPDATE(ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &partialResultCount, 1);

    // This means pipeline latency of X frame intervals. The maximum number is 4.
    const uint8_t requestPipelineMaxDepth = 4;
    UPDATE(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, &requestPipelineMaxDepth, 1);

    // Three numbers represent the maximum numbers of different types of output
    // streams simultaneously. The types are raw sensor, processed (but not
    // stalling), and processed (but stalling). For usb limited mode, raw sensor
    // is not supported. Stalling stream is JPEG. Non-stalling streams are
    // YUV_420_888 or YV12.
    const int32_t requestMaxNumOutputStreams[] = {
            /*RAW*/0,
            /*Processed*/ExternalCameraDeviceSession::kMaxProcessedStream,
            /*Stall*/ExternalCameraDeviceSession::kMaxStallStream};
    UPDATE(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, requestMaxNumOutputStreams,
           ARRAY_SIZE(requestMaxNumOutputStreams));

    // Limited mode doesn't support reprocessing.
    const int32_t requestMaxNumInputStreams = 0;
    UPDATE(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS, &requestMaxNumInputStreams,
           1);

    // android.scaler
    // TODO: b/72263447 V4L2_CID_ZOOM_*
    const float scalerAvailableMaxDigitalZoom[] = {1};
    UPDATE(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
           scalerAvailableMaxDigitalZoom,
           ARRAY_SIZE(scalerAvailableMaxDigitalZoom));

    const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
    UPDATE(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);

    const int32_t testPatternModes[] = {
        ANDROID_SENSOR_TEST_PATTERN_MODE_OFF};
    UPDATE(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, testPatternModes,
           ARRAY_SIZE(testPatternModes));

    const uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
    UPDATE(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, &timestampSource, 1);

    // Orientation is a bit odd for external camera, but consider it as the orientation
    // between the external camera sensor (which is usually landscape) and the device's
    // natural display orientation. For devices with natural landscape display (ex: tablet/TV),
    // the orientation should be 0. For devices with natural portrait display (phone), the
    // orientation should be 270.
    const int32_t orientation = mCfg.orientation;
    UPDATE(ANDROID_SENSOR_ORIENTATION, &orientation, 1);

    // android.shading
    const uint8_t availableMode = ANDROID_SHADING_MODE_OFF;
    UPDATE(ANDROID_SHADING_AVAILABLE_MODES, &availableMode, 1);

    // android.statistics
    const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
    UPDATE(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, &faceDetectMode,
           1);

    const int32_t maxFaceCount = 0;
    UPDATE(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, &maxFaceCount, 1);

    const uint8_t availableHotpixelMode =
        ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
    UPDATE(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
           &availableHotpixelMode, 1);

    const uint8_t lensShadingMapMode =
        ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
    UPDATE(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
           &lensShadingMapMode, 1);

    // android.sync
    const int32_t maxLatency = ANDROID_SYNC_MAX_LATENCY_UNKNOWN;
    UPDATE(ANDROID_SYNC_MAX_LATENCY, &maxLatency, 1);

    /* Other sensor/RAW related keys:
     * android.sensor.info.colorFilterArrangement -> no need if we don't do RAW
     * android.sensor.info.physicalSize -> not available
     * android.sensor.info.whiteLevel -> not available/not needed
     * android.sensor.info.lensShadingApplied -> not needed
     * android.sensor.info.preCorrectionActiveArraySize -> not available/not needed
     * android.sensor.blackLevelPattern -> not available/not needed
     */

    const int32_t availableRequestKeys[] = {
        ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
        ANDROID_CONTROL_AE_ANTIBANDING_MODE,
        ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
        ANDROID_CONTROL_AE_LOCK,
        ANDROID_CONTROL_AE_MODE,
        ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
        ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
        ANDROID_CONTROL_AF_MODE,
        ANDROID_CONTROL_AF_TRIGGER,
        ANDROID_CONTROL_AWB_LOCK,
        ANDROID_CONTROL_AWB_MODE,
        ANDROID_CONTROL_CAPTURE_INTENT,
        ANDROID_CONTROL_EFFECT_MODE,
        ANDROID_CONTROL_MODE,
        ANDROID_CONTROL_SCENE_MODE,
        ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
        ANDROID_FLASH_MODE,
        ANDROID_JPEG_ORIENTATION,
        ANDROID_JPEG_QUALITY,
        ANDROID_JPEG_THUMBNAIL_QUALITY,
        ANDROID_JPEG_THUMBNAIL_SIZE,
        ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
        ANDROID_NOISE_REDUCTION_MODE,
        ANDROID_SCALER_CROP_REGION,
        ANDROID_SENSOR_TEST_PATTERN_MODE,
        ANDROID_STATISTICS_FACE_DETECT_MODE,
        ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE};
    UPDATE(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, availableRequestKeys,
           ARRAY_SIZE(availableRequestKeys));

    const int32_t availableResultKeys[] = {
        ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
        ANDROID_CONTROL_AE_ANTIBANDING_MODE,
        ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
        ANDROID_CONTROL_AE_LOCK,
        ANDROID_CONTROL_AE_MODE,
        ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
        ANDROID_CONTROL_AE_STATE,
        ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
        ANDROID_CONTROL_AF_MODE,
        ANDROID_CONTROL_AF_STATE,
        ANDROID_CONTROL_AF_TRIGGER,
        ANDROID_CONTROL_AWB_LOCK,
        ANDROID_CONTROL_AWB_MODE,
        ANDROID_CONTROL_AWB_STATE,
        ANDROID_CONTROL_CAPTURE_INTENT,
        ANDROID_CONTROL_EFFECT_MODE,
        ANDROID_CONTROL_MODE,
        ANDROID_CONTROL_SCENE_MODE,
        ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
        ANDROID_FLASH_MODE,
        ANDROID_FLASH_STATE,
        ANDROID_JPEG_ORIENTATION,
        ANDROID_JPEG_QUALITY,
        ANDROID_JPEG_THUMBNAIL_QUALITY,
        ANDROID_JPEG_THUMBNAIL_SIZE,
        ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
        ANDROID_NOISE_REDUCTION_MODE,
        ANDROID_REQUEST_PIPELINE_DEPTH,
        ANDROID_SCALER_CROP_REGION,
        ANDROID_SENSOR_TIMESTAMP,
        ANDROID_STATISTICS_FACE_DETECT_MODE,
        ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
        ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
        ANDROID_STATISTICS_SCENE_FLICKER};
    UPDATE(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, availableResultKeys,
           ARRAY_SIZE(availableResultKeys));

    UPDATE(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
           AVAILABLE_CHARACTERISTICS_KEYS_3_4.data(),
           AVAILABLE_CHARACTERISTICS_KEYS_3_4.size());

    return OK;
}

status_t ExternalCameraDevice::initCameraControlsCharsKeys(int,
        ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
    /**
     * android.sensor.info.sensitivityRange -> V4L2_CID_ISO_SENSITIVITY
     * android.sensor.info.exposureTimeRange -> V4L2_CID_EXPOSURE_ABSOLUTE
     * android.sensor.info.maxFrameDuration -> TBD
     * android.lens.info.minimumFocusDistance -> V4L2_CID_FOCUS_ABSOLUTE
     * android.lens.info.hyperfocalDistance
     * android.lens.info.availableFocalLengths -> not available?
     */

    // android.control
    // No AE compensation support for now.
    // TODO: V4L2_CID_EXPOSURE_BIAS
    const int32_t controlAeCompensationRange[] = {0, 0};
    UPDATE(ANDROID_CONTROL_AE_COMPENSATION_RANGE, controlAeCompensationRange,
           ARRAY_SIZE(controlAeCompensationRange));
    const camera_metadata_rational_t controlAeCompensationStep[] = {{0, 1}};
    UPDATE(ANDROID_CONTROL_AE_COMPENSATION_STEP, controlAeCompensationStep,
           ARRAY_SIZE(controlAeCompensationStep));

    // TODO: Check V4L2_CID_AUTO_FOCUS_*.
    const uint8_t afAvailableModes[] = {ANDROID_CONTROL_AF_MODE_AUTO,
                                        ANDROID_CONTROL_AF_MODE_OFF};
    UPDATE(ANDROID_CONTROL_AF_AVAILABLE_MODES, afAvailableModes,
           ARRAY_SIZE(afAvailableModes));

    // TODO: V4L2_CID_SCENE_MODE
    const uint8_t availableSceneMode = ANDROID_CONTROL_SCENE_MODE_DISABLED;
    UPDATE(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, &availableSceneMode, 1);

    // TODO: V4L2_CID_3A_LOCK
    const uint8_t aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
    UPDATE(ANDROID_CONTROL_AE_LOCK_AVAILABLE, &aeLockAvailable, 1);
    const uint8_t awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
    UPDATE(ANDROID_CONTROL_AWB_LOCK_AVAILABLE, &awbLockAvailable, 1);

    // TODO: V4L2_CID_ZOOM_*
    const float scalerAvailableMaxDigitalZoom[] = {1};
    UPDATE(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
           scalerAvailableMaxDigitalZoom,
           ARRAY_SIZE(scalerAvailableMaxDigitalZoom));

    return OK;
}

template <size_t SIZE>
status_t ExternalCameraDevice::initOutputCharskeysByFormat(
        ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata,
        uint32_t fourcc, const std::array<int, SIZE>& halFormats,
        int streamConfigTag, int streamConfiguration, int minFrameDuration, int stallDuration) {
    if (mSupportedFormats.empty()) {
        ALOGE("%s: Init supported format list failed", __FUNCTION__);
        return UNKNOWN_ERROR;
    }

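    // Each entry appended below is a (format, width, height, value) tuple: the stream
    // configuration list uses the direction tag as its fourth element, while the
    // min-frame-duration and stall-duration lists use a duration in nanoseconds.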
    std::vector<int32_t> streamConfigurations;
    std::vector<int64_t> minFrameDurations;
    std::vector<int64_t> stallDurations;

    for (const auto& supportedFormat : mSupportedFormats) {
        if (supportedFormat.fourcc != fourcc) {
            // Skip 4CCs not meant for the halFormats
            continue;
        }
        for (const auto& format : halFormats) {
            streamConfigurations.push_back(format);
            streamConfigurations.push_back(supportedFormat.width);
            streamConfigurations.push_back(supportedFormat.height);
            streamConfigurations.push_back(streamConfigTag);
        }

        int64_t minFrameDuration = std::numeric_limits<int64_t>::max();
        for (const auto& fr : supportedFormat.frameRates) {
            // 1000000000LL < (2^32 - 1) and
            // fr.durationNumerator is uint32_t, so no overflow here
            int64_t frameDuration = 1000000000LL * fr.durationNumerator /
                    fr.durationDenominator;
            if (frameDuration < minFrameDuration) {
                minFrameDuration = frameDuration;
            }
        }

        for (const auto& format : halFormats) {
            minFrameDurations.push_back(format);
            minFrameDurations.push_back(supportedFormat.width);
            minFrameDurations.push_back(supportedFormat.height);
            minFrameDurations.push_back(minFrameDuration);
        }

        // The stall duration is 0 for non-jpeg formats. For JPEG format, stall
        // duration can be 0 if JPEG is small. Here we choose 1 sec for JPEG.
        // TODO: b/72261675. Maybe set this dynamically
        for (const auto& format : halFormats) {
            const int64_t NS_TO_SECOND = 1000000000;
            int64_t stall_duration =
                    (format == HAL_PIXEL_FORMAT_BLOB) ? NS_TO_SECOND : 0;
            stallDurations.push_back(format);
            stallDurations.push_back(supportedFormat.width);
            stallDurations.push_back(supportedFormat.height);
            stallDurations.push_back(stall_duration);
        }
    }

    UPDATE(streamConfiguration, streamConfigurations.data(), streamConfigurations.size());

    UPDATE(minFrameDuration, minFrameDurations.data(), minFrameDurations.size());

    UPDATE(stallDuration, stallDurations.data(), stallDurations.size());

    return OK;
}

bool ExternalCameraDevice::calculateMinFps(
        ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
    std::set<int32_t> framerates;
    int32_t minFps = std::numeric_limits<int32_t>::max();

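    // Collect the set of distinct integer frame rates across every supported format;
    // minFps tracks the slowest one seen.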
    for (const auto& supportedFormat : mSupportedFormats) {
        for (const auto& fr : supportedFormat.frameRates) {
            int32_t frameRateInt = static_cast<int32_t>(fr.getDouble());
            if (minFps > frameRateInt) {
                minFps = frameRateInt;
            }
            framerates.insert(frameRateInt);
        }
    }

    std::vector<int32_t> fpsRanges;
    // FPS ranges
    for (const auto& framerate : framerates) {
        // Empirical: webcams often have close to 2x fps error and cannot support fixed fps range
        fpsRanges.push_back(framerate / 2);
        fpsRanges.push_back(framerate);
    }
    minFps /= 2;
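    // The reported maximum frame duration corresponds to the lowest advertised fps lower
    // bound, i.e. 1e9 ns divided by (slowest frame rate / 2).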
    int64_t maxFrameDuration = 1000000000LL / minFps;

    UPDATE(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, fpsRanges.data(),
           fpsRanges.size());

    UPDATE(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, &maxFrameDuration, 1);

    return true;
}

status_t ExternalCameraDevice::initOutputCharsKeys(
        int fd, ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
    initSupportedFormatsLocked(fd);
    if (mSupportedFormats.empty()) {
        ALOGE("%s: Init supported format list failed", __FUNCTION__);
        return UNKNOWN_ERROR;
    }

    bool hasDepth = false;
    bool hasColor = false;

    // For V4L2_PIX_FMT_Z16
    std::array<int, /*size*/ 1> halDepthFormats{{HAL_PIXEL_FORMAT_Y16}};
    // For V4L2_PIX_FMT_MJPEG
    std::array<int, /*size*/ 3> halFormats{{HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
                                            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}};

    for (const auto& supportedFormat : mSupportedFormats) {
        switch (supportedFormat.fourcc) {
            case V4L2_PIX_FMT_Z16:
                hasDepth = true;
                break;
            case V4L2_PIX_FMT_MJPEG:
                hasColor = true;
                break;
            default:
                ALOGW("%s: format %c%c%c%c is not supported!", __FUNCTION__,
                      supportedFormat.fourcc & 0xFF, (supportedFormat.fourcc >> 8) & 0xFF,
                      (supportedFormat.fourcc >> 16) & 0xFF, (supportedFormat.fourcc >> 24) & 0xFF);
        }
    }

    if (hasDepth) {
        initOutputCharskeysByFormat(metadata, V4L2_PIX_FMT_Z16, halDepthFormats,
                ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT,
                ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
                ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
                ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS);
    }
    if (hasColor) {
        initOutputCharskeysByFormat(metadata, V4L2_PIX_FMT_MJPEG, halFormats,
                ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
                ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
                ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
                ANDROID_SCALER_AVAILABLE_STALL_DURATIONS);
    }

    calculateMinFps(metadata);

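    // Use the largest supported resolution to populate the pre-correction/active array
    // and pixel array sizes below.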
    SupportedV4L2Format maximumFormat {.width = 0, .height = 0};
    for (const auto& supportedFormat : mSupportedFormats) {
        if (supportedFormat.width >= maximumFormat.width &&
            supportedFormat.height >= maximumFormat.height) {
            maximumFormat = supportedFormat;
        }
    }
    int32_t activeArraySize[] = {0, 0,
                                 static_cast<int32_t>(maximumFormat.width),
                                 static_cast<int32_t>(maximumFormat.height)};
    UPDATE(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
           activeArraySize, ARRAY_SIZE(activeArraySize));
    UPDATE(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, activeArraySize,
           ARRAY_SIZE(activeArraySize));

    int32_t pixelArraySize[] = {static_cast<int32_t>(maximumFormat.width),
                                static_cast<int32_t>(maximumFormat.height)};
    UPDATE(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, pixelArraySize,
           ARRAY_SIZE(pixelArraySize));
    return OK;
}

#undef ARRAY_SIZE
#undef UPDATE

void ExternalCameraDevice::getFrameRateList(
        int fd, double fpsUpperBound, SupportedV4L2Format* format) {
    format->frameRates.clear();

    v4l2_frmivalenum frameInterval{
        .index = 0,
        .pixel_format = format->fourcc,
        .width = format->width,
        .height = format->height,
    };

    for (frameInterval.index = 0;
         TEMP_FAILURE_RETRY(ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &frameInterval)) == 0;
         ++frameInterval.index) {
        if (frameInterval.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
            if (frameInterval.discrete.numerator != 0) {
                SupportedV4L2Format::FrameRate fr = {
                    frameInterval.discrete.numerator,
                    frameInterval.discrete.denominator};
                double framerate = fr.getDouble();
                if (framerate > fpsUpperBound) {
                    continue;
                }
                ALOGV("index:%d, format:%c%c%c%c, w %d, h %d, framerate %f",
                      frameInterval.index,
                      frameInterval.pixel_format & 0xFF,
                      (frameInterval.pixel_format >> 8) & 0xFF,
                      (frameInterval.pixel_format >> 16) & 0xFF,
                      (frameInterval.pixel_format >> 24) & 0xFF,
                      frameInterval.width, frameInterval.height, framerate);
                format->frameRates.push_back(fr);
            }
        }
    }

    if (format->frameRates.empty()) {
        ALOGE("%s: failed to get supported frame rates for format:%c%c%c%c w %d h %d",
              __FUNCTION__,
              frameInterval.pixel_format & 0xFF,
              (frameInterval.pixel_format >> 8) & 0xFF,
              (frameInterval.pixel_format >> 16) & 0xFF,
              (frameInterval.pixel_format >> 24) & 0xFF,
              frameInterval.width, frameInterval.height);
    }
}

void ExternalCameraDevice::trimSupportedFormats(
        CroppingType cropType,
        /*inout*/ std::vector<SupportedV4L2Format>* pFmts) {
    std::vector<SupportedV4L2Format>& sortedFmts = *pFmts;
    if (cropType == VERTICAL) {
        std::sort(sortedFmts.begin(), sortedFmts.end(),
                [](const SupportedV4L2Format& a, const SupportedV4L2Format& b) -> bool {
                    if (a.width == b.width) {
                        return a.height < b.height;
                    }
                    return a.width < b.width;
                });
    } else {
        std::sort(sortedFmts.begin(), sortedFmts.end(),
                [](const SupportedV4L2Format& a, const SupportedV4L2Format& b) -> bool {
                    if (a.height == b.height) {
                        return a.width < b.width;
                    }
                    return a.height < b.height;
                });
    }

    if (sortedFmts.size() == 0) {
        ALOGE("%s: input format list is empty!", __FUNCTION__);
        return;
    }

    const auto& maxSize = sortedFmts[sortedFmts.size() - 1];
    float maxSizeAr = ASPECT_RATIO(maxSize);

    // Remove formats whose aspect ratio cannot be cropped from the largest size
    std::vector<SupportedV4L2Format> out;
    for (const auto& fmt : sortedFmts) {
        float ar = ASPECT_RATIO(fmt);
        if (isAspectRatioClose(ar, maxSizeAr)) {
            out.push_back(fmt);
        } else if (cropType == HORIZONTAL && ar < maxSizeAr) {
            out.push_back(fmt);
        } else if (cropType == VERTICAL && ar > maxSizeAr) {
            out.push_back(fmt);
        } else {
            ALOGV("%s: size (%d,%d) is removed because it cannot be cropped %s from (%d,%d)",
                  __FUNCTION__, fmt.width, fmt.height,
                  cropType == VERTICAL ? "vertically" : "horizontally",
                  maxSize.width, maxSize.height);
        }
    }
    sortedFmts = out;
}

std::vector<SupportedV4L2Format> ExternalCameraDevice::getCandidateSupportedFormatsLocked(
        int fd, CroppingType cropType,
        const std::vector<ExternalCameraConfig::FpsLimitation>& fpsLimits,
        const std::vector<ExternalCameraConfig::FpsLimitation>& depthFpsLimits,
        const Size& minStreamSize,
        bool depthEnabled) {
    std::vector<SupportedV4L2Format> outFmts;
    struct v4l2_fmtdesc fmtdesc {
        .index = 0,
        .type = V4L2_BUF_TYPE_VIDEO_CAPTURE};
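    // Enumerate pixel formats with VIDIOC_ENUM_FMT; for each supported, non-emulated
    // fourcc, enumerate discrete frame sizes with VIDIOC_ENUM_FRAMESIZES and fill in
    // the per-size frame rates via getFrameRateList().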
    int ret = 0;
    while (ret == 0) {
        ret = TEMP_FAILURE_RETRY(ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc));
        ALOGV("index:%d,ret:%d, format:%c%c%c%c", fmtdesc.index, ret,
              fmtdesc.pixelformat & 0xFF,
              (fmtdesc.pixelformat >> 8) & 0xFF,
              (fmtdesc.pixelformat >> 16) & 0xFF,
              (fmtdesc.pixelformat >> 24) & 0xFF);
        if (ret == 0 && !(fmtdesc.flags & V4L2_FMT_FLAG_EMULATED)) {
            auto it = std::find(
                    kSupportedFourCCs.begin(), kSupportedFourCCs.end(), fmtdesc.pixelformat);
            if (it != kSupportedFourCCs.end()) {
                // Found supported format
                v4l2_frmsizeenum frameSize {
                    .index = 0,
                    .pixel_format = fmtdesc.pixelformat};
                for (; TEMP_FAILURE_RETRY(ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frameSize)) == 0;
                     ++frameSize.index) {
                    if (frameSize.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
                        ALOGV("index:%d, format:%c%c%c%c, w %d, h %d", frameSize.index,
                              fmtdesc.pixelformat & 0xFF,
                              (fmtdesc.pixelformat >> 8) & 0xFF,
                              (fmtdesc.pixelformat >> 16) & 0xFF,
                              (fmtdesc.pixelformat >> 24) & 0xFF,
                              frameSize.discrete.width, frameSize.discrete.height);
                        // Disregard formats with h > w so that all aspect ratios (h/w) are <= 1.0.
                        // This will simplify the crop/scaling logic down the road.
                        if (frameSize.discrete.height > frameSize.discrete.width) {
                            continue;
                        }
                        // Discard all formats that are smaller than minStreamSize
                        if (frameSize.discrete.width < minStreamSize.width
                            || frameSize.discrete.height < minStreamSize.height) {
                            continue;
                        }
                        SupportedV4L2Format format {
                            .width = frameSize.discrete.width,
                            .height = frameSize.discrete.height,
                            .fourcc = fmtdesc.pixelformat
                        };

                        if (format.fourcc == V4L2_PIX_FMT_Z16 && depthEnabled) {
                            updateFpsBounds(fd, cropType, depthFpsLimits, format, outFmts);
                        } else {
                            updateFpsBounds(fd, cropType, fpsLimits, format, outFmts);
                        }
                    }
                }
            }
        }
        fmtdesc.index++;
    }
    trimSupportedFormats(cropType, &outFmts);
    return outFmts;
}

void ExternalCameraDevice::updateFpsBounds(
        int fd, CroppingType cropType,
        const std::vector<ExternalCameraConfig::FpsLimitation>& fpsLimits, SupportedV4L2Format format,
        std::vector<SupportedV4L2Format>& outFmts) {
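    // Pick the fps upper bound from the first configured limit whose size can contain this
    // format (compare width for VERTICAL cropping, height for HORIZONTAL); formats with no
    // applicable limit are dropped.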
    double fpsUpperBound = -1.0;
    for (const auto& limit : fpsLimits) {
        if (cropType == VERTICAL) {
            if (format.width <= limit.size.width) {
                fpsUpperBound = limit.fpsUpperBound;
                break;
            }
        } else {  // HORIZONTAL
            if (format.height <= limit.size.height) {
                fpsUpperBound = limit.fpsUpperBound;
                break;
            }
        }
    }
    if (fpsUpperBound < 0.f) {
        return;
    }

    getFrameRateList(fd, fpsUpperBound, &format);
    if (!format.frameRates.empty()) {
        outFmts.push_back(format);
    }
}

void ExternalCameraDevice::initSupportedFormatsLocked(int fd) {
    std::vector<SupportedV4L2Format> horizontalFmts = getCandidateSupportedFormatsLocked(
            fd, HORIZONTAL, mCfg.fpsLimits, mCfg.depthFpsLimits, mCfg.minStreamSize, mCfg.depthEnabled);
    std::vector<SupportedV4L2Format> verticalFmts = getCandidateSupportedFormatsLocked(
            fd, VERTICAL, mCfg.fpsLimits, mCfg.depthFpsLimits, mCfg.minStreamSize, mCfg.depthEnabled);

    size_t horiSize = horizontalFmts.size();
    size_t vertSize = verticalFmts.size();

    if (horiSize == 0 && vertSize == 0) {
        ALOGE("%s: cannot find suitable cropping type!", __FUNCTION__);
        return;
    }

    if (horiSize == 0) {
        mSupportedFormats = verticalFmts;
        mCroppingType = VERTICAL;
        return;
    } else if (vertSize == 0) {
        mSupportedFormats = horizontalFmts;
        mCroppingType = HORIZONTAL;
        return;
    }

    const auto& maxHoriSize = horizontalFmts[horizontalFmts.size() - 1];
    const auto& maxVertSize = verticalFmts[verticalFmts.size() - 1];
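    // Both candidate lists were sorted by trimSupportedFormats(), so the last element of
    // each is the largest size for that cropping type.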

    // Try to keep the largest possible output size.
    // When they are the same or ambiguous, pick the one that supports more sizes.
    if (maxHoriSize.width == maxVertSize.width &&
        maxHoriSize.height == maxVertSize.height) {
        if (horiSize > vertSize) {
            mSupportedFormats = horizontalFmts;
            mCroppingType = HORIZONTAL;
        } else {
            mSupportedFormats = verticalFmts;
            mCroppingType = VERTICAL;
        }
    } else if (maxHoriSize.width >= maxVertSize.width &&
               maxHoriSize.height >= maxVertSize.height) {
        mSupportedFormats = horizontalFmts;
        mCroppingType = HORIZONTAL;
    } else if (maxHoriSize.width <= maxVertSize.width &&
               maxHoriSize.height <= maxVertSize.height) {
        mSupportedFormats = verticalFmts;
        mCroppingType = VERTICAL;
    } else {
        if (horiSize > vertSize) {
            mSupportedFormats = horizontalFmts;
            mCroppingType = HORIZONTAL;
        } else {
            mSupportedFormats = verticalFmts;
            mCroppingType = VERTICAL;
        }
    }
}

sp<ExternalCameraDeviceSession> ExternalCameraDevice::createSession(
        const sp<ICameraDeviceCallback>& cb,
        const ExternalCameraConfig& cfg,
        const std::vector<SupportedV4L2Format>& sortedFormats,
        const CroppingType& croppingType,
        const common::V1_0::helper::CameraMetadata& chars,
        const std::string& cameraId,
        unique_fd v4l2Fd) {
    return new ExternalCameraDeviceSession(
            cb, cfg, sortedFormats, croppingType, chars, cameraId, std::move(v4l2Fd));
}

}  // namespace implementation
}  // namespace V3_4
}  // namespace device
}  // namespace camera
}  // namespace hardware
}  // namespace android