1 /*
2 * Copyright (C) 2013 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 /*
18 * Contains implementation of a class EmulatedFakeCamera3 that encapsulates
19 * functionality of an advanced fake camera.
20 */
21
22 #include <cstdint>
23 #include <inttypes.h>
24
25 //#define LOG_NDEBUG 0
26 //#define LOG_NNDEBUG 0
27 #define LOG_TAG "EmulatedCamera_FakeCamera3"
28 #include <cutils/properties.h>
29 #include <utils/Log.h>
30
31 #include <ui/Fence.h>
32 #include "EmulatedCameraFactory.h"
33 #include "EmulatedFakeCamera3.h"
34 #include "GrallocModule.h"
35
36 #include <cmath>
37 #include "fake-pipeline2/JpegCompressor.h"
38 #include "fake-pipeline2/Sensor.h"
39
40 #include <vector>
41
42 #if defined(LOG_NNDEBUG) && LOG_NNDEBUG == 0
43 #define ALOGVV ALOGV
44 #else
45 #define ALOGVV(...) ((void)0)
46 #endif
47
48 namespace android {
49
/**
 * Constants for camera capabilities
 */

// Time-unit multipliers expressed in nanoseconds (the unit of nsecs_t):
// USEC = 1 microsecond, MSEC = 1 millisecond.
const int64_t USEC = 1000LL;
const int64_t MSEC = USEC * 1000LL;
// const int64_t SEC = MSEC * 1000LL;

// Pixel formats accepted by configureStreams(); any other stream format is
// rejected with BAD_VALUE.
const int32_t EmulatedFakeCamera3::kAvailableFormats[] = {
    HAL_PIXEL_FORMAT_RAW16, HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_RGBA_8888,
    HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
    // These are handled by YCbCr_420_888
    // HAL_PIXEL_FORMAT_YV12,
    // HAL_PIXEL_FORMAT_YCrCb_420_SP,
    HAL_PIXEL_FORMAT_YCbCr_420_888, HAL_PIXEL_FORMAT_Y16};

/**
 * 3A constants
 */

// Default exposure and gain targets for different scenarios
const nsecs_t EmulatedFakeCamera3::kNormalExposureTime = 10 * MSEC;
const nsecs_t EmulatedFakeCamera3::kFacePriorityExposureTime = 30 * MSEC;
const int EmulatedFakeCamera3::kNormalSensitivity = 100;
const int EmulatedFakeCamera3::kFacePrioritySensitivity = 400;
// Fraction of exposure error corrected per frame by the fake AE loop
// (presumably; the loop itself lives in process3A — verify there).
const float EmulatedFakeCamera3::kExposureTrackRate = 0.1;
const int EmulatedFakeCamera3::kPrecaptureMinFrames = 10;
const int EmulatedFakeCamera3::kStableAeMaxFrames = 100;
// Bounds for simulated exposure wander — units assumed to be EV steps;
// TODO confirm against the AE implementation.
const float EmulatedFakeCamera3::kExposureWanderMin = -2;
const float EmulatedFakeCamera3::kExposureWanderMax = 1;
80
81 /**
82 * Camera device lifecycle methods
83 */
84
EmulatedFakeCamera3(int cameraId,bool facingBack,struct hw_module_t * module)85 EmulatedFakeCamera3::EmulatedFakeCamera3(int cameraId, bool facingBack,
86 struct hw_module_t *module)
87 : EmulatedCamera3(cameraId, module), mFacingBack(facingBack) {
88 ALOGI("Constructing emulated fake camera 3: ID %d, facing %s", mCameraID,
89 facingBack ? "back" : "front");
90
91 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
92 mDefaultTemplates[i] = NULL;
93 }
94 }
95
~EmulatedFakeCamera3()96 EmulatedFakeCamera3::~EmulatedFakeCamera3() {
97 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
98 if (mDefaultTemplates[i] != NULL) {
99 free_camera_metadata(mDefaultTemplates[i]);
100 }
101 }
102 }
103
Initialize(const cuttlefish::CameraDefinition & params)104 status_t EmulatedFakeCamera3::Initialize(const cuttlefish::CameraDefinition ¶ms) {
105 ALOGV("%s: E", __FUNCTION__);
106 status_t res;
107
108 if (mStatus != STATUS_ERROR) {
109 ALOGE("%s: Already initialized!", __FUNCTION__);
110 return INVALID_OPERATION;
111 }
112
113 res = getCameraCapabilities();
114 if (res != OK) {
115 ALOGE("%s: Unable to get camera capabilities: %s (%d)", __FUNCTION__,
116 strerror(-res), res);
117 return res;
118 }
119
120 res = constructStaticInfo(params);
121 if (res != OK) {
122 ALOGE("%s: Unable to allocate static info: %s (%d)", __FUNCTION__,
123 strerror(-res), res);
124 return res;
125 }
126
127 return EmulatedCamera3::Initialize(params);
128 }
129
connectCamera(hw_device_t ** device)130 status_t EmulatedFakeCamera3::connectCamera(hw_device_t **device) {
131 ALOGV("%s: E", __FUNCTION__);
132 Mutex::Autolock l(mLock);
133 status_t res;
134
135 if (mStatus != STATUS_CLOSED) {
136 ALOGE("%s: Can't connect in state %d", __FUNCTION__, mStatus);
137 return INVALID_OPERATION;
138 }
139
140 mSensor = new Sensor(mSensorWidth, mSensorHeight);
141 mSensor->setSensorListener(this);
142
143 res = mSensor->startUp();
144 if (res != NO_ERROR) return res;
145
146 mReadoutThread = new ReadoutThread(this);
147 mJpegCompressor = new JpegCompressor();
148
149 res = mReadoutThread->run("EmuCam3::readoutThread");
150 if (res != NO_ERROR) return res;
151
152 // Initialize fake 3A
153
154 mControlMode = ANDROID_CONTROL_MODE_AUTO;
155 mFacePriority = false;
156 mAeMode = ANDROID_CONTROL_AE_MODE_ON;
157 mAfMode = ANDROID_CONTROL_AF_MODE_AUTO;
158 mAwbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
159 mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
160 mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
161 mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
162 mAeCounter = 0;
163 mAeTargetExposureTime = kNormalExposureTime;
164 mAeCurrentExposureTime = kNormalExposureTime;
165 mAeCurrentSensitivity = kNormalSensitivity;
166
167 return EmulatedCamera3::connectCamera(device);
168 }
169
/*
 * Closes the device: shuts the sensor down, stops the readout thread, and
 * frees per-stream private data. Idempotent — returns OK immediately if the
 * device is already closed.
 */
status_t EmulatedFakeCamera3::closeCamera() {
  ALOGV("%s: E", __FUNCTION__);
  status_t res;
  {
    Mutex::Autolock l(mLock);
    if (mStatus == STATUS_CLOSED) return OK;

    res = mSensor->shutDown();
    if (res != NO_ERROR) {
      ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
      return res;
    }
    mSensor.clear();

    mReadoutThread->requestExit();
  }

  // Join with mLock released — presumably the readout thread needs the lock
  // to finish exiting; joining while holding it could deadlock.
  // NOTE(review): confirm against ReadoutThread::threadLoop.
  mReadoutThread->join();

  {
    Mutex::Autolock l(mLock);
    // Clear out private stream information; the PrivateStreamInfo objects
    // were allocated by configureStreams().
    for (StreamIterator s = mStreams.begin(); s != mStreams.end(); s++) {
      PrivateStreamInfo *privStream =
          static_cast<PrivateStreamInfo *>((*s)->priv);
      delete privStream;
      (*s)->priv = NULL;
    }
    mStreams.clear();
    mReadoutThread.clear();
  }

  return EmulatedCamera3::closeCamera();
}
204
getCameraInfo(struct camera_info * info)205 status_t EmulatedFakeCamera3::getCameraInfo(struct camera_info *info) {
206 info->facing = mFacingBack ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT;
207 info->orientation =
208 EmulatedCameraFactory::Instance().getFakeCameraOrientation();
209 info->resource_cost = 100;
210 info->conflicting_devices = NULL;
211 info->conflicting_devices_length = 0;
212 return EmulatedCamera3::getCameraInfo(info);
213 }
214
setTorchMode(bool enabled)215 status_t EmulatedFakeCamera3::setTorchMode(bool enabled) {
216 if (!mFacingBack) {
217 ALOGE("%s: Front camera does not have flash unit", __FUNCTION__);
218 return INVALID_OPERATION;
219 }
220 EmulatedCameraFactory::Instance().onTorchModeStatusChanged(
221 mCameraID, enabled ? TORCH_MODE_STATUS_AVAILABLE_ON
222 : TORCH_MODE_STATUS_AVAILABLE_OFF);
223 return NO_ERROR;
224 }
225
226 /**
227 * Camera3 interface methods
228 */
229
configureStreams(camera3_stream_configuration * streamList)230 status_t EmulatedFakeCamera3::configureStreams(
231 camera3_stream_configuration *streamList) {
232 Mutex::Autolock l(mLock);
233 ALOGV("%s: %d streams", __FUNCTION__, streamList->num_streams);
234
235 if (mStatus != STATUS_OPEN && mStatus != STATUS_READY) {
236 ALOGE("%s: Cannot configure streams in state %d", __FUNCTION__, mStatus);
237 return NO_INIT;
238 }
239
240 /**
241 * Validity-check input list.
242 */
243 if (streamList == NULL) {
244 ALOGE("%s: NULL stream configuration", __FUNCTION__);
245 return BAD_VALUE;
246 }
247
248 if (streamList->streams == NULL) {
249 ALOGE("%s: NULL stream list", __FUNCTION__);
250 return BAD_VALUE;
251 }
252
253 if (streamList->num_streams < 1) {
254 ALOGE("%s: Bad number of streams requested: %d", __FUNCTION__,
255 streamList->num_streams);
256 return BAD_VALUE;
257 }
258
259 camera3_stream_t *inputStream = NULL;
260 for (size_t i = 0; i < streamList->num_streams; i++) {
261 camera3_stream_t *newStream = streamList->streams[i];
262
263 if (newStream == NULL) {
264 ALOGE("%s: Stream index %zu was NULL", __FUNCTION__, i);
265 return BAD_VALUE;
266 }
267
268 ALOGV("%s: Stream %p (id %zu), type %d, usage 0x%x, format 0x%x",
269 __FUNCTION__, newStream, i, newStream->stream_type, newStream->usage,
270 newStream->format);
271
272 if (newStream->stream_type == CAMERA3_STREAM_INPUT ||
273 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
274 if (inputStream != NULL) {
275 ALOGE("%s: Multiple input streams requested!", __FUNCTION__);
276 return BAD_VALUE;
277 }
278 inputStream = newStream;
279 }
280
281 bool validFormat = false;
282 for (size_t f = 0;
283 f < sizeof(kAvailableFormats) / sizeof(kAvailableFormats[0]); f++) {
284 if (newStream->format == kAvailableFormats[f]) {
285 validFormat = true;
286 break;
287 }
288 }
289 if (!validFormat) {
290 ALOGE("%s: Unsupported stream format 0x%x requested", __FUNCTION__,
291 newStream->format);
292 return BAD_VALUE;
293 }
294 }
295 mInputStream = inputStream;
296
297 /**
298 * Initially mark all existing streams as not alive
299 */
300 for (StreamIterator s = mStreams.begin(); s != mStreams.end(); ++s) {
301 PrivateStreamInfo *privStream =
302 static_cast<PrivateStreamInfo *>((*s)->priv);
303 privStream->alive = false;
304 }
305
306 /**
307 * Find new streams and mark still-alive ones
308 */
309 for (size_t i = 0; i < streamList->num_streams; i++) {
310 camera3_stream_t *newStream = streamList->streams[i];
311 if (newStream->priv == NULL) {
312 // New stream, construct info
313 PrivateStreamInfo *privStream = new PrivateStreamInfo();
314 privStream->alive = true;
315
316 newStream->max_buffers = kMaxBufferCount;
317 newStream->priv = privStream;
318 mStreams.push_back(newStream);
319 } else {
320 // Existing stream, mark as still alive.
321 PrivateStreamInfo *privStream =
322 static_cast<PrivateStreamInfo *>(newStream->priv);
323 privStream->alive = true;
324 }
325 // Always update usage and max buffers
326 newStream->max_buffers = kMaxBufferCount;
327 switch (newStream->stream_type) {
328 case CAMERA3_STREAM_OUTPUT:
329 newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
330 break;
331 case CAMERA3_STREAM_INPUT:
332 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
333 break;
334 case CAMERA3_STREAM_BIDIRECTIONAL:
335 newStream->usage =
336 GRALLOC_USAGE_HW_CAMERA_READ | GRALLOC_USAGE_HW_CAMERA_WRITE;
337 break;
338 }
339 }
340
341 /**
342 * Reap the dead streams
343 */
344 for (StreamIterator s = mStreams.begin(); s != mStreams.end();) {
345 PrivateStreamInfo *privStream =
346 static_cast<PrivateStreamInfo *>((*s)->priv);
347 if (!privStream->alive) {
348 (*s)->priv = NULL;
349 delete privStream;
350 s = mStreams.erase(s);
351 } else {
352 ++s;
353 }
354 }
355
356 /**
357 * Can't reuse settings across configure call
358 */
359 mPrevSettings.clear();
360
361 return OK;
362 }
363
/*
 * Rejects buffer registration: that entry point was removed in camera HAL
 * v3.2 and must never be invoked by a framework speaking the newer API.
 */
status_t EmulatedFakeCamera3::registerStreamBuffers(
    const camera3_stream_buffer_set * /*bufferSet*/) {
  ALOGV("%s: E", __FUNCTION__);
  Mutex::Autolock l(mLock);

  // Should not be called in HAL versions >= 3.2

  ALOGE("%s: Should not be invoked on new HALs!", __FUNCTION__);
  return NO_INIT;
}
374
/*
 * Builds (and caches) the default request-settings buffer for the given
 * CAMERA3_TEMPLATE_* type. Returns NULL for an out-of-range template, or for
 * any non-preview template when BACKWARD_COMPATIBLE is absent. The returned
 * pointer is owned by mDefaultTemplates and freed in the destructor, so it
 * remains valid for the lifetime of the device as HAL3 requires.
 */
const camera_metadata_t *EmulatedFakeCamera3::constructDefaultRequestSettings(
    int type) {
  ALOGV("%s: E", __FUNCTION__);
  Mutex::Autolock l(mLock);

  if (type < 0 || type >= CAMERA3_TEMPLATE_COUNT) {
    ALOGE("%s: Unknown request settings template: %d", __FUNCTION__, type);
    return NULL;
  }

  if (!hasCapability(BACKWARD_COMPATIBLE) && type != CAMERA3_TEMPLATE_PREVIEW) {
    ALOGE("%s: Template %d not supported w/o BACKWARD_COMPATIBLE capability",
          __FUNCTION__, type);
    return NULL;
  }

  /**
   * Cache is not just an optimization - pointer returned has to live at
   * least as long as the camera device instance does.
   */
  if (mDefaultTemplates[type] != NULL) {
    return mDefaultTemplates[type];
  }

  CameraMetadata settings;

  /** android.request */

  static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
  settings.update(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);

  static const int32_t id = 0;
  settings.update(ANDROID_REQUEST_ID, &id, 1);

  static const int32_t frameCount = 0;
  settings.update(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1);

  /** android.lens */

  static const float focalLength = 5.0f;
  settings.update(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1);

  if (hasCapability(BACKWARD_COMPATIBLE)) {
    static const float focusDistance = 0;
    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1);

    static const float aperture = 2.8f;
    settings.update(ANDROID_LENS_APERTURE, &aperture, 1);

    static const float filterDensity = 0;
    settings.update(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1);

    static const uint8_t opticalStabilizationMode =
        ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
                    &opticalStabilizationMode, 1);

    // FOCUS_RANGE set only in frame
  }

  /** android.sensor */

  if (hasCapability(MANUAL_SENSOR)) {
    static const int64_t exposureTime = 10 * MSEC;
    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1);

    static const int64_t frameDuration = 33333333L;  // 1/30 s
    settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);

    static const int32_t sensitivity = 100;
    settings.update(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1);
  }

  // TIMESTAMP set only in frame

  /** android.flash */

  if (hasCapability(BACKWARD_COMPATIBLE)) {
    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);

    static const uint8_t flashPower = 10;
    settings.update(ANDROID_FLASH_FIRING_POWER, &flashPower, 1);

    static const int64_t firingTime = 0;
    settings.update(ANDROID_FLASH_FIRING_TIME, &firingTime, 1);
  }

  /** Processing block modes */
  // Still-capture-style templates get HIGH_QUALITY processing; preview/video
  // (and anything unknown) get FAST.
  if (hasCapability(MANUAL_POST_PROCESSING)) {
    uint8_t hotPixelMode = 0;
    uint8_t demosaicMode = 0;
    uint8_t noiseMode = 0;
    uint8_t shadingMode = 0;
    uint8_t colorMode = 0;
    uint8_t tonemapMode = 0;
    uint8_t edgeMode = 0;
    switch (type) {
      case CAMERA3_TEMPLATE_STILL_CAPTURE:
        // fall-through
      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
        // fall-through
      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
        hotPixelMode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY;
        demosaicMode = ANDROID_DEMOSAIC_MODE_HIGH_QUALITY;
        noiseMode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
        shadingMode = ANDROID_SHADING_MODE_HIGH_QUALITY;
        colorMode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
        tonemapMode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
        edgeMode = ANDROID_EDGE_MODE_HIGH_QUALITY;
        break;
      case CAMERA3_TEMPLATE_PREVIEW:
        // fall-through
      case CAMERA3_TEMPLATE_VIDEO_RECORD:
        // fall-through
      default:
        hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
        demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
        noiseMode = ANDROID_NOISE_REDUCTION_MODE_FAST;
        shadingMode = ANDROID_SHADING_MODE_FAST;
        colorMode = ANDROID_COLOR_CORRECTION_MODE_FAST;
        tonemapMode = ANDROID_TONEMAP_MODE_FAST;
        edgeMode = ANDROID_EDGE_MODE_FAST;
        break;
    }
    settings.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
    settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noiseMode, 1);
    settings.update(ANDROID_SHADING_MODE, &shadingMode, 1);
    settings.update(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1);
    settings.update(ANDROID_TONEMAP_MODE, &tonemapMode, 1);
    settings.update(ANDROID_EDGE_MODE, &edgeMode, 1);
  }

  /** android.colorCorrection */

  if (hasCapability(MANUAL_POST_PROCESSING)) {
    // Identity color transform and unity gains.
    static const camera_metadata_rational colorTransform[9] = {
        {1, 1}, {0, 1}, {0, 1}, {0, 1}, {1, 1}, {0, 1}, {0, 1}, {0, 1}, {1, 1}};
    settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9);

    static const float colorGains[4] = {1.0f, 1.0f, 1.0f, 1.0f};
    settings.update(ANDROID_COLOR_CORRECTION_GAINS, colorGains, 4);
  }

  /** android.tonemap */

  if (hasCapability(MANUAL_POST_PROCESSING)) {
    // Linear tonemap curve (two control points: (0,0) and (1,1)).
    static const float tonemapCurve[4] = {0.f, 0.f, 1.f, 1.f};
    settings.update(ANDROID_TONEMAP_CURVE_RED, tonemapCurve, 4);
    settings.update(ANDROID_TONEMAP_CURVE_GREEN, tonemapCurve, 4);
    settings.update(ANDROID_TONEMAP_CURVE_BLUE, tonemapCurve, 4);
  }

  /** android.scaler */
  if (hasCapability(BACKWARD_COMPATIBLE)) {
    // NOTE(review): a function-local static initialized from instance members
    // captures the first instance's sensor size for all cameras — confirm
    // this is intended when multiple fake cameras differ in resolution.
    static const int32_t cropRegion[4] = {0, 0, mSensorWidth, mSensorHeight};
    settings.update(ANDROID_SCALER_CROP_REGION, cropRegion, 4);
  }

  /** android.jpeg */
  if (hasCapability(BACKWARD_COMPATIBLE)) {
    static const uint8_t jpegQuality = 80;
    settings.update(ANDROID_JPEG_QUALITY, &jpegQuality, 1);

    static const int32_t thumbnailSize[2] = {640, 480};
    settings.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);

    static const uint8_t thumbnailQuality = 80;
    settings.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1);

    static const double gpsCoordinates[2] = {0, 0};
    settings.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 2);

    static const uint8_t gpsProcessingMethod[32] = "None";
    settings.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod,
                    32);

    static const int64_t gpsTimestamp = 0;
    settings.update(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1);

    static const int32_t jpegOrientation = 0;
    settings.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);
  }

  /** android.stats */

  if (hasCapability(BACKWARD_COMPATIBLE)) {
    static const uint8_t faceDetectMode =
        ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);

    static const uint8_t hotPixelMapMode =
        ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
    settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
  }

  // faceRectangles, faceScores, faceLandmarks, faceIds, histogram,
  // sharpnessMap only in frames

  /** android.control */

  // Map the template type to its capture intent.
  uint8_t controlIntent = 0;
  switch (type) {
    case CAMERA3_TEMPLATE_PREVIEW:
      controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
      break;
    case CAMERA3_TEMPLATE_STILL_CAPTURE:
      controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
      break;
    case CAMERA3_TEMPLATE_VIDEO_RECORD:
      controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
      break;
    case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
      controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
      break;
    case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
      controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
      break;
    case CAMERA3_TEMPLATE_MANUAL:
      controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
      break;
    default:
      controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
      break;
  }
  settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);

  // The MANUAL template disables 3A entirely.
  const uint8_t controlMode = (type == CAMERA3_TEMPLATE_MANUAL)
                                  ? ANDROID_CONTROL_MODE_OFF
                                  : ANDROID_CONTROL_MODE_AUTO;
  settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);

  // Video templates request a fixed 30fps; everything else allows 5-30.
  int32_t aeTargetFpsRange[2] = {5, 30};
  if (type == CAMERA3_TEMPLATE_VIDEO_RECORD ||
      type == CAMERA3_TEMPLATE_VIDEO_SNAPSHOT) {
    aeTargetFpsRange[0] = 30;
  }
  settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2);

  if (hasCapability(BACKWARD_COMPATIBLE)) {
    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);

    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);

    const uint8_t aeMode = (type == CAMERA3_TEMPLATE_MANUAL)
                               ? ANDROID_CONTROL_AE_MODE_OFF
                               : ANDROID_CONTROL_AE_MODE_ON;
    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);

    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);

    // All-zero region means "no specific metering region".
    static const int32_t controlRegions[5] = {0, 0, 0, 0, 0};
    settings.update(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5);

    static const int32_t aeExpCompensation = 0;
    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
                    &aeExpCompensation, 1);

    static const uint8_t aeAntibandingMode =
        ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1);

    static const uint8_t aePrecaptureTrigger =
        ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger,
                    1);

    const uint8_t awbMode = (type == CAMERA3_TEMPLATE_MANUAL)
                                ? ANDROID_CONTROL_AWB_MODE_OFF
                                : ANDROID_CONTROL_AWB_MODE_AUTO;
    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);

    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);

    // AF mode: only the back camera can focus; pick continuous modes suited
    // to each template, OFF for MANUAL and for the front camera.
    uint8_t afMode = 0;

    if (mFacingBack) {
      switch (type) {
        case CAMERA3_TEMPLATE_PREVIEW:
          afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
          break;
        case CAMERA3_TEMPLATE_STILL_CAPTURE:
          afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
          break;
        case CAMERA3_TEMPLATE_VIDEO_RECORD:
          afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
          break;
        case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
          afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
          break;
        case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
          afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
          break;
        case CAMERA3_TEMPLATE_MANUAL:
          afMode = ANDROID_CONTROL_AF_MODE_OFF;
          break;
        default:
          afMode = ANDROID_CONTROL_AF_MODE_AUTO;
          break;
      }
    } else {
      afMode = ANDROID_CONTROL_AF_MODE_OFF;
    }
    settings.update(ANDROID_CONTROL_AF_MODE, &afMode, 1);

    settings.update(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5);

    static const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
    settings.update(ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1);

    static const uint8_t vstabMode =
        ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vstabMode, 1);

    static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);

    static const uint8_t lensShadingMapMode =
        ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
                    &lensShadingMapMode, 1);

    static const uint8_t aberrationMode =
        ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
    settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &aberrationMode,
                    1);

    static const int32_t testPatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
    settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPatternMode, 1);
  }

  // Transfer ownership of the buffer into the cache and return it.
  mDefaultTemplates[type] = settings.release();

  return mDefaultTemplates[type];
}
715
processCaptureRequest(camera3_capture_request * request)716 status_t EmulatedFakeCamera3::processCaptureRequest(
717 camera3_capture_request *request) {
718 Mutex::Autolock l(mLock);
719 status_t res;
720
721 /** Validation */
722
723 if (mStatus < STATUS_READY) {
724 ALOGE("%s: Can't submit capture requests in state %d", __FUNCTION__,
725 mStatus);
726 return INVALID_OPERATION;
727 }
728
729 if (request == NULL) {
730 ALOGE("%s: NULL request!", __FUNCTION__);
731 return BAD_VALUE;
732 }
733
734 uint32_t frameNumber = request->frame_number;
735
736 if (request->settings == NULL && mPrevSettings.isEmpty()) {
737 ALOGE(
738 "%s: Request %d: NULL settings for first request after"
739 "configureStreams()",
740 __FUNCTION__, frameNumber);
741 return BAD_VALUE;
742 }
743
744 if (request->input_buffer != NULL &&
745 request->input_buffer->stream != mInputStream) {
746 ALOGE("%s: Request %d: Input buffer not from input stream!", __FUNCTION__,
747 frameNumber);
748 ALOGV("%s: Bad stream %p, expected: %p", __FUNCTION__,
749 request->input_buffer->stream, mInputStream);
750 ALOGV("%s: Bad stream type %d, expected stream type %d", __FUNCTION__,
751 request->input_buffer->stream->stream_type,
752 mInputStream ? mInputStream->stream_type : -1);
753
754 return BAD_VALUE;
755 }
756
757 if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
758 ALOGE("%s: Request %d: No output buffers provided!", __FUNCTION__,
759 frameNumber);
760 return BAD_VALUE;
761 }
762
763 // Validate all buffers, starting with input buffer if it's given
764
765 ssize_t idx;
766 const camera3_stream_buffer_t *b;
767 if (request->input_buffer != NULL) {
768 idx = -1;
769 b = request->input_buffer;
770 } else {
771 idx = 0;
772 b = request->output_buffers;
773 }
774 do {
775 PrivateStreamInfo *priv = static_cast<PrivateStreamInfo *>(b->stream->priv);
776 if (priv == NULL) {
777 ALOGE("%s: Request %d: Buffer %zu: Unconfigured stream!", __FUNCTION__,
778 frameNumber, idx);
779 return BAD_VALUE;
780 }
781 if (!priv->alive) {
782 ALOGE("%s: Request %d: Buffer %zu: Dead stream!", __FUNCTION__,
783 frameNumber, idx);
784 return BAD_VALUE;
785 }
786 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
787 ALOGE("%s: Request %d: Buffer %zu: Status not OK!", __FUNCTION__,
788 frameNumber, idx);
789 return BAD_VALUE;
790 }
791 if (b->release_fence != -1) {
792 ALOGE("%s: Request %d: Buffer %zu: Has a release fence!", __FUNCTION__,
793 frameNumber, idx);
794 return BAD_VALUE;
795 }
796 if (b->buffer == NULL) {
797 ALOGE("%s: Request %d: Buffer %zu: NULL buffer handle!", __FUNCTION__,
798 frameNumber, idx);
799 return BAD_VALUE;
800 }
801 idx++;
802 b = &(request->output_buffers[idx]);
803 } while (idx < (ssize_t)request->num_output_buffers);
804
805 // TODO: Validate settings parameters
806
807 /**
808 * Start processing this request
809 */
810
811 mStatus = STATUS_ACTIVE;
812
813 CameraMetadata settings;
814
815 if (request->settings == NULL) {
816 settings.acquire(mPrevSettings);
817 } else {
818 settings = request->settings;
819 }
820
821 res = process3A(settings);
822 if (res != OK) {
823 return res;
824 }
825
826 // TODO: Handle reprocessing
827
828 /**
829 * Get ready for sensor config
830 */
831
832 nsecs_t exposureTime;
833 nsecs_t frameDuration;
834 uint32_t sensitivity;
835 bool needJpeg = false;
836 camera_metadata_entry_t entry;
837
838 entry = settings.find(ANDROID_SENSOR_EXPOSURE_TIME);
839 exposureTime =
840 (entry.count > 0) ? entry.data.i64[0] : Sensor::kExposureTimeRange[0];
841 entry = settings.find(ANDROID_SENSOR_FRAME_DURATION);
842 frameDuration =
843 (entry.count > 0) ? entry.data.i64[0] : Sensor::kFrameDurationRange[0];
844 entry = settings.find(ANDROID_SENSOR_SENSITIVITY);
845 sensitivity =
846 (entry.count > 0) ? entry.data.i32[0] : Sensor::kSensitivityRange[0];
847
848 if (exposureTime > frameDuration) {
849 frameDuration = exposureTime + Sensor::kMinVerticalBlank;
850 settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
851 }
852
853 Buffers *sensorBuffers = new Buffers();
854 HalBufferVector *buffers = new HalBufferVector();
855
856 sensorBuffers->setCapacity(request->num_output_buffers);
857 buffers->setCapacity(request->num_output_buffers);
858
859 // Process all the buffers we got for output, constructing internal buffer
860 // structures for them, and lock them for writing.
861 for (size_t i = 0; i < request->num_output_buffers; i++) {
862 const camera3_stream_buffer &srcBuf = request->output_buffers[i];
863 StreamBuffer destBuf;
864 destBuf.streamId = kGenericStreamId;
865 destBuf.width = srcBuf.stream->width;
866 destBuf.height = srcBuf.stream->height;
867 // For GCE, IMPLEMENTATION_DEFINED is always RGBx_8888
868 destBuf.format =
869 (srcBuf.stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)
870 ? HAL_PIXEL_FORMAT_RGBA_8888
871 : srcBuf.stream->format;
872 destBuf.stride = srcBuf.stream->width;
873 destBuf.dataSpace = srcBuf.stream->data_space;
874 destBuf.buffer = srcBuf.buffer;
875 destBuf.importedBuffer = NULL;
876
877 if (destBuf.format == HAL_PIXEL_FORMAT_BLOB) {
878 needJpeg = true;
879 }
880
881 // Wait on fence
882 sp<Fence> bufferAcquireFence = new Fence(srcBuf.acquire_fence);
883 res = bufferAcquireFence->wait(kFenceTimeoutMs);
884 if (res == TIMED_OUT) {
885 ALOGE("%s: Request %d: Buffer %zu: Fence timed out after %d ms",
886 __FUNCTION__, frameNumber, i, kFenceTimeoutMs);
887 }
888
889 if (res == OK) {
890 res = GrallocModule::getInstance().import(*(destBuf.buffer),
891 &destBuf.importedBuffer);
892 if (res != OK) {
893 ALOGE("%s: Request %d: Buffer %zu: Unable to import buffer",
894 __FUNCTION__, frameNumber, i);
895 }
896 }
897
898 if (res == OK) {
899 const int usage = GRALLOC_USAGE_SW_WRITE_OFTEN |
900 GRALLOC_USAGE_HW_CAMERA_WRITE;
901 // Lock buffer for writing
902 if (srcBuf.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
903 if (destBuf.format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
904 android_ycbcr ycbcr = android_ycbcr();
905 res = GrallocModule::getInstance().lock_ycbcr(
906 destBuf.importedBuffer, usage, 0, 0,
907 destBuf.width, destBuf.height, &ycbcr);
908 // This is only valid because we know that emulator's
909 // YCbCr_420_888 is really contiguous NV21 under the hood
910 destBuf.img = static_cast<uint8_t *>(ycbcr.y);
911 } else {
912 ALOGE("Unexpected private format for flexible YUV: 0x%x",
913 destBuf.format);
914 res = INVALID_OPERATION;
915 }
916 } else if (srcBuf.stream->format == HAL_PIXEL_FORMAT_BLOB) {
917 // All zero rectangle means lock the entire buffer.
918 res = GrallocModule::getInstance().lock(
919 destBuf.importedBuffer, usage, 0, 0, 0, 0, (void **)&(destBuf.img));
920 } else {
921 res = GrallocModule::getInstance().lock(
922 destBuf.importedBuffer, usage, 0, 0,
923 destBuf.width, destBuf.height, (void **)&(destBuf.img));
924 }
925 if (res != OK) {
926 ALOGE("%s: Request %d: Buffer %zu: Unable to lock buffer", __FUNCTION__,
927 frameNumber, i);
928 }
929 }
930
931 if (res != OK) {
932 // Either waiting or locking failed. Unlock and release locked buffers and
933 // bail out.
934 for (size_t j = 0; j < i; j++) {
935 GrallocModule::getInstance().unlock((*sensorBuffers)[i].importedBuffer);
936 GrallocModule::getInstance().release((*sensorBuffers)[i].importedBuffer);
937 }
938 delete sensorBuffers;
939 delete buffers;
940 return NO_INIT;
941 }
942
943 sensorBuffers->push_back(destBuf);
944 buffers->push_back(srcBuf);
945 }
946
947 /**
948 * Wait for JPEG compressor to not be busy, if needed
949 */
950 if (needJpeg) {
951 bool ready = mJpegCompressor->waitForDone(kJpegTimeoutNs);
952 if (!ready) {
953 ALOGE("%s: Timeout waiting for JPEG compression to complete!",
954 __FUNCTION__);
955 return NO_INIT;
956 }
957 res = mJpegCompressor->reserve();
958 if (res != OK) {
959 ALOGE("%s: Error managing JPEG compressor resources, can't reserve it!",
960 __FUNCTION__);
961 return NO_INIT;
962 }
963 }
964
965 /**
966 * Wait until the in-flight queue has room
967 */
968 res = mReadoutThread->waitForReadout();
969 if (res != OK) {
970 ALOGE("%s: Timeout waiting for previous requests to complete!",
971 __FUNCTION__);
972 return NO_INIT;
973 }
974
975 /**
976 * Wait until sensor's ready. This waits for lengthy amounts of time with
977 * mLock held, but the interface spec is that no other calls may by done to
978 * the HAL by the framework while process_capture_request is happening.
979 */
980 int syncTimeoutCount = 0;
981 while (!mSensor->waitForVSync(kSyncWaitTimeout)) {
982 if (mStatus == STATUS_ERROR) {
983 return NO_INIT;
984 }
985 if (syncTimeoutCount == kMaxSyncTimeoutCount) {
986 ALOGE("%s: Request %d: Sensor sync timed out after %" PRId64 " ms",
987 __FUNCTION__, frameNumber,
988 kSyncWaitTimeout * kMaxSyncTimeoutCount / 1000000);
989 return NO_INIT;
990 }
991 syncTimeoutCount++;
992 }
993
994 /**
995 * Configure sensor and queue up the request to the readout thread
996 */
997 mSensor->setExposureTime(exposureTime);
998 mSensor->setFrameDuration(frameDuration);
999 mSensor->setSensitivity(sensitivity);
1000 mSensor->setDestinationBuffers(sensorBuffers);
1001 mSensor->setFrameNumber(request->frame_number);
1002
1003 ReadoutThread::Request r;
1004 r.frameNumber = request->frame_number;
1005 r.settings = settings;
1006 r.sensorBuffers = sensorBuffers;
1007 r.buffers = buffers;
1008
1009 mReadoutThread->queueCaptureRequest(r);
1010 ALOGVV("%s: Queued frame %d", __FUNCTION__, request->frame_number);
1011
1012 // Cache the settings for next time
1013 mPrevSettings.acquire(settings);
1014
1015 return OK;
1016 }
1017
// Flush all in-flight capture requests.
// The fake camera completes requests quickly and keeps no long-lived queue
// worth draining, so this is intentionally a no-op; OK is returned so the
// framework treats the flush as having succeeded.
status_t EmulatedFakeCamera3::flush() {
  ALOGW("%s: Not implemented; ignored", __FUNCTION__);
  return OK;
}
1022
1023 /** Debug methods */
1024
// Dump debug state to the given file descriptor (camera HAL dumpsys hook).
// No state is currently emitted for the fake camera; the fd is ignored.
void EmulatedFakeCamera3::dump(int /*fd*/) {}
1026
1027 /**
1028 * Private methods
1029 */
1030
getCameraCapabilities()1031 status_t EmulatedFakeCamera3::getCameraCapabilities() {
1032 const char *key =
1033 mFacingBack ? "qemu.sf.back_camera_caps" : "qemu.sf.front_camera_caps";
1034
1035 /* Defined by 'qemu.sf.*_camera_caps' boot property: if the
1036 * property doesn't exist, it is assumed to list FULL. */
1037 char prop[PROPERTY_VALUE_MAX];
1038 if (property_get(key, prop, NULL) > 0) {
1039 char *saveptr = nullptr;
1040 char *cap = strtok_r(prop, " ,", &saveptr);
1041 while (cap != NULL) {
1042 for (int i = 0; i < NUM_CAPABILITIES; i++) {
1043 if (!strcasecmp(cap, sAvailableCapabilitiesStrings[i])) {
1044 mCapabilities.add(static_cast<AvailableCapabilities>(i));
1045 break;
1046 }
1047 }
1048 cap = strtok_r(NULL, " ,", &saveptr);
1049 }
1050 if (mCapabilities.size() == 0) {
1051 ALOGE("qemu.sf.back_camera_caps had no valid capabilities: %s", prop);
1052 }
1053 }
1054 // Default to FULL_LEVEL plus RAW if nothing is defined
1055 if (mCapabilities.size() == 0) {
1056 mCapabilities.add(FULL_LEVEL);
1057 mCapabilities.add(RAW);
1058 }
1059
1060 // Add level-based caps
1061 if (hasCapability(FULL_LEVEL)) {
1062 mCapabilities.add(BURST_CAPTURE);
1063 mCapabilities.add(READ_SENSOR_SETTINGS);
1064 mCapabilities.add(MANUAL_SENSOR);
1065 mCapabilities.add(MANUAL_POST_PROCESSING);
1066 };
1067
1068 // Backwards-compatible is required for most other caps
1069 // Not required for DEPTH_OUTPUT, though.
1070 if (hasCapability(BURST_CAPTURE) || hasCapability(READ_SENSOR_SETTINGS) ||
1071 hasCapability(RAW) || hasCapability(MANUAL_SENSOR) ||
1072 hasCapability(MANUAL_POST_PROCESSING) ||
1073 hasCapability(PRIVATE_REPROCESSING) || hasCapability(YUV_REPROCESSING) ||
1074 hasCapability(CONSTRAINED_HIGH_SPEED_VIDEO)) {
1075 mCapabilities.add(BACKWARD_COMPATIBLE);
1076 }
1077
1078 ALOGI("Camera %d capabilities:", mCameraID);
1079 for (size_t i = 0; i < mCapabilities.size(); i++) {
1080 ALOGI(" %s", sAvailableCapabilitiesStrings[mCapabilities[i]]);
1081 }
1082
1083 return OK;
1084 }
1085
hasCapability(AvailableCapabilities cap)1086 bool EmulatedFakeCamera3::hasCapability(AvailableCapabilities cap) {
1087 ssize_t idx = mCapabilities.indexOf(cap);
1088 return idx >= 0;
1089 }
1090
constructStaticInfo(const cuttlefish::CameraDefinition & params)1091 status_t EmulatedFakeCamera3::constructStaticInfo(
1092 const cuttlefish::CameraDefinition ¶ms) {
1093 CameraMetadata info;
1094 Vector<int32_t> availableCharacteristicsKeys;
1095 status_t res;
1096
1097 int32_t width = 0, height = 0;
1098
1099 /* TODO(ender): this currently supports only maximum resolution. */
1100 for (size_t index = 0; index < params.resolutions.size(); ++index) {
1101 if (width <= params.resolutions[index].width &&
1102 height <= params.resolutions[index].height) {
1103 width = params.resolutions[index].width;
1104 height = params.resolutions[index].height;
1105 }
1106 }
1107
1108 if (width < 640 || height < 480) {
1109 width = 640;
1110 height = 480;
1111 }
1112
1113 mSensorWidth = width;
1114 mSensorHeight = height;
1115
1116 #define ADD_STATIC_ENTRY(name, varptr, count) \
1117 availableCharacteristicsKeys.add(name); \
1118 res = info.update(name, varptr, count); \
1119 if (res != OK) return res
1120
1121 // android.sensor
1122
1123 if (hasCapability(MANUAL_SENSOR)) {
1124 ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
1125 Sensor::kExposureTimeRange, 2);
1126
1127 ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
1128 &Sensor::kFrameDurationRange[1], 1);
1129
1130 ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
1131 Sensor::kSensitivityRange,
1132 sizeof(Sensor::kSensitivityRange) / sizeof(int32_t));
1133
1134 ADD_STATIC_ENTRY(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
1135 &Sensor::kSensitivityRange[1], 1);
1136 }
1137
1138 static const float sensorPhysicalSize[2] = {3.20f, 2.40f}; // mm
1139 ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, sensorPhysicalSize, 2);
1140
1141 const int32_t pixelArray[] = {mSensorWidth, mSensorHeight};
1142 ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, pixelArray, 2);
1143 const int32_t activeArray[] = {0, 0, mSensorWidth, mSensorHeight};
1144 ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, activeArray, 4);
1145
1146 static const int32_t orientation = 90; // Aligned with 'long edge'
1147 ADD_STATIC_ENTRY(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
1148
1149 static const uint8_t timestampSource =
1150 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
1151 ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, ×tampSource, 1);
1152
1153 if (hasCapability(RAW)) {
1154 ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
1155 &Sensor::kColorFilterArrangement, 1);
1156
1157 ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_WHITE_LEVEL,
1158 (int32_t *)&Sensor::kMaxRawValue, 1);
1159
1160 static const int32_t blackLevelPattern[4] = {
1161 (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel,
1162 (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel};
1163 ADD_STATIC_ENTRY(ANDROID_SENSOR_BLACK_LEVEL_PATTERN, blackLevelPattern,
1164 sizeof(blackLevelPattern) / sizeof(int32_t));
1165 }
1166
1167 if (hasCapability(BACKWARD_COMPATIBLE)) {
1168 static const int32_t availableTestPatternModes[] = {
1169 ANDROID_SENSOR_TEST_PATTERN_MODE_OFF};
1170 ADD_STATIC_ENTRY(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
1171 availableTestPatternModes,
1172 sizeof(availableTestPatternModes) / sizeof(int32_t));
1173 }
1174
1175 // android.lens
1176
1177 static const float focalLength = 3.30f; // mm
1178 ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, &focalLength, 1);
1179
1180 if (hasCapability(BACKWARD_COMPATIBLE)) {
1181 // 5 cm min focus distance for back camera, infinity (fixed focus) for front
1182 const float minFocusDistance = mFacingBack ? 1.0 / 0.05 : 0.0;
1183 ADD_STATIC_ENTRY(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1184 &minFocusDistance, 1);
1185
1186 // 5 m hyperfocal distance for back camera, infinity (fixed focus) for front
1187 const float hyperFocalDistance = mFacingBack ? 1.0 / 5.0 : 0.0;
1188 ADD_STATIC_ENTRY(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, &hyperFocalDistance,
1189 1);
1190
1191 static const float aperture = 2.8f;
1192 ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_APERTURES, &aperture, 1);
1193 static const float filterDensity = 0;
1194 ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
1195 &filterDensity, 1);
1196 static const uint8_t availableOpticalStabilization =
1197 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
1198 ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
1199 &availableOpticalStabilization, 1);
1200
1201 static const int32_t lensShadingMapSize[] = {1, 1};
1202 ADD_STATIC_ENTRY(ANDROID_LENS_INFO_SHADING_MAP_SIZE, lensShadingMapSize,
1203 sizeof(lensShadingMapSize) / sizeof(int32_t));
1204
1205 static const uint8_t lensFocusCalibration =
1206 ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE;
1207 ADD_STATIC_ENTRY(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
1208 &lensFocusCalibration, 1);
1209 }
1210
1211 if (hasCapability(DEPTH_OUTPUT)) {
1212 // These could be included for non-DEPTH capability as well, but making this
1213 // variable for testing coverage
1214
1215 // 90 degree rotation to align with long edge of a phone device that's by
1216 // default portrait
1217 static const float qO[] = {0.707107f, 0.f, 0.f, 0.707107f};
1218
1219 // Either a 180-degree rotation for back-facing, or no rotation for
1220 // front-facing
1221 const float qF[] = {0, (mFacingBack ? 1.f : 0.f), 0,
1222 (mFacingBack ? 0.f : 1.f)};
1223
1224 // Quarternion product, orientation change then facing
1225 const float lensPoseRotation[] = {
1226 qO[0] * qF[0] - qO[1] * qF[1] - qO[2] * qF[2] - qO[3] * qF[3],
1227 qO[0] * qF[1] + qO[1] * qF[0] + qO[2] * qF[3] - qO[3] * qF[2],
1228 qO[0] * qF[2] + qO[2] * qF[0] + qO[1] * qF[3] - qO[3] * qF[1],
1229 qO[0] * qF[3] + qO[3] * qF[0] + qO[1] * qF[2] - qO[2] * qF[1]};
1230
1231 ADD_STATIC_ENTRY(ANDROID_LENS_POSE_ROTATION, lensPoseRotation,
1232 sizeof(lensPoseRotation) / sizeof(float));
1233
1234 // Only one camera facing each way, so 0 translation needed to the center of
1235 // the 'main' camera
1236 static const float lensPoseTranslation[] = {0.f, 0.f, 0.f};
1237
1238 ADD_STATIC_ENTRY(ANDROID_LENS_POSE_TRANSLATION, lensPoseTranslation,
1239 sizeof(lensPoseTranslation) / sizeof(float));
1240
1241 // Intrinsics are 'ideal' (f_x, f_y, c_x, c_y, s) match focal length and
1242 // active array size
1243 float f_x = focalLength * mSensorWidth / sensorPhysicalSize[0];
1244 float f_y = focalLength * mSensorHeight / sensorPhysicalSize[1];
1245 float c_x = mSensorWidth / 2.f;
1246 float c_y = mSensorHeight / 2.f;
1247 float s = 0.f;
1248 const float lensIntrinsics[] = {f_x, f_y, c_x, c_y, s};
1249
1250 ADD_STATIC_ENTRY(ANDROID_LENS_INTRINSIC_CALIBRATION, lensIntrinsics,
1251 sizeof(lensIntrinsics) / sizeof(float));
1252
1253 // No radial or tangential distortion
1254
1255 float lensRadialDistortion[] = {1.0f, 0.f, 0.f, 0.f, 0.f, 0.f};
1256
1257 ADD_STATIC_ENTRY(ANDROID_LENS_RADIAL_DISTORTION, lensRadialDistortion,
1258 sizeof(lensRadialDistortion) / sizeof(float));
1259 }
1260
1261 const uint8_t lensFacing =
1262 mFacingBack ? ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
1263 ADD_STATIC_ENTRY(ANDROID_LENS_FACING, &lensFacing, 1);
1264
1265 // android.flash
1266
1267 const uint8_t flashAvailable = mFacingBack;
1268 ADD_STATIC_ENTRY(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1);
1269
1270 // android.tonemap
1271
1272 if (hasCapability(MANUAL_POST_PROCESSING)) {
1273 static const int32_t tonemapCurvePoints = 128;
1274 ADD_STATIC_ENTRY(ANDROID_TONEMAP_MAX_CURVE_POINTS, &tonemapCurvePoints, 1);
1275
1276 static const uint8_t availableToneMapModes[] = {
1277 ANDROID_TONEMAP_MODE_CONTRAST_CURVE, ANDROID_TONEMAP_MODE_FAST,
1278 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
1279 ADD_STATIC_ENTRY(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
1280 availableToneMapModes, sizeof(availableToneMapModes));
1281 }
1282
1283 // android.scaler
1284
1285 const std::vector<int32_t> availableStreamConfigurationsBasic = {
1286 HAL_PIXEL_FORMAT_BLOB,
1287 width,
1288 height,
1289 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1290 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1291 320,
1292 240,
1293 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1294 HAL_PIXEL_FORMAT_YCbCr_420_888,
1295 320,
1296 240,
1297 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1298 HAL_PIXEL_FORMAT_BLOB,
1299 320,
1300 240,
1301 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1302 };
1303
1304 // Always need to include 640x480 in basic formats
1305 const std::vector<int32_t> availableStreamConfigurationsBasic640 = {
1306 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1307 640,
1308 480,
1309 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1310 HAL_PIXEL_FORMAT_YCbCr_420_888,
1311 640,
1312 480,
1313 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1314 HAL_PIXEL_FORMAT_BLOB,
1315 640,
1316 480,
1317 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
1318
1319 const std::vector<int32_t> availableStreamConfigurationsRaw = {
1320 HAL_PIXEL_FORMAT_RAW16,
1321 width,
1322 height,
1323 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1324 };
1325
1326 const std::vector<int32_t> availableStreamConfigurationsBurst = {
1327 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1328 width,
1329 height,
1330 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1331 HAL_PIXEL_FORMAT_YCbCr_420_888,
1332 width,
1333 height,
1334 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1335 HAL_PIXEL_FORMAT_RGBA_8888,
1336 width,
1337 height,
1338 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1339 };
1340
1341 std::vector<int32_t> availableStreamConfigurations;
1342
1343 if (hasCapability(BACKWARD_COMPATIBLE)) {
1344 availableStreamConfigurations.insert(
1345 availableStreamConfigurations.end(),
1346 availableStreamConfigurationsBasic.begin(),
1347 availableStreamConfigurationsBasic.end());
1348 if (width > 640) {
1349 availableStreamConfigurations.insert(
1350 availableStreamConfigurations.end(),
1351 availableStreamConfigurationsBasic640.begin(),
1352 availableStreamConfigurationsBasic640.end());
1353 }
1354 }
1355 if (hasCapability(RAW)) {
1356 availableStreamConfigurations.insert(
1357 availableStreamConfigurations.end(),
1358 availableStreamConfigurationsRaw.begin(),
1359 availableStreamConfigurationsRaw.end());
1360 }
1361 if (hasCapability(BURST_CAPTURE)) {
1362 availableStreamConfigurations.insert(
1363 availableStreamConfigurations.end(),
1364 availableStreamConfigurationsBurst.begin(),
1365 availableStreamConfigurationsBurst.end());
1366 }
1367
1368 if (availableStreamConfigurations.size() > 0) {
1369 ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
1370 &availableStreamConfigurations[0],
1371 availableStreamConfigurations.size());
1372 }
1373
1374 const std::vector<int64_t> availableMinFrameDurationsBasic = {
1375 HAL_PIXEL_FORMAT_BLOB,
1376 width,
1377 height,
1378 Sensor::kFrameDurationRange[0],
1379 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1380 320,
1381 240,
1382 Sensor::kFrameDurationRange[0],
1383 HAL_PIXEL_FORMAT_YCbCr_420_888,
1384 320,
1385 240,
1386 Sensor::kFrameDurationRange[0],
1387 HAL_PIXEL_FORMAT_BLOB,
1388 320,
1389 240,
1390 Sensor::kFrameDurationRange[0],
1391 };
1392
1393 // Always need to include 640x480 in basic formats
1394 const std::vector<int64_t> availableMinFrameDurationsBasic640 = {
1395 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1396 640,
1397 480,
1398 Sensor::kFrameDurationRange[0],
1399 HAL_PIXEL_FORMAT_YCbCr_420_888,
1400 640,
1401 480,
1402 Sensor::kFrameDurationRange[0],
1403 HAL_PIXEL_FORMAT_BLOB,
1404 640,
1405 480,
1406 Sensor::kFrameDurationRange[0]};
1407
1408 const std::vector<int64_t> availableMinFrameDurationsRaw = {
1409 HAL_PIXEL_FORMAT_RAW16,
1410 width,
1411 height,
1412 Sensor::kFrameDurationRange[0],
1413 };
1414
1415 const std::vector<int64_t> availableMinFrameDurationsBurst = {
1416 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1417 width,
1418 height,
1419 Sensor::kFrameDurationRange[0],
1420 HAL_PIXEL_FORMAT_YCbCr_420_888,
1421 width,
1422 height,
1423 Sensor::kFrameDurationRange[0],
1424 HAL_PIXEL_FORMAT_RGBA_8888,
1425 width,
1426 height,
1427 Sensor::kFrameDurationRange[0],
1428 };
1429
1430 std::vector<int64_t> availableMinFrameDurations;
1431
1432 if (hasCapability(BACKWARD_COMPATIBLE)) {
1433 availableMinFrameDurations.insert(availableMinFrameDurations.end(),
1434 availableMinFrameDurationsBasic.begin(),
1435 availableMinFrameDurationsBasic.end());
1436 if (width > 640) {
1437 availableMinFrameDurations.insert(
1438 availableMinFrameDurations.end(),
1439 availableMinFrameDurationsBasic640.begin(),
1440 availableMinFrameDurationsBasic640.end());
1441 }
1442 }
1443 if (hasCapability(RAW)) {
1444 availableMinFrameDurations.insert(availableMinFrameDurations.end(),
1445 availableMinFrameDurationsRaw.begin(),
1446 availableMinFrameDurationsRaw.end());
1447 }
1448 if (hasCapability(BURST_CAPTURE)) {
1449 availableMinFrameDurations.insert(availableMinFrameDurations.end(),
1450 availableMinFrameDurationsBurst.begin(),
1451 availableMinFrameDurationsBurst.end());
1452 }
1453
1454 if (availableMinFrameDurations.size() > 0) {
1455 ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
1456 &availableMinFrameDurations[0],
1457 availableMinFrameDurations.size());
1458 }
1459
1460 const std::vector<int64_t> availableStallDurationsBasic = {
1461 HAL_PIXEL_FORMAT_BLOB,
1462 width,
1463 height,
1464 Sensor::kFrameDurationRange[0],
1465 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1466 320,
1467 240,
1468 0,
1469 HAL_PIXEL_FORMAT_YCbCr_420_888,
1470 320,
1471 240,
1472 0,
1473 HAL_PIXEL_FORMAT_RGBA_8888,
1474 320,
1475 240,
1476 0,
1477 };
1478
1479 // Always need to include 640x480 in basic formats
1480 const std::vector<int64_t> availableStallDurationsBasic640 = {
1481 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1482 640,
1483 480,
1484 0,
1485 HAL_PIXEL_FORMAT_YCbCr_420_888,
1486 640,
1487 480,
1488 0,
1489 HAL_PIXEL_FORMAT_BLOB,
1490 640,
1491 480,
1492 Sensor::kFrameDurationRange[0]};
1493
1494 const std::vector<int64_t> availableStallDurationsRaw = {
1495 HAL_PIXEL_FORMAT_RAW16, width, height, Sensor::kFrameDurationRange[0]};
1496 const std::vector<int64_t> availableStallDurationsBurst = {
1497 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1498 width,
1499 height,
1500 0,
1501 HAL_PIXEL_FORMAT_YCbCr_420_888,
1502 width,
1503 height,
1504 0,
1505 HAL_PIXEL_FORMAT_RGBA_8888,
1506 width,
1507 height,
1508 0};
1509
1510 std::vector<int64_t> availableStallDurations;
1511
1512 if (hasCapability(BACKWARD_COMPATIBLE)) {
1513 availableStallDurations.insert(availableStallDurations.end(),
1514 availableStallDurationsBasic.begin(),
1515 availableStallDurationsBasic.end());
1516 if (width > 640) {
1517 availableStallDurations.insert(availableStallDurations.end(),
1518 availableStallDurationsBasic640.begin(),
1519 availableStallDurationsBasic640.end());
1520 }
1521 }
1522 if (hasCapability(RAW)) {
1523 availableStallDurations.insert(availableStallDurations.end(),
1524 availableStallDurationsRaw.begin(),
1525 availableStallDurationsRaw.end());
1526 }
1527 if (hasCapability(BURST_CAPTURE)) {
1528 availableStallDurations.insert(availableStallDurations.end(),
1529 availableStallDurationsBurst.begin(),
1530 availableStallDurationsBurst.end());
1531 }
1532
1533 if (availableStallDurations.size() > 0) {
1534 ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
1535 &availableStallDurations[0],
1536 availableStallDurations.size());
1537 }
1538
1539 if (hasCapability(BACKWARD_COMPATIBLE)) {
1540 static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
1541 ADD_STATIC_ENTRY(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
1542
1543 static const float maxZoom = 10;
1544 ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, &maxZoom, 1);
1545 }
1546
1547 // android.jpeg
1548
1549 if (hasCapability(BACKWARD_COMPATIBLE)) {
1550 static const int32_t jpegThumbnailSizes[] = {0, 0, 160, 120, 320, 240};
1551 ADD_STATIC_ENTRY(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, jpegThumbnailSizes,
1552 sizeof(jpegThumbnailSizes) / sizeof(int32_t));
1553
1554 static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize;
1555 ADD_STATIC_ENTRY(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);
1556 }
1557
1558 // android.stats
1559
1560 if (hasCapability(BACKWARD_COMPATIBLE)) {
1561 static const uint8_t availableFaceDetectModes[] = {
1562 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
1563 ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE,
1564 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL};
1565 ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
1566 availableFaceDetectModes,
1567 sizeof(availableFaceDetectModes));
1568
1569 static const int32_t maxFaceCount = 8;
1570 ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, &maxFaceCount, 1);
1571
1572 static const uint8_t availableShadingMapModes[] = {
1573 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF};
1574 ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
1575 availableShadingMapModes,
1576 sizeof(availableShadingMapModes));
1577 }
1578
1579 // android.sync
1580
1581 static const int32_t maxLatency =
1582 hasCapability(FULL_LEVEL) ? ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL
1583 : 3;
1584 ADD_STATIC_ENTRY(ANDROID_SYNC_MAX_LATENCY, &maxLatency, 1);
1585
1586 // android.control
1587
1588 if (hasCapability(BACKWARD_COMPATIBLE)) {
1589 static const uint8_t availableControlModes[] = {
1590 ANDROID_CONTROL_MODE_OFF, ANDROID_CONTROL_MODE_AUTO,
1591 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
1592 ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_MODES, availableControlModes,
1593 sizeof(availableControlModes));
1594 } else {
1595 static const uint8_t availableControlModes[] = {ANDROID_CONTROL_MODE_AUTO};
1596 ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_MODES, availableControlModes,
1597 sizeof(availableControlModes));
1598 }
1599
1600 static const uint8_t availableSceneModes[] = {
1601 static_cast<uint8_t>(hasCapability(BACKWARD_COMPATIBLE)
1602 ? ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY
1603 : ANDROID_CONTROL_SCENE_MODE_DISABLED)};
1604 ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, availableSceneModes,
1605 sizeof(availableSceneModes));
1606
1607 if (hasCapability(BACKWARD_COMPATIBLE)) {
1608 static const uint8_t availableEffects[] = {ANDROID_CONTROL_EFFECT_MODE_OFF};
1609 ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_EFFECTS, availableEffects,
1610 sizeof(availableEffects));
1611 }
1612
1613 if (hasCapability(BACKWARD_COMPATIBLE)) {
1614 static const int32_t max3aRegions[] = {/*AE*/ 1, /*AWB*/ 0, /*AF*/ 1};
1615 ADD_STATIC_ENTRY(ANDROID_CONTROL_MAX_REGIONS, max3aRegions,
1616 sizeof(max3aRegions) / sizeof(max3aRegions[0]));
1617
1618 static const uint8_t availableAeModes[] = {ANDROID_CONTROL_AE_MODE_OFF,
1619 ANDROID_CONTROL_AE_MODE_ON};
1620 ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_MODES, availableAeModes,
1621 sizeof(availableAeModes));
1622
1623 static const camera_metadata_rational exposureCompensationStep = {1, 3};
1624 ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_COMPENSATION_STEP,
1625 &exposureCompensationStep, 1);
1626
1627 int32_t exposureCompensationRange[] = {-9, 9};
1628 ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
1629 exposureCompensationRange,
1630 sizeof(exposureCompensationRange) / sizeof(int32_t));
1631 }
1632
1633 static const int32_t availableTargetFpsRanges[] = {5, 30, 15, 30,
1634 15, 15, 30, 30};
1635 ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
1636 availableTargetFpsRanges,
1637 sizeof(availableTargetFpsRanges) / sizeof(int32_t));
1638
1639 if (hasCapability(BACKWARD_COMPATIBLE)) {
1640 static const uint8_t availableAntibandingModes[] = {
1641 ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
1642 ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO};
1643 ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
1644 availableAntibandingModes,
1645 sizeof(availableAntibandingModes));
1646 }
1647
1648 static const uint8_t aeLockAvailable =
1649 hasCapability(BACKWARD_COMPATIBLE)
1650 ? ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE
1651 : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
1652
1653 ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_LOCK_AVAILABLE, &aeLockAvailable, 1);
1654
1655 if (hasCapability(BACKWARD_COMPATIBLE)) {
1656 static const uint8_t availableAwbModes[] = {
1657 ANDROID_CONTROL_AWB_MODE_OFF,
1658 ANDROID_CONTROL_AWB_MODE_AUTO,
1659 ANDROID_CONTROL_AWB_MODE_INCANDESCENT,
1660 ANDROID_CONTROL_AWB_MODE_FLUORESCENT,
1661 ANDROID_CONTROL_AWB_MODE_DAYLIGHT,
1662 ANDROID_CONTROL_AWB_MODE_SHADE};
1663 ADD_STATIC_ENTRY(ANDROID_CONTROL_AWB_AVAILABLE_MODES, availableAwbModes,
1664 sizeof(availableAwbModes));
1665 }
1666
1667 static const uint8_t awbLockAvailable =
1668 hasCapability(BACKWARD_COMPATIBLE)
1669 ? ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE
1670 : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
1671
1672 ADD_STATIC_ENTRY(ANDROID_CONTROL_AWB_LOCK_AVAILABLE, &awbLockAvailable, 1);
1673
1674 static const uint8_t availableAfModesBack[] = {
1675 ANDROID_CONTROL_AF_MODE_OFF, ANDROID_CONTROL_AF_MODE_AUTO,
1676 ANDROID_CONTROL_AF_MODE_MACRO, ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,
1677 ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE};
1678
1679 static const uint8_t availableAfModesFront[] = {ANDROID_CONTROL_AF_MODE_OFF};
1680
1681 if (mFacingBack && hasCapability(BACKWARD_COMPATIBLE)) {
1682 ADD_STATIC_ENTRY(ANDROID_CONTROL_AF_AVAILABLE_MODES, availableAfModesBack,
1683 sizeof(availableAfModesBack));
1684 } else {
1685 ADD_STATIC_ENTRY(ANDROID_CONTROL_AF_AVAILABLE_MODES, availableAfModesFront,
1686 sizeof(availableAfModesFront));
1687 }
1688
1689 static const uint8_t availableVstabModes[] = {
1690 ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
1691 ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
1692 availableVstabModes, sizeof(availableVstabModes));
1693
1694 // android.colorCorrection
1695
1696 if (hasCapability(BACKWARD_COMPATIBLE)) {
1697 static const uint8_t availableAberrationModes[] = {
1698 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
1699 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
1700 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
1701 ADD_STATIC_ENTRY(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
1702 availableAberrationModes,
1703 sizeof(availableAberrationModes));
1704 } else {
1705 static const uint8_t availableAberrationModes[] = {
1706 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
1707 };
1708 ADD_STATIC_ENTRY(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
1709 availableAberrationModes,
1710 sizeof(availableAberrationModes));
1711 }
1712 // android.edge
1713
1714 if (hasCapability(BACKWARD_COMPATIBLE)) {
1715 static const uint8_t availableEdgeModes[] = {
1716 ANDROID_EDGE_MODE_OFF, ANDROID_EDGE_MODE_FAST,
1717 ANDROID_EDGE_MODE_HIGH_QUALITY};
1718 ADD_STATIC_ENTRY(ANDROID_EDGE_AVAILABLE_EDGE_MODES, availableEdgeModes,
1719 sizeof(availableEdgeModes));
1720 } else {
1721 static const uint8_t availableEdgeModes[] = {ANDROID_EDGE_MODE_OFF};
1722 ADD_STATIC_ENTRY(ANDROID_EDGE_AVAILABLE_EDGE_MODES, availableEdgeModes,
1723 sizeof(availableEdgeModes));
1724 }
1725
1726 // android.info
1727
1728 static const uint8_t supportedHardwareLevel =
1729 hasCapability(FULL_LEVEL) ? ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL
1730 : ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
1731 ADD_STATIC_ENTRY(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
1732 &supportedHardwareLevel,
1733 /*count*/ 1);
1734
1735 // android.noiseReduction
1736
1737 if (hasCapability(BACKWARD_COMPATIBLE)) {
1738 static const uint8_t availableNoiseReductionModes[] = {
1739 ANDROID_NOISE_REDUCTION_MODE_OFF, ANDROID_NOISE_REDUCTION_MODE_FAST,
1740 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY};
1741 ADD_STATIC_ENTRY(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
1742 availableNoiseReductionModes,
1743 sizeof(availableNoiseReductionModes));
1744 } else {
1745 static const uint8_t availableNoiseReductionModes[] = {
1746 ANDROID_NOISE_REDUCTION_MODE_OFF,
1747 };
1748 ADD_STATIC_ENTRY(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
1749 availableNoiseReductionModes,
1750 sizeof(availableNoiseReductionModes));
1751 }
1752
1753 // android.depth
1754
1755 if (hasCapability(DEPTH_OUTPUT)) {
1756 static const int32_t maxDepthSamples = 100;
1757 ADD_STATIC_ENTRY(ANDROID_DEPTH_MAX_DEPTH_SAMPLES, &maxDepthSamples, 1);
1758
1759 static const int32_t availableDepthStreamConfigurations[] = {
1760 HAL_PIXEL_FORMAT_Y16,
1761 160,
1762 120,
1763 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT,
1764 HAL_PIXEL_FORMAT_BLOB,
1765 maxDepthSamples,
1766 1,
1767 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT};
1768 ADD_STATIC_ENTRY(
1769 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
1770 availableDepthStreamConfigurations,
1771 sizeof(availableDepthStreamConfigurations) / sizeof(int32_t));
1772
1773 static const int64_t availableDepthMinFrameDurations[] = {
1774 HAL_PIXEL_FORMAT_Y16,
1775 160,
1776 120,
1777 Sensor::kFrameDurationRange[0],
1778 HAL_PIXEL_FORMAT_BLOB,
1779 maxDepthSamples,
1780 1,
1781 Sensor::kFrameDurationRange[0]};
1782 ADD_STATIC_ENTRY(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
1783 availableDepthMinFrameDurations,
1784 sizeof(availableDepthMinFrameDurations) / sizeof(int64_t));
1785
1786 static const int64_t availableDepthStallDurations[] = {
1787 HAL_PIXEL_FORMAT_Y16,
1788 160,
1789 120,
1790 Sensor::kFrameDurationRange[0],
1791 HAL_PIXEL_FORMAT_BLOB,
1792 maxDepthSamples,
1793 1,
1794 Sensor::kFrameDurationRange[0]};
1795 ADD_STATIC_ENTRY(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
1796 availableDepthStallDurations,
1797 sizeof(availableDepthStallDurations) / sizeof(int64_t));
1798
1799 uint8_t depthIsExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
1800 ADD_STATIC_ENTRY(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthIsExclusive, 1);
1801 }
1802
1803 // android.shading
1804
1805 if (hasCapability(BACKWARD_COMPATIBLE)) {
1806 static const uint8_t availableShadingModes[] = {
1807 ANDROID_SHADING_MODE_OFF, ANDROID_SHADING_MODE_FAST,
1808 ANDROID_SHADING_MODE_HIGH_QUALITY};
1809 ADD_STATIC_ENTRY(ANDROID_SHADING_AVAILABLE_MODES, availableShadingModes,
1810 sizeof(availableShadingModes));
1811 } else {
1812 static const uint8_t availableShadingModes[] = {ANDROID_SHADING_MODE_OFF};
1813 ADD_STATIC_ENTRY(ANDROID_SHADING_AVAILABLE_MODES, availableShadingModes,
1814 sizeof(availableShadingModes));
1815 }
1816
1817 // android.request
1818
1819 static const int32_t maxNumOutputStreams[] = {
1820 kMaxRawStreamCount, kMaxProcessedStreamCount, kMaxJpegStreamCount};
1821 ADD_STATIC_ENTRY(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, maxNumOutputStreams,
1822 3);
1823
1824 static const uint8_t maxPipelineDepth = kMaxBufferCount;
1825 ADD_STATIC_ENTRY(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, &maxPipelineDepth, 1);
1826
1827 static const int32_t partialResultCount = 1;
1828 ADD_STATIC_ENTRY(ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &partialResultCount,
1829 /*count*/ 1);
1830
1831 SortedVector<uint8_t> caps;
1832 for (size_t i = 0; i < mCapabilities.size(); i++) {
1833 switch (mCapabilities[i]) {
1834 case BACKWARD_COMPATIBLE:
1835 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
1836 break;
1837 case MANUAL_SENSOR:
1838 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
1839 break;
1840 case MANUAL_POST_PROCESSING:
1841 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
1842 break;
1843 case RAW:
1844 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
1845 break;
1846 case PRIVATE_REPROCESSING:
1847 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
1848 break;
1849 case READ_SENSOR_SETTINGS:
1850 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
1851 break;
1852 case BURST_CAPTURE:
1853 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
1854 break;
1855 case YUV_REPROCESSING:
1856 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
1857 break;
1858 case DEPTH_OUTPUT:
1859 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT);
1860 break;
1861 case CONSTRAINED_HIGH_SPEED_VIDEO:
1862 caps.add(
1863 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
1864 break;
1865 default:
1866 // Ignore LEVELs
1867 break;
1868 }
1869 }
1870 ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, caps.array(),
1871 caps.size());
1872
1873 // Scan a default request template for included request keys
1874 Vector<int32_t> availableRequestKeys;
1875 const camera_metadata_t *previewRequest =
1876 constructDefaultRequestSettings(CAMERA3_TEMPLATE_PREVIEW);
1877 for (size_t i = 0; i < get_camera_metadata_entry_count(previewRequest); i++) {
1878 camera_metadata_ro_entry_t entry;
1879 get_camera_metadata_ro_entry(previewRequest, i, &entry);
1880 availableRequestKeys.add(entry.tag);
1881 }
1882 ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
1883 availableRequestKeys.array(), availableRequestKeys.size());
1884
1885 // Add a few more result keys. Must be kept up to date with the various places
1886 // that add these
1887
1888 Vector<int32_t> availableResultKeys(availableRequestKeys);
1889 if (hasCapability(BACKWARD_COMPATIBLE)) {
1890 availableResultKeys.add(ANDROID_CONTROL_AE_STATE);
1891 availableResultKeys.add(ANDROID_CONTROL_AF_STATE);
1892 availableResultKeys.add(ANDROID_CONTROL_AWB_STATE);
1893 availableResultKeys.add(ANDROID_FLASH_STATE);
1894 availableResultKeys.add(ANDROID_LENS_STATE);
1895 availableResultKeys.add(ANDROID_LENS_FOCUS_RANGE);
1896 availableResultKeys.add(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW);
1897 availableResultKeys.add(ANDROID_STATISTICS_SCENE_FLICKER);
1898 }
1899
1900 if (hasCapability(DEPTH_OUTPUT)) {
1901 availableResultKeys.add(ANDROID_LENS_POSE_ROTATION);
1902 availableResultKeys.add(ANDROID_LENS_POSE_TRANSLATION);
1903 availableResultKeys.add(ANDROID_LENS_INTRINSIC_CALIBRATION);
1904 availableResultKeys.add(ANDROID_LENS_RADIAL_DISTORTION);
1905 }
1906
1907 availableResultKeys.add(ANDROID_REQUEST_PIPELINE_DEPTH);
1908 availableResultKeys.add(ANDROID_SENSOR_TIMESTAMP);
1909
1910 ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
1911 availableResultKeys.array(), availableResultKeys.size());
1912
1913 // Needs to be last, to collect all the keys set
1914
1915 availableCharacteristicsKeys.add(
1916 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
1917 info.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
1918 availableCharacteristicsKeys);
1919
1920 mCameraInfo = info.release();
1921
1922 #undef ADD_STATIC_ENTRY
1923 return OK;
1924 }
1925
process3A(CameraMetadata & settings)1926 status_t EmulatedFakeCamera3::process3A(CameraMetadata &settings) {
1927 /**
1928 * Extract top-level 3A controls
1929 */
1930 status_t res;
1931
1932 camera_metadata_entry e;
1933
1934 e = settings.find(ANDROID_CONTROL_MODE);
1935 if (e.count == 0) {
1936 ALOGE("%s: No control mode entry!", __FUNCTION__);
1937 return BAD_VALUE;
1938 }
1939 uint8_t controlMode = e.data.u8[0];
1940
1941 if (controlMode == ANDROID_CONTROL_MODE_OFF) {
1942 mAeMode = ANDROID_CONTROL_AE_MODE_OFF;
1943 mAfMode = ANDROID_CONTROL_AF_MODE_OFF;
1944 mAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
1945 mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
1946 mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
1947 mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
1948 update3A(settings);
1949 return OK;
1950 } else if (controlMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
1951 if (!hasCapability(BACKWARD_COMPATIBLE)) {
1952 ALOGE("%s: Can't use scene mode when BACKWARD_COMPATIBLE not supported!",
1953 __FUNCTION__);
1954 return BAD_VALUE;
1955 }
1956
1957 e = settings.find(ANDROID_CONTROL_SCENE_MODE);
1958 if (e.count == 0) {
1959 ALOGE("%s: No scene mode entry!", __FUNCTION__);
1960 return BAD_VALUE;
1961 }
1962 uint8_t sceneMode = e.data.u8[0];
1963
1964 switch (sceneMode) {
1965 case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY:
1966 mFacePriority = true;
1967 break;
1968 default:
1969 ALOGE("%s: Emulator doesn't support scene mode %d", __FUNCTION__,
1970 sceneMode);
1971 return BAD_VALUE;
1972 }
1973 } else {
1974 mFacePriority = false;
1975 }
1976
1977 // controlMode == AUTO or sceneMode = FACE_PRIORITY
1978 // Process individual 3A controls
1979
1980 res = doFakeAE(settings);
1981 if (res != OK) return res;
1982
1983 res = doFakeAF(settings);
1984 if (res != OK) return res;
1985
1986 res = doFakeAWB(settings);
1987 if (res != OK) return res;
1988
1989 update3A(settings);
1990 return OK;
1991 }
1992
// Fake auto-exposure state machine. Reads AE mode/lock/precapture-trigger
// from the request settings and advances mAeState, mAeCounter, and the
// current/target exposure times accordingly. Returns BAD_VALUE on a missing
// AE mode entry (when BACKWARD_COMPATIBLE), INVALID_OPERATION on an
// unexpected internal state, OK otherwise.
status_t EmulatedFakeCamera3::doFakeAE(CameraMetadata &settings) {
  camera_metadata_entry e;

  e = settings.find(ANDROID_CONTROL_AE_MODE);
  if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
    ALOGE("%s: No AE mode entry!", __FUNCTION__);
    return BAD_VALUE;
  }
  // Missing entry (non-backward-compatible device) defaults to AE ON.
  uint8_t aeMode =
      (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AE_MODE_ON;
  mAeMode = aeMode;

  switch (aeMode) {
    case ANDROID_CONTROL_AE_MODE_OFF:
      // AE is OFF
      mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
      return OK;
    case ANDROID_CONTROL_AE_MODE_ON:
      // OK for AUTO modes
      break;
    default:
      // Mostly silently ignore unsupported modes
      ALOGV("%s: Emulator doesn't support AE mode %d, assuming ON",
            __FUNCTION__, aeMode);
      break;
  }

  // Absent lock entry is treated as unlocked.
  e = settings.find(ANDROID_CONTROL_AE_LOCK);
  bool aeLocked =
      (e.count > 0) ? (e.data.u8[0] == ANDROID_CONTROL_AE_LOCK_ON) : false;

  e = settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER);
  bool precaptureTrigger = false;
  if (e.count != 0) {
    precaptureTrigger =
        (e.data.u8[0] == ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START);
  }

  if (precaptureTrigger) {
    ALOGV("%s: Pre capture trigger = %d", __FUNCTION__, precaptureTrigger);
  } else if (e.count > 0) {
    ALOGV("%s: Pre capture trigger was present? %zu", __FUNCTION__, e.count);
  }

  if (precaptureTrigger || mAeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
    // Run precapture sequence
    if (mAeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
      // Entering precapture: restart the frame counter.
      mAeCounter = 0;
    }

    if (mFacePriority) {
      mAeTargetExposureTime = kFacePriorityExposureTime;
    } else {
      mAeTargetExposureTime = kNormalExposureTime;
    }

    // Precapture is done once enough frames have elapsed and the current
    // exposure is within 10% of the target.
    if (mAeCounter > kPrecaptureMinFrames &&
        (mAeTargetExposureTime - mAeCurrentExposureTime) <
            mAeTargetExposureTime / 10) {
      // Done with precapture
      mAeCounter = 0;
      mAeState = aeLocked ? ANDROID_CONTROL_AE_STATE_LOCKED
                          : ANDROID_CONTROL_AE_STATE_CONVERGED;
    } else {
      // Converge some more
      mAeCurrentExposureTime +=
          (mAeTargetExposureTime - mAeCurrentExposureTime) * kExposureTrackRate;
      mAeCounter++;
      mAeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE;
    }

  } else if (!aeLocked) {
    // Run standard occasional AE scan
    switch (mAeState) {
      case ANDROID_CONTROL_AE_STATE_CONVERGED:
      case ANDROID_CONTROL_AE_STATE_INACTIVE:
        mAeCounter++;
        if (mAeCounter > kStableAeMaxFrames) {
          // After being stable for a while, pick a new random exposure
          // target (factor of 2^exposureStep) and start searching for it.
          mAeTargetExposureTime =
              mFacePriority ? kFacePriorityExposureTime : kNormalExposureTime;
          float exposureStep = ((double)rand() / RAND_MAX) *
                                   (kExposureWanderMax - kExposureWanderMin) +
                               kExposureWanderMin;
          mAeTargetExposureTime *= std::pow(2, exposureStep);
          mAeState = ANDROID_CONTROL_AE_STATE_SEARCHING;
        }
        break;
      case ANDROID_CONTROL_AE_STATE_SEARCHING:
        // Exponential approach toward the target exposure time.
        mAeCurrentExposureTime +=
            (mAeTargetExposureTime - mAeCurrentExposureTime) *
            kExposureTrackRate;
        if (llabs(mAeTargetExposureTime - mAeCurrentExposureTime) <
            mAeTargetExposureTime / 10) {
          // Close enough
          mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
          mAeCounter = 0;
        }
        break;
      case ANDROID_CONTROL_AE_STATE_LOCKED:
        // Lock released this frame: report converged again.
        mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
        mAeCounter = 0;
        break;
      default:
        ALOGE("%s: Emulator in unexpected AE state %d", __FUNCTION__, mAeState);
        return INVALID_OPERATION;
    }
  } else {
    // AE is locked
    mAeState = ANDROID_CONTROL_AE_STATE_LOCKED;
  }

  return OK;
}
2106
// Fake autofocus state machine. Reads AF mode and trigger from the request
// settings and advances mAfState by at most one transition per frame.
// Focus "results" are randomized: scans end in FOCUSED_LOCKED roughly 2/3 of
// the time, NOT_FOCUSED_LOCKED otherwise. Returns BAD_VALUE for missing or
// unsupported modes/triggers, OK otherwise.
status_t EmulatedFakeCamera3::doFakeAF(CameraMetadata &settings) {
  camera_metadata_entry e;

  e = settings.find(ANDROID_CONTROL_AF_MODE);
  if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
    ALOGE("%s: No AF mode entry!", __FUNCTION__);
    return BAD_VALUE;
  }
  // Missing entry (non-backward-compatible device) defaults to AF OFF.
  uint8_t afMode =
      (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AF_MODE_OFF;

  e = settings.find(ANDROID_CONTROL_AF_TRIGGER);
  typedef camera_metadata_enum_android_control_af_trigger af_trigger_t;
  af_trigger_t afTrigger;
  if (e.count != 0) {
    afTrigger = static_cast<af_trigger_t>(e.data.u8[0]);

    ALOGV("%s: AF trigger set to 0x%x", __FUNCTION__, afTrigger);
    ALOGV("%s: AF mode is 0x%x", __FUNCTION__, afMode);
  } else {
    afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
  }

  switch (afMode) {
    case ANDROID_CONTROL_AF_MODE_OFF:
      mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
      return OK;
    case ANDROID_CONTROL_AF_MODE_AUTO:
    case ANDROID_CONTROL_AF_MODE_MACRO:
    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
      // Only the back-facing camera advertises focusing modes.
      if (!mFacingBack) {
        ALOGE("%s: Front camera doesn't support AF mode %d", __FUNCTION__,
              afMode);
        return BAD_VALUE;
      }
      // OK, handle transitions lower on
      break;
    default:
      ALOGE("%s: Emulator doesn't support AF mode %d", __FUNCTION__, afMode);
      return BAD_VALUE;
  }

  // A mode change keeps the state in INACTIVE for at least one frame (see
  // the INACTIVE case below).
  bool afModeChanged = mAfMode != afMode;
  mAfMode = afMode;

  /**
   * Simulate AF triggers. Transition at most 1 state per frame.
   * - Focusing always succeeds (goes into locked, or PASSIVE_SCAN).
   */

  bool afTriggerStart = false;
  bool afTriggerCancel = false;
  switch (afTrigger) {
    case ANDROID_CONTROL_AF_TRIGGER_IDLE:
      break;
    case ANDROID_CONTROL_AF_TRIGGER_START:
      afTriggerStart = true;
      break;
    case ANDROID_CONTROL_AF_TRIGGER_CANCEL:
      afTriggerCancel = true;
      // Cancel trigger always transitions into INACTIVE
      mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;

      ALOGV("%s: AF State transition to STATE_INACTIVE", __FUNCTION__);

      // Stay in 'inactive' until at least next frame
      return OK;
    default:
      ALOGE("%s: Unknown af trigger value %d", __FUNCTION__, afTrigger);
      return BAD_VALUE;
  }

  // If we get down here, we're either in an autofocus mode
  // or in a continuous focus mode (and no other modes)

  // Remember the pre-transition state for the debug log at the bottom.
  int oldAfState = mAfState;
  switch (mAfState) {
    case ANDROID_CONTROL_AF_STATE_INACTIVE:
      if (afTriggerStart) {
        switch (afMode) {
          case ANDROID_CONTROL_AF_MODE_AUTO:
            // fall-through
          case ANDROID_CONTROL_AF_MODE_MACRO:
            mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
            break;
          case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
            // fall-through
          case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
            mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
            break;
        }
      } else {
        // At least one frame stays in INACTIVE
        if (!afModeChanged) {
          switch (afMode) {
            case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
              // fall-through
            case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
              mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN;
              break;
          }
        }
      }
      break;
    case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
      /**
       * When the AF trigger is activated, the algorithm should finish
       * its PASSIVE_SCAN if active, and then transition into AF_FOCUSED
       * or AF_NOT_FOCUSED as appropriate
       */
      if (afTriggerStart) {
        // Randomly transition to focused or not focused
        if (rand() % 3) {
          mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
        } else {
          mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
        }
      }
      /**
       * When the AF trigger is not involved, the AF algorithm should
       * start in INACTIVE state, and then transition into PASSIVE_SCAN
       * and PASSIVE_FOCUSED states
       */
      else if (!afTriggerCancel) {
        // Randomly transition to passive focus
        if (rand() % 3 == 0) {
          mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
        }
      }

      break;
    case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
      if (afTriggerStart) {
        // Randomly transition to focused or not focused
        if (rand() % 3) {
          mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
        } else {
          mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
        }
      }
      // TODO: initiate passive scan (PASSIVE_SCAN)
      break;
    case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
      // Simulate AF sweep completing instantaneously

      // Randomly transition to focused or not focused
      if (rand() % 3) {
        mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
      } else {
        mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
      }
      break;
    case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
      if (afTriggerStart) {
        switch (afMode) {
          case ANDROID_CONTROL_AF_MODE_AUTO:
            // fall-through
          case ANDROID_CONTROL_AF_MODE_MACRO:
            mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
            break;
          case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
            // fall-through
          case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
            // continuous autofocus => trigger start has no effect
            break;
        }
      }
      break;
    case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
      if (afTriggerStart) {
        switch (afMode) {
          case ANDROID_CONTROL_AF_MODE_AUTO:
            // fall-through
          case ANDROID_CONTROL_AF_MODE_MACRO:
            mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
            break;
          case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
            // fall-through
          case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
            // continuous autofocus => trigger start has no effect
            break;
        }
      }
      break;
    default:
      ALOGE("%s: Bad af state %d", __FUNCTION__, mAfState);
  }

  // Log the state transition (compiled out unless LOG_NNDEBUG is enabled).
  {
    char afStateString[100] = {
        0,
    };
    camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE, oldAfState,
                                 afStateString, sizeof(afStateString));

    char afNewStateString[100] = {
        0,
    };
    camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE, mAfState,
                                 afNewStateString, sizeof(afNewStateString));
    ALOGVV("%s: AF state transitioned from %s to %s", __FUNCTION__,
           afStateString, afNewStateString);
  }

  return OK;
}
2314
doFakeAWB(CameraMetadata & settings)2315 status_t EmulatedFakeCamera3::doFakeAWB(CameraMetadata &settings) {
2316 camera_metadata_entry e;
2317
2318 e = settings.find(ANDROID_CONTROL_AWB_MODE);
2319 if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
2320 ALOGE("%s: No AWB mode entry!", __FUNCTION__);
2321 return BAD_VALUE;
2322 }
2323 uint8_t awbMode =
2324 (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AWB_MODE_AUTO;
2325
2326 // TODO: Add white balance simulation
2327
2328 e = settings.find(ANDROID_CONTROL_AWB_LOCK);
2329 bool awbLocked =
2330 (e.count > 0) ? (e.data.u8[0] == ANDROID_CONTROL_AWB_LOCK_ON) : false;
2331
2332 switch (awbMode) {
2333 case ANDROID_CONTROL_AWB_MODE_OFF:
2334 mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
2335 break;
2336 case ANDROID_CONTROL_AWB_MODE_AUTO:
2337 case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
2338 case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
2339 case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
2340 case ANDROID_CONTROL_AWB_MODE_SHADE:
2341 // Always magically right, or locked
2342 mAwbState = awbLocked ? ANDROID_CONTROL_AWB_STATE_LOCKED
2343 : ANDROID_CONTROL_AWB_STATE_CONVERGED;
2344 break;
2345 default:
2346 ALOGE("%s: Emulator doesn't support AWB mode %d", __FUNCTION__, awbMode);
2347 return BAD_VALUE;
2348 }
2349
2350 return OK;
2351 }
2352
update3A(CameraMetadata & settings)2353 void EmulatedFakeCamera3::update3A(CameraMetadata &settings) {
2354 if (mAeMode != ANDROID_CONTROL_AE_MODE_OFF) {
2355 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &mAeCurrentExposureTime, 1);
2356 settings.update(ANDROID_SENSOR_SENSITIVITY, &mAeCurrentSensitivity, 1);
2357 }
2358
2359 settings.update(ANDROID_CONTROL_AE_STATE, &mAeState, 1);
2360 settings.update(ANDROID_CONTROL_AF_STATE, &mAfState, 1);
2361 settings.update(ANDROID_CONTROL_AWB_STATE, &mAwbState, 1);
2362
2363 uint8_t lensState;
2364 switch (mAfState) {
2365 case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
2366 case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
2367 lensState = ANDROID_LENS_STATE_MOVING;
2368 break;
2369 case ANDROID_CONTROL_AF_STATE_INACTIVE:
2370 case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
2371 case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
2372 case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
2373 case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
2374 default:
2375 lensState = ANDROID_LENS_STATE_STATIONARY;
2376 break;
2377 }
2378 settings.update(ANDROID_LENS_STATE, &lensState, 1);
2379 }
2380
// Called by the readout thread when it drains its queue; transitions the
// camera from ACTIVE back to READY if the thread is genuinely idle.
void EmulatedFakeCamera3::signalReadoutIdle() {
  Mutex::Autolock l(mLock);
  // Need to check isIdle again because waiting on mLock may have allowed
  // something to be placed in the in-flight queue.
  if (mStatus == STATUS_ACTIVE && mReadoutThread->isIdle()) {
    ALOGV("Now idle");
    mStatus = STATUS_READY;
  }
}
2390
onSensorEvent(uint32_t frameNumber,Event e,nsecs_t timestamp)2391 void EmulatedFakeCamera3::onSensorEvent(uint32_t frameNumber, Event e,
2392 nsecs_t timestamp) {
2393 switch (e) {
2394 case Sensor::SensorListener::EXPOSURE_START: {
2395 ALOGVV("%s: Frame %d: Sensor started exposure at %lld", __FUNCTION__,
2396 frameNumber, timestamp);
2397 // Trigger shutter notify to framework
2398 camera3_notify_msg_t msg;
2399 msg.type = CAMERA3_MSG_SHUTTER;
2400 msg.message.shutter.frame_number = frameNumber;
2401 msg.message.shutter.timestamp = timestamp;
2402 sendNotify(&msg);
2403 break;
2404 }
2405 default:
2406 ALOGW("%s: Unexpected sensor event %d at %" PRId64, __FUNCTION__, e,
2407 timestamp);
2408 break;
2409 }
2410 }
2411
ReadoutThread(EmulatedFakeCamera3 * parent)2412 EmulatedFakeCamera3::ReadoutThread::ReadoutThread(EmulatedFakeCamera3 *parent)
2413 : mParent(parent), mJpegWaiting(false) {}
2414
~ReadoutThread()2415 EmulatedFakeCamera3::ReadoutThread::~ReadoutThread() {
2416 for (List<Request>::iterator i = mInFlightQueue.begin();
2417 i != mInFlightQueue.end(); i++) {
2418 delete i->buffers;
2419 delete i->sensorBuffers;
2420 }
2421 }
2422
// Hands a capture request to the readout thread. The queued Request's
// buffer vectors are subsequently freed by threadLoop() (or by the
// destructor if never processed).
void EmulatedFakeCamera3::ReadoutThread::queueCaptureRequest(const Request &r) {
  Mutex::Autolock l(mLock);

  mInFlightQueue.push_back(r);
  // Wake threadLoop(), which may be waiting on this condition for work.
  mInFlightSignal.signal();
}
2429
// Returns true when no requests are queued and the thread is not currently
// processing one (mThreadActive is set while a dequeued request is being
// read out in threadLoop()).
bool EmulatedFakeCamera3::ReadoutThread::isIdle() {
  Mutex::Autolock l(mLock);
  return mInFlightQueue.empty() && !mThreadActive;
}
2434
waitForReadout()2435 status_t EmulatedFakeCamera3::ReadoutThread::waitForReadout() {
2436 status_t res;
2437 Mutex::Autolock l(mLock);
2438 int loopCount = 0;
2439 while (mInFlightQueue.size() >= kMaxQueueSize) {
2440 res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
2441 if (res != OK && res != TIMED_OUT) {
2442 ALOGE("%s: Error waiting for in-flight queue to shrink", __FUNCTION__);
2443 return INVALID_OPERATION;
2444 }
2445 if (loopCount == kMaxWaitLoops) {
2446 ALOGE("%s: Timed out waiting for in-flight queue to shrink",
2447 __FUNCTION__);
2448 return TIMED_OUT;
2449 }
2450 loopCount++;
2451 }
2452 return OK;
2453 }
2454
threadLoop()2455 bool EmulatedFakeCamera3::ReadoutThread::threadLoop() {
2456 status_t res;
2457
2458 ALOGVV("%s: ReadoutThread waiting for request", __FUNCTION__);
2459
2460 // First wait for a request from the in-flight queue
2461
2462 if (mCurrentRequest.settings.isEmpty()) {
2463 Mutex::Autolock l(mLock);
2464 if (mInFlightQueue.empty()) {
2465 res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
2466 if (res == TIMED_OUT) {
2467 ALOGVV("%s: ReadoutThread: Timed out waiting for request",
2468 __FUNCTION__);
2469 return true;
2470 } else if (res != NO_ERROR) {
2471 ALOGE("%s: Error waiting for capture requests: %d", __FUNCTION__, res);
2472 return false;
2473 }
2474 }
2475 mCurrentRequest.frameNumber = mInFlightQueue.begin()->frameNumber;
2476 mCurrentRequest.settings.acquire(mInFlightQueue.begin()->settings);
2477 mCurrentRequest.buffers = mInFlightQueue.begin()->buffers;
2478 mCurrentRequest.sensorBuffers = mInFlightQueue.begin()->sensorBuffers;
2479 mInFlightQueue.erase(mInFlightQueue.begin());
2480 mInFlightSignal.signal();
2481 mThreadActive = true;
2482 ALOGVV("%s: Beginning readout of frame %d", __FUNCTION__,
2483 mCurrentRequest.frameNumber);
2484 }
2485
2486 // Then wait for it to be delivered from the sensor
2487 ALOGVV("%s: ReadoutThread: Wait for frame to be delivered from sensor",
2488 __FUNCTION__);
2489
2490 nsecs_t captureTime;
2491 bool gotFrame = mParent->mSensor->waitForNewFrame(kWaitPerLoop, &captureTime);
2492 if (!gotFrame) {
2493 ALOGVV("%s: ReadoutThread: Timed out waiting for sensor frame",
2494 __FUNCTION__);
2495 return true;
2496 }
2497
2498 ALOGVV("Sensor done with readout for frame %d, captured at %lld ",
2499 mCurrentRequest.frameNumber, captureTime);
2500
2501 // Check if we need to JPEG encode a buffer, and send it for async
2502 // compression if so. Otherwise prepare the buffer for return.
2503 bool needJpeg = false;
2504 HalBufferVector::iterator buf = mCurrentRequest.buffers->begin();
2505 while (buf != mCurrentRequest.buffers->end()) {
2506 bool goodBuffer = true;
2507 if (buf->stream->format == HAL_PIXEL_FORMAT_BLOB &&
2508 buf->stream->data_space != HAL_DATASPACE_DEPTH) {
2509 Mutex::Autolock jl(mJpegLock);
2510 if (mJpegWaiting) {
2511 // This shouldn't happen, because processCaptureRequest should
2512 // be stalling until JPEG compressor is free.
2513 ALOGE("%s: Already processing a JPEG!", __FUNCTION__);
2514 goodBuffer = false;
2515 }
2516 if (goodBuffer) {
2517 // Compressor takes ownership of sensorBuffers here
2518 res = mParent->mJpegCompressor->start(mCurrentRequest.sensorBuffers,
2519 this);
2520 goodBuffer = (res == OK);
2521 }
2522 if (goodBuffer) {
2523 needJpeg = true;
2524
2525 mJpegHalBuffer = *buf;
2526 mJpegFrameNumber = mCurrentRequest.frameNumber;
2527 mJpegWaiting = true;
2528
2529 mCurrentRequest.sensorBuffers = NULL;
2530 buf = mCurrentRequest.buffers->erase(buf);
2531
2532 continue;
2533 }
2534 ALOGE("%s: Error compressing output buffer: %s (%d)", __FUNCTION__,
2535 strerror(-res), res);
2536 // fallthrough for cleanup
2537 }
2538
2539 buf->status =
2540 goodBuffer ? CAMERA3_BUFFER_STATUS_OK : CAMERA3_BUFFER_STATUS_ERROR;
2541 buf->acquire_fence = -1;
2542 buf->release_fence = -1;
2543
2544 ++buf;
2545 } // end while
2546
2547 // Construct result for all completed buffers and results
2548
2549 camera3_capture_result result;
2550
2551 if (mParent->hasCapability(BACKWARD_COMPATIBLE)) {
2552 static const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
2553 mCurrentRequest.settings.update(ANDROID_STATISTICS_SCENE_FLICKER,
2554 &sceneFlicker, 1);
2555
2556 static const uint8_t flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
2557 mCurrentRequest.settings.update(ANDROID_FLASH_STATE, &flashState, 1);
2558
2559 nsecs_t rollingShutterSkew = Sensor::kFrameDurationRange[0];
2560 mCurrentRequest.settings.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
2561 &rollingShutterSkew, 1);
2562
2563 float focusRange[] = {1.0f / 5.0f, 0}; // 5 m to infinity in focus
2564 mCurrentRequest.settings.update(ANDROID_LENS_FOCUS_RANGE, focusRange,
2565 sizeof(focusRange) / sizeof(float));
2566 }
2567
2568 if (mParent->hasCapability(DEPTH_OUTPUT)) {
2569 camera_metadata_entry_t entry;
2570
2571 find_camera_metadata_entry(mParent->mCameraInfo,
2572 ANDROID_LENS_POSE_TRANSLATION, &entry);
2573 mCurrentRequest.settings.update(ANDROID_LENS_POSE_TRANSLATION, entry.data.f,
2574 entry.count);
2575
2576 find_camera_metadata_entry(mParent->mCameraInfo, ANDROID_LENS_POSE_ROTATION,
2577 &entry);
2578 mCurrentRequest.settings.update(ANDROID_LENS_POSE_ROTATION, entry.data.f,
2579 entry.count);
2580
2581 find_camera_metadata_entry(mParent->mCameraInfo,
2582 ANDROID_LENS_INTRINSIC_CALIBRATION, &entry);
2583 mCurrentRequest.settings.update(ANDROID_LENS_INTRINSIC_CALIBRATION,
2584 entry.data.f, entry.count);
2585
2586 find_camera_metadata_entry(mParent->mCameraInfo,
2587 ANDROID_LENS_RADIAL_DISTORTION, &entry);
2588 mCurrentRequest.settings.update(ANDROID_LENS_RADIAL_DISTORTION,
2589 entry.data.f, entry.count);
2590 }
2591
2592 mCurrentRequest.settings.update(ANDROID_SENSOR_TIMESTAMP, &captureTime, 1);
2593
2594 // JPEGs take a stage longer
2595 const uint8_t pipelineDepth =
2596 needJpeg ? kMaxBufferCount : kMaxBufferCount - 1;
2597 mCurrentRequest.settings.update(ANDROID_REQUEST_PIPELINE_DEPTH,
2598 &pipelineDepth, 1);
2599
2600 result.frame_number = mCurrentRequest.frameNumber;
2601 result.result = mCurrentRequest.settings.getAndLock();
2602 result.num_output_buffers = mCurrentRequest.buffers->size();
2603 result.output_buffers = mCurrentRequest.buffers->array();
2604 result.input_buffer = nullptr;
2605 result.partial_result = 1;
2606
2607 // Go idle if queue is empty, before sending result
2608 bool signalIdle = false;
2609 {
2610 Mutex::Autolock l(mLock);
2611 if (mInFlightQueue.empty()) {
2612 mThreadActive = false;
2613 signalIdle = true;
2614 }
2615 }
2616 if (signalIdle) mParent->signalReadoutIdle();
2617
2618 // Send it off to the framework
2619 ALOGVV("%s: ReadoutThread: Send result to framework", __FUNCTION__);
2620 mParent->sendCaptureResult(&result);
2621
2622 // Clean up
2623 mCurrentRequest.settings.unlock(result.result);
2624
2625 delete mCurrentRequest.buffers;
2626 mCurrentRequest.buffers = NULL;
2627 if (!needJpeg) {
2628 for (StreamBuffer& sensorBuffer : *mCurrentRequest.sensorBuffers) {
2629 GrallocModule::getInstance().unlock(sensorBuffer.importedBuffer);
2630 GrallocModule::getInstance().release(sensorBuffer.importedBuffer);
2631 }
2632 delete mCurrentRequest.sensorBuffers;
2633 mCurrentRequest.sensorBuffers = NULL;
2634 }
2635 mCurrentRequest.settings.clear();
2636
2637 return true;
2638 }
2639
// JPEG compressor completion callback. Releases the source gralloc buffer,
// then returns the stashed BLOB HAL buffer (mJpegHalBuffer, saved by
// threadLoop()) to the framework as a buffer-only capture result.
void EmulatedFakeCamera3::ReadoutThread::onJpegDone(
    const StreamBuffer &jpegBuffer, bool success) {
  Mutex::Autolock jl(mJpegLock);

  // The compressor is done with the source buffer; unmap and release it.
  GrallocModule::getInstance().unlock(jpegBuffer.importedBuffer);
  GrallocModule::getInstance().release(jpegBuffer.importedBuffer);

  mJpegHalBuffer.status =
      success ? CAMERA3_BUFFER_STATUS_OK : CAMERA3_BUFFER_STATUS_ERROR;
  mJpegHalBuffer.acquire_fence = -1;
  mJpegHalBuffer.release_fence = -1;
  // Allow the next request's JPEG to start.
  mJpegWaiting = false;

  camera3_capture_result result;

  result.frame_number = mJpegFrameNumber;
  // No metadata in this result: the settings were already sent by
  // threadLoop(); partial_result 0 marks a buffer-only result.
  result.result = NULL;
  result.num_output_buffers = 1;
  result.output_buffers = &mJpegHalBuffer;
  result.input_buffer = nullptr;
  result.partial_result = 0;

  if (!success) {
    ALOGE(
        "%s: Compression failure, returning error state buffer to"
        " framework",
        __FUNCTION__);
  } else {
    ALOGV("%s: Compression complete, returning buffer to framework",
          __FUNCTION__);
  }

  mParent->sendCaptureResult(&result);
}
2674
// JPEG compressor input-buffer callback. Intentionally a no-op other than
// logging: this path is not expected to be reached in this HAL.
void EmulatedFakeCamera3::ReadoutThread::onJpegInputDone(
    const StreamBuffer & /*inputBuffer*/) {
  // Should never get here, since the input buffer has to be returned
  // by end of processCaptureRequest
  ALOGE("%s: Unexpected input buffer from JPEG compressor!", __FUNCTION__);
}
2681
2682 }; // namespace android
2683