/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * Contains the implementation of EmulatedQemuCamera3, a class that
 * encapsulates the functionality of an advanced fake camera.
 */

// Uncomment LOG_NDEBUG to enable verbose logging, and uncomment both LOG_NDEBUG
// *and* LOG_NNDEBUG to enable very verbose logging.

//#define LOG_NDEBUG 0
//#define LOG_NNDEBUG 0

#define LOG_TAG "EmulatedCamera_QemuCamera3"

#if defined(LOG_NNDEBUG) && LOG_NNDEBUG == 0
#define ALOGVV ALOGV
#else
#define ALOGVV(...) ((void)0)
#endif
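
// Note: both macros follow the Android logging convention that *defining*
// LOG_N*DEBUG as 0 enables that verbosity level; left commented out, the
// corresponding ALOGV/ALOGVV calls compile to no-ops.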

#include "EmulatedCameraFactory.h"
#include "EmulatedQemuCamera3.h"

#include <cmath>
#include <cutils/properties.h>
#include <inttypes.h>
#include <sstream>
#include <ui/Fence.h>
#include <ui/Rect.h>
#include <log/log.h>
#include <vector>

namespace android {

/*
 * Constants for Camera Capabilities
 */

const int64_t USEC = 1000LL;
const int64_t MSEC = USEC * 1000LL;
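// These are nanosecond multipliers: the exposure times below are nsecs_t
// values, so USEC and MSEC express one microsecond and one millisecond in
// nanoseconds.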

const int32_t EmulatedQemuCamera3::kAvailableFormats[] = {
    HAL_PIXEL_FORMAT_BLOB,
    HAL_PIXEL_FORMAT_RGBA_8888,
    HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
    // These are handled by YCbCr_420_888:
    //     HAL_PIXEL_FORMAT_YV12,
    //     HAL_PIXEL_FORMAT_YCrCb_420_SP,
    HAL_PIXEL_FORMAT_YCbCr_420_888
};

/**
 * 3A constants
 */

// Default exposure and gain targets for different scenarios
const nsecs_t EmulatedQemuCamera3::kNormalExposureTime = 10 * MSEC;
const nsecs_t EmulatedQemuCamera3::kFacePriorityExposureTime = 30 * MSEC;
const int EmulatedQemuCamera3::kNormalSensitivity = 100;
const int EmulatedQemuCamera3::kFacePrioritySensitivity = 400;
// CTS requires AE to stabilize within an 8-frame timeout in waitForAeStable,
// so this tracking rate must converge quickly enough to pass.
const float EmulatedQemuCamera3::kExposureTrackRate = 0.2f;
const int EmulatedQemuCamera3::kPrecaptureMinFrames = 10;
const int EmulatedQemuCamera3::kStableAeMaxFrames = 100;
const float EmulatedQemuCamera3::kExposureWanderMin = -2;
const float EmulatedQemuCamera3::kExposureWanderMax = 1;

/*****************************************************************************
 * Constructor/Destructor
 ****************************************************************************/

EmulatedQemuCamera3::EmulatedQemuCamera3(int cameraId, struct hw_module_t* module,
                                         GraphicBufferMapper* gbm) :
        EmulatedCamera3(cameraId, module), mGBM(gbm) {
    ALOGI("Constructing emulated qemu camera 3: ID %d", mCameraID);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; ++i) {
        mDefaultTemplates[i] = nullptr;
    }
}

EmulatedQemuCamera3::~EmulatedQemuCamera3() {
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; ++i) {
        if (mDefaultTemplates[i] != nullptr) {
            free_camera_metadata(mDefaultTemplates[i]);
        }
    }
    delete[] mDeviceName;
}

/*****************************************************************************
 * Public Methods
 ****************************************************************************/

/*
 * Camera Device Lifecycle Methods
 */

void EmulatedQemuCamera3::parseResolutions(const char *frameDims) {
    const size_t kMaxFrameDimsLength = 512;
    size_t frameDimsLength = strnlen(frameDims, kMaxFrameDimsLength);
    if (frameDimsLength == kMaxFrameDimsLength) {
118 ALOGE("%s: Frame dimensions string was too long (>= %d)",
119 __FUNCTION__, frameDimsLength);
        return;
    } else if (frameDimsLength == 0) {
        ALOGE("%s: Frame dimensions string was NULL or zero-length",
              __FUNCTION__);
        return;
    }
    std::stringstream ss(frameDims);
    std::string input;
    while (std::getline(ss, input, ',')) {
        int width = 0;
        int height = 0;
        char none = 0;
        /*
         * Expect exactly two conversions: that means there was nothing after
         * the height, and we don't want any trailing characters. Otherwise,
         * we just ignore this entry.
         */
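        // For example, "640x480" produces two conversions and is accepted,
        // while "640x480x" assigns the trailing 'x' to the %c conversion
        // (three conversions) and is rejected.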
        if (sscanf(input.c_str(), "%dx%d%c", &width, &height, &none) == 2) {
            mResolutions.push_back(std::pair<int32_t,int32_t>(width, height));
            ALOGI("%s: %dx%d", __FUNCTION__, width, height);
        } else {
            ALOGE("%s: Ignoring malformed resolution entry \"%s\"",
                  __FUNCTION__, input.c_str());
        }
    }

    /*
     * We assume the sensor size of the webcam is the resolution with the
     * largest area. Any resolution with a dimension that exceeds the sensor
     * size will be rejected, so Camera API calls will start failing. To work
     * around this, we remove any resolutions with at least one dimension
     * exceeding that of the max area resolution.
     */
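    // For example, given "1280x720,640x480,800x1000", 1280x720 has the largest
    // area and becomes the sensor size; 800x1000 is then dropped because its
    // height (1000) exceeds the sensor height (720), while 640x480 is kept.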

    // Find the resolution with the maximum area and use that as the sensor
    // size.
    int maxArea = 0;
    for (const auto &res : mResolutions) {
        int area = res.first * res.second;
        if (area > maxArea) {
            maxArea = area;
            mSensorWidth = res.first;
            mSensorHeight = res.second;
        }
    }

    // Remove any resolution with a dimension exceeding the sensor size.
    for (auto res = mResolutions.begin(); res != mResolutions.end(); ) {
        if (res->first > (int32_t)mSensorWidth ||
            res->second > (int32_t)mSensorHeight) {
            // Width and/or height larger than sensor. Remove it.
            res = mResolutions.erase(res);
        } else {
            ++res;
        }
    }

    if (mResolutions.empty()) {
        ALOGE("%s: Qemu camera has no valid resolutions", __FUNCTION__);
    }
}

status_t EmulatedQemuCamera3::Initialize(const char *deviceName,
                                         const char *frameDims,
                                         const char *facingDir) {
    if (mStatus != STATUS_ERROR) {
        ALOGE("%s: Already initialized!", __FUNCTION__);
        return INVALID_OPERATION;
    }

    /*
     * Save parameters for later.
     */
    mDeviceName = deviceName;
    parseResolutions(frameDims);
    if (strcmp("back", facingDir) == 0) {
        mFacingBack = true;
    } else {
        mFacingBack = false;
    }
    // We no longer need these two strings.
    delete[] frameDims;
    delete[] facingDir;

    status_t res = getCameraCapabilities();
    if (res != OK) {
        ALOGE("%s: Unable to get camera capabilities: %s (%d)",
              __FUNCTION__, strerror(-res), res);
        return res;
    }

    res = constructStaticInfo();
    if (res != OK) {
        ALOGE("%s: Unable to allocate static info: %s (%d)",
              __FUNCTION__, strerror(-res), res);
        return res;
    }

    return EmulatedCamera3::Initialize();
}

status_t EmulatedQemuCamera3::connectCamera(hw_device_t** device) {
    Mutex::Autolock l(mLock);
    status_t res;

    if (mStatus != STATUS_CLOSED) {
        ALOGE("%s: Can't connect in state %d", __FUNCTION__, mStatus);
        return INVALID_OPERATION;
    }

    /*
     * Initialize sensor.
     */
    mSensor = new QemuSensor(mDeviceName, mSensorWidth, mSensorHeight, mGBM);
    mSensor->setQemuSensorListener(this);
    res = mSensor->startUp();
    if (res != NO_ERROR) {
        return res;
    }

    mReadoutThread = new ReadoutThread(this);
    mJpegCompressor = new JpegCompressor(mGBM);

    res = mReadoutThread->run("EmuCam3::readoutThread");
    if (res != NO_ERROR) return res;

    // Initialize fake 3A

    mFacePriority = false;
    mAeMode = ANDROID_CONTROL_AE_MODE_ON;
    mAfMode = ANDROID_CONTROL_AF_MODE_AUTO;
    mAwbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
    mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
    mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
    mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
    mAeCounter = 0;
    mAeTargetExposureTime = kNormalExposureTime;
    mAeCurrentExposureTime = kNormalExposureTime;
    mAeCurrentSensitivity = kNormalSensitivity;

    return EmulatedCamera3::connectCamera(device);
}

status_t EmulatedQemuCamera3::closeCamera() {
    status_t res;
    {
        Mutex::Autolock l(mLock);
        if (mStatus == STATUS_CLOSED) return OK;

        res = mSensor->shutDown();
        if (res != NO_ERROR) {
            ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
            return res;
        }
        mSensor.clear();

        mReadoutThread->requestExit();
    }

    mReadoutThread->join();

    {
        Mutex::Autolock l(mLock);
        // Clear out private stream information.
        for (StreamIterator s = mStreams.begin(); s != mStreams.end(); s++) {
            PrivateStreamInfo *privStream =
                    static_cast<PrivateStreamInfo*>((*s)->priv);
            delete privStream;
            (*s)->priv = nullptr;
        }
        mStreams.clear();
        mReadoutThread.clear();
    }

    return EmulatedCamera3::closeCamera();
}

status_t EmulatedQemuCamera3::getCameraInfo(struct camera_info *info) {
    info->facing = mFacingBack ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT;
    info->orientation = gEmulatedCameraFactory.getFakeCameraOrientation();
    return EmulatedCamera3::getCameraInfo(info);
}

/*
 * Camera3 Interface Methods
 */

status_t EmulatedQemuCamera3::configureStreams(
        camera3_stream_configuration *streamList) {
    Mutex::Autolock l(mLock);
    ALOGV("%s: %d streams", __FUNCTION__, streamList->num_streams);

    if (mStatus != STATUS_OPEN && mStatus != STATUS_READY) {
        ALOGE("%s: Cannot configure streams in state %d",
              __FUNCTION__, mStatus);
        return NO_INIT;
    }

    /*
     * Sanity-check input list.
     */
    if (streamList == nullptr) {
        ALOGE("%s: NULL stream configuration", __FUNCTION__);
        return BAD_VALUE;
    }
    if (streamList->streams == nullptr) {
        ALOGE("%s: NULL stream list", __FUNCTION__);
        return BAD_VALUE;
    }
    if (streamList->num_streams < 1) {
        ALOGE("%s: Bad number of streams requested: %d", __FUNCTION__,
              streamList->num_streams);
        return BAD_VALUE;
    }

    camera3_stream_t *inputStream = nullptr;
    for (size_t i = 0; i < streamList->num_streams; ++i) {
        camera3_stream_t *newStream = streamList->streams[i];

        if (newStream == nullptr) {
            ALOGE("%s: Stream index %zu was NULL", __FUNCTION__, i);
            return BAD_VALUE;
        }

        ALOGV("%s: Stream %p (id %zu), type %d, usage 0x%x, format 0x%x",
              __FUNCTION__, newStream, i, newStream->stream_type,
              newStream->usage, newStream->format);

        if (newStream->stream_type == CAMERA3_STREAM_INPUT ||
            newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
            if (inputStream != nullptr) {
                ALOGE("%s: Multiple input streams requested!", __FUNCTION__);
                return BAD_VALUE;
            }
            inputStream = newStream;
        }

        bool validFormat = false;
        size_t numFormats = sizeof(kAvailableFormats) /
                sizeof(kAvailableFormats[0]);
        for (size_t f = 0; f < numFormats; ++f) {
            if (newStream->format == kAvailableFormats[f]) {
                validFormat = true;
                break;
            }
        }
        if (!validFormat) {
            ALOGE("%s: Unsupported stream format 0x%x requested",
                  __FUNCTION__, newStream->format);
            return BAD_VALUE;
        }
    }
    mInputStream = inputStream;

    /*
     * Initially mark all existing streams as not alive.
     */
    for (StreamIterator s = mStreams.begin(); s != mStreams.end(); ++s) {
        PrivateStreamInfo *privStream =
                static_cast<PrivateStreamInfo*>((*s)->priv);
        privStream->alive = false;
    }

    /*
     * Find new streams and mark still-alive ones.
     */
    for (size_t i = 0; i < streamList->num_streams; ++i) {
        camera3_stream_t *newStream = streamList->streams[i];
        if (newStream->priv == nullptr) {
            // New stream. Construct info.
            PrivateStreamInfo *privStream = new PrivateStreamInfo();
            privStream->alive = true;

            newStream->max_buffers = kMaxBufferCount;
            newStream->priv = privStream;
            mStreams.push_back(newStream);
        } else {
            // Existing stream, mark as still alive.
            PrivateStreamInfo *privStream =
                    static_cast<PrivateStreamInfo*>(newStream->priv);
            privStream->alive = true;
        }
        // Always update usage and max buffers.
        newStream->max_buffers = kMaxBufferCount;
        switch (newStream->stream_type) {
            case CAMERA3_STREAM_OUTPUT:
                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            case CAMERA3_STREAM_INPUT:
                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
                break;
            case CAMERA3_STREAM_BIDIRECTIONAL:
                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
                        GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
        }
        // Set the buffer format, in line with the gralloc implementation.
        if (newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
            if (newStream->usage & GRALLOC_USAGE_HW_CAMERA_WRITE) {
                if (newStream->usage & GRALLOC_USAGE_HW_TEXTURE) {
                    newStream->format = HAL_PIXEL_FORMAT_YCbCr_420_888;
                } else if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) {
                    newStream->format = HAL_PIXEL_FORMAT_YCbCr_420_888;
                } else {
                    newStream->format = HAL_PIXEL_FORMAT_RGB_888;
                }
            }
        }
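        // A similar usage-driven format resolution is applied per buffer in
        // processCaptureRequest() below.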
    }

    /*
     * Reap the dead streams.
     */
    for (StreamIterator s = mStreams.begin(); s != mStreams.end();) {
        PrivateStreamInfo *privStream =
                static_cast<PrivateStreamInfo*>((*s)->priv);
        if (!privStream->alive) {
            (*s)->priv = nullptr;
            delete privStream;
            s = mStreams.erase(s);
        } else {
            ++s;
        }
    }

    /*
     * Can't reuse settings across configure call.
     */
    mPrevSettings.clear();

    return OK;
}

status_t EmulatedQemuCamera3::registerStreamBuffers(
        const camera3_stream_buffer_set *bufferSet) {
    Mutex::Autolock l(mLock);
    ALOGE("%s: Should not be invoked on HAL versions >= 3.2!", __FUNCTION__);
    return NO_INIT;
}

const camera_metadata_t* EmulatedQemuCamera3::constructDefaultRequestSettings(
        int type) {
    Mutex::Autolock l(mLock);

    if (type < 0 || type >= CAMERA3_TEMPLATE_COUNT) {
        ALOGE("%s: Unknown request settings template: %d",
              __FUNCTION__, type);
        return nullptr;
    }

    if (!hasCapability(BACKWARD_COMPATIBLE) && type != CAMERA3_TEMPLATE_PREVIEW) {
        ALOGE("%s: Template %d not supported w/o BACKWARD_COMPATIBLE capability",
              __FUNCTION__, type);
        return nullptr;
    }

    /*
     * Cache is not just an optimization - pointer returned has to live at
     * least as long as the camera device instance does.
     */
    if (mDefaultTemplates[type] != nullptr) {
        return mDefaultTemplates[type];
    }

    CameraMetadata settings;

    /* android.request */

    static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
    settings.update(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);

    static const int32_t id = 0;
    settings.update(ANDROID_REQUEST_ID, &id, 1);

    static const int32_t frameCount = 0;
    settings.update(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1);

    /* android.lens */

    static const float focalLength = 5.0f;
    settings.update(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1);

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const float focusDistance = 0;
        settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1);

        static const float aperture = 2.8f;
        settings.update(ANDROID_LENS_APERTURE, &aperture, 1);

        static const float filterDensity = 0;
        settings.update(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1);

        static const uint8_t opticalStabilizationMode =
                ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
        settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
                &opticalStabilizationMode, 1);

        // FOCUS_RANGE set only in frame
    }

    /* android.flash */

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
        settings.update(ANDROID_FLASH_MODE, &flashMode, 1);

        static const uint8_t flashPower = 10;
        settings.update(ANDROID_FLASH_FIRING_POWER, &flashPower, 1);

        static const int64_t firingTime = 0;
        settings.update(ANDROID_FLASH_FIRING_TIME, &firingTime, 1);
    }

    /* android.scaler */
    if (hasCapability(BACKWARD_COMPATIBLE)) {
        const int32_t cropRegion[4] = {
            0, 0, mSensorWidth, mSensorHeight
        };
        settings.update(ANDROID_SCALER_CROP_REGION, cropRegion, 4);
    }

    /* android.jpeg */
    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const uint8_t jpegQuality = 80;
        settings.update(ANDROID_JPEG_QUALITY, &jpegQuality, 1);

        static const int32_t thumbnailSize[2] = {
            320, 240
        };
        settings.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);

        static const uint8_t thumbnailQuality = 80;
        settings.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1);

        static const double gpsCoordinates[3] = {
            0, 0, 0
        };
        settings.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 3);

        static const uint8_t gpsProcessingMethod[32] = "None";
        settings.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32);

        static const int64_t gpsTimestamp = 0;
        settings.update(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1);

        static const int32_t jpegOrientation = 0;
        settings.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);
    }

    /* android.stats */
    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const uint8_t faceDetectMode =
                ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
        settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);

        static const uint8_t hotPixelMapMode =
                ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
        settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
    }

    /* android.control */

    uint8_t controlIntent = 0;
    switch (type) {
        case CAMERA3_TEMPLATE_PREVIEW:
            controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
            break;
        case CAMERA3_TEMPLATE_STILL_CAPTURE:
            controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
            break;
        case CAMERA3_TEMPLATE_VIDEO_RECORD:
            controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
            break;
        case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
            controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
            break;
        case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
            controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
            break;
        case CAMERA3_TEMPLATE_MANUAL:
            controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
            break;
        default:
            controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
            break;
    }
    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);

    const uint8_t controlMode = (type == CAMERA3_TEMPLATE_MANUAL) ?
            ANDROID_CONTROL_MODE_OFF :
            ANDROID_CONTROL_MODE_AUTO;
    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);

    int32_t aeTargetFpsRange[2] = {
        5, 30
    };
    if (type == CAMERA3_TEMPLATE_VIDEO_RECORD ||
        type == CAMERA3_TEMPLATE_VIDEO_SNAPSHOT) {
        aeTargetFpsRange[0] = 30;
    }
    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2);

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
        settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);

        const uint8_t sceneMode =
                ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
        settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);

        const uint8_t aeMode = (type == CAMERA3_TEMPLATE_MANUAL) ?
                ANDROID_CONTROL_AE_MODE_OFF : ANDROID_CONTROL_AE_MODE_ON;
        settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);

        static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
        settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);

        static const int32_t controlRegions[5] = {
            0, 0, 0, 0, 0
        };
        settings.update(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5);

        static const int32_t aeExpCompensation = 0;
        settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1);

        static const uint8_t aeAntibandingMode =
                ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
        settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1);

        static const uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
        settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1);

        const uint8_t awbMode = (type == CAMERA3_TEMPLATE_MANUAL) ?
                ANDROID_CONTROL_AWB_MODE_OFF :
                ANDROID_CONTROL_AWB_MODE_AUTO;
        settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);

        static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
        settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);

        uint8_t afMode = 0;

        if (mFacingBack) {
            switch (type) {
                case CAMERA3_TEMPLATE_PREVIEW:
                    afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                    break;
                case CAMERA3_TEMPLATE_STILL_CAPTURE:
                    afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                    break;
                case CAMERA3_TEMPLATE_VIDEO_RECORD:
                    afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
                    break;
                case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
                    afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
                    break;
                case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
                    afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                    break;
                case CAMERA3_TEMPLATE_MANUAL:
                    afMode = ANDROID_CONTROL_AF_MODE_OFF;
                    break;
                default:
                    afMode = ANDROID_CONTROL_AF_MODE_AUTO;
                    break;
            }
        } else {
            afMode = ANDROID_CONTROL_AF_MODE_OFF;
        }
        settings.update(ANDROID_CONTROL_AF_MODE, &afMode, 1);
        settings.update(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5);

        static const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
        settings.update(ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1);

        static const uint8_t vstabMode =
                ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
        settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
                &vstabMode, 1);

        static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
        settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);

        static const uint8_t lensShadingMapMode =
                ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
        settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
                &lensShadingMapMode, 1);

        static const uint8_t aberrationMode =
                ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
                &aberrationMode, 1);

        static const int32_t testPatternMode =
                ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
        settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPatternMode, 1);
    }

    mDefaultTemplates[type] = settings.release();

    return mDefaultTemplates[type];
}

status_t EmulatedQemuCamera3::processCaptureRequest(
        camera3_capture_request *request) {
    Mutex::Autolock l(mLock);
    status_t res;

    /* Validation */

    if (mStatus < STATUS_READY) {
        ALOGE("%s: Can't submit capture requests in state %d", __FUNCTION__,
              mStatus);
        return INVALID_OPERATION;
    }

    if (request == nullptr) {
        ALOGE("%s: NULL request!", __FUNCTION__);
        return BAD_VALUE;
    }

    uint32_t frameNumber = request->frame_number;

    if (request->settings == nullptr && mPrevSettings.isEmpty()) {
        ALOGE("%s: Request %d: NULL settings for first request after "
              "configureStreams()", __FUNCTION__, frameNumber);
        return BAD_VALUE;
    }

    if (request->input_buffer != nullptr &&
        request->input_buffer->stream != mInputStream) {
        ALOGE("%s: Request %d: Input buffer not from input stream!",
              __FUNCTION__, frameNumber);
        ALOGV("%s: Bad stream %p, expected: %p", __FUNCTION__,
              request->input_buffer->stream, mInputStream);
        ALOGV("%s: Bad stream type %d, expected stream type %d", __FUNCTION__,
              request->input_buffer->stream->stream_type,
              mInputStream ? mInputStream->stream_type : -1);

        return BAD_VALUE;
    }

    if (request->num_output_buffers < 1 || request->output_buffers == nullptr) {
        ALOGE("%s: Request %d: No output buffers provided!",
              __FUNCTION__, frameNumber);
        return BAD_VALUE;
    }

    /*
     * Validate all buffers, starting with input buffer if it's given.
     */

    ssize_t idx;
    const camera3_stream_buffer_t *b;
    if (request->input_buffer != nullptr) {
        idx = -1;
        b = request->input_buffer;
    } else {
        idx = 0;
        b = request->output_buffers;
    }
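    // idx == -1 denotes the optional input buffer; indices >= 0 walk the
    // output_buffers array.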
    do {
        PrivateStreamInfo *priv =
                static_cast<PrivateStreamInfo*>(b->stream->priv);
        if (priv == nullptr) {
            ALOGE("%s: Request %d: Buffer %zd: Unconfigured stream!",
                  __FUNCTION__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (!priv->alive) {
            ALOGE("%s: Request %d: Buffer %zd: Dead stream!",
                  __FUNCTION__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
            ALOGE("%s: Request %d: Buffer %zd: Status not OK!",
                  __FUNCTION__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->release_fence != -1) {
            ALOGE("%s: Request %d: Buffer %zd: Has a release fence!",
                  __FUNCTION__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->buffer == nullptr) {
            ALOGE("%s: Request %d: Buffer %zd: NULL buffer handle!",
                  __FUNCTION__, frameNumber, idx);
            return BAD_VALUE;
        }
        idx++;
        b = &(request->output_buffers[idx]);
    } while (idx < (ssize_t)request->num_output_buffers);

    // TODO: Validate settings parameters.

    /*
     * Start processing this request.
     */

    mStatus = STATUS_ACTIVE;

    CameraMetadata settings;

    if (request->settings == nullptr) {
        settings.acquire(mPrevSettings);
    } else {
        settings = request->settings;
    }

    res = process3A(settings);
    if (res != OK) {
        return res;
    }

    /*
     * Get ready for sensor config.
     */
    // TODO: We shouldn't need exposureTime or frameDuration for webcams.
    nsecs_t exposureTime;
    nsecs_t frameDuration;
    bool needJpeg = false;
    camera_metadata_entry_t entry;

    entry = settings.find(ANDROID_SENSOR_EXPOSURE_TIME);
    exposureTime = (entry.count > 0) ?
            entry.data.i64[0] :
            QemuSensor::kExposureTimeRange[0];

    // Note: Camera consumers may rely on there being an exposure
    // time set in the camera metadata.
    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1);

    entry = settings.find(ANDROID_SENSOR_FRAME_DURATION);
    frameDuration = (entry.count > 0) ?
            entry.data.i64[0] :
            QemuSensor::kFrameDurationRange[0];

    if (exposureTime > frameDuration) {
        frameDuration = exposureTime + QemuSensor::kMinVerticalBlank;
        settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
    }

    static const int32_t sensitivity = QemuSensor::kSensitivityRange[0];
    settings.update(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1);

    static const uint8_t colorMode = ANDROID_COLOR_CORRECTION_MODE_FAST;
    settings.update(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1);

    static const float colorGains[4] = {
        1.0f, 1.0f, 1.0f, 1.0f
    };
    settings.update(ANDROID_COLOR_CORRECTION_GAINS, colorGains, 4);

    static const camera_metadata_rational colorTransform[9] = {
        {1,1}, {0,1}, {0,1},
        {0,1}, {1,1}, {0,1},
        {0,1}, {0,1}, {1,1}
    };
    settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9);

    static const camera_metadata_rational neutralColorPoint[3] = {
        {1,1}, {1,1}, {1,1},
    };
    settings.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT, neutralColorPoint, 3);

    Buffers *sensorBuffers = new Buffers();
    HalBufferVector *buffers = new HalBufferVector();

    sensorBuffers->setCapacity(request->num_output_buffers);
    buffers->setCapacity(request->num_output_buffers);

    /*
     * Process all the buffers we got for output, constructing internal buffer
     * structures for them, and lock them for writing.
     */
    for (size_t i = 0; i < request->num_output_buffers; ++i) {
        const camera3_stream_buffer &srcBuf = request->output_buffers[i];
        StreamBuffer destBuf;
        destBuf.streamId = kGenericStreamId;
        destBuf.width = srcBuf.stream->width;
        destBuf.height = srcBuf.stream->height;
        // In line with the goldfish gralloc implementation.
        if (srcBuf.stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
            if (srcBuf.stream->usage & GRALLOC_USAGE_HW_CAMERA_WRITE) {
                if (srcBuf.stream->usage & GRALLOC_USAGE_HW_TEXTURE) {
                    destBuf.format = HAL_PIXEL_FORMAT_YCbCr_420_888;
                } else if (srcBuf.stream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) {
                    destBuf.format = HAL_PIXEL_FORMAT_YCbCr_420_888;
                } else if ((srcBuf.stream->usage & GRALLOC_USAGE_HW_CAMERA_MASK)
                           == GRALLOC_USAGE_HW_CAMERA_ZSL) {
                    destBuf.format = HAL_PIXEL_FORMAT_RGB_888;
                }
            }
        } else {
            destBuf.format = srcBuf.stream->format;
        }

        destBuf.stride = srcBuf.stream->width;
        destBuf.dataSpace = srcBuf.stream->data_space;
        destBuf.buffer = srcBuf.buffer;

        if (destBuf.format == HAL_PIXEL_FORMAT_BLOB) {
            needJpeg = true;
        }

        // Wait on fence.
        sp<Fence> bufferAcquireFence = new Fence(srcBuf.acquire_fence);
        res = bufferAcquireFence->wait(kFenceTimeoutMs);
        if (res == TIMED_OUT) {
            ALOGE("%s: Request %d: Buffer %zu: Fence timed out after %d ms",
                  __FUNCTION__, frameNumber, i, kFenceTimeoutMs);
        }
        if (res == OK) {
            // Lock buffer for writing.
            if (srcBuf.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
                if (destBuf.format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
                    android_ycbcr ycbcr = {};
                    res = mGBM->lockYCbCr(
                            *(destBuf.buffer),
                            GRALLOC_USAGE_HW_CAMERA_WRITE,
                            Rect(0, 0, destBuf.width, destBuf.height),
                            &ycbcr);
                    /*
                     * This is only valid because we know that emulator's
                     * YCbCr_420_888 is really contiguous NV21 under the hood.
                     */
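                    // That is, the chroma data is assumed to sit in the same
                    // allocation immediately after the Y plane, so the Y
                    // pointer alone addresses the entire frame.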
                    destBuf.img = static_cast<uint8_t*>(ycbcr.y);
                } else {
                    ALOGE("Unexpected private format for flexible YUV: 0x%x",
                          destBuf.format);
                    res = INVALID_OPERATION;
                }
            } else {
                res = mGBM->lock(
                        *(destBuf.buffer),
                        GRALLOC_USAGE_HW_CAMERA_WRITE,
                        Rect(0, 0, destBuf.width, destBuf.height),
                        (void**)&(destBuf.img));
            }
            if (res != OK) {
                ALOGE("%s: Request %d: Buffer %zu: Unable to lock buffer",
                      __FUNCTION__, frameNumber, i);
            }
        }

        if (res != OK) {
            /*
             * Either waiting or locking failed. Unlock locked buffers and bail
             * out.
             */
            for (size_t j = 0; j < i; j++) {
                mGBM->unlock(*(request->output_buffers[j].buffer));
            }
            delete sensorBuffers;
            delete buffers;
            return NO_INIT;
        }

        sensorBuffers->push_back(destBuf);
        buffers->push_back(srcBuf);
    }

    /*
     * Wait for JPEG compressor to not be busy, if needed.
     */
    if (needJpeg) {
        bool ready = mJpegCompressor->waitForDone(kJpegTimeoutNs);
        if (!ready) {
            ALOGE("%s: Timeout waiting for JPEG compression to complete!",
                  __FUNCTION__);
            return NO_INIT;
        }
        res = mJpegCompressor->reserve();
        if (res != OK) {
            ALOGE("%s: Error managing JPEG compressor resources, can't "
                  "reserve it!", __FUNCTION__);
            return NO_INIT;
        }
    }

    /*
     * TODO: We shouldn't need to wait for sensor readout with a webcam, because
     * we might be wasting time.
     */

    /*
     * Wait until the in-flight queue has room.
     */
    res = mReadoutThread->waitForReadout();
    if (res != OK) {
        ALOGE("%s: Timeout waiting for previous requests to complete!",
              __FUNCTION__);
        return NO_INIT;
    }

    /*
     * Wait until the sensor is ready. This waits for lengthy amounts of time
     * with mLock held, but the interface spec is that no other calls may be
     * made to the HAL by the framework while process_capture_request is
     * happening.
     */
    int syncTimeoutCount = 0;
    while (!mSensor->waitForVSync(kSyncWaitTimeout)) {
        if (mStatus == STATUS_ERROR) {
            return NO_INIT;
        }
        if (syncTimeoutCount == kMaxSyncTimeoutCount) {
            ALOGE("%s: Request %d: Sensor sync timed out after %" PRId64 " ms",
                  __FUNCTION__, frameNumber,
                  kSyncWaitTimeout * kMaxSyncTimeoutCount / 1000000);
            return NO_INIT;
        }
        syncTimeoutCount++;
    }

    /*
     * Configure sensor and queue up the request to the readout thread.
     */
    mSensor->setFrameDuration(frameDuration);
    mSensor->setDestinationBuffers(sensorBuffers);
    mSensor->setFrameNumber(request->frame_number);

    ReadoutThread::Request r;
    r.frameNumber = request->frame_number;
    r.settings = settings;
    r.sensorBuffers = sensorBuffers;
    r.buffers = buffers;

    mReadoutThread->queueCaptureRequest(r);
    ALOGVV("%s: Queued frame %d", __FUNCTION__, request->frame_number);

    // Cache the settings for next time.
    mPrevSettings.acquire(settings);

    return OK;
}

status_t EmulatedQemuCamera3::flush() {
    ALOGW("%s: Not implemented; ignored", __FUNCTION__);
    return OK;
}

/*****************************************************************************
 * Private Methods
 ****************************************************************************/

status_t EmulatedQemuCamera3::getCameraCapabilities() {
    const char *key = mFacingBack ? "qemu.sf.back_camera_caps" :
                                    "qemu.sf.front_camera_caps";

    /*
     * Defined by 'qemu.sf.*_camera_caps' boot property: if the property doesn't
     * exist, it is assumed to list FULL.
     */
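    // The property value is a comma- or space-separated list of capability
    // names matching sAvailableCapabilitiesStrings, compared case-insensitively
    // below.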
    char prop[PROPERTY_VALUE_MAX];
    if (property_get(key, prop, nullptr) > 0) {
        char *saveptr = nullptr;
        char *cap = strtok_r(prop, " ,", &saveptr);
        while (cap != nullptr) {
            for (int i = 0; i < NUM_CAPABILITIES; ++i) {
                if (!strcasecmp(cap, sAvailableCapabilitiesStrings[i])) {
                    mCapabilities.add(static_cast<AvailableCapabilities>(i));
                    break;
                }
            }
            cap = strtok_r(nullptr, " ,", &saveptr);
        }
        if (mCapabilities.size() == 0) {
            ALOGE("%s had no valid capabilities: %s", key, prop);
        }
    }

    mCapabilities.add(BACKWARD_COMPATIBLE);

    ALOGI("Camera %d capabilities:", mCameraID);
    for (size_t i = 0; i < mCapabilities.size(); ++i) {
        ALOGI("  %s", sAvailableCapabilitiesStrings[mCapabilities[i]]);
    }

    return OK;
}

bool EmulatedQemuCamera3::hasCapability(AvailableCapabilities cap) {
    ssize_t idx = mCapabilities.indexOf(cap);
    return idx >= 0;
}

status_t EmulatedQemuCamera3::constructStaticInfo() {
    CameraMetadata info;
    Vector<int32_t> availableCharacteristicsKeys;
    status_t res;

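// ADD_STATIC_ENTRY records every tag it sets in availableCharacteristicsKeys
// so the complete key list can be published as
// ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS at the end of this function.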
#define ADD_STATIC_ENTRY(name, varptr, count) \
    availableCharacteristicsKeys.add(name);   \
    res = info.update(name, varptr, count);   \
    if (res != OK) return res

    static const float sensorPhysicalSize[2] = {3.20f, 2.40f}; // mm
    ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
            sensorPhysicalSize, 2);

    const int32_t pixelArray[] = {mSensorWidth, mSensorHeight};
    ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
            pixelArray, 2);
    const int32_t activeArray[] = {0, 0, mSensorWidth, mSensorHeight};
    ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
            activeArray, 4);

    static const int32_t orientation = 90; // Aligned with 'long edge'.
    ADD_STATIC_ENTRY(ANDROID_SENSOR_ORIENTATION, &orientation, 1);

    static const uint8_t timestampSource =
            ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
    ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, &timestampSource, 1);

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const int32_t availableTestPatternModes[] = {
            ANDROID_SENSOR_TEST_PATTERN_MODE_OFF
        };
        ADD_STATIC_ENTRY(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
                availableTestPatternModes,
                sizeof(availableTestPatternModes) / sizeof(int32_t));
    }

    /* android.lens */

    static const float focalLengths = 5.0f; // mm
    ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
            &focalLengths, 1);

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        // infinity (fixed focus)
        static const float minFocusDistance = 0.0;
        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
                &minFocusDistance, 1);

        // (fixed focus)
        static const float hyperFocalDistance = 0.0;
        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
                &hyperFocalDistance, 1);

        static const float apertures = 2.8f;
        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
                &apertures, 1);
        static const float filterDensities = 0;
        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
                &filterDensities, 1);
        static const uint8_t availableOpticalStabilization =
                ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
                &availableOpticalStabilization, 1);

        static const int32_t lensShadingMapSize[] = {1, 1};
        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_SHADING_MAP_SIZE, lensShadingMapSize,
                sizeof(lensShadingMapSize) / sizeof(int32_t));

        static const uint8_t lensFocusCalibration =
                ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE;
        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
                &lensFocusCalibration, 1);
    }

    const uint8_t lensFacing = mFacingBack ?
            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
    ADD_STATIC_ENTRY(ANDROID_LENS_FACING, &lensFacing, 1);

    /* android.flash */

    static const uint8_t flashAvailable = 0;
    ADD_STATIC_ENTRY(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1);

    /* android.scaler */

    std::vector<int32_t> availableStreamConfigurations;
    std::vector<int64_t> availableMinFrameDurations;
    std::vector<int64_t> availableStallDurations;

    /*
     * Build stream configurations, min frame durations, and stall durations for
     * all resolutions reported by camera device.
     */
    for (const auto &res : mResolutions) {
        int32_t width = res.first, height = res.second;
        std::vector<int32_t> currentResStreamConfigurations = {
            HAL_PIXEL_FORMAT_BLOB, width, height,
            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,

            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, width, height,
            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,

            HAL_PIXEL_FORMAT_YCbCr_420_888, width, height,
            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,

            HAL_PIXEL_FORMAT_RGBA_8888, width, height,
            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
        };
        std::vector<int64_t> currentResMinFrameDurations = {
            HAL_PIXEL_FORMAT_BLOB, width, height,
            QemuSensor::kFrameDurationRange[0],

            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, width, height,
            QemuSensor::kFrameDurationRange[0],

            HAL_PIXEL_FORMAT_YCbCr_420_888, width, height,
            QemuSensor::kFrameDurationRange[0],

            HAL_PIXEL_FORMAT_RGBA_8888, width, height,
            QemuSensor::kFrameDurationRange[0]
        };
        std::vector<int64_t> currentResStallDurations = {
            // We should only introduce stall times with JPEG-compressed frames.
            HAL_PIXEL_FORMAT_BLOB, width, height,
            QemuSensor::kFrameDurationRange[0],

            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, width, height, 0,

            HAL_PIXEL_FORMAT_YCbCr_420_888, width, height, 0,

            HAL_PIXEL_FORMAT_RGBA_8888, width, height, 0
        };
        availableStreamConfigurations.insert(
                availableStreamConfigurations.end(),
                currentResStreamConfigurations.begin(),
                currentResStreamConfigurations.end());
        availableMinFrameDurations.insert(
                availableMinFrameDurations.end(),
                currentResMinFrameDurations.begin(),
                currentResMinFrameDurations.end());
        availableStallDurations.insert(
                availableStallDurations.end(),
                currentResStallDurations.begin(),
                currentResStallDurations.end());
    }

    /*
     * Now, if nonempty, add them to the camera's available characteristics.
     */
    if (availableStreamConfigurations.size() > 0) {
        ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
                availableStreamConfigurations.data(),
                availableStreamConfigurations.size());
    }
    if (availableMinFrameDurations.size() > 0) {
        ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
                &availableMinFrameDurations[0],
                availableMinFrameDurations.size());
    }
    if (availableStallDurations.size() > 0) {
        ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
                &availableStallDurations[0],
                availableStallDurations.size());
    }

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
        ADD_STATIC_ENTRY(ANDROID_SCALER_CROPPING_TYPE,
                &croppingType, 1);

        static const float maxZoom = 10;
        ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
                &maxZoom, 1);
    }

    /* android.jpeg */

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const int32_t jpegThumbnailSizes[] = {
            0, 0,
            160, 120,
            320, 240
        };
        ADD_STATIC_ENTRY(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
                jpegThumbnailSizes,
                sizeof(jpegThumbnailSizes) / sizeof(int32_t));

        static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize;
        ADD_STATIC_ENTRY(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);
    }

    /* android.stats */

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const uint8_t availableFaceDetectModes[] = {
            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF
        };
        ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
                availableFaceDetectModes,
                sizeof(availableFaceDetectModes));

        static const int32_t maxFaceCount = 0;
        ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
                &maxFaceCount, 1);

        static const uint8_t availableShadingMapModes[] = {
            ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF
        };
        ADD_STATIC_ENTRY(
                ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
                availableShadingMapModes, sizeof(availableShadingMapModes));
    }

    /* android.sync */

    const int32_t maxLatency =
            hasCapability(FULL_LEVEL) ?
            ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL : 3;
    ADD_STATIC_ENTRY(ANDROID_SYNC_MAX_LATENCY, &maxLatency, 1);

    /* android.control */

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        const uint8_t availableControlModes[] = {
            ANDROID_CONTROL_MODE_OFF,
            ANDROID_CONTROL_MODE_AUTO,
            ANDROID_CONTROL_MODE_USE_SCENE_MODE
        };
        ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_MODES,
                availableControlModes, sizeof(availableControlModes));
    } else {
        const uint8_t availableControlModes[] = {
            ANDROID_CONTROL_MODE_AUTO
        };
        ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_MODES,
                availableControlModes, sizeof(availableControlModes));
    }

    const uint8_t availableSceneModes[] = {
        hasCapability(BACKWARD_COMPATIBLE) ?
            ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY :
            ANDROID_CONTROL_SCENE_MODE_DISABLED
    };
    ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
            availableSceneModes, sizeof(availableSceneModes));

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const uint8_t availableEffects[] = {
            ANDROID_CONTROL_EFFECT_MODE_OFF
        };
        ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_EFFECTS,
                availableEffects, sizeof(availableEffects));
    }

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const int32_t max3aRegions[] = {
            /* AE */ 1,
            /* AWB */ 0,
            /* AF */ 1
        };
        ADD_STATIC_ENTRY(ANDROID_CONTROL_MAX_REGIONS,
                max3aRegions,
                sizeof(max3aRegions) / sizeof(max3aRegions[0]));

        static const uint8_t availableAeModes[] = {
            ANDROID_CONTROL_AE_MODE_OFF,
            ANDROID_CONTROL_AE_MODE_ON
        };
        ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_MODES,
                availableAeModes, sizeof(availableAeModes));

        static const camera_metadata_rational exposureCompensationStep = {1, 3};
        ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_COMPENSATION_STEP,
                &exposureCompensationStep, 1);

        static int32_t exposureCompensationRange[] = {-9, 9};
        ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
                exposureCompensationRange,
                sizeof(exposureCompensationRange) / sizeof(int32_t));
    }

    static const int32_t availableTargetFpsRanges[] = {
        5, 30, 15, 30, 15, 15, 30, 30
    };
    ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
            availableTargetFpsRanges,
            sizeof(availableTargetFpsRanges) / sizeof(int32_t));

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const uint8_t availableAntibandingModes[] = {
            ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
            ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO
        };
        ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
                availableAntibandingModes, sizeof(availableAntibandingModes));
    }

    static const uint8_t aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
    ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
            &aeLockAvailable, 1);

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const uint8_t availableAwbModes[] = {
            ANDROID_CONTROL_AWB_MODE_OFF,
            ANDROID_CONTROL_AWB_MODE_AUTO,
            ANDROID_CONTROL_AWB_MODE_INCANDESCENT,
            ANDROID_CONTROL_AWB_MODE_FLUORESCENT,
            ANDROID_CONTROL_AWB_MODE_DAYLIGHT,
            ANDROID_CONTROL_AWB_MODE_SHADE,
        };
        ADD_STATIC_ENTRY(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
                availableAwbModes, sizeof(availableAwbModes));
    }

    static const uint8_t awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
    ADD_STATIC_ENTRY(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
            &awbLockAvailable, 1);

    static const uint8_t availableAfModesBack[] = {
        ANDROID_CONTROL_AF_MODE_OFF
    };

    static const uint8_t availableAfModesFront[] = {
        ANDROID_CONTROL_AF_MODE_OFF
    };

    if (mFacingBack && hasCapability(BACKWARD_COMPATIBLE)) {
        ADD_STATIC_ENTRY(ANDROID_CONTROL_AF_AVAILABLE_MODES,
                availableAfModesBack, sizeof(availableAfModesBack));
    } else {
        ADD_STATIC_ENTRY(ANDROID_CONTROL_AF_AVAILABLE_MODES,
                availableAfModesFront, sizeof(availableAfModesFront));
    }

    static const uint8_t availableVstabModes[] = {
        ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF,
    };
    ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
            availableVstabModes, sizeof(availableVstabModes));

    /* android.colorCorrection */

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        const uint8_t availableAberrationModes[] = {
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY
        };
        ADD_STATIC_ENTRY(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
                availableAberrationModes, sizeof(availableAberrationModes));
    } else {
        const uint8_t availableAberrationModes[] = {
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
        };
        ADD_STATIC_ENTRY(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
                availableAberrationModes, sizeof(availableAberrationModes));
    }

    /* android.edge */

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        const uint8_t availableEdgeModes[] = {
            ANDROID_EDGE_MODE_OFF,
            ANDROID_EDGE_MODE_FAST,
            ANDROID_EDGE_MODE_HIGH_QUALITY,
        };
        ADD_STATIC_ENTRY(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
                availableEdgeModes, sizeof(availableEdgeModes));
    } else {
        const uint8_t availableEdgeModes[] = {
            ANDROID_EDGE_MODE_OFF
        };
        ADD_STATIC_ENTRY(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
                availableEdgeModes, sizeof(availableEdgeModes));
    }

    /* android.info */

    static const uint8_t supportedHardwareLevel =
            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
    ADD_STATIC_ENTRY(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
            &supportedHardwareLevel, /* count */ 1);

    /* android.noiseReduction */

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        const uint8_t availableNoiseReductionModes[] = {
            ANDROID_NOISE_REDUCTION_MODE_OFF,
            ANDROID_NOISE_REDUCTION_MODE_FAST,
            ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY
        };
        ADD_STATIC_ENTRY(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
                availableNoiseReductionModes,
                sizeof(availableNoiseReductionModes));
    } else {
        const uint8_t availableNoiseReductionModes[] = {
            ANDROID_NOISE_REDUCTION_MODE_OFF
        };
        ADD_STATIC_ENTRY(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
                availableNoiseReductionModes,
                sizeof(availableNoiseReductionModes));
    }

    /* android.shading */

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        const uint8_t availableShadingModes[] = {
            ANDROID_SHADING_MODE_OFF,
            ANDROID_SHADING_MODE_FAST,
            ANDROID_SHADING_MODE_HIGH_QUALITY
        };
        ADD_STATIC_ENTRY(ANDROID_SHADING_AVAILABLE_MODES, availableShadingModes,
                sizeof(availableShadingModes));
    } else {
        const uint8_t availableShadingModes[] = {
            ANDROID_SHADING_MODE_OFF
        };
        ADD_STATIC_ENTRY(ANDROID_SHADING_AVAILABLE_MODES, availableShadingModes,
                sizeof(availableShadingModes));
    }

    /* android.request */

    static const int32_t maxNumOutputStreams[] = {
        kMaxRawStreamCount, kMaxProcessedStreamCount, kMaxJpegStreamCount
    };
    ADD_STATIC_ENTRY(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
            maxNumOutputStreams, 3);

    static const uint8_t maxPipelineDepth = kMaxBufferCount;
    ADD_STATIC_ENTRY(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, &maxPipelineDepth, 1);

    static const int32_t partialResultCount = 1;
    ADD_STATIC_ENTRY(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
            &partialResultCount, /* count */ 1);

    SortedVector<uint8_t> caps;
    for (size_t i = 0; i < mCapabilities.size(); ++i) {
        switch (mCapabilities[i]) {
            case BACKWARD_COMPATIBLE:
                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
                break;
            case PRIVATE_REPROCESSING:
                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
                break;
            case READ_SENSOR_SETTINGS:
                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
                break;
            case BURST_CAPTURE:
                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
                break;
            case YUV_REPROCESSING:
                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
                break;
            case CONSTRAINED_HIGH_SPEED_VIDEO:
                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
                break;
            default:
                // Ignore LEVELs.
                break;
        }
    }
    ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, caps.array(), caps.size());

    // Scan a default request template for included request keys.
    Vector<int32_t> availableRequestKeys;
    const camera_metadata_t *previewRequest =
            constructDefaultRequestSettings(CAMERA3_TEMPLATE_PREVIEW);
    for (size_t i = 0; i < get_camera_metadata_entry_count(previewRequest); ++i) {
        camera_metadata_ro_entry_t entry;
        get_camera_metadata_ro_entry(previewRequest, i, &entry);
        availableRequestKeys.add(entry.tag);
    }
    ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, availableRequestKeys.array(),
            availableRequestKeys.size());

    /*
     * Add a few more result keys. Must be kept up to date with the various
     * places that add these.
     */

    Vector<int32_t> availableResultKeys(availableRequestKeys);
    if (hasCapability(BACKWARD_COMPATIBLE)) {
        availableResultKeys.add(ANDROID_CONTROL_AE_STATE);
        availableResultKeys.add(ANDROID_CONTROL_AF_STATE);
        availableResultKeys.add(ANDROID_CONTROL_AWB_STATE);
        availableResultKeys.add(ANDROID_FLASH_STATE);
        availableResultKeys.add(ANDROID_LENS_STATE);
        availableResultKeys.add(ANDROID_LENS_FOCUS_RANGE);
        availableResultKeys.add(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW);
        availableResultKeys.add(ANDROID_STATISTICS_SCENE_FLICKER);
    }

    availableResultKeys.add(ANDROID_REQUEST_PIPELINE_DEPTH);
    availableResultKeys.add(ANDROID_SENSOR_TIMESTAMP);

    ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, availableResultKeys.array(),
            availableResultKeys.size());

    // Needs to be last, to collect all the keys set.

    availableCharacteristicsKeys.add(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
    info.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
            availableCharacteristicsKeys);

    mCameraInfo = info.release();

#undef ADD_STATIC_ENTRY
    return OK;
}

status_t EmulatedQemuCamera3::process3A(CameraMetadata &settings) {
    /**
     * Extract top-level 3A controls
     */
    status_t res;

    camera_metadata_entry e;

    e = settings.find(ANDROID_CONTROL_MODE);
    if (e.count == 0) {
        ALOGE("%s: No control mode entry!", __FUNCTION__);
        return BAD_VALUE;
    }
    uint8_t controlMode = e.data.u8[0];

    if (controlMode == ANDROID_CONTROL_MODE_OFF) {
        mAeMode = ANDROID_CONTROL_AE_MODE_OFF;
        mAfMode = ANDROID_CONTROL_AF_MODE_OFF;
        mAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
        mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
        mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
        mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
        update3A(settings);
        return OK;
    } else if (controlMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
        if (!hasCapability(BACKWARD_COMPATIBLE)) {
            ALOGE("%s: Can't use scene mode when BACKWARD_COMPATIBLE not supported!",
                  __FUNCTION__);
            return BAD_VALUE;
        }

        e = settings.find(ANDROID_CONTROL_SCENE_MODE);
        if (e.count == 0) {
            ALOGE("%s: No scene mode entry!", __FUNCTION__);
            return BAD_VALUE;
        }
        uint8_t sceneMode = e.data.u8[0];

        switch (sceneMode) {
            case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY:
                mFacePriority = true;
                break;
            default:
                ALOGE("%s: Emulator doesn't support scene mode %d",
                      __FUNCTION__, sceneMode);
                return BAD_VALUE;
        }
    } else {
        mFacePriority = false;
    }

    // controlMode == AUTO or sceneMode == FACE_PRIORITY:
    // process the individual 3A controls.

    res = doFakeAE(settings);
    if (res != OK) return res;

    res = doFakeAF(settings);
    if (res != OK) return res;

    res = doFakeAWB(settings);
    if (res != OK) return res;

    update3A(settings);
    return OK;
}

doFakeAE(CameraMetadata & settings)1694 status_t EmulatedQemuCamera3::doFakeAE(CameraMetadata &settings) {
1695 camera_metadata_entry e;
1696
1697 e = settings.find(ANDROID_CONTROL_AE_MODE);
1698 if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
1699 ALOGE("%s: No AE mode entry!", __FUNCTION__);
1700 return BAD_VALUE;
1701 }
1702 uint8_t aeMode = (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AE_MODE_ON;
1703 mAeMode = aeMode;
1704
1705 switch (aeMode) {
1706 case ANDROID_CONTROL_AE_MODE_OFF:
1707 // AE is OFF
1708 mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
1709 return OK;
1710 case ANDROID_CONTROL_AE_MODE_ON:
1711 // OK for AUTO modes
1712 break;
1713 default:
1714 // Mostly silently ignore unsupported modes
1715 ALOGV("%s: Emulator doesn't support AE mode %d, assuming ON",
1716 __FUNCTION__, aeMode);
1717 break;
1718 }
1719
1720 e = settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER);
1721 bool precaptureTrigger = false;
1722 if (e.count != 0) {
1723 precaptureTrigger =
1724 (e.data.u8[0] == ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START);
1725 }
1726
1727 if (precaptureTrigger) {
1728 ALOGV("%s: Pre capture trigger = %d", __FUNCTION__, precaptureTrigger);
1729 } else if (e.count > 0) {
1730         ALOGV("%s: Pre-capture trigger present but not START (count %zu)",
1731                 __FUNCTION__, e.count);
1732 }
1733
1734 if (precaptureTrigger || mAeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
1735 // Run precapture sequence
1736 if (mAeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
1737 mAeCounter = 0;
1738 }
1739
1740 if (mFacePriority) {
1741 mAeTargetExposureTime = kFacePriorityExposureTime;
1742 } else {
1743 mAeTargetExposureTime = kNormalExposureTime;
1744 }
1745
1746 if (mAeCounter > kPrecaptureMinFrames &&
1747 (mAeTargetExposureTime - mAeCurrentExposureTime) <
1748 mAeTargetExposureTime / 10) {
1749 // Done with precapture
1750 mAeCounter = 0;
1751 mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
1752 } else {
1753 // Converge some more
1754 mAeCurrentExposureTime +=
1755 (mAeTargetExposureTime - mAeCurrentExposureTime) *
1756 kExposureTrackRate;
1757 mAeCounter++;
1758 mAeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE;
1759 }
1760 }
1761 else {
1762 mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
1763 }
1764
1765 return OK;
1766 }
1767
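/*
 * Fake auto-focus: the emulated camera has no movable lens, so every
 * supported AF mode simply reports ANDROID_CONTROL_AF_STATE_INACTIVE.
 */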
1768 status_t EmulatedQemuCamera3::doFakeAF(CameraMetadata &settings) {
1769 camera_metadata_entry e;
1770
1771 e = settings.find(ANDROID_CONTROL_AF_MODE);
1772 if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
1773 ALOGE("%s: No AF mode entry!", __FUNCTION__);
1774 return BAD_VALUE;
1775 }
1776 uint8_t afMode = (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AF_MODE_OFF;
1777
1778 switch (afMode) {
1779 case ANDROID_CONTROL_AF_MODE_OFF:
1780 case ANDROID_CONTROL_AF_MODE_AUTO:
1781 case ANDROID_CONTROL_AF_MODE_MACRO:
1782 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
1783 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
1784 // Always report INACTIVE for Qemu Camera
1785 mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
1786 break;
1787 default:
1788 ALOGE("%s: Emulator doesn't support AF mode %d",
1789 __FUNCTION__, afMode);
1790 return BAD_VALUE;
1791 }
1792
1793 return OK;
1794 }
1795
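/*
 * Fake auto-white-balance: no color processing is simulated, so every
 * supported AWB mode immediately reports CONVERGED.
 */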
1796 status_t EmulatedQemuCamera3::doFakeAWB(CameraMetadata &settings) {
1797 camera_metadata_entry e;
1798
1799 e = settings.find(ANDROID_CONTROL_AWB_MODE);
1800 if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
1801 ALOGE("%s: No AWB mode entry!", __FUNCTION__);
1802 return BAD_VALUE;
1803 }
1804 uint8_t awbMode = (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AWB_MODE_AUTO;
1805
1806 // TODO: Add white balance simulation
1807
1808 switch (awbMode) {
1809 case ANDROID_CONTROL_AWB_MODE_OFF:
1810 case ANDROID_CONTROL_AWB_MODE_AUTO:
1811 case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
1812 case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
1813 case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
1814 case ANDROID_CONTROL_AWB_MODE_SHADE:
1815 // Always magically right for Qemu Camera
1816 mAwbState = ANDROID_CONTROL_AWB_STATE_CONVERGED;
1817 break;
1818 default:
1819 ALOGE("%s: Emulator doesn't support AWB mode %d",
1820 __FUNCTION__, awbMode);
1821 return BAD_VALUE;
1822 }
1823
1824 return OK;
1825 }
1826
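/*
 * Publishes the current 3A results into the request metadata: exposure time
 * and sensitivity (unless AE is OFF), the AE/AF/AWB state machines, and a
 * lens state derived from the AF state (MOVING while scanning, STATIONARY
 * otherwise).
 */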
1827 void EmulatedQemuCamera3::update3A(CameraMetadata &settings) {
1828 if (mAeMode != ANDROID_CONTROL_AE_MODE_OFF) {
1829 settings.update(ANDROID_SENSOR_EXPOSURE_TIME,
1830 &mAeCurrentExposureTime, 1);
1831 settings.update(ANDROID_SENSOR_SENSITIVITY,
1832 &mAeCurrentSensitivity, 1);
1833 }
1834
1835 settings.update(ANDROID_CONTROL_AE_STATE,
1836 &mAeState, 1);
1837 settings.update(ANDROID_CONTROL_AF_STATE,
1838 &mAfState, 1);
1839 settings.update(ANDROID_CONTROL_AWB_STATE,
1840 &mAwbState, 1);
1841
1842 uint8_t lensState;
1843 switch (mAfState) {
1844 case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
1845 case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
1846 lensState = ANDROID_LENS_STATE_MOVING;
1847 break;
1848 case ANDROID_CONTROL_AF_STATE_INACTIVE:
1849 case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
1850 case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
1851 case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
1852 case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
1853 default:
1854 lensState = ANDROID_LENS_STATE_STATIONARY;
1855 break;
1856 }
1857 settings.update(ANDROID_LENS_STATE, &lensState, 1);
1858 }
1859
1860 void EmulatedQemuCamera3::signalReadoutIdle() {
1861 Mutex::Autolock l(mLock);
1862 /*
1863 * Need to check isIdle again because waiting on mLock may have allowed
1864 * something to be placed in the in-flight queue.
1865 */
1866 if (mStatus == STATUS_ACTIVE && mReadoutThread->isIdle()) {
1867 ALOGV("Now idle");
1868 mStatus = STATUS_READY;
1869 }
1870 }
1871
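/*
 * Sensor event callback. On EXPOSURE_START this forwards a SHUTTER
 * notification to the framework; the camera3 contract expects the shutter
 * notify (carrying the start-of-exposure timestamp) to precede the capture
 * result for the same frame number.
 */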
1872 void EmulatedQemuCamera3::onQemuSensorEvent(uint32_t frameNumber, Event e,
1873 nsecs_t timestamp) {
1874 switch (e) {
1875 case QemuSensor::QemuSensorListener::EXPOSURE_START:
1876             ALOGVV("%s: Frame %u: Sensor started exposure at %" PRId64,
1877                     __FUNCTION__, frameNumber, timestamp);
1878 // Trigger shutter notify to framework.
1879 camera3_notify_msg_t msg;
1880 msg.type = CAMERA3_MSG_SHUTTER;
1881 msg.message.shutter.frame_number = frameNumber;
1882 msg.message.shutter.timestamp = timestamp;
1883 sendNotify(&msg);
1884 break;
1885 default:
1886 ALOGW("%s: Unexpected sensor event %d at %" PRId64, __FUNCTION__,
1887 e, timestamp);
1888 break;
1889 }
1890 }
1891
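/*
 * ReadoutThread: drains the in-flight request queue asynchronously so that
 * request submission can return quickly. For each queued request it waits
 * for the sensor frame, hands any BLOB (JPEG) buffer to the async
 * compressor, and assembles the camera3_capture_result for the framework.
 */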
1892 EmulatedQemuCamera3::ReadoutThread::ReadoutThread(EmulatedQemuCamera3 *parent) :
1893 mParent(parent), mJpegWaiting(false) {
1894 ALOGV("%s: Creating readout thread", __FUNCTION__);
1895 }
1896
1897 EmulatedQemuCamera3::ReadoutThread::~ReadoutThread() {
1898 for (List<Request>::iterator i = mInFlightQueue.begin();
1899 i != mInFlightQueue.end(); ++i) {
1900 delete i->buffers;
1901 delete i->sensorBuffers;
1902 }
1903 }
1904
1905 void EmulatedQemuCamera3::ReadoutThread::queueCaptureRequest(const Request &r) {
1906 Mutex::Autolock l(mLock);
1907
1908 mInFlightQueue.push_back(r);
1909 mInFlightSignal.signal();
1910 }
1911
1912 bool EmulatedQemuCamera3::ReadoutThread::isIdle() {
1913 Mutex::Autolock l(mLock);
1914 return mInFlightQueue.empty() && !mThreadActive;
1915 }
1916
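/*
 * Backpressure for the submitting thread: blocks until the in-flight queue
 * drops below kMaxQueueSize, polling in kWaitPerLoop slices and giving up
 * with TIMED_OUT after kMaxWaitLoops iterations.
 */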
1917 status_t EmulatedQemuCamera3::ReadoutThread::waitForReadout() {
1918 status_t res;
1919 Mutex::Autolock l(mLock);
1920 int loopCount = 0;
1921 while (mInFlightQueue.size() >= kMaxQueueSize) {
1922 res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
1923 if (res != OK && res != TIMED_OUT) {
1924 ALOGE("%s: Error waiting for in-flight queue to shrink",
1925 __FUNCTION__);
1926 return INVALID_OPERATION;
1927 }
1928 if (loopCount == kMaxWaitLoops) {
1929 ALOGE("%s: Timed out waiting for in-flight queue to shrink",
1930 __FUNCTION__);
1931 return TIMED_OUT;
1932 }
1933 loopCount++;
1934 }
1935 return OK;
1936 }
1937
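/*
 * Main readout loop, run once per iteration by the Thread framework:
 *   1. Pop the next request from the in-flight queue (or wait for one).
 *   2. Wait for the sensor to finish capturing the corresponding frame.
 *   3. Route BLOB buffers to the JPEG compressor (returned later via
 *      onJpegDone) and mark all other buffers OK or ERROR.
 *   4. Fill in the dynamic result metadata and send the capture result.
 * Returning true keeps the thread running.
 */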
1938 bool EmulatedQemuCamera3::ReadoutThread::threadLoop() {
1939 status_t res;
1940
1941 ALOGVV("%s: ReadoutThread waiting for request", __FUNCTION__);
1942
1943 // First wait for a request from the in-flight queue.
1944
1945 if (mCurrentRequest.settings.isEmpty()) {
1946 Mutex::Autolock l(mLock);
1947 if (mInFlightQueue.empty()) {
1948 res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
1949 if (res == TIMED_OUT) {
1950 ALOGVV("%s: ReadoutThread: Timed out waiting for request",
1951 __FUNCTION__);
1952 return true;
1953 } else if (res != NO_ERROR) {
1954 ALOGE("%s: Error waiting for capture requests: %d",
1955 __FUNCTION__, res);
1956 return false;
1957 }
1958 }
1959 mCurrentRequest.frameNumber = mInFlightQueue.begin()->frameNumber;
1960 mCurrentRequest.settings.acquire(mInFlightQueue.begin()->settings);
1961 mCurrentRequest.buffers = mInFlightQueue.begin()->buffers;
1962 mCurrentRequest.sensorBuffers = mInFlightQueue.begin()->sensorBuffers;
1963 mInFlightQueue.erase(mInFlightQueue.begin());
1964 mInFlightSignal.signal();
1965 mThreadActive = true;
1966 ALOGVV("%s: Beginning readout of frame %d", __FUNCTION__,
1967 mCurrentRequest.frameNumber);
1968 }
1969
1970 // Then wait for it to be delivered from the sensor.
1971 ALOGVV("%s: ReadoutThread: Wait for frame to be delivered from sensor",
1972 __FUNCTION__);
1973
1974 nsecs_t captureTime;
1975 bool gotFrame =
1976 mParent->mSensor->waitForNewFrame(kWaitPerLoop, &captureTime);
1977 if (!gotFrame) {
1978 ALOGVV("%s: ReadoutThread: Timed out waiting for sensor frame",
1979 __FUNCTION__);
1980 return true;
1981 }
1982
1983     ALOGVV("Sensor done with readout for frame %u, captured at %" PRId64,
1984             mCurrentRequest.frameNumber, captureTime);
1985
1986 /*
1987 * Check if we need to JPEG encode a buffer, and send it for async
1988 * compression if so. Otherwise prepare the buffer for return.
1989 */
1990 bool needJpeg = false;
1991 HalBufferVector::iterator buf = mCurrentRequest.buffers->begin();
1992 while (buf != mCurrentRequest.buffers->end()) {
1993 bool goodBuffer = true;
1994 if (buf->stream->format == HAL_PIXEL_FORMAT_BLOB &&
1995 buf->stream->data_space != HAL_DATASPACE_DEPTH) {
1996 Mutex::Autolock jl(mJpegLock);
1997 if (mJpegWaiting) {
1998 /*
1999 * This shouldn't happen, because processCaptureRequest should
2000 * be stalling until JPEG compressor is free.
2001 */
2002 ALOGE("%s: Already processing a JPEG!", __FUNCTION__);
2003 goodBuffer = false;
2004 }
2005 if (goodBuffer) {
2006 // Compressor takes ownership of sensorBuffers here.
2007 res = mParent->mJpegCompressor->start(mCurrentRequest.sensorBuffers,
2008 this, &(mCurrentRequest.settings));
2009 goodBuffer = (res == OK);
2010 }
2011 if (goodBuffer) {
2012 needJpeg = true;
2013
2014 mJpegHalBuffer = *buf;
2015 mJpegFrameNumber = mCurrentRequest.frameNumber;
2016 mJpegWaiting = true;
2017
2018 mCurrentRequest.sensorBuffers = nullptr;
2019 buf = mCurrentRequest.buffers->erase(buf);
2020
2021 continue;
2022 }
2023 ALOGE("%s: Error compressing output buffer: %s (%d)",
2024 __FUNCTION__, strerror(-res), res);
2025 // Fallthrough for cleanup.
2026 }
2027 mParent->mGBM->unlock(*(buf->buffer));
2028
2029 buf->status = goodBuffer ? CAMERA3_BUFFER_STATUS_OK :
2030 CAMERA3_BUFFER_STATUS_ERROR;
2031 buf->acquire_fence = -1;
2032 buf->release_fence = -1;
2033
2034 ++buf;
2035 }
2036
2037     // Construct the capture result with all completed buffers and the result metadata.
2038
2039 camera3_capture_result result;
2040
2041 if (mParent->hasCapability(BACKWARD_COMPATIBLE)) {
2042 static const uint8_t sceneFlicker =
2043 ANDROID_STATISTICS_SCENE_FLICKER_NONE;
2044 mCurrentRequest.settings.update(ANDROID_STATISTICS_SCENE_FLICKER,
2045 &sceneFlicker, 1);
2046
2047 static const uint8_t flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
2048 mCurrentRequest.settings.update(ANDROID_FLASH_STATE,
2049 &flashState, 1);
2050
2051 nsecs_t rollingShutterSkew = 0;
2052 mCurrentRequest.settings.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
2053 &rollingShutterSkew, 1);
2054
2055 float focusRange[] = { 1.0f / 5.0f, 0 }; // 5 m to infinity in focus
2056 mCurrentRequest.settings.update(ANDROID_LENS_FOCUS_RANGE, focusRange,
2057 sizeof(focusRange) / sizeof(float));
2058 }
2059
2060 mCurrentRequest.settings.update(ANDROID_SENSOR_TIMESTAMP,
2061 &captureTime, 1);
2062
2063
2064     // JPEG requests pass through one extra pipeline stage (compression).
2065 const uint8_t pipelineDepth = needJpeg ? kMaxBufferCount : kMaxBufferCount - 1;
2066 mCurrentRequest.settings.update(ANDROID_REQUEST_PIPELINE_DEPTH,
2067 &pipelineDepth, 1);
2068
2069 result.frame_number = mCurrentRequest.frameNumber;
2070 result.result = mCurrentRequest.settings.getAndLock();
2071 result.num_output_buffers = mCurrentRequest.buffers->size();
2072 result.output_buffers = mCurrentRequest.buffers->array();
2073 result.input_buffer = nullptr;
2074 result.partial_result = 1;
2075
2076 // Go idle if queue is empty, before sending result.
2077 bool signalIdle = false;
2078 {
2079 Mutex::Autolock l(mLock);
2080 if (mInFlightQueue.empty()) {
2081 mThreadActive = false;
2082 signalIdle = true;
2083 }
2084 }
2085 if (signalIdle) mParent->signalReadoutIdle();
2086
2087 // Send it off to the framework.
2088 ALOGVV("%s: ReadoutThread: Send result to framework",
2089 __FUNCTION__);
2090 mParent->sendCaptureResult(&result);
2091
2092 // Clean up.
2093 mCurrentRequest.settings.unlock(result.result);
2094
2095 delete mCurrentRequest.buffers;
2096 mCurrentRequest.buffers = nullptr;
2097 if (!needJpeg) {
2098 delete mCurrentRequest.sensorBuffers;
2099 mCurrentRequest.sensorBuffers = nullptr;
2100 }
2101 mCurrentRequest.settings.clear();
2102
2103 return true;
2104 }
2105
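/*
 * JpegCompressor completion callback: returns the compressed (or failed)
 * BLOB buffer to the framework as a buffer-only capture result. The metadata
 * for this frame was already delivered by threadLoop(), so result.result is
 * null and partial_result is 0.
 */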
2106 void EmulatedQemuCamera3::ReadoutThread::onJpegDone(
2107 const StreamBuffer &jpegBuffer, bool success) {
2108 Mutex::Autolock jl(mJpegLock);
2109
2110 mParent->mGBM->unlock(*(jpegBuffer.buffer));
2111
2112 mJpegHalBuffer.status = success ?
2113 CAMERA3_BUFFER_STATUS_OK : CAMERA3_BUFFER_STATUS_ERROR;
2114 mJpegHalBuffer.acquire_fence = -1;
2115 mJpegHalBuffer.release_fence = -1;
2116 mJpegWaiting = false;
2117
2118 camera3_capture_result result;
2119
2120 result.frame_number = mJpegFrameNumber;
2121 result.result = nullptr;
2122 result.num_output_buffers = 1;
2123 result.output_buffers = &mJpegHalBuffer;
2124 result.input_buffer = nullptr;
2125 result.partial_result = 0;
2126
2127 if (!success) {
2128 ALOGE("%s: Compression failure, returning error state buffer to"
2129 " framework", __FUNCTION__);
2130 } else {
2131 ALOGV("%s: Compression complete, returning buffer to framework",
2132 __FUNCTION__);
2133 }
2134
2135 mParent->sendCaptureResult(&result);
2136 }
2137
2138 void EmulatedQemuCamera3::ReadoutThread::onJpegInputDone(
2139 const StreamBuffer &inputBuffer) {
2140 /*
2141 * Should never get here, since the input buffer has to be returned by end
2142 * of processCaptureRequest.
2143 */
2144 ALOGE("%s: Unexpected input buffer from JPEG compressor!", __FUNCTION__);
2145 }
2146
2147 } // end of namespace android
2148