1 /*
2  * Copyright (C) 2013 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 /*
18  * Contains implementation of a class EmulatedFakeCamera3 that encapsulates
19  * functionality of an advanced fake camera.
20  */
21 
22 #include <inttypes.h>
23 
24 //#define LOG_NDEBUG 0
25 //#define LOG_NNDEBUG 0
26 #define LOG_TAG "EmulatedCamera_FakeCamera3"
27 #include <cutils/properties.h>
28 #include <log/log.h>
29 
30 #include "EmulatedFakeCamera3.h"
31 #include "EmulatedCameraFactory.h"
32 #include <ui/Fence.h>
33 #include <ui/Rect.h>
34 
35 #include "fake-pipeline2/Sensor.h"
36 #include "fake-pipeline2/JpegCompressor.h"
37 #include <cmath>
38 
39 #include <vector>
40 #include <algorithm>
41 
42 #if defined(LOG_NNDEBUG) && LOG_NNDEBUG == 0
43 #define ALOGVV ALOGV
44 #else
45 #define ALOGVV(...) ((void)0)
46 #endif
47 
48 namespace android {
49 
50 /**
51  * Constants for camera capabilities
52  */
53 
54 const int64_t USEC = 1000LL;
55 const int64_t MSEC = USEC * 1000LL;
56 
57 const int32_t EmulatedFakeCamera3::kAvailableFormats[] = {
58         HAL_PIXEL_FORMAT_RAW16,
59         HAL_PIXEL_FORMAT_BLOB,
60         HAL_PIXEL_FORMAT_RGBA_8888,
61         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
62         // These are handled by YCbCr_420_888
63         //        HAL_PIXEL_FORMAT_YV12,
64         //        HAL_PIXEL_FORMAT_YCrCb_420_SP,
65         HAL_PIXEL_FORMAT_YCbCr_420_888,
66         HAL_PIXEL_FORMAT_Y16
67 };
68 
69 const uint32_t EmulatedFakeCamera3::kAvailableRawSizes[6] = {
70     640, 480,
71     1280, 720,
72     1920, 1080
73     //    mSensorWidth, mSensorHeight
74 };
75 
76 /**
77  * 3A constants
78  */
79 
80 // Default exposure and gain targets for different scenarios
81 const nsecs_t EmulatedFakeCamera3::kNormalExposureTime       = 10 * MSEC;
82 const nsecs_t EmulatedFakeCamera3::kFacePriorityExposureTime = 30 * MSEC;
83 const int     EmulatedFakeCamera3::kNormalSensitivity        = 100;
84 const int     EmulatedFakeCamera3::kFacePrioritySensitivity  = 400;
85 //CTS requires 8 frames timeout in waitForAeStable
86 const float   EmulatedFakeCamera3::kExposureTrackRate        = 0.2;
87 const int     EmulatedFakeCamera3::kPrecaptureMinFrames      = 10;
88 const int     EmulatedFakeCamera3::kStableAeMaxFrames        = 100;
89 const float   EmulatedFakeCamera3::kExposureWanderMin        = -2;
90 const float   EmulatedFakeCamera3::kExposureWanderMax        = 1;
91 
92 /**
93  * Camera device lifecycle methods
94  */
95 
EmulatedFakeCamera3(int cameraId,bool facingBack,struct hw_module_t * module,GraphicBufferMapper * gbm)96 EmulatedFakeCamera3::EmulatedFakeCamera3(int cameraId, bool facingBack,
97         struct hw_module_t* module, GraphicBufferMapper* gbm) :
98         EmulatedCamera3(cameraId, module),
99         mFacingBack(facingBack), mGBM(gbm) {
100     ALOGI("Constructing emulated fake camera 3: ID %d, facing %s",
101             mCameraID, facingBack ? "back" : "front");
102 
103     for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
104         mDefaultTemplates[i] = NULL;
105     }
106 }
107 
~EmulatedFakeCamera3()108 EmulatedFakeCamera3::~EmulatedFakeCamera3() {
109     for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
110         if (mDefaultTemplates[i] != NULL) {
111             free_camera_metadata(mDefaultTemplates[i]);
112         }
113     }
114 }
115 
Initialize()116 status_t EmulatedFakeCamera3::Initialize() {
117     ALOGV("%s: E", __FUNCTION__);
118     status_t res;
119 
120     if (mStatus != STATUS_ERROR) {
121         ALOGE("%s: Already initialized!", __FUNCTION__);
122         return INVALID_OPERATION;
123     }
124 
125     res = getCameraCapabilities();
126     if (res != OK) {
127         ALOGE("%s: Unable to get camera capabilities: %s (%d)",
128                 __FUNCTION__, strerror(-res), res);
129         return res;
130     }
131 
132     res = constructStaticInfo();
133     if (res != OK) {
134         ALOGE("%s: Unable to allocate static info: %s (%d)",
135                 __FUNCTION__, strerror(-res), res);
136         return res;
137     }
138 
139     return EmulatedCamera3::Initialize();
140 }
141 
connectCamera(hw_device_t ** device)142 status_t EmulatedFakeCamera3::connectCamera(hw_device_t** device) {
143     ALOGV("%s: E", __FUNCTION__);
144     Mutex::Autolock l(mLock);
145     status_t res;
146 
147     if (mStatus != STATUS_CLOSED) {
148         ALOGE("%s: Can't connect in state %d", __FUNCTION__, mStatus);
149         return INVALID_OPERATION;
150     }
151 
152     mSensor = new Sensor(mSensorWidth, mSensorHeight);
153     mSensor->setSensorListener(this);
154 
155     res = mSensor->startUp();
156     if (res != NO_ERROR) return res;
157 
158     mReadoutThread = new ReadoutThread(this);
159     mJpegCompressor = new JpegCompressor(mGBM);
160 
161     res = mReadoutThread->run("EmuCam3::readoutThread");
162     if (res != NO_ERROR) return res;
163 
164     // Initialize fake 3A
165 
166     mControlMode  = ANDROID_CONTROL_MODE_AUTO;
167     mFacePriority = false;
168     mAeMode       = ANDROID_CONTROL_AE_MODE_ON;
169     mAfMode       = ANDROID_CONTROL_AF_MODE_AUTO;
170     mAwbMode      = ANDROID_CONTROL_AWB_MODE_AUTO;
171     mAeState      = ANDROID_CONTROL_AE_STATE_INACTIVE;
172     mAfState      = ANDROID_CONTROL_AF_STATE_INACTIVE;
173     mAwbState     = ANDROID_CONTROL_AWB_STATE_INACTIVE;
174     mAeCounter    = 0;
175     mAeTargetExposureTime = kNormalExposureTime;
176     mAeCurrentExposureTime = kNormalExposureTime;
177     mAeCurrentSensitivity  = kNormalSensitivity;
178 
179     return EmulatedCamera3::connectCamera(device);
180 }
181 
closeCamera()182 status_t EmulatedFakeCamera3::closeCamera() {
183     ALOGV("%s: E", __FUNCTION__);
184     status_t res;
185     {
186         Mutex::Autolock l(mLock);
187         if (mStatus == STATUS_CLOSED) return OK;
188 
189         res = mSensor->shutDown();
190         if (res != NO_ERROR) {
191             ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
192             return res;
193         }
194         mSensor.clear();
195 
196         mReadoutThread->requestExit();
197     }
198 
199     mReadoutThread->join();
200 
201     {
202         Mutex::Autolock l(mLock);
203         // Clear out private stream information
204         for (StreamIterator s = mStreams.begin(); s != mStreams.end(); s++) {
205             PrivateStreamInfo *privStream =
206                     static_cast<PrivateStreamInfo*>((*s)->priv);
207             delete privStream;
208             (*s)->priv = NULL;
209         }
210         mStreams.clear();
211         mReadoutThread.clear();
212     }
213 
214     return EmulatedCamera3::closeCamera();
215 }
216 
getCameraInfo(struct camera_info * info)217 status_t EmulatedFakeCamera3::getCameraInfo(struct camera_info *info) {
218     info->facing = mFacingBack ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT;
219     info->orientation = gEmulatedCameraFactory.getFakeCameraOrientation();
220     return EmulatedCamera3::getCameraInfo(info);
221 }
222 
223 /**
224  * Camera3 interface methods
225  */
226 
configureStreams(camera3_stream_configuration * streamList)227 status_t EmulatedFakeCamera3::configureStreams(
228         camera3_stream_configuration *streamList) {
229     Mutex::Autolock l(mLock);
230     ALOGV("%s: %d streams", __FUNCTION__, streamList->num_streams);
231 
232     if (mStatus != STATUS_OPEN && mStatus != STATUS_READY) {
233         ALOGE("%s: Cannot configure streams in state %d",
234                 __FUNCTION__, mStatus);
235         return NO_INIT;
236     }
237 
238     /**
239      * Sanity-check input list.
240      */
241     if (streamList == NULL) {
242         ALOGE("%s: NULL stream configuration", __FUNCTION__);
243         return BAD_VALUE;
244     }
245 
246     if (streamList->streams == NULL) {
247         ALOGE("%s: NULL stream list", __FUNCTION__);
248         return BAD_VALUE;
249     }
250 
251     if (streamList->num_streams < 1) {
252         ALOGE("%s: Bad number of streams requested: %d", __FUNCTION__,
253                 streamList->num_streams);
254         return BAD_VALUE;
255     }
256 
257     camera3_stream_t *inputStream = NULL;
258     for (size_t i = 0; i < streamList->num_streams; i++) {
259         camera3_stream_t *newStream = streamList->streams[i];
260 
261         if (newStream == NULL) {
262             ALOGE("%s: Stream index %zu was NULL",
263                   __FUNCTION__, i);
264             return BAD_VALUE;
265         }
266 
267         ALOGV("%s: Stream %p (id %zu), type %d, usage 0x%x, format 0x%x "
268               "width 0x%x, height 0x%x",
269                 __FUNCTION__, newStream, i, newStream->stream_type,
270                 newStream->usage,
271                 newStream->format,
272                 newStream->width,
273                 newStream->height);
274 
275         if (newStream->stream_type == CAMERA3_STREAM_INPUT ||
276             newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
277             if (inputStream != NULL) {
278 
279                 ALOGE("%s: Multiple input streams requested!", __FUNCTION__);
280                 return BAD_VALUE;
281             }
282             inputStream = newStream;
283         }
284 
285         if (newStream->stream_type != CAMERA3_STREAM_INPUT) {
286             if (newStream->rotation < CAMERA3_STREAM_ROTATION_0 ||
287                 newStream->rotation > CAMERA3_STREAM_ROTATION_270) {
288                 ALOGE("%s: Unsupported stream rotation 0x%x requested",
289                       __FUNCTION__, newStream->rotation);
290                 return BAD_VALUE;
291             }
292         }
293 
294         if (newStream->width == 0 || newStream->height == 0 ||
295             newStream->width > (uint32_t)mSensorWidth ||
296             newStream->height > (uint32_t)mSensorHeight) {
297             ALOGE("%s: Unsupported stream width 0x%x height 0x%x",
298                   __FUNCTION__, newStream->width, newStream->height);
299             return BAD_VALUE;
300         }
301 
302         bool validFormat = false;
303         for (size_t f = 0;
304              f < sizeof(kAvailableFormats)/sizeof(kAvailableFormats[0]);
305              f++) {
306             if (newStream->format == kAvailableFormats[f]) {
307                 validFormat = true;
308                 break;
309             }
310         }
311         if (!validFormat) {
312             ALOGE("%s: Unsupported stream format 0x%x requested",
313                     __FUNCTION__, newStream->format);
314             return BAD_VALUE;
315         }
316     }
317     mInputStream = inputStream;
318 
319     /**
320      * Initially mark all existing streams as not alive
321      */
322     for (StreamIterator s = mStreams.begin(); s != mStreams.end(); ++s) {
323         PrivateStreamInfo *privStream =
324                 static_cast<PrivateStreamInfo*>((*s)->priv);
325         privStream->alive = false;
326     }
327 
328     /**
329      * Find new streams and mark still-alive ones
330      */
331     for (size_t i = 0; i < streamList->num_streams; i++) {
332         camera3_stream_t *newStream = streamList->streams[i];
333         if (newStream->priv == NULL) {
334             // New stream, construct info
335             PrivateStreamInfo *privStream = new PrivateStreamInfo();
336             privStream->alive = true;
337 
338             newStream->max_buffers = kMaxBufferCount;
339             newStream->priv = privStream;
340             mStreams.push_back(newStream);
341         } else {
342             // Existing stream, mark as still alive.
343             PrivateStreamInfo *privStream =
344                     static_cast<PrivateStreamInfo*>(newStream->priv);
345             privStream->alive = true;
346         }
347         // Always update usage and max buffers
348         newStream->max_buffers = kMaxBufferCount;
349         switch (newStream->stream_type) {
350             case CAMERA3_STREAM_OUTPUT:
351                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
352                 break;
353             case CAMERA3_STREAM_INPUT:
354                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
355                 break;
356             case CAMERA3_STREAM_BIDIRECTIONAL:
357                 newStream->usage |= (GRALLOC_USAGE_HW_CAMERA_READ |
358                         GRALLOC_USAGE_HW_CAMERA_WRITE);
359                 break;
360         }
361         // Set the buffer format, inline with gralloc implementation
362         if (newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
363             if (newStream->usage & GRALLOC_USAGE_HW_CAMERA_WRITE) {
364                 if (newStream->usage & GRALLOC_USAGE_HW_TEXTURE) {
365                     newStream->format = HAL_PIXEL_FORMAT_RGBA_8888;
366                 }
367                 else if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) {
368                     newStream->format = HAL_PIXEL_FORMAT_YCbCr_420_888;
369                 }
370                 else {
371                     newStream->format = HAL_PIXEL_FORMAT_RGB_888;
372                 }
373             }
374         }
375     }
376 
377     /**
378      * Reap the dead streams
379      */
380     for (StreamIterator s = mStreams.begin(); s != mStreams.end();) {
381         PrivateStreamInfo *privStream =
382                 static_cast<PrivateStreamInfo*>((*s)->priv);
383         if (!privStream->alive) {
384             (*s)->priv = NULL;
385             delete privStream;
386             s = mStreams.erase(s);
387         } else {
388             ++s;
389         }
390     }
391 
392     /**
393      * Can't reuse settings across configure call
394      */
395     mPrevSettings.clear();
396 
397     return OK;
398 }
399 
registerStreamBuffers(const camera3_stream_buffer_set * bufferSet)400 status_t EmulatedFakeCamera3::registerStreamBuffers(
401         const camera3_stream_buffer_set *bufferSet) {
402     ALOGV("%s: E", __FUNCTION__);
403     Mutex::Autolock l(mLock);
404 
405     // Should not be called in HAL versions >= 3.2
406 
407     ALOGE("%s: Should not be invoked on new HALs!",
408             __FUNCTION__);
409     return NO_INIT;
410 }
411 
constructDefaultRequestSettings(int type)412 const camera_metadata_t* EmulatedFakeCamera3::constructDefaultRequestSettings(
413         int type) {
414     ALOGV("%s: E", __FUNCTION__);
415     Mutex::Autolock l(mLock);
416 
417     if (type < 0 || type >= CAMERA3_TEMPLATE_COUNT) {
418         ALOGE("%s: Unknown request settings template: %d",
419                 __FUNCTION__, type);
420         return NULL;
421     }
422 
423     if (!hasCapability(BACKWARD_COMPATIBLE) && type != CAMERA3_TEMPLATE_PREVIEW) {
424         ALOGE("%s: Template %d not supported w/o BACKWARD_COMPATIBLE capability",
425                 __FUNCTION__, type);
426         return NULL;
427     }
428 
429     /**
430      * Cache is not just an optimization - pointer returned has to live at
431      * least as long as the camera device instance does.
432      */
433     if (mDefaultTemplates[type] != NULL) {
434         return mDefaultTemplates[type];
435     }
436 
437     CameraMetadata settings;
438 
439     /** android.request */
440 
441     static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
442     settings.update(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);
443 
444     static const int32_t requestId = 0;
445     settings.update(ANDROID_REQUEST_ID, &requestId, 1);
446 
447     static const int32_t frameCount = 0;
448     settings.update(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1);
449 
450     /** android.lens */
451 
452     static const float focalLength = 5.0f;
453     settings.update(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1);
454 
455     if (hasCapability(BACKWARD_COMPATIBLE)) {
456         static const float focusDistance = 0;
457         settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1);
458 
459         static const float aperture = 2.8f;
460         settings.update(ANDROID_LENS_APERTURE, &aperture, 1);
461 
462         static const float filterDensity = 0;
463         settings.update(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1);
464 
465         static const uint8_t opticalStabilizationMode =
466                 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
467         settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
468                 &opticalStabilizationMode, 1);
469 
470         // FOCUS_RANGE set only in frame
471     }
472 
473     /** android.sensor */
474 
475     if (hasCapability(MANUAL_SENSOR)) {
476         const int64_t exposureTime = 10 * MSEC;
477         settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1);
478 
479         const int64_t frameDuration = 33333333L; // 1/30 s
480         settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
481 
482         const int32_t sensitivity = 100;
483         settings.update(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1);
484     }
485 
486     // TIMESTAMP set only in frame
487 
488     /** android.flash */
489 
490     if (hasCapability(BACKWARD_COMPATIBLE)) {
491         static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
492         settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
493 
494         static const uint8_t flashPower = 10;
495         settings.update(ANDROID_FLASH_FIRING_POWER, &flashPower, 1);
496 
497         static const int64_t firingTime = 0;
498         settings.update(ANDROID_FLASH_FIRING_TIME, &firingTime, 1);
499     }
500 
501     /** Processing block modes */
502     if (hasCapability(MANUAL_POST_PROCESSING)) {
503         uint8_t hotPixelMode = 0;
504         uint8_t demosaicMode = 0;
505         uint8_t noiseMode = 0;
506         uint8_t shadingMode = 0;
507         uint8_t colorMode = 0;
508         uint8_t tonemapMode = 0;
509         uint8_t edgeMode = 0;
510         switch (type) {
511             case CAMERA3_TEMPLATE_STILL_CAPTURE:
512                 // fall-through
513             case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
514                 // fall-through
515             case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
516                 hotPixelMode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY;
517                 demosaicMode = ANDROID_DEMOSAIC_MODE_HIGH_QUALITY;
518                 noiseMode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
519                 shadingMode = ANDROID_SHADING_MODE_HIGH_QUALITY;
520                 colorMode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
521                 tonemapMode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
522                 edgeMode = ANDROID_EDGE_MODE_HIGH_QUALITY;
523                 break;
524             case CAMERA3_TEMPLATE_PREVIEW:
525                 // fall-through
526             case CAMERA3_TEMPLATE_VIDEO_RECORD:
527                 // fall-through
528             default:
529                 hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
530                 demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
531                 noiseMode = ANDROID_NOISE_REDUCTION_MODE_FAST;
532                 shadingMode = ANDROID_SHADING_MODE_FAST;
533                 colorMode = ANDROID_COLOR_CORRECTION_MODE_FAST;
534                 tonemapMode = ANDROID_TONEMAP_MODE_FAST;
535                 edgeMode = ANDROID_EDGE_MODE_FAST;
536                 break;
537         }
538         settings.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
539         settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
540         settings.update(ANDROID_NOISE_REDUCTION_MODE, &noiseMode, 1);
541         settings.update(ANDROID_SHADING_MODE, &shadingMode, 1);
542         settings.update(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1);
543         settings.update(ANDROID_TONEMAP_MODE, &tonemapMode, 1);
544         settings.update(ANDROID_EDGE_MODE, &edgeMode, 1);
545     }
546 
547     /** android.colorCorrection */
548 
549     if (hasCapability(MANUAL_POST_PROCESSING)) {
550         static const camera_metadata_rational colorTransform[9] = {
551             {1,1}, {0,1}, {0,1},
552             {0,1}, {1,1}, {0,1},
553             {0,1}, {0,1}, {1,1}
554         };
555         settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9);
556 
557         static const float colorGains[4] = {
558             1.0f, 1.0f, 1.0f, 1.0f
559         };
560         settings.update(ANDROID_COLOR_CORRECTION_GAINS, colorGains, 4);
561     }
562 
563     /** android.tonemap */
564 
565     if (hasCapability(MANUAL_POST_PROCESSING)) {
566         static const float tonemapCurve[4] = {
567             0.f, 0.f,
568             1.f, 1.f
569         };
570         settings.update(ANDROID_TONEMAP_CURVE_RED, tonemapCurve, 4);
571         settings.update(ANDROID_TONEMAP_CURVE_GREEN, tonemapCurve, 4);
572         settings.update(ANDROID_TONEMAP_CURVE_BLUE, tonemapCurve, 4);
573     }
574 
575     /** android.scaler */
576     if (hasCapability(BACKWARD_COMPATIBLE)) {
577         const int32_t cropRegion[4] = {
578             0, 0, mSensorWidth, mSensorHeight
579         };
580         settings.update(ANDROID_SCALER_CROP_REGION, cropRegion, 4);
581     }
582 
583     /** android.jpeg */
584     if (hasCapability(BACKWARD_COMPATIBLE)) {
585         static const uint8_t jpegQuality = 80;
586         settings.update(ANDROID_JPEG_QUALITY, &jpegQuality, 1);
587 
588         static const int32_t thumbnailSize[2] = {
589             320, 240
590         };
591         settings.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);
592 
593         static const uint8_t thumbnailQuality = 80;
594         settings.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1);
595 
596         static const double gpsCoordinates[3] = {
597             0, 0, 0
598         };
599         settings.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 3);
600 
601         static const uint8_t gpsProcessingMethod[32] = "None";
602         settings.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32);
603 
604         static const int64_t gpsTimestamp = 0;
605         settings.update(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1);
606 
607         static const int32_t jpegOrientation = 0;
608         settings.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);
609     }
610 
611     /** android.stats */
612 
613     if (hasCapability(BACKWARD_COMPATIBLE)) {
614         static const uint8_t faceDetectMode =
615                 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
616         settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
617 
618         static const uint8_t hotPixelMapMode =
619                 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
620         settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
621     }
622 
623     // faceRectangles, faceScores, faceLandmarks, faceIds, histogram,
624     // sharpnessMap only in frames
625 
626     /** android.control */
627 
628     uint8_t controlIntent = 0;
629     switch (type) {
630       case CAMERA3_TEMPLATE_PREVIEW:
631         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
632         break;
633       case CAMERA3_TEMPLATE_STILL_CAPTURE:
634         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
635         break;
636       case CAMERA3_TEMPLATE_VIDEO_RECORD:
637         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
638         break;
639       case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
640         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
641         break;
642       case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
643         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
644         break;
645       case CAMERA3_TEMPLATE_MANUAL:
646         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
647         break;
648       default:
649         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
650         break;
651     }
652     settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
653 
654     const uint8_t controlMode = (type == CAMERA3_TEMPLATE_MANUAL) ?
655             ANDROID_CONTROL_MODE_OFF :
656             ANDROID_CONTROL_MODE_AUTO;
657     settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
658 
659     int32_t aeTargetFpsRange[2] = {
660         15, 30
661     };
662     if (type == CAMERA3_TEMPLATE_VIDEO_RECORD || type == CAMERA3_TEMPLATE_VIDEO_SNAPSHOT) {
663         aeTargetFpsRange[0] = 30;
664     }
665     settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2);
666 
667     if (hasCapability(BACKWARD_COMPATIBLE)) {
668 
669         static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
670         settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
671 
672         const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
673         settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
674 
675         const uint8_t aeMode = (type == CAMERA3_TEMPLATE_MANUAL) ?
676                 ANDROID_CONTROL_AE_MODE_OFF :
677                 ANDROID_CONTROL_AE_MODE_ON;
678         settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
679 
680         static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
681         settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
682 
683         static const int32_t controlRegions[5] = {
684             0, 0, 0, 0, 0
685         };
686         settings.update(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5);
687 
688         static const int32_t aeExpCompensation = 0;
689         settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1);
690 
691 
692         static const uint8_t aeAntibandingMode =
693                 ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
694         settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1);
695 
696         static const uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
697         settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1);
698 
699         const uint8_t awbMode = (type == CAMERA3_TEMPLATE_MANUAL) ?
700                 ANDROID_CONTROL_AWB_MODE_OFF :
701                 ANDROID_CONTROL_AWB_MODE_AUTO;
702         settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
703 
704         static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
705         settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
706 
707         uint8_t afMode = 0;
708 
709         {
710             switch (type) {
711                 case CAMERA3_TEMPLATE_PREVIEW:
712                     afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
713                     break;
714                 case CAMERA3_TEMPLATE_STILL_CAPTURE:
715                     afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
716                     break;
717                 case CAMERA3_TEMPLATE_VIDEO_RECORD:
718                     afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
719                     break;
720                 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
721                     afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
722                     break;
723                 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
724                     afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
725                     break;
726                 case CAMERA3_TEMPLATE_MANUAL:
727                     afMode = ANDROID_CONTROL_AF_MODE_OFF;
728                     break;
729                 default:
730                     afMode = ANDROID_CONTROL_AF_MODE_AUTO;
731                     break;
732             }
733         }
734 
735         settings.update(ANDROID_CONTROL_AF_MODE, &afMode, 1);
736 
737         settings.update(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5);
738 
739         const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
740         settings.update(ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1);
741 
742         static const uint8_t vstabMode =
743                 ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
744         settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vstabMode, 1);
745 
746         static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
747         settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
748 
749         static const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
750         settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMapMode, 1);
751 
752         uint8_t aberrationMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
753         if (type == CAMERA3_TEMPLATE_STILL_CAPTURE) {
754             aberrationMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
755         }
756         settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &aberrationMode, 1);
757 
758         static const int32_t testPatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
759         settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPatternMode, 1);
760     }
761 
762     mDefaultTemplates[type] = settings.release();
763 
764     return mDefaultTemplates[type];
765 }
766 
processCaptureRequest(camera3_capture_request * request)767 status_t EmulatedFakeCamera3::processCaptureRequest(
768         camera3_capture_request *request) {
769 
770     Mutex::Autolock l(mLock);
771     status_t res;
772 
773     /** Validation */
774 
775     if (mStatus < STATUS_READY) {
776         ALOGE("%s: Can't submit capture requests in state %d", __FUNCTION__,
777                 mStatus);
778         return INVALID_OPERATION;
779     }
780 
781     if (request == NULL) {
782         ALOGE("%s: NULL request!", __FUNCTION__);
783         return BAD_VALUE;
784     }
785 
786     uint32_t frameNumber = request->frame_number;
787 
788     if (request->settings == NULL && mPrevSettings.isEmpty()) {
789         ALOGE("%s: Request %d: NULL settings for first request after"
790                 "configureStreams()", __FUNCTION__, frameNumber);
791         return BAD_VALUE;
792     }
793 
794     if (request->input_buffer != NULL &&
795             request->input_buffer->stream != mInputStream) {
796         ALOGE("%s: Request %d: Input buffer not from input stream!",
797                 __FUNCTION__, frameNumber);
798         ALOGV("%s: Bad stream %p, expected: %p",
799               __FUNCTION__, request->input_buffer->stream,
800               mInputStream);
801         ALOGV("%s: Bad stream type %d, expected stream type %d",
802               __FUNCTION__, request->input_buffer->stream->stream_type,
803               mInputStream ? mInputStream->stream_type : -1);
804 
805         return BAD_VALUE;
806     }
807 
808     if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
809         ALOGE("%s: Request %d: No output buffers provided!",
810                 __FUNCTION__, frameNumber);
811         return BAD_VALUE;
812     }
813 
814     // Validate all buffers, starting with input buffer if it's given
815 
816     ssize_t idx;
817     const camera3_stream_buffer_t *b;
818     if (request->input_buffer != NULL) {
819         idx = -1;
820         b = request->input_buffer;
821     } else {
822         idx = 0;
823         b = request->output_buffers;
824     }
825     do {
826         PrivateStreamInfo *priv =
827                 static_cast<PrivateStreamInfo*>(b->stream->priv);
828         if (priv == NULL) {
829             ALOGE("%s: Request %d: Buffer %zu: Unconfigured stream!",
830                     __FUNCTION__, frameNumber, idx);
831             return BAD_VALUE;
832         }
833         if (!priv->alive) {
834             ALOGE("%s: Request %d: Buffer %zu: Dead stream!",
835                     __FUNCTION__, frameNumber, idx);
836             return BAD_VALUE;
837         }
838         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
839             ALOGE("%s: Request %d: Buffer %zu: Status not OK!",
840                     __FUNCTION__, frameNumber, idx);
841             return BAD_VALUE;
842         }
843         if (b->release_fence != -1) {
844             ALOGE("%s: Request %d: Buffer %zu: Has a release fence!",
845                     __FUNCTION__, frameNumber, idx);
846             return BAD_VALUE;
847         }
848         if (b->buffer == NULL) {
849             ALOGE("%s: Request %d: Buffer %zu: NULL buffer handle!",
850                     __FUNCTION__, frameNumber, idx);
851             return BAD_VALUE;
852         }
853         idx++;
854         b = &(request->output_buffers[idx]);
855     } while (idx < (ssize_t)request->num_output_buffers);
856 
857     // TODO: Validate settings parameters
858 
859     /**
860      * Start processing this request
861      */
862 
863     mStatus = STATUS_ACTIVE;
864 
865     CameraMetadata settings;
866 
867     if (request->settings == NULL) {
868         settings.acquire(mPrevSettings);
869     } else {
870         settings = request->settings;
871     }
872 
873     res = process3A(settings);
874     if (res != OK) {
875         return res;
876     }
877 
878     // TODO: Handle reprocessing
879 
880     /**
881      * Get ready for sensor config
882      */
883 
884     nsecs_t  exposureTime;
885     nsecs_t  frameDuration;
886     uint32_t sensitivity;
887     bool     needJpeg = false;
888     camera_metadata_entry_t entry;
889     entry = settings.find(ANDROID_SENSOR_EXPOSURE_TIME);
890     exposureTime = (entry.count > 0) ? entry.data.i64[0] : Sensor::kExposureTimeRange[0];
891     entry = settings.find(ANDROID_SENSOR_FRAME_DURATION);
892     frameDuration = (entry.count > 0)? entry.data.i64[0] : Sensor::kFrameDurationRange[0];
893     entry = settings.find(ANDROID_SENSOR_SENSITIVITY);
894     sensitivity = (entry.count > 0) ? entry.data.i32[0] : Sensor::kSensitivityRange[0];
895 
896     if (exposureTime > frameDuration) {
897         frameDuration = exposureTime + Sensor::kMinVerticalBlank;
898         settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
899     }
900 
901     Buffers *sensorBuffers = new Buffers();
902     HalBufferVector *buffers = new HalBufferVector();
903 
904     sensorBuffers->setCapacity(request->num_output_buffers);
905     buffers->setCapacity(request->num_output_buffers);
906 
907     // Process all the buffers we got for output, constructing internal buffer
908     // structures for them, and lock them for writing.
909     for (size_t i = 0; i < request->num_output_buffers; i++) {
910         const camera3_stream_buffer &srcBuf = request->output_buffers[i];
911         StreamBuffer destBuf;
912         destBuf.streamId = kGenericStreamId;
913         destBuf.width    = srcBuf.stream->width;
914         destBuf.height   = srcBuf.stream->height;
915         // inline with goldfish gralloc
916         if (srcBuf.stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
917             if (srcBuf.stream->usage & GRALLOC_USAGE_HW_CAMERA_WRITE) {
918                 if (srcBuf.stream->usage & GRALLOC_USAGE_HW_TEXTURE) {
919                     destBuf.format = HAL_PIXEL_FORMAT_RGBA_8888;
920                 }
921                 else if (srcBuf.stream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) {
922                     destBuf.format = HAL_PIXEL_FORMAT_YCbCr_420_888;
923                 }
924                 else if ((srcBuf.stream->usage & GRALLOC_USAGE_HW_CAMERA_MASK)
925                          == GRALLOC_USAGE_HW_CAMERA_ZSL) {
926                     destBuf.format = HAL_PIXEL_FORMAT_RGB_888;
927                 }
928             }
929         }
930         else {
931             destBuf.format = srcBuf.stream->format;
932         }
933         destBuf.stride   = srcBuf.stream->width;
934         destBuf.dataSpace = srcBuf.stream->data_space;
935         destBuf.buffer   = srcBuf.buffer;
936 
937         if (destBuf.format == HAL_PIXEL_FORMAT_BLOB) {
938             needJpeg = true;
939         }
940 
941         // Wait on fence
942         sp<Fence> bufferAcquireFence = new Fence(srcBuf.acquire_fence);
943         res = bufferAcquireFence->wait(kFenceTimeoutMs);
944         if (res == TIMED_OUT) {
945             ALOGE("%s: Request %d: Buffer %zu: Fence timed out after %d ms",
946                     __FUNCTION__, frameNumber, i, kFenceTimeoutMs);
947         }
948         if (res == OK) {
949             // Lock buffer for writing
950             if (srcBuf.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
951                 if (destBuf.format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
952                     android_ycbcr ycbcr = {};
953                     res = mGBM->lockYCbCr(
954                         *(destBuf.buffer),
955                         GRALLOC_USAGE_HW_CAMERA_WRITE,
956                         Rect(0, 0, destBuf.width, destBuf.height),
957                         &ycbcr);
958                     // This is only valid because we know that emulator's
959                     // YCbCr_420_888 is really contiguous NV21 under the hood
960                     destBuf.img = static_cast<uint8_t*>(ycbcr.y);
961                 } else {
962                     ALOGE("Unexpected private format for flexible YUV: 0x%x",
963                             destBuf.format);
964                     res = INVALID_OPERATION;
965                 }
966             } else {
967                 res = mGBM->lock(
968                     *(destBuf.buffer),
969                     GRALLOC_USAGE_HW_CAMERA_WRITE,
970                     Rect(0, 0, destBuf.width, destBuf.height),
971                     (void**)&(destBuf.img));
972 
973             }
974             if (res != OK) {
975                 ALOGE("%s: Request %d: Buffer %zu: Unable to lock buffer",
976                         __FUNCTION__, frameNumber, i);
977             } else {
978                 ALOGV("%s, stream format 0x%x width %d height %d buffer 0x%p img 0x%p",
979                   __FUNCTION__, destBuf.format, destBuf.width, destBuf.height,
980                   destBuf.buffer, destBuf.img);
981             }
982         }
983 
984         if (res != OK) {
985             // Either waiting or locking failed. Unlock locked buffers and bail
986             // out.
987             for (size_t j = 0; j < i; j++) {
988                 mGBM->unlock(*(request->output_buffers[i].buffer));
989             }
990             delete sensorBuffers;
991             delete buffers;
992             return NO_INIT;
993         }
994 
995         sensorBuffers->push_back(destBuf);
996         buffers->push_back(srcBuf);
997     }
998 
999     /**
1000      * Wait for JPEG compressor to not be busy, if needed
1001      */
1002     if (needJpeg) {
1003         bool ready = mJpegCompressor->waitForDone(kJpegTimeoutNs);
1004         if (!ready) {
1005             ALOGE("%s: Timeout waiting for JPEG compression to complete!",
1006                     __FUNCTION__);
1007             return NO_INIT;
1008         }
1009         res = mJpegCompressor->reserve();
1010         if (res != OK) {
1011             ALOGE("%s: Error managing JPEG compressor resources, can't reserve it!", __FUNCTION__);
1012             return NO_INIT;
1013         }
1014     }
1015 
1016     /**
1017      * Wait until the in-flight queue has room
1018      */
1019     res = mReadoutThread->waitForReadout();
1020     if (res != OK) {
1021         ALOGE("%s: Timeout waiting for previous requests to complete!",
1022                 __FUNCTION__);
1023         return NO_INIT;
1024     }
1025 
1026     /**
1027      * Wait until sensor's ready. This waits for lengthy amounts of time with
1028      * mLock held, but the interface spec is that no other calls may by done to
1029      * the HAL by the framework while process_capture_request is happening.
1030      */
1031     int syncTimeoutCount = 0;
1032     while(!mSensor->waitForVSync(kSyncWaitTimeout)) {
1033         if (mStatus == STATUS_ERROR) {
1034             return NO_INIT;
1035         }
1036         if (syncTimeoutCount == kMaxSyncTimeoutCount) {
1037             ALOGE("%s: Request %d: Sensor sync timed out after %" PRId64 " ms",
1038                     __FUNCTION__, frameNumber,
1039                     kSyncWaitTimeout * kMaxSyncTimeoutCount / 1000000);
1040             return NO_INIT;
1041         }
1042         syncTimeoutCount++;
1043     }
1044 
1045     /**
1046      * Configure sensor and queue up the request to the readout thread
1047      */
1048     mSensor->setExposureTime(exposureTime);
1049     mSensor->setFrameDuration(frameDuration);
1050     mSensor->setSensitivity(sensitivity);
1051     mSensor->setDestinationBuffers(sensorBuffers);
1052     mSensor->setFrameNumber(request->frame_number);
1053 
1054     ReadoutThread::Request r;
1055     r.frameNumber = request->frame_number;
1056     r.settings = settings;
1057     r.sensorBuffers = sensorBuffers;
1058     r.buffers = buffers;
1059 
1060     mReadoutThread->queueCaptureRequest(r);
1061     ALOGVV("%s: Queued frame %d", __FUNCTION__, request->frame_number);
1062 
1063     // Cache the settings for next time
1064     mPrevSettings.acquire(settings);
1065 
1066     return OK;
1067 }
1068 
flush()1069 status_t EmulatedFakeCamera3::flush() {
1070     ALOGW("%s: Not implemented; ignored", __FUNCTION__);
1071     return OK;
1072 }
1073 
1074 /** Debug methods */
1075 
dump(int fd)1076 void EmulatedFakeCamera3::dump(int fd) {
1077 
1078 }
1079 
1080 /**
1081  * Private methods
1082  */
1083 
getCameraCapabilities()1084 status_t EmulatedFakeCamera3::getCameraCapabilities() {
1085 
1086     const char *key = mFacingBack ? "qemu.sf.back_camera_caps" : "qemu.sf.front_camera_caps";
1087 
1088     /* Defined by 'qemu.sf.*_camera_caps' boot property: if the
1089      * property doesn't exist, it is assumed to list FULL. */
1090     char prop[PROPERTY_VALUE_MAX];
1091     if (property_get(key, prop, NULL) > 0) {
1092         char *saveptr = nullptr;
1093         char *cap = strtok_r(prop, " ,", &saveptr);
1094         while (cap != NULL) {
1095             for (int i = 0; i < NUM_CAPABILITIES; i++) {
1096                 if (!strcasecmp(cap, sAvailableCapabilitiesStrings[i])) {
1097                     mCapabilities.add(static_cast<AvailableCapabilities>(i));
1098                     break;
1099                 }
1100             }
1101             cap = strtok_r(NULL, " ,", &saveptr);
1102         }
1103         if (mCapabilities.size() == 0) {
1104             ALOGE("qemu.sf.back_camera_caps had no valid capabilities: %s", prop);
1105         }
1106     }
1107     // Default to FULL_LEVEL plus RAW if nothing is defined
1108     if (mCapabilities.size() == 0) {
1109         mCapabilities.add(FULL_LEVEL);
1110         // "RAW" causes several CTS failures: b/68723953, disable it so far.
1111         // TODO: add "RAW" back when all failures are resolved.
1112         //mCapabilities.add(RAW);
1113         mCapabilities.add(MOTION_TRACKING);
1114     }
1115 
1116     // Add level-based caps
1117     if (hasCapability(FULL_LEVEL)) {
1118         mCapabilities.add(BURST_CAPTURE);
1119         mCapabilities.add(READ_SENSOR_SETTINGS);
1120         mCapabilities.add(MANUAL_SENSOR);
1121         mCapabilities.add(MANUAL_POST_PROCESSING);
1122     };
1123 
1124     // Backwards-compatible is required for most other caps
1125     // Not required for DEPTH_OUTPUT, though.
1126     if (hasCapability(BURST_CAPTURE) ||
1127             hasCapability(READ_SENSOR_SETTINGS) ||
1128             hasCapability(RAW) ||
1129             hasCapability(MANUAL_SENSOR) ||
1130             hasCapability(MANUAL_POST_PROCESSING) ||
1131             hasCapability(PRIVATE_REPROCESSING) ||
1132             hasCapability(YUV_REPROCESSING) ||
1133             hasCapability(CONSTRAINED_HIGH_SPEED_VIDEO)) {
1134         mCapabilities.add(BACKWARD_COMPATIBLE);
1135     }
1136 
1137     ALOGI("Camera %d capabilities:", mCameraID);
1138     for (size_t i = 0; i < mCapabilities.size(); i++) {
1139         ALOGI("  %s", sAvailableCapabilitiesStrings[mCapabilities[i]]);
1140     }
1141 
1142     return OK;
1143 }
1144 
hasCapability(AvailableCapabilities cap)1145 bool EmulatedFakeCamera3::hasCapability(AvailableCapabilities cap) {
1146     ssize_t idx = mCapabilities.indexOf(cap);
1147     return idx >= 0;
1148 }
1149 
constructStaticInfo()1150 status_t EmulatedFakeCamera3::constructStaticInfo() {
1151 
1152     CameraMetadata info;
1153     Vector<int32_t> availableCharacteristicsKeys;
1154     status_t res;
1155 
1156     // Find max width/height
1157     int32_t width = 0, height = 0;
1158     size_t rawSizeCount = sizeof(kAvailableRawSizes)/sizeof(kAvailableRawSizes[0]);
1159     for (size_t index = 0; index + 1 < rawSizeCount; index += 2) {
1160         if (width <= (int32_t)kAvailableRawSizes[index] &&
1161             height <= (int32_t)kAvailableRawSizes[index+1]) {
1162             width = kAvailableRawSizes[index];
1163             height = kAvailableRawSizes[index+1];
1164         }
1165     }
1166 
1167     if (width < 640 || height < 480) {
1168         width = 640;
1169         height = 480;
1170     }
1171     mSensorWidth = width;
1172     mSensorHeight = height;
1173 
1174 #define ADD_STATIC_ENTRY(name, varptr, count) \
1175         availableCharacteristicsKeys.add(name);   \
1176         res = info.update(name, varptr, count); \
1177         if (res != OK) return res
1178 
1179     // android.sensor
1180 
1181     if (hasCapability(MANUAL_SENSOR)) {
1182 
1183         ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
1184                 Sensor::kExposureTimeRange, 2);
1185 
1186         ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
1187                 &Sensor::kFrameDurationRange[1], 1);
1188 
1189         ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
1190                 Sensor::kSensitivityRange,
1191                 sizeof(Sensor::kSensitivityRange)
1192                 /sizeof(int32_t));
1193 
1194         ADD_STATIC_ENTRY(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
1195                 &Sensor::kSensitivityRange[1], 1);
1196     }
1197 
1198     static const uint8_t sensorColorFilterArrangement =
1199         ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;
1200     ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
1201             &sensorColorFilterArrangement, 1);
1202 
1203     static const float sensorPhysicalSize[2] = {3.20f, 2.40f}; // mm
1204     ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
1205             sensorPhysicalSize, 2);
1206 
1207     const int32_t pixelArray[] = {mSensorWidth, mSensorHeight};
1208     ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
1209             pixelArray, 2);
1210     const int32_t activeArray[] = {0, 0, mSensorWidth, mSensorHeight};
1211     ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
1212             activeArray, 4);
1213 
1214     static const int32_t orientation = 90; // Aligned with 'long edge'
1215     ADD_STATIC_ENTRY(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
1216 
1217     static const uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
1218     ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, &timestampSource, 1);
1219 
1220     if (hasCapability(RAW) || hasCapability(MANUAL_SENSOR)) {
1221         ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_WHITE_LEVEL,
1222                 (int32_t*)&Sensor::kMaxRawValue, 1);
1223 
1224         static const int32_t blackLevelPattern[4] = {
1225             (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel,
1226             (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel
1227         };
1228         ADD_STATIC_ENTRY(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
1229                 blackLevelPattern, sizeof(blackLevelPattern)/sizeof(int32_t));
1230     }
1231 
1232     if (hasCapability(RAW)) {
1233         ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
1234                 &Sensor::kColorFilterArrangement, 1);
1235     }
1236 
1237     if (hasCapability(BACKWARD_COMPATIBLE)) {
1238         static const int32_t availableTestPatternModes[] = {
1239             ANDROID_SENSOR_TEST_PATTERN_MODE_OFF
1240         };
1241         ADD_STATIC_ENTRY(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
1242                 availableTestPatternModes, sizeof(availableTestPatternModes)/sizeof(int32_t));
1243     }
1244 
1245     // android.lens
1246     static const float focalLengths = 5.0f;
1247     ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
1248             &focalLengths, 1);
1249 
1250     if (hasCapability(BACKWARD_COMPATIBLE)) {
1251         // 5 cm min focus distance for back camera, infinity (fixed focus) for front
1252         const float minFocusDistance = 1.0/0.05;
1253         ADD_STATIC_ENTRY(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1254                 &minFocusDistance, 1);
1255 
1256         // 5 m hyperfocal distance for back camera, infinity (fixed focus) for front
1257         const float hyperFocalDistance = 1.0/5.0;
1258         ADD_STATIC_ENTRY(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
1259                 &hyperFocalDistance, 1);
1260 
1261         static const float apertures = 2.8f;
1262         ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
1263                 &apertures, 1);
1264         static const float filterDensities = 0;
1265         ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
1266                 &filterDensities, 1);
1267         static const uint8_t availableOpticalStabilization =
1268                 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
1269         ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
1270                 &availableOpticalStabilization, 1);
1271 
1272         static const int32_t lensShadingMapSize[] = {1, 1};
1273         ADD_STATIC_ENTRY(ANDROID_LENS_INFO_SHADING_MAP_SIZE, lensShadingMapSize,
1274                 sizeof(lensShadingMapSize)/sizeof(int32_t));
1275 
1276         static const uint8_t lensFocusCalibration =
1277                 ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE;
1278         ADD_STATIC_ENTRY(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION, &lensFocusCalibration, 1);
1279     }
1280 
1281     if (hasCapability(DEPTH_OUTPUT)) {
1282         // These could be included for non-DEPTH capability as well, but making this variable for
1283         // testing coverage
1284 
1285         // 90 degree rotation to align with long edge of a phone device that's by default portrait
1286         static const float qO[] = { 0.707107f, 0.f, 0.f, 0.707107f};
1287 
1288         const float qF[] = {0, 1.f, 0, 0.f};
1289 
1290         // Quarternion product, orientation change then facing
1291         const float lensPoseRotation[] = {qO[0]*qF[0] - qO[1]*qF[1] - qO[2]*qF[2] - qO[3]*qF[3],
1292                                           qO[0]*qF[1] + qO[1]*qF[0] + qO[2]*qF[3] - qO[3]*qF[2],
1293                                           qO[0]*qF[2] + qO[2]*qF[0] + qO[1]*qF[3] - qO[3]*qF[1],
1294                                           qO[0]*qF[3] + qO[3]*qF[0] + qO[1]*qF[2] - qO[2]*qF[1]};
1295 
1296         ADD_STATIC_ENTRY(ANDROID_LENS_POSE_ROTATION, lensPoseRotation,
1297                 sizeof(lensPoseRotation)/sizeof(float));
1298 
1299         // Only one camera facing each way, so 0 translation needed to the center of the 'main'
1300         // camera
1301         static const float lensPoseTranslation[] = {0.f, 0.f, 0.f};
1302 
1303         ADD_STATIC_ENTRY(ANDROID_LENS_POSE_TRANSLATION, lensPoseTranslation,
1304                 sizeof(lensPoseTranslation)/sizeof(float));
1305 
1306         // Intrinsics are 'ideal' (f_x, f_y, c_x, c_y, s) match focal length and active array size
1307         float f_x = focalLengths * mSensorWidth / sensorPhysicalSize[0];
1308         float f_y = focalLengths * mSensorHeight / sensorPhysicalSize[1];
1309         float c_x = mSensorWidth / 2.f;
1310         float c_y = mSensorHeight / 2.f;
1311         float s = 0.f;
1312         const float lensIntrinsics[] = { f_x, f_y, c_x, c_y, s };
1313 
1314         ADD_STATIC_ENTRY(ANDROID_LENS_INTRINSIC_CALIBRATION, lensIntrinsics,
1315                 sizeof(lensIntrinsics)/sizeof(float));
1316 
1317         // No radial or tangential distortion
1318 
1319         float lensRadialDistortion[] = {1.0f, 0.f, 0.f, 0.f, 0.f, 0.f};
1320 
1321         ADD_STATIC_ENTRY(ANDROID_LENS_RADIAL_DISTORTION, lensRadialDistortion,
1322                 sizeof(lensRadialDistortion)/sizeof(float));
1323 
1324     }
1325 
1326 
1327     const uint8_t lensFacing = mFacingBack ?
1328             ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
1329     ADD_STATIC_ENTRY(ANDROID_LENS_FACING, &lensFacing, 1);
1330 
1331     // android.flash
1332 
1333     static const uint8_t flashAvailable = 0;
1334     ADD_STATIC_ENTRY(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1);
1335 
1336     // android.hotPixel
1337 
1338     if (hasCapability(MANUAL_POST_PROCESSING)) {
1339         static const uint8_t availableHotPixelModes[] = {
1340             ANDROID_HOT_PIXEL_MODE_FAST, ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY
1341         };
1342         ADD_STATIC_ENTRY(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
1343                 availableHotPixelModes, sizeof(availableHotPixelModes));
1344     }
1345 
1346     // android.tonemap
1347 
1348     if (hasCapability(MANUAL_POST_PROCESSING)) {
1349         static const int32_t tonemapCurvePoints = 128;
1350         ADD_STATIC_ENTRY(ANDROID_TONEMAP_MAX_CURVE_POINTS, &tonemapCurvePoints, 1);
1351 
1352         static const uint8_t availableToneMapModes[] = {
1353             ANDROID_TONEMAP_MODE_CONTRAST_CURVE,  ANDROID_TONEMAP_MODE_FAST,
1354             ANDROID_TONEMAP_MODE_HIGH_QUALITY
1355         };
1356         ADD_STATIC_ENTRY(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES, availableToneMapModes,
1357                 sizeof(availableToneMapModes));
1358     }
1359 
1360     // android.scaler
1361 
1362     const std::vector<int32_t> availableStreamConfigurationsBasic = {
1363         HAL_PIXEL_FORMAT_BLOB, width, height, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1364         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 1280, 720, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1365         HAL_PIXEL_FORMAT_YCbCr_420_888, 1280, 720, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1366         HAL_PIXEL_FORMAT_BLOB, 1280, 720, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1367         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 640, 480, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1368         HAL_PIXEL_FORMAT_YCbCr_420_888, 640, 480, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1369         HAL_PIXEL_FORMAT_BLOB, 640, 480, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1370         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 320, 240, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1371         HAL_PIXEL_FORMAT_YCbCr_420_888, 320, 240, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1372         HAL_PIXEL_FORMAT_BLOB, 320, 240, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1373         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 176, 144, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1374         HAL_PIXEL_FORMAT_YCbCr_420_888, 176, 144, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1375         HAL_PIXEL_FORMAT_BLOB, 176, 144, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1376     };
1377 
1378     const std::vector<int32_t> availableStreamConfigurationsRaw = {
1379         HAL_PIXEL_FORMAT_RAW16, width, height, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1380     };
1381 
1382     const std::vector<int32_t> availableStreamConfigurationsBurst = {
1383         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, width, height, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1384         HAL_PIXEL_FORMAT_YCbCr_420_888, width, height, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1385         HAL_PIXEL_FORMAT_RGBA_8888, width, height, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1386     };
1387 
1388     std::vector<int32_t> availableStreamConfigurations;
1389 
1390     if (hasCapability(BACKWARD_COMPATIBLE)) {
1391         availableStreamConfigurations.insert(availableStreamConfigurations.end(),
1392                 availableStreamConfigurationsBasic.begin(),
1393                 availableStreamConfigurationsBasic.end());
1394     }
1395     if (hasCapability(RAW)) {
1396         availableStreamConfigurations.insert(availableStreamConfigurations.end(),
1397                 availableStreamConfigurationsRaw.begin(),
1398                 availableStreamConfigurationsRaw.end());
1399     }
1400     if (hasCapability(BURST_CAPTURE)) {
1401         availableStreamConfigurations.insert(availableStreamConfigurations.end(),
1402                 availableStreamConfigurationsBurst.begin(),
1403                 availableStreamConfigurationsBurst.end());
1404     }
1405 
1406     if (availableStreamConfigurations.size() > 0) {
1407         ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
1408                 &availableStreamConfigurations[0],
1409                 availableStreamConfigurations.size());
1410     }
1411 
1412     const std::vector<int64_t> availableMinFrameDurationsBasic = {
1413         HAL_PIXEL_FORMAT_BLOB, width, height, Sensor::kFrameDurationRange[0],
1414         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 1280, 720, Sensor::kFrameDurationRange[0],
1415         HAL_PIXEL_FORMAT_YCbCr_420_888, 1280, 720, Sensor::kFrameDurationRange[0],
1416         HAL_PIXEL_FORMAT_BLOB, 1280, 720, Sensor::kFrameDurationRange[0],
1417         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 640, 480, Sensor::kFrameDurationRange[0],
1418         HAL_PIXEL_FORMAT_YCbCr_420_888, 640, 480, Sensor::kFrameDurationRange[0],
1419         HAL_PIXEL_FORMAT_BLOB, 640, 480, Sensor::kFrameDurationRange[0],
1420         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 320, 240, Sensor::kFrameDurationRange[0],
1421         HAL_PIXEL_FORMAT_YCbCr_420_888, 320, 240, Sensor::kFrameDurationRange[0],
1422         HAL_PIXEL_FORMAT_BLOB, 320, 240, Sensor::kFrameDurationRange[0],
1423         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 176, 144, Sensor::kFrameDurationRange[0],
1424         HAL_PIXEL_FORMAT_YCbCr_420_888, 176, 144, Sensor::kFrameDurationRange[0],
1425         HAL_PIXEL_FORMAT_BLOB, 176, 144, Sensor::kFrameDurationRange[0],
1426     };
1427 
1428     const std::vector<int64_t> availableMinFrameDurationsRaw = {
1429         HAL_PIXEL_FORMAT_RAW16, width, height, Sensor::kFrameDurationRange[0],
1430     };
1431 
1432     const std::vector<int64_t> availableMinFrameDurationsBurst = {
1433         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, width, height, Sensor::kFrameDurationRange[0],
1434         HAL_PIXEL_FORMAT_YCbCr_420_888, width, height, Sensor::kFrameDurationRange[0],
1435         HAL_PIXEL_FORMAT_RGBA_8888, width, height, Sensor::kFrameDurationRange[0],
1436     };
1437 
1438     std::vector<int64_t> availableMinFrameDurations;
1439 
1440     if (hasCapability(BACKWARD_COMPATIBLE)) {
1441         availableMinFrameDurations.insert(availableMinFrameDurations.end(),
1442                 availableMinFrameDurationsBasic.begin(),
1443                 availableMinFrameDurationsBasic.end());
1444     }
1445     if (hasCapability(RAW)) {
1446         availableMinFrameDurations.insert(availableMinFrameDurations.end(),
1447                 availableMinFrameDurationsRaw.begin(),
1448                 availableMinFrameDurationsRaw.end());
1449     }
1450     if (hasCapability(BURST_CAPTURE)) {
1451         availableMinFrameDurations.insert(availableMinFrameDurations.end(),
1452                 availableMinFrameDurationsBurst.begin(),
1453                 availableMinFrameDurationsBurst.end());
1454     }
1455 
1456     if (availableMinFrameDurations.size() > 0) {
1457         ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
1458                 &availableMinFrameDurations[0],
1459                 availableMinFrameDurations.size());
1460     }
1461 
1462     const std::vector<int64_t> availableStallDurationsBasic = {
1463         HAL_PIXEL_FORMAT_BLOB, width, height, Sensor::kFrameDurationRange[0],
1464         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 1280, 720, 0,
1465         HAL_PIXEL_FORMAT_YCbCr_420_888, 1280, 720, 0,
1466         HAL_PIXEL_FORMAT_BLOB, 1280, 720, Sensor::kFrameDurationRange[0],
1467         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 640, 480, 0,
1468         HAL_PIXEL_FORMAT_YCbCr_420_888, 640, 480, 0,
1469         HAL_PIXEL_FORMAT_BLOB, 640, 480, Sensor::kFrameDurationRange[0],
1470         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 320, 240, 0,
1471         HAL_PIXEL_FORMAT_YCbCr_420_888, 320, 240, 0,
1472         HAL_PIXEL_FORMAT_RGBA_8888, 320, 240, 0,
1473         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 176, 144, 0,
1474         HAL_PIXEL_FORMAT_YCbCr_420_888, 176, 144, 0,
1475         HAL_PIXEL_FORMAT_RGBA_8888, 176, 144, 0,
1476     };
1477 
1478     const std::vector<int64_t> availableStallDurationsRaw = {
1479         HAL_PIXEL_FORMAT_RAW16, width, height, Sensor::kFrameDurationRange[0]
1480     };
1481     const std::vector<int64_t> availableStallDurationsBurst = {
1482         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, width, height, 0,
1483         HAL_PIXEL_FORMAT_YCbCr_420_888, width, height, 0,
1484         HAL_PIXEL_FORMAT_RGBA_8888, width, height, 0
1485     };
1486 
1487     std::vector<int64_t> availableStallDurations;
1488 
1489     if (hasCapability(BACKWARD_COMPATIBLE)) {
1490         availableStallDurations.insert(availableStallDurations.end(),
1491                 availableStallDurationsBasic.begin(),
1492                 availableStallDurationsBasic.end());
1493     }
1494     if (hasCapability(RAW)) {
1495         availableStallDurations.insert(availableStallDurations.end(),
1496                 availableStallDurationsRaw.begin(),
1497                 availableStallDurationsRaw.end());
1498     }
1499     if (hasCapability(BURST_CAPTURE)) {
1500         availableStallDurations.insert(availableStallDurations.end(),
1501                 availableStallDurationsBurst.begin(),
1502                 availableStallDurationsBurst.end());
1503     }
1504 
1505     if (availableStallDurations.size() > 0) {
1506         ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
1507                 &availableStallDurations[0],
1508                 availableStallDurations.size());
1509     }
1510 
1511     if (hasCapability(BACKWARD_COMPATIBLE)) {
1512         static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
1513         ADD_STATIC_ENTRY(ANDROID_SCALER_CROPPING_TYPE,
1514                 &croppingType, 1);
1515 
1516         static const float maxZoom = 10;
1517         ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
1518                 &maxZoom, 1);
1519     }
1520 
1521     // android.jpeg
1522 
1523     if (hasCapability(BACKWARD_COMPATIBLE)) {
1524         static const int32_t jpegThumbnailSizes[] = {
1525             0, 0,
1526             160, 120,
1527             320, 180,
1528             320, 240
1529         };
1530         ADD_STATIC_ENTRY(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
1531                 jpegThumbnailSizes, sizeof(jpegThumbnailSizes)/sizeof(int32_t));
1532 
1533         static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize;
1534         ADD_STATIC_ENTRY(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);
1535     }
1536 
1537     // android.stats
1538 
1539     if (hasCapability(BACKWARD_COMPATIBLE)) {
1540         static const uint8_t availableFaceDetectModes[] = {
1541             ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
1542             ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE,
1543             ANDROID_STATISTICS_FACE_DETECT_MODE_FULL
1544         };
1545         ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
1546                 availableFaceDetectModes,
1547                 sizeof(availableFaceDetectModes));
1548 
1549         static const int32_t maxFaceCount = 8;
1550         ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1551                 &maxFaceCount, 1);
1552 
1553 
1554         static const uint8_t availableShadingMapModes[] = {
1555             ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF
1556         };
1557         ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
1558                 availableShadingMapModes, sizeof(availableShadingMapModes));
1559     }
1560 
1561     // android.sync
1562 
1563     static const int32_t maxLatency =
1564             hasCapability(FULL_LEVEL) ? ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL : 3;
1565     ADD_STATIC_ENTRY(ANDROID_SYNC_MAX_LATENCY, &maxLatency, 1);
1566 
1567     // android.control
1568 
1569     if (hasCapability(BACKWARD_COMPATIBLE)) {
1570         const uint8_t availableControlModes[] = {
1571             ANDROID_CONTROL_MODE_OFF, ANDROID_CONTROL_MODE_AUTO, ANDROID_CONTROL_MODE_USE_SCENE_MODE
1572         };
1573         ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_MODES,
1574                 availableControlModes, sizeof(availableControlModes));
1575     } else {
1576         const uint8_t availableControlModes[] = {
1577             ANDROID_CONTROL_MODE_AUTO
1578         };
1579         ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_MODES,
1580                 availableControlModes, sizeof(availableControlModes));
1581     }
1582 
1583     const uint8_t availableSceneModes[] = {
1584         hasCapability(BACKWARD_COMPATIBLE) ?
1585             ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY :
1586             ANDROID_CONTROL_SCENE_MODE_DISABLED
1587     };
1588     ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
1589             availableSceneModes, sizeof(availableSceneModes));
1590 
1591     if (hasCapability(BACKWARD_COMPATIBLE)) {
1592         static const uint8_t availableEffects[] = {
1593             ANDROID_CONTROL_EFFECT_MODE_OFF
1594         };
1595         ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_EFFECTS,
1596                 availableEffects, sizeof(availableEffects));
1597     }
1598 
1599     if (hasCapability(BACKWARD_COMPATIBLE)) {
1600         static const int32_t max3aRegions[] = {/*AE*/ 1,/*AWB*/ 0,/*AF*/ 1};
1601         ADD_STATIC_ENTRY(ANDROID_CONTROL_MAX_REGIONS,
1602                 max3aRegions, sizeof(max3aRegions)/sizeof(max3aRegions[0]));
1603 
1604         static const uint8_t availableAeModes[] = {
1605             ANDROID_CONTROL_AE_MODE_OFF,
1606             ANDROID_CONTROL_AE_MODE_ON
1607         };
1608         ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_MODES,
1609                 availableAeModes, sizeof(availableAeModes));
1610 
1611         static const camera_metadata_rational exposureCompensationStep = {
1612             0, 3
1613         };
1614         ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_COMPENSATION_STEP,
1615                 &exposureCompensationStep, 1);
1616 
1617         static const int32_t exposureCompensationRange[] = {0, 0};
1618         ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
1619                 exposureCompensationRange,
1620                 sizeof(exposureCompensationRange)/sizeof(int32_t));
1621     }
1622 
1623     static const int32_t availableTargetFpsRanges[] = {
1624         15, 30, 30, 30
1625     };
1626     ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
1627             availableTargetFpsRanges,
1628             sizeof(availableTargetFpsRanges)/sizeof(int32_t));
1629 
1630     if (hasCapability(BACKWARD_COMPATIBLE)) {
1631         static const uint8_t availableAntibandingModes[] = {
1632             ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
1633             ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO
1634         };
1635         ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
1636                 availableAntibandingModes, sizeof(availableAntibandingModes));
1637     }
1638 
1639     const uint8_t aeLockAvailable = hasCapability(BACKWARD_COMPATIBLE) ?
1640             ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
1641 
1642     ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
1643             &aeLockAvailable, 1);
1644 
1645     if (hasCapability(BACKWARD_COMPATIBLE)) {
1646         static const uint8_t availableAwbModes[] = {
1647             ANDROID_CONTROL_AWB_MODE_OFF,
1648             ANDROID_CONTROL_AWB_MODE_AUTO,
1649             ANDROID_CONTROL_AWB_MODE_INCANDESCENT,
1650             ANDROID_CONTROL_AWB_MODE_FLUORESCENT,
1651             ANDROID_CONTROL_AWB_MODE_DAYLIGHT,
1652             ANDROID_CONTROL_AWB_MODE_SHADE
1653         };
1654         ADD_STATIC_ENTRY(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
1655                 availableAwbModes, sizeof(availableAwbModes));
1656     }
1657 
1658     const uint8_t awbLockAvailable = hasCapability(BACKWARD_COMPATIBLE) ?
1659             ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
1660 
1661     ADD_STATIC_ENTRY(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
1662             &awbLockAvailable, 1);
1663 
1664     static const uint8_t availableAfModesBack[] = {
1665             ANDROID_CONTROL_AF_MODE_OFF,
1666             ANDROID_CONTROL_AF_MODE_AUTO,
1667             ANDROID_CONTROL_AF_MODE_MACRO,
1668             ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,
1669             ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE
1670     };
1671 
1672     static const uint8_t availableAfModesFront[] = {
1673             ANDROID_CONTROL_AF_MODE_OFF
1674     };
1675 
1676     if (hasCapability(BACKWARD_COMPATIBLE)) {
1677         ADD_STATIC_ENTRY(ANDROID_CONTROL_AF_AVAILABLE_MODES,
1678                 availableAfModesBack, sizeof(availableAfModesBack));
1679     } else {
1680         ADD_STATIC_ENTRY(ANDROID_CONTROL_AF_AVAILABLE_MODES,
1681                 availableAfModesFront, sizeof(availableAfModesFront));
1682     }
1683 
1684     static const uint8_t availableVstabModes[] = {
1685         ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF
1686     };
1687     ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
1688             availableVstabModes, sizeof(availableVstabModes));
1689 
1690     // android.colorCorrection
1691 
1692     if (hasCapability(BACKWARD_COMPATIBLE)) {
1693         const uint8_t availableAberrationModes[] = {
1694             ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
1695             ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
1696             ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY
1697         };
1698         ADD_STATIC_ENTRY(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
1699                 availableAberrationModes, sizeof(availableAberrationModes));
1700     } else {
1701         const uint8_t availableAberrationModes[] = {
1702             ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
1703         };
1704         ADD_STATIC_ENTRY(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
1705                 availableAberrationModes, sizeof(availableAberrationModes));
1706     }
1707     // android.edge
1708 
1709     if (hasCapability(BACKWARD_COMPATIBLE)) {
1710         const uint8_t availableEdgeModes[] = {
1711             ANDROID_EDGE_MODE_OFF, ANDROID_EDGE_MODE_FAST, ANDROID_EDGE_MODE_HIGH_QUALITY
1712         };
1713         ADD_STATIC_ENTRY(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
1714                 availableEdgeModes, sizeof(availableEdgeModes));
1715     } else {
1716         const uint8_t availableEdgeModes[] = {
1717             ANDROID_EDGE_MODE_OFF
1718         };
1719         ADD_STATIC_ENTRY(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
1720                 availableEdgeModes, sizeof(availableEdgeModes));
1721     }
1722 
1723     // android.info
1724 
1725     const uint8_t supportedHardwareLevel =
1726             hasCapability(FULL_LEVEL) ? ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL :
1727                     ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
1728     ADD_STATIC_ENTRY(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
1729                 &supportedHardwareLevel,
1730                 /*count*/1);
1731 
1732     // android.noiseReduction
1733 
1734     if (hasCapability(BACKWARD_COMPATIBLE)) {
1735         const uint8_t availableNoiseReductionModes[] = {
1736             ANDROID_NOISE_REDUCTION_MODE_OFF,
1737             ANDROID_NOISE_REDUCTION_MODE_FAST,
1738             ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY
1739         };
1740         ADD_STATIC_ENTRY(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
1741                 availableNoiseReductionModes, sizeof(availableNoiseReductionModes));
1742     } else {
1743         const uint8_t availableNoiseReductionModes[] = {
1744             ANDROID_NOISE_REDUCTION_MODE_OFF,
1745         };
1746         ADD_STATIC_ENTRY(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
1747                 availableNoiseReductionModes, sizeof(availableNoiseReductionModes));
1748     }
1749 
1750     // android.depth
1751 
1752     if (hasCapability(DEPTH_OUTPUT)) {
1753 
1754         static const int32_t maxDepthSamples = 100;
1755         ADD_STATIC_ENTRY(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
1756                 &maxDepthSamples, 1);
1757 
1758         static const int32_t availableDepthStreamConfigurations[] = {
1759             HAL_PIXEL_FORMAT_Y16, 160, 120, ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT,
1760             HAL_PIXEL_FORMAT_BLOB, maxDepthSamples,1, ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT
1761         };
1762         ADD_STATIC_ENTRY(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
1763                 availableDepthStreamConfigurations,
1764                 sizeof(availableDepthStreamConfigurations)/sizeof(int32_t));
1765 
1766         static const int64_t availableDepthMinFrameDurations[] = {
1767             HAL_PIXEL_FORMAT_Y16, 160, 120, Sensor::kFrameDurationRange[0],
1768             HAL_PIXEL_FORMAT_BLOB, maxDepthSamples,1, Sensor::kFrameDurationRange[0]
1769         };
1770         ADD_STATIC_ENTRY(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
1771                 availableDepthMinFrameDurations,
1772                 sizeof(availableDepthMinFrameDurations)/sizeof(int64_t));
1773 
1774         static const int64_t availableDepthStallDurations[] = {
1775             HAL_PIXEL_FORMAT_Y16, 160, 120, Sensor::kFrameDurationRange[0],
1776             HAL_PIXEL_FORMAT_BLOB, maxDepthSamples,1, Sensor::kFrameDurationRange[0]
1777         };
1778         ADD_STATIC_ENTRY(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
1779                 availableDepthStallDurations,
1780                 sizeof(availableDepthStallDurations)/sizeof(int64_t));
1781 
1782         static const uint8_t depthIsExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
1783         ADD_STATIC_ENTRY(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE,
1784                 &depthIsExclusive, 1);
1785     }
1786 
1787     // android.shading
1788 
1789     if (hasCapability(BACKWARD_COMPATIBLE)) {
1790         const uint8_t availableShadingModes[] = {
1791             ANDROID_SHADING_MODE_OFF, ANDROID_SHADING_MODE_FAST, ANDROID_SHADING_MODE_HIGH_QUALITY
1792         };
1793         ADD_STATIC_ENTRY(ANDROID_SHADING_AVAILABLE_MODES, availableShadingModes,
1794                 sizeof(availableShadingModes));
1795     } else {
1796         const uint8_t availableShadingModes[] = {
1797             ANDROID_SHADING_MODE_OFF
1798         };
1799         ADD_STATIC_ENTRY(ANDROID_SHADING_AVAILABLE_MODES, availableShadingModes,
1800                 sizeof(availableShadingModes));
1801     }
1802 
1803     // android.request
1804 
1805     static const int32_t maxNumOutputStreams[] = {
1806             kMaxRawStreamCount, kMaxProcessedStreamCount, kMaxJpegStreamCount
1807     };
1808     ADD_STATIC_ENTRY(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, maxNumOutputStreams, 3);
1809 
1810     static const uint8_t maxPipelineDepth = kMaxBufferCount;
1811     ADD_STATIC_ENTRY(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, &maxPipelineDepth, 1);
1812 
1813     static const int32_t partialResultCount = 1;
1814     ADD_STATIC_ENTRY(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
1815             &partialResultCount, /*count*/1);
1816 
1817     SortedVector<uint8_t> caps;
1818     for (size_t i = 0; i < mCapabilities.size(); i++) {
1819         switch(mCapabilities[i]) {
1820             case BACKWARD_COMPATIBLE:
1821                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
1822                 break;
1823             case MANUAL_SENSOR:
1824                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
1825                 break;
1826             case MANUAL_POST_PROCESSING:
1827                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
1828                 break;
1829             case RAW:
1830                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
1831                 break;
1832             case PRIVATE_REPROCESSING:
1833                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
1834                 break;
1835             case READ_SENSOR_SETTINGS:
1836                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
1837                 break;
1838             case BURST_CAPTURE:
1839                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
1840                 break;
1841             case YUV_REPROCESSING:
1842                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
1843                 break;
1844             case DEPTH_OUTPUT:
1845                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT);
1846                 break;
1847             case CONSTRAINED_HIGH_SPEED_VIDEO:
1848                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
1849                 break;
1850             case MOTION_TRACKING:
1851                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MOTION_TRACKING);
1852                 break;
1853             default:
1854                 // Ignore LEVELs
1855                 break;
1856         }
1857     }
1858     ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, caps.array(), caps.size());
1859 
1860     // Scan a default request template for included request keys
1861     Vector<int32_t> availableRequestKeys;
1862     const camera_metadata_t *previewRequest =
1863         constructDefaultRequestSettings(CAMERA3_TEMPLATE_PREVIEW);
1864     for (size_t i = 0; i < get_camera_metadata_entry_count(previewRequest); i++) {
1865         camera_metadata_ro_entry_t entry;
1866         get_camera_metadata_ro_entry(previewRequest, i, &entry);
1867         availableRequestKeys.add(entry.tag);
1868     }
1869     ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, availableRequestKeys.array(),
1870             availableRequestKeys.size());
1871 
1872     // Add a few more result keys. Must be kept up to date with the various places that add these
1873 
1874     Vector<int32_t> availableResultKeys(availableRequestKeys);
1875     if (hasCapability(BACKWARD_COMPATIBLE)) {
1876         availableResultKeys.add(ANDROID_CONTROL_AE_STATE);
1877         availableResultKeys.add(ANDROID_CONTROL_AF_STATE);
1878         availableResultKeys.add(ANDROID_CONTROL_AWB_STATE);
1879         availableResultKeys.add(ANDROID_FLASH_STATE);
1880         availableResultKeys.add(ANDROID_LENS_STATE);
1881         availableResultKeys.add(ANDROID_LENS_FOCUS_RANGE);
1882         availableResultKeys.add(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW);
1883         availableResultKeys.add(ANDROID_STATISTICS_SCENE_FLICKER);
1884     }
1885 
1886     if (hasCapability(DEPTH_OUTPUT)) {
1887         availableResultKeys.add(ANDROID_LENS_POSE_ROTATION);
1888         availableResultKeys.add(ANDROID_LENS_POSE_TRANSLATION);
1889         availableResultKeys.add(ANDROID_LENS_INTRINSIC_CALIBRATION);
1890         availableResultKeys.add(ANDROID_LENS_RADIAL_DISTORTION);
1891     }
1892 
1893     availableResultKeys.add(ANDROID_REQUEST_PIPELINE_DEPTH);
1894     availableResultKeys.add(ANDROID_SENSOR_TIMESTAMP);
1895 
1896     ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, availableResultKeys.array(),
1897             availableResultKeys.size());
1898 
1899     // Needs to be last, to collect all the keys set
1900 
1901     availableCharacteristicsKeys.add(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
1902     info.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
1903             availableCharacteristicsKeys);
1904 
1905     mCameraInfo = info.release();
1906 
1907 #undef ADD_STATIC_ENTRY
1908     return OK;
1909 }
1910 
process3A(CameraMetadata & settings)1911 status_t EmulatedFakeCamera3::process3A(CameraMetadata &settings) {
1912     /**
1913      * Extract top-level 3A controls
1914      */
1915     status_t res;
1916 
1917     camera_metadata_entry e;
1918 
1919     e = settings.find(ANDROID_CONTROL_MODE);
1920     if (e.count == 0) {
1921         ALOGE("%s: No control mode entry!", __FUNCTION__);
1922         return BAD_VALUE;
1923     }
1924     uint8_t controlMode = e.data.u8[0];
1925 
1926     if (controlMode == ANDROID_CONTROL_MODE_OFF) {
1927         mAeMode   = ANDROID_CONTROL_AE_MODE_OFF;
1928         mAfMode   = ANDROID_CONTROL_AF_MODE_OFF;
1929         mAwbMode  = ANDROID_CONTROL_AWB_MODE_OFF;
1930         mAeState  = ANDROID_CONTROL_AE_STATE_INACTIVE;
1931         mAfState  = ANDROID_CONTROL_AF_STATE_INACTIVE;
1932         mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
1933         update3A(settings);
1934         return OK;
1935     } else if (controlMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
1936         if (!hasCapability(BACKWARD_COMPATIBLE)) {
1937             ALOGE("%s: Can't use scene mode when BACKWARD_COMPATIBLE not supported!",
1938                   __FUNCTION__);
1939             return BAD_VALUE;
1940         }
1941 
1942         e = settings.find(ANDROID_CONTROL_SCENE_MODE);
1943         if (e.count == 0) {
1944             ALOGE("%s: No scene mode entry!", __FUNCTION__);
1945             return BAD_VALUE;
1946         }
1947         uint8_t sceneMode = e.data.u8[0];
1948 
1949         switch(sceneMode) {
1950             case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY:
1951                 mFacePriority = true;
1952                 break;
1953             default:
1954                 ALOGE("%s: Emulator doesn't support scene mode %d",
1955                         __FUNCTION__, sceneMode);
1956                 return BAD_VALUE;
1957         }
1958     } else {
1959         mFacePriority = false;
1960     }
1961 
1962     // controlMode == AUTO or sceneMode = FACE_PRIORITY
1963     // Process individual 3A controls
1964 
1965     res = doFakeAE(settings);
1966     if (res != OK) return res;
1967 
1968     res = doFakeAF(settings);
1969     if (res != OK) return res;
1970 
1971     res = doFakeAWB(settings);
1972     if (res != OK) return res;
1973 
1974     update3A(settings);
1975     return OK;
1976 }
1977 
doFakeAE(CameraMetadata & settings)1978 status_t EmulatedFakeCamera3::doFakeAE(CameraMetadata &settings) {
1979     camera_metadata_entry e;
1980 
1981     e = settings.find(ANDROID_CONTROL_AE_MODE);
1982     if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
1983         ALOGE("%s: No AE mode entry!", __FUNCTION__);
1984         return BAD_VALUE;
1985     }
1986     uint8_t aeMode = (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AE_MODE_ON;
1987     mAeMode = aeMode;
1988 
1989     switch (aeMode) {
1990         case ANDROID_CONTROL_AE_MODE_OFF:
1991             // AE is OFF
1992             mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
1993             return OK;
1994         case ANDROID_CONTROL_AE_MODE_ON:
1995             // OK for AUTO modes
1996             break;
1997         default:
1998             // Mostly silently ignore unsupported modes
1999             ALOGV("%s: Emulator doesn't support AE mode %d, assuming ON",
2000                     __FUNCTION__, aeMode);
2001             break;
2002     }
2003 
2004     e = settings.find(ANDROID_CONTROL_AE_LOCK);
2005     bool aeLocked = (e.count > 0) ? (e.data.u8[0] == ANDROID_CONTROL_AE_LOCK_ON) : false;
2006 
2007     e = settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER);
2008     bool precaptureTrigger = false;
2009     if (e.count != 0) {
2010         precaptureTrigger =
2011                 (e.data.u8[0] == ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START);
2012     }
2013 
2014     if (precaptureTrigger) {
2015         ALOGV("%s: Pre capture trigger = %d", __FUNCTION__, precaptureTrigger);
2016     } else if (e.count > 0) {
2017         ALOGV("%s: Pre capture trigger was present? %zu",
2018               __FUNCTION__,
2019               e.count);
2020     }
2021 
2022     if (precaptureTrigger || mAeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
2023         // Run precapture sequence
2024         if (mAeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
2025             mAeCounter = 0;
2026         }
2027 
2028         if (mFacePriority) {
2029             mAeTargetExposureTime = kFacePriorityExposureTime;
2030         } else {
2031             mAeTargetExposureTime = kNormalExposureTime;
2032         }
2033 
2034         if (mAeCounter > kPrecaptureMinFrames &&
2035                 (mAeTargetExposureTime - mAeCurrentExposureTime) <
2036                 mAeTargetExposureTime / 10) {
2037             // Done with precapture
2038             mAeCounter = 0;
2039             mAeState = aeLocked ? ANDROID_CONTROL_AE_STATE_LOCKED :
2040                     ANDROID_CONTROL_AE_STATE_CONVERGED;
2041         } else {
2042             // Converge some more
2043             mAeCurrentExposureTime +=
2044                     (mAeTargetExposureTime - mAeCurrentExposureTime) *
2045                     kExposureTrackRate;
2046             mAeCounter++;
2047             mAeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE;
2048         }
2049 
2050     } else if (!aeLocked) {
2051         // Run standard occasional AE scan
2052         switch (mAeState) {
2053             case ANDROID_CONTROL_AE_STATE_INACTIVE:
2054                 mAeState = ANDROID_CONTROL_AE_STATE_SEARCHING;
2055                 break;
2056             case ANDROID_CONTROL_AE_STATE_CONVERGED:
2057                 mAeCounter++;
2058                 if (mAeCounter > kStableAeMaxFrames) {
2059                     mAeTargetExposureTime =
2060                             mFacePriority ? kFacePriorityExposureTime :
2061                             kNormalExposureTime;
2062                     float exposureStep = ((double)rand() / RAND_MAX) *
2063                             (kExposureWanderMax - kExposureWanderMin) +
2064                             kExposureWanderMin;
2065                     mAeTargetExposureTime *= std::pow(2, exposureStep);
2066                     mAeState = ANDROID_CONTROL_AE_STATE_SEARCHING;
2067                 }
2068                 break;
2069             case ANDROID_CONTROL_AE_STATE_SEARCHING:
2070                 mAeCurrentExposureTime +=
2071                         (mAeTargetExposureTime - mAeCurrentExposureTime) *
2072                         kExposureTrackRate;
2073                 if (abs(mAeTargetExposureTime - mAeCurrentExposureTime) <
2074                         mAeTargetExposureTime / 10) {
2075                     // Close enough
2076                     mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
2077                     mAeCounter = 0;
2078                 }
2079                 break;
2080             case ANDROID_CONTROL_AE_STATE_LOCKED:
2081                 mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
2082                 mAeCounter = 0;
2083                 break;
2084             default:
2085                 ALOGE("%s: Emulator in unexpected AE state %d",
2086                         __FUNCTION__, mAeState);
2087                 return INVALID_OPERATION;
2088         }
2089     } else {
2090         // AE is locked
2091         mAeState = ANDROID_CONTROL_AE_STATE_LOCKED;
2092     }
2093 
2094     return OK;
2095 }
2096 
doFakeAF(CameraMetadata & settings)2097 status_t EmulatedFakeCamera3::doFakeAF(CameraMetadata &settings) {
2098     camera_metadata_entry e;
2099 
2100     e = settings.find(ANDROID_CONTROL_AF_MODE);
2101     if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
2102         ALOGE("%s: No AF mode entry!", __FUNCTION__);
2103         return BAD_VALUE;
2104     }
2105     uint8_t afMode = (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AF_MODE_OFF;
2106 
2107     e = settings.find(ANDROID_CONTROL_AF_TRIGGER);
2108     typedef camera_metadata_enum_android_control_af_trigger af_trigger_t;
2109     af_trigger_t afTrigger;
2110     if (e.count != 0) {
2111         afTrigger = static_cast<af_trigger_t>(e.data.u8[0]);
2112 
2113         ALOGV("%s: AF trigger set to 0x%x", __FUNCTION__, afTrigger);
2114         ALOGV("%s: AF mode is 0x%x", __FUNCTION__, afMode);
2115     } else {
2116         afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
2117     }
2118 
2119     switch (afMode) {
2120         case ANDROID_CONTROL_AF_MODE_OFF:
2121             mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
2122             return OK;
2123         case ANDROID_CONTROL_AF_MODE_AUTO:
2124         case ANDROID_CONTROL_AF_MODE_MACRO:
2125         case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
2126         case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
2127             break;
2128         default:
2129             ALOGE("%s: Emulator doesn't support AF mode %d",
2130                     __FUNCTION__, afMode);
2131             return BAD_VALUE;
2132     }
2133 
2134     bool afModeChanged = mAfMode != afMode;
2135     mAfMode = afMode;
2136 
2137     /**
2138      * Simulate AF triggers. Transition at most 1 state per frame.
2139      * - Focusing always succeeds (goes into locked, or PASSIVE_SCAN).
2140      */
2141 
2142     bool afTriggerStart = false;
2143     bool afTriggerCancel = false;
2144     switch (afTrigger) {
2145         case ANDROID_CONTROL_AF_TRIGGER_IDLE:
2146             break;
2147         case ANDROID_CONTROL_AF_TRIGGER_START:
2148             afTriggerStart = true;
2149             break;
2150         case ANDROID_CONTROL_AF_TRIGGER_CANCEL:
2151             afTriggerCancel = true;
2152             // Cancel trigger always transitions into INACTIVE
2153             mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
2154 
2155             ALOGV("%s: AF State transition to STATE_INACTIVE", __FUNCTION__);
2156 
2157             // Stay in 'inactive' until at least next frame
2158             return OK;
2159         default:
2160             ALOGE("%s: Unknown af trigger value %d", __FUNCTION__, afTrigger);
2161             return BAD_VALUE;
2162     }
2163 
2164     // If we get down here, we're either in an autofocus mode
2165     //  or in a continuous focus mode (and no other modes)
2166 
2167     int oldAfState = mAfState;
2168     switch (mAfState) {
2169         case ANDROID_CONTROL_AF_STATE_INACTIVE:
2170             if (afTriggerStart) {
2171                 switch (afMode) {
2172                     case ANDROID_CONTROL_AF_MODE_AUTO:
2173                         // fall-through
2174                     case ANDROID_CONTROL_AF_MODE_MACRO:
2175                         mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
2176                         break;
2177                     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
2178                         // fall-through
2179                     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
2180                         mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
2181                         break;
2182                 }
2183             } else {
2184                 // At least one frame stays in INACTIVE
2185                 if (!afModeChanged) {
2186                     switch (afMode) {
2187                         case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
2188                             // fall-through
2189                         case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
2190                             mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN;
2191                             break;
2192                     }
2193                 }
2194             }
2195             break;
2196         case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
2197             /**
2198              * When the AF trigger is activated, the algorithm should finish
2199              * its PASSIVE_SCAN if active, and then transition into AF_FOCUSED
2200              * or AF_NOT_FOCUSED as appropriate
2201              */
2202             if (afTriggerStart) {
2203                 // Randomly transition to focused or not focused
2204                 if (rand() % 3) {
2205                     mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
2206                 } else {
2207                     mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
2208                 }
2209             }
2210             /**
2211              * When the AF trigger is not involved, the AF algorithm should
2212              * start in INACTIVE state, and then transition into PASSIVE_SCAN
2213              * and PASSIVE_FOCUSED states
2214              */
2215             else if (!afTriggerCancel) {
2216                // Randomly transition to passive focus
2217                 if (rand() % 3 == 0) {
2218                     mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
2219                 }
2220             }
2221 
2222             break;
2223         case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
2224             if (afTriggerStart) {
2225                 // Randomly transition to focused or not focused
2226                 if (rand() % 3) {
2227                     mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
2228                 } else {
2229                     mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
2230                 }
2231             }
2232             // TODO: initiate passive scan (PASSIVE_SCAN)
2233             break;
2234         case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
2235             // Simulate AF sweep completing instantaneously
2236 
2237             // Randomly transition to focused or not focused
2238             if (rand() % 3) {
2239                 mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
2240             } else {
2241                 mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
2242             }
2243             break;
2244         case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
2245             if (afTriggerStart) {
2246                 switch (afMode) {
2247                     case ANDROID_CONTROL_AF_MODE_AUTO:
2248                         // fall-through
2249                     case ANDROID_CONTROL_AF_MODE_MACRO:
2250                         mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
2251                         break;
2252                     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
2253                         // fall-through
2254                     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
2255                         // continuous autofocus => trigger start has no effect
2256                         break;
2257                 }
2258             }
2259             break;
2260         case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
2261             if (afTriggerStart) {
2262                 switch (afMode) {
2263                     case ANDROID_CONTROL_AF_MODE_AUTO:
2264                         // fall-through
2265                     case ANDROID_CONTROL_AF_MODE_MACRO:
2266                         mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
2267                         break;
2268                     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
2269                         // fall-through
2270                     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
2271                         // continuous autofocus => trigger start has no effect
2272                         break;
2273                 }
2274             }
2275             break;
2276         default:
2277             ALOGE("%s: Bad af state %d", __FUNCTION__, mAfState);
2278     }
2279 
2280     {
2281         char afStateString[100] = {0,};
2282         camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
2283                 oldAfState,
2284                 afStateString,
2285                 sizeof(afStateString));
2286 
2287         char afNewStateString[100] = {0,};
2288         camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
2289                 mAfState,
2290                 afNewStateString,
2291                 sizeof(afNewStateString));
2292         ALOGVV("%s: AF state transitioned from %s to %s",
2293               __FUNCTION__, afStateString, afNewStateString);
2294     }
2295 
2296 
2297     return OK;
2298 }
2299 
doFakeAWB(CameraMetadata & settings)2300 status_t EmulatedFakeCamera3::doFakeAWB(CameraMetadata &settings) {
2301     camera_metadata_entry e;
2302 
2303     e = settings.find(ANDROID_CONTROL_AWB_MODE);
2304     if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
2305         ALOGE("%s: No AWB mode entry!", __FUNCTION__);
2306         return BAD_VALUE;
2307     }
2308     uint8_t awbMode = (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AWB_MODE_AUTO;
2309 
2310     // TODO: Add white balance simulation
2311 
2312     e = settings.find(ANDROID_CONTROL_AWB_LOCK);
2313     bool awbLocked = (e.count > 0) ? (e.data.u8[0] == ANDROID_CONTROL_AWB_LOCK_ON) : false;
2314 
2315     switch (awbMode) {
2316         case ANDROID_CONTROL_AWB_MODE_OFF:
2317             mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
2318             break;
2319         case ANDROID_CONTROL_AWB_MODE_AUTO:
2320         case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
2321         case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
2322         case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
2323         case ANDROID_CONTROL_AWB_MODE_SHADE:
2324             // Always magically right, or locked
2325             mAwbState = awbLocked ? ANDROID_CONTROL_AWB_STATE_LOCKED :
2326                     ANDROID_CONTROL_AWB_STATE_CONVERGED;
2327             break;
2328         default:
2329             ALOGE("%s: Emulator doesn't support AWB mode %d",
2330                     __FUNCTION__, awbMode);
2331             return BAD_VALUE;
2332     }
2333 
2334     return OK;
2335 }
2336 
2337 // Update the 3A Region by calculating the intersection of AE/AF/AWB and CROP
2338 // regions
update3ARegion(uint32_t tag,CameraMetadata & settings)2339 static void update3ARegion(uint32_t tag, CameraMetadata &settings) {
2340     if (tag != ANDROID_CONTROL_AE_REGIONS &&
2341         tag != ANDROID_CONTROL_AF_REGIONS &&
2342         tag != ANDROID_CONTROL_AWB_REGIONS) {
2343         return;
2344     }
2345     camera_metadata_entry_t entry;
2346     entry = settings.find(ANDROID_SCALER_CROP_REGION);
2347     if (entry.count > 0) {
2348         int32_t cropRegion[4];
2349         cropRegion[0] =  entry.data.i32[0];
2350         cropRegion[1] =  entry.data.i32[1];
2351         cropRegion[2] =  entry.data.i32[2] + cropRegion[0];
2352         cropRegion[3] =  entry.data.i32[3] + cropRegion[1];
2353         entry = settings.find(tag);
2354         if (entry.count > 0) {
2355             int32_t* ARegion = entry.data.i32;
2356             // calculate the intersection of AE/AF/AWB and CROP regions
2357             if (ARegion[0] < cropRegion[2] && cropRegion[0] < ARegion[2] &&
2358                 ARegion[1] < cropRegion[3] && cropRegion[1] < ARegion[3]) {
2359                 int32_t interSect[5];
2360                 interSect[0] = std::max(ARegion[0], cropRegion[0]);
2361                 interSect[1] = std::max(ARegion[1], cropRegion[1]);
2362                 interSect[2] = std::min(ARegion[2], cropRegion[2]);
2363                 interSect[3] = std::min(ARegion[3], cropRegion[3]);
2364                 interSect[4] = ARegion[4];
2365                 settings.update(tag, &interSect[0], 5);
2366             }
2367         }
2368     }
2369 }
2370 
update3A(CameraMetadata & settings)2371 void EmulatedFakeCamera3::update3A(CameraMetadata &settings) {
2372     if (mAeMode != ANDROID_CONTROL_AE_MODE_OFF) {
2373         settings.update(ANDROID_SENSOR_EXPOSURE_TIME,
2374                 &mAeCurrentExposureTime, 1);
2375         settings.update(ANDROID_SENSOR_SENSITIVITY,
2376                 &mAeCurrentSensitivity, 1);
2377     }
2378 
2379     settings.update(ANDROID_CONTROL_AE_STATE,
2380             &mAeState, 1);
2381     settings.update(ANDROID_CONTROL_AF_STATE,
2382             &mAfState, 1);
2383     settings.update(ANDROID_CONTROL_AWB_STATE,
2384             &mAwbState, 1);
2385 
2386     uint8_t lensState;
2387     switch (mAfState) {
2388         case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
2389         case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
2390             lensState = ANDROID_LENS_STATE_MOVING;
2391             break;
2392         case ANDROID_CONTROL_AF_STATE_INACTIVE:
2393         case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
2394         case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
2395         case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
2396         case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
2397         default:
2398             lensState = ANDROID_LENS_STATE_STATIONARY;
2399             break;
2400     }
2401     settings.update(ANDROID_LENS_STATE, &lensState, 1);
2402     update3ARegion(ANDROID_CONTROL_AE_REGIONS, settings);
2403     update3ARegion(ANDROID_CONTROL_AF_REGIONS, settings);
2404     update3ARegion(ANDROID_CONTROL_AWB_REGIONS, settings);
2405 }
2406 
signalReadoutIdle()2407 void EmulatedFakeCamera3::signalReadoutIdle() {
2408     Mutex::Autolock l(mLock);
2409     // Need to chek isIdle again because waiting on mLock may have allowed
2410     // something to be placed in the in-flight queue.
2411     if (mStatus == STATUS_ACTIVE && mReadoutThread->isIdle()) {
2412         ALOGV("Now idle");
2413         mStatus = STATUS_READY;
2414     }
2415 }
2416 
onSensorEvent(uint32_t frameNumber,Event e,nsecs_t timestamp)2417 void EmulatedFakeCamera3::onSensorEvent(uint32_t frameNumber, Event e,
2418         nsecs_t timestamp) {
2419     switch(e) {
2420         case Sensor::SensorListener::EXPOSURE_START: {
2421             ALOGVV("%s: Frame %d: Sensor started exposure at %lld",
2422                     __FUNCTION__, frameNumber, timestamp);
2423             // Trigger shutter notify to framework
2424             camera3_notify_msg_t msg;
2425             msg.type = CAMERA3_MSG_SHUTTER;
2426             msg.message.shutter.frame_number = frameNumber;
2427             msg.message.shutter.timestamp = timestamp;
2428             sendNotify(&msg);
2429             break;
2430         }
2431         default:
2432             ALOGW("%s: Unexpected sensor event %d at %" PRId64, __FUNCTION__,
2433                     e, timestamp);
2434             break;
2435     }
2436 }
2437 
ReadoutThread(EmulatedFakeCamera3 * parent)2438 EmulatedFakeCamera3::ReadoutThread::ReadoutThread(EmulatedFakeCamera3 *parent) :
2439         mParent(parent), mJpegWaiting(false) {
2440 }
2441 
~ReadoutThread()2442 EmulatedFakeCamera3::ReadoutThread::~ReadoutThread() {
2443     for (List<Request>::iterator i = mInFlightQueue.begin();
2444          i != mInFlightQueue.end(); i++) {
2445         delete i->buffers;
2446         delete i->sensorBuffers;
2447     }
2448 }
2449 
queueCaptureRequest(const Request & r)2450 void EmulatedFakeCamera3::ReadoutThread::queueCaptureRequest(const Request &r) {
2451     Mutex::Autolock l(mLock);
2452 
2453     mInFlightQueue.push_back(r);
2454     mInFlightSignal.signal();
2455 }
2456 
isIdle()2457 bool EmulatedFakeCamera3::ReadoutThread::isIdle() {
2458     Mutex::Autolock l(mLock);
2459     return mInFlightQueue.empty() && !mThreadActive;
2460 }
2461 
waitForReadout()2462 status_t EmulatedFakeCamera3::ReadoutThread::waitForReadout() {
2463     status_t res;
2464     Mutex::Autolock l(mLock);
2465     int loopCount = 0;
2466     while (mInFlightQueue.size() >= kMaxQueueSize) {
2467         res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
2468         if (res != OK && res != TIMED_OUT) {
2469             ALOGE("%s: Error waiting for in-flight queue to shrink",
2470                     __FUNCTION__);
2471             return INVALID_OPERATION;
2472         }
2473         if (loopCount == kMaxWaitLoops) {
2474             ALOGE("%s: Timed out waiting for in-flight queue to shrink",
2475                     __FUNCTION__);
2476             return TIMED_OUT;
2477         }
2478         loopCount++;
2479     }
2480     return OK;
2481 }
2482 
threadLoop()2483 bool EmulatedFakeCamera3::ReadoutThread::threadLoop() {
2484     status_t res;
2485 
2486     ALOGVV("%s: ReadoutThread waiting for request", __FUNCTION__);
2487 
2488     // First wait for a request from the in-flight queue
2489 
2490     if (mCurrentRequest.settings.isEmpty()) {
2491         Mutex::Autolock l(mLock);
2492         if (mInFlightQueue.empty()) {
2493             res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
2494             if (res == TIMED_OUT) {
2495                 ALOGVV("%s: ReadoutThread: Timed out waiting for request",
2496                         __FUNCTION__);
2497                 return true;
2498             } else if (res != NO_ERROR) {
2499                 ALOGE("%s: Error waiting for capture requests: %d",
2500                         __FUNCTION__, res);
2501                 return false;
2502             }
2503         }
2504         mCurrentRequest.frameNumber = mInFlightQueue.begin()->frameNumber;
2505         mCurrentRequest.settings.acquire(mInFlightQueue.begin()->settings);
2506         mCurrentRequest.buffers = mInFlightQueue.begin()->buffers;
2507         mCurrentRequest.sensorBuffers = mInFlightQueue.begin()->sensorBuffers;
2508         mInFlightQueue.erase(mInFlightQueue.begin());
2509         mInFlightSignal.signal();
2510         mThreadActive = true;
2511         ALOGVV("%s: Beginning readout of frame %d", __FUNCTION__,
2512                 mCurrentRequest.frameNumber);
2513     }
2514 
2515     // Then wait for it to be delivered from the sensor
2516     ALOGVV("%s: ReadoutThread: Wait for frame to be delivered from sensor",
2517             __FUNCTION__);
2518 
2519     nsecs_t captureTime;
2520     bool gotFrame =
2521             mParent->mSensor->waitForNewFrame(kWaitPerLoop, &captureTime);
2522     if (!gotFrame) {
2523         ALOGVV("%s: ReadoutThread: Timed out waiting for sensor frame",
2524                 __FUNCTION__);
2525         return true;
2526     }
2527 
2528     ALOGVV("Sensor done with readout for frame %d, captured at %lld ",
2529             mCurrentRequest.frameNumber, captureTime);
2530 
2531     // Check if we need to JPEG encode a buffer, and send it for async
2532     // compression if so. Otherwise prepare the buffer for return.
2533     bool needJpeg = false;
2534     HalBufferVector::iterator buf = mCurrentRequest.buffers->begin();
2535     while(buf != mCurrentRequest.buffers->end()) {
2536         bool goodBuffer = true;
2537         if ( buf->stream->format ==
2538                 HAL_PIXEL_FORMAT_BLOB && buf->stream->data_space != HAL_DATASPACE_DEPTH) {
2539             Mutex::Autolock jl(mJpegLock);
2540             if (mJpegWaiting) {
2541                 // This shouldn't happen, because processCaptureRequest should
2542                 // be stalling until JPEG compressor is free.
2543                 ALOGE("%s: Already processing a JPEG!", __FUNCTION__);
2544                 goodBuffer = false;
2545             }
2546             if (goodBuffer) {
2547                 // Compressor takes ownership of sensorBuffers here
2548                 res = mParent->mJpegCompressor->start(mCurrentRequest.sensorBuffers,
2549                         this, &(mCurrentRequest.settings));
2550                 goodBuffer = (res == OK);
2551             }
2552             if (goodBuffer) {
2553                 needJpeg = true;
2554 
2555                 mJpegHalBuffer = *buf;
2556                 mJpegFrameNumber = mCurrentRequest.frameNumber;
2557                 mJpegWaiting = true;
2558 
2559                 mCurrentRequest.sensorBuffers = NULL;
2560                 buf = mCurrentRequest.buffers->erase(buf);
2561 
2562                 continue;
2563             }
2564             ALOGE("%s: Error compressing output buffer: %s (%d)",
2565                         __FUNCTION__, strerror(-res), res);
2566             // fallthrough for cleanup
2567         }
2568         mParent->mGBM->unlock(*(buf->buffer));
2569 
2570         buf->status = goodBuffer ? CAMERA3_BUFFER_STATUS_OK :
2571                 CAMERA3_BUFFER_STATUS_ERROR;
2572         buf->acquire_fence = -1;
2573         buf->release_fence = -1;
2574 
2575         ++buf;
2576     } // end while
2577 
2578     // Construct result for all completed buffers and results
2579 
2580     camera3_capture_result result;
2581 
2582     if (mParent->hasCapability(BACKWARD_COMPATIBLE)) {
2583         static const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
2584         mCurrentRequest.settings.update(ANDROID_STATISTICS_SCENE_FLICKER,
2585                 &sceneFlicker, 1);
2586 
2587         static const uint8_t flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
2588         mCurrentRequest.settings.update(ANDROID_FLASH_STATE,
2589                 &flashState, 1);
2590 
2591         nsecs_t rollingShutterSkew = Sensor::kFrameDurationRange[0];
2592         mCurrentRequest.settings.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
2593                 &rollingShutterSkew, 1);
2594 
2595         float focusRange[] = { 1.0f/5.0f, 0 }; // 5 m to infinity in focus
2596         mCurrentRequest.settings.update(ANDROID_LENS_FOCUS_RANGE,
2597                 focusRange, sizeof(focusRange)/sizeof(float));
2598     }
2599 
2600     if (mParent->hasCapability(DEPTH_OUTPUT)) {
2601         camera_metadata_entry_t entry;
2602 
2603         find_camera_metadata_entry(mParent->mCameraInfo, ANDROID_LENS_POSE_TRANSLATION, &entry);
2604         mCurrentRequest.settings.update(ANDROID_LENS_POSE_TRANSLATION,
2605                 entry.data.f, entry.count);
2606 
2607         find_camera_metadata_entry(mParent->mCameraInfo, ANDROID_LENS_POSE_ROTATION, &entry);
2608         mCurrentRequest.settings.update(ANDROID_LENS_POSE_ROTATION,
2609                 entry.data.f, entry.count);
2610 
2611         find_camera_metadata_entry(mParent->mCameraInfo, ANDROID_LENS_INTRINSIC_CALIBRATION, &entry);
2612         mCurrentRequest.settings.update(ANDROID_LENS_INTRINSIC_CALIBRATION,
2613                 entry.data.f, entry.count);
2614 
2615         find_camera_metadata_entry(mParent->mCameraInfo, ANDROID_LENS_RADIAL_DISTORTION, &entry);
2616         mCurrentRequest.settings.update(ANDROID_LENS_RADIAL_DISTORTION,
2617                 entry.data.f, entry.count);
2618     }
2619 
2620     mCurrentRequest.settings.update(ANDROID_SENSOR_TIMESTAMP,
2621             &captureTime, 1);
2622 
2623 
2624     // JPEGs take a stage longer
2625     const uint8_t pipelineDepth = needJpeg ? kMaxBufferCount : kMaxBufferCount - 1;
2626     mCurrentRequest.settings.update(ANDROID_REQUEST_PIPELINE_DEPTH,
2627             &pipelineDepth, 1);
2628 
2629     result.frame_number = mCurrentRequest.frameNumber;
2630     result.result = mCurrentRequest.settings.getAndLock();
2631     result.num_output_buffers = mCurrentRequest.buffers->size();
2632     result.output_buffers = mCurrentRequest.buffers->array();
2633     result.input_buffer = nullptr;
2634     result.partial_result = 1;
2635 
2636     // Go idle if queue is empty, before sending result
2637     bool signalIdle = false;
2638     {
2639         Mutex::Autolock l(mLock);
2640         if (mInFlightQueue.empty()) {
2641             mThreadActive = false;
2642             signalIdle = true;
2643         }
2644     }
2645     if (signalIdle) mParent->signalReadoutIdle();
2646 
2647     // Send it off to the framework
2648     ALOGVV("%s: ReadoutThread: Send result to framework",
2649             __FUNCTION__);
2650     mParent->sendCaptureResult(&result);
2651 
2652     // Clean up
2653     mCurrentRequest.settings.unlock(result.result);
2654 
2655     delete mCurrentRequest.buffers;
2656     mCurrentRequest.buffers = NULL;
2657     if (!needJpeg) {
2658         delete mCurrentRequest.sensorBuffers;
2659         mCurrentRequest.sensorBuffers = NULL;
2660     }
2661     mCurrentRequest.settings.clear();
2662 
2663     return true;
2664 }
2665 
onJpegDone(const StreamBuffer & jpegBuffer,bool success)2666 void EmulatedFakeCamera3::ReadoutThread::onJpegDone(
2667         const StreamBuffer &jpegBuffer, bool success) {
2668     Mutex::Autolock jl(mJpegLock);
2669 
2670     mParent->mGBM->unlock(*(jpegBuffer.buffer));
2671 
2672     mJpegHalBuffer.status = success ?
2673             CAMERA3_BUFFER_STATUS_OK : CAMERA3_BUFFER_STATUS_ERROR;
2674     mJpegHalBuffer.acquire_fence = -1;
2675     mJpegHalBuffer.release_fence = -1;
2676     mJpegWaiting = false;
2677 
2678     camera3_capture_result result;
2679 
2680     result.frame_number = mJpegFrameNumber;
2681     result.result = NULL;
2682     result.num_output_buffers = 1;
2683     result.output_buffers = &mJpegHalBuffer;
2684     result.input_buffer = nullptr;
2685     result.partial_result = 0;
2686 
2687     if (!success) {
2688         ALOGE("%s: Compression failure, returning error state buffer to"
2689                 " framework", __FUNCTION__);
2690     } else {
2691         ALOGV("%s: Compression complete, returning buffer to framework",
2692                 __FUNCTION__);
2693     }
2694 
2695     mParent->sendCaptureResult(&result);
2696 }
2697 
onJpegInputDone(const StreamBuffer & inputBuffer)2698 void EmulatedFakeCamera3::ReadoutThread::onJpegInputDone(
2699         const StreamBuffer &inputBuffer) {
2700     // Should never get here, since the input buffer has to be returned
2701     // by end of processCaptureRequest
2702     ALOGE("%s: Unexpected input buffer from JPEG compressor!", __FUNCTION__);
2703 }
2704 
2705 
2706 }; // namespace android
2707