1 /*
2  * Copyright 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 package android.hardware.camera2.cts;
18 
19 import static android.hardware.camera2.cts.CameraTestUtils.*;
20 import static android.hardware.camera2.CameraCharacteristics.*;
21 
22 import android.graphics.Point;
23 import android.graphics.PointF;
24 import android.graphics.Rect;
25 import android.graphics.SurfaceTexture;
26 import android.hardware.cts.helpers.CameraUtils;
27 import android.hardware.camera2.CameraCharacteristics;
28 import android.hardware.camera2.CameraDevice;
29 import android.hardware.camera2.CameraMetadata;
30 import android.hardware.camera2.CaptureRequest;
31 import android.hardware.camera2.CaptureResult;
32 import android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureCallback;
33 import android.hardware.camera2.cts.helpers.StaticMetadata;
34 import android.hardware.camera2.cts.testcases.Camera2SurfaceViewTestCase;
35 import android.hardware.camera2.params.BlackLevelPattern;
36 import android.hardware.camera2.params.ColorSpaceTransform;
37 import android.hardware.camera2.params.Face;
38 import android.hardware.camera2.params.LensShadingMap;
39 import android.hardware.camera2.params.MeteringRectangle;
40 import android.hardware.camera2.params.RggbChannelVector;
41 import android.hardware.camera2.params.TonemapCurve;
42 import android.media.Image;
43 import android.os.Parcel;
44 import android.util.ArraySet;
45 import android.util.Log;
46 import android.util.Range;
47 import android.util.Rational;
48 import android.util.Size;
49 import android.view.Surface;
50 
51 import java.nio.ByteBuffer;
52 import java.util.ArrayList;
53 import java.util.Arrays;
54 import java.util.List;
55 
56 import org.junit.Test;
57 
58 /**
59  * <p>
60  * Basic test for camera CaptureRequest key controls.
61  * </p>
62  * <p>
63  * Several test categories are covered: manual sensor control, 3A control,
64  * manual ISP control and other per-frame control and synchronization.
65  * </p>
66  */
67 public class CaptureRequestTest extends Camera2SurfaceViewTestCase {
68     private static final String TAG = "CaptureRequestTest";
69     private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
70     private static final int NUM_FRAMES_VERIFIED = 15;
71     private static final int NUM_FACE_DETECTION_FRAMES_VERIFIED = 60;
72     /** 30ms exposure time must be supported by full capability devices. */
73     private static final long DEFAULT_EXP_TIME_NS = 30000000L; // 30ms
74     private static final int DEFAULT_SENSITIVITY = 100;
75     private static final int RGGB_COLOR_CHANNEL_COUNT = 4;
76     private static final int MAX_SHADING_MAP_SIZE = 64 * 64 * RGGB_COLOR_CHANNEL_COUNT;
77     private static final int MIN_SHADING_MAP_SIZE = 1 * 1 * RGGB_COLOR_CHANNEL_COUNT;
78     private static final long IGNORE_REQUESTED_EXPOSURE_TIME_CHECK = -1L;
79     private static final long EXPOSURE_TIME_BOUNDARY_50HZ_NS = 10000000L; // 10ms
80     private static final long EXPOSURE_TIME_BOUNDARY_60HZ_NS = 8333333L; // 8.3ms, Approximation.
81     private static final long EXPOSURE_TIME_ERROR_MARGIN_NS = 100000L; // 100us, Approximation.
82     private static final float EXPOSURE_TIME_ERROR_MARGIN_RATE = 0.03f; // 3%, Approximation.
83     private static final float SENSITIVITY_ERROR_MARGIN_RATE = 0.03f; // 3%, Approximation.
84     private static final int DEFAULT_NUM_EXPOSURE_TIME_STEPS = 3;
85     private static final int DEFAULT_NUM_SENSITIVITY_STEPS = 8;
86     private static final int DEFAULT_SENSITIVITY_STEP_SIZE = 100;
87     private static final int NUM_RESULTS_WAIT_TIMEOUT = 100;
88     private static final int NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY = 8;
89     private static final int NUM_FRAMES_WAITED_FOR_TORCH = 100;
90     private static final int NUM_PARTIAL_FRAMES_PFC = 2;
91     private static final int NUM_PARTIAL_FRAMES_NPFC = 6;
92 
93     private static final int NUM_TEST_FOCUS_DISTANCES = 10;
94     private static final int NUM_FOCUS_DISTANCES_REPEAT = 3;
95     // 5 percent error margin for calibrated device
96     private static final float FOCUS_DISTANCE_ERROR_PERCENT_CALIBRATED = 0.05f;
97     // 25 percent error margin for uncalibrated device
98     private static final float FOCUS_DISTANCE_ERROR_PERCENT_UNCALIBRATED = 0.25f;
99     // 10 percent error margin for approximate device
100     private static final float FOCUS_DISTANCE_ERROR_PERCENT_APPROXIMATE = 0.10f;
101     private static final int ANTI_FLICKERING_50HZ = 1;
102     private static final int ANTI_FLICKERING_60HZ = 2;
103     // 5 percent error margin for resulting crop regions
104     private static final float CROP_REGION_ERROR_PERCENT_DELTA = 0.05f;
105     // 1 percent error margin for centering the crop region
106     private static final float CROP_REGION_ERROR_PERCENT_CENTERED = 0.01f;
107     private static final float DYNAMIC_VS_FIXED_BLK_WH_LVL_ERROR_MARGIN = 0.25f;
108     private static final float DYNAMIC_VS_OPTICAL_BLK_LVL_ERROR_MARGIN = 0.2f;
109 
110     // Linear tone mapping curve example.
111     private static final float[] TONEMAP_CURVE_LINEAR = {0, 0, 1.0f, 1.0f};
112     // Standard sRGB tone mapping, per IEC 61966-2-1:1999, with 16 control points.
113     private static final float[] TONEMAP_CURVE_SRGB = {
114             0.0000f, 0.0000f, 0.0667f, 0.2864f, 0.1333f, 0.4007f, 0.2000f, 0.4845f,
115             0.2667f, 0.5532f, 0.3333f, 0.6125f, 0.4000f, 0.6652f, 0.4667f, 0.7130f,
116             0.5333f, 0.7569f, 0.6000f, 0.7977f, 0.6667f, 0.8360f, 0.7333f, 0.8721f,
117             0.8000f, 0.9063f, 0.8667f, 0.9389f, 0.9333f, 0.9701f, 1.0000f, 1.0000f
118     };
119     private final Rational ZERO_R = new Rational(0, 1);
120     private final Rational ONE_R = new Rational(1, 1);
121 
122     private final int NUM_ALGORITHMS = 3; // AE, AWB and AF
123     private final int INDEX_ALGORITHM_AE = 0;
124     private final int INDEX_ALGORITHM_AWB = 1;
125     private final int INDEX_ALGORITHM_AF = 2;
126 
127     private enum TorchSeqState {
128         RAMPING_UP,
129         FIRED,
130         RAMPING_DOWN
131     }
132 
133     @Override
setUp()134     public void setUp() throws Exception {
135         super.setUp();
136     }
137 
138     @Override
tearDown()139     public void tearDown() throws Exception {
140         super.tearDown();
141     }
142 
143     /**
144      * Test CaptureRequest settings parcelling.
145      */
146     @Test
testSettingsBinderParcel()147     public void testSettingsBinderParcel() throws Exception {
148         SurfaceTexture outputTexture = new SurfaceTexture(/* random texture ID */ 5);
149         Surface surface = new Surface(outputTexture);
150 
151         for (int i = 0; i < mCameraIds.length; i++) {
152             try {
153                 openDevice(mCameraIds[i]);
154                 CaptureRequest.Builder requestBuilder =
155                         mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
156                 requestBuilder.addTarget(surface);
157 
158                 // Check regular/default case
159                 CaptureRequest captureRequestOriginal = requestBuilder.build();
160                 Parcel p;
161                 p = Parcel.obtain();
162                 captureRequestOriginal.writeToParcel(p, 0);
163                 p.setDataPosition(0);
164                 CaptureRequest captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p);
165                 assertEquals("Parcelled camera settings should match",
166                         captureRequestParcelled.get(CaptureRequest.CONTROL_CAPTURE_INTENT),
167                         new Integer(CameraMetadata.CONTROL_CAPTURE_INTENT_PREVIEW));
168                 p.recycle();
169 
170                 // Check capture request with additional physical camera settings
171                 String physicalId = new String(Integer.toString(i + 1));
172                 ArraySet<String> physicalIds = new ArraySet<String> ();
173                 physicalIds.add(physicalId);
174 
175                 requestBuilder = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW,
176                         physicalIds);
177                 requestBuilder.addTarget(surface);
178                 captureRequestOriginal = requestBuilder.build();
179                 p = Parcel.obtain();
180                 captureRequestOriginal.writeToParcel(p, 0);
181                 p.setDataPosition(0);
182                 captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p);
183                 assertEquals("Parcelled camera settings should match",
184                         captureRequestParcelled.get(CaptureRequest.CONTROL_CAPTURE_INTENT),
185                         new Integer(CameraMetadata.CONTROL_CAPTURE_INTENT_PREVIEW));
186                 p.recycle();
187 
188                 // Check various invalid cases
189                 p = Parcel.obtain();
190                 p.writeInt(-1);
191                 p.setDataPosition(0);
192                 try {
193                     captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p);
194                     fail("should get RuntimeException due to invalid number of settings");
195                 } catch (RuntimeException e) {
196                     // Expected
197                 }
198                 p.recycle();
199 
200                 p = Parcel.obtain();
201                 p.writeInt(0);
202                 p.setDataPosition(0);
203                 try {
204                     captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p);
205                     fail("should get RuntimeException due to invalid number of settings");
206                 } catch (RuntimeException e) {
207                     // Expected
208                 }
209                 p.recycle();
210 
211                 p = Parcel.obtain();
212                 p.writeInt(1);
213                 p.setDataPosition(0);
214                 try {
215                     captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p);
216                     fail("should get RuntimeException due to absent settings");
217                 } catch (RuntimeException e) {
218                     // Expected
219                 }
220                 p.recycle();
221             } finally {
222                 closeDevice();
223             }
224         }
225     }
226 
227     /**
228      * Test black level lock when exposure value change.
229      * <p>
230      * When {@link CaptureRequest#BLACK_LEVEL_LOCK} is true in a request, the
231      * camera device should lock the black level. When the exposure values are changed,
232      * the camera may require reset black level Since changes to certain capture
233      * parameters (such as exposure time) may require resetting of black level
234      * compensation. However, the black level must remain locked after exposure
235      * value changes (when requests have lock ON).
236      * </p>
237      */
238     @Test
testBlackLevelLock()239     public void testBlackLevelLock() throws Exception {
240         for (int i = 0; i < mCameraIds.length; i++) {
241             try {
242                 if (!mAllStaticInfo.get(mCameraIds[i]).isCapabilitySupported(
243                         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
244                     continue;
245                 }
246 
247                 openDevice(mCameraIds[i]);
248                 SimpleCaptureCallback listener = new SimpleCaptureCallback();
249                 CaptureRequest.Builder requestBuilder =
250                         mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
251 
252                 // Start with default manual exposure time, with black level being locked.
253                 requestBuilder.set(CaptureRequest.BLACK_LEVEL_LOCK, true);
254                 changeExposure(requestBuilder, DEFAULT_EXP_TIME_NS, DEFAULT_SENSITIVITY);
255 
256                 Size previewSz =
257                         getMaxPreviewSize(mCamera.getId(), mCameraManager,
258                         getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));
259 
260                 startPreview(requestBuilder, previewSz, listener);
261                 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
262                 // No lock OFF state is allowed as the exposure is not changed.
263                 verifyBlackLevelLockResults(listener, NUM_FRAMES_VERIFIED, /*maxLockOffCnt*/0);
264 
265                 // Double the exposure time and gain, with black level still being locked.
266                 changeExposure(requestBuilder, DEFAULT_EXP_TIME_NS * 2, DEFAULT_SENSITIVITY * 2);
267                 listener = new SimpleCaptureCallback();
268                 startPreview(requestBuilder, previewSz, listener);
269                 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
270                 // Allow at most one lock OFF state as the exposure is changed once.
271                 verifyBlackLevelLockResults(listener, NUM_FRAMES_VERIFIED, /*maxLockOffCnt*/1);
272 
273                 stopPreview();
274             } finally {
275                 closeDevice();
276             }
277         }
278     }
279 
280     /**
281      * Test dynamic black/white levels if they are supported.
282      *
283      * <p>
284      * If the dynamic black and white levels are reported, test below:
285      *   1. the dynamic black and white levels shouldn't deviate from the global value too much
286      *   for different sensitivities.
287      *   2. If the RAW_SENSOR and optical black regions are supported, capture RAW images and
288      *   calculate the optical black level values. The reported dynamic black level should be
289      *   close enough to the optical black level values.
290      * </p>
291      */
292     @Test
testDynamicBlackWhiteLevel()293     public void testDynamicBlackWhiteLevel() throws Exception {
294         for (String id : mCameraIds) {
295             try {
296                 if (!mAllStaticInfo.get(id).isDynamicBlackLevelSupported()) {
297                     continue;
298                 }
299                 openDevice(id);
300                 dynamicBlackWhiteLevelTestByCamera();
301             } finally {
302                 closeDevice();
303             }
304         }
305     }
306 
307     /**
308      * Basic lens shading map request test.
309      * <p>
310      * When {@link CaptureRequest#SHADING_MODE} is set to OFF, no lens shading correction will
311      * be applied by the camera device, and an identity lens shading map data
312      * will be provided if {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE} is ON.
313      * </p>
314      * <p>
315      * When {@link CaptureRequest#SHADING_MODE} is set to other modes, lens shading correction
316      * will be applied by the camera device. The lens shading map data can be
317      * requested by setting {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE} to ON.
318      * </p>
319      */
320     @Test
testLensShadingMap()321     public void testLensShadingMap() throws Exception {
322         for (int i = 0; i < mCameraIds.length; i++) {
323             try {
324                 StaticMetadata staticInfo = mAllStaticInfo.get(mCameraIds[i]);
325                 if (!staticInfo.isManualLensShadingMapSupported()) {
326                     Log.i(TAG, "Camera " + mCameraIds[i] +
327                             " doesn't support lens shading controls, skipping test");
328                     continue;
329                 }
330 
331                 List<Integer> lensShadingMapModes = Arrays.asList(CameraTestUtils.toObject(
332                         staticInfo.getAvailableLensShadingMapModesChecked()));
333 
334                 if (!lensShadingMapModes.contains(STATISTICS_LENS_SHADING_MAP_MODE_ON)) {
335                     continue;
336                 }
337 
338                 openDevice(mCameraIds[i]);
339                 SimpleCaptureCallback listener = new SimpleCaptureCallback();
340                 CaptureRequest.Builder requestBuilder =
341                         mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
342                 requestBuilder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
343                         STATISTICS_LENS_SHADING_MAP_MODE_ON);
344 
345                 Size previewSz =
346                         getMaxPreviewSize(mCamera.getId(), mCameraManager,
347                         getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));
348                 List<Integer> lensShadingModes = Arrays.asList(CameraTestUtils.toObject(
349                         mStaticInfo.getAvailableLensShadingModesChecked()));
350 
351                 // Shading map mode OFF, lensShadingMapMode ON, camera device
352                 // should output unity maps.
353                 if (lensShadingModes.contains(SHADING_MODE_OFF)) {
354                     requestBuilder.set(CaptureRequest.SHADING_MODE, SHADING_MODE_OFF);
355                     listener = new SimpleCaptureCallback();
356                     startPreview(requestBuilder, previewSz, listener);
357                     waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
358                     verifyShadingMap(listener, NUM_FRAMES_VERIFIED, SHADING_MODE_OFF);
359                 }
360 
361                 // Shading map mode FAST, lensShadingMapMode ON, camera device
362                 // should output valid maps.
363                 if (lensShadingModes.contains(SHADING_MODE_FAST)) {
364                     requestBuilder.set(CaptureRequest.SHADING_MODE, SHADING_MODE_FAST);
365 
366                     listener = new SimpleCaptureCallback();
367                     startPreview(requestBuilder, previewSz, listener);
368                     waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
369                     // Allow at most one lock OFF state as the exposure is changed once.
370                     verifyShadingMap(listener, NUM_FRAMES_VERIFIED, SHADING_MODE_FAST);
371                 }
372 
373                 // Shading map mode HIGH_QUALITY, lensShadingMapMode ON, camera device
374                 // should output valid maps.
375                 if (lensShadingModes.contains(SHADING_MODE_HIGH_QUALITY)) {
376                     requestBuilder.set(CaptureRequest.SHADING_MODE, SHADING_MODE_HIGH_QUALITY);
377 
378                     listener = new SimpleCaptureCallback();
379                     startPreview(requestBuilder, previewSz, listener);
380                     waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
381                     verifyShadingMap(listener, NUM_FRAMES_VERIFIED, SHADING_MODE_HIGH_QUALITY);
382                 }
383 
384                 stopPreview();
385             } finally {
386                 closeDevice();
387             }
388         }
389     }
390 
391     /**
392      * Test {@link CaptureRequest#CONTROL_AE_ANTIBANDING_MODE} control.
393      * <p>
394      * Test all available anti-banding modes, check if the exposure time adjustment is
395      * correct.
396      * </p>
397      */
398     @Test
testAntiBandingModes()399     public void testAntiBandingModes() throws Exception {
400         for (int i = 0; i < mCameraIds.length; i++) {
401             try {
402                 // Without manual sensor control, exposure time cannot be verified
403                 if (!mAllStaticInfo.get(mCameraIds[i]).isCapabilitySupported(
404                         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
405                     continue;
406                 }
407 
408                 openDevice(mCameraIds[i]);
409                 int[] modes = mStaticInfo.getAeAvailableAntiBandingModesChecked();
410 
411                 Size previewSz =
412                         getMaxPreviewSize(mCamera.getId(), mCameraManager,
413                         getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));
414 
415                 for (int mode : modes) {
416                     antiBandingTestByMode(previewSz, mode);
417                 }
418             } finally {
419                 closeDevice();
420             }
421         }
422 
423     }
424 
425     /**
426      * Test AE mode and lock.
427      *
428      * <p>
429      * For AE lock, when it is locked, exposure parameters shouldn't be changed.
430      * For AE modes, each mode should satisfy the per frame controls defined in
431      * API specifications.
432      * </p>
433      */
434     @Test(timeout=60*60*1000) // timeout = 60 mins for long running tests
testAeModeAndLock()435     public void testAeModeAndLock() throws Exception {
436         for (int i = 0; i < mCameraIds.length; i++) {
437             try {
438                 if (!mAllStaticInfo.get(mCameraIds[i]).isColorOutputSupported()) {
439                     Log.i(TAG, "Camera " + mCameraIds[i] +
440                             " does not support color outputs, skipping");
441                     continue;
442                 }
443 
444                 openDevice(mCameraIds[i]);
445                 Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
446 
447                 // Update preview surface with given size for all sub-tests.
448                 updatePreviewSurface(maxPreviewSz);
449 
450                 // Test aeMode and lock
451                 int[] aeModes = mStaticInfo.getAeAvailableModesChecked();
452                 for (int mode : aeModes) {
453                     aeModeAndLockTestByMode(mode);
454                 }
455             } finally {
456                 closeDevice();
457             }
458         }
459     }
460 
461     /** Test {@link CaptureRequest#FLASH_MODE} control.
462      * <p>
463      * For each {@link CaptureRequest#FLASH_MODE} mode, test the flash control
464      * and {@link CaptureResult#FLASH_STATE} result.
465      * </p>
466      */
467     @Test
testFlashControl()468     public void testFlashControl() throws Exception {
469         for (int i = 0; i < mCameraIds.length; i++) {
470             try {
471                 if (!mAllStaticInfo.get(mCameraIds[i]).isColorOutputSupported()) {
472                     Log.i(TAG, "Camera " + mCameraIds[i] +
473                             " does not support color outputs, skipping");
474                     continue;
475                 }
476 
477                 openDevice(mCameraIds[i]);
478                 SimpleCaptureCallback listener = new SimpleCaptureCallback();
479                 CaptureRequest.Builder requestBuilder =
480                         mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
481 
482                 Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
483 
484                 startPreview(requestBuilder, maxPreviewSz, listener);
485 
486                 // Flash control can only be used when the AE mode is ON or OFF.
487                 flashTestByAeMode(listener, CaptureRequest.CONTROL_AE_MODE_ON);
488 
489                 // LEGACY won't support AE mode OFF
490                 boolean aeOffModeSupported = false;
491                 for (int aeMode : mStaticInfo.getAeAvailableModesChecked()) {
492                     if (aeMode == CaptureRequest.CONTROL_AE_MODE_OFF) {
493                         aeOffModeSupported = true;
494                     }
495                 }
496                 if (aeOffModeSupported) {
497                     flashTestByAeMode(listener, CaptureRequest.CONTROL_AE_MODE_OFF);
498                 }
499 
500                 stopPreview();
501             } finally {
502                 closeDevice();
503             }
504         }
505     }
506 
507     /**
508      * Test that the flash can be successfully turned off given various initial and final
509      * AE_CONTROL modes for repeating CaptureRequests.
510      */
511     @Test
testFlashTurnOff()512     public void testFlashTurnOff() throws Exception {
513         for (int i = 0; i < mCameraIds.length; i++) {
514             try {
515                 if (!mAllStaticInfo.get(mCameraIds[i]).isColorOutputSupported()) {
516                     Log.i(TAG, "Camera " + mCameraIds[i] +
517                             " does not support color outputs, skipping");
518                     continue;
519                 }
520                 if (!mAllStaticInfo.get(mCameraIds[i]).hasFlash()) {
521                     Log.i(TAG, "Camera " + mCameraIds[i] +
522                             " does not support flash, skipping");
523                     continue;
524                 }
525                 openDevice(mCameraIds[i]);
526                 SimpleCaptureCallback listener = new SimpleCaptureCallback();
527                 CaptureRequest.Builder requestBuilder =
528                         mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
529 
530                 Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
531 
532                 startPreview(requestBuilder, maxPreviewSz, listener);
533                 boolean isLegacy = CameraUtils.isLegacyHAL(mCameraManager, mCameraIds[i]);
534                 flashTurnOffTest(listener, isLegacy,
535                         /* initiaAeControl */CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH,
536                         /* offAeControl */CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH);
537 
538                 flashTurnOffTest(listener, isLegacy,
539                         /* initiaAeControl */CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH,
540                         /* offAeControl */CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
541 
542                 flashTurnOffTest(listener, isLegacy,
543                         /* initiaAeControl */CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH,
544                         /* offAeControl */CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE);
545 
546                 stopPreview();
547             } finally {
548                 closeDevice();
549             }
550         }
551 
552     }
553 
554     /**
555      * Test face detection modes and results.
556      */
557     @Test
testFaceDetection()558     public void testFaceDetection() throws Exception {
559         for (int i = 0; i < mCameraIds.length; i++) {
560             try {
561                 if (!mAllStaticInfo.get(mCameraIds[i]).isColorOutputSupported()) {
562                     Log.i(TAG, "Camera " + mCameraIds[i] +
563                             " does not support color outputs, skipping");
564                     continue;
565                 }
566                 openDevice(mCameraIds[i]);
567                 faceDetectionTestByCamera();
568             } finally {
569                 closeDevice();
570             }
571         }
572     }
573 
574     /**
575      * Test tone map modes and controls.
576      */
577     @Test
testToneMapControl()578     public void testToneMapControl() throws Exception {
579         for (String id : mCameraIds) {
580             try {
581                 if (!mAllStaticInfo.get(id).isManualToneMapSupported()) {
582                     Log.i(TAG, "Camera " + id +
583                             " doesn't support tone mapping controls, skipping test");
584                     continue;
585                 }
586                 openDevice(id);
587                 toneMapTestByCamera();
588             } finally {
589                 closeDevice();
590             }
591         }
592     }
593 
594     /**
595      * Test color correction modes and controls.
596      */
597     @Test
testColorCorrectionControl()598     public void testColorCorrectionControl() throws Exception {
599         for (String id : mCameraIds) {
600             try {
601                 if (!mAllStaticInfo.get(id).isColorCorrectionSupported()) {
602                     Log.i(TAG, "Camera " + id +
603                             " doesn't support color correction controls, skipping test");
604                     continue;
605                 }
606                 openDevice(id);
607                 colorCorrectionTestByCamera();
608             } finally {
609                 closeDevice();
610             }
611         }
612     }
613 
614     /**
615      * Test edge mode control for Fps not exceeding 30.
616      */
617     @Test
testEdgeModeControl()618     public void testEdgeModeControl() throws Exception {
619         for (String id : mCameraIds) {
620             try {
621                 if (!mAllStaticInfo.get(id).isEdgeModeControlSupported()) {
622                     Log.i(TAG, "Camera " + id +
623                             " doesn't support EDGE_MODE controls, skipping test");
624                     continue;
625                 }
626 
627                 openDevice(id);
628                 List<Range<Integer>> fpsRanges = getTargetFpsRangesUpTo30(mStaticInfo);
629                 edgeModesTestByCamera(fpsRanges);
630             } finally {
631                 closeDevice();
632             }
633         }
634     }
635 
636     /**
637      * Test edge mode control for Fps greater than 30.
638      */
639     @Test
testEdgeModeControlFastFps()640     public void testEdgeModeControlFastFps() throws Exception {
641         for (String id : mCameraIds) {
642             try {
643                 if (!mAllStaticInfo.get(id).isEdgeModeControlSupported()) {
644                     Log.i(TAG, "Camera " + id +
645                             " doesn't support EDGE_MODE controls, skipping test");
646                     continue;
647                 }
648 
649                 openDevice(id);
650                 List<Range<Integer>> fpsRanges = getTargetFpsRangesGreaterThan30(mStaticInfo);
651                 edgeModesTestByCamera(fpsRanges);
652             } finally {
653                 closeDevice();
654             }
655         }
656 
657     }
658 
659     /**
660      * Test focus distance control.
661      */
662     @Test
testFocusDistanceControl()663     public void testFocusDistanceControl() throws Exception {
664         for (String id : mCameraIds) {
665             try {
666                 StaticMetadata staticInfo = mAllStaticInfo.get(id);
667                 if (!staticInfo.hasFocuser()) {
668                     Log.i(TAG, "Camera " + id + " has no focuser, skipping test");
669                     continue;
670                 }
671 
672                 if (!staticInfo.isCapabilitySupported(
673                         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
674                     Log.i(TAG, "Camera " + id +
675                             " does not support MANUAL_SENSOR, skipping test");
676                     continue;
677                 }
678 
679                 openDevice(id);
680                 focusDistanceTestByCamera();
681             } finally {
682                 closeDevice();
683             }
684         }
685     }
686 
687     /**
688      * Test noise reduction mode for fps ranges not exceeding 30
689      */
690     @Test
testNoiseReductionModeControl()691     public void testNoiseReductionModeControl() throws Exception {
692         for (String id : mCameraIds) {
693             try {
694                 if (!mAllStaticInfo.get(id).isNoiseReductionModeControlSupported()) {
695                     Log.i(TAG, "Camera " + id +
696                             " doesn't support noise reduction mode, skipping test");
697                     continue;
698                 }
699 
700                 openDevice(id);
701                 List<Range<Integer>> fpsRanges = getTargetFpsRangesUpTo30(mStaticInfo);
702                 noiseReductionModeTestByCamera(fpsRanges);
703             } finally {
704                 closeDevice();
705             }
706         }
707     }
708 
709     /**
710      * Test noise reduction mode for fps ranges greater than 30
711      */
712     @Test
testNoiseReductionModeControlFastFps()713     public void testNoiseReductionModeControlFastFps() throws Exception {
714         for (String id : mCameraIds) {
715             try {
716                 if (!mAllStaticInfo.get(id).isNoiseReductionModeControlSupported()) {
717                     Log.i(TAG, "Camera " + id +
718                             " doesn't support noise reduction mode, skipping test");
719                     continue;
720                 }
721 
722                 openDevice(id);
723                 List<Range<Integer>> fpsRanges = getTargetFpsRangesGreaterThan30(mStaticInfo);
724                 noiseReductionModeTestByCamera(fpsRanges);
725             } finally {
726                 closeDevice();
727             }
728         }
729     }
730 
731     /**
732      * Test AWB lock control.
733      *
734      * <p>The color correction gain and transform shouldn't be changed when AWB is locked.</p>
735      */
736     @Test
testAwbModeAndLock()737     public void testAwbModeAndLock() throws Exception {
738         for (String id : mCameraIds) {
739             try {
740                 if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
741                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
742                     continue;
743                 }
744                 openDevice(id);
745                 awbModeAndLockTestByCamera();
746             } finally {
747                 closeDevice();
748             }
749         }
750     }
751 
752     /**
753      * Test different AF modes.
754      */
755     @Test
testAfModes()756     public void testAfModes() throws Exception {
757         for (String id : mCameraIds) {
758             try {
759                 if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
760                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
761                     continue;
762                 }
763                 openDevice(id);
764                 afModeTestByCamera();
765             } finally {
766                 closeDevice();
767             }
768         }
769     }
770 
771     /**
772      * Test video and optical stabilizations.
773      */
774     @Test
testCameraStabilizations()775     public void testCameraStabilizations() throws Exception {
776         for (String id : mCameraIds) {
777             try {
778                 StaticMetadata staticInfo = mAllStaticInfo.get(id);
779                 List<Key<?>> keys = staticInfo.getCharacteristics().getKeys();
780                 if (!(keys.contains(
781                         CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES) ||
782                         keys.contains(
783                                 CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION))) {
784                     Log.i(TAG, "Camera " + id + " doesn't support any stabilization modes");
785                     continue;
786                 }
787                 if (!staticInfo.isColorOutputSupported()) {
788                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
789                     continue;
790                 }
791                 openDevice(id);
792                 stabilizationTestByCamera();
793             } finally {
794                 closeDevice();
795             }
796         }
797     }
798 
799     /**
800      * Test digitalZoom (center wise and non-center wise), validate the returned crop regions.
801      * The max preview size is used for each camera.
802      */
803     @Test
testDigitalZoom()804     public void testDigitalZoom() throws Exception {
805         for (String id : mCameraIds) {
806             try {
807                 if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
808                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
809                     continue;
810                 }
811                 openDevice(id);
812                 Size maxPreviewSize = mOrderedPreviewSizes.get(0);
813                 digitalZoomTestByCamera(maxPreviewSize);
814             } finally {
815                 closeDevice();
816             }
817         }
818     }
819 
820     /**
821      * Test digital zoom and all preview size combinations.
822      * TODO: this and above test should all be moved to preview test class.
823      */
824     @Test
testDigitalZoomPreviewCombinations()825     public void testDigitalZoomPreviewCombinations() throws Exception {
826         for (String id : mCameraIds) {
827             try {
828                 if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
829                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
830                     continue;
831                 }
832                 openDevice(id);
833                 digitalZoomPreviewCombinationTestByCamera();
834             } finally {
835                 closeDevice();
836             }
837         }
838     }
839 
840     /**
841      * Test scene mode controls.
842      */
843     @Test
testSceneModes()844     public void testSceneModes() throws Exception {
845         for (String id : mCameraIds) {
846             try {
847                 if (mAllStaticInfo.get(id).isSceneModeSupported()) {
848                     openDevice(id);
849                     sceneModeTestByCamera();
850                 }
851             } finally {
852                 closeDevice();
853             }
854         }
855     }
856 
857     /**
858      * Test effect mode controls.
859      */
860     @Test
testEffectModes()861     public void testEffectModes() throws Exception {
862         for (String id : mCameraIds) {
863             try {
864                 if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
865                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
866                     continue;
867                 }
868                 openDevice(id);
869                 effectModeTestByCamera();
870             } finally {
871                 closeDevice();
872             }
873         }
874     }
875 
876     // TODO: add 3A state machine test.
877 
878     /**
879      * Per camera dynamic black and white level test.
880      */
dynamicBlackWhiteLevelTestByCamera()881     private void dynamicBlackWhiteLevelTestByCamera() throws Exception {
882         SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
883         SimpleImageReaderListener imageListener = null;
884         CaptureRequest.Builder previewBuilder =
885                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
886         CaptureRequest.Builder rawBuilder = null;
887         Size previewSize =
888                 getMaxPreviewSize(mCamera.getId(), mCameraManager,
889                 getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));
890         Size rawSize = null;
891         boolean canCaptureBlackRaw =
892                 mStaticInfo.isCapabilitySupported(
893                         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW) &&
894                 mStaticInfo.isOpticalBlackRegionSupported();
895         if (canCaptureBlackRaw) {
896             // Capture Raw16, then calculate the optical black, and use it to check with the dynamic
897             // black level.
898             rawBuilder =
899                     mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
900             rawSize = mStaticInfo.getRawDimensChecked();
901             imageListener = new SimpleImageReaderListener();
902             prepareRawCaptureAndStartPreview(previewBuilder, rawBuilder, previewSize, rawSize,
903                     resultListener, imageListener);
904         } else {
905             startPreview(previewBuilder, previewSize, resultListener);
906         }
907 
908         // Capture a sequence of frames with different sensitivities and validate the black/white
909         // level values
910         int[] sensitivities = getSensitivityTestValues();
911         float[][] dynamicBlackLevels = new float[sensitivities.length][];
912         int[] dynamicWhiteLevels = new int[sensitivities.length];
913         float[][] opticalBlackLevels = new float[sensitivities.length][];
914         for (int i = 0; i < sensitivities.length; i++) {
915             CaptureResult result = null;
916             if (canCaptureBlackRaw) {
917                 changeExposure(rawBuilder, DEFAULT_EXP_TIME_NS, sensitivities[i]);
918                 CaptureRequest rawRequest = rawBuilder.build();
919                 mSession.capture(rawRequest, resultListener, mHandler);
920                 result = resultListener.getCaptureResultForRequest(rawRequest,
921                         NUM_RESULTS_WAIT_TIMEOUT);
922                 Image rawImage = imageListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS);
923 
924                 // Get max (area-wise) optical black region
925                 Rect[] opticalBlackRegions = mStaticInfo.getCharacteristics().get(
926                         CameraCharacteristics.SENSOR_OPTICAL_BLACK_REGIONS);
927                 Rect maxRegion = opticalBlackRegions[0];
928                 for (Rect region : opticalBlackRegions) {
929                     if (region.width() * region.height() > maxRegion.width() * maxRegion.height()) {
930                         maxRegion = region;
931                     }
932                 }
933 
934                 // Get average black pixel values in the region (region is multiple of 2x2)
935                 Image.Plane rawPlane = rawImage.getPlanes()[0];
936                 ByteBuffer rawBuffer = rawPlane.getBuffer();
937                 float[] avgBlackLevels = {0, 0, 0, 0};
938                 final int rowSize = rawPlane.getRowStride();
939                 final int bytePerPixel = rawPlane.getPixelStride();
940                 if (VERBOSE) {
941                     Log.v(TAG, "maxRegion: " + maxRegion + ", Row stride: " +
942                             rawPlane.getRowStride());
943                 }
944                 for (int row = maxRegion.top; row < maxRegion.bottom; row += 2) {
945                     for (int col = maxRegion.left; col < maxRegion.right; col += 2) {
946                         int startOffset = row * rowSize + col * bytePerPixel;
947                         avgBlackLevels[0] += rawBuffer.getShort(startOffset);
948                         avgBlackLevels[1] += rawBuffer.getShort(startOffset + bytePerPixel);
949                         startOffset += rowSize;
950                         avgBlackLevels[2] += rawBuffer.getShort(startOffset);
951                         avgBlackLevels[3] += rawBuffer.getShort(startOffset + bytePerPixel);
952                     }
953                 }
954                 int numBlackBlocks = maxRegion.width() * maxRegion.height() / (2 * 2);
955                 for (int m = 0; m < avgBlackLevels.length; m++) {
956                     avgBlackLevels[m] /= numBlackBlocks;
957                 }
958                 opticalBlackLevels[i] = avgBlackLevels;
959 
960                 if (VERBOSE) {
961                     Log.v(TAG, String.format("Optical black level results for sensitivity (%d): %s",
962                             sensitivities[i], Arrays.toString(avgBlackLevels)));
963                 }
964 
965                 rawImage.close();
966             } else {
967                 changeExposure(previewBuilder, DEFAULT_EXP_TIME_NS, sensitivities[i]);
968                 CaptureRequest previewRequest = previewBuilder.build();
969                 mSession.capture(previewRequest, resultListener, mHandler);
970                 result = resultListener.getCaptureResultForRequest(previewRequest,
971                         NUM_RESULTS_WAIT_TIMEOUT);
972             }
973 
974             dynamicBlackLevels[i] = getValueNotNull(result,
975                     CaptureResult.SENSOR_DYNAMIC_BLACK_LEVEL);
976             dynamicWhiteLevels[i] = getValueNotNull(result,
977                     CaptureResult.SENSOR_DYNAMIC_WHITE_LEVEL);
978         }
979 
980         if (VERBOSE) {
981             Log.v(TAG, "Different sensitivities tested: " + Arrays.toString(sensitivities));
982             Log.v(TAG, "Dynamic black level results: " + Arrays.deepToString(dynamicBlackLevels));
983             Log.v(TAG, "Dynamic white level results: " + Arrays.toString(dynamicWhiteLevels));
984             if (canCaptureBlackRaw) {
985                 Log.v(TAG, "Optical black level results " +
986                         Arrays.deepToString(opticalBlackLevels));
987             }
988         }
989 
990         // check the dynamic black level against global black level.
991         // Implicit guarantee: if the dynamic black level is supported, fixed black level must be
992         // supported as well (tested in ExtendedCameraCharacteristicsTest#testOpticalBlackRegions).
993         BlackLevelPattern blackPattern = mStaticInfo.getCharacteristics().get(
994                 CameraCharacteristics.SENSOR_BLACK_LEVEL_PATTERN);
995         int[] fixedBlackLevels = new int[4];
996         int fixedWhiteLevel = mStaticInfo.getCharacteristics().get(
997                 CameraCharacteristics.SENSOR_INFO_WHITE_LEVEL);
998         blackPattern.copyTo(fixedBlackLevels, 0);
999         float maxBlackDeviation = 0;
1000         int maxWhiteDeviation = 0;
1001         for (int i = 0; i < dynamicBlackLevels.length; i++) {
1002             for (int j = 0; j < dynamicBlackLevels[i].length; j++) {
1003                 if (maxBlackDeviation < Math.abs(fixedBlackLevels[j] - dynamicBlackLevels[i][j])) {
1004                     maxBlackDeviation = Math.abs(fixedBlackLevels[j] - dynamicBlackLevels[i][j]);
1005                 }
1006             }
1007             if (maxWhiteDeviation < Math.abs(dynamicWhiteLevels[i] - fixedWhiteLevel)) {
1008                 maxWhiteDeviation = Math.abs(dynamicWhiteLevels[i] - fixedWhiteLevel);
1009             }
1010         }
1011         mCollector.expectLessOrEqual("Max deviation of the dynamic black level vs fixed black level"
1012                 + " exceed threshold."
1013                 + " Dynamic black level results: " + Arrays.deepToString(dynamicBlackLevels),
1014                 fixedBlackLevels[0] * DYNAMIC_VS_FIXED_BLK_WH_LVL_ERROR_MARGIN, maxBlackDeviation);
1015         mCollector.expectLessOrEqual("Max deviation of the dynamic white level exceed threshold."
1016                 + " Dynamic white level results: " + Arrays.toString(dynamicWhiteLevels),
1017                 fixedWhiteLevel * DYNAMIC_VS_FIXED_BLK_WH_LVL_ERROR_MARGIN,
1018                 (float)maxWhiteDeviation);
1019 
1020         // Validate against optical black levels if it is available
1021         if (canCaptureBlackRaw) {
1022             maxBlackDeviation = 0;
1023             for (int i = 0; i < dynamicBlackLevels.length; i++) {
1024                 for (int j = 0; j < dynamicBlackLevels[i].length; j++) {
1025                     if (maxBlackDeviation <
1026                             Math.abs(opticalBlackLevels[i][j] - dynamicBlackLevels[i][j])) {
1027                         maxBlackDeviation =
1028                                 Math.abs(opticalBlackLevels[i][j] - dynamicBlackLevels[i][j]);
1029                     }
1030                 }
1031             }
1032 
1033             mCollector.expectLessOrEqual("Max deviation of the dynamic black level vs optical black"
1034                     + " exceed threshold."
1035                     + " Dynamic black level results: " + Arrays.deepToString(dynamicBlackLevels)
1036                     + " Optical black level results: " + Arrays.deepToString(opticalBlackLevels),
1037                     fixedBlackLevels[0] * DYNAMIC_VS_OPTICAL_BLK_LVL_ERROR_MARGIN,
1038                     maxBlackDeviation);
1039         }
1040     }
1041 
noiseReductionModeTestByCamera(List<Range<Integer>> fpsRanges)1042     private void noiseReductionModeTestByCamera(List<Range<Integer>> fpsRanges) throws Exception {
1043         Size maxPrevSize = mOrderedPreviewSizes.get(0);
1044         CaptureRequest.Builder requestBuilder =
1045                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1046         int[] availableModes = mStaticInfo.getAvailableNoiseReductionModesChecked();
1047 
1048         for (int mode : availableModes) {
1049             requestBuilder.set(CaptureRequest.NOISE_REDUCTION_MODE, mode);
1050 
1051             // Test that OFF and FAST mode should not slow down the frame rate.
1052             if (mode == CaptureRequest.NOISE_REDUCTION_MODE_OFF ||
1053                     mode == CaptureRequest.NOISE_REDUCTION_MODE_FAST) {
1054                 verifyFpsNotSlowDown(requestBuilder, NUM_FRAMES_VERIFIED, fpsRanges);
1055             }
1056 
1057             SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
1058             startPreview(requestBuilder, maxPrevSize, resultListener);
1059             mSession.setRepeatingRequest(requestBuilder.build(), resultListener, mHandler);
1060             waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
1061 
1062             verifyCaptureResultForKey(CaptureResult.NOISE_REDUCTION_MODE, mode,
1063                     resultListener, NUM_FRAMES_VERIFIED);
1064         }
1065 
1066         stopPreview();
1067     }
1068 
focusDistanceTestByCamera()1069     private void focusDistanceTestByCamera() throws Exception {
1070         CaptureRequest.Builder requestBuilder =
1071                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1072         requestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_OFF);
1073         int calibrationStatus = mStaticInfo.getFocusDistanceCalibrationChecked();
1074         float errorMargin = FOCUS_DISTANCE_ERROR_PERCENT_UNCALIBRATED;
1075         if (calibrationStatus ==
1076                 CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED) {
1077             errorMargin = FOCUS_DISTANCE_ERROR_PERCENT_CALIBRATED;
1078         } else if (calibrationStatus ==
1079                 CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE) {
1080             errorMargin = FOCUS_DISTANCE_ERROR_PERCENT_APPROXIMATE;
1081         }
1082 
1083         // Test changing focus distance with repeating request
1084         focusDistanceTestRepeating(requestBuilder, errorMargin);
1085 
1086         if (calibrationStatus ==
1087                 CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED)  {
1088             // Test changing focus distance with burst request
1089             focusDistanceTestBurst(requestBuilder, errorMargin);
1090         }
1091     }
1092 
focusDistanceTestRepeating(CaptureRequest.Builder requestBuilder, float errorMargin)1093     private void focusDistanceTestRepeating(CaptureRequest.Builder requestBuilder,
1094             float errorMargin) throws Exception {
1095         CaptureRequest request;
1096         float[] testDistances = getFocusDistanceTestValuesInOrder(0, 0);
1097         Size maxPrevSize = mOrderedPreviewSizes.get(0);
1098         SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
1099         startPreview(requestBuilder, maxPrevSize, resultListener);
1100 
1101         float[] resultDistances = new float[testDistances.length];
1102         int[] resultLensStates = new int[testDistances.length];
1103 
1104         // Collect results
1105         for (int i = 0; i < testDistances.length; i++) {
1106             requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, testDistances[i]);
1107             request = requestBuilder.build();
1108             resultListener = new SimpleCaptureCallback();
1109             mSession.setRepeatingRequest(request, resultListener, mHandler);
1110             waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
1111             waitForResultValue(resultListener, CaptureResult.LENS_STATE,
1112                     CaptureResult.LENS_STATE_STATIONARY, NUM_RESULTS_WAIT_TIMEOUT);
1113             CaptureResult result = resultListener.getCaptureResultForRequest(request,
1114                     NUM_RESULTS_WAIT_TIMEOUT);
1115 
1116             resultDistances[i] = getValueNotNull(result, CaptureResult.LENS_FOCUS_DISTANCE);
1117             resultLensStates[i] = getValueNotNull(result, CaptureResult.LENS_STATE);
1118 
1119             if (VERBOSE) {
1120                 Log.v(TAG, "Capture repeating request focus distance: " + testDistances[i]
1121                         + " result: " + resultDistances[i] + " lens state " + resultLensStates[i]);
1122             }
1123         }
1124 
1125         verifyFocusDistance(testDistances, resultDistances, resultLensStates,
1126                 /*ascendingOrder*/true, /*noOvershoot*/false, /*repeatStart*/0, /*repeatEnd*/0,
1127                 errorMargin);
1128 
1129         if (mStaticInfo.areKeysAvailable(CameraCharacteristics.LENS_INFO_HYPERFOCAL_DISTANCE)) {
1130 
1131             // Test hyperfocal distance optionally
1132             float hyperFocalDistance = mStaticInfo.getHyperfocalDistanceChecked();
1133             if (hyperFocalDistance > 0) {
1134                 requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, hyperFocalDistance);
1135                 request = requestBuilder.build();
1136                 resultListener = new SimpleCaptureCallback();
1137                 mSession.setRepeatingRequest(request, resultListener, mHandler);
1138                 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
1139 
1140                 // Then wait for the lens.state to be stationary.
1141                 waitForResultValue(resultListener, CaptureResult.LENS_STATE,
1142                         CaptureResult.LENS_STATE_STATIONARY, NUM_RESULTS_WAIT_TIMEOUT);
1143                 CaptureResult result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
1144                 Float focusDistance = getValueNotNull(result, CaptureResult.LENS_FOCUS_DISTANCE);
1145                 mCollector.expectInRange("Focus distance for hyper focal should be close enough to" +
1146                         " requested value", focusDistance,
1147                         hyperFocalDistance * (1.0f - errorMargin),
1148                         hyperFocalDistance * (1.0f + errorMargin));
1149             }
1150         }
1151     }
1152 
focusDistanceTestBurst(CaptureRequest.Builder requestBuilder, float errorMargin)1153     private void focusDistanceTestBurst(CaptureRequest.Builder requestBuilder,
1154             float errorMargin) throws Exception {
1155 
1156         Size maxPrevSize = mOrderedPreviewSizes.get(0);
1157         float[] testDistances = getFocusDistanceTestValuesInOrder(NUM_FOCUS_DISTANCES_REPEAT,
1158                 NUM_FOCUS_DISTANCES_REPEAT);
1159         SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
1160         startPreview(requestBuilder, maxPrevSize, resultListener);
1161 
1162         float[] resultDistances = new float[testDistances.length];
1163         int[] resultLensStates = new int[testDistances.length];
1164 
1165         final int maxPipelineDepth = mStaticInfo.getCharacteristics().get(
1166             CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH);
1167 
1168         // Move lens to starting position, and wait for the lens.state to be stationary.
1169         CaptureRequest request;
1170         requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, testDistances[0]);
1171         request = requestBuilder.build();
1172         mSession.setRepeatingRequest(request, resultListener, mHandler);
1173         waitForResultValue(resultListener, CaptureResult.LENS_STATE,
1174                 CaptureResult.LENS_STATE_STATIONARY, NUM_RESULTS_WAIT_TIMEOUT);
1175 
1176         // Submit burst of requests with different focus distances
1177         List<CaptureRequest> burst = new ArrayList<>();
1178         for (int i = 0; i < testDistances.length; i ++) {
1179             requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, testDistances[i]);
1180             burst.add(requestBuilder.build());
1181         }
1182         mSession.captureBurst(burst, resultListener, mHandler);
1183 
1184         for (int i = 0; i < testDistances.length; i++) {
1185             CaptureResult result = resultListener.getCaptureResultForRequest(
1186                     burst.get(i), maxPipelineDepth+1);
1187 
1188             resultDistances[i] = getValueNotNull(result, CaptureResult.LENS_FOCUS_DISTANCE);
1189             resultLensStates[i] = getValueNotNull(result, CaptureResult.LENS_STATE);
1190 
1191             if (VERBOSE) {
1192                 Log.v(TAG, "Capture burst request focus distance: " + testDistances[i]
1193                         + " result: " + resultDistances[i] + " lens state " + resultLensStates[i]);
1194             }
1195         }
1196 
1197         verifyFocusDistance(testDistances, resultDistances, resultLensStates,
1198                 /*ascendingOrder*/true, /*noOvershoot*/true,
1199                 /*repeatStart*/NUM_FOCUS_DISTANCES_REPEAT, /*repeatEnd*/NUM_FOCUS_DISTANCES_REPEAT,
1200                 errorMargin);
1201 
1202     }
1203 
1204     /**
1205      * Verify focus distance control.
1206      *
1207      * Assumption:
1208      * - First repeatStart+1 elements of requestedDistances share the same value
1209      * - Last repeatEnd+1 elements of requestedDistances share the same value
1210      * - All elements in between are monotonically increasing/decreasing depending on ascendingOrder.
1211      * - Focuser is at requestedDistances[0] at the beginning of the test.
1212      *
1213      * @param requestedDistances The requested focus distances
1214      * @param resultDistances The result focus distances
1215      * @param lensStates The result lens states
1216      * @param ascendingOrder The order of the expected focus distance request/output
1217      * @param noOvershoot Assert that focus control doesn't overshoot the requested value
1218      * @param repeatStart The number of times the starting focus distance is repeated
1219      * @param repeatEnd The number of times the ending focus distance is repeated
1220      * @param errorMargin The error margin between request and result
1221      */
verifyFocusDistance(float[] requestedDistances, float[] resultDistances, int[] lensStates, boolean ascendingOrder, boolean noOvershoot, int repeatStart, int repeatEnd, float errorMargin)1222     private void verifyFocusDistance(float[] requestedDistances, float[] resultDistances,
1223             int[] lensStates, boolean ascendingOrder, boolean noOvershoot, int repeatStart,
1224             int repeatEnd, float errorMargin) {
1225 
1226         float minValue = 0;
1227         float maxValue = mStaticInfo.getMinimumFocusDistanceChecked();
1228         float hyperfocalDistance = 0;
1229         if (mStaticInfo.areKeysAvailable(CameraCharacteristics.LENS_INFO_HYPERFOCAL_DISTANCE)) {
1230             hyperfocalDistance = mStaticInfo.getHyperfocalDistanceChecked();
1231         }
1232 
1233         // Verify lens and focus distance do not change for first repeatStart
1234         // results.
1235         for (int i = 0; i < repeatStart; i ++) {
1236             float marginMin = requestedDistances[i] * (1.0f - errorMargin);
1237             // HAL may choose to use hyperfocal distance for all distances between [0, hyperfocal].
1238             float marginMax =
1239                     Math.max(requestedDistances[i], hyperfocalDistance) * (1.0f + errorMargin);
1240 
1241             mCollector.expectEquals("Lens moves even though focus_distance didn't change",
1242                     lensStates[i], CaptureResult.LENS_STATE_STATIONARY);
1243             if (noOvershoot) {
1244                 mCollector.expectInRange("Focus distance in result should be close enough to " +
1245                         "requested value", resultDistances[i], marginMin, marginMax);
1246             }
1247             mCollector.expectInRange("Result focus distance is out of range",
1248                     resultDistances[i], minValue, maxValue);
1249         }
1250 
1251         for (int i = repeatStart; i < resultDistances.length-1; i ++) {
1252             float marginMin = requestedDistances[i] * (1.0f - errorMargin);
1253             // HAL may choose to use hyperfocal distance for all distances between [0, hyperfocal].
1254             float marginMax =
1255                     Math.max(requestedDistances[i], hyperfocalDistance) * (1.0f + errorMargin);
1256             if (noOvershoot) {
1257                 // Result focus distance shouldn't overshoot the request
1258                 boolean condition;
1259                 if (ascendingOrder) {
1260                     condition = resultDistances[i] <= marginMax;
1261                } else {
1262                     condition = resultDistances[i] >= marginMin;
1263                 }
1264                 mCollector.expectTrue(String.format(
1265                       "Lens shouldn't move past request focus distance. result " +
1266                       resultDistances[i] + " vs target of " +
1267                       (ascendingOrder ? marginMax : marginMin)), condition);
1268             }
1269 
1270             // Verify monotonically increased focus distance setting
1271             boolean condition;
1272             float compareDistance = resultDistances[i+1] - resultDistances[i];
1273             if (i < resultDistances.length-1-repeatEnd) {
1274                 condition = (ascendingOrder ? compareDistance > 0 : compareDistance < 0);
1275             } else {
1276                 condition = (ascendingOrder ? compareDistance >= 0 : compareDistance <= 0);
1277             }
1278             mCollector.expectTrue(String.format("Adjacent [resultDistances, lens_state] results ["
1279                   + resultDistances[i] + "," + lensStates[i] + "], [" + resultDistances[i+1] + ","
1280                   + lensStates[i+1] + "] monotonicity is broken"), condition);
1281         }
1282 
1283         mCollector.expectTrue(String.format("All values of this array are equal: " +
1284                 resultDistances[0] + " " + resultDistances[resultDistances.length-1]),
1285                 resultDistances[0] != resultDistances[resultDistances.length-1]);
1286 
1287         // Verify lens moved to destination location.
1288         mCollector.expectInRange("Focus distance " + resultDistances[resultDistances.length-1] +
1289                 " for minFocusDistance should be closed enough to requested value " +
1290                 requestedDistances[requestedDistances.length-1],
1291                 resultDistances[resultDistances.length-1],
1292                 requestedDistances[requestedDistances.length-1] * (1.0f - errorMargin),
1293                 requestedDistances[requestedDistances.length-1] * (1.0f + errorMargin));
1294     }
1295 
1296     /**
1297      * Verify edge mode control results for fpsRanges
1298      */
edgeModesTestByCamera(List<Range<Integer>> fpsRanges)1299     private void edgeModesTestByCamera(List<Range<Integer>> fpsRanges) throws Exception {
1300         Size maxPrevSize = mOrderedPreviewSizes.get(0);
1301         int[] edgeModes = mStaticInfo.getAvailableEdgeModesChecked();
1302         CaptureRequest.Builder requestBuilder =
1303                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1304 
1305         for (int mode : edgeModes) {
1306             requestBuilder.set(CaptureRequest.EDGE_MODE, mode);
1307 
1308             // Test that OFF and FAST mode should not slow down the frame rate.
1309             if (mode == CaptureRequest.EDGE_MODE_OFF ||
1310                     mode == CaptureRequest.EDGE_MODE_FAST) {
1311                 verifyFpsNotSlowDown(requestBuilder, NUM_FRAMES_VERIFIED, fpsRanges);
1312             }
1313 
1314             SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
1315             startPreview(requestBuilder, maxPrevSize, resultListener);
1316             mSession.setRepeatingRequest(requestBuilder.build(), resultListener, mHandler);
1317             waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
1318 
1319             verifyCaptureResultForKey(CaptureResult.EDGE_MODE, mode, resultListener,
1320                     NUM_FRAMES_VERIFIED);
1321        }
1322 
1323         stopPreview();
1324     }
1325 
1326     /**
1327      * Test color correction controls.
1328      *
1329      * <p>Test different color correction modes. For TRANSFORM_MATRIX, only test
1330      * the unit gain and identity transform.</p>
1331      */
colorCorrectionTestByCamera()1332     private void colorCorrectionTestByCamera() throws Exception {
1333         CaptureRequest request;
1334         CaptureResult result;
1335         Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
1336         updatePreviewSurface(maxPreviewSz);
1337         CaptureRequest.Builder manualRequestBuilder = createRequestForPreview();
1338         CaptureRequest.Builder previewRequestBuilder = createRequestForPreview();
1339         SimpleCaptureCallback listener = new SimpleCaptureCallback();
1340 
1341         startPreview(previewRequestBuilder, maxPreviewSz, listener);
1342 
1343         // Default preview result should give valid color correction metadata.
1344         result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
1345         validateColorCorrectionResult(result,
1346                 previewRequestBuilder.get(CaptureRequest.COLOR_CORRECTION_MODE));
1347         int colorCorrectionMode = CaptureRequest.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
1348         // TRANSFORM_MATRIX mode
1349         // Only test unit gain and identity transform
1350         List<Integer> availableControlModes = Arrays.asList(
1351                 CameraTestUtils.toObject(mStaticInfo.getAvailableControlModesChecked()));
1352         List<Integer> availableAwbModes = Arrays.asList(
1353                 CameraTestUtils.toObject(mStaticInfo.getAwbAvailableModesChecked()));
1354         boolean isManualCCSupported =
1355                 availableControlModes.contains(CaptureRequest.CONTROL_MODE_OFF) ||
1356                 availableAwbModes.contains(CaptureRequest.CONTROL_AWB_MODE_OFF);
1357         if (isManualCCSupported) {
1358             if (!availableControlModes.contains(CaptureRequest.CONTROL_MODE_OFF)) {
1359                 // Only manual AWB mode is supported
1360                 manualRequestBuilder.set(CaptureRequest.CONTROL_MODE,
1361                         CaptureRequest.CONTROL_MODE_AUTO);
1362                 manualRequestBuilder.set(CaptureRequest.CONTROL_AWB_MODE,
1363                         CaptureRequest.CONTROL_AWB_MODE_OFF);
1364             } else {
1365                 // All 3A manual controls are supported, it doesn't matter what we set for AWB mode.
1366                 manualRequestBuilder.set(CaptureRequest.CONTROL_MODE,
1367                         CaptureRequest.CONTROL_MODE_OFF);
1368             }
1369 
1370             RggbChannelVector UNIT_GAIN = new RggbChannelVector(1.0f, 1.0f, 1.0f, 1.0f);
1371 
1372             ColorSpaceTransform IDENTITY_TRANSFORM = new ColorSpaceTransform(
1373                 new Rational[] {
1374                     ONE_R, ZERO_R, ZERO_R,
1375                     ZERO_R, ONE_R, ZERO_R,
1376                     ZERO_R, ZERO_R, ONE_R
1377                 });
1378 
1379             manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, colorCorrectionMode);
1380             manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_GAINS, UNIT_GAIN);
1381             manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM, IDENTITY_TRANSFORM);
1382             request = manualRequestBuilder.build();
1383             mSession.capture(request, listener, mHandler);
1384             result = listener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT);
1385             RggbChannelVector gains = result.get(CaptureResult.COLOR_CORRECTION_GAINS);
1386             ColorSpaceTransform transform = result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM);
1387             validateColorCorrectionResult(result, colorCorrectionMode);
1388             mCollector.expectEquals("control mode result/request mismatch",
1389                     CaptureResult.CONTROL_MODE_OFF, result.get(CaptureResult.CONTROL_MODE));
1390             mCollector.expectEquals("Color correction gain result/request mismatch",
1391                     UNIT_GAIN, gains);
1392             mCollector.expectEquals("Color correction gain result/request mismatch",
1393                     IDENTITY_TRANSFORM, transform);
1394 
1395         }
1396 
1397         // FAST mode
1398         colorCorrectionMode = CaptureRequest.COLOR_CORRECTION_MODE_FAST;
1399         manualRequestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
1400         manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, colorCorrectionMode);
1401         request = manualRequestBuilder.build();
1402         mSession.capture(request, listener, mHandler);
1403         result = listener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT);
1404         validateColorCorrectionResult(result, colorCorrectionMode);
1405         mCollector.expectEquals("control mode result/request mismatch",
1406                 CaptureResult.CONTROL_MODE_AUTO, result.get(CaptureResult.CONTROL_MODE));
1407 
1408         // HIGH_QUALITY mode
1409         colorCorrectionMode = CaptureRequest.COLOR_CORRECTION_MODE_HIGH_QUALITY;
1410         manualRequestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
1411         manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, colorCorrectionMode);
1412         request = manualRequestBuilder.build();
1413         mSession.capture(request, listener, mHandler);
1414         result = listener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT);
1415         validateColorCorrectionResult(result, colorCorrectionMode);
1416         mCollector.expectEquals("control mode result/request mismatch",
1417                 CaptureResult.CONTROL_MODE_AUTO, result.get(CaptureResult.CONTROL_MODE));
1418     }
1419 
validateColorCorrectionResult(CaptureResult result, int colorCorrectionMode)1420     private void validateColorCorrectionResult(CaptureResult result, int colorCorrectionMode) {
1421         final RggbChannelVector ZERO_GAINS = new RggbChannelVector(0, 0, 0, 0);
1422         final int TRANSFORM_SIZE = 9;
1423         Rational[] zeroTransform = new Rational[TRANSFORM_SIZE];
1424         Arrays.fill(zeroTransform, ZERO_R);
1425         final ColorSpaceTransform ZERO_TRANSFORM = new ColorSpaceTransform(zeroTransform);
1426 
1427         RggbChannelVector resultGain;
1428         if ((resultGain = mCollector.expectKeyValueNotNull(result,
1429                 CaptureResult.COLOR_CORRECTION_GAINS)) != null) {
1430             mCollector.expectKeyValueNotEquals(result,
1431                     CaptureResult.COLOR_CORRECTION_GAINS, ZERO_GAINS);
1432         }
1433 
1434         ColorSpaceTransform resultTransform;
1435         if ((resultTransform = mCollector.expectKeyValueNotNull(result,
1436                 CaptureResult.COLOR_CORRECTION_TRANSFORM)) != null) {
1437             mCollector.expectKeyValueNotEquals(result,
1438                     CaptureResult.COLOR_CORRECTION_TRANSFORM, ZERO_TRANSFORM);
1439         }
1440 
1441         mCollector.expectEquals("color correction mode result/request mismatch",
1442                 colorCorrectionMode, result.get(CaptureResult.COLOR_CORRECTION_MODE));
1443     }
1444 
1445     /**
1446      * Test that flash can be turned off successfully with a given initial and final AE_CONTROL
1447      * states.
1448      *
1449      * This function expects that initialAeControl and flashOffAeControl will not be either
1450      * CaptureRequest.CONTROL_AE_MODE_ON or CaptureRequest.CONTROL_AE_MODE_OFF
1451      *
1452      * @param listener The Capture listener that is used to wait for capture result
1453      * @param isLegacy Boolean specifying if the camera device being tested is a legacy device
1454      * @param initialAeControl The initial AE_CONTROL mode to start repeating requests with.
1455      * @param flashOffAeControl The final AE_CONTROL mode which is expected to turn flash off for
1456      *        TEMPLATE_PREVIEW repeating requests.
1457      */
flashTurnOffTest(SimpleCaptureCallback listener, boolean isLegacy, int initialAeControl, int flashOffAeControl)1458     private void flashTurnOffTest(SimpleCaptureCallback listener, boolean isLegacy,
1459             int initialAeControl, int flashOffAeControl) throws Exception {
1460         CaptureResult result;
1461         final int NUM_FLASH_REQUESTS_TESTED = 10;
1462         CaptureRequest.Builder requestBuilder = createRequestForPreview();
1463         requestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
1464         requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, initialAeControl);
1465 
1466         mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
1467         waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
1468 
1469         // Turn on torch using FLASH_MODE_TORCH
1470         requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
1471         requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH);
1472         CaptureRequest torchOnRequest = requestBuilder.build();
1473         mSession.setRepeatingRequest(torchOnRequest, listener, mHandler);
1474         waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_TORCH);
1475         result = listener.getCaptureResultForRequest(torchOnRequest, NUM_RESULTS_WAIT_TIMEOUT);
1476         // Test that the flash actually turned on continuously.
1477         mCollector.expectEquals("Flash state result must be FIRED", CaptureResult.FLASH_STATE_FIRED,
1478                 result.get(CaptureResult.FLASH_STATE));
1479         mSession.stopRepeating();
1480         // Turn off the torch
1481         requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, flashOffAeControl);
1482         // TODO: jchowdhary@, b/130323585, this line can be removed.
1483         requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
1484         int numAllowedTransitionStates = NUM_PARTIAL_FRAMES_NPFC;
1485         if (mStaticInfo.isPerFrameControlSupported()) {
1486            numAllowedTransitionStates = NUM_PARTIAL_FRAMES_PFC;
1487 
1488         }
1489         // We submit 2 * numAllowedTransitionStates + 1 requests since we have two torch mode
1490         // transitions. The additional request is to check for at least 1 expected (FIRED / READY)
1491         // state.
1492         int numTorchTestSamples =  2 * numAllowedTransitionStates  + 1;
1493         CaptureRequest flashOffRequest = requestBuilder.build();
1494         int flashModeOffRequests = captureRequestsSynchronizedBurst(flashOffRequest,
1495                 numTorchTestSamples, listener, mHandler);
1496         // Turn it on again.
1497         requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH);
1498         // We need to have CONTROL_AE_MODE be either CONTROL_AE_MODE_ON or CONTROL_AE_MODE_OFF to
1499         // turn the torch on again.
1500         requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
1501         CaptureRequest flashModeTorchRequest = requestBuilder.build();
1502         int flashModeTorchRequests = captureRequestsSynchronizedBurst(flashModeTorchRequest,
1503                 numTorchTestSamples, listener, mHandler);
1504 
1505         CaptureResult[] torchStateResults =
1506                 new CaptureResult[flashModeTorchRequests + flashModeOffRequests];
1507         Arrays.fill(torchStateResults, null);
1508         int i = 0;
1509         for (; i < flashModeOffRequests; i++) {
1510             torchStateResults[i] =
1511                     listener.getCaptureResultForRequest(flashOffRequest, NUM_RESULTS_WAIT_TIMEOUT);
1512             mCollector.expectNotEquals("Result for flashModeOff request null",
1513                     torchStateResults[i], null);
1514         }
1515         for (int j = i; j < torchStateResults.length; j++) {
1516             torchStateResults[j] =
1517                     listener.getCaptureResultForRequest(flashModeTorchRequest,
1518                             NUM_RESULTS_WAIT_TIMEOUT);
1519             mCollector.expectNotEquals("Result for flashModeTorch request null",
1520                     torchStateResults[j], null);
1521         }
1522         if (isLegacy) {
1523             // For LEGACY devices, flash state is null for all situations except:
1524             // android.control.aeMode == ON_ALWAYS_FLASH, where flash.state will be FIRED
1525             // android.flash.mode == TORCH, where flash.state will be FIRED
1526             testLegacyTorchStates(torchStateResults, 0, flashModeOffRequests - 1, flashOffRequest);
1527             testLegacyTorchStates(torchStateResults, flashModeOffRequests,
1528                     torchStateResults.length -1,
1529                     flashModeTorchRequest);
1530         } else {
1531             checkTorchStates(torchStateResults, numAllowedTransitionStates, flashModeOffRequests,
1532                     flashModeTorchRequests);
1533         }
1534     }
1535 
testLegacyTorchStates(CaptureResult []torchStateResults, int beg, int end, CaptureRequest request)1536     private void testLegacyTorchStates(CaptureResult []torchStateResults, int beg, int end,
1537             CaptureRequest request) {
1538         for (int i = beg; i <= end; i++) {
1539             Integer requestControlAeMode = request.get(CaptureRequest.CONTROL_AE_MODE);
1540             Integer requestFlashMode = request.get(CaptureRequest.FLASH_MODE);
1541             Integer resultFlashState = torchStateResults[i].get(CaptureResult.FLASH_STATE);
1542             if (requestControlAeMode == CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
1543                     requestFlashMode == CaptureRequest.FLASH_MODE_TORCH) {
1544                 mCollector.expectEquals("For LEGACY devices, flash state must be FIRED when" +
1545                         "CONTROL_AE_MODE == CONTROL_AE_MODE_ON_ALWAYS_FLASH or FLASH_MODE == " +
1546                         "TORCH, CONTROL_AE_MODE = " + requestControlAeMode + " FLASH_MODE = " +
1547                         requestFlashMode, CaptureResult.FLASH_STATE_FIRED, resultFlashState);
1548                 continue;
1549             }
1550             mCollector.expectTrue("For LEGACY devices, flash state must be null when" +
1551                         "CONTROL_AE_MODE != CONTROL_AE_MODE_ON_ALWAYS_FLASH or FLASH_MODE != " +
1552                         "TORCH, CONTROL_AE_MODE = " + requestControlAeMode + " FLASH_MODE = " +
1553                         requestFlashMode,  resultFlashState == null);
1554         }
1555     }
1556     // We check that torch states appear in the order expected. We don't necessarily know how many
1557     // times each state might appear, however we make sure that the states do not appear out of
1558     // order.
checkTorchTransitionStates(CaptureResult []torchStateResults, int beg, int end, List<Integer> stateOrder, boolean isTurningOff)1559     private void checkTorchTransitionStates(CaptureResult []torchStateResults, int beg, int end,
1560             List<Integer> stateOrder, boolean isTurningOff) {
1561         Integer flashState;
1562         Integer curIndex = 0;
1563         for (int i = beg; i <= end; i++) {
1564             flashState = torchStateResults[i].get(CaptureResult.FLASH_STATE);
1565             int index = stateOrder.indexOf(flashState);
1566             mCollector.expectNotEquals("Invalid state " + flashState + " not in expected list" +
1567                     stateOrder, index, -1);
1568             mCollector.expectGreaterOrEqual("state " + flashState  + " index " + index +
1569                     " is expected to be >= " + curIndex,
1570                     curIndex, index);
1571             curIndex = index;
1572         }
1573     }
1574 
checkTorchStates(CaptureResult []torchResults, int numAllowedTransitionStates, int numTorchOffSamples, int numTorchOnSamples)1575     private void checkTorchStates(CaptureResult []torchResults, int numAllowedTransitionStates,
1576             int numTorchOffSamples, int numTorchOnSamples) {
1577         // We test for flash states from request:
1578         // Request:       O(0) O(1) O(2) O(n)....O(nOFF) T(0) T(1) T(2) ....T(n) .... T(nON)
1579         // Valid Result : P/R  P/R  P/R  R R R...P/R P/R   P/F  P/F  P/F      F         F
1580         // For the FLASH_STATE_OFF requests, once FLASH_STATE READY has been seen, for the
1581         // transition states while switching the torch off, it must not transition to
1582         // FLASH_STATE_PARTIAL again till the next transition period which turns the torch on.
1583         // P - FLASH_STATE_PARTIAL
1584         // R - FLASH_STATE_READY
1585         // F - FLASH_STATE_FIRED
1586         // O(k) - kth FLASH_MODE_OFF request
1587         // T(k) - kth FLASH_MODE_TORCH request
1588         // nOFF - number of torch off samples
1589         // nON - number of torch on samples
1590         Integer flashState;
1591         // Check on -> off transition states
1592         List<Integer> onToOffStateOrderList = new ArrayList<Integer>();
1593         onToOffStateOrderList.add(CaptureRequest.FLASH_STATE_PARTIAL);
1594         onToOffStateOrderList.add(CaptureRequest.FLASH_STATE_READY);
1595         checkTorchTransitionStates(torchResults, 0, numAllowedTransitionStates,
1596                 onToOffStateOrderList, true);
1597         // The next frames (before transition) must have its flash state as FLASH_STATE_READY
1598         for (int i = numAllowedTransitionStates + 1;
1599                 i < numTorchOffSamples - numAllowedTransitionStates; i++) {
1600             flashState = torchResults[numAllowedTransitionStates].get(CaptureResult.FLASH_STATE);
1601             mCollector.expectEquals("flash state result must be READY",
1602                     CaptureResult.FLASH_STATE_READY, flashState);
1603         }
1604         // check off -> on transition states, before the FLASH_MODE_TORCH request was sent
1605         List<Integer> offToOnPreStateOrderList = new ArrayList<Integer>();
1606         offToOnPreStateOrderList.add(CaptureRequest.FLASH_STATE_READY);
1607         offToOnPreStateOrderList.add(CaptureRequest.FLASH_STATE_PARTIAL);
1608         checkTorchTransitionStates(torchResults,
1609                 numTorchOffSamples - numAllowedTransitionStates, numTorchOffSamples - 1,
1610                 offToOnPreStateOrderList, false);
1611         // check off -> on transition states
1612         List<Integer> offToOnPostStateOrderList = new ArrayList<Integer>();
1613         offToOnPostStateOrderList.add(CaptureRequest.FLASH_STATE_PARTIAL);
1614         offToOnPostStateOrderList.add(CaptureRequest.FLASH_STATE_FIRED);
1615         checkTorchTransitionStates(torchResults,
1616                 numTorchOffSamples, numTorchOffSamples + numAllowedTransitionStates,
1617                 offToOnPostStateOrderList, false);
1618         // check on states after off -> on transition
1619         // The next frames must have its flash state as FLASH_STATE_FIRED
1620         for (int i = numTorchOffSamples + numAllowedTransitionStates + 1;
1621                 i < torchResults.length - 1; i++) {
1622             flashState = torchResults[i].get(CaptureResult.FLASH_STATE);
1623             mCollector.expectEquals("flash state result must be FIRED for frame " + i,
1624                     CaptureRequest.FLASH_STATE_FIRED, flashState);
1625         }
1626     }
1627 
1628     /**
1629      * Test flash mode control by AE mode.
1630      * <p>
1631      * Only allow AE mode ON or OFF, because other AE mode could run into conflict with
1632      * flash manual control. This function expects the camera to already have an active
1633      * repeating request and be sending results to the listener.
1634      * </p>
1635      *
1636      * @param listener The Capture listener that is used to wait for capture result
1637      * @param aeMode The AE mode for flash to test with
1638      */
flashTestByAeMode(SimpleCaptureCallback listener, int aeMode)1639     private void flashTestByAeMode(SimpleCaptureCallback listener, int aeMode) throws Exception {
1640         CaptureResult result;
1641         final int NUM_FLASH_REQUESTS_TESTED = 10;
1642         CaptureRequest.Builder requestBuilder = createRequestForPreview();
1643 
1644         if (aeMode == CaptureRequest.CONTROL_AE_MODE_ON) {
1645             requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, aeMode);
1646         } else if (aeMode == CaptureRequest.CONTROL_AE_MODE_OFF) {
1647             changeExposure(requestBuilder, DEFAULT_EXP_TIME_NS, DEFAULT_SENSITIVITY);
1648         } else {
1649             throw new IllegalArgumentException("This test only works when AE mode is ON or OFF");
1650         }
1651 
1652         mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
1653         waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
1654 
1655         // For camera that doesn't have flash unit, flash state should always be UNAVAILABLE.
1656         if (mStaticInfo.getFlashInfoChecked() == false) {
1657             for (int i = 0; i < NUM_FLASH_REQUESTS_TESTED; i++) {
1658                 result = listener.getCaptureResult(CAPTURE_RESULT_TIMEOUT_MS);
1659                 mCollector.expectEquals("No flash unit available, flash state must be UNAVAILABLE"
1660                         + "for AE mode " + aeMode, CaptureResult.FLASH_STATE_UNAVAILABLE,
1661                         result.get(CaptureResult.FLASH_STATE));
1662             }
1663 
1664             return;
1665         }
1666 
1667         // Test flash SINGLE mode control. Wait for flash state to be READY first.
1668         if (mStaticInfo.isHardwareLevelAtLeastLimited()) {
1669             waitForResultValue(listener, CaptureResult.FLASH_STATE, CaptureResult.FLASH_STATE_READY,
1670                     NUM_RESULTS_WAIT_TIMEOUT);
1671         } // else the settings were already waited on earlier
1672 
1673         requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_SINGLE);
1674         CaptureRequest flashSinglerequest = requestBuilder.build();
1675 
1676         int flashModeSingleRequests = captureRequestsSynchronized(
1677                 flashSinglerequest, listener, mHandler);
1678         waitForNumResults(listener, flashModeSingleRequests - 1);
1679         result = listener.getCaptureResultForRequest(flashSinglerequest, NUM_RESULTS_WAIT_TIMEOUT);
1680         // Result mode must be SINGLE, state must be FIRED.
1681         mCollector.expectEquals("Flash mode result must be SINGLE",
1682                 CaptureResult.FLASH_MODE_SINGLE, result.get(CaptureResult.FLASH_MODE));
1683         mCollector.expectEquals("Flash state result must be FIRED",
1684                 CaptureResult.FLASH_STATE_FIRED, result.get(CaptureResult.FLASH_STATE));
1685 
1686         // Test flash TORCH mode control.
1687         requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH);
1688         CaptureRequest torchRequest = requestBuilder.build();
1689 
1690         int flashModeTorchRequests = captureRequestsSynchronized(torchRequest,
1691                 NUM_FLASH_REQUESTS_TESTED, listener, mHandler);
1692         waitForNumResults(listener, flashModeTorchRequests - NUM_FLASH_REQUESTS_TESTED);
1693 
1694         // Verify the results
1695         TorchSeqState state = TorchSeqState.RAMPING_UP;
1696         for (int i = 0; i < NUM_FLASH_REQUESTS_TESTED; i++) {
1697             result = listener.getCaptureResultForRequest(torchRequest,
1698                     NUM_RESULTS_WAIT_TIMEOUT);
1699             int flashMode = result.get(CaptureResult.FLASH_MODE);
1700             int flashState = result.get(CaptureResult.FLASH_STATE);
1701             // Result mode must be TORCH
1702             mCollector.expectEquals("Flash mode result " + i + " must be TORCH",
1703                     CaptureResult.FLASH_MODE_TORCH, result.get(CaptureResult.FLASH_MODE));
1704             if (state == TorchSeqState.RAMPING_UP &&
1705                     flashState == CaptureResult.FLASH_STATE_FIRED) {
1706                 state = TorchSeqState.FIRED;
1707             } else if (state == TorchSeqState.FIRED &&
1708                     flashState == CaptureResult.FLASH_STATE_PARTIAL) {
1709                 state = TorchSeqState.RAMPING_DOWN;
1710             }
1711 
1712             if (i == 0 && mStaticInfo.isPerFrameControlSupported()) {
1713                 mCollector.expectTrue(
1714                         "Per frame control device must enter FIRED state on first torch request",
1715                         state == TorchSeqState.FIRED);
1716             }
1717 
1718             if (state == TorchSeqState.FIRED) {
1719                 mCollector.expectEquals("Flash state result " + i + " must be FIRED",
1720                         CaptureResult.FLASH_STATE_FIRED, result.get(CaptureResult.FLASH_STATE));
1721             } else {
1722                 mCollector.expectEquals("Flash state result " + i + " must be PARTIAL",
1723                         CaptureResult.FLASH_STATE_PARTIAL, result.get(CaptureResult.FLASH_STATE));
1724             }
1725         }
1726         mCollector.expectTrue("Torch state FIRED never seen",
1727                 state == TorchSeqState.FIRED || state == TorchSeqState.RAMPING_DOWN);
1728 
1729         // Test flash OFF mode control
1730         requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
1731         CaptureRequest flashOffrequest = requestBuilder.build();
1732 
1733         int flashModeOffRequests = captureRequestsSynchronized(flashOffrequest, listener, mHandler);
1734         waitForNumResults(listener, flashModeOffRequests - 1);
1735         result = listener.getCaptureResultForRequest(flashOffrequest, NUM_RESULTS_WAIT_TIMEOUT);
1736         mCollector.expectEquals("Flash mode result must be OFF", CaptureResult.FLASH_MODE_OFF,
1737                 result.get(CaptureResult.FLASH_MODE));
1738     }
1739 
verifyAntiBandingMode(SimpleCaptureCallback listener, int numFramesVerified, int mode, boolean isAeManual, long requestExpTime)1740     private void verifyAntiBandingMode(SimpleCaptureCallback listener, int numFramesVerified,
1741             int mode, boolean isAeManual, long requestExpTime) throws Exception {
1742         // Skip the first a couple of frames as antibanding may not be fully up yet.
1743         final int NUM_FRAMES_SKIPPED = 5;
1744         for (int i = 0; i < NUM_FRAMES_SKIPPED; i++) {
1745             listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
1746         }
1747 
1748         for (int i = 0; i < numFramesVerified; i++) {
1749             CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
1750             Long resultExpTime = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);
1751             assertNotNull("Exposure time shouldn't be null", resultExpTime);
1752             Integer flicker = result.get(CaptureResult.STATISTICS_SCENE_FLICKER);
1753             // Scene flicker result should be always available.
1754             assertNotNull("Scene flicker must not be null", flicker);
1755             assertTrue("Scene flicker is invalid", flicker >= STATISTICS_SCENE_FLICKER_NONE &&
1756                     flicker <= STATISTICS_SCENE_FLICKER_60HZ);
1757 
1758             Integer antiBandMode = result.get(CaptureResult.CONTROL_AE_ANTIBANDING_MODE);
1759             assertNotNull("antiBanding mode shouldn't be null", antiBandMode);
1760             assertTrue("antiBanding Mode invalid, should be == " + mode + ", is: " + antiBandMode,
1761                     antiBandMode == mode);
1762             if (isAeManual) {
1763                 // First, round down not up, second, need close enough.
1764                 validateExposureTime(requestExpTime, resultExpTime);
1765                 return;
1766             }
1767 
1768             long expectedExpTime = resultExpTime; // Default, no exposure adjustment.
1769             if (mode == CONTROL_AE_ANTIBANDING_MODE_50HZ) {
1770                 // result exposure time must be adjusted by 50Hz illuminant source.
1771                 expectedExpTime =
1772                         getAntiFlickeringExposureTime(ANTI_FLICKERING_50HZ, resultExpTime);
1773             } else if (mode == CONTROL_AE_ANTIBANDING_MODE_60HZ) {
1774                 // result exposure time must be adjusted by 60Hz illuminant source.
1775                 expectedExpTime =
1776                         getAntiFlickeringExposureTime(ANTI_FLICKERING_60HZ, resultExpTime);
1777             } else if (mode == CONTROL_AE_ANTIBANDING_MODE_AUTO){
1778                 /**
1779                  * Use STATISTICS_SCENE_FLICKER to tell the illuminant source
1780                  * and do the exposure adjustment.
1781                  */
1782                 expectedExpTime = resultExpTime;
1783                 if (flicker == STATISTICS_SCENE_FLICKER_60HZ) {
1784                     expectedExpTime =
1785                             getAntiFlickeringExposureTime(ANTI_FLICKERING_60HZ, resultExpTime);
1786                 } else if (flicker == STATISTICS_SCENE_FLICKER_50HZ) {
1787                     expectedExpTime =
1788                             getAntiFlickeringExposureTime(ANTI_FLICKERING_50HZ, resultExpTime);
1789                 }
1790             }
1791 
1792             if (Math.abs(resultExpTime - expectedExpTime) > EXPOSURE_TIME_ERROR_MARGIN_NS) {
1793                 mCollector.addMessage(String.format("Result exposure time %dns diverges too much"
1794                         + " from expected exposure time %dns for mode %d when AE is auto",
1795                         resultExpTime, expectedExpTime, mode));
1796             }
1797         }
1798     }
1799 
antiBandingTestByMode(Size size, int mode)1800     private void antiBandingTestByMode(Size size, int mode)
1801             throws Exception {
1802         if(VERBOSE) {
1803             Log.v(TAG, "Anti-banding test for mode " + mode + " for camera " + mCamera.getId());
1804         }
1805         CaptureRequest.Builder requestBuilder =
1806                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1807 
1808         requestBuilder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE, mode);
1809 
1810         // Test auto AE mode anti-banding behavior
1811         SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
1812         startPreview(requestBuilder, size, resultListener);
1813         waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
1814         verifyAntiBandingMode(resultListener, NUM_FRAMES_VERIFIED, mode, /*isAeManual*/false,
1815                 IGNORE_REQUESTED_EXPOSURE_TIME_CHECK);
1816 
1817         // Test manual AE mode anti-banding behavior
1818         // 65ms, must be supported by full capability devices.
1819         final long TEST_MANUAL_EXP_TIME_NS = 65000000L;
1820         long manualExpTime = mStaticInfo.getExposureClampToRange(TEST_MANUAL_EXP_TIME_NS);
1821         changeExposure(requestBuilder, manualExpTime);
1822         resultListener = new SimpleCaptureCallback();
1823         startPreview(requestBuilder, size, resultListener);
1824         waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
1825         verifyAntiBandingMode(resultListener, NUM_FRAMES_VERIFIED, mode, /*isAeManual*/true,
1826                 manualExpTime);
1827 
1828         stopPreview();
1829     }
1830 
1831     /**
1832      * Test the all available AE modes and AE lock.
1833      * <p>
1834      * For manual AE mode, test iterates through different sensitivities and
1835      * exposure times, validate the result exposure time correctness. For
1836      * CONTROL_AE_MODE_ON_ALWAYS_FLASH mode, the AE lock and flash are tested.
1837      * For the rest of the AUTO mode, AE lock is tested.
1838      * </p>
1839      *
1840      * @param mode
1841      */
aeModeAndLockTestByMode(int mode)1842     private void aeModeAndLockTestByMode(int mode)
1843             throws Exception {
1844         switch (mode) {
1845             case CONTROL_AE_MODE_OFF:
1846                 if (mStaticInfo.isCapabilitySupported(
1847                         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
1848                     // Test manual exposure control.
1849                     aeManualControlTest();
1850                 } else {
1851                     Log.w(TAG,
1852                             "aeModeAndLockTestByMode - can't test AE mode OFF without " +
1853                             "manual sensor control");
1854                 }
1855                 break;
1856             case CONTROL_AE_MODE_ON:
1857             case CONTROL_AE_MODE_ON_AUTO_FLASH:
1858             case CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE:
1859             case CONTROL_AE_MODE_ON_ALWAYS_FLASH:
1860             case CONTROL_AE_MODE_ON_EXTERNAL_FLASH:
1861                 // Test AE lock for above AUTO modes.
1862                 aeAutoModeTestLock(mode);
1863                 break;
1864             default:
1865                 throw new UnsupportedOperationException("Unhandled AE mode " + mode);
1866         }
1867     }
1868 
1869     /**
1870      * Test AE auto modes.
1871      * <p>
1872      * Use single request rather than repeating request to test AE lock per frame control.
1873      * </p>
1874      */
aeAutoModeTestLock(int mode)1875     private void aeAutoModeTestLock(int mode) throws Exception {
1876         CaptureRequest.Builder requestBuilder =
1877                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1878         if (mStaticInfo.isAeLockSupported()) {
1879             requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
1880         }
1881         requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, mode);
1882         configurePreviewOutput(requestBuilder);
1883 
1884         final int MAX_NUM_CAPTURES_DURING_LOCK = 5;
1885         for (int i = 1; i <= MAX_NUM_CAPTURES_DURING_LOCK; i++) {
1886             autoAeMultipleCapturesThenTestLock(requestBuilder, mode, i);
1887         }
1888     }
1889 
1890     /**
1891      * Issue multiple auto AE captures, then lock AE, validate the AE lock vs.
1892      * the first capture result after the AE lock. The right AE lock behavior is:
1893      * When it is locked, it locks to the current exposure value, and all subsequent
1894      * request with lock ON will have the same exposure value locked.
1895      */
autoAeMultipleCapturesThenTestLock( CaptureRequest.Builder requestBuilder, int aeMode, int numCapturesDuringLock)1896     private void autoAeMultipleCapturesThenTestLock(
1897             CaptureRequest.Builder requestBuilder, int aeMode, int numCapturesDuringLock)
1898             throws Exception {
1899         if (numCapturesDuringLock < 1) {
1900             throw new IllegalArgumentException("numCapturesBeforeLock must be no less than 1");
1901         }
1902         if (VERBOSE) {
1903             Log.v(TAG, "Camera " + mCamera.getId() + ": Testing auto AE mode and lock for mode "
1904                     + aeMode + " with " + numCapturesDuringLock + " captures before lock");
1905         }
1906 
1907         final int NUM_CAPTURES_BEFORE_LOCK = 2;
1908         SimpleCaptureCallback listener =  new SimpleCaptureCallback();
1909 
1910         CaptureResult[] resultsDuringLock = new CaptureResult[numCapturesDuringLock];
1911         boolean canSetAeLock = mStaticInfo.isAeLockSupported();
1912 
1913         // Reset the AE lock to OFF, since we are reusing this builder many times
1914         if (canSetAeLock) {
1915             requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
1916         }
1917 
1918         // Just send several captures with auto AE, lock off.
1919         CaptureRequest request = requestBuilder.build();
1920         for (int i = 0; i < NUM_CAPTURES_BEFORE_LOCK; i++) {
1921             mSession.capture(request, listener, mHandler);
1922         }
1923         waitForNumResults(listener, NUM_CAPTURES_BEFORE_LOCK);
1924 
1925         if (!canSetAeLock) {
1926             // Without AE lock, the remaining tests items won't work
1927             return;
1928         }
1929 
1930         // Then fire several capture to lock the AE.
1931         requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
1932 
1933         int requestCount = captureRequestsSynchronized(
1934                 requestBuilder.build(), numCapturesDuringLock, listener, mHandler);
1935 
1936         int[] sensitivities = new int[numCapturesDuringLock];
1937         long[] expTimes = new long[numCapturesDuringLock];
1938         Arrays.fill(sensitivities, -1);
1939         Arrays.fill(expTimes, -1L);
1940 
1941         // Get the AE lock on result and validate the exposure values.
1942         waitForNumResults(listener, requestCount - numCapturesDuringLock);
1943         for (int i = 0; i < resultsDuringLock.length; i++) {
1944             resultsDuringLock[i] = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
1945         }
1946 
1947         for (int i = 0; i < numCapturesDuringLock; i++) {
1948             mCollector.expectKeyValueEquals(
1949                     resultsDuringLock[i], CaptureResult.CONTROL_AE_LOCK, true);
1950         }
1951 
1952         // Can't read manual sensor/exposure settings without manual sensor
1953         if (mStaticInfo.isCapabilitySupported(
1954                 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS)) {
1955             int sensitivityLocked =
1956                     getValueNotNull(resultsDuringLock[0], CaptureResult.SENSOR_SENSITIVITY);
1957             long expTimeLocked =
1958                     getValueNotNull(resultsDuringLock[0], CaptureResult.SENSOR_EXPOSURE_TIME);
1959             for (int i = 1; i < resultsDuringLock.length; i++) {
1960                 mCollector.expectKeyValueEquals(
1961                         resultsDuringLock[i], CaptureResult.SENSOR_EXPOSURE_TIME, expTimeLocked);
1962                 mCollector.expectKeyValueEquals(
1963                         resultsDuringLock[i], CaptureResult.SENSOR_SENSITIVITY, sensitivityLocked);
1964             }
1965         }
1966     }
1967 
1968     /**
1969      * Iterate through exposure times and sensitivities for manual AE control.
1970      * <p>
1971      * Use single request rather than repeating request to test manual exposure
1972      * value change per frame control.
1973      * </p>
1974      */
aeManualControlTest()1975     private void aeManualControlTest()
1976             throws Exception {
1977         CaptureRequest.Builder requestBuilder =
1978                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1979         configurePreviewOutput(requestBuilder);
1980 
1981         // Warm up pipeline for more accurate timing
1982         SimpleCaptureCallback warmupListener =  new SimpleCaptureCallback();
1983         mSession.setRepeatingRequest(requestBuilder.build(), warmupListener, mHandler);
1984         warmupListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
1985 
1986         // Do manual captures
1987         requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CONTROL_AE_MODE_OFF);
1988         SimpleCaptureCallback listener =  new SimpleCaptureCallback();
1989 
1990         long[] expTimesNs = getExposureTimeTestValues();
1991         int[] sensitivities = getSensitivityTestValues();
1992         // Submit single request at a time, then verify the result.
1993         for (int i = 0; i < expTimesNs.length; i++) {
1994             for (int j = 0; j < sensitivities.length; j++) {
1995                 if (VERBOSE) {
1996                     Log.v(TAG, "Camera " + mCamera.getId() + ": Testing sensitivity "
1997                             + sensitivities[j] + ", exposure time " + expTimesNs[i] + "ns");
1998                 }
1999 
2000                 changeExposure(requestBuilder, expTimesNs[i], sensitivities[j]);
2001                 mSession.capture(requestBuilder.build(), listener, mHandler);
2002 
2003                 // make sure timeout is long enough for long exposure time - add a 2x safety margin
2004                 // to exposure time
2005                 long timeoutMs = WAIT_FOR_RESULT_TIMEOUT_MS + 2 * expTimesNs[i] / 1000000;
2006                 CaptureResult result = listener.getCaptureResult(timeoutMs);
2007                 long resultExpTimeNs = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME);
2008                 int resultSensitivity = getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY);
2009                 validateExposureTime(expTimesNs[i], resultExpTimeNs);
2010                 validateSensitivity(sensitivities[j], resultSensitivity);
2011                 validateFrameDurationForCapture(result);
2012             }
2013         }
2014         mSession.stopRepeating();
2015 
2016         // TODO: Add another case to test where we can submit all requests, then wait for
2017         // results, which will hide the pipeline latency. this is not only faster, but also
2018         // test high speed per frame control and synchronization.
2019     }
2020 
2021 
2022     /**
2023      * Verify black level lock control.
2024      */
verifyBlackLevelLockResults(SimpleCaptureCallback listener, int numFramesVerified, int maxLockOffCnt)2025     private void verifyBlackLevelLockResults(SimpleCaptureCallback listener, int numFramesVerified,
2026             int maxLockOffCnt) throws Exception {
2027         int noLockCnt = 0;
2028         for (int i = 0; i < numFramesVerified; i++) {
2029             CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
2030             Boolean blackLevelLock = result.get(CaptureResult.BLACK_LEVEL_LOCK);
2031             assertNotNull("Black level lock result shouldn't be null", blackLevelLock);
2032 
2033             // Count the lock == false result, which could possibly occur at most once.
2034             if (blackLevelLock == false) {
2035                 noLockCnt++;
2036             }
2037 
2038             if(VERBOSE) {
2039                 Log.v(TAG, "Black level lock result: " + blackLevelLock);
2040             }
2041         }
2042         assertTrue("Black level lock OFF occurs " + noLockCnt + " times,  expect at most "
2043                 + maxLockOffCnt + " for camera " + mCamera.getId(), noLockCnt <= maxLockOffCnt);
2044     }
2045 
2046     /**
2047      * Verify shading map for different shading modes.
2048      */
verifyShadingMap(SimpleCaptureCallback listener, int numFramesVerified, int shadingMode)2049     private void verifyShadingMap(SimpleCaptureCallback listener, int numFramesVerified,
2050             int shadingMode) throws Exception {
2051 
2052         for (int i = 0; i < numFramesVerified; i++) {
2053             CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
2054             mCollector.expectEquals("Shading mode result doesn't match request",
2055                     shadingMode, result.get(CaptureResult.SHADING_MODE));
2056             LensShadingMap mapObj = result.get(
2057                     CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);
2058             assertNotNull("Map object must not be null", mapObj);
2059             int numElementsInMap = mapObj.getGainFactorCount();
2060             float[] map = new float[numElementsInMap];
2061             mapObj.copyGainFactors(map, /*offset*/0);
2062             assertNotNull("Map must not be null", map);
2063             assertFalse(String.format(
2064                     "Map size %d should be less than %d", numElementsInMap, MAX_SHADING_MAP_SIZE),
2065                     numElementsInMap >= MAX_SHADING_MAP_SIZE);
2066             assertFalse(String.format("Map size %d should be no less than %d", numElementsInMap,
2067                     MIN_SHADING_MAP_SIZE), numElementsInMap < MIN_SHADING_MAP_SIZE);
2068 
2069             if (shadingMode == CaptureRequest.SHADING_MODE_FAST ||
2070                     shadingMode == CaptureRequest.SHADING_MODE_HIGH_QUALITY) {
2071                 // shading mode is FAST or HIGH_QUALITY, expect to receive a map with all
2072                 // elements >= 1.0f
2073 
2074                 int badValueCnt = 0;
2075                 // Detect the bad values of the map data.
2076                 for (int j = 0; j < numElementsInMap; j++) {
2077                     if (Float.isNaN(map[j]) || map[j] < 1.0f) {
2078                         badValueCnt++;
2079                     }
2080                 }
2081                 assertEquals("Number of value in the map is " + badValueCnt + " out of "
2082                         + numElementsInMap, /*expected*/0, /*actual*/badValueCnt);
2083             } else if (shadingMode == CaptureRequest.SHADING_MODE_OFF) {
2084                 float[] unityMap = new float[numElementsInMap];
2085                 Arrays.fill(unityMap, 1.0f);
2086                 // shading mode is OFF, expect to receive a unity map.
2087                 assertTrue("Result map " + Arrays.toString(map) + " must be an unity map",
2088                         Arrays.equals(unityMap, map));
2089             }
2090         }
2091     }
2092 
2093     /**
2094      * Test face detection for a camera.
2095      */
2096     private void faceDetectionTestByCamera() throws Exception {
2097         int[] faceDetectModes = mStaticInfo.getAvailableFaceDetectModesChecked();
2098 
2099         SimpleCaptureCallback listener;
2100         CaptureRequest.Builder requestBuilder =
2101                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2102 
2103         Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
2104         for (int mode : faceDetectModes) {
2105             requestBuilder.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, mode);
2106             if (VERBOSE) {
2107                 Log.v(TAG, "Start testing face detection mode " + mode);
2108             }
2109 
2110             // Create a new listener for each run to avoid the results from one run spill
2111             // into another run.
2112             listener = new SimpleCaptureCallback();
2113             startPreview(requestBuilder, maxPreviewSz, listener);
2114             waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2115             verifyFaceDetectionResults(listener, NUM_FACE_DETECTION_FRAMES_VERIFIED, mode);
2116         }
2117 
2118         stopPreview();
2119     }
2120 
2121     /**
2122      * Verify face detection results for different face detection modes.
2123      *
2124      * @param listener The listener to get capture result
2125      * @param numFramesVerified Number of results to be verified
2126      * @param faceDetectionMode Face detection mode to be verified against
2127      */
2128     private void verifyFaceDetectionResults(SimpleCaptureCallback listener, int numFramesVerified,
2129             int faceDetectionMode) {
2130         for (int i = 0; i < numFramesVerified; i++) {
2131             CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
2132             mCollector.expectEquals("Result face detection mode should match the request",
2133                     faceDetectionMode, result.get(CaptureResult.STATISTICS_FACE_DETECT_MODE));
2134 
2135             Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
2136             List<Integer> faceIds = new ArrayList<Integer>(faces.length);
2137             List<Integer> faceScores = new ArrayList<Integer>(faces.length);
2138             if (faceDetectionMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_OFF) {
2139                 mCollector.expectEquals("Number of detection faces should always 0 for OFF mode",
2140                         0, faces.length);
2141             } else if (faceDetectionMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE) {
2142                 for (Face face : faces) {
2143                     mCollector.expectNotNull("Face rectangle shouldn't be null", face.getBounds());
2144                     faceScores.add(face.getScore());
2145                     mCollector.expectTrue("Face id is expected to be -1 for SIMPLE mode",
2146                             face.getId() == Face.ID_UNSUPPORTED);
2147                 }
2148             } else if (faceDetectionMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) {
2149                 if (VERBOSE) {
2150                     Log.v(TAG, "Number of faces detected: " + faces.length);
2151                 }
2152 
2153                 for (Face face : faces) {
2154                     Rect faceBound;
2155                     boolean faceRectAvailable =  mCollector.expectTrue("Face rectangle "
2156                             + "shouldn't be null", face.getBounds() != null);
2157                     if (!faceRectAvailable) {
2158                         continue;
2159                     }
2160                     faceBound = face.getBounds();
2161 
2162                     faceScores.add(face.getScore());
2163                     faceIds.add(face.getId());
2164 
2165                     mCollector.expectTrue("Face id is shouldn't be -1 for FULL mode",
2166                             face.getId() != Face.ID_UNSUPPORTED);
2167                     boolean leftEyeAvailable =
2168                             mCollector.expectTrue("Left eye position shouldn't be null",
2169                                     face.getLeftEyePosition() != null);
2170                     boolean rightEyeAvailable =
2171                             mCollector.expectTrue("Right eye position shouldn't be null",
2172                                     face.getRightEyePosition() != null);
2173                     boolean mouthAvailable =
2174                             mCollector.expectTrue("Mouth position shouldn't be null",
2175                             face.getMouthPosition() != null);
2176                     // Eyes/mouth position should be inside of the face rect.
2177                     if (leftEyeAvailable) {
2178                         Point leftEye = face.getLeftEyePosition();
2179                         mCollector.expectTrue("Left eye " + leftEye + "should be"
2180                                 + "inside of face rect " + faceBound,
2181                                 faceBound.contains(leftEye.x, leftEye.y));
2182                     }
2183                     if (rightEyeAvailable) {
2184                         Point rightEye = face.getRightEyePosition();
2185                         mCollector.expectTrue("Right eye " + rightEye + "should be"
2186                                 + "inside of face rect " + faceBound,
2187                                 faceBound.contains(rightEye.x, rightEye.y));
2188                     }
2189                     if (mouthAvailable) {
2190                         Point mouth = face.getMouthPosition();
2191                         mCollector.expectTrue("Mouth " + mouth +  " should be inside of"
2192                                 + " face rect " + faceBound,
2193                                 faceBound.contains(mouth.x, mouth.y));
2194                     }
2195                 }
2196             }
2197             mCollector.expectValuesInRange("Face scores are invalid", faceScores,
2198                     Face.SCORE_MIN, Face.SCORE_MAX);
2199             mCollector.expectValuesUnique("Face ids are invalid", faceIds);
2200         }
2201     }
2202 
2203     /**
2204      * Test tone map mode and result by camera
2205      */
2206     private void toneMapTestByCamera() throws Exception {
2207         if (!mStaticInfo.isManualToneMapSupported()) {
2208             return;
2209         }
2210 
2211         CaptureRequest.Builder requestBuilder =
2212                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2213         int[] toneMapModes = mStaticInfo.getAvailableToneMapModesChecked();
2214         // Test AUTO modes first. Note that FAST/HQ must both present or not present
2215         for (int i = 0; i < toneMapModes.length; i++) {
2216             if (toneMapModes[i] == CaptureRequest.TONEMAP_MODE_FAST && i > 0) {
2217                 int tmpMode = toneMapModes[0];
2218                 toneMapModes[0] = CaptureRequest.TONEMAP_MODE_FAST;
2219                 toneMapModes[i] = tmpMode;
2220             }
2221             if (toneMapModes[i] == CaptureRequest.TONEMAP_MODE_HIGH_QUALITY && i > 1) {
2222                 int tmpMode = toneMapModes[1];
2223                 toneMapModes[1] = CaptureRequest.TONEMAP_MODE_HIGH_QUALITY;
2224                 toneMapModes[i] = tmpMode;
2225             }
2226         }
2227         for (int mode : toneMapModes) {
2228             if (VERBOSE) {
2229                 Log.v(TAG, "Testing tonemap mode " + mode);
2230             }
2231 
2232             requestBuilder.set(CaptureRequest.TONEMAP_MODE, mode);
2233             switch (mode) {
2234                 case CaptureRequest.TONEMAP_MODE_CONTRAST_CURVE:
2235                     TonemapCurve toneCurve = new TonemapCurve(TONEMAP_CURVE_LINEAR,
2236                             TONEMAP_CURVE_LINEAR, TONEMAP_CURVE_LINEAR);
2237                     requestBuilder.set(CaptureRequest.TONEMAP_CURVE, toneCurve);
2238                     testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
2239 
2240                     toneCurve = new TonemapCurve(TONEMAP_CURVE_SRGB,
2241                             TONEMAP_CURVE_SRGB, TONEMAP_CURVE_SRGB);
2242                     requestBuilder.set(CaptureRequest.TONEMAP_CURVE, toneCurve);
2243                     testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
2244                     break;
2245                 case CaptureRequest.TONEMAP_MODE_GAMMA_VALUE:
2246                     requestBuilder.set(CaptureRequest.TONEMAP_GAMMA, 1.0f);
2247                     testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
2248                     requestBuilder.set(CaptureRequest.TONEMAP_GAMMA, 2.2f);
2249                     testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
2250                     requestBuilder.set(CaptureRequest.TONEMAP_GAMMA, 5.0f);
2251                     testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
2252                     break;
2253                 case CaptureRequest.TONEMAP_MODE_PRESET_CURVE:
2254                     requestBuilder.set(CaptureRequest.TONEMAP_PRESET_CURVE,
2255                             CaptureRequest.TONEMAP_PRESET_CURVE_REC709);
2256                     testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
2257                     requestBuilder.set(CaptureRequest.TONEMAP_PRESET_CURVE,
2258                             CaptureRequest.TONEMAP_PRESET_CURVE_SRGB);
2259                     testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
2260                     break;
2261                 default:
2262                     testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
2263                     break;
2264             }
2265         }
2266 
2267 
2268     }
2269 
2270     /**
2271      * Test tonemap mode with speficied request settings
2272      *
2273      * @param numFramesVerified Number of results to be verified
2274      * @param requestBuilder the request builder of settings to be tested
2275      */
2276     private void testToneMapMode (int numFramesVerified,
2277             CaptureRequest.Builder requestBuilder)  throws Exception  {
2278         final int MIN_TONEMAP_CURVE_POINTS = 2;
2279         final Float ZERO = new Float(0);
2280         final Float ONE = new Float(1.0f);
2281 
2282         SimpleCaptureCallback listener = new SimpleCaptureCallback();
2283         int tonemapMode = requestBuilder.get(CaptureRequest.TONEMAP_MODE);
2284         Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
2285         startPreview(requestBuilder, maxPreviewSz, listener);
2286         waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2287 
2288         int maxCurvePoints = mStaticInfo.getMaxTonemapCurvePointChecked();
2289         for (int i = 0; i < numFramesVerified; i++) {
2290             CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
2291             mCollector.expectEquals("Capture result tonemap mode should match request", tonemapMode,
2292                     result.get(CaptureResult.TONEMAP_MODE));
2293             TonemapCurve tc = getValueNotNull(result, CaptureResult.TONEMAP_CURVE);
2294             int pointCount = tc.getPointCount(TonemapCurve.CHANNEL_RED);
2295             float[] mapRed = new float[pointCount * TonemapCurve.POINT_SIZE];
2296             pointCount = tc.getPointCount(TonemapCurve.CHANNEL_GREEN);
2297             float[] mapGreen = new float[pointCount * TonemapCurve.POINT_SIZE];
2298             pointCount = tc.getPointCount(TonemapCurve.CHANNEL_BLUE);
2299             float[] mapBlue = new float[pointCount * TonemapCurve.POINT_SIZE];
2300             tc.copyColorCurve(TonemapCurve.CHANNEL_RED, mapRed, 0);
2301             tc.copyColorCurve(TonemapCurve.CHANNEL_GREEN, mapGreen, 0);
2302             tc.copyColorCurve(TonemapCurve.CHANNEL_BLUE, mapBlue, 0);
2303             if (tonemapMode == CaptureResult.TONEMAP_MODE_CONTRAST_CURVE) {
2304                 /**
2305                  * TODO: need figure out a good way to measure the difference
2306                  * between request and result, as they may have different array
2307                  * size.
2308                  */
2309             } else if (tonemapMode == CaptureResult.TONEMAP_MODE_GAMMA_VALUE) {
2310                 mCollector.expectEquals("Capture result gamma value should match request",
2311                         requestBuilder.get(CaptureRequest.TONEMAP_GAMMA),
2312                         result.get(CaptureResult.TONEMAP_GAMMA));
2313             } else if (tonemapMode == CaptureResult.TONEMAP_MODE_PRESET_CURVE) {
2314                 mCollector.expectEquals("Capture result preset curve should match request",
2315                         requestBuilder.get(CaptureRequest.TONEMAP_PRESET_CURVE),
2316                         result.get(CaptureResult.TONEMAP_PRESET_CURVE));
2317             }
2318 
2319             // Tonemap curve result availability and basic validity check for all modes.
2320             mCollector.expectValuesInRange("Tonemap curve red values are out of range",
2321                     CameraTestUtils.toObject(mapRed), /*min*/ZERO, /*max*/ONE);
2322             mCollector.expectInRange("Tonemap curve red length is out of range",
2323                     mapRed.length, MIN_TONEMAP_CURVE_POINTS, maxCurvePoints * 2);
2324             mCollector.expectValuesInRange("Tonemap curve green values are out of range",
2325                     CameraTestUtils.toObject(mapGreen), /*min*/ZERO, /*max*/ONE);
2326             mCollector.expectInRange("Tonemap curve green length is out of range",
2327                     mapGreen.length, MIN_TONEMAP_CURVE_POINTS, maxCurvePoints * 2);
2328             mCollector.expectValuesInRange("Tonemap curve blue values are out of range",
2329                     CameraTestUtils.toObject(mapBlue), /*min*/ZERO, /*max*/ONE);
2330             mCollector.expectInRange("Tonemap curve blue length is out of range",
2331                     mapBlue.length, MIN_TONEMAP_CURVE_POINTS, maxCurvePoints * 2);
2332 
2333             // Make sure capture result tonemap has identical channels.
2334             if (mStaticInfo.isMonochromeCamera()) {
2335                 mCollector.expectEquals("Capture result tonemap of monochrome camera should " +
2336                         "have same dimension for all channels", mapRed.length, mapGreen.length);
2337                 mCollector.expectEquals("Capture result tonemap of monochrome camera should " +
2338                         "have same dimension for all channels", mapRed.length, mapBlue.length);
2339 
2340                 if (mapRed.length == mapGreen.length && mapRed.length == mapBlue.length) {
2341                     boolean isIdentical = true;
2342                     for (int j = 0; j < mapRed.length; j++) {
2343                         isIdentical = (mapRed[j] == mapGreen[j] && mapRed[j] == mapBlue[j]);
2344                         if (!isIdentical)
2345                             break;
2346                     }
2347                     mCollector.expectTrue("Capture result tonemap of monochrome camera should " +
2348                             "be identical between all channels", isIdentical);
2349                 }
2350             }
2351         }
2352         stopPreview();
2353     }
2354 
2355     /**
2356      * Test awb mode control.
2357      * <p>
2358      * Test each supported AWB mode, verify the AWB mode in capture result
2359      * matches request. When AWB is locked, the color correction gains and
2360      * transform should remain unchanged.
2361      * </p>
2362      */
2363     private void awbModeAndLockTestByCamera() throws Exception {
2364         int[] awbModes = mStaticInfo.getAwbAvailableModesChecked();
2365         Size maxPreviewSize = mOrderedPreviewSizes.get(0);
2366         boolean canSetAwbLock = mStaticInfo.isAwbLockSupported();
2367         CaptureRequest.Builder requestBuilder =
2368                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2369         startPreview(requestBuilder, maxPreviewSize, /*listener*/null);
2370 
2371         for (int mode : awbModes) {
2372             SimpleCaptureCallback listener;
2373             requestBuilder.set(CaptureRequest.CONTROL_AWB_MODE, mode);
2374             listener = new SimpleCaptureCallback();
2375             mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
2376             waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2377 
2378             // Verify AWB mode in capture result.
2379             verifyCaptureResultForKey(CaptureResult.CONTROL_AWB_MODE, mode, listener,
2380                     NUM_FRAMES_VERIFIED);
2381 
2382             if (mode == CameraMetadata.CONTROL_AWB_MODE_AUTO && canSetAwbLock) {
2383                 // Verify color correction transform and gains stay unchanged after a lock.
2384                 requestBuilder.set(CaptureRequest.CONTROL_AWB_LOCK, true);
2385                 listener = new SimpleCaptureCallback();
2386                 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
2387                 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2388 
2389                 if (mStaticInfo.areKeysAvailable(CaptureResult.CONTROL_AWB_STATE)) {
2390                     waitForResultValue(listener, CaptureResult.CONTROL_AWB_STATE,
2391                             CaptureResult.CONTROL_AWB_STATE_LOCKED, NUM_RESULTS_WAIT_TIMEOUT);
2392                 }
2393 
2394             }
2395             // Don't verify auto mode result if AWB lock is not supported
2396             if (mode != CameraMetadata.CONTROL_AWB_MODE_AUTO || canSetAwbLock) {
2397                 verifyAwbCaptureResultUnchanged(listener, NUM_FRAMES_VERIFIED);
2398             }
2399         }
2400     }
2401 
2402     private void verifyAwbCaptureResultUnchanged(SimpleCaptureCallback listener,
2403             int numFramesVerified) {
2404         // Skip check if cc gains/transform/mode are not available
2405         if (!mStaticInfo.areKeysAvailable(
2406                 CaptureResult.COLOR_CORRECTION_GAINS,
2407                 CaptureResult.COLOR_CORRECTION_TRANSFORM,
2408                 CaptureResult.COLOR_CORRECTION_MODE)) {
2409             return;
2410         }
2411 
2412         CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
2413         RggbChannelVector lockedGains =
2414                 getValueNotNull(result, CaptureResult.COLOR_CORRECTION_GAINS);
2415         ColorSpaceTransform lockedTransform =
2416                 getValueNotNull(result, CaptureResult.COLOR_CORRECTION_TRANSFORM);
2417 
2418         for (int i = 0; i < numFramesVerified; i++) {
2419             result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
2420             // Color correction mode check is skipped here, as it is checked in colorCorrectionTest.
2421             validateColorCorrectionResult(result, result.get(CaptureResult.COLOR_CORRECTION_MODE));
2422 
2423             RggbChannelVector gains = getValueNotNull(result, CaptureResult.COLOR_CORRECTION_GAINS);
2424             ColorSpaceTransform transform =
2425                     getValueNotNull(result, CaptureResult.COLOR_CORRECTION_TRANSFORM);
2426             mCollector.expectEquals("Color correction gains should remain unchanged after awb lock",
2427                     lockedGains, gains);
2428             mCollector.expectEquals("Color correction transform should remain unchanged after"
2429                     + " awb lock", lockedTransform, transform);
2430         }
2431     }
2432 
2433     /**
2434      * Test AF mode control.
2435      * <p>
2436      * Test all supported AF modes, verify the AF mode in capture result matches
2437      * request. When AF mode is one of the CONTROL_AF_MODE_CONTINUOUS_* mode,
2438      * verify if the AF can converge to PASSIVE_FOCUSED or PASSIVE_UNFOCUSED
2439      * state within certain amount of frames.
2440      * </p>
2441      */
2442     private void afModeTestByCamera() throws Exception {
2443         int[] afModes = mStaticInfo.getAfAvailableModesChecked();
2444         Size maxPreviewSize = mOrderedPreviewSizes.get(0);
2445         CaptureRequest.Builder requestBuilder =
2446                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2447         startPreview(requestBuilder, maxPreviewSize, /*listener*/null);
2448 
2449         for (int mode : afModes) {
2450             SimpleCaptureCallback listener;
2451             requestBuilder.set(CaptureRequest.CONTROL_AF_MODE, mode);
2452             listener = new SimpleCaptureCallback();
2453             mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
2454             waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2455 
2456             // Verify AF mode in capture result.
2457             verifyCaptureResultForKey(CaptureResult.CONTROL_AF_MODE, mode, listener,
2458                     NUM_FRAMES_VERIFIED);
2459 
2460             // Verify AF can finish a scan for CONTROL_AF_MODE_CONTINUOUS_* modes.
2461             // In LEGACY mode, a transition to one of the continuous AF modes does not necessarily
2462             // result in a passive AF call if the camera has already been focused, and the scene has
2463             // not changed enough to trigger an AF pass.  Skip this constraint for LEGACY.
2464             if (mStaticInfo.isHardwareLevelAtLeastLimited() &&
2465                     (mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE ||
2466                     mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO)) {
2467                 List<Integer> afStateList = new ArrayList<Integer>();
2468                 afStateList.add(CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED);
2469                 afStateList.add(CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED);
2470                 waitForAnyResultValue(listener, CaptureResult.CONTROL_AF_STATE, afStateList,
2471                         NUM_RESULTS_WAIT_TIMEOUT);
2472             }
2473         }
2474     }
2475 
2476     /**
2477      * Test video and optical stabilizations if they are supported by a given camera.
2478      */
2479     private void stabilizationTestByCamera() throws Exception {
2480         // video stabilization test.
2481         List<Key<?>> keys = mStaticInfo.getCharacteristics().getKeys();
2482 
2483         Integer[] videoStabModes = (keys.contains(CameraCharacteristics.
2484                 CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES)) ?
2485                 CameraTestUtils.toObject(mStaticInfo.getAvailableVideoStabilizationModesChecked()) :
2486                     new Integer[0];
2487         int[] opticalStabModes = (keys.contains(
2488                 CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION)) ?
2489                 mStaticInfo.getAvailableOpticalStabilizationChecked() : new int[0];
2490 
2491         Size maxPreviewSize = mOrderedPreviewSizes.get(0);
2492         CaptureRequest.Builder requestBuilder =
2493                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2494         SimpleCaptureCallback listener = new SimpleCaptureCallback();
2495         startPreview(requestBuilder, maxPreviewSize, listener);
2496 
2497         for (Integer mode : videoStabModes) {
2498             listener = new SimpleCaptureCallback();
2499             requestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, mode);
2500             mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
2501             waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2502             // Video stabilization could return any modes.
2503             verifyAnyCaptureResultForKey(CaptureResult.CONTROL_VIDEO_STABILIZATION_MODE,
2504                     videoStabModes, listener, NUM_FRAMES_VERIFIED);
2505         }
2506 
2507         for (int mode : opticalStabModes) {
2508             listener = new SimpleCaptureCallback();
2509             requestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, mode);
2510             mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
2511             waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2512             verifyCaptureResultForKey(CaptureResult.LENS_OPTICAL_STABILIZATION_MODE, mode,
2513                     listener, NUM_FRAMES_VERIFIED);
2514         }
2515 
2516         stopPreview();
2517     }
2518 
2519     private void digitalZoomTestByCamera(Size previewSize) throws Exception {
2520         final int ZOOM_STEPS = 15;
2521         final PointF[] TEST_ZOOM_CENTERS;
2522         final float maxZoom = mStaticInfo.getAvailableMaxDigitalZoomChecked();
2523         final float ZOOM_ERROR_MARGIN = 0.01f;
2524         if (Math.abs(maxZoom - 1.0f) < ZOOM_ERROR_MARGIN) {
2525             // It doesn't make much sense to test the zoom if the device effectively supports
2526             // no zoom.
2527             return;
2528         }
2529 
2530         final int croppingType = mStaticInfo.getScalerCroppingTypeChecked();
2531         if (croppingType == CameraCharacteristics.SCALER_CROPPING_TYPE_FREEFORM) {
2532             // Set the four corners in a way that the minimally allowed zoom factor is 2x.
2533             float normalizedLeft = 0.25f;
2534             float normalizedTop = 0.25f;
2535             float normalizedRight = 0.75f;
2536             float normalizedBottom = 0.75f;
2537             // If the max supported zoom is too small, make sure we at least test the max
2538             // Zoom is tested for the four corners.
2539             if (maxZoom < 2.0f) {
2540                 normalizedLeft = 0.5f / maxZoom;
2541                 normalizedTop = 0.5f / maxZoom;
2542                 normalizedRight = 1.0f - normalizedLeft;
2543                 normalizedBottom = 1.0f - normalizedTop;
2544             }
2545             TEST_ZOOM_CENTERS = new PointF[] {
2546                 new PointF(0.5f, 0.5f),   // Center point
2547                 new PointF(normalizedLeft, normalizedTop),     // top left corner zoom
2548                 new PointF(normalizedRight, normalizedTop),    // top right corner zoom
2549                 new PointF(normalizedLeft, normalizedBottom),  // bottom left corner zoom
2550                 new PointF(normalizedRight, normalizedBottom), // bottom right corner zoom
2551             };
2552 
2553             if (VERBOSE) {
2554                 Log.v(TAG, "Testing zoom with CROPPING_TYPE = FREEFORM");
2555             }
2556         } else {
2557             // CENTER_ONLY
2558             TEST_ZOOM_CENTERS = new PointF[] {
2559                     new PointF(0.5f, 0.5f),   // Center point
2560             };
2561 
2562             if (VERBOSE) {
2563                 Log.v(TAG, "Testing zoom with CROPPING_TYPE = CENTER_ONLY");
2564             }
2565         }
2566 
2567         final Rect activeArraySize = mStaticInfo.getActiveArraySizeChecked();
2568         Rect[] cropRegions = new Rect[ZOOM_STEPS];
2569         MeteringRectangle[][] expectRegions = new MeteringRectangle[ZOOM_STEPS][];
2570         CaptureRequest.Builder requestBuilder =
2571                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2572         SimpleCaptureCallback listener = new SimpleCaptureCallback();
2573 
2574         updatePreviewSurface(previewSize);
2575         configurePreviewOutput(requestBuilder);
2576 
2577         CaptureRequest[] requests = new CaptureRequest[ZOOM_STEPS];
2578 
2579         // Set algorithm regions to full active region
2580         // TODO: test more different 3A regions
2581         final MeteringRectangle[] defaultMeteringRect = new MeteringRectangle[] {
2582                 new MeteringRectangle (
2583                         /*x*/0, /*y*/0, activeArraySize.width(), activeArraySize.height(),
2584                         /*meteringWeight*/1)
2585         };
2586 
2587         for (int algo = 0; algo < NUM_ALGORITHMS; algo++) {
2588             update3aRegion(requestBuilder, algo,  defaultMeteringRect);
2589         }
2590 
2591         final int CAPTURE_SUBMIT_REPEAT;
2592         {
2593             int maxLatency = mStaticInfo.getSyncMaxLatency();
2594             if (maxLatency == CameraMetadata.SYNC_MAX_LATENCY_UNKNOWN) {
2595                 CAPTURE_SUBMIT_REPEAT = NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY + 1;
2596             } else {
2597                 CAPTURE_SUBMIT_REPEAT = maxLatency + 1;
2598             }
2599         }
2600 
2601         if (VERBOSE) {
2602             Log.v(TAG, "Testing zoom with CAPTURE_SUBMIT_REPEAT = " + CAPTURE_SUBMIT_REPEAT);
2603         }
2604 
2605         for (PointF center : TEST_ZOOM_CENTERS) {
2606             Rect previousCrop = null;
2607 
2608             for (int i = 0; i < ZOOM_STEPS; i++) {
2609                 /*
2610                  * Submit capture request
2611                  */
2612                 float zoomFactor = (float) (1.0f + (maxZoom - 1.0) * i / ZOOM_STEPS);
2613                 cropRegions[i] = getCropRegionForZoom(zoomFactor, center, maxZoom, activeArraySize);
2614                 if (VERBOSE) {
2615                     Log.v(TAG, "Testing Zoom for factor " + zoomFactor + " and center " +
2616                             center + " The cropRegion is " + cropRegions[i] +
2617                             " Preview size is " + previewSize);
2618                 }
2619                 requestBuilder.set(CaptureRequest.SCALER_CROP_REGION, cropRegions[i]);
2620                 requests[i] = requestBuilder.build();
2621                 for (int j = 0; j < CAPTURE_SUBMIT_REPEAT; ++j) {
2622                     if (VERBOSE) {
2623                         Log.v(TAG, "submit crop region " + cropRegions[i]);
2624                     }
2625                     mSession.capture(requests[i], listener, mHandler);
2626                 }
2627 
2628                 /*
2629                  * Validate capture result
2630                  */
2631                 waitForNumResults(listener, CAPTURE_SUBMIT_REPEAT - 1); // Drop first few frames
2632                 CaptureResult result = listener.getCaptureResultForRequest(
2633                         requests[i], NUM_RESULTS_WAIT_TIMEOUT);
2634                 Rect cropRegion = getValueNotNull(result, CaptureResult.SCALER_CROP_REGION);
2635 
2636                 /*
2637                  * Validate resulting crop regions
2638                  */
2639                 if (previousCrop != null) {
2640                     Rect currentCrop = cropRegion;
2641                     mCollector.expectTrue(String.format(
2642                             "Crop region should shrink or stay the same " +
2643                                     "(previous = %s, current = %s)",
2644                                     previousCrop, currentCrop),
2645                             previousCrop.equals(currentCrop) ||
2646                                 (previousCrop.width() > currentCrop.width() &&
2647                                  previousCrop.height() > currentCrop.height()));
2648                 }
2649 
2650                 if (mStaticInfo.isHardwareLevelAtLeastLimited()) {
2651                     mCollector.expectRectsAreSimilar(
2652                             "Request and result crop region should be similar",
2653                             cropRegions[i], cropRegion, CROP_REGION_ERROR_PERCENT_DELTA);
2654                 }
2655 
2656                 if (croppingType == SCALER_CROPPING_TYPE_CENTER_ONLY) {
2657                     mCollector.expectRectCentered(
2658                             "Result crop region should be centered inside the active array",
2659                             new Size(activeArraySize.width(), activeArraySize.height()),
2660                             cropRegion, CROP_REGION_ERROR_PERCENT_CENTERED);
2661                 }
2662 
2663                 /*
2664                  * Validate resulting metering regions
2665                  */
2666 
2667                 // Use the actual reported crop region to calculate the resulting metering region
2668                 expectRegions[i] = getExpectedOutputRegion(
2669                         /*requestRegion*/defaultMeteringRect,
2670                         /*cropRect*/     cropRegion);
2671 
2672                 // Verify Output 3A region is intersection of input 3A region and crop region
2673                 for (int algo = 0; algo < NUM_ALGORITHMS; algo++) {
2674                     validate3aRegion(result, algo, expectRegions[i]);
2675                 }
2676 
2677                 previousCrop = cropRegion;
2678             }
2679 
2680             if (maxZoom > 1.0f) {
2681                 mCollector.expectTrue(
2682                         String.format("Most zoomed-in crop region should be smaller" +
2683                                         "than active array w/h" +
2684                                         "(last crop = %s, active array = %s)",
2685                                         previousCrop, activeArraySize),
2686                             (previousCrop.width() < activeArraySize.width() &&
2687                              previousCrop.height() < activeArraySize.height()));
2688             }
2689         }
2690     }
2691 
2692     private void digitalZoomPreviewCombinationTestByCamera() throws Exception {
2693         final double ASPECT_RATIO_THRESHOLD = 0.001;
2694         List<Double> aspectRatiosTested = new ArrayList<Double>();
2695         Size maxPreviewSize = mOrderedPreviewSizes.get(0);
2696         aspectRatiosTested.add((double)(maxPreviewSize.getWidth()) / maxPreviewSize.getHeight());
2697 
2698         for (Size size : mOrderedPreviewSizes) {
2699             // Max preview size was already tested in testDigitalZoom test. skip it.
2700             if (size.equals(maxPreviewSize)) {
2701                 continue;
2702             }
2703 
2704             // Only test the largest size for each aspect ratio.
2705             double aspectRatio = (double)(size.getWidth()) / size.getHeight();
2706             if (isAspectRatioContained(aspectRatiosTested, aspectRatio, ASPECT_RATIO_THRESHOLD)) {
2707                 continue;
2708             }
2709 
2710             if (VERBOSE) {
2711                 Log.v(TAG, "Test preview size " + size.toString() + " digital zoom");
2712             }
2713 
2714             aspectRatiosTested.add(aspectRatio);
2715             digitalZoomTestByCamera(size);
2716         }
2717     }
2718 
2719     private static boolean isAspectRatioContained(List<Double> aspectRatioList,
2720             double aspectRatio, double delta) {
2721         for (Double ratio : aspectRatioList) {
2722             if (Math.abs(ratio - aspectRatio) < delta) {
2723                 return true;
2724             }
2725         }
2726 
2727         return false;
2728     }
2729 
2730     private void sceneModeTestByCamera() throws Exception {
2731         int[] sceneModes = mStaticInfo.getAvailableSceneModesChecked();
2732         Size maxPreviewSize = mOrderedPreviewSizes.get(0);
2733         CaptureRequest.Builder requestBuilder =
2734                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2735         SimpleCaptureCallback listener = new SimpleCaptureCallback();
2736         requestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_USE_SCENE_MODE);
2737         startPreview(requestBuilder, maxPreviewSize, listener);
2738 
2739         for(int mode : sceneModes) {
2740             requestBuilder.set(CaptureRequest.CONTROL_SCENE_MODE, mode);
2741             listener = new SimpleCaptureCallback();
2742             mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
2743             waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2744 
2745             verifyCaptureResultForKey(CaptureResult.CONTROL_SCENE_MODE,
2746                     mode, listener, NUM_FRAMES_VERIFIED);
2747             // This also serves as purpose of showing preview for NUM_FRAMES_VERIFIED
2748             verifyCaptureResultForKey(CaptureResult.CONTROL_MODE,
2749                     CaptureRequest.CONTROL_MODE_USE_SCENE_MODE, listener, NUM_FRAMES_VERIFIED);
2750         }
2751     }
2752 
2753     private void effectModeTestByCamera() throws Exception {
2754         int[] effectModes = mStaticInfo.getAvailableEffectModesChecked();
2755         Size maxPreviewSize = mOrderedPreviewSizes.get(0);
2756         CaptureRequest.Builder requestBuilder =
2757                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2758         requestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
2759         SimpleCaptureCallback listener = new SimpleCaptureCallback();
2760         startPreview(requestBuilder, maxPreviewSize, listener);
2761 
2762         for(int mode : effectModes) {
2763             requestBuilder.set(CaptureRequest.CONTROL_EFFECT_MODE, mode);
2764             listener = new SimpleCaptureCallback();
2765             mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
2766             waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2767 
2768             verifyCaptureResultForKey(CaptureResult.CONTROL_EFFECT_MODE,
2769                     mode, listener, NUM_FRAMES_VERIFIED);
2770             // This also serves as purpose of showing preview for NUM_FRAMES_VERIFIED
2771             verifyCaptureResultForKey(CaptureResult.CONTROL_MODE,
2772                     CaptureRequest.CONTROL_MODE_AUTO, listener, NUM_FRAMES_VERIFIED);
2773         }
2774     }
2775 
2776     //----------------------------------------------------------------
2777     //---------Below are common functions for all tests.--------------
2778     //----------------------------------------------------------------
2779 
2780     /**
2781      * Enable exposure manual control and change exposure and sensitivity and
2782      * clamp the value into the supported range.
2783      */
2784     private void changeExposure(CaptureRequest.Builder requestBuilder,
2785             long expTime, int sensitivity) {
2786         // Check if the max analog sensitivity is available and no larger than max sensitivity.  The
2787         // max analog sensitivity is not actually used here. This is only an extra correctness
2788         // check.
2789         mStaticInfo.getMaxAnalogSensitivityChecked();
2790 
2791         expTime = mStaticInfo.getExposureClampToRange(expTime);
2792         sensitivity = mStaticInfo.getSensitivityClampToRange(sensitivity);
2793 
2794         requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CONTROL_AE_MODE_OFF);
2795         requestBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, expTime);
2796         requestBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity);
2797     }
2798     /**
2799      * Enable exposure manual control and change exposure time and
2800      * clamp the value into the supported range.
2801      *
2802      * <p>The sensitivity is set to default value.</p>
2803      */
2804     private void changeExposure(CaptureRequest.Builder requestBuilder, long expTime) {
2805         changeExposure(requestBuilder, expTime, DEFAULT_SENSITIVITY);
2806     }
2807 
2808     /**
2809      * Get the exposure time array that contains multiple exposure time steps in
2810      * the exposure time range, in nanoseconds.
2811      */
2812     private long[] getExposureTimeTestValues() {
2813         long[] testValues = new long[DEFAULT_NUM_EXPOSURE_TIME_STEPS + 1];
2814         long maxExpTime = mStaticInfo.getExposureMaximumOrDefault(DEFAULT_EXP_TIME_NS);
2815         long minExpTime = mStaticInfo.getExposureMinimumOrDefault(DEFAULT_EXP_TIME_NS);
2816 
2817         long range = maxExpTime - minExpTime;
2818         double stepSize = range / (double)DEFAULT_NUM_EXPOSURE_TIME_STEPS;
2819         for (int i = 0; i < testValues.length; i++) {
2820             testValues[i] = maxExpTime - (long)(stepSize * i);
2821             testValues[i] = mStaticInfo.getExposureClampToRange(testValues[i]);
2822         }
2823 
2824         return testValues;
2825     }
2826 
2827     /**
2828      * Generate test focus distances in range of [0, minFocusDistance] in increasing order.
2829      *
2830      * @param repeatMin number of times minValue will be repeated.
2831      * @param repeatMax number of times maxValue will be repeated.
2832      */
2833     private float[] getFocusDistanceTestValuesInOrder(int repeatMin, int repeatMax) {
2834         int totalCount = NUM_TEST_FOCUS_DISTANCES + 1 + repeatMin + repeatMax;
2835         float[] testValues = new float[totalCount];
2836         float minValue = 0;
2837         float maxValue = mStaticInfo.getMinimumFocusDistanceChecked();
2838 
2839         float range = maxValue - minValue;
2840         float stepSize = range / NUM_TEST_FOCUS_DISTANCES;
2841 
2842         for (int i = 0; i < repeatMin; i++) {
2843             testValues[i] = minValue;
2844         }
2845         for (int i = 0; i <= NUM_TEST_FOCUS_DISTANCES; i++) {
2846             testValues[repeatMin+i] = minValue + stepSize * i;
2847         }
2848         for (int i = 0; i < repeatMax; i++) {
2849             testValues[repeatMin+NUM_TEST_FOCUS_DISTANCES+1+i] =
2850                     maxValue;
2851         }
2852 
2853         return testValues;
2854     }
2855 
2856     /**
2857      * Get the sensitivity array that contains multiple sensitivity steps in the
2858      * sensitivity range.
2859      * <p>
2860      * Sensitivity number of test values is determined by
2861      * {@value #DEFAULT_SENSITIVITY_STEP_SIZE} and sensitivity range, and
2862      * bounded by {@value #DEFAULT_NUM_SENSITIVITY_STEPS}.
2863      * </p>
2864      */
2865     private int[] getSensitivityTestValues() {
2866         int maxSensitivity = mStaticInfo.getSensitivityMaximumOrDefault(
2867                 DEFAULT_SENSITIVITY);
2868         int minSensitivity = mStaticInfo.getSensitivityMinimumOrDefault(
2869                 DEFAULT_SENSITIVITY);
2870 
2871         int range = maxSensitivity - minSensitivity;
2872         int stepSize = DEFAULT_SENSITIVITY_STEP_SIZE;
2873         int numSteps = range / stepSize;
2874         // Bound the test steps to avoid supper long test.
2875         if (numSteps > DEFAULT_NUM_SENSITIVITY_STEPS) {
2876             numSteps = DEFAULT_NUM_SENSITIVITY_STEPS;
2877             stepSize = range / numSteps;
2878         }
2879         int[] testValues = new int[numSteps + 1];
2880         for (int i = 0; i < testValues.length; i++) {
2881             testValues[i] = maxSensitivity - stepSize * i;
2882             testValues[i] = mStaticInfo.getSensitivityClampToRange(testValues[i]);
2883         }
2884 
2885         return testValues;
2886     }
2887 
2888     /**
2889      * Validate the AE manual control exposure time.
2890      *
2891      * <p>Exposure should be close enough, and only round down if they are not equal.</p>
2892      *
2893      * @param request Request exposure time
2894      * @param result Result exposure time
2895      */
2896     private void validateExposureTime(long request, long result) {
2897         long expTimeDelta = request - result;
2898         long expTimeErrorMargin = (long)(Math.max(EXPOSURE_TIME_ERROR_MARGIN_NS, request
2899                 * EXPOSURE_TIME_ERROR_MARGIN_RATE));
2900         // First, round down not up, second, need close enough.
2901         mCollector.expectTrue("Exposture time is invalid for AE manaul control test, request: "
2902                 + request + " result: " + result,
2903                 expTimeDelta < expTimeErrorMargin && expTimeDelta >= 0);
2904     }
2905 
2906     /**
2907      * Validate AE manual control sensitivity.
2908      *
2909      * @param request Request sensitivity
2910      * @param result Result sensitivity
2911      */
2912     private void validateSensitivity(int request, int result) {
2913         float sensitivityDelta = request - result;
2914         float sensitivityErrorMargin = request * SENSITIVITY_ERROR_MARGIN_RATE;
2915         // First, round down not up, second, need close enough.
2916         mCollector.expectTrue("Sensitivity is invalid for AE manaul control test, request: "
2917                 + request + " result: " + result,
2918                 sensitivityDelta < sensitivityErrorMargin && sensitivityDelta >= 0);
2919     }
2920 
2921     /**
2922      * Validate frame duration for a given capture.
2923      *
2924      * <p>Frame duration should be longer than exposure time.</p>
2925      *
2926      * @param result The capture result for a given capture
2927      */
2928     private void validateFrameDurationForCapture(CaptureResult result) {
2929         long expTime = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME);
2930         long frameDuration = getValueNotNull(result, CaptureResult.SENSOR_FRAME_DURATION);
2931         if (VERBOSE) {
2932             Log.v(TAG, "frame duration: " + frameDuration + " Exposure time: " + expTime);
2933         }
2934 
2935         mCollector.expectTrue(String.format("Frame duration (%d) should be longer than exposure"
2936                 + " time (%d) for a given capture", frameDuration, expTime),
2937                 frameDuration >= expTime);
2938 
2939         validatePipelineDepth(result);
2940     }
2941 
2942     /**
2943      * Basic verification for the control mode capture result.
2944      *
2945      * @param key The capture result key to be verified against
2946      * @param requestMode The request mode for this result
2947      * @param listener The capture listener to get capture results
2948      * @param numFramesVerified The number of capture results to be verified
2949      */
2950     private <T> void verifyCaptureResultForKey(CaptureResult.Key<T> key, T requestMode,
2951             SimpleCaptureCallback listener, int numFramesVerified) {
2952         for (int i = 0; i < numFramesVerified; i++) {
2953             CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
2954             validatePipelineDepth(result);
2955             T resultMode = getValueNotNull(result, key);
2956             if (VERBOSE) {
2957                 Log.v(TAG, "Expect value: " + requestMode.toString() + " result value: "
2958                         + resultMode.toString());
2959             }
2960             mCollector.expectEquals("Key " + key.getName() + " result should match request",
2961                     requestMode, resultMode);
2962         }
2963     }
2964 
2965     /**
2966      * Basic verification that the value of a capture result key should be one of the expected
2967      * values.
2968      *
2969      * @param key The capture result key to be verified against
2970      * @param expectedModes The list of any possible expected modes for this result
2971      * @param listener The capture listener to get capture results
2972      * @param numFramesVerified The number of capture results to be verified
2973      */
2974     private <T> void verifyAnyCaptureResultForKey(CaptureResult.Key<T> key, T[] expectedModes,
2975             SimpleCaptureCallback listener, int numFramesVerified) {
2976         for (int i = 0; i < numFramesVerified; i++) {
2977             CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
2978             validatePipelineDepth(result);
2979             T resultMode = getValueNotNull(result, key);
2980             if (VERBOSE) {
2981                 Log.v(TAG, "Expect values: " + Arrays.toString(expectedModes) + " result value: "
2982                         + resultMode.toString());
2983             }
2984             // Capture result should be one of the expected values.
2985             mCollector.expectContains(expectedModes, resultMode);
2986         }
2987     }
2988 
2989     /**
2990      * Verify if the fps is slow down for given input request with certain
2991      * controls inside.
2992      * <p>
2993      * This method selects a max preview size for each fps range, and then
2994      * configure the preview stream. Preview is started with the max preview
2995      * size, and then verify if the result frame duration is in the frame
2996      * duration range.
2997      * </p>
2998      *
2999      * @param requestBuilder The request builder that contains post-processing
3000      *            controls that could impact the output frame rate, such as
3001      *            {@link CaptureRequest.NOISE_REDUCTION_MODE}. The value of
3002      *            these controls must be set to some values such that the frame
3003      *            rate is not slow down.
3004      * @param numFramesVerified The number of frames to be verified
3005      * @param fpsRanges The fps ranges to be verified
3006      */
3007     private void verifyFpsNotSlowDown(CaptureRequest.Builder requestBuilder,
3008             int numFramesVerified, List<Range<Integer>> fpsRanges )  throws Exception {
3009         boolean frameDurationAvailable = true;
3010         // Allow a few frames for AE to settle on target FPS range
3011         final int NUM_FRAME_TO_SKIP = 6;
3012         float frameDurationErrorMargin = FRAME_DURATION_ERROR_MARGIN;
3013         if (!mStaticInfo.areKeysAvailable(CaptureResult.SENSOR_FRAME_DURATION)) {
3014             frameDurationAvailable = false;
3015             // Allow a larger error margin (1.5%) for timestamps
3016             frameDurationErrorMargin = 0.015f;
3017         }
3018         if (mStaticInfo.isExternalCamera()) {
3019             // Allow a even larger error margin (15%) for external camera timestamps
3020             frameDurationErrorMargin = 0.15f;
3021         }
3022 
3023         boolean antiBandingOffIsSupported = mStaticInfo.isAntiBandingOffModeSupported();
3024         Range<Integer> fpsRange;
3025         SimpleCaptureCallback resultListener;
3026 
3027         for (int i = 0; i < fpsRanges.size(); i += 1) {
3028             fpsRange = fpsRanges.get(i);
3029             Size previewSz = getMaxPreviewSizeForFpsRange(fpsRange);
3030             // If unable to find a preview size, then log the failure, and skip this run.
3031             if (previewSz == null) {
3032                 if (mStaticInfo.isCapabilitySupported(
3033                         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
3034                     mCollector.addMessage(String.format(
3035                             "Unable to find a preview size supporting given fps range %s",
3036                             fpsRange));
3037                 }
3038                 continue;
3039             }
3040 
3041             if (VERBOSE) {
3042                 Log.v(TAG, String.format("Test fps range %s for preview size %s",
3043                         fpsRange, previewSz.toString()));
3044             }
3045             requestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
3046             // Turn off auto antibanding to avoid exposure time and frame duration interference
3047             // from antibanding algorithm.
3048             if (antiBandingOffIsSupported) {
3049                 requestBuilder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE,
3050                         CaptureRequest.CONTROL_AE_ANTIBANDING_MODE_OFF);
3051             } else {
3052                 // The device doesn't implement the OFF mode, test continues. It need make sure
3053                 // that the antibanding algorithm doesn't slow down the fps.
3054                 Log.i(TAG, "OFF antibanding mode is not supported, the camera device output must" +
3055                         " not slow down the frame rate regardless of its current antibanding" +
3056                         " mode");
3057             }
3058 
3059             resultListener = new SimpleCaptureCallback();
3060             startPreview(requestBuilder, previewSz, resultListener);
3061             waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
3062             // Wait several more frames for AE to settle on target FPS range
3063             waitForNumResults(resultListener, NUM_FRAME_TO_SKIP);
3064 
3065             long[] frameDurationRange = new long[]{
3066                     (long) (1e9 / fpsRange.getUpper()), (long) (1e9 / fpsRange.getLower())};
3067             long captureTime = 0, prevCaptureTime = 0;
3068             for (int j = 0; j < numFramesVerified; j++) {
3069                 long frameDuration = frameDurationRange[0];
3070                 CaptureResult result =
3071                         resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
3072                 validatePipelineDepth(result);
3073                 if (frameDurationAvailable) {
3074                     frameDuration = getValueNotNull(result, CaptureResult.SENSOR_FRAME_DURATION);
3075                 } else {
3076                     // if frame duration is not available, check timestamp instead
3077                     captureTime = getValueNotNull(result, CaptureResult.SENSOR_TIMESTAMP);
3078                     if (j > 0) {
3079                         frameDuration = captureTime - prevCaptureTime;
3080                     }
3081                     prevCaptureTime = captureTime;
3082                 }
3083                 mCollector.expectInRange(
3084                         "Frame duration must be in the range of " +
3085                                 Arrays.toString(frameDurationRange),
3086                         frameDuration,
3087                         (long) (frameDurationRange[0] * (1 - frameDurationErrorMargin)),
3088                         (long) (frameDurationRange[1] * (1 + frameDurationErrorMargin)));
3089             }
3090         }
3091 
3092         stopPreview();
3093     }
3094 
3095     /**
3096      * Validate the pipeline depth result.
3097      *
3098      * @param result The capture result to get pipeline depth data
3099      */
3100     private void validatePipelineDepth(CaptureResult result) {
3101         final byte MIN_PIPELINE_DEPTH = 1;
3102         byte maxPipelineDepth = mStaticInfo.getPipelineMaxDepthChecked();
3103         Byte pipelineDepth = getValueNotNull(result, CaptureResult.REQUEST_PIPELINE_DEPTH);
3104         mCollector.expectInRange(String.format("Pipeline depth must be in the range of [%d, %d]",
3105                 MIN_PIPELINE_DEPTH, maxPipelineDepth), pipelineDepth, MIN_PIPELINE_DEPTH,
3106                 maxPipelineDepth);
3107     }
3108 
3109     /**
3110      * Calculate the anti-flickering corrected exposure time.
3111      * <p>
3112      * If the input exposure time is very short (shorter than flickering
3113      * boundary), which indicate the scene is bright and very likely at outdoor
3114      * environment, skip the correction, as it doesn't make much sense by doing so.
3115      * </p>
3116      * <p>
3117      * For long exposure time (larger than the flickering boundary), find the
3118      * exposure time that is closest to the flickering boundary.
3119      * </p>
3120      *
3121      * @param flickeringMode The flickering mode
3122      * @param exposureTime The input exposureTime to be corrected
3123      * @return anti-flickering corrected exposure time
3124      */
3125     private long getAntiFlickeringExposureTime(int flickeringMode, long exposureTime) {
3126         if (flickeringMode != ANTI_FLICKERING_50HZ && flickeringMode != ANTI_FLICKERING_60HZ) {
3127             throw new IllegalArgumentException("Input anti-flickering mode must be 50 or 60Hz");
3128         }
3129         long flickeringBoundary = EXPOSURE_TIME_BOUNDARY_50HZ_NS;
3130         if (flickeringMode == ANTI_FLICKERING_60HZ) {
3131             flickeringBoundary = EXPOSURE_TIME_BOUNDARY_60HZ_NS;
3132         }
3133 
3134         if (exposureTime <= flickeringBoundary) {
3135             return exposureTime;
3136         }
3137 
3138         // Find the closest anti-flickering corrected exposure time
3139         long correctedExpTime = exposureTime + (flickeringBoundary / 2);
3140         correctedExpTime = correctedExpTime - (correctedExpTime % flickeringBoundary);
3141         return correctedExpTime;
3142     }
3143 
3144     /**
3145      * Update one 3A region in capture request builder if that region is supported. Do nothing
3146      * if the specified 3A region is not supported by camera device.
3147      * @param requestBuilder The request to be updated
3148      * @param algoIdx The index to the algorithm. (AE: 0, AWB: 1, AF: 2)
3149      * @param regions The 3A regions to be set
3150      */
3151     private void update3aRegion(
3152             CaptureRequest.Builder requestBuilder, int algoIdx, MeteringRectangle[] regions)
3153     {
3154         int maxRegions;
3155         CaptureRequest.Key<MeteringRectangle[]> key;
3156 
3157         if (regions == null || regions.length == 0) {
3158             throw new IllegalArgumentException("Invalid input 3A region!");
3159         }
3160 
3161         switch (algoIdx) {
3162             case INDEX_ALGORITHM_AE:
3163                 maxRegions = mStaticInfo.getAeMaxRegionsChecked();
3164                 key = CaptureRequest.CONTROL_AE_REGIONS;
3165                 break;
3166             case INDEX_ALGORITHM_AWB:
3167                 maxRegions = mStaticInfo.getAwbMaxRegionsChecked();
3168                 key = CaptureRequest.CONTROL_AWB_REGIONS;
3169                 break;
3170             case INDEX_ALGORITHM_AF:
3171                 maxRegions = mStaticInfo.getAfMaxRegionsChecked();
3172                 key = CaptureRequest.CONTROL_AF_REGIONS;
3173                 break;
3174             default:
3175                 throw new IllegalArgumentException("Unknown 3A Algorithm!");
3176         }
3177 
3178         if (maxRegions >= regions.length) {
3179             requestBuilder.set(key, regions);
3180         }
3181     }
3182 
3183     /**
3184      * Validate one 3A region in capture result equals to expected region if that region is
3185      * supported. Do nothing if the specified 3A region is not supported by camera device.
3186      * @param result The capture result to be validated
3187      * @param algoIdx The index to the algorithm. (AE: 0, AWB: 1, AF: 2)
3188      * @param expectRegions The 3A regions expected in capture result
3189      */
3190     private void validate3aRegion(
3191             CaptureResult result, int algoIdx, MeteringRectangle[] expectRegions)
3192     {
3193         final int maxCorrectionDist = 2;
3194         int maxRegions;
3195         CaptureResult.Key<MeteringRectangle[]> key;
3196         MeteringRectangle[] actualRegion;
3197 
3198         switch (algoIdx) {
3199             case INDEX_ALGORITHM_AE:
3200                 maxRegions = mStaticInfo.getAeMaxRegionsChecked();
3201                 key = CaptureResult.CONTROL_AE_REGIONS;
3202                 break;
3203             case INDEX_ALGORITHM_AWB:
3204                 maxRegions = mStaticInfo.getAwbMaxRegionsChecked();
3205                 key = CaptureResult.CONTROL_AWB_REGIONS;
3206                 break;
3207             case INDEX_ALGORITHM_AF:
3208                 maxRegions = mStaticInfo.getAfMaxRegionsChecked();
3209                 key = CaptureResult.CONTROL_AF_REGIONS;
3210                 break;
3211             default:
3212                 throw new IllegalArgumentException("Unknown 3A Algorithm!");
3213         }
3214 
3215         Integer distortionCorrectionMode = result.get(CaptureResult.DISTORTION_CORRECTION_MODE);
3216         boolean correctionEnabled =
3217                 distortionCorrectionMode != null &&
3218                 distortionCorrectionMode != CaptureResult.DISTORTION_CORRECTION_MODE_OFF;
3219 
3220         if (maxRegions > 0)
3221         {
3222             actualRegion = getValueNotNull(result, key);
3223             if (correctionEnabled) {
3224                 for(int i = 0; i < actualRegion.length; i++) {
3225                     Rect a = actualRegion[i].getRect();
3226                     Rect e = expectRegions[i].getRect();
3227                     if (!mCollector.expectLessOrEqual(
3228                         "Expected 3A regions: " + Arrays.toString(expectRegions) +
3229                         " are not close enough to the actual one: " + Arrays.toString(actualRegion),
3230                         maxCorrectionDist, Math.abs(a.left - e.left))) continue;
3231                     if (!mCollector.expectLessOrEqual(
3232                         "Expected 3A regions: " + Arrays.toString(expectRegions) +
3233                         " are not close enough to the actual one: " + Arrays.toString(actualRegion),
3234                         maxCorrectionDist, Math.abs(a.right - e.right))) continue;
3235                     if (!mCollector.expectLessOrEqual(
3236                         "Expected 3A regions: " + Arrays.toString(expectRegions) +
3237                         " are not close enough to the actual one: " + Arrays.toString(actualRegion),
3238                         maxCorrectionDist, Math.abs(a.top - e.top))) continue;
3239                     if (!mCollector.expectLessOrEqual(
3240                         "Expected 3A regions: " + Arrays.toString(expectRegions) +
3241                         " are not close enough to the actual one: " + Arrays.toString(actualRegion),
3242                         maxCorrectionDist, Math.abs(a.bottom - e.bottom))) continue;
3243                 }
3244             } else {
3245                 mCollector.expectEquals(
3246                     "Expected 3A regions: " + Arrays.toString(expectRegions) +
3247                     " does not match actual one: " + Arrays.toString(actualRegion),
3248                     expectRegions, actualRegion);
3249             }
3250         }
3251     }
3252 }
3253