1 /*
2  * Copyright (C) 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 package android.hardware.camera2.cts;
18 
19 import static com.android.ex.camera2.blocking.BlockingSessionCallback.SESSION_CLOSED;
20 
21 import static org.junit.Assert.assertNotNull;
22 import static org.junit.Assert.assertTrue;
23 
24 import android.app.Instrumentation;
25 import android.graphics.ImageFormat;
26 import android.graphics.SurfaceTexture;
27 import android.hardware.camera2.CameraAccessException;
28 import android.hardware.camera2.CameraCaptureSession;
29 import android.hardware.camera2.CameraCaptureSession.CaptureCallback;
30 import android.hardware.camera2.CameraCharacteristics;
31 import android.hardware.camera2.CameraDevice;
32 import android.hardware.camera2.CaptureRequest;
33 import android.hardware.camera2.CaptureResult;
34 import android.hardware.camera2.TotalCaptureResult;
35 import android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureCallback;
36 import android.hardware.camera2.cts.CameraTestUtils.SimpleImageReaderListener;
37 import android.hardware.camera2.cts.helpers.StaticMetadata;
38 import android.hardware.camera2.cts.helpers.StaticMetadata.CheckLevel;
39 import android.hardware.camera2.cts.testcases.Camera2AndroidTestCase;
40 import android.hardware.camera2.params.InputConfiguration;
41 import android.hardware.camera2.params.StreamConfigurationMap;
42 import android.media.Image;
43 import android.media.ImageReader;
44 import android.media.ImageWriter;
45 import android.os.ConditionVariable;
46 import android.os.SystemClock;
47 import android.util.Log;
48 import android.util.Pair;
49 import android.util.Range;
50 import android.util.Size;
51 import android.view.Surface;
52 
53 import androidx.test.InstrumentationRegistry;
54 
55 import com.android.compatibility.common.util.DeviceReportLog;
56 import com.android.compatibility.common.util.ResultType;
57 import com.android.compatibility.common.util.ResultUnit;
58 import com.android.compatibility.common.util.Stat;
59 import com.android.ex.camera2.blocking.BlockingSessionCallback;
60 import com.android.ex.camera2.exceptions.TimeoutRuntimeException;
61 
62 import org.junit.Test;
63 
64 import java.util.ArrayList;
65 import java.util.Arrays;
66 import java.util.List;
67 import java.util.concurrent.LinkedBlockingQueue;
68 import java.util.concurrent.TimeUnit;
69 
70 /**
71  * Test camera2 API use case performance KPIs, such as camera open time, session creation time,
72  * shutter lag etc. The KPI data will be reported in cts results.
73  */
74 public class PerformanceTest extends Camera2AndroidTestCase {
    private static final String TAG = "PerformanceTest";
    // Stream name prefix under which all KPIs are reported to CTS.
    private static final String REPORT_LOG_NAME = "CtsCameraTestCases";
    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
    // Number of iterations each KPI is measured over before being averaged and reported.
    private static final int NUM_TEST_LOOPS = 10;
    // Max number of images an ImageReader is configured to hold at once.
    private static final int NUM_MAX_IMAGES = 4;
    // Number of capture results to let preview run for before stopping it.
    private static final int NUM_RESULTS_WAIT = 30;
    // Reprocess input formats exercised by the reprocessing KPI tests.
    private static final int[] REPROCESS_FORMATS = {ImageFormat.YUV_420_888, ImageFormat.PRIVATE};
    private final int MAX_REPROCESS_IMAGES = 6;
    private final int MAX_JPEG_IMAGES = MAX_REPROCESS_IMAGES;
    private final int MAX_INPUT_IMAGES = MAX_REPROCESS_IMAGES;
    // ZSL queue depth should be bigger than the max simultaneous reprocessing capture request
    // count, to maintain a reasonable number of candidate images for the worst case.
    private final int MAX_ZSL_IMAGES = MAX_REPROCESS_IMAGES * 3 / 2;
    // Allowed fractional slack when comparing measured reprocess stall against the expected value.
    private final double REPROCESS_STALL_MARGIN = 0.1;
    // Per-result wait timeout, in milliseconds.
    private static final int WAIT_FOR_RESULT_TIMEOUT_MS = 3000;
    // Upper bound on the number of results to scan when waiting for a specific result value.
    private static final int NUM_RESULTS_WAIT_TIMEOUT = 100;
    // Frames to skip while waiting for 3A convergence on devices with unknown pipeline latency.
    private static final int NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY = 8;

    private DeviceReportLog mReportLog;

    // Used for reading camera output buffers.
    private ImageReader mCameraZslReader;
    private SimpleImageReaderListener mCameraZslImageListener;
    // Used for reprocessing (jpeg) output.
    private ImageReader mJpegReader;
    private SimpleImageReaderListener mJpegListener;
    // Used for reprocessing input.
    private ImageWriter mWriter;
    private SimpleCaptureCallback mZslResultListener;

    private Instrumentation mInstrumentation;

    private Surface mPreviewSurface;
    private SurfaceTexture mPreviewSurfaceTexture;
109 
    @Override
    public void setUp() throws Exception {
        // Camera2AndroidTestCase.setUp() must run first: it initializes the camera manager,
        // handler thread, and static metadata maps used by every test below.
        super.setUp();
        mInstrumentation = InstrumentationRegistry.getInstrumentation();
    }
115 
    @Override
    public void tearDown() throws Exception {
        // All per-test cleanup is done by the base class; nothing extra to release here.
        super.tearDown();
    }
120 
121     /**
122      * Test camera launch KPI: the time duration between a camera device is
123      * being opened and first preview frame is available.
124      * <p>
125      * It includes camera open time, session creation time, and sending first
126      * preview request processing latency etc. For the SurfaceView based preview use
127      * case, there is no way for client to know the exact preview frame
128      * arrival time. To approximate this time, a companion YUV420_888 stream is
129      * created. The first YUV420_888 Image coming out of the ImageReader is treated
130      * as the first preview arrival time.</p>
131      * <p>
132      * For depth-only devices, timing is done with the DEPTH16 format instead.
133      * </p>
134      */
testCameraLaunch()135     public void testCameraLaunch() throws Exception {
136         double[] avgCameraLaunchTimes = new double[mCameraIds.length];
137 
138         int counter = 0;
139         for (String id : mCameraIds) {
140             // Do NOT move these variables to outer scope
141             // They will be passed to DeviceReportLog and their references will be stored
142             String streamName = "test_camera_launch";
143             mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
144             mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
145             double[] cameraOpenTimes = new double[NUM_TEST_LOOPS];
146             double[] configureStreamTimes = new double[NUM_TEST_LOOPS];
147             double[] startPreviewTimes = new double[NUM_TEST_LOOPS];
148             double[] stopPreviewTimes = new double[NUM_TEST_LOOPS];
149             double[] cameraCloseTimes = new double[NUM_TEST_LOOPS];
150             double[] cameraLaunchTimes = new double[NUM_TEST_LOOPS];
151             try {
152                 mStaticInfo = new StaticMetadata(mCameraManager.getCameraCharacteristics(id));
153                 if (mStaticInfo.isColorOutputSupported()) {
154                     initializeImageReader(id, ImageFormat.YUV_420_888);
155                 } else {
156                     assertTrue("Depth output must be supported if regular output isn't!",
157                             mStaticInfo.isDepthOutputSupported());
158                     initializeImageReader(id, ImageFormat.DEPTH16);
159                 }
160 
161                 SimpleImageListener imageListener = null;
162                 long startTimeMs, openTimeMs, configureTimeMs, previewStartedTimeMs;
163                 for (int i = 0; i < NUM_TEST_LOOPS; i++) {
164                     try {
165                         // Need create a new listener every iteration to be able to wait
166                         // for the first image comes out.
167                         imageListener = new SimpleImageListener();
168                         mReader.setOnImageAvailableListener(imageListener, mHandler);
169                         startTimeMs = SystemClock.elapsedRealtime();
170 
171                         // Blocking open camera
172                         simpleOpenCamera(id);
173                         openTimeMs = SystemClock.elapsedRealtime();
174                         cameraOpenTimes[i] = openTimeMs - startTimeMs;
175 
176                         // Blocking configure outputs.
177                         configureReaderAndPreviewOutputs();
178                         configureTimeMs = SystemClock.elapsedRealtime();
179                         configureStreamTimes[i] = configureTimeMs - openTimeMs;
180 
181                         // Blocking start preview (start preview to first image arrives)
182                         SimpleCaptureCallback resultListener =
183                                 new SimpleCaptureCallback();
184                         blockingStartPreview(resultListener, imageListener);
185                         previewStartedTimeMs = SystemClock.elapsedRealtime();
186                         startPreviewTimes[i] = previewStartedTimeMs - configureTimeMs;
187                         cameraLaunchTimes[i] = previewStartedTimeMs - startTimeMs;
188 
189                         // Let preview on for a couple of frames
190                         CameraTestUtils.waitForNumResults(resultListener, NUM_RESULTS_WAIT,
191                                 WAIT_FOR_RESULT_TIMEOUT_MS);
192 
193                         // Blocking stop preview
194                         startTimeMs = SystemClock.elapsedRealtime();
195                         blockingStopPreview();
196                         stopPreviewTimes[i] = SystemClock.elapsedRealtime() - startTimeMs;
197                     }
198                     finally {
199                         // Blocking camera close
200                         startTimeMs = SystemClock.elapsedRealtime();
201                         closeDevice(id);
202                         cameraCloseTimes[i] = SystemClock.elapsedRealtime() - startTimeMs;
203                     }
204                 }
205 
206                 avgCameraLaunchTimes[counter] = Stat.getAverage(cameraLaunchTimes);
207                 // Finish the data collection, report the KPIs.
208                 // ReportLog keys have to be lowercase underscored format.
209                 mReportLog.addValues("camera_open_time", cameraOpenTimes, ResultType.LOWER_BETTER,
210                         ResultUnit.MS);
211                 mReportLog.addValues("camera_configure_stream_time", configureStreamTimes,
212                         ResultType.LOWER_BETTER, ResultUnit.MS);
213                 mReportLog.addValues("camera_start_preview_time", startPreviewTimes,
214                         ResultType.LOWER_BETTER, ResultUnit.MS);
215                 mReportLog.addValues("camera_camera_stop_preview", stopPreviewTimes,
216                         ResultType.LOWER_BETTER, ResultUnit.MS);
217                 mReportLog.addValues("camera_camera_close_time", cameraCloseTimes,
218                         ResultType.LOWER_BETTER, ResultUnit.MS);
219                 mReportLog.addValues("camera_launch_time", cameraLaunchTimes,
220                         ResultType.LOWER_BETTER, ResultUnit.MS);
221             }
222             finally {
223                 closeDefaultImageReader();
224                 closePreviewSurface();
225             }
226             counter++;
227             mReportLog.submit(mInstrumentation);
228 
229             if (VERBOSE) {
230                 Log.v(TAG, "Camera " + id + " device open times(ms): "
231                         + Arrays.toString(cameraOpenTimes)
232                         + ". Average(ms): " + Stat.getAverage(cameraOpenTimes)
233                         + ". Min(ms): " + Stat.getMin(cameraOpenTimes)
234                         + ". Max(ms): " + Stat.getMax(cameraOpenTimes));
235                 Log.v(TAG, "Camera " + id + " configure stream times(ms): "
236                         + Arrays.toString(configureStreamTimes)
237                         + ". Average(ms): " + Stat.getAverage(configureStreamTimes)
238                         + ". Min(ms): " + Stat.getMin(configureStreamTimes)
239                         + ". Max(ms): " + Stat.getMax(configureStreamTimes));
240                 Log.v(TAG, "Camera " + id + " start preview times(ms): "
241                         + Arrays.toString(startPreviewTimes)
242                         + ". Average(ms): " + Stat.getAverage(startPreviewTimes)
243                         + ". Min(ms): " + Stat.getMin(startPreviewTimes)
244                         + ". Max(ms): " + Stat.getMax(startPreviewTimes));
245                 Log.v(TAG, "Camera " + id + " stop preview times(ms): "
246                         + Arrays.toString(stopPreviewTimes)
247                         + ". Average(ms): " + Stat.getAverage(stopPreviewTimes)
248                         + ". nMin(ms): " + Stat.getMin(stopPreviewTimes)
249                         + ". nMax(ms): " + Stat.getMax(stopPreviewTimes));
250                 Log.v(TAG, "Camera " + id + " device close times(ms): "
251                         + Arrays.toString(cameraCloseTimes)
252                         + ". Average(ms): " + Stat.getAverage(cameraCloseTimes)
253                         + ". Min(ms): " + Stat.getMin(cameraCloseTimes)
254                         + ". Max(ms): " + Stat.getMax(cameraCloseTimes));
255                 Log.v(TAG, "Camera " + id + " camera launch times(ms): "
256                         + Arrays.toString(cameraLaunchTimes)
257                         + ". Average(ms): " + Stat.getAverage(cameraLaunchTimes)
258                         + ". Min(ms): " + Stat.getMin(cameraLaunchTimes)
259                         + ". Max(ms): " + Stat.getMax(cameraLaunchTimes));
260             }
261         }
262         if (mCameraIds.length != 0) {
263             String streamName = "test_camera_launch_average";
264             mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
265             mReportLog.setSummary("camera_launch_average_time_for_all_cameras",
266                     Stat.getAverage(avgCameraLaunchTimes), ResultType.LOWER_BETTER, ResultUnit.MS);
267             mReportLog.submit(mInstrumentation);
268         }
269     }
270 
271     /**
272      * Test camera capture KPI for YUV_420_888, PRIVATE, JPEG, RAW and RAW+JPEG
273      * formats: the time duration between sending out a single image capture request
274      * and receiving image data and capture result.
275      * <p>
276      * It enumerates the following metrics: capture latency, computed by
277      * measuring the time between sending out the capture request and getting
278      * the image data; partial result latency, computed by measuring the time
279      * between sending out the capture request and getting the partial result;
280      * capture result latency, computed by measuring the time between sending
281      * out the capture request and getting the full capture result.
282      * </p>
283      */
testSingleCapture()284     public void testSingleCapture() throws Exception {
285         int[] YUV_FORMAT = {ImageFormat.YUV_420_888};
286         testSingleCaptureForFormat(YUV_FORMAT, null, /*addPreviewDelay*/ false);
287         int[] PRIVATE_FORMAT = {ImageFormat.PRIVATE};
288         testSingleCaptureForFormat(PRIVATE_FORMAT, "private", /*addPreviewDelay*/ true);
289         int[] JPEG_FORMAT = {ImageFormat.JPEG};
290         testSingleCaptureForFormat(JPEG_FORMAT, "jpeg", /*addPreviewDelay*/ true);
291         int[] RAW_FORMAT = {ImageFormat.RAW_SENSOR};
292         testSingleCaptureForFormat(RAW_FORMAT, "raw", /*addPreviewDelay*/ true);
293         int[] RAW_JPEG_FORMATS = {ImageFormat.RAW_SENSOR, ImageFormat.JPEG};
294         testSingleCaptureForFormat(RAW_JPEG_FORMATS, "raw_jpeg", /*addPreviewDelay*/ true);
295     }
296 
appendFormatDescription(String message, String formatDescription)297     private String appendFormatDescription(String message, String formatDescription) {
298         if (message == null) {
299             return null;
300         }
301 
302         String ret = message;
303         if (formatDescription != null) {
304             ret = String.format(ret + "_%s", formatDescription);
305         }
306 
307         return ret;
308     }
309 
    /**
     * Measures single-capture KPIs (capture latency, partial result latency, capture result
     * latency) for one or more output formats on every camera, and reports them via
     * DeviceReportLog.
     *
     * @param formats output image formats to capture simultaneously (e.g. RAW + JPEG).
     * @param formatDescription suffix appended to report keys and stream names; null for the
     *        default (YUV) run.
     * @param addPreviewDelay whether to sleep briefly after starting preview before capturing,
     *        to let the pipeline settle.
     */
    private void testSingleCaptureForFormat(int[] formats, String formatDescription,
            boolean addPreviewDelay) throws Exception {
        double[] avgResultTimes = new double[mCameraIds.length];

        int counter = 0;
        for (String id : mCameraIds) {
            // Do NOT move these variables to outer scope
            // They will be passed to DeviceReportLog and their references will be stored
            String streamName = appendFormatDescription("test_single_capture", formatDescription);
            mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
            mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
            double[] captureTimes = new double[NUM_TEST_LOOPS];
            double[] getPartialTimes = new double[NUM_TEST_LOOPS];
            double[] getResultTimes = new double[NUM_TEST_LOOPS];
            ImageReader[] readers = null;
            try {
                // Skip cameras that cannot produce the requested outputs at all.
                if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
                    continue;
                }

                StreamConfigurationMap configMap = mAllStaticInfo.get(id).getCharacteristics().get(
                        CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
                boolean formatsSupported = true;
                for (int format : formats) {
                    if (!configMap.isOutputSupportedFor(format)) {
                        Log.i(TAG, "Camera " + id + " does not support output format: " + format +
                                " skipping");
                        formatsSupported = false;
                        break;
                    }
                }
                if (!formatsSupported) {
                    continue;
                }

                openDevice(id);

                // Partial-result timing is only meaningful when the device reports more than
                // one partial result per capture; the flag may still be invalidated below.
                boolean partialsExpected = mStaticInfo.getPartialResultCount() > 1;
                long startTimeMs;
                boolean isPartialTimingValid = partialsExpected;
                for (int i = 0; i < NUM_TEST_LOOPS; i++) {

                    // setup builders and listeners
                    CaptureRequest.Builder previewBuilder =
                            mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
                    CaptureRequest.Builder captureBuilder =
                            mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
                    SimpleCaptureCallback previewResultListener =
                            new SimpleCaptureCallback();
                    SimpleTimingResultListener captureResultListener =
                            new SimpleTimingResultListener();
                    SimpleImageListener[] imageListeners = new SimpleImageListener[formats.length];
                    Size[] imageSizes = new Size[formats.length];
                    for (int j = 0; j < formats.length; j++) {
                        // Largest available size for each requested format.
                        imageSizes[j] = CameraTestUtils.getSortedSizesForFormat(
                                id, mCameraManager, formats[j], /*bound*/null).get(0);
                        imageListeners[j] = new SimpleImageListener();
                    }

                    readers = prepareStillCaptureAndStartPreview(previewBuilder, captureBuilder,
                            mOrderedPreviewSizes.get(0), imageSizes, formats,
                            previewResultListener, NUM_MAX_IMAGES, imageListeners,
                            false /*isHeic*/);

                    if (addPreviewDelay) {
                        Thread.sleep(500);
                    }

                    // Capture an image and get image data
                    startTimeMs = SystemClock.elapsedRealtime();
                    CaptureRequest request = captureBuilder.build();
                    mCameraSession.capture(request, captureResultListener, mHandler);

                    Pair<CaptureResult, Long> partialResultNTime = null;
                    if (partialsExpected) {
                        partialResultNTime = captureResultListener.getPartialResultNTimeForRequest(
                            request, NUM_RESULTS_WAIT);
                        // Even if maxPartials > 1, may not see partials for some devices
                        if (partialResultNTime == null) {
                            partialsExpected = false;
                            isPartialTimingValid = false;
                        }
                    }
                    Pair<CaptureResult, Long> captureResultNTime =
                            captureResultListener.getCaptureResultNTimeForRequest(
                                    request, NUM_RESULTS_WAIT);

                    double [] imageTimes = new double[formats.length];
                    for (int j = 0; j < formats.length; j++) {
                        imageListeners[j].waitForImageAvailable(
                                CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
                        imageTimes[j] = imageListeners[j].getTimeReceivedImage();
                    }

                    // Capture latency: average arrival time of all format streams minus the
                    // request-submission time.
                    captureTimes[i] = Stat.getAverage(imageTimes) - startTimeMs;
                    if (partialsExpected) {
                        getPartialTimes[i] = partialResultNTime.second - startTimeMs;
                        if (getPartialTimes[i] < 0) {
                            isPartialTimingValid = false;
                        }
                    }
                    getResultTimes[i] = captureResultNTime.second - startTimeMs;

                    // simulate real scenario (preview runs a bit)
                    CameraTestUtils.waitForNumResults(previewResultListener, NUM_RESULTS_WAIT,
                            WAIT_FOR_RESULT_TIMEOUT_MS);

                    stopPreviewAndDrain();

                    // Close per-iteration readers here; the finally block below only closes
                    // them if an iteration bailed out early (readers != null).
                    CameraTestUtils.closeImageReaders(readers);
                    readers = null;
                }
                String message = appendFormatDescription("camera_capture_latency",
                        formatDescription);
                mReportLog.addValues(message, captureTimes, ResultType.LOWER_BETTER, ResultUnit.MS);
                // If any of the partial results do not contain AE and AF state, then no report
                if (isPartialTimingValid) {
                    message = appendFormatDescription("camera_partial_result_latency",
                            formatDescription);
                    mReportLog.addValues(message, getPartialTimes, ResultType.LOWER_BETTER,
                            ResultUnit.MS);
                }
                message = appendFormatDescription("camera_capture_result_latency",
                        formatDescription);
                mReportLog.addValues(message, getResultTimes, ResultType.LOWER_BETTER,
                        ResultUnit.MS);

                avgResultTimes[counter] = Stat.getAverage(getResultTimes);
            }
            finally {
                CameraTestUtils.closeImageReaders(readers);
                readers = null;
                closeDevice(id);
                closePreviewSurface();
            }
            counter++;
            mReportLog.submit(mInstrumentation);
        }

        // Result will not be reported in CTS report if no summary is printed.
        if (mCameraIds.length != 0) {
            String streamName = appendFormatDescription("test_single_capture_average",
                    formatDescription);
            mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
            String message = appendFormatDescription(
                    "camera_capture_result_average_latency_for_all_cameras", formatDescription);
            mReportLog.setSummary(message, Stat.getAverage(avgResultTimes),
                    ResultType.LOWER_BETTER, ResultUnit.MS);
            mReportLog.submit(mInstrumentation);
        }
    }
462 
463     /**
464      * Test multiple capture KPI for YUV_420_888 format: the average time duration
465      * between sending out image capture requests and receiving capture results.
466      * <p>
467      * It measures capture latency, which is the time between sending out the capture
468      * request and getting the full capture result, and the frame duration, which is the timestamp
469      * gap between results.
470      * </p>
471      */
testMultipleCapture()472     public void testMultipleCapture() throws Exception {
473         double[] avgResultTimes = new double[mCameraIds.length];
474         double[] avgDurationMs = new double[mCameraIds.length];
475 
476         // A simple CaptureSession StateCallback to handle onCaptureQueueEmpty
477         class MultipleCaptureStateCallback extends CameraCaptureSession.StateCallback {
478             private ConditionVariable captureQueueEmptyCond = new ConditionVariable();
479             private int captureQueueEmptied = 0;
480 
481             @Override
482             public void onConfigured(CameraCaptureSession session) {
483                 // Empty implementation
484             }
485 
486             @Override
487             public void onConfigureFailed(CameraCaptureSession session) {
488                 // Empty implementation
489             }
490 
491             @Override
492             public void onCaptureQueueEmpty(CameraCaptureSession session) {
493                 captureQueueEmptied++;
494                 if (VERBOSE) {
495                     Log.v(TAG, "onCaptureQueueEmpty received. captureQueueEmptied = "
496                         + captureQueueEmptied);
497                 }
498 
499                 captureQueueEmptyCond.open();
500             }
501 
502             /* Wait for onCaptureQueueEmpty, return immediately if an onCaptureQueueEmpty was
503              * already received, otherwise, wait for one to arrive. */
504             public void waitForCaptureQueueEmpty(long timeout) {
505                 if (captureQueueEmptied > 0) {
506                     captureQueueEmptied--;
507                     return;
508                 }
509 
510                 if (captureQueueEmptyCond.block(timeout)) {
511                     captureQueueEmptyCond.close();
512                     captureQueueEmptied = 0;
513                 } else {
514                     throw new TimeoutRuntimeException("Unable to receive onCaptureQueueEmpty after "
515                         + timeout + "ms");
516                 }
517             }
518         }
519 
520         final MultipleCaptureStateCallback sessionListener = new MultipleCaptureStateCallback();
521 
522         int counter = 0;
523         for (String id : mCameraIds) {
524             // Do NOT move these variables to outer scope
525             // They will be passed to DeviceReportLog and their references will be stored
526             String streamName = "test_multiple_capture";
527             mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
528             mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
529             long[] startTimes = new long[NUM_MAX_IMAGES];
530             double[] getResultTimes = new double[NUM_MAX_IMAGES];
531             double[] frameDurationMs = new double[NUM_MAX_IMAGES-1];
532             try {
533                 if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
534                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
535                     continue;
536                 }
537 
538                 openDevice(id);
539                 for (int i = 0; i < NUM_TEST_LOOPS; i++) {
540 
541                     // setup builders and listeners
542                     CaptureRequest.Builder previewBuilder =
543                             mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
544                     CaptureRequest.Builder captureBuilder =
545                             mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
546                     SimpleCaptureCallback previewResultListener =
547                             new SimpleCaptureCallback();
548                     SimpleTimingResultListener captureResultListener =
549                             new SimpleTimingResultListener();
550                     SimpleImageReaderListener imageListener =
551                             new SimpleImageReaderListener(/*asyncMode*/true, NUM_MAX_IMAGES);
552 
553                     Size maxYuvSize = CameraTestUtils.getSortedSizesForFormat(
554                         id, mCameraManager, ImageFormat.YUV_420_888, /*bound*/null).get(0);
555                     // Find minimum frame duration for YUV_420_888
556                     StreamConfigurationMap config = mStaticInfo.getCharacteristics().get(
557                             CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
558 
559                     final long minStillFrameDuration =
560                             config.getOutputMinFrameDuration(ImageFormat.YUV_420_888, maxYuvSize);
561                     if (minStillFrameDuration > 0) {
562                         Range<Integer> targetRange =
563                             CameraTestUtils.getSuitableFpsRangeForDuration(id,
564                                     minStillFrameDuration, mStaticInfo);
565                         previewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
566                         captureBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
567                     }
568 
569                     prepareCaptureAndStartPreview(previewBuilder, captureBuilder,
570                             mOrderedPreviewSizes.get(0), maxYuvSize,
571                             ImageFormat.YUV_420_888, previewResultListener,
572                             sessionListener, NUM_MAX_IMAGES, imageListener);
573 
574                     // Converge AE
575                     CameraTestUtils.waitForAeStable(previewResultListener,
576                             NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY, mStaticInfo,
577                             WAIT_FOR_RESULT_TIMEOUT_MS, NUM_RESULTS_WAIT_TIMEOUT);
578 
579                     if (mStaticInfo.isAeLockSupported()) {
580                         // Lock AE if possible to improve stability
581                         previewBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
582                         mCameraSession.setRepeatingRequest(previewBuilder.build(),
583                                 previewResultListener, mHandler);
584                         if (mStaticInfo.isHardwareLevelAtLeastLimited()) {
585                             // Legacy mode doesn't output AE state
586                             CameraTestUtils.waitForResultValue(previewResultListener,
587                                     CaptureResult.CONTROL_AE_STATE,
588                                     CaptureResult.CONTROL_AE_STATE_LOCKED,
589                                     NUM_RESULTS_WAIT_TIMEOUT, WAIT_FOR_RESULT_TIMEOUT_MS);
590                         }
591                     }
592 
593                     // Capture NUM_MAX_IMAGES images based on onCaptureQueueEmpty callback
594                     for (int j = 0; j < NUM_MAX_IMAGES; j++) {
595 
596                         // Capture an image and get image data
597                         startTimes[j] = SystemClock.elapsedRealtime();
598                         CaptureRequest request = captureBuilder.build();
599                         mCameraSession.capture(request, captureResultListener, mHandler);
600 
601                         // Wait for capture queue empty for the current request
602                         sessionListener.waitForCaptureQueueEmpty(
603                                 CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
604                     }
605 
606                     // Acquire the capture result time and frame duration
607                     long prevTimestamp = -1;
608                     for (int j = 0; j < NUM_MAX_IMAGES; j++) {
609                         Pair<CaptureResult, Long> captureResultNTime =
610                                 captureResultListener.getCaptureResultNTime(
611                                         CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS);
612 
613                         getResultTimes[j] +=
614                                 (double)(captureResultNTime.second - startTimes[j])/NUM_TEST_LOOPS;
615 
616                         // Collect inter-frame timestamp
617                         long timestamp = captureResultNTime.first.get(CaptureResult.SENSOR_TIMESTAMP);
618                         if (prevTimestamp != -1) {
619                             frameDurationMs[j-1] +=
620                                     (double)(timestamp - prevTimestamp)/(NUM_TEST_LOOPS * 1000000.0);
621                         }
622                         prevTimestamp = timestamp;
623                     }
624 
625                     // simulate real scenario (preview runs a bit)
626                     CameraTestUtils.waitForNumResults(previewResultListener, NUM_RESULTS_WAIT,
627                             WAIT_FOR_RESULT_TIMEOUT_MS);
628 
629                     stopPreview();
630                 }
631 
632                 for (int i = 0; i < getResultTimes.length; i++) {
633                     Log.v(TAG, "Camera " + id + " result time[" + i + "] is " +
634                             getResultTimes[i] + " ms");
635                 }
636                 for (int i = 0; i < NUM_MAX_IMAGES-1; i++) {
637                     Log.v(TAG, "Camera " + id + " frame duration time[" + i + "] is " +
638                             frameDurationMs[i] + " ms");
639                 }
640 
641                 mReportLog.addValues("camera_multiple_capture_result_latency", getResultTimes,
642                         ResultType.LOWER_BETTER, ResultUnit.MS);
643                 mReportLog.addValues("camera_multiple_capture_frame_duration", frameDurationMs,
644                         ResultType.LOWER_BETTER, ResultUnit.MS);
645 
646 
647                 avgResultTimes[counter] = Stat.getAverage(getResultTimes);
648                 avgDurationMs[counter] = Stat.getAverage(frameDurationMs);
649             }
650             finally {
651                 closeDefaultImageReader();
652                 closeDevice(id);
653                 closePreviewSurface();
654             }
655             counter++;
656             mReportLog.submit(mInstrumentation);
657         }
658 
659         // Result will not be reported in CTS report if no summary is printed.
660         if (mCameraIds.length != 0) {
661             String streamName = "test_multiple_capture_average";
662             mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
663             mReportLog.setSummary("camera_multiple_capture_result_average_latency_for_all_cameras",
664                     Stat.getAverage(avgResultTimes), ResultType.LOWER_BETTER, ResultUnit.MS);
665             mReportLog.submit(mInstrumentation);
666             mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
667             mReportLog.setSummary("camera_multiple_capture_frame_duration_average_for_all_cameras",
668                     Stat.getAverage(avgDurationMs), ResultType.LOWER_BETTER, ResultUnit.MS);
669             mReportLog.submit(mInstrumentation);
670         }
671     }
672 
673     /**
674      * Test reprocessing shot-to-shot latency with default NR and edge options, i.e., from the time
675      * a reprocess request is issued to the time the reprocess image is returned.
676      */
testReprocessingLatency()677     public void testReprocessingLatency() throws Exception {
678         for (String id : mCameraIds) {
679             for (int format : REPROCESS_FORMATS) {
680                 if (!isReprocessSupported(id, format)) {
681                     continue;
682                 }
683 
684                 try {
685                     openDevice(id);
686                     String streamName = "test_reprocessing_latency";
687                     mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
688                     mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
689                     mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
690                     reprocessingPerformanceTestByCamera(format, /*asyncMode*/false,
691                             /*highQuality*/false);
692                 } finally {
693                     closeReaderWriters();
694                     closeDevice(id);
695                     closePreviewSurface();
696                     mReportLog.submit(mInstrumentation);
697                 }
698             }
699         }
700     }
701 
702     /**
703      * Test reprocessing throughput with default NR and edge options, i.e., how many frames can be reprocessed
704      * during a given amount of time.
705      *
706      */
testReprocessingThroughput()707     public void testReprocessingThroughput() throws Exception {
708         for (String id : mCameraIds) {
709             for (int format : REPROCESS_FORMATS) {
710                 if (!isReprocessSupported(id, format)) {
711                     continue;
712                 }
713 
714                 try {
715                     openDevice(id);
716                     String streamName = "test_reprocessing_throughput";
717                     mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
718                     mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
719                     mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
720                     reprocessingPerformanceTestByCamera(format, /*asyncMode*/true,
721                             /*highQuality*/false);
722                 } finally {
723                     closeReaderWriters();
724                     closeDevice(id);
725                     closePreviewSurface();
726                     mReportLog.submit(mInstrumentation);
727                 }
728             }
729         }
730     }
731 
732     /**
733      * Test reprocessing shot-to-shot latency with High Quality NR and edge options, i.e., from the
734      * time a reprocess request is issued to the time the reprocess image is returned.
735      */
testHighQualityReprocessingLatency()736     public void testHighQualityReprocessingLatency() throws Exception {
737         for (String id : mCameraIds) {
738             for (int format : REPROCESS_FORMATS) {
739                 if (!isReprocessSupported(id, format)) {
740                     continue;
741                 }
742 
743                 try {
744                     openDevice(id);
745                     String streamName = "test_high_quality_reprocessing_latency";
746                     mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
747                     mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
748                     mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
749                     reprocessingPerformanceTestByCamera(format, /*asyncMode*/false,
750                             /*requireHighQuality*/true);
751                 } finally {
752                     closeReaderWriters();
753                     closeDevice(id);
754                     closePreviewSurface();
755                     mReportLog.submit(mInstrumentation);
756                 }
757             }
758         }
759     }
760 
761     /**
762      * Test reprocessing throughput with high quality NR and edge options, i.e., how many frames can
763      * be reprocessed during a given amount of time.
764      *
765      */
testHighQualityReprocessingThroughput()766     public void testHighQualityReprocessingThroughput() throws Exception {
767         for (String id : mCameraIds) {
768             for (int format : REPROCESS_FORMATS) {
769                 if (!isReprocessSupported(id, format)) {
770                     continue;
771                 }
772 
773                 try {
774                     openDevice(id);
775                     String streamName = "test_high_quality_reprocessing_throughput";
776                     mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
777                     mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
778                     mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
779                     reprocessingPerformanceTestByCamera(format, /*asyncMode*/true,
780                             /*requireHighQuality*/true);
781                 } finally {
782                     closeReaderWriters();
783                     closeDevice(id);
784                     closePreviewSurface();
785                     mReportLog.submit(mInstrumentation);
786                 }
787             }
788         }
789     }
790 
791     /**
792      * Testing reprocessing caused preview stall (frame drops)
793      */
testReprocessingCaptureStall()794     public void testReprocessingCaptureStall() throws Exception {
795         for (String id : mCameraIds) {
796             for (int format : REPROCESS_FORMATS) {
797                 if (!isReprocessSupported(id, format)) {
798                     continue;
799                 }
800 
801                 try {
802                     openDevice(id);
803                     String streamName = "test_reprocessing_capture_stall";
804                     mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
805                     mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
806                     mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
807                     reprocessingCaptureStallTestByCamera(format);
808                 } finally {
809                     closeReaderWriters();
810                     closeDevice(id);
811                     closePreviewSurface();
812                     mReportLog.submit(mInstrumentation);
813                 }
814             }
815         }
816     }
817 
    /**
     * Measures the preview stall (inter-frame timestamp gaps) caused by issuing reprocess
     * captures while a ZSL/preview stream is running.
     *
     * Streams ZSL frames, builds reprocess requests from matched ZSL image/result pairs
     * with High Quality NR and edge modes (to maximize potential stall), submits them one
     * at a time, and for each records the max timestamp gap and average frame duration of
     * the surrounding preview frames. Finally asserts the max gap stays within the bound
     * implied by the device's reported max capture stall.
     *
     * @param reprocessInputFormat reprocess input image format (e.g. YUV_420_888 or PRIVATE)
     */
    private void reprocessingCaptureStallTestByCamera(int reprocessInputFormat) throws Exception {
        prepareReprocessCapture(reprocessInputFormat);

        // Let it stream for a while before reprocessing
        startZslStreaming();
        waitForFrames(NUM_RESULTS_WAIT);

        final int NUM_REPROCESS_TESTED = MAX_REPROCESS_IMAGES / 2;
        // Prepare several reprocessing request
        Image[] inputImages = new Image[NUM_REPROCESS_TESTED];
        CaptureRequest.Builder[] reprocessReqs = new CaptureRequest.Builder[MAX_REPROCESS_IMAGES];
        for (int i = 0; i < NUM_REPROCESS_TESTED; i++) {
            // Match each ZSL image with the capture result carrying the same sensor
            // timestamp; that result seeds the reprocess request.
            inputImages[i] =
                    mCameraZslImageListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
            TotalCaptureResult zslResult =
                    mZslResultListener.getCaptureResult(
                            WAIT_FOR_RESULT_TIMEOUT_MS, inputImages[i].getTimestamp());
            reprocessReqs[i] = mCamera.createReprocessCaptureRequest(zslResult);
            reprocessReqs[i].addTarget(mJpegReader.getSurface());
            // High Quality modes are the heaviest processing path, so they expose the
            // worst-case stall.
            reprocessReqs[i].set(CaptureRequest.NOISE_REDUCTION_MODE,
                    CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY);
            reprocessReqs[i].set(CaptureRequest.EDGE_MODE,
                    CaptureRequest.EDGE_MODE_HIGH_QUALITY);
            mWriter.queueInputImage(inputImages[i]);
        }

        double[] maxCaptureGapsMs = new double[NUM_REPROCESS_TESTED];
        double[] averageFrameDurationMs = new double[NUM_REPROCESS_TESTED];
        Arrays.fill(averageFrameDurationMs, 0.0);
        final int MAX_REPROCESS_RETURN_FRAME_COUNT = 20;
        SimpleCaptureCallback reprocessResultListener = new SimpleCaptureCallback();
        for (int i = 0; i < NUM_REPROCESS_TESTED; i++) {
            // Drain stale preview results so frame counting starts from this capture.
            mZslResultListener.drain();
            CaptureRequest reprocessRequest = reprocessReqs[i].build();
            mCameraSession.capture(reprocessRequest, reprocessResultListener, mHandler);
            // Wait for reprocess output jpeg and result come back.
            reprocessResultListener.getCaptureResultForRequest(reprocessRequest,
                    CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS);
            mJpegListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS).close();
            long numFramesMaybeStalled = mZslResultListener.getTotalNumFrames();
            assertTrue("Reprocess capture result should be returned in "
                    + MAX_REPROCESS_RETURN_FRAME_COUNT + " frames",
                    numFramesMaybeStalled <= MAX_REPROCESS_RETURN_FRAME_COUNT);

            // Need look longer time, as the stutter could happen after the reprocessing
            // output frame is received.
            long[] timestampGap = new long[MAX_REPROCESS_RETURN_FRAME_COUNT + 1];
            Arrays.fill(timestampGap, 0);
            CaptureResult[] results = new CaptureResult[timestampGap.length];
            long[] frameDurationsNs = new long[timestampGap.length];
            for (int j = 0; j < results.length; j++) {
                results[j] = mZslResultListener.getCaptureResult(
                        CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
                if (j > 0) {
                    // Gap between consecutive preview frames, in nanoseconds.
                    timestampGap[j] = results[j].get(CaptureResult.SENSOR_TIMESTAMP) -
                            results[j - 1].get(CaptureResult.SENSOR_TIMESTAMP);
                    assertTrue("Time stamp should be monotonically increasing",
                            timestampGap[j] > 0);
                }
                frameDurationsNs[j] = results[j].get(CaptureResult.SENSOR_FRAME_DURATION);
            }

            if (VERBOSE) {
                Log.i(TAG, "timestampGap: " + Arrays.toString(timestampGap));
                Log.i(TAG, "frameDurationsNs: " + Arrays.toString(frameDurationsNs));
            }

            // Get the number of candidate results, calculate the average frame duration
            // and max timestamp gap. Sorting puts the largest gap at the end of the array.
            Arrays.sort(timestampGap);
            double maxTimestampGapMs = timestampGap[timestampGap.length - 1] / 1000000.0;
            for (int m = 0; m < frameDurationsNs.length; m++) {
                averageFrameDurationMs[i] += (frameDurationsNs[m] / 1000000.0);
            }
            averageFrameDurationMs[i] /= frameDurationsNs.length;

            maxCaptureGapsMs[i] = maxTimestampGapMs;
        }

        stopZslStreaming();

        String reprocessType = "YUV reprocessing";
        if (reprocessInputFormat == ImageFormat.PRIVATE) {
            reprocessType = "opaque reprocessing";
        }
        mReportLog.addValue("reprocess_type", reprocessType, ResultType.NEUTRAL, ResultUnit.NONE);
        mReportLog.addValues("max_capture_timestamp_gaps", maxCaptureGapsMs,
                ResultType.LOWER_BETTER, ResultUnit.MS);
        mReportLog.addValues("capture_average_frame_duration", averageFrameDurationMs,
                ResultType.LOWER_BETTER, ResultUnit.MS);
        mReportLog.setSummary("camera_reprocessing_average_max_capture_timestamp_gaps",
                Stat.getAverage(maxCaptureGapsMs), ResultType.LOWER_BETTER, ResultUnit.MS);

        // The max timestamp gap should be less than (captureStall + 1) x average frame
        // duration * (1 + error margin).
        int maxCaptureStallFrames = mStaticInfo.getMaxCaptureStallOrDefault();
        for (int i = 0; i < maxCaptureGapsMs.length; i++) {
            double stallDurationBound = averageFrameDurationMs[i] *
                    (maxCaptureStallFrames + 1) * (1 + REPROCESS_STALL_MARGIN);
            assertTrue("max capture stall duration should be no larger than " + stallDurationBound,
                    maxCaptureGapsMs[i] <= stallDurationBound);
        }
    }
921 
reprocessingPerformanceTestByCamera(int reprocessInputFormat, boolean asyncMode, boolean requireHighQuality)922     private void reprocessingPerformanceTestByCamera(int reprocessInputFormat, boolean asyncMode,
923             boolean requireHighQuality)
924             throws Exception {
925         // Prepare the reprocessing capture
926         prepareReprocessCapture(reprocessInputFormat);
927 
928         // Start ZSL streaming
929         startZslStreaming();
930         waitForFrames(NUM_RESULTS_WAIT);
931 
932         CaptureRequest.Builder[] reprocessReqs = new CaptureRequest.Builder[MAX_REPROCESS_IMAGES];
933         Image[] inputImages = new Image[MAX_REPROCESS_IMAGES];
934         double[] getImageLatenciesMs = new double[MAX_REPROCESS_IMAGES];
935         long startTimeMs;
936         for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
937             inputImages[i] =
938                     mCameraZslImageListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
939             TotalCaptureResult zslResult =
940                     mZslResultListener.getCaptureResult(
941                             WAIT_FOR_RESULT_TIMEOUT_MS, inputImages[i].getTimestamp());
942             reprocessReqs[i] = mCamera.createReprocessCaptureRequest(zslResult);
943             if (requireHighQuality) {
944                 // Reprocessing should support high quality for NR and edge modes.
945                 reprocessReqs[i].set(CaptureRequest.NOISE_REDUCTION_MODE,
946                         CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY);
947                 reprocessReqs[i].set(CaptureRequest.EDGE_MODE,
948                         CaptureRequest.EDGE_MODE_HIGH_QUALITY);
949             }
950             reprocessReqs[i].addTarget(mJpegReader.getSurface());
951         }
952 
953         if (asyncMode) {
954             // async capture: issue all the reprocess requests as quick as possible, then
955             // check the throughput of the output jpegs.
956             for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
957                 // Could be slow for YUV reprocessing, do it in advance.
958                 mWriter.queueInputImage(inputImages[i]);
959             }
960 
961             // Submit the requests
962             for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
963                 mCameraSession.capture(reprocessReqs[i].build(), null, null);
964             }
965 
966             // Get images
967             startTimeMs = SystemClock.elapsedRealtime();
968             Image jpegImages[] = new Image[MAX_REPROCESS_IMAGES];
969             for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
970                 jpegImages[i] = mJpegListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
971                 getImageLatenciesMs[i] = SystemClock.elapsedRealtime() - startTimeMs;
972                 startTimeMs = SystemClock.elapsedRealtime();
973             }
974             for (Image i : jpegImages) {
975                 i.close();
976             }
977         } else {
978             // sync capture: issue reprocess request one by one, only submit next one when
979             // the previous capture image is returned. This is to test the back to back capture
980             // performance.
981             Image jpegImages[] = new Image[MAX_REPROCESS_IMAGES];
982             for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
983                 startTimeMs = SystemClock.elapsedRealtime();
984                 mWriter.queueInputImage(inputImages[i]);
985                 mCameraSession.capture(reprocessReqs[i].build(), null, null);
986                 jpegImages[i] = mJpegListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
987                 getImageLatenciesMs[i] = SystemClock.elapsedRealtime() - startTimeMs;
988             }
989             for (Image i : jpegImages) {
990                 i.close();
991             }
992         }
993 
994         stopZslStreaming();
995 
996         String reprocessType = "YUV reprocessing";
997         if (reprocessInputFormat == ImageFormat.PRIVATE) {
998             reprocessType = "opaque reprocessing";
999         }
1000 
1001         // Report the performance data
1002         String captureMsg;
1003         if (asyncMode) {
1004             captureMsg = "capture latency";
1005             if (requireHighQuality) {
1006                 captureMsg += " for High Quality noise reduction and edge modes";
1007             }
1008             mReportLog.addValue("reprocess_type", reprocessType, ResultType.NEUTRAL,
1009                     ResultUnit.NONE);
1010             mReportLog.addValue("capture_message", captureMsg, ResultType.NEUTRAL,
1011                     ResultUnit.NONE);
1012             mReportLog.addValues("latency", getImageLatenciesMs, ResultType.LOWER_BETTER,
1013                     ResultUnit.MS);
1014             mReportLog.setSummary("camera_reprocessing_average_latency",
1015                     Stat.getAverage(getImageLatenciesMs), ResultType.LOWER_BETTER, ResultUnit.MS);
1016         } else {
1017             captureMsg = "shot to shot latency";
1018             if (requireHighQuality) {
1019                 captureMsg += " for High Quality noise reduction and edge modes";
1020             }
1021             mReportLog.addValue("reprocess_type", reprocessType, ResultType.NEUTRAL,
1022                     ResultUnit.NONE);
1023             mReportLog.addValue("capture_message", captureMsg, ResultType.NEUTRAL,
1024                     ResultUnit.NONE);
1025             mReportLog.addValues("latency", getImageLatenciesMs, ResultType.LOWER_BETTER,
1026                     ResultUnit.MS);
1027             mReportLog.setSummary("camera_reprocessing_shot_to_shot_average_latency",
1028                     Stat.getAverage(getImageLatenciesMs), ResultType.LOWER_BETTER, ResultUnit.MS);
1029         }
1030     }
1031 
1032     /**
1033      * Start preview and ZSL streaming
1034      */
startZslStreaming()1035     private void startZslStreaming() throws Exception {
1036         CaptureRequest.Builder zslBuilder =
1037                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
1038         zslBuilder.addTarget(mPreviewSurface);
1039         zslBuilder.addTarget(mCameraZslReader.getSurface());
1040         mCameraSession.setRepeatingRequest(zslBuilder.build(), mZslResultListener, mHandler);
1041     }
1042 
    /**
     * Stop the repeating ZSL request and block until the session reports SESSION_READY,
     * so teardown or reconfiguration does not race with in-flight captures.
     */
    private void stopZslStreaming() throws Exception {
        mCameraSession.stopRepeating();
        mCameraSessionListener.getStateWaiter().waitForState(
            BlockingSessionCallback.SESSION_READY, CameraTestUtils.CAMERA_IDLE_TIMEOUT_MS);
    }
1048 
1049     /**
1050      * Wait for a certain number of frames, the images and results will be drained from the
1051      * listeners to make sure that next reprocessing can get matched results and images.
1052      *
1053      * @param numFrameWait The number of frames to wait before return, 0 means that
1054      *      this call returns immediately after streaming on.
1055      */
waitForFrames(int numFrameWait)1056     private void waitForFrames(int numFrameWait) throws Exception {
1057         if (numFrameWait < 0) {
1058             throw new IllegalArgumentException("numFrameWait " + numFrameWait +
1059                     " should be non-negative");
1060         }
1061 
1062         for (int i = 0; i < numFrameWait; i++) {
1063             mCameraZslImageListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS).close();
1064         }
1065     }
1066 
    /**
     * Release the ZSL reader, the JPEG reader, and the reprocess input writer.
     * Each listener is drained first so images still queued in a reader are closed
     * before the reader itself is shut down.
     */
    private void closeReaderWriters() {
        mCameraZslImageListener.drain();
        CameraTestUtils.closeImageReader(mCameraZslReader);
        mCameraZslReader = null;
        mJpegListener.drain();
        CameraTestUtils.closeImageReader(mJpegReader);
        mJpegReader = null;
        CameraTestUtils.closeImageWriter(mWriter);
        mWriter = null;
    }
1077 
    /**
     * Configure a reprocessable capture session for the given input format.
     *
     * Sets up: (1) max-size preview/input/JPEG dimensions, (2) the ZSL and JPEG output
     * ImageReaders plus their listeners, (3) a reprocessable session with a matching
     * InputConfiguration, and an ImageWriter feeding the session's input surface.
     * Results are stored in the corresponding member fields (mCameraZslReader,
     * mJpegReader, mCameraSession, mWriter, ...).
     *
     * @param inputFormat reprocess input image format (e.g. YUV_420_888 or PRIVATE)
     * @throws CameraAccessException if the camera session cannot be configured
     */
    private void prepareReprocessCapture(int inputFormat)
                    throws CameraAccessException {
        // 1. Find the right preview and capture sizes.
        Size maxPreviewSize = mOrderedPreviewSizes.get(0);
        Size[] supportedInputSizes =
                mStaticInfo.getAvailableSizesForFormatChecked(inputFormat,
                StaticMetadata.StreamDirection.Input);
        Size maxInputSize = CameraTestUtils.getMaxSize(supportedInputSizes);
        Size maxJpegSize = mOrderedStillSizes.get(0);
        updatePreviewSurface(maxPreviewSize);
        mZslResultListener = new SimpleCaptureCallback();

        // 2. Create camera output ImageReaders.
        // YUV/Opaque output, camera should support output with input size/format
        mCameraZslImageListener = new SimpleImageReaderListener(
                /*asyncMode*/true, MAX_ZSL_IMAGES - MAX_REPROCESS_IMAGES);
        mCameraZslReader = CameraTestUtils.makeImageReader(
                maxInputSize, inputFormat, MAX_ZSL_IMAGES, mCameraZslImageListener, mHandler);
        // Jpeg reprocess output
        mJpegListener = new SimpleImageReaderListener();
        mJpegReader = CameraTestUtils.makeImageReader(
                maxJpegSize, ImageFormat.JPEG, MAX_JPEG_IMAGES, mJpegListener, mHandler);

        // create camera reprocess session
        List<Surface> outSurfaces = new ArrayList<Surface>();
        outSurfaces.add(mPreviewSurface);
        outSurfaces.add(mCameraZslReader.getSurface());
        outSurfaces.add(mJpegReader.getSurface());
        // Input configuration must match the ZSL reader's size/format for reprocessing.
        InputConfiguration inputConfig = new InputConfiguration(maxInputSize.getWidth(),
                maxInputSize.getHeight(), inputFormat);
        mCameraSessionListener = new BlockingSessionCallback();
        mCameraSession = CameraTestUtils.configureReprocessableCameraSession(
                mCamera, inputConfig, outSurfaces, mCameraSessionListener, mHandler);

        // 3. Create ImageWriter for input
        mWriter = CameraTestUtils.makeImageWriter(
                mCameraSession.getInputSurface(), MAX_INPUT_IMAGES, /*listener*/null, /*handler*/null);

    }
1117 
    /**
     * Stop the preview and block until the capture session reports SESSION_CLOSED.
     */
    private void blockingStopPreview() throws Exception {
        stopPreview();
        mCameraSessionListener.getStateWaiter().waitForState(SESSION_CLOSED,
                CameraTestUtils.SESSION_CLOSE_TIMEOUT_MS);
    }
1123 
blockingStartPreview(CaptureCallback listener, SimpleImageListener imageListener)1124     private void blockingStartPreview(CaptureCallback listener, SimpleImageListener imageListener)
1125             throws Exception {
1126         if (mPreviewSurface == null || mReaderSurface == null) {
1127             throw new IllegalStateException("preview and reader surface must be initilized first");
1128         }
1129 
1130         CaptureRequest.Builder previewBuilder =
1131                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1132         if (mStaticInfo.isColorOutputSupported()) {
1133             previewBuilder.addTarget(mPreviewSurface);
1134         }
1135         previewBuilder.addTarget(mReaderSurface);
1136         mCameraSession.setRepeatingRequest(previewBuilder.build(), listener, mHandler);
1137         imageListener.waitForImageAvailable(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
1138     }
1139 
1140     /**
1141      * Setup still capture configuration and start preview.
1142      *
1143      * @param previewRequest The capture request to be used for preview
1144      * @param stillRequest The capture request to be used for still capture
1145      * @param previewSz Preview size
1146      * @param captureSizes Still capture sizes
1147      * @param formats The single capture image formats
1148      * @param resultListener Capture result listener
1149      * @param maxNumImages The max number of images set to the image reader
1150      * @param imageListeners The single capture capture image listeners
1151      * @param isHeic Capture HEIC image if true, JPEG image if false
1152      */
prepareStillCaptureAndStartPreview( CaptureRequest.Builder previewRequest, CaptureRequest.Builder stillRequest, Size previewSz, Size[] captureSizes, int[] formats, CaptureCallback resultListener, int maxNumImages, ImageReader.OnImageAvailableListener[] imageListeners, boolean isHeic)1153     private ImageReader[] prepareStillCaptureAndStartPreview(
1154             CaptureRequest.Builder previewRequest, CaptureRequest.Builder stillRequest,
1155             Size previewSz, Size[] captureSizes, int[] formats, CaptureCallback resultListener,
1156             int maxNumImages, ImageReader.OnImageAvailableListener[] imageListeners,
1157             boolean isHeic)
1158             throws Exception {
1159 
1160         if ((captureSizes == null) || (formats == null) || (imageListeners == null) &&
1161                 (captureSizes.length != formats.length) ||
1162                 (formats.length != imageListeners.length)) {
1163             throw new IllegalArgumentException("Invalid capture sizes/formats or image listeners!");
1164         }
1165 
1166         if (VERBOSE) {
1167             Log.v(TAG, String.format("Prepare still capture and preview (%s)",
1168                     previewSz.toString()));
1169         }
1170 
1171         // Update preview size.
1172         updatePreviewSurface(previewSz);
1173 
1174         ImageReader[] readers = new ImageReader[captureSizes.length];
1175         List<Surface> outputSurfaces = new ArrayList<Surface>();
1176         outputSurfaces.add(mPreviewSurface);
1177         for (int i = 0; i < captureSizes.length; i++) {
1178             readers[i] = CameraTestUtils.makeImageReader(captureSizes[i], formats[i], maxNumImages,
1179                     imageListeners[i], mHandler);
1180             outputSurfaces.add(readers[i].getSurface());
1181         }
1182 
1183         mCameraSessionListener = new BlockingSessionCallback();
1184         mCameraSession = CameraTestUtils.configureCameraSession(mCamera, outputSurfaces,
1185                 mCameraSessionListener, mHandler);
1186 
1187         // Configure the requests.
1188         previewRequest.addTarget(mPreviewSurface);
1189         stillRequest.addTarget(mPreviewSurface);
1190         for (int i = 0; i < readers.length; i++) {
1191             stillRequest.addTarget(readers[i].getSurface());
1192         }
1193 
1194         // Start preview.
1195         mCameraSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
1196 
1197         return readers;
1198     }
1199 
1200     /**
1201      * Setup single capture configuration and start preview.
1202      *
1203      * @param previewRequest The capture request to be used for preview
1204      * @param stillRequest The capture request to be used for still capture
1205      * @param previewSz Preview size
1206      * @param captureSz Still capture size
1207      * @param format The single capture image format
1208      * @param resultListener Capture result listener
1209      * @param sessionListener Session listener
1210      * @param maxNumImages The max number of images set to the image reader
1211      * @param imageListener The single capture capture image listener
1212      */
prepareCaptureAndStartPreview(CaptureRequest.Builder previewRequest, CaptureRequest.Builder stillRequest, Size previewSz, Size captureSz, int format, CaptureCallback resultListener, CameraCaptureSession.StateCallback sessionListener, int maxNumImages, ImageReader.OnImageAvailableListener imageListener)1213     private void prepareCaptureAndStartPreview(CaptureRequest.Builder previewRequest,
1214             CaptureRequest.Builder stillRequest, Size previewSz, Size captureSz, int format,
1215             CaptureCallback resultListener, CameraCaptureSession.StateCallback sessionListener,
1216             int maxNumImages, ImageReader.OnImageAvailableListener imageListener) throws Exception {
1217         if ((captureSz == null) || (imageListener == null)) {
1218             throw new IllegalArgumentException("Invalid capture size or image listener!");
1219         }
1220 
1221         if (VERBOSE) {
1222             Log.v(TAG, String.format("Prepare single capture (%s) and preview (%s)",
1223                     captureSz.toString(), previewSz.toString()));
1224         }
1225 
1226         // Update preview size.
1227         updatePreviewSurface(previewSz);
1228 
1229         // Create ImageReader.
1230         createDefaultImageReader(captureSz, format, maxNumImages, imageListener);
1231 
1232         // Configure output streams with preview and jpeg streams.
1233         List<Surface> outputSurfaces = new ArrayList<Surface>();
1234         outputSurfaces.add(mPreviewSurface);
1235         outputSurfaces.add(mReaderSurface);
1236         if (sessionListener == null) {
1237             mCameraSessionListener = new BlockingSessionCallback();
1238         } else {
1239             mCameraSessionListener = new BlockingSessionCallback(sessionListener);
1240         }
1241         mCameraSession = CameraTestUtils.configureCameraSession(mCamera, outputSurfaces,
1242                 mCameraSessionListener, mHandler);
1243 
1244         // Configure the requests.
1245         previewRequest.addTarget(mPreviewSurface);
1246         stillRequest.addTarget(mPreviewSurface);
1247         stillRequest.addTarget(mReaderSurface);
1248 
1249         // Start preview.
1250         mCameraSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
1251     }
1252 
1253     /**
1254      * Update the preview surface size.
1255      *
1256      * @param size The preview size to be updated.
1257      */
updatePreviewSurface(Size size)1258     private void updatePreviewSurface(Size size) {
1259         if ((mPreviewSurfaceTexture != null ) || (mPreviewSurface != null)) {
1260             closePreviewSurface();
1261         }
1262 
1263         mPreviewSurfaceTexture = new SurfaceTexture(/*random int*/ 1);
1264         mPreviewSurfaceTexture.setDefaultBufferSize(size.getWidth(), size.getHeight());
1265         mPreviewSurface = new Surface(mPreviewSurfaceTexture);
1266     }
1267 
1268     /**
1269      * Release preview surface and corresponding surface texture.
1270      */
closePreviewSurface()1271     private void closePreviewSurface() {
1272         if (mPreviewSurface != null) {
1273             mPreviewSurface.release();
1274             mPreviewSurface = null;
1275         }
1276 
1277         if (mPreviewSurfaceTexture != null) {
1278             mPreviewSurfaceTexture.release();
1279             mPreviewSurfaceTexture = null;
1280         }
1281     }
1282 
isReprocessSupported(String cameraId, int format)1283     private boolean isReprocessSupported(String cameraId, int format)
1284             throws CameraAccessException {
1285         if (format != ImageFormat.YUV_420_888 && format != ImageFormat.PRIVATE) {
1286             throw new IllegalArgumentException(
1287                     "format " + format + " is not supported for reprocessing");
1288         }
1289 
1290         StaticMetadata info = new StaticMetadata(
1291                 mCameraManager.getCameraCharacteristics(cameraId), CheckLevel.ASSERT,
1292                 /*collector*/ null);
1293         int cap = CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING;
1294         if (format == ImageFormat.PRIVATE) {
1295             cap = CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING;
1296         }
1297         return info.isCapabilitySupported(cap);
1298     }
1299 
1300     /**
1301      * Stop preview for current camera device by closing the session.
1302      * Does _not_ wait for the device to go idle
1303      */
stopPreview()1304     private void stopPreview() throws Exception {
1305         // Stop repeat, wait for captures to complete, and disconnect from surfaces
1306         if (mCameraSession != null) {
1307             if (VERBOSE) Log.v(TAG, "Stopping preview");
1308             mCameraSession.close();
1309         }
1310     }
1311 
1312     /**
1313      * Stop preview for current camera device by closing the session and waiting for it to close,
1314      * resulting in an idle device.
1315      */
stopPreviewAndDrain()1316     private void stopPreviewAndDrain() throws Exception {
1317         // Stop repeat, wait for captures to complete, and disconnect from surfaces
1318         if (mCameraSession != null) {
1319             if (VERBOSE) Log.v(TAG, "Stopping preview and waiting for idle");
1320             mCameraSession.close();
1321             mCameraSessionListener.getStateWaiter().waitForState(
1322                     BlockingSessionCallback.SESSION_CLOSED,
1323                     /*timeoutMs*/WAIT_FOR_RESULT_TIMEOUT_MS);
1324         }
1325     }
1326 
1327     /**
1328      * Configure reader and preview outputs and wait until done.
1329      */
configureReaderAndPreviewOutputs()1330     private void configureReaderAndPreviewOutputs() throws Exception {
1331         if (mPreviewSurface == null || mReaderSurface == null) {
1332             throw new IllegalStateException("preview and reader surface must be initilized first");
1333         }
1334         mCameraSessionListener = new BlockingSessionCallback();
1335         List<Surface> outputSurfaces = new ArrayList<>();
1336         if (mStaticInfo.isColorOutputSupported()) {
1337             outputSurfaces.add(mPreviewSurface);
1338         }
1339         outputSurfaces.add(mReaderSurface);
1340         mCameraSession = CameraTestUtils.configureCameraSession(mCamera, outputSurfaces,
1341                 mCameraSessionListener, mHandler);
1342     }
1343 
1344     /**
1345      * Initialize the ImageReader instance and preview surface.
1346      * @param cameraId The camera to be opened.
1347      * @param format The format used to create ImageReader instance.
1348      */
initializeImageReader(String cameraId, int format)1349     private void initializeImageReader(String cameraId, int format) throws Exception {
1350         mOrderedPreviewSizes = CameraTestUtils.getSortedSizesForFormat(
1351                 cameraId, mCameraManager, format,
1352                 CameraTestUtils.getPreviewSizeBound(mWindowManager,
1353                     CameraTestUtils.PREVIEW_SIZE_BOUND));
1354         Size maxPreviewSize = mOrderedPreviewSizes.get(0);
1355         createDefaultImageReader(maxPreviewSize, format, NUM_MAX_IMAGES, /*listener*/null);
1356         updatePreviewSurface(maxPreviewSize);
1357     }
1358 
simpleOpenCamera(String cameraId)1359     private void simpleOpenCamera(String cameraId) throws Exception {
1360         mCamera = CameraTestUtils.openCamera(
1361                 mCameraManager, cameraId, mCameraListener, mHandler);
1362         mCollector.setCameraId(cameraId);
1363         mStaticInfo = new StaticMetadata(mCameraManager.getCameraCharacteristics(cameraId),
1364                 CheckLevel.ASSERT, /*collector*/null);
1365     }
1366 
1367     /**
1368      * Simple image listener that can be used to time the availability of first image.
1369      *
1370      */
1371     private static class SimpleImageListener implements ImageReader.OnImageAvailableListener {
1372         private ConditionVariable imageAvailable = new ConditionVariable();
1373         private boolean imageReceived = false;
1374         private long mTimeReceivedImage = 0;
1375 
1376         @Override
onImageAvailable(ImageReader reader)1377         public void onImageAvailable(ImageReader reader) {
1378             Image image = null;
1379             if (!imageReceived) {
1380                 if (VERBOSE) {
1381                     Log.v(TAG, "First image arrives");
1382                 }
1383                 imageReceived = true;
1384                 mTimeReceivedImage = SystemClock.elapsedRealtime();
1385                 imageAvailable.open();
1386             }
1387             image = reader.acquireNextImage();
1388             if (image != null) {
1389                 image.close();
1390             }
1391         }
1392 
1393         /**
1394          * Wait for image available, return immediately if the image was already
1395          * received, otherwise wait until an image arrives.
1396          */
waitForImageAvailable(long timeout)1397         public void waitForImageAvailable(long timeout) {
1398             if (imageReceived) {
1399                 imageReceived = false;
1400                 return;
1401             }
1402 
1403             if (imageAvailable.block(timeout)) {
1404                 imageAvailable.close();
1405                 imageReceived = true;
1406             } else {
1407                 throw new TimeoutRuntimeException("Unable to get the first image after "
1408                         + CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS + "ms");
1409             }
1410         }
1411 
getTimeReceivedImage()1412         public long getTimeReceivedImage() {
1413             return mTimeReceivedImage;
1414         }
1415     }
1416 
1417     private static class SimpleTimingResultListener
1418             extends CameraCaptureSession.CaptureCallback {
1419         private final LinkedBlockingQueue<Pair<CaptureResult, Long> > mPartialResultQueue =
1420                 new LinkedBlockingQueue<Pair<CaptureResult, Long> >();
1421         private final LinkedBlockingQueue<Pair<CaptureResult, Long> > mResultQueue =
1422                 new LinkedBlockingQueue<Pair<CaptureResult, Long> > ();
1423 
1424         @Override
onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result)1425         public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
1426                 TotalCaptureResult result) {
1427             try {
1428                 Long time = SystemClock.elapsedRealtime();
1429                 mResultQueue.put(new Pair<CaptureResult, Long>(result, time));
1430             } catch (InterruptedException e) {
1431                 throw new UnsupportedOperationException(
1432                         "Can't handle InterruptedException in onCaptureCompleted");
1433             }
1434         }
1435 
1436         @Override
onCaptureProgressed(CameraCaptureSession session, CaptureRequest request, CaptureResult partialResult)1437         public void onCaptureProgressed(CameraCaptureSession session, CaptureRequest request,
1438                 CaptureResult partialResult) {
1439             try {
1440                 // check if AE and AF state exists
1441                 Long time = -1L;
1442                 if (partialResult.get(CaptureResult.CONTROL_AE_STATE) != null &&
1443                         partialResult.get(CaptureResult.CONTROL_AF_STATE) != null) {
1444                     time = SystemClock.elapsedRealtime();
1445                 }
1446                 mPartialResultQueue.put(new Pair<CaptureResult, Long>(partialResult, time));
1447             } catch (InterruptedException e) {
1448                 throw new UnsupportedOperationException(
1449                         "Can't handle InterruptedException in onCaptureProgressed");
1450             }
1451         }
1452 
getPartialResultNTime(long timeout)1453         public Pair<CaptureResult, Long> getPartialResultNTime(long timeout) {
1454             try {
1455                 Pair<CaptureResult, Long> result =
1456                         mPartialResultQueue.poll(timeout, TimeUnit.MILLISECONDS);
1457                 return result;
1458             } catch (InterruptedException e) {
1459                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
1460             }
1461         }
1462 
getCaptureResultNTime(long timeout)1463         public Pair<CaptureResult, Long> getCaptureResultNTime(long timeout) {
1464             try {
1465                 Pair<CaptureResult, Long> result =
1466                         mResultQueue.poll(timeout, TimeUnit.MILLISECONDS);
1467                 assertNotNull("Wait for a capture result timed out in " + timeout + "ms", result);
1468                 return result;
1469             } catch (InterruptedException e) {
1470                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
1471             }
1472         }
1473 
getPartialResultNTimeForRequest(CaptureRequest myRequest, int numResultsWait)1474         public Pair<CaptureResult, Long> getPartialResultNTimeForRequest(CaptureRequest myRequest,
1475                 int numResultsWait) {
1476             if (numResultsWait < 0) {
1477                 throw new IllegalArgumentException("numResultsWait must be no less than 0");
1478             }
1479 
1480             Pair<CaptureResult, Long> result;
1481             int i = 0;
1482             do {
1483                 result = getPartialResultNTime(CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS);
1484                 // The result may be null if no partials are produced on this particular path, so
1485                 // stop trying
1486                 if (result == null) break;
1487                 if (result.first.getRequest().equals(myRequest)) {
1488                     return result;
1489                 }
1490             } while (i++ < numResultsWait);
1491 
1492             // No partials produced - this may not be an error, since a given device may not
1493             // produce any partials on this testing path
1494             return null;
1495         }
1496 
getCaptureResultNTimeForRequest(CaptureRequest myRequest, int numResultsWait)1497         public Pair<CaptureResult, Long> getCaptureResultNTimeForRequest(CaptureRequest myRequest,
1498                 int numResultsWait) {
1499             if (numResultsWait < 0) {
1500                 throw new IllegalArgumentException("numResultsWait must be no less than 0");
1501             }
1502 
1503             Pair<CaptureResult, Long> result;
1504             int i = 0;
1505             do {
1506                 result = getCaptureResultNTime(CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS);
1507                 if (result.first.getRequest().equals(myRequest)) {
1508                     return result;
1509                 }
1510             } while (i++ < numResultsWait);
1511 
1512             throw new TimeoutRuntimeException("Unable to get the expected capture result after "
1513                     + "waiting for " + numResultsWait + " results");
1514         }
1515 
1516     }
1517 }
1518