/*
 * Copyright 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media.cts;

import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible;

import android.content.Context;
import android.content.res.AssetFileDescriptor;
import android.content.res.Resources;
import android.content.res.Resources.NotFoundException;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.media.Image;
import android.media.Image.Plane;
import android.media.ImageReader;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecInfo.VideoCapabilities;
import android.media.MediaCodecList;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.media.cts.CodecUtils;
import android.media.cts.R;
import android.os.Handler;
import android.os.HandlerThread;
import android.platform.test.annotations.RequiresDevice;
import android.test.AndroidTestCase;
import android.util.Log;
import android.view.Surface;

import androidx.test.filters.SmallTest;

import com.android.compatibility.common.util.MediaUtils;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

/**
 * Basic test for ImageReader APIs.
 * <p>
 * It uses MediaCodec to decode short video streams and sends the decoded frames to
 * the surface provided by an ImageReader, then checks that the images delivered by
 * the ImageReader match the output of the MediaCodec. The exact compression format
 * doesn't matter for this test; the swirl clips cover H.263, MPEG-4, AVC, HEVC, VP8
 * and VP9, and both hardware and software decoders are exercised.
 * </p>
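 * <p>
 * As a rough, minimal sketch (variable names are illustrative; the real test below
 * adds EOS handling, timeouts and per-frame validation), the wiring under test is:
 * </p>
 * <pre>{@code
 * ImageReader reader = ImageReader.newInstance(
 *         width, height, ImageFormat.YUV_420_888, 1);      // maxImages = 1
 * reader.setOnImageAvailableListener(listener, handler);
 * MediaCodec decoder = MediaCodec.createByCodecName(codecName);
 * decoder.configure(format, reader.getSurface(), null, 0); // no crypto, no flags
 * decoder.start();
 * // ... feed compressed samples from a MediaExtractor, then per output buffer:
 * decoder.releaseOutputBuffer(outputIndex, true);          // render to the reader surface
 * Image image = listener.getImage(timeoutMs);              // wait for the rendered frame
 * // validate the image, then close it so the next frame can be acquired
 * image.close();
 * }</pre>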
 */
@SmallTest
@RequiresDevice
public class ImageReaderDecoderTest extends AndroidTestCase {
    private static final String TAG = "ImageReaderDecoderTest";
    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
    private static final boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);
    private static final long DEFAULT_TIMEOUT_US = 10000;
    private static final long WAIT_FOR_IMAGE_TIMEOUT_MS = 1000;
    private static final String DEBUG_FILE_NAME_BASE = "/sdcard/";
    private static final int NUM_FRAME_DECODED = 100;
    // video decoders only support a single outstanding image with the consumer
    private static final int MAX_NUM_IMAGES = 1;
    private static final float COLOR_STDEV_ALLOWANCE = 5f;
    private static final float COLOR_DELTA_ALLOWANCE = 5f;

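    // MODE_IMAGEREADER renders decoder output to the ImageReader's surface and validates the
    // images delivered through the listener; MODE_IMAGE reads images directly from the decoder
    // via MediaCodec.getOutputImage() without using a surface.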
    private final static int MODE_IMAGEREADER = 0;
    private final static int MODE_IMAGE       = 1;

    private Resources mResources;
    private MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
    private ImageReader mReader;
    private Surface mReaderSurface;
    private HandlerThread mHandlerThread;
    private Handler mHandler;
    private ImageListener mImageListener;

    @Override
    public void setContext(Context context) {
        super.setContext(context);
        mResources = mContext.getResources();
    }

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        mHandlerThread = new HandlerThread(TAG);
        mHandlerThread.start();
        mHandler = new Handler(mHandlerThread.getLooper());
        mImageListener = new ImageListener();
    }

    @Override
    protected void tearDown() throws Exception {
        mHandlerThread.quitSafely();
        mHandler = null;
    }

    static class MediaAsset {
        public MediaAsset(int resource, int width, int height) {
            mResource = resource;
            mWidth = width;
            mHeight = height;
        }

        public int getWidth() {
            return mWidth;
        }

        public int getHeight() {
            return mHeight;
        }

        public int getResource() {
            return mResource;
        }

        private final int mResource;
        private final int mWidth;
        private final int mHeight;
    }

    static class MediaAssets {
        public MediaAssets(String mime, MediaAsset... assets) {
            mMime = mime;
            mAssets = assets;
        }

        public String getMime() {
            return mMime;
        }

        public MediaAsset[] getAssets() {
            return mAssets;
        }

        private final String mMime;
        private final MediaAsset[] mAssets;
    }

    private static MediaAssets H263_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_H263,
            new MediaAsset(R.raw.swirl_176x144_h263, 176, 144),
            new MediaAsset(R.raw.swirl_352x288_h263, 352, 288),
            new MediaAsset(R.raw.swirl_128x96_h263, 128, 96));

    private static MediaAssets MPEG4_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_MPEG4,
            new MediaAsset(R.raw.swirl_128x128_mpeg4, 128, 128),
            new MediaAsset(R.raw.swirl_144x136_mpeg4, 144, 136),
            new MediaAsset(R.raw.swirl_136x144_mpeg4, 136, 144),
            new MediaAsset(R.raw.swirl_132x130_mpeg4, 132, 130),
            new MediaAsset(R.raw.swirl_130x132_mpeg4, 130, 132));

    private static MediaAssets H264_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_AVC,
            new MediaAsset(R.raw.swirl_128x128_h264, 128, 128),
            new MediaAsset(R.raw.swirl_144x136_h264, 144, 136),
            new MediaAsset(R.raw.swirl_136x144_h264, 136, 144),
            new MediaAsset(R.raw.swirl_132x130_h264, 132, 130),
            new MediaAsset(R.raw.swirl_130x132_h264, 130, 132));

    private static MediaAssets H265_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_HEVC,
            new MediaAsset(R.raw.swirl_128x128_h265, 128, 128),
            new MediaAsset(R.raw.swirl_144x136_h265, 144, 136),
            new MediaAsset(R.raw.swirl_136x144_h265, 136, 144),
            new MediaAsset(R.raw.swirl_132x130_h265, 132, 130),
            new MediaAsset(R.raw.swirl_130x132_h265, 130, 132));

    private static MediaAssets VP8_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_VP8,
            new MediaAsset(R.raw.swirl_128x128_vp8, 128, 128),
            new MediaAsset(R.raw.swirl_144x136_vp8, 144, 136),
            new MediaAsset(R.raw.swirl_136x144_vp8, 136, 144),
            new MediaAsset(R.raw.swirl_132x130_vp8, 132, 130),
            new MediaAsset(R.raw.swirl_130x132_vp8, 130, 132));

    private static MediaAssets VP9_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_VP9,
            new MediaAsset(R.raw.swirl_128x128_vp9, 128, 128),
            new MediaAsset(R.raw.swirl_144x136_vp9, 144, 136),
            new MediaAsset(R.raw.swirl_136x144_vp9, 136, 144),
            new MediaAsset(R.raw.swirl_132x130_vp9, 132, 130),
            new MediaAsset(R.raw.swirl_130x132_vp9, 130, 132));

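    // Assumed nominal frame rate of the swirl test clips; only used for the
    // size/rate support check below.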
    static final float SWIRL_FPS = 12.f;

    class Decoder {
        final private String mName;
        final private String mMime;
        final private VideoCapabilities mCaps;
        final private ArrayList<MediaAsset> mAssets;

        boolean isFlexibleFormatSupported(CodecCapabilities caps) {
            for (int c : caps.colorFormats) {
                if (c == COLOR_FormatYUV420Flexible) {
                    return true;
                }
            }
            return false;
        }

        Decoder(String name, MediaAssets assets, CodecCapabilities caps) {
            mName = name;
            mMime = assets.getMime();
            mCaps = caps.getVideoCapabilities();
            mAssets = new ArrayList<MediaAsset>();

            for (MediaAsset asset : assets.getAssets()) {
                if (mCaps.areSizeAndRateSupported(asset.getWidth(), asset.getHeight(), SWIRL_FPS)
                        && isFlexibleFormatSupported(caps)) {
                    mAssets.add(asset);
                }
            }
        }

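        // Returns true if this codec had no supported assets to decode, i.e. everything
        // was skipped.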
        public boolean videoDecode(int mode, boolean checkSwirl) {
            boolean skipped = true;
            for (MediaAsset asset: mAssets) {
                // TODO: loop over all supported image formats
                int imageFormat = ImageFormat.YUV_420_888;
                int colorFormat = COLOR_FormatYUV420Flexible;
                videoDecode(asset, imageFormat, colorFormat, mode, checkSwirl);
                skipped = false;
            }
            return skipped;
        }

        private void videoDecode(
                MediaAsset asset, int imageFormat, int colorFormat, int mode, boolean checkSwirl) {
            int video = asset.getResource();
            int width = asset.getWidth();
            int height = asset.getHeight();

            if (DEBUG) Log.d(TAG, "videoDecode " + mName + " " + width + "x" + height);

            MediaCodec decoder = null;
            AssetFileDescriptor vidFD = null;

            MediaExtractor extractor = null;
            File tmpFile = null;
            InputStream is = null;
            FileOutputStream os = null;
            MediaFormat mediaFormat = null;
            try {
                extractor = new MediaExtractor();

                try {
                    vidFD = mResources.openRawResourceFd(video);
                    extractor.setDataSource(
                            vidFD.getFileDescriptor(), vidFD.getStartOffset(), vidFD.getLength());
                } catch (NotFoundException e) {
                    // resource is compressed, uncompress locally
                    String tmpName = "tempStream";
                    tmpFile = File.createTempFile(tmpName, null, mContext.getCacheDir());
                    is = mResources.openRawResource(video);
                    os = new FileOutputStream(tmpFile);
                    byte[] buf = new byte[1024];
                    int len;
                    while ((len = is.read(buf, 0, buf.length)) > 0) {
                        os.write(buf, 0, len);
                    }
                    os.close();
                    is.close();

                    extractor.setDataSource(tmpFile.getAbsolutePath());
                }

                mediaFormat = extractor.getTrackFormat(0);
                mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);

                // Create decoder
                decoder = MediaCodec.createByCodecName(mName);
                assertNotNull("couldn't create decoder " + mName, decoder);

                decodeFramesToImage(
                        decoder, extractor, mediaFormat,
                        width, height, imageFormat, mode, checkSwirl);

                decoder.stop();
                if (vidFD != null) {
                    vidFD.close();
                }
            } catch (Throwable e) {
                throw new RuntimeException("while " + mName + " decoding "
                        + mResources.getResourceEntryName(video) + ": " + mediaFormat, e);
            } finally {
                if (decoder != null) {
                    decoder.release();
                }
                if (extractor != null) {
                    extractor.release();
                }
                if (tmpFile != null) {
                    tmpFile.delete();
                }
            }
        }
    }

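    // Enumerates decoders for the given assets. goog == true selects platform (non-vendor,
    // typically software) codecs; goog == false selects vendor codecs.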
    private Decoder[] decoders(MediaAssets assets, boolean goog) {
        String mime = assets.getMime();
        MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        ArrayList<Decoder> result = new ArrayList<Decoder>();

        for (MediaCodecInfo info : mcl.getCodecInfos()) {
            if (info.isEncoder() || info.isAlias() || info.isVendor() == goog) {
                continue;
            }
            CodecCapabilities caps = null;
            try {
                caps = info.getCapabilitiesForType(mime);
            } catch (IllegalArgumentException e) { // mime is not supported
                continue;
            }
            assertNotNull(info.getName() + " capabilities for " + mime + " returned null", caps);
            result.add(new Decoder(info.getName(), assets, caps));
        }
        return result.toArray(new Decoder[result.size()]);
    }

    private Decoder[] goog(MediaAssets assets) {
        return decoders(assets, true /* goog */);
    }

    private Decoder[] other(MediaAssets assets) {
        return decoders(assets, false /* goog */);
    }

    private Decoder[] googH265()  { return goog(H265_ASSETS); }
    private Decoder[] googH264()  { return goog(H264_ASSETS); }
    private Decoder[] googH263()  { return goog(H263_ASSETS); }
    private Decoder[] googMpeg4() { return goog(MPEG4_ASSETS); }
    private Decoder[] googVP8()   { return goog(VP8_ASSETS); }
    private Decoder[] googVP9()   { return goog(VP9_ASSETS); }

    private Decoder[] otherH265()  { return other(H265_ASSETS); }
    private Decoder[] otherH264()  { return other(H264_ASSETS); }
    private Decoder[] otherH263()  { return other(H263_ASSETS); }
    private Decoder[] otherMpeg4() { return other(MPEG4_ASSETS); }
    private Decoder[] otherVP8()   { return other(VP8_ASSETS); }
    private Decoder[] otherVP9()   { return other(VP9_ASSETS); }

    public void testGoogH265Image()   { swirlTest(googH265(),   MODE_IMAGE); }
    public void testGoogH264Image()   { swirlTest(googH264(),   MODE_IMAGE); }
    public void testGoogH263Image()   { swirlTest(googH263(),   MODE_IMAGE); }
    public void testGoogMpeg4Image()  { swirlTest(googMpeg4(),  MODE_IMAGE); }
    public void testGoogVP8Image()    { swirlTest(googVP8(),    MODE_IMAGE); }
    public void testGoogVP9Image()    { swirlTest(googVP9(),    MODE_IMAGE); }

    public void testOtherH265Image()  { swirlTest(otherH265(),  MODE_IMAGE); }
    public void testOtherH264Image()  { swirlTest(otherH264(),  MODE_IMAGE); }
    public void testOtherH263Image()  { swirlTest(otherH263(),  MODE_IMAGE); }
    public void testOtherMpeg4Image() { swirlTest(otherMpeg4(), MODE_IMAGE); }
    public void testOtherVP8Image()   { swirlTest(otherVP8(),   MODE_IMAGE); }
    public void testOtherVP9Image()   { swirlTest(otherVP9(),   MODE_IMAGE); }

    public void testGoogH265ImageReader()   { swirlTest(googH265(),   MODE_IMAGEREADER); }
    public void testGoogH264ImageReader()   { swirlTest(googH264(),   MODE_IMAGEREADER); }
    public void testGoogH263ImageReader()   { swirlTest(googH263(),   MODE_IMAGEREADER); }
    public void testGoogMpeg4ImageReader()  { swirlTest(googMpeg4(),  MODE_IMAGEREADER); }
    public void testGoogVP8ImageReader()    { swirlTest(googVP8(),    MODE_IMAGEREADER); }
    public void testGoogVP9ImageReader()    { swirlTest(googVP9(),    MODE_IMAGEREADER); }

    public void testOtherH265ImageReader()  { swirlTest(otherH265(),  MODE_IMAGEREADER); }
    public void testOtherH264ImageReader()  { swirlTest(otherH264(),  MODE_IMAGEREADER); }
    public void testOtherH263ImageReader()  { swirlTest(otherH263(),  MODE_IMAGEREADER); }
    public void testOtherMpeg4ImageReader() { swirlTest(otherMpeg4(), MODE_IMAGEREADER); }
    public void testOtherVP8ImageReader()   { swirlTest(otherVP8(),   MODE_IMAGEREADER); }
    public void testOtherVP9ImageReader()   { swirlTest(otherVP9(),   MODE_IMAGEREADER); }

    /**
     * Test ImageReader with 480x360 non-google AVC decoding for flexible yuv format
     */
    public void testHwAVCDecode360pForFlexibleYuv() throws Exception {
        Decoder[] decoders = other(new MediaAssets(
                MediaFormat.MIMETYPE_VIDEO_AVC,
                new MediaAsset(
                        R.raw.video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz,
                        480 /* width */, 360 /* height */)));

        decodeTest(decoders, MODE_IMAGEREADER, false /* checkSwirl */);
    }

    /**
     * Test ImageReader with 480x360 google (SW) AVC decoding for flexible yuv format
     */
    public void testSwAVCDecode360pForFlexibleYuv() throws Exception {
        Decoder[] decoders = goog(new MediaAssets(
                MediaFormat.MIMETYPE_VIDEO_AVC,
                new MediaAsset(
                        R.raw.video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz,
                        480 /* width */, 360 /* height */)));

        decodeTest(decoders, MODE_IMAGEREADER, false /* checkSwirl */);
    }

    private void swirlTest(Decoder[] decoders, int mode) {
        decodeTest(decoders, mode, true /* checkSwirl */);
    }

    private void decodeTest(Decoder[] decoders, int mode, boolean checkSwirl) {
        try {
            boolean skipped = true;
            for (Decoder codec : decoders) {
                // videoDecode() returns true if the codec skipped all of its assets
                if (!codec.videoDecode(mode, checkSwirl)) {
                    skipped = false;
                }
            }
            if (skipped) {
                MediaUtils.skipTest("decoder does not support any of the input files");
            }
        } finally {
            closeImageReader();
        }
    }

    private static class ImageListener implements ImageReader.OnImageAvailableListener {
        private final LinkedBlockingQueue<Image> mQueue =
                new LinkedBlockingQueue<Image>();

        @Override
        public void onImageAvailable(ImageReader reader) {
            try {
                mQueue.put(reader.acquireNextImage());
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onImageAvailable");
            }
        }

        /**
         * Get an image from the image reader.
         *
         * @param timeout Timeout value for the wait.
         * @return The image from the image reader.
         */
        public Image getImage(long timeout) throws InterruptedException {
            Image image = mQueue.poll(timeout, TimeUnit.MILLISECONDS);
            assertNotNull("Wait for an image timed out in " + timeout + "ms", image);
            return image;
        }
    }

    /**
     * Decode video frames to image reader.
     */
    private void decodeFramesToImage(
            MediaCodec decoder, MediaExtractor extractor, MediaFormat mediaFormat,
            int width, int height, int imageFormat, int mode, boolean checkSwirl)
            throws InterruptedException {
        ByteBuffer[] decoderInputBuffers;
        ByteBuffer[] decoderOutputBuffers;

        // Configure decoder.
        if (VERBOSE) Log.v(TAG, "stream format: " + mediaFormat);
        if (mode == MODE_IMAGEREADER) {
            createImageReader(width, height, imageFormat, MAX_NUM_IMAGES, mImageListener);
            decoder.configure(mediaFormat, mReaderSurface, null /* crypto */, 0 /* flags */);
        } else {
            assertEquals(mode, MODE_IMAGE);
            decoder.configure(mediaFormat, null /* surface */, null /* crypto */, 0 /* flags */);
        }

        decoder.start();
        decoderInputBuffers = decoder.getInputBuffers();
        decoderOutputBuffers = decoder.getOutputBuffers();
        extractor.selectTrack(0);

        // Start decoding and get Image, only test the first NUM_FRAME_DECODED frames.
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        boolean sawInputEOS = false;
        boolean sawOutputEOS = false;
        int outputFrameCount = 0;
        while (!sawOutputEOS && outputFrameCount < NUM_FRAME_DECODED) {
            if (VERBOSE) Log.v(TAG, "loop:" + outputFrameCount);
            // Feed input frame.
            if (!sawInputEOS) {
                int inputBufIndex = decoder.dequeueInputBuffer(DEFAULT_TIMEOUT_US);
                if (inputBufIndex >= 0) {
                    ByteBuffer dstBuf = decoderInputBuffers[inputBufIndex];
                    int sampleSize =
                        extractor.readSampleData(dstBuf, 0 /* offset */);

                    if (VERBOSE) Log.v(TAG, "queue an input buffer, idx/size: "
                        + inputBufIndex + "/" + sampleSize);

                    long presentationTimeUs = 0;

                    if (sampleSize < 0) {
                        if (VERBOSE) Log.v(TAG, "saw input EOS.");
                        sawInputEOS = true;
                        sampleSize = 0;
                    } else {
                        presentationTimeUs = extractor.getSampleTime();
                    }

                    decoder.queueInputBuffer(
                            inputBufIndex,
                            0 /* offset */,
                            sampleSize,
                            presentationTimeUs,
                            sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);

                    if (!sawInputEOS) {
                        extractor.advance();
                    }
                }
            }

            // Get output frame
            int res = decoder.dequeueOutputBuffer(info, DEFAULT_TIMEOUT_US);
            if (VERBOSE) Log.v(TAG, "got a buffer: " + info.size + "/" + res);
            if (res == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                if (VERBOSE) Log.v(TAG, "no output frame available");
            } else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // decoder output buffers changed, need update.
                if (VERBOSE) Log.v(TAG, "decoder output buffers changed");
                decoderOutputBuffers = decoder.getOutputBuffers();
            } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // this happens before the first frame is returned.
                MediaFormat outFormat = decoder.getOutputFormat();
                if (VERBOSE) Log.v(TAG, "decoder output format changed: " + outFormat);
            } else if (res < 0) {
                // Should be decoding error.
                fail("unexpected result from decoder.dequeueOutputBuffer: " + res);
            } else {
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    sawOutputEOS = true;
                }

                // res >= 0: normal decoding case, copy the output buffer.
                // Will use it as a reference to validate the ImageReader output.
                // Some decoders output a 0-sized buffer at the end. Ignore those.
                boolean doRender = (info.size != 0);

                if (doRender) {
                    outputFrameCount++;
                    String fileName = DEBUG_FILE_NAME_BASE + MediaUtils.getTestName()
                            + (mode == MODE_IMAGE ? "_image_" : "_reader_")
                            + width + "x" + height + "_" + outputFrameCount + ".yuv";

                    Image image = null;
                    try {
                        if (mode == MODE_IMAGE) {
                            image = decoder.getOutputImage(res);
                        } else {
                            decoder.releaseOutputBuffer(res, doRender);
                            res = -1;
                            // Read image and verify
                            image = mImageListener.getImage(WAIT_FOR_IMAGE_TIMEOUT_MS);
                        }
                        validateImage(image, width, height, imageFormat, fileName);

                        if (checkSwirl) {
                            try {
                                validateSwirl(image);
                            } catch (Throwable e) {
                                dumpFile(fileName, getDataFromImage(image));
                                throw e;
                            }
                        }
                    } finally {
                        if (image != null) {
                            image.close();
                        }
                    }
                }

                if (res >= 0) {
                    decoder.releaseOutputBuffer(res, false /* render */);
                }
            }
        }
    }

    /**
     * Validate image based on format and size.
     *
     * @param image The image to be validated.
     * @param width The image width.
     * @param height The image height.
     * @param format The image format.
     * @param filePath The debug dump file path; null if no dump to file is desired.
     */
    public static void validateImage(
            Image image, int width, int height, int format, String filePath) {
        assertNotNull("Input image is invalid", image);

        if (VERBOSE) {
            Plane[] imagePlanes = image.getPlanes();
            Log.v(TAG, "Image " + filePath + " Info:");
            Log.v(TAG, "first plane pixelstride " + imagePlanes[0].getPixelStride());
            Log.v(TAG, "first plane rowstride " + imagePlanes[0].getRowStride());
            Log.v(TAG, "Image timestamp:" + image.getTimestamp());
        }

        assertEquals("Format doesn't match", format, image.getFormat());
        assertEquals("Width doesn't match", width, image.getCropRect().width());
        assertEquals("Height doesn't match", height, image.getCropRect().height());

        if (VERBOSE) Log.v(TAG, "validating Image");
        byte[] data = getDataFromImage(image);
        assertTrue("Invalid image data", data != null && data.length > 0);

        validateYuvData(data, width, height, format, image.getTimestamp());

        if (VERBOSE && filePath != null) {
            dumpFile(filePath, data);
        }
    }

    private static void validateSwirl(Image image) {
        Rect crop = image.getCropRect();
        final int NUM_SIDES = 4;
        final int step = 8;      // the width of the layers
        long[][] rawStats = new long[NUM_SIDES][10];
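        // expected average (Y, U, V) values for each of the four sides of the swirl
        // pattern (reference values assumed for the swirl test clips)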
        int[][] colors = new int[][] {
            { 111, 96, 204 }, { 178, 27, 174 }, { 100, 192, 92 }, { 106, 117, 62 }
        };

        // successively accumulate statistics for each layer of the swirl
        // by using overlapping rectangles, and the observation that
        // layer_i = rectangle_i - rectangle_(i+1)
        int lastLayer = 0;
        int layer = 0;
        boolean lastLayerValid = false;
        for (int pos = 0; ; pos += step) {
            Rect area = new Rect(pos - step, pos, crop.width() / 2, crop.height() + 2 * step - pos);
            if (area.isEmpty()) {
                break;
            }
            area.offset(crop.left, crop.top);
            area.intersect(crop);
            for (int lr = 0; lr < 2; ++lr) {
                long[] oneStat = CodecUtils.getRawStats(image, area);
                if (VERBOSE) Log.v(TAG, "area=" + area + ", layer=" + layer + ", last="
                                    + lastLayer + ": " + Arrays.toString(oneStat));
                for (int i = 0; i < oneStat.length; i++) {
                    rawStats[layer][i] += oneStat[i];
                    if (lastLayerValid) {
                        rawStats[lastLayer][i] -= oneStat[i];
                    }
                }
                if (VERBOSE && lastLayerValid) {
                    Log.v(TAG, "layer-" + lastLayer + ": " + Arrays.toString(rawStats[lastLayer]));
                    Log.v(TAG, Arrays.toString(CodecUtils.Raw2YUVStats(rawStats[lastLayer])));
                }
                // switch to the opposite side
                layer ^= 2;      // NUM_SIDES / 2
                lastLayer ^= 2;  // NUM_SIDES / 2
                area.offset(crop.centerX() - area.left, 2 * (crop.centerY() - area.centerY()));
            }

            lastLayer = layer;
            lastLayerValid = true;
            layer = (layer + 1) % NUM_SIDES;
        }

        for (layer = 0; layer < NUM_SIDES; ++layer) {
            float[] stats = CodecUtils.Raw2YUVStats(rawStats[layer]);
            if (DEBUG) Log.d(TAG, "layer-" + layer + ": " + Arrays.toString(stats));
            if (VERBOSE) Log.v(TAG, Arrays.toString(rawStats[layer]));

            // check layer uniformity
            for (int i = 0; i < 3; i++) {
                assertTrue("color of layer-" + layer + " is not uniform: "
                        + Arrays.toString(stats),
                        stats[3 + i] < COLOR_STDEV_ALLOWANCE);
            }

            // check layer color
            for (int i = 0; i < 3; i++) {
                assertTrue("color of layer-" + layer + " mismatches target "
                        + Arrays.toString(colors[layer]) + " vs "
                        + Arrays.toString(Arrays.copyOf(stats, 3)),
                        Math.abs(stats[i] - colors[layer][i]) < COLOR_DELTA_ALLOWANCE);
            }
        }
    }

    private static void validateYuvData(byte[] yuvData, int width, int height, int format,
            long ts) {

        assertTrue("YUV format must be one of YUV_420_888, NV21, or YV12",
                format == ImageFormat.YUV_420_888 ||
                format == ImageFormat.NV21 ||
                format == ImageFormat.YV12);

        if (VERBOSE) Log.v(TAG, "Validating YUV data");
        int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
        assertEquals("Yuv data size doesn't match", expectedSize, yuvData.length);
    }

    private static void checkYuvFormat(int format) {
        if ((format != ImageFormat.YUV_420_888) &&
                (format != ImageFormat.NV21) &&
                (format != ImageFormat.YV12)) {
            fail("Wrong formats: " + format);
        }
    }

    /**
     * <p>Check the Android image format validity for an image; only the formats below
     * are supported:</p>
     *
     * <p>Valid formats are YUV_420_888/NV21/YV12 for a video decoder.</p>
     */
    private static void checkAndroidImageFormat(Image image) {
        int format = image.getFormat();
        Plane[] planes = image.getPlanes();
        switch (format) {
            case ImageFormat.YUV_420_888:
            case ImageFormat.NV21:
            case ImageFormat.YV12:
                assertEquals("YUV420 format Images should have 3 planes", 3, planes.length);
                break;
            default:
                fail("Unsupported Image Format: " + format);
        }
    }

    /**
     * Get a byte array of image data from an Image object.
     * <p>
     * Reads data from all planes of an Image into a contiguous, unpadded,
     * unpacked 1-D linear byte array, such that it can be written to disk or
     * conveniently accessed by software. It supports YUV_420_888/NV21/YV12
     * input Image formats.
     * </p>
     * <p>
     * For YUV_420_888/NV21/YV12/Y8/Y16, it returns a byte array that contains
     * the Y plane data first, followed by the U(Cb) and V(Cr) planes, if present
     * (xstride = width, ystride = height for both chroma and luma components).
     * </p>
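     * <p>
     * For example, assuming a 4x4 YUV_420_888 frame, the returned array is
     * width * height * ImageFormat.getBitsPerPixel(format) / 8 = 24 bytes:
     * 16 Y bytes followed by 4 U (Cb) and 4 V (Cr) bytes.
     * </p>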
     */
    private static byte[] getDataFromImage(Image image) {
        assertNotNull("Invalid image:", image);
        Rect crop = image.getCropRect();
        int format = image.getFormat();
        int width = crop.width();
        int height = crop.height();
        int rowStride, pixelStride;
        byte[] data = null;

        // Read image data
        Plane[] planes = image.getPlanes();
        assertTrue("Fail to get image planes", planes != null && planes.length > 0);

        // Check image validity
        checkAndroidImageFormat(image);

        ByteBuffer buffer = null;

        int offset = 0;
        data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
        byte[] rowData = new byte[planes[0].getRowStride()];
        if (VERBOSE) Log.v(TAG, "get data from " + planes.length + " planes");
        for (int i = 0; i < planes.length; i++) {
            int shift = (i == 0) ? 0 : 1;
            buffer = planes[i].getBuffer();
            assertNotNull("Fail to get bytebuffer from plane", buffer);
            rowStride = planes[i].getRowStride();
            pixelStride = planes[i].getPixelStride();
            assertTrue("pixel stride " + pixelStride + " is invalid", pixelStride > 0);
            if (VERBOSE) {
                Log.v(TAG, "pixelStride " + pixelStride);
                Log.v(TAG, "rowStride " + rowStride);
                Log.v(TAG, "width " + width);
                Log.v(TAG, "height " + height);
            }
            // For multi-planar yuv images, assuming yuv420 with 2x2 chroma subsampling.
            int w = crop.width() >> shift;
            int h = crop.height() >> shift;
            buffer.position(rowStride * (crop.top >> shift) + pixelStride * (crop.left >> shift));
            assertTrue("rowStride " + rowStride + " should be >= width " + w, rowStride >= w);
            for (int row = 0; row < h; row++) {
                int bytesPerPixel = ImageFormat.getBitsPerPixel(format) / 8;
                int length;
                if (pixelStride == bytesPerPixel) {
                    // Special case: optimized read of the entire row
                    length = w * bytesPerPixel;
                    buffer.get(data, offset, length);
                    offset += length;
                } else {
                    // Generic case: should work for any pixelStride but slower.
                    // Use intermediate buffer to avoid read byte-by-byte from
                    // DirectByteBuffer, which is very bad for performance
                    length = (w - 1) * pixelStride + bytesPerPixel;
                    buffer.get(rowData, 0, length);
                    for (int col = 0; col < w; col++) {
                        data[offset++] = rowData[col * pixelStride];
                    }
                }
                // Advance buffer the remainder of the row stride
                if (row < h - 1) {
                    buffer.position(buffer.position() + rowStride - length);
                }
            }
            if (VERBOSE) Log.v(TAG, "Finished reading data from plane " + i);
        }
        return data;
    }

    private static void dumpFile(String fileName, byte[] data) {
        assertNotNull("fileName must not be null", fileName);
        assertNotNull("data must not be null", data);

        FileOutputStream outStream;
        try {
            Log.v(TAG, "output will be saved as " + fileName);
            outStream = new FileOutputStream(fileName);
        } catch (IOException ioe) {
            throw new RuntimeException("Unable to create debug output file " + fileName, ioe);
        }

        try {
            outStream.write(data);
            outStream.close();
        } catch (IOException ioe) {
            throw new RuntimeException("failed writing data to file " + fileName, ioe);
        }
    }

    private void createImageReader(
            int width, int height, int format, int maxNumImages,
            ImageReader.OnImageAvailableListener listener) {
        closeImageReader();

        mReader = ImageReader.newInstance(width, height, format, maxNumImages);
        mReaderSurface = mReader.getSurface();
        mReader.setOnImageAvailableListener(listener, mHandler);
        if (VERBOSE) {
            Log.v(TAG, String.format("Created ImageReader size (%dx%d), format %d", width, height,
                    format));
        }
    }

    /**
     * Close any pending images, then close the currently active {@link ImageReader}.
     */
    private void closeImageReader() {
        if (mReader != null) {
            try {
                // Close all possible pending images first.
                Image image = mReader.acquireLatestImage();
                if (image != null) {
                    image.close();
                }
            } finally {
                mReader.close();
                mReader = null;
            }
        }
    }
}