1 /* 2 * Copyright (C) 2014 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 package android.media.cts; 18 19 import android.media.cts.R; 20 21 import android.content.Context; 22 import android.content.pm.PackageManager; 23 import android.content.res.AssetFileDescriptor; 24 import android.media.MediaCodec; 25 import android.media.MediaCodecInfo; 26 import android.media.MediaCodecInfo.CodecCapabilities; 27 import android.media.MediaCodecInfo.CodecProfileLevel; 28 import android.media.MediaCodecList; 29 import android.media.MediaExtractor; 30 import android.media.MediaFormat; 31 import android.platform.test.annotations.AppModeFull; 32 import android.util.Log; 33 import android.view.Surface; 34 35 import com.android.compatibility.common.util.MediaUtils; 36 37 import android.opengl.GLES20; 38 import javax.microedition.khronos.opengles.GL10; 39 40 import java.io.IOException; 41 import java.lang.System; 42 import java.nio.ByteBuffer; 43 import java.util.ArrayList; 44 import java.util.Arrays; 45 import java.util.List; 46 import java.util.Locale; 47 import java.util.Vector; 48 import java.util.zip.CRC32; 49 50 @MediaHeavyPresubmitTest 51 @AppModeFull 52 public class AdaptivePlaybackTest extends MediaPlayerTestBase { 53 private static final String TAG = "AdaptivePlaybackTest"; 54 private boolean verify = false; 55 private static final int MIN_FRAMES_BEFORE_DRC = 2; 56 H264(CodecFactory factory)57 public Iterable<Codec> 
H264(CodecFactory factory) { 58 return factory.createCodecList( 59 mContext, 60 MediaFormat.MIMETYPE_VIDEO_AVC, 61 R.raw.video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz, 62 R.raw.video_1280x720_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz, 63 R.raw.bbb_s1_720x480_mp4_h264_mp3_2mbps_30fps_aac_lc_5ch_320kbps_48000hz); 64 } 65 HEVC(CodecFactory factory)66 public Iterable<Codec> HEVC(CodecFactory factory) { 67 return factory.createCodecList( 68 mContext, 69 MediaFormat.MIMETYPE_VIDEO_HEVC, 70 R.raw.bbb_s1_720x480_mp4_hevc_mp3_1600kbps_30fps_aac_he_6ch_240kbps_48000hz, 71 R.raw.bbb_s4_1280x720_mp4_hevc_mp31_4mbps_30fps_aac_he_stereo_80kbps_32000hz, 72 R.raw.bbb_s1_352x288_mp4_hevc_mp2_600kbps_30fps_aac_he_stereo_96kbps_48000hz); 73 } 74 Mpeg2(CodecFactory factory)75 public Iterable<Codec> Mpeg2(CodecFactory factory) { 76 return factory.createCodecList( 77 mContext, 78 MediaFormat.MIMETYPE_VIDEO_MPEG2, 79 R.raw.video_640x360_mp4_mpeg2_2000kbps_30fps_aac_stereo_128kbps_48000hz, 80 R.raw.video_1280x720_mp4_mpeg2_3000kbps_30fps_aac_stereo_128kbps_48000hz); 81 } 82 H263(CodecFactory factory)83 public Iterable<Codec> H263(CodecFactory factory) { 84 return factory.createCodecList( 85 mContext, 86 MediaFormat.MIMETYPE_VIDEO_H263, 87 R.raw.video_176x144_3gp_h263_300kbps_12fps_aac_stereo_128kbps_22050hz, 88 R.raw.video_352x288_3gp_h263_300kbps_12fps_aac_stereo_128kbps_22050hz); 89 } 90 Mpeg4(CodecFactory factory)91 public Iterable<Codec> Mpeg4(CodecFactory factory) { 92 return factory.createCodecList( 93 mContext, 94 MediaFormat.MIMETYPE_VIDEO_MPEG4, 95 R.raw.video_1280x720_mp4_mpeg4_1000kbps_25fps_aac_stereo_128kbps_44100hz, 96 R.raw.video_480x360_mp4_mpeg4_860kbps_25fps_aac_stereo_128kbps_44100hz, 97 R.raw.video_176x144_mp4_mpeg4_300kbps_25fps_aac_stereo_128kbps_44100hz); 98 } 99 VP8(CodecFactory factory)100 public Iterable<Codec> VP8(CodecFactory factory) { 101 return factory.createCodecList( 102 mContext, 103 MediaFormat.MIMETYPE_VIDEO_VP8, 104 
R.raw.video_480x360_webm_vp8_333kbps_25fps_vorbis_stereo_128kbps_48000hz, 105 R.raw.bbb_s3_1280x720_webm_vp8_8mbps_60fps_opus_6ch_384kbps_48000hz, 106 R.raw.bbb_s1_320x180_webm_vp8_800kbps_30fps_opus_5ch_320kbps_48000hz); 107 } 108 VP9(CodecFactory factory)109 public Iterable<Codec> VP9(CodecFactory factory) { 110 return factory.createCodecList( 111 mContext, 112 MediaFormat.MIMETYPE_VIDEO_VP9, 113 R.raw.video_480x360_webm_vp9_333kbps_25fps_vorbis_stereo_128kbps_48000hz, 114 R.raw.bbb_s4_1280x720_webm_vp9_0p31_4mbps_30fps_opus_stereo_128kbps_48000hz, 115 R.raw.bbb_s1_320x180_webm_vp9_0p11_600kbps_30fps_vorbis_mono_64kbps_48000hz); 116 } 117 AV1(CodecFactory factory)118 public Iterable<Codec> AV1(CodecFactory factory) { 119 return factory.createCodecList( 120 mContext, 121 MediaFormat.MIMETYPE_VIDEO_AV1, 122 R.raw.video_480x360_webm_av1_400kbps_30fps_vorbis_stereo_128kbps_48000hz, 123 R.raw.video_1280x720_webm_av1_2000kbps_30fps_vorbis_stereo_128kbps_48000hz, 124 R.raw.video_320x180_webm_av1_200kbps_30fps_vorbis_stereo_128kbps_48000hz); 125 } 126 127 CodecFactory ALL = new CodecFactory(); 128 CodecFactory SW = new SWCodecFactory(); 129 CodecFactory HW = new HWCodecFactory(); 130 H264()131 public Iterable<Codec> H264() { return H264(ALL); } HEVC()132 public Iterable<Codec> HEVC() { return HEVC(ALL); } VP8()133 public Iterable<Codec> VP8() { return VP8(ALL); } VP9()134 public Iterable<Codec> VP9() { return VP9(ALL); } AV1()135 public Iterable<Codec> AV1() { return AV1(ALL); } Mpeg2()136 public Iterable<Codec> Mpeg2() { return Mpeg2(ALL); } Mpeg4()137 public Iterable<Codec> Mpeg4() { return Mpeg4(ALL); } H263()138 public Iterable<Codec> H263() { return H263(ALL); } 139 AllCodecs()140 public Iterable<Codec> AllCodecs() { 141 return chain(H264(ALL), HEVC(ALL), VP8(ALL), VP9(ALL), AV1(ALL), Mpeg2(ALL), Mpeg4(ALL), H263(ALL)); 142 } 143 SWCodecs()144 public Iterable<Codec> SWCodecs() { 145 return chain(H264(SW), HEVC(SW), VP8(SW), VP9(SW), AV1(SW), Mpeg2(SW), Mpeg4(SW), 
H263(SW)); 146 } 147 HWCodecs()148 public Iterable<Codec> HWCodecs() { 149 return chain(H264(HW), HEVC(HW), VP8(HW), VP9(HW), AV1(HW), Mpeg2(HW), Mpeg4(HW), H263(HW)); 150 } 151 152 /* tests for adaptive codecs */ 153 Test adaptiveEarlyEos = new EarlyEosTest().adaptive(); 154 Test adaptiveEosFlushSeek = new EosFlushSeekTest().adaptive(); 155 Test adaptiveSkipAhead = new AdaptiveSkipTest(true /* forward */); 156 Test adaptiveSkipBack = new AdaptiveSkipTest(false /* forward */); 157 158 /* DRC tests for adaptive codecs */ 159 Test adaptiveReconfigDrc = new ReconfigDrcTest().adaptive(); 160 Test adaptiveSmallReconfigDrc = new ReconfigDrcTest().adaptiveSmall(); 161 Test adaptiveDrc = new AdaptiveDrcTest(); /* adaptive */ 162 Test adaptiveSmallDrc = new AdaptiveDrcTest().adaptiveSmall(); 163 164 /* tests for regular codecs */ 165 Test earlyEos = new EarlyEosTest(); 166 Test eosFlushSeek = new EosFlushSeekTest(); 167 Test flushConfigureDrc = new ReconfigDrcTest(); 168 169 Test[] allTests = { 170 adaptiveEarlyEos, 171 adaptiveEosFlushSeek, 172 adaptiveSkipAhead, 173 adaptiveSkipBack, 174 adaptiveReconfigDrc, 175 adaptiveSmallReconfigDrc, 176 adaptiveDrc, 177 adaptiveSmallDrc, 178 earlyEos, 179 eosFlushSeek, 180 flushConfigureDrc, 181 }; 182 183 /* helpers to run sets of tests */ runEOS()184 public void runEOS() { ex(AllCodecs(), new Test[] { 185 adaptiveEarlyEos, 186 adaptiveEosFlushSeek, 187 adaptiveReconfigDrc, 188 adaptiveSmallReconfigDrc, 189 earlyEos, 190 eosFlushSeek, 191 flushConfigureDrc, 192 }); } 193 runAll()194 public void runAll() { ex(AllCodecs(), allTests); } runSW()195 public void runSW() { ex(SWCodecs(), allTests); } runHW()196 public void runHW() { ex(HWCodecs(), allTests); } 197 verifyAll()198 public void verifyAll() { verify = true; try { runAll(); } finally { verify = false; } } verifySW()199 public void verifySW() { verify = true; try { runSW(); } finally { verify = false; } } verifyHW()200 public void verifyHW() { verify = true; try { runHW(); } 
finally { verify = false; } } 201 runH264()202 public void runH264() { ex(H264(), allTests); } runHEVC()203 public void runHEVC() { ex(HEVC(), allTests); } runVP8()204 public void runVP8() { ex(VP8(), allTests); } runVP9()205 public void runVP9() { ex(VP9(), allTests); } runAV1()206 public void runAV1() { ex(AV1(), allTests); } runMpeg2()207 public void runMpeg2() { ex(Mpeg2(), allTests); } runMpeg4()208 public void runMpeg4() { ex(Mpeg4(), allTests); } runH263()209 public void runH263() { ex(H263(), allTests); } 210 onlyH264HW()211 public void onlyH264HW() { ex(H264(HW), allTests); } onlyHEVCHW()212 public void onlyHEVCHW() { ex(HEVC(HW), allTests); } onlyVP8HW()213 public void onlyVP8HW() { ex(VP8(HW), allTests); } onlyVP9HW()214 public void onlyVP9HW() { ex(VP9(HW), allTests); } onlyAV1HW()215 public void onlyAV1HW() { ex(AV1(HW), allTests); } onlyMpeg2HW()216 public void onlyMpeg2HW() { ex(Mpeg2(HW), allTests); } onlyMpeg4HW()217 public void onlyMpeg4HW() { ex(Mpeg4(HW), allTests); } onlyH263HW()218 public void onlyH263HW() { ex(H263(HW), allTests); } 219 onlyH264SW()220 public void onlyH264SW() { ex(H264(SW), allTests); } onlyHEVCSW()221 public void onlyHEVCSW() { ex(HEVC(SW), allTests); } onlyVP8SW()222 public void onlyVP8SW() { ex(VP8(SW), allTests); } onlyVP9SW()223 public void onlyVP9SW() { ex(VP9(SW), allTests); } onlyAV1SW()224 public void onlyAV1SW() { ex(AV1(SW), allTests); } onlyMpeg2SW()225 public void onlyMpeg2SW() { ex(Mpeg2(SW), allTests); } onlyMpeg4SW()226 public void onlyMpeg4SW() { ex(Mpeg4(SW), allTests); } onlyH263SW()227 public void onlyH263SW() { ex(H263(SW), allTests); } 228 bytebuffer()229 public void bytebuffer() { ex(H264(SW), new EarlyEosTest().byteBuffer()); } onlyTexture()230 public void onlyTexture() { ex(H264(HW), new EarlyEosTest().texture()); } 231 232 /* inidividual tests */ testH264_adaptiveEarlyEos()233 public void testH264_adaptiveEarlyEos() { ex(H264(), adaptiveEarlyEos); } testHEVC_adaptiveEarlyEos()234 public void 
testHEVC_adaptiveEarlyEos() { ex(HEVC(), adaptiveEarlyEos); } testVP8_adaptiveEarlyEos()235 public void testVP8_adaptiveEarlyEos() { ex(VP8(), adaptiveEarlyEos); } testVP9_adaptiveEarlyEos()236 public void testVP9_adaptiveEarlyEos() { ex(VP9(), adaptiveEarlyEos); } testAV1_adaptiveEarlyEos()237 public void testAV1_adaptiveEarlyEos() { ex(AV1(), adaptiveEarlyEos); } testMpeg2_adaptiveEarlyEos()238 public void testMpeg2_adaptiveEarlyEos() { ex(Mpeg2(), adaptiveEarlyEos); } testMpeg4_adaptiveEarlyEos()239 public void testMpeg4_adaptiveEarlyEos() { ex(Mpeg4(), adaptiveEarlyEos); } testH263_adaptiveEarlyEos()240 public void testH263_adaptiveEarlyEos() { ex(H263(), adaptiveEarlyEos); } 241 testH264_adaptiveEosFlushSeek()242 public void testH264_adaptiveEosFlushSeek() { ex(H264(), adaptiveEosFlushSeek); } testHEVC_adaptiveEosFlushSeek()243 public void testHEVC_adaptiveEosFlushSeek() { ex(HEVC(), adaptiveEosFlushSeek); } testVP8_adaptiveEosFlushSeek()244 public void testVP8_adaptiveEosFlushSeek() { ex(VP8(), adaptiveEosFlushSeek); } testVP9_adaptiveEosFlushSeek()245 public void testVP9_adaptiveEosFlushSeek() { ex(VP9(), adaptiveEosFlushSeek); } testAV1_adaptiveEosFlushSeek()246 public void testAV1_adaptiveEosFlushSeek() { ex(AV1(), adaptiveEosFlushSeek); } testMpeg2_adaptiveEosFlushSeek()247 public void testMpeg2_adaptiveEosFlushSeek() { ex(Mpeg2(), adaptiveEosFlushSeek); } testMpeg4_adaptiveEosFlushSeek()248 public void testMpeg4_adaptiveEosFlushSeek() { ex(Mpeg4(), adaptiveEosFlushSeek); } testH263_adaptiveEosFlushSeek()249 public void testH263_adaptiveEosFlushSeek() { ex(H263(), adaptiveEosFlushSeek); } 250 testH264_adaptiveSkipAhead()251 public void testH264_adaptiveSkipAhead() { ex(H264(), adaptiveSkipAhead); } testHEVC_adaptiveSkipAhead()252 public void testHEVC_adaptiveSkipAhead() { ex(HEVC(), adaptiveSkipAhead); } testVP8_adaptiveSkipAhead()253 public void testVP8_adaptiveSkipAhead() { ex(VP8(), adaptiveSkipAhead); } testVP9_adaptiveSkipAhead()254 public void 
testVP9_adaptiveSkipAhead() { ex(VP9(), adaptiveSkipAhead); } testAV1_adaptiveSkipAhead()255 public void testAV1_adaptiveSkipAhead() { ex(AV1(), adaptiveSkipAhead); } testMpeg2_adaptiveSkipAhead()256 public void testMpeg2_adaptiveSkipAhead() { ex(Mpeg2(), adaptiveSkipAhead); } testMpeg4_adaptiveSkipAhead()257 public void testMpeg4_adaptiveSkipAhead() { ex(Mpeg4(), adaptiveSkipAhead); } testH263_adaptiveSkipAhead()258 public void testH263_adaptiveSkipAhead() { ex(H263(), adaptiveSkipAhead); } 259 testH264_adaptiveSkipBack()260 public void testH264_adaptiveSkipBack() { ex(H264(), adaptiveSkipBack); } testHEVC_adaptiveSkipBack()261 public void testHEVC_adaptiveSkipBack() { ex(HEVC(), adaptiveSkipBack); } testVP8_adaptiveSkipBack()262 public void testVP8_adaptiveSkipBack() { ex(VP8(), adaptiveSkipBack); } testVP9_adaptiveSkipBack()263 public void testVP9_adaptiveSkipBack() { ex(VP9(), adaptiveSkipBack); } testAV1_adaptiveSkipBack()264 public void testAV1_adaptiveSkipBack() { ex(AV1(), adaptiveSkipBack); } testMpeg2_adaptiveSkipBack()265 public void testMpeg2_adaptiveSkipBack() { ex(Mpeg2(), adaptiveSkipBack); } testMpeg4_adaptiveSkipBack()266 public void testMpeg4_adaptiveSkipBack() { ex(Mpeg4(), adaptiveSkipBack); } testH263_adaptiveSkipBack()267 public void testH263_adaptiveSkipBack() { ex(H263(), adaptiveSkipBack); } 268 testH264_adaptiveReconfigDrc()269 public void testH264_adaptiveReconfigDrc() { ex(H264(), adaptiveReconfigDrc); } testHEVC_adaptiveReconfigDrc()270 public void testHEVC_adaptiveReconfigDrc() { ex(HEVC(), adaptiveReconfigDrc); } testVP8_adaptiveReconfigDrc()271 public void testVP8_adaptiveReconfigDrc() { ex(VP8(), adaptiveReconfigDrc); } testVP9_adaptiveReconfigDrc()272 public void testVP9_adaptiveReconfigDrc() { ex(VP9(), adaptiveReconfigDrc); } testAV1_adaptiveReconfigDrc()273 public void testAV1_adaptiveReconfigDrc() { ex(AV1(), adaptiveReconfigDrc); } testMpeg2_adaptiveReconfigDrc()274 public void testMpeg2_adaptiveReconfigDrc() { ex(Mpeg2(), 
adaptiveReconfigDrc); } testMpeg4_adaptiveReconfigDrc()275 public void testMpeg4_adaptiveReconfigDrc() { ex(Mpeg4(), adaptiveReconfigDrc); } testH263_adaptiveReconfigDrc()276 public void testH263_adaptiveReconfigDrc() { ex(H263(), adaptiveReconfigDrc); } 277 testH264_adaptiveSmallReconfigDrc()278 public void testH264_adaptiveSmallReconfigDrc() { ex(H264(), adaptiveSmallReconfigDrc); } testHEVC_adaptiveSmallReconfigDrc()279 public void testHEVC_adaptiveSmallReconfigDrc() { ex(HEVC(), adaptiveSmallReconfigDrc); } testVP8_adaptiveSmallReconfigDrc()280 public void testVP8_adaptiveSmallReconfigDrc() { ex(VP8(), adaptiveSmallReconfigDrc); } testVP9_adaptiveSmallReconfigDrc()281 public void testVP9_adaptiveSmallReconfigDrc() { ex(VP9(), adaptiveSmallReconfigDrc); } testAV1_adaptiveSmallReconfigDrc()282 public void testAV1_adaptiveSmallReconfigDrc() { ex(AV1(), adaptiveSmallReconfigDrc); } testMpeg2_adaptiveSmallReconfigDrc()283 public void testMpeg2_adaptiveSmallReconfigDrc() { ex(Mpeg2(), adaptiveSmallReconfigDrc); } testMpeg4_adaptiveSmallReconfigDrc()284 public void testMpeg4_adaptiveSmallReconfigDrc() { ex(Mpeg4(), adaptiveSmallReconfigDrc); } testH263_adaptiveSmallReconfigDrc()285 public void testH263_adaptiveSmallReconfigDrc() { ex(H263(), adaptiveSmallReconfigDrc); } 286 testH264_adaptiveDrc()287 public void testH264_adaptiveDrc() { ex(H264(), adaptiveDrc); } testHEVC_adaptiveDrc()288 public void testHEVC_adaptiveDrc() { ex(HEVC(), adaptiveDrc); } testVP8_adaptiveDrc()289 public void testVP8_adaptiveDrc() { ex(VP8(), adaptiveDrc); } testVP9_adaptiveDrc()290 public void testVP9_adaptiveDrc() { ex(VP9(), adaptiveDrc); } testAV1_adaptiveDrc()291 public void testAV1_adaptiveDrc() { ex(AV1(), adaptiveDrc); } testMpeg2_adaptiveDrc()292 public void testMpeg2_adaptiveDrc() { ex(Mpeg2(), adaptiveDrc); } testMpeg4_adaptiveDrc()293 public void testMpeg4_adaptiveDrc() { ex(Mpeg4(), adaptiveDrc); } testH263_adaptiveDrc()294 public void testH263_adaptiveDrc() { ex(H263(), 
adaptiveDrc); } 295 testH264_adaptiveDrcEarlyEos()296 public void testH264_adaptiveDrcEarlyEos() { ex(H264(), new AdaptiveDrcEarlyEosTest()); } testHEVC_adaptiveDrcEarlyEos()297 public void testHEVC_adaptiveDrcEarlyEos() { ex(HEVC(), new AdaptiveDrcEarlyEosTest()); } testVP8_adaptiveDrcEarlyEos()298 public void testVP8_adaptiveDrcEarlyEos() { ex(VP8(), new AdaptiveDrcEarlyEosTest()); } testVP9_adaptiveDrcEarlyEos()299 public void testVP9_adaptiveDrcEarlyEos() { ex(VP9(), new AdaptiveDrcEarlyEosTest()); } testAV1_adaptiveDrcEarlyEos()300 public void testAV1_adaptiveDrcEarlyEos() { ex(AV1(), new AdaptiveDrcEarlyEosTest()); } testMpeg2_adaptiveDrcEarlyEos()301 public void testMpeg2_adaptiveDrcEarlyEos(){ ex(Mpeg2(), new AdaptiveDrcEarlyEosTest()); } 302 testH264_adaptiveSmallDrc()303 public void testH264_adaptiveSmallDrc() { ex(H264(), adaptiveSmallDrc); } testHEVC_adaptiveSmallDrc()304 public void testHEVC_adaptiveSmallDrc() { ex(HEVC(), adaptiveSmallDrc); } testVP8_adaptiveSmallDrc()305 public void testVP8_adaptiveSmallDrc() { ex(VP8(), adaptiveSmallDrc); } testVP9_adaptiveSmallDrc()306 public void testVP9_adaptiveSmallDrc() { ex(VP9(), adaptiveSmallDrc); } testAV1_adaptiveSmallDrc()307 public void testAV1_adaptiveSmallDrc() { ex(AV1(), adaptiveSmallDrc); } testMpeg2_adaptiveSmallDrc()308 public void testMpeg2_adaptiveSmallDrc() { ex(Mpeg2(), adaptiveSmallDrc); } 309 testH264_earlyEos()310 public void testH264_earlyEos() { ex(H264(), earlyEos); } testHEVC_earlyEos()311 public void testHEVC_earlyEos() { ex(HEVC(), earlyEos); } testVP8_earlyEos()312 public void testVP8_earlyEos() { ex(VP8(), earlyEos); } testVP9_earlyEos()313 public void testVP9_earlyEos() { ex(VP9(), earlyEos); } testAV1_earlyEos()314 public void testAV1_earlyEos() { ex(AV1(), earlyEos); } testMpeg2_earlyEos()315 public void testMpeg2_earlyEos() { ex(Mpeg2(), earlyEos); } testMpeg4_earlyEos()316 public void testMpeg4_earlyEos() { ex(Mpeg4(), earlyEos); } testH263_earlyEos()317 public void 
testH263_earlyEos() { ex(H263(), earlyEos); } 318 testH264_eosFlushSeek()319 public void testH264_eosFlushSeek() { ex(H264(), eosFlushSeek); } testHEVC_eosFlushSeek()320 public void testHEVC_eosFlushSeek() { ex(HEVC(), eosFlushSeek); } testVP8_eosFlushSeek()321 public void testVP8_eosFlushSeek() { ex(VP8(), eosFlushSeek); } testVP9_eosFlushSeek()322 public void testVP9_eosFlushSeek() { ex(VP9(), eosFlushSeek); } testAV1_eosFlushSeek()323 public void testAV1_eosFlushSeek() { ex(AV1(), eosFlushSeek); } testMpeg2_eosFlushSeek()324 public void testMpeg2_eosFlushSeek() { ex(Mpeg2(), eosFlushSeek); } testMpeg4_eosFlushSeek()325 public void testMpeg4_eosFlushSeek() { ex(Mpeg4(), eosFlushSeek); } testH263_eosFlushSeek()326 public void testH263_eosFlushSeek() { ex(H263(), eosFlushSeek); } 327 testH264_flushConfigureDrc()328 public void testH264_flushConfigureDrc() { ex(H264(), flushConfigureDrc); } testHEVC_flushConfigureDrc()329 public void testHEVC_flushConfigureDrc() { ex(HEVC(), flushConfigureDrc); } testVP8_flushConfigureDrc()330 public void testVP8_flushConfigureDrc() { ex(VP8(), flushConfigureDrc); } testVP9_flushConfigureDrc()331 public void testVP9_flushConfigureDrc() { ex(VP9(), flushConfigureDrc); } testAV1_flushConfigureDrc()332 public void testAV1_flushConfigureDrc() { ex(AV1(), flushConfigureDrc); } testMpeg2_flushConfigureDrc()333 public void testMpeg2_flushConfigureDrc() { ex(Mpeg2(), flushConfigureDrc); } testMpeg4_flushConfigureDrc()334 public void testMpeg4_flushConfigureDrc() { ex(Mpeg4(), flushConfigureDrc); } testH263_flushConfigureDrc()335 public void testH263_flushConfigureDrc() { ex(H263(), flushConfigureDrc); } 336 337 /* only use unchecked exceptions to allow brief test methods */ ex(Iterable<Codec> codecList, Test test)338 private void ex(Iterable<Codec> codecList, Test test) { 339 ex(codecList, new Test[] { test } ); 340 } 341 ex(Iterable<Codec> codecList, Test[] testList)342 private void ex(Iterable<Codec> codecList, Test[] testList) { 343 if 
(codecList == null) { 344 Log.i(TAG, "CodecList was empty. Skipping test."); 345 return; 346 } 347 348 TestList tests = new TestList(); 349 for (Codec c : codecList) { 350 for (Test test : testList) { 351 if (test.isValid(c)) { 352 test.addTests(tests, c); 353 } 354 } 355 } 356 try { 357 tests.run(); 358 } catch (Throwable t) { 359 throw new RuntimeException(t); 360 } 361 } 362 363 /* need an inner class to have access to the activity */ 364 abstract class ActivityTest extends Test { 365 TestSurface mNullSurface = new ActivitySurface(null); getSurface()366 protected TestSurface getSurface() { 367 if (mUseSurface) { 368 return new ActivitySurface(getActivity().getSurfaceHolder().getSurface()); 369 } else if (mUseSurfaceTexture) { 370 return new DecoderSurface(1280, 720, mCRC); 371 } 372 return mNullSurface; 373 } 374 } 375 376 static final int NUM_FRAMES = 50; 377 378 /** 379 * Queue some frames with an EOS on the last one. Test that we have decoded as many 380 * frames as we queued. This tests the EOS handling of the codec to see if all queued 381 * (and out-of-order) frames are actually decoded and returned. 382 * 383 * Also test flushing prior to sending CSD, and immediately after sending CSD. 
384 */ 385 class EarlyEosTest extends ActivityTest { 386 // using bitfields to create a directed state graph that terminates at FLUSH_NEVER 387 static final int FLUSH_BEFORE_CSD = (1 << 1); 388 static final int FLUSH_AFTER_CSD = (1 << 0); 389 static final int FLUSH_NEVER = 0; 390 isValid(Codec c)391 public boolean isValid(Codec c) { 392 return getFormat(c) != null; 393 } addTests(TestList tests, final Codec c)394 public void addTests(TestList tests, final Codec c) { 395 int state = FLUSH_BEFORE_CSD; 396 for (int i = NUM_FRAMES / 2; i > 0; --i, state >>= 1) { 397 final int queuedFrames = i; 398 final int earlyFlushMode = state; 399 tests.add( 400 new Step("testing early EOS at " + queuedFrames, this, c) { 401 public void run() { 402 Decoder decoder = new Decoder(c.name); 403 try { 404 MediaFormat fmt = stepFormat(); 405 MediaFormat configFmt = fmt; 406 if (earlyFlushMode == FLUSH_BEFORE_CSD) { 407 // flush before CSD requires not submitting CSD with configure 408 configFmt = Media.removeCSD(fmt); 409 } 410 decoder.configureAndStart(configFmt, stepSurface()); 411 if (earlyFlushMode != FLUSH_NEVER) { 412 decoder.flush(); 413 // We must always queue CSD after a flush that is potentially 414 // before we receive output format has changed. This should 415 // work even after we receive the format change. 
416 decoder.queueCSD(fmt); 417 } 418 int decodedFrames = -decoder.queueInputBufferRange( 419 stepMedia(), 420 0 /* startFrame */, 421 queuedFrames, 422 true /* sendEos */, 423 true /* waitForEos */); 424 if (decodedFrames <= 0) { 425 Log.w(TAG, "Did not receive EOS -- negating frame count"); 426 } 427 decoder.stop(); 428 if (decodedFrames != queuedFrames) { 429 warn("decoded " + decodedFrames + " frames out of " + 430 queuedFrames + " queued"); 431 } 432 } finally { 433 warn(decoder.getWarnings()); 434 decoder.releaseQuietly(); 435 } 436 } 437 }); 438 if (verify) { 439 i >>= 1; 440 } 441 } 442 } 443 }; 444 445 /** 446 * Similar to EarlyEosTest, but we keep the component alive and running in between the steps. 447 * This is how seeking should be done if all frames must be outputted. This also tests that 448 * PTS can be repeated after flush. 449 */ 450 class EosFlushSeekTest extends ActivityTest { 451 Decoder mDecoder; // test state isValid(Codec c)452 public boolean isValid(Codec c) { 453 return getFormat(c) != null; 454 } addTests(TestList tests, final Codec c)455 public void addTests(TestList tests, final Codec c) { 456 tests.add( 457 new Step("testing EOS & flush before seek - init", this, c) { 458 public void run() { 459 mDecoder = new Decoder(c.name); 460 mDecoder.configureAndStart(stepFormat(), stepSurface()); 461 }}); 462 463 for (int i = NUM_FRAMES; i > 0; i--) { 464 final int queuedFrames = i; 465 tests.add( 466 new Step("testing EOS & flush before seeking after " + queuedFrames + 467 " frames", this, c) { 468 public void run() { 469 int decodedFrames = -mDecoder.queueInputBufferRange( 470 stepMedia(), 471 0 /* startFrame */, 472 queuedFrames, 473 true /* sendEos */, 474 true /* waitForEos */); 475 if (decodedFrames != queuedFrames) { 476 warn("decoded " + decodedFrames + " frames out of " + 477 queuedFrames + " queued"); 478 } 479 warn(mDecoder.getWarnings()); 480 mDecoder.clearWarnings(); 481 mDecoder.flush(); 482 } 483 }); 484 if (verify) { 485 i >>= 1; 
486 } 487 } 488 489 tests.add( 490 new Step("testing EOS & flush before seek - finally", this, c) { 491 public void run() { 492 try { 493 mDecoder.stop(); 494 } finally { 495 mDecoder.release(); 496 } 497 }}); 498 } 499 }; 500 501 /** 502 * Similar to EosFlushSeekTest, but we change the media size between the steps. 503 * This is how dynamic resolution switching can be done on codecs that do not support 504 * adaptive playback. 505 */ 506 class ReconfigDrcTest extends ActivityTest { 507 Decoder mDecoder; // test state isValid(Codec c)508 public boolean isValid(Codec c) { 509 return getFormat(c) != null && c.mediaList.length > 1; 510 } addTests(TestList tests, final Codec c)511 public void addTests(TestList tests, final Codec c) { 512 tests.add( 513 new Step("testing DRC with reconfigure - init", this, c) { 514 public void run() { 515 mDecoder = new Decoder(c.name); 516 }}); 517 518 for (int i = NUM_FRAMES, ix = 0; i > 0; i--, ix++) { 519 final int queuedFrames = i; 520 final int mediaIx = ix % c.mediaList.length; 521 tests.add( 522 new Step("testing DRC with reconfigure after " + queuedFrames + " frames", 523 this, c, mediaIx) { 524 public void run() { 525 try { 526 mDecoder.configureAndStart(stepFormat(), stepSurface()); 527 int decodedFrames = -mDecoder.queueInputBufferRange( 528 stepMedia(), 529 0 /* startFrame */, 530 queuedFrames, 531 true /* sendEos */, 532 true /* waitForEos */); 533 if (decodedFrames != queuedFrames) { 534 warn("decoded " + decodedFrames + " frames out of " + 535 queuedFrames + " queued"); 536 } 537 warn(mDecoder.getWarnings()); 538 mDecoder.clearWarnings(); 539 mDecoder.flush(); 540 } finally { 541 mDecoder.stop(); 542 } 543 } 544 }); 545 if (verify) { 546 i >>= 1; 547 } 548 } 549 tests.add( 550 new Step("testing DRC with reconfigure - finally", this, c) { 551 public void run() { 552 mDecoder.release(); 553 }}); 554 } 555 }; 556 557 /* ADAPTIVE-ONLY TESTS - only run on codecs that support adaptive playback */ 558 559 /** 560 * Test dynamic 
resolution change support. Queue various sized media segments 561 * with different resolutions, verify that all queued frames were decoded. Here 562 * PTS will grow between segments. 563 */ 564 class AdaptiveDrcTest extends ActivityTest { 565 Decoder mDecoder; 566 int mAdjustTimeUs; 567 int mDecodedFrames; 568 int mQueuedFrames; 569 AdaptiveDrcTest()570 public AdaptiveDrcTest() { 571 super(); 572 adaptive(); 573 } isValid(Codec c)574 public boolean isValid(Codec c) { 575 checkAdaptiveFormat(); 576 return c.adaptive && c.mediaList.length > 1; 577 } addTests(TestList tests, final Codec c)578 public void addTests(TestList tests, final Codec c) { 579 tests.add( 580 new Step("testing DRC with no reconfigure - init", this, c) { 581 public void run() throws Throwable { 582 // FIXME wait 2 seconds to allow system to free up previous codecs 583 try { 584 Thread.sleep(2000); 585 } catch (InterruptedException e) {} 586 mDecoder = new Decoder(c.name); 587 mDecoder.configureAndStart(stepFormat(), stepSurface()); 588 mAdjustTimeUs = 0; 589 mDecodedFrames = 0; 590 mQueuedFrames = 0; 591 }}); 592 593 for (int i = NUM_FRAMES, ix = 0; i >= MIN_FRAMES_BEFORE_DRC; i--, ix++) { 594 final int mediaIx = ix % c.mediaList.length; 595 final int segmentSize = i; 596 tests.add( 597 new Step("testing DRC with no reconfigure after " + i + " frames", 598 this, c, mediaIx) { 599 public void run() throws Throwable { 600 mQueuedFrames += segmentSize; 601 boolean lastSequence = segmentSize == MIN_FRAMES_BEFORE_DRC; 602 if (verify) { 603 lastSequence = (segmentSize >> 1) <= MIN_FRAMES_BEFORE_DRC; 604 } 605 int frames = mDecoder.queueInputBufferRange( 606 stepMedia(), 607 0 /* startFrame */, 608 segmentSize, 609 lastSequence /* sendEos */, 610 lastSequence /* expectEos */, 611 mAdjustTimeUs); 612 if (lastSequence && frames >= 0) { 613 warn("did not receive EOS, received " + frames + " frames"); 614 } else if (!lastSequence && frames < 0) { 615 warn("received EOS, received " + (-frames) + " frames"); 
616 } 617 warn(mDecoder.getWarnings()); 618 mDecoder.clearWarnings(); 619 620 mDecodedFrames += Math.abs(frames); 621 mAdjustTimeUs += 1 + stepMedia().getTimestampRangeValue( 622 0, segmentSize, Media.RANGE_END); 623 }}); 624 if (verify) { 625 i >>= 1; 626 } 627 } 628 tests.add( 629 new Step("testing DRC with no reconfigure - init", this, c) { 630 public void run() throws Throwable { 631 if (mDecodedFrames != mQueuedFrames) { 632 warn("decoded " + mDecodedFrames + " frames out of " + 633 mQueuedFrames + " queued"); 634 } 635 try { 636 mDecoder.stop(); 637 } finally { 638 mDecoder.release(); 639 } 640 } 641 }); 642 } 643 }; 644 645 /** 646 * Queue EOS shortly after a dynamic resolution change. Test that all frames were 647 * decoded. 648 */ 649 class AdaptiveDrcEarlyEosTest extends ActivityTest { AdaptiveDrcEarlyEosTest()650 public AdaptiveDrcEarlyEosTest() { 651 super(); 652 adaptive(); 653 } isValid(Codec c)654 public boolean isValid(Codec c) { 655 checkAdaptiveFormat(); 656 return c.adaptive && c.mediaList.length > 1; 657 } testStep(final Codec c, final int framesBeforeDrc, final int framesBeforeEos)658 public Step testStep(final Codec c, final int framesBeforeDrc, 659 final int framesBeforeEos) { 660 return new Step("testing DRC with no reconfigure after " + framesBeforeDrc + 661 " frames and subsequent EOS after " + framesBeforeEos + " frames", 662 this, c) { 663 public void run() throws Throwable { 664 Decoder decoder = new Decoder(c.name); 665 int queuedFrames = framesBeforeDrc + framesBeforeEos; 666 int framesA = 0; 667 int framesB = 0; 668 try { 669 decoder.configureAndStart(stepFormat(), stepSurface()); 670 Media media = c.mediaList[0]; 671 672 framesA = decoder.queueInputBufferRange( 673 media, 674 0 /* startFrame */, 675 framesBeforeDrc, 676 false /* sendEos */, 677 false /* expectEos */); 678 if (framesA < 0) { 679 warn("received unexpected EOS, received " + (-framesA) + " frames"); 680 } 681 long adjustTimeUs = 1 + media.getTimestampRangeValue( 682 0, 
framesBeforeDrc, Media.RANGE_END); 683 684 media = c.mediaList[1]; 685 framesB = decoder.queueInputBufferRange( 686 media, 687 0 /* startFrame */, 688 framesBeforeEos, 689 true /* sendEos */, 690 true /* expectEos */, 691 adjustTimeUs); 692 if (framesB >= 0) { 693 warn("did not receive EOS, received " + (-framesB) + " frames"); 694 } 695 decoder.stop(); 696 warn(decoder.getWarnings()); 697 } finally { 698 int decodedFrames = Math.abs(framesA) + Math.abs(framesB); 699 if (decodedFrames != queuedFrames) { 700 warn("decoded " + decodedFrames + " frames out of " + queuedFrames + 701 " queued"); 702 } 703 decoder.release(); 704 } 705 } 706 }; 707 } addTests(TestList tests, Codec c)708 public void addTests(TestList tests, Codec c) { 709 for (int drcFrame = 6; drcFrame >= MIN_FRAMES_BEFORE_DRC; drcFrame--) { 710 for (int eosFrame = 6; eosFrame >= 1; eosFrame--) { 711 tests.add(testStep(c, drcFrame, eosFrame)); 712 } 713 } 714 } 715 }; 716 717 /** 718 * Similar to AdaptiveDrcTest, but tests that PTS can change at adaptive boundaries both 719 * forward and backward without the need to flush. 
 */
class AdaptiveSkipTest extends ActivityTest {
    // Direction of the PTS jump between segments: true => timestamps move forward.
    boolean forward;
    public AdaptiveSkipTest(boolean fwd) {
        forward = fwd;
        adaptive();  // this test requires an adaptive output format
    }
    public boolean isValid(Codec c) {
        checkAdaptiveFormat();
        return c.adaptive;
    }
    Decoder mDecoder;       // decoder shared by all steps of one test run
    int mAdjustTimeUs = 0;  // PTS offset applied to the next queued segment
    int mDecodedFrames = 0; // frames decoded so far across all segments
    int mQueuedFrames = 0;  // frames queued so far across all segments
    public void addTests(TestList tests, final Codec c) {
        tests.add(
            new Step("testing flushless skipping - init", this, c) {
                public void run() throws Throwable {
                    mDecoder = new Decoder(c.name);
                    mDecoder.configureAndStart(stepFormat(), stepSurface());
                    mAdjustTimeUs = 0;
                    mDecodedFrames = 0;
                    mQueuedFrames = 0;
                }});

        for (int i = 2, ix = 0; i <= NUM_FRAMES; i++, ix++) {
            // NOTE(review): mediaIx is computed but never referenced below; the Step
            // is constructed with the default media index. TODO confirm intent.
            final int mediaIx = ix % c.mediaList.length;
            final int segmentSize = i;
            final boolean lastSequence;
            if (verify) {
                lastSequence = (segmentSize << 1) + 1 > NUM_FRAMES;
            } else {
                lastSequence = segmentSize >= NUM_FRAMES;
            }
            tests.add(
                new Step("testing flushless skipping " + (forward ? "forward" : "backward") +
                        " after " + i + " frames", this, c) {
                    public void run() throws Throwable {
                        // Queue one segment without flushing; only the last segment
                        // sends and expects EOS.
                        int frames = mDecoder.queueInputBufferRange(
                                stepMedia(),
                                0 /* startFrame */,
                                segmentSize,
                                lastSequence /* sendEos */,
                                lastSequence /* expectEos */,
                                mAdjustTimeUs);
                        if (lastSequence && frames >= 0) {
                            warn("did not receive EOS, received " + frames + " frames");
                        } else if (!lastSequence && frames < 0) {
                            warn("received unexpected EOS, received " + (-frames) + " frames");
                        }
                        warn(mDecoder.getWarnings());
                        mDecoder.clearWarnings();

                        mQueuedFrames += segmentSize;
                        mDecodedFrames += Math.abs(frames);
                        if (forward) {
                            // jump forward by 10 seconds plus the duration of the
                            // segment just queued
                            mAdjustTimeUs += 10000000 + stepMedia().getTimestampRangeValue(
                                    0, segmentSize, Media.RANGE_DURATION);
                        }
                    }});
            if (verify) {
                i <<= 1;  // in verify mode, only test doubling segment sizes
            }
        }

        tests.add(
            new Step("testing flushless skipping - finally", this, c) {
                public void run() throws Throwable {
                    if (mDecodedFrames != mQueuedFrames) {
                        warn("decoded " + mDecodedFrames + " frames out of " + mQueuedFrames +
                                " queued");
                    }
                    try {
                        mDecoder.stop();
                    } finally {
                        mDecoder.release();
                    }
                }});
    }
};

// Computes the CRC32 of the first |size| bytes of |buf| without disturbing the
// buffer's position. (Earlier "not yet used" comment was stale: this is called
// from Decoder.dequeueAndReleaseOutputBuffer and DecoderSurface.checksum.)
static long checksum(ByteBuffer buf, int size, CRC32 crc) {
    assertTrue(size >= 0);
    assertTrue(size <= buf.capacity());
    crc.reset();
    if (buf.hasArray()) {
        // fast path: feed the backing array directly
        crc.update(buf.array(), buf.arrayOffset(), size);
    } else {
        // slow path: copy through a small chunk buffer, starting from position 0,
        // then restore the caller's position
        int pos = buf.position();
        buf.rewind();
        final int rdsize = Math.min(4096, size);
        byte bb[] = new byte[rdsize];
        int chk;
        for (int i = 0; i < size; i += chk) {
            chk = Math.min(rdsize, size - i);
            buf.get(bb, 0, chk);
            crc.update(bb, 0, chk);
        }
        buf.position(pos);
    }
    return crc.getValue();
}

// Shared CRC32 instance, (re)created in setUp() for each test.
CRC32 mCRC;

@Override
protected void setUp() throws Exception {
    super.setUp();
    mCRC = new CRC32();
}

/* 
====================================================================== */
/* UTILITY FUNCTIONS */
/* ====================================================================== */

// Renders the first |len| bytes of |buf| (reading from offset |start|) as a
// hex string of the form {xx,xx,...}; an ellipsis is shown inside the braces
// when the buffer is longer than |len|. Restores the buffer's position before
// returning.
// NOTE(review): the truncation check compares len against buf.limit(), not
// against the bytes remaining after |start|; all visible callers pass
// start == 0 — confirm before reusing with a nonzero start.
static String byteBufferToString(ByteBuffer buf, int start, int len) {
    int oldPosition = buf.position();
    buf.position(start);
    int strlen = 2; // {}
    boolean ellipsis = len < buf.limit();
    if (ellipsis) {
        strlen += 3; // ...
    } else {
        len = buf.limit();
    }
    strlen += 3 * len - (len > 0 ? 1 : 0); // XX,XX
    char[] res = new char[strlen];
    res[0] = '{';
    res[strlen - 1] = '}';
    if (ellipsis) {
        res[strlen - 2] = res[strlen - 3] = res[strlen - 4] = '.';
    }
    // comma separators between byte positions
    for (int i = 1; i < len; i++) {
        res[i * 3] = ',';
    }
    // two lowercase hex digits per byte
    for (int i = 0; i < len; i++) {
        byte b = buf.get();
        int d = (b >> 4) & 15;
        res[i * 3 + 1] = (char)(d + (d > 9 ? 'a' - 10 : '0'));
        d = (b & 15);
        res[i * 3 + 2] = (char)(d + (d > 9 ? 'a' - 10 : '0'));
    }
    buf.position(oldPosition);
    return new String(res);
}

// Concatenates several iterables into a single one (materialized eagerly).
static <E> Iterable<E> chain(Iterable<E> ...
iterables) {
    /* simple chainer using ArrayList */
    ArrayList<E> items = new ArrayList<E>();
    for (Iterable<E> it: iterables) {
        for (E el: it) {
            items.add(el);
        }
    }
    return items;
}

// Wrapper around a MediaCodec video decoder that tracks queued/rendered
// timestamps, collects warnings, and verifies render notifications.
class Decoder implements MediaCodec.OnFrameRenderedListener {
    private final static String TAG = "AdaptiveDecoder";
    final long kTimeOutUs = 5000;        // dequeue timeout for normal buffers (us)
    final long kCSDTimeOutUs = 1000000;  // dequeue timeout when queueing CSD (us)
    MediaCodec mCodec;
    ByteBuffer[] mInputBuffers;
    ByteBuffer[] mOutputBuffers;
    TestSurface mSurface;
    boolean mDoChecksum;             // true when output color format is checksummable
    boolean mQueuedEos;              // set once an EOS input has been queued
    ArrayList<Long> mTimeStamps;     // PTS of frames queued but not yet output
    ArrayList<String> mWarnings;
    Vector<Long> mRenderedTimeStamps; // using Vector as it is implicitly synchronized
    long mLastRenderNanoTime;        // render-notification times must be increasing
    int mFramesNotifiedRendered;

    // Creates and wraps the named codec; throws RuntimeException on failure.
    public Decoder(String codecName) {
        MediaCodec codec = null;
        try {
            codec = MediaCodec.createByCodecName(codecName);
        } catch (Exception e) {
            throw new RuntimeException("couldn't create codec " + codecName, e);
        }
        Log.i(TAG, "using codec: " + codec.getName());
        mCodec = codec;
        mDoChecksum = false;
        mQueuedEos = false;
        mTimeStamps = new ArrayList<Long>();
        mWarnings = new ArrayList<String>();
        mRenderedTimeStamps = new Vector<Long>();
        mLastRenderNanoTime = System.nanoTime();
        mFramesNotifiedRendered = 0;

        codec.setOnFrameRenderedListener(this, null);
    }

    // Render callback: checks that the rendered PTS was one we released for
    // render, and that notification times are monotonic and recent.
    public void onFrameRendered(MediaCodec codec, long presentationTimeUs, long nanoTime) {
        final long NSECS_IN_1SEC = 1000000000;
        if (!mRenderedTimeStamps.remove(presentationTimeUs)) {
            warn("invalid timestamp " + presentationTimeUs + ", queued " +
                    mRenderedTimeStamps);
        }
        assert nanoTime > mLastRenderNanoTime;
        mLastRenderNanoTime = nanoTime;
        ++mFramesNotifiedRendered;
        // notification must arrive within a second of the render
        assert nanoTime > System.nanoTime() - NSECS_IN_1SEC;
    }

    public String getName() {
        return mCodec.getName();
    }

    // Warnings accumulated since the last clearWarnings().
    public Iterable<String> getWarnings() {
        return mWarnings;
    }

    private void warn(String warning) {
        mWarnings.add(warning);
        Log.w(TAG, warning);
    }

    public void clearWarnings() {
        mWarnings.clear();
    }

    // Configures and starts the codec on |surface|, resets per-run state, and
    // exercises a minimal setOutputSurface sanity check.
    public void configureAndStart(MediaFormat format, TestSurface surface) {
        mSurface = surface;
        Log.i(TAG, "configure(" + format + ", " + mSurface.getSurface() + ")");
        mCodec.configure(format, mSurface.getSurface(), null /* crypto */, 0 /* flags */);
        Log.i(TAG, "start");
        mCodec.start();

        // inject some minimal setOutputSurface test
        // TODO: change this test to also change the surface midstream
        try {
            mCodec.setOutputSurface(null);
            fail("should not be able to set surface to NULL");
        } catch (IllegalArgumentException e) {}
        mCodec.setOutputSurface(mSurface.getSurface());

        mInputBuffers = mCodec.getInputBuffers();
        mOutputBuffers = mCodec.getOutputBuffers();
        Log.i(TAG, "configured " + mInputBuffers.length + " input[" +
                mInputBuffers[0].capacity() + "] and " +
                mOutputBuffers.length + "output[" +
                (mOutputBuffers[0] == null ? null : mOutputBuffers[0].capacity()) + "]");
        mQueuedEos = false;
        mRenderedTimeStamps.clear();
        mLastRenderNanoTime = System.nanoTime();
        mFramesNotifiedRendered = 0;
    }

    // Stops the codec; fails if many frames were released for render but no
    // render notification ever arrived.
    public void stop() {
        Log.i(TAG, "stop");
        mCodec.stop();
        // if we have queued 32 frames or more, at least one should have been notified
        // to have rendered.
        if (mRenderedTimeStamps.size() > 32 && mFramesNotifiedRendered == 0) {
            fail("rendered " + mRenderedTimeStamps.size() +
                    " frames, but none have been notified.");
        }
    }

    // Flushes the codec and resets EOS/timestamp bookkeeping.
    public void flush() {
        Log.i(TAG, "flush");
        mCodec.flush();
        mQueuedEos = false;
        mTimeStamps.clear();
    }

    // Dequeues one output buffer (if any), releases it (rendering nonzero-sized
    // frames), and returns a descriptive string, or null if no frame was
    // dequeued (timeout / format change / buffers changed).
    public String dequeueAndReleaseOutputBuffer(MediaCodec.BufferInfo info) {
        int ix = mCodec.dequeueOutputBuffer(info, kTimeOutUs);
        if (ix == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            mOutputBuffers = mCodec.getOutputBuffers();
            Log.d(TAG, "output buffers have changed.");
            return null;
        } else if (ix == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            MediaFormat format = mCodec.getOutputFormat();
            Log.d(TAG, "output format has changed to " + format);
            int colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
            // only checksum color formats we know how to interpret
            mDoChecksum = isRecognizedFormat(colorFormat);
            return null;
        } else if (ix < 0) {
            Log.v(TAG, "no output");
            return null;
        }
        /* create checksum */
        long sum = 0;


        Log.v(TAG, "dequeue #" + ix + " => { [" + info.size + "] flags=" + info.flags +
                " @" + info.presentationTimeUs + "}");

        // we get a nonzero size for valid decoded frames
        boolean doRender = (info.size != 0);
        if (mSurface.getSurface() == null) {
            // ByteBuffer mode: checksum the raw output buffer when possible
            if (mDoChecksum) {
                sum = checksum(mOutputBuffers[ix], info.size, mCRC);
            }
            mCodec.releaseOutputBuffer(ix, doRender);
        } else if (doRender) {
            // If using SurfaceTexture, as soon as we call releaseOutputBuffer, the
            // buffer will be forwarded to SurfaceTexture to convert to a texture.
            // The API doesn't guarantee that the texture will be available before
            // the call returns, so we need to wait for the onFrameAvailable callback
            // to fire. If we don't wait, we risk dropping frames.
            mSurface.prepare();
            mCodec.releaseOutputBuffer(ix, doRender);
            mSurface.waitForDraw();
            if (mDoChecksum) {
                sum = mSurface.checksum();
            }
        } else {
            mCodec.releaseOutputBuffer(ix, doRender);
        }

        if (doRender) {
            mRenderedTimeStamps.add(info.presentationTimeUs);
            // every output PTS must match a previously queued input PTS
            if (!mTimeStamps.remove(info.presentationTimeUs)) {
                warn("invalid timestamp " + info.presentationTimeUs + ", queued " +
                        mTimeStamps);
            }
        }

        return String.format(Locale.US, "{pts=%d, flags=%x, data=0x%x}",
                info.presentationTimeUs, info.flags, sum);
    }

    /* returns true iff queued a frame */
    public boolean queueInputBuffer(Media media, int frameIx, boolean EOS) {
        return queueInputBuffer(media, frameIx, EOS, 0);
    }

    // Queues frame |frameIx| of |media| with its PTS shifted by |adjustTimeUs|.
    // A null frame (index out of range) queues an empty buffer, which still
    // carries EOS when requested. Returns false if EOS was already queued or no
    // input buffer became available within the timeout.
    public boolean queueInputBuffer(Media media, int frameIx, boolean EOS, long adjustTimeUs) {
        if (mQueuedEos) {
            return false;
        }

        int ix = mCodec.dequeueInputBuffer(kTimeOutUs);

        if (ix < 0) {
            return false;
        }

        ByteBuffer buf = mInputBuffers[ix];
        Media.Frame frame = media.getFrame(frameIx);
        buf.clear();

        long presentationTimeUs = adjustTimeUs;
        int flags = 0;
        if (frame != null) {
            buf.put((ByteBuffer)frame.buf.clear());
            presentationTimeUs += frame.presentationTimeUs;
            flags = frame.flags;
        }

        if (EOS) {
            flags |= MediaCodec.BUFFER_FLAG_END_OF_STREAM;
            mQueuedEos = true;
        }

        // remember the (adjusted) PTS so output can be validated against it
        mTimeStamps.add(presentationTimeUs);
        Log.v(TAG, "queue { [" + buf.position() + "]=" + byteBufferToString(buf, 0, 16) +
                " flags=" + flags + " @" + presentationTimeUs + "} => #" + ix);
        mCodec.queueInputBuffer(
                ix, 0 /* offset */, buf.position(), presentationTimeUs, flags);
        return true;
    }

    /* returns number of frames received multiplied by -1 if received EOS, 1 otherwise */
    public int queueInputBufferRange(
            Media media, int frameStartIx, int
frameEndIx, boolean sendEosAtEnd,
            boolean waitForEos) {
        // convenience overload with no PTS adjustment
        return queueInputBufferRange(media,frameStartIx,frameEndIx,sendEosAtEnd,waitForEos,0);
    }

    // Queues all codec-specific data ("csd-N") buffers from |format| as
    // CODEC_CONFIG input buffers.
    public void queueCSD(MediaFormat format) {
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        for (int csdIx = 0; ; ++csdIx) {
            ByteBuffer csdBuf = format.getByteBuffer("csd-" + csdIx);
            if (csdBuf == null) {
                break;
            }

            int ix = mCodec.dequeueInputBuffer(kCSDTimeOutUs);
            if (ix < 0) {
                fail("Could not dequeue input buffer for CSD #" + csdIx);
                return;
            }

            ByteBuffer buf = mInputBuffers[ix];
            buf.clear();
            buf.put((ByteBuffer)csdBuf.clear());
            Log.v(TAG, "queue-CSD { [" + buf.position() + "]=" +
                    byteBufferToString(buf, 0, 16) + "} => #" + ix);
            mCodec.queueInputBuffer(
                    ix, 0 /* offset */, buf.position(), 0 /* timeUs */,
                    MediaCodec.BUFFER_FLAG_CODEC_CONFIG);
        }
    }

    // Queues frames [frameStartIx, frameEndIx) with PTS shifted by
    // |adjustTimeUs|, draining output as it goes. If |waitForEos|, keeps
    // draining until output EOS (or until ~100 empty dequeues in a row).
    // Returns the number of decoded frames, negated if output EOS was seen.
    public int queueInputBufferRange(
            Media media, int frameStartIx, int frameEndIx, boolean sendEosAtEnd,
            boolean waitForEos, long adjustTimeUs) {
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int frameIx = frameStartIx;
        int numFramesDecoded = 0;
        boolean sawOutputEos = false;
        int deadDecoderCounter = 0;  // consecutive iterations without output
        ArrayList<String> frames = new ArrayList<String>();  // NOTE(review): unused
        String buf = null;
        // After all input buffers are queued, dequeue as many output buffers as possible.
        while ((waitForEos && !sawOutputEos) || frameIx < frameEndIx || buf != null) {
            if (frameIx < frameEndIx) {
                if (queueInputBuffer(
                        media,
                        frameIx,
                        sendEosAtEnd && (frameIx + 1 == frameEndIx),
                        adjustTimeUs)) {
                    frameIx++;
                }
            }

            buf = dequeueAndReleaseOutputBuffer(info);
            if (buf != null) {
                // Some decoders output a 0-sized buffer at the end. Disregard those.
                if (info.size > 0) {
                    deadDecoderCounter = 0;
                    numFramesDecoded++;
                }

                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    Log.d(TAG, "saw output EOS.");
                    sawOutputEos = true;
                }
            }
            if (++deadDecoderCounter >= 100) {
                warn("have not received an output frame for a while");
                break;
            }
        }

        // allow up to 16 dropped frames before declaring failure
        if (numFramesDecoded < frameEndIx - frameStartIx - 16) {
            fail("Queued " + (frameEndIx - frameStartIx) + " frames but only received " +
                    numFramesDecoded);
        }
        return (sawOutputEos ? -1 : 1) * numFramesDecoded;
    }

    // Releases the codec and surface; propagates any exception.
    void release() {
        Log.i(TAG, "release");
        mCodec.release();
        mSurface.release();
        mInputBuffers = null;
        mOutputBuffers = null;
        mCodec = null;
        mSurface = null;
    }

    // don't fail on exceptions in release()
    void releaseQuietly() {
        try {
            Log.i(TAG, "release");
            mCodec.release();
        } catch (Throwable e) {
            Log.e(TAG, "Exception while releasing codec", e);
        }
        mSurface.release();
        mInputBuffers = null;
        mOutputBuffers = null;
        mCodec = null;
        mSurface = null;
    }
};

/* from EncodeDecodeTest */
private static boolean isRecognizedFormat(int colorFormat) {
    switch (colorFormat) {
        // these are the formats we know how to handle for this test
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
        case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
            return true;
        default:
            return false;
    }
}

// Decodes up to |eosframe| frames of |media| on |codecName| and returns the
// number of frames actually decoded.
private int countFrames(
        String codecName, MediaCodecInfo codecInfo, Media media, int eosframe, TestSurface s)
        throws Exception {
    Decoder codec = new
Decoder(codecName);
    codec.configureAndStart(media.getFormat(), s /* surface */);

    int numframes = codec.queueInputBufferRange(
            media, 0, eosframe, true /* sendEos */, true /* waitForEos */);
    if (numframes >= 0) {
        Log.w(TAG, "Did not receive EOS");
    } else {
        numframes *= -1;  // negative means EOS was received; report magnitude
    }

    codec.stop();
    codec.release();
    return numframes;
}
} // end of AdaptivePlaybackTest

/* ====================================================================== */
/* Video Media Asset */
/* ====================================================================== */
// In-memory copy of a short video asset: its format plus up to numFrames
// pre-extracted sample buffers (CSD prepended to the first sample).
class Media {
    private final static String TAG = "AdaptiveMedia";
    private MediaFormat mFormat;
    private MediaFormat mAdaptiveFormat;
    // One extracted access unit: PTS, sample flags and the sample bytes.
    static class Frame {
        long presentationTimeUs;
        int flags;
        ByteBuffer buf;
        public Frame(long _pts, int _flags, ByteBuffer _buf) {
            presentationTimeUs = _pts;
            flags = _flags;
            buf = _buf;
        }
    };
    private Frame[] mFrames;

    public Frame getFrame(int ix) {
        /* this works even on short sample as frame is allocated as null */
        if (ix >= 0 && ix < mFrames.length) {
            return mFrames[ix];
        }
        return null;
    }
    private Media(MediaFormat format, MediaFormat adaptiveFormat, int numFrames) {
        /* need separate copies of format as once we add adaptive flags to
           MediaFormat, we cannot remove them */
        mFormat = format;
        mAdaptiveFormat = adaptiveFormat;
        mFrames = new Frame[numFrames];
    }

    public MediaFormat getFormat() {
        return mFormat;
    }

    // Returns a copy of |orig| with the csd-* buffers dropped, retaining only
    // the basic video keys listed below when present.
    public static MediaFormat removeCSD(MediaFormat orig) {
        MediaFormat copy = MediaFormat.createVideoFormat(
                orig.getString(orig.KEY_MIME),
                orig.getInteger(orig.KEY_WIDTH), orig.getInteger(orig.KEY_HEIGHT));
        for (String k : new String[] {
                orig.KEY_FRAME_RATE, orig.KEY_MAX_WIDTH, orig.KEY_MAX_HEIGHT,
                orig.KEY_MAX_INPUT_SIZE
        }) {
            if (orig.containsKey(k)) {
                try {
                    copy.setInteger(k, orig.getInteger(k));
                } catch (ClassCastException e) {
                    try {
                        copy.setFloat(k, orig.getFloat(k));
                    } catch (ClassCastException e2) {
                        // Could not copy value. Don't fail here, as having non-standard
                        // value types for defined keys is permissible by the media API
                        // for optional keys.
                    }
                }
            }
        }
        return copy;
    }

    // Mutates and returns the shared adaptive format with the given maximums.
    public MediaFormat getAdaptiveFormat(int width, int height, int maxInputSize) {
        mAdaptiveFormat.setInteger(MediaFormat.KEY_MAX_WIDTH, width);
        mAdaptiveFormat.setInteger(MediaFormat.KEY_MAX_HEIGHT, height);
        mAdaptiveFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, maxInputSize);
        return mAdaptiveFormat;
    }

    public String getMime() {
        return mFormat.getString(MediaFormat.KEY_MIME);
    }

    public int getMaxInputSize() {
        return mFormat.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
    }

    public void setMaxInputSize(int maxInputSize) {
        mFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, maxInputSize);
    }

    public int getWidth() {
        return mFormat.getInteger(MediaFormat.KEY_WIDTH);
    }

    public int getHeight() {
        return mFormat.getInteger(MediaFormat.KEY_HEIGHT);
    }

    // Selectors for getTimestampRangeValue().
    public final static int RANGE_START = 0;
    public final static int RANGE_END = 1;
    public final static int RANGE_DURATION = 2;

    // Returns min, max, or max-min of the PTS over frames
    // [frameStartIx, frameEndIx); missing frames are skipped.
    public long getTimestampRangeValue(int frameStartIx, int frameEndIx, int kind) {
        long min = Long.MAX_VALUE, max = Long.MIN_VALUE;
        for (int frameIx = frameStartIx; frameIx < frameEndIx; frameIx++) {
            Frame frame = getFrame(frameIx);
            if (frame != null) {
                if (min > frame.presentationTimeUs) {
                    min = frame.presentationTimeUs;
                }
                if (max < frame.presentationTimeUs) {
                    max = frame.presentationTimeUs;
                }
            }
        }
        if (kind == RANGE_START) {
            return min;
        } else if (kind == RANGE_END) {
            return max;
        } else if (kind == RANGE_DURATION) {
            return max - min;
        } else {
            throw new IllegalArgumentException("kind is not valid: " + kind);
        }
    }

    // Extracts the first |numFrames| samples of raw resource |video| into
    // memory, prepending any csd-* buffers to the first sample. Throws
    // IllegalArgumentException if the asset has fewer frames.
    public static Media read(Context context, int video, int numFrames)
            throws java.io.IOException {
        MediaExtractor extractor = new MediaExtractor();
        AssetFileDescriptor testFd = context.getResources().openRawResourceFd(video);
        extractor.setDataSource(testFd.getFileDescriptor(), testFd.getStartOffset(),
                testFd.getLength());

        Media media = new Media(
                extractor.getTrackFormat(0), extractor.getTrackFormat(0), numFrames);
        extractor.selectTrack(0);

        Log.i(TAG, "format=" + media.getFormat());
        ArrayList<ByteBuffer> csds = new ArrayList<ByteBuffer>();
        for (String tag: new String[] { "csd-0", "csd-1" }) {
            if (media.getFormat().containsKey(tag)) {
                ByteBuffer csd = media.getFormat().getByteBuffer(tag);
                Log.i(TAG, tag + "=" + AdaptivePlaybackTest.byteBufferToString(csd, 0, 16));
                csds.add(csd);
            }
        }

        int maxInputSize = 0;
        ByteBuffer readBuf = ByteBuffer.allocate(2000000);
        for (int ix = 0; ix < numFrames; ix++) {
            int sampleSize = extractor.readSampleData(readBuf, 0 /* offset */);

            if (sampleSize < 0) {
                throw new IllegalArgumentException("media is too short at " + ix + " frames");
            } else {
                readBuf.position(0).limit(sampleSize);
                // grow sampleSize by the CSD being prepended (first frame only —
                // csds is cleared below)
                for (ByteBuffer csd: csds) {
                    sampleSize += csd.capacity();
                }

                if (maxInputSize < sampleSize) {
                    maxInputSize = sampleSize;
                }

                ByteBuffer buf = ByteBuffer.allocate(sampleSize);
                for (ByteBuffer csd: csds) {
                    csd.clear();
                    buf.put(csd);
                    csd.clear();
                    Log.i(TAG, "csd[" + csd.capacity() + "]");
                }
                Log.i(TAG, "frame-" + ix + "[" + sampleSize + "]");
                csds.clear();
                buf.put(readBuf);
                media.mFrames[ix] = new Frame(
                        extractor.getSampleTime(),
extractor.getSampleFlags(),
                        buf);
                extractor.advance();
            }
        }
        extractor.release();
        testFd.close();

        /* Override MAX_INPUT_SIZE in format, as CSD is being combined
         * with one of the input buffers */
        media.setMaxInputSize(maxInputSize);
        return media;
    }
}

/* ====================================================================== */
/* Codec, CodecList and CodecFactory */
/* ====================================================================== */
// A decoder under test: its name, capabilities, the media it supports, and
// whether it advertises adaptive playback.
class Codec {
    private final static String TAG = "AdaptiveCodec";

    public String name;
    public CodecCapabilities capabilities;
    public Media[] mediaList;
    public boolean adaptive;
    public boolean vendor;
    public Codec(MediaCodecInfo info, CodecCapabilities c, Media[] m) {
        name = info.getName();
        capabilities = c;
        List<Media> medias = new ArrayList<Media>();

        if (capabilities == null) {
            // no capabilities available: assume non-adaptive vendor codec
            adaptive = false;
            vendor = true;
        } else {
            Log.w(TAG, "checking capabilities of " + name + " for " + m[0].getMime());
            adaptive = capabilities.isFeatureSupported(CodecCapabilities.FEATURE_AdaptivePlayback);
            vendor = info.isVendor();
            for (Media media : m) {
                if (media.getHeight() >= 720 &&
                        !capabilities.isFormatSupported(media.getFormat())) {
                    // skip if 720p and up is unsupported
                    Log.w(TAG, "codec " + name + " doesn't support " + media.getFormat());
                    continue;
                }
                medias.add(media);
            }
        }

        // NOTE(review): if fewer than 2 media remain, subList(0, 2) below throws
        // after this log line — presumably intentional hard failure; confirm.
        if (medias.size() < 2) {
            Log.e(TAG, "codec " + name + " doesn't support required resolutions");
        }
        mediaList = medias.subList(0, 2).toArray(new Media[2]);
    }
}

class CodecList extends ArrayList<Codec> { };

/* all codecs of mime, plus named codec if exists */
class CodecFamily extends CodecList {
    private final static String TAG = "AdaptiveCodecFamily";
    private static final int
NUM_FRAMES = AdaptivePlaybackTest.NUM_FRAMES;

    // Reads all media resources and collects every regular (non-alias)
    // decoder that supports |mime|.
    public CodecFamily(Context context, String mime, int ... resources) {
        try {
            /* read all media */
            Media[] mediaList = new Media[resources.length];
            for (int i = 0; i < resources.length; i++) {
                Log.v(TAG, "reading media " + resources[i]);
                Media media = Media.read(context, resources[i], NUM_FRAMES);
                assert media.getMime().equals(mime):
                        "test stream " + resources[i] + " has " + media.getMime() +
                        " mime type instead of " + mime;

                /* assuming the first timestamp is the smallest */
                long firstPTS = media.getFrame(0).presentationTimeUs;
                long smallestPTS = media.getTimestampRangeValue(0, NUM_FRAMES, Media.RANGE_START);

                assert firstPTS == smallestPTS:
                        "first frame timestamp (" + firstPTS + ") is not smallest (" +
                        smallestPTS + ")";

                mediaList[i] = media;
            }

            /* enumerate codecs */
            MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
            for (MediaCodecInfo codecInfo : mcl.getCodecInfos()) {
                if (codecInfo.isAlias()) {
                    continue;
                }
                if (codecInfo.isEncoder()) {
                    continue;
                }
                for (String type : codecInfo.getSupportedTypes()) {
                    if (type.equals(mime)) {
                        add(new Codec(
                                codecInfo,
                                codecInfo.getCapabilitiesForType(mime),
                                mediaList));
                        break;
                    }
                }
            }
        } catch (Throwable t) {
            // surface setup failures loudly: media or codec enumeration failed
            Log.wtf("Constructor failed", t);
            throw new RuntimeException("constructor failed", t);
        }
    }
}

/* codecs of mime filtered to either Google (isGoogle == true) or vendor
   (isGoogle == false) implementations. (Earlier comment "except named codec"
   was stale — the filter is on Codec.vendor.) */
class CodecFamilySpecific extends CodecList {
    public CodecFamilySpecific(
            Context context, String mime, boolean isGoogle, int ...
resources) {
        for (Codec c: new CodecFamily(context, mime, resources)) {
            // keep codecs matching the requested Google/vendor split
            if (!c.vendor == isGoogle) {
                add(c);
            }
        }
    }
}

// Factory for the full codec list of a mime type.
class CodecFactory {
    public CodecList createCodecList(
            Context context, String mime, int ...resources) {
        return new CodecFamily(context, mime, resources);
    }
}

// Factory restricted to Google (software) codecs.
class SWCodecFactory extends CodecFactory {
    public CodecList createCodecList(
            Context context, String mime, int ...resources) {
        return new CodecFamilySpecific(context, mime, true, resources);
    }
}

// Factory restricted to vendor (usually hardware) codecs.
class HWCodecFactory extends CodecFactory {
    public CodecList createCodecList(
            Context context, String mime, int ...resources) {
        return new CodecFamilySpecific(context, mime, false, resources);
    }
}

/* ====================================================================== */
/* Test Steps, Test (Case)s, and Test List */
/* ====================================================================== */
// Runs a Step on another thread, capturing any Throwable for rethrow on the
// calling thread.
class StepRunner implements Runnable {
    public StepRunner(Step s) {
        mStep = s;
        mThrowed = null;
    }
    public void run() {
        try {
            mStep.run();
        } catch (Throwable e) {
            mThrowed = e;
        }
    }
    public void throwThrowed() throws Throwable {
        if (mThrowed != null) {
            throw mThrowed;
        }
    }
    private Throwable mThrowed;
    private Step mStep;
}

// Ordered list of Steps; run() executes all of them, tallying warnings and
// failures, and rethrows the last failure at the end.
class TestList extends ArrayList<Step> {
    private final static String TAG = "AdaptiveTestList";
    public void run() throws Throwable {
        Throwable res = null;
        for (Step step: this) {
            try {
                Log.i(TAG, step.getDescription());
                if (step.stepSurface().needsToRunInSeparateThread()) {
                    // e.g. GL-bound surfaces need a dedicated thread per step
                    StepRunner runner = new StepRunner(step);
                    Thread th = new Thread(runner, "stepWrapper");
                    th.start();
                    th.join();
                    runner.throwThrowed();
                } else {
                    step.run();
                }
            } catch (Throwable e) {
Log.e(TAG, "while " + step.getDescription(), e);
                res = e;
                mFailedSteps++;
            } finally {
                mWarnings += step.getWarnings();
            }
        }
        if (res != null) {
            // rethrow the last failure, annotated with overall counts
            throw new RuntimeException(
                    mFailedSteps + " failed steps, " + mWarnings + " warnings",
                    res);
        }
    }
    public int getWarnings() {
        return mWarnings;
    }
    public int getFailures() {
        return mFailedSteps;
    }
    private int mFailedSteps;
    private int mWarnings;
}

// Base class for a test case: holds the format mode (regular/adaptive) and
// output mode (surface / surface-texture / byte-buffer), and builds Steps.
abstract class Test {
    public static final int FORMAT_ADAPTIVE_LARGEST = 1;
    public static final int FORMAT_ADAPTIVE_FIRST = 2;
    public static final int FORMAT_REGULAR = 3;

    protected int mFormatType;
    protected boolean mUseSurface;
    protected boolean mUseSurfaceTexture;

    public Test() {
        mFormatType = FORMAT_REGULAR;
        mUseSurface = true;
        mUseSurfaceTexture = false;
    }

    // Fluent configuration: adaptive format sized to the largest media.
    public Test adaptive() {
        mFormatType = FORMAT_ADAPTIVE_LARGEST;
        return this;
    }

    // Fluent configuration: adaptive format sized to the first media.
    public Test adaptiveSmall() {
        mFormatType = FORMAT_ADAPTIVE_FIRST;
        return this;
    }

    // Fluent configuration: decode to byte buffers (no surface).
    public Test byteBuffer() {
        mUseSurface = false;
        mUseSurfaceTexture = false;
        return this;
    }

    // Fluent configuration: decode to a SurfaceTexture.
    public Test texture() {
        mUseSurface = false;
        mUseSurfaceTexture = true;
        return this;
    }

    public void checkAdaptiveFormat() {
        assert mFormatType != FORMAT_REGULAR:
                "must be used with adaptive format";
    }

    abstract protected TestSurface getSurface();

    /* TRICKY: format is updated in each test run as we are actually reusing the
       same 2 MediaFormat objects returned from MediaExtractor. Therefore,
       format must be explicitly obtained in each test step.

       returns null if codec does not support the format.
1653 */ 1654 protected MediaFormat getFormat(Codec c) { 1655 return getFormat(c, 0); 1656 } 1657 1658 protected MediaFormat getFormat(Codec c, int i) { 1659 MediaFormat format = null; 1660 if (mFormatType == FORMAT_REGULAR) { 1661 format = c.mediaList[i].getFormat(); 1662 } else if (mFormatType == FORMAT_ADAPTIVE_FIRST && c.adaptive) { 1663 format = c.mediaList[i].getAdaptiveFormat( 1664 c.mediaList[i].getWidth(), c.mediaList[i].getHeight(), c.mediaList[i].getMaxInputSize()); 1665 for (Media media : c.mediaList) { 1666 /* get the largest max input size for all media and use that */ 1667 if (media.getMaxInputSize() > format.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE)) { 1668 format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, media.getMaxInputSize()); 1669 } 1670 } 1671 } else if (mFormatType == FORMAT_ADAPTIVE_LARGEST && c.adaptive) { 1672 /* update adaptive format to max size used */ 1673 format = c.mediaList[i].getAdaptiveFormat(0, 0, 0); 1674 for (Media media : c.mediaList) { 1675 /* get the largest width, and the largest height independently */ 1676 if (media.getWidth() > format.getInteger(MediaFormat.KEY_MAX_WIDTH)) { 1677 format.setInteger(MediaFormat.KEY_MAX_WIDTH, media.getWidth()); 1678 } 1679 if (media.getHeight() > format.getInteger(MediaFormat.KEY_MAX_HEIGHT)) { 1680 format.setInteger(MediaFormat.KEY_MAX_HEIGHT, media.getHeight()); 1681 } 1682 if (media.getMaxInputSize() > format.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE)) { 1683 format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, media.getMaxInputSize()); 1684 } 1685 } 1686 } 1687 return format; 1688 } 1689 1690 public boolean isValid(Codec c) { return true; } 1691 public abstract void addTests(TestList tests, Codec c); 1692 } 1693 1694 abstract class Step { 1695 private static final String TAG = "AdaptiveStep"; 1696 1697 public Step(String title, Test instance, Codec codec, Media media) { 1698 mTest = instance; 1699 mCodec = codec; 1700 mMedia = media; 1701 mDescription = title + " on " + 
stepSurface().getSurface() + " using " +
                mCodec.name + " and " + stepFormat();
    }
    public Step(String title, Test instance, Codec codec, int mediaIx) {
        this(title, instance, codec, codec.mediaList[mediaIx]);
    }
    public Step(String title, Test instance, Codec codec) {
        this(title, instance, codec, 0);
    }
    public Step(String description) {
        mDescription = description;
    }
    public Step() { }

    public abstract void run() throws Throwable;

    private String mDescription;
    private Test mTest;
    private Codec mCodec;
    private Media mMedia;
    private int mWarnings;

    /* TRICKY: use non-standard getter names so that we don't conflict with the getters
       in the Test classes, as most test Steps are defined as anonymous classes inside
       the test classes. */
    public MediaFormat stepFormat() {
        int ix = Arrays.asList(mCodec.mediaList).indexOf(mMedia);
        return mTest.getFormat(mCodec, ix);
    }

    public TestSurface stepSurface() {
        return mTest.getSurface();
    }

    public Media stepMedia() { return mMedia; }

    public String getDescription() { return mDescription; }
    public int getWarnings() { return mWarnings; }

    // warn() logs and counts; warnings are tallied by TestList.run().
    public void warn(String message) {
        Log.e(TAG, "WARNING: " + message + " in " + getDescription());
        mWarnings++;
    }
    public void warn(String message, Throwable t) {
        Log.e(TAG, "WARNING: " + message + " in " + getDescription(), t);
        mWarnings++;
    }
    public void warn(Iterable<String> warnings) {
        for (String warning: warnings) {
            warn(warning);
        }
    }
}

// Abstraction over the decoder's output target (activity surface, GL surface,
// or none) so Steps can render and checksum uniformly.
interface TestSurface {
    public Surface getSurface();
    public long checksum();
    public void release();
    public void prepare();          // prepare surface prior to render
    public void waitForDraw();      // wait for rendering to take place
    public boolean needsToRunInSeparateThread();
}

class
DecoderSurface extends OutputSurface implements TestSurface {
    private ByteBuffer mBuf;  // RGBA readback buffer (4 bytes per pixel)
    int mWidth;
    int mHeight;
    CRC32 mCRC;

    public DecoderSurface(int width, int height, CRC32 crc) {
        super(width, height);
        mWidth = width;
        mHeight = height;
        mCRC = crc;
        mBuf = ByteBuffer.allocateDirect(4 * width * height);
    }

    // Must make the GL context current before the frame is released for render.
    public void prepare() {
        makeCurrent();
    }

    // Blocks until the frame arrives on the SurfaceTexture, then draws it.
    public void waitForDraw() {
        awaitNewImage();
        drawImage();
    }

    // CRC32 over the rendered RGBA pixels read back via glReadPixels.
    public long checksum() {
        mBuf.position(0);
        GLES20.glReadPixels(0, 0, mWidth, mHeight, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, mBuf);
        mBuf.position(0);
        return AdaptivePlaybackTest.checksum(mBuf, mBuf.capacity(), mCRC);
    }

    public void release() {
        super.release();
        mBuf = null;
    }

    // GL work must happen on a dedicated thread per step.
    public boolean needsToRunInSeparateThread() {
        return true;
    }
}

// TestSurface backed by the test activity's reusable Surface; no GL, no
// checksum (always 0).
class ActivitySurface implements TestSurface {
    private Surface mSurface;
    public ActivitySurface(Surface s) {
        mSurface = s;
    }
    public Surface getSurface() {
        return mSurface;
    }
    public void prepare() { }
    public void waitForDraw() { }
    public long checksum() {
        return 0;
    }
    public void release() {
        // don't release activity surface, as it is reusable
    }
    public boolean needsToRunInSeparateThread() {
        return false;
    }
}