/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media.cts;

import android.app.Presentation;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.graphics.SurfaceTexture;
import android.graphics.drawable.ColorDrawable;
import android.hardware.display.DisplayManager;
import android.hardware.display.VirtualDisplay;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.media.cts.R;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.os.Bundle;
import android.os.Handler;
import android.os.IBinder;
import android.os.Looper;
import android.os.Message;
import android.os.Parcel;
import android.platform.test.annotations.RequiresDevice;
import android.test.AndroidTestCase;
import android.util.Log;
import android.util.Size;
import android.view.Display;
import android.view.Surface;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewGroup.LayoutParams;
import android.view.WindowManager;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.TableLayout;
import android.widget.TableRow;

import androidx.test.filters.SmallTest;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Tests to check if MediaCodec encoding works with composition of multiple virtual displays.
 * The test also tries to destroy and create virtual displays repeatedly to
 * detect any issues. The test itself does not check the output as it is already done in other
 * tests.
 */
@SmallTest
@RequiresDevice
public class EncodeVirtualDisplayWithCompositionTest extends AndroidTestCase {
    private static final String TAG = "EncodeVirtualDisplayWithCompositionTest";
    private static final boolean DBG = true;
    private static final String MIME_TYPE = MediaFormat.MIMETYPE_VIDEO_AVC;

    private static final long DEFAULT_WAIT_TIMEOUT_MS = 3000;
    private static final long DEFAULT_WAIT_TIMEOUT_US = 3000000;

    private static final int COLOR_RED = makeColor(100, 0, 0);
    private static final int COLOR_GREEN = makeColor(0, 100, 0);
    private static final int COLOR_BLUE = makeColor(0, 0, 100);
    private static final int COLOR_GREY = makeColor(100, 100, 100);

    private static final int BITRATE_1080p = 20000000;
    private static final int BITRATE_720p = 14000000;
    private static final int BITRATE_800x480 = 14000000;
    private static final int BITRATE_DEFAULT = 10000000;

    private static final int IFRAME_INTERVAL = 10;

    private static final int MAX_NUM_WINDOWS = 3;

    private static Handler sHandlerForRunOnMain = new Handler(Looper.getMainLooper());

    private Surface mEncodingSurface;
    private OutputSurface mDecodingSurface;
    private volatile boolean mCodecConfigReceived = false;
    private volatile boolean mCodecBufferReceived = false;
    private EncodingHelper mEncodingHelper;
    private MediaCodec mDecoder;
    private final ByteBuffer mPixelBuf = ByteBuffer.allocateDirect(4);
    private volatile boolean mIsQuitting = false;
    private Throwable mTestException;
    private VirtualDisplayPresentation mLocalPresentation;
    private RemoteVirtualDisplayPresentation mRemotePresentation;
    private ByteBuffer[] mDecoderInputBuffers;

    /** event listener for test without verifying output */
    private EncoderEventListener mEncoderEventListener = new EncoderEventListener() {
        @Override
        public void onCodecConfig(ByteBuffer data, MediaCodec.BufferInfo info) {
            mCodecConfigReceived = true;
        }
        @Override
        public void onBufferReady(ByteBuffer data, MediaCodec.BufferInfo info) {
            mCodecBufferReceived = true;
        }
        @Override
        public void onError(String errorMessage) {
            fail(errorMessage);
        }
    };

    /* TEST_COLORS static initialization; need ARGB for ColorDrawable */
    private static int makeColor(int red, int green, int blue) {
        return 0xff << 24 | (red & 0xff) << 16 | (green & 0xff) << 8 | (blue & 0xff);
    }

    public void testVirtualDisplayRecycles() throws Exception {
        doTestVirtualDisplayRecycles(3);
    }

    public void testRendering800x480Locally() throws Throwable {
        Log.i(TAG, "testRendering800x480Locally");
        if (isConcurrentEncodingDecodingSupported(800, 480, BITRATE_800x480)) {
            runTestRenderingInSeparateThread(800, 480, false, false);
        } else {
            Log.i(TAG, "SKIPPING testRendering800x480Locally(): codec not supported");
        }
    }

    public void testRendering800x480Rotated90() throws Throwable {
        testRendering800x480Rotated(90);
    }

    public void testRendering800x480Rotated180() throws Throwable {
        testRendering800x480Rotated(180);
    }

    public void testRendering800x480Rotated270() throws Throwable {
        testRendering800x480Rotated(270);
    }

    public void testRendering800x480Rotated360() throws Throwable {
        testRendering800x480Rotated(360);
    }

    private void testRendering800x480Rotated(int degrees) throws Throwable {
        Log.i(TAG, "testRendering800x480Rotated " + degrees);
        if (isConcurrentEncodingDecodingSupported(800, 480, BITRATE_800x480)) {
            runTestRenderingInSeparateThread(800, 480, false, false, degrees);
        } else {
            Log.i(TAG, "SKIPPING testRendering800x480Rotated" + degrees + ": codec not supported");
        }
    }

    public void testRenderingMaxResolutionLocally() throws Throwable {
        Log.i(TAG, "testRenderingMaxResolutionLocally");
        Size maxRes = checkMaxConcurrentEncodingDecodingResolution();
        if (maxRes == null) {
            Log.i(TAG, "SKIPPING testRenderingMaxResolutionLocally(): codec not supported");
        } else {
            Log.w(TAG, "Trying resolution " + maxRes);
            runTestRenderingInSeparateThread(maxRes.getWidth(), maxRes.getHeight(), false, false);
        }
    }

    public void testRendering800x480Remotely() throws Throwable {
        Log.i(TAG, "testRendering800x480Remotely");
        if (isConcurrentEncodingDecodingSupported(800, 480, BITRATE_800x480)) {
            runTestRenderingInSeparateThread(800, 480, true, false);
        } else {
            Log.i(TAG, "SKIPPING testRendering800x480Remotely(): codec not supported");
        }
    }

    public void testRenderingMaxResolutionRemotely() throws Throwable {
        Log.i(TAG, "testRenderingMaxResolutionRemotely");
        Size maxRes = checkMaxConcurrentEncodingDecodingResolution();
        if (maxRes == null) {
            Log.i(TAG, "SKIPPING testRenderingMaxResolutionRemotely(): codec not supported");
        } else {
            Log.w(TAG, "Trying resolution " + maxRes);
            runTestRenderingInSeparateThread(maxRes.getWidth(), maxRes.getHeight(), true, false);
        }
    }

    public void testRendering800x480RemotelyWith3Windows() throws Throwable {
        Log.i(TAG, "testRendering800x480RemotelyWith3Windows");
        if (isConcurrentEncodingDecodingSupported(800, 480, BITRATE_800x480)) {
            runTestRenderingInSeparateThread(800, 480, true, true);
        } else {
            Log.i(TAG, "SKIPPING testRendering800x480RemotelyWith3Windows(): codec not supported");
        }
    }

    public void testRendering800x480LocallyWith3Windows() throws Throwable {
        Log.i(TAG, "testRendering800x480LocallyWith3Windows");
        if (isConcurrentEncodingDecodingSupported(800, 480, BITRATE_800x480)) {
            runTestRenderingInSeparateThread(800, 480, false, true);
        } else {
            Log.i(TAG, "SKIPPING testRendering800x480LocallyWith3Windows(): codec not supported");
        }
    }

    /**
     * Run rendering test in a separate thread. This is necessary as {@link OutputSurface} requires
     * constructing it in a non-test thread.
     * @param w encoding width
     * @param h encoding height
     * @throws Exception
     */
    private void runTestRenderingInSeparateThread(final int w, final int h,
            final boolean runRemotely, final boolean multipleWindows) throws Throwable {
        runTestRenderingInSeparateThread(w, h, runRemotely, multipleWindows, /* degrees */ 0);
    }

    private void runTestRenderingInSeparateThread(final int w, final int h,
            final boolean runRemotely, final boolean multipleWindows, final int degrees)
            throws Throwable {
        mTestException = null;
        Thread renderingThread = new Thread(new Runnable() {
            public void run() {
                try {
                    doTestRenderingOutput(w, h, runRemotely, multipleWindows, degrees);
                } catch (Throwable t) {
                    t.printStackTrace();
                    mTestException = t;
                }
            }
        });
        renderingThread.start();
        renderingThread.join(60000);
        assertTrue(!renderingThread.isAlive());
        if (mTestException != null) {
            throw mTestException;
        }
    }

    private void doTestRenderingOutput(int w, int h, boolean runRemotely, boolean multipleWindows,
            int degrees) throws Throwable {
        if (DBG) {
            Log.i(TAG, "doTestRenderingOutput for w:" + w + " h:" + h);
        }
        try {
            mIsQuitting = false;
            mDecoder = MediaCodec.createDecoderByType(MIME_TYPE);
            MediaFormat decoderFormat = MediaFormat.createVideoFormat(MIME_TYPE, w, h);
            if (degrees != 0) {
                decoderFormat.setInteger(MediaFormat.KEY_ROTATION, degrees);
            }
            mDecodingSurface = new OutputSurface(w, h);
            mDecoder.configure(decoderFormat, mDecodingSurface.getSurface(), null, 0);
            // only scale to fit scaling mode is supported
            mDecoder.setVideoScalingMode(MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT);
            mDecoder.start();
            mDecoderInputBuffers = mDecoder.getInputBuffers();

            mEncodingHelper = new EncodingHelper();
            mEncodingSurface = mEncodingHelper.startEncoding(w, h,
                    new EncoderEventListener() {
                        @Override
                        public void onCodecConfig(ByteBuffer data, BufferInfo info) {
                            if (DBG) {
                                Log.i(TAG, "onCodecConfig l:" + info.size);
                            }
                            handleEncodedData(data, info);
                        }

                        @Override
                        public void onBufferReady(ByteBuffer data, BufferInfo info) {
                            if (DBG) {
                                Log.i(TAG, "onBufferReady l:" + info.size);
                            }
                            handleEncodedData(data, info);
                        }

                        @Override
                        public void onError(String errorMessage) {
                            fail(errorMessage);
                        }

                        private void handleEncodedData(ByteBuffer data, BufferInfo info) {
                            if (mIsQuitting) {
                                if (DBG) {
                                    Log.i(TAG, "ignore data as test is quitting");
                                }
                                return;
                            }
                            int inputBufferIndex =
                                    mDecoder.dequeueInputBuffer(DEFAULT_WAIT_TIMEOUT_US);
                            if (inputBufferIndex < 0) {
                                if (DBG) {
                                    Log.i(TAG, "dequeueInputBuffer returned:" + inputBufferIndex);
                                }
                                return;
                            }
                            assertTrue(inputBufferIndex >= 0);
                            ByteBuffer inputBuffer = mDecoderInputBuffers[inputBufferIndex];
                            inputBuffer.clear();
                            inputBuffer.put(data);
                            mDecoder.queueInputBuffer(inputBufferIndex, 0, info.size,
                                    info.presentationTimeUs, info.flags);
                        }
                    });
            GlCompositor compositor = new GlCompositor();
            if (DBG) {
                Log.i(TAG, "start composition");
            }
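            // Descriptive note: the pipeline under test is presentation -> virtual display
            // Surface -> GlCompositor window -> encoder input Surface -> AVC encoder ->
            // mDecoder above -> mDecodingSurface, where decoded frames are sampled with
            // glReadPixels to verify the rendered colors.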
            compositor.startComposition(mEncodingSurface, w, h, multipleWindows ? 3 : 1);

            if (DBG) {
                Log.i(TAG, "create display");
            }

            Renderer renderer = null;
            Context context = getContext();
            Surface windowSurface = compositor.getWindowSurface(multipleWindows ? 1 : 0);
            if (runRemotely) {
                mRemotePresentation =
                        new RemoteVirtualDisplayPresentation(context, windowSurface, w, h);
                mRemotePresentation.connect();
                mRemotePresentation.start();
                renderer = mRemotePresentation;
            } else {
                mLocalPresentation = (degrees == 0)
                        ? new VirtualDisplayPresentation(context, windowSurface, w, h)
                        : new RotateVirtualDisplayPresentation(context, windowSurface, w, h);
                mLocalPresentation.createVirtualDisplay();
                mLocalPresentation.createPresentation();
                renderer = mLocalPresentation;
            }

            if (DBG) {
                Log.i(TAG, "start rendering and check");
            }
            if (degrees == 0) {
                renderColorAndCheckResult(renderer, w, h, COLOR_RED);
                renderColorAndCheckResult(renderer, w, h, COLOR_BLUE);
                renderColorAndCheckResult(renderer, w, h, COLOR_GREEN);
                renderColorAndCheckResult(renderer, w, h, COLOR_GREY);
            } else {
                renderRotationAndCheckResult(renderer, w, h, degrees);
            }

            mIsQuitting = true;
            if (runRemotely) {
                mRemotePresentation.disconnect();
            } else {
                mLocalPresentation.dismissPresentation();
                mLocalPresentation.destroyVirtualDisplay();
            }

            compositor.stopComposition();
        } finally {
            if (mEncodingHelper != null) {
                mEncodingHelper.stopEncoding();
                mEncodingHelper = null;
            }
            if (mDecoder != null) {
                mDecoder.stop();
                mDecoder.release();
                mDecoder = null;
            }
            if (mDecodingSurface != null) {
                mDecodingSurface.release();
                mDecodingSurface = null;
            }
        }
    }

    private static final int NUM_MAX_RETRY = 120;
    private static final int IMAGE_WAIT_TIMEOUT_MS = 1000;

    private void renderColorAndCheckResult(Renderer renderer, int w, int h,
            int color) throws Exception {
        BufferInfo info = new BufferInfo();
        for (int i = 0; i < NUM_MAX_RETRY; i++) {
            renderer.doRendering(color);
            int bufferIndex = mDecoder.dequeueOutputBuffer(info, DEFAULT_WAIT_TIMEOUT_US);
            if (DBG) {
                Log.i(TAG, "decoder dequeueOutputBuffer returned " + bufferIndex);
            }
            if (bufferIndex < 0) {
                continue;
            }
            mDecoder.releaseOutputBuffer(bufferIndex, true);
            if (mDecodingSurface.checkForNewImage(IMAGE_WAIT_TIMEOUT_MS)) {
                mDecodingSurface.drawImage();
                if (checkSurfaceFrameColor(w, h, color)) {
                    Log.i(TAG, "color " + Integer.toHexString(color) + " matched");
                    return;
                }
            } else if (DBG) {
                Log.i(TAG, "no rendering yet");
            }
        }
        fail("Color did not match");
    }

    private void renderRotationAndCheckResult(Renderer renderer, int w, int h,
            int degrees) throws Exception {
        BufferInfo info = new BufferInfo();
        for (int i = 0; i < NUM_MAX_RETRY; i++) {
            renderer.doRendering(-1);
            int bufferIndex = mDecoder.dequeueOutputBuffer(info, DEFAULT_WAIT_TIMEOUT_US);
            if (DBG) {
                Log.i(TAG, "decoder dequeueOutputBuffer returned " + bufferIndex);
            }
            if (bufferIndex < 0) {
                continue;
            }
            mDecoder.releaseOutputBuffer(bufferIndex, true);
            if (mDecodingSurface.checkForNewImage(IMAGE_WAIT_TIMEOUT_MS)) {
                mDecodingSurface.drawImage();
                if (checkRotatedFrameQuadrants(w, h, degrees)) {
                    Log.i(TAG, "output rotated " + degrees + " degrees");
                    return;
                }
            } else if (DBG) {
                Log.i(TAG, "no rendering yet");
            }
        }
        fail("Frame not properly rotated");
    }

    private boolean checkRotatedFrameQuadrants(int w, int h, int degrees) {
        // Read a pixel from each quadrant of the surface.
        int ww = w / 4;
        int hh = h / 4;
        // coords is ordered counter clockwise (note, gl 0,0 is bottom left)
        int[][] coords = new int[][] {{ww, hh}, {ww * 3, hh}, {ww * 3, hh * 3}, {ww, hh * 3}};
        List<Integer> expected = new ArrayList<>();
        List<Integer> colors = Arrays.asList(
                new Integer[] {COLOR_GREEN, COLOR_BLUE, COLOR_RED, COLOR_GREY});
        expected.addAll(colors);
        expected.addAll(colors);
        int offset = (degrees / 90) % 4;
        for (int i = 0; i < coords.length; i++) {
            int[] c = coords[i];
            int x = c[0];
            int y = c[1];
            GLES20.glReadPixels(x, y, 1, 1, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mPixelBuf);
            int r = mPixelBuf.get(0) & 0xff;
            int g = mPixelBuf.get(1) & 0xff;
            int b = mPixelBuf.get(2) & 0xff;
            // adding the offset to rotate expected colors clockwise
            int color = expected.get(offset + i);
            int redExpected = (color >> 16) & 0xff;
            int greenExpected = (color >> 8) & 0xff;
            int blueExpected = color & 0xff;
            Log.i(TAG, String.format("(%d,%d) expecting %d,%d,%d saw %d,%d,%d",
                    x, y, redExpected, greenExpected, blueExpected, r, g, b));
            if (!approxEquals(redExpected, r) || !approxEquals(greenExpected, g)
                    || !approxEquals(blueExpected, b)) {
                return false;
            }
        }
        return true;
    }

    private boolean checkSurfaceFrameColor(int w, int h, int color) {
        // Read a pixel from the center of the surface. Might want to read from multiple points
        // and average them together.
        int x = w / 2;
        int y = h / 2;
        GLES20.glReadPixels(x, y, 1, 1, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mPixelBuf);
        int r = mPixelBuf.get(0) & 0xff;
        int g = mPixelBuf.get(1) & 0xff;
        int b = mPixelBuf.get(2) & 0xff;

        int redExpected = (color >> 16) & 0xff;
        int greenExpected = (color >> 8) & 0xff;
        int blueExpected = color & 0xff;
        if (approxEquals(redExpected, r) && approxEquals(greenExpected, g)
                && approxEquals(blueExpected, b)) {
            return true;
        }
        Log.i(TAG, "expected 0x" + Integer.toHexString(color) + " got 0x"
                + Integer.toHexString(makeColor(r, g, b)));
        return false;
    }

    /**
     * Determines if two color values are approximately equal.
     */
    private static boolean approxEquals(int expected, int actual) {
        final int MAX_DELTA = 4;
        return Math.abs(expected - actual) <= MAX_DELTA;
    }

    private static final int NUM_CODEC_CREATION = 5;
    private static final int NUM_DISPLAY_CREATION = 10;
    private static final int NUM_RENDERING = 10;

    private void doTestVirtualDisplayRecycles(int numDisplays) throws Exception {
        Size maxSize = getMaxSupportedEncoderSize();
        if (maxSize == null) {
            Log.i(TAG, "no codec found, skipping");
            return;
        }
        VirtualDisplayPresentation[] virtualDisplays = new VirtualDisplayPresentation[numDisplays];
        for (int i = 0; i < NUM_CODEC_CREATION; i++) {
            mCodecConfigReceived = false;
            mCodecBufferReceived = false;
            if (DBG) {
                Log.i(TAG, "start encoding");
            }
            EncodingHelper encodingHelper = new EncodingHelper();
            try {
                mEncodingSurface = encodingHelper.startEncoding(
                        maxSize.getWidth(), maxSize.getHeight(), mEncoderEventListener);
                GlCompositor compositor = new GlCompositor();
                if (DBG) {
                    Log.i(TAG, "start composition");
                }
                compositor.startComposition(mEncodingSurface,
                        maxSize.getWidth(), maxSize.getHeight(), numDisplays);
                for (int j = 0; j < NUM_DISPLAY_CREATION; j++) {
                    if (DBG) {
                        Log.i(TAG, "create display");
                    }
                    for (int k = 0; k < numDisplays; k++) {
                        virtualDisplays[k] =
                                new VirtualDisplayPresentation(getContext(),
                                        compositor.getWindowSurface(k),
                                        maxSize.getWidth() / numDisplays, maxSize.getHeight());
                        virtualDisplays[k].createVirtualDisplay();
                        virtualDisplays[k].createPresentation();
                    }
                    if (DBG) {
                        Log.i(TAG, "start rendering");
                    }
                    for (int k = 0; k < NUM_RENDERING; k++) {
                        for (int l = 0; l < numDisplays; l++) {
                            virtualDisplays[l].doRendering(COLOR_RED);
                        }
                        // do not care how many frames are actually rendered.
                        Thread.sleep(1);
                    }
                    for (int k = 0; k < numDisplays; k++) {
                        virtualDisplays[k].dismissPresentation();
                        virtualDisplays[k].destroyVirtualDisplay();
                    }
                    compositor.recreateWindows();
                }
                if (DBG) {
                    Log.i(TAG, "stop composition");
                }
                compositor.stopComposition();
            } finally {
                if (DBG) {
                    Log.i(TAG, "stop encoding");
                }
                encodingHelper.stopEncoding();
                assertTrue(mCodecConfigReceived);
                assertTrue(mCodecBufferReceived);
            }
        }
    }

    interface EncoderEventListener {
        public void onCodecConfig(ByteBuffer data, MediaCodec.BufferInfo info);
        public void onBufferReady(ByteBuffer data, MediaCodec.BufferInfo info);
        public void onError(String errorMessage);
    }

    private class EncodingHelper {
        private MediaCodec mEncoder;
        private volatile boolean mStopEncoding = false;
        private EncoderEventListener mEventListener;
        private int mW;
        private int mH;
        private Thread mEncodingThread;
        private Surface mEncodingSurface;
        private Semaphore mInitCompleted = new Semaphore(0);

        Surface startEncoding(int w, int h, EncoderEventListener eventListener) {
            mStopEncoding = false;
            mW = w;
            mH = h;
            mEventListener = eventListener;
            mEncodingThread = new Thread(new Runnable() {
                @Override
                public void run() {
                    try {
                        doEncoding();
                    } catch (Exception e) {
                        e.printStackTrace();
                        mEventListener.onError(e.toString());
                    }
                }
            });
            mEncodingThread.start();
            try {
                if (DBG) {
                    Log.i(TAG, "wait for encoder init");
                }
                mInitCompleted.acquire();
                if (DBG) {
                    Log.i(TAG, "wait for encoder done");
                }
            } catch (InterruptedException e) {
                fail("should not happen");
            }
            return mEncodingSurface;
        }

        void stopEncoding() {
            try {
                mStopEncoding = true;
                mEncodingThread.join();
            } catch (InterruptedException e) {
                // just ignore
            } finally {
                mEncodingThread = null;
            }
        }

        private void doEncoding() throws Exception {
            final int TIMEOUT_USEC_NORMAL = 1000000;
            MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mW, mH);
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            int bitRate = BITRATE_DEFAULT;
            if (mW == 1920 && mH == 1080) {
                bitRate = BITRATE_1080p;
            } else if (mW == 1280 && mH == 720) {
                bitRate = BITRATE_720p;
            } else if (mW == 800 && mH == 480) {
                bitRate = BITRATE_800x480;
            }
            format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);

            MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
            String codecName = null;
            if ((codecName = mcl.findEncoderForFormat(format)) == null) {
                throw new RuntimeException(
                        "encoder " + MIME_TYPE + " not supported for format: " + format.toString());
            }

            try {
                mEncoder = MediaCodec.createByCodecName(codecName);
                mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
                mEncodingSurface = mEncoder.createInputSurface();
                mEncoder.start();
                mInitCompleted.release();
                if (DBG) {
                    Log.i(TAG, "starting encoder");
                }
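                // Descriptive note: the drain loop below uses the legacy buffer-array API
                // (getOutputBuffers() plus INFO_OUTPUT_BUFFERS_CHANGED). Codec-config and
                // regular output buffers are handed to the listener, and an EOS flag or
                // mStopEncoding terminates the loop.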
                ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
                MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
                while (!mStopEncoding) {
                    int index = mEncoder.dequeueOutputBuffer(info, TIMEOUT_USEC_NORMAL);
                    if (DBG) {
                        Log.i(TAG, "encoder dequeueOutputBuffer returned " + index);
                    }
                    if (index >= 0) {
                        if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                            Log.i(TAG, "codec config data");
                            ByteBuffer encodedData = encoderOutputBuffers[index];
                            encodedData.position(info.offset);
                            encodedData.limit(info.offset + info.size);
                            mEventListener.onCodecConfig(encodedData, info);
                            mEncoder.releaseOutputBuffer(index, false);
                        } else if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                            Log.i(TAG, "EOS, stopping encoding");
                            break;
                        } else {
                            ByteBuffer encodedData = encoderOutputBuffers[index];
                            encodedData.position(info.offset);
                            encodedData.limit(info.offset + info.size);
                            mEventListener.onBufferReady(encodedData, info);
                            mEncoder.releaseOutputBuffer(index, false);
                        }
                    } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                        Log.i(TAG, "output buffer changed");
                        encoderOutputBuffers = mEncoder.getOutputBuffers();
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
                throw e;
            } finally {
                if (mEncoder != null) {
                    mEncoder.stop();
                    mEncoder.release();
                    mEncoder = null;
                }
                if (mEncodingSurface != null) {
                    mEncodingSurface.release();
                    mEncodingSurface = null;
                }
            }
        }
    }

    /**
     * Handles composition of multiple SurfaceTextures into a single Surface.
     */
    private class GlCompositor implements SurfaceTexture.OnFrameAvailableListener {
        private Surface mSurface;
        private int mWidth;
        private int mHeight;
        private volatile int mNumWindows;
        private GlWindow mTopWindow;
        private Thread mCompositionThread;
        private Semaphore mStartCompletionSemaphore;
        private Semaphore mRecreationCompletionSemaphore;
        private Looper mLooper;
        private Handler mHandler;
        private InputSurface mEglHelper;
        private int mGlProgramId = 0;
        private int mGluMVPMatrixHandle;
        private int mGluSTMatrixHandle;
        private int mGlaPositionHandle;
        private int mGlaTextureHandle;
        private float[] mMVPMatrix = new float[16];
        private TopWindowVirtualDisplayPresentation mTopPresentation;

        private static final String VERTEX_SHADER =
                "uniform mat4 uMVPMatrix;\n" +
                "uniform mat4 uSTMatrix;\n" +
                "attribute vec4 aPosition;\n" +
                "attribute vec4 aTextureCoord;\n" +
                "varying vec2 vTextureCoord;\n" +
                "void main() {\n" +
                "  gl_Position = uMVPMatrix * aPosition;\n" +
                "  vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
                "}\n";

        private static final String FRAGMENT_SHADER =
                "#extension GL_OES_EGL_image_external : require\n" +
                "precision mediump float;\n" +
                "varying vec2 vTextureCoord;\n" +
                "uniform samplerExternalOES sTexture;\n" +
                "void main() {\n" +
                "  gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
                "}\n";

        void startComposition(Surface surface, int w, int h, int numWindows) throws Exception {
            mSurface = surface;
            mWidth = w;
            mHeight = h;
            mNumWindows = numWindows;
            mCompositionThread = new Thread(new CompositionRunnable());
            mStartCompletionSemaphore = new Semaphore(0);
            mCompositionThread.start();
            waitForStartCompletion();
        }
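        // Descriptive note: composition runs on its own Looper thread (CompositionRunnable);
        // rendering and window recreation are requested by posting DO_RENDERING /
        // DO_RECREATE_WINDOWS messages to mHandler, and stopComposition() quits that Looper
        // and joins the thread.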
        void stopComposition() {
            try {
                if (mLooper != null) {
                    mLooper.quit();
                    mCompositionThread.join();
                }
            } catch (InterruptedException e) {
                // don't care
            }
            mCompositionThread = null;
            mSurface = null;
            mStartCompletionSemaphore = null;
        }

        Surface getWindowSurface(int windowIndex) {
            return mTopPresentation.getSurface(windowIndex);
        }

        void recreateWindows() throws Exception {
            mRecreationCompletionSemaphore = new Semaphore(0);
            Message msg = mHandler.obtainMessage(CompositionHandler.DO_RECREATE_WINDOWS);
            mHandler.sendMessage(msg);
            if (!mRecreationCompletionSemaphore.tryAcquire(DEFAULT_WAIT_TIMEOUT_MS,
                    TimeUnit.MILLISECONDS)) {
                fail("recreation timeout");
            }
            mTopPresentation.waitForSurfaceReady(DEFAULT_WAIT_TIMEOUT_MS);
        }

        @Override
        public void onFrameAvailable(SurfaceTexture surface) {
            if (DBG) {
                Log.i(TAG, "onFrameAvailable " + surface);
            }
            GlWindow w = mTopWindow;
            if (w != null) {
                w.markTextureUpdated();
                requestUpdate();
            } else {
                Log.w(TAG, "top window gone");
            }
        }

        private void requestUpdate() {
            Thread compositionThread = mCompositionThread;
            if (compositionThread == null || !compositionThread.isAlive()) {
                return;
            }
            Message msg = mHandler.obtainMessage(CompositionHandler.DO_RENDERING);
            mHandler.sendMessage(msg);
        }

        private int loadShader(int shaderType, String source) throws GlException {
            int shader = GLES20.glCreateShader(shaderType);
            checkGlError("glCreateShader type=" + shaderType);
            GLES20.glShaderSource(shader, source);
            GLES20.glCompileShader(shader);
            int[] compiled = new int[1];
            GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
            if (compiled[0] == 0) {
                Log.e(TAG, "Could not compile shader " + shaderType + ":");
                Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
                GLES20.glDeleteShader(shader);
                shader = 0;
            }
            return shader;
        }

        private int createProgram(String vertexSource, String fragmentSource) throws GlException {
            int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
            if (vertexShader == 0) {
                return 0;
            }
            int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
            if (pixelShader == 0) {
                return 0;
            }

            int program = GLES20.glCreateProgram();
            checkGlError("glCreateProgram");
            if (program == 0) {
                Log.e(TAG, "Could not create program");
            }
            GLES20.glAttachShader(program, vertexShader);
            checkGlError("glAttachShader");
            GLES20.glAttachShader(program, pixelShader);
            checkGlError("glAttachShader");
            GLES20.glLinkProgram(program);
            int[] linkStatus = new int[1];
            GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
            if (linkStatus[0] != GLES20.GL_TRUE) {
                Log.e(TAG, "Could not link program: ");
                Log.e(TAG, GLES20.glGetProgramInfoLog(program));
                GLES20.glDeleteProgram(program);
                program = 0;
            }
            return program;
        }

        private void initGl() throws GlException {
            mEglHelper = new InputSurface(mSurface);
            mEglHelper.makeCurrent();
            mGlProgramId = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
            mGlaPositionHandle =
                    GLES20.glGetAttribLocation(mGlProgramId, "aPosition");
            checkGlError("glGetAttribLocation aPosition");
            if (mGlaPositionHandle == -1) {
                throw new RuntimeException("Could not get attrib location for aPosition");
            }
            mGlaTextureHandle = GLES20.glGetAttribLocation(mGlProgramId, "aTextureCoord");
            checkGlError("glGetAttribLocation aTextureCoord");
            if (mGlaTextureHandle == -1) {
                throw new RuntimeException("Could not get attrib location for aTextureCoord");
            }
            mGluMVPMatrixHandle = GLES20.glGetUniformLocation(mGlProgramId, "uMVPMatrix");
            checkGlError("glGetUniformLocation uMVPMatrix");
            if (mGluMVPMatrixHandle == -1) {
                throw new RuntimeException("Could not get uniform location for uMVPMatrix");
            }
            mGluSTMatrixHandle = GLES20.glGetUniformLocation(mGlProgramId, "uSTMatrix");
            checkGlError("glGetUniformLocation uSTMatrix");
            if (mGluSTMatrixHandle == -1) {
                throw new RuntimeException("Could not get uniform location for uSTMatrix");
            }
            Matrix.setIdentityM(mMVPMatrix, 0);
            Log.i(TAG, "initGl w:" + mWidth + " h:" + mHeight);
            GLES20.glViewport(0, 0, mWidth, mHeight);
            float[] vMatrix = new float[16];
            float[] projMatrix = new float[16];
            // max window is from (0,0) to (mWidth - 1, mHeight - 1)
            float wMid = mWidth / 2f;
            float hMid = mHeight / 2f;
            // look from positive z to hide windows in lower z
            Matrix.setLookAtM(vMatrix, 0, wMid, hMid, 5f, wMid, hMid, 0f, 0f, 1.0f, 0.0f);
            Matrix.orthoM(projMatrix, 0, -wMid, wMid, -hMid, hMid, 1, 10);
            Matrix.multiplyMM(mMVPMatrix, 0, projMatrix, 0, vMatrix, 0);
            createWindows();
        }

        private void createWindows() throws GlException {
            mTopWindow = new GlWindow(this, 0, 0, mWidth, mHeight);
            mTopWindow.init();
            mTopPresentation = new TopWindowVirtualDisplayPresentation(mContext,
                    mTopWindow.getSurface(), mWidth, mHeight, mNumWindows);
            mTopPresentation.createVirtualDisplay();
            mTopPresentation.createPresentation();
            ((TopWindowPresentation) mTopPresentation.getPresentation()).populateWindows();
        }

        private void cleanupGl() {
            if (mTopPresentation != null) {
                mTopPresentation.dismissPresentation();
                mTopPresentation.destroyVirtualDisplay();
                mTopPresentation = null;
            }
            if (mTopWindow != null) {
                mTopWindow.cleanup();
                mTopWindow = null;
            }
            if (mEglHelper != null) {
                mEglHelper.release();
                mEglHelper = null;
            }
        }

        private void doGlRendering() throws GlException {
            if (DBG) {
                Log.i(TAG, "doGlRendering");
            }
            mTopWindow.updateTexImageIfNecessary();
            GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
            GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

            GLES20.glUseProgram(mGlProgramId);
            GLES20.glUniformMatrix4fv(mGluMVPMatrixHandle, 1, false, mMVPMatrix, 0);
            mTopWindow.onDraw(mGluSTMatrixHandle, mGlaPositionHandle, mGlaTextureHandle);
            checkGlError("window draw");
            if (DBG) {
                final IntBuffer pixels = IntBuffer.allocate(1);
                GLES20.glReadPixels(mWidth / 2, mHeight / 2, 1, 1,
                        GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixels);
                Log.i(TAG, "glReadPixels returned 0x" + Integer.toHexString(pixels.get(0)));
            }
            mEglHelper.swapBuffers();
        }

        private void doRecreateWindows() throws GlException {
            mTopPresentation.dismissPresentation();
            mTopPresentation.destroyVirtualDisplay();
            mTopWindow.cleanup();
            createWindows();
            mRecreationCompletionSemaphore.release();
        }

        private void waitForStartCompletion() throws Exception {
            if (!mStartCompletionSemaphore.tryAcquire(DEFAULT_WAIT_TIMEOUT_MS,
                    TimeUnit.MILLISECONDS)) {
                fail("start timeout");
            }
            mStartCompletionSemaphore = null;
            mTopPresentation.waitForSurfaceReady(DEFAULT_WAIT_TIMEOUT_MS);
        }

        private class CompositionRunnable implements Runnable {
            @Override
            public void run() {
                try {
                    Looper.prepare();
                    mLooper = Looper.myLooper();
                    mHandler = new CompositionHandler();
                    initGl();
                    // init done
                    mStartCompletionSemaphore.release();
                    Looper.loop();
                } catch (GlException e) {
                    e.printStackTrace();
                    fail("got gl exception");
                } finally {
                    cleanupGl();
                    mHandler = null;
                    mLooper = null;
                }
            }
        }

        private class CompositionHandler extends Handler {
            private static final int DO_RENDERING = 1;
            private static final int DO_RECREATE_WINDOWS = 2;

            @Override
            public void handleMessage(Message msg) {
                try {
                    switch (msg.what) {
                        case DO_RENDERING: {
                            doGlRendering();
                        } break;
                        case DO_RECREATE_WINDOWS: {
                            doRecreateWindows();
                        } break;
                    }
                } catch (GlException e) {
                    // ignore as this can happen during tearing down
                }
            }
        }

        private class GlWindow {
            private static final int FLOAT_SIZE_BYTES = 4;
            private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
            private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
            private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
            private int mBlX;
            private int mBlY;
            private int mWidth;
            private int mHeight;
            private int mTextureId = 0; // 0 is invalid
            private volatile SurfaceTexture mSurfaceTexture;
            private volatile Surface mSurface;
            private FloatBuffer mVerticesData;
            private float[] mSTMatrix = new float[16];
            private AtomicInteger mNumTextureUpdated = new AtomicInteger(0);
            private GlCompositor mCompositor;

            /**
             * @param blX X coordinate of bottom-left point of window
             * @param blY Y coordinate of bottom-left point of window
             * @param w window width
             * @param h window height
             */
            public GlWindow(GlCompositor compositor, int blX, int blY, int w, int h) {
                mCompositor = compositor;
                mBlX = blX;
                mBlY = blY;
                mWidth = w;
                mHeight = h;
                int trX = blX + w;
                int trY = blY + h;
                float[] vertices = new float[] {
                        // x, y, z, u, v
                        mBlX, mBlY, 0, 0, 0,
                        trX, mBlY, 0, 1, 0,
                        mBlX, trY, 0, 0, 1,
                        trX, trY, 0, 1, 1
                };
                Log.i(TAG, "create window " + this + " blX:" + mBlX + " blY:" + mBlY + " trX:" +
                        trX + " trY:" + trY);
                mVerticesData = ByteBuffer.allocateDirect(
                        vertices.length * FLOAT_SIZE_BYTES)
                        .order(ByteOrder.nativeOrder()).asFloatBuffer();
                mVerticesData.put(vertices).position(0);
            }

            /**
             * Initialize the window for composition.
             * The counterpart is cleanup().
             * @throws GlException
             */
            public void init() throws GlException {
                int[] textures = new int[1];
                GLES20.glGenTextures(1, textures, 0);

                mTextureId = textures[0];
                GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
                checkGlError("glBindTexture mTextureID");

                GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                        GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
                GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                        GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
                GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
                        GLES20.GL_CLAMP_TO_EDGE);
                GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
                        GLES20.GL_CLAMP_TO_EDGE);
                checkGlError("glTexParameter");
                mSurfaceTexture = new SurfaceTexture(mTextureId);
                mSurfaceTexture.setDefaultBufferSize(mWidth, mHeight);
                mSurface = new Surface(mSurfaceTexture);
                mSurfaceTexture.setOnFrameAvailableListener(mCompositor);
            }

            public void cleanup() {
                mNumTextureUpdated.set(0);
                if (mTextureId != 0) {
                    int[] textures = new int[] {
                            mTextureId
                    };
                    GLES20.glDeleteTextures(1, textures, 0);
                }
                GLES20.glFinish();
                if (mSurface != null) {
                    mSurface.release();
                    mSurface = null;
                }
                if (mSurfaceTexture != null) {
                    mSurfaceTexture.release();
                    mSurfaceTexture = null;
                }
            }

            /**
             * Mark the texture as updated so that it will be refreshed in the next rendering.
             */
            public void markTextureUpdated() {
                mNumTextureUpdated.incrementAndGet();
            }

            /**
             * Update the texture for rendering if it has been marked as updated.
             */
            public void updateTexImageIfNecessary() {
                int numTextureUpdated = mNumTextureUpdated.getAndDecrement();
                if (numTextureUpdated > 0) {
                    if (DBG) {
                        Log.i(TAG, "updateTexImageIfNecessary " + this);
                    }
                    mSurfaceTexture.updateTexImage();
                    mSurfaceTexture.getTransformMatrix(mSTMatrix);
                }
                if (numTextureUpdated < 0) {
                    fail("should not happen");
                }
            }

            /**
             * Draw the window. It will not be drawn at all if the window is not visible.
             * @param uSTMatrixHandle shader handle for the STMatrix for texture coordinates
             *        mapping
             * @param aPositionHandle shader handle for vertex position.
             * @param aTextureHandle shader handle for texture
             */
            public void onDraw(int uSTMatrixHandle, int aPositionHandle, int aTextureHandle) {
                GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
                GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
                mVerticesData.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
                GLES20.glVertexAttribPointer(aPositionHandle, 3, GLES20.GL_FLOAT, false,
                        TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mVerticesData);
                GLES20.glEnableVertexAttribArray(aPositionHandle);

                mVerticesData.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
                GLES20.glVertexAttribPointer(aTextureHandle, 2, GLES20.GL_FLOAT, false,
                        TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mVerticesData);
                GLES20.glEnableVertexAttribArray(aTextureHandle);
                GLES20.glUniformMatrix4fv(uSTMatrixHandle, 1, false, mSTMatrix, 0);
                GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
            }

            public SurfaceTexture getSurfaceTexture() {
                return mSurfaceTexture;
            }

            public Surface getSurface() {
                return mSurface;
            }
        }
    }

    static void checkGlError(String op) throws GlException {
        int error;
        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
            Log.e(TAG, op + ": glError " + error);
            throw new GlException(op + ": glError " + error);
        }
    }

    public static class GlException extends Exception {
        public GlException(String msg) {
            super(msg);
        }
    }

    private interface Renderer {
        void doRendering(final int color) throws Exception;
    }

    private static class RotateVirtualDisplayPresentation extends VirtualDisplayPresentation {

        RotateVirtualDisplayPresentation(Context context, Surface surface, int w, int h) {
            super(context, surface, w, h);
        }

        @Override
        protected TestPresentationBase doCreatePresentation() {
            return new TestRotatePresentation(mContext, mVirtualDisplay.getDisplay());
        }

    }

    private static class VirtualDisplayPresentation implements Renderer {
        protected final Context mContext;
        protected final Surface mSurface;
        protected final int mWidth;
        protected final int mHeight;
        protected VirtualDisplay mVirtualDisplay;
        protected TestPresentationBase mPresentation;
        private final DisplayManager mDisplayManager;

        VirtualDisplayPresentation(Context context, Surface surface, int w, int h) {
            mContext = context;
            mSurface = surface;
            mWidth = w;
            mHeight = h;
            mDisplayManager = (DisplayManager) context.getSystemService(Context.DISPLAY_SERVICE);
        }

        void createVirtualDisplay() {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mVirtualDisplay = mDisplayManager.createVirtualDisplay(
                            TAG, mWidth, mHeight, 200, mSurface,
                            DisplayManager.VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY |
                            DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION);
                }
            });
        }

        void destroyVirtualDisplay() {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mVirtualDisplay.release();
                }
            });
        }

        void createPresentation() {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mPresentation = doCreatePresentation();
                    mPresentation.show();
                }
            });
        }

        protected TestPresentationBase doCreatePresentation() {
            return new TestPresentation(mContext, mVirtualDisplay.getDisplay());
        }

        TestPresentationBase getPresentation() {
            return mPresentation;
        }

        void dismissPresentation() {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mPresentation.dismiss();
                }
            });
        }

        @Override
        public void doRendering(final int color) throws Exception {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mPresentation.doRendering(color);
                }
            });
        }
    }

    private static class TestPresentationBase extends Presentation {

        public TestPresentationBase(Context outerContext, Display display) {
            // This theme is required to prevent an extra view from obscuring the presentation
            super(outerContext, display,
                    android.R.style.Theme_Holo_Light_NoActionBar_TranslucentDecor);
            getWindow().setType(WindowManager.LayoutParams.TYPE_PRIVATE_PRESENTATION);
            getWindow().addFlags(WindowManager.LayoutParams.FLAG_LOCAL_FOCUS_MODE);
            getWindow().addFlags(WindowManager.LayoutParams.FLAG_HARDWARE_ACCELERATED);
        }

        public void doRendering(int color) {
            // to be implemented by child
        }
    }

    private static class TestPresentation extends TestPresentationBase {
        private ImageView mImageView;

        public TestPresentation(Context outerContext, Display display) {
            super(outerContext, display);
        }

        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            mImageView = new ImageView(getContext());
            mImageView.setImageDrawable(new ColorDrawable(COLOR_RED));
            mImageView.setLayoutParams(new LayoutParams(
                    LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
            setContentView(mImageView);
        }

        public void doRendering(int color) {
            if (DBG) {
                Log.i(TAG, "doRendering " + Integer.toHexString(color));
            }
            mImageView.setImageDrawable(new ColorDrawable(color));
        }
    }

    private static class TestRotatePresentation extends TestPresentationBase {
        static final int[] kColors = new int[] {COLOR_GREY, COLOR_RED, COLOR_GREEN, COLOR_BLUE};
        private final ImageView[] mQuadrants = new ImageView[4];

        public TestRotatePresentation(Context outerContext, Display display) {
            super(outerContext, display);
        }

        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            Context ctx = getContext();
            TableLayout table = new TableLayout(ctx);
            ViewGroup.LayoutParams fill = new ViewGroup.LayoutParams(
                    ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT);
            TableLayout.LayoutParams fillTable = new TableLayout.LayoutParams(
                    ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT, 1f);
            TableRow.LayoutParams fillRow = new TableRow.LayoutParams(
                    ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT, 1f);
            table.setLayoutParams(fill);
            table.setStretchAllColumns(true);
            TableRow[] rows = new TableRow[] {new TableRow(ctx), new TableRow(ctx)};
            for (int i = 0; i < mQuadrants.length; i++) {
                mQuadrants[i] = new ImageView(ctx);
                mQuadrants[i].setImageDrawable(new ColorDrawable(kColors[i]));
                rows[i / 2].addView(mQuadrants[i], fillRow);
            }
            for (TableRow row : rows) {
                table.addView(row, fillTable);
            }
            setContentView(table);
            Log.v(TAG, "setContentView(table)");
        }

        @Override
        public void doRendering(int color) {
            Log.v(TAG, "doRendering: ignoring color: " + Integer.toHexString(color));
            for (int i = 0; i < mQuadrants.length; i++) {
                mQuadrants[i].setImageDrawable(new ColorDrawable(kColors[i]));
            }
        }

    }

    private static class TopWindowPresentation extends TestPresentationBase {
        private FrameLayout[] mWindowsLayout = new FrameLayout[MAX_NUM_WINDOWS];
        private CompositionTextureView[] mWindows = new CompositionTextureView[MAX_NUM_WINDOWS];
        private final int mNumWindows;
        private final Semaphore mWindowWaitSemaphore = new Semaphore(0);

        public TopWindowPresentation(int numWindows, Context outerContext, Display display) {
            super(outerContext, display);
            mNumWindows = numWindows;
        }

        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            if (DBG) {
                Log.i(TAG, "TopWindowPresentation onCreate, numWindows " + mNumWindows);
            }
            setContentView(R.layout.composition_layout);
            mWindowsLayout[0] = (FrameLayout) findViewById(R.id.window0);
            mWindowsLayout[1] = (FrameLayout) findViewById(R.id.window1);
            mWindowsLayout[2] = (FrameLayout) findViewById(R.id.window2);
        }

        public void populateWindows() {
            runOnMain(new Runnable() {
                public void run() {
                    for (int i = 0; i < mNumWindows; i++) {
                        mWindows[i] = new CompositionTextureView(getContext());
                        mWindows[i].setLayoutParams(new ViewGroup.LayoutParams(
                                ViewGroup.LayoutParams.MATCH_PARENT,
                                ViewGroup.LayoutParams.MATCH_PARENT));
                        mWindowsLayout[i].setVisibility(View.VISIBLE);
                        mWindowsLayout[i].addView(mWindows[i]);
                        mWindows[i].startListening();
                    }
                    mWindowWaitSemaphore.release();
                }
            });
        }

        public void waitForSurfaceReady(long timeoutMs) throws Exception {
            mWindowWaitSemaphore.tryAcquire(DEFAULT_WAIT_TIMEOUT_MS, TimeUnit.MILLISECONDS);
            for (int i = 0; i < mNumWindows; i++) {
                if (!mWindows[i].waitForSurfaceReady(timeoutMs)) {
                    fail("surface wait timeout");
                }
            }
        }

        public Surface getSurface(int windowIndex) {
            Surface surface = mWindows[windowIndex].getSurface();
            assertNotNull(surface);
            return surface;
        }
    }

    private static class TopWindowVirtualDisplayPresentation extends VirtualDisplayPresentation {
        private final int mNumWindows;

        TopWindowVirtualDisplayPresentation(Context context, Surface surface, int w, int h,
                int numWindows) {
            super(context, surface, w, h);
            assertNotNull(surface);
            mNumWindows = numWindows;
        }

        void waitForSurfaceReady(long timeoutMs) throws Exception {
            ((TopWindowPresentation) mPresentation).waitForSurfaceReady(timeoutMs);
        }

        Surface getSurface(int windowIndex) {
            return ((TopWindowPresentation) mPresentation).getSurface(windowIndex);
        }

        protected TestPresentationBase doCreatePresentation() {
            return new TopWindowPresentation(mNumWindows, mContext, mVirtualDisplay.getDisplay());
        }
    }

    private static class RemoteVirtualDisplayPresentation implements Renderer {
        /** argument: Surface, int w, int h, return none */
        private static final int BINDER_CMD_START = IBinder.FIRST_CALL_TRANSACTION;
        /** argument: int color, return none */
        private static final int BINDER_CMD_RENDER = IBinder.FIRST_CALL_TRANSACTION + 1;

        private final Context mContext;
        private final Surface mSurface;
        private final int mWidth;
        private final int mHeight;

        private IBinder mService;
        private final Semaphore mConnectionWait = new Semaphore(0);
        private final ServiceConnection mConnection = new ServiceConnection() {

            public void onServiceConnected(ComponentName arg0, IBinder arg1) {
                mService = arg1;
                mConnectionWait.release();
            }

            public void onServiceDisconnected(ComponentName arg0) {
                // ignore
            }

        };

        RemoteVirtualDisplayPresentation(Context context, Surface surface, int w, int h) {
            mContext = context;
            mSurface = surface;
            mWidth = w;
            mHeight = h;
        }

        void connect() throws Exception {
            Intent intent = new Intent();
            intent.setClassName("android.media.cts",
                    "android.media.cts.RemoteVirtualDisplayService");
            mContext.bindService(intent, mConnection, Context.BIND_AUTO_CREATE);
            if (!mConnectionWait.tryAcquire(DEFAULT_WAIT_TIMEOUT_MS, TimeUnit.MILLISECONDS)) {
                fail("cannot bind to service");
            }
        }

        void disconnect() {
            mContext.unbindService(mConnection);
        }

        void start() throws Exception {
            Parcel parcel = Parcel.obtain();
            mSurface.writeToParcel(parcel, 0);
            parcel.writeInt(mWidth);
            parcel.writeInt(mHeight);
            mService.transact(BINDER_CMD_START, parcel, null, 0);
        }

        @Override
        public void doRendering(int color) throws Exception {
            Parcel parcel = Parcel.obtain();
            parcel.writeInt(color);
            mService.transact(BINDER_CMD_RENDER, parcel, null, 0);
        }
    }

    private static Size getMaxSupportedEncoderSize() {
        final Size[] standardSizes = new Size[] {
            new Size(1920, 1080),
            new Size(1280, 720),
            new Size(720, 480),
            new Size(352, 576)
        };

        MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        for (Size sz : standardSizes) {
            MediaFormat format = MediaFormat.createVideoFormat(
                    MIME_TYPE, sz.getWidth(), sz.getHeight());
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            int bitRate = BITRATE_DEFAULT;
            if (sz.getWidth() == 1920 && sz.getHeight() == 1080) {
                bitRate = BITRATE_1080p;
            } else if (sz.getWidth() == 1280 && sz.getHeight() == 720) {
                bitRate = BITRATE_720p;
            } else if (sz.getWidth() == 800 && sz.getHeight() == 480) {
                bitRate = BITRATE_800x480;
            }
            format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
            Log.i(TAG, "format = " + format.toString());
            if (mcl.findEncoderForFormat(format) != null) {
                return sz;
            }
        }
        return null;
    }

    /**
     * Check the maximum concurrent encoding / decoding resolution allowed.
     * Some H/Ws cannot support the maximum resolution reported by the encoder if a decoder is
     * running at the same time.
     * Check is done for 4 different levels: 1080p, 720p, 800x480, 480p
     * (The last one is required by CDD.)
     */
    private Size checkMaxConcurrentEncodingDecodingResolution() {
        if (isConcurrentEncodingDecodingSupported(1920, 1080, BITRATE_1080p)) {
            return new Size(1920, 1080);
        } else if (isConcurrentEncodingDecodingSupported(1280, 720, BITRATE_720p)) {
            return new Size(1280, 720);
        } else if (isConcurrentEncodingDecodingSupported(800, 480, BITRATE_800x480)) {
            return new Size(800, 480);
        } else if (isConcurrentEncodingDecodingSupported(720, 480, BITRATE_DEFAULT)) {
            return new Size(720, 480);
        }
        Log.i(TAG, "SKIPPING test: concurrent encoding and decoding is not supported");
        return null;
    }

    private boolean isConcurrentEncodingDecodingSupported(int w, int h, int bitRate) {
        MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        MediaFormat testFormat = MediaFormat.createVideoFormat(MIME_TYPE, w, h);
        testFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
        testFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
        if (mcl.findDecoderForFormat(testFormat) == null
                || mcl.findEncoderForFormat(testFormat) == null) {
            return false;
        }

        MediaCodec decoder = null;
        OutputSurface decodingSurface = null;
        MediaCodec encoder = null;
        Surface encodingSurface = null;
        try {
            decoder = MediaCodec.createDecoderByType(MIME_TYPE);
            MediaFormat decoderFormat = MediaFormat.createVideoFormat(MIME_TYPE, w, h);
            decodingSurface = new OutputSurface(w, h);
            decodingSurface.makeCurrent();
            decoder.configure(decoderFormat, decodingSurface.getSurface(), null, 0);
            decoder.start();

            MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, w, h);
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
            encoder = MediaCodec.createEncoderByType(MIME_TYPE);
            encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            encodingSurface = encoder.createInputSurface();
            encoder.start();

            encoder.stop();
            decoder.stop();
        } catch (Exception e) {
            e.printStackTrace();
            Log.i(TAG, "This H/W does not support w:" + w + " h:" + h);
            return false;
        } finally {
            if (encodingSurface != null) {
                encodingSurface.release();
            }
            if (encoder != null) {
                encoder.release();
            }
            if (decoder != null) {
                decoder.release();
            }
            if (decodingSurface != null) {
                decodingSurface.release();
            }
        }
        return true;
    }

    private static void runOnMain(Runnable runner) {
        sHandlerForRunOnMain.post(runner);
    }

    private static void runOnMainSync(Runnable runner) {
        SyncRunnable sr = new SyncRunnable(runner);
        sHandlerForRunOnMain.post(sr);
        sr.waitForComplete();
    }

    private static final class SyncRunnable implements Runnable {
        private final Runnable mTarget;
        private boolean mComplete;

        public SyncRunnable(Runnable target) {
            mTarget = target;
        }

        public void run() {
            mTarget.run();
            synchronized (this) {
                mComplete = true;
                notifyAll();
            }
        }

        public void waitForComplete() {
            synchronized (this) {
                while (!mComplete) {
                    try {
                        wait();
                    } catch (InterruptedException e) {
                        // ignore
                    }
                }
            }
        }
    }
}