/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package android.media.cts;

import static org.junit.Assert.assertNotNull;

import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.pm.ActivityInfo;
import android.content.res.AssetFileDescriptor;
import android.content.res.Configuration;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.graphics.SurfaceTexture;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaCodecInfo.VideoCapabilities;
import android.media.MediaCodecList;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.media.cts.R;
import android.opengl.EGL14;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.SystemClock;
import android.util.Log;
import android.util.Pair;
import android.view.PixelCopy;
import android.view.PixelCopy.OnPixelCopyFinishedListener;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.RelativeLayout;

import androidx.test.rule.ActivityTestRule;

import com.android.compatibility.common.util.ApiLevelUtil;
import com.android.compatibility.common.util.MediaUtils;

import org.junit.After;
import org.junit.Before;
import org.junit.Rule;

import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.util.HashMap;
import java.util.concurrent.TimeUnit;

import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.egl.EGLSurface;

@TargetApi(16)
public class DecodeAccuracyTestBase {

    protected Context mContext;
    protected Resources mResources;
    protected DecodeAccuracyTestActivity mActivity;
    protected TestHelper testHelper;

    @Rule
    public ActivityTestRule<DecodeAccuracyTestActivity> mActivityRule =
            new ActivityTestRule<>(DecodeAccuracyTestActivity.class);

    @Before
    public void setUp() throws Exception {
        mActivity = mActivityRule.getActivity();
        mContext = mActivity.getApplicationContext();
        mResources = mActivity.getResources();
        testHelper = new TestHelper(mContext, mActivity);
    }

    @After
    public void tearDown() throws Exception {
        mActivity = null;
        mResources = null;
        mContext = null;
        mActivityRule = null;
    }

    protected void bringActivityToFront() {
        Intent intent = new Intent(mContext, DecodeAccuracyTestActivity.class);
        intent.addFlags(Intent.FLAG_ACTIVITY_REORDER_TO_FRONT);
        mActivity.startActivity(intent);
    }

    protected TestHelper getHelper() {
        return testHelper;
    }

    public static <T> T checkNotNull(T reference) {
        assertNotNull(reference);
        return reference;
    }

    public static <T> T checkNotNull(String msg, T reference) {
        assertNotNull(msg, reference);
        return reference;
    }

    /* Simple player that decodes a local video file only. */
    @TargetApi(16)
    static class SimplePlayer {

        public static final long MIN_MS_PER_FRAME = TimeUnit.SECONDS.toMillis(1) / 5; // 5 FPS
        public static final long STARTUP_ALLOW_MS = TimeUnit.SECONDS.toMillis(1);
        public static final int END_OF_STREAM = -1;
        public static final int DEQUEUE_SUCCESS = 1;
        public static final int DEQUEUE_FAIL = 0;

        private static final String TAG = SimplePlayer.class.getSimpleName();
        private static final int NO_TRACK_INDEX = -3;
        private static final long DEQUEUE_TIMEOUT_US = 20;

        private final Context context;
        private final MediaExtractor extractor;
        private final String codecName;
        private MediaCodec decoder;
        private byte[] outputBytes;
        private boolean renderToSurface;
        private MediaCodecList mediaCodecList;
        private Surface surface;

        public SimplePlayer(Context context) {
            this(context, null);
        }

        public SimplePlayer(Context context, String codecName) {
            this.context = checkNotNull(context);
            this.codecName = codecName;
            this.extractor = new MediaExtractor();
            this.renderToSurface = false;
            this.surface = null;
        }
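
        /*
         * Illustrative usage sketch, not invoked by the tests themselves; "surface" and
         * "videoFormat" are placeholders supplied by the caller:
         *
         *   SimplePlayer player = new SimplePlayer(context);
         *   PlayerResult result = player.decodeVideoFrames(surface, videoFormat, 30);
         *   if (!result.isSuccess()) {
         *       Log.e(TAG, result.getFailureMessage());
         *   }
         */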

        /**
         * Plays the given file for the given number of frames.
         *
         * @param surface the surface to render decoder output to.
         * @param videoFormat the format of the video to extract and decode.
         * @param numOfTotalFrames the number of frames to play.
         * @param msPerFrameCap the maximum milliseconds per frame; no cap is set if the value
         *        is less than 1.
         * @return a {@link PlayerResult} containing the result.
         */
        public PlayerResult decodeVideoFrames(
                Surface surface, VideoFormat videoFormat, int numOfTotalFrames, long msPerFrameCap,
                boolean releasePlayer) {
            this.surface = surface;
            PlayerResult playerResult;
            if (prepareVideoDecode(videoFormat)) {
                if (startDecoder()) {
                    final long timeout =
                            Math.max(MIN_MS_PER_FRAME, msPerFrameCap) * numOfTotalFrames
                                    + STARTUP_ALLOW_MS;
                    playerResult = decodeFramesAndPlay(numOfTotalFrames, timeout, msPerFrameCap);
                } else {
                    playerResult = PlayerResult.failToStart();
                }
            } else {
                playerResult = new PlayerResult();
            }
            if (releasePlayer) {
                release();
            }
            return new PlayerResult(playerResult);
        }

        public PlayerResult decodeVideoFrames(
                Surface surface, VideoFormat videoFormat, int numOfTotalFrames) {
            return decodeVideoFrames(surface, videoFormat, numOfTotalFrames, 0, false);
        }

        /**
         * Sets up the extractor and video decoder with the proper format.
         * This must be called before starting the decoder.
         */
        private boolean prepareVideoDecode(VideoFormat videoFormat) {
            MediaFormat mediaFormat = prepareExtractor(videoFormat);
            if (mediaFormat == null) {
                return false;
            }
            configureVideoFormat(mediaFormat, videoFormat);
            setRenderToSurface(surface != null);
            return createDecoder(mediaFormat) && configureDecoder(surface, mediaFormat);
        }

        /**
         * Sets up the extractor and gets the {@link MediaFormat} of the track.
         */
        private MediaFormat prepareExtractor(VideoFormat videoFormat) {
            if (!setExtractorDataSource(videoFormat)) {
                return null;
            }
            final int trackNum = getFirstTrackIndexByType(videoFormat.getMediaFormat());
            if (trackNum == NO_TRACK_INDEX) {
                return null;
            }
            extractor.selectTrack(trackNum);
            return extractor.getTrackFormat(trackNum);
        }
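
        /*
         * The extractor pattern used above, as a minimal sketch ("afd" and "trackIndex"
         * stand in for the values computed by the two methods above):
         *
         *   extractor.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
         *   extractor.selectTrack(trackIndex);
         *   MediaFormat format = extractor.getTrackFormat(trackIndex);
         */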

        /**
         * Decodes video frames and renders them to a surface.
         *
         * @param numOfTotalFrames the number of frames to be decoded.
         * @param timeOutMs the time limit for decoding the frames.
         * @param msPerFrameCap the maximum milliseconds per frame; no cap is set if the value
         *        is less than 1.
         * @return a {@link PlayerResult} containing the result.
         */
        private PlayerResult decodeFramesAndPlay(
                int numOfTotalFrames, long timeOutMs, long msPerFrameCap) {
            int numOfDecodedFrames = 0;
            long firstOutputTimeMs = 0;
            long lastFrameAt = 0;
            final long loopStart = SystemClock.elapsedRealtime();

            while (numOfDecodedFrames < numOfTotalFrames
                    && (SystemClock.elapsedRealtime() - loopStart < timeOutMs)) {
                try {
                    queueDecoderInputBuffer();
                } catch (IllegalStateException exception) {
                    Log.e(TAG, "IllegalStateException in queueDecoderInputBuffer", exception);
                    break;
                }
                try {
                    final int outputResult = dequeueDecoderOutputBuffer();
                    if (outputResult == SimplePlayer.END_OF_STREAM) {
                        break;
                    }
                    if (outputResult == SimplePlayer.DEQUEUE_SUCCESS) {
                        if (firstOutputTimeMs == 0) {
                            firstOutputTimeMs = SystemClock.elapsedRealtime();
                        }
                        if (msPerFrameCap > 0) {
                            // Slow down if a cap is set and not yet reached.
                            final long delayMs =
                                    msPerFrameCap - (SystemClock.elapsedRealtime() - lastFrameAt);
                            if (lastFrameAt != 0 && delayMs > 0) {
                                final long threadDelayMs = 3; // In case of delay in thread.
                                if (delayMs > threadDelayMs) {
                                    try {
                                        Thread.sleep(delayMs - threadDelayMs);
                                    } catch (InterruptedException ex) { /* */ }
                                }
                                while (SystemClock.elapsedRealtime() - lastFrameAt
                                        < msPerFrameCap) { /* */ }
                            }
                            lastFrameAt = SystemClock.elapsedRealtime();
                        }
                        numOfDecodedFrames++;
                    }
                } catch (IllegalStateException exception) {
                    Log.e(TAG, "IllegalStateException in dequeueDecoderOutputBuffer", exception);
                }
            }
            // NB: totalTime measures from "first output" instead of "first input",
            // so it does not include first-frame latency and therefore does not
            // tell us whether the timeout expired.
            final long totalTime = SystemClock.elapsedRealtime() - firstOutputTimeMs;
            return new PlayerResult(true, true, numOfTotalFrames == numOfDecodedFrames, totalTime);
        }

        /**
         * Queues an input buffer with data from the media file, one buffer at a time.
         *
         * @return true on success, false otherwise.
         */
        private boolean queueDecoderInputBuffer() {
            ByteBuffer inputBuffer;
            final ByteBuffer[] inputBufferArray = decoder.getInputBuffers();
            final int inputBufferIndex = decoder.dequeueInputBuffer(DEQUEUE_TIMEOUT_US);
            if (inputBufferIndex >= 0) {
                if (ApiLevelUtil.isBefore(Build.VERSION_CODES.LOLLIPOP)) {
                    inputBuffer = inputBufferArray[inputBufferIndex];
                } else {
                    inputBuffer = decoder.getInputBuffer(inputBufferIndex);
                }
                final int sampleSize = extractor.readSampleData(inputBuffer, 0);
                if (sampleSize > 0) {
                    decoder.queueInputBuffer(
                            inputBufferIndex, 0, sampleSize, extractor.getSampleTime(), 0);
                    extractor.advance();
                }
                return true;
            }
            return false;
        }
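
        /*
         * The input-side MediaCodec pattern above, as a minimal sketch (a negative buffer
         * index simply means "no buffer available yet; try again"):
         *
         *   int index = decoder.dequeueInputBuffer(timeoutUs);
         *   if (index >= 0) {
         *       ByteBuffer buffer = decoder.getInputBuffer(index);  // API 21+
         *       int size = extractor.readSampleData(buffer, 0);
         *       if (size > 0) {
         *           decoder.queueInputBuffer(index, 0, size, extractor.getSampleTime(), 0);
         *           extractor.advance();
         *       }
         *   }
         */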

        /**
         * Dequeues an output buffer.
         * For a video decoder, renders to the surface if one is provided.
         * For an audio decoder, copies the bytes out of the output buffer.
         *
         * @return an integer indicating the status (fail, success, or end of stream).
         */
        private int dequeueDecoderOutputBuffer() {
            final BufferInfo info = new BufferInfo();
            final int decoderStatus = decoder.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT_US);
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                return END_OF_STREAM;
            }
            if (decoderStatus >= 0) {
                // On devices before JELLY_BEAN_MR2, when rendering to a surface,
                // info.size seems to always be 0 even if the decoder successfully
                // decoded the frame, so an empty buffer is only treated as a failure
                // on JELLY_BEAN_MR2 and later.
                if (info.size <= 0 && ApiLevelUtil.isAtLeast(Build.VERSION_CODES.JELLY_BEAN_MR2)) {
                    return DEQUEUE_FAIL;
                }
                if (!renderToSurface) {
                    ByteBuffer outputBuffer;
                    if (ApiLevelUtil.isBefore(Build.VERSION_CODES.LOLLIPOP)) {
                        outputBuffer = decoder.getOutputBuffers()[decoderStatus];
                    } else {
                        outputBuffer = decoder.getOutputBuffer(decoderStatus);
                    }
                    outputBytes = new byte[info.size];
                    outputBuffer.get(outputBytes);
                    outputBuffer.clear();
                }
                decoder.releaseOutputBuffer(decoderStatus, renderToSurface);
                return DEQUEUE_SUCCESS;
            }
            return DEQUEUE_FAIL;
        }

        public void release() {
            decoderRelease();
            extractorRelease();
        }

        private boolean setExtractorDataSource(VideoFormat videoFormat) {
            checkNotNull(videoFormat);
            try {
                final AssetFileDescriptor afd = videoFormat.getAssetFileDescriptor(context);
                extractor.setDataSource(
                        afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
                afd.close();
            } catch (IOException exception) {
                Log.e(TAG, "IOException in setDataSource", exception);
                return false;
            }
            return true;
        }
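
        /*
         * Decoder selection sketch for the API 21+ path in createDecoder() below
         * (findDecoderForFormat may return null if no codec supports the format):
         *
         *   MediaCodecList list = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
         *   String name = list.findDecoderForFormat(mediaFormat);
         *   MediaCodec decoder = MediaCodec.createByCodecName(name);
         */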

        /**
         * Creates a decoder based on conditions.
         *
         * <p>If a codec name is provided, {@link MediaCodec#createByCodecName(String)} is used.
         * If a codec name is not provided,
         * {@link MediaCodecList#findDecoderForFormat(MediaFormat)} is preferred on LOLLIPOP and
         * up for finding the codec name that supports the media format.
         * For OS versions older than LOLLIPOP, {@link MediaCodec#createDecoderByType(String)}
         * is used.
         */
        private boolean createDecoder(MediaFormat mediaFormat) {
            try {
                if (codecName != null) {
                    decoder = MediaCodec.createByCodecName(codecName);
                } else if (ApiLevelUtil.isAtLeast(Build.VERSION_CODES.LOLLIPOP)) {
                    if (Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP) {
                        // On LOLLIPOP, the format must not contain a frame rate.
                        mediaFormat.setString(MediaFormat.KEY_FRAME_RATE, null);
                    }
                    if (mediaCodecList == null) {
                        mediaCodecList = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
                    }
                    decoder = MediaCodec.createByCodecName(
                            mediaCodecList.findDecoderForFormat(mediaFormat));
                } else {
                    decoder = MediaCodec.createDecoderByType(
                            mediaFormat.getString(MediaFormat.KEY_MIME));
                }
            } catch (Exception exception) {
                Log.e(TAG, "Exception during decoder creation", exception);
                decoderRelease();
                return false;
            }
            return true;
        }

        private boolean configureDecoder(Surface surface, MediaFormat mediaFormat) {
            try {
                decoder.configure(mediaFormat, surface, null, 0);
            } catch (Exception exception) {
                Log.e(TAG, "Exception during decoder configuration", exception);
                try {
                    decoder.reset();
                } catch (Exception resetException) {
                    Log.e(TAG, "Exception during decoder reset", resetException);
                }
                decoderRelease();
                return false;
            }
            return true;
        }

        private void setRenderToSurface(boolean render) {
            this.renderToSurface = render;
        }

        private boolean startDecoder() {
            try {
                decoder.start();
            } catch (Exception exception) {
                Log.e(TAG, "Exception during decoder start", exception);
                decoder.reset();
                decoderRelease();
                return false;
            }
            return true;
        }

        private void decoderRelease() {
            if (decoder == null) {
                return;
            }
            try {
                decoder.stop();
            } catch (IllegalStateException exception) {
                decoder.reset();
                // IllegalStateException happens when the decoder fails to start.
                Log.e(TAG, "IllegalStateException during decoder stop", exception);
            } finally {
                try {
                    decoder.release();
                } catch (IllegalStateException exception) {
                    Log.e(TAG, "IllegalStateException during decoder release", exception);
                }
                decoder = null;
            }
        }

        private void extractorRelease() {
            if (extractor == null) {
                return;
            }
            try {
                extractor.release();
            } catch (IllegalStateException exception) {
                Log.e(TAG, "IllegalStateException during extractor release", exception);
            }
        }
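
        /*
         * Hedged note on configureVideoFormat() below: KEY_MAX_WIDTH/KEY_MAX_HEIGHT
         * (available since KITKAT, hence the version check) hint the codec to allocate
         * buffers large enough for the biggest resolution an adaptive stream may reach,
         * e.g.:
         *
         *   mediaFormat.setInteger(MediaFormat.KEY_MAX_WIDTH, 1920);
         *   mediaFormat.setInteger(MediaFormat.KEY_MAX_HEIGHT, 1080);
         */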

        private static void configureVideoFormat(MediaFormat mediaFormat, VideoFormat videoFormat) {
            checkNotNull(mediaFormat);
            checkNotNull(videoFormat);
            videoFormat.setMimeType(mediaFormat.getString(MediaFormat.KEY_MIME));
            videoFormat.setWidth(mediaFormat.getInteger(MediaFormat.KEY_WIDTH));
            videoFormat.setHeight(mediaFormat.getInteger(MediaFormat.KEY_HEIGHT));
            mediaFormat.setInteger(MediaFormat.KEY_WIDTH, videoFormat.getWidth());
            mediaFormat.setInteger(MediaFormat.KEY_HEIGHT, videoFormat.getHeight());
            if (ApiLevelUtil.isBefore(Build.VERSION_CODES.KITKAT)) {
                return;
            }
            // Set KEY_MAX_WIDTH and KEY_MAX_HEIGHT when isAbrEnabled() is set.
            if (videoFormat.isAbrEnabled()) {
                try {
                    // Check for the max resolution supported by the codec.
                    final MediaCodec decoder = MediaUtils.getDecoder(mediaFormat);
                    final VideoCapabilities videoCapabilities = MediaUtils.getVideoCapabilities(
                            decoder.getName(), videoFormat.getMimeType());
                    decoder.release();
                    final int maxWidth = videoCapabilities.getSupportedWidths().getUpper();
                    final int maxHeight =
                            videoCapabilities.getSupportedHeightsFor(maxWidth).getUpper();
                    if (maxWidth >= videoFormat.getWidth() && maxHeight >= videoFormat.getHeight()) {
                        mediaFormat.setInteger(MediaFormat.KEY_MAX_WIDTH, maxWidth);
                        mediaFormat.setInteger(MediaFormat.KEY_MAX_HEIGHT, maxHeight);
                        return;
                    }
                } catch (NullPointerException exception) { /* */ }
                // Set the max width/height to the current size if the codec's max supported
                // width/height cannot be retrieved, or if the max is not greater than the
                // current size.
                mediaFormat.setInteger(MediaFormat.KEY_MAX_WIDTH, videoFormat.getWidth());
                mediaFormat.setInteger(MediaFormat.KEY_MAX_HEIGHT, videoFormat.getHeight());
            }
        }

        /**
         * Returns the index of the first track matching the given media type.
         */
        private int getFirstTrackIndexByType(String format) {
            for (int i = 0; i < extractor.getTrackCount(); i++) {
                MediaFormat trackMediaFormat = extractor.getTrackFormat(i);
                if (trackMediaFormat.getString(MediaFormat.KEY_MIME).startsWith(format + "/")) {
                    return i;
                }
            }
            Log.e(TAG, "couldn't get a " + format + " track");
            return NO_TRACK_INDEX;
        }

        /**
         * Stores the result from {@link SimplePlayer}.
         */
        public static final class PlayerResult {

            public static final int UNSET = -1;
            private final boolean configureSuccess;
            private final boolean startSuccess;
            private final boolean decodeSuccess;
            private final long totalTime;

            public PlayerResult(
                    boolean configureSuccess, boolean startSuccess,
                    boolean decodeSuccess, long totalTime) {
                this.configureSuccess = configureSuccess;
                this.startSuccess = startSuccess;
                this.decodeSuccess = decodeSuccess;
                this.totalTime = totalTime;
            }

            public PlayerResult(PlayerResult playerResult) {
                this(playerResult.configureSuccess, playerResult.startSuccess,
                        playerResult.decodeSuccess, playerResult.totalTime);
            }

            public PlayerResult() {
                // Fake PlayerResult.
                this(false, false, false, UNSET);
            }

            public static PlayerResult failToStart() {
                return new PlayerResult(true, false, false, UNSET);
            }

            public String getFailureMessage() {
                if (!configureSuccess) {
                    return "Failed to configure decoder.";
                } else if (!startSuccess) {
                    return "Failed to start decoder.";
                } else if (!decodeSuccess) {
                    return "Failed to decode the expected number of frames.";
                } else {
                    return "Failed to finish decoding.";
                }
            }

            public boolean isConfigureSuccess() {
                return configureSuccess;
            }

            public boolean isSuccess() {
                return configureSuccess && startSuccess && decodeSuccess && getTotalTime() != UNSET;
            }

            public long getTotalTime() {
                return totalTime;
            }

        }

    }
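
    /*
     * End-to-end flow sketch for a decode-accuracy check (illustrative only;
     * "viewFactory" and "format" are placeholders for one of the VideoViewFactory
     * subclasses and a VideoFormat defined later in this file):
     *
     *   View view = viewFactory.createView(mContext);
     *   getHelper().generateView(view);
     *   viewFactory.waitForViewIsAvailable();
     *   new SimplePlayer(mContext)
     *           .decodeVideoFrames(viewFactory.getSurface(), format, 30);
     *   Bitmap snapshot = getHelper()
     *           .generateBitmapFromVideoViewSnapshot(viewFactory.getVideoViewSnapshot());
     */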

    /* Utility class for collecting common test case functionality. */
    class TestHelper {

        private final String TAG = TestHelper.class.getSimpleName();

        private final Context context;
        private final Handler handler;
        private final Activity activity;

        public TestHelper(Context context, Activity activity) {
            this.context = checkNotNull(context);
            this.handler = new Handler(Looper.getMainLooper());
            this.activity = activity;
        }

        public Bitmap generateBitmapFromImageResourceId(int resourceId) {
            return BitmapFactory.decodeStream(context.getResources().openRawResource(resourceId));
        }

        public Context getContext() {
            return context;
        }

        public void rotateOrientation() {
            handler.post(new Runnable() {
                @Override
                public void run() {
                    final int orientation = context.getResources().getConfiguration().orientation;
                    if (orientation == Configuration.ORIENTATION_PORTRAIT) {
                        activity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
                    } else {
                        activity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
                    }
                }
            });
        }

        public void unsetOrientation() {
            handler.post(new Runnable() {
                @Override
                public void run() {
                    activity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED);
                }
            });
        }

        public void generateView(View view) {
            RelativeLayout relativeLayout =
                    (RelativeLayout) activity.findViewById(R.id.attach_view);
            ViewGenerator viewGenerator = new ViewGenerator(relativeLayout, view);
            handler.post(viewGenerator);
        }

        public void cleanUpView(View view) {
            ViewCleaner viewCleaner = new ViewCleaner(view);
            handler.post(viewCleaner);
        }

        public Bitmap generateBitmapFromVideoViewSnapshot(VideoViewSnapshot snapshot) {
            handler.post(snapshot);
            synchronized (snapshot.getSyncObject()) {
                try {
                    snapshot.getSyncObject().wait(VideoViewSnapshot.SNAPSHOT_TIMEOUT_MS + 100);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                    Log.e(TAG, "Unable to finish generateBitmapFromVideoViewSnapshot().");
                    return null;
                }
            }
            if (!snapshot.isBitmapReady()) {
                Log.e(TAG, "Timed out in generateBitmapFromVideoViewSnapshot().");
                return null;
            }
            return snapshot.getBitmap();
        }

        private class ViewGenerator implements Runnable {

            private final View view;
            private final RelativeLayout relativeLayout;

            public ViewGenerator(RelativeLayout relativeLayout, View view) {
                this.view = checkNotNull(view);
                this.relativeLayout = checkNotNull(relativeLayout);
            }

            @Override
            public void run() {
                if (view.getParent() != null) {
                    ((ViewGroup) view.getParent()).removeView(view);
                }
                RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams(
                        VideoViewFactory.VIEW_WIDTH, VideoViewFactory.VIEW_HEIGHT);
                view.setLayoutParams(params);
                relativeLayout.addView(view);
            }

        }

        private class ViewCleaner implements Runnable {

            private final View view;

            public ViewCleaner(View view) {
                this.view = checkNotNull(view);
            }

            @Override
            public void run() {
                if (view.getParent() != null) {
                    ((ViewGroup) view.getParent()).removeView(view);
                }
            }

        }

    }

}

/* Factory for manipulating a {@link View}. */
abstract class VideoViewFactory {

    public static final long VIEW_WAITTIME_MS = TimeUnit.SECONDS.toMillis(1);
    public static final long DEFAULT_VIEW_AVAILABLE_TIMEOUT_MS = TimeUnit.SECONDS.toMillis(3);
    public static final int VIEW_WIDTH = 480;
    public static final int VIEW_HEIGHT = 360;

    public VideoViewFactory() {}

    public abstract void release();

    public abstract String getName();

    public abstract View createView(Context context);

    public void waitForViewIsAvailable() throws Exception {
        waitForViewIsAvailable(DEFAULT_VIEW_AVAILABLE_TIMEOUT_MS);
    }

    public abstract void waitForViewIsAvailable(long timeOutMs) throws Exception;

    public abstract Surface getSurface();

    public abstract VideoViewSnapshot getVideoViewSnapshot();

    public boolean hasLooper() {
        return Looper.myLooper() != null;
    }

}

/* Factory for building a {@link TextureView}. */
@TargetApi(16)
class TextureViewFactory extends VideoViewFactory implements TextureView.SurfaceTextureListener {

    private static final String TAG = TextureViewFactory.class.getSimpleName();
    private static final String NAME = "TextureView";

    private final Object syncToken = new Object();
    private TextureView textureView;

    public TextureViewFactory() {}

    @Override
    public TextureView createView(Context context) {
        Log.i(TAG, "Creating a " + NAME);
        textureView = DecodeAccuracyTestBase.checkNotNull(new TextureView(context));
        textureView.setSurfaceTextureListener(this);
        return textureView;
    }

    @Override
    public void release() {
        textureView = null;
    }

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    public Surface getSurface() {
        return new Surface(textureView.getSurfaceTexture());
    }

    @Override
    public TextureViewSnapshot getVideoViewSnapshot() {
        return new TextureViewSnapshot(textureView);
    }

    @Override
    public void waitForViewIsAvailable(long timeOutMs) throws Exception {
        final long start = SystemClock.elapsedRealtime();
        while (SystemClock.elapsedRealtime() - start < timeOutMs && !textureView.isAvailable()) {
            synchronized (syncToken) {
                try {
                    syncToken.wait(VIEW_WAITTIME_MS);
                } catch (InterruptedException e) {
                    Log.e(TAG, "Exception occurred when attaching a TextureView to a window.", e);
                    throw new InterruptedException(e.getMessage());
                }
            }
        }
        if (!textureView.isAvailable()) {
            throw new InterruptedException("Taking too long to attach a TextureView to a window.");
        }
        Log.i(TAG, NAME + " is available.");
    }

    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
        synchronized (syncToken) {
            syncToken.notify();
        }
    }

    @Override
    public void onSurfaceTextureSizeChanged(
            SurfaceTexture surfaceTexture, int width, int height) {}

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
        return false;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {}

}

/**
 * Factory for building a {@link SurfaceView}.
 */
@TargetApi(24)
class SurfaceViewFactory extends VideoViewFactory implements SurfaceHolder.Callback {

    private static final String TAG = SurfaceViewFactory.class.getSimpleName();
    private static final String NAME = "SurfaceView";
    private final Object syncToken = new Object();

    private SurfaceView surfaceView;
    private SurfaceHolder surfaceHolder;

    public SurfaceViewFactory() {}

    @Override
    public void release() {
        surfaceView = null;
        surfaceHolder = null;
    }

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    public View createView(Context context) {
        Log.i(TAG, "Creating a " + NAME);
        if (!super.hasLooper()) {
            Looper.prepare();
        }
        surfaceView = new SurfaceView(context);
        surfaceHolder = surfaceView.getHolder();
        surfaceHolder.addCallback(this);
        return surfaceView;
    }

    @Override
    public void waitForViewIsAvailable(long timeOutMs) throws Exception {
        final long start = SystemClock.elapsedRealtime();
        while (SystemClock.elapsedRealtime() - start < timeOutMs && !getSurface().isValid()) {
            synchronized (syncToken) {
                try {
                    syncToken.wait(VIEW_WAITTIME_MS);
                } catch (InterruptedException e) {
                    Log.e(TAG, "Exception occurred when attaching a SurfaceView to a window.", e);
                    throw new InterruptedException(e.getMessage());
                }
            }
        }
        if (!getSurface().isValid()) {
            throw new InterruptedException("Taking too long to attach a SurfaceView to a window.");
        }
        Log.i(TAG, NAME + " is available.");
    }

    @Override
    public Surface getSurface() {
        return surfaceHolder == null ? null : surfaceHolder.getSurface();
    }

    @Override
    public VideoViewSnapshot getVideoViewSnapshot() {
        return new SurfaceViewSnapshot(surfaceView, VIEW_WIDTH, VIEW_HEIGHT);
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {}

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        synchronized (syncToken) {
            syncToken.notify();
        }
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {}

}
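
/*
 * Snapshot strategy summary (descriptive note): TextureViewFactory snapshots via
 * TextureView#getBitmap, SurfaceViewFactory snapshots via PixelCopy (hence its
 * @TargetApi(24) guard), and GLSurfaceViewFactory below renders into an offscreen EGL
 * pbuffer and reads the pixels back with glReadPixels.
 */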

/**
 * Factory for building EGL and GLES objects that render what a {@link GLSurfaceView}
 * would display. Uses {@link EGL10} and {@link GLES20}.
 */
@TargetApi(16)
class GLSurfaceViewFactory extends VideoViewFactory {

    private static final String TAG = GLSurfaceViewFactory.class.getSimpleName();
    private static final String NAME = "GLSurfaceView";

    private final Object surfaceSyncToken = new Object();

    private GLSurfaceViewThread glSurfaceViewThread;
    private boolean byteBufferIsReady = false;

    public GLSurfaceViewFactory() {}

    @Override
    public void release() {
        glSurfaceViewThread.release();
        glSurfaceViewThread = null;
    }

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    public View createView(Context context) {
        Log.i(TAG, "Creating a " + NAME);
        // Do all GL rendering in the GL thread.
        glSurfaceViewThread = new GLSurfaceViewThread();
        glSurfaceViewThread.start();
        // There is no view to display; return null.
        return null;
    }

    @Override
    public void waitForViewIsAvailable(long timeOutMs) throws Exception {
        final long start = SystemClock.elapsedRealtime();
        while (SystemClock.elapsedRealtime() - start < timeOutMs
                && glSurfaceViewThread.getSurface() == null) {
            synchronized (surfaceSyncToken) {
                try {
                    surfaceSyncToken.wait(VIEW_WAITTIME_MS);
                } catch (InterruptedException e) {
                    Log.e(TAG, "Exception occurred when waiting for the surface from"
                            + " GLSurfaceView to become available.", e);
                    throw new InterruptedException(e.getMessage());
                }
            }
        }
        if (glSurfaceViewThread.getSurface() == null) {
            throw new InterruptedException("Taking too long for the surface from"
                    + " GLSurfaceView to become available.");
        }
        Log.i(TAG, NAME + " is available.");
    }

    @Override
    public Surface getSurface() {
        return glSurfaceViewThread.getSurface();
    }

    @Override
    public VideoViewSnapshot getVideoViewSnapshot() {
        return new GLSurfaceViewSnapshot(this, VIEW_WIDTH, VIEW_HEIGHT);
    }

    public boolean byteBufferIsReady() {
        return byteBufferIsReady;
    }

    public ByteBuffer getByteBuffer() {
        return glSurfaceViewThread.getByteBuffer();
    }

    /* Does all GL operations. */
    private class GLSurfaceViewThread extends Thread
            implements SurfaceTexture.OnFrameAvailableListener {

        private static final int FLOAT_SIZE_BYTES = 4;
        private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
        private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
        private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
        private FloatBuffer triangleVertices;
        private float[] textureTransform = new float[16];

        private float[] triangleVerticesData = {
            // X, Y, Z, U, V
            -1f, -1f, 0f, 0f, 1f,
             1f, -1f, 0f, 1f, 1f,
            -1f,  1f, 0f, 0f, 0f,
             1f,  1f, 0f, 1f, 0f,
        };
        // Make the top-left corner correspond to texture coordinate (0, 0).
        // This complies with the transformation matrix obtained from
        // SurfaceTexture.getTransformMatrix.
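
        // Descriptive note: the shaders below draw this quad as a full-viewport
        // triangle strip; the vertex shader remaps the texture coordinates through
        // uTextureTransform (the matrix from SurfaceTexture#getTransformMatrix), so
        // no extra per-device flipping is done here.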

        private static final String VERTEX_SHADER =
                "attribute vec4 aPosition;\n"
                + "attribute vec4 aTextureCoord;\n"
                + "uniform mat4 uTextureTransform;\n"
                + "varying vec2 vTextureCoord;\n"
                + "void main() {\n"
                + "  gl_Position = aPosition;\n"
                + "  vTextureCoord = (uTextureTransform * aTextureCoord).xy;\n"
                + "}\n";

        private static final String FRAGMENT_SHADER =
                "#extension GL_OES_EGL_image_external : require\n"
                + "precision mediump float;\n" // highp here doesn't seem to matter
                + "varying vec2 vTextureCoord;\n"
                + "uniform samplerExternalOES sTexture;\n"
                + "void main() {\n"
                + "  gl_FragColor = texture2D(sTexture, vTextureCoord);\n"
                + "}\n";

        private int glProgram;
        private int textureID = -1;
        private int aPositionHandle;
        private int aTextureHandle;
        private int uTextureTransformHandle;
        private EGLDisplay eglDisplay = null;
        private EGLContext eglContext = null;
        private EGLSurface eglSurface = null;
        private EGL10 egl10;
        private Surface surface = null;
        private SurfaceTexture surfaceTexture;
        private ByteBuffer byteBuffer;
        private Looper looper;

        public GLSurfaceViewThread() {}

        @Override
        public void run() {
            Looper.prepare();
            looper = Looper.myLooper();
            triangleVertices = ByteBuffer
                    .allocateDirect(triangleVerticesData.length * FLOAT_SIZE_BYTES)
                    .order(ByteOrder.nativeOrder()).asFloatBuffer();
            triangleVertices.put(triangleVerticesData).position(0);

            eglSetup();
            makeCurrent();
            eglSurfaceCreated();

            surfaceTexture = new SurfaceTexture(getTextureId());
            surfaceTexture.setOnFrameAvailableListener(this);
            surface = new Surface(surfaceTexture);
            synchronized (surfaceSyncToken) {
                surfaceSyncToken.notify();
            }
            // Store pixels from the surface.
            byteBuffer = ByteBuffer.allocateDirect(VIEW_WIDTH * VIEW_HEIGHT * 4);
            byteBuffer.order(ByteOrder.LITTLE_ENDIAN);
            Looper.loop();
        }

        @Override
        public void onFrameAvailable(SurfaceTexture st) {
            checkGlError("before updateTexImage");
            surfaceTexture.updateTexImage();
            st.getTransformMatrix(textureTransform);
            drawFrame();
            saveFrame();
        }
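
        // Descriptive note (assumption about callback delivery): the SurfaceTexture is
        // created on this thread after Looper.prepare(), so onFrameAvailable() above is
        // delivered on this thread's Looper, where the EGL context made current in run()
        // is still current for updateTexImage()/drawFrame().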

        /* Prepares EGL to use a GLES 2.0 context and a surface that supports pbuffer. */
        public void eglSetup() {
            egl10 = (EGL10) EGLContext.getEGL();
            eglDisplay = egl10.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
            if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
                throw new RuntimeException("unable to get egl10 display");
            }
            int[] version = new int[2];
            if (!egl10.eglInitialize(eglDisplay, version)) {
                eglDisplay = null;
                throw new RuntimeException("unable to initialize egl10");
            }
            // Configure EGL for pbuffer and OpenGL ES 2.0, 24-bit RGB.
            int[] configAttribs = {
                EGL10.EGL_RED_SIZE, 8,
                EGL10.EGL_GREEN_SIZE, 8,
                EGL10.EGL_BLUE_SIZE, 8,
                EGL10.EGL_ALPHA_SIZE, 8,
                EGL10.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
                EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
                EGL10.EGL_NONE
            };
            EGLConfig[] configs = new EGLConfig[1];
            int[] numConfigs = new int[1];
            if (!egl10.eglChooseConfig(
                    eglDisplay, configAttribs, configs, configs.length, numConfigs)) {
                throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config");
            }
            // Configure EGL context for OpenGL ES 2.0.
            int[] contextAttribs = {
                EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
                EGL10.EGL_NONE
            };
            eglContext = egl10.eglCreateContext(
                    eglDisplay, configs[0], EGL10.EGL_NO_CONTEXT, contextAttribs);
            checkEglError("eglCreateContext");
            if (eglContext == null) {
                throw new RuntimeException("null context");
            }
            // Create a pbuffer surface.
            int[] surfaceAttribs = {
                EGL10.EGL_WIDTH, VIEW_WIDTH,
                EGL10.EGL_HEIGHT, VIEW_HEIGHT,
                EGL10.EGL_NONE
            };
            eglSurface = egl10.eglCreatePbufferSurface(eglDisplay, configs[0], surfaceAttribs);
            checkEglError("eglCreatePbufferSurface");
            if (eglSurface == null) {
                throw new RuntimeException("surface was null");
            }
        }

        public void release() {
            looper.quit();
            surface.release();
            surfaceTexture.release();
            byteBufferIsReady = false;
            byteBuffer = null;
            if (eglDisplay != EGL10.EGL_NO_DISPLAY) {
                egl10.eglMakeCurrent(eglDisplay,
                        EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
                egl10.eglDestroySurface(eglDisplay, eglSurface);
                egl10.eglDestroyContext(eglDisplay, eglContext);
                // TODO: uncomment the following line after fixing the crash in the GL
                // driver libGLESv2_adreno.so; see b/123755902.
                // egl10.eglTerminate(eglDisplay);
            }
            eglDisplay = EGL10.EGL_NO_DISPLAY;
            eglContext = EGL10.EGL_NO_CONTEXT;
            eglSurface = EGL10.EGL_NO_SURFACE;
        }

        /* Makes our EGL context and surface current. */
        public void makeCurrent() {
            if (!egl10.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
                throw new RuntimeException("eglMakeCurrent failed");
            }
            checkEglError("eglMakeCurrent");
        }

        /* Call this after the EGL surface is created and made current. */
        public void eglSurfaceCreated() {
            glProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
            if (glProgram == 0) {
                throw new RuntimeException("failed creating program");
            }
            aPositionHandle = GLES20.glGetAttribLocation(glProgram, "aPosition");
            checkLocation(aPositionHandle, "aPosition");
            aTextureHandle = GLES20.glGetAttribLocation(glProgram, "aTextureCoord");
            checkLocation(aTextureHandle, "aTextureCoord");
            uTextureTransformHandle = GLES20.glGetUniformLocation(glProgram, "uTextureTransform");
            checkLocation(uTextureTransformHandle, "uTextureTransform");

            int[] textures = new int[1];
            GLES20.glGenTextures(1, textures, 0);
            checkGlError("glGenTextures");
            textureID = textures[0];
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureID);
            checkGlError("glBindTexture");

            GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
                    GLES20.GL_LINEAR);
            GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
                    GLES20.GL_LINEAR);
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
                    GLES20.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
                    GLES20.GL_CLAMP_TO_EDGE);
            checkGlError("glTexParameter");
        }

        public void drawFrame() {
            GLES20.glUseProgram(glProgram);
            checkGlError("glUseProgram");
            GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
            checkGlError("glActiveTexture");
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureID);
            checkGlError("glBindTexture");

            triangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
            GLES20.glVertexAttribPointer(aPositionHandle, 3, GLES20.GL_FLOAT, false,
                    TRIANGLE_VERTICES_DATA_STRIDE_BYTES, triangleVertices);
            checkGlError("glVertexAttribPointer aPositionHandle");
            GLES20.glEnableVertexAttribArray(aPositionHandle);
            checkGlError("glEnableVertexAttribArray aPositionHandle");

            triangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
            GLES20.glVertexAttribPointer(aTextureHandle, 2, GLES20.GL_FLOAT, false,
                    TRIANGLE_VERTICES_DATA_STRIDE_BYTES, triangleVertices);
            checkGlError("glVertexAttribPointer aTextureHandle");
            GLES20.glEnableVertexAttribArray(aTextureHandle);
            checkGlError("glEnableVertexAttribArray aTextureHandle");

            GLES20.glUniformMatrix4fv(uTextureTransformHandle, 1, false, textureTransform, 0);
            checkGlError("glUniformMatrix uTextureTransformHandle");

            GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
            checkGlError("glDrawArrays");
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
        }
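
        // Descriptive note: saveFrame() below copies the freshly drawn pbuffer into
        // byteBuffer and flips byteBufferIsReady; GLSurfaceViewSnapshot polls
        // byteBufferIsReady() before reading the buffer back into a Bitmap.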

        /* Reads the pixels into a ByteBuffer. */
        public void saveFrame() {
            byteBufferIsReady = false;
            byteBuffer.clear();
            GLES20.glReadPixels(0, 0, VIEW_WIDTH, VIEW_HEIGHT, GLES20.GL_RGBA,
                    GLES20.GL_UNSIGNED_BYTE, byteBuffer);
            byteBufferIsReady = true;
        }

        public int getTextureId() {
            return textureID;
        }

        public Surface getSurface() {
            return surface;
        }

        public ByteBuffer getByteBuffer() {
            return byteBuffer;
        }

        private int loadShader(int shaderType, String source) {
            int shader = GLES20.glCreateShader(shaderType);
            checkGlError("glCreateShader type=" + shaderType);
            GLES20.glShaderSource(shader, source);
            GLES20.glCompileShader(shader);
            int[] compiled = new int[1];
            GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);

            if (compiled[0] == 0) {
                Log.e(TAG, "Could not compile shader " + shaderType + ":");
                Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
                GLES20.glDeleteShader(shader);
                shader = 0;
            }
            return shader;
        }

        private int createProgram(String vertexSource, String fragmentSource) {
            int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
            if (vertexShader == 0) {
                return 0;
            }
            int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
            if (pixelShader == 0) {
                return 0;
            }
            int program = GLES20.glCreateProgram();
            if (program == 0) {
                // Bail out instead of attaching shaders to program handle 0.
                Log.e(TAG, "Could not create program");
                return 0;
            }
            GLES20.glAttachShader(program, vertexShader);
            checkGlError("glAttachShader");
            GLES20.glAttachShader(program, pixelShader);
            checkGlError("glAttachShader");
            GLES20.glLinkProgram(program);
            int[] linkStatus = new int[1];
            GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);

            if (linkStatus[0] != GLES20.GL_TRUE) {
                Log.e(TAG, "Could not link program: ");
                Log.e(TAG, GLES20.glGetProgramInfoLog(program));
                GLES20.glDeleteProgram(program);
                program = 0;
            }
            return program;
        }

        private void checkEglError(String msg) {
            int error;
            if ((error = egl10.eglGetError()) != EGL10.EGL_SUCCESS) {
                throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
            }
        }

        public void checkGlError(String op) {
            int error;
            if ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
                Log.e(TAG, op + ": glError " + error);
                throw new RuntimeException(op + ": glError " + error);
            }
        }

        public void checkLocation(int location, String label) {
            if (location < 0) {
                throw new RuntimeException("Unable to locate '" + label + "' in program");
            }
        }
    }

}
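
/*
 * Consumption sketch for the snapshot classes below (illustrative; "testHelper" and
 * "videoViewFactory" are placeholders): the snapshot runnable is posted to the UI
 * thread and the caller blocks on its sync object until the bitmap is ready:
 *
 *   Bitmap bitmap = testHelper.generateBitmapFromVideoViewSnapshot(
 *           videoViewFactory.getVideoViewSnapshot());
 */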

/* Definition of a VideoViewSnapshot and a runnable to get a bitmap from a view. */
abstract class VideoViewSnapshot implements Runnable {

    public static final long SNAPSHOT_TIMEOUT_MS = TimeUnit.SECONDS.toMillis(30);
    public static final long SLEEP_TIME_MS = 30;
    public static final Object SYNC_TOKEN = new Object();

    public abstract Bitmap getBitmap();

    public abstract boolean isBitmapReady();

    public abstract Object getSyncObject();

}

/* Runnable to get a bitmap from a TextureView on the UI thread via a handler.
 * This class is to be used together with
 * {@link TestHelper#generateBitmapFromVideoViewSnapshot(VideoViewSnapshot)}.
 */
class TextureViewSnapshot extends VideoViewSnapshot {

    private final TextureView tv;
    private Bitmap bitmap = null;

    public TextureViewSnapshot(TextureView tv) {
        this.tv = DecodeAccuracyTestBase.checkNotNull(tv);
    }

    @Override
    public void run() {
        bitmap = tv.getBitmap();
        synchronized (SYNC_TOKEN) {
            SYNC_TOKEN.notify();
        }
    }

    @Override
    public Bitmap getBitmap() {
        return bitmap;
    }

    @Override
    public boolean isBitmapReady() {
        return bitmap != null;
    }

    @Override
    public Object getSyncObject() {
        return SYNC_TOKEN;
    }

}

/**
 * Runnable to get a bitmap of a {@link SurfaceView}.
 * Note that PixelCopy itself does not have to be called from a runnable.
 * This class is to be used together with
 * {@link TestHelper#generateBitmapFromVideoViewSnapshot(VideoViewSnapshot)}.
 */
class SurfaceViewSnapshot extends VideoViewSnapshot {

    private static final String TAG = SurfaceViewSnapshot.class.getSimpleName();
    private static final int PIXELCOPY_TIMEOUT_MS = 1000;
    private static final int INITIAL_STATE = -1;

    private final SurfaceView surfaceView;
    private final int width;
    private final int height;

    private Bitmap bitmap;
    private int copyResult;

    public SurfaceViewSnapshot(SurfaceView surfaceView, int width, int height) {
        this.surfaceView = surfaceView;
        this.width = width;
        this.height = height;
        this.copyResult = INITIAL_STATE;
        this.bitmap = null;
    }

    @Override
    public void run() {
        final long start = SystemClock.elapsedRealtime();
        copyResult = INITIAL_STATE;
        final SynchronousPixelCopy copyHelper = new SynchronousPixelCopy();
        bitmap = Bitmap.createBitmap(width, height, Config.ARGB_8888);
        try {
            // Wait for PixelCopy to finish.
            while ((copyResult = copyHelper.request(surfaceView, bitmap)) != PixelCopy.SUCCESS
                    && (SystemClock.elapsedRealtime() - start) < SNAPSHOT_TIMEOUT_MS) {
                Thread.sleep(SLEEP_TIME_MS);
            }
        } catch (InterruptedException e) {
            Log.e(TAG, "PixelCopy was stopped/interrupted before it finished.", e);
            bitmap = null;
        } finally {
            copyHelper.release();
            synchronized (SYNC_TOKEN) {
                SYNC_TOKEN.notify();
            }
        }
    }

    @Override
    public Bitmap getBitmap() {
        return bitmap;
    }

    @Override
    public boolean isBitmapReady() {
        return bitmap != null && copyResult == PixelCopy.SUCCESS;
    }

    @Override
    public Object getSyncObject() {
        return SYNC_TOKEN;
    }

    private static class SynchronousPixelCopy implements OnPixelCopyFinishedListener {

        private final Handler handler;
        private final HandlerThread thread;

        private int status = INITIAL_STATE;

        public SynchronousPixelCopy() {
            this.thread = new HandlerThread("PixelCopyHelper");
            thread.start();
            this.handler = new Handler(thread.getLooper());
        }

        public void release() {
            if (thread.isAlive()) {
                thread.quit();
            }
        }

        public int request(SurfaceView source, Bitmap dest) {
            synchronized (this) {
                try {
                    PixelCopy.request(source, dest, this, handler);
                    return getResultLocked();
                } catch (Exception e) {
                    Log.e(TAG, "Exception occurred when copying a SurfaceView.", e);
                    return -1;
                }
            }
        }

        private int getResultLocked() {
            try {
                this.wait(PIXELCOPY_TIMEOUT_MS);
            } catch (InterruptedException e) { /* PixelCopy request didn't complete within 1s */ }
            return status;
        }

        @Override
        public void onPixelCopyFinished(int copyResult) {
            synchronized (this) {
                status = copyResult;
                this.notify();
            }
        }

    }

}
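
/*
 * Minimal PixelCopy pattern used by SynchronousPixelCopy above (assumes API 24+;
 * "listener" is a placeholder for an OnPixelCopyFinishedListener):
 *
 *   HandlerThread thread = new HandlerThread("PixelCopyHelper");
 *   thread.start();
 *   PixelCopy.request(surfaceView, bitmap, listener, new Handler(thread.getLooper()));
 */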

/**
 * Runnable to get a bitmap from a GLSurfaceView on the UI thread via a handler.
 * Note that, because of how the bitmap is captured in GLSurfaceView,
 * this does not strictly have to be a runnable.
 * This class is to be used together with
 * {@link TestHelper#generateBitmapFromVideoViewSnapshot(VideoViewSnapshot)}.
 */
class GLSurfaceViewSnapshot extends VideoViewSnapshot {

    private static final String TAG = GLSurfaceViewSnapshot.class.getSimpleName();

    private final GLSurfaceViewFactory glSurfaceViewFactory;
    private final int width;
    private final int height;

    private Bitmap bitmap = null;
    private boolean bitmapIsReady = false;

    public GLSurfaceViewSnapshot(GLSurfaceViewFactory glSurfaceViewFactory, int width, int height) {
        this.glSurfaceViewFactory = DecodeAccuracyTestBase.checkNotNull(glSurfaceViewFactory);
        this.width = width;
        this.height = height;
    }

    @Override
    public void run() {
        bitmapIsReady = false;
        bitmap = null;
        try {
            waitForByteBuffer();
        } catch (InterruptedException exception) {
            Log.e(TAG, exception.getMessage());
            bitmap = null;
            notifyObject();
            return;
        }
        try {
            final ByteBuffer byteBuffer = glSurfaceViewFactory.getByteBuffer();
            bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
            byteBuffer.rewind();
            bitmap.copyPixelsFromBuffer(byteBuffer);
            bitmapIsReady = true;
            byteBuffer.clear();
        } catch (NullPointerException exception) {
            Log.e(TAG, "glSurfaceViewFactory or byteBuffer may have been released", exception);
            bitmap = null;
        } finally {
            notifyObject();
        }
    }

    @Override
    public Bitmap getBitmap() {
        return bitmap;
    }

    @Override
    public boolean isBitmapReady() {
        return bitmapIsReady;
    }

    @Override
    public Object getSyncObject() {
        return SYNC_TOKEN;
    }

    private void notifyObject() {
        synchronized (SYNC_TOKEN) {
            SYNC_TOKEN.notify();
        }
    }

    private void waitForByteBuffer() throws InterruptedException {
        // Wait for the byte buffer to be ready.
        final long start = SystemClock.elapsedRealtime();
        while (SystemClock.elapsedRealtime() - start < SNAPSHOT_TIMEOUT_MS) {
            if (glSurfaceViewFactory.byteBufferIsReady()) {
                return;
            }
            Thread.sleep(SLEEP_TIME_MS);
        }
        throw new InterruptedException("Taking too long to read pixels into a ByteBuffer.");
    }

}
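
/*
 * Filename convention example (hypothetical name, matching the parser below):
 * "straight-h264_480x360_30fps.mp4" parses to description "straight", codec "h264",
 * and size 480x360; the framerate and container suffix are ignored.
 */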

/* Stores information about a video file. */
class VideoFormat {

    public static final String STRING_UNSET = "UNSET";
    public static final int INT_UNSET = -1;

    private final String filename;

    private String mimeType = STRING_UNSET;
    private int width = INT_UNSET;
    private int height = INT_UNSET;
    private int maxWidth = INT_UNSET;
    private int maxHeight = INT_UNSET;
    private FilenameParser filenameParser;

    public VideoFormat(String filename) {
        this.filename = filename;
    }

    public VideoFormat(VideoFormat videoFormat) {
        this(videoFormat.filename);
    }

    private FilenameParser getParsedName() {
        if (filenameParser == null) {
            filenameParser = new FilenameParser(filename);
        }
        return filenameParser;
    }

    public String getMediaFormat() {
        return "video";
    }

    public void setMimeType(String mimeType) {
        this.mimeType = mimeType;
    }

    public String getMimeType() {
        if (mimeType.equals(STRING_UNSET)) {
            return getParsedName().getMimeType();
        }
        return mimeType;
    }

    public void setWidth(int width) {
        this.width = width;
    }

    public void setMaxWidth(int maxWidth) {
        this.maxWidth = maxWidth;
    }

    public int getWidth() {
        if (width == INT_UNSET) {
            return getParsedName().getWidth();
        }
        return width;
    }

    public int getMaxWidth() {
        return maxWidth;
    }

    public int getOriginalWidth() {
        return getParsedName().getWidth();
    }

    public void setHeight(int height) {
        this.height = height;
    }

    public void setMaxHeight(int maxHeight) {
        this.maxHeight = maxHeight;
    }

    public int getHeight() {
        if (height == INT_UNSET) {
            return getParsedName().getHeight();
        }
        return height;
    }

    public int getMaxHeight() {
        return maxHeight;
    }

    public int getOriginalHeight() {
        return getParsedName().getHeight();
    }

    public boolean isAbrEnabled() {
        return false;
    }

    public String getOriginalSize() {
        if (width == INT_UNSET || height == INT_UNSET) {
            return getParsedName().getSize();
        }
        return width + "x" + height;
    }

    public String getDescription() {
        return getParsedName().getDescription();
    }

    public String toPrettyString() {
        return getParsedName().toPrettyString();
    }

    public AssetFileDescriptor getAssetFileDescriptor(Context context) {
        try {
            return context.getAssets().openFd(filename);
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        }
    }

}

/* File parser for filenames in the format {description}-{codec}_{size}_{framerate}.{format}. */
class FilenameParser {

    static final String VP9 = "vp9";
    static final String H264 = "h264";

    private final String filename;

    private String codec = VideoFormat.STRING_UNSET;
    private String description = VideoFormat.STRING_UNSET;
/* File parser for filenames of the form {description}-{codec}_{size}_{framerate}.{format}. */
class FilenameParser {

    static final String VP9 = "vp9";
    static final String H264 = "h264";

    private final String filename;

    private String codec = VideoFormat.STRING_UNSET;
    private String description = VideoFormat.STRING_UNSET;
    private int width = VideoFormat.INT_UNSET;
    private int height = VideoFormat.INT_UNSET;

    FilenameParser(String filename) {
        this.filename = filename;
        parseFilename(filename);
    }

    public String getCodec() {
        return codec;
    }

    public String getMimeType() {
        switch (codec) {
            case H264:
                return MimeTypes.VIDEO_H264;
            case VP9:
                return MimeTypes.VIDEO_VP9;
            default:
                return null;
        }
    }

    public int getWidth() {
        return width;
    }

    public int getHeight() {
        return height;
    }

    public String getSize() {
        return width + "x" + height;
    }

    public String getDescription() {
        return description;
    }

    String toPrettyString() {
        // Fall back to the raw filename when parsing failed. Note that codec is
        // never null here; it defaults to VideoFormat.STRING_UNSET.
        if (!codec.equals(VideoFormat.STRING_UNSET)) {
            return codec.toUpperCase() + " " + getSize();
        }
        return filename;
    }

    private void parseFilename(String filename) {
        final String descriptionDelimiter = "-";
        final String infoDelimiter = "_";
        final String sizeDelimiter = "x";
        try {
            this.description = filename.split(descriptionDelimiter)[0];
            final String[] fileInfo = filename.split(descriptionDelimiter)[1].split(infoDelimiter);
            this.codec = fileInfo[0];
            this.width = Integer.parseInt(fileInfo[1].split(sizeDelimiter)[0]);
            this.height = Integer.parseInt(fileInfo[1].split(sizeDelimiter)[1]);
        } catch (Exception exception) {
            // Filename does not match the expected format; fields keep their UNSET defaults.
        }
    }

}
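/*
 * Worked example for FilenameParser above (the file name is made up): for
 * "ball-h264_1920x1080_30fps.mp4", parseFilename() splits on "-" to get the
 * description "ball", then on "_" to get the codec "h264" and the size token
 * "1920x1080", which yields width 1920 and height 1080. getMimeType() then maps
 * the codec to MimeTypes.VIDEO_H264 ("video/avc"). The framerate and container
 * tokens are currently ignored.
 */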
/**
 * Compares bitmaps to determine if they are similar.
 *
 * <p>To determine the greatest pixel difference we transform each pixel into the
 * CIE L*a*b* color space and measure pixel differences with the Euclidean distance
 * formula. L*a*b* is approximately perceptually uniform, so these distances track
 * how different two colors actually look far better than distances in raw RGB would.
 */
class BitmapCompare {

    private static final int RED = 0;
    private static final int GREEN = 1;
    private static final int BLUE = 2;
    private static final int X = 0;
    private static final int Y = 1;
    private static final int Z = 2;

    private BitmapCompare() {}

    /**
     * Produces the greatest pixel difference between two bitmaps. Used to determine
     * bitmap similarity.
     *
     * @param bitmap1 A bitmap to compare to bitmap2.
     * @param bitmap2 A bitmap to compare to bitmap1.
     * @return A {@link Difference} with an integer describing the greatest pixel difference,
     *     using {@link Integer#MAX_VALUE} for completely different bitmaps, and an optional
     *     {@link Pair} of the (col, row) pixel coordinate where it was first found.
     */
    @TargetApi(12)
    public static Difference computeDifference(Bitmap bitmap1, Bitmap bitmap2) {
        if (bitmap1 == null || bitmap2 == null) {
            return new Difference(Integer.MAX_VALUE);
        }
        if (bitmap1.equals(bitmap2) || bitmap1.sameAs(bitmap2)) {
            return new Difference(0);
        }
        if (bitmap1.getHeight() != bitmap2.getHeight() || bitmap1.getWidth() != bitmap2.getWidth()) {
            return new Difference(Integer.MAX_VALUE);
        }
        // Convert all pixels to the CIE L*a*b* color space so we can do a direct color
        // comparison using the Euclidean distance formula.
        final double[][] pixels1 = convertRgbToCieLab(bitmap1);
        final double[][] pixels2 = convertRgbToCieLab(bitmap2);
        int greatestDifference = 0;
        int greatestDifferenceIndex = -1;
        for (int i = 0; i < pixels1.length; i++) {
            final int difference = euclideanDistance(pixels1[i], pixels2[i]);
            if (difference > greatestDifference) {
                greatestDifference = difference;
                greatestDifferenceIndex = i;
            }
        }
        if (greatestDifferenceIndex == -1) {
            // No pixel pair differed after rounding.
            return new Difference(0);
        }
        // Pixels are stored row-major: column = index % width, row = index / width.
        return new Difference(greatestDifference, Pair.create(
                greatestDifferenceIndex % bitmap1.getWidth(),
                greatestDifferenceIndex / bitmap1.getWidth()));
    }

    @SuppressLint("UseSparseArrays")
    private static double[][] convertRgbToCieLab(Bitmap bitmap) {
        // Cache the transform per distinct ARGB value; frames typically contain
        // far fewer distinct colors than pixels.
        final HashMap<Integer, double[]> pixelTransformCache = new HashMap<>();
        final double[][] result = new double[bitmap.getHeight() * bitmap.getWidth()][3];
        final int[] pixels = new int[bitmap.getHeight() * bitmap.getWidth()];
        bitmap.getPixels(pixels, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
        for (int i = 0; i < pixels.length; i++) {
            final double[] transformedColor = pixelTransformCache.get(pixels[i]);
            if (transformedColor != null) {
                result[i] = transformedColor;
            } else {
                result[i] = convertXyzToCieLab(convertRgbToXyz(pixels[i]));
                pixelTransformCache.put(pixels[i], result[i]);
            }
        }
        return result;
    }
    /**
     * Converts RGB to XYZ using the algorithm defined at:
     * http://www.easyrgb.com/index.php?X=MATH&H=02#text2
     *
     * <p><pre>{@code
     * var_R = ( R / 255 ) //R from 0 to 255
     * var_G = ( G / 255 ) //G from 0 to 255
     * var_B = ( B / 255 ) //B from 0 to 255
     *
     * if ( var_R > 0.04045 ) var_R = ( ( var_R + 0.055 ) / 1.055 ) ^ 2.4
     * else                   var_R = var_R / 12.92
     * if ( var_G > 0.04045 ) var_G = ( ( var_G + 0.055 ) / 1.055 ) ^ 2.4
     * else                   var_G = var_G / 12.92
     * if ( var_B > 0.04045 ) var_B = ( ( var_B + 0.055 ) / 1.055 ) ^ 2.4
     * else                   var_B = var_B / 12.92
     *
     * var_R = var_R * 100
     * var_G = var_G * 100
     * var_B = var_B * 100
     *
     * // Observer. = 2°, Illuminant = D65
     * X = var_R * 0.4124 + var_G * 0.3576 + var_B * 0.1805
     * Y = var_R * 0.2126 + var_G * 0.7152 + var_B * 0.0722
     * Z = var_R * 0.0193 + var_G * 0.1192 + var_B * 0.9505
     * }</pre>
     *
     * @param rgbColor A packed int made up of 4 bytes: alpha, red, green, blue.
     * @return An array of doubles where each value is a component of the XYZ color space.
     */
    private static double[] convertRgbToXyz(int rgbColor) {
        final double[] comp = {Color.red(rgbColor), Color.green(rgbColor), Color.blue(rgbColor)};
        for (int i = 0; i < comp.length; i++) {
            comp[i] /= 255.0;
            if (comp[i] > 0.04045) {
                comp[i] = Math.pow((comp[i] + 0.055) / 1.055, 2.4);
            } else {
                comp[i] /= 12.92;
            }
            comp[i] *= 100;
        }
        final double x = (comp[RED] * 0.4124) + (comp[GREEN] * 0.3576) + (comp[BLUE] * 0.1805);
        final double y = (comp[RED] * 0.2126) + (comp[GREEN] * 0.7152) + (comp[BLUE] * 0.0722);
        final double z = (comp[RED] * 0.0193) + (comp[GREEN] * 0.1192) + (comp[BLUE] * 0.9505);
        return new double[] {x, y, z};
    }

    /**
     * Converts XYZ to CIE-L*a*b* using the algorithm defined at:
     * http://www.easyrgb.com/index.php?X=MATH&H=07#text7
     *
     * <p><pre>{@code
     * var_X = X / ref_X   //ref_X =  95.047  Observer= 2°, Illuminant= D65
     * var_Y = Y / ref_Y   //ref_Y = 100.000
     * var_Z = Z / ref_Z   //ref_Z = 108.883
     *
     * if ( var_X > 0.008856 ) var_X = var_X ^ ( 1/3 )
     * else                    var_X = ( 7.787 * var_X ) + ( 16 / 116 )
     * if ( var_Y > 0.008856 ) var_Y = var_Y ^ ( 1/3 )
     * else                    var_Y = ( 7.787 * var_Y ) + ( 16 / 116 )
     * if ( var_Z > 0.008856 ) var_Z = var_Z ^ ( 1/3 )
     * else                    var_Z = ( 7.787 * var_Z ) + ( 16 / 116 )
     *
     * CIE-L* = ( 116 * var_Y ) - 16
     * CIE-a* = 500 * ( var_X - var_Y )
     * CIE-b* = 200 * ( var_Y - var_Z )
     * }</pre>
     *
     * @param comp An array of doubles where each value is a component of the XYZ color space.
     * @return An array of doubles where each value is a component of the CIE-L*a*b* color space.
     */
    private static double[] convertXyzToCieLab(double[] comp) {
        comp[X] /= 95.047;
        comp[Y] /= 100.0;
        comp[Z] /= 108.883;
        for (int i = 0; i < comp.length; i++) {
            if (comp[i] > 0.008856) {
                comp[i] = Math.pow(comp[i], (1.0 / 3.0));
            } else {
                comp[i] = (7.787 * comp[i]) + (16.0 / 116.0);
            }
        }
        final double l = (116 * comp[Y]) - 16;
        final double a = 500 * (comp[X] - comp[Y]);
        final double b = 200 * (comp[Y] - comp[Z]);
        return new double[] {l, a, b};
    }

    private static int euclideanDistance(double[] p1, double[] p2) {
        if (p1.length != p2.length) {
            return Integer.MAX_VALUE;
        }
        double result = 0;
        for (int i = 0; i < p1.length; i++) {
            result += Math.pow(p1[i] - p2[i], 2);
        }
        return (int) Math.round(Math.sqrt(result));
    }
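    /*
     * Worked example of the pipeline above (values rounded): an opaque white
     * pixel (255, 255, 255) maps to XYZ ~(95.05, 100.00, 108.90) and then to
     * L*a*b* ~(100, 0, 0), while an opaque black pixel maps to L*a*b* ~(0, 0, 0).
     * Their Euclidean distance is therefore ~100, which is what
     * computeDifference() reports when comparing an all-white bitmap against an
     * all-black one of the same size.
     */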
    /**
     * Crops the border of the array representing an image by hBorderSize
     * pixels on the left and right borders, and by vBorderSize pixels on the
     * top and bottom borders (so the width is 2 * hBorderSize smaller and
     * the height is 2 * vBorderSize smaller), then scales the image up to
     * match the original size using bilinear interpolation.
     */
    private static Bitmap shrinkAndScaleBilinear(
            Bitmap input, double hBorderSize, double vBorderSize) {

        int width = input.getWidth();
        int height = input.getHeight();

        // Compute the proper step sizes.
        double xInc = ((double) width - 1 - hBorderSize * 2) / (double) (width - 1);
        double yInc = ((double) height - 1 - vBorderSize * 2) / (double) (height - 1);

        // Read the input bitmap into RGB arrays.
        int[] inputPixels = new int[width * height];
        input.getPixels(inputPixels, 0, width, 0, 0, width, height);
        int[][] inputRgb = new int[width * height][3];
        for (int i = 0; i < width * height; ++i) {
            inputRgb[i][0] = Color.red(inputPixels[i]);
            inputRgb[i][1] = Color.green(inputPixels[i]);
            inputRgb[i][2] = Color.blue(inputPixels[i]);
        }
        inputPixels = null;

        // Prepare the output buffer.
        int[] outputPixels = new int[width * height];

        // Start the iteration. The first y coordinate is vBorderSize.
        double y = vBorderSize;
        for (int yIndex = 0; yIndex < height; ++yIndex) {
            // The first x coordinate is hBorderSize.
            double x = hBorderSize;
            for (int xIndex = 0; xIndex < width; ++xIndex) {
                // Determine the square of interest.
                int left = (int) x;  // This is floor(x).
                int top = (int) y;   // This is floor(y).
                int right = left + 1;
                int bottom = top + 1;

                // (u, v) is the fractional part of (x, y).
                double u = x - (double) left;
                double v = y - (double) top;

                // Precompute necessary products to save time.
                double p00 = (1.0 - u) * (1.0 - v);
                double p01 = (1.0 - u) * v;
                double p10 = u * (1.0 - v);
                double p11 = u * v;

                // Clamp the indices to prevent out-of-bounds accesses that may be
                // caused by round-off error.
                if (left >= width) left = width - 1;
                if (top >= height) top = height - 1;
                if (right >= width) right = width - 1;
                if (bottom >= height) bottom = height - 1;

                // Sample RGB values from the four corners.
                int[] rgb00 = inputRgb[top * width + left];
                int[] rgb01 = inputRgb[bottom * width + left];
                int[] rgb10 = inputRgb[top * width + right];
                int[] rgb11 = inputRgb[bottom * width + right];

                // Interpolate each component of RGB separately.
                int[] mixedColor = new int[3];
                for (int k = 0; k < 3; ++k) {
                    mixedColor[k] = (int) Math.round(
                            p00 * (double) rgb00[k] + p01 * (double) rgb01[k]
                            + p10 * (double) rgb10[k] + p11 * (double) rgb11[k]);
                }
                // Convert RGB to the bitmap Color format and store.
                outputPixels[yIndex * width + xIndex] = Color.rgb(
                        mixedColor[0], mixedColor[1], mixedColor[2]);
                x += xInc;
            }
            y += yInc;
        }
        // Assemble the output buffer into a Bitmap object.
        return Bitmap.createBitmap(outputPixels, width, height, input.getConfig());
    }

    /**
     * Calls computeDifference on multiple cropped-and-scaled versions of
     * bitmap2 and keeps the smallest difference found.
     */
    @TargetApi(12)
    public static Difference computeMinimumDifference(
            Bitmap bitmap1, Bitmap bitmap2, Pair<Double, Double>[] borderCrops) {

        // Compute the difference with the original image (bitmap2) first.
        Difference minDiff = computeDifference(bitmap1, bitmap2);
        // Then go through the list of borderCrops.
        for (Pair<Double, Double> borderCrop : borderCrops) {
            // Compute the difference between bitmap1 and a transformed
            // version of bitmap2.
            Bitmap bitmap2s = shrinkAndScaleBilinear(bitmap2, borderCrop.first, borderCrop.second);
            Difference d = computeDifference(bitmap1, bitmap2s);
            // Keep the minimum difference.
            if (d.greatestPixelDifference < minDiff.greatestPixelDifference) {
                minDiff = d;
                minDiff.bestMatchBorderCrop = borderCrop;
            }
        }
        return minDiff;
    }
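    /*
     * Illustrative call (the crop values are made up): search over a few
     * candidate crops and keep the best match.
     *
     *   Pair<Double, Double>[] crops =
     *           new Pair[] {Pair.create(0.5, 0.0), Pair.create(0.5, 0.5)};
     *   Difference d = computeMinimumDifference(golden, decoded, crops);
     *   // d.bestMatchBorderCrop reports which crop matched best.
     */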
    /**
     * Calls computeMinimumDifference on a default list of border crops. This
     * default list comes from the behavior of GLConsumer.computeTransformMatrix().
     */
    @TargetApi(12)
    public static Difference computeMinimumDifference(
            Bitmap bitmap1, Bitmap bitmap2, int trueWidth, int trueHeight) {

        double hBorder = (double) bitmap1.getWidth() / (double) trueWidth;
        double vBorder = (double) bitmap1.getHeight() / (double) trueHeight;
        double hBorderH = 0.5 * hBorder; // Half-texel horizontal border.
        double vBorderH = 0.5 * vBorder; // Half-texel vertical border.
        return computeMinimumDifference(
                bitmap1,
                bitmap2,
                new Pair[] {
                    Pair.create(hBorderH, 0.0),
                    Pair.create(hBorderH, vBorderH),
                    Pair.create(0.0, vBorderH),
                    Pair.create(hBorder, 0.0),
                    Pair.create(hBorder, vBorder),
                    Pair.create(0.0, vBorder)
                });
    }

    /* Describes the difference between two {@link Bitmap} instances. */
    public static final class Difference {

        public final int greatestPixelDifference;
        public final Pair<Integer, Integer> greatestPixelDifferenceCoordinates;
        public Pair<Double, Double> bestMatchBorderCrop;

        private Difference(int greatestPixelDifference) {
            this(greatestPixelDifference, null, Pair.create(0.0, 0.0));
        }

        private Difference(
                int greatestPixelDifference,
                Pair<Integer, Integer> greatestPixelDifferenceCoordinates) {
            this(greatestPixelDifference, greatestPixelDifferenceCoordinates,
                    Pair.create(0.0, 0.0));
        }

        private Difference(
                int greatestPixelDifference,
                Pair<Integer, Integer> greatestPixelDifferenceCoordinates,
                Pair<Double, Double> bestMatchBorderCrop) {
            this.greatestPixelDifference = greatestPixelDifference;
            this.greatestPixelDifferenceCoordinates = greatestPixelDifferenceCoordinates;
            this.bestMatchBorderCrop = bestMatchBorderCrop;
        }
    }

}

/* Wrapper for MIME types. */
final class MimeTypes {

    private MimeTypes() {}

    public static final String VIDEO_VP9 = "video/x-vnd.on2.vp9";
    public static final String VIDEO_H264 = "video/avc";

    public static boolean isVideo(String mimeType) {
        // Guard against null: FilenameParser.getMimeType() can return null for
        // unrecognized codecs.
        return mimeType != null && mimeType.startsWith("video");
    }

}
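/*
 * A hypothetical end-to-end sketch (not part of the test suite) of how
 * BitmapCompare is meant to be used: compare a decoded frame against a golden
 * reference and treat the pair as similar when the greatest per-pixel
 * CIE L*a*b* distance stays below a tolerance. The threshold here is made up
 * purely for illustration; the real tests define their own tolerances.
 */
final class BitmapCompareExample {

    private BitmapCompareExample() {}

    static boolean isSimilar(Bitmap decodedFrame, Bitmap goldenFrame) {
        final int madeUpThreshold = 90; // Illustrative tolerance only.
        final BitmapCompare.Difference diff =
                BitmapCompare.computeDifference(decodedFrame, goldenFrame);
        Log.d("BitmapCompareExample", "Greatest pixel difference: "
                + diff.greatestPixelDifference + " at "
                + diff.greatestPixelDifferenceCoordinates);
        return diff.greatestPixelDifference < madeUpThreshold;
    }

}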