/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.android.mediaframeworktest.unit;

import android.test.suitebuilder.annotation.SmallTest;
import android.util.Log;
import android.util.Pair;
import android.util.Range;
import android.util.Rational;
import android.util.SizeF;
import android.graphics.ImageFormat;
import android.graphics.Point;
import android.graphics.PointF;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.util.Size;
import android.hardware.camera2.impl.CameraMetadataNative;
import android.hardware.camera2.marshal.impl.MarshalQueryableEnum;
import android.hardware.camera2.params.ColorSpaceTransform;
import android.hardware.camera2.params.Face;
import android.hardware.camera2.params.HighSpeedVideoConfiguration;
import android.hardware.camera2.params.MeteringRectangle;
import android.hardware.camera2.params.ReprocessFormatsMap;
import android.hardware.camera2.params.RggbChannelVector;
import android.hardware.camera2.params.StreamConfiguration;
import android.hardware.camera2.params.StreamConfigurationDuration;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.hardware.camera2.params.TonemapCurve;
import android.hardware.camera2.utils.TypeReference;

import static android.hardware.camera2.impl.CameraMetadataNative.*;
import static com.android.mediaframeworktest.unit.ByteArrayHelpers.*;

import java.lang.reflect.Array;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.List;

/**
 * <pre>
 * adb shell am instrument \
 *      -e class 'com.android.mediaframeworktest.unit.CameraMetadataTest' \
 *      -w com.android.mediaframeworktest/.MediaFrameworkUnitTestRunner
 * </pre>
 */
public class CameraMetadataTest extends junit.framework.TestCase {

    private static final boolean VERBOSE = false;
    private static final String TAG = "CameraMetadataTest";

    CameraMetadataNative mMetadata;

    // Sections
    static final int ANDROID_COLOR_CORRECTION = 0;
    static final int ANDROID_CONTROL = 1;

    // Section starts
    static final int ANDROID_COLOR_CORRECTION_START = ANDROID_COLOR_CORRECTION << 16;
    static final int ANDROID_CONTROL_START = ANDROID_CONTROL << 16;

    // Tags
    static final int ANDROID_COLOR_CORRECTION_MODE = ANDROID_COLOR_CORRECTION_START;
    static final int ANDROID_COLOR_CORRECTION_TRANSFORM = ANDROID_COLOR_CORRECTION_START + 1;
    static final int ANDROID_COLOR_CORRECTION_GAINS = ANDROID_COLOR_CORRECTION_START + 2;

    static final int ANDROID_CONTROL_AE_ANTIBANDING_MODE = ANDROID_CONTROL_START;
    static final int ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION =
            ANDROID_CONTROL_START + 1;

    // From graphics.h
    private static final int HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED = 0x22;

    @Override
    public void setUp() {
        mMetadata = new CameraMetadataNative();
    }

    @Override
    public void tearDown() throws Exception {
        mMetadata = null;
    }

    @SmallTest
    public void testNew() {
        assertEquals(0, mMetadata.getEntryCount());
        assertTrue(mMetadata.isEmpty());
    }

    @SmallTest
    public void testGetTagFromKey() {

        // Test success

        assertEquals(ANDROID_COLOR_CORRECTION_MODE,
                CameraMetadataNative.getTag("android.colorCorrection.mode"));
        assertEquals(ANDROID_COLOR_CORRECTION_TRANSFORM,
                CameraMetadataNative.getTag("android.colorCorrection.transform"));
        assertEquals(ANDROID_CONTROL_AE_ANTIBANDING_MODE,
                CameraMetadataNative.getTag("android.control.aeAntibandingMode"));
        assertEquals(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
                CameraMetadataNative.getTag("android.control.aeExposureCompensation"));

        // Test failures

        try {
            CameraMetadataNative.getTag(null);
            fail("A null key should throw NPE");
        } catch (NullPointerException e) {
        }

        try {
            CameraMetadataNative.getTag("android.control");
            fail("A section name only should not be a valid key");
        } catch (IllegalArgumentException e) {
        }

        try {
            CameraMetadataNative.getTag("android.control.thisTagNameIsFakeAndDoesNotExist");
            fail("A valid section with an invalid tag name should not be a valid key");
        } catch (IllegalArgumentException e) {
        }

        try {
            CameraMetadataNative.getTag("android");
            fail("A namespace name only should not be a valid key");
        } catch (IllegalArgumentException e) {
        }

        try {
            CameraMetadataNative.getTag("this.key.is.definitely.invalid");
            fail("A completely fake key name should not be valid");
        } catch (IllegalArgumentException e) {
        }
    }

    @SmallTest
    public void testGetTypeFromTag() {
        assertEquals(TYPE_BYTE,
                CameraMetadataNative.getNativeType(ANDROID_COLOR_CORRECTION_MODE, Long.MAX_VALUE));
        assertEquals(TYPE_RATIONAL,
                CameraMetadataNative.getNativeType(ANDROID_COLOR_CORRECTION_TRANSFORM, Long.MAX_VALUE));
        assertEquals(TYPE_FLOAT,
                CameraMetadataNative.getNativeType(ANDROID_COLOR_CORRECTION_GAINS, Long.MAX_VALUE));
        assertEquals(TYPE_BYTE,
                CameraMetadataNative.getNativeType(ANDROID_CONTROL_AE_ANTIBANDING_MODE, Long.MAX_VALUE));
        assertEquals(TYPE_INT32,
                CameraMetadataNative.getNativeType(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, Long.MAX_VALUE));

        try {
            CameraMetadataNative.getNativeType(0xDEADF00D, Long.MAX_VALUE);
            fail("No type should exist for invalid tag 0xDEADF00D");
        } catch (IllegalArgumentException e) {
        }
    }

    @SmallTest
    public void testReadWriteValues() {
        final byte ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY = 2;
        byte[] valueResult;

        assertEquals(0, mMetadata.getEntryCount());
        assertEquals(true, mMetadata.isEmpty());

        //
        // android.colorCorrection.mode (single enum byte)
        //

        assertEquals(null, mMetadata.readValues(ANDROID_COLOR_CORRECTION_MODE));

        // Write/read null values
        mMetadata.writeValues(ANDROID_COLOR_CORRECTION_MODE, null);
        assertEquals(null, mMetadata.readValues(ANDROID_COLOR_CORRECTION_MODE));

        // Write 0 values
        mMetadata.writeValues(ANDROID_COLOR_CORRECTION_MODE,
                new byte[] {});

        // Read 0 values
        valueResult = mMetadata.readValues(ANDROID_COLOR_CORRECTION_MODE);
        assertNotNull(valueResult);
        assertEquals(0, valueResult.length);

        assertEquals(1, mMetadata.getEntryCount());
        assertEquals(false, mMetadata.isEmpty());

        // Write 1 value
        mMetadata.writeValues(ANDROID_COLOR_CORRECTION_MODE, new byte[] {
            ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY
        });

        // Read 1 value
        valueResult = mMetadata.readValues(ANDROID_COLOR_CORRECTION_MODE);
        assertNotNull(valueResult);
        assertEquals(1, valueResult.length);
        assertEquals(ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY, valueResult[0]);

        assertEquals(1, mMetadata.getEntryCount());
        assertEquals(false, mMetadata.isEmpty());

        //
        // android.colorCorrection.colorCorrectionGains (float x 4 array)
        //

        final float[] colorCorrectionGains = new float[] { 1.0f, 2.0f, 3.0f, 4.0f};
        byte[] colorCorrectionGainsAsByteArray = new byte[colorCorrectionGains.length * 4];
        ByteBuffer colorCorrectionGainsByteBuffer =
                ByteBuffer.wrap(colorCorrectionGainsAsByteArray).order(ByteOrder.nativeOrder());
        for (float f : colorCorrectionGains)
            colorCorrectionGainsByteBuffer.putFloat(f);

        // Write the raw bytes, then read them back and verify
        assertNull(mMetadata.readValues(ANDROID_COLOR_CORRECTION_GAINS));
        mMetadata.writeValues(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGainsAsByteArray);

        assertArrayEquals(colorCorrectionGainsAsByteArray,
                mMetadata.readValues(ANDROID_COLOR_CORRECTION_GAINS));

        assertEquals(2, mMetadata.getEntryCount());
        assertEquals(false, mMetadata.isEmpty());

        // Erase
        mMetadata.writeValues(ANDROID_COLOR_CORRECTION_GAINS, null);
        assertNull(mMetadata.readValues(ANDROID_COLOR_CORRECTION_GAINS));
        assertEquals(1, mMetadata.getEntryCount());
    }

    /**
     * Format an array into a string with the {@code badIndex} highlighted with {@code **}.
     *
     * <p>Numbers are printed as hexadecimal values.</p>
     *
     * <p>Example: {@code "[hello, **world**]"} for a {@code String[]},
     * or {@code "[**0xFF**, 0xFF]"} for an {@code int[]}.</p>
     */
    private static <T> String formatArray(T array, int badIndex) {
        StringBuilder builder = new StringBuilder();

        builder.append("[");

        int len = Array.getLength(array);
        for (int i = 0; i < len; ++i) {

            Object elem = Array.get(array, i);

            if (i == badIndex) {
                builder.append("**");
            }

            if (elem instanceof Number) {
                builder.append(String.format("%x", elem));
            } else {
                builder.append(elem);
            }

            if (i == badIndex) {
                builder.append("**");
            }

            if (i != len - 1) {
                builder.append(", ");
            }
        }

        builder.append("]");

        return builder.toString();
    }

    private static <T> void assertArrayEquals(T expected, T actual) {
        if (!expected.getClass().isArray() || !actual.getClass().isArray()) {
            throw new IllegalArgumentException("expected, actual must both be arrays");
        }

        assertEquals("Array lengths must be equal",
                Array.getLength(expected), Array.getLength(actual));

        int len = Array.getLength(expected);
        for (int i = 0; i < len; ++i) {

            Object expectedElement = Array.get(expected, i);
            Object actualElement = Array.get(actual, i);

            if (!expectedElement.equals(actualElement)) {
                fail(String.format(
                        "element %d in array was not equal (expected %s, actual %s). "
                                + "Arrays were: (expected %s, actual %s).",
                        i, expectedElement, actualElement,
                        formatArray(expected, i),
                        formatArray(actual, i)));
            }
        }
    }

    private static <T, T2> void assertArrayContains(T needle, T2 array) {
        if (!array.getClass().isArray()) {
            throw new IllegalArgumentException("actual must be array");
        }

        int len = Array.getLength(array);
        for (int i = 0; i < len; ++i) {

            Object actualElement = Array.get(array, i);

            if (needle.equals(actualElement)) {
                return;
            }
        }

        fail(String.format(
                "could not find element in array (needle %s). "
                        + "Array was: %s.",
                needle,
                formatArray(array, len)));
    }

    private <T> void checkKeyGetAndSet(String keyStr, TypeReference<T> typeToken, T expected,
            boolean reuse) {
        Key<T> key = new Key<T>(keyStr, typeToken);
        assertNull(mMetadata.get(key));
        mMetadata.set(key, null);
        assertNull(mMetadata.get(key));
        mMetadata.set(key, expected);

        T actual = mMetadata.get(key);

        if (typeToken.getRawType().isArray()) {
            assertArrayEquals(expected, actual);
        } else {
            assertEquals(expected, actual);
        }

        if (reuse) {
            // reset the key in case we want to use it again
            mMetadata.set(key, null);
        }
    }

    private <T> void checkKeyGetAndSet(String keyStr, TypeReference<T> typeToken, T expected) {
        checkKeyGetAndSet(keyStr, typeToken, expected, /*reuse*/false);
    }

    private <T> void checkKeyGetAndSet(String keyStr, Class<T> type, T expected) {
        checkKeyGetAndSet(keyStr, TypeReference.createSpecializedTypeReference(type), expected);
    }

    /**
     * Ensure that the data survives a marshal/unmarshal round-trip;
     * it must also be equal to the {@code expectedNative} byte array.
     *
     * <p>As a side-effect, the metadata value corresponding to the key is now set to
     * {@code expected}.</p>
     *
     * @return key created with {@code keyName} and {@code T}
     */
    private <T> Key<T> checkKeyMarshal(String keyName, TypeReference<T> typeReference,
            T expected, byte[] expectedNative) {
        Key<T> key = new Key<T>(keyName, typeReference);

        mMetadata.set(key, null);
        assertNull(mMetadata.get(key));

        // Write managed value -> make sure native bytes are what we expect
        mMetadata.set(key, expected);

        byte[] actualValues = mMetadata.readValues(key.getTag());
        assertArrayEquals(expectedNative, actualValues);

        // Write managed value -> make sure read-out managed value is what we expect
        T actual = mMetadata.get(key);

        if (typeReference.getRawType().isArray()) {
            assertArrayEquals(expected, actual);
        } else {
            assertEquals(expected, actual);
        }

        // Write native bytes -> make sure read-out managed value is what we expect
        mMetadata.writeValues(key.getTag(), expectedNative);
        actual = mMetadata.get(key);

        if (typeReference.getRawType().isArray()) {
            assertArrayEquals(expected, actual);
        } else {
            assertEquals(expected, actual);
        }

        return key;
    }

    /**
     * Ensure that the data survives a marshal/unmarshal round-trip;
     * it must also be equal to the {@code expectedNative} byte array.
     *
     * <p>As a side-effect,
     * the metadata value corresponding to the key is now set to {@code expected}.</p>
     *
     * @return key created with {@code keyName} and {@code T}
     */
    private <T> Key<T> checkKeyMarshal(String keyName, T expected, byte[] expectedNative) {
        @SuppressWarnings("unchecked")
        Class<T> expectedClass = (Class<T>) expected.getClass();
        return checkKeyMarshal(keyName,
                TypeReference.createSpecializedTypeReference(expectedClass),
                expected,
                expectedNative);
    }

    @SmallTest
    public void testReadWritePrimitive() {
        // int32 (single)
        checkKeyGetAndSet("android.control.aeExposureCompensation", Integer.TYPE, 0xC0FFEE);

        // byte (single)
        checkKeyGetAndSet("android.flash.maxEnergy", Byte.TYPE, (byte)6);

        // int64 (single)
        checkKeyGetAndSet("android.flash.firingTime", Long.TYPE, 0xABCD12345678FFFFL);

        // float (single)
        checkKeyGetAndSet("android.lens.aperture", Float.TYPE, Float.MAX_VALUE);

        // double (single) -- technically double x 3, but we fake it
        checkKeyGetAndSet("android.jpeg.gpsCoordinates", Double.TYPE, Double.MAX_VALUE);

        // rational (single)
        checkKeyGetAndSet("android.sensor.baseGainFactor", Rational.class, new Rational(1, 2));

        /**
         * Weirder cases, that don't map 1:1 with the native types
         */

        // bool (single) -- with TYPE_BYTE
        checkKeyGetAndSet("android.control.aeLock", Boolean.TYPE, true);

        // integer (single) -- with TYPE_BYTE
        checkKeyGetAndSet("android.control.aePrecaptureTrigger", Integer.TYPE, 6);
    }

    @SmallTest
    public void testReadWritePrimitiveArray() {
        // int32 (n)
        checkKeyGetAndSet("android.sensor.info.sensitivityRange", int[].class,
                new int[] {
                    0xC0FFEE, 0xDEADF00D
                });

        // byte (n)
        checkKeyGetAndSet("android.statistics.faceScores", byte[].class, new byte[] {
            1, 2, 3, 4
        });

        // int64 (n)
        checkKeyGetAndSet("android.scaler.availableProcessedMinDurations", long[].class,
                new long[] {
                    0xABCD12345678FFFFL, 0x1234ABCD5678FFFFL, 0xFFFF12345678ABCDL
                });

        // float (n)
        checkKeyGetAndSet("android.lens.info.availableApertures", float[].class,
                new float[] {
                    Float.MAX_VALUE, Float.MIN_NORMAL, Float.MIN_VALUE
                });

        // double (n) -- in particular double x 3
        checkKeyGetAndSet("android.jpeg.gpsCoordinates", double[].class,
                new double[] {
                    Double.MAX_VALUE, Double.MIN_NORMAL, Double.MIN_VALUE
                });

        // rational (n) -- in particular rational x 9
        checkKeyGetAndSet("android.sensor.calibrationTransform1", Rational[].class,
                new Rational[] {
                    new Rational(1, 2), new Rational(3, 4), new Rational(5, 6),
                    new Rational(7, 8), new Rational(9, 10), new Rational(10, 11),
                    new Rational(12, 13), new Rational(14, 15), new Rational(15, 16)
                });

        /**
         * Weirder cases, that don't map 1:1 with the native types
         */

        // bool (n) -- with TYPE_BYTE
        checkKeyGetAndSet("android.control.aeLock", boolean[].class, new boolean[] {
            true, false, true
        });

        // integer (n) -- with TYPE_BYTE
        checkKeyGetAndSet("android.control.aeAvailableModes", int[].class, new int[] {
            1, 2, 3, 4
        });
    }

    private enum ColorCorrectionMode {
        TRANSFORM_MATRIX,
        FAST,
        HIGH_QUALITY
    }

    private enum AeAntibandingMode {
        OFF,
        _50HZ,
        _60HZ,
        AUTO
    }

    private enum AvailableFormat {
        RAW_SENSOR,
        YV12,
        YCrCb_420_SP,
        IMPLEMENTATION_DEFINED,
        YCbCr_420_888,
        BLOB
    }

    @SmallTest
    public void testReadWriteEnum() {
        // byte (single)
        checkKeyGetAndSet("android.colorCorrection.mode", ColorCorrectionMode.class,
                ColorCorrectionMode.HIGH_QUALITY);

        // byte (single)
        checkKeyGetAndSet("android.control.aeAntibandingMode", AeAntibandingMode.class,
                AeAntibandingMode.AUTO);

        // byte (n)
        checkKeyGetAndSet("android.control.aeAvailableAntibandingModes",
                AeAntibandingMode[].class, new AeAntibandingMode[] {
                    AeAntibandingMode.OFF, AeAntibandingMode._50HZ, AeAntibandingMode._60HZ,
                    AeAntibandingMode.AUTO
                });

        /**
         * Stranger cases that don't use byte enums
         */
        // int (n)
        checkKeyGetAndSet("android.scaler.availableFormats", AvailableFormat[].class,
                new AvailableFormat[] {
                    AvailableFormat.RAW_SENSOR,
                    AvailableFormat.YV12,
                    AvailableFormat.IMPLEMENTATION_DEFINED,
                    AvailableFormat.YCbCr_420_888,
                    AvailableFormat.BLOB
                });

    }

    @SmallTest
    public void testReadWriteEnumWithCustomValues() {
        MarshalQueryableEnum.registerEnumValues(AeAntibandingMode.class, new int[] {
            0,
            10,
            20,
            30
        });

        // byte (single)
        checkKeyGetAndSet("android.control.aeAntibandingMode", AeAntibandingMode.class,
                AeAntibandingMode.AUTO);

        // byte (n)
        checkKeyGetAndSet("android.control.aeAvailableAntibandingModes",
                AeAntibandingMode[].class, new AeAntibandingMode[] {
                    AeAntibandingMode.OFF, AeAntibandingMode._50HZ, AeAntibandingMode._60HZ,
                    AeAntibandingMode.AUTO
                });

        byte[] aeAntibandingModeValues = mMetadata.readValues(CameraMetadataNative
                .getTag("android.control.aeAvailableAntibandingModes"));
        byte[] expectedValues = new byte[] { 0, 10, 20, 30 };
        assertArrayEquals(expectedValues, aeAntibandingModeValues);

        /**
         * Stranger cases that don't use byte enums
         */
        // int (n)
        MarshalQueryableEnum.registerEnumValues(AvailableFormat.class, new int[] {
            0x20,
            0x32315659,
            0x11,
            0x22,
            0x23,
            0x21,
        });

        checkKeyGetAndSet("android.scaler.availableFormats", AvailableFormat[].class,
                new AvailableFormat[] {
                    AvailableFormat.RAW_SENSOR,
                    AvailableFormat.YV12,
                    AvailableFormat.IMPLEMENTATION_DEFINED,
                    AvailableFormat.YCbCr_420_888,
                    AvailableFormat.BLOB
                });

        Key<AvailableFormat[]> availableFormatsKey =
                new Key<AvailableFormat[]>("android.scaler.availableFormats",
                        AvailableFormat[].class);
        byte[] availableFormatValues = mMetadata.readValues(CameraMetadataNative
                .getTag(availableFormatsKey.getName()));

        int[] expectedIntValues = new int[] {
            0x20,
            0x32315659,
            0x22,
            0x23,
            0x21
        };

        ByteBuffer bf = ByteBuffer.wrap(availableFormatValues).order(ByteOrder.nativeOrder());

        assertEquals(expectedIntValues.length * 4, availableFormatValues.length);
        for (int i = 0; i < expectedIntValues.length; ++i) {
            assertEquals(expectedIntValues[i], bf.getInt());
        }
    }

    @SmallTest
    public void testReadWriteSize() {
        // int32 x n
        checkKeyGetAndSet("android.jpeg.thumbnailSize", Size.class, new Size(123, 456));

        // int32 x 2 x n
        checkKeyGetAndSet("android.scaler.availableJpegSizes", Size[].class, new Size[] {
            new Size(123, 456),
            new Size(0xDEAD, 0xF00D),
            new Size(0xF00, 0xB00)
        });
    }

    @SmallTest
    public void testReadWriteRggbChannelVector() {
        // int32 x n
        checkKeyMarshal("android.colorCorrection.gains",
                new RggbChannelVector(1.0f, 2.1f, 3.2f, 4.5f),
                toByteArray(1.0f, 2.1f, 3.2f, 4.5f));

        // int32 x 2 x n [pretend; actual is not array]
        checkKeyMarshal("android.colorCorrection.gains",
                new RggbChannelVector[] {
                    new RggbChannelVector(1.0f, 2.0f, 3.0f, 4.0f),
                    new RggbChannelVector(9.0f, 8.0f, 7.0f, 6.0f),
                    new RggbChannelVector(1.3f, 5.5f, 2.4f, 6.7f),
                }, toByteArray(
                        1.0f, 2.0f, 3.0f, 4.0f,
                        9.0f, 8.0f, 7.0f, 6.0f,
                        1.3f, 5.5f, 2.4f, 6.7f
                ));
    }

    @SmallTest
    public void testReadWriteSizeF() {
        // int32 x n
        checkKeyMarshal("android.sensor.info.physicalSize",
                new SizeF(123f, 456f),
                toByteArray(123f, 456f));

        // int32 x 2 x n
        checkKeyMarshal("android.sensor.info.physicalSize",
                new SizeF[] {
                    new SizeF(123f, 456f),
                    new SizeF(1.234f, 4.567f),
                    new SizeF(999.0f, 555.0f)
                },
                toByteArray(
                        123f, 456f,
                        1.234f, 4.567f,
                        999.0f, 555.0f)
        );
    }

    @SmallTest
    public void testReadWriteRectangle() {
        // int32 x n
        checkKeyMarshal("android.scaler.cropRegion",
                // x1, y1, x2, y2
                new Rect(10, 11, 1280, 1024),
                // x, y, width, height
                toByteArray(10, 11, 1280 - 10, 1024 - 11));

        // int32 x 2 x n [actually not array, but we pretend it is]
        checkKeyMarshal("android.scaler.cropRegion", new Rect[] {
            new Rect(110, 111, 11280, 11024),
            new Rect(210, 111, 21280, 21024),
            new Rect(310, 111, 31280, 31024)
        }, toByteArray(
                110, 111, 11280 - 110, 11024 - 111,
                210, 111, 21280 - 210, 21024 - 111,
                310, 111, 31280 - 310, 31024 - 111
        ));
    }

    @SmallTest
    public void testReadWriteMeteringRectangle() {
        // int32 x 5 x area_count [but we pretend it's a single element]
        checkKeyMarshal("android.control.aeRegions",
                new MeteringRectangle(/*x*/1, /*y*/2, /*width*/100, /*height*/200, /*weight*/5),
                /* xmin, ymin, xmax, ymax, weight */
                toByteArray(1, 2, 1 + 100, 2 + 200, 5));

        // int32 x 5 x area_count
        checkKeyMarshal("android.control.afRegions",
                new MeteringRectangle[] {
                    new MeteringRectangle(/*x*/5, /*y*/6, /*width*/123, /*height*/456, /*weight*/7),
                    new MeteringRectangle(/*x*/7, /*y*/8, /*width*/456, /*height*/999, /*weight*/6),
                    new MeteringRectangle(/*x*/1, /*y*/2, /*width*/100, /*height*/200, /*weight*/5)
                },
                toByteArray(
                        5, 6, 5 + 123, 6 + 456, 7,
                        7, 8, 7 + 456, 8 + 999, 6,
                        1, 2, 1 + 100, 2 + 200, 5
                ));
    }

    @SmallTest
    public void testReadWriteHighSpeedVideoConfiguration() {
        // int32 x 5 x 1
        checkKeyMarshal("android.control.availableHighSpeedVideoConfigurations",
                new HighSpeedVideoConfiguration(
                        /*width*/1000, /*height*/255, /*fpsMin*/30, /*fpsMax*/200,
                        /*batchSizeMax*/8),
                /* width, height, fpsMin, fpsMax, batchSizeMax */
                toByteArray(1000, 255, 30, 200, 8));

        // int32 x 5 x 3
        checkKeyMarshal("android.control.availableHighSpeedVideoConfigurations",
                new HighSpeedVideoConfiguration[] {
                    new HighSpeedVideoConfiguration(
                            /*width*/1280, /*height*/720, /*fpsMin*/60, /*fpsMax*/120,
                            /*batchSizeMax*/8),
                    new HighSpeedVideoConfiguration(
                            /*width*/123, /*height*/456,
                            /*fpsMin*/1, /*fpsMax*/200,
                            /*batchSizeMax*/4),
                    new HighSpeedVideoConfiguration(
                            /*width*/4096, /*height*/2592, /*fpsMin*/30, /*fpsMax*/60,
                            /*batchSizeMax*/2)
                },
                toByteArray(
                        1280, 720, 60, 120, 8,
                        123, 456, 1, 200, 4,
                        4096, 2592, 30, 60, 2
                ));
    }

    @SmallTest
    public void testReadWriteColorSpaceTransform() {
        // rational x 3 x 3
        checkKeyMarshal("android.colorCorrection.transform",
                new ColorSpaceTransform(new Rational[] {
                    new Rational(1, 2), new Rational(3, 4), new Rational(5, 6),
                    new Rational(7, 8), new Rational(8, 9), new Rational(10, 11),
                    new Rational(1, 5), new Rational(2, 8), new Rational(3, 9),
                }),
                toByteArray(
                        1, 2, 3, 4, 5, 6,
                        7, 8, 8, 9, 10, 11,
                        1, 5, 1, 4, 1, 3));
    }

    @SmallTest
    public void testReadWritePoint() {
        // int32 x 2 [actually 'x n' but pretend it's a single value for now]
        checkKeyMarshal("android.statistics.hotPixelMap",
                new Point(1, 2),
                toByteArray(1, 2));

        // int32 x 2 x samples
        checkKeyMarshal("android.statistics.hotPixelMap",
                new Point[] {
                    new Point(1, 2),
                    new Point(3, 4),
                    new Point(5, 6),
                    new Point(7, 8),
                },
                toByteArray(
                        1, 2,
                        3, 4,
                        5, 6,
                        7, 8)
        );
    }

    @SmallTest
    public void testReadWritePointF() {
        // float x 2 [actually 'x samples' but pretend it's a single value for now]
        checkKeyMarshal(
                "android.sensor.profileToneCurve",
                new PointF(1.0f, 2.0f),
                toByteArray(1.0f, 2.0f));

        // float x 2 x samples
        checkKeyMarshal("android.sensor.profileToneCurve",
                new PointF[] {
                    new PointF(1.0f, 2.0f),
                    new PointF(3.0f, 4.0f),
                    new PointF(5.0f, 6.0f),
                    new PointF(7.0f, 8.0f),
                },
                toByteArray(
                        1.0f, 2.0f,
                        3.0f, 4.0f,
                        5.0f, 6.0f,
                        7.0f, 8.0f));
    }

    @SmallTest
    public void testReadWritePair() {
        // float x 2
        checkKeyMarshal("android.lens.focusRange",
                new TypeReference<Pair<Float, Float>>() {{ }},
                Pair.create(1.0f / 2.0f, 1.0f / 3.0f),
                toByteArray(1.0f / 2.0f, 1.0f / 3.0f));

        // byte, int (fake from TYPE_BYTE)
        // This takes advantage of the TYPE_BYTE -> int marshaler designed for enums.
        checkKeyMarshal("android.flash.mode",
                new TypeReference<Pair<Byte, Integer>>() {{ }},
                Pair.create((byte)123, 22),
                toByteArray((byte)123, (byte)22));
    }

    @SmallTest
    public void testReadWriteRange() {
        // int32 x 2
        checkKeyMarshal("android.control.aeTargetFpsRange",
                new TypeReference<Range<Integer>>() {{ }},
                Range.create(123, 456),
                toByteArray(123, 456));

        // int64 x 2
        checkKeyMarshal("android.sensor.info.exposureTimeRange",
                new TypeReference<Range<Long>>() {{ }},
                Range.create(123L, 456L),
                toByteArray(123L, 456L));
    }

    @SmallTest
    public void testReadWriteStreamConfiguration() {
        // int32 x 4 x n
        checkKeyMarshal("android.scaler.availableStreamConfigurations",
                new StreamConfiguration[] {
                    new StreamConfiguration(ImageFormat.YUV_420_888, 640, 480, /*input*/false),
                    new StreamConfiguration(ImageFormat.RGB_565, 320, 240, /*input*/true),
                },
                toByteArray(
                        ImageFormat.YUV_420_888, 640, 480, /*input*/0,
                        ImageFormat.RGB_565, 320, 240, /*input*/1)
        );
    }

    @SmallTest
    public void testReadWriteStreamConfigurationDuration() {
        // Avoid sign extending ints when converting to a long
        final long MASK_UNSIGNED_INT = 0x00000000ffffffffL;

        // int64 x 4 x n
        checkKeyMarshal("android.scaler.availableMinFrameDurations",
                new StreamConfigurationDuration[] {
                    new StreamConfigurationDuration(
                            ImageFormat.YUV_420_888, 640, 480, /*duration*/123L),
                    new StreamConfigurationDuration(
                            ImageFormat.RGB_565, 320, 240, /*duration*/345L),
                },
                toByteArray(
                        ImageFormat.YUV_420_888 & MASK_UNSIGNED_INT, 640L, 480L, /*duration*/123L,
                        ImageFormat.RGB_565 & MASK_UNSIGNED_INT, 320L, 240L, /*duration*/345L)
        );
    }

    @SmallTest
    public void testReadWriteReprocessFormatsMap() {

        // final int RAW_OPAQUE = 0x24; // TODO: add RAW_OPAQUE to ImageFormat
        final int RAW16 = ImageFormat.RAW_SENSOR;
        final int YUV_420_888 = ImageFormat.YUV_420_888;
        final int BLOB = 0x21;

        // TODO: also test HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED as an output
        int[] contents = new int[] {
            YUV_420_888, 3, YUV_420_888, ImageFormat.NV21, BLOB,
            RAW16, 2, YUV_420_888, BLOB,
        };

        // int32 x n
        Key<ReprocessFormatsMap> key = new Key<ReprocessFormatsMap>(
                "android.scaler.availableInputOutputFormatsMap", ReprocessFormatsMap.class);
        mMetadata.writeValues(key.getTag(), toByteArray(contents));

        ReprocessFormatsMap map = mMetadata.get(key);

        /*
         * Make sure the inputs/outputs were what we expected.
         * - Use public image format constants here.
         */

        int[] expectedInputs = new int[] {
            YUV_420_888, RAW16
        };
        assertArrayEquals(expectedInputs, map.getInputs());

        int[] expectedYuvOutputs = new int[] {
            YUV_420_888, ImageFormat.NV21, ImageFormat.JPEG,
        };
        assertArrayEquals(expectedYuvOutputs, map.getOutputs(ImageFormat.YUV_420_888));

        int[] expectedRaw16Outputs = new int[] {
            YUV_420_888, ImageFormat.JPEG,
        };
        assertArrayEquals(expectedRaw16Outputs, map.getOutputs(ImageFormat.RAW_SENSOR));

        // Finally, do a round-trip check as a sanity check
        checkKeyMarshal(
                "android.scaler.availableInputOutputFormatsMap",
                new ReprocessFormatsMap(contents),
                toByteArray(contents)
        );
    }

    @SmallTest
    public void testReadWriteString() {
        // (byte) string
        Key<String> gpsProcessingMethodKey =
                new Key<String>("android.jpeg.gpsProcessingMethod", String.class);

        String helloWorld = new String("HelloWorld");
        byte[] helloWorldBytes = new byte[] {
                'H', 'e', 'l', 'l', 'o', 'W', 'o', 'r', 'l', 'd', '\0' };

        mMetadata.set(gpsProcessingMethodKey, helloWorld);

        String actual = mMetadata.get(gpsProcessingMethodKey);
        assertEquals(helloWorld, actual);

        byte[] actualBytes = mMetadata.readValues(getTag(gpsProcessingMethodKey.getName()));
        assertArrayEquals(helloWorldBytes, actualBytes);

        // Does not yet test as a string[] since we don't support that in native code.

        // (byte) string
        Key<String[]> gpsProcessingMethodKeyArray =
                new Key<String[]>("android.jpeg.gpsProcessingMethod", String[].class);

        String[] gpsStrings = new String[] { "HelloWorld", "FooBar", "Shazbot" };
        byte[] gpsBytes = new byte[] {
                'H', 'e', 'l', 'l', 'o', 'W', 'o', 'r', 'l', 'd', '\0',
                'F', 'o', 'o', 'B', 'a', 'r', '\0',
                'S', 'h', 'a', 'z', 'b', 'o', 't', '\0'};

        mMetadata.set(gpsProcessingMethodKeyArray, gpsStrings);

        String[] actualArray = mMetadata.get(gpsProcessingMethodKeyArray);
        assertArrayEquals(gpsStrings, actualArray);

        byte[] actualBytes2 = mMetadata.readValues(getTag(gpsProcessingMethodKeyArray.getName()));
        assertArrayEquals(gpsBytes, actualBytes2);
    }

    @SmallTest
    public void testReadWriteOverride() {
        //
        // android.scaler.availableFormats (int x n array)
        //
        int[] availableFormats = new int[] {
            0x20,       // RAW_SENSOR
            0x32315659, // YV12
            0x11,       // YCrCb_420_SP
            0x100,      // ImageFormat.JPEG
            0x22,       // IMPLEMENTATION_DEFINED
            0x23,       // YCbCr_420_888
        };
        int[] expectedIntValues = new int[] {
            0x20,       // RAW_SENSOR
            0x32315659, // YV12
            0x11,       // YCrCb_420_SP
            0x21,       // BLOB
            0x22,       // IMPLEMENTATION_DEFINED
            0x23,       // YCbCr_420_888
        };
        int availableFormatTag = CameraMetadataNative.getTag("android.scaler.availableFormats");

        Key<int[]> formatKey = CameraCharacteristics.SCALER_AVAILABLE_FORMATS.getNativeKey();

        validateArrayMetadataReadWriteOverride(formatKey, availableFormats,
                expectedIntValues, availableFormatTag);

        //
        // android.statistics.faces (Face x n array)
        //
        int[] expectedFaceIds = new int[] {1, 2, 3, 4, 5};
        byte[] expectedFaceScores = new byte[] {10, 20, 30, 40, 50};
        int numFaces = expectedFaceIds.length;
        Rect[] expectedRects = new Rect[numFaces];
        for (int i = 0; i < numFaces; i++) {
            expectedRects[i] = new Rect(i*4 + 1, i * 4 + 2, i * 4 + 3, i * 4 + 4);
        }
        int[] expectedFaceLM = new int[] {
            1, 2, 3, 4, 5, 6,
            7, 8, 9, 10, 11, 12,
            13, 14, 15, 16, 17, 18,
            19, 20, 21, 22, 23, 24,
            25, 26, 27, 28, 29, 30,
        };
        Point[] expectedFaceLMPoints = new Point[numFaces * 3];
        for (int i = 0; i < numFaces; i++) {
            expectedFaceLMPoints[i*3] = new Point(expectedFaceLM[i*6], expectedFaceLM[i*6+1]);
            expectedFaceLMPoints[i*3+1] = new Point(expectedFaceLM[i*6+2], expectedFaceLM[i*6+3]);
            expectedFaceLMPoints[i*3+2] = new Point(expectedFaceLM[i*6+4], expectedFaceLM[i*6+5]);
        }

        /**
         * Read - FACE_DETECT_MODE == FULL
         */
        mMetadata.set(CaptureResult.STATISTICS_FACE_DETECT_MODE,
                CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL);
        mMetadata.set(CaptureResult.STATISTICS_FACE_IDS, expectedFaceIds);
        mMetadata.set(CaptureResult.STATISTICS_FACE_SCORES, expectedFaceScores);
        mMetadata.set(CaptureResult.STATISTICS_FACE_RECTANGLES, expectedRects);
        mMetadata.set(CaptureResult.STATISTICS_FACE_LANDMARKS, expectedFaceLM);
        Face[] resultFaces = mMetadata.get(CaptureResult.STATISTICS_FACES);
        assertEquals(numFaces, resultFaces.length);
        for (int i = 0; i < numFaces; i++) {
            assertEquals(expectedFaceIds[i], resultFaces[i].getId());
            assertEquals(expectedFaceScores[i], resultFaces[i].getScore());
            assertEquals(expectedRects[i], resultFaces[i].getBounds());
            assertEquals(expectedFaceLMPoints[i*3], resultFaces[i].getLeftEyePosition());
            assertEquals(expectedFaceLMPoints[i*3+1], resultFaces[i].getRightEyePosition());
            assertEquals(expectedFaceLMPoints[i*3+2], resultFaces[i].getMouthPosition());
        }

        /**
         * Read - FACE_DETECT_MODE == SIMPLE
         */
        mMetadata.set(CaptureResult.STATISTICS_FACE_DETECT_MODE,
                CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE);
        mMetadata.set(CaptureResult.STATISTICS_FACE_SCORES, expectedFaceScores);
        mMetadata.set(CaptureResult.STATISTICS_FACE_RECTANGLES, expectedRects);
        Face[] resultSimpleFaces = mMetadata.get(CaptureResult.STATISTICS_FACES);
        assertEquals(numFaces, resultSimpleFaces.length);
        for (int i = 0; i < numFaces; i++) {
            assertEquals(Face.ID_UNSUPPORTED, resultSimpleFaces[i].getId());
            assertEquals(expectedFaceScores[i], resultSimpleFaces[i].getScore());
            assertEquals(expectedRects[i], resultSimpleFaces[i].getBounds());
            assertNull(resultSimpleFaces[i].getLeftEyePosition());
            assertNull(resultSimpleFaces[i].getRightEyePosition());
            assertNull(resultSimpleFaces[i].getMouthPosition());
        }

        /**
         * Read/Write TonemapCurve
         */
        float[] red = new float[] {0.0f, 0.0f, 1.0f, 1.0f};
        float[] green = new float[] {0.0f, 1.0f, 1.0f, 0.0f};
        float[] blue = new float[] {
                0.0000f, 0.0000f, 0.0667f, 0.2920f, 0.1333f, 0.4002f, 0.2000f, 0.4812f,
                0.2667f, 0.5484f, 0.3333f, 0.6069f, 0.4000f, 0.6594f, 0.4667f, 0.7072f,
                0.5333f, 0.7515f, 0.6000f, 0.7928f, 0.6667f, 0.8317f, 0.7333f, 0.8685f,
                0.8000f, 0.9035f, 0.8667f, 0.9370f, 0.9333f, 0.9691f, 1.0000f, 1.0000f};
        TonemapCurve tcIn = new TonemapCurve(red, green, blue);
        mMetadata.set(CaptureResult.TONEMAP_CURVE, tcIn);
        float[] redOut = mMetadata.get(CaptureResult.TONEMAP_CURVE_RED);
        float[] greenOut = mMetadata.get(CaptureResult.TONEMAP_CURVE_GREEN);
        float[] blueOut = mMetadata.get(CaptureResult.TONEMAP_CURVE_BLUE);
        assertArrayEquals(red, redOut);
        assertArrayEquals(green, greenOut);
        assertArrayEquals(blue, blueOut);
        TonemapCurve tcOut =
                mMetadata.get(CaptureResult.TONEMAP_CURVE);
        assertEquals(tcIn, tcOut);
        mMetadata.set(CaptureResult.TONEMAP_CURVE_GREEN, null);
        // If any channel has a null curve, a null TonemapCurve is returned
        assertNull(mMetadata.get(CaptureResult.TONEMAP_CURVE));
    }

    /**
     * Set the raw native value of the available stream configurations; ensure that
     * the read-out managed value is consistent with what we write in.
     */
    @SmallTest
    public void testOverrideStreamConfigurationMap() {

        /*
         * First, write all the raw values:
         * - availableStreamConfigurations
         * - availableMinFrameDurations
         * - availableStallDurations
         *
         * Then, read this out as a synthetic multi-key 'streamConfigurationMap'
         *
         * Finally, validate that the map was unmarshaled correctly
         * and is converting the internal formats to public formats properly.
         */

        //
        // android.scaler.availableStreamConfigurations (int x n x 4 array)
        //
        final int OUTPUT = 0;
        final int INPUT = 1;
        int[] rawAvailableStreamConfigs = new int[] {
            0x20, 3280, 2464, OUTPUT, // RAW16
            0x23, 3264, 2448, OUTPUT, // YCbCr_420_888
            0x23, 3200, 2400, OUTPUT, // YCbCr_420_888
            0x21, 3264, 2448, OUTPUT, // BLOB
            0x21, 3200, 2400, OUTPUT, // BLOB
            0x21, 2592, 1944, OUTPUT, // BLOB
            0x21, 2048, 1536, OUTPUT, // BLOB
            0x21, 1920, 1080, OUTPUT, // BLOB
            0x22, 640, 480, OUTPUT,   // IMPLEMENTATION_DEFINED
            0x20, 320, 240, INPUT,    // RAW16
        };
        Key<StreamConfiguration[]> configKey =
                CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS.getNativeKey();
        mMetadata.writeValues(configKey.getTag(),
                toByteArray(rawAvailableStreamConfigs));

        //
        // android.scaler.availableMinFrameDurations (int x n x 4 array)
        //
        long[] expectedAvailableMinDurations = new long[] {
            0x20, 3280, 2464, 33333331,  // RAW16
            0x23, 3264, 2448, 33333332,  // YCbCr_420_888
            0x23, 3200, 2400, 33333333,  // YCbCr_420_888
            0x100, 3264, 2448, 33333334, // ImageFormat.JPEG
            0x100, 3200, 2400, 33333335, // ImageFormat.JPEG
            0x100, 2592, 1944, 33333336, // ImageFormat.JPEG
            0x100, 2048, 1536, 33333337, // ImageFormat.JPEG
            0x100, 1920, 1080, 33333338  // ImageFormat.JPEG
        };
        long[] rawAvailableMinDurations = new long[] {
            0x20, 3280, 2464, 33333331, // RAW16
            0x23, 3264, 2448, 33333332, // YCbCr_420_888
            0x23, 3200, 2400, 33333333, // YCbCr_420_888
            0x21, 3264, 2448, 33333334, // BLOB
            0x21, 3200, 2400, 33333335, // BLOB
            0x21, 2592, 1944, 33333336, // BLOB
            0x21, 2048, 1536, 33333337, // BLOB
            0x21, 1920, 1080, 33333338  // BLOB
        };
        Key<StreamConfigurationDuration[]> durationKey =
                CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS.getNativeKey();
        mMetadata.writeValues(durationKey.getTag(),
                toByteArray(rawAvailableMinDurations));

        //
        // android.scaler.availableStallDurations (int x n x 4 array)
        //
        long[] expectedAvailableStallDurations = new long[] {
            0x20, 3280, 2464, 0,         // RAW16
            0x23, 3264, 2448, 0,         // YCbCr_420_888
            0x23, 3200, 2400, 0,         // YCbCr_420_888
            0x100, 3264, 2448, 33333334, // ImageFormat.JPEG
            0x100, 3200, 2400, 33333335, // ImageFormat.JPEG
            0x100, 2592, 1944, 33333336, // ImageFormat.JPEG
            0x100, 2048, 1536, 33333337, // ImageFormat.JPEG
            0x100, 1920, 1080, 33333338  // ImageFormat.JPEG
        };
        // Note: RAW16 and YUV_420_888 omitted intentionally; omitted values should default to 0
        long[] rawAvailableStallDurations = new long[] {
            0x21, 3264, 2448, 33333334, // BLOB
            0x21, 3200, 2400, 33333335, // BLOB
            0x21, 2592, 1944, 33333336, // BLOB
            0x21, 2048, 1536, 33333337, // BLOB
            0x21, 1920, 1080, 33333338  // BLOB
        };
        Key<StreamConfigurationDuration[]> stallDurationKey =
                CameraCharacteristics.SCALER_AVAILABLE_STALL_DURATIONS.getNativeKey();
        mMetadata.writeValues(stallDurationKey.getTag(),
                toByteArray(rawAvailableStallDurations));

        //
        // android.scaler.streamConfigurationMap (synthetic as StreamConfigurationMap)
        //
        StreamConfigurationMap streamConfigMap = mMetadata.get(
                CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

        // Inputs
        checkStreamConfigurationMapByFormatSize(
                streamConfigMap, ImageFormat.RAW_SENSOR, 320, 240, /*output*/false);

        // Outputs
        checkStreamConfigurationMapByFormatSize(
                streamConfigMap, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 640, 480, /*output*/true);
        checkStreamConfigurationMapByFormatSize(
                streamConfigMap, ImageFormat.JPEG, 1920, 1080, /*output*/true);
        checkStreamConfigurationMapByFormatSize(
                streamConfigMap, ImageFormat.JPEG, 2048, 1536, /*output*/true);
        checkStreamConfigurationMapByFormatSize(
                streamConfigMap, ImageFormat.JPEG, 2592, 1944, /*output*/true);
        checkStreamConfigurationMapByFormatSize(
                streamConfigMap, ImageFormat.JPEG, 3200, 2400, /*output*/true);
        checkStreamConfigurationMapByFormatSize(
                streamConfigMap, ImageFormat.YUV_420_888, 3200, 2400, /*output*/true);
        checkStreamConfigurationMapByFormatSize(
                streamConfigMap, ImageFormat.YUV_420_888, 3264, 2448, /*output*/true);
        checkStreamConfigurationMapByFormatSize(
                streamConfigMap, ImageFormat.RAW_SENSOR, 3280, 2464, /*output*/true);

        // Min Frame Durations

        final int DURATION_TUPLE_SIZE = 4;
        for (int i = 0; i < expectedAvailableMinDurations.length; i += DURATION_TUPLE_SIZE) {
            checkStreamConfigurationMapDurationByFormatSize(
                    streamConfigMap,
                    (int)expectedAvailableMinDurations[i],
                    (int)expectedAvailableMinDurations[i+1],
                    (int)expectedAvailableMinDurations[i+2],
                    Duration.MinFrame,
                    expectedAvailableMinDurations[i+3]);
        }

        // Stall Frame Durations

        for (int i = 0; i < expectedAvailableStallDurations.length; i += DURATION_TUPLE_SIZE) {
            checkStreamConfigurationMapDurationByFormatSize(
                    streamConfigMap,
                    (int)expectedAvailableStallDurations[i],
                    (int)expectedAvailableStallDurations[i+1],
                    (int)expectedAvailableStallDurations[i+2],
                    Duration.Stall,
                    expectedAvailableStallDurations[i+3]);
        }
    }

    private <T> void assertKeyValueEquals(T expected, CameraCharacteristics.Key<T> key) {
        assertKeyValueEquals(expected, key.getNativeKey());
    }

    private <T> void assertKeyValueEquals(T expected, Key<T> key) {
        T actual = mMetadata.get(key);

        assertEquals("Expected value for key " + key + " to match", expected, actual);
    }

    @SmallTest
    public void testOverrideMaxRegions() {
        // All keys are null before doing any writes.
        assertKeyValueEquals(null, CameraCharacteristics.CONTROL_MAX_REGIONS_AE);
        assertKeyValueEquals(null, CameraCharacteristics.CONTROL_MAX_REGIONS_AWB);
        assertKeyValueEquals(null, CameraCharacteristics.CONTROL_MAX_REGIONS_AF);

        mMetadata.set(CameraCharacteristics.CONTROL_MAX_REGIONS,
                new int[] { /*AE*/1, /*AWB*/2, /*AF*/3 });

        // All keys are the expected value after doing a write
        assertKeyValueEquals(1, CameraCharacteristics.CONTROL_MAX_REGIONS_AE);
        assertKeyValueEquals(2, CameraCharacteristics.CONTROL_MAX_REGIONS_AWB);
        assertKeyValueEquals(3, CameraCharacteristics.CONTROL_MAX_REGIONS_AF);
    }

    @SmallTest
    public void testOverrideMaxNumOutputStreams() {
        // All keys are null before doing any writes.
        assertKeyValueEquals(null, CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW);
        assertKeyValueEquals(null, CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC);
        assertKeyValueEquals(null, CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING);

        mMetadata.set(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_STREAMS,
                new int[] { /*RAW*/1, /*PROC*/2, /*PROC_STALLING*/3 });

        // All keys are the expected value after doing a write
        assertKeyValueEquals(1, CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW);
        assertKeyValueEquals(2, CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC);
        assertKeyValueEquals(3, CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING);
    }

    @SmallTest
    public void testCaptureResult() {
        mMetadata.set(CaptureRequest.CONTROL_AE_MODE,
                CameraMetadata.CONTROL_AE_MODE_ON_AUTO_FLASH);

        if (VERBOSE) mMetadata.dumpToLog();

        CaptureResult captureResult = new CaptureResult(mMetadata, /*sequenceId*/0);

        List<CaptureResult.Key<?>> allKeys = captureResult.getKeys();
        if (VERBOSE) Log.v(TAG, "testCaptureResult: key list size " + allKeys.size());
        for (CaptureResult.Key<?> key : captureResult.getKeys()) {
            if (VERBOSE) {
                Log.v(TAG,
                        "testCaptureResult: key " + key + " value " + captureResult.get(key));
            }
        }

        assertTrue(allKeys.size() >= 1); // FIXME: android.statistics.faces counts as a key
        assertTrue(allKeys.contains(CaptureResult.CONTROL_AE_MODE));

        assertEquals(CameraMetadata.CONTROL_AE_MODE_ON_AUTO_FLASH,
                (int)captureResult.get(CaptureResult.CONTROL_AE_MODE));
    }

    private static void checkStreamConfigurationMapByFormatSize(StreamConfigurationMap configMap,
            int format, int width, int height,
            boolean output) {

        /** arbitrary class for which StreamConfigurationMap#isOutputSupportedFor(Class) is true */
        final Class<?> IMPLEMENTATION_DEFINED_OUTPUT_CLASS = SurfaceTexture.class;

        android.util.Size[] sizes;
        int[] formats;

        if (output) {
            if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
                sizes = configMap.getOutputSizes(IMPLEMENTATION_DEFINED_OUTPUT_CLASS);
                // in this case the 'is output format supported' is vacuously true
                formats = new int[] { HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED };
            } else {
                sizes = configMap.getOutputSizes(format);
                formats = configMap.getOutputFormats();
                assertTrue("Format must be supported by stream configuration map",
                        configMap.isOutputSupportedFor(format));
            }
        } else {
            // NOTE: No function to do input sizes from IMPL_DEFINED, so it would just fail for that
            sizes = configMap.getInputSizes(format);
            formats = configMap.getInputFormats();
        }

        android.util.Size expectedSize = new android.util.Size(width, height);

        assertArrayContains(format, formats);
        assertArrayContains(expectedSize, sizes);
    }

    private enum Duration {
        MinFrame,
        Stall
    }

    private static void checkStreamConfigurationMapDurationByFormatSize(
            StreamConfigurationMap configMap,
            int format, int width, int height, Duration durationKind, long expectedDuration) {

        /** arbitrary class for which StreamConfigurationMap#isOutputSupportedFor(Class) is true */
        final Class<?> IMPLEMENTATION_DEFINED_OUTPUT_CLASS = SurfaceTexture.class;

        long actualDuration;

        android.util.Size size = new android.util.Size(width, height);
        switch (durationKind) {
            case MinFrame:
                if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
                    actualDuration = configMap.getOutputMinFrameDuration(
                            IMPLEMENTATION_DEFINED_OUTPUT_CLASS, size);
                } else {
                    actualDuration = configMap.getOutputMinFrameDuration(format, size);
                }

                break;
            case Stall:
                if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
                    actualDuration = configMap.getOutputStallDuration(
                            IMPLEMENTATION_DEFINED_OUTPUT_CLASS, size);
                } else {
                    actualDuration = configMap.getOutputStallDuration(format, size);
                }

                break;
            default:
                throw new AssertionError();
        }

        assertEquals("Expected " + durationKind + " to match actual value", expectedDuration,
                actualDuration);
    }

    /**
     * Validate metadata array tag read/write override.
     *
     * <p>Only long and int arrays are supported for now; this can easily be extended to
     * support other primitive arrays.</p>
     */
    private <T> void validateArrayMetadataReadWriteOverride(Key<T> key, T expectedWriteValues,
            T expectedReadValues, int tag) {
        Class<?> type = expectedWriteValues.getClass();
        if (!type.isArray()) {
            throw new IllegalArgumentException("This function expects a key with an array type");
        } else if (type != int[].class && type != long[].class) {
            throw new IllegalArgumentException("This function expects long or int array values");
        }

        // Write
        mMetadata.set(key, expectedWriteValues);

        byte[] readOutValues = mMetadata.readValues(tag);

        ByteBuffer bf = ByteBuffer.wrap(readOutValues).order(ByteOrder.nativeOrder());

        int readValuesLength = Array.getLength(expectedReadValues);
        int readValuesNumBytes = readValuesLength * 4;
        if (type == long[].class) {
            readValuesNumBytes = readValuesLength * 8;
        }

        assertEquals(readValuesNumBytes, readOutValues.length);
        for (int i = 0; i < readValuesLength; ++i) {
            if (type == int[].class) {
                assertEquals(Array.getInt(expectedReadValues, i), bf.getInt());
            } else if (type == long[].class) {
                assertEquals(Array.getLong(expectedReadValues, i), bf.getLong());
            }
        }

        // Read
        byte[] readOutValuesAsByteArray = new byte[readValuesNumBytes];
        ByteBuffer readOutValuesByteBuffer =
                ByteBuffer.wrap(readOutValuesAsByteArray).order(ByteOrder.nativeOrder());
        for (int i = 0; i < readValuesLength; ++i) {
            if (type == int[].class) {
                readOutValuesByteBuffer.putInt(Array.getInt(expectedReadValues, i));
            } else if (type == long[].class) {
                readOutValuesByteBuffer.putLong(Array.getLong(expectedReadValues, i));
            }
        }
        mMetadata.writeValues(tag, readOutValuesAsByteArray);

        T result = mMetadata.get(key);
        assertNotNull(key.getName() + " result shouldn't be null", result);
        assertArrayEquals(expectedWriteValues, result);
    }

    // TODO: move somewhere else
    @SmallTest
    public void testToByteArray() {
        assertArrayEquals(new byte[] { 5, 0, 0, 0, 6, 0, 0, 0 },
                toByteArray(5, 6));
        assertArrayEquals(new byte[] { 5, 0, 6, 0, },
                toByteArray((short)5, (short)6));
        assertArrayEquals(new byte[] { (byte)0xFF, (byte)0xFF, (byte)0xFF, (byte)0xFF,
                (byte)0xFF, (byte)0xFF, (byte)0xFF, (byte)0xFF,},
                toByteArray(~0, ~0));

        assertArrayEquals(new byte[] { (byte)0xAB, (byte)0xFF, 0, 0,
                0x0D, (byte)0xF0, (byte)0xAD, (byte)0xDE },
                toByteArray(0xFFAB, 0xDEADF00D));
    }
}