1<?xml version="1.0" encoding="utf-8"?> 2<!-- Copyright (C) 2012 The Android Open Source Project 3 4 Licensed under the Apache License, Version 2.0 (the "License"); 5 you may not use this file except in compliance with the License. 6 You may obtain a copy of the License at 7 8 http://www.apache.org/licenses/LICENSE-2.0 9 10 Unless required by applicable law or agreed to in writing, software 11 distributed under the License is distributed on an "AS IS" BASIS, 12 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 See the License for the specific language governing permissions and 14 limitations under the License. 15--> 16<metadata xmlns="http://schemas.android.com/service/camera/metadata/" 17xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" 18xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata_definitions.xsd"> 19 20 <tags> 21 <tag id="BC"> 22 Needed for backwards compatibility with old Java API 23 </tag> 24 <tag id="V1"> 25 New features for first camera 2 release (API1) 26 </tag> 27 <tag id="RAW"> 28 Needed for useful RAW image processing and DNG file support 29 </tag> 30 <tag id="HAL2"> 31 Entry is only used by camera device legacy HAL 2.x 32 </tag> 33 <tag id="FULL"> 34 Entry is required for full hardware level devices, and optional for other hardware levels 35 </tag> 36 <tag id="DEPTH"> 37 Entry is required for the depth capability. 38 </tag> 39 <tag id="REPROC"> 40 Entry is required for the YUV or PRIVATE reprocessing capability. 41 </tag> 42 <tag id="LOGICALCAMERA"> 43 Entry is required for logical multi-camera capability. 44 </tag> 45 <tag id="HEIC"> 46 Entry is required for devices with HEIC (High Efficiency Image Format) support. 47 </tag> 48 <tag id="FUTURE"> 49 Entry is under-specified and is not required for now. This is for book-keeping purpose, 50 do not implement or use it, it may be revised for future. 
51 </tag> 52 </tags> 53 54 <types> 55 <typedef name="pairFloatFloat"> 56 <language name="java">android.util.Pair<Float,Float></language> 57 </typedef> 58 <typedef name="pairDoubleDouble"> 59 <language name="java">android.util.Pair<Double,Double></language> 60 </typedef> 61 <typedef name="rectangle"> 62 <language name="java">android.graphics.Rect</language> 63 </typedef> 64 <typedef name="size"> 65 <language name="java">android.util.Size</language> 66 </typedef> 67 <typedef name="string"> 68 <language name="java">String</language> 69 </typedef> 70 <typedef name="boolean"> 71 <language name="java">boolean</language> 72 </typedef> 73 <typedef name="imageFormat"> 74 <language name="java">int</language> 75 </typedef> 76 <typedef name="streamConfigurationMap"> 77 <language name="java">android.hardware.camera2.params.StreamConfigurationMap</language> 78 </typedef> 79 <typedef name="streamConfiguration"> 80 <language name="java">android.hardware.camera2.params.StreamConfiguration</language> 81 </typedef> 82 <typedef name="recommendedStreamConfiguration"> 83 <language 84 name="java">android.hardware.camera2.params.RecommendedStreamConfiguration</language> 85 </typedef> 86 <typedef name="streamConfigurationDuration"> 87 <language name="java">android.hardware.camera2.params.StreamConfigurationDuration</language> 88 </typedef> 89 <typedef name="face"> 90 <language name="java">android.hardware.camera2.params.Face</language> 91 </typedef> 92 <typedef name="meteringRectangle"> 93 <language name="java">android.hardware.camera2.params.MeteringRectangle</language> 94 </typedef> 95 <typedef name="rangeFloat"> 96 <language name="java">android.util.Range<Float></language> 97 </typedef> 98 <typedef name="rangeInt"> 99 <language name="java">android.util.Range<Integer></language> 100 </typedef> 101 <typedef name="rangeLong"> 102 <language name="java">android.util.Range<Long></language> 103 </typedef> 104 <typedef name="colorSpaceTransform"> 105 <language name="java">android.hardware.camera2.params.ColorSpaceTransform</language> 106 </typedef> 107 <typedef name="rggbChannelVector"> 108 <language name="java">android.hardware.camera2.params.RggbChannelVector</language> 109 </typedef> 110 <typedef name="blackLevelPattern"> 111 <language name="java">android.hardware.camera2.params.BlackLevelPattern</language> 112 </typedef> 113 <typedef name="enumList"> 114 <language name="java">int</language> 115 </typedef> 116 <typedef name="sizeF"> 117 <language name="java">android.util.SizeF</language> 118 </typedef> 119 <typedef name="point"> 120 <language name="java">android.graphics.Point</language> 121 </typedef> 122 <typedef name="tonemapCurve"> 123 <language name="java">android.hardware.camera2.params.TonemapCurve</language> 124 </typedef> 125 <typedef name="lensShadingMap"> 126 <language name="java">android.hardware.camera2.params.LensShadingMap</language> 127 </typedef> 128 <typedef name="location"> 129 <language name="java">android.location.Location</language> 130 </typedef> 131 <typedef name="highSpeedVideoConfiguration"> 132 <language name="java">android.hardware.camera2.params.HighSpeedVideoConfiguration</language> 133 </typedef> 134 <typedef name="reprocessFormatsMap"> 135 <language name="java">android.hardware.camera2.params.ReprocessFormatsMap</language> 136 </typedef> 137 <typedef name="oisSample"> 138 <language name="java">android.hardware.camera2.params.OisSample</language> 139 </typedef> 140 <typedef name="mandatoryStreamCombination"> 141 <language 
name="java">android.hardware.camera2.params.MandatoryStreamCombination</language> 142 </typedef> 143 </types> 144 145 <namespace name="android"> 146 <section name="colorCorrection"> 147 <controls> 148 <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full"> 149 <enum> 150 <value>TRANSFORM_MATRIX 151 <notes>Use the android.colorCorrection.transform matrix 152 and android.colorCorrection.gains to do color conversion. 153 154 All advanced white balance adjustments (not specified 155 by our white balance pipeline) must be disabled. 156 157 If AWB is enabled with `android.control.awbMode != OFF`, then 158 TRANSFORM_MATRIX is ignored. The camera device will override 159 this value to either FAST or HIGH_QUALITY. 160 </notes> 161 </value> 162 <value>FAST 163 <notes>Color correction processing must not slow down 164 capture rate relative to sensor raw output. 165 166 Advanced white balance adjustments above and beyond 167 the specified white balance pipeline may be applied. 168 169 If AWB is enabled with `android.control.awbMode != OFF`, then 170 the camera device uses the last frame's AWB values 171 (or defaults if AWB has never been run). 172 </notes> 173 </value> 174 <value>HIGH_QUALITY 175 <notes>Color correction processing operates at improved 176 quality but the capture rate might be reduced (relative to sensor 177 raw output rate) 178 179 Advanced white balance adjustments above and beyond 180 the specified white balance pipeline may be applied. 181 182 If AWB is enabled with `android.control.awbMode != OFF`, then 183 the camera device uses the last frame's AWB values 184 (or defaults if AWB has never been run). 185 </notes> 186 </value> 187 </enum> 188 189 <description> 190 The mode control selects how the image data is converted from the 191 sensor's native color into linear sRGB color. 192 </description> 193 <details> 194 When auto-white balance (AWB) is enabled with android.control.awbMode, this 195 control is overridden by the AWB routine. When AWB is disabled, the 196 application controls how the color mapping is performed. 197 198 We define the expected processing pipeline below. For consistency 199 across devices, this is always the case with TRANSFORM_MATRIX. 200 201 When either FULL or HIGH_QUALITY is used, the camera device may 202 do additional processing but android.colorCorrection.gains and 203 android.colorCorrection.transform will still be provided by the 204 camera device (in the results) and be roughly correct. 205 206 Switching to TRANSFORM_MATRIX and using the data provided from 207 FAST or HIGH_QUALITY will yield a picture with the same white point 208 as what was produced by the camera device in the earlier frame. 209 210 The expected processing pipeline is as follows: 211 212 ![White balance processing pipeline](android.colorCorrection.mode/processing_pipeline.png) 213 214 The white balance is encoded by two values, a 4-channel white-balance 215 gain vector (applied in the Bayer domain), and a 3x3 color transform 216 matrix (applied after demosaic). 217 218 The 4-channel white-balance gains are defined as: 219 220 android.colorCorrection.gains = [ R G_even G_odd B ] 221 222 where `G_even` is the gain for green pixels on even rows of the 223 output, and `G_odd` is the gain for green pixels on the odd rows. 
224 These may be identical for a given camera device implementation; if 225 the camera device does not support a separate gain for even/odd green 226 channels, it will use the `G_even` value, and write `G_odd` equal to 227 `G_even` in the output result metadata. 228 229 The matrices for color transforms are defined as a 9-entry vector: 230 231 android.colorCorrection.transform = [ I0 I1 I2 I3 I4 I5 I6 I7 I8 ] 232 233 which define a transform from input sensor colors, `P_in = [ r g b ]`, 234 to output linear sRGB, `P_out = [ r' g' b' ]`, 235 236 with colors as follows: 237 238 r' = I0r + I1g + I2b 239 g' = I3r + I4g + I5b 240 b' = I6r + I7g + I8b 241 242 Both the input and output value ranges must match. Overflow/underflow 243 values are clipped to fit within the range. 244 </details> 245 <hal_details> 246 HAL must support both FAST and HIGH_QUALITY if color correction control is available 247 on the camera device, but the underlying implementation can be the same for both modes. 248 That is, if the highest quality implementation on the camera device does not slow down 249 capture rate, then FAST and HIGH_QUALITY should generate the same output. 250 </hal_details> 251 </entry> 252 <entry name="transform" type="rational" visibility="public" 253 type_notes="3x3 rational matrix in row-major order" 254 container="array" typedef="colorSpaceTransform" hwlevel="full"> 255 <array> 256 <size>3</size> 257 <size>3</size> 258 </array> 259 <description>A color transform matrix to use to transform 260 from sensor RGB color space to output linear sRGB color space. 261 </description> 262 <units>Unitless scale factors</units> 263 <details>This matrix is either set by the camera device when the request 264 android.colorCorrection.mode is not TRANSFORM_MATRIX, or 265 directly by the application in the request when the 266 android.colorCorrection.mode is TRANSFORM_MATRIX. 267 268 In the latter case, the camera device may round the matrix to account 269 for precision issues; the final rounded matrix should be reported back 270 in this matrix result metadata. The transform should keep the magnitude 271 of the output color values within `[0, 1.0]` (assuming input color 272 values is within the normalized range `[0, 1.0]`), or clipping may occur. 273 274 The valid range of each matrix element varies on different devices, but 275 values within [-1.5, 3.0] are guaranteed not to be clipped. 276 </details> 277 </entry> 278 <entry name="gains" type="float" visibility="public" 279 type_notes="A 1D array of floats for 4 color channel gains" 280 container="array" typedef="rggbChannelVector" hwlevel="full"> 281 <array> 282 <size>4</size> 283 </array> 284 <description>Gains applying to Bayer raw color channels for 285 white-balance.</description> 286 <units>Unitless gain factors</units> 287 <details> 288 These per-channel gains are either set by the camera device 289 when the request android.colorCorrection.mode is not 290 TRANSFORM_MATRIX, or directly by the application in the 291 request when the android.colorCorrection.mode is 292 TRANSFORM_MATRIX. 293 294 The gains in the result metadata are the gains actually 295 applied by the camera device to the current frame. 296 297 The valid range of gains varies on different devices, but gains 298 between [1.0, 3.0] are guaranteed not to be clipped. Even if a given 299 device allows gains below 1.0, this is usually not recommended because 300 this can create color artifacts. 
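As an illustration, a minimal Java sketch of manual white balance that reuses the gains and transform reported by an earlier auto-WB frame; the `builder`, `lastResult`, `session`, `callback`, and `handler` objects are assumed to already exist in the application:

    // Reuse the values the device reported while AWB was running, then switch
    // to fully manual color correction (sketch; error handling omitted).
    RggbChannelVector gains = lastResult.get(CaptureResult.COLOR_CORRECTION_GAINS);
    ColorSpaceTransform transform = lastResult.get(CaptureResult.COLOR_CORRECTION_TRANSFORM);
    builder.set(CaptureRequest.CONTROL_AWB_MODE, CameraMetadata.CONTROL_AWB_MODE_OFF);
    builder.set(CaptureRequest.COLOR_CORRECTION_MODE,
            CameraMetadata.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
    builder.set(CaptureRequest.COLOR_CORRECTION_GAINS, gains);
    builder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM, transform);
    session.setRepeatingRequest(builder.build(), callback, handler);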
301 </details> 302 <hal_details> 303 The 4-channel white-balance gains are defined in 304 the order of `[R G_even G_odd B]`, where `G_even` is the gain 305 for green pixels on even rows of the output, and `G_odd` 306 is the gain for green pixels on the odd rows. 307 308 If a HAL does not support a separate gain for even/odd green 309 channels, it must use the `G_even` value, and write 310 `G_odd` equal to `G_even` in the output result metadata. 311 </hal_details> 312 </entry> 313 <entry name="aberrationMode" type="byte" visibility="public" enum="true" hwlevel="legacy"> 314 <enum> 315 <value>OFF 316 <notes> 317 No aberration correction is applied. 318 </notes> 319 </value> 320 <value>FAST 321 <notes> 322 Aberration correction will not slow down capture rate 323 relative to sensor raw output. 324 </notes> 325 </value> 326 <value>HIGH_QUALITY 327 <notes> 328 Aberration correction operates at improved quality but the capture rate might be 329 reduced (relative to sensor raw output rate) 330 </notes> 331 </value> 332 </enum> 333 <description> 334 Mode of operation for the chromatic aberration correction algorithm. 335 </description> 336 <range>android.colorCorrection.availableAberrationModes</range> 337 <details> 338 Chromatic (color) aberration is caused by the fact that different wavelengths of light 339 can not focus on the same point after exiting from the lens. This metadata defines 340 the high level control of chromatic aberration correction algorithm, which aims to 341 minimize the chromatic artifacts that may occur along the object boundaries in an 342 image. 343 344 FAST/HIGH_QUALITY both mean that camera device determined aberration 345 correction will be applied. HIGH_QUALITY mode indicates that the camera device will 346 use the highest-quality aberration correction algorithms, even if it slows down 347 capture rate. FAST means the camera device will not slow down capture rate when 348 applying aberration correction. 349 350 LEGACY devices will always be in FAST mode. 351 </details> 352 </entry> 353 </controls> 354 <dynamic> 355 <clone entry="android.colorCorrection.mode" kind="controls"> 356 </clone> 357 <clone entry="android.colorCorrection.transform" kind="controls"> 358 </clone> 359 <clone entry="android.colorCorrection.gains" kind="controls"> 360 </clone> 361 <clone entry="android.colorCorrection.aberrationMode" kind="controls"> 362 </clone> 363 </dynamic> 364 <static> 365 <entry name="availableAberrationModes" type="byte" visibility="public" 366 type_notes="list of enums" container="array" typedef="enumList" hwlevel="legacy"> 367 <array> 368 <size>n</size> 369 </array> 370 <description> 371 List of aberration correction modes for android.colorCorrection.aberrationMode that are 372 supported by this camera device. 373 </description> 374 <range>Any value listed in android.colorCorrection.aberrationMode</range> 375 <details> 376 This key lists the valid modes for android.colorCorrection.aberrationMode. If no 377 aberration correction modes are available for a device, this list will solely include 378 OFF mode. All camera devices will support either OFF or FAST mode. 379 380 Camera devices that support the MANUAL_POST_PROCESSING capability will always list 381 OFF mode. This includes all FULL level devices. 382 383 LEGACY devices will always only support FAST mode. 384 </details> 385 <hal_details> 386 HAL must support both FAST and HIGH_QUALITY if chromatic aberration control is available 387 on the camera device, but the underlying implementation can be the same for both modes. 
388 That is, if the highest quality implementation on the camera device does not slow down 389 capture rate, then FAST and HIGH_QUALITY will generate the same output. 390 </hal_details> 391 <tag id="V1" /> 392 </entry> 393 </static> 394 </section> 395 <section name="control"> 396 <controls> 397 <entry name="aeAntibandingMode" type="byte" visibility="public" 398 enum="true" hwlevel="legacy"> 399 <enum> 400 <value>OFF 401 <notes> 402 The camera device will not adjust exposure duration to 403 avoid banding problems. 404 </notes> 405 </value> 406 <value>50HZ 407 <notes> 408 The camera device will adjust exposure duration to 409 avoid banding problems with 50Hz illumination sources. 410 </notes> 411 </value> 412 <value>60HZ 413 <notes> 414 The camera device will adjust exposure duration to 415 avoid banding problems with 60Hz illumination 416 sources. 417 </notes> 418 </value> 419 <value>AUTO 420 <notes> 421 The camera device will automatically adapt its 422 antibanding routine to the current illumination 423 condition. This is the default mode if AUTO is 424 available on given camera device. 425 </notes> 426 </value> 427 </enum> 428 <description> 429 The desired setting for the camera device's auto-exposure 430 algorithm's antibanding compensation. 431 </description> 432 <range> 433 android.control.aeAvailableAntibandingModes 434 </range> 435 <details> 436 Some kinds of lighting fixtures, such as some fluorescent 437 lights, flicker at the rate of the power supply frequency 438 (60Hz or 50Hz, depending on country). While this is 439 typically not noticeable to a person, it can be visible to 440 a camera device. If a camera sets its exposure time to the 441 wrong value, the flicker may become visible in the 442 viewfinder as flicker or in a final captured image, as a 443 set of variable-brightness bands across the image. 444 445 Therefore, the auto-exposure routines of camera devices 446 include antibanding routines that ensure that the chosen 447 exposure value will not cause such banding. The choice of 448 exposure time depends on the rate of flicker, which the 449 camera device can detect automatically, or the expected 450 rate can be selected by the application using this 451 control. 452 453 A given camera device may not support all of the possible 454 options for the antibanding mode. The 455 android.control.aeAvailableAntibandingModes key contains 456 the available modes for a given camera device. 457 458 AUTO mode is the default if it is available on given 459 camera device. When AUTO mode is not available, the 460 default will be either 50HZ or 60HZ, and both 50HZ 461 and 60HZ will be available. 462 463 If manual exposure control is enabled (by setting 464 android.control.aeMode or android.control.mode to OFF), 465 then this setting has no effect, and the application must 466 ensure it selects exposure times that do not cause banding 467 issues. The android.statistics.sceneFlicker key can assist 468 the application in this. 469 </details> 470 <hal_details> 471 For all capture request templates, this field must be set 472 to AUTO if AUTO mode is available. If AUTO is not available, 473 the default must be either 50HZ or 60HZ, and both 50HZ and 474 60HZ must be available. 475 476 If manual exposure control is enabled (by setting 477 android.control.aeMode or android.control.mode to OFF), 478 then the exposure values provided by the application must not be 479 adjusted for antibanding. 
480 </hal_details> 481 <tag id="BC" /> 482 </entry> 483 <entry name="aeExposureCompensation" type="int32" visibility="public" hwlevel="legacy"> 484 <description>Adjustment to auto-exposure (AE) target image 485 brightness.</description> 486 <units>Compensation steps</units> 487 <range>android.control.aeCompensationRange</range> 488 <details> 489 The adjustment is measured as a count of steps, with the 490 step size defined by android.control.aeCompensationStep and the 491 allowed range by android.control.aeCompensationRange. 492 493 For example, if the exposure value (EV) step is 0.333, '6' 494 will mean an exposure compensation of +2 EV; -3 will mean an 495 exposure compensation of -1 EV. One EV represents a doubling 496 of image brightness. Note that this control will only be 497 effective if android.control.aeMode `!=` OFF. This control 498 will take effect even when android.control.aeLock `== true`. 499 500 In the event of exposure compensation value being changed, camera device 501 may take several frames to reach the newly requested exposure target. 502 During that time, android.control.aeState field will be in the SEARCHING 503 state. Once the new exposure target is reached, android.control.aeState will 504 change from SEARCHING to either CONVERGED, LOCKED (if AE lock is enabled), or 505 FLASH_REQUIRED (if the scene is too dark for still capture). 506 </details> 507 <tag id="BC" /> 508 </entry> 509 <entry name="aeLock" type="byte" visibility="public" enum="true" 510 typedef="boolean" hwlevel="legacy"> 511 <enum> 512 <value>OFF 513 <notes>Auto-exposure lock is disabled; the AE algorithm 514 is free to update its parameters.</notes></value> 515 <value>ON 516 <notes>Auto-exposure lock is enabled; the AE algorithm 517 must not update the exposure and sensitivity parameters 518 while the lock is active. 519 520 android.control.aeExposureCompensation setting changes 521 will still take effect while auto-exposure is locked. 522 523 Some rare LEGACY devices may not support 524 this, in which case the value will always be overridden to OFF. 525 </notes></value> 526 </enum> 527 <description>Whether auto-exposure (AE) is currently locked to its latest 528 calculated values.</description> 529 <details> 530 When set to `true` (ON), the AE algorithm is locked to its latest parameters, 531 and will not change exposure settings until the lock is set to `false` (OFF). 532 533 Note that even when AE is locked, the flash may be fired if 534 the android.control.aeMode is ON_AUTO_FLASH / 535 ON_ALWAYS_FLASH / ON_AUTO_FLASH_REDEYE. 536 537 When android.control.aeExposureCompensation is changed, even if the AE lock 538 is ON, the camera device will still adjust its exposure value. 539 540 If AE precapture is triggered (see android.control.aePrecaptureTrigger) 541 when AE is already locked, the camera device will not change the exposure time 542 (android.sensor.exposureTime) and sensitivity (android.sensor.sensitivity) 543 parameters. The flash may be fired if the android.control.aeMode 544 is ON_AUTO_FLASH/ON_AUTO_FLASH_REDEYE and the scene is too dark. If the 545 android.control.aeMode is ON_ALWAYS_FLASH, the scene may become overexposed. 546 Similarly, AE precapture trigger CANCEL has no effect when AE is already locked. 
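As an illustration, a minimal Java sketch of locking AE from a running preview and waiting for the lock to take effect; the `previewBuilder`, `session`, and `handler` objects are assumed to already exist, and the locked settings can then be copied into a manual request as described in the procedure below:

    // Lock AE on the repeating preview request and watch the results for the
    // LOCKED state (sketch; error handling omitted).
    previewBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
    session.setRepeatingRequest(previewBuilder.build(),
            new CameraCaptureSession.CaptureCallback() {
        @Override
        public void onCaptureCompleted(CameraCaptureSession s, CaptureRequest request,
                TotalCaptureResult result) {
            Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
            if (aeState != null && aeState == CameraMetadata.CONTROL_AE_STATE_LOCKED) {
                // AE is now locked; the exposure settings in `result` can be
                // copied into a manual-AE request.
            }
        }
    }, handler);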
547 548 When an AE precapture sequence is triggered, AE unlock will not be able to unlock 549 the AE if AE is locked by the camera device internally during the precapture metering 550 sequence. In other words, submitting requests with AE unlock has no effect for an 551 ongoing precapture metering sequence. Otherwise, the precapture metering sequence 552 will never succeed in a sequence of preview requests where AE lock is always set 553 to `false`. 554 555 Since the camera device has a pipeline of in-flight requests, the settings that 556 get locked do not necessarily correspond to the settings that were present in the 557 latest capture result received from the camera device, since additional captures 558 and AE updates may have occurred even before the result was sent out. If an 559 application is switching between automatic and manual control and wishes to eliminate 560 any flicker during the switch, the following procedure is recommended: 561 562 1. Starting in auto-AE mode: 563 2. Lock AE 564 3. Wait for the first result to be output that has the AE locked 565 4. Copy exposure settings from that result into a request, set the request to manual AE 566 5. Submit the capture request, proceed to run manual AE as desired. 567 568 See android.control.aeState for AE lock related state transition details. 569 </details> 570 <tag id="BC" /> 571 </entry> 572 <entry name="aeMode" type="byte" visibility="public" enum="true" hwlevel="legacy"> 573 <enum> 574 <value>OFF 575 <notes> 576 The camera device's autoexposure routine is disabled. 577 578 The application-selected android.sensor.exposureTime, 579 android.sensor.sensitivity and 580 android.sensor.frameDuration are used by the camera 581 device, along with android.flash.* fields, if there's 582 a flash unit for this camera device. 583 584 Note that auto-white balance (AWB) and auto-focus (AF) 585 behavior is device dependent when AE is in OFF mode. 586 To have consistent behavior across different devices, 587 it is recommended to either set AWB and AF to OFF mode 588 or lock AWB and AF before setting AE to OFF. 589 See android.control.awbMode, android.control.afMode, 590 android.control.awbLock, and android.control.afTrigger 591 for more details. 592 593 LEGACY devices do not support the OFF mode and will 594 override attempts to use this value to ON. 595 </notes> 596 </value> 597 <value>ON 598 <notes> 599 The camera device's autoexposure routine is active, 600 with no flash control. 601 602 The application's values for 603 android.sensor.exposureTime, 604 android.sensor.sensitivity, and 605 android.sensor.frameDuration are ignored. The 606 application has control over the various 607 android.flash.* fields. 608 </notes> 609 </value> 610 <value>ON_AUTO_FLASH 611 <notes> 612 Like ON, except that the camera device also controls 613 the camera's flash unit, firing it in low-light 614 conditions. 615 616 The flash may be fired during a precapture sequence 617 (triggered by android.control.aePrecaptureTrigger) and 618 may be fired for captures for which the 619 android.control.captureIntent field is set to 620 STILL_CAPTURE 621 </notes> 622 </value> 623 <value>ON_ALWAYS_FLASH 624 <notes> 625 Like ON, except that the camera device also controls 626 the camera's flash unit, always firing it for still 627 captures.
628 629 The flash may be fired during a precapture sequence 630 (triggered by android.control.aePrecaptureTrigger) and 631 will always be fired for captures for which the 632 android.control.captureIntent field is set to 633 STILL_CAPTURE 634 </notes> 635 </value> 636 <value>ON_AUTO_FLASH_REDEYE 637 <notes> 638 Like ON_AUTO_FLASH, but with automatic red eye 639 reduction. 640 641 If deemed necessary by the camera device, a red eye 642 reduction flash will fire during the precapture 643 sequence. 644 </notes> 645 </value> 646 <value hal_version="3.3">ON_EXTERNAL_FLASH 647 <notes> 648 An external flash has been turned on. 649 650 It informs the camera device that an external flash has been turned on, and that 651 metering (and continuous focus if active) should be quickly recalculated to account 652 for the external flash. Otherwise, this mode acts like ON. 653 654 When the external flash is turned off, AE mode should be changed to one of the 655 other available AE modes. 656 657 If the camera device supports AE external flash mode, android.control.aeState must 658 be FLASH_REQUIRED after the camera device finishes AE scan and it's too dark without 659 flash. 660 </notes> 661 </value> 662 </enum> 663 <description>The desired mode for the camera device's 664 auto-exposure routine.</description> 665 <range>android.control.aeAvailableModes</range> 666 <details> 667 This control is only effective if android.control.mode is 668 AUTO. 669 670 When set to any of the ON modes, the camera device's 671 auto-exposure routine is enabled, overriding the 672 application's selected exposure time, sensor sensitivity, 673 and frame duration (android.sensor.exposureTime, 674 android.sensor.sensitivity, and 675 android.sensor.frameDuration). If one of the FLASH modes 676 is selected, the camera device's flash unit controls are 677 also overridden. 678 679 The FLASH modes are only available if the camera device 680 has a flash unit (android.flash.info.available is `true`). 681 682 If flash TORCH mode is desired, this field must be set to 683 ON or OFF, and android.flash.mode set to TORCH. 684 685 When set to any of the ON modes, the values chosen by the 686 camera device auto-exposure routine for the overridden 687 fields for a given capture will be available in its 688 CaptureResult. 689 </details> 690 <tag id="BC" /> 691 </entry> 692 <entry name="aeRegions" type="int32" visibility="public" 693 optional="true" container="array" typedef="meteringRectangle"> 694 <array> 695 <size>5</size> 696 <size>area_count</size> 697 </array> 698 <description>List of metering areas to use for auto-exposure adjustment.</description> 699 <units>Pixel coordinates within android.sensor.info.activeArraySize or 700 android.sensor.info.preCorrectionActiveArraySize depending on 701 distortion correction capability and mode</units> 702 <range>Coordinates must be between `[(0,0), (width, height))` of 703 android.sensor.info.activeArraySize or android.sensor.info.preCorrectionActiveArraySize 704 depending on distortion correction capability and mode</range> 705 <details> 706 Not available if android.control.maxRegionsAe is 0. 707 Otherwise will always be present. 708 709 The maximum number of regions supported by the device is determined by the value 710 of android.control.maxRegionsAe.
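As an illustration, a minimal Java sketch that meters on a centered region covering half the width and height of the active array, following the coordinate-system and weight rules described below; the `characteristics` and `builder` objects are assumed to already exist:

    // Build one metering rectangle centered in the active array, with maximum
    // weight, and set it as the sole AE region (sketch).
    Rect activeArray = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
    int regionWidth = activeArray.width() / 2;
    int regionHeight = activeArray.height() / 2;
    MeteringRectangle center = new MeteringRectangle(
            regionWidth / 2, regionHeight / 2, regionWidth, regionHeight,
            MeteringRectangle.METERING_WEIGHT_MAX);
    builder.set(CaptureRequest.CONTROL_AE_REGIONS, new MeteringRectangle[] { center });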
711 712 For devices not supporting android.distortionCorrection.mode control, the coordinate 713 system always follows that of android.sensor.info.activeArraySize, with (0,0) being 714 the top-left pixel in the active pixel array, and 715 (android.sensor.info.activeArraySize.width - 1, 716 android.sensor.info.activeArraySize.height - 1) being the bottom-right pixel in the 717 active pixel array. 718 719 For devices supporting android.distortionCorrection.mode control, the coordinate 720 system depends on the mode being set. 721 When the distortion correction mode is OFF, the coordinate system follows 722 android.sensor.info.preCorrectionActiveArraySize, with 723 `(0, 0)` being the top-left pixel of the pre-correction active array, and 724 (android.sensor.info.preCorrectionActiveArraySize.width - 1, 725 android.sensor.info.preCorrectionActiveArraySize.height - 1) being the bottom-right 726 pixel in the pre-correction active pixel array. 727 When the distortion correction mode is not OFF, the coordinate system follows 728 android.sensor.info.activeArraySize, with 729 `(0, 0)` being the top-left pixel of the active array, and 730 (android.sensor.info.activeArraySize.width - 1, 731 android.sensor.info.activeArraySize.height - 1) being the bottom-right pixel in the 732 active pixel array. 733 734 The weight must be within `[0, 1000]`, and represents a weight 735 for every pixel in the area. This means that a large metering area 736 with the same weight as a smaller area will have more effect in 737 the metering result. Metering areas can partially overlap and the 738 camera device will add the weights in the overlap region. 739 740 The weights are relative to weights of other exposure metering regions, so if only one 741 region is used, all non-zero weights will have the same effect. A region with 0 742 weight is ignored. 743 744 If all regions have 0 weight, then no specific metering area needs to be used by the 745 camera device. 746 747 If the metering region is outside the used android.scaler.cropRegion returned in 748 capture result metadata, the camera device will ignore the sections outside the crop 749 region and output only the intersection rectangle as the metering region in the result 750 metadata. If the region is entirely outside the crop region, it will be ignored and 751 not reported in the result metadata. 752 </details> 753 <ndk_details> 754 The data representation is `int[5 * area_count]`. 755 Every five elements represent a metering region of `(xmin, ymin, xmax, ymax, weight)`. 756 The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and 757 ymax. 758 </ndk_details> 759 <hal_details> 760 The HAL level representation of MeteringRectangle[] is a 761 int[5 * area_count]. 762 Every five elements represent a metering region of 763 (xmin, ymin, xmax, ymax, weight). 764 The rectangle is defined to be inclusive on xmin and ymin, but 765 exclusive on xmax and ymax. 766 HAL must always report metering regions in the coordinate system of pre-correction 767 active array. 
768 </hal_details> 769 <tag id="BC" /> 770 </entry> 771 <entry name="aeTargetFpsRange" type="int32" visibility="public" 772 container="array" typedef="rangeInt" hwlevel="legacy"> 773 <array> 774 <size>2</size> 775 </array> 776 <description>Range over which the auto-exposure routine can 777 adjust the capture frame rate to maintain good 778 exposure.</description> 779 <units>Frames per second (FPS)</units> 780 <range>Any of the entries in android.control.aeAvailableTargetFpsRanges</range> 781 <details>Only constrains auto-exposure (AE) algorithm, not 782 manual control of android.sensor.exposureTime and 783 android.sensor.frameDuration.</details> 784 <tag id="BC" /> 785 </entry> 786 <entry name="aePrecaptureTrigger" type="byte" visibility="public" 787 enum="true" hwlevel="limited"> 788 <enum> 789 <value>IDLE 790 <notes>The trigger is idle.</notes> 791 </value> 792 <value>START 793 <notes>The precapture metering sequence will be started 794 by the camera device. 795 796 The exact effect of the precapture trigger depends on 797 the current AE mode and state.</notes> 798 </value> 799 <value>CANCEL 800 <notes>The camera device will cancel any currently active or completed 801 precapture metering sequence, the auto-exposure routine will return to its 802 initial state.</notes> 803 </value> 804 </enum> 805 <description>Whether the camera device will trigger a precapture 806 metering sequence when it processes this request.</description> 807 <details>This entry is normally set to IDLE, or is not 808 included at all in the request settings. When included and 809 set to START, the camera device will trigger the auto-exposure (AE) 810 precapture metering sequence. 811 812 When set to CANCEL, the camera device will cancel any active 813 precapture metering trigger, and return to its initial AE state. 814 If a precapture metering sequence is already completed, and the camera 815 device has implicitly locked the AE for subsequent still capture, the 816 CANCEL trigger will unlock the AE and return to its initial AE state. 817 818 The precapture sequence should be triggered before starting a 819 high-quality still capture for final metering decisions to 820 be made, and for firing pre-capture flash pulses to estimate 821 scene brightness and required final capture flash power, when 822 the flash is enabled. 823 824 Normally, this entry should be set to START for only a 825 single request, and the application should wait until the 826 sequence completes before starting a new one. 827 828 When a precapture metering sequence is finished, the camera device 829 may lock the auto-exposure routine internally to be able to accurately expose the 830 subsequent still capture image (`android.control.captureIntent == STILL_CAPTURE`). 831 For this case, the AE may not resume normal scan if no subsequent still capture is 832 submitted. To ensure that the AE routine restarts normal scan, the application should 833 submit a request with `android.control.aeLock == true`, followed by a request 834 with `android.control.aeLock == false`, if the application decides not to submit a 835 still capture request after the precapture sequence completes. Alternatively, for 836 API level 23 or newer devices, the CANCEL can be used to unlock the camera device 837 internally locked AE if the application doesn't submit a still capture request after 838 the AE precapture trigger. Note that, the CANCEL was added in API level 23, and must not 839 be used in devices that have earlier API levels. 
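As an illustration, a minimal Java sketch of running the precapture metering sequence from a repeating preview before issuing a still capture; the `previewBuilder`, `resultCallback`, `session`, and `handler` objects are assumed to already exist:

    // Submit one request carrying the START trigger, then keep the repeating
    // preview running with the trigger back at IDLE (sketch; error handling
    // omitted).
    previewBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
            CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_START);
    session.capture(previewBuilder.build(), resultCallback, handler);
    previewBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
            CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);
    session.setRepeatingRequest(previewBuilder.build(), resultCallback, handler);
    // In `resultCallback`, watch CaptureResult.CONTROL_AE_STATE: once it leaves
    // PRECAPTURE (reaching CONVERGED or FLASH_REQUIRED), submit the
    // STILL_CAPTURE request.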
840 841 The exact effect of auto-exposure (AE) precapture trigger 842 depends on the current AE mode and state; see 843 android.control.aeState for AE precapture state transition 844 details. 845 846 On LEGACY-level devices, the precapture trigger is not supported; 847 capturing a high-resolution JPEG image will automatically trigger a 848 precapture sequence before the high-resolution capture, including 849 potentially firing a pre-capture flash. 850 851 Using the precapture trigger and the auto-focus trigger android.control.afTrigger 852 simultaneously is allowed. However, since these triggers often require cooperation between 853 the auto-focus and auto-exposure routines (for example, the flash may need to be enabled for a 854 focus sweep), the camera device may delay acting on a later trigger until the previous 855 trigger has been fully handled. This may lead to longer intervals between the trigger and 856 changes to android.control.aeState indicating the start of the precapture sequence, for 857 example. 858 859 If both the precapture and the auto-focus trigger are activated on the same request, then 860 the camera device will complete them in the optimal order for that device. 861 </details> 862 <hal_details> 863 The HAL must support triggering the AE precapture trigger while an AF trigger is active 864 (and vice versa), or at the same time as the AF trigger. It is acceptable for the HAL to 865 treat these as two consecutive triggers, for example handling the AF trigger and then the 866 AE trigger. Or the HAL may choose to optimize the case with both triggers fired at once, 867 to minimize the latency for converging both focus and exposure/flash usage. 868 </hal_details> 869 <tag id="BC" /> 870 </entry> 871 <entry name="afMode" type="byte" visibility="public" enum="true" 872 hwlevel="legacy"> 873 <enum> 874 <value>OFF 875 <notes>The auto-focus routine does not control the lens; 876 android.lens.focusDistance is controlled by the 877 application.</notes></value> 878 <value>AUTO 879 <notes>Basic automatic focus mode. 880 881 In this mode, the lens does not move unless 882 the autofocus trigger action is called. When that trigger 883 is activated, AF will transition to ACTIVE_SCAN, then to 884 the outcome of the scan (FOCUSED or NOT_FOCUSED). 885 886 Always supported if lens is not fixed focus. 887 888 Use android.lens.info.minimumFocusDistance to determine if lens 889 is fixed-focus. 890 891 Triggering AF_CANCEL resets the lens position to default, 892 and sets the AF state to INACTIVE.</notes></value> 893 <value>MACRO 894 <notes>Close-up focusing mode. 895 896 In this mode, the lens does not move unless the 897 autofocus trigger action is called. When that trigger is 898 activated, AF will transition to ACTIVE_SCAN, then to 899 the outcome of the scan (FOCUSED or NOT_FOCUSED). This 900 mode is optimized for focusing on objects very close to 901 the camera. 902 903 When that trigger is activated, AF will transition to 904 ACTIVE_SCAN, then to the outcome of the scan (FOCUSED or 905 NOT_FOCUSED). Triggering cancel AF resets the lens 906 position to default, and sets the AF state to 907 INACTIVE.</notes></value> 908 <value>CONTINUOUS_VIDEO 909 <notes>In this mode, the AF algorithm modifies the lens 910 position continually to attempt to provide a 911 constantly-in-focus image stream. 912 913 The focusing behavior should be suitable for good quality 914 video recording; typically this means slower focus 915 movement and no overshoots.
When the AF trigger is not 916 involved, the AF algorithm should start in INACTIVE state, 917 and then transition into PASSIVE_SCAN and PASSIVE_FOCUSED 918 states as appropriate. When the AF trigger is activated, 919 the algorithm should immediately transition into 920 AF_FOCUSED or AF_NOT_FOCUSED as appropriate, and lock the 921 lens position until a cancel AF trigger is received. 922 923 Once cancel is received, the algorithm should transition 924 back to INACTIVE and resume passive scan. Note that this 925 behavior is not identical to CONTINUOUS_PICTURE, since an 926 ongoing PASSIVE_SCAN must immediately be 927 canceled.</notes></value> 928 <value>CONTINUOUS_PICTURE 929 <notes>In this mode, the AF algorithm modifies the lens 930 position continually to attempt to provide a 931 constantly-in-focus image stream. 932 933 The focusing behavior should be suitable for still image 934 capture; typically this means focusing as fast as 935 possible. When the AF trigger is not involved, the AF 936 algorithm should start in INACTIVE state, and then 937 transition into PASSIVE_SCAN and PASSIVE_FOCUSED states as 938 appropriate as it attempts to maintain focus. When the AF 939 trigger is activated, the algorithm should finish its 940 PASSIVE_SCAN if active, and then transition into 941 AF_FOCUSED or AF_NOT_FOCUSED as appropriate, and lock the 942 lens position until a cancel AF trigger is received. 943 944 When the AF cancel trigger is activated, the algorithm 945 should transition back to INACTIVE and then act as if it 946 has just been started.</notes></value> 947 <value>EDOF 948 <notes>Extended depth of field (digital focus) mode. 949 950 The camera device will produce images with an extended 951 depth of field automatically; no special focusing 952 operations need to be done before taking a picture. 953 954 AF triggers are ignored, and the AF state will always be 955 INACTIVE.</notes></value> 956 </enum> 957 <description>Whether auto-focus (AF) is currently enabled, and what 958 mode it is set to.</description> 959 <range>android.control.afAvailableModes</range> 960 <details>Only effective if android.control.mode = AUTO and the lens is not fixed focus 961 (i.e. `android.lens.info.minimumFocusDistance > 0`). Also note that 962 when android.control.aeMode is OFF, the behavior of AF is device 963 dependent. It is recommended to lock AF by using android.control.afTrigger before 964 setting android.control.aeMode to OFF, or set AF mode to OFF when AE is OFF. 965 966 If the lens is controlled by the camera device auto-focus algorithm, 967 the camera device will report the current AF status in android.control.afState 968 in result metadata.</details> 969 <hal_details> 970 When afMode is AUTO or MACRO, the lens must not move until an AF trigger is sent in a 971 request (android.control.afTrigger `==` START). After an AF trigger, the afState will end 972 up with either FOCUSED_LOCKED or NOT_FOCUSED_LOCKED state (see 973 android.control.afState for detailed state transitions), which indicates that the lens is 974 locked and will not move. If camera movement (e.g. tilting camera) causes the lens to move 975 after the lens is locked, the HAL must compensate this movement appropriately such that 976 the same focal plane remains in focus. 977 978 When afMode is one of the continuous auto focus modes, the HAL is free to start a AF 979 scan whenever it's not locked. 
When the lens is locked after an AF trigger 980 (see android.control.afState for detailed state transitions), the HAL should maintain the 981 same lock behavior as above. 982 983 When afMode is OFF, the application controls focus manually. The accuracy of the 984 focus distance control depends on the android.lens.info.focusDistanceCalibration. 985 However, the lens must not move regardless of the camera movement for any focus distance 986 manual control. 987 988 To put this in concrete terms, if the camera has lens elements which may move based on 989 camera orientation or motion (e.g. due to gravity), then the HAL must drive the lens to 990 remain in a fixed position invariant to the camera's orientation or motion, for example, 991 by using accelerometer measurements in the lens control logic. This is a typical issue 992 that will arise on camera modules with open-loop VCMs. 993 </hal_details> 994 <tag id="BC" /> 995 </entry> 996 <entry name="afRegions" type="int32" visibility="public" 997 optional="true" container="array" typedef="meteringRectangle"> 998 <array> 999 <size>5</size> 1000 <size>area_count</size> 1001 </array> 1002 <description>List of metering areas to use for auto-focus.</description> 1003 <units>Pixel coordinates within android.sensor.info.activeArraySize or 1004 android.sensor.info.preCorrectionActiveArraySize depending on 1005 distortion correction capability and mode</units> 1006 <range>Coordinates must be between `[(0,0), (width, height))` of 1007 android.sensor.info.activeArraySize or android.sensor.info.preCorrectionActiveArraySize 1008 depending on distortion correction capability and mode</range> 1009 <details> 1010 Not available if android.control.maxRegionsAf is 0. 1011 Otherwise will always be present. 1012 1013 The maximum number of focus areas supported by the device is determined by the value 1014 of android.control.maxRegionsAf. 1015 1016 1017 For devices not supporting android.distortionCorrection.mode control, the coordinate 1018 system always follows that of android.sensor.info.activeArraySize, with (0,0) being 1019 the top-left pixel in the active pixel array, and 1020 (android.sensor.info.activeArraySize.width - 1, 1021 android.sensor.info.activeArraySize.height - 1) being the bottom-right pixel in the 1022 active pixel array. 1023 1024 For devices supporting android.distortionCorrection.mode control, the coordinate 1025 system depends on the mode being set. 1026 When the distortion correction mode is OFF, the coordinate system follows 1027 android.sensor.info.preCorrectionActiveArraySize, with 1028 `(0, 0)` being the top-left pixel of the pre-correction active array, and 1029 (android.sensor.info.preCorrectionActiveArraySize.width - 1, 1030 android.sensor.info.preCorrectionActiveArraySize.height - 1) being the bottom-right 1031 pixel in the pre-correction active pixel array. 1032 When the distortion correction mode is not OFF, the coordinate system follows 1033 android.sensor.info.activeArraySize, with 1034 `(0, 0)` being the top-left pixel of the active array, and 1035 (android.sensor.info.activeArraySize.width - 1, 1036 android.sensor.info.activeArraySize.height - 1) being the bottom-right pixel in the 1037 active pixel array. 1038 1039 The weight must be within `[0, 1000]`, and represents a weight 1040 for every pixel in the area. This means that a large metering area 1041 with the same weight as a smaller area will have more effect in 1042 the metering result. 
Metering areas can partially overlap and the 1043 camera device will add the weights in the overlap region. 1044 1045 The weights are relative to weights of other metering regions, so if only one region 1046 is used, all non-zero weights will have the same effect. A region with 0 weight is 1047 ignored. 1048 1049 If all regions have 0 weight, then no specific metering area needs to be used by the 1050 camera device. The capture result will either be a zero weight region as well, or 1051 the region selected by the camera device as the focus area of interest. 1052 1053 If the metering region is outside the used android.scaler.cropRegion returned in 1054 capture result metadata, the camera device will ignore the sections outside the crop 1055 region and output only the intersection rectangle as the metering region in the result 1056 metadata. If the region is entirely outside the crop region, it will be ignored and 1057 not reported in the result metadata. 1058 </details> 1059 <ndk_details> 1060 The data representation is `int[5 * area_count]`. 1061 Every five elements represent a metering region of `(xmin, ymin, xmax, ymax, weight)`. 1062 The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and 1063 ymax. 1064 </ndk_details> 1065 <hal_details> 1066 The HAL level representation of MeteringRectangle[] is a 1067 int[5 * area_count]. 1068 Every five elements represent a metering region of 1069 (xmin, ymin, xmax, ymax, weight). 1070 The rectangle is defined to be inclusive on xmin and ymin, but 1071 exclusive on xmax and ymax. 1072 HAL must always report metering regions in the coordinate system of pre-correction 1073 active array. 1074 </hal_details> 1075 <tag id="BC" /> 1076 </entry> 1077 <entry name="afTrigger" type="byte" visibility="public" enum="true" 1078 hwlevel="legacy"> 1079 <enum> 1080 <value>IDLE 1081 <notes>The trigger is idle.</notes> 1082 </value> 1083 <value>START 1084 <notes>Autofocus will trigger now.</notes> 1085 </value> 1086 <value>CANCEL 1087 <notes>Autofocus will return to its initial 1088 state, and cancel any currently active trigger.</notes> 1089 </value> 1090 </enum> 1091 <description> 1092 Whether the camera device will trigger autofocus for this request. 1093 </description> 1094 <details>This entry is normally set to IDLE, or is not 1095 included at all in the request settings. 1096 1097 When included and set to START, the camera device will trigger the 1098 autofocus algorithm. If autofocus is disabled, this trigger has no effect. 1099 1100 When set to CANCEL, the camera device will cancel any active trigger, 1101 and return to its initial AF state. 1102 1103 Generally, applications should set this entry to START or CANCEL for only a 1104 single capture, and then return it to IDLE (or not set at all). Specifying 1105 START for multiple captures in a row means restarting the AF operation over 1106 and over again. 1107 1108 See android.control.afState for what the trigger means for each AF mode. 1109 1110 Using the autofocus trigger and the precapture trigger android.control.aePrecaptureTrigger 1111 simultaneously is allowed. However, since these triggers often require cooperation between 1112 the auto-focus and auto-exposure routines (for example, the flash may need to be enabled for a 1113 focus sweep), the camera device may delay acting on a later trigger until the previous 1114 trigger has been fully handled. This may lead to longer intervals between the trigger and 1115 changes to android.control.afState, for example.
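As an illustration, a minimal Java sketch of a single AF scan in AUTO mode, following the single-capture trigger pattern described above; the `previewBuilder`, `afCallback`, `session`, and `handler` objects are assumed to already exist:

    // Send one request carrying the START trigger, then keep the repeating
    // preview running with the trigger back at IDLE (sketch; error handling
    // omitted).
    previewBuilder.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_AUTO);
    previewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
    session.capture(previewBuilder.build(), afCallback, handler);
    previewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
    session.setRepeatingRequest(previewBuilder.build(), afCallback, handler);
    // In `afCallback`, monitor CaptureResult.CONTROL_AF_STATE in subsequent
    // preview results until it reaches FOCUSED_LOCKED or NOT_FOCUSED_LOCKED,
    // then proceed (and send the CANCEL trigger when focus should be released).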
1116 </details> 1117 <hal_details> 1118 The HAL must support triggering the AF trigger while an AE precapture trigger is active 1119 (and vice versa), or at the same time as the AE trigger. It is acceptable for the HAL to 1120 treat these as two consecutive triggers, for example handling the AF trigger and then the 1121 AE trigger. Or the HAL may choose to optimize the case with both triggers fired at once, 1122 to minimize the latency for converging both focus and exposure/flash usage. 1123 </hal_details> 1124 <tag id="BC" /> 1125 </entry> 1126 <entry name="awbLock" type="byte" visibility="public" enum="true" 1127 typedef="boolean" hwlevel="legacy"> 1128 <enum> 1129 <value>OFF 1130 <notes>Auto-white balance lock is disabled; the AWB 1131 algorithm is free to update its parameters if in AUTO 1132 mode.</notes></value> 1133 <value>ON 1134 <notes>Auto-white balance lock is enabled; the AWB 1135 algorithm will not update its parameters while the lock 1136 is active.</notes></value> 1137 </enum> 1138 <description>Whether auto-white balance (AWB) is currently locked to its 1139 latest calculated values.</description> 1140 <details> 1141 When set to `true` (ON), the AWB algorithm is locked to its latest parameters, 1142 and will not change color balance settings until the lock is set to `false` (OFF). 1143 1144 Since the camera device has a pipeline of in-flight requests, the settings that 1145 get locked do not necessarily correspond to the settings that were present in the 1146 latest capture result received from the camera device, since additional captures 1147 and AWB updates may have occurred even before the result was sent out. If an 1148 application is switching between automatic and manual control and wishes to eliminate 1149 any flicker during the switch, the following procedure is recommended: 1150 1151 1. Starting in auto-AWB mode: 1152 2. Lock AWB 1153 3. Wait for the first result to be output that has the AWB locked 1154 4. Copy AWB settings from that result into a request, set the request to manual AWB 1155 5. Submit the capture request, proceed to run manual AWB as desired. 1156 1157 Note that AWB lock is only meaningful when 1158 android.control.awbMode is in the AUTO mode; in other modes, 1159 AWB is already fixed to a specific setting. 1160 1161 Some LEGACY devices may not support ON; the value is then overridden to OFF. 1162 </details> 1163 <tag id="BC" /> 1164 </entry> 1165 <entry name="awbMode" type="byte" visibility="public" enum="true" 1166 hwlevel="legacy"> 1167 <enum> 1168 <value>OFF 1169 <notes> 1170 The camera device's auto-white balance routine is disabled. 1171 1172 The application-selected color transform matrix 1173 (android.colorCorrection.transform) and gains 1174 (android.colorCorrection.gains) are used by the camera 1175 device for manual white balance control. 1176 </notes> 1177 </value> 1178 <value>AUTO 1179 <notes> 1180 The camera device's auto-white balance routine is active. 1181 1182 The application's values for android.colorCorrection.transform 1183 and android.colorCorrection.gains are ignored. 1184 For devices that support the MANUAL_POST_PROCESSING capability, the 1185 values used by the camera device for the transform and gains 1186 will be available in the capture result for this request. 1187 </notes> 1188 </value> 1189 <value>INCANDESCENT 1190 <notes> 1191 The camera device's auto-white balance routine is disabled; 1192 the camera device uses incandescent light as the assumed scene 1193 illumination for white balance. 
1194 1195 While the exact white balance transforms are up to the 1196 camera device, they will approximately match the CIE 1197 standard illuminant A. 1198 1199 The application's values for android.colorCorrection.transform 1200 and android.colorCorrection.gains are ignored. 1201 For devices that support the MANUAL_POST_PROCESSING capability, the 1202 values used by the camera device for the transform and gains 1203 will be available in the capture result for this request. 1204 </notes> 1205 </value> 1206 <value>FLUORESCENT 1207 <notes> 1208 The camera device's auto-white balance routine is disabled; 1209 the camera device uses fluorescent light as the assumed scene 1210 illumination for white balance. 1211 1212 While the exact white balance transforms are up to the 1213 camera device, they will approximately match the CIE 1214 standard illuminant F2. 1215 1216 The application's values for android.colorCorrection.transform 1217 and android.colorCorrection.gains are ignored. 1218 For devices that support the MANUAL_POST_PROCESSING capability, the 1219 values used by the camera device for the transform and gains 1220 will be available in the capture result for this request. 1221 </notes> 1222 </value> 1223 <value>WARM_FLUORESCENT 1224 <notes> 1225 The camera device's auto-white balance routine is disabled; 1226 the camera device uses warm fluorescent light as the assumed scene 1227 illumination for white balance. 1228 1229 While the exact white balance transforms are up to the 1230 camera device, they will approximately match the CIE 1231 standard illuminant F4. 1232 1233 The application's values for android.colorCorrection.transform 1234 and android.colorCorrection.gains are ignored. 1235 For devices that support the MANUAL_POST_PROCESSING capability, the 1236 values used by the camera device for the transform and gains 1237 will be available in the capture result for this request. 1238 </notes> 1239 </value> 1240 <value>DAYLIGHT 1241 <notes> 1242 The camera device's auto-white balance routine is disabled; 1243 the camera device uses daylight light as the assumed scene 1244 illumination for white balance. 1245 1246 While the exact white balance transforms are up to the 1247 camera device, they will approximately match the CIE 1248 standard illuminant D65. 1249 1250 The application's values for android.colorCorrection.transform 1251 and android.colorCorrection.gains are ignored. 1252 For devices that support the MANUAL_POST_PROCESSING capability, the 1253 values used by the camera device for the transform and gains 1254 will be available in the capture result for this request. 1255 </notes> 1256 </value> 1257 <value>CLOUDY_DAYLIGHT 1258 <notes> 1259 The camera device's auto-white balance routine is disabled; 1260 the camera device uses cloudy daylight light as the assumed scene 1261 illumination for white balance. 1262 1263 The application's values for android.colorCorrection.transform 1264 and android.colorCorrection.gains are ignored. 1265 For devices that support the MANUAL_POST_PROCESSING capability, the 1266 values used by the camera device for the transform and gains 1267 will be available in the capture result for this request. 1268 </notes> 1269 </value> 1270 <value>TWILIGHT 1271 <notes> 1272 The camera device's auto-white balance routine is disabled; 1273 the camera device uses twilight light as the assumed scene 1274 illumination for white balance. 1275 1276 The application's values for android.colorCorrection.transform 1277 and android.colorCorrection.gains are ignored. 
1278 For devices that support the MANUAL_POST_PROCESSING capability, the 1279 values used by the camera device for the transform and gains 1280 will be available in the capture result for this request. 1281 </notes> 1282 </value> 1283 <value>SHADE 1284 <notes> 1285 The camera device's auto-white balance routine is disabled; 1286 the camera device uses shade light as the assumed scene 1287 illumination for white balance. 1288 1289 The application's values for android.colorCorrection.transform 1290 and android.colorCorrection.gains are ignored. 1291 For devices that support the MANUAL_POST_PROCESSING capability, the 1292 values used by the camera device for the transform and gains 1293 will be available in the capture result for this request. 1294 </notes> 1295 </value> 1296 </enum> 1297 <description>Whether auto-white balance (AWB) is currently setting the color 1298 transform fields, and what its illumination target 1299 is.</description> 1300 <range>android.control.awbAvailableModes</range> 1301 <details> 1302 This control is only effective if android.control.mode is AUTO. 1303 1304 When set to the ON mode, the camera device's auto-white balance 1305 routine is enabled, overriding the application's selected 1306 android.colorCorrection.transform, android.colorCorrection.gains and 1307 android.colorCorrection.mode. Note that when android.control.aeMode 1308 is OFF, the behavior of AWB is device dependent. It is recommended to 1309 also set AWB mode to OFF or lock AWB by using android.control.awbLock before 1310 setting AE mode to OFF. 1311 1312 When set to the OFF mode, the camera device's auto-white balance 1313 routine is disabled. The application manually controls the white 1314 balance by android.colorCorrection.transform, android.colorCorrection.gains 1315 and android.colorCorrection.mode. 1316 1317 When set to any other modes, the camera device's auto-white 1318 balance routine is disabled. The camera device uses each 1319 particular illumination target for white balance 1320 adjustment. The application's values for 1321 android.colorCorrection.transform, 1322 android.colorCorrection.gains and 1323 android.colorCorrection.mode are ignored. 1324 </details> 1325 <tag id="BC" /> 1326 </entry> 1327 <entry name="awbRegions" type="int32" visibility="public" 1328 optional="true" container="array" typedef="meteringRectangle"> 1329 <array> 1330 <size>5</size> 1331 <size>area_count</size> 1332 </array> 1333 <description>List of metering areas to use for auto-white-balance illuminant 1334 estimation.</description> 1335 <units>Pixel coordinates within android.sensor.info.activeArraySize or 1336 android.sensor.info.preCorrectionActiveArraySize depending on 1337 distortion correction capability and mode</units> 1338 <range>Coordinates must be between `[(0,0), (width, height))` of 1339 android.sensor.info.activeArraySize or android.sensor.info.preCorrectionActiveArraySize 1340 depending on distortion correction capability and mode</range> 1341 <details> 1342 Not available if android.control.maxRegionsAwb is 0. 1343 Otherwise will always be present. 1344 1345 The maximum number of regions supported by the device is determined by the value 1346 of android.control.maxRegionsAwb.
1347 1348 For devices not supporting android.distortionCorrection.mode control, the coordinate 1349 system always follows that of android.sensor.info.activeArraySize, with (0,0) being 1350 the top-left pixel in the active pixel array, and 1351 (android.sensor.info.activeArraySize.width - 1, 1352 android.sensor.info.activeArraySize.height - 1) being the bottom-right pixel in the 1353 active pixel array. 1354 1355 For devices supporting android.distortionCorrection.mode control, the coordinate 1356 system depends on the mode being set. 1357 When the distortion correction mode is OFF, the coordinate system follows 1358 android.sensor.info.preCorrectionActiveArraySize, with 1359 `(0, 0)` being the top-left pixel of the pre-correction active array, and 1360 (android.sensor.info.preCorrectionActiveArraySize.width - 1, 1361 android.sensor.info.preCorrectionActiveArraySize.height - 1) being the bottom-right 1362 pixel in the pre-correction active pixel array. 1363 When the distortion correction mode is not OFF, the coordinate system follows 1364 android.sensor.info.activeArraySize, with 1365 `(0, 0)` being the top-left pixel of the active array, and 1366 (android.sensor.info.activeArraySize.width - 1, 1367 android.sensor.info.activeArraySize.height - 1) being the bottom-right pixel in the 1368 active pixel array. 1369 1370 The weight must range from 0 to 1000, and represents a weight 1371 for every pixel in the area. This means that a large metering area 1372 with the same weight as a smaller area will have more effect in 1373 the metering result. Metering areas can partially overlap and the 1374 camera device will add the weights in the overlap region. 1375 1376 The weights are relative to weights of other white balance metering regions, so if 1377 only one region is used, all non-zero weights will have the same effect. A region with 1378 0 weight is ignored. 1379 1380 If all regions have 0 weight, then no specific metering area needs to be used by the 1381 camera device. 1382 1383 If the metering region is outside the used android.scaler.cropRegion returned in 1384 capture result metadata, the camera device will ignore the sections outside the crop 1385 region and output only the intersection rectangle as the metering region in the result 1386 metadata. If the region is entirely outside the crop region, it will be ignored and 1387 not reported in the result metadata. 1388 </details> 1389 <ndk_details> 1390 The data representation is `int[5 * area_count]`. 1391 Every five elements represent a metering region of `(xmin, ymin, xmax, ymax, weight)`. 1392 The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and 1393 ymax. 1394 </ndk_details> 1395 <hal_details> 1396 The HAL level representation of MeteringRectangle[] is a 1397 int[5 * area_count]. 1398 Every five elements represent a metering region of 1399 (xmin, ymin, xmax, ymax, weight). 1400 The rectangle is defined to be inclusive on xmin and ymin, but 1401 exclusive on xmax and ymax. 1402 HAL must always report metering regions in the coordinate system of pre-correction 1403 active array. 1404 </hal_details> 1405 <tag id="BC" /> 1406 </entry> 1407 <entry name="captureIntent" type="byte" visibility="public" enum="true" 1408 hwlevel="legacy"> 1409 <enum> 1410 <value>CUSTOM 1411 <notes>The goal of this request doesn't fall into the other 1412 categories. The camera device will default to preview-like 1413 behavior.</notes></value> 1414 <value>PREVIEW 1415 <notes>This request is for a preview-like use case. 
1416 1417 The precapture trigger may be used to start off a metering 1418 w/flash sequence. 1419 </notes></value> 1420 <value>STILL_CAPTURE 1421 <notes>This request is for a still capture-type 1422 use case. 1423 1424 If the flash unit is under automatic control, it may fire as needed. 1425 </notes></value> 1426 <value>VIDEO_RECORD 1427 <notes>This request is for a video recording 1428 use case.</notes></value> 1429 <value>VIDEO_SNAPSHOT 1430 <notes>This request is for a video snapshot (still 1431 image while recording video) use case. 1432 1433 The camera device should take the highest-quality image 1434 possible (given the other settings) without disrupting the 1435 frame rate of video recording. </notes></value> 1436 <value>ZERO_SHUTTER_LAG 1437 <notes>This request is for a ZSL usecase; the 1438 application will stream full-resolution images and 1439 reprocess one or several later for a final 1440 capture. 1441 </notes></value> 1442 <value>MANUAL 1443 <notes>This request is for manual capture use case where 1444 the applications want to directly control the capture parameters. 1445 1446 For example, the application may wish to manually control 1447 android.sensor.exposureTime, android.sensor.sensitivity, etc. 1448 </notes></value> 1449 <value hal_version="3.3">MOTION_TRACKING 1450 <notes>This request is for a motion tracking use case, where 1451 the application will use camera and inertial sensor data to 1452 locate and track objects in the world. 1453 1454 The camera device auto-exposure routine will limit the exposure time 1455 of the camera to no more than 20 milliseconds, to minimize motion blur. 1456 </notes></value> 1457 </enum> 1458 <description>Information to the camera device 3A (auto-exposure, 1459 auto-focus, auto-white balance) routines about the purpose 1460 of this capture, to help the camera device to decide optimal 3A 1461 strategy.</description> 1462 <details>This control (except for MANUAL) is only effective if 1463 `android.control.mode != OFF` and any 3A routine is active. 1464 1465 All intents are supported by all devices, except that: 1466 * ZERO_SHUTTER_LAG will be supported if android.request.availableCapabilities contains 1467 PRIVATE_REPROCESSING or YUV_REPROCESSING. 1468 * MANUAL will be supported if android.request.availableCapabilities contains 1469 MANUAL_SENSOR. 1470 * MOTION_TRACKING will be supported if android.request.availableCapabilities contains 1471 MOTION_TRACKING. 1472 </details> 1473 <tag id="BC" /> 1474 </entry> 1475 <entry name="effectMode" type="byte" visibility="public" enum="true" 1476 hwlevel="legacy"> 1477 <enum> 1478 <value>OFF 1479 <notes> 1480 No color effect will be applied. 1481 </notes> 1482 </value> 1483 <value optional="true">MONO 1484 <notes> 1485 A "monocolor" effect where the image is mapped into 1486 a single color. 1487 1488 This will typically be grayscale. 1489 </notes> 1490 </value> 1491 <value optional="true">NEGATIVE 1492 <notes> 1493 A "photo-negative" effect where the image's colors 1494 are inverted. 1495 </notes> 1496 </value> 1497 <value optional="true">SOLARIZE 1498 <notes> 1499 A "solarisation" effect (Sabattier effect) where the 1500 image is wholly or partially reversed in 1501 tone. 1502 </notes> 1503 </value> 1504 <value optional="true">SEPIA 1505 <notes> 1506 A "sepia" effect where the image is mapped into warm 1507 gray, red, and brown tones. 
1508 </notes> 1509 </value> 1510 <value optional="true">POSTERIZE 1511 <notes> 1512 A "posterization" effect where the image uses 1513 discrete regions of tone rather than a continuous 1514 gradient of tones. 1515 </notes> 1516 </value> 1517 <value optional="true">WHITEBOARD 1518 <notes> 1519 A "whiteboard" effect where the image is typically displayed 1520 as regions of white, with black or grey details. 1521 </notes> 1522 </value> 1523 <value optional="true">BLACKBOARD 1524 <notes> 1525 A "blackboard" effect where the image is typically displayed 1526 as regions of black, with white or grey details. 1527 </notes> 1528 </value> 1529 <value optional="true">AQUA 1530 <notes> 1531 An "aqua" effect where a blue hue is added to the image. 1532 </notes> 1533 </value> 1534 </enum> 1535 <description>A special color effect to apply.</description> 1536 <range>android.control.availableEffects</range> 1537 <details> 1538 When this mode is set, a color effect will be applied 1539 to images produced by the camera device. The interpretation 1540 and implementation of these color effects is left to the 1541 implementor of the camera device, and should not be 1542 depended on to be consistent (or present) across all 1543 devices. 1544 </details> 1545 <tag id="BC" /> 1546 </entry> 1547 <entry name="mode" type="byte" visibility="public" enum="true" 1548 hwlevel="legacy"> 1549 <enum> 1550 <value>OFF 1551 <notes>Full application control of pipeline. 1552 1553 All control by the device's metering and focusing (3A) 1554 routines is disabled, and no other settings in 1555 android.control.* have any effect, except that 1556 android.control.captureIntent may be used by the camera 1557 device to select post-processing values for processing 1558 blocks that do not allow for manual control, or are not 1559 exposed by the camera API. 1560 1561 However, the camera device's 3A routines may continue to 1562 collect statistics and update their internal state so that 1563 when control is switched to AUTO mode, good control values 1564 can be immediately applied. 1565 </notes></value> 1566 <value>AUTO 1567 <notes>Use settings for each individual 3A routine. 1568 1569 Manual control of capture parameters is disabled. All 1570 controls in android.control.* besides sceneMode take 1571 effect.</notes></value> 1572 <value optional="true">USE_SCENE_MODE 1573 <notes>Use a specific scene mode. 1574 1575 Enabling this disables control.aeMode, control.awbMode and 1576 control.afMode controls; the camera device will ignore 1577 those settings while USE_SCENE_MODE is active (except for 1578 FACE_PRIORITY scene mode). Other control entries are still active. 1579 This setting can only be used if scene mode is supported (i.e. 1580 android.control.availableSceneModes 1581 contain some modes other than DISABLED).</notes></value> 1582 <value optional="true">OFF_KEEP_STATE 1583 <notes>Same as OFF mode, except that this capture will not be 1584 used by camera device background auto-exposure, auto-white balance and 1585 auto-focus algorithms (3A) to update their statistics. 1586 1587 Specifically, the 3A routines are locked to the last 1588 values set from a request with AUTO, OFF, or 1589 USE_SCENE_MODE, and any statistics or state updates 1590 collected from manual captures with OFF_KEEP_STATE will be 1591 discarded by the camera device. 
1592 </notes></value> 1593 </enum> 1594 <description>Overall mode of 3A (auto-exposure, auto-white-balance, auto-focus) control 1595 routines.</description> 1596 <range>android.control.availableModes</range> 1597 <details> 1598 This is a top-level 3A control switch. When set to OFF, all 3A control 1599 by the camera device is disabled. The application must set the fields for 1600 capture parameters itself. 1601 1602 When set to AUTO, the individual algorithm controls in 1603 android.control.* are in effect, such as android.control.afMode. 1604 1605 When set to USE_SCENE_MODE, the individual controls in 1606 android.control.* are mostly disabled, and the camera device 1607 implements one of the scene mode settings (such as ACTION, 1608 SUNSET, or PARTY) as it wishes. The camera device scene mode 1609 3A settings are provided by {@link 1610 android.hardware.camera2.CaptureResult|ACameraCaptureSession_captureCallback_result 1611 capture results}. 1612 1613 When set to OFF_KEEP_STATE, it is similar to OFF mode, the only difference 1614 is that this frame will not be used by camera device background 3A statistics 1615 update, as if this frame is never captured. This mode can be used in the scenario 1616 where the application doesn't want a 3A manual control capture to affect 1617 the subsequent auto 3A capture results. 1618 </details> 1619 <tag id="BC" /> 1620 </entry> 1621 <entry name="sceneMode" type="byte" visibility="public" enum="true" 1622 hwlevel="legacy"> 1623 <enum> 1624 <value id="0">DISABLED 1625 <notes> 1626 Indicates that no scene modes are set for a given capture request. 1627 </notes> 1628 </value> 1629 <value>FACE_PRIORITY 1630 <notes>If face detection support exists, use face 1631 detection data for auto-focus, auto-white balance, and 1632 auto-exposure routines. 1633 1634 If face detection statistics are disabled 1635 (i.e. android.statistics.faceDetectMode is set to OFF), 1636 this should still operate correctly (but will not return 1637 face detection statistics to the framework). 1638 1639 Unlike the other scene modes, android.control.aeMode, 1640 android.control.awbMode, and android.control.afMode 1641 remain active when FACE_PRIORITY is set. 1642 </notes> 1643 </value> 1644 <value optional="true">ACTION 1645 <notes> 1646 Optimized for photos of quickly moving objects. 1647 1648 Similar to SPORTS. 1649 </notes> 1650 </value> 1651 <value optional="true">PORTRAIT 1652 <notes> 1653 Optimized for still photos of people. 1654 </notes> 1655 </value> 1656 <value optional="true">LANDSCAPE 1657 <notes> 1658 Optimized for photos of distant macroscopic objects. 1659 </notes> 1660 </value> 1661 <value optional="true">NIGHT 1662 <notes> 1663 Optimized for low-light settings. 1664 </notes> 1665 </value> 1666 <value optional="true">NIGHT_PORTRAIT 1667 <notes> 1668 Optimized for still photos of people in low-light 1669 settings. 1670 </notes> 1671 </value> 1672 <value optional="true">THEATRE 1673 <notes> 1674 Optimized for dim, indoor settings where flash must 1675 remain off. 1676 </notes> 1677 </value> 1678 <value optional="true">BEACH 1679 <notes> 1680 Optimized for bright, outdoor beach settings. 1681 </notes> 1682 </value> 1683 <value optional="true">SNOW 1684 <notes> 1685 Optimized for bright, outdoor settings containing snow. 1686 </notes> 1687 </value> 1688 <value optional="true">SUNSET 1689 <notes> 1690 Optimized for scenes of the setting sun. 
</notes>
</value>
<value optional="true">STEADYPHOTO
<notes>
Optimized to avoid blurry photos due to small amounts of
device motion (for example: due to hand shake).
</notes>
</value>
<value optional="true">FIREWORKS
<notes>
Optimized for nighttime photos of fireworks.
</notes>
</value>
<value optional="true">SPORTS
<notes>
Optimized for photos of quickly moving people.

Similar to ACTION.
</notes>
</value>
<value optional="true">PARTY
<notes>
Optimized for dim, indoor settings with multiple moving
people.
</notes>
</value>
<value optional="true">CANDLELIGHT
<notes>
Optimized for dim settings where the main light source
is a candle.
</notes>
</value>
<value optional="true">BARCODE
<notes>
Optimized for accurately capturing a photo of a barcode
for use by camera applications that wish to read the
barcode value.
</notes>
</value>
<value deprecated="true" optional="true" ndk_hidden="true">HIGH_SPEED_VIDEO
<notes>
This is deprecated; please use {@link
android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}
and {@link
android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}
for high speed video recording.

Optimized for high speed video recording (frame rate >=60fps) use case.

The supported high speed video sizes and fps ranges are specified in
android.control.availableHighSpeedVideoConfigurations. To get desired
output frame rates, the application is only allowed to select video size
and fps range combinations listed in this static metadata. The fps range
can be controlled via android.control.aeTargetFpsRange.

In this mode, the camera device will override aeMode, awbMode, and afMode to
ON, ON, and CONTINUOUS_VIDEO, respectively. All post-processing block mode
controls will be overridden to be FAST. Therefore, no manual control of capture
and post-processing parameters is possible. All other controls operate the
same as when android.control.mode == AUTO. This means that all other
android.control.* fields continue to work, such as

* android.control.aeTargetFpsRange
* android.control.aeExposureCompensation
* android.control.aeLock
* android.control.awbLock
* android.control.effectMode
* android.control.aeRegions
* android.control.afRegions
* android.control.awbRegions
* android.control.afTrigger
* android.control.aePrecaptureTrigger

Outside of android.control.*, the following controls will work:

* android.flash.mode (automatic flash for still capture will not work since aeMode is ON)
* android.lens.opticalStabilizationMode (if it is supported)
* android.scaler.cropRegion
* android.statistics.faceDetectMode

For the high speed recording use case, the actual maximum supported frame rate may
be lower than what the camera can output, depending on the destination Surfaces for
the image data. For example, if the destination surface is from a video encoder,
the application needs to check whether the video encoder is capable of supporting the
high frame rate for a given video size, or it will end up with a lower recording
frame rate.
If the destination surface is from a preview window, the preview frame
rate will be bounded by the screen refresh rate.

The camera device will only support up to 2 output high speed streams
(processed non-stalling format defined in android.request.maxNumOutputStreams)
in this mode. This control will be effective only if all of the below conditions are true:

* The application created no more than maxNumHighSpeedStreams processed non-stalling
format output streams, where maxNumHighSpeedStreams is calculated as
min(2, android.request.maxNumOutputStreams[Processed (but not-stalling)]).
* The stream sizes are selected from the sizes reported by
android.control.availableHighSpeedVideoConfigurations.
* No processed non-stalling or raw streams are configured.

When the above conditions are NOT satisfied, the controls of this mode and
android.control.aeTargetFpsRange will be ignored by the camera device,
the camera device will fall back to android.control.mode `==` AUTO,
and the returned capture result metadata will give the fps range chosen
by the camera device.

Switching into or out of this mode may trigger some camera ISP/sensor
reconfigurations, which may introduce extra latency. It is recommended that
the application avoid unnecessary scene mode switches as much as possible.
</notes>
</value>
<value optional="true">HDR
<notes>
Turn on a device-specific high dynamic range (HDR) mode.

In this scene mode, the camera device captures images
that keep a larger range of scene illumination levels
visible in the final image. For example, when taking a
picture of an object in front of a bright window, both
the object and the scene through the window may be
visible when using HDR mode, while in normal AUTO mode,
one or the other may be poorly exposed. As a tradeoff,
HDR mode generally takes much longer to capture a single
image, has no user control, and may have other artifacts
depending on the HDR method used.

Therefore, HDR captures operate at a much slower rate
than regular captures.

In this mode, on LIMITED or FULL devices, when a request
is made with an android.control.captureIntent of
STILL_CAPTURE, the camera device will capture an image
using a high dynamic range capture technique. On LEGACY
devices, captures that target a JPEG-format output will
be captured with HDR, and the capture intent is not
relevant.

The HDR capture may involve the device capturing a burst
of images internally and combining them into one, or it
may involve the device using specialized high dynamic
range capture hardware. In all cases, a single image is
produced in response to a capture request submitted
while in HDR mode.

Since substantial post-processing is generally needed to
produce an HDR image, only YUV, PRIVATE, and JPEG
outputs are supported for LIMITED/FULL device HDR
captures, and only JPEG outputs are supported for LEGACY
HDR captures. Using a RAW output for HDR capture is not
supported.

Some devices may also support always-on HDR, which
applies HDR processing at full frame rate. For these
devices, intents other than STILL_CAPTURE will also
produce an HDR output with no frame rate impact compared
to normal operation, though the quality may be lower
than for STILL_CAPTURE intents.
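As an illustrative sketch only (assuming a CaptureRequest.Builder named `builder` and that
HDR is listed in android.control.availableSceneModes; this is not part of the formal
definition of this value), a typical device HDR still capture request would be built as:

    // Sketch: request a device HDR still capture via the scene mode mechanism.
    builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_USE_SCENE_MODE);
    builder.set(CaptureRequest.CONTROL_SCENE_MODE, CameraMetadata.CONTROL_SCENE_MODE_HDR);
    builder.set(CaptureRequest.CONTROL_CAPTURE_INTENT,
            CameraMetadata.CONTROL_CAPTURE_INTENT_STILL_CAPTURE);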
If SCENE_MODE_HDR is used with unsupported output types
or capture intents, the images captured will be as if
the SCENE_MODE was not enabled at all.
</notes>
</value>
<value optional="true" hidden="true">FACE_PRIORITY_LOW_LIGHT
<notes>Same as FACE_PRIORITY scene mode, except that the camera
device will choose higher sensitivity values (android.sensor.sensitivity)
under low light conditions.

The camera device may be tuned to expose the images in a reduced
sensitivity range to produce the best quality images. For example,
if the android.sensor.info.sensitivityRange gives a range of [100, 1600],
the camera device auto-exposure routine tuning process may limit the actual
exposure sensitivity range to [100, 1200] to ensure that the noise level isn't
excessive in order to preserve the image quality. In this situation, the image under
low light may be under-exposed when the sensor max exposure time (bounded by the
android.control.aeTargetFpsRange when android.control.aeMode is one of the
ON_* modes) and effective max sensitivity are reached. This scene mode allows the
camera device auto-exposure routine to increase the sensitivity up to the max
sensitivity specified by android.sensor.info.sensitivityRange when the scene is too
dark and the max exposure time is reached. The captured images may be noisier
compared with the images captured in normal FACE_PRIORITY mode; therefore, it is
recommended that the application only use this scene mode when it is capable of
reducing the noise level of the captured images.

Unlike the other scene modes, android.control.aeMode,
android.control.awbMode, and android.control.afMode
remain active when FACE_PRIORITY_LOW_LIGHT is set.
</notes>
</value>
<value optional="true" hidden="true" id="100">DEVICE_CUSTOM_START
<notes>
Scene mode values within the range of
`[DEVICE_CUSTOM_START, DEVICE_CUSTOM_END]` are reserved for device-specific
customized scene modes.
</notes>
</value>
<value optional="true" hidden="true" id="127">DEVICE_CUSTOM_END
<notes>
Scene mode values within the range of
`[DEVICE_CUSTOM_START, DEVICE_CUSTOM_END]` are reserved for device-specific
customized scene modes.
</notes>
</value>
</enum>
<description>
Control for which scene mode is currently active.
</description>
<range>android.control.availableSceneModes</range>
<details>
Scene modes are custom camera modes optimized for a certain set of conditions and
capture settings.

This is the mode that is active when
`android.control.mode == USE_SCENE_MODE`. Aside from FACE_PRIORITY, these modes will
disable android.control.aeMode, android.control.awbMode, and android.control.afMode
while in use.

The interpretation and implementation of these scene modes is left
to the implementor of the camera device. Their behavior will not be
consistent across all devices, and any given device may only implement
a subset of these modes.
</details>
<hal_details>
HAL implementations that include scene modes are expected to provide
the per-scene settings to use for android.control.aeMode,
android.control.awbMode, and android.control.afMode in
android.control.sceneModeOverrides.
For HIGH_SPEED_VIDEO mode, if it is included in android.control.availableSceneModes, the
HAL must list supported video size and fps range in
android.control.availableHighSpeedVideoConfigurations. For a given size, e.g. 1280x720,
if the HAL has two different sensor configurations for normal streaming mode and high
speed streaming, when this scene mode is set/reset in a sequence of capture requests, the
HAL may have to switch between different sensor modes. This mode is deprecated in legacy
HAL3.3; to support high speed video recording, please implement
android.control.availableHighSpeedVideoConfigurations and the CONSTRAINED_HIGH_SPEED_VIDEO
capability defined in android.request.availableCapabilities.
</hal_details>
<tag id="BC" />
</entry>
<entry name="videoStabilizationMode" type="byte" visibility="public"
enum="true" hwlevel="legacy">
<enum>
<value>OFF
<notes>
Video stabilization is disabled.
</notes></value>
<value>ON
<notes>
Video stabilization is enabled.
</notes></value>
</enum>
<description>Whether video stabilization is
active.</description>
<details>
Video stabilization automatically warps images from
the camera in order to stabilize motion between consecutive frames.

If enabled, video stabilization can modify the
android.scaler.cropRegion to keep the video stream stabilized.

Switching between different video stabilization modes may take several
frames to initialize; the camera device will report the current mode
in the capture result metadata. For example, when "ON" mode is requested,
the video stabilization modes in the first several capture results may
still be "OFF", and it will become "ON" when the initialization is
done.

In addition, not all recording sizes or frame rates may be supported for
stabilization by a device that reports stabilization support. It is guaranteed
that an output targeting a MediaRecorder or MediaCodec will be stabilized if
the recording resolution is less than or equal to 1920 x 1080 (width less than
or equal to 1920, height less than or equal to 1080), and the recording
frame rate is less than or equal to 30fps. At other sizes, the CaptureResult
android.control.videoStabilizationMode field will return
OFF if the recording output is not stabilized, or if there are no output
Surface types that can be stabilized.

If a camera device supports both this mode and OIS
(android.lens.opticalStabilizationMode), turning both modes on may
produce undesirable interaction, so it is recommended not to enable
both at the same time.
</details>
<tag id="BC" />
</entry>
</controls>
<static>
<entry name="aeAvailableAntibandingModes" type="byte" visibility="public"
type_notes="list of enums" container="array" typedef="enumList"
hwlevel="legacy">
<array>
<size>n</size>
</array>
<description>
List of auto-exposure antibanding modes for android.control.aeAntibandingMode that are
supported by this camera device.
</description>
<range>Any value listed in android.control.aeAntibandingMode</range>
<details>
Not all of the auto-exposure anti-banding modes may be
supported by a given camera device.
This field lists the 1991 valid anti-banding modes that the application may request 1992 for this camera device with the 1993 android.control.aeAntibandingMode control. 1994 </details> 1995 <tag id="BC" /> 1996 </entry> 1997 <entry name="aeAvailableModes" type="byte" visibility="public" 1998 type_notes="list of enums" container="array" typedef="enumList" 1999 hwlevel="legacy"> 2000 <array> 2001 <size>n</size> 2002 </array> 2003 <description> 2004 List of auto-exposure modes for android.control.aeMode that are supported by this camera 2005 device. 2006 </description> 2007 <range>Any value listed in android.control.aeMode</range> 2008 <details> 2009 Not all the auto-exposure modes may be supported by a 2010 given camera device, especially if no flash unit is 2011 available. This entry lists the valid modes for 2012 android.control.aeMode for this camera device. 2013 2014 All camera devices support ON, and all camera devices with flash 2015 units support ON_AUTO_FLASH and ON_ALWAYS_FLASH. 2016 2017 FULL mode camera devices always support OFF mode, 2018 which enables application control of camera exposure time, 2019 sensitivity, and frame duration. 2020 2021 LEGACY mode camera devices never support OFF mode. 2022 LIMITED mode devices support OFF if they support the MANUAL_SENSOR 2023 capability. 2024 </details> 2025 <tag id="BC" /> 2026 </entry> 2027 <entry name="aeAvailableTargetFpsRanges" type="int32" visibility="public" 2028 type_notes="list of pairs of frame rates" 2029 container="array" typedef="rangeInt" 2030 hwlevel="legacy"> 2031 <array> 2032 <size>2</size> 2033 <size>n</size> 2034 </array> 2035 <description>List of frame rate ranges for android.control.aeTargetFpsRange supported by 2036 this camera device.</description> 2037 <units>Frames per second (FPS)</units> 2038 <details> 2039 For devices at the LEGACY level or above: 2040 2041 * For constant-framerate recording, for each normal 2042 {@link android.media.CamcorderProfile CamcorderProfile}, that is, a 2043 {@link android.media.CamcorderProfile CamcorderProfile} that has 2044 {@link android.media.CamcorderProfile#quality quality} in 2045 the range [{@link android.media.CamcorderProfile#QUALITY_LOW QUALITY_LOW}, 2046 {@link android.media.CamcorderProfile#QUALITY_2160P QUALITY_2160P}], if the profile is 2047 supported by the device and has 2048 {@link android.media.CamcorderProfile#videoFrameRate videoFrameRate} `x`, this list will 2049 always include (`x`,`x`). 2050 2051 * Also, a camera device must either not support any 2052 {@link android.media.CamcorderProfile CamcorderProfile}, 2053 or support at least one 2054 normal {@link android.media.CamcorderProfile CamcorderProfile} that has 2055 {@link android.media.CamcorderProfile#videoFrameRate videoFrameRate} `x` >= 24. 2056 2057 For devices at the LIMITED level or above: 2058 2059 * For YUV_420_888 burst capture use case, this list will always include (`min`, `max`) 2060 and (`max`, `max`) where `min` <= 15 and `max` = the maximum output frame rate of the 2061 maximum YUV_420_888 output size. 
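For illustration only (the `characteristics` and `builder` names here are placeholders,
and this snippet is not a requirement of this entry), an application targeting
constant-framerate recording might select a fixed 30fps range when one is advertised:

    // Sketch: prefer a fixed (30, 30) AE target FPS range for recording, if listed.
    Range<Integer>[] fpsRanges =
            characteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
    for (Range<Integer> range : fpsRanges) {
        if (range.getLower() == 30 && range.getUpper() == 30) {
            builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, range);
            break;
        }
    }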
2062 </details> 2063 <tag id="BC" /> 2064 </entry> 2065 <entry name="aeCompensationRange" type="int32" visibility="public" 2066 container="array" typedef="rangeInt" 2067 hwlevel="legacy"> 2068 <array> 2069 <size>2</size> 2070 </array> 2071 <description>Maximum and minimum exposure compensation values for 2072 android.control.aeExposureCompensation, in counts of android.control.aeCompensationStep, 2073 that are supported by this camera device.</description> 2074 <range> 2075 Range [0,0] indicates that exposure compensation is not supported. 2076 2077 For LIMITED and FULL devices, range must follow below requirements if exposure 2078 compensation is supported (`range != [0, 0]`): 2079 2080 `Min.exposure compensation * android.control.aeCompensationStep <= -2 EV` 2081 2082 `Max.exposure compensation * android.control.aeCompensationStep >= 2 EV` 2083 2084 LEGACY devices may support a smaller range than this. 2085 </range> 2086 <tag id="BC" /> 2087 </entry> 2088 <entry name="aeCompensationStep" type="rational" visibility="public" 2089 hwlevel="legacy"> 2090 <description>Smallest step by which the exposure compensation 2091 can be changed.</description> 2092 <units>Exposure Value (EV)</units> 2093 <details> 2094 This is the unit for android.control.aeExposureCompensation. For example, if this key has 2095 a value of `1/2`, then a setting of `-2` for android.control.aeExposureCompensation means 2096 that the target EV offset for the auto-exposure routine is -1 EV. 2097 2098 One unit of EV compensation changes the brightness of the captured image by a factor 2099 of two. +1 EV doubles the image brightness, while -1 EV halves the image brightness. 2100 </details> 2101 <hal_details> 2102 This must be less than or equal to 1/2. 2103 </hal_details> 2104 <tag id="BC" /> 2105 </entry> 2106 <entry name="afAvailableModes" type="byte" visibility="public" 2107 type_notes="List of enums" container="array" typedef="enumList" 2108 hwlevel="legacy"> 2109 <array> 2110 <size>n</size> 2111 </array> 2112 <description> 2113 List of auto-focus (AF) modes for android.control.afMode that are 2114 supported by this camera device. 2115 </description> 2116 <range>Any value listed in android.control.afMode</range> 2117 <details> 2118 Not all the auto-focus modes may be supported by a 2119 given camera device. This entry lists the valid modes for 2120 android.control.afMode for this camera device. 2121 2122 All LIMITED and FULL mode camera devices will support OFF mode, and all 2123 camera devices with adjustable focuser units 2124 (`android.lens.info.minimumFocusDistance > 0`) will support AUTO mode. 2125 2126 LEGACY devices will support OFF mode only if they support 2127 focusing to infinity (by also setting android.lens.focusDistance to 2128 `0.0f`). 2129 </details> 2130 <tag id="BC" /> 2131 </entry> 2132 <entry name="availableEffects" type="byte" visibility="public" 2133 type_notes="List of enums (android.control.effectMode)." container="array" 2134 typedef="enumList" hwlevel="legacy"> 2135 <array> 2136 <size>n</size> 2137 </array> 2138 <description> 2139 List of color effects for android.control.effectMode that are supported by this camera 2140 device. 2141 </description> 2142 <range>Any value listed in android.control.effectMode</range> 2143 <details> 2144 This list contains the color effect modes that can be applied to 2145 images produced by the camera device. 2146 Implementations are not expected to be consistent across all devices. 
2147 If no color effect modes are available for a device, this will only list 2148 OFF. 2149 2150 A color effect will only be applied if 2151 android.control.mode != OFF. OFF is always included in this list. 2152 2153 This control has no effect on the operation of other control routines such 2154 as auto-exposure, white balance, or focus. 2155 </details> 2156 <tag id="BC" /> 2157 </entry> 2158 <entry name="availableSceneModes" type="byte" visibility="public" 2159 type_notes="List of enums (android.control.sceneMode)." 2160 container="array" typedef="enumList" hwlevel="legacy"> 2161 <array> 2162 <size>n</size> 2163 </array> 2164 <description> 2165 List of scene modes for android.control.sceneMode that are supported by this camera 2166 device. 2167 </description> 2168 <range>Any value listed in android.control.sceneMode</range> 2169 <details> 2170 This list contains scene modes that can be set for the camera device. 2171 Only scene modes that have been fully implemented for the 2172 camera device may be included here. Implementations are not expected 2173 to be consistent across all devices. 2174 2175 If no scene modes are supported by the camera device, this 2176 will be set to DISABLED. Otherwise DISABLED will not be listed. 2177 2178 FACE_PRIORITY is always listed if face detection is 2179 supported (i.e.`android.statistics.info.maxFaceCount > 2180 0`). 2181 </details> 2182 <tag id="BC" /> 2183 </entry> 2184 <entry name="availableVideoStabilizationModes" type="byte" 2185 visibility="public" type_notes="List of enums." container="array" 2186 typedef="enumList" hwlevel="legacy"> 2187 <array> 2188 <size>n</size> 2189 </array> 2190 <description> 2191 List of video stabilization modes for android.control.videoStabilizationMode 2192 that are supported by this camera device. 2193 </description> 2194 <range>Any value listed in android.control.videoStabilizationMode</range> 2195 <details> 2196 OFF will always be listed. 2197 </details> 2198 <tag id="BC" /> 2199 </entry> 2200 <entry name="awbAvailableModes" type="byte" visibility="public" 2201 type_notes="List of enums" 2202 container="array" typedef="enumList" hwlevel="legacy"> 2203 <array> 2204 <size>n</size> 2205 </array> 2206 <description> 2207 List of auto-white-balance modes for android.control.awbMode that are supported by this 2208 camera device. 2209 </description> 2210 <range>Any value listed in android.control.awbMode</range> 2211 <details> 2212 Not all the auto-white-balance modes may be supported by a 2213 given camera device. This entry lists the valid modes for 2214 android.control.awbMode for this camera device. 2215 2216 All camera devices will support ON mode. 2217 2218 Camera devices that support the MANUAL_POST_PROCESSING capability will always support OFF 2219 mode, which enables application control of white balance, by using 2220 android.colorCorrection.transform and android.colorCorrection.gains 2221 (android.colorCorrection.mode must be set to TRANSFORM_MATRIX). This includes all FULL 2222 mode camera devices. 
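For illustration only (assuming `characteristics` and a CaptureRequest.Builder named
`builder`; this snippet is not a requirement of this entry), an application can check
for OFF support before switching to fully manual white balance:

    // Sketch: only use manual white balance when AWB_MODE_OFF is advertised.
    int[] awbModes = characteristics.get(CameraCharacteristics.CONTROL_AWB_AVAILABLE_MODES);
    boolean offSupported = false;
    for (int mode : awbModes) {
        if (mode == CameraMetadata.CONTROL_AWB_MODE_OFF) {
            offSupported = true;
            break;
        }
    }
    if (offSupported) {
        builder.set(CaptureRequest.CONTROL_AWB_MODE, CameraMetadata.CONTROL_AWB_MODE_OFF);
        builder.set(CaptureRequest.COLOR_CORRECTION_MODE,
                CameraMetadata.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
        // The application-chosen COLOR_CORRECTION_TRANSFORM and COLOR_CORRECTION_GAINS
        // values would then be set on the same builder.
    }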
2223 </details> 2224 <tag id="BC" /> 2225 </entry> 2226 <entry name="maxRegions" type="int32" visibility="ndk_public" 2227 container="array" hwlevel="legacy"> 2228 <array> 2229 <size>3</size> 2230 </array> 2231 <description> 2232 List of the maximum number of regions that can be used for metering in 2233 auto-exposure (AE), auto-white balance (AWB), and auto-focus (AF); 2234 this corresponds to the the maximum number of elements in 2235 android.control.aeRegions, android.control.awbRegions, 2236 and android.control.afRegions. 2237 </description> 2238 <range> 2239 Value must be &gt;= 0 for each element. For full-capability devices 2240 this value must be &gt;= 1 for AE and AF. The order of the elements is: 2241 `(AE, AWB, AF)`.</range> 2242 <tag id="BC" /> 2243 </entry> 2244 <entry name="maxRegionsAe" type="int32" visibility="java_public" 2245 synthetic="true" hwlevel="legacy"> 2246 <description> 2247 The maximum number of metering regions that can be used by the auto-exposure (AE) 2248 routine. 2249 </description> 2250 <range>Value will be &gt;= 0. For FULL-capability devices, this 2251 value will be &gt;= 1. 2252 </range> 2253 <details> 2254 This corresponds to the the maximum allowed number of elements in 2255 android.control.aeRegions. 2256 </details> 2257 <hal_details>This entry is private to the framework. Fill in 2258 maxRegions to have this entry be automatically populated. 2259 </hal_details> 2260 </entry> 2261 <entry name="maxRegionsAwb" type="int32" visibility="java_public" 2262 synthetic="true" hwlevel="legacy"> 2263 <description> 2264 The maximum number of metering regions that can be used by the auto-white balance (AWB) 2265 routine. 2266 </description> 2267 <range>Value will be &gt;= 0. 2268 </range> 2269 <details> 2270 This corresponds to the the maximum allowed number of elements in 2271 android.control.awbRegions. 2272 </details> 2273 <hal_details>This entry is private to the framework. Fill in 2274 maxRegions to have this entry be automatically populated. 2275 </hal_details> 2276 </entry> 2277 <entry name="maxRegionsAf" type="int32" visibility="java_public" 2278 synthetic="true" hwlevel="legacy"> 2279 <description> 2280 The maximum number of metering regions that can be used by the auto-focus (AF) routine. 2281 </description> 2282 <range>Value will be &gt;= 0. For FULL-capability devices, this 2283 value will be &gt;= 1. 2284 </range> 2285 <details> 2286 This corresponds to the the maximum allowed number of elements in 2287 android.control.afRegions. 2288 </details> 2289 <hal_details>This entry is private to the framework. Fill in 2290 maxRegions to have this entry be automatically populated. 2291 </hal_details> 2292 </entry> 2293 <entry name="sceneModeOverrides" type="byte" visibility="system" 2294 container="array" hwlevel="limited"> 2295 <array> 2296 <size>3</size> 2297 <size>length(availableSceneModes)</size> 2298 </array> 2299 <description> 2300 Ordered list of auto-exposure, auto-white balance, and auto-focus 2301 settings to use with each available scene mode. 2302 </description> 2303 <range> 2304 For each available scene mode, the list must contain three 2305 entries containing the android.control.aeMode, 2306 android.control.awbMode, and android.control.afMode values used 2307 by the camera device. The entry order is `(aeMode, awbMode, afMode)` 2308 where aeMode has the lowest index position. 
2309 </range> 2310 <details> 2311 When a scene mode is enabled, the camera device is expected 2312 to override android.control.aeMode, android.control.awbMode, 2313 and android.control.afMode with its preferred settings for 2314 that scene mode. 2315 2316 The order of this list matches that of availableSceneModes, 2317 with 3 entries for each mode. The overrides listed 2318 for FACE_PRIORITY and FACE_PRIORITY_LOW_LIGHT (if supported) are ignored, 2319 since for that mode the application-set android.control.aeMode, 2320 android.control.awbMode, and android.control.afMode values are 2321 used instead, matching the behavior when android.control.mode 2322 is set to AUTO. It is recommended that the FACE_PRIORITY and 2323 FACE_PRIORITY_LOW_LIGHT (if supported) overrides should be set to 0. 2324 2325 For example, if availableSceneModes contains 2326 `(FACE_PRIORITY, ACTION, NIGHT)`, then the camera framework 2327 expects sceneModeOverrides to have 9 entries formatted like: 2328 `(0, 0, 0, ON_AUTO_FLASH, AUTO, CONTINUOUS_PICTURE, 2329 ON_AUTO_FLASH, INCANDESCENT, AUTO)`. 2330 </details> 2331 <hal_details> 2332 To maintain backward compatibility, this list will be made available 2333 in the static metadata of the camera service. The camera service will 2334 use these values to set android.control.aeMode, 2335 android.control.awbMode, and android.control.afMode when using a scene 2336 mode other than FACE_PRIORITY and FACE_PRIORITY_LOW_LIGHT (if supported). 2337 </hal_details> 2338 <tag id="BC" /> 2339 </entry> 2340 </static> 2341 <dynamic> 2342 <entry name="aePrecaptureId" type="int32" visibility="system" deprecated="true"> 2343 <description>The ID sent with the latest 2344 CAMERA2_TRIGGER_PRECAPTURE_METERING call</description> 2345 <deprecation_description> 2346 Removed in camera HAL v3 2347 </deprecation_description> 2348 <details>Must be 0 if no 2349 CAMERA2_TRIGGER_PRECAPTURE_METERING trigger received yet 2350 by HAL. Always updated even if AE algorithm ignores the 2351 trigger</details> 2352 </entry> 2353 <clone entry="android.control.aeAntibandingMode" kind="controls"> 2354 </clone> 2355 <clone entry="android.control.aeExposureCompensation" kind="controls"> 2356 </clone> 2357 <clone entry="android.control.aeLock" kind="controls"> 2358 </clone> 2359 <clone entry="android.control.aeMode" kind="controls"> 2360 </clone> 2361 <clone entry="android.control.aeRegions" kind="controls"> 2362 </clone> 2363 <clone entry="android.control.aeTargetFpsRange" kind="controls"> 2364 </clone> 2365 <clone entry="android.control.aePrecaptureTrigger" kind="controls"> 2366 </clone> 2367 <entry name="aeState" type="byte" visibility="public" enum="true" 2368 hwlevel="limited"> 2369 <enum> 2370 <value>INACTIVE 2371 <notes>AE is off or recently reset. 2372 2373 When a camera device is opened, it starts in 2374 this state. This is a transient state, the camera device may skip reporting 2375 this state in capture result.</notes></value> 2376 <value>SEARCHING 2377 <notes>AE doesn't yet have a good set of control values 2378 for the current scene. 
2379 2380 This is a transient state, the camera device may skip 2381 reporting this state in capture result.</notes></value> 2382 <value>CONVERGED 2383 <notes>AE has a good set of control values for the 2384 current scene.</notes></value> 2385 <value>LOCKED 2386 <notes>AE has been locked.</notes></value> 2387 <value>FLASH_REQUIRED 2388 <notes>AE has a good set of control values, but flash 2389 needs to be fired for good quality still 2390 capture.</notes></value> 2391 <value>PRECAPTURE 2392 <notes>AE has been asked to do a precapture sequence 2393 and is currently executing it. 2394 2395 Precapture can be triggered through setting 2396 android.control.aePrecaptureTrigger to START. Currently 2397 active and completed (if it causes camera device internal AE lock) precapture 2398 metering sequence can be canceled through setting 2399 android.control.aePrecaptureTrigger to CANCEL. 2400 2401 Once PRECAPTURE completes, AE will transition to CONVERGED 2402 or FLASH_REQUIRED as appropriate. This is a transient 2403 state, the camera device may skip reporting this state in 2404 capture result.</notes></value> 2405 </enum> 2406 <description>Current state of the auto-exposure (AE) algorithm.</description> 2407 <details>Switching between or enabling AE modes (android.control.aeMode) always 2408 resets the AE state to INACTIVE. Similarly, switching between android.control.mode, 2409 or android.control.sceneMode if `android.control.mode == USE_SCENE_MODE` resets all 2410 the algorithm states to INACTIVE. 2411 2412 The camera device can do several state transitions between two results, if it is 2413 allowed by the state transition table. For example: INACTIVE may never actually be 2414 seen in a result. 2415 2416 The state in the result is the state for this image (in sync with this image): if 2417 AE state becomes CONVERGED, then the image data associated with this result should 2418 be good to use. 2419 2420 Below are state transition tables for different AE modes. 2421 2422 State | Transition Cause | New State | Notes 2423 :------------:|:----------------:|:---------:|:-----------------------: 2424 INACTIVE | | INACTIVE | Camera device auto exposure algorithm is disabled 2425 2426 When android.control.aeMode is AE_MODE_ON*: 2427 2428 State | Transition Cause | New State | Notes 2429 :-------------:|:--------------------------------------------:|:--------------:|:-----------------: 2430 INACTIVE | Camera device initiates AE scan | SEARCHING | Values changing 2431 INACTIVE | android.control.aeLock is ON | LOCKED | Values locked 2432 SEARCHING | Camera device finishes AE scan | CONVERGED | Good values, not changing 2433 SEARCHING | Camera device finishes AE scan | FLASH_REQUIRED | Converged but too dark w/o flash 2434 SEARCHING | android.control.aeLock is ON | LOCKED | Values locked 2435 CONVERGED | Camera device initiates AE scan | SEARCHING | Values changing 2436 CONVERGED | android.control.aeLock is ON | LOCKED | Values locked 2437 FLASH_REQUIRED | Camera device initiates AE scan | SEARCHING | Values changing 2438 FLASH_REQUIRED | android.control.aeLock is ON | LOCKED | Values locked 2439 LOCKED | android.control.aeLock is OFF | SEARCHING | Values not good after unlock 2440 LOCKED | android.control.aeLock is OFF | CONVERGED | Values good after unlock 2441 LOCKED | android.control.aeLock is OFF | FLASH_REQUIRED | Exposure good, but too dark 2442 PRECAPTURE | Sequence done. android.control.aeLock is OFF | CONVERGED | Ready for high-quality capture 2443 PRECAPTURE | Sequence done. 
android.control.aeLock is ON | LOCKED | Ready for high-quality capture
LOCKED | aeLock is ON and aePrecaptureTrigger is START | LOCKED | Precapture trigger is ignored when AE is already locked
LOCKED | aeLock is ON and aePrecaptureTrigger is CANCEL | LOCKED | Precapture trigger is ignored when AE is already locked
Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START | PRECAPTURE | Start AE precapture metering sequence
Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL | INACTIVE | Currently active precapture metering sequence is canceled

If the camera device supports AE external flash mode (ON_EXTERNAL_FLASH is included in
android.control.aeAvailableModes), android.control.aeState must be FLASH_REQUIRED after
the camera device finishes the AE scan and it's too dark without flash.

For the above table, the camera device may skip reporting any state changes that happen
without application intervention (i.e. mode switch, trigger, locking). Any state that
can be skipped in that manner is called a transient state.

For example, for the above AE modes (AE_MODE_ON*), in addition to the state transitions
listed in the above table, it is also legal for the camera device to skip one or more
transient states between two results. See below table for examples:

State | Transition Cause | New State | Notes
:-------------:|:-----------------------------------------------------------:|:--------------:|:-----------------:
INACTIVE | Camera device finished AE scan | CONVERGED | Values are already good, transient states are skipped by camera device.
Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START, sequence done | FLASH_REQUIRED | Converged but too dark w/o flash after a precapture sequence, transient states are skipped by camera device.
Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START, sequence done | CONVERGED | Converged after a precapture sequence, transient states are skipped by camera device.
Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL, converged | FLASH_REQUIRED | Converged but too dark w/o flash after a precapture sequence is canceled, transient states are skipped by camera device.
Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL, converged | CONVERGED | Converged after a precapture sequence is canceled, transient states are skipped by camera device.
CONVERGED | Camera device finished AE scan | FLASH_REQUIRED | Converged but too dark w/o flash after a new scan, transient states are skipped by camera device.
FLASH_REQUIRED | Camera device finished AE scan | CONVERGED | Converged after a new scan, transient states are skipped by camera device.
</details>
</entry>
<clone entry="android.control.afMode" kind="controls">
</clone>
<clone entry="android.control.afRegions" kind="controls">
</clone>
<clone entry="android.control.afTrigger" kind="controls">
</clone>
<entry name="afState" type="byte" visibility="public" enum="true"
hwlevel="legacy">
<enum>
<value>INACTIVE
<notes>AF is off or has not yet tried to scan/been asked
to scan.

When a camera device is opened, it starts in this
state.
This is a transient state, the camera device may
skip reporting this state in capture
result.</notes></value>
<value>PASSIVE_SCAN
<notes>AF is currently performing an AF scan initiated by the
camera device in a continuous autofocus mode.

Only used by CONTINUOUS_* AF modes. This is a transient
state, the camera device may skip reporting this state in
capture result.</notes></value>
<value>PASSIVE_FOCUSED
<notes>AF currently believes it is in focus, but may
restart scanning at any time.

Only used by CONTINUOUS_* AF modes. This is a transient
state, the camera device may skip reporting this state in
capture result.</notes></value>
<value>ACTIVE_SCAN
<notes>AF is performing an AF scan because it was
triggered by an AF trigger.

Only used by AUTO or MACRO AF modes. This is a transient
state, the camera device may skip reporting this state in
capture result.</notes></value>
<value>FOCUSED_LOCKED
<notes>AF believes it is focused correctly and has locked
focus.

This state is reached only after an explicit START AF trigger has been
sent (android.control.afTrigger), when good focus has been obtained.

The lens will remain stationary until the AF mode (android.control.afMode) is changed or
a new AF trigger is sent to the camera device (android.control.afTrigger).
</notes></value>
<value>NOT_FOCUSED_LOCKED
<notes>AF has failed to focus successfully and has locked
focus.

This state is reached only after an explicit START AF trigger has been
sent (android.control.afTrigger), when good focus cannot be obtained.

The lens will remain stationary until the AF mode (android.control.afMode) is changed or
a new AF trigger is sent to the camera device (android.control.afTrigger).
</notes></value>
<value>PASSIVE_UNFOCUSED
<notes>AF finished a passive scan without finding focus,
and may restart scanning at any time.

Only used by CONTINUOUS_* AF modes. This is a transient state, the camera
device may skip reporting this state in capture result.

LEGACY camera devices do not support this state. When a passive
scan has finished, it will always go to PASSIVE_FOCUSED.
</notes></value>
</enum>
<description>Current state of auto-focus (AF) algorithm.</description>
<details>
Switching between or enabling AF modes (android.control.afMode) always
resets the AF state to INACTIVE. Similarly, switching between android.control.mode,
or android.control.sceneMode if `android.control.mode == USE_SCENE_MODE` resets all
the algorithm states to INACTIVE.

The camera device can do several state transitions between two results, if it is
allowed by the state transition table. For example: INACTIVE may never actually be
seen in a result.

The state in the result is the state for this image (in sync with this image): if
AF state becomes FOCUSED, then the image data associated with this result should
be sharp.

Below are state transition tables for different AF modes.
2557 2558 When android.control.afMode is AF_MODE_OFF or AF_MODE_EDOF: 2559 2560 State | Transition Cause | New State | Notes 2561 :------------:|:----------------:|:---------:|:-----------: 2562 INACTIVE | | INACTIVE | Never changes 2563 2564 When android.control.afMode is AF_MODE_AUTO or AF_MODE_MACRO: 2565 2566 State | Transition Cause | New State | Notes 2567 :-----------------:|:----------------:|:------------------:|:--------------: 2568 INACTIVE | AF_TRIGGER | ACTIVE_SCAN | Start AF sweep, Lens now moving 2569 ACTIVE_SCAN | AF sweep done | FOCUSED_LOCKED | Focused, Lens now locked 2570 ACTIVE_SCAN | AF sweep done | NOT_FOCUSED_LOCKED | Not focused, Lens now locked 2571 ACTIVE_SCAN | AF_CANCEL | INACTIVE | Cancel/reset AF, Lens now locked 2572 FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Cancel/reset AF 2573 FOCUSED_LOCKED | AF_TRIGGER | ACTIVE_SCAN | Start new sweep, Lens now moving 2574 NOT_FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Cancel/reset AF 2575 NOT_FOCUSED_LOCKED | AF_TRIGGER | ACTIVE_SCAN | Start new sweep, Lens now moving 2576 Any state | Mode change | INACTIVE | 2577 2578 For the above table, the camera device may skip reporting any state changes that happen 2579 without application intervention (i.e. mode switch, trigger, locking). Any state that 2580 can be skipped in that manner is called a transient state. 2581 2582 For example, for these AF modes (AF_MODE_AUTO and AF_MODE_MACRO), in addition to the 2583 state transitions listed in above table, it is also legal for the camera device to skip 2584 one or more transient states between two results. See below table for examples: 2585 2586 State | Transition Cause | New State | Notes 2587 :-----------------:|:----------------:|:------------------:|:--------------: 2588 INACTIVE | AF_TRIGGER | FOCUSED_LOCKED | Focus is already good or good after a scan, lens is now locked. 2589 INACTIVE | AF_TRIGGER | NOT_FOCUSED_LOCKED | Focus failed after a scan, lens is now locked. 2590 FOCUSED_LOCKED | AF_TRIGGER | FOCUSED_LOCKED | Focus is already good or good after a scan, lens is now locked. 2591 NOT_FOCUSED_LOCKED | AF_TRIGGER | FOCUSED_LOCKED | Focus is good after a scan, lens is not locked. 2592 2593 2594 When android.control.afMode is AF_MODE_CONTINUOUS_VIDEO: 2595 2596 State | Transition Cause | New State | Notes 2597 :-----------------:|:-----------------------------------:|:------------------:|:--------------: 2598 INACTIVE | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving 2599 INACTIVE | AF_TRIGGER | NOT_FOCUSED_LOCKED | AF state query, Lens now locked 2600 PASSIVE_SCAN | Camera device completes current scan| PASSIVE_FOCUSED | End AF scan, Lens now locked 2601 PASSIVE_SCAN | Camera device fails current scan | PASSIVE_UNFOCUSED | End AF scan, Lens now locked 2602 PASSIVE_SCAN | AF_TRIGGER | FOCUSED_LOCKED | Immediate transition, if focus is good. Lens now locked 2603 PASSIVE_SCAN | AF_TRIGGER | NOT_FOCUSED_LOCKED | Immediate transition, if focus is bad. 
Lens now locked
PASSIVE_SCAN | AF_CANCEL | INACTIVE | Reset lens position, Lens now locked
PASSIVE_FOCUSED | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving
PASSIVE_UNFOCUSED | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving
PASSIVE_FOCUSED | AF_TRIGGER | FOCUSED_LOCKED | Immediate transition, lens now locked
PASSIVE_UNFOCUSED | AF_TRIGGER | NOT_FOCUSED_LOCKED | Immediate transition, lens now locked
FOCUSED_LOCKED | AF_TRIGGER | FOCUSED_LOCKED | No effect
FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Restart AF scan
NOT_FOCUSED_LOCKED | AF_TRIGGER | NOT_FOCUSED_LOCKED | No effect
NOT_FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Restart AF scan

When android.control.afMode is AF_MODE_CONTINUOUS_PICTURE:

State | Transition Cause | New State | Notes
:-----------------:|:------------------------------------:|:------------------:|:--------------:
INACTIVE | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving
INACTIVE | AF_TRIGGER | NOT_FOCUSED_LOCKED | AF state query, Lens now locked
PASSIVE_SCAN | Camera device completes current scan | PASSIVE_FOCUSED | End AF scan, Lens now locked
PASSIVE_SCAN | Camera device fails current scan | PASSIVE_UNFOCUSED | End AF scan, Lens now locked
PASSIVE_SCAN | AF_TRIGGER | FOCUSED_LOCKED | Eventual transition once the focus is good. Lens now locked
PASSIVE_SCAN | AF_TRIGGER | NOT_FOCUSED_LOCKED | Eventual transition if cannot find focus. Lens now locked
PASSIVE_SCAN | AF_CANCEL | INACTIVE | Reset lens position, Lens now locked
PASSIVE_FOCUSED | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving
PASSIVE_UNFOCUSED | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving
PASSIVE_FOCUSED | AF_TRIGGER | FOCUSED_LOCKED | Immediate trans. Lens now locked
PASSIVE_UNFOCUSED | AF_TRIGGER | NOT_FOCUSED_LOCKED | Immediate trans. Lens now locked
FOCUSED_LOCKED | AF_TRIGGER | FOCUSED_LOCKED | No effect
FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Restart AF scan
NOT_FOCUSED_LOCKED | AF_TRIGGER | NOT_FOCUSED_LOCKED | No effect
NOT_FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Restart AF scan

When switching between AF_MODE_CONTINUOUS_* (CAF modes) and AF_MODE_AUTO/AF_MODE_MACRO
(AUTO modes), the initial INACTIVE or PASSIVE_SCAN states may be skipped by the
camera device. When a trigger is included in a mode switch request, the trigger
will be evaluated in the context of the new mode in the request.
2638 See below table for examples: 2639 2640 State | Transition Cause | New State | Notes 2641 :-----------:|:--------------------------------------:|:----------------------------------------:|:--------------: 2642 any state | CAF-->AUTO mode switch | INACTIVE | Mode switch without trigger, initial state must be INACTIVE 2643 any state | CAF-->AUTO mode switch with AF_TRIGGER | trigger-reachable states from INACTIVE | Mode switch with trigger, INACTIVE is skipped 2644 any state | AUTO-->CAF mode switch | passively reachable states from INACTIVE | Mode switch without trigger, passive transient state is skipped 2645 </details> 2646 </entry> 2647 <entry name="afTriggerId" type="int32" visibility="system" deprecated="true"> 2648 <description>The ID sent with the latest 2649 CAMERA2_TRIGGER_AUTOFOCUS call</description> 2650 <deprecation_description> 2651 Removed in camera HAL v3 2652 </deprecation_description> 2653 <details>Must be 0 if no CAMERA2_TRIGGER_AUTOFOCUS trigger 2654 received yet by HAL. Always updated even if AF algorithm 2655 ignores the trigger</details> 2656 </entry> 2657 <clone entry="android.control.awbLock" kind="controls"> 2658 </clone> 2659 <clone entry="android.control.awbMode" kind="controls"> 2660 </clone> 2661 <clone entry="android.control.awbRegions" kind="controls"> 2662 </clone> 2663 <clone entry="android.control.captureIntent" kind="controls"> 2664 </clone> 2665 <entry name="awbState" type="byte" visibility="public" enum="true" 2666 hwlevel="limited"> 2667 <enum> 2668 <value>INACTIVE 2669 <notes>AWB is not in auto mode, or has not yet started metering. 2670 2671 When a camera device is opened, it starts in this 2672 state. This is a transient state, the camera device may 2673 skip reporting this state in capture 2674 result.</notes></value> 2675 <value>SEARCHING 2676 <notes>AWB doesn't yet have a good set of control 2677 values for the current scene. 2678 2679 This is a transient state, the camera device 2680 may skip reporting this state in capture result.</notes></value> 2681 <value>CONVERGED 2682 <notes>AWB has a good set of control values for the 2683 current scene.</notes></value> 2684 <value>LOCKED 2685 <notes>AWB has been locked. 2686 </notes></value> 2687 </enum> 2688 <description>Current state of auto-white balance (AWB) algorithm.</description> 2689 <details>Switching between or enabling AWB modes (android.control.awbMode) always 2690 resets the AWB state to INACTIVE. Similarly, switching between android.control.mode, 2691 or android.control.sceneMode if `android.control.mode == USE_SCENE_MODE` resets all 2692 the algorithm states to INACTIVE. 2693 2694 The camera device can do several state transitions between two results, if it is 2695 allowed by the state transition table. So INACTIVE may never actually be seen in 2696 a result. 2697 2698 The state in the result is the state for this image (in sync with this image): if 2699 AWB state becomes CONVERGED, then the image data associated with this result should 2700 be good to use. 2701 2702 Below are state transition tables for different AWB modes. 
2703 2704 When `android.control.awbMode != AWB_MODE_AUTO`: 2705 2706 State | Transition Cause | New State | Notes 2707 :------------:|:----------------:|:---------:|:-----------------------: 2708 INACTIVE | |INACTIVE |Camera device auto white balance algorithm is disabled 2709 2710 When android.control.awbMode is AWB_MODE_AUTO: 2711 2712 State | Transition Cause | New State | Notes 2713 :-------------:|:--------------------------------:|:-------------:|:-----------------: 2714 INACTIVE | Camera device initiates AWB scan | SEARCHING | Values changing 2715 INACTIVE | android.control.awbLock is ON | LOCKED | Values locked 2716 SEARCHING | Camera device finishes AWB scan | CONVERGED | Good values, not changing 2717 SEARCHING | android.control.awbLock is ON | LOCKED | Values locked 2718 CONVERGED | Camera device initiates AWB scan | SEARCHING | Values changing 2719 CONVERGED | android.control.awbLock is ON | LOCKED | Values locked 2720 LOCKED | android.control.awbLock is OFF | SEARCHING | Values not good after unlock 2721 2722 For the above table, the camera device may skip reporting any state changes that happen 2723 without application intervention (i.e. mode switch, trigger, locking). Any state that 2724 can be skipped in that manner is called a transient state. 2725 2726 For example, for this AWB mode (AWB_MODE_AUTO), in addition to the state transitions 2727 listed in above table, it is also legal for the camera device to skip one or more 2728 transient states between two results. See below table for examples: 2729 2730 State | Transition Cause | New State | Notes 2731 :-------------:|:--------------------------------:|:-------------:|:-----------------: 2732 INACTIVE | Camera device finished AWB scan | CONVERGED | Values are already good, transient states are skipped by camera device. 2733 LOCKED | android.control.awbLock is OFF | CONVERGED | Values good after unlock, transient states are skipped by camera device. 2734 </details> 2735 </entry> 2736 <clone entry="android.control.effectMode" kind="controls"> 2737 </clone> 2738 <clone entry="android.control.mode" kind="controls"> 2739 </clone> 2740 <clone entry="android.control.sceneMode" kind="controls"> 2741 </clone> 2742 <clone entry="android.control.videoStabilizationMode" kind="controls"> 2743 </clone> 2744 </dynamic> 2745 <static> 2746 <entry name="availableHighSpeedVideoConfigurations" type="int32" visibility="hidden" 2747 container="array" typedef="highSpeedVideoConfiguration" hwlevel="limited"> 2748 <array> 2749 <size>5</size> 2750 <size>n</size> 2751 </array> 2752 <description> 2753 List of available high speed video size, fps range and max batch size configurations 2754 supported by the camera device, in the format of (width, height, fps_min, fps_max, batch_size_max). 2755 </description> 2756 <range> 2757 For each configuration, the fps_max &gt;= 120fps. 2758 </range> 2759 <details> 2760 When CONSTRAINED_HIGH_SPEED_VIDEO is supported in android.request.availableCapabilities, 2761 this metadata will list the supported high speed video size, fps range and max batch size 2762 configurations. All the sizes listed in this configuration will be a subset of the sizes 2763 reported by {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes} 2764 for processed non-stalling formats. 2765 2766 For the high speed video use case, the application must 2767 select the video size and fps range from this metadata to configure the recording and 2768 preview streams and setup the recording requests. 
For example, if the application intends 2769 to do high speed recording, it can select the maximum size reported by this metadata to 2770 configure output streams. Once the size is selected, the application can filter this metadata 2771 by the selected size and get the supported fps ranges, and use these fps ranges to set up the 2772 recording requests. Note that for the use case of multiple output streams, the application 2773 must select one unique size from this metadata to use (e.g., preview and recording streams 2774 must have the same size). Otherwise, the high speed capture session creation will fail. 2775 2776 The min and max fps will be multiples of 30fps. 2777 2778 High speed video streaming places significant performance pressure on camera hardware; 2779 to achieve efficient high speed streaming, the camera device may have to aggregate 2780 multiple frames together for processing, where the request 2781 controls are the same for all the frames in this batch. Max batch size indicates 2782 the max possible number of frames the camera device will group together for this high 2783 speed stream configuration. This max batch size will be used to generate a high speed 2784 recording request list by 2785 {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}. 2786 The max batch size for each configuration will satisfy the following conditions: 2787 2788 * Each max batch size will be a divisor of its corresponding fps_max / 30. For example, 2789 if fps_max is 300, max batch size will only be 1, 2, 5, or 10. 2790 * The camera device may choose a smaller internal batch size for each configuration, but 2791 the actual batch size will be a divisor of max batch size. For example, if the max batch 2792 size is 8, the actual batch size used by the camera device will only be 1, 2, 4, or 8. 2793 * The max batch size in each configuration entry must be no larger than 32. 2794 2795 The camera device doesn't have to support batch mode to achieve high speed video recording; 2796 in such a case, batch_size_max will be reported as 1 in each configuration entry. 2797 2798 The fps ranges in this configuration list can only be used to create requests 2799 that are submitted to a high speed camera capture session created by 2800 {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}. 2801 The fps ranges reported in this metadata must not be used to set up capture requests for 2802 a normal capture session, or it will cause a request error. 2803 </details> 2804 <hal_details> 2805 All the sizes listed in this configuration will be a subset of the sizes reported by 2806 android.scaler.availableStreamConfigurations for processed non-stalling output formats. 2807 Note that for all high speed video configurations, the HAL must be able to support a minimum 2808 of two streams, though the application might choose to configure just one stream. 2809 2810 The HAL may support multiple sensor modes for high speed outputs, for example, 120fps 2811 sensor mode and 120fps recording, 240fps sensor mode for 240fps recording. The application 2812 usually starts preview first, then starts recording. To avoid, as much as possible, the stutter caused by a sensor mode switch 2813 when starting recording, the application may want to ensure 2814 the same sensor mode is used for preview and recording. Therefore, the HAL must advertise 2815 the variable fps range [30, fps_max] for each fixed fps range in this configuration list.
2816 For example, if the HAL advertises [120, 120] and [240, 240], the HAL must also advertise 2817 [30, 120] and [30, 240] for each configuration. In doing so, if the application intends to 2818 do 120fps recording, it can select [30, 120] to start preview, and [120, 120] to start 2819 recording. For these variable fps ranges, it's up to the HAL to decide the actual fps 2820 values that are suitable for smooth preview streaming. If the HAL sees different max_fps 2821 values that fall into different sensor modes in a sequence of requests, the HAL must 2822 switch the sensor mode as quickly as possible to minimize the stutter caused by the mode switch. 2823 </hal_details> 2824 <tag id="V1" /> 2825 </entry> 2826 <entry name="aeLockAvailable" type="byte" visibility="public" enum="true" 2827 typedef="boolean" hwlevel="legacy"> 2828 <enum> 2829 <value>FALSE</value> 2830 <value>TRUE</value> 2831 </enum> 2832 <description>Whether the camera device supports android.control.aeLock</description> 2833 <details> 2834 Devices with MANUAL_SENSOR capability or BURST_CAPTURE capability will always 2835 list `true`. This includes FULL devices. 2836 </details> 2837 <tag id="BC"/> 2838 </entry> 2839 <entry name="awbLockAvailable" type="byte" visibility="public" enum="true" 2840 typedef="boolean" hwlevel="legacy"> 2841 <enum> 2842 <value>FALSE</value> 2843 <value>TRUE</value> 2844 </enum> 2845 <description>Whether the camera device supports android.control.awbLock</description> 2846 <details> 2847 Devices with MANUAL_POST_PROCESSING capability or BURST_CAPTURE capability will 2848 always list `true`. This includes FULL devices. 2849 </details> 2850 <tag id="BC"/> 2851 </entry> 2852 <entry name="availableModes" type="byte" visibility="public" 2853 type_notes="List of enums (android.control.mode)." container="array" 2854 typedef="enumList" hwlevel="legacy"> 2855 <array> 2856 <size>n</size> 2857 </array> 2858 <description> 2859 List of control modes for android.control.mode that are supported by this camera 2860 device. 2861 </description> 2862 <range>Any value listed in android.control.mode</range> 2863 <details> 2864 This list contains control modes that can be set for the camera device. 2865 LEGACY mode devices will always support AUTO mode. LIMITED and FULL 2866 devices will always support OFF, AUTO modes. 2867 </details> 2868 </entry> 2869 <entry name="postRawSensitivityBoostRange" type="int32" visibility="public" 2870 type_notes="Range of supported post RAW sensitivity boosts" 2871 container="array" typedef="rangeInt"> 2872 <array> 2873 <size>2</size> 2874 </array> 2875 <description>Range of boosts for android.control.postRawSensitivityBoost supported 2876 by this camera device. 2877 </description> 2878 <units>ISO arithmetic units, the same as android.sensor.sensitivity</units> 2879 <details> 2880 Devices that support post RAW sensitivity boost will advertise the 2881 android.control.postRawSensitivityBoost key for controlling 2882 post RAW sensitivity boost. 2883 2884 This key will be `null` for devices that do not support any RAW format 2885 outputs. For devices that do support RAW format outputs, this key will always 2886 be present, and if a device does not support post RAW sensitivity boost, it will 2887 list `(100, 100)` in this key. 2888 </details> 2889 <hal_details> 2890 This key is added in legacy HAL3.4. For legacy HAL3.3 or earlier devices, the camera 2891 framework will generate this key as `(100, 100)` if the device supports any RAW output 2892 formats.
All legacy HAL3.4 and above devices should list this key if the device supports 2893 any RAW output formats. 2894 </hal_details> 2895 </entry> 2896 </static> 2897 <controls> 2898 <entry name="postRawSensitivityBoost" type="int32" visibility="public"> 2899 <description>The amount of additional sensitivity boost applied to output images 2900 after RAW sensor data is captured. 2901 </description> 2902 <units>ISO arithmetic units, the same as android.sensor.sensitivity</units> 2903 <range>android.control.postRawSensitivityBoostRange</range> 2904 <details> 2905 Some camera devices support additional digital sensitivity boosting in the 2906 camera processing pipeline after the sensor RAW image is captured. 2907 Such a boost will be applied to YUV/JPEG format output images but will not 2908 have an effect on RAW output formats like RAW_SENSOR, RAW10, RAW12 or RAW_OPAQUE. 2909 2910 This key will be `null` for devices that do not support any RAW format 2911 outputs. For devices that do support RAW format outputs, this key will always 2912 be present, and if a device does not support post RAW sensitivity boost, it will 2913 list `100` in this key. 2914 2915 If the camera device cannot apply the exact boost requested, it will reduce the 2916 boost to the nearest supported value. 2917 The final boost value used will be available in the output capture result. 2918 2919 For devices that support post RAW sensitivity boost, the YUV/JPEG output images 2920 of such a device will have a total sensitivity of 2921 `android.sensor.sensitivity * android.control.postRawSensitivityBoost / 100`. 2922 The sensitivity of RAW format images will always be `android.sensor.sensitivity`. 2923 2924 This control is only effective if android.control.aeMode or android.control.mode is set to 2925 OFF; otherwise the auto-exposure algorithm will override this value. 2926 </details> 2927 </entry> 2928 </controls> 2929 <dynamic> 2930 <clone entry="android.control.postRawSensitivityBoost" kind="controls"> 2931 </clone> 2932 </dynamic> 2933 <controls> 2934 <entry name="enableZsl" type="byte" visibility="public" enum="true" typedef="boolean"> 2935 <enum> 2936 <value>FALSE 2937 <notes>Requests with android.control.captureIntent == STILL_CAPTURE must be captured 2938 after previous requests.</notes></value> 2939 <value>TRUE 2940 <notes>Requests with android.control.captureIntent == STILL_CAPTURE may or may not be 2941 captured before previous requests.</notes></value> 2942 </enum> 2943 <description>Allow camera device to enable zero-shutter-lag mode for requests with 2944 android.control.captureIntent == STILL_CAPTURE. 2945 </description> 2946 <details> 2947 If enableZsl is `true`, the camera device may enable zero-shutter-lag mode for requests with 2948 STILL_CAPTURE capture intent. The camera device may use images captured in the past to 2949 produce output images for a zero-shutter-lag request. The result metadata including the 2950 android.sensor.timestamp reflects the source frames used to produce output images. 2951 Therefore, the contents of the output images and the result metadata may be out of order 2952 compared to previous regular requests. enableZsl does not affect requests with other 2953 capture intents.
2954 2955 For example, when requests are submitted in the following order: 2956 Request A: enableZsl is ON, android.control.captureIntent is PREVIEW 2957 Request B: enableZsl is ON, android.control.captureIntent is STILL_CAPTURE 2958 2959 The output images for request B may have contents captured before the output images for 2960 request A, and the result metadata for request B may be older than the result metadata for 2961 request A. 2962 2963 Note that when enableZsl is `true`, it is not guaranteed to get output images captured in 2964 the past for requests with STILL_CAPTURE capture intent. 2965 2966 For applications targeting SDK versions O and newer, the value of enableZsl in 2967 TEMPLATE_STILL_CAPTURE template may be `true`. The value in other templates is always 2968 `false` if present. 2969 2970 For applications targeting SDK versions older than O, the value of enableZsl in all 2971 capture templates is always `false` if present. 2972 2973 For application-operated ZSL, use CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template. 2974 </details> 2975 <hal_details> 2976 It is valid for HAL to produce regular output images for requests with STILL_CAPTURE 2977 capture intent. 2978 </hal_details> 2979 </entry> 2980 </controls> 2981 <dynamic> 2982 <clone entry="android.control.enableZsl" kind="controls"> 2983 </clone> 2984 <entry name="afSceneChange" type="byte" visibility="public" enum="true" hal_version="3.3"> 2985 <enum> 2986 <value>NOT_DETECTED 2987 <notes>Scene change is not detected within the AF region(s).</notes></value> 2988 <value>DETECTED 2989 <notes>Scene change is detected within the AF region(s).</notes></value> 2990 </enum> 2991 <description>Whether a significant scene change is detected within the currently-set AF 2992 region(s).</description> 2993 <details>When the camera focus routine detects a change in the scene it is looking at, 2994 such as a large shift in camera viewpoint, significant motion in the scene, or a 2995 significant illumination change, this value will be set to DETECTED for a single capture 2996 result. Otherwise the value will be NOT_DETECTED. The threshold for detection is similar 2997 to what would trigger a new passive focus scan to begin in CONTINUOUS autofocus modes. 2998 2999 This key will be available if the camera device advertises this key via {@link 3000 android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys|ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS}. 3001 </details> 3002 </entry> 3003 </dynamic> 3004 </section> 3005 <section name="demosaic"> 3006 <controls> 3007 <entry name="mode" type="byte" enum="true"> 3008 <enum> 3009 <value>FAST 3010 <notes>Minimal or no slowdown of frame rate compared to 3011 Bayer RAW output.</notes></value> 3012 <value>HIGH_QUALITY 3013 <notes>Improved processing quality but the frame rate might be slowed down 3014 relative to raw output.</notes></value> 3015 </enum> 3016 <description>Controls the quality of the demosaicing 3017 processing.</description> 3018 <tag id="FUTURE" /> 3019 </entry> 3020 </controls> 3021 </section> 3022 <section name="edge"> 3023 <controls> 3024 <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full"> 3025 <enum> 3026 <value>OFF 3027 <notes>No edge enhancement is applied.</notes></value> 3028 <value>FAST 3029 <notes>Apply edge enhancement at a quality level that does not slow down frame rate 3030 relative to sensor output. 
It may be the same as OFF if edge enhancement will 3031 slow down frame rate relative to sensor.</notes></value> 3032 <value>HIGH_QUALITY 3033 <notes>Apply high-quality edge enhancement, at a cost of possibly reduced output frame rate. 3034 </notes></value> 3035 <value optional="true">ZERO_SHUTTER_LAG <notes>Edge enhancement is applied at different 3036 levels for different output streams, based on resolution. Streams at maximum recording 3037 resolution (see {@link 3038 android.hardware.camera2.CameraDevice#createCaptureSession|ACameraDevice_createCaptureSession}) 3039 or below have edge enhancement applied, while higher-resolution streams have no edge 3040 enhancement applied. The level of edge enhancement for low-resolution streams is tuned 3041 so that frame rate is not impacted, and the quality is equal to or better than FAST 3042 (since it is only applied to lower-resolution outputs, quality may improve from FAST). 3043 3044 This mode is intended to be used by applications operating in a zero-shutter-lag mode 3045 with YUV or PRIVATE reprocessing, where the application continuously captures 3046 high-resolution intermediate buffers into a circular buffer, from which a final image is 3047 produced via reprocessing when a user takes a picture. For such a use case, the 3048 high-resolution buffers must not have edge enhancement applied to maximize efficiency of 3049 preview and to avoid double-applying enhancement when reprocessed, while low-resolution 3050 buffers (used for recording or preview, generally) need edge enhancement applied for 3051 reasonable preview quality. 3052 3053 This mode is guaranteed to be supported by devices that support either the 3054 YUV_REPROCESSING or PRIVATE_REPROCESSING capabilities 3055 (android.request.availableCapabilities lists either of those capabilities) and it will 3056 be the default mode for CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template. 3057 </notes></value> 3058 </enum> 3059 <description>Operation mode for edge 3060 enhancement.</description> 3061 <range>android.edge.availableEdgeModes</range> 3062 <details>Edge enhancement improves sharpness and details in the captured image. OFF means 3063 no enhancement will be applied by the camera device. 3064 3065 FAST/HIGH_QUALITY both mean camera device determined enhancement 3066 will be applied. HIGH_QUALITY mode indicates that the 3067 camera device will use the highest-quality enhancement algorithms, 3068 even if it slows down capture rate. FAST means the camera device will 3069 not slow down capture rate when applying edge enhancement. FAST may be the same as OFF if 3070 edge enhancement will slow down capture rate. Every output stream will have a similar 3071 amount of enhancement applied. 3072 3073 ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular 3074 buffer of high-resolution images during preview and reprocess image(s) from that buffer 3075 into a final capture when triggered by the user. In this mode, the camera device applies 3076 edge enhancement to low-resolution streams (below maximum recording resolution) to 3077 maximize preview quality, but does not apply edge enhancement to high-resolution streams, 3078 since those will be reprocessed later if necessary. 3079 3080 For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera 3081 device will apply FAST/HIGH_QUALITY YUV-domain edge enhancement, respectively. 
3082 The camera device may adjust its internal edge enhancement parameters for best 3083 image quality based on the android.reprocess.effectiveExposureFactor, if it is set. 3084 </details> 3085 <hal_details> 3086 For YUV_REPROCESSING, the HAL can use android.reprocess.effectiveExposureFactor to 3087 adjust the internal edge enhancement parameters appropriately to get the best 3088 quality images. 3089 </hal_details> 3090 <tag id="V1" /> 3091 <tag id="REPROC" /> 3092 </entry> 3093 <entry name="strength" type="byte"> 3094 <description>Control the amount of edge enhancement 3095 applied to the images</description> 3096 <units>1-10; 10 is maximum sharpening</units> 3097 <tag id="FUTURE" /> 3098 </entry> 3099 </controls> 3100 <static> 3101 <entry name="availableEdgeModes" type="byte" visibility="public" 3102 type_notes="list of enums" container="array" typedef="enumList" 3103 hwlevel="full"> 3104 <array> 3105 <size>n</size> 3106 </array> 3107 <description> 3108 List of edge enhancement modes for android.edge.mode that are supported by this camera 3109 device. 3110 </description> 3111 <range>Any value listed in android.edge.mode</range> 3112 <details> 3113 Full-capability camera devices must always support OFF; camera devices that support 3114 YUV_REPROCESSING or PRIVATE_REPROCESSING will list ZERO_SHUTTER_LAG; all devices will 3115 list FAST. 3116 </details> 3117 <hal_details> 3118 HAL must support both FAST and HIGH_QUALITY if edge enhancement control is available 3119 on the camera device, but the underlying implementation can be the same for both modes. 3120 That is, if the highest quality implementation on the camera device does not slow down 3121 capture rate, then FAST and HIGH_QUALITY will generate the same output. 3122 </hal_details> 3123 <tag id="V1" /> 3124 <tag id="REPROC" /> 3125 </entry> 3126 </static> 3127 <dynamic> 3128 <clone entry="android.edge.mode" kind="controls"> 3129 <tag id="V1" /> 3130 <tag id="REPROC" /> 3131 </clone> 3132 </dynamic> 3133 </section> 3134 <section name="flash"> 3135 <controls> 3136 <entry name="firingPower" type="byte"> 3137 <description>Power for flash firing/torch</description> 3138 <units>10 is max power; 0 is no flash. Linear</units> 3139 <range>0 - 10</range> 3140 <details>Power for snapshot may use a different scale than 3141 for torch mode. Only one entry for torch mode will be 3142 used</details> 3143 <tag id="FUTURE" /> 3144 </entry> 3145 <entry name="firingTime" type="int64"> 3146 <description>Firing time of flash relative to start of 3147 exposure</description> 3148 <units>nanoseconds</units> 3149 <range>0-(exposure time-flash duration)</range> 3150 <details>Clamped to (0, exposure time - flash 3151 duration).</details> 3152 <tag id="FUTURE" /> 3153 </entry> 3154 <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="legacy"> 3155 <enum> 3156 <value>OFF 3157 <notes> 3158 Do not fire the flash for this capture. 3159 </notes> 3160 </value> 3161 <value>SINGLE 3162 <notes> 3163 If the flash is available and charged, fire the flash 3164 for this capture. 3165 </notes> 3166 </value> 3167 <value>TORCH 3168 <notes> 3169 Transition flash to continuously on. 3170 </notes> 3171 </value> 3172 </enum> 3173 <description>The desired mode for the camera device's flash control.</description> 3174 <details> 3175 This control is only effective when the flash unit is available 3176 (`android.flash.info.available == true`). 3177 3178 When this control is used, the android.control.aeMode must be set to ON or OFF.
3179 Otherwise, the camera device auto-exposure related flash control (ON_AUTO_FLASH, 3180 ON_ALWAYS_FLASH, or ON_AUTO_FLASH_REDEYE) will override this control. 3181 3182 When set to OFF, the camera device will not fire the flash for this capture. 3183 3184 When set to SINGLE, the camera device will fire the flash regardless of the camera 3185 device's auto-exposure routine's result. When used in the still capture case, this 3186 control should be used along with the auto-exposure (AE) precapture metering sequence 3187 (android.control.aePrecaptureTrigger); otherwise, the image may be incorrectly exposed. 3188 3189 When set to TORCH, the flash will be on continuously. This mode can be used 3190 for use cases such as preview, auto-focus assist, still capture, or video recording. 3191 3192 The flash status will be reported by android.flash.state in the capture result metadata. 3193 </details> 3194 <tag id="BC" /> 3195 </entry> 3196 </controls> 3197 <static> 3198 <namespace name="info"> 3199 <entry name="available" type="byte" visibility="public" enum="true" 3200 typedef="boolean" hwlevel="legacy"> 3201 <enum> 3202 <value>FALSE</value> 3203 <value>TRUE</value> 3204 </enum> 3205 <description>Whether this camera device has a 3206 flash unit.</description> 3207 <details> 3208 Will be `false` if no flash is available. 3209 3210 If there is no flash unit, none of the flash controls do 3211 anything.</details> 3212 <tag id="BC" /> 3213 </entry> 3214 <entry name="chargeDuration" type="int64"> 3215 <description>Time taken before flash can fire 3216 again</description> 3217 <units>nanoseconds</units> 3218 <range>0-1e9</range> 3219 <details>1 second too long/too short for recharge? Should 3220 this be power-dependent?</details> 3221 <tag id="FUTURE" /> 3222 </entry> 3223 </namespace> 3224 <entry name="colorTemperature" type="byte"> 3225 <description>The x,y whitepoint of the 3226 flash</description> 3227 <units>pair of floats</units> 3228 <range>0-1 for both</range> 3229 <tag id="FUTURE" /> 3230 </entry> 3231 <entry name="maxEnergy" type="byte"> 3232 <description>Max energy output of the flash for a full 3233 power single flash</description> 3234 <units>lumen-seconds</units> 3235 <range>&gt;= 0</range> 3236 <tag id="FUTURE" /> 3237 </entry> 3238 </static> 3239 <dynamic> 3240 <clone entry="android.flash.firingPower" kind="controls"> 3241 </clone> 3242 <clone entry="android.flash.firingTime" kind="controls"> 3243 </clone> 3244 <clone entry="android.flash.mode" kind="controls"></clone> 3245 <entry name="state" type="byte" visibility="public" enum="true" 3246 hwlevel="limited"> 3247 <enum> 3248 <value>UNAVAILABLE 3249 <notes>No flash on camera.</notes></value> 3250 <value>CHARGING 3251 <notes>Flash is charging and cannot be fired.</notes></value> 3252 <value>READY 3253 <notes>Flash is ready to fire.</notes></value> 3254 <value>FIRED 3255 <notes>Flash fired for this capture.</notes></value> 3256 <value>PARTIAL 3257 <notes>Flash partially illuminated this frame. 3258 3259 This is usually due to the next or previous frame having 3260 the flash fire, and the flash spilling into this capture 3261 due to hardware limitations.</notes></value> 3262 </enum> 3263 <description>Current state of the flash 3264 unit.</description> 3265 <details> 3266 When the camera device doesn't have a flash unit 3267 (i.e. `android.flash.info.available == false`), this state will always be UNAVAILABLE. 3268 Other states indicate the current flash status.
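As an illustrative sketch only (the `result` object below is assumed to come from an application's capture callback and is not defined by this entry), an application might check whether the flash actually fired for a given frame:

    Integer flashState = result.get(CaptureResult.FLASH_STATE);
    if (flashState != null && flashState == CaptureResult.FLASH_STATE_FIRED) {
        // The flash unit fired during the exposure of this frame.
    }

A `null` value should be handled, since this state is not always available (see the LEGACY conditions below).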
3269 3270 In certain conditions, this will be available on LEGACY devices: 3271 3272 * Flash-less cameras always return UNAVAILABLE. 3273 * Using android.control.aeMode `==` ON_ALWAYS_FLASH 3274 will always return FIRED. 3275 * Using android.flash.mode `==` TORCH 3276 will always return FIRED. 3277 3278 In all other conditions the state will not be available on 3279 LEGACY devices (i.e. it will be `null`). 3280 </details> 3281 </entry> 3282 </dynamic> 3283 </section> 3284 <section name="hotPixel"> 3285 <controls> 3286 <entry name="mode" type="byte" visibility="public" enum="true"> 3287 <enum> 3288 <value>OFF 3289 <notes> 3290 No hot pixel correction is applied. 3291 3292 The frame rate must not be reduced relative to sensor raw output 3293 for this option. 3294 3295 The hotpixel map may be returned in android.statistics.hotPixelMap. 3296 </notes> 3297 </value> 3298 <value>FAST 3299 <notes> 3300 Hot pixel correction is applied, without reducing frame 3301 rate relative to sensor raw output. 3302 3303 The hotpixel map may be returned in android.statistics.hotPixelMap. 3304 </notes> 3305 </value> 3306 <value>HIGH_QUALITY 3307 <notes> 3308 High-quality hot pixel correction is applied, at a cost 3309 of possibly reduced frame rate relative to sensor raw output. 3310 3311 The hotpixel map may be returned in android.statistics.hotPixelMap. 3312 </notes> 3313 </value> 3314 </enum> 3315 <description> 3316 Operational mode for hot pixel correction. 3317 </description> 3318 <range>android.hotPixel.availableHotPixelModes</range> 3319 <details> 3320 Hotpixel correction interpolates out, or otherwise removes, pixels 3321 that do not accurately measure the incoming light (i.e. pixels that 3322 are stuck at an arbitrary value or are oversensitive). 3323 </details> 3324 <tag id="V1" /> 3325 <tag id="RAW" /> 3326 </entry> 3327 </controls> 3328 <static> 3329 <entry name="availableHotPixelModes" type="byte" visibility="public" 3330 type_notes="list of enums" container="array" typedef="enumList"> 3331 <array> 3332 <size>n</size> 3333 </array> 3334 <description> 3335 List of hot pixel correction modes for android.hotPixel.mode that are supported by this 3336 camera device. 3337 </description> 3338 <range>Any value listed in android.hotPixel.mode</range> 3339 <details> 3340 FULL mode camera devices will always support FAST. 3341 </details> 3342 <hal_details> 3343 To avoid performance issues, there will be significantly fewer hot 3344 pixels than actual pixels on the camera sensor. 3345 HAL must support both FAST and HIGH_QUALITY if hot pixel correction control is available 3346 on the camera device, but the underlying implementation can be the same for both modes. 3347 That is, if the highest quality implementation on the camera device does not slow down 3348 capture rate, then FAST and HIGH_QUALITY will generate the same output. 3349 </hal_details> 3350 <tag id="V1" /> 3351 <tag id="RAW" /> 3352 </entry> 3353 </static> 3354 <dynamic> 3355 <clone entry="android.hotPixel.mode" kind="controls"> 3356 <tag id="V1" /> 3357 <tag id="RAW" /> 3358 </clone> 3359 </dynamic> 3360 </section> 3361 <section name="jpeg"> 3362 <controls> 3363 <entry name="gpsLocation" type="byte" visibility="java_public" synthetic="true" 3364 typedef="location" hwlevel="legacy"> 3365 <description> 3366 A location object to use when generating image GPS metadata. 3367 </description> 3368 <details> 3369 Setting a location object in a request will include the GPS coordinates of the location 3370 into any JPEG images captured based on the request. 
These coordinates can then be 3371 viewed by anyone who receives the JPEG image. 3372 3373 This tag is also used for HEIC image capture. 3374 </details> 3375 </entry> 3376 <entry name="gpsCoordinates" type="double" visibility="ndk_public" 3377 type_notes="latitude, longitude, altitude. First two in degrees, the third in meters" 3378 container="array" hwlevel="legacy"> 3379 <array> 3380 <size>3</size> 3381 </array> 3382 <description>GPS coordinates to include in output JPEG 3383 EXIF.</description> 3384 <range>(-180 - 180], [-90,90], [-inf, inf]</range> 3385 <details>This tag is also used for HEIC image capture.</details> 3386 <tag id="BC" /> 3387 </entry> 3388 <entry name="gpsProcessingMethod" type="byte" visibility="ndk_public" 3389 typedef="string" hwlevel="legacy"> 3390 <description>32 characters describing GPS algorithm to 3391 include in EXIF.</description> 3392 <units>UTF-8 null-terminated string</units> 3393 <details>This tag is also used for HEIC image capture.</details> 3394 <tag id="BC" /> 3395 </entry> 3396 <entry name="gpsTimestamp" type="int64" visibility="ndk_public" hwlevel="legacy"> 3397 <description>Time GPS fix was made to include in 3398 EXIF.</description> 3399 <units>UTC in seconds since January 1, 1970</units> 3400 <details>This tag is also used for HEIC image capture.</details> 3401 <tag id="BC" /> 3402 </entry> 3403 <entry name="orientation" type="int32" visibility="public" hwlevel="legacy"> 3404 <description>The orientation for a JPEG image.</description> 3405 <units>Degrees in multiples of 90</units> 3406 <range>0, 90, 180, 270</range> 3407 <details> 3408 The clockwise rotation angle in degrees, relative to the orientation 3409 to the camera, that the JPEG picture needs to be rotated by, to be viewed 3410 upright. 3411 3412 Camera devices may either encode this value into the JPEG EXIF header, or 3413 rotate the image data to match this orientation. When the image data is rotated, 3414 the thumbnail data will also be rotated. 3415 3416 Note that this orientation is relative to the orientation of the camera sensor, given 3417 by android.sensor.orientation. 3418 3419 To translate from the device orientation given by the Android sensor APIs for camera 3420 sensors which are not EXTERNAL, the following sample code may be used: 3421 3422 private int getJpegOrientation(CameraCharacteristics c, int deviceOrientation) { 3423 if (deviceOrientation == android.view.OrientationEventListener.ORIENTATION_UNKNOWN) return 0; 3424 int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION); 3425 3426 // Round device orientation to a multiple of 90 3427 deviceOrientation = (deviceOrientation + 45) / 90 * 90; 3428 3429 // Reverse device orientation for front-facing cameras 3430 boolean facingFront = c.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT; 3431 if (facingFront) deviceOrientation = -deviceOrientation; 3432 3433 // Calculate desired JPEG orientation relative to camera orientation to make 3434 // the image upright relative to the device orientation 3435 int jpegOrientation = (sensorOrientation + deviceOrientation + 360) % 360; 3436 3437 return jpegOrientation; 3438 } 3439 3440 For EXTERNAL cameras the sensor orientation will always be set to 0 and the facing will 3441 also be set to EXTERNAL. The above code is not relevant in such case. 
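As a usage sketch (not part of this entry's definition; `captureBuilder`, `characteristics`, and `deviceOrientation` are assumed to already exist in the application), the helper above might be applied when building a still-capture request:

    // Hypothetical usage: ask the camera device to produce an upright JPEG.
    captureBuilder.set(CaptureRequest.JPEG_ORIENTATION,
            getJpegOrientation(characteristics, deviceOrientation));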
3442 3443 This tag is also used to describe the orientation of the HEIC image capture, in which 3444 case the rotation is reflected by 3445 {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}, and not by 3446 rotating the image data itself. 3447 </details> 3448 <tag id="BC" /> 3449 </entry> 3450 <entry name="quality" type="byte" visibility="public" hwlevel="legacy"> 3451 <description>Compression quality of the final JPEG 3452 image.</description> 3453 <range>1-100; larger is higher quality</range> 3454 <details>85-95 is typical usage range. This tag is also used to describe the quality 3455 of the HEIC image capture.</details> 3456 <tag id="BC" /> 3457 </entry> 3458 <entry name="thumbnailQuality" type="byte" visibility="public" hwlevel="legacy"> 3459 <description>Compression quality of JPEG 3460 thumbnail.</description> 3461 <range>1-100; larger is higher quality</range> 3462 <details>This tag is also used to describe the quality of the HEIC image capture.</details> 3463 <tag id="BC" /> 3464 </entry> 3465 <entry name="thumbnailSize" type="int32" visibility="public" 3466 container="array" typedef="size" hwlevel="legacy"> 3467 <array> 3468 <size>2</size> 3469 </array> 3470 <description>Resolution of embedded JPEG thumbnail.</description> 3471 <range>android.jpeg.availableThumbnailSizes</range> 3472 <details>When set to the (0, 0) value, the JPEG EXIF will not contain a thumbnail, 3473 but the captured JPEG will still be a valid image. 3474 3475 For best results, when issuing a request for a JPEG image, the thumbnail size selected 3476 should have the same aspect ratio as the main JPEG output. 3477 3478 If the thumbnail image aspect ratio differs from the JPEG primary image aspect 3479 ratio, the camera device creates the thumbnail by cropping it from the primary image. 3480 For example, if the primary image has a 4:3 aspect ratio and the thumbnail image has a 3481 16:9 aspect ratio, the primary image will be cropped vertically (letterbox) to 3482 generate the thumbnail image. The thumbnail image will always have a smaller Field 3483 Of View (FOV) than the primary image when aspect ratios differ. 3484 3485 When a non-zero android.jpeg.orientation is requested, 3486 the camera device will handle thumbnail rotation in one of the following ways: 3487 3488 * Set the {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag} 3489 and keep jpeg and thumbnail image data unrotated. 3490 * Rotate the jpeg and thumbnail image data and not set 3491 {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}. In this 3492 case, LIMITED or FULL hardware level devices will report the rotated thumbnail size in 3493 the capture result, so the width and height will be interchanged if 90 or 270 degree 3494 orientation is requested. LEGACY devices will always report the unrotated thumbnail 3495 size. 3496 3497 The tag is also used as the thumbnail size for HEIC image format capture, in which case 3498 the thumbnail rotation is reflected by the 3499 {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}, and not by 3500 rotating the thumbnail data itself. 3501 </details> 3502 <hal_details> 3503 The HAL must not squeeze or stretch the downscaled primary image to generate the thumbnail. 3504 The cropping must be done on the primary jpeg image rather than the sensor pre-correction 3505 active array. The stream cropping rule specified by "S5. Cropping" in camera3.h doesn't 3506 apply to the thumbnail image cropping.
3507 </hal_details> 3508 <tag id="BC" /> 3509 </entry> 3510 </controls> 3511 <static> 3512 <entry name="availableThumbnailSizes" type="int32" visibility="public" 3513 container="array" typedef="size" hwlevel="legacy"> 3514 <array> 3515 <size>2</size> 3516 <size>n</size> 3517 </array> 3518 <description>List of JPEG thumbnail sizes for android.jpeg.thumbnailSize supported by this 3519 camera device.</description> 3520 <details> 3521 This list will include at least one non-zero resolution, plus `(0,0)` for indicating no 3522 thumbnail should be generated. 3523 3524 The following conditions will be satisfied for this size list: 3525 3526 * The sizes will be sorted by increasing pixel area (width x height). 3527 If several resolutions have the same area, they will be sorted by increasing width. 3528 * The aspect ratio of the largest thumbnail size will be the same as the 3529 aspect ratio of the largest JPEG output size in android.scaler.availableStreamConfigurations. 3530 The largest size is defined as the size that has the largest pixel area 3531 in a given size list. 3532 * Each output JPEG size in android.scaler.availableStreamConfigurations will have at least 3533 one corresponding size that has the same aspect ratio in availableThumbnailSizes, 3534 and vice versa. 3535 * All non-`(0, 0)` sizes will have non-zero widths and heights. 3536 3537 This list is also used as supported thumbnail sizes for HEIC image format capture. 3538 </details> 3539 <tag id="BC" /> 3540 </entry> 3541 <entry name="maxSize" type="int32" visibility="system"> 3542 <description>Maximum size in bytes for the compressed 3543 JPEG buffer</description> 3544 <range>Must be large enough to fit any JPEG produced by 3545 the camera</range> 3546 <details>This is used for sizing the gralloc buffers for 3547 JPEG</details> 3548 </entry> 3549 </static> 3550 <dynamic> 3551 <clone entry="android.jpeg.gpsLocation" kind="controls"> 3552 </clone> 3553 <clone entry="android.jpeg.gpsCoordinates" kind="controls"> 3554 </clone> 3555 <clone entry="android.jpeg.gpsProcessingMethod" 3556 kind="controls"></clone> 3557 <clone entry="android.jpeg.gpsTimestamp" kind="controls"> 3558 </clone> 3559 <clone entry="android.jpeg.orientation" kind="controls"> 3560 </clone> 3561 <clone entry="android.jpeg.quality" kind="controls"> 3562 </clone> 3563 <entry name="size" type="int32"> 3564 <description>The size of the compressed JPEG image, in 3565 bytes</description> 3566 <range>&gt;= 0</range> 3567 <details>If no JPEG output is produced for the request, 3568 this must be 0. 3569 3570 Otherwise, this describes the real size of the compressed 3571 JPEG image placed in the output stream.
More specifically, 3572 if android.jpeg.maxSize = 1000000, and a specific capture 3573 has android.jpeg.size = 500000, then the output buffer from 3574 the JPEG stream will be 1000000 bytes, of which the first 3575 500000 make up the real data.</details> 3576 <tag id="FUTURE" /> 3577 </entry> 3578 <clone entry="android.jpeg.thumbnailQuality" 3579 kind="controls"></clone> 3580 <clone entry="android.jpeg.thumbnailSize" kind="controls"> 3581 </clone> 3582 </dynamic> 3583 </section> 3584 <section name="lens"> 3585 <controls> 3586 <entry name="aperture" type="float" visibility="public" hwlevel="full"> 3587 <description>The desired lens aperture size, as a ratio of lens focal length to the 3588 effective aperture diameter.</description> 3589 <units>The f-number (f/N)</units> 3590 <range>android.lens.info.availableApertures</range> 3591 <details>Setting this value is only supported on the camera devices that have a variable 3592 aperture lens. 3593 3594 When this is supported and android.control.aeMode is OFF, 3595 this can be set along with android.sensor.exposureTime, 3596 android.sensor.sensitivity, and android.sensor.frameDuration 3597 to achieve manual exposure control. 3598 3599 The aperture value may take several frames to reach the 3600 requested value; the camera device will report the current (intermediate) 3601 aperture size in capture result metadata while the aperture is changing. 3602 While the aperture is still changing, android.lens.state will be set to MOVING. 3603 3604 When this is supported and android.control.aeMode is one of 3605 the ON modes, this will be overridden by the camera device 3606 auto-exposure algorithm; the overridden values are then provided 3607 back to the user in the corresponding result.</details> 3608 <tag id="V1" /> 3609 </entry> 3610 <entry name="filterDensity" type="float" visibility="public" hwlevel="full"> 3611 <description> 3612 The desired setting for the lens neutral density filter(s). 3613 </description> 3614 <units>Exposure Value (EV)</units> 3615 <range>android.lens.info.availableFilterDensities</range> 3616 <details> 3617 This control will not be supported on most camera devices. 3618 3619 Lens filters are typically used to lower the amount of light the 3620 sensor is exposed to (measured in steps of EV). As used here, an EV 3621 step is the standard logarithmic representation, which is 3622 non-negative and inversely proportional to the amount of light 3623 hitting the sensor. For example, setting this to 0 would result 3624 in no reduction of the incoming light, and setting this to 2 would 3625 mean that the filter is set to reduce incoming light by two stops 3626 (allowing 1/4 of the prior amount of light to the sensor). 3627 3628 It may take several frames before the lens filter density changes 3629 to the requested value. While the filter density is still changing, 3630 android.lens.state will be set to MOVING. 3631 </details> 3632 <tag id="V1" /> 3633 </entry> 3634 <entry name="focalLength" type="float" visibility="public" hwlevel="legacy"> 3635 <description> 3636 The desired lens focal length; used for optical zoom. 3637 </description> 3638 <units>Millimeters</units> 3639 <range>android.lens.info.availableFocalLengths</range> 3640 <details> 3641 This setting controls the physical focal length of the camera 3642 device's lens. Changing the focal length changes the field of 3643 view of the camera device, and is usually used for optical zoom.
3644 3645 Like android.lens.focusDistance and android.lens.aperture, this 3646 setting won't be applied instantaneously, and it may take several 3647 frames before the lens can change to the requested focal length. 3648 While the focal length is still changing, android.lens.state will 3649 be set to MOVING. 3650 3651 Optical zoom will not be supported on most devices. 3652 </details> 3653 <hal_details> 3654 For a logical camera device supporting both optical and digital zoom, if focalLength and 3655 cropRegion change in the same request, the camera device must make sure that the new 3656 focalLength and cropRegion take effect in the same frame. This is to make sure that there 3657 is no visible field-of-view jump during zoom. For example, if cropRegion is applied 3658 immediately, but focalLength takes more than 1 frame to take effect, the camera device 3659 will delay the cropRegion so that it's synchronized with focalLength. 3660 </hal_details> 3661 <tag id="V1" /> 3662 </entry> 3663 <entry name="focusDistance" type="float" visibility="public" hwlevel="full"> 3664 <description>Desired distance to plane of sharpest focus, 3665 measured from frontmost surface of the lens.</description> 3666 <units>See android.lens.info.focusDistanceCalibration for details</units> 3667 <range>&gt;= 0</range> 3668 <details> 3669 This control can be used for setting manual focus, on devices that support 3670 the MANUAL_SENSOR capability and have a variable-focus lens (see 3671 android.lens.info.minimumFocusDistance). 3672 3673 A value of `0.0f` means infinity focus. The value set will be clamped to 3674 `[0.0f, android.lens.info.minimumFocusDistance]`. 3675 3676 Like android.lens.focalLength, this setting won't be applied 3677 instantaneously, and it may take several frames before the lens 3678 can move to the requested focus distance. While the lens is still moving, 3679 android.lens.state will be set to MOVING. 3680 3681 LEGACY devices support at most setting this to `0.0f` 3682 for infinity focus. 3683 </details> 3684 <tag id="BC" /> 3685 <tag id="V1" /> 3686 </entry> 3687 <entry name="opticalStabilizationMode" type="byte" visibility="public" 3688 enum="true" hwlevel="limited"> 3689 <enum> 3690 <value>OFF 3691 <notes>Optical stabilization is unavailable.</notes> 3692 </value> 3693 <value optional="true">ON 3694 <notes>Optical stabilization is enabled.</notes> 3695 </value> 3696 </enum> 3697 <description> 3698 Sets whether the camera device uses optical image stabilization (OIS) 3699 when capturing images. 3700 </description> 3701 <range>android.lens.info.availableOpticalStabilization</range> 3702 <details> 3703 OIS is used to compensate for motion blur due to small 3704 movements of the camera during capture. Unlike digital image 3705 stabilization (android.control.videoStabilizationMode), OIS 3706 makes use of mechanical elements to stabilize the camera 3707 sensor, and thus allows for longer exposure times before 3708 camera shake becomes apparent. 3709 3710 Switching between different optical stabilization modes may take several 3711 frames to initialize; the camera device will report the current mode in 3712 capture result metadata. For example, when "ON" mode is requested, the 3713 optical stabilization modes in the first several capture results may still 3714 be "OFF", and it will become "ON" when the initialization is done.
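A minimal, illustrative sketch (assuming application-side `characteristics` and `previewBuilder` objects, which are not defined by this entry) of requesting OIS only when the ON mode is advertised:

    int[] oisModes = characteristics.get(
            CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION);
    if (oisModes != null) {
        for (int mode : oisModes) {
            if (mode == CameraMetadata.LENS_OPTICAL_STABILIZATION_MODE_ON) {
                // ON is advertised; request it for the repeating preview request.
                previewBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, mode);
                break;
            }
        }
    }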
3715 3716 If a camera device supports both OIS and digital image stabilization 3717 (android.control.videoStabilizationMode), turning both modes on may produce undesirable 3718 interaction, so it is recommended not to enable both at the same time. 3719 3720 Not all devices will support OIS; see 3721 android.lens.info.availableOpticalStabilization for 3722 available controls. 3723 </details> 3724 <tag id="V1" /> 3725 </entry> 3726 </controls> 3727 <static> 3728 <namespace name="info"> 3729 <entry name="availableApertures" type="float" visibility="public" 3730 container="array" hwlevel="full"> 3731 <array> 3732 <size>n</size> 3733 </array> 3734 <description>List of aperture size values for android.lens.aperture that are 3735 supported by this camera device.</description> 3736 <units>The aperture f-number</units> 3737 <details>If the camera device doesn't support a variable lens aperture, 3738 this list will contain only one value, which is the fixed aperture size. 3739 3740 If the camera device supports a variable aperture, the aperture values 3741 in this list will be sorted in ascending order.</details> 3742 <tag id="V1" /> 3743 </entry> 3744 <entry name="availableFilterDensities" type="float" visibility="public" 3745 container="array" hwlevel="full"> 3746 <array> 3747 <size>n</size> 3748 </array> 3749 <description> 3750 List of neutral density filter values for 3751 android.lens.filterDensity that are supported by this camera device. 3752 </description> 3753 <units>Exposure value (EV)</units> 3754 <range> 3755 Values are &gt;= 0 3756 </range> 3757 <details> 3758 If a neutral density filter is not supported by this camera device, 3759 this list will contain only 0. Otherwise, this list will include every 3760 filter density supported by the camera device, in ascending order. 3761 </details> 3762 <tag id="V1" /> 3763 </entry> 3764 <entry name="availableFocalLengths" type="float" visibility="public" 3765 type_notes="The list of available focal lengths" 3766 container="array" hwlevel="legacy"> 3767 <array> 3768 <size>n</size> 3769 </array> 3770 <description> 3771 List of focal lengths for android.lens.focalLength that are supported by this camera 3772 device. 3773 </description> 3774 <units>Millimeters</units> 3775 <range> 3776 Values are &gt; 0 3777 </range> 3778 <details> 3779 If optical zoom is not supported, this list will only contain 3780 a single value corresponding to the fixed focal length of the 3781 device. Otherwise, this list will include every focal length supported 3782 by the camera device, in ascending order. 3783 </details> 3784 <tag id="BC" /> 3785 <tag id="V1" /> 3786 </entry> 3787 <entry name="availableOpticalStabilization" type="byte" 3788 visibility="public" type_notes="list of enums" container="array" 3789 typedef="enumList" hwlevel="limited"> 3790 <array> 3791 <size>n</size> 3792 </array> 3793 <description> 3794 List of optical image stabilization (OIS) modes for 3795 android.lens.opticalStabilizationMode that are supported by this camera device. 3796 </description> 3797 <range>Any value listed in android.lens.opticalStabilizationMode</range> 3798 <details> 3799 If OIS is not supported by a given camera device, this list will 3800 contain only OFF. 
3801 </details> 3802 <tag id="V1" /> 3803 </entry> 3804 <entry name="hyperfocalDistance" type="float" visibility="public" optional="true" 3805 hwlevel="limited" permission_needed="true"> 3806 <description>Hyperfocal distance for this lens.</description> 3807 <units>See android.lens.info.focusDistanceCalibration for details</units> 3808 <range>If lens is fixed focus, &gt;= 0. If lens has focuser unit, the value is 3809 within `(0.0f, android.lens.info.minimumFocusDistance]`</range> 3810 <details> 3811 If the lens is not fixed focus, the camera device will report this 3812 field when android.lens.info.focusDistanceCalibration is APPROXIMATE or CALIBRATED. 3813 </details> 3814 </entry> 3815 <entry name="minimumFocusDistance" type="float" visibility="public" optional="true" 3816 hwlevel="limited" permission_needed="true"> 3817 <description>Shortest distance from frontmost surface 3818 of the lens that can be brought into sharp focus.</description> 3819 <units>See android.lens.info.focusDistanceCalibration for details</units> 3820 <range>&gt;= 0</range> 3821 <details>If the lens is fixed-focus, this will be 3822 0.</details> 3823 <hal_details>Mandatory for FULL devices; LIMITED devices 3824 must always set this value to 0 for fixed-focus; and may omit 3825 the minimum focus distance otherwise. 3826 3827 This field is also mandatory for all devices advertising 3828 the MANUAL_SENSOR capability.</hal_details> 3829 <tag id="V1" /> 3830 </entry> 3831 <entry name="shadingMapSize" type="int32" visibility="ndk_public" 3832 type_notes="width and height (N, M) of lens shading map provided by the camera device." 3833 container="array" typedef="size" hwlevel="full"> 3834 <array> 3835 <size>2</size> 3836 </array> 3837 <description>Dimensions of lens shading map.</description> 3838 <range>Both values &gt;= 1</range> 3839 <details> 3840 The map should be on the order of 30-40 rows and columns, and 3841 must be smaller than 64x64. 3842 </details> 3843 <tag id="V1" /> 3844 </entry> 3845 <entry name="focusDistanceCalibration" type="byte" visibility="public" 3846 enum="true" hwlevel="limited"> 3847 <enum> 3848 <value>UNCALIBRATED 3849 <notes> 3850 The lens focus distance is not accurate, and the units used for 3851 android.lens.focusDistance do not correspond to any physical units. 3852 3853 Setting the lens to the same focus distance on separate occasions may 3854 result in a different real focus distance, depending on factors such 3855 as the orientation of the device, the age of the focusing mechanism, 3856 and the device temperature. The focus distance value will still be 3857 in the range of `[0, android.lens.info.minimumFocusDistance]`, where 0 3858 represents the farthest focus. 3859 </notes> 3860 </value> 3861 <value>APPROXIMATE 3862 <notes> 3863 The lens focus distance is measured in diopters. 3864 3865 However, setting the lens to the same focus distance 3866 on separate occasions may result in a different real 3867 focus distance, depending on factors such as the 3868 orientation of the device, the age of the focusing 3869 mechanism, and the device temperature. 3870 </notes> 3871 </value> 3872 <value>CALIBRATED 3873 <notes> 3874 The lens focus distance is measured in diopters, and 3875 is calibrated. 3876 3877 The lens mechanism is calibrated so that setting the 3878 same focus distance is repeatable on multiple 3879 occasions with good accuracy, and the focus distance 3880 corresponds to the real physical distance to the plane 3881 of best focus. 
3882 </notes> 3883 </value> 3884 </enum> 3885 <description>The lens focus distance calibration quality.</description> 3886 <details> 3887 The lens focus distance calibration quality determines the reliability of 3888 focus related metadata entries, i.e. android.lens.focusDistance, 3889 android.lens.focusRange, android.lens.info.hyperfocalDistance, and 3890 android.lens.info.minimumFocusDistance. 3891 3892 APPROXIMATE and CALIBRATED devices report the focus metadata in 3893 units of diopters (1/meter), so `0.0f` represents focusing at infinity, 3894 and increasing positive numbers represent focusing closer and closer 3895 to the camera device. The focus distance control also uses diopters 3896 on these devices. 3897 3898 UNCALIBRATED devices do not use units that are directly comparable 3899 to any real physical measurement, but `0.0f` still represents farthest 3900 focus, and android.lens.info.minimumFocusDistance represents the 3901 nearest focus the device can achieve. 3902 </details> 3903 <hal_details> 3904 For devices advertise APPROXIMATE quality or higher, diopters 0 (infinity 3905 focus) must work. When autofocus is disabled (android.control.afMode == OFF) 3906 and the lens focus distance is set to 0 diopters 3907 (android.lens.focusDistance == 0), the lens will move to focus at infinity 3908 and is stably focused at infinity even if the device tilts. It may take the 3909 lens some time to move; during the move the lens state should be MOVING and 3910 the output diopter value should be changing toward 0. 3911 </hal_details> 3912 <tag id="V1" /> 3913 </entry> 3914 </namespace> 3915 <entry name="facing" type="byte" visibility="public" enum="true" hwlevel="legacy"> 3916 <enum> 3917 <value>FRONT 3918 <notes> 3919 The camera device faces the same direction as the device's screen. 3920 </notes></value> 3921 <value>BACK 3922 <notes> 3923 The camera device faces the opposite direction as the device's screen. 3924 </notes></value> 3925 <value>EXTERNAL 3926 <notes> 3927 The camera device is an external camera, and has no fixed facing relative to the 3928 device's screen. 3929 </notes></value> 3930 </enum> 3931 <description>Direction the camera faces relative to 3932 device screen.</description> 3933 </entry> 3934 <entry name="poseRotation" type="float" visibility="public" 3935 container="array" permission_needed="true"> 3936 <array> 3937 <size>4</size> 3938 </array> 3939 <description> 3940 The orientation of the camera relative to the sensor 3941 coordinate system. 3942 </description> 3943 <units> 3944 Quaternion coefficients 3945 </units> 3946 <details> 3947 The four coefficients that describe the quaternion 3948 rotation from the Android sensor coordinate system to a 3949 camera-aligned coordinate system where the X-axis is 3950 aligned with the long side of the image sensor, the Y-axis 3951 is aligned with the short side of the image sensor, and 3952 the Z-axis is aligned with the optical axis of the sensor. 
3953 3954 To convert from the quaternion coefficients `(x,y,z,w)` 3955 to the axis of rotation `(a_x, a_y, a_z)` and rotation 3956 amount `theta`, the following formulas can be used: 3957 3958 theta = 2 * acos(w) 3959 a_x = x / sin(theta/2) 3960 a_y = y / sin(theta/2) 3961 a_z = z / sin(theta/2) 3962 3963 To create a 3x3 rotation matrix that applies the rotation 3964 defined by this quaternion, the following matrix can be 3965 used: 3966 3967 R = [ 1 - 2y^2 - 2z^2, 2xy - 2zw, 2xz + 2yw, 3968 2xy + 2zw, 1 - 2x^2 - 2z^2, 2yz - 2xw, 3969 2xz - 2yw, 2yz + 2xw, 1 - 2x^2 - 2y^2 ] 3970 3971 This matrix can then be used to apply the rotation to a 3972 column vector point with 3973 3974 `p' = Rp` 3975 3976 where `p` is in the device sensor coordinate system, and 3977 `p'` is in the camera-oriented coordinate system. 3978 </details> 3979 <tag id="DEPTH" /> 3980 </entry> 3981 <entry name="poseTranslation" type="float" visibility="public" 3982 container="array" permission_needed="true"> 3983 <array> 3984 <size>3</size> 3985 </array> 3986 <description>Position of the camera optical center.</description> 3987 <units>Meters</units> 3988 <details> 3989 The position of the camera device's lens optical center, 3990 as a three-dimensional vector `(x,y,z)`. 3991 3992 Prior to Android P, or when android.lens.poseReference is PRIMARY_CAMERA, this position 3993 is relative to the optical center of the largest camera device facing in the same 3994 direction as this camera, in the {@link android.hardware.SensorEvent Android sensor 3995 coordinate axes}. Note that only the axis definitions are shared with the sensor 3996 coordinate system, but not the origin. 3997 3998 If this device is the largest or only camera device with a given facing, then this 3999 position will be `(0, 0, 0)`; a camera device with a lens optical center located 3 cm 4000 from the main sensor along the +X axis (to the right from the user's perspective) will 4001 report `(0.03, 0, 0)`. Note that this means that, for many computer vision 4002 applications, the position needs to be negated to convert it to a translation from the 4003 camera to the origin. 4004 4005 To transform a pixel coordinates between two cameras facing the same direction, first 4006 the source camera android.lens.distortion must be corrected for. Then the source 4007 camera android.lens.intrinsicCalibration needs to be applied, followed by the 4008 android.lens.poseRotation of the source camera, the translation of the source camera 4009 relative to the destination camera, the android.lens.poseRotation of the destination 4010 camera, and finally the inverse of android.lens.intrinsicCalibration of the destination 4011 camera. This obtains a radial-distortion-free coordinate in the destination camera pixel 4012 coordinates. 4013 4014 To compare this against a real image from the destination camera, the destination camera 4015 image then needs to be corrected for radial distortion before comparison or sampling. 4016 4017 When android.lens.poseReference is GYROSCOPE, then this position is relative to 4018 the center of the primary gyroscope on the device. The axis definitions are the same as 4019 with PRIMARY_CAMERA. 
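As an illustrative, non-normative sketch, a Java application could read both pose entries and expand the quaternion into the 3x3 rotation matrix `R` given under android.lens.poseRotation. Here `chars` is assumed to be the CameraCharacteristics of this camera:

    // Sketch: read the lens pose and build the rotation matrix R (row-major),
    // following the formulas listed for android.lens.poseRotation.
    float[] q = chars.get(CameraCharacteristics.LENS_POSE_ROTATION);     // {x, y, z, w}
    float[] t = chars.get(CameraCharacteristics.LENS_POSE_TRANSLATION);  // {x, y, z} in meters
    float x = q[0], y = q[1], z = q[2], w = q[3];
    float[] R = {
        1 - 2*y*y - 2*z*z,     2*x*y - 2*z*w,     2*x*z + 2*y*w,
            2*x*y + 2*z*w, 1 - 2*x*x - 2*z*z,     2*y*z - 2*x*w,
            2*x*z - 2*y*w,     2*y*z + 2*x*w, 1 - 2*x*x - 2*y*y };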
4020 </details> 4021 <tag id="DEPTH" /> 4022 </entry> 4023 </static> 4024 <dynamic> 4025 <clone entry="android.lens.aperture" kind="controls"> 4026 <tag id="V1" /> 4027 </clone> 4028 <clone entry="android.lens.filterDensity" kind="controls"> 4029 <tag id="V1" /> 4030 </clone> 4031 <clone entry="android.lens.focalLength" kind="controls"> 4032 <tag id="BC" /> 4033 </clone> 4034 <clone entry="android.lens.focusDistance" kind="controls"> 4035 <details>Should be zero for fixed-focus cameras</details> 4036 <tag id="BC" /> 4037 </clone> 4038 <entry name="focusRange" type="float" visibility="public" 4039 type_notes="Range of scene distances that are in focus" 4040 container="array" typedef="pairFloatFloat" hwlevel="limited"> 4041 <array> 4042 <size>2</size> 4043 </array> 4044 <description>The range of scene distances that are in 4045 sharp focus (depth of field).</description> 4046 <units>A pair of focus distances in diopters: (near, 4047 far); see android.lens.info.focusDistanceCalibration for details.</units> 4048 <range>&gt;=0</range> 4049 <details>If variable focus is not supported, the camera device can still report a 4050 fixed depth of field range</details> 4051 <tag id="BC" /> 4052 </entry> 4053 <clone entry="android.lens.opticalStabilizationMode" 4054 kind="controls"> 4055 <tag id="V1" /> 4056 </clone> 4057 <entry name="state" type="byte" visibility="public" enum="true" hwlevel="limited"> 4058 <enum> 4059 <value>STATIONARY 4060 <notes> 4061 The lens parameters (android.lens.focalLength, android.lens.focusDistance, 4062 android.lens.filterDensity and android.lens.aperture) are not changing. 4063 </notes> 4064 </value> 4065 <value>MOVING 4066 <notes> 4067 One or several of the lens parameters 4068 (android.lens.focalLength, android.lens.focusDistance, 4069 android.lens.filterDensity or android.lens.aperture) is 4070 currently changing. 4071 </notes> 4072 </value> 4073 </enum> 4074 <description>Current lens status.</description> 4075 <details> 4076 For lens parameters android.lens.focalLength, android.lens.focusDistance, 4077 android.lens.filterDensity and android.lens.aperture, when changes are requested, 4078 they may take several frames to reach the requested values. This state indicates 4079 the current status of the lens parameters. 4080 4081 When the state is STATIONARY, the lens parameters are not changing. This could be 4082 either because the parameters are all fixed, or because the lens has had enough 4083 time to reach the most recently-requested values. 4084 If none of these lens parameters are changeable for a camera device, as listed below: 4085 4086 * Fixed focus (`android.lens.info.minimumFocusDistance == 0`), which means the 4087 android.lens.focusDistance parameter will always be 0. 4088 * Fixed focal length (android.lens.info.availableFocalLengths contains a single value), 4089 which means optical zoom is not supported. 4090 * No ND filter (android.lens.info.availableFilterDensities contains only 0). 4091 * Fixed aperture (android.lens.info.availableApertures contains a single value). 4092 4093 then this state will always be STATIONARY. 4094 4095 When the state is MOVING, it indicates that at least one of the lens parameters 4096 is changing.
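As an illustrative sketch only, a Java application can observe this state in capture results and wait for the lens to settle after changing a lens parameter:

    // Sketch: treat a result as lens-stable once LENS_STATE reports STATIONARY.
    CameraCaptureSession.CaptureCallback callback =
        new CameraCaptureSession.CaptureCallback() {
          @Override
          public void onCaptureCompleted(CameraCaptureSession session,
              CaptureRequest request, TotalCaptureResult result) {
            Integer lensState = result.get(CaptureResult.LENS_STATE);
            if (lensState != null) {
              if (lensState == CameraMetadata.LENS_STATE_STATIONARY) {
                // The most recently requested lens parameters have been reached.
              }
            }
          }
        };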
4097 </details> 4098 <tag id="V1" /> 4099 </entry> 4100 <clone entry="android.lens.poseRotation" kind="static"> 4101 </clone> 4102 <clone entry="android.lens.poseTranslation" kind="static"> 4103 </clone> 4104 </dynamic> 4105 <static> 4106 <entry name="intrinsicCalibration" type="float" visibility="public" 4107 container="array" permission_needed="true"> 4108 <array> 4109 <size>5</size> 4110 </array> 4111 <description> 4112 The parameters for this camera device's intrinsic 4113 calibration. 4114 </description> 4115 <units> 4116 Pixels in the 4117 android.sensor.info.preCorrectionActiveArraySize 4118 coordinate system. 4119 </units> 4120 <details> 4121 The five calibration parameters that describe the 4122 transform from camera-centric 3D coordinates to sensor 4123 pixel coordinates: 4124 4125 [f_x, f_y, c_x, c_y, s] 4126 4127 Where `f_x` and `f_y` are the horizontal and vertical 4128 focal lengths, `[c_x, c_y]` is the position of the optical 4129 axis, and `s` is a skew parameter for the sensor plane not 4130 being aligned with the lens plane. 4131 4132 These are typically used within a transformation matrix K: 4133 4134 K = [ f_x, s, c_x, 4135 0, f_y, c_y, 4136 0 0, 1 ] 4137 4138 which can then be combined with the camera pose rotation 4139 `R` and translation `t` (android.lens.poseRotation and 4140 android.lens.poseTranslation, respectively) to calculate the 4141 complete transform from world coordinates to pixel 4142 coordinates: 4143 4144 P = [ K 0 * [ R -Rt 4145 0 1 ] 0 1 ] 4146 4147 (Note the negation of poseTranslation when mapping from camera 4148 to world coordinates, and multiplication by the rotation). 4149 4150 With `p_w` being a point in the world coordinate system 4151 and `p_s` being a point in the camera active pixel array 4152 coordinate system, and with the mapping including the 4153 homogeneous division by z: 4154 4155 p_h = (x_h, y_h, z_h) = P p_w 4156 p_s = p_h / z_h 4157 4158 so `[x_s, y_s]` is the pixel coordinates of the world 4159 point, `z_s = 1`, and `w_s` is a measurement of disparity 4160 (depth) in pixel coordinates. 4161 4162 Note that the coordinate system for this transform is the 4163 android.sensor.info.preCorrectionActiveArraySize system, 4164 where `(0,0)` is the top-left of the 4165 preCorrectionActiveArraySize rectangle. Once the pose and 4166 intrinsic calibration transforms have been applied to a 4167 world point, then the android.lens.distortion 4168 transform needs to be applied, and the result adjusted to 4169 be in the android.sensor.info.activeArraySize coordinate 4170 system (where `(0, 0)` is the top-left of the 4171 activeArraySize rectangle), to determine the final pixel 4172 coordinate of the world point for processed (non-RAW) 4173 output buffers. 4174 4175 For camera devices, the center of pixel `(x,y)` is located at 4176 coordinate `(x + 0.5, y + 0.5)`. So on a device with a 4177 precorrection active array of size `(10,10)`, the valid pixel 4178 indices go from `(0,0)-(9,9)`, and an perfectly-built camera would 4179 have an optical center at the exact center of the pixel grid, at 4180 coordinates `(5.0, 5.0)`, which is the top-left corner of pixel 4181 `(5,5)`. 4182 </details> 4183 <tag id="DEPTH" /> 4184 </entry> 4185 <entry name="radialDistortion" type="float" visibility="public" 4186 deprecated="true" container="array" permission_needed="true"> 4187 <array> 4188 <size>6</size> 4189 </array> 4190 <description> 4191 The correction coefficients to correct for this camera device's 4192 radial and tangential lens distortion. 
4193 </description> 4194 <deprecation_description> 4195 This field was inconsistently defined in terms of its 4196 normalization. Use android.lens.distortion instead. 4197 </deprecation_description> 4198 <units> 4199 Unitless coefficients. 4200 </units> 4201 <details> 4202 Four radial distortion coefficients `[kappa_0, kappa_1, kappa_2, 4203 kappa_3]` and two tangential distortion coefficients 4204 `[kappa_4, kappa_5]` that can be used to correct the 4205 lens's geometric distortion with the mapping equations: 4206 4207 x_c = x_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) + 4208 kappa_4 * (2 * x_i * y_i) + kappa_5 * ( r^2 + 2 * x_i^2 ) 4209 y_c = y_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) + 4210 kappa_5 * (2 * x_i * y_i) + kappa_4 * ( r^2 + 2 * y_i^2 ) 4211 4212 Here, `[x_c, y_c]` are the coordinates to sample in the 4213 input image that correspond to the pixel values in the 4214 corrected image at the coordinate `[x_i, y_i]`: 4215 4216 correctedImage(x_i, y_i) = sample_at(x_c, y_c, inputImage) 4217 4218 The pixel coordinates are defined in a normalized 4219 coordinate system related to the 4220 android.lens.intrinsicCalibration calibration fields. 4221 Both `[x_i, y_i]` and `[x_c, y_c]` have `(0,0)` at the 4222 lens optical center `[c_x, c_y]`. The maximum magnitudes 4223 of both x and y coordinates are normalized to be 1 at the 4224 edge further from the optical center, so the range 4225 for both dimensions is `-1 <= x <= 1`. 4226 4227 Finally, `r` represents the radial distance from the 4228 optical center, `r^2 = x_i^2 + y_i^2`, and its magnitude 4229 is therefore no larger than `|r| <= sqrt(2)`. 4230 4231 The distortion model used is the Brown-Conrady model. 4232 </details> 4233 <tag id="DEPTH" /> 4234 </entry> 4235 </static> 4236 <dynamic> 4237 <clone entry="android.lens.intrinsicCalibration" kind="static"> 4238 </clone> 4239 <clone entry="android.lens.radialDistortion" kind="static"> 4240 </clone> 4241 </dynamic> 4242 <static> 4243 <entry name="poseReference" type="byte" visibility="public" enum="true" 4244 permission_needed="true" hal_version="3.3" > 4245 <enum> 4246 <value>PRIMARY_CAMERA 4247 <notes>The value of android.lens.poseTranslation is relative to the optical center of 4248 the largest camera device facing the same direction as this camera. 4249 4250 This is the default value for API levels before Android P. 4251 </notes> 4252 </value> 4253 <value>GYROSCOPE 4254 <notes>The value of android.lens.poseTranslation is relative to the position of the 4255 primary gyroscope of this Android device. 4256 </notes> 4257 </value> 4258 </enum> 4259 <description> 4260 The origin for android.lens.poseTranslation. 4261 </description> 4262 <details> 4263 Different calibration methods and use cases can produce better or worse results 4264 depending on the selected coordinate origin. 4265 </details> 4266 </entry> 4267 <entry name="distortion" type="float" visibility="public" container="array" 4268 permission_needed="true" hal_version="3.3" > 4269 <array> 4270 <size>5</size> 4271 </array> 4272 <description> 4273 The correction coefficients to correct for this camera device's 4274 radial and tangential lens distortion. 4275 4276 Replaces the deprecated android.lens.radialDistortion field, which was 4277 inconsistently defined. 4278 </description> 4279 <units> 4280 Unitless coefficients. 
4281 </units> 4282 <details> 4283 Three radial distortion coefficients `[kappa_1, kappa_2, 4284 kappa_3]` and two tangential distortion coefficients 4285 `[kappa_4, kappa_5]` that can be used to correct the 4286 lens's geometric distortion with the mapping equations: 4287 4288 x_c = x_i * ( 1 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) + 4289 kappa_4 * (2 * x_i * y_i) + kappa_5 * ( r^2 + 2 * x_i^2 ) 4290 y_c = y_i * ( 1 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) + 4291 kappa_5 * (2 * x_i * y_i) + kappa_4 * ( r^2 + 2 * y_i^2 ) 4292 4293 Here, `[x_c, y_c]` are the coordinates to sample in the 4294 input image that correspond to the pixel values in the 4295 corrected image at the coordinate `[x_i, y_i]`: 4296 4297 correctedImage(x_i, y_i) = sample_at(x_c, y_c, inputImage) 4298 4299 The pixel coordinates are defined in a coordinate system 4300 related to the android.lens.intrinsicCalibration 4301 calibration fields; see that entry for details of the mapping stages. 4302 Both `[x_i, y_i]` and `[x_c, y_c]` 4303 have `(0,0)` at the lens optical center `[c_x, c_y]`, and 4304 the range of the coordinates depends on the focal length 4305 terms of the intrinsic calibration. 4306 4307 Finally, `r` represents the radial distance from the 4308 optical center, `r^2 = x_i^2 + y_i^2`. 4309 4310 The distortion model used is the Brown-Conrady model. 4311 </details> 4312 <tag id="DEPTH" /> 4313 </entry> 4314 </static> 4315 <dynamic> 4316 <clone entry="android.lens.distortion" kind="static"> 4317 </clone> 4318 </dynamic> 4319 </section> 4320 <section name="noiseReduction"> 4321 <controls> 4322 <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full"> 4323 <enum> 4324 <value>OFF 4325 <notes>No noise reduction is applied.</notes></value> 4326 <value>FAST 4327 <notes>Noise reduction is applied without reducing frame rate relative to sensor 4328 output. It may be the same as OFF if noise reduction will reduce frame rate 4329 relative to sensor.</notes></value> 4330 <value>HIGH_QUALITY 4331 <notes>High-quality noise reduction is applied, at the cost of possibly reduced frame 4332 rate relative to sensor output.</notes></value> 4333 <value optional="true">MINIMAL 4334 <notes>MINIMAL noise reduction is applied without reducing frame rate relative to 4335 sensor output. </notes></value> 4336 <value optional="true">ZERO_SHUTTER_LAG 4337 4338 <notes>Noise reduction is applied at different levels for different output streams, 4339 based on resolution. Streams at maximum recording resolution (see {@link 4340 android.hardware.camera2.CameraDevice#createCaptureSession|ACameraDevice_createCaptureSession}) 4341 or below have noise reduction applied, while higher-resolution streams have MINIMAL (if 4342 supported) or no noise reduction applied (if MINIMAL is not supported.) The degree of 4343 noise reduction for low-resolution streams is tuned so that frame rate is not impacted, 4344 and the quality is equal to or better than FAST (since it is only applied to 4345 lower-resolution outputs, quality may improve from FAST). 4346 4347 This mode is intended to be used by applications operating in a zero-shutter-lag mode 4348 with YUV or PRIVATE reprocessing, where the application continuously captures 4349 high-resolution intermediate buffers into a circular buffer, from which a final image is 4350 produced via reprocessing when a user takes a picture. 
For such a use case, the 4351 high-resolution buffers must not have noise reduction applied to maximize efficiency of 4352 preview and to avoid over-applying noise filtering when reprocessing, while 4353 low-resolution buffers (used for recording or preview, generally) need noise reduction 4354 applied for reasonable preview quality. 4355 4356 This mode is guaranteed to be supported by devices that support either the 4357 YUV_REPROCESSING or PRIVATE_REPROCESSING capabilities 4358 (android.request.availableCapabilities lists either of those capabilities) and it will 4359 be the default mode for the CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template. 4360 </notes></value> 4361 </enum> 4362 <description>Mode of operation for the noise reduction algorithm.</description> 4363 <range>android.noiseReduction.availableNoiseReductionModes</range> 4364 <details>The noise reduction algorithm attempts to improve image quality by removing 4365 excessive noise added by the capture process, especially in dark conditions. 4366 4367 OFF means no noise reduction will be applied by the camera device, for both the raw and 4368 YUV domains. 4369 4370 MINIMAL means that only sensor raw domain basic noise reduction is enabled, to remove 4371 demosaicing or other processing artifacts. For YUV_REPROCESSING, MINIMAL is the same as OFF. 4372 This mode is optional and may not be supported by all devices. The application should check 4373 android.noiseReduction.availableNoiseReductionModes before using it. 4374 4375 FAST/HIGH_QUALITY both mean camera device-determined noise filtering 4376 will be applied. HIGH_QUALITY mode indicates that the camera device 4377 will use the highest-quality noise filtering algorithms, 4378 even if it slows down capture rate. FAST means the camera device will not 4379 slow down capture rate when applying noise filtering. FAST may be the same as MINIMAL if 4380 MINIMAL is listed, or the same as OFF if any noise filtering will slow down capture rate. 4381 Every output stream will have a similar amount of enhancement applied. 4382 4383 ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular 4384 buffer of high-resolution images during preview and reprocess image(s) from that buffer 4385 into a final capture when triggered by the user. In this mode, the camera device applies 4386 noise reduction to low-resolution streams (below maximum recording resolution) to maximize 4387 preview quality, but does not apply noise reduction to high-resolution streams, since 4388 those will be reprocessed later if necessary. 4389 4390 For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera device 4391 will apply FAST/HIGH_QUALITY YUV domain noise reduction, respectively. The camera device 4392 may adjust the noise reduction parameters for best image quality based on 4393 android.reprocess.effectiveExposureFactor if it is set. 4394 </details> 4395 <hal_details> 4396 For YUV_REPROCESSING, the HAL can use android.reprocess.effectiveExposureFactor to 4397 adjust the internal noise reduction parameters appropriately to get the best quality 4398 images.
4399 </hal_details> 4400 <tag id="V1" /> 4401 <tag id="REPROC" /> 4402 </entry> 4403 <entry name="strength" type="byte"> 4404 <description>Control the amount of noise reduction 4405 applied to the images</description> 4406 <units>1-10; 10 is max noise reduction</units> 4407 <range>1 - 10</range> 4408 <tag id="FUTURE" /> 4409 </entry> 4410 </controls> 4411 <static> 4412 <entry name="availableNoiseReductionModes" type="byte" visibility="public" 4413 type_notes="list of enums" container="array" typedef="enumList" hwlevel="limited"> 4414 <array> 4415 <size>n</size> 4416 </array> 4417 <description> 4418 List of noise reduction modes for android.noiseReduction.mode that are supported 4419 by this camera device. 4420 </description> 4421 <range>Any value listed in android.noiseReduction.mode</range> 4422 <details> 4423 Full-capability camera devices will always support OFF and FAST. 4424 4425 Camera devices that support YUV_REPROCESSING or PRIVATE_REPROCESSING will support 4426 ZERO_SHUTTER_LAG. 4427 4428 Legacy-capability camera devices will only support FAST mode. 4429 </details> 4430 <hal_details> 4431 HAL must support both FAST and HIGH_QUALITY if noise reduction control is available 4432 on the camera device, but the underlying implementation can be the same for both modes. 4433 That is, if the highest quality implementation on the camera device does not slow down 4434 capture rate, then FAST and HIGH_QUALITY will generate the same output. 4435 </hal_details> 4436 <tag id="V1" /> 4437 <tag id="REPROC" /> 4438 </entry> 4439 </static> 4440 <dynamic> 4441 <clone entry="android.noiseReduction.mode" kind="controls"> 4442 <tag id="V1" /> 4443 <tag id="REPROC" /> 4444 </clone> 4445 </dynamic> 4446 </section> 4447 <section name="quirks"> 4448 <static> 4449 <entry name="meteringCropRegion" type="byte" visibility="system" deprecated="true" optional="true"> 4450 <description>If set to 1, the camera service does not 4451 scale 'normalized' coordinates with respect to the crop 4452 region. This applies to metering input (a{e,f,wb}Region 4453 and output (face rectangles).</description> 4454 <deprecation_description> 4455 Not used in HALv3 or newer 4456 </deprecation_description> 4457 <details>Normalized coordinates refer to those in the 4458 (-1000,1000) range mentioned in the 4459 android.hardware.Camera API. 4460 4461 HAL implementations should instead always use and emit 4462 sensor array-relative coordinates for all region data. Does 4463 not need to be listed in static metadata. Support will be 4464 removed in future versions of camera service.</details> 4465 </entry> 4466 <entry name="triggerAfWithAuto" type="byte" visibility="system" deprecated="true" optional="true"> 4467 <description>If set to 1, then the camera service always 4468 switches to FOCUS_MODE_AUTO before issuing a AF 4469 trigger.</description> 4470 <deprecation_description> 4471 Not used in HALv3 or newer 4472 </deprecation_description> 4473 <details>HAL implementations should implement AF trigger 4474 modes for AUTO, MACRO, CONTINUOUS_FOCUS, and 4475 CONTINUOUS_PICTURE modes instead of using this flag. Does 4476 not need to be listed in static metadata. 
Support will be 4477 removed in future versions of camera service</details> 4478 </entry> 4479 <entry name="useZslFormat" type="byte" visibility="system" deprecated="true" optional="true"> 4480 <description>If set to 1, the camera service uses 4481 CAMERA2_PIXEL_FORMAT_ZSL instead of 4482 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED for the zero 4483 shutter lag stream</description> 4484 <deprecation_description> 4485 Not used in HALv3 or newer 4486 </deprecation_description> 4487 <details>HAL implementations should use gralloc usage flags 4488 to determine that a stream will be used for 4489 zero-shutter-lag, instead of relying on an explicit 4490 format setting. Does not need to be listed in static 4491 metadata. Support will be removed in future versions of 4492 camera service.</details> 4493 </entry> 4494 <entry name="usePartialResult" type="byte" visibility="hidden" deprecated="true" optional="true"> 4495 <description> 4496 If set to 1, the HAL will always split result 4497 metadata for a single capture into multiple buffers, 4498 returned using multiple process_capture_result calls. 4499 </description> 4500 <deprecation_description> 4501 Not used in HALv3 or newer; replaced by better partials mechanism 4502 </deprecation_description> 4503 <details> 4504 Does not need to be listed in static 4505 metadata. Support for partial results will be reworked in 4506 future versions of camera service. This quirk will stop 4507 working at that point; DO NOT USE without careful 4508 consideration of future support. 4509 </details> 4510 <hal_details> 4511 Refer to `camera3_capture_result::partial_result` 4512 for information on how to implement partial results. 4513 </hal_details> 4514 </entry> 4515 </static> 4516 <dynamic> 4517 <entry name="partialResult" type="byte" visibility="hidden" deprecated="true" optional="true" enum="true" typedef="boolean"> 4518 <enum> 4519 <value>FINAL 4520 <notes>The last or only metadata result buffer 4521 for this capture.</notes> 4522 </value> 4523 <value>PARTIAL 4524 <notes>A partial buffer of result metadata for this 4525 capture. More result buffers for this capture will be sent 4526 by the camera device, the last of which will be marked 4527 FINAL.</notes> 4528 </value> 4529 </enum> 4530 <description> 4531 Whether a result given to the framework is the 4532 final one for the capture, or only a partial that contains a 4533 subset of the full set of dynamic metadata 4534 values.</description> 4535 <deprecation_description> 4536 Not used in HALv3 or newer 4537 </deprecation_description> 4538 <range>Optional. Default value is FINAL.</range> 4539 <details> 4540 The entries in the result metadata buffers for a 4541 single capture may not overlap, except for this entry. The 4542 FINAL buffers must retain FIFO ordering relative to the 4543 requests that generate them, so the FINAL buffer for frame 3 must 4544 always be sent to the framework after the FINAL buffer for frame 2, and 4545 before the FINAL buffer for frame 4. PARTIAL buffers may be returned 4546 in any order relative to other frames, but all PARTIAL buffers for a given 4547 capture must arrive before the FINAL buffer for that capture. This entry may 4548 only be used by the camera device if quirks.usePartialResult is set to 1. 4549 </details> 4550 <hal_details> 4551 Refer to `camera3_capture_result::partial_result` 4552 for information on how to implement partial results. 
4553 </hal_details> 4554 </entry> 4555 </dynamic> 4556 </section> 4557 <section name="request"> 4558 <controls> 4559 <entry name="frameCount" type="int32" visibility="system" deprecated="true"> 4560 <description>A frame counter set by the framework. Must 4561 be maintained unchanged in output frame. This value monotonically 4562 increases with every new result (that is, each new result has a unique 4563 frameCount value). 4564 </description> 4565 <deprecation_description> 4566 Not used in HALv3 or newer 4567 </deprecation_description> 4568 <units>incrementing integer</units> 4569 <range>Any int.</range> 4570 </entry> 4571 <entry name="id" type="int32" visibility="hidden"> 4572 <description>An application-specified ID for the current 4573 request. Must be maintained unchanged in output 4574 frame</description> 4575 <units>arbitrary integer assigned by application</units> 4576 <range>Any int</range> 4577 <tag id="V1" /> 4578 </entry> 4579 <entry name="inputStreams" type="int32" visibility="system" deprecated="true" 4580 container="array"> 4581 <array> 4582 <size>n</size> 4583 </array> 4584 <description>List which camera reprocess stream is used 4585 for the source of reprocessing data.</description> 4586 <deprecation_description> 4587 Not used in HALv3 or newer 4588 </deprecation_description> 4589 <units>List of camera reprocess stream IDs</units> 4590 <range> 4591 Typically, only one entry allowed, must be a valid reprocess stream ID. 4592 </range> 4593 <details>Only meaningful when android.request.type == 4594 REPROCESS. Ignored otherwise</details> 4595 <tag id="HAL2" /> 4596 </entry> 4597 <entry name="metadataMode" type="byte" visibility="system" 4598 enum="true"> 4599 <enum> 4600 <value>NONE 4601 <notes>No metadata should be produced on output, except 4602 for application-bound buffer data. If no 4603 application-bound streams exist, no frame should be 4604 placed in the output frame queue. If such streams 4605 exist, a frame should be placed on the output queue 4606 with null metadata but with the necessary output buffer 4607 information. Timestamp information should still be 4608 included with any output stream buffers</notes></value> 4609 <value>FULL 4610 <notes>All metadata should be produced. Statistics will 4611 only be produced if they are separately 4612 enabled</notes></value> 4613 </enum> 4614 <description>How much metadata to produce on 4615 output</description> 4616 <tag id="FUTURE" /> 4617 </entry> 4618 <entry name="outputStreams" type="int32" visibility="system" deprecated="true" 4619 container="array"> 4620 <array> 4621 <size>n</size> 4622 </array> 4623 <description>Lists which camera output streams image data 4624 from this capture must be sent to</description> 4625 <deprecation_description> 4626 Not used in HALv3 or newer 4627 </deprecation_description> 4628 <units>List of camera stream IDs</units> 4629 <range>List must only include streams that have been 4630 created</range> 4631 <details>If no output streams are listed, then the image 4632 data should simply be discarded. 
The image data must 4633 still be captured for metadata and statistics production, 4634 and the lens and flash must operate as requested.</details> 4635 <tag id="HAL2" /> 4636 </entry> 4637 <entry name="type" type="byte" visibility="system" deprecated="true" enum="true"> 4638 <enum> 4639 <value>CAPTURE 4640 <notes>Capture a new image from the imaging hardware, 4641 and process it according to the 4642 settings</notes></value> 4643 <value>REPROCESS 4644 <notes>Process previously captured data; the 4645 android.request.inputStreams parameter determines the 4646 source reprocessing stream. TODO: Mark dynamic metadata 4647 needed for reprocessing with [RP]</notes></value> 4648 </enum> 4649 <description>The type of the request; either CAPTURE or 4650 REPROCESS. For legacy HAL3, this tag is redundant. 4651 </description> 4652 <deprecation_description> 4653 Not used in HALv3 or newer 4654 </deprecation_description> 4655 <tag id="HAL2" /> 4656 </entry> 4657 </controls> 4658 <static> 4659 <entry name="maxNumOutputStreams" type="int32" visibility="ndk_public" 4660 container="array" hwlevel="legacy"> 4661 <array> 4662 <size>3</size> 4663 </array> 4664 <description>The maximum numbers of different types of output streams 4665 that can be configured and used simultaneously by a camera device. 4666 </description> 4667 <range> 4668 For processed (and stalling) format streams, &gt;= 1. 4669 4670 For Raw format (either stalling or non-stalling) streams, &gt;= 0. 4671 4672 For processed (but not stalling) format streams, &gt;= 3 4673 for FULL mode devices (`android.info.supportedHardwareLevel == FULL`); 4674 &gt;= 2 for LIMITED mode devices (`android.info.supportedHardwareLevel == LIMITED`). 4675 </range> 4676 <details> 4677 This is a 3 element tuple that contains the max number of output simultaneous 4678 streams for raw sensor, processed (but not stalling), and processed (and stalling) 4679 formats respectively. For example, assuming that JPEG is typically a processed and 4680 stalling stream, if the max raw sensor format output stream number is 1, the max YUV streams 4681 number is 3, and the max JPEG stream number is 2, then this tuple should be `(1, 3, 2)`. 4682 4683 This lists the upper bound of the number of output streams supported by 4684 the camera device. Using more streams simultaneously may require more hardware and 4685 CPU resources that will consume more power. The image format for an output stream can 4686 be any supported format provided by android.scaler.availableStreamConfigurations. 4687 The formats defined in android.scaler.availableStreamConfigurations can be categorized 4688 into the 3 stream types as below: 4689 4690 * Processed (and stalling): any non-RAW format with a stallDurations &gt; 0. 4691 Typically {@link android.graphics.ImageFormat#JPEG|AIMAGE_FORMAT_JPEG JPEG format}. 4692 * Raw formats: {@link android.graphics.ImageFormat#RAW_SENSOR|AIMAGE_FORMAT_RAW16 4693 RAW_SENSOR}, {@link android.graphics.ImageFormat#RAW10|AIMAGE_FORMAT_RAW10 RAW10}, or 4694 {@link android.graphics.ImageFormat#RAW12|AIMAGE_FORMAT_RAW12 RAW12}. 4695 * Processed (but not-stalling): any non-RAW format without a stall duration. Typically 4696 {@link android.graphics.ImageFormat#YUV_420_888|AIMAGE_FORMAT_YUV_420_888 YUV_420_888}, 4697 {@link android.graphics.ImageFormat#NV21 NV21}, {@link 4698 android.graphics.ImageFormat#YV12 YV12}, or {@link 4699 android.graphics.ImageFormat#Y8|AIMAGE_FORMAT_Y8 Y8}.
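As an illustrative sketch only, a Java application can read the per-category limits through the synthetic entries described below (android.request.maxNumOutputRaw, android.request.maxNumOutputProc, and android.request.maxNumOutputProcStalling); `cameraManager` and `cameraId` are assumed to be set up by the application, and exception handling is omitted:

    // Sketch: query how many streams of each category may be configured at once.
    CameraCharacteristics chars = cameraManager.getCameraCharacteristics(cameraId);
    Integer maxRaw = chars.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW);
    Integer maxProc = chars.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC);
    Integer maxProcStalling =
        chars.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING);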
4700 </details> 4701 <tag id="BC" /> 4702 </entry> 4703 <entry name="maxNumOutputRaw" type="int32" visibility="java_public" synthetic="true" 4704 hwlevel="legacy"> 4705 <description>The maximum numbers of different types of output streams 4706 that can be configured and used simultaneously by a camera device 4707 for any `RAW` formats. 4708 </description> 4709 <range> 4710 &gt;= 0 4711 </range> 4712 <details> 4713 This value contains the max number of output simultaneous 4714 streams from the raw sensor. 4715 4716 This lists the upper bound of the number of output streams supported by 4717 the camera device. Using more streams simultaneously may require more hardware and 4718 CPU resources that will consume more power. The image format for this kind of an output stream can 4719 be any `RAW` and supported format provided by android.scaler.streamConfigurationMap. 4720 4721 In particular, a `RAW` format is typically one of: 4722 4723 * {@link android.graphics.ImageFormat#RAW_SENSOR|AIMAGE_FORMAT_RAW16 RAW_SENSOR} 4724 * {@link android.graphics.ImageFormat#RAW10|AIMAGE_FORMAT_RAW10 RAW10} 4725 * {@link android.graphics.ImageFormat#RAW12|AIMAGE_FORMAT_RAW12 RAW12} 4726 4727 LEGACY mode devices (android.info.supportedHardwareLevel `==` LEGACY) 4728 never support raw streams. 4729 </details> 4730 </entry> 4731 <entry name="maxNumOutputProc" type="int32" visibility="java_public" synthetic="true" 4732 hwlevel="legacy"> 4733 <description>The maximum numbers of different types of output streams 4734 that can be configured and used simultaneously by a camera device 4735 for any processed (but not-stalling) formats. 4736 </description> 4737 <range> 4738 &gt;= 3 4739 for FULL mode devices (`android.info.supportedHardwareLevel == FULL`); 4740 &gt;= 2 for LIMITED mode devices (`android.info.supportedHardwareLevel == LIMITED`). 4741 </range> 4742 <details> 4743 This value contains the max number of output simultaneous 4744 streams for any processed (but not-stalling) formats. 4745 4746 This lists the upper bound of the number of output streams supported by 4747 the camera device. Using more streams simultaneously may require more hardware and 4748 CPU resources that will consume more power. The image format for this kind of an output stream can 4749 be any non-`RAW` and supported format provided by android.scaler.streamConfigurationMap. 4750 4751 Processed (but not-stalling) is defined as any non-RAW format without a stall duration. 4752 Typically: 4753 4754 * {@link android.graphics.ImageFormat#YUV_420_888|AIMAGE_FORMAT_YUV_420_888 YUV_420_888} 4755 * {@link android.graphics.ImageFormat#NV21 NV21} 4756 * {@link android.graphics.ImageFormat#YV12 YV12} 4757 * Implementation-defined formats, i.e. {@link 4758 android.hardware.camera2.params.StreamConfigurationMap#isOutputSupportedFor(Class)} 4759 * {@link android.graphics.ImageFormat#Y8|AIMAGE_FORMAT_Y8 Y8} 4760 4761 For full guarantees, query {@link 4762 android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration} with a 4763 processed format -- it will return 0 for a non-stalling stream. 4764 4765 LEGACY devices will support at least 2 processing/non-stalling streams. 4766 </details> 4767 </entry> 4768 <entry name="maxNumOutputProcStalling" type="int32" visibility="java_public" synthetic="true" 4769 hwlevel="legacy"> 4770 <description>The maximum numbers of different types of output streams 4771 that can be configured and used simultaneously by a camera device 4772 for any processed (and stalling) formats. 
4773 </description> 4774 <range> 4775 &gt;= 1 4776 </range> 4777 <details> 4778 This value contains the max number of output simultaneous 4779 streams for any processed (and stalling) formats. 4780 4781 This lists the upper bound of the number of output streams supported by 4782 the camera device. Using more streams simultaneously may require more hardware and 4783 CPU resources that will consume more power. The image format for this kind of an output stream can 4784 be any non-`RAW` and supported format provided by android.scaler.streamConfigurationMap. 4785 4786 A processed and stalling format is defined as any non-RAW format with a stallDurations 4787 &gt; 0. Typically only the {@link 4788 android.graphics.ImageFormat#JPEG|AIMAGE_FORMAT_JPEG JPEG format} is a stalling format. 4789 4790 For full guarantees, query {@link 4791 android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration} with a 4792 processed format -- it will return a non-0 value for a stalling stream. 4793 4794 LEGACY devices will support up to 1 processing/stalling stream. 4795 </details> 4796 </entry> 4797 <entry name="maxNumReprocessStreams" type="int32" visibility="system" 4798 deprecated="true" container="array"> 4799 <array> 4800 <size>1</size> 4801 </array> 4802 <description>How many reprocessing streams of any type 4803 can be allocated at the same time.</description> 4804 <deprecation_description> 4805 Not used in HALv3 or newer 4806 </deprecation_description> 4807 <range>&gt;= 0</range> 4808 <details> 4809 Only used by HAL2.x. 4810 4811 When set to 0, it means no reprocess stream is supported. 4812 </details> 4813 <tag id="HAL2" /> 4814 </entry> 4815 <entry name="maxNumInputStreams" type="int32" visibility="java_public" hwlevel="full"> 4816 <description> 4817 The maximum numbers of any type of input streams 4818 that can be configured and used simultaneously by a camera device. 4819 </description> 4820 <range> 4821 0 or 1. 4822 </range> 4823 <details>When set to 0, it means no input stream is supported. 4824 4825 The image format for an input stream can be any supported format returned by {@link 4826 android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}. When using an 4827 input stream, there must be at least one output stream configured to receive the 4828 reprocessed images. 4829 4830 When an input stream and some output streams are used in a reprocessing request, 4831 only the input buffer will be used to produce these output stream buffers, and a 4832 new sensor image will not be captured. 4833 4834 For example, for the Zero Shutter Lag (ZSL) still capture use case, the input 4835 stream image format will be PRIVATE, and the associated output stream image format 4836 should be JPEG. 4837 </details> 4838 <hal_details> 4839 For the reprocessing flow and controls, see 4840 hardware/libhardware/include/hardware/camera3.h Section 10 for more details. 4841 </hal_details> 4842 <tag id="REPROC" /> 4843 </entry> 4844 </static> 4845 <dynamic> 4846 <entry name="frameCount" type="int32" visibility="hidden" deprecated="true"> 4847 <description>A frame counter set by the framework.
This value monotonically 4848 increases with every new result (that is, each new result has a unique 4849 frameCount value).</description> 4850 <deprecation_description> 4851 Not used in HALv3 or newer 4852 </deprecation_description> 4853 <units>count of frames</units> 4854 <range>&gt; 0</range> 4855 <details>Reset on release()</details> 4856 </entry> 4857 <clone entry="android.request.id" kind="controls"></clone> 4858 <clone entry="android.request.metadataMode" 4859 kind="controls"></clone> 4860 <clone entry="android.request.outputStreams" 4861 kind="controls"></clone> 4862 <entry name="pipelineDepth" type="byte" visibility="public" hwlevel="legacy"> 4863 <description>Specifies the number of pipeline stages the frame went 4864 through from when it was exposed to when the final completed result 4865 was available to the framework.</description> 4866 <range>&lt;= android.request.pipelineMaxDepth</range> 4867 <details>Depending on what settings are used in the request, and 4868 what streams are configured, the data may undergo less processing, 4869 and some pipeline stages may be skipped. 4870 4871 See android.request.pipelineMaxDepth for more details. 4872 </details> 4873 <hal_details> 4874 This value must always represent the accurate count of how many 4875 pipeline stages were actually used. 4876 </hal_details> 4877 </entry> 4878 </dynamic> 4879 <static> 4880 <entry name="pipelineMaxDepth" type="byte" visibility="public" hwlevel="legacy"> 4881 <description>Specifies the maximum number of pipeline stages a frame 4882 has to go through from when it's exposed to when it's available 4883 to the framework.</description> 4884 <details>A typical minimum value for this is 2 (one stage to expose, 4885 one stage to readout) from the sensor. The ISP then usually adds 4886 its own stages to do custom HW processing. Further stages may be 4887 added by SW processing. 4888 4889 Depending on what settings are used (e.g. YUV, JPEG) and what 4890 processing is enabled (e.g. face detection), the actual pipeline 4891 depth (specified by android.request.pipelineDepth) may be less than 4892 the max pipeline depth. 4893 4894 A pipeline depth of X stages is equivalent to a pipeline latency of 4895 X frame intervals. 4896 4897 This value will normally be 8 or less; however, for a high speed capture session, 4898 the max pipeline depth will be up to 8 x the size of the high speed capture request list. 4899 </details> 4900 <hal_details> 4901 This value should be 4 or less, except for the high speed recording session, where the 4902 max batch sizes may be larger than 1. 4903 </hal_details> 4904 </entry> 4905 <entry name="partialResultCount" type="int32" visibility="public" optional="true"> 4906 <description>Defines how many sub-components 4907 a result will be composed of. 4908 </description> 4909 <range>&gt;= 1</range> 4910 <details>In order to combat the pipeline latency, partial results 4911 may be delivered to the application layer from the camera device as 4912 soon as they are available. 4913 4914 Optional; defaults to 1. A value of 1 means that partial 4915 results are not supported, and only the final TotalCaptureResult will 4916 be produced by the camera device. 4917 4918 A typical use case for this might be: after requesting an 4919 auto-focus (AF) lock, the new AF state might be available 50% 4920 of the way through the pipeline. The camera device could 4921 then immediately dispatch this state via a partial result to 4922 the application, and the rest of the metadata via later 4923 partial results.
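As an illustrative sketch only, a Java application can check this count and then consume partial results as they arrive via onCaptureProgressed; `cameraManager` and `cameraId` are assumed to be set up by the application, and exception handling is omitted:

    // Sketch: partial results are delivered through onCaptureProgressed before the
    // final TotalCaptureResult arrives in onCaptureCompleted.
    CameraCharacteristics chars = cameraManager.getCameraCharacteristics(cameraId);
    Integer partialCount =
        chars.get(CameraCharacteristics.REQUEST_PARTIAL_RESULT_COUNT);
    // A value of 1 (or null) means only the final result is produced.

    CameraCaptureSession.CaptureCallback callback =
        new CameraCaptureSession.CaptureCallback() {
          @Override
          public void onCaptureProgressed(CameraCaptureSession session,
              CaptureRequest request, CaptureResult partialResult) {
            // Early metadata, such as the AF state, may already be present here.
            Integer afState = partialResult.get(CaptureResult.CONTROL_AF_STATE);
          }
        };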
4924 </details> 4925 </entry> 4926 <entry name="availableCapabilities" type="byte" visibility="public" 4927 enum="true" container="array" hwlevel="legacy"> 4928 <array> 4929 <size>n</size> 4930 </array> 4931 <enum> 4932 <value>BACKWARD_COMPATIBLE 4933 <notes>The minimal set of capabilities that every camera 4934 device (regardless of android.info.supportedHardwareLevel) 4935 supports. 4936 4937 This capability is listed by all normal devices, and 4938 indicates that the camera device has a feature set 4939 that's comparable to the baseline requirements for the 4940 older android.hardware.Camera API. 4941 4942 Devices with the DEPTH_OUTPUT capability might not list this 4943 capability, indicating that they support only depth measurement, 4944 not standard color output. 4945 </notes> 4946 </value> 4947 <value optional="true">MANUAL_SENSOR 4948 <notes> 4949 The camera device can be manually controlled (3A algorithms such 4950 as auto-exposure, and auto-focus can be bypassed). 4951 The camera device supports basic manual control of the sensor image 4952 acquisition related stages. This means the following controls are 4953 guaranteed to be supported: 4954 4955 * Manual frame duration control 4956 * android.sensor.frameDuration 4957 * android.sensor.info.maxFrameDuration 4958 * Manual exposure control 4959 * android.sensor.exposureTime 4960 * android.sensor.info.exposureTimeRange 4961 * Manual sensitivity control 4962 * android.sensor.sensitivity 4963 * android.sensor.info.sensitivityRange 4964 * Manual lens control (if the lens is adjustable) 4965 * android.lens.* 4966 * Manual flash control (if a flash unit is present) 4967 * android.flash.* 4968 * Manual black level locking 4969 * android.blackLevel.lock 4970 * Auto exposure lock 4971 * android.control.aeLock 4972 4973 If any of the above 3A algorithms are enabled, then the camera 4974 device will accurately report the values applied by 3A in the 4975 result. 4976 4977 A given camera device may also support additional manual sensor controls, 4978 but this capability only covers the above list of controls. 4979 4980 If this is supported, android.scaler.streamConfigurationMap will 4981 additionally return a min frame duration that is greater than 4982 zero for each supported size-format combination. 4983 4984 For camera devices with LOGICAL_MULTI_CAMERA capability, when the underlying active 4985 physical camera switches, exposureTime, sensitivity, and lens properties may change 4986 even if AE/AF is locked. However, the overall auto exposure and auto focus experience 4987 for users will be consistent. Refer to LOGICAL_MULTI_CAMERA capability for details. 4988 </notes> 4989 </value> 4990 <value optional="true">MANUAL_POST_PROCESSING 4991 <notes> 4992 The camera device post-processing stages can be manually controlled. 4993 The camera device supports basic manual control of the image post-processing 4994 stages. 
This means the following controls are guaranteed to be supported: 4995 4996 * Manual tonemap control 4997 * android.tonemap.curve 4998 * android.tonemap.mode 4999 * android.tonemap.maxCurvePoints 5000 * android.tonemap.gamma 5001 * android.tonemap.presetCurve 5002 5003 * Manual white balance control 5004 * android.colorCorrection.transform 5005 * android.colorCorrection.gains 5006 * Manual lens shading map control 5007 * android.shading.mode 5008 * android.statistics.lensShadingMapMode 5009 * android.statistics.lensShadingMap 5010 * android.lens.info.shadingMapSize 5011 * Manual aberration correction control (if aberration correction is supported) 5012 * android.colorCorrection.aberrationMode 5013 * android.colorCorrection.availableAberrationModes 5014 * Auto white balance lock 5015 * android.control.awbLock 5016 5017 If auto white balance is enabled, then the camera device 5018 will accurately report the values applied by AWB in the result. 5019 5020 A given camera device may also support additional post-processing 5021 controls, but this capability only covers the above list of controls. 5022 5023 For camera devices with LOGICAL_MULTI_CAMERA capability, when underlying active 5024 physical camera switches, tonemap, white balance, and shading map may change even if 5025 awb is locked. However, the overall post-processing experience for users will be 5026 consistent. Refer to LOGICAL_MULTI_CAMERA capability for details. 5027 </notes> 5028 </value> 5029 <value optional="true">RAW 5030 <notes> 5031 The camera device supports outputting RAW buffers and 5032 metadata for interpreting them. 5033 5034 Devices supporting the RAW capability allow both for 5035 saving DNG files, and for direct application processing of 5036 raw sensor images. 5037 5038 * RAW_SENSOR is supported as an output format. 5039 * The maximum available resolution for RAW_SENSOR streams 5040 will match either the value in 5041 android.sensor.info.pixelArraySize or 5042 android.sensor.info.preCorrectionActiveArraySize. 5043 * All DNG-related optional metadata entries are provided 5044 by the camera device. 5045 </notes> 5046 </value> 5047 <value optional="true" ndk_hidden="true">PRIVATE_REPROCESSING 5048 <notes> 5049 The camera device supports the Zero Shutter Lag reprocessing use case. 5050 5051 * One input stream is supported, that is, `android.request.maxNumInputStreams == 1`. 5052 * {@link android.graphics.ImageFormat#PRIVATE} is supported as an output/input format, 5053 that is, {@link android.graphics.ImageFormat#PRIVATE} is included in the lists of 5054 formats returned by {@link 5055 android.hardware.camera2.params.StreamConfigurationMap#getInputFormats} and {@link 5056 android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats}. 5057 * {@link android.hardware.camera2.params.StreamConfigurationMap#getValidOutputFormatsForInput} 5058 returns non empty int[] for each supported input format returned by {@link 5059 android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}. 5060 * Each size returned by {@link 5061 android.hardware.camera2.params.StreamConfigurationMap#getInputSizes 5062 getInputSizes(ImageFormat.PRIVATE)} is also included in {@link 5063 android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes 5064 getOutputSizes(ImageFormat.PRIVATE)} 5065 * Using {@link android.graphics.ImageFormat#PRIVATE} does not cause a frame rate drop 5066 relative to the sensor's maximum capture rate (at that resolution). 
5067 * {@link android.graphics.ImageFormat#PRIVATE} will be reprocessable into both 5068 {@link android.graphics.ImageFormat#YUV_420_888} and 5069 {@link android.graphics.ImageFormat#JPEG} formats. 5070 * For a MONOCHROME camera supporting Y8 format, {@link 5071 android.graphics.ImageFormat#PRIVATE} will be reprocessable into 5072 {@link android.graphics.ImageFormat#Y8}. 5073 * The maximum available resolution for PRIVATE streams 5074 (both input/output) will match the maximum available 5075 resolution of JPEG streams. 5076 * Static metadata android.reprocess.maxCaptureStall. 5077 * Only below controls are effective for reprocessing requests and 5078 will be present in capture results, other controls in reprocess 5079 requests will be ignored by the camera device. 5080 * android.jpeg.* 5081 * android.noiseReduction.mode 5082 * android.edge.mode 5083 * android.noiseReduction.availableNoiseReductionModes and 5084 android.edge.availableEdgeModes will both list ZERO_SHUTTER_LAG as a supported mode. 5085 </notes> 5086 </value> 5087 <value optional="true">READ_SENSOR_SETTINGS 5088 <notes> 5089 The camera device supports accurately reporting the sensor settings for many of 5090 the sensor controls while the built-in 3A algorithm is running. This allows 5091 reporting of sensor settings even when these settings cannot be manually changed. 5092 5093 The values reported for the following controls are guaranteed to be available 5094 in the CaptureResult, including when 3A is enabled: 5095 5096 * Exposure control 5097 * android.sensor.exposureTime 5098 * Sensitivity control 5099 * android.sensor.sensitivity 5100 * Lens controls (if the lens is adjustable) 5101 * android.lens.focusDistance 5102 * android.lens.aperture 5103 5104 This capability is a subset of the MANUAL_SENSOR control capability, and will 5105 always be included if the MANUAL_SENSOR capability is available. 5106 </notes> 5107 </value> 5108 <value optional="true">BURST_CAPTURE 5109 <notes> 5110 The camera device supports capturing high-resolution images at >= 20 frames per 5111 second, in at least the uncompressed YUV format, when post-processing settings are 5112 set to FAST. Additionally, all image resolutions less than 24 megapixels can be 5113 captured at >= 10 frames per second. Here, 'high resolution' means at least 8 5114 megapixels, or the maximum resolution of the device, whichever is smaller. 5115 </notes> 5116 <sdk_notes> 5117 More specifically, this means that a size matching the camera device's active array 5118 size is listed as a supported size for the {@link 5119 android.graphics.ImageFormat#YUV_420_888} format in either {@link 5120 android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes} or {@link 5121 android.hardware.camera2.params.StreamConfigurationMap#getHighResolutionOutputSizes}, 5122 with a minimum frame duration for that format and size of either <= 1/20 s, or 5123 <= 1/10 s if the image size is less than 24 megapixels, respectively; and 5124 the android.control.aeAvailableTargetFpsRanges entry lists at least one FPS range 5125 where the minimum FPS is >= 1 / minimumFrameDuration for the maximum-size 5126 YUV_420_888 format. If that maximum size is listed in {@link 5127 android.hardware.camera2.params.StreamConfigurationMap#getHighResolutionOutputSizes}, 5128 then the list of resolutions for YUV_420_888 from {@link 5129 android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes} contains at 5130 least one resolution >= 8 megapixels, with a minimum frame duration of <= 1/20 5131 s. 
5132 5133 If the device supports the {@link 5134 android.graphics.ImageFormat#RAW10|AIMAGE_FORMAT_RAW10}, {@link 5135 android.graphics.ImageFormat#RAW12|AIMAGE_FORMAT_RAW12}, or {@link 5136 android.graphics.ImageFormat#Y8|AIMAGE_FORMAT_Y8} formats, then those can also be 5137 captured at the same rate as the maximum-size YUV_420_888 resolution. 5138 5139 If the device supports the PRIVATE_REPROCESSING capability, then the same guarantees 5140 as for the YUV_420_888 format also apply to the {@link 5141 android.graphics.ImageFormat#PRIVATE} format. 5142 5143 In addition, the android.sync.maxLatency field is guaranteed to have a value between 0 5144 and 4, inclusive. android.control.aeLockAvailable and android.control.awbLockAvailable 5145 are also guaranteed to be `true` so burst capture with these two locks ON yields 5146 consistent image output. 5147 </sdk_notes> 5148 <ndk_notes> 5149 More specifically, this means that at least one output {@link 5150 android.graphics.ImageFormat#YUV_420_888|AIMAGE_FORMAT_YUV_420_888} size listed in 5151 {@link 5152 android.hardware.camera2.params.StreamConfigurationMap|ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS} 5153 is larger than or equal to the 'high resolution' defined above, and can be captured at at 5154 least 20 fps. For the largest {@link 5155 android.graphics.ImageFormat#YUV_420_888|AIMAGE_FORMAT_YUV_420_888} size listed in 5156 {@link 5157 android.hardware.camera2.params.StreamConfigurationMap|ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS}, 5158 the camera device can capture this size at at least 10 frames per second if the size is 5159 less than 24 megapixels. Also, the android.control.aeAvailableTargetFpsRanges entry 5160 lists at least one FPS range where the minimum FPS is >= 1 / minimumFrameDuration 5161 for the largest YUV_420_888 size. 5162 5163 If the device supports the {@link 5164 android.graphics.ImageFormat#RAW10|AIMAGE_FORMAT_RAW10}, {@link 5165 android.graphics.ImageFormat#RAW12|AIMAGE_FORMAT_RAW12}, or {@link 5166 android.graphics.ImageFormat#Y8|AIMAGE_FORMAT_Y8} formats, then those can also be 5167 captured at the same rate as the maximum-size YUV_420_888 resolution. 5168 5169 In addition, the android.sync.maxLatency field is guaranteed to have a value between 0 5170 and 4, inclusive. android.control.aeLockAvailable and android.control.awbLockAvailable 5171 are also guaranteed to be `true` so burst capture with these two locks ON yields 5172 consistent image output. 5173 </ndk_notes> 5174 </value> 5175 <value optional="true" ndk_hidden="true">YUV_REPROCESSING 5176 <notes> 5177 The camera device supports the YUV_420_888 reprocessing use case, similar to 5178 PRIVATE_REPROCESSING. This capability requires the camera device to support the 5179 following: 5180 5181 * One input stream is supported, that is, `android.request.maxNumInputStreams == 1`. 5182 * {@link android.graphics.ImageFormat#YUV_420_888} is supported as an output/input 5183 format, that is, YUV_420_888 is included in the lists of formats returned by {@link 5184 android.hardware.camera2.params.StreamConfigurationMap#getInputFormats} and {@link 5185 android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats}. 5186 * {@link 5187 android.hardware.camera2.params.StreamConfigurationMap#getValidOutputFormatsForInput} 5188 returns non-empty int[] for each supported input format returned by {@link 5189 android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}.
5190 * Each size returned by {@link 5191 android.hardware.camera2.params.StreamConfigurationMap#getInputSizes 5192 getInputSizes(YUV_420_888)} is also included in {@link 5193 android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes 5194 getOutputSizes(YUV_420_888)} 5195 * Using {@link android.graphics.ImageFormat#YUV_420_888} does not cause a frame rate 5196 drop relative to the sensor's maximum capture rate (at that resolution). 5197 * {@link android.graphics.ImageFormat#YUV_420_888} will be reprocessable into both 5198 {@link android.graphics.ImageFormat#YUV_420_888} and {@link 5199 android.graphics.ImageFormat#JPEG} formats. 5200 * The maximum available resolution for {@link 5201 android.graphics.ImageFormat#YUV_420_888} streams (both input/output) will match the 5202 maximum available resolution of {@link android.graphics.ImageFormat#JPEG} streams. 5203 * For a MONOCHROME camera with Y8 format support, all the requirements mentioned 5204 above for YUV_420_888 apply for Y8 format as well. 5205 * Static metadata android.reprocess.maxCaptureStall. 5206 * Only the below controls are effective for reprocessing requests and will be present 5207 in capture results. The reprocess requests are from the original capture results 5208 that are associated with the intermediate {@link 5209 android.graphics.ImageFormat#YUV_420_888} output buffers. All other controls in the 5210 reprocess requests will be ignored by the camera device. 5211 * android.jpeg.* 5212 * android.noiseReduction.mode 5213 * android.edge.mode 5214 * android.reprocess.effectiveExposureFactor 5215 * android.noiseReduction.availableNoiseReductionModes and 5216 android.edge.availableEdgeModes will both list ZERO_SHUTTER_LAG as a supported mode. 5217 </notes> 5218 </value> 5219 <value optional="true">DEPTH_OUTPUT 5220 <notes> 5221 The camera device can produce depth measurements from its field of view. 5222 5223 This capability requires the camera device to support the following: 5224 5225 * {@link android.graphics.ImageFormat#DEPTH16|AIMAGE_FORMAT_DEPTH16} is supported as 5226 an output format. 5227 * {@link 5228 android.graphics.ImageFormat#DEPTH_POINT_CLOUD|AIMAGE_FORMAT_DEPTH_POINT_CLOUD} is 5229 optionally supported as an output format. 5230 * This camera device, and all camera devices with the same android.lens.facing, will 5231 list the following calibration metadata entries in both {@link 5232 android.hardware.camera2.CameraCharacteristics|ACameraManager_getCameraCharacteristics} 5233 and {@link 5234 android.hardware.camera2.CaptureResult|ACameraCaptureSession_captureCallback_result}: 5235 - android.lens.poseTranslation 5236 - android.lens.poseRotation 5237 - android.lens.intrinsicCalibration 5238 - android.lens.distortion 5239 * The android.depth.depthIsExclusive entry is listed by this device. 5240 * As of Android P, the android.lens.poseReference entry is listed by this device. 5241 * A LIMITED camera with only the DEPTH_OUTPUT capability does not have to support 5242 normal YUV_420_888, Y8, JPEG, and PRIV-format outputs. It only has to support the 5243 DEPTH16 format. 5244 5245 Generally, depth output operates at a slower frame rate than standard color capture, 5246 so the DEPTH16 and DEPTH_POINT_CLOUD formats will commonly have a stall duration that 5247 should be accounted for (see {@link 5248 android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration|ACAMERA_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS}). 
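As a rough sketch (assuming `map` is the device's StreamConfigurationMap; names are
illustrative), the achievable depth output rate for a DEPTH16 size can be estimated
from its minimum frame duration plus its stall duration, which feeds into the
interleaving ratio discussed next:

    Size depthSize = map.getOutputSizes(ImageFormat.DEPTH16)[0];

    // Minimum frame duration and stall duration for this DEPTH16 size, in nanoseconds.
    long depthMinFrameNs = map.getOutputMinFrameDuration(ImageFormat.DEPTH16, depthSize);
    long depthStallNs = map.getOutputStallDuration(ImageFormat.DEPTH16, depthSize);

    // Approximate per-frame cost of a request that includes the depth target.
    double depthFps = 1e9 / (depthMinFrameNs + depthStallNs);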
On a device that supports both depth and color-based output, to enable smooth preview,
using a repeating burst is recommended, where a depth-output target is only included
once every N frames, where N is the ratio between preview output rate and depth output
rate, including depth stall time.
</notes>
</value>
<value optional="true" ndk_hidden="true">CONSTRAINED_HIGH_SPEED_VIDEO
<notes>
The device supports the constrained high speed video recording (frame rate >=120fps) use
case. The camera device will support the high speed capture session created by {@link
android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}, which
only accepts high speed request lists created by {@link
android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.

A camera device can still support high speed video streaming by advertising the high
speed FPS ranges in android.control.aeAvailableTargetFpsRanges. For this case, all
normal capture request per-frame control and synchronization requirements will apply
to the high speed fps ranges, the same as all other fps ranges. This capability
describes the capability of a specialized operating mode with many limitations (see
below), which is only targeted at high speed video recording.

The supported high speed video sizes and fps ranges are specified in {@link
android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoFpsRanges}.
To get desired output frame rates, the application is only allowed to select video
size and FPS range combinations provided by {@link
android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoSizes}. The
fps range can be controlled via android.control.aeTargetFpsRange.

In this capability, the camera device will override aeMode, awbMode, and afMode to
ON, AUTO, and CONTINUOUS_VIDEO, respectively. All post-processing block mode
controls will be overridden to be FAST. Therefore, no manual control of capture
and post-processing parameters is possible. All other controls operate the
same as when android.control.mode == AUTO. This means that all other
android.control.* fields continue to work, such as

* android.control.aeTargetFpsRange
* android.control.aeExposureCompensation
* android.control.aeLock
* android.control.awbLock
* android.control.effectMode
* android.control.aeRegions
* android.control.afRegions
* android.control.awbRegions
* android.control.afTrigger
* android.control.aePrecaptureTrigger

Outside of android.control.*, the following controls will work:

* android.flash.mode (TORCH mode only, automatic flash for still capture will not
work since aeMode is ON)
* android.lens.opticalStabilizationMode (if it is supported)
* android.scaler.cropRegion
* android.statistics.faceDetectMode (if it is supported)

For the high speed recording use case, the actual maximum supported frame rate may
be lower than what the camera can output, depending on the destination Surfaces for
the image data. For example, if the destination surface is from a video encoder,
the application needs to check whether the video encoder is capable of supporting the
high frame rate for a given video size, or it will end up with a lower recording
frame rate.
If the destination surface is from a preview window, the actual preview frame
rate will be bounded by the screen refresh rate.

The camera device will only support up to 2 high speed simultaneous output surfaces
(preview and recording surfaces) in this mode. The above controls will be effective only
if all of the below conditions are true:

* The application creates a camera capture session with no more than 2 surfaces via
{@link
android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}. The
targeted surfaces must be a preview surface (either from {@link
android.view.SurfaceView} or {@link android.graphics.SurfaceTexture}) or a recording
surface (either from {@link android.media.MediaRecorder#getSurface} or {@link
android.media.MediaCodec#createInputSurface}).
* The stream sizes are selected from the sizes reported by
{@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoSizes}.
* The FPS ranges are selected from {@link
android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoFpsRanges}.

When the above conditions are NOT satisfied,
{@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}
will fail.

Switching to an FPS range that has a different maximum FPS may trigger some camera device
reconfigurations, which may introduce extra latency. It is recommended that
the application avoids unnecessary maximum target FPS changes as much as possible
during high speed streaming.
</notes>
</value>
<value optional="true" hal_version="3.3" >MOTION_TRACKING
<notes>
The camera device supports the MOTION_TRACKING value for
android.control.captureIntent, which limits maximum exposure time to 20 ms.

This limits the motion blur of captured images, resulting in better image tracking
results for use cases such as image stabilization or augmented reality.
</notes>
</value>
<value optional="true" hal_version="3.3">LOGICAL_MULTI_CAMERA
<notes>
The camera device is a logical camera backed by two or more physical cameras.

In API level 28, the physical cameras must also be exposed to the application via
{@link android.hardware.camera2.CameraManager#getCameraIdList}.

Starting from API level 29, some or all physical cameras may not be independently
exposed to the application, in which case the physical camera IDs will not be
available in {@link android.hardware.camera2.CameraManager#getCameraIdList}. But the
application can still query the physical cameras' characteristics by calling
{@link android.hardware.camera2.CameraManager#getCameraCharacteristics}. Additionally,
if a physical camera is hidden from the camera ID list, the mandatory stream combinations
for that physical camera must be supported through the logical camera using physical
streams.

Combinations of logical and physical streams, or physical streams from different
physical cameras are not guaranteed. However, if the camera device supports
{@link CameraDevice#isSessionConfigurationSupported|ACameraDevice_isSessionConfigurationSupported},
the application must be able to query whether a stream combination involving physical
streams is supported by calling
{@link CameraDevice#isSessionConfigurationSupported|ACameraDevice_isSessionConfigurationSupported}.
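For illustration, a minimal sketch (error handling omitted; `manager` is assumed to be a
CameraManager and `logicalId` the logical camera's ID) of enumerating the physical
cameras behind a logical camera and reading their characteristics, whether or not those
IDs also appear in the camera ID list:

    CameraCharacteristics logicalChars = manager.getCameraCharacteristics(logicalId);

    // Available for logical multi-cameras starting in API level 28.
    for (String physicalId : logicalChars.getPhysicalCameraIds()) {
        // Works even if physicalId is not returned by CameraManager#getCameraIdList
        // (possible starting in API level 29).
        CameraCharacteristics physicalChars = manager.getCameraCharacteristics(physicalId);
        float[] poseTranslation = physicalChars.get(CameraCharacteristics.LENS_POSE_TRANSLATION);
        // ... use the calibration entries to correlate pixels between physical streams.
    }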

Camera applications shouldn't assume that there is at most one rear camera and one front
camera in the system. For an application that switches between front and back cameras,
the recommendation is to switch between the first rear camera and the first front
camera in the list of supported camera devices.

This capability requires the camera device to support the following:

* The IDs of underlying physical cameras are returned via
{@link android.hardware.camera2.CameraCharacteristics#getPhysicalCameraIds}.
* This camera device must list static metadata
android.logicalMultiCamera.sensorSyncType in
{@link android.hardware.camera2.CameraCharacteristics}.
* The underlying physical cameras' static metadata must list the following entries,
so that the application can correlate pixels from the physical streams:
  - android.lens.poseReference
  - android.lens.poseRotation
  - android.lens.poseTranslation
  - android.lens.intrinsicCalibration
  - android.lens.distortion
* The SENSOR_INFO_TIMESTAMP_SOURCE of the logical device and physical devices must be
the same.
* The logical camera must be a LIMITED or higher device.

A logical camera device's dynamic metadata may contain
android.logicalMultiCamera.activePhysicalId to notify the application of the currently
active physical camera Id. An active physical camera is the physical camera from which
the logical camera's main image data outputs (YUV or RAW) and metadata come from.
In addition, this serves as an indication of which physical camera is used to output to
a RAW stream, or in case only physical cameras support RAW, which physical RAW stream
the application should request.

The logical camera's static metadata tags below describe the default active physical
camera. An active physical camera is the default if it's used when the application
directly uses requests built from a template. All templates will default to the same
active physical camera.

- android.sensor.info.sensitivityRange
- android.sensor.info.colorFilterArrangement
- android.sensor.info.exposureTimeRange
- android.sensor.info.maxFrameDuration
- android.sensor.info.physicalSize
- android.sensor.info.whiteLevel
- android.sensor.info.lensShadingApplied
- android.sensor.referenceIlluminant1
- android.sensor.referenceIlluminant2
- android.sensor.calibrationTransform1
- android.sensor.calibrationTransform2
- android.sensor.colorTransform1
- android.sensor.colorTransform2
- android.sensor.forwardMatrix1
- android.sensor.forwardMatrix2
- android.sensor.blackLevelPattern
- android.sensor.maxAnalogSensitivity
- android.sensor.opticalBlackRegions
- android.sensor.availableTestPatternModes
- android.lens.info.hyperfocalDistance
- android.lens.info.minimumFocusDistance
- android.lens.info.focusDistanceCalibration
- android.lens.poseRotation
- android.lens.poseTranslation
- android.lens.intrinsicCalibration
- android.lens.poseReference
- android.lens.distortion

The field of view of all non-RAW physical streams must be the same or as close as
possible to that of non-RAW logical streams.
If the requested FOV is outside of the
range supported by the physical camera, the physical stream for that physical camera
will use either the maximum or minimum scaler crop region, depending on which one is
closer to the requested FOV. For example, for a logical camera with a wide-tele lens
configuration where the wide lens is the default, if the logical camera's crop region
is set to maximum, the physical stream for the tele lens will be configured to its
maximum crop region. On the other hand, if the logical camera has a normal-wide lens
configuration where the normal lens is the default, when the logical camera's crop
region is set to maximum, the FOV of the logical streams will be that of the normal
lens. The FOV of the physical streams for the wide lens will be the same as the
logical stream, by making the crop region smaller than its active array size to
compensate for the smaller focal length.

Even if the underlying physical cameras have different RAW characteristics (such as
size or CFA pattern), a logical camera can still advertise the RAW capability. In this
case, when the application configures a RAW stream, the camera device will make sure
the active physical camera remains active to ensure consistent RAW output
behavior, and not switch to other physical cameras.

The capture request and result metadata tags required for backward compatible camera
functionalities will be solely based on the logical camera capability. On the other
hand, the use of manual capture controls (sensor or post-processing) with a
logical camera may result in unexpected behavior when the HAL decides to switch
between physical cameras with different characteristics under the hood. For example,
when the application manually sets exposure time and sensitivity while zooming in,
the brightness of the camera images may suddenly change because the HAL switches from one
physical camera to the other.
</notes>
</value>
<value optional="true" hal_version="3.3" >MONOCHROME
<notes>
The camera device is a monochrome camera that doesn't contain a color filter array,
and for a YUV_420_888 stream, the pixel values on U and V planes are all 128.

A MONOCHROME camera must support the guaranteed stream combinations required for
its device level and capabilities. Additionally, if the monochrome camera device
supports the Y8 format, all mandatory stream combination requirements related to {@link
android.graphics.ImageFormat#YUV_420_888|AIMAGE_FORMAT_YUV_420_888 YUV_420_888} apply
to {@link android.graphics.ImageFormat#Y8|AIMAGE_FORMAT_Y8 Y8} as well. There are no
mandatory stream combination requirements with regard to
{@link android.graphics.ImageFormat#Y8|AIMAGE_FORMAT_Y8 Y8} for Bayer camera devices.

Starting from Android Q, the SENSOR_INFO_COLOR_FILTER_ARRANGEMENT of a MONOCHROME
camera will be either MONO or NIR.
</notes>
</value>
<value optional="true" hal_version="3.4" >SECURE_IMAGE_DATA
<notes>
The camera device is capable of writing image data into a region of memory
inaccessible to Android userspace or the Android kernel, and only accessible to
trusted execution environments (TEE).
</notes>
</value>

</enum>
<description>List of capabilities that this camera device
advertises as fully supporting.</description>
<details>
A capability is a contract that the camera device makes in order
to be able to satisfy one or more use cases.

Listing a capability guarantees that the whole set of features
required to support a common use will all be available.

Using a subset of the functionality provided by an unsupported
capability may be possible on a specific camera device implementation;
to do this, query each of android.request.availableRequestKeys,
android.request.availableResultKeys,
android.request.availableCharacteristicsKeys.

The following capabilities are guaranteed to be available on
android.info.supportedHardwareLevel `==` FULL devices:

* MANUAL_SENSOR
* MANUAL_POST_PROCESSING

Other capabilities may be available on either FULL or LIMITED
devices, but the application should query this key to be sure.
</details>
<hal_details>
Additional constraint details per-capability will be available
in the Compatibility Test Suite.

Minimum baseline requirements for the
BACKWARD_COMPATIBLE capability are not explicitly listed.
Instead refer to "BC" tags and the camera CTS tests in the
android.hardware.camera2.cts package.

Listed controls that can be either request or result (e.g.
android.sensor.exposureTime) must be available both in the
request and the result in order to be considered to be
capability-compliant.

For example, if the HAL claims to support MANUAL control,
then exposure time must be configurable via the request _and_
the actual exposure applied must be available via
the result.

If MANUAL_SENSOR is omitted, the HAL may choose to omit the
android.scaler.availableMinFrameDurations static property entirely.

For PRIVATE_REPROCESSING and YUV_REPROCESSING capabilities, see
hardware/libhardware/include/hardware/camera3.h Section 10 for more information.

Devices that support the MANUAL_SENSOR capability must support the
CAMERA3_TEMPLATE_MANUAL template defined in camera3.h.

Devices that support the PRIVATE_REPROCESSING capability or the
YUV_REPROCESSING capability must support the
CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template defined in camera3.h.

For DEPTH_OUTPUT, the depth-format keys
android.depth.availableDepthStreamConfigurations,
android.depth.availableDepthMinFrameDurations,
android.depth.availableDepthStallDurations must be available, in
addition to the other keys explicitly mentioned in the DEPTH_OUTPUT
enum notes. The entry android.depth.maxDepthSamples must be available
if the DEPTH_POINT_CLOUD format is supported (HAL pixel format BLOB, dataspace
DEPTH).

A camera device with the LOGICAL_MULTI_CAMERA capability should operate in the
same way as a physical camera device, based on its hardware level and capabilities.
It's recommended that its feature set is a superset of that of the individual physical cameras.

* In the camera1 API, to maintain application compatibility, for each camera facing there
may be one or more {logical_camera_id, physical_camera_1_id, physical_camera_2_id, ...}
combinations, where logical_camera_id is composed of physical_camera_N_id. The camera
framework will only advertise one camera id
(within the combinations for the particular facing) that is frontmost in the
HAL-published camera id list.
For example, if the HAL advertises 6 back-facing camera IDs (ID0 to ID5), where ID4
and ID5 are logical cameras backed by ID0+ID1 and ID2+ID3 respectively, then
only ID0 will be available for the camera1 API to use.

* The camera HAL is strongly recommended to advertise camera devices with the best feature,
power, performance, and latency tradeoffs at the front of the camera id list.

* The camera HAL may switch between physical cameras depending on focalLength or cropRegion.
If the physical cameras have different sizes, the HAL must maintain a single logical camera
activeArraySize/pixelArraySize/preCorrectionActiveArraySize, and must do proper mapping
between the logical camera and the underlying physical cameras for all related metadata tags,
such as crop region, 3A regions, and intrinsicCalibration.

* Starting from HIDL ICameraDevice version 3.5, the camera HAL must support
isStreamCombinationSupported so that the application can query whether a particular
combination of logical and physical streams is supported.

A MONOCHROME camera device must also advertise the BACKWARD_COMPATIBLE capability, and must
not advertise the MANUAL_POST_PROCESSING capability.

* To maintain backward compatibility, the camera device must support all
BACKWARD_COMPATIBLE required keys. The android.control.awbAvailableModes key only contains
AUTO, and android.control.awbState is either CONVERGED or LOCKED depending on
android.control.awbLock.

* android.colorCorrection.mode, android.colorCorrection.transform, and
android.colorCorrection.gains must not be in the available request and result keys.
As a result, the camera device cannot be a FULL device. However, the HAL can
still advertise other individual capabilities.

* If the device supports tonemap control, only android.tonemap.curveRed is used.
CurveGreen and curveBlue are no-ops.

In Android API level 28, a MONOCHROME camera device must not have RAW capability. From
API level 29, a camera is allowed to have both MONOCHROME and RAW capabilities.
</hal_details>
</entry>
<entry name="availableRequestKeys" type="int32" visibility="ndk_public"
container="array" hwlevel="legacy">
<array>
<size>n</size>
</array>
<description>A list of all keys that the camera device has available
to use with {@link android.hardware.camera2.CaptureRequest|ACaptureRequest}.</description>

<details>Attempting to set a key into a CaptureRequest that is not
listed here will result in an invalid request and will be rejected
by the camera device.

This field can be used to query the feature set of a camera device
at a more granular level than capabilities. This is especially
important for optional keys that are not listed under any capability
in android.request.availableCapabilities.
</details>
<hal_details>
Vendor tags can be listed here.
Vendor tag metadata should also
use the extensions C api (refer to camera3.h for more details).

Setting/getting vendor tags will be checked against the metadata
vendor extensions API and not against this field.

The HAL must not consume any request tags that are not listed either
here or in the vendor tag list.

The public camera2 API will always make the vendor tags visible
via
{@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureRequestKeys}.
</hal_details>
</entry>
<entry name="availableResultKeys" type="int32" visibility="ndk_public"
container="array" hwlevel="legacy">
<array>
<size>n</size>
</array>
<description>A list of all keys that the camera device has available to use with {@link
android.hardware.camera2.CaptureResult|ACameraCaptureSession_captureCallback_result}.
</description>

<details>Attempting to get a key from a CaptureResult that is not
listed here will always return a `null` value. Getting a key from
a CaptureResult that is listed here will generally never return a `null`
value.

The following keys may return `null` unless they are enabled:

* android.statistics.lensShadingMap (non-null iff android.statistics.lensShadingMapMode == ON)

(Those sometimes-null keys will nevertheless be listed here
if they are available.)

This field can be used to query the feature set of a camera device
at a more granular level than capabilities. This is especially
important for optional keys that are not listed under any capability
in android.request.availableCapabilities.
</details>
<hal_details>
Tags listed here must always have an entry in the result metadata,
even if that size is 0 elements. Only array-type tags (e.g. lists,
matrices, strings) are allowed to have 0 elements.

Vendor tags can be listed here. Vendor tag metadata should also
use the extensions C api (refer to camera3.h for more details).

Setting/getting vendor tags will be checked against the metadata
vendor extensions API and not against this field.

The HAL must not produce any result tags that are not listed either
here or in the vendor tag list.

The public camera2 API will always make the vendor tags visible via {@link
android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys}.
</hal_details>
</entry>
<entry name="availableCharacteristicsKeys" type="int32" visibility="ndk_public"
container="array" hwlevel="legacy">
<array>
<size>n</size>
</array>
<description>A list of all keys that the camera device has available to use with {@link
android.hardware.camera2.CameraCharacteristics|ACameraManager_getCameraCharacteristics}.
</description>
<details>This entry follows the same rules as
android.request.availableResultKeys (except that it applies for
CameraCharacteristics instead of CaptureResult). See above for more
details.
</details>
<hal_details>
Keys listed here must always have an entry in the static info metadata,
even if that size is 0 elements. Only array-type tags (e.g. lists,
matrices, strings) are allowed to have 0 elements.

Vendor tags can be listed here. Vendor tag metadata should also use
the extensions C api (refer to camera3.h for more details).

Setting/getting vendor tags will be checked against the metadata
vendor extensions API and not against this field.

The HAL must not have any tags in its static info that are not listed
either here or in the vendor tag list.

The public camera2 API will always make the vendor tags visible
via {@link android.hardware.camera2.CameraCharacteristics#getKeys}.
</hal_details>
</entry>
<entry name="availableSessionKeys" type="int32" visibility="ndk_public"
container="array" hwlevel="legacy" hal_version="3.3">
<array>
<size>n</size>
</array>
<description>A subset of the available request keys that the camera device
can pass as part of the capture session initialization.</description>

<details> This is a subset of android.request.availableRequestKeys which
contains a list of keys that are difficult to apply per-frame and
can result in unexpected delays when modified during the capture session
lifetime. Typical examples include parameters that require a
time-consuming hardware re-configuration or internal camera pipeline
change. For performance reasons we advise clients to pass their initial
values as part of
{@link SessionConfiguration#setSessionParameters|ACameraDevice_createCaptureSessionWithSessionParameters}.
Once the camera capture session is enabled, it is also recommended to avoid
changing them from their initial values set in
{@link SessionConfiguration#setSessionParameters|ACameraDevice_createCaptureSessionWithSessionParameters}.
Control over session parameters can still be exerted in capture requests,
but clients should be aware of and expect delays during their application.
An example usage scenario could look like this:

* The camera client starts by querying the session parameter key list via
{@link android.hardware.camera2.CameraCharacteristics#getAvailableSessionKeys|ACameraManager_getCameraCharacteristics}.
* Before triggering the capture session create sequence, a capture request
must be built via
{@link CameraDevice#createCaptureRequest|ACameraDevice_createCaptureRequest}
using an appropriate template matching the particular use case.
* The client should go over the list of session parameters and check
whether any of the keys listed match the parameters that
they intend to modify as part of the first capture request.
* If there is no such match, the capture request can be passed
unmodified to
{@link SessionConfiguration#setSessionParameters|ACameraDevice_createCaptureSessionWithSessionParameters}.
* If matches do exist, the client should update the respective values
and pass the request to
{@link SessionConfiguration#setSessionParameters|ACameraDevice_createCaptureSessionWithSessionParameters}.
* After the capture session initialization completes, the session parameter
key list can continue to serve as a reference when posting or updating
further requests. As mentioned above, further changes to session
parameters should ideally be avoided; if updates are necessary,
however, clients can expect a delay/glitch during the
parameter switch.
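A minimal sketch of that sequence (assuming `characteristics`, `cameraDevice`, `outputs`,
`executor`, and `stateCallback` are already set up; error handling is omitted and names
are illustrative):

    // 1. Query the session parameter key list.
    List<CaptureRequest.Key<?>> sessionKeys = characteristics.getAvailableSessionKeys();

    // 2. Build an initial request from a template matching the use case.
    CaptureRequest.Builder builder =
        cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);

    // 3.-5. For any parameter the client intends to modify that is also listed in
    // sessionKeys (for example, the target FPS range), set the intended initial
    // value before the session is created.
    builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, new Range<>(30, 30));

    // Pass the request as session parameters when creating the session.
    SessionConfiguration config = new SessionConfiguration(
        SessionConfiguration.SESSION_REGULAR, outputs, executor, stateCallback);
    config.setSessionParameters(builder.build());
    cameraDevice.createCaptureSession(config);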
5753 5754 </details> 5755 <hal_details> 5756 If android.control.aeTargetFpsRange is part of the session parameters and constrained high 5757 speed mode is enabled, then only modifications of the maximum framerate value will be 5758 monitored by the framework and can trigger camera re-configuration. For more information 5759 about framerate ranges during constrained high speed sessions see 5760 {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}. 5761 Vendor tags can be listed here. Vendor tag metadata should also 5762 use the extensions C api (refer to 5763 android.hardware.camera.device.V3_4.StreamConfiguration.sessionParams for more details). 5764 5765 Setting/getting vendor tags will be checked against the metadata 5766 vendor extensions API and not against this field. 5767 5768 The HAL must not consume any request tags in the session parameters that 5769 are not listed either here or in the vendor tag list. 5770 5771 The public camera2 API will always make the vendor tags visible 5772 via 5773 {@link android.hardware.camera2.CameraCharacteristics#getAvailableSessionKeys}. 5774 </hal_details> 5775 </entry> 5776 <entry name="availablePhysicalCameraRequestKeys" type="int32" visibility="ndk_public" 5777 container="array" hwlevel="limited" hal_version="3.3"> 5778 <array> 5779 <size>n</size> 5780 </array> 5781 <description>A subset of the available request keys that can be overridden for 5782 physical devices backing a logical multi-camera.</description> 5783 <details> 5784 This is a subset of android.request.availableRequestKeys which contains a list 5785 of keys that can be overridden using {@link CaptureRequest.Builder#setPhysicalCameraKey}. 5786 The respective value of such request key can be obtained by calling 5787 {@link CaptureRequest.Builder#getPhysicalCameraKey}. Capture requests that contain 5788 individual physical device requests must be built via 5789 {@link android.hardware.camera2.CameraDevice#createCaptureRequest(int, Set)}. 5790 </details> 5791 <hal_details> 5792 Vendor tags can be listed here. Vendor tag metadata should also 5793 use the extensions C api (refer to 5794 android.hardware.camera.device.V3_4.CaptureRequest.physicalCameraSettings for more 5795 details). 5796 5797 Setting/getting vendor tags will be checked against the metadata 5798 vendor extensions API and not against this field. 5799 5800 The HAL must not consume any request tags in the session parameters that 5801 are not listed either here or in the vendor tag list. 5802 5803 There should be no overlap between this set of keys and the available session keys 5804 {@link android.hardware.camera2.CameraCharacteristics#getAvailableSessionKeys} along 5805 with any other controls that can have impact on the dual-camera sync. 5806 5807 The public camera2 API will always make the vendor tags visible 5808 via 5809 {@link android.hardware.camera2.CameraCharacteristics#getAvailablePhysicalCameraRequestKeys}. 
</hal_details>
</entry>
<entry name="characteristicKeysNeedingPermission" type="int32" visibility="hidden"
container="array" hwlevel="legacy" hal_version="3.4">
<array>
<size>n</size>
</array>
<description>A list of camera characteristics keys that are only available
when the camera client holds the camera permission.</description>

<details>The entry contains a subset of
{@link android.hardware.camera2.CameraCharacteristics#getKeys} that require camera clients
to acquire the {@link android.Manifest.permission#CAMERA} permission before calling
{@link android.hardware.camera2.CameraManager#getCameraCharacteristics}. If the
permission is not held by the camera client, then the values of the respective properties
will not be present in {@link android.hardware.camera2.CameraCharacteristics}.
</details>
<hal_details>
Do not set this property directly; the camera service will overwrite any previous values.
</hal_details>
</entry>
</static>
</section>
<section name="scaler">
<controls>
<entry name="cropRegion" type="int32" visibility="public"
container="array" typedef="rectangle" hwlevel="legacy">
<array>
<size>4</size>
</array>
<description>The desired region of the sensor to read out for this capture.</description>
<units>Pixel coordinates relative to
android.sensor.info.activeArraySize or
android.sensor.info.preCorrectionActiveArraySize depending on distortion correction
capability and mode</units>
<details>
This control can be used to implement digital zoom.

For devices not supporting the android.distortionCorrection.mode control, the coordinate
system always follows that of android.sensor.info.activeArraySize, with `(0, 0)` being
the top-left pixel of the active array.

For devices supporting the android.distortionCorrection.mode control, the coordinate
system depends on the mode being set.
When the distortion correction mode is OFF, the coordinate system follows
android.sensor.info.preCorrectionActiveArraySize, with
`(0, 0)` being the top-left pixel of the pre-correction active array.
When the distortion correction mode is not OFF, the coordinate system follows
android.sensor.info.activeArraySize, with
`(0, 0)` being the top-left pixel of the active array.

Output streams use this rectangle to produce their output,
cropping to a smaller region if necessary to maintain the
stream's aspect ratio, then scaling the sensor input to
match the output's configured resolution.

The crop region is applied after the RAW to other color
space (e.g. YUV) conversion. Since raw streams
(e.g. RAW16) don't have the conversion stage, they are not
croppable. The crop region will be ignored by raw streams.

For non-raw streams, any additional per-stream cropping will
be done to maximize the final pixel area of the stream.

For example, if the crop region is set to a 4:3 aspect
ratio, then 4:3 streams will use the exact crop
region. 16:9 streams will further crop vertically
(letterbox).

Conversely, if the crop region is set to a 16:9 aspect ratio, then 4:3
outputs will crop horizontally (pillarbox), and 16:9
streams will match exactly. These additional crops will
be centered within the crop region.
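For example, a minimal sketch (assuming the activeArraySize coordinate system;
`characteristics`, `requestBuilder`, and `desiredZoom` are illustrative) of deriving a
centered crop region for a digital zoom factor:

    Rect active = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
    float maxZoom = characteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
    float zoom = Math.min(desiredZoom, maxZoom);

    // Center a crop region of 1/zoom the active array dimensions, with (0, 0)
    // being the top-left pixel of the active array.
    int cropWidth = (int) Math.floor(active.width() / zoom);
    int cropHeight = (int) Math.floor(active.height() / zoom);
    int left = (active.width() - cropWidth) / 2;
    int top = (active.height() - cropHeight) / 2;
    requestBuilder.set(CaptureRequest.SCALER_CROP_REGION,
        new Rect(left, top, left + cropWidth, top + cropHeight));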

If the coordinate system is android.sensor.info.activeArraySize, the width and height
of the crop region cannot be set to be smaller than
`floor( activeArraySize.width / android.scaler.availableMaxDigitalZoom )` and
`floor( activeArraySize.height / android.scaler.availableMaxDigitalZoom )`, respectively.

If the coordinate system is android.sensor.info.preCorrectionActiveArraySize, the width
and height of the crop region cannot be set to be smaller than
`floor( preCorrectionActiveArraySize.width / android.scaler.availableMaxDigitalZoom )`
and
`floor( preCorrectionActiveArraySize.height / android.scaler.availableMaxDigitalZoom )`,
respectively.

The camera device may adjust the crop region to account
for rounding and other hardware requirements; the final
crop region used will be included in the output capture
result.
</details>
<ndk_details>
The data representation is int[4], which maps to (left, top, width, height).
</ndk_details>
<hal_details>
The output streams must maintain square pixels at all
times, no matter what the relative aspect ratios of the
crop region and the stream are. Negative values for the
corner are allowed for raw output if the full pixel array is
larger than the active pixel array. Width and height may be
rounded to the nearest larger supportable width, especially
for raw output, where only a few fixed scales may be
possible.

For a set of output streams configured, if the sensor output is cropped to a smaller
size than the pre-correction active array size, the HAL needs to follow the below
cropping rules:

* The HAL needs to handle the cropRegion as if the sensor crop size is the effective
pre-correction active array size. More specifically, the HAL must transform the request
cropRegion from android.sensor.info.preCorrectionActiveArraySize to the sensor cropped
pixel area size in this way:
  1. Translate the requested cropRegion w.r.t. the top-left corner of the sensor
     cropped pixel area by (tx, ty),
     where `ty = sensorCrop.top * (sensorCrop.height / preCorrectionActiveArraySize.height)`
     and `tx = sensorCrop.left * (sensorCrop.width / preCorrectionActiveArraySize.width)`.
     Here, (sensorCrop.top, sensorCrop.left) is the coordinate based off
     android.sensor.info.activeArraySize.
  2. Scale the width and height of the requested cropRegion with the scaling factors
     sensorCrop.width/preCorrectionActiveArraySize.width and
     sensorCrop.height/preCorrectionActiveArraySize.height, respectively.
  Once this new cropRegion is calculated, the HAL must use this region to crop the image
  with regard to the sensor crop size (effective pre-correction active array size). The
  HAL still needs to follow the general cropping rule for this new cropRegion and effective
  pre-correction active array size.

* The HAL must report the cropRegion with regard to android.sensor.info.preCorrectionActiveArraySize.
The HAL needs to convert the new cropRegion generated above back w.r.t. the full pre-correction
active array size. The reported cropRegion may be slightly different from the requested
cropRegion since the HAL may adjust the crop region to account for rounding, conversion
error, or other hardware limitations.
5940 5941 HAL2.x uses only (x, y, width) 5942 </hal_details> 5943 <tag id="BC" /> 5944 </entry> 5945 </controls> 5946 <static> 5947 <entry name="availableFormats" type="int32" 5948 visibility="hidden" deprecated="true" enum="true" 5949 container="array" typedef="imageFormat"> 5950 <array> 5951 <size>n</size> 5952 </array> 5953 <enum> 5954 <value optional="true" id="0x20">RAW16 5955 <notes> 5956 RAW16 is a standard, cross-platform format for raw image 5957 buffers with 16-bit pixels. 5958 5959 Buffers of this format are typically expected to have a 5960 Color Filter Array (CFA) layout, which is given in 5961 android.sensor.info.colorFilterArrangement. Sensors with 5962 CFAs that are not representable by a format in 5963 android.sensor.info.colorFilterArrangement should not 5964 use this format. 5965 5966 Buffers of this format will also follow the constraints given for 5967 RAW_OPAQUE buffers, but with relaxed performance constraints. 5968 5969 This format is intended to give users access to the full contents 5970 of the buffers coming directly from the image sensor prior to any 5971 cropping or scaling operations, and all coordinate systems for 5972 metadata used for this format are relative to the size of the 5973 active region of the image sensor before any geometric distortion 5974 correction has been applied (i.e. 5975 android.sensor.info.preCorrectionActiveArraySize). Supported 5976 dimensions for this format are limited to the full dimensions of 5977 the sensor (e.g. either android.sensor.info.pixelArraySize or 5978 android.sensor.info.preCorrectionActiveArraySize will be the 5979 only supported output size). 5980 5981 See android.scaler.availableInputOutputFormatsMap for 5982 the full set of performance guarantees. 5983 </notes> 5984 </value> 5985 <value optional="true" id="0x24">RAW_OPAQUE 5986 <notes> 5987 RAW_OPAQUE (or 5988 {@link android.graphics.ImageFormat#RAW_PRIVATE RAW_PRIVATE} 5989 as referred in public API) is a format for raw image buffers 5990 coming from an image sensor. 5991 5992 The actual structure of buffers of this format is 5993 platform-specific, but must follow several constraints: 5994 5995 1. No image post-processing operations may have been applied to 5996 buffers of this type. These buffers contain raw image data coming 5997 directly from the image sensor. 5998 1. If a buffer of this format is passed to the camera device for 5999 reprocessing, the resulting images will be identical to the images 6000 produced if the buffer had come directly from the sensor and was 6001 processed with the same settings. 6002 6003 The intended use for this format is to allow access to the native 6004 raw format buffers coming directly from the camera sensor without 6005 any additional conversions or decrease in framerate. 6006 6007 See android.scaler.availableInputOutputFormatsMap for the full set of 6008 performance guarantees. 
6009 </notes> 6010 </value> 6011 <value optional="true" id="0x32315659">YV12 6012 <notes>YCrCb 4:2:0 Planar</notes> 6013 </value> 6014 <value optional="true" id="0x11">YCrCb_420_SP 6015 <notes>NV21</notes> 6016 </value> 6017 <value id="0x22">IMPLEMENTATION_DEFINED 6018 <notes>System internal format, not application-accessible</notes> 6019 </value> 6020 <value id="0x23">YCbCr_420_888 6021 <notes>Flexible YUV420 Format</notes> 6022 </value> 6023 <value id="0x21">BLOB 6024 <notes>JPEG format</notes> 6025 </value> 6026 <value id="0x25" hal_version="3.4">RAW10 6027 <notes>RAW10</notes> 6028 </value> 6029 <value id="0x26" hal_version="3.4">RAW12 6030 <notes>RAW12</notes> 6031 </value> 6032 <value id="0x20203859" hal_version="3.4">Y8 6033 <notes>Y8</notes> 6034 </value> 6035 </enum> 6036 <description>The list of image formats that are supported by this 6037 camera device for output streams.</description> 6038 <deprecation_description> 6039 Not used in HALv3 or newer 6040 </deprecation_description> 6041 <details> 6042 All camera devices will support JPEG and YUV_420_888 formats. 6043 6044 When set to YUV_420_888, application can access the YUV420 data directly. 6045 </details> 6046 <hal_details> 6047 These format values are from HAL_PIXEL_FORMAT_* in 6048 system/core/include/system/graphics.h. 6049 6050 When IMPLEMENTATION_DEFINED is used, the platform 6051 gralloc module will select a format based on the usage flags provided 6052 by the camera HAL device and the other endpoint of the stream. It is 6053 usually used by preview and recording streams, where the application doesn't 6054 need access the image data. 6055 6056 YCbCr_420_888 format must be supported by the HAL. When an image stream 6057 needs CPU/application direct access, this format will be used. For a MONOCHROME 6058 camera device, the pixel value of Cb and Cr planes is 128. 6059 6060 The BLOB format must be supported by the HAL. This is used for the JPEG stream. 6061 6062 A RAW_OPAQUE buffer should contain only pixel data. It is strongly 6063 recommended that any information used by the camera device when 6064 processing images is fully expressed by the result metadata 6065 for that image buffer. 6066 </hal_details> 6067 <tag id="BC" /> 6068 </entry> 6069 <entry name="availableJpegMinDurations" type="int64" visibility="hidden" deprecated="true" 6070 container="array"> 6071 <array> 6072 <size>n</size> 6073 </array> 6074 <description>The minimum frame duration that is supported 6075 for each resolution in android.scaler.availableJpegSizes. 6076 </description> 6077 <deprecation_description> 6078 Not used in HALv3 or newer 6079 </deprecation_description> 6080 <units>Nanoseconds</units> 6081 <range>TODO: Remove property.</range> 6082 <details> 6083 This corresponds to the minimum steady-state frame duration when only 6084 that JPEG stream is active and captured in a burst, with all 6085 processing (typically in android.*.mode) set to FAST. 
6086 6087 When multiple streams are configured, the minimum 6088 frame duration will be &gt;= max(individual stream min 6089 durations)</details> 6090 <tag id="BC" /> 6091 </entry> 6092 <entry name="availableJpegSizes" type="int32" visibility="hidden" 6093 deprecated="true" container="array" typedef="size"> 6094 <array> 6095 <size>n</size> 6096 <size>2</size> 6097 </array> 6098 <description>The JPEG resolutions that are supported by this camera device.</description> 6099 <deprecation_description> 6100 Not used in HALv3 or newer 6101 </deprecation_description> 6102 <range>TODO: Remove property.</range> 6103 <details> 6104 The resolutions are listed as `(width, height)` pairs. All camera devices will support 6105 sensor maximum resolution (defined by android.sensor.info.activeArraySize). 6106 </details> 6107 <hal_details> 6108 The HAL must include sensor maximum resolution 6109 (defined by android.sensor.info.activeArraySize), 6110 and should include half/quarter of sensor maximum resolution. 6111 </hal_details> 6112 <tag id="BC" /> 6113 </entry> 6114 <entry name="availableMaxDigitalZoom" type="float" visibility="public" 6115 hwlevel="legacy"> 6116 <description>The maximum ratio between both active area width 6117 and crop region width, and active area height and 6118 crop region height, for android.scaler.cropRegion. 6119 </description> 6120 <units>Zoom scale factor</units> 6121 <range>&gt;=1</range> 6122 <details> 6123 This represents the maximum amount of zooming possible by 6124 the camera device, or equivalently, the minimum cropping 6125 window size. 6126 6127 Crop regions that have a width or height that is smaller 6128 than this ratio allows will be rounded up to the minimum 6129 allowed size by the camera device. 6130 </details> 6131 <tag id="BC" /> 6132 </entry> 6133 <entry name="availableProcessedMinDurations" type="int64" visibility="hidden" deprecated="true" 6134 container="array"> 6135 <array> 6136 <size>n</size> 6137 </array> 6138 <description>For each available processed output size (defined in 6139 android.scaler.availableProcessedSizes), this property lists the 6140 minimum supportable frame duration for that size. 6141 </description> 6142 <deprecation_description> 6143 Not used in HALv3 or newer 6144 </deprecation_description> 6145 <units>Nanoseconds</units> 6146 <details> 6147 This should correspond to the frame duration when only that processed 6148 stream is active, with all processing (typically in android.*.mode) 6149 set to FAST. 6150 6151 When multiple streams are configured, the minimum frame duration will 6152 be &gt;= max(individual stream min durations). 6153 </details> 6154 <tag id="BC" /> 6155 </entry> 6156 <entry name="availableProcessedSizes" type="int32" visibility="hidden" 6157 deprecated="true" container="array" typedef="size"> 6158 <array> 6159 <size>n</size> 6160 <size>2</size> 6161 </array> 6162 <description>The resolutions available for use with 6163 processed output streams, such as YV12, NV12, and 6164 platform opaque YUV/RGB streams to the GPU or video 6165 encoders.</description> 6166 <deprecation_description> 6167 Not used in HALv3 or newer 6168 </deprecation_description> 6169 <details> 6170 The resolutions are listed as `(width, height)` pairs. 6171 6172 For a given use case, the actual maximum supported resolution 6173 may be lower than what is listed here, depending on the destination 6174 Surface for the image data. For example, for recording video, 6175 the video encoder chosen may have a maximum size limit (e.g. 
1080p) 6176 smaller than what the camera (e.g. maximum resolution is 3264x2448) 6177 can provide. 6178 6179 Please reference the documentation for the image data destination to 6180 check if it limits the maximum size for image data. 6181 </details> 6182 <hal_details> 6183 For FULL capability devices (`android.info.supportedHardwareLevel == FULL`), 6184 the HAL must include all JPEG sizes listed in android.scaler.availableJpegSizes 6185 and each below resolution if it is smaller than or equal to the sensor 6186 maximum resolution (if they are not listed in JPEG sizes already): 6187 6188 * 240p (320 x 240) 6189 * 480p (640 x 480) 6190 * 720p (1280 x 720) 6191 * 1080p (1920 x 1080) 6192 6193 For LIMITED capability devices (`android.info.supportedHardwareLevel == LIMITED`), 6194 the HAL only has to list up to the maximum video size supported by the devices. 6195 </hal_details> 6196 <tag id="BC" /> 6197 </entry> 6198 <entry name="availableRawMinDurations" type="int64" deprecated="true" 6199 container="array"> 6200 <array> 6201 <size>n</size> 6202 </array> 6203 <description> 6204 For each available raw output size (defined in 6205 android.scaler.availableRawSizes), this property lists the minimum 6206 supportable frame duration for that size. 6207 </description> 6208 <deprecation_description> 6209 Not used in HALv3 or newer 6210 </deprecation_description> 6211 <units>Nanoseconds</units> 6212 <details> 6213 Should correspond to the frame duration when only the raw stream is 6214 active. 6215 6216 When multiple streams are configured, the minimum 6217 frame duration will be &gt;= max(individual stream min 6218 durations)</details> 6219 <tag id="BC" /> 6220 </entry> 6221 <entry name="availableRawSizes" type="int32" deprecated="true" 6222 container="array" typedef="size"> 6223 <array> 6224 <size>n</size> 6225 <size>2</size> 6226 </array> 6227 <description>The resolutions available for use with raw 6228 sensor output streams, listed as width, 6229 height</description> 6230 <deprecation_description> 6231 Not used in HALv3 or newer 6232 </deprecation_description> 6233 </entry> 6234 </static> 6235 <dynamic> 6236 <clone entry="android.scaler.cropRegion" kind="controls"> 6237 </clone> 6238 </dynamic> 6239 <static> 6240 <entry name="availableInputOutputFormatsMap" type="int32" visibility="hidden" 6241 typedef="reprocessFormatsMap"> 6242 <description>The mapping of image formats that are supported by this 6243 camera device for input streams, to their corresponding output formats. 6244 </description> 6245 <details> 6246 All camera devices with at least 1 6247 android.request.maxNumInputStreams will have at least one 6248 available input format. 
6249 6250 The camera device will support the following map of formats, 6251 if its dependent capability (android.request.availableCapabilities) is supported: 6252 6253 Input Format | Output Format | Capability 6254 :-------------------------------------------------|:--------------------------------------------------|:---------- 6255 {@link android.graphics.ImageFormat#PRIVATE} | {@link android.graphics.ImageFormat#JPEG} | PRIVATE_REPROCESSING 6256 {@link android.graphics.ImageFormat#PRIVATE} | {@link android.graphics.ImageFormat#YUV_420_888} | PRIVATE_REPROCESSING 6257 {@link android.graphics.ImageFormat#YUV_420_888} | {@link android.graphics.ImageFormat#JPEG} | YUV_REPROCESSING 6258 {@link android.graphics.ImageFormat#YUV_420_888} | {@link android.graphics.ImageFormat#YUV_420_888} | YUV_REPROCESSING 6259 6260 PRIVATE refers to a device-internal format that is not directly application-visible. A 6261 PRIVATE input surface can be acquired by {@link android.media.ImageReader#newInstance} 6262 with {@link android.graphics.ImageFormat#PRIVATE} as the format. 6263 6264 For a PRIVATE_REPROCESSING-capable camera device, using the PRIVATE format as either input 6265 or output will never hurt maximum frame rate (i.e. {@link 6266 android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration 6267 getOutputStallDuration(ImageFormat.PRIVATE, size)} is always 0), 6268 6269 Attempting to configure an input stream with output streams not 6270 listed as available in this map is not valid. 6271 6272 Additionally, if the camera device is MONOCHROME with Y8 support, it will also support 6273 the following map of formats if its dependent capability 6274 (android.request.availableCapabilities) is supported: 6275 6276 Input Format | Output Format | Capability 6277 :-------------------------------------------------|:--------------------------------------------------|:---------- 6278 {@link android.graphics.ImageFormat#PRIVATE} | {@link android.graphics.ImageFormat#Y8} | PRIVATE_REPROCESSING 6279 {@link android.graphics.ImageFormat#Y8} | {@link android.graphics.ImageFormat#JPEG} | YUV_REPROCESSING 6280 {@link android.graphics.ImageFormat#Y8} | {@link android.graphics.ImageFormat#Y8} | YUV_REPROCESSING 6281 6282 </details> 6283 <hal_details> 6284 For the formats, see `system/core/include/system/graphics.h` for a definition 6285 of the image format enumerations. The PRIVATE format refers to the 6286 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED format. The HAL could determine 6287 the actual format by using the gralloc usage flags. 6288 For ZSL use case in particular, the HAL could choose appropriate format (partially 6289 processed YUV or RAW based format) by checking the format and GRALLOC_USAGE_HW_CAMERA_ZSL. 6290 See camera3.h for more details. 6291 6292 This value is encoded as a variable-size array-of-arrays. 6293 The inner array always contains `[format, length, ...]` where 6294 `...` has `length` elements. An inner array is followed by another 6295 inner array if the total metadata entry size hasn't yet been exceeded. 
6296 6297 A code sample to read/write this encoding (with a device that 6298 supports reprocessing IMPLEMENTATION_DEFINED to YUV_420_888, and JPEG, 6299 and reprocessing YUV_420_888 to YUV_420_888 and JPEG): 6300 6301 // reading 6302 int32_t* contents = &entry.i32[0]; 6303 for (size_t i = 0; i < entry.count; ) { 6304 int32_t format = contents[i++]; 6305 int32_t length = contents[i++]; 6306 int32_t output_formats[length]; 6307 memcpy(&output_formats[0], &contents[i], 6308 length * sizeof(int32_t)); 6309 i += length; 6310 } 6311 6312 // writing (static example, PRIVATE_REPROCESSING + YUV_REPROCESSING) 6313 int32_t[] contents = { 6314 IMPLEMENTATION_DEFINED, 2, YUV_420_888, BLOB, 6315 YUV_420_888, 2, YUV_420_888, BLOB, 6316 }; 6317 update_camera_metadata_entry(metadata, index, &contents[0], 6318 sizeof(contents)/sizeof(contents[0]), &updated_entry); 6319 6320 If the HAL claims to support any of the capabilities listed in the 6321 above details, then it must also support all the input-output 6322 combinations listed for that capability. It can optionally support 6323 additional formats if it so chooses. 6324 </hal_details> 6325 <tag id="REPROC" /> 6326 </entry> 6327 <entry name="availableStreamConfigurations" type="int32" visibility="ndk_public" 6328 enum="true" container="array" typedef="streamConfiguration" hwlevel="legacy"> 6329 <array> 6330 <size>n</size> 6331 <size>4</size> 6332 </array> 6333 <enum> 6334 <value>OUTPUT</value> 6335 <value>INPUT</value> 6336 </enum> 6337 <description>The available stream configurations that this 6338 camera device supports 6339 (i.e. format, width, height, output/input stream). 6340 </description> 6341 <details> 6342 The configurations are listed as `(format, width, height, input?)` 6343 tuples. 6344 6345 For a given use case, the actual maximum supported resolution 6346 may be lower than what is listed here, depending on the destination 6347 Surface for the image data. For example, for recording video, 6348 the video encoder chosen may have a maximum size limit (e.g. 1080p) 6349 smaller than what the camera (e.g. maximum resolution is 3264x2448) 6350 can provide. 6351 6352 Please reference the documentation for the image data destination to 6353 check if it limits the maximum size for image data. 6354 6355 Not all output formats may be supported in a configuration with 6356 an input stream of a particular format. For more details, see 6357 android.scaler.availableInputOutputFormatsMap. 6358 6359 The following table describes the minimum required output stream 6360 configurations based on the hardware level 6361 (android.info.supportedHardwareLevel): 6362 6363 Format | Size | Hardware Level | Notes 6364 :-------------:|:--------------------------------------------:|:--------------:|:--------------: 6365 JPEG | android.sensor.info.activeArraySize | Any | 6366 JPEG | 1920x1080 (1080p) | Any | if 1080p <= activeArraySize 6367 JPEG | 1280x720 (720) | Any | if 720p <= activeArraySize 6368 JPEG | 640x480 (480p) | Any | if 480p <= activeArraySize 6369 JPEG | 320x240 (240p) | Any | if 240p <= activeArraySize 6370 YUV_420_888 | all output sizes available for JPEG | FULL | 6371 YUV_420_888 | all output sizes available for JPEG, up to the maximum video size | LIMITED | 6372 IMPLEMENTATION_DEFINED | same as YUV_420_888 | Any | 6373 6374 Refer to android.request.availableCapabilities for additional 6375 mandatory stream configurations on a per-capability basis. 
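For example, a minimal sketch (names illustrative) of checking whether a particular
output size is supported for a format before configuring a stream:

    StreamConfigurationMap map =
        characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    Size desired = new Size(1920, 1080);
    boolean supported = false;
    for (Size s : map.getOutputSizes(ImageFormat.JPEG)) {
        if (s.equals(desired)) {
            supported = true;
            break;
        }
    }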
6376 6377 Exception on 176x144 (QCIF) resolution: camera devices usually have a fixed capability for 6378 downscaling from larger resolution to smaller, and the QCIF resolution sometimes is not 6379 fully supported due to this limitation on devices with high-resolution image sensors. 6380 Therefore, trying to configure a QCIF resolution stream together with any other 6381 stream larger than 1920x1080 resolution (either width or height) might not be supported, 6382 and capture session creation will fail if it is not. 6383 6384 </details> 6385 <hal_details> 6386 It is recommended (but not mandatory) to also include half/quarter 6387 of sensor maximum resolution for JPEG formats (regardless of hardware 6388 level). 6389 6390 (The following is a rewording of the above required table): 6391 6392 For JPEG format, the sizes may be restricted by below conditions: 6393 6394 * The HAL may choose the aspect ratio of each Jpeg size to be one of well known ones 6395 (e.g. 4:3, 16:9, 3:2 etc.). If the sensor maximum resolution 6396 (defined by android.sensor.info.activeArraySize) has an aspect ratio other than these, 6397 it does not have to be included in the supported JPEG sizes. 6398 * Some hardware JPEG encoders may have pixel boundary alignment requirements, such as 6399 the dimensions being a multiple of 16. 6400 6401 Therefore, the maximum JPEG size may be smaller than sensor maximum resolution. 6402 However, the largest JPEG size must be as close as possible to the sensor maximum 6403 resolution given above constraints. It is required that after aspect ratio adjustments, 6404 additional size reduction due to other issues must be less than 3% in area. For example, 6405 if the sensor maximum resolution is 3280x2464, if the maximum JPEG size has aspect 6406 ratio 4:3, the JPEG encoder alignment requirement is 16, the maximum JPEG size will be 6407 3264x2448. 6408 6409 For FULL capability devices (`android.info.supportedHardwareLevel == FULL`), 6410 the HAL must include all YUV_420_888 sizes that have JPEG sizes listed 6411 here as output streams. 6412 6413 It must also include each below resolution if it is smaller than or 6414 equal to the sensor maximum resolution (for both YUV_420_888 and JPEG 6415 formats), as output streams: 6416 6417 * 240p (320 x 240) 6418 * 480p (640 x 480) 6419 * 720p (1280 x 720) 6420 * 1080p (1920 x 1080) 6421 6422 For LIMITED capability devices 6423 (`android.info.supportedHardwareLevel == LIMITED`), 6424 the HAL only has to list up to the maximum video size 6425 supported by the device. 6426 6427 Regardless of hardware level, every output resolution available for 6428 YUV_420_888 must also be available for IMPLEMENTATION_DEFINED. 6429 6430 This supercedes the following fields, which are now deprecated: 6431 6432 * availableFormats 6433 * available[Processed,Raw,Jpeg]Sizes 6434 </hal_details> 6435 </entry> 6436 <entry name="availableMinFrameDurations" type="int64" visibility="ndk_public" 6437 container="array" typedef="streamConfigurationDuration" hwlevel="legacy"> 6438 <array> 6439 <size>4</size> 6440 <size>n</size> 6441 </array> 6442 <description>This lists the minimum frame duration for each 6443 format/size combination. 6444 </description> 6445 <units>(format, width, height, ns) x n</units> 6446 <details> 6447 This should correspond to the frame duration when only that 6448 stream is active, with all processing (typically in android.*.mode) 6449 set to either OFF or FAST. 
6450 6451 When multiple streams are used in a request, the minimum frame 6452 duration will be max(individual stream min durations). 6453 6454 The minimum frame duration of a stream (of a particular format, size) 6455 is the same regardless of whether the stream is input or output. 6456 6457 See android.sensor.frameDuration and 6458 android.scaler.availableStallDurations for more details about 6459 calculating the max frame rate. 6460 </details> 6461 <tag id="V1" /> 6462 </entry> 6463 <entry name="availableStallDurations" type="int64" visibility="ndk_public" 6464 container="array" typedef="streamConfigurationDuration" hwlevel="legacy"> 6465 <array> 6466 <size>4</size> 6467 <size>n</size> 6468 </array> 6469 <description>This lists the maximum stall duration for each 6470 output format/size combination. 6471 </description> 6472 <units>(format, width, height, ns) x n</units> 6473 <details> 6474 A stall duration is how much extra time would get added 6475 to the normal minimum frame duration for a repeating request 6476 that has streams with non-zero stall. 6477 6478 For example, consider JPEG captures which have the following 6479 characteristics: 6480 6481 * JPEG streams act like processed YUV streams in requests for which 6482 they are not included; in requests in which they are directly 6483 referenced, they act as JPEG streams. This is because supporting a 6484 JPEG stream requires the underlying YUV data to always be ready for 6485 use by a JPEG encoder, but the encoder will only be used (and impact 6486 frame duration) on requests that actually reference a JPEG stream. 6487 * The JPEG processor can run concurrently to the rest of the camera 6488 pipeline, but cannot process more than 1 capture at a time. 6489 6490 In other words, using a repeating YUV request would result 6491 in a steady frame rate (let's say it's 30 FPS). If a single 6492 JPEG request is submitted periodically, the frame rate will stay 6493 at 30 FPS (as long as we wait for the previous JPEG to return each 6494 time). If we try to submit a repeating YUV + JPEG request, then 6495 the frame rate will drop from 30 FPS. 6496 6497 In general, submitting a new request with a non-0 stall time 6498 stream will _not_ cause a frame rate drop unless there are still 6499 outstanding buffers for that stream from previous requests. 6500 6501 Submitting a repeating request with streams (call this `S`) 6502 is the same as setting the minimum frame duration from 6503 the normal minimum frame duration corresponding to `S`, added with 6504 the maximum stall duration for `S`. 6505 6506 If interleaving requests with and without a stall duration, 6507 a request will stall by the maximum of the remaining times 6508 for each can-stall stream with outstanding buffers. 6509 6510 This means that a stalling request will not have an exposure start 6511 until the stall has completed. 6512 6513 This should correspond to the stall duration when only that stream is 6514 active, with all processing (typically in android.*.mode) set to FAST 6515 or OFF. Setting any of the processing modes to HIGH_QUALITY 6516 effectively results in an indeterminate stall duration for all 6517 streams in a request (the regular stall calculation rules are 6518 ignored). 
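As a rough application-side illustration of the relationship above (a sketch using the Java camera2 API; `map` is an assumed android.hardware.camera2.params.StreamConfigurationMap instance and `size` an assumed output size):

    // Estimated steady-state frame duration for a repeating request that always
    // includes both a YUV stream and a stalling JPEG stream of the given size.
    long yuvMinNs    = map.getOutputMinFrameDuration(ImageFormat.YUV_420_888, size);
    long jpegMinNs   = map.getOutputMinFrameDuration(ImageFormat.JPEG, size);
    long jpegStallNs = map.getOutputStallDuration(ImageFormat.JPEG, size);
    long steadyStateFrameDurationNs = Math.max(yuvMinNs, jpegMinNs) + jpegStallNs;
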
6519 6520 The following formats may always have a stall duration: 6521 6522 * {@link android.graphics.ImageFormat#JPEG|AIMAGE_FORMAT_JPEG} 6523 * {@link android.graphics.ImageFormat#RAW_SENSOR|AIMAGE_FORMAT_RAW16} 6524 6525 The following formats will never have a stall duration: 6526 6527 * {@link android.graphics.ImageFormat#YUV_420_888|AIMAGE_FORMAT_YUV_420_888} 6528 * {@link android.graphics.ImageFormat#RAW10|AIMAGE_FORMAT_RAW10} 6529 * {@link android.graphics.ImageFormat#RAW12|AIMAGE_FORMAT_RAW12} 6530 * {@link android.graphics.ImageFormat#Y8|AIMAGE_FORMAT_Y8} 6531 6532 All other formats may or may not have an allowed stall duration on 6533 a per-capability basis; refer to android.request.availableCapabilities 6534 for more details. 6535 6536 See android.sensor.frameDuration for more information about 6537 calculating the max frame rate (absent stalls). 6538 </details> 6539 <hal_details> 6540 If possible, it is recommended that all non-JPEG formats 6541 (such as RAW16) should not have a stall duration. RAW10, RAW12, RAW_OPAQUE 6542 and IMPLEMENTATION_DEFINED must not have stall durations. 6543 </hal_details> 6544 <tag id="V1" /> 6545 </entry> 6546 <entry name="streamConfigurationMap" type="int32" visibility="java_public" 6547 synthetic="true" typedef="streamConfigurationMap" 6548 hwlevel="legacy"> 6549 <description>The available stream configurations that this 6550 camera device supports; also includes the minimum frame durations 6551 and the stall durations for each format/size combination. 6552 </description> 6553 <details> 6554 All camera devices will support sensor maximum resolution (defined by 6555 android.sensor.info.activeArraySize) for the JPEG format. 6556 6557 For a given use case, the actual maximum supported resolution 6558 may be lower than what is listed here, depending on the destination 6559 Surface for the image data. For example, for recording video, 6560 the video encoder chosen may have a maximum size limit (e.g. 1080p) 6561 smaller than what the camera (e.g. maximum resolution is 3264x2448) 6562 can provide. 6563 6564 Please reference the documentation for the image data destination to 6565 check if it limits the maximum size for image data. 
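An application typically retrieves this map from the camera characteristics and queries the supported sizes and durations per format before deciding on its stream configuration. A minimal sketch (assuming `characteristics` is the CameraCharacteristics of the camera device):

    StreamConfigurationMap map = characteristics.get(
            CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    for (Size size : map.getOutputSizes(ImageFormat.JPEG)) {
        long minFrameDurationNs = map.getOutputMinFrameDuration(ImageFormat.JPEG, size);
        long stallDurationNs    = map.getOutputStallDuration(ImageFormat.JPEG, size);
        // Pick a size whose durations satisfy the application's frame rate needs.
    }
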
6566 6567 The following table describes the minimum required output stream 6568 configurations based on the hardware level 6569 (android.info.supportedHardwareLevel): 6570 6571 Format | Size | Hardware Level | Notes 6572 :-------------------------------------------------:|:--------------------------------------------:|:--------------:|:--------------: 6573 {@link android.graphics.ImageFormat#JPEG} | android.sensor.info.activeArraySize (*1) | Any | 6574 {@link android.graphics.ImageFormat#JPEG} | 1920x1080 (1080p) | Any | if 1080p <= activeArraySize 6575 {@link android.graphics.ImageFormat#JPEG} | 1280x720 (720p) | Any | if 720p <= activeArraySize 6576 {@link android.graphics.ImageFormat#JPEG} | 640x480 (480p) | Any | if 480p <= activeArraySize 6577 {@link android.graphics.ImageFormat#JPEG} | 320x240 (240p) | Any | if 240p <= activeArraySize 6578 {@link android.graphics.ImageFormat#YUV_420_888} | all output sizes available for JPEG | FULL | 6579 {@link android.graphics.ImageFormat#YUV_420_888} | all output sizes available for JPEG, up to the maximum video size | LIMITED | 6580 {@link android.graphics.ImageFormat#PRIVATE} | same as YUV_420_888 | Any | 6581 6582 Refer to android.request.availableCapabilities and {@link 6583 android.hardware.camera2.CameraDevice#createCaptureSession} for additional mandatory 6584 stream configurations on a per-capability basis. 6585 6586 *1: For JPEG format, the sizes may be restricted by below conditions: 6587 6588 * The HAL may choose the aspect ratio of each Jpeg size to be one of well known ones 6589 (e.g. 4:3, 16:9, 3:2 etc.). If the sensor maximum resolution 6590 (defined by android.sensor.info.activeArraySize) has an aspect ratio other than these, 6591 it does not have to be included in the supported JPEG sizes. 6592 * Some hardware JPEG encoders may have pixel boundary alignment requirements, such as 6593 the dimensions being a multiple of 16. 6594 Therefore, the maximum JPEG size may be smaller than sensor maximum resolution. 6595 However, the largest JPEG size will be as close as possible to the sensor maximum 6596 resolution given above constraints. It is required that after aspect ratio adjustments, 6597 additional size reduction due to other issues must be less than 3% in area. For example, 6598 if the sensor maximum resolution is 3280x2464, if the maximum JPEG size has aspect 6599 ratio 4:3, and the JPEG encoder alignment requirement is 16, the maximum JPEG size will be 6600 3264x2448. 6601 6602 Exception on 176x144 (QCIF) resolution: camera devices usually have a fixed capability on 6603 downscaling from larger resolution to smaller ones, and the QCIF resolution can sometimes 6604 not be fully supported due to this limitation on devices with high-resolution image 6605 sensors. Therefore, trying to configure a QCIF resolution stream together with any other 6606 stream larger than 1920x1080 resolution (either width or height) might not be supported, 6607 and capture session creation will fail if it is not. 6608 6609 </details> 6610 <hal_details> 6611 Do not set this property directly 6612 (it is synthetic and will not be available at the HAL layer); 6613 set the android.scaler.availableStreamConfigurations instead. 6614 6615 Not all output formats may be supported in a configuration with 6616 an input stream of a particular format. For more details, see 6617 android.scaler.availableInputOutputFormatsMap. 
6618 6619 It is recommended (but not mandatory) to also include half/quarter 6620 of sensor maximum resolution for JPEG formats (regardless of hardware 6621 level). 6622 6623 (The following is a rewording of the above required table): 6624 6625 The HAL must include sensor maximum resolution (defined by 6626 android.sensor.info.activeArraySize). 6627 6628 For FULL capability devices (`android.info.supportedHardwareLevel == FULL`), 6629 the HAL must include all YUV_420_888 sizes that have JPEG sizes listed 6630 here as output streams. 6631 6632 It must also include each below resolution if it is smaller than or 6633 equal to the sensor maximum resolution (for both YUV_420_888 and JPEG 6634 formats), as output streams: 6635 6636 * 240p (320 x 240) 6637 * 480p (640 x 480) 6638 * 720p (1280 x 720) 6639 * 1080p (1920 x 1080) 6640 6641 For LIMITED capability devices 6642 (`android.info.supportedHardwareLevel == LIMITED`), 6643 the HAL only has to list up to the maximum video size 6644 supported by the device. 6645 6646 Regardless of hardware level, every output resolution available for 6647 YUV_420_888 must also be available for IMPLEMENTATION_DEFINED. 6648 6649 This supercedes the following fields, which are now deprecated: 6650 6651 * availableFormats 6652 * available[Processed,Raw,Jpeg]Sizes 6653 </hal_details> 6654 </entry> 6655 <entry name="croppingType" type="byte" visibility="public" enum="true" 6656 hwlevel="legacy"> 6657 <enum> 6658 <value>CENTER_ONLY 6659 <notes> 6660 The camera device only supports centered crop regions. 6661 </notes> 6662 </value> 6663 <value>FREEFORM 6664 <notes> 6665 The camera device supports arbitrarily chosen crop regions. 6666 </notes> 6667 </value> 6668 </enum> 6669 <description>The crop type that this camera device supports.</description> 6670 <details> 6671 When passing a non-centered crop region (android.scaler.cropRegion) to a camera 6672 device that only supports CENTER_ONLY cropping, the camera device will move the 6673 crop region to the center of the sensor active array (android.sensor.info.activeArraySize) 6674 and keep the crop region width and height unchanged. The camera device will return the 6675 final used crop region in metadata result android.scaler.cropRegion. 6676 6677 Camera devices that support FREEFORM cropping will support any crop region that 6678 is inside of the active array. The camera device will apply the same crop region and 6679 return the final used crop region in capture result metadata android.scaler.cropRegion. 6680 6681 LEGACY capability devices will only support CENTER_ONLY cropping. 6682 </details> 6683 </entry> 6684 <entry name="availableRecommendedStreamConfigurations" type="int32" visibility="ndk_public" 6685 optional="true" enum="true" container="array" typedef="recommendedStreamConfiguration" 6686 hal_version="3.4"> 6687 <array> 6688 <size>n</size> 6689 <size>5</size> 6690 </array> 6691 <enum> 6692 <value id="0x0">PREVIEW 6693 <notes> 6694 Preview must only include non-stalling processed stream configurations with 6695 output formats like 6696 {@link android.graphics.ImageFormat#YUV_420_888|AIMAGE_FORMAT_YUV_420_888}, 6697 {@link android.graphics.ImageFormat#PRIVATE|AIMAGE_FORMAT_PRIVATE}, etc. 6698 </notes> 6699 </value> 6700 <value id="0x1">RECORD 6701 <notes> 6702 Video record must include stream configurations that match the advertised 6703 supported media profiles {@link android.media.CamcorderProfile} with 6704 IMPLEMENTATION_DEFINED format. 
6705 </notes> 6706 </value> 6707 <value id="0x2">VIDEO_SNAPSHOT 6708 <notes> 6709 Video snapshot must include stream configurations at least as big as 6710 the maximum RECORD resolutions and only with 6711 {@link android.graphics.ImageFormat#JPEG|AIMAGE_FORMAT_JPEG JPEG output format}. 6712 Additionally the configurations shouldn't cause preview glitches and also be able to 6713 run at 30 fps. 6714 </notes> 6715 </value> 6716 <value id="0x3">SNAPSHOT 6717 <notes> 6718 Recommended snapshot stream configurations must include at least one with 6719 size close to android.sensor.info.activeArraySize and 6720 {@link android.graphics.ImageFormat#JPEG|AIMAGE_FORMAT_JPEG JPEG output format}. 6721 Taking into account restrictions on aspect ratio, alignment etc. the area of the 6722 maximum suggested size shouldn’t be less than 97% of the sensor array size area. 6723 </notes> 6724 </value> 6725 <value id="0x4">ZSL 6726 <notes> 6727 If supported, recommended input stream configurations must only be advertised with 6728 ZSL along with other processed and/or stalling output formats. 6729 </notes> 6730 </value> 6731 <value id="0x5">RAW 6732 <notes> 6733 If supported, recommended raw stream configurations must only include RAW based 6734 output formats. 6735 </notes> 6736 </value> 6737 <value id="0x6">LOW_LATENCY_SNAPSHOT 6738 <notes> 6739 If supported, the recommended low latency stream configurations must have 6740 end-to-end latency that does not exceed 200 ms. under standard operating conditions 6741 (reasonable light levels, not loaded system) and using template 6742 TEMPLATE_STILL_CAPTURE. This is primarily for listing configurations for the 6743 {@link android.graphics.ImageFormat#JPEG|AIMAGE_FORMAT_JPEG JPEG output format} 6744 however other supported output formats can be added as well. 6745 </notes> 6746 </value> 6747 <value id="0x7">PUBLIC_END 6748 </value> 6749 <value id="0x18">VENDOR_START 6750 <notes> 6751 Vendor defined use cases. These depend on the vendor implementation. 6752 </notes> 6753 </value> 6754 </enum> 6755 <description>Recommended stream configurations for common client use cases. 6756 </description> 6757 <details>Optional subset of the android.scaler.availableStreamConfigurations that contains 6758 similar tuples listed as 6759 (i.e. width, height, format, output/input stream, usecase bit field). 6760 Camera devices will be able to suggest particular stream configurations which are 6761 power and performance efficient for specific use cases. For more information about 6762 retrieving the suggestions see 6763 {@link android.hardware.camera2.CameraCharacteristics#getRecommendedStreamConfigurationMap}. 6764 </details> 6765 <ndk_details> 6766 The data representation is int[5], which maps to 6767 (width, height, format, output/input stream, usecase bit field). 
The array can be
6768 parsed using the following pseudo code:
6769
6770     struct StreamConfiguration {
6771         int32_t format;
6772         int32_t width;
6773         int32_t height;
6774         int32_t isInput;
6775     };
6776     void getPreferredStreamConfigurations(
6777         int32_t *array, size_t count, int32_t usecaseId,
6778         Vector<StreamConfiguration> *scs) {
6779         const size_t STREAM_CONFIGURATION_SIZE = 5;
6780         const size_t STREAM_WIDTH_OFFSET = 0;
6781         const size_t STREAM_HEIGHT_OFFSET = 1;
6782         const size_t STREAM_FORMAT_OFFSET = 2;
6783         const size_t STREAM_IS_INPUT_OFFSET = 3;
6784         const size_t STREAM_USECASE_BITMAP_OFFSET = 4;
6785
6786         for (size_t i = 0; i < count; i += STREAM_CONFIGURATION_SIZE) {
6787             int32_t width = array[i + STREAM_WIDTH_OFFSET];
6788             int32_t height = array[i + STREAM_HEIGHT_OFFSET];
6789             int32_t format = array[i + STREAM_FORMAT_OFFSET];
6790             int32_t isInput = array[i + STREAM_IS_INPUT_OFFSET];
6791             int32_t supportedUsecases = array[i + STREAM_USECASE_BITMAP_OFFSET];
6792             if (supportedUsecases & (1 << usecaseId)) {
6793                 StreamConfiguration sc = {format, width, height, isInput};
6794                 scs->add(sc);
6795             }
6796         }
6797     }
6798
6799 </ndk_details>
6800 <hal_details>
6801 There are some requirements that need to be considered regarding the usecases and the
6802 suggested configurations:
6803
6804 * If android.scaler.availableRecommendedStreamConfigurations is set, then recommended
6805 stream configurations must be present for all mandatory usecases PREVIEW,
6806 SNAPSHOT, RECORD, VIDEO_SNAPSHOT. ZSL and RAW are
6807 required depending on device capabilities; see android.request.availableCapabilities.
6808 * Non-existing usecases and non-vendor usecases within the range
6809 (RAW : VENDOR_START] are prohibited as well as stream configurations not
6810 present in the exhaustive android.scaler.availableStreamConfigurations list.
6811
6812 For example, suppose the camera device supports only 4K and 1080p, and both resolutions are
6813 recommended for all mandatory usecases except preview, which can run efficiently only
6814 at 1080p. The array may then look like this:
6815
6816     [3840, 2160, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
6817     ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
6818     (1<< ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_RECORD |
6819     1<< ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_SNAPSHOT |
6820     1<< ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_VIDEO_SNAPSHOT),
6821
6822     1920, 1080, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
6823     ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
6824     (1<< ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_PREVIEW |
6825     1<< ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_RECORD |
6826     1<< ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_SNAPSHOT |
6827     1<< ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_VIDEO_SNAPSHOT)]
6828
6829 </hal_details>
6830 </entry>
6831 <entry name="availableRecommendedInputOutputFormatsMap" type="int32" visibility="ndk_public"
6832 optional="true" typedef="reprocessFormatsMap" hal_version="3.4">
6833 <description>Recommended mappings of image formats that are supported by this
6834 camera device for input streams, to their corresponding output formats.
6835 </description>
6836 <details>
6837 This is a recommended subset of the complete list of mappings found in
6838 android.scaler.availableInputOutputFormatsMap. The same requirements apply here as well.
6839 The list, however, doesn't need to contain all available and supported mappings.
Instead,
6840 developers must list only recommended and efficient entries.
6841 If set, the information will be available in the ZERO_SHUTTER_LAG recommended stream
6842 configuration; see
6843 {@link android.hardware.camera2.CameraCharacteristics#getRecommendedStreamConfigurationMap}.
6844 </details>
6845 <hal_details>
6846 For a code sample of the required data encoding, please check
6847 android.scaler.availableInputOutputFormatsMap.
6848 </hal_details>
6849 <tag id="REPROC" />
6850 </entry>
6851 <entry name="mandatoryStreamCombinations" type="int32" visibility="java_public"
6852 synthetic="true" container="array" typedef="mandatoryStreamCombination" hwlevel="limited">
6853 <array>
6854 <size>n</size>
6855 </array>
6856 <description>
6857 An array of mandatory stream combinations generated according to the camera device
6858 {@link android.hardware.camera2.CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL}
6859 and {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES}.
6860 This is an app-readable conversion of the mandatory stream combination
6861 {@link android.hardware.camera2.CameraDevice#createCaptureSession tables}.
6862 </description>
6863 <details>
6864 The array of
6865 {@link android.hardware.camera2.params.MandatoryStreamCombination combinations} is
6866 generated according to the documented
6867 {@link android.hardware.camera2.CameraDevice#createCaptureSession guideline} based on
6868 specific device level and capabilities.
6869 Clients can use the array as a quick reference to find an appropriate camera stream
6870 combination.
6871 As per documentation, the stream combinations with given PREVIEW, RECORD and
6872 MAXIMUM resolutions and anything smaller from the list given by
6873 {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes} are
6874 guaranteed to work.
6875 For a physical camera not independently exposed in
6876 {@link android.hardware.camera2.CameraManager#getCameraIdList}, the mandatory stream
6877 combinations for that physical camera Id are also generated, so that the application can
6878 configure them as physical streams via the logical camera.
6879 The mandatory stream combination array will be {@code null} in case the device is not
6880 backward compatible.
6881 </details>
6882 <hal_details>
6883 Do not set this property directly
6884 (it is synthetic and will not be available at the HAL layer).
6885 </hal_details>
6886 </entry>
6887 </static>
6888 </section>
6889 <section name="sensor">
6890 <controls>
6891 <entry name="exposureTime" type="int64" visibility="public" hwlevel="full">
6892 <description>Duration each pixel is exposed to
6893 light.</description>
6894 <units>Nanoseconds</units>
6895 <range>android.sensor.info.exposureTimeRange</range>
6896 <details>If the sensor can't expose this exact duration, it will shorten the
6897 duration exposed to the nearest possible value (rather than expose longer).
6898 The final exposure time used will be available in the output capture result.
6899
6900 This control is only effective if android.control.aeMode or android.control.mode is set to
6901 OFF; otherwise the auto-exposure algorithm will override this value.
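For example, an application that wants fully manual exposure would first disable auto-exposure and then set this key directly on the request. A minimal sketch (assuming `builder` is an existing CaptureRequest.Builder; the chosen values are placeholders):

    // Disable auto-exposure so the manual sensor values below take effect.
    builder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_OFF);
    builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, 10000000L); // 10 ms, in nanoseconds
    builder.set(CaptureRequest.SENSOR_SENSITIVITY, 400);         // ISO 400
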
6902 </details> 6903 <tag id="V1" /> 6904 </entry> 6905 <entry name="frameDuration" type="int64" visibility="public" hwlevel="full"> 6906 <description>Duration from start of frame exposure to 6907 start of next frame exposure.</description> 6908 <units>Nanoseconds</units> 6909 <range>See android.sensor.info.maxFrameDuration, {@link 6910 android.hardware.camera2.params.StreamConfigurationMap|ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}. 6911 The duration is capped to `max(duration, exposureTime + overhead)`.</range> 6912 <details> 6913 The maximum frame rate that can be supported by a camera subsystem is 6914 a function of many factors: 6915 6916 * Requested resolutions of output image streams 6917 * Availability of binning / skipping modes on the imager 6918 * The bandwidth of the imager interface 6919 * The bandwidth of the various ISP processing blocks 6920 6921 Since these factors can vary greatly between different ISPs and 6922 sensors, the camera abstraction tries to represent the bandwidth 6923 restrictions with as simple a model as possible. 6924 6925 The model presented has the following characteristics: 6926 6927 * The image sensor is always configured to output the smallest 6928 resolution possible given the application's requested output stream 6929 sizes. The smallest resolution is defined as being at least as large 6930 as the largest requested output stream size; the camera pipeline must 6931 never digitally upsample sensor data when the crop region covers the 6932 whole sensor. In general, this means that if only small output stream 6933 resolutions are configured, the sensor can provide a higher frame 6934 rate. 6935 * Since any request may use any or all the currently configured 6936 output streams, the sensor and ISP must be configured to support 6937 scaling a single capture to all the streams at the same time. This 6938 means the camera pipeline must be ready to produce the largest 6939 requested output size without any delay. Therefore, the overall 6940 frame rate of a given configured stream set is governed only by the 6941 largest requested stream resolution. 6942 * Using more than one output stream in a request does not affect the 6943 frame duration. 6944 * Certain format-streams may need to do additional background processing 6945 before data is consumed/produced by that stream. These processors 6946 can run concurrently to the rest of the camera pipeline, but 6947 cannot process more than 1 capture at a time. 6948 6949 The necessary information for the application, given the model above, is provided via 6950 {@link 6951 android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration|ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}. 6952 These are used to determine the maximum frame rate / minimum frame duration that is 6953 possible for a given stream configuration. 6954 6955 Specifically, the application can use the following rules to 6956 determine the minimum frame duration it can request from the camera 6957 device: 6958 6959 1. Let the set of currently configured input/output streams be called `S`. 6960 1. Find the minimum frame durations for each stream in `S`, by looking it up in {@link 6961 android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration|ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS} 6962 (with its respective size/format). Let this set of frame durations be called `F`. 6963 1. For any given request `R`, the minimum frame duration allowed for `R` is the maximum 6964 out of all values in `F`. 
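A rough sketch of these rules in application code (Java camera2 API; `map`, `previewSize`, and `stillSize` are assumptions standing in for the configured streams):

    // F: the per-stream minimum frame durations for the streams in S.
    long yuvMinNs  = map.getOutputMinFrameDuration(ImageFormat.YUV_420_888, previewSize);
    long jpegMinNs = map.getOutputMinFrameDuration(ImageFormat.JPEG, stillSize);
    // The minimum frame duration allowed for a request R using both streams
    // is the maximum over F; the corresponding maximum frame rate follows from it.
    long requestMinFrameDurationNs = Math.max(yuvMinNs, jpegMinNs);
    double maxFps = 1e9 / requestMinFrameDurationNs;
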
Let the streams used in `R` be called `S_r`.
6965
6966 If none of the streams in `S_r` have a stall time (listed in {@link
6967 android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration|ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS}
6968 using its respective size/format), then the frame duration in `F` determines the steady
6969 state frame rate that the application will get if it uses `R` as a repeating request. Let
6970 this special kind of request be called `Rsimple`.
6971
6972 A repeating request `Rsimple` can be _occasionally_ interleaved by a single capture of a
6973 new request `Rstall` (which has at least one in-use stream with a non-0 stall time). If
6974 `Rstall` has the same minimum frame duration, this will not cause a frame rate loss as long
6975 as all buffers from the previous `Rstall` have already been delivered.
6976
6977 For more details about stalling, see {@link
6978 android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration|ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS}.
6979
6980 This control is only effective if android.control.aeMode or android.control.mode is set to
6981 OFF; otherwise the auto-exposure algorithm will override this value.
6982 </details>
6983 <hal_details>
6984 For more details about stalling, see
6985 android.scaler.availableStallDurations.
6986 </hal_details>
6987 <tag id="V1" />
6988 </entry>
6989 <entry name="sensitivity" type="int32" visibility="public" hwlevel="full">
6990 <description>The amount of gain applied to sensor data
6991 before processing.</description>
6992 <units>ISO arithmetic units</units>
6993 <range>android.sensor.info.sensitivityRange</range>
6994 <details>
6995 The sensitivity is the standard ISO sensitivity value,
6996 as defined in ISO 12232:2006.
6997
6998 The sensitivity must be within android.sensor.info.sensitivityRange, and
6999 if it is less than android.sensor.maxAnalogSensitivity, the camera device
7000 is guaranteed to use only analog amplification for applying the gain.
7001
7002 If the camera device cannot apply the exact sensitivity
7003 requested, it will reduce the gain to the nearest supported
7004 value. The final sensitivity used will be available in the
7005 output capture result.
7006
7007 This control is only effective if android.control.aeMode or android.control.mode is set to
7008 OFF; otherwise the auto-exposure algorithm will override this value.
7009 </details>
7010 <hal_details>ISO 12232:2006 REI method is acceptable.</hal_details>
7011 <tag id="V1" />
7012 </entry>
7013 </controls>
7014 <static>
7015 <namespace name="info">
7016 <entry name="activeArraySize" type="int32" visibility="public"
7017 type_notes="Four ints defining the active pixel rectangle"
7018 container="array" typedef="rectangle" hwlevel="legacy">
7019 <array>
7020 <size>4</size>
7021 </array>
7022 <description>
7023 The area of the image sensor which corresponds to active pixels after any geometric
7024 distortion correction has been applied.
7025 </description>
7026 <units>Pixel coordinates on the image sensor</units>
7027 <details>
7028 This is the rectangle representing the size of the active region of the sensor (i.e.
7029 the region that actually receives light from the scene) after any geometric correction
7030 has been applied, and should be treated as the maximum size in pixels of any of the
7031 image output formats aside from the raw formats.
7032 7033 This rectangle is defined relative to the full pixel array; (0,0) is the top-left of 7034 the full pixel array, and the size of the full pixel array is given by 7035 android.sensor.info.pixelArraySize. 7036 7037 The coordinate system for most other keys that list pixel coordinates, including 7038 android.scaler.cropRegion, is defined relative to the active array rectangle given in 7039 this field, with `(0, 0)` being the top-left of this rectangle. 7040 7041 The active array may be smaller than the full pixel array, since the full array may 7042 include black calibration pixels or other inactive regions. 7043 7044 For devices that do not support android.distortionCorrection.mode control, the active 7045 array must be the same as android.sensor.info.preCorrectionActiveArraySize. 7046 7047 For devices that support android.distortionCorrection.mode control, the active array must 7048 be enclosed by android.sensor.info.preCorrectionActiveArraySize. The difference between 7049 pre-correction active array and active array accounts for scaling or cropping caused 7050 by lens geometric distortion correction. 7051 7052 In general, application should always refer to active array size for controls like 7053 metering regions or crop region. Two exceptions are when the application is dealing with 7054 RAW image buffers (RAW_SENSOR, RAW10, RAW12 etc), or when application explicitly set 7055 android.distortionCorrection.mode to OFF. In these cases, application should refer 7056 to android.sensor.info.preCorrectionActiveArraySize. 7057 </details> 7058 <ndk_details> 7059 The data representation is `int[4]`, which maps to `(left, top, width, height)`. 7060 </ndk_details> 7061 <hal_details> 7062 This array contains `(xmin, ymin, width, height)`. The `(xmin, ymin)` must be 7063 &gt;= `(0,0)`. 7064 The `(width, height)` must be &lt;= `android.sensor.info.pixelArraySize`. 7065 </hal_details> 7066 <tag id="RAW" /> 7067 </entry> 7068 <entry name="sensitivityRange" type="int32" visibility="public" 7069 type_notes="Range of supported sensitivities" 7070 container="array" typedef="rangeInt" 7071 hwlevel="full"> 7072 <array> 7073 <size>2</size> 7074 </array> 7075 <description>Range of sensitivities for android.sensor.sensitivity supported by this 7076 camera device.</description> 7077 <range>Min <= 100, Max &gt;= 800</range> 7078 <details> 7079 The values are the standard ISO sensitivity values, 7080 as defined in ISO 12232:2006. 7081 </details> 7082 7083 <tag id="BC" /> 7084 <tag id="V1" /> 7085 </entry> 7086 <entry name="colorFilterArrangement" type="byte" visibility="public" enum="true" 7087 hwlevel="full"> 7088 <enum> 7089 <value>RGGB</value> 7090 <value>GRBG</value> 7091 <value>GBRG</value> 7092 <value>BGGR</value> 7093 <value>RGB 7094 <notes>Sensor is not Bayer; output has 3 16-bit 7095 values for each pixel, instead of just 1 16-bit value 7096 per pixel.</notes></value> 7097 <value hal_version="3.4">MONO 7098 <notes>Sensor doesn't have any Bayer color filter. 7099 Such sensor captures visible light in monochrome. The exact weighting and 7100 wavelengths captured is not specified, but generally only includes the visible 7101 frequencies. This value implies a MONOCHROME camera.</notes></value> 7102 <value hal_version="3.4">NIR 7103 <notes>Sensor has a near infrared filter capturing light with wavelength between 7104 roughly 750nm and 1400nm, and the same filter covers the whole sensor array. 
This 7105 value implies a MONOCHROME camera.</notes></value> 7106 </enum> 7107 <description>The arrangement of color filters on sensor; 7108 represents the colors in the top-left 2x2 section of 7109 the sensor, in reading order, for a Bayer camera, or the 7110 light spectrum it captures for MONOCHROME camera. 7111 </description> 7112 <hal_details> 7113 Starting from Android Q, the colorFilterArrangement for a MONOCHROME camera must be 7114 single color patterns, such as MONO or NIR. 7115 </hal_details> 7116 <tag id="RAW" /> 7117 </entry> 7118 <entry name="exposureTimeRange" type="int64" visibility="public" 7119 type_notes="nanoseconds" container="array" typedef="rangeLong" 7120 hwlevel="full"> 7121 <array> 7122 <size>2</size> 7123 </array> 7124 <description>The range of image exposure times for android.sensor.exposureTime supported 7125 by this camera device. 7126 </description> 7127 <units>Nanoseconds</units> 7128 <range>The minimum exposure time will be less than 100 us. For FULL 7129 capability devices (android.info.supportedHardwareLevel == FULL), 7130 the maximum exposure time will be greater than 100ms.</range> 7131 <hal_details>For FULL capability devices (android.info.supportedHardwareLevel == FULL), 7132 The maximum of the range SHOULD be at least 1 second (1e9), MUST be at least 7133 100ms. 7134 </hal_details> 7135 <tag id="V1" /> 7136 </entry> 7137 <entry name="maxFrameDuration" type="int64" visibility="public" 7138 hwlevel="full"> 7139 <description>The maximum possible frame duration (minimum frame rate) for 7140 android.sensor.frameDuration that is supported this camera device.</description> 7141 <units>Nanoseconds</units> 7142 <range>For FULL capability devices 7143 (android.info.supportedHardwareLevel == FULL), at least 100ms. 7144 </range> 7145 <details>Attempting to use frame durations beyond the maximum will result in the frame 7146 duration being clipped to the maximum. See that control for a full definition of frame 7147 durations. 7148 7149 Refer to {@link 7150 android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration|ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS} 7151 for the minimum frame duration values. 7152 </details> 7153 <hal_details> 7154 For FULL capability devices (android.info.supportedHardwareLevel == FULL), 7155 The maximum of the range SHOULD be at least 7156 1 second (1e9), MUST be at least 100ms (100e6). 7157 7158 android.sensor.info.maxFrameDuration must be greater or 7159 equal to the android.sensor.info.exposureTimeRange max 7160 value (since exposure time overrides frame duration). 7161 7162 Available minimum frame durations for JPEG must be no greater 7163 than that of the YUV_420_888/IMPLEMENTATION_DEFINED 7164 minimum frame durations (for that respective size). 7165 7166 Since JPEG processing is considered offline and can take longer than 7167 a single uncompressed capture, refer to 7168 android.scaler.availableStallDurations 7169 for details about encoding this scenario. 7170 </hal_details> 7171 <tag id="V1" /> 7172 </entry> 7173 <entry name="physicalSize" type="float" visibility="public" 7174 type_notes="width x height" 7175 container="array" typedef="sizeF" hwlevel="legacy"> 7176 <array> 7177 <size>2</size> 7178 </array> 7179 <description>The physical dimensions of the full pixel 7180 array.</description> 7181 <units>Millimeters</units> 7182 <details>This is the physical size of the sensor pixel 7183 array defined by android.sensor.info.pixelArraySize. 
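One common use of this key, together with the lens focal length, is estimating the field of view. A minimal sketch (assuming `characteristics` is the device's CameraCharacteristics and ignoring lens distortion and any crop):

    SizeF sensorSize = characteristics.get(CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE);
    float[] focalLengths =
            characteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS);
    // Approximate horizontal field of view, in degrees, for the first focal length.
    double horizontalFovDegrees = Math.toDegrees(
            2 * Math.atan(sensorSize.getWidth() / (2 * focalLengths[0])));
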
7184 </details> 7185 <hal_details>Needed for FOV calculation for old API</hal_details> 7186 <tag id="V1" /> 7187 <tag id="BC" /> 7188 </entry> 7189 <entry name="pixelArraySize" type="int32" visibility="public" 7190 container="array" typedef="size" hwlevel="legacy"> 7191 <array> 7192 <size>2</size> 7193 </array> 7194 <description>Dimensions of the full pixel array, possibly 7195 including black calibration pixels.</description> 7196 <units>Pixels</units> 7197 <details>The pixel count of the full pixel array of the image sensor, which covers 7198 android.sensor.info.physicalSize area. This represents the full pixel dimensions of 7199 the raw buffers produced by this sensor. 7200 7201 If a camera device supports raw sensor formats, either this or 7202 android.sensor.info.preCorrectionActiveArraySize is the maximum dimensions for the raw 7203 output formats listed in {@link 7204 android.hardware.camera2.params.StreamConfigurationMap|ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS} 7205 (this depends on whether or not the image sensor returns buffers containing pixels that 7206 are not part of the active array region for blacklevel calibration or other purposes). 7207 7208 Some parts of the full pixel array may not receive light from the scene, 7209 or be otherwise inactive. The android.sensor.info.preCorrectionActiveArraySize key 7210 defines the rectangle of active pixels that will be included in processed image 7211 formats. 7212 </details> 7213 <tag id="RAW" /> 7214 <tag id="BC" /> 7215 </entry> 7216 <entry name="whiteLevel" type="int32" visibility="public"> 7217 <description> 7218 Maximum raw value output by sensor. 7219 </description> 7220 <range>&gt; 255 (8-bit output)</range> 7221 <details> 7222 This specifies the fully-saturated encoding level for the raw 7223 sample values from the sensor. This is typically caused by the 7224 sensor becoming highly non-linear or clipping. The minimum for 7225 each channel is specified by the offset in the 7226 android.sensor.blackLevelPattern key. 7227 7228 The white level is typically determined either by sensor bit depth 7229 (8-14 bits is expected), or by the point where the sensor response 7230 becomes too non-linear to be useful. The default value for this is 7231 maximum representable value for a 16-bit raw sample (2^16 - 1). 7232 7233 The white level values of captured images may vary for different 7234 capture settings (e.g., android.sensor.sensitivity). This key 7235 represents a coarse approximation for such case. It is recommended 7236 to use android.sensor.dynamicWhiteLevel for captures when supported 7237 by the camera device, which provides more accurate white level values. 7238 </details> 7239 <hal_details> 7240 The full bit depth of the sensor must be available in the raw data, 7241 so the value for linear sensors should not be significantly lower 7242 than maximum raw value supported, i.e. 2^(sensor bits per pixel). 7243 </hal_details> 7244 <tag id="RAW" /> 7245 </entry> 7246 <entry name="timestampSource" type="byte" visibility="public" 7247 enum="true" hwlevel="legacy"> 7248 <enum> 7249 <value>UNKNOWN 7250 <notes> 7251 Timestamps from android.sensor.timestamp are in nanoseconds and monotonic, 7252 but can not be compared to timestamps from other subsystems 7253 (e.g. accelerometer, gyro etc.), or other instances of the same or different 7254 camera devices in the same system. 
Timestamps between streams and results for
7255 a single camera instance are comparable, and the timestamps for all buffers
7256 and the result metadata generated by a single capture are identical.
7257 </notes>
7258 </value>
7259 <value>REALTIME
7260 <notes>
7261 Timestamps from android.sensor.timestamp are in the same timebase as
7262 {@link android.os.SystemClock#elapsedRealtimeNanos},
7263 and they can be compared to other timestamps using that base.
7264 </notes>
7265 </value>
7266 </enum>
7267 <description>The time base source for sensor capture start timestamps.</description>
7268 <details>
7269 The timestamps provided for captures are always in nanoseconds and monotonic, but
7270 may not be based on a time source that can be compared to other system time sources.
7271
7272 This characteristic defines the source for the timestamps, and therefore whether they
7273 can be compared against other system time sources/timestamps.
7274 </details>
7275 <hal_details>
7276 For camera devices that implement UNKNOWN, the camera framework expects the timestamp
7277 source to be SYSTEM_TIME_MONOTONIC. For camera devices that implement REALTIME, the camera
7278 framework expects the timestamp source to be SYSTEM_TIME_BOOTTIME. See
7279 system/core/include/utils/Timers.h for the definition of SYSTEM_TIME_MONOTONIC and
7280 SYSTEM_TIME_BOOTTIME. Note that the HAL must follow the above expectation; otherwise video
7281 recording might suffer unexpected behavior.
7282
7283 Also, camera devices which implement REALTIME must pass the ITS sensor fusion test which
7284 tests the alignment between camera timestamps and gyro sensor timestamps.
7285 </hal_details>
7286 <tag id="V1" />
7287 </entry>
7288 <entry name="lensShadingApplied" type="byte" visibility="public" enum="true"
7289 typedef="boolean">
7290 <enum>
7291 <value>FALSE</value>
7292 <value>TRUE</value>
7293 </enum>
7294 <description>Whether the RAW images output from this camera device are subject to
7295 lens shading correction.</description>
7296 <details>
7297 If TRUE, all images produced by the camera device in the RAW image formats will
7298 have lens shading correction already applied to them. If FALSE, the images will
7299 not be adjusted for lens shading correction.
7300 See android.request.maxNumOutputRaw for a list of RAW image formats.
7301
7302 This key will be `null` for all devices that do not report this information.
7303 Devices with RAW capability will always report this information in this key.
7304 </details>
7305 </entry>
7306 <entry name="preCorrectionActiveArraySize" type="int32" visibility="public"
7307 type_notes="Four ints defining the active pixel rectangle" container="array"
7308 typedef="rectangle" hwlevel="legacy">
7309 <array>
7310 <size>4</size>
7311 </array>
7312 <description>
7313 The area of the image sensor which corresponds to active pixels prior to the
7314 application of any geometric distortion correction.
7315 </description>
7316 <units>Pixel coordinates on the image sensor</units>
7317 <details>
7318 This is the rectangle representing the size of the active region of the sensor (i.e.
7319 the region that actually receives light from the scene) before any geometric correction
7320 has been applied, and should be treated as the active region rectangle for any of the
7321 raw formats. All metadata associated with raw processing (e.g. the lens shading
7322 correction map, and radial distortion fields) treats the top, left of this rectangle as
7323 the origin, (0,0).
7324 7325 The size of this region determines the maximum field of view and the maximum number of 7326 pixels that an image from this sensor can contain, prior to the application of 7327 geometric distortion correction. The effective maximum pixel dimensions of a 7328 post-distortion-corrected image is given by the android.sensor.info.activeArraySize 7329 field, and the effective maximum field of view for a post-distortion-corrected image 7330 can be calculated by applying the geometric distortion correction fields to this 7331 rectangle, and cropping to the rectangle given in android.sensor.info.activeArraySize. 7332 7333 E.g. to calculate position of a pixel, (x,y), in a processed YUV output image with the 7334 dimensions in android.sensor.info.activeArraySize given the position of a pixel, 7335 (x', y'), in the raw pixel array with dimensions give in 7336 android.sensor.info.pixelArraySize: 7337 7338 1. Choose a pixel (x', y') within the active array region of the raw buffer given in 7339 android.sensor.info.preCorrectionActiveArraySize, otherwise this pixel is considered 7340 to be outside of the FOV, and will not be shown in the processed output image. 7341 1. Apply geometric distortion correction to get the post-distortion pixel coordinate, 7342 (x_i, y_i). When applying geometric correction metadata, note that metadata for raw 7343 buffers is defined relative to the top, left of the 7344 android.sensor.info.preCorrectionActiveArraySize rectangle. 7345 1. If the resulting corrected pixel coordinate is within the region given in 7346 android.sensor.info.activeArraySize, then the position of this pixel in the 7347 processed output image buffer is `(x_i - activeArray.left, y_i - activeArray.top)`, 7348 when the top, left coordinate of that buffer is treated as (0, 0). 7349 7350 Thus, for pixel x',y' = (25, 25) on a sensor where android.sensor.info.pixelArraySize 7351 is (100,100), android.sensor.info.preCorrectionActiveArraySize is (10, 10, 100, 100), 7352 android.sensor.info.activeArraySize is (20, 20, 80, 80), and the geometric distortion 7353 correction doesn't change the pixel coordinate, the resulting pixel selected in 7354 pixel coordinates would be x,y = (25, 25) relative to the top,left of the raw buffer 7355 with dimensions given in android.sensor.info.pixelArraySize, and would be (5, 5) 7356 relative to the top,left of post-processed YUV output buffer with dimensions given in 7357 android.sensor.info.activeArraySize. 7358 7359 The currently supported fields that correct for geometric distortion are: 7360 7361 1. android.lens.distortion. 7362 7363 If the camera device doesn't support geometric distortion correction, or all of the 7364 geometric distortion fields are no-ops, this rectangle will be the same as the 7365 post-distortion-corrected rectangle given in android.sensor.info.activeArraySize. 7366 7367 This rectangle is defined relative to the full pixel array; (0,0) is the top-left of 7368 the full pixel array, and the size of the full pixel array is given by 7369 android.sensor.info.pixelArraySize. 7370 7371 The pre-correction active array may be smaller than the full pixel array, since the 7372 full array may include black calibration pixels or other inactive regions. 7373 </details> 7374 <ndk_details> 7375 The data representation is `int[4]`, which maps to `(left, top, width, height)`. 7376 </ndk_details> 7377 <hal_details> 7378 This array contains `(xmin, ymin, width, height)`. The `(xmin, ymin)` must be 7379 &gt;= `(0,0)`. 
7380 The `(width, height)` must be &lt;= `android.sensor.info.pixelArraySize`. 7381 7382 If omitted by the HAL implementation, the camera framework will assume that this is 7383 the same as the post-correction active array region given in 7384 android.sensor.info.activeArraySize. 7385 </hal_details> 7386 <tag id="RAW" /> 7387 </entry> 7388 </namespace> 7389 <entry name="referenceIlluminant1" type="byte" visibility="public" 7390 enum="true" permission_needed="true" > 7391 <enum> 7392 <value id="1">DAYLIGHT</value> 7393 <value id="2">FLUORESCENT</value> 7394 <value id="3">TUNGSTEN 7395 <notes>Incandescent light</notes> 7396 </value> 7397 <value id="4">FLASH</value> 7398 <value id="9">FINE_WEATHER</value> 7399 <value id="10">CLOUDY_WEATHER</value> 7400 <value id="11">SHADE</value> 7401 <value id="12">DAYLIGHT_FLUORESCENT 7402 <notes>D 5700 - 7100K</notes> 7403 </value> 7404 <value id="13">DAY_WHITE_FLUORESCENT 7405 <notes>N 4600 - 5400K</notes> 7406 </value> 7407 <value id="14">COOL_WHITE_FLUORESCENT 7408 <notes>W 3900 - 4500K</notes> 7409 </value> 7410 <value id="15">WHITE_FLUORESCENT 7411 <notes>WW 3200 - 3700K</notes> 7412 </value> 7413 <value id="17">STANDARD_A</value> 7414 <value id="18">STANDARD_B</value> 7415 <value id="19">STANDARD_C</value> 7416 <value id="20">D55</value> 7417 <value id="21">D65</value> 7418 <value id="22">D75</value> 7419 <value id="23">D50</value> 7420 <value id="24">ISO_STUDIO_TUNGSTEN</value> 7421 </enum> 7422 <description> 7423 The standard reference illuminant used as the scene light source when 7424 calculating the android.sensor.colorTransform1, 7425 android.sensor.calibrationTransform1, and 7426 android.sensor.forwardMatrix1 matrices. 7427 </description> 7428 <details> 7429 The values in this key correspond to the values defined for the 7430 EXIF LightSource tag. These illuminants are standard light sources 7431 that are often used calibrating camera devices. 7432 7433 If this key is present, then android.sensor.colorTransform1, 7434 android.sensor.calibrationTransform1, and 7435 android.sensor.forwardMatrix1 will also be present. 7436 7437 Some devices may choose to provide a second set of calibration 7438 information for improved quality, including 7439 android.sensor.referenceIlluminant2 and its corresponding matrices. 7440 7441 Starting from Android Q, this key will not be present for a MONOCHROME camera, even if 7442 the camera device has RAW capability. 7443 </details> 7444 <hal_details> 7445 The first reference illuminant (android.sensor.referenceIlluminant1) 7446 and corresponding matrices must be present to support the RAW capability 7447 and DNG output. 7448 7449 When producing raw images with a color profile that has only been 7450 calibrated against a single light source, it is valid to omit 7451 android.sensor.referenceIlluminant2 along with the 7452 android.sensor.colorTransform2, android.sensor.calibrationTransform2, 7453 and android.sensor.forwardMatrix2 matrices. 7454 7455 If only android.sensor.referenceIlluminant1 is included, it should be 7456 chosen so that it is representative of typical scene lighting. In 7457 general, D50 or DAYLIGHT will be chosen for this case. 7458 7459 If both android.sensor.referenceIlluminant1 and 7460 android.sensor.referenceIlluminant2 are included, they should be 7461 chosen to represent the typical range of scene lighting conditions. 
7462 In general, low color temperature illuminant such as Standard-A will 7463 be chosen for the first reference illuminant and a higher color 7464 temperature illuminant such as D65 will be chosen for the second 7465 reference illuminant. 7466 </hal_details> 7467 <tag id="RAW" /> 7468 </entry> 7469 <entry name="referenceIlluminant2" type="byte" visibility="public" 7470 permission_needed="true" > 7471 <description> 7472 The standard reference illuminant used as the scene light source when 7473 calculating the android.sensor.colorTransform2, 7474 android.sensor.calibrationTransform2, and 7475 android.sensor.forwardMatrix2 matrices. 7476 </description> 7477 <range>Any value listed in android.sensor.referenceIlluminant1</range> 7478 <details> 7479 See android.sensor.referenceIlluminant1 for more details. 7480 7481 If this key is present, then android.sensor.colorTransform2, 7482 android.sensor.calibrationTransform2, and 7483 android.sensor.forwardMatrix2 will also be present. 7484 7485 Starting from Android Q, this key will not be present for a MONOCHROME camera, even if 7486 the camera device has RAW capability. 7487 </details> 7488 <tag id="RAW" /> 7489 </entry> 7490 <entry name="calibrationTransform1" type="rational" 7491 visibility="public" optional="true" 7492 type_notes="3x3 matrix in row-major-order" container="array" 7493 typedef="colorSpaceTransform" permission_needed="true" > 7494 <array> 7495 <size>3</size> 7496 <size>3</size> 7497 </array> 7498 <description> 7499 A per-device calibration transform matrix that maps from the 7500 reference sensor colorspace to the actual device sensor colorspace. 7501 </description> 7502 <details> 7503 This matrix is used to correct for per-device variations in the 7504 sensor colorspace, and is used for processing raw buffer data. 7505 7506 The matrix is expressed as a 3x3 matrix in row-major-order, and 7507 contains a per-device calibration transform that maps colors 7508 from reference sensor color space (i.e. the "golden module" 7509 colorspace) into this camera device's native sensor color 7510 space under the first reference illuminant 7511 (android.sensor.referenceIlluminant1). 7512 7513 Starting from Android Q, this key will not be present for a MONOCHROME camera, even if 7514 the camera device has RAW capability. 7515 </details> 7516 <tag id="RAW" /> 7517 </entry> 7518 <entry name="calibrationTransform2" type="rational" 7519 visibility="public" optional="true" 7520 type_notes="3x3 matrix in row-major-order" container="array" 7521 typedef="colorSpaceTransform" permission_needed="true" > 7522 <array> 7523 <size>3</size> 7524 <size>3</size> 7525 </array> 7526 <description> 7527 A per-device calibration transform matrix that maps from the 7528 reference sensor colorspace to the actual device sensor colorspace 7529 (this is the colorspace of the raw buffer data). 7530 </description> 7531 <details> 7532 This matrix is used to correct for per-device variations in the 7533 sensor colorspace, and is used for processing raw buffer data. 7534 7535 The matrix is expressed as a 3x3 matrix in row-major-order, and 7536 contains a per-device calibration transform that maps colors 7537 from reference sensor color space (i.e. the "golden module" 7538 colorspace) into this camera device's native sensor color 7539 space under the second reference illuminant 7540 (android.sensor.referenceIlluminant2). 7541 7542 This matrix will only be present if the second reference 7543 illuminant is present. 
7544 7545 Starting from Android Q, this key will not be present for a MONOCHROME camera, even if 7546 the camera device has RAW capability. 7547 </details> 7548 <tag id="RAW" /> 7549 </entry> 7550 <entry name="colorTransform1" type="rational" 7551 visibility="public" optional="true" 7552 type_notes="3x3 matrix in row-major-order" container="array" 7553 typedef="colorSpaceTransform" permission_needed="true" > 7554 <array> 7555 <size>3</size> 7556 <size>3</size> 7557 </array> 7558 <description> 7559 A matrix that transforms color values from CIE XYZ color space to 7560 reference sensor color space. 7561 </description> 7562 <details> 7563 This matrix is used to convert from the standard CIE XYZ color 7564 space to the reference sensor colorspace, and is used when processing 7565 raw buffer data. 7566 7567 The matrix is expressed as a 3x3 matrix in row-major-order, and 7568 contains a color transform matrix that maps colors from the CIE 7569 XYZ color space to the reference sensor color space (i.e. the 7570 "golden module" colorspace) under the first reference illuminant 7571 (android.sensor.referenceIlluminant1). 7572 7573 The white points chosen in both the reference sensor color space 7574 and the CIE XYZ colorspace when calculating this transform will 7575 match the standard white point for the first reference illuminant 7576 (i.e. no chromatic adaptation will be applied by this transform). 7577 7578 Starting from Android Q, this key will not be present for a MONOCHROME camera, even if 7579 the camera device has RAW capability. 7580 </details> 7581 <tag id="RAW" /> 7582 </entry> 7583 <entry name="colorTransform2" type="rational" 7584 visibility="public" optional="true" 7585 type_notes="3x3 matrix in row-major-order" container="array" 7586 typedef="colorSpaceTransform" permission_needed="true" > 7587 <array> 7588 <size>3</size> 7589 <size>3</size> 7590 </array> 7591 <description> 7592 A matrix that transforms color values from CIE XYZ color space to 7593 reference sensor color space. 7594 </description> 7595 <details> 7596 This matrix is used to convert from the standard CIE XYZ color 7597 space to the reference sensor colorspace, and is used when processing 7598 raw buffer data. 7599 7600 The matrix is expressed as a 3x3 matrix in row-major-order, and 7601 contains a color transform matrix that maps colors from the CIE 7602 XYZ color space to the reference sensor color space (i.e. the 7603 "golden module" colorspace) under the second reference illuminant 7604 (android.sensor.referenceIlluminant2). 7605 7606 The white points chosen in both the reference sensor color space 7607 and the CIE XYZ colorspace when calculating this transform will 7608 match the standard white point for the second reference illuminant 7609 (i.e. no chromatic adaptation will be applied by this transform). 7610 7611 This matrix will only be present if the second reference 7612 illuminant is present. 7613 7614 Starting from Android Q, this key will not be present for a MONOCHROME camera, even if 7615 the camera device has RAW capability. 
7616 </details> 7617 <tag id="RAW" /> 7618 </entry> 7619 <entry name="forwardMatrix1" type="rational" 7620 visibility="public" optional="true" 7621 type_notes="3x3 matrix in row-major-order" container="array" 7622 typedef="colorSpaceTransform" permission_needed="true" > 7623 <array> 7624 <size>3</size> 7625 <size>3</size> 7626 </array> 7627 <description> 7628 A matrix that transforms white balanced camera colors from the reference 7629 sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint. 7630 </description> 7631 <details> 7632 This matrix is used to convert to the standard CIE XYZ colorspace, and 7633 is used when processing raw buffer data. 7634 7635 This matrix is expressed as a 3x3 matrix in row-major-order, and contains 7636 a color transform matrix that maps white balanced colors from the 7637 reference sensor color space to the CIE XYZ color space with a D50 white 7638 point. 7639 7640 Under the first reference illuminant (android.sensor.referenceIlluminant1) 7641 this matrix is chosen so that the standard white point for this reference 7642 illuminant in the reference sensor colorspace is mapped to D50 in the 7643 CIE XYZ colorspace. 7644 7645 Starting from Android Q, this key will not be present for a MONOCHROME camera, even if 7646 the camera device has RAW capability. 7647 </details> 7648 <tag id="RAW" /> 7649 </entry> 7650 <entry name="forwardMatrix2" type="rational" 7651 visibility="public" optional="true" 7652 type_notes="3x3 matrix in row-major-order" container="array" 7653 typedef="colorSpaceTransform" permission_needed="true" > 7654 <array> 7655 <size>3</size> 7656 <size>3</size> 7657 </array> 7658 <description> 7659 A matrix that transforms white balanced camera colors from the reference 7660 sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint. 7661 </description> 7662 <details> 7663 This matrix is used to convert to the standard CIE XYZ colorspace, and 7664 is used when processing raw buffer data. 7665 7666 This matrix is expressed as a 3x3 matrix in row-major-order, and contains 7667 a color transform matrix that maps white balanced colors from the 7668 reference sensor color space to the CIE XYZ color space with a D50 white 7669 point. 7670 7671 Under the second reference illuminant (android.sensor.referenceIlluminant2) 7672 this matrix is chosen so that the standard white point for this reference 7673 illuminant in the reference sensor colorspace is mapped to D50 in the 7674 CIE XYZ colorspace. 7675 7676 This matrix will only be present if the second reference 7677 illuminant is present. 7678 7679 Starting from Android Q, this key will not be present for a MONOCHROME camera, even if 7680 the camera device has RAW capability. 7681 </details> 7682 <tag id="RAW" /> 7683 </entry> 7684 <entry name="baseGainFactor" type="rational" 7685 optional="true"> 7686 <description>Gain factor from electrons to raw units when 7687 ISO=100</description> 7688 <tag id="FUTURE" /> 7689 </entry> 7690 <entry name="blackLevelPattern" type="int32" visibility="public" 7691 optional="true" type_notes="2x2 raw count block" container="array" 7692 typedef="blackLevelPattern"> 7693 <array> 7694 <size>4</size> 7695 </array> 7696 <description> 7697 A fixed black level offset for each of the color filter arrangement 7698 (CFA) mosaic channels. 7699 </description> 7700 <range>&gt;= 0 for each.</range> 7701 <details> 7702 This key specifies the zero light value for each of the CFA mosaic 7703 channels in the camera sensor. 
The maximal value output by the 7704 sensor is represented by the value in android.sensor.info.whiteLevel. 7705 7706 The values are given in the same order as channels listed for the CFA 7707 layout key (see android.sensor.info.colorFilterArrangement), i.e. the 7708 nth value given corresponds to the black level offset for the nth 7709 color channel listed in the CFA. 7710 7711 The black level values of captured images may vary for different 7712 capture settings (e.g., android.sensor.sensitivity). This key 7713 represents a coarse approximation for such case. It is recommended to 7714 use android.sensor.dynamicBlackLevel or use pixels from 7715 android.sensor.opticalBlackRegions directly for captures when 7716 supported by the camera device, which provides more accurate black 7717 level values. For raw capture in particular, it is recommended to use 7718 pixels from android.sensor.opticalBlackRegions to calculate black 7719 level values for each frame. 7720 7721 For a MONOCHROME camera device, all of the 2x2 channels must have the same values. 7722 </details> 7723 <hal_details> 7724 The values are given in row-column scan order, with the first value 7725 corresponding to the element of the CFA in row=0, column=0. 7726 </hal_details> 7727 <tag id="RAW" /> 7728 </entry> 7729 <entry name="maxAnalogSensitivity" type="int32" visibility="public" 7730 optional="true" hwlevel="full"> 7731 <description>Maximum sensitivity that is implemented 7732 purely through analog gain.</description> 7733 <details>For android.sensor.sensitivity values less than or 7734 equal to this, all applied gain must be analog. For 7735 values above this, the gain applied can be a mix of analog and 7736 digital.</details> 7737 <tag id="V1" /> 7738 <tag id="FULL" /> 7739 </entry> 7740 <entry name="orientation" type="int32" visibility="public" 7741 hwlevel="legacy"> 7742 <description>Clockwise angle through which the output image needs to be rotated to be 7743 upright on the device screen in its native orientation. 7744 </description> 7745 <units>Degrees of clockwise rotation; always a multiple of 7746 90</units> 7747 <range>0, 90, 180, 270</range> 7748 <details> 7749 Also defines the direction of rolling shutter readout, which is from top to bottom in 7750 the sensor's coordinate system. 7751 </details> 7752 <tag id="BC" /> 7753 </entry> 7754 <entry name="profileHueSatMapDimensions" type="int32" 7755 visibility="system" optional="true" 7756 type_notes="Number of samples for hue, saturation, and value" 7757 container="array"> 7758 <array> 7759 <size>3</size> 7760 </array> 7761 <description> 7762 The number of input samples for each dimension of 7763 android.sensor.profileHueSatMap. 7764 </description> 7765 <range> 7766 Hue &gt;= 1, 7767 Saturation &gt;= 2, 7768 Value &gt;= 1 7769 </range> 7770 <details> 7771 The number of input samples for the hue, saturation, and value 7772 dimension of android.sensor.profileHueSatMap. The order of the 7773 dimensions given is hue, saturation, value; where hue is the 0th 7774 element. 
7775 </details> 7776 <tag id="RAW" /> 7777 </entry> 7778 </static> 7779 <dynamic> 7780 <clone entry="android.sensor.exposureTime" kind="controls"> 7781 </clone> 7782 <clone entry="android.sensor.frameDuration" 7783 kind="controls"></clone> 7784 <clone entry="android.sensor.sensitivity" kind="controls"> 7785 </clone> 7786 <entry name="timestamp" type="int64" visibility="public" 7787 hwlevel="legacy"> 7788 <description>Time at start of exposure of first 7789 row of the image sensor active array, in nanoseconds.</description> 7790 <units>Nanoseconds</units> 7791 <range>&gt; 0</range> 7792 <details>The timestamps are also included in all image 7793 buffers produced for the same capture, and will be identical 7794 on all the outputs. 7795 7796 When android.sensor.info.timestampSource `==` UNKNOWN, 7797 the timestamps measure time since an unspecified starting point, 7798 and are monotonically increasing. They can be compared with the 7799 timestamps for other captures from the same camera device, but are 7800 not guaranteed to be comparable to any other time source. 7801 7802 When android.sensor.info.timestampSource `==` REALTIME, the 7803 timestamps measure time in the same timebase as {@link 7804 android.os.SystemClock#elapsedRealtimeNanos}, and they can 7805 be compared to other timestamps from other subsystems that 7806 are using that base. 7807 7808 For reprocessing, the timestamp will match the start of exposure of 7809 the input image, i.e. {@link CaptureResult#SENSOR_TIMESTAMP the 7810 timestamp} in the TotalCaptureResult that was used to create the 7811 reprocess capture request. 7812 </details> 7813 <hal_details> 7814 All timestamps must be in reference to the kernel's 7815 CLOCK_BOOTTIME monotonic clock, which properly accounts for 7816 time spent asleep. This allows for synchronization with 7817 sensors that continue to operate while the system is 7818 otherwise asleep. 7819 7820 If android.sensor.info.timestampSource `==` REALTIME, 7821 The timestamp must be synchronized with the timestamps from other 7822 sensor subsystems that are using the same timebase. 7823 7824 For reprocessing, the input image's start of exposure can be looked up 7825 with android.sensor.timestamp from the metadata included in the 7826 capture request. 7827 </hal_details> 7828 <tag id="BC" /> 7829 </entry> 7830 <entry name="temperature" type="float" 7831 optional="true"> 7832 <description>The temperature of the sensor, sampled at the time 7833 exposure began for this frame. 7834 7835 The thermal diode being queried should be inside the sensor PCB, or 7836 somewhere close to it. 7837 </description> 7838 7839 <units>Celsius</units> 7840 <range>Optional. This value is missing if no temperature is available.</range> 7841 <tag id="FUTURE" /> 7842 </entry> 7843 <entry name="neutralColorPoint" type="rational" visibility="public" 7844 optional="true" container="array"> 7845 <array> 7846 <size>3</size> 7847 </array> 7848 <description> 7849 The estimated camera neutral color in the native sensor colorspace at 7850 the time of capture. 7851 </description> 7852 <details> 7853 This value gives the neutral color point encoded as an RGB value in the 7854 native sensor color space. The neutral color point indicates the 7855 currently estimated white point of the scene illumination. It can be 7856 used to interpolate between the provided color transforms when 7857 processing raw sensor data. 7858 7859 The order of the values is R, G, B; where R is in the lowest index. 
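As a non-normative sketch of the interpolation mentioned above: once a weight `w` in `[0, 1]` has been derived from this neutral point (for example with the inverse-correlated-color-temperature scheme described in the Adobe DNG specification), the two transforms can be blended entrywise. The helper below is hypothetical and assumes both matrices have already been converted to row-major float form:

    // Sketch: blend colorTransform1 (t1) and colorTransform2 (t2) with weight w,
    // where w = 1 selects the first reference illuminant and w = 0 the second.
    static float[][] interpolateTransforms(float[][] t1, float[][] t2, float w) {
        float[][] out = new float[3][3];
        for (int i = 0; i &lt; 3; i++) {
            for (int j = 0; j &lt; 3; j++) {
                out[i][j] = w * t1[i][j] + (1.f - w) * t2[i][j];
            }
        }
        return out;
    }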
7860 7861 Starting from Android Q, this key will not be present for a MONOCHROME camera, even if 7862 the camera device has RAW capability. 7863 </details> 7864 <tag id="RAW" /> 7865 </entry> 7866 <entry name="noiseProfile" type="double" visibility="public" 7867 optional="true" type_notes="Pairs of noise model coefficients" 7868 container="array" typedef="pairDoubleDouble"> 7869 <array> 7870 <size>2</size> 7871 <size>CFA Channels</size> 7872 </array> 7873 <description> 7874 Noise model coefficients for each CFA mosaic channel. 7875 </description> 7876 <details> 7877 This key contains two noise model coefficients for each CFA channel 7878 corresponding to the sensor amplification (S) and sensor readout 7879 noise (O). These are given as pairs of coefficients for each channel 7880 in the same order as channels listed for the CFA layout key 7881 (see android.sensor.info.colorFilterArrangement). This is 7882 represented as an array of Pair&lt;Double, Double&gt;, where 7883 the first member of the Pair at index n is the S coefficient and the 7884 second member is the O coefficient for the nth color channel in the CFA. 7885 7886 These coefficients are used in a two parameter noise model to describe 7887 the amount of noise present in the image for each CFA channel. The 7888 noise model used here is: 7889 7890 N(x) = sqrt(Sx + O) 7891 7892 Where x represents the recorded signal of a CFA channel normalized to 7893 the range [0, 1], and S and O are the noise model coefficients for 7894 that channel. 7895 7896 A more detailed description of the noise model can be found in the 7897 Adobe DNG specification for the NoiseProfile tag. 7898 7899 For a MONOCHROME camera, there is only one color channel, so the noise model coefficients 7900 will only contain one S and one O. 7901 7902 </details> 7903 <hal_details> 7904 For a CFA layout of RGGB, the list of coefficients would be given as 7905 an array of doubles S0,O0,S1,O1,..., where S0 and O0 are the coefficients 7906 for the red channel, S1 and O1 are the coefficients for the first green 7907 channel, etc. 7908 </hal_details> 7909 <tag id="RAW" /> 7910 </entry> 7911 <entry name="profileHueSatMap" type="float" 7912 visibility="system" optional="true" 7913 type_notes="Mapping for hue, saturation, and value" 7914 container="array"> 7915 <array> 7916 <size>hue_samples</size> 7917 <size>saturation_samples</size> 7918 <size>value_samples</size> 7919 <size>3</size> 7920 </array> 7921 <description> 7922 A mapping containing a hue shift, saturation scale, and value scale 7923 for each pixel. 7924 </description> 7925 <units> 7926 The hue shift is given in degrees; saturation and value scale factors are 7927 unitless and are between 0 and 1 inclusive 7928 </units> 7929 <details> 7930 hue_samples, saturation_samples, and value_samples are given in 7931 android.sensor.profileHueSatMapDimensions. 7932 7933 Each entry of this map contains three floats corresponding to the 7934 hue shift, saturation scale, and value scale, respectively; where the 7935 hue shift has the lowest index. The map entries are stored in the key 7936 in nested loop order, with the value divisions in the outer loop, the 7937 hue divisions in the middle loop, and the saturation divisions in the 7938 inner loop. All zero input saturation entries are required to have a 7939 value scale factor of 1.0.
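To make the layout concrete, the following non-normative sketch computes the index of the first float (the hue shift) of a given sample; the helper name is hypothetical:

    // Sketch: index of the hue-shift float for sample (hueIdx, satIdx, valIdx), given
    // the nested loop order described above (value outermost, then hue, then
    // saturation) and three floats per entry.
    static int hueSatMapIndex(int hueIdx, int satIdx, int valIdx,
            int hueSamples, int satSamples) {
        return ((valIdx * hueSamples + hueIdx) * satSamples + satIdx) * 3;
    }

The saturation scale and value scale for the same sample follow at offsets +1 and +2.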
7940 </details> 7941 <tag id="RAW" /> 7942 </entry> 7943 <entry name="profileToneCurve" type="float" 7944 visibility="system" optional="true" 7945 type_notes="Samples defining a spline for a tone-mapping curve" 7946 container="array"> 7947 <array> 7948 <size>samples</size> 7949 <size>2</size> 7950 </array> 7951 <description> 7952 A list of x,y samples defining a tone-mapping curve for gamma adjustment. 7953 </description> 7954 <range> 7955 Each sample has an input range of `[0, 1]` and an output range of 7956 `[0, 1]`. The first sample is required to be `(0, 0)`, and the last 7957 sample is required to be `(1, 1)`. 7958 </range> 7959 <details> 7960 This key contains a default tone curve that can be applied while 7961 processing the image as a starting point for user adjustments. 7962 The curve is specified as a list of value pairs in linear gamma. 7963 The curve is interpolated using a cubic spline. 7964 </details> 7965 <tag id="RAW" /> 7966 </entry> 7967 <entry name="greenSplit" type="float" visibility="public" optional="true"> 7968 <description> 7969 The worst-case divergence between Bayer green channels. 7970 </description> 7971 <range> 7972 &gt;= 0 7973 </range> 7974 <details> 7975 This value is an estimate of the worst case split between the 7976 Bayer green channels in the red and blue rows in the sensor color 7977 filter array. 7978 7979 The green split is calculated as follows: 7980 7981 1. A 5x5 pixel (or larger) window W within the active sensor array is 7982 chosen. The term 'pixel' here is taken to mean a group of 4 Bayer 7983 mosaic channels (R, Gr, Gb, B). The location and size of the window 7984 chosen is implementation defined, and should be chosen to provide a 7985 green split estimate that is both representative of the entire image 7986 for this camera sensor, and can be calculated quickly. 7987 1. The arithmetic mean of the green channels from the red 7988 rows (mean_Gr) within W is computed. 7989 1. The arithmetic mean of the green channels from the blue 7990 rows (mean_Gb) within W is computed. 7991 1. The maximum ratio R of the two means is computed as follows: 7992 `R = max((mean_Gr + 1)/(mean_Gb + 1), (mean_Gb + 1)/(mean_Gr + 1))` 7993 7994 The ratio R is the green split divergence reported for this property, 7995 which represents how much the green channels differ in the mosaic 7996 pattern. This value is typically used to determine the treatment of 7997 the green mosaic channels when demosaicing. 7998 7999 The green split value can be roughly interpreted as follows: 8000 8001 * R &lt; 1.03 is a negligible split (&lt;3% divergence). 8002 * 1.03 &lt;= R &lt;= 1.20 will require some software 8003 correction to avoid demosaic errors (3-20% divergence). 8004 * R &gt; 1.20 will require strong software correction to produce 8005 a usable image (&gt;20% divergence). 8006 8007 Starting from Android Q, this key will not be present for a MONOCHROME camera, even if 8008 the camera device has RAW capability. 8009 </details> 8010 <hal_details> 8011 The green split given may be a static value based on prior 8012 characterization of the camera sensor using the green split 8013 calculation method given here over a large, representative, sample 8014 set of images. Other methods of calculation that produce equivalent 8015 results, and can be interpreted in the same manner, may be used.
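As a non-normative sketch of the ratio defined above (assuming the means over the chosen window W have already been computed):

    // Sketch: green split ratio R from the window means of the Gr and Gb channels.
    static float greenSplitRatio(float meanGr, float meanGb) {
        float a = (meanGr + 1.f) / (meanGb + 1.f);
        float b = (meanGb + 1.f) / (meanGr + 1.f);
        return Math.max(a, b);
    }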
8016 </hal_details> 8017 <tag id="RAW" /> 8018 </entry> 8019 </dynamic> 8020 <controls> 8021 <entry name="testPatternData" type="int32" visibility="public" optional="true" container="array"> 8022 <array> 8023 <size>4</size> 8024 </array> 8025 <description> 8026 A pixel `[R, G_even, G_odd, B]` that supplies the test pattern 8027 when android.sensor.testPatternMode is SOLID_COLOR. 8028 </description> 8029 <details> 8030 Each color channel is treated as an unsigned 32-bit integer. 8031 The camera device then uses the most significant X bits 8032 that correspond to how many bits are in its Bayer raw sensor 8033 output. 8034 8035 For example, a sensor with RAW10 Bayer output would use the 8036 10 most significant bits from each color channel. 8037 </details> 8038 <hal_details> 8039 </hal_details> 8040 </entry> 8041 <entry name="testPatternMode" type="int32" visibility="public" optional="true" 8042 enum="true"> 8043 <enum> 8044 <value>OFF 8045 <notes>No test pattern mode is used, and the camera 8046 device returns captures from the image sensor. 8047 8048 This is the default if the key is not set.</notes> 8049 </value> 8050 <value>SOLID_COLOR 8051 <notes> 8052 Each pixel in `[R, G_even, G_odd, B]` is replaced by its 8053 respective color channel provided in 8054 android.sensor.testPatternData. 8055 8056 For example: 8057 8058 android.sensor.testPatternData = [0, 0xFFFFFFFF, 0xFFFFFFFF, 0] 8059 8060 All green pixels are 100% green. All red/blue pixels are black. 8061 8062 android.sensor.testPatternData = [0xFFFFFFFF, 0, 0xFFFFFFFF, 0] 8063 8064 All red pixels are 100% red. Only the odd green pixels 8065 are 100% green. All blue pixels are 100% black. 8066 </notes> 8067 </value> 8068 <value>COLOR_BARS 8069 <notes> 8070 All pixel data is replaced with an 8-bar color pattern. 8071 8072 The vertical bars (left-to-right) are as follows: 8073 8074 * 100% white 8075 * yellow 8076 * cyan 8077 * green 8078 * magenta 8079 * red 8080 * blue 8081 * black 8082 8083 In general the image would look like the following: 8084 8085 W Y C G M R B K 8086 W Y C G M R B K 8087 W Y C G M R B K 8088 W Y C G M R B K 8089 W Y C G M R B K 8090 . . . . . . . . 8091 . . . . . . . . 8092 . . . . . . . . 8093 8094 (B = Blue, K = Black) 8095 8096 Each bar should take up 1/8 of the sensor pixel array width. 8097 When this is not possible, the bar size should be rounded 8098 down to the nearest integer and the pattern can repeat 8099 on the right side. 8100 8101 Each bar's height must always take up the full sensor 8102 pixel array height. 8103 8104 Each pixel in this test pattern must be set to either 8105 0% intensity or 100% intensity. 8106 </notes> 8107 </value> 8108 <value>COLOR_BARS_FADE_TO_GRAY 8109 <notes> 8110 The test pattern is similar to COLOR_BARS, except that 8111 each bar should start at its specified color at the top, 8112 and fade to gray at the bottom. 8113 8114 Furthermore each bar is further subdivided into a left and 8115 right half. The left half should have a smooth gradient, 8116 and the right half should have a quantized gradient. 8117 8118 In particular, the right half should consist of blocks of the 8119 same color for 1/16th of the active sensor pixel array width. 8120 8121 The least significant bits in the quantized gradient should 8122 be copied from the most significant bits of the smooth gradient. 8123 8124 The height of each bar should always be a multiple of 128. 8125 When this is not the case, the pattern should repeat at the bottom 8126 of the image.
8127 </notes> 8128 </value> 8129 <value>PN9 8130 <notes> 8131 All pixel data is replaced by a pseudo-random sequence 8132 generated from a PN9 512-bit sequence (typically implemented 8133 in hardware with a linear feedback shift register). 8134 8135 The generator should be reset at the beginning of each frame, 8136 and thus each subsequent raw frame with this test pattern should 8137 be exactly the same as the last. 8138 </notes> 8139 </value> 8140 <value id="256">CUSTOM1 8141 <notes>The first custom test pattern. All custom patterns that are 8142 available only on this camera device are at least this numeric 8143 value. 8144 8145 All of the custom test patterns will be static 8146 (that is the raw image must not vary from frame to frame). 8147 </notes> 8148 </value> 8149 </enum> 8150 <description>When enabled, the sensor sends a test pattern instead of 8151 doing a real exposure from the camera. 8152 </description> 8153 <range>android.sensor.availableTestPatternModes</range> 8154 <details> 8155 When a test pattern is enabled, all manual sensor controls specified 8156 by android.sensor.* will be ignored. All other controls should 8157 work as normal. 8158 8159 For example, if manual flash is enabled, flash firing should still 8160 occur (and that the test pattern remain unmodified, since the flash 8161 would not actually affect it). 8162 8163 Defaults to OFF. 8164 </details> 8165 <hal_details> 8166 All test patterns are specified in the Bayer domain. 8167 8168 The HAL may choose to substitute test patterns from the sensor 8169 with test patterns from on-device memory. In that case, it should be 8170 indistinguishable to the ISP whether the data came from the 8171 sensor interconnect bus (such as CSI2) or memory. 8172 </hal_details> 8173 </entry> 8174 </controls> 8175 <dynamic> 8176 <clone entry="android.sensor.testPatternData" kind="controls"> 8177 </clone> 8178 <clone entry="android.sensor.testPatternMode" kind="controls"> 8179 </clone> 8180 </dynamic> 8181 <static> 8182 <entry name="availableTestPatternModes" type="int32" visibility="public" optional="true" 8183 type_notes="list of enums" container="array"> 8184 <array> 8185 <size>n</size> 8186 </array> 8187 <description>List of sensor test pattern modes for android.sensor.testPatternMode 8188 supported by this camera device. 8189 </description> 8190 <range>Any value listed in android.sensor.testPatternMode</range> 8191 <details> 8192 Defaults to OFF, and always includes OFF if defined. 8193 </details> 8194 <hal_details> 8195 All custom modes must be >= CUSTOM1. 8196 </hal_details> 8197 </entry> 8198 </static> 8199 <dynamic> 8200 <entry name="rollingShutterSkew" type="int64" visibility="public" hwlevel="limited"> 8201 <description>Duration between the start of first row exposure 8202 and the start of last row exposure.</description> 8203 <units>Nanoseconds</units> 8204 <range> &gt;= 0 and &lt; 8205 {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration}.</range> 8206 <details> 8207 This is the exposure time skew between the first and last 8208 row exposure start times. The first row and the last row are 8209 the first and last rows inside of the 8210 android.sensor.info.activeArraySize. 8211 8212 For typical camera sensors that use rolling shutters, this is also equivalent 8213 to the frame readout time. 8214 </details> 8215 <hal_details> 8216 The HAL must report `0` if the sensor is using global shutter, where all pixels begin 8217 exposure at the same time. 
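For reference, the relationship between this key, android.sensor.timestamp, and per-row timing can be sketched as follows (non-normative, assuming readout progresses linearly from the first to the last row of the active array):

    // Sketch: approximate exposure start time of a given 0-based row of the active array.
    static long rowExposureStartNs(long sensorTimestampNs, long rollingShutterSkewNs,
            int row, int activeArrayHeight) {
        if (activeArrayHeight &lt;= 1) {
            return sensorTimestampNs;
        }
        return sensorTimestampNs
                + rollingShutterSkewNs * (long) row / (activeArrayHeight - 1);
    }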
8218 </hal_details> 8219 <tag id="V1" /> 8220 </entry> 8221 </dynamic> 8222 <static> 8223 <entry name="opticalBlackRegions" type="int32" visibility="public" optional="true" 8224 container="array" typedef="rectangle"> 8225 <array> 8226 <size>4</size> 8227 <size>num_regions</size> 8228 </array> 8229 <description>List of disjoint rectangles indicating the sensor 8230 optically shielded black pixel regions. 8231 </description> 8232 <details> 8233 In most camera sensors, the active array is surrounded by some 8234 optically shielded pixel areas. By blocking light, these pixels 8235 provide a reliable black reference for black level compensation 8236 in the active array region. 8237 8238 This key provides a list of disjoint rectangles specifying the 8239 regions of optically shielded (with metal shield) black pixel 8240 regions if the camera device is capable of reading out these black 8241 pixels in the output raw images. In comparison to the fixed black 8242 level values reported by android.sensor.blackLevelPattern, this key 8243 may provide a more accurate way for the application to calculate 8244 the black level of each captured raw image. 8245 8246 When this key is reported, the android.sensor.dynamicBlackLevel and 8247 android.sensor.dynamicWhiteLevel will also be reported. 8248 </details> 8249 <ndk_details> 8250 The data representation is `int[4]`, which maps to `(left, top, width, height)`. 8251 </ndk_details> 8252 <hal_details> 8253 This array contains (xmin, ymin, width, height). The (xmin, ymin) 8254 must be &gt;= (0,0) and &lt;= 8255 android.sensor.info.pixelArraySize. The (width, height) must be 8256 &lt;= android.sensor.info.pixelArraySize. Each region must be 8257 outside the region reported by 8258 android.sensor.info.preCorrectionActiveArraySize. 8259 8260 The HAL must report a minimal number of disjoint regions for the 8261 optically shielded black pixel regions. For example, if a region can 8262 be covered by one rectangle, the HAL must not split this region into 8263 multiple rectangles. 8264 </hal_details> 8265 </entry> 8266 </static> 8267 <dynamic> 8268 <entry name="dynamicBlackLevel" type="float" visibility="public" 8269 optional="true" type_notes="2x2 raw count block" container="array"> 8270 <array> 8271 <size>4</size> 8272 </array> 8273 <description> 8274 A per-frame dynamic black level offset for each of the color filter 8275 arrangement (CFA) mosaic channels. 8276 </description> 8277 <range>&gt;= 0 for each.</range> 8278 <details> 8279 Camera sensor black levels may vary dramatically for different 8280 capture settings (e.g. android.sensor.sensitivity). The fixed black 8281 level reported by android.sensor.blackLevelPattern may be too 8282 inaccurate to represent the actual value on a per-frame basis. The 8283 camera device internal pipeline relies on reliable black level values 8284 to process the raw images appropriately. To get the best image 8285 quality, the camera device may choose to estimate the per frame black 8286 level values either based on optically shielded black regions 8287 (android.sensor.opticalBlackRegions) or its internal model. 8288 8289 This key reports the camera device estimated per-frame zero light 8290 value for each of the CFA mosaic channels in the camera sensor. The 8291 android.sensor.blackLevelPattern may only represent a coarse 8292 approximation of the actual black level values. This value is the 8293 black level used in the camera device's internal image processing pipeline 8294 and is generally more accurate than the fixed black level values.
8295 However, since they are estimated values by the camera device, they 8296 may not be as accurate as the black level values calculated from the 8297 optical black pixels reported by android.sensor.opticalBlackRegions. 8298 8299 The values are given in the same order as channels listed for the CFA 8300 layout key (see android.sensor.info.colorFilterArrangement), i.e. the 8301 nth value given corresponds to the black level offset for the nth 8302 color channel listed in the CFA. 8303 8304 For a MONOCHROME camera, all of the 2x2 channels must have the same values. 8305 8306 This key will be available if android.sensor.opticalBlackRegions is available or the 8307 camera device advertises this key via {@link 8308 android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys|ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS}. 8309 </details> 8310 <hal_details> 8311 The values are given in row-column scan order, with the first value 8312 corresponding to the element of the CFA in row=0, column=0. 8313 </hal_details> 8314 <tag id="RAW" /> 8315 </entry> 8316 <entry name="dynamicWhiteLevel" type="int32" visibility="public" 8317 optional="true" > 8318 <description> 8319 Maximum raw value output by sensor for this frame. 8320 </description> 8321 <range> &gt;= 0</range> 8322 <details> 8323 Since the android.sensor.blackLevelPattern may change for different 8324 capture settings (e.g., android.sensor.sensitivity), the white 8325 level will change accordingly. This key is similar to 8326 android.sensor.info.whiteLevel, but specifies the camera device 8327 estimated white level for each frame. 8328 8329 This key will be available if android.sensor.opticalBlackRegions is 8330 available or the camera device advertises this key via 8331 {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys|ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS}. 8332 </details> 8333 <hal_details> 8334 The full bit depth of the sensor must be available in the raw data, 8335 so the value for linear sensors should not be significantly lower 8336 than the maximum raw value supported, i.e. 2^(sensor bits per pixel). 8337 </hal_details> 8338 <tag id="RAW" /> 8339 </entry> 8340 </dynamic> 8341 <static> 8342 <entry name="opaqueRawSize" type="int32" visibility="system" container="array"> 8343 <array> 8344 <size>n</size> 8345 <size>3</size> 8346 </array> 8347 <description>Size in bytes for all the listed opaque RAW buffer sizes</description> 8348 <range>Must be large enough to fit the opaque RAW of corresponding size produced by 8349 the camera</range> 8350 <details> 8351 These configurations are listed as `(width, height, size_in_bytes)` tuples. 8352 This is used for sizing the gralloc buffers for opaque RAW buffers. 8353 All RAW_OPAQUE output stream configurations listed in 8354 android.scaler.availableStreamConfigurations will have a corresponding tuple in 8355 this key. 8356 </details> 8357 <hal_details> 8358 This key is added in legacy HAL3.4. 8359 8360 For legacy HAL3.4 or above: devices advertising RAW_OPAQUE format output must list this 8361 key. For legacy HAL3.3 or earlier devices: if RAW_OPAQUE output is advertised, the camera 8362 framework will derive this key by assuming each pixel takes two bytes and no padding bytes 8363 between rows.
8364 </hal_details> 8365 </entry> 8366 </static> 8367 </section> 8368 <section name="shading"> 8369 <controls> 8370 <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full"> 8371 <enum> 8372 <value>OFF 8373 <notes>No lens shading correction is applied.</notes></value> 8374 <value>FAST 8375 <notes>Apply lens shading corrections, without slowing 8376 frame rate relative to sensor raw output</notes></value> 8377 <value>HIGH_QUALITY 8378 <notes>Apply high-quality lens shading correction, at the 8379 cost of possibly reduced frame rate.</notes></value> 8380 </enum> 8381 <description>Quality of lens shading correction applied 8382 to the image data.</description> 8383 <range>android.shading.availableModes</range> 8384 <details> 8385 When set to OFF mode, no lens shading correction will be applied by the 8386 camera device, and an identity lens shading map data will be provided 8387 if `android.statistics.lensShadingMapMode == ON`. For example, for lens 8388 shading map with size of `[ 4, 3 ]`, 8389 the output android.statistics.lensShadingCorrectionMap for this case will be an identity 8390 map shown below: 8391 8392 [ 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 8393 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 8394 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 8395 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 8396 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 8397 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] 8398 8399 When set to other modes, lens shading correction will be applied by the camera 8400 device. Applications can request lens shading map data by setting 8401 android.statistics.lensShadingMapMode to ON, and then the camera device will provide lens 8402 shading map data in android.statistics.lensShadingCorrectionMap; the returned shading map 8403 data will be the one applied by the camera device for this capture request. 8404 8405 The shading map data may depend on the auto-exposure (AE) and AWB statistics, therefore 8406 the reliability of the map data may be affected by the AE and AWB algorithms. When AE and 8407 AWB are in AUTO modes(android.control.aeMode `!=` OFF and android.control.awbMode `!=` 8408 OFF), to get best results, it is recommended that the applications wait for the AE and AWB 8409 to be converged before using the returned shading map data. 8410 </details> 8411 </entry> 8412 <entry name="strength" type="byte"> 8413 <description>Control the amount of shading correction 8414 applied to the images</description> 8415 <units>unitless: 1-10; 10 is full shading 8416 compensation</units> 8417 <tag id="FUTURE" /> 8418 </entry> 8419 </controls> 8420 <dynamic> 8421 <clone entry="android.shading.mode" kind="controls"> 8422 </clone> 8423 </dynamic> 8424 <static> 8425 <entry name="availableModes" type="byte" visibility="public" 8426 type_notes="List of enums (android.shading.mode)." container="array" 8427 typedef="enumList" hwlevel="legacy"> 8428 <array> 8429 <size>n</size> 8430 </array> 8431 <description> 8432 List of lens shading modes for android.shading.mode that are supported by this camera device. 8433 </description> 8434 <range>Any value listed in android.shading.mode</range> 8435 <details> 8436 This list contains lens shading modes that can be set for the camera device. 8437 Camera devices that support the MANUAL_POST_PROCESSING capability will always 8438 list OFF and FAST mode. This includes all FULL level devices. 8439 LEGACY devices will always only support FAST mode. 
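As a non-normative illustration, an application could pick the highest-quality mode from this list using the public Java API (the helper name and the fallback choice are hypothetical):

    // Sketch: prefer HIGH_QUALITY lens shading correction when it is listed,
    // otherwise fall back to FAST.
    static void chooseShadingMode(
            android.hardware.camera2.CameraCharacteristics chars,
            android.hardware.camera2.CaptureRequest.Builder builder) {
        int[] modes = chars.get(
                android.hardware.camera2.CameraCharacteristics.SHADING_AVAILABLE_MODES);
        int mode = android.hardware.camera2.CameraMetadata.SHADING_MODE_FAST;
        if (modes != null) {
            for (int m : modes) {
                if (m == android.hardware.camera2.CameraMetadata.SHADING_MODE_HIGH_QUALITY) {
                    mode = m;
                }
            }
        }
        builder.set(android.hardware.camera2.CaptureRequest.SHADING_MODE, mode);
    }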
8440 </details> 8441 <hal_details> 8442 HAL must support both FAST and HIGH_QUALITY if lens shading correction control is 8443 available on the camera device, but the underlying implementation can be the same for 8444 both modes. That is, if the highest quality implementation on the camera device does not 8445 slow down capture rate, then FAST and HIGH_QUALITY will generate the same output. 8446 </hal_details> 8447 </entry> 8448 </static> 8449 </section> 8450 <section name="statistics"> 8451 <controls> 8452 <entry name="faceDetectMode" type="byte" visibility="public" enum="true" 8453 hwlevel="legacy"> 8454 <enum> 8455 <value>OFF 8456 <notes>Do not include face detection statistics in capture 8457 results.</notes></value> 8458 <value optional="true">SIMPLE 8459 <notes>Return face rectangle and confidence values only. 8460 </notes></value> 8461 <value optional="true">FULL 8462 <notes>Return all face 8463 metadata. 8464 8465 In this mode, face rectangles, scores, landmarks, and face IDs are all valid. 8466 </notes></value> 8467 </enum> 8468 <description>Operating mode for the face detector 8469 unit.</description> 8470 <range>android.statistics.info.availableFaceDetectModes</range> 8471 <details>Whether face detection is enabled, and whether it 8472 should output just the basic fields or the full set of 8473 fields.</details> 8474 <hal_details> 8475 SIMPLE mode must fill in android.statistics.faceRectangles and 8476 android.statistics.faceScores. 8477 FULL mode must also fill in android.statistics.faceIds, and 8478 android.statistics.faceLandmarks. 8479 </hal_details> 8480 <tag id="BC" /> 8481 </entry> 8482 <entry name="histogramMode" type="byte" enum="true" typedef="boolean"> 8483 <enum> 8484 <value>OFF</value> 8485 <value>ON</value> 8486 </enum> 8487 <description>Operating mode for histogram 8488 generation</description> 8489 <tag id="FUTURE" /> 8490 </entry> 8491 <entry name="sharpnessMapMode" type="byte" enum="true" typedef="boolean"> 8492 <enum> 8493 <value>OFF</value> 8494 <value>ON</value> 8495 </enum> 8496 <description>Operating mode for sharpness map 8497 generation</description> 8498 <tag id="FUTURE" /> 8499 </entry> 8500 <entry name="hotPixelMapMode" type="byte" visibility="public" enum="true" 8501 typedef="boolean"> 8502 <enum> 8503 <value>OFF 8504 <notes>Hot pixel map production is disabled. 8505 </notes></value> 8506 <value>ON 8507 <notes>Hot pixel map production is enabled. 8508 </notes></value> 8509 </enum> 8510 <description> 8511 Operating mode for hot pixel map generation. 8512 </description> 8513 <range>android.statistics.info.availableHotPixelMapModes</range> 8514 <details> 8515 If set to `true`, a hot pixel map is returned in android.statistics.hotPixelMap. 8516 If set to `false`, no hot pixel map will be returned. 8517 </details> 8518 <tag id="V1" /> 8519 <tag id="RAW" /> 8520 </entry> 8521 </controls> 8522 <static> 8523 <namespace name="info"> 8524 <entry name="availableFaceDetectModes" type="byte" 8525 visibility="public" 8526 type_notes="List of enums from android.statistics.faceDetectMode" 8527 container="array" 8528 typedef="enumList" 8529 hwlevel="legacy"> 8530 <array> 8531 <size>n</size> 8532 </array> 8533 <description>List of face detection modes for android.statistics.faceDetectMode that are 8534 supported by this camera device. 8535 </description> 8536 <range>Any value listed in android.statistics.faceDetectMode</range> 8537 <details>OFF is always supported. 
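For illustration only, an application might enable the most capable listed mode and then read the detected faces from each capture result; the helper below is a hypothetical sketch that relies on the mode constants being ordered OFF &lt; SIMPLE &lt; FULL:

    // Sketch: enable the most capable face detection mode advertised by the device.
    static void enableFaceDetection(
            android.hardware.camera2.CameraCharacteristics chars,
            android.hardware.camera2.CaptureRequest.Builder builder) {
        int[] modes = chars.get(android.hardware.camera2.CameraCharacteristics
                .STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES);
        int best = android.hardware.camera2.CameraMetadata.STATISTICS_FACE_DETECT_MODE_OFF;
        if (modes != null) {
            for (int m : modes) {
                best = Math.max(best, m);
            }
        }
        builder.set(android.hardware.camera2.CaptureRequest.STATISTICS_FACE_DETECT_MODE, best);
    }

The detected faces are then available in each result via android.statistics.faces (android.hardware.camera2.CaptureResult#STATISTICS_FACES in the Java API).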
8538 </details> 8539 </entry> 8540 <entry name="histogramBucketCount" type="int32"> 8541 <description>Number of histogram buckets 8542 supported</description> 8543 <range>&gt;= 64</range> 8544 <tag id="FUTURE" /> 8545 </entry> 8546 <entry name="maxFaceCount" type="int32" visibility="public" hwlevel="legacy"> 8547 <description>The maximum number of simultaneously detectable 8548 faces.</description> 8549 <range>0 for cameras without available face detection; otherwise: 8550 `>=4` for LIMITED or FULL hwlevel devices or 8551 `>0` for LEGACY devices.</range> 8552 <tag id="BC" /> 8553 </entry> 8554 <entry name="maxHistogramCount" type="int32"> 8555 <description>Maximum value possible for a histogram 8556 bucket</description> 8557 <tag id="FUTURE" /> 8558 </entry> 8559 <entry name="maxSharpnessMapValue" type="int32"> 8560 <description>Maximum value possible for a sharpness map 8561 region.</description> 8562 <tag id="FUTURE" /> 8563 </entry> 8564 <entry name="sharpnessMapSize" type="int32" 8565 type_notes="width x height" container="array" typedef="size"> 8566 <array> 8567 <size>2</size> 8568 </array> 8569 <description>Dimensions of the sharpness 8570 map</description> 8571 <range>Must be at least 32 x 32</range> 8572 <tag id="FUTURE" /> 8573 </entry> 8574 <entry name="availableHotPixelMapModes" type="byte" visibility="public" 8575 type_notes="list of enums" container="array" typedef="boolean"> 8576 <array> 8577 <size>n</size> 8578 </array> 8579 <description> 8580 List of hot pixel map output modes for android.statistics.hotPixelMapMode that are 8581 supported by this camera device. 8582 </description> 8583 <range>Any value listed in android.statistics.hotPixelMapMode</range> 8584 <details> 8585 If no hotpixel map output is available for this camera device, this will contain only 8586 `false`. 8587 8588 ON is always supported on devices with the RAW capability. 8589 </details> 8590 <tag id="V1" /> 8591 <tag id="RAW" /> 8592 </entry> 8593 <entry name="availableLensShadingMapModes" type="byte" visibility="public" 8594 type_notes="list of enums" container="array" typedef="enumList"> 8595 <array> 8596 <size>n</size> 8597 </array> 8598 <description> 8599 List of lens shading map output modes for android.statistics.lensShadingMapMode that 8600 are supported by this camera device. 8601 </description> 8602 <range>Any value listed in android.statistics.lensShadingMapMode</range> 8603 <details> 8604 If no lens shading map output is available for this camera device, this key will 8605 contain only OFF. 8606 8607 ON is always supported on devices with the RAW capability. 8608 LEGACY mode devices will always only support OFF. 8609 </details> 8610 </entry> 8611 <entry name="availableOisDataModes" type="byte" visibility="public" 8612 type_notes="list of enums" container="array" typedef="enumList" hal_version="3.3"> 8613 <array> 8614 <size>n</size> 8615 </array> 8616 <description> 8617 List of OIS data output modes for android.statistics.oisDataMode that 8618 are supported by this camera device. 8619 </description> 8620 <range>Any value listed in android.statistics.oisDataMode</range> 8621 <details> 8622 If no OIS data output is available for this camera device, this key will 8623 contain only OFF. 
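A non-normative sketch of turning OIS data reporting on when the ON mode is listed (the helper name is hypothetical):

    // Sketch: enable OIS sample reporting when android.statistics.oisDataMode ON is listed.
    static void enableOisData(
            android.hardware.camera2.CameraCharacteristics chars,
            android.hardware.camera2.CaptureRequest.Builder builder) {
        int[] modes = chars.get(android.hardware.camera2.CameraCharacteristics
                .STATISTICS_INFO_AVAILABLE_OIS_DATA_MODES);
        if (modes == null) {
            return;  // Key not advertised; OIS data is unavailable.
        }
        for (int m : modes) {
            if (m == android.hardware.camera2.CameraMetadata.STATISTICS_OIS_DATA_MODE_ON) {
                builder.set(
                        android.hardware.camera2.CaptureRequest.STATISTICS_OIS_DATA_MODE, m);
            }
        }
    }

The samples are then reported per frame in android.statistics.oisSamples (Java API) or the android.statistics.oisTimestamps, android.statistics.oisXShifts, and android.statistics.oisYShifts keys (NDK).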
8624 </details> 8625 </entry> 8626 </namespace> 8627 </static> 8628 <dynamic> 8629 <clone entry="android.statistics.faceDetectMode" 8630 kind="controls"></clone> 8631 <entry name="faceIds" type="int32" visibility="ndk_public" 8632 container="array" hwlevel="legacy"> 8633 <array> 8634 <size>n</size> 8635 </array> 8636 <description>List of unique IDs for detected faces.</description> 8637 <details> 8638 Each detected face is given a unique ID that is valid for as long as the face is visible 8639 to the camera device. A face that leaves the field of view and later returns may be 8640 assigned a new ID. 8641 8642 Only available if android.statistics.faceDetectMode == FULL</details> 8643 <tag id="BC" /> 8644 </entry> 8645 <entry name="faceLandmarks" type="int32" visibility="ndk_public" 8646 type_notes="(leftEyeX, leftEyeY, rightEyeX, rightEyeY, mouthX, mouthY)" 8647 container="array" hwlevel="legacy"> 8648 <array> 8649 <size>n</size> 8650 <size>6</size> 8651 </array> 8652 <description>List of landmarks for detected 8653 faces.</description> 8654 <details> 8655 For devices not supporting android.distortionCorrection.mode control, the coordinate 8656 system always follows that of android.sensor.info.activeArraySize, with `(0, 0)` being 8657 the top-left pixel of the active array. 8658 8659 For devices supporting android.distortionCorrection.mode control, the coordinate 8660 system depends on the mode being set. 8661 When the distortion correction mode is OFF, the coordinate system follows 8662 android.sensor.info.preCorrectionActiveArraySize, with 8663 `(0, 0)` being the top-left pixel of the pre-correction active array. 8664 When the distortion correction mode is not OFF, the coordinate system follows 8665 android.sensor.info.activeArraySize, with 8666 `(0, 0)` being the top-left pixel of the active array. 8667 8668 Only available if android.statistics.faceDetectMode == FULL</details> 8669 <hal_details> 8670 HAL must always report face landmarks in the coordinate system of pre-correction 8671 active array. 8672 </hal_details> 8673 <tag id="BC" /> 8674 </entry> 8675 <entry name="faceRectangles" type="int32" visibility="ndk_public" 8676 type_notes="(xmin, ymin, xmax, ymax). (0,0) is top-left of active pixel area" 8677 container="array" typedef="rectangle" hwlevel="legacy"> 8678 <array> 8679 <size>n</size> 8680 <size>4</size> 8681 </array> 8682 <description>List of the bounding rectangles for detected 8683 faces.</description> 8684 <details> 8685 For devices not supporting android.distortionCorrection.mode control, the coordinate 8686 system always follows that of android.sensor.info.activeArraySize, with `(0, 0)` being 8687 the top-left pixel of the active array. 8688 8689 For devices supporting android.distortionCorrection.mode control, the coordinate 8690 system depends on the mode being set. 8691 When the distortion correction mode is OFF, the coordinate system follows 8692 android.sensor.info.preCorrectionActiveArraySize, with 8693 `(0, 0)` being the top-left pixel of the pre-correction active array. 8694 When the distortion correction mode is not OFF, the coordinate system follows 8695 android.sensor.info.activeArraySize, with 8696 `(0, 0)` being the top-left pixel of the active array. 8697 8698 Only available if android.statistics.faceDetectMode != OFF</details> 8699 <ndk_details> 8700 The data representation is `int[4]`, which maps to `(left, top, right, bottom)`. 
8701 </ndk_details> 8702 <hal_details> 8703 HAL must always report face rectangles in the coordinate system of pre-correction 8704 active array. 8705 </hal_details> 8706 <tag id="BC" /> 8707 </entry> 8708 <entry name="faceScores" type="byte" visibility="ndk_public" 8709 container="array" hwlevel="legacy"> 8710 <array> 8711 <size>n</size> 8712 </array> 8713 <description>List of the face confidence scores for 8714 detected faces</description> 8715 <range>1-100</range> 8716 <details>Only available if android.statistics.faceDetectMode != OFF. 8717 </details> 8718 <hal_details> 8719 The value should be meaningful (for example, setting 100 at 8720 all times is illegal).</hal_details> 8721 <tag id="BC" /> 8722 </entry> 8723 <entry name="faces" type="int32" visibility="java_public" synthetic="true" 8724 container="array" typedef="face" hwlevel="legacy"> 8725 <array> 8726 <size>n</size> 8727 </array> 8728 <description>List of the faces detected through camera face detection 8729 in this capture.</description> 8730 <details> 8731 Only available if android.statistics.faceDetectMode `!=` OFF. 8732 </details> 8733 </entry> 8734 <entry name="histogram" type="int32" 8735 type_notes="count of pixels for each color channel that fall into each histogram bucket, scaled to be between 0 and maxHistogramCount" 8736 container="array"> 8737 <array> 8738 <size>n</size> 8739 <size>3</size> 8740 </array> 8741 <description>A 3-channel histogram based on the raw 8742 sensor data</description> 8743 <details>The k'th bucket (0-based) covers the input range 8744 (with w = android.sensor.info.whiteLevel) of [ k * w/N, 8745 (k + 1) * w / N ). If only a monochrome sharpness map is 8746 supported, all channels should have the same data</details> 8747 <tag id="FUTURE" /> 8748 </entry> 8749 <clone entry="android.statistics.histogramMode" 8750 kind="controls"></clone> 8751 <entry name="sharpnessMap" type="int32" 8752 type_notes="estimated sharpness for each region of the input image. Normalized to be between 0 and maxSharpnessMapValue. Higher values mean sharper (better focused)" 8753 container="array"> 8754 <array> 8755 <size>n</size> 8756 <size>m</size> 8757 <size>3</size> 8758 </array> 8759 <description>A 3-channel sharpness map, based on the raw 8760 sensor data</description> 8761 <details>If only a monochrome sharpness map is supported, 8762 all channels should have the same data</details> 8763 <tag id="FUTURE" /> 8764 </entry> 8765 <clone entry="android.statistics.sharpnessMapMode" 8766 kind="controls"></clone> 8767 <entry name="lensShadingCorrectionMap" type="byte" visibility="java_public" 8768 typedef="lensShadingMap" hwlevel="full"> 8769 <description>The shading map is a low-resolution floating-point map 8770 that lists the coefficients used to correct for vignetting, for each 8771 Bayer color channel.</description> 8772 <range>Each gain factor is &gt;= 1</range> 8773 <details> 8774 The map provided here is the same map that is used by the camera device to 8775 correct both color shading and vignetting for output non-RAW images. 
8776 8777 When there is no lens shading correction applied to RAW 8778 output images (android.sensor.info.lensShadingApplied `==` 8779 false), this map is the complete lens shading correction 8780 map; when there is some lens shading correction applied to 8781 the RAW output image (android.sensor.info.lensShadingApplied 8782 `==` true), this map reports the remaining lens shading 8783 correction map that needs to be applied to get shading 8784 corrected images that match the camera device's output for 8785 non-RAW formats. 8786 8787 For a complete shading correction map, the least shaded 8788 section of the image will have a gain factor of 1; all 8789 other sections will have gains above 1. 8790 8791 When android.colorCorrection.mode = TRANSFORM_MATRIX, the map 8792 will take into account the colorCorrection settings. 8793 8794 The shading map is for the entire active pixel array, and is not 8795 affected by the crop region specified in the request. Each shading map 8796 entry is the value of the shading compensation map over a specific 8797 pixel on the sensor. Specifically, with a (N x M) resolution shading 8798 map, and an active pixel array size (W x H), shading map entry 8799 (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at 8800 pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels. 8801 The map is assumed to be bilinearly interpolated between the sample points. 8802 8803 The channel order is [R, Geven, Godd, B], where Geven is the green 8804 channel for the even rows of a Bayer pattern, and Godd is the odd rows. 8805 The shading map is stored in a fully interleaved format. 8806 8807 The shading map will generally have on the order of 30-40 rows and columns, 8808 and will be smaller than 64x64. 8809 8810 As an example, given a very small map defined as: 8811 8812 width,height = [ 4, 3 ] 8813 values = 8814 [ 1.3, 1.2, 1.15, 1.2, 1.2, 1.2, 1.15, 1.2, 8815 1.1, 1.2, 1.2, 1.2, 1.3, 1.2, 1.3, 1.3, 8816 1.2, 1.2, 1.25, 1.1, 1.1, 1.1, 1.1, 1.0, 8817 1.0, 1.0, 1.0, 1.0, 1.2, 1.3, 1.25, 1.2, 8818 1.3, 1.2, 1.2, 1.3, 1.2, 1.15, 1.1, 1.2, 8819 1.2, 1.1, 1.0, 1.2, 1.3, 1.15, 1.2, 1.3 ] 8820 8821 The low-resolution scaling map images for each channel are 8822 (displayed using nearest-neighbor interpolation): 8823 8824 ![Red lens shading map](android.statistics.lensShadingMap/red_shading.png) 8825 ![Green (even rows) lens shading map](android.statistics.lensShadingMap/green_e_shading.png) 8826 ![Green (odd rows) lens shading map](android.statistics.lensShadingMap/green_o_shading.png) 8827 ![Blue lens shading map](android.statistics.lensShadingMap/blue_shading.png) 8828 8829 As a visualization only, inverting the full-color map to recover an 8830 image of a gray wall (using bicubic interpolation for visual quality) as captured by the sensor gives: 8831 8832 ![Image of a uniform white wall (inverse shading map)](android.statistics.lensShadingMap/inv_shading.png) 8833 8834 For a MONOCHROME camera, all of the 2x2 channels must have the same values. 
An example 8835 shading map for such a camera is defined as: 8836 8837 android.lens.info.shadingMapSize = [ 4, 3 ] 8838 android.statistics.lensShadingMap = 8839 [ 1.3, 1.3, 1.3, 1.3, 1.2, 1.2, 1.2, 1.2, 8840 1.1, 1.1, 1.1, 1.1, 1.3, 1.3, 1.3, 1.3, 8841 1.2, 1.2, 1.2, 1.2, 1.1, 1.1, 1.1, 1.1, 8842 1.0, 1.0, 1.0, 1.0, 1.2, 1.2, 1.2, 1.2, 8843 1.3, 1.3, 1.3, 1.3, 1.2, 1.2, 1.2, 1.2, 8844 1.2, 1.2, 1.2, 1.2, 1.3, 1.3, 1.3, 1.3 ] 8845 8846 </details> 8847 </entry> 8848 <entry name="lensShadingMap" type="float" visibility="ndk_public" 8849 type_notes="2D array of float gain factors per channel to correct lens shading" 8850 container="array" hwlevel="full"> 8851 <array> 8852 <size>4</size> 8853 <size>n</size> 8854 <size>m</size> 8855 </array> 8856 <description>The shading map is a low-resolution floating-point map 8857 that lists the coefficients used to correct for vignetting and color shading, 8858 for each Bayer color channel of RAW image data.</description> 8859 <range>Each gain factor is &gt;= 1</range> 8860 <details> 8861 The map provided here is the same map that is used by the camera device to 8862 correct both color shading and vignetting for output non-RAW images. 8863 8864 When there is no lens shading correction applied to RAW 8865 output images (android.sensor.info.lensShadingApplied `==` 8866 false), this map is the complete lens shading correction 8867 map; when there is some lens shading correction applied to 8868 the RAW output image (android.sensor.info.lensShadingApplied 8869 `==` true), this map reports the remaining lens shading 8870 correction map that needs to be applied to get shading 8871 corrected images that match the camera device's output for 8872 non-RAW formats. 8873 8874 For a complete shading correction map, the least shaded 8875 section of the image will have a gain factor of 1; all 8876 other sections will have gains above 1. 8877 8878 When android.colorCorrection.mode = TRANSFORM_MATRIX, the map 8879 will take into account the colorCorrection settings. 8880 8881 The shading map is for the entire active pixel array, and is not 8882 affected by the crop region specified in the request. Each shading map 8883 entry is the value of the shading compensation map over a specific 8884 pixel on the sensor. Specifically, with a (N x M) resolution shading 8885 map, and an active pixel array size (W x H), shading map entry 8886 (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at 8887 pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels. 8888 The map is assumed to be bilinearly interpolated between the sample points. 8889 8890 For a Bayer camera, the channel order is [R, Geven, Godd, B], where Geven is 8891 the green channel for the even rows of a Bayer pattern, and Godd is the odd rows. 8892 The shading map is stored in a fully interleaved format, and its size 8893 is provided in the camera static metadata by android.lens.info.shadingMapSize. 8894 8895 The shading map will generally have on the order of 30-40 rows and columns, 8896 and will be smaller than 64x64. 
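A non-normative sketch of sampling the gain for one channel at an active-array pixel, following the mapping just described (the helper name and the flat indexing into the interleaved array are assumptions of this sketch):

    // Sketch: bilinearly sample the gain of channel ch (0=R, 1=Geven, 2=Godd, 3=B)
    // at active-array pixel (px, py) from an interleaved n-column by m-row map.
    static float sampleShadingGain(float[] map, int n, int m, int ch,
            float px, float py, int activeW, int activeH) {
        // Convert the pixel position into shading map sample coordinates.
        float x = px * (n - 1) / (activeW - 1);
        float y = py * (m - 1) / (activeH - 1);
        int x0 = (int) Math.floor(x);
        int y0 = (int) Math.floor(y);
        int x1 = Math.min(x0 + 1, n - 1);
        int y1 = Math.min(y0 + 1, m - 1);
        float fx = x - x0;
        float fy = y - y0;
        float g00 = map[(y0 * n + x0) * 4 + ch];
        float g10 = map[(y0 * n + x1) * 4 + ch];
        float g01 = map[(y1 * n + x0) * 4 + ch];
        float g11 = map[(y1 * n + x1) * 4 + ch];
        float top = g00 + (g10 - g00) * fx;
        float bottom = g01 + (g11 - g01) * fx;
        return top + (bottom - top) * fy;
    }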
8897 8898 As an example, given a very small map for a Bayer camera defined as: 8899 8900 android.lens.info.shadingMapSize = [ 4, 3 ] 8901 android.statistics.lensShadingMap = 8902 [ 1.3, 1.2, 1.15, 1.2, 1.2, 1.2, 1.15, 1.2, 8903 1.1, 1.2, 1.2, 1.2, 1.3, 1.2, 1.3, 1.3, 8904 1.2, 1.2, 1.25, 1.1, 1.1, 1.1, 1.1, 1.0, 8905 1.0, 1.0, 1.0, 1.0, 1.2, 1.3, 1.25, 1.2, 8906 1.3, 1.2, 1.2, 1.3, 1.2, 1.15, 1.1, 1.2, 8907 1.2, 1.1, 1.0, 1.2, 1.3, 1.15, 1.2, 1.3 ] 8908 8909 The low-resolution scaling map images for each channel are 8910 (displayed using nearest-neighbor interpolation): 8911 8912 ![Red lens shading map](android.statistics.lensShadingMap/red_shading.png) 8913 ![Green (even rows) lens shading map](android.statistics.lensShadingMap/green_e_shading.png) 8914 ![Green (odd rows) lens shading map](android.statistics.lensShadingMap/green_o_shading.png) 8915 ![Blue lens shading map](android.statistics.lensShadingMap/blue_shading.png) 8916 8917 As a visualization only, inverting the full-color map to recover an 8918 image of a gray wall (using bicubic interpolation for visual quality) 8919 as captured by the sensor gives: 8920 8921 ![Image of a uniform white wall (inverse shading map)](android.statistics.lensShadingMap/inv_shading.png) 8922 8923 For a MONOCHROME camera, all of the 2x2 channels must have the same values. An example 8924 shading map for such a camera is defined as: 8925 8926 android.lens.info.shadingMapSize = [ 4, 3 ] 8927 android.statistics.lensShadingMap = 8928 [ 1.3, 1.3, 1.3, 1.3, 1.2, 1.2, 1.2, 1.2, 8929 1.1, 1.1, 1.1, 1.1, 1.3, 1.3, 1.3, 1.3, 8930 1.2, 1.2, 1.2, 1.2, 1.1, 1.1, 1.1, 1.1, 8931 1.0, 1.0, 1.0, 1.0, 1.2, 1.2, 1.2, 1.2, 8932 1.3, 1.3, 1.3, 1.3, 1.2, 1.2, 1.2, 1.2, 8933 1.2, 1.2, 1.2, 1.2, 1.3, 1.3, 1.3, 1.3 ] 8934 8935 Note that the RAW image data might be subject to lens shading 8936 correction not reported on this map. Query 8937 android.sensor.info.lensShadingApplied to see if RAW image data has been subject 8938 to lens shading correction. If android.sensor.info.lensShadingApplied 8939 is TRUE, the RAW image data is subject to partial or full lens shading 8940 correction. In the case that full lens shading correction is applied to RAW 8941 images, the gain factor map reported in this key will contain all 1.0 gains. 8942 In other words, the map reported in this key is the remaining lens shading 8943 that needs to be applied on the RAW image to get images without lens shading 8944 artifacts. See android.request.maxNumOutputRaw for a list of RAW image 8945 formats. 8946 </details> 8947 <hal_details> 8948 The lens shading map calculation may depend on exposure and white balance statistics. 8949 When AE and AWB are in AUTO modes 8950 (android.control.aeMode `!=` OFF and android.control.awbMode `!=` OFF), the HAL 8951 may have all the information it needs to generate the most accurate lens shading map. When 8952 AE or AWB are in manual mode 8953 (android.control.aeMode `==` OFF or android.control.awbMode `==` OFF), the shading map 8954 may be adversely impacted by manual exposure or white balance parameters. To avoid 8955 generating unreliable shading map data, the HAL may choose to lock the shading map with 8956 the latest known good map generated when the AE and AWB are in AUTO modes.
8957 </hal_details> 8958 </entry> 8959 <entry name="predictedColorGains" type="float" 8960 visibility="hidden" 8961 deprecated="true" 8962 optional="true" 8963 type_notes="A 1D array of floats for 4 color channel gains" 8964 container="array"> 8965 <array> 8966 <size>4</size> 8967 </array> 8968 <description>The best-fit color channel gains calculated 8969 by the camera device's statistics units for the current output frame. 8970 </description> 8971 <deprecation_description> 8972 Never fully implemented or specified; do not use 8973 </deprecation_description> 8974 <details> 8975 This may be different than the gains used for this frame, 8976 since statistics processing on data from a new frame 8977 typically completes after the transform has already been 8978 applied to that frame. 8979 8980 The 4 channel gains are defined in Bayer domain, 8981 see android.colorCorrection.gains for details. 8982 8983 This value should always be calculated by the auto-white balance (AWB) block, 8984 regardless of the android.control.* current values. 8985 </details> 8986 </entry> 8987 <entry name="predictedColorTransform" type="rational" 8988 visibility="hidden" 8989 deprecated="true" 8990 optional="true" 8991 type_notes="3x3 rational matrix in row-major order" 8992 container="array"> 8993 <array> 8994 <size>3</size> 8995 <size>3</size> 8996 </array> 8997 <description>The best-fit color transform matrix estimate 8998 calculated by the camera device's statistics units for the current 8999 output frame.</description> 9000 <deprecation_description> 9001 Never fully implemented or specified; do not use 9002 </deprecation_description> 9003 <details>The camera device will provide the estimate from its 9004 statistics unit on the white balance transforms to use 9005 for the next frame. These are the values the camera device believes 9006 are the best fit for the current output frame. This may 9007 be different than the transform used for this frame, since 9008 statistics processing on data from a new frame typically 9009 completes after the transform has already been applied to 9010 that frame. 9011 9012 These estimates must be provided for all frames, even if 9013 capture settings and color transforms are set by the application. 9014 9015 This value should always be calculated by the auto-white balance (AWB) block, 9016 regardless of the android.control.* current values. 9017 </details> 9018 </entry> 9019 <entry name="sceneFlicker" type="byte" visibility="public" enum="true" 9020 hwlevel="full"> 9021 <enum> 9022 <value>NONE 9023 <notes>The camera device does not detect any flickering illumination 9024 in the current scene.</notes></value> 9025 <value>50HZ 9026 <notes>The camera device detects illumination flickering at 50Hz 9027 in the current scene.</notes></value> 9028 <value>60HZ 9029 <notes>The camera device detects illumination flickering at 60Hz 9030 in the current scene.</notes></value> 9031 </enum> 9032 <description>The camera device estimated scene illumination lighting 9033 frequency.</description> 9034 <details> 9035 Many light sources, such as most fluorescent lights, flicker at a rate 9036 that depends on the local utility power standards. This flicker must be 9037 accounted for by auto-exposure routines to avoid artifacts in captured images. 9038 The camera device uses this entry to tell the application what the scene 9039 illuminant frequency is. 
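As a non-normative illustration of how this field can be used (the following paragraph describes when this matters), an application doing its own exposure control might snap its chosen exposure time to a whole number of flicker periods; note that the illumination flickers at twice the mains frequency:

    // Sketch: round an exposure time to a whole number of flicker periods.
    // Forces at least one full period, which may exceed the requested time.
    static long antiBandingExposureNs(long desiredExposureNs, int sceneFlicker) {
        long periodNs;
        switch (sceneFlicker) {
            case android.hardware.camera2.CameraMetadata.STATISTICS_SCENE_FLICKER_50HZ:
                periodNs = 1000000000L / 100;  // 10 ms
                break;
            case android.hardware.camera2.CameraMetadata.STATISTICS_SCENE_FLICKER_60HZ:
                periodNs = 1000000000L / 120;  // ~8.33 ms
                break;
            default:
                return desiredExposureNs;  // NONE: no adjustment needed.
        }
        long periods = Math.max(1L, desiredExposureNs / periodNs);
        return periods * periodNs;
    }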
9040 9041 When manual exposure control is enabled 9042 (`android.control.aeMode == OFF` or `android.control.mode == 9043 OFF`), the android.control.aeAntibandingMode doesn't perform 9044 antibanding, and the application can ensure it selects 9045 exposure times that do not cause banding issues by looking 9046 into this metadata field. See 9047 android.control.aeAntibandingMode for more details. 9048 9049 Reports NONE if there doesn't appear to be flickering illumination. 9050 </details> 9051 </entry> 9052 <clone entry="android.statistics.hotPixelMapMode" kind="controls"> 9053 </clone> 9054 <entry name="hotPixelMap" type="int32" visibility="public" 9055 type_notes="list of coordinates based on android.sensor.pixelArraySize" 9056 container="array" typedef="point"> 9057 <array> 9058 <size>2</size> 9059 <size>n</size> 9060 </array> 9061 <description> 9062 List of `(x, y)` coordinates of hot/defective pixels on the sensor. 9063 </description> 9064 <range> 9065 n <= number of pixels on the sensor. 9066 The `(x, y)` coordinates must be bounded by 9067 android.sensor.info.pixelArraySize. 9068 </range> 9069 <details> 9070 A coordinate `(x, y)` must lie between `(0, 0)`, and 9071 `(width - 1, height - 1)` (inclusive), which are the top-left and 9072 bottom-right of the pixel array, respectively. The width and 9073 height dimensions are given in android.sensor.info.pixelArraySize. 9074 This may include hot pixels that lie outside of the active array 9075 bounds given by android.sensor.info.activeArraySize. 9076 </details> 9077 <hal_details> 9078 A hotpixel map contains the coordinates of pixels on the camera 9079 sensor that do not report valid values (usually due to defects in 9080 the camera sensor). This includes pixels that are stuck at certain 9081 values, or have a response that does not accurately encode the 9082 incoming light from the scene. 9083 9084 To avoid performance issues, there should be significantly fewer hot 9085 pixels than actual pixels on the camera sensor. 9086 </hal_details> 9087 <tag id="V1" /> 9088 <tag id="RAW" /> 9089 </entry> 9090 </dynamic> 9091 <controls> 9092 <entry name="lensShadingMapMode" type="byte" visibility="public" enum="true" hwlevel="full"> 9093 <enum> 9094 <value>OFF 9095 <notes>Do not include a lens shading map in the capture result.</notes></value> 9096 <value>ON 9097 <notes>Include a lens shading map in the capture result.</notes></value> 9098 </enum> 9099 <description>Whether the camera device will output the lens 9100 shading map in output result metadata.</description> 9101 <range>android.statistics.info.availableLensShadingMapModes</range> 9102 <details>When set to ON, 9103 android.statistics.lensShadingMap will be provided in 9104 the output result metadata. 9105 9106 ON is always supported on devices with the RAW capability. 9107 </details> 9108 <tag id="RAW" /> 9109 </entry> 9110 </controls> 9111 <dynamic> 9112 <clone entry="android.statistics.lensShadingMapMode" kind="controls"> 9113 </clone> 9114 </dynamic> 9115 <controls> 9116 <entry name="oisDataMode" type="byte" visibility="public" enum="true" hal_version="3.3"> 9117 <enum> 9118 <value>OFF 9119 <notes>Do not include OIS data in the capture result.</notes></value> 9120 <value>ON 9121 <notes>Include OIS data in the capture result.</notes> 9122 <sdk_notes>android.statistics.oisSamples provides OIS sample data in the 9123 output result metadata.
9124 </sdk_notes> 9125 <ndk_notes>android.statistics.oisTimestamps, android.statistics.oisXShifts, 9126 and android.statistics.oisYShifts provide OIS data in the output result metadata. 9127 </ndk_notes> 9128 </value> 9129 </enum> 9130 <description>A control for selecting whether optical stabilization (OIS) position 9131 information is included in output result metadata.</description> 9132 <range>android.statistics.info.availableOisDataModes</range> 9133 <details> 9134 Since optical image stabilization generally involves motion much faster than the duration 9135 of individual image exposure, multiple OIS samples can be included for a single capture 9136 result. For example, if the OIS reporting operates at 200 Hz, a typical camera operating 9137 at 30fps may have 6-7 OIS samples per capture result. This information can be combined 9138 with the rolling shutter skew to account for lens motion during image exposure in 9139 post-processing algorithms. 9140 </details> 9141 </entry> 9142 </controls> 9143 <dynamic> 9144 <clone entry="android.statistics.oisDataMode" kind="controls"> 9145 </clone> 9146 <entry name="oisTimestamps" type="int64" visibility="ndk_public" container="array" hal_version="3.3"> 9147 <array> 9148 <size>n</size> 9149 </array> 9150 <description> 9151 An array of timestamps of OIS samples, in nanoseconds. 9152 </description> 9153 <units>nanoseconds</units> 9154 <details> 9155 The array contains the timestamps of OIS samples. The timestamps are in the same 9156 timebase as and comparable to android.sensor.timestamp. 9157 </details> 9158 </entry> 9159 <entry name="oisXShifts" type="float" visibility="ndk_public" container="array" hal_version="3.3"> 9160 <array> 9161 <size>n</size> 9162 </array> 9163 <description> 9164 An array of shifts of OIS samples, in x direction. 9165 </description> 9166 <units>Pixels in active array.</units> 9167 <details> 9168 The array contains the amount of shifts in x direction, in pixels, based on OIS samples. 9169 A positive value is a shift from left to right in the pre-correction active array 9170 coordinate system. For example, if the optical center is (1000, 500) in pre-correction 9171 active array coordinates, a shift of (3, 0) puts the new optical center at (1003, 500). 9172 9173 The number of shifts must match the number of timestamps in 9174 android.statistics.oisTimestamps. 9175 9176 The OIS samples are not affected by whether lens distortion correction is enabled (on 9177 supporting devices). They are always reported in pre-correction active array coordinates, 9178 since the scaling of OIS shifts would depend on the specific spot on the sensor the shift 9179 is needed. 9180 </details> 9181 </entry> 9182 <entry name="oisYShifts" type="float" visibility="ndk_public" container="array" hal_version="3.3"> 9183 <array> 9184 <size>n</size> 9185 </array> 9186 <description> 9187 An array of shifts of OIS samples, in y direction. 9188 </description> 9189 <units>Pixels in active array.</units> 9190 <details> 9191 The array contains the amount of shifts in y direction, in pixels, based on OIS samples. 9192 A positive value is a shift from top to bottom in pre-correction active array coordinate 9193 system. For example, if the optical center is (1000, 500) in active array coordinates, a 9194 shift of (0, 5) puts the new optical center at (1000, 505). 9195 9196 The number of shifts must match the number of timestamps in 9197 android.statistics.oisTimestamps.
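As a point of reference only, in the Java SDK the same data is surfaced through
android.statistics.oisSamples; a minimal sketch of reading it (assuming a
TotalCaptureResult named `result`):

    OisSample[] samples = result.get(CaptureResult.STATISTICS_OIS_SAMPLES);
    if (samples != null) {
        for (OisSample s : samples) {
            long tsNs = s.getTimestamp();  // same timebase as android.sensor.timestamp
            float dx = s.getXshift();      // pixels, pre-correction active array
            float dy = s.getYshift();
        }
    }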
9198 9199 The OIS samples are not affected by whether lens distortion correction is enabled (on 9200 supporting devices). They are always reported in pre-correction active array coordinates, 9201 since the scaling of OIS shifts would depend on the specific spot on the sensor the shift 9202 is needed. 9203 </details> 9204 </entry> 9205 <entry name="oisSamples" type="float" visibility="java_public" synthetic="true" 9206 container="array" typedef="oisSample" hal_version="3.3"> 9207 <array> 9208 <size>n</size> 9209 </array> 9210 <description> 9211 An array of optical stabilization (OIS) position samples. 9212 </description> 9213 <details> 9214 Each OIS sample contains the timestamp and the amount of shifts in x and y direction, 9215 in pixels, of the OIS sample. 9216 9217 A positive value for a shift in x direction is a shift from left to right in the 9218 pre-correction active array coordinate system. For example, if the optical center is 9219 (1000, 500) in pre-correction active array coordinates, a shift of (3, 0) puts the new 9220 optical center at (1003, 500). 9221 9222 A positive value for a shift in y direction is a shift from top to bottom in 9223 pre-correction active array coordinate system. For example, if the optical center is 9224 (1000, 500) in active array coordinates, a shift of (0, 5) puts the new optical center at 9225 (1000, 505). 9226 9227 The OIS samples are not affected by whether lens distortion correction is enabled (on 9228 supporting devices). They are always reported in pre-correction active array coordinates, 9229 since the scaling of OIS shifts would depend on the specific spot on the sensor the shift 9230 is needed. 9231 </details> 9232 </entry> 9233 </dynamic> 9234 </section> 9235 <section name="tonemap"> 9236 <controls> 9237 <entry name="curveBlue" type="float" visibility="ndk_public" 9238 type_notes="1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints." 9239 container="array" hwlevel="full"> 9240 <array> 9241 <size>n</size> 9242 <size>2</size> 9243 </array> 9244 <description>Tonemapping / contrast / gamma curve for the blue 9245 channel, to use when android.tonemap.mode is 9246 CONTRAST_CURVE.</description> 9247 <details>See android.tonemap.curveRed for more details.</details> 9248 </entry> 9249 <entry name="curveGreen" type="float" visibility="ndk_public" 9250 type_notes="1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints." 9251 container="array" hwlevel="full"> 9252 <array> 9253 <size>n</size> 9254 <size>2</size> 9255 </array> 9256 <description>Tonemapping / contrast / gamma curve for the green 9257 channel, to use when android.tonemap.mode is 9258 CONTRAST_CURVE.</description> 9259 <details>See android.tonemap.curveRed for more details.</details> 9260 </entry> 9261 <entry name="curveRed" type="float" visibility="ndk_public" 9262 type_notes="1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints." 9263 container="array" hwlevel="full"> 9264 <array> 9265 <size>n</size> 9266 <size>2</size> 9267 </array> 9268 <description>Tonemapping / contrast / gamma curve for the red 9269 channel, to use when android.tonemap.mode is 9270 CONTRAST_CURVE.</description> 9271 <range>0-1 on both input and output coordinates, normalized 9272 as a floating-point value such that 0 == black and 1 == white. 
9273 </range> 9274 <details> 9275 Each channel's curve is defined by an array of control points: 9276 9277 android.tonemap.curveRed = 9278 [ P0in, P0out, P1in, P1out, P2in, P2out, P3in, P3out, ..., PNin, PNout ] 9279 2 <= N <= android.tonemap.maxCurvePoints 9280 9281 These are sorted in order of increasing `Pin`; it is 9282 required that input values 0.0 and 1.0 are included in the list to 9283 define a complete mapping. For input values between control points, 9284 the camera device must linearly interpolate between the control 9285 points. 9286 9287 Each curve can have an independent number of points, and the number 9288 of points can be less than max (that is, the request doesn't have to 9289 always provide a curve with a number of points equivalent to 9290 android.tonemap.maxCurvePoints). 9291 9292 For devices with MONOCHROME capability, all three channels must have the same set of 9293 control points. 9294 9295 A few examples, and their corresponding graphical mappings; these 9296 only specify the red channel and the precision is limited to 4 9297 digits, for conciseness. 9298 9299 Linear mapping: 9300 9301 android.tonemap.curveRed = [ 0, 0, 1.0, 1.0 ] 9302 9303 ![Linear mapping curve](android.tonemap.curveRed/linear_tonemap.png) 9304 9305 Invert mapping: 9306 9307 android.tonemap.curveRed = [ 0, 1.0, 1.0, 0 ] 9308 9309 ![Inverting mapping curve](android.tonemap.curveRed/inverse_tonemap.png) 9310 9311 Gamma 1/2.2 mapping, with 16 control points: 9312 9313 android.tonemap.curveRed = [ 9314 0.0000, 0.0000, 0.0667, 0.2920, 0.1333, 0.4002, 0.2000, 0.4812, 9315 0.2667, 0.5484, 0.3333, 0.6069, 0.4000, 0.6594, 0.4667, 0.7072, 9316 0.5333, 0.7515, 0.6000, 0.7928, 0.6667, 0.8317, 0.7333, 0.8685, 9317 0.8000, 0.9035, 0.8667, 0.9370, 0.9333, 0.9691, 1.0000, 1.0000 ] 9318 9319 ![Gamma = 1/2.2 tonemapping curve](android.tonemap.curveRed/gamma_tonemap.png) 9320 9321 Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points: 9322 9323 android.tonemap.curveRed = [ 9324 0.0000, 0.0000, 0.0667, 0.2864, 0.1333, 0.4007, 0.2000, 0.4845, 9325 0.2667, 0.5532, 0.3333, 0.6125, 0.4000, 0.6652, 0.4667, 0.7130, 9326 0.5333, 0.7569, 0.6000, 0.7977, 0.6667, 0.8360, 0.7333, 0.8721, 9327 0.8000, 0.9063, 0.8667, 0.9389, 0.9333, 0.9701, 1.0000, 1.0000 ] 9328 9329 ![sRGB tonemapping curve](android.tonemap.curveRed/srgb_tonemap.png) 9330 </details> 9331 <hal_details> 9332 For good quality of mapping, at least 128 control points are 9333 preferred. 9334 9335 A typical use case of this would be a gamma-1/2.2 curve, with as many 9336 control points used as are available. 9337 </hal_details> 9338 </entry> 9339 <entry name="curve" type="float" visibility="java_public" synthetic="true" 9340 typedef="tonemapCurve" 9341 hwlevel="full"> 9342 <description>Tonemapping / contrast / gamma curve to use when android.tonemap.mode 9343 is CONTRAST_CURVE.</description> 9344 <details> 9345 The tonemapCurve consists of three curves, one each for the red, green, and blue 9346 channels. The following example uses the red channel; the same logic 9347 applies to the green and blue channels. 9348 Each channel's curve is defined by an array of control points: 9349 9350 curveRed = 9351 [ P0(in, out), P1(in, out), P2(in, out), P3(in, out), ..., PN(in, out) ] 9352 2 <= N <= android.tonemap.maxCurvePoints 9353 9354 These are sorted in order of increasing `Pin`; it is always 9355 guaranteed that input values 0.0 and 1.0 are included in the list to 9356 define a complete mapping.
For input values between control points, 9357 the camera device must linearly interpolate between the control 9358 points. 9359 9360 Each curve can have an independent number of points, and the number 9361 of points can be less than max (that is, the request doesn't have to 9362 always provide a curve with a number of points equivalent to 9363 android.tonemap.maxCurvePoints). 9364 9365 For devices with MONOCHROME capability, all three channels must have the same set of 9366 control points. 9367 9368 A few examples, and their corresponding graphical mappings; these 9369 only specify the red channel and the precision is limited to 4 9370 digits, for conciseness. 9371 9372 Linear mapping: 9373 9374 curveRed = [ (0, 0), (1.0, 1.0) ] 9375 9376 ![Linear mapping curve](android.tonemap.curveRed/linear_tonemap.png) 9377 9378 Invert mapping: 9379 9380 curveRed = [ (0, 1.0), (1.0, 0) ] 9381 9382 ![Inverting mapping curve](android.tonemap.curveRed/inverse_tonemap.png) 9383 9384 Gamma 1/2.2 mapping, with 16 control points: 9385 9386 curveRed = [ 9387 (0.0000, 0.0000), (0.0667, 0.2920), (0.1333, 0.4002), (0.2000, 0.4812), 9388 (0.2667, 0.5484), (0.3333, 0.6069), (0.4000, 0.6594), (0.4667, 0.7072), 9389 (0.5333, 0.7515), (0.6000, 0.7928), (0.6667, 0.8317), (0.7333, 0.8685), 9390 (0.8000, 0.9035), (0.8667, 0.9370), (0.9333, 0.9691), (1.0000, 1.0000) ] 9391 9392 ![Gamma = 1/2.2 tonemapping curve](android.tonemap.curveRed/gamma_tonemap.png) 9393 9394 Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points: 9395 9396 curveRed = [ 9397 (0.0000, 0.0000), (0.0667, 0.2864), (0.1333, 0.4007), (0.2000, 0.4845), 9398 (0.2667, 0.5532), (0.3333, 0.6125), (0.4000, 0.6652), (0.4667, 0.7130), 9399 (0.5333, 0.7569), (0.6000, 0.7977), (0.6667, 0.8360), (0.7333, 0.8721), 9400 (0.8000, 0.9063), (0.8667, 0.9389), (0.9333, 0.9701), (1.0000, 1.0000) ] 9401 9402 ![sRGB tonemapping curve](android.tonemap.curveRed/srgb_tonemap.png) 9403 </details> 9404 <hal_details> 9405 This entry is created by the framework from the curveRed, curveGreen and 9406 curveBlue entries. 9407 </hal_details> 9408 </entry> 9409 <entry name="mode" type="byte" visibility="public" enum="true" 9410 hwlevel="full"> 9411 <enum> 9412 <value>CONTRAST_CURVE 9413 <notes>Use the tone mapping curve specified in 9414 the android.tonemap.curve* entries. 9415 9416 All color enhancement and tonemapping must be disabled, except 9417 for applying the tonemapping curve specified by 9418 android.tonemap.curve. 9419 9420 Must not slow down frame rate relative to raw 9421 sensor output. 9422 </notes> 9423 </value> 9424 <value>FAST 9425 <notes> 9426 Advanced gamma mapping and color enhancement may be applied, without 9427 reducing frame rate compared to raw sensor output. 9428 </notes> 9429 </value> 9430 <value>HIGH_QUALITY 9431 <notes> 9432 High-quality gamma mapping and color enhancement will be applied, at 9433 the cost of possibly reduced frame rate compared to raw sensor output. 9434 </notes> 9435 </value> 9436 <value>GAMMA_VALUE 9437 <notes> 9438 Use the gamma value specified in android.tonemap.gamma to perform 9439 tonemapping. 9440 9441 All color enhancement and tonemapping must be disabled, except 9442 for applying the tonemapping curve specified by android.tonemap.gamma. 9443 9444 Must not slow down frame rate relative to raw sensor output. 9445 </notes> 9446 </value> 9447 <value>PRESET_CURVE 9448 <notes> 9449 Use the preset tonemapping curve specified in 9450 android.tonemap.presetCurve to perform tonemapping.
9451 9452 All color enhancement and tonemapping must be disabled, except 9453 for applying the tonemapping curve specified by 9454 android.tonemap.presetCurve. 9455 9456 Must not slow down frame rate relative to raw sensor output. 9457 </notes> 9458 </value> 9459 </enum> 9460 <description>High-level global contrast/gamma/tonemapping control. 9461 </description> 9462 <range>android.tonemap.availableToneMapModes</range> 9463 <details> 9464 When switching to an application-defined contrast curve by setting 9465 android.tonemap.mode to CONTRAST_CURVE, the curve is defined 9466 per-channel with a set of `(in, out)` points that specify the 9467 mapping from input high-bit-depth pixel value to the output 9468 low-bit-depth value. Since the actual pixel ranges of both input 9469 and output may change depending on the camera pipeline, the values 9470 are specified by normalized floating-point numbers. 9471 9472 More-complex color mapping operations such as 3D color look-up 9473 tables, selective chroma enhancement, or other non-linear color 9474 transforms will be disabled when android.tonemap.mode is 9475 CONTRAST_CURVE. 9476 9477 When using either FAST or HIGH_QUALITY, the camera device will 9478 emit its own tonemap curve in android.tonemap.curve. 9479 These values are always available, and as close as possible to the 9480 actually used nonlinear/nonglobal transforms. 9481 9482 If a request is sent with CONTRAST_CURVE with the camera device's 9483 provided curve in FAST or HIGH_QUALITY, the image's tonemap will be 9484 roughly the same.</details> 9485 </entry> 9486 </controls> 9487 <static> 9488 <entry name="maxCurvePoints" type="int32" visibility="public" 9489 hwlevel="full"> 9490 <description>Maximum number of supported points in the 9491 tonemap curve that can be used for android.tonemap.curve. 9492 </description> 9493 <details> 9494 If the actual number of points provided by the application (in android.tonemap.curve*) is 9495 less than this maximum, the camera device will resample the curve to its internal 9496 representation, using linear interpolation. 9497 9498 The output curves in the result metadata may have a different number 9499 of points than the input curves, and will represent the actual 9500 hardware curves used as closely as possible when linearly interpolated. 9501 </details> 9502 <hal_details> 9503 This value must be at least 64. This should be at least 128. 9504 </hal_details> 9505 </entry> 9506 <entry name="availableToneMapModes" type="byte" visibility="public" 9507 type_notes="list of enums" container="array" typedef="enumList" hwlevel="full"> 9508 <array> 9509 <size>n</size> 9510 </array> 9511 <description> 9512 List of tonemapping modes for android.tonemap.mode that are supported by this camera 9513 device. 9514 </description> 9515 <range>Any value listed in android.tonemap.mode</range> 9516 <details> 9517 Camera devices that support the MANUAL_POST_PROCESSING capability will always contain 9518 at least one of below mode combinations: 9519 9520 * CONTRAST_CURVE, FAST and HIGH_QUALITY 9521 * GAMMA_VALUE, PRESET_CURVE, FAST and HIGH_QUALITY 9522 9523 This includes all FULL level devices. 9524 </details> 9525 <hal_details> 9526 HAL must support both FAST and HIGH_QUALITY if automatic tonemap control is available 9527 on the camera device, but the underlying implementation can be the same for both modes. 9528 That is, if the highest quality implementation on the camera device does not slow down 9529 capture rate, then FAST and HIGH_QUALITY will generate the same output. 
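For illustration only (not a HAL requirement), a minimal application-side sketch of
checking the advertised modes and supplying a simple contrast curve when CONTRAST_CURVE
is listed; `characteristics` and `requestBuilder` are assumed to already exist:

    int[] modes = characteristics.get(
            CameraCharacteristics.TONEMAP_AVAILABLE_TONE_MAP_MODES);
    boolean hasContrastCurve = false;
    if (modes != null) {
        for (int mode : modes) {
            if (mode == CameraMetadata.TONEMAP_MODE_CONTRAST_CURVE) {
                hasContrastCurve = true;
            }
        }
    }
    if (hasContrastCurve) {
        // Two-point linear curve for brevity; a real curve would use more points,
        // up to android.tonemap.maxCurvePoints.
        float[] linear = { 0.0f, 0.0f, 1.0f, 1.0f };
        TonemapCurve curve = new TonemapCurve(linear, linear, linear);
        requestBuilder.set(CaptureRequest.TONEMAP_MODE,
                CameraMetadata.TONEMAP_MODE_CONTRAST_CURVE);
        requestBuilder.set(CaptureRequest.TONEMAP_CURVE, curve);
    }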
9530 </hal_details> 9531 </entry> 9532 </static> 9533 <dynamic> 9534 <clone entry="android.tonemap.curveBlue" kind="controls"> 9535 </clone> 9536 <clone entry="android.tonemap.curveGreen" kind="controls"> 9537 </clone> 9538 <clone entry="android.tonemap.curveRed" kind="controls"> 9539 </clone> 9540 <clone entry="android.tonemap.curve" kind="controls"> 9541 </clone> 9542 <clone entry="android.tonemap.mode" kind="controls"> 9543 </clone> 9544 </dynamic> 9545 <controls> 9546 <entry name="gamma" type="float" visibility="public"> 9547 <description> Tonemapping curve to use when android.tonemap.mode is 9548 GAMMA_VALUE 9549 </description> 9550 <details> 9551 The tonemap curve will be defined by the following formula: 9552 * OUT = pow(IN, 1.0 / gamma) 9553 where IN and OUT are the input and output pixel values scaled to the range [0.0, 1.0], 9554 pow is the power function and gamma is the gamma value specified by this 9555 key. 9556 9557 The same curve will be applied to all color channels. The camera device 9558 may clip the input gamma value to its supported range. The actual applied 9559 value will be returned in the capture result. 9560 9561 The valid range of the gamma value varies on different devices, but values 9562 within [1.0, 5.0] are guaranteed not to be clipped. 9563 </details> 9564 </entry> 9565 <entry name="presetCurve" type="byte" visibility="public" enum="true"> 9566 <enum> 9567 <value>SRGB 9568 <notes>Tonemapping curve is defined by sRGB</notes> 9569 </value> 9570 <value>REC709 9571 <notes>Tonemapping curve is defined by ITU-R BT.709</notes> 9572 </value> 9573 </enum> 9574 <description> Tonemapping curve to use when android.tonemap.mode is 9575 PRESET_CURVE 9576 </description> 9577 <details> 9578 The tonemap curve will be defined by the specified standard. 9579 9580 sRGB (approximated by 16 control points): 9581 9582 ![sRGB tonemapping curve](android.tonemap.curveRed/srgb_tonemap.png) 9583 9584 Rec. 709 (approximated by 16 control points): 9585 9586 ![Rec. 709 tonemapping curve](android.tonemap.curveRed/rec709_tonemap.png) 9587 9588 Note that the above figures show a 16-control-point approximation of the preset 9589 curves. Camera devices may apply a different approximation to the curve. 9590 </details> 9591 </entry> 9592 </controls> 9593 <dynamic> 9594 <clone entry="android.tonemap.gamma" kind="controls"> 9595 </clone> 9596 <clone entry="android.tonemap.presetCurve" kind="controls"> 9597 </clone> 9598 </dynamic> 9599 </section> 9600 <section name="led"> 9601 <controls> 9602 <entry name="transmit" type="byte" visibility="hidden" optional="true" 9603 enum="true" typedef="boolean"> 9604 <enum> 9605 <value>OFF</value> 9606 <value>ON</value> 9607 </enum> 9608 <description>This LED is nominally used to indicate to the user 9609 that the camera is powered on and may be streaming images back to the 9610 Application Processor. In certain rare circumstances, the OS may 9611 disable this when video is processed locally and not transmitted to 9612 any untrusted applications. 9613 9614 In particular, the LED *must* always be on when the data could be 9615 transmitted off the device. The LED *should* always be on whenever 9616 data is stored locally on the device. 9617 9618 The LED *may* be off if a trusted application is using the data that 9619 doesn't violate the above rules.
9620 </description> 9621 </entry> 9622 </controls> 9623 <dynamic> 9624 <clone entry="android.led.transmit" kind="controls"></clone> 9625 </dynamic> 9626 <static> 9627 <entry name="availableLeds" type="byte" visibility="hidden" optional="true" 9628 enum="true" 9629 container="array"> 9630 <array> 9631 <size>n</size> 9632 </array> 9633 <enum> 9634 <value>TRANSMIT 9635 <notes>android.led.transmit control is used.</notes> 9636 </value> 9637 </enum> 9638 <description>A list of camera LEDs that are available on this system. 9639 </description> 9640 </entry> 9641 </static> 9642 </section> 9643 <section name="info"> 9644 <static> 9645 <entry name="supportedHardwareLevel" type="byte" visibility="public" 9646 enum="true" hwlevel="legacy"> 9647 <enum> 9648 <value> 9649 LIMITED 9650 <notes> 9651 This camera device does not have enough capabilities to qualify as a `FULL` device or 9652 better. 9653 9654 Only the stream configurations listed in the `LEGACY` and `LIMITED` tables in the 9655 {@link android.hardware.camera2.CameraDevice#createCaptureSession|ACameraDevice_createCaptureSession 9656 createCaptureSession} documentation are guaranteed to be supported. 9657 9658 All `LIMITED` devices support the `BACKWARDS_COMPATIBLE` capability, indicating basic 9659 support for color image capture. The only exception is that the device may 9660 alternatively support only the `DEPTH_OUTPUT` capability, if it can only output depth 9661 measurements and not color images. 9662 9663 `LIMITED` devices and above require the use of android.control.aePrecaptureTrigger 9664 to lock exposure metering (and calculate flash power, for cameras with flash) before 9665 capturing a high-quality still image. 9666 9667 A `LIMITED` device that only lists the `BACKWARDS_COMPATIBLE` capability is only 9668 required to support full-automatic operation and post-processing (`OFF` is not 9669 supported for android.control.aeMode, android.control.afMode, or 9670 android.control.awbMode) 9671 9672 Additional capabilities may optionally be supported by a `LIMITED`-level device, and 9673 can be checked for in android.request.availableCapabilities. 9674 </notes> 9675 </value> 9676 <value> 9677 FULL 9678 <notes> 9679 This camera device is capable of supporting advanced imaging applications. 9680 9681 The stream configurations listed in the `FULL`, `LEGACY` and `LIMITED` tables in the 9682 {@link android.hardware.camera2.CameraDevice#createCaptureSession|ACameraDevice_createCaptureSession 9683 createCaptureSession} documentation are guaranteed to be supported. 9684 9685 A `FULL` device will support below capabilities: 9686 9687 * `BURST_CAPTURE` capability (android.request.availableCapabilities contains 9688 `BURST_CAPTURE`) 9689 * Per frame control (android.sync.maxLatency `==` PER_FRAME_CONTROL) 9690 * Manual sensor control (android.request.availableCapabilities contains `MANUAL_SENSOR`) 9691 * Manual post-processing control (android.request.availableCapabilities contains 9692 `MANUAL_POST_PROCESSING`) 9693 * The required exposure time range defined in android.sensor.info.exposureTimeRange 9694 * The required maxFrameDuration defined in android.sensor.info.maxFrameDuration 9695 9696 Note: 9697 Pre-API level 23, FULL devices also supported arbitrary cropping region 9698 (android.scaler.croppingType `== FREEFORM`); this requirement was relaxed in API level 9699 23, and `FULL` devices may only support `CENTERED` cropping. 
9700 </notes> 9701 </value> 9702 <value> 9703 LEGACY 9704 <notes> 9705 This camera device is running in backward compatibility mode. 9706 9707 Only the stream configurations listed in the `LEGACY` table in the {@link 9708 android.hardware.camera2.CameraDevice#createCaptureSession|ACameraDevice_createCaptureSession 9709 createCaptureSession} documentation are supported. 9710 9711 A `LEGACY` device does not support per-frame control, manual sensor control, manual 9712 post-processing, arbitrary cropping regions, and has relaxed performance constraints. 9713 No additional capabilities beyond `BACKWARD_COMPATIBLE` will ever be listed by a 9714 `LEGACY` device in android.request.availableCapabilities. 9715 9716 In addition, the android.control.aePrecaptureTrigger is not functional on `LEGACY` 9717 devices. Instead, every request that includes a JPEG-format output target is treated 9718 as triggering a still capture, internally executing a precapture trigger. This may 9719 fire the flash for flash power metering during precapture, and then fire the flash 9720 for the final capture, if a flash is available on the device and the AE mode is set to 9721 enable the flash. 9722 9723 Devices that initially shipped with Android version {@link 9724 android.os.Build.VERSION_CODES#Q Q} or newer will not include any LEGACY-level devices. 9725 </notes> 9726 </value> 9727 <value> 9728 3 9729 <notes> 9730 This camera device is capable of YUV reprocessing and RAW data capture, in addition to 9731 FULL-level capabilities. 9732 9733 The stream configurations listed in the `LEVEL_3`, `RAW`, `FULL`, `LEGACY` and 9734 `LIMITED` tables in the {@link 9735 android.hardware.camera2.CameraDevice#createCaptureSession|ACameraDevice_createCaptureSession 9736 createCaptureSession} documentation are guaranteed to be supported. 9737 9738 The following additional capabilities are guaranteed to be supported: 9739 9740 * `YUV_REPROCESSING` capability (android.request.availableCapabilities contains 9741 `YUV_REPROCESSING`) 9742 * `RAW` capability (android.request.availableCapabilities contains 9743 `RAW`) 9744 </notes> 9745 </value> 9746 <value hal_version="3.3"> 9747 EXTERNAL 9748 <notes> 9749 This camera device is backed by an external camera connected to this Android device. 9750 9751 The device has capability identical to a LIMITED level device, with the following 9752 exceptions: 9753 9754 * The device may not report lens/sensor related information such as 9755 - android.lens.focalLength 9756 - android.lens.info.hyperfocalDistance 9757 - android.sensor.info.physicalSize 9758 - android.sensor.info.whiteLevel 9759 - android.sensor.blackLevelPattern 9760 - android.sensor.info.colorFilterArrangement 9761 - android.sensor.rollingShutterSkew 9762 * The device will report 0 for android.sensor.orientation 9763 * The device has less guarantee on stable framerate, as the framerate partly depends 9764 on the external camera being used. 9765 </notes> 9766 </value> 9767 </enum> 9768 <description> 9769 Generally classifies the overall set of the camera device functionality. 9770 </description> 9771 <details> 9772 The supported hardware level is a high-level description of the camera device's 9773 capabilities, summarizing several capabilities into one field. Each level adds additional 9774 features to the previous one, and is always a strict superset of the previous level. 9775 The ordering is `LEGACY < LIMITED < FULL < LEVEL_3`. 
9776 9777 Starting from `LEVEL_3`, the level enumerations are guaranteed to be in increasing 9778 numerical value as well. To check if a given device is at least at a given hardware level, 9779 the following code snippet can be used: 9780 9781 // Returns true if the device supports the required hardware level, or better. 9782 boolean isHardwareLevelSupported(CameraCharacteristics c, int requiredLevel) { 9783 final int[] sortedHwLevels = { 9784 CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY, 9785 CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL, 9786 CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED, 9787 CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL, 9788 CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_3 9789 }; 9790 int deviceLevel = c.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL); 9791 if (requiredLevel == deviceLevel) { 9792 return true; 9793 } 9794 9795 for (int sortedlevel : sortedHwLevels) { 9796 if (sortedlevel == requiredLevel) { 9797 return true; 9798 } else if (sortedlevel == deviceLevel) { 9799 return false; 9800 } 9801 } 9802 return false; // Should never reach here 9803 } 9804 9805 At a high level, the levels are: 9806 9807 * `LEGACY` devices operate in a backwards-compatibility mode for older 9808 Android devices, and have very limited capabilities. 9809 * `LIMITED` devices represent the 9810 baseline feature set, and may also include additional capabilities that are 9811 subsets of `FULL`. 9812 * `FULL` devices additionally support per-frame manual control of sensor, flash, lens and 9813 post-processing settings, and image capture at a high rate. 9814 * `LEVEL_3` devices additionally support YUV reprocessing and RAW image capture, along 9815 with additional output stream configurations. 9816 * `EXTERNAL` devices are similar to `LIMITED` devices with exceptions like some sensor or 9817 lens information not reported or less stable framerates. 9818 9819 See the individual level enums for full descriptions of the supported capabilities. The 9820 android.request.availableCapabilities entry describes the device's capabilities at a 9821 finer-grain level, if needed. In addition, many controls have their available settings or 9822 ranges defined in individual entries from {@link 9823 android.hardware.camera2.CameraCharacteristics|ACameraManager_getCameraCharacteristics}. 9824 9825 Some features are not part of any particular hardware level or capability and must be 9826 queried separately. These include: 9827 9828 * Calibrated timestamps (android.sensor.info.timestampSource `==` REALTIME) 9829 * Precision lens control (android.lens.info.focusDistanceCalibration `==` CALIBRATED) 9830 * Face detection (android.statistics.info.availableFaceDetectModes) 9831 * Optical or electrical image stabilization 9832 (android.lens.info.availableOpticalStabilization, 9833 android.control.availableVideoStabilizationModes) 9834 9835 </details> 9836 <hal_details> 9837 A camera HALv3 device can implement one of three possible operational modes; LIMITED, 9838 FULL, and LEVEL_3. 9839 9840 FULL support or better is expected from new higher-end devices. Limited 9841 mode has hardware requirements roughly in line with those for a camera HAL device v1 9842 implementation, and is expected from older or inexpensive devices. Each level is a strict 9843 superset of the previous level, and they share the same essential operational flow. 9844 9845 For full details refer to "S3. 
Operational Modes" in camera3.h 9846 9847 Camera HAL3+ must not implement LEGACY mode. It is there for backwards compatibility in 9848 the `android.hardware.camera2` user-facing API only on legacy HALv1 devices, and is 9849 implemented by the camera framework code. 9850 9851 EXTERNAL level devices have a lower performance bar in CTS since the performance might depend 9852 on the external camera being used and is not fully controlled by the device manufacturer. 9853 The ITS test suite is exempted for the same reason. 9854 </hal_details> 9855 </entry> 9856 <entry name="version" type="byte" visibility="public" typedef="string" hal_version="3.3"> 9857 <description> 9858 A short string for manufacturer version information about the camera device, such as 9859 ISP hardware, sensors, etc. 9860 </description> 9861 <details> 9862 This can be used in {@link android.media.ExifInterface#TAG_IMAGE_DESCRIPTION TAG_IMAGE_DESCRIPTION} 9863 in the JPEG EXIF. This key may be absent if no version information is available on the 9864 device. 9865 </details> 9866 <hal_details> 9867 The string must consist of only alphanumeric characters, punctuation, and 9868 whitespace, i.e. it must match regular expression "[\p{Alnum}\p{Punct}\p{Space}]*". 9869 It must not exceed 256 characters. 9870 </hal_details> 9871 </entry> 9872 <entry name="supportedBufferManagementVersion" type="byte" visibility="system" 9873 enum="true" hal_version="3.4"> 9874 <enum> 9875 <value> 9876 HIDL_DEVICE_3_5 9877 <notes> 9878 This camera device supports and opts in to the buffer management APIs provided by 9879 HIDL ICameraDevice version 3.5. 9880 </notes> 9881 </value> 9882 </enum> 9883 <description> 9884 The version of the buffer management API this camera device supports and opts into. 9885 </description> 9886 <details> 9887 When this key is not present, the camera framework will interact with this camera device 9888 without any buffer management HAL API. When this key is present and the camera framework 9889 supports the buffer management API version, the camera framework will interact with the camera 9890 HAL using that version of the buffer management API. 9891 </details> 9892 </entry> 9893 </static> 9894 </section> 9895 <section name="blackLevel"> 9896 <controls> 9897 <entry name="lock" type="byte" visibility="public" enum="true" 9898 typedef="boolean" hwlevel="full"> 9899 <enum> 9900 <value>OFF</value> 9901 <value>ON</value> 9902 </enum> 9903 <description> Whether black-level compensation is locked 9904 to its current values, or is free to vary.</description> 9905 <details>When set to `true` (ON), the values used for black-level 9906 compensation will not change until the lock is set to 9907 `false` (OFF). 9908 9909 Since changes to certain capture parameters (such as 9910 exposure time) may require resetting of black level 9911 compensation, the camera device must report whether setting 9912 the black level lock was successful in the output result 9913 metadata.
9914 9915 For example, if a sequence of requests is as follows: 9916 9917 * Request 1: Exposure = 10ms, Black level lock = OFF 9918 * Request 2: Exposure = 10ms, Black level lock = ON 9919 * Request 3: Exposure = 10ms, Black level lock = ON 9920 * Request 4: Exposure = 20ms, Black level lock = ON 9921 * Request 5: Exposure = 20ms, Black level lock = ON 9922 * Request 6: Exposure = 20ms, Black level lock = ON 9923 9924 And the exposure change in Request 4 requires the camera 9925 device to reset the black level offsets, then the output 9926 result metadata is expected to be: 9927 9928 * Result 1: Exposure = 10ms, Black level lock = OFF 9929 * Result 2: Exposure = 10ms, Black level lock = ON 9930 * Result 3: Exposure = 10ms, Black level lock = ON 9931 * Result 4: Exposure = 20ms, Black level lock = OFF 9932 * Result 5: Exposure = 20ms, Black level lock = ON 9933 * Result 6: Exposure = 20ms, Black level lock = ON 9934 9935 This indicates to the application that on frame 4, black 9936 levels were reset due to exposure value changes, and pixel 9937 values may not be consistent across captures. 9938 9939 The camera device will maintain the lock to the extent 9940 possible, only overriding the lock to OFF when changes to 9941 other request parameters require a black level recalculation 9942 or reset. 9943 </details> 9944 <hal_details> 9945 If for some reason black level locking is no longer possible 9946 (for example, the analog gain has changed, which forces 9947 black level offsets to be recalculated), then the HAL must 9948 override this request (and it must report 'OFF' when this 9949 does happen) until the next capture for which locking is 9950 possible again.</hal_details> 9951 <tag id="HAL2" /> 9952 </entry> 9953 </controls> 9954 <dynamic> 9955 <clone entry="android.blackLevel.lock" 9956 kind="controls"> 9957 <details> 9958 Whether the black level offset was locked for this frame. Should be 9959 ON if android.blackLevel.lock was ON in the capture request, unless 9960 a change in other capture settings forced the camera device to 9961 perform a black level reset. 9962 </details> 9963 </clone> 9964 </dynamic> 9965 </section> 9966 <section name="sync"> 9967 <dynamic> 9968 <entry name="frameNumber" type="int64" visibility="ndk_public" 9969 enum="true" hwlevel="legacy"> 9970 <enum> 9971 <value id="-1">CONVERGING 9972 <notes> 9973 The current result is not yet fully synchronized to any request. 9974 9975 Synchronization is in progress, and reading metadata from this 9976 result may include a mix of data that have taken effect since the 9977 last synchronization time. 9978 9979 In some future result, within android.sync.maxLatency frames, 9980 this value will update to the actual frame number 9981 the result is guaranteed to be synchronized to (as long as the 9982 request settings remain constant). 9983 </notes> 9984 </value> 9985 <value id="-2">UNKNOWN 9986 <notes> 9987 The current result's synchronization status is unknown. 9988 9989 The result may have already converged, or it may be in 9990 progress. Reading from this result may include some mix 9991 of settings from past requests. 9992 9993 After a settings change, the new settings will eventually all 9994 take effect for the output buffers and results. However, this 9995 value will not change when that happens. Altering settings 9996 rapidly may provide outcomes using mixes of settings from recent 9997 requests.
9998 9999 This value is intended primarily for backwards compatibility with 10000 the older camera implementations (for android.hardware.Camera). 10001 </notes> 10002 </value> 10003 </enum> 10004 <description>The frame number corresponding to the last request 10005 with which the output result (metadata + buffers) has been fully 10006 synchronized.</description> 10007 <range>Either a non-negative value corresponding to a 10008 `frame_number`, or one of the two enums (CONVERGING / UNKNOWN). 10009 </range> 10010 <details> 10011 When a request is submitted to the camera device, there is usually a 10012 delay of several frames before the controls get applied. A camera 10013 device may either choose to account for this delay by implementing a 10014 pipeline and carefully submit well-timed atomic control updates, or 10015 it may start streaming control changes that span over several frame 10016 boundaries. 10017 10018 In the latter case, whenever a request's settings change relative to 10019 the previous submitted request, the full set of changes may take 10020 multiple frame durations to fully take effect. Some settings may 10021 take effect sooner (in less frame durations) than others. 10022 10023 While a set of control changes are being propagated, this value 10024 will be CONVERGING. 10025 10026 Once it is fully known that a set of control changes have been 10027 finished propagating, and the resulting updated control settings 10028 have been read back by the camera device, this value will be set 10029 to a non-negative frame number (corresponding to the request to 10030 which the results have synchronized to). 10031 10032 Older camera device implementations may not have a way to detect 10033 when all camera controls have been applied, and will always set this 10034 value to UNKNOWN. 10035 10036 FULL capability devices will always have this value set to the 10037 frame number of the request corresponding to this result. 10038 10039 _Further details_: 10040 10041 * Whenever a request differs from the last request, any future 10042 results not yet returned may have this value set to CONVERGING (this 10043 could include any in-progress captures not yet returned by the camera 10044 device, for more details see pipeline considerations below). 10045 * Submitting a series of multiple requests that differ from the 10046 previous request (e.g. r1, r2, r3 s.t. r1 != r2 != r3) 10047 moves the new synchronization frame to the last non-repeating 10048 request (using the smallest frame number from the contiguous list of 10049 repeating requests). 10050 * Submitting the same request repeatedly will not change this value 10051 to CONVERGING, if it was already a non-negative value. 10052 * When this value changes to non-negative, that means that all of the 10053 metadata controls from the request have been applied, all of the 10054 metadata controls from the camera device have been read to the 10055 updated values (into the result), and all of the graphics buffers 10056 corresponding to this result are also synchronized to the request. 10057 10058 _Pipeline considerations_: 10059 10060 Submitting a request with updated controls relative to the previously 10061 submitted requests may also invalidate the synchronization state 10062 of all the results corresponding to currently in-flight requests. 10063 10064 In other words, results for this current request and up to 10065 android.request.pipelineMaxDepth prior requests may have their 10066 android.sync.frameNumber change to CONVERGING. 
10067 </details> 10068 <hal_details> 10069 Using UNKNOWN here is illegal unless android.sync.maxLatency 10070 is also UNKNOWN. 10071 10072 FULL capability devices should simply set this value to the 10073 `frame_number` of the request this result corresponds to. 10074 </hal_details> 10075 <tag id="V1" /> 10076 </entry> 10077 </dynamic> 10078 <static> 10079 <entry name="maxLatency" type="int32" visibility="public" enum="true" 10080 hwlevel="legacy"> 10081 <enum> 10082 <value id="0">PER_FRAME_CONTROL 10083 <notes> 10084 Every frame has the requests immediately applied. 10085 10086 Changing controls over multiple requests one after another will 10087 produce results that have those controls applied atomically 10088 each frame. 10089 10090 All FULL capability devices will have this as their maxLatency. 10091 </notes> 10092 </value> 10093 <value id="-1">UNKNOWN 10094 <notes> 10095 Each new frame has some subset (potentially the entire set) 10096 of the past requests applied to the camera settings. 10097 10098 By submitting a series of identical requests, the camera device 10099 will eventually have the camera settings applied, but it is 10100 unknown when that exact point will be. 10101 10102 All LEGACY capability devices will have this as their maxLatency. 10103 </notes> 10104 </value> 10105 </enum> 10106 <description> 10107 The maximum number of frames that can occur after a request 10108 (different than the previous) has been submitted, and before the 10109 result's state becomes synchronized. 10110 </description> 10111 <units>Frame counts</units> 10112 <range>A positive value, PER_FRAME_CONTROL, or UNKNOWN.</range> 10113 <details> 10114 This defines the maximum distance (in number of metadata results), 10115 between the frame number of the request that has new controls to apply 10116 and the frame number of the result that has all the controls applied. 10117 10118 In other words this acts as an upper boundary for how many frames 10119 must occur before the camera device knows for a fact that the new 10120 submitted camera settings have been applied in outgoing frames. 10121 </details> 10122 <hal_details> 10123 For example if maxLatency was 2, 10124 10125 initial request = X (repeating) 10126 request1 = X 10127 request2 = Y 10128 request3 = Y 10129 request4 = Y 10130 10131 where requestN has frameNumber N, and the first of the repeating 10132 initial request's has frameNumber F (and F < 1). 10133 10134 initial result = X' + { android.sync.frameNumber == F } 10135 result1 = X' + { android.sync.frameNumber == F } 10136 result2 = X' + { android.sync.frameNumber == CONVERGING } 10137 result3 = X' + { android.sync.frameNumber == CONVERGING } 10138 result4 = X' + { android.sync.frameNumber == 2 } 10139 10140 where resultN has frameNumber N. 10141 10142 Since `result4` has a `frameNumber == 4` and 10143 `android.sync.frameNumber == 2`, the distance is clearly 10144 `4 - 2 = 2`. 10145 10146 Use `frame_count` from camera3_request_t instead of 10147 android.request.frameCount or 10148 `{@link android.hardware.camera2.CaptureResult#getFrameNumber}`. 10149 10150 LIMITED devices are strongly encouraged to use a non-negative 10151 value. If UNKNOWN is used here then app developers do not have a way 10152 to know when sensor settings have been applied. 
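For context, a minimal application-side sketch (not a HAL requirement) of how this value
is typically consumed; `characteristics` is assumed to be the device's
CameraCharacteristics:

    Integer latency = characteristics.get(CameraCharacteristics.SYNC_MAX_LATENCY);
    if (latency != null && latency == CameraMetadata.SYNC_MAX_LATENCY_PER_FRAME_CONTROL) {
        // Every result reflects the settings of its own request.
    } else if (latency != null && latency != CameraMetadata.SYNC_MAX_LATENCY_UNKNOWN) {
        // New settings are guaranteed to be reflected within `latency` frames.
    } else {
        // UNKNOWN: the application cannot tell when new settings have taken effect.
    }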
10153 </hal_details> 10154 <tag id="V1" /> 10155 </entry> 10156 </static> 10157 </section> 10158 <section name="reprocess"> 10159 <controls> 10160 <entry name="effectiveExposureFactor" type="float" visibility="java_public" hwlevel="limited"> 10161 <description> 10162 The exposure time increase factor applied to the original output 10163 frame by the application processing before sending it for reprocessing. 10164 </description> 10165 <units>Relative exposure time increase factor.</units> 10166 <range> &gt;= 1.0</range> 10167 <details> 10168 This is optional, and will be supported if the camera device supports YUV_REPROCESSING 10169 capability (android.request.availableCapabilities contains YUV_REPROCESSING). 10170 10171 For some YUV reprocessing use cases, the application may choose to filter the original 10172 output frames to effectively reduce the noise to the same level as a frame that was 10173 captured with a longer exposure time. To be more specific, assuming the original captured 10174 images were captured with a sensitivity of S and an exposure time of T, the model in 10175 the camera device is that the amount of noise in the image would be approximately what 10176 would be expected if the original capture parameters had been a sensitivity of 10177 S/effectiveExposureFactor and an exposure time of T*effectiveExposureFactor, rather 10178 than S and T respectively. If the captured images were processed by the application 10179 before being sent for reprocessing, then the application may have used image processing 10180 algorithms and/or multi-frame image fusion to reduce the noise in the 10181 application-processed images (input images). By using the effectiveExposureFactor 10182 control, the application can communicate to the camera device the actual noise level 10183 improvement in the application-processed image. With this information, the camera 10184 device can select appropriate noise reduction and edge enhancement parameters to avoid 10185 excessive noise reduction (android.noiseReduction.mode) and insufficient edge 10186 enhancement (android.edge.mode) being applied to the reprocessed frames. 10187 10188 For example, for the multi-frame image fusion use case, the application may fuse 10189 multiple output frames together into a final frame for reprocessing. When N images are 10190 fused into 1 image for reprocessing, the exposure time increase factor could be up to 10191 the square root of N (based on a simple photon shot noise model). The camera device will 10192 adjust the reprocessing noise reduction and edge enhancement parameters accordingly to 10193 produce the best quality images. 10194 10195 This is a relative factor; 1.0 indicates the application hasn't processed the input 10196 buffer in a way that affects its effective exposure time. 10197 10198 This control is only effective for YUV reprocessing capture requests. For noise 10199 reduction reprocessing, it is only effective when `android.noiseReduction.mode != OFF`. 10200 Similarly, for edge enhancement reprocessing, it is only effective when 10201 `android.edge.mode != OFF`. 10202 </details> 10203 <tag id="REPROC" /> 10204 </entry> 10205 </controls> 10206 <dynamic> 10207 <clone entry="android.reprocess.effectiveExposureFactor" kind="controls"> 10208 </clone> 10209 </dynamic> 10210 <static> 10211 <entry name="maxCaptureStall" type="int32" visibility="java_public" hwlevel="limited"> 10212 <description> 10213 The maximal camera capture pipeline stall (in units of frame count) introduced by a 10214 reprocess capture request.
10215 </description> 10216 <units>Number of frames.</units> 10217 <range> &lt;= 4</range> 10218 <details> 10219 The key describes the maximal interference that one reprocess (input) request 10220 can introduce to the camera device's simultaneous streaming of regular (output) capture 10221 requests, including repeating requests. 10222 10223 When a reprocessing capture request is submitted while a camera output repeating request 10224 (e.g. preview) is being served by the camera device, it may preempt the camera capture 10225 pipeline for at least one frame duration so that the camera device is unable to process 10226 the following capture request in time for the next sensor start of exposure boundary. 10227 When this happens, the application may observe a capture time gap (longer than one frame 10228 duration) between adjacent capture output frames, which usually appears as a preview 10229 glitch if the repeating request output targets include a preview surface. This key gives 10230 the worst-case number of frames of stall introduced by one reprocess request with any kind of 10231 format/size combination. 10232 10233 If this key reports 0, it means a reprocess request doesn't introduce any glitch to the 10234 ongoing camera repeating request outputs, as if the reprocess request had never been issued. 10235 10236 This key is supported if the camera device supports PRIVATE or YUV reprocessing ( 10237 i.e. android.request.availableCapabilities contains PRIVATE_REPROCESSING or 10238 YUV_REPROCESSING). 10239 </details> 10240 <tag id="REPROC" /> 10241 </entry> 10242 </static> 10243 </section> 10244 <section name="depth"> 10245 <static> 10246 <entry name="maxDepthSamples" type="int32" visibility="system" hwlevel="limited"> 10247 <description>Maximum number of points that a depth point cloud may contain. 10248 </description> 10249 <details> 10250 If a camera device supports outputting depth range data in the form of a depth point 10251 cloud ({@link android.graphics.ImageFormat#DEPTH_POINT_CLOUD}), this is the maximum 10252 number of points an output buffer may contain. 10253 10254 Any given buffer may contain between 0 and maxDepthSamples points, inclusive. 10255 If output in the depth point cloud format is not supported, this entry will 10256 not be defined. 10257 </details> 10258 <tag id="DEPTH" /> 10259 </entry> 10260 <entry name="availableDepthStreamConfigurations" type="int32" visibility="ndk_public" 10261 enum="true" container="array" typedef="streamConfiguration" hwlevel="limited"> 10262 <array> 10263 <size>n</size> 10264 <size>4</size> 10265 </array> 10266 <enum> 10267 <value>OUTPUT</value> 10268 <value>INPUT</value> 10269 </enum> 10270 <description>The available depth dataspace stream 10271 configurations that this camera device supports 10272 (i.e. format, width, height, output/input stream). 10273 </description> 10274 <details> 10275 These are output stream configurations for use with 10276 dataSpace HAL_DATASPACE_DEPTH. The configurations are 10277 listed as `(format, width, height, input?)` tuples. 10278 10279 Only devices that support depth output for at least 10280 the HAL_PIXEL_FORMAT_Y16 dense depth map may include 10281 this entry. 10282 10283 A device that also supports the HAL_PIXEL_FORMAT_BLOB 10284 sparse depth point cloud must report a single entry for 10285 the format in this list as `(HAL_PIXEL_FORMAT_BLOB, 10286 android.depth.maxDepthSamples, 1, OUTPUT)` in addition to 10287 the entries for HAL_PIXEL_FORMAT_Y16.
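For context, applications typically discover the resulting depth outputs through the
merged stream configuration map rather than this raw entry; a minimal sketch (assuming
`characteristics` is this camera's CameraCharacteristics):

    StreamConfigurationMap map = characteristics.get(
            CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    Size[] depthSizes = (map == null) ? null : map.getOutputSizes(ImageFormat.DEPTH16);
    if (depthSizes != null && depthSizes.length > 0) {
        // Dense depth (DEPTH16) output is available at these sizes.
    }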
10288 </details> 10289 <tag id="DEPTH" /> 10290 </entry> 10291 <entry name="availableDepthMinFrameDurations" type="int64" visibility="ndk_public" 10292 container="array" typedef="streamConfigurationDuration" hwlevel="limited"> 10293 <array> 10294 <size>4</size> 10295 <size>n</size> 10296 </array> 10297 <description>This lists the minimum frame duration for each 10298 format/size combination for depth output formats. 10299 </description> 10300 <units>(format, width, height, ns) x n</units> 10301 <details> 10302 This should correspond to the frame duration when only that 10303 stream is active, with all processing (typically in android.*.mode) 10304 set to either OFF or FAST. 10305 10306 When multiple streams are used in a request, the minimum frame 10307 duration will be max(individual stream min durations). 10308 10309 The minimum frame duration of a stream (of a particular format, size) 10310 is the same regardless of whether the stream is input or output. 10311 10312 See android.sensor.frameDuration and 10313 android.scaler.availableStallDurations for more details about 10314 calculating the max frame rate. 10315 </details> 10316 <tag id="DEPTH" /> 10317 </entry> 10318 <entry name="availableDepthStallDurations" type="int64" visibility="ndk_public" 10319 container="array" typedef="streamConfigurationDuration" hwlevel="limited"> 10320 <array> 10321 <size>4</size> 10322 <size>n</size> 10323 </array> 10324 <description>This lists the maximum stall duration for each 10325 output format/size combination for depth streams. 10326 </description> 10327 <units>(format, width, height, ns) x n</units> 10328 <details> 10329 A stall duration is how much extra time would get added 10330 to the normal minimum frame duration for a repeating request 10331 that has streams with non-zero stall. 10332 10333 This functions similarly to 10334 android.scaler.availableStallDurations for depth 10335 streams. 10336 10337 All depth output stream formats may have a nonzero stall 10338 duration. 10339 </details> 10340 <tag id="DEPTH" /> 10341 </entry> 10342 <entry name="depthIsExclusive" type="byte" visibility="public" 10343 enum="true" typedef="boolean" hwlevel="limited"> 10344 <enum> 10345 <value>FALSE</value> 10346 <value>TRUE</value> 10347 </enum> 10348 <description>Indicates whether a capture request may target both a 10349 DEPTH16 / DEPTH_POINT_CLOUD output, and normal color outputs (such as 10350 YUV_420_888, JPEG, or RAW) simultaneously. 10351 </description> 10352 <details> 10353 If TRUE, including both depth and color outputs in a single 10354 capture request is not supported. An application must interleave color 10355 and depth requests. If FALSE, a single request can target both types 10356 of output. 10357 10358 Typically, this restriction exists on camera devices that 10359 need to emit a specific pattern or wavelength of light to 10360 measure depth values, which causes the color image to be 10361 corrupted during depth measurement. 10362 </details> 10363 </entry> 10364 <entry name="availableRecommendedDepthStreamConfigurations" type="int32" 10365 visibility="ndk_public" optional="true" container="array" 10366 typedef="recommendedStreamConfiguration" hal_version="3.4"> 10367 <array> 10368 <size>n</size> 10369 <size>5</size> 10370 </array> 10371 <description>Recommended depth stream configurations for common client use cases. 10372 </description> 10373 <details>Optional subset of the android.depth.availableDepthStreamConfigurations that 10374 contains similar tuples listed as 10375 (i.e. 
          (width, height, format, output/input stream, usecase bit field).
          Camera devices can suggest particular depth stream configurations that are
          power- and performance-efficient for specific use cases. For more information about
          retrieving the suggestions see
          {@link android.hardware.camera2.CameraCharacteristics#getRecommendedStreamConfigurationMap}.
          </details>
          <ndk_details>
          For data representation please refer to
          android.scaler.availableRecommendedStreamConfigurations.
          </ndk_details>
          <hal_details>
          Recommended depth configurations are expected to be declared with SNAPSHOT and/or
          ZSL if supported by the device.
          For additional details on how to declare recommended stream configurations, check
          android.scaler.availableRecommendedStreamConfigurations.
          For additional requirements on depth streams please consider
          android.depth.availableDepthStreamConfigurations.
          </hal_details>
        </entry>
        <entry name="availableDynamicDepthStreamConfigurations" type="int32" visibility="ndk_public"
               enum="true" container="array" typedef="streamConfiguration" hal_version="3.4">
          <array>
            <size>n</size>
            <size>4</size>
          </array>
          <enum>
            <value>OUTPUT</value>
            <value>INPUT</value>
          </enum>
          <description>The available dynamic depth dataspace stream
          configurations that this camera device supports
          (i.e. format, width, height, output/input stream).
          </description>
          <details>
          These are output stream configurations for use with
          dataSpace DYNAMIC_DEPTH. The configurations are
          listed as `(format, width, height, input?)` tuples.

          Only devices that support depth output for at least the HAL_PIXEL_FORMAT_Y16
          dense depth map, along with a HAL_PIXEL_FORMAT_BLOB of the same size (or a size
          with the same aspect ratio), can have a dynamic depth dataspace stream
          configuration. android.depth.depthIsExclusive also needs to be set to FALSE.
          </details>
          <hal_details>
          Do not set this property directly.
          It is populated by the camera framework and must not be set
          at the HAL layer.
          </hal_details>
          <tag id="DEPTH" />
        </entry>
        <entry name="availableDynamicDepthMinFrameDurations" type="int64" visibility="ndk_public"
               container="array" typedef="streamConfigurationDuration" hal_version="3.4">
          <array>
            <size>4</size>
            <size>n</size>
          </array>
          <description>This lists the minimum frame duration for each
          format/size combination for dynamic depth output streams.
          </description>
          <units>(format, width, height, ns) x n</units>
          <details>
          This should correspond to the frame duration when only that
          stream is active, with all processing (typically in android.*.mode)
          set to either OFF or FAST.

          When multiple streams are used in a request, the minimum frame
          duration will be max(individual stream min durations).

          The minimum frame duration of a stream (of a particular format, size)
          is the same regardless of whether the stream is input or output.
          </details>
          <hal_details>
          Do not set this property directly.
          It is populated by the camera framework and must not be set
          at the HAL layer.
          </hal_details>
          <tag id="DEPTH" />
        </entry>
        <entry name="availableDynamicDepthStallDurations" type="int64" visibility="ndk_public"
               container="array" typedef="streamConfigurationDuration" hal_version="3.4">
          <array>
            <size>4</size>
            <size>n</size>
          </array>
          <description>This lists the maximum stall duration for each
          output format/size combination for dynamic depth streams.
          </description>
          <units>(format, width, height, ns) x n</units>
          <details>
          A stall duration is how much extra time would get added
          to the normal minimum frame duration for a repeating request
          that has streams with non-zero stall.

          All dynamic depth output streams may have a nonzero stall
          duration.
          </details>
          <hal_details>
          Do not set this property directly.
          It is populated by the camera framework and must not be set
          at the HAL layer.
          </hal_details>
          <tag id="DEPTH" />
        </entry>
      </static>
    </section>
    <section name="logicalMultiCamera">
      <static>
        <entry name="physicalIds" type="byte" visibility="ndk_public"
               container="array" hwlevel="limited" hal_version="3.3">
          <array>
            <size>n</size>
          </array>
          <description>String containing the IDs of the underlying physical cameras.
          </description>
          <units>UTF-8 null-terminated string</units>
          <details>
          For a logical camera, this is the concatenation of all underlying physical camera IDs.
          The null terminator for each physical camera ID must be preserved so that the whole
          string can be tokenized using '\0' to generate a list of physical camera IDs.

          For example, if the physical camera IDs of the logical camera are "2" and "3", the
          value of this tag will be ['2', '\0', '3', '\0'].

          The number of physical camera IDs must be no less than 2.
          </details>
          <tag id="LOGICALCAMERA" />
        </entry>
        <entry name="sensorSyncType" type="byte" visibility="public"
               enum="true" hwlevel="limited" hal_version="3.3">
          <enum>
            <value>APPROXIMATE
              <notes>
              A software mechanism is used to synchronize between the physical cameras. As a result,
              the timestamp of an image from a physical stream is only an approximation of the
              image sensor start-of-exposure time.
              </notes>
            </value>
            <value>CALIBRATED
              <notes>
              The camera device supports frame timestamp synchronization at the hardware level,
              and the timestamp of a physical stream image accurately reflects its
              start-of-exposure time.
              </notes>
            </value>
          </enum>
          <description>The accuracy of frame timestamp synchronization between physical cameras</description>
          <details>
          The accuracy of the frame timestamp synchronization determines the physical cameras'
          ability to start exposure at the same time. If the sensorSyncType is CALIBRATED,
          the physical camera sensors usually run in master-slave mode so that their shutter
          time is synchronized. For the APPROXIMATE sensorSyncType, the camera sensors usually run
          in master-master mode, and there could be an offset between their start of exposure.

          In both cases, all images generated for a particular capture request still carry the same
          timestamps, so that they can be used to look up the matching frame number and
          onCaptureStarted callback.
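          For illustration only, a minimal sketch of how the physical camera IDs and this sync type
          can be read through the public Java API; `characteristics` is a hypothetical
          CameraCharacteristics instance for the logical camera, and error handling is omitted:

              // Each entry of getPhysicalCameraIds() is one token of the '\0'-separated
              // android.logicalMultiCamera.physicalIds value.
              int physicalCameraCount = characteristics.getPhysicalCameraIds().size();

              // Check whether the physical sensors advertise hardware-level synchronization.
              Integer syncType =
                  characteristics.get(CameraCharacteristics.LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE);
              boolean calibrated =
                  Integer.valueOf(CameraMetadata.LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE_CALIBRATED)
                      .equals(syncType);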
          This tag is only applicable if the logical camera device supports concurrent physical
          streams from different physical cameras.
          </details>
          <tag id="LOGICALCAMERA" />
        </entry>
      </static>
      <dynamic>
        <entry name="activePhysicalId" type="byte" visibility="public"
               typedef="string" hal_version="3.4">
          <description>String containing the ID of the underlying active physical camera.
          </description>
          <units>UTF-8 null-terminated string</units>
          <details>
          The ID of the active physical camera that's backing the logical camera. All camera
          streams and metadata that are not physical camera specific will originate from this
          physical camera.

          For a logical camera made up of physical cameras where each camera's lenses have
          different characteristics, the camera device may choose to switch between the physical
          cameras when the application changes FOCAL_LENGTH or SCALER_CROP_REGION.
          At the time of lens switch, this result metadata reflects the new active physical camera
          ID.

          This key will be available if the camera device advertises this key via {@link
          android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys|ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS}.
          When available, this must be one of the valid physical IDs backing this logical
          multi-camera.
          If this key is not available for a logical multi-camera, the camera device implementation
          may still switch between different active physical cameras based on use case, but the
          current active physical camera information won't be available to the application.
          </details>
          <hal_details>
          Starting from HIDL ICameraDevice version 3.5, the tag must be available in the capture
          result metadata to indicate the current active physical camera ID.
          </hal_details>
          <tag id="LOGICALCAMERA" />
        </entry>
      </dynamic>
    </section>
    <section name="distortionCorrection">
      <controls>
        <entry name="mode" type="byte" visibility="public" enum="true" hal_version="3.3">
          <enum>
            <value>OFF
              <notes>No distortion correction is applied.</notes></value>
            <value>FAST
              <notes>Lens distortion correction is applied without reducing frame rate
              relative to sensor output. It may be the same as OFF if distortion correction would
              reduce frame rate relative to sensor output.</notes></value>
            <value>HIGH_QUALITY
              <notes>High-quality distortion correction is applied, at the cost of
              possibly reduced frame rate relative to sensor output.</notes></value>
          </enum>
          <description>Mode of operation for the lens distortion correction block.</description>
          <range>android.distortionCorrection.availableModes</range>
          <details>The lens distortion correction block attempts to improve image quality by fixing
          radial, tangential, or other geometric aberrations in the camera device's optics. If
          available, the android.lens.distortion field documents the lens's distortion parameters.

          OFF means no distortion correction is done.

          FAST/HIGH_QUALITY both mean camera device-determined distortion correction will be
          applied. HIGH_QUALITY mode indicates that the camera device will use the highest-quality
          correction algorithms, even if it slows down capture rate. FAST means the camera device
          will not slow down capture rate when applying correction.
          FAST may be the same as OFF if
          any correction at all would slow down capture rate. Every output stream will have a
          similar amount of enhancement applied.

          The correction only applies to processed outputs such as YUV, Y8, JPEG, or DEPTH16; it is
          not applied to any RAW output.

          This control will be on by default on devices that support this control. Applications
          disabling distortion correction need to pay extra attention to the coordinate systems of
          metering regions, the crop region, and face rectangles. When distortion correction is OFF,
          metadata coordinates follow the coordinate system of
          android.sensor.info.preCorrectionActiveArraySize. When distortion correction is not OFF,
          metadata coordinates follow the coordinate system of android.sensor.info.activeArraySize.
          The camera device will map these metadata fields to match the corrected image produced by
          the camera device, for both capture requests and results. However, this mapping is not
          very precise, since rectangles do not generally map to rectangles when corrected. Only
          linear scaling between the active array and precorrection active array coordinates is
          performed. Applications that require precise correction of metadata need to undo that
          linear scaling, and apply a more complete correction that takes into account the app's
          own requirements.

          The full list of metadata that is affected in this way by distortion correction is:

          * android.control.afRegions
          * android.control.aeRegions
          * android.control.awbRegions
          * android.scaler.cropRegion
          * android.statistics.faces
          </details>
        </entry>
      </controls>
      <static>
        <entry name="availableModes" type="byte" visibility="public"
               type_notes="list of enums" container="array" typedef="enumList" hal_version="3.3">
          <array>
            <size>n</size>
          </array>
          <description>
          List of distortion correction modes for android.distortionCorrection.mode that are
          supported by this camera device.
          </description>
          <range>Any value listed in android.distortionCorrection.mode</range>
          <details>
          No device is required to support this API; devices that do not support it will list
          only OFF. All devices that do support it will list both FAST and HIGH_QUALITY.
          </details>
          <hal_details>
          The HAL must support both FAST and HIGH_QUALITY if distortion correction is available
          on the camera device, but the underlying implementation can be the same for both modes.
          That is, if the highest-quality implementation on the camera device does not slow down
          capture rate, then FAST and HIGH_QUALITY will generate the same output.
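          For reference only, a minimal client-side sketch of how the advertised list is typically
          consumed; `characteristics` and `requestBuilder` (a CaptureRequest.Builder) are
          hypothetical, pre-existing objects, and this sketch is not part of the HAL requirements:

              int[] modes = characteristics.get(
                  CameraCharacteristics.DISTORTION_CORRECTION_AVAILABLE_MODES);
              if (modes != null) {
                  int chosen = CameraMetadata.DISTORTION_CORRECTION_MODE_OFF;
                  for (int mode : modes) {
                      if (mode == CameraMetadata.DISTORTION_CORRECTION_MODE_HIGH_QUALITY) {
                          chosen = mode;      // Prefer HIGH_QUALITY when it is listed.
                      } else if (mode == CameraMetadata.DISTORTION_CORRECTION_MODE_FAST) {
                          if (chosen == CameraMetadata.DISTORTION_CORRECTION_MODE_OFF) {
                              chosen = mode;  // Otherwise use FAST if listed.
                          }
                      }
                  }
                  requestBuilder.set(CaptureRequest.DISTORTION_CORRECTION_MODE, chosen);
              }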
          </hal_details>
          <tag id="V1" />
          <tag id="REPROC" />
        </entry>
      </static>
      <dynamic>
        <clone entry="android.distortionCorrection.mode" kind="controls" hal_version="3.3">
        </clone>
      </dynamic>
    </section>
    <section name="heic">
      <static>
        <namespace name="info">
          <entry name="supported" type="byte" visibility="system" enum="true"
                 typedef="boolean" hwlevel="limited" hal_version="3.4">
            <enum>
              <value>FALSE</value>
              <value>TRUE</value>
            </enum>
            <description>Whether this camera device can support an identical set of stream
            combinations involving the HEIC image format, compared to the
            {@link android.hardware.camera2.CameraDevice#createCaptureSession table of combinations}
            involving the JPEG image format required for the device's hardware level and
            capabilities.
            </description>
            <details>
            All the static, control, and dynamic metadata tags related to JPEG apply to HEIC formats
            as well. For example, the same android.jpeg.orientation and android.jpeg.quality are
            used to control the orientation and quality of the HEIC image. Configuring JPEG and
            HEIC streams at the same time is not supported.

            If a camera device supports the HEIC format (ISO/IEC 23008-12), not only does it
            support the existing mandatory stream
            {@link android.hardware.camera2.CameraDevice#createCaptureSession combinations}
            required for the device's hardware level and capabilities, it also supports swapping
            each JPEG stream with a HEIC stream in all guaranteed combinations.

            For every HEIC stream configured by the application, the camera framework sets up two
            internal streams with the camera HAL:

            * A YUV_420_888 or IMPLEMENTATION_DEFINED HAL stream as input to the HEIC or HEVC
              encoder.
            * A BLOB stream with the JPEG_APPS_SEGMENTS dataspace to extract application markers,
              including EXIF and the thumbnail, to be saved in the HEIF container.

            A camera device can output the HEIC format to the application if and only if:

            * The system contains a HEIC or HEVC encoder with constant quality mode (a sketch of
              this check follows the list), and
            * This tag is set to TRUE, meaning that the camera HAL supports replacing JPEG streams
              in all mandatory stream combinations with a [YUV_420_888/IMPLEMENTATION_DEFINED
              stream + JPEG_APPS_SEGMENT BLOB stream] combo.
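            As a rough sketch only (the camera framework's actual check may differ), the first
            condition can be probed through the public media APIs; the constant
            MediaFormat.MIMETYPE_IMAGE_ANDROID_HEIC assumes an API level that defines it:

                boolean hasConstantQualityEncoder = false;
                MediaCodecList list = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
                for (MediaCodecInfo info : list.getCodecInfos()) {
                    if (!info.isEncoder()) {
                        continue;
                    }
                    for (String type : info.getSupportedTypes()) {
                        boolean heicOrHevc =
                            type.equalsIgnoreCase(MediaFormat.MIMETYPE_IMAGE_ANDROID_HEIC)
                            || type.equalsIgnoreCase(MediaFormat.MIMETYPE_VIDEO_HEVC);
                        if (!heicOrHevc) {
                            continue;
                        }
                        // Constant-quality mode is reported through the encoder capabilities.
                        MediaCodecInfo.EncoderCapabilities caps =
                            info.getCapabilitiesForType(type).getEncoderCapabilities();
                        if (caps.isBitrateModeSupported(
                                MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CQ)) {
                            hasConstantQualityEncoder = true;
                        }
                    }
                }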
            As an example, if the camera device's hardware level is LIMITED and it supports HEIC,
            then in addition to the required stream combinations, the HAL must support the below
            stream combinations as well:

            * IMPLEMENTATION_DEFINED/YUV_420_888 MAXIMUM + JPEG_SEGMENTS_BLOB,
            * PRIV PREVIEW + IMPLEMENTATION_DEFINED/YUV_420_888 MAXIMUM + JPEG_SEGMENTS_BLOB,
            * YUV PREVIEW + IMPLEMENTATION_DEFINED/YUV_420_888 MAXIMUM + JPEG_SEGMENTS_BLOB,
            * PRIV PREVIEW + YUV PREVIEW + IMPLEMENTATION_DEFINED/YUV_420_888 MAXIMUM +
              JPEG_SEGMENTS_BLOB

            The selection logic between YUV_420_888 and IMPLEMENTATION_DEFINED for the HAL internal
            stream is as follows:

                if (HEIC encoder exists and supports the size) {
                    use IMPLEMENTATION_DEFINED with GRALLOC_USAGE_HW_IMAGE_ENCODER usage flag;
                } else {
                    // HEVC encoder exists
                    if (size is less than framework predefined tile size) {
                        use IMPLEMENTATION_DEFINED with GRALLOC_USAGE_HW_VIDEO_ENCODER usage flag;
                    } else {
                        use YUV_420_888;
                    }
                }
            </details>
            <tag id="HEIC" />
          </entry>
          <entry name="maxJpegAppSegmentsCount" type="byte" visibility="system"
                 hwlevel="limited" hal_version="3.4">
            <description>The maximum number of JPEG APP segments supported by the camera HAL device.
            </description>
            <details>
            The camera framework will use this value to derive the size of the BLOB buffer with
            the JPEG_APP_SEGMENTS dataspace, with each APP segment occupying at most 64K bytes. If
            the value of this tag is n, the size of the framework-allocated buffer will be:

                n * (2 + 0xFFFF) + sizeof(struct CameraBlob)

            where 2 is the number of bytes for the APP marker, and 0xFFFF is the maximum size per
            APP segment (including the segment size).

            The value of this tag must be at least 1, and the APP1 marker (0xFFE1) segment must be
            the first segment stored in the JPEG_APPS_SEGMENTS BLOB buffer. The APP1 segment stores
            EXIF and the thumbnail.

            Since the media encoder embeds the orientation in the metadata of the output image, to
            be consistent between the main image and the thumbnail, the camera HAL must not rotate
            the thumbnail image data based on android.jpeg.orientation. The framework will write
            the orientation into the EXIF data and the HEIC container.

            The APP1 segment is followed immediately by one or more APP2 segments and APPn
            segments. After the HAL fills and returns the JPEG_APP_SEGMENTS buffer, the camera
            framework modifies the APP1 segment by filling in the EXIF tags that are related to the
            main image bitstream and the tags that can be derived from the capture result metadata,
            before saving them into the HEIC container.

            The value of this tag must not be more than 16.
            </details>
            <tag id="HEIC" />
          </entry>
        </namespace>

        <entry name="availableHeicStreamConfigurations" type="int32" visibility="ndk_public"
               enum="true" container="array" typedef="streamConfiguration"
               hwlevel="limited" hal_version="3.4">
          <array>
            <size>n</size>
            <size>4</size>
          </array>
          <enum>
            <value>OUTPUT</value>
            <value>INPUT</value>
          </enum>
          <description>The available HEIC (ISO/IEC 23008-12) stream
          configurations that this camera device supports
          (i.e. format, width, height, output/input stream).
          </description>
          <details>
          The configurations are listed as `(format, width, height, input?)` tuples.

          If the camera device supports the HEIC image format, it will support an identical set of
          stream combinations involving the HEIC image format, compared to the combinations
          involving the JPEG image format as required by the device's hardware level and
          capabilities.

          All the static, control, and dynamic metadata tags related to JPEG apply to HEIC formats.
          Configuring JPEG and HEIC streams at the same time is not supported.
          </details>
          <ndk_details>
          All the configuration tuples `(format, width, height, input?)` will contain
          AIMAGE_FORMAT_HEIC format as OUTPUT only.
          </ndk_details>
          <hal_details>
          These are output stream configurations for use with dataSpace HAL_DATASPACE_HEIF.

          Do not set this property directly. It is populated by the camera framework and must not
          be set by the HAL layer.
          </hal_details>
          <tag id="HEIC" />
        </entry>
        <entry name="availableHeicMinFrameDurations" type="int64" visibility="ndk_public"
               container="array" typedef="streamConfigurationDuration" hwlevel="limited"
               hal_version="3.4">
          <array>
            <size>4</size>
            <size>n</size>
          </array>
          <description>This lists the minimum frame duration for each
          format/size combination for HEIC output formats.
          </description>
          <units>(format, width, height, ns) x n</units>
          <details>
          This should correspond to the frame duration when only that
          stream is active, with all processing (typically in android.*.mode)
          set to either OFF or FAST.

          When multiple streams are used in a request, the minimum frame
          duration will be max(individual stream min durations).

          See android.sensor.frameDuration and
          android.scaler.availableStallDurations for more details about
          calculating the max frame rate.
          </details>
          <hal_details>
          Do not set this property directly. It is populated by the camera framework and must not
          be set by the HAL layer.
          </hal_details>
          <tag id="HEIC" />
        </entry>
        <entry name="availableHeicStallDurations" type="int64" visibility="ndk_public"
               container="array" typedef="streamConfigurationDuration" hwlevel="limited"
               hal_version="3.4">
          <array>
            <size>4</size>
            <size>n</size>
          </array>
          <description>This lists the maximum stall duration for each
          output format/size combination for HEIC streams.
          </description>
          <units>(format, width, height, ns) x n</units>
          <details>
          A stall duration is how much extra time would get added
          to the normal minimum frame duration for a repeating request
          that has streams with non-zero stall.

          This functions similarly to
          android.scaler.availableStallDurations for HEIC
          streams.

          All HEIC output stream formats may have a nonzero stall
          duration.
          </details>
          <hal_details>
          Do not set this property directly. It is populated by the camera framework and must not
          be set by the HAL layer.
          </hal_details>
          <tag id="HEIC" />
        </entry>
      </static>
    </section>
  </namespace>
</metadata>