/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.example.android.hdrviewfinder;

import android.Manifest;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.provider.Settings;
import android.renderscript.RenderScript;
import android.support.annotation.NonNull;
import android.support.design.widget.Snackbar;
import android.support.v4.app.ActivityCompat;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.util.Size;
import android.view.GestureDetector;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;

import java.util.ArrayList;
import java.util.List;

/**
 * A small demo of advanced camera functionality with the Android camera2 API.
 *
 * <p>This demo implements a real-time high-dynamic-range camera viewfinder,
 * by alternating the sensor's exposure time between two exposure values on even and odd
 * frames, and then compositing together the latest two frames whenever a new frame is
 * captured.</p>
 *
 * <p>The demo has three modes: Regular auto-exposure viewfinder, split-screen manual exposure,
 * and the fused HDR viewfinder. The latter two use manual exposure controlled by the user,
 * by swiping up/down on the right and left halves of the viewfinder. The left half controls
 * the exposure time of even frames, and the right half controls the exposure time of odd frames.
 * </p>
 *
 * <p>In split-screen mode, the even frames are shown on the left and the odd frames on the right,
 * so the user can see two different exposures of the scene simultaneously. In fused HDR mode,
 * the even/odd frames are merged together into a single image. By selecting different exposure
 * values for the even/odd frames, the fused image has a higher dynamic range than the regular
 * viewfinder.</p>
 *
 * <p>The HDR fusion and the split-screen viewfinder processing are done with RenderScript, as is
 * the necessary YUV->RGB conversion. The camera subsystem natively outputs YUV images, while the
 * GPU and display subsystems generally only accept RGB data. Therefore, after the images are
 * fused/composited, a standard YUV->RGB color transform is applied before the data is written
 * to the output Allocation. The HDR fusion algorithm is very simple, and tends to result in
 * lower-contrast scenes, but has very few artifacts and can run very fast.</p>
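 *
 * <p>As a rough illustration of the alternating-exposure idea (the names {@code session},
 * {@code builder}, {@code captureCallback}, {@code handler}, and the exposure values below are
 * placeholders for this sketch, not the exact code used here; see {@link #setHdrBurst()} for the
 * real implementation), a repeating two-request burst with manual exposure looks like:</p>
 *
 * <pre>{@code
 * // Turn off auto-exposure and build one request per exposure value.
 * builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
 * builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, 30000000L); // even frames: 30 ms
 * CaptureRequest evenRequest = builder.build();
 * builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, 2000000L);  // odd frames: 2 ms
 * CaptureRequest oddRequest = builder.build();
 * // Repeat the two-request burst indefinitely; frames then alternate exposures.
 * session.setRepeatingBurst(Arrays.asList(evenRequest, oddRequest), captureCallback, handler);
 * }</pre>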
 *
 * <p>Data is passed between the subsystems (camera, RenderScript, and display) using the
 * Android {@link android.view.Surface} class, which allows for zero-copy transport of large
 * buffers between processes and subsystems.</p>
 */
public class HdrViewfinderActivity extends AppCompatActivity implements
        SurfaceHolder.Callback, CameraOps.ErrorDisplayer, CameraOps.CameraReadyListener {

    private static final String TAG = "HdrViewfinderDemo";

    private static final String FRAGMENT_DIALOG = "dialog";

    private static final int REQUEST_PERMISSIONS_REQUEST_CODE = 34;

    /**
     * View for the camera preview.
     */
    private FixedAspectSurfaceView mPreviewView;

    /**
     * Root view of this activity.
     */
    private View rootView;

    /**
     * This shows the current mode of the app.
     */
    private TextView mModeText;

    // These show the exposure times for even frames, odd frames, and auto-exposure.
    private TextView mEvenExposureText, mOddExposureText, mAutoExposureText;

    private Handler mUiHandler;

    private CameraCharacteristics mCameraInfo;

    private Surface mPreviewSurface;
    private Surface mProcessingHdrSurface;
    private Surface mProcessingNormalSurface;
    CaptureRequest.Builder mHdrBuilder;
    ArrayList<CaptureRequest> mHdrRequests = new ArrayList<CaptureRequest>(2);

    CaptureRequest mPreviewRequest;

    RenderScript mRS;
    ViewfinderProcessor mProcessor;
    CameraManager mCameraManager;
    CameraOps mCameraOps;

    private int mRenderMode = ViewfinderProcessor.MODE_NORMAL;

    // Durations in nanoseconds
    private static final long MICRO_SECOND = 1000;
    private static final long MILLI_SECOND = MICRO_SECOND * 1000;
    private static final long ONE_SECOND = MILLI_SECOND * 1000;

    private long mOddExposure = ONE_SECOND / 33;
    private long mEvenExposure = ONE_SECOND / 33;

    private Object mOddExposureTag = new Object();
    private Object mEvenExposureTag = new Object();
    private Object mAutoExposureTag = new Object();

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);

        rootView = findViewById(R.id.panels);

        mPreviewView = (FixedAspectSurfaceView) findViewById(R.id.preview);
        mPreviewView.getHolder().addCallback(this);
        mPreviewView.setGestureListener(this, mViewListener);

        Button helpButton = (Button) findViewById(R.id.help_button);
        helpButton.setOnClickListener(mHelpButtonListener);

        mModeText = (TextView) findViewById(R.id.mode_label);
        mEvenExposureText = (TextView) findViewById(R.id.even_exposure);
        mOddExposureText = (TextView) findViewById(R.id.odd_exposure);
        mAutoExposureText = (TextView) findViewById(R.id.auto_exposure);

        mUiHandler = new Handler(Looper.getMainLooper());

        mRS = RenderScript.create(this);

        // When permissions are revoked the app is restarted, so onCreate is sufficient to check for
        // permissions core to the Activity's functionality.
        if (!checkCameraPermissions()) {
            requestCameraPermissions();
        } else {
            findAndOpenCamera();
        }
    }

    @Override
    protected void onResume() {
        super.onResume();
    }

    @Override
    protected void onPause() {
        super.onPause();

        // Wait until camera is closed to ensure the next application can open it
        if (mCameraOps != null) {
            mCameraOps.closeCameraAndWait();
            mCameraOps = null;
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.main, menu);
        return super.onCreateOptionsMenu(menu);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case R.id.info: {
                MessageDialogFragment.newInstance(R.string.intro_message)
                        .show(getFragmentManager(), FRAGMENT_DIALOG);
                break;
            }
        }
        return super.onOptionsItemSelected(item);
    }

    private GestureDetector.OnGestureListener mViewListener
            = new GestureDetector.SimpleOnGestureListener() {

        @Override
        public boolean onDown(MotionEvent e) {
            return true;
        }

        @Override
        public boolean onSingleTapUp(MotionEvent e) {
            switchRenderMode(1);
            return true;
        }

        @Override
        public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) {
            if (mRenderMode == ViewfinderProcessor.MODE_NORMAL) return false;

            float xPosition = e1.getAxisValue(MotionEvent.AXIS_X);
            float width = mPreviewView.getWidth();
            float height = mPreviewView.getHeight();

            float xPosNorm = xPosition / width;
            float yDistNorm = distanceY / height;

            // A drag across the full height of the view scales the selected exposure time by
            // 2^ACCELERATION_FACTOR.
            final float ACCELERATION_FACTOR = 8;
            double scaleFactor = Math.pow(2.f, yDistNorm * ACCELERATION_FACTOR);

            // Even on left, odd on right
            if (xPosNorm > 0.5) {
                mOddExposure *= scaleFactor;
            } else {
                mEvenExposure *= scaleFactor;
            }

            setHdrBurst();

            return true;
        }
    };

    /**
     * Show help dialogs.
     */
    private View.OnClickListener mHelpButtonListener = new View.OnClickListener() {
        public void onClick(View v) {
            MessageDialogFragment.newInstance(R.string.help_text)
                    .show(getFragmentManager(), FRAGMENT_DIALOG);
        }
    };

    /**
     * Return the current state of the camera permissions.
     */
    private boolean checkCameraPermissions() {
        int permissionState = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);

        // Check if the Camera permission is already available.
        if (permissionState != PackageManager.PERMISSION_GRANTED) {
            // Camera permission has not been granted.
            Log.i(TAG, "CAMERA permission has NOT been granted.");
            return false;
        } else {
            // Camera permissions are available.
            Log.i(TAG, "CAMERA permission has already been granted.");
            return true;
        }
    }

    /**
     * Attempt to initialize the camera.
     */
    private void initializeCamera() {
        mCameraManager = (CameraManager) getSystemService(CAMERA_SERVICE);
        if (mCameraManager != null) {
            mCameraOps = new CameraOps(mCameraManager,
                    /*errorDisplayer*/ this,
                    /*readyListener*/ this,
                    /*readyHandler*/ mUiHandler);

            mHdrRequests.add(null);
            mHdrRequests.add(null);
        } else {
            Log.e(TAG, "Couldn't initialize the camera");
        }
    }

    private void requestCameraPermissions() {
        boolean shouldProvideRationale =
                ActivityCompat.shouldShowRequestPermissionRationale(this,
                        Manifest.permission.CAMERA);

        // Provide an additional rationale to the user. This would happen if the user denied the
        // request previously, but didn't check the "Don't ask again" checkbox.
        if (shouldProvideRationale) {
            Log.i(TAG, "Displaying camera permission rationale to provide additional context.");
            Snackbar.make(rootView, R.string.camera_permission_rationale, Snackbar
                    .LENGTH_INDEFINITE)
                    .setAction(R.string.ok, new View.OnClickListener() {
                        @Override
                        public void onClick(View view) {
                            // Request Camera permission
                            ActivityCompat.requestPermissions(HdrViewfinderActivity.this,
                                    new String[]{Manifest.permission.CAMERA},
                                    REQUEST_PERMISSIONS_REQUEST_CODE);
                        }
                    })
                    .show();
        } else {
            Log.i(TAG, "Requesting camera permission");
            // Request Camera permission. It's possible this can be auto-answered if device policy
            // sets the permission in a given state or the user denied the permission
            // previously and checked "Never ask again".
            ActivityCompat.requestPermissions(HdrViewfinderActivity.this,
                    new String[]{Manifest.permission.CAMERA},
                    REQUEST_PERMISSIONS_REQUEST_CODE);
        }
    }

    /**
     * Callback received when a permissions request has been completed.
     */
    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
            @NonNull int[] grantResults) {
        Log.i(TAG, "onRequestPermissionResult");
        if (requestCode == REQUEST_PERMISSIONS_REQUEST_CODE) {
            if (grantResults.length <= 0) {
                // If user interaction was interrupted, the permission request is cancelled and you
                // receive empty arrays.
                Log.i(TAG, "User interaction was cancelled.");
            } else if (grantResults[0] == PackageManager.PERMISSION_GRANTED) {
                // Permission was granted.
                findAndOpenCamera();
            } else {
                // Permission denied.

                // In this Activity we've chosen to notify the user that they
                // have rejected a core permission for the app since it makes the Activity useless.
                // We're communicating this message in a Snackbar since this is a sample app, but
                // core permissions would typically be best requested during a welcome-screen flow.

                // Additionally, it is important to remember that a permission might have been
                // rejected without asking the user for permission (device policy or "Never ask
                // again" prompts). Therefore, a user interface affordance is typically implemented
                // when permissions are denied. Otherwise, your app could appear unresponsive to
                // touches or interactions which require permissions.
                Snackbar.make(rootView, R.string.camera_permission_denied_explanation, Snackbar
                        .LENGTH_INDEFINITE)
                        .setAction(R.string.settings, new View.OnClickListener() {
                            @Override
                            public void onClick(View view) {
                                // Build intent that displays the App settings screen.
                                Intent intent = new Intent();
                                intent.setAction(Settings.ACTION_APPLICATION_DETAILS_SETTINGS);
                                Uri uri = Uri.fromParts("package", BuildConfig.APPLICATION_ID, null);
                                intent.setData(uri);
                                intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
                                startActivity(intent);
                            }
                        })
                        .show();
            }
        }
    }

    private void findAndOpenCamera() {
        boolean cameraPermissions = checkCameraPermissions();
        if (cameraPermissions) {
            String errorMessage = "Unknown error";
            boolean foundCamera = false;
            initializeCamera();
            if (cameraPermissions && mCameraOps != null) {
                try {
                    // Find the first back-facing camera that has the necessary capabilities.
                    String[] cameraIds = mCameraManager.getCameraIdList();
                    for (String id : cameraIds) {
                        CameraCharacteristics info = mCameraManager.getCameraCharacteristics(id);
                        int facing = info.get(CameraCharacteristics.LENS_FACING);

                        int level = info.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
                        boolean hasFullLevel = (level
                                == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL);

                        int[] capabilities = info
                                .get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
                        int syncLatency = info.get(CameraCharacteristics.SYNC_MAX_LATENCY);
                        boolean hasManualControl = hasCapability(capabilities,
                                CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
                        boolean hasEnoughCapability = hasManualControl &&
                                syncLatency == CameraCharacteristics.SYNC_MAX_LATENCY_PER_FRAME_CONTROL;

                        // All these are guaranteed by
                        // CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL, but checking
                        // for only the things we care about expands the range of devices we can run on.
                        // We want:
                        // - Back-facing camera
                        // - Manual sensor control
                        // - Per-frame synchronization (so that exposure can be changed every frame)
                        if (facing == CameraCharacteristics.LENS_FACING_BACK &&
                                (hasFullLevel || hasEnoughCapability)) {
                            // Found suitable camera - get info, open, and set up outputs
                            mCameraInfo = info;
                            mCameraOps.openCamera(id);
                            configureSurfaces();
                            foundCamera = true;
                            break;
                        }
                    }
                    if (!foundCamera) {
                        errorMessage = getString(R.string.camera_no_good);
                    }
                } catch (CameraAccessException e) {
                    errorMessage = getErrorString(e);
                }
                if (!foundCamera) {
                    showErrorDialog(errorMessage);
                }
            }
        }
    }

    private boolean hasCapability(int[] capabilities, int capability) {
        for (int c : capabilities) {
            if (c == capability) return true;
        }
        return false;
    }

    private void switchRenderMode(int direction) {
        if (mCameraOps != null) {
            mRenderMode = (mRenderMode + direction) % 3;

            mModeText.setText(getResources().getStringArray(R.array.mode_label_array)[mRenderMode]);

            if (mProcessor != null) {
                mProcessor.setRenderMode(mRenderMode);
            }
            if (mRenderMode == ViewfinderProcessor.MODE_NORMAL) {
                mCameraOps.setRepeatingRequest(mPreviewRequest,
                        mCaptureCallback, mUiHandler);
            } else {
                setHdrBurst();
            }
        }
    }

    /**
     * Configure the SurfaceView and RS processing.
     */
    private void configureSurfaces() {
        // Find a good size for output - the largest 16:9 aspect ratio output that's at most 720p
        final int MAX_WIDTH = 1280;
        final float TARGET_ASPECT = 16.f / 9.f;
        final float ASPECT_TOLERANCE = 0.1f;

        StreamConfigurationMap configs =
                mCameraInfo.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

        Size[] outputSizes = configs.getOutputSizes(SurfaceHolder.class);

        Size outputSize = outputSizes[0];
        float outputAspect = (float) outputSize.getWidth() / outputSize.getHeight();
        for (Size candidateSize : outputSizes) {
            if (candidateSize.getWidth() > MAX_WIDTH) continue;
            float candidateAspect = (float) candidateSize.getWidth() / candidateSize.getHeight();
            boolean goodCandidateAspect =
                    Math.abs(candidateAspect - TARGET_ASPECT) < ASPECT_TOLERANCE;
            boolean goodOutputAspect =
                    Math.abs(outputAspect - TARGET_ASPECT) < ASPECT_TOLERANCE;
            if ((goodCandidateAspect && !goodOutputAspect) ||
                    candidateSize.getWidth() > outputSize.getWidth()) {
                outputSize = candidateSize;
                outputAspect = candidateAspect;
            }
        }
        Log.i(TAG, "Resolution chosen: " + outputSize);

        // Configure processing
        mProcessor = new ViewfinderProcessor(mRS, outputSize);
        setupProcessor();

        // Configure the output view - this will fire surfaceChanged
        mPreviewView.setAspectRatio(outputAspect);
        mPreviewView.getHolder().setFixedSize(outputSize.getWidth(), outputSize.getHeight());
    }

    /**
     * Once the camera is open and the output surfaces are ready, configure the RS processing
     * and the camera device inputs/outputs.
     */
    private void setupProcessor() {
        if (mProcessor == null || mPreviewSurface == null) return;

        mProcessor.setOutputSurface(mPreviewSurface);
        mProcessingHdrSurface = mProcessor.getInputHdrSurface();
        mProcessingNormalSurface = mProcessor.getInputNormalSurface();

        List<Surface> cameraOutputSurfaces = new ArrayList<Surface>();
        cameraOutputSurfaces.add(mProcessingHdrSurface);
        cameraOutputSurfaces.add(mProcessingNormalSurface);

        mCameraOps.setSurfaces(cameraOutputSurfaces);
    }

    /**
     * Start running an HDR burst on a configured camera session.
     */
    public void setHdrBurst() {

        mHdrBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, 1600);
        mHdrBuilder.set(CaptureRequest.SENSOR_FRAME_DURATION, ONE_SECOND / 30);

        mHdrBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, mEvenExposure);
        mHdrBuilder.setTag(mEvenExposureTag);
        mHdrRequests.set(0, mHdrBuilder.build());

        mHdrBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, mOddExposure);
        mHdrBuilder.setTag(mOddExposureTag);
        mHdrRequests.set(1, mHdrBuilder.build());

        mCameraOps.setRepeatingBurst(mHdrRequests, mCaptureCallback, mUiHandler);
    }

    /**
     * Listener for completed captures; invoked on the UI thread.
     */
    private CameraCaptureSession.CaptureCallback mCaptureCallback
            = new CameraCaptureSession.CaptureCallback() {

        public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
                TotalCaptureResult result) {

            // Only update the UI every so many frames.
            // Use an odd number here to ensure both even and odd exposures get an occasional update.
            long frameNumber = result.getFrameNumber();
            if (frameNumber % 3 != 0) return;

            long exposureTime = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);

            // Format the exposure time nicely
            String exposureText;
            if (exposureTime > ONE_SECOND) {
                exposureText = String.format("%.2f s", exposureTime / 1e9);
            } else if (exposureTime > MILLI_SECOND) {
                exposureText = String.format("%.2f ms", exposureTime / 1e6);
            } else if (exposureTime > MICRO_SECOND) {
                exposureText = String.format("%.2f us", exposureTime / 1e3);
            } else {
                exposureText = String.format("%d ns", exposureTime);
            }

            Object tag = request.getTag();
            Log.i(TAG, "Exposure: " + exposureText);

            if (tag == mEvenExposureTag) {
                mEvenExposureText.setText(exposureText);

                mEvenExposureText.setEnabled(true);
                mOddExposureText.setEnabled(true);
                mAutoExposureText.setEnabled(false);
            } else if (tag == mOddExposureTag) {
                mOddExposureText.setText(exposureText);

                mEvenExposureText.setEnabled(true);
                mOddExposureText.setEnabled(true);
                mAutoExposureText.setEnabled(false);
            } else {
                mAutoExposureText.setText(exposureText);

                mEvenExposureText.setEnabled(false);
                mOddExposureText.setEnabled(false);
                mAutoExposureText.setEnabled(true);
            }
        }
    };

    /**
     * Callbacks for the FixedAspectSurfaceView
     */

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        mPreviewSurface = holder.getSurface();

        setupProcessor();
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        // ignored
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        mPreviewSurface = null;
    }

    /**
     * Callbacks for CameraOps
     */
    @Override
    public void onCameraReady() {
        // Ready to send requests in, so set them up
        try {
            CaptureRequest.Builder previewBuilder =
                    mCameraOps.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            previewBuilder.addTarget(mProcessingNormalSurface);
            previewBuilder.setTag(mAutoExposureTag);
            mPreviewRequest = previewBuilder.build();

            mHdrBuilder =
                    mCameraOps.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            mHdrBuilder.set(CaptureRequest.CONTROL_AE_MODE,
                    CaptureRequest.CONTROL_AE_MODE_OFF);
            mHdrBuilder.addTarget(mProcessingHdrSurface);

            switchRenderMode(0);

        } catch (CameraAccessException e) {
            String errorMessage = getErrorString(e);
            showErrorDialog(errorMessage);
        }
    }

    /**
     * Utility methods
     */
    @Override
    public void showErrorDialog(String errorMessage) {
        MessageDialogFragment.newInstance(errorMessage).show(getFragmentManager(), FRAGMENT_DIALOG);
    }

    @Override
    public String getErrorString(CameraAccessException e) {
        String errorMessage;
        switch (e.getReason()) {
            case CameraAccessException.CAMERA_DISABLED:
                errorMessage = getString(R.string.camera_disabled);
                break;
            case CameraAccessException.CAMERA_DISCONNECTED:
                errorMessage = getString(R.string.camera_disconnected);
                break;
            case CameraAccessException.CAMERA_ERROR:
                errorMessage = getString(R.string.camera_error);
                break;
            default:
                errorMessage = getString(R.string.camera_unknown, e.getReason());
                break;
        }
        return errorMessage;
    }

}