1 /*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 /*
18 * Contains implementation of a class EmulatedFakeCamera2 that encapsulates
19 * functionality of an advanced fake camera.
20 */
21
22 #include <inttypes.h>
23
24 #include <algorithm>
25 #include <cstdint>
26 #include <iterator>
27
28 #define LOG_NDEBUG 0
29 #define LOG_TAG "EmulatedCamera_FakeCamera2"
30 #include <utils/Log.h>
31
32 #include "EmulatedCameraFactory.h"
33 #include "EmulatedFakeCamera2.h"
34 #include "GrallocModule.h"
35
36 #define ERROR_CAMERA_NOT_PRESENT -EPIPE
37
38 #define CAMERA2_EXT_TRIGGER_TESTING_DISCONNECT 0xFFFFFFFF
39
// Compile-time array-length helpers: ArraySizeHelper is only declared (never
// defined) so that sizeof() of its char[N] return type yields the element
// count of a real array. Pointers do not bind to the reference-to-array
// parameter, so arraysize(ptr) fails to compile rather than silently
// returning a wrong value.
template <typename T, size_t N>
char (&ArraySizeHelper(T (&array)[N]))[N];

template <typename T, size_t N>
char (&ArraySizeHelper(const T (&array)[N]))[N];

// Number of elements in a statically-sized array (compile-time constant).
#define arraysize(array) (sizeof(ArraySizeHelper(array)))
47
48 namespace android {
49
// Time-unit constants, expressed in nanoseconds.
const int64_t USEC = 1000LL;         // nanoseconds per microsecond
const int64_t MSEC = USEC * 1000LL;  // nanoseconds per millisecond
const int64_t SEC = MSEC * 1000LL;   // nanoseconds per second
53
// Output pixel formats the fake camera advertises.
const uint32_t EmulatedFakeCamera2::kAvailableFormats[] = {
    HAL_PIXEL_FORMAT_RAW16,
    HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_RGBA_8888,
    //        HAL_PIXEL_FORMAT_YV12,
    HAL_PIXEL_FORMAT_YCrCb_420_SP};

// All size arrays below are flattened (width, height) pairs; these static
// defaults are appended to the per-configuration sizes in Initialize().
const uint32_t EmulatedFakeCamera2::kAvailableRawSizes[2] = {
    640, 480
    //    mSensorWidth, mSensorHeight
};

// Minimum frame durations in nanoseconds, taken from the sensor model.
const uint64_t EmulatedFakeCamera2::kAvailableRawMinDurations[1] = {
    static_cast<uint64_t>(Sensor::kFrameDurationRange[0])};

const uint32_t EmulatedFakeCamera2::kAvailableProcessedSizesBack[4] = {
    640, 480, 320, 240
    //    mSensorWidth, mSensorHeight
};

const uint32_t EmulatedFakeCamera2::kAvailableProcessedSizesFront[4] = {
    320, 240, 160, 120
    //    mSensorWidth, mSensorHeight
};

const uint64_t EmulatedFakeCamera2::kAvailableProcessedMinDurations[1] = {
    static_cast<uint64_t>(Sensor::kFrameDurationRange[0])};

const uint32_t EmulatedFakeCamera2::kAvailableJpegSizesBack[2] = {
    640, 480
    //    mSensorWidth, mSensorHeight
};

const uint32_t EmulatedFakeCamera2::kAvailableJpegSizesFront[2] = {
    320, 240
    //    mSensorWidth, mSensorHeight
};

const uint64_t EmulatedFakeCamera2::kAvailableJpegMinDurations[1] = {
    static_cast<uint64_t>(Sensor::kFrameDurationRange[0])};
93
EmulatedFakeCamera2(int cameraId,bool facingBack,struct hw_module_t * module)94 EmulatedFakeCamera2::EmulatedFakeCamera2(int cameraId, bool facingBack,
95 struct hw_module_t *module)
96 : EmulatedCamera2(cameraId, module),
97 mFacingBack(facingBack),
98 mIsConnected(false) {
99 ALOGD("Constructing emulated fake camera 2 facing %s",
100 facingBack ? "back" : "front");
101 }
102
~EmulatedFakeCamera2()103 EmulatedFakeCamera2::~EmulatedFakeCamera2() {
104 if (mCameraInfo != NULL) {
105 free_camera_metadata(mCameraInfo);
106 }
107 }
108
109 /****************************************************************************
110 * Public API overrides
111 ***************************************************************************/
112
Initialize(const cuttlefish::CameraDefinition & params)113 status_t EmulatedFakeCamera2::Initialize(const cuttlefish::CameraDefinition ¶ms) {
114 status_t res;
115
116 for (size_t index = 0; index < params.resolutions.size(); ++index) {
117 mAvailableRawSizes.push_back(params.resolutions[index].width);
118 mAvailableRawSizes.push_back(params.resolutions[index].height);
119 mAvailableProcessedSizes.push_back(params.resolutions[index].width);
120 mAvailableProcessedSizes.push_back(params.resolutions[index].height);
121 mAvailableJpegSizes.push_back(params.resolutions[index].width);
122 mAvailableJpegSizes.push_back(params.resolutions[index].height);
123 }
124
125 // Find max width/height
126 int32_t width = 0, height = 0;
127 for (size_t index = 0; index < params.resolutions.size(); ++index) {
128 if (width <= params.resolutions[index].width &&
129 height <= params.resolutions[index].height) {
130 width = params.resolutions[index].width;
131 height = params.resolutions[index].height;
132 }
133 }
134 if (width < 640 || height < 480) {
135 width = 640;
136 height = 480;
137 }
138 mSensorWidth = width;
139 mSensorHeight = height;
140
141 /* TODO(ender): probably should drop this. */
142 std::copy(kAvailableRawSizes,
143 kAvailableRawSizes + arraysize(kAvailableRawSizes),
144 std::back_inserter(mAvailableRawSizes));
145
146 if (params.orientation == cuttlefish::CameraDefinition::kFront) {
147 std::copy(kAvailableProcessedSizesFront,
148 kAvailableProcessedSizesFront +
149 arraysize(kAvailableProcessedSizesFront),
150 std::back_inserter(mAvailableProcessedSizes));
151 std::copy(kAvailableJpegSizesFront,
152 kAvailableJpegSizesFront + arraysize(kAvailableJpegSizesFront),
153 std::back_inserter(mAvailableJpegSizes));
154 } else {
155 std::copy(
156 kAvailableProcessedSizesBack,
157 kAvailableProcessedSizesBack + arraysize(kAvailableProcessedSizesBack),
158 mAvailableProcessedSizes.begin());
159 std::copy(kAvailableJpegSizesBack,
160 kAvailableJpegSizesBack + arraysize(kAvailableJpegSizesBack),
161 mAvailableJpegSizes.begin());
162 }
163
164 res = constructStaticInfo(&mCameraInfo, true);
165 if (res != OK) {
166 ALOGE("%s: Unable to allocate static info: %s (%d)", __FUNCTION__,
167 strerror(-res), res);
168 return res;
169 }
170 res = constructStaticInfo(&mCameraInfo, false);
171 if (res != OK) {
172 ALOGE("%s: Unable to fill in static info: %s (%d)", __FUNCTION__,
173 strerror(-res), res);
174 return res;
175 }
176 if (res != OK) return res;
177
178 mNextStreamId = 1;
179 mNextReprocessStreamId = 1;
180 mRawStreamCount = 0;
181 mProcessedStreamCount = 0;
182 mJpegStreamCount = 0;
183 mReprocessStreamCount = 0;
184
185 return NO_ERROR;
186 }
187
188 /****************************************************************************
189 * Camera module API overrides
190 ***************************************************************************/
191
connectCamera(hw_device_t ** device)192 status_t EmulatedFakeCamera2::connectCamera(hw_device_t **device) {
193 status_t res;
194 ALOGV("%s", __FUNCTION__);
195
196 {
197 Mutex::Autolock l(mMutex);
198 if (!mStatusPresent) {
199 ALOGE("%s: Camera ID %d is unplugged", __FUNCTION__, mCameraID);
200 return -ENODEV;
201 }
202 }
203
204 mConfigureThread = new ConfigureThread(this);
205 mReadoutThread = new ReadoutThread(this);
206 mControlThread = new ControlThread(this);
207 mSensor = new Sensor(mSensorWidth, mSensorHeight);
208 mJpegCompressor = new JpegCompressor();
209
210 mNextStreamId = 1;
211 mNextReprocessStreamId = 1;
212
213 res = mSensor->startUp();
214 if (res != NO_ERROR) return res;
215
216 res = mConfigureThread->run("EmulatedFakeCamera2::configureThread");
217 if (res != NO_ERROR) return res;
218
219 res = mReadoutThread->run("EmulatedFakeCamera2::readoutThread");
220 if (res != NO_ERROR) return res;
221
222 res = mControlThread->run("EmulatedFakeCamera2::controlThread");
223 if (res != NO_ERROR) return res;
224
225 status_t ret = EmulatedCamera2::connectCamera(device);
226
227 if (ret >= 0) {
228 mIsConnected = true;
229 }
230
231 return ret;
232 }
233
plugCamera()234 status_t EmulatedFakeCamera2::plugCamera() {
235 {
236 Mutex::Autolock l(mMutex);
237
238 if (!mStatusPresent) {
239 ALOGI("%s: Plugged back in", __FUNCTION__);
240 mStatusPresent = true;
241 }
242 }
243
244 return NO_ERROR;
245 }
246
// Marks the camera as physically removed, then shuts the device down.
status_t EmulatedFakeCamera2::unplugCamera() {
  {
    // Scope the lock: closeCamera() acquires mMutex itself, so it must be
    // released before the call below to avoid self-deadlock.
    Mutex::Autolock l(mMutex);

    if (mStatusPresent) {
      ALOGI("%s: Unplugged camera", __FUNCTION__);
      mStatusPresent = false;
    }
  }

  return closeCamera();
}
259
getHotplugStatus()260 camera_device_status_t EmulatedFakeCamera2::getHotplugStatus() {
261 Mutex::Autolock l(mMutex);
262 return mStatusPresent ? CAMERA_DEVICE_STATUS_PRESENT
263 : CAMERA_DEVICE_STATUS_NOT_PRESENT;
264 }
265
// Shuts down the pipeline: stops the sensor, asks each worker thread to
// exit, then joins them. No-op if the camera was never connected.
status_t EmulatedFakeCamera2::closeCamera() {
  {
    Mutex::Autolock l(mMutex);

    status_t res;
    ALOGV("%s", __FUNCTION__);

    if (!mIsConnected) {
      return NO_ERROR;
    }

    res = mSensor->shutDown();
    if (res != NO_ERROR) {
      ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
      return res;
    }

    // Request exits under the lock; the threads observe the flags on their
    // next loop iteration.
    mConfigureThread->requestExit();
    mReadoutThread->requestExit();
    mControlThread->requestExit();
    mJpegCompressor->cancel();
  }

  // give up the lock since we will now block and the threads
  // can call back into this object
  mConfigureThread->join();
  mReadoutThread->join();
  mControlThread->join();

  ALOGV("%s exit", __FUNCTION__);

  {
    Mutex::Autolock l(mMutex);
    mIsConnected = false;
  }

  return NO_ERROR;
}
304
getCameraInfo(struct camera_info * info)305 status_t EmulatedFakeCamera2::getCameraInfo(struct camera_info *info) {
306 info->facing = mFacingBack ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT;
307 info->orientation =
308 EmulatedCameraFactory::Instance().getFakeCameraOrientation();
309 return EmulatedCamera2::getCameraInfo(info);
310 }
311
312 /****************************************************************************
313 * Camera device API overrides
314 ***************************************************************************/
315
316 /** Request input queue */
317
requestQueueNotify()318 int EmulatedFakeCamera2::requestQueueNotify() {
319 ALOGV("Request queue notification received");
320
321 ALOG_ASSERT(mRequestQueueSrc != NULL,
322 "%s: Request queue src not set, but received queue notification!",
323 __FUNCTION__);
324 ALOG_ASSERT(mFrameQueueDst != NULL,
325 "%s: Request queue src not set, but received queue notification!",
326 __FUNCTION__);
327 ALOG_ASSERT(mStreams.size() != 0,
328 "%s: No streams allocated, but received queue notification!",
329 __FUNCTION__);
330 return mConfigureThread->newRequestAvailable();
331 }
332
getInProgressCount()333 int EmulatedFakeCamera2::getInProgressCount() {
334 Mutex::Autolock l(mMutex);
335
336 if (!mStatusPresent) {
337 ALOGW("%s: Camera was physically disconnected", __FUNCTION__);
338 return ERROR_CAMERA_NOT_PRESENT;
339 }
340
341 int requestCount = 0;
342 requestCount += mConfigureThread->getInProgressCount();
343 requestCount += mReadoutThread->getInProgressCount();
344 requestCount += mJpegCompressor->isBusy() ? 1 : 0;
345
346 return requestCount;
347 }
348
constructDefaultRequest(int request_template,camera_metadata_t ** request)349 int EmulatedFakeCamera2::constructDefaultRequest(int request_template,
350 camera_metadata_t **request) {
351 if (request == NULL) return BAD_VALUE;
352 if (request_template < 0 || request_template >= CAMERA2_TEMPLATE_COUNT) {
353 return BAD_VALUE;
354 }
355
356 {
357 Mutex::Autolock l(mMutex);
358 if (!mStatusPresent) {
359 ALOGW("%s: Camera was physically disconnected", __FUNCTION__);
360 return ERROR_CAMERA_NOT_PRESENT;
361 }
362 }
363
364 status_t res;
365 // Pass 1, calculate size and allocate
366 res = constructDefaultRequest(request_template, request, true);
367 if (res != OK) {
368 return res;
369 }
370 // Pass 2, build request
371 res = constructDefaultRequest(request_template, request, false);
372 if (res != OK) {
373 ALOGE("Unable to populate new request for template %d", request_template);
374 }
375
376 return res;
377 }
378
allocateStream(uint32_t width,uint32_t height,int format,const camera2_stream_ops_t * stream_ops,uint32_t * stream_id,uint32_t * format_actual,uint32_t * usage,uint32_t * max_buffers)379 int EmulatedFakeCamera2::allocateStream(
380 uint32_t width, uint32_t height, int format,
381 const camera2_stream_ops_t *stream_ops, uint32_t *stream_id,
382 uint32_t *format_actual, uint32_t *usage, uint32_t *max_buffers) {
383 Mutex::Autolock l(mMutex);
384
385 if (!mStatusPresent) {
386 ALOGW("%s: Camera was physically disconnected", __FUNCTION__);
387 return ERROR_CAMERA_NOT_PRESENT;
388 }
389
390 // Temporary shim until FORMAT_ZSL is removed
391 if (format == CAMERA2_HAL_PIXEL_FORMAT_ZSL) {
392 format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
393 }
394
395 if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
396 unsigned int numFormats = sizeof(kAvailableFormats) / sizeof(uint32_t);
397 unsigned int formatIdx = 0;
398 for (; formatIdx < numFormats; formatIdx++) {
399 if (format == (int)kAvailableFormats[formatIdx]) break;
400 }
401 if (formatIdx == numFormats) {
402 ALOGE("%s: Format 0x%x is not supported", __FUNCTION__, format);
403 return BAD_VALUE;
404 }
405 }
406
407 const uint32_t *availableSizes;
408 size_t availableSizeCount;
409 switch (format) {
410 case HAL_PIXEL_FORMAT_RAW16:
411 availableSizes = &mAvailableRawSizes.front();
412 availableSizeCount = mAvailableRawSizes.size();
413 break;
414 case HAL_PIXEL_FORMAT_BLOB:
415 availableSizes = &mAvailableJpegSizes.front();
416 availableSizeCount = mAvailableJpegSizes.size();
417 break;
418 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
419 case HAL_PIXEL_FORMAT_RGBA_8888:
420 case HAL_PIXEL_FORMAT_YV12:
421 case HAL_PIXEL_FORMAT_YCrCb_420_SP:
422 availableSizes = &mAvailableProcessedSizes.front();
423 availableSizeCount = mAvailableProcessedSizes.size();
424 break;
425 default:
426 ALOGE("%s: Unknown format 0x%x", __FUNCTION__, format);
427 return BAD_VALUE;
428 }
429
430 unsigned int resIdx = 0;
431 for (; resIdx < availableSizeCount; resIdx++) {
432 if (availableSizes[resIdx * 2] == width &&
433 availableSizes[resIdx * 2 + 1] == height)
434 break;
435 }
436 if (resIdx == availableSizeCount) {
437 ALOGE("%s: Format 0x%x does not support resolution %d, %d", __FUNCTION__,
438 format, width, height);
439 return BAD_VALUE;
440 }
441
442 switch (format) {
443 case HAL_PIXEL_FORMAT_RAW16:
444 if (mRawStreamCount >= kMaxRawStreamCount) {
445 ALOGE("%s: Cannot allocate another raw stream (%d already allocated)",
446 __FUNCTION__, mRawStreamCount);
447 return INVALID_OPERATION;
448 }
449 mRawStreamCount++;
450 break;
451 case HAL_PIXEL_FORMAT_BLOB:
452 if (mJpegStreamCount >= kMaxJpegStreamCount) {
453 ALOGE("%s: Cannot allocate another JPEG stream (%d already allocated)",
454 __FUNCTION__, mJpegStreamCount);
455 return INVALID_OPERATION;
456 }
457 mJpegStreamCount++;
458 break;
459 default:
460 if (mProcessedStreamCount >= kMaxProcessedStreamCount) {
461 ALOGE(
462 "%s: Cannot allocate another processed stream (%d already "
463 "allocated)",
464 __FUNCTION__, mProcessedStreamCount);
465 return INVALID_OPERATION;
466 }
467 mProcessedStreamCount++;
468 }
469
470 Stream newStream;
471 newStream.ops = stream_ops;
472 newStream.width = width;
473 newStream.height = height;
474 newStream.format = format;
475 // TODO: Query stride from gralloc
476 newStream.stride = width;
477
478 mStreams.add(mNextStreamId, newStream);
479
480 *stream_id = mNextStreamId;
481 if (format_actual) *format_actual = format;
482 *usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
483 *max_buffers = kMaxBufferCount;
484
485 ALOGV("Stream allocated: %d, %d x %d, 0x%x. U: %x, B: %d", *stream_id, width,
486 height, format, *usage, *max_buffers);
487
488 mNextStreamId++;
489 return NO_ERROR;
490 }
491
registerStreamBuffers(uint32_t stream_id,int num_buffers,buffer_handle_t *)492 int EmulatedFakeCamera2::registerStreamBuffers(uint32_t stream_id,
493 int num_buffers,
494 buffer_handle_t * /*buffers*/) {
495 Mutex::Autolock l(mMutex);
496
497 if (!mStatusPresent) {
498 ALOGW("%s: Camera was physically disconnected", __FUNCTION__);
499 return ERROR_CAMERA_NOT_PRESENT;
500 }
501
502 ALOGV("%s: Stream %d registering %d buffers", __FUNCTION__, stream_id,
503 num_buffers);
504 // Need to find out what the final concrete pixel format for our stream is
505 // Assumes that all buffers have the same format.
506 if (num_buffers < 1) {
507 ALOGE("%s: Stream %d only has %d buffers!", __FUNCTION__, stream_id,
508 num_buffers);
509 return BAD_VALUE;
510 }
511
512 ssize_t streamIndex = mStreams.indexOfKey(stream_id);
513 if (streamIndex < 0) {
514 ALOGE("%s: Unknown stream id %d!", __FUNCTION__, stream_id);
515 return BAD_VALUE;
516 }
517
518 Stream &stream = mStreams.editValueAt(streamIndex);
519
520 int finalFormat = stream.format;
521
522 if (finalFormat == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
523 finalFormat = HAL_PIXEL_FORMAT_RGBA_8888;
524 }
525
526 ALOGV("%s: Stream %d format set to %x, previously %x", __FUNCTION__,
527 stream_id, finalFormat, stream.format);
528
529 stream.format = finalFormat;
530
531 return NO_ERROR;
532 }
533
releaseStream(uint32_t stream_id)534 int EmulatedFakeCamera2::releaseStream(uint32_t stream_id) {
535 Mutex::Autolock l(mMutex);
536
537 ssize_t streamIndex = mStreams.indexOfKey(stream_id);
538 if (streamIndex < 0) {
539 ALOGE("%s: Unknown stream id %d!", __FUNCTION__, stream_id);
540 return BAD_VALUE;
541 }
542
543 if (isStreamInUse(stream_id)) {
544 ALOGE("%s: Cannot release stream %d; in use!", __FUNCTION__, stream_id);
545 return BAD_VALUE;
546 }
547
548 switch (mStreams.valueAt(streamIndex).format) {
549 case HAL_PIXEL_FORMAT_RAW16:
550 mRawStreamCount--;
551 break;
552 case HAL_PIXEL_FORMAT_BLOB:
553 mJpegStreamCount--;
554 break;
555 default:
556 mProcessedStreamCount--;
557 break;
558 }
559
560 mStreams.removeItemsAt(streamIndex);
561
562 return NO_ERROR;
563 }
564
allocateReprocessStreamFromStream(uint32_t output_stream_id,const camera2_stream_in_ops_t * stream_ops,uint32_t * stream_id)565 int EmulatedFakeCamera2::allocateReprocessStreamFromStream(
566 uint32_t output_stream_id, const camera2_stream_in_ops_t *stream_ops,
567 uint32_t *stream_id) {
568 Mutex::Autolock l(mMutex);
569
570 if (!mStatusPresent) {
571 ALOGW("%s: Camera was physically disconnected", __FUNCTION__);
572 return ERROR_CAMERA_NOT_PRESENT;
573 }
574
575 ssize_t baseStreamIndex = mStreams.indexOfKey(output_stream_id);
576 if (baseStreamIndex < 0) {
577 ALOGE("%s: Unknown output stream id %d!", __FUNCTION__, output_stream_id);
578 return BAD_VALUE;
579 }
580
581 const Stream &baseStream = mStreams[baseStreamIndex];
582
583 // We'll reprocess anything we produced
584
585 if (mReprocessStreamCount >= kMaxReprocessStreamCount) {
586 ALOGE("%s: Cannot allocate another reprocess stream (%d already allocated)",
587 __FUNCTION__, mReprocessStreamCount);
588 return INVALID_OPERATION;
589 }
590 mReprocessStreamCount++;
591
592 ReprocessStream newStream;
593 newStream.ops = stream_ops;
594 newStream.width = baseStream.width;
595 newStream.height = baseStream.height;
596 newStream.format = baseStream.format;
597 newStream.stride = baseStream.stride;
598 newStream.sourceStreamId = output_stream_id;
599
600 *stream_id = mNextReprocessStreamId;
601 mReprocessStreams.add(mNextReprocessStreamId, newStream);
602
603 ALOGV("Reprocess stream allocated: %d: %d, %d, 0x%x. Parent stream: %d",
604 *stream_id, newStream.width, newStream.height, newStream.format,
605 output_stream_id);
606
607 mNextReprocessStreamId++;
608 return NO_ERROR;
609 }
610
releaseReprocessStream(uint32_t stream_id)611 int EmulatedFakeCamera2::releaseReprocessStream(uint32_t stream_id) {
612 Mutex::Autolock l(mMutex);
613
614 ssize_t streamIndex = mReprocessStreams.indexOfKey(stream_id);
615 if (streamIndex < 0) {
616 ALOGE("%s: Unknown reprocess stream id %d!", __FUNCTION__, stream_id);
617 return BAD_VALUE;
618 }
619
620 if (isReprocessStreamInUse(stream_id)) {
621 ALOGE("%s: Cannot release reprocessing stream %d; in use!", __FUNCTION__,
622 stream_id);
623 return BAD_VALUE;
624 }
625
626 mReprocessStreamCount--;
627 mReprocessStreams.removeItemsAt(streamIndex);
628
629 return NO_ERROR;
630 }
631
triggerAction(uint32_t trigger_id,int32_t ext1,int32_t ext2)632 int EmulatedFakeCamera2::triggerAction(uint32_t trigger_id, int32_t ext1,
633 int32_t ext2) {
634 Mutex::Autolock l(mMutex);
635
636 if (trigger_id == CAMERA2_EXT_TRIGGER_TESTING_DISCONNECT) {
637 ALOGI("%s: Disconnect trigger - camera must be closed", __FUNCTION__);
638 mStatusPresent = false;
639
640 EmulatedCameraFactory::Instance().onStatusChanged(
641 mCameraID, CAMERA_DEVICE_STATUS_NOT_PRESENT);
642 }
643
644 if (!mStatusPresent) {
645 ALOGW("%s: Camera was physically disconnected", __FUNCTION__);
646 return ERROR_CAMERA_NOT_PRESENT;
647 }
648
649 return mControlThread->triggerAction(trigger_id, ext1, ext2);
650 }
651
652 /** Shutdown and debug methods */
653
dump(int fd)654 int EmulatedFakeCamera2::dump(int fd) {
655 String8 result;
656
657 result.appendFormat(" Camera HAL device: EmulatedFakeCamera2\n");
658 result.appendFormat(" Streams:\n");
659 for (size_t i = 0; i < mStreams.size(); i++) {
660 int id = mStreams.keyAt(i);
661 const Stream &s = mStreams.valueAt(i);
662 result.appendFormat(" Stream %d: %d x %d, format 0x%x, stride %d\n",
663 id, s.width, s.height, s.format, s.stride);
664 }
665
666 write(fd, result.string(), result.size());
667
668 return NO_ERROR;
669 }
670
// Called by worker threads when they hit an unrecoverable error. Currently
// only logs; no shutdown is propagated to the parent yet.
void EmulatedFakeCamera2::signalError() {
  // TODO: Let parent know so we can shut down cleanly
  ALOGE("Worker thread is signaling a serious error");
}
675
676 /** Pipeline control worker thread methods */
677
// ConfigureThread: dequeues requests and programs the sensor for each frame.
// Thread(false): do not use Android's canCallJava thread mode.
EmulatedFakeCamera2::ConfigureThread::ConfigureThread(
    EmulatedFakeCamera2 *parent)
    : Thread(false), mParent(parent), mRequestCount(0), mNextBuffers(NULL) {
  mRunning = false;
}
683
// Nothing to tear down; thread shutdown is driven via requestExit()/join().
EmulatedFakeCamera2::ConfigureThread::~ConfigureThread() {}
685
// Runs once on the new thread before threadLoop(); resets state and wakes
// any caller blocked in waitUntilRunning().
status_t EmulatedFakeCamera2::ConfigureThread::readyToRun() {
  Mutex::Autolock lock(mInputMutex);

  ALOGV("Starting up ConfigureThread");
  mRequest = NULL;
  mActive = false;
  mRunning = true;

  mInputSignal.signal();
  return NO_ERROR;
}
697
waitUntilRunning()698 status_t EmulatedFakeCamera2::ConfigureThread::waitUntilRunning() {
699 Mutex::Autolock lock(mInputMutex);
700 if (!mRunning) {
701 ALOGV("Waiting for configure thread to start");
702 mInputSignal.wait(mInputMutex);
703 }
704 return OK;
705 }
706
newRequestAvailable()707 status_t EmulatedFakeCamera2::ConfigureThread::newRequestAvailable() {
708 waitUntilRunning();
709
710 Mutex::Autolock lock(mInputMutex);
711
712 mActive = true;
713 mInputSignal.signal();
714
715 return OK;
716 }
717
isStreamInUse(uint32_t id)718 bool EmulatedFakeCamera2::ConfigureThread::isStreamInUse(uint32_t id) {
719 Mutex::Autolock lock(mInternalsMutex);
720
721 if (mNextBuffers == NULL) return false;
722 for (size_t i = 0; i < mNextBuffers->size(); i++) {
723 if ((*mNextBuffers)[i].streamId == (int)id) return true;
724 }
725 return false;
726 }
727
// Number of requests this thread has dequeued but not yet finished
// configuring; mRequestCount is guarded by mInputMutex.
int EmulatedFakeCamera2::ConfigureThread::getInProgressCount() {
  Mutex::Autolock lock(mInputMutex);
  return mRequestCount;
}
732
threadLoop()733 bool EmulatedFakeCamera2::ConfigureThread::threadLoop() {
734 status_t res;
735
736 // Check if we're currently processing or just waiting
737 {
738 Mutex::Autolock lock(mInputMutex);
739 if (!mActive) {
740 // Inactive, keep waiting until we've been signaled
741 status_t res;
742 res = mInputSignal.waitRelative(mInputMutex, kWaitPerLoop);
743 if (res != NO_ERROR && res != TIMED_OUT) {
744 ALOGE("%s: Error waiting for input requests: %d", __FUNCTION__, res);
745 return false;
746 }
747 if (!mActive) return true;
748 ALOGV("New request available");
749 }
750 // Active
751 }
752
753 if (mRequest == NULL) {
754 Mutex::Autolock il(mInternalsMutex);
755
756 ALOGV("Configure: Getting next request");
757 res = mParent->mRequestQueueSrc->dequeue_request(mParent->mRequestQueueSrc,
758 &mRequest);
759 if (res != NO_ERROR) {
760 ALOGE("%s: Error dequeuing next request: %d", __FUNCTION__, res);
761 mParent->signalError();
762 return false;
763 }
764 if (mRequest == NULL) {
765 ALOGV("Configure: Request queue empty, going inactive");
766 // No requests available, go into inactive mode
767 Mutex::Autolock lock(mInputMutex);
768 mActive = false;
769 return true;
770 } else {
771 Mutex::Autolock lock(mInputMutex);
772 mRequestCount++;
773 }
774
775 camera_metadata_entry_t type;
776 res = find_camera_metadata_entry(mRequest, ANDROID_REQUEST_TYPE, &type);
777 if (res != NO_ERROR) {
778 ALOGE("%s: error reading request type", __FUNCTION__);
779 mParent->signalError();
780 return false;
781 }
782 bool success = false;
783 ;
784 switch (type.data.u8[0]) {
785 case ANDROID_REQUEST_TYPE_CAPTURE:
786 success = setupCapture();
787 break;
788 case ANDROID_REQUEST_TYPE_REPROCESS:
789 success = setupReprocess();
790 break;
791 default:
792 ALOGE("%s: Unexpected request type %d", __FUNCTION__, type.data.u8[0]);
793 mParent->signalError();
794 break;
795 }
796 if (!success) return false;
797 }
798
799 if (mWaitingForReadout) {
800 bool readoutDone;
801 readoutDone = mParent->mReadoutThread->waitForReady(kWaitPerLoop);
802 if (!readoutDone) return true;
803
804 if (mNextNeedsJpeg) {
805 ALOGV("Configure: Waiting for JPEG compressor");
806 } else {
807 ALOGV("Configure: Waiting for sensor");
808 }
809 mWaitingForReadout = false;
810 }
811
812 if (mNextNeedsJpeg) {
813 bool jpegDone;
814 jpegDone = mParent->mJpegCompressor->waitForDone(kWaitPerLoop);
815 if (!jpegDone) return true;
816
817 ALOGV("Configure: Waiting for sensor");
818 mNextNeedsJpeg = false;
819 }
820
821 if (mNextIsCapture) {
822 return configureNextCapture();
823 } else {
824 return configureNextReprocess();
825 }
826 }
827
// Prepares a capture request: forwards it to the control thread, builds the
// destination buffer list from the request's output streams, and extracts
// the sensor settings (frame number, exposure, duration, sensitivity).
// Returns false (after signalError()) on any malformed request.
bool EmulatedFakeCamera2::ConfigureThread::setupCapture() {
  status_t res;

  mNextIsCapture = true;
  // Get necessary parameters for sensor config
  mParent->mControlThread->processRequest(mRequest);

  camera_metadata_entry_t streams;
  res = find_camera_metadata_entry(mRequest, ANDROID_REQUEST_OUTPUT_STREAMS,
                                   &streams);
  if (res != NO_ERROR) {
    ALOGE("%s: error reading output stream tag", __FUNCTION__);
    mParent->signalError();
    return false;
  }

  mNextBuffers = new Buffers;
  mNextNeedsJpeg = false;
  ALOGV("Configure: Setting up buffers for capture");
  for (size_t i = 0; i < streams.count; i++) {
    int streamId = streams.data.i32[i];
    const Stream &s = mParent->getStreamInfo(streamId);
    // Streams must have been resolved to a concrete format by
    // registerStreamBuffers() before being used in a request.
    if (s.format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
      ALOGE(
          "%s: Stream %d does not have a concrete pixel format, but "
          "is included in a request!",
          __FUNCTION__, streamId);
      mParent->signalError();
      return false;
    }
    StreamBuffer b;
    b.streamId = streamId;  // streams.data.u8[i];
    b.width = s.width;
    b.height = s.height;
    b.format = s.format;
    b.stride = s.stride;
    mNextBuffers->push_back(b);
    ALOGV(
        "Configure: Buffer %zu: Stream %d, %d x %d, format 0x%x, "
        "stride %d",
        i, b.streamId, b.width, b.height, b.format, b.stride);
    // Any BLOB output means the JPEG compressor must be idle before this
    // frame can be configured.
    if (b.format == HAL_PIXEL_FORMAT_BLOB) {
      mNextNeedsJpeg = true;
    }
  }

  camera_metadata_entry_t e;
  res = find_camera_metadata_entry(mRequest, ANDROID_REQUEST_FRAME_COUNT, &e);
  if (res != NO_ERROR) {
    ALOGE("%s: error reading frame count tag: %s (%d)", __FUNCTION__,
          strerror(-res), res);
    mParent->signalError();
    return false;
  }
  mNextFrameNumber = *e.data.i32;

  res = find_camera_metadata_entry(mRequest, ANDROID_SENSOR_EXPOSURE_TIME, &e);
  if (res != NO_ERROR) {
    ALOGE("%s: error reading exposure time tag: %s (%d)", __FUNCTION__,
          strerror(-res), res);
    mParent->signalError();
    return false;
  }
  mNextExposureTime = *e.data.i64;

  res = find_camera_metadata_entry(mRequest, ANDROID_SENSOR_FRAME_DURATION, &e);
  if (res != NO_ERROR) {
    ALOGE("%s: error reading frame duration tag", __FUNCTION__);
    mParent->signalError();
    return false;
  }
  mNextFrameDuration = *e.data.i64;

  // Clamp frame duration so the exposure plus minimum vertical blanking
  // always fits within it.
  if (mNextFrameDuration < mNextExposureTime + Sensor::kMinVerticalBlank) {
    mNextFrameDuration = mNextExposureTime + Sensor::kMinVerticalBlank;
  }
  res = find_camera_metadata_entry(mRequest, ANDROID_SENSOR_SENSITIVITY, &e);
  if (res != NO_ERROR) {
    ALOGE("%s: error reading sensitivity tag", __FUNCTION__);
    mParent->signalError();
    return false;
  }
  mNextSensitivity = *e.data.i32;

  // Start waiting on readout thread
  mWaitingForReadout = true;
  ALOGV("Configure: Waiting for readout thread");

  return true;
}
918
configureNextCapture()919 bool EmulatedFakeCamera2::ConfigureThread::configureNextCapture() {
920 bool vsync = mParent->mSensor->waitForVSync(kWaitPerLoop);
921 if (!vsync) return true;
922
923 Mutex::Autolock il(mInternalsMutex);
924 ALOGV("Configure: Configuring sensor for capture %d", mNextFrameNumber);
925 mParent->mSensor->setExposureTime(mNextExposureTime);
926 mParent->mSensor->setFrameDuration(mNextFrameDuration);
927 mParent->mSensor->setSensitivity(mNextSensitivity);
928
929 getBuffers();
930
931 ALOGV("Configure: Done configure for capture %d", mNextFrameNumber);
932 mParent->mReadoutThread->setNextOperation(true, mRequest, mNextBuffers);
933 mParent->mSensor->setDestinationBuffers(mNextBuffers);
934
935 mRequest = NULL;
936 mNextBuffers = NULL;
937
938 Mutex::Autolock lock(mInputMutex);
939 mRequestCount--;
940
941 return true;
942 }
943
setupReprocess()944 bool EmulatedFakeCamera2::ConfigureThread::setupReprocess() {
945 status_t res;
946
947 mNextNeedsJpeg = true;
948 mNextIsCapture = false;
949
950 camera_metadata_entry_t reprocessStreams;
951 res = find_camera_metadata_entry(mRequest, ANDROID_REQUEST_INPUT_STREAMS,
952 &reprocessStreams);
953 if (res != NO_ERROR) {
954 ALOGE("%s: error reading output stream tag", __FUNCTION__);
955 mParent->signalError();
956 return false;
957 }
958
959 mNextBuffers = new Buffers;
960
961 ALOGV("Configure: Setting up input buffers for reprocess");
962 for (size_t i = 0; i < reprocessStreams.count; i++) {
963 int streamId = reprocessStreams.data.i32[i];
964 const ReprocessStream &s = mParent->getReprocessStreamInfo(streamId);
965 if (s.format != HAL_PIXEL_FORMAT_RGB_888) {
966 ALOGE("%s: Only ZSL reprocessing supported!", __FUNCTION__);
967 mParent->signalError();
968 return false;
969 }
970 StreamBuffer b;
971 b.streamId = -streamId;
972 b.width = s.width;
973 b.height = s.height;
974 b.format = s.format;
975 b.stride = s.stride;
976 mNextBuffers->push_back(b);
977 }
978
979 camera_metadata_entry_t streams;
980 res = find_camera_metadata_entry(mRequest, ANDROID_REQUEST_OUTPUT_STREAMS,
981 &streams);
982 if (res != NO_ERROR) {
983 ALOGE("%s: error reading output stream tag", __FUNCTION__);
984 mParent->signalError();
985 return false;
986 }
987
988 ALOGV("Configure: Setting up output buffers for reprocess");
989 for (size_t i = 0; i < streams.count; i++) {
990 int streamId = streams.data.i32[i];
991 const Stream &s = mParent->getStreamInfo(streamId);
992 if (s.format != HAL_PIXEL_FORMAT_BLOB) {
993 // TODO: Support reprocess to YUV
994 ALOGE("%s: Non-JPEG output stream %d for reprocess not supported",
995 __FUNCTION__, streamId);
996 mParent->signalError();
997 return false;
998 }
999 StreamBuffer b;
1000 b.streamId = streams.data.u8[i];
1001 b.width = s.width;
1002 b.height = s.height;
1003 b.format = s.format;
1004 b.stride = s.stride;
1005 mNextBuffers->push_back(b);
1006 ALOGV(
1007 "Configure: Buffer %zu: Stream %d, %d x %d, format 0x%x, "
1008 "stride %d",
1009 i, b.streamId, b.width, b.height, b.format, b.stride);
1010 }
1011
1012 camera_metadata_entry_t e;
1013 res = find_camera_metadata_entry(mRequest, ANDROID_REQUEST_FRAME_COUNT, &e);
1014 if (res != NO_ERROR) {
1015 ALOGE("%s: error reading frame count tag: %s (%d)", __FUNCTION__,
1016 strerror(-res), res);
1017 mParent->signalError();
1018 return false;
1019 }
1020 mNextFrameNumber = *e.data.i32;
1021
1022 return true;
1023 }
1024
configureNextReprocess()1025 bool EmulatedFakeCamera2::ConfigureThread::configureNextReprocess() {
1026 Mutex::Autolock il(mInternalsMutex);
1027
1028 getBuffers();
1029
1030 ALOGV("Configure: Done configure for reprocess %d", mNextFrameNumber);
1031 mParent->mReadoutThread->setNextOperation(false, mRequest, mNextBuffers);
1032
1033 mRequest = NULL;
1034 mNextBuffers = NULL;
1035
1036 Mutex::Autolock lock(mInputMutex);
1037 mRequestCount--;
1038
1039 return true;
1040 }
1041
// Obtains the gralloc buffer for every entry in mNextBuffers and maps it so
// the fake pipeline can access its pixels through b.img:
//   - positive streamId: dequeue from an output stream and lock for writing;
//   - non-positive streamId (stored negated): acquire from a reprocess input
//     stream and lock for reading.
// On any failure the parent is put into the error state and false is
// returned. NOTE(review): buffers successfully obtained in earlier loop
// iterations are not unwound on a later failure — the camera is considered
// fatally broken at that point.
bool EmulatedFakeCamera2::ConfigureThread::getBuffers() {
  status_t res;
  /** Get buffers to fill for this frame */
  for (size_t i = 0; i < mNextBuffers->size(); i++) {
    StreamBuffer &b = mNextBuffers->editItemAt(i);

    if (b.streamId > 0) {
      ALOGV("Configure: Dequeing buffer from stream %d", b.streamId);
      Stream s = mParent->getStreamInfo(b.streamId);
      res = s.ops->dequeue_buffer(s.ops, &(b.buffer));
      if (res != NO_ERROR || b.buffer == NULL) {
        ALOGE("%s: Unable to dequeue buffer from stream %d: %s (%d)",
              __FUNCTION__, b.streamId, strerror(-res), res);
        mParent->signalError();
        return false;
      }

      /* Import the buffer from the perspective of the graphics mapper */
      res = GrallocModule::getInstance().import(*(b.buffer), &b.importedBuffer);
      if (res != NO_ERROR) {
        ALOGE("%s: grbuffer_mapper.import failure: %s (%d)",
              __FUNCTION__, strerror(-res), res);
        // Return the un-imported buffer to its stream before failing.
        s.ops->cancel_buffer(s.ops, b.buffer);
        mParent->signalError();
        return false;
      }

      /* Lock the buffer from the perspective of the graphics mapper */
      // Output buffers are written by the fake sensor/pipeline.
      const int usage = GRALLOC_USAGE_SW_WRITE_OFTEN |
                        GRALLOC_USAGE_HW_CAMERA_WRITE;

      res = GrallocModule::getInstance().lock(
          b.importedBuffer, usage, 0, 0, s.width, s.height,
          (void **)&(b.img));

      if (res != NO_ERROR) {
        ALOGE("%s: grbuffer_mapper.lock failure: %s (%d)", __FUNCTION__,
              strerror(-res), res);
        s.ops->cancel_buffer(s.ops, b.buffer);
        mParent->signalError();
        return false;
      }
    } else {
      // Reprocess input: streamId was stored negated, so negate it back.
      ALOGV("Configure: Acquiring buffer from reprocess stream %d",
            -b.streamId);
      ReprocessStream s = mParent->getReprocessStreamInfo(-b.streamId);
      res = s.ops->acquire_buffer(s.ops, &(b.buffer));
      if (res != NO_ERROR || b.buffer == NULL) {
        ALOGE(
            "%s: Unable to acquire buffer from reprocess stream %d: "
            "%s (%d)",
            __FUNCTION__, -b.streamId, strerror(-res), res);
        mParent->signalError();
        return false;
      }

      /* Import the buffer from the perspective of the graphics mapper */
      res = GrallocModule::getInstance().import(*(b.buffer), &b.importedBuffer);
      if (res != NO_ERROR) {
        ALOGE("%s: grbuffer_mapper.import failure: %s (%d)",
              __FUNCTION__, strerror(-res), res);
        // Reprocess buffers are released (not cancelled) on failure.
        s.ops->release_buffer(s.ops, b.buffer);
        mParent->signalError();
        return false;
      }

      /* Lock the buffer from the perspective of the graphics mapper */
      // Input buffers are only read (they hold already-captured data).
      const int usage = GRALLOC_USAGE_SW_READ_OFTEN |
                        GRALLOC_USAGE_HW_CAMERA_READ;

      res = GrallocModule::getInstance().lock(
          b.importedBuffer, usage, 0, 0, s.width, s.height,
          (void **)&(b.img));
      if (res != NO_ERROR) {
        ALOGE("%s: grbuffer_mapper.lock failure: %s (%d)", __FUNCTION__,
              strerror(-res), res);
        s.ops->release_buffer(s.ops, b.buffer);
        mParent->signalError();
        return false;
      }
    }
  }
  return true;
}
1126
// Readout thread starts idle, with no request in progress and an empty
// circular in-flight queue (head == tail means empty; see
// readyForNextCapture() for the full condition).
EmulatedFakeCamera2::ReadoutThread::ReadoutThread(EmulatedFakeCamera2 *parent)
    : Thread(false),
      mParent(parent),
      mRunning(false),
      mActive(false),
      mRequestCount(0),
      mRequest(NULL),
      mBuffers(NULL) {
  mInFlightQueue = new InFlightQueue[kInFlightQueueSize];
  mInFlightHead = 0;
  mInFlightTail = 0;
}
1139
// Frees the in-flight queue array allocated in the constructor.
EmulatedFakeCamera2::ReadoutThread::~ReadoutThread() {
  delete[] mInFlightQueue;
}
1143
readyToRun()1144 status_t EmulatedFakeCamera2::ReadoutThread::readyToRun() {
1145 Mutex::Autolock lock(mInputMutex);
1146 ALOGV("Starting up ReadoutThread");
1147 mRunning = true;
1148 mInputSignal.signal();
1149 return NO_ERROR;
1150 }
1151
waitUntilRunning()1152 status_t EmulatedFakeCamera2::ReadoutThread::waitUntilRunning() {
1153 Mutex::Autolock lock(mInputMutex);
1154 if (!mRunning) {
1155 ALOGV("Waiting for readout thread to start");
1156 mInputSignal.wait(mInputMutex);
1157 }
1158 return OK;
1159 }
1160
waitForReady(nsecs_t timeout)1161 bool EmulatedFakeCamera2::ReadoutThread::waitForReady(nsecs_t timeout) {
1162 status_t res;
1163 Mutex::Autolock lock(mInputMutex);
1164 while (!readyForNextCapture()) {
1165 res = mReadySignal.waitRelative(mInputMutex, timeout);
1166 if (res == TIMED_OUT) return false;
1167 if (res != OK) {
1168 ALOGE("%s: Error waiting for ready: %s (%d)", __FUNCTION__,
1169 strerror(-res), res);
1170 return false;
1171 }
1172 }
1173 return true;
1174 }
1175
readyForNextCapture()1176 bool EmulatedFakeCamera2::ReadoutThread::readyForNextCapture() {
1177 return (mInFlightTail + 1) % kInFlightQueueSize != mInFlightHead;
1178 }
1179
setNextOperation(bool isCapture,camera_metadata_t * request,Buffers * buffers)1180 void EmulatedFakeCamera2::ReadoutThread::setNextOperation(
1181 bool isCapture, camera_metadata_t *request, Buffers *buffers) {
1182 Mutex::Autolock lock(mInputMutex);
1183 if (!readyForNextCapture()) {
1184 ALOGE("In flight queue full, dropping captures");
1185 mParent->signalError();
1186 return;
1187 }
1188 mInFlightQueue[mInFlightTail].isCapture = isCapture;
1189 mInFlightQueue[mInFlightTail].request = request;
1190 mInFlightQueue[mInFlightTail].buffers = buffers;
1191 mInFlightTail = (mInFlightTail + 1) % kInFlightQueueSize;
1192 mRequestCount++;
1193
1194 if (!mActive) {
1195 mActive = true;
1196 mInputSignal.signal();
1197 }
1198 }
1199
isStreamInUse(uint32_t id)1200 bool EmulatedFakeCamera2::ReadoutThread::isStreamInUse(uint32_t id) {
1201 // acquire in same order as threadLoop
1202 Mutex::Autolock iLock(mInternalsMutex);
1203 Mutex::Autolock lock(mInputMutex);
1204
1205 size_t i = mInFlightHead;
1206 while (i != mInFlightTail) {
1207 for (size_t j = 0; j < mInFlightQueue[i].buffers->size(); j++) {
1208 if ((*(mInFlightQueue[i].buffers))[j].streamId == (int)id) return true;
1209 }
1210 i = (i + 1) % kInFlightQueueSize;
1211 }
1212
1213 if (mBuffers != NULL) {
1214 for (i = 0; i < mBuffers->size(); i++) {
1215 if ((*mBuffers)[i].streamId == (int)id) return true;
1216 }
1217 }
1218
1219 return false;
1220 }
1221
getInProgressCount()1222 int EmulatedFakeCamera2::ReadoutThread::getInProgressCount() {
1223 Mutex::Autolock lock(mInputMutex);
1224
1225 return mRequestCount;
1226 }
1227
// One iteration of the readout pipeline: pick up the next in-flight entry
// (or go idle), wait for the sensor to produce the frame (captures only),
// emit the result metadata frame, then route each filled buffer — non-JPEG
// buffers are enqueued to their streams here, a BLOB (JPEG) buffer is handed
// to the JPEG compressor, which completes via onJpegDone()/onJpegInputDone().
// Returns false only on fatal errors (after mParent->signalError()).
bool EmulatedFakeCamera2::ReadoutThread::threadLoop() {
  static const nsecs_t kWaitPerLoop = 10000000L; // 10 ms
  status_t res;
  int32_t frameNumber;

  // Check if we're currently processing or just waiting
  {
    Mutex::Autolock lock(mInputMutex);
    if (!mActive) {
      // Inactive, keep waiting until we've been signaled
      res = mInputSignal.waitRelative(mInputMutex, kWaitPerLoop);
      if (res != NO_ERROR && res != TIMED_OUT) {
        ALOGE("%s: Error waiting for capture requests: %d", __FUNCTION__, res);
        mParent->signalError();
        return false;
      }
      if (!mActive) return true;
    }
    // Active, see if we need a new request
    if (mRequest == NULL) {
      if (mInFlightHead == mInFlightTail) {
        // Go inactive
        ALOGV("Waiting for sensor data");
        mActive = false;
        return true;
      } else {
        // Pop the head entry; mReadySignal wakes producers blocked in
        // waitForReady() now that a queue slot is about to free up.
        Mutex::Autolock iLock(mInternalsMutex);
        mReadySignal.signal();
        mIsCapture = mInFlightQueue[mInFlightHead].isCapture;
        mRequest = mInFlightQueue[mInFlightHead].request;
        mBuffers = mInFlightQueue[mInFlightHead].buffers;
        mInFlightQueue[mInFlightHead].request = NULL;
        mInFlightQueue[mInFlightHead].buffers = NULL;
        mInFlightHead = (mInFlightHead + 1) % kInFlightQueueSize;
        ALOGV("Ready to read out request %p, %zu buffers", mRequest,
              mBuffers->size());
      }
    }
  }

  // Active with request, wait on sensor to complete

  nsecs_t captureTime;

  if (mIsCapture) {
    bool gotFrame;
    gotFrame = mParent->mSensor->waitForNewFrame(kWaitPerLoop, &captureTime);

    // No frame yet; retry on the next loop iteration.
    if (!gotFrame) return true;
  }

  Mutex::Autolock iLock(mInternalsMutex);

  camera_metadata_entry_t entry;
  if (!mIsCapture) {
    // Reprocess requests carry the original capture timestamp in their
    // request metadata rather than getting one from the sensor.
    res =
        find_camera_metadata_entry(mRequest, ANDROID_SENSOR_TIMESTAMP, &entry);
    if (res != NO_ERROR) {
      ALOGE("%s: error reading reprocessing timestamp: %s (%d)", __FUNCTION__,
            strerror(-res), res);
      mParent->signalError();
      return false;
    }
    captureTime = entry.data.i64[0];
  }

  res =
      find_camera_metadata_entry(mRequest, ANDROID_REQUEST_FRAME_COUNT, &entry);
  if (res != NO_ERROR) {
    ALOGE("%s: error reading frame count tag: %s (%d)", __FUNCTION__,
          strerror(-res), res);
    mParent->signalError();
    return false;
  }
  frameNumber = *entry.data.i32;

  res = find_camera_metadata_entry(mRequest, ANDROID_REQUEST_METADATA_MODE,
                                   &entry);
  if (res != NO_ERROR) {
    ALOGE("%s: error reading metadata mode tag: %s (%d)", __FUNCTION__,
          strerror(-res), res);
    mParent->signalError();
    return false;
  }

  // Got sensor data and request, construct frame and send it out
  ALOGV("Readout: Constructing metadata and frames for request %d",
        frameNumber);

  if (*entry.data.u8 == ANDROID_REQUEST_METADATA_MODE_FULL) {
    ALOGV("Readout: Metadata requested, constructing");

    camera_metadata_t *frame = NULL;

    size_t frame_entries = get_camera_metadata_entry_count(mRequest);
    size_t frame_data = get_camera_metadata_data_count(mRequest);

    // TODO: Dynamically calculate based on enabled statistics, etc
    // Extra headroom beyond the request's own entries for the timestamp and
    // statistics added below.
    frame_entries += 10;
    frame_data += 100;

    res = mParent->mFrameQueueDst->dequeue_frame(
        mParent->mFrameQueueDst, frame_entries, frame_data, &frame);

    if (res != NO_ERROR || frame == NULL) {
      ALOGE("%s: Unable to dequeue frame metadata buffer", __FUNCTION__);
      mParent->signalError();
      return false;
    }

    // The output frame starts as a copy of the request settings.
    res = append_camera_metadata(frame, mRequest);
    if (res != NO_ERROR) {
      ALOGE("Unable to append request metadata");
    }

    if (mIsCapture) {
      add_camera_metadata_entry(frame, ANDROID_SENSOR_TIMESTAMP, &captureTime,
                                1);

      collectStatisticsMetadata(frame);
      // TODO: Collect all final values used from sensor in addition to
      // timestamp
    }

    ALOGV("Readout: Enqueue frame %d", frameNumber);
    mParent->mFrameQueueDst->enqueue_frame(mParent->mFrameQueueDst, frame);
  }
  ALOGV("Readout: Free request");
  res = mParent->mRequestQueueSrc->free_request(mParent->mRequestQueueSrc,
                                                mRequest);
  if (res != NO_ERROR) {
    ALOGE("%s: Unable to return request buffer to queue: %d", __FUNCTION__,
          res);
    mParent->signalError();
    return false;
  }
  mRequest = NULL;

  int compressedBufferIndex = -1;
  ALOGV("Readout: Processing %zu buffers", mBuffers->size());
  for (size_t i = 0; i < mBuffers->size(); i++) {
    const StreamBuffer &b = (*mBuffers)[i];
    ALOGV("Readout: Buffer %zu: Stream %d, %d x %d, format 0x%x, stride %d",
          i, b.streamId, b.width, b.height, b.format, b.stride);
    // Only output-stream buffers (positive id) are enqueued; reprocess input
    // buffers are returned later via onJpegInputDone().
    if (b.streamId > 0) {
      if (b.format == HAL_PIXEL_FORMAT_BLOB) {
        // Assumes only one BLOB buffer type per capture
        compressedBufferIndex = i;
      } else {
        ALOGV("Readout: Sending image buffer %zu (%p) to output stream %d",
              i, (void *)*(b.buffer), b.streamId);
        // Unmap before handing the buffer back to its stream.
        GrallocModule::getInstance().unlock(b.importedBuffer);
        GrallocModule::getInstance().release(b.importedBuffer);
        const Stream &s = mParent->getStreamInfo(b.streamId);
        res = s.ops->enqueue_buffer(s.ops, captureTime, b.buffer);
        if (res != OK) {
          ALOGE("Error enqueuing image buffer %p: %s (%d)", b.buffer,
                strerror(-res), res);
          mParent->signalError();
        }
      }
    }
  }

  if (compressedBufferIndex == -1) {
    delete mBuffers;
  } else {
    ALOGV("Readout: Starting JPEG compression for buffer %d, stream %d",
          compressedBufferIndex, (*mBuffers)[compressedBufferIndex].streamId);
    mJpegTimestamp = captureTime;
    // Takes ownership of mBuffers
    mParent->mJpegCompressor->start(mBuffers, this);
  }
  mBuffers = NULL;

  Mutex::Autolock l(mInputMutex);
  mRequestCount--;
  ALOGV("Readout: Done with request %d", frameNumber);
  return true;
}
1408
onJpegDone(const StreamBuffer & jpegBuffer,bool success)1409 void EmulatedFakeCamera2::ReadoutThread::onJpegDone(
1410 const StreamBuffer &jpegBuffer, bool success) {
1411 if (!success) {
1412 ALOGE("%s: Error queueing compressed image buffer %p", __FUNCTION__,
1413 jpegBuffer.buffer);
1414 mParent->signalError();
1415 return;
1416 }
1417
1418 // Write to JPEG output stream
1419 ALOGV("%s: Compression complete, pushing to stream %d", __FUNCTION__,
1420 jpegBuffer.streamId);
1421
1422 GrallocModule::getInstance().unlock(jpegBuffer.importedBuffer);
1423 GrallocModule::getInstance().release(jpegBuffer.importedBuffer);
1424 const Stream &s = mParent->getStreamInfo(jpegBuffer.streamId);
1425 s.ops->enqueue_buffer(s.ops, mJpegTimestamp, jpegBuffer.buffer);
1426 }
1427
onJpegInputDone(const StreamBuffer & inputBuffer)1428 void EmulatedFakeCamera2::ReadoutThread::onJpegInputDone(
1429 const StreamBuffer &inputBuffer) {
1430 status_t res;
1431 GrallocModule::getInstance().unlock(inputBuffer.importedBuffer);
1432 GrallocModule::getInstance().release(inputBuffer.importedBuffer);
1433 const ReprocessStream &s =
1434 mParent->getReprocessStreamInfo(-inputBuffer.streamId);
1435 res = s.ops->release_buffer(s.ops, inputBuffer.buffer);
1436 if (res != OK) {
1437 ALOGE("Error releasing reprocess buffer %p: %s (%d)", inputBuffer.buffer,
1438 strerror(-res), res);
1439 mParent->signalError();
1440 }
1441 }
1442
collectStatisticsMetadata(camera_metadata_t * frame)1443 status_t EmulatedFakeCamera2::ReadoutThread::collectStatisticsMetadata(
1444 camera_metadata_t *frame) {
1445 // Completely fake face rectangles, don't correspond to real faces in scene
1446 ALOGV("Readout: Collecting statistics metadata");
1447
1448 status_t res;
1449 camera_metadata_entry_t entry;
1450 res = find_camera_metadata_entry(frame, ANDROID_STATISTICS_FACE_DETECT_MODE,
1451 &entry);
1452 if (res != OK) {
1453 ALOGE("%s: Unable to find face detect mode!", __FUNCTION__);
1454 return BAD_VALUE;
1455 }
1456
1457 if (entry.data.u8[0] == ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) return OK;
1458
1459 // The coordinate system for the face regions is the raw sensor pixel
1460 // coordinates. Here, we map from the scene coordinates (0-19 in both axis)
1461 // to raw pixels, for the scene defined in fake-pipeline2/Scene.cpp. We
1462 // approximately place two faces on top of the windows of the house. No
1463 // actual faces exist there, but might one day. Note that this doesn't
1464 // account for the offsets used to account for aspect ratio differences, so
1465 // the rectangles don't line up quite right.
1466 const size_t numFaces = 2;
1467 int32_t rects[numFaces * 4] = {
1468 static_cast<int32_t>(mParent->mSensorWidth * 10 / 20),
1469 static_cast<int32_t>(mParent->mSensorHeight * 15 / 20),
1470 static_cast<int32_t>(mParent->mSensorWidth * 12 / 20),
1471 static_cast<int32_t>(mParent->mSensorHeight * 17 / 20),
1472
1473 static_cast<int32_t>(mParent->mSensorWidth * 16 / 20),
1474 static_cast<int32_t>(mParent->mSensorHeight * 15 / 20),
1475 static_cast<int32_t>(mParent->mSensorWidth * 18 / 20),
1476 static_cast<int32_t>(mParent->mSensorHeight * 17 / 20)};
1477 // To simulate some kind of real detection going on, we jitter the rectangles
1478 // on each frame by a few pixels in each dimension.
1479 for (size_t i = 0; i < numFaces * 4; i++) {
1480 rects[i] += (int32_t)(((float)rand() / (float)RAND_MAX) * 6 - 3);
1481 }
1482 // The confidence scores (0-100) are similarly jittered.
1483 uint8_t scores[numFaces] = {85, 95};
1484 for (size_t i = 0; i < numFaces; i++) {
1485 scores[i] += (int32_t)(((float)rand() / (float)RAND_MAX) * 10 - 5);
1486 }
1487
1488 res = add_camera_metadata_entry(frame, ANDROID_STATISTICS_FACE_RECTANGLES,
1489 rects, numFaces * 4);
1490 if (res != OK) {
1491 ALOGE("%s: Unable to add face rectangles!", __FUNCTION__);
1492 return BAD_VALUE;
1493 }
1494
1495 res = add_camera_metadata_entry(frame, ANDROID_STATISTICS_FACE_SCORES, scores,
1496 numFaces);
1497 if (res != OK) {
1498 ALOGE("%s: Unable to add face scores!", __FUNCTION__);
1499 return BAD_VALUE;
1500 }
1501
1502 if (entry.data.u8[0] == ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE) return OK;
1503
1504 // Advanced face detection options - add eye/mouth coordinates. The
1505 // coordinates in order are (leftEyeX, leftEyeY, rightEyeX, rightEyeY,
1506 // mouthX, mouthY). The mapping is the same as the face rectangles.
1507 int32_t features[numFaces * 6] = {
1508 static_cast<int32_t>(mParent->mSensorWidth * 10.5 / 20),
1509 static_cast<int32_t>(mParent->mSensorHeight * 16 / 20),
1510 static_cast<int32_t>(mParent->mSensorWidth * 11.5 / 20),
1511 static_cast<int32_t>(mParent->mSensorHeight * 16 / 20),
1512 static_cast<int32_t>(mParent->mSensorWidth * 11 / 20),
1513 static_cast<int32_t>(mParent->mSensorHeight * 16.5 / 20),
1514
1515 static_cast<int32_t>(mParent->mSensorWidth * 16.5 / 20),
1516 static_cast<int32_t>(mParent->mSensorHeight * 16 / 20),
1517 static_cast<int32_t>(mParent->mSensorWidth * 17.5 / 20),
1518 static_cast<int32_t>(mParent->mSensorHeight * 16 / 20),
1519 static_cast<int32_t>(mParent->mSensorWidth * 17 / 20),
1520 static_cast<int32_t>(mParent->mSensorHeight * 16.5 / 20),
1521 };
1522 // Jitter these a bit less than the rects
1523 for (size_t i = 0; i < numFaces * 6; i++) {
1524 features[i] += (int32_t)(((float)rand() / (float)RAND_MAX) * 4 - 2);
1525 }
1526 // These are unique IDs that are used to identify each face while it's
1527 // visible to the detector (if a face went away and came back, it'd get a
1528 // new ID).
1529 int32_t ids[numFaces] = {100, 200};
1530
1531 res = add_camera_metadata_entry(frame, ANDROID_STATISTICS_FACE_LANDMARKS,
1532 features, numFaces * 6);
1533 if (res != OK) {
1534 ALOGE("%s: Unable to add face landmarks!", __FUNCTION__);
1535 return BAD_VALUE;
1536 }
1537
1538 res = add_camera_metadata_entry(frame, ANDROID_STATISTICS_FACE_IDS, ids,
1539 numFaces);
1540 if (res != OK) {
1541 ALOGE("%s: Unable to add face scores!", __FUNCTION__);
1542 return BAD_VALUE;
1543 }
1544
1545 return OK;
1546 }
1547
// Control (3A) thread starts in the not-running state; the rest of the 3A
// state is (re)initialized in readyToRun() when the thread actually starts.
EmulatedFakeCamera2::ControlThread::ControlThread(EmulatedFakeCamera2 *parent)
    : Thread(false), mParent(parent) {
  mRunning = false;
}
1552
// No dynamically-allocated state to release.
EmulatedFakeCamera2::ControlThread::~ControlThread() {}
1554
// Called once at thread start: resets all 3A (AF/AE/AWB) modes, triggers,
// and states to their defaults, then wakes anyone in waitUntilRunning().
status_t EmulatedFakeCamera2::ControlThread::readyToRun() {
  Mutex::Autolock lock(mInputMutex);

  ALOGV("Starting up ControlThread");
  mRunning = true;
  // No pending AF/precapture triggers at startup.
  mStartAf = false;
  mCancelAf = false;
  mStartPrecapture = false;

  mControlMode = ANDROID_CONTROL_MODE_AUTO;

  mEffectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
  mSceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;

  mAfMode = ANDROID_CONTROL_AF_MODE_AUTO;
  mAfModeChange = false;

  mAeMode = ANDROID_CONTROL_AE_MODE_ON;
  mAwbMode = ANDROID_CONTROL_AWB_MODE_AUTO;

  mAfTriggerId = 0;
  mPrecaptureTriggerId = 0;

  // All 3A state machines start out inactive.
  mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
  mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
  mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;

  mExposureTime = kNormalExposureTime;

  // Wake any caller blocked in waitUntilRunning().
  mInputSignal.signal();
  return NO_ERROR;
}
1587
waitUntilRunning()1588 status_t EmulatedFakeCamera2::ControlThread::waitUntilRunning() {
1589 Mutex::Autolock lock(mInputMutex);
1590 if (!mRunning) {
1591 ALOGV("Waiting for control thread to start");
1592 mInputSignal.wait(mInputMutex);
1593 }
1594 return OK;
1595 }
1596
1597 // Override android.control.* fields with 3A values before sending request to
1598 // sensor
processRequest(camera_metadata_t * request)1599 status_t EmulatedFakeCamera2::ControlThread::processRequest(
1600 camera_metadata_t *request) {
1601 Mutex::Autolock lock(mInputMutex);
1602 // TODO: Add handling for all android.control.* fields here
1603 camera_metadata_entry_t mode;
1604 status_t res;
1605
1606 #define READ_IF_OK(res, what, def) (((res) == OK) ? (what) : (uint8_t)(def))
1607
1608 res = find_camera_metadata_entry(request, ANDROID_CONTROL_MODE, &mode);
1609 mControlMode = READ_IF_OK(res, mode.data.u8[0], ANDROID_CONTROL_MODE_OFF);
1610
1611 // disable all 3A
1612 if (mControlMode == ANDROID_CONTROL_MODE_OFF) {
1613 mEffectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
1614 mSceneMode = ANDROID_CONTROL_SCENE_MODE_DISABLED;
1615 mAfMode = ANDROID_CONTROL_AF_MODE_OFF;
1616 mAeLock = ANDROID_CONTROL_AE_LOCK_ON;
1617 mAeMode = ANDROID_CONTROL_AE_MODE_OFF;
1618 mAfModeChange = true;
1619 mStartAf = false;
1620 mCancelAf = true;
1621 mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
1622 mAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
1623 return res;
1624 }
1625
1626 res = find_camera_metadata_entry(request, ANDROID_CONTROL_EFFECT_MODE, &mode);
1627 mEffectMode =
1628 READ_IF_OK(res, mode.data.u8[0], ANDROID_CONTROL_EFFECT_MODE_OFF);
1629
1630 res = find_camera_metadata_entry(request, ANDROID_CONTROL_SCENE_MODE, &mode);
1631 mSceneMode =
1632 READ_IF_OK(res, mode.data.u8[0], ANDROID_CONTROL_SCENE_MODE_DISABLED);
1633
1634 res = find_camera_metadata_entry(request, ANDROID_CONTROL_AF_MODE, &mode);
1635 if (mAfMode != mode.data.u8[0]) {
1636 ALOGV("AF new mode: %d, old mode %d", mode.data.u8[0], mAfMode);
1637 mAfMode = mode.data.u8[0];
1638 mAfModeChange = true;
1639 mStartAf = false;
1640 mCancelAf = false;
1641 }
1642
1643 res = find_camera_metadata_entry(request, ANDROID_CONTROL_AE_MODE, &mode);
1644 mAeMode = READ_IF_OK(res, mode.data.u8[0], ANDROID_CONTROL_AE_MODE_OFF);
1645
1646 res = find_camera_metadata_entry(request, ANDROID_CONTROL_AE_LOCK, &mode);
1647 uint8_t aeLockVal =
1648 READ_IF_OK(res, mode.data.u8[0], ANDROID_CONTROL_AE_LOCK_ON);
1649 bool aeLock = (aeLockVal == ANDROID_CONTROL_AE_LOCK_ON);
1650 if (mAeLock && !aeLock) {
1651 mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
1652 }
1653 mAeLock = aeLock;
1654
1655 res = find_camera_metadata_entry(request, ANDROID_CONTROL_AWB_MODE, &mode);
1656 mAwbMode = READ_IF_OK(res, mode.data.u8[0], ANDROID_CONTROL_AWB_MODE_OFF);
1657
1658 // TODO: Override more control fields
1659
1660 if (mAeMode != ANDROID_CONTROL_AE_MODE_OFF) {
1661 camera_metadata_entry_t exposureTime;
1662 res = find_camera_metadata_entry(request, ANDROID_SENSOR_EXPOSURE_TIME,
1663 &exposureTime);
1664 if (res == OK) {
1665 exposureTime.data.i64[0] = mExposureTime;
1666 }
1667 }
1668
1669 #undef READ_IF_OK
1670
1671 return OK;
1672 }
1673
triggerAction(uint32_t msgType,int32_t ext1,int32_t ext2)1674 status_t EmulatedFakeCamera2::ControlThread::triggerAction(uint32_t msgType,
1675 int32_t ext1,
1676 int32_t ext2) {
1677 ALOGV("%s: Triggering %d (%d, %d)", __FUNCTION__, msgType, ext1, ext2);
1678 Mutex::Autolock lock(mInputMutex);
1679 switch (msgType) {
1680 case CAMERA2_TRIGGER_AUTOFOCUS:
1681 mAfTriggerId = ext1;
1682 mStartAf = true;
1683 mCancelAf = false;
1684 break;
1685 case CAMERA2_TRIGGER_CANCEL_AUTOFOCUS:
1686 mAfTriggerId = ext1;
1687 mStartAf = false;
1688 mCancelAf = true;
1689 break;
1690 case CAMERA2_TRIGGER_PRECAPTURE_METERING:
1691 mPrecaptureTriggerId = ext1;
1692 mStartPrecapture = true;
1693 break;
1694 default:
1695 ALOGE("%s: Unknown action triggered: %d (arguments %d %d)", __FUNCTION__,
1696 msgType, ext1, ext2);
1697 return BAD_VALUE;
1698 }
1699 return OK;
1700 }
1701
1702 const nsecs_t EmulatedFakeCamera2::ControlThread::kControlCycleDelay =
1703 100 * MSEC;
1704 const nsecs_t EmulatedFakeCamera2::ControlThread::kMinAfDuration = 500 * MSEC;
1705 const nsecs_t EmulatedFakeCamera2::ControlThread::kMaxAfDuration = 900 * MSEC;
1706 const float EmulatedFakeCamera2::ControlThread::kAfSuccessRate = 0.9;
1707 // Once every 5 seconds
1708 const float EmulatedFakeCamera2::ControlThread::kContinuousAfStartRate =
1709 kControlCycleDelay / 5.0 * SEC;
1710 const nsecs_t EmulatedFakeCamera2::ControlThread::kMinAeDuration = 500 * MSEC;
1711 const nsecs_t EmulatedFakeCamera2::ControlThread::kMaxAeDuration = 2 * SEC;
1712 const nsecs_t EmulatedFakeCamera2::ControlThread::kMinPrecaptureAeDuration =
1713 100 * MSEC;
1714 const nsecs_t EmulatedFakeCamera2::ControlThread::kMaxPrecaptureAeDuration =
1715 400 * MSEC;
1716 // Once every 3 seconds
1717 const float EmulatedFakeCamera2::ControlThread::kAeScanStartRate =
1718 kControlCycleDelay / 3000000000.0;
1719
1720 const nsecs_t EmulatedFakeCamera2::ControlThread::kNormalExposureTime =
1721 10 * MSEC;
1722 const nsecs_t EmulatedFakeCamera2::ControlThread::kExposureJump = 2 * MSEC;
1723 const nsecs_t EmulatedFakeCamera2::ControlThread::kMinExposureTime = 1 * MSEC;
1724
// One cycle of the simulated 3A loop: snapshot triggers and state under the
// input lock, run the AF then AE state machines, sleep until the next cycle
// (shortened if a scan finishes sooner), and age the active scan timers.
bool EmulatedFakeCamera2::ControlThread::threadLoop() {
  bool afModeChange = false;
  bool afTriggered = false;
  bool afCancelled = false;
  uint8_t afState;
  uint8_t afMode;
  int32_t afTriggerId;
  bool precaptureTriggered = false;
  uint8_t aeState;
  uint8_t aeMode;
  bool aeLock;
  int32_t precaptureTriggerId;
  nsecs_t nextSleep = kControlCycleDelay;

  // Consume pending triggers and copy shared state to locals so the state
  // machines below can run without holding mInputMutex.
  {
    Mutex::Autolock lock(mInputMutex);
    if (mStartAf) {
      ALOGD("Starting AF trigger processing");
      afTriggered = true;
      mStartAf = false;
    } else if (mCancelAf) {
      ALOGD("Starting cancel AF trigger processing");
      afCancelled = true;
      mCancelAf = false;
    }
    afState = mAfState;
    afMode = mAfMode;
    afModeChange = mAfModeChange;
    mAfModeChange = false;

    afTriggerId = mAfTriggerId;

    if (mStartPrecapture) {
      ALOGD("Starting precapture trigger processing");
      precaptureTriggered = true;
      mStartPrecapture = false;
    }
    aeState = mAeState;
    aeMode = mAeMode;
    aeLock = mAeLock;
    precaptureTriggerId = mPrecaptureTriggerId;
  }

  // Cancel or mode change aborts any in-progress scan and resets AF.
  if (afCancelled || afModeChange) {
    ALOGV("Resetting AF state due to cancel/mode change");
    afState = ANDROID_CONTROL_AF_STATE_INACTIVE;
    updateAfState(afState, afTriggerId);
    mAfScanDuration = 0;
    mLockAfterPassiveScan = false;
  }

  if (afTriggered) {
    afState = processAfTrigger(afMode, afState);
  }

  // Advance the AF state machine and publish any state change.
  afState = maybeStartAfScan(afMode, afState);
  afState = updateAfScan(afMode, afState, &nextSleep);
  updateAfState(afState, afTriggerId);

  if (precaptureTriggered) {
    aeState = processPrecaptureTrigger(aeMode, aeState);
  }

  // Advance the AE state machine and publish any state change.
  aeState = maybeStartAeScan(aeMode, aeLock, aeState);
  aeState = updateAeScan(aeMode, aeLock, aeState, &nextSleep);
  updateAeState(aeState, precaptureTriggerId);

  // Sleep for the remainder of the cycle; retry nanosleep with the
  // remaining time if it is interrupted (nextSleep <= 100ms, so tv_nsec
  // stays within range).
  int ret;
  timespec t;
  t.tv_sec = 0;
  t.tv_nsec = nextSleep;
  do {
    ret = nanosleep(&t, &t);
  } while (ret != 0);

  // Age active scans by the time we just slept.
  if (mAfScanDuration > 0) {
    mAfScanDuration -= nextSleep;
  }
  if (mAeScanDuration > 0) {
    mAeScanDuration -= nextSleep;
  }

  return true;
}
1809
// Applies an AF trigger to the AF state machine for the given mode and
// returns the new state. AUTO/MACRO start a randomized active scan;
// continuous modes lock (or arrange to lock after the passive scan);
// OFF/EDOF ignore triggers.
int EmulatedFakeCamera2::ControlThread::processAfTrigger(uint8_t afMode,
                                                         uint8_t afState) {
  switch (afMode) {
    case ANDROID_CONTROL_AF_MODE_OFF:
    case ANDROID_CONTROL_AF_MODE_EDOF:
      // Do nothing
      break;
    case ANDROID_CONTROL_AF_MODE_MACRO:
    case ANDROID_CONTROL_AF_MODE_AUTO:
      switch (afState) {
        case ANDROID_CONTROL_AF_STATE_INACTIVE:
        case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
        case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
          // Start new focusing cycle
          // Random scan length in [kMinAfDuration, kMaxAfDuration].
          mAfScanDuration =
              ((double)rand() / RAND_MAX) * (kMaxAfDuration - kMinAfDuration) +
              kMinAfDuration;
          afState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
          ALOGV("%s: AF scan start, duration %" PRId64 " ms", __FUNCTION__,
                mAfScanDuration / 1000000);
          break;
        case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
          // Ignore new request, already scanning
          break;
        default:
          ALOGE("Unexpected AF state in AUTO/MACRO AF mode: %d", afState);
      }
      break;
    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
      switch (afState) {
        // Picture mode waits for passive scan to complete
        case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
          // Lock will be applied by updateAfScan() when the scan finishes.
          mLockAfterPassiveScan = true;
          break;
        case ANDROID_CONTROL_AF_STATE_INACTIVE:
          afState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
          break;
        case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
          afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
          break;
        case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
        case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
          // Must cancel to get out of these states
          break;
        default:
          ALOGE("Unexpected AF state in CONTINUOUS_PICTURE AF mode: %d",
                afState);
      }
      break;
    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
      switch (afState) {
        // Video mode does not wait for passive scan to complete
        case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
        case ANDROID_CONTROL_AF_STATE_INACTIVE:
          afState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
          break;
        case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
          afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
          break;
        case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
        case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
          // Must cancel to get out of these states
          break;
        default:
          ALOGE("Unexpected AF state in CONTINUOUS_VIDEO AF mode: %d", afState);
      }
      break;
    default:
      break;
  }
  return afState;
}
1882
maybeStartAfScan(uint8_t afMode,uint8_t afState)1883 int EmulatedFakeCamera2::ControlThread::maybeStartAfScan(uint8_t afMode,
1884 uint8_t afState) {
1885 if ((afMode == ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO ||
1886 afMode == ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE) &&
1887 (afState == ANDROID_CONTROL_AF_STATE_INACTIVE ||
1888 afState == ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED)) {
1889 bool startScan = ((double)rand() / RAND_MAX) < kContinuousAfStartRate;
1890 if (startScan) {
1891 // Start new passive focusing cycle
1892 mAfScanDuration =
1893 ((double)rand() / RAND_MAX) * (kMaxAfDuration - kMinAfDuration) +
1894 kMinAfDuration;
1895 afState = ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN;
1896 ALOGV("%s: AF passive scan start, duration %" PRId64 " ms", __FUNCTION__,
1897 mAfScanDuration / 1000000);
1898 }
1899 }
1900 return afState;
1901 }
1902
// Advances an in-progress AF scan: when the scan timer has expired, resolve
// it to a final state per mode (random success for AUTO/MACRO, focus lock or
// passive-focused for the continuous modes); otherwise shorten the control
// loop's sleep so the scan end is not overshot.
int EmulatedFakeCamera2::ControlThread::updateAfScan(uint8_t afMode,
                                                     uint8_t afState,
                                                     nsecs_t *maxSleep) {
  // Nothing to do unless a scan is actually running.
  if (!(afState == ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN ||
        afState == ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN)) {
    return afState;
  }

  if (mAfScanDuration <= 0) {
    ALOGV("%s: AF scan done", __FUNCTION__);
    switch (afMode) {
      case ANDROID_CONTROL_AF_MODE_MACRO:
      case ANDROID_CONTROL_AF_MODE_AUTO: {
        // Active scans succeed with probability kAfSuccessRate.
        bool success = ((double)rand() / RAND_MAX) < kAfSuccessRate;
        if (success) {
          afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
        } else {
          afState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
        }
        break;
      }
      case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
        // Honor an AF trigger that arrived mid-scan (processAfTrigger).
        if (mLockAfterPassiveScan) {
          afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
          mLockAfterPassiveScan = false;
        } else {
          afState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
        }
        break;
      case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
        afState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
        break;
      default:
        ALOGE("Unexpected AF mode in scan state");
    }
  } else {
    // Scan still running: wake up again no later than when it completes.
    if (mAfScanDuration <= *maxSleep) {
      *maxSleep = mAfScanDuration;
    }
  }
  return afState;
}
1945
updateAfState(uint8_t newState,int32_t triggerId)1946 void EmulatedFakeCamera2::ControlThread::updateAfState(uint8_t newState,
1947 int32_t triggerId) {
1948 Mutex::Autolock lock(mInputMutex);
1949 if (mAfState != newState) {
1950 ALOGV("%s: Autofocus state now %d, id %d", __FUNCTION__, newState,
1951 triggerId);
1952 mAfState = newState;
1953 mParent->sendNotification(CAMERA2_MSG_AUTOFOCUS, newState, triggerId, 0);
1954 }
1955 }
1956
processPrecaptureTrigger(uint8_t aeMode,uint8_t aeState)1957 int EmulatedFakeCamera2::ControlThread::processPrecaptureTrigger(
1958 uint8_t aeMode, uint8_t aeState) {
1959 switch (aeMode) {
1960 case ANDROID_CONTROL_AE_MODE_OFF:
1961 // Don't do anything for these
1962 return aeState;
1963 case ANDROID_CONTROL_AE_MODE_ON:
1964 case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:
1965 case ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH:
1966 case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE:
1967 // Trigger a precapture cycle
1968 aeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE;
1969 mAeScanDuration =
1970 ((double)rand() / RAND_MAX) *
1971 (kMaxPrecaptureAeDuration - kMinPrecaptureAeDuration) +
1972 kMinPrecaptureAeDuration;
1973 ALOGD("%s: AE precapture scan start, duration %" PRId64 " ms",
1974 __FUNCTION__, mAeScanDuration / 1000000);
1975 }
1976 return aeState;
1977 }
1978
maybeStartAeScan(uint8_t aeMode,bool aeLocked,uint8_t aeState)1979 int EmulatedFakeCamera2::ControlThread::maybeStartAeScan(uint8_t aeMode,
1980 bool aeLocked,
1981 uint8_t aeState) {
1982 if (aeLocked) return aeState;
1983 switch (aeMode) {
1984 case ANDROID_CONTROL_AE_MODE_OFF:
1985 break;
1986 case ANDROID_CONTROL_AE_MODE_ON:
1987 case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:
1988 case ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH:
1989 case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE: {
1990 if (aeState != ANDROID_CONTROL_AE_STATE_INACTIVE &&
1991 aeState != ANDROID_CONTROL_AE_STATE_CONVERGED)
1992 break;
1993
1994 bool startScan = ((double)rand() / RAND_MAX) < kAeScanStartRate;
1995 if (startScan) {
1996 mAeScanDuration =
1997 ((double)rand() / RAND_MAX) * (kMaxAeDuration - kMinAeDuration) +
1998 kMinAeDuration;
1999 aeState = ANDROID_CONTROL_AE_STATE_SEARCHING;
2000 ALOGV("%s: AE scan start, duration %" PRId64 " ms", __FUNCTION__,
2001 mAeScanDuration / 1000000);
2002 }
2003 }
2004 }
2005
2006 return aeState;
2007 }
2008
updateAeScan(uint8_t,bool aeLock,uint8_t aeState,nsecs_t * maxSleep)2009 int EmulatedFakeCamera2::ControlThread::updateAeScan(uint8_t /*aeMode*/,
2010 bool aeLock,
2011 uint8_t aeState,
2012 nsecs_t *maxSleep) {
2013 if (aeLock && aeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
2014 mAeScanDuration = 0;
2015 aeState = ANDROID_CONTROL_AE_STATE_LOCKED;
2016 } else if ((aeState == ANDROID_CONTROL_AE_STATE_SEARCHING) ||
2017 (aeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE)) {
2018 if (mAeScanDuration <= 0) {
2019 ALOGV("%s: AE scan done", __FUNCTION__);
2020 aeState = aeLock ? ANDROID_CONTROL_AE_STATE_LOCKED
2021 : ANDROID_CONTROL_AE_STATE_CONVERGED;
2022
2023 Mutex::Autolock lock(mInputMutex);
2024 mExposureTime = kNormalExposureTime;
2025 } else {
2026 if (mAeScanDuration <= *maxSleep) {
2027 *maxSleep = mAeScanDuration;
2028 }
2029
2030 int64_t exposureDelta =
2031 ((double)rand() / RAND_MAX) * 2 * kExposureJump - kExposureJump;
2032 Mutex::Autolock lock(mInputMutex);
2033 mExposureTime = mExposureTime + exposureDelta;
2034 if (mExposureTime < kMinExposureTime) mExposureTime = kMinExposureTime;
2035 }
2036 }
2037
2038 return aeState;
2039 }
2040
updateAeState(uint8_t newState,int32_t triggerId)2041 void EmulatedFakeCamera2::ControlThread::updateAeState(uint8_t newState,
2042 int32_t triggerId) {
2043 Mutex::Autolock lock(mInputMutex);
2044 if (mAeState != newState) {
2045 ALOGV("%s: Autoexposure state now %d, id %d", __FUNCTION__, newState,
2046 triggerId);
2047 mAeState = newState;
2048 mParent->sendNotification(CAMERA2_MSG_AUTOEXPOSURE, newState, triggerId, 0);
2049 }
2050 }
2051
2052 /** Private methods */
2053
/**
 * Builds the static (per-device) camera metadata for this fake camera, or
 * computes the buffer size needed to hold it.
 *
 * Two-pass pattern: when sizeRequest is true, each ADD_OR_SIZE only tallies
 * entryCount/dataCount, and a fresh metadata buffer of that size is allocated
 * into *info at the end; when false, each ADD_OR_SIZE appends the entry into
 * the caller-provided *info buffer.
 *
 * Returns OK on success, NO_MEMORY if the sizing-pass allocation fails, or
 * the first error from addOrSize().
 */
status_t EmulatedFakeCamera2::constructStaticInfo(camera_metadata_t **info,
                                                  bool sizeRequest) const {
  size_t entryCount = 0;
  size_t dataCount = 0;
  status_t ret;

// Add the entry (or accumulate its size) and bail out of this function on the
// first failure.
#define ADD_OR_SIZE(tag, data, count) \
  if ((ret = addOrSize(*info, sizeRequest, &entryCount, &dataCount, tag, data, \
                       count)) != OK) \
  return ret

  // android.lens

  // 5 cm min focus distance for back camera, infinity (fixed focus) for front
  const float minFocusDistance = mFacingBack ? 1.0 / 0.05 : 0.0;
  ADD_OR_SIZE(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, &minFocusDistance, 1);
  // 5 m hyperfocal distance for back camera, infinity (fixed focus) for front
  // const float hyperFocalDistance = mFacingBack ? 1.0 / 5.0 : 0.0;
  // NOTE(review): the dedicated hyperfocal constant above is commented out
  // and minFocusDistance is reused for the hyperfocal tag — presumably
  // intentional for the fake device, but worth confirming.
  ADD_OR_SIZE(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, &minFocusDistance, 1);

  static const float focalLength = 3.30f;  // mm
  ADD_OR_SIZE(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, &focalLength, 1);
  static const float aperture = 2.8f;
  ADD_OR_SIZE(ANDROID_LENS_INFO_AVAILABLE_APERTURES, &aperture, 1);
  static const float filterDensity = 0;
  ADD_OR_SIZE(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES, &filterDensity, 1);
  static const uint8_t availableOpticalStabilization =
      ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
  ADD_OR_SIZE(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
              &availableOpticalStabilization, 1);

  static const int32_t lensShadingMapSize[] = {1, 1};
  ADD_OR_SIZE(ANDROID_LENS_INFO_SHADING_MAP_SIZE, lensShadingMapSize,
              sizeof(lensShadingMapSize) / sizeof(int32_t));

  int32_t lensFacing =
      mFacingBack ? ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
  ADD_OR_SIZE(ANDROID_LENS_FACING, &lensFacing, 1);

  // android.sensor

  ADD_OR_SIZE(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
              Sensor::kExposureTimeRange, 2);

  ADD_OR_SIZE(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
              &Sensor::kFrameDurationRange[1], 1);

  ADD_OR_SIZE(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, Sensor::kSensitivityRange,
              sizeof(Sensor::kSensitivityRange) / sizeof(int32_t));

  ADD_OR_SIZE(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
              &Sensor::kColorFilterArrangement, 1);

  static const float sensorPhysicalSize[2] = {3.20f, 2.40f};  // mm
  ADD_OR_SIZE(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, sensorPhysicalSize, 2);

  // Pixel array and active array are reported as identical (no border pixels
  // on the fake sensor).
  const int32_t pixelArray[] = {mSensorWidth, mSensorHeight};
  ADD_OR_SIZE(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, pixelArray, 2);

  ADD_OR_SIZE(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, pixelArray, 2);

  ADD_OR_SIZE(ANDROID_SENSOR_INFO_WHITE_LEVEL, &Sensor::kMaxRawValue, 1);

  // Same black level for all four Bayer channels.
  static const int32_t blackLevelPattern[4] = {
      static_cast<int32_t>(Sensor::kBlackLevel),
      static_cast<int32_t>(Sensor::kBlackLevel),
      static_cast<int32_t>(Sensor::kBlackLevel),
      static_cast<int32_t>(Sensor::kBlackLevel)};
  ADD_OR_SIZE(ANDROID_SENSOR_BLACK_LEVEL_PATTERN, blackLevelPattern,
              sizeof(blackLevelPattern) / sizeof(int32_t));

  // TODO: sensor color calibration fields

  // android.flash
  static const uint8_t flashAvailable = 0;
  ADD_OR_SIZE(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1);

  static const int64_t flashChargeDuration = 0;
  ADD_OR_SIZE(ANDROID_FLASH_INFO_CHARGE_DURATION, &flashChargeDuration, 1);

  // android.tonemap

  static const int32_t tonemapCurvePoints = 128;
  ADD_OR_SIZE(ANDROID_TONEMAP_MAX_CURVE_POINTS, &tonemapCurvePoints, 1);

  // android.scaler

  ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_FORMATS, kAvailableFormats,
              sizeof(kAvailableFormats) / sizeof(uint32_t));

  ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_RAW_SIZES, &mAvailableRawSizes.front(),
              mAvailableRawSizes.size());

  ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
              kAvailableRawMinDurations,
              sizeof(kAvailableRawMinDurations) / sizeof(uint64_t));

  ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
              &mAvailableProcessedSizes.front(),
              mAvailableProcessedSizes.size());

  ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
              kAvailableProcessedMinDurations,
              sizeof(kAvailableProcessedMinDurations) / sizeof(uint64_t));

  ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_JPEG_SIZES, &mAvailableJpegSizes.front(),
              mAvailableJpegSizes.size());

  ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
              kAvailableJpegMinDurations,
              sizeof(kAvailableJpegMinDurations) / sizeof(uint64_t));

  static const float maxZoom = 10;
  ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, &maxZoom, 1);

  // android.jpeg

  // Flattened (width, height) pairs; {0, 0} means "no thumbnail".
  static const int32_t jpegThumbnailSizes[] = {0, 0, 160, 120, 320, 240};
  ADD_OR_SIZE(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, jpegThumbnailSizes,
              sizeof(jpegThumbnailSizes) / sizeof(int32_t));

  static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize;
  ADD_OR_SIZE(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);

  // android.stats

  static const uint8_t availableFaceDetectModes[] = {
      ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
      ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE,
      ANDROID_STATISTICS_FACE_DETECT_MODE_FULL};

  ADD_OR_SIZE(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
              availableFaceDetectModes, sizeof(availableFaceDetectModes));

  static const int32_t maxFaceCount = 8;
  ADD_OR_SIZE(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, &maxFaceCount, 1);

  static const int32_t histogramSize = 64;
  ADD_OR_SIZE(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT, &histogramSize,
              1);

  static const int32_t maxHistogramCount = 1000;
  ADD_OR_SIZE(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT, &maxHistogramCount,
              1);

  static const int32_t sharpnessMapSize[2] = {64, 64};
  ADD_OR_SIZE(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, sharpnessMapSize,
              sizeof(sharpnessMapSize) / sizeof(int32_t));

  static const int32_t maxSharpnessMapValue = 1000;
  ADD_OR_SIZE(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
              &maxSharpnessMapValue, 1);

  // android.control

  static const uint8_t availableSceneModes[] = {
      ANDROID_CONTROL_SCENE_MODE_DISABLED
  };
  ADD_OR_SIZE(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, availableSceneModes,
              sizeof(availableSceneModes));

  static const uint8_t availableEffects[] = {ANDROID_CONTROL_EFFECT_MODE_OFF};
  ADD_OR_SIZE(ANDROID_CONTROL_AVAILABLE_EFFECTS, availableEffects,
              sizeof(availableEffects));

  // No metering/focus region support.
  static const int32_t max3aRegions[] = {/*AE*/ 0, /*AWB*/ 0, /*AF*/ 0};
  ADD_OR_SIZE(ANDROID_CONTROL_MAX_REGIONS, max3aRegions,
              sizeof(max3aRegions) / sizeof(max3aRegions[0]));

  static const uint8_t availableAeModes[] = {ANDROID_CONTROL_AE_MODE_OFF,
                                             ANDROID_CONTROL_AE_MODE_ON};
  ADD_OR_SIZE(ANDROID_CONTROL_AE_AVAILABLE_MODES, availableAeModes,
              sizeof(availableAeModes));

  // Exposure compensation in 1/3 EV steps over [-9, 9] (i.e. +/- 3 EV).
  static const camera_metadata_rational exposureCompensationStep = {1, 3};
  ADD_OR_SIZE(ANDROID_CONTROL_AE_COMPENSATION_STEP, &exposureCompensationStep,
              1);

  int32_t exposureCompensationRange[] = {-9, 9};
  ADD_OR_SIZE(ANDROID_CONTROL_AE_COMPENSATION_RANGE, exposureCompensationRange,
              sizeof(exposureCompensationRange) / sizeof(int32_t));

  // Flattened (min, max) FPS range pairs.
  static const int32_t availableTargetFpsRanges[] = {5, 30, 15, 30};
  ADD_OR_SIZE(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
              availableTargetFpsRanges,
              sizeof(availableTargetFpsRanges) / sizeof(int32_t));

  static const uint8_t availableAntibandingModes[] = {
      ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
      ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO};
  ADD_OR_SIZE(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
              availableAntibandingModes, sizeof(availableAntibandingModes));

  static const uint8_t availableAwbModes[] = {
      ANDROID_CONTROL_AWB_MODE_OFF,
      ANDROID_CONTROL_AWB_MODE_AUTO,
      ANDROID_CONTROL_AWB_MODE_INCANDESCENT,
      ANDROID_CONTROL_AWB_MODE_FLUORESCENT,
      ANDROID_CONTROL_AWB_MODE_DAYLIGHT,
      ANDROID_CONTROL_AWB_MODE_SHADE};
  ADD_OR_SIZE(ANDROID_CONTROL_AWB_AVAILABLE_MODES, availableAwbModes,
              sizeof(availableAwbModes));

  // Back camera gets the full AF mode set; front camera is fixed-focus.
  static const uint8_t availableAfModesBack[] = {
      ANDROID_CONTROL_AF_MODE_OFF, ANDROID_CONTROL_AF_MODE_AUTO,
      ANDROID_CONTROL_AF_MODE_MACRO, ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,
      ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE};

  static const uint8_t availableAfModesFront[] = {ANDROID_CONTROL_AF_MODE_OFF};

  if (mFacingBack) {
    ADD_OR_SIZE(ANDROID_CONTROL_AF_AVAILABLE_MODES, availableAfModesBack,
                sizeof(availableAfModesBack));
  } else {
    ADD_OR_SIZE(ANDROID_CONTROL_AF_AVAILABLE_MODES, availableAfModesFront,
                sizeof(availableAfModesFront));
  }

  static const uint8_t availableVstabModes[] = {
      ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
  ADD_OR_SIZE(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
              availableVstabModes, sizeof(availableVstabModes));

#undef ADD_OR_SIZE
  /** Allocate metadata if sizing */
  if (sizeRequest) {
    ALOGV(
        "Allocating %zu entries, %zu extra bytes for "
        "static camera info",
        entryCount, dataCount);
    *info = allocate_camera_metadata(entryCount, dataCount);
    if (*info == NULL) {
      ALOGE(
          "Unable to allocate camera static info"
          "(%zu entries, %zu bytes extra data)",
          entryCount, dataCount);
      return NO_MEMORY;
    }
  }
  return OK;
}
2295
/**
 * Builds a default capture-request metadata buffer for the given camera2
 * request template, or computes the buffer size needed to hold it.
 *
 * Same two-pass pattern as constructStaticInfo(): with sizeRequest true the
 * ADD_OR_SIZE calls only tally entryCount/dataCount and a buffer is allocated
 * into *request at the end; with sizeRequest false each entry is appended to
 * the caller-provided *request.
 *
 * Returns OK on success, NO_MEMORY if the sizing-pass allocation fails, or
 * the first error from addOrSize().
 */
status_t EmulatedFakeCamera2::constructDefaultRequest(
    int request_template, camera_metadata_t **request, bool sizeRequest) const {
  size_t entryCount = 0;
  size_t dataCount = 0;
  status_t ret;

// Add the entry (or accumulate its size) and bail out on the first failure.
#define ADD_OR_SIZE(tag, data, count) \
  if ((ret = addOrSize(*request, sizeRequest, &entryCount, &dataCount, tag, \
                       data, count)) != OK) \
  return ret

  /** android.request */

  static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
  ADD_OR_SIZE(ANDROID_REQUEST_TYPE, &requestType, 1);

  static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
  ADD_OR_SIZE(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);

  static const int32_t id = 0;
  ADD_OR_SIZE(ANDROID_REQUEST_ID, &id, 1);

  static const int32_t frameCount = 0;
  ADD_OR_SIZE(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1);

  // OUTPUT_STREAMS set by user
  // Reserve sizing-pass space for the stream list the user will fill in.
  entryCount += 1;
  dataCount += 5;  // TODO: Should be maximum stream number

  /** android.lens */

  static const float focusDistance = 0;
  ADD_OR_SIZE(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1);

  static const float aperture = 2.8f;
  ADD_OR_SIZE(ANDROID_LENS_APERTURE, &aperture, 1);

  // NOTE(review): 5.0 mm here vs. the 3.30 mm advertised in the static info's
  // AVAILABLE_FOCAL_LENGTHS — looks inconsistent; confirm which is intended.
  static const float focalLength = 5.0f;
  ADD_OR_SIZE(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1);

  static const float filterDensity = 0;
  ADD_OR_SIZE(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1);

  static const uint8_t opticalStabilizationMode =
      ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
  ADD_OR_SIZE(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
              &opticalStabilizationMode, 1);

  // FOCUS_RANGE set only in frame

  /** android.sensor */

  static const int64_t exposureTime = 10 * MSEC;
  ADD_OR_SIZE(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1);

  static const int64_t frameDuration = 33333333L;  // 1/30 s
  ADD_OR_SIZE(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);

  static const int32_t sensitivity = 100;
  ADD_OR_SIZE(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1);

  // TIMESTAMP set only in frame

  /** android.flash */

  static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
  ADD_OR_SIZE(ANDROID_FLASH_MODE, &flashMode, 1);

  static const uint8_t flashPower = 10;
  ADD_OR_SIZE(ANDROID_FLASH_FIRING_POWER, &flashPower, 1);

  static const int64_t firingTime = 0;
  ADD_OR_SIZE(ANDROID_FLASH_FIRING_TIME, &firingTime, 1);

  /** Processing block modes */
  // Still/snapshot/ZSL templates get high-quality processing; preview/video
  // (and anything unrecognized) get the fast path.
  uint8_t hotPixelMode = 0;
  uint8_t demosaicMode = 0;
  uint8_t noiseMode = 0;
  uint8_t shadingMode = 0;
  uint8_t colorMode = 0;
  uint8_t tonemapMode = 0;
  uint8_t edgeMode = 0;
  switch (request_template) {
    case CAMERA2_TEMPLATE_STILL_CAPTURE:
      // fall-through
    case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT:
      // fall-through
    case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG:
      hotPixelMode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY;
      demosaicMode = ANDROID_DEMOSAIC_MODE_HIGH_QUALITY;
      noiseMode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
      shadingMode = ANDROID_SHADING_MODE_HIGH_QUALITY;
      colorMode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
      tonemapMode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
      edgeMode = ANDROID_EDGE_MODE_HIGH_QUALITY;
      break;
    case CAMERA2_TEMPLATE_PREVIEW:
      // fall-through
    case CAMERA2_TEMPLATE_VIDEO_RECORD:
      // fall-through
    default:
      hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
      demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
      noiseMode = ANDROID_NOISE_REDUCTION_MODE_FAST;
      shadingMode = ANDROID_SHADING_MODE_FAST;
      colorMode = ANDROID_COLOR_CORRECTION_MODE_FAST;
      tonemapMode = ANDROID_TONEMAP_MODE_FAST;
      edgeMode = ANDROID_EDGE_MODE_FAST;
      break;
  }
  ADD_OR_SIZE(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
  ADD_OR_SIZE(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
  ADD_OR_SIZE(ANDROID_NOISE_REDUCTION_MODE, &noiseMode, 1);
  ADD_OR_SIZE(ANDROID_SHADING_MODE, &shadingMode, 1);
  ADD_OR_SIZE(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1);
  ADD_OR_SIZE(ANDROID_TONEMAP_MODE, &tonemapMode, 1);
  ADD_OR_SIZE(ANDROID_EDGE_MODE, &edgeMode, 1);

  /** android.noise */
  static const uint8_t noiseStrength = 5;
  ADD_OR_SIZE(ANDROID_NOISE_REDUCTION_STRENGTH, &noiseStrength, 1);

  /** android.color */
  // Identity 3x3 color transform (row-major).
  static const float colorTransform[9] = {1.0f, 0.f, 0.f, 0.f, 1.f,
                                          0.f,  0.f, 0.f, 1.f};
  ADD_OR_SIZE(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9);

  /** android.tonemap */
  // Linear tonemap curve: two (in, out) control points (0,0) and (1,1),
  // shared by all three channels.
  static const float tonemapCurve[4] = {0.f, 0.f, 1.f, 1.f};
  ADD_OR_SIZE(ANDROID_TONEMAP_CURVE_RED, tonemapCurve, 4);
  ADD_OR_SIZE(ANDROID_TONEMAP_CURVE_GREEN, tonemapCurve, 4);
  ADD_OR_SIZE(ANDROID_TONEMAP_CURVE_BLUE, tonemapCurve, 4);

  /** android.edge */
  static const uint8_t edgeStrength = 5;
  ADD_OR_SIZE(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);

  /** android.scaler */
  // NOTE(review): only 3 values are supplied here, while crop regions are
  // conventionally (left, top, width, height); verify downstream consumers
  // expect this 3-element form before changing it.
  static const int32_t cropRegion[3] = {0, 0,
                                        static_cast<int32_t>(mSensorWidth)};
  ADD_OR_SIZE(ANDROID_SCALER_CROP_REGION, cropRegion, 3);

  /** android.jpeg */
  static const int32_t jpegQuality = 80;
  ADD_OR_SIZE(ANDROID_JPEG_QUALITY, &jpegQuality, 1);

  static const int32_t thumbnailSize[2] = {640, 480};
  ADD_OR_SIZE(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);

  static const int32_t thumbnailQuality = 80;
  ADD_OR_SIZE(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1);

  static const double gpsCoordinates[2] = {0, 0};
  ADD_OR_SIZE(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 2);

  static const uint8_t gpsProcessingMethod[32] = "None";
  ADD_OR_SIZE(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32);

  static const int64_t gpsTimestamp = 0;
  ADD_OR_SIZE(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1);

  static const int32_t jpegOrientation = 0;
  ADD_OR_SIZE(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);

  /** android.stats */

  static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
  ADD_OR_SIZE(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);

  static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
  ADD_OR_SIZE(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);

  static const uint8_t sharpnessMapMode =
      ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
  ADD_OR_SIZE(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);

  // faceRectangles, faceScores, faceLandmarks, faceIds, histogram,
  // sharpnessMap only in frames

  /** android.control */

  // Capture intent mirrors the requested template.
  uint8_t controlIntent = 0;
  switch (request_template) {
    case CAMERA2_TEMPLATE_PREVIEW:
      controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
      break;
    case CAMERA2_TEMPLATE_STILL_CAPTURE:
      controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
      break;
    case CAMERA2_TEMPLATE_VIDEO_RECORD:
      controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
      break;
    case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT:
      controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
      break;
    case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG:
      controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
      break;
    default:
      controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
      break;
  }
  ADD_OR_SIZE(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);

  static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
  ADD_OR_SIZE(ANDROID_CONTROL_MODE, &controlMode, 1);

  static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
  ADD_OR_SIZE(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);

  static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
  ADD_OR_SIZE(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);

  static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
  ADD_OR_SIZE(ANDROID_CONTROL_AE_MODE, &aeMode, 1);

  static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
  ADD_OR_SIZE(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);

  // Full-sensor metering region with weight 1000; reused for AE/AWB/AF below.
  static const int32_t controlRegions[5] = {
      0, 0, static_cast<int32_t>(mSensorWidth),
      static_cast<int32_t>(mSensorHeight), 1000};
  ADD_OR_SIZE(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5);

  static const int32_t aeExpCompensation = 0;
  ADD_OR_SIZE(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1);

  static const int32_t aeTargetFpsRange[2] = {10, 30};
  ADD_OR_SIZE(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2);

  static const uint8_t aeAntibandingMode =
      ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
  ADD_OR_SIZE(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1);

  static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
  ADD_OR_SIZE(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);

  static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
  ADD_OR_SIZE(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);

  ADD_OR_SIZE(ANDROID_CONTROL_AWB_REGIONS, controlRegions, 5);

  // AF mode follows the template: continuous for video/ZSL, plain auto
  // otherwise.
  uint8_t afMode = 0;
  switch (request_template) {
    case CAMERA2_TEMPLATE_PREVIEW:
      afMode = ANDROID_CONTROL_AF_MODE_AUTO;
      break;
    case CAMERA2_TEMPLATE_STILL_CAPTURE:
      afMode = ANDROID_CONTROL_AF_MODE_AUTO;
      break;
    case CAMERA2_TEMPLATE_VIDEO_RECORD:
      afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
      break;
    case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT:
      afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
      break;
    case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG:
      afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
      break;
    default:
      afMode = ANDROID_CONTROL_AF_MODE_AUTO;
      break;
  }
  ADD_OR_SIZE(ANDROID_CONTROL_AF_MODE, &afMode, 1);

  ADD_OR_SIZE(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5);

  static const uint8_t vstabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
  ADD_OR_SIZE(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vstabMode, 1);

  // aeState, awbState, afState only in frame

  /** Allocate metadata if sizing */
  if (sizeRequest) {
    ALOGV(
        "Allocating %zu entries, %zu extra bytes for "
        "request template type %d",
        entryCount, dataCount, request_template);
    *request = allocate_camera_metadata(entryCount, dataCount);
    if (*request == NULL) {
      ALOGE(
          "Unable to allocate new request template type %d "
          "(%zu entries, %zu bytes extra data)",
          request_template, entryCount, dataCount);
      return NO_MEMORY;
    }
  }
  return OK;
  // Harmless placement after return: #undef is a preprocessor directive, not
  // executable code.
#undef ADD_OR_SIZE
}
2586
addOrSize(camera_metadata_t * request,bool sizeRequest,size_t * entryCount,size_t * dataCount,uint32_t tag,const void * entryData,size_t entryDataCount)2587 status_t EmulatedFakeCamera2::addOrSize(camera_metadata_t *request,
2588 bool sizeRequest, size_t *entryCount,
2589 size_t *dataCount, uint32_t tag,
2590 const void *entryData,
2591 size_t entryDataCount) {
2592 if (!sizeRequest) {
2593 return add_camera_metadata_entry(request, tag, entryData, entryDataCount);
2594 } else {
2595 int type = get_camera_metadata_tag_type(tag);
2596 if (type < 0) return BAD_VALUE;
2597 (*entryCount)++;
2598 (*dataCount) +=
2599 calculate_camera_metadata_entry_data_size(type, entryDataCount);
2600 return OK;
2601 }
2602 }
2603
isStreamInUse(uint32_t id)2604 bool EmulatedFakeCamera2::isStreamInUse(uint32_t id) {
2605 // Assumes mMutex is locked; otherwise new requests could enter
2606 // configureThread while readoutThread is being checked
2607
2608 // Order of isStreamInUse calls matters
2609 if (mConfigureThread->isStreamInUse(id) ||
2610 mReadoutThread->isStreamInUse(id) || mJpegCompressor->isStreamInUse(id)) {
2611 ALOGE("%s: Stream %d is in use in active requests!", __FUNCTION__, id);
2612 return true;
2613 }
2614 return false;
2615 }
2616
// Reprocess streams are not implemented in this fake camera, so none can ever
// be in use.
bool EmulatedFakeCamera2::isReprocessStreamInUse(uint32_t /*id*/) {
  // TODO: implement
  return false;
}
2621
getStreamInfo(uint32_t streamId)2622 const Stream &EmulatedFakeCamera2::getStreamInfo(uint32_t streamId) {
2623 Mutex::Autolock lock(mMutex);
2624
2625 return mStreams.valueFor(streamId);
2626 }
2627
getReprocessStreamInfo(uint32_t streamId)2628 const ReprocessStream &EmulatedFakeCamera2::getReprocessStreamInfo(
2629 uint32_t streamId) {
2630 Mutex::Autolock lock(mMutex);
2631
2632 return mReprocessStreams.valueFor(streamId);
2633 }
2634
2635 }; /* namespace android */
2636