1 /*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 /*
18 * Contains implementation of an abstract class EmulatedCameraDevice that
19 * defines functionality expected from an emulated physical camera device:
20 * - Obtaining and setting camera parameters
21 * - Capturing frames
22 * - Streaming video
23 * - etc.
24 */
25
26 #define LOG_NDEBUG 0
27 #define LOG_TAG "EmulatedCamera_Device"
28 #include "EmulatedCameraDevice.h"
29 #include <log/log.h>
30 #include <sys/select.h>
31 #include <algorithm>
32 #include <cmath>
33 #include "EmulatedCamera.h"
34
35 namespace android {
36
/* Gamma used to map an exposure-compensation (EV) step onto a
 * multiplicative luminance scale: scale = 2^(ev / GAMMA_CORRECTION). */
const float GAMMA_CORRECTION = 2.2f;
EmulatedCameraDevice(EmulatedCamera * camera_hal)38 EmulatedCameraDevice::EmulatedCameraDevice(EmulatedCamera* camera_hal)
39 : mObjectLock(),
40 mCurFrameTimestamp(0),
41 mCameraHAL(camera_hal),
42 mCurrentFrame(NULL),
43 mExposureCompensation(1.0f),
44 mWhiteBalanceScale(NULL),
45 mIsFocusing(false),
46 mSupportedWhiteBalanceScale(),
47 mState(ECDS_CONSTRUCTED) {}
48
~EmulatedCameraDevice()49 EmulatedCameraDevice::~EmulatedCameraDevice() {
50 ALOGV("EmulatedCameraDevice destructor");
51 if (mCurrentFrame != NULL) {
52 delete[] mCurrentFrame;
53 }
54 for (size_t i = 0; i < mSupportedWhiteBalanceScale.size(); ++i) {
55 if (mSupportedWhiteBalanceScale.valueAt(i) != NULL) {
56 delete[] mSupportedWhiteBalanceScale.valueAt(i);
57 }
58 }
59 }
60
61 /****************************************************************************
62 * Emulated camera device public API
63 ***************************************************************************/
64
Initialize()65 status_t EmulatedCameraDevice::Initialize() {
66 if (isInitialized()) {
67 ALOGW("%s: Emulated camera device is already initialized: mState = %d",
68 __FUNCTION__, mState);
69 return NO_ERROR;
70 }
71
72 /* Instantiate worker thread object. */
73 mWorkerThread = new WorkerThread(this);
74 if (getWorkerThread() == NULL) {
75 ALOGE("%s: Unable to instantiate worker thread object", __FUNCTION__);
76 return ENOMEM;
77 }
78
79 mState = ECDS_INITIALIZED;
80
81 return NO_ERROR;
82 }
83
startDeliveringFrames(bool one_burst)84 status_t EmulatedCameraDevice::startDeliveringFrames(bool one_burst) {
85 ALOGV("%s", __FUNCTION__);
86
87 if (!isStarted()) {
88 ALOGE("%s: Device is not started", __FUNCTION__);
89 return EINVAL;
90 }
91
92 /* Frames will be delivered from the thread routine. */
93 const status_t res = startWorkerThread(one_burst);
94 ALOGE_IF(res != NO_ERROR, "%s: startWorkerThread failed", __FUNCTION__);
95 return res;
96 }
97
stopDeliveringFrames()98 status_t EmulatedCameraDevice::stopDeliveringFrames() {
99 ALOGV("%s", __FUNCTION__);
100
101 if (!isStarted()) {
102 ALOGW("%s: Device is not started", __FUNCTION__);
103 return NO_ERROR;
104 }
105
106 const status_t res = stopWorkerThread();
107 ALOGE_IF(res != NO_ERROR, "%s: startWorkerThread failed", __FUNCTION__);
108 return res;
109 }
110
setExposureCompensation(const float ev)111 void EmulatedCameraDevice::setExposureCompensation(const float ev) {
112 ALOGV("%s", __FUNCTION__);
113
114 if (!isStarted()) {
115 ALOGW("%s: Fake camera device is not started.", __FUNCTION__);
116 }
117
118 mExposureCompensation = std::pow(2.0f, ev / GAMMA_CORRECTION);
119 ALOGV("New exposure compensation is %f", mExposureCompensation);
120 }
121
initializeWhiteBalanceModes(const char * mode,const float r_scale,const float b_scale)122 void EmulatedCameraDevice::initializeWhiteBalanceModes(const char* mode,
123 const float r_scale,
124 const float b_scale) {
125 ALOGV("%s with %s, %f, %f", __FUNCTION__, mode, r_scale, b_scale);
126 float* value = new float[3];
127 value[0] = r_scale;
128 value[1] = 1.0f;
129 value[2] = b_scale;
130 mSupportedWhiteBalanceScale.add(String8(mode), value);
131 }
132
setWhiteBalanceMode(const char * mode)133 void EmulatedCameraDevice::setWhiteBalanceMode(const char* mode) {
134 ALOGV("%s with white balance %s", __FUNCTION__, mode);
135 mWhiteBalanceScale = mSupportedWhiteBalanceScale.valueFor(String8(mode));
136 }
137
startAutoFocus()138 void EmulatedCameraDevice::startAutoFocus() { mIsFocusing = true; }
139
140 /* Computes the pixel value after adjusting the white balance to the current
141 * one. The input the y, u, v channel of the pixel and the adjusted value will
142 * be stored in place. The adjustment is done in RGB space.
143 */
changeWhiteBalance(uint8_t & y,uint8_t & u,uint8_t & v) const144 void EmulatedCameraDevice::changeWhiteBalance(uint8_t& y, uint8_t& u,
145 uint8_t& v) const {
146 float r_scale = mWhiteBalanceScale[0];
147 float b_scale = mWhiteBalanceScale[2];
148 int r = static_cast<float>(YUV2R(y, u, v)) / r_scale;
149 int g = YUV2G(y, u, v);
150 int b = static_cast<float>(YUV2B(y, u, v)) / b_scale;
151
152 y = RGB2Y(r, g, b);
153 u = RGB2U(r, g, b);
154 v = RGB2V(r, g, b);
155 }
156
simulateAutoFocus()157 void EmulatedCameraDevice::simulateAutoFocus() {
158 if (mIsFocusing) {
159 ALOGV("%s: Simulating auto-focus", __FUNCTION__);
160 mCameraHAL->onCameraFocusAcquired();
161 mIsFocusing = false;
162 }
163 }
164
getCurrentPreviewFrame(void * buffer)165 status_t EmulatedCameraDevice::getCurrentPreviewFrame(void* buffer) {
166 if (!isStarted()) {
167 ALOGE("%s: Device is not started", __FUNCTION__);
168 return EINVAL;
169 }
170 if (mCurrentFrame == NULL || buffer == NULL) {
171 ALOGE("%s: No framebuffer", __FUNCTION__);
172 return EINVAL;
173 }
174
175 /* In emulation the framebuffer is never RGB. */
176 switch (mPixelFormat) {
177 case V4L2_PIX_FMT_YVU420:
178 YV12ToRGB32(mCurrentFrame, buffer, mFrameWidth, mFrameHeight);
179 return NO_ERROR;
180 case V4L2_PIX_FMT_YUV420:
181 YU12ToRGB32(mCurrentFrame, buffer, mFrameWidth, mFrameHeight);
182 return NO_ERROR;
183 case V4L2_PIX_FMT_NV21:
184 NV21ToRGB32(mCurrentFrame, buffer, mFrameWidth, mFrameHeight);
185 return NO_ERROR;
186 case V4L2_PIX_FMT_NV12:
187 NV12ToRGB32(mCurrentFrame, buffer, mFrameWidth, mFrameHeight);
188 return NO_ERROR;
189
190 default:
191 ALOGE("%s: Unknown pixel format %.4s", __FUNCTION__,
192 reinterpret_cast<const char*>(&mPixelFormat));
193 return EINVAL;
194 }
195 }
196
197 /****************************************************************************
198 * Emulated camera device private API
199 ***************************************************************************/
200
commonStartDevice(int width,int height,uint32_t pix_fmt,int fps)201 status_t EmulatedCameraDevice::commonStartDevice(int width, int height,
202 uint32_t pix_fmt, int fps) {
203 /* Validate pixel format, and calculate framebuffer size at the same time. */
204 switch (pix_fmt) {
205 case V4L2_PIX_FMT_YVU420:
206 case V4L2_PIX_FMT_YUV420:
207 case V4L2_PIX_FMT_NV21:
208 case V4L2_PIX_FMT_NV12:
209 mFrameBufferSize = (width * height * 12) / 8;
210 break;
211
212 default:
213 ALOGE("%s: Unknown pixel format %.4s", __FUNCTION__,
214 reinterpret_cast<const char*>(&pix_fmt));
215 return EINVAL;
216 }
217
218 /* Cache framebuffer info. */
219 mFrameWidth = width;
220 mFrameHeight = height;
221 mPixelFormat = pix_fmt;
222 mTotalPixels = width * height;
223 mTargetFps = fps;
224
225 /* Allocate framebuffer. */
226 mCurrentFrame = new uint8_t[mFrameBufferSize];
227 if (mCurrentFrame == NULL) {
228 ALOGE("%s: Unable to allocate framebuffer", __FUNCTION__);
229 return ENOMEM;
230 }
231 ALOGV("%s: Allocated %p %zu bytes for %d pixels in %.4s[%dx%d] frame",
232 __FUNCTION__, mCurrentFrame, mFrameBufferSize, mTotalPixels,
233 reinterpret_cast<const char*>(&mPixelFormat), mFrameWidth,
234 mFrameHeight);
235 return NO_ERROR;
236 }
237
commonStopDevice()238 void EmulatedCameraDevice::commonStopDevice() {
239 mFrameWidth = mFrameHeight = mTotalPixels = 0;
240 mPixelFormat = 0;
241 mTargetFps = 0;
242
243 if (mCurrentFrame != NULL) {
244 delete[] mCurrentFrame;
245 mCurrentFrame = NULL;
246 }
247 }
248
getCameraParameters()249 const CameraParameters* EmulatedCameraDevice::getCameraParameters() {
250 return mCameraHAL->getCameraParameters();
251 }
252
253 /****************************************************************************
254 * Worker thread management.
255 ***************************************************************************/
256
startWorkerThread(bool one_burst)257 status_t EmulatedCameraDevice::startWorkerThread(bool one_burst) {
258 ALOGV("%s", __FUNCTION__);
259
260 if (!isInitialized()) {
261 ALOGE("%s: Emulated camera device is not initialized", __FUNCTION__);
262 return EINVAL;
263 }
264
265 const status_t res = getWorkerThread()->startThread(one_burst);
266 ALOGE_IF(res != NO_ERROR, "%s: Unable to start worker thread", __FUNCTION__);
267 return res;
268 }
269
stopWorkerThread()270 status_t EmulatedCameraDevice::stopWorkerThread() {
271 ALOGV("%s", __FUNCTION__);
272
273 if (!isInitialized()) {
274 ALOGE("%s: Emulated camera device is not initialized", __FUNCTION__);
275 return EINVAL;
276 }
277
278 const status_t res = getWorkerThread()->stopThread();
279 ALOGE_IF(res != NO_ERROR, "%s: Unable to stop worker thread", __FUNCTION__);
280 return res;
281 }
282
inWorkerThread()283 bool EmulatedCameraDevice::inWorkerThread() {
284 /* This will end the thread loop, and will terminate the thread. Derived
285 * classes must override this method. */
286 return false;
287 }
288
289 /****************************************************************************
290 * Worker thread implementation.
291 ***************************************************************************/
292
readyToRun()293 status_t EmulatedCameraDevice::WorkerThread::readyToRun() {
294 ALOGV("Starting emulated camera device worker thread...");
295
296 ALOGW_IF(mThreadControl >= 0 || mControlFD >= 0,
297 "%s: Thread control FDs are opened", __FUNCTION__);
298 /* Create a pair of FDs that would be used to control the thread. */
299 int thread_fds[2];
300 status_t ret;
301 Mutex::Autolock lock(mCameraDevice->mObjectLock);
302 if (pipe(thread_fds) == 0) {
303 mThreadControl = thread_fds[1];
304 mControlFD = thread_fds[0];
305 ALOGV("Emulated device's worker thread has been started.");
306 ret = NO_ERROR;
307 } else {
308 ALOGE("%s: Unable to create thread control FDs: %d -> %s", __FUNCTION__,
309 errno, strerror(errno));
310 ret = errno;
311 }
312
313 mSetup.signal();
314 return ret;
315 }
316
stopThread()317 status_t EmulatedCameraDevice::WorkerThread::stopThread() {
318 ALOGV("Stopping emulated camera device's worker thread...");
319
320 status_t res = EINVAL;
321
322 // Limit the scope of the Autolock
323 {
324 // If thread is running and readyToRun() has not finished running,
325 // then wait until it is done.
326 Mutex::Autolock lock(mCameraDevice->mObjectLock);
327 if (isRunning() && (mThreadControl < 0 || mControlFD < 0)) {
328 mSetup.wait(mCameraDevice->mObjectLock);
329 }
330 }
331
332 if (mThreadControl >= 0) {
333 /* Send "stop" message to the thread loop. */
334 const ControlMessage msg = THREAD_STOP;
335 const int wres =
336 TEMP_FAILURE_RETRY(write(mThreadControl, &msg, sizeof(msg)));
337 if (wres == sizeof(msg)) {
338 /* Stop the thread, and wait till it's terminated. */
339 res = requestExitAndWait();
340 if (res == NO_ERROR) {
341 /* Close control FDs. */
342 if (mThreadControl >= 0) {
343 close(mThreadControl);
344 mThreadControl = -1;
345 }
346 if (mControlFD >= 0) {
347 close(mControlFD);
348 mControlFD = -1;
349 }
350 ALOGV("Emulated camera device's worker thread has been stopped.");
351 } else {
352 ALOGE("%s: requestExitAndWait failed: %d -> %s", __FUNCTION__, res,
353 strerror(-res));
354 }
355 } else {
356 ALOGE("%s: Unable to send THREAD_STOP message: %d -> %s", __FUNCTION__,
357 errno, strerror(errno));
358 res = errno ? errno : EINVAL;
359 }
360 } else {
361 ALOGE("%s: Thread control FDs are not opened", __FUNCTION__);
362 }
363
364 return res;
365 }
366
367 EmulatedCameraDevice::WorkerThread::SelectRes
Select(int fd,int timeout)368 EmulatedCameraDevice::WorkerThread::Select(int fd, int timeout) {
369 fd_set fds[1];
370 struct timeval tv, *tvp = NULL;
371
372 mCameraDevice->simulateAutoFocus();
373
374 const int fd_num = (fd >= 0) ? std::max(fd, mControlFD) + 1 : mControlFD + 1;
375 FD_ZERO(fds);
376 FD_SET(mControlFD, fds);
377 if (fd >= 0) {
378 FD_SET(fd, fds);
379 }
380 if (timeout) {
381 tv.tv_sec = timeout / 1000000;
382 tv.tv_usec = timeout % 1000000;
383 tvp = &tv;
384 }
385 int res = TEMP_FAILURE_RETRY(select(fd_num, fds, NULL, NULL, tvp));
386 if (res < 0) {
387 ALOGE("%s: select returned %d and failed: %d -> %s", __FUNCTION__, res,
388 errno, strerror(errno));
389 return ERROR;
390 } else if (res == 0) {
391 /* Timeout. */
392 return TIMEOUT;
393 } else if (FD_ISSET(mControlFD, fds)) {
394 /* A control event. Lets read the message. */
395 ControlMessage msg;
396 res = TEMP_FAILURE_RETRY(read(mControlFD, &msg, sizeof(msg)));
397 if (res != sizeof(msg)) {
398 ALOGE("%s: Unexpected message size %d, or an error %d -> %s",
399 __FUNCTION__, res, errno, strerror(errno));
400 return ERROR;
401 }
402 /* THREAD_STOP is the only message expected here. */
403 if (msg == THREAD_STOP) {
404 ALOGV("%s: THREAD_STOP message is received", __FUNCTION__);
405 return EXIT_THREAD;
406 } else {
407 ALOGE("Unknown worker thread message %d", msg);
408 return ERROR;
409 }
410 } else {
411 /* Must be an FD. */
412 ALOGW_IF(fd < 0 || !FD_ISSET(fd, fds), "%s: Undefined 'select' result",
413 __FUNCTION__);
414 return READY;
415 }
416 }
417
418 }; /* namespace android */
419