1 /* Copyright (c) 2012-2017, The Linux Foundation. All rights reserved.
2 *
3 * Redistribution and use in source and binary forms, with or without
4 * modification, are permitted provided that the following conditions are
5 * met:
6 * * Redistributions of source code must retain the above copyright
7 * notice, this list of conditions and the following disclaimer.
8 * * Redistributions in binary form must reproduce the above
9 * copyright notice, this list of conditions and the following
10 * disclaimer in the documentation and/or other materials provided
11 * with the distribution.
12 * * Neither the name of The Linux Foundation nor the names of its
13 * contributors may be used to endorse or promote products derived
14 * from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 *
28 */
29
30 #define LOG_TAG "QCamera2HWI"
31
32 // System dependencies
33 #include <fcntl.h>
34 #include <stdio.h>
35 #include <stdlib.h>
36 #define STAT_H <SYSTEM_HEADER_PREFIX/stat.h>
37 #include STAT_H
38 #include <utils/Errors.h>
39
40 // Camera dependencies
41 #include "QCamera2HWI.h"
42 #include "QCameraTrace.h"
43
44 extern "C" {
45 #include "mm_camera_dbg.h"
46 }
47
48 namespace qcamera {
49
50 /*===========================================================================
51 * FUNCTION : zsl_channel_cb
52 *
53 * DESCRIPTION: helper function to handle ZSL superbuf callback directly from
54 * mm-camera-interface
55 *
56 * PARAMETERS :
57 * @recvd_frame : received super buffer
58 * @userdata : user data ptr
59 *
60 * RETURN : None
61 *
62 * NOTE : recvd_frame will be released by the caller after this call returns,
63 * so if asynchronous processing of recvd_frame is needed, it's our
64 * responsibility to save a copy of it for later use.
65 *==========================================================================*/
66 void QCamera2HardwareInterface::zsl_channel_cb(mm_camera_super_buf_t *recvd_frame,
67 void *userdata)
68 {
69 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_ZSL_CH_CB);
70 LOGH("[KPI Perf]: E");
71 char value[PROPERTY_VALUE_MAX];
72 bool dump_raw = false;
73 bool log_matching = false;
74 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
75
76 if (pme == NULL ||
77 pme->mCameraHandle == 0 ||
78 (!validate_handle(pme->mCameraHandle->camera_handle,
79 recvd_frame->camera_handle))) {
80 LOGE("camera obj not valid");
81 return;
82 }
83
84 QCameraChannel *pChannel = pme->m_channels[QCAMERA_CH_TYPE_ZSL];
85 if (pChannel == NULL ||
86 (!validate_handle(pChannel->getMyHandle(),
87 recvd_frame->ch_id))) {
88 LOGE("ZSL channel doesn't exist, return here");
89 return;
90 }
91
92 if(pme->mParameters.isSceneSelectionEnabled() &&
93 !pme->m_stateMachine.isCaptureRunning()) {
94 pme->selectScene(pChannel, recvd_frame);
95 pChannel->bufDone(recvd_frame);
96 return;
97 }
98
99 LOGD("Frame CB Unlock : %d, is AEC Locked: %d",
100 recvd_frame->bUnlockAEC, pme->m_bLedAfAecLock);
101 if(recvd_frame->bUnlockAEC && pme->m_bLedAfAecLock) {
102 qcamera_sm_internal_evt_payload_t *payload =
103 (qcamera_sm_internal_evt_payload_t *)malloc(
104 sizeof(qcamera_sm_internal_evt_payload_t));
105 if (NULL != payload) {
106 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
107 payload->evt_type = QCAMERA_INTERNAL_EVT_RETRO_AEC_UNLOCK;
108 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
109 if (rc != NO_ERROR) {
110 LOGE("processEvt for retro AEC unlock failed");
111 free(payload);
112 payload = NULL;
113 }
114 } else {
115 LOGE("No memory for retro AEC event");
116 }
117 }
118
119 // Check if retro-active frames are completed and camera is
120 // ready to go ahead with LED estimation for regular frames
121 if (recvd_frame->bReadyForPrepareSnapshot) {
122 // Send an event
123 LOGD("Ready for Prepare Snapshot, signal ");
124 qcamera_sm_internal_evt_payload_t *payload =
125 (qcamera_sm_internal_evt_payload_t *)malloc(
126 sizeof(qcamera_sm_internal_evt_payload_t));
127 if (NULL != payload) {
128 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
129 payload->evt_type = QCAMERA_INTERNAL_EVT_READY_FOR_SNAPSHOT;
130 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
131 if (rc != NO_ERROR) {
132 LOGW("processEvt Ready for Snaphot failed");
133 free(payload);
134 payload = NULL;
135 }
136 } else {
137 LOGE("No memory for prepare signal event detect"
138 " qcamera_sm_internal_evt_payload_t");
139 }
140 }
141
142 /* indicate to the parent that capture is done */
143 pme->captureDone();
144
145 // save a copy for the superbuf
146 mm_camera_super_buf_t* frame =
147 (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
148 if (frame == NULL) {
149 LOGE("Error allocating memory to save received_frame structure.");
150 pChannel->bufDone(recvd_frame);
151 return;
152 }
153 *frame = *recvd_frame;
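// Note: this is a shallow copy; the buffer descriptors referenced by
// recvd_frame are still owned by the channel and must eventually be
// returned via bufDone() once processing completes.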
154
155 if (recvd_frame->num_bufs > 0) {
156 LOGI("[KPI Perf]: superbuf frame_idx %d",
157 recvd_frame->bufs[0]->frame_idx);
158 }
159
160 // DUMP RAW if available
161 property_get("persist.camera.zsl_raw", value, "0");
162 dump_raw = atoi(value) > 0 ? true : false;
163 if (dump_raw) {
164 for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
165 if (recvd_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_RAW) {
166 mm_camera_buf_def_t * raw_frame = recvd_frame->bufs[i];
167 QCameraStream *pStream = pChannel->getStreamByHandle(raw_frame->stream_id);
168 if (NULL != pStream) {
169 pme->dumpFrameToFile(pStream, raw_frame, QCAMERA_DUMP_FRM_RAW);
170 }
171 break;
172 }
173 }
174 }
175
176 for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
177 if (recvd_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_SNAPSHOT) {
178 mm_camera_buf_def_t * yuv_frame = recvd_frame->bufs[i];
179 QCameraStream *pStream = pChannel->getStreamByHandle(yuv_frame->stream_id);
180 if (NULL != pStream) {
181 pme->dumpFrameToFile(pStream, yuv_frame, QCAMERA_DUMP_FRM_INPUT_REPROCESS);
182 }
183 break;
184 }
185 }
186
187 // Check whether FD metadata is needed along with the snapshot frame in ZSL mode
188 if(pme->needFDMetadata(QCAMERA_CH_TYPE_ZSL)){
189 //Need Face Detection result for snapshot frames
190 //Get the Meta Data frames
191 mm_camera_buf_def_t *pMetaFrame = NULL;
192 for (uint32_t i = 0; i < frame->num_bufs; i++) {
193 QCameraStream *pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
194 if (pStream != NULL) {
195 if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
196 pMetaFrame = frame->bufs[i]; //find the metadata
197 break;
198 }
199 }
200 }
201
202 if(pMetaFrame != NULL){
203 metadata_buffer_t *pMetaData = (metadata_buffer_t *)pMetaFrame->buffer;
204 //send the face detection info
205 cam_faces_data_t faces_data;
206 pme->fillFacesData(faces_data, pMetaData);
207 // Hard-coded here until MCT can support it
208 faces_data.detection_data.fd_type = QCAMERA_FD_SNAPSHOT;
209
210 qcamera_sm_internal_evt_payload_t *payload =
211 (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
212 if (NULL != payload) {
213 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
214 payload->evt_type = QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT;
215 payload->faces_data = faces_data;
216 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
217 if (rc != NO_ERROR) {
218 LOGW("processEvt face_detection_result failed");
219 free(payload);
220 payload = NULL;
221 }
222 } else {
223 LOGE("No memory for face_detection_result qcamera_sm_internal_evt_payload_t");
224 }
225 }
226 }
227
228 property_get("persist.camera.dumpmetadata", value, "0");
229 int32_t enabled = atoi(value);
230 if (enabled) {
231 mm_camera_buf_def_t *pMetaFrame = NULL;
232 QCameraStream *pStream = NULL;
233 for (uint32_t i = 0; i < frame->num_bufs; i++) {
234 pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
235 if (pStream != NULL) {
236 if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
237 pMetaFrame = frame->bufs[i];
238 if (pMetaFrame != NULL &&
239 ((metadata_buffer_t *)pMetaFrame->buffer)->is_tuning_params_valid) {
240 pme->dumpMetadataToFile(pStream, pMetaFrame, (char *) "ZSL_Snapshot");
241 }
242 break;
243 }
244 }
245 }
246 }
247
248 property_get("persist.camera.zsl_matching", value, "0");
249 log_matching = atoi(value) > 0 ? true : false;
250 if (log_matching) {
251 LOGH("ZSL super buffer contains:");
252 QCameraStream *pStream = NULL;
253 for (uint32_t i = 0; i < frame->num_bufs; i++) {
254 pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
255 if (pStream != NULL ) {
256 LOGH("Buffer with V4L index %d frame index %d of type %d Timestamp: %ld %ld ",
257 frame->bufs[i]->buf_idx,
258 frame->bufs[i]->frame_idx,
259 pStream->getMyType(),
260 frame->bufs[i]->ts.tv_sec,
261 frame->bufs[i]->ts.tv_nsec);
262 }
263 }
264 }
265
266 // Wait on Postproc initialization if needed
267 // then send to postprocessor
268 if ((NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) ||
269 (NO_ERROR != pme->m_postprocessor.processData(frame))) {
270 LOGE("Failed to trigger process data");
271 pChannel->bufDone(recvd_frame);
272 free(frame);
273 frame = NULL;
274 return;
275 }
276
277 LOGH("[KPI Perf]: X");
278 }
279
280 /*===========================================================================
281 * FUNCTION : selectScene
282 *
283 * DESCRIPTION: send a preview callback when a specific selected scene is applied
284 *
285 * PARAMETERS :
286 * @pChannel: Camera channel
287 * @frame : Bundled super buffer
288 *
289 * RETURN : int32_t type of status
290 * NO_ERROR -- success
291 * non-zero failure code
292 *==========================================================================*/
293 int32_t QCamera2HardwareInterface::selectScene(QCameraChannel *pChannel,
294 mm_camera_super_buf_t *frame)
295 {
296 mm_camera_buf_def_t *pMetaFrame = NULL;
297 QCameraStream *pStream = NULL;
298 int32_t rc = NO_ERROR;
299
300 if ((NULL == frame) || (NULL == pChannel)) {
301 LOGE("Invalid scene select input");
302 return BAD_VALUE;
303 }
304
305 cam_scene_mode_type selectedScene = mParameters.getSelectedScene();
306 if (CAM_SCENE_MODE_MAX == selectedScene) {
307 LOGL("No selected scene");
308 return NO_ERROR;
309 }
310
311 for (uint32_t i = 0; i < frame->num_bufs; i++) {
312 pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
313 if (pStream != NULL) {
314 if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
315 pMetaFrame = frame->bufs[i];
316 break;
317 }
318 }
319 }
320
321 if (NULL == pMetaFrame) {
322 LOGE("No metadata buffer found in scene select super buffer");
323 return NO_INIT;
324 }
325
326 metadata_buffer_t *pMetaData = (metadata_buffer_t *)pMetaFrame->buffer;
327
328 IF_META_AVAILABLE(cam_scene_mode_type, scene, CAM_INTF_META_CURRENT_SCENE, pMetaData) {
329 if ((*scene == selectedScene) &&
330 (mDataCb != NULL) &&
331 (msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0)) {
332 mm_camera_buf_def_t *preview_frame = NULL;
333 for (uint32_t i = 0; i < frame->num_bufs; i++) {
334 pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
335 if (pStream != NULL) {
336 if (pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW)) {
337 preview_frame = frame->bufs[i];
338 break;
339 }
340 }
341 }
342 if (preview_frame) {
343 QCameraGrallocMemory *memory = (QCameraGrallocMemory *)preview_frame->mem_info;
344 uint32_t idx = preview_frame->buf_idx;
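// Preset the cache flag so cache maintenance is handled when the buffer
// is returned (same convention as the other preview callbacks).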
345 preview_frame->cache_flags |= CPU_HAS_READ;
346 rc = sendPreviewCallback(pStream, memory, idx);
347 if (NO_ERROR != rc) {
348 LOGE("Error triggering scene select preview callback");
349 } else {
350 mParameters.setSelectedScene(CAM_SCENE_MODE_MAX);
351 }
352 } else {
353 LOGE("No preview buffer found in scene select super buffer");
354 return NO_INIT;
355 }
356 }
357 } else {
358 LOGE("No current scene metadata!");
359 rc = NO_INIT;
360 }
361
362 return rc;
363 }
364
365 /*===========================================================================
366 * FUNCTION : capture_channel_cb_routine
367 *
368 * DESCRIPTION: helper function to handle snapshot superbuf callback directly from
369 * mm-camera-interface
370 *
371 * PARAMETERS :
372 * @recvd_frame : received super buffer
373 * @userdata : user data ptr
374 *
375 * RETURN : None
376 *
377 * NOTE : recvd_frame will be released by the caller after this call returns,
378 * so if asynchronous processing of recvd_frame is needed, it's our
379 * responsibility to save a copy of it for later use.
380 *==========================================================================*/
381 void QCamera2HardwareInterface::capture_channel_cb_routine(mm_camera_super_buf_t *recvd_frame,
382 void *userdata)
383 {
384 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_CAPTURE_CH_CB);
385 char value[PROPERTY_VALUE_MAX];
386 LOGH("[KPI Perf]: E PROFILE_YUV_CB_TO_HAL");
387 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
388 if (pme == NULL ||
389 pme->mCameraHandle == NULL ||
390 !validate_handle(pme->mCameraHandle->camera_handle,
391 recvd_frame->camera_handle)){
392 LOGE("camera obj not valid");
393 return;
394 }
395
396 QCameraChannel *pChannel = pme->m_channels[QCAMERA_CH_TYPE_CAPTURE];
397 if (pChannel == NULL ||
398 !validate_handle(pChannel->getMyHandle(),
399 recvd_frame->ch_id)) {
400 LOGE("Capture channel doesn't exist, return here");
401 return;
402 }
403
404 // save a copy for the superbuf
405 mm_camera_super_buf_t* frame =
406 (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
407 if (frame == NULL) {
408 LOGE("Error allocating memory to save received_frame structure.");
409 pChannel->bufDone(recvd_frame);
410 return;
411 }
412 *frame = *recvd_frame;
413
414 if (recvd_frame->num_bufs > 0) {
415 LOGI("[KPI Perf]: superbuf frame_idx %d",
416 recvd_frame->bufs[0]->frame_idx);
417 }
418
419 for ( uint32_t i= 0 ; i < recvd_frame->num_bufs ; i++ ) {
420 if ( recvd_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_SNAPSHOT ) {
421 mm_camera_buf_def_t * yuv_frame = recvd_frame->bufs[i];
422 QCameraStream *pStream = pChannel->getStreamByHandle(yuv_frame->stream_id);
423 if ( NULL != pStream ) {
424 pme->dumpFrameToFile(pStream, yuv_frame, QCAMERA_DUMP_FRM_INPUT_REPROCESS);
425 }
426 break;
427 }
428 }
429
430 property_get("persist.camera.dumpmetadata", value, "0");
431 int32_t enabled = atoi(value);
432 if (enabled) {
433 mm_camera_buf_def_t *pMetaFrame = NULL;
434 QCameraStream *pStream = NULL;
435 for (uint32_t i = 0; i < frame->num_bufs; i++) {
436 pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
437 if (pStream != NULL) {
438 if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
439 pMetaFrame = frame->bufs[i]; //find the metadata
440 if (pMetaFrame != NULL &&
441 ((metadata_buffer_t *)pMetaFrame->buffer)->is_tuning_params_valid) {
442 pme->dumpMetadataToFile(pStream, pMetaFrame, (char *) "Snapshot");
443 }
444 break;
445 }
446 }
447 }
448 }
449
450 // Wait on Postproc initialization if needed
451 // then send to postprocessor
452 if ((NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) ||
453 (NO_ERROR != pme->m_postprocessor.processData(frame))) {
454 LOGE("Failed to trigger process data");
455 pChannel->bufDone(recvd_frame);
456 free(frame);
457 frame = NULL;
458 return;
459 }
460
461 /* START of test register face image for face authentication */
462 #ifdef QCOM_TEST_FACE_REGISTER_FACE
463 static uint8_t bRunFaceReg = 1;
464
465 if (bRunFaceReg > 0) {
466 // find snapshot frame
467 QCameraStream *main_stream = NULL;
468 mm_camera_buf_def_t *main_frame = NULL;
469 for (int i = 0; i < recvd_frame->num_bufs; i++) {
470 QCameraStream *pStream =
471 pChannel->getStreamByHandle(recvd_frame->bufs[i]->stream_id);
472 if (pStream != NULL) {
473 if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
474 main_stream = pStream;
475 main_frame = recvd_frame->bufs[i];
476 break;
477 }
478 }
479 }
480 if (main_stream != NULL && main_frame != NULL) {
481 int32_t faceId = -1;
482 cam_pp_offline_src_config_t config;
483 memset(&config, 0, sizeof(cam_pp_offline_src_config_t));
484 config.num_of_bufs = 1;
485 main_stream->getFormat(config.input_fmt);
486 main_stream->getFrameDimension(config.input_dim);
487 main_stream->getFrameOffset(config.input_buf_planes.plane_info);
488 LOGH("DEBUG: registerFaceImage E");
489 int32_t rc = pme->registerFaceImage(main_frame->buffer, &config, faceId);
490 LOGH("DEBUG: registerFaceImage X, ret=%d, faceId=%d", rc, faceId);
491 bRunFaceReg = 0;
492 }
493 }
494
495 #endif
496 /* END of test register face image for face authentication */
497
498 LOGH("[KPI Perf]: X");
499 }
500 #ifdef TARGET_TS_MAKEUP
501 bool QCamera2HardwareInterface::TsMakeupProcess_Preview(mm_camera_buf_def_t *pFrame,
502 QCameraStream * pStream) {
503 LOGD("begin");
504 bool bRet = false;
505 if (pStream == NULL || pFrame == NULL) {
506 bRet = false;
507 LOGH("pStream == NULL || pFrame == NULL");
508 } else {
509 bRet = TsMakeupProcess(pFrame, pStream, mFaceRect);
510 }
511 LOGD("end bRet = %d ",bRet);
512 return bRet;
513 }
514
515 bool QCamera2HardwareInterface::TsMakeupProcess_Snapshot(mm_camera_buf_def_t *pFrame,
516 QCameraStream * pStream) {
517 LOGD("begin");
518 bool bRet = false;
519 if (pStream == NULL || pFrame == NULL) {
520 bRet = false;
521 LOGH("pStream == NULL || pFrame == NULL");
522 } else {
523 cam_frame_len_offset_t offset;
524 memset(&offset, 0, sizeof(cam_frame_len_offset_t));
525 pStream->getFrameOffset(offset);
526
527 cam_dimension_t dim;
528 pStream->getFrameDimension(dim);
529
530 unsigned char *yBuf = (unsigned char*)pFrame->buffer;
531 unsigned char *uvBuf = yBuf + offset.mp[0].len;
532 TSMakeupDataEx inMakeupData;
533 inMakeupData.frameWidth = dim.width;
534 inMakeupData.frameHeight = dim.height;
535 inMakeupData.yBuf = yBuf;
536 inMakeupData.uvBuf = uvBuf;
537 inMakeupData.yStride = offset.mp[0].stride;
538 inMakeupData.uvStride = offset.mp[1].stride;
539 LOGD("detect begin");
540 TSHandle fd_handle = ts_detectface_create_context();
541 if (fd_handle != NULL) {
542 cam_format_t fmt;
543 pStream->getFormat(fmt);
544 int iret = ts_detectface_detectEx(fd_handle, &inMakeupData);
545 LOGD("ts_detectface_detect iret = %d",iret);
546 if (iret <= 0) {
547 bRet = false;
548 } else {
549 TSRect faceRect;
550 memset(&faceRect,-1,sizeof(TSRect));
551 iret = ts_detectface_get_face_info(fd_handle, 0, &faceRect, NULL,NULL,NULL);
552 LOGD("ts_detectface_get_face_info iret=%d,faceRect.left=%ld,"
553 "faceRect.top=%ld,faceRect.right=%ld,faceRect.bottom=%ld"
554 ,iret,faceRect.left,faceRect.top,faceRect.right,faceRect.bottom);
555 bRet = TsMakeupProcess(pFrame,pStream,faceRect);
556 }
557 ts_detectface_destroy_context(&fd_handle);
558 fd_handle = NULL;
559 } else {
560 LOGH("fd_handle == NULL");
561 }
562 LOGD("detect end");
563 }
564 LOGD("end bRet = %d ",bRet);
565 return bRet;
566 }
567
568 bool QCamera2HardwareInterface::TsMakeupProcess(mm_camera_buf_def_t *pFrame,
569 QCameraStream * pStream,TSRect& faceRect) {
570 bool bRet = false;
571 LOGD("begin");
572 if (pStream == NULL || pFrame == NULL) {
573 LOGH("pStream == NULL || pFrame == NULL ");
574 return false;
575 }
576
577 int whiteLevel, cleanLevel;
578 bool enableMakeup = (faceRect.left > -1) &&
579 (mParameters.getTsMakeupInfo(whiteLevel, cleanLevel));
580 if (enableMakeup) {
581 cam_dimension_t dim;
582 cam_frame_len_offset_t offset;
583 pStream->getFrameDimension(dim);
584 pStream->getFrameOffset(offset);
585 unsigned char *tempOriBuf = NULL;
586
587 tempOriBuf = (unsigned char*)pFrame->buffer;
588 unsigned char *yBuf = tempOriBuf;
589 unsigned char *uvBuf = tempOriBuf + offset.mp[0].len;
590 unsigned char *tmpBuf = new unsigned char[offset.frame_len];
591 if (tmpBuf == NULL) {
592 LOGH("tmpBuf == NULL ");
593 return false;
594 }
595 TSMakeupDataEx inMakeupData, outMakeupData;
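// Clamp the makeup levels to the [0, 100] range before handing them to the makeup library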
596 whiteLevel = whiteLevel <= 0 ? 0 : (whiteLevel >= 100 ? 100 : whiteLevel);
597 cleanLevel = cleanLevel <= 0 ? 0 : (cleanLevel >= 100 ? 100 : cleanLevel);
598 inMakeupData.frameWidth = dim.width; // NV21 Frame width > 0
599 inMakeupData.frameHeight = dim.height; // NV21 Frame height > 0
600 inMakeupData.yBuf = yBuf; // Y buffer pointer
601 inMakeupData.uvBuf = uvBuf; // VU buffer pointer
602 inMakeupData.yStride = offset.mp[0].stride;
603 inMakeupData.uvStride = offset.mp[1].stride;
604 outMakeupData.frameWidth = dim.width; // NV21 Frame width > 0
605 outMakeupData.frameHeight = dim.height; // NV21 Frame height > 0
606 outMakeupData.yBuf = tmpBuf; // Y buffer pointer
607 outMakeupData.uvBuf = tmpBuf + offset.mp[0].len; // VU buffer pointer
608 outMakeupData.yStride = offset.mp[0].stride;
609 outMakeupData.uvStride = offset.mp[1].stride;
610 LOGD("faceRect:left 2:%ld,,right:%ld,,top:%ld,,bottom:%ld,,Level:%dx%d",
611 faceRect.left,faceRect.right,faceRect.top,faceRect.bottom,cleanLevel,whiteLevel);
612 ts_makeup_skin_beautyEx(&inMakeupData, &outMakeupData, &(faceRect),cleanLevel,whiteLevel);
613 memcpy((unsigned char*)pFrame->buffer, tmpBuf, offset.frame_len);
614 QCameraMemory *memory = (QCameraMemory *)pFrame->mem_info;
615 memory->cleanCache(pFrame->buf_idx);
616 if (tmpBuf != NULL) {
617 delete[] tmpBuf;
618 tmpBuf = NULL;
619 }
620 }
621 LOGD("end bRet = %d ",bRet);
622 return bRet;
623 }
624 #endif
625 /*===========================================================================
626 * FUNCTION : postproc_channel_cb_routine
627 *
628 * DESCRIPTION: helper function to handle postprocess superbuf callback directly from
629 * mm-camera-interface
630 *
631 * PARAMETERS :
632 * @recvd_frame : received super buffer
633 * @userdata : user data ptr
634 *
635 * RETURN : None
636 *
637 * NOTE : recvd_frame will be released by the caller after this call returns,
638 * so if asynchronous processing of recvd_frame is needed, it's our
639 * responsibility to save a copy of it for later use.
640 *==========================================================================*/
641 void QCamera2HardwareInterface::postproc_channel_cb_routine(mm_camera_super_buf_t *recvd_frame,
642 void *userdata)
643 {
644 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_PP_CH_CB);
645 LOGH("[KPI Perf]: E");
646 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
647
648 if (pme == NULL ||
649 pme->mCameraHandle == 0 ||
650 !validate_handle(pme->mCameraHandle->camera_handle,
651 recvd_frame->camera_handle)) {
652 LOGE("camera obj not valid");
653 return;
654 }
655
656 // save a copy for the superbuf
657 mm_camera_super_buf_t* frame =
658 (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
659 if (frame == NULL) {
660 LOGE("Error allocating memory to save received_frame structure.");
661 return;
662 }
663 *frame = *recvd_frame;
664
665 if (recvd_frame->num_bufs > 0) {
666 LOGI("[KPI Perf]: frame_idx %d", recvd_frame->bufs[0]->frame_idx);
667 }
668 // Wait on JPEG create session
669 pme->waitDeferredWork(pme->mJpegJob);
670
671 // send to postprocessor
672 pme->m_postprocessor.processPPData(frame);
673
674 ATRACE_INT("Camera:Reprocess", 0);
675 LOGH("[KPI Perf]: X");
676 }
677
678 /*===========================================================================
679 * FUNCTION : synchronous_stream_cb_routine
680 *
681 * DESCRIPTION: Function to handle STREAM SYNC CALLBACKS
682 *
683 * PARAMETERS :
684 * @super_frame : received super buffer
685 * @stream : stream object
686 * @userdata : user data ptr
687 *
688 * RETURN : None
689 *
690 * NOTE : This function is executed in mm-interface context.
691 * Avoid adding latency on this thread.
692 *==========================================================================*/
693 void QCamera2HardwareInterface::synchronous_stream_cb_routine(
694 mm_camera_super_buf_t *super_frame, QCameraStream * stream,
695 void *userdata)
696 {
697 nsecs_t frameTime = 0, mPreviewTimestamp = 0;
698 int err = NO_ERROR;
699
700 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_SYNC_STRM_CB);
701 LOGH("[KPI Perf] : BEGIN");
702 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
703
704 if (pme == NULL) {
705 LOGE("Invalid hardware object");
706 return;
707 }
708 if (super_frame == NULL) {
709 LOGE("Invalid super buffer");
710 return;
711 }
712 mm_camera_buf_def_t *frame = super_frame->bufs[0];
713 if (NULL == frame) {
714 LOGE("Frame is NULL");
715 return;
716 }
717
718 if (stream->getMyType() != CAM_STREAM_TYPE_PREVIEW) {
719 LOGE("This is only for PREVIEW stream for now");
720 return;
721 }
722
723 if(pme->m_bPreviewStarted) {
724 LOGI("[KPI Perf] : PROFILE_FIRST_PREVIEW_FRAME");
725
726 pme->m_perfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
727 pme->m_perfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
728 pme->m_bPreviewStarted = false;
729
730 // Set power Hint for preview
731 pme->m_perfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_PREVIEW, 0);
732 }
733
734 QCameraGrallocMemory *memory = (QCameraGrallocMemory *) frame->mem_info;
735 if (!pme->needProcessPreviewFrame(frame->frame_idx)) {
736 pthread_mutex_lock(&pme->mGrallocLock);
737 pme->mLastPreviewFrameID = frame->frame_idx;
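// Mark the buffer as skipped; preview_stream_cb_routine will detect this,
// reset its status and return the buffer to the stream.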
738 memory->setBufferStatus(frame->buf_idx, STATUS_SKIPPED);
739 pthread_mutex_unlock(&pme->mGrallocLock);
740 LOGH("preview is not running, no need to process");
741 return;
742 }
743
744 if (pme->needDebugFps()) {
745 pme->debugShowPreviewFPS();
746 }
747
748 frameTime = nsecs_t(frame->ts.tv_sec) * 1000000000LL + frame->ts.tv_nsec;
749 // Convert Boottime from camera to Monotime for display if needed.
750 // Otherwise, mBootToMonoTimestampOffset value will be 0.
751 frameTime = frameTime - pme->mBootToMonoTimestampOffset;
752 // Calculate the future presentation time stamp for displaying frames at regular interval
753 #if 0 // Temporarily removing the dependency on libgui
754 if (pme->getRecordingHintValue() == true) {
755 mPreviewTimestamp = pme->mCameraDisplay.computePresentationTimeStamp(frameTime);
756 }
757 #endif
758 stream->mStreamTimestamp = frameTime;
759
760 // Enqueue buffer to gralloc.
761 uint32_t idx = frame->buf_idx;
762 LOGD("%p Enqueue Buffer to display %d frame Time = %lld Display Time = %lld",
763 pme, idx, frameTime, mPreviewTimestamp);
764 err = memory->enqueueBuffer(idx, mPreviewTimestamp);
765
766 if (err == NO_ERROR) {
767 pthread_mutex_lock(&pme->mGrallocLock);
768 pme->mLastPreviewFrameID = frame->frame_idx;
769 pme->mEnqueuedBuffers++;
770 pthread_mutex_unlock(&pme->mGrallocLock);
771 } else {
772 LOGE("Enqueue Buffer failed");
773 }
774
775 LOGH("[KPI Perf] : END");
776 return;
777 }
778
779 /*===========================================================================
780 * FUNCTION : preview_stream_cb_routine
781 *
782 * DESCRIPTION: helper function to handle preview frame from preview stream in
783 * normal case with display.
784 *
785 * PARAMETERS :
786 * @super_frame : received super buffer
787 * @stream : stream object
788 * @userdata : user data ptr
789 *
790 * RETURN : None
791 *
792 * NOTE : the caller passes ownership of super_frame; it's our
793 * responsibility to free super_frame once we are done with it. The
794 * new preview frame will be sent to the display, and an older frame
795 * will be dequeued from the display and needs to be returned
796 * to the kernel for future use.
797 *==========================================================================*/
798 void QCamera2HardwareInterface::preview_stream_cb_routine(mm_camera_super_buf_t *super_frame,
799 QCameraStream * stream,
800 void *userdata)
801 {
802 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
803 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_PREVIEW_STRM_CB);
804 LOGH("[KPI Perf] : BEGIN");
805 int err = NO_ERROR;
806 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
807 QCameraGrallocMemory *memory = (QCameraGrallocMemory *)super_frame->bufs[0]->mem_info;
808 uint8_t dequeueCnt = 0;
809
810 if (pme == NULL) {
811 LOGE("Invalid hardware object");
812 free(super_frame);
813 return;
814 }
815 if (memory == NULL) {
816 LOGE("Invalid memory object");
817 free(super_frame);
818 return;
819 }
820
821 mm_camera_buf_def_t *frame = super_frame->bufs[0];
822 if (NULL == frame) {
823 LOGE("preview frame is NULL");
824 free(super_frame);
825 return;
826 }
827
828 // For instant capture and for instant AEC, keep track of the frame counter.
829 // This count will be used to check against the corresponding bound values.
830 if (pme->mParameters.isInstantAECEnabled() ||
831 pme->mParameters.isInstantCaptureEnabled()) {
832 pme->mInstantAecFrameCount++;
833 }
834
835 pthread_mutex_lock(&pme->mGrallocLock);
836 if (!stream->isSyncCBEnabled()) {
837 pme->mLastPreviewFrameID = frame->frame_idx;
838 }
839 bool discardFrame = false;
840 if (!stream->isSyncCBEnabled() &&
841 !pme->needProcessPreviewFrame(frame->frame_idx))
842 {
843 discardFrame = true;
844 } else if (stream->isSyncCBEnabled() &&
845 memory->isBufSkipped(frame->buf_idx)) {
846 discardFrame = true;
847 memory->setBufferStatus(frame->buf_idx, STATUS_IDLE);
848 }
849 pthread_mutex_unlock(&pme->mGrallocLock);
850
851 if (discardFrame) {
852 LOGH("preview is not running, no need to process");
853 stream->bufDone(frame->buf_idx);
854 }
855
856 uint32_t idx = frame->buf_idx;
857
858 if (!pme->mParameters.isSecureMode()){
859 pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_PREVIEW);
860 }
861
862 if(pme->m_bPreviewStarted) {
863 LOGI("[KPI Perf] : PROFILE_FIRST_PREVIEW_FRAME");
864
865 pme->m_perfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
866 pme->m_perfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
867 pme->m_bPreviewStarted = false;
868
869 // Set power Hint for preview
870 pme->m_perfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_PREVIEW, 0);
871 }
872
873 if (!stream->isSyncCBEnabled() && !discardFrame) {
874
875 if (pme->needDebugFps()) {
876 pme->debugShowPreviewFPS();
877 }
878
879 LOGD("Enqueue Buffer to display %d", idx);
880 #ifdef TARGET_TS_MAKEUP
881 pme->TsMakeupProcess_Preview(frame,stream);
882 #endif
883 err = memory->enqueueBuffer(idx);
884
885 if (err == NO_ERROR) {
886 pthread_mutex_lock(&pme->mGrallocLock);
887 pme->mEnqueuedBuffers++;
888 dequeueCnt = pme->mEnqueuedBuffers;
889 pthread_mutex_unlock(&pme->mGrallocLock);
890 } else {
891 LOGE("Enqueue Buffer failed");
892 }
893 } else {
894 pthread_mutex_lock(&pme->mGrallocLock);
895 dequeueCnt = pme->mEnqueuedBuffers;
896 pthread_mutex_unlock(&pme->mGrallocLock);
897 }
898
899 uint8_t numMapped = memory->getMappable();
900 LOGD("EnqueuedCnt %d numMapped %d", dequeueCnt, numMapped);
901
902 for (uint8_t i = 0; i < dequeueCnt; i++) {
903 int dequeuedIdx = memory->dequeueBuffer();
904 LOGD("dequeuedIdx %d numMapped %d Loop running for %d", dequeuedIdx, numMapped, i);
905 if (dequeuedIdx < 0 || dequeuedIdx >= memory->getCnt()) {
906 LOGE("Invalid dequeued buffer index %d from display",
907 dequeuedIdx);
908 break;
909 } else {
910 pthread_mutex_lock(&pme->mGrallocLock);
911 pme->mEnqueuedBuffers--;
912 pthread_mutex_unlock(&pme->mGrallocLock);
913 if (dequeuedIdx >= numMapped) {
914 // This buffer has not yet been mapped to the backend
915 err = stream->mapNewBuffer((uint32_t)dequeuedIdx);
916 if (memory->checkIfAllBuffersMapped()) {
917 // check if mapping is done for all the buffers
918 // Signal the condition for create jpeg session
919 Mutex::Autolock l(pme->mMapLock);
920 pme->mMapCond.signal();
921 LOGH("Mapping done for all bufs");
922 } else {
923 LOGH("All buffers are not yet mapped");
924 }
925 }
926 }
927 // Get the updated mappable buffer count since it's modified in dequeueBuffer()
928 numMapped = memory->getMappable();
929 if (err < 0) {
930 LOGE("buffer mapping failed %d", err);
931 } else {
932 // Return dequeued buffer back to driver
933 err = stream->bufDone((uint32_t)dequeuedIdx);
934 if ( err < 0) {
935 LOGW("stream bufDone failed %d", err);
936 err = NO_ERROR;
937 }
938 }
939 }
940
941 // Handle preview data callback
942 if (pme->m_channels[QCAMERA_CH_TYPE_CALLBACK] == NULL) {
943 if (pme->needSendPreviewCallback() && !discardFrame &&
944 (!pme->mParameters.isSceneSelectionEnabled()) &&
945 (!pme->mParameters.isSecureMode())) {
946 frame->cache_flags |= CPU_HAS_READ;
947 int32_t rc = pme->sendPreviewCallback(stream, memory, idx);
948 if (NO_ERROR != rc) {
949 LOGW("Preview callback was not sent succesfully");
950 }
951 }
952 }
953
954 free(super_frame);
955 LOGH("[KPI Perf] : END");
956 return;
957 }
958
959 /*===========================================================================
960 * FUNCTION : sendPreviewCallback
961 *
962 * DESCRIPTION: helper function for triggering preview callbacks
963 *
964 * PARAMETERS :
965 * @stream : stream object
966 * @memory : Stream memory allocator
967 * @idx : buffer index
968 *
969 * RETURN : int32_t type of status
970 * NO_ERROR -- success
971 * non-zero failure code
972 *==========================================================================*/
973 int32_t QCamera2HardwareInterface::sendPreviewCallback(QCameraStream *stream,
974 QCameraMemory *memory, uint32_t idx)
975 {
976 camera_memory_t *previewMem = NULL;
977 camera_memory_t *data = NULL;
978 camera_memory_t *dataToApp = NULL;
979 size_t previewBufSize = 0;
980 size_t previewBufSizeFromCallback = 0;
981 cam_dimension_t preview_dim;
982 cam_format_t previewFmt;
983 int32_t rc = NO_ERROR;
984 int32_t yStride = 0;
985 int32_t yScanline = 0;
986 int32_t uvStride = 0;
987 int32_t uvScanline = 0;
988 int32_t uStride = 0;
989 int32_t uScanline = 0;
990 int32_t vStride = 0;
991 int32_t vScanline = 0;
992 int32_t yStrideToApp = 0;
993 int32_t uvStrideToApp = 0;
994 int32_t yScanlineToApp = 0;
995 int32_t uvScanlineToApp = 0;
996 int32_t srcOffset = 0;
997 int32_t dstOffset = 0;
998 int32_t srcBaseOffset = 0;
999 int32_t dstBaseOffset = 0;
1000 int i;
1001
1002 if ((NULL == stream) || (NULL == memory)) {
1003 LOGE("Invalid preview callback input");
1004 return BAD_VALUE;
1005 }
1006
1007 cam_stream_info_t *streamInfo =
1008 reinterpret_cast<cam_stream_info_t *>(stream->getStreamInfoBuf()->getPtr(0));
1009 if (NULL == streamInfo) {
1010 LOGE("Invalid streamInfo");
1011 return BAD_VALUE;
1012 }
1013
1014 stream->getFrameDimension(preview_dim);
1015 stream->getFormat(previewFmt);
1016
1017 yStrideToApp = preview_dim.width;
1018 yScanlineToApp = preview_dim.height;
1019 uvStrideToApp = yStrideToApp;
1020 uvScanlineToApp = yScanlineToApp / 2;
1021
1022 /* The preview buffer size in the callback should be
1023 * (width * height * bytes_per_pixel). Since all preview formats we support
1024 * use 12 bits per pixel, buffer size = previewWidth * previewHeight * 3/2.
1025 * A check needs to be added if other formats are supported in the future. */
1026 if ((previewFmt == CAM_FORMAT_YUV_420_NV21) ||
1027 (previewFmt == CAM_FORMAT_YUV_420_NV12) ||
1028 (previewFmt == CAM_FORMAT_YUV_420_YV12) ||
1029 (previewFmt == CAM_FORMAT_YUV_420_NV12_VENUS) ||
1030 (previewFmt == CAM_FORMAT_YUV_420_NV21_VENUS) ||
1031 (previewFmt == CAM_FORMAT_YUV_420_NV21_ADRENO)) {
1032 if(previewFmt == CAM_FORMAT_YUV_420_YV12) {
1033 yStride = streamInfo->buf_planes.plane_info.mp[0].stride;
1034 yScanline = streamInfo->buf_planes.plane_info.mp[0].scanline;
1035 uStride = streamInfo->buf_planes.plane_info.mp[1].stride;
1036 uScanline = streamInfo->buf_planes.plane_info.mp[1].scanline;
1037 vStride = streamInfo->buf_planes.plane_info.mp[2].stride;
1038 vScanline = streamInfo->buf_planes.plane_info.mp[2].scanline;
1039
1040 previewBufSize = (size_t)
1041 (yStride * yScanline + uStride * uScanline + vStride * vScanline);
1042 previewBufSizeFromCallback = previewBufSize;
1043 } else {
1044 yStride = streamInfo->buf_planes.plane_info.mp[0].stride;
1045 yScanline = streamInfo->buf_planes.plane_info.mp[0].scanline;
1046 uvStride = streamInfo->buf_planes.plane_info.mp[1].stride;
1047 uvScanline = streamInfo->buf_planes.plane_info.mp[1].scanline;
1048
1049 previewBufSize = (size_t)
1050 ((yStrideToApp * yScanlineToApp) + (uvStrideToApp * uvScanlineToApp));
1051
1052 previewBufSizeFromCallback = (size_t)
1053 ((yStride * yScanline) + (uvStride * uvScanline));
1054 }
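// If the stream's (padded) buffer size matches the tightly packed size the
// app expects, the existing buffer fd can be wrapped and shared directly;
// otherwise the frame is repacked into a separate callback buffer below.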
1055 if(previewBufSize == previewBufSizeFromCallback) {
1056 previewMem = mGetMemory(memory->getFd(idx),
1057 previewBufSize, 1, mCallbackCookie);
1058 if (!previewMem || !previewMem->data) {
1059 LOGE("mGetMemory failed.\n");
1060 return NO_MEMORY;
1061 } else {
1062 data = previewMem;
1063 }
1064 } else {
1065 data = memory->getMemory(idx, false);
1066 dataToApp = mGetMemory(-1, previewBufSize, 1, mCallbackCookie);
1067 if (!dataToApp || !dataToApp->data) {
1068 LOGE("mGetMemory failed.\n");
1069 return NO_MEMORY;
1070 }
1071
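// Copy the Y plane row by row, stripping the stride padding so the app
// receives tightly packed rows.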
1072 for (i = 0; i < preview_dim.height; i++) {
1073 srcOffset = i * yStride;
1074 dstOffset = i * yStrideToApp;
1075
1076 memcpy((unsigned char *) dataToApp->data + dstOffset,
1077 (unsigned char *) data->data + srcOffset,
1078 (size_t)yStrideToApp);
1079 }
1080
1081 srcBaseOffset = yStride * yScanline;
1082 dstBaseOffset = yStrideToApp * yScanlineToApp;
1083
1084 for (i = 0; i < preview_dim.height/2; i++) {
1085 srcOffset = i * uvStride + srcBaseOffset;
1086 dstOffset = i * uvStrideToApp + dstBaseOffset;
1087
1088 memcpy((unsigned char *) dataToApp->data + dstOffset,
1089 (unsigned char *) data->data + srcOffset,
1090 (size_t)yStrideToApp);
1091 }
1092 }
1093 } else {
1094 /* Invalid buffer content, but it can still be used as a first preview
1095 frame trigger in the framework/app */
1096 previewBufSize = (size_t)
1097 ((yStrideToApp * yScanlineToApp) +
1098 (uvStrideToApp * uvScanlineToApp));
1099 previewBufSizeFromCallback = 0;
1100 LOGW("Invalid preview format. Buffer content cannot be processed size = %d",
1101 previewBufSize);
1102 dataToApp = mGetMemory(-1, previewBufSize, 1, mCallbackCookie);
1103 if (!dataToApp || !dataToApp->data) {
1104 LOGE("mGetMemory failed.\n");
1105 return NO_MEMORY;
1106 }
1107 }
1108 qcamera_callback_argm_t cbArg;
1109 memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
1110 cbArg.cb_type = QCAMERA_DATA_CALLBACK;
1111 cbArg.msg_type = CAMERA_MSG_PREVIEW_FRAME;
1112 if (previewBufSize != 0 && previewBufSizeFromCallback != 0 &&
1113 previewBufSize == previewBufSizeFromCallback) {
1114 cbArg.data = data;
1115 } else {
1116 cbArg.data = dataToApp;
1117 }
1118 if ( previewMem ) {
1119 cbArg.user_data = previewMem;
1120 cbArg.release_cb = releaseCameraMemory;
1121 } else if (dataToApp) {
1122 cbArg.user_data = dataToApp;
1123 cbArg.release_cb = releaseCameraMemory;
1124 }
1125 cbArg.cookie = this;
1126 rc = m_cbNotifier.notifyCallback(cbArg);
1127 if (rc != NO_ERROR) {
1128 LOGW("fail sending notification");
1129 if (previewMem) {
1130 previewMem->release(previewMem);
1131 } else if (dataToApp) {
1132 dataToApp->release(dataToApp);
1133 }
1134 }
1135
1136 return rc;
1137 }
1138
1139 /*===========================================================================
1140 * FUNCTION : nodisplay_preview_stream_cb_routine
1141 *
1142 * DESCRIPTION: helper function to handle preview frame from preview stream in
1143 * no-display case
1144 *
1145 * PARAMETERS :
1146 * @super_frame : received super buffer
1147 * @stream : stream object
1148 * @userdata : user data ptr
1149 *
1150 * RETURN : None
1151 *
1152 * NOTE : the caller passes ownership of super_frame; it's our
1153 * responsibility to free super_frame once we are done with it.
1154 *==========================================================================*/
1155 void QCamera2HardwareInterface::nodisplay_preview_stream_cb_routine(
1156 mm_camera_super_buf_t *super_frame,
1157 QCameraStream *stream,
1158 void * userdata)
1159 {
1160 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_NODIS_PREVIEW_STRMCB);
1161 LOGH("[KPI Perf] E");
1162 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1163 if (pme == NULL ||
1164 pme->mCameraHandle == NULL ||
1165 !validate_handle(pme->mCameraHandle->camera_handle,
1166 super_frame->camera_handle)){
1167 LOGE("camera obj not valid");
1168 // simply free super frame
1169 free(super_frame);
1170 return;
1171 }
1172 mm_camera_buf_def_t *frame = super_frame->bufs[0];
1173 if (NULL == frame) {
1174 LOGE("preview frame is NULL");
1175 free(super_frame);
1176 return;
1177 }
1178
1179 if (!pme->needProcessPreviewFrame(frame->frame_idx)) {
1180 LOGH("preview is not running, no need to process");
1181 stream->bufDone(frame->buf_idx);
1182 free(super_frame);
1183 return;
1184 }
1185
1186 if (pme->needDebugFps()) {
1187 pme->debugShowPreviewFPS();
1188 }
1189
1190 QCameraMemory *previewMemObj = (QCameraMemory *)frame->mem_info;
1191 camera_memory_t *preview_mem = NULL;
1192 if (previewMemObj != NULL) {
1193 preview_mem = previewMemObj->getMemory(frame->buf_idx, false);
1194 }
1195 if (NULL != previewMemObj && NULL != preview_mem) {
1196 pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_PREVIEW);
1197
1198 if ((pme->needProcessPreviewFrame(frame->frame_idx)) &&
1199 pme->needSendPreviewCallback() &&
1200 (pme->getRelatedCamSyncInfo()->mode != CAM_MODE_SECONDARY)) {
1201 qcamera_callback_argm_t cbArg;
1202 memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
1203 cbArg.cb_type = QCAMERA_DATA_CALLBACK;
1204 cbArg.msg_type = CAMERA_MSG_PREVIEW_FRAME;
1205 cbArg.data = preview_mem;
1206 cbArg.user_data = (void *) &frame->buf_idx;
1207 cbArg.cookie = stream;
1208 cbArg.release_cb = returnStreamBuffer;
1209 // Preset cache flags to be handled when the buffer comes back
1210 frame->cache_flags |= CPU_HAS_READ;
1211 int32_t rc = pme->m_cbNotifier.notifyCallback(cbArg);
1212 if (rc != NO_ERROR) {
1213 LOGE ("fail sending data notify");
1214 stream->bufDone(frame->buf_idx);
1215 }
1216 } else {
1217 stream->bufDone(frame->buf_idx);
1218 }
1219 }
1220 free(super_frame);
1221 LOGH("[KPI Perf] X");
1222 }
1223
1224 /*===========================================================================
1225 * FUNCTION : secure_stream_cb_routine
1226 *
1227 * DESCRIPTION: helper function to handle secure frame
1228 *
1229 * PARAMETERS :
1230 * @super_frame : received super buffer
1231 * @stream : stream object
1232 * @userdata : user data ptr
1233 *
1234 * RETURN : None
1235 *
1236 * NOTE : the caller passes ownership of super_frame; it's our
1237 * responsibility to free super_frame once we are done with it.
1238 *==========================================================================*/
1239 void QCamera2HardwareInterface::secure_stream_cb_routine(
1240 mm_camera_super_buf_t *super_frame,
1241 QCameraStream *stream, void *userdata)
1242 {
1243 ATRACE_CALL();
1244 LOGH("Enter");
1245 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1246 if (pme == NULL ||
1247 pme->mCameraHandle == NULL ||
1248 pme->mCameraHandle->camera_handle != super_frame->camera_handle){
1249 LOGE("camera obj not valid");
1250 free(super_frame);
1251 return;
1252 }
1253 mm_camera_buf_def_t *frame = super_frame->bufs[0];
1254 if (NULL == frame) {
1255 LOGE("preview frame is NLUL");
1256 goto end;
1257 }
1258
1259 if (pme->isSecureMode()) {
1260 // Secure Mode
1261 // We will do QCAMERA_NOTIFY_CALLBACK and share FD in case of secure mode
1262 QCameraMemory *memObj = (QCameraMemory *)frame->mem_info;
1263 if (NULL == memObj) {
1264 LOGE("memObj is NULL");
1265 stream->bufDone(frame->buf_idx);
1266 goto end;
1267 }
1268
1269 int fd = memObj->getFd(frame->buf_idx);
1270 if (pme->mDataCb != NULL &&
1271 pme->msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0) {
1272 LOGD("Secure frame fd =%d for index = %d ", fd, frame->buf_idx);
1273 // Prepare Callback structure
1274 qcamera_callback_argm_t cbArg;
1275 memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
1276 cbArg.cb_type = QCAMERA_NOTIFY_CALLBACK;
1277 cbArg.msg_type = CAMERA_MSG_PREVIEW_FRAME;
1278 #ifndef VANILLA_HAL
1279 cbArg.ext1 = CAMERA_FRAME_DATA_FD;
1280 cbArg.ext2 = fd;
1281 #endif
1282 cbArg.user_data = (void *) &frame->buf_idx;
1283 cbArg.cookie = stream;
1284 cbArg.release_cb = returnStreamBuffer;
1285 pme->m_cbNotifier.notifyCallback(cbArg);
1286 } else {
1287 LOGH("No need to process secure frame, msg not enabled");
1288 stream->bufDone(frame->buf_idx);
1289 }
1290 } else {
1291 LOGH("No need to process secure frame, not in secure mode");
1292 stream->bufDone(frame->buf_idx);
1293 }
1294 end:
1295 free(super_frame);
1296 LOGH("Exit");
1297 return;
1298 }
1299
1300 /*===========================================================================
1301 * FUNCTION : rdi_mode_stream_cb_routine
1302 *
1303 * DESCRIPTION: helper function to handle RDI frame from preview stream in
1304 * rdi mode case
1305 *
1306 * PARAMETERS :
1307 * @super_frame : received super buffer
1308 * @stream : stream object
1309 * @userdata : user data ptr
1310 *
1311 * RETURN : None
1312 *
1313 * NOTE : the caller passes ownership of super_frame; it's our
1314 * responsibility to free super_frame once we are done with it.
1315 *==========================================================================*/
1316 void QCamera2HardwareInterface::rdi_mode_stream_cb_routine(
1317 mm_camera_super_buf_t *super_frame,
1318 QCameraStream *stream,
1319 void * userdata)
1320 {
1321 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_RDI_MODE_STRM_CB);
1322 QCameraMemory *previewMemObj = NULL;
1323 camera_memory_t *preview_mem = NULL;
1324
1325 LOGH("RDI_DEBUG Enter");
1326 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1327 if (pme == NULL ||
1328 pme->mCameraHandle == NULL ||
1329 !validate_handle(pme->mCameraHandle->camera_handle,
1330 super_frame->camera_handle)){
1331 LOGE("camera obj not valid");
1332 free(super_frame);
1333 return;
1334 }
1335 mm_camera_buf_def_t *frame = super_frame->bufs[0];
1336 if (NULL == frame) {
1337 LOGE("preview frame is NLUL");
1338 goto end;
1339 }
1340 if (!pme->needProcessPreviewFrame(frame->frame_idx)) {
1341 LOGE("preview is not running, no need to process");
1342 stream->bufDone(frame->buf_idx);
1343 goto end;
1344 }
1345 if (pme->needDebugFps()) {
1346 pme->debugShowPreviewFPS();
1347 }
1348 // Non-secure Mode
1349 previewMemObj = (QCameraMemory *)frame->mem_info;
1350 if (NULL == previewMemObj) {
1351 LOGE("previewMemObj is NULL");
1352 stream->bufDone(frame->buf_idx);
1353 goto end;
1354 }
1355
1356 preview_mem = previewMemObj->getMemory(frame->buf_idx, false);
1357 if (NULL != preview_mem) {
1358 previewMemObj->cleanCache(frame->buf_idx);
1359 // Dump RAW frame
1360 pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_RAW);
1361 // Notify Preview callback frame
1362 if (pme->needProcessPreviewFrame(frame->frame_idx) &&
1363 pme->mDataCb != NULL &&
1364 pme->msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0) {
1365 qcamera_callback_argm_t cbArg;
1366 memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
1367 cbArg.cb_type = QCAMERA_DATA_CALLBACK;
1368 cbArg.msg_type = CAMERA_MSG_PREVIEW_FRAME;
1369 cbArg.data = preview_mem;
1370 cbArg.user_data = (void *) &frame->buf_idx;
1371 cbArg.cookie = stream;
1372 cbArg.release_cb = returnStreamBuffer;
1373 // Preset cache flags to be handled when the buffer comes back
1374 frame->cache_flags |= CPU_HAS_READ;
1375 pme->m_cbNotifier.notifyCallback(cbArg);
1376 } else {
1377 LOGE("preview_mem is NULL");
1378 stream->bufDone(frame->buf_idx);
1379 }
1380 } else {
1381 LOGE("preview_mem is NULL");
1382 stream->bufDone(frame->buf_idx);
1383 }
1384 end:
1385 free(super_frame);
1386 LOGH("RDI_DEBUG Exit");
1387 return;
1388 }
1389
1390 /*===========================================================================
1391 * FUNCTION : postview_stream_cb_routine
1392 *
1393 * DESCRIPTION: helper function to handle postview frame from postview stream
1394 *
1395 * PARAMETERS :
1396 * @super_frame : received super buffer
1397 * @stream : stream object
1398 * @userdata : user data ptr
1399 *
1400 * RETURN : None
1401 *
1402 * NOTE : the caller passes ownership of super_frame; it's our
1403 * responsibility to free super_frame once we are done with it.
1404 *==========================================================================*/
1405 void QCamera2HardwareInterface::postview_stream_cb_routine(mm_camera_super_buf_t *super_frame,
1406 QCameraStream *stream,
1407 void *userdata)
1408 {
1409 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_POSTVIEW_STRM_CB);
1410 int err = NO_ERROR;
1411 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1412 QCameraGrallocMemory *memory = (QCameraGrallocMemory *)super_frame->bufs[0]->mem_info;
1413
1414 if (pme == NULL) {
1415 LOGE("Invalid hardware object");
1416 free(super_frame);
1417 return;
1418 }
1419 if (memory == NULL) {
1420 LOGE("Invalid memory object");
1421 free(super_frame);
1422 return;
1423 }
1424
1425 LOGH("[KPI Perf] : BEGIN");
1426
1427 mm_camera_buf_def_t *frame = super_frame->bufs[0];
1428 if (NULL == frame) {
1429 LOGE("preview frame is NULL");
1430 free(super_frame);
1431 return;
1432 }
1433
1434 QCameraMemory *memObj = (QCameraMemory *)frame->mem_info;
1435 if (NULL != memObj) {
1436 pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_THUMBNAIL);
1437 }
1438
1439 // Return buffer back to driver
1440 err = stream->bufDone(frame->buf_idx);
1441 if ( err < 0) {
1442 LOGE("stream bufDone failed %d", err);
1443 }
1444
1445 free(super_frame);
1446 LOGH("[KPI Perf] : END");
1447 return;
1448 }
1449
1450 /*===========================================================================
1451 * FUNCTION : video_stream_cb_routine
1452 *
1453 * DESCRIPTION: helper function to handle video frame from video stream
1454 *
1455 * PARAMETERS :
1456 * @super_frame : received super buffer
1457 * @stream : stream object
1458 * @userdata : user data ptr
1459 *
1460 * RETURN : None
1461 *
1462 * NOTE : the caller passes ownership of super_frame; it's our
1463 * responsibility to free super_frame once we are done with it. The
1464 * video frame will be sent to the video encoder. Once the encoder is
1465 * done with the video frame, it will call another API
1466 * (release_recording_frame) to return the frame
1467 *==========================================================================*/
1468 void QCamera2HardwareInterface::video_stream_cb_routine(mm_camera_super_buf_t *super_frame,
1469 QCameraStream *stream,
1470 void *userdata)
1471 {
1472 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_VIDEO_STRM_CB);
1473 QCameraVideoMemory *videoMemObj = NULL;
1474 camera_memory_t *video_mem = NULL;
1475 nsecs_t timeStamp = 0;
1476 bool triggerTCB = FALSE;
1477
1478 LOGD("[KPI Perf] : BEGIN");
1479 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1480 if (pme == NULL ||
1481 pme->mCameraHandle == 0 ||
1482 !validate_handle(pme->mCameraHandle->camera_handle,
1483 super_frame->camera_handle)) {
1484 // simply free super frame
1485 free(super_frame);
1486 return;
1487 }
1488
1489 mm_camera_buf_def_t *frame = super_frame->bufs[0];
1490
1491 if (pme->needDebugFps()) {
1492 pme->debugShowVideoFPS();
1493 }
1494 if(pme->m_bRecordStarted) {
1495 LOGI("[KPI Perf] : PROFILE_FIRST_RECORD_FRAME");
1496 pme->m_bRecordStarted = false ;
1497 }
1498 LOGD("Stream(%d), Timestamp: %ld %ld",
1499 frame->stream_id,
1500 frame->ts.tv_sec,
1501 frame->ts.tv_nsec);
1502
1503 if (frame->buf_type == CAM_STREAM_BUF_TYPE_MPLANE) {
1504 if (pme->mParameters.getVideoBatchSize() == 0) {
1505 timeStamp = nsecs_t(frame->ts.tv_sec) * 1000000000LL
1506 + frame->ts.tv_nsec;
1507 pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_VIDEO);
1508 videoMemObj = (QCameraVideoMemory *)frame->mem_info;
1509 video_mem = NULL;
1510 if (NULL != videoMemObj) {
1511 video_mem = videoMemObj->getMemory(frame->buf_idx,
1512 (pme->mStoreMetaDataInFrame > 0)? true : false);
1513 triggerTCB = TRUE;
1514 LOGH("Video frame TimeStamp : %lld batch = 0 index = %d",
1515 timeStamp, frame->buf_idx);
1516 }
1517 } else {
1518 //Handle video batch callback
1519 native_handle_t *nh = NULL;
1520 pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_VIDEO);
1521 QCameraVideoMemory *videoMemObj = (QCameraVideoMemory *)frame->mem_info;
1522 if ((stream->mCurMetaMemory == NULL)
1523 || (stream->mCurBufIndex == -1)) {
1524 //get Free metadata available
1525 for (int i = 0; i < CAMERA_MIN_VIDEO_BATCH_BUFFERS; i++) {
1526 if (stream->mStreamMetaMemory[i].consumerOwned == 0) {
1527 stream->mCurMetaMemory = videoMemObj->getMemory(i,true);
1528 stream->mCurBufIndex = 0;
1529 stream->mCurMetaIndex = i;
1530 stream->mStreamMetaMemory[i].numBuffers = 0;
1531 break;
1532 }
1533 }
1534 }
1535 video_mem = stream->mCurMetaMemory;
1536 nh = videoMemObj->getNativeHandle(stream->mCurMetaIndex);
1537 if (video_mem == NULL || nh == NULL) {
1538 LOGE("No Free metadata. Drop this frame");
1539 stream->mCurBufIndex = -1;
1540 stream->bufDone(frame->buf_idx);
1541 free(super_frame);
1542 return;
1543 }
1544
1545 int index = stream->mCurBufIndex;
1546 int fd_cnt = pme->mParameters.getVideoBatchSize();
1547 nsecs_t frame_ts = nsecs_t(frame->ts.tv_sec) * 1000000000LL
1548 + frame->ts.tv_nsec;
1549 if (index == 0) {
1550 stream->mFirstTimeStamp = frame_ts;
1551 }
1552
1553 stream->mStreamMetaMemory[stream->mCurMetaIndex].buf_index[index]
1554 = (uint8_t)frame->buf_idx;
1555 stream->mStreamMetaMemory[stream->mCurMetaIndex].numBuffers++;
1556 stream->mStreamMetaMemory[stream->mCurMetaIndex].consumerOwned
1557 = TRUE;
1558
1559 //Fill video metadata.
1560 videoMemObj->updateNativeHandle(nh, index, //native_handle
1561 (int)videoMemObj->getFd(frame->buf_idx), //FD
1562 (int)videoMemObj->getSize(frame->buf_idx),//Size
1563 (int)(frame_ts - stream->mFirstTimeStamp));
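// Each entry in the batch stores its timestamp as a delta from the first
// frame of the batch; the batch is handed to the encoder with
// mFirstTimeStamp as the base timestamp once it is full.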
1564
1565 stream->mCurBufIndex++;
1566 if (stream->mCurBufIndex == fd_cnt) {
1567 timeStamp = stream->mFirstTimeStamp;
1568 LOGH("Video frame to encoder TimeStamp : %lld batch = %d Buffer idx = %d",
1569 timeStamp, fd_cnt,
1570 stream->mCurMetaIndex);
1571 stream->mCurBufIndex = -1;
1572 stream->mCurMetaIndex = -1;
1573 stream->mCurMetaMemory = NULL;
1574 triggerTCB = TRUE;
1575 }
1576 }
1577 } else {
1578 videoMemObj = (QCameraVideoMemory *)frame->mem_info;
1579 video_mem = NULL;
1580 native_handle_t *nh = NULL;
1581 int fd_cnt = frame->user_buf.bufs_used;
1582 if (NULL != videoMemObj) {
1583 video_mem = videoMemObj->getMemory(frame->buf_idx, true);
1584 nh = videoMemObj->getNativeHandle(frame->buf_idx);
1585 } else {
1586 LOGE("videoMemObj NULL");
1587 }
1588
1589 if (nh != NULL) {
1590 timeStamp = nsecs_t(frame->ts.tv_sec) * 1000000000LL
1591 + frame->ts.tv_nsec;
1592
1593 for (int i = 0; i < fd_cnt; i++) {
1594 if (frame->user_buf.buf_idx[i] >= 0) {
1595 mm_camera_buf_def_t *plane_frame =
1596 &frame->user_buf.plane_buf[frame->user_buf.buf_idx[i]];
1597 QCameraVideoMemory *frameobj =
1598 (QCameraVideoMemory *)plane_frame->mem_info;
1599 nsecs_t frame_ts = nsecs_t(plane_frame->ts.tv_sec) * 1000000000LL
1600 + plane_frame->ts.tv_nsec;
1601 //Fill video metadata.
1602 videoMemObj->updateNativeHandle(nh, i,
1603 (int)frameobj->getFd(plane_frame->buf_idx),
1604 (int)frameobj->getSize(plane_frame->buf_idx),
1605 (int)(frame_ts - timeStamp));
1606
1607 LOGD("Send Video frames to services/encoder delta : %lld FD = %d index = %d",
1608 (frame_ts - timeStamp), plane_frame->fd, plane_frame->buf_idx);
1609 pme->dumpFrameToFile(stream, plane_frame, QCAMERA_DUMP_FRM_VIDEO);
1610 }
1611 }
1612 triggerTCB = TRUE;
1613 LOGH("Batch buffer TimeStamp : %lld FD = %d index = %d fd_cnt = %d",
1614 timeStamp, frame->fd, frame->buf_idx, fd_cnt);
1615 } else {
1616 LOGE("No Video Meta Available. Return Buffer");
1617 stream->bufDone(super_frame->bufs[0]->buf_idx);
1618 }
1619 }
1620
1621 if ((NULL != video_mem) && (triggerTCB == TRUE)) {
1622 if ((pme->mDataCbTimestamp != NULL) &&
1623 pme->msgTypeEnabledWithLock(CAMERA_MSG_VIDEO_FRAME) > 0) {
1624 qcamera_callback_argm_t cbArg;
1625 memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
1626 cbArg.cb_type = QCAMERA_DATA_TIMESTAMP_CALLBACK;
1627 cbArg.msg_type = CAMERA_MSG_VIDEO_FRAME;
1628 cbArg.data = video_mem;
1629
1630 // For VT usecase, ISP uses AVtimer not CLOCK_BOOTTIME as time source.
1631 // So do not change video timestamp.
1632 if (!pme->mParameters.isAVTimerEnabled()) {
1633 // Convert Boottime from camera to Monotime for video if needed.
1634 // Otherwise, mBootToMonoTimestampOffset value will be 0.
1635 timeStamp = timeStamp - pme->mBootToMonoTimestampOffset;
1636 }
1637 LOGD("Final video buffer TimeStamp : %lld ", timeStamp);
1638 cbArg.timestamp = timeStamp;
1639 int32_t rc = pme->m_cbNotifier.notifyCallback(cbArg);
1640 if (rc != NO_ERROR) {
1641 LOGE("fail sending data notify");
1642 stream->bufDone(frame->buf_idx);
1643 }
1644 }
1645 }
1646
1647 free(super_frame);
1648 LOGD("[KPI Perf] : END");
1649 }
1650
1651 /*===========================================================================
1652 * FUNCTION : snapshot_channel_cb_routine
1653 *
1654 * DESCRIPTION: helper function to handle snapshot frame from snapshot channel
1655 *
1656 * PARAMETERS :
1657 * @super_frame : received super buffer
1658 * @userdata : user data ptr
1659 *
1660 * RETURN : None
1661 *
1662  * NOTE       : super_frame will be released by the caller after this call, so if
1663  *              any async operation is needed for super_frame, it's our responsibility
1664  *              to save a copy of it to be used later.
1665 *==========================================================================*/
1666 void QCamera2HardwareInterface::snapshot_channel_cb_routine(mm_camera_super_buf_t *super_frame,
1667 void *userdata)
1668 {
1669 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_SNAPSHOT_CH_CB);
1670 char value[PROPERTY_VALUE_MAX];
1671 QCameraChannel *pChannel = NULL;
1672
1673 LOGH("[KPI Perf]: E");
1674 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1675 if (pme == NULL ||
1676 pme->mCameraHandle == NULL ||
1677 !validate_handle(pme->mCameraHandle->camera_handle,
1678 super_frame->camera_handle)){
1679 LOGE("camera obj not valid");
1680 // simply free super frame
1681 free(super_frame);
1682 return;
1683 }
1684
1685 if (pme->isLowPowerMode()) {
1686 pChannel = pme->m_channels[QCAMERA_CH_TYPE_VIDEO];
1687 } else {
1688 pChannel = pme->m_channels[QCAMERA_CH_TYPE_SNAPSHOT];
1689 }
1690
1691 if ((pChannel == NULL)
1692 || (!validate_handle(pChannel->getMyHandle(),
1693 super_frame->ch_id))) {
1694 LOGE("Snapshot channel doesn't exist, return here");
1695 return;
1696 }
1697
1698 property_get("persist.camera.dumpmetadata", value, "0");
1699 int32_t enabled = atoi(value);
1700 if (enabled) {
1701 mm_camera_buf_def_t *pMetaFrame = NULL;
1702 QCameraStream *pStream = NULL;
1703 for (uint32_t i = 0; i < super_frame->num_bufs; i++) {
1704 pStream = pChannel->getStreamByHandle(super_frame->bufs[i]->stream_id);
1705 if (pStream != NULL) {
1706 if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
1707 pMetaFrame = super_frame->bufs[i]; //find the metadata
1708 if (pMetaFrame != NULL &&
1709 ((metadata_buffer_t *)pMetaFrame->buffer)->is_tuning_params_valid) {
1710 pme->dumpMetadataToFile(pStream, pMetaFrame, (char *) "Snapshot");
1711 }
1712 break;
1713 }
1714 }
1715 }
1716 }
1717
1718 // save a copy for the superbuf
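// (super_frame is reclaimed by the caller on return, so keep a shallow copy
// of the descriptor for the asynchronous postprocessor; the buffers it
// references are returned later via bufDone())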
1719 mm_camera_super_buf_t* frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
1720 if (frame == NULL) {
1721 LOGE("Error allocating memory to save received_frame structure.");
1722 pChannel->bufDone(super_frame);
1723 return;
1724 }
1725 *frame = *super_frame;
1726
1727 if (frame->num_bufs > 0) {
1728 LOGI("[KPI Perf]: superbuf frame_idx %d",
1729 frame->bufs[0]->frame_idx);
1730 }
1731
1732 if ((NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) ||
1733 (NO_ERROR != pme->m_postprocessor.processData(frame))) {
1734 LOGE("Failed to trigger process data");
1735 pChannel->bufDone(super_frame);
1736 free(frame);
1737 frame = NULL;
1738 return;
1739 }
1740
1741 LOGH("[KPI Perf]: X");
1742 }
1743
1744 /*===========================================================================
1745 * FUNCTION : raw_stream_cb_routine
1746 *
1747 * DESCRIPTION: helper function to handle raw dump frame from raw stream
1748 *
1749 * PARAMETERS :
1750 * @super_frame : received super buffer
1751 * @stream : stream object
1752 * @userdata : user data ptr
1753 *
1754 * RETURN : None
1755 *
1756 * NOTE : caller passes the ownership of super_frame, it's our
1757 * responsibility to free super_frame once it's done. For raw
1758 * frame, there is no need to send to postprocessor for jpeg
1759 * encoding. this function will play shutter and send the data
1760 * callback to upper layer. Raw frame buffer will be returned
1761 * back to kernel, and frame will be free after use.
1762 *==========================================================================*/
1763 void QCamera2HardwareInterface::raw_stream_cb_routine(mm_camera_super_buf_t * super_frame,
1764 QCameraStream * /*stream*/,
1765 void * userdata)
1766 {
1767 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_RAW_STRM_CB);
1768 LOGH("[KPI Perf] : BEGIN");
1769 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1770 if (pme == NULL ||
1771 pme->mCameraHandle == NULL ||
1772 !validate_handle(pme->mCameraHandle->camera_handle,
1773 super_frame->camera_handle)){
1774 LOGE("camera obj not valid");
1775 // simply free super frame
1776 free(super_frame);
1777 return;
1778 }
1779
1780 pme->m_postprocessor.processRawData(super_frame);
1781 LOGH("[KPI Perf] : END");
1782 }
1783
1784 /*===========================================================================
1785 * FUNCTION : raw_channel_cb_routine
1786 *
1787 * DESCRIPTION: helper function to handle RAW superbuf callback directly from
1788 * mm-camera-interface
1789 *
1790 * PARAMETERS :
1791 * @super_frame : received super buffer
1792 * @userdata : user data ptr
1793 *
1794 * RETURN : None
1795 *
1796  * NOTE       : super_frame will be released by the caller after this call, so if
1797  *              any async operation is needed for super_frame, it's our responsibility
1798  *              to save a copy of it to be used later.
1799 *==========================================================================*/
1800 void QCamera2HardwareInterface::raw_channel_cb_routine(mm_camera_super_buf_t *super_frame,
1801 void *userdata)
1802
1803 {
1804 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_RAW_CH_CB);
1805 char value[PROPERTY_VALUE_MAX];
1806
1807 LOGH("[KPI Perf]: E");
1808 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1809 if (pme == NULL ||
1810 pme->mCameraHandle == NULL ||
1811 !validate_handle(pme->mCameraHandle->camera_handle,
1812 super_frame->camera_handle)){
1813 LOGE("camera obj not valid");
1814 // simply free super frame
1815 free(super_frame);
1816 return;
1817 }
1818
1819 QCameraChannel *pChannel = pme->m_channels[QCAMERA_CH_TYPE_RAW];
1820 if (pChannel == NULL) {
1821 LOGE("RAW channel doesn't exist, return here");
1822 return;
1823 }
1824
1825 if (!validate_handle(pChannel->getMyHandle(), super_frame->ch_id)) {
1826 LOGE("Invalid Input super buffer");
1827 pChannel->bufDone(super_frame);
1828 return;
1829 }
1830
1831 property_get("persist.camera.dumpmetadata", value, "0");
1832 int32_t enabled = atoi(value);
1833 if (enabled) {
1834 mm_camera_buf_def_t *pMetaFrame = NULL;
1835 QCameraStream *pStream = NULL;
1836 for (uint32_t i = 0; i < super_frame->num_bufs; i++) {
1837 pStream = pChannel->getStreamByHandle(super_frame->bufs[i]->stream_id);
1838 if (pStream != NULL) {
1839 if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
1840 pMetaFrame = super_frame->bufs[i]; //find the metadata
1841 if (pMetaFrame != NULL &&
1842 ((metadata_buffer_t *)pMetaFrame->buffer)->is_tuning_params_valid) {
1843 pme->dumpMetadataToFile(pStream, pMetaFrame, (char *) "raw");
1844 }
1845 break;
1846 }
1847 }
1848 }
1849 }
1850
1851 // save a copy for the superbuf
1852 mm_camera_super_buf_t* frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
1853 if (frame == NULL) {
1854 LOGE("Error allocating memory to save received_frame structure.");
1855 pChannel->bufDone(super_frame);
1856 return;
1857 }
1858 *frame = *super_frame;
1859
1860 if (frame->num_bufs > 0) {
1861 LOGI("[KPI Perf]: superbuf frame_idx %d",
1862 frame->bufs[0]->frame_idx);
1863 }
1864
1865 // Wait on Postproc initialization if needed
1866 // then send to postprocessor
1867 if ((NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) ||
1868 (NO_ERROR != pme->m_postprocessor.processData(frame))) {
1869 LOGE("Failed to trigger process data");
1870 pChannel->bufDone(super_frame);
1871 free(frame);
1872 frame = NULL;
1873 return;
1874 }
1875
1876 LOGH("[KPI Perf]: X");
1877
1878 }
1879
1880 /*===========================================================================
1881 * FUNCTION : preview_raw_stream_cb_routine
1882 *
1883 * DESCRIPTION: helper function to handle raw frame during standard preview
1884 *
1885 * PARAMETERS :
1886 * @super_frame : received super buffer
1887 * @stream : stream object
1888 * @userdata : user data ptr
1889 *
1890 * RETURN : None
1891 *
1892 * NOTE : caller passes the ownership of super_frame, it's our
1893 * responsibility to free super_frame once it's done.
1894 *==========================================================================*/
1895 void QCamera2HardwareInterface::preview_raw_stream_cb_routine(mm_camera_super_buf_t * super_frame,
1896 QCameraStream * stream,
1897 void * userdata)
1898 {
1899 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_PREVIEW_RAW_STRM_CB);
1900 LOGH("[KPI Perf] : BEGIN");
1901 char value[PROPERTY_VALUE_MAX];
1902 bool dump_preview_raw = false, dump_video_raw = false;
1903
1904 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1905 if (pme == NULL ||
1906 pme->mCameraHandle == NULL ||
1907 !validate_handle(pme->mCameraHandle->camera_handle,
1908 super_frame->camera_handle)){
1909 LOGE("camera obj not valid");
1910 // simply free super frame
1911 free(super_frame);
1912 return;
1913 }
1914
1915 mm_camera_buf_def_t *raw_frame = super_frame->bufs[0];
1916
1917 if (raw_frame != NULL) {
1918 property_get("persist.camera.preview_raw", value, "0");
1919 dump_preview_raw = atoi(value) > 0 ? true : false;
1920 property_get("persist.camera.video_raw", value, "0");
1921 dump_video_raw = atoi(value) > 0 ? true : false;
1922 if (dump_preview_raw || (pme->mParameters.getRecordingHintValue()
1923 && dump_video_raw)) {
1924 pme->dumpFrameToFile(stream, raw_frame, QCAMERA_DUMP_FRM_RAW);
1925 }
1926 stream->bufDone(raw_frame->buf_idx);
1927 }
1928 free(super_frame);
1929
1930 LOGH("[KPI Perf] : END");
1931 }
1932
1933 /*===========================================================================
1934 * FUNCTION : snapshot_raw_stream_cb_routine
1935 *
1936 * DESCRIPTION: helper function to handle raw frame during standard capture
1937 *
1938 * PARAMETERS :
1939 * @super_frame : received super buffer
1940 * @stream : stream object
1941 * @userdata : user data ptr
1942 *
1943 * RETURN : None
1944 *
1945 * NOTE : caller passes the ownership of super_frame, it's our
1946 * responsibility to free super_frame once it's done.
1947 *==========================================================================*/
1948 void QCamera2HardwareInterface::snapshot_raw_stream_cb_routine(mm_camera_super_buf_t * super_frame,
1949 QCameraStream * stream,
1950 void * userdata)
1951 {
1952 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_SNAPSHOT_RAW_STRM_CB);
1953 LOGH("[KPI Perf] : BEGIN");
1954 char value[PROPERTY_VALUE_MAX];
1955 bool dump_raw = false;
1956
1957 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1958 if (pme == NULL ||
1959 pme->mCameraHandle == NULL ||
1960 !validate_handle(pme->mCameraHandle->camera_handle,
1961 super_frame->camera_handle)){
1962 LOGE("camera obj not valid");
1963 // simply free super frame
1964 free(super_frame);
1965 return;
1966 }
1967
1968 property_get("persist.camera.snapshot_raw", value, "0");
1969 dump_raw = atoi(value) > 0 ? true : false;
1970
1971 for (uint32_t i = 0; i < super_frame->num_bufs; i++) {
1972 if (super_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_RAW) {
1973 mm_camera_buf_def_t * raw_frame = super_frame->bufs[i];
1974 if (NULL != stream) {
1975 if (dump_raw) {
1976 pme->dumpFrameToFile(stream, raw_frame, QCAMERA_DUMP_FRM_RAW);
1977 }
1978 stream->bufDone(super_frame->bufs[i]->buf_idx);
1979 }
1980 break;
1981 }
1982 }
1983
1984 free(super_frame);
1985
1986 LOGH("[KPI Perf] : END");
1987 }
1988
1989 /*===========================================================================
1990 * FUNCTION : updateMetadata
1991 *
1992 * DESCRIPTION: Frame related parameter can be updated here
1993 *
1994 * PARAMETERS :
1995 * @pMetaData : pointer to metadata buffer
1996 *
1997 * RETURN : int32_t type of status
1998 * NO_ERROR -- success
1999  *              non-zero failure code
2000 *==========================================================================*/
2001 int32_t QCamera2HardwareInterface::updateMetadata(metadata_buffer_t *pMetaData)
2002 {
2003 int32_t rc = NO_ERROR;
2004
2005 if (pMetaData == NULL) {
2006 LOGE("Null Metadata buffer");
2007 return rc;
2008 }
2009
2010 // Sharpness
2011 cam_edge_application_t edge_application;
2012 memset(&edge_application, 0x00, sizeof(cam_edge_application_t));
2013 edge_application.sharpness = mParameters.getSharpness();
2014 if (edge_application.sharpness != 0) {
2015 edge_application.edge_mode = CAM_EDGE_MODE_FAST;
2016 } else {
2017 edge_application.edge_mode = CAM_EDGE_MODE_OFF;
2018 }
2019 ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData,
2020 CAM_INTF_META_EDGE_MODE, edge_application);
2021
2022 //Effect
2023 int32_t prmEffect = mParameters.getEffect();
2024 ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData, CAM_INTF_PARM_EFFECT, prmEffect);
2025
2026 //flip
2027 int32_t prmFlip = mParameters.getFlipMode(CAM_STREAM_TYPE_SNAPSHOT);
2028 ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData, CAM_INTF_PARM_FLIP, prmFlip);
2029
2030 //denoise
2031 uint8_t prmDenoise = (uint8_t)mParameters.isWNREnabled();
2032 ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData,
2033 CAM_INTF_META_NOISE_REDUCTION_MODE, prmDenoise);
2034
2035 //rotation & device rotation
2036 uint32_t prmRotation = mParameters.getJpegRotation();
2037 cam_rotation_info_t rotation_info;
2038 memset(&rotation_info, 0, sizeof(cam_rotation_info_t));
2039 if (prmRotation == 0) {
2040 rotation_info.rotation = ROTATE_0;
2041 } else if (prmRotation == 90) {
2042 rotation_info.rotation = ROTATE_90;
2043 } else if (prmRotation == 180) {
2044 rotation_info.rotation = ROTATE_180;
2045 } else if (prmRotation == 270) {
2046 rotation_info.rotation = ROTATE_270;
2047 }
2048
2049 uint32_t device_rotation = mParameters.getDeviceRotation();
2050 if (device_rotation == 0) {
2051 rotation_info.device_rotation = ROTATE_0;
2052 } else if (device_rotation == 90) {
2053 rotation_info.device_rotation = ROTATE_90;
2054 } else if (device_rotation == 180) {
2055 rotation_info.device_rotation = ROTATE_180;
2056 } else if (device_rotation == 270) {
2057 rotation_info.device_rotation = ROTATE_270;
2058 } else {
2059 rotation_info.device_rotation = ROTATE_0;
2060 }
2061
2062 ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData, CAM_INTF_PARM_ROTATION, rotation_info);
2063
2064 // Imglib Dynamic Scene Data
2065 cam_dyn_img_data_t dyn_img_data = mParameters.getDynamicImgData();
2066 if (mParameters.isStillMoreEnabled()) {
2067 cam_still_more_t stillmore_cap = mParameters.getStillMoreSettings();
2068 dyn_img_data.input_count = stillmore_cap.burst_count;
2069 }
2070 ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData,
2071 CAM_INTF_META_IMG_DYN_FEAT, dyn_img_data);
2072
2073 //CPP CDS
2074 int32_t prmCDSMode = mParameters.getCDSMode();
2075 ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData,
2076 CAM_INTF_PARM_CDS_MODE, prmCDSMode);
2077
2078 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, pMetaData) {
2079 if (isDualCamera()) {
2080 if ((mActiveCameras == MM_CAMERA_DUAL_CAM) && mBundledSnapshot) {
2081 crop_data->ignore_crop = 1; // CPP ignores the crop in this special zone
2082 // Set the margins to 0.
2083 crop_data->margins.widthMargins = 0.0f;
2084 crop_data->margins.heightMargins = 0.0f;
2085 } else {
2086 crop_data->ignore_crop = 0;
2087 // Get the frame margin data for the master camera and copy to the metadata
2088 crop_data->margins = m_pFovControl->getFrameMargins(mMasterCamera);
2089 }
2090 }
2091 }
2092
2093 return rc;
2094 }
2095
2096 /*===========================================================================
2097 * FUNCTION : metadata_stream_cb_routine
2098 *
2099 * DESCRIPTION: helper function to handle metadata frame from metadata stream
2100 *
2101 * PARAMETERS :
2102 * @super_frame : received super buffer
2103 * @stream : stream object
2104 * @userdata : user data ptr
2105 *
2106 * RETURN : None
2107 *
2108 * NOTE : caller passes the ownership of super_frame, it's our
2109 * responsibility to free super_frame once it's done. Metadata
2110 * could have valid entries for face detection result or
2111 * histogram statistics information.
2112 *==========================================================================*/
2113 void QCamera2HardwareInterface::metadata_stream_cb_routine(mm_camera_super_buf_t * super_frame,
2114 QCameraStream * stream,
2115 void * userdata)
2116 {
2117 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_METADATA_STRM_CB);
2118 LOGD("[KPI Perf] : BEGIN");
2119 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
2120
2121 if (pme == NULL ||
2122 pme->mCameraHandle == 0 ||
2123 !validate_handle(pme->mCameraHandle->camera_handle,
2124 super_frame->camera_handle)) {
2125 // simply free super frame
2126 free(super_frame);
2127 return;
2128 }
2129
2130 mm_camera_buf_def_t *frame = super_frame->bufs[0];
2131 metadata_buffer_t *pMetaData = (metadata_buffer_t *)frame->buffer;
2132
2133 if (pme->isDualCamera()) {
2134 mm_camera_buf_def_t *frameMain = NULL;
2135 mm_camera_buf_def_t *frameAux = NULL;
2136 metadata_buffer_t *pMetaDataMain = NULL;
2137 metadata_buffer_t *pMetaDataAux = NULL;
2138 metadata_buffer_t *resultMetadata = NULL;
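// A dual-camera superbuf may carry one metadata buffer per camera. When
// both are present, classify them as main/aux by stream handle; otherwise
// attribute the single buffer by its camera handle. Both are then handed
// to FOV-control, which produces the merged result metadata used below.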
2139 if (super_frame->num_bufs == MM_CAMERA_MAX_CAM_CNT) {
2140 if (get_main_camera_handle(super_frame->bufs[0]->stream_id)) {
2141 frameMain = super_frame->bufs[0];
2142 frameAux = super_frame->bufs[1];
2143 } else {
2144 frameMain = super_frame->bufs[1];
2145 frameAux = super_frame->bufs[0];
2146 }
2147 pMetaDataMain = (metadata_buffer_t *)frameMain->buffer;
2148 pMetaDataAux = (metadata_buffer_t *)frameAux->buffer;
2149 } else {
2150 if (super_frame->camera_handle ==
2151 get_main_camera_handle(pme->mCameraHandle->camera_handle)) {
2152 pMetaDataMain = pMetaData;
2153 pMetaDataAux = NULL;
2154 } else if (super_frame->camera_handle ==
2155 get_aux_camera_handle(pme->mCameraHandle->camera_handle)) {
2156 pMetaDataMain = NULL;
2157 pMetaDataAux = pMetaData;
2158 }
2159 }
2160
2161 resultMetadata = pme->m_pFovControl->processResultMetadata(pMetaDataMain, pMetaDataAux);
2162 if (resultMetadata != NULL) {
2163 pMetaData = resultMetadata;
2164 } else {
2165 LOGE("FOV-control: processResultMetadata failed.");
2166 stream->bufDone(super_frame);
2167 free(super_frame);
2168 return;
2169 }
2170 }
2171
2172 if(pme->m_stateMachine.isNonZSLCaptureRunning()&&
2173 !pme->mLongshotEnabled) {
2174 //Make shutter call back in non ZSL mode once raw frame is received from VFE.
2175 pme->playShutter();
2176 }
2177
2178 if (pMetaData->is_tuning_params_valid && pme->mParameters.getRecordingHintValue() == true) {
2179 //Dump Tuning data for video
2180 pme->dumpMetadataToFile(stream,frame,(char *)"Video");
2181 }
2182
2183 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, pMetaData) {
2184 // process histogram statistics info
2185 qcamera_sm_internal_evt_payload_t *payload =
2186 (qcamera_sm_internal_evt_payload_t *)
2187 malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2188 if (NULL != payload) {
2189 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2190 payload->evt_type = QCAMERA_INTERNAL_EVT_HISTOGRAM_STATS;
2191 payload->stats_data = *stats_data;
2192 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2193 if (rc != NO_ERROR) {
2194 LOGW("processEvt histogram failed");
2195 free(payload);
2196 payload = NULL;
2197
2198 }
2199 } else {
2200 LOGE("No memory for histogram qcamera_sm_internal_evt_payload_t");
2201 }
2202 }
2203
2204 IF_META_AVAILABLE(cam_face_detection_data_t, detection_data,
2205 CAM_INTF_META_FACE_DETECTION, pMetaData) {
2206
2207 cam_faces_data_t faces_data;
2208 pme->fillFacesData(faces_data, pMetaData);
2209 faces_data.detection_data.fd_type = QCAMERA_FD_PREVIEW; //HARD CODE here before MCT can support
2210
2211 qcamera_sm_internal_evt_payload_t *payload = (qcamera_sm_internal_evt_payload_t *)
2212 malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2213 if (NULL != payload) {
2214 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2215 payload->evt_type = QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT;
2216 payload->faces_data = faces_data;
2217 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2218 if (rc != NO_ERROR) {
2219 LOGW("processEvt face detection failed");
2220 free(payload);
2221 payload = NULL;
2222 }
2223 } else {
2224 LOGE("No memory for face detect qcamera_sm_internal_evt_payload_t");
2225 }
2226 }
2227
2228 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, pMetaData) {
2229 uint8_t forceAFUpdate = FALSE;
2230 //1. Earlier HAL used to rely on AF done flags set in metadata to generate callbacks to
2231 //upper layers. But in scenarios where metadata drops especially which contain important
2232 //AF information, APP will wait indefinitely for focus result resulting in capture hang.
2233 //2. HAL can check for AF state transitions to generate AF state callbacks to upper layers.
2234 //This will help overcome metadata drop issue with the earlier approach.
2235 //3. But sometimes AF state transitions can happen so fast within same metadata due to
2236 //which HAL will receive only the final AF state. HAL may perceive this as no change in AF
2237 //state depending on the state transitions happened (for example state A -> B -> A).
2238 //4. To overcome the drawbacks of both the approaches, we go for a hybrid model in which
2239 //we check state transition at both HAL level and AF module level. We rely on
2240 //'state transition' meta field set by AF module for the state transition detected by it.
2241 IF_META_AVAILABLE(uint8_t, stateChange, CAM_INTF_AF_STATE_TRANSITION, pMetaData) {
2242 forceAFUpdate = *stateChange;
2243 }
2244 //This is a special scenario in which when scene modes like landscape are selected, AF mode
2245 //gets changed to INFINITY at backend, but HAL will not be aware of it. Also, AF state in
2246 //such cases will be set to CAM_AF_STATE_INACTIVE by backend. So, detect the AF mode
2247 //change here and trigger AF callback @ processAutoFocusEvent().
2248 IF_META_AVAILABLE(uint32_t, afFocusMode, CAM_INTF_PARM_FOCUS_MODE, pMetaData) {
2249 if (((cam_focus_mode_type)(*afFocusMode) == CAM_FOCUS_MODE_INFINITY) &&
2250 pme->mActiveAF){
2251 forceAFUpdate = TRUE;
2252 }
2253 }
2254 if ((pme->m_currentFocusState != (*afState)) || forceAFUpdate) {
2255 cam_af_state_t prevFocusState = pme->m_currentFocusState;
2256 pme->m_currentFocusState = (cam_af_state_t)(*afState);
2257 qcamera_sm_internal_evt_payload_t *payload = (qcamera_sm_internal_evt_payload_t *)
2258 malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2259 if (NULL != payload) {
2260 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2261 payload->evt_type = QCAMERA_INTERNAL_EVT_FOCUS_UPDATE;
2262 payload->focus_data.focus_state = (cam_af_state_t)(*afState);
2263 //Need to flush ZSL Q only if we are transitioning from scanning state
2264 //to focused/not focused state.
2265 payload->focus_data.flush_info.needFlush =
2266 ((prevFocusState == CAM_AF_STATE_PASSIVE_SCAN) ||
2267 (prevFocusState == CAM_AF_STATE_ACTIVE_SCAN)) &&
2268 ((pme->m_currentFocusState == CAM_AF_STATE_FOCUSED_LOCKED) ||
2269 (pme->m_currentFocusState == CAM_AF_STATE_NOT_FOCUSED_LOCKED));
2270 payload->focus_data.flush_info.focused_frame_idx = frame->frame_idx;
2271
2272 IF_META_AVAILABLE(float, focusDistance,
2273 CAM_INTF_META_LENS_FOCUS_DISTANCE, pMetaData) {
2274 payload->focus_data.focus_dist.
2275 focus_distance[CAM_FOCUS_DISTANCE_OPTIMAL_INDEX] = *focusDistance;
2276 }
2277 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, pMetaData) {
2278 payload->focus_data.focus_dist.
2279 focus_distance[CAM_FOCUS_DISTANCE_NEAR_INDEX] = focusRange[0];
2280 payload->focus_data.focus_dist.
2281 focus_distance[CAM_FOCUS_DISTANCE_FAR_INDEX] = focusRange[1];
2282 }
2283 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, pMetaData) {
2284 payload->focus_data.focus_mode = (cam_focus_mode_type)(*focusMode);
2285 }
2286 IF_META_AVAILABLE(uint8_t, isDepthFocus,
2287 CAM_INTF_META_FOCUS_DEPTH_INFO, pMetaData) {
2288 payload->focus_data.isDepth = *isDepthFocus;
2289 }
2290 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2291 if (rc != NO_ERROR) {
2292 LOGW("processEvt focus failed");
2293 free(payload);
2294 payload = NULL;
2295 }
2296 } else {
2297 LOGE("No memory for focus qcamera_sm_internal_evt_payload_t");
2298 }
2299 }
2300 }
2301
2302 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, pMetaData) {
2303 if (crop_data->num_of_streams > MAX_NUM_STREAMS) {
2304 LOGE("Invalid num_of_streams %d in crop_data",
2305 crop_data->num_of_streams);
2306 } else {
2307 qcamera_sm_internal_evt_payload_t *payload =
2308 (qcamera_sm_internal_evt_payload_t *)
2309 malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2310 if (NULL != payload) {
2311 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2312 payload->evt_type = QCAMERA_INTERNAL_EVT_CROP_INFO;
2313 payload->crop_data = *crop_data;
2314 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2315 if (rc != NO_ERROR) {
2316 LOGE("processEvt crop info failed");
2317 free(payload);
2318 payload = NULL;
2319 }
2320 } else {
2321 LOGE("No memory for crop_info qcamera_sm_internal_evt_payload_t");
2322 }
2323 }
2324 }
2325
2326 IF_META_AVAILABLE(int32_t, prep_snapshot_done_state,
2327 CAM_INTF_META_PREP_SNAPSHOT_DONE, pMetaData) {
2328 qcamera_sm_internal_evt_payload_t *payload =
2329 (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2330 if (NULL != payload) {
2331 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2332 payload->evt_type = QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE;
2333 payload->prep_snapshot_state = (cam_prep_snapshot_state_t)*prep_snapshot_done_state;
2334 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2335 if (rc != NO_ERROR) {
2336 LOGW("processEvt prep_snapshot failed");
2337 free(payload);
2338 payload = NULL;
2339 }
2340 } else {
2341 LOGE("No memory for prep_snapshot qcamera_sm_internal_evt_payload_t");
2342 }
2343 }
2344
2345 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
2346 CAM_INTF_META_ASD_HDR_SCENE_DATA, pMetaData) {
2347 LOGH("hdr_scene_data: %d %f\n",
2348 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
2349 //Handle this HDR meta data only if capture is not in process
2350 if (!pme->m_stateMachine.isCaptureRunning()) {
2351 qcamera_sm_internal_evt_payload_t *payload =
2352 (qcamera_sm_internal_evt_payload_t *)
2353 malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2354 if (NULL != payload) {
2355 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2356 payload->evt_type = QCAMERA_INTERNAL_EVT_HDR_UPDATE;
2357 payload->hdr_data = *hdr_scene_data;
2358 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2359 if (rc != NO_ERROR) {
2360 LOGW("processEvt hdr update failed");
2361 free(payload);
2362 payload = NULL;
2363 }
2364 } else {
2365 LOGE("No memory for hdr update qcamera_sm_internal_evt_payload_t");
2366 }
2367 }
2368 }
2369
2370 IF_META_AVAILABLE(cam_asd_decision_t, cam_asd_info,
2371 CAM_INTF_META_ASD_SCENE_INFO, pMetaData) {
2372 qcamera_sm_internal_evt_payload_t *payload =
2373 (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2374 if (NULL != payload) {
2375 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2376 payload->evt_type = QCAMERA_INTERNAL_EVT_ASD_UPDATE;
2377 payload->asd_data = (cam_asd_decision_t)*cam_asd_info;
2378 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2379 if (rc != NO_ERROR) {
2380 LOGW("processEvt asd_update failed");
2381 free(payload);
2382 payload = NULL;
2383 }
2384 } else {
2385 LOGE("No memory for asd_update qcamera_sm_internal_evt_payload_t");
2386 }
2387 }
2388
2389 IF_META_AVAILABLE(cam_awb_params_t, awb_params, CAM_INTF_META_AWB_INFO, pMetaData) {
2390 LOGH("metadata for awb params.");
2391 qcamera_sm_internal_evt_payload_t *payload =
2392 (qcamera_sm_internal_evt_payload_t *)
2393 malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2394 if (NULL != payload) {
2395 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2396 payload->evt_type = QCAMERA_INTERNAL_EVT_AWB_UPDATE;
2397 payload->awb_data = *awb_params;
2398 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2399 if (rc != NO_ERROR) {
2400 LOGW("processEvt awb_update failed");
2401 free(payload);
2402 payload = NULL;
2403 }
2404 } else {
2405 LOGE("No memory for awb_update qcamera_sm_internal_evt_payload_t");
2406 }
2407 }
2408
2409 IF_META_AVAILABLE(uint32_t, flash_mode, CAM_INTF_META_FLASH_MODE, pMetaData) {
2410 pme->mExifParams.sensor_params.flash_mode = (cam_flash_mode_t)*flash_mode;
2411 }
2412
2413 IF_META_AVAILABLE(int32_t, flash_state, CAM_INTF_META_FLASH_STATE, pMetaData) {
2414 pme->mExifParams.sensor_params.flash_state = (cam_flash_state_t) *flash_state;
2415 }
2416
2417 IF_META_AVAILABLE(float, aperture_value, CAM_INTF_META_LENS_APERTURE, pMetaData) {
2418 pme->mExifParams.sensor_params.aperture_value = *aperture_value;
2419 }
2420
2421 IF_META_AVAILABLE(cam_3a_params_t, ae_params, CAM_INTF_META_AEC_INFO, pMetaData) {
2422 pme->mExifParams.cam_3a_params = *ae_params;
2423 pme->mExifParams.cam_3a_params_valid = TRUE;
2424 pme->mFlashNeeded = ae_params->flash_needed;
2425 pme->mExifParams.cam_3a_params.brightness = (float) pme->mParameters.getBrightness();
2426 qcamera_sm_internal_evt_payload_t *payload =
2427 (qcamera_sm_internal_evt_payload_t *)
2428 malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2429 if (NULL != payload) {
2430 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2431 payload->evt_type = QCAMERA_INTERNAL_EVT_AE_UPDATE;
2432 payload->ae_data = *ae_params;
2433 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2434 if (rc != NO_ERROR) {
2435 LOGW("processEvt ae_update failed");
2436 free(payload);
2437 payload = NULL;
2438 }
2439 } else {
2440 LOGE("No memory for ae_update qcamera_sm_internal_evt_payload_t");
2441 }
2442 }
2443
2444 IF_META_AVAILABLE(int32_t, wb_mode, CAM_INTF_PARM_WHITE_BALANCE, pMetaData) {
2445 pme->mExifParams.cam_3a_params.wb_mode = (cam_wb_mode_type) *wb_mode;
2446 }
2447
2448 IF_META_AVAILABLE(cam_sensor_params_t, sensor_params, CAM_INTF_META_SENSOR_INFO, pMetaData) {
2449 pme->mExifParams.sensor_params = *sensor_params;
2450 }
2451
2452 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
2453 CAM_INTF_META_EXIF_DEBUG_AE, pMetaData) {
2454 if (pme->mExifParams.debug_params) {
2455 pme->mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
2456 pme->mExifParams.debug_params->ae_debug_params_valid = TRUE;
2457 }
2458 }
2459
2460 IF_META_AVAILABLE(cam_awb_exif_debug_t, awb_exif_debug_params,
2461 CAM_INTF_META_EXIF_DEBUG_AWB, pMetaData) {
2462 if (pme->mExifParams.debug_params) {
2463 pme->mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
2464 pme->mExifParams.debug_params->awb_debug_params_valid = TRUE;
2465 }
2466 }
2467
2468 IF_META_AVAILABLE(cam_af_exif_debug_t, af_exif_debug_params,
2469 CAM_INTF_META_EXIF_DEBUG_AF, pMetaData) {
2470 if (pme->mExifParams.debug_params) {
2471 pme->mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
2472 pme->mExifParams.debug_params->af_debug_params_valid = TRUE;
2473 }
2474 }
2475
2476 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
2477 CAM_INTF_META_EXIF_DEBUG_ASD, pMetaData) {
2478 if (pme->mExifParams.debug_params) {
2479 pme->mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
2480 pme->mExifParams.debug_params->asd_debug_params_valid = TRUE;
2481 }
2482 }
2483
2484 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t, stats_exif_debug_params,
2485 CAM_INTF_META_EXIF_DEBUG_STATS, pMetaData) {
2486 if (pme->mExifParams.debug_params) {
2487 pme->mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
2488 pme->mExifParams.debug_params->stats_debug_params_valid = TRUE;
2489 }
2490 }
2491
2492 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t, bestats_exif_debug_params,
2493 CAM_INTF_META_EXIF_DEBUG_BESTATS, pMetaData) {
2494 if (pme->mExifParams.debug_params) {
2495 pme->mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
2496 pme->mExifParams.debug_params->bestats_debug_params_valid = TRUE;
2497 }
2498 }
2499
2500 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
2501 CAM_INTF_META_EXIF_DEBUG_BHIST, pMetaData) {
2502 if (pme->mExifParams.debug_params) {
2503 pme->mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
2504 pme->mExifParams.debug_params->bhist_debug_params_valid = TRUE;
2505 }
2506 }
2507
2508 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
2509 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, pMetaData) {
2510 if (pme->mExifParams.debug_params) {
2511 pme->mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
2512 pme->mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
2513 }
2514 }
2515
2516 IF_META_AVAILABLE(uint32_t, led_mode, CAM_INTF_META_LED_MODE_OVERRIDE, pMetaData) {
2517 qcamera_sm_internal_evt_payload_t *payload =
2518 (qcamera_sm_internal_evt_payload_t *)
2519 malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2520 if (NULL != payload) {
2521 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2522 payload->evt_type = QCAMERA_INTERNAL_EVT_LED_MODE_OVERRIDE;
2523 payload->led_data = (cam_flash_mode_t)*led_mode;
2524 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2525 if (rc != NO_ERROR) {
2526 LOGW("processEvt led mode override failed");
2527 free(payload);
2528 payload = NULL;
2529 }
2530 } else {
2531 LOGE("No memory for led_mode_override qcamera_sm_internal_evt_payload_t");
2532 }
2533 }
2534
2535 cam_edge_application_t edge_application;
2536 memset(&edge_application, 0x00, sizeof(cam_edge_application_t));
2537 edge_application.sharpness = pme->mParameters.getSharpness();
2538 if (edge_application.sharpness != 0) {
2539 edge_application.edge_mode = CAM_EDGE_MODE_FAST;
2540 } else {
2541 edge_application.edge_mode = CAM_EDGE_MODE_OFF;
2542 }
2543 ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData, CAM_INTF_META_EDGE_MODE, edge_application);
2544
2545 IF_META_AVAILABLE(cam_focus_pos_info_t, cur_pos_info,
2546 CAM_INTF_META_FOCUS_POSITION, pMetaData) {
2547 qcamera_sm_internal_evt_payload_t *payload =
2548 (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2549 if (NULL != payload) {
2550 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2551 payload->evt_type = QCAMERA_INTERNAL_EVT_FOCUS_POS_UPDATE;
2552 payload->focus_pos = *cur_pos_info;
2553 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2554 if (rc != NO_ERROR) {
2555 LOGW("processEvt focus_pos_update failed");
2556 free(payload);
2557 payload = NULL;
2558 }
2559 } else {
2560 LOGE("No memory for focus_pos_update qcamera_sm_internal_evt_payload_t");
2561 }
2562 }
2563
2564 if (pme->mParameters.getLowLightCapture()) {
2565 IF_META_AVAILABLE(cam_low_light_mode_t, low_light_level,
2566 CAM_INTF_META_LOW_LIGHT, pMetaData) {
2567 pme->mParameters.setLowLightLevel(*low_light_level);
2568 }
2569 }
2570
2571 IF_META_AVAILABLE(cam_dyn_img_data_t, dyn_img_data,
2572 CAM_INTF_META_IMG_DYN_FEAT, pMetaData) {
2573 pme->mParameters.setDynamicImgData(*dyn_img_data);
2574 }
2575
2576 IF_META_AVAILABLE(int32_t, touch_ae_status, CAM_INTF_META_TOUCH_AE_RESULT, pMetaData) {
2577 LOGD("touch_ae_status: %d", *touch_ae_status);
2578 }
2579
2580 if (pme->isDualCamera()) {
2581 pme->fillDualCameraFOVControl();
2582 }
2583
2584 IF_META_AVAILABLE(int32_t, led_result, CAM_INTF_META_LED_CALIB_RESULT, pMetaData) {
2585 qcamera_sm_internal_evt_payload_t *payload =
2586 (qcamera_sm_internal_evt_payload_t *)malloc(
2587 sizeof(qcamera_sm_internal_evt_payload_t));
2588 if (NULL != payload) {
2589 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2590 payload->evt_type = QCAMERA_INTERNAL_EVT_LED_CALIB_UPDATE;
2591 payload->led_calib_result = (int32_t)*led_result;
2592 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2593 if (rc != NO_ERROR) {
2594 LOGW("LED_calibration result update failed");
2595 free(payload);
2596 payload = NULL;
2597 }
2598 } else {
2599 LOGE("No memory for led_calib_update qcamera_sm_internal_evt_payload_t");
2600 }
2601 }
2602
2603 stream->bufDone(super_frame);
2604 free(super_frame);
2605
2606 LOGD("[KPI Perf] : END");
2607 }
2608
2609 /*===========================================================================
2610 * FUNCTION : reprocess_stream_cb_routine
2611 *
2612 * DESCRIPTION: helper function to handle reprocess frame from reprocess stream
2613  *              (after reprocess, e.g., ZSL snapshot frame after WNR if
2614 * WNR is enabled)
2615 *
2616 * PARAMETERS :
2617 * @super_frame : received super buffer
2618 * @stream : stream object
2619 * @userdata : user data ptr
2620 *
2621 * RETURN : None
2622 *
2623 * NOTE : caller passes the ownership of super_frame, it's our
2624 * responsibility to free super_frame once it's done. In this
2625 * case, reprocessed frame need to be passed to postprocessor
2626 * for jpeg encoding.
2627 *==========================================================================*/
2628 void QCamera2HardwareInterface::reprocess_stream_cb_routine(mm_camera_super_buf_t * super_frame,
2629 QCameraStream * /*stream*/,
2630 void * userdata)
2631 {
2632 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_REPROC_STRM_CB);
2633 LOGH("[KPI Perf]: E");
2634 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
2635 if (pme == NULL ||
2636 pme->mCameraHandle == NULL ||
2637 !validate_handle(pme->mCameraHandle->camera_handle,
2638 super_frame->camera_handle)){
2639 LOGE("camera obj not valid");
2640 // simply free super frame
2641 free(super_frame);
2642 return;
2643 }
2644
2645 pme->m_postprocessor.processPPData(super_frame);
2646
2647 LOGH("[KPI Perf]: X");
2648 }
2649
2650 /*===========================================================================
2651 * FUNCTION : callback_stream_cb_routine
2652 *
2653  * DESCRIPTION: function to process CALLBACK stream data.
2654  *              Frame will be processed and sent to framework
2655 *
2656 * PARAMETERS :
2657 * @super_frame : received super buffer
2658 * @stream : stream object
2659 * @userdata : user data ptr
2660 *
2661 * RETURN : None
2662 *==========================================================================*/
2663 void QCamera2HardwareInterface::callback_stream_cb_routine(mm_camera_super_buf_t *super_frame,
2664 QCameraStream *stream, void *userdata)
2665 {
2666 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_CB_STRM_CB);
2667 LOGH("[KPI Perf]: E");
2668 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
2669
2670 if (pme == NULL ||
2671 pme->mCameraHandle == 0 ||
2672 !validate_handle(pme->mCameraHandle->camera_handle,
2673 super_frame->camera_handle)) {
2674 // simply free super frame
2675 free(super_frame);
2676 return;
2677 }
2678
2679 mm_camera_buf_def_t *frame = super_frame->bufs[0];
2680 if (NULL == frame) {
2681 LOGE("preview callback frame is NULL");
2682 free(super_frame);
2683 return;
2684 }
2685
2686 if (!pme->needProcessPreviewFrame(frame->frame_idx)) {
2687 LOGH("preview is not running, no need to process");
2688 stream->bufDone(frame->buf_idx);
2689 free(super_frame);
2690 return;
2691 }
2692
2693 QCameraMemory *previewMemObj = (QCameraMemory *)frame->mem_info;
2694 // Handle preview data callback
2695 if (pme->mDataCb != NULL &&
2696 (pme->msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0) &&
2697 (!pme->mParameters.isSceneSelectionEnabled())) {
2698 // Preset cache flags to be handled when the buffer comes back
2699 frame->cache_flags |= CPU_HAS_READ;
2700 int32_t rc = pme->sendPreviewCallback(stream, previewMemObj, frame->buf_idx);
2701 if (NO_ERROR != rc) {
2702 LOGE("Preview callback was not sent successfully");
2703 }
2704 }
2705 stream->bufDone(frame->buf_idx);
2706 free(super_frame);
2707 LOGH("[KPI Perf]: X");
2708 }
2709
2710 /*===========================================================================
2711 * FUNCTION : dumpFrameToFile
2712 *
2713 * DESCRIPTION: helper function to dump jpeg into file for debug purpose.
2714 *
2715 * PARAMETERS :
2716 * @data : data ptr
2717 * @size : length of data buffer
2718 * @index : identifier for data
2719 *
2720 * RETURN : None
2721 *==========================================================================*/
2722 void QCamera2HardwareInterface::dumpJpegToFile(const void *data,
2723 size_t size, uint32_t index)
2724 {
2725 char value[PROPERTY_VALUE_MAX];
2726 property_get("persist.camera.dumpimg", value, "0");
2727 uint32_t enabled = (uint32_t) atoi(value);
2728 uint32_t frm_num = 0;
2729 uint32_t skip_mode = 0;
2730
2731 char buf[32];
2732 cam_dimension_t dim;
2733 memset(buf, 0, sizeof(buf));
2734 memset(&dim, 0, sizeof(dim));
2735
2736 if(((enabled & QCAMERA_DUMP_FRM_OUTPUT_JPEG) && data) ||
2737 ((true == m_bIntJpegEvtPending) && data)) {
2738 frm_num = ((enabled & 0xffff0000) >> 16);
2739 if(frm_num == 0) {
2740 frm_num = 10; //default 10 frames
2741 }
2742 if(frm_num > 256) {
2743 frm_num = 256; //256 buffers cycle around
2744 }
2745 skip_mode = ((enabled & 0x0000ff00) >> 8);
2746 if(skip_mode == 0) {
2747 skip_mode = 1; //no-skip
2748 }
2749
2750 if( mDumpSkipCnt % skip_mode == 0) {
2751 if((frm_num == 256) && (mDumpFrmCnt >= frm_num)) {
2752 // reset frame count if cycling
2753 mDumpFrmCnt = 0;
2754 }
2755 if (mDumpFrmCnt <= frm_num) {
2756 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION "%d_%d.jpg",
2757 mDumpFrmCnt, index);
2758 if (true == m_bIntJpegEvtPending) {
2759 strlcpy(m_BackendFileName, buf, QCAMERA_MAX_FILEPATH_LENGTH);
2760 mBackendFileSize = size;
2761 }
2762
2763 int file_fd = open(buf, O_RDWR | O_CREAT, 0777);
2764 if (file_fd >= 0) {
2765 ssize_t written_len = write(file_fd, data, size);
2766 fchmod(file_fd, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH);
2767 LOGH("written number of bytes %zd\n",
2768 written_len);
2769 close(file_fd);
2770 } else {
2771 LOGE("fail to open file for image dumping");
2772 }
2773 if (false == m_bIntJpegEvtPending) {
2774 mDumpFrmCnt++;
2775 }
2776 }
2777 }
2778 mDumpSkipCnt++;
2779 }
2780 }
2781
2782
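/*===========================================================================
 * FUNCTION   : dumpMetadataToFile
 *
 * DESCRIPTION: helper function to dump tuning metadata into file for debug
 *              purpose, gated by the persist.camera.dumpmetadata property.
 *
 * PARAMETERS :
 *   @stream  : stream object the metadata frame belongs to
 *   @frame   : metadata frame buffer
 *   @type    : tag used in the dump file name (e.g. "Snapshot", "Video", "raw")
 *
 * RETURN     : None
 *==========================================================================*/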
2783 void QCamera2HardwareInterface::dumpMetadataToFile(QCameraStream *stream,
2784 mm_camera_buf_def_t *frame,char *type)
2785 {
2786 char value[PROPERTY_VALUE_MAX];
2787 uint32_t frm_num = 0;
2788 metadata_buffer_t *metadata = (metadata_buffer_t *)frame->buffer;
2789 property_get("persist.camera.dumpmetadata", value, "0");
2790 uint32_t enabled = (uint32_t) atoi(value);
2791 if (stream == NULL) {
2792 LOGH("No op");
2793 return;
2794 }
2795
2796 uint32_t dumpFrmCnt = stream->mDumpMetaFrame;
2797 if(enabled){
2798 frm_num = ((enabled & 0xffff0000) >> 16);
2799 if (frm_num == 0) {
2800 frm_num = 10; //default 10 frames
2801 }
2802 if (frm_num > 256) {
2803 frm_num = 256; //256 buffers cycle around
2804 }
2805 if ((frm_num == 256) && (dumpFrmCnt >= frm_num)) {
2806 // reset frame count if cycling
2807 dumpFrmCnt = 0;
2808 }
2809 LOGH("dumpFrmCnt= %u, frm_num = %u", dumpFrmCnt, frm_num);
2810 if (dumpFrmCnt < frm_num) {
2811 char timeBuf[128];
2812 char buf[32];
2813 memset(buf, 0, sizeof(buf));
2814 memset(timeBuf, 0, sizeof(timeBuf));
2815 time_t current_time;
2816 struct tm * timeinfo;
2817 time (&current_time);
2818 timeinfo = localtime (&current_time);
2819 if (NULL != timeinfo) {
2820 strftime(timeBuf, sizeof(timeBuf),
2821 QCAMERA_DUMP_FRM_LOCATION "%Y%m%d%H%M%S", timeinfo);
2822 }
2823 String8 filePath(timeBuf);
2824 snprintf(buf, sizeof(buf), "%um_%s_%d.bin", dumpFrmCnt, type, frame->frame_idx);
2825 filePath.append(buf);
2826 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
2827 if (file_fd >= 0) {
2828 ssize_t written_len = 0;
2829 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
2830 void *data = (void *)((uint8_t *)&metadata->tuning_params.tuning_data_version);
2831 written_len += write(file_fd, data, sizeof(uint32_t));
2832 data = (void *)((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size);
2833 LOGH("tuning_sensor_data_size %d",(int)(*(int *)data));
2834 written_len += write(file_fd, data, sizeof(uint32_t));
2835 data = (void *)((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size);
2836 LOGH("tuning_vfe_data_size %d",(int)(*(int *)data));
2837 written_len += write(file_fd, data, sizeof(uint32_t));
2838 data = (void *)((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size);
2839 LOGH("tuning_cpp_data_size %d",(int)(*(int *)data));
2840 written_len += write(file_fd, data, sizeof(uint32_t));
2841 data = (void *)((uint8_t *)&metadata->tuning_params.tuning_cac_data_size);
2842 LOGH("tuning_cac_data_size %d",(int)(*(int *)data));
2843 written_len += write(file_fd, data, sizeof(uint32_t));
2844 data = (void *)((uint8_t *)&metadata->tuning_params.tuning_cac_data_size2);
2845 LOGH("tuning_cac_data_size2 %d",(int)(*(int *)data));
2846 written_len += write(file_fd, data, sizeof(uint32_t));
2847 size_t total_size = metadata->tuning_params.tuning_sensor_data_size;
2848 data = (void *)((uint8_t *)&metadata->tuning_params.data);
2849 written_len += write(file_fd, data, total_size);
2850 total_size = metadata->tuning_params.tuning_vfe_data_size;
2851 data = (void *)((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]);
2852 written_len += write(file_fd, data, total_size);
2853 total_size = metadata->tuning_params.tuning_cpp_data_size;
2854 data = (void *)((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]);
2855 written_len += write(file_fd, data, total_size);
2856 total_size = metadata->tuning_params.tuning_cac_data_size;
2857 data = (void *)((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]);
2858 written_len += write(file_fd, data, total_size);
2859
2860 total_size = metadata->tuning_params.tuning_mod1_stats_data_size;
2861 data =
2862 (void *)((uint8_t *)&metadata->tuning_params.data[TUNING_MOD1_AEC_DATA_OFFSET]);
2863 written_len += write(file_fd, data, total_size);
2864 data =
2865 (void *)((uint8_t *)&metadata->tuning_params.data[TUNING_MOD1_AWB_DATA_OFFSET]);
2866 written_len += write(file_fd, data, total_size);
2867 data =
2868 (void *)((uint8_t *)&metadata->tuning_params.data[TUNING_MOD1_AF_DATA_OFFSET]);
2869 written_len += write(file_fd, data, total_size);
2870 close(file_fd);
2871 } else {
2872 LOGE("fail to open file for image dumping");
2873 }
2874 dumpFrmCnt++;
2875 }
2876 }
2877 stream->mDumpMetaFrame = dumpFrmCnt;
2878 }
2879 /*===========================================================================
2880 * FUNCTION : dumpFrameToFile
2881 *
2882 * DESCRIPTION: helper function to dump frame into file for debug purpose.
2883 *
2884 * PARAMETERS :
2885 * @data : data ptr
2886 * @size : length of data buffer
2887 * @index : identifier for data
2888  * @dump_type : type of the frame to be dumped. Only if such
2889  *              a dump type is enabled will the frame be
2890  *              dumped into a file.
2891 *
2892 * RETURN : None
2893 *==========================================================================*/
2894 void QCamera2HardwareInterface::dumpFrameToFile(QCameraStream *stream,
2895 mm_camera_buf_def_t *frame, uint32_t dump_type, const char *misc)
2896 {
2897 char value[PROPERTY_VALUE_MAX];
2898 property_get("persist.camera.dumpimg", value, "0");
2899 uint32_t enabled = (uint32_t) atoi(value);
2900 uint32_t frm_num = 0;
2901 uint32_t skip_mode = 0;
2902
2903 if (NULL == stream) {
2904 LOGE("stream object is null");
2905 return;
2906 }
2907
2908 uint32_t dumpFrmCnt = stream->mDumpFrame;
2909
2910 if (true == m_bIntRawEvtPending) {
2911 enabled = QCAMERA_DUMP_FRM_RAW;
2912 }
2913
2914 if((enabled & QCAMERA_DUMP_FRM_MASK_ALL)) {
2915 if((enabled & dump_type) && stream && frame) {
2916 frm_num = ((enabled & 0xffff0000) >> 16);
2917 if(frm_num == 0) {
2918 frm_num = 10; //default 10 frames
2919 }
2920 if(frm_num > 256) {
2921 frm_num = 256; //256 buffers cycle around
2922 }
2923 skip_mode = ((enabled & 0x0000ff00) >> 8);
2924 if(skip_mode == 0) {
2925 skip_mode = 1; //no-skip
2926 }
2927 if(stream->mDumpSkipCnt == 0)
2928 stream->mDumpSkipCnt = 1;
2929
2930 if( stream->mDumpSkipCnt % skip_mode == 0) {
2931 if((frm_num == 256) && (dumpFrmCnt >= frm_num)) {
2932 // reset frame count if cycling
2933 dumpFrmCnt = 0;
2934 }
2935 if (dumpFrmCnt <= frm_num) {
2936 char buf[32];
2937 char timeBuf[128];
2938 time_t current_time;
2939 struct tm * timeinfo;
2940
2941 memset(timeBuf, 0, sizeof(timeBuf));
2942
2943 time (&current_time);
2944 timeinfo = localtime (&current_time);
2945 memset(buf, 0, sizeof(buf));
2946
2947 cam_dimension_t dim;
2948 memset(&dim, 0, sizeof(dim));
2949 stream->getFrameDimension(dim);
2950
2951 cam_frame_len_offset_t offset;
2952 memset(&offset, 0, sizeof(cam_frame_len_offset_t));
2953 stream->getFrameOffset(offset);
2954
2955 if (NULL != timeinfo) {
2956 strftime(timeBuf, sizeof(timeBuf),
2957 QCAMERA_DUMP_FRM_LOCATION "%Y%m%d%H%M%S", timeinfo);
2958 }
2959 String8 filePath(timeBuf);
2960 switch (dump_type) {
2961 case QCAMERA_DUMP_FRM_PREVIEW:
2962 {
2963 snprintf(buf, sizeof(buf), "%dp_%dx%d_%d.yuv",
2964 dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
2965 }
2966 break;
2967 case QCAMERA_DUMP_FRM_THUMBNAIL:
2968 {
2969 snprintf(buf, sizeof(buf), "%dt_%dx%d_%d.yuv",
2970 dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
2971 }
2972 break;
2973 case QCAMERA_DUMP_FRM_INPUT_JPEG:
2974 {
2975 if (!mParameters.isPostProcScaling()) {
2976 mParameters.getStreamDimension(CAM_STREAM_TYPE_SNAPSHOT, dim);
2977 } else {
2978 stream->getFrameDimension(dim);
2979 }
2980 if (misc != NULL) {
2981 snprintf(buf, sizeof(buf), "%ds_%dx%d_%d_%s.yuv",
2982 dumpFrmCnt, dim.width, dim.height, frame->frame_idx, misc);
2983 } else {
2984 snprintf(buf, sizeof(buf), "%ds_%dx%d_%d.yuv",
2985 dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
2986 }
2987 }
2988 break;
2989 case QCAMERA_DUMP_FRM_INPUT_REPROCESS:
2990 {
2991 stream->getFrameDimension(dim);
2992 if (misc != NULL) {
2993 snprintf(buf, sizeof(buf), "%dir_%dx%d_%d_%s.yuv",
2994 dumpFrmCnt, dim.width, dim.height, frame->frame_idx, misc);
2995 } else {
2996 snprintf(buf, sizeof(buf), "%dir_%dx%d_%d.yuv",
2997 dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
2998 }
2999 }
3000 break;
3001 case QCAMERA_DUMP_FRM_VIDEO:
3002 {
3003 snprintf(buf, sizeof(buf), "%dv_%dx%d_%d.yuv",
3004 dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
3005 }
3006 break;
3007 case QCAMERA_DUMP_FRM_RAW:
3008 {
3009 mParameters.getStreamDimension(CAM_STREAM_TYPE_RAW, dim);
3010 snprintf(buf, sizeof(buf), "%dr_%dx%d_%d.raw",
3011 dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
3012 }
3013 break;
3014 case QCAMERA_DUMP_FRM_OUTPUT_JPEG:
3015 {
3016 mParameters.getStreamDimension(CAM_STREAM_TYPE_SNAPSHOT, dim);
3017 snprintf(buf, sizeof(buf), "%dj_%dx%d_%d.yuv",
3018 dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
3019 }
3020 break;
3021 default:
3022 LOGE("Not supported for dumping stream type %d",
3023 dump_type);
3024 return;
3025 }
3026
3027 filePath.append(buf);
3028 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
3029 ssize_t written_len = 0;
3030 if (file_fd >= 0) {
3031 void *data = NULL;
3032
3033 fchmod(file_fd, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH);
3034 for (uint32_t i = 0; i < offset.num_planes; i++) {
3035 uint32_t index = offset.mp[i].offset;
3036 if (i > 0) {
3037 index += offset.mp[i-1].len;
3038 }
3039
3040 if (offset.mp[i].meta_len != 0) {
3041 data = (void *)((uint8_t *)frame->buffer + index);
3042 written_len += write(file_fd, data,
3043 (size_t)offset.mp[i].meta_len);
3044 index += (uint32_t)offset.mp[i].meta_len;
3045 }
3046
3047 for (int j = 0; j < offset.mp[i].height; j++) {
3048 data = (void *)((uint8_t *)frame->buffer + index);
3049 written_len += write(file_fd, data,
3050 (size_t)offset.mp[i].width);
3051 index += (uint32_t)offset.mp[i].stride;
3052 }
3053 }
3054
3055 LOGH("written number of bytes %zd\n",
3056 written_len);
3057 close(file_fd);
3058 frame->cache_flags |= CPU_HAS_READ;
3059 } else {
3060 LOGE("fail to open file for image dumping");
3061 }
3062 if (true == m_bIntRawEvtPending) {
3063 strlcpy(m_BackendFileName, filePath.string(), QCAMERA_MAX_FILEPATH_LENGTH);
3064 mBackendFileSize = (size_t)written_len;
3065 } else {
3066 dumpFrmCnt++;
3067 }
3068 }
3069 }
3070 stream->mDumpSkipCnt++;
3071 }
3072 } else {
3073 dumpFrmCnt = 0;
3074 }
3075 stream->mDumpFrame = dumpFrmCnt;
3076 }
3077
3078 /*===========================================================================
3079 * FUNCTION : debugShowVideoFPS
3080 *
3081 * DESCRIPTION: helper function to log video frame FPS for debug purpose.
3082 *
3083 * PARAMETERS : None
3084 *
3085 * RETURN : None
3086 *==========================================================================*/
3087 void QCamera2HardwareInterface::debugShowVideoFPS()
3088 {
3089 mVFrameCount++;
3090 nsecs_t now = systemTime();
3091 nsecs_t diff = now - mVLastFpsTime;
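// Refresh the estimate roughly every 250 ms: frames since the last sample
// divided by the elapsed time.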
3092 if (diff > ms2ns(250)) {
3093 mVFps = (((double)(mVFrameCount - mVLastFrameCount)) *
3094 (double)(s2ns(1))) / (double)diff;
3095 LOGI("[KPI Perf]: PROFILE_VIDEO_FRAMES_PER_SECOND: %.4f Cam ID = %d",
3096 mVFps, mCameraId);
3097 mVLastFpsTime = now;
3098 mVLastFrameCount = mVFrameCount;
3099 }
3100 }
3101
3102 /*===========================================================================
3103 * FUNCTION : debugShowPreviewFPS
3104 *
3105 * DESCRIPTION: helper function to log preview frame FPS for debug purpose.
3106 *
3107 * PARAMETERS : None
3108 *
3109 * RETURN : None
3110 *==========================================================================*/
3111 void QCamera2HardwareInterface::debugShowPreviewFPS()
3112 {
3113 mPFrameCount++;
3114 nsecs_t now = systemTime();
3115 nsecs_t diff = now - mPLastFpsTime;
3116 if (diff > ms2ns(250)) {
3117 mPFps = (((double)(mPFrameCount - mPLastFrameCount)) *
3118 (double)(s2ns(1))) / (double)diff;
3119 LOGI("[KPI Perf]: PROFILE_PREVIEW_FRAMES_PER_SECOND : %.4f Cam ID = %d",
3120 mPFps, mCameraId);
3121 mPLastFpsTime = now;
3122 mPLastFrameCount = mPFrameCount;
3123 }
3124 }
3125
3126 /*===========================================================================
3127 * FUNCTION : fillFacesData
3128 *
3129 * DESCRIPTION: helper function to fill in face related metadata into a struct.
3130 *
3131 * PARAMETERS :
3132 * @faces_data : face features data to be filled
3133 * @metadata : metadata structure to read face features from
3134 *
3135 * RETURN : None
3136 *==========================================================================*/
3137 void QCamera2HardwareInterface::fillFacesData(cam_faces_data_t &faces_data,
3138 metadata_buffer_t *metadata)
3139 {
3140 memset(&faces_data, 0, sizeof(cam_faces_data_t));
3141
3142 IF_META_AVAILABLE(cam_face_detection_data_t, p_detection_data,
3143 CAM_INTF_META_FACE_DETECTION, metadata) {
3144 faces_data.detection_data = *p_detection_data;
3145 if (faces_data.detection_data.num_faces_detected > MAX_ROI) {
3146 faces_data.detection_data.num_faces_detected = MAX_ROI;
3147 }
3148
3149 LOGH("[KPI Perf] FD_DEBUG : NUMBER_OF_FACES_DETECTED %d",
3150 faces_data.detection_data.num_faces_detected);
3151
3152 IF_META_AVAILABLE(cam_face_recog_data_t, p_recog_data,
3153 CAM_INTF_META_FACE_RECOG, metadata) {
3154 faces_data.recog_valid = true;
3155 faces_data.recog_data = *p_recog_data;
3156 }
3157
3158 IF_META_AVAILABLE(cam_face_blink_data_t, p_blink_data,
3159 CAM_INTF_META_FACE_BLINK, metadata) {
3160 faces_data.blink_valid = true;
3161 faces_data.blink_data = *p_blink_data;
3162 }
3163
3164 IF_META_AVAILABLE(cam_face_gaze_data_t, p_gaze_data,
3165 CAM_INTF_META_FACE_GAZE, metadata) {
3166 faces_data.gaze_valid = true;
3167 faces_data.gaze_data = *p_gaze_data;
3168 }
3169
3170 IF_META_AVAILABLE(cam_face_smile_data_t, p_smile_data,
3171 CAM_INTF_META_FACE_SMILE, metadata) {
3172 faces_data.smile_valid = true;
3173 faces_data.smile_data = *p_smile_data;
3174 }
3175
3176 IF_META_AVAILABLE(cam_face_landmarks_data_t, p_landmarks,
3177 CAM_INTF_META_FACE_LANDMARK, metadata) {
3178 faces_data.landmark_valid = true;
3179 faces_data.landmark_data = *p_landmarks;
3180 }
3181
3182 IF_META_AVAILABLE(cam_face_contour_data_t, p_contour,
3183 CAM_INTF_META_FACE_CONTOUR, metadata) {
3184 faces_data.contour_valid = true;
3185 faces_data.contour_data = *p_contour;
3186 }
3187 }
3188 }
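
/* Illustrative consumer sketch (hypothetical caller, not part of the HAL):
 * detection_data is always populated when face metadata is present, while
 * every other block must be guarded by its *_valid flag. The faces[] member
 * and the processFace() helper below are assumptions for illustration only.
 *
 *   cam_faces_data_t faces;
 *   fillFacesData(faces, metadata);
 *   for (int i = 0; i < faces.detection_data.num_faces_detected; i++) {
 *       processFace(faces.detection_data.faces[i]);  // hypothetical helper
 *   }
 *   if (faces.smile_valid) {
 *       // smile_data is only meaningful once smile_valid is set
 *   }
 */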
3189
3190 /*===========================================================================
3191 * FUNCTION : ~QCameraCbNotifier
3192 *
3193 * DESCRIPTION: Destructor for exiting the callback context.
3194 *
3195 * PARAMETERS : None
3196 *
3197 * RETURN : None
3198 *==========================================================================*/
3199 QCameraCbNotifier::~QCameraCbNotifier()
3200 {
3201 }
3202
3203 /*===========================================================================
3204 * FUNCTION : exit
3205 *
3206 * DESCRIPTION: exit notify thread.
3207 *
3208 * PARAMETERS : None
3209 *
3210 * RETURN : None
3211 *==========================================================================*/
3212 void QCameraCbNotifier::exit()
3213 {
3214 mActive = false;
3215 mProcTh.exit();
3216 }
3217
3218 /*===========================================================================
3219 * FUNCTION : releaseNotifications
3220 *
3221 * DESCRIPTION: callback for releasing data stored in the callback queue.
3222 *
3223 * PARAMETERS :
3224 * @data : data to be released
3225 * @user_data : context data
3226 *
3227 * RETURN : None
3228 *==========================================================================*/
3229 void QCameraCbNotifier::releaseNotifications(void *data, void *user_data)
3230 {
3231 qcamera_callback_argm_t *arg = ( qcamera_callback_argm_t * ) data;
3232
3233 if ( ( NULL != arg ) && ( NULL != user_data ) ) {
3234 if ( arg->release_cb ) {
3235 arg->release_cb(arg->user_data, arg->cookie, FAILED_TRANSACTION);
3236 }
3237 }
3238 }
3239
3240 /*===========================================================================
3241 * FUNCTION : matchSnapshotNotifications
3242 *
3243 * DESCRIPTION: matches snapshot data callbacks
3244 *
3245 * PARAMETERS :
3246 * @data : data to match
3247 * @user_data : context data
3248 *
3249 * RETURN : bool match
3250 * true - match found
3251 * false- match not found
3252 *==========================================================================*/
3253 bool QCameraCbNotifier::matchSnapshotNotifications(void *data,
3254 void */*user_data*/)
3255 {
3256 qcamera_callback_argm_t *arg = ( qcamera_callback_argm_t * ) data;
3257 if ( NULL != arg ) {
3258 if ( QCAMERA_DATA_SNAPSHOT_CALLBACK == arg->cb_type ) {
3259 return true;
3260 }
3261 }
3262
3263 return false;
3264 }
3265
3266 /*===========================================================================
3267 * FUNCTION : matchPreviewNotifications
3268 *
3269 * DESCRIPTION: matches preview data callbacks
3270 *
3271 * PARAMETERS :
3272 * @data : data to match
3273 * @user_data : context data
3274 *
3275 * RETURN : bool match
3276 * true - match found
3277 * false- match not found
3278 *==========================================================================*/
3279 bool QCameraCbNotifier::matchPreviewNotifications(void *data,
3280 void */*user_data*/)
3281 {
3282 qcamera_callback_argm_t *arg = ( qcamera_callback_argm_t * ) data;
3283 if (NULL != arg) {
3284 if ((QCAMERA_DATA_CALLBACK == arg->cb_type) &&
3285 (CAMERA_MSG_PREVIEW_FRAME == arg->msg_type)) {
3286 return true;
3287 }
3288 }
3289
3290 return false;
3291 }
3292
3293 /*===========================================================================
3294 * FUNCTION : matchTimestampNotifications
3295 *
3296 * DESCRIPTION: matches timestamp data callbacks
3297 *
3298 * PARAMETERS :
3299 * @data : data to match
3300 * @user_data : context data
3301 *
3302 * RETURN : bool match
3303 * true - match found
3304 * false- match not found
3305 *==========================================================================*/
3306 bool QCameraCbNotifier::matchTimestampNotifications(void *data,
3307 void */*user_data*/)
3308 {
3309 qcamera_callback_argm_t *arg = ( qcamera_callback_argm_t * ) data;
3310 if (NULL != arg) {
3311 if ((QCAMERA_DATA_TIMESTAMP_CALLBACK == arg->cb_type) &&
3312 (CAMERA_MSG_VIDEO_FRAME == arg->msg_type)) {
3313 return true;
3314 }
3315 }
3316
3317 return false;
3318 }
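
/* Illustrative predicate usage (the same pattern used by the STOP_DATA_PROC
 * case and by flushPreviewNotifications()/flushVideoNotifications() below;
 * flushNodes() is assumed to invoke the matcher on every queued node and
 * release those for which it returns true):
 *
 *   mDataQ.flushNodes(matchTimestampNotifications);  // drop pending video callbacks
 */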
3319
3320 /*===========================================================================
3321 * FUNCTION : cbNotifyRoutine
3322 *
3323  * DESCRIPTION: callback thread routine that dispatches queued notifications
3324  *              to the upper layers based on the received commands.
3325 *
3326 * PARAMETERS :
3327 * @data : context data
3328 *
3329 * RETURN : None
3330 *==========================================================================*/
3331 void * QCameraCbNotifier::cbNotifyRoutine(void * data)
3332 {
3333 int running = 1;
3334 int ret;
3335 QCameraCbNotifier *pme = (QCameraCbNotifier *)data;
3336 QCameraCmdThread *cmdThread = &pme->mProcTh;
3337 cmdThread->setName("CAM_cbNotify");
3338 uint8_t isSnapshotActive = FALSE;
3339 bool longShotEnabled = false;
3340 uint32_t numOfSnapshotExpected = 0;
3341 uint32_t numOfSnapshotRcvd = 0;
3342 int32_t cbStatus = NO_ERROR;
3343
3344 LOGD("E");
3345 do {
3346 do {
3347 ret = cam_sem_wait(&cmdThread->cmd_sem);
3348 if (ret != 0 && errno != EINVAL) {
3349 LOGD("cam_sem_wait error (%s)",
3350 strerror(errno));
3351 return NULL;
3352 }
3353 } while (ret != 0);
3354
3355 camera_cmd_type_t cmd = cmdThread->getCmd();
3356 LOGD("get cmd %d", cmd);
3357 switch (cmd) {
3358 case CAMERA_CMD_TYPE_START_DATA_PROC:
3359 {
3360 isSnapshotActive = TRUE;
3361 numOfSnapshotExpected = pme->mParent->numOfSnapshotsExpected();
3362 longShotEnabled = pme->mParent->isLongshotEnabled();
3363 LOGD("Num Snapshots Expected = %d",
3364 numOfSnapshotExpected);
3365 numOfSnapshotRcvd = 0;
3366 }
3367 break;
3368 case CAMERA_CMD_TYPE_STOP_DATA_PROC:
3369 {
3370 pme->mDataQ.flushNodes(matchSnapshotNotifications);
3371 isSnapshotActive = FALSE;
3372
3373 numOfSnapshotExpected = 0;
3374 numOfSnapshotRcvd = 0;
3375 }
3376 break;
3377 case CAMERA_CMD_TYPE_DO_NEXT_JOB:
3378 {
3379 qcamera_callback_argm_t *cb =
3380 (qcamera_callback_argm_t *)pme->mDataQ.dequeue();
3381 cbStatus = NO_ERROR;
3382 if (NULL != cb) {
3383 LOGD("cb type %d received",
3384 cb->cb_type);
3385
3386 if (pme->mParent->msgTypeEnabledWithLock(cb->msg_type)) {
3387 switch (cb->cb_type) {
3388 case QCAMERA_NOTIFY_CALLBACK:
3389 {
3390 if (cb->msg_type == CAMERA_MSG_FOCUS) {
3391 KPI_ATRACE_INT("Camera:AutoFocus", 0);
3392 LOGH("[KPI Perf] : PROFILE_SENDING_FOCUS_EVT_TO APP");
3393 }
3394 if (pme->mNotifyCb) {
3395 pme->mNotifyCb(cb->msg_type,
3396 cb->ext1,
3397 cb->ext2,
3398 pme->mCallbackCookie);
3399 } else {
3400 LOGW("notify callback not set!");
3401 }
3402 if (cb->release_cb) {
3403 cb->release_cb(cb->user_data, cb->cookie,
3404 cbStatus);
3405 }
3406 }
3407 break;
3408 case QCAMERA_DATA_CALLBACK:
3409 {
3410 if (pme->mDataCb) {
3411 pme->mDataCb(cb->msg_type,
3412 cb->data,
3413 cb->index,
3414 cb->metadata,
3415 pme->mCallbackCookie);
3416 } else {
3417 LOGW("data callback not set!");
3418 }
3419 if (cb->release_cb) {
3420 cb->release_cb(cb->user_data, cb->cookie,
3421 cbStatus);
3422 }
3423 }
3424 break;
3425 case QCAMERA_DATA_TIMESTAMP_CALLBACK:
3426 {
3427 if(pme->mDataCbTimestamp) {
3428 pme->mDataCbTimestamp(cb->timestamp,
3429 cb->msg_type,
3430 cb->data,
3431 cb->index,
3432 pme->mCallbackCookie);
3433 } else {
3434 LOGE("Timestamp data callback not set!");
3435 }
3436 if (cb->release_cb) {
3437 cb->release_cb(cb->user_data, cb->cookie,
3438 cbStatus);
3439 }
3440 }
3441 break;
3442 case QCAMERA_DATA_SNAPSHOT_CALLBACK:
3443 {
3444 if (TRUE == isSnapshotActive && pme->mDataCb ) {
3445 if (!longShotEnabled) {
3446 numOfSnapshotRcvd++;
3447 LOGI("Num Snapshots Received = %d Expected = %d",
3448 numOfSnapshotRcvd, numOfSnapshotExpected);
3449 if (numOfSnapshotExpected > 0 &&
3450 (numOfSnapshotExpected == numOfSnapshotRcvd)) {
3451 LOGI("Received all snapshots");
3452 // notify HWI that snapshot is done
3453 pme->mParent->processSyncEvt(QCAMERA_SM_EVT_SNAPSHOT_DONE,
3454 NULL);
3455 }
3456 }
3457 if (pme->mJpegCb) {
3458                         LOGI("Calling JPEG Callback!! for camera %d "
3459                                 "release_data %p "
3460                                 "frame_idx %d",
3461                                 pme->mParent->getCameraId(),
3462                                 cb->user_data,
3463                                 cb->frame_index);
3464 pme->mJpegCb(cb->msg_type, cb->data,
3465 cb->index, cb->metadata,
3466 pme->mJpegCallbackCookie,
3467 cb->frame_index, cb->release_cb,
3468 cb->cookie, cb->user_data);
3469                             // In case of a non-NULL JPEG cb we transfer
3470                             // ownership of the buffer to the muxer; hence
3471                             // release_cb must not be called here. The
3472                             // muxer releases the buffer once it is done
3473                             // processing it.
3474 } else if(pme->mDataCb){
3475 pme->mDataCb(cb->msg_type, cb->data, cb->index,
3476 cb->metadata, pme->mCallbackCookie);
3477 if (cb->release_cb) {
3478 cb->release_cb(cb->user_data, cb->cookie,
3479 cbStatus);
3480 }
3481 }
3482 }
3483 }
3484 break;
3485 default:
3486 {
3487 LOGE("invalid cb type %d",
3488 cb->cb_type);
3489 cbStatus = BAD_VALUE;
3490 if (cb->release_cb) {
3491 cb->release_cb(cb->user_data, cb->cookie,
3492 cbStatus);
3493 }
3494 }
3495 break;
3496                     }
3497 } else {
3498 LOGW("cb message type %d not enabled!",
3499 cb->msg_type);
3500 cbStatus = INVALID_OPERATION;
3501 if (cb->release_cb) {
3502 cb->release_cb(cb->user_data, cb->cookie, cbStatus);
3503 }
3504 }
3505 delete cb;
3506 } else {
3507 LOGW("invalid cb type passed");
3508 }
3509 }
3510 break;
3511 case CAMERA_CMD_TYPE_EXIT:
3512 {
3513 running = 0;
3514 pme->mDataQ.flush();
3515 }
3516 break;
3517 default:
3518 break;
3519 }
3520 } while (running);
3521 LOGD("X");
3522
3523 return NULL;
3524 }
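
/* Illustrative command flow (a sketch of how the routine above is driven,
 * based only on the commands it handles; sendCmd() argument values other than
 * the command itself are omitted here):
 *
 *   mProcTh.launch(cbNotifyRoutine, this);                  // setCallbacks()
 *   mProcTh.sendCmd(CAMERA_CMD_TYPE_START_DATA_PROC, ...);  // startSnapshots(): arm snapshot counting
 *   mProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, ...);      // notifyCallback(): one per queued callback
 *   mProcTh.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC, ...);   // stopSnapshots(): flush pending snapshots
 *   mProcTh.sendCmd(CAMERA_CMD_TYPE_EXIT, ...);             // exit(), assumed to be sent by mProcTh.exit()
 */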
3525
3526 /*===========================================================================
3527 * FUNCTION : notifyCallback
3528 *
3529  * DESCRIPTION: Enqueues pending callback notifications for the upper layers.
3530 *
3531 * PARAMETERS :
3532 * @cbArgs : callback arguments
3533 *
3534 * RETURN : int32_t type of status
3535 * NO_ERROR -- success
3536  *              non-zero failure code
3537 *==========================================================================*/
3538 int32_t QCameraCbNotifier::notifyCallback(qcamera_callback_argm_t &cbArgs)
3539 {
3540 if (!mActive) {
3541 LOGE("notify thread is not active");
3542 return UNKNOWN_ERROR;
3543 }
3544
3545 qcamera_callback_argm_t *cbArg = new qcamera_callback_argm_t();
3546 if (NULL == cbArg) {
3547 LOGE("no mem for qcamera_callback_argm_t");
3548 return NO_MEMORY;
3549 }
3550 memset(cbArg, 0, sizeof(qcamera_callback_argm_t));
3551 *cbArg = cbArgs;
3552
3553 if (mDataQ.enqueue((void *)cbArg)) {
3554 return mProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
3555 } else {
3556 LOGE("Error adding cb data into queue");
3557 delete cbArg;
3558 return UNKNOWN_ERROR;
3559 }
3560 }
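
/* Illustrative caller sketch (hypothetical values; field names are taken from
 * the dispatch switch in cbNotifyRoutine above, and m_cbNotifier is assumed to
 * be the notifier instance owned by the hardware interface):
 *
 *   qcamera_callback_argm_t cbArg;
 *   memset(&cbArg, 0, sizeof(cbArg));
 *   cbArg.cb_type  = QCAMERA_NOTIFY_CALLBACK;
 *   cbArg.msg_type = CAMERA_MSG_FOCUS;
 *   cbArg.ext1     = 1;   // hypothetical focus result payload
 *   if (m_cbNotifier.notifyCallback(cbArg) != NO_ERROR) {
 *       LOGE("Failed to enqueue focus notification");
 *   }
 */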
3561
3562 /*===========================================================================
3563 * FUNCTION : setCallbacks
3564 *
3565  * DESCRIPTION: Initializes the callback functions used for communication
3566  *              with the upper layers and launches the callback thread in
3567  *              which the callbacks will occur.
3568 *
3569 * PARAMETERS :
3570 * @notifyCb : notification callback
3571 * @dataCb : data callback
3572 * @dataCbTimestamp : data with timestamp callback
3573 * @callbackCookie : callback context data
3574 *
3575 * RETURN : None
3576 *==========================================================================*/
3577 void QCameraCbNotifier::setCallbacks(camera_notify_callback notifyCb,
3578 camera_data_callback dataCb,
3579 camera_data_timestamp_callback dataCbTimestamp,
3580 void *callbackCookie)
3581 {
3582 if ( ( NULL == mNotifyCb ) &&
3583 ( NULL == mDataCb ) &&
3584 ( NULL == mDataCbTimestamp ) &&
3585 ( NULL == mCallbackCookie ) ) {
3586 mNotifyCb = notifyCb;
3587 mDataCb = dataCb;
3588 mDataCbTimestamp = dataCbTimestamp;
3589 mCallbackCookie = callbackCookie;
3590 mActive = true;
3591 mProcTh.launch(cbNotifyRoutine, this);
3592 } else {
3593 LOGE("Camera callback notifier already initialized!");
3594 }
3595 }
3596
3597 /*===========================================================================
3598 * FUNCTION : setJpegCallBacks
3599 *
3600  * DESCRIPTION: Initializes the JPEG callback function used for communication
3601  *              with the upper layers. The callback is invoked from the
3602  *              already running notifier thread.
3603 *
3604 * PARAMETERS :
3605  * @jpegCb : JPEG data callback
3606 * @callbackCookie : callback context data
3607 *
3608 * RETURN : None
3609 *==========================================================================*/
3610 void QCameraCbNotifier::setJpegCallBacks(
3611 jpeg_data_callback jpegCb, void *callbackCookie)
3612 {
3613 LOGH("Setting JPEG Callback notifier");
3614 mJpegCb = jpegCb;
3615 mJpegCallbackCookie = callbackCookie;
3616 }
3617
3618 /*===========================================================================
3619 * FUNCTION : flushPreviewNotifications
3620 *
3621 * DESCRIPTION: flush all pending preview notifications
3622 * from the notifier queue
3623 *
3624 * PARAMETERS : None
3625 *
3626 * RETURN : int32_t type of status
3627 * NO_ERROR -- success
3628  *              non-zero failure code
3629 *==========================================================================*/
3630 int32_t QCameraCbNotifier::flushPreviewNotifications()
3631 {
3632 if (!mActive) {
3633 LOGE("notify thread is not active");
3634 return UNKNOWN_ERROR;
3635 }
3636 mDataQ.flushNodes(matchPreviewNotifications);
3637 return NO_ERROR;
3638 }
3639
3640 /*===========================================================================
3641 * FUNCTION : flushVideoNotifications
3642 *
3643 * DESCRIPTION: flush all pending video notifications
3644 * from the notifier queue
3645 *
3646 * PARAMETERS : None
3647 *
3648 * RETURN : int32_t type of status
3649 * NO_ERROR -- success
3650  *              non-zero failure code
3651 *==========================================================================*/
3652 int32_t QCameraCbNotifier::flushVideoNotifications()
3653 {
3654 if (!mActive) {
3655 LOGE("notify thread is not active");
3656 return UNKNOWN_ERROR;
3657 }
3658 mDataQ.flushNodes(matchTimestampNotifications);
3659 return NO_ERROR;
3660 }
3661
3662 /*===========================================================================
3663 * FUNCTION : startSnapshots
3664 *
3665 * DESCRIPTION: Enables snapshot mode
3666 *
3667 * PARAMETERS : None
3668 *
3669 * RETURN : int32_t type of status
3670 * NO_ERROR -- success
3671  *              non-zero failure code
3672 *==========================================================================*/
3673 int32_t QCameraCbNotifier::startSnapshots()
3674 {
3675 return mProcTh.sendCmd(CAMERA_CMD_TYPE_START_DATA_PROC, FALSE, TRUE);
3676 }
3677
3678 /*===========================================================================
3679 * FUNCTION : stopSnapshots
3680 *
3681 * DESCRIPTION: Disables snapshot processing mode
3682 *
3683 * PARAMETERS : None
3684 *
3685 * RETURN : None
3686 *==========================================================================*/
3687 void QCameraCbNotifier::stopSnapshots()
3688 {
3689 mProcTh.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC, FALSE, TRUE);
3690 }
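
/* Illustrative notifier lifecycle (call order inferred from the functions
 * above; m_cbNotifier is the assumed owning instance name):
 *
 *   m_cbNotifier.setCallbacks(notify_cb, data_cb, data_cb_timestamp, user);  // launches the notify thread
 *   m_cbNotifier.startSnapshots();   // enables snapshot counting
 *   // ... callbacks are queued via notifyCallback() and dispatched ...
 *   m_cbNotifier.stopSnapshots();    // flushes pending snapshot callbacks
 *   m_cbNotifier.exit();             // stops the notify thread
 */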
3691
3692 }; // namespace qcamera
3693