/* Copyright (c) 2016-2017, The Linux Foundation. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above
 *       copyright notice, this list of conditions and the following
 *       disclaimer in the documentation and/or other materials provided
 *       with the distribution.
 *     * Neither the name of The Linux Foundation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 */

#define LOG_TAG "DualFOVPP"
// System dependencies
#include <dlfcn.h>
#include <utils/Errors.h>
#include <stdio.h>
#include <stdlib.h>
// Camera dependencies
#include "QCameraDualFOVPP.h"
#include "QCameraTrace.h"
#include "cam_intf.h"
extern "C" {
#include "mm_camera_dbg.h"
}

#define LIB_PATH_LENGTH 100

namespace qcamera {

/*===========================================================================
 * FUNCTION   : QCameraDualFOVPP
 *
 * DESCRIPTION: constructor of QCameraDualFOVPP.
 *
 * PARAMETERS : None
 *
 * RETURN     : None
 *==========================================================================*/
QCameraDualFOVPP::QCameraDualFOVPP()
    : QCameraHALPP()
{
    m_dlHandle = NULL;
    m_pCaps = NULL;
}

/*===========================================================================
 * FUNCTION   : ~QCameraDualFOVPP
 *
 * DESCRIPTION: destructor of QCameraDualFOVPP.
 *
 * PARAMETERS : None
 *
 * RETURN     : None
 *==========================================================================*/
QCameraDualFOVPP::~QCameraDualFOVPP()
{
}

/*===========================================================================
 * FUNCTION   : init
 *
 * DESCRIPTION: initialization of QCameraDualFOVPP
 *
 * PARAMETERS :
 *   @bufNotifyCb      : callback function invoked after HALPP processing
 *   @getOutputCb      : callback function to request an output buffer
 *   @pUserData        : Parent of HALPP, i.e. QCameraPostProc
 *   @pStaticParam     : holds dual camera calibration data in an array and its size
 *                       (expected size is 264 bytes)
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCameraDualFOVPP::init(halPPBufNotify bufNotifyCb, halPPGetOutput getOutputCb,
        void *pUserData, void *pStaticParam)
{
    LOGD("E");
    int32_t rc = NO_ERROR;
    QCameraHALPP::init(bufNotifyCb, getOutputCb, pUserData);

    m_pCaps = (cam_capability_t *)pStaticParam;

    /* third-party libraries would be loaded here with dlopen/dlsym */
    doDualFovPPInit();

    LOGD("X");
    return rc;
}

/*===========================================================================
 * FUNCTION   : deinit
 *
 * DESCRIPTION: deinitialization of QCameraDualFOVPP
 *
 * PARAMETERS : None
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCameraDualFOVPP::deinit()
{
    int32_t rc = NO_ERROR;
    LOGD("E");

    m_dlHandle = NULL;

    QCameraHALPP::deinit();
    LOGD("X");
    return rc;
}

/*===========================================================================
 * FUNCTION   : start
 *
 * DESCRIPTION: start QCameraDualFOVPP
 *
 * PARAMETERS : None
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCameraDualFOVPP::start()
{
    int32_t rc = NO_ERROR;
    LOGD("E");

    rc = QCameraHALPP::start();

    LOGD("X");
    return rc;
}


/*===========================================================================
 * FUNCTION   : feedInput
 *
 * DESCRIPTION: function to feed input data.
 *              Enqueue the frame index to the input queue if it is a new frame,
 *              and add the input image data to the frame hash map.
 *
 * PARAMETERS :
 *   @pInputData : ptr to input data
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCameraDualFOVPP::feedInput(qcamera_hal_pp_data_t *pInputData)
{
    int32_t rc = NO_ERROR;
    LOGD("E");
    if (NULL != pInputData) {
        QCameraStream* pSnapshotStream = NULL;
        mm_camera_buf_def_t *pInputSnapshotBuf = getSnapshotBuf(pInputData, pSnapshotStream);
        if (pInputSnapshotBuf != NULL) {
            uint32_t frameIndex = pInputSnapshotBuf->frame_idx;
            std::vector<qcamera_hal_pp_data_t*> *pVector = getFrameVector(frameIndex);
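            // Wide and tele snapshots for the same capture arrive as separate
            // feedInput() calls that share the same frame_idx. The first arrival
            // creates a two-slot vector (WIDE_INPUT/TELE_INPUT) in m_frameMap and
            // enqueues the frame index; the second arrival completes the pair and
            // triggers the output buffer request below.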
            if (pVector == NULL) {
                LOGD("insert new frame index = %d", frameIndex);
                uint32_t *pFrameIndex = new uint32_t;
                *pFrameIndex = frameIndex;
                // allocate the vector first
                pVector = new std::vector<qcamera_hal_pp_data_t*>(WIDE_TELE_CAMERA_NUMBER);
                pVector->at(WIDE_INPUT) = NULL;
                pVector->at(TELE_INPUT) = NULL;
                // Add vector to the hash map
                m_frameMap[frameIndex] = pVector;
                // Enqueue the frame index (i.e. key of vector) to queue
                if (false == m_iuputQ.enqueue((void*)pFrameIndex)) {
                    LOGE("Input Q is not active!!!");
                    releaseData(pInputData);
                    m_frameMap.erase(frameIndex);
                    delete pFrameIndex;
                    delete pVector;
                    rc = INVALID_OPERATION;
                    return rc;
                }
            }
            pInputData->frameIndex = frameIndex;
            // Check if frame is from main wide camera
            bool bIsMain = true;
            uint32_t mainHandle = get_main_camera_handle(
                    pInputData->src_reproc_frame->camera_handle);
            if (mainHandle == 0) {
                bIsMain = false;
            }
            LOGD("mainHandle = %d, is main frame = %d", mainHandle, bIsMain);
            // Add input data to vector
            if (bIsMain) {
                pVector->at(WIDE_INPUT) = pInputData;
            } else {
                pVector->at(TELE_INPUT) = pInputData;
            }

            // request output buffer only if both wide and tele input data are received
            if (pVector->at(WIDE_INPUT) != NULL && pVector->at(TELE_INPUT) != NULL) {
                m_halPPGetOutputCB(frameIndex, m_pQCameraPostProc);
            }
        }
    } else {
        LOGE("pInput is NULL");
        rc = UNEXPECTED_NULL;
    }
    LOGD("X");
    return rc;
}

/*===========================================================================
 * FUNCTION   : feedOutput
 *
 * DESCRIPTION: function to feed output buffer and metadata
 *
 * PARAMETERS :
 *   @pOutputData : ptr to output data
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCameraDualFOVPP::feedOutput(qcamera_hal_pp_data_t *pOutputData)
{
    int32_t rc = NO_ERROR;
    LOGD("E");
    if (NULL != pOutputData) {
        uint32_t frameIndex = pOutputData->frameIndex;
        std::vector<qcamera_hal_pp_data_t*> *pVector = getFrameVector(frameIndex);
        // Get the main (Wide) input frame in order to get the output buffer length
        // and to copy the metadata buffer.
        if (pVector != NULL && pVector->at(WIDE_INPUT) != NULL) {
            qcamera_hal_pp_data_t *pInputData = pVector->at(WIDE_INPUT);
            mm_camera_super_buf_t *pInputFrame = pInputData->frame;
            QCameraStream* pSnapshotStream = NULL;
            QCameraStream* pMetadataStream = NULL;
            mm_camera_buf_def_t *pInputSnapshotBuf = getSnapshotBuf(pInputData, pSnapshotStream);
            mm_camera_buf_def_t *pInputMetadataBuf = getMetadataBuf(pInputData, pMetadataStream);
            mm_camera_super_buf_t *pOutputFrame = pOutputData->frame;
            mm_camera_buf_def_t *pOutputBufDefs = pOutputData->bufs;

            if (pInputSnapshotBuf == NULL || pInputMetadataBuf == NULL) {
                LOGE("cannot get snapshot or metadata buf def");
                releaseData(pOutputData);
                return UNEXPECTED_NULL;
            }
            if (pSnapshotStream == NULL || pMetadataStream == NULL) {
                LOGE("cannot get snapshot or metadata stream");
                releaseData(pOutputData);
                return UNEXPECTED_NULL;
            }

            // Copy main input frame info to output frame
            pOutputFrame->camera_handle = pInputFrame->camera_handle;
            pOutputFrame->ch_id = pInputFrame->ch_id;
            pOutputFrame->num_bufs = HAL_PP_NUM_BUFS; // snapshot and metadata
            pOutputFrame->bUnlockAEC = pInputFrame->bUnlockAEC;
            pOutputFrame->bReadyForPrepareSnapshot = pInputFrame->bReadyForPrepareSnapshot;

            // Reconstruction of output_frame super buffer
            pOutputFrame->bufs[0] = &pOutputBufDefs[0];
            pOutputFrame->bufs[1] = &pOutputBufDefs[1];

            // Allocate heap buffer for output image frame
            cam_frame_len_offset_t offset;
            memset(&offset, 0, sizeof(cam_frame_len_offset_t));
            LOGD("pInputSnapshotBuf->frame_len = %d", pInputSnapshotBuf->frame_len);
            rc = pOutputData->snapshot_heap->allocate(1, pInputSnapshotBuf->frame_len);
            if (rc < 0) {
                LOGE("Unable to allocate heap memory for image buf");
                releaseData(pOutputData);
                return NO_MEMORY;
            }
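            // The input buf_def is cloned first so that stream/frame bookkeeping
            // fields carry over; getBufDef() below is then expected to rebind the
            // clone (fd, buffer pointer, frame length) to the newly allocated heap
            // memory, so the output frame gets its own backing buffer.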
            pSnapshotStream->getFrameOffset(offset);
            memcpy(&pOutputBufDefs[0], pInputSnapshotBuf, sizeof(mm_camera_buf_def_t));
            LOGD("pOutputFrame->bufs[0]->fd = %d, pOutputFrame->bufs[0]->buffer = %x",
                    pOutputFrame->bufs[0]->fd, pOutputFrame->bufs[0]->buffer);
            pOutputData->snapshot_heap->getBufDef(offset, pOutputBufDefs[0], 0);
            LOGD("pOutputFrame->bufs[0]->fd = %d, pOutputFrame->bufs[0]->buffer = %x",
                    pOutputFrame->bufs[0]->fd, pOutputFrame->bufs[0]->buffer);

            // Allocate heap buffer for output metadata
            LOGD("pInputMetadataBuf->frame_len = %d", pInputMetadataBuf->frame_len);
            rc = pOutputData->metadata_heap->allocate(1, pInputMetadataBuf->frame_len);
            if (rc < 0) {
                LOGE("Unable to allocate heap memory for metadata buf");
                releaseData(pOutputData);
                return NO_MEMORY;
            }
            memset(&offset, 0, sizeof(cam_frame_len_offset_t));
            pMetadataStream->getFrameOffset(offset);
            memcpy(&pOutputBufDefs[1], pInputMetadataBuf, sizeof(mm_camera_buf_def_t));
            pOutputData->metadata_heap->getBufDef(offset, pOutputBufDefs[1], 0);
            // copy the whole metadata
            memcpy(pOutputBufDefs[1].buffer, pInputMetadataBuf->buffer,
                    pInputMetadataBuf->frame_len);

            // Enqueue output_data to m_outgoingQ
            if (false == m_outgoingQ.enqueue((void *)pOutputData)) {
                LOGE("outgoing Q is not active!!!");
                releaseData(pOutputData);
                rc = INVALID_OPERATION;
            }
        }
    } else {
        LOGE("pOutput is NULL");
        rc = UNEXPECTED_NULL;
    }
    LOGD("X");
    return rc;
}

/*===========================================================================
 * FUNCTION   : process
 *
 * DESCRIPTION: function to start the CP FOV blending process
 *
 * PARAMETERS : None
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCameraDualFOVPP::process()
{
    int32_t rc = NO_ERROR;

    /* dump in/out frames */
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.dualfov.dumpimg", prop, "0");
    int dumpimg = atoi(prop);
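    // Frame dumping is off by default; it can be enabled at runtime, e.g. with
    // "adb shell setprop persist.camera.dualfov.dumpimg 1", after which the wide,
    // tele and blended output YUV frames are written via dumpYUVtoFile() below.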

    LOGD("E");

    // TODO: dequeue from m_inputQ and start process logic
    // Start the blending process when it is ready
    if (canProcess()) {
        LOGI("start Dual FOV process");
        uint32_t *pFrameIndex = (uint32_t *)m_iuputQ.dequeue();
        if (pFrameIndex == NULL) {
            LOGE("frame index is null");
            return UNEXPECTED_NULL;
        }
        uint32_t frameIndex = *pFrameIndex;
        // Search the vector of input frames in the frame map
        std::vector<qcamera_hal_pp_data_t*> *pVector = getFrameVector(frameIndex);
        if (pVector == NULL) {
            LOGE("Cannot find vector of input frames");
            return UNEXPECTED_NULL;
        }
        // Get input and output frame buffers
        qcamera_hal_pp_data_t *pInputMainData =
                (qcamera_hal_pp_data_t *)pVector->at(WIDE_INPUT);
        if (pInputMainData == NULL) {
            LOGE("Cannot find input main data");
            return UNEXPECTED_NULL;
        }
        if (pInputMainData->src_reproc_frame == NULL) {
            LOGI("process pInputMainData->src_reproc_frame = NULL");
        }
        //mm_camera_super_buf_t *input_main_frame = input_main_data->frame;
        qcamera_hal_pp_data_t *pInputAuxData =
                (qcamera_hal_pp_data_t *)pVector->at(TELE_INPUT);
        if (pInputAuxData == NULL) {
            LOGE("Cannot find input aux data");
            return UNEXPECTED_NULL;
        }

        //mm_camera_super_buf_t *input_aux_frame = input_aux_data->frame;
        qcamera_hal_pp_data_t *pOutputData =
                (qcamera_hal_pp_data_t*)m_outgoingQ.dequeue();
        if (pOutputData == NULL) {
            LOGE("Cannot find output data");
            return UNEXPECTED_NULL;
        }

        QCameraStream* pMainSnapshotStream = NULL;
        QCameraStream* pMainMetadataStream = NULL;
        QCameraStream* pAuxSnapshotStream  = NULL;
        QCameraStream* pAuxMetadataStream  = NULL;

        mm_camera_buf_def_t *main_snapshot_buf =
                getSnapshotBuf(pInputMainData, pMainSnapshotStream);
        if (main_snapshot_buf == NULL) {
            LOGE("main_snapshot_buf is NULL");
            return UNEXPECTED_NULL;
        }
        mm_camera_buf_def_t *main_meta_buf = getMetadataBuf(pInputMainData, pMainMetadataStream);
        if (main_meta_buf == NULL) {
            LOGE("main_meta_buf is NULL");
            return UNEXPECTED_NULL;
        }
        mm_camera_buf_def_t *aux_snapshot_buf = getSnapshotBuf(pInputAuxData, pAuxSnapshotStream);
        if (aux_snapshot_buf == NULL) {
            LOGE("aux_snapshot_buf is NULL");
            return UNEXPECTED_NULL;
        }
        mm_camera_buf_def_t *aux_meta_buf = getMetadataBuf(pInputAuxData, pAuxMetadataStream);
        if (aux_meta_buf == NULL) {
            LOGE("aux_meta_buf is NULL");
            return UNEXPECTED_NULL;
        }

        mm_camera_super_buf_t *output_frame = pOutputData->frame;
        mm_camera_buf_def_t *output_snapshot_buf = output_frame->bufs[0];

        // Use offset info from reproc stream
        if (pMainSnapshotStream == NULL) {
            LOGE("pMainSnapshotStream is NULL");
            return UNEXPECTED_NULL;
        }
        cam_frame_len_offset_t frm_offset;
        pMainSnapshotStream->getFrameOffset(frm_offset);
        LOGI("Stream type:%d, stride:%d, scanline:%d, frame len:%d",
                pMainSnapshotStream->getMyType(),
                frm_offset.mp[0].stride, frm_offset.mp[0].scanline,
                frm_offset.frame_len);

        if (dumpimg) {
            dumpYUVtoFile((uint8_t *)main_snapshot_buf->buffer, frm_offset,
                    main_snapshot_buf->frame_idx, "wide");
            dumpYUVtoFile((uint8_t *)aux_snapshot_buf->buffer,  frm_offset,
                    aux_snapshot_buf->frame_idx,  "tele");
        }

        // Get input and output parameters
        dualfov_input_params_t inParams;
        if (pAuxSnapshotStream == NULL) {
            LOGE("pAuxSnapshotStream is NULL");
            return UNEXPECTED_NULL;
        }
        getInputParams(main_meta_buf, aux_meta_buf, pMainSnapshotStream, pAuxSnapshotStream,
                inParams);
        dumpInputParams(inParams);

        doDualFovPPProcess((const uint8_t *)main_snapshot_buf->buffer,
                        (const uint8_t *)aux_snapshot_buf->buffer,
                        inParams,
                        (uint8_t *)output_snapshot_buf->buffer);

        if (dumpimg) {
            dumpYUVtoFile((uint8_t *)output_snapshot_buf->buffer, frm_offset,
                    main_snapshot_buf->frame_idx, "out");
        }

        /* Clean and invalidate caches for the input and output buffers */
        pOutputData->snapshot_heap->cleanInvalidateCache(0);

        QCameraMemory *pMem = (QCameraMemory *)main_snapshot_buf->mem_info;
        pMem->invalidateCache(main_snapshot_buf->buf_idx);

        pMem = (QCameraMemory *)aux_snapshot_buf->mem_info;
        pMem->invalidateCache(aux_snapshot_buf->buf_idx);


        // Call the cb function to return output_data after processing.
        m_halPPBufNotifyCB(pOutputData, m_pQCameraPostProc);

        // also send input buffers back to postproc.
        m_halPPBufNotifyCB(pInputMainData, m_pQCameraPostProc);
        m_halPPBufNotifyCB(pInputAuxData, m_pQCameraPostProc);
        //releaseData(pInputMainData);
        //releaseData(pInputAuxData);

        // Release internal resources
        m_frameMap.erase(frameIndex);
        delete pFrameIndex;
        delete pVector;
    }
    LOGD("X");
    return rc;
}

/*===========================================================================
 * FUNCTION   : getSnapshotBuf
 *
 * DESCRIPTION: function to get the snapshot buf def and its stream from a frame
 *
 * PARAMETERS :
 *   @pData           : input frame super buffer
 *   @pSnapshotStream : snapshot stream that was found
 *
 * RETURN     : snapshot buf def
 *==========================================================================*/
mm_camera_buf_def_t* QCameraDualFOVPP::getSnapshotBuf(qcamera_hal_pp_data_t* pData,
        QCameraStream* &pSnapshotStream)
{
    mm_camera_buf_def_t *pBufDef = NULL;
    if (pData == NULL) {
        LOGE("Cannot find input frame super buffer");
        return pBufDef;
    }
    mm_camera_super_buf_t *pFrame = pData->frame;
    QCameraChannel *pChannel = m_pQCameraPostProc->getChannelByHandle(pFrame->ch_id);
    if (pChannel == NULL) {
        LOGE("Cannot find channel");
        return pBufDef;
    }
    // Search for input snapshot frame buf
    for (uint32_t i = 0; i < pFrame->num_bufs; i++) {
        pSnapshotStream = pChannel->getStreamByHandle(pFrame->bufs[i]->stream_id);
        if (pSnapshotStream != NULL) {
            if (pSnapshotStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
                pSnapshotStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
                    pBufDef = pFrame->bufs[i];
                    break;
            }
        }
    }
    return pBufDef;
}

/*===========================================================================
 * FUNCTION   : getMetadataBuf
 *
 * DESCRIPTION: function to get the metadata buf def and its stream from a frame
 *
 * PARAMETERS :
 *   @pData           : input frame super buffer
 *   @pMetadataStream : metadata stream that was found
 *
 * RETURN     : metadata buf def
 *==========================================================================*/
mm_camera_buf_def_t* QCameraDualFOVPP::getMetadataBuf(qcamera_hal_pp_data_t *pData,
        QCameraStream* &pMetadataStream)
{
    mm_camera_buf_def_t *pBufDef = NULL;
    if (pData == NULL) {
        LOGE("Cannot find input frame super buffer");
        return pBufDef;
    }
    mm_camera_super_buf_t* pFrame = pData->frame;
    QCameraChannel *pChannel =
            m_pQCameraPostProc->getChannelByHandle(pData->src_reproc_frame->ch_id);
    LOGD("src_reproc_frame num_bufs = %d", pData->src_reproc_frame->num_bufs);
    if (pChannel == NULL) {
        LOGE("Cannot find src_reproc_frame channel");
        return pBufDef;
    }
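    // Look for the metadata buffer in the original src_reproc_frame first (metadata
    // normally travels with the source frame); if it is not found there, fall back
    // to searching the reprocessed super buffer itself below.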
    for (uint32_t i = 0;
            (i < pData->src_reproc_frame->num_bufs); i++) {
        pMetadataStream = pChannel->getStreamByHandle(pData->src_reproc_frame->bufs[i]->stream_id);
        if (pData->src_reproc_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_METADATA) {
            pBufDef = pData->src_reproc_frame->bufs[i];
            LOGD("found metadata stream and buf in src_reproc_frame");
            break;
        }
    }
    if (pBufDef == NULL) {
        LOGD("frame num_bufs = %d", pFrame->num_bufs);
        pChannel = m_pQCameraPostProc->getChannelByHandle(pFrame->ch_id);
        if (pChannel == NULL) {
            LOGE("Cannot find frame channel");
            return pBufDef;
        }
        for (uint32_t i = 0; i < pFrame->num_bufs; i++) {
            pMetadataStream = pChannel->getStreamByHandle(pFrame->bufs[i]->stream_id);
            if (pMetadataStream != NULL) {
                LOGD("bufs[%d] stream_type = %d", i, pFrame->bufs[i]->stream_type);
                if (pFrame->bufs[i]->stream_type == CAM_STREAM_TYPE_METADATA) {
                    pBufDef = pFrame->bufs[i];
                    break;
                }
            }
        }
    }
    return pBufDef;
}

/*===========================================================================
 * FUNCTION   : canProcess
 *
 * DESCRIPTION: check whether the CP FOV blending process can start, i.e. both
 *              an input frame pair and an output buffer are queued
 *
 * RETURN     : true if processing can start, false otherwise
 *==========================================================================*/
bool QCameraDualFOVPP::canProcess()
{
    LOGD("E");
    bool ready = false;
    if (!m_iuputQ.isEmpty() && !m_outgoingQ.isEmpty()) {
        ready = true;
    }
    LOGD("X");
    return ready;
}

/*===========================================================================
 * FUNCTION   : getInputParams
 *
 * DESCRIPTION: Helper function to get input params from input metadata
 *==========================================================================*/
void QCameraDualFOVPP::getInputParams(mm_camera_buf_def_t *pMainMetaBuf,
        mm_camera_buf_def_t *pAuxMetaBuf, QCameraStream* pMainSnapshotStream,
        QCameraStream* pAuxSnapshotStream, dualfov_input_params_t& inParams)
{
    LOGD("E");
    memset(&inParams, 0, sizeof(dualfov_input_params_t));
    metadata_buffer_t *pMainMeta = (metadata_buffer_t *)pMainMetaBuf->buffer;
    metadata_buffer_t *pAuxMeta = (metadata_buffer_t *)pAuxMetaBuf->buffer;

    // Wide frame size
    cam_frame_len_offset_t offset;
    pMainSnapshotStream->getFrameOffset(offset);
    inParams.wide.width     = offset.mp[0].width;
    inParams.wide.height    = offset.mp[0].height;
    inParams.wide.stride    = offset.mp[0].stride;
    inParams.wide.scanline  = offset.mp[0].scanline;
    inParams.wide.frame_len = offset.frame_len;

    // Tele frame size
    pAuxSnapshotStream->getFrameOffset(offset);
    inParams.tele.width     = offset.mp[0].width;
    inParams.tele.height    = offset.mp[0].height;
    inParams.tele.stride    = offset.mp[0].stride;
    inParams.tele.scanline  = offset.mp[0].scanline;
    inParams.tele.frame_len = offset.frame_len;

    // user_zoom
    int32_t zoom_level = -1; // 0 means zoom 1x.
    IF_META_AVAILABLE(int32_t, userZoom, CAM_INTF_PARM_ZOOM, pMainMeta) {
        zoom_level = *userZoom;
        LOGD("zoom level in main meta:%d", zoom_level);
    }
    inParams.user_zoom = getUserZoomRatio(zoom_level);
    LOGI("dual fov total zoom ratio: %d", inParams.user_zoom);

    IF_META_AVAILABLE(int32_t, auxUserZoom, CAM_INTF_PARM_ZOOM, pAuxMeta) {
        LOGD("zoom level in aux meta:%d", *auxUserZoom);
    }

    IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, pMainMeta) {
        if (((*afState) == CAM_AF_STATE_FOCUSED_LOCKED) ||
            ((*afState) == CAM_AF_STATE_PASSIVE_FOCUSED)) {
            inParams.af_status = AF_STATUS_VALID;
        } else {
            inParams.af_status = AF_STATUS_INVALID;
        }
        LOGD("af state:%d, output af status:%d", *afState, inParams.af_status);
    }

    IF_META_AVAILABLE(uint32_t, auxAfState, CAM_INTF_META_AF_STATE, pAuxMeta) {
        int aux_af_status = 0;
        if (((*auxAfState) == CAM_AF_STATE_FOCUSED_LOCKED) ||
            ((*auxAfState) == CAM_AF_STATE_PASSIVE_FOCUSED)) {
            aux_af_status = AF_STATUS_VALID;
        } else {
            aux_af_status = AF_STATUS_INVALID;
        }
        LOGD("aux af state:%d, output af status:%d", *auxAfState, aux_af_status);
    }


    LOGD("X");
}


int32_t QCameraDualFOVPP::doDualFovPPInit()
{
    LOGD("E");
    int rc = NO_ERROR;

    LOGD("X");
    return rc;
}

int32_t QCameraDualFOVPP::doDualFovPPProcess(const uint8_t* pWide, const uint8_t* pTele,
                                                    dualfov_input_params_t inParams,
                                                    uint8_t* pOut)
{
    LOGW("E.");

    // trace begin

    // half image from main, and half image from tele
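    // A minimal composition sketch rather than a real FOV blend: the buffers are
    // assumed to be NV12 (a stride x scanline Y plane followed by an interleaved
    // UV plane at half height), and wide and tele are assumed to share the wide
    // frame's stride/scanline. The top half of the output (Y rows [0, scanline/2)
    // and the matching UV rows) is copied from the wide frame, the bottom half
    // from the tele frame.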

    // Y
    memcpy(pOut, pWide, inParams.wide.stride * inParams.wide.scanline / 2);
    memcpy(pOut  + inParams.wide.stride * inParams.wide.scanline / 2,
           pTele + inParams.wide.stride * inParams.wide.scanline / 2,
           inParams.wide.stride * inParams.wide.scanline / 2);

    // UV
    uint32_t uv_offset = inParams.wide.stride * inParams.wide.scanline;
    memcpy(pOut  + uv_offset,
           pWide + uv_offset,
           inParams.wide.stride * (inParams.wide.scanline / 2) / 2);
    memcpy(pOut  + uv_offset + inParams.wide.stride * (inParams.wide.scanline / 2) / 2,
           pTele + uv_offset + inParams.wide.stride * (inParams.wide.scanline / 2) / 2,
           inParams.wide.stride * (inParams.wide.scanline / 2) / 2);

    // trace end

    LOGW("X.");
    return NO_ERROR;
}

uint32_t QCameraDualFOVPP::getUserZoomRatio(int32_t zoom_level)
{
    uint32_t zoom_ratio = 4096;
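    // The returned ratio appears to use Q12 fixed point, i.e. 4096 represents 1.0x
    // (dumpInputParams() divides by 4096.0). zoom_ratio_tbl[] entries are percent
    // based, so e.g. a table value of 200 (2.0x) maps to 4096 * 200 / 100 = 8192.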

    LOGD("E. input zoom level:%d", zoom_level);

    if (zoom_level < 0) {
        LOGW("invalid zoom level!");
        /* got the zoom value from QCamera2HWI Parameters */
        zoom_level = 0;
    }

    // user_zoom_ratio = qcom_zoom_ratio * 4096 / 100
    if (m_pCaps != NULL) {
        zoom_ratio *= m_pCaps->zoom_ratio_tbl[zoom_level];
        zoom_ratio /= 100;
        LOGD("converted zoom ratio:%d", zoom_ratio);
    }

    LOGD("X. zoom_ratio:%d", zoom_ratio);
    return zoom_ratio;
}

void QCameraDualFOVPP::dumpYUVtoFile(const uint8_t* pBuf, cam_frame_len_offset_t offset,
        uint32_t idx, const char* name_prefix)
{
    LOGD("E.");
    char filename[256];

    snprintf(filename, sizeof(filename), QCAMERA_DUMP_FRM_LOCATION"%s_%dx%d_%d.yuv",
                name_prefix, offset.mp[0].stride, offset.mp[0].scanline, idx);

    QCameraHALPP::dumpYUVtoFile(pBuf, (const char*)filename, offset.frame_len);

    LOGD("X.");
}

void QCameraDualFOVPP::dumpInputParams(const dualfov_input_params_t& p)
{
    LOGD("E");

    const cam_frame_size_t* s = NULL;

    s = &p.wide;
    LOGD("wide frame size: %d, %d, stride:%d, scanline:%d",
            s->width, s->height, s->stride, s->scanline);

    s = &p.tele;
    LOGD("tele frame size: %d, %d, stride:%d, scanline:%d",
            s->width, s->height, s->stride, s->scanline);

    LOGD("zoom ratio: %f", p.user_zoom / 4096.0);
    LOGD("X");
}


/*===========================================================================
 * FUNCTION   : dumpOISData
 *
 * DESCRIPTION: Read Sensor OIS data from metadata and dump it
 *
 * PARAMETERS :
 *   @pMetadata : Frame metadata
 *
 * RETURN     : None
 *
 *==========================================================================*/
void QCameraDualFOVPP::dumpOISData(metadata_buffer_t* pMetadata)
{
    if (!pMetadata) {
        LOGD("OIS data not available");
        return;
    }

    IF_META_AVAILABLE(cam_ois_data_t, pOisData, CAM_INTF_META_OIS_READ_DATA, pMetadata) {
        LOGD("Ois Data: data size: %d", pOisData->size);
        uint8_t *data = pOisData->data;
        if (pOisData->size == 8) {
            LOGD("Ois Data Buffer : %d %d %d %d %d %d %d %d ",
                    data[0], data[1], data[2], data[3], data[4], data[5], data[6], data[7]);
        }
    }
    return;
}


} // namespace qcamera