1 /* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2 *
3 * Redistribution and use in source and binary forms, with or without
4 * modification, are permitted provided that the following conditions are
5 * met:
6 * * Redistributions of source code must retain the above copyright
7 * notice, this list of conditions and the following disclaimer.
8 * * Redistributions in binary form must reproduce the above
9 * copyright notice, this list of conditions and the following
10 * disclaimer in the documentation and/or other materials provided
11 * with the distribution.
12 * * Neither the name of The Linux Foundation nor the names of its
13 * contributors may be used to endorse or promote products derived
14 * from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 *
28 */
29
30 #define LOG_TAG "QCameraPostProc"
31
32 // System dependencies
33 #include <fcntl.h>
34 #include <stdio.h>
35 #include <stdlib.h>
36 #include <utils/Errors.h>
37
38 // Camera dependencies
39 #include "QCamera2HWI.h"
40 #include "QCameraPostProc.h"
41 #include "QCameraTrace.h"
42 #include "QCameraHALPP.h"
43 #include "QCameraDualFOVPP.h"
44 extern "C" {
45 #include "mm_camera_dbg.h"
46 }
47
48 namespace qcamera {
49
50 const char *QCameraPostProcessor::STORE_LOCATION = "/sdcard/img_%d.jpg";
51
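// Note: this macro declares a local index variable (jpeg_bufs), so wrap each
// invocation in its own block scope when it is used more than once in a
// function (as the destructor does below).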
52 #define FREE_JPEG_OUTPUT_BUFFER(ptr,cnt) \
53 int jpeg_bufs; \
54 for (jpeg_bufs = 0; jpeg_bufs < (int)cnt; jpeg_bufs++) { \
55 if (ptr[jpeg_bufs] != NULL) { \
56 free(ptr[jpeg_bufs]); \
57 ptr[jpeg_bufs] = NULL; \
58 } \
59 }
60
61 /*===========================================================================
62 * FUNCTION : QCameraPostProcessor
63 *
64 * DESCRIPTION: constructor of QCameraPostProcessor.
65 *
66 * PARAMETERS :
67 * @cam_ctrl : ptr to HWI object
68 *
69 * RETURN : None
70 *==========================================================================*/
71 QCameraPostProcessor::QCameraPostProcessor(QCamera2HardwareInterface *cam_ctrl)
72 : m_parent(cam_ctrl),
73 mJpegCB(NULL),
74 mJpegUserData(NULL),
75 mJpegClientHandle(0),
76 mJpegSessionId(0),
77 mJpegSessionIdHalPP(0),
78 m_pJpegExifObj(NULL),
79 m_bThumbnailNeeded(TRUE),
80 mPPChannelCount(0),
81 m_bInited(FALSE),
82 m_inputPPQ(releaseOngoingPPData, this),
83 m_ongoingPPQ(releaseOngoingPPData, this),
84 m_inputJpegQ(releaseJpegData, this),
85 m_ongoingJpegQ(releaseJpegData, this),
86 m_inputRawQ(releaseRawData, this),
87 mSaveFrmCnt(0),
88 mUseSaveProc(false),
89 mUseJpegBurst(false),
90 mJpegMemOpt(true),
91 m_JpegOutputMemCount(0),
92 m_JpegOutputMemCountHALPP(0),
93 pJpegSrcStream(NULL),
94 mNewJpegSessionNeeded(true),
95 mNewJpegSessionNeededHalPP(true),
96 m_bufCountPPQ(0),
97 m_PPindex(0),
98 m_halPPType(QCAMERA_HAL_PP_TYPE_UNDEFINED),
99 m_halPP(NULL)
100 {
101 memset(&mJpegHandle, 0, sizeof(mJpegHandle));
102 memset(&mJpegMpoHandle, 0, sizeof(mJpegMpoHandle));
103 memset(&m_pJpegOutputMem, 0, sizeof(m_pJpegOutputMem));
104 memset(m_pJpegOutputMemHalPP, 0, sizeof(void *) * MM_JPEG_MAX_BUF);
105 memset(mPPChannels, 0, sizeof(mPPChannels));
106 m_DataMem = NULL;
107 mOfflineDataBufs = NULL;
108 pthread_mutex_init(&m_reprocess_lock,NULL);
109 }
110
111 /*===========================================================================
112 * FUNCTION : ~QCameraPostProcessor
113 *
114 * DESCRIPTION: destructor of QCameraPostProcessor.
115 *
116 * PARAMETERS : None
117 *
118 * RETURN : None
119 *==========================================================================*/
120 QCameraPostProcessor::~QCameraPostProcessor()
121 {
122 {FREE_JPEG_OUTPUT_BUFFER(m_pJpegOutputMem,m_JpegOutputMemCount);}
123 {FREE_JPEG_OUTPUT_BUFFER(m_pJpegOutputMemHalPP, m_JpegOutputMemCountHALPP);}
124 if (m_pJpegExifObj != NULL) {
125 delete m_pJpegExifObj;
126 m_pJpegExifObj = NULL;
127 }
128 for (int8_t i = 0; i < mPPChannelCount; i++) {
129 QCameraChannel *pChannel = mPPChannels[i];
130 if ( pChannel != NULL ) {
131 pChannel->stop();
132 delete pChannel;
133 pChannel = NULL;
134 }
135 }
136 if (m_halPP != NULL) {
137 delete m_halPP;
138 m_halPP = NULL;
139 }
140 mPPChannelCount = 0;
141 pthread_mutex_destroy(&m_reprocess_lock);
142 }
143
144 /*===========================================================================
145 * FUNCTION : setJpegHandle
146 *
147 * DESCRIPTION: set JPEG client handles
148 *
149 * PARAMETERS :
150 * @pJpegHandle : JPEG ops handle
151 * @pJpegMpoHandle : MPO JPEG ops handle
152 * @clientHandle : JPEG client handle
153 *
154 * RETURN : int32_t type of status
155 * NO_ERROR -- success
156 * non-zero failure code
157 *==========================================================================*/
158 int32_t QCameraPostProcessor::setJpegHandle(mm_jpeg_ops_t *pJpegHandle,
159 mm_jpeg_mpo_ops_t *pJpegMpoHandle, uint32_t clientHandle)
160 {
161 LOGH("E mJpegClientHandle: %d, clientHandle: %d",
162 mJpegClientHandle, clientHandle);
163
164 if(pJpegHandle) {
165 memcpy(&mJpegHandle, pJpegHandle, sizeof(mm_jpeg_ops_t));
166 }
167
168 if(pJpegMpoHandle) {
169 memcpy(&mJpegMpoHandle, pJpegMpoHandle, sizeof(mm_jpeg_mpo_ops_t));
170 }
171 mJpegClientHandle = clientHandle;
172 LOGH("X mJpegClientHandle: %d, clientHandle: %d",
173 mJpegClientHandle, clientHandle);
174 return NO_ERROR;
175 }
176
177 /*===========================================================================
178 * FUNCTION : init
179 *
180 * DESCRIPTION: initialization of postprocessor
181 *
182 * PARAMETERS :
183 * @jpeg_cb : callback to handle jpeg event from mm-camera-interface
184 * @user_data : user data ptr for jpeg callback
185 *
186 * RETURN : int32_t type of status
187 * NO_ERROR -- success
188 * non-zero failure code
189 *==========================================================================*/
190 int32_t QCameraPostProcessor::init(jpeg_encode_callback_t jpeg_cb, void *user_data)
191 {
192 int32_t rc = NO_ERROR;
193 mJpegCB = jpeg_cb;
194 mJpegUserData = user_data;
195 m_dataProcTh.launch(dataProcessRoutine, this);
196 m_saveProcTh.launch(dataSaveRoutine, this);
197 m_parent->mParameters.setReprocCount();
198
199 /* get setting from mParameters to decide whether to create the HAL PP block,
200 * e.g. via mParameters.getHalPPType().
201 *
202 * read from a system property for now.
203 */
204 char prop[PROPERTY_VALUE_MAX];
205 memset(prop, 0, sizeof(prop));
206 property_get("persist.camera.halpp", prop, "0");
207
208 m_halPPType = (HALPPType)atoi(prop);
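// fall back to UNDEFINED if the property value is outside the valid range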
209 if (m_halPPType < QCAMERA_HAL_PP_TYPE_UNDEFINED ||
210 m_halPPType >= QCAMERA_HAL_PP_TYPE_MAX) {
211 m_halPPType = QCAMERA_HAL_PP_TYPE_UNDEFINED;
212 }
213 LOGH("m_halPPType:%d", m_halPPType);
214
215 if (m_parent->isDualCamera() && ((m_halPPType == QCAMERA_HAL_PP_TYPE_DUAL_FOV) ||
216 (m_halPPType == QCAMERA_HAL_PP_TYPE_BOKEH) ||
217 (m_halPPType == QCAMERA_HAL_PP_TYPE_CLEARSIGHT))) {
218 rc = initHALPP();
219 }
220
221 m_bInited = TRUE;
222 return rc;
223 }
224
225 /*===========================================================================
226 * FUNCTION : deinit
227 *
228 * DESCRIPTION: de-initialization of postprocessor
229 *
230 * PARAMETERS : None
231 *
232 * RETURN : int32_t type of status
233 * NO_ERROR -- success
234 * non-zero failure code
235 *==========================================================================*/
236 int32_t QCameraPostProcessor::deinit()
237 {
238 if (m_bInited == TRUE) {
239 m_dataProcTh.exit();
240 m_saveProcTh.exit();
241 if (m_halPP != NULL) {
242 m_halPP->deinit();
243 m_halPPType = QCAMERA_HAL_PP_TYPE_UNDEFINED;
244 }
245 m_bInited = FALSE;
246 }
247 return NO_ERROR;
248 }
249
250 /*===========================================================================
251 * FUNCTION : start
252 *
253 * DESCRIPTION: start postprocessor. Data process thread and data notify thread
254 * will be launched.
255 *
256 * PARAMETERS :
257 * @pSrcChannel : source channel obj ptr that possibly needs reprocess
258 *
259 * RETURN : int32_t type of status
260 * NO_ERROR -- success
261 * non-zero failure code
262 *
263 * NOTE : if any reprocess is needed, a reprocess channel/stream
264 * will be started.
265 *==========================================================================*/
266 int32_t QCameraPostProcessor::start(QCameraChannel *pSrcChannel)
267 {
268 char prop[PROPERTY_VALUE_MAX];
269 int32_t rc = NO_ERROR;
270 QCameraChannel *pInputChannel = pSrcChannel;
271
272 LOGH("E ");
273 if (m_bInited == FALSE) {
274 LOGE("postproc not initialized yet");
275 return UNKNOWN_ERROR;
276 }
277
278 if (m_DataMem != NULL) {
279 m_DataMem->release(m_DataMem);
280 m_DataMem = NULL;
281 }
282
283 if (pInputChannel == NULL) {
284 LOGE("Input Channel for pproc is NULL.");
285 return UNKNOWN_ERROR;
286 }
287
288 if ( m_parent->needReprocess() ) {
289 for (int8_t i = 0; i < mPPChannelCount; i++) {
290 // Delete previous reproc channel
291 QCameraReprocessChannel *pChannel = mPPChannels[i];
292 if (pChannel != NULL) {
293 pChannel->stop();
294 delete pChannel;
295 pChannel = NULL;
296 }
297 }
298 mPPChannelCount = 0;
299
300 m_bufCountPPQ = 0;
301 if (!m_parent->isLongshotEnabled()) {
302 m_parent->mParameters.setReprocCount();
303 }
304
305 if (m_parent->mParameters.getManualCaptureMode() >=
306 CAM_MANUAL_CAPTURE_TYPE_3) {
307 mPPChannelCount = m_parent->mParameters.getReprocCount() - 1;
308 } else {
309 mPPChannelCount = m_parent->mParameters.getReprocCount();
310 }
311
312 // Create all reproc channels and start channel
313 for (int8_t i = 0; i < mPPChannelCount; i++) {
314 mPPChannels[i] = m_parent->addReprocChannel(pInputChannel, i);
315 if (mPPChannels[i] == NULL) {
316 LOGE("cannot add multi reprocess channel i = %d", i);
317 return UNKNOWN_ERROR;
318 }
319 rc = mPPChannels[i]->start();
320 if (rc != 0) {
321 LOGE("cannot start multi reprocess channel i = %d", i);
322 delete mPPChannels[i];
323 mPPChannels[i] = NULL;
324 return UNKNOWN_ERROR;
325 }
326 pInputChannel = static_cast<QCameraChannel *>(mPPChannels[i]);
327 }
328 }
329
330 if (m_halPP != NULL && m_parent->needHALPP()) {
331 LOGD("HALPP is need, call QCameraHALPP::start() here");
332 rc = m_halPP->start();
333 }
334
335 property_get("persist.camera.longshot.save", prop, "0");
336 mUseSaveProc = atoi(prop) > 0 ? true : false;
337
338 m_PPindex = 0;
339 m_InputMetadata.clear();
340 m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_START_DATA_PROC, TRUE, FALSE);
341 m_parent->m_cbNotifier.startSnapshots();
342 LOGH("X rc = %d", rc);
343 return rc;
344 }
345
346 /*===========================================================================
347 * FUNCTION : stop
348 *
349 * DESCRIPTION: stop postprocessor. Data process and notify thread will be stopped.
350 *
351 * PARAMETERS : None
352 *
353 * RETURN : int32_t type of status
354 * NO_ERROR -- success
355 * non-zero failure code
356 *
357 * NOTE : reprocess channel will be stopped and deleted if there is any
358 *==========================================================================*/
359 int32_t QCameraPostProcessor::stop()
360 {
361 if (m_bInited == TRUE) {
362 m_parent->m_cbNotifier.stopSnapshots();
363
364 if (m_DataMem != NULL) {
365 m_DataMem->release(m_DataMem);
366 m_DataMem = NULL;
367 }
368
369 // dataProc thread needs to process "stop" as a sync call because aborting the jpeg job must be synchronous
370 m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC, TRUE, TRUE);
371 }
372 // stop reproc channel if exists
373 for (int8_t i = 0; i < mPPChannelCount; i++) {
374 QCameraReprocessChannel *pChannel = mPPChannels[i];
375 if (pChannel != NULL) {
376 pChannel->stop();
377 delete pChannel;
378 pChannel = NULL;
379 mPPChannels[i] = NULL;
380 }
381 }
382 mPPChannelCount = 0;
383 m_PPindex = 0;
384 m_InputMetadata.clear();
385
386 if (mOfflineDataBufs != NULL) {
387 mOfflineDataBufs->deallocate();
388 delete mOfflineDataBufs;
389 mOfflineDataBufs = NULL;
390 }
391
392 if (m_halPP != NULL && m_parent->needHALPP()) {
393 LOGD("HALPP is need, call QCameraHALPP::stop() here");
394 m_halPP->stop();
395 }
396
397 return NO_ERROR;
398 }
399
400 /*===========================================================================
401 * FUNCTION : createJpegSession
402 *
403 * DESCRIPTION: start JPEG session in parallel to reprocess to reduce the KPI
404 *
405 * PARAMETERS :
406 * @pSrcChannel : source channel obj ptr that possibly needs reprocess
407 *
408 * RETURN : int32_t type of status
409 * NO_ERROR -- success
410 * non-zero failure code
411 *==========================================================================*/
412 int32_t QCameraPostProcessor::createJpegSession(QCameraChannel *pSrcChannel)
413 {
414 int32_t rc = NO_ERROR;
415
416 LOGH("E ");
417 if (m_bInited == FALSE) {
418 LOGE("postproc not initialized yet");
419 return UNKNOWN_ERROR;
420 }
421
422 if (pSrcChannel == NULL) {
423 LOGE("Input Channel for pproc is NULL.");
424 return UNKNOWN_ERROR;
425 }
426
427 if (mPPChannelCount > 0) {
428 QCameraChannel *pChannel = NULL;
429 int ppChannel_idx = mPPChannelCount - 1;
430 pChannel = m_parent->needReprocess() ? mPPChannels[ppChannel_idx] :
431 pSrcChannel;
432 QCameraStream *pSnapshotStream = NULL;
433 QCameraStream *pThumbStream = NULL;
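// a dedicated thumbnail stream is used only when the thumbnail is not
// generated from the main image and, in ZSL mode, only when the snapshot
// and preview flip modes match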
434 bool thumb_stream_needed = ((!m_parent->isZSLMode() ||
435 (m_parent->mParameters.getFlipMode(CAM_STREAM_TYPE_SNAPSHOT) ==
436 m_parent->mParameters.getFlipMode(CAM_STREAM_TYPE_PREVIEW))) &&
437 !m_parent->mParameters.generateThumbFromMain());
438
439 if (pChannel == NULL) {
440 LOGE("Input Channel for pproc is NULL for index %d.",
441 ppChannel_idx);
442 return UNKNOWN_ERROR;
443 }
444
445 for (uint32_t i = 0; i < pChannel->getNumOfStreams(); ++i) {
446 QCameraStream *pStream = pChannel->getStreamByIndex(i);
447
448 if ( NULL == pStream ) {
449 break;
450 }
451
452 if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
453 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
454 pSnapshotStream = pStream;
455 }
456
457 if ((thumb_stream_needed) &&
458 (pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
459 pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
460 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
461 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_POSTVIEW))) {
462 pThumbStream = pStream;
463 }
464 }
465
466 // If thumbnail is not part of the reprocess channel, then
467 // try to get it from the source channel
468 if ((thumb_stream_needed) && (NULL == pThumbStream) &&
469 (pChannel == mPPChannels[ppChannel_idx])) {
470 for (uint32_t i = 0; i < pSrcChannel->getNumOfStreams(); ++i) {
471 QCameraStream *pStream = pSrcChannel->getStreamByIndex(i);
472
473 if ( NULL == pStream ) {
474 break;
475 }
476
477 if (pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
478 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
479 pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
480 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_PREVIEW)) {
481 pThumbStream = pStream;
482 }
483 }
484 }
485
486 if ( NULL != pSnapshotStream ) {
487 mm_jpeg_encode_params_t encodeParam;
488 memset(&encodeParam, 0, sizeof(mm_jpeg_encode_params_t));
489 rc = getJpegEncodingConfig(encodeParam, pSnapshotStream, pThumbStream);
490 if (rc != NO_ERROR) {
491 LOGE("error getting encoding config");
492 return rc;
493 }
494 LOGH("[KPI Perf] : call jpeg create_session");
495
496 rc = mJpegHandle.create_session(mJpegClientHandle,
497 &encodeParam,
498 &mJpegSessionId);
499 if (rc != NO_ERROR) {
500 LOGE("error creating a new jpeg encoding session");
501 return rc;
502 }
503 pJpegSrcStream = pSnapshotStream;
504 mNewJpegSessionNeeded = false;
505 }
506 }
507 LOGH("X ");
508 return rc;
509 }
510
511 /*===========================================================================
512 * FUNCTION : getJpegEncodingConfig
513 *
514 * DESCRIPTION: function to prepare encoding job information
515 *
516 * PARAMETERS :
517 * @encode_parm : param to be filled with encoding configuration
518 *
519 * RETURN : int32_t type of status
520 * NO_ERROR -- success
521 * non-zero failure code
522 *==========================================================================*/
523 int32_t QCameraPostProcessor::getJpegEncodingConfig(mm_jpeg_encode_params_t& encode_parm,
524 QCameraStream *main_stream,
525 QCameraStream *thumb_stream,
526 const mm_camera_super_buf_t *halpp_out_buf)
527 {
528 LOGD("E");
529 int32_t ret = NO_ERROR;
530 size_t out_size;
531
532 char prop[PROPERTY_VALUE_MAX];
533 property_get("persist.camera.jpeg_burst", prop, "0");
534 mUseJpegBurst = (atoi(prop) > 0) && !mUseSaveProc;
535 encode_parm.burst_mode = mUseJpegBurst;
536
537 cam_rect_t crop;
538 memset(&crop, 0, sizeof(cam_rect_t));
539 main_stream->getCropInfo(crop);
540
541 cam_dimension_t src_dim, dst_dim;
542 memset(&src_dim, 0, sizeof(cam_dimension_t));
543 memset(&dst_dim, 0, sizeof(cam_dimension_t));
544 main_stream->getFrameDimension(src_dim);
545
546 LOGD("src stream dimesion:%dx%d", src_dim.width, src_dim.height);
547
548 bool hdr_output_crop = m_parent->mParameters.isHDROutputCropEnabled();
549 if (hdr_output_crop && crop.height) {
550 dst_dim.height = crop.height;
551 } else {
552 dst_dim.height = src_dim.height;
553 }
554 if (hdr_output_crop && crop.width) {
555 dst_dim.width = crop.width;
556 } else {
557 dst_dim.width = src_dim.width;
558 }
559
560 // set rotation only when no online rotation or offline pp rotation is done before
561 if (!m_parent->needRotationReprocess()) {
562 encode_parm.rotation = m_parent->mParameters.getJpegRotation();
563 }
564
565 encode_parm.main_dim.src_dim = src_dim;
566 encode_parm.main_dim.dst_dim = dst_dim;
567
568 m_dst_dim = dst_dim;
569
570 encode_parm.jpeg_cb = mJpegCB;
571 encode_parm.userdata = mJpegUserData;
572
573 m_bThumbnailNeeded = TRUE; // encode thumbnail by default
574 // system property to disable thumbnail encoding in order to reduce power;
575 // thumbnail encoding defaults to TRUE, set this property explicitly
576 // to disable it
577 property_get("persist.camera.tn.disable", prop, "0");
578 if (atoi(prop) == 1) {
579 m_bThumbnailNeeded = FALSE;
580 LOGH("m_bThumbnailNeeded is %d", m_bThumbnailNeeded);
581 }
582 cam_dimension_t thumbnailSize;
583 memset(&thumbnailSize, 0, sizeof(cam_dimension_t));
584 m_parent->getThumbnailSize(thumbnailSize);
585 if (thumbnailSize.width == 0 || thumbnailSize.height == 0) {
586 // (0,0) means no thumbnail
587 m_bThumbnailNeeded = FALSE;
588 }
589 encode_parm.encode_thumbnail = m_bThumbnailNeeded;
590
591 // get color format
592 cam_format_t img_fmt = CAM_FORMAT_YUV_420_NV12;
593 main_stream->getFormat(img_fmt);
594 encode_parm.color_format = getColorfmtFromImgFmt(img_fmt);
595
596 // get jpeg quality
597 uint32_t val = m_parent->getJpegQuality();
598 if (0U < val) {
599 encode_parm.quality = val;
600 } else {
601 LOGH("Using default JPEG quality");
602 encode_parm.quality = 85;
603 }
604 cam_frame_len_offset_t main_offset;
605 memset(&main_offset, 0, sizeof(cam_frame_len_offset_t));
606 main_stream->getFrameOffset(main_offset);
607
608 LOGD("frame offset info: len:%d, w:%d, h:%d, stride:%d, scanline:%d",
609 main_offset.frame_len, main_offset.mp[0].width, main_offset.mp[0].height,
610 main_offset.mp[0].stride, main_offset.mp[0].scanline);
611
612 // src buf config
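// a HAL PP output superbuf, when provided, is used as the single JPEG source
// buffer; otherwise the main stream buffers are used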
613 if (halpp_out_buf == NULL) {
614 QCameraMemory *pStreamMem = main_stream->getStreamBufs();
615 if (pStreamMem == NULL) {
616 LOGE("cannot get stream bufs from main stream");
617 ret = BAD_VALUE;
618 goto on_error;
619 }
620 encode_parm.num_src_bufs = pStreamMem->getCnt();
621 for (uint32_t i = 0; i < encode_parm.num_src_bufs; i++) {
622 camera_memory_t *stream_mem = pStreamMem->getMemory(i, false);
623 if (stream_mem != NULL) {
624 encode_parm.src_main_buf[i].index = i;
625 encode_parm.src_main_buf[i].buf_size = stream_mem->size;
626 encode_parm.src_main_buf[i].buf_vaddr = (uint8_t *)stream_mem->data;
627 encode_parm.src_main_buf[i].fd = pStreamMem->getFd(i);
628 encode_parm.src_main_buf[i].format = MM_JPEG_FMT_YUV;
629 encode_parm.src_main_buf[i].offset = main_offset;
630 }
631 }
632 } else {
633 LOGH("use halpp output super buffer as jpeg input buffer!");
634
635 /* only one buffer from halpp output */
636 encode_parm.num_src_bufs = 1;
637 for (uint32_t i = 0; i < encode_parm.num_src_bufs; i++) {
638 encode_parm.src_main_buf[i].index = i;
639 encode_parm.src_main_buf[i].buf_size = halpp_out_buf->bufs[0]->frame_len;
640 encode_parm.src_main_buf[i].buf_vaddr = (uint8_t *)halpp_out_buf->bufs[0]->buffer;
641 encode_parm.src_main_buf[i].fd = halpp_out_buf->bufs[0]->fd;
642 encode_parm.src_main_buf[i].format = MM_JPEG_FMT_YUV;
643 encode_parm.src_main_buf[i].offset = main_offset;
644 LOGD("src main buf: idx %d, size:%d, vaddr:%p, fd:%d",
645 encode_parm.src_main_buf[i].index,
646 encode_parm.src_main_buf[i].buf_size,
647 encode_parm.src_main_buf[i].buf_vaddr,
648 encode_parm.src_main_buf[i].fd);
649 }
650 }
651
652 LOGI("Src Buffer cnt = %d, res = %dX%d len = %d rot = %d "
653 "src_dim = %dX%d dst_dim = %dX%d",
654 encode_parm.num_src_bufs,
655 main_offset.mp[0].width, main_offset.mp[0].height,
656 main_offset.frame_len, encode_parm.rotation,
657 src_dim.width, src_dim.height,
658 dst_dim.width, dst_dim.height);
659
660 if (m_bThumbnailNeeded == TRUE) {
661 m_parent->getThumbnailSize(encode_parm.thumb_dim.dst_dim);
662
663 if (thumb_stream == NULL) {
664 thumb_stream = main_stream;
665 }
666 if (((90 == m_parent->mParameters.getJpegRotation())
667 || (270 == m_parent->mParameters.getJpegRotation()))
668 && (m_parent->needRotationReprocess())) {
669 // swap thumbnail dimensions
670 cam_dimension_t tmp_dim = encode_parm.thumb_dim.dst_dim;
671 encode_parm.thumb_dim.dst_dim.width = tmp_dim.height;
672 encode_parm.thumb_dim.dst_dim.height = tmp_dim.width;
673 }
674
675 if (halpp_out_buf == NULL) {
676 QCameraMemory * pStreamMem = thumb_stream->getStreamBufs();
677 if (pStreamMem == NULL) {
678 LOGE("cannot get stream bufs from thumb stream");
679 ret = BAD_VALUE;
680 goto on_error;
681 }
682 cam_frame_len_offset_t thumb_offset;
683 memset(&thumb_offset, 0, sizeof(cam_frame_len_offset_t));
684 thumb_stream->getFrameOffset(thumb_offset);
685 encode_parm.num_tmb_bufs = pStreamMem->getCnt();
686 for (uint32_t i = 0; i < pStreamMem->getCnt(); i++) {
687 camera_memory_t *stream_mem = pStreamMem->getMemory(i, false);
688 if (stream_mem != NULL) {
689 encode_parm.src_thumb_buf[i].index = i;
690 encode_parm.src_thumb_buf[i].buf_size = stream_mem->size;
691 encode_parm.src_thumb_buf[i].buf_vaddr = (uint8_t *)stream_mem->data;
692 encode_parm.src_thumb_buf[i].fd = pStreamMem->getFd(i);
693 encode_parm.src_thumb_buf[i].format = MM_JPEG_FMT_YUV;
694 encode_parm.src_thumb_buf[i].offset = thumb_offset;
695 }
696 }
697
698 cam_format_t img_fmt_thumb = CAM_FORMAT_YUV_420_NV12;
699 thumb_stream->getFormat(img_fmt_thumb);
700 encode_parm.thumb_color_format = getColorfmtFromImgFmt(img_fmt_thumb);
701
702 // crop is the same if frame is the same
703 if (thumb_stream != main_stream) {
704 memset(&crop, 0, sizeof(cam_rect_t));
705 thumb_stream->getCropInfo(crop);
706 }
707
708 memset(&src_dim, 0, sizeof(cam_dimension_t));
709 thumb_stream->getFrameDimension(src_dim);
710 encode_parm.thumb_dim.src_dim = src_dim;
711
712 if (!m_parent->needRotationReprocess()) {
713 encode_parm.thumb_rotation = m_parent->mParameters.getJpegRotation();
714 }
715 encode_parm.thumb_dim.crop = crop;
716 encode_parm.thumb_from_postview =
717 !m_parent->mParameters.generateThumbFromMain() &&
718 (img_fmt_thumb != CAM_FORMAT_YUV_420_NV12_UBWC) &&
719 (m_parent->mParameters.useJpegExifRotation() ||
720 m_parent->mParameters.getJpegRotation() == 0);
721
722 if (encode_parm.thumb_from_postview &&
723 m_parent->mParameters.useJpegExifRotation()){
724 encode_parm.thumb_rotation =
725 m_parent->mParameters.getJpegExifRotation();
726 }
727 } else {
728 LOGH("use halpp output super buffer for thumbnail!");
729
730 // use main buf for thumbnail encoding in this case.
731 encode_parm.num_tmb_bufs = encode_parm.num_src_bufs;
732 for (uint32_t i = 0; i < encode_parm.num_tmb_bufs; i++) {
733 memcpy(&encode_parm.src_thumb_buf[i], &encode_parm.src_main_buf[i],
734 sizeof(mm_jpeg_buf_t));
735 }
736 encode_parm.thumb_color_format = encode_parm.color_format;
737
738 // copy params from src main frame
739 encode_parm.thumb_dim.src_dim = encode_parm.main_dim.src_dim;
740 encode_parm.thumb_rotation = encode_parm.rotation;
741 encode_parm.thumb_dim.crop = encode_parm.main_dim.crop;
742
743 encode_parm.thumb_from_postview = FALSE;
744 }
745
746 LOGI("Src THUMB buf_cnt = %d, res = %dX%d len = %d rot = %d "
747 "src_dim = %dX%d, dst_dim = %dX%d",
748 encode_parm.num_tmb_bufs,
749 encode_parm.src_thumb_buf[0].offset.mp[0].width,
750 encode_parm.src_thumb_buf[0].offset.mp[0].height,
751 encode_parm.src_thumb_buf[0].offset.frame_len,
752 encode_parm.thumb_rotation,
753 encode_parm.thumb_dim.src_dim.width,
754 encode_parm.thumb_dim.src_dim.height,
755 encode_parm.thumb_dim.dst_dim.width,
756 encode_parm.thumb_dim.dst_dim.height);
757 }
758
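// one destination buffer by default; MAX_JPEG_BURST in burst mode, and one per
// source buffer when the JPEG memory optimization (mJpegMemOpt) is enabled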
759 encode_parm.num_dst_bufs = 1;
760 if ((halpp_out_buf == NULL && mUseJpegBurst) ||
761 (halpp_out_buf != NULL && mUseJpegBurstHalPP)) {
762 encode_parm.num_dst_bufs = MAX_JPEG_BURST;
763 }
764
765 encode_parm.get_memory = NULL;
766 out_size = main_offset.frame_len;
767 if (mJpegMemOpt) {
768 encode_parm.get_memory = getJpegMemory;
769 encode_parm.put_memory = releaseJpegMemory;
770 out_size = sizeof(omx_jpeg_ouput_buf_t);
771 encode_parm.num_dst_bufs = encode_parm.num_src_bufs;
772 }
773
774 if (halpp_out_buf == NULL) {
775 m_JpegOutputMemCount = (uint32_t)encode_parm.num_dst_bufs;
776 for (uint32_t i = 0; i < m_JpegOutputMemCount; i++) {
777 if (m_pJpegOutputMem[i] != NULL)
778 free(m_pJpegOutputMem[i]);
779 omx_jpeg_ouput_buf_t omx_out_buf;
780 memset(&omx_out_buf, 0, sizeof(omx_jpeg_ouput_buf_t));
781 omx_out_buf.handle = this;
782 // allocate output buf for jpeg encoding
783 m_pJpegOutputMem[i] = malloc(out_size);
784
785 if (NULL == m_pJpegOutputMem[i]) {
786 ret = NO_MEMORY;
787 LOGE("initHeapMem for jpeg, ret = NO_MEMORY");
788 goto on_error;
789 }
790
791 if (mJpegMemOpt) {
792 memcpy(m_pJpegOutputMem[i], &omx_out_buf, sizeof(omx_out_buf));
793 }
794
795 encode_parm.dest_buf[i].index = i;
796 encode_parm.dest_buf[i].buf_size = main_offset.frame_len;
797 encode_parm.dest_buf[i].buf_vaddr = (uint8_t *)m_pJpegOutputMem[i];
798 encode_parm.dest_buf[i].fd = -1;
799 encode_parm.dest_buf[i].format = MM_JPEG_FMT_YUV;
800 encode_parm.dest_buf[i].offset = main_offset;
801 }
802 } else {
803 m_JpegOutputMemCountHALPP = (uint32_t)encode_parm.num_dst_bufs;
804 LOGD("num dst bufs:%d", encode_parm.num_dst_bufs);
805 for (uint32_t i = 0; i < m_JpegOutputMemCountHALPP; i++) {
806 if (m_pJpegOutputMemHalPP[i] != NULL) {
807 free(m_pJpegOutputMemHalPP[i]);
808 m_pJpegOutputMemHalPP[i] = NULL;
809 }
810 omx_jpeg_ouput_buf_t omx_out_buf;
811 memset(&omx_out_buf, 0, sizeof(omx_jpeg_ouput_buf_t));
812 omx_out_buf.handle = this;
813
814 // allocate output buf for jpeg encoding
815 m_pJpegOutputMemHalPP[i] = malloc(out_size);
816 if (NULL == m_pJpegOutputMemHalPP[i]) {
817 ret = NO_MEMORY;
818 LOGE("initHeapMem for jpeg, ret = NO_MEMORY");
819 goto on_error;
820 }
821
822 if (mJpegMemOpt) {
823 memcpy(m_pJpegOutputMemHalPP[i], &omx_out_buf, sizeof(omx_out_buf));
824 }
825
826 encode_parm.dest_buf[i].index = i;
827 encode_parm.dest_buf[i].buf_size = main_offset.frame_len;
828 encode_parm.dest_buf[i].buf_vaddr = (uint8_t *)m_pJpegOutputMemHalPP[i];
829 encode_parm.dest_buf[i].fd = -1;
830 encode_parm.dest_buf[i].format = MM_JPEG_FMT_YUV;
831 encode_parm.dest_buf[i].offset = main_offset;
832 }
833 }
834
835 LOGD("X");
836 return NO_ERROR;
837
838 on_error:
839 FREE_JPEG_OUTPUT_BUFFER(m_pJpegOutputMem, m_JpegOutputMemCount);
840
841 LOGD("X with error %d", ret);
842 return ret;
843 }
844
845 /*===========================================================================
846 * FUNCTION : sendEvtNotify
847 *
848 * DESCRIPTION: send event notify through notify callback registered by upper layer
849 *
850 * PARAMETERS :
851 * @msg_type: msg type of notify
852 * @ext1 : extension
853 * @ext2 : extension
854 *
855 * RETURN : int32_t type of status
856 * NO_ERROR -- success
857 * non-zero failure code
858 *==========================================================================*/
859 int32_t QCameraPostProcessor::sendEvtNotify(int32_t msg_type,
860 int32_t ext1,
861 int32_t ext2)
862 {
863 return m_parent->sendEvtNotify(msg_type, ext1, ext2);
864 }
865
866 /*===========================================================================
867 * FUNCTION : sendDataNotify
868 *
869 * DESCRIPTION: enqueue data into dataNotify thread
870 *
871 * PARAMETERS :
872 * @msg_type: data callback msg type
873 * @data : ptr to data memory struct
874 * @index : index to data buffer
875 * @metadata: ptr to meta data buffer if there is any
876 * @release_data : ptr to struct indicating if data needs to be released
877 * after notify
878 * @super_buf_frame_idx : super buffer frame index
879 *
880 * RETURN : int32_t type of status
881 * NO_ERROR -- success
882 * non-zero failure code
883 *==========================================================================*/
884 int32_t QCameraPostProcessor::sendDataNotify(int32_t msg_type,
885 camera_memory_t *data,
886 uint8_t index,
887 camera_frame_metadata_t *metadata,
888 qcamera_release_data_t *release_data,
889 uint32_t super_buf_frame_idx)
890 {
891 qcamera_data_argm_t *data_cb = (qcamera_data_argm_t *)malloc(sizeof(qcamera_data_argm_t));
892 if (NULL == data_cb) {
893 LOGE("no mem for acamera_data_argm_t");
894 return NO_MEMORY;
895 }
896 memset(data_cb, 0, sizeof(qcamera_data_argm_t));
897 data_cb->msg_type = msg_type;
898 data_cb->data = data;
899 data_cb->index = index;
900 data_cb->metadata = metadata;
901 if (release_data != NULL) {
902 data_cb->release_data = *release_data;
903 }
904
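// ownership of data_cb passes to the notify path: releaseNotifyData is
// registered as the release callback and is also invoked directly on failure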
905 qcamera_callback_argm_t cbArg;
906 memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
907 cbArg.cb_type = QCAMERA_DATA_SNAPSHOT_CALLBACK;
908 cbArg.msg_type = msg_type;
909 cbArg.data = data;
910 cbArg.metadata = metadata;
911 cbArg.user_data = data_cb;
912 cbArg.cookie = this;
913 cbArg.release_cb = releaseNotifyData;
914 cbArg.frame_index = super_buf_frame_idx;
915 int rc = m_parent->m_cbNotifier.notifyCallback(cbArg);
916 if ( NO_ERROR != rc ) {
917 LOGE("Error enqueuing jpeg data into notify queue");
918 releaseNotifyData(data_cb, this, UNKNOWN_ERROR);
919 return UNKNOWN_ERROR;
920 }
921
922 return rc;
923 }
924
925 /*===========================================================================
926 * FUNCTION : validatePostProcess
927 *
928 * DESCRIPTION: Verify output buffer count of pp module
929 *
930 * PARAMETERS :
931 * @frame : process frame received from mm-camera-interface
932 *
933 * RETURN : bool type of status
934 * TRUE -- success
935 * FALSE failure
936 *==========================================================================*/
937 bool QCameraPostProcessor::validatePostProcess(mm_camera_super_buf_t *frame)
938 {
939 bool status = TRUE;
940 QCameraChannel *pChannel = NULL;
941 QCameraReprocessChannel *m_pReprocChannel = NULL;
942
943 if (frame == NULL) {
944 return status;
945 }
946
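// find the reprocess channel whose source channel produced this frame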
947 pChannel = m_parent->getChannelByHandle(frame->ch_id);
948 for (int8_t i = 0; i < mPPChannelCount; i++) {
949 if (pChannel == mPPChannels[i]->getSrcChannel()) {
950 m_pReprocChannel = mPPChannels[i];
951 break;
952 }
953 }
954
955 if ((m_pReprocChannel != NULL) && (pChannel == m_pReprocChannel->getSrcChannel())) {
956 QCameraStream *pStream = NULL;
957 for (uint8_t i = 0; i < m_pReprocChannel->getNumOfStreams(); i++) {
958 pStream = m_pReprocChannel->getStreamByIndex(i);
959 if (pStream && (m_inputPPQ.getCurrentSize() > 0) &&
960 (m_ongoingPPQ.getCurrentSize() >= pStream->getNumQueuedBuf())) {
961 LOGW("Out of PP Buffer PPQ = %d ongoingQ = %d Jpeg = %d onJpeg = %d",
962 m_inputPPQ.getCurrentSize(), m_ongoingPPQ.getCurrentSize(),
963 m_inputJpegQ.getCurrentSize(), m_ongoingJpegQ.getCurrentSize());
964 status = FALSE;
965 break;
966 }
967 }
968 }
969 return status;
970 }
971
972 /*===========================================================================
973 * FUNCTION : getOfflinePPInputBuffer
974 *
975 * DESCRIPTION: Function to generate offline post proc buffer
976 *
977 * PARAMETERS :
978 * @src_frame : process frame received from mm-camera-interface
979 *
980 * RETURN : Buffer pointer if successful
981 * : NULL in case of failures
982 *==========================================================================*/
983 mm_camera_buf_def_t *QCameraPostProcessor::getOfflinePPInputBuffer(
984 mm_camera_super_buf_t *src_frame)
985 {
986 mm_camera_buf_def_t *mBufDefs = NULL;
987 QCameraChannel *pChannel = NULL;
988 QCameraStream *src_pStream = NULL;
989 mm_camera_buf_def_t *data_frame = NULL;
990 mm_camera_buf_def_t *meta_frame = NULL;
991
992 if (mOfflineDataBufs == NULL) {
993 LOGE("Offline Buffer not allocated");
994 return NULL;
995 }
996
997 uint32_t num_bufs = mOfflineDataBufs->getCnt();
998 size_t bufDefsSize = num_bufs * sizeof(mm_camera_buf_def_t);
999 mBufDefs = (mm_camera_buf_def_t *)malloc(bufDefsSize);
1000 if (mBufDefs == NULL) {
1001 LOGE("No memory");
1002 return NULL;
1003 }
1004 memset(mBufDefs, 0, bufDefsSize);
1005
1006 pChannel = m_parent->getChannelByHandle(src_frame->ch_id);
1007 for (uint32_t i = 0; i < src_frame->num_bufs; i++) {
1008 src_pStream = pChannel->getStreamByHandle(
1009 src_frame->bufs[i]->stream_id);
1010 if (src_pStream != NULL) {
1011 if (src_pStream->getMyType() == CAM_STREAM_TYPE_RAW) {
1012 LOGH("Found RAW input stream");
1013 data_frame = src_frame->bufs[i];
1014 } else if (src_pStream->getMyType() == CAM_STREAM_TYPE_METADATA){
1015 LOGH("Found Metada input stream");
1016 meta_frame = src_frame->bufs[i];
1017 }
1018 }
1019 }
1020
1021 if ((src_pStream != NULL) && (data_frame != NULL)) {
1022 cam_frame_len_offset_t offset;
1023 memset(&offset, 0, sizeof(cam_frame_len_offset_t));
1024 src_pStream->getFrameOffset(offset);
1025 for (uint32_t i = 0; i < num_bufs; i++) {
1026 mBufDefs[i] = *data_frame;
1027 mOfflineDataBufs->getBufDef(offset, mBufDefs[i], i);
1028
1029 LOGD("Dumping RAW data on offline buffer");
1030 /*Actual data memcpy just for verification*/
1031 memcpy(mBufDefs[i].buffer, data_frame->buffer,
1032 mBufDefs[i].frame_len);
1033 }
1034 releaseSuperBuf(src_frame, CAM_STREAM_TYPE_RAW);
1035 } else {
1036 free(mBufDefs);
1037 mBufDefs = NULL;
1038 }
1039
1040 LOGH("mBufDefs = %p", mBufDefs);
1041 return mBufDefs;
1042 }
1043
1044 /*===========================================================================
1045 * FUNCTION : processData
1046 *
1047 * DESCRIPTION: enqueue data into dataProc thread
1048 *
1049 * PARAMETERS :
1050 * @frame : process frame received from mm-camera-interface
1051 *
1052 * RETURN : int32_t type of status
1053 * NO_ERROR -- success
1054 * non-zero failure code
1055 *
1056 * NOTE : depending on whether offline reprocess is needed, the received frame
1057 * will be sent to either the postprocess input queue or jpeg encoding
1058 *==========================================================================*/
1059 int32_t QCameraPostProcessor::processData(mm_camera_super_buf_t *frame)
1060 {
1061 if (m_bInited == FALSE) {
1062 LOGE("postproc not initialized yet");
1063 return UNKNOWN_ERROR;
1064 }
1065
1066 if (frame == NULL) {
1067 LOGE("Invalid parameter");
1068 return UNKNOWN_ERROR;
1069 }
1070
1071 mm_camera_buf_def_t *meta_frame = NULL;
1072 for (uint32_t i = 0; i < frame->num_bufs; i++) {
1073 // look through input superbuf
1074 if (frame->bufs[i]->stream_type == CAM_STREAM_TYPE_METADATA) {
1075 meta_frame = frame->bufs[i];
1076 break;
1077 }
1078 }
1079 if (meta_frame != NULL) {
1080 // update metadata for frame-based parameters
1081 m_parent->updateMetadata((metadata_buffer_t *)meta_frame->buffer);
1082 }
1083
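// From here the frame takes one of three paths: the offline reprocess input
// queue, the raw data path for NV16/NV21 picture formats, or direct JPEG
// encoding.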
1084 if (m_parent->needReprocess()) {
1085 if ((!m_parent->isLongshotEnabled() &&
1086 !m_parent->m_stateMachine.isNonZSLCaptureRunning()) ||
1087 (m_parent->isLongshotEnabled() &&
1088 m_parent->isCaptureShutterEnabled())) {
1089 //play shutter sound
1090 m_parent->playShutter();
1091 }
1092
1093 ATRACE_INT("Camera:Reprocess", 1);
1094 LOGH("need reprocess");
1095
1096 // enqueue to post proc input queue
1097 qcamera_pp_data_t *pp_request_job =
1098 (qcamera_pp_data_t *)malloc(sizeof(qcamera_pp_data_t));
1099 if (pp_request_job == NULL) {
1100 LOGE("No memory for pproc job");
1101 return NO_MEMORY;
1102 }
1103 memset(pp_request_job, 0, sizeof(qcamera_pp_data_t));
1104 pp_request_job->src_frame = frame;
1105 pp_request_job->src_reproc_frame = frame;
1106 pp_request_job->reprocCount = 0;
1107 pp_request_job->ppChannelIndex = 0;
1108
1109 if ((NULL != frame) &&
1110 (0 < frame->num_bufs)
1111 && (m_parent->isRegularCapture())) {
1112 /*Regular capture. Source stream will be deleted*/
1113 mm_camera_buf_def_t *bufs = NULL;
1114 uint32_t num_bufs = frame->num_bufs;
1115 bufs = new mm_camera_buf_def_t[num_bufs];
1116 if (NULL == bufs) {
1117 LOGE("Unable to allocate cached buffers");
1118 return NO_MEMORY;
1119 }
1120
1121 for (uint32_t i = 0; i < num_bufs; i++) {
1122 bufs[i] = *frame->bufs[i];
1123 frame->bufs[i] = &bufs[i];
1124 }
1125 pp_request_job->src_reproc_bufs = bufs;
1126
1127 // Don't release source frame after encoding
1128 // at this point the source channel will not exist.
1129 pp_request_job->reproc_frame_release = true;
1130 }
1131
1132 if (mOfflineDataBufs != NULL) {
1133 pp_request_job->offline_reproc_buf =
1134 getOfflinePPInputBuffer(frame);
1135 if (pp_request_job->offline_reproc_buf != NULL) {
1136 pp_request_job->offline_buffer = true;
1137 }
1138 }
1139
1140 if (false == m_inputPPQ.enqueue((void *)pp_request_job)) {
1141 LOGW("Input PP Q is not active!!!");
1142 releaseSuperBuf(frame);
1143 free(frame);
1144 free(pp_request_job);
1145 frame = NULL;
1146 pp_request_job = NULL;
1147 return NO_ERROR;
1148 }
1149 if (m_parent->mParameters.isAdvCamFeaturesEnabled()
1150 && (meta_frame != NULL)) {
1151 m_InputMetadata.add(meta_frame);
1152 }
1153 } else if (m_parent->mParameters.isNV16PictureFormat() ||
1154 m_parent->mParameters.isNV21PictureFormat()) {
1155 //check if raw frame information is needed.
1156 if(m_parent->mParameters.isYUVFrameInfoNeeded())
1157 setYUVFrameInfo(frame);
1158
1159 processRawData(frame);
1160 } else {
1161 //play shutter sound
1162 if(!m_parent->m_stateMachine.isNonZSLCaptureRunning() &&
1163 !m_parent->mLongshotEnabled)
1164 m_parent->playShutter();
1165
1166 LOGH("no need offline reprocess, sending to jpeg encoding");
1167 qcamera_jpeg_data_t *jpeg_job =
1168 (qcamera_jpeg_data_t *)malloc(sizeof(qcamera_jpeg_data_t));
1169 if (jpeg_job == NULL) {
1170 LOGE("No memory for jpeg job");
1171 return NO_MEMORY;
1172 }
1173
1174 memset(jpeg_job, 0, sizeof(qcamera_jpeg_data_t));
1175 jpeg_job->src_frame = frame;
1176
1177 if (meta_frame != NULL) {
1178 // fill in meta data frame ptr
1179 jpeg_job->metadata = (metadata_buffer_t *)meta_frame->buffer;
1180 }
1181
1182 // enqueue to jpeg input queue
1183 if (!m_inputJpegQ.enqueue((void *)jpeg_job)) {
1184 LOGW("Input Jpeg Q is not active!!!");
1185 releaseJpegJobData(jpeg_job);
1186 free(jpeg_job);
1187 jpeg_job = NULL;
1188 return NO_ERROR;
1189 }
1190 }
1191
1192 m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
1193 return NO_ERROR;
1194 }
1195
1196 /*===========================================================================
1197 * FUNCTION : processRawData
1198 *
1199 * DESCRIPTION: enqueue raw data into dataProc thread
1200 *
1201 * PARAMETERS :
1202 * @frame : process frame received from mm-camera-interface
1203 *
1204 * RETURN : int32_t type of status
1205 * NO_ERROR -- success
1206 * non-zero failure code
1207 *==========================================================================*/
1208 int32_t QCameraPostProcessor::processRawData(mm_camera_super_buf_t *frame)
1209 {
1210 if (m_bInited == FALSE) {
1211 LOGE("postproc not initialized yet");
1212 return UNKNOWN_ERROR;
1213 }
1214
1215 // enqueue to raw input queue
1216 if (m_inputRawQ.enqueue((void *)frame)) {
1217 m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
1218 } else {
1219 LOGW("m_inputRawQ is not active!!!");
1220 releaseSuperBuf(frame);
1221 free(frame);
1222 frame = NULL;
1223 }
1224 return NO_ERROR;
1225 }
1226
1227 /*===========================================================================
1228 * FUNCTION : processJpegEvt
1229 *
1230 * DESCRIPTION: process jpeg event from mm-jpeg-interface.
1231 *
1232 * PARAMETERS :
1233 * @evt : payload of jpeg event, including information about jpeg encoding
1234 * status, jpeg size and so on.
1235 *
1236 * RETURN : int32_t type of status
1237 * NO_ERROR -- success
1238 * non-zero failure code
1239 *
1240 * NOTE : This event will also trigger DataProc thread to move to next job
1241 * processing (i.e., send a new jpeg encoding job to mm-jpeg-interface
1242 * if there is any pending job in jpeg input queue)
1243 *==========================================================================*/
1244 int32_t QCameraPostProcessor::processJpegEvt(qcamera_jpeg_evt_payload_t *evt)
1245 {
1246 if (m_bInited == FALSE) {
1247 LOGE("postproc not initialized yet");
1248 return UNKNOWN_ERROR;
1249 }
1250
1251 int32_t rc = NO_ERROR;
1252 camera_memory_t *jpeg_mem = NULL;
1253 omx_jpeg_ouput_buf_t *jpeg_out = NULL;
1254 void *jpegData = NULL;
1255 if (mUseSaveProc && m_parent->isLongshotEnabled()) {
1256 qcamera_jpeg_evt_payload_t *saveData = ( qcamera_jpeg_evt_payload_t * ) malloc(sizeof(qcamera_jpeg_evt_payload_t));
1257 if ( NULL == saveData ) {
1258 LOGE("Can not allocate save data message!");
1259 return NO_MEMORY;
1260 }
1261 *saveData = *evt;
1262 if (m_inputSaveQ.enqueue((void *) saveData)) {
1263 m_saveProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
1264 } else {
1265 LOGD("m_inputSaveQ PP Q is not active!!!");
1266 free(saveData);
1267 saveData = NULL;
1268 return rc;
1269 }
1270 } else {
1271 /* To be removed later when ISP Frame sync feature is available
1272 qcamera_jpeg_data_t *jpeg_job =
1273 (qcamera_jpeg_data_t *)m_ongoingJpegQ.dequeue(matchJobId,
1274 (void*)&evt->jobId);
1275 uint32_t frame_idx = jpeg_job->src_frame->bufs[0]->frame_idx;*/
1276 uint32_t frame_idx = 75;
1277 LOGH("FRAME INDEX %d", frame_idx);
1278 // Release jpeg job data
1279 m_ongoingJpegQ.flushNodes(matchJobId, (void*)&evt->jobId);
1280
1281 if (m_inputPPQ.getCurrentSize() > 0) {
1282 m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
1283 }
1284 LOGH("[KPI Perf] : jpeg job %d", evt->jobId);
1285
1286 if ((false == m_parent->m_bIntJpegEvtPending) &&
1287 (m_parent->mDataCb == NULL ||
1288 m_parent->msgTypeEnabledWithLock(CAMERA_MSG_COMPRESSED_IMAGE) == 0 )) {
1289 LOGW("No dataCB or CAMERA_MSG_COMPRESSED_IMAGE not enabled");
1290 rc = NO_ERROR;
1291 goto end;
1292 }
1293
1294 if(evt->status == JPEG_JOB_STATUS_ERROR) {
1295 LOGE("Error event handled from jpeg, status = %d",
1296 evt->status);
1297 rc = FAILED_TRANSACTION;
1298 goto end;
1299 }
1300 if (!mJpegMemOpt) {
1301 jpegData = evt->out_data.buf_vaddr;
1302 }
1303 else {
1304 jpeg_out = (omx_jpeg_ouput_buf_t*) evt->out_data.buf_vaddr;
1305 if (jpeg_out != NULL) {
1306 jpeg_mem = (camera_memory_t *)jpeg_out->mem_hdl;
1307 if (jpeg_mem != NULL) {
1308 jpegData = jpeg_mem->data;
1309 }
1310 }
1311 }
1312 m_parent->dumpJpegToFile(jpegData,
1313 evt->out_data.buf_filled_len,
1314 evt->jobId);
1315 LOGH("Dump jpeg_size=%d", evt->out_data.buf_filled_len);
1316 if(true == m_parent->m_bIntJpegEvtPending) {
1317 //Sending JPEG snapshot taken notification to HAL
1318 pthread_mutex_lock(&m_parent->m_int_lock);
1319 pthread_cond_signal(&m_parent->m_int_cond);
1320 pthread_mutex_unlock(&m_parent->m_int_lock);
1321 m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
1322 return rc;
1323 }
1324 if (!mJpegMemOpt) {
1325 // alloc jpeg memory to pass to upper layer
1326 jpeg_mem = m_parent->mGetMemory(-1, evt->out_data.buf_filled_len,
1327 1, m_parent->mCallbackCookie);
1328 if (NULL == jpeg_mem) {
1329 rc = NO_MEMORY;
1330 LOGE("getMemory for jpeg, ret = NO_MEMORY");
1331 goto end;
1332 }
1333 memcpy(jpeg_mem->data, evt->out_data.buf_vaddr, evt->out_data.buf_filled_len);
1334 }
1335 LOGH("Calling upperlayer callback to store JPEG image");
1336 qcamera_release_data_t release_data;
1337 memset(&release_data, 0, sizeof(qcamera_release_data_t));
1338 release_data.data = jpeg_mem;
1339 LOGI("[KPI Perf]: PROFILE_JPEG_CB");
1340 rc = sendDataNotify(CAMERA_MSG_COMPRESSED_IMAGE,
1341 jpeg_mem,
1342 0,
1343 NULL,
1344 &release_data,
1345 frame_idx);
1346 m_parent->setOutputImageCount(m_parent->getOutputImageCount() + 1);
1347
1348 end:
1349 if (rc != NO_ERROR) {
1350 // send error msg to upper layer
1351 LOGE("Jpeg Encoding failed. Notify Application");
1352 sendEvtNotify(CAMERA_MSG_ERROR,
1353 UNKNOWN_ERROR,
1354 0);
1355
1356 if (NULL != jpeg_mem) {
1357 jpeg_mem->release(jpeg_mem);
1358 jpeg_mem = NULL;
1359 }
1360 }
1361
1362 /* check whether to send callback for depth map */
1363 if (m_parent->mParameters.isUbiRefocus() &&
1364 (m_parent->getOutputImageCount() + 1 ==
1365 m_parent->mParameters.getRefocusOutputCount())) {
1366 m_parent->setOutputImageCount(m_parent->getOutputImageCount() + 1);
1367
1368 jpeg_mem = m_DataMem;
1369 release_data.data = jpeg_mem;
1370 m_DataMem = NULL;
1371 LOGH("[KPI Perf]: send jpeg callback for depthmap ");
1372 rc = sendDataNotify(CAMERA_MSG_COMPRESSED_IMAGE,
1373 jpeg_mem,
1374 0,
1375 NULL,
1376 &release_data,
1377 frame_idx);
1378 if (rc != NO_ERROR) {
1379 // send error msg to upper layer
1380 sendEvtNotify(CAMERA_MSG_ERROR,
1381 UNKNOWN_ERROR,
1382 0);
1383 if (NULL != jpeg_mem) {
1384 jpeg_mem->release(jpeg_mem);
1385 jpeg_mem = NULL;
1386 }
1387 }
1388 m_DataMem = NULL;
1389 }
1390 }
1391
1392 // wake up data proc thread to do the next job,
1393 // in case a previous request was blocked by an ongoing jpeg job
1394 m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
1395
1396 m_parent->m_perfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
1397
1398 return rc;
1399 }
1400
1401 /*===========================================================================
1402 * FUNCTION : processPPData
1403 *
1404 * DESCRIPTION: process received frame after reprocess.
1405 *
1406 * PARAMETERS :
1407 * @frame : received frame from reprocess channel.
1408 *
1409 * RETURN : int32_t type of status
1410 * NO_ERROR -- success
1411 * non-zero failure code
1412 *
1413 * NOTE : The frame after reprocess needs to be sent to jpeg encoding.
1414 *==========================================================================*/
1415 int32_t QCameraPostProcessor::processPPData(mm_camera_super_buf_t *frame)
1416 {
1417 bool triggerEvent = TRUE;
1418
1419 LOGD("QCameraPostProcessor::processPPData");
1420 bool needSuperBufMatch = m_parent->mParameters.generateThumbFromMain();
1421 if (m_bInited == FALSE) {
1422 LOGE("postproc not initialized yet");
1423 return UNKNOWN_ERROR;
1424 }
1425
1426 bool needHalPP = m_parent->needHALPP();
1427 LOGH("needHalPP:%d", needHalPP);
1428
1429 qcamera_pp_data_t *job = (qcamera_pp_data_t *)m_ongoingPPQ.dequeue();
1430 if (NULL == job) {
1431 LOGE("Cannot find reprocess job");
1432 return BAD_VALUE;
1433 }
1434
1435 if (!needSuperBufMatch && (job->src_frame == NULL
1436 || job->src_reproc_frame == NULL) ) {
1437 LOGE("Invalid reprocess job");
1438 return BAD_VALUE;
1439 }
1440
1441 if (!needSuperBufMatch && (m_parent->mParameters.isNV16PictureFormat() ||
1442 m_parent->mParameters.isNV21PictureFormat())) {
1443 releaseOngoingPPData(job, this);
1444 free(job);
1445
1446 if(m_parent->mParameters.isYUVFrameInfoNeeded())
1447 setYUVFrameInfo(frame);
1448 return processRawData(frame);
1449 }
1450
1451 #ifdef TARGET_TS_MAKEUP
1452 // find snapshot frame
1453 mm_camera_buf_def_t *pReprocFrame = NULL;
1454 QCameraStream * pSnapshotStream = NULL;
1455 QCameraChannel *pChannel = m_parent->getChannelByHandle(frame->ch_id);
1456 if (pChannel == NULL) {
1457 for (int8_t i = 0; i < mPPChannelCount; i++) {
1458 if ((mPPChannels[i] != NULL) &&
1459 (validate_handle(mPPChannels[i]->getMyHandle(), frame->ch_id))) {
1460 pChannel = mPPChannels[i];
1461 break;
1462 }
1463 }
1464 }
1465 if (pChannel == NULL) {
1466 LOGE("No corresponding channel (ch_id = %d) exist, return here",
1467 frame->ch_id);
1468 return BAD_VALUE;
1469 }
1470
1471 for (uint32_t i = 0; i < frame->num_bufs; i++) {
1472 pSnapshotStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
1473 if (pSnapshotStream != NULL) {
1474 if (pSnapshotStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
1475 pReprocFrame = frame->bufs[i];
1476 break;
1477 }
1478 }
1479 }
1480
1481 if (pReprocFrame != NULL && m_parent->mParameters.isFaceDetectionEnabled()) {
1482 m_parent->TsMakeupProcess_Snapshot(pReprocFrame,pSnapshotStream);
1483 } else {
1484 LOGH("pReprocFrame == NULL || isFaceDetectionEnabled = %d",
1485 m_parent->mParameters.isFaceDetectionEnabled());
1486 }
1487 #endif
1488 int8_t mCurReprocCount = job->reprocCount;
1489 if ((m_parent->isLongshotEnabled()
1490 && (!(m_parent->mParameters.getQuadraCfa())|| (mCurReprocCount == 2)))
1491 && (!m_parent->isCaptureShutterEnabled())
1492 && (!m_parent->mCACDoneReceived)) {
1493 // play shutter sound for longshot
1494 // after reprocess is done
1495 m_parent->playShutter();
1496 }
1497 m_parent->mCACDoneReceived = FALSE;
1498
1499 int8_t mCurChannelIndex = job->ppChannelIndex;
1500 if ( mCurReprocCount > 1 ) {
1501 //In case of pp 2nd pass, we can release input of 2nd pass
1502 releaseSuperBuf(job->src_frame);
1503 free(job->src_frame);
1504 job->src_frame = NULL;
1505 }
1506
1507 LOGD("mCurReprocCount = %d mCurChannelIndex = %d mTotalNumReproc = %d",
1508 mCurReprocCount, mCurChannelIndex,
1509 m_parent->mParameters.getReprocCount());
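// Either queue the frame for another reprocess pass, feed it to the HAL
// post-processing block, or send it on for JPEG encoding.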
1510 if (mCurReprocCount < m_parent->mParameters.getReprocCount()) {
1511 // More pp passes needed. Push frame back to pp queue.
1512 qcamera_pp_data_t *pp_request_job = job;
1513 pp_request_job->src_frame = frame;
1514
1515 if ((mPPChannels[mCurChannelIndex]->getReprocCount()
1516 == mCurReprocCount) &&
1517 (mPPChannels[mCurChannelIndex + 1] != NULL)) {
1518 pp_request_job->ppChannelIndex++;
1519 }
1520
1521 // enqueue to post proc input queue
1522 if (false == m_inputPPQ.enqueue((void *)pp_request_job)) {
1523 LOGW("m_input PP Q is not active!!!");
1524 releaseOngoingPPData(pp_request_job,this);
1525 free(pp_request_job);
1526 pp_request_job = NULL;
1527 triggerEvent = FALSE;
1528 }
1529 } else if (m_halPP != NULL && needHalPP) {
1530 qcamera_hal_pp_data_t *hal_pp_job =
1531 (qcamera_hal_pp_data_t*) malloc(sizeof(qcamera_hal_pp_data_t));
1532 if (hal_pp_job == NULL) {
1533 LOGE("No memory for qcamera_hal_pp_data_t data");
1534 return NO_MEMORY;
1535 }
1536 memset(hal_pp_job, 0, sizeof(qcamera_hal_pp_data_t));
1537 hal_pp_job->frame = frame;
1538 hal_pp_job->src_reproc_frame = job ? job->src_reproc_frame : NULL;
1539 hal_pp_job->src_reproc_bufs = job ? job->src_reproc_bufs : NULL;
1540 hal_pp_job->reproc_frame_release = job ? job->reproc_frame_release : false;
1541 hal_pp_job->offline_reproc_buf = job ? job->offline_reproc_buf : NULL;
1542 hal_pp_job->offline_buffer = job ? job->offline_buffer : false;
1543 m_halPP->feedInput(hal_pp_job);
1544 } else {
1545 //Done with post processing. Send frame to Jpeg
1546 qcamera_jpeg_data_t *jpeg_job =
1547 (qcamera_jpeg_data_t *)malloc(sizeof(qcamera_jpeg_data_t));
1548 if (jpeg_job == NULL) {
1549 LOGE("No memory for jpeg job");
1550 return NO_MEMORY;
1551 }
1552
1553 memset(jpeg_job, 0, sizeof(qcamera_jpeg_data_t));
1554 jpeg_job->src_frame = frame;
1555 jpeg_job->src_reproc_frame = job ? job->src_reproc_frame : NULL;
1556 jpeg_job->src_reproc_bufs = job ? job->src_reproc_bufs : NULL;
1557 jpeg_job->reproc_frame_release = job ? job->reproc_frame_release : false;
1558 jpeg_job->offline_reproc_buf = job ? job->offline_reproc_buf : NULL;
1559 jpeg_job->offline_buffer = job ? job->offline_buffer : false;
1560
1561 // find meta data frame
1562 mm_camera_buf_def_t *meta_frame = NULL;
1563 if (m_parent->mParameters.isAdvCamFeaturesEnabled()) {
1564 size_t meta_idx = m_parent->mParameters.getExifBufIndex(m_PPindex);
1565 if (m_InputMetadata.size() >= (meta_idx + 1)) {
1566 meta_frame = m_InputMetadata.itemAt(meta_idx);
1567 } else {
1568 LOGW("Input metadata vector contains %d entries, index required %d",
1569 m_InputMetadata.size(), meta_idx);
1570 }
1571 m_PPindex++;
1572 } else {
1573 for (uint32_t i = 0; job && job->src_reproc_frame &&
1574 (i < job->src_reproc_frame->num_bufs); i++) {
1575 // look through input superbuf
1576 if (job->src_reproc_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_METADATA) {
1577 meta_frame = job->src_reproc_frame->bufs[i];
1578 break;
1579 }
1580 }
1581
1582 if (meta_frame == NULL) {
1583 // look through reprocess superbuf
1584 for (uint32_t i = 0; i < frame->num_bufs; i++) {
1585 if (frame->bufs[i]->stream_type == CAM_STREAM_TYPE_METADATA) {
1586 meta_frame = frame->bufs[i];
1587 break;
1588 }
1589 }
1590 }
1591 }
1592 if (meta_frame != NULL) {
1593 // fill in meta data frame ptr
1594 jpeg_job->metadata = (metadata_buffer_t *)meta_frame->buffer;
1595 }
1596
1597 if (m_parent->mParameters.getQuadraCfa()) {
1598 // find offline metadata frame for quadra CFA
1599 mm_camera_buf_def_t *pOfflineMetaFrame = NULL;
1600 QCameraStream * pOfflineMetadataStream = NULL;
1601 QCameraChannel *pChannel = m_parent->getChannelByHandle(frame->ch_id);
1602 if (pChannel == NULL) {
1603 for (int8_t i = 0; i < mPPChannelCount; i++) {
1604 if ((mPPChannels[i] != NULL) &&
1605 (mPPChannels[i]->getMyHandle() == frame->ch_id)) {
1606 pChannel = mPPChannels[i];
1607 break;
1608 }
1609 }
1610 }
1611 if (pChannel == NULL) {
1612 LOGE("No corresponding channel (ch_id = %d) exist, return here",
1613 frame->ch_id);
1614 return BAD_VALUE;
1615 }
1616
1617 for (uint32_t i = 0; i < frame->num_bufs; i++) {
1618 pOfflineMetadataStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
1619 if (pOfflineMetadataStream != NULL) {
1620 if (pOfflineMetadataStream->isOrignalTypeOf(CAM_STREAM_TYPE_METADATA)) {
1621 pOfflineMetaFrame = frame->bufs[i];
1622 break;
1623 }
1624 }
1625 }
1626 if (pOfflineMetaFrame != NULL) {
1627 // fill in meta data frame ptr
1628 jpeg_job->metadata = (metadata_buffer_t *)pOfflineMetaFrame->buffer;
1629
1630 // Dump offline metadata for Tuning
1631 char value[PROPERTY_VALUE_MAX];
1632 property_get("persist.camera.dumpmetadata", value, "0");
1633 int32_t enabled = atoi(value);
1634 if (enabled && jpeg_job->metadata->is_tuning_params_valid) {
1635 m_parent->dumpMetadataToFile(pOfflineMetadataStream,pOfflineMetaFrame,
1636 (char *)"Offline_isp_meta");
1637 }
1638 }
1639 }
1640
1641 // enqueue reprocessed frame to jpeg input queue
1642 if (false == m_inputJpegQ.enqueue((void *)jpeg_job)) {
1643 LOGW("Input Jpeg Q is not active!!!");
1644 releaseJpegJobData(jpeg_job);
1645 free(jpeg_job);
1646 jpeg_job = NULL;
1647 triggerEvent = FALSE;
1648 }
1649
1650 // free pp job buf
1651 pthread_mutex_lock(&m_reprocess_lock);
1652 if (job) {
1653 free(job);
1654 }
1655 pthread_mutex_unlock(&m_reprocess_lock);
1656 }
1657
1658 LOGD("");
1659 // wake up data proc thread
1660
1661 if (triggerEvent) {
1662 m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
1663 }
1664
1665 return NO_ERROR;
1666 }
1667
1668 /*===========================================================================
1669 * FUNCTION : findJpegJobByJobId
1670 *
1671 * DESCRIPTION: find a jpeg job from ongoing Jpeg queue by its job ID
1672 *
1673 * PARAMETERS :
1674 * @jobId : job Id of the job
1675 *
1676 * RETURN : ptr to a jpeg job struct. NULL if not found.
1677 *
1678 * NOTE : Currently only one job is sent to mm-jpeg-interface for jpeg
1679 * encoding at a time. Therefore simply dequeuing from the ongoing Jpeg
1680 * Queue is sufficient to find the jpeg job.
1681 *==========================================================================*/
1682 qcamera_jpeg_data_t *QCameraPostProcessor::findJpegJobByJobId(uint32_t jobId)
1683 {
1684 qcamera_jpeg_data_t * job = NULL;
1685 if (jobId == 0) {
1686 LOGE("not a valid jpeg jobId");
1687 return NULL;
1688 }
1689
1690 // currently only one jpeg job ongoing, so simply dequeue the head
1691 job = (qcamera_jpeg_data_t *)m_ongoingJpegQ.dequeue();
1692 return job;
1693 }
1694
1695 /*===========================================================================
1696 * FUNCTION : releasePPInputData
1697 *
1698 * DESCRIPTION: callback function to release post process input data node
1699 *
1700 * PARAMETERS :
1701 * @data : ptr to post process input data
1702 * @user_data : user data ptr (QCameraPostProcessor)
1703 *
1704 * RETURN : None
1705 *==========================================================================*/
1706 void QCameraPostProcessor::releasePPInputData(void *data, void *user_data)
1707 {
1708 QCameraPostProcessor *pme = (QCameraPostProcessor *)user_data;
1709 if (NULL != pme) {
1710 qcamera_pp_request_t *pp_job = (qcamera_pp_request_t *)data;
1711 if (NULL != pp_job->src_frame) {
1712 pme->releaseSuperBuf(pp_job->src_frame);
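// If both pointers refer to the same super buffer, clear the reproc pointer
// so the same allocation is not freed twice below.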
1713 if (pp_job->src_frame == pp_job->src_reproc_frame)
1714 pp_job->src_reproc_frame = NULL;
1715 free(pp_job->src_frame);
1716 pp_job->src_frame = NULL;
1717 }
1718 if (NULL != pp_job->src_reproc_frame) {
1719 pme->releaseSuperBuf(pp_job->src_reproc_frame);
1720 free(pp_job->src_reproc_frame);
1721 pp_job->src_reproc_frame = NULL;
1722 }
1723 pp_job->reprocCount = 0;
1724 }
1725 }
1726
1727 /*===========================================================================
1728 * FUNCTION : releaseJpegData
1729 *
1730 * DESCRIPTION: callback function to release jpeg job node
1731 *
1732 * PARAMETERS :
1733 * @data : ptr to ongoing jpeg job data
1734 * @user_data : user data ptr (QCameraPostProcessor)
1735 *
1736 * RETURN : None
1737 *==========================================================================*/
1738 void QCameraPostProcessor::releaseJpegData(void *data, void *user_data)
1739 {
1740 QCameraPostProcessor *pme = (QCameraPostProcessor *)user_data;
1741 if (NULL != pme) {
1742 pme->releaseJpegJobData((qcamera_jpeg_data_t *)data);
1743 LOGH("Released job ID %u",
1744 ((qcamera_jpeg_data_t *)data)->jobId);
1745 }
1746 }
1747
1748 /*===========================================================================
1749 * FUNCTION : releaseOngoingPPData
1750 *
1751 * DESCRIPTION: callback function to release ongoing postprocess job node
1752 *
1753 * PARAMETERS :
1754 * @data : ptr to ongoing postprocess job
1755 * @user_data : user data ptr (QCameraPostProcessor)
1756 *
1757 * RETURN : None
1758 *==========================================================================*/
1759 void QCameraPostProcessor::releaseOngoingPPData(void *data, void *user_data)
1760 {
1761 QCameraPostProcessor *pme = (QCameraPostProcessor *)user_data;
1762 if (NULL != pme) {
1763 qcamera_pp_data_t *pp_job = (qcamera_pp_data_t *)data;
1764 if (NULL != pp_job->src_frame) {
1765 if (!pp_job->reproc_frame_release) {
1766 pme->releaseSuperBuf(pp_job->src_frame);
1767 }
1768 if (pp_job->src_frame == pp_job->src_reproc_frame)
1769 pp_job->src_reproc_frame = NULL;
1770
1771 free(pp_job->src_frame);
1772 pp_job->src_frame = NULL;
1773 }
1774 if (NULL != pp_job->src_reproc_frame) {
1775 pme->releaseSuperBuf(pp_job->src_reproc_frame);
1776 free(pp_job->src_reproc_frame);
1777 pp_job->src_reproc_frame = NULL;
1778 }
1779 if ((pp_job->offline_reproc_buf != NULL)
1780 && (pp_job->offline_buffer)) {
1781 free(pp_job->offline_reproc_buf);
1782 pp_job->offline_buffer = false;
1783 }
1784 pp_job->reprocCount = 0;
1785 }
1786 }
1787
1788 /*===========================================================================
1789 * FUNCTION : releaseNotifyData
1790 *
1791 * DESCRIPTION: function to release internal resources in notify data struct
1792 *
1793 * PARAMETERS :
1794 * @user_data : ptr user data
1795 * @cookie : callback cookie
1796 * @cb_status : callback status
1797 *
1798 * RETURN : None
1799 *
1800 * NOTE : deallocate jpeg heap memory if it's not NULL
1801 *==========================================================================*/
1802 void QCameraPostProcessor::releaseNotifyData(void *user_data,
1803 void *cookie,
1804 int32_t cb_status)
1805 {
1806 LOGD("releaseNotifyData release_data %p", user_data);
1807
1808 qcamera_data_argm_t *app_cb = ( qcamera_data_argm_t * ) user_data;
1809 QCameraPostProcessor *postProc = ( QCameraPostProcessor * ) cookie;
1810 if ( ( NULL != app_cb ) && ( NULL != postProc ) ) {
1811
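// When the save-to-file path is used and the callback failed, the data buffer
// holds the stored file path; unlink that file so it is not left behind.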
1812 if ( postProc->mUseSaveProc &&
1813 app_cb->release_data.unlinkFile &&
1814 ( NO_ERROR != cb_status ) ) {
1815
1816 String8 unlinkPath((const char *) app_cb->release_data.data->data,
1817 app_cb->release_data.data->size);
1818 int rc = unlink(unlinkPath.string());
1819 LOGH("Unlinking stored file rc = %d",
1820 rc);
1821 }
1822
1823 if (app_cb && NULL != app_cb->release_data.data) {
1824 app_cb->release_data.data->release(app_cb->release_data.data);
1825 app_cb->release_data.data = NULL;
1826 }
1827 if (app_cb && NULL != app_cb->release_data.frame) {
1828 postProc->releaseSuperBuf(app_cb->release_data.frame);
1829 free(app_cb->release_data.frame);
1830 app_cb->release_data.frame = NULL;
1831 }
1832 if (app_cb && NULL != app_cb->release_data.streamBufs) {
1833 app_cb->release_data.streamBufs->deallocate();
1834 delete app_cb->release_data.streamBufs;
1835 app_cb->release_data.streamBufs = NULL;
1836 }
1837 free(app_cb);
1838 }
1839 }
1840
1841 /*===========================================================================
1842 * FUNCTION : releaseSuperBuf
1843 *
1844 * DESCRIPTION: function to release a superbuf frame by returning back to kernel
1845 *
1846 * PARAMETERS :
1847 * @super_buf : ptr to the superbuf frame
1848 *
1849 * RETURN : None
1850 *==========================================================================*/
1851 void QCameraPostProcessor::releaseSuperBuf(mm_camera_super_buf_t *super_buf)
1852 {
1853 QCameraChannel *pChannel = NULL;
1854
1855 if (NULL != super_buf) {
1856 pChannel = m_parent->getChannelByHandle(super_buf->ch_id);
1857
1858 if ( NULL == pChannel ) {
1859 for (int8_t i = 0; i < mPPChannelCount; i++) {
1860 if ((mPPChannels[i] != NULL) &&
1861 (validate_handle(mPPChannels[i]->getMyHandle(),
1862 super_buf->ch_id))) {
1863 pChannel = mPPChannels[i];
1864 break;
1865 }
1866 }
1867 }
1868
1869 if (pChannel != NULL) {
1870 pChannel->bufDone(super_buf);
1871 } else {
1872 LOGE("Channel id %d not found!!",
1873 super_buf->ch_id);
1874 }
1875 }
1876 }
1877
1878 /*===========================================================================
1879 * FUNCTION : releaseSuperBuf
1880 *
1881 * DESCRIPTION : function to release a superbuf frame by returning back to kernel
1882 *
1883 * PARAMETERS :
1884 * @super_buf : ptr to the superbuf frame
1885 * @stream_type: Type of stream to be released
1886 *
1887 * RETURN : None
1888 *==========================================================================*/
1889 void QCameraPostProcessor::releaseSuperBuf(mm_camera_super_buf_t *super_buf,
1890 cam_stream_type_t stream_type)
1891 {
1892 QCameraChannel *pChannel = NULL;
1893
1894 if (NULL != super_buf) {
1895 pChannel = m_parent->getChannelByHandle(super_buf->ch_id);
1896 if (pChannel == NULL) {
1897 for (int8_t i = 0; i < mPPChannelCount; i++) {
1898 if ((mPPChannels[i] != NULL) &&
1899 (validate_handle(mPPChannels[i]->getMyHandle(),
1900 super_buf->ch_id))) {
1901 pChannel = mPPChannels[i];
1902 break;
1903 }
1904 }
1905 }
1906
1907 if (pChannel != NULL) {
1908 for (uint32_t i = 0; i < super_buf->num_bufs; i++) {
1909 if (super_buf->bufs[i] != NULL) {
1910 QCameraStream *pStream =
1911 pChannel->getStreamByHandle(super_buf->bufs[i]->stream_id);
1912 if ((pStream != NULL) && ((pStream->getMyType() == stream_type)
1913 || (pStream->getMyOriginalType() == stream_type))) {
1914 pChannel->bufDone(super_buf, super_buf->bufs[i]->stream_id);
1915 break;
1916 }
1917 }
1918 }
1919 } else {
1920 LOGE("Channel id %d not found!!",
1921 super_buf->ch_id);
1922 }
1923 }
1924 }
1925
1926 /*===========================================================================
1927 * FUNCTION : releaseJpegJobData
1928 *
1929 * DESCRIPTION: function to release internal resources in jpeg job struct
1930 *
1931 * PARAMETERS :
1932 * @job : ptr to jpeg job struct
1933 *
1934 * RETURN : None
1935 *
1936 * NOTE : the original source frame needs to be queued back to the kernel
1937 * for future use. The output buf of the jpeg job needs to be released
1938 * since it's allocated for each job. The exif object needs to be deleted.
1939 *==========================================================================*/
1940 void QCameraPostProcessor::releaseJpegJobData(qcamera_jpeg_data_t *job)
1941 {
1942 LOGD("E");
1943 if (NULL != job) {
1944 if (NULL != job->src_reproc_frame) {
1945 if (!job->reproc_frame_release) {
1946 releaseSuperBuf(job->src_reproc_frame);
1947 }
1948 free(job->src_reproc_frame);
1949 job->src_reproc_frame = NULL;
1950 }
1951
1952 if (NULL != job->src_frame) {
1953 if (!job->halPPAllocatedBuf) {
1954 releaseSuperBuf(job->src_frame);
1955 } else {
1956 // frame heap buffer was allocated for HAL PP
1957 if (job->hal_pp_bufs) {
1958 free(job->hal_pp_bufs);
1959 job->hal_pp_bufs = NULL;
1960 }
1961 if (job->snapshot_heap) {
1962 job->snapshot_heap->deallocate();
1963 delete job->snapshot_heap;
1964 job->snapshot_heap = NULL;
1965 }
1966 if (job->metadata_heap) {
1967 job->metadata_heap->deallocate();
1968 delete job->metadata_heap;
1969 job->metadata_heap = NULL;
1970 }
1971 }
1972 free(job->src_frame);
1973 job->src_frame = NULL;
1974 }
1975
1976 if (NULL != job->pJpegExifObj) {
1977 delete job->pJpegExifObj;
1978 job->pJpegExifObj = NULL;
1979 }
1980
1981 if (NULL != job->src_reproc_bufs) {
1982 delete [] job->src_reproc_bufs;
1983 }
1984
1985 if ((job->offline_reproc_buf != NULL)
1986 && (job->offline_buffer)) {
1987 free(job->offline_reproc_buf);
1988 job->offline_buffer = false;
1989 }
1990 }
1991 LOGD("X");
1992 }
1993
1994 /*===========================================================================
1995 * FUNCTION : releaseSaveJobData
1996 *
1997 * DESCRIPTION: function to release internal resources in store jobs
1998 *
1999 * PARAMETERS :
2000 * @data : ptr to jpeg event payload of the save job
2001 *
2002 * RETURN : None
2003 *
2004 *==========================================================================*/
2005 void QCameraPostProcessor::releaseSaveJobData(void *data, void *user_data)
2006 {
2007 LOGD("E");
2008
2009 QCameraPostProcessor *pme = (QCameraPostProcessor *) user_data;
2010 if (NULL == pme) {
2011 LOGE("Invalid postproc handle");
2012 return;
2013 }
2014
2015 qcamera_jpeg_evt_payload_t *job_data = (qcamera_jpeg_evt_payload_t *) data;
2016 if (job_data == NULL) {
2017 LOGE("Invalid jpeg event data");
2018 return;
2019 }
2020
2021 // find job by jobId
2022 qcamera_jpeg_data_t *job = pme->findJpegJobByJobId(job_data->jobId);
2023
2024 if (NULL != job) {
2025 pme->releaseJpegJobData(job);
2026 free(job);
2027 } else {
2028 LOGE("Invalid jpeg job");
2029 }
2030
2031 LOGD("X");
2032 }
2033
2034 /*===========================================================================
2035 * FUNCTION : releaseRawData
2036 *
2037 * DESCRIPTION: callback function to release raw data node
2038 *
2039 * PARAMETERS :
2040 * @data : ptr to raw super buffer to be released
2041 *
2042 * RETURN : None
2043 *
2044 *==========================================================================*/
2045 void QCameraPostProcessor::releaseRawData(void *data, void *user_data)
2046 {
2047 LOGD("E");
2048
2049 QCameraPostProcessor *pme = (QCameraPostProcessor *) user_data;
2050 if (NULL == pme) {
2051 LOGE("Invalid postproc handle");
2052 return;
2053 }
2054 mm_camera_super_buf_t *super_buf = (mm_camera_super_buf_t *) data;
2055 pme->releaseSuperBuf(super_buf);
2056
2057 LOGD("X");
2058 }
2059
2060
2061 /*===========================================================================
2062 * FUNCTION : getColorfmtFromImgFmt
2063 *
2064 * DESCRIPTION: function to return jpeg color format based on its image format
2065 *
2066 * PARAMETERS :
2067 * @img_fmt : image format
2068 *
2069 * RETURN : jpeg color format that can be understood by the OMX lib
2070 *==========================================================================*/
2071 mm_jpeg_color_format QCameraPostProcessor::getColorfmtFromImgFmt(cam_format_t img_fmt)
2072 {
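    // NV21-style formats carry chroma as CrCb and NV12-style as CbCr,
    // so map each to the corresponding encoder color format.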
2073 switch (img_fmt) {
2074 case CAM_FORMAT_YUV_420_NV21:
2075 case CAM_FORMAT_YUV_420_NV21_VENUS:
2076 return MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2;
2077 case CAM_FORMAT_YUV_420_NV21_ADRENO:
2078 return MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2;
2079 case CAM_FORMAT_YUV_420_NV12:
2080 case CAM_FORMAT_YUV_420_NV12_VENUS:
2081 return MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2;
2082 case CAM_FORMAT_YUV_420_YV12:
2083 return MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2;
2084 case CAM_FORMAT_YUV_422_NV61:
2085 return MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V1;
2086 case CAM_FORMAT_YUV_422_NV16:
2087 return MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V1;
2088 default:
2089 return MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2;
2090 }
2091 }
2092
2093 /*===========================================================================
2094 * FUNCTION : getJpegImgTypeFromImgFmt
2095 *
2096 * DESCRIPTION: function to return jpeg encode image type based on its image format
2097 *
2098 * PARAMETERS :
2099 * @img_fmt : image format
2100 *
2101 * RETURN : return jpeg source image format (YUV or Bitstream)
2102 *==========================================================================*/
2103 mm_jpeg_format_t QCameraPostProcessor::getJpegImgTypeFromImgFmt(cam_format_t img_fmt)
2104 {
2105 switch (img_fmt) {
2106 case CAM_FORMAT_YUV_420_NV21:
2107 case CAM_FORMAT_YUV_420_NV21_ADRENO:
2108 case CAM_FORMAT_YUV_420_NV12:
2109 case CAM_FORMAT_YUV_420_NV12_VENUS:
2110 case CAM_FORMAT_YUV_420_NV21_VENUS:
2111 case CAM_FORMAT_YUV_420_YV12:
2112 case CAM_FORMAT_YUV_422_NV61:
2113 case CAM_FORMAT_YUV_422_NV16:
2114 return MM_JPEG_FMT_YUV;
2115 default:
2116 return MM_JPEG_FMT_YUV;
2117 }
2118 }
2119
2120 /*===========================================================================
2121 * FUNCTION : queryStreams
2122 *
2123 * DESCRIPTION: utility method for retrieving main, thumbnail and reprocess
2124 * streams and frame from bundled super buffer
2125 *
2126 * PARAMETERS :
2127 * @main : ptr to main stream if present
2128 * @thumb : ptr to thumbnail stream if present
2129 * @reproc : ptr to reprocess stream if present
2130 * @main_image : ptr to main image if present
2131 * @thumb_image: ptr to thumbnail image if present
2132 * @frame : bundled super buffer
2133 * @reproc_frame : bundled source frame buffer
2134 *
2135 * RETURN : int32_t type of status
2136 * NO_ERROR -- success
2137 * non-zero failure code
2138 *==========================================================================*/
2139 int32_t QCameraPostProcessor::queryStreams(QCameraStream **main,
2140 QCameraStream **thumb,
2141 QCameraStream **reproc,
2142 mm_camera_buf_def_t **main_image,
2143 mm_camera_buf_def_t **thumb_image,
2144 mm_camera_super_buf_t *frame,
2145 mm_camera_super_buf_t *reproc_frame)
2146 {
2147 if (NULL == frame) {
2148 return NO_INIT;
2149 }
2150
2151 QCameraChannel *pChannel = m_parent->getChannelByHandle(frame->ch_id);
2152 // check reprocess channel if not found
2153 if (pChannel == NULL) {
2154 for (int8_t i = 0; i < mPPChannelCount; i++) {
2155 if ((mPPChannels[i] != NULL) &&
2156 validate_handle(mPPChannels[i]->getMyHandle(), frame->ch_id)) {
2157 pChannel = mPPChannels[i];
2158 break;
2159 }
2160 }
2161 }
2162 if (pChannel == NULL) {
2163 LOGD("No corresponding channel (ch_id = %d) exist, return here",
2164 frame->ch_id);
2165 return BAD_VALUE;
2166 }
2167
2168 // Use snapshot stream to create thumbnail if snapshot and preview
2169 // flip settings don't match in ZSL mode.
2170 bool thumb_stream_needed = ((!m_parent->isZSLMode() ||
2171 (m_parent->mParameters.getFlipMode(CAM_STREAM_TYPE_SNAPSHOT) ==
2172 m_parent->mParameters.getFlipMode(CAM_STREAM_TYPE_PREVIEW))) &&
2173 !m_parent->mParameters.generateThumbFromMain());
2174
2175 *main = *thumb = *reproc = NULL;
2176 *main_image = *thumb_image = NULL;
2177 // find snapshot frame and thumbnail frame
2178 for (uint32_t i = 0; i < frame->num_bufs; i++) {
2179 QCameraStream *pStream =
2180 pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
2181 if (pStream != NULL) {
2182 if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
2183 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
2184 pStream->isTypeOf(CAM_STREAM_TYPE_VIDEO) ||
2185 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_VIDEO) ||
2186 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_RAW)) {
2187 *main = pStream;
2188 *main_image = frame->bufs[i];
2189 } else if (thumb_stream_needed &&
2190 (pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
2191 pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
2192 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
2193 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_POSTVIEW))) {
2194 *thumb = pStream;
2195 *thumb_image = frame->bufs[i];
2196 }
2197 if (pStream->isTypeOf(CAM_STREAM_TYPE_OFFLINE_PROC) ) {
2198 *reproc = pStream;
2199 }
2200 }
2201 }
2202
2203 if (thumb_stream_needed && *thumb_image == NULL && reproc_frame != NULL) {
2204 QCameraChannel *pSrcReprocChannel = NULL;
2205 pSrcReprocChannel = m_parent->getChannelByHandle(reproc_frame->ch_id);
2206 if (pSrcReprocChannel != NULL) {
2207 // find thumbnail frame
2208 for (uint32_t i = 0; i < reproc_frame->num_bufs; i++) {
2209 QCameraStream *pStream =
2210 pSrcReprocChannel->getStreamByHandle(
2211 reproc_frame->bufs[i]->stream_id);
2212 if (pStream != NULL) {
2213 if (pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
2214 pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW)) {
2215 *thumb = pStream;
2216 *thumb_image = reproc_frame->bufs[i];
2217 }
2218 }
2219 }
2220 }
2221 }
2222
2223 return NO_ERROR;
2224 }
2225
2226 /*===========================================================================
2227 * FUNCTION : syncStreamParams
2228 *
2229 * DESCRIPTION: Query the runtime parameters of all streams included
2230 * in the main and reprocessed frames
2231 *
2232 * PARAMETERS :
2233 * @frame : Main image super buffer
2234 * @reproc_frame : Image super buffer that got processed
2235 *
2236 * RETURN : int32_t type of status
2237 * NO_ERROR -- success
2238 * non-zero failure code
2239 *==========================================================================*/
2240 int32_t QCameraPostProcessor::syncStreamParams(mm_camera_super_buf_t *frame,
2241 mm_camera_super_buf_t *reproc_frame)
2242 {
2243 QCameraStream *reproc_stream = NULL;
2244 QCameraStream *main_stream = NULL;
2245 QCameraStream *thumb_stream = NULL;
2246 mm_camera_buf_def_t *main_frame = NULL;
2247 mm_camera_buf_def_t *thumb_frame = NULL;
2248 int32_t ret = NO_ERROR;
2249
2250 ret = queryStreams(&main_stream,
2251 &thumb_stream,
2252 &reproc_stream,
2253 &main_frame,
2254 &thumb_frame,
2255 frame,
2256 reproc_frame);
2257 if (NO_ERROR != ret) {
2258 LOGE("Camera streams query from input frames failed %d",
2259 ret);
2260 return ret;
2261 }
2262
2263 if (NULL != main_stream) {
2264 ret = main_stream->syncRuntimeParams();
2265 if (NO_ERROR != ret) {
2266 LOGE("Syncing of main stream runtime parameters failed %d",
2267 ret);
2268 return ret;
2269 }
2270 }
2271
2272 if (NULL != thumb_stream) {
2273 ret = thumb_stream->syncRuntimeParams();
2274 if (NO_ERROR != ret) {
2275 LOGE("Syncing of thumb stream runtime parameters failed %d",
2276 ret);
2277 return ret;
2278 }
2279 }
2280
2281 if ((NULL != reproc_stream) && (reproc_stream != main_stream)) {
2282 ret = reproc_stream->syncRuntimeParams();
2283 if (NO_ERROR != ret) {
2284 LOGE("Syncing of reproc stream runtime parameters failed %d",
2285 ret);
2286 return ret;
2287 }
2288 }
2289
2290 return ret;
2291 }
2292
2293 /*===========================================================================
2294 * FUNCTION : encodeData
2295 *
2296 * DESCRIPTION: function to prepare encoding job information and send to
2297 * mm-jpeg-interface to do the encoding job
2298 *
2299 * PARAMETERS :
2300 * @jpeg_job_data : ptr to a struct saving job related information
2301 * @needNewSess : flag to indicate if a new jpeg encoding session needs
2302 * to be created. After creation, this flag will be toggled
2303 *
2304 * RETURN : int32_t type of status
2305 * NO_ERROR -- success
2306 * none-zero failure code
2307 *==========================================================================*/
2308 int32_t QCameraPostProcessor::encodeData(qcamera_jpeg_data_t *jpeg_job_data,
2309 uint8_t &needNewSess)
2310 {
2311 int32_t ret = NO_ERROR;
2312 mm_jpeg_job_t jpg_job;
2313 uint32_t jobId = 0;
2314 QCameraStream *reproc_stream = NULL;
2315 QCameraStream *main_stream = NULL;
2316 mm_camera_buf_def_t *main_frame = NULL;
2317 QCameraStream *thumb_stream = NULL;
2318 mm_camera_buf_def_t *thumb_frame = NULL;
2319 mm_camera_super_buf_t *recvd_frame = jpeg_job_data->src_frame;
2320 cam_rect_t crop;
2321 cam_stream_parm_buffer_t param;
2322 cam_stream_img_prop_t imgProp;
2323 bool is_halpp_output_buf = jpeg_job_data->halPPAllocatedBuf;
2324
2325 LOGD("E, need new session:%d, is halpp output:%d", needNewSess, is_halpp_output_buf);
2326
2327 if (!is_halpp_output_buf) {
2328 // find channel
2329 QCameraChannel *pChannel = m_parent->getChannelByHandle(recvd_frame->ch_id);
2330 // check reprocess channel if not found
2331 if (pChannel == NULL) {
2332 for (int8_t i = 0; i < mPPChannelCount; i++) {
2333 if ((mPPChannels[i] != NULL) &&
2334 (validate_handle(mPPChannels[i]->getMyHandle(), recvd_frame->ch_id))) {
2335 pChannel = mPPChannels[i];
2336 break;
2337 }
2338 }
2339 }
2340
2341 if (pChannel == NULL) {
2342 LOGE("No corresponding channel (ch_id = %d) exist, return here",
2343 recvd_frame->ch_id);
2344 return BAD_VALUE;
2345 }
2346
2347 ret = queryStreams(&main_stream,
2348 &thumb_stream,
2349 &reproc_stream,
2350 &main_frame,
2351 &thumb_frame,
2352 recvd_frame,
2353 jpeg_job_data->src_reproc_frame);
2354 if (NO_ERROR != ret) {
2355 return ret;
2356 }
2357
2358 if(NULL == main_frame){
2359 LOGE("Main frame is NULL");
2360 return BAD_VALUE;
2361 }
2362
2363 if(NULL == thumb_frame){
2364 LOGD("Thumbnail frame does not exist");
2365 }
2366
2367 QCameraMemory *memObj = (QCameraMemory *)main_frame->mem_info;
2368 if (NULL == memObj) {
2369 LOGE("Memory obj of main frame is NULL");
2370 return NO_MEMORY;
2371 }
2372
2373 // dump snapshot frame if enabled
2374 m_parent->dumpFrameToFile(main_stream, main_frame,
2375 QCAMERA_DUMP_FRM_INPUT_JPEG, (char *)"CPP");
2376
2377 // send upperlayer callback for raw image
2378 camera_memory_t *mem = memObj->getMemory(main_frame->buf_idx, false);
2379 if (NULL != m_parent->mDataCb &&
2380 m_parent->msgTypeEnabledWithLock(CAMERA_MSG_RAW_IMAGE) > 0) {
2381 qcamera_callback_argm_t cbArg;
2382 memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
2383 cbArg.cb_type = QCAMERA_DATA_CALLBACK;
2384 cbArg.msg_type = CAMERA_MSG_RAW_IMAGE;
2385 cbArg.data = mem;
2386 cbArg.index = 0;
2387 // Data callback, set read/write flags
2388 main_frame->cache_flags |= CPU_HAS_READ;
2389 m_parent->m_cbNotifier.notifyCallback(cbArg);
2390 }
2391 if (NULL != m_parent->mNotifyCb &&
2392 m_parent->msgTypeEnabledWithLock(CAMERA_MSG_RAW_IMAGE_NOTIFY) > 0) {
2393 qcamera_callback_argm_t cbArg;
2394 memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
2395 cbArg.cb_type = QCAMERA_NOTIFY_CALLBACK;
2396 cbArg.msg_type = CAMERA_MSG_RAW_IMAGE_NOTIFY;
2397 cbArg.ext1 = 0;
2398 cbArg.ext2 = 0;
2399 m_parent->m_cbNotifier.notifyCallback(cbArg);
2400 }
2401 } else {
2402 /* we only need to care about src frame here for HAL PP output data */
2403 LOGD("channel id:%d, stream type:%d", jpeg_job_data->src_frame->ch_id,
2404 jpeg_job_data->hal_pp_bufs[0].stream_type);
2405
2406 QCameraChannel *pChannel = getChannelByHandle(jpeg_job_data->src_frame->ch_id);
2407 if (pChannel == NULL) {
2408 LOGE("Cannot find channel");
2409 return -1;
2410 }
2411
2412 // we use the stream info from reproc stream (type 9)
2413 main_stream = pChannel->getStreamByHandle(jpeg_job_data->src_frame->bufs[0]->stream_id);
2414 if (main_stream == NULL) {
2415 LOGE("main_stream is NULL, encode hal PP output failed");
2416 return BAD_VALUE;
2417 }
2418 LOGD("stream type:%d, stream original type:%d", main_stream->getMyType(),
2419 main_stream->getMyOriginalType());
2420
2421 /* currently we use the postproc channel stream info */
2422 /* if a different dimension is needed for the hal pp output, modify here */
2423 cam_dimension_t hal_pp_out_dim;
2424 main_stream->getFrameDimension(hal_pp_out_dim);
2425 LOGD("stream dimension:%dx%d", hal_pp_out_dim.width, hal_pp_out_dim.height);
2426
2427 recvd_frame = jpeg_job_data->src_frame;
2428 main_frame = jpeg_job_data->src_frame->bufs[0];
2429 if(main_frame == NULL){
2430 LOGE("Main frame is NULL");
2431 return BAD_VALUE;
2432 }
2433 // dump snapshot frame if enabled
2434 m_parent->dumpFrameToFile(main_stream, main_frame,
2435 QCAMERA_DUMP_FRM_INPUT_JPEG, (char *)"HALPP");
2436
2437 }
2438
2439
2440 if (mJpegClientHandle <= 0) {
2441 LOGE("Error: bug here, mJpegClientHandle is 0");
2442 return UNKNOWN_ERROR;
2443 }
2444
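// If the jpeg source stream has changed since the last encode, tear down the
// existing session so a fresh one is created below.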
2445 if ((pJpegSrcStream != NULL) &&
2446 (!validate_handle(pJpegSrcStream->getMyHandle(),
2447 main_stream->getMyHandle()))) {
2448 if (mJpegSessionId) {
2449 mJpegHandle.destroy_session(mJpegSessionId);
2450 mJpegSessionId = 0;
2451 }
2452 pJpegSrcStream = NULL;
2453 mNewJpegSessionNeeded = TRUE;
2454 }
2455 if (needNewSess) {
2456 // create jpeg encoding session
2457 if (mJpegSessionId) {
2458 mJpegHandle.destroy_session(mJpegSessionId);
2459 mJpegSessionId = 0;
2460 }
2461 mm_jpeg_encode_params_t encodeParam;
2462 memset(&encodeParam, 0, sizeof(mm_jpeg_encode_params_t));
2463 if (!is_halpp_output_buf) {
2464 ret = getJpegEncodingConfig(encodeParam, main_stream, thumb_stream);
2465 } else {
2466 LOGH("get jpeg encode config for hal pp output");
2467 ret = getJpegEncodingConfig(encodeParam, main_stream, NULL, jpeg_job_data->src_frame);
2468 }
2469 if (ret != NO_ERROR) {
2470 LOGE("error getting encoding config");
2471 return ret;
2472 }
2473 LOGH("[KPI Perf] : call jpeg create_session");
2474 if (!is_halpp_output_buf) {
2475 ret = mJpegHandle.create_session(mJpegClientHandle, &encodeParam, &mJpegSessionId);
2476 } else {
2477 ret = mJpegHandle.create_session(mJpegClientHandle, &encodeParam, &mJpegSessionIdHalPP);
2478 }
2479 if (ret != NO_ERROR) {
2480 LOGE("error creating a new jpeg encoding session");
2481 return ret;
2482 }
2483 needNewSess = FALSE;
2484 pJpegSrcStream = main_stream;
2485 }
2486
2487 // Fill in new job
2488 memset(&jpg_job, 0, sizeof(mm_jpeg_job_t));
2489 jpg_job.job_type = JPEG_JOB_TYPE_ENCODE;
2490 if (!is_halpp_output_buf) {
2491 jpg_job.encode_job.session_id = mJpegSessionId;
2492 } else {
2493 jpg_job.encode_job.session_id = mJpegSessionIdHalPP;
2494 }
2495 jpg_job.encode_job.src_index = (int32_t)main_frame->buf_idx;
2496 jpg_job.encode_job.dst_index = 0;
2497
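// With jpeg memory optimization the source buffer index is reused as the
// destination; in burst mode dst_index is set to -1 (output buffer left to
// the jpeg interface).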
2498 if (mJpegMemOpt) {
2499 jpg_job.encode_job.dst_index = jpg_job.encode_job.src_index;
2500 } else if (mUseJpegBurst) {
2501 jpg_job.encode_job.dst_index = -1;
2502 }
2503
2504 LOGD("jpeg session id:%d, reproc frame:%p", jpg_job.encode_job.session_id,
2505 jpeg_job_data->src_reproc_frame);
2506
2507 if (jpg_job.encode_job.session_id == 0) {
2508 LOGE("invalid jpeg session id!");
2509 return UNKNOWN_ERROR;
2510 }
2511
2512 // use the source of the reproc frame as the work buffer; if the src buf is
2513 // not available, the jpeg interface will allocate a work buffer
2514 if (jpeg_job_data->src_reproc_frame != NULL) {
2515 int32_t ret = NO_ERROR;
2516 QCameraStream *main_stream = NULL;
2517 mm_camera_buf_def_t *main_frame = NULL;
2518 QCameraStream *thumb_stream = NULL;
2519 mm_camera_buf_def_t *thumb_frame = NULL;
2520 QCameraStream *reproc_stream = NULL;
2521 mm_camera_buf_def_t *workBuf = NULL;
2522 // Call queryStreams to fetch source of reproc frame
2523 ret = queryStreams(&main_stream,
2524 &thumb_stream,
2525 &reproc_stream,
2526 &main_frame,
2527 &thumb_frame,
2528 jpeg_job_data->src_reproc_frame,
2529 NULL);
2530
2531 if ((NO_ERROR == ret) && ((workBuf = main_frame) != NULL)
2532 && !m_parent->isLowPowerMode()) {
2533 camera_memory_t *camWorkMem = NULL;
2534 int workBufIndex = workBuf->buf_idx;
2535 QCameraMemory *workMem = (QCameraMemory *)workBuf->mem_info;
2536 if (workMem != NULL) {
2537 camWorkMem = workMem->getMemory(workBufIndex, false);
2538 }
2539 if (camWorkMem != NULL && workMem != NULL) {
2540 jpg_job.encode_job.work_buf.buf_size = camWorkMem->size;
2541 jpg_job.encode_job.work_buf.buf_vaddr = (uint8_t *)camWorkMem->data;
2542 jpg_job.encode_job.work_buf.fd = workMem->getFd(workBufIndex);
2543 workMem->invalidateCache(workBufIndex);
2544 }
2545 }
2546 }
2547
2548 cam_dimension_t src_dim;
2549 memset(&src_dim, 0, sizeof(cam_dimension_t));
2550 main_stream->getFrameDimension(src_dim);
2551
2552 bool hdr_output_crop = m_parent->mParameters.isHDROutputCropEnabled();
2553 bool img_feature_enabled =
2554 m_parent->mParameters.isUbiFocusEnabled() ||
2555 m_parent->mParameters.isUbiRefocus() ||
2556 m_parent->mParameters.isChromaFlashEnabled() ||
2557 m_parent->mParameters.isOptiZoomEnabled() ||
2558 m_parent->mParameters.isStillMoreEnabled();
2559
2560 LOGH("Crop needed %d", img_feature_enabled);
2561 crop.left = 0;
2562 crop.top = 0;
2563 crop.height = src_dim.height;
2564 crop.width = src_dim.width;
2565
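// Pick up any stream-specific output crop published for the main stream and
// record it on the stream before filling in the encode job.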
2566 param = main_stream->getOutputCrop();
2567 for (int i = 0; i < param.outputCrop.num_of_streams; i++) {
2568 if (param.outputCrop.crop_info[i].stream_id
2569 == main_stream->getMyServerID()) {
2570 crop = param.outputCrop.crop_info[i].crop;
2571 main_stream->setCropInfo(crop);
2572 }
2573 }
2574 if (img_feature_enabled) {
2575 memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
2576
2577 param = main_stream->getImgProp();
2578 imgProp = param.imgProp;
2579 main_stream->setCropInfo(imgProp.crop);
2580 crop = imgProp.crop;
2581 thumb_stream = NULL; /* use thumbnail from main image */
2582
2583 if ((reproc_stream != NULL) && (m_DataMem == NULL) &&
2584 m_parent->mParameters.isUbiRefocus()) {
2585
2586 QCameraHeapMemory* miscBufHandler = reproc_stream->getMiscBuf();
2587 cam_misc_buf_t* refocusResult =
2588 reinterpret_cast<cam_misc_buf_t *>(miscBufHandler->getPtr(0));
2589 uint32_t resultSize = refocusResult->header_size +
2590 refocusResult->width * refocusResult->height;
2591 camera_memory_t *dataMem = m_parent->mGetMemory(-1, resultSize,
2592 1, m_parent->mCallbackCookie);
2593
2594 LOGH("Refocus result header %u dims %dx%d",
2595 resultSize, refocusResult->width, refocusResult->height);
2596
2597 if (dataMem && dataMem->data) {
2598 memcpy(dataMem->data, refocusResult->data, resultSize);
2599 //save mem pointer for depth map
2600 m_DataMem = dataMem;
2601 }
2602 }
2603 } else if ((reproc_stream != NULL) && (m_parent->mParameters.isTruePortraitEnabled())) {
2604
2605 QCameraHeapMemory* miscBufHandler = reproc_stream->getMiscBuf();
2606 cam_misc_buf_t* tpResult =
2607 reinterpret_cast<cam_misc_buf_t *>(miscBufHandler->getPtr(0));
2608 uint32_t tpMetaSize = tpResult->header_size + tpResult->width * tpResult->height;
2609
2610 LOGH("True portrait result header %u dims %dx%d",
2611 tpMetaSize, tpResult->width, tpResult->height);
2612
2613 CAM_DUMP_TO_FILE(QCAMERA_DUMP_FRM_LOCATION"tp", "bm", -1, "y",
2614 &tpResult->data, tpMetaSize);
2615 }
2616
2617 cam_dimension_t dst_dim;
2618
2619 if (hdr_output_crop && crop.height) {
2620 dst_dim.height = crop.height;
2621 } else {
2622 dst_dim.height = src_dim.height;
2623 }
2624 if (hdr_output_crop && crop.width) {
2625 dst_dim.width = crop.width;
2626 } else {
2627 dst_dim.width = src_dim.width;
2628 }
2629
2630 // main dim
2631 jpg_job.encode_job.main_dim.src_dim = src_dim;
2632 jpg_job.encode_job.main_dim.dst_dim = dst_dim;
2633 jpg_job.encode_job.main_dim.crop = crop;
2634
2635 // get 3a sw version info
2636 cam_q3a_version_t sw_version =
2637 m_parent->getCamHalCapabilities()->q3a_version;
2638
2639 // get exif data
2640 QCameraExif *pJpegExifObj = m_parent->getExifData();
2641 jpeg_job_data->pJpegExifObj = pJpegExifObj;
2642 if (pJpegExifObj != NULL) {
2643 jpg_job.encode_job.exif_info.exif_data = pJpegExifObj->getEntries();
2644 jpg_job.encode_job.exif_info.numOfEntries =
2645 pJpegExifObj->getNumOfEntries();
2646 jpg_job.encode_job.exif_info.debug_data.sw_3a_version[0] =
2647 sw_version.major_version;
2648 jpg_job.encode_job.exif_info.debug_data.sw_3a_version[1] =
2649 sw_version.minor_version;
2650 jpg_job.encode_job.exif_info.debug_data.sw_3a_version[2] =
2651 sw_version.patch_version;
2652 jpg_job.encode_job.exif_info.debug_data.sw_3a_version[3] =
2653 sw_version.new_feature_des;
2654 }
2655
2656 // set rotation only when no online rotation or offline pp rotation was done before
2657 uint32_t jpeg_rotation = m_parent->mParameters.getJpegRotation();
2658 if (!m_parent->needRotationReprocess()) {
2659 jpg_job.encode_job.rotation = jpeg_rotation;
2660 }
2661 LOGH("jpeg rotation is set to %d", jpg_job.encode_job.rotation);
2662
2663 // thumbnail dim
2664 if (m_bThumbnailNeeded == TRUE) {
2665 m_parent->getThumbnailSize(jpg_job.encode_job.thumb_dim.dst_dim);
2666
2667 if (thumb_stream == NULL) {
2668 // need jpeg thumbnail, but no postview/preview stream exists
2669 // we use the main stream/frame to encode thumbnail
2670 thumb_stream = main_stream;
2671 thumb_frame = main_frame;
2672 }
2673 if (m_parent->needRotationReprocess() &&
2674 ((90 == jpeg_rotation) || (270 == jpeg_rotation))) {
2675 // swap thumbnail dimensions
2676 cam_dimension_t tmp_dim = jpg_job.encode_job.thumb_dim.dst_dim;
2677 jpg_job.encode_job.thumb_dim.dst_dim.width = tmp_dim.height;
2678 jpg_job.encode_job.thumb_dim.dst_dim.height = tmp_dim.width;
2679 }
2680
2681 memset(&src_dim, 0, sizeof(cam_dimension_t));
2682 thumb_stream->getFrameDimension(src_dim);
2683 jpg_job.encode_job.thumb_dim.src_dim = src_dim;
2684
2685 // crop is the same if frame is the same
2686 if (thumb_frame != main_frame) {
2687 crop.left = 0;
2688 crop.top = 0;
2689 crop.height = src_dim.height;
2690 crop.width = src_dim.width;
2691
2692 param = thumb_stream->getOutputCrop();
2693 for (int i = 0; i < param.outputCrop.num_of_streams; i++) {
2694 if (param.outputCrop.crop_info[i].stream_id
2695 == thumb_stream->getMyServerID()) {
2696 crop = param.outputCrop.crop_info[i].crop;
2697 thumb_stream->setCropInfo(crop);
2698 }
2699 }
2700 }
2701
2702
2703 jpg_job.encode_job.thumb_dim.crop = crop;
2704 if (thumb_frame != NULL) {
2705 jpg_job.encode_job.thumb_index = thumb_frame->buf_idx;
2706 }
2707 LOGI("Thumbnail idx = %d src w/h (%dx%d), dst w/h (%dx%d)",
2708 jpg_job.encode_job.thumb_index,
2709 jpg_job.encode_job.thumb_dim.src_dim.width,
2710 jpg_job.encode_job.thumb_dim.src_dim.height,
2711 jpg_job.encode_job.thumb_dim.dst_dim.width,
2712 jpg_job.encode_job.thumb_dim.dst_dim.height);
2713 }
2714
2715 LOGI("Main image idx = %d src w/h (%dx%d), dst w/h (%dx%d)",
2716 jpg_job.encode_job.src_index,
2717 jpg_job.encode_job.main_dim.src_dim.width,
2718 jpg_job.encode_job.main_dim.src_dim.height,
2719 jpg_job.encode_job.main_dim.dst_dim.width,
2720 jpg_job.encode_job.main_dim.dst_dim.height);
2721
2722 if (thumb_frame != NULL) {
2723 // dump thumbnail frame if enabled
2724 m_parent->dumpFrameToFile(thumb_stream, thumb_frame, QCAMERA_DUMP_FRM_THUMBNAIL);
2725 }
2726
2727 if (jpeg_job_data->metadata != NULL) {
2728 // fill in meta data frame ptr
2729 jpg_job.encode_job.p_metadata = jpeg_job_data->metadata;
2730 }
2731
2732 jpg_job.encode_job.hal_version = CAM_HAL_V1;
2733 m_parent->mExifParams.sensor_params.sens_type = m_parent->getSensorType();
2734 jpg_job.encode_job.cam_exif_params = m_parent->mExifParams;
2735 jpg_job.encode_job.cam_exif_params.debug_params =
2736 (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
2737 if (!jpg_job.encode_job.cam_exif_params.debug_params) {
2738 LOGE("Out of Memory. Allocation failed for 3A debug exif params");
2739 return NO_MEMORY;
2740 }
2741
2742 jpg_job.encode_job.mobicat_mask = m_parent->mParameters.getMobicatMask();
2743
2744
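// When the mobicat mask is set, copy the 3A and stats debug blobs into the
// metadata buffer that is handed to the encoder.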
2745 if (NULL != jpg_job.encode_job.p_metadata && (jpg_job.encode_job.mobicat_mask > 0)) {
2746
2747 if (m_parent->mExifParams.debug_params) {
2748 memcpy(jpg_job.encode_job.cam_exif_params.debug_params,
2749 m_parent->mExifParams.debug_params, (sizeof(mm_jpeg_debug_exif_params_t)));
2750
2751 /* Save a copy of mobicat params */
2752 jpg_job.encode_job.p_metadata->is_mobicat_aec_params_valid =
2753 jpg_job.encode_job.cam_exif_params.cam_3a_params_valid;
2754
2755 if (jpg_job.encode_job.cam_exif_params.cam_3a_params_valid) {
2756 jpg_job.encode_job.p_metadata->mobicat_aec_params =
2757 jpg_job.encode_job.cam_exif_params.cam_3a_params;
2758 }
2759
2760 /* Save a copy of 3A debug params */
2761 jpg_job.encode_job.p_metadata->is_statsdebug_ae_params_valid =
2762 jpg_job.encode_job.cam_exif_params.debug_params->ae_debug_params_valid;
2763 jpg_job.encode_job.p_metadata->is_statsdebug_awb_params_valid =
2764 jpg_job.encode_job.cam_exif_params.debug_params->awb_debug_params_valid;
2765 jpg_job.encode_job.p_metadata->is_statsdebug_af_params_valid =
2766 jpg_job.encode_job.cam_exif_params.debug_params->af_debug_params_valid;
2767 jpg_job.encode_job.p_metadata->is_statsdebug_asd_params_valid =
2768 jpg_job.encode_job.cam_exif_params.debug_params->asd_debug_params_valid;
2769 jpg_job.encode_job.p_metadata->is_statsdebug_stats_params_valid =
2770 jpg_job.encode_job.cam_exif_params.debug_params->stats_debug_params_valid;
2771 jpg_job.encode_job.p_metadata->is_statsdebug_bestats_params_valid =
2772 jpg_job.encode_job.cam_exif_params.debug_params->bestats_debug_params_valid;
2773 jpg_job.encode_job.p_metadata->is_statsdebug_bhist_params_valid =
2774 jpg_job.encode_job.cam_exif_params.debug_params->bhist_debug_params_valid;
2775 jpg_job.encode_job.p_metadata->is_statsdebug_3a_tuning_params_valid =
2776 jpg_job.encode_job.cam_exif_params.debug_params->q3a_tuning_debug_params_valid;
2777
2778 if (jpg_job.encode_job.cam_exif_params.debug_params->ae_debug_params_valid) {
2779 jpg_job.encode_job.p_metadata->statsdebug_ae_data =
2780 jpg_job.encode_job.cam_exif_params.debug_params->ae_debug_params;
2781 }
2782 if (jpg_job.encode_job.cam_exif_params.debug_params->awb_debug_params_valid) {
2783 jpg_job.encode_job.p_metadata->statsdebug_awb_data =
2784 jpg_job.encode_job.cam_exif_params.debug_params->awb_debug_params;
2785 }
2786 if (jpg_job.encode_job.cam_exif_params.debug_params->af_debug_params_valid) {
2787 jpg_job.encode_job.p_metadata->statsdebug_af_data =
2788 jpg_job.encode_job.cam_exif_params.debug_params->af_debug_params;
2789 }
2790 if (jpg_job.encode_job.cam_exif_params.debug_params->asd_debug_params_valid) {
2791 jpg_job.encode_job.p_metadata->statsdebug_asd_data =
2792 jpg_job.encode_job.cam_exif_params.debug_params->asd_debug_params;
2793 }
2794 if (jpg_job.encode_job.cam_exif_params.debug_params->stats_debug_params_valid) {
2795 jpg_job.encode_job.p_metadata->statsdebug_stats_buffer_data =
2796 jpg_job.encode_job.cam_exif_params.debug_params->stats_debug_params;
2797 }
2798 if (jpg_job.encode_job.cam_exif_params.debug_params->bestats_debug_params_valid) {
2799 jpg_job.encode_job.p_metadata->statsdebug_bestats_buffer_data =
2800 jpg_job.encode_job.cam_exif_params.debug_params->bestats_debug_params;
2801 }
2802 if (jpg_job.encode_job.cam_exif_params.debug_params->bhist_debug_params_valid) {
2803 jpg_job.encode_job.p_metadata->statsdebug_bhist_data =
2804 jpg_job.encode_job.cam_exif_params.debug_params->bhist_debug_params;
2805 }
2806 if (jpg_job.encode_job.cam_exif_params.debug_params->q3a_tuning_debug_params_valid) {
2807 jpg_job.encode_job.p_metadata->statsdebug_3a_tuning_data =
2808 jpg_job.encode_job.cam_exif_params.debug_params->q3a_tuning_debug_params;
2809 }
2810 }
2811
2812 }
2813
2814 /* Init the QTable */
2815 for (int i = 0; i < QTABLE_MAX; i++) {
2816 jpg_job.encode_job.qtable_set[i] = 0;
2817 }
2818
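// For a synced dual-camera capture with MPO composition enabled, tag this
// encode as part of a two-image MPO (main camera = primary, aux = secondary);
// otherwise encode a standalone JPEG.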
2819 const cam_sync_related_sensors_event_info_t* related_cam_info =
2820 m_parent->getRelatedCamSyncInfo();
2821 if (related_cam_info->sync_control == CAM_SYNC_RELATED_SENSORS_ON &&
2822 m_parent->getMpoComposition()) {
2823 jpg_job.encode_job.multi_image_info.type = MM_JPEG_TYPE_MPO;
2824 if (related_cam_info->type == CAM_TYPE_MAIN ) {
2825 jpg_job.encode_job.multi_image_info.is_primary = TRUE;
2826 LOGD("Encoding MPO Primary JPEG");
2827 } else {
2828 jpg_job.encode_job.multi_image_info.is_primary = FALSE;
2829 LOGD("Encoding MPO Aux JPEG");
2830 }
2831 jpg_job.encode_job.multi_image_info.num_of_images = 2;
2832 } else {
2833 LOGD("Encoding Single JPEG");
2834 jpg_job.encode_job.multi_image_info.type = MM_JPEG_TYPE_JPEG;
2835 jpg_job.encode_job.multi_image_info.is_primary = FALSE;
2836 jpg_job.encode_job.multi_image_info.num_of_images = 1;
2837 }
2838 //Do Cache ops before sending to encode
2839 if (main_frame != NULL) {
2840 main_stream->handleCacheOps(main_frame);
2841 }
2842 if ((thumb_stream != NULL) && (thumb_frame != NULL)) {
2843 thumb_stream->handleCacheOps(thumb_frame);
2844 }
2845
2846 LOGI("[KPI Perf] : PROFILE_JPEG_JOB_START");
2847 ret = mJpegHandle.start_job(&jpg_job, &jobId);
2848 if (jpg_job.encode_job.cam_exif_params.debug_params) {
2849 free(jpg_job.encode_job.cam_exif_params.debug_params);
2850 }
2851 if (ret == NO_ERROR) {
2852 // remember job info
2853 jpeg_job_data->jobId = jobId;
2854 }
2855
2856 return ret;
2857 }
2858
2859 /*===========================================================================
2860 * FUNCTION : processRawImageImpl
2861 *
2862 * DESCRIPTION: function to send raw image to upper layer
2863 *
2864 * PARAMETERS :
2865 * @recvd_frame : frame to be encoded
2866 *
2867 * RETURN : int32_t type of status
2868 * NO_ERROR -- success
2869 * non-zero failure code
2870 *==========================================================================*/
2871 int32_t QCameraPostProcessor::processRawImageImpl(mm_camera_super_buf_t *recvd_frame)
2872 {
2873 int32_t rc = NO_ERROR;
2874
2875 QCameraChannel *pChannel = m_parent->getChannelByHandle(recvd_frame->ch_id);
2876 QCameraStream *pStream = NULL;
2877 mm_camera_buf_def_t *frame = NULL;
2878 // check reprocess channel if not found
2879 if (pChannel == NULL) {
2880 for (int8_t i = 0; i < mPPChannelCount; i++) {
2881 if ((mPPChannels[i] != NULL) &&
2882 (validate_handle(mPPChannels[i]->getMyHandle(), recvd_frame->ch_id))) {
2883 pChannel = mPPChannels[i];
2884 break;
2885 }
2886 }
2887 }
2888 if (pChannel == NULL) {
2889 LOGE("No corresponding channel (ch_id = %d) exist, return here",
2890 recvd_frame->ch_id);
2891 return BAD_VALUE;
2892 }
2893
2894 // find snapshot frame
2895 for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
2896 QCameraStream *pCurStream =
2897 pChannel->getStreamByHandle(recvd_frame->bufs[i]->stream_id);
2898 if (pCurStream != NULL) {
2899 if (pCurStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
2900 pCurStream->isTypeOf(CAM_STREAM_TYPE_RAW) ||
2901 pCurStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
2902 pCurStream->isOrignalTypeOf(CAM_STREAM_TYPE_RAW)) {
2903 pStream = pCurStream;
2904 frame = recvd_frame->bufs[i];
2905 break;
2906 }
2907 }
2908 }
2909
2910 if ( NULL == frame ) {
2911 LOGE("No valid raw buffer");
2912 return BAD_VALUE;
2913 }
2914
2915 QCameraMemory *rawMemObj = (QCameraMemory *)frame->mem_info;
2916 bool zslChannelUsed = m_parent->isZSLMode() &&
2917 ( pChannel != mPPChannels[0] );
2918 camera_memory_t *raw_mem = NULL;
2919
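// When running ZSL and the frame did not come from the first reprocess
// channel, the existing stream memory can be handed up directly; otherwise
// the raw data is copied into a freshly allocated callback buffer.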
2920 if (rawMemObj != NULL) {
2921 if (zslChannelUsed) {
2922 raw_mem = rawMemObj->getMemory(frame->buf_idx, false);
2923 } else {
2924 raw_mem = m_parent->mGetMemory(-1,
2925 frame->frame_len,
2926 1,
2927 m_parent->mCallbackCookie);
2928 if (NULL == raw_mem) {
2929 LOGE("Not enough memory for RAW cb ");
2930 return NO_MEMORY;
2931 }
2932 memcpy(raw_mem->data, frame->buffer, frame->frame_len);
2933 }
2934 }
2935
2936 if (NULL != rawMemObj && NULL != raw_mem) {
2937 // dump frame into file
2938 if (frame->stream_type == CAM_STREAM_TYPE_SNAPSHOT ||
2939 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
2940 // for YUV422 NV16 case
2941 m_parent->dumpFrameToFile(pStream, frame, QCAMERA_DUMP_FRM_INPUT_JPEG);
2942 } else {
2943 //Received RAW snapshot taken notification
2944 m_parent->dumpFrameToFile(pStream, frame, QCAMERA_DUMP_FRM_RAW);
2945
2946 if(true == m_parent->m_bIntRawEvtPending) {
2947 //Sending RAW snapshot taken notification to HAL
2948 memset(&m_dst_dim, 0, sizeof(m_dst_dim));
2949 pStream->getFrameDimension(m_dst_dim);
2950 pthread_mutex_lock(&m_parent->m_int_lock);
2951 pthread_cond_signal(&m_parent->m_int_cond);
2952 pthread_mutex_unlock(&m_parent->m_int_lock);
2953 raw_mem->release(raw_mem);
2954 return rc;
2955 }
2956 }
2957
2958 // send data callback / notify for RAW_IMAGE
2959 if (NULL != m_parent->mDataCb &&
2960 m_parent->msgTypeEnabledWithLock(CAMERA_MSG_RAW_IMAGE) > 0) {
2961 qcamera_callback_argm_t cbArg;
2962 memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
2963 cbArg.cb_type = QCAMERA_DATA_CALLBACK;
2964 cbArg.msg_type = CAMERA_MSG_RAW_IMAGE;
2965 cbArg.data = raw_mem;
2966 cbArg.index = 0;
2967 frame->cache_flags |= CPU_HAS_READ;
2968 m_parent->m_cbNotifier.notifyCallback(cbArg);
2969 }
2970 if (NULL != m_parent->mNotifyCb &&
2971 m_parent->msgTypeEnabledWithLock(CAMERA_MSG_RAW_IMAGE_NOTIFY) > 0) {
2972 qcamera_callback_argm_t cbArg;
2973 memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
2974 cbArg.cb_type = QCAMERA_NOTIFY_CALLBACK;
2975 cbArg.msg_type = CAMERA_MSG_RAW_IMAGE_NOTIFY;
2976 cbArg.ext1 = 0;
2977 cbArg.ext2 = 0;
2978 frame->cache_flags |= CPU_HAS_READ;
2979 m_parent->m_cbNotifier.notifyCallback(cbArg);
2980 }
2981
2982 if ((m_parent->mDataCb != NULL) &&
2983 m_parent->msgTypeEnabledWithLock(CAMERA_MSG_COMPRESSED_IMAGE) > 0) {
2984 qcamera_release_data_t release_data;
2985 memset(&release_data, 0, sizeof(qcamera_release_data_t));
2986 if ( zslChannelUsed ) {
2987 release_data.frame = recvd_frame;
2988 } else {
2989 release_data.data = raw_mem;
2990 }
2991 rc = sendDataNotify(CAMERA_MSG_COMPRESSED_IMAGE,
2992 raw_mem,
2993 0,
2994 NULL,
2995 &release_data);
2996 } else {
2997 raw_mem->release(raw_mem);
2998 }
2999 } else {
3000 LOGE("Cannot get raw mem");
3001 rc = UNKNOWN_ERROR;
3002 }
3003
3004 return rc;
3005 }
3006
3007 /*===========================================================================
3008 * FUNCTION : dataSaveRoutine
3009 *
3010 * DESCRIPTION: data saving routine
3011 *
3012 * PARAMETERS :
3013 * @data : user data ptr (QCameraPostProcessor)
3014 *
3015 * RETURN : None
3016 *==========================================================================*/
3017 void *QCameraPostProcessor::dataSaveRoutine(void *data)
3018 {
3019 int running = 1;
3020 int ret;
3021 uint8_t is_active = FALSE;
3022 QCameraPostProcessor *pme = (QCameraPostProcessor *)data;
3023 QCameraCmdThread *cmdThread = &pme->m_saveProcTh;
3024 cmdThread->setName("CAM_JpegSave");
3025 char saveName[PROPERTY_VALUE_MAX];
3026
3027 LOGH("E");
3028 do {
3029 do {
3030 ret = cam_sem_wait(&cmdThread->cmd_sem);
3031 if (ret != 0 && errno != EINVAL) {
3032 LOGE("cam_sem_wait error (%s)",
3033 strerror(errno));
3034 return NULL;
3035 }
3036 } while (ret != 0);
3037
3038 // we got notified about new cmd avail in cmd queue
3039 camera_cmd_type_t cmd = cmdThread->getCmd();
3040 switch (cmd) {
3041 case CAMERA_CMD_TYPE_START_DATA_PROC:
3042 LOGH("start data proc");
3043 is_active = TRUE;
3044 pme->m_inputSaveQ.init();
3045 break;
3046 case CAMERA_CMD_TYPE_STOP_DATA_PROC:
3047 {
3048 LOGH("stop data proc");
3049 is_active = FALSE;
3050
3051 // flush input save Queue
3052 pme->m_inputSaveQ.flush();
3053
3054 // signal cmd is completed
3055 cam_sem_post(&cmdThread->sync_sem);
3056 }
3057 break;
3058 case CAMERA_CMD_TYPE_DO_NEXT_JOB:
3059 {
3060 LOGH("Do next job, active is %d", is_active);
3061
3062 qcamera_jpeg_evt_payload_t *job_data = (qcamera_jpeg_evt_payload_t *) pme->m_inputSaveQ.dequeue();
3063 if (job_data == NULL) {
3064 LOGE("Invalid jpeg event data");
3065 continue;
3066 }
3067 //qcamera_jpeg_data_t *jpeg_job =
3068 // (qcamera_jpeg_data_t *)pme->m_ongoingJpegQ.dequeue(false);
3069 //uint32_t frame_idx = jpeg_job->src_frame->bufs[0]->frame_idx;
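// NOTE: the frame index lookup above is commented out; a fixed placeholder
// value is used for the callback instead.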
3070 uint32_t frame_idx = 75;
3071
3072 pme->m_ongoingJpegQ.flushNodes(matchJobId, (void*)&job_data->jobId);
3073
3074 LOGH("[KPI Perf] : jpeg job %d", job_data->jobId);
3075
3076 if (is_active == TRUE) {
3077 memset(saveName, '\0', sizeof(saveName));
3078 snprintf(saveName,
3079 sizeof(saveName),
3080 QCameraPostProcessor::STORE_LOCATION,
3081 pme->mSaveFrmCnt);
3082
3083 int file_fd = open(saveName, O_RDWR | O_CREAT, 0655);
3084 if (file_fd >= 0) {
3085 ssize_t written_len = write(file_fd, job_data->out_data.buf_vaddr,
3086 job_data->out_data.buf_filled_len);
3087 if ((ssize_t)job_data->out_data.buf_filled_len != written_len) {
3088 LOGE("Failed to save complete data: %zd bytes "
3089 "written instead of %d bytes!",
3090 written_len,
3091 job_data->out_data.buf_filled_len);
3092 } else {
3093 LOGH("written number of bytes %zd\n",
3094 written_len);
3095 }
3096
3097 close(file_fd);
3098 } else {
3099 LOGE("failed to open file for saving");
3100 }
3101 pme->mSaveFrmCnt++;
3102
3103 camera_memory_t* jpeg_mem = pme->m_parent->mGetMemory(-1,
3104 strlen(saveName),
3105 1,
3106 pme->m_parent->mCallbackCookie);
3107 if (NULL == jpeg_mem) {
3108 ret = NO_MEMORY;
3109 LOGE("getMemory for jpeg, ret = NO_MEMORY");
3110 goto end;
3111 }
3112 memcpy(jpeg_mem->data, saveName, strlen(saveName));
3113
3114 LOGH("Calling upperlayer callback to store JPEG image");
3115 qcamera_release_data_t release_data;
3116 memset(&release_data, 0, sizeof(qcamera_release_data_t));
3117 release_data.data = jpeg_mem;
3118 release_data.unlinkFile = true;
3119 LOGI("[KPI Perf]: PROFILE_JPEG_CB ");
3120 ret = pme->sendDataNotify(CAMERA_MSG_COMPRESSED_IMAGE,
3121 jpeg_mem,
3122 0,
3123 NULL,
3124 &release_data,
3125 frame_idx);
3126 }
3127
3128 end:
3129 free(job_data);
3130 }
3131 break;
3132 case CAMERA_CMD_TYPE_EXIT:
3133 LOGH("save thread exit");
3134 running = 0;
3135 break;
3136 default:
3137 break;
3138 }
3139 } while (running);
3140 LOGH("X");
3141 return NULL;
3142 }
3143
3144 /*===========================================================================
3145 * FUNCTION : dataProcessRoutine
3146 *
3147 * DESCRIPTION: data process routine that handles input data either from input
3148 * Jpeg Queue to do jpeg encoding, or from input PP Queue to do
3149 * reprocess.
3150 *
3151 * PARAMETERS :
3152 * @data : user data ptr (QCameraPostProcessor)
3153 *
3154 * RETURN : None
3155 *==========================================================================*/
3156 void *QCameraPostProcessor::dataProcessRoutine(void *data)
3157 {
3158 int running = 1;
3159 int ret;
3160 uint8_t is_active = FALSE;
3161 QCameraPostProcessor *pme = (QCameraPostProcessor *)data;
3162 QCameraCmdThread *cmdThread = &pme->m_dataProcTh;
3163 cmdThread->setName("CAM_DataProc");
3164
3165 LOGH("E");
3166 do {
3167 do {
3168 ret = cam_sem_wait(&cmdThread->cmd_sem);
3169 if (ret != 0 && errno != EINVAL) {
3170 LOGE("cam_sem_wait error (%s)",
3171 strerror(errno));
3172 return NULL;
3173 }
3174 } while (ret != 0);
3175
3176 // we got notified about new cmd avail in cmd queue
3177 camera_cmd_type_t cmd = cmdThread->getCmd();
3178 switch (cmd) {
3179 case CAMERA_CMD_TYPE_START_DATA_PROC:
3180 LOGH("start data proc");
3181 is_active = TRUE;
3182
3183 pme->m_ongoingPPQ.init();
3184 pme->m_inputJpegQ.init();
3185 pme->m_inputPPQ.init();
3186 pme->m_inputRawQ.init();
3187 if (pme->m_halPP != NULL) {
3188 pme->m_halPP->initQ();
3189 }
3190 pme->m_saveProcTh.sendCmd(CAMERA_CMD_TYPE_START_DATA_PROC,
3191 FALSE,
3192 FALSE);
3193
3194 // signal cmd is completed
3195 cam_sem_post(&cmdThread->sync_sem);
3196
3197 break;
3198 case CAMERA_CMD_TYPE_STOP_DATA_PROC:
3199 {
3200 LOGH("stop data proc");
3201 is_active = FALSE;
3202
3203 pme->m_saveProcTh.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC,
3204 TRUE,
3205 TRUE);
3206 // cancel all ongoing jpeg jobs
3207 qcamera_jpeg_data_t *jpeg_job =
3208 (qcamera_jpeg_data_t *)pme->m_ongoingJpegQ.dequeue();
3209 while (jpeg_job != NULL) {
3210 pme->mJpegHandle.abort_job(jpeg_job->jobId);
3211
3212 pme->releaseJpegJobData(jpeg_job);
3213 free(jpeg_job);
3214
3215 jpeg_job = (qcamera_jpeg_data_t *)pme->m_ongoingJpegQ.dequeue();
3216 }
3217
3218 // destroy jpeg encoding session
3219 if ( 0 < pme->mJpegSessionId ) {
3220 pme->mJpegHandle.destroy_session(pme->mJpegSessionId);
3221 pme->mJpegSessionId = 0;
3222 }
3223
3224 // free jpeg out buf and exif obj
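// (the extra braces scope the loop counter declared inside the macro)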
3225 {FREE_JPEG_OUTPUT_BUFFER(pme->m_pJpegOutputMem,
3226 pme->m_JpegOutputMemCount);}
3227
3228 // destroy hal pp jpeg encoding session
3229 if ( 0 < pme->mJpegSessionIdHalPP) {
3230 LOGH("destroying hal pp jpeg session:%d", pme->mJpegSessionIdHalPP);
3231 pme->mJpegHandle.destroy_session(pme->mJpegSessionIdHalPP);
3232 pme->mJpegSessionIdHalPP = 0;
3233 }
3234
3235 {FREE_JPEG_OUTPUT_BUFFER(pme->m_pJpegOutputMemHalPP,
3236 pme->m_JpegOutputMemCountHALPP);}
3237
3238 if (pme->m_pJpegExifObj != NULL) {
3239 delete pme->m_pJpegExifObj;
3240 pme->m_pJpegExifObj = NULL;
3241 }
3242
3243 // flush ongoing postproc Queue
3244 pme->m_ongoingPPQ.flush();
3245
3246 // flush input jpeg Queue
3247 pme->m_inputJpegQ.flush();
3248
3249 // flush input Postproc Queue
3250 pme->m_inputPPQ.flush();
3251
3252 // flush input raw Queue
3253 pme->m_inputRawQ.flush();
3254
3255 // flush m_halPP
3256 if (pme->m_halPP != NULL) {
3257 pme->m_halPP->flushQ();
3258 }
3259 // signal cmd is completed
3260 cam_sem_post(&cmdThread->sync_sem);
3261
3262 pme->pJpegSrcStream = NULL;
3263 pme->mNewJpegSessionNeeded = true;
3264 pme->mNewJpegSessionNeededHalPP = true;
3265 }
3266 break;
3267 case CAMERA_CMD_TYPE_DO_NEXT_JOB:
3268 {
3269 LOGH("Do next job, active is %d", is_active);
3270 if (is_active == TRUE) {
3271 qcamera_jpeg_data_t *jpeg_job = NULL;
3272
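// For dual camera, serialize encoding: only pull a new job from the input
// queue once the ongoing jpeg queue has drained.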
3273 if ((!pme->m_parent->isDualCamera()) ||
3274 (pme->m_ongoingJpegQ.isEmpty())) {
3275 jpeg_job = (qcamera_jpeg_data_t *)pme->m_inputJpegQ.dequeue();
3276 }
3277
3278 if (NULL != jpeg_job) {
3279 // To avoid any race conditions,
3280 // sync any stream specific parameters here.
3281 if (pme->m_parent->mParameters.isAdvCamFeaturesEnabled()) {
3282 // Sync stream params, only if advanced features configured
3283 // Reduces the latency for normal snapshot.
3284 pme->syncStreamParams(jpeg_job->src_frame, NULL);
3285 }
3286
3287 // add into ongoing jpeg job Q
3288 if (pme->m_ongoingJpegQ.enqueue((void *)jpeg_job)) {
3289 if (jpeg_job->halPPAllocatedBuf) {
3290 LOGD("buffer is allocated from HAL PP.");
3291 ret = pme->encodeData(jpeg_job, pme->mNewJpegSessionNeededHalPP);
3292 } else {
3293 ret = pme->encodeData(jpeg_job, pme->mNewJpegSessionNeeded);
3294 }
3295 if (NO_ERROR != ret) {
3296 // dequeue the last one
3297 pme->m_ongoingJpegQ.dequeue(false);
3298 pme->releaseJpegJobData(jpeg_job);
3299 free(jpeg_job);
3300 jpeg_job = NULL;
3301 pme->sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
3302 }
3303 } else {
3304 LOGW("m_ongoingJpegQ is not active!!!");
3305 pme->releaseJpegJobData(jpeg_job);
3306 free(jpeg_job);
3307 jpeg_job = NULL;
3308 }
3309 }
3310
3311 // Process HAL PP data if ready
3312 if (pme->m_halPP != NULL) {
3313 pme->m_halPP->process();
3314 }
3315
3316 // process raw data if any
3317 mm_camera_super_buf_t *super_buf =
3318 (mm_camera_super_buf_t *)pme->m_inputRawQ.dequeue();
3319
3320 if (NULL != super_buf) {
3321 //play shutter sound
3322 pme->m_parent->playShutter();
3323 ret = pme->processRawImageImpl(super_buf);
3324 if (NO_ERROR != ret) {
3325 pme->releaseSuperBuf(super_buf);
3326 free(super_buf);
3327 pme->sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
3328 }
3329 }
3330
3331 ret = pme->doReprocess();
3332 if (NO_ERROR != ret) {
3333 pme->sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
3334 } else {
3335 ret = pme->stopCapture();
3336 }
3337
3338 } else {
3339                 // not active, simply return the buffers without processing
3340 qcamera_jpeg_data_t *jpeg_data =
3341 (qcamera_jpeg_data_t *)pme->m_inputJpegQ.dequeue();
3342 if (NULL != jpeg_data) {
3343 pme->releaseJpegJobData(jpeg_data);
3344 free(jpeg_data);
3345 }
3346 mm_camera_super_buf_t *super_buf =
3347 (mm_camera_super_buf_t *)pme->m_inputRawQ.dequeue();
3348 if (NULL != super_buf) {
3349 pme->releaseSuperBuf(super_buf);
3350 free(super_buf);
3351 }
3352
3353 // flush input Postproc Queue
3354 pme->m_inputPPQ.flush();
3355 }
3356 }
3357 break;
3358 case CAMERA_CMD_TYPE_EXIT:
3359 running = 0;
3360 break;
3361 default:
3362 break;
3363 }
3364 } while (running);
3365 LOGH("X");
3366 return NULL;
3367 }
3368
3369 /*===========================================================================
3370 * FUNCTION : doReprocess
3371 *
3372 * DESCRIPTION: Trigger channel reprocessing
3373 *
3374 * PARAMETERS :None
3375 *
3376 * RETURN : int32_t type of status
3377 * NO_ERROR -- success
3378 *              non-zero failure code
3379 *==========================================================================*/
3380 int32_t QCameraPostProcessor::doReprocess()
3381 {
3382 int32_t ret = NO_ERROR;
3383 QCameraChannel *m_pSrcChannel = NULL;
3384 QCameraStream *pMetaStream = NULL;
3385 uint8_t meta_buf_index = 0;
3386 mm_camera_buf_def_t *meta_buf = NULL;
3387 mm_camera_super_buf_t *ppInputFrame = NULL;
3388
3389 qcamera_pp_data_t *ppreq_job = (qcamera_pp_data_t *)m_inputPPQ.peek();
3390 if ((ppreq_job == NULL) || (ppreq_job->src_frame == NULL)) {
3391 return ret;
3392 }
3393
3394 if (!validatePostProcess(ppreq_job->src_frame)) {
3395 return ret;
3396 }
3397
3398 ppreq_job = (qcamera_pp_data_t *)m_inputPPQ.dequeue();
3399 if (ppreq_job == NULL || ppreq_job->src_frame == NULL ||
3400 ppreq_job->src_reproc_frame == NULL) {
3401 return ret;
3402 }
3403
3404 mm_camera_super_buf_t *src_frame = ppreq_job->src_frame;
3405 mm_camera_super_buf_t *src_reproc_frame = ppreq_job->src_reproc_frame;
3406 int8_t mCurReprocCount = ppreq_job->reprocCount;
3407 int8_t mCurChannelIdx = ppreq_job->ppChannelIndex;
3408
3409     LOGD("src_frame = %p src_reproc_frame = %p mCurReprocCount = %d mCurChannelIdx = %d",
3410             src_frame, src_reproc_frame, mCurReprocCount, mCurChannelIdx);
3411
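    // In manual capture mode (CAM_MANUAL_CAPTURE_TYPE_3 and above) the first
    // reprocess pass (channel 0) takes the original source frame as input;
    // otherwise the frame handed in for reprocessing is used.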
3412 if ((m_parent->mParameters.getManualCaptureMode() >=
3413 CAM_MANUAL_CAPTURE_TYPE_3) && (mCurChannelIdx == 0)) {
3414 ppInputFrame = src_reproc_frame;
3415 } else {
3416 ppInputFrame = src_frame;
3417 }
3418
3419 if (mPPChannelCount >= CAM_PP_CHANNEL_MAX) {
3420 LOGE("invalid channel count");
3421 return UNKNOWN_ERROR;
3422 }
3423
3424 // find meta data stream and index of meta data frame in the superbuf
3425 for (int8_t j = 0; j < mPPChannelCount; j++) {
3426 /*First search in src buffer for any offline metadata */
3427 for (uint32_t i = 0; i < src_frame->num_bufs; i++) {
3428 QCameraStream *pStream = mPPChannels[j]->getStreamByHandle(
3429 src_frame->bufs[i]->stream_id);
3430 if (pStream != NULL && pStream->isOrignalTypeOf(CAM_STREAM_TYPE_METADATA)) {
3431 meta_buf_index = (uint8_t) src_frame->bufs[i]->buf_idx;
3432 pMetaStream = pStream;
3433 meta_buf = src_frame->bufs[i];
3434 break;
3435 }
3436 }
3437
3438 if ((pMetaStream != NULL) && (meta_buf != NULL)) {
3439 LOGD("Found Offline stream metadata = %d",
3440 (int)meta_buf_index);
3441 break;
3442 }
3443 }
3444
3445 if ((pMetaStream == NULL) && (meta_buf == NULL)) {
3446 for (int8_t j = 0; j < mPPChannelCount; j++) {
3447 m_pSrcChannel = mPPChannels[j]->getSrcChannel();
3448 if (m_pSrcChannel == NULL)
3449 continue;
3450 for (uint32_t i = 0; i < src_reproc_frame->num_bufs; i++) {
3451 QCameraStream *pStream =
3452 m_pSrcChannel->getStreamByHandle(
3453 src_reproc_frame->bufs[i]->stream_id);
3454 if (pStream != NULL && pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
3455 meta_buf_index = (uint8_t) src_reproc_frame->bufs[i]->buf_idx;
3456 pMetaStream = pStream;
3457 meta_buf = src_reproc_frame->bufs[i];
3458 break;
3459 }
3460 }
3461 if ((pMetaStream != NULL) && (meta_buf != NULL)) {
3462 LOGD("Found Meta data info for reprocessing index = %d",
3463 (int)meta_buf_index);
3464 break;
3465 }
3466 }
3467 }
3468
3469 if (m_parent->mParameters.isAdvCamFeaturesEnabled()) {
3470         // Sync stream params, only if advanced features are configured
3471         // Reduces the latency for normal snapshot.
3472 syncStreamParams(src_frame, src_reproc_frame);
3473 }
3474
3475 if (mPPChannels[mCurChannelIdx] != NULL) {
3476 // add into ongoing PP job Q
3477 ppreq_job->reprocCount = (int8_t) (mCurReprocCount + 1);
3478
3479 if ((m_parent->needOfflineReprocessing()) || (ppreq_job->offline_buffer)) {
3480 m_bufCountPPQ++;
3481 if (m_ongoingPPQ.enqueue((void *)ppreq_job)) {
3482 pthread_mutex_lock(&m_reprocess_lock);
3483 ret = mPPChannels[mCurChannelIdx]->doReprocessOffline(ppInputFrame,
3484 meta_buf, m_parent->mParameters);
3485 if (ret != NO_ERROR) {
3486 pthread_mutex_unlock(&m_reprocess_lock);
3487 goto end;
3488 }
3489
3490 if ((ppreq_job->offline_buffer) &&
3491 (ppreq_job->offline_reproc_buf)) {
3492 mPPChannels[mCurChannelIdx]->doReprocessOffline(
3493 ppreq_job->offline_reproc_buf, meta_buf);
3494 }
3495 pthread_mutex_unlock(&m_reprocess_lock);
3496 } else {
3497 LOGW("m_ongoingPPQ is not active!!!");
3498 ret = UNKNOWN_ERROR;
3499 goto end;
3500 }
3501 } else {
3502 m_bufCountPPQ++;
3503 if (!m_ongoingPPQ.enqueue((void *)ppreq_job)) {
3504                 LOGW("m_ongoingPPQ is not active!!!");
3505 ret = UNKNOWN_ERROR;
3506 goto end;
3507 }
3508
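            // A single shot may produce more output buffers than input buffers;
            // once a full set of inputs has been queued, enqueue placeholder PP
            // jobs so every expected output has an entry in m_ongoingPPQ.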
3509 int32_t numRequiredPPQBufsForSingleOutput = (int32_t)
3510 m_parent->mParameters.getNumberInBufsForSingleShot();
3511
3512 if (m_bufCountPPQ % numRequiredPPQBufsForSingleOutput == 0) {
3513 int32_t extra_pp_job_count =
3514 m_parent->mParameters.getNumberOutBufsForSingleShot() -
3515 m_parent->mParameters.getNumberInBufsForSingleShot();
3516
3517 for (int32_t i = 0; i < extra_pp_job_count; i++) {
3518 qcamera_pp_data_t *extra_pp_job =
3519 (qcamera_pp_data_t *)calloc(1, sizeof(qcamera_pp_data_t));
3520 if (!extra_pp_job) {
3521 LOGE("no mem for qcamera_pp_data_t");
3522 ret = NO_MEMORY;
3523 break;
3524 }
3525 extra_pp_job->reprocCount = ppreq_job->reprocCount;
3526 if (!m_ongoingPPQ.enqueue((void *)extra_pp_job)) {
3527                         LOGW("m_ongoingPPQ is not active!!!");
3528 releaseOngoingPPData(extra_pp_job, this);
3529 free(extra_pp_job);
3530 extra_pp_job = NULL;
3531 goto end;
3532 }
3533 }
3534 }
3535
3536 ret = mPPChannels[mCurChannelIdx]->doReprocess(ppInputFrame,
3537 m_parent->mParameters, pMetaStream, meta_buf_index);
3538 }
3539 } else {
3540 LOGE("Reprocess channel is NULL");
3541 ret = UNKNOWN_ERROR;
3542 }
3543
3544 end:
3545 if (ret != NO_ERROR) {
3546 releaseOngoingPPData(ppreq_job, this);
3547 if (ppreq_job != NULL) {
3548 free(ppreq_job);
3549 ppreq_job = NULL;
3550 }
3551 }
3552 return ret;
3553 }
3554
3555 /*===========================================================================
3556 * FUNCTION : getReprocChannel
3557 *
3558 * DESCRIPTION: Returns reprocessing channel handle
3559 *
3560 * PARAMETERS : index for reprocessing array
3561 *
3562 * RETURN : QCameraReprocessChannel * type of pointer
3563 *              NULL if no reprocessing channel
3564 *==========================================================================*/
3565 QCameraReprocessChannel * QCameraPostProcessor::getReprocChannel(uint8_t index)
3566 {
3567 if (index >= mPPChannelCount) {
3568 LOGE("Invalid index value");
3569 return NULL;
3570 }
3571 return mPPChannels[index];
3572 }
3573
3574 /*===========================================================================
3575 * FUNCTION : stopCapture
3576 *
3577 * DESCRIPTION: Trigger image capture stop
3578 *
3579 * PARAMETERS :
3580 * None
3581 *
3582 * RETURN : int32_t type of status
3583 * NO_ERROR -- success
3584 *              non-zero failure code
3585 *==========================================================================*/
3586 int32_t QCameraPostProcessor::stopCapture()
3587 {
3588 int rc = NO_ERROR;
3589
3590 if (m_parent->isRegularCapture()) {
3591 rc = m_parent->processAPI(
3592 QCAMERA_SM_EVT_STOP_CAPTURE_CHANNEL,
3593 NULL);
3594 }
3595 return rc;
3596 }
3597
3598 /*===========================================================================
3599 * FUNCTION : getJpegPaddingReq
3600 *
3601  * DESCRIPTION: function to get the JPEG padding requirements for the encoder
3602 *
3603 * PARAMETERS :
3604 * @padding_info : jpeg specific padding requirement
3605 *
3606 * RETURN : int32_t type of status
3607 * NO_ERROR -- success
3608 *              non-zero failure code
3609 *==========================================================================*/
3610 int32_t QCameraPostProcessor::getJpegPaddingReq(cam_padding_info_t &padding_info)
3611 {
3612     // TODO: hardcoded for now; should be queried from mm-jpeg-interface
3613 padding_info.width_padding = CAM_PAD_NONE;
3614 padding_info.height_padding = CAM_PAD_TO_16;
3615 padding_info.plane_padding = CAM_PAD_TO_WORD;
3616 padding_info.offset_info.offset_x = 0;
3617 padding_info.offset_info.offset_y = 0;
3618 return NO_ERROR;
3619 }
3620
3621 /*===========================================================================
3622 * FUNCTION : setYUVFrameInfo
3623 *
3624  * DESCRIPTION: set raw YUV frame data info for the upper layer
3625 *
3626 * PARAMETERS :
3627 * @frame : process frame received from mm-camera-interface
3628 *
3629 * RETURN : int32_t type of status
3630 * NO_ERROR -- success
3631 *              non-zero failure code
3632 *
3633  * NOTE : currently logs the frame dimension, y offset, cbcr offset and frame format of the main snapshot stream
3634 *==========================================================================*/
3635 int32_t QCameraPostProcessor::setYUVFrameInfo(mm_camera_super_buf_t *recvd_frame)
3636 {
3637 QCameraChannel *pChannel = m_parent->getChannelByHandle(recvd_frame->ch_id);
3638 // check reprocess channel if not found
3639 if (pChannel == NULL) {
3640 for (int8_t i = 0; i < mPPChannelCount; i++) {
3641 if ((mPPChannels[i] != NULL) &&
3642 (validate_handle(mPPChannels[i]->getMyHandle(), recvd_frame->ch_id))) {
3643 pChannel = mPPChannels[i];
3644 break;
3645 }
3646 }
3647 }
3648
3649 if (pChannel == NULL) {
3650         LOGE("No corresponding channel (ch_id = %d) exists, return here",
3651 recvd_frame->ch_id);
3652 return BAD_VALUE;
3653 }
3654
3655 // find snapshot frame
3656 for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
3657 QCameraStream *pStream =
3658 pChannel->getStreamByHandle(recvd_frame->bufs[i]->stream_id);
3659 if (pStream != NULL) {
3660 if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
3661 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
3662 //get the main frame, use stream info
3663 cam_frame_len_offset_t frame_offset;
3664 cam_dimension_t frame_dim;
3665 cam_format_t frame_fmt;
3666 const char *fmt_string;
3667 pStream->getFrameDimension(frame_dim);
3668 pStream->getFrameOffset(frame_offset);
3669 pStream->getFormat(frame_fmt);
3670 fmt_string = m_parent->mParameters.getFrameFmtString(frame_fmt);
3671
3672 int cbcr_offset = (int32_t)frame_offset.mp[0].len -
3673 frame_dim.width * frame_dim.height;
3674
3675 LOGH("frame width=%d, height=%d, yoff=%d, cbcroff=%d, fmt_string=%s",
3676 frame_dim.width, frame_dim.height, frame_offset.mp[0].offset, cbcr_offset, fmt_string);
3677 return NO_ERROR;
3678 }
3679 }
3680 }
3681
3682 return BAD_VALUE;
3683 }
3684
3685 bool QCameraPostProcessor::matchJobId(void *data, void *, void *match_data)
3686 {
3687 qcamera_jpeg_data_t * job = (qcamera_jpeg_data_t *) data;
3688 uint32_t job_id = *((uint32_t *) match_data);
3689 return job->jobId == job_id;
3690 }
3691
3692 /*===========================================================================
3693 * FUNCTION : getJpegMemory
3694 *
3695 * DESCRIPTION: buffer allocation function
3696 * to pass to jpeg interface
3697 *
3698 * PARAMETERS :
3699 * @out_buf : buffer descriptor struct
3700 *
3701 * RETURN : int32_t type of status
3702 * NO_ERROR -- success
3703 *              non-zero failure code
3704 *==========================================================================*/
3705 int QCameraPostProcessor::getJpegMemory(omx_jpeg_ouput_buf_t *out_buf)
3706 {
3707 LOGH("Allocating jpeg out buffer of size: %d", out_buf->size);
3708 QCameraPostProcessor *procInst = (QCameraPostProcessor *) out_buf->handle;
3709     camera_memory_t *cam_mem = procInst->m_parent->mGetMemory(out_buf->fd, out_buf->size, 1U,
3710             procInst->m_parent->mCallbackCookie);
    // Guard against allocation failure before dereferencing the handle
    if (cam_mem == NULL) {
        LOGE("Failed to allocate jpeg output buffer memory");
        return NO_MEMORY;
    }
3711     out_buf->mem_hdl = cam_mem;
3712     out_buf->vaddr = cam_mem->data;
3713
3714 return 0;
3715 }
3716
3717 /*===========================================================================
3718 * FUNCTION : releaseJpegMemory
3719 *
3720 * DESCRIPTION: release jpeg memory function
3721 * to pass to jpeg interface, in case of abort
3722 *
3723 * PARAMETERS :
3724 * @out_buf : buffer descriptor struct
3725 *
3726 * RETURN : int32_t type of status
3727 * NO_ERROR -- success
3728 *              non-zero failure code
3729 *==========================================================================*/
3730 int QCameraPostProcessor::releaseJpegMemory(omx_jpeg_ouput_buf_t *out_buf)
3731 {
3732 if (out_buf && out_buf->mem_hdl) {
3733 LOGD("releasing jpeg out buffer of size: %d", out_buf->size);
3734 camera_memory_t *cam_mem = (camera_memory_t*)out_buf->mem_hdl;
3735 cam_mem->release(cam_mem);
3736 out_buf->mem_hdl = NULL;
3737 out_buf->vaddr = NULL;
3738 return NO_ERROR;
3739 }
3740 return -1;
3741 }
3742
3743 /*===========================================================================
3744 * FUNCTION : QCameraExif
3745 *
3746 * DESCRIPTION: constructor of QCameraExif
3747 *
3748 * PARAMETERS : None
3749 *
3750 * RETURN : None
3751 *==========================================================================*/
3752 QCameraExif::QCameraExif()
3753 : m_nNumEntries(0)
3754 {
3755 memset(m_Entries, 0, sizeof(m_Entries));
3756 }
3757
3758 /*===========================================================================
3759 * FUNCTION : ~QCameraExif
3760 *
3761  * DESCRIPTION: destructor of QCameraExif. Will release internal memory ptrs.
3762 *
3763 * PARAMETERS : None
3764 *
3765 * RETURN : None
3766 *==========================================================================*/
3767 QCameraExif::~QCameraExif()
3768 {
3769 for (uint32_t i = 0; i < m_nNumEntries; i++) {
3770 switch (m_Entries[i].tag_entry.type) {
3771 case EXIF_BYTE:
3772 {
3773 if (m_Entries[i].tag_entry.count > 1 &&
3774 m_Entries[i].tag_entry.data._bytes != NULL) {
3775 free(m_Entries[i].tag_entry.data._bytes);
3776 m_Entries[i].tag_entry.data._bytes = NULL;
3777 }
3778 }
3779 break;
3780 case EXIF_ASCII:
3781 {
3782 if (m_Entries[i].tag_entry.data._ascii != NULL) {
3783 free(m_Entries[i].tag_entry.data._ascii);
3784 m_Entries[i].tag_entry.data._ascii = NULL;
3785 }
3786 }
3787 break;
3788 case EXIF_SHORT:
3789 {
3790 if (m_Entries[i].tag_entry.count > 1 &&
3791 m_Entries[i].tag_entry.data._shorts != NULL) {
3792 free(m_Entries[i].tag_entry.data._shorts);
3793 m_Entries[i].tag_entry.data._shorts = NULL;
3794 }
3795 }
3796 break;
3797 case EXIF_LONG:
3798 {
3799 if (m_Entries[i].tag_entry.count > 1 &&
3800 m_Entries[i].tag_entry.data._longs != NULL) {
3801 free(m_Entries[i].tag_entry.data._longs);
3802 m_Entries[i].tag_entry.data._longs = NULL;
3803 }
3804 }
3805 break;
3806 case EXIF_RATIONAL:
3807 {
3808 if (m_Entries[i].tag_entry.count > 1 &&
3809 m_Entries[i].tag_entry.data._rats != NULL) {
3810 free(m_Entries[i].tag_entry.data._rats);
3811 m_Entries[i].tag_entry.data._rats = NULL;
3812 }
3813 }
3814 break;
3815 case EXIF_UNDEFINED:
3816 {
3817 if (m_Entries[i].tag_entry.data._undefined != NULL) {
3818 free(m_Entries[i].tag_entry.data._undefined);
3819 m_Entries[i].tag_entry.data._undefined = NULL;
3820 }
3821 }
3822 break;
3823 case EXIF_SLONG:
3824 {
3825 if (m_Entries[i].tag_entry.count > 1 &&
3826 m_Entries[i].tag_entry.data._slongs != NULL) {
3827 free(m_Entries[i].tag_entry.data._slongs);
3828 m_Entries[i].tag_entry.data._slongs = NULL;
3829 }
3830 }
3831 break;
3832 case EXIF_SRATIONAL:
3833 {
3834 if (m_Entries[i].tag_entry.count > 1 &&
3835 m_Entries[i].tag_entry.data._srats != NULL) {
3836 free(m_Entries[i].tag_entry.data._srats);
3837 m_Entries[i].tag_entry.data._srats = NULL;
3838 }
3839 }
3840 break;
3841 }
3842 }
3843 }
3844
3845 /*===========================================================================
3846 * FUNCTION : addEntry
3847 *
3848 * DESCRIPTION: function to add an entry to exif data
3849 *
3850 * PARAMETERS :
3851 * @tagid : exif tag ID
3852 * @type : data type
3853  *   @count : number of data items in units of its type
3854 * @data : input data ptr
3855 *
3856 * RETURN : int32_t type of status
3857 * NO_ERROR -- success
3858 *              non-zero failure code
3859 *==========================================================================*/
3860 int32_t QCameraExif::addEntry(exif_tag_id_t tagid,
3861 exif_tag_type_t type,
3862 uint32_t count,
3863 void *data)
3864 {
3865 int32_t rc = NO_ERROR;
3866 if(m_nNumEntries >= MAX_EXIF_TABLE_ENTRIES) {
3867 LOGE("Number of entries exceeded limit");
3868 return NO_MEMORY;
3869 }
3870
3871 m_Entries[m_nNumEntries].tag_id = tagid;
3872 m_Entries[m_nNumEntries].tag_entry.type = type;
3873 m_Entries[m_nNumEntries].tag_entry.count = count;
3874 m_Entries[m_nNumEntries].tag_entry.copy = 1;
3875 switch (type) {
3876 case EXIF_BYTE:
3877 {
3878 if (count > 1) {
3879 uint8_t *values = (uint8_t *)malloc(count);
3880 if (values == NULL) {
3881 LOGE("No memory for byte array");
3882 rc = NO_MEMORY;
3883 } else {
3884 memcpy(values, data, count);
3885 m_Entries[m_nNumEntries].tag_entry.data._bytes = values;
3886 }
3887 } else {
3888 m_Entries[m_nNumEntries].tag_entry.data._byte = *(uint8_t *)data;
3889 }
3890 }
3891 break;
3892 case EXIF_ASCII:
3893 {
3894 char *str = NULL;
3895 str = (char *)malloc(count + 1);
3896 if (str == NULL) {
3897 LOGE("No memory for ascii string");
3898 rc = NO_MEMORY;
3899 } else {
3900 memset(str, 0, count + 1);
3901 memcpy(str, data, count);
3902 m_Entries[m_nNumEntries].tag_entry.data._ascii = str;
3903 }
3904 }
3905 break;
3906 case EXIF_SHORT:
3907 {
3908 uint16_t *exif_data = (uint16_t *)data;
3909 if (count > 1) {
3910 uint16_t *values = (uint16_t *)malloc(count * sizeof(uint16_t));
3911 if (values == NULL) {
3912 LOGE("No memory for short array");
3913 rc = NO_MEMORY;
3914 } else {
3915 memcpy(values, exif_data, count * sizeof(uint16_t));
3916 m_Entries[m_nNumEntries].tag_entry.data._shorts = values;
3917 }
3918 } else {
3919 m_Entries[m_nNumEntries].tag_entry.data._short = *(uint16_t *)data;
3920 }
3921 }
3922 break;
3923 case EXIF_LONG:
3924 {
3925 uint32_t *exif_data = (uint32_t *)data;
3926 if (count > 1) {
3927 uint32_t *values = (uint32_t *)malloc(count * sizeof(uint32_t));
3928 if (values == NULL) {
3929 LOGE("No memory for long array");
3930 rc = NO_MEMORY;
3931 } else {
3932 memcpy(values, exif_data, count * sizeof(uint32_t));
3933 m_Entries[m_nNumEntries].tag_entry.data._longs = values;
3934 }
3935 } else {
3936 m_Entries[m_nNumEntries].tag_entry.data._long = *(uint32_t *)data;
3937 }
3938 }
3939 break;
3940 case EXIF_RATIONAL:
3941 {
3942 rat_t *exif_data = (rat_t *)data;
3943 if (count > 1) {
3944 rat_t *values = (rat_t *)malloc(count * sizeof(rat_t));
3945 if (values == NULL) {
3946 LOGE("No memory for rational array");
3947 rc = NO_MEMORY;
3948 } else {
3949 memcpy(values, exif_data, count * sizeof(rat_t));
3950 m_Entries[m_nNumEntries].tag_entry.data._rats = values;
3951 }
3952 } else {
3953 m_Entries[m_nNumEntries].tag_entry.data._rat = *(rat_t *)data;
3954 }
3955 }
3956 break;
3957 case EXIF_UNDEFINED:
3958 {
3959 uint8_t *values = (uint8_t *)malloc(count);
3960 if (values == NULL) {
3961 LOGE("No memory for undefined array");
3962 rc = NO_MEMORY;
3963 } else {
3964 memcpy(values, data, count);
3965 m_Entries[m_nNumEntries].tag_entry.data._undefined = values;
3966 }
3967 }
3968 break;
3969 case EXIF_SLONG:
3970 {
3971             int32_t *exif_data = (int32_t *)data;
3972 if (count > 1) {
3973 int32_t *values = (int32_t *)malloc(count * sizeof(int32_t));
3974 if (values == NULL) {
3975 LOGE("No memory for signed long array");
3976 rc = NO_MEMORY;
3977 } else {
3978 memcpy(values, exif_data, count * sizeof(int32_t));
3979 m_Entries[m_nNumEntries].tag_entry.data._slongs = values;
3980 }
3981 } else {
3982 m_Entries[m_nNumEntries].tag_entry.data._slong = *(int32_t *)data;
3983 }
3984 }
3985 break;
3986 case EXIF_SRATIONAL:
3987 {
3988 srat_t *exif_data = (srat_t *)data;
3989 if (count > 1) {
3990 srat_t *values = (srat_t *)malloc(count * sizeof(srat_t));
3991 if (values == NULL) {
3992 LOGE("No memory for signed rational array");
3993 rc = NO_MEMORY;
3994 } else {
3995 memcpy(values, exif_data, count * sizeof(srat_t));
3996 m_Entries[m_nNumEntries].tag_entry.data._srats = values;
3997 }
3998 } else {
3999 m_Entries[m_nNumEntries].tag_entry.data._srat = *(srat_t *)data;
4000 }
4001 }
4002 break;
4003 }
4004
4005 // Increase number of entries
4006 m_nNumEntries++;
4007 return rc;
4008 }
4009
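/*
 * Usage sketch (illustrative only, not called from this file): adding a
 * single EXIF_SHORT entry. The tag ID macro below is an assumption for the
 * example; real callers pass IDs from the exif tag definitions.
 *
 *   QCameraExif *exifObj = new QCameraExif();
 *   uint16_t orientation = 1;  // 1 == top-left
 *   exifObj->addEntry(EXIFTAGID_ORIENTATION, EXIF_SHORT, 1, &orientation);
 *
 * For count == 1 the value is stored inline; for count > 1 addEntry copies
 * the data into a heap array that ~QCameraExif() later frees.
 */
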
4010 /*===========================================================================
4011 * FUNCTION : processHalPPDataCB
4012 *
4013 * DESCRIPTION: callback function to process frame after HAL PP block
4014 *
4015 * PARAMETERS :
4016  *   @pOutput : output data after HAL PP processing
4017  *   @pUserData : user data ptr (QCameraPostProcessor)
4018 *
4019 * RETURN : None
4020 *==========================================================================*/
4021 void QCameraPostProcessor::processHalPPDataCB(qcamera_hal_pp_data_t *pOutput, void* pUserData)
4022 {
4023 QCameraPostProcessor *pme = (QCameraPostProcessor *)pUserData;
4024 pme->processHalPPData(pOutput);
4025 }
4026
4027 /*===========================================================================
4028 * FUNCTION : processHalPPData
4029 *
4030 * DESCRIPTION: process received frame after HAL PP block.
4031 *
4032 * PARAMETERS :
4033 * @pData : received qcamera_hal_pp_data_t data from HAL PP callback.
4034 *
4035 * RETURN : int32_t type of status
4036 * NO_ERROR -- success
4037 *              non-zero failure code
4038 *
4039  * NOTE : The frame output by HAL PP needs to be sent to JPEG encoding.
4040 *==========================================================================*/
4041 int32_t QCameraPostProcessor::processHalPPData(qcamera_hal_pp_data_t *pData)
4042 {
4043 int32_t rc = NO_ERROR;
4044 LOGD("E");
4045 if (m_bInited == FALSE) {
4046 LOGE("postproc not initialized yet");
4047 return UNKNOWN_ERROR;
4048 }
4049 if (pData == NULL) {
4050 LOGE("HAL PP processed data is NULL");
4051 return BAD_VALUE;
4052 }
4053 mm_camera_super_buf_t *frame = pData->frame;
4054 if (frame == NULL) {
4055 LOGE("HAL PP processed frame is NULL");
4056 return BAD_VALUE;
4057 }
4058 // send to JPEG encoding
4059 qcamera_jpeg_data_t *jpeg_job =
4060 (qcamera_jpeg_data_t *)malloc(sizeof(qcamera_jpeg_data_t));
4061 if (jpeg_job == NULL) {
4062 LOGE("No memory for jpeg job");
4063 return NO_MEMORY;
4064 }
4065
4066 memset(jpeg_job, 0, sizeof(qcamera_jpeg_data_t));
4067 jpeg_job->src_frame = frame;
4068 jpeg_job->halPPAllocatedBuf = pData->halPPAllocatedBuf;
4069 jpeg_job->hal_pp_bufs = pData->bufs;
4070 jpeg_job->snapshot_heap = pData->snapshot_heap;
4071 jpeg_job->metadata_heap = pData->metadata_heap;
4072 jpeg_job->src_reproc_frame = pData->src_reproc_frame;
4073 jpeg_job->src_reproc_bufs = pData->src_reproc_bufs;
4074 jpeg_job->reproc_frame_release = pData->reproc_frame_release;
4075 jpeg_job->offline_reproc_buf = pData->offline_reproc_buf;
4076 jpeg_job->offline_buffer = pData->offline_buffer;
4077 LOGD("halPPAllocatedBuf = %d", pData->halPPAllocatedBuf);
4078 LOGD("src_reproc_frame:%p", jpeg_job->src_reproc_frame);
4079
4080 if (!jpeg_job->halPPAllocatedBuf) {
4081 // check if to encode hal pp input buffer
4082 char prop[PROPERTY_VALUE_MAX];
4083 memset(prop, 0, sizeof(prop));
4084 property_get("persist.camera.dualfov.jpegnum", prop, "1");
4085 int dualfov_snap_num = atoi(prop);
4086 if (dualfov_snap_num == 1) {
4087             LOGH("No need to encode input buffer, just release it.");
4088 releaseJpegJobData(jpeg_job);
4089 free(jpeg_job);
4090 jpeg_job = NULL;
4091 return NO_ERROR;
4092 }
4093 }
4094
4095 // find meta data frame
4096 mm_camera_buf_def_t *meta_frame = NULL;
4097 // look through reprocess superbuf
4098 for (uint32_t i = 0; i < frame->num_bufs; i++) {
4099 if (frame->bufs[i]->stream_type == CAM_STREAM_TYPE_METADATA) {
4100 meta_frame = frame->bufs[i];
4101 break;
4102 }
4103 }
4104 if (meta_frame != NULL) {
4105 // fill in meta data frame ptr
4106 jpeg_job->metadata = (metadata_buffer_t *)meta_frame->buffer;
4107 }
4108 // Enqueue frame to jpeg input queue
4109 if (false == m_inputJpegQ.enqueue((void *)jpeg_job)) {
4110 LOGW("Input Jpeg Q is not active!!!");
4111 releaseJpegJobData(jpeg_job);
4112 free(jpeg_job);
4113 jpeg_job = NULL;
4114 }
4115
4116 // wake up data proc thread
4117 m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
4118 LOGD("X");
4119 return rc;
4120 }
4121
4122 /*===========================================================================
4123 * FUNCTION : getHalPPOutputBufferCB
4124 *
4125 * DESCRIPTION: callback function to request output buffer
4126 *
4127 * PARAMETERS :
4128  *   @frameIndex : frame index to be attached to the output data
4129  *   @pUserData : user data ptr (QCameraPostProcessor)
4130 *
4131 * RETURN : None
4132 *==========================================================================*/
4133 void QCameraPostProcessor::getHalPPOutputBufferCB(uint32_t frameIndex, void* pUserData)
4134 {
4135 QCameraPostProcessor *pme = (QCameraPostProcessor *)pUserData;
4136 pme->getHalPPOutputBuffer(frameIndex);
4137 }
4138
4139 /*===========================================================================
4140 * FUNCTION : getHalPPOutputBuffer
4141 *
4142  * DESCRIPTION: function to allocate and feed an output buffer to the HAL PP block
4143  * PARAMETERS :
4144  * @frameIndex : frame index to be attached to the output buffer
4145 * RETURN : None
4146 *==========================================================================*/
4147 void QCameraPostProcessor::getHalPPOutputBuffer(uint32_t frameIndex)
4148 {
4149 LOGD("E. Allocate HAL PP Output buffer");
4150 qcamera_hal_pp_data_t *output_data =
4151 (qcamera_hal_pp_data_t*) malloc(sizeof(qcamera_hal_pp_data_t));
4152 if (output_data == NULL) {
4153 LOGE("No memory for qcamera_hal_pp_data_t output data");
4154 return;
4155 }
4156 memset(output_data, 0, sizeof(qcamera_hal_pp_data_t));
4157 mm_camera_super_buf_t* output_frame =
4158 (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
4159 if (output_frame == NULL) {
4160 LOGE("No memory for mm_camera_super_buf_t frame");
4161 free(output_data);
4162 return;
4163 }
4164 memset(output_frame, 0, sizeof(mm_camera_super_buf_t));
4165 output_data->frame = output_frame;
4166 output_data->bufs =
4167 (mm_camera_buf_def_t *)malloc(HAL_PP_NUM_BUFS * sizeof(mm_camera_buf_def_t));
4168 if (output_data->bufs == NULL) {
4169 LOGE("No memory for output_data->bufs");
4170 free(output_frame);
4171 free(output_data);
4172 return;
4173 }
4174 memset(output_data->bufs, 0, HAL_PP_NUM_BUFS * sizeof(mm_camera_buf_def_t));
4175 output_data->halPPAllocatedBuf = true;
4176 output_data->snapshot_heap = new QCameraHeapMemory(QCAMERA_ION_USE_CACHE);
4177 if (output_data->snapshot_heap == NULL) {
4178 LOGE("Unable to new heap memory obj for image buf");
4179 free(output_frame);
4180 free(output_data->bufs);
4181 free(output_data);
4182 return;
4183 }
4184 output_data->metadata_heap = new QCameraHeapMemory(QCAMERA_ION_USE_CACHE);
4185 if (output_data->metadata_heap == NULL) {
4186 LOGE("Unable to new heap memory obj for metadata buf");
4187 delete output_data->snapshot_heap;
4188 free(output_frame);
4189 free(output_data->bufs);
4190 free(output_data);
4191 return;
4192 }
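    // Ownership of output_data, its super buffer and the heaps allocated above
    // passes to the HAL PP block here; the filled output comes back through
    // processHalPPDataCB once processing completes.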
4193 output_data->frameIndex = frameIndex;
4194 m_halPP->feedOutput(output_data);
4195 }
4196
4197 /*===========================================================================
4198 * FUNCTION : getChannelByHandle
4199 *
4200 * DESCRIPTION: function to get channel by handle
4201 * PARAMETERS :
4202 * @channelHandle : channel handle
4203 * RETURN : QCameraChannel
4204 *==========================================================================*/
4205 QCameraChannel *QCameraPostProcessor::getChannelByHandle(uint32_t channelHandle)
4206 {
4207 QCameraChannel *pChannel = m_parent->getChannelByHandle(channelHandle);
4208 // check reprocess channel if not found
4209 if (pChannel == NULL) {
4210 for (int8_t i = 0; i < mPPChannelCount; i++) {
4211 if ((mPPChannels[i] != NULL) &&
4212 (validate_handle(mPPChannels[i]->getMyHandle(), channelHandle))) {
4213 pChannel = mPPChannels[i];
4214 break;
4215 }
4216 }
4217 }
4218 return pChannel;
4219 }
4220
4221 /*===========================================================================
4222 * FUNCTION : initHALPP
4223 *
4224 * DESCRIPTION: function to create and init HALPP block
4225  * RETURN     : int32_t type of status (NO_ERROR on success, non-zero on failure)
4226 *==========================================================================*/
4227 int32_t QCameraPostProcessor::initHALPP()
4228 {
4229 int32_t rc = NO_ERROR;
4230 void *staticParam = NULL;
4231
4232 LOGD("E. m_halPPType:%d", m_halPPType);
4233
4234 switch (m_halPPType) {
4235 case QCAMERA_HAL_PP_TYPE_DUAL_FOV:
4236 m_halPP = new QCameraDualFOVPP();
4237 staticParam = (void*)m_parent->getCamHalCapabilities();
4238 break;
4239 case QCAMERA_HAL_PP_TYPE_BOKEH:
4240 case QCAMERA_HAL_PP_TYPE_CLEARSIGHT:
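        // Not implemented yet; m_halPP stays NULL, so the init() call below is skipped.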
4241 break;
4242 default:
4243 break;
4244 }
4245
4246 if (m_halPP != NULL) {
4247 rc = m_halPP->init(QCameraPostProcessor::processHalPPDataCB,
4248 QCameraPostProcessor::getHalPPOutputBufferCB, this, staticParam);
4249 if (rc != NO_ERROR) {
4250 LOGE("HAL PP type %d init failed, rc = %d", m_halPPType, rc);
4251 }
4252 }
4253
4254 return rc;
4255 }
4256 }; // namespace qcamera
4257