/*
 * Copyright 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <utils/Log.h>

#define DEBUG  0
#if DEBUG
#  define  DDD(...)    ALOGD(__VA_ARGS__)
#else
#  define  DDD(...)    ((void)0)
#endif

#include "GoldfishAVCDec.h"

#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MediaDefs.h>
#include <OMX_VideoExt.h>
#include <inttypes.h>

#include <nativebase/nativebase.h>

#include <android/hardware/graphics/allocator/3.0/IAllocator.h>
#include <android/hardware/graphics/mapper/3.0/IMapper.h>
#include <hidl/LegacySupport.h>

using ::android::hardware::graphics::common::V1_2::PixelFormat;
using ::android::hardware::graphics::common::V1_0::BufferUsage;

namespace android {

#define componentName                   "video_decoder.avc"
#define codingType                      OMX_VIDEO_CodingAVC
#define CODEC_MIME_TYPE                 MEDIA_MIMETYPE_VIDEO_AVC

/** Function and structure definitions to keep code similar for each codec */
#define ivdec_api_function              ih264d_api_function
#define ivdext_create_ip_t              ih264d_create_ip_t
#define ivdext_create_op_t              ih264d_create_op_t
#define ivdext_delete_ip_t              ih264d_delete_ip_t
#define ivdext_delete_op_t              ih264d_delete_op_t
#define ivdext_ctl_set_num_cores_ip_t   ih264d_ctl_set_num_cores_ip_t
#define ivdext_ctl_set_num_cores_op_t   ih264d_ctl_set_num_cores_op_t

#define IVDEXT_CMD_CTL_SET_NUM_CORES    \
        (IVD_CONTROL_API_COMMAND_TYPE_T)IH264D_CMD_CTL_SET_NUM_CORES

static const CodecProfileLevel kProfileLevels[] = {
    { OMX_VIDEO_AVCProfileConstrainedBaseline, OMX_VIDEO_AVCLevel52 },
    { OMX_VIDEO_AVCProfileBaseline,            OMX_VIDEO_AVCLevel52 },
    { OMX_VIDEO_AVCProfileMain,                OMX_VIDEO_AVCLevel52 },
    { OMX_VIDEO_AVCProfileConstrainedHigh,     OMX_VIDEO_AVCLevel52 },
    { OMX_VIDEO_AVCProfileHigh,                OMX_VIDEO_AVCLevel52 },
};

GoldfishAVCDec::GoldfishAVCDec(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component, RenderMode renderMode)
    : GoldfishVideoDecoderOMXComponent(
            name, componentName, codingType,
            kProfileLevels, ARRAY_SIZE(kProfileLevels),
            320 /* width */, 240 /* height */, callbacks,
            appData, component),
      mOmxColorFormat(OMX_COLOR_FormatYUV420Planar),
      mChangingResolution(false),
      mSignalledError(false),
      mInputOffset(0), mRenderMode(renderMode) {
    initPorts(
            1 /* numMinInputBuffers */, kNumBuffers, INPUT_BUF_SIZE,
            1 /* numMinOutputBuffers */, kNumBuffers, CODEC_MIME_TYPE);

    mTimeStart = mTimeEnd = systemTime();

    // If input dump is enabled, create an empty dump file.
    GENERATE_FILE_NAMES();
    CREATE_DUMP_FILE(mInFile);
    ALOGI("created %s %d object %p", __func__, __LINE__, this);
}

GoldfishAVCDec::~GoldfishAVCDec() {
    CHECK_EQ(deInitDecoder(), (status_t)OK);
    DDD("destroyed %s %d object %p", __func__, __LINE__, this);
}

void GoldfishAVCDec::logVersion() {
    // TODO: get the emulation decoder implementation version from the host.
    ALOGI("GoldfishAVC decoder version 1.0");
}

status_t GoldfishAVCDec::resetPlugin() {
    mIsInFlush = false;
    mReceivedEOS = false;

    /* Initialize both start and end times */
    mTimeStart = mTimeEnd = systemTime();

    return OK;
}

status_t GoldfishAVCDec::resetDecoder() {
    if (mContext) {
        // The resolution may have changed, so our safest bet is to just
        // destroy the current context and recreate another one, with the
        // new width and height.
        mContext->destroyH264Context();
        mContext.reset(nullptr);
    }
    return OK;
}

status_t GoldfishAVCDec::setFlushMode() {
    /* Set the decoder in Flush mode, subsequent decode() calls will flush */
    mIsInFlush = true;
    mContext->flush();
    return OK;
}

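// Lazily creates the host-side H.264 decoding context. When Android native
// buffers are not enabled, the component falls back to guest-CPU rendering:
// decoded frames are copied back into the OMX output buffer instead of being
// rendered directly into a host color buffer.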
status_t GoldfishAVCDec::initDecoder() {
    /* Initialize the decoder */
    if (mEnableAndroidNativeBuffers == false) {
        mRenderMode = RenderMode::RENDER_BY_GUEST_CPU;
    }
    mContext.reset(new MediaH264Decoder(mRenderMode));
    mContext->initH264Context(mWidth,
                              mHeight,
                              mWidth,
                              mHeight,
                              MediaH264Decoder::PixelFormat::YUV420P);

    /* Reset the plugin state */
    resetPlugin();

    /* Get codec version */
    logVersion();

    return OK;
}

status_t GoldfishAVCDec::deInitDecoder() {
    if (mContext) {
        mContext->destroyH264Context();
        mContext.reset();
    }

    mChangingResolution = false;

    return OK;
}

void GoldfishAVCDec::onReset() {
    GoldfishVideoDecoderOMXComponent::onReset();

    mSignalledError = false;
    mInputOffset = 0;
    resetDecoder();
    resetPlugin();
}

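// Reads the VUI colour description (primaries, transfer, matrix coefficients
// and range) that the host decoder reports for this image, converts it to
// Android ColorAspects, and triggers a colour-aspects-change notification when
// the bitstream aspects differ from what was last seen.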
bool GoldfishAVCDec::getVUIParams(h264_image_t& img) {
    int32_t primaries = img.color_primaries;
    bool fullRange = (img.color_range == 2);
    int32_t transfer = img.color_trc;
    int32_t coeffs = img.colorspace;

    ColorAspects colorAspects;
    ColorUtils::convertIsoColorAspectsToCodecAspects(
            primaries, transfer, coeffs, fullRange, colorAspects);

    DDD("img pts %lld, primaries %d, range %d transfer %d colorspace %d", (long long)img.pts,
            (int)img.color_primaries, (int)img.color_range, (int)img.color_trc, (int)img.colorspace);

    // Update color aspects if necessary.
    if (colorAspectsDiffer(colorAspects, mBitstreamColorAspects)) {
        mBitstreamColorAspects = colorAspects;
        status_t err = handleColorAspectsChange();
        CHECK(err == OK);
    }
    return true;
}

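// Prepares the input/output pointers for one decode call. The output buffer
// must be large enough to hold a full YUV420 frame at the padded output
// resolution, i.e. width * height * 3 / 2 bytes (for example, 320x240 needs
// 115200 bytes).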
bool GoldfishAVCDec::setDecodeArgs(
        OMX_BUFFERHEADERTYPE *inHeader,
        OMX_BUFFERHEADERTYPE *outHeader) {
    size_t sizeY = outputBufferWidth() * outputBufferHeight();
    size_t sizeUV = sizeY / 4;

    /* When in flush, and after EOS with zero-byte input,
     * inHeader is NULL. Hence check for non-null. */
    if (inHeader) {
        mConsumedBytes = inHeader->nFilledLen - mInputOffset;
        mInPBuffer = inHeader->pBuffer + inHeader->nOffset + mInputOffset;
        DDD("got input timestamp %lld in-addr-base %p real-data-offset %d inputoffset %d",
                (long long)(inHeader->nTimeStamp),
                inHeader->pBuffer, (int)(inHeader->nOffset + mInputOffset), (int)mInputOffset);
    } else {
        mConsumedBytes = 0;
        mInPBuffer = nullptr;
    }

    if (outHeader) {
        if (outHeader->nAllocLen < sizeY + (sizeUV * 2)) {
            ALOGE("outHeader->nAllocLen %d < needed size %d",
                    (int)outHeader->nAllocLen, (int)(sizeY + sizeUV * 2));
            android_errorWriteLog(0x534e4554, "27833616");
            return false;
        }
        mOutHeaderBuf = outHeader->pBuffer;
    } else {
        // We flush out on the host side
        mOutHeaderBuf = nullptr;
    }

    return true;
}

void GoldfishAVCDec::readAndDiscardAllHostBuffers() {
    while (mContext) {
        h264_image_t img = mContext->getImage();
        if (img.data != nullptr) {
            DDD("img pts %lld is discarded", (long long)img.pts);
        } else {
            return;
        }
    }
}

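// Output-port flush: drain and discard every frame still queued on the host,
// reset the host-side decoder context, and replay the cached codec config
// buffers (SPS/PPS in mCsd0/mCsd1) so decoding can resume without waiting for
// the framework to resend them.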
void GoldfishAVCDec::onPortFlushCompleted(OMX_U32 portIndex) {
    /* Once the output buffers are flushed, ignore any buffers that are held in decoder */
    if (kOutputPortIndex == portIndex) {
        setFlushMode();
        DDD("%s %d", __func__, __LINE__);
        readAndDiscardAllHostBuffers();
        mContext->resetH264Context(mWidth, mHeight, mWidth, mHeight,
                                   MediaH264Decoder::PixelFormat::YUV420P);
        if (!mCsd0.empty() && !mCsd1.empty()) {
            mContext->decodeFrame(&(mCsd0[0]), mCsd0.size(), 0);
            mContext->getImage();
            mContext->decodeFrame(&(mCsd1[0]), mCsd1.size(), 0);
            mContext->getImage();
        }
        resetPlugin();
    } else {
        mInputOffset = 0;
    }
}

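// Copies a decoded frame from the host's tightly packed YUV420P image into the
// OMX output buffer, which may use a larger (padded) stride. Destination plane
// offsets follow the output geometry: the Y plane occupies
// stride * outputBufferHeight() bytes and each chroma plane a quarter of that.
// For example, a 176-pixel-wide frame written into a 192-byte-stride buffer
// copies 176 luma bytes per row at 192-byte steps.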
void GoldfishAVCDec::copyImageData(OMX_BUFFERHEADERTYPE *outHeader, h264_image_t& img) {
    int myStride = outputBufferWidth();
    for (int i = 0; i < mHeight; ++i) {
        memcpy(outHeader->pBuffer + i * myStride, img.data + i * mWidth, mWidth);
    }
    int Y = myStride * outputBufferHeight();
    for (int i = 0; i < mHeight / 2; ++i) {
        memcpy(outHeader->pBuffer + Y + i * myStride / 2,
               img.data + mWidth * mHeight + i * mWidth / 2, mWidth / 2);
    }
    int UV = Y / 4;
    for (int i = 0; i < mHeight / 2; ++i) {
        memcpy(outHeader->pBuffer + Y + UV + i * myStride / 2,
               img.data + mWidth * mHeight * 5 / 4 + i * mWidth / 2, mWidth / 2);
    }
}

int GoldfishAVCDec::getHostColorBufferId(void* header) {
    if (mNWBuffers.find(header) == mNWBuffers.end()) {
        DDD("cannot find color buffer for header %p", header);
        return -1;
    }
    sp<ANativeWindowBuffer> nBuf = mNWBuffers[header];
    cb_handle_t *handle = (cb_handle_t*)nBuf->handle;
    DDD("found color buffer for header %p --> %d", header, handle->hostHandle);
    return handle->hostHandle;
}

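// Main work loop, invoked whenever a port queue has buffers available. For
// each output buffer it (1) pulls the next input buffer, caching codec config
// data and detecting EOS, (2) sends the access unit to the host decoder,
// (3) retrieves a decoded image, either copying it back to the guest or asking
// the host to render into the output buffer's color buffer, and (4) handles
// resolution changes and end-of-stream draining.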
void GoldfishAVCDec::onQueueFilled(OMX_U32 portIndex) {
    static int count1 = 0;
    DDD("calling %s count %d object %p", __func__, ++count1, this);
    UNUSED(portIndex);
    OMX_BUFFERHEADERTYPE *inHeader = NULL;
    BufferInfo *inInfo = NULL;

    if (mSignalledError) {
        return;
    }
    if (mOutputPortSettingsChange != NONE) {
        return;
    }

    if (mContext == nullptr) {
        if (OK != initDecoder()) {
            ALOGE("Failed to initialize decoder");
            notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
            mSignalledError = true;
            return;
        }
    }

    List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);

    int count2 = 0;
    while (!outQueue.empty()) {
        DDD("calling %s in while loop count %d", __func__, ++count2);
        BufferInfo *outInfo;
        OMX_BUFFERHEADERTYPE *outHeader;

        if (!mIsInFlush && (NULL == inHeader)) {
            if (!inQueue.empty()) {
                inInfo = *inQueue.begin();
                inHeader = inInfo->mHeader;
                if (inHeader == NULL) {
                    inQueue.erase(inQueue.begin());
                    inInfo->mOwnedByUs = false;
                    continue;
                }
            } else {
                break;
            }
        }

        outInfo = *outQueue.begin();
        outHeader = outInfo->mHeader;
        outHeader->nFlags = 0;
        outHeader->nTimeStamp = 0;
        outHeader->nOffset = 0;

        if (inHeader != NULL) {
            if (inHeader->nFilledLen == 0) {
                // An empty buffer can be an end-of-stream (EOS) buffer, so
                // set the decoder in flush mode if it is. If it's not EOS,
                // just release the buffer.
                inQueue.erase(inQueue.begin());
                inInfo->mOwnedByUs = false;
                notifyEmptyBufferDone(inHeader);

                if (!(inHeader->nFlags & OMX_BUFFERFLAG_EOS)) {
                    return;
                }

                mReceivedEOS = true;
                inHeader = NULL;
                setFlushMode();
            } else if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
                mReceivedEOS = true;
            }
        }

        {
            nsecs_t timeDelay, timeTaken;

            if (!setDecodeArgs(inHeader, outHeader)) {
                ALOGE("Decoder arg setup failed");
                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
                mSignalledError = true;
                return;
            }

            mTimeStart = systemTime();
            /* Compute time elapsed between end of previous decode()
             * and start of current decode() */
            timeDelay = mTimeStart - mTimeEnd;

            // TODO: We also need to send the timestamp
            h264_result_t h264Res = {(int)MediaH264Decoder::Err::NoErr, 0};
            if (inHeader != nullptr) {
                if (inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG) {
                    unsigned long mysize = (inHeader->nFilledLen - mInputOffset);
                    uint8_t* mydata = mInPBuffer;
                    if (mCsd0.empty()) {
                        mCsd0.assign(mydata, mydata + mysize);
                    } else if (mCsd1.empty()) {
                        mCsd1.assign(mydata, mydata + mysize);
                    }
                }
                DDD("Decoding frame(sz=%lu)", (unsigned long)(inHeader->nFilledLen - mInputOffset));
                h264Res = mContext->decodeFrame(mInPBuffer,
                                                inHeader->nFilledLen - mInputOffset,
                                                inHeader->nTimeStamp);
                mConsumedBytes = h264Res.bytesProcessed;
                if (h264Res.ret == (int)MediaH264Decoder::Err::DecoderRestarted) {
                    mChangingResolution = true;
                }
            } else {
                DDD("No more input data. Attempting to get a decoded frame, if any.");
            }
            h264_image_t img = {};

            bool readBackPixels = true;
            if (mRenderMode == RenderMode::RENDER_BY_GUEST_CPU) {
                img = mContext->getImage();
            } else {
                int hostColorBufferId = getHostColorBufferId(outHeader);
                if (hostColorBufferId >= 0) {
                    img = mContext->renderOnHostAndReturnImageMetadata(hostColorBufferId);
                    readBackPixels = false;
                } else {
                    img = mContext->getImage();
                }
            }

            if (img.data != nullptr) {
                getVUIParams(img);
            }

            mTimeEnd = systemTime();
            /* Compute time taken for decode() */
            timeTaken = mTimeEnd - mTimeStart;

            if (inHeader) {
                DDD("input time stamp %lld flag %d",
                        (long long)inHeader->nTimeStamp, (int)(inHeader->nFlags));
            }

            // If the decoder is in changing-resolution mode and there is no output present,
            // the switch is done and it is ready to reset the decoder and the plugin.
            if (mChangingResolution && img.data == nullptr) {
                mChangingResolution = false;
                DDD("re-create decoder because resolution changed");
                bool portWillReset = false;
                handlePortSettingsChange(&portWillReset, img.width, img.height);
                {
                    DDD("handling port reset");
                    DDD("port resetting (img.width=%u, img.height=%u, mWidth=%u, mHeight=%u)",
                            img.width, img.height, mWidth, mHeight);
                    //resetDecoder();
                    resetPlugin();

                    //mContext->destroyH264Context();
                    //mContext.reset(new MediaH264Decoder());
                    mContext->resetH264Context(mWidth,
                                               mHeight,
                                               mWidth,
                                               mHeight,
                                               MediaH264Decoder::PixelFormat::YUV420P);
                    //mInputOffset += mConsumedBytes;
                    return;
                }
            }

            if (img.data != nullptr) {
                int myWidth = img.width;
                int myHeight = img.height;
                if (myWidth != mWidth || myHeight != mHeight) {
                    bool portWillReset = false;
                    handlePortSettingsChange(&portWillReset, myWidth, myHeight);
                    resetPlugin();
                    mWidth = myWidth;
                    mHeight = myHeight;
                    if (portWillReset) {
                        DDD("port will reset, return now");
                        return;
                    } else {
                        DDD("port will NOT reset, keep going");
                    }
                }
                outHeader->nFilledLen = (outputBufferWidth() * outputBufferHeight() * 3) / 2;
                if (readBackPixels) {
                    if (outputBufferWidth() == mWidth && outputBufferHeight() == mHeight) {
                        memcpy(outHeader->pBuffer, img.data, outHeader->nFilledLen);
                    } else {
                        copyImageData(outHeader, img);
                    }
                }

                outHeader->nTimeStamp = img.pts;
                DDD("got output timestamp %lld", (long long)(img.pts));

                outInfo->mOwnedByUs = false;
                outQueue.erase(outQueue.begin());
                outInfo = NULL;
                notifyFillBufferDone(outHeader);
                outHeader = NULL;
            } else if (mIsInFlush) {
                DDD("no img.data and we are in flush mode");
                /* If in flush mode and no output is returned by the codec,
                 * then come out of flush mode */
                mIsInFlush = false;

                /* If EOS was received on the input port and there is no output
                 * from the codec, then signal EOS on the output port */
                if (mReceivedEOS) {
                    ALOGI("received EOS, re-create host context");
                    outHeader->nFilledLen = 0;
                    outHeader->nFlags |= OMX_BUFFERFLAG_EOS;

                    outInfo->mOwnedByUs = false;
                    outQueue.erase(outQueue.begin());
                    outInfo = NULL;
                    notifyFillBufferDone(outHeader);
                    outHeader = NULL;
                    resetPlugin();

                    //mContext->destroyH264Context();
                    //mContext.reset(new MediaH264Decoder());
                    mContext->resetH264Context(mWidth,
                                               mHeight,
                                               mWidth,
                                               mHeight,
                                               MediaH264Decoder::PixelFormat::YUV420P);
                }
            }
            mInputOffset += mConsumedBytes;
        }

        // Release the input buffer only when at most 4 bytes remain; otherwise
        // keep it so the remaining data can be consumed on the next iteration.
        if (inHeader != NULL && ((inHeader->nFilledLen - mInputOffset) <= 4)) {
            inInfo->mOwnedByUs = false;
            inQueue.erase(inQueue.begin());
            inInfo = NULL;
            notifyEmptyBufferDone(inHeader);
            inHeader = NULL;
            mInputOffset = 0;

            /* If input EOS is seen and the decoder is not in flush mode,
             * set the decoder in flush mode.
             * There can be a case where EOS is sent along with the last picture data.
             * In that case, the decoder should be put in flush mode only after
             * decoding that input data. That case is handled here. */
            if (mReceivedEOS && !mIsInFlush) {
                setFlushMode();
            }
        }
    }
}

OMX_ERRORTYPE GoldfishAVCDec::internalGetParameter(
        OMX_INDEXTYPE index, OMX_PTR params) {
    const int32_t indexFull = index;
    switch (indexFull) {
        case kGetAndroidNativeBufferUsageIndex:
        {
            DDD("calling kGetAndroidNativeBufferUsageIndex");
            GetAndroidNativeBufferUsageParams* nativeBuffersUsage =
                    (GetAndroidNativeBufferUsageParams *) params;
            nativeBuffersUsage->nUsage = (unsigned int)(BufferUsage::GPU_DATA_BUFFER);
            return OMX_ErrorNone;
        }

        default:
            return GoldfishVideoDecoderOMXComponent::internalGetParameter(index, params);
    }
}

OMX_ERRORTYPE GoldfishAVCDec::internalSetParameter(
        OMX_INDEXTYPE index, const OMX_PTR params) {
    // Include extension index OMX_INDEXEXTTYPE.
    const int32_t indexFull = index;

    switch (indexFull) {
        case kEnableAndroidNativeBuffersIndex:
        {
            DDD("calling kEnableAndroidNativeBuffersIndex");
            EnableAndroidNativeBuffersParams* enableNativeBuffers =
                    (EnableAndroidNativeBuffersParams *) params;
            if (enableNativeBuffers) {
                mEnableAndroidNativeBuffers = enableNativeBuffers->enable;
                if (mEnableAndroidNativeBuffers == false) {
                    mNWBuffers.clear();
                    DDD("disabled kEnableAndroidNativeBuffersIndex");
                } else {
                    DDD("enabled kEnableAndroidNativeBuffersIndex");
                }
            }
            return OMX_ErrorNone;
        }

        case kUseAndroidNativeBufferIndex:
        {
            if (mEnableAndroidNativeBuffers == false) {
                ALOGE("Error: Android native buffers are not enabled");
                return OMX_ErrorBadParameter;
            }
            UseAndroidNativeBufferParams *use_buffer_params =
                    (UseAndroidNativeBufferParams *)params;
            if (use_buffer_params) {
                sp<ANativeWindowBuffer> nBuf = use_buffer_params->nativeBuffer;
                cb_handle_t *handle = (cb_handle_t*)nBuf->handle;
                void* dst = NULL;
                DDD("kUseAndroidNativeBufferIndex with handle %p host color handle %d calling usebuffer",
                        handle, handle->hostHandle);
                useBufferCallerLockedAlready(use_buffer_params->bufferHeader,
                        use_buffer_params->nPortIndex,
                        use_buffer_params->pAppPrivate, handle->allocatedSize(), (OMX_U8*)dst);
                mNWBuffers[*(use_buffer_params->bufferHeader)] = use_buffer_params->nativeBuffer;
            }
            return OMX_ErrorNone;
        }

        default:
            return GoldfishVideoDecoderOMXComponent::internalSetParameter(index, params);
    }
}

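// The Android-native-buffer OMX extensions are only advertised when frames are
// rendered by the host GPU; in guest-CPU render mode all extension queries are
// delegated to the base class.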
OMX_ERRORTYPE GoldfishAVCDec::getExtensionIndex(
        const char *name, OMX_INDEXTYPE *index) {
    if (mRenderMode == RenderMode::RENDER_BY_HOST_GPU) {
        if (!strcmp(name, "OMX.google.android.index.enableAndroidNativeBuffers")) {
            DDD("calling getExtensionIndex for enable ANB");
            *(int32_t*)index = kEnableAndroidNativeBuffersIndex;
            return OMX_ErrorNone;
        } else if (!strcmp(name, "OMX.google.android.index.useAndroidNativeBuffer")) {
            *(int32_t*)index = kUseAndroidNativeBufferIndex;
            return OMX_ErrorNone;
        } else if (!strcmp(name, "OMX.google.android.index.getAndroidNativeBufferUsage")) {
            *(int32_t*)index = kGetAndroidNativeBufferUsageIndex;
            return OMX_ErrorNone;
        }
    }
    return GoldfishVideoDecoderOMXComponent::getExtensionIndex(name, index);
}

int GoldfishAVCDec::getColorAspectPreference() {
    return kPreferBitstream;
}

}  // namespace android

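// Component factory entry point: components whose name starts with
// "OMX.android.goldfish" decode into host color buffers (host GPU rendering);
// all others copy decoded frames back to the guest CPU.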
android::GoldfishOMXComponent *createGoldfishOMXComponent(
        const char *name, const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData,
        OMX_COMPONENTTYPE **component) {
    if (!strncmp("OMX.android.goldfish", name, 20)) {
        return new android::GoldfishAVCDec(name, callbacks, appData, component,
                                           RenderMode::RENDER_BY_HOST_GPU);
    } else {
        return new android::GoldfishAVCDec(name, callbacks, appData, component,
                                           RenderMode::RENDER_BY_GUEST_CPU);
    }
}