/*
 * Copyright 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <inttypes.h>

//#define LOG_NDEBUG 0
#define LOG_TAG "SoftVideoEncoderOMXComponent"
#include <utils/Log.h>
#include <utils/misc.h>

#include <media/stagefright/omx/SoftVideoEncoderOMXComponent.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/MediaDefs.h>
#include <media/hardware/HardwareAPI.h>
#include <media/openmax/OMX_IndexExt.h>

#include <ui/Fence.h>
#include <ui/GraphicBufferMapper.h>
#include <ui/Rect.h>

#include <hardware/gralloc.h>
#include <nativebase/nativebase.h>

namespace android {

const static OMX_COLOR_FORMATTYPE kSupportedColorFormats[] = {
    OMX_COLOR_FormatYUV420Planar,
    OMX_COLOR_FormatYUV420SemiPlanar,
    OMX_COLOR_FormatAndroidOpaque
};

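// Initializes the nSize and nVersion fields common to all OMX parameter structs.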
template<class T>
static void InitOMXParams(T *params) {
    params->nSize = sizeof(T);
    params->nVersion.s.nVersionMajor = 1;
    params->nVersion.s.nVersionMinor = 0;
    params->nVersion.s.nRevision = 0;
    params->nVersion.s.nStep = 0;
}

SoftVideoEncoderOMXComponent::SoftVideoEncoderOMXComponent(
        const char *name,
        const char *componentRole,
        OMX_VIDEO_CODINGTYPE codingType,
        const CodecProfileLevel *profileLevels,
        size_t numProfileLevels,
        int32_t width,
        int32_t height,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component)
    : SimpleSoftOMXComponent(name, callbacks, appData, component),
      mInputDataIsMeta(false),
      mWidth(width),
      mHeight(height),
      mBitrate(192000),
      mFramerate(30 << 16), // Q16 format
      mColorFormat(OMX_COLOR_FormatYUV420Planar),
      mMinOutputBufferSize(384), // arbitrary, using one uncompressed macroblock
      mMinCompressionRatio(1), // max output size is normally the input size
      mComponentRole(componentRole),
      mCodingType(codingType),
      mProfileLevels(profileLevels),
      mNumProfileLevels(numProfileLevels) {
}

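// Declares the component's two ports: a raw-video input port and a compressed
// output port using the coding type supplied by the subclass. Buffer sizes are
// derived afterwards in updatePortParams().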
void SoftVideoEncoderOMXComponent::initPorts(
        OMX_U32 numInputBuffers, OMX_U32 numOutputBuffers, OMX_U32 outputBufferSize,
        const char *mime, OMX_U32 minCompressionRatio) {
    OMX_PARAM_PORTDEFINITIONTYPE def;

    mMinOutputBufferSize = outputBufferSize;
    mMinCompressionRatio = minCompressionRatio;

    InitOMXParams(&def);

    def.nPortIndex = kInputPortIndex;
    def.eDir = OMX_DirInput;
    def.nBufferCountMin = numInputBuffers;
    def.nBufferCountActual = def.nBufferCountMin;
    def.bEnabled = OMX_TRUE;
    def.bPopulated = OMX_FALSE;
    def.eDomain = OMX_PortDomainVideo;
    def.bBuffersContiguous = OMX_FALSE;
    def.format.video.pNativeRender = NULL;
    def.format.video.nFrameWidth = mWidth;
    def.format.video.nFrameHeight = mHeight;
    def.format.video.nStride = def.format.video.nFrameWidth;
    def.format.video.nSliceHeight = def.format.video.nFrameHeight;
    def.format.video.nBitrate = 0;
    // frameRate is in Q16 format.
    def.format.video.xFramerate = mFramerate;
    def.format.video.bFlagErrorConcealment = OMX_FALSE;
    def.nBufferAlignment = kInputBufferAlignment;
    def.format.video.cMIMEType = const_cast<char *>("video/raw");
    def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
    def.format.video.eColorFormat = mColorFormat;
    def.format.video.pNativeWindow = NULL;
    // buffersize set in updatePortParams

    addPort(def);

    InitOMXParams(&def);

    def.nPortIndex = kOutputPortIndex;
    def.eDir = OMX_DirOutput;
    def.nBufferCountMin = numOutputBuffers;
    def.nBufferCountActual = def.nBufferCountMin;
    def.bEnabled = OMX_TRUE;
    def.bPopulated = OMX_FALSE;
    def.eDomain = OMX_PortDomainVideo;
    def.bBuffersContiguous = OMX_FALSE;
    def.format.video.pNativeRender = NULL;
    def.format.video.nFrameWidth = mWidth;
    def.format.video.nFrameHeight = mHeight;
    def.format.video.nStride = 0;
    def.format.video.nSliceHeight = 0;
    def.format.video.nBitrate = mBitrate;
    def.format.video.xFramerate = 0 << 16;
    def.format.video.bFlagErrorConcealment = OMX_FALSE;
    def.nBufferAlignment = kOutputBufferAlignment;
    def.format.video.cMIMEType = const_cast<char *>(mime);
    def.format.video.eCompressionFormat = mCodingType;
    def.format.video.eColorFormat = OMX_COLOR_FormatUnused;
    def.format.video.pNativeWindow = NULL;
    // buffersize set in updatePortParams

    addPort(def);

    updatePortParams();
}

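// Re-derives both port definitions (frame geometry, frame rate, color format,
// bitrate and minimum buffer sizes) from the current member settings.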
void SoftVideoEncoderOMXComponent::updatePortParams() {
    OMX_PARAM_PORTDEFINITIONTYPE *inDef = &editPortInfo(kInputPortIndex)->mDef;
    inDef->format.video.nFrameWidth = mWidth;
    inDef->format.video.nFrameHeight = mHeight;
    inDef->format.video.nStride = inDef->format.video.nFrameWidth;
    inDef->format.video.nSliceHeight = inDef->format.video.nFrameHeight;
    inDef->format.video.xFramerate = mFramerate;
    inDef->format.video.eColorFormat = mColorFormat;
    uint32_t rawBufferSize =
        inDef->format.video.nStride * inDef->format.video.nSliceHeight * 3 / 2;
    if (inDef->format.video.eColorFormat == OMX_COLOR_FormatAndroidOpaque) {
        inDef->nBufferSize = max(sizeof(VideoNativeMetadata), sizeof(VideoGrallocMetadata));
    } else {
        inDef->nBufferSize = rawBufferSize;
    }

    OMX_PARAM_PORTDEFINITIONTYPE *outDef = &editPortInfo(kOutputPortIndex)->mDef;
    outDef->format.video.nFrameWidth = mWidth;
    outDef->format.video.nFrameHeight = mHeight;
    outDef->format.video.nBitrate = mBitrate;

    outDef->nBufferSize = max(mMinOutputBufferSize, rawBufferSize / mMinCompressionRatio);
}

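// Applies an OMX_IndexParamPortDefinition update: caches frame size, frame rate
// and color format from the input port, or the target bitrate from the output port.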
OMX_ERRORTYPE SoftVideoEncoderOMXComponent::internalSetPortParams(
        const OMX_PARAM_PORTDEFINITIONTYPE *port) {

    if (!isValidOMXParam(port)) {
        return OMX_ErrorBadParameter;
    }

    if (port->nPortIndex == kInputPortIndex) {
        mWidth = port->format.video.nFrameWidth;
        mHeight = port->format.video.nFrameHeight;

        // xFramerate comes in Q16 format, in frames per second unit
        mFramerate = port->format.video.xFramerate;

        if (port->format.video.eCompressionFormat != OMX_VIDEO_CodingUnused
                || (port->format.video.eColorFormat != OMX_COLOR_FormatYUV420Planar
                        && port->format.video.eColorFormat != OMX_COLOR_FormatYUV420SemiPlanar
                        && port->format.video.eColorFormat != OMX_COLOR_FormatAndroidOpaque)) {
            return OMX_ErrorUnsupportedSetting;
        }

        mColorFormat = port->format.video.eColorFormat;
    } else if (port->nPortIndex == kOutputPortIndex) {
        if (port->format.video.eCompressionFormat != mCodingType
                || port->format.video.eColorFormat != OMX_COLOR_FormatUnused) {
            return OMX_ErrorUnsupportedSetting;
        }

        mBitrate = port->format.video.nBitrate;
    } else {
        return OMX_ErrorBadPortIndex;
    }

    updatePortParams();
    return OMX_ErrorNone;
}

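// Handles the encoder-generic OMX parameter writes; unrecognized indices are
// forwarded to SimpleSoftOMXComponent.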
OMX_ERRORTYPE SoftVideoEncoderOMXComponent::internalSetParameter(
        OMX_INDEXTYPE index, const OMX_PTR param) {
    // can include extension index OMX_INDEXEXTTYPE
    const int32_t indexFull = index;

    switch (indexFull) {
        case OMX_IndexParamVideoErrorCorrection:
        {
            return OMX_ErrorNotImplemented;
        }

        case OMX_IndexParamStandardComponentRole:
        {
            const OMX_PARAM_COMPONENTROLETYPE *roleParams =
                (const OMX_PARAM_COMPONENTROLETYPE *)param;

            if (!isValidOMXParam(roleParams)) {
                return OMX_ErrorBadParameter;
            }

            if (strncmp((const char *)roleParams->cRole,
                        mComponentRole,
                        OMX_MAX_STRINGNAME_SIZE - 1)) {
                return OMX_ErrorUnsupportedSetting;
            }

            return OMX_ErrorNone;
        }

        case OMX_IndexParamPortDefinition:
        {
            OMX_ERRORTYPE err = internalSetPortParams((const OMX_PARAM_PORTDEFINITIONTYPE *)param);

            if (err != OMX_ErrorNone) {
                return err;
            }

            return SimpleSoftOMXComponent::internalSetParameter(index, param);
        }

        case OMX_IndexParamVideoPortFormat:
        {
            const OMX_VIDEO_PARAM_PORTFORMATTYPE* format =
                (const OMX_VIDEO_PARAM_PORTFORMATTYPE *)param;

            if (!isValidOMXParam(format)) {
                return OMX_ErrorBadParameter;
            }

            if (format->nPortIndex == kInputPortIndex) {
                if (format->eColorFormat == OMX_COLOR_FormatYUV420Planar ||
                    format->eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar ||
                    format->eColorFormat == OMX_COLOR_FormatAndroidOpaque) {
                    mColorFormat = format->eColorFormat;

                    updatePortParams();
                    return OMX_ErrorNone;
                } else {
                    ALOGE("Unsupported color format %i", format->eColorFormat);
                    return OMX_ErrorUnsupportedSetting;
                }
            } else if (format->nPortIndex == kOutputPortIndex) {
                if (format->eCompressionFormat == mCodingType) {
                    return OMX_ErrorNone;
                } else {
                    return OMX_ErrorUnsupportedSetting;
                }
            } else {
                return OMX_ErrorBadPortIndex;
            }
        }

        case kStoreMetaDataExtensionIndex:
        {
            // storeMetaDataInBuffers
            const StoreMetaDataInBuffersParams *storeParam =
                (const StoreMetaDataInBuffersParams *)param;

            if (!isValidOMXParam(storeParam)) {
                return OMX_ErrorBadParameter;
            }

            if (storeParam->nPortIndex == kOutputPortIndex) {
                return storeParam->bStoreMetaData ? OMX_ErrorUnsupportedSetting : OMX_ErrorNone;
            } else if (storeParam->nPortIndex != kInputPortIndex) {
                return OMX_ErrorBadPortIndex;
            }

            mInputDataIsMeta = (storeParam->bStoreMetaData == OMX_TRUE);
            if (mInputDataIsMeta) {
                mColorFormat = OMX_COLOR_FormatAndroidOpaque;
            } else if (mColorFormat == OMX_COLOR_FormatAndroidOpaque) {
                mColorFormat = OMX_COLOR_FormatYUV420Planar;
            }
            updatePortParams();
            return OMX_ErrorNone;
        }

        default:
            return SimpleSoftOMXComponent::internalSetParameter(index, param);
    }
}

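// Handles the encoder-generic OMX parameter queries (port formats, supported
// profile/level pairs, consumer usage bits); the rest goes to the base class.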
OMX_ERRORTYPE SoftVideoEncoderOMXComponent::internalGetParameter(
        OMX_INDEXTYPE index, OMX_PTR param) {
    switch ((int)index) {
        case OMX_IndexParamVideoErrorCorrection:
        {
            return OMX_ErrorNotImplemented;
        }

        case OMX_IndexParamVideoPortFormat:
        {
            OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
                (OMX_VIDEO_PARAM_PORTFORMATTYPE *)param;

            if (!isValidOMXParam(formatParams)) {
                return OMX_ErrorBadParameter;
            }

            if (formatParams->nPortIndex == kInputPortIndex) {
                if (formatParams->nIndex >= NELEM(kSupportedColorFormats)) {
                    return OMX_ErrorNoMore;
                }

                // Color formats, in order of preference
                formatParams->eColorFormat = kSupportedColorFormats[formatParams->nIndex];
                formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused;
                formatParams->xFramerate = mFramerate;
                return OMX_ErrorNone;
            } else if (formatParams->nPortIndex == kOutputPortIndex) {
                formatParams->eCompressionFormat = mCodingType;
                formatParams->eColorFormat = OMX_COLOR_FormatUnused;
                formatParams->xFramerate = 0;
                return OMX_ErrorNone;
            } else {
                return OMX_ErrorBadPortIndex;
            }
        }

        case OMX_IndexParamVideoProfileLevelQuerySupported:
        {
            OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileLevel =
                (OMX_VIDEO_PARAM_PROFILELEVELTYPE *) param;

            if (!isValidOMXParam(profileLevel)) {
                return OMX_ErrorBadParameter;
            }

            if (profileLevel->nPortIndex != kOutputPortIndex) {
                ALOGE("Invalid port index: %u", profileLevel->nPortIndex);
                return OMX_ErrorUnsupportedIndex;
            }

            if (profileLevel->nProfileIndex >= mNumProfileLevels) {
                return OMX_ErrorNoMore;
            }

            profileLevel->eProfile = mProfileLevels[profileLevel->nProfileIndex].mProfile;
            profileLevel->eLevel = mProfileLevels[profileLevel->nProfileIndex].mLevel;
            return OMX_ErrorNone;
        }

        case OMX_IndexParamConsumerUsageBits:
        {
            OMX_U32 *usageBits = (OMX_U32 *)param;
            *usageBits = GRALLOC_USAGE_SW_READ_OFTEN;
            return OMX_ErrorNone;
        }

        default:
            return SimpleSoftOMXComponent::internalGetParameter(index, param);
    }
}

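// Repacks a flexible YUV 4:2:0 source described by android_ycbcr (arbitrary
// strides and chroma step) into planar I420 at the given destination strides.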
// static
__attribute__((no_sanitize("integer")))
void SoftVideoEncoderOMXComponent::ConvertFlexYUVToPlanar(
        uint8_t *dst, size_t dstStride, size_t dstVStride,
        struct android_ycbcr *ycbcr, int32_t width, int32_t height) {
    const uint8_t *src = (const uint8_t *)ycbcr->y;
    const uint8_t *srcU = (const uint8_t *)ycbcr->cb;
    const uint8_t *srcV = (const uint8_t *)ycbcr->cr;
    uint8_t *dstU = dst + dstVStride * dstStride;
    uint8_t *dstV = dstU + (dstVStride >> 1) * (dstStride >> 1);

    for (size_t y = height; y > 0; --y) {
        memcpy(dst, src, width);
        dst += dstStride;
        src += ycbcr->ystride;
    }
    if (ycbcr->cstride == ycbcr->ystride >> 1 && ycbcr->chroma_step == 1) {
        // planar
        for (size_t y = height >> 1; y > 0; --y) {
            memcpy(dstU, srcU, width >> 1);
            dstU += dstStride >> 1;
            srcU += ycbcr->cstride;
            memcpy(dstV, srcV, width >> 1);
            dstV += dstStride >> 1;
            srcV += ycbcr->cstride;
        }
    } else {
        // arbitrary
        for (size_t y = height >> 1; y > 0; --y) {
            for (size_t x = width >> 1; x > 0; --x) {
                *dstU++ = *srcU;
                *dstV++ = *srcV;
                srcU += ycbcr->chroma_step;
                srcV += ycbcr->chroma_step;
            }
            dstU += (dstStride >> 1) - (width >> 1);
            dstV += (dstStride >> 1) - (width >> 1);
            srcU += ycbcr->cstride - (width >> 1) * ycbcr->chroma_step;
            srcV += ycbcr->cstride - (width >> 1) * ycbcr->chroma_step;
        }
    }
}

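// Converts a 4:2:0 semiplanar frame (interleaved chroma) to planar YUV420.
// No stride support; per the FIXME below, width must be a multiple of 4.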
// static
__attribute__((no_sanitize("integer")))
void SoftVideoEncoderOMXComponent::ConvertYUV420SemiPlanarToYUV420Planar(
        const uint8_t *inYVU, uint8_t* outYUV, int32_t width, int32_t height) {
    // TODO: add support for stride
    int32_t outYsize = width * height;
    uint32_t *outY = (uint32_t *) outYUV;
    uint16_t *outCb = (uint16_t *) (outYUV + outYsize);
    uint16_t *outCr = (uint16_t *) (outYUV + outYsize + (outYsize >> 2));

    /* Y copying */
    memcpy(outY, inYVU, outYsize);

    /* U & V copying */
    // FIXME this only works if width is multiple of 4
    uint32_t *inYVU_4 = (uint32_t *) (inYVU + outYsize);
    for (int32_t i = height >> 1; i > 0; --i) {
        for (int32_t j = width >> 2; j > 0; --j) {
            uint32_t temp = *inYVU_4++;
            uint32_t tempU = temp & 0xFF;
            tempU = tempU | ((temp >> 8) & 0xFF00);

            uint32_t tempV = (temp >> 8) & 0xFF;
            tempV = tempV | ((temp >> 16) & 0xFF00);

            *outCb++ = tempU;
            *outCr++ = tempV;
        }
    }
}

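// Converts a 32-bit RGB(A)/BGR(A) frame to planar YUV420 using the BT.601
// coefficients cited below; chroma is sampled from the top-left pixel of each
// 2x2 block.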
// static
__attribute__((no_sanitize("integer")))
void SoftVideoEncoderOMXComponent::ConvertRGB32ToPlanar(
        uint8_t *dstY, size_t dstStride, size_t dstVStride,
        const uint8_t *src, size_t width, size_t height, size_t srcStride,
        bool bgr) {
    CHECK((width & 1) == 0);
    CHECK((height & 1) == 0);

    uint8_t *dstU = dstY + dstStride * dstVStride;
    uint8_t *dstV = dstU + (dstStride >> 1) * (dstVStride >> 1);

#ifdef SURFACE_IS_BGR32
    bgr = !bgr;
#endif

    const size_t redOffset = bgr ? 2 : 0;
    const size_t greenOffset = 1;
    const size_t blueOffset = bgr ? 0 : 2;

    for (size_t y = 0; y < height; ++y) {
        for (size_t x = 0; x < width; ++x) {
            unsigned red = src[redOffset];
            unsigned green = src[greenOffset];
            unsigned blue = src[blueOffset];

            // Using ITU-R BT.601-7 (03/2011)
            // 2.5.1: Ey'  = ( 0.299*R + 0.587*G + 0.114*B)
            // 2.5.2: ECr' = ( 0.701*R - 0.587*G - 0.114*B) / 1.402
            //        ECb' = (-0.299*R - 0.587*G + 0.886*B) / 1.772
            // 2.5.3: Y  = 219 * Ey'  +  16
            //        Cr = 224 * ECr' + 128
            //        Cb = 224 * ECb' + 128

            unsigned luma =
                ((red * 65 + green * 129 + blue * 25 + 128) >> 8) + 16;

            dstY[x] = luma;

            if ((x & 1) == 0 && (y & 1) == 0) {
                unsigned U =
                    ((-red * 38 - green * 74 + blue * 112 + 128) >> 8) + 128;

                unsigned V =
                    ((red * 112 - green * 94 - blue * 18 + 128) >> 8) + 128;

                dstU[x >> 1] = U;
                dstV[x >> 1] = V;
            }
            src += 4;
        }

        if ((y & 1) == 0) {
            dstU += dstStride >> 1;
            dstV += dstStride >> 1;
        }

        src += srcStride - 4 * width;
        dstY += dstStride;
    }
}

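// Locks the gralloc buffer referenced by the input metadata (ANWBuffer or
// gralloc source), converts its contents to planar YUV420 in |dst|, then
// unlocks it. Returns |dst| on success or NULL on failure.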
const uint8_t *SoftVideoEncoderOMXComponent::extractGraphicBuffer(
        uint8_t *dst, size_t dstSize,
        const uint8_t *src, size_t srcSize,
        size_t width, size_t height) const {
    size_t dstStride = width;
    size_t dstVStride = height;

    MetadataBufferType bufferType = *(MetadataBufferType *)src;
    bool usingANWBuffer = bufferType == kMetadataBufferTypeANWBuffer;
    if (!usingANWBuffer && bufferType != kMetadataBufferTypeGrallocSource) {
        ALOGE("Unsupported metadata type (%d)", bufferType);
        return NULL;
    }

    buffer_handle_t handle;
    int format;
    size_t srcStride;
    size_t srcVStride;
    if (usingANWBuffer) {
        if (srcSize < sizeof(VideoNativeMetadata)) {
            ALOGE("Metadata is too small (%zu vs %zu)", srcSize, sizeof(VideoNativeMetadata));
            return NULL;
        }

        VideoNativeMetadata &nativeMeta = *(VideoNativeMetadata *)src;
        ANativeWindowBuffer *buffer = nativeMeta.pBuffer;
        handle = buffer->handle;
        format = buffer->format;
        srcStride = buffer->stride;
        srcVStride = buffer->height;
        // convert stride from pixels to bytes
        if (format != HAL_PIXEL_FORMAT_YV12 &&
                format != HAL_PIXEL_FORMAT_YCrCb_420_SP &&
                format != HAL_PIXEL_FORMAT_YCbCr_420_888) {
            // TODO do we need to support other formats?
            srcStride *= 4;
        }

        if (nativeMeta.nFenceFd >= 0) {
            sp<Fence> fence = new Fence(nativeMeta.nFenceFd);
            nativeMeta.nFenceFd = -1;
            status_t err = fence->wait(kFenceTimeoutMs);
            if (err != OK) {
                ALOGE("Timed out waiting on input fence");
                return NULL;
            }
        }
    } else {
        // TODO: remove this part. Check if anyone uses this.

        if (srcSize < sizeof(VideoGrallocMetadata)) {
            ALOGE("Metadata is too small (%zu vs %zu)", srcSize, sizeof(VideoGrallocMetadata));
            return NULL;
        }

        VideoGrallocMetadata &grallocMeta = *(VideoGrallocMetadata *)(src);
        handle = grallocMeta.pHandle;
        // assume HAL_PIXEL_FORMAT_RGBA_8888
        // there is no way to get the src stride without the graphic buffer
        format = HAL_PIXEL_FORMAT_RGBA_8888;
        srcStride = width * 4;
        srcVStride = height;
    }

    size_t neededSize =
        dstStride * dstVStride + (width >> 1)
                + (dstStride >> 1) * ((dstVStride >> 1) + (height >> 1) - 1);
    if (dstSize < neededSize) {
        ALOGE("destination buffer is too small (%zu vs %zu)", dstSize, neededSize);
        return NULL;
    }

    auto& mapper = GraphicBufferMapper::get();

    void *bits = NULL;
    struct android_ycbcr ycbcr;
    status_t res;
    if (format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
        res = mapper.lockYCbCr(
                handle,
                GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_NEVER,
                Rect(width, height), &ycbcr);
    } else {
        res = mapper.lock(
                handle,
                GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_NEVER,
                Rect(width, height), &bits);
    }
    if (res != OK) {
        ALOGE("Unable to lock image buffer %p for access", handle);
        return NULL;
    }

    switch (format) {
        case HAL_PIXEL_FORMAT_YV12:  // YCrCb / YVU planar
            ycbcr.y = bits;
            ycbcr.cr = (uint8_t *)bits + srcStride * srcVStride;
            ycbcr.cb = (uint8_t *)ycbcr.cr + (srcStride >> 1) * (srcVStride >> 1);
            ycbcr.chroma_step = 1;
            ycbcr.cstride = srcStride >> 1;
            ycbcr.ystride = srcStride;
            ConvertFlexYUVToPlanar(dst, dstStride, dstVStride, &ycbcr, width, height);
            break;
        case HAL_PIXEL_FORMAT_YCrCb_420_SP:  // YCrCb / YVU semiplanar, NV21
            ycbcr.y = bits;
            ycbcr.cr = (uint8_t *)bits + srcStride * srcVStride;
            ycbcr.cb = (uint8_t *)ycbcr.cr + 1;
            ycbcr.chroma_step = 2;
            ycbcr.cstride = srcStride;
            ycbcr.ystride = srcStride;
            ConvertFlexYUVToPlanar(dst, dstStride, dstVStride, &ycbcr, width, height);
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:  // YCbCr / YUV planar
            ConvertFlexYUVToPlanar(dst, dstStride, dstVStride, &ycbcr, width, height);
            break;
        case HAL_PIXEL_FORMAT_RGBX_8888:
        case HAL_PIXEL_FORMAT_RGBA_8888:
        case HAL_PIXEL_FORMAT_BGRA_8888:
            ConvertRGB32ToPlanar(
                    dst, dstStride, dstVStride,
                    (const uint8_t *)bits, width, height, srcStride,
                    format == HAL_PIXEL_FORMAT_BGRA_8888);
            break;
        default:
            ALOGE("Unsupported pixel format %#x", format);
            dst = NULL;
            break;
    }

    if (mapper.unlock(handle) != OK) {
        ALOGE("Unable to unlock image buffer %p for access", handle);
    }

    return dst;
}

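// Maps the store-metadata-in-buffers extension names to our private index.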
OMX_ERRORTYPE SoftVideoEncoderOMXComponent::getExtensionIndex(
        const char *name, OMX_INDEXTYPE *index) {
    if (!strcmp(name, "OMX.google.android.index.storeMetaDataInBuffers") ||
        !strcmp(name, "OMX.google.android.index.storeANWBufferInMetadata")) {
        *(int32_t*)index = kStoreMetaDataExtensionIndex;
        return OMX_ErrorNone;
    }
    return SimpleSoftOMXComponent::getExtensionIndex(name, index);
}

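// Verifies that a queued input buffer is large enough for either the metadata
// struct or a full raw frame, depending on the input mode.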
OMX_ERRORTYPE SoftVideoEncoderOMXComponent::validateInputBuffer(
        const OMX_BUFFERHEADERTYPE *inputBufferHeader) {
    size_t frameSize = mInputDataIsMeta ?
            max(sizeof(VideoNativeMetadata), sizeof(VideoGrallocMetadata))
            : mWidth * mHeight * 3 / 2;
    if (inputBufferHeader->nFilledLen < frameSize) {
        return OMX_ErrorUndefined;
    } else if (inputBufferHeader->nFilledLen > frameSize) {
        ALOGW("Input buffer contains more data than expected.");
    }
    return OMX_ErrorNone;
}

}  // namespace android