1 /*
2 * Copyright (C) 2020 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 //#define LOG_NDEBUG 0
18 #define LOG_TAG "WebmFrameThreadUnitTest"
19 #include <utils/Log.h>
20
21 #include <gtest/gtest.h>
22
23 #include <media/stagefright/MediaAdapter.h>
24 #include <media/stagefright/MediaDefs.h>
25 #include <media/stagefright/MetaData.h>
26 #include <media/stagefright/Utils.h>
27
28 #include <media/stagefright/foundation/ABuffer.h>
29 #include <media/stagefright/foundation/ADebug.h>
30 #include <media/stagefright/foundation/AMessage.h>
31 #include <media/stagefright/foundation/OpusHeader.h>
32
33 #include "webm/EbmlUtil.h"
34 #include "webm/WebmConstants.h"
35 #include "webm/WebmFrameThread.h"
36
37 using namespace android;
38 using namespace webm;
39
static constexpr int32_t kVideoIdx = 0;        // index of the video track in mSource[]
static constexpr int32_t kAudioIdx = 1;        // index of the audio track in mSource[]
static constexpr int32_t kMaxStreamCount = 2;  // at most one audio + one video track

static constexpr int32_t kCsdSize = 32;     // size of each dummy codec-specific-data buffer
static constexpr int32_t kFrameSize = 128;  // size of each dummy input frame in bytes

static constexpr int32_t kMaxLoopCount = 20;       // pause/resume iterations in PauseTest
static constexpr int32_t kNumFramesToWrite = 32;   // frames written per writeFileData() burst
static constexpr int32_t kSyncFrameInterval = 10;  // every 10th video frame is marked sync
static constexpr uint64_t kDefaultTimeCodeScaleUs = 1000000; /* 1sec */

// Output file for the muxed webm stream produced by the sink thread.
#define OUTPUT_FILE_NAME "/data/local/tmp/webmFrameThreadOutput.webm"
53
// LookUpTable of clips and metadata for component testing
static const struct InputData {
    const char *mime;     // track mime type (Opus/Vorbis audio, VP8/VP9 video)
    int32_t firstParam;   // width for video, sample-rate for audio
    int32_t secondParam;  // height for video, channel-count for audio
    bool isAudio;         // true for audio entries
} kInputData[] = {
        {MEDIA_MIMETYPE_AUDIO_OPUS, 48000, 6, true},
        {MEDIA_MIMETYPE_AUDIO_VORBIS, 44100, 1, true},
        {MEDIA_MIMETYPE_VIDEO_VP9, 176, 144, false},
        {MEDIA_MIMETYPE_VIDEO_VP8, 1920, 1080, false},
};
66
67 class WebmFrameThreadUnitTest : public ::testing::TestWithParam<std::pair<int32_t, int32_t>> {
68 public:
WebmFrameThreadUnitTest()69 WebmFrameThreadUnitTest()
70 : mSinkThread(nullptr), mAudioThread(nullptr), mVideoThread(nullptr), mSource{} {}
71
~WebmFrameThreadUnitTest()72 ~WebmFrameThreadUnitTest() {
73 if (mSinkThread) mSinkThread.clear();
74 if (mAudioThread) mAudioThread.clear();
75 if (mVideoThread) mVideoThread.clear();
76 }
77
SetUp()78 virtual void SetUp() override {
79 mSegmentDataStart = 0;
80 mFd = open(OUTPUT_FILE_NAME, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
81 ASSERT_GE(mFd, 0) << "Failed to open output file " << OUTPUT_FILE_NAME;
82 }
83
TearDown()84 virtual void TearDown() override {
85 if (mFd >= 0) close(mFd);
86 for (int32_t idx = 0; idx < kMaxStreamCount; idx++) {
87 if (mSource[idx] != nullptr) {
88 mSource[idx].clear();
89 }
90 }
91 mVSink.clear();
92 mASink.clear();
93 mCuePoints.clear();
94 }
95
96 void addTrack(bool isAudio, int32_t index);
97 void writeFileData(int32_t inputFrameId, int32_t range);
98
99 void createWebmThreads(std::initializer_list<int32_t> indexList);
100 void startWebmFrameThreads();
101 void stopWebmFrameThreads();
102
103 int32_t mFd;
104 uint64_t mSegmentDataStart;
105
106 sp<WebmFrameSinkThread> mSinkThread;
107 sp<WebmFrameSourceThread> mAudioThread;
108 sp<WebmFrameSourceThread> mVideoThread;
109
110 List<sp<WebmElement>> mCuePoints;
111 sp<MediaAdapter> mSource[kMaxStreamCount];
112 LinkedBlockingQueue<const sp<WebmFrame>> mVSink;
113 LinkedBlockingQueue<const sp<WebmFrame>> mASink;
114 };
115
writeAudioHeaderData(const sp<AMessage> & format,const char * mimeType)116 void writeAudioHeaderData(const sp<AMessage> &format, const char *mimeType) {
117 if (strncasecmp(mimeType, MEDIA_MIMETYPE_AUDIO_OPUS, strlen(MEDIA_MIMETYPE_AUDIO_OPUS) + 1) &&
118 strncasecmp(mimeType, MEDIA_MIMETYPE_AUDIO_VORBIS,
119 strlen(MEDIA_MIMETYPE_AUDIO_VORBIS) + 1)) {
120 ASSERT_TRUE(false) << "Unsupported mime type";
121 }
122
123 // Dummy CSD buffers for Opus and Vorbis
124 char csdBuffer[kCsdSize];
125 memset(csdBuffer, 0xFF, sizeof(csdBuffer));
126
127 sp<ABuffer> csdBuffer0 = ABuffer::CreateAsCopy((void *)csdBuffer, kCsdSize);
128 ASSERT_NE(csdBuffer0.get(), nullptr) << "Unable to allocate buffer for CSD0 data";
129 ASSERT_NE(csdBuffer0->base(), nullptr) << "ABuffer base is null for CSD0";
130
131 sp<ABuffer> csdBuffer1 = ABuffer::CreateAsCopy((void *)csdBuffer, kCsdSize);
132 ASSERT_NE(csdBuffer1.get(), nullptr) << "Unable to allocate buffer for CSD1 data";
133 ASSERT_NE(csdBuffer1->base(), nullptr) << "ABuffer base is null for CSD1";
134
135 sp<ABuffer> csdBuffer2 = ABuffer::CreateAsCopy((void *)csdBuffer, kCsdSize);
136 ASSERT_NE(csdBuffer2.get(), nullptr) << "Unable to allocate buffer for CSD2 data";
137 ASSERT_NE(csdBuffer2->base(), nullptr) << "ABuffer base is null for CSD2";
138
139 format->setBuffer("csd-0", csdBuffer0);
140 format->setBuffer("csd-1", csdBuffer1);
141 format->setBuffer("csd-2", csdBuffer2);
142 }
143
addTrack(bool isAudio,int32_t index)144 void WebmFrameThreadUnitTest::addTrack(bool isAudio, int32_t index) {
145 ASSERT_LT(index, sizeof(kInputData) / sizeof(kInputData[0]))
146 << "Invalid index for loopup table";
147
148 sp<AMessage> format = new AMessage;
149 format->setString("mime", kInputData[index].mime);
150 if (!isAudio) {
151 format->setInt32("width", kInputData[index].firstParam);
152 format->setInt32("height", kInputData[index].secondParam);
153 } else {
154 format->setInt32("sample-rate", kInputData[index].firstParam);
155 format->setInt32("channel-count", kInputData[index].secondParam);
156 ASSERT_NO_FATAL_FAILURE(writeAudioHeaderData(format, kInputData[index].mime));
157 }
158
159 sp<MetaData> trackMeta = new MetaData;
160 convertMessageToMetaData(format, trackMeta);
161
162 if (!isAudio) {
163 mSource[kVideoIdx] = new MediaAdapter(trackMeta);
164 ASSERT_NE(mSource[kVideoIdx], nullptr) << "Unable to create source";
165 } else {
166 mSource[kAudioIdx] = new MediaAdapter(trackMeta);
167 ASSERT_NE(mSource[kAudioIdx], nullptr) << "Unable to create source";
168 }
169 }
170
createWebmThreads(std::initializer_list<int32_t> indexList)171 void WebmFrameThreadUnitTest::createWebmThreads(std::initializer_list<int32_t> indexList) {
172 mSinkThread = new WebmFrameSinkThread(mFd, mSegmentDataStart, mVSink, mASink, mCuePoints);
173 ASSERT_NE(mSinkThread, nullptr) << "Failed to create Sink Thread";
174
175 bool isAudio;
176 // MultiTrack input
177 for (int32_t index : indexList) {
178 isAudio = kInputData[index].isAudio;
179 ASSERT_NO_FATAL_FAILURE(addTrack(isAudio, index));
180 if (!isAudio) {
181 mVideoThread = new WebmFrameMediaSourceThread(mSource[kVideoIdx], kVideoType, mVSink,
182 kDefaultTimeCodeScaleUs, 0, 0, 1, 0);
183 } else {
184 mAudioThread = new WebmFrameMediaSourceThread(mSource[kAudioIdx], kAudioType, mASink,
185 kDefaultTimeCodeScaleUs, 0, 0, 1, 0);
186 }
187 }
188 // To handle single track file
189 if (!mVideoThread) {
190 mVideoThread = new WebmFrameEmptySourceThread(kVideoType, mVSink);
191 } else if (!mAudioThread) {
192 mAudioThread = new WebmFrameEmptySourceThread(kAudioType, mASink);
193 }
194 ASSERT_NE(mVideoThread, nullptr) << "Failed to create Video Thread";
195 ASSERT_NE(mAudioThread, nullptr) << "Failed to create Audio Thread";
196 }
197
startWebmFrameThreads()198 void WebmFrameThreadUnitTest::startWebmFrameThreads() {
199 status_t status = mAudioThread->start();
200 ASSERT_EQ(status, AMEDIA_OK) << "Failed to start Audio Thread";
201 status = mVideoThread->start();
202 ASSERT_EQ(status, AMEDIA_OK) << "Failed to start Video Thread";
203 status = mSinkThread->start();
204 ASSERT_EQ(status, AMEDIA_OK) << "Failed to start Sink Thread";
205 }
206
stopWebmFrameThreads()207 void WebmFrameThreadUnitTest::stopWebmFrameThreads() {
208 status_t status = mAudioThread->stop();
209 ASSERT_EQ(status, AMEDIA_OK) << "Failed to stop Audio Thread";
210 status = mVideoThread->stop();
211 ASSERT_EQ(status, AMEDIA_OK) << "Failed to stop Video Thread";
212 status = mSinkThread->stop();
213 ASSERT_EQ(status, AMEDIA_OK) << "Failed to stop Sink Thread";
214 }
215
216 // Write dummy data to a file
writeFileData(int32_t inputFrameId,int32_t range)217 void WebmFrameThreadUnitTest::writeFileData(int32_t inputFrameId, int32_t range) {
218 char data[kFrameSize];
219 memset(data, 0xFF, sizeof(data));
220 int32_t status = OK;
221 do {
222 // Queue frames for both A/V tracks
223 for (int32_t idx = kVideoIdx; idx < kMaxStreamCount; idx++) {
224 sp<ABuffer> buffer = new ABuffer((void *)data, kFrameSize);
225 ASSERT_NE(buffer.get(), nullptr) << "ABuffer returned nullptr";
226
227 // Released in MediaAdapter::signalBufferReturned().
228 MediaBuffer *mediaBuffer = new MediaBuffer(buffer);
229 ASSERT_NE(mediaBuffer, nullptr) << "MediaBuffer returned nullptr";
230
231 mediaBuffer->add_ref();
232 mediaBuffer->set_range(buffer->offset(), buffer->size());
233
234 MetaDataBase &sampleMetaData = mediaBuffer->meta_data();
235 sampleMetaData.setInt64(kKeyTime, inputFrameId * kDefaultTimeCodeScaleUs);
236
237 // For audio codecs, treat all frame as sync frame
238 if ((idx == kAudioIdx) || (inputFrameId % kSyncFrameInterval == 0)) {
239 sampleMetaData.setInt32(kKeyIsSyncFrame, true);
240 }
241
242 // This pushBuffer will wait until the mediaBuffer is consumed.
243 if (mSource[idx] != nullptr) {
244 status = mSource[idx]->pushBuffer(mediaBuffer);
245 }
246 ASSERT_EQ(status, OK);
247 }
248 inputFrameId++;
249 } while (inputFrameId < range);
250 }
251
// Muxes a burst of frames through the selected tracks and verifies that all
// threads start, consume the data, and stop cleanly.
TEST_P(WebmFrameThreadUnitTest, WriteTest) {
    const std::pair<int32_t, int32_t> &testParams = GetParam();
    ASSERT_NO_FATAL_FAILURE(createWebmThreads({testParams.first, testParams.second}));

    ASSERT_NO_FATAL_FAILURE(startWebmFrameThreads());

    ASSERT_NO_FATAL_FAILURE(writeFileData(0, kNumFramesToWrite));

    // Signal end-of-stream on the sources before stopping the threads.
    if (mSource[kAudioIdx]) mSource[kAudioIdx]->stop();
    if (mSource[kVideoIdx]) mSource[kVideoIdx]->stop();

    ASSERT_NO_FATAL_FAILURE(stopWebmFrameThreads());
}
266
// Repeatedly pauses and resumes the source threads while frames keep being
// pushed, verifying pause/resume never wedges the pipeline.
TEST_P(WebmFrameThreadUnitTest, PauseTest) {
    const std::pair<int32_t, int32_t> &testParams = GetParam();
    ASSERT_NO_FATAL_FAILURE(createWebmThreads({testParams.first, testParams.second}));

    ASSERT_NO_FATAL_FAILURE(startWebmFrameThreads());

    int32_t offset = 0;
    ASSERT_NO_FATAL_FAILURE(writeFileData(offset, kNumFramesToWrite));
    offset += kNumFramesToWrite;

    // NOTE(review): writeFileData takes an absolute end bound, so once offset
    // exceeds kNumFramesToWrite each call below writes exactly one frame per
    // track (the do-while runs once). Confirm whether range should instead be
    // offset + kNumFramesToWrite.
    for (int loopIdx = 0; loopIdx < kMaxLoopCount; loopIdx++) {
        // pause the threads
        ASSERT_EQ(mAudioThread->pause(), AMEDIA_OK) << "Failed to pause Audio Thread";
        ASSERT_EQ(mVideoThread->pause(), AMEDIA_OK) << "Failed to pause Video Thread";

        // Under pause state, no write should happen
        ASSERT_NO_FATAL_FAILURE(writeFileData(offset, kNumFramesToWrite));
        offset += kNumFramesToWrite;

        ASSERT_EQ(mAudioThread->resume(), AMEDIA_OK) << "Failed to resume Audio Thread";
        ASSERT_EQ(mVideoThread->resume(), AMEDIA_OK) << "Failed to resume Video Thread";

        ASSERT_NO_FATAL_FAILURE(writeFileData(offset, kNumFramesToWrite));
        offset += kNumFramesToWrite;
    }

    // Signal end-of-stream on the sources before stopping the threads.
    if (mSource[kAudioIdx]) mSource[kAudioIdx]->stop();
    if (mSource[kVideoIdx]) mSource[kVideoIdx]->stop();
    ASSERT_NO_FATAL_FAILURE(stopWebmFrameThreads());
}
302
// Each pair selects two kInputData entries, covering audio+audio, audio+video
// (both orders), and video+video track combinations.
INSTANTIATE_TEST_SUITE_P(WebmFrameThreadUnitTestAll, WebmFrameThreadUnitTest,
                         ::testing::Values(std::make_pair(0, 1), std::make_pair(0, 2),
                                           std::make_pair(0, 3), std::make_pair(1, 0),
                                           std::make_pair(1, 2), std::make_pair(1, 3),
                                           std::make_pair(2, 3)));
308
main(int argc,char ** argv)309 int main(int argc, char **argv) {
310 ::testing::InitGoogleTest(&argc, argv);
311 int status = RUN_ALL_TESTS();
312 ALOGV("Test result = %d\n", status);
313 return status;
314 }
315