1 /*
2  * Copyright (C) 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 //#define LOG_NDEBUG 0
18 #define LOG_TAG "WebmFrameThread"
19 
#include "WebmConstants.h"
#include "WebmFrameThread.h"

#include <media/stagefright/MetaData.h>
#include <media/stagefright/foundation/ADebug.h>

#include <utils/Log.h>

#include <algorithm>
#include <inttypes.h>
28 
29 using namespace webm;
30 
31 namespace android {
32 
wrap(void * arg)33 void *WebmFrameThread::wrap(void *arg) {
34     WebmFrameThread *worker = reinterpret_cast<WebmFrameThread*>(arg);
35     worker->run();
36     return NULL;
37 }
38 
start()39 status_t WebmFrameThread::start() {
40     status_t err = OK;
41     pthread_attr_t attr;
42     pthread_attr_init(&attr);
43     pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
44     if ((err = pthread_create(&mThread, &attr, WebmFrameThread::wrap, this))) {
45         mThread = 0;
46     }
47     pthread_attr_destroy(&attr);
48     return err;
49 }
50 
stop()51 status_t WebmFrameThread::stop() {
52     void *status = nullptr;
53     if (mThread) {
54         pthread_join(mThread, &status);
55         mThread = 0;
56     }
57     return (status_t)(intptr_t)status;
58 }
59 
60 //=================================================================================================
61 
WebmFrameSourceThread(int type,LinkedBlockingQueue<const sp<WebmFrame>> & sink)62 WebmFrameSourceThread::WebmFrameSourceThread(
63     int type,
64     LinkedBlockingQueue<const sp<WebmFrame> >& sink)
65     : mType(type), mSink(sink) {
66 }
67 
68 //=================================================================================================
69 
WebmFrameSinkThread(const int & fd,const uint64_t & off,sp<WebmFrameSourceThread> videoThread,sp<WebmFrameSourceThread> audioThread,List<sp<WebmElement>> & cues)70 WebmFrameSinkThread::WebmFrameSinkThread(
71         const int& fd,
72         const uint64_t& off,
73         sp<WebmFrameSourceThread> videoThread,
74         sp<WebmFrameSourceThread> audioThread,
75         List<sp<WebmElement> >& cues)
76     : mFd(fd),
77       mSegmentDataStart(off),
78       mVideoFrames(videoThread->mSink),
79       mAudioFrames(audioThread->mSink),
80       mCues(cues),
81       mStartOffsetTimecode(UINT64_MAX),
82       mDone(true) {
83 }
84 
WebmFrameSinkThread(const int & fd,const uint64_t & off,LinkedBlockingQueue<const sp<WebmFrame>> & videoSource,LinkedBlockingQueue<const sp<WebmFrame>> & audioSource,List<sp<WebmElement>> & cues)85 WebmFrameSinkThread::WebmFrameSinkThread(
86         const int& fd,
87         const uint64_t& off,
88         LinkedBlockingQueue<const sp<WebmFrame> >& videoSource,
89         LinkedBlockingQueue<const sp<WebmFrame> >& audioSource,
90         List<sp<WebmElement> >& cues)
91     : mFd(fd),
92       mSegmentDataStart(off),
93       mVideoFrames(videoSource),
94       mAudioFrames(audioSource),
95       mCues(cues),
96       mStartOffsetTimecode(UINT64_MAX),
97       mDone(true) {
98 }
99 
100 // Initializes a webm cluster with its starting timecode.
101 //
102 // frames:
103 //   sequence of input audio/video frames received from the source.
104 //
105 // clusterTimecodeL:
106 //   the starting timecode of the cluster; this is the timecode of the first
107 //   frame since frames are ordered by timestamp.
108 //
109 // children:
110 //   list to hold child elements in a webm cluster (start timecode and
111 //   simple blocks).
112 //
113 // static
initCluster(List<const sp<WebmFrame>> & frames,uint64_t & clusterTimecodeL,List<sp<WebmElement>> & children)114 void WebmFrameSinkThread::initCluster(
115     List<const sp<WebmFrame> >& frames,
116     uint64_t& clusterTimecodeL,
117     List<sp<WebmElement> >& children) {
118     CHECK(!frames.empty() && children.empty());
119 
120     const sp<WebmFrame> f = *(frames.begin());
121     clusterTimecodeL = f->mAbsTimecode;
122     WebmUnsigned *clusterTimecode = new WebmUnsigned(kMkvTimecode, clusterTimecodeL);
123     children.clear();
124     children.push_back(clusterTimecode);
125 }
126 
writeCluster(List<sp<WebmElement>> & children)127 void WebmFrameSinkThread::writeCluster(List<sp<WebmElement> >& children) {
128     // children must contain at least one simpleblock and its timecode
129     CHECK_GE(children.size(), 2u);
130 
131     uint64_t size;
132     sp<WebmElement> cluster = new WebmMaster(kMkvCluster, children);
133     cluster->write(mFd, size);
134     children.clear();
135 }
136 
137 // Write out (possibly multiple) webm cluster(s) from frames split on video key frames.
138 //
139 // last:
140 //   current flush is triggered by EOS instead of a second outstanding video key frame.
flushFrames(List<const sp<WebmFrame>> & frames,bool last)141 void WebmFrameSinkThread::flushFrames(List<const sp<WebmFrame> >& frames, bool last) {
142     if (frames.empty()) {
143         return;
144     }
145 
146     uint64_t clusterTimecodeL;
147     List<sp<WebmElement> > children;
148     initCluster(frames, clusterTimecodeL, children);
149 
150     uint64_t cueTime = clusterTimecodeL;
151     off_t fpos = ::lseek(mFd, 0, SEEK_CUR);
152     size_t n = frames.size();
153     if (!last) {
154         // If we are not flushing the last sequence of outstanding frames, flushFrames
155         // must have been called right after we have pushed a second outstanding video key
156         // frame (the last frame), which belongs to the next cluster; also hold back on
157         // flushing the second to last frame before we check its type. A audio frame
158         // should precede the aforementioned video key frame in the next sequence, a video
159         // frame should be the last frame in the current (to-be-flushed) sequence.
160         CHECK_GE(n, 2u);
161         n -= 2;
162     }
163 
164     for (size_t i = 0; i < n; i++) {
165         const sp<WebmFrame> f = *(frames.begin());
166         if (f->mType == kVideoType && f->mKey) {
167             cueTime = f->mAbsTimecode;
168         }
169 
170         if (f->mAbsTimecode - clusterTimecodeL > INT16_MAX) {
171             writeCluster(children);
172             initCluster(frames, clusterTimecodeL, children);
173         }
174 
175         frames.erase(frames.begin());
176         children.push_back(f->SimpleBlock(clusterTimecodeL));
177     }
178 
179     // equivalent to last==false
180     if (!frames.empty()) {
181         // decide whether to write out the second to last frame.
182         const sp<WebmFrame> secondLastFrame = *(frames.begin());
183         if (secondLastFrame->mType == kVideoType) {
184             frames.erase(frames.begin());
185             children.push_back(secondLastFrame->SimpleBlock(clusterTimecodeL));
186         }
187     }
188 
189     writeCluster(children);
190     sp<WebmElement> cuePoint = WebmElement::CuePointEntry(cueTime, 1, fpos - mSegmentDataStart);
191     mCues.push_back(cuePoint);
192 }
193 
start()194 status_t WebmFrameSinkThread::start() {
195     mDone = false;
196     return WebmFrameThread::start();
197 }
198 
stop()199 status_t WebmFrameSinkThread::stop() {
200     mDone = true;
201     mVideoFrames.push(WebmFrame::EOS);
202     mAudioFrames.push(WebmFrame::EOS);
203     return WebmFrameThread::stop();
204 }
205 
run()206 void WebmFrameSinkThread::run() {
207     int numVideoKeyFrames = 0;
208     List<const sp<WebmFrame> > outstandingFrames;
209     while (!mDone) {
210         ALOGV("wait v frame");
211         const sp<WebmFrame> videoFrame = mVideoFrames.peek();
212         ALOGV("v frame: %p", videoFrame.get());
213 
214         ALOGV("wait a frame");
215         const sp<WebmFrame> audioFrame = mAudioFrames.peek();
216         ALOGV("a frame: %p", audioFrame.get());
217 
218         if (mStartOffsetTimecode == UINT64_MAX) {
219             mStartOffsetTimecode =
220                     std::min(audioFrame->getAbsTimecode(), videoFrame->getAbsTimecode());
221         }
222 
223         if (videoFrame->mEos && audioFrame->mEos) {
224             break;
225         }
226 
227         if (*audioFrame < *videoFrame) {
228             ALOGV("take a frame");
229             mAudioFrames.take();
230             audioFrame->updateAbsTimecode(audioFrame->getAbsTimecode() - mStartOffsetTimecode);
231             outstandingFrames.push_back(audioFrame);
232         } else {
233             ALOGV("take v frame");
234             mVideoFrames.take();
235             videoFrame->updateAbsTimecode(videoFrame->getAbsTimecode() - mStartOffsetTimecode);
236             outstandingFrames.push_back(videoFrame);
237             if (videoFrame->mKey)
238                 numVideoKeyFrames++;
239         }
240 
241         if (numVideoKeyFrames == 2) {
242             flushFrames(outstandingFrames, /* last = */ false);
243             numVideoKeyFrames--;
244         }
245     }
246     ALOGV("flushing last cluster (size %zu)", outstandingFrames.size());
247     flushFrames(outstandingFrames, /* last = */ true);
248     mDone = true;
249 }
250 
251 //=================================================================================================
252 
253 static const int64_t kInitialDelayTimeUs = 700000LL;
254 
clearFlags()255 void WebmFrameMediaSourceThread::clearFlags() {
256     mDone = false;
257     mPaused = false;
258     mResumed = false;
259     mStarted = false;
260     mReachedEOS = false;
261 }
262 
WebmFrameMediaSourceThread(const sp<MediaSource> & source,int type,LinkedBlockingQueue<const sp<WebmFrame>> & sink,uint64_t timeCodeScale,int64_t startTimeRealUs,int32_t startTimeOffsetMs,int numTracks,bool realTimeRecording)263 WebmFrameMediaSourceThread::WebmFrameMediaSourceThread(
264         const sp<MediaSource>& source,
265         int type,
266         LinkedBlockingQueue<const sp<WebmFrame> >& sink,
267         uint64_t timeCodeScale,
268         int64_t startTimeRealUs,
269         int32_t startTimeOffsetMs,
270         int numTracks,
271         bool realTimeRecording)
272     : WebmFrameSourceThread(type, sink),
273       mSource(source),
274       mTimeCodeScale(timeCodeScale),
275       mTrackDurationUs(0) {
276     clearFlags();
277     mStartTimeUs = startTimeRealUs;
278     if (realTimeRecording && numTracks > 1) {
279         /*
280          * Copied from MPEG4Writer
281          *
282          * This extra delay of accepting incoming audio/video signals
283          * helps to align a/v start time at the beginning of a recording
284          * session, and it also helps eliminate the "recording" sound for
285          * camcorder applications.
286          *
287          * If client does not set the start time offset, we fall back to
288          * use the default initial delay value.
289          */
290         int64_t startTimeOffsetUs = startTimeOffsetMs * 1000LL;
291         if (startTimeOffsetUs < 0) {  // Start time offset was not set
292             startTimeOffsetUs = kInitialDelayTimeUs;
293         }
294         mStartTimeUs += startTimeOffsetUs;
295         ALOGI("Start time offset: %" PRId64 " us", startTimeOffsetUs);
296     }
297 }
298 
start()299 status_t WebmFrameMediaSourceThread::start() {
300     sp<MetaData> meta = new MetaData;
301     meta->setInt64(kKeyTime, mStartTimeUs);
302     status_t err = mSource->start(meta.get());
303     if (err != OK) {
304         mDone = true;
305         mReachedEOS = true;
306         return err;
307     } else {
308         mStarted = true;
309         return WebmFrameThread::start();
310     }
311 }
312 
resume()313 status_t WebmFrameMediaSourceThread::resume() {
314     if (!mDone && mPaused) {
315         mPaused = false;
316         mResumed = true;
317     }
318     return OK;
319 }
320 
pause()321 status_t WebmFrameMediaSourceThread::pause() {
322     if (mStarted) {
323         mPaused = true;
324     }
325     return OK;
326 }
327 
stop()328 status_t WebmFrameMediaSourceThread::stop() {
329     if (mStarted) {
330         mStarted = false;
331         mDone = true;
332         mSource->stop();
333         return WebmFrameThread::stop();
334     }
335     return OK;
336 }
337 
run()338 void WebmFrameMediaSourceThread::run() {
339     int32_t count = 0;
340     int64_t timestampUs = 0xdeadbeef;
341     int64_t lastTimestampUs = 0; // Previous sample time stamp
342     int64_t lastDurationUs = 0; // Previous sample duration
343     int64_t previousPausedDurationUs = 0;
344 
345     const uint64_t kUninitialized = 0xffffffffffffffffL;
346     mStartTimeUs = kUninitialized;
347 
348     status_t err = OK;
349     MediaBufferBase *buffer;
350     while (!mDone && (err = mSource->read(&buffer, NULL)) == OK) {
351         if (buffer->range_length() == 0) {
352             buffer->release();
353             buffer = NULL;
354             continue;
355         }
356 
357         MetaDataBase &md = buffer->meta_data();
358         CHECK(md.findInt64(kKeyTime, &timestampUs));
359         if (mStartTimeUs == kUninitialized) {
360             mStartTimeUs = timestampUs;
361         }
362 
363         if (mPaused && !mResumed) {
364             lastDurationUs = timestampUs - lastTimestampUs;
365             lastTimestampUs = timestampUs;
366             buffer->release();
367             buffer = NULL;
368             continue;
369         }
370         ++count;
371 
372         // adjust time-stamps after pause/resume
373         if (mResumed) {
374             int64_t durExcludingEarlierPausesUs = timestampUs - previousPausedDurationUs;
375             CHECK_GE(durExcludingEarlierPausesUs, 0LL);
376             int64_t pausedDurationUs = durExcludingEarlierPausesUs - mTrackDurationUs;
377             CHECK_GE(pausedDurationUs, lastDurationUs);
378             previousPausedDurationUs += pausedDurationUs - lastDurationUs;
379             mResumed = false;
380         }
381         timestampUs -= previousPausedDurationUs;
382         CHECK_GE(timestampUs, 0LL);
383 
384         int32_t isSync = false;
385         md.findInt32(kKeyIsSyncFrame, &isSync);
386         const sp<WebmFrame> f = new WebmFrame(
387             mType,
388             isSync,
389             timestampUs * 1000 / mTimeCodeScale,
390             buffer);
391         mSink.push(f);
392 
393         ALOGV(
394             "%s %s frame at %" PRId64 " size %zu\n",
395             mType == kVideoType ? "video" : "audio",
396             isSync ? "I" : "P",
397             timestampUs * 1000 / mTimeCodeScale,
398             buffer->range_length());
399 
400         buffer->release();
401         buffer = NULL;
402 
403         if (timestampUs > mTrackDurationUs) {
404             mTrackDurationUs = timestampUs;
405         }
406         lastDurationUs = timestampUs - lastTimestampUs;
407         lastTimestampUs = timestampUs;
408     }
409 
410     mTrackDurationUs += lastDurationUs;
411     mSink.push(WebmFrame::EOS);
412 }
413 }
414