/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef GRAPHIC_BUFFER_SOURCE_H_

#define GRAPHIC_BUFFER_SOURCE_H_

#include <binder/Status.h>
#include <utils/RefBase.h>

#include <media/hardware/VideoAPI.h>
#include <media/stagefright/foundation/ABase.h>
#include <media/stagefright/foundation/AHandlerReflector.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/bqhelper/ComponentWrapper.h>
#include <android/hardware/graphics/bufferqueue/1.0/IGraphicBufferProducer.h>
#include <android/hardware/graphics/bufferqueue/2.0/IGraphicBufferProducer.h>

namespace android {

using ::android::binder::Status;

struct FrameDropper;
class BufferItem;
class IGraphicBufferProducer;
class IGraphicBufferConsumer;
/*
 * This class is used to feed codecs from a Surface via BufferQueue or
 * HW producer.
 *
 * Instances of the class don't run on a dedicated thread.  Instead,
 * various events trigger data movement:
 *
 *  - Availability of a new frame of data from the BufferQueue (notified
 *    via the onFrameAvailable callback).
 *  - The return of a codec buffer.
 *  - Application signaling end-of-stream.
 *  - Transition to or from the "executing" state.
 *
 * Frames of data (and, perhaps, the end-of-stream indication) can arrive
 * before the codec is in the "executing" state, so we need to queue
 * things up until we're ready to go.
 *
 * The GraphicBufferSource can be configured dynamically to discard frames
 * from the source:
 *
 * - if their timestamp is less than a start time
 * - if the source is suspended or stopped and the suspend/stop-time is reached
 * - if EOS was signaled
 * - if there is no encoder connected to it
 *
 * The source, furthermore, may choose to drop (not encode) frames:
 *
 * - to throttle the frame rate (keep it under a certain limit)
 *
 * Finally, the source may optionally hold onto the last non-discarded frame
 * (even if it was dropped) to reencode it after an interval if no further
 * frames are sent by the producer.
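 *
 * A rough sketch of the expected call sequence (illustrative only; the names
 * component, dataSpace, bufferCount, width, height and usage are placeholders
 * for the caller's actual values):
 *
 *   sp<GraphicBufferSource> source = new GraphicBufferSource();
 *   if (source->initCheck() != OK) return;
 *   source->configure(component, dataSpace, bufferCount, width, height, usage);
 *   // hand source->getIGraphicBufferProducer() to the producer (e.g. a Surface)
 *   source->start();                   // codec has entered the "executing" state
 *   // ... frames flow via onFrameAvailable() / onInputBufferEmptied() ...
 *   source->signalEndOfInputStream();  // application signals end-of-stream
 *   source->stop();
 *   source->release();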
 */
class GraphicBufferSource : public RefBase {
public:
    GraphicBufferSource();

    virtual ~GraphicBufferSource();

    // We can't throw an exception if the constructor fails, so we just set
    // this and require that the caller test the value.
    status_t initCheck() const {
        return mInitCheck;
    }

    // Returns the handle to the producer side of the BufferQueue.  Buffers
    // queued on this will be received by GraphicBufferSource.
    sp<IGraphicBufferProducer> getIGraphicBufferProducer() const;

    // Returns the handle to the bufferqueue HAL (V1_0) producer side of the BufferQueue.
    // Buffers queued on this will be received by GraphicBufferSource.
    sp<::android::hardware::graphics::bufferqueue::V1_0::IGraphicBufferProducer>
        getHGraphicBufferProducer_V1_0() const;

    // Returns the handle to the bufferqueue HAL producer side of the BufferQueue.
    // Buffers queued on this will be received by GraphicBufferSource.
    sp<::android::hardware::graphics::bufferqueue::V2_0::IGraphicBufferProducer>
        getHGraphicBufferProducer() const;

    // This is called when the component transitions to the running state, which
    // means we can start handing it buffers.  If we already have buffers of data
    // sitting in the BufferQueue, this will send them to the codec.
    Status start();

    // This is called when the component transitions to the stopped state,
    // indicating that the codec is meant to return all buffers back to the
    // client for them to be freed. Do NOT submit any more buffers to the
    // component.
    Status stop();

    // This is called when the component transitions to the released state,
    // indicating that we are shutting down.
    Status release();

    // A "codec buffer", i.e. a buffer that can be used to pass data into
    // the encoder, has been allocated.  (This call does not call back into
    // the component.)
    Status onInputBufferAdded(int32_t bufferId);

    // Called when the encoder is no longer using the buffer.  If we have a BQ
    // buffer available, fill it with a new frame of data; otherwise, just mark
    // it as available.
    Status onInputBufferEmptied(int32_t bufferId, int fenceFd);

    // IGraphicBufferSource interface
    // ------------------------------

    // Configures the buffer source to be used with a component, using the given
    // default data space.
    status_t configure(
        const sp<ComponentWrapper> &component,
        int32_t dataSpace,
        int32_t bufferCount,
        uint32_t frameWidth,
        uint32_t frameHeight,
        uint32_t consumerUsage);
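
    // A minimal usage sketch (the values below are illustrative assumptions,
    // not required defaults; "wrapper" stands for the caller's ComponentWrapper):
    //
    //   sp<GraphicBufferSource> source = new GraphicBufferSource();
    //   if (source->initCheck() == OK) {
    //       source->configure(
    //               wrapper,                          // sp<ComponentWrapper>
    //               HAL_DATASPACE_BT709,              // default dataspace
    //               16,                               // bufferCount
    //               1920, 1080,                       // frame dimensions
    //               GRALLOC_USAGE_HW_VIDEO_ENCODER);  // consumerUsage
    //   }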

    // This is called after the last input frame has been submitted or the buffer
    // timestamp is greater than or equal to stopTimeUs. We need to submit an
    // empty buffer with the EOS flag set.  If we don't have a codec buffer ready,
    // we just set the mEndOfStream flag.
    status_t signalEndOfInputStream();

    // If suspend is true, all incoming buffers (including those currently
    // in the BufferQueue) with timestamp larger than timeUs will be discarded
    // until the suspension is lifted. If suspend is false, all incoming buffers
    // (including those currently in the BufferQueue) with timestamp larger than
    // timeUs will be processed. timeUs uses the SYSTEM_TIME_MONOTONIC time base.
    status_t setSuspend(bool suspend, int64_t timeUs);

    // Specifies the interval after which we requeue the buffer previously
    // queued to the encoder. This is useful in the case of SurfaceFlinger
    // providing the input surface if the resulting encoded stream is to
    // be displayed "live". If we were not to push through the extra frame,
    // the decoder on the remote end would be unable to decode the latest frame.
    // This API must be called before transitioning the encoder to the "executing"
    // state, and once this behaviour is specified it cannot be reset.
    status_t setRepeatPreviousFrameDelayUs(int64_t repeatAfterUs);
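
    // For example (an illustrative sketch; the exact repeat count and timing
    // are implementation details):
    //
    //   source->setRepeatPreviousFrameDelayUs(100000);  // hypothetical 100 ms interval
    //
    // With this set, if the producer stops queueing new frames, the last
    // non-discarded frame is resubmitted to the encoder after roughly 100 ms,
    // so a "live" viewer keeps receiving decodable data.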

    // Sets the input buffer timestamp offset.
    // When set, the sample's timestamp will be adjusted with the timeOffsetUs.
    status_t setTimeOffsetUs(int64_t timeOffsetUs);

    /*
     * Sets the maximum frame rate on the source.
     *
     * When maxFps is a positive number, it indicates the maximum rate at which
     * the buffers from this source will be sent to the encoder. Excess
     * frames will be dropped to meet the frame rate requirement.
     *
     * When maxFps is a negative number, any frame drop logic will be disabled
     * and all frames from this source will be sent to the encoder, even when
     * the timestamp goes backwards. Note that some components may still drop
     * out-of-order frames silently, so this usually has to be used in
     * conjunction with the OMXNodeInstance::setMaxPtsGapUs() workaround.
     *
     * When maxFps is 0, this call will fail with BAD_VALUE.
     */
    status_t setMaxFps(float maxFps);
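
    // For example (rough arithmetic, assuming a steady source):
    //
    //   source->setMaxFps(30.0f);
    //
    // A producer queueing ~60 frames per second would have roughly every other
    // frame dropped, keeping delivery to the encoder at or below ~30 fps
    // (a minimum spacing of about 33,333 us between submitted frames).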

    // Sets the time lapse (or slow motion) parameters.
    // When set, the sample's timestamp will be modified to match the playback
    // frame rate, and the capture timestamp will be modified to match the
    // capture rate.
    status_t setTimeLapseConfig(double fps, double captureFps);

    // Sets the start time in us (in system time); samples before this time
    // will be dropped and not submitted to the encoder.
    status_t setStartTimeUs(int64_t startTimeUs);

    // Sets the stop time in us (in system time); samples after this time will be
    // dropped and not submitted to the encoder. timeUs uses the
    // SYSTEM_TIME_MONOTONIC time base.
    status_t setStopTimeUs(int64_t stopTimeUs);

    // Gets the stop time offset in us. This is the time offset between the latest
    // buffer time and stopTimeUs. If the stop time is not set, INVALID_OPERATION
    // will be returned. If the return value is OK, *stopTimeOffsetUs will contain
    // the valid offset; otherwise, *stopTimeOffsetUs will not be modified. A
    // positive stopTimeOffsetUs means the buffer time is larger than stopTimeUs.
    status_t getStopTimeOffsetUs(int64_t *stopTimeOffsetUs);
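
    // For instance (illustrative numbers): with setStopTimeUs(5000000) and a
    // latest buffer timestamp of 5100000 us, a successful call would set
    // *stopTimeOffsetUs to 100000.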

    // Sets the desired color aspects, e.g. to be used when the producer does not specify a dataspace.
    status_t setColorAspects(int32_t aspectsPacked);

protected:

    // BufferQueue::ConsumerListener interface, called when a new frame of
    // data is available.  If we're executing and a codec buffer is
    // available, we acquire the buffer, copy the GraphicBuffer reference
    // into the codec buffer, and call Empty[This]Buffer.  If we're not yet
    // executing or there's no codec buffer available, we just increment
    // mNumFramesAvailable and return.
    void onFrameAvailable(const BufferItem& item);

    // BufferQueue::ConsumerListener interface, called when the client has
    // released one or more GraphicBuffers.  We clear out the appropriate
    // set of mBufferSlot entries.
    void onBuffersReleased();

    // BufferQueue::ConsumerListener interface, called when the client has
    // changed the sideband stream. GraphicBufferSource doesn't handle sideband
    // streams, so this is a no-op (and should never be called).
    void onSidebandStreamChanged();

private:
    // BQ::ConsumerListener interface
    // ------------------------------
    struct ConsumerProxy;
    sp<ConsumerProxy> mConsumerProxy;

    // Lock, covers all member variables.
    mutable Mutex mMutex;

    // Used to report constructor failure.
    status_t mInitCheck;

    // Graphic buffer reference objects
    // --------------------------------

    // These are used to keep a shared reference to GraphicBuffers and gralloc handles owned by the
    // GraphicBufferSource as well as to manage the cache slots. Separate references are owned by
    // the buffer cache (controlled by the buffer queue/buffer producer) and the codec.

    // When we get buffers from the producer (BQ), it designates them to be cached in specific
    // slots. Each slot owns a shared reference to the graphic buffer (we track these using
    // CachedBuffer) that is in that slot, but the producer controls the slots.
    struct CachedBuffer;

    // When we acquire a buffer, we must release it back to the producer once we (or the codec)
    // no longer use it (as long as the buffer is still in the cache slot). We use shared
    // AcquiredBuffer instances for this purpose - and we call release buffer when the last
    // reference is relinquished.
    struct AcquiredBuffer;

    // We also need to keep some extra metadata (other than the buffer reference) for acquired
    // buffers. These are tracked in the VideoBuffer struct.
    struct VideoBuffer {
        std::shared_ptr<AcquiredBuffer> mBuffer;
        nsecs_t mTimestampNs;
        android_dataspace_t mDataspace;
    };
    // Cached and acquired buffers
    // --------------------------------

    typedef int slot_id;

    // Maps a slot to the cached buffer in that slot
    KeyedVector<slot_id, std::shared_ptr<CachedBuffer>> mBufferSlots;

    // Queue of buffers acquired in chronological order that are not yet submitted to the codec
    List<VideoBuffer> mAvailableBuffers;

    // Number of buffers that have been signaled by the producer that they are available, but
    // we've been unable to acquire them due to our max acquire count
    int32_t mNumAvailableUnacquiredBuffers;

    // Number of frames acquired from the consumer (debug only)
    // (as in acquireBuffer was called, and release still needs to be called)
    int32_t mNumOutstandingAcquires;

    // Acquires a buffer from the BQ and stores it in |item| if successful.
    // \return OK on success, or an error on failure.
    status_t acquireBuffer_l(VideoBuffer *item);

    // Called when a buffer was acquired from the producer
    void onBufferAcquired_l(const VideoBuffer &buffer);

    // Marks the buffer in the slot as no longer cached, and accounts for the outstanding
    // acquire count. Returns true if the slot was populated; otherwise, false.
    bool discardBufferInSlot_l(slot_id i);

    // Marks the buffer at the slot index as no longer cached, and accounts for the outstanding
    // acquire count.
    void discardBufferAtSlotIndex_l(ssize_t bsi);

    // Releases all acquired and unacquired available buffers.
    // This method will return if it fails to acquire an unacquired available buffer, which will
    // leave mNumAvailableUnacquiredBuffers positive on return.
    void releaseAllAvailableBuffers_l();

    // Returns whether we have any available buffers (acquired or not-yet-acquired).
    bool haveAvailableBuffers_l() const {
        return !mAvailableBuffers.empty() || mNumAvailableUnacquiredBuffers > 0;
    }

    // Codec buffers
    // -------------

    // When we queue buffers to the encoder, we must hold references to the graphic buffers
    // in those buffers - as the producer may free the slots.

    typedef int32_t codec_buffer_id;

    // Set of codec buffer IDs of buffers available to fill
    List<codec_buffer_id> mFreeCodecBuffers;

    // Maps codec buffer IDs to the buffer info submitted to the codec. Used to keep a reference
    // to the graphic buffer.
    KeyedVector<codec_buffer_id, std::shared_ptr<AcquiredBuffer>> mSubmittedCodecBuffers;

    // Processes the next acquired frame. If there is no available codec buffer, it returns false
    // without any further action.
    //
    // Otherwise, it consumes the next acquired frame and determines if it needs to be discarded or
    // dropped. If neither is needed, it submits it to the codec. It also saves the latest
    // non-dropped frame and submits it for repeat encoding (if this is enabled).
    //
    // \require there must be an acquired frame (i.e. we're in the onFrameAvailable callback,
    // or we're in codecBufferEmptied and mNumFramesAvailable is nonzero).
    // \require the codec must be executing
    // \returns true if it acquired (and handled) the next frame; otherwise, false.
    bool fillCodecBuffer_l();

    // Calculates the media timestamp for |item| and, on success, submits the buffer to the codec,
    // while also keeping a reference to it in mSubmittedCodecBuffers.
    // Returns UNKNOWN_ERROR if the buffer was not submitted due to the buffer timestamp.
    // Otherwise, it returns any submit success or error value returned by the codec.
    status_t submitBuffer_l(const VideoBuffer &item);

    // If there is an available codec buffer, submits an empty buffer with the EOS flag set and
    // sets the mEndOfStreamSent flag. Does nothing if there is no codec buffer available.
    void submitEndOfInputStream_l();

    // Set to true if we want to send end-of-stream after we run out of available frames from the
    // producer
    bool mEndOfStream;

    // Flag that the EOS was submitted to the encoder
    bool mEndOfStreamSent;

    // Dataspace for the last frame submitted to the codec
    android_dataspace mLastDataspace;

    // Default color aspects for this source
    int32_t mDefaultColorAspectsPacked;

    // Called when the data space of the input buffer changes
    void onDataspaceChanged_l(android_dataspace dataspace, android_pixel_format pixelFormat);

    // Pointer back to the component that created us.  We send buffers here.
    sp<ComponentWrapper> mComponent;

    // Set by start() / stop().
    bool mExecuting;

    bool mSuspended;

    // Returns true if this source is unconditionally discarding acquired buffers at the moment,
    // regardless of the metadata of those buffers
    bool areWeDiscardingAvailableBuffers_l();

    int64_t mLastFrameTimestampUs;

    // Our BufferQueue interfaces. mProducer is passed to the producer through
    // getIGraphicBufferProducer, and mConsumer is used internally to retrieve
    // the buffers queued by the producer.
    sp<IGraphicBufferProducer> mProducer;
    sp<IGraphicBufferConsumer> mConsumer;

    // The time to stop sending buffers.
    int64_t mStopTimeUs;

    struct ActionItem {
        typedef enum {
            PAUSE,
            RESUME,
            STOP
        } ActionType;
        ActionType mAction;
        int64_t mActionTimeUs;
    };

    // Maintain the last action timestamp to ensure all the action timestamps are
    // monotonically increasing.
    int64_t mLastActionTimeUs;

    // An action queue that queues up all the actions sent to GraphicBufferSource.
    // A STOP action should only appear at the end of the list, as all actions
    // after a STOP action will be discarded. mActionQueue is protected by mMutex.
    List<ActionItem> mActionQueue;

    ////
    friend struct AHandlerReflector<GraphicBufferSource>;

    enum {
        kWhatRepeatLastFrame,   ///< queue last frame for reencoding
    };
    enum {
        kRepeatLastFrameCount = 10,
    };

    int64_t mSkipFramesBeforeNs;

    sp<FrameDropper> mFrameDropper;

    sp<ALooper> mLooper;
    sp<AHandlerReflector<GraphicBufferSource> > mReflector;

    // Repeat last frame feature
    // -------------------------
    // configuration parameter: repeat interval for frame repeating (<0 if repeating is disabled)
    int64_t mFrameRepeatIntervalUs;

    // current frame repeat generation - used to cancel a pending frame repeat
    int32_t mRepeatLastFrameGeneration;

    // number of times to repeat the latest frame (0 = none)
    int32_t mOutstandingFrameRepeatCount;

    // The previous buffer should've been repeated but
    // no codec buffer was available at the time.
    bool mFrameRepeatBlockedOnCodecBuffer;

    // hold a reference to the last acquired (and not discarded) frame for frame repeating
    VideoBuffer mLatestBuffer;

    // queue the last frame for reencode after the repeat interval
    void queueFrameRepeat_l();

    // save |item| as the latest buffer and queue it for reencode (repeat)
    void setLatestBuffer_l(const VideoBuffer &item);

    // submit the last frame to the encoder and queue it for reencode
    // \return true if the buffer was submitted, false if it wasn't (e.g. the source is suspended
    // or there is no available codec buffer)
    bool repeatLatestBuffer_l();

    // Time lapse / slow motion configuration
    // --------------------------------------

    // desired frame rate for encoding - value <= 0 if undefined
    double mFps;

    // desired frame rate for capture - value <= 0 if undefined
    double mCaptureFps;

    // Time lapse mode is enabled if the capture frame rate is defined and it is
    // smaller than half the encoding frame rate (if defined). In this mode,
    // frames that come in between the capture interval (the reciprocal of the
    // capture frame rate) are dropped and the encoding timestamp is adjusted to
    // match the desired encoding frame rate.
    //
    // Slow motion mode is enabled if both encoding and capture frame rates are
    // defined and the encoding frame rate is less than half the capture frame
    // rate. In this mode, the source is expected to produce frames with an even
    // timestamp interval (after rounding) with the configured capture fps. The
    // first source timestamp is used as the source base time. Afterwards, the
    // timestamp of each source frame is snapped to the nearest expected capture
    // timestamp and scaled to match the configured encoding frame rate.

    // These modes must be enabled before using this source.
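
    // A worked example of time lapse mode (numbers are illustrative):
    //
    //   source->setTimeLapseConfig(30.0 /* fps */, 1.0 /* captureFps */);
    //
    // The producer is expected to queue roughly one frame per second. Each
    // accepted frame is assigned a media timestamp 1/30 s (~33,333 us) after
    // the previous one, so 30 seconds of capture plays back in about one second.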

    // adjusted capture timestamp of the base frame
    int64_t mBaseCaptureUs;

    // adjusted encoding timestamp of the base frame
    int64_t mBaseFrameUs;

    // number of frames from the base time
    int64_t mFrameCount;

    // adjusted capture timestamp for the previous frame (negative if there was none)
    int64_t mPrevCaptureUs;

    // adjusted media timestamp for the previous frame (negative if there was none)
    int64_t mPrevFrameUs;

    // desired offset between media time and capture time
    int64_t mInputBufferTimeOffsetUs;

    // Calculates and outputs the timestamp to use for a buffer with a specific buffer timestamp
    // |bufferTimestampNs|. Returns false on failure (the buffer is too close to the previous one
    // or the timestamp is moving backwards). Otherwise, stores the media timestamp in
    // |*codecTimeUs| and returns true.
    //
    // This method takes into account the start time offset and any time lapse or slow motion time
    // adjustment requests.
    bool calculateCodecTimestamp_l(nsecs_t bufferTimeNs, int64_t *codecTimeUs);

    void onMessageReceived(const sp<AMessage> &msg);

    DISALLOW_EVIL_CONSTRUCTORS(GraphicBufferSource);
};

}  // namespace android

#endif  // GRAPHIC_BUFFER_SOURCE_H_