/*
 * Copyright 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <assert.h>
#include <ctype.h>
#include <fcntl.h>
#include <inttypes.h>
#include <getopt.h>
#include <signal.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/wait.h>

#include <termios.h>
#include <unistd.h>

#define LOG_TAG "ScreenRecord"
#define ATRACE_TAG ATRACE_TAG_GRAPHICS
//#define LOG_NDEBUG 0
#include <utils/Log.h>

#include <binder/IPCThreadState.h>
#include <utils/Errors.h>
#include <utils/Timers.h>
#include <utils/Trace.h>

#include <gui/Surface.h>
#include <gui/SurfaceComposerClient.h>
#include <gui/ISurfaceComposer.h>
#include <ui/DisplayInfo.h>
#include <media/openmax/OMX_IVCommon.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MediaMuxer.h>
#include <media/stagefright/PersistentSurface.h>
#include <mediadrm/ICrypto.h>
#include <media/MediaCodecBuffer.h>

#include "screenrecord.h"
#include "Overlay.h"
#include "FrameOutput.h"

using android::ABuffer;
using android::ALooper;
using android::AMessage;
using android::AString;
using android::DisplayInfo;
using android::FrameOutput;
using android::IBinder;
using android::IGraphicBufferProducer;
using android::ISurfaceComposer;
using android::MediaCodec;
using android::MediaCodecBuffer;
using android::MediaMuxer;
using android::Overlay;
using android::PersistentSurface;
using android::ProcessState;
using android::Rect;
using android::String8;
using android::SurfaceComposerClient;
using android::Vector;
using android::sp;
using android::status_t;

using android::DISPLAY_ORIENTATION_0;
using android::DISPLAY_ORIENTATION_180;
using android::DISPLAY_ORIENTATION_90;
using android::INVALID_OPERATION;
using android::NAME_NOT_FOUND;
using android::NO_ERROR;
using android::UNKNOWN_ERROR;

static const uint32_t kMinBitRate = 100000;         // 0.1Mbps
static const uint32_t kMaxBitRate = 200 * 1000000;  // 200Mbps
static const uint32_t kMaxTimeLimitSec = 180;       // 3 minutes
static const uint32_t kFallbackWidth = 1280;        // 720p
static const uint32_t kFallbackHeight = 720;
static const char* kMimeTypeAvc = "video/avc";

// Command-line parameters.
static bool gVerbose = false;           // chatty on stdout
static bool gRotate = false;            // rotate 90 degrees
static bool gMonotonicTime = false;     // use system monotonic time for timestamps
static bool gPersistentSurface = false; // use persistent surface
static enum {
    FORMAT_MP4, FORMAT_H264, FORMAT_WEBM, FORMAT_3GPP, FORMAT_FRAMES, FORMAT_RAW_FRAMES
} gOutputFormat = FORMAT_MP4;           // data format for output
static AString gCodecName = "";         // codec name override
static bool gSizeSpecified = false;     // was size explicitly requested?
static bool gWantInfoScreen = false;    // do we want initial info screen?
static bool gWantFrameTime = false;     // do we want times on each frame?
static uint32_t gVideoWidth = 0;        // default width+height
static uint32_t gVideoHeight = 0;
static uint32_t gBitRate = 20000000;     // 20Mbps
static uint32_t gTimeLimitSec = kMaxTimeLimitSec;
static uint32_t gBframes = 0;

// Set by signal handler to stop recording.
static volatile bool gStopRequested = false;

// Previous signal handler state, restored after first hit.
static struct sigaction gOrigSigactionINT;
static struct sigaction gOrigSigactionHUP;


/*
 * Catch keyboard interrupt signals.  On receipt, the "stop requested"
 * flag is raised, and the original handler is restored (so that, if
 * we get stuck finishing, a second Ctrl-C will kill the process).
 */
static void signalCatcher(int signum)
{
    gStopRequested = true;
    switch (signum) {
    case SIGINT:
    case SIGHUP:
        sigaction(SIGINT, &gOrigSigactionINT, NULL);
        sigaction(SIGHUP, &gOrigSigactionHUP, NULL);
        break;
    default:
        abort();
        break;
    }
}

/*
 * Configures signal handlers.  The previous handlers are saved.
 *
 * If the command is run from an interactive adb shell, we get SIGINT
 * when Ctrl-C is hit.  If we're run from the host, the local adb process
 * gets the signal, and we get a SIGHUP when the terminal disconnects.
 */
static status_t configureSignals() {
    struct sigaction act;
    memset(&act, 0, sizeof(act));
    act.sa_handler = signalCatcher;
    if (sigaction(SIGINT, &act, &gOrigSigactionINT) != 0) {
        status_t err = -errno;
        fprintf(stderr, "Unable to configure SIGINT handler: %s\n",
                strerror(errno));
        return err;
    }
    if (sigaction(SIGHUP, &act, &gOrigSigactionHUP) != 0) {
        status_t err = -errno;
        fprintf(stderr, "Unable to configure SIGHUP handler: %s\n",
                strerror(errno));
        return err;
    }
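    // Ignoring SIGPIPE means a write to a closed pipe (e.g. when streaming
    // raw output through "adb shell" to a consumer that exits early) fails
    // with EPIPE instead of terminating the process.  (Assumed intent; the
    // original code leaves this call uncommented.)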
    signal(SIGPIPE, SIG_IGN);
    return NO_ERROR;
}

/*
 * Configures and starts the MediaCodec encoder.  Obtains an input surface
 * from the codec.
 */
static status_t prepareEncoder(float displayFps, sp<MediaCodec>* pCodec,
        sp<IGraphicBufferProducer>* pBufferProducer) {
    status_t err;

    if (gVerbose) {
        printf("Configuring recorder for %dx%d %s at %.2fMbps\n",
                gVideoWidth, gVideoHeight, kMimeTypeAvc, gBitRate / 1000000.0);
        fflush(stdout);
    }

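    // Describe the desired encoding.  KEY_I_FRAME_INTERVAL is expressed in
    // seconds, and the "Android opaque" color format indicates that input
    // frames arrive via the codec's input Surface rather than through
    // ByteBuffers.  (Standard MediaCodec semantics, noted here for clarity.)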
    sp<AMessage> format = new AMessage;
    format->setInt32(KEY_WIDTH, gVideoWidth);
    format->setInt32(KEY_HEIGHT, gVideoHeight);
    format->setString(KEY_MIME, kMimeTypeAvc);
    format->setInt32(KEY_COLOR_FORMAT, OMX_COLOR_FormatAndroidOpaque);
    format->setInt32(KEY_BIT_RATE, gBitRate);
    format->setFloat(KEY_FRAME_RATE, displayFps);
    format->setInt32(KEY_I_FRAME_INTERVAL, 10);
    format->setInt32(KEY_MAX_B_FRAMES, gBframes);
    if (gBframes > 0) {
        format->setInt32(KEY_PROFILE, AVCProfileMain);
        format->setInt32(KEY_LEVEL, AVCLevel41);
    }

    sp<android::ALooper> looper = new android::ALooper;
    looper->setName("screenrecord_looper");
    looper->start();
    ALOGV("Creating codec");
    sp<MediaCodec> codec;
    if (gCodecName.empty()) {
        codec = MediaCodec::CreateByType(looper, kMimeTypeAvc, true);
        if (codec == NULL) {
            fprintf(stderr, "ERROR: unable to create %s codec instance\n",
                    kMimeTypeAvc);
            return UNKNOWN_ERROR;
        }
    } else {
        codec = MediaCodec::CreateByComponentName(looper, gCodecName);
        if (codec == NULL) {
            fprintf(stderr, "ERROR: unable to create %s codec instance\n",
                    gCodecName.c_str());
            return UNKNOWN_ERROR;
        }
    }

    err = codec->configure(format, NULL, NULL,
            MediaCodec::CONFIGURE_FLAG_ENCODE);
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to configure %s codec at %dx%d (err=%d)\n",
                kMimeTypeAvc, gVideoWidth, gVideoHeight, err);
        codec->release();
        return err;
    }

    ALOGV("Creating encoder input surface");
    sp<IGraphicBufferProducer> bufferProducer;
    if (gPersistentSurface) {
        sp<PersistentSurface> surface = MediaCodec::CreatePersistentInputSurface();
        bufferProducer = surface->getBufferProducer();
        err = codec->setInputSurface(surface);
    } else {
        err = codec->createInputSurface(&bufferProducer);
    }
    if (err != NO_ERROR) {
        fprintf(stderr,
            "ERROR: unable to %s encoder input surface (err=%d)\n",
            gPersistentSurface ? "set" : "create",
            err);
        codec->release();
        return err;
    }

    ALOGV("Starting codec");
    err = codec->start();
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to start codec (err=%d)\n", err);
        codec->release();
        return err;
    }

    ALOGV("Codec prepared");
    *pCodec = codec;
    *pBufferProducer = bufferProducer;
    return 0;
}

/*
 * Sets the display projection, based on the display dimensions, video size,
 * and device orientation.
 */
static status_t setDisplayProjection(
        SurfaceComposerClient::Transaction& t,
        const sp<IBinder>& dpy,
        const DisplayInfo& mainDpyInfo) {

    // Set the region of the layer stack we're interested in, which in our
    // case is "all of it".
    Rect layerStackRect(mainDpyInfo.viewportW, mainDpyInfo.viewportH);

    // We need to preserve the aspect ratio of the display.
    float displayAspect = (float) mainDpyInfo.viewportH / (float) mainDpyInfo.viewportW;


    // Set the way we map the output onto the display surface (which will
    // be e.g. 1280x720 for a 720p video).  The rect is interpreted
    // post-rotation, so if the display is rotated 90 degrees we need to
    // "pre-rotate" it by flipping width/height, so that the orientation
    // adjustment changes it back.
    //
    // We might want to encode a portrait display as landscape to use more
    // of the screen real estate.  (If players respect a 90-degree rotation
    // hint, we can essentially get a 720x1280 video instead of 1280x720.)
    // In that case, we swap the configured video width/height and then
    // supply a rotation value to the display projection.
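    //
    // For example (illustrative numbers, not taken from the code): with a
    // 1080x1920 portrait display (displayAspect = 1920/1080 ~= 1.78) and a
    // requested 1280x720 video without --rotate, the "else" branch below
    // applies: outHeight = 720, outWidth = 720 / 1.78 ~= 405, so the display
    // content is pillarboxed into a 405x720 rect at offset x=437, y=0.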
    uint32_t videoWidth, videoHeight;
    uint32_t outWidth, outHeight;
    if (!gRotate) {
        videoWidth = gVideoWidth;
        videoHeight = gVideoHeight;
    } else {
        videoWidth = gVideoHeight;
        videoHeight = gVideoWidth;
    }
    if (videoHeight > (uint32_t)(videoWidth * displayAspect)) {
        // limited by narrow width; reduce height
        outWidth = videoWidth;
        outHeight = (uint32_t)(videoWidth * displayAspect);
    } else {
        // limited by short height; restrict width
        outHeight = videoHeight;
        outWidth = (uint32_t)(videoHeight / displayAspect);
    }
    uint32_t offX, offY;
    offX = (videoWidth - outWidth) / 2;
    offY = (videoHeight - outHeight) / 2;
    Rect displayRect(offX, offY, offX + outWidth, offY + outHeight);

    if (gVerbose) {
        if (gRotate) {
            printf("Rotated content area is %ux%u at offset x=%d y=%d\n",
                    outHeight, outWidth, offY, offX);
            fflush(stdout);
        } else {
            printf("Content area is %ux%u at offset x=%d y=%d\n",
                    outWidth, outHeight, offX, offY);
            fflush(stdout);
        }
    }

    t.setDisplayProjection(dpy,
            gRotate ? DISPLAY_ORIENTATION_90 : DISPLAY_ORIENTATION_0,
            layerStackRect, displayRect);
    return NO_ERROR;
}

/*
 * Configures the virtual display.  When this completes, virtual display
 * frames will start arriving from the buffer producer.
 */
static status_t prepareVirtualDisplay(const DisplayInfo& mainDpyInfo,
        const sp<IGraphicBufferProducer>& bufferProducer,
        sp<IBinder>* pDisplayHandle) {
    sp<IBinder> dpy = SurfaceComposerClient::createDisplay(
            String8("ScreenRecorder"), false /*secure*/);

    SurfaceComposerClient::Transaction t;
    t.setDisplaySurface(dpy, bufferProducer);
    setDisplayProjection(t, dpy, mainDpyInfo);
    t.setDisplayLayerStack(dpy, 0);    // default stack
    t.apply();

    *pDisplayHandle = dpy;

    return NO_ERROR;
}

/*
 * Runs the MediaCodec encoder, sending the output to the MediaMuxer.  The
 * input frames are coming from the virtual display as fast as SurfaceFlinger
 * wants to send them.
 *
 * Exactly one of muxer or rawFp must be non-null.
 *
 * The muxer must *not* have been started before calling.
 */
static status_t runEncoder(const sp<MediaCodec>& encoder,
        const sp<MediaMuxer>& muxer, FILE* rawFp, const sp<IBinder>& mainDpy,
        const sp<IBinder>& virtualDpy, uint8_t orientation) {
    static int kTimeout = 250000;   // be responsive on signal
    status_t err;
    ssize_t trackIdx = -1;
    uint32_t debugNumFrames = 0;
    int64_t startWhenNsec = systemTime(CLOCK_MONOTONIC);
    int64_t endWhenNsec = startWhenNsec + seconds_to_nanoseconds(gTimeLimitSec);
    DisplayInfo mainDpyInfo;
    bool firstFrame = true;

    assert((rawFp == NULL && muxer != NULL) || (rawFp != NULL && muxer == NULL));

    Vector<sp<MediaCodecBuffer> > buffers;
    err = encoder->getOutputBuffers(&buffers);
    if (err != NO_ERROR) {
        fprintf(stderr, "Unable to get output buffers (err=%d)\n", err);
        return err;
    }

    // Run until we're signaled.
    while (!gStopRequested) {
        size_t bufIndex, offset, size;
        int64_t ptsUsec;
        uint32_t flags;

        if (firstFrame) {
            ATRACE_NAME("first_frame");
            firstFrame = false;
        }

        if (systemTime(CLOCK_MONOTONIC) > endWhenNsec) {
            if (gVerbose) {
                printf("Time limit reached\n");
                fflush(stdout);
            }
            break;
        }

        ALOGV("Calling dequeueOutputBuffer");
        err = encoder->dequeueOutputBuffer(&bufIndex, &offset, &size, &ptsUsec,
                &flags, kTimeout);
        ALOGV("dequeueOutputBuffer returned %d", err);
        switch (err) {
        case NO_ERROR:
            // got a buffer
            if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) != 0) {
                ALOGV("Got codec config buffer (%zu bytes)", size);
                if (muxer != NULL) {
                    // ignore this -- we passed the CSD into MediaMuxer when
                    // we got the format change notification
                    size = 0;
                }
            }
            if (size != 0) {
                ALOGV("Got data in buffer %zu, size=%zu, pts=%" PRId64,
                        bufIndex, size, ptsUsec);

                { // scope
                    ATRACE_NAME("orientation");
                    // Check orientation, update if it has changed.
                    //
                    // Polling for changes is inefficient and wrong, but the
                    // useful stuff is hard to get at without a Dalvik VM.
                    err = SurfaceComposerClient::getDisplayInfo(mainDpy,
                            &mainDpyInfo);
                    if (err != NO_ERROR) {
                        ALOGW("getDisplayInfo(main) failed: %d", err);
                    } else if (orientation != mainDpyInfo.orientation) {
                        ALOGD("orientation changed, now %d", mainDpyInfo.orientation);
                        SurfaceComposerClient::Transaction t;
                        setDisplayProjection(t, virtualDpy, mainDpyInfo);
                        t.apply();
                        orientation = mainDpyInfo.orientation;
                    }
                }

                // If the virtual display isn't providing us with timestamps,
                // use the current time.  This isn't great -- we could get
                // decoded data in clusters -- but we're not expecting
                // to hit this anyway.
                if (ptsUsec == 0) {
                    ptsUsec = systemTime(SYSTEM_TIME_MONOTONIC) / 1000;
                }

                if (muxer == NULL) {
                    fwrite(buffers[bufIndex]->data(), 1, size, rawFp);
                    // Flush the data immediately in case we're streaming.
                    // We don't want to do this if all we've written is
                    // the SPS/PPS data because mplayer gets confused.
                    if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0) {
                        fflush(rawFp);
                    }
                } else {
                    // The MediaMuxer docs are unclear, but it appears that we
                    // need to pass either the full set of BufferInfo flags, or
                    // (flags & BUFFER_FLAG_SYNCFRAME).
                    //
                    // If this blocks for too long we could drop frames.  We may
                    // want to queue these up and do them on a different thread.
                    ATRACE_NAME("write sample");
                    assert(trackIdx != -1);
                    // TODO
                    sp<ABuffer> buffer = new ABuffer(
                            buffers[bufIndex]->data(), buffers[bufIndex]->size());
                    err = muxer->writeSampleData(buffer, trackIdx,
                            ptsUsec, flags);
                    if (err != NO_ERROR) {
                        fprintf(stderr,
                            "Failed writing data to muxer (err=%d)\n", err);
                        return err;
                    }
                }
                debugNumFrames++;
            }
            err = encoder->releaseOutputBuffer(bufIndex);
            if (err != NO_ERROR) {
                fprintf(stderr, "Unable to release output buffer (err=%d)\n",
                        err);
                return err;
            }
            if ((flags & MediaCodec::BUFFER_FLAG_EOS) != 0) {
                // Not expecting EOS from SurfaceFlinger.  Go with it.
                ALOGI("Received end-of-stream");
                gStopRequested = true;
            }
            break;
        case -EAGAIN:                       // INFO_TRY_AGAIN_LATER
            ALOGV("Got -EAGAIN, looping");
            break;
        case android::INFO_FORMAT_CHANGED:    // INFO_OUTPUT_FORMAT_CHANGED
            {
                // Format includes CSD, which we must provide to muxer.
                ALOGV("Encoder format changed");
                sp<AMessage> newFormat;
                encoder->getOutputFormat(&newFormat);
                if (muxer != NULL) {
                    trackIdx = muxer->addTrack(newFormat);
                    ALOGV("Starting muxer");
                    err = muxer->start();
                    if (err != NO_ERROR) {
                        fprintf(stderr, "Unable to start muxer (err=%d)\n", err);
                        return err;
                    }
                }
            }
            break;
        case android::INFO_OUTPUT_BUFFERS_CHANGED:   // INFO_OUTPUT_BUFFERS_CHANGED
            // Not expected for an encoder; handle it anyway.
            ALOGV("Encoder buffers changed");
            err = encoder->getOutputBuffers(&buffers);
            if (err != NO_ERROR) {
                fprintf(stderr,
                        "Unable to get new output buffers (err=%d)\n", err);
                return err;
            }
            break;
        case INVALID_OPERATION:
            ALOGW("dequeueOutputBuffer returned INVALID_OPERATION");
            return err;
        default:
            fprintf(stderr,
                    "Got weird result %d from dequeueOutputBuffer\n", err);
            return err;
        }
    }

    ALOGV("Encoder stopping (req=%d)", gStopRequested);
    if (gVerbose) {
        printf("Encoder stopping; recorded %u frames in %" PRId64 " seconds\n",
                debugNumFrames, nanoseconds_to_seconds(
                        systemTime(CLOCK_MONOTONIC) - startWhenNsec));
        fflush(stdout);
    }
    return NO_ERROR;
}

/*
 * Raw H.264 byte stream output requested.  Send the output to stdout
 * if desired.  If the output is a tty, reconfigure it to avoid the
 * CRLF line termination that we see with "adb shell" commands.
 */
static FILE* prepareRawOutput(const char* fileName) {
    FILE* rawFp = NULL;

    if (strcmp(fileName, "-") == 0) {
        if (gVerbose) {
            fprintf(stderr, "ERROR: verbose output and '-' not compatible\n");
            return NULL;
        }
        rawFp = stdout;
    } else {
        rawFp = fopen(fileName, "w");
        if (rawFp == NULL) {
            fprintf(stderr, "fopen raw failed: %s\n", strerror(errno));
            return NULL;
        }
    }

    int fd = fileno(rawFp);
    if (isatty(fd)) {
        // best effort -- reconfigure tty for "raw"
        ALOGD("raw video output to tty (fd=%d)", fd);
        struct termios term;
        if (tcgetattr(fd, &term) == 0) {
            cfmakeraw(&term);
            if (tcsetattr(fd, TCSANOW, &term) == 0) {
                ALOGD("tty successfully configured for raw");
            }
        }
    }

    return rawFp;
}

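// Rounds down to an even value (e.g. 1081 -> 1080); the encoder can't be
// configured with odd frame dimensions (see recordScreen below).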
static inline uint32_t floorToEven(uint32_t num) {
    return num & ~1;
}

/*
 * Main "do work" start point.
 *
 * Configures codec, muxer, and virtual display, then starts moving bits
 * around.
 */
static status_t recordScreen(const char* fileName) {
    status_t err;

    // Configure signal handler.
    err = configureSignals();
    if (err != NO_ERROR) return err;

    // Start Binder thread pool.  MediaCodec needs to be able to receive
    // messages from mediaserver.
    sp<ProcessState> self = ProcessState::self();
    self->startThreadPool();

    // Get main display parameters.
    const sp<IBinder> mainDpy = SurfaceComposerClient::getInternalDisplayToken();
    if (mainDpy == nullptr) {
        fprintf(stderr, "ERROR: no display\n");
        return NAME_NOT_FOUND;
    }

    DisplayInfo mainDpyInfo;
    err = SurfaceComposerClient::getDisplayInfo(mainDpy, &mainDpyInfo);
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to get display characteristics\n");
        return err;
    }

    if (gVerbose) {
        printf("Main display is %dx%d @%.2ffps (orientation=%u)\n",
                mainDpyInfo.viewportW, mainDpyInfo.viewportH, mainDpyInfo.fps,
                mainDpyInfo.orientation);
        fflush(stdout);
    }

    // The encoder can't be configured with odd dimensions.
    if (gVideoWidth == 0) {
        gVideoWidth = floorToEven(mainDpyInfo.viewportW);
    }
    if (gVideoHeight == 0) {
        gVideoHeight = floorToEven(mainDpyInfo.viewportH);
    }

    // Configure and start the encoder.
    sp<MediaCodec> encoder;
    sp<FrameOutput> frameOutput;
    sp<IGraphicBufferProducer> encoderInputSurface;
    if (gOutputFormat != FORMAT_FRAMES && gOutputFormat != FORMAT_RAW_FRAMES) {
        err = prepareEncoder(mainDpyInfo.fps, &encoder, &encoderInputSurface);

        if (err != NO_ERROR && !gSizeSpecified) {
            // fallback is defined for landscape; swap if we're in portrait
            bool needSwap = gVideoWidth < gVideoHeight;
            uint32_t newWidth = needSwap ? kFallbackHeight : kFallbackWidth;
            uint32_t newHeight = needSwap ? kFallbackWidth : kFallbackHeight;
            if (gVideoWidth != newWidth && gVideoHeight != newHeight) {
                ALOGV("Retrying with 720p");
                fprintf(stderr, "WARNING: failed at %dx%d, retrying at %dx%d\n",
                        gVideoWidth, gVideoHeight, newWidth, newHeight);
                gVideoWidth = newWidth;
                gVideoHeight = newHeight;
                err = prepareEncoder(mainDpyInfo.fps, &encoder,
                        &encoderInputSurface);
            }
        }
        if (err != NO_ERROR) return err;

        // From here on, we must explicitly release() the encoder before it goes
        // out of scope, or we will get an assertion failure from stagefright
        // later on in a different thread.
    } else {
        // We're not using an encoder at all.  The "encoder input surface" we hand to
        // SurfaceFlinger will just feed directly to us.
        frameOutput = new FrameOutput();
        err = frameOutput->createInputSurface(gVideoWidth, gVideoHeight, &encoderInputSurface);
        if (err != NO_ERROR) {
            return err;
        }
    }

    // Draw the "info" page by rendering a frame with GLES and sending
    // it directly to the encoder.
    // TODO: consider displaying this as a regular layer to avoid b/11697754
    if (gWantInfoScreen) {
        Overlay::drawInfoPage(encoderInputSurface);
    }

    // Configure optional overlay.
    sp<IGraphicBufferProducer> bufferProducer;
    sp<Overlay> overlay;
    if (gWantFrameTime) {
        // Send virtual display frames to an external texture.
        overlay = new Overlay(gMonotonicTime);
        err = overlay->start(encoderInputSurface, &bufferProducer);
        if (err != NO_ERROR) {
            if (encoder != NULL) encoder->release();
            return err;
        }
        if (gVerbose) {
            printf("Bugreport overlay created\n");
            fflush(stdout);
        }
    } else {
        // Use the encoder's input surface as the virtual display surface.
        bufferProducer = encoderInputSurface;
    }

    // Configure virtual display.
    sp<IBinder> dpy;
    err = prepareVirtualDisplay(mainDpyInfo, bufferProducer, &dpy);
    if (err != NO_ERROR) {
        if (encoder != NULL) encoder->release();
        return err;
    }

    sp<MediaMuxer> muxer = NULL;
    FILE* rawFp = NULL;
    switch (gOutputFormat) {
        case FORMAT_MP4:
        case FORMAT_WEBM:
        case FORMAT_3GPP: {
            // Configure muxer.  We have to wait for the CSD blob from the encoder
            // before we can start it.
            err = unlink(fileName);
            if (err != 0 && errno != ENOENT) {
                fprintf(stderr, "ERROR: couldn't remove existing file\n");
                abort();
            }
            int fd = open(fileName, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
            if (fd < 0) {
                fprintf(stderr, "ERROR: couldn't open file\n");
                abort();
            }
            if (gOutputFormat == FORMAT_MP4) {
                muxer = new MediaMuxer(fd, MediaMuxer::OUTPUT_FORMAT_MPEG_4);
            } else if (gOutputFormat == FORMAT_WEBM) {
                muxer = new MediaMuxer(fd, MediaMuxer::OUTPUT_FORMAT_WEBM);
            } else {
                muxer = new MediaMuxer(fd, MediaMuxer::OUTPUT_FORMAT_THREE_GPP);
            }
            close(fd);
            if (gRotate) {
                muxer->setOrientationHint(90);  // TODO: does this do anything?
            }
            break;
        }
        case FORMAT_H264:
        case FORMAT_FRAMES:
        case FORMAT_RAW_FRAMES: {
            rawFp = prepareRawOutput(fileName);
            if (rawFp == NULL) {
                if (encoder != NULL) encoder->release();
                return -1;
            }
            break;
        }
        default:
            fprintf(stderr, "ERROR: unknown format %d\n", gOutputFormat);
            abort();
    }

    if (gOutputFormat == FORMAT_FRAMES || gOutputFormat == FORMAT_RAW_FRAMES) {
        // TODO: if we want to make this a proper feature, we should output
        //       an outer header with version info.  Right now we never change
        //       the frame size or format, so we could conceivably just send
        //       the current frame header once and then follow it with an
        //       unbroken stream of data.

        // Make the EGL context current again.  This gets unhooked if we're
        // using "--bugreport" mode.
        // TODO: figure out if we can eliminate this
        frameOutput->prepareToCopy();

        while (!gStopRequested) {
            // Poll for frames, the same way we do for MediaCodec.  We do
            // all of the work on the main thread.
            //
            // Ideally we'd sleep indefinitely and wake when the
            // stop was requested, but this will do for now.  (It almost
            // works because wait() wakes when a signal hits, but we
            // need to handle the edge cases.)
            bool rawFrames = gOutputFormat == FORMAT_RAW_FRAMES;
            err = frameOutput->copyFrame(rawFp, 250000, rawFrames);
            if (err == ETIMEDOUT) {
                err = NO_ERROR;
            } else if (err != NO_ERROR) {
                ALOGE("Got error %d from copyFrame()", err);
                break;
            }
        }
    } else {
        // Main encoder loop.
        err = runEncoder(encoder, muxer, rawFp, mainDpy, dpy,
                mainDpyInfo.orientation);
        if (err != NO_ERROR) {
            fprintf(stderr, "Encoder failed (err=%d)\n", err);
            // fall through to cleanup
        }

        if (gVerbose) {
            printf("Stopping encoder and muxer\n");
            fflush(stdout);
        }
    }

    // Shut everything down, starting with the producer side.
    encoderInputSurface = NULL;
    SurfaceComposerClient::destroyDisplay(dpy);
    if (overlay != NULL) overlay->stop();
    if (encoder != NULL) encoder->stop();
    if (muxer != NULL) {
        // If we don't stop muxer explicitly, i.e. let the destructor run,
        // it may hang (b/11050628).
        err = muxer->stop();
    } else if (rawFp != stdout) {
        fclose(rawFp);
    }
    if (encoder != NULL) encoder->release();

    return err;
}

/*
 * Sends a broadcast to the media scanner to tell it about the new video.
 *
 * This is optional, but nice to have.
 */
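// The forked child below effectively runs (with an illustrative path):
//   am broadcast -a android.intent.action.MEDIA_SCANNER_SCAN_FILE \
//       -d file:///sdcard/demo.mp4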
static status_t notifyMediaScanner(const char* fileName) {
    // need to do allocations before the fork()
    String8 fileUrl("file://");
    fileUrl.append(fileName);

    const char* kCommand = "/system/bin/am";
    const char* const argv[] = {
            kCommand,
            "broadcast",
            "-a",
            "android.intent.action.MEDIA_SCANNER_SCAN_FILE",
            "-d",
            fileUrl.string(),
            NULL
    };
    if (gVerbose) {
        printf("Executing:");
        for (int i = 0; argv[i] != NULL; i++) {
            printf(" %s", argv[i]);
        }
        putchar('\n');
        fflush(stdout);
    }

    pid_t pid = fork();
    if (pid < 0) {
        int err = errno;
        ALOGW("fork() failed: %s", strerror(err));
        return -err;
    } else if (pid > 0) {
        // parent; wait for the child, mostly to make the verbose-mode output
        // look right, but also to check for and log failures
        int status;
        pid_t actualPid = TEMP_FAILURE_RETRY(waitpid(pid, &status, 0));
        if (actualPid != pid) {
            ALOGW("waitpid(%d) returned %d (errno=%d)", pid, actualPid, errno);
        } else if (status != 0) {
            ALOGW("'am broadcast' exited with status=%d", status);
        } else {
            ALOGV("'am broadcast' exited successfully");
        }
    } else {
        if (!gVerbose) {
            // non-verbose, suppress 'am' output
            ALOGV("closing stdout/stderr in child");
            int fd = open("/dev/null", O_WRONLY);
            if (fd >= 0) {
                dup2(fd, STDOUT_FILENO);
                dup2(fd, STDERR_FILENO);
                close(fd);
            }
        }
        execv(kCommand, const_cast<char* const*>(argv));
        ALOGE("execv(%s) failed: %s\n", kCommand, strerror(errno));
        exit(1);
    }
    return NO_ERROR;
}

/*
 * Parses a string of the form "1280x720".
 *
 * Returns true on success.
 */
static bool parseWidthHeight(const char* widthHeight, uint32_t* pWidth,
        uint32_t* pHeight) {
    long width, height;
    char* end;

    // Must specify base 10, or "0x0" gets parsed differently.
    width = strtol(widthHeight, &end, 10);
    if (end == widthHeight || *end != 'x' || *(end+1) == '\0') {
        // invalid chars in width, or missing 'x', or missing height
        return false;
    }
    height = strtol(end + 1, &end, 10);
    if (*end != '\0') {
        // invalid chars in height
        return false;
    }

    *pWidth = width;
    *pHeight = height;
    return true;
}

/*
 * Accepts a string with a bare number ("4000000") or with a single-character
 * unit ("4m").
 *
 * Returns an error if parsing fails.
 */
static status_t parseValueWithUnit(const char* str, uint32_t* pValue) {
    long value;
    char* endptr;

    value = strtol(str, &endptr, 10);
    if (*endptr == '\0') {
        // bare number
        *pValue = value;
        return NO_ERROR;
    } else if (toupper(*endptr) == 'M' && *(endptr+1) == '\0') {
        *pValue = value * 1000000;  // check for overflow?
        return NO_ERROR;
    } else {
        fprintf(stderr, "Unrecognized value: %s\n", str);
        return UNKNOWN_ERROR;
    }
}

/*
 * Dumps usage on stderr.
 */
static void usage() {
    fprintf(stderr,
        "Usage: screenrecord [options] <filename>\n"
        "\n"
        "Android screenrecord v%d.%d.  Records the device's display to a .mp4 file.\n"
        "\n"
        "Options:\n"
        "--size WIDTHxHEIGHT\n"
        "    Set the video size, e.g. \"1280x720\".  Default is the device's main\n"
        "    display resolution (if supported), 1280x720 if not.  For best results,\n"
        "    use a size supported by the AVC encoder.\n"
        "--bit-rate RATE\n"
        "    Set the video bit rate, in bits per second.  Value may be specified as\n"
        "    bits or megabits, e.g. '4000000' is equivalent to '4M'.  Default %dMbps.\n"
        "--bugreport\n"
        "    Add additional information, such as a timestamp overlay, that is helpful\n"
        "    in videos captured to illustrate bugs.\n"
        "--time-limit TIME\n"
        "    Set the maximum recording time, in seconds.  Default / maximum is %d.\n"
        "--verbose\n"
        "    Display interesting information on stdout.\n"
        "--help\n"
        "    Show this message.\n"
        "\n"
        "Recording continues until Ctrl-C is hit or the time limit is reached.\n"
        "\n",
        kVersionMajor, kVersionMinor, gBitRate / 1000000, gTimeLimitSec
        );
}

/*
 * Parses args and kicks things off.
 */
int main(int argc, char* const argv[]) {
    static const struct option longOptions[] = {
        { "help",               no_argument,        NULL, 'h' },
        { "verbose",            no_argument,        NULL, 'v' },
        { "size",               required_argument,  NULL, 's' },
        { "bit-rate",           required_argument,  NULL, 'b' },
        { "time-limit",         required_argument,  NULL, 't' },
        { "bugreport",          no_argument,        NULL, 'u' },
        // "unofficial" options
        { "show-device-info",   no_argument,        NULL, 'i' },
        { "show-frame-time",    no_argument,        NULL, 'f' },
        { "rotate",             no_argument,        NULL, 'r' },
        { "output-format",      required_argument,  NULL, 'o' },
        { "codec-name",         required_argument,  NULL, 'N' },
        { "monotonic-time",     no_argument,        NULL, 'm' },
        { "persistent-surface", no_argument,        NULL, 'p' },
        { "bframes",            required_argument,  NULL, 'B' },
        { NULL,                 0,                  NULL, 0 }
    };

    while (true) {
        int optionIndex = 0;
        int ic = getopt_long(argc, argv, "", longOptions, &optionIndex);
        if (ic == -1) {
            break;
        }

        switch (ic) {
        case 'h':
            usage();
            return 0;
        case 'v':
            gVerbose = true;
            break;
        case 's':
            if (!parseWidthHeight(optarg, &gVideoWidth, &gVideoHeight)) {
                fprintf(stderr, "Invalid size '%s', must be width x height\n",
                        optarg);
                return 2;
            }
            if (gVideoWidth == 0 || gVideoHeight == 0) {
                fprintf(stderr,
                    "Invalid size %ux%u, width and height may not be zero\n",
                    gVideoWidth, gVideoHeight);
                return 2;
            }
            gSizeSpecified = true;
            break;
        case 'b':
            if (parseValueWithUnit(optarg, &gBitRate) != NO_ERROR) {
                return 2;
            }
            if (gBitRate < kMinBitRate || gBitRate > kMaxBitRate) {
                fprintf(stderr,
                        "Bit rate %dbps outside acceptable range [%d,%d]\n",
                        gBitRate, kMinBitRate, kMaxBitRate);
                return 2;
            }
            break;
        case 't':
            gTimeLimitSec = atoi(optarg);
            if (gTimeLimitSec == 0 || gTimeLimitSec > kMaxTimeLimitSec) {
                fprintf(stderr,
                        "Time limit %ds outside acceptable range [1,%d]\n",
                        gTimeLimitSec, kMaxTimeLimitSec);
                return 2;
            }
            break;
        case 'u':
            gWantInfoScreen = true;
            gWantFrameTime = true;
            break;
        case 'i':
            gWantInfoScreen = true;
            break;
        case 'f':
            gWantFrameTime = true;
            break;
        case 'r':
            // experimental feature
            gRotate = true;
            break;
        case 'o':
            if (strcmp(optarg, "mp4") == 0) {
                gOutputFormat = FORMAT_MP4;
            } else if (strcmp(optarg, "h264") == 0) {
                gOutputFormat = FORMAT_H264;
            } else if (strcmp(optarg, "webm") == 0) {
                gOutputFormat = FORMAT_WEBM;
            } else if (strcmp(optarg, "3gpp") == 0) {
                gOutputFormat = FORMAT_3GPP;
            } else if (strcmp(optarg, "frames") == 0) {
                gOutputFormat = FORMAT_FRAMES;
            } else if (strcmp(optarg, "raw-frames") == 0) {
                gOutputFormat = FORMAT_RAW_FRAMES;
            } else {
                fprintf(stderr, "Unknown format '%s'\n", optarg);
                return 2;
            }
            break;
        case 'N':
            gCodecName = optarg;
            break;
        case 'm':
            gMonotonicTime = true;
            break;
        case 'p':
            gPersistentSurface = true;
            break;
        case 'B':
            if (parseValueWithUnit(optarg, &gBframes) != NO_ERROR) {
                return 2;
            }
            break;
        default:
            if (ic != '?') {
                fprintf(stderr, "getopt_long returned unexpected value 0x%x\n", ic);
            }
            return 2;
        }
    }

    if (optind != argc - 1) {
        fprintf(stderr, "Must specify output file (see --help).\n");
        return 2;
    }

    const char* fileName = argv[optind];
    if (gOutputFormat == FORMAT_MP4) {
        // MediaMuxer tries to create the file in the constructor, but we don't
        // learn about the failure until muxer.start(), which returns a generic
        // error code without logging anything.  We attempt to create the file
        // now for better diagnostics.
        int fd = open(fileName, O_CREAT | O_RDWR, 0644);
        if (fd < 0) {
            fprintf(stderr, "Unable to open '%s': %s\n", fileName, strerror(errno));
            return 1;
        }
        close(fd);
    }

    status_t err = recordScreen(fileName);
    if (err == NO_ERROR) {
        // Try to notify the media scanner.  Not fatal if this fails.
        notifyMediaScanner(fileName);
    }
    ALOGD(err == NO_ERROR ? "success" : "failed");
    return (int) err;
}