/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <inttypes.h>
#include <fcntl.h>
#include <stdlib.h>
#include <string.h>
#include <sys/time.h>
#include <sys/types.h>
#include <sys/stat.h>

//#define LOG_NDEBUG 0
#define LOG_TAG "stagefright"
#include <media/stagefright/foundation/ADebug.h>

#include "jpeg.h"
#include "SineSource.h"

#include <binder/IServiceManager.h>
#include <binder/ProcessState.h>
#include <datasource/DataSourceFactory.h>
#include <media/DataSource.h>
#include <media/MediaSource.h>
#include <media/IMediaHTTPService.h>
#include <media/IMediaPlayerService.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/JPEGSource.h>
#include <media/stagefright/InterfaceUtils.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MediaExtractor.h>
#include <media/stagefright/MediaExtractorFactory.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/SimpleDecodingSource.h>
#include <media/stagefright/Utils.h>
#include <media/mediametadataretriever.h>

#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/MPEG2TSWriter.h>
#include <media/stagefright/MPEG4Writer.h>

#include <private/media/VideoFrame.h>

#include <gui/GLConsumer.h>
#include <gui/Surface.h>
#include <gui/SurfaceComposerClient.h>

#include <android/hardware/media/omx/1.0/IOmx.h>

#include "AudioPlayer.h"

using namespace android;

static long gNumRepetitions;
static long gMaxNumFrames;  // 0 means decode all available.
static long gReproduceBug;  // if not -1.
static bool gPreferSoftwareCodec;
static bool gForceToUseHardwareCodec;
static bool gPlaybackAudio;
static bool gWriteMP4;
static bool gDisplayHistogram;
static bool gVerbose = false;
static bool showProgress = true;
static String8 gWriteMP4Filename;
static String8 gComponentNameOverride;

static sp<ANativeWindow> gSurface;

static int64_t getNowUs() {
    struct timeval tv;
    gettimeofday(&tv, NULL);

    return (int64_t)tv.tv_usec + tv.tv_sec * 1000000ll;
}

static int CompareIncreasing(const int64_t *a, const int64_t *b) {
    return (*a) < (*b) ? -1 : (*a) > (*b) ? 1 : 0;
}

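// Prints a 100-bucket histogram of per-buffer decode times, bucketed
// linearly between the fastest and slowest observed decode call and
// labeled in frames-per-second terms.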
static void displayDecodeHistogram(Vector<int64_t> *decodeTimesUs) {
    printf("decode times:\n");

    decodeTimesUs->sort(CompareIncreasing);

    size_t n = decodeTimesUs->size();
    int64_t minUs = decodeTimesUs->itemAt(0);
    int64_t maxUs = decodeTimesUs->itemAt(n - 1);

    printf("min decode time %" PRId64 " us (%.2f secs)\n", minUs, minUs / 1E6);
    printf("max decode time %" PRId64 " us (%.2f secs)\n", maxUs, maxUs / 1E6);

    size_t counts[100];
    for (size_t i = 0; i < 100; ++i) {
        counts[i] = 0;
    }

    for (size_t i = 0; i < n; ++i) {
        int64_t x = decodeTimesUs->itemAt(i);

        size_t slot = ((x - minUs) * 100) / (maxUs - minUs);
        if (slot == 100) { slot = 99; }

        ++counts[slot];
    }

    for (size_t i = 0; i < 100; ++i) {
        int64_t slotUs = minUs + (i * (maxUs - minUs) / 100);

        double fps = 1E6 / slotUs;
        printf("[%.2f fps]: %zu\n", fps, counts[i]);
    }
}

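// If the track metadata carries an avcC box (AVCDecoderConfigurationRecord),
// report the AVC profile and level indices it advertises.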
static void displayAVCProfileLevelIfPossible(const sp<MetaData>& meta) {
    uint32_t type;
    const void *data;
    size_t size;
    if (meta->findData(kKeyAVCC, &type, &data, &size)) {
        const uint8_t *ptr = (const uint8_t *)data;
        CHECK(size >= 7);
        CHECK(ptr[0] == 1);  // configurationVersion == 1
        uint8_t profile = ptr[1];
        uint8_t level = ptr[3];
        fprintf(stderr, "AVC video profile %d and level %d\n", profile, level);
    }
}

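// Reads every buffer from 'source' and appends the raw payload bytes to
// 'filename'; format-change notifications are skipped and any other error
// (including EOS) ends the dump.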
static void dumpSource(const sp<MediaSource> &source, const String8 &filename) {
    FILE *out = fopen(filename.string(), "wb");
    if (out == NULL) {
        fprintf(stderr, "couldn't open '%s' for writing\n", filename.string());
        return;
    }

    CHECK_EQ((status_t)OK, source->start());

    status_t err;
    for (;;) {
        MediaBufferBase *mbuf;
        err = source->read(&mbuf);

        if (err == INFO_FORMAT_CHANGED) {
            continue;
        } else if (err != OK) {
            break;
        }

        if (gVerbose) {
            MetaDataBase &meta = mbuf->meta_data();
            fprintf(stdout, "sample format: %s\n", meta.toString().c_str());
        }

        CHECK_EQ(
                fwrite((const uint8_t *)mbuf->data() + mbuf->range_offset(),
                       1,
                       mbuf->range_length(),
                       out),
                mbuf->range_length());

        mbuf->release();
        mbuf = NULL;
    }

    CHECK_EQ((status_t)OK, source->stop());

    fclose(out);
    out = NULL;
}

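// Decodes 'source' (or passes raw audio through untouched), optionally
// rendering audio via AudioPlayer or exercising the seek-related bug
// reproduction modes, and prints decode throughput statistics otherwise.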
static void playSource(sp<MediaSource> &source) {
    sp<MetaData> meta = source->getFormat();

    const char *mime;
    CHECK(meta->findCString(kKeyMIMEType, &mime));

    sp<MediaSource> rawSource;
    if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_RAW, mime)) {
        rawSource = source;
    } else {
        int flags = 0;
        if (gPreferSoftwareCodec) {
            flags |= MediaCodecList::kPreferSoftwareCodecs;
        }
        if (gForceToUseHardwareCodec) {
            CHECK(!gPreferSoftwareCodec);
            flags |= MediaCodecList::kHardwareCodecsOnly;
        }
        rawSource = SimpleDecodingSource::Create(
                source, flags, gSurface,
                gComponentNameOverride.isEmpty() ? nullptr : gComponentNameOverride.c_str(),
                !gComponentNameOverride.isEmpty());
        if (rawSource == NULL) {
            return;
        }
        displayAVCProfileLevelIfPossible(meta);
    }

    source.clear();

    status_t err = rawSource->start();

    if (err != OK) {
        fprintf(stderr, "rawSource returned error %d (0x%08x)\n", err, err);
        return;
    }

    if (gPlaybackAudio) {
        AudioPlayer *player = new AudioPlayer(NULL);
        player->setSource(rawSource);
        rawSource.clear();

        err = player->start(true /* sourceAlreadyStarted */);

        if (err == OK) {
            status_t finalStatus;
            while (!player->reachedEOS(&finalStatus)) {
                usleep(100000ll);
            }
        } else {
            fprintf(stderr, "unable to start playback err=%d (0x%08x)\n", err, err);
        }

        delete player;
        player = NULL;

        return;
    } else if (gReproduceBug >= 3 && gReproduceBug <= 5) {
        int64_t durationUs;
        CHECK(meta->findInt64(kKeyDuration, &durationUs));

        status_t err;
        MediaBufferBase *buffer;
        MediaSource::ReadOptions options;
        int64_t seekTimeUs = -1;
        for (;;) {
            err = rawSource->read(&buffer, &options);
            options.clearSeekTo();

            bool shouldSeek = false;
            if (err == INFO_FORMAT_CHANGED) {
                CHECK(buffer == NULL);

                printf("format changed.\n");
                continue;
            } else if (err != OK) {
                printf("reached EOF.\n");

                shouldSeek = true;
            } else {
                int64_t timestampUs;
                CHECK(buffer->meta_data().findInt64(kKeyTime, &timestampUs));

                bool failed = false;

                if (seekTimeUs >= 0) {
                    int64_t diff = timestampUs - seekTimeUs;

                    if (diff < 0) {
                        diff = -diff;
                    }

                    if ((gReproduceBug == 4 && diff > 500000)
                        || (gReproduceBug == 5 && timestampUs < 0)) {
                        printf("wanted: %.2f secs, got: %.2f secs\n",
                               seekTimeUs / 1E6, timestampUs / 1E6);

                        printf("ERROR: ");
                        failed = true;
                    }
                }

                printf("buffer has timestamp %" PRId64 " us (%.2f secs)\n",
                       timestampUs, timestampUs / 1E6);

                buffer->release();
                buffer = NULL;

                if (failed) {
                    break;
                }

                shouldSeek = ((double)rand() / RAND_MAX) < 0.1;

                if (gReproduceBug == 3) {
                    shouldSeek = false;
                }
            }

            seekTimeUs = -1;

            if (shouldSeek) {
                seekTimeUs = (rand() * (float)durationUs) / (float)RAND_MAX;
                options.setSeekTo(seekTimeUs);

                printf("seeking to %" PRId64 " us (%.2f secs)\n",
                       seekTimeUs, seekTimeUs / 1E6);
            }
        }

        rawSource->stop();

        return;
    }

    int n = 0;
    int64_t startTime = getNowUs();

    long numIterationsLeft = gNumRepetitions;
    MediaSource::ReadOptions options;

    int64_t sumDecodeUs = 0;
    int64_t totalBytes = 0;

    Vector<int64_t> decodeTimesUs;

    while (numIterationsLeft-- > 0) {
        long numFrames = 0;

        MediaBufferBase *buffer;

        for (;;) {
            int64_t startDecodeUs = getNowUs();
            status_t err = rawSource->read(&buffer, &options);
            int64_t delayDecodeUs = getNowUs() - startDecodeUs;

            options.clearSeekTo();

            if (err != OK) {
                CHECK(buffer == NULL);

                if (err == INFO_FORMAT_CHANGED) {
                    printf("format changed.\n");
                    continue;
                }

                break;
            }

            if (buffer->range_length() > 0) {
                if (gDisplayHistogram && n > 0) {
                    // Ignore the first time since it includes some setup
                    // cost.
                    decodeTimesUs.push(delayDecodeUs);
                }

                if (gVerbose) {
                    MetaDataBase &meta = buffer->meta_data();
                    fprintf(stdout, "%ld sample format: %s\n", numFrames, meta.toString().c_str());
                } else if (showProgress && (n++ % 16) == 0) {
                    printf(".");
                    fflush(stdout);
                }
            }

            sumDecodeUs += delayDecodeUs;
            totalBytes += buffer->range_length();

            buffer->release();
            buffer = NULL;

            ++numFrames;
            if (gMaxNumFrames > 0 && numFrames == gMaxNumFrames) {
                break;
            }

            if (gReproduceBug == 1 && numFrames == 40) {
                printf("seeking past the end now.");
                options.setSeekTo(0x7fffffffL);
            } else if (gReproduceBug == 2 && numFrames == 40) {
                printf("seeking to 5 secs.");
                options.setSeekTo(5000000);
            }
        }

        if (showProgress) {
            printf("$");
            fflush(stdout);
        }

        options.setSeekTo(0);
    }

    rawSource->stop();
    printf("\n");

    int64_t delay = getNowUs() - startTime;
    if (!strncasecmp("video/", mime, 6)) {
        printf("avg. %.2f fps\n", n * 1E6 / delay);

        printf("avg. time to decode one buffer %.2f usecs\n",
               (double)sumDecodeUs / n);

        printf("decoded a total of %d frame(s).\n", n);

        if (gDisplayHistogram) {
            displayDecodeHistogram(&decodeTimesUs);
        }
    } else if (!strncasecmp("audio/", mime, 6)) {
        // Frame count makes less sense for audio, as the output buffer
        // sizes may be different across decoders.
        printf("avg. %.2f KB/sec\n", totalBytes / 1024 * 1E6 / delay);

        printf("decoded a total of %" PRId64 " bytes\n", totalBytes);
    }
}

////////////////////////////////////////////////////////////////////////////////

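// MediaSource wrapper that marks sync (key) frames for streams whose
// extractor does not provide that information (e.g. MPEG-2 TS): for AVC it
// scans for IDR NAL units and drops everything before the first one; other
// stream types are passed through with every buffer flagged as a sync frame.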
struct DetectSyncSource : public MediaSource {
    explicit DetectSyncSource(const sp<MediaSource> &source);

    virtual status_t start(MetaData *params = NULL);
    virtual status_t stop();
    virtual sp<MetaData> getFormat();

    virtual status_t read(
            MediaBufferBase **buffer, const ReadOptions *options);

private:
    enum StreamType {
        AVC,
        MPEG4,
        H263,
        OTHER,
    };

    sp<MediaSource> mSource;
    StreamType mStreamType;
    bool mSawFirstIDRFrame;

    DISALLOW_EVIL_CONSTRUCTORS(DetectSyncSource);
};

DetectSyncSource::DetectSyncSource(const sp<MediaSource> &source)
    : mSource(source),
      mStreamType(OTHER),
      mSawFirstIDRFrame(false) {
    const char *mime;
    CHECK(mSource->getFormat()->findCString(kKeyMIMEType, &mime));

    if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
        mStreamType = AVC;
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG4)) {
        mStreamType = MPEG4;
        CHECK(!"sync frame detection not implemented yet for MPEG4");
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_H263)) {
        mStreamType = H263;
        CHECK(!"sync frame detection not implemented yet for H.263");
    }
}

status_t DetectSyncSource::start(MetaData *params) {
    mSawFirstIDRFrame = false;

    return mSource->start(params);
}

status_t DetectSyncSource::stop() {
    return mSource->stop();
}

sp<MetaData> DetectSyncSource::getFormat() {
    return mSource->getFormat();
}

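// Scans an Annex-B framed access unit for a 00 00 01 start code followed by
// a NAL unit of type 5 (IDR slice); used to tag AVC sync frames.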
static bool isIDRFrame(MediaBufferBase *buffer) {
    const uint8_t *data =
        (const uint8_t *)buffer->data() + buffer->range_offset();
    size_t size = buffer->range_length();
    for (size_t i = 0; i + 3 < size; ++i) {
        if (!memcmp("\x00\x00\x01", &data[i], 3)) {
            uint8_t nalType = data[i + 3] & 0x1f;
            if (nalType == 5) {
                return true;
            }
        }
    }

    return false;
}

status_t DetectSyncSource::read(
        MediaBufferBase **buffer, const ReadOptions *options) {
    for (;;) {
        status_t err = mSource->read(buffer, options);

        if (err != OK) {
            return err;
        }

        if (mStreamType == AVC) {
            bool isIDR = isIDRFrame(*buffer);
            (*buffer)->meta_data().setInt32(kKeyIsSyncFrame, isIDR);
            if (isIDR) {
                mSawFirstIDRFrame = true;
            }
        } else {
            (*buffer)->meta_data().setInt32(kKeyIsSyncFrame, true);
        }

        if (mStreamType != AVC || mSawFirstIDRFrame) {
            break;
        }

        // Ignore everything up to the first IDR frame.
        (*buffer)->release();
        *buffer = NULL;
    }

    return OK;
}

////////////////////////////////////////////////////////////////////////////////

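// Despite the name, the active code path feeds the selected tracks to an
// MPEG2TSWriter (the MPEG4Writer path is compiled out), caps the output at
// one minute, and wraps each track in DetectSyncSource when the extractor
// did not provide sync-frame information.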
static void writeSourcesToMP4(
        Vector<sp<MediaSource> > &sources, bool syncInfoPresent) {
#if 0
    sp<MPEG4Writer> writer =
        new MPEG4Writer(gWriteMP4Filename.string());
#else
    int fd = open(gWriteMP4Filename.string(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
    if (fd < 0) {
        fprintf(stderr, "couldn't open file '%s'\n", gWriteMP4Filename.string());
        return;
    }
    sp<MPEG2TSWriter> writer =
        new MPEG2TSWriter(fd);
#endif

    // at most one minute.
    writer->setMaxFileDuration(60000000ll);

    for (size_t i = 0; i < sources.size(); ++i) {
        sp<MediaSource> source = sources.editItemAt(i);

        CHECK_EQ(writer->addSource(
                    syncInfoPresent ? source : new DetectSyncSource(source)),
                (status_t)OK);
    }

    sp<MetaData> params = new MetaData;
    params->setInt32(kKeyRealTimeRecording, false);
    CHECK_EQ(writer->start(params.get()), (status_t)OK);

    while (!writer->reachedEOS()) {
        usleep(100000);
    }
    writer->stop();
}

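// Steps through the clip in fixed increments, issuing SEEK_PREVIOUS_SYNC
// reads, and prints the requested time, the returned timestamp, and their
// difference for each step.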
static void performSeekTest(const sp<MediaSource> &source) {
    CHECK_EQ((status_t)OK, source->start());

    int64_t durationUs;
    CHECK(source->getFormat()->findInt64(kKeyDuration, &durationUs));

    for (int64_t seekTimeUs = 0; seekTimeUs <= durationUs;
            seekTimeUs += 60000ll) {
        MediaSource::ReadOptions options;
        options.setSeekTo(
                seekTimeUs, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);

        MediaBufferBase *buffer;
        status_t err;
        for (;;) {
            err = source->read(&buffer, &options);

            options.clearSeekTo();

            if (err == INFO_FORMAT_CHANGED) {
                CHECK(buffer == NULL);
                continue;
            }

            if (err != OK) {
                CHECK(buffer == NULL);
                break;
            }

            CHECK(buffer != NULL);

            if (buffer->range_length() > 0) {
                break;
            }

            buffer->release();
            buffer = NULL;
        }

        if (err == OK) {
            int64_t timeUs;
            CHECK(buffer->meta_data().findInt64(kKeyTime, &timeUs));

            printf("%" PRId64 "\t%" PRId64 "\t%" PRId64 "\n",
                   seekTimeUs, timeUs, seekTimeUs - timeUs);

            buffer->release();
            buffer = NULL;
        } else {
            printf("ERROR\n");
            break;
        }
    }

    CHECK_EQ((status_t)OK, source->stop());
}

static void usage(const char *me) {
    fprintf(stderr, "usage: %s [options] [input_filename]\n", me);
    fprintf(stderr, "       -h(elp)\n");
    fprintf(stderr, "       -a(udio)\n");
    fprintf(stderr, "       -n repetitions\n");
    fprintf(stderr, "       -l(ist) components\n");
    fprintf(stderr, "       -m max-number-of-frames-to-decode in each pass\n");
    fprintf(stderr, "       -b bug to reproduce\n");
    fprintf(stderr, "       -i(nfo) dump codec info (profiles and color formats supported, details)\n");
    fprintf(stderr, "       -t(humbnail) extract video thumbnail or album art\n");
    fprintf(stderr, "       -s(oftware) prefer software codec\n");
    fprintf(stderr, "       -r(hardware) force to use hardware codec\n");
    fprintf(stderr, "       -o playback audio\n");
    fprintf(stderr, "       -w(rite) filename (write to .mp4 file)\n");
    fprintf(stderr, "       -k seek test\n");
    fprintf(stderr, "       -N(ame) of the component\n");
    fprintf(stderr, "       -x display a histogram of decoding times/fps "
                    "(video only)\n");
    fprintf(stderr, "       -q don't show progress indicator\n");
    fprintf(stderr, "       -S allocate buffers from a surface\n");
    fprintf(stderr, "       -T allocate buffers from a surface texture\n");
    fprintf(stderr, "       -d(ump) output_filename (raw stream data to a file)\n");
    fprintf(stderr, "       -D(ump) output_filename (decoded PCM data to a file)\n");
    fprintf(stderr, "       -v be more verbose\n");
}

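// Dumps either all decoders or all encoders from MediaCodecList, grouped by
// supported media type, listing aliases, attributes, profile/levels, color
// formats and the detail map for each codec.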
static void dumpCodecDetails(bool queryDecoders) {
    const char *codecType = queryDecoders ? "Decoder" : "Encoder";
    printf("\n%s infos by media types:\n"
           "=============================\n", codecType);

    sp<IMediaCodecList> list = MediaCodecList::getInstance();
    size_t numCodecs = list->countCodecs();

    // gather all media types supported by codec class, and link to codecs that support them
    KeyedVector<AString, Vector<sp<MediaCodecInfo>>> allMediaTypes;
    for (size_t codec_ix = 0; codec_ix < numCodecs; ++codec_ix) {
        sp<MediaCodecInfo> info = list->getCodecInfo(codec_ix);
        if (info->isEncoder() == !queryDecoders) {
            Vector<AString> supportedMediaTypes;
            info->getSupportedMediaTypes(&supportedMediaTypes);
            if (!supportedMediaTypes.size()) {
                printf("warning: %s does not support any media types\n",
                        info->getCodecName());
            } else {
                for (const AString &mediaType : supportedMediaTypes) {
                    if (allMediaTypes.indexOfKey(mediaType) < 0) {
                        allMediaTypes.add(mediaType, Vector<sp<MediaCodecInfo>>());
                    }
                    allMediaTypes.editValueFor(mediaType).add(info);
                }
            }
        }
    }

    KeyedVector<AString, bool> visitedCodecs;
    for (size_t type_ix = 0; type_ix < allMediaTypes.size(); ++type_ix) {
        const AString &mediaType = allMediaTypes.keyAt(type_ix);
        printf("\nMedia type '%s':\n", mediaType.c_str());

        for (const sp<MediaCodecInfo> &info : allMediaTypes.valueAt(type_ix)) {
            sp<MediaCodecInfo::Capabilities> caps = info->getCapabilitiesFor(mediaType.c_str());
            if (caps == NULL) {
                printf("warning: %s does not have capabilities for type %s\n",
                        info->getCodecName(), mediaType.c_str());
                continue;
            }
            printf("  %s \"%s\" supports\n",
                       codecType, info->getCodecName());

            auto printList = [](const char *type, const Vector<AString> &values){
                printf("    %s: [", type);
                for (size_t j = 0; j < values.size(); ++j) {
                    printf("\n      %s%s", values[j].c_str(),
                            j == values.size() - 1 ? " " : ",");
                }
                printf("]\n");
            };

            if (visitedCodecs.indexOfKey(info->getCodecName()) < 0) {
                visitedCodecs.add(info->getCodecName(), true);
                {
                    Vector<AString> aliases;
                    info->getAliases(&aliases);
                    // quote alias
                    for (AString &alias : aliases) {
                        alias.insert("\"", 1, 0);
                        alias.append('"');
                    }
                    printList("aliases", aliases);
                }
                {
                    uint32_t attrs = info->getAttributes();
                    Vector<AString> list;
                    list.add(AStringPrintf("encoder: %d", !!(attrs & MediaCodecInfo::kFlagIsEncoder)));
                    list.add(AStringPrintf("vendor: %d", !!(attrs & MediaCodecInfo::kFlagIsVendor)));
                    list.add(AStringPrintf("software-only: %d", !!(attrs & MediaCodecInfo::kFlagIsSoftwareOnly)));
                    list.add(AStringPrintf("hw-accelerated: %d", !!(attrs & MediaCodecInfo::kFlagIsHardwareAccelerated)));
                    printList(AStringPrintf("attributes: %#x", attrs).c_str(), list);
                }

                printf("    owner: \"%s\"\n", info->getOwnerName());
                printf("    rank: %u\n", info->getRank());
            } else {
                printf("    aliases, attributes, owner, rank: see above\n");
            }

            {
                Vector<AString> list;
                Vector<MediaCodecInfo::ProfileLevel> profileLevels;
                caps->getSupportedProfileLevels(&profileLevels);
                for (const MediaCodecInfo::ProfileLevel &pl : profileLevels) {
                    const char *niceProfile =
                        mediaType.equalsIgnoreCase(MIMETYPE_AUDIO_AAC)   ? asString_AACObject(pl.mProfile) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_MPEG2) ? asString_MPEG2Profile(pl.mProfile) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_H263)  ? asString_H263Profile(pl.mProfile) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_MPEG4) ? asString_MPEG4Profile(pl.mProfile) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_AVC)   ? asString_AVCProfile(pl.mProfile) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_VP8)   ? asString_VP8Profile(pl.mProfile) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_HEVC)  ? asString_HEVCProfile(pl.mProfile) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_VP9)   ? asString_VP9Profile(pl.mProfile) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_AV1)   ? asString_AV1Profile(pl.mProfile) :
                        "??";
                    const char *niceLevel =
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_MPEG2) ? asString_MPEG2Level(pl.mLevel) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_H263)  ? asString_H263Level(pl.mLevel) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_MPEG4) ? asString_MPEG4Level(pl.mLevel) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_AVC)   ? asString_AVCLevel(pl.mLevel) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_VP8)   ? asString_VP8Level(pl.mLevel) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_HEVC)  ? asString_HEVCTierLevel(pl.mLevel) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_VP9)   ? asString_VP9Level(pl.mLevel) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_AV1)   ? asString_AV1Level(pl.mLevel) :
                        "??";

                    list.add(AStringPrintf("% 5u/% 5u (%s/%s)",
                            pl.mProfile, pl.mLevel, niceProfile, niceLevel));
                }
                printList("profile/levels", list);
            }

            {
                Vector<AString> list;
                Vector<uint32_t> colors;
                caps->getSupportedColorFormats(&colors);
                for (uint32_t color : colors) {
                    list.add(AStringPrintf("%#x (%s)", color,
                            asString_ColorFormat((int32_t)color)));
                }
                printList("colors", list);
            }

            printf("    details: %s\n", caps->getDetails()->debugString(6).c_str());
        }
    }
}

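// Command-line entry point: parses options, handles the thumbnail, codec-info
// and component-listing modes, optionally sets up a Surface or SurfaceTexture
// for video output, then runs the selected mode (stream dump, TS write, seek
// test or decode/playback) over each input file.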
int main(int argc, char **argv) {
    android::ProcessState::self()->startThreadPool();

    bool audioOnly = false;
    bool listComponents = false;
    bool dumpCodecInfo = false;
    bool extractThumbnail = false;
    bool seekTest = false;
    bool useSurfaceAlloc = false;
    bool useSurfaceTexAlloc = false;
    bool dumpStream = false;
    bool dumpPCMStream = false;
    String8 dumpStreamFilename;
    gNumRepetitions = 1;
    gMaxNumFrames = 0;
    gReproduceBug = -1;
    gPreferSoftwareCodec = false;
    gForceToUseHardwareCodec = false;
    gPlaybackAudio = false;
    gWriteMP4 = false;
    gDisplayHistogram = false;

    sp<android::ALooper> looper;

    int res;
    while ((res = getopt(argc, argv, "vhaqn:lm:b:itsrow:kN:xSTd:D:")) >= 0) {
        switch (res) {
            case 'a':
            {
                audioOnly = true;
                break;
            }

            case 'q':
            {
                showProgress = false;
                break;
            }

            case 'd':
            {
                dumpStream = true;
                dumpStreamFilename.setTo(optarg);
                break;
            }

            case 'D':
            {
                dumpPCMStream = true;
                audioOnly = true;
                dumpStreamFilename.setTo(optarg);
                break;
            }

            case 'N':
            {
                gComponentNameOverride.setTo(optarg);
                break;
            }

            case 'l':
            {
                listComponents = true;
                break;
            }

            case 'm':
            case 'n':
            case 'b':
            {
                char *end;
                long x = strtol(optarg, &end, 10);

                if (*end != '\0' || end == optarg || x <= 0) {
                    x = 1;
                }

                if (res == 'n') {
                    gNumRepetitions = x;
                } else if (res == 'm') {
                    gMaxNumFrames = x;
                } else {
                    CHECK_EQ(res, 'b');
                    gReproduceBug = x;
                }
                break;
            }

            case 'w':
            {
                gWriteMP4 = true;
                gWriteMP4Filename.setTo(optarg);
                break;
            }

            case 'i':
            {
                dumpCodecInfo = true;
                break;
            }

            case 't':
            {
                extractThumbnail = true;
                break;
            }

            case 's':
            {
                gPreferSoftwareCodec = true;
                break;
            }

            case 'r':
            {
                gForceToUseHardwareCodec = true;
                break;
            }

            case 'o':
            {
                gPlaybackAudio = true;
                break;
            }

            case 'k':
            {
                seekTest = true;
                break;
            }

            case 'x':
            {
                gDisplayHistogram = true;
                break;
            }

            case 'S':
            {
                useSurfaceAlloc = true;
                break;
            }

            case 'T':
            {
                useSurfaceTexAlloc = true;
                break;
            }

            case 'v':
            {
                gVerbose = true;
                break;
            }

            case '?':
            case 'h':
            default:
            {
                usage(argv[0]);
                exit(1);
                break;
            }
        }
    }

    if (gPlaybackAudio && !audioOnly) {
        // This doesn't make any sense if we're decoding the video track.
        gPlaybackAudio = false;
    }

    argc -= optind;
    argv += optind;

    if (extractThumbnail) {
        sp<IServiceManager> sm = defaultServiceManager();
        sp<IBinder> binder = sm->getService(String16("media.player"));
        sp<IMediaPlayerService> service =
            interface_cast<IMediaPlayerService>(binder);

        CHECK(service.get() != NULL);

        sp<IMediaMetadataRetriever> retriever =
            service->createMetadataRetriever();

        CHECK(retriever != NULL);

        for (int k = 0; k < argc; ++k) {
            const char *filename = argv[k];

            bool failed = true;

            int fd = open(filename, O_RDONLY | O_LARGEFILE);
            CHECK_GE(fd, 0);

            off64_t fileSize = lseek64(fd, 0, SEEK_END);
            CHECK_GE(fileSize, 0ll);

            CHECK_EQ(retriever->setDataSource(fd, 0, fileSize), (status_t)OK);

            close(fd);
            fd = -1;

            sp<IMemory> mem =
                    retriever->getFrameAtTime(-1,
                            MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC,
                            HAL_PIXEL_FORMAT_RGB_565,
                            false /*metaOnly*/);

            if (mem != NULL) {
                failed = false;
                printf("getFrameAtTime(%s) => OK\n", filename);

                VideoFrame *frame = (VideoFrame *)mem->pointer();

                CHECK_EQ(writeJpegFile("/sdcard/out.jpg",
                            frame->getFlattenedData(),
                            frame->mWidth, frame->mHeight), 0);
            }

            {
                mem = retriever->extractAlbumArt();

                if (mem != NULL) {
                    failed = false;
                    printf("extractAlbumArt(%s) => OK\n", filename);
                }
            }

            if (failed) {
                printf("both getFrameAtTime and extractAlbumArt "
                    "failed on file '%s'.\n", filename);
            }
        }

        return 0;
    }

    if (dumpCodecInfo) {
        dumpCodecDetails(true /* queryDecoders */);
        dumpCodecDetails(false /* queryDecoders */);
    }

    if (listComponents) {
        using ::android::hardware::hidl_vec;
        using ::android::hardware::hidl_string;
        using namespace ::android::hardware::media::omx::V1_0;
        sp<IOmx> omx = IOmx::getService();
        CHECK(omx.get() != nullptr);

        hidl_vec<IOmx::ComponentInfo> nodeList;
        auto transStatus = omx->listNodes([](
                const auto& status, const auto& nodeList) {
                    CHECK(status == Status::OK);
                    for (const auto& info : nodeList) {
                        printf("%s\t Roles: ", info.mName.c_str());
                        for (const auto& role : info.mRoles) {
                            printf("%s\t", role.c_str());
                        }
                    }
                });
        CHECK(transStatus.isOk());
    }

    sp<SurfaceComposerClient> composerClient;
    sp<SurfaceControl> control;

    if ((useSurfaceAlloc || useSurfaceTexAlloc) && !audioOnly) {
        if (useSurfaceAlloc) {
            composerClient = new SurfaceComposerClient;
            CHECK_EQ(composerClient->initCheck(), (status_t)OK);

            control = composerClient->createSurface(
                    String8("A Surface"),
                    1280,
                    800,
                    PIXEL_FORMAT_RGB_565,
                    0);

            CHECK(control != NULL);
            CHECK(control->isValid());

            SurfaceComposerClient::Transaction{}
                    .setLayer(control, INT_MAX)
                    .show(control)
                    .apply();

            gSurface = control->getSurface();
            CHECK(gSurface != NULL);
        } else {
            CHECK(useSurfaceTexAlloc);

            sp<IGraphicBufferProducer> producer;
            sp<IGraphicBufferConsumer> consumer;
            BufferQueue::createBufferQueue(&producer, &consumer);
            sp<GLConsumer> texture = new GLConsumer(consumer, 0 /* tex */,
                    GLConsumer::TEXTURE_EXTERNAL, true /* useFenceSync */,
                    false /* isControlledByApp */);
            gSurface = new Surface(producer);
        }
    }

    status_t err = OK;

    for (int k = 0; k < argc && err == OK; ++k) {
        bool syncInfoPresent = true;

        const char *filename = argv[k];

        sp<DataSource> dataSource =
            DataSourceFactory::getInstance()->CreateFromURI(NULL /* httpService */, filename);

        if (strncasecmp(filename, "sine:", 5) && dataSource == NULL) {
            fprintf(stderr, "Unable to create data source.\n");
            return 1;
        }

        bool isJPEG = false;

        size_t len = strlen(filename);
        if (len >= 4 && !strcasecmp(filename + len - 4, ".jpg")) {
            isJPEG = true;
        }

        Vector<sp<MediaSource> > mediaSources;
        sp<MediaSource> mediaSource;

        if (isJPEG) {
            mediaSource = new JPEGSource(dataSource);
            if (gWriteMP4) {
                mediaSources.push(mediaSource);
            }
        } else if (!strncasecmp("sine:", filename, 5)) {
            char *end;
            long sampleRate = strtol(filename + 5, &end, 10);

            if (end == filename + 5) {
                sampleRate = 44100;
            }
            mediaSource = new SineSource(sampleRate, 1);
            if (gWriteMP4) {
                mediaSources.push(mediaSource);
            }
        } else {
            sp<IMediaExtractor> extractor = MediaExtractorFactory::Create(dataSource);

            if (extractor == NULL) {
                fprintf(stderr, "could not create extractor.\n");
                return -1;
            }

            sp<MetaData> meta = extractor->getMetaData();

            if (meta != NULL) {
                const char *mime;
                if (!meta->findCString(kKeyMIMEType, &mime)) {
                    fprintf(stderr, "extractor did not provide MIME type.\n");
                    return -1;
                }

                if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MPEG2TS)) {
                    syncInfoPresent = false;
                }
            }

            size_t numTracks = extractor->countTracks();

            if (gWriteMP4) {
                bool haveAudio = false;
                bool haveVideo = false;
                for (size_t i = 0; i < numTracks; ++i) {
                    sp<MediaSource> source = CreateMediaSourceFromIMediaSource(
                            extractor->getTrack(i));
                    if (source == nullptr) {
                        fprintf(stderr, "skip NULL track %zu, track count %zu.\n", i, numTracks);
                        continue;
                    }

                    const char *mime;
                    CHECK(source->getFormat()->findCString(
                                kKeyMIMEType, &mime));

                    bool useTrack = false;
                    if (!haveAudio && !strncasecmp("audio/", mime, 6)) {
                        haveAudio = true;
                        useTrack = true;
                    } else if (!haveVideo && !strncasecmp("video/", mime, 6)) {
                        haveVideo = true;
                        useTrack = true;
                    }

                    if (useTrack) {
                        mediaSources.push(source);

                        if (haveAudio && haveVideo) {
                            break;
                        }
                    }
                }
            } else {
                sp<MetaData> meta;
                size_t i;
                for (i = 0; i < numTracks; ++i) {
                    meta = extractor->getTrackMetaData(
                            i, MediaExtractor::kIncludeExtensiveMetaData);

                    if (meta == NULL) {
                        continue;
                    }
                    const char *mime;
                    meta->findCString(kKeyMIMEType, &mime);

                    if (audioOnly && !strncasecmp(mime, "audio/", 6)) {
                        break;
                    }

                    if (!audioOnly && !strncasecmp(mime, "video/", 6)) {
                        break;
                    }

                    meta = NULL;
                }

                if (meta == NULL) {
                    fprintf(stderr,
                            "No suitable %s track found. The '-a' option will "
                            "target audio tracks only, the default is to target "
                            "video tracks only.\n",
                            audioOnly ? "audio" : "video");
                    return -1;
                }

                int64_t thumbTimeUs;
                if (meta->findInt64(kKeyThumbnailTime, &thumbTimeUs)) {
                    printf("thumbnailTime: %" PRId64 " us (%.2f secs)\n",
                           thumbTimeUs, thumbTimeUs / 1E6);
                }

                mediaSource = CreateMediaSourceFromIMediaSource(extractor->getTrack(i));
                if (mediaSource == nullptr) {
                    fprintf(stderr, "skip NULL track %zu, total tracks %zu.\n", i, numTracks);
                    return -1;
                }
            }
        }

        if (gWriteMP4) {
            writeSourcesToMP4(mediaSources, syncInfoPresent);
        } else if (dumpStream) {
            dumpSource(mediaSource, dumpStreamFilename);
        } else if (dumpPCMStream) {
            sp<MediaSource> decSource = SimpleDecodingSource::Create(mediaSource);
            dumpSource(decSource, dumpStreamFilename);
        } else if (seekTest) {
            performSeekTest(mediaSource);
        } else {
            playSource(mediaSource);
        }
    }

    if ((useSurfaceAlloc || useSurfaceTexAlloc) && !audioOnly) {
        gSurface.clear();

        if (useSurfaceAlloc) {
            composerClient->dispose();
        }
    }

    return 0;
}