Bug 1098126 - Use MoofParser to read fragmented MP4 data; r=mattwoodrow
--- a/media/libstagefright/binding/Index.cpp
+++ b/media/libstagefright/binding/Index.cpp
@@ -70,18 +70,104 @@ RangeFinder::Contains(MediaByteRange aBy
if (mRanges[mIndex].Contains(aByteRange)) {
return true;
}
}
return false;
}
+SampleIterator::SampleIterator(Index* aIndex)
+ : mIndex(aIndex)
+ , mCurrentMoof(0)
+ , mCurrentSample(0)
+{
+}
+
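+// GetNext() returns the next sample in decode order with its payload read
+// synchronously from the stream, or nullptr once the parsed moofs run out.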
+MP4Sample* SampleIterator::GetNext()
+{
+ nsAutoPtr<MP4Sample> sample(Get());
+ if (!sample) {
+ return nullptr;
+ }
+
+ // Do the blocking read
+ sample->data = sample->extra_buffer = new uint8_t[sample->size];
+
+  size_t bytesRead = 0;
+  mIndex->mSource->ReadAt(sample->byte_offset, sample->data, sample->size,
+                          &bytesRead);
+
+  // Return however much we actually read so that a short or failed read
+  // propagates to the caller.
+  sample->size = bytesRead;
+
+ Next();
+
+ return sample.forget();
+}
+
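+// Get() fills in an MP4Sample from the current moof index entry (timestamps,
+// byte range, sync flag) without reading any sample data.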
+MP4Sample* SampleIterator::Get()
+{
+ if (!mIndex->mMoofParser) {
+ return nullptr;
+ }
+
+ nsTArray<Moof>& moofs = mIndex->mMoofParser->mMoofs;
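+  // Advance to the next moof that still has unread samples; bail out when
+  // the parsed moofs are exhausted.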
+ while (true) {
+ if (mCurrentMoof >= moofs.Length()) {
+      return nullptr;
+ }
+ if (mCurrentSample < moofs[mCurrentMoof].mIndex.Length()) {
+ break;
+ }
+ mCurrentSample = 0;
+ ++mCurrentMoof;
+ }
+ Sample& s = moofs[mCurrentMoof].mIndex[mCurrentSample];
+ nsAutoPtr<MP4Sample> sample(new MP4Sample());
+ sample->decode_timestamp = s.mDecodeTime;
+ sample->composition_timestamp = s.mCompositionRange.start;
+ sample->duration = s.mCompositionRange.end - s.mCompositionRange.start;
+ sample->byte_offset = s.mByteRange.mStart;
+ sample->is_sync_point = s.mSync;
+
+ sample->size = s.mByteRange.mEnd - s.mByteRange.mStart;
+
+ return sample.forget();
+}
+
+void SampleIterator::Next()
+{
+ ++mCurrentSample;
+}
+
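+// Seek() does a linear scan from the first moof and leaves the iterator on
+// the last sync sample whose composition time is at or before aTime.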
+void SampleIterator::Seek(Microseconds aTime)
+{
+ size_t syncMoof = 0;
+ size_t syncSample = 0;
+ mCurrentMoof = 0;
+ mCurrentSample = 0;
+ while (true) {
+    nsAutoPtr<MP4Sample> sample(Get());
+    // Stop when we run out of samples or pass the target time.
+    if (!sample || sample->composition_timestamp > aTime) {
+      break;
+    }
+ if (sample->is_sync_point) {
+ syncMoof = mCurrentMoof;
+ syncSample = mCurrentSample;
+ }
+ Next();
+ }
+ mCurrentMoof = syncMoof;
+ mCurrentSample = syncSample;
+}
+
Index::Index(const stagefright::Vector<MediaSource::Indice>& aIndex,
Stream* aSource, uint32_t aTrackId)
+ : mSource(aSource)
{
if (aIndex.isEmpty()) {
mMoofParser = new MoofParser(aSource, aTrackId);
} else {
for (size_t i = 0; i < aIndex.size(); i++) {
const MediaSource::Indice& indice = aIndex[i];
Sample sample;
sample.mByteRange = MediaByteRange(indice.start_offset,
--- a/media/libstagefright/binding/MoofParser.cpp
+++ b/media/libstagefright/binding/MoofParser.cpp
@@ -205,16 +205,17 @@ Moof::ParseTrun(Box& aBox, Tfhd& aTfhd,
ctsOffset = reader->Read32();
}
}
Sample sample;
sample.mByteRange = MediaByteRange(offset, offset + sampleSize);
offset += sampleSize;
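+    // Record the decode time on the media timeline so that SampleIterator
+    // can expose it on the demuxed MP4Sample.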
+    sample.mDecodeTime =
+      aMdhd.ToMicroseconds((int64_t)decodeTime - aEdts.mMediaStart);
sample.mCompositionRange = Interval<Microseconds>(
aMdhd.ToMicroseconds((int64_t)decodeTime + ctsOffset - aEdts.mMediaStart),
aMdhd.ToMicroseconds((int64_t)decodeTime + ctsOffset + sampleDuration - aEdts.mMediaStart));
decodeTime += sampleDuration;
sample.mSync = !(sampleFlags & 0x1010000);
mIndex.AppendElement(sample);
--- a/media/libstagefright/binding/include/mp4_demuxer/DecoderData.h
+++ b/media/libstagefright/binding/include/mp4_demuxer/DecoderData.h
@@ -160,14 +160,13 @@ public:
uint8_t* data;
size_t size;
CryptoSample crypto;
nsRefPtr<nsRcTArray<uint8_t>> prefix_data;
void Prepend(const uint8_t* aData, size_t aSize);
-private:
nsAutoArrayPtr<uint8_t> extra_buffer;
};
}
#endif
--- a/media/libstagefright/binding/include/mp4_demuxer/Index.h
+++ b/media/libstagefright/binding/include/mp4_demuxer/Index.h
@@ -9,31 +9,54 @@
#include "mp4_demuxer/mp4_demuxer.h"
namespace mp4_demuxer
{
template <typename T> class Interval;
class MoofParser;
class Sample;
+class Index;
+
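+// Iterates over the samples of a fragmented MP4 track in decode order, using
+// the moof index that Index builds with MoofParser.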
+class SampleIterator
+{
+public:
+ SampleIterator(Index* aIndex);
+ MP4Sample* GetNext();
+ void Seek(Microseconds aTime);
+
+private:
+ MP4Sample* Get();
+ void Next();
+ nsRefPtr<Index> mIndex;
+ size_t mCurrentMoof;
+ size_t mCurrentSample;
+};
class Index
{
public:
+ NS_INLINE_DECL_THREADSAFE_REFCOUNTING(Index)
+
Index(const stagefright::Vector<stagefright::MediaSource::Indice>& aIndex,
Stream* aSource, uint32_t aTrackId);
- ~Index();
void UpdateMoofIndex(const nsTArray<mozilla::MediaByteRange>& aByteRanges);
Microseconds GetEndCompositionIfBuffered(
const nsTArray<mozilla::MediaByteRange>& aByteRanges);
void ConvertByteRangesToTimeRanges(
const nsTArray<mozilla::MediaByteRange>& aByteRanges,
nsTArray<Interval<Microseconds>>* aTimeRanges);
uint64_t GetEvictionOffset(Microseconds aTime);
+  bool IsFragmented() { return !!mMoofParser; }
+
+ friend class SampleIterator;
private:
+ ~Index();
+
+ Stream* mSource;
nsTArray<Sample> mIndex;
nsAutoPtr<MoofParser> mMoofParser;
};
}
#endif
--- a/media/libstagefright/binding/include/mp4_demuxer/MoofParser.h
+++ b/media/libstagefright/binding/include/mp4_demuxer/MoofParser.h
@@ -103,16 +103,17 @@ public:
explicit Edts(Box& aBox);
int64_t mMediaStart;
};
struct Sample
{
mozilla::MediaByteRange mByteRange;
+ Microseconds mDecodeTime;
Interval<Microseconds> mCompositionRange;
bool mSync;
};
class Moof
{
public:
Moof(Box& aBox, Trex& aTrex, Mdhd& aMdhd, Edts& aEdts);
--- a/media/libstagefright/binding/mp4_demuxer.cpp
+++ b/media/libstagefright/binding/mp4_demuxer.cpp
@@ -21,21 +21,23 @@ namespace mp4_demuxer
{
struct StageFrightPrivate
{
sp<MPEG4Extractor> mExtractor;
sp<MediaSource> mAudio;
MediaSource::ReadOptions mAudioOptions;
+ nsAutoPtr<SampleIterator> mAudioIterator;
sp<MediaSource> mVideo;
MediaSource::ReadOptions mVideoOptions;
+ nsAutoPtr<SampleIterator> mVideoIterator;
- nsTArray<nsAutoPtr<Index>> mIndexes;
+ nsTArray<nsRefPtr<Index>> mIndexes;
};
class DataSourceAdapter : public DataSource
{
public:
explicit DataSourceAdapter(Stream* aSource) : mSource(aSource) {}
~DataSourceAdapter() {}
@@ -95,31 +97,41 @@ MP4Demuxer::Init()
sp<MetaData> metaData = e->getTrackMetaData(i);
const char* mimeType;
if (metaData == nullptr || !metaData->findCString(kKeyMIMEType, &mimeType)) {
continue;
}
if (!mPrivate->mAudio.get() && !strncmp(mimeType, "audio/", 6)) {
- mPrivate->mAudio = e->getTrack(i);
- if (mPrivate->mAudio->start() != OK) {
+ sp<MediaSource> track = e->getTrack(i);
+ if (track->start() != OK) {
return false;
}
+ mPrivate->mAudio = track;
mAudioConfig.Update(metaData, mimeType);
- mPrivate->mIndexes.AppendElement(new Index(
- mPrivate->mAudio->exportIndex(), mSource, mAudioConfig.mTrackId));
+ nsRefPtr<Index> index = new Index(mPrivate->mAudio->exportIndex(),
+ mSource, mAudioConfig.mTrackId);
+ mPrivate->mIndexes.AppendElement(index);
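+      // Fragmented files have no sample table, so the Index falls back to its
+      // MoofParser; demux such tracks through a SampleIterator.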
+ if (index->IsFragmented()) {
+ mPrivate->mAudioIterator = new SampleIterator(index);
+ }
} else if (!mPrivate->mVideo.get() && !strncmp(mimeType, "video/", 6)) {
- mPrivate->mVideo = e->getTrack(i);
- if (mPrivate->mVideo->start() != OK) {
+ sp<MediaSource> track = e->getTrack(i);
+ if (track->start() != OK) {
return false;
}
+ mPrivate->mVideo = track;
mVideoConfig.Update(metaData, mimeType);
- mPrivate->mIndexes.AppendElement(new Index(
- mPrivate->mVideo->exportIndex(), mSource, mVideoConfig.mTrackId));
+ nsRefPtr<Index> index = new Index(mPrivate->mVideo->exportIndex(),
+ mSource, mVideoConfig.mTrackId);
+ mPrivate->mIndexes.AppendElement(index);
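+      // As with audio, fragmented video tracks get a moof-based iterator.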
+ if (index->IsFragmented()) {
+ mPrivate->mVideoIterator = new SampleIterator(index);
+ }
}
}
sp<MetaData> metaData = e->getMetaData();
mCrypto.Update(metaData);
return mPrivate->mAudio.get() || mPrivate->mVideo.get();
}
@@ -145,30 +157,42 @@ bool
MP4Demuxer::CanSeek()
{
return mPrivate->mExtractor->flags() & MediaExtractor::CAN_SEEK;
}
void
MP4Demuxer::SeekAudio(Microseconds aTime)
{
- mPrivate->mAudioOptions.setSeekTo(
- aTime, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);
+ if (mPrivate->mAudioIterator) {
+ mPrivate->mAudioIterator->Seek(aTime);
+ } else {
+ mPrivate->mAudioOptions.setSeekTo(
+ aTime, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);
+ }
}
void
MP4Demuxer::SeekVideo(Microseconds aTime)
{
- mPrivate->mVideoOptions.setSeekTo(
- aTime, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);
+ if (mPrivate->mVideoIterator) {
+ mPrivate->mVideoIterator->Seek(aTime);
+ } else {
+ mPrivate->mVideoOptions.setSeekTo(
+ aTime, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);
+ }
}
MP4Sample*
MP4Demuxer::DemuxAudioSample()
{
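+  // Fragmented files are demuxed via the moof-based iterator rather than
+  // MediaSource::read().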
+ if (mPrivate->mAudioIterator) {
+ return mPrivate->mAudioIterator->GetNext();
+ }
+
nsAutoPtr<MP4Sample> sample(new MP4Sample());
status_t status =
mPrivate->mAudio->read(&sample->mMediaBuffer, &mPrivate->mAudioOptions);
mPrivate->mAudioOptions.clearSeekTo();
if (status < 0) {
return nullptr;
}
@@ -176,16 +200,24 @@ MP4Demuxer::DemuxAudioSample()
sample->Update(mAudioConfig.media_time);
return sample.forget();
}
MP4Sample*
MP4Demuxer::DemuxVideoSample()
{
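+  // Fragmented files: take the sample from the moof-based iterator and attach
+  // the video track's Annex B prefix data for the decoder.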
+ if (mPrivate->mVideoIterator) {
+ nsAutoPtr<MP4Sample> sample(mPrivate->mVideoIterator->GetNext());
+ if (sample) {
+ sample->prefix_data = mVideoConfig.annex_b;
+ }
+ return sample.forget();
+ }
+
nsAutoPtr<MP4Sample> sample(new MP4Sample());
status_t status =
mPrivate->mVideo->read(&sample->mMediaBuffer, &mPrivate->mVideoOptions);
mPrivate->mVideoOptions.clearSeekTo();
if (status < 0) {
return nullptr;
}