Bug 1199518: [webm] P2. Properly determine next keyframe time. r=kinetik
author Jean-Yves Avenard <jyavenard@mozilla.com>
Sat, 12 Sep 2015 20:51:31 +1000
changeset 296474 f72d4bfd281163432af96b0444d89b36e17c47b3
parent 296473 1794ed0461559432d6685eab79bf41f6351e46d6
child 296475 e668e5f2fb8ab962908d4903662ede26ba9b6868
push id 962
push user jlund@mozilla.com
push date Fri, 04 Dec 2015 23:28:54 +0000
treeherder mozilla-release@23a2d286e80f
reviewers kinetik
bugs 1199518
milestone 43.0a1
Bug 1199518: [webm] P2. Properly determine next keyframe time. r=kinetik
dom/media/webm/WebMDemuxer.cpp
dom/media/webm/WebMDemuxer.h
--- a/dom/media/webm/WebMDemuxer.cpp
+++ b/dom/media/webm/WebMDemuxer.cpp
@@ -29,16 +29,21 @@
 
 namespace mozilla {
 
 using namespace gfx;
 
 PRLogModuleInfo* gWebMDemuxerLog = nullptr;
 extern PRLogModuleInfo* gNesteggLog;
 
+// How far ahead we will look when searching for a future keyframe, in
+// microseconds. Most webm files encountered appear to have keyframes spaced
+// less than 4s apart, so 10s (10000000us) is a generous upper bound.
+#define MAX_LOOK_AHEAD 10000000
+
 // Functions for reading and seeking using WebMDemuxer required for
 // nestegg_io. The 'user data' passed to these functions is the
 // demuxer.
 static int webmdemux_read(void* aBuffer, size_t aLength, void* aUserData)
 {
   MOZ_ASSERT(aUserData);
   MOZ_ASSERT(aLength < UINT32_MAX);
   WebMDemuxer* demuxer = reinterpret_cast<WebMDemuxer*>(aUserData);
@@ -657,30 +662,16 @@ WebMDemuxer::DemuxPacket()
   nsRefPtr<NesteggPacketHolder> holder = new NesteggPacketHolder();
   if (!holder->Init(packet, offset, track, false)) {
     return nullptr;
   }
 
   return holder;
 }
 
-int64_t
-WebMDemuxer::GetNextKeyframeTime()
-{
-  EnsureUpToDateIndex();
-  uint64_t keyframeTime;
-  uint64_t lastFrame =
-    media::TimeUnit::FromMicroseconds(mLastVideoFrameTime.refOr(0)).ToNanoseconds();
-  if (!mBufferedState->GetNextKeyframeTime(lastFrame, &keyframeTime) ||
-      keyframeTime <= lastFrame) {
-    return -1;
-  }
-  return media::TimeUnit::FromNanoseconds(keyframeTime).ToMicroseconds();
-}
-
 void
 WebMDemuxer::PushAudioPacket(NesteggPacketHolder* aItem)
 {
   mAudioPackets.PushFront(aItem);
 }
 
 void
 WebMDemuxer::PushVideoPacket(NesteggPacketHolder* aItem)
@@ -874,43 +865,69 @@ WebMTrackDemuxer::GetSamples(int32_t aNu
     UpdateSamples(samples->mSamples);
     return SamplesPromise::CreateAndResolve(samples, __func__);
   }
 }
 
 void
 WebMTrackDemuxer::SetNextKeyFrameTime()
 {
+  if (mType != TrackInfo::kVideoTrack) {
+    return;
+  }
+
   int64_t frameTime = -1;
 
   mNextKeyframeTime.reset();
 
-  if (mType == TrackInfo::kVideoTrack) {
-    MediaRawDataQueue skipSamplesQueue;
-    bool foundKeyframe = false;
-    while (!foundKeyframe && mSamples.GetSize()) {
-      nsRefPtr<MediaRawData> sample(mSamples.PopFront());
-      if (sample->mKeyframe) {
-        frameTime = sample->mTime;
-        foundKeyframe = true;
-      }
-      skipSamplesQueue.PushFront(sample);
+  MediaRawDataQueue skipSamplesQueue;
+  nsRefPtr<MediaRawData> sample;
+  bool foundKeyframe = false;
+  while (!foundKeyframe && mSamples.GetSize()) {
+    sample = mSamples.PopFront();
+    if (sample->mKeyframe) {
+      frameTime = sample->mTime;
+      foundKeyframe = true;
     }
-    while(skipSamplesQueue.GetSize()) {
-      nsRefPtr<MediaRawData> data = skipSamplesQueue.PopFront();
-      mSamples.PushFront(data);
+    skipSamplesQueue.Push(sample);
+  }
+  Maybe<int64_t> startTime;
+  if (skipSamplesQueue.GetSize()) {
+    sample = skipSamplesQueue.PopFront();
+    startTime.emplace(sample->mTimecode);
+    skipSamplesQueue.PushFront(sample);
+  }
+  // Demux and buffer frames until we find a keyframe.
+  while (!foundKeyframe && (sample = NextSample())) {
+    if (sample->mKeyframe) {
+      frameTime = sample->mTime;
+      foundKeyframe = true;
     }
-    if (frameTime == -1) {
-      frameTime = mParent->GetNextKeyframeTime();
+    skipSamplesQueue.Push(sample);
+    if (!startTime) {
+      startTime.emplace(sample->mTimecode);
+    } else if (!foundKeyframe &&
+               sample->mTimecode > startTime.ref() + MAX_LOOK_AHEAD) {
+      WEBM_DEBUG("Couldn't find keyframe in a reasonable time, aborting");
+      break;
     }
   }
+  // We may have demuxed more than intended, so ensure that all frames are kept
+  // in the right order.
+  mSamples.PushFront(skipSamplesQueue);
 
   if (frameTime != -1) {
     mNextKeyframeTime.emplace(media::TimeUnit::FromMicroseconds(frameTime));
-    WEBM_DEBUG("Next Keyframe %f", mNextKeyframeTime.value().ToSeconds());
+    WEBM_DEBUG("Next Keyframe %f (%u queued %.02fs)",
+               mNextKeyframeTime.value().ToSeconds(),
+               uint32_t(mSamples.GetSize()),
+               media::TimeUnit::FromMicroseconds(mSamples.Last()->mTimecode - mSamples.First()->mTimecode).ToSeconds());
+  } else {
+    WEBM_DEBUG("Couldn't determine next keyframe time  (%u queued)",
+               uint32_t(mSamples.GetSize()));
   }
 }
 
 void
 WebMTrackDemuxer::Reset()
 {
   mSamples.Reset();
   media::TimeIntervals buffered = GetBuffered();
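
For reference, here is a minimal, standalone sketch of the look-ahead logic this patch adds to SetNextKeyFrameTime(). It is not the Mozilla code: plain std types stand in for MediaRawData and MediaRawDataQueue, a hypothetical demuxNext callback stands in for WebMTrackDemuxer::NextSample(), and the 10s limit mirrors MAX_LOOK_AHEAD from the patch.

// Simplified, standalone model of the keyframe look-ahead above.
#include <cstdint>
#include <deque>
#include <functional>
#include <optional>

struct Sample {
  int64_t mTime = 0;      // presentation time in microseconds
  int64_t mTimecode = 0;  // container timecode in microseconds
  bool mKeyframe = false;
};

// Upper bound on how far past the first inspected sample we keep demuxing
// while looking for a keyframe (10s, mirroring MAX_LOOK_AHEAD in the patch).
constexpr int64_t kMaxLookAheadUs = 10000000;

// Returns the time of the next keyframe, if one is found within the limit.
// Every sample inspected is put back onto `buffered` in its original order,
// so nothing is lost; this mirrors mSamples.PushFront(skipSamplesQueue).
std::optional<int64_t>
FindNextKeyframeTime(std::deque<Sample>& buffered,
                     const std::function<std::optional<Sample>()>& demuxNext)
{
  std::deque<Sample> skipped;
  std::optional<int64_t> frameTime;
  std::optional<int64_t> startTimecode;

  // First scan what is already buffered.
  while (!frameTime && !buffered.empty()) {
    Sample s = buffered.front();
    buffered.pop_front();
    if (s.mKeyframe) {
      frameTime = s.mTime;
    }
    skipped.push_back(s);
  }
  if (!skipped.empty()) {
    startTimecode = skipped.front().mTimecode;
  }

  // Then demux ahead, bounded by kMaxLookAheadUs past the first sample seen.
  while (!frameTime) {
    std::optional<Sample> s = demuxNext();
    if (!s) {
      break;  // end of stream
    }
    if (s->mKeyframe) {
      frameTime = s->mTime;
    }
    skipped.push_back(*s);
    if (!startTimecode) {
      startTimecode = s->mTimecode;
    } else if (!frameTime && s->mTimecode > *startTimecode + kMaxLookAheadUs) {
      break;  // give up: no keyframe within a reasonable distance
    }
  }

  // Hand everything back to the front of the queue, preserving order.
  buffered.insert(buffered.begin(), skipped.begin(), skipped.end());
  return frameTime;
}

A caller would pass a lambda wrapping the track demuxer's next-sample call as demuxNext; because every inspected sample is pushed back onto the buffered queue, the look-ahead never drops data, which is what lets the real code report how much it now has queued in the WEBM_DEBUG output.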
--- a/dom/media/webm/WebMDemuxer.h
+++ b/dom/media/webm/WebMDemuxer.h
@@ -23,32 +23,53 @@ class MediaRawDataQueue {
   uint32_t GetSize() {
     return mQueue.size();
   }
 
   void Push(MediaRawData* aItem) {
     mQueue.push_back(aItem);
   }
 
+  void Push(const MediaRawDataQueue& aOther) {
+    mQueue.insert(mQueue.end(), aOther.mQueue.begin(), aOther.mQueue.end());
+  }
+
   void PushFront(MediaRawData* aItem) {
     mQueue.push_front(aItem);
   }
 
+  void PushFront(const MediaRawDataQueue& aOther) {
+    mQueue.insert(mQueue.begin(), aOther.mQueue.begin(), aOther.mQueue.end());
+  }
+
   already_AddRefed<MediaRawData> PopFront() {
     nsRefPtr<MediaRawData> result = mQueue.front().forget();
     mQueue.pop_front();
     return result.forget();
   }
 
   void Reset() {
     while (!mQueue.empty()) {
       mQueue.pop_front();
     }
   }
 
+  MediaRawDataQueue& operator=(const MediaRawDataQueue& aOther) {
+    mQueue = aOther.mQueue;
+    return *this;
+  }
+
+  const nsRefPtr<MediaRawData>& First() const {
+    return mQueue.front();
+  }
+
+  const nsRefPtr<MediaRawData>& Last() const {
+    return mQueue.back();
+  }
+
 private:
   std::deque<nsRefPtr<MediaRawData>> mQueue;
 };
 
 class WebMTrackDemuxer;
 
 class WebMDemuxer : public MediaDataDemuxer
 {
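
The new bulk operations on MediaRawDataQueue are what make that one-call restore possible. A standalone sketch of the same idea, assuming std::shared_ptr in place of nsRefPtr and a plain struct in place of MediaRawData:

#include <deque>
#include <memory>

struct RawData {
  long long mTime = 0;
  long long mTimecode = 0;
  bool mKeyframe = false;
};

class RawDataQueue {
public:
  size_t GetSize() const { return mQueue.size(); }

  void Push(std::shared_ptr<RawData> aItem) {
    mQueue.push_back(std::move(aItem));
  }

  // Append the whole content of another queue after our last element.
  void Push(const RawDataQueue& aOther) {
    mQueue.insert(mQueue.end(), aOther.mQueue.begin(), aOther.mQueue.end());
  }

  void PushFront(std::shared_ptr<RawData> aItem) {
    mQueue.push_front(std::move(aItem));
  }

  // Insert the whole content of another queue before our first element,
  // keeping aOther's internal ordering.
  void PushFront(const RawDataQueue& aOther) {
    mQueue.insert(mQueue.begin(), aOther.mQueue.begin(), aOther.mQueue.end());
  }

  std::shared_ptr<RawData> PopFront() {
    std::shared_ptr<RawData> result = std::move(mQueue.front());
    mQueue.pop_front();
    return result;
  }

  const std::shared_ptr<RawData>& First() const { return mQueue.front(); }
  const std::shared_ptr<RawData>& Last() const { return mQueue.back(); }

private:
  std::deque<std::shared_ptr<RawData>> mQueue;
};

With PushFront(const queue&) available, handing a whole look-ahead buffer back to mSamples is a single insert at the head of the deque, rather than moving samples one at a time as the previous code did; First() and Last() exist only so the debug logging can report the queued time range.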
@@ -111,18 +132,16 @@ private:
   void Cleanup();
   nsresult InitBufferedState();
   nsresult ReadMetadata();
   void NotifyDataArrived(uint32_t aLength, int64_t aOffset) override;
   void NotifyDataRemoved() override;
   void EnsureUpToDateIndex();
   media::TimeIntervals GetBuffered();
   virtual nsresult SeekInternal(const media::TimeUnit& aTarget);
-  // Get the timestamp of the next keyframe
-  int64_t GetNextKeyframeTime();
 
   // Read a packet from the nestegg file. Returns nullptr if all packets for
  // the particular track have been read. Pass TrackInfo::kAudioTrack or
   // TrackInfo::kVideoTrack to indicate the type of the packet we want to read.
   nsRefPtr<NesteggPacketHolder> NextPacket(TrackInfo::TrackType aType);
 
   // Internal method that demuxes the next packet from the stream. The caller
   // is responsible for making sure it doesn't get lost.