Bug 1138253 - Count dropped frames directly; r=cpearce
author Anthony Jones <ajones@mozilla.com>
Tue, 03 Mar 2015 17:46:48 +1300
changeset 231477 0d806ade061f74ab6e32539330ce060af849511d
parent 231476 743bc389a2cee853ef6af8ea74a758b34f52ff3f
child 231478 c964a0be6c594e97884f37269dfd48d1aa9a4223
push id 56268
push user ajones@mozilla.com
push date Tue, 03 Mar 2015 04:47:07 +0000
treeherder mozilla-inbound@0d806ade061f
reviewers cpearce
bugs 1138253
milestone 39.0a1
dom/html/HTMLVideoElement.cpp
dom/media/AbstractMediaDecoder.h
dom/media/MediaDecoder.h
dom/media/MediaDecoderStateMachine.cpp
dom/media/android/AndroidMediaReader.cpp
dom/media/fmp4/MP4Reader.cpp
dom/media/gstreamer/GStreamerReader.cpp
dom/media/mediasource/SourceBufferDecoder.cpp
dom/media/mediasource/SourceBufferDecoder.h
dom/media/omx/MediaOmxReader.cpp
dom/media/webaudio/BufferDecoder.cpp
dom/media/webaudio/BufferDecoder.h
dom/media/webm/IntelWebMVideoDecoder.cpp
dom/media/webm/SoftwareWebMVideoDecoder.cpp
--- a/dom/html/HTMLVideoElement.cpp
+++ b/dom/html/HTMLVideoElement.cpp
@@ -210,18 +210,18 @@ HTMLVideoElement::GetVideoPlaybackQualit
       if (perf) {
         creationTime = perf->GetDOMTiming()->TimeStampToDOMHighRes(TimeStamp::Now());
       }
     }
 
     if (mDecoder) {
       MediaDecoder::FrameStatistics& stats = mDecoder->GetFrameStatistics();
       totalFrames = stats.GetParsedFrames();
-      droppedFrames = totalFrames - stats.GetPresentedFrames();
-      corruptedFrames = totalFrames - stats.GetDecodedFrames();
+      droppedFrames = stats.GetDroppedFrames();
+      corruptedFrames = 0;
     }
   }
 
   nsRefPtr<VideoPlaybackQuality> playbackQuality =
     new VideoPlaybackQuality(this, creationTime, totalFrames, droppedFrames,
                              corruptedFrames);
   return playbackQuality.forget();
 }
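
The old report derived droppedFrames as parsed minus presented, which also counts frames that are still queued or were skipped before decoding, and it reported parsed minus decoded as corruptedFrames, which counts frames that were never decoded rather than corrupted ones. The patch instead reports the dropped counter maintained by the playback code and leaves corruptedFrames at zero. A minimal standalone sketch of the before/after arithmetic (the counter values are illustrative only):

#include <cstdint>
#include <cstdio>

int main() {
  // Illustrative counter values after some playback.
  uint32_t parsed = 100, decoded = 96, presented = 90, dropped = 4;

  // Old derivation: "never presented" and "never decoded" stood in for
  // dropped and corrupted, so both were overstated.
  uint32_t oldDropped = parsed - presented;  // 10
  uint32_t oldCorrupted = parsed - decoded;  // 4

  // New report: use the counter kept by the playback code; corruption is
  // not tracked, so it is reported as zero.
  uint32_t newDropped = dropped;             // 4
  uint32_t newCorrupted = 0;

  std::printf("old dropped=%u corrupted=%u\n", oldDropped, oldCorrupted);
  std::printf("new dropped=%u corrupted=%u\n", newDropped, newCorrupted);
  return 0;
}
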
--- a/dom/media/AbstractMediaDecoder.h
+++ b/dom/media/AbstractMediaDecoder.h
@@ -56,19 +56,21 @@ public:
   // Get the current MediaResource being used. Its URI will be returned
   // by currentSrc. Returns what was passed to Load(), if Load() has been called.
   virtual MediaResource* GetResource() const = 0;
 
   // Called by the decode thread to keep track of the number of bytes read
   // from the resource.
   virtual void NotifyBytesConsumed(int64_t aBytes, int64_t aOffset) = 0;
 
-  // Increments the parsed and decoded frame counters by the passed in counts.
+  // Increments the parsed, decoded and dropped frame counters by the passed in
+  // counts.
   // Can be called on any thread.
-  virtual void NotifyDecodedFrames(uint32_t aParsed, uint32_t aDecoded) = 0;
+  virtual void NotifyDecodedFrames(uint32_t aParsed, uint32_t aDecoded,
+                                   uint32_t aDropped) = 0;
 
   // Return the duration of the media in microseconds.
   virtual int64_t GetMediaDuration() = 0;
 
   // Set the duration of the media in microseconds.
   virtual void SetMediaDuration(int64_t aDuration) = 0;
 
   // Sets the duration of the media in microseconds. The MediaDecoder
@@ -133,24 +135,25 @@ public:
   virtual bool HasInitializationData() { return false; }
 
   // Stack based class to assist in notifying the frame statistics of
   // parsed and decoded frames. Use inside video demux & decode functions
   // to ensure all parsed and decoded frames are reported on all return paths.
   class AutoNotifyDecoded {
   public:
     explicit AutoNotifyDecoded(AbstractMediaDecoder* aDecoder)
-      : mParsed(0), mDecoded(0), mDecoder(aDecoder) {}
+      : mParsed(0), mDecoded(0), mDropped(0), mDecoder(aDecoder) {}
     ~AutoNotifyDecoded() {
       if (mDecoder) {
-        mDecoder->NotifyDecodedFrames(mParsed, mDecoded);
+        mDecoder->NotifyDecodedFrames(mParsed, mDecoded, mDropped);
       }
     }
     uint32_t mParsed;
     uint32_t mDecoded;
+    uint32_t mDropped;
 
   private:
     AbstractMediaDecoder* mDecoder;
   };
 
 #ifdef MOZ_EME
   virtual nsresult SetCDMProxy(CDMProxy* aProxy) { return NS_ERROR_NOT_IMPLEMENTED; }
   virtual CDMProxy* GetCDMProxy() { return nullptr; }
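
AutoNotifyDecoded is the funnel the readers below use: they accumulate into a.mParsed, a.mDecoded and now a.mDropped, and the destructor reports the totals in a single NotifyDecodedFrames call on every return path. A minimal standalone sketch of the same RAII pattern (FakeDecoder and AutoNotify are illustrative stand-ins, not the Gecko classes):

#include <cstdint>
#include <cstdio>

// Stand-in for AbstractMediaDecoder, reduced to the one method this patch changes.
struct FakeDecoder {
  void NotifyDecodedFrames(uint32_t aParsed, uint32_t aDecoded, uint32_t aDropped) {
    std::printf("parsed=%u decoded=%u dropped=%u\n", aParsed, aDecoded, aDropped);
  }
};

// Same shape as AutoNotifyDecoded: counters are bumped by the caller and
// flushed to the decoder exactly once, from the destructor.
class AutoNotify {
public:
  explicit AutoNotify(FakeDecoder* aDecoder)
    : mParsed(0), mDecoded(0), mDropped(0), mDecoder(aDecoder) {}
  ~AutoNotify() {
    if (mDecoder) {
      mDecoder->NotifyDecodedFrames(mParsed, mDecoded, mDropped);
    }
  }
  uint32_t mParsed;
  uint32_t mDecoded;
  uint32_t mDropped;

private:
  FakeDecoder* mDecoder;
};

int main() {
  FakeDecoder decoder;
  {
    AutoNotify a(&decoder);
    a.mParsed++;   // demuxed a frame
    a.mDropped++;  // skipped it while seeking to a keyframe
  }                // totals reported here, on every return path
  return 0;
}
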
--- a/dom/media/MediaDecoder.h
+++ b/dom/media/MediaDecoder.h
@@ -932,17 +932,18 @@ public:
   // Threadsafe.
   class FrameStatistics {
   public:
 
     FrameStatistics() :
         mReentrantMonitor("MediaDecoder::FrameStats"),
         mParsedFrames(0),
         mDecodedFrames(0),
-        mPresentedFrames(0) {}
+        mPresentedFrames(0),
+        mDroppedFrames(0) {}
 
     // Returns number of frames which have been parsed from the media.
     // Can be called on any thread.
     uint32_t GetParsedFrames() {
       ReentrantMonitorAutoEnter mon(mReentrantMonitor);
       return mParsedFrames;
     }
 
@@ -956,24 +957,32 @@ public:
     // Returns the number of decoded frames which have been sent to the rendering
     // pipeline for painting ("presented").
     // Can be called on any thread.
     uint32_t GetPresentedFrames() {
       ReentrantMonitorAutoEnter mon(mReentrantMonitor);
       return mPresentedFrames;
     }
 
+    // Number of frames that have been skipped because they have missed their
+    // composition deadline.
+    uint32_t GetDroppedFrames() {
+      return mDroppedFrames;
+    }
+
     // Increments the parsed and decoded frame counters by the passed in counts.
     // Can be called on any thread.
-    void NotifyDecodedFrames(uint32_t aParsed, uint32_t aDecoded) {
-      if (aParsed == 0 && aDecoded == 0)
+    void NotifyDecodedFrames(uint32_t aParsed, uint32_t aDecoded,
+                             uint32_t aDropped) {
+      if (aParsed == 0 && aDecoded == 0 && aDropped == 0)
         return;
       ReentrantMonitorAutoEnter mon(mReentrantMonitor);
       mParsedFrames += aParsed;
       mDecodedFrames += aDecoded;
+      mDroppedFrames += aDropped;
     }
 
     // Increments the presented frame counters.
     // Can be called on any thread.
     void NotifyPresentedFrame() {
       ReentrantMonitorAutoEnter mon(mReentrantMonitor);
       ++mPresentedFrames;
     }
@@ -989,26 +998,29 @@ public:
 
     // Number of parsed frames which were actually decoded.
     // Access protected by mReentrantMonitor.
     uint32_t mDecodedFrames;
 
     // Number of decoded frames which were actually sent down the rendering
     // pipeline to be painted ("presented"). Access protected by mReentrantMonitor.
     uint32_t mPresentedFrames;
+
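+    // Number of frames that have been skipped because they missed their
+    // composition deadline.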
+    uint32_t mDroppedFrames;
   };
 
   // Return the frame decode/paint related statistics.
   FrameStatistics& GetFrameStatistics() { return mFrameStats; }
 
   // Increments the parsed and decoded frame counters by the passed in counts.
   // Can be called on any thread.
-  virtual void NotifyDecodedFrames(uint32_t aParsed, uint32_t aDecoded) MOZ_OVERRIDE
+  virtual void NotifyDecodedFrames(uint32_t aParsed, uint32_t aDecoded,
+                                   uint32_t aDropped) MOZ_OVERRIDE
   {
-    GetFrameStatistics().NotifyDecodedFrames(aParsed, aDecoded);
+    GetFrameStatistics().NotifyDecodedFrames(aParsed, aDecoded, aDropped);
   }
 
 protected:
   virtual ~MediaDecoder();
   void SetStateMachineParameters();
 
   static void DormantTimerExpired(nsITimer *aTimer, void *aClosure);
 
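
FrameStatistics now accumulates the dropped count alongside parsed and decoded under the same monitor, and MediaDecoder forwards the extra argument straight through. A simplified standalone model of the class (FrameStats is an illustrative stand-in, with std::mutex in place of Gecko's ReentrantMonitor and only the members this patch touches):

#include <cstdint>
#include <mutex>

class FrameStats {
public:
  void NotifyDecodedFrames(uint32_t aParsed, uint32_t aDecoded, uint32_t aDropped) {
    if (aParsed == 0 && aDecoded == 0 && aDropped == 0) {
      return;  // avoid taking the lock for a no-op notification
    }
    std::lock_guard<std::mutex> lock(mMutex);
    mParsed += aParsed;
    mDecoded += aDecoded;
    mDropped += aDropped;
  }

  void NotifyPresentedFrame() {
    std::lock_guard<std::mutex> lock(mMutex);
    ++mPresented;
  }

  uint32_t GetDroppedFrames() {
    std::lock_guard<std::mutex> lock(mMutex);
    return mDropped;
  }

private:
  std::mutex mMutex;
  uint32_t mParsed = 0;
  uint32_t mDecoded = 0;
  uint32_t mPresented = 0;
  uint32_t mDropped = 0;
};

int main() {
  FrameStats stats;
  stats.NotifyDecodedFrames(/*parsed*/ 2, /*decoded*/ 1, /*dropped*/ 1);
  return stats.GetDroppedFrames() == 1 ? 0 : 1;
}
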
--- a/dom/media/MediaDecoderStateMachine.cpp
+++ b/dom/media/MediaDecoderStateMachine.cpp
@@ -3150,22 +3150,23 @@ void MediaDecoderStateMachine::AdvanceFr
   nsRefPtr<VideoData> currentFrame;
   if (VideoQueue().GetSize() > 0) {
     VideoData* frame = VideoQueue().PeekFront();
 #ifdef PR_LOGGING
     int32_t droppedFrames = 0;
 #endif
     while (IsRealTime() || clock_time >= frame->mTime) {
       mVideoFrameEndTime = frame->GetEndTime();
+      if (currentFrame) {
+        mDecoder->NotifyDecodedFrames(0, 0, 1);
 #ifdef PR_LOGGING
-      if (currentFrame) {
         VERBOSE_LOG("discarding video frame mTime=%lld clock_time=%lld (%d so far)",
                     currentFrame->mTime, clock_time, ++droppedFrames);
+#endif
       }
-#endif
       currentFrame = frame;
       nsRefPtr<VideoData> releaseMe = VideoQueue().PopFront();
       // Notify the decode thread that the video queue's buffers may have
       // free'd up space for more frames.
       mDecoder->GetReentrantMonitor().NotifyAll();
       OnPlaybackOffsetUpdate(frame->mOffset);
       if (VideoQueue().GetSize() == 0)
         break;
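
This is where the new counter is fed during normal playback: each time the clock has overtaken more than one queued frame, every superseded frame is reported as dropped via NotifyDecodedFrames(0, 0, 1). A minimal standalone sketch of the same loop shape (the Frame struct, deque-based queue and AdvancePastClock helper are illustrative, not the state machine's types):

#include <cstdint>
#include <cstdio>
#include <deque>

struct Frame { int64_t mTime; };

// Pop every frame whose start time the clock has already passed; all but the
// most recently popped one missed their composition deadline and count as dropped.
uint32_t AdvancePastClock(std::deque<Frame>& aQueue, int64_t aClockTime) {
  uint32_t dropped = 0;
  bool haveCurrent = false;
  while (!aQueue.empty() && aClockTime >= aQueue.front().mTime) {
    if (haveCurrent) {
      ++dropped;  // the previously selected frame was never shown
    }
    haveCurrent = true;
    aQueue.pop_front();
  }
  return dropped;
}

int main() {
  std::deque<Frame> queue = {{0}, {33}, {66}, {100}};
  // Clock at 70us: the frames at 0, 33 and 66 are all due; two are dropped
  // and the one at 66 is the frame left to present.
  std::printf("dropped=%u\n", (unsigned)AdvancePastClock(queue, 70));
  return 0;
}
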
--- a/dom/media/android/AndroidMediaReader.cpp
+++ b/dom/media/android/AndroidMediaReader.cpp
@@ -157,16 +157,17 @@ bool AndroidMediaReader::DecodeVideoFram
 
     if (aKeyframeSkip) {
       // Disable keyframe skipping for now as
       // stagefright doesn't seem to be telling us
       // when a frame is a keyframe.
 #if 0
       if (!frame.mKeyFrame) {
         ++a.mParsed;
+        ++a.mDropped;
         continue;
       }
 #endif
       aKeyframeSkip = false;
     }
 
     if (frame.mSize == 0)
       return true;
@@ -246,17 +247,17 @@ bool AndroidMediaReader::DecodeVideoFram
                             picture);
     }
 
     if (!v) {
       return false;
     }
     a.mParsed++;
     a.mDecoded++;
-    NS_ASSERTION(decoded <= parsed, "Expect to decode fewer frames than parsed in AndroidMedia...");
+    NS_ASSERTION(a.mDecoded <= a.mParsed, "Expect to decode fewer frames than parsed in AndroidMedia...");
 
     // Since MPAPI doesn't give us the end time of frames, we keep one frame
     // buffered in AndroidMediaReader and push it into the queue as soon
     // we read the following frame so we can use that frame's start time as
     // the end time of the buffered frame.
     if (!mLastVideoFrame) {
       mLastVideoFrame = v;
       continue;
--- a/dom/media/fmp4/MP4Reader.cpp
+++ b/dom/media/fmp4/MP4Reader.cpp
@@ -593,17 +593,17 @@ MP4Reader::RequestVideoData(bool aSkipTo
 
   bool eos = false;
   if (ShouldSkip(aSkipToNextKeyframe, aTimeThreshold)) {
     uint32_t parsed = 0;
     eos = !SkipVideoDemuxToNextKeyFrame(aTimeThreshold, parsed);
     if (!eos && NS_FAILED(mVideo.mDecoder->Flush())) {
       NS_WARNING("Failed to skip/flush video when skipping-to-next-keyframe.");
     }
-    mDecoder->NotifyDecodedFrames(parsed, 0);
+    mDecoder->NotifyDecodedFrames(parsed, 0, parsed);
   }
 
   MonitorAutoLock lock(mVideo.mMonitor);
   nsRefPtr<VideoDataPromise> p = mVideo.mPromise.Ensure(__func__);
   if (eos) {
     mVideo.mPromise.Reject(END_OF_STREAM, __func__);
   } else {
     ScheduleUpdate(kVideo);
--- a/dom/media/gstreamer/GStreamerReader.cpp
+++ b/dom/media/gstreamer/GStreamerReader.cpp
@@ -752,30 +752,31 @@ bool GStreamerReader::DecodeVideoFrame(b
           return true;
         }
       }
       else {
         return true;
       }
     }
 
-    mDecoder->NotifyDecodedFrames(0, 1);
+    mDecoder->NotifyDecodedFrames(0, 1, 0);
 
 #if GST_VERSION_MAJOR >= 1
     GstSample *sample = gst_app_sink_pull_sample(mVideoAppSink);
     buffer = gst_buffer_ref(gst_sample_get_buffer(sample));
     gst_sample_unref(sample);
 #else
     buffer = gst_app_sink_pull_buffer(mVideoAppSink);
 #endif
     mVideoSinkBufferCount--;
   }
 
   bool isKeyframe = !GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT);
   if ((aKeyFrameSkip && !isKeyframe)) {
+    mDecoder->NotifyDecodedFrames(0, 0, 1);
     gst_buffer_unref(buffer);
     return true;
   }
 
   int64_t timestamp = GST_BUFFER_TIMESTAMP(buffer);
   {
     ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
     timestamp = gst_segment_to_stream_time(&mVideoSegment,
@@ -1145,17 +1146,17 @@ GstFlowReturn GStreamerReader::NewBuffer
 
 void GStreamerReader::NewVideoBuffer()
 {
   ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
   /* We have a new video buffer queued in the video sink. Increment the counter
    * and notify the decode thread potentially blocked in DecodeVideoFrame
    */
 
-  mDecoder->NotifyDecodedFrames(1, 0);
+  mDecoder->NotifyDecodedFrames(1, 0, 0);
   mVideoSinkBufferCount++;
   mon.NotifyAll();
 }
 
 void GStreamerReader::NewAudioBuffer()
 {
   ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
   /* We have a new audio buffer queued in the audio sink. Increment the counter
--- a/dom/media/mediasource/SourceBufferDecoder.cpp
+++ b/dom/media/mediasource/SourceBufferDecoder.cpp
@@ -179,19 +179,20 @@ SourceBufferDecoder::OnDecodeThread() co
 
 SourceBufferResource*
 SourceBufferDecoder::GetResource() const
 {
   return static_cast<SourceBufferResource*>(mResource.get());
 }
 
 void
-SourceBufferDecoder::NotifyDecodedFrames(uint32_t aParsed, uint32_t aDecoded)
+SourceBufferDecoder::NotifyDecodedFrames(uint32_t aParsed, uint32_t aDecoded,
+                                         uint32_t aDropped)
 {
-  return mParentDecoder->NotifyDecodedFrames(aParsed, aDecoded);
+  return mParentDecoder->NotifyDecodedFrames(aParsed, aDecoded, aDropped);
 }
 
 void
 SourceBufferDecoder::SetMediaDuration(int64_t aDuration)
 {
   mMediaDuration = aDuration;
 }
 
--- a/dom/media/mediasource/SourceBufferDecoder.h
+++ b/dom/media/mediasource/SourceBufferDecoder.h
@@ -47,17 +47,17 @@ public:
   virtual MediaDecoderOwner* GetOwner() MOZ_FINAL MOZ_OVERRIDE;
   virtual SourceBufferResource* GetResource() const MOZ_FINAL MOZ_OVERRIDE;
   virtual ReentrantMonitor& GetReentrantMonitor() MOZ_FINAL MOZ_OVERRIDE;
   virtual VideoFrameContainer* GetVideoFrameContainer() MOZ_FINAL MOZ_OVERRIDE;
   virtual void MetadataLoaded(nsAutoPtr<MediaInfo> aInfo, nsAutoPtr<MetadataTags> aTags, bool aRestoredFromDormant) MOZ_FINAL MOZ_OVERRIDE;
   virtual void FirstFrameLoaded(nsAutoPtr<MediaInfo> aInfo, bool aRestoredFromDormant) MOZ_FINAL MOZ_OVERRIDE;
   virtual void NotifyBytesConsumed(int64_t aBytes, int64_t aOffset) MOZ_FINAL MOZ_OVERRIDE;
   virtual void NotifyDataArrived(const char* aBuffer, uint32_t aLength, int64_t aOffset) MOZ_FINAL MOZ_OVERRIDE;
-  virtual void NotifyDecodedFrames(uint32_t aParsed, uint32_t aDecoded) MOZ_FINAL MOZ_OVERRIDE;
+  virtual void NotifyDecodedFrames(uint32_t aParsed, uint32_t aDecoded, uint32_t aDropped) MOZ_FINAL MOZ_OVERRIDE;
   virtual void NotifyWaitingForResourcesStatusChanged() MOZ_FINAL MOZ_OVERRIDE;
   virtual void OnReadMetadataCompleted() MOZ_FINAL MOZ_OVERRIDE;
   virtual void QueueMetadata(int64_t aTime, nsAutoPtr<MediaInfo> aInfo, nsAutoPtr<MetadataTags> aTags) MOZ_FINAL MOZ_OVERRIDE;
   virtual void RemoveMediaTracks() MOZ_FINAL MOZ_OVERRIDE;
   virtual void SetMediaDuration(int64_t aDuration) MOZ_FINAL MOZ_OVERRIDE;
   virtual void SetMediaEndTime(int64_t aTime) MOZ_FINAL MOZ_OVERRIDE;
   virtual void SetMediaSeekable(bool aMediaSeekable) MOZ_FINAL MOZ_OVERRIDE;
   virtual void UpdateEstimatedMediaDuration(int64_t aDuration) MOZ_FINAL MOZ_OVERRIDE;
--- a/dom/media/omx/MediaOmxReader.cpp
+++ b/dom/media/omx/MediaOmxReader.cpp
@@ -390,17 +390,17 @@ bool MediaOmxReader::DecodeVideoFrame(bo
     doSeek = false;
     mVideoSeekTimeUs = -1;
 
     // Ignore empty buffer which stagefright media read will sporadically return
     if (frame.mSize == 0 && !frame.mGraphicBuffer) {
       continue;
     }
 
-    parsed++;
+    a.mParsed++;
     if (frame.mShouldSkip && mSkipCount < MAX_DROPPED_FRAMES) {
       mSkipCount++;
       continue;
     }
 
     mSkipCount = 0;
 
     aKeyframeSkip = false;
@@ -468,17 +468,17 @@ bool MediaOmxReader::DecodeVideoFrame(bo
     }
 
     if (!v) {
       NS_WARNING("Unable to create VideoData");
       return false;
     }
 
     a.mDecoded++;
-    NS_ASSERTION(decoded <= parsed, "Expect to decode fewer frames than parsed in OMX decoder...");
+    NS_ASSERTION(a.mDecoded <= a.mParsed, "Expect to decode fewer frames than parsed in OMX decoder...");
 
     mVideoQueue.Push(v);
 
     break;
   }
 
   return true;
 }
--- a/dom/media/webaudio/BufferDecoder.cpp
+++ b/dom/media/webaudio/BufferDecoder.cpp
@@ -78,17 +78,18 @@ BufferDecoder::GetResource() const
 
 void
 BufferDecoder::NotifyBytesConsumed(int64_t aBytes, int64_t aOffset)
 {
   // ignore
 }
 
 void
-BufferDecoder::NotifyDecodedFrames(uint32_t aParsed, uint32_t aDecoded)
+BufferDecoder::NotifyDecodedFrames(uint32_t aParsed, uint32_t aDecoded,
+                                   uint32_t aDropped)
 {
   // ignore
 }
 
 int64_t
 BufferDecoder::GetMediaDuration()
 {
   // unknown
--- a/dom/media/webaudio/BufferDecoder.h
+++ b/dom/media/webaudio/BufferDecoder.h
@@ -37,17 +37,18 @@ public:
   virtual bool OnStateMachineThread() const MOZ_FINAL MOZ_OVERRIDE;
 
   virtual bool OnDecodeThread() const MOZ_FINAL MOZ_OVERRIDE;
 
   virtual MediaResource* GetResource() const MOZ_FINAL MOZ_OVERRIDE;
 
   virtual void NotifyBytesConsumed(int64_t aBytes, int64_t aOffset) MOZ_FINAL MOZ_OVERRIDE;
 
-  virtual void NotifyDecodedFrames(uint32_t aParsed, uint32_t aDecoded) MOZ_FINAL MOZ_OVERRIDE;
+  virtual void NotifyDecodedFrames(uint32_t aParsed, uint32_t aDecoded,
+                                   uint32_t aDropped) MOZ_FINAL MOZ_OVERRIDE;
 
   virtual int64_t GetMediaDuration() MOZ_FINAL MOZ_OVERRIDE;
 
   virtual void SetMediaDuration(int64_t aDuration) MOZ_FINAL MOZ_OVERRIDE;
 
   virtual void UpdateEstimatedMediaDuration(int64_t aDuration) MOZ_FINAL MOZ_OVERRIDE;
 
   virtual void SetMediaSeekable(bool aMediaSeekable) MOZ_FINAL MOZ_OVERRIDE;
--- a/dom/media/webm/IntelWebMVideoDecoder.cpp
+++ b/dom/media/webm/IntelWebMVideoDecoder.cpp
@@ -337,21 +337,22 @@ bool
 IntelWebMVideoDecoder::DecodeVideoFrame(bool& aKeyframeSkip,
                                         int64_t aTimeThreshold)
 {
   AbstractMediaDecoder::AutoNotifyDecoded a(mReader->GetDecoder());
 
   MOZ_ASSERT(mPlatform && mReader->GetDecoder());
 
   if (aKeyframeSkip) {
-    bool ok = SkipVideoDemuxToNextKeyFrame(aTimeThreshold, a.mParsed);
+    bool ok = SkipVideoDemuxToNextKeyFrame(aTimeThreshold, a.mDropped);
     if (!ok) {
       NS_WARNING("Failed to skip demux up to next keyframe");
       return false;
     }
+    a.mParsed = a.mDropped;
     aKeyframeSkip = false;
     nsresult rv = mMediaDataDecoder->Flush();
     NS_ENSURE_SUCCESS(rv, false);
   }
 
   NS_ASSERTION(mReader->GetDecoder()->OnDecodeThread(), "Should be on decode thread.");
   bool rv = Decode();
   {
--- a/dom/media/webm/SoftwareWebMVideoDecoder.cpp
+++ b/dom/media/webm/SoftwareWebMVideoDecoder.cpp
@@ -138,32 +138,34 @@ SoftwareWebMVideoDecoder::DecodeVideoFra
     if (mReader->GetVideoCodec() == NESTEGG_CODEC_VP8) {
       vpx_codec_peek_stream_info(vpx_codec_vp8_dx(), data, length, &si);
     } else if (mReader->GetVideoCodec() == NESTEGG_CODEC_VP9) {
       vpx_codec_peek_stream_info(vpx_codec_vp9_dx(), data, length, &si);
     }
     if (aKeyframeSkip && (!si.is_kf || tstamp_usecs < aTimeThreshold)) {
       // Skipping to next keyframe...
       a.mParsed++; // Assume 1 frame per chunk.
+      a.mDropped++;
       continue;
     }
 
     if (aKeyframeSkip && si.is_kf) {
       aKeyframeSkip = false;
     }
 
     if (vpx_codec_decode(&mVPX, data, length, nullptr, 0)) {
       return false;
     }
 
     // If the timestamp of the video frame is less than
     // the time threshold required then it is not added
     // to the video queue and won't be displayed.
     if (tstamp_usecs < aTimeThreshold) {
       a.mParsed++; // Assume 1 frame per chunk.
+      a.mDropped++;
       continue;
     }
 
     vpx_codec_iter_t  iter = nullptr;
     vpx_image_t      *img;
 
     while ((img = vpx_codec_get_frame(&mVPX, &iter))) {
       NS_ASSERTION(img->fmt == VPX_IMG_FMT_I420, "WebM image format not I420");