Bug 1289668 - Refactor FrameStatistics writers to use Data struct - r=kamidphish
author: Gerald Squelart <gsquelart@mozilla.com>
Mon, 18 Jul 2016 10:41:40 +1000
changeset 347097 12a97f1e4297f74a5a82e54dacefc05c3d58bed1
parent 347096 98c9b3317cbfc02821d096a7e44303da625e59be
child 347098 fff6018d7bba1434d86fd42265569118e19387f7
push id: 6389
push user: raliiev@mozilla.com
push date: Mon, 19 Sep 2016 13:38:22 +0000
treeherder: mozilla-beta@01d67bfe6c81 [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: kamidphish
bugs: 1289668
milestone: 50.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1289668 - Refactor FrameStatistics writers to use Data struct - r=kamidphish Decoders now use FrameStatisticsData to gather data for their frame-related notifications. This will ease introducing new members later on. MozReview-Commit-ID: DWdOSPX3JM
dom/media/AbstractMediaDecoder.h
dom/media/FrameStatistics.h
dom/media/MediaDecoder.h
dom/media/MediaFormatReader.cpp
dom/media/android/AndroidMediaReader.cpp
dom/media/mediasink/VideoSink.cpp
dom/media/ogg/OggReader.cpp
dom/media/omx/MediaOmxReader.cpp
dom/media/raw/RawReader.cpp
dom/media/webaudio/BufferDecoder.cpp
dom/media/webaudio/BufferDecoder.h
--- a/dom/media/AbstractMediaDecoder.h
+++ b/dom/media/AbstractMediaDecoder.h
@@ -5,16 +5,17 @@
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef AbstractMediaDecoder_h_
 #define AbstractMediaDecoder_h_
 
 #include "mozilla/Attributes.h"
 #include "mozilla/StateMirroring.h"
 
+#include "FrameStatistics.h"
 #include "MediaEventSource.h"
 #include "MediaInfo.h"
 #include "nsISupports.h"
 #include "nsDataHashtable.h"
 #include "nsThreadUtils.h"
 
 class GMPCrashHelper;
 
@@ -52,18 +53,17 @@ public:
 
   // Get the current MediaResource being used. Its URI will be returned
   // by currentSrc. Returns what was passed to Load(), if Load() has been called.
   virtual MediaResource* GetResource() const = 0;
 
   // Increments the parsed, decoded and dropped frame counters by the passed in
   // counts.
   // Can be called on any thread.
-  virtual void NotifyDecodedFrames(uint32_t aParsed, uint32_t aDecoded,
-                                   uint32_t aDropped) = 0;
+  virtual void NotifyDecodedFrames(const FrameStatisticsData& aStats) = 0;
 
   virtual AbstractCanonical<media::NullableTimeUnit>* CanonicalDurationOrNull() { return nullptr; };
 
   // Return an event that will be notified when data arrives in MediaResource.
   // MediaDecoderReader will register with this event to receive notifications
   // in order to update buffer ranges.
   // Return null if this decoder doesn't support the event.
   virtual MediaEventSource<void>* DataArrivedEvent()
@@ -94,25 +94,25 @@ public:
   virtual already_AddRefed<GMPCrashHelper> GetCrashHelper() { return nullptr; }
 
   // Stack based class to assist in notifying the frame statistics of
   // parsed and decoded frames. Use inside video demux & decode functions
   // to ensure all parsed and decoded frames are reported on all return paths.
   class AutoNotifyDecoded {
   public:
     explicit AutoNotifyDecoded(AbstractMediaDecoder* aDecoder)
-      : mParsed(0), mDecoded(0), mDropped(0), mDecoder(aDecoder) {}
+      : mDecoder(aDecoder)
+    {}
     ~AutoNotifyDecoded() {
       if (mDecoder) {
-        mDecoder->NotifyDecodedFrames(mParsed, mDecoded, mDropped);
+        mDecoder->NotifyDecodedFrames(mStats);
       }
     }
-    uint32_t mParsed;
-    uint32_t mDecoded;
-    uint32_t mDropped;
+
+    FrameStatisticsData mStats;
 
   private:
     AbstractMediaDecoder* mDecoder;
   };
 
   // Classes directly inheriting from AbstractMediaDecoder do not support
   // Observe and it should never be called directly.
   NS_IMETHOD Observe(nsISupports *aSubject, const char * aTopic, const char16_t * aData) override
--- a/dom/media/FrameStatistics.h
+++ b/dom/media/FrameStatistics.h
@@ -2,16 +2,18 @@
 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef FrameStatistics_h_
 #define FrameStatistics_h_
 
+#include "mozilla/ReentrantMonitor.h"
+
 namespace mozilla {
 
 struct FrameStatisticsData
 {
   // Number of frames parsed and demuxed from media.
   // Access protected by mReentrantMonitor.
   uint64_t mParsedFrames = 0;
 
@@ -21,16 +23,32 @@ struct FrameStatisticsData
 
   // Number of decoded frames which were actually sent down the rendering
   // pipeline to be painted ("presented"). Access protected by mReentrantMonitor.
   uint64_t mPresentedFrames = 0;
 
   // Number of frames that have been skipped because they have missed their
   // composition deadline.
   uint64_t mDroppedFrames = 0;
+
+  FrameStatisticsData() = default;
+  FrameStatisticsData(uint64_t aParsed, uint64_t aDecoded, uint64_t aDropped)
+    : mParsedFrames(aParsed)
+    , mDecodedFrames(aDecoded)
+    , mDroppedFrames(aDropped)
+  {}
+
+  void
+  Accumulate(const FrameStatisticsData& aStats)
+  {
+    mParsedFrames += aStats.mParsedFrames;
+    mDecodedFrames += aStats.mDecodedFrames;
+    mPresentedFrames += aStats.mPresentedFrames;
+    mDroppedFrames += aStats.mDroppedFrames;
+  }
 };
 
 // Frame decoding/painting related performance counters.
 // Threadsafe.
 class FrameStatistics
 {
 public:
   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(FrameStatistics);
@@ -77,26 +95,20 @@ public:
   uint64_t GetDroppedFrames() const
   {
     ReentrantMonitorAutoEnter mon(mReentrantMonitor);
     return mFrameStatisticsData.mDroppedFrames;
   }
 
   // Increments the parsed and decoded frame counters by the passed in counts.
   // Can be called on any thread.
-  void NotifyDecodedFrames(uint64_t aParsed, uint64_t aDecoded,
-                           uint64_t aDropped)
+  void NotifyDecodedFrames(const FrameStatisticsData& aStats)
   {
-    if (aParsed == 0 && aDecoded == 0 && aDropped == 0) {
-      return;
-    }
     ReentrantMonitorAutoEnter mon(mReentrantMonitor);
-    mFrameStatisticsData.mParsedFrames += aParsed;
-    mFrameStatisticsData.mDecodedFrames += aDecoded;
-    mFrameStatisticsData.mDroppedFrames += aDropped;
+    mFrameStatisticsData.Accumulate(aStats);
   }
 
   // Increments the presented frame counters.
   // Can be called on any thread.
   void NotifyPresentedFrame()
   {
     ReentrantMonitorAutoEnter mon(mReentrantMonitor);
     ++mFrameStatisticsData.mPresentedFrames;
--- a/dom/media/MediaDecoder.h
+++ b/dom/media/MediaDecoder.h
@@ -22,17 +22,16 @@
 #include "necko-config.h"
 #include "nsAutoPtr.h"
 #include "nsCOMPtr.h"
 #include "nsIObserver.h"
 #include "nsISupports.h"
 #include "nsITimer.h"
 
 #include "AbstractMediaDecoder.h"
-#include "FrameStatistics.h"
 #include "MediaDecoderOwner.h"
 #include "MediaEventSource.h"
 #include "MediaMetadataManager.h"
 #include "MediaResource.h"
 #include "MediaResourceCallback.h"
 #include "MediaStatistics.h"
 #include "MediaStreamGraph.h"
 #include "TimeUnits.h"
@@ -479,20 +478,19 @@ private:
   // at any time.
   MediaStatistics GetStatistics();
 
   // Return the frame decode/paint related statistics.
   FrameStatistics& GetFrameStatistics() { return *mFrameStats; }
 
   // Increments the parsed and decoded frame counters by the passed in counts.
   // Can be called on any thread.
-  virtual void NotifyDecodedFrames(uint32_t aParsed, uint32_t aDecoded,
-                                   uint32_t aDropped) override
+  virtual void NotifyDecodedFrames(const FrameStatisticsData& aStats) override
   {
-    GetFrameStatistics().NotifyDecodedFrames(aParsed, aDecoded, aDropped);
+    GetFrameStatistics().NotifyDecodedFrames(aStats);
   }
 
   void UpdateReadyState()
   {
     MOZ_ASSERT(NS_IsMainThread());
     if (!IsShutdown()) {
       mOwner->UpdateReadyState();
     }
--- a/dom/media/MediaFormatReader.cpp
+++ b/dom/media/MediaFormatReader.cpp
@@ -1010,17 +1010,17 @@ MediaFormatReader::HandleDemuxedSamples(
     }
 
     LOGV("Input:%lld (dts:%lld kf:%d)",
          sample->mTime, sample->mTimecode, sample->mKeyframe);
     decoder.mOutputRequested = true;
     decoder.mNumSamplesInput++;
     decoder.mSizeOfQueue++;
     if (aTrack == TrackInfo::kVideoTrack) {
-      aA.mParsed++;
+      aA.mStats.mParsedFrames++;
     }
 
     if (mDemuxOnly) {
       ReturnOutput(sample, aTrack);
     } else if (!DecodeDemuxedSamples(aTrack, sample)) {
       NotifyError(aTrack);
       return;
     }
@@ -1187,17 +1187,17 @@ MediaFormatReader::Update(TrackType aTra
         Some(TimeInterval(TimeUnit::FromMicroseconds(output->mTime),
                           TimeUnit::FromMicroseconds(output->GetEndTime())));
       decoder.mNumSamplesOutputTotal++;
       ReturnOutput(output, aTrack);
       // We have a decoded sample ready to be returned.
       if (aTrack == TrackType::kVideoTrack) {
         uint64_t delta =
           decoder.mNumSamplesOutputTotal - mLastReportedNumDecodedFrames;
-        a.mDecoded = static_cast<uint32_t>(delta);
+        a.mStats.mDecodedFrames = static_cast<uint32_t>(delta);
         mLastReportedNumDecodedFrames = decoder.mNumSamplesOutputTotal;
         nsCString error;
         mVideo.mIsHardwareAccelerated =
           mVideo.mDecoder && mVideo.mDecoder->IsHardwareAccelerated(error);
       }
     } else if (decoder.HasFatalError()) {
       LOG("Rejecting %s promise: DECODE_ERROR", TrackTypeToStr(aTrack));
       decoder.RejectPromise(DECODE_ERROR, __func__);
@@ -1486,17 +1486,17 @@ MediaFormatReader::DropDecodedSamples(Tr
     if (time >= decoder.mTimeThreshold.ref().Time()) {
       // We would have reached our internal seek target.
       decoder.mTimeThreshold.reset();
     }
   }
   decoder.mOutput.Clear();
   decoder.mSizeOfQueue -= lengthDecodedQueue;
   if (aTrack == TrackInfo::kVideoTrack && mDecoder) {
-    mDecoder->NotifyDecodedFrames(0, 0, lengthDecodedQueue);
+    mDecoder->NotifyDecodedFrames({ 0, 0, lengthDecodedQueue });
   }
 }
 
 void
 MediaFormatReader::SkipVideoDemuxToNextKeyFrame(media::TimeUnit aTimeThreshold)
 {
   MOZ_ASSERT(OnTaskQueue());
   LOG("Skipping up to %lld", aTimeThreshold.ToMicroseconds());
@@ -1522,25 +1522,25 @@ MediaFormatReader::VideoSkipReset(uint32
 {
   MOZ_ASSERT(OnTaskQueue());
 
   // Some frames may have been output by the decoder since we initiated the
   // videoskip process and we know they would be late.
   DropDecodedSamples(TrackInfo::kVideoTrack);
   // Report the pending frames as dropped.
   if (mDecoder) {
-    mDecoder->NotifyDecodedFrames(0, 0, SizeOfVideoQueueInFrames());
+    mDecoder->NotifyDecodedFrames({ 0, 0, SizeOfVideoQueueInFrames() });
   }
 
   // Cancel any pending demux request and pending demuxed samples.
   mVideo.mDemuxRequest.DisconnectIfExists();
   Reset(TrackType::kVideoTrack);
 
   if (mDecoder) {
-    mDecoder->NotifyDecodedFrames(aSkipped, 0, aSkipped);
+    mDecoder->NotifyDecodedFrames({ aSkipped, 0, aSkipped });
   }
 
   mVideo.mNumSamplesSkippedTotal += aSkipped;
 }
 
 void
 MediaFormatReader::OnVideoSkipCompleted(uint32_t aSkipped)
 {
--- a/dom/media/android/AndroidMediaReader.cpp
+++ b/dom/media/android/AndroidMediaReader.cpp
@@ -150,18 +150,18 @@ bool AndroidMediaReader::DecodeVideoFram
     mVideoSeekTimeUs = -1;
 
     if (aKeyframeSkip) {
       // Disable keyframe skipping for now as
       // stagefright doesn't seem to be telling us
       // when a frame is a keyframe.
 #if 0
       if (!frame.mKeyFrame) {
-        ++a.mParsed;
-        ++a.mDropped;
+        ++a.mStats.mParsedFrames;
+        ++a.mStats.mDroppedFrames;
         continue;
       }
 #endif
       aKeyframeSkip = false;
     }
 
     if (frame.mSize == 0)
       return true;
@@ -239,19 +239,19 @@ bool AndroidMediaReader::DecodeVideoFram
                             frame.mKeyFrame,
                             -1,
                             picture);
     }
 
     if (!v) {
       return false;
     }
-    a.mParsed++;
-    a.mDecoded++;
-    NS_ASSERTION(a.mDecoded <= a.mParsed, "Expect to decode fewer frames than parsed in AndroidMedia...");
+    a.mStats.mParsedFrames++;
+    a.mStats.mDecodedFrames++;
+    NS_ASSERTION(a.mStats.mDecodedFrames <= a.mStats.mParsedFrames, "Expect to decode fewer frames than parsed in AndroidMedia...");
 
     // Since MPAPI doesn't give us the end time of frames, we keep one frame
     // buffered in AndroidMediaReader and push it into the queue as soon
     // we read the following frame so we can use that frame's start time as
     // the end time of the buffered frame.
     if (!mLastVideoFrame) {
       mLastVideoFrame = v;
       continue;
--- a/dom/media/mediasink/VideoSink.cpp
+++ b/dom/media/mediasink/VideoSink.cpp
@@ -401,17 +401,17 @@ VideoSink::UpdateRenderedVideoFrames()
     while (VideoQueue().GetSize() > 0) {
       RefPtr<MediaData> nextFrame = VideoQueue().PeekFront();
       if (nextFrame->mTime > clockTime) {
         remainingTime = nextFrame->mTime - clockTime;
         break;
       }
       ++framesRemoved;
       if (!currentFrame->As<VideoData>()->mSentToCompositor) {
-        mFrameStats.NotifyDecodedFrames(0, 0, 1);
+        mFrameStats.NotifyDecodedFrames({ 0, 0, 1 });
         VSINK_LOG_V("discarding video frame mTime=%lld clock_time=%lld",
                     currentFrame->mTime, clockTime);
       }
       currentFrame = VideoQueue().PopFront();
     }
     VideoQueue().PushFront(currentFrame);
     if (framesRemoved > 0) {
       mVideoFrameEndTime = currentFrame->GetEndTime();
--- a/dom/media/ogg/OggReader.cpp
+++ b/dom/media/ogg/OggReader.cpp
@@ -916,27 +916,27 @@ bool OggReader::DecodeVideoFrame(bool &a
     }
     packet = NextOggPacket(mTheoraState);
   } while (packet && mTheoraState->IsHeader(packet));
   if (!packet) {
     return false;
   }
   nsAutoRef<ogg_packet> autoRelease(packet);
 
-  a.mParsed++;
+  a.mStats.mParsedFrames++;
   NS_ASSERTION(packet && packet->granulepos != -1,
                 "Must know first packet's granulepos");
   bool eos = packet->e_o_s;
   int64_t frameEndTime = mTheoraState->Time(packet->granulepos);
   if (!aKeyframeSkip ||
      (th_packet_iskeyframe(packet) && frameEndTime >= aTimeThreshold))
   {
     aKeyframeSkip = false;
     nsresult res = DecodeTheora(packet, aTimeThreshold);
-    a.mDecoded++;
+    a.mStats.mDecodedFrames++;
     if (NS_FAILED(res)) {
       return false;
     }
   }
 
   if (eos) {
     // We've encountered an end of bitstream packet. Inform the queue that
     // there will be no more frames.
--- a/dom/media/omx/MediaOmxReader.cpp
+++ b/dom/media/omx/MediaOmxReader.cpp
@@ -352,17 +352,17 @@ bool MediaOmxReader::DecodeVideoFrame(bo
     doSeek = false;
     mVideoSeekTimeUs = -1;
 
     // Ignore empty buffer which stagefright media read will sporadically return
     if (frame.mSize == 0 && !frame.mGraphicBuffer) {
       continue;
     }
 
-    a.mParsed++;
+    a.mStats.mParsedFrames++;
     if (frame.mShouldSkip && mSkipCount < MAX_DROPPED_FRAMES) {
       mSkipCount++;
       continue;
     }
 
     mSkipCount = 0;
 
     aKeyframeSkip = false;
@@ -429,18 +429,18 @@ bool MediaOmxReader::DecodeVideoFrame(bo
                             picture);
     }
 
     if (!v) {
       NS_WARNING("Unable to create VideoData");
       return false;
     }
 
-    a.mDecoded++;
-    NS_ASSERTION(a.mDecoded <= a.mParsed, "Expect to decode fewer frames than parsed in OMX decoder...");
+    a.mStats.mDecodedFrames++;
+    NS_ASSERTION(a.mStats.mDecodedFrames <= a.mStats.mParsedFrames, "Expect to decode fewer frames than parsed in OMX decoder...");
 
     mVideoQueue.Push(v);
 
     break;
   }
 
   return true;
 }
--- a/dom/media/raw/RawReader.cpp
+++ b/dom/media/raw/RawReader.cpp
@@ -151,17 +151,17 @@ bool RawReader::DecodeVideoFrame(bool &a
         !(header.packetID == 0xFF && header.codecID == RAW_ID /* "YUV" */)) {
       return false;
     }
 
     if (!ReadFromResource(buffer.get(), length)) {
       return false;
     }
 
-    a.mParsed++;
+    a.mStats.mParsedFrames++;
 
     if (currentFrameTime >= aTimeThreshold)
       break;
 
     mCurrentFrame++;
     currentFrameTime += static_cast<double>(USECS_PER_S) / mFrameRate;
   }
 
@@ -195,17 +195,17 @@ bool RawReader::DecodeVideoFrame(bool &a
                                             1, // In raw video every frame is a keyframe
                                             -1,
                                             mPicture);
   if (!v)
     return false;
 
   mVideoQueue.Push(v);
   mCurrentFrame++;
-  a.mDecoded++;
+  a.mStats.mDecodedFrames++;
 
   return true;
 }
 
 RefPtr<MediaDecoderReader::SeekPromise>
 RawReader::Seek(SeekTarget aTarget, int64_t aEndTime)
 {
   MOZ_ASSERT(OnTaskQueue());
--- a/dom/media/webaudio/BufferDecoder.cpp
+++ b/dom/media/webaudio/BufferDecoder.cpp
@@ -37,18 +37,17 @@ BufferDecoder::BeginDecoding(TaskQueue* 
 
 MediaResource*
 BufferDecoder::GetResource() const
 {
   return mResource;
 }
 
 void
-BufferDecoder::NotifyDecodedFrames(uint32_t aParsed, uint32_t aDecoded,
-                                   uint32_t aDropped)
+BufferDecoder::NotifyDecodedFrames(const FrameStatisticsData& aStats)
 {
   // ignore
 }
 
 VideoFrameContainer*
 BufferDecoder::GetVideoFrameContainer()
 {
   // no video frame
--- a/dom/media/webaudio/BufferDecoder.h
+++ b/dom/media/webaudio/BufferDecoder.h
@@ -28,18 +28,17 @@ public:
 
   NS_DECL_THREADSAFE_ISUPPORTS
 
   // This has to be called before decoding begins
   void BeginDecoding(TaskQueue* aTaskQueueIdentity);
 
   MediaResource* GetResource() const final override;
 
-  void NotifyDecodedFrames(uint32_t aParsed, uint32_t aDecoded,
-                           uint32_t aDropped) final override;
+  void NotifyDecodedFrames(const FrameStatisticsData& aStats) final override;
 
   VideoFrameContainer* GetVideoFrameContainer() final override;
   layers::ImageContainer* GetImageContainer() final override;
 
   MediaDecoderOwner* GetOwner() final override;
 
   already_AddRefed<GMPCrashHelper> GetCrashHelper() override;