Bug 1138253 - Clean up AutoNotifyDecoded; r=cpearce
author Anthony Jones <ajones@mozilla.com>
Tue, 03 Mar 2015 17:46:46 +1300
changeset 231527 743bc389a2cee853ef6af8ea74a758b34f52ff3f
parent 231526 d08d7a1a55bae44082e7e7218338099abff07549
child 231528 0d806ade061f74ab6e32539330ce060af849511d
push id 28353
push user cbook@mozilla.com
push date Tue, 03 Mar 2015 12:54:59 +0000
treeherder mozilla-central@985070813323 [default view] [failures only]
perfherder [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers cpearce
bugs 1138253
milestone 39.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1138253 - Clean up AutoNotifyDecoded; r=cpearce
dom/media/AbstractMediaDecoder.h
dom/media/android/AndroidMediaReader.cpp
dom/media/fmp4/MP4Reader.cpp
dom/media/ogg/OggReader.cpp
dom/media/omx/MediaOmxReader.cpp
dom/media/raw/RawReader.cpp
dom/media/webm/IntelWebMVideoDecoder.cpp
dom/media/webm/SoftwareWebMVideoDecoder.cpp
dom/media/wmf/WMFReader.cpp
--- a/dom/media/AbstractMediaDecoder.h
+++ b/dom/media/AbstractMediaDecoder.h
@@ -132,27 +132,28 @@ public:
   // on the resource length to limit reads.
   virtual bool HasInitializationData() { return false; }
 
   // Stack based class to assist in notifying the frame statistics of
   // parsed and decoded frames. Use inside video demux & decode functions
   // to ensure all parsed and decoded frames are reported on all return paths.
   class AutoNotifyDecoded {
   public:
-    AutoNotifyDecoded(AbstractMediaDecoder* aDecoder, uint32_t& aParsed, uint32_t& aDecoded)
-      : mDecoder(aDecoder), mParsed(aParsed), mDecoded(aDecoded) {}
+    explicit AutoNotifyDecoded(AbstractMediaDecoder* aDecoder)
+      : mParsed(0), mDecoded(0), mDecoder(aDecoder) {}
     ~AutoNotifyDecoded() {
       if (mDecoder) {
         mDecoder->NotifyDecodedFrames(mParsed, mDecoded);
       }
     }
+    uint32_t mParsed;
+    uint32_t mDecoded;
+
   private:
     AbstractMediaDecoder* mDecoder;
-    uint32_t& mParsed;
-    uint32_t& mDecoded;
   };
 
 #ifdef MOZ_EME
   virtual nsresult SetCDMProxy(CDMProxy* aProxy) { return NS_ERROR_NOT_IMPLEMENTED; }
   virtual CDMProxy* GetCDMProxy() { return nullptr; }
 #endif
 };
 
--- a/dom/media/android/AndroidMediaReader.cpp
+++ b/dom/media/android/AndroidMediaReader.cpp
@@ -120,18 +120,17 @@ nsresult AndroidMediaReader::ResetDecode
   return MediaDecoderReader::ResetDecode();
 }
 
 bool AndroidMediaReader::DecodeVideoFrame(bool &aKeyframeSkip,
                                           int64_t aTimeThreshold)
 {
   // Record number of frames decoded and parsed. Automatically update the
   // stats counters using the AutoNotifyDecoded stack-based class.
-  uint32_t parsed = 0, decoded = 0;
-  AbstractMediaDecoder::AutoNotifyDecoded autoNotify(mDecoder, parsed, decoded);
+  AbstractMediaDecoder::AutoNotifyDecoded a(mDecoder);
 
   // Throw away the currently buffered frame if we are seeking.
   if (mLastVideoFrame && mVideoSeekTimeUs != -1) {
     mLastVideoFrame = nullptr;
   }
 
   ImageBufferCallback bufferCallback(mDecoder->GetImageContainer());
   nsRefPtr<Image> currentImage;
@@ -157,17 +156,17 @@ bool AndroidMediaReader::DecodeVideoFram
     mVideoSeekTimeUs = -1;
 
     if (aKeyframeSkip) {
       // Disable keyframe skipping for now as
       // stagefright doesn't seem to be telling us
       // when a frame is a keyframe.
 #if 0
       if (!frame.mKeyFrame) {
-        ++parsed;
+        ++a.mParsed;
         continue;
       }
 #endif
       aKeyframeSkip = false;
     }
 
     if (frame.mSize == 0)
       return true;
@@ -245,18 +244,18 @@ bool AndroidMediaReader::DecodeVideoFram
                             frame.mKeyFrame,
                             -1,
                             picture);
     }
 
     if (!v) {
       return false;
     }
-    parsed++;
-    decoded++;
+    a.mParsed++;
+    a.mDecoded++;
-    NS_ASSERTION(decoded <= parsed, "Expect to decode fewer frames than parsed in AndroidMedia...");
+    NS_ASSERTION(a.mDecoded <= a.mParsed, "Expect to decode fewer frames than parsed in AndroidMedia...");
 
     // Since MPAPI doesn't give us the end time of frames, we keep one frame
     // buffered in AndroidMediaReader and push it into the queue as soon
     // we read the following frame so we can use that frame's start time as
     // the end time of the buffered frame.
     if (!mLastVideoFrame) {
       mLastVideoFrame = v;
--- a/dom/media/fmp4/MP4Reader.cpp
+++ b/dom/media/fmp4/MP4Reader.cpp
@@ -667,33 +667,32 @@ MP4Reader::Update(TrackType aTrack)
   MOZ_ASSERT(GetTaskQueue()->IsCurrentThreadIn());
 
   if (mShutdown) {
     return;
   }
 
   // Record number of frames decoded and parsed. Automatically update the
   // stats counters using the AutoNotifyDecoded stack-based class.
-  uint32_t parsed = 0, decoded = 0;
-  AbstractMediaDecoder::AutoNotifyDecoded autoNotify(mDecoder, parsed, decoded);
+  AbstractMediaDecoder::AutoNotifyDecoded a(mDecoder);
 
   bool needInput = false;
   bool needOutput = false;
   auto& decoder = GetDecoderData(aTrack);
   {
     MonitorAutoLock lock(decoder.mMonitor);
     decoder.mUpdateScheduled = false;
     if (NeedInput(decoder)) {
       needInput = true;
       decoder.mInputExhausted = false;
       decoder.mNumSamplesInput++;
     }
     if (aTrack == kVideo) {
       uint64_t delta = decoder.mNumSamplesOutput - mLastReportedNumDecodedFrames;
-      decoded = static_cast<uint32_t>(delta);
+      a.mDecoded = static_cast<uint32_t>(delta);
       mLastReportedNumDecodedFrames = decoder.mNumSamplesOutput;
     }
     if (decoder.HasPromise()) {
       needOutput = true;
       if (!decoder.mOutput.IsEmpty()) {
         nsRefPtr<MediaData> output = decoder.mOutput[0];
         decoder.mOutput.RemoveElementAt(0);
         ReturnOutput(output, aTrack);
@@ -717,17 +716,17 @@ MP4Reader::Update(TrackType aTrack)
     if (sample && !mFoundSPSForTelemetry && AnnexB::HasSPS(sample)) {
       nsRefPtr<ByteBuffer> extradata = AnnexB::ExtractExtraData(sample);
       mFoundSPSForTelemetry = AccumulateSPSTelemetry(extradata);
     }
 
     if (sample) {
       decoder.mDecoder->Input(sample);
       if (aTrack == kVideo) {
-        parsed++;
+        a.mParsed++;
       }
     } else {
       {
         MonitorAutoLock lock(decoder.mMonitor);
         MOZ_ASSERT(!decoder.mDemuxEOS);
         decoder.mDemuxEOS = true;
       }
       // DrainComplete takes care of reporting EOS upwards
--- a/dom/media/ogg/OggReader.cpp
+++ b/dom/media/ogg/OggReader.cpp
@@ -894,43 +894,42 @@ nsresult OggReader::DecodeTheora(ogg_pac
 
 bool OggReader::DecodeVideoFrame(bool &aKeyframeSkip,
                                      int64_t aTimeThreshold)
 {
   NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
 
   // Record number of frames decoded and parsed. Automatically update the
   // stats counters using the AutoNotifyDecoded stack-based class.
-  uint32_t parsed = 0, decoded = 0;
-  AbstractMediaDecoder::AutoNotifyDecoded autoNotify(mDecoder, parsed, decoded);
+  AbstractMediaDecoder::AutoNotifyDecoded a(mDecoder);
 
   // Read the next data packet. Skip any non-data packets we encounter.
   ogg_packet* packet = 0;
   do {
     if (packet) {
       OggCodecState::ReleasePacket(packet);
     }
     packet = NextOggPacket(mTheoraState);
   } while (packet && mTheoraState->IsHeader(packet));
   if (!packet) {
     return false;
   }
   nsAutoRef<ogg_packet> autoRelease(packet);
 
-  parsed++;
+  a.mParsed++;
   NS_ASSERTION(packet && packet->granulepos != -1,
                 "Must know first packet's granulepos");
   bool eos = packet->e_o_s;
   int64_t frameEndTime = mTheoraState->Time(packet->granulepos);
   if (!aKeyframeSkip ||
      (th_packet_iskeyframe(packet) && frameEndTime >= aTimeThreshold))
   {
     aKeyframeSkip = false;
     nsresult res = DecodeTheora(packet, aTimeThreshold);
-    decoded++;
+    a.mDecoded++;
     if (NS_FAILED(res)) {
       return false;
     }
   }
 
   if (eos) {
     // We've encountered an end of bitstream packet. Inform the queue that
     // there will be no more frames.
--- a/dom/media/omx/MediaOmxReader.cpp
+++ b/dom/media/omx/MediaOmxReader.cpp
@@ -365,18 +365,17 @@ MediaOmxReader::IsMediaSeekable()
 bool MediaOmxReader::DecodeVideoFrame(bool &aKeyframeSkip,
                                       int64_t aTimeThreshold)
 {
   NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
   EnsureActive();
 
   // Record number of frames decoded and parsed. Automatically update the
   // stats counters using the AutoNotifyDecoded stack-based class.
-  uint32_t parsed = 0, decoded = 0;
-  AbstractMediaDecoder::AutoNotifyDecoded autoNotify(mDecoder, parsed, decoded);
+  AbstractMediaDecoder::AutoNotifyDecoded a(mDecoder);
 
   bool doSeek = mVideoSeekTimeUs != -1;
   if (doSeek) {
     aTimeThreshold = mVideoSeekTimeUs;
   }
 
   TimeStamp start = TimeStamp::Now();
 
@@ -468,17 +467,17 @@ bool MediaOmxReader::DecodeVideoFrame(bo
                             picture);
     }
 
     if (!v) {
       NS_WARNING("Unable to create VideoData");
       return false;
     }
 
-    decoded++;
+    a.mDecoded++;
-    NS_ASSERTION(decoded <= parsed, "Expect to decode fewer frames than parsed in OMX decoder...");
+    NS_ASSERTION(a.mDecoded <= a.mParsed, "Expect to decode fewer frames than parsed in OMX decoder...");
 
     mVideoQueue.Push(v);
 
     break;
   }
 
   return true;
--- a/dom/media/raw/RawReader.cpp
+++ b/dom/media/raw/RawReader.cpp
@@ -152,18 +152,17 @@ bool RawReader::ReadFromResource(MediaRe
 bool RawReader::DecodeVideoFrame(bool &aKeyframeSkip,
                                      int64_t aTimeThreshold)
 {
   NS_ASSERTION(mDecoder->OnDecodeThread(),
                "Should be on decode thread.");
 
   // Record number of frames decoded and parsed. Automatically update the
   // stats counters using the AutoNotifyDecoded stack-based class.
-  uint32_t parsed = 0, decoded = 0;
-  AbstractMediaDecoder::AutoNotifyDecoded autoNotify(mDecoder, parsed, decoded);
+  AbstractMediaDecoder::AutoNotifyDecoded a(mDecoder);
 
   if (!mFrameSize)
     return false; // Metadata read failed.  We should refuse to play.
 
   int64_t currentFrameTime = USECS_PER_S * mCurrentFrame / mFrameRate;
   uint32_t length = mFrameSize - sizeof(RawPacketHeader);
 
   nsAutoArrayPtr<uint8_t> buffer(new uint8_t[length]);
@@ -180,17 +179,17 @@ bool RawReader::DecodeVideoFrame(bool &a
         !(header.packetID == 0xFF && header.codecID == RAW_ID /* "YUV" */)) {
       return false;
     }
 
     if (!ReadFromResource(resource, buffer, length)) {
       return false;
     }
 
-    parsed++;
+    a.mParsed++;
 
     if (currentFrameTime >= aTimeThreshold)
       break;
 
     mCurrentFrame++;
     currentFrameTime += static_cast<double>(USECS_PER_S) / mFrameRate;
   }
 
@@ -224,17 +223,17 @@ bool RawReader::DecodeVideoFrame(bool &a
                                             1, // In raw video every frame is a keyframe
                                             -1,
                                             ToIntRect(mPicture));
   if (!v)
     return false;
 
   mVideoQueue.Push(v);
   mCurrentFrame++;
-  decoded++;
+  a.mDecoded++;
   currentFrameTime += USECS_PER_S / mFrameRate;
 
   return true;
 }
 
 nsRefPtr<MediaDecoderReader::SeekPromise>
 RawReader::Seek(int64_t aTime, int64_t aEndTime)
 {
--- a/dom/media/webm/IntelWebMVideoDecoder.cpp
+++ b/dom/media/webm/IntelWebMVideoDecoder.cpp
@@ -332,40 +332,39 @@ IntelWebMVideoDecoder::SkipVideoDemuxToN
 
   return true;
 }
 
 bool
 IntelWebMVideoDecoder::DecodeVideoFrame(bool& aKeyframeSkip,
                                         int64_t aTimeThreshold)
 {
-  uint32_t parsed = 0, decoded = 0;
-  AbstractMediaDecoder::AutoNotifyDecoded autoNotify(mReader->GetDecoder(), parsed, decoded);
+  AbstractMediaDecoder::AutoNotifyDecoded a(mReader->GetDecoder());
 
   MOZ_ASSERT(mPlatform && mReader->GetDecoder());
 
   if (aKeyframeSkip) {
-    bool ok = SkipVideoDemuxToNextKeyFrame(aTimeThreshold, parsed);
+    bool ok = SkipVideoDemuxToNextKeyFrame(aTimeThreshold, a.mParsed);
     if (!ok) {
       NS_WARNING("Failed to skip demux up to next keyframe");
       return false;
     }
     aKeyframeSkip = false;
     nsresult rv = mMediaDataDecoder->Flush();
     NS_ENSURE_SUCCESS(rv, false);
   }
 
   NS_ASSERTION(mReader->GetDecoder()->OnDecodeThread(), "Should be on decode thread.");
   bool rv = Decode();
   {
     // Report the number of "decoded" frames as the difference in the
     // mNumSamplesOutput field since the last time we were called.
     MonitorAutoLock mon(mMonitor);
     uint64_t delta = mNumSamplesOutput - mLastReportedNumDecodedFrames;
-    decoded = static_cast<uint32_t>(delta);
+    a.mDecoded = static_cast<uint32_t>(delta);
     mLastReportedNumDecodedFrames = mNumSamplesOutput;
   }
   return rv;
 }
 
 VP8Sample*
 IntelWebMVideoDecoder::PopSample()
 {
--- a/dom/media/webm/SoftwareWebMVideoDecoder.cpp
+++ b/dom/media/webm/SoftwareWebMVideoDecoder.cpp
@@ -74,19 +74,17 @@ bool
 SoftwareWebMVideoDecoder::DecodeVideoFrame(bool &aKeyframeSkip,
                                            int64_t aTimeThreshold)
 {
   NS_ASSERTION(mReader->GetDecoder()->OnDecodeThread(),
                "Should be on decode thread.");
 
   // Record number of frames decoded and parsed. Automatically update the
   // stats counters using the AutoNotifyDecoded stack-based class.
-  uint32_t parsed = 0, decoded = 0;
-  AbstractMediaDecoder::AutoNotifyDecoded autoNotify(mReader->GetDecoder(),
-                                                     parsed, decoded);
+  AbstractMediaDecoder::AutoNotifyDecoded a(mReader->GetDecoder());
 
   nsAutoRef<NesteggPacketHolder> holder(mReader->NextPacket(WebMReader::VIDEO));
   if (!holder) {
     return false;
   }
 
   nestegg_packet* packet = holder->mPacket;
   unsigned int track = 0;
@@ -139,33 +137,33 @@ SoftwareWebMVideoDecoder::DecodeVideoFra
     si.sz = sizeof(si);
     if (mReader->GetVideoCodec() == NESTEGG_CODEC_VP8) {
       vpx_codec_peek_stream_info(vpx_codec_vp8_dx(), data, length, &si);
     } else if (mReader->GetVideoCodec() == NESTEGG_CODEC_VP9) {
       vpx_codec_peek_stream_info(vpx_codec_vp9_dx(), data, length, &si);
     }
     if (aKeyframeSkip && (!si.is_kf || tstamp_usecs < aTimeThreshold)) {
       // Skipping to next keyframe...
-      parsed++; // Assume 1 frame per chunk.
+      a.mParsed++; // Assume 1 frame per chunk.
       continue;
     }
 
     if (aKeyframeSkip && si.is_kf) {
       aKeyframeSkip = false;
     }
 
     if (vpx_codec_decode(&mVPX, data, length, nullptr, 0)) {
       return false;
     }
 
     // If the timestamp of the video frame is less than
     // the time threshold required then it is not added
     // to the video queue and won't be displayed.
     if (tstamp_usecs < aTimeThreshold) {
-      parsed++; // Assume 1 frame per chunk.
+      a.mParsed++; // Assume 1 frame per chunk.
       continue;
     }
 
     vpx_codec_iter_t  iter = nullptr;
     vpx_image_t      *img;
 
     while ((img = vpx_codec_get_frame(&mVPX, &iter))) {
       NS_ASSERTION(img->fmt == VPX_IMG_FMT_I420, "WebM image format not I420");
@@ -213,19 +211,19 @@ SoftwareWebMVideoDecoder::DecodeVideoFra
                                                 (next_tstamp / NS_PER_USEC) - tstamp_usecs,
                                                 b,
                                                 si.is_kf,
                                                 -1,
                                                 picture);
       if (!v) {
         return false;
       }
-      parsed++;
-      decoded++;
-      NS_ASSERTION(decoded <= parsed,
+      a.mParsed++;
+      a.mDecoded++;
+      NS_ASSERTION(a.mDecoded <= a.mParsed,
         "Expect only 1 frame per chunk per packet in WebM...");
       mReader->VideoQueue().Push(v);
     }
   }
 
   return true;
 }
 
--- a/dom/media/wmf/WMFReader.cpp
+++ b/dom/media/wmf/WMFReader.cpp
@@ -803,18 +803,17 @@ WMFReader::CreateD3DVideoFrame(IMFSample
 bool
 WMFReader::DecodeVideoFrame(bool &aKeyframeSkip,
                             int64_t aTimeThreshold)
 {
   NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
 
   // Record number of frames decoded and parsed. Automatically update the
   // stats counters using the AutoNotifyDecoded stack-based class.
-  uint32_t parsed = 0, decoded = 0;
-  AbstractMediaDecoder::AutoNotifyDecoded autoNotify(mDecoder, parsed, decoded);
+  AbstractMediaDecoder::AutoNotifyDecoded a(mDecoder);
 
   HRESULT hr;
 
   hr = mSourceReader->ReadSample(MF_SOURCE_READER_FIRST_VIDEO_STREAM,
                                  0, // control flags
                                  0, // read stream index
                                  nullptr,
                                  nullptr,
@@ -871,18 +870,18 @@ WMFReader::DecodeVideoFrame(bool &aKeyfr
   VideoData* v = nullptr;
   if (mUseHwAccel) {
     hr = CreateD3DVideoFrame(sample, timestamp, duration, offset, &v);
   } else {
     hr = CreateBasicVideoFrame(sample, timestamp, duration, offset, &v);
   }
   NS_ENSURE_TRUE(SUCCEEDED(hr) && v, false);
 
-  parsed++;
-  decoded++;
+  a.mParsed++;
+  a.mDecoded++;
   mVideoQueue.Push(v);
 
   #ifdef LOG_SAMPLE_DECODE
   DECODER_LOG("Decoded video sample timestamp=%lld duration=%lld stride=%d height=%u flags=%u",
               timestamp, duration, mVideoStride, mVideoHeight, flags);
   #endif
 
   if ((flags & MF_SOURCE_READERF_ENDOFSTREAM)) {