Bug 1423253 - Make future frames in a VideoFrameContainer black when a track is disabled. r=padenot
author Andreas Pehrson <apehrson@mozilla.com>
Fri, 22 Mar 2019 11:45:33 +0000
changeset 465658 f37f5f1fc1e7e912fbffac2fbb8572b9af654867
parent 465657 81d85b9029903a05648fd609d4f425b0add12c6c
child 465659 9019e6f0a6c3a52b9215eea8a5f499b4290c8707
push id 35744
push user apavel@mozilla.com
push date Fri, 22 Mar 2019 16:44:08 +0000
treeherder mozilla-central@e66a2b59914d
reviewers padenot
bugs 1423253
milestone 68.0a1
Bug 1423253 - Make future frames in a VideoFrameContainer black when a track is disabled. r=padenot Differential Revision: https://phabricator.services.mozilla.com/D22927
dom/media/VideoStreamTrack.cpp
--- a/dom/media/VideoStreamTrack.cpp
+++ b/dom/media/VideoStreamTrack.cpp
@@ -30,16 +30,18 @@ static bool SetImageToBlackPixel(PlanarY
   return aImage->CopyData(data);
 }
 
 class VideoOutput : public DirectMediaStreamTrackListener {
  protected:
   virtual ~VideoOutput() = default;
 
   void DropPastFrames() {
+    mMutex.AssertCurrentThreadOwns();
+
     TimeStamp now = TimeStamp::Now();
     size_t nrChunksInPast = 0;
     for (const auto& idChunkPair : mFrames) {
       const VideoChunk& chunk = idChunkPair.second();
       if (chunk.mTimeStamp > now) {
         break;
       }
       ++nrChunksInPast;
@@ -48,32 +50,34 @@ class VideoOutput : public DirectMediaSt
       // We need to keep one frame that starts in the past, because it only ends
       // when the next frame starts (which also needs to be in the past for it
       // to drop).
       mFrames.RemoveElementsAt(0, nrChunksInPast - 1);
     }
   }
 
   void SendFrames() {
+    mMutex.AssertCurrentThreadOwns();
+
     DropPastFrames();
 
     if (mFrames.IsEmpty()) {
       return;
     }
 
     // Collect any new frames produced in this iteration.
     AutoTArray<ImageContainer::NonOwningImage, 16> images;
     PrincipalHandle lastPrincipalHandle = PRINCIPAL_HANDLE_NONE;
 
     for (const auto& idChunkPair : mFrames) {
       ImageContainer::FrameID frameId = idChunkPair.first();
       const VideoChunk& chunk = idChunkPair.second();
       const VideoFrame& frame = chunk.mFrame;
       Image* image = frame.GetImage();
-      if (frame.GetForceBlack()) {
+      if (frame.GetForceBlack() || !mEnabled) {
         if (!mBlackImage) {
           RefPtr<Image> blackImage = mVideoFrameContainer->GetImageContainer()
                                          ->CreatePlanarYCbCrImage();
           if (blackImage) {
             // Sets the image to a single black pixel, which will be scaled to
             // fill the rendered size.
             if (SetImageToBlackPixel(blackImage->AsPlanarYCbCrImage())) {
               mBlackImage = blackImage;
@@ -122,16 +126,17 @@ class VideoOutput : public DirectMediaSt
       : mMutex("VideoOutput::mMutex"),
         mVideoFrameContainer(aContainer),
         mMainThread(aMainThread) {}
   void NotifyRealtimeTrackData(MediaStreamGraph* aGraph,
                                StreamTime aTrackOffset,
                                const MediaSegment& aMedia) override {
     MOZ_ASSERT(aMedia.GetType() == MediaSegment::VIDEO);
     const VideoSegment& video = static_cast<const VideoSegment&>(aMedia);
+    MutexAutoLock lock(mMutex);
     for (VideoSegment::ConstChunkIterator i(video); !i.IsEnded(); i.Next()) {
       if (!mLastFrameTime.IsNull() && i->mTimeStamp < mLastFrameTime) {
         // Time can go backwards if the source is a captured MediaDecoder and
         // it seeks, as the previously buffered frames would stretch into the
         // future. If this happens, we clear the buffered frames and start over.
         mFrames.ClearAndRetainStorage();
       }
       mFrames.AppendElement(MakePair(mVideoFrameContainer->NewFrameID(), *i));
@@ -146,22 +151,35 @@ class VideoOutput : public DirectMediaSt
     mFrames.ClearAndRetainStorage();
     mVideoFrameContainer->ClearFutureFrames();
   }
   void NotifyEnded() override {
     // Doesn't need locking by mMutex, since for the track to end, it must have
     // been ended by the source, meaning that the source won't append more data.
     mFrames.ClearAndRetainStorage();
   }
+  void NotifyEnabledStateChanged(bool aEnabled) override {
+    MutexAutoLock lock(mMutex);
+    mEnabled = aEnabled;
+    // Since mEnabled will affect whether frames are real, or black, we assign
+    // new FrameIDs whenever this changes.
+    for (auto& idChunkPair : mFrames) {
+      idChunkPair.first() = mVideoFrameContainer->NewFrameID();
+    }
+    SendFrames();
+  }
 
+  Mutex mMutex;
   TimeStamp mLastFrameTime;
   // Once the frame is forced to black, we initialize mBlackImage for use in any
   // following forced-black frames.
   RefPtr<Image> mBlackImage;
   bool mEnabled = true;
+  // This array is accessed from both the direct video thread, and the graph
+  // thread. Protected by mMutex.
   nsTArray<Pair<ImageContainer::FrameID, VideoChunk>> mFrames;
   const RefPtr<VideoFrameContainer> mVideoFrameContainer;
   const RefPtr<AbstractThread> mMainThread;
 };
 
 namespace dom {
 
 VideoStreamTrack::VideoStreamTrack(DOMMediaStream* aStream, TrackID aTrackID,